/home/lnzliplg/www/pip.tar
cmdoptions.py000064400000040132151733136150007302 0ustar00"""
shared options and groups

The principle here is to define options once, but *not* instantiate them
globally. One reason being that options with action='append' can carry state
between parses. pip parses general options twice internally, and shouldn't
pass on state. To be consistent, all options will follow this design.

"""
from __future__ import absolute_import

from functools import partial
from optparse import OptionGroup, SUPPRESS_HELP, Option
import warnings

from pip.index import (
    FormatControl, fmt_ctl_handle_mutual_exclude, fmt_ctl_no_binary,
    fmt_ctl_no_use_wheel)
from pip.models import PyPI
from pip.locations import USER_CACHE_DIR, src_prefix
from pip.utils.hashes import STRONG_HASHES


def make_option_group(group, parser):
    """Build an optparse OptionGroup from a group description.

    group  -- assumed to be dict with 'name' and 'options' keys
    parser -- an optparse Parser
    """
    result = OptionGroup(parser, group['name'])
    # Each entry is a zero-argument factory; calling it yields a fresh Option.
    for option_factory in group['options']:
        result.add_option(option_factory())
    return result


def resolve_wheel_no_use_binary(options):
    """Fold the deprecated --no-use-wheel flag into options.format_control."""
    if options.use_wheel:
        return
    fmt_ctl_no_use_wheel(options.format_control)


def check_install_build_global(options, check_options=None):
    """Disable wheels if per-setup.py call options are set.

    :param options: The OptionParser options to update.
    :param check_options: The options to check, if not supplied defaults to
        options.
    """
    if check_options is None:
        check_options = options

    names = ["build_options", "global_options", "install_options"]
    # Any of these options forces a source build, so binaries are disabled.
    if any(getattr(check_options, name, None) for name in names):
        fmt_ctl_no_binary(options.format_control)
        warnings.warn(
            'Disabling all use of wheels due to the use of --build-options '
            '/ --global-options / --install-options.', stacklevel=2)


###########
# options #
###########

# Each option below is a zero-argument factory (functools.partial of Option)
# rather than an Option instance, so every parse gets a fresh object; see the
# module docstring for the rationale (append-actions carry state).

# Trailing underscore avoids shadowing the help() builtin.
help_ = partial(
    Option,
    '-h', '--help',
    dest='help',
    action='help',
    help='Show help.')

isolated_mode = partial(
    Option,
    "--isolated",
    dest="isolated_mode",
    action="store_true",
    default=False,
    help=(
        "Run pip in an isolated mode, ignoring environment variables and user "
        "configuration."
    ),
)

require_virtualenv = partial(
    Option,
    # Run only if inside a virtualenv, bail if not.
    '--require-virtualenv', '--require-venv',
    dest='require_venv',
    action='store_true',
    default=False,
    help=SUPPRESS_HELP)

# 'count' action: repeated -v raises verbosity one level each time.
verbose = partial(
    Option,
    '-v', '--verbose',
    dest='verbose',
    action='count',
    default=0,
    help='Give more output. Option is additive, and can be used up to 3 times.'
)

version = partial(
    Option,
    '-V', '--version',
    dest='version',
    action='store_true',
    help='Show version and exit.')

quiet = partial(
    Option,
    '-q', '--quiet',
    dest='quiet',
    action='count',
    default=0,
    help=('Give less output. Option is additive, and can be used up to 3'
          ' times (corresponding to WARNING, ERROR, and CRITICAL logging'
          ' levels).')
)

log = partial(
    Option,
    "--log", "--log-file", "--local-log",
    dest="log",
    metavar="path",
    help="Path to a verbose appending log."
)

no_input = partial(
    Option,
    # Don't ask for input
    '--no-input',
    dest='no_input',
    action='store_true',
    default=False,
    help=SUPPRESS_HELP)

proxy = partial(
    Option,
    '--proxy',
    dest='proxy',
    type='str',
    default='',
    help="Specify a proxy in the form [user:passwd@]proxy.server:port.")

retries = partial(
    Option,
    '--retries',
    dest='retries',
    type='int',
    default=5,
    help="Maximum number of retries each connection should attempt "
         "(default %default times).")

timeout = partial(
    Option,
    '--timeout', '--default-timeout',
    metavar='sec',
    dest='timeout',
    type='float',
    default=15,
    help='Set the socket timeout (default %default seconds).')

# Hidden legacy option (SUPPRESS_HELP keeps it out of --help output).
default_vcs = partial(
    Option,
    # The default version control system for editables, e.g. 'svn'
    '--default-vcs',
    dest='default_vcs',
    type='str',
    default='',
    help=SUPPRESS_HELP)

skip_requirements_regex = partial(
    Option,
    # A regex to be used to skip requirements
    '--skip-requirements-regex',
    dest='skip_requirements_regex',
    type='str',
    default='',
    help=SUPPRESS_HELP)


def exists_action():
    """Return a fresh --exists-action option (append action keeps per-parse
    state, hence a factory instead of a shared partial)."""
    return Option(
        '--exists-action',
        dest='exists_action',
        metavar='action',
        type='choice',
        action='append',
        choices=['s', 'i', 'w', 'b', 'a'],
        default=[],
        help="Default action when a path already exists: "
        "(s)witch, (i)gnore, (w)ipe, (b)ackup, (a)bort.")


cert = partial(
    Option,
    '--cert',
    dest='cert',
    type='str',
    metavar='path',
    help="Path to alternate CA bundle.")

client_cert = partial(
    Option,
    '--client-cert',
    dest='client_cert',
    type='str',
    default=None,
    metavar='path',
    help="Path to SSL client certificate, a single file containing the "
         "private key and the certificate in PEM format.")

# Default comes from pip.models.PyPI (the simple-index base URL).
index_url = partial(
    Option,
    '-i', '--index-url', '--pypi-url',
    dest='index_url',
    metavar='URL',
    default=PyPI.simple_url,
    help="Base URL of Python Package Index (default %default). "
         "This should point to a repository compliant with PEP 503 "
         "(the simple repository API) or a local directory laid out "
         "in the same format.")


def extra_index_url():
    """Return a fresh --extra-index-url option (append action, so a new
    instance is needed per parse)."""
    help_text = ("Extra URLs of package indexes to use in addition to "
                 "--index-url. Should follow the same rules as "
                 "--index-url.")
    return Option(
        '--extra-index-url',
        metavar='URL',
        dest='extra_index_urls',
        default=[],
        action='append',
        help=help_text,
    )


# Disable index lookups entirely; packages then come only from --find-links.
no_index = partial(
    Option,
    '--no-index',
    dest='no_index',
    action='store_true',
    default=False,
    help='Ignore package index (only looking at --find-links URLs instead).')


def find_links():
    """Return a fresh -f/--find-links option (append action keeps state)."""
    return Option(
        '-f', '--find-links',
        metavar='url',
        dest='find_links',
        default=[],
        action='append',
        help="If a url or path to an html file, then parse for links to "
             "archives. If a local path or file:// url that's a directory, "
             "then look for archives in the directory listing.")


def allow_external():
    """Return a fresh, hidden --allow-external option (deprecated)."""
    return Option(
        "--allow-external",
        metavar="PACKAGE",
        dest="allow_external",
        default=[],
        action="append",
        help=SUPPRESS_HELP,
    )


# Deprecated hidden flag, kept for backwards compatibility.
allow_all_external = partial(
    Option,
    "--allow-all-external",
    dest="allow_all_external",
    action="store_true",
    default=False,
    help=SUPPRESS_HELP,
)


def trusted_host():
    """Return a fresh --trusted-host option (append action keeps state)."""
    return Option(
        "--trusted-host",
        action="append",
        dest="trusted_hosts",
        default=[],
        metavar="HOSTNAME",
        help="Mark this host as trusted, even though it does not have valid "
             "or any HTTPS.",
    )


# Remove after 7.0
# NOTE(review): intentionally shares dest 'allow_all_external' with
# --allow-all-external so the last flag on the command line wins.
no_allow_external = partial(
    Option,
    "--no-allow-external",
    dest="allow_all_external",
    action="store_false",
    default=False,
    help=SUPPRESS_HELP,
)


# Remove --allow-insecure after 7.0
def allow_unsafe():
    """Return a fresh, hidden --allow-unverified/--allow-insecure option
    (deprecated; --allow-insecure goes away after 7.0)."""
    return Option(
        "--allow-unverified", "--allow-insecure",
        action="append",
        dest="allow_unverified",
        metavar="PACKAGE",
        default=[],
        help=SUPPRESS_HELP,
    )

# Remove after 7.0
# NOTE(review): dest 'allow_all_insecure' does not match allow_unsafe's
# 'allow_unverified' — presumably deliberate for this deprecated pair, but
# worth confirming against the option-handling code.
no_allow_unsafe = partial(
    Option,
    "--no-allow-insecure",
    dest="allow_all_insecure",
    action="store_false",
    default=False,
    help=SUPPRESS_HELP
)

# Remove after 1.5
process_dependency_links = partial(
    Option,
    "--process-dependency-links",
    dest="process_dependency_links",
    action="store_true",
    default=False,
    help="Enable the processing of dependency links.",
)


def constraints():
    """Return a fresh -c/--constraint option (append action keeps state)."""
    kwargs = {
        'dest': 'constraints',
        'action': 'append',
        'default': [],
        'metavar': 'file',
        'help': 'Constrain versions using the given constraints file. '
                'This option can be used multiple times.',
    }
    return Option('-c', '--constraint', **kwargs)


def requirements():
    """Return a fresh -r/--requirement option (append action keeps state)."""
    return Option(
        '-r', '--requirement',
        metavar='file',
        dest='requirements',
        default=[],
        action='append',
        help='Install from the given requirements file. '
        'This option can be used multiple times.')


def editable():
    """Return a fresh -e/--editable option (append action keeps state)."""
    help_text = ('Install a project in editable mode (i.e. setuptools '
                 '"develop mode") from a local project path or a VCS url.')
    return Option(
        '-e', '--editable',
        metavar='path/url',
        dest='editables',
        default=[],
        action='append',
        help=help_text,
    )

src = partial(
    Option,
    '--src', '--source', '--source-dir', '--source-directory',
    dest='src_dir',
    metavar='dir',
    default=src_prefix,
    help='Directory to check out editable projects into. '
    'The default in a virtualenv is "<venv path>/src". '
    'The default for global installs is "<current dir>/src".'
)

# XXX: deprecated, remove in 9.0
use_wheel = partial(
    Option,
    '--use-wheel',
    dest='use_wheel',
    action='store_true',
    default=True,
    help=SUPPRESS_HELP,
)

# XXX: deprecated, remove in 9.0
# Shares dest 'use_wheel' with --use-wheel; store_false flips it off.
no_use_wheel = partial(
    Option,
    '--no-use-wheel',
    dest='use_wheel',
    action='store_false',
    default=True,
    help=('Do not Find and prefer wheel archives when searching indexes and '
          'find-links locations. DEPRECATED in favour of --no-binary.'),
)


def _get_format_control(values, option):
    """Fetch the format_control object stored under the option's dest."""
    dest = option.dest
    return getattr(values, dest)


def _handle_no_binary(option, opt_str, value, parser):
    """optparse callback: add *value* to the no_binary set, removing any
    conflicting entries from only_binary."""
    fmt_ctl = getattr(parser.values, option.dest)
    fmt_ctl_handle_mutual_exclude(
        value, fmt_ctl.no_binary, fmt_ctl.only_binary)


def _handle_only_binary(option, opt_str, value, parser):
    """optparse callback: add *value* to the only_binary set, removing any
    conflicting entries from no_binary."""
    fmt_ctl = getattr(parser.values, option.dest)
    fmt_ctl_handle_mutual_exclude(
        value, fmt_ctl.only_binary, fmt_ctl.no_binary)


def no_binary():
    """Return a fresh --no-binary option, parsed through the shared
    mutual-exclusion callback into a FormatControl."""
    empty_control = FormatControl(set(), set())
    return Option(
        "--no-binary", dest="format_control", type="str",
        action="callback", callback=_handle_no_binary,
        default=empty_control,
        help="Do not use binary packages. Can be supplied multiple times, and "
             "each time adds to the existing value. Accepts either :all: to "
             "disable all binary packages, :none: to empty the set, or one or "
             "more package names with commas between them. Note that some "
             "packages are tricky to compile and may fail to install when "
             "this option is used on them.")


def only_binary():
    """Return a fresh --only-binary option, parsed through the shared
    mutual-exclusion callback into a FormatControl."""
    empty_control = FormatControl(set(), set())
    return Option(
        "--only-binary", dest="format_control", type="str",
        action="callback", callback=_handle_only_binary,
        default=empty_control,
        help="Do not use source packages. Can be supplied multiple times, and "
             "each time adds to the existing value. Accepts either :all: to "
             "disable all source packages, :none: to empty the set, or one or "
             "more package names with commas between them. Packages without "
             "binary distributions will fail to install when this option is "
             "used on them.")


cache_dir = partial(
    Option,
    "--cache-dir",
    dest="cache_dir",
    default=USER_CACHE_DIR,
    metavar="dir",
    help="Store the cache data in <dir>."
)

# Shares dest 'cache_dir' with --cache-dir; store_false sets it to False,
# which downstream code treats as "caching disabled".
no_cache = partial(
    Option,
    "--no-cache-dir",
    dest="cache_dir",
    action="store_false",
    help="Disable the cache.",
)

no_deps = partial(
    Option,
    '--no-deps', '--no-dependencies',
    dest='ignore_dependencies',
    action='store_true',
    default=False,
    help="Don't install package dependencies.")

build_dir = partial(
    Option,
    '-b', '--build', '--build-dir', '--build-directory',
    dest='build_dir',
    metavar='dir',
    help='Directory to unpack packages into and build in.'
)

ignore_requires_python = partial(
    Option,
    '--ignore-requires-python',
    dest='ignore_requires_python',
    action='store_true',
    help='Ignore the Requires-Python information.')

install_options = partial(
    Option,
    '--install-option',
    dest='install_options',
    action='append',
    metavar='options',
    help="Extra arguments to be supplied to the setup.py install "
         "command (use like --install-option=\"--install-scripts=/usr/local/"
         "bin\"). Use multiple --install-option options to pass multiple "
         "options to setup.py install. If you are using an option with a "
         "directory path, be sure to use absolute path.")

global_options = partial(
    Option,
    '--global-option',
    dest='global_options',
    action='append',
    metavar='options',
    help="Extra global options to be supplied to the setup.py "
         "call before the install command.")

# No explicit dest: optparse derives 'no_clean' from the option string.
no_clean = partial(
    Option,
    '--no-clean',
    action='store_true',
    default=False,
    help="Don't clean up build directories.")

# No explicit dest: optparse derives 'pre' from the option string.
pre = partial(
    Option,
    '--pre',
    action='store_true',
    default=False,
    help="Include pre-release and development versions. By default, "
         "pip only finds stable versions.")

disable_pip_version_check = partial(
    Option,
    "--disable-pip-version-check",
    dest="disable_pip_version_check",
    action="store_true",
    default=False,
    help="Don't periodically check PyPI to determine whether a new version "
         "of pip is available for download. Implied with --no-index.")

# Deprecated, Remove later
always_unzip = partial(
    Option,
    '-Z', '--always-unzip',
    dest='always_unzip',
    action='store_true',
    help=SUPPRESS_HELP,
)


def _merge_hash(option, opt_str, value, parser):
    """Given a value spelled "algo:digest", append the digest to a list
    pointed to in a dict by the algo name."""
    if not parser.values.hashes:
        parser.values.hashes = {}
    # partition() splits at the first colon; no separator means a bad spec.
    # parser.error() exits, so the checks below only run on valid input.
    algo, sep, digest = value.partition(':')
    if not sep:
        parser.error('Arguments to %s must be a hash name '
                     'followed by a value, like --hash=sha256:abcde...' %
                     opt_str)
    if algo not in STRONG_HASHES:
        parser.error('Allowed hash algorithms for %s are %s.' %
                     (opt_str, ', '.join(STRONG_HASHES)))
    parser.values.hashes.setdefault(algo, []).append(digest)


# NOTE: shadows the hash() builtin at module level; callers use it only as
# an option factory.
hash = partial(
    Option,
    '--hash',
    # Hash values eventually end up in InstallRequirement.hashes due to
    # __dict__ copying in process_line().
    dest='hashes',
    action='callback',
    callback=_merge_hash,
    type='string',
    help="Verify that the package's archive matches this "
         'hash before installing. Example: --hash=sha256:abcdef...')


require_hashes = partial(
    Option,
    '--require-hashes',
    dest='require_hashes',
    action='store_true',
    default=False,
    help='Require a hash to check each requirement against, for '
         'repeatable installs. This option is implied when any package in a '
         'requirements file has a --hash option.')


##########
# groups #
##########

# Group descriptions consumed by make_option_group(); 'options' entries are
# the factories defined above, called once per parse.
general_group = {
    'name': 'General Options',
    'options': [
        help_,
        isolated_mode,
        require_virtualenv,
        verbose,
        version,
        quiet,
        log,
        no_input,
        proxy,
        retries,
        timeout,
        default_vcs,
        skip_requirements_regex,
        exists_action,
        trusted_host,
        cert,
        client_cert,
        cache_dir,
        no_cache,
        disable_pip_version_check,
    ]
}

non_deprecated_index_group = {
    'name': 'Package Index Options',
    'options': [
        index_url,
        extra_index_url,
        no_index,
        find_links,
        process_dependency_links,
    ]
}

# Superset of the group above, extended with the deprecated external/unsafe
# options for commands that still accept them.
index_group = {
    'name': 'Package Index Options (including deprecated options)',
    'options': non_deprecated_index_group['options'] + [
        allow_external,
        allow_all_external,
        no_allow_external,
        allow_unsafe,
        no_allow_unsafe,
    ]
}
__pycache__/index.cpython-36.opt-1.pyc000064400000074036151733136150013467 0ustar003

�PfW��@sdZddlmZddlZddlZddlmZddlZddlZddl	Z	ddl
Z
ddlZddlZddl
Z
ddlmZddlmZddlmZddlmZmZmZmZmZdd	lmZdd
lmZddlm Z ddl!m"Z"m#Z#m$Z$m%Z%dd
l&m'Z'm(Z(m)Z)m*Z*ddl+m,Z,m-Z-ddl.m/Z/ddl0m1Z1m2Z2m3Z3ddl4mZ5ddl6m7Z7ddl8m9Z9ddl:m;Z;ddl<m=Z=dddgZ>d3d4d5d6d7d8gZ?ej@eA�ZBGdd �d eC�ZDGd!d�deC�ZEe
jFd"e
jG�fd#d$�ZHGd%d&�d&eC�ZIGd'd(�d(eC�ZJedd)�ZKd*d�ZLd+d,�ZMd-d.�ZNd/d0�ZOed1d2�ZPdS)9z!Routines related to PyPI, indexes�)�absolute_importN)�
namedtuple)�parse)�request)�	ipaddress)�cached_property�splitext�normalize_path�ARCHIVE_EXTENSIONS�SUPPORTED_EXTENSIONS)�RemovedInPip10Warning)�
indent_log)�check_requires_python)�DistributionNotFound�BestVersionAlreadyInstalled�InvalidWheelFilename�UnsupportedWheel)�HAS_TLS�is_url�path_to_url�url_to_path)�Wheel�	wheel_ext)�
get_supported)�html5lib�requests�six)�canonicalize_name)�
specifiers)�SSLError)�unescape�
FormatControl�fmt_ctl_handle_mutual_exclude�
PackageFinder�https�*�	localhost�127.0.0.0/8�::1/128�file�sshc@s\eZdZdd�Zdd�Zdd�Zdd�Zd	d
�Zdd�Zd
d�Z	dd�Z
dd�Zdd�ZdS)�InstallationCandidatecCs,||_t|�|_||_|j|j|jf|_dS)N)�project�
parse_version�version�location�_key)�selfr,r.r/�r2�/usr/lib/python3.6/index.py�__init__>s
zInstallationCandidate.__init__cCsdj|j|j|j�S)Nz,<InstallationCandidate({0!r}, {1!r}, {2!r})>)�formatr,r.r/)r1r2r2r3�__repr__DszInstallationCandidate.__repr__cCs
t|j�S)N)�hashr0)r1r2r2r3�__hash__IszInstallationCandidate.__hash__cCs|j|dd��S)NcSs||kS)Nr2)�s�or2r2r3�<lambda>Msz.InstallationCandidate.__lt__.<locals>.<lambda>)�_compare)r1�otherr2r2r3�__lt__LszInstallationCandidate.__lt__cCs|j|dd��S)NcSs||kS)Nr2)r9r:r2r2r3r;Psz.InstallationCandidate.__le__.<locals>.<lambda>)r<)r1r=r2r2r3�__le__OszInstallationCandidate.__le__cCs|j|dd��S)NcSs||kS)Nr2)r9r:r2r2r3r;Ssz.InstallationCandidate.__eq__.<locals>.<lambda>)r<)r1r=r2r2r3�__eq__RszInstallationCandidate.__eq__cCs|j|dd��S)NcSs||kS)Nr2)r9r:r2r2r3r;Vsz.InstallationCandidate.__ge__.<locals>.<lambda>)r<)r1r=r2r2r3�__ge__UszInstallationCandidate.__ge__cCs|j|dd��S)NcSs||kS)Nr2)r9r:r2r2r3r;Ysz.InstallationCandidate.__gt__.<locals>.<lambda>)r<)r1r=r2r2r3�__gt__XszInstallationCandidate.__gt__cCs|j|dd��S)NcSs||kS)Nr2)r9r:r2r2r3r;\sz.InstallationCandidate.__ne__.<locals>.<lambda>)r<)r1r=r2r2r3�__ne__[szInstallationCandidate.__ne__cCst|t�stS||j|j�S)N)�
isinstancer+�NotImplementedr0)r1r=�methodr2r2r3r<^s
zInstallationCandidate._compareN)
�__name__�
__module__�__qualname__r4r6r8r>r?r@rArBrCr<r2r2r2r3r+<sr+c	@s�eZdZdZd!dd�Zdd�Zed"dd	��Zd
d�Zdd
�Z	dd�Z
dd�Zdd�Zdd�Z
ejd�Zdd�Zdd�Zdd�Zdd�Zdd �ZdS)#r#z�This finds packages.

    This is meant to match easy_install's technique for looking for
    packages, by reading pages and looking for appropriate links.
    FNcCs�|dkrtd��g|_x:|D]2}|jd�rBt|�}
tjj|
�rB|
}|jj|�qW||_g|_	t
�|_|pvtt
�t
��|_
dd�|r�|ngD�|_||_||_||_t|	||
|d�|_ts�x8tj|j|j�D]$}tj|�}|jdkr�tjd�Pq�WdS)	a�Create a PackageFinder.

        :param format_control: A FormatControl object or None. Used to control
            the selection of source packages / binary packages when consulting
            the index and links.
        :param platform: A string or None. If None, searches for packages
            that are supported by the current system. Otherwise, will find
            packages that can be built on the platform passed in. These
            packages will only be downloaded for distribution: they will
            not be built locally.
        :param versions: A list of strings or None. This is passed directly
            to pep425tags.py in the get_supported() method.
        :param abi: A string or None. This is passed directly
            to pep425tags.py in the get_supported() method.
        :param implementation: A string or None. This is passed directly
            to pep425tags.py in the get_supported() method.
        Nz>PackageFinder() missing 1 required keyword argument: 'session'�~cSsg|]}d|df�qS)r%r2)�.0�hostr2r2r3�
<listcomp>�sz*PackageFinder.__init__.<locals>.<listcomp>)�versions�platform�abi�implr$zipip is configured with locations that require TLS/SSL, however the ssl module in Python is not available.)�	TypeError�
find_links�
startswithr	�os�path�exists�append�
index_urls�dependency_links�set�logged_linksr!�format_control�secure_origins�allow_all_prereleases�process_dependency_links�sessionr�
valid_tagsr�	itertools�chain�urllib_parse�urlparse�scheme�logger�warning)r1rSrYr_Z
trusted_hostsr`rar]rOrNrP�implementation�linkZnew_link�parsedr2r2r3r4ls>	




zPackageFinder.__init__cCs"|jrtjdt�|jj|�dS)NzXDependency Links processing has been deprecated and will be removed in a future release.)r`�warnings�warnrrZ�extend)r1�linksr2r2r3�add_dependency_links�s
z"PackageFinder.add_dependency_linkscs�g�g���fdd�}x�|D]�}tjj|�}|jd�}|s>|r�|rH|}nt|�}tjj|�r�|r�tjj|�}x4tj|�D]}|tjj||��qxWq�|rƈj	|�q�tjj
|�r�||�q�tjd|�qt
|�r܈j	|�qtjd|�qW��fS)zt
        Sort locations into "files" (archives) and "urls", and return
        a pair of lists (files,urls)
        cs8t|�}tj|dd�ddkr*�j|�n
�j|�dS)NF)�strictrz	text/html)r�	mimetypesZ
guess_typerX)rV�url)�files�urlsr2r3�	sort_path�sz0PackageFinder._sort_locations.<locals>.sort_pathzfile:z:Url '%s' is ignored: it is neither a file nor a directory.zQUrl '%s' is ignored. It is either a non-existing path or lacks a specific scheme.)rUrVrWrTr�isdir�realpath�listdir�joinrX�isfilerhrir)�	locations�
expand_dirrwrtZ
is_local_pathZis_file_urlrV�itemr2)rurvr3�_sort_locations�s8



zPackageFinder._sort_locationscCsXt|j�}|jjrHt|jj�}|j|j�s8td|j��|j|j�}n|}|j	|fS)a[
        Function used to generate link sort key for link tuples.
        The greater the return value, the more preferred it is.
        If not finding wheels, then sorted by version only.
        If finding wheels, then the sort order is by version, then:
          1. existing installs
          2. wheels ordered via Wheel.support_index_min(self.valid_tags)
          3. source archives
        Note: it was considered to embed this logic into the Link
              comparison operators, but then different sdist links
              with the same version, would have to be considered equal
        zB%s is not a supported wheel for this platform. It can't be sorted.)
�lenrbr/�is_wheelr�filename�	supportedrZsupport_index_minr.)r1�	candidateZsupport_num�wheelZprir2r2r3�_candidate_sort_key�s

z!PackageFinder._candidate_sort_keyc	Csltjt|��}|j|j|jf}|djdd�d
}�x t|jD�]}||dkr`|ddkr`q@yht	j
t|dtj
�s�|ddkr�|dn|djd��}t	jt|dtj
�r�|dn|djd��}WnJtk
�r|d�r|dj�|dj�k�r|ddk�rw@YnX||k�r q@|d|dk�rP|ddk�rP|ddk	�rPq@dSW|jd|j|j�d	S)Nr�+�r%�utf8�Tz�The repository located at %s is not a trusted or secure host and is being ignored. If this repository is available via HTTPS it is recommended to use HTTPS instead, otherwise you may silence this warning and allow it anyways with '--trusted-host %s'.F���)rerf�strrgZhostnameZport�rsplit�SECURE_ORIGINSr^rZ
ip_addressrDrZ	text_type�decodeZ
ip_network�
ValueError�lowerri)	r1rhr/rl�originZprotocolZ
secure_originZaddrZnetworkr2r2r3�_validate_secure_origins>

z%PackageFinder._validate_secure_origincs �fdd���fdd�|jD�S)z�Returns the locations found via self.index_urls

        Checks the url_name on the main (first in the list) index and
        use this url_name to produce all locations
        cs,tj|tjt����}|jd�s(|d}|S)N�/)�	posixpathr{reZquoter�endswith)rt�loc)�project_namer2r3�mkurl_pypi_urlhs
z?PackageFinder._get_index_urls_locations.<locals>.mkurl_pypi_urlcsg|]}�|��qSr2r2)rKrt)r�r2r3rMusz;PackageFinder._get_index_urls_locations.<locals>.<listcomp>)rY)r1r�r2)r�r�r3�_get_index_urls_locationsas
z'PackageFinder._get_index_urls_locationscs��j|�}�j|�\}}�j�jdd�\}}�j�j�\}}dd�tj|||�D�}	�fdd�tjdd�|D�dd�|D�d	d�|D��D�}
tjd
t|
�|�x|
D]}tjd|�q�Wt	|�}t
�j|�}
t|||
�}�j
dd��jD�|�}g}xJ�j|
|�D]:}tjd
|j�t��|j�j
|j|��WdQRX�qW�j
dd��jD�|�}|�r|tjddjdd�|D����j
|	|�}|�r�|jdd�tjddjdd�|D���||||S)aFind all available InstallationCandidate for project_name

        This checks index_urls, find_links and dependency_links.
        All versions found are returned as an InstallationCandidate list.

        See _link_package_versions for details on which files are accepted
        T)r~css|]}t|�VqdS)N)�Link)rKrtr2r2r3�	<genexpr>�sz4PackageFinder.find_all_candidates.<locals>.<genexpr>csg|]}�jt|�r|�qSr2)r�rh)rKrk)r1r2r3rM�sz5PackageFinder.find_all_candidates.<locals>.<listcomp>css|]}t|�VqdS)N)r�)rKrtr2r2r3r��scss|]}t|�VqdS)N)r�)rKrtr2r2r3r��scss|]}t|�VqdS)N)r�)rKrtr2r2r3r��sz,%d location(s) to search for versions of %s:z* %scss|]}t|d�VqdS)z-fN)r�)rKrtr2r2r3r��szAnalyzing links from page %sNcss|]}t|�VqdS)N)r�)rKrtr2r2r3r��szdependency_links found: %sz, cSsg|]}|jj�qSr2)r/rt)rKr.r2r2r3rM�s)�reversezLocal files found: %scSsg|]}t|jj��qSr2)rr/rt)rKr�r2r2r3rM�s)r�r�rSrZrcrdrh�debugr�r�fmt_ctl_formatsr]�Search�_package_versions�
_get_pagesrtr
rorpr{�sort)r1r�Zindex_locationsZindex_file_locZ
index_url_locZfl_file_locZ
fl_url_locZdep_file_locZdep_url_locZfile_locationsZ
url_locationsr/�canonical_name�formats�searchZfind_links_versionsZ
page_versions�pageZdependency_versionsZ
file_versionsr2)r1r3�find_all_candidateswsX


 
z!PackageFinder.find_all_candidatesc
s�|j|j�}t|jjdd�|D�|jr,|jndd����fdd�|D�}|r�t||jd�}t|j	dd�r�d	d�|D�}t|�r�t||jd�}q�d
j
|j|j|j	�}|j	j
r�|dj
|j	j
�7}tj|�nd}|jdk	r�t|jj�}nd}|dko�|dk�r0tjd|d
jttdd�|D��td���td|��d}	|�rT|dk�sP|j|k�rTd}	|�r�|dk	�r�|	�rztjd|�ntjd||j�dS|	�r�tjd|d
jt�td���p�d�t�tjd|jd
jt�td���|j	S)z�Try to find a Link matching req

        Expects req, an InstallRequirement and upgrade, a boolean
        Returns a Link if found,
        Raises DistributionNotFound or BestVersionAlreadyInstalled otherwise
        cSsg|]}t|j��qSr2)r�r.)rK�cr2r2r3rM�sz2PackageFinder.find_requirement.<locals>.<listcomp>N)Zprereleasescsg|]}t|j��kr|�qSr2)r�r.)rKr�)�compatible_versionsr2r3rM�s)�key�yankedFcSsg|]}t|jdd�s|�qS)r�F)�getattrr/)rKr�r2r2r3rM�sznWARNING: The candidate selected for download or install is a yanked version: '{}' candidate (version {} at {})z
Reason for being yanked: {}zNCould not find a version that satisfies the requirement %s (from versions: %s)z, css|]}t|j�VqdS)N)r�r.)rKr�r2r2r3r�sz1PackageFinder.find_requirement.<locals>.<genexpr>z%No matching distribution found for %sTzLExisting installed version (%s) is most up-to-date and satisfies requirementzUExisting installed version (%s) satisfies requirement (most up-to-date version is %s)z=Installed version (%s) is most up-to-date (past versions: %s)Znonez)Using version %s (newest of versions: %s))r��namer[Z	specifier�filterr_�maxr�r�r/r5r,r.�
yanked_reasonrhriZsatisfied_byr-Zcriticalr{�sortedrr�r)
r1ZreqZupgradeZall_candidatesZapplicable_candidatesZbest_candidateZnonyanked_candidatesZwarning_messageZinstalled_versionZbest_installedr2)r�r3�find_requirement�sx



zPackageFinder.find_requirementccsFt�}x:|D]2}||krq|j|�|j|�}|dkr8q|VqWdS)zp
        Yields (page, page_url) from the given locations, skipping
        locations that have errors.
        N)r[�add�	_get_page)r1r}r��seenr/r�r2r2r3r�Bs


zPackageFinder._get_pagesz-py([123]\.?[0-9]?)$cCsTgg}}t�}x:|D]2}||kr|j|�|jr>|j|�q|j|�qW||S)z�
        Returns elements of links in order, non-egg links first, egg links
        second, while eliminating duplicates
        )r[r��egg_fragmentrX)r1rpZeggsZno_eggsr�rkr2r2r3�_sort_linksUs


zPackageFinder._sort_linkscCs:g}x0|j|�D]"}|j||�}|dk	r|j|�qW|S)N)r��_link_package_versionsrX)r1rpr��resultrk�vr2r2r3r�eszPackageFinder._package_versionscCs(||jkr$tjd||�|jj|�dS)NzSkipping link %s; %s)r\rhr�r�)r1rk�reasonr2r2r3�_log_skipped_linkms
zPackageFinder._log_skipped_linkc
CsJd}|jr|j}|j}�n|j�\}}|s:|j|d�dS|tkrV|j|d|�dSd|jkr~|tkr~|j|d|j�dSd|jkr�|dkr�|j|d�dS|tk�r&yt	|j
�}Wn tk
r�|j|d	�dSXt|j
�|jk�r|j|d
|j�dS|j|j��s |j|d�dS|j}d|jk�rR|tk�rR|j|d
|j�dS|�sft||j|�}|dk�r�|j|d
|j�dS|jj|�}|�r�|d|j��}|jd�}|tjdd�k�r�|j|d�dSyt|j�}	Wn.tjk
�rtjd|j
|j�d}	YnX|	�s.tjd||j�dStjd||�t|j||�S)z'Return an InstallationCandidate or NoneNz
not a filezunsupported archive format: %s�binaryzNo binaries permitted for %sZmacosx10z.zipzmacosx10 onezinvalid wheel filenamezwrong project name (not %s)z%it is not compatible with this Python�sourcezNo sources permitted for %sr��zPython version is incorrectz3Package %s has an invalid Requires-Python entry: %sTz_The package %s is incompatible with the pythonversion in use. Acceptable python versions are:%szFound link %s, version: %s)r��extrr�rr�rZsuppliedrVrr�rrr�Z	canonicalr�rbr.�egg_info_matches�_py_version_rer��start�group�sysr�requires_pythonrZInvalidSpecifierrhr�r+)
r1rkr�r.�egg_infor�r��match�
py_versionZsupport_this_pythonr2r2r3r�rs�





z$PackageFinder._link_package_versionscCstj||jd�S)N)ra)�HTMLPage�get_pagera)r1rkr2r2r3r��szPackageFinder._get_page)	FNFNNNNNN)F)rGrHrI�__doc__r4rq�staticmethodr�r�r�r�r�r�r��re�compiler�r�r�r�r�r�r2r2r2r3r#es(
Q
1GSx
Mz([a-z0-9_.]+)-([a-z0-9_.!+-]+)cCs�|j|�}|stjd|�dS|dkrB|jd�}||jd�d�S|jd�j�}|jdd�}|j�d}|j|�r�|jd�t|�d�SdSdS)axPull the version part out of a string.

    :param egg_info: The string to parse. E.g. foo-2.1
    :param search_name: The name of the package this belongs to. None to
        infer the name. Note that this cannot unambiguously parse strings
        like foo-2-2 which might be foo, 2-2 or foo-2, 2.
    :param link: The link the string came from, for logging on failure.
    z%Could not parse version from link: %sNr�-�_)	r�rhr�r��indexr��replacerTr�)r�Zsearch_namerkZ_egg_info_rer�Z
full_matchr�Zlook_forr2r2r3r��s


r�c@sxeZdZdZddd�Zdd�Zeddd	��Zedd
d��Z	edd
��Z
edd��Ze
dd��Zejdej�Zdd�ZdS)r�z'Represents one page, along with its URLNcCs\d}|r2d|kr2tj|d�\}}d|kr2|d}||_tj|j|dd�|_||_||_dS)NzContent-Type�charsetF)Ztransport_encodingZnamespaceHTMLElements)�cgiZparse_header�contentrrrlrt�headers)r1r�rtr��encoding�content_type�paramsr2r2r3r4�s
zHTMLPage.__init__cCs|jS)N)rt)r1r2r2r3�__str__�szHTMLPage.__str__TcCsl|dkrtd��|j}|jdd�d}ddlm}x>|jD]4}|j�j|�r:|t|�dkr:t	j
d||�dSq:W�y"|r�|j}xHtD]@}|j
|�r�|j||d�}	|	j�jd	�r�Pq�t	j
d
||	�dSq�Wt	j
d|�tj|�\}}
}}}
}|dk�r6tjjtj|���r6|j
d
��s|d
7}tj|d�}t	j
d|�|j|d	dd�d�}|j�|jjdd�}	|	j�jd	��s�t	j
d
||	�dS||j|j|j�}Wn�tjk
�r�}z|j|||�WYdd}~Xn�tk
�r}z"d|}|j|||t	jd�WYdd}~Xn`tj k
�r>}z|j|d||�WYdd}~Xn*tj!k
�rb|j|d|�YnX|SdS)Nz9get_page() missing 1 required keyword argument: 'session'�#r�r)�
VcsSupportz+:zCannot look at %s URL %s)raz	text/htmlz,Skipping page %s because of Content-Type: %szGetting page %sr)r�z
index.htmlz# file: URL is directory, getting %szmax-age=600)ZAcceptz
Cache-Control)r�zContent-Type�unknownz6There was a problem confirming the ssl certificate: %s)�methzconnection error: %sz	timed out)"rRrt�split�pip.vcsr�Zschemesr�rTr�rhr�r�r
r��_get_content_typererfrUrVrx�urllib_requestZurl2pathname�urljoin�get�raise_for_statusr�r�rZ	HTTPError�_handle_failr�info�ConnectionErrorZTimeout)�clsrkZ
skip_archivesrartr�rgr�Zbad_extr��netlocrVr��query�fragment�respZinst�excr�r2r2r3r��sp



$"zHTMLPage.get_pagecCs|dkrtj}|d||�dS)Nz%Could not fetch URL %s: %s - skipping)rhr�)rkr�rtr�r2r2r3r�NszHTMLPage._handle_failcCsDtj|�\}}}}}|dkr dS|j|dd�}|j�|jjdd�S)z;Get the Content-Type of the given url, using a HEAD request�httpr$�T)Zallow_redirectszContent-Type)r�r$)re�urlsplit�headr�r�r�)rtrargr�rVr�r�r�r2r2r3r�UszHTMLPage._get_content_typecCs@dd�|jjd�D�}|r6|djd�r6|djd�S|jSdS)NcSsg|]}|jd�dk	r|�qS)�hrefN)r�)rK�xr2r2r3rMfsz%HTMLPage.base_url.<locals>.<listcomp>z.//baserr�)rl�findallr�rt)r1�basesr2r2r3�base_urlcszHTMLPage.base_urlccs�x�|jjd�D]v}|jd�r|jd�}|jtj|j|��}|jd�}|rPt|�nd}|jddd�}|dk	rrt|�}t||||d�VqWdS)zYields all links in the pagez.//ar�zdata-requires-pythonNzdata-yanked)�default)r�r�)	rlr�r��
clean_linkrer�r�r r�)r1Zanchorr�rtZ	pyrequirer�r2r2r3rpns


zHTMLPage.linksz[^a-z0-9$&+,/:;=?@.#%_\\|-]cCs|jjdd�|�S)z�Makes sure a link is fully encoded.  That is, if a ' ' shows up in
        the link, it will be rewritten to %20 (while not over-quoting
        % or other characters).cSsdt|jd��S)Nz%%%2xr)�ordr�)r�r2r2r3r;�sz%HTMLPage.clean_link.<locals>.<lambda>)�	_clean_re�sub)r1rtr2r2r3r��szHTMLPage.clean_link)N)TN)N)rGrHrIr�r4r��classmethodr�r�r�r�rr��propertyrpr�r��Ir�r�r2r2r2r3r��s
Ur�c@s eZdZd5dd�Zdd�Zdd�Zdd	�Zd
d�Zdd
�Zdd�Z	dd�Z
dd�Zdd�Ze
dd��Ze
dd��Ze
dd��Ze
dd��Zdd�Ze
d d!��Ze
d"d#��Zejd$�Ze
d%d&��Zejd'�Ze
d(d)��Zejd*�Ze
d+d,��Ze
d-d.��Ze
d/d0��Ze
d1d2��Ze
d3d4��Z dS)6r�NcCs@|jd�rt|�}||_||_|r&|nd|_||_|dk	|_dS)a�
        Object representing a parsed link from https://pypi.python.org/simple/*

        url:
            url of the resource pointed to (href of the link)
        comes_from:
            instance of HTMLPage where the link was found, or string.
        requires_python:
            String containing the `Requires-Python` metadata field, specified
            in PEP 345. This may be specified by a data-requires-python
            attribute in the HTML link tag, as described in PEP 503.
        z\\N)rTrrt�
comes_fromr�r�r�)r1rtrr�r�r2r2r3r4�s
z
Link.__init__cCs<|jrd|j}nd}|jr.d|j|j|fSt|j�SdS)Nz (requires-python:%s)r�z%s (from %s)%s)r�rrtr�)r1Zrpr2r2r3r��szLink.__str__cCsd|S)Nz	<Link %s>r2)r1r2r2r3r6�sz
Link.__repr__cCst|t�stS|j|jkS)N)rDr�rErt)r1r=r2r2r3r@�s
zLink.__eq__cCst|t�stS|j|jkS)N)rDr�rErt)r1r=r2r2r3rC�s
zLink.__ne__cCst|t�stS|j|jkS)N)rDr�rErt)r1r=r2r2r3r>�s
zLink.__lt__cCst|t�stS|j|jkS)N)rDr�rErt)r1r=r2r2r3r?�s
zLink.__le__cCst|t�stS|j|jkS)N)rDr�rErt)r1r=r2r2r3rB�s
zLink.__gt__cCst|t�stS|j|jkS)N)rDr�rErt)r1r=r2r2r3rA�s
zLink.__ge__cCs
t|j�S)N)r7rt)r1r2r2r3r8�sz
Link.__hash__cCs8tj|j�\}}}}}tj|jd��p(|}tj|�}|S)Nr�)rer�rtr��basename�rstrip�unquote)r1r�r�rVr�r2r2r3r��s
z
Link.filenamecCstj|j�dS)Nr)rer�rt)r1r2r2r3rg�szLink.schemecCstj|j�dS)Nr�)rer�rt)r1r2r2r3r��szLink.netloccCstjtj|j�d�S)Nr�)rerr�rt)r1r2r2r3rV�sz	Link.pathcCsttj|jjd���S)Nr�)rr�rrVr)r1r2r2r3r�sz
Link.splitextcCs|j�dS)Nr�)r)r1r2r2r3r��szLink.extcCs*tj|j�\}}}}}tj||||df�S)N)rer�rtZ
urlunsplit)r1rgr�rVr�r�r2r2r3�url_without_fragment�szLink.url_without_fragmentz[#&]egg=([^&]*)cCs |jj|j�}|sdS|jd�S)Nr�)�_egg_fragment_rer�rtr�)r1r�r2r2r3r��szLink.egg_fragmentz[#&]subdirectory=([^&]*)cCs |jj|j�}|sdS|jd�S)Nr�)�_subdirectory_fragment_rer�rtr�)r1r�r2r2r3�subdirectory_fragment�szLink.subdirectory_fragmentz2(sha1|sha224|sha384|sha256|sha512|md5)=([a-f0-9]+)cCs |jj|j�}|r|jd�SdS)Nr�)�_hash_rer�rtr�)r1r�r2r2r3r7s
z	Link.hashcCs |jj|j�}|r|jd�SdS)Nr�)rr�rtr�)r1r�r2r2r3�	hash_names
zLink.hash_namecCs$tj|jjdd�djdd�d�S)Nr�r�r�?)r�rrtr�)r1r2r2r3�show_urlsz
Link.show_urlcCs
|jtkS)N)r�r)r1r2r2r3r�sz
Link.is_wheelcCs ddlm}|j|jkrdSdS)z�
        Determines if this points to an actual artifact (e.g. a tarball) or if
        it points to an "abstract" thing like a path or a VCS location.
        r)�vcsFT)r�rrgZall_schemes)r1rr2r2r3�is_artifactszLink.is_artifact)NNN)!rGrHrIr4r�r6r@rCr>r?rBrAr8rr�rgr�rVrr�rr�r�rr�r	r
rr7rrr�rr2r2r2r3r��s8



r�zno_binary only_binarycCs�|jd�}xFd|krP|j�|j�|jd�|d|jd�d�=d|krdSqWx:|D]2}|dkrn|j�qXt|�}|j|�|j|�qXWdS)N�,z:all:r�z:none:)r��clearr�r�r�discard)�value�targetr=�newr�r2r2r3r"5s




cCsjtddg�}||jkr"|jd�n@||jkr8|jd�n*d|jkrN|jd�nd|jkrb|jd�t|�S)Nr�r�z:all:)r[�only_binaryr�	no_binary�	frozenset)�fmt_ctlr�r�r2r2r3r�Hs




r�cCstd|j|j�dS)Nz:all:)r"rr)rr2r2r3�fmt_ctl_no_binaryUsrcCst|�tjdtdd�dS)Nzf--no-use-wheel is deprecated and will be removed in the future.  Please use --no-binary :all: instead.r�)�
stacklevel)rrmrnr)rr2r2r3�fmt_ctl_no_use_wheelZs
rr�zsupplied canonical formats)r$r%r%)r%r&r%)r%r'r%)r%r(r%)r)r%N)r*r%r%)Qr�Z
__future__rZloggingr��collectionsrrcr�rUr�rsr�rmZpip._vendor.six.moves.urllibrrerr�Z
pip.compatrZ	pip.utilsrrr	r
rZpip.utils.deprecationrZpip.utils.loggingr
Zpip.utils.packagingrZpip.exceptionsrrrrZpip.downloadrrrrZ	pip.wheelrrZpip.pep425tagsrZpip._vendorrrrZpip._vendor.packaging.versionr-Zpip._vendor.packaging.utilsrZpip._vendor.packagingrZpip._vendor.requests.exceptionsrZpip._vendor.distlib.compatr �__all__r�Z	getLoggerrGrh�objectr+r#r�rr�r�r�r!r"r�rrr�r2r2r2r3�<module>sl

)d*#



__pycache__/baseparser.cpython-36.pyc000064400000022052151733136150013537 0ustar003

�Pf�(�@s�dZddlmZddlZddlZddlZddlZddlZddlm	Z	ddl
mZddlm
Z
ddlmZmZmZmZddlmZmZejd	ej�ZGd
d�dej�ZGdd
�d
e�ZGdd�dej�ZGdd�de�ZdS)zBase option parser setup�)�absolute_importN)�	strtobool)�string_types)�configparser)�legacy_config_file�config_basename�running_under_virtualenv�site_config_files)�appdirs�get_terminal_sizez^PIP_c@sReZdZdZdd�Zdd�Zddd	�Zd
d�Zdd
�Zdd�Z	dd�Z
dd�ZdS)�PrettyHelpFormatterz4A prettier/less verbose help formatter for optparse.cOs:d|d<d|d<t�dd|d<tjj|f|�|�dS)N�Zmax_help_position�Zindent_incrementr��width)r�optparse�IndentedHelpFormatter�__init__)�self�args�kwargs�r� /usr/lib/python3.6/baseparser.pyrszPrettyHelpFormatter.__init__cCs|j|dd�S)Nz <%s>z, )�_format_option_strings)r�optionrrr�format_option_strings!sz)PrettyHelpFormatter.format_option_strings� <%s>�, cCs|g}|jr|j|jd�|jr0|j|jd�t|�dkrH|jd|�|j�rr|jp^|jj�}|j||j��dj	|�S)a
        Return a comma-separated list of option strings and metavars.

        :param option:  tuple of (short opt, long opt), e.g: ('-f', '--format')
        :param mvarfmt: metavar format string - evaluated as mvarfmt % metavar
        :param optsep:  separator
        rr�)
Z_short_opts�appendZ
_long_opts�len�insertZtakes_value�metavar�dest�lower�join)rrZmvarfmtZoptsepZoptsr"rrrr$sz*PrettyHelpFormatter._format_option_stringscCs|dkrdS|dS)NZOptionsrz:
r)rZheadingrrr�format_heading;sz"PrettyHelpFormatter.format_headingcCsd|jtj|�d�}|S)zz
        Ensure there is only one newline between usage and the first heading
        if there is no description.
        z
Usage: %s
z  )�indent_lines�textwrap�dedent)rZusage�msgrrr�format_usage@sz PrettyHelpFormatter.format_usagecCsV|rNt|jd�rd}nd}|jd�}|j�}|jtj|�d�}d||f}|SdSdS)N�mainZCommandsZDescription�
z  z%s:
%s
r)�hasattr�parser�lstrip�rstripr'r(r))r�descriptionZlabelrrr�format_descriptionHs
z&PrettyHelpFormatter.format_descriptioncCs|r|SdSdS)Nrr)rZepilogrrr�
format_epilogZsz!PrettyHelpFormatter.format_epilogcs"�fdd�|jd�D�}dj|�S)Ncsg|]}�|�qSrr)�.0�line)�indentrr�
<listcomp>bsz4PrettyHelpFormatter.indent_lines.<locals>.<listcomp>r-)�splitr%)r�textr7Z	new_linesr)r7rr'asz PrettyHelpFormatter.indent_linesN)rr)�__name__�
__module__�__qualname__�__doc__rrrr&r+r3r4r'rrrrrs
rc@seZdZdZdd�ZdS)�UpdatingDefaultsHelpFormatterz�Custom help formatter for use in ConfigOptionParser.

    This is updates the defaults before expanding them, allowing
    them to show up correctly in the help listing.
    cCs(|jdk	r|jj|jj�tjj||�S)N)r/�_update_defaults�defaultsrr�expand_default)rrrrrrBms
z,UpdatingDefaultsHelpFormatter.expand_defaultN)r;r<r=r>rBrrrrr?fsr?c@s eZdZdd�Zedd��ZdS)�CustomOptionParsercOs(|j||�}|jj�|jj||�|S)z*Insert an OptionGroup at a given position.)Zadd_option_group�
option_groups�popr!)r�idxrr�grouprrr�insert_option_groupus
z&CustomOptionParser.insert_option_groupcCs.|jdd�}x|jD]}|j|j�qW|S)z<Get a list of all options, including those in option groups.N)Zoption_listrD�extend)r�res�irrr�option_list_all~sz"CustomOptionParser.option_list_allN)r;r<r=rH�propertyrLrrrrrCss	rCc@s\eZdZdZdZdd�Zdd�Zdd�Zd	d
�Zdd�Z	d
d�Z
dd�Zdd�Zdd�Z
dS)�ConfigOptionParserzsCustom option parser which updates its defaults by checking the
    configuration files and environmental variablesFcOsdtj�|_|jd�|_|jdd�|_|j�|_|jrB|jj|j�|jsLt	�t
jj|f|�|�dS)N�name�isolatedF)
rZRawConfigParser�configrErOrP�get_config_files�files�read�AssertionErrorr�OptionParserr)rrrrrrr�s


zConfigOptionParser.__init__cCs�tjjdd�}|tjkrgStt�}|jsj|rFtjj|�rF|j	|�n$|j	t
�|j	tjjtj
d�t��t�r�tjjtjt�}tjj|�r�|j	|�|S)NZPIP_CONFIG_FILEFZpip)�os�environ�get�devnull�listr	rP�path�existsrrr%r
Zuser_config_dirrr�sys�prefix)rZconfig_filerSZvenv_config_filerrrrR�s&


z#ConfigOptionParser.get_config_filescCsLy|j||�Stjk
rF}ztd|�tjd�WYdd}~XnXdS)Nz*An error occurred during configuration: %s�)�check_valuerZOptionValueError�printr^�exit)rr�key�val�excrrr�
check_default�s
z ConfigOptionParser.check_defaultc	sji}x(d�jfD]}|j�j�j|���qW�jsH|j�j�j���tj�j��_	t
�}x�|j�D]�\�}|stqf�j����dkr�qf�j
d
kr�t|�}n��j
dkr�|j�}���fdd�|D�}nl�j
d	k�r$|j�j��j�}�j||�}�j�p�f}�j�pi}�j�||�f|�|�n�j��|�}||�j<qfWx|D]�t�j	��|�<�qFWd�_	|S)z�Updates the given defaults with values from the config files and
        the environ. Does a little special handling for certain types of
        options (lists).�globalN�
store_true�store_false�countrcsg|]}�j��|��qSr)rg)r5�v)rdrrrrr8�sz7ConfigOptionParser._update_defaults.<locals>.<listcomp>�callback)rirjrk)rO�update�normalize_keys�get_config_sectionrP�get_environ_varsr�ValuesrA�values�set�itemsZ
get_option�actionrr9�addr#�get_opt_stringZ
convert_valueZ
callback_argsZcallback_kwargsrmrg�getattr)	rrArQZsectionZ	late_evalre�opt_strrrr)rdrrrr@�s@




z#ConfigOptionParser._update_defaultscCs@i}x6|D].\}}|jdd�}|jd�s0d|}|||<q
W|S)z�Return a config dictionary with normalized keys regardless of
        whether the keys were specified in environment variables or in config
        files�_�-z--z--%s)�replace�
startswith)rruZ
normalizedrdrerrrro�s
z!ConfigOptionParser.normalize_keyscCs|jj|�r|jj|�SgS)z Get a section of a configuration)rQZhas_sectionru)rrOrrrrpsz%ConfigOptionParser.get_config_sectionccs<x6tjj�D](\}}tj|�rtjd|�j�|fVqWdS)z@Returns a generator with all environmental vars with prefix PIP_rN)rWrXru�_environ_prefix_re�search�subr$)rrdrerrrrqs
z#ConfigOptionParser.get_environ_varscCsn|jstj|j�S|j|jj��}x@|j�D]4}|j|j�}t	|t
�r,|j�}|j||�||j<q,Wtj|�S)z�Overriding to make updating the defaults after instantiation of
        the option parser possible, _update_defaults() does the dirty work.)
Zprocess_default_valuesrrrrAr@�copyZ_get_all_optionsrYr#�
isinstancerrxra)rrAr�defaultrzrrr�get_default_valuess
z%ConfigOptionParser.get_default_valuescCs |jtj�|jdd|�dS)Nrz%s
)Zprint_usager^�stderrrc)rr*rrr�error#szConfigOptionParser.errorN)r;r<r=r>rPrrRrgr@rorprqr�r�rrrrrN�s
(5rN)r>Z
__future__rr^rrW�rer(Zdistutils.utilrZpip._vendor.sixrZpip._vendor.six.movesrZ
pip.locationsrrrr	Z	pip.utilsr
r�compile�Irrrr?rVrCrNrrrr�<module>s O
__pycache__/wheel.cpython-36.pyc000064400000052356151733136150012526 0ustar003

�Pf~�@s$dZddlmZddlZddlZddlZddlZddlZddlZddl	Z	ddl
Z	ddlZddlZddl
Z
ddlZddlZddlZddlmZddlmZddlmZddlZddlmZddlmZmZdd	lmZmZm Z dd
l!m"Z"m#Z#ddlm$Z$ddl%m&Z&m'Z'm(Z(m)Z)m*Z*dd
l+m,Z,ddl-m.Z.ddl/m0Z0ddl1m2Z2ddl3m4Z4ddl5m6Z6ddl7m8Z8dZ9d9Z:ej;e<�Z=Gdd�de>�Z?dd�Z@dd�ZAd;dd�ZBd d!�ZCd"d#�ZDejEd$ejF�ZGd%d&�ZHd'd(�ZId<d+d,�ZJd-d.�ZKeKd/d0��ZLd1d2�ZMd3d4�ZNGd5d6�d6e>�ZOGd7d8�d8e>�ZPdS)=zH
Support for installing and building the "wheel" binary package format.
�)�absolute_importN)�urlsafe_b64encode)�Parser)�StringIO)�
expanduser)�path_to_url�
unpack_url)�InstallationError�InvalidWheelFilename�UnsupportedWheel)�distutils_scheme�PIP_DELETE_MARKER_FILENAME)�
pep425tags)�call_subprocess�
ensure_dir�captured_stdout�rmtree�read_chunks)�open_spinner)�
indent_log)�SETUPTOOLS_SHIM)�ScriptMaker)�
pkg_resources)�canonicalize_name)�configparserz.whl�c@s eZdZdZdd�Zdd�ZdS)�
WheelCachez&A cache of wheels for future installs.cCs|rt|�nd|_||_dS)z�Create a wheel cache.

        :param cache_dir: The root of the cache.
        :param format_control: A pip.index.FormatControl object to limit
            binaries being read from the cache.
        N)r�
_cache_dir�_format_control)�self�	cache_dir�format_control�r"�/usr/lib/python3.6/wheel.py�__init__8szWheelCache.__init__cCst|j||j|�S)N)�cached_wheelrr)r�link�package_namer"r"r#r%BszWheelCache.cached_wheelN)�__name__�
__module__�__qualname__�__doc__r$r%r"r"r"r#r5s
rcCs�|jg}|jdk	r4|jdk	r4|jdj|j|jg��dj|�}tj|j��j�}|dd�|dd�|dd�|dd�g}t	j
j|df|��S)a�
    Return a directory to store cached wheels in for link.

    Because there are M wheels for any one sdist, we provide a directory
    to cache them in, and then consult that directory when looking up
    cache hits.

    We only insert things into the cache if they have plausible version
    numbers, so that we don't contaminate the cache with things that were not
    unique. E.g. ./package might have dozens of installs done for it and build
    a version of 0.0...and if we built and cached a wheel, we'd end up using
    the same wheel even if the source has been edited.

    :param cache_dir: The cache_dir being used by pip.
    :param link: The link of the sdist for which this will cache wheels.
    N�=�#���Zwheels)Zurl_without_fragmentZ	hash_name�hash�append�join�hashlibZsha224�encodeZ	hexdigest�os�path)r r&Z	key_partsZkey_urlZhashed�partsr"r"r#�_cache_for_linkGs
,r9c
Cs,|s|S|s|S|jr|S|js$|S|s,|St|�}tjj||�}d|krN|St||�}ytj|�}Wn:t	k
r�}z|j
t
jt
jfkr�|S�WYdd}~XnXg}	xL|D]D}
yt
|
�}Wntk
r�w�YnX|j�s�q�|	j|j�|
f�q�W|	�s�|S|	j�tjj||	dd�}tjjt|��S)N�binaryrr)�is_wheel�is_artifactr�pip�index�fmt_ctl_formatsr9r6�listdir�OSError�errno�ENOENT�ENOTDIR�Wheelr
�	supportedr2�support_index_min�sortr7r3�Linkr)
r r&r!r'Zcanonical_nameZformats�rootZwheel_names�eZ
candidates�
wheel_name�wheelr7r"r"r#r%psF

r%�sha256�cCsttj|�}d}t|d��2}x*t||d�D]}|t|�7}|j|�q(WWdQRXdt|j��jd�j	d�}||fS)z6Return (hash, length) for path using hashlib.new(algo)r�rb)�sizeNzsha256=�latin1r,)
r4�new�openr�len�updater�digest�decode�rstrip)r7ZalgoZ	blocksize�hZlength�f�blockrWr"r"r#�rehash�s

r]cCs6tjddkri}d}nddi}d}t|||f|�S)Nr��b�newline�)�sys�version_inforT)�name�mode�nl�binr"r"r#�open_for_csv�srhcCs�tjj|�r�t|d��H}|j�}|jd�s.dStjjtj	��}d|tj
jd�}|j�}WdQRXt|d��}|j|�|j|�WdQRXdSdS)	zLReplace #!python with #!/path/to/python
    Return True if file was changed.rPs#!pythonFs#!�asciiN�wbT)
r6r7�isfilerT�readline�
startswithrb�
executabler5�getfilesystemencoding�linesep�read�write)r7Zscript�	firstlineZexename�restr"r"r#�
fix_script�s

ruzZ^(?P<namever>(?P<name>.+?)(-(?P<ver>\d.+?))?)
                                \.dist-info$cCs�|jdd�}xttj|�D]f}tj|�}|r|jd�|krttjj||d���,}x$|D]}|j	�j
�}|dkrTdSqTWWdQRXqWdS)	zP
    Return True if the extracted wheel in wheeldir should go into purelib.
    �-�_rd�WHEELzroot-is-purelib: trueTNF)�replacer6r@�dist_info_re�match�grouprTr7r3�lowerrY)rd�wheeldirZname_folded�itemr{rM�liner"r"r#�root_is_purelib�s

r�c
Cs�tjj|�siifSt|��<}t�}x$|D]}|j|j��|jd�q*W|jd�WdQRXtj	�}dd�|_
|j|�i}i}|jd�r�t
|jd��}|jd�r�t
|jd��}||fS)N�
rcSs|S)Nr")Zoptionr"r"r#�<lambda>�sz!get_entrypoints.<locals>.<lambda>Zconsole_scriptsZgui_scripts)r6r7�existsrTrrr�strip�seekrZRawConfigParserZoptionxformZreadfpZhas_section�dict�items)�filename�fp�datar�Zcp�console�guir"r"r#�get_entrypoints�s$





r�FTc+)s*|st||||||	d�}t|��r,|d�n|d�g�g��jtjj�tjj}i�t��g}|r�t��4}
tj	�� tj
d�tj|ddd�WdQRXWdQRXt
j|
j��dd	��d3�����fdd�	�	d4���	�
fd
d�	}||�d���std�
��tjj�dd�}t|�\����fdd�}xv�D]n}d}d}x^tjtjj�|��D]F}d}|dk�rpt}|}tjj�||�}||}|||d
||d��qVW�q4Wtd|d��d�_td5��_d�_��
fdd�}|�_d�_�jdd�}|�r�dtjk�rd|}|j�j|��tjjdd�dk�rTdt j!dd�|f}|j�j|��dt j!dd �|f}|j�j|��d!d"��D�}x|D]}�|=�q�W�jd#d�}|�rdtjk�r�d$|}|j�j|��d%t j!dd �|f}|j�j|��d&d"��D�}x|D]}�|=�qWt"��dk�rJ|j�j#d'd"��j$�D���t"��dk�r||j�j#d(d"��j$�D�d)di��tjj�dd*�}tjj�dd+�}t%|d,��}|j&d-�WdQRXt'j(||�|j)|�tjj�dd.�} tjj�dd/�}!t*| d0���}"t*|!d1���}#t+j,|"�}$t+j-|#�}%xV|$D]N}&�j|&d|&d�|&d<|&d�k�rpt.|&d�\|&d<|&d2<|%j/|&��q.Wx`|D]X}'t.|'�\}(})�|'��}*|
�r�|*j0|
��r�tjjtjtjj1|*|
��}*|%j/|*|(|)f��q�Wx"�D]}'|%j/�|'ddf��q�WWdQRXWdQRXt'j(|!| �dS)6zInstall a wheel)�user�homerJ�isolated�prefix�purelib�platlib�ignoreT)�force�quietNcSstjj||�jtjjd�S)N�/)r6r7�relpathry�sep)�src�pr"r"r#�normpathsz"move_wheel_files.<locals>.normpathFcs.�|��}�|��}|�|<|r*�j|�dS)z6Map archive RECORD paths to installation RECORD paths.N)�add)�srcfile�destfileZmodifiedZoldpath�newpath)�changed�	installed�lib_dirr�r~r"r#�record_installeds


z*move_wheel_files.<locals>.record_installedcs�t|��x�tj|�D�]�\}}}|t|�d�jtjj�}tjj||�}	|rj|jtjjd�dj	d�rjqx�|D]�}
tjj|||
�}|r�|dkr�|j	d�r��j
|
�qpqp|rp|
j	d�rpt|
�jt�j
��rp�s�td|ddj�����j
|�qpWx�|D]�}|�r||��rq�tjj||�}
tjj|||�}t|	�tj|
|�tj|
�}ttd��rptj||j|jf�tj|
tj��r�tj|
�}|jtjBtjBtjB}tj||�d	}|�r�||�}�|
||�q�WqWdS)
Nrrz.dataraz
.dist-infoz!Multiple .dist-info directories: z, �utimeF)rr6�walkrU�lstripr7r�r3�split�endswithr2rrmrd�AssertionError�shutilZcopyfile�stat�hasattrr��st_atime�st_mtime�access�X_OK�st_mode�S_IXUSR�S_IXGRP�S_IXOTH�chmod)�source�destZis_base�fixer�filter�dirZsubdirs�filesZbasedirZdestdir�sZ
destsubdirr[r�r��stZpermissionsr�)�	data_dirs�info_dirr��reqr"r#�clobbersJ






z!move_wheel_files.<locals>.clobberz!%s .dist-info directory not foundrzentry_points.txtcsh|j�jd�r|dd�}n<|j�jd�r8|dd�}n |j�jd�rT|dd�}n|}|�kpf|�kS)	Nz.exer/z
-script.py�
z.pya���i����r�)r}r�)rdZ	matchname)r�r�r"r#�is_entrypoint_wrapperasz/move_wheel_files.<locals>.is_entrypoint_wrapper�scripts)r�r�racs<|jdkrtd|�f���j|j|jjd�d|jd�S)Nz�Invalid script entry point: %s for req: %s - A callable suffix is required. Cf https://packaging.python.org/en/latest/distributing.html#console-scripts for more information.�.r)�moduleZimport_name�func)�suffixr	�script_templater�r�)�entry)�makerr�r"r#�_get_script_text�s
z*move_wheel_files.<locals>._get_script_textz�# -*- coding: utf-8 -*-
import re
import sys

from %(module)s import %(import_name)s

if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(%(func)s())
r=ZENSUREPIP_OPTIONSzpip = Z
altinstallz
pip%s = %srr^cSsg|]}tjd|�r|�qS)zpip(\d(\.\d)?)?$)�rer{)�.0�kr"r"r#�
<listcomp>�sz$move_wheel_files.<locals>.<listcomp>Zeasy_installzeasy_install = zeasy_install-%s = %scSsg|]}tjd|�r|�qS)zeasy_install(-\d\.\d)?$)r�r{)r�r�r"r"r#r��scSsg|]}d|�qS)z%s = %sr")r��kvr"r"r#r��scSsg|]}d|�qS)z%s = %sr")r�r�r"r"r#r��sr�Z	INSTALLERz
INSTALLER.piprjspip
�RECORDz
RECORD.pip�rzw+r.)F)NN)ra)2rr�rYr6r7r��setr�warnings�catch_warnings�filterwarnings�
compileall�compile_dir�logger�debug�getvaluer�r3r�r@rurr�ZvariantsZset_moder�r��pop�environ�extendZmake�getrb�versionrUZ
make_multipler�rTrrr��mover2rh�csv�reader�writerr]Zwriterowrmr�)+rdr�r~r�r�rJZ	pycompile�schemer�r�Zstrip_file_prefixr�Z	generated�stdoutr�Zep_filer�Zdatadirr�r�Zsubdirr�r�Z
pip_script�specZpip_epr�Zeasy_install_scriptZeasy_install_epZ	installerZtemp_installerZinstaller_file�recordZtemp_recordZ	record_inZ
record_outr�r��rowr[rZ�lZ
final_pathr")r�r�r�r�r�r�r�r�r�r�r�r~r#�move_wheel_files�s�




$;



#









.r�cstj���fdd��}|S)Nc?s6t�}x*�||�D]}||kr|j|�|VqWdS)N)r�r�)�args�kw�seenr)�fnr"r#�uniques

z_unique.<locals>.unique)�	functools�wraps)r�r�r")r�r#�_uniquesr�ccs�ddlm}tj||jd���}xd|D]\}tjj|j|d�}|V|j	d�r&tjj
|�\}}|dd�}tjj||d�}|Vq&WdS)	a
    Yield all the uninstallation paths for dist based on RECORD-without-.pyc

    Yield paths to all the files in RECORD. For each .py file in RECORD, add
    the .pyc in the same directory.

    UninstallPathSet.add() takes care of the __pycache__ .pyc.
    r)�FakeFiler�z.pyNr^z.pyc���)�	pip.utilsr�r�r�Zget_metadata_linesr6r7r3�locationr�r�)�distr�r�r�r7Zdnr��baser"r"r#�uninstallation_paths"s


r�cCsdyTdd�tjd|�D�d}|jd�}t�j|�}|dj�}ttt|j	d���}|SdSdS)	z�
    Return the Wheel-Version of an extracted wheel, if possible.

    Otherwise, return False if we couldn't parse / extract it.
    cSsg|]}|�qSr"r")r��dr"r"r#r�?sz!wheel_version.<locals>.<listcomp>Nrrxz
Wheel-Versionr�F)
rZfind_on_pathZget_metadatarZparsestrr��tuple�map�intr�)�
source_dirr�Z
wheel_datar�r"r"r#�
wheel_version8s
rcCsb|std|��|dtdkr>td|djtt|��f��n |tkr^tjddjtt|���dS)a�
    Raises errors or warns if called with an incompatible Wheel-Version.

    Pip should refuse to install a Wheel-Version that's a major series
    ahead of what it's compatible with (e.g 2.0 > 1.1); and warn when
    installing a version only minor version ahead (e.g 1.2 > 1.1).

    version: a 2-tuple representing a Wheel-Version (Major, Minor)
    name: name of wheel or package to raise exception about

    :raises UnsupportedWheel: when an incompatible Wheel-Version is given
    z(%s is in an unsupported or invalid wheelrzB%s's Wheel-Version (%s) is not compatible with this version of pipr�z*Installing from a newer Wheel-Version (%s)N)r�VERSION_COMPATIBLEr3r�strr��warning)r�rdr"r"r#�check_compatibilityKs

rc@s:eZdZdZejdej�Zdd�Zd
dd�Z	ddd	�Z
dS)rEzA wheel filez�^(?P<namever>(?P<name>.+?)-(?P<ver>\d.*?))
        ((-(?P<build>\d.*?))?-(?P<pyver>.+?)-(?P<abi>.+?)-(?P<plat>.+?)
        \.whl|\.dist-info)$cs��jj|�}|std|��|�_|jd�jdd��_|jd�jdd��_|jd�jd��_	|jd�jd��_
|jd	�jd��_t�fd
d��j	D���_
dS)
zX
        :raises InvalidWheelFilename: when the filename is invalid for a wheel
        z!%s is not a valid wheel filename.rdrwrvZverZpyverr��abiZplatc3s0|](}�jD]}�jD]}|||fVqqqdS)N)�abis�plats)r��x�y�z)rr"r#�	<genexpr>�sz!Wheel.__init__.<locals>.<genexpr>N)�
wheel_file_rer{r
r�r|ryrdr�r�Z
pyversionsr
rr��	file_tags)rr�Z
wheel_infor")rr#r$ts
zWheel.__init__Ncs2�dkrtj��fdd�|jD�}|r.t|�SdS)a"
        Return the lowest index that one of the wheel's file_tag combinations
        achieves in the supported_tags list e.g. if there are 8 supported tags,
        and one of the file tags is first in the list, then return 0.  Returns
        None is the wheel is not supported.
        Ncsg|]}|�kr�j|��qSr")r>)r��c)�tagsr"r#r��sz+Wheel.support_index_min.<locals>.<listcomp>)r�supported_tagsr�min)rrZindexesr")rr#rG�szWheel.support_index_mincCs"|dkrtj}tt|�j|j��S)z'Is this wheel supported on this system?N)rr�boolr��intersectionr)rrr"r"r#rF�szWheel.supported)N)N)r(r)r*r+r��compile�VERBOSErr$rGrFr"r"r"r#rEhs
rEc@sHeZdZdZddd�Zddd�Zdd�Zdd	d
�Zdd�Zddd�Z	dS)�WheelBuilderz#Build wheels from a RequirementSet.NcCs6||_||_|jj|_|j|_|p$g|_|p.g|_dS)N)	�requirement_set�finderZ_wheel_cacher�_cache_rootZwheel_download_dir�
_wheel_dir�
build_options�global_options)rrrrr r"r"r#r$�s

zWheelBuilder.__init__cCs�tjd�}zn|j|||d�rlyBtj|�d}tjj||�}tjtjj||�|�t	j
d|�|SYnX|j|�dSt|�XdS)ziBuild one wheel.

        :return: The filename of the built wheel, or None if the build failed.
        z
pip-wheel-)�
python_tagrzStored in directory: %sN)
�tempfileZmkdtemp�_WheelBuilder__build_oner6r@r7r3r�r�r��info�
_clean_oner)rr��
output_dirr!�tempdrLZ
wheel_pathr"r"r#�
_build_one�s

zWheelBuilder._build_onecCstjddt|jgt|j�S)Nz-uz-c)rbrnrZsetup_py�listr )rr�r"r"r#�_base_setup_args�s
zWheelBuilder._base_setup_argscCs�|j|�}d|jf}t|��t}tjd|�|dd|g|j}|dk	rT|d|g7}yt||jd|d�dS|jd	�tj	d
|j�dSWdQRXdS)Nz#Running setup.py bdist_wheel for %szDestination directory: %sZbdist_wheelz-dz--python-tagF)�cwd�show_stdout�spinnerT�errorzFailed building wheel for %s)
r*rdrr�r�rrZsetup_py_dirZfinishr.)rr�r'r!�	base_argsZspin_messager-Z
wheel_argsr"r"r#Z__build_one�s



zWheelBuilder.__build_onecCsV|j|�}tjd|j�|ddg}yt||jdd�dStjd|j�dSdS)NzRunning setup.py clean for %sZcleanz--allF)r+r,Tz Failed cleaning build dir for %s)r*r�r$rdrrr.)rr�r/Z
clean_argsr"r"r#r%�s
zWheelBuilder._clean_oneFcCs�|js|r|jst�|jj|j�|jjj�}g}x�|D]�}|jrDq8|j	r^|s�t
jd|j�q8|rj|j
rjq8|r�|jr�|jjr�q8|r�|jr�q8|r�|j}|j�\}}tjj|d|�dkr�q8dtjj|jjt|j��kr�t
jd|j�q8|j|�q8W|�s�dSt
jddjdd	�|D���t���Jgg}}	�x6|D�],}d}
|�r�tj}
t|j|j�}yt|�WnBtk
�r�}z$t
j d
|j|�|	j|��w6WYdd}~XnXn|j}|j!|||
d�}
|
�rX|j|�|�rb|j�rt"j#j$t"j#j|jt%���rtd��|j&�|j'|jj(�|_tjj)t*|
��|_|jj	�s<t�t+|j|jdd
|jj,d�n
|	j|��q6WWdQRX|�r�t
jddjdd	�|D���|	�r�t
jddjdd	�|	D���t-|	�dkS)z�Build wheels.

        :param unpack: If True, replace the sdist we built from with the
            newly built wheel, in preparation for installation.
        :return: True if all the wheels built correctly.
        z(Skipping %s, due to already being wheel.Nr:zCSkipping bdist_wheel for %s, due to binaries being disabled for it.Tz*Building wheels for collected packages: %sz, cSsg|]
}|j�qSr")rd)r�r�r"r"r#r�sz&WheelBuilder.build.<locals>.<listcomp>z Building wheel for %s failed: %s)r!zbad source dir - missing markerF)�sessionzSuccessfully built %s� cSsg|]
}|j�qSr")rd)r�r�r"r"r#r�QszFailed to build %scSsg|]
}|j�qSr")rd)r�r�r"r"r#r�Vsr).rrr�rZ
prepare_filesrZrequirements�valuesZ
constraintr;r�r$rdZeditabler&r<r�splitextr=r>Zegg_info_matchesr?r!rr2r3rrZimplementation_tagr9rrArr(r6r7r�r
Zremove_temporary_sourceZbuild_locationZ	build_dirrIrrr0rU)rZautobuildingZreqsetZbuildsetr�r&r�ZextZ
build_successZ
build_failurer!r&rKZ
wheel_filer"r"r#�build�s�






zWheelBuilder.build)NN)N)N)F)
r(r)r*r+r$r(r*r#r%r4r"r"r"r#r�s


r)rr�)rNr5)FNNTNFNN)Qr+Z
__future__rr�r�rBr�r4Zloggingr6Zos.pathr�r�r�rbr"r��base64rZemail.parserrZpip._vendor.sixrr=Z
pip.compatrZpip.downloadrrZpip.exceptionsr	r
rZ
pip.locationsrr
rr�rrrrrZpip.utils.uirZpip.utils.loggingrZpip.utils.setuptools_buildrZpip._vendor.distlib.scriptsrZpip._vendorrZpip._vendor.packaging.utilsrZpip._vendor.six.movesrZ	wheel_extrZ	getLoggerr(r��objectrr9r%r]rhrurrrzr�r�r�r�r�rrrErr"r"r"r#�<module>sn
)'



'7__pycache__/baseparser.cpython-36.opt-1.pyc000064400000022016151733136150014476 0ustar003

�Pf�(�@s�dZddlmZddlZddlZddlZddlZddlZddlm	Z	ddl
mZddlm
Z
ddlmZmZmZmZddlmZmZejd	ej�ZGd
d�dej�ZGdd
�d
e�ZGdd�dej�ZGdd�de�ZdS)zBase option parser setup�)�absolute_importN)�	strtobool)�string_types)�configparser)�legacy_config_file�config_basename�running_under_virtualenv�site_config_files)�appdirs�get_terminal_sizez^PIP_c@sReZdZdZdd�Zdd�Zddd	�Zd
d�Zdd
�Zdd�Z	dd�Z
dd�ZdS)�PrettyHelpFormatterz4A prettier/less verbose help formatter for optparse.cOs:d|d<d|d<t�dd|d<tjj|f|�|�dS)N�Zmax_help_position�Zindent_incrementr��width)r�optparse�IndentedHelpFormatter�__init__)�self�args�kwargs�r� /usr/lib/python3.6/baseparser.pyrszPrettyHelpFormatter.__init__cCs|j|dd�S)Nz <%s>z, )�_format_option_strings)r�optionrrr�format_option_strings!sz)PrettyHelpFormatter.format_option_strings� <%s>�, cCs|g}|jr|j|jd�|jr0|j|jd�t|�dkrH|jd|�|j�rr|jp^|jj�}|j||j��dj	|�S)a
        Return a comma-separated list of option strings and metavars.

        :param option:  tuple of (short opt, long opt), e.g: ('-f', '--format')
        :param mvarfmt: metavar format string - evaluated as mvarfmt % metavar
        :param optsep:  separator
        rr�)
Z_short_opts�appendZ
_long_opts�len�insertZtakes_value�metavar�dest�lower�join)rrZmvarfmtZoptsepZoptsr"rrrr$sz*PrettyHelpFormatter._format_option_stringscCs|dkrdS|dS)NZOptionsrz:
r)rZheadingrrr�format_heading;sz"PrettyHelpFormatter.format_headingcCsd|jtj|�d�}|S)zz
        Ensure there is only one newline between usage and the first heading
        if there is no description.
        z
Usage: %s
z  )�indent_lines�textwrap�dedent)rZusage�msgrrr�format_usage@sz PrettyHelpFormatter.format_usagecCsV|rNt|jd�rd}nd}|jd�}|j�}|jtj|�d�}d||f}|SdSdS)N�mainZCommandsZDescription�
z  z%s:
%s
r)�hasattr�parser�lstrip�rstripr'r(r))r�descriptionZlabelrrr�format_descriptionHs
z&PrettyHelpFormatter.format_descriptioncCs|r|SdSdS)Nrr)rZepilogrrr�
format_epilogZsz!PrettyHelpFormatter.format_epilogcs"�fdd�|jd�D�}dj|�S)Ncsg|]}�|�qSrr)�.0�line)�indentrr�
<listcomp>bsz4PrettyHelpFormatter.indent_lines.<locals>.<listcomp>r-)�splitr%)r�textr7Z	new_linesr)r7rr'asz PrettyHelpFormatter.indent_linesN)rr)�__name__�
__module__�__qualname__�__doc__rrrr&r+r3r4r'rrrrrs
rc@seZdZdZdd�ZdS)�UpdatingDefaultsHelpFormatterz�Custom help formatter for use in ConfigOptionParser.

    This is updates the defaults before expanding them, allowing
    them to show up correctly in the help listing.
    cCs(|jdk	r|jj|jj�tjj||�S)N)r/�_update_defaults�defaultsrr�expand_default)rrrrrrBms
z,UpdatingDefaultsHelpFormatter.expand_defaultN)r;r<r=r>rBrrrrr?fsr?c@s eZdZdd�Zedd��ZdS)�CustomOptionParsercOs(|j||�}|jj�|jj||�|S)z*Insert an OptionGroup at a given position.)Zadd_option_group�
option_groups�popr!)r�idxrr�grouprrr�insert_option_groupus
z&CustomOptionParser.insert_option_groupcCs.|jdd�}x|jD]}|j|j�qW|S)z<Get a list of all options, including those in option groups.N)Zoption_listrD�extend)r�res�irrr�option_list_all~sz"CustomOptionParser.option_list_allN)r;r<r=rH�propertyrLrrrrrCss	rCc@s\eZdZdZdZdd�Zdd�Zdd�Zd	d
�Zdd�Z	d
d�Z
dd�Zdd�Zdd�Z
dS)�ConfigOptionParserzsCustom option parser which updates its defaults by checking the
    configuration files and environmental variablesFcOsZtj�|_|jd�|_|jdd�|_|j�|_|jrB|jj|j�t	j
j|f|�|�dS)N�name�isolatedF)rZRawConfigParser�configrErOrP�get_config_files�files�readr�OptionParserr)rrrrrrr�s

zConfigOptionParser.__init__cCs�tjjdd�}|tjkrgStt�}|jsj|rFtjj|�rF|j	|�n$|j	t
�|j	tjjtj
d�t��t�r�tjjtjt�}tjj|�r�|j	|�|S)NZPIP_CONFIG_FILEFZpip)�os�environ�get�devnull�listr	rP�path�existsrrr%r
Zuser_config_dirrr�sys�prefix)rZconfig_filerSZvenv_config_filerrrrR�s&


z#ConfigOptionParser.get_config_filescCsLy|j||�Stjk
rF}ztd|�tjd�WYdd}~XnXdS)Nz*An error occurred during configuration: %s�)�check_valuerZOptionValueError�printr]�exit)rr�key�val�excrrr�
check_default�s
z ConfigOptionParser.check_defaultc	sji}x(d�jfD]}|j�j�j|���qW�jsH|j�j�j���tj�j��_	t
�}x�|j�D]�\�}|stqf�j����dkr�qf�j
d
kr�t|�}n��j
dkr�|j�}���fdd�|D�}nl�j
d	k�r$|j�j��j�}�j||�}�j�p�f}�j�pi}�j�||�f|�|�n�j��|�}||�j<qfWx|D]�t�j	��|�<�qFWd�_	|S)z�Updates the given defaults with values from the config files and
        the environ. Does a little special handling for certain types of
        options (lists).�globalN�
store_true�store_false�countrcsg|]}�j��|��qSr)rf)r5�v)rcrrrrr8�sz7ConfigOptionParser._update_defaults.<locals>.<listcomp>�callback)rhrirj)rO�update�normalize_keys�get_config_sectionrP�get_environ_varsr�ValuesrA�values�set�itemsZ
get_option�actionrr9�addr#�get_opt_stringZ
convert_valueZ
callback_argsZcallback_kwargsrlrf�getattr)	rrArQZsectionZ	late_evalrd�opt_strrrr)rcrrrr@�s@




z#ConfigOptionParser._update_defaultscCs@i}x6|D].\}}|jdd�}|jd�s0d|}|||<q
W|S)z�Return a config dictionary with normalized keys regardless of
        whether the keys were specified in environment variables or in config
        files�_�-z--z--%s)�replace�
startswith)rrtZ
normalizedrcrdrrrrn�s
z!ConfigOptionParser.normalize_keyscCs|jj|�r|jj|�SgS)z Get a section of a configuration)rQZhas_sectionrt)rrOrrrrosz%ConfigOptionParser.get_config_sectionccs<x6tjj�D](\}}tj|�rtjd|�j�|fVqWdS)z@Returns a generator with all environmental vars with prefix PIP_rN)rVrWrt�_environ_prefix_re�search�subr$)rrcrdrrrrps
z#ConfigOptionParser.get_environ_varscCsn|jstj|j�S|j|jj��}x@|j�D]4}|j|j�}t	|t
�r,|j�}|j||�||j<q,Wtj|�S)z�Overriding to make updating the defaults after instantiation of
        the option parser possible, _update_defaults() does the dirty work.)
Zprocess_default_valuesrrqrAr@�copyZ_get_all_optionsrXr#�
isinstancerrwr`)rrAr�defaultryrrr�get_default_valuess
z%ConfigOptionParser.get_default_valuescCs |jtj�|jdd|�dS)Nrz%s
)Zprint_usager]�stderrrb)rr*rrr�error#szConfigOptionParser.errorN)r;r<r=r>rPrrRrfr@rnrorpr�r�rrrrrN�s
(5rN)r>Z
__future__rr]rrV�rer(Zdistutils.utilrZpip._vendor.sixrZpip._vendor.six.movesrZ
pip.locationsrrrr	Z	pip.utilsr
r�compile�Ir~rrr?rUrCrNrrrr�<module>s O
__pycache__/download.cpython-36.pyc000064400000050500151733136150013216 0ustar003

�PfO��@s^ddlmZddlZddlZddlZddlZddlZddlZddl	Z	ddl
Z
ddlZddlZddl
Z
ddlZyddlZdZWnek
r�dZYnXddlmZddlmZddlZddlmZmZddlmZdd	lmZmZm Z m!Z!m"Z"m#Z#m$Z$m%Z%m&Z&m'Z'dd
l(m)Z)ddl*m+Z+ddl,m-Z-dd
l.m/Z/ddl0m1Z1ddl2m3Z3m4Z4ddl5m6Z6ddl7m8Z8ddl9m:Z:m;Z;ddl<m=Z=m>Z>ddl?m@Z@mAZAddlBmCZCmDZDddlEmFZFddlGmHZHddl9mIZIddlJmKZKddlLmMZMddlNmOZOddlPmQZQdddd d!d"d#d$d%d&d'd(d)g
ZRejSeT�ZUd*d+�ZVGd,d-�d-e@�ZWGd.d/�d/e=�ZXGd0d1�d1eM�ZYGd2d3�d3e>�ZZGd4d5�d5e:j[�Z\dWd6d�Z]ej^d7ej_�Z`ej^d8ej_�Zad9d�Zbd:d�Zcd;d �Zdd<d!�Zed=d"�Zfd>d?�Zgd@d$�ZhdAd%�ZidBdC�ZjdDdE�ZkdFdG�ZldHdI�ZmdXdJd&�ZndYdKd#�ZodLdM�ZpGdNdO�dOeQjq�ZrdZdPd'�ZsdQd)�ZtdRd(�ZudSdT�ZvdUdV�ZwdS)[�)�absolute_importNTF)�parse)�request)�InstallationError�HashMismatch)�PyPI)
�splitext�rmtree�format_size�display_path�
backup_dir�ask_path_exists�unpack_file�ARCHIVE_EXTENSIONS�consume�call_subprocess)�auto_decode)�check_path_owner)�
indent_log)�SETUPTOOLS_SHIM)�libc_ver)�DownloadProgressBar�DownloadProgressSpinner)�write_delete_marker_file)�vcs)�requests�six)�BaseAdapter�HTTPAdapter)�AuthBase�
HTTPBasicAuth)�CONTENT_CHUNK_SIZE�Response)�get_netrc_auth)�CaseInsensitiveDict)�urllib3)�CacheControlAdapter)�	FileCache)�	LockError)�
xmlrpc_client�get_file_content�is_url�url_to_path�path_to_url�is_archive_file�unpack_vcs_link�unpack_file_url�
is_vcs_url�is_file_url�unpack_http_url�
unpack_url�parse_content_disposition�sanitize_content_filenamecCsdtjd�tj�dtj�id�}|dddkrBtj�|dd<n�|dddkr�tjjd	krntjd
d�}ntj}djd
d�|D��|dd<nB|dddkr�tj�|dd<n |dddkr�tj�|dd<tjj	d��rJddl
m}tt
dd�tdddg|j����}tt
dd�tddgt����}|�r<||d<|�rJ||d<tjj	d��r|tj�d�r|dtj�dd�|d<tj��r�tj�|jdi�d<tj��r�tj�|jdi�d<tj��r�tj�|d<t�r�tjd
d �d)k�r�tj|d"<d#j|tj|d*d&d'�d(�S)+z6
    Return a string representing the user agent.
    �pip)�name�versionr8)Z	installer�python�implementationr;ZCPythonr9ZPyPy�finalN��.cSsg|]}t|��qS�)�str)�.0�xr?r?�/usr/lib/python3.6/download.py�
<listcomp>Tszuser_agent.<locals>.<listcomp>ZJythonZ
IronPython�linuxr)�distrocSs|dS)N�r?)rBr?r?rC�<lambda>`szuser_agent.<locals>.<lambda>�idcSs|dS)NrGr?)rBr?r?rCrHds�lib�libcrF�darwinZmacOS�system�releaseZcpu��Zopenssl_versionz9{data[installer][name]}/{data[installer][version]} {json}�,�:T)Z
separatorsZ	sort_keys)�data�json)rOrP)rQrR)r7�__version__�platformZpython_versionZpython_implementation�sys�pypy_version_info�releaselevel�join�
startswith�pip._vendorrF�dict�filter�zipZlinux_distributionrZmac_verrM�
setdefaultrN�machine�HAS_TLS�version_info�sslZOPENSSL_VERSION�formatrT�dumps)rSrXrFZdistro_infosrKr?r?rC�
user_agent@sP




rgc@s.eZdZddd�Zdd�Zdd�Zdd	�Zd
S)�MultiDomainBasicAuthTcCs||_i|_dS)N)�	prompting�	passwords)�selfrir?r?rC�__init__�szMultiDomainBasicAuth.__init__cCs�tj|j�}|jjdd�d}tj|dd�|f|dd��|_|jj|d�\}}|dkrn|j|j�\}}|dkr�|dkr�t	|j�}|r�|nd\}}|s�|r�||f|j|<t
|p�d|p�d�|�}|jd|j�|S)	N�@rGrO��response���)NN)NN)
�urllib_parse�urlparse�url�netloc�rsplit�
urlunparserj�get�parse_credentialsr#r Z
register_hook�
handle_401)rk�req�parsedrt�username�passwordZ
netrc_authr?r?rC�__call__�s&
zMultiDomainBasicAuth.__call__cKs�|jdkr|S|js|Stj|j�}tjjd|j�}t	j	d�}|sH|rX||f|j
|j<|j|jj
�t|ppd|pvd�|j�}|jj|f|�}|jj|�|S)Ni�z
User for %s: z
Password: rn)�status_coderirqrrrsrZmoves�inputrt�getpassrj�content�rawZrelease_connr rZ
connection�send�history�append)rk�resp�kwargsr{r|r}rzZnew_respr?r?rCry�s


zMultiDomainBasicAuth.handle_401cCs8d|kr4|jdd�d}d|kr,|jdd�S|dfSdS)NrmrGrrR)NN)ru�split)rkrtZuserinfor?r?rCrx�sz&MultiDomainBasicAuth.parse_credentialsN)T)�__name__�
__module__�__qualname__rlr~ryrxr?r?r?rCrh�s
!"rhc@seZdZddd�Zdd�ZdS)�LocalFSAdapterNc
Cs�t|j�}t�}d|_|j|_ytj|�}	Wn.tk
rZ}
zd|_|
|_WYdd}
~
XnPXtj	j
|	jdd�}tj
|�dp~d}t||	j|d��|_t|d�|_|jj|_|S)	N��i�T)Zusegmtrz
text/plain)zContent-TypezContent-Lengthz
Last-Modified�rb)r,rsr"r�os�stat�OSErrorr��emailZutilsZ
formatdate�st_mtime�	mimetypes�
guess_typer$�st_size�headers�open�close)
rkr�stream�timeout�verify�certZproxies�pathnamer�Zstats�excZmodified�content_typer?r?rCr��s$

zLocalFSAdapter.sendcCsdS)Nr?)rkr?r?rCr��szLocalFSAdapter.close)NNNNN)r�r�r�r�r�r?r?r?rCr��s
r�csDeZdZdZ�fdd�Z�fdd�Z�fdd�Z�fdd	�Z�ZS)
�
SafeFileCachezw
    A file based cache which is safe to use even when the target directory may
    not be accessible or writable.
    cs4tt|�j||�t|j�s0tjd|j�d|_dS)Nz�The directory '%s' or its parent directory is not owned by the current user and the cache has been disabled. Please check the permissions and owner of that directory. If executing pip with sudo, you may want sudo's -H flag.)�superr�rlr�	directory�logger�warning)rk�argsr�)�	__class__r?rCrl�s
zSafeFileCache.__init__c
s@|jdkrdSytt|�j||�Stttfk
r:YnXdS)N)r�r�r�rwr(r��IOError)rkr�r�)r�r?rCrws
zSafeFileCache.getc
s@|jdkrdSytt|�j||�Stttfk
r:YnXdS)N)r�r�r��setr(r�r�)rkr�r�)r�r?rCr�s
zSafeFileCache.setc
s@|jdkrdSytt|�j||�Stttfk
r:YnXdS)N)r�r�r��deleter(r�r�)rkr�r�)r�r?rCr�)s
zSafeFileCache.delete)	r�r�r��__doc__rlrwr�r��
__classcell__r?r?)r�rCr��s


r�c@seZdZdd�ZdS)�InsecureHTTPAdaptercCsd|_d|_dS)NZ	CERT_NONE)Z	cert_reqsZca_certs)rkZconnrsr�r�r?r?rC�cert_verify9szInsecureHTTPAdapter.cert_verifyN)r�r�r�r�r?r?r?rCr�7sr�cs,eZdZdZ�fdd�Z�fdd�Z�ZS)�
PipSessionNc	s�|jdd�}|jdd�}|jdg�}tt|�j||�t�|jd<t�|_tj	|dgdd�}|rvt
t|d	d
�|d�}n
t|d�}t
|d�}|jd
|�|jd|�|jdt��x|D]}|jdj|�|�q�WdS)N�retriesr�cache�insecure_hostsz
User-Agenti�g�?)ZtotalZstatus_forcelistZbackoff_factorT)Zuse_dir_lock)r��max_retries)r�zhttps://zhttp://zfile://zhttps://{0}/)�popr�r�rlrgr�rhZauthr%ZRetryr&r�rr�Zmountr�re)	rkr�r�r�r�r�Zsecure_adapterZinsecure_adapter�host)r�r?rCrlBs*




zPipSession.__init__cs(|jd|j�tt|�j||f|�|�S)Nr�)r`r�r�r�r)rk�methodrsr�r�)r�r?rCr~szPipSession.request)r�r�r�r�rlrr�r?r?)r�rCr�>s<r�c
CsL|dkrtd��tj|�}|r�|jd�j�}|dkrR|rR|jd�rRtd||f��|dkr�|jdd�d}|jdd	�}t	j
|�}|r�|jd�d|jd
d�d}tj|�}|jd	�r�d	|j
d	�}|}n|j|�}|j�|j|jfSy&t|d��}t|j��}WdQRXWn4tk
�rB}	ztdt|	���WYdd}	~	XnX||fS)
z�Gets the content of a file; it may be a filename, file: URL, or
    http: URL.  Returns (location, content).  Content is unicode.NzAget_file_content() missing 1 required keyword argument: 'session'rG�file�httpz6Requirements file %s references URL %s, which is localrR�\�/�|r�z$Could not open requirements file: %s)�	TypeError�
_scheme_re�search�group�lowerr[rr��replace�_url_slash_drive_re�matchrqZunquote�lstriprw�raise_for_statusrs�textr�r�readr�r@)
rsZ
comes_from�sessionr��scheme�pathr��fr�r�r?r?rCr*�s>





 z^(http|https|file):z/*([a-z])\|cCs6d|krdS|jdd�dj�}|ddddgtjkS)	z)Returns true if the name looks like a URLrRFrGrr�Zhttpsr�Zftp)r�r�rZall_schemes)r8r�r?r?rCr+�scCsH|jd�std|��tj|�\}}}}}|r6d|}tj||�}|S)z(
    Convert a file: URL to a path.
    zfile:z4You can only turn file: urls into filenames (not %r)z\\)r[�AssertionErrorrqZurlsplit�urllib_requestZurl2pathname)rs�_rtr�r?r?rCr,�s
cCs*tjjtjj|��}tjdtj|��}|S)zh
    Convert a path to a file: URL.  The path will be made absolute and have
    quoted path parts.
    zfile:)r�r��normpath�abspathrqZurljoinr�Zpathname2url)r�rsr?r?rCr-�scCs t|�dj�}|tkrdSdS)z9Return True if `name` is a considered as an archive file.rGTF)rr�r)r8�extr?r?rCr.�scCst|�}|j|�dS)N)�_get_used_vcs_backend�unpack)�link�location�vcs_backendr?r?rCr/�scCs.x(tjD]}|j|jkr||j�}|SqWdS)N)rZbackendsr�Zschemesrs)r�Zbackendr�r?r?rCr��s
r�cCstt|��S)N)�boolr�)r�r?r?rCr1�scCs|jj�jd�S)Nzfile:)rsr�r[)r�r?r?rCr2�scCst|j�}tjj|�S)z�Return whether a file:// Link points to a directory.

    ``link`` must not have any other scheme but file://. Call is_file_url()
    first.

    )r,�url_without_fragmentr�r��isdir)r��	link_pathr?r?rC�
is_dir_url�s
r�cOs|S)Nr?)�iterabler�r�r?r?rC�_progress_indicator�sr�c

sLyt�jd�}Wntttfk
r0d}YnXt�dd�}tj�tj	krRd}n&|r\d}n|dkrjd}n|std}nd}|j
}�fdd	�}�fd
d�}	t}
|jt
jkr�|}n|j}|r�|r�tjd|t|��t|d
�j}
ntjd|�t�j}
n |�rtjd|�ntjd|�tjd|�|	|
|t�t��}|�r@|j|�nt|�dS)Nzcontent-lengthrZ
from_cacheF�(i�Tc3s\y$x�jj|dd�D]
}|VqWWn2tk
rVx�jj|�}|sHP|Vq6WYnXdS)NF)Zdecode_content)r�r��AttributeErrorr�)Z
chunk_size�chunk)r�r?rC�	resp_readsz _download_url.<locals>.resp_readc3s"x|D]}�j|�|VqWdS)N)�write)Zchunksr�)�content_filer?rC�written_chunks;s

z%_download_url.<locals>.written_chunkszDownloading %s (%s))�maxzDownloading %szUsing cached %szDownloading from URL %si@�)�intr��
ValueError�KeyErrorr��getattrr�ZgetEffectiveLevel�logging�INFO�show_urlr�rtrr��infor
r�iterr�debugr!Zcheck_against_chunksr)
r�r�r��hashesZtotal_lengthZcached_respZ
show_progressr�r�r�Zprogress_indicatorrsZdownloaded_chunksr?)r�r�rC�
_download_urlsL
%
r�cCs�d}tjj||j�}tjj|�r�tdt|�d�}|dkr@d}nj|dkrdtjdt|��tj	|�nF|dkr�t
|�}tjd	t|�t|��tj||�n|dkr�t
jd
�|r�tj||�tjdt|��dS)NTz8The file %s exists. (i)gnore, (w)ipe, (b)ackup, (a)abort�i�w�b�aFzDeleting %szBacking up %s to %srGzSaved %s)r�r�r�r�rp)r�r�rZ�filename�existsr
rr�r��remover�shutilZmoverW�exit�copyr�)r�r�r�r�Zdownload_locationroZ	dest_filer?r?rC�
_copy_fileas.

r�c	Cs�|dkrtd��tjdd�}d}|r0t|||�}|rH|}tj|�d}nt||||�\}}t||||�|r~|r~t|||�|s�t	j
|�t|�dS)Nz@unpack_http_url() missing 1 required keyword argument: 'session'z-unpackzpip-r)r��tempfileZmkdtemp�_check_download_dirr�r��_download_http_urlrr�r��unlinkr	)	r�r��download_dirr�r��temp_dir�already_downloaded_path�	from_pathr�r?r?rCr3|s,


cCs�t|j�}t|�rHtjj|�r&t|�tj||dd�|rDt	j
d�dS|rV|j|�d}|rjt|||�}|rt|}n|}t
j|�d}t||||�|r�|r�t|||�dS)z�Unpack link into location.

    If download_dir is provided and link points to a file, make a copy
    of the link file inside download_dir.
    T)Zsymlinksz*Link is a directory, ignoring download_dirNr)r,r�r�r�r�r�r	r�Zcopytreer�r��check_against_pathr�r�r�rr�)r�r�rr�r�rrr�r?r?rCr0�s,



c
Cs�tjj|�rt|�d}tjg}|jd�|jt|�|jd�|d|g7}tj	d|�t
��t||dd�WdQRXtjj|tj
|�d	�}tj	d
||�t||ddd�dS)z�Copy distribution files in `link_path` to `location`.

    Invoked when user requests to install a local directory. E.g.:

        pip install .
        pip install ~/dev/git-repos/python-prompt-toolkit

    zsetup.pyz-c�sdistz
--dist-dirzRunning setup.py sdist for %sF)�cwdZshow_stdoutNrzUnpacking sdist %s into %s)r�r�)r�r�r�r	rW�
executabler�rr�r�rrrZ�listdirr)r�r�Zsetup_pyZ
sdist_argsrr?r?rC�_copy_dist_from_dir�s

rc@s$eZdZdZddd�Zd	dd�ZdS)
�PipXmlrpcTransportzRProvide a `xmlrpclib.Transport` implementation via a `PipSession`
    object.
    FcCs*tjj||�tj|�}|j|_||_dS)N)r)�	Transportrlrqrrr��_scheme�_session)rkZ	index_urlr�Zuse_datetimeZindex_partsr?r?rCrl�s
zPipXmlrpcTransport.__init__c
Cs�|j||dddf}tj|�}y6ddi}|jj|||dd�}|j�||_|j|j�St	j
k
r�}	ztjd|	j
j|��WYdd}	~	XnXdS)NzContent-Typeztext/xmlT)rSr�r�zHTTP error %s while getting %s)rrqrvrZpostr��verboseZparse_responser�r�	HTTPErrorr��criticalror)
rkr�ZhandlerZrequest_bodyr�partsrsr�ror�r?r?rCrs


zPipXmlrpcTransport.requestN)F)F)r�r�r�r�rlrr?r?r?rCr�s
rcCs^t|�rt||�n:t|�r.t||||d�n |dkr<t�}t|||||d�|rZt|�dS)avUnpack link.
       If link is a VCS link:
         if only_download, export into download_dir and ignore location
          else unpack into location
       for other types of link:
         - unpack into location
         - if download_dir, copy the file into download_dir
         - if only_download, mark location for deletion

    :param hashes: A Hashes object, one of whose embedded hashes must match,
        or HashMismatch will be raised. If the Hashes is empty, no matches are
        required, and unhashable types of requirements (like VCS ones, which
        would ordinarily raise HashUnsupported) are allowed.
    )r�N)r1r/r2r0r�r3r)r�r�rZ
only_downloadr�r�r?r?rCr4scCstjj|�S)zJ
    Sanitize the "filename" value from a Content-Disposition header.
    )r�r��basename)r�r?r?rCr6<scCs,tj|�\}}|jd�}|r$t|�}|p*|S)z�
    Parse the "filename" value from a Content-Disposition header, and
    return the default filename if the result is empty.
    r�)�cgiZparse_headerrwr6)�content_dispositionZdefault_filenameZ_typeZparamsr�r?r?rCr5Ds

c
Cs*|jjdd�d}y |j|ddidd�}|j�Wn8tjk
rj}ztjd|jj	|��WYd	d	}~XnX|j
jd
d�}|j}|j
jd�}	|	r�t|	|�}t
|�d}
|
s�tj|�}
|
r�||
7}|
r�|j|jkr�tjj
|j�d}
|
r�||
7}tjj||�}t|d
��}t||||�Wd	QRX||fS)z6Download link url into temp_dir using provided session�#rGrzAccept-EncodingZidentityT)r�r�zHTTP error %s while getting %sNzcontent-typernzcontent-disposition�wb)rsr�rwr�rrr�rrorr�r�r5rr�Zguess_extensionr�r�rZr�r�)
r�r�rr�Z
target_urlr�r�r�r�rr�Z	file_pathr�r?r?rCrSs:

rcCsntjj||j�}tjj|�rjtjd|�|rfy|j|�Wn*tk
rdtj	d|�tj
|�dSX|SdS)z� Check download_dir for previously downloaded file with correct hash
        If a correct file is found return its path else None
    zFile was already downloaded %sz;Previously-downloaded file %s has bad hash. Re-downloading.N)r�r�rZr�r�r�r�rrr�r)r�rr�Z
download_pathr?r?rCr��s
r�)NN)NNN)NN)NFNN)xZ
__future__rrZemail.utilsr�r�rTr�r�r�rV�rer�rWr�rdrb�ImportErrorZpip._vendor.six.moves.urllibrrqrr�r7Zpip.exceptionsrrZ
pip.modelsrZ	pip.utilsrr	r
rrr
rrrrZpip.utils.encodingrZpip.utils.filesystemrZpip.utils.loggingrZpip.utils.setuptools_buildrZpip.utils.glibcrZpip.utils.uirrZ
pip.locationsrZpip.vcsrr\rrZpip._vendor.requests.adaptersrrZpip._vendor.requests.authrr Zpip._vendor.requests.modelsr!r"Zpip._vendor.requests.utilsr#Zpip._vendor.requests.structuresr$r%Zpip._vendor.cachecontrolr&Zpip._vendor.cachecontrol.cachesr'Zpip._vendor.lockfiler(Zpip._vendor.six.movesr)�__all__Z	getLoggerr�r�rgrhr�r�r�ZSessionr�r*�compile�Ir�r�r+r,r-r.r/r�r1r2r�r�r�r�r3r0rr
rr4r6r5rr�r?r?r?rC�<module>s�
0
BR!BH
)
`
&
0$
'8__pycache__/locations.cpython-36.opt-1.pyc000064400000007307151733136150014350 0ustar003

�Pf��
@sdZddlmZddlZddlZddlZddlZddlmZddl	m
Z
mZddlm
Z
mZddlmZejd�Zd	Zd
Zdd�Zd
d�Zdd�Ze�r�ejjejd�Zn6yejjej�d�ZWnek
r�ejd�YnXejje�Zej �Z!ej"Z#ed�Z$e
�rtejjejd�Z%ejje#d�Z&ejj'e%��sRejjejd�Z%ejje#d�Z&dZ(ejje$d�Z)ejje)e(�Z*njejjejd�Z%ejje#d�Z&dZ(ejje$d�Z)ejje)e(�Z*ej+dd�dk�r�ejdd�dk�r�dZ%dd�ej,d�D�Z-d#d!d"�Z.dS)$z7Locations where we look for configs, install stuff, etc�)�absolute_importN)�	sysconfig)�install�SCHEME_KEYS)�WINDOWS�
expanduser)�appdirsZpipz�This file is placed here by pip to indicate the source was put
here by pip.

Once this package is successfully installed this source code will be
deleted (unless you remove this file).
zpip-delete-this-directory.txtc	Cs2tjj|t�}t|d��}|jt�WdQRXdS)z?
    Write the pip delete marker file into this directory.
    �wN)�os�path�join�PIP_DELETE_MARKER_FILENAME�open�write�DELETE_MARKER_MESSAGE)Z	directory�filepathZ	marker_fp�r�/usr/lib/python3.6/locations.py�write_delete_marker_filesrcCs*ttd�rdStjttdtj�kr&dSdS)zM
    Return True if we're running inside a virtualenv, False otherwise.

    Zreal_prefixT�base_prefixF)�hasattr�sys�prefix�getattrrrrr�running_under_virtualenv's

rcCs>tjjtjjtj��}tjj|d�}t�r:tjj|�r:dSdS)z?
    Return True if in a venv and no system site packages.
    zno-global-site-packages.txtTN)	r
r�dirname�abspath�site�__file__rr�isfile)Zsite_mod_dirZno_global_filerrr�virtualenv_no_global4sr �srcz=The folder you are executing pip from can no longer be found.�~ZScripts�binzpip.inizpip.confz.pip��darwin�z/System/Library/z/usr/local/bincCsg|]}tjj|t��qSr)r
rr�config_basename)�.0rrrr�
<listcomp>wsr)FcCsNddlm}i}|r ddgi}ni}d|i}	|	j|�||	�}
|
j�|
jddd�}|p\|j|_|rjd	|_|pr|j|_|p~|j|_|p�|j|_|j	�xt
D]}t|d
|�||<q�Wd|
jd�kr�|jt
|j|jd��t��rJtjjtjd
ddtjdd�|�|d<|dk	�rJtjjtjj|d��d}
tjj||
dd��|d<|S)z+
    Return a distutils install scheme
    r)�DistributionZscript_argsz
--no-user-cfg�namerT)Zcreate�Zinstall_�install_lib)�purelib�platlib�includer�pythonN�Zheaders�)Zdistutils.distr*�updateZparse_config_filesZget_command_obj�userr�home�rootZfinalize_optionsrrZget_option_dict�dictr-rr
rrr�version�
splitdriver)Z	dist_namer5r6r7�isolatedrr*�schemeZextra_dist_argsZ	dist_args�d�i�keyZ
path_no_driverrr�distutils_scheme|sF



r@)FNNFN)/�__doc__Z
__future__rr
Zos.pathrrZ	distutilsrZdistutils.command.installrrZ
pip.compatrrZ	pip.utilsrZuser_cache_dirZUSER_CACHE_DIRrr
rrr rrrZ
src_prefix�getcwd�OSError�exitrZget_python_libZ
site_packages�	USER_SITE�	user_siteZuser_dirZbin_pyZbin_user�existsr'Zlegacy_storage_dirZlegacy_config_file�platformZsite_config_dirsZsite_config_filesr@rrrr�<module>sd
		
(__pycache__/status_codes.cpython-36.opt-1.pyc000064400000000505151733136150015046 0ustar003

�Pf��@s(ddlmZdZdZdZdZdZdZdS)�)�absolute_import�����N)Z
__future__r�SUCCESSZERRORZ
UNKNOWN_ERRORZVIRTUALENV_NOT_FOUNDZPREVIOUS_BUILD_DIR_ERRORZNO_MATCHES_FOUND�r	r	�"/usr/lib/python3.6/status_codes.py�<module>s__pycache__/exceptions.cpython-36.pyc000064400000024346151733136150013601 0ustar003

�Pf��@sddZddlmZddlmZmZmZddlmZGdd�de	�Z
Gdd�de
�ZGd	d
�d
e
�ZGdd�de�Z
Gd
d�de�ZGdd�de
�ZGdd�de
�ZGdd�de
�ZGdd�de
�ZGdd�de�ZGdd�de�ZGdd�de�ZGdd�de�ZGdd �d e�ZGd!d"�d"e�ZGd#d$�d$e�ZGd%d&�d&e�ZGd'd(�d(e�ZGd)d*�d*e�Zd+S),z"Exceptions used throughout package�)�absolute_import)�chain�groupby�repeat)�	iteritemsc@seZdZdZdS)�PipErrorzBase pip exceptionN)�__name__�
__module__�__qualname__�__doc__�rr� /usr/lib/python3.6/exceptions.pyr	src@seZdZdZdS)�InstallationErrorz%General exception during installationN)rr	r
rrrrr
r
src@seZdZdZdS)�UninstallationErrorz'General exception during uninstallationN)rr	r
rrrrr
rsrc@seZdZdZdS)�DistributionNotFoundzCRaised when a distribution cannot be found to satisfy a requirementN)rr	r
rrrrr
rsrc@seZdZdZdS)�RequirementsFileParseErrorzDRaised when a general error occurs parsing a requirements file line.N)rr	r
rrrrr
rsrc@seZdZdZdS)�BestVersionAlreadyInstalledzNRaised when the most up-to-date version of a package is already
    installed.N)rr	r
rrrrr
rsrc@seZdZdZdS)�
BadCommandz0Raised when virtualenv or a command is not foundN)rr	r
rrrrr
r"src@seZdZdZdS)�CommandErrorz7Raised when there is an error in command-line argumentsN)rr	r
rrrrr
r&src@seZdZdZdS)�PreviousBuildDirErrorz:Raised when there's a previous conflicting build directoryN)rr	r
rrrrr
r*src@seZdZdZdS)�InvalidWheelFilenamezInvalid wheel filename.N)rr	r
rrrrr
r.src@seZdZdZdS)�UnsupportedWheelzUnsupported wheel.N)rr	r
rrrrr
r2src@s8eZdZdZdd�Zdd�Zdd�Zdd	�Zd
d�ZdS)
�
HashErrorsz:Multiple HashError instances rolled into one for reportingcCs
g|_dS)N)�errors)�selfrrr
�__init__9szHashErrors.__init__cCs|jj|�dS)N)r�append)r�errorrrr
r<szHashErrors.appendcCsfg}|jjdd�d�x<t|jdd��D](\}}|j|j�|jdd�|D��q(W|rbdj|�SdS)NcSs|jS)N)�order)�errr
�<lambda>Asz$HashErrors.__str__.<locals>.<lambda>)�keycSs|jS)N)�	__class__)rrrr
r Bscss|]}|j�VqdS)N)�body)�.0rrrr
�	<genexpr>Dsz%HashErrors.__str__.<locals>.<genexpr>�
)r�sortrr�head�extend�join)r�lines�clsZ
errors_of_clsrrr
�__str__?szHashErrors.__str__cCs
t|j�S)N)�boolr)rrrr
�__nonzero__HszHashErrors.__nonzero__cCs|j�S)N)r/)rrrr
�__bool__KszHashErrors.__bool__N)	rr	r
rrrr-r/r0rrrr
r6s	rc@s0eZdZdZdZdZdd�Zdd�Zdd	�ZdS)
�	HashErrora�
    A failure to verify a package against known-good hashes

    :cvar order: An int sorting hash exception classes by difficulty of
        recovery (lower being harder), so the user doesn't bother fretting
        about unpinned packages when he has deeper issues, like VCS
        dependencies, to deal with. Also keeps error reports in a
        deterministic order.
    :cvar head: A section heading for display above potentially many
        exceptions of this kind
    :ivar req: The InstallRequirement that triggered this error. This is
        pasted on after the exception is instantiated, because it's not
        typically available earlier.

    N�cCsd|j�S)a)Return a summary of me for display under the heading.

        This default implementation simply prints a description of the
        triggering requirement.

        :param req: The InstallRequirement that provoked this error, with
            populate_link() having already been called

        z    %s)�_requirement_name)rrrr
r#bs
zHashError.bodycCsd|j|j�fS)Nz%s
%s)r(r#)rrrr
r-nszHashError.__str__cCs|jrt|j�SdS)z�Return a description of the requirement that triggered me.

        This default implementation returns long description of the req, with
        line numbers

        zunknown package)�req�str)rrrr
r3qszHashError._requirement_name)	rr	r
rr4r(r#r-r3rrrr
r1Osr1c@seZdZdZdZdZdS)�VcsHashUnsupportedzuA hash was provided for a version-control-system-based requirement, but
    we don't have a method for hashing those.rzlCan't verify hashes for these requirements because we don't have a way to hash version control repositories:N)rr	r
rrr(rrrr
r6{sr6c@seZdZdZdZdZdS)�DirectoryUrlHashUnsupportedzuA hash was provided for a version-control-system-based requirement, but
    we don't have a method for hashing those.�zUCan't verify hashes for these file:// requirements because they point to directories:N)rr	r
rrr(rrrr
r7�sr7c@s(eZdZdZdZdZdd�Zdd�ZdS)	�HashMissingz2A hash was needed for a requirement but is absent.�awHashes are required in --require-hashes mode, but they are missing from some requirements. Here is a list of those requirements along with the hashes their downloaded archives actually had. Add lines like these to your requirements files to prevent tampering. (If you did not enable --require-hashes manually, note that it turns on automatically when any package has a hash.)cCs
||_dS)zq
        :param gotten_hash: The hash of the (possibly malicious) archive we
            just downloaded
        N)�gotten_hash)rr;rrr
r�szHashMissing.__init__cCsHddlm}d}|jr4|jjr&|jjnt|jdd�}d|p<d||jfS)Nr)�
FAVORITE_HASHr4z    %s --hash=%s:%szunknown package)Zpip.utils.hashesr<r4Z
original_link�getattrr;)rr<�packagerrr
r#�szHashMissing.bodyN)rr	r
rrr(rr#rrrr
r9�s
r9c@seZdZdZdZdZdS)�HashUnpinnedzPA requirement had a hash specified but was not pinned to a specific
    version.�zaIn --require-hashes mode, all requirements must have their versions pinned with ==. These do not:N)rr	r
rrr(rrrr
r?�sr?c@s0eZdZdZdZdZdd�Zdd�Zdd	�Zd
S)�HashMismatchz�
    Distribution file hash values don't match.

    :ivar package_name: The name of the package that triggered the hash
        mismatch. Feel free to write to this after the exception is raise to
        improve its error message.

    �z�THESE PACKAGES DO NOT MATCH THE HASHES FROM THE REQUIREMENTS FILE. If you have updated the package versions, please update the hashes. Otherwise, examine the package contents carefully; someone may have tampered with them.cCs||_||_dS)z�
        :param allowed: A dict of algorithm names pointing to lists of allowed
            hex digests
        :param gots: A dict of algorithm names pointing to hashes we
            actually got from the files under suspicion
        N)�allowed�gots)rrCrDrrr
r�szHashMismatch.__init__cCsd|j�|j�fS)Nz
    %s:
%s)r3�_hash_comparison)rrrr
r#�szHashMismatch.bodycsjdd�}g}xRt|j�D]D\}}||��|j�fdd�|D��|jd|j|j��d�qWdj|�S)aE
        Return a comparison of actual and expected hash values.

        Example::

               Expected sha256 abcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcde
                            or 123451234512345123451234512345123451234512345
                    Got        bcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdef

        cSst|gtd��S)Nz    or)rr)�	hash_namerrr
�hash_then_or�sz3HashMismatch._hash_comparison.<locals>.hash_then_orc3s|]}dt��|fVqdS)z        Expected %s %sN)�next)r$r)�prefixrr
r%�sz0HashMismatch._hash_comparison.<locals>.<genexpr>z             Got        %s
z    orr&)rrCr)rrDZ	hexdigestr*)rrGr+rFZ	expectedsr)rIr
rE�s
zHashMismatch._hash_comparisonN)	rr	r
rrr(rr#rErrrr
rA�s
rAc@seZdZdZdS)�UnsupportedPythonVersionzMUnsupported python version according to Requires-Python package
    metadata.N)rr	r
rrrrr
rJ�srJN)rZ
__future__r�	itertoolsrrrZpip._vendor.sixr�	Exceptionrrrrrrrrrrrrr1r6r7r9r?rArJrrrr
�<module>s,,		$	8__pycache__/download.cpython-36.opt-1.pyc000064400000050324151733136150014161 0ustar003

�PfO��@s^ddlmZddlZddlZddlZddlZddlZddlZddl	Z	ddl
Z
ddlZddlZddl
Z
ddlZyddlZdZWnek
r�dZYnXddlmZddlmZddlZddlmZmZddlmZdd	lmZmZm Z m!Z!m"Z"m#Z#m$Z$m%Z%m&Z&m'Z'dd
l(m)Z)ddl*m+Z+ddl,m-Z-dd
l.m/Z/ddl0m1Z1ddl2m3Z3m4Z4ddl5m6Z6ddl7m8Z8ddl9m:Z:m;Z;ddl<m=Z=m>Z>ddl?m@Z@mAZAddlBmCZCmDZDddlEmFZFddlGmHZHddl9mIZIddlJmKZKddlLmMZMddlNmOZOddlPmQZQdddd d!d"d#d$d%d&d'd(d)g
ZRejSeT�ZUd*d+�ZVGd,d-�d-e@�ZWGd.d/�d/e=�ZXGd0d1�d1eM�ZYGd2d3�d3e>�ZZGd4d5�d5e:j[�Z\dWd6d�Z]ej^d7ej_�Z`ej^d8ej_�Zad9d�Zbd:d�Zcd;d �Zdd<d!�Zed=d"�Zfd>d?�Zgd@d$�ZhdAd%�ZidBdC�ZjdDdE�ZkdFdG�ZldHdI�ZmdXdJd&�ZndYdKd#�ZodLdM�ZpGdNdO�dOeQjq�ZrdZdPd'�ZsdQd)�ZtdRd(�ZudSdT�ZvdUdV�ZwdS)[�)�absolute_importNTF)�parse)�request)�InstallationError�HashMismatch)�PyPI)
�splitext�rmtree�format_size�display_path�
backup_dir�ask_path_exists�unpack_file�ARCHIVE_EXTENSIONS�consume�call_subprocess)�auto_decode)�check_path_owner)�
indent_log)�SETUPTOOLS_SHIM)�libc_ver)�DownloadProgressBar�DownloadProgressSpinner)�write_delete_marker_file)�vcs)�requests�six)�BaseAdapter�HTTPAdapter)�AuthBase�
HTTPBasicAuth)�CONTENT_CHUNK_SIZE�Response)�get_netrc_auth)�CaseInsensitiveDict)�urllib3)�CacheControlAdapter)�	FileCache)�	LockError)�
xmlrpc_client�get_file_content�is_url�url_to_path�path_to_url�is_archive_file�unpack_vcs_link�unpack_file_url�
is_vcs_url�is_file_url�unpack_http_url�
unpack_url�parse_content_disposition�sanitize_content_filenamecCsdtjd�tj�dtj�id�}|dddkrBtj�|dd<n�|dddkr�tjjd	krntjd
d�}ntj}djd
d�|D��|dd<nB|dddkr�tj�|dd<n |dddkr�tj�|dd<tjj	d��rJddl
m}tt
dd�tdddg|j����}tt
dd�tddgt����}|�r<||d<|�rJ||d<tjj	d��r|tj�d�r|dtj�dd�|d<tj��r�tj�|jdi�d<tj��r�tj�|jdi�d<tj��r�tj�|d<t�r�tjd
d �d)k�r�tj|d"<d#j|tj|d*d&d'�d(�S)+z6
    Return a string representing the user agent.
    �pip)�name�versionr8)Z	installer�python�implementationr;ZCPythonr9ZPyPy�finalN��.cSsg|]}t|��qS�)�str)�.0�xr?r?�/usr/lib/python3.6/download.py�
<listcomp>Tszuser_agent.<locals>.<listcomp>ZJythonZ
IronPython�linuxr)�distrocSs|dS)N�r?)rBr?r?rC�<lambda>`szuser_agent.<locals>.<lambda>�idcSs|dS)NrGr?)rBr?r?rCrHds�lib�libcrF�darwinZmacOS�system�releaseZcpu��Zopenssl_versionz9{data[installer][name]}/{data[installer][version]} {json}�,�:T)Z
separatorsZ	sort_keys)�data�json)rOrP)rQrR)r7�__version__�platformZpython_versionZpython_implementation�sys�pypy_version_info�releaselevel�join�
startswith�pip._vendorrF�dict�filter�zipZlinux_distributionrZmac_verrM�
setdefaultrN�machine�HAS_TLS�version_info�sslZOPENSSL_VERSION�formatrT�dumps)rSrXrFZdistro_infosrKr?r?rC�
user_agent@sP




rgc@s.eZdZddd�Zdd�Zdd�Zdd	�Zd
S)�MultiDomainBasicAuthTcCs||_i|_dS)N)�	prompting�	passwords)�selfrir?r?rC�__init__�szMultiDomainBasicAuth.__init__cCs�tj|j�}|jjdd�d}tj|dd�|f|dd��|_|jj|d�\}}|dkrn|j|j�\}}|dkr�|dkr�t	|j�}|r�|nd\}}|s�|r�||f|j|<t
|p�d|p�d�|�}|jd|j�|S)	N�@rGrO��response���)NN)NN)
�urllib_parse�urlparse�url�netloc�rsplit�
urlunparserj�get�parse_credentialsr#r Z
register_hook�
handle_401)rk�req�parsedrt�username�passwordZ
netrc_authr?r?rC�__call__�s&
zMultiDomainBasicAuth.__call__cKs�|jdkr|S|js|Stj|j�}tjjd|j�}t	j	d�}|sH|rX||f|j
|j<|j|jj
�t|ppd|pvd�|j�}|jj|f|�}|jj|�|S)Ni�z
User for %s: z
Password: rn)�status_coderirqrrrsrZmoves�inputrt�getpassrj�content�rawZrelease_connr rZ
connection�send�history�append)rk�resp�kwargsr{r|r}rzZnew_respr?r?rCry�s


zMultiDomainBasicAuth.handle_401cCs8d|kr4|jdd�d}d|kr,|jdd�S|dfSdS)NrmrGrrR)NN)ru�split)rkrtZuserinfor?r?rCrx�sz&MultiDomainBasicAuth.parse_credentialsN)T)�__name__�
__module__�__qualname__rlr~ryrxr?r?r?rCrh�s
!"rhc@seZdZddd�Zdd�ZdS)�LocalFSAdapterNc
Cs�t|j�}t�}d|_|j|_ytj|�}	Wn.tk
rZ}
zd|_|
|_WYdd}
~
XnPXtj	j
|	jdd�}tj
|�dp~d}t||	j|d��|_t|d�|_|jj|_|S)	N��i�T)Zusegmtrz
text/plain)zContent-TypezContent-Lengthz
Last-Modified�rb)r,rsr"r�os�stat�OSErrorr��emailZutilsZ
formatdate�st_mtime�	mimetypes�
guess_typer$�st_size�headers�open�close)
rkr�stream�timeout�verify�certZproxies�pathnamer�Zstats�excZmodified�content_typer?r?rCr��s$

zLocalFSAdapter.sendcCsdS)Nr?)rkr?r?rCr��szLocalFSAdapter.close)NNNNN)r�r�r�r�r�r?r?r?rCr��s
r�csDeZdZdZ�fdd�Z�fdd�Z�fdd�Z�fdd	�Z�ZS)
�
SafeFileCachezw
    A file based cache which is safe to use even when the target directory may
    not be accessible or writable.
    cs4tt|�j||�t|j�s0tjd|j�d|_dS)Nz�The directory '%s' or its parent directory is not owned by the current user and the cache has been disabled. Please check the permissions and owner of that directory. If executing pip with sudo, you may want sudo's -H flag.)�superr�rlr�	directory�logger�warning)rk�argsr�)�	__class__r?rCrl�s
zSafeFileCache.__init__c
s@|jdkrdSytt|�j||�Stttfk
r:YnXdS)N)r�r�r�rwr(r��IOError)rkr�r�)r�r?rCrws
zSafeFileCache.getc
s@|jdkrdSytt|�j||�Stttfk
r:YnXdS)N)r�r�r��setr(r�r�)rkr�r�)r�r?rCr�s
zSafeFileCache.setc
s@|jdkrdSytt|�j||�Stttfk
r:YnXdS)N)r�r�r��deleter(r�r�)rkr�r�)r�r?rCr�)s
zSafeFileCache.delete)	r�r�r��__doc__rlrwr�r��
__classcell__r?r?)r�rCr��s


r�c@seZdZdd�ZdS)�InsecureHTTPAdaptercCsd|_d|_dS)NZ	CERT_NONE)Z	cert_reqsZca_certs)rkZconnrsr�r�r?r?rC�cert_verify9szInsecureHTTPAdapter.cert_verifyN)r�r�r�r�r?r?r?rCr�7sr�cs,eZdZdZ�fdd�Z�fdd�Z�ZS)�
PipSessionNc	s�|jdd�}|jdd�}|jdg�}tt|�j||�t�|jd<t�|_tj	|dgdd�}|rvt
t|d	d
�|d�}n
t|d�}t
|d�}|jd
|�|jd|�|jdt��x|D]}|jdj|�|�q�WdS)N�retriesr�cache�insecure_hostsz
User-Agenti�g�?)ZtotalZstatus_forcelistZbackoff_factorT)Zuse_dir_lock)r��max_retries)r�zhttps://zhttp://zfile://zhttps://{0}/)�popr�r�rlrgr�rhZauthr%ZRetryr&r�rr�Zmountr�re)	rkr�r�r�r�r�Zsecure_adapterZinsecure_adapter�host)r�r?rCrlBs*




zPipSession.__init__cs(|jd|j�tt|�j||f|�|�S)Nr�)r`r�r�r�r)rk�methodrsr�r�)r�r?rCr~szPipSession.request)r�r�r�r�rlrr�r?r?)r�rCr�>s<r�c
CsL|dkrtd��tj|�}|r�|jd�j�}|dkrR|rR|jd�rRtd||f��|dkr�|jdd�d}|jdd	�}t	j
|�}|r�|jd�d|jd
d�d}tj|�}|jd	�r�d	|j
d	�}|}n|j|�}|j�|j|jfSy&t|d��}t|j��}WdQRXWn4tk
�rB}	ztdt|	���WYdd}	~	XnX||fS)
z�Gets the content of a file; it may be a filename, file: URL, or
    http: URL.  Returns (location, content).  Content is unicode.NzAget_file_content() missing 1 required keyword argument: 'session'rG�file�httpz6Requirements file %s references URL %s, which is localrR�\�/�|r�z$Could not open requirements file: %s)�	TypeError�
_scheme_re�search�group�lowerr[rr��replace�_url_slash_drive_re�matchrqZunquote�lstriprw�raise_for_statusrs�textr�r�readr�r@)
rsZ
comes_from�sessionr��scheme�pathr��fr�r�r?r?rCr*�s>





 z^(http|https|file):z/*([a-z])\|cCs6d|krdS|jdd�dj�}|ddddgtjkS)	z)Returns true if the name looks like a URLrRFrGrr�Zhttpsr�Zftp)r�r�rZall_schemes)r8r�r?r?rCr+�scCs2tj|�\}}}}}|r d|}tj||�}|S)z(
    Convert a file: URL to a path.
    z\\)rqZurlsplit�urllib_requestZurl2pathname)rs�_rtr�r?r?rCr,�s
cCs*tjjtjj|��}tjdtj|��}|S)zh
    Convert a path to a file: URL.  The path will be made absolute and have
    quoted path parts.
    zfile:)r�r��normpath�abspathrqZurljoinr�Zpathname2url)r�rsr?r?rCr-�scCs t|�dj�}|tkrdSdS)z9Return True if `name` is a considered as an archive file.rGTF)rr�r)r8�extr?r?rCr.�scCst|�}|j|�dS)N)�_get_used_vcs_backend�unpack)�link�location�vcs_backendr?r?rCr/�scCs.x(tjD]}|j|jkr||j�}|SqWdS)N)rZbackendsr�Zschemesrs)r�Zbackendr�r?r?rCr��s
r�cCstt|��S)N)�boolr�)r�r?r?rCr1�scCs|jj�jd�S)Nzfile:)rsr�r[)r�r?r?rCr2�scCst|j�}tjj|�S)z�Return whether a file:// Link points to a directory.

    ``link`` must not have any other scheme but file://. Call is_file_url()
    first.

    )r,�url_without_fragmentr�r��isdir)r��	link_pathr?r?rC�
is_dir_url�s
r�cOs|S)Nr?)�iterabler�r�r?r?rC�_progress_indicator�sr�c

sLyt�jd�}Wntttfk
r0d}YnXt�dd�}tj�tj	krRd}n&|r\d}n|dkrjd}n|std}nd}|j
}�fdd	�}�fd
d�}	t}
|jt
jkr�|}n|j}|r�|r�tjd|t|��t|d
�j}
ntjd|�t�j}
n |�rtjd|�ntjd|�tjd|�|	|
|t�t��}|�r@|j|�nt|�dS)Nzcontent-lengthrZ
from_cacheF�(i�Tc3s\y$x�jj|dd�D]
}|VqWWn2tk
rVx�jj|�}|sHP|Vq6WYnXdS)NF)Zdecode_content)r�r��AttributeErrorr�)Z
chunk_size�chunk)r�r?rC�	resp_readsz _download_url.<locals>.resp_readc3s"x|D]}�j|�|VqWdS)N)�write)Zchunksr�)�content_filer?rC�written_chunks;s

z%_download_url.<locals>.written_chunkszDownloading %s (%s))�maxzDownloading %szUsing cached %szDownloading from URL %si@�)�intr��
ValueError�KeyErrorr��getattrr�ZgetEffectiveLevel�logging�INFO�show_urlr�rtrr��infor
r�iterr�debugr!Zcheck_against_chunksr)
r�r�r��hashesZtotal_lengthZcached_respZ
show_progressr�r�r�Zprogress_indicatorrsZdownloaded_chunksr?)r�r�rC�
_download_urlsL
%
r�cCs�d}tjj||j�}tjj|�r�tdt|�d�}|dkr@d}nj|dkrdtjdt|��tj	|�nF|dkr�t
|�}tjd	t|�t|��tj||�n|dkr�t
jd
�|r�tj||�tjdt|��dS)NTz8The file %s exists. (i)gnore, (w)ipe, (b)ackup, (a)abort�i�w�b�aFzDeleting %szBacking up %s to %srGzSaved %s)r�r�r�r�rp)r�r�rZ�filename�existsr
rr�r��remover�shutilZmoverW�exit�copyr�)r�r�r�r�Zdownload_locationroZ	dest_filer?r?rC�
_copy_fileas.

r�c	Cs�|dkrtd��tjdd�}d}|r0t|||�}|rH|}tj|�d}nt||||�\}}t||||�|r~|r~t|||�|s�t	j
|�t|�dS)Nz@unpack_http_url() missing 1 required keyword argument: 'session'z-unpackzpip-r)r��tempfileZmkdtemp�_check_download_dirr�r��_download_http_urlrr�r��unlinkr	)	r�r��download_dirr�r��temp_dir�already_downloaded_path�	from_pathr�r?r?rCr3|s,


cCs�t|j�}t|�rHtjj|�r&t|�tj||dd�|rDt	j
d�dS|rV|j|�d}|rjt|||�}|rt|}n|}t
j|�d}t||||�|r�|r�t|||�dS)z�Unpack link into location.

    If download_dir is provided and link points to a file, make a copy
    of the link file inside download_dir.
    T)Zsymlinksz*Link is a directory, ignoring download_dirNr)r,r�r�r�r�r�r	r�Zcopytreer�r��check_against_pathr�r�r�rr�)r�r�rr�r�rrr�r?r?rCr0�s,



c
Cs�tjj|�rt|�d}tjg}|jd�|jt|�|jd�|d|g7}tj	d|�t
��t||dd�WdQRXtjj|tj
|�d	�}tj	d
||�t||ddd�dS)z�Copy distribution files in `link_path` to `location`.

    Invoked when user requests to install a local directory. E.g.:

        pip install .
        pip install ~/dev/git-repos/python-prompt-toolkit

    zsetup.pyz-c�sdistz
--dist-dirzRunning setup.py sdist for %sF)�cwdZshow_stdoutNrzUnpacking sdist %s into %s)r�r�)r�r�r�r	rW�
executabler�rr�r�rrrZ�listdirr)r�r�Zsetup_pyZ
sdist_argsrr?r?rC�_copy_dist_from_dir�s

r
c@s$eZdZdZddd�Zd	dd�ZdS)
�PipXmlrpcTransportzRProvide a `xmlrpclib.Transport` implementation via a `PipSession`
    object.
    FcCs*tjj||�tj|�}|j|_||_dS)N)r)�	Transportrlrqrrr��_scheme�_session)rkZ	index_urlr�Zuse_datetimeZindex_partsr?r?rCrl�s
zPipXmlrpcTransport.__init__c
Cs�|j||dddf}tj|�}y6ddi}|jj|||dd�}|j�||_|j|j�St	j
k
r�}	ztjd|	j
j|��WYdd}	~	XnXdS)NzContent-Typeztext/xmlT)rSr�r�zHTTP error %s while getting %s)r
rqrvrZpostr��verboseZparse_responser�r�	HTTPErrorr��criticalror)
rkr�ZhandlerZrequest_bodyr�partsrsr�ror�r?r?rCrs


zPipXmlrpcTransport.requestN)F)F)r�r�r�r�rlrr?r?r?rCr�s
rcCs^t|�rt||�n:t|�r.t||||d�n |dkr<t�}t|||||d�|rZt|�dS)avUnpack link.
       If link is a VCS link:
         if only_download, export into download_dir and ignore location
          else unpack into location
       for other types of link:
         - unpack into location
         - if download_dir, copy the file into download_dir
         - if only_download, mark location for deletion

    :param hashes: A Hashes object, one of whose embedded hashes must match,
        or HashMismatch will be raised. If the Hashes is empty, no matches are
        required, and unhashable types of requirements (like VCS ones, which
        would ordinarily raise HashUnsupported) are allowed.
    )r�N)r1r/r2r0r�r3r)r�r�rZ
only_downloadr�r�r?r?rCr4scCstjj|�S)zJ
    Sanitize the "filename" value from a Content-Disposition header.
    )r�r��basename)r�r?r?rCr6<scCs,tj|�\}}|jd�}|r$t|�}|p*|S)z�
    Parse the "filename" value from a Content-Disposition header, and
    return the default filename if the result is empty.
    r�)�cgiZparse_headerrwr6)�content_dispositionZdefault_filenameZ_typeZparamsr�r?r?rCr5Ds

c
Cs*|jjdd�d}y |j|ddidd�}|j�Wn8tjk
rj}ztjd|jj	|��WYd	d	}~XnX|j
jd
d�}|j}|j
jd�}	|	r�t|	|�}t
|�d}
|
s�tj|�}
|
r�||
7}|
r�|j|jkr�tjj
|j�d}
|
r�||
7}tjj||�}t|d
��}t||||�Wd	QRX||fS)z6Download link url into temp_dir using provided session�#rGrzAccept-EncodingZidentityT)r�r�zHTTP error %s while getting %sNzcontent-typernzcontent-disposition�wb)rsr�rwr�rrr�rrorr�r�r5rr�Zguess_extensionr�r�rZr�r�)
r�r�rr�Z
target_urlr�r�r�r�rr�Z	file_pathr�r?r?rCr�Ss:

r�cCsntjj||j�}tjj|�rjtjd|�|rfy|j|�Wn*tk
rdtj	d|�tj
|�dSX|SdS)z� Check download_dir for previously downloaded file with correct hash
        If a correct file is found return its path else None
    zFile was already downloaded %sz;Previously-downloaded file %s has bad hash. Re-downloading.N)r�r�rZr�r�r�r�rrr�r)r�rr�Z
download_pathr?r?rCr��s
r�)NN)NNN)NN)NFNN)xZ
__future__rrZemail.utilsr�r�rTr�r�r�rV�rer�rWr�rdrb�ImportErrorZpip._vendor.six.moves.urllibrrqrr�r7Zpip.exceptionsrrZ
pip.modelsrZ	pip.utilsrr	r
rrr
rrrrZpip.utils.encodingrZpip.utils.filesystemrZpip.utils.loggingrZpip.utils.setuptools_buildrZpip.utils.glibcrZpip.utils.uirrZ
pip.locationsrZpip.vcsrr\rrZpip._vendor.requests.adaptersrrZpip._vendor.requests.authrr Zpip._vendor.requests.modelsr!r"Zpip._vendor.requests.utilsr#Zpip._vendor.requests.structuresr$r%Zpip._vendor.cachecontrolr&Zpip._vendor.cachecontrol.cachesr'Zpip._vendor.lockfiler(Zpip._vendor.six.movesr)�__all__Z	getLoggerr�r�rgrhr�r�r�ZSessionr�r*�compile�Ir�r�r+r,r-r.r/r�r1r2r�r�r�r�r3r0r
rrr4r6r5r�r�r?r?r?rC�<module>s�
0
BR!BH
)
`
&
0$
'8__pycache__/__init__.cpython-36.pyc000064400000020671151733136150013154 0ustar003

�Pf�.�@s�ddlmZddlZddlZddlZddlZddlZddlZddlZddl	m
Z
ejde
d�yddlZWne
k
r~YnNXejdkr�eedd�dkr�ydd	lmZWne
efk
r�Yn
Xej�dd
lmZmZmZddlmZmZddlmZmZdd
lmZmZm Z m!Z!ddl"m#Z#m$Z$ddl%m&Z&m'Z'ddl%m(Z(ddl	m)Z)ddl*Z+e+j,Z,dZ-ej.e/�Z0ejde)d�dd�Z1dd�Z2dd�Z3dd�Z4d dd�Z5Gdd�de6�Z7e/dk�r�ej8e5��dS)!�)�absolute_importN)�DependencyWarning�ignore)�category�darwinZOPENSSL_VERSION_NUMBERi)�securetransport)�InstallationError�CommandError�PipError)�get_installed_distributions�get_prog)�deprecation�dist_is_editable)�git�	mercurial�
subversion�bazaar)�ConfigOptionParser�UpdatingDefaultsHelpFormatter)�
get_summaries�get_similar_commands)�
commands_dict)�InsecureRequestWarningz9.0.3csZdtjkrdStjdj�dd�}ttjd�}y||d�Wntk
rZd�YnXdd�t�D��g}y�fd	d�|D�d
}Wntk
r�d}YnXt�}|�r�|dkr�tjd�|dkoԈj	d
��rJg}�j
�}x<tdd�D].}|jj	|�r�|j|dd�kr�|j
|j�q�W|�rJx|D]}t|��q.Wtjd�t|�}|dd�|jjD�7}dd�|d|d�D���fdd�|D�}�fdd�|D�}x�|D](}	|	d
}
|	d�r�|
d7}
t|
��q�Wnp�j	d
��s�j	d��r0dd�|jD�}|j
|j�dd�|D�}�dd�|D�7�tdj�fdd��D���tjd�dS)z�Command and option completion for the main option parser (and options)
    and its subcommands (and options).

    Enable by sourcing one of the completion shell scripts (bash, zsh or fish).
    ZPIP_AUTO_COMPLETENZ
COMP_WORDS�Z
COMP_CWORD�cSsg|]\}}|�qS�r)�.0�cmdZsummaryrr�/usr/lib/python3.6/__init__.py�
<listcomp>Usz autocomplete.<locals>.<listcomp>csg|]}|�kr|�qSrr)r�w)�subcommandsrrrYsr�helpZ	uninstall�-T)Z
local_onlycSs&g|]}|jtjkr|j�|jf�qSr)r"�optparse�
SUPPRESS_HELP�get_opt_string�nargs)r�optrrrrqscSsg|]}|jd�d�qS)�=r)�split)r�xrrrrvscs g|]\}}|�kr||f�qSrr)rr+�v)�	prev_optsrrrwscs"g|]\}}|j��r||f�qSr)�
startswith)r�kr,)�currentrrrysr)z--cSsg|]
}|j�qSr)�option_list)r�irrrr�scss|]}|D]
}|Vq
qdS)Nr)r�it�orrr�	<genexpr>�szautocomplete.<locals>.<genexpr>cSs g|]}|jtjkr|j��qSr)r"r$r%r&)rr2rrrr�s� csg|]}|j��r|�qSr)r.)rr+)r0rrr�s)�os�environr*�int�
IndexErrorr�create_main_parser�sys�exitr.�lowerr�key�append�printr�parserZoption_list_allZ
option_groupsr1�join)ZcwordsZcwordZoptionsZsubcommand_namerBZ	installedZlc�distZ
subcommandZoptionZ	opt_labelZoptsr)r0r-r!r�autocompleteEs\








rEcCs�ddt�dt�d�}tf|�}|j�tjjtjjtjjt���}dt	|t
jdd�f|_tj
tj|�}|j|�d|_t�}dgd	d
�|D�}dj|�|_|S)Nz
%prog <command> [options]F�global)ZusageZadd_help_optionZ	formatter�name�progzpip %s from %s (python %s)�TrcSsg|]\}}d||f�qS)z%-27s %sr)rr2�jrrrr�sz&create_main_parser.<locals>.<listcomp>�
)rrrZdisable_interspersed_argsr7�path�dirname�abspath�__file__�__version__r<�version�
cmdoptionsZmake_option_groupZ
general_groupZadd_option_group�mainrrC�description)Z	parser_kwrBZpip_pkg_dirZgen_optsZcommand_summariesrTrrrr;�s"


r;cCs�t�}|j|�\}}|jr>tjj|j�tjjtj�tj�|s\|ddkrlt	|�dkrl|j
�tj�|d}|tkr�t|�}d|g}|r�|j
d|�tdj|���|dd�}|j|�||fS)Nrr"rzunknown command "%s"zmaybe you meant "%s"z - )r;�
parse_argsrQr<�stdout�writer7�linesepr=�lenZ
print_helprrr@r	rC�remove)�argsrBZgeneral_optionsZ	args_else�cmd_nameZguess�msg�cmd_argsrrr�	parseopts�s&	

r_cCsd}d|krd}|S)NFz
--isolatedTr)r[�isolatedrrr�check_isolated�sracCs�|dkrtjdd�}tj�t�yt|�\}}WnJtk
r~}z.tjjd|�tjjt	j
�tjd�WYdd}~XnXytj
tjd�Wn0tjk
r�}ztjd|�WYdd}~XnXt|t|�d�}|j|�S)Nrz	ERROR: %srz%Ignoring error %s when setting locale)r`)r<�argvr
Zinstall_warning_loggerrEr_r
�stderrrWr7rXr=�locale�	setlocale�LC_ALL�Error�logger�debugrrarS)r[r\r^�exc�eZcommandrrrrS�s rSc@sLeZdZffdd�Zejd�Zejd�Zedd��Z	e
dd��Zd	d
�ZdS)�FrozenRequirementcCs||_||_||_||_dS)N)rG�req�editable�comments)�selfrGrmrnrorrr�__init__�szFrozenRequirement.__init__z-r(\d+)$z-(20\d\d\d\d\d\d)$cCs�tjjtjj|j��}g}ddlm}m}t|�r�|j	|�r�d}y|||�}Wn2t
k
r�}	ztjd|	�d}WYdd}	~	XnX|dkr�tjd|�|j
d�|j�}d}n�d}|j�}|j}
t|
�dkr�|
dddks�td|
|f��|
dd}|jj|�}|jj|�}
|�s|
�r�|jd�}|�r:|�j||�}|�sXtjd
|�|j
d�nF|j
d|�|�rx|jd�}nd|
jd�}d}d|||j|�f}||j|||�S)Nr)�vcs�get_src_requirementTzYError when trying to get requirement for VCS system %s, falling back to uneditable formatz-Could not determine repository location of %sz-## !! Could not determine repository locationFr�==�===z5Expected 1 spec with == or ===; specs = %r; dist = %rZsvnz(Warning: cannot find svn location for %szF## FIXME: could not find svn URL in dependency_links for this package:z3# Installing as editable to satisfy requirement %s:z{%s}z%s@%s#egg=%s)rtru)r7rL�normcaserN�location�pip.vcsrrrsrZget_backend_namerrhZwarningr@Zas_requirement�specsrY�AssertionError�_rev_re�search�_date_reZget_backendZget_location�group�egg_nameZproject_name)�clsrDZdependency_linksrwrorrrsrnrmrjryrQZ	ver_matchZ
date_matchZsvn_backendZsvn_locationZrevrrr�	from_distsf
zFrozenRequirement.from_distcCs,|j�}tjd|�}|r(|d|j��}|S)Nz
-py\d\.\d$)r�rer|�start)rDrG�matchrrrrIs
zFrozenRequirement.egg_namecCs2|j}|jrd|}djt|j�t|�g�dS)Nz-e %srK)rmrnrC�listro�str)rprmrrr�__str__QszFrozenRequirement.__str__N)
�__name__�
__module__�__qualname__rqr��compiler{r}�classmethodr��staticmethodrr�rrrrrl�s

Arl�__main__)N)9Z
__future__rrdZloggingr7r$�warningsr<r�Zpip._vendor.urllib3.exceptionsr�filterwarningsZssl�ImportError�platform�getattrZpip._vendor.urllib3.contribr�OSErrorZinject_into_urllib3Zpip.exceptionsrr	r
Z	pip.utilsrrr
rrxrrrrZpip.baseparserrrZpip.commandsrrrrZpip.cmdoptionsZpiprRrPZ	getLoggerr�rhrEr;r_rarS�objectrlr=rrrr�<module>sR


I*	
[
__pycache__/__main__.cpython-36.pyc000064400000000551151733136150013130 0ustar003

�PfH�@shddlmZddlZddlZedkrFejjejje��Zejjde�ddl	Z	e
dkrdeje	j��dS)�)�absolute_importN��__main__)
Z
__future__r�os�sys�__package__�path�dirname�__file__�insertZpip�__name__�exit�main�rr�/usr/lib/python3.6/__main__.py�<module>s__pycache__/pep425tags.cpython-36.pyc000064400000016433151733136150013314 0ustar003

�Pf�*�
@sdZddlmZddlZddlZddlZddlZddlZyddlZWne	k
rbddl
jZYnXddlZddl
mZddlZeje�Zejd�Zdd�Zdd	�Zd
d�Zdd
�Zdd�Zd!dd�Zdd�Zdd�Zdd�Zdd�Zdd�Z d"dd�Z!e!�Z"e!dd �Z#e�Z$dS)#z2Generate and work with PEP 425 Compatibility Tags.�)�absolute_importN)�OrderedDictz(.+)_(\d+)_(\d+)_(.+)cCsBy
tj|�Stk
r<}ztjdj|�t�dSd}~XnXdS)Nz{0})�	sysconfig�get_config_var�IOError�warnings�warn�format�RuntimeWarning)�var�e�r
� /usr/lib/python3.6/pep425tags.pyrs

rcCs:ttd�rd}n&tjjd�r"d}ntjdkr2d}nd}|S)z'Return abbreviated implementation name.�pypy_version_info�pp�javaZjyZcliZip�cp)�hasattr�sys�platform�
startswith)Zpyimplr
r
r�
get_abbr_impl!s

rcCs.td�}|st�dkr*djttt���}|S)zReturn implementation version.�py_version_nodotr�)rr�join�map�str�get_impl_version_info)Zimpl_verr
r
r�get_impl_ver.srcCs:t�dkr"tjdtjjtjjfStjdtjdfSdS)zQReturn sys.version_info-like tuple for use in decrementing the minor
    version.rr�N)rr�version_infor�major�minorr
r
r
rr6s

rcCsdjt�t��S)z;
    Returns the Tag for this specific implementation.
    z{0}{1})r	rrr
r
r
r�get_impl_tagAsr#TcCs.t|�}|dkr&|r tjd|�|�S||kS)zgUse a fallback method for determining SOABI flags if the needed config
    var is unset or unavailable.Nz>Config variable '%s' is unset, Python ABI tag may be incorrect)r�logger�debug)rZfallback�expectedr�valr
r
r�get_flagHsr(cs�td�}t��|r��dkr�ttd�r�d}d}d}tddd��dkd	�rLd
}td�fdd��dkd	�rjd
}tddd�d�dko�tjdkd�r�tjdkr�d}d�t�|||f}n@|r�|jd�r�d|jd�d}n|r�|j	dd�j	dd�}nd}|S)zXReturn the ABI tag based on SOABI (if available) or emulate SOABI
    (CPython 2, PyPy).�SOABIrr�
maxunicoder�Py_DEBUGcSs
ttd�S)N�gettotalrefcount)rrr
r
r
r�<lambda>^szget_abi_tag.<locals>.<lambda>)r�d�
WITH_PYMALLOCcs�dkS)Nrr
r
)�implr
rr-bs�mZPy_UNICODE_SIZEcSs
tjdkS)Ni��)rr*r
r
r
rr-fs��)r&r�uz
%s%s%s%s%szcpython-�-r�.�_N)rr)r3r3)r3r3)
rrrrr(r rr�split�replace)Zsoabir.r1r4�abir
)r0r�get_abi_tagTs8

r;cCs
tjdkS)Ni���)r�maxsizer
r
r
r�_is_running_32bitvsr=cCs�tjdkr^tj�\}}}|jd�}|dkr6t�r6d}n|dkrHt�rHd}dj|d|d	|�Stjj�j	dd
�j	dd
�}|dkr�t�r�d
}|S)z0Return our platform name 'win32', 'linux_x86_64'�darwinr6�x86_64�i386�ppc64�ppczmacosx_{0}_{1}_{2}rrr7r5�linux_x86_64�
linux_i686)
rrZmac_verr8r=r	�	distutils�util�get_platformr9)�releaser7�machineZ	split_ver�resultr
r
rrGzs

rGcCsJt�dkrdSyddl}t|j�Sttfk
r8YnXtjjj	dd�S)NrCrDFr��)rCrD)
rG�
_manylinux�boolZmanylinux1_compatible�ImportError�AttributeError�pipZutilsZglibcZhave_compatible_glibc)rMr
r
r�is_manylinux1_compatible�s

rRcsvg}��fdd��td
dddg���|||�r8|j|�x.�D]&}|�|kr>�|||�r>|j|�q>W|jd�|S)z�Return a list of supported arches (including group arches) for
    the given major, minor and machine architecture of an macOS machine.
    cs~|dkr||fd
kS|dkr(||fdkS|dkr<||fdkS|dkrP||fd
kS|�krzx �|D]}�|||�rbdSqbWd	S)NrB�
rLrAr@r2r?TF)rSrL)rSrL)rSr2)rSrLr
)r!r"�arch�garch)�_supports_arch�groupsr
rrV�sz)get_darwin_arches.<locals>._supports_arch�fatr@rB�intelr?�fat64rA�fat32Z	universal�r@rB)rXr\�r?r@)rYr]�r?rA)rZr^�r?r@rB)r[r_)r�append)r!r"rI�archesrUr
)rVrWr�get_darwin_arches�s$


rbFcCsg}|dkrXg}t�}|dd�}x4t|ddd�D] }|jdjtt||f���q4W|p`t�}g}	|pnt�}|r�|g|	dd�<t�}
ddl	}x8|j
�D],}|djd�r�|
j|dj
dd�d�q�W|	jtt|
���|	jd�|�sx|p�t�}
|
jd	��r�tj|
�}|�r�|j�\}}}}d
j||�}g}xTttt|�d��D]4}x,tt|�||�D]}|j|||f��q^W�qHWn|
g}n*|dk�r�t��r�|
jdd�|
g}n|
g}x:|	D]2}x*|D]"}
|jd
||df||
f��q�W�q�WxZ|dd�D]J}|dk�rPx6|
D].}x&|D]}
|jd
||f||
f��qW�qW�q�Wx*|D]"}
|jd|ddd|
f��qRW|jd
||dfddf�|jd
||ddfddf�xNt|�D]B\}}|jd|fddf�|dk�r�|jd|dddf��q�W|S)acReturn a list of supported tags for each version specified in
    `versions`.

    :param versions: a list of string versions, of the form ["33", "32"],
        or None. The first version will be assumed to support our ABI.
    :param platform: specify the exact platform you want valid
        tags for, or None. If None, use the local system platform.
    :param impl: specify the exact implementation you want valid
        tags for, or None. If None, use the local interpreter impl.
    :param abi: specify the exact abi you want valid
        tags for, or None. If None, use the local interpreter abi.
    Nrrrz.abir6rKZnoneZmacosxz
{0}_{1}_%i_%s�linuxZ
manylinux1z%s%s�31�30zpy%s�any���rgrgrg)rdre)r�ranger`rrrrr;�set�impZget_suffixesr�addr8�extend�sorted�listrG�
_osx_arch_pat�matchrWr	�reversed�intrbrRr9�	enumerate)Zversions�noarchrr0r:Z	supportedr r!r"ZabisZabi3srj�suffixrTrp�nameZactual_archZtplrar1�a�version�ir
r
r�
get_supported�sh 




 

(


*
" 
rz)rt)TT)NFNNN)%�__doc__Z
__future__r�rerrrZloggingrrOZdistutils.sysconfigZdistutils.utilrEZ
pip.compatrZpip.utils.glibcrQZ	getLogger�__name__r$�compilerorrrrr#r(r;r=rGrRrbrzZsupported_tagsZsupported_tags_noarchZimplementation_tagr
r
r
r�<module>s>



"=
^
__pycache__/status_codes.cpython-36.pyc000064400000000505151733136150014107 0ustar003

�Pf��@s(ddlmZdZdZdZdZdZdZdS)�)�absolute_import�����N)Z
__future__r�SUCCESSZERRORZ
UNKNOWN_ERRORZVIRTUALENV_NOT_FOUNDZPREVIOUS_BUILD_DIR_ERRORZNO_MATCHES_FOUND�r	r	�"/usr/lib/python3.6/status_codes.py�<module>s__pycache__/locations.cpython-36.pyc000064400000007420151733136150013405 0ustar003

�Pf��
@sdZddlmZddlZddlZddlZddlZddlmZddl	m
Z
mZddlm
Z
mZddlmZejd�Zd	Zd
Zdd�Zd
d�Zdd�Ze�r�ejjejd�Zn6yejjej�d�ZWnek
r�ejd�YnXejje�Zej �Z!ej"Z#ed�Z$e
�rtejjejd�Z%ejje#d�Z&ejj'e%��sRejjejd�Z%ejje#d�Z&dZ(ejje$d�Z)ejje)e(�Z*njejjejd�Z%ejje#d�Z&dZ(ejje$d�Z)ejje)e(�Z*ej+dd�dk�r�ejdd�dk�r�dZ%dd�ej,d�D�Z-d#d!d"�Z.dS)$z7Locations where we look for configs, install stuff, etc�)�absolute_importN)�	sysconfig)�install�SCHEME_KEYS)�WINDOWS�
expanduser)�appdirsZpipz�This file is placed here by pip to indicate the source was put
here by pip.

Once this package is successfully installed this source code will be
deleted (unless you remove this file).
zpip-delete-this-directory.txtc	Cs2tjj|t�}t|d��}|jt�WdQRXdS)z?
    Write the pip delete marker file into this directory.
    �wN)�os�path�join�PIP_DELETE_MARKER_FILENAME�open�write�DELETE_MARKER_MESSAGE)Z	directory�filepathZ	marker_fp�r�/usr/lib/python3.6/locations.py�write_delete_marker_filesrcCs*ttd�rdStjttdtj�kr&dSdS)zM
    Return True if we're running inside a virtualenv, False otherwise.

    Zreal_prefixT�base_prefixF)�hasattr�sys�prefix�getattrrrrr�running_under_virtualenv's

rcCs>tjjtjjtj��}tjj|d�}t�r:tjj|�r:dSdS)z?
    Return True if in a venv and no system site packages.
    zno-global-site-packages.txtTN)	r
r�dirname�abspath�site�__file__rr�isfile)Zsite_mod_dirZno_global_filerrr�virtualenv_no_global4sr �srcz=The folder you are executing pip from can no longer be found.�~ZScripts�binzpip.inizpip.confz.pip��darwin�z/System/Library/z/usr/local/bincCsg|]}tjj|t��qSr)r
rr�config_basename)�.0rrrr�
<listcomp>wsr)FcCshddlm}i}|r ddgi}ni}d|i}	|	j|�||	�}
|
j�|
jddd�}|oZ|sntd	j||���|pv|j|_|r�d
|_|p�|j|_|p�|j	|_	|p�|j
|_
|j�xtD]}t
|d|�||<q�Wd|
jd�kr�|jt|j|jd
��t��rdtjjtjdddtjdd�|�|d<|dk	�rdtjjtjj|d��d}
tjj||
dd��|d<|S)z+
    Return a distutils install scheme
    r)�DistributionZscript_argsz
--no-user-cfg�namerT)Zcreatezuser={0} prefix={1}�Zinstall_�install_lib)�purelib�platlib�includer�pythonN�Zheaders�)Zdistutils.distr*�updateZparse_config_filesZget_command_obj�AssertionError�format�userr�home�rootZfinalize_optionsrrZget_option_dict�dictr-rr
rrr�version�
splitdriver)Z	dist_namer7r8r9�isolatedrr*�schemeZextra_dist_argsZ	dist_args�d�i�keyZ
path_no_driverrr�distutils_scheme|sH



rB)FNNFN)/�__doc__Z
__future__rr
Zos.pathrrZ	distutilsrZdistutils.command.installrrZ
pip.compatrrZ	pip.utilsrZuser_cache_dirZUSER_CACHE_DIRrr
rrr rrrZ
src_prefix�getcwd�OSError�exitrZget_python_libZ
site_packages�	USER_SITE�	user_siteZuser_dirZbin_pyZbin_user�existsr'Zlegacy_storage_dirZlegacy_config_file�platformZsite_config_dirsZsite_config_filesrBrrrr�<module>sd
		
(__pycache__/exceptions.cpython-36.opt-1.pyc000064400000024346151733136150014540 0ustar003

�Pf��@sddZddlmZddlmZmZmZddlmZGdd�de	�Z
Gdd�de
�ZGd	d
�d
e
�ZGdd�de�Z
Gd
d�de�ZGdd�de
�ZGdd�de
�ZGdd�de
�ZGdd�de
�ZGdd�de�ZGdd�de�ZGdd�de�ZGdd�de�ZGdd �d e�ZGd!d"�d"e�ZGd#d$�d$e�ZGd%d&�d&e�ZGd'd(�d(e�ZGd)d*�d*e�Zd+S),z"Exceptions used throughout package�)�absolute_import)�chain�groupby�repeat)�	iteritemsc@seZdZdZdS)�PipErrorzBase pip exceptionN)�__name__�
__module__�__qualname__�__doc__�rr� /usr/lib/python3.6/exceptions.pyr	src@seZdZdZdS)�InstallationErrorz%General exception during installationN)rr	r
rrrrr
r
src@seZdZdZdS)�UninstallationErrorz'General exception during uninstallationN)rr	r
rrrrr
rsrc@seZdZdZdS)�DistributionNotFoundzCRaised when a distribution cannot be found to satisfy a requirementN)rr	r
rrrrr
rsrc@seZdZdZdS)�RequirementsFileParseErrorzDRaised when a general error occurs parsing a requirements file line.N)rr	r
rrrrr
rsrc@seZdZdZdS)�BestVersionAlreadyInstalledzNRaised when the most up-to-date version of a package is already
    installed.N)rr	r
rrrrr
rsrc@seZdZdZdS)�
BadCommandz0Raised when virtualenv or a command is not foundN)rr	r
rrrrr
r"src@seZdZdZdS)�CommandErrorz7Raised when there is an error in command-line argumentsN)rr	r
rrrrr
r&src@seZdZdZdS)�PreviousBuildDirErrorz:Raised when there's a previous conflicting build directoryN)rr	r
rrrrr
r*src@seZdZdZdS)�InvalidWheelFilenamezInvalid wheel filename.N)rr	r
rrrrr
r.src@seZdZdZdS)�UnsupportedWheelzUnsupported wheel.N)rr	r
rrrrr
r2src@s8eZdZdZdd�Zdd�Zdd�Zdd	�Zd
d�ZdS)
�
HashErrorsz:Multiple HashError instances rolled into one for reportingcCs
g|_dS)N)�errors)�selfrrr
�__init__9szHashErrors.__init__cCs|jj|�dS)N)r�append)r�errorrrr
r<szHashErrors.appendcCsfg}|jjdd�d�x<t|jdd��D](\}}|j|j�|jdd�|D��q(W|rbdj|�SdS)NcSs|jS)N)�order)�errr
�<lambda>Asz$HashErrors.__str__.<locals>.<lambda>)�keycSs|jS)N)�	__class__)rrrr
r Bscss|]}|j�VqdS)N)�body)�.0rrrr
�	<genexpr>Dsz%HashErrors.__str__.<locals>.<genexpr>�
)r�sortrr�head�extend�join)r�lines�clsZ
errors_of_clsrrr
�__str__?szHashErrors.__str__cCs
t|j�S)N)�boolr)rrrr
�__nonzero__HszHashErrors.__nonzero__cCs|j�S)N)r/)rrrr
�__bool__KszHashErrors.__bool__N)	rr	r
rrrr-r/r0rrrr
r6s	rc@s0eZdZdZdZdZdd�Zdd�Zdd	�ZdS)
�	HashErrora�
    A failure to verify a package against known-good hashes

    :cvar order: An int sorting hash exception classes by difficulty of
        recovery (lower being harder), so the user doesn't bother fretting
        about unpinned packages when he has deeper issues, like VCS
        dependencies, to deal with. Also keeps error reports in a
        deterministic order.
    :cvar head: A section heading for display above potentially many
        exceptions of this kind
    :ivar req: The InstallRequirement that triggered this error. This is
        pasted on after the exception is instantiated, because it's not
        typically available earlier.

    N�cCsd|j�S)a)Return a summary of me for display under the heading.

        This default implementation simply prints a description of the
        triggering requirement.

        :param req: The InstallRequirement that provoked this error, with
            populate_link() having already been called

        z    %s)�_requirement_name)rrrr
r#bs
zHashError.bodycCsd|j|j�fS)Nz%s
%s)r(r#)rrrr
r-nszHashError.__str__cCs|jrt|j�SdS)z�Return a description of the requirement that triggered me.

        This default implementation returns long description of the req, with
        line numbers

        zunknown package)�req�str)rrrr
r3qszHashError._requirement_name)	rr	r
rr4r(r#r-r3rrrr
r1Osr1c@seZdZdZdZdZdS)�VcsHashUnsupportedzuA hash was provided for a version-control-system-based requirement, but
    we don't have a method for hashing those.rzlCan't verify hashes for these requirements because we don't have a way to hash version control repositories:N)rr	r
rrr(rrrr
r6{sr6c@seZdZdZdZdZdS)�DirectoryUrlHashUnsupportedzuA hash was provided for a version-control-system-based requirement, but
    we don't have a method for hashing those.�zUCan't verify hashes for these file:// requirements because they point to directories:N)rr	r
rrr(rrrr
r7�sr7c@s(eZdZdZdZdZdd�Zdd�ZdS)	�HashMissingz2A hash was needed for a requirement but is absent.�awHashes are required in --require-hashes mode, but they are missing from some requirements. Here is a list of those requirements along with the hashes their downloaded archives actually had. Add lines like these to your requirements files to prevent tampering. (If you did not enable --require-hashes manually, note that it turns on automatically when any package has a hash.)cCs
||_dS)zq
        :param gotten_hash: The hash of the (possibly malicious) archive we
            just downloaded
        N)�gotten_hash)rr;rrr
r�szHashMissing.__init__cCsHddlm}d}|jr4|jjr&|jjnt|jdd�}d|p<d||jfS)Nr)�
FAVORITE_HASHr4z    %s --hash=%s:%szunknown package)Zpip.utils.hashesr<r4Z
original_link�getattrr;)rr<�packagerrr
r#�szHashMissing.bodyN)rr	r
rrr(rr#rrrr
r9�s
r9c@seZdZdZdZdZdS)�HashUnpinnedzPA requirement had a hash specified but was not pinned to a specific
    version.�zaIn --require-hashes mode, all requirements must have their versions pinned with ==. These do not:N)rr	r
rrr(rrrr
r?�sr?c@s0eZdZdZdZdZdd�Zdd�Zdd	�Zd
S)�HashMismatchz�
    Distribution file hash values don't match.

    :ivar package_name: The name of the package that triggered the hash
        mismatch. Feel free to write to this after the exception is raise to
        improve its error message.

    �z�THESE PACKAGES DO NOT MATCH THE HASHES FROM THE REQUIREMENTS FILE. If you have updated the package versions, please update the hashes. Otherwise, examine the package contents carefully; someone may have tampered with them.cCs||_||_dS)z�
        :param allowed: A dict of algorithm names pointing to lists of allowed
            hex digests
        :param gots: A dict of algorithm names pointing to hashes we
            actually got from the files under suspicion
        N)�allowed�gots)rrCrDrrr
r�szHashMismatch.__init__cCsd|j�|j�fS)Nz
    %s:
%s)r3�_hash_comparison)rrrr
r#�szHashMismatch.bodycsjdd�}g}xRt|j�D]D\}}||��|j�fdd�|D��|jd|j|j��d�qWdj|�S)aE
        Return a comparison of actual and expected hash values.

        Example::

               Expected sha256 abcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcde
                            or 123451234512345123451234512345123451234512345
                    Got        bcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdef

        cSst|gtd��S)Nz    or)rr)�	hash_namerrr
�hash_then_or�sz3HashMismatch._hash_comparison.<locals>.hash_then_orc3s|]}dt��|fVqdS)z        Expected %s %sN)�next)r$r)�prefixrr
r%�sz0HashMismatch._hash_comparison.<locals>.<genexpr>z             Got        %s
z    orr&)rrCr)rrDZ	hexdigestr*)rrGr+rFZ	expectedsr)rIr
rE�s
zHashMismatch._hash_comparisonN)	rr	r
rrr(rr#rErrrr
rA�s
rAc@seZdZdZdS)�UnsupportedPythonVersionzMUnsupported python version according to Requires-Python package
    metadata.N)rr	r
rrrrr
rJ�srJN)rZ
__future__r�	itertoolsrrrZpip._vendor.sixr�	Exceptionrrrrrrrrrrrrr1r6r7r9r?rArJrrrr
�<module>s,,		$	8__pycache__/__init__.cpython-36.opt-1.pyc000064400000020444151733136150014111 0ustar003

�Pf�.�@s�ddlmZddlZddlZddlZddlZddlZddlZddlZddl	m
Z
ejde
d�yddlZWne
k
r~YnNXejdkr�eedd�dkr�ydd	lmZWne
efk
r�Yn
Xej�dd
lmZmZmZddlmZmZddlmZmZdd
lmZmZm Z m!Z!ddl"m#Z#m$Z$ddl%m&Z&m'Z'ddl%m(Z(ddl	m)Z)ddl*Z+e+j,Z,dZ-ej.e/�Z0ejde)d�dd�Z1dd�Z2dd�Z3dd�Z4d dd�Z5Gdd�de6�Z7e/dk�r�ej8e5��dS)!�)�absolute_importN)�DependencyWarning�ignore)�category�darwinZOPENSSL_VERSION_NUMBERi)�securetransport)�InstallationError�CommandError�PipError)�get_installed_distributions�get_prog)�deprecation�dist_is_editable)�git�	mercurial�
subversion�bazaar)�ConfigOptionParser�UpdatingDefaultsHelpFormatter)�
get_summaries�get_similar_commands)�
commands_dict)�InsecureRequestWarningz9.0.3csZdtjkrdStjdj�dd�}ttjd�}y||d�Wntk
rZd�YnXdd�t�D��g}y�fd	d�|D�d
}Wntk
r�d}YnXt�}|�r�|dkr�tjd�|dkoԈj	d
��rJg}�j
�}x<tdd�D].}|jj	|�r�|j|dd�kr�|j
|j�q�W|�rJx|D]}t|��q.Wtjd�t|�}|dd�|jjD�7}dd�|d|d�D���fdd�|D�}�fdd�|D�}x�|D](}	|	d
}
|	d�r�|
d7}
t|
��q�Wnp�j	d
��s�j	d��r0dd�|jD�}|j
|j�dd�|D�}�dd�|D�7�tdj�fdd��D���tjd�dS)z�Command and option completion for the main option parser (and options)
    and its subcommands (and options).

    Enable by sourcing one of the completion shell scripts (bash, zsh or fish).
    ZPIP_AUTO_COMPLETENZ
COMP_WORDS�Z
COMP_CWORD�cSsg|]\}}|�qS�r)�.0�cmdZsummaryrr�/usr/lib/python3.6/__init__.py�
<listcomp>Usz autocomplete.<locals>.<listcomp>csg|]}|�kr|�qSrr)r�w)�subcommandsrrrYsr�helpZ	uninstall�-T)Z
local_onlycSs&g|]}|jtjkr|j�|jf�qSr)r"�optparse�
SUPPRESS_HELP�get_opt_string�nargs)r�optrrrrqscSsg|]}|jd�d�qS)�=r)�split)r�xrrrrvscs g|]\}}|�kr||f�qSrr)rr+�v)�	prev_optsrrrwscs"g|]\}}|j��r||f�qSr)�
startswith)r�kr,)�currentrrrysr)z--cSsg|]
}|j�qSr)�option_list)r�irrrr�scss|]}|D]
}|Vq
qdS)Nr)r�it�orrr�	<genexpr>�szautocomplete.<locals>.<genexpr>cSs g|]}|jtjkr|j��qSr)r"r$r%r&)rr2rrrr�s� csg|]}|j��r|�qSr)r.)rr+)r0rrr�s)�os�environr*�int�
IndexErrorr�create_main_parser�sys�exitr.�lowerr�key�append�printr�parserZoption_list_allZ
option_groupsr1�join)ZcwordsZcwordZoptionsZsubcommand_namerBZ	installedZlc�distZ
subcommandZoptionZ	opt_labelZoptsr)r0r-r!r�autocompleteEs\








rEcCs�ddt�dt�d�}tf|�}|j�tjjtjjtjjt���}dt	|t
jdd�f|_tj
tj|�}|j|�d|_t�}dgd	d
�|D�}dj|�|_|S)Nz
%prog <command> [options]F�global)ZusageZadd_help_optionZ	formatter�name�progzpip %s from %s (python %s)�TrcSsg|]\}}d||f�qS)z%-27s %sr)rr2�jrrrr�sz&create_main_parser.<locals>.<listcomp>�
)rrrZdisable_interspersed_argsr7�path�dirname�abspath�__file__�__version__r<�version�
cmdoptionsZmake_option_groupZ
general_groupZadd_option_group�mainrrC�description)Z	parser_kwrBZpip_pkg_dirZgen_optsZcommand_summariesrTrrrr;�s"


r;cCs�t�}|j|�\}}|jr>tjj|j�tjjtj�tj�|s\|ddkrlt	|�dkrl|j
�tj�|d}|tkr�t|�}d|g}|r�|j
d|�tdj|���|dd�}|j|�||fS)Nrr"rzunknown command "%s"zmaybe you meant "%s"z - )r;�
parse_argsrQr<�stdout�writer7�linesepr=�lenZ
print_helprrr@r	rC�remove)�argsrBZgeneral_optionsZ	args_else�cmd_nameZguess�msg�cmd_argsrrr�	parseopts�s&	

r_cCsd}d|krd}|S)NFz
--isolatedTr)r[�isolatedrrr�check_isolated�sracCs�|dkrtjdd�}tj�t�yt|�\}}WnJtk
r~}z.tjjd|�tjjt	j
�tjd�WYdd}~XnXytj
tjd�Wn0tjk
r�}ztjd|�WYdd}~XnXt|t|�d�}|j|�S)Nrz	ERROR: %srz%Ignoring error %s when setting locale)r`)r<�argvr
Zinstall_warning_loggerrEr_r
�stderrrWr7rXr=�locale�	setlocale�LC_ALL�Error�logger�debugrrarS)r[r\r^�exc�eZcommandrrrrS�s rSc@sLeZdZffdd�Zejd�Zejd�Zedd��Z	e
dd��Zd	d
�ZdS)�FrozenRequirementcCs||_||_||_||_dS)N)rG�req�editable�comments)�selfrGrmrnrorrr�__init__�szFrozenRequirement.__init__z-r(\d+)$z-(20\d\d\d\d\d\d)$cCs�tjjtjj|j��}g}ddlm}m}t|�r�|j	|�r�d}y|||�}Wn2t
k
r�}	ztjd|	�d}WYdd}	~	XnX|dkr�tjd|�|j
d�|j�}d}n�d}|j�}|j}
|
dd}|jj|�}|jj|�}
|s�|
�rp|jd	�}|�r|�j||�}|�s*tjd
|�|j
d�nF|j
d|�|�rJ|jd�}nd
|
jd�}d}d|||j|�f}||j|||�S)Nr)�vcs�get_src_requirementTzYError when trying to get requirement for VCS system %s, falling back to uneditable formatz-Could not determine repository location of %sz-## !! Could not determine repository locationFrZsvnz(Warning: cannot find svn location for %szF## FIXME: could not find svn URL in dependency_links for this package:z3# Installing as editable to satisfy requirement %s:z{%s}z%s@%s#egg=%s)r7rL�normcaserN�location�pip.vcsrrrsrZget_backend_namerrhZwarningr@Zas_requirement�specs�_rev_re�search�_date_reZget_backendZget_location�group�egg_nameZproject_name)�clsrDZdependency_linksrurorrrsrnrmrjrwrQZ	ver_matchZ
date_matchZsvn_backendZsvn_locationZrevrrr�	from_dists`

zFrozenRequirement.from_distcCs,|j�}tjd|�}|r(|d|j��}|S)Nz
-py\d\.\d$)r|�rery�start)rDrG�matchrrrr|Is
zFrozenRequirement.egg_namecCs2|j}|jrd|}djt|j�t|�g�dS)Nz-e %srK)rmrnrC�listro�str)rprmrrr�__str__QszFrozenRequirement.__str__N)
�__name__�
__module__�__qualname__rqr�compilerxrz�classmethodr~�staticmethodr|r�rrrrrl�s

Arl�__main__)N)9Z
__future__rrdZloggingr7r$�warningsr<rZpip._vendor.urllib3.exceptionsr�filterwarningsZssl�ImportError�platform�getattrZpip._vendor.urllib3.contribr�OSErrorZinject_into_urllib3Zpip.exceptionsrr	r
Z	pip.utilsrrr
rrvrrrrZpip.baseparserrrZpip.commandsrrrrZpip.cmdoptionsZpiprRrPZ	getLoggerr�rhrEr;r_rarS�objectrlr=rrrr�<module>sR


I*	
[
__pycache__/cmdoptions.cpython-36.opt-1.pyc000064400000031164151733136150014532 0ustar003

�PfZ@�@sndZddlmZddlmZddlmZmZmZddl	Z	ddl
mZmZm
Z
mZddlmZddlmZmZdd	lmZd
d�Zdd
�Zd�dd�Zeedddddd�Zeedddddd�Zeeddddded�Zeeddd d!dd"d�Zeed#d$d%dd&d�Zeed'd(d)d!dd*d�Zeed+d,d-d.d/d0d1�Zeed2d3dded�Z eed4d5d6d7d8d9�Z!eed:d;d<d=d>d9�Z"eed?d@dAdBdCdDdEdF�Z#eedGdHd6d7ed9�Z$eedIdJd6d7ed9�Z%dKdL�Z&eedMdNd6d/dOdP�Z'eedQdRd6dd/dSdT�Z(eedUdVdWdXdYej)dZd[�Z*d\d]�Z+eed^d_ddd`d�Z,dadb�Z-dcdd�Z.eededfdded�Z/dgdh�Z0eedidfdjded�Z1dkdl�Z2eedmdndjded�Z3eedodpdddqd�Z4drds�Z5dtdu�Z6dvdw�Z7eedxdydzd{d|d}ed~d[�	Z8eedd�dd�ed�Z9eed�d�djd�d�d�Z:d�d��Z;d�d��Z<d�d��Z=d�d��Z>d�d��Z?eed�d�ed}d�d��Z@eed�d�djd�d�ZAeed�d�d�ddd�d�ZBeed�d�d�d�d�d}d�d1�ZCeed�d�dd�d�ZDeed�d�d�d�d�d��ZEeed�d�d�d�d�d��ZFeed�ddd�d��ZGeed�ddd�d��ZHeed�d�ddd�d�ZIeed�d�d�ded�ZJd�d��ZKeed�d�d�eKd�d�d��ZLeed�d�ddd�d�ZMd�eeeeeeee e!e"e#e$e%e&e0e'e(e@eAeIgd��ZNd�e*e+e,e-e4gd��ZOd�eOd�e.e/e1e2e3gd��ZPdS)�aD
shared options and groups

The principle here is to define options once, but *not* instantiate them
globally. One reason being that options with action='append' can carry state
between parses. pip parses general options twice internally, and shouldn't
pass on state. To be consistent, all options will follow this design.

�)�absolute_import)�partial)�OptionGroup�
SUPPRESS_HELP�OptionN)�
FormatControl�fmt_ctl_handle_mutual_exclude�fmt_ctl_no_binary�fmt_ctl_no_use_wheel)�PyPI)�USER_CACHE_DIR�
src_prefix)�
STRONG_HASHEScCs0t||d�}x|dD]}|j|��qW|S)z�
    Return an OptionGroup object
    group  -- assumed to be dict with 'name' and 'options' keys
    parser -- an optparse Parser
    �name�options)rZ
add_option)�group�parserZoption_group�option�r� /usr/lib/python3.6/cmdoptions.py�make_option_groupsrcCs|js|j}t|�dS)N)�	use_wheel�format_controlr
)r�controlrrr�resolve_wheel_no_use_binary$srcsP�dkr|��fdd�}dddg}tt||��rL|j}t|�tjddd	�dS)
z�Disable wheels if per-setup.py call options are set.

    :param options: The OptionParser options to update.
    :param check_options: The options to check, if not supplied defaults to
        options.
    Ncst�|d�S)N)�getattr)�n)�
check_optionsrr�getname4sz+check_install_build_global.<locals>.getnameZ
build_options�global_options�install_optionszeDisabling all use of wheels due to the use of --build-options / --global-options / --install-options.�)�
stacklevel)�any�maprr	�warnings�warn)rrr�namesrr)rr�check_install_build_global*s
r(z-hz--help�helpz
Show help.)�dest�actionr)z
--isolated�
isolated_mode�
store_trueFzSRun pip in an isolated mode, ignoring environment variables and user configuration.)r*r+�defaultr)z--require-virtualenvz--require-venvZrequire_venvz-vz	--verbose�verbose�countzDGive more output. Option is additive, and can be used up to 3 times.z-Vz	--version�versionzShow version and exit.z-qz--quiet�quietz�Give less output. Option is additive, and can be used up to 3 times (corresponding to WARNING, ERROR, and CRITICAL logging levels).z--logz
--log-filez--local-log�log�pathz Path to a verbose appending log.)r*�metavarr)z
--no-input�no_inputz--proxy�proxy�str�z<Specify a proxy in the form [user:passwd@]proxy.server:port.)r*�typer.r)z	--retries�retries�int�zRMaximum number of retries each connection should attempt (default %default times).z	--timeoutz--default-timeoutZsec�timeout�float�z2Set the socket timeout (default %default seconds).)r5r*r:r.r)z
--default-vcs�default_vcsz--skip-requirements-regex�skip_requirements_regexc
Cs"tddddddddggd	d
dd�S)
Nz--exists-action�
exists_actionZchoice�s�i�w�b�a�appendr+zYDefault action when a path already exists: (s)witch, (i)gnore, (w)ipe, (b)ackup, (a)bort.)r*r:�choicesr.r+r5r))rrrrrrC�srCz--cert�certzPath to alternate CA bundle.)r*r:r5r)z
--client-cert�client_certzkPath to SSL client certificate, a single file containing the private key and the certificate in PEM format.)r*r:r.r5r)z-iz--index-urlz
--pypi-url�	index_url�URLz�Base URL of Python Package Index (default %default). This should point to a repository compliant with PEP 503 (the simple repository API) or a local directory laid out in the same format.)r*r5r.r)cCstddddgdd�S)Nz--extra-index-urlZextra_index_urlsrNrIzmExtra URLs of package indexes to use in addition to --index-url. Should follow the same rules as --index-url.)r*r5r+r.r))rrrrr�extra_index_url�srOz
--no-index�no_indexzAIgnore package index (only looking at --find-links URLs instead).c	Cstddddgddd�S)Nz-fz--find-links�
find_linksrIZurlz�If a url or path to an html file, then parse for links to archives. If a local path or file:// url that's a directory, then look for archives in the directory listing.)r*r+r.r5r))rrrrrrQ�srQcCstdddgdtd�S)Nz--allow-external�allow_externalrI�PACKAGE)r*r+r.r5r))rrrrrrrRsrRz--allow-all-external�allow_all_externalcCstddddgdd�S)Nz--trusted-hostZ
trusted_hostsrIZHOSTNAMEzKMark this host as trusted, even though it does not have valid or any HTTPS.)r*r+r5r.r))rrrrr�trusted_hostsrUz--no-allow-externalZstore_falsec	Cstddddgdtd�S)Nz--allow-unverifiedz--allow-insecureZallow_unverifiedrIrS)r*r+r.r5r))rrrrrr�allow_unsafe3srVz--no-allow-insecureZallow_all_insecurez--process-dependency-links�process_dependency_linksz*Enable the processing of dependency links.c	Cstddddgddd�S)Nz-cz--constraint�constraintsrI�filez\Constrain versions using the given constraints file. This option can be used multiple times.)r*r+r.r5r))rrrrrrXRsrXc	Cstddddgddd�S)Nz-rz
--requirement�requirementsrIrYzQInstall from the given requirements file. This option can be used multiple times.)r*r+r.r5r))rrrrrrZ]srZc	Cstddddgddd�S)Nz-ez
--editableZ	editablesrIzpath/urlzkInstall a project in editable mode (i.e. setuptools "develop mode") from a local project path or a VCS url.)r*r+r.r5r))rrrrr�editablehsr[z--srcz--sourcez--source-dirz--source-directoryZsrc_dir�dirz�Directory to check out editable projects into. The default in a virtualenv is "<venv path>/src". The default for global installs is "<current dir>/src".z--use-wheelrTz--no-use-wheelz{Do not Find and prefer wheel archives when searching indexes and find-links locations. DEPRECATED in favour of --no-binary.cCst||j�S)zGet a format_control object.)rr*)�valuesrrrr�_get_format_control�sr^cCs"t|j|j�}t||j|j�dS)N)rr]r*r�	no_binary�only_binary)r�opt_str�valuer�existingrrr�_handle_no_binary�srdcCs"t|j|j�}t||j|j�dS)N)rr]r*rr`r_)rrarbrrcrrr�_handle_only_binary�srec	Cs tdddtdtt�t��dd�S)Nz--no-binaryr�callbackr8aRDo not use binary packages. Can be supplied multiple times, and each time adds to the existing value. Accepts either :all: to disable all binary packages, :none: to empty the set, or one or more package names with commas between them. Note that some packages are tricky to compile and may fail to install when this option is used on them.)r*r+rfr:r.r))rrdr�setrrrrr_�s
r_c	Cs tdddtdtt�t��dd�S)Nz
--only-binaryrrfr8aGDo not use source packages. Can be supplied multiple times, and each time adds to the existing value. Accepts either :all: to disable all source packages, :none: to empty the set, or one or more package names with commas between them. Packages without binary distributions will fail to install when this option is used on them.)r*r+rfr:r.r))rrerrgrrrrr`�s
r`z--cache-dir�	cache_dirzStore the cache data in <dir>.)r*r.r5r)z--no-cache-dirzDisable the cache.z	--no-depsz--no-dependenciesZignore_dependenciesz#Don't install package dependencies.z-bz--buildz--build-dirz--build-directory�	build_dirz/Directory to unpack packages into and build in.z--ignore-requires-python�ignore_requires_pythonz'Ignore the Requires-Python information.z--install-optionr rIra"Extra arguments to be supplied to the setup.py install command (use like --install-option="--install-scripts=/usr/local/bin"). Use multiple --install-option options to pass multiple options to setup.py install. If you are using an option with a directory path, be sure to use absolute path.)r*r+r5r)z--global-optionrzTExtra global options to be supplied to the setup.py call before the install command.z
--no-cleanz!Don't clean up build directories.)r+r.r)z--prezYInclude pre-release and development versions. By default, pip only finds stable versions.z--disable-pip-version-check�disable_pip_version_checkz{Don't periodically check PyPI to determine whether a new version of pip is available for download. Implied with --no-index.z-Zz--always-unzip�always_unzipc
Cs�|jjsi|j_y|jdd�\}}Wn"tk
rF|jd|�YnX|tkrh|jd|djt�f�|jjj|g�j|�dS)zkGiven a value spelled "algo:digest", append the digest to a list
    pointed to in a dict by the algo name.�:�zTArguments to %s must be a hash name followed by a value, like --hash=sha256:abcde...z&Allowed hash algorithms for %s are %s.z, N)	r]�hashes�split�
ValueError�errorr�join�
setdefaultrI)rrarbrZalgoZdigestrrr�_merge_hashsruz--hashrorf�stringzgVerify that the package's archive matches this hash before installing. Example: --hash=sha256:abcdef...)r*r+rfr:r)z--require-hashes�require_hashesz�Require a hash to check each requirement against, for repeatable installs. This option is implied when any package in a requirements file has a --hash option.zGeneral Options)rrzPackage Index Optionsz4Package Index Options (including deprecated options))N)Q�__doc__Z
__future__r�	functoolsrZoptparserrrr%Z	pip.indexrrr	r
Z
pip.modelsrZ
pip.locationsrr
Zpip.utils.hashesrrrr(Zhelp_r,Zrequire_virtualenvr/r1r2r3r6r7r;r>rArBrCrKrLZ
simple_urlrMrOrPrQrRrTrUZno_allow_externalrVZno_allow_unsaferWrXrZr[�srcrZno_use_wheelr^rdrer_r`rhZno_cacheZno_depsrirjr rZno_cleanZprerkrlru�hashrwZ
general_groupZnon_deprecated_index_groupZindex_grouprrrr�<module>	sr







__pycache__/basecommand.cpython-36.pyc000064400000016035151733136150013665 0ustar003

�Pf�.�@s,dZddlmZddlZddlZddlZddlZddlZddlm	Z	ddl
mZddlm
Z
ddlmZddlmZmZmZmZmZdd	lmZdd
lmZmZddlmZmZddlmZm Z m!Z!m"Z"m#Z#dd
l$m%Z%m&Z&m'Z'ddl(m)Z)ddl*m+Z+dgZ,ej-e.�Z/Gdd�de0�Z1Gdd�de1�Z2dS)z(Base Command class, and related routines�)�absolute_importN)�
cmdoptions)�
PackageFinder)�running_under_virtualenv)�
PipSession)�
BadCommand�InstallationError�UninstallationError�CommandError�PreviousBuildDirError)�logging_dictConfig)�ConfigOptionParser�UpdatingDefaultsHelpFormatter)�InstallRequirement�parse_requirements)�SUCCESS�ERROR�
UNKNOWN_ERROR�VIRTUALENV_NOT_FOUND�PREVIOUS_BUILD_DIR_ERROR)�deprecation�get_prog�normalize_path)�IndentingFormatter)�pip_version_check�Commandc@s@eZdZdZdZdZd
Zddd�Zddd�Zd	d
�Z	dd�Z
dS)rNF�ext://sys.stdout�ext://sys.stderrcCsr|jdt�|jft�d|j|j|d�}tf|�|_d|jj�}tj	|j|�|_
tjtj
|j�}|jj|�dS)Nz%s %sF)�usage�prog�	formatterZadd_help_option�name�description�isolatedz
%s Options)rrr!r�__doc__r
�parser�
capitalize�optparseZOptionGroupZcmd_optsrZmake_option_groupZ
general_groupZadd_option_group)�selfr#Z	parser_kwZ
optgroup_nameZgen_opts�r)�!/usr/lib/python3.6/basecommand.py�__init__)szCommand.__init__cCs�t|jrttjj|jd��nd|dk	r*|n|j|jd�}|jrF|j|_	|j
rT|j
|_|js^|rr|dk	rj|n|j|_|jr�|j|jd�|_
|j|j_|S)N�http)�cache�retriesZinsecure_hosts)r,Zhttps)r�	cache_dirr�os�path�joinr.�
trusted_hostsZcertZverifyZclient_cert�timeout�proxyZproxies�no_inputZauthZ	prompting)r(�optionsr.r4�sessionr)r)r*�_build_sessionAs

zCommand._build_sessioncCs|jj|�S)N)r%�
parse_args)r(�argsr)r)r*r:eszCommand.parse_argscs�|j|�\}}|jr8|jdkr"d�|jdkr2d�qHd�n|jrDd�nd��}|jrVd}tddd	d
tjd�idtd
d�i�d|jdd	gdd�dd|jddd�dd|jp�dddd�d�|t	t
ddd|jr�dndg��d�t�fdd�d2D��d"��tj
dd�d3k�rtjd$tj�|j�r(d%tjd&<|j�rBd'j|j�tjd(<|j�rft��sftjd)�tjt��z$y"|j||�}t|t��r�|SW�n�t k
�r�}z tjt!|��tj"d*dd+�t#Sd}~Xn�t$t%t&fk
�r}z tjt!|��tj"d*dd+�t'Sd}~Xn~t(k
�rF}ztjd,|�tj"d*dd+�t'Sd}~XnDt)k
�rrtjd-�tj"d*dd+�t'Stjd.dd+�t*SWd|j+�r�t,|d/d��r�|j-|dt.d0|j/�d1��}t0|�WdQRXXt1S)4N��WARNING�rZCRITICAL�DEBUG�INFOFZexclude_warningsz pip.utils.logging.MaxLevelFilter)z()�level�indentz%(message)s)z()�formatz(pip.utils.logging.ColorizedStreamHandlerr)rA�class�stream�filtersr )rArDrEr z+pip.utils.logging.BetterRotatingFileHandlerz	/dev/nullT)rArD�filenameZdelayr )�console�console_errors�user_logrHrIrJ)rA�handlersc3s&|]}|d�dkrdndifVqdS)rAr@rr=r?N)r@rr))�.0r!)rAr)r*�	<genexpr>�s
zCommand.main.<locals>.<genexpr>�pip._vendor�distlib�requests�urllib3)�versionZdisable_existing_loggersrFZ
formattersrK�rootZloggers�z�Python 2.6 is no longer supported by the Python core team, please upgrade your Python. A future version of pip will drop support for Python 2.6�1ZPIP_NO_INPUT� ZPIP_EXISTS_ACTIONz2Could not find an activated virtualenv (required).zException information:)�exc_infoz	ERROR: %szOperation cancelled by userz
Exception:�no_index�)r.r4)rNrOrPrQ)r>rT)2r:�quiet�verbose�logr�loggingr=r�log_streams�list�filter�dict�sys�version_info�warnings�warnrZPython26DeprecationWarningr6r0�environZ
exists_actionr2Zrequire_venvr�loggerZcritical�exitrZrun�
isinstance�intr�str�debugrrr	rrr
�KeyboardInterruptrZdisable_pip_version_check�getattrr9�minr4rr)r(r;r7Z
root_levelZstatus�excr8r))rAr*�mainis�










zCommand.main)rr)F)NN)�__name__�
__module__�__qualname__r!rZhiddenr^r+r9r:rqr)r)r)r*r#s

$c@s"eZdZedd��Zddd�ZdS)�RequirementCommandc	Cs"x6|jD],}x&t|d||||d�D]}|j|�q"WqWx&|D]}|jtj|d|j|d��q>Wx*|jD] }|jtj||j|j|d��qhWd}	x8|j	D].}x(t|||||d�D]}d}	|j|�q�Wq�W|j
|_
|p�|jp�|	�sd|i}
|j�rd	t|
d
j
|j�d�}nd|
}tj|�dS)
z?
        Marshal cmd line args into a requirement set.
        T)Z
constraint�finderr7r8�wheel_cacheN)r#rw)�default_vcsr#rwF)rvr7r8rwr!z^You must give at least one requirement to %(name)s (maybe you meant "pip %(name)s %(links)s"?)rV)ZlinkszLYou must give at least one requirement to %(name)s (see "pip help %(name)s"))ZconstraintsrZadd_requirementrZ	from_lineZ
isolated_modeZ	editablesZ
from_editablerxZrequirementsZrequire_hashes�
find_linksrar2rgZwarning)Zrequirement_setr;r7rvr8r!rwrGZreqZfound_req_in_fileZopts�msgr)r)r*�populate_requirement_setsF
z+RequirementCommand.populate_requirement_setNc
CsR|jg|j}|jr*tjddj|��g}t|j|j||j	|j
|j|||||d�S)zR
        Create a package finder appropriate to this requirement command.
        zIgnoring indexes: %s�,)ry�format_control�
index_urlsr3Zallow_all_prereleases�process_dependency_linksr8�platformZversions�abi�implementation)Z	index_urlZextra_index_urlsrXrgrlr2rryr}r3Zprer)r(r7r8r�Zpython_versionsr�r�r~r)r)r*�_build_package_finder:s z(RequirementCommand._build_package_finder)NNNN)rrrsrt�staticmethodr{r�r)r)r)r*rus8ru)3r$Z
__future__rr]r0rbr'rdZpiprZ	pip.indexrZ
pip.locationsrZpip.downloadrZpip.exceptionsrrr	r
rZ
pip.compatrZpip.baseparserr
rZpip.reqrrZpip.status_codesrrrrrZ	pip.utilsrrrZpip.utils.loggingrZpip.utils.outdatedr�__all__Z	getLoggerrrrg�objectrrur)r)r)r*�<module>s.
___pycache__/wheel.cpython-36.opt-1.pyc000064400000052064151733136150013461 0ustar003

�Pf~�@s$dZddlmZddlZddlZddlZddlZddlZddlZddl	Z	ddl
Z	ddlZddlZddl
Z
ddlZddlZddlZddlmZddlmZddlmZddlZddlmZddlmZmZdd	lmZmZm Z dd
l!m"Z"m#Z#ddlm$Z$ddl%m&Z&m'Z'm(Z(m)Z)m*Z*dd
l+m,Z,ddl-m.Z.ddl/m0Z0ddl1m2Z2ddl3m4Z4ddl5m6Z6ddl7m8Z8dZ9d9Z:ej;e<�Z=Gdd�de>�Z?dd�Z@dd�ZAd;dd�ZBd d!�ZCd"d#�ZDejEd$ejF�ZGd%d&�ZHd'd(�ZId<d+d,�ZJd-d.�ZKeKd/d0��ZLd1d2�ZMd3d4�ZNGd5d6�d6e>�ZOGd7d8�d8e>�ZPdS)=zH
Support for installing and building the "wheel" binary package format.
�)�absolute_importN)�urlsafe_b64encode)�Parser)�StringIO)�
expanduser)�path_to_url�
unpack_url)�InstallationError�InvalidWheelFilename�UnsupportedWheel)�distutils_scheme�PIP_DELETE_MARKER_FILENAME)�
pep425tags)�call_subprocess�
ensure_dir�captured_stdout�rmtree�read_chunks)�open_spinner)�
indent_log)�SETUPTOOLS_SHIM)�ScriptMaker)�
pkg_resources)�canonicalize_name)�configparserz.whl�c@s eZdZdZdd�Zdd�ZdS)�
WheelCachez&A cache of wheels for future installs.cCs|rt|�nd|_||_dS)z�Create a wheel cache.

        :param cache_dir: The root of the cache.
        :param format_control: A pip.index.FormatControl object to limit
            binaries being read from the cache.
        N)r�
_cache_dir�_format_control)�self�	cache_dir�format_control�r"�/usr/lib/python3.6/wheel.py�__init__8szWheelCache.__init__cCst|j||j|�S)N)�cached_wheelrr)r�link�package_namer"r"r#r%BszWheelCache.cached_wheelN)�__name__�
__module__�__qualname__�__doc__r$r%r"r"r"r#r5s
rcCs�|jg}|jdk	r4|jdk	r4|jdj|j|jg��dj|�}tj|j��j�}|dd�|dd�|dd�|dd�g}t	j
j|df|��S)a�
    Return a directory to store cached wheels in for link.

    Because there are M wheels for any one sdist, we provide a directory
    to cache them in, and then consult that directory when looking up
    cache hits.

    We only insert things into the cache if they have plausible version
    numbers, so that we don't contaminate the cache with things that were not
    unique. E.g. ./package might have dozens of installs done for it and build
    a version of 0.0...and if we built and cached a wheel, we'd end up using
    the same wheel even if the source has been edited.

    :param cache_dir: The cache_dir being used by pip.
    :param link: The link of the sdist for which this will cache wheels.
    N�=�#���Zwheels)Zurl_without_fragmentZ	hash_name�hash�append�join�hashlibZsha224�encodeZ	hexdigest�os�path)r r&Z	key_partsZkey_urlZhashed�partsr"r"r#�_cache_for_linkGs
,r9c
Cs,|s|S|s|S|jr|S|js$|S|s,|St|�}tjj||�}d|krN|St||�}ytj|�}Wn:t	k
r�}z|j
t
jt
jfkr�|S�WYdd}~XnXg}	xL|D]D}
yt
|
�}Wntk
r�w�YnX|j�s�q�|	j|j�|
f�q�W|	�s�|S|	j�tjj||	dd�}tjjt|��S)N�binaryrr)�is_wheel�is_artifactr�pip�index�fmt_ctl_formatsr9r6�listdir�OSError�errno�ENOENT�ENOTDIR�Wheelr
�	supportedr2�support_index_min�sortr7r3�Linkr)
r r&r!r'Zcanonical_nameZformats�rootZwheel_names�eZ
candidates�
wheel_name�wheelr7r"r"r#r%psF

r%�sha256�cCsttj|�}d}t|d��2}x*t||d�D]}|t|�7}|j|�q(WWdQRXdt|j��jd�j	d�}||fS)z6Return (hash, length) for path using hashlib.new(algo)r�rb)�sizeNzsha256=�latin1r,)
r4�new�openr�len�updater�digest�decode�rstrip)r7ZalgoZ	blocksize�hZlength�f�blockrWr"r"r#�rehash�s

r]cCs6tjddkri}d}nddi}d}t|||f|�S)Nr��b�newline�)�sys�version_inforT)�name�mode�nl�binr"r"r#�open_for_csv�srhcCs�tjj|�r�t|d��H}|j�}|jd�s.dStjjtj	��}d|tj
jd�}|j�}WdQRXt|d��}|j|�|j|�WdQRXdSdS)	zLReplace #!python with #!/path/to/python
    Return True if file was changed.rPs#!pythonFs#!�asciiN�wbT)
r6r7�isfilerT�readline�
startswithrb�
executabler5�getfilesystemencoding�linesep�read�write)r7Zscript�	firstlineZexename�restr"r"r#�
fix_script�s

ruzZ^(?P<namever>(?P<name>.+?)(-(?P<ver>\d.+?))?)
                                \.dist-info$cCs�|jdd�}xttj|�D]f}tj|�}|r|jd�|krttjj||d���,}x$|D]}|j	�j
�}|dkrTdSqTWWdQRXqWdS)	zP
    Return True if the extracted wheel in wheeldir should go into purelib.
    �-�_rd�WHEELzroot-is-purelib: trueTNF)�replacer6r@�dist_info_re�match�grouprTr7r3�lowerrY)rd�wheeldirZname_folded�itemr{rM�liner"r"r#�root_is_purelib�s

r�c
Cs�tjj|�siifSt|��<}t�}x$|D]}|j|j��|jd�q*W|jd�WdQRXtj	�}dd�|_
|j|�i}i}|jd�r�t
|jd��}|jd�r�t
|jd��}||fS)N�
rcSs|S)Nr")Zoptionr"r"r#�<lambda>�sz!get_entrypoints.<locals>.<lambda>Zconsole_scriptsZgui_scripts)r6r7�existsrTrrr�strip�seekrZRawConfigParserZoptionxformZreadfpZhas_section�dict�items)�filename�fp�datar�Zcp�console�guir"r"r#�get_entrypoints�s$





r�FTc+)s|st||||||	d�}t|��r,|d�n|d�g�g��jtjj�tjj}i�t��g}|r�t��4}
tj	�� tj
d�tj|ddd�WdQRXWdQRXt
j|
j��dd	��d2�����fdd�	�	d3���	�
fd
d�	}||�d�tjj�dd�}t|�\����fdd�}xv�D]n}d}d}x^tjtjj�|��D]F}d}|dk�r^t}|}tjj�||�}||}|||d
||d��qDW�q"Wtd|d��d�_td4��_d�_��
fdd�}|�_d�_�jdd�}|�r�dtjk�rd|}|j�j|��tjjdd�dk�rBdtj dd�|f}|j�j|��dtj dd�|f}|j�j|��d d!��D�}x|D]}�|=�q|W�jd"d�}|�rdtjk�r�d#|}|j�j|��d$tj dd�|f}|j�j|��d%d!��D�}x|D]}�|=�q�Wt!��dk�r8|j�j"d&d!��j#�D���t!��dk�rj|j�j"d'd!��j#�D�d(di��tjj�dd)�}tjj�dd*�}t$|d+��}|j%d,�WdQRXt&j'||�|j(|�tjj�dd-�} tjj�dd.�}!t)| d/���}"t)|!d0���}#t*j+|"�}$t*j,|#�}%xV|$D]N}&�j|&d|&d�|&d<|&d�k�r^t-|&d�\|&d<|&d1<|%j.|&��qWx`|D]X}'t-|'�\}(})�|'��}*|
�r�|*j/|
��r�tjjtjtjj0|*|
��}*|%j.|*|(|)f��qtWx"�D]}'|%j.�|'ddf��q�WWdQRXWdQRXt&j'|!| �dS)5zInstall a wheel)�user�homerJ�isolated�prefix�purelib�platlib�ignoreT)�force�quietNcSstjj||�jtjjd�S)N�/)r6r7�relpathry�sep)�src�pr"r"r#�normpathsz"move_wheel_files.<locals>.normpathFcs.�|��}�|��}|�|<|r*�j|�dS)z6Map archive RECORD paths to installation RECORD paths.N)�add)�srcfile�destfileZmodifiedZoldpath�newpath)�changed�	installed�lib_dirr�r~r"r#�record_installeds


z*move_wheel_files.<locals>.record_installedcs�t|��x�tj|�D�]�\}}}|t|�d�jtjj�}tjj||�}	|rj|jtjjd�dj	d�rjqxl|D]d}
tjj|||
�}|r�|dkr�|j	d�r��j
|
�qpqp|rp|
j	d�rpt|
�jt�j
��rp�j
|�qpWx�|D]�}|r�||�r�q�tjj||�}
tjj|||�}t|	�tj|
|�tj|
�}ttd��rLtj||j|jf�tj|
tj��r�tj|
�}|jtjBtjBtjB}tj||�d}|�r�||�}�|
||�q�WqWdS)Nrrz.dataraz
.dist-info�utimeF)rr6�walkrU�lstripr7r�r3�split�endswithr2rrmrd�shutilZcopyfile�stat�hasattrr��st_atime�st_mtime�access�X_OK�st_mode�S_IXUSR�S_IXGRP�S_IXOTH�chmod)�source�destZis_base�fixer�filter�dirZsubdirs�filesZbasedirZdestdir�sZ
destsubdirr[r�r��stZpermissionsr�)�	data_dirs�info_dirr��reqr"r#�clobbersD





z!move_wheel_files.<locals>.clobberrzentry_points.txtcsh|j�jd�r|dd�}n<|j�jd�r8|dd�}n |j�jd�rT|dd�}n|}|�kpf|�kS)	Nz.exer/z
-script.py�
z.pya���i����r�)r}r�)rdZ	matchname)r�r�r"r#�is_entrypoint_wrapperasz/move_wheel_files.<locals>.is_entrypoint_wrapper�scripts)r�r�racs<|jdkrtd|�f���j|j|jjd�d|jd�S)Nz�Invalid script entry point: %s for req: %s - A callable suffix is required. Cf https://packaging.python.org/en/latest/distributing.html#console-scripts for more information.�.r)�moduleZimport_name�func)�suffixr	�script_templater�r�)�entry)�makerr�r"r#�_get_script_text�s
z*move_wheel_files.<locals>._get_script_textz�# -*- coding: utf-8 -*-
import re
import sys

from %(module)s import %(import_name)s

if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(%(func)s())
r=ZENSUREPIP_OPTIONSzpip = Z
altinstallz
pip%s = %srr^cSsg|]}tjd|�r|�qS)zpip(\d(\.\d)?)?$)�rer{)�.0�kr"r"r#�
<listcomp>�sz$move_wheel_files.<locals>.<listcomp>Zeasy_installzeasy_install = zeasy_install-%s = %scSsg|]}tjd|�r|�qS)zeasy_install(-\d\.\d)?$)r�r{)r�r�r"r"r#r��scSsg|]}d|�qS)z%s = %sr")r��kvr"r"r#r��scSsg|]}d|�qS)z%s = %sr")r�r�r"r"r#r��sr�Z	INSTALLERz
INSTALLER.piprjspip
�RECORDz
RECORD.pip�rzw+r.)F)NN)ra)1rr�rYr6r7r��setr�warnings�catch_warnings�filterwarnings�
compileall�compile_dir�logger�debug�getvaluer3r�r@rurr�ZvariantsZset_moder�r��pop�environ�extendZmake�getrb�versionrUZ
make_multipler�rTrrr��mover2rh�csv�reader�writerr]Zwriterowrmr�)+rdr�r~r�r�rJZ	pycompile�schemer�r�Zstrip_file_prefixr�Z	generated�stdoutr�Zep_filer�Zdatadirr�r�Zsubdirr�r�Z
pip_script�specZpip_epr�Zeasy_install_scriptZeasy_install_epZ	installerZtemp_installerZinstaller_file�recordZtemp_recordZ	record_inZ
record_outr�r��rowr[rZ�lZ
final_pathr")r�r�r�r�r�r�r�r�r�r�r�r~r#�move_wheel_files�s�




$;



#









.r�cstj���fdd��}|S)Nc?s6t�}x*�||�D]}||kr|j|�|VqWdS)N)r�r�)�args�kw�seenr)�fnr"r#�uniques

z_unique.<locals>.unique)�	functools�wraps)r�r�r")r�r#�_uniquesr�ccs�ddlm}tj||jd���}xd|D]\}tjj|j|d�}|V|j	d�r&tjj
|�\}}|dd�}tjj||d�}|Vq&WdS)	a
    Yield all the uninstallation paths for dist based on RECORD-without-.pyc

    Yield paths to all the files in RECORD. For each .py file in RECORD, add
    the .pyc in the same directory.

    UninstallPathSet.add() takes care of the __pycache__ .pyc.
    r)�FakeFiler�z.pyNr^z.pyc���)�	pip.utilsr�r�r�Zget_metadata_linesr6r7r3�locationr�r�)�distr�r�r�r7Zdnr��baser"r"r#�uninstallation_paths"s


r�cCsdyTdd�tjd|�D�d}|jd�}t�j|�}|dj�}ttt|j	d���}|SdSdS)	z�
    Return the Wheel-Version of an extracted wheel, if possible.

    Otherwise, return False if we couldn't parse / extract it.
    cSsg|]}|�qSr"r")r��dr"r"r#r�?sz!wheel_version.<locals>.<listcomp>Nrrxz
Wheel-Versionr�F)
rZfind_on_pathZget_metadatarZparsestrr��tuple�map�intr�)�
source_dirr�Z
wheel_datar�r"r"r#�
wheel_version8s
rcCsb|std|��|dtdkr>td|djtt|��f��n |tkr^tjddjtt|���dS)a�
    Raises errors or warns if called with an incompatible Wheel-Version.

    Pip should refuse to install a Wheel-Version that's a major series
    ahead of what it's compatible with (e.g 2.0 > 1.1); and warn when
    installing a version only minor version ahead (e.g 1.2 > 1.1).

    version: a 2-tuple representing a Wheel-Version (Major, Minor)
    name: name of wheel or package to raise exception about

    :raises UnsupportedWheel: when an incompatible Wheel-Version is given
    z(%s is in an unsupported or invalid wheelrzB%s's Wheel-Version (%s) is not compatible with this version of pipr�z*Installing from a newer Wheel-Version (%s)N)r�VERSION_COMPATIBLEr3r�strr��warning)r�rdr"r"r#�check_compatibilityKs

rc@s:eZdZdZejdej�Zdd�Zd
dd�Z	ddd	�Z
dS)rEzA wheel filez�^(?P<namever>(?P<name>.+?)-(?P<ver>\d.*?))
        ((-(?P<build>\d.*?))?-(?P<pyver>.+?)-(?P<abi>.+?)-(?P<plat>.+?)
        \.whl|\.dist-info)$cs��jj|�}|std|��|�_|jd�jdd��_|jd�jdd��_|jd�jd��_	|jd�jd��_
|jd	�jd��_t�fd
d��j	D���_
dS)
zX
        :raises InvalidWheelFilename: when the filename is invalid for a wheel
        z!%s is not a valid wheel filename.rdrwrvZverZpyverr��abiZplatc3s0|](}�jD]}�jD]}|||fVqqqdS)N)�abis�plats)r��x�y�z)rr"r#�	<genexpr>�sz!Wheel.__init__.<locals>.<genexpr>N)�
wheel_file_rer{r
r�r|ryrdr�r�Z
pyversionsr	r
r��	file_tags)rr�Z
wheel_infor")rr#r$ts
zWheel.__init__Ncs2�dkrtj��fdd�|jD�}|r.t|�SdS)a"
        Return the lowest index that one of the wheel's file_tag combinations
        achieves in the supported_tags list e.g. if there are 8 supported tags,
        and one of the file tags is first in the list, then return 0.  Returns
        None is the wheel is not supported.
        Ncsg|]}|�kr�j|��qSr")r>)r��c)�tagsr"r#r��sz+Wheel.support_index_min.<locals>.<listcomp>)r�supported_tagsr�min)rrZindexesr")rr#rG�szWheel.support_index_mincCs"|dkrtj}tt|�j|j��S)z'Is this wheel supported on this system?N)rr�boolr��intersectionr)rrr"r"r#rF�szWheel.supported)N)N)r(r)r*r+r��compile�VERBOSErr$rGrFr"r"r"r#rEhs
rEc@sHeZdZdZddd�Zddd�Zdd�Zdd	d
�Zdd�Zddd�Z	dS)�WheelBuilderz#Build wheels from a RequirementSet.NcCs6||_||_|jj|_|j|_|p$g|_|p.g|_dS)N)	�requirement_set�finderZ_wheel_cacher�_cache_rootZwheel_download_dir�
_wheel_dir�
build_options�global_options)rrrrrr"r"r#r$�s

zWheelBuilder.__init__cCs�tjd�}zn|j|||d�rlyBtj|�d}tjj||�}tjtjj||�|�t	j
d|�|SYnX|j|�dSt|�XdS)ziBuild one wheel.

        :return: The filename of the built wheel, or None if the build failed.
        z
pip-wheel-)�
python_tagrzStored in directory: %sN)
�tempfileZmkdtemp�_WheelBuilder__build_oner6r@r7r3r�r�r��info�
_clean_oner)rr��
output_dirr �tempdrLZ
wheel_pathr"r"r#�
_build_one�s

zWheelBuilder._build_onecCstjddt|jgt|j�S)Nz-uz-c)rbrnrZsetup_py�listr)rr�r"r"r#�_base_setup_args�s
zWheelBuilder._base_setup_argscCs�|j|�}d|jf}t|��t}tjd|�|dd|g|j}|dk	rT|d|g7}yt||jd|d�dS|jd	�tj	d
|j�dSWdQRXdS)Nz#Running setup.py bdist_wheel for %szDestination directory: %sZbdist_wheelz-dz--python-tagF)�cwd�show_stdout�spinnerT�errorzFailed building wheel for %s)
r)rdrr�r�rrZsetup_py_dirZfinishr-)rr�r&r �	base_argsZspin_messager,Z
wheel_argsr"r"r#Z__build_one�s



zWheelBuilder.__build_onecCsV|j|�}tjd|j�|ddg}yt||jdd�dStjd|j�dSdS)NzRunning setup.py clean for %sZcleanz--allF)r*r+Tz Failed cleaning build dir for %s)r)r�r#rdrrr-)rr�r.Z
clean_argsr"r"r#r$�s
zWheelBuilder._clean_oneFcCs�|jj|j�|jjj�}g}x�|D]�}|jr0q$|jrJ|s�tjd|j	�q$|rV|j
rVq$|rl|jrl|jjrlq$|rz|j
rzq$|r�|j}|j�\}}tjj|d|�dkr�q$dtjj|jjt|j	��kr�tjd|j	�q$|j|�q$W|s�dStjddjdd	�|D���t���<gg}}	�x(|D�]}d}
|�r�tj}
t|j|j�}yt|�WnBtk
�r�}z$tjd
|j	|�|	j|��w WYdd}~XnXn|j}|j |||
d�}
|
�r4|j|�|�r>|j
�r�t!j"j#t!j"j|j
t$���r�t%d��|j&�|j'|jj(�|_
tjj)t*|
��|_t+|j|j
dd
|jj,d�n
|	j|��q WWdQRX|�rptjddjdd	�|D���|	�r�tjddjdd	�|	D���t-|	�dkS)z�Build wheels.

        :param unpack: If True, replace the sdist we built from with the
            newly built wheel, in preparation for installation.
        :return: True if all the wheels built correctly.
        z(Skipping %s, due to already being wheel.Nr:zCSkipping bdist_wheel for %s, due to binaries being disabled for it.Tz*Building wheels for collected packages: %sz, cSsg|]
}|j�qSr")rd)r�r�r"r"r#r�sz&WheelBuilder.build.<locals>.<listcomp>z Building wheel for %s failed: %s)r zbad source dir - missing markerF)�sessionzSuccessfully built %s� cSsg|]
}|j�qSr")rd)r�r�r"r"r#r�QszFailed to build %scSsg|]
}|j�qSr")rd)r�r�r"r"r#r�Vsr).rZ
prepare_filesrZrequirements�valuesZ
constraintr;r�r#rdZeditabler&r<r�splitextr=r>Zegg_info_matchesr?r!rr2r3rrZimplementation_tagr9rrrArrr'r6r7r�r
�AssertionErrorZremove_temporary_sourceZbuild_locationZ	build_dirrIrrr/rU)rZautobuildingZreqsetZbuildsetr�r&r�ZextZ
build_successZ
build_failurer r%rKZ
wheel_filer"r"r#�build�s�	






zWheelBuilder.build)NN)N)N)F)
r(r)r*r+r$r'r)r"r$r4r"r"r"r#r�s


r)rr�)rNr5)FNNTNFNN)Qr+Z
__future__rr�r�rBr�r4Zloggingr6Zos.pathr�r�r�rbr!r��base64rZemail.parserrZpip._vendor.sixrr=Z
pip.compatrZpip.downloadrrZpip.exceptionsr	r
rZ
pip.locationsrr
rr�rrrrrZpip.utils.uirZpip.utils.loggingrZpip.utils.setuptools_buildrZpip._vendor.distlib.scriptsrZpip._vendorrZpip._vendor.packaging.utilsrZpip._vendor.six.movesrZ	wheel_extrZ	getLoggerr(r��objectrr9r%r]rhrurrrzr�r�r�r�r�rrrErr"r"r"r#�<module>sn
)'



'7__pycache__/__main__.cpython-36.opt-1.pyc000064400000000551151733136150014067 0ustar003

�PfH�@shddlmZddlZddlZedkrFejjejje��Zejjde�ddl	Z	e
dkrdeje	j��dS)�)�absolute_importN��__main__)
Z
__future__r�os�sys�__package__�path�dirname�__file__�insertZpip�__name__�exit�main�rr�/usr/lib/python3.6/__main__.py�<module>s__pycache__/basecommand.cpython-36.opt-1.pyc000064400000016035151733136150014624 0ustar003

�Pf�.�@s,dZddlmZddlZddlZddlZddlZddlZddlm	Z	ddl
mZddlm
Z
ddlmZddlmZmZmZmZmZdd	lmZdd
lmZmZddlmZmZddlmZm Z m!Z!m"Z"m#Z#dd
l$m%Z%m&Z&m'Z'ddl(m)Z)ddl*m+Z+dgZ,ej-e.�Z/Gdd�de0�Z1Gdd�de1�Z2dS)z(Base Command class, and related routines�)�absolute_importN)�
cmdoptions)�
PackageFinder)�running_under_virtualenv)�
PipSession)�
BadCommand�InstallationError�UninstallationError�CommandError�PreviousBuildDirError)�logging_dictConfig)�ConfigOptionParser�UpdatingDefaultsHelpFormatter)�InstallRequirement�parse_requirements)�SUCCESS�ERROR�
UNKNOWN_ERROR�VIRTUALENV_NOT_FOUND�PREVIOUS_BUILD_DIR_ERROR)�deprecation�get_prog�normalize_path)�IndentingFormatter)�pip_version_check�Commandc@s@eZdZdZdZdZd
Zddd�Zddd�Zd	d
�Z	dd�Z
dS)rNF�ext://sys.stdout�ext://sys.stderrcCsr|jdt�|jft�d|j|j|d�}tf|�|_d|jj�}tj	|j|�|_
tjtj
|j�}|jj|�dS)Nz%s %sF)�usage�prog�	formatterZadd_help_option�name�description�isolatedz
%s Options)rrr!r�__doc__r
�parser�
capitalize�optparseZOptionGroupZcmd_optsrZmake_option_groupZ
general_groupZadd_option_group)�selfr#Z	parser_kwZ
optgroup_nameZgen_opts�r)�!/usr/lib/python3.6/basecommand.py�__init__)szCommand.__init__cCs�t|jrttjj|jd��nd|dk	r*|n|j|jd�}|jrF|j|_	|j
rT|j
|_|js^|rr|dk	rj|n|j|_|jr�|j|jd�|_
|j|j_|S)N�http)�cache�retriesZinsecure_hosts)r,Zhttps)r�	cache_dirr�os�path�joinr.�
trusted_hostsZcertZverifyZclient_cert�timeout�proxyZproxies�no_inputZauthZ	prompting)r(�optionsr.r4�sessionr)r)r*�_build_sessionAs

zCommand._build_sessioncCs|jj|�S)N)r%�
parse_args)r(�argsr)r)r*r:eszCommand.parse_argscs�|j|�\}}|jr8|jdkr"d�|jdkr2d�qHd�n|jrDd�nd��}|jrVd}tddd	d
tjd�idtd
d�i�d|jdd	gdd�dd|jddd�dd|jp�dddd�d�|t	t
ddd|jr�dndg��d�t�fdd�d2D��d"��tj
dd�d3k�rtjd$tj�|j�r(d%tjd&<|j�rBd'j|j�tjd(<|j�rft��sftjd)�tjt��z$y"|j||�}t|t��r�|SW�n�t k
�r�}z tjt!|��tj"d*dd+�t#Sd}~Xn�t$t%t&fk
�r}z tjt!|��tj"d*dd+�t'Sd}~Xn~t(k
�rF}ztjd,|�tj"d*dd+�t'Sd}~XnDt)k
�rrtjd-�tj"d*dd+�t'Stjd.dd+�t*SWd|j+�r�t,|d/d��r�|j-|dt.d0|j/�d1��}t0|�WdQRXXt1S)4N��WARNING�rZCRITICAL�DEBUG�INFOFZexclude_warningsz pip.utils.logging.MaxLevelFilter)z()�level�indentz%(message)s)z()�formatz(pip.utils.logging.ColorizedStreamHandlerr)rA�class�stream�filtersr )rArDrEr z+pip.utils.logging.BetterRotatingFileHandlerz	/dev/nullT)rArD�filenameZdelayr )�console�console_errors�user_logrHrIrJ)rA�handlersc3s&|]}|d�dkrdndifVqdS)rAr@rr=r?N)r@rr))�.0r!)rAr)r*�	<genexpr>�s
zCommand.main.<locals>.<genexpr>�pip._vendor�distlib�requests�urllib3)�versionZdisable_existing_loggersrFZ
formattersrK�rootZloggers�z�Python 2.6 is no longer supported by the Python core team, please upgrade your Python. A future version of pip will drop support for Python 2.6�1ZPIP_NO_INPUT� ZPIP_EXISTS_ACTIONz2Could not find an activated virtualenv (required).zException information:)�exc_infoz	ERROR: %szOperation cancelled by userz
Exception:�no_index�)r.r4)rNrOrPrQ)r>rT)2r:�quiet�verbose�logr�loggingr=r�log_streams�list�filter�dict�sys�version_info�warnings�warnrZPython26DeprecationWarningr6r0�environZ
exists_actionr2Zrequire_venvr�loggerZcritical�exitrZrun�
isinstance�intr�str�debugrrr	rrr
�KeyboardInterruptrZdisable_pip_version_check�getattrr9�minr4rr)r(r;r7Z
root_levelZstatus�excr8r))rAr*�mainis�










zCommand.main)rr)F)NN)�__name__�
__module__�__qualname__r!rZhiddenr^r+r9r:rqr)r)r)r*r#s

$c@s"eZdZedd��Zddd�ZdS)�RequirementCommandc	Cs"x6|jD],}x&t|d||||d�D]}|j|�q"WqWx&|D]}|jtj|d|j|d��q>Wx*|jD] }|jtj||j|j|d��qhWd}	x8|j	D].}x(t|||||d�D]}d}	|j|�q�Wq�W|j
|_
|p�|jp�|	�sd|i}
|j�rd	t|
d
j
|j�d�}nd|
}tj|�dS)
z?
        Marshal cmd line args into a requirement set.
        T)Z
constraint�finderr7r8�wheel_cacheN)r#rw)�default_vcsr#rwF)rvr7r8rwr!z^You must give at least one requirement to %(name)s (maybe you meant "pip %(name)s %(links)s"?)rV)ZlinkszLYou must give at least one requirement to %(name)s (see "pip help %(name)s"))ZconstraintsrZadd_requirementrZ	from_lineZ
isolated_modeZ	editablesZ
from_editablerxZrequirementsZrequire_hashes�
find_linksrar2rgZwarning)Zrequirement_setr;r7rvr8r!rwrGZreqZfound_req_in_fileZopts�msgr)r)r*�populate_requirement_setsF
z+RequirementCommand.populate_requirement_setNc
CsR|jg|j}|jr*tjddj|��g}t|j|j||j	|j
|j|||||d�S)zR
        Create a package finder appropriate to this requirement command.
        zIgnoring indexes: %s�,)ry�format_control�
index_urlsr3Zallow_all_prereleases�process_dependency_linksr8�platformZversions�abi�implementation)Z	index_urlZextra_index_urlsrXrgrlr2rryr}r3Zprer)r(r7r8r�Zpython_versionsr�r�r~r)r)r*�_build_package_finder:s z(RequirementCommand._build_package_finder)NNNN)rrrsrt�staticmethodr{r�r)r)r)r*rus8ru)3r$Z
__future__rr]r0rbr'rdZpiprZ	pip.indexrZ
pip.locationsrZpip.downloadrZpip.exceptionsrrr	r
rZ
pip.compatrZpip.baseparserr
rZpip.reqrrZpip.status_codesrrrrrZ	pip.utilsrrrZpip.utils.loggingrZpip.utils.outdatedr�__all__Z	getLoggerrrrg�objectrrur)r)r)r*�<module>s.
___pycache__/pep425tags.cpython-36.opt-1.pyc000064400000016433151733136150014253 0ustar003

�Pf�*�
@sdZddlmZddlZddlZddlZddlZddlZyddlZWne	k
rbddl
jZYnXddlZddl
mZddlZeje�Zejd�Zdd�Zdd	�Zd
d�Zdd
�Zdd�Zd!dd�Zdd�Zdd�Zdd�Zdd�Zdd�Z d"dd�Z!e!�Z"e!dd �Z#e�Z$dS)#z2Generate and work with PEP 425 Compatibility Tags.�)�absolute_importN)�OrderedDictz(.+)_(\d+)_(\d+)_(.+)cCsBy
tj|�Stk
r<}ztjdj|�t�dSd}~XnXdS)Nz{0})�	sysconfig�get_config_var�IOError�warnings�warn�format�RuntimeWarning)�var�e�r
� /usr/lib/python3.6/pep425tags.pyrs

rcCs:ttd�rd}n&tjjd�r"d}ntjdkr2d}nd}|S)z'Return abbreviated implementation name.�pypy_version_info�pp�javaZjyZcliZip�cp)�hasattr�sys�platform�
startswith)Zpyimplr
r
r�
get_abbr_impl!s

rcCs.td�}|st�dkr*djttt���}|S)zReturn implementation version.�py_version_nodotr�)rr�join�map�str�get_impl_version_info)Zimpl_verr
r
r�get_impl_ver.srcCs:t�dkr"tjdtjjtjjfStjdtjdfSdS)zQReturn sys.version_info-like tuple for use in decrementing the minor
    version.rr�N)rr�version_infor�major�minorr
r
r
rr6s

rcCsdjt�t��S)z;
    Returns the Tag for this specific implementation.
    z{0}{1})r	rrr
r
r
r�get_impl_tagAsr#TcCs.t|�}|dkr&|r tjd|�|�S||kS)zgUse a fallback method for determining SOABI flags if the needed config
    var is unset or unavailable.Nz>Config variable '%s' is unset, Python ABI tag may be incorrect)r�logger�debug)rZfallback�expectedr�valr
r
r�get_flagHsr(cs�td�}t��|r��dkr�ttd�r�d}d}d}tddd��dkd	�rLd
}td�fdd��dkd	�rjd
}tddd�d�dko�tjdkd�r�tjdkr�d}d�t�|||f}n@|r�|jd�r�d|jd�d}n|r�|j	dd�j	dd�}nd}|S)zXReturn the ABI tag based on SOABI (if available) or emulate SOABI
    (CPython 2, PyPy).�SOABIrr�
maxunicoder�Py_DEBUGcSs
ttd�S)N�gettotalrefcount)rrr
r
r
r�<lambda>^szget_abi_tag.<locals>.<lambda>)r�d�
WITH_PYMALLOCcs�dkS)Nrr
r
)�implr
rr-bs�mZPy_UNICODE_SIZEcSs
tjdkS)Ni��)rr*r
r
r
rr-fs��)r&r�uz
%s%s%s%s%szcpython-�-r�.�_N)rr)r3r3)r3r3)
rrrrr(r rr�split�replace)Zsoabir.r1r4�abir
)r0r�get_abi_tagTs8

r;cCs
tjdkS)Ni���)r�maxsizer
r
r
r�_is_running_32bitvsr=cCs�tjdkr^tj�\}}}|jd�}|dkr6t�r6d}n|dkrHt�rHd}dj|d|d	|�Stjj�j	dd
�j	dd
�}|dkr�t�r�d
}|S)z0Return our platform name 'win32', 'linux_x86_64'�darwinr6�x86_64�i386�ppc64�ppczmacosx_{0}_{1}_{2}rrr7r5�linux_x86_64�
linux_i686)
rrZmac_verr8r=r	�	distutils�util�get_platformr9)�releaser7�machineZ	split_ver�resultr
r
rrGzs

rGcCsJt�dkrdSyddl}t|j�Sttfk
r8YnXtjjj	dd�S)NrCrDFr��)rCrD)
rG�
_manylinux�boolZmanylinux1_compatible�ImportError�AttributeError�pipZutilsZglibcZhave_compatible_glibc)rMr
r
r�is_manylinux1_compatible�s

rRcsvg}��fdd��td
dddg���|||�r8|j|�x.�D]&}|�|kr>�|||�r>|j|�q>W|jd�|S)z�Return a list of supported arches (including group arches) for
    the given major, minor and machine architecture of an macOS machine.
    cs~|dkr||fd
kS|dkr(||fdkS|dkr<||fdkS|dkrP||fd
kS|�krzx �|D]}�|||�rbdSqbWd	S)NrB�
rLrAr@r2r?TF)rSrL)rSrL)rSr2)rSrLr
)r!r"�arch�garch)�_supports_arch�groupsr
rrV�sz)get_darwin_arches.<locals>._supports_arch�fatr@rB�intelr?�fat64rA�fat32Z	universal�r@rB)rXr\�r?r@)rYr]�r?rA)rZr^�r?r@rB)r[r_)r�append)r!r"rI�archesrUr
)rVrWr�get_darwin_arches�s$


rbFcCsg}|dkrXg}t�}|dd�}x4t|ddd�D] }|jdjtt||f���q4W|p`t�}g}	|pnt�}|r�|g|	dd�<t�}
ddl	}x8|j
�D],}|djd�r�|
j|dj
dd�d�q�W|	jtt|
���|	jd�|�sx|p�t�}
|
jd	��r�tj|
�}|�r�|j�\}}}}d
j||�}g}xTttt|�d��D]4}x,tt|�||�D]}|j|||f��q^W�qHWn|
g}n*|dk�r�t��r�|
jdd�|
g}n|
g}x:|	D]2}x*|D]"}
|jd
||df||
f��q�W�q�WxZ|dd�D]J}|dk�rPx6|
D].}x&|D]}
|jd
||f||
f��qW�qW�q�Wx*|D]"}
|jd|ddd|
f��qRW|jd
||dfddf�|jd
||ddfddf�xNt|�D]B\}}|jd|fddf�|dk�r�|jd|dddf��q�W|S)acReturn a list of supported tags for each version specified in
    `versions`.

    :param versions: a list of string versions, of the form ["33", "32"],
        or None. The first version will be assumed to support our ABI.
    :param platform: specify the exact platform you want valid
        tags for, or None. If None, use the local system platform.
    :param impl: specify the exact implementation you want valid
        tags for, or None. If None, use the local interpreter impl.
    :param abi: specify the exact abi you want valid
        tags for, or None. If None, use the local interpreter abi.
    Nrrrz.abir6rKZnoneZmacosxz
{0}_{1}_%i_%s�linuxZ
manylinux1z%s%s�31�30zpy%s�any���rgrgrg)rdre)r�ranger`rrrrr;�set�impZget_suffixesr�addr8�extend�sorted�listrG�
_osx_arch_pat�matchrWr	�reversed�intrbrRr9�	enumerate)Zversions�noarchrr0r:Z	supportedr r!r"ZabisZabi3srj�suffixrTrp�nameZactual_archZtplrar1�a�version�ir
r
r�
get_supported�sh 




 

(


*
" 
rz)rt)TT)NFNNN)%�__doc__Z
__future__r�rerrrZloggingrrOZdistutils.sysconfigZdistutils.utilrEZ
pip.compatrZpip.utils.glibcrQZ	getLogger�__name__r$�compilerorrrrr#r(r;r=rGrRrbrzZsupported_tagsZsupported_tags_noarchZimplementation_tagr
r
r
r�<module>s>



"=
^
__pycache__/index.cpython-36.pyc000064400000074137151733136150012532 0ustar003

�PfW��@sdZddlmZddlZddlZddlmZddlZddlZddl	Z	ddl
Z
ddlZddlZddl
Z
ddlmZddlmZddlmZddlmZmZmZmZmZdd	lmZdd
lmZddlm Z ddl!m"Z"m#Z#m$Z$m%Z%dd
l&m'Z'm(Z(m)Z)m*Z*ddl+m,Z,m-Z-ddl.m/Z/ddl0m1Z1m2Z2m3Z3ddl4mZ5ddl6m7Z7ddl8m9Z9ddl:m;Z;ddl<m=Z=dddgZ>d3d4d5d6d7d8gZ?ej@eA�ZBGdd �d eC�ZDGd!d�deC�ZEe
jFd"e
jG�fd#d$�ZHGd%d&�d&eC�ZIGd'd(�d(eC�ZJedd)�ZKd*d�ZLd+d,�ZMd-d.�ZNd/d0�ZOed1d2�ZPdS)9z!Routines related to PyPI, indexes�)�absolute_importN)�
namedtuple)�parse)�request)�	ipaddress)�cached_property�splitext�normalize_path�ARCHIVE_EXTENSIONS�SUPPORTED_EXTENSIONS)�RemovedInPip10Warning)�
indent_log)�check_requires_python)�DistributionNotFound�BestVersionAlreadyInstalled�InvalidWheelFilename�UnsupportedWheel)�HAS_TLS�is_url�path_to_url�url_to_path)�Wheel�	wheel_ext)�
get_supported)�html5lib�requests�six)�canonicalize_name)�
specifiers)�SSLError)�unescape�
FormatControl�fmt_ctl_handle_mutual_exclude�
PackageFinder�https�*�	localhost�127.0.0.0/8�::1/128�file�sshc@s\eZdZdd�Zdd�Zdd�Zdd�Zd	d
�Zdd�Zd
d�Z	dd�Z
dd�Zdd�ZdS)�InstallationCandidatecCs,||_t|�|_||_|j|j|jf|_dS)N)�project�
parse_version�version�location�_key)�selfr,r.r/�r2�/usr/lib/python3.6/index.py�__init__>s
zInstallationCandidate.__init__cCsdj|j|j|j�S)Nz,<InstallationCandidate({0!r}, {1!r}, {2!r})>)�formatr,r.r/)r1r2r2r3�__repr__DszInstallationCandidate.__repr__cCs
t|j�S)N)�hashr0)r1r2r2r3�__hash__IszInstallationCandidate.__hash__cCs|j|dd��S)NcSs||kS)Nr2)�s�or2r2r3�<lambda>Msz.InstallationCandidate.__lt__.<locals>.<lambda>)�_compare)r1�otherr2r2r3�__lt__LszInstallationCandidate.__lt__cCs|j|dd��S)NcSs||kS)Nr2)r9r:r2r2r3r;Psz.InstallationCandidate.__le__.<locals>.<lambda>)r<)r1r=r2r2r3�__le__OszInstallationCandidate.__le__cCs|j|dd��S)NcSs||kS)Nr2)r9r:r2r2r3r;Ssz.InstallationCandidate.__eq__.<locals>.<lambda>)r<)r1r=r2r2r3�__eq__RszInstallationCandidate.__eq__cCs|j|dd��S)NcSs||kS)Nr2)r9r:r2r2r3r;Vsz.InstallationCandidate.__ge__.<locals>.<lambda>)r<)r1r=r2r2r3�__ge__UszInstallationCandidate.__ge__cCs|j|dd��S)NcSs||kS)Nr2)r9r:r2r2r3r;Ysz.InstallationCandidate.__gt__.<locals>.<lambda>)r<)r1r=r2r2r3�__gt__XszInstallationCandidate.__gt__cCs|j|dd��S)NcSs||kS)Nr2)r9r:r2r2r3r;\sz.InstallationCandidate.__ne__.<locals>.<lambda>)r<)r1r=r2r2r3�__ne__[szInstallationCandidate.__ne__cCst|t�stS||j|j�S)N)�
isinstancer+�NotImplementedr0)r1r=�methodr2r2r3r<^s
zInstallationCandidate._compareN)
�__name__�
__module__�__qualname__r4r6r8r>r?r@rArBrCr<r2r2r2r3r+<sr+c	@s�eZdZdZd!dd�Zdd�Zed"dd	��Zd
d�Zdd
�Z	dd�Z
dd�Zdd�Zdd�Z
ejd�Zdd�Zdd�Zdd�Zdd�Zdd �ZdS)#r#z�This finds packages.

    This is meant to match easy_install's technique for looking for
    packages, by reading pages and looking for appropriate links.
    FNcCs�|dkrtd��g|_x:|D]2}|jd�rBt|�}
tjj|
�rB|
}|jj|�qW||_g|_	t
�|_|pvtt
�t
��|_
dd�|r�|ngD�|_||_||_||_t|	||
|d�|_ts�x8tj|j|j�D]$}tj|�}|jdkr�tjd�Pq�WdS)	a�Create a PackageFinder.

        :param format_control: A FormatControl object or None. Used to control
            the selection of source packages / binary packages when consulting
            the index and links.
        :param platform: A string or None. If None, searches for packages
            that are supported by the current system. Otherwise, will find
            packages that can be built on the platform passed in. These
            packages will only be downloaded for distribution: they will
            not be built locally.
        :param versions: A list of strings or None. This is passed directly
            to pep425tags.py in the get_supported() method.
        :param abi: A string or None. This is passed directly
            to pep425tags.py in the get_supported() method.
        :param implementation: A string or None. This is passed directly
            to pep425tags.py in the get_supported() method.
        Nz>PackageFinder() missing 1 required keyword argument: 'session'�~cSsg|]}d|df�qS)r%r2)�.0�hostr2r2r3�
<listcomp>�sz*PackageFinder.__init__.<locals>.<listcomp>)�versions�platform�abi�implr$zipip is configured with locations that require TLS/SSL, however the ssl module in Python is not available.)�	TypeError�
find_links�
startswithr	�os�path�exists�append�
index_urls�dependency_links�set�logged_linksr!�format_control�secure_origins�allow_all_prereleases�process_dependency_links�sessionr�
valid_tagsr�	itertools�chain�urllib_parse�urlparse�scheme�logger�warning)r1rSrYr_Z
trusted_hostsr`rar]rOrNrP�implementation�linkZnew_link�parsedr2r2r3r4ls>	




zPackageFinder.__init__cCs"|jrtjdt�|jj|�dS)NzXDependency Links processing has been deprecated and will be removed in a future release.)r`�warnings�warnrrZ�extend)r1�linksr2r2r3�add_dependency_links�s
z"PackageFinder.add_dependency_linkscs�g�g���fdd�}x�|D]�}tjj|�}|jd�}|s>|r�|rH|}nt|�}tjj|�r�|r�tjj|�}x4tj|�D]}|tjj||��qxWq�|rƈj	|�q�tjj
|�r�||�q�tjd|�qt
|�r܈j	|�qtjd|�qW��fS)zt
        Sort locations into "files" (archives) and "urls", and return
        a pair of lists (files,urls)
        cs8t|�}tj|dd�ddkr*�j|�n
�j|�dS)NF)�strictrz	text/html)r�	mimetypesZ
guess_typerX)rV�url)�files�urlsr2r3�	sort_path�sz0PackageFinder._sort_locations.<locals>.sort_pathzfile:z:Url '%s' is ignored: it is neither a file nor a directory.zQUrl '%s' is ignored. It is either a non-existing path or lacks a specific scheme.)rUrVrWrTr�isdir�realpath�listdir�joinrX�isfilerhrir)�	locations�
expand_dirrwrtZ
is_local_pathZis_file_urlrV�itemr2)rurvr3�_sort_locations�s8



zPackageFinder._sort_locationscCsXt|j�}|jjrHt|jj�}|j|j�s8td|j��|j|j�}n|}|j	|fS)a[
        Function used to generate link sort key for link tuples.
        The greater the return value, the more preferred it is.
        If not finding wheels, then sorted by version only.
        If finding wheels, then the sort order is by version, then:
          1. existing installs
          2. wheels ordered via Wheel.support_index_min(self.valid_tags)
          3. source archives
        Note: it was considered to embed this logic into the Link
              comparison operators, but then different sdist links
              with the same version, would have to be considered equal
        zB%s is not a supported wheel for this platform. It can't be sorted.)
�lenrbr/�is_wheelr�filename�	supportedrZsupport_index_minr.)r1�	candidateZsupport_num�wheelZprir2r2r3�_candidate_sort_key�s

z!PackageFinder._candidate_sort_keyc	Csltjt|��}|j|j|jf}|djdd�d
}�x t|jD�]}||dkr`|ddkr`q@yht	j
t|dtj
�s�|ddkr�|dn|djd��}t	jt|dtj
�r�|dn|djd��}WnJtk
�r|d�r|dj�|dj�k�r|ddk�rw@YnX||k�r q@|d|dk�rP|ddk�rP|ddk	�rPq@dSW|jd|j|j�d	S)Nr�+�r%�utf8�Tz�The repository located at %s is not a trusted or secure host and is being ignored. If this repository is available via HTTPS it is recommended to use HTTPS instead, otherwise you may silence this warning and allow it anyways with '--trusted-host %s'.F���)rerf�strrgZhostnameZport�rsplit�SECURE_ORIGINSr^rZ
ip_addressrDrZ	text_type�decodeZ
ip_network�
ValueError�lowerri)	r1rhr/rl�originZprotocolZ
secure_originZaddrZnetworkr2r2r3�_validate_secure_origins>

z%PackageFinder._validate_secure_origincs �fdd���fdd�|jD�S)z�Returns the locations found via self.index_urls

        Checks the url_name on the main (first in the list) index and
        use this url_name to produce all locations
        cs,tj|tjt����}|jd�s(|d}|S)N�/)�	posixpathr{reZquoter�endswith)rt�loc)�project_namer2r3�mkurl_pypi_urlhs
z?PackageFinder._get_index_urls_locations.<locals>.mkurl_pypi_urlcsg|]}�|��qSr2r2)rKrt)r�r2r3rMusz;PackageFinder._get_index_urls_locations.<locals>.<listcomp>)rY)r1r�r2)r�r�r3�_get_index_urls_locationsas
z'PackageFinder._get_index_urls_locationscs��j|�}�j|�\}}�j�jdd�\}}�j�j�\}}dd�tj|||�D�}	�fdd�tjdd�|D�dd�|D�d	d�|D��D�}
tjd
t|
�|�x|
D]}tjd|�q�Wt	|�}t
�j|�}
t|||
�}�j
dd��jD�|�}g}xJ�j|
|�D]:}tjd
|j�t��|j�j
|j|��WdQRX�qW�j
dd��jD�|�}|�r|tjddjdd�|D����j
|	|�}|�r�|jdd�tjddjdd�|D���||||S)aFind all available InstallationCandidate for project_name

        This checks index_urls, find_links and dependency_links.
        All versions found are returned as an InstallationCandidate list.

        See _link_package_versions for details on which files are accepted
        T)r~css|]}t|�VqdS)N)�Link)rKrtr2r2r3�	<genexpr>�sz4PackageFinder.find_all_candidates.<locals>.<genexpr>csg|]}�jt|�r|�qSr2)r�rh)rKrk)r1r2r3rM�sz5PackageFinder.find_all_candidates.<locals>.<listcomp>css|]}t|�VqdS)N)r�)rKrtr2r2r3r��scss|]}t|�VqdS)N)r�)rKrtr2r2r3r��scss|]}t|�VqdS)N)r�)rKrtr2r2r3r��sz,%d location(s) to search for versions of %s:z* %scss|]}t|d�VqdS)z-fN)r�)rKrtr2r2r3r��szAnalyzing links from page %sNcss|]}t|�VqdS)N)r�)rKrtr2r2r3r��szdependency_links found: %sz, cSsg|]}|jj�qSr2)r/rt)rKr.r2r2r3rM�s)�reversezLocal files found: %scSsg|]}t|jj��qSr2)rr/rt)rKr�r2r2r3rM�s)r�r�rSrZrcrdrh�debugr�r�fmt_ctl_formatsr]�Search�_package_versions�
_get_pagesrtr
rorpr{�sort)r1r�Zindex_locationsZindex_file_locZ
index_url_locZfl_file_locZ
fl_url_locZdep_file_locZdep_url_locZfile_locationsZ
url_locationsr/�canonical_name�formats�searchZfind_links_versionsZ
page_versions�pageZdependency_versionsZ
file_versionsr2)r1r3�find_all_candidateswsX


 
z!PackageFinder.find_all_candidatesc
s�|j|j�}t|jjdd�|D�|jr,|jndd����fdd�|D�}|r�t||jd�}t|j	dd�r�d	d�|D�}t|�r�t||jd�}q�d
j
|j|j|j	�}|j	j
r�|dj
|j	j
�7}tj|�nd}|jdk	r�t|jj�}nd}|dko�|dk�r0tjd|d
jttdd�|D��td���td|��d}	|�rT|dk�sP|j|k�rTd}	|�r�|dk	�r�|	�rztjd|�ntjd||j�dS|	�r�tjd|d
jt�td���p�d�t�tjd|jd
jt�td���|j	S)z�Try to find a Link matching req

        Expects req, an InstallRequirement and upgrade, a boolean
        Returns a Link if found,
        Raises DistributionNotFound or BestVersionAlreadyInstalled otherwise
        cSsg|]}t|j��qSr2)r�r.)rK�cr2r2r3rM�sz2PackageFinder.find_requirement.<locals>.<listcomp>N)Zprereleasescsg|]}t|j��kr|�qSr2)r�r.)rKr�)�compatible_versionsr2r3rM�s)�key�yankedFcSsg|]}t|jdd�s|�qS)r�F)�getattrr/)rKr�r2r2r3rM�sznWARNING: The candidate selected for download or install is a yanked version: '{}' candidate (version {} at {})z
Reason for being yanked: {}zNCould not find a version that satisfies the requirement %s (from versions: %s)z, css|]}t|j�VqdS)N)r�r.)rKr�r2r2r3r�sz1PackageFinder.find_requirement.<locals>.<genexpr>z%No matching distribution found for %sTzLExisting installed version (%s) is most up-to-date and satisfies requirementzUExisting installed version (%s) satisfies requirement (most up-to-date version is %s)z=Installed version (%s) is most up-to-date (past versions: %s)Znonez)Using version %s (newest of versions: %s))r��namer[Z	specifier�filterr_�maxr�r�r/r5r,r.�
yanked_reasonrhriZsatisfied_byr-Zcriticalr{�sortedrr�r)
r1ZreqZupgradeZall_candidatesZapplicable_candidatesZbest_candidateZnonyanked_candidatesZwarning_messageZinstalled_versionZbest_installedr2)r�r3�find_requirement�sx



zPackageFinder.find_requirementccsFt�}x:|D]2}||krq|j|�|j|�}|dkr8q|VqWdS)zp
        Yields (page, page_url) from the given locations, skipping
        locations that have errors.
        N)r[�add�	_get_page)r1r}r��seenr/r�r2r2r3r�Bs


zPackageFinder._get_pagesz-py([123]\.?[0-9]?)$cCsTgg}}t�}x:|D]2}||kr|j|�|jr>|j|�q|j|�qW||S)z�
        Returns elements of links in order, non-egg links first, egg links
        second, while eliminating duplicates
        )r[r��egg_fragmentrX)r1rpZeggsZno_eggsr�rkr2r2r3�_sort_linksUs


zPackageFinder._sort_linkscCs:g}x0|j|�D]"}|j||�}|dk	r|j|�qW|S)N)r��_link_package_versionsrX)r1rpr��resultrk�vr2r2r3r�eszPackageFinder._package_versionscCs(||jkr$tjd||�|jj|�dS)NzSkipping link %s; %s)r\rhr�r�)r1rk�reasonr2r2r3�_log_skipped_linkms
zPackageFinder._log_skipped_linkc
CsJd}|jr|j}|j}�n|j�\}}|s:|j|d�dS|tkrV|j|d|�dSd|jkr~|tkr~|j|d|j�dSd|jkr�|dkr�|j|d�dS|tk�r&yt	|j
�}Wn tk
r�|j|d	�dSXt|j
�|jk�r|j|d
|j�dS|j|j��s |j|d�dS|j}d|jk�rR|tk�rR|j|d
|j�dS|�sft||j|�}|dk�r�|j|d
|j�dS|jj|�}|�r�|d|j��}|jd�}|tjdd�k�r�|j|d�dSyt|j�}	Wn.tjk
�rtjd|j
|j�d}	YnX|	�s.tjd||j�dStjd||�t|j||�S)z'Return an InstallationCandidate or NoneNz
not a filezunsupported archive format: %s�binaryzNo binaries permitted for %sZmacosx10z.zipzmacosx10 onezinvalid wheel filenamezwrong project name (not %s)z%it is not compatible with this Python�sourcezNo sources permitted for %sr��zPython version is incorrectz3Package %s has an invalid Requires-Python entry: %sTz_The package %s is incompatible with the pythonversion in use. Acceptable python versions are:%szFound link %s, version: %s)r��extrr�rr�rZsuppliedrVrr�rrr�Z	canonicalr�rbr.�egg_info_matches�_py_version_rer��start�group�sysr�requires_pythonrZInvalidSpecifierrhr�r+)
r1rkr�r.�egg_infor�r��match�
py_versionZsupport_this_pythonr2r2r3r�rs�





z$PackageFinder._link_package_versionscCstj||jd�S)N)ra)�HTMLPage�get_pagera)r1rkr2r2r3r��szPackageFinder._get_page)	FNFNNNNNN)F)rGrHrI�__doc__r4rq�staticmethodr�r�r�r�r�r�r��re�compiler�r�r�r�r�r�r2r2r2r3r#es(
Q
1GSx
Mz([a-z0-9_.]+)-([a-z0-9_.!+-]+)cCs�|j|�}|stjd|�dS|dkrB|jd�}||jd�d�S|jd�j�}|jdd�}|j�d}|j|�r�|jd�t|�d�SdSdS)axPull the version part out of a string.

    :param egg_info: The string to parse. E.g. foo-2.1
    :param search_name: The name of the package this belongs to. None to
        infer the name. Note that this cannot unambiguously parse strings
        like foo-2-2 which might be foo, 2-2 or foo-2, 2.
    :param link: The link the string came from, for logging on failure.
    z%Could not parse version from link: %sNr�-�_)	r�rhr�r��indexr��replacerTr�)r�Zsearch_namerkZ_egg_info_rer�Z
full_matchr�Zlook_forr2r2r3r��s


r�c@sxeZdZdZddd�Zdd�Zeddd	��Zedd
d��Z	edd
��Z
edd��Ze
dd��Zejdej�Zdd�ZdS)r�z'Represents one page, along with its URLNcCs\d}|r2d|kr2tj|d�\}}d|kr2|d}||_tj|j|dd�|_||_||_dS)NzContent-Type�charsetF)Ztransport_encodingZnamespaceHTMLElements)�cgiZparse_header�contentrrrlrt�headers)r1r�rtr��encoding�content_type�paramsr2r2r3r4�s
zHTMLPage.__init__cCs|jS)N)rt)r1r2r2r3�__str__�szHTMLPage.__str__TcCsl|dkrtd��|j}|jdd�d}ddlm}x>|jD]4}|j�j|�r:|t|�dkr:t	j
d||�dSq:W�y"|r�|j}xHtD]@}|j
|�r�|j||d�}	|	j�jd	�r�Pq�t	j
d
||	�dSq�Wt	j
d|�tj|�\}}
}}}
}|dk�r6tjjtj|���r6|j
d
��s|d
7}tj|d�}t	j
d|�|j|d	dd�d�}|j�|jjdd�}	|	j�jd	��s�t	j
d
||	�dS||j|j|j�}Wn�tjk
�r�}z|j|||�WYdd}~Xn�tk
�r}z"d|}|j|||t	jd�WYdd}~Xn`tj k
�r>}z|j|d||�WYdd}~Xn*tj!k
�rb|j|d|�YnX|SdS)Nz9get_page() missing 1 required keyword argument: 'session'�#r�r)�
VcsSupportz+:zCannot look at %s URL %s)raz	text/htmlz,Skipping page %s because of Content-Type: %szGetting page %sr)r�z
index.htmlz# file: URL is directory, getting %szmax-age=600)ZAcceptz
Cache-Control)r�zContent-Type�unknownz6There was a problem confirming the ssl certificate: %s)�methzconnection error: %sz	timed out)"rRrt�split�pip.vcsr�Zschemesr�rTr�rhr�r�r
r��_get_content_typererfrUrVrx�urllib_requestZurl2pathname�urljoin�get�raise_for_statusr�r�rZ	HTTPError�_handle_failr�info�ConnectionErrorZTimeout)�clsrkZ
skip_archivesrartr�rgr�Zbad_extr��netlocrVr��query�fragment�respZinst�excr�r2r2r3r��sp



$"zHTMLPage.get_pagecCs|dkrtj}|d||�dS)Nz%Could not fetch URL %s: %s - skipping)rhr�)rkr�rtr�r2r2r3r�NszHTMLPage._handle_failcCsDtj|�\}}}}}|dkr dS|j|dd�}|j�|jjdd�S)z;Get the Content-Type of the given url, using a HEAD request�httpr$�T)Zallow_redirectszContent-Type)r�r$)re�urlsplit�headr�r�r�)rtrargr�rVr�r�r�r2r2r3r�UszHTMLPage._get_content_typecCs@dd�|jjd�D�}|r6|djd�r6|djd�S|jSdS)NcSsg|]}|jd�dk	r|�qS)�hrefN)r�)rK�xr2r2r3rMfsz%HTMLPage.base_url.<locals>.<listcomp>z.//baserr�)rl�findallr�rt)r1�basesr2r2r3�base_urlcszHTMLPage.base_urlccs�x�|jjd�D]v}|jd�r|jd�}|jtj|j|��}|jd�}|rPt|�nd}|jddd�}|dk	rrt|�}t||||d�VqWdS)zYields all links in the pagez.//ar�zdata-requires-pythonNzdata-yanked)�default)r�r�)	rlr�r��
clean_linkrer�r�r r�)r1Zanchorr�rtZ	pyrequirer�r2r2r3rpns


zHTMLPage.linksz[^a-z0-9$&+,/:;=?@.#%_\\|-]cCs|jjdd�|�S)z�Makes sure a link is fully encoded.  That is, if a ' ' shows up in
        the link, it will be rewritten to %20 (while not over-quoting
        % or other characters).cSsdt|jd��S)Nz%%%2xr)�ordr�)r�r2r2r3r;�sz%HTMLPage.clean_link.<locals>.<lambda>)�	_clean_re�sub)r1rtr2r2r3r��szHTMLPage.clean_link)N)TN)N)rGrHrIr�r4r��classmethodr�r�r�r�rr��propertyrpr�r��Ir�r�r2r2r2r3r��s
Ur�c@s eZdZd5dd�Zdd�Zdd�Zdd	�Zd
d�Zdd
�Zdd�Z	dd�Z
dd�Zdd�Ze
dd��Ze
dd��Ze
dd��Ze
dd��Zdd�Ze
d d!��Ze
d"d#��Zejd$�Ze
d%d&��Zejd'�Ze
d(d)��Zejd*�Ze
d+d,��Ze
d-d.��Ze
d/d0��Ze
d1d2��Ze
d3d4��Z dS)6r�NcCs@|jd�rt|�}||_||_|r&|nd|_||_|dk	|_dS)a�
        Object representing a parsed link from https://pypi.python.org/simple/*

        url:
            url of the resource pointed to (href of the link)
        comes_from:
            instance of HTMLPage where the link was found, or string.
        requires_python:
            String containing the `Requires-Python` metadata field, specified
            in PEP 345. This may be specified by a data-requires-python
            attribute in the HTML link tag, as described in PEP 503.
        z\\N)rTrrt�
comes_fromr�r�r�)r1rtrr�r�r2r2r3r4�s
z
Link.__init__cCs<|jrd|j}nd}|jr.d|j|j|fSt|j�SdS)Nz (requires-python:%s)r�z%s (from %s)%s)r�rrtr�)r1Zrpr2r2r3r��szLink.__str__cCsd|S)Nz	<Link %s>r2)r1r2r2r3r6�sz
Link.__repr__cCst|t�stS|j|jkS)N)rDr�rErt)r1r=r2r2r3r@�s
zLink.__eq__cCst|t�stS|j|jkS)N)rDr�rErt)r1r=r2r2r3rC�s
zLink.__ne__cCst|t�stS|j|jkS)N)rDr�rErt)r1r=r2r2r3r>�s
zLink.__lt__cCst|t�stS|j|jkS)N)rDr�rErt)r1r=r2r2r3r?�s
zLink.__le__cCst|t�stS|j|jkS)N)rDr�rErt)r1r=r2r2r3rB�s
zLink.__gt__cCst|t�stS|j|jkS)N)rDr�rErt)r1r=r2r2r3rA�s
zLink.__ge__cCs
t|j�S)N)r7rt)r1r2r2r3r8�sz
Link.__hash__cCsJtj|j�\}}}}}tj|jd��p(|}tj|�}|sFtd|j��|S)Nr�zURL %r produced no filename)rer�rtr��basename�rstrip�unquote�AssertionError)r1r�r�rVr�r2r2r3r��s

z
Link.filenamecCstj|j�dS)Nr)rer�rt)r1r2r2r3rg�szLink.schemecCstj|j�dS)Nr�)rer�rt)r1r2r2r3r��szLink.netloccCstjtj|j�d�S)Nr�)rerr�rt)r1r2r2r3rV�sz	Link.pathcCsttj|jjd���S)Nr�)rr�rrVr)r1r2r2r3r�sz
Link.splitextcCs|j�dS)Nr�)r)r1r2r2r3r��szLink.extcCs*tj|j�\}}}}}tj||||df�S)N)rer�rtZ
urlunsplit)r1rgr�rVr�r�r2r2r3�url_without_fragment�szLink.url_without_fragmentz[#&]egg=([^&]*)cCs |jj|j�}|sdS|jd�S)Nr�)�_egg_fragment_rer�rtr�)r1r�r2r2r3r��szLink.egg_fragmentz[#&]subdirectory=([^&]*)cCs |jj|j�}|sdS|jd�S)Nr�)�_subdirectory_fragment_rer�rtr�)r1r�r2r2r3�subdirectory_fragment�szLink.subdirectory_fragmentz2(sha1|sha224|sha384|sha256|sha512|md5)=([a-f0-9]+)cCs |jj|j�}|r|jd�SdS)Nr�)�_hash_rer�rtr�)r1r�r2r2r3r7s
z	Link.hashcCs |jj|j�}|r|jd�SdS)Nr�)rr�rtr�)r1r�r2r2r3�	hash_names
zLink.hash_namecCs$tj|jjdd�djdd�d�S)Nr�r�r�?)r�rrtr�)r1r2r2r3�show_urlsz
Link.show_urlcCs
|jtkS)N)r�r)r1r2r2r3r�sz
Link.is_wheelcCs ddlm}|j|jkrdSdS)z�
        Determines if this points to an actual artifact (e.g. a tarball) or if
        it points to an "abstract" thing like a path or a VCS location.
        r)�vcsFT)r�rrgZall_schemes)r1rr2r2r3�is_artifactszLink.is_artifact)NNN)!rGrHrIr4r�r6r@rCr>r?rBrAr8rr�rgr�rVrr�rr�r�r	r�r
rrr7r
rr�rr2r2r2r3r��s8



r�zno_binary only_binarycCs�|jd�}xFd|krP|j�|j�|jd�|d|jd�d�=d|krdSqWx:|D]2}|dkrn|j�qXt|�}|j|�|j|�qXWdS)N�,z:all:r�z:none:)r��clearr�r�r�discard)�value�targetr=�newr�r2r2r3r"5s




cCsjtddg�}||jkr"|jd�n@||jkr8|jd�n*d|jkrN|jd�nd|jkrb|jd�t|�S)Nr�r�z:all:)r[�only_binaryr�	no_binary�	frozenset)�fmt_ctlr�r�r2r2r3r�Hs




r�cCstd|j|j�dS)Nz:all:)r"rr)rr2r2r3�fmt_ctl_no_binaryUsrcCst|�tjdtdd�dS)Nzf--no-use-wheel is deprecated and will be removed in the future.  Please use --no-binary :all: instead.r�)�
stacklevel)rrmrnr)rr2r2r3�fmt_ctl_no_use_wheelZs
rr�zsupplied canonical formats)r$r%r%)r%r&r%)r%r'r%)r%r(r%)r)r%N)r*r%r%)Qr�Z
__future__rZloggingr��collectionsrrcr�rUr�rsr�rmZpip._vendor.six.moves.urllibrrerr�Z
pip.compatrZ	pip.utilsrrr	r
rZpip.utils.deprecationrZpip.utils.loggingr
Zpip.utils.packagingrZpip.exceptionsrrrrZpip.downloadrrrrZ	pip.wheelrrZpip.pep425tagsrZpip._vendorrrrZpip._vendor.packaging.versionr-Zpip._vendor.packaging.utilsrZpip._vendor.packagingrZpip._vendor.requests.exceptionsrZpip._vendor.distlib.compatr �__all__r�Z	getLoggerrGrh�objectr+r#r�rr�r�r�r!r"r�rrr�r2r2r2r3�<module>sl

)d*#



__pycache__/cmdoptions.cpython-36.pyc000064400000031164151733136150013573 0ustar003

�PfZ@�@sndZddlmZddlmZddlmZmZmZddl	Z	ddl
mZmZm
Z
mZddlmZddlmZmZdd	lmZd
d�Zdd
�Zd�dd�Zeedddddd�Zeedddddd�Zeeddddded�Zeeddd d!dd"d�Zeed#d$d%dd&d�Zeed'd(d)d!dd*d�Zeed+d,d-d.d/d0d1�Zeed2d3dded�Z eed4d5d6d7d8d9�Z!eed:d;d<d=d>d9�Z"eed?d@dAdBdCdDdEdF�Z#eedGdHd6d7ed9�Z$eedIdJd6d7ed9�Z%dKdL�Z&eedMdNd6d/dOdP�Z'eedQdRd6dd/dSdT�Z(eedUdVdWdXdYej)dZd[�Z*d\d]�Z+eed^d_ddd`d�Z,dadb�Z-dcdd�Z.eededfdded�Z/dgdh�Z0eedidfdjded�Z1dkdl�Z2eedmdndjded�Z3eedodpdddqd�Z4drds�Z5dtdu�Z6dvdw�Z7eedxdydzd{d|d}ed~d[�	Z8eedd�dd�ed�Z9eed�d�djd�d�d�Z:d�d��Z;d�d��Z<d�d��Z=d�d��Z>d�d��Z?eed�d�ed}d�d��Z@eed�d�djd�d�ZAeed�d�d�ddd�d�ZBeed�d�d�d�d�d}d�d1�ZCeed�d�dd�d�ZDeed�d�d�d�d�d��ZEeed�d�d�d�d�d��ZFeed�ddd�d��ZGeed�ddd�d��ZHeed�d�ddd�d�ZIeed�d�d�ded�ZJd�d��ZKeed�d�d�eKd�d�d��ZLeed�d�ddd�d�ZMd�eeeeeeee e!e"e#e$e%e&e0e'e(e@eAeIgd��ZNd�e*e+e,e-e4gd��ZOd�eOd�e.e/e1e2e3gd��ZPdS)�aD
shared options and groups

The principle here is to define options once, but *not* instantiate them
globally. One reason being that options with action='append' can carry state
between parses. pip parses general options twice internally, and shouldn't
pass on state. To be consistent, all options will follow this design.

�)�absolute_import)�partial)�OptionGroup�
SUPPRESS_HELP�OptionN)�
FormatControl�fmt_ctl_handle_mutual_exclude�fmt_ctl_no_binary�fmt_ctl_no_use_wheel)�PyPI)�USER_CACHE_DIR�
src_prefix)�
STRONG_HASHEScCs0t||d�}x|dD]}|j|��qW|S)z�
    Return an OptionGroup object
    group  -- assumed to be dict with 'name' and 'options' keys
    parser -- an optparse Parser
    �name�options)rZ
add_option)�group�parserZoption_group�option�r� /usr/lib/python3.6/cmdoptions.py�make_option_groupsrcCs|js|j}t|�dS)N)�	use_wheel�format_controlr
)r�controlrrr�resolve_wheel_no_use_binary$srcsP�dkr|��fdd�}dddg}tt||��rL|j}t|�tjddd	�dS)
z�Disable wheels if per-setup.py call options are set.

    :param options: The OptionParser options to update.
    :param check_options: The options to check, if not supplied defaults to
        options.
    Ncst�|d�S)N)�getattr)�n)�
check_optionsrr�getname4sz+check_install_build_global.<locals>.getnameZ
build_options�global_options�install_optionszeDisabling all use of wheels due to the use of --build-options / --global-options / --install-options.�)�
stacklevel)�any�maprr	�warnings�warn)rrr�namesrr)rr�check_install_build_global*s
r(z-hz--help�helpz
Show help.)�dest�actionr)z
--isolated�
isolated_mode�
store_trueFzSRun pip in an isolated mode, ignoring environment variables and user configuration.)r*r+�defaultr)z--require-virtualenvz--require-venvZrequire_venvz-vz	--verbose�verbose�countzDGive more output. Option is additive, and can be used up to 3 times.z-Vz	--version�versionzShow version and exit.z-qz--quiet�quietz�Give less output. Option is additive, and can be used up to 3 times (corresponding to WARNING, ERROR, and CRITICAL logging levels).z--logz
--log-filez--local-log�log�pathz Path to a verbose appending log.)r*�metavarr)z
--no-input�no_inputz--proxy�proxy�str�z<Specify a proxy in the form [user:passwd@]proxy.server:port.)r*�typer.r)z	--retries�retries�int�zRMaximum number of retries each connection should attempt (default %default times).z	--timeoutz--default-timeoutZsec�timeout�float�z2Set the socket timeout (default %default seconds).)r5r*r:r.r)z
--default-vcs�default_vcsz--skip-requirements-regex�skip_requirements_regexc
Cs"tddddddddggd	d
dd�S)
Nz--exists-action�
exists_actionZchoice�s�i�w�b�a�appendr+zYDefault action when a path already exists: (s)witch, (i)gnore, (w)ipe, (b)ackup, (a)bort.)r*r:�choicesr.r+r5r))rrrrrrC�srCz--cert�certzPath to alternate CA bundle.)r*r:r5r)z
--client-cert�client_certzkPath to SSL client certificate, a single file containing the private key and the certificate in PEM format.)r*r:r.r5r)z-iz--index-urlz
--pypi-url�	index_url�URLz�Base URL of Python Package Index (default %default). This should point to a repository compliant with PEP 503 (the simple repository API) or a local directory laid out in the same format.)r*r5r.r)cCstddddgdd�S)Nz--extra-index-urlZextra_index_urlsrNrIzmExtra URLs of package indexes to use in addition to --index-url. Should follow the same rules as --index-url.)r*r5r+r.r))rrrrr�extra_index_url�srOz
--no-index�no_indexzAIgnore package index (only looking at --find-links URLs instead).c	Cstddddgddd�S)Nz-fz--find-links�
find_linksrIZurlz�If a url or path to an html file, then parse for links to archives. If a local path or file:// url that's a directory, then look for archives in the directory listing.)r*r+r.r5r))rrrrrrQ�srQcCstdddgdtd�S)Nz--allow-external�allow_externalrI�PACKAGE)r*r+r.r5r))rrrrrrrRsrRz--allow-all-external�allow_all_externalcCstddddgdd�S)Nz--trusted-hostZ
trusted_hostsrIZHOSTNAMEzKMark this host as trusted, even though it does not have valid or any HTTPS.)r*r+r5r.r))rrrrr�trusted_hostsrUz--no-allow-externalZstore_falsec	Cstddddgdtd�S)Nz--allow-unverifiedz--allow-insecureZallow_unverifiedrIrS)r*r+r.r5r))rrrrrr�allow_unsafe3srVz--no-allow-insecureZallow_all_insecurez--process-dependency-links�process_dependency_linksz*Enable the processing of dependency links.c	Cstddddgddd�S)Nz-cz--constraint�constraintsrI�filez\Constrain versions using the given constraints file. This option can be used multiple times.)r*r+r.r5r))rrrrrrXRsrXc	Cstddddgddd�S)Nz-rz
--requirement�requirementsrIrYzQInstall from the given requirements file. This option can be used multiple times.)r*r+r.r5r))rrrrrrZ]srZc	Cstddddgddd�S)Nz-ez
--editableZ	editablesrIzpath/urlzkInstall a project in editable mode (i.e. setuptools "develop mode") from a local project path or a VCS url.)r*r+r.r5r))rrrrr�editablehsr[z--srcz--sourcez--source-dirz--source-directoryZsrc_dir�dirz�Directory to check out editable projects into. The default in a virtualenv is "<venv path>/src". The default for global installs is "<current dir>/src".z--use-wheelrTz--no-use-wheelz{Do not Find and prefer wheel archives when searching indexes and find-links locations. DEPRECATED in favour of --no-binary.cCst||j�S)zGet a format_control object.)rr*)�valuesrrrr�_get_format_control�sr^cCs"t|j|j�}t||j|j�dS)N)rr]r*r�	no_binary�only_binary)r�opt_str�valuer�existingrrr�_handle_no_binary�srdcCs"t|j|j�}t||j|j�dS)N)rr]r*rr`r_)rrarbrrcrrr�_handle_only_binary�srec	Cs tdddtdtt�t��dd�S)Nz--no-binaryr�callbackr8aRDo not use binary packages. Can be supplied multiple times, and each time adds to the existing value. Accepts either :all: to disable all binary packages, :none: to empty the set, or one or more package names with commas between them. Note that some packages are tricky to compile and may fail to install when this option is used on them.)r*r+rfr:r.r))rrdr�setrrrrr_�s
r_c	Cs tdddtdtt�t��dd�S)Nz
--only-binaryrrfr8aGDo not use source packages. Can be supplied multiple times, and each time adds to the existing value. Accepts either :all: to disable all source packages, :none: to empty the set, or one or more package names with commas between them. Packages without binary distributions will fail to install when this option is used on them.)r*r+rfr:r.r))rrerrgrrrrr`�s
r`z--cache-dir�	cache_dirzStore the cache data in <dir>.)r*r.r5r)z--no-cache-dirzDisable the cache.z	--no-depsz--no-dependenciesZignore_dependenciesz#Don't install package dependencies.z-bz--buildz--build-dirz--build-directory�	build_dirz/Directory to unpack packages into and build in.z--ignore-requires-python�ignore_requires_pythonz'Ignore the Requires-Python information.z--install-optionr rIra"Extra arguments to be supplied to the setup.py install command (use like --install-option="--install-scripts=/usr/local/bin"). Use multiple --install-option options to pass multiple options to setup.py install. If you are using an option with a directory path, be sure to use absolute path.)r*r+r5r)z--global-optionrzTExtra global options to be supplied to the setup.py call before the install command.z
--no-cleanz!Don't clean up build directories.)r+r.r)z--prezYInclude pre-release and development versions. By default, pip only finds stable versions.z--disable-pip-version-check�disable_pip_version_checkz{Don't periodically check PyPI to determine whether a new version of pip is available for download. Implied with --no-index.z-Zz--always-unzip�always_unzipc
Cs�|jjsi|j_y|jdd�\}}Wn"tk
rF|jd|�YnX|tkrh|jd|djt�f�|jjj|g�j|�dS)zkGiven a value spelled "algo:digest", append the digest to a list
    pointed to in a dict by the algo name.�:�zTArguments to %s must be a hash name followed by a value, like --hash=sha256:abcde...z&Allowed hash algorithms for %s are %s.z, N)	r]�hashes�split�
ValueError�errorr�join�
setdefaultrI)rrarbrZalgoZdigestrrr�_merge_hashsruz--hashrorf�stringzgVerify that the package's archive matches this hash before installing. Example: --hash=sha256:abcdef...)r*r+rfr:r)z--require-hashes�require_hashesz�Require a hash to check each requirement against, for repeatable installs. This option is implied when any package in a requirements file has a --hash option.zGeneral Options)rrzPackage Index Optionsz4Package Index Options (including deprecated options))N)Q�__doc__Z
__future__r�	functoolsrZoptparserrrr%Z	pip.indexrrr	r
Z
pip.modelsrZ
pip.locationsrr
Zpip.utils.hashesrrrr(Zhelp_r,Zrequire_virtualenvr/r1r2r3r6r7r;r>rArBrCrKrLZ
simple_urlrMrOrPrQrRrTrUZno_allow_externalrVZno_allow_unsaferWrXrZr[�srcrZno_use_wheelr^rdrer_r`rhZno_cacheZno_depsrirjr rZno_cleanZprerkrlru�hashrwZ
general_groupZnon_deprecated_index_groupZindex_grouprrrr�<module>	sr







operations/__pycache__/freeze.cpython-36.pyc000064400000005451151733136150015057 0ustar003

�PfJ�	@s�ddlmZddlZddlZddlZddlmZddlmZddl	m
Z
ddlmZddl
mZddlmZeje�Zddddddd	dff	d
d�ZdS)�)�absolute_importN)�InstallRequirement)�
COMMENT_RE)�get_installed_distributions)�
pkg_resources)�canonicalize_name)�RequirementParseErrorFc	cs�|pg}d}	|rtj|�j}	g}
x(tjD]}|jd�r(|
j|jd��q(Wx|D]}d|krP|
j|�qPWx|D]}d|VqpWi}
xXt	|f|d�D]F}yt
jj||
�}Wn$t
k
r�tjd|j�w�YnX||
|j<q�W|�rvt�}�x�|D�]v}t|���b}�xX|D�]N}|j��sL|j�jd��sL|	�r@|	|��sL|jd!��rr|j�}||k�r|j|�|V�q|jd��s�|jd��r�|jd��r�|dd�j�}n|td�d�j�jd�}tj||||d�}ntjtjd|�j�||d�}|j�stjd||j��tjd�nD|j|
k�r@tjd|tjd|�j��nt |
|j�j�V|
|j=�qWWdQRXq�WdVx<t!|
j"�dd�d �D]$}t#|j�|k�r�t |�j�V�q�WdS)"Nzdependency_links.txtz#egg=z-f %s)�
local_only�skip�	user_onlyzCould not parse requirement: %s�#�-r�
--requirement�-Z�--always-unzip�-f�--find-links�-i�--index-url�--pre�--trusted-host�--process-dependency-links�--extra-index-urlz-ez
--editable��=)�default_vcs�isolated�wheel_cache�)rrzWSkipping line in requirement file [%s] because it's not clear what it would install: %sz9  (add #egg=PackageName to the URL to avoid this warning)zDRequirement file [%s] contains %s, but that package is not installedz7## The following requirements were added by pip freeze:cSs
|jj�S)N)�name�lower)�x�r"�/usr/lib/python3.6/freeze.py�<lambda>�szfreeze.<locals>.<lambda>)�key)r
rrrrrrrrrrr)$�re�compile�searchrZworking_setZhas_metadata�extendZget_metadata_lines�appendr�pipZFrozenRequirementZ	from_distr�loggerZwarningZproject_namer�set�open�strip�
startswith�rstrip�add�len�lstriprZ
from_editableZ	from_liner�sub�info�str�sorted�valuesr)ZrequirementZ
find_linksr	rZ
skip_regexrrrr
Z
skip_matchZdependency_linksZdist�linkZ
installationsZreqZemitted_optionsZ
req_file_pathZreq_file�lineZline_reqZinstallationr"r"r#�freezes�






r<)Z
__future__rZloggingr&r+Zpip.reqrZpip.req.req_filerZ	pip.utilsrZpip._vendorrZpip._vendor.packaging.utilsrZpip._vendor.pkg_resourcesrZ	getLogger�__name__r,r<r"r"r"r#�<module>s 
operations/__pycache__/__init__.cpython-36.pyc000064400000000161151733136150015327 0ustar003

�Pf�@sdS)N�rrr�/usr/lib/python3.6/__init__.py�<module>soperations/__pycache__/check.cpython-36.opt-1.pyc000064400000002570151733136150015612 0ustar003

�Pf6�@sdd�Zdd�Zdd�ZdS)cCsbi}i}xP|D]H}d|j|jf}tt||��}|r<|||<tt||��}|r|||<qW||fS)Nz%s==%s)�project_name�version�list�get_missing_reqs�get_incompatible_reqs)�installed_distsZmissing_reqs_dictZincompatible_reqs_dict�dist�keyZmissing_reqsZincompatible_reqs�r	�/usr/lib/python3.6/check.py�check_requirementss

rccsLtdd�|D��}t�}x.|j�D]"}|jj�|kr"|j|�|Vq"WdS)z\Return all of the requirements of `dist` that aren't present in
    `installed_dists`.

    css|]}|jj�VqdS)N)r�lower)�.0�dr	r	r
�	<genexpr>sz#get_missing_reqs.<locals>.<genexpr>N)�set�requiresrr�add)rrZinstalled_namesZmissing_requirements�requirementr	r	r
rs
rccsTi}x|D]}|||j<q
Wx2|j�D]&}|j|j�}|r&||kr&||fVq&WdS)zyReturn all of the requirements of `dist` that are present in
    `installed_dists`, but have incompatible versions.

    N)rr�get)rrZinstalled_dists_by_nameZinstalled_distrZpresent_distr	r	r
r$s
rN)rrrr	r	r	r
�<module>soperations/__pycache__/__init__.cpython-36.opt-1.pyc000064400000000161151733136150016266 0ustar003

�Pf�@sdS)N�rrr�/usr/lib/python3.6/__init__.py�<module>soperations/__pycache__/freeze.cpython-36.opt-1.pyc000064400000005451151733136150016016 0ustar003

�PfJ�	@s�ddlmZddlZddlZddlZddlmZddlmZddl	m
Z
ddlmZddl
mZddlmZeje�Zddddddd	dff	d
d�ZdS)�)�absolute_importN)�InstallRequirement)�
COMMENT_RE)�get_installed_distributions)�
pkg_resources)�canonicalize_name)�RequirementParseErrorFc	cs�|pg}d}	|rtj|�j}	g}
x(tjD]}|jd�r(|
j|jd��q(Wx|D]}d|krP|
j|�qPWx|D]}d|VqpWi}
xXt	|f|d�D]F}yt
jj||
�}Wn$t
k
r�tjd|j�w�YnX||
|j<q�W|�rvt�}�x�|D�]v}t|���b}�xX|D�]N}|j��sL|j�jd��sL|	�r@|	|��sL|jd!��rr|j�}||k�r|j|�|V�q|jd��s�|jd��r�|jd��r�|dd�j�}n|td�d�j�jd�}tj||||d�}ntjtjd|�j�||d�}|j�stjd||j��tjd�nD|j|
k�r@tjd|tjd|�j��nt |
|j�j�V|
|j=�qWWdQRXq�WdVx<t!|
j"�dd�d �D]$}t#|j�|k�r�t |�j�V�q�WdS)"Nzdependency_links.txtz#egg=z-f %s)�
local_only�skip�	user_onlyzCould not parse requirement: %s�#�-r�
--requirement�-Z�--always-unzip�-f�--find-links�-i�--index-url�--pre�--trusted-host�--process-dependency-links�--extra-index-urlz-ez
--editable��=)�default_vcs�isolated�wheel_cache�)rrzWSkipping line in requirement file [%s] because it's not clear what it would install: %sz9  (add #egg=PackageName to the URL to avoid this warning)zDRequirement file [%s] contains %s, but that package is not installedz7## The following requirements were added by pip freeze:cSs
|jj�S)N)�name�lower)�x�r"�/usr/lib/python3.6/freeze.py�<lambda>�szfreeze.<locals>.<lambda>)�key)r
rrrrrrrrrrr)$�re�compile�searchrZworking_setZhas_metadata�extendZget_metadata_lines�appendr�pipZFrozenRequirementZ	from_distr�loggerZwarningZproject_namer�set�open�strip�
startswith�rstrip�add�len�lstriprZ
from_editableZ	from_liner�sub�info�str�sorted�valuesr)ZrequirementZ
find_linksr	rZ
skip_regexrrrr
Z
skip_matchZdependency_linksZdist�linkZ
installationsZreqZemitted_optionsZ
req_file_pathZreq_file�lineZline_reqZinstallationr"r"r#�freezes�






r<)Z
__future__rZloggingr&r+Zpip.reqrZpip.req.req_filerZ	pip.utilsrZpip._vendorrZpip._vendor.packaging.utilsrZpip._vendor.pkg_resourcesrZ	getLogger�__name__r,r<r"r"r"r#�<module>s 
operations/__pycache__/check.cpython-36.pyc000064400000002570151733136150014653 0ustar003

�Pf6�@sdd�Zdd�Zdd�ZdS)cCsbi}i}xP|D]H}d|j|jf}tt||��}|r<|||<tt||��}|r|||<qW||fS)Nz%s==%s)�project_name�version�list�get_missing_reqs�get_incompatible_reqs)�installed_distsZmissing_reqs_dictZincompatible_reqs_dict�dist�keyZmissing_reqsZincompatible_reqs�r	�/usr/lib/python3.6/check.py�check_requirementss

rccsLtdd�|D��}t�}x.|j�D]"}|jj�|kr"|j|�|Vq"WdS)z\Return all of the requirements of `dist` that aren't present in
    `installed_dists`.

    css|]}|jj�VqdS)N)r�lower)�.0�dr	r	r
�	<genexpr>sz#get_missing_reqs.<locals>.<genexpr>N)�set�requiresrr�add)rrZinstalled_namesZmissing_requirements�requirementr	r	r
rs
rccsTi}x|D]}|||j<q
Wx2|j�D]&}|j|j�}|r&||kr&||fVq&WdS)zyReturn all of the requirements of `dist` that are present in
    `installed_dists`, but have incompatible versions.

    N)rr�get)rrZinstalled_dists_by_nameZinstalled_distrZpresent_distr	r	r
r$s
rN)rrrr	r	r	r
�<module>soperations/freeze.py000064400000012112151733136150010563 0ustar00from __future__ import absolute_import

import logging
import re

import pip
from pip.req import InstallRequirement
from pip.req.req_file import COMMENT_RE
from pip.utils import get_installed_distributions
from pip._vendor import pkg_resources
from pip._vendor.packaging.utils import canonicalize_name
from pip._vendor.pkg_resources import RequirementParseError


logger = logging.getLogger(__name__)


def freeze(
        requirement=None,
        find_links=None, local_only=None, user_only=None, skip_regex=None,
        default_vcs=None,
        isolated=False,
        wheel_cache=None,
        skip=()):
    """Yield the lines of a ``pip freeze``-style requirements listing.

    :param requirement: optional list of requirements-file paths; when
        given, installed packages named in those files are emitted first,
        in file order, followed by any remaining installed packages under
        an "added by pip freeze" banner.
    :param find_links: extra find-links URLs; each is emitted as a
        ``-f`` line and, when it carries ``#egg=``, also treated as a
        dependency link.
    :param local_only: forwarded to ``get_installed_distributions``.
    :param user_only: forwarded to ``get_installed_distributions``.
    :param skip_regex: regex source; requirements-file lines matching it
        are passed through verbatim instead of being parsed.
    :param default_vcs: fallback VCS scheme for editable requirements.
    :param isolated: forwarded to the ``InstallRequirement`` constructors.
    :param wheel_cache: forwarded to the ``InstallRequirement`` constructors.
    :param skip: canonicalized project names to omit from the trailing
        "added by pip freeze" section (entries matched from requirements
        files are NOT filtered by this).
    """
    find_links = find_links or []
    skip_match = None

    if skip_regex:
        # Pre-bind the compiled pattern's search method; used as a predicate.
        skip_match = re.compile(skip_regex).search

    dependency_links = []

    # Gather dependency links advertised by already-installed distributions
    # plus any find-links that pin a specific egg.
    for dist in pkg_resources.working_set:
        if dist.has_metadata('dependency_links.txt'):
            dependency_links.extend(
                dist.get_metadata_lines('dependency_links.txt')
            )
    for link in find_links:
        if '#egg=' in link:
            dependency_links.append(link)
    for link in find_links:
        yield '-f %s' % link
    # Map requirement name -> FrozenRequirement for everything installed.
    # Note: skip=() here so nothing is filtered at this stage; `skip` is
    # only applied to the trailing section at the bottom of this function.
    installations = {}
    for dist in get_installed_distributions(local_only=local_only,
                                            skip=(),
                                            user_only=user_only):
        try:
            req = pip.FrozenRequirement.from_dist(
                dist,
                dependency_links
            )
        except RequirementParseError:
            # Best-effort: a malformed installed dist is reported, not fatal.
            logger.warning(
                "Could not parse requirement: %s",
                dist.project_name
            )
            continue
        installations[req.name] = req

    if requirement:
        # the options that don't get turned into an InstallRequirement
        # should only be emitted once, even if the same option is in multiple
        # requirements files, so we need to keep track of what has been emitted
        # so that we don't emit it again if it's seen again
        emitted_options = set()
        for req_file_path in requirement:
            with open(req_file_path) as req_file:
                for line in req_file:
                    # Pass through (deduplicated) any line that is blank, a
                    # comment, skip_regex-matched, or a pip option rather
                    # than an actual requirement.
                    if (not line.strip() or
                            line.strip().startswith('#') or
                            (skip_match and skip_match(line)) or
                            line.startswith((
                                '-r', '--requirement',
                                '-Z', '--always-unzip',
                                '-f', '--find-links',
                                '-i', '--index-url',
                                '--pre',
                                '--trusted-host',
                                '--process-dependency-links',
                                '--extra-index-url'))):
                        line = line.rstrip()
                        if line not in emitted_options:
                            emitted_options.add(line)
                            yield line
                        continue

                    # Parse the line just far enough to learn the project
                    # name, so it can be matched against `installations`.
                    if line.startswith('-e') or line.startswith('--editable'):
                        if line.startswith('-e'):
                            line = line[2:].strip()
                        else:
                            # Also tolerate the '--editable=<url>' form.
                            line = line[len('--editable'):].strip().lstrip('=')
                        line_req = InstallRequirement.from_editable(
                            line,
                            default_vcs=default_vcs,
                            isolated=isolated,
                            wheel_cache=wheel_cache,
                        )
                    else:
                        line_req = InstallRequirement.from_line(
                            COMMENT_RE.sub('', line).strip(),
                            isolated=isolated,
                            wheel_cache=wheel_cache,
                        )

                    if not line_req.name:
                        logger.info(
                            "Skipping line in requirement file [%s] because "
                            "it's not clear what it would install: %s",
                            req_file_path, line.strip(),
                        )
                        logger.info(
                            "  (add #egg=PackageName to the URL to avoid"
                            " this warning)"
                        )
                    elif line_req.name not in installations:
                        logger.warning(
                            "Requirement file [%s] contains %s, but that "
                            "package is not installed",
                            req_file_path, COMMENT_RE.sub('', line).strip(),
                        )
                    else:
                        # Emit the frozen form and remove it so it is not
                        # repeated in the trailing section below.
                        yield str(installations[line_req.name]).rstrip()
                        del installations[line_req.name]

        yield(
            '## The following requirements were added by '
            'pip freeze:'
        )
    # Whatever was not claimed by a requirements file comes last, sorted
    # case-insensitively by name; `skip` applies only here.
    for installation in sorted(
            installations.values(), key=lambda x: x.name.lower()):
        if canonicalize_name(installation.name) not in skip:
            yield str(installation).rstrip()
operations/__init__.py000064400000000000151733136160011034 0ustar00operations/check.py000064400000003066151733136160010371 0ustar00

def check_requirements(installed_dists):
    """Check every installed distribution for dependency problems.

    Returns a two-tuple of dicts, both keyed by '<name>==<version>':
    the first maps each distribution to its requirements that are not
    installed at all; the second maps it to (requirement, installed_dist)
    pairs whose installed version fails to satisfy the requirement.
    Distributions with no problems do not appear in either dict.
    """
    missing = {}
    incompatible = {}

    for dist in installed_dists:
        label = '%s==%s' % (dist.project_name, dist.version)

        not_installed = list(get_missing_reqs(dist, installed_dists))
        if not_installed:
            missing[label] = not_installed

        bad_versions = list(get_incompatible_reqs(dist, installed_dists))
        if bad_versions:
            incompatible[label] = bad_versions

    return (missing, incompatible)


def get_missing_reqs(dist, installed_dists):
    """Return all of the requirements of `dist` that aren't present in
    `installed_dists`.

    Yields one requirement object per declared dependency whose project
    name (compared case-insensitively) is not among `installed_dists`.

    Fix: the original accumulated yielded requirements into a local
    `missing_requirements` set that was never read — dead code that also
    forced requirement objects to be hashable for no benefit. The set is
    removed; the yielded sequence is unchanged.
    """
    # Case-insensitive membership test over the installed project names.
    installed_names = set(d.project_name.lower() for d in installed_dists)

    for requirement in dist.requires():
        if requirement.project_name.lower() not in installed_names:
            yield requirement


def get_incompatible_reqs(dist, installed_dists):
    """Return all of the requirements of `dist` that are present in
    `installed_dists`, but have incompatible versions.

    Yields (requirement, installed_distribution) pairs; a pair is emitted
    when a distribution with the required project name is installed but
    does not satisfy the requirement's version specifier.
    """
    # Index the installed distributions by exact project name; on duplicate
    # names the last one wins, matching a plain assignment loop.
    by_name = {d.project_name: d for d in installed_dists}

    for req in dist.requires():
        candidate = by_name.get(req.project_name)
        # `candidate not in req` uses the requirement's version containment
        # check to detect an incompatible installed version.
        if candidate and candidate not in req:
            yield (req, candidate)
status_codes.py000064400000000234151733136160007623 0ustar00from __future__ import absolute_import

# Process exit status codes shared by pip commands; the constant names
# describe the terminating condition each code reports.
SUCCESS = 0
ERROR = 1
UNKNOWN_ERROR = 2
VIRTUALENV_NOT_FOUND = 3
PREVIOUS_BUILD_DIR_ERROR = 4
# NOTE(review): 23 (rather than 5) presumably mirrors an external
# "nothing matched" exit-code convention -- confirm against callers.
NO_MATCHES_FOUND = 23
compat/__pycache__/__init__.cpython-36.pyc000064400000007541151733136160014441 0ustar003

�Pf@�0@s dZddlmZmZddlZddlZddlmZyddlm	Z
Wn ek
r`ddlm	Z
YnXyddl
mZWn ek
r�ddlmZYnXyddlZWnRek
r�yddlmZWn,ek
r�ddlZeje_eje_YnXYnXyddlZdd	�ZWn*ek
�r2dd
lmZdd	�ZYnXdd
ddddddddg
Zejd.k�rjdZddlmZn$ddlZe ed�Ze�r�ejZndZejd/k�r�dd�Z!d0dd�Z"ndd�Z!d1dd�Z"d d!�Z#d"d�Z$d#d$�Z%d2Z&ejd3k�r�e&d47Z&ej'j(d*��pej'd+k�oej)d,kZ*d-d�Z+dS)5zKStuff that differs in different Python versions and platform
distributions.�)�absolute_import�divisionN)�	text_type)�
dictConfig)�OrderedDict)�	ipaddresscCs"tjd�tjd�g}ttt|��S)N�stdlib�
platstdlib)�	sysconfig�get_path�set�filter�bool)�paths�r�/usr/lib/python3.6/__init__.py�
get_stdlib"sr)r
cCs(tjdd�tjddd�g}ttt|��S)NT)�standard_lib)rZ
plat_specific)r
Zget_python_librr
r)rrrrr+s
�logging_dictConfigr�uses_pycache�console_to_str�
native_str�get_path_uid�stdlib_pkgs�WINDOWS�samefiler��T)�cache_from_sourcercCs.y|jtjj�Stk
r(|jd�SXdS)N�utf_8)�decode�sys�
__stdout__�encoding�UnicodeDecodeError)�srrrrGsFcCs"t|t�r|jd|rdnd�S|S)Nzutf-8�replace�strict)�
isinstance�bytesr )r%r&rrrrMs
cCs|S)Nr)r%rrrrSscCst|t�r|jd�S|S)Nzutf-8)r(r�encode)r%r&rrrrVs

cCs<t|d�r|j�S|j|j|jddd}|dSdS)N�
total_seconds�i�
�i@Bi@B)�hasattrr+ZmicrosecondsZsecondsZdays)Ztd�valrrrr+]s
r+cCs`ttd�r6tj|tjtjB�}tj|�j}tj|�n&tjj	|�sPtj
|�j}ntd|��|S)a)
    Return path's uid.

    Does not follow symlinks:
        https://github.com/pypa/pip/pull/935#discussion_r5307003

    Placed this function in compat due to differences on AIX and
    Jython, that should eventually go away.

    :raises OSError: When path is a symlink or can't be read.
    �
O_NOFOLLOWz1%s is a symlink; Will not return uid for symlinks)r/�os�open�O_RDONLYr1�fstat�st_uid�close�path�islink�stat�OSError)r8�fdZfile_uidrrrres

cCs0tjj|�}|jd�r,|jd�r,|dd�}|S)zl
    Expand ~ and ~user constructions.

    Includes a workaround for http://bugs.python.org/issue14768
    z~/z//�N)r2r8�
expanduser�
startswith)r8Zexpandedrrrr>�sr>�python�wsgiref���argparse�winZcli�ntcCsNttjd�rtjj||�Stjjtjj|��}tjjtjj|��}||kSdS)z>Provide an alternative for os.path.samefile on Windows/Python2rN)r/r2r8r�normcase�abspath)Zfile1Zfile2Zpath1Zpath2rrrr�s
)rr)r)F)F)r@rA)rBrC)rD),�__doc__Z
__future__rrr2r!Zpip._vendor.sixrZlogging.configrr�ImportErrorZpip.compat.dictconfig�collectionsrZpip._vendor.ordereddictrZpip._vendorZipaddrZ	IPAddressZ
ip_addressZ	IPNetworkZ
ip_networkr
rZ	distutils�__all__�version_inforZimportlib.utilrZimpr/rrr+rr>r�platformr?�namerrrrrr�<module>sh	


compat/__pycache__/__init__.cpython-36.opt-1.pyc000064400000007541151733136160015400 0ustar003

�Pf@�0@s dZddlmZmZddlZddlZddlmZyddlm	Z
Wn ek
r`ddlm	Z
YnXyddl
mZWn ek
r�ddlmZYnXyddlZWnRek
r�yddlmZWn,ek
r�ddlZeje_eje_YnXYnXyddlZdd	�ZWn*ek
�r2dd
lmZdd	�ZYnXdd
ddddddddg
Zejd.k�rjdZddlmZn$ddlZe ed�Ze�r�ejZndZejd/k�r�dd�Z!d0dd�Z"ndd�Z!d1dd�Z"d d!�Z#d"d�Z$d#d$�Z%d2Z&ejd3k�r�e&d47Z&ej'j(d*��pej'd+k�oej)d,kZ*d-d�Z+dS)5zKStuff that differs in different Python versions and platform
distributions.�)�absolute_import�divisionN)�	text_type)�
dictConfig)�OrderedDict)�	ipaddresscCs"tjd�tjd�g}ttt|��S)N�stdlib�
platstdlib)�	sysconfig�get_path�set�filter�bool)�paths�r�/usr/lib/python3.6/__init__.py�
get_stdlib"sr)r
cCs(tjdd�tjddd�g}ttt|��S)NT)�standard_lib)rZ
plat_specific)r
Zget_python_librr
r)rrrrr+s
�logging_dictConfigr�uses_pycache�console_to_str�
native_str�get_path_uid�stdlib_pkgs�WINDOWS�samefiler��T)�cache_from_sourcercCs.y|jtjj�Stk
r(|jd�SXdS)N�utf_8)�decode�sys�
__stdout__�encoding�UnicodeDecodeError)�srrrrGsFcCs"t|t�r|jd|rdnd�S|S)Nzutf-8�replace�strict)�
isinstance�bytesr )r%r&rrrrMs
cCs|S)Nr)r%rrrrSscCst|t�r|jd�S|S)Nzutf-8)r(r�encode)r%r&rrrrVs

cCs<t|d�r|j�S|j|j|jddd}|dSdS)N�
total_seconds�i�
�i@Bi@B)�hasattrr+ZmicrosecondsZsecondsZdays)Ztd�valrrrr+]s
r+cCs`ttd�r6tj|tjtjB�}tj|�j}tj|�n&tjj	|�sPtj
|�j}ntd|��|S)a)
    Return path's uid.

    Does not follow symlinks:
        https://github.com/pypa/pip/pull/935#discussion_r5307003

    Placed this function in compat due to differences on AIX and
    Jython, that should eventually go away.

    :raises OSError: When path is a symlink or can't be read.
    �
O_NOFOLLOWz1%s is a symlink; Will not return uid for symlinks)r/�os�open�O_RDONLYr1�fstat�st_uid�close�path�islink�stat�OSError)r8�fdZfile_uidrrrres

cCs0tjj|�}|jd�r,|jd�r,|dd�}|S)zl
    Expand ~ and ~user constructions.

    Includes a workaround for http://bugs.python.org/issue14768
    z~/z//�N)r2r8�
expanduser�
startswith)r8Zexpandedrrrr>�sr>�python�wsgiref���argparse�winZcli�ntcCsNttjd�rtjj||�Stjjtjj|��}tjjtjj|��}||kSdS)z>Provide an alternative for os.path.samefile on Windows/Python2rN)r/r2r8r�normcase�abspath)Zfile1Zfile2Zpath1Zpath2rrrr�s
)rr)r)F)F)r@rA)rBrC)rD),�__doc__Z
__future__rrr2r!Zpip._vendor.sixrZlogging.configrr�ImportErrorZpip.compat.dictconfig�collectionsrZpip._vendor.ordereddictrZpip._vendorZipaddrZ	IPAddressZ
ip_addressZ	IPNetworkZ
ip_networkr
rZ	distutils�__all__�version_inforZimportlib.utilrZimpr/rrr+rr>r�platformr?�namerrrrrr�<module>sh	


compat/__pycache__/dictconfig.cpython-36.pyc000064400000032420151733136160015005 0ustar003

�Pf8Z�@s�ddlmZddlZddlZddlZddlZddlmZej	dej
�Zdd�Zyddlm
Z
Wnek
rzdd	�Z
YnXGd
d�de�ZGdd
�d
e�ZGdd�de�ZGdd�de�ZGdd�de�ZeZdd�ZdS)�)�absolute_importN)�sixz^[a-z_][a-z0-9_]*$cCstj|�}|std|��dS)Nz!Not a valid Python identifier: %rT)�
IDENTIFIER�match�
ValueError)�s�m�r	� /usr/lib/python3.6/dictconfig.py�valid_ident"s
r)�_checkLevelcCsNt|t�r|}n:t|�|kr>|tjkr2td|��tj|}ntd|��|S)NzUnknown level: %rz*Level not an integer or a valid string: %r)�
isinstance�int�str�loggingZ_levelNamesr�	TypeError)�level�rvr	r	r
r.s

rc@s,eZdZdZdd�Zd	dd�Zd
dd�ZdS)�ConvertingDictz A converting dictionary wrapper.cCsJtj||�}|jj|�}||k	rF|||<t|�tttfkrF||_||_	|S)N)
�dict�__getitem__�configurator�convert�typer�ConvertingList�ConvertingTuple�parent�key)�selfr�value�resultr	r	r
rGs
zConvertingDict.__getitem__NcCsLtj|||�}|jj|�}||k	rH|||<t|�tttfkrH||_||_	|S)N)
r�getrrrrrrrr)rr�defaultrr r	r	r
r!Ss
zConvertingDict.getcCsDtj|||�}|jj|�}||k	r@t|�tttfkr@||_||_	|S)N)
r�poprrrrrrrr)rrr"rr r	r	r
r#_s
zConvertingDict.pop)N)N)�__name__�
__module__�__qualname__�__doc__rr!r#r	r	r	r
rDs
rc@s"eZdZdZdd�Zd	dd�ZdS)
rzA converting list wrapper.cCsJtj||�}|jj|�}||k	rF|||<t|�tttfkrF||_||_	|S)N)
�listrrrrrrrrr)rrrr r	r	r
rls
zConvertingList.__getitem__�cCs<tj||�}|jj|�}||k	r8t|�tttfkr8||_|S)N)	r(r#rrrrrrr)r�idxrr r	r	r
r#xs
zConvertingList.popN���)r+)r$r%r&r'rr#r	r	r	r
rjsrc@seZdZdZdd�ZdS)rzA converting tuple wrapper.cCsBtj||�}|jj|�}||k	r>t|�tttfkr>||_||_	|S)N)
�tuplerrrrrrrrr)rrrr r	r	r
r�s
zConvertingTuple.__getitem__N)r$r%r&r'rr	r	r	r
r�src@s�eZdZdZejd�Zejd�Zejd�Zejd�Z	ejd�Z
ddd	�ZeZ
d
d�Zdd
�Zdd�Zdd�Zdd�Zdd�Zdd�ZdS)�BaseConfiguratorzI
    The configurator base class which defines some useful defaults.
    z%^(?P<prefix>[a-z]+)://(?P<suffix>.*)$z^\s*(\w+)\s*z^\.\s*(\w+)\s*z^\[\s*(\w+)\s*\]\s*z^\d+$�ext_convert�cfg_convert)ZextZcfgcCst|�|_||j_dS)N)r�configr)rr0r	r	r
�__init__�s
zBaseConfigurator.__init__c	Cs�|jd�}|jd�}y`|j|�}xP|D]H}|d|7}yt||�}Wq&tk
rl|j|�t||�}Yq&Xq&W|Stk
r�tj�dd�\}}td||f�}|||_	|_
|�YnXdS)z`
        Resolve strings to objects using standard import and attribute
        syntax.
        �.rr)NzCannot resolve %r: %s)�splitr#�importer�getattr�AttributeError�ImportError�sys�exc_infor�	__cause__�
__traceback__)	rr�nameZused�foundZfrag�e�tb�vr	r	r
�resolve�s"




zBaseConfigurator.resolvecCs
|j|�S)z*Default converter for the ext:// protocol.)rA)rrr	r	r
r.�szBaseConfigurator.ext_convertcCs|}|jj|�}|dkr&td|��n�||j�d�}|j|j�d}x�|r�|jj|�}|rp||j�d}nd|jj|�}|r�|j�d}|jj|�s�||}n2yt	|�}||}Wnt
k
r�||}YnX|r�||j�d�}qJtd||f��qJW|S)z*Default converter for the cfg:// protocol.NzUnable to convert %rrzUnable to convert %r at %r)�WORD_PATTERNrr�endr0�groups�DOT_PATTERN�
INDEX_PATTERN�
DIGIT_PATTERNrr)rr�restr�dr*�nr	r	r
r/�s2
zBaseConfigurator.cfg_convertcCs�t|t�r&t|t�r&t|�}||_n�t|t�rLt|t�rLt|�}||_n~t|t�rrt|t�rrt|�}||_nXt|tj	�r�|j
j|�}|r�|j�}|d}|j
j|d�}|r�|d}t||�}||�}|S)z�
        Convert values to an appropriate type. dicts, lists and tuples are
        replaced by their converting alternatives. Strings are checked to
        see if they have a conversion format and are converted if they do.
        �prefixN�suffix)r
rrrrr(rr,rZstring_types�CONVERT_PATTERNr�	groupdict�value_convertersr!r5)rrrrIrKZ	converterrLr	r	r
r�s*

zBaseConfigurator.convertcs��jd�}t|d�r8ttd�r8t|�tjkr8|j|�}�jdd�}t�fdd��D��}|f|�}|r�x |j�D]\}}t|||�qrW|S)z1Configure an object with a user-supplied factory.z()�__call__�	ClassTyper2Nc3s"|]}t|�r|�|fVqdS)N)r)�.0�k)r0r	r
�	<genexpr>sz4BaseConfigurator.configure_custom.<locals>.<genexpr>)	r#�hasattr�typesrrQrAr�items�setattr)rr0�cZprops�kwargsr r<rr	)r0r
�configure_customs
$

z!BaseConfigurator.configure_customcCst|t�rt|�}|S)z0Utility function which converts lists to tuples.)r
r(r,)rrr	r	r
�as_tuples
zBaseConfigurator.as_tupleN)r$r%r&r'�re�compilerMrBrErFrGrO�
__import__r4r1rAr.r/rr[r\r	r	r	r
r-�s 




"r-c@s^eZdZdZdd�Zdd�Zdd�Zdd	�Zd
d�Zdd
�Z	ddd�Z
ddd�Zddd�ZdS)�DictConfiguratorz]
    Configure logging using a dictionary-like object to describe the
    configuration.
    cCs�|j}d|krtd��|ddkr2td|d��|jdd�}i}tj��z||�r�|jd|�}tjdd	�dk�rx�|D]�}|tjkr�td|��qzy4tj|}||}|jdd�}|r�|j	t
|��Wqztk
�r�}	ztd
||	f��WYdd}	~	XqzXqzW|jd|�}
xZ|
D]R}y|j||
|d�Wn4tk
�rd}	ztd||	f��WYdd}	~	XnX�qW|jdd�}|�r�y|j
|d�Wn0tk
�r�}	ztd|	��WYdd}	~	XnX�n|jdd�}tjj�tjdd�=|jd|�}
xZ|
D]R}y|j|
|�|
|<Wn4tk
�rF}	ztd||	f��WYdd}	~	XnX�q�W|jd|�}xZ|D]R}y|j||�||<Wn4tk
�r�}	ztd||	f��WYdd}	~	XnX�q`W|jd|�}xht|�D]\}y |j||�}||_|||<Wn4tk
�r$}	ztd
||	f��WYdd}	~	XnX�q�Wtj}t|jj�}|j�g}|jd|�}
x�|
D]�}||k�r�|j|�}|d}t|�}t|�}|d}x<||k�r�||d|�|k�r�|j||�|d}�q�W|j|�y|j||
|�Wn4tk
�r$}	ztd||	f��WYdd}	~	XnX�q\WxF|D]>}|jj|}||k�rbtj|_g|_ d|_!n|�r2d|_"�q2W|jdd�}|�r�y|j
|�Wn0tk
�r�}	ztd|	��WYdd}	~	XnXWdtj#�XdS)zDo the configuration.�versionz$dictionary doesn't specify a versionr)zUnsupported version: %s�incrementalF�handlersN��zNo handler found with name %rrz"Unable to configure handler %r: %s�loggersTz!Unable to configure logger %r: %s�rootz#Unable to configure root logger: %sZdisable_existing_loggers�
formattersz$Unable to configure formatter %r: %s�filtersz!Unable to configure filter %r: %sr2)rdre)$r0rr#rZ_acquireLockr!r8�version_infoZ	_handlers�setLevelr�
StandardError�configure_logger�configure_root�clearZ_handlerList�configure_formatter�configure_filter�sorted�configure_handlerr<rgr(ZmanagerZ
loggerDict�sort�index�len�append�removeZNOTSETrrc�	propagateZdisabledZ_releaseLock)rr0rbZ
EMPTY_DICTrcr<ZhandlerZhandler_configrr>rfrgZdisable_existingrhriZexistingZ
child_loggers�iZprefixedZpflenZnum_existing�log�loggerr	r	r
�	configures�



"
$

$
$$



$

zDictConfigurator.configurecCs�d|krr|d}y|j|�}Wq�tk
rn}z4dt|�kr>�|jd�|d<||d<|j|�}WYdd}~Xq�Xn$|jdd�}|jdd�}tj||�}|S)z(Configure a formatter from a dictionary.z()z'format'�format�fmtNZdatefmt)r[rrr#r!rZ	Formatter)rr0�factoryr �terZdfmtr	r	r
rp�sz$DictConfigurator.configure_formattercCs.d|kr|j|�}n|jdd�}tj|�}|S)z%Configure a filter from a dictionary.z()r<�)r[r!r�Filter)rr0r r<r	r	r
rq�s

z!DictConfigurator.configure_filtercCs^xX|D]P}y|j|jd|�Wqtk
rT}ztd||f��WYdd}~XqXqWdS)z/Add filters to a filterer from a list of names.rizUnable to add filter %r: %sN)Z	addFilterr0rlr)rZfiltererri�fr>r	r	r
�add_filters�s

zDictConfigurator.add_filtersc-s@�jdd�}|rVy|jd|}Wn2tk
rT}ztd||f��WYdd}~XnX�jdd�}�jdd�}d�kr��jd�}t|d�r�ttd	�r�t|�tjkr�|j|�}|}n�|j�jd
��}t	|t
jj�o�d�k�r2y|jd�d�d<Wn8tk
�r.}ztd
�d|f��WYdd}~XnXnZt	|t
jj
��r`d�k�r`|j�d��d<n,t	|t
jj��r�d�k�r�|j�d��d<|}t�fdd��D��}	y|f|	�}
WnLtk
�r}z.dt|�k�r؂|	jd�|	d<|f|	�}
WYdd}~XnX|�r|
j|�|dk	�r*|
jt|��|�r<|j|
|�|
S)z&Configure a handler from a dictionary.�	formatterNrhzUnable to set formatter %r: %srriz()rPrQ�class�targetrcz#Unable to set target handler %r: %sZmailhostZaddressc3s"|]}t|�r|�|fVqdS)N)r)rRrS)r0r	r
rT�sz5DictConfigurator.configure_handler.<locals>.<genexpr>z'stream'�streamZstrm)r#r0rlrrUrVrrQrA�
issubclassrrcZ
MemoryHandlerZSMTPHandlerr\Z
SysLogHandlerrrrZsetFormatterrkrr�)rr0r�r>rrirYr��klassrZr r�r	)r0r
rs�sX
$

$



z"DictConfigurator.configure_handlercCs^xX|D]P}y|j|jd|�Wqtk
rT}ztd||f��WYdd}~XqXqWdS)z.Add handlers to a logger from a list of names.rczUnable to add handler %r: %sN)Z
addHandlerr0rlr)rr|rc�hr>r	r	r
�add_handlers	s

zDictConfigurator.add_handlersFcCs�|jdd�}|dk	r"|jt|��|s�x |jdd�D]}|j|�q6W|jdd�}|rd|j||�|jdd�}|r�|j||�dS)zU
        Perform configuration which is common to root and non-root loggers.
        rNrcri)r!rkrrcZ
removeHandlerr�r�)rr|r0rbrr�rcrir	r	r
�common_logger_configsz%DictConfigurator.common_logger_configcCs6tj|�}|j|||�|jdd�}|dk	r2||_dS)z.Configure a non-root logger from a dictionary.ryN)r�	getLoggerr�r!ry)rr<r0rbr|ryr	r	r
rm#s

z!DictConfigurator.configure_loggercCstj�}|j|||�dS)z*Configure a root logger from a dictionary.N)rr�r�)rr0rbrgr	r	r
rn+szDictConfigurator.configure_rootN)F)F)F)
r$r%r&r'r}rprqr�rsr�r�rmrnr	r	r	r
r`s	5

r`cCst|�j�dS)z%Configure logging using a dictionary.N)�dictConfigClassr})r0r	r	r
�
dictConfig3sr�)Z
__future__rZlogging.handlersrr]r8rVZpip._vendorrr^�Irrrr7rrr(rr,r�objectr-r`r�r�r	r	r	r
�<module>s*	&
compat/__pycache__/dictconfig.cpython-36.opt-1.pyc000064400000032420151733136160015744 0ustar003

�Pf8Z�@s�ddlmZddlZddlZddlZddlZddlmZej	dej
�Zdd�Zyddlm
Z
Wnek
rzdd	�Z
YnXGd
d�de�ZGdd
�d
e�ZGdd�de�ZGdd�de�ZGdd�de�ZeZdd�ZdS)�)�absolute_importN)�sixz^[a-z_][a-z0-9_]*$cCstj|�}|std|��dS)Nz!Not a valid Python identifier: %rT)�
IDENTIFIER�match�
ValueError)�s�m�r	� /usr/lib/python3.6/dictconfig.py�valid_ident"s
r)�_checkLevelcCsNt|t�r|}n:t|�|kr>|tjkr2td|��tj|}ntd|��|S)NzUnknown level: %rz*Level not an integer or a valid string: %r)�
isinstance�int�str�loggingZ_levelNamesr�	TypeError)�level�rvr	r	r
r.s

rc@s,eZdZdZdd�Zd	dd�Zd
dd�ZdS)�ConvertingDictz A converting dictionary wrapper.cCsJtj||�}|jj|�}||k	rF|||<t|�tttfkrF||_||_	|S)N)
�dict�__getitem__�configurator�convert�typer�ConvertingList�ConvertingTuple�parent�key)�selfr�value�resultr	r	r
rGs
zConvertingDict.__getitem__NcCsLtj|||�}|jj|�}||k	rH|||<t|�tttfkrH||_||_	|S)N)
r�getrrrrrrrr)rr�defaultrr r	r	r
r!Ss
zConvertingDict.getcCsDtj|||�}|jj|�}||k	r@t|�tttfkr@||_||_	|S)N)
r�poprrrrrrrr)rrr"rr r	r	r
r#_s
zConvertingDict.pop)N)N)�__name__�
__module__�__qualname__�__doc__rr!r#r	r	r	r
rDs
rc@s"eZdZdZdd�Zd	dd�ZdS)
rzA converting list wrapper.cCsJtj||�}|jj|�}||k	rF|||<t|�tttfkrF||_||_	|S)N)
�listrrrrrrrrr)rrrr r	r	r
rls
zConvertingList.__getitem__�cCs<tj||�}|jj|�}||k	r8t|�tttfkr8||_|S)N)	r(r#rrrrrrr)r�idxrr r	r	r
r#xs
zConvertingList.popN���)r+)r$r%r&r'rr#r	r	r	r
rjsrc@seZdZdZdd�ZdS)rzA converting tuple wrapper.cCsBtj||�}|jj|�}||k	r>t|�tttfkr>||_||_	|S)N)
�tuplerrrrrrrrr)rrrr r	r	r
r�s
zConvertingTuple.__getitem__N)r$r%r&r'rr	r	r	r
r�src@s�eZdZdZejd�Zejd�Zejd�Zejd�Z	ejd�Z
ddd	�ZeZ
d
d�Zdd
�Zdd�Zdd�Zdd�Zdd�Zdd�ZdS)�BaseConfiguratorzI
    The configurator base class which defines some useful defaults.
    z%^(?P<prefix>[a-z]+)://(?P<suffix>.*)$z^\s*(\w+)\s*z^\.\s*(\w+)\s*z^\[\s*(\w+)\s*\]\s*z^\d+$�ext_convert�cfg_convert)ZextZcfgcCst|�|_||j_dS)N)r�configr)rr0r	r	r
�__init__�s
zBaseConfigurator.__init__c	Cs�|jd�}|jd�}y`|j|�}xP|D]H}|d|7}yt||�}Wq&tk
rl|j|�t||�}Yq&Xq&W|Stk
r�tj�dd�\}}td||f�}|||_	|_
|�YnXdS)z`
        Resolve strings to objects using standard import and attribute
        syntax.
        �.rr)NzCannot resolve %r: %s)�splitr#�importer�getattr�AttributeError�ImportError�sys�exc_infor�	__cause__�
__traceback__)	rr�nameZused�foundZfrag�e�tb�vr	r	r
�resolve�s"




zBaseConfigurator.resolvecCs
|j|�S)z*Default converter for the ext:// protocol.)rA)rrr	r	r
r.�szBaseConfigurator.ext_convertcCs|}|jj|�}|dkr&td|��n�||j�d�}|j|j�d}x�|r�|jj|�}|rp||j�d}nd|jj|�}|r�|j�d}|jj|�s�||}n2yt	|�}||}Wnt
k
r�||}YnX|r�||j�d�}qJtd||f��qJW|S)z*Default converter for the cfg:// protocol.NzUnable to convert %rrzUnable to convert %r at %r)�WORD_PATTERNrr�endr0�groups�DOT_PATTERN�
INDEX_PATTERN�
DIGIT_PATTERNrr)rr�restr�dr*�nr	r	r
r/�s2
zBaseConfigurator.cfg_convertcCs�t|t�r&t|t�r&t|�}||_n�t|t�rLt|t�rLt|�}||_n~t|t�rrt|t�rrt|�}||_nXt|tj	�r�|j
j|�}|r�|j�}|d}|j
j|d�}|r�|d}t||�}||�}|S)z�
        Convert values to an appropriate type. dicts, lists and tuples are
        replaced by their converting alternatives. Strings are checked to
        see if they have a conversion format and are converted if they do.
        �prefixN�suffix)r
rrrrr(rr,rZstring_types�CONVERT_PATTERNr�	groupdict�value_convertersr!r5)rrrrIrKZ	converterrLr	r	r
r�s*

zBaseConfigurator.convertcs��jd�}t|d�r8ttd�r8t|�tjkr8|j|�}�jdd�}t�fdd��D��}|f|�}|r�x |j�D]\}}t|||�qrW|S)z1Configure an object with a user-supplied factory.z()�__call__�	ClassTyper2Nc3s"|]}t|�r|�|fVqdS)N)r)�.0�k)r0r	r
�	<genexpr>sz4BaseConfigurator.configure_custom.<locals>.<genexpr>)	r#�hasattr�typesrrQrAr�items�setattr)rr0�cZprops�kwargsr r<rr	)r0r
�configure_customs
$

z!BaseConfigurator.configure_customcCst|t�rt|�}|S)z0Utility function which converts lists to tuples.)r
r(r,)rrr	r	r
�as_tuples
zBaseConfigurator.as_tupleN)r$r%r&r'�re�compilerMrBrErFrGrO�
__import__r4r1rAr.r/rr[r\r	r	r	r
r-�s 




"r-c@s^eZdZdZdd�Zdd�Zdd�Zdd	�Zd
d�Zdd
�Z	ddd�Z
ddd�Zddd�ZdS)�DictConfiguratorz]
    Configure logging using a dictionary-like object to describe the
    configuration.
    cCs�|j}d|krtd��|ddkr2td|d��|jdd�}i}tj��z||�r�|jd|�}tjdd	�dk�rx�|D]�}|tjkr�td|��qzy4tj|}||}|jdd�}|r�|j	t
|��Wqztk
�r�}	ztd
||	f��WYdd}	~	XqzXqzW|jd|�}
xZ|
D]R}y|j||
|d�Wn4tk
�rd}	ztd||	f��WYdd}	~	XnX�qW|jdd�}|�r�y|j
|d�Wn0tk
�r�}	ztd|	��WYdd}	~	XnX�n|jdd�}tjj�tjdd�=|jd|�}
xZ|
D]R}y|j|
|�|
|<Wn4tk
�rF}	ztd||	f��WYdd}	~	XnX�q�W|jd|�}xZ|D]R}y|j||�||<Wn4tk
�r�}	ztd||	f��WYdd}	~	XnX�q`W|jd|�}xht|�D]\}y |j||�}||_|||<Wn4tk
�r$}	ztd
||	f��WYdd}	~	XnX�q�Wtj}t|jj�}|j�g}|jd|�}
x�|
D]�}||k�r�|j|�}|d}t|�}t|�}|d}x<||k�r�||d|�|k�r�|j||�|d}�q�W|j|�y|j||
|�Wn4tk
�r$}	ztd||	f��WYdd}	~	XnX�q\WxF|D]>}|jj|}||k�rbtj|_g|_ d|_!n|�r2d|_"�q2W|jdd�}|�r�y|j
|�Wn0tk
�r�}	ztd|	��WYdd}	~	XnXWdtj#�XdS)zDo the configuration.�versionz$dictionary doesn't specify a versionr)zUnsupported version: %s�incrementalF�handlersN��zNo handler found with name %rrz"Unable to configure handler %r: %s�loggersTz!Unable to configure logger %r: %s�rootz#Unable to configure root logger: %sZdisable_existing_loggers�
formattersz$Unable to configure formatter %r: %s�filtersz!Unable to configure filter %r: %sr2)rdre)$r0rr#rZ_acquireLockr!r8�version_infoZ	_handlers�setLevelr�
StandardError�configure_logger�configure_root�clearZ_handlerList�configure_formatter�configure_filter�sorted�configure_handlerr<rgr(ZmanagerZ
loggerDict�sort�index�len�append�removeZNOTSETrrc�	propagateZdisabledZ_releaseLock)rr0rbZ
EMPTY_DICTrcr<ZhandlerZhandler_configrr>rfrgZdisable_existingrhriZexistingZ
child_loggers�iZprefixedZpflenZnum_existing�log�loggerr	r	r
�	configures�



"
$

$
$$



$

zDictConfigurator.configurecCs�d|krr|d}y|j|�}Wq�tk
rn}z4dt|�kr>�|jd�|d<||d<|j|�}WYdd}~Xq�Xn$|jdd�}|jdd�}tj||�}|S)z(Configure a formatter from a dictionary.z()z'format'�format�fmtNZdatefmt)r[rrr#r!rZ	Formatter)rr0�factoryr �terZdfmtr	r	r
rp�sz$DictConfigurator.configure_formattercCs.d|kr|j|�}n|jdd�}tj|�}|S)z%Configure a filter from a dictionary.z()r<�)r[r!r�Filter)rr0r r<r	r	r
rq�s

z!DictConfigurator.configure_filtercCs^xX|D]P}y|j|jd|�Wqtk
rT}ztd||f��WYdd}~XqXqWdS)z/Add filters to a filterer from a list of names.rizUnable to add filter %r: %sN)Z	addFilterr0rlr)rZfiltererri�fr>r	r	r
�add_filters�s

zDictConfigurator.add_filtersc-s@�jdd�}|rVy|jd|}Wn2tk
rT}ztd||f��WYdd}~XnX�jdd�}�jdd�}d�kr��jd�}t|d�r�ttd	�r�t|�tjkr�|j|�}|}n�|j�jd
��}t	|t
jj�o�d�k�r2y|jd�d�d<Wn8tk
�r.}ztd
�d|f��WYdd}~XnXnZt	|t
jj
��r`d�k�r`|j�d��d<n,t	|t
jj��r�d�k�r�|j�d��d<|}t�fdd��D��}	y|f|	�}
WnLtk
�r}z.dt|�k�r؂|	jd�|	d<|f|	�}
WYdd}~XnX|�r|
j|�|dk	�r*|
jt|��|�r<|j|
|�|
S)z&Configure a handler from a dictionary.�	formatterNrhzUnable to set formatter %r: %srriz()rPrQ�class�targetrcz#Unable to set target handler %r: %sZmailhostZaddressc3s"|]}t|�r|�|fVqdS)N)r)rRrS)r0r	r
rT�sz5DictConfigurator.configure_handler.<locals>.<genexpr>z'stream'�streamZstrm)r#r0rlrrUrVrrQrA�
issubclassrrcZ
MemoryHandlerZSMTPHandlerr\Z
SysLogHandlerrrrZsetFormatterrkrr�)rr0r�r>rrirYr��klassrZr r�r	)r0r
rs�sX
$

$



z"DictConfigurator.configure_handlercCs^xX|D]P}y|j|jd|�Wqtk
rT}ztd||f��WYdd}~XqXqWdS)z.Add handlers to a logger from a list of names.rczUnable to add handler %r: %sN)Z
addHandlerr0rlr)rr|rc�hr>r	r	r
�add_handlers	s

zDictConfigurator.add_handlersFcCs�|jdd�}|dk	r"|jt|��|s�x |jdd�D]}|j|�q6W|jdd�}|rd|j||�|jdd�}|r�|j||�dS)zU
        Perform configuration which is common to root and non-root loggers.
        rNrcri)r!rkrrcZ
removeHandlerr�r�)rr|r0rbrr�rcrir	r	r
�common_logger_configsz%DictConfigurator.common_logger_configcCs6tj|�}|j|||�|jdd�}|dk	r2||_dS)z.Configure a non-root logger from a dictionary.ryN)r�	getLoggerr�r!ry)rr<r0rbr|ryr	r	r
rm#s

z!DictConfigurator.configure_loggercCstj�}|j|||�dS)z*Configure a root logger from a dictionary.N)rr�r�)rr0rbrgr	r	r
rn+szDictConfigurator.configure_rootN)F)F)F)
r$r%r&r'r}rprqr�rsr�r�rmrnr	r	r	r
r`s	5

r`cCst|�j�dS)z%Configure logging using a dictionary.N)�dictConfigClassr})r0r	r	r
�
dictConfig3sr�)Z
__future__rZlogging.handlersrr]r8rVZpip._vendorrr^�Irrrr7rrr(rr,r�objectr-r`r�r�r	r	r	r
�<module>s*	&
compat/__init__.py000064400000011100151733136160010137 0ustar00"""Stuff that differs in different Python versions and platform
distributions."""
from __future__ import absolute_import, division

import os
import sys

from pip._vendor.six import text_type

try:
    from logging.config import dictConfig as logging_dictConfig
except ImportError:
    from pip.compat.dictconfig import dictConfig as logging_dictConfig

try:
    from collections import OrderedDict
except ImportError:
    from pip._vendor.ordereddict import OrderedDict

try:
    import ipaddress
except ImportError:
    try:
        from pip._vendor import ipaddress
    except ImportError:
        import ipaddr as ipaddress
        ipaddress.ip_address = ipaddress.IPAddress
        ipaddress.ip_network = ipaddress.IPNetwork


try:
    import sysconfig

    def get_stdlib():
        """Return the set of directories containing the standard library."""
        candidates = (
            sysconfig.get_path("stdlib"),
            sysconfig.get_path("platstdlib"),
        )
        # Drop any path sysconfig could not determine (None / empty).
        return {path for path in candidates if path}
except ImportError:
    from distutils import sysconfig

    def get_stdlib():
        """Return the set of directories containing the standard library."""
        candidates = (
            sysconfig.get_python_lib(standard_lib=True),
            sysconfig.get_python_lib(standard_lib=True, plat_specific=True),
        )
        # Drop any path distutils could not determine (None / empty).
        return {path for path in candidates if path}


__all__ = [
    "logging_dictConfig", "ipaddress", "uses_pycache", "console_to_str",
    "native_str", "get_path_uid", "stdlib_pkgs", "WINDOWS", "samefile",
    "OrderedDict",
]


if sys.version_info >= (3, 4):
    uses_pycache = True
    from importlib.util import cache_from_source
else:
    import imp
    uses_pycache = hasattr(imp, 'cache_from_source')
    if uses_pycache:
        cache_from_source = imp.cache_from_source
    else:
        cache_from_source = None


if sys.version_info >= (3,):
    def console_to_str(s):
        """Decode console (subprocess) byte output to str.

        The original implementation read ``sys.__stdout__.encoding``
        unconditionally, which raises AttributeError when
        ``sys.__stdout__`` is None (e.g. under pythonw) and TypeError
        when its ``encoding`` attribute is None (e.g. when output is
        redirected).  Fall back to UTF-8 in both of those cases, as well
        as when the bytes do not decode with the console encoding.
        """
        encoding = getattr(sys.__stdout__, 'encoding', None) or 'utf_8'
        try:
            return s.decode(encoding)
        except UnicodeDecodeError:
            return s.decode('utf_8')

    def native_str(s, replace=False):
        """Return *s* as a native str, decoding bytes as UTF-8.

        When *replace* is true, undecodable bytes become U+FFFD instead
        of raising UnicodeDecodeError.
        """
        if not isinstance(s, bytes):
            return s
        errors = 'replace' if replace else 'strict'
        return s.decode('utf-8', errors)

else:
    def console_to_str(s):
        """Python 2: console output is already a native str; pass through."""
        return s

    def native_str(s, replace=False):
        """Return *s* as a native (byte) string, encoding unicode as UTF-8.

        *replace* is accepted for interface parity with the Python 3
        variant but ignored: encoding unicode to UTF-8 cannot fail.
        """
        if not isinstance(s, text_type):
            return s
        return s.encode('utf-8')


def total_seconds(td):
    """Return the duration of timedelta *td* in seconds as a float.

    Falls back to a manual computation on Python 2.6, where
    ``timedelta.total_seconds()`` does not exist.
    """
    try:
        return td.total_seconds()
    except AttributeError:
        micros = ((td.days * 24 * 3600) + td.seconds) * 10 ** 6 + td.microseconds
        return micros / 10 ** 6


def get_path_uid(path):
    """
    Return path's uid.

    Does not follow symlinks:
        https://github.com/pypa/pip/pull/935#discussion_r5307003

    Placed this function in compat due to differences on AIX and
    Jython, that should eventually go away.

    :raises OSError: When path is a symlink or can't be read.
    """
    if not hasattr(os, 'O_NOFOLLOW'):  # AIX and Jython
        # WARNING: time of check vulnerability, but best we can do w/o NOFOLLOW
        if os.path.islink(path):
            # raise OSError for parity with the os.O_NOFOLLOW branch below
            raise OSError(
                "%s is a symlink; Will not return uid for symlinks" % path
            )
        # older versions of Jython don't have `os.fstat`
        return os.stat(path).st_uid
    fd = os.open(path, os.O_RDONLY | os.O_NOFOLLOW)
    file_uid = os.fstat(fd).st_uid
    os.close(fd)
    return file_uid


def expanduser(path):
    """
    Expand ~ and ~user constructions.

    Includes a workaround for http://bugs.python.org/issue14768
    """
    result = os.path.expanduser(path)
    # When the home directory is '/', expansion yields a spurious
    # leading double slash; strip one of them.
    if result.startswith('//') and path.startswith('~/'):
        result = result[1:]
    return result


# packages in the stdlib that may have installation metadata, but should not be
# considered 'installed'.  this theoretically could be determined based on
# dist.location (py27:`sysconfig.get_paths()['stdlib']`,
# py26:sysconfig.get_config_vars('LIBDEST')), but fear platform variation may
# make this ineffective, so hard-coding
stdlib_pkgs = ('python', 'wsgiref')
if sys.version_info >= (2, 7):
    stdlib_pkgs += ('argparse',)


# windows detection, covers cpython and ironpython
WINDOWS = (sys.platform.startswith("win") or
           (sys.platform == 'cli' and os.name == 'nt'))


def samefile(file1, file2):
    """Provide an alternative for os.path.samefile on Windows/Python2"""
    if hasattr(os.path, 'samefile'):
        return os.path.samefile(file1, file2)
    # Fallback for platforms lacking os.path.samefile: compare
    # case-normalized absolute paths instead.
    return (os.path.normcase(os.path.abspath(file1)) ==
            os.path.normcase(os.path.abspath(file2)))
compat/dictconfig.py000064400000055070151733136160010527 0ustar00# This is a copy of the Python logging.config.dictconfig module,
# reproduced with permission. It is provided here for backwards
# compatibility for Python versions prior to 2.7.
#
# Copyright 2009-2010 by Vinay Sajip. All Rights Reserved.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose and without fee is hereby granted,
# provided that the above copyright notice appear in all copies and that
# both that copyright notice and this permission notice appear in
# supporting documentation, and that the name of Vinay Sajip
# not be used in advertising or publicity pertaining to distribution
# of the software without specific, written prior permission.
# VINAY SAJIP DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING
# ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL
# VINAY SAJIP BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR
# ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER
# IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
from __future__ import absolute_import

import logging.handlers
import re
import sys
import types

from pip._vendor import six

# flake8: noqa

# ASCII identifier pattern used to vet configuration keyword names.
IDENTIFIER = re.compile('^[a-z_][a-z0-9_]*$', re.I)


def valid_ident(s):
    """Return True if *s* is a valid Python identifier, else raise ValueError."""
    if IDENTIFIER.match(s) is None:
        raise ValueError('Not a valid Python identifier: %r' % s)
    return True

#
# This function is defined in logging only in recent versions of Python
#
try:
    from logging import _checkLevel
except ImportError:
    def _checkLevel(level):
        if isinstance(level, int):
            rv = level
        elif str(level) == level:
            if level not in logging._levelNames:
                raise ValueError('Unknown level: %r' % level)
            rv = logging._levelNames[level]
        else:
            raise TypeError('Level not an integer or a '
                            'valid string: %r' % level)
        return rv

# The ConvertingXXX classes are wrappers around standard Python containers,
# and they serve to convert any suitable values in the container. The
# conversion converts base dicts, lists and tuples to their wrapped
# equivalents, whereas strings which match a conversion format are converted
# appropriately.
#
# Each wrapper should have a configurator attribute holding the actual
# configurator to use for conversion.


class ConvertingDict(dict):
    """A converting dictionary wrapper.

    Values fetched from the dict are run through the attached
    configurator's convert(); converted results replace the stored value
    and are linked back to this container via .parent/.key.
    """

    def __getitem__(self, key):
        raw = dict.__getitem__(self, key)
        converted = self.configurator.convert(raw)
        if converted is not raw:
            # Cache the converted value for subsequent lookups.
            self[key] = converted
            if type(converted) in (ConvertingDict, ConvertingList,
                                   ConvertingTuple):
                converted.parent = self
                converted.key = key
        return converted

    def get(self, key, default=None):
        raw = dict.get(self, key, default)
        converted = self.configurator.convert(raw)
        if converted is not raw:
            # Cache the converted value for subsequent lookups.
            self[key] = converted
            if type(converted) in (ConvertingDict, ConvertingList,
                                   ConvertingTuple):
                converted.parent = self
                converted.key = key
        return converted

    def pop(self, key, default=None):
        raw = dict.pop(self, key, default)
        converted = self.configurator.convert(raw)
        if converted is not raw and type(converted) in (
                ConvertingDict, ConvertingList, ConvertingTuple):
            converted.parent = self
            converted.key = key
        return converted


class ConvertingList(list):
    """A converting list wrapper.

    Items read from the list are passed through the attached
    configurator's convert(); converted results are cached in place and
    linked back via .parent/.key.
    """

    def __getitem__(self, key):
        raw = list.__getitem__(self, key)
        converted = self.configurator.convert(raw)
        if converted is not raw:
            # Cache the converted value for subsequent lookups.
            self[key] = converted
            if type(converted) in (ConvertingDict, ConvertingList,
                                   ConvertingTuple):
                converted.parent = self
                converted.key = key
        return converted

    def pop(self, idx=-1):
        raw = list.pop(self, idx)
        converted = self.configurator.convert(raw)
        if converted is not raw and type(converted) in (
                ConvertingDict, ConvertingList, ConvertingTuple):
            # NOTE: no .key is recorded here (matches upstream) -- the item
            # has already been removed from the list.
            converted.parent = self
        return converted


class ConvertingTuple(tuple):
    """A converting tuple wrapper.

    Items are converted on access; tuples are immutable, so converted
    results are linked back via .parent/.key but never cached.
    """

    def __getitem__(self, key):
        raw = tuple.__getitem__(self, key)
        converted = self.configurator.convert(raw)
        if converted is not raw and type(converted) in (
                ConvertingDict, ConvertingList, ConvertingTuple):
            converted.parent = self
            converted.key = key
        return converted


class BaseConfigurator(object):
    """
    The configurator base class which defines some useful defaults.
    """

    # Matches conversion strings of the form "prefix://suffix",
    # e.g. "ext://sys.stderr" or "cfg://handlers.console".
    CONVERT_PATTERN = re.compile(r'^(?P<prefix>[a-z]+)://(?P<suffix>.*)$')

    # Tokens used by cfg_convert to walk a cfg:// path: a leading word,
    # ".word" key access, "[key]" index access, and all-digit indices
    # (tried as integers first).
    WORD_PATTERN = re.compile(r'^\s*(\w+)\s*')
    DOT_PATTERN = re.compile(r'^\.\s*(\w+)\s*')
    INDEX_PATTERN = re.compile(r'^\[\s*(\w+)\s*\]\s*')
    DIGIT_PATTERN = re.compile(r'^\d+$')

    # Maps a conversion prefix to the name of the bound method handling it.
    value_converters = {
        'ext' : 'ext_convert',
        'cfg' : 'cfg_convert',
    }

    # We might want to use a different one, e.g. importlib
    importer = __import__

    def __init__(self, config):
        # Wrap the config so nested values are converted lazily on access.
        self.config = ConvertingDict(config)
        self.config.configurator = self

    def resolve(self, s):
        """
        Resolve strings to objects using standard import and attribute
        syntax.
        """
        name = s.split('.')
        used = name.pop(0)
        try:
            found = self.importer(used)
            for frag in name:
                used += '.' + frag
                try:
                    found = getattr(found, frag)
                except AttributeError:
                    # Not an attribute yet: import one level deeper, retry.
                    self.importer(used)
                    found = getattr(found, frag)
            return found
        except ImportError:
            # Re-raise as ValueError, preserving the original cause and
            # traceback for debugging.
            e, tb = sys.exc_info()[1:]
            v = ValueError('Cannot resolve %r: %s' % (s, e))
            v.__cause__, v.__traceback__ = e, tb
            raise v

    def ext_convert(self, value):
        """Default converter for the ext:// protocol."""
        return self.resolve(value)

    def cfg_convert(self, value):
        """Default converter for the cfg:// protocol.

        Walks the configuration dict following dotted-word and [index]
        accesses, e.g. "handlers.console" or "handlers[0]".
        """
        rest = value
        m = self.WORD_PATTERN.match(rest)
        if m is None:
            raise ValueError("Unable to convert %r" % value)
        else:
            rest = rest[m.end():]
            d = self.config[m.groups()[0]]
            # print d, rest
            while rest:
                m = self.DOT_PATTERN.match(rest)
                if m:
                    d = d[m.groups()[0]]
                else:
                    m = self.INDEX_PATTERN.match(rest)
                    if m:
                        idx = m.groups()[0]
                        if not self.DIGIT_PATTERN.match(idx):
                            d = d[idx]
                        else:
                            try:
                                n = int(idx)  # try as number first (most likely)
                                d = d[n]
                            except TypeError:
                                d = d[idx]
                if m:
                    rest = rest[m.end():]
                else:
                    raise ValueError('Unable to convert '
                                     '%r at %r' % (value, rest))
        # rest should be empty
        return d

    def convert(self, value):
        """
        Convert values to an appropriate type. dicts, lists and tuples are
        replaced by their converting alternatives. Strings are checked to
        see if they have a conversion format and are converted if they do.
        """
        if not isinstance(value, ConvertingDict) and isinstance(value, dict):
            value = ConvertingDict(value)
            value.configurator = self
        elif not isinstance(value, ConvertingList) and isinstance(value, list):
            value = ConvertingList(value)
            value.configurator = self
        elif not isinstance(value, ConvertingTuple) and\
                 isinstance(value, tuple):
            value = ConvertingTuple(value)
            value.configurator = self
        elif isinstance(value, six.string_types):  # str for py3k
            m = self.CONVERT_PATTERN.match(value)
            if m:
                d = m.groupdict()
                prefix = d['prefix']
                converter = self.value_converters.get(prefix, None)
                if converter:
                    suffix = d['suffix']
                    converter = getattr(self, converter)
                    value = converter(suffix)
        return value

    def configure_custom(self, config):
        """Configure an object with a user-supplied factory.

        The '()' key holds the factory (a callable, or a string to be
        resolved); the '.' key, if present, holds attributes to set on the
        result after construction.
        """
        c = config.pop('()')
        # Resolve string references to callables.  The ClassType check is
        # only reachable on Python 2, where old-style classes exist.
        if not hasattr(c, '__call__') and hasattr(types, 'ClassType') and type(c) != types.ClassType:
            c = self.resolve(c)
        props = config.pop('.', None)
        # Check for valid identifiers
        kwargs = dict((k, config[k]) for k in config if valid_ident(k))
        result = c(**kwargs)
        if props:
            for name, value in props.items():
                setattr(result, name, value)
        return result

    def as_tuple(self, value):
        """Utility function which converts lists to tuples."""
        if isinstance(value, list):
            value = tuple(value)
        return value


class DictConfigurator(BaseConfigurator):
    """
    Configure logging using a dictionary-like object to describe the
    configuration.

    NOTE(review): this backport targets Python 2.6 and catches
    StandardError, a name that does not exist on Python 3; on Python 3
    the stdlib's logging.config.dictConfig is used instead (see
    pip/compat/__init__.py).
    """

    def configure(self):
        """Do the configuration.

        Validates the schema version, then configures either incrementally
        (level/propagate updates only) or from scratch, in dependency
        order: formatters, filters, handlers, loggers, root.  All work is
        done while holding the logging module lock.
        """

        config = self.config
        if 'version' not in config:
            raise ValueError("dictionary doesn't specify a version")
        if config['version'] != 1:
            raise ValueError("Unsupported version: %s" % config['version'])
        incremental = config.pop('incremental', False)
        EMPTY_DICT = {}
        logging._acquireLock()
        try:
            if incremental:
                handlers = config.get('handlers', EMPTY_DICT)
                # incremental handler config only if handler name
                # ties in to logging._handlers (Python 2.7)
                if sys.version_info[:2] == (2, 7):
                    for name in handlers:
                        if name not in logging._handlers:
                            raise ValueError('No handler found with '
                                             'name %r'  % name)
                        else:
                            try:
                                handler = logging._handlers[name]
                                handler_config = handlers[name]
                                level = handler_config.get('level', None)
                                if level:
                                    handler.setLevel(_checkLevel(level))
                            except StandardError as e:
                                raise ValueError('Unable to configure handler '
                                                 '%r: %s' % (name, e))
                loggers = config.get('loggers', EMPTY_DICT)
                for name in loggers:
                    try:
                        self.configure_logger(name, loggers[name], True)
                    except StandardError as e:
                        raise ValueError('Unable to configure logger '
                                         '%r: %s' % (name, e))
                root = config.get('root', None)
                if root:
                    try:
                        self.configure_root(root, True)
                    except StandardError as e:
                        raise ValueError('Unable to configure root '
                                         'logger: %s' % e)
            else:
                disable_existing = config.pop('disable_existing_loggers', True)

                # Full (non-incremental) configuration replaces all
                # registered handlers.
                logging._handlers.clear()
                del logging._handlerList[:]

                # Do formatters first - they don't refer to anything else
                formatters = config.get('formatters', EMPTY_DICT)
                for name in formatters:
                    try:
                        formatters[name] = self.configure_formatter(
                                                            formatters[name])
                    except StandardError as e:
                        raise ValueError('Unable to configure '
                                         'formatter %r: %s' % (name, e))
                # Next, do filters - they don't refer to anything else, either
                filters = config.get('filters', EMPTY_DICT)
                for name in filters:
                    try:
                        filters[name] = self.configure_filter(filters[name])
                    except StandardError as e:
                        raise ValueError('Unable to configure '
                                         'filter %r: %s' % (name, e))

                # Next, do handlers - they refer to formatters and filters
                # As handlers can refer to other handlers, sort the keys
                # to allow a deterministic order of configuration
                handlers = config.get('handlers', EMPTY_DICT)
                for name in sorted(handlers):
                    try:
                        handler = self.configure_handler(handlers[name])
                        handler.name = name
                        handlers[name] = handler
                    except StandardError as e:
                        raise ValueError('Unable to configure handler '
                                         '%r: %s' % (name, e))
                # Next, do loggers - they refer to handlers and filters

                # we don't want to lose the existing loggers,
                # since other threads may have pointers to them.
                # existing is set to contain all existing loggers,
                # and as we go through the new configuration we
                # remove any which are configured. At the end,
                # what's left in existing is the set of loggers
                # which were in the previous configuration but
                # which are not in the new configuration.
                root = logging.root
                existing = list(root.manager.loggerDict)
                # The list needs to be sorted so that we can
                # avoid disabling child loggers of explicitly
                # named loggers. With a sorted list it is easier
                # to find the child loggers.
                existing.sort()
                # We'll keep the list of existing loggers
                # which are children of named loggers here...
                child_loggers = []
                # now set up the new ones...
                loggers = config.get('loggers', EMPTY_DICT)
                for name in loggers:
                    if name in existing:
                        i = existing.index(name)
                        prefixed = name + "."
                        pflen = len(prefixed)
                        num_existing = len(existing)
                        i = i + 1  # look at the entry after name
                        while (i < num_existing) and\
                              (existing[i][:pflen] == prefixed):
                            child_loggers.append(existing[i])
                            i = i + 1
                        existing.remove(name)
                    try:
                        self.configure_logger(name, loggers[name])
                    except StandardError as e:
                        raise ValueError('Unable to configure logger '
                                         '%r: %s' % (name, e))

                # Disable any old loggers. There's no point deleting
                # them as other threads may continue to hold references
                # and by disabling them, you stop them doing any logging.
                # However, don't disable children of named loggers, as that's
                # probably not what was intended by the user.
                for log in existing:
                    logger = root.manager.loggerDict[log]
                    if log in child_loggers:
                        logger.level = logging.NOTSET
                        logger.handlers = []
                        logger.propagate = True
                    elif disable_existing:
                        logger.disabled = True

                # And finally, do the root logger
                root = config.get('root', None)
                if root:
                    try:
                        self.configure_root(root)
                    except StandardError as e:
                        raise ValueError('Unable to configure root '
                                         'logger: %s' % e)
        finally:
            logging._releaseLock()

    def configure_formatter(self, config):
        """Configure a formatter from a dictionary.

        A '()' key selects a custom factory; otherwise a standard
        logging.Formatter is built from 'format' and 'datefmt'.
        """
        if '()' in config:
            factory = config['()']  # for use in exception handler
            try:
                result = self.configure_custom(config)
            except TypeError as te:
                if "'format'" not in str(te):
                    raise
                # Name of parameter changed from fmt to format.
                # Retry with old name.
                # This is so that code can be used with older Python versions
                #(e.g. by Django)
                config['fmt'] = config.pop('format')
                config['()'] = factory
                result = self.configure_custom(config)
        else:
            fmt = config.get('format', None)
            dfmt = config.get('datefmt', None)
            result = logging.Formatter(fmt, dfmt)
        return result

    def configure_filter(self, config):
        """Configure a filter from a dictionary.

        A '()' key selects a custom factory; otherwise a standard
        logging.Filter is built from 'name'.
        """
        if '()' in config:
            result = self.configure_custom(config)
        else:
            name = config.get('name', '')
            result = logging.Filter(name)
        return result

    def add_filters(self, filterer, filters):
        """Add filters to a filterer from a list of names."""
        for f in filters:
            try:
                filterer.addFilter(self.config['filters'][f])
            except StandardError as e:
                raise ValueError('Unable to add filter %r: %s' % (f, e))

    def configure_handler(self, config):
        """Configure a handler from a dictionary.

        Resolves 'formatter', 'level' and 'filters' references, builds the
        handler via a '()' custom factory or the 'class' key, and handles
        the special cross-references of MemoryHandler ('target'),
        SMTPHandler ('mailhost') and SysLogHandler ('address').
        """
        formatter = config.pop('formatter', None)
        if formatter:
            try:
                formatter = self.config['formatters'][formatter]
            except StandardError as e:
                raise ValueError('Unable to set formatter '
                                 '%r: %s' % (formatter, e))
        level = config.pop('level', None)
        filters = config.pop('filters', None)
        if '()' in config:
            c = config.pop('()')
            # Resolve string references to callables; the ClassType check
            # is only reachable on Python 2 (old-style classes).
            if not hasattr(c, '__call__') and hasattr(types, 'ClassType') and type(c) != types.ClassType:
                c = self.resolve(c)
            factory = c
        else:
            klass = self.resolve(config.pop('class'))
            # Special case for handler which refers to another handler
            if issubclass(klass, logging.handlers.MemoryHandler) and\
                'target' in config:
                try:
                    config['target'] = self.config['handlers'][config['target']]
                except StandardError as e:
                    raise ValueError('Unable to set target handler '
                                     '%r: %s' % (config['target'], e))
            elif issubclass(klass, logging.handlers.SMTPHandler) and\
                'mailhost' in config:
                config['mailhost'] = self.as_tuple(config['mailhost'])
            elif issubclass(klass, logging.handlers.SysLogHandler) and\
                'address' in config:
                config['address'] = self.as_tuple(config['address'])
            factory = klass
        kwargs = dict((k, config[k]) for k in config if valid_ident(k))
        try:
            result = factory(**kwargs)
        except TypeError as te:
            if "'stream'" not in str(te):
                raise
            # The argument name changed from strm to stream
            # Retry with old name.
            # This is so that code can be used with older Python versions
            #(e.g. by Django)
            kwargs['strm'] = kwargs.pop('stream')
            result = factory(**kwargs)
        if formatter:
            result.setFormatter(formatter)
        if level is not None:
            result.setLevel(_checkLevel(level))
        if filters:
            self.add_filters(result, filters)
        return result

    def add_handlers(self, logger, handlers):
        """Add handlers to a logger from a list of names."""
        for h in handlers:
            try:
                logger.addHandler(self.config['handlers'][h])
            except StandardError as e:
                raise ValueError('Unable to add handler %r: %s' % (h, e))

    def common_logger_config(self, logger, config, incremental=False):
        """
        Perform configuration which is common to root and non-root loggers.

        Sets the level, and (unless incremental) replaces existing
        handlers and filters with those named in *config*.
        """
        level = config.get('level', None)
        if level is not None:
            logger.setLevel(_checkLevel(level))
        if not incremental:
            # Remove any existing handlers
            for h in logger.handlers[:]:
                logger.removeHandler(h)
            handlers = config.get('handlers', None)
            if handlers:
                self.add_handlers(logger, handlers)
            filters = config.get('filters', None)
            if filters:
                self.add_filters(logger, filters)

    def configure_logger(self, name, config, incremental=False):
        """Configure a non-root logger from a dictionary."""
        logger = logging.getLogger(name)
        self.common_logger_config(logger, config, incremental)
        propagate = config.get('propagate', None)
        if propagate is not None:
            logger.propagate = propagate

    def configure_root(self, config, incremental=False):
        """Configure a root logger from a dictionary."""
        root = logging.getLogger()
        self.common_logger_config(root, config, incremental)

# Hook point: replace this to use a customised configurator subclass.
dictConfigClass = DictConfigurator


def dictConfig(config):
    """Configure logging using a dictionary (backport of
    logging.config.dictConfig for Python 2.6)."""
    dictConfigClass(config).configure()
__init__.py000064400000027236151733136160006675 0ustar00from __future__ import absolute_import

import locale
import logging
import os
import optparse
import warnings

import sys
import re

# 2016-06-17 barry@debian.org: urllib3 1.14 added optional support for socks,
# but if invoked (i.e. imported), it will issue a warning to stderr if socks
# isn't available.  requests unconditionally imports urllib3's socks contrib
# module, triggering this warning.  The warning breaks DEP-8 tests (because of
# the stderr output) and is just plain annoying in normal usage.  I don't want
# to add socks as yet another dependency for pip, nor do I want to allow-stderr
# in the DEP-8 tests, so just suppress the warning.  pdb tells me this has to
# be done before the import of pip.vcs.
from pip._vendor.urllib3.exceptions import DependencyWarning
warnings.filterwarnings("ignore", category=DependencyWarning)  # noqa

# We want to inject the use of SecureTransport as early as possible so that any
# references or sessions or what have you are ensured to have it, however we
# only want to do this in the case that we're running on macOS and the linked
# OpenSSL is too old to handle TLSv1.2
try:
    import ssl
except ImportError:
    # No ssl module at all: nothing to patch.
    pass
else:
    # Only patch on macOS when the linked OpenSSL predates 1.0.1 (i.e.
    # cannot speak TLSv1.2, which PyPI requires).
    if (sys.platform == "darwin" and
            getattr(ssl, "OPENSSL_VERSION_NUMBER", 0) < 0x1000100f):  # OpenSSL 1.0.1
        try:
            from pip._vendor.urllib3.contrib import securetransport
        except (ImportError, OSError):
            # securetransport may be unavailable or fail to load its
            # native bindings; fall back to the stock ssl module.
            pass
        else:
            securetransport.inject_into_urllib3()

from pip.exceptions import InstallationError, CommandError, PipError
from pip.utils import get_installed_distributions, get_prog
from pip.utils import deprecation, dist_is_editable
from pip.vcs import git, mercurial, subversion, bazaar  # noqa
from pip.baseparser import ConfigOptionParser, UpdatingDefaultsHelpFormatter
from pip.commands import get_summaries, get_similar_commands
from pip.commands import commands_dict
from pip._vendor.urllib3.exceptions import InsecureRequestWarning


# assignment for flake8 to be happy

# This fixes a peculiarity when importing via __import__ - as we are
# initialising the pip module, "from pip import cmdoptions" is recursive
# and appears not to work properly in that situation.
import pip.cmdoptions
# Re-export so "pip.cmdoptions" is reachable as an attribute of this module
# even when imported via __import__ (see comment above).
cmdoptions = pip.cmdoptions

# The version as used in the setup.py and the docs conf.py
__version__ = "9.0.3"


logger = logging.getLogger(__name__)

# Hide the InsecureRequestWarning from urllib3
warnings.filterwarnings("ignore", category=InsecureRequestWarning)


def autocomplete():
    """Command and option completion for the main option parser (and options)
    and its subcommands (and options).

    Enable by sourcing one of the completion shell scripts (bash, zsh or fish).
    Prints candidate completions to stdout and exits; returns early (None)
    when completion was not requested.
    """
    # Don't complete if user hasn't sourced bash_completion file.
    if 'PIP_AUTO_COMPLETE' not in os.environ:
        return
    # COMP_WORDS / COMP_CWORD are exported by the completion shell scripts.
    cwords = os.environ['COMP_WORDS'].split()[1:]
    cword = int(os.environ['COMP_CWORD'])
    try:
        current = cwords[cword - 1]
    except IndexError:
        current = ''

    subcommands = [cmd for cmd, summary in get_summaries()]
    options = []
    # first word that names a known subcommand, if any
    try:
        subcommand_name = [w for w in cwords if w in subcommands][0]
    except IndexError:
        subcommand_name = None

    parser = create_main_parser()
    # subcommand options
    if subcommand_name:
        # special case: 'help' subcommand has no options
        if subcommand_name == 'help':
            sys.exit(1)
        # special case: list locally installed dists for uninstall command
        if subcommand_name == 'uninstall' and not current.startswith('-'):
            installed = []
            lc = current.lower()
            for dist in get_installed_distributions(local_only=True):
                if dist.key.startswith(lc) and dist.key not in cwords[1:]:
                    installed.append(dist.key)
            # if there are no dists installed, fall back to option completion
            if installed:
                for dist in installed:
                    print(dist)
                sys.exit(1)

        subcommand = commands_dict[subcommand_name]()
        options += [(opt.get_opt_string(), opt.nargs)
                    for opt in subcommand.parser.option_list_all
                    if opt.help != optparse.SUPPRESS_HELP]

        # filter out previously specified options from available options
        prev_opts = [x.split('=')[0] for x in cwords[1:cword - 1]]
        options = [(x, v) for (x, v) in options if x not in prev_opts]
        # filter options by current input
        options = [(k, v) for k, v in options if k.startswith(current)]
        for option in options:
            opt_label = option[0]
            # append '=' to options which require args
            if option[1]:
                opt_label += '='
            print(opt_label)
    else:
        # Show main parser options only when the user is typing an option.
        # startswith('-') already matches '--...'; the old extra
        # startswith('--') test was redundant.
        if current.startswith('-'):
            opts = [i.option_list for i in parser.option_groups]
            opts.append(parser.option_list)
            opts = (o for it in opts for o in it)

            subcommands += [i.get_opt_string() for i in opts
                            if i.help != optparse.SUPPRESS_HELP]

        print(' '.join([x for x in subcommands if x.startswith(current)]))
    sys.exit(1)


def create_main_parser():
    """Build and return pip's top-level ConfigOptionParser.

    The parser carries the version banner, the general option group, and a
    description listing every available subcommand.
    """
    parser = ConfigOptionParser(
        usage='\n%prog <command> [options]',
        add_help_option=False,
        formatter=UpdatingDefaultsHelpFormatter(),
        name='global',
        prog=get_prog(),
    )
    parser.disable_interspersed_args()

    pip_pkg_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
    parser.version = 'pip %s from %s (python %s)' % (
        __version__, pip_pkg_dir, sys.version[:3])

    # add the general options
    parser.add_option_group(
        cmdoptions.make_option_group(cmdoptions.general_group, parser))

    parser.main = True  # so the help formatter knows

    # one "<command>  <summary>" row per subcommand for the description
    description = [''] + [
        '%-27s %s' % (name, summary) for name, summary in get_summaries()
    ]
    parser.description = '\n'.join(description)

    return parser


def parseopts(args):
    """Split *args* into (subcommand name, remaining args).

    Handles ``--version`` and the bare ``pip`` / ``pip help`` invocations by
    printing and exiting.  Raises CommandError for an unknown subcommand.
    """
    parser = create_main_parser()

    # The parser has interspersed args disabled, so parse_args() splits the
    # general options that precede the subcommand from everything after it.
    # For example:
    #  args: ['--timeout=5', 'install', '--user', 'INITools']
    #  general_options: ['--timeout=5']
    #  args_else: ['install', '--user', 'INITools']
    general_options, args_else = parser.parse_args(args)

    # --version short-circuits everything else
    if general_options.version:
        sys.stdout.write(parser.version)
        sys.stdout.write(os.linesep)
        sys.exit()

    # bare `pip`, or plain `pip help`, prints the top-level help
    if not args_else or (args_else[0] == 'help' and len(args_else) == 1):
        parser.print_help()
        sys.exit()

    # the subcommand name
    cmd_name = args_else[0]

    if cmd_name not in commands_dict:
        guess = get_similar_commands(cmd_name)

        error_parts = ['unknown command "%s"' % cmd_name]
        if guess:
            error_parts.append('maybe you meant "%s"' % guess)

        raise CommandError(' - '.join(error_parts))

    # hand the command every argument except the subcommand token itself
    cmd_args = args[:]
    cmd_args.remove(cmd_name)

    return cmd_name, cmd_args


def check_isolated(args):
    """Return True when the literal ``--isolated`` flag appears in *args*."""
    return "--isolated" in args


def main(args=None):
    """pip's console entry point.

    Parses the command line, dispatches to the chosen subcommand, and
    returns its exit status.  Exits directly (via sys.exit) on parse
    errors, --version, or shell completion.
    """
    if args is None:
        args = sys.argv[1:]

    # Configure our deprecation warnings to be sent through loggers
    deprecation.install_warning_logger()

    # Exits the process if PIP_AUTO_COMPLETE is set; otherwise a no-op.
    autocomplete()

    try:
        cmd_name, cmd_args = parseopts(args)
    except PipError as exc:
        sys.stderr.write("ERROR: %s" % exc)
        sys.stderr.write(os.linesep)
        sys.exit(1)

    # Needed for locale.getpreferredencoding(False) to work
    # in pip.utils.encoding.auto_decode
    try:
        locale.setlocale(locale.LC_ALL, '')
    except locale.Error as e:
        # setlocale can apparently crash if locale are uninitialized
        logger.debug("Ignoring error %s when setting locale", e)
    # Instantiate the subcommand and delegate; its main() returns the
    # process exit status.
    command = commands_dict[cmd_name](isolated=check_isolated(cmd_args))
    return command.main(cmd_args)


# ###########################################################
# # Writing freeze files

class FrozenRequirement(object):
    """One requirement line of ``pip freeze`` output.

    Wraps a requirement string (or editable VCS URL) together with the
    comment lines that should precede it in the freeze file.
    """

    def __init__(self, name, req, editable, comments=()):
        self.name = name
        # Either a requirement string/object, or "<url>@<rev>#egg=<name>"
        # for editable installs.
        self.req = req
        self.editable = editable
        self.comments = comments

    # setuptools egg versions like "1.0-r1234" or "1.0-20101010" mark svn
    # checkouts pinned at a revision / date.
    _rev_re = re.compile(r'-r(\d+)$')
    _date_re = re.compile(r'-(20\d\d\d\d\d\d)$')

    @classmethod
    def from_dist(cls, dist, dependency_links):
        """Build a FrozenRequirement from an installed distribution.

        Editable VCS checkouts are frozen as editable URLs; svn-flavoured
        version suffixes are resolved against *dependency_links*.
        """
        location = os.path.normcase(os.path.abspath(dist.location))
        comments = []
        from pip.vcs import vcs, get_src_requirement
        if dist_is_editable(dist) and vcs.get_backend_name(location):
            editable = True
            try:
                req = get_src_requirement(dist, location)
            except InstallationError as exc:
                logger.warning(
                    "Error when trying to get requirement for VCS system %s, "
                    "falling back to uneditable format", exc
                )
                req = None
            if req is None:
                logger.warning(
                    'Could not determine repository location of %s', location
                )
                comments.append(
                    '## !! Could not determine repository location'
                )
                req = dist.as_requirement()
                editable = False
        else:
            editable = False
            req = dist.as_requirement()
            specs = req.specs
            assert len(specs) == 1 and specs[0][0] in ["==", "==="], \
                'Expected 1 spec with == or ===; specs = %r; dist = %r' % \
                (specs, dist)
            version = specs[0][1]
            ver_match = cls._rev_re.search(version)
            date_match = cls._date_re.search(version)
            if ver_match or date_match:
                svn_backend = vcs.get_backend('svn')
                # BUGFIX: svn_location must be bound even when there is no
                # svn backend, otherwise the check below raised
                # UnboundLocalError.
                svn_location = None
                if svn_backend:
                    svn_location = svn_backend().get_location(
                        dist,
                        dependency_links,
                    )
                if not svn_location:
                    logger.warning(
                        'Warning: cannot find svn location for %s', req)
                    comments.append(
                        '## FIXME: could not find svn URL in dependency_links '
                        'for this package:'
                    )
                else:
                    comments.append(
                        '# Installing as editable to satisfy requirement %s:' %
                        req
                    )
                    if ver_match:
                        rev = ver_match.group(1)
                    else:
                        rev = '{%s}' % date_match.group(1)
                    editable = True
                    req = '%s@%s#egg=%s' % (
                        svn_location,
                        rev,
                        cls.egg_name(dist)
                    )
        return cls(dist.project_name, req, editable, comments)

    @staticmethod
    def egg_name(dist):
        """Return the egg name of *dist* without any trailing -pyX.Y tag."""
        name = dist.egg_name()
        match = re.search(r'-py\d\.\d$', name)
        if match:
            name = name[:match.start()]
        return name

    def __str__(self):
        """Render the freeze-file lines: comments, then the requirement."""
        req = self.req
        if self.editable:
            req = '-e %s' % req
        return '\n'.join(list(self.comments) + [str(req)]) + '\n'


# Allow `python -m pip` style direct execution; propagate the exit status.
if __name__ == '__main__':
    sys.exit(main())
locations.py000064400000012772151733136160007130 0ustar00"""Locations where we look for configs, install stuff, etc"""
from __future__ import absolute_import

import os
import os.path
import site
import sys

from distutils import sysconfig
from distutils.command.install import install, SCHEME_KEYS  # noqa

from pip.compat import WINDOWS, expanduser
from pip.utils import appdirs


# Application Directories
# Platform-appropriate per-user cache directory for pip's download/wheel
# caches.
USER_CACHE_DIR = appdirs.user_cache_dir("pip")


# Contents of the marker file dropped into unpacked source directories so a
# later run (or a human) knows pip owns the directory and may delete it.
DELETE_MARKER_MESSAGE = '''\
This file is placed here by pip to indicate the source was put
here by pip.

Once this package is successfully installed this source code will be
deleted (unless you remove this file).
'''
PIP_DELETE_MARKER_FILENAME = 'pip-delete-this-directory.txt'


def write_delete_marker_file(directory):
    """
    Write the pip delete marker file into this directory.
    """
    marker_path = os.path.join(directory, PIP_DELETE_MARKER_FILENAME)
    with open(marker_path, 'w') as fp:
        fp.write(DELETE_MARKER_MESSAGE)


def running_under_virtualenv():
    """
    Return True if we're running inside a virtualenv, False otherwise.
    """
    # Classic virtualenv rewrites sys.prefix and records the original
    # interpreter prefix as sys.real_prefix.
    if hasattr(sys, 'real_prefix'):
        return True
    # PEP 405 venvs expose base_prefix, which differs from sys.prefix
    # inside a venv.
    return sys.prefix != getattr(sys, "base_prefix", sys.prefix)


def virtualenv_no_global():
    """
    Return True if in a venv and no system site packages.
    """
    # virtualenv drops this marker file next to site.py when created
    # without global site-packages access; mirror that lookup here.
    marker = os.path.join(
        os.path.dirname(os.path.abspath(site.__file__)),
        'no-global-site-packages.txt',
    )
    # Note: falls through to an implicit None (falsy) when not applicable,
    # matching the original contract.
    if os.path.isfile(marker) and running_under_virtualenv():
        return True


# Where editable / VCS checkouts are unpacked ("src" dir).
if running_under_virtualenv():
    src_prefix = os.path.join(sys.prefix, 'src')
else:
    # FIXME: keep src in cwd for now (it is not a temporary folder)
    try:
        src_prefix = os.path.join(os.getcwd(), 'src')
    except OSError:
        # In case the current working directory has been renamed or deleted
        sys.exit(
            "The folder you are executing pip from can no longer be found."
        )

# under macOS + virtualenv sys.prefix is not properly resolved
# it is something like /path/to/python/bin/..
# NOTE(review): the original comment mentions realpath (for symlinked tmp
# dirs on OSX) but the code uses abspath, which does not resolve symlinks
# — confirm which was intended.
src_prefix = os.path.abspath(src_prefix)

# FIXME doesn't account for venv linked to global site-packages

site_packages = sysconfig.get_python_lib()
user_site = site.USER_SITE
user_dir = expanduser('~')
# Per-platform script dir, user script dir, and config file locations.
if WINDOWS:
    bin_py = os.path.join(sys.prefix, 'Scripts')
    bin_user = os.path.join(user_site, 'Scripts')
    # buildout uses 'bin' on Windows too?
    if not os.path.exists(bin_py):
        bin_py = os.path.join(sys.prefix, 'bin')
        bin_user = os.path.join(user_site, 'bin')

    config_basename = 'pip.ini'

    # Pre-appdirs config location, still honoured for backwards compat.
    legacy_storage_dir = os.path.join(user_dir, 'pip')
    legacy_config_file = os.path.join(
        legacy_storage_dir,
        config_basename,
    )
else:
    bin_py = os.path.join(sys.prefix, 'bin')
    bin_user = os.path.join(user_site, 'bin')

    config_basename = 'pip.conf'

    # Pre-appdirs config location, still honoured for backwards compat.
    legacy_storage_dir = os.path.join(user_dir, '.pip')
    legacy_config_file = os.path.join(
        legacy_storage_dir,
        config_basename,
    )

    # Forcing to use /usr/local/bin for standard macOS framework installs
    # Also log to ~/Library/Logs/ for use with the Console.app log viewer
    if sys.platform[:6] == 'darwin' and sys.prefix[:16] == '/System/Library/':
        bin_py = '/usr/local/bin'

# System-wide (as opposed to per-user) config files, in OS convention dirs.
site_config_files = [
    os.path.join(path, config_basename)
    for path in appdirs.site_config_dirs('pip')
]


def distutils_scheme(dist_name, user=False, home=None, root=None,
                     isolated=False, prefix=None):
    """
    Return a distutils install scheme.

    :param dist_name: project name, used for the headers subdirectory.
    :param user: install into the per-user site (mutually exclusive with
        prefix).
    :param home: distutils --home alternate base.
    :param root: fake-root to prepend to every path.
    :param isolated: skip the user's ~/.pydistutils.cfg.
    :param prefix: alternate installation prefix.
    :return: dict mapping each distutils SCHEME_KEY (purelib, platlib,
        headers, scripts, data) to its resolved path.
    """
    from distutils.dist import Distribution

    scheme = {}

    if isolated:
        extra_dist_args = {"script_args": ["--no-user-cfg"]}
    else:
        extra_dist_args = {}
    dist_args = {'name': dist_name}
    dist_args.update(extra_dist_args)

    d = Distribution(dist_args)
    # Pick up setup.cfg / distutils config so per-project overrides apply.
    d.parse_config_files()
    i = d.get_command_obj('install', create=True)
    # NOTE: setting user or home has the side-effect of creating the home dir
    # or user base for installations during finalize_options()
    # ideally, we'd prefer a scheme class that has no side-effects.
    assert not (user and prefix), "user={0} prefix={1}".format(user, prefix)
    i.user = user or i.user
    # A user install must clear any configured prefix before applying ours;
    # the two assignments below are order-sensitive.
    if user:
        i.prefix = ""
    i.prefix = prefix or i.prefix
    i.home = home or i.home
    i.root = root or i.root
    i.finalize_options()
    for key in SCHEME_KEYS:
        scheme[key] = getattr(i, 'install_' + key)

    # install_lib specified in setup.cfg should install *everything*
    # into there (i.e. it takes precedence over both purelib and
    # platlib).  Note, i.install_lib is *always* set after
    # finalize_options(); we only want to override here if the user
    # has explicitly requested it hence going back to the config
    if 'install_lib' in d.get_option_dict('install'):
        scheme.update(dict(purelib=i.install_lib, platlib=i.install_lib))

    if running_under_virtualenv():
        # distutils points headers at the venv's include dir layout.
        scheme['headers'] = os.path.join(
            sys.prefix,
            'include',
            'site',
            'python' + sys.version[:3],
            dist_name,
        )

        if root is not None:
            # Re-root the headers path, stripping any drive letter first
            # (Windows) so os.path.join does not discard *root*.
            path_no_drive = os.path.splitdrive(
                os.path.abspath(scheme["headers"]))[1]
            scheme["headers"] = os.path.join(
                root,
                path_no_drive[1:],
            )

    return scheme
basecommand.py000064400000027206151733136160007404 0ustar00"""Base Command class, and related routines"""
from __future__ import absolute_import

import logging
import os
import sys
import optparse
import warnings

from pip import cmdoptions
from pip.index import PackageFinder
from pip.locations import running_under_virtualenv
from pip.download import PipSession
from pip.exceptions import (BadCommand, InstallationError, UninstallationError,
                            CommandError, PreviousBuildDirError)

from pip.compat import logging_dictConfig
from pip.baseparser import ConfigOptionParser, UpdatingDefaultsHelpFormatter
from pip.req import InstallRequirement, parse_requirements
from pip.status_codes import (
    SUCCESS, ERROR, UNKNOWN_ERROR, VIRTUALENV_NOT_FOUND,
    PREVIOUS_BUILD_DIR_ERROR,
)
from pip.utils import deprecation, get_prog, normalize_path
from pip.utils.logging import IndentingFormatter
from pip.utils.outdated import pip_version_check


__all__ = ['Command']


logger = logging.getLogger(__name__)


class Command(object):
    """Base class for all pip commands.

    Subclasses set ``name`` / ``usage`` and implement
    ``run(options, args)``; ``main()`` wraps that with option parsing,
    logging configuration, environment plumbing and exit-status mapping.
    """

    name = None
    usage = None
    hidden = False
    # (stdout, stderr) targets for the console logging handlers below.
    log_streams = ("ext://sys.stdout", "ext://sys.stderr")

    def __init__(self, isolated=False):
        parser_kw = {
            'usage': self.usage,
            'prog': '%s %s' % (get_prog(), self.name),
            'formatter': UpdatingDefaultsHelpFormatter(),
            'add_help_option': False,
            'name': self.name,
            'description': self.__doc__,
            'isolated': isolated,
        }

        self.parser = ConfigOptionParser(**parser_kw)

        # Commands should add options to this option group
        optgroup_name = '%s Options' % self.name.capitalize()
        self.cmd_opts = optparse.OptionGroup(self.parser, optgroup_name)

        # Add the general options
        gen_opts = cmdoptions.make_option_group(
            cmdoptions.general_group,
            self.parser,
        )
        self.parser.add_option_group(gen_opts)

    def _build_session(self, options, retries=None, timeout=None):
        """Create a PipSession configured from the parsed options.

        :param retries: overrides ``options.retries`` when not None.
        :param timeout: overrides ``options.timeout`` when not None.
        """
        session = PipSession(
            cache=(
                normalize_path(os.path.join(options.cache_dir, "http"))
                if options.cache_dir else None
            ),
            retries=retries if retries is not None else options.retries,
            insecure_hosts=options.trusted_hosts,
        )

        # Handle custom ca-bundles from the user
        if options.cert:
            session.verify = options.cert

        # Handle SSL client certificate
        if options.client_cert:
            session.cert = options.client_cert

        # Handle timeouts
        if options.timeout or timeout:
            session.timeout = (
                timeout if timeout is not None else options.timeout
            )

        # Handle configured proxies
        if options.proxy:
            session.proxies = {
                "http": options.proxy,
                "https": options.proxy,
            }

        # Determine if we can prompt the user for authentication or not
        session.auth.prompting = not options.no_input

        return session

    def parse_args(self, args):
        # factored out for testability
        return self.parser.parse_args(args)

    def main(self, args):
        """Parse args, configure logging/environment, run the command, and
        map the outcome (return value or exception) to an exit status."""
        options, args = self.parse_args(args)

        # Map the -q/-v counts onto a console logging level.
        if options.quiet:
            if options.quiet == 1:
                level = "WARNING"
            elif options.quiet == 2:
                # BUGFIX: this branch used to be a second bare `if`, so a
                # single -q (quiet == 1) fell through to the `else` below
                # and silenced everything under CRITICAL.
                level = "ERROR"
            else:
                level = "CRITICAL"
        elif options.verbose:
            level = "DEBUG"
        else:
            level = "INFO"

        # The root logger should match the "console" level *unless* we
        # specified "--log" to send debug logs to a file.
        root_level = level
        if options.log:
            root_level = "DEBUG"

        logging_dictConfig({
            "version": 1,
            "disable_existing_loggers": False,
            "filters": {
                "exclude_warnings": {
                    "()": "pip.utils.logging.MaxLevelFilter",
                    "level": logging.WARNING,
                },
            },
            "formatters": {
                "indent": {
                    "()": IndentingFormatter,
                    "format": "%(message)s",
                },
            },
            "handlers": {
                # Below-WARNING records go to stdout, WARNING and above to
                # stderr, so shell redirection separates them cleanly.
                "console": {
                    "level": level,
                    "class": "pip.utils.logging.ColorizedStreamHandler",
                    "stream": self.log_streams[0],
                    "filters": ["exclude_warnings"],
                    "formatter": "indent",
                },
                "console_errors": {
                    "level": "WARNING",
                    "class": "pip.utils.logging.ColorizedStreamHandler",
                    "stream": self.log_streams[1],
                    "formatter": "indent",
                },
                "user_log": {
                    "level": "DEBUG",
                    "class": "pip.utils.logging.BetterRotatingFileHandler",
                    "filename": options.log or "/dev/null",
                    "delay": True,
                    "formatter": "indent",
                },
            },
            "root": {
                "level": root_level,
                "handlers": list(filter(None, [
                    "console",
                    "console_errors",
                    "user_log" if options.log else None,
                ])),
            },
            # Disable any logging besides WARNING unless we have DEBUG level
            # logging enabled. These use both pip._vendor and the bare names
            # for the case where someone unbundles our libraries.
            "loggers": dict(
                (
                    name,
                    {
                        "level": (
                            "WARNING"
                            if level in ["INFO", "ERROR"]
                            else "DEBUG"
                        ),
                    },
                )
                for name in ["pip._vendor", "distlib", "requests", "urllib3"]
            ),
        })

        if sys.version_info[:2] == (2, 6):
            warnings.warn(
                "Python 2.6 is no longer supported by the Python core team, "
                "please upgrade your Python. A future version of pip will "
                "drop support for Python 2.6",
                deprecation.Python26DeprecationWarning
            )

        # TODO: try to get these passing down from the command?
        #       without resorting to os.environ to hold these.

        if options.no_input:
            os.environ['PIP_NO_INPUT'] = '1'

        if options.exists_action:
            os.environ['PIP_EXISTS_ACTION'] = ' '.join(options.exists_action)

        if options.require_venv:
            # If a venv is required check if it can really be found
            if not running_under_virtualenv():
                logger.critical(
                    'Could not find an activated virtualenv (required).'
                )
                sys.exit(VIRTUALENV_NOT_FOUND)

        try:
            status = self.run(options, args)
            # FIXME: all commands should return an exit status
            # and when it is done, isinstance is not needed anymore
            if isinstance(status, int):
                return status
        except PreviousBuildDirError as exc:
            logger.critical(str(exc))
            logger.debug('Exception information:', exc_info=True)

            return PREVIOUS_BUILD_DIR_ERROR
        except (InstallationError, UninstallationError, BadCommand) as exc:
            logger.critical(str(exc))
            logger.debug('Exception information:', exc_info=True)

            return ERROR
        except CommandError as exc:
            logger.critical('ERROR: %s', exc)
            logger.debug('Exception information:', exc_info=True)

            return ERROR
        except KeyboardInterrupt:
            logger.critical('Operation cancelled by user')
            logger.debug('Exception information:', exc_info=True)

            return ERROR
        except:  # noqa
            # NOTE(review): deliberately bare so truly unexpected failures
            # (including BaseException subclasses) are logged and mapped to
            # UNKNOWN_ERROR rather than crashing with a traceback.
            logger.critical('Exception:', exc_info=True)

            return UNKNOWN_ERROR
        finally:
            # Check if we're using the latest version of pip available
            if (not options.disable_pip_version_check and not
                    getattr(options, "no_index", False)):
                with self._build_session(
                        options,
                        retries=0,
                        timeout=min(5, options.timeout)) as session:
                    pip_version_check(session)

        return SUCCESS


class RequirementCommand(Command):
    """Base class for commands that take requirement specifiers
    (install, download, wheel, ...)."""

    @staticmethod
    def populate_requirement_set(requirement_set, args, options, finder,
                                 session, name, wheel_cache):
        """
        Marshal cmd line args into a requirement set.

        Requirements are gathered, in order, from: constraint files
        (-c), positional args, editables (-e), and requirement files
        (-r).  Warns (does not raise) when nothing at all was specified.
        """
        # Constraint files first: they limit versions without requesting
        # installation themselves.
        for filename in options.constraints:
            for req in parse_requirements(
                    filename,
                    constraint=True, finder=finder, options=options,
                    session=session, wheel_cache=wheel_cache):
                requirement_set.add_requirement(req)

        # Positional requirement specifiers from the command line.
        for req in args:
            requirement_set.add_requirement(
                InstallRequirement.from_line(
                    req, None, isolated=options.isolated_mode,
                    wheel_cache=wheel_cache
                )
            )

        # -e/--editable requirements.
        for req in options.editables:
            requirement_set.add_requirement(
                InstallRequirement.from_editable(
                    req,
                    default_vcs=options.default_vcs,
                    isolated=options.isolated_mode,
                    wheel_cache=wheel_cache
                )
            )

        # -r/--requirement files; track whether any actually contained a
        # requirement so the "nothing specified" warning is accurate.
        found_req_in_file = False
        for filename in options.requirements:
            for req in parse_requirements(
                    filename,
                    finder=finder, options=options, session=session,
                    wheel_cache=wheel_cache):
                found_req_in_file = True
                requirement_set.add_requirement(req)
        # If --require-hashes was a line in a requirements file, tell
        # RequirementSet about it:
        requirement_set.require_hashes = options.require_hashes

        if not (args or options.editables or found_req_in_file):
            opts = {'name': name}
            if options.find_links:
                msg = ('You must give at least one requirement to '
                       '%(name)s (maybe you meant "pip %(name)s '
                       '%(links)s"?)' %
                       dict(opts, links=' '.join(options.find_links)))
            else:
                msg = ('You must give at least one requirement '
                       'to %(name)s (see "pip help %(name)s")' % opts)
            logger.warning(msg)

    def _build_package_finder(self, options, session,
                              platform=None, python_versions=None,
                              abi=None, implementation=None):
        """
        Create a package finder appropriate to this requirement command.

        :param platform/python_versions/abi/implementation: optional
            target-environment overrides, passed through to PackageFinder.
        """
        index_urls = [options.index_url] + options.extra_index_urls
        if options.no_index:
            # --no-index: only find_links locations will be searched.
            logger.debug('Ignoring indexes: %s', ','.join(index_urls))
            index_urls = []

        return PackageFinder(
            find_links=options.find_links,
            format_control=options.format_control,
            index_urls=index_urls,
            trusted_hosts=options.trusted_hosts,
            allow_all_prereleases=options.pre,
            process_dependency_links=options.process_dependency_links,
            session=session,
            platform=platform,
            versions=python_versions,
            abi=abi,
            implementation=implementation,
        )
req/req_set.py000064400000104036151733136160007361 0ustar00from __future__ import absolute_import

from collections import defaultdict
from itertools import chain
import logging
import os

from pip._vendor import pkg_resources
from pip._vendor import requests

from pip.compat import expanduser
from pip.download import (is_file_url, is_dir_url, is_vcs_url, url_to_path,
                          unpack_url)
from pip.exceptions import (InstallationError, BestVersionAlreadyInstalled,
                            DistributionNotFound, PreviousBuildDirError,
                            HashError, HashErrors, HashUnpinned,
                            DirectoryUrlHashUnsupported, VcsHashUnsupported,
                            UnsupportedPythonVersion)
from pip.req.req_install import InstallRequirement
from pip.utils import (
    display_path, dist_in_usersite, dist_in_install_path, ensure_dir,
    normalize_path)
from pip.utils.hashes import MissingHashes
from pip.utils.logging import indent_log
from pip.utils.packaging import check_dist_requires_python
from pip.vcs import vcs
from pip.wheel import Wheel

logger = logging.getLogger(__name__)


class Requirements(object):
    """Ordered mapping of requirement key -> requirement.

    Keys keep their first-insertion order; re-setting an existing key
    replaces the value but preserves the key's original position.
    """

    def __init__(self):
        self._keys = []
        self._dict = {}

    def keys(self):
        return self._keys

    def values(self):
        return [self._dict[k] for k in self._keys]

    def __contains__(self, item):
        return item in self._keys

    def __setitem__(self, key, value):
        if key not in self._keys:
            self._keys.append(key)
        self._dict[key] = value

    def __getitem__(self, key):
        return self._dict[key]

    def __repr__(self):
        pairs = ('%r: %r' % (k, self._dict[k]) for k in self._keys)
        return 'Requirements({%s})' % ', '.join(pairs)


class DistAbstraction(object):
    """Abstracts out the wheel vs non-wheel prepare_files logic.

    The requirements for anything installable are as follows:
     - we must be able to determine the requirement name
       (or we can't correctly handle the non-upgrade case).
     - we must be able to generate a list of run-time dependencies
       without installing any additional packages (or we would
       have to either burn time by doing temporary isolated installs
       or alternatively violate pips 'don't start installing unless
       all requirements are available' rule - neither of which are
       desirable).
     - for packages with setup requirements, we must also be able
       to determine their requirements without installing additional
       packages (for the same reason as run-time dependencies)
     - we must be able to create a Distribution object exposing the
       above metadata.
    """

    def __init__(self, req_to_install):
        # The InstallRequirement this abstraction wraps.
        self.req_to_install = req_to_install

    def dist(self, finder):
        """Return a setuptools Dist object."""
        raise NotImplementedError(self.dist)

    def prep_for_dist(self):
        """Ensure that we can get a Dist for this requirement."""
        # Fixed: previously referenced self.dist in the error, which
        # misidentified the unimplemented method.
        raise NotImplementedError(self.prep_for_dist)


def make_abstract_dist(req_to_install):
    """Factory to make an abstract dist object.

    Preconditions: Either an editable req with a source_dir, or satisfied_by or
    a wheel link, or a non-editable req with a source_dir.

    :return: A concrete DistAbstraction.
    """
    # Only a non-editable requirement pointing at a wheel gets the wheel
    # treatment; everything else (editables included) is handled as an sdist.
    is_wheel_req = (not req_to_install.editable and
                    req_to_install.link and req_to_install.link.is_wheel)
    if is_wheel_req:
        return IsWheel(req_to_install)
    return IsSDist(req_to_install)


class IsWheel(DistAbstraction):
    """Dist abstraction for a requirement backed by an unpacked wheel."""

    def dist(self, finder):
        # The unpacked wheel directory contains exactly one distribution;
        # return the first one pkg_resources discovers there.
        found = pkg_resources.find_distributions(
            self.req_to_install.source_dir)
        return list(found)[0]

    def prep_for_dist(self):
        # FIXME:https://github.com/pypa/pip/issues/1112
        pass


class IsSDist(DistAbstraction):
    """Dist abstraction for a requirement backed by a source distribution."""

    def dist(self, finder):
        dist = self.req_to_install.get_dist()
        # FIXME: shouldn't be globally added:
        if dist.has_metadata('dependency_links.txt'):
            links = dist.get_metadata_lines('dependency_links.txt')
            finder.add_dependency_links(links)
        return dist

    def prep_for_dist(self):
        # Generate egg-info metadata, then check the unpacked source tree
        # actually matches the version we expect to install.
        self.req_to_install.run_egg_info()
        self.req_to_install.assert_source_matches_version()


class Installed(DistAbstraction):
    """Dist abstraction for a requirement that is already installed."""

    def dist(self, finder):
        # satisfied_by was recorded on the requirement earlier (by
        # check_if_exists); simply hand that distribution back.
        return self.req_to_install.satisfied_by

    def prep_for_dist(self):
        # Nothing to prepare -- the package is already on disk.
        pass


class RequirementSet(object):
    """The set of InstallRequirements a pip command operates on.

    Collects named and unnamed requirements plus constraints, prepares them
    (download/unpack/metadata extraction), discovers their dependencies, and
    installs the result in topological order.
    """

    def __init__(self, build_dir, src_dir, download_dir, upgrade=False,
                 upgrade_strategy=None, ignore_installed=False, as_egg=False,
                 target_dir=None, ignore_dependencies=False,
                 force_reinstall=False, use_user_site=False, session=None,
                 pycompile=True, isolated=False, wheel_download_dir=None,
                 wheel_cache=None, require_hashes=False,
                 ignore_requires_python=False):
        """Create a RequirementSet.

        :param wheel_download_dir: Where still-packed .whl files should be
            written to. If None they are written to the download_dir parameter.
            Separate to download_dir to permit only keeping wheel archives for
            pip wheel.
        :param download_dir: Where still packed archives should be written to.
            If None they are not saved, and are deleted immediately after
            unpacking.
        :param wheel_cache: The pip wheel cache, for passing to
            InstallRequirement.
        """
        if session is None:
            raise TypeError(
                "RequirementSet() missing 1 required keyword argument: "
                "'session'"
            )

        self.build_dir = build_dir
        self.src_dir = src_dir
        # XXX: download_dir and wheel_download_dir overlap semantically and may
        # be combined if we're willing to have non-wheel archives present in
        # the wheelhouse output by 'pip wheel'.
        self.download_dir = download_dir
        self.upgrade = upgrade
        self.upgrade_strategy = upgrade_strategy
        self.ignore_installed = ignore_installed
        self.force_reinstall = force_reinstall
        self.requirements = Requirements()
        # Mapping of alias: real_name
        self.requirement_aliases = {}
        # Requirements without a project name (bare URL/path, no egg fragment).
        self.unnamed_requirements = []
        self.ignore_dependencies = ignore_dependencies
        self.ignore_requires_python = ignore_requires_python
        self.successfully_downloaded = []
        self.successfully_installed = []
        # Requirements whose temporary source dirs need cleanup afterwards.
        self.reqs_to_cleanup = []
        self.as_egg = as_egg
        self.use_user_site = use_user_site
        self.target_dir = target_dir  # set from --target option
        self.session = session
        self.pycompile = pycompile
        self.isolated = isolated
        if wheel_download_dir:
            wheel_download_dir = normalize_path(wheel_download_dir)
        self.wheel_download_dir = wheel_download_dir
        self._wheel_cache = wheel_cache
        self.require_hashes = require_hashes
        # Maps from install_req -> dependencies_of_install_req
        self._dependencies = defaultdict(list)

    def __str__(self):
        # Only user-supplied (top-level) requirements, i.e. those that do
        # not "come from" another requirement.
        reqs = [req for req in self.requirements.values()
                if not req.comes_from]
        reqs.sort(key=lambda req: req.name.lower())
        return ' '.join([str(req.req) for req in reqs])

    def __repr__(self):
        # Unlike __str__, the repr includes every named requirement.
        reqs = [req for req in self.requirements.values()]
        reqs.sort(key=lambda req: req.name.lower())
        reqs_str = ', '.join([str(req.req) for req in reqs])
        return ('<%s object; %d requirement(s): %s>'
                % (self.__class__.__name__, len(reqs), reqs_str))

    def add_requirement(self, install_req, parent_req_name=None,
                        extras_requested=None):
        """Add install_req as a requirement to install.

        :param parent_req_name: The name of the requirement that needed this
            added. The name is used because when multiple unnamed requirements
            resolve to the same name, we could otherwise end up with dependency
            links that point outside the Requirements set. parent_req must
            already be added. Note that None implies that this is a user
            supplied requirement, vs an inferred one.
        :param extras_requested: an iterable of extras used to evaluate the
            environement markers.
        :return: Additional requirements to scan. That is either [] if
            the requirement is not applicable, or [install_req] if the
            requirement is applicable and has just been added.
        """
        name = install_req.name
        if not install_req.match_markers(extras_requested):
            logger.warning("Ignoring %s: markers '%s' don't match your "
                           "environment", install_req.name,
                           install_req.markers)
            return []

        # This check has to come after we filter requirements with the
        # environment markers.
        if install_req.link and install_req.link.is_wheel:
            wheel = Wheel(install_req.link.filename)
            if not wheel.supported():
                raise InstallationError(
                    "%s is not a supported wheel on this platform." %
                    wheel.filename
                )

        install_req.as_egg = self.as_egg
        install_req.use_user_site = self.use_user_site
        install_req.target_dir = self.target_dir
        install_req.pycompile = self.pycompile
        install_req.is_direct = (parent_req_name is None)

        if not name:
            # url or path requirement w/o an egg fragment
            self.unnamed_requirements.append(install_req)
            return [install_req]
        else:
            try:
                existing_req = self.get_requirement(name)
            except KeyError:
                existing_req = None
            # A second user-supplied requirement for the same project with a
            # different specifier (and identical extras) is a conflict.
            if (parent_req_name is None and existing_req and not
                    existing_req.constraint and
                    existing_req.extras == install_req.extras and not
                    existing_req.req.specifier == install_req.req.specifier):
                raise InstallationError(
                    'Double requirement given: %s (already in %s, name=%r)'
                    % (install_req, existing_req, name))
            if not existing_req:
                # Add requirement
                self.requirements[name] = install_req
                # FIXME: what about other normalizations?  E.g., _ vs. -?
                if name.lower() != name:
                    self.requirement_aliases[name.lower()] = name
                result = [install_req]
            else:
                # Assume there's no need to scan, and that we've already
                # encountered this for scanning.
                result = []
                if not install_req.constraint and existing_req.constraint:
                    if (install_req.link and not (existing_req.link and
                       install_req.link.path == existing_req.link.path)):
                        self.reqs_to_cleanup.append(install_req)
                        raise InstallationError(
                            "Could not satisfy constraints for '%s': "
                            "installation from path or url cannot be "
                            "constrained to a version" % name)
                    # If we're now installing a constraint, mark the existing
                    # object for real installation.
                    existing_req.constraint = False
                    existing_req.extras = tuple(
                        sorted(set(existing_req.extras).union(
                               set(install_req.extras))))
                    logger.debug("Setting %s extras to: %s",
                                 existing_req, existing_req.extras)
                    # And now we need to scan this.
                    result = [existing_req]
                # Canonicalise to the already-added object for the backref
                # check below.
                install_req = existing_req
            if parent_req_name:
                parent_req = self.get_requirement(parent_req_name)
                self._dependencies[parent_req].append(install_req)
            return result

    def has_requirement(self, project_name):
        """Return True if a non-constraint requirement with this name (or a
        lower-case alias of it) has been added."""
        name = project_name.lower()
        # Note: 'and' binds tighter than 'or', so each of the two name forms
        # is checked together with its own constraint flag.
        if (name in self.requirements and
           not self.requirements[name].constraint or
           name in self.requirement_aliases and
           not self.requirements[self.requirement_aliases[name]].constraint):
            return True
        return False

    @property
    def has_requirements(self):
        """Truthy when at least one non-constraint requirement (named or
        unnamed) remains to process."""
        return list(req for req in self.requirements.values() if not
                    req.constraint) or self.unnamed_requirements

    @property
    def is_download(self):
        """True when a download directory is configured and exists.

        Side effect: expands '~' in download_dir on first access. Raises
        InstallationError when the directory cannot be found.
        """
        if self.download_dir:
            self.download_dir = expanduser(self.download_dir)
            if os.path.exists(self.download_dir):
                return True
            else:
                logger.critical('Could not find download directory')
                raise InstallationError(
                    "Could not find or access download directory '%s'"
                    % display_path(self.download_dir))
        return False

    def get_requirement(self, project_name):
        """Look up an added requirement by exact name, lower-cased name, or
        via the alias table. Raises KeyError when not found."""
        for name in project_name, project_name.lower():
            if name in self.requirements:
                return self.requirements[name]
            if name in self.requirement_aliases:
                return self.requirements[self.requirement_aliases[name]]
        raise KeyError("No project with the name %r" % project_name)

    def uninstall(self, auto_confirm=False):
        """Uninstall (and commit the uninstall of) every non-constraint
        requirement in the set."""
        for req in self.requirements.values():
            if req.constraint:
                continue
            req.uninstall(auto_confirm=auto_confirm)
            req.commit_uninstall()

    def prepare_files(self, finder):
        """
        Prepare process. Create temp directories, download and/or unpack files.
        """
        # make the wheelhouse
        if self.wheel_download_dir:
            ensure_dir(self.wheel_download_dir)

        # If any top-level requirement has a hash specified, enter
        # hash-checking mode, which requires hashes from all.
        # (Requirements.values() returns a plain list, so '+' concatenates.)
        root_reqs = self.unnamed_requirements + self.requirements.values()
        require_hashes = (self.require_hashes or
                          any(req.has_hash_options for req in root_reqs))
        if require_hashes and self.as_egg:
            raise InstallationError(
                '--egg is not allowed with --require-hashes mode, since it '
                'delegates dependency resolution to setuptools and could thus '
                'result in installation of unhashed packages.')

        # Actually prepare the files, and collect any exceptions. Most hash
        # exceptions cannot be checked ahead of time, because
        # req.populate_link() needs to be called before we can make decisions
        # based on link type.
        # Note: discovered_reqs grows while the chain() below is iterated, so
        # newly discovered dependencies are handled in this same loop.
        discovered_reqs = []
        hash_errors = HashErrors()
        for req in chain(root_reqs, discovered_reqs):
            try:
                discovered_reqs.extend(self._prepare_file(
                    finder,
                    req,
                    require_hashes=require_hashes,
                    ignore_dependencies=self.ignore_dependencies))
            except HashError as exc:
                exc.req = req
                hash_errors.append(exc)

        if hash_errors:
            raise hash_errors

    def _is_upgrade_allowed(self, req):
        """Whether req may be upgraded: requires --upgrade plus either the
        'eager' strategy or ('only-if-needed' and a direct requirement)."""
        return self.upgrade and (
            self.upgrade_strategy == "eager" or (
                self.upgrade_strategy == "only-if-needed" and req.is_direct
            )
        )

    def _check_skip_installed(self, req_to_install, finder):
        """Check if req_to_install should be skipped.

        This will check if the req is installed, and whether we should upgrade
        or reinstall it, taking into account all the relevant user options.

        After calling this req_to_install will only have satisfied_by set to
        None if the req_to_install is to be upgraded/reinstalled etc. Any
        other value will be a dist recording the current thing installed that
        satisfies the requirement.

        Note that for vcs urls and the like we can't assess skipping in this
        routine - we simply identify that we need to pull the thing down,
        then later on it is pulled down and introspected to assess upgrade/
        reinstalls etc.

        :return: A text reason for why it was skipped, or None.
        """
        # Check whether to upgrade/reinstall this req or not.
        req_to_install.check_if_exists()
        if req_to_install.satisfied_by:
            upgrade_allowed = self._is_upgrade_allowed(req_to_install)

            # Is the best version is installed.
            best_installed = False

            if upgrade_allowed:
                # For link based requirements we have to pull the
                # tree down and inspect to assess the version #, so
                # its handled way down.
                if not (self.force_reinstall or req_to_install.link):
                    try:
                        finder.find_requirement(
                            req_to_install, upgrade_allowed)
                    except BestVersionAlreadyInstalled:
                        best_installed = True
                    except DistributionNotFound:
                        # No distribution found, so we squash the
                        # error - it will be raised later when we
                        # re-try later to do the install.
                        # Why don't we just raise here?
                        pass

                if not best_installed:
                    # don't uninstall conflict if user install and
                    # conflict is not user install or conflict lives
                    # in a different path (/usr/lib vs /usr/local/lib/)
                    if not (self.use_user_site and not
                            dist_in_usersite(req_to_install.satisfied_by) or not
                            dist_in_install_path(req_to_install.satisfied_by)):
                        req_to_install.conflicts_with = \
                            req_to_install.satisfied_by
                    req_to_install.satisfied_by = None

            # Figure out a nice message to say why we're skipping this.
            if best_installed:
                skip_reason = 'already up-to-date'
            elif self.upgrade_strategy == "only-if-needed":
                skip_reason = 'not upgraded as not directly required'
            else:
                skip_reason = 'already satisfied'

            return skip_reason
        else:
            return None

    def _prepare_file(self,
                      finder,
                      req_to_install,
                      require_hashes=False,
                      ignore_dependencies=False):
        """Prepare a single requirements file.

        :return: A list of additional InstallRequirements to also install.
        """
        # Tell user what we are doing for this requirement:
        # obtain (editable), skipping, processing (local url), collecting
        # (remote url or package name)
        if req_to_install.constraint or req_to_install.prepared:
            return []

        req_to_install.prepared = True

        # ###################### #
        # # print log messages # #
        # ###################### #
        if req_to_install.editable:
            logger.info('Obtaining %s', req_to_install)
        else:
            # satisfied_by is only evaluated by calling _check_skip_installed,
            # so it must be None here.
            assert req_to_install.satisfied_by is None
            if not self.ignore_installed:
                skip_reason = self._check_skip_installed(
                    req_to_install, finder)

            if req_to_install.satisfied_by:
                assert skip_reason is not None, (
                    '_check_skip_installed returned None but '
                    'req_to_install.satisfied_by is set to %r'
                    % (req_to_install.satisfied_by,))
                logger.info(
                    'Requirement %s: %s', skip_reason,
                    req_to_install)
            else:
                if (req_to_install.link and
                        req_to_install.link.scheme == 'file'):
                    path = url_to_path(req_to_install.link.url)
                    logger.info('Processing %s', display_path(path))
                else:
                    logger.info('Collecting %s', req_to_install)

        with indent_log():
            # ################################ #
            # # vcs update or unpack archive # #
            # ################################ #
            if req_to_install.editable:
                if require_hashes:
                    raise InstallationError(
                        'The editable requirement %s cannot be installed when '
                        'requiring hashes, because there is no single file to '
                        'hash.' % req_to_install)
                req_to_install.ensure_has_source_dir(self.src_dir)
                req_to_install.update_editable(not self.is_download)
                abstract_dist = make_abstract_dist(req_to_install)
                abstract_dist.prep_for_dist()
                if self.is_download:
                    req_to_install.archive(self.download_dir)
                req_to_install.check_if_exists()
            elif req_to_install.satisfied_by:
                if require_hashes:
                    logger.debug(
                        'Since it is already installed, we are trusting this '
                        'package without checking its hash. To ensure a '
                        'completely repeatable environment, install into an '
                        'empty virtualenv.')
                abstract_dist = Installed(req_to_install)
            else:
                # @@ if filesystem packages are not marked
                # editable in a req, a non deterministic error
                # occurs when the script attempts to unpack the
                # build directory
                req_to_install.ensure_has_source_dir(self.build_dir)
                # If a checkout exists, it's unwise to keep going.  version
                # inconsistencies are logged later, but do not fail the
                # installation.
                # FIXME: this won't upgrade when there's an existing
                # package unpacked in `req_to_install.source_dir`
                if os.path.exists(
                        os.path.join(req_to_install.source_dir, 'setup.py')):
                    raise PreviousBuildDirError(
                        "pip can't proceed with requirements '%s' due to a"
                        " pre-existing build directory (%s). This is "
                        "likely due to a previous installation that failed"
                        ". pip is being responsible and not assuming it "
                        "can delete this. Please delete it and try again."
                        % (req_to_install, req_to_install.source_dir)
                    )
                req_to_install.populate_link(
                    finder,
                    self._is_upgrade_allowed(req_to_install),
                    require_hashes
                )
                # We can't hit this spot and have populate_link return None.
                # req_to_install.satisfied_by is None here (because we're
                # guarded) and upgrade has no impact except when satisfied_by
                # is not None.
                # Then inside find_requirement existing_applicable -> False
                # If no new versions are found, DistributionNotFound is raised,
                # otherwise a result is guaranteed.
                assert req_to_install.link
                link = req_to_install.link

                # Now that we have the real link, we can tell what kind of
                # requirements we have and raise some more informative errors
                # than otherwise. (For example, we can raise VcsHashUnsupported
                # for a VCS URL rather than HashMissing.)
                if require_hashes:
                    # We could check these first 2 conditions inside
                    # unpack_url and save repetition of conditions, but then
                    # we would report less-useful error messages for
                    # unhashable requirements, complaining that there's no
                    # hash provided.
                    if is_vcs_url(link):
                        raise VcsHashUnsupported()
                    elif is_file_url(link) and is_dir_url(link):
                        raise DirectoryUrlHashUnsupported()
                    if (not req_to_install.original_link and
                            not req_to_install.is_pinned):
                        # Unpinned packages are asking for trouble when a new
                        # version is uploaded. This isn't a security check, but
                        # it saves users a surprising hash mismatch in the
                        # future.
                        #
                        # file:/// URLs aren't pinnable, so don't complain
                        # about them not being pinned.
                        raise HashUnpinned()
                hashes = req_to_install.hashes(
                    trust_internet=not require_hashes)
                if require_hashes and not hashes:
                    # Known-good hashes are missing for this requirement, so
                    # shim it with a facade object that will provoke hash
                    # computation and then raise a HashMissing exception
                    # showing the user what the hash should be.
                    hashes = MissingHashes()

                try:
                    download_dir = self.download_dir
                    # We always delete unpacked sdists after pip ran.
                    autodelete_unpacked = True
                    if req_to_install.link.is_wheel \
                            and self.wheel_download_dir:
                        # when doing 'pip wheel` we download wheels to a
                        # dedicated dir.
                        download_dir = self.wheel_download_dir
                    if req_to_install.link.is_wheel:
                        if download_dir:
                            # When downloading, we only unpack wheels to get
                            # metadata.
                            autodelete_unpacked = True
                        else:
                            # When installing a wheel, we use the unpacked
                            # wheel.
                            autodelete_unpacked = False
                    unpack_url(
                        req_to_install.link, req_to_install.source_dir,
                        download_dir, autodelete_unpacked,
                        session=self.session, hashes=hashes)
                except requests.HTTPError as exc:
                    logger.critical(
                        'Could not install requirement %s because '
                        'of error %s',
                        req_to_install,
                        exc,
                    )
                    raise InstallationError(
                        'Could not install requirement %s because '
                        'of HTTP error %s for URL %s' %
                        (req_to_install, exc, req_to_install.link)
                    )
                abstract_dist = make_abstract_dist(req_to_install)
                abstract_dist.prep_for_dist()
                if self.is_download:
                    # Make a .zip of the source_dir we already created.
                    if req_to_install.link.scheme in vcs.all_schemes:
                        req_to_install.archive(self.download_dir)
                # req_to_install.req is only avail after unpack for URL
                # pkgs repeat check_if_exists to uninstall-on-upgrade
                # (#14)
                if not self.ignore_installed:
                    req_to_install.check_if_exists()
                if req_to_install.satisfied_by:
                    if self.upgrade or self.ignore_installed:
                        # don't uninstall conflict if user install and
                        # conflict is not user install or conflict lives
                        # in a different path (/usr/lib vs /usr/local/lib/)
                        if not (self.use_user_site and not
                                dist_in_usersite(
                                    req_to_install.satisfied_by) or not
                                dist_in_install_path(req_to_install.satisfied_by)):
                            req_to_install.conflicts_with = \
                                req_to_install.satisfied_by
                        req_to_install.satisfied_by = None
                    else:
                        logger.info(
                            'Requirement already satisfied (use '
                            '--upgrade to upgrade): %s',
                            req_to_install,
                        )

            # ###################### #
            # # parse dependencies # #
            # ###################### #
            dist = abstract_dist.dist(finder)
            try:
                check_dist_requires_python(dist)
            except UnsupportedPythonVersion as e:
                if self.ignore_requires_python:
                    logger.warning(e.args[0])
                else:
                    req_to_install.remove_temporary_source()
                    raise
            more_reqs = []

            def add_req(subreq, extras_requested):
                # Wrap a discovered dependency as an InstallRequirement and
                # register it, collecting anything new to scan.
                sub_install_req = InstallRequirement(
                    str(subreq),
                    req_to_install,
                    isolated=self.isolated,
                    wheel_cache=self._wheel_cache,
                )
                more_reqs.extend(self.add_requirement(
                    sub_install_req, req_to_install.name,
                    extras_requested=extras_requested))

            # We add req_to_install before its dependencies, so that we
            # can refer to it when adding dependencies.
            if not self.has_requirement(req_to_install.name):
                # 'unnamed' requirements will get added here
                self.add_requirement(req_to_install, None)

            if not ignore_dependencies:
                if (req_to_install.extras):
                    logger.debug(
                        "Installing extra requirements: %r",
                        ','.join(req_to_install.extras),
                    )
                missing_requested = sorted(
                    set(req_to_install.extras) - set(dist.extras)
                )
                for missing in missing_requested:
                    logger.warning(
                        '%s does not provide the extra \'%s\'',
                        dist, missing
                    )

                available_requested = sorted(
                    set(dist.extras) & set(req_to_install.extras)
                )
                for subreq in dist.requires(available_requested):
                    add_req(subreq, extras_requested=available_requested)

            # cleanup tmp src
            self.reqs_to_cleanup.append(req_to_install)

            if not req_to_install.editable and not req_to_install.satisfied_by:
                # XXX: --no-install leads this to report 'Successfully
                # downloaded' for only non-editable reqs, even though we took
                # action on them.
                self.successfully_downloaded.append(req_to_install)

        return more_reqs

    def cleanup_files(self):
        """Clean up files, remove builds."""
        logger.debug('Cleaning up...')
        with indent_log():
            for req in self.reqs_to_cleanup:
                req.remove_temporary_source()

    def _to_install(self):
        """Create the installation order.

        The installation order is topological - requirements are installed
        before the requiring thing. We break cycles at an arbitrary point,
        and make no other guarantees.
        """
        # The current implementation, which we may change at any point
        # installs the user specified things in the order given, except when
        # dependencies must come earlier to achieve topological order.
        order = []
        ordered_reqs = set()

        def schedule(req):
            # Depth-first walk over _dependencies; ordered_reqs doubles as
            # the visited set, which is what breaks dependency cycles.
            if req.satisfied_by or req in ordered_reqs:
                return
            if req.constraint:
                return
            ordered_reqs.add(req)
            for dep in self._dependencies[req]:
                schedule(dep)
            order.append(req)
        for install_req in self.requirements.values():
            schedule(install_req)
        return order

    def install(self, install_options, global_options=(), *args, **kwargs):
        """
        Install everything in this set (after having downloaded and unpacked
        the packages)
        """
        to_install = self._to_install()

        if to_install:
            logger.info(
                'Installing collected packages: %s',
                ', '.join([req.name for req in to_install]),
            )

        with indent_log():
            for requirement in to_install:
                if requirement.conflicts_with:
                    logger.info(
                        'Found existing installation: %s',
                        requirement.conflicts_with,
                    )
                    with indent_log():
                        requirement.uninstall(auto_confirm=True)
                try:
                    requirement.install(
                        install_options,
                        global_options,
                        *args,
                        **kwargs
                    )
                except:
                    # if install did not succeed, rollback previous uninstall
                    # NOTE(review): bare except also catches KeyboardInterrupt/
                    # SystemExit so the rollback runs before re-raising.
                    if (requirement.conflicts_with and not
                            requirement.install_succeeded):
                        requirement.rollback_uninstall()
                    raise
                else:
                    if (requirement.conflicts_with and
                            requirement.install_succeeded):
                        requirement.commit_uninstall()
                requirement.remove_temporary_source()

        self.successfully_installed = to_install
req/__pycache__/req_file.cpython-36.opt-1.pyc000064400000020322151733136160014723 0ustar003

�Pf�.�@sxdZddlmZddlZddlZddlZddlZddlZddlZddl	m
Zddlm
Z
ddlZddlmZddlmZddlmZdd	lmZdd
lmZdgZejdej�Zejd
�Zejejejej ej!ej"ej#ej$ej%ej&ej'ej(ej)ej*ej+ej,ej-ej.ej/ej0ej1gZ2ej3ej4ej5gZ6dd�e6D�Z7d dd�Z8dd�Z9d!dd�Z:dd�Z;dd�Z<dd�Z=dd�Z>dd�Z?dS)"z
Requirements file parsing
�)�absolute_importN)�parse)�filterfalse)�get_file_content)�InstallRequirement)�RequirementsFileParseError)�RemovedInPip10Warning)�
cmdoptions�parse_requirementsz^(http|https|file):z(^|\s)+#.*$cCsg|]}|�j�qS�)�dest)�.0�orr�/usr/lib/python3.6/req_file.py�
<listcomp>=srFccsp|dkrtd��t|||d�\}}t||�}	x>|	D]6\}
}t|||
||||||d�	}x|D]
}
|
VqZWq2WdS)a�Parse a requirements file and yield InstallRequirement instances.

    :param filename:    Path or url of requirements file.
    :param finder:      Instance of pip.index.PackageFinder.
    :param comes_from:  Origin description of requirements.
    :param options:     cli options.
    :param session:     Instance of pip.download.PipSession.
    :param constraint:  If true, parsing a constraint file rather than
        requirements file.
    :param wheel_cache: Instance of pip.wheel.WheelCache
    NzCparse_requirements() missing 1 required keyword argument: 'session')�
comes_from�session)�
constraint)�	TypeErrorr�
preprocess�process_line)�filename�finderr�optionsrr�wheel_cache�_�content�
lines_enum�line_number�lineZreq_iter�reqrrrr
@s



cCs.t|j�dd�}t|�}t|�}t||�}|S)z�Split, filter, and join lines, and return a line iterator

    :param content: the content of the requirements file
    :param options: cli options
    �)�start)�	enumerate�
splitlines�
join_lines�ignore_comments�
skip_regex)rrrrrrras

rc		cs
t�}	|	j�}
d|
_|r |j|
_t|�\}}tjdkr@|jd�}|	jt	j
|�|
�\}
}d|r`dnd||f}|r�|rz|jnd	}|r�tj
||
�i}x.tD]&}||
jkr�|
j|r�|
j|||<q�Wtj||||||d
�V�n(|
j�r&|r�|jnd	}|�r|jnd}tj|
jd|||||d�V�n�|
j�s6|
j�r�|
j�rN|
jd}d	}n|
jd}d
}tj|��rvtj||�}n"tj|��s�tjjtjj|�|�}t|||||||d�}	x|	D]}|V�q�W�n>|
j �r�|
j |_ �n*|�r|
j!�r�t"j#dt$�|
j%�r
t"j#dt$�|
j&�rt"j#dt$�|
j�r0|
jg|_'|
j(d	k�rPd	|_(t)j*j+|j�|
j,d
k�rbg|_'|
j-�rx|j'j.|
j-�|
j/�r�|
j/d}tjjtjj0|��}tjj||�}tjj1|��r�|}|j/j2|�|
j3�r�d
|_4|
j5�r�d
|_5|
j6�r|j7j.dd�|
j6D��dS)a#Process a single requirements line; This can result in creating/yielding
    requirements, or updating the finder.

    For lines that contain requirements, the only options that have an effect
    are from SUPPORTED_OPTIONS_REQ, and they are scoped to the
    requirement. Other options from SUPPORTED_OPTIONS may be present, but are
    ignored.

    For lines that do not contain requirements, the only options that have an
    effect are from SUPPORTED_OPTIONS. Options from SUPPORTED_OPTIONS_REQ may
    be present, but are ignored. These lines may contain multiple options
    (although our docs imply only one is supported), and all our parsed and
    affect the finder.

    :param constraint: If True, parsing a constraints file.
    :param options: OptionParser options that we may update
    N����utf8z%s %s (line %s)z-cz-rF)r�isolatedrrr)rr�default_vcsr,rT)rrz�--allow-external has been deprecated and will be removed in the future. Due to changes in the repository protocol, it no longer has any effect.z�--allow-all-external has been deprecated and will be removed in the future. Due to changes in the repository protocol, it no longer has any effect.z�--allow-unverified has been deprecated and will be removed in the future. Due to changes in the repository protocol, it no longer has any effect.css|]}d|dfVqdS)�*Nr)r
�hostrrr�	<genexpr>�szprocess_line.<locals>.<genexpr>)r(r)r*)8�build_parserZget_default_values�	index_urlZformat_control�break_args_options�sys�version_info�encode�
parse_args�shlex�splitZ
isolated_moder	Zcheck_install_build_global�SUPPORTED_OPTIONS_REQ_DEST�__dict__rZ	from_lineZ	editablesr-Z
from_editable�requirements�constraints�	SCHEME_RE�search�urllib_parseZurljoin�os�path�join�dirnamer
�require_hashes�allow_external�warnings�warnr�allow_all_externalZallow_unverifiedZ
index_urls�	use_wheel�pip�indexZfmt_ctl_no_use_wheel�no_indexZextra_index_urls�extend�
find_links�abspath�exists�append�preZallow_all_prereleases�process_dependency_linksZ
trusted_hostsZsecure_origins)rrrrrrrrr�parserZdefaultsZargs_strZoptions_strZoptsrZline_comes_fromr,Zreq_optionsrr-Zreq_pathZnested_constraintr �valueZreq_dirZrelative_to_reqs_filerrrrns�










rcCsh|jd�}g}|dd�}x8|D]0}|jd�s8|jd�r<Pq |j|�|jd�q Wdj|�dj|�fS)z�Break up the line into an args and options string.  We only want to shlex
    (and then optparse) the options, not the args.  args can contain markers
    which are corrupted by shlex.
    � N�-z--r)r9�
startswithrR�poprC)r�tokens�argsr�tokenrrrr3�s


r3cCsDtjdd�}tt}x|D]}|�}|j|�qWdd�}||_|S)z7
    Return a parser for parsing requirement lines
    F)Zadd_help_optioncSst|��dS)N)r)�self�msgrrr�parser_exitsz!build_parser.<locals>.parser_exit)�optparseZOptionParser�SUPPORTED_OPTIONS�SUPPORTED_OPTIONS_REQZ
add_option�exit)rUZoption_factoriesZoption_factoryZoptionr`rrrr1s
r1ccs�d}g}x�|D]x\}}|jd�s,tj|�rntj|�r>d|}|rb|j|�|dj|�fVg}q�||fVq|sv|}|j|jd��qW|r�|dj|�fVdS)z�Joins a line ending in '' with the previous line (except when following
    comments).  The joined line takes on the index of the first line.
    N�\rW�)�endswith�
COMMENT_RE�matchrRrC�strip)rZprimary_line_numberZnew_linerrrrrr%"s 

r%ccs8x2|D]*\}}tjd|�}|j�}|r||fVqWdS)z1
    Strips comments and filter empty lines.
    rfN)rh�subrj)rrrrrrr&?s
r&cs2|r
|jnd}|r.tj|��t�fdd�|�}|S)zs
    Skip lines that match '--skip-requirements-regex' pattern

    Note: the regex pattern is only built once
    Ncs�j|d�S)Nr!)r?)�e)�patternrr�<lambda>Tszskip_regex.<locals>.<lambda>)Zskip_requirements_regex�re�compiler)rrr'r)rmrr'Js

r')NNNNFN)NNNNNF)@�__doc__Z
__future__rrAror8r4rarGZpip._vendor.six.moves.urllibrr@Zpip._vendor.six.movesrrKZpip.downloadrZpip.req.req_installrZpip.exceptionsrZpip.utils.deprecationrr	�__all__rp�Ir>rhr=Zeditabler<rMr2rOZextra_index_urlrFrIZno_allow_externalZallow_unsafeZno_allow_unsaferJZno_use_wheelZalways_unzipZ	no_binaryZonly_binaryrSrTZtrusted_hostrErbZinstall_optionsZglobal_options�hashrcr:r
rrr3r1r%r&r'rrrr�<module>sn

 

req/__pycache__/req_uninstall.cpython-36.opt-1.pyc000064400000014171151733136160016022 0ustar003

�Pf��@s�ddlmZddlZddlZddlZddlmZmZmZddl	m
Z
ddlmZm
Z
mZmZmZddlmZeje�ZGdd�de�ZGd	d
�d
e�ZdS)�)�absolute_importN)�uses_pycache�WINDOWS�cache_from_source)�UninstallationError)�rmtree�ask�is_local�renames�normalize_path)�
indent_logc@sZeZdZdZdd�Zdd�Zdd�Zdd	�Zd
d�Zdd
�Z	ddd�Z
dd�Zdd�ZdS)�UninstallPathSetzMA set of file paths to be removed in the uninstallation of a
    requirement.cCs,t�|_t�|_i|_||_d|_g|_dS)N)�set�paths�_refuse�pth�dist�save_dir�_moved_paths)�selfr�r�#/usr/lib/python3.6/req_uninstall.py�__init__szUninstallPathSet.__init__cCst|�S)zs
        Return True if the given path is one we are permitted to
        remove/modify, False otherwise.

        )r	)r�pathrrr�
_permittedszUninstallPathSet._permittedcCs�tjj|�\}}tjjt|�tjj|��}tjj|�s:dS|j|�rR|jj	|�n|j
j	|�tjj|�ddkr�tr�|j	t
|��dS)N�z.py)�osr�split�joinr�normcase�existsrr�addr�splitextrr)rr�head�tailrrrr!#s
zUninstallPathSet.addcCsLt|�}|j|�r<||jkr*t|�|j|<|j|j|�n|jj|�dS)N)rrr�UninstallPthEntriesr!r)r�pth_file�entryrrr�add_pth6s

zUninstallPathSet.add_pthcs@t�}x4t|td�D]$�t�fdd�|D��s|j��qW|S)z�Compact a path set to contain the minimal number of paths
        necessary to contain all paths in the set. If /a/path/ and
        /a/path/to/a/file.txt are both in the set, leave only the
        shorter path.)�keycs4g|],}�j|�o.�t|jtjj��tjjk�qSr)�
startswith�len�rstriprr�sep)�.0Z	shortpath)rrr�
<listcomp>Gsz,UninstallPathSet.compact.<locals>.<listcomp>)r�sortedr+�anyr!)rrZshort_pathsr)rr�compact?s

zUninstallPathSet.compactcCs&tjj|jtjj|�djtjj��S)Nr)rrrr�
splitdrive�lstripr-)rrrrr�_stashMszUninstallPathSet._stashFcCs:|jstjd|jj�dStjd|jj|jj�t���t|j|j��}|rRd}n"x|D]}tj|�qXWt	dd
�}|j
r�tjd�x|j|j
�D]}tj|�q�W|dk�r,tjdd	d
�|_
x8|D]0}|j|�}tjd|�|jj|�t||�q�Wx|jj�D]}|j��qWtjd|jj|jj�WdQRXdS)z[Remove paths in ``self.paths`` with confirmation (unless
        ``auto_confirm`` is True).z7Can't uninstall '%s'. No files were found to uninstall.NzUninstalling %s-%s:�yzProceed (y/n)? �nz.Not removing or modifying (outside of prefix):z
-uninstallzpip-)�suffix�prefixzRemoving file or directory %szSuccessfully uninstalled %s-%s)r6r7)r�logger�infor�project_name�versionrr0r2rr�tempfileZmkdtemprr5�debugr�appendr
r�values�remove)rZauto_confirmrZresponser�new_pathrrrrrBQs@







zUninstallPathSet.removecCs~|jdkrtjd|jj�dStjd|jj�x.|jD]$}|j|�}tjd|�t	||�q6Wx|j
j�D]}|j�qjWdS)z1Rollback the changes previously made by remove().Nz'Can't roll back %s; was not uninstalledFzRolling back uninstall of %szReplacing %s)
rr:�errorrr<r;rr5r?r
rrA�rollback)rrZtmp_pathrrrrrE{s


zUninstallPathSet.rollbackcCs$|jdk	r t|j�d|_g|_dS)z?Remove temporary save dir: rollback will no longer be possible.N)rrr)rrrr�commit�s

zUninstallPathSet.commitN)F)
�__name__�
__module__�__qualname__�__doc__rrr!r(r2r5rBrErFrrrrr
s	
*r
c@s,eZdZdd�Zdd�Zdd�Zdd�Zd	S)
r%cCs0tjj|�std|��||_t�|_d|_dS)Nz.Cannot remove entries from nonexistent file %s)rr�isfiler�filer�entries�_saved_lines)rr&rrrr�s
zUninstallPthEntries.__init__cCs>tjj|�}tr.tjj|�dr.|jdd�}|jj|�dS)Nr�\�/)rrrrr3�replacerMr!)rr'rrrr!�szUninstallPthEntries.addcCs�tjd|j�t|jd��}|j�}||_WdQRXtdd�|D��rLd}nd}xH|jD]>}y$tjd|�|j||j	d��WqXt
k
r�YqXXqXWt|jd	��}|j|�WdQRXdS)
NzRemoving pth entries from %s:�rbcss|]}d|kVqdS)s
Nr)r.�linerrr�	<genexpr>�sz-UninstallPthEntries.remove.<locals>.<genexpr>z
�
zRemoving entry: %szutf-8�wb)r:r?rL�open�	readlinesrNr1rMrB�encode�
ValueError�
writelines)r�fh�linesZendliner'rrrrB�s
zUninstallPthEntries.removec	CsR|jdkrtjd|j�dStjd|j�t|jd��}|j|j�WdQRXdS)Nz.Cannot roll back changes to %s, none were madeFz!Rolling %s back to previous staterVT)rNr:rDrLr?rWr[)rr\rrrrE�s

zUninstallPthEntries.rollbackN)rGrHrIrr!rBrErrrrr%�s	
r%)Z
__future__rZloggingrr>Z
pip.compatrrrZpip.exceptionsrZ	pip.utilsrrr	r
rZpip.utils.loggingrZ	getLoggerrGr:�objectr
r%rrrr�<module>s
req/__pycache__/__init__.cpython-36.pyc000064400000000603151733136160013735 0ustar003

�Pf�@sDddlmZddlmZddlmZmZddlmZdddd	gZ	d
S)�)�absolute_import�)�InstallRequirement)�RequirementSet�Requirements)�parse_requirementsrrrrN)
Z
__future__rZreq_installrZreq_setrrZreq_filer�__all__�r	r	�/usr/lib/python3.6/__init__.py�<module>s
req/__pycache__/req_install.cpython-36.opt-1.pyc000064400000071670151733136160015466 0ustar003

�Pf���@s0ddlmZddlZddlZddlZddlZddlZddlZddlZddl	Z	ddl
Z
ddlmZddl
mZddlmZddlmZmZddlmZddlmZdd	lmZmZdd
lmZddlmZmZ ddl!m"Z"ddl#Z$dd
l%m&Z&m'Z'm(Z(ddl)m*Z*m+Z+m,Z,m-Z-ddl.m/Z/m0Z0ddl1m2Z2m3Z3m4Z4m5Z5ddl6m7Z7m8Z8m9Z9m:Z:m;Z;m<Z<m=Z=m>Z>m?Z?m@Z@mAZAmBZBmCZCmDZDmEZEmFZFmGZGddlHmIZIddlJmKZKddlLmMZMddlNmOZOddlPmQZQddlRmSZSddlTmUZUddl#mVZVmWZWejXeY�ZZej[j\j]�Z^dd�Z_dd�Z`Gdd�dea�Zbd d!�Zcd$d"d#�ZddS)%�)�absolute_importN)�	sysconfig)�change_root)�
FeedParser)�
pkg_resources�six)�
specifiers)�Marker)�InvalidRequirement�Requirement)�canonicalize_name)�Version�parse)�configparser)�
native_str�
get_stdlib�WINDOWS)�is_url�url_to_path�path_to_url�is_archive_file)�InstallationError�UninstallationError)�bin_py�running_under_virtualenv�PIP_DELETE_MARKER_FILENAME�bin_user)�display_path�rmtree�ask_path_exists�
backup_dir�is_installable_dir�dist_in_usersite�dist_in_site_packages�dist_in_install_path�
egg_link_path�call_subprocess�read_text_file�FakeFile�_make_build_dir�
ensure_dir�get_installed_version�normalize_path�
dist_is_local)�Hashes)�RemovedInPip10Warning)�
indent_log)�SETUPTOOLS_SHIM)�open_spinner)�UninstallPathSet)�vcs)�move_wheel_files�WheelcCs6tjd|�}d}|r*|jd�}|jd�}n|}||fS)Nz^(.+)(\[[^\]]+\])$��)�re�match�group)�path�m�extrasZpath_no_extras�r?�!/usr/lib/python3.6/req_install.py�
_strip_extras9s
rAcCstdd�|D��S)Ncss|]}tj|�VqdS)N)rZ
safe_extra)�.0�extrar?r?r@�	<genexpr>Fsz_safe_extras.<locals>.<genexpr>)�set)r>r?r?r@�_safe_extrasEsrFc@s�eZdZdQdd�ZedRdd��ZedSdd	��Zd
d�Zdd
�Zdd�Z	e
dd��Ze
dd��Zdd�Z
dd�Zdd�Ze
dd��Ze
dd��Ze
dd��Zd d!�Zd"d#�Zd$d%�Zd&d'�Zejd(�Ze
d)d*��Zd+d,�ZdTd-d.�ZdUd/d0�Zd1d2�Zd3d4�Zd5d6�Z d7d8�Z!dVd9d:�Z"gdddfd;d<�Z#d=d>�Z$d?d@�Z%dAdB�Z&fdfdCdD�Z'dEdF�Z(e
dGdH��Z)dWdIdJ�Z*dKdL�Z+e
dMdN��Z,dXdOdP�Z-dS)Y�InstallRequirementNFTcsJf|_t�tj�r�yt���Wndtk
r�tjj�kr@d}n.d�krft	�fdd�t
D��rfd}ntj�}t
d�|f��YnXt�j�|_�|_||_|
|_||_||_||_||_|_||_|	dk	r�|	|_n�oވj|_d|_d|_d|_d|_d|_||_d|_ d|_!d|_"d|_#d|_$|�r.|ni|_%||_&d|_'|
|_(dS)Nz%It looks like a path. Does it exist ?�=c3s|]}|�kVqdS)Nr?)rB�op)�reqr?r@rDVsz.InstallRequirement.__init__.<locals>.<genexpr>z,= is not a valid operator. Did you mean == ?zInvalid requirement: '%s'
%sF))r>�
isinstancer�string_typesrr
�osr<�sep�any�	operators�	traceback�
format_excrrFrJ�
comes_from�
constraint�
source_dir�editable�_wheel_cache�link�
original_link�as_egg�markers�marker�_egg_info_path�satisfied_by�conflicts_with�_temp_build_dir�_ideal_build_dir�update�install_succeeded�uninstalled�nothing_to_uninstall�
use_user_site�
target_dir�options�	pycompileZprepared�isolated)�selfrJrSrUrVrXrZrbrir[rjrh�wheel_cacherT�add_msgr?)rJr@�__init__KsN zInstallRequirement.__init__cCspddlm}t||�\}	}
}|
jd�r0t|
�}nd}||	||d||
�|||rP|ni|d�	}
|dk	rlt|�|
_|
S)Nr)�Linkzfile:T)rUrVrXrTrjrhrl)�	pip.indexro�parse_editable�
startswithrrFr>)�cls�editable_reqrS�default_vcsrjrhrlrTro�name�urlZextras_overriderU�resr?r?r@�
from_editable�s 



z InstallRequirement.from_editablec
Cs�ddlm}t|�rd}nd}||krR|j|d�\}}	|	j�}	|	sHd}	qVt|	�}	nd}	|j�}d}
tjjtjj	|��}d}d}
t|�r�||�}n�t
|�\}}
tjj|�r�tjj|ks�|j
d�r�t|�s�td|��|t|��}n0t|��rtjj|��stjd	|�|t|��}|�r||jd
k�rPtjd|j��rP|ttjjtjj	|j����}|j�rtt|j�}d|j|jf}
n|j}
n|}
|�r�|ni}||
|||	||||d
�}|
�r�tt d|
�j!�|_!|S)z�Creates an InstallRequirement from a name, which might be a
        requirement, directory containing 'setup.py', filename, or URL.
        r)roz; �;r7N�.z;Directory %r is not installable. File 'setup.py' not found.zARequirement %r looks like a filename, but the file does not exist�filez\.\./z%s==%s)rXr[rjrhrlrT�placeholder)"rpror�split�stripr	rMr<�normpath�abspathrA�isdirrNrrr!rrr�isfile�logger�warning�schemer9�searchrw�is_wheelr6�filenamerv�version�egg_fragmentrFrr>)rsrvrSrjrhrlrTroZ
marker_sepr[rJr<rXr>�p�wheelrxr?r?r@�	from_line�sb





zInstallRequirement.from_linecCs�|jr(t|j�}|jr:|d|jj7}n|jr6|jjnd}|jdk	rX|dt|jj�7}|jr�t|jt	j
�rt|j}n
|jj�}|r�|d|7}|S)Nz from %sz in %sz
 (from %s))rJ�strrXrwr^r�locationrSrKrrL�	from_path)rk�srSr?r?r@�__str__�s


zInstallRequirement.__str__cCsd|jjt|�|jfS)Nz<%s object: %s editable=%r>)�	__class__�__name__r�rV)rkr?r?r@�__repr__szInstallRequirement.__repr__cCs^|jdkr|j||�|_|jdk	rZ|rZ|j}|jj|j|j�|_||jkrZtjd|j�dS)aEnsure that if a link can be found for this, that it is found.

        Note that self.link may still be None - if Upgrade is False and the
        requirement is already installed.

        If require_hashes is True, don't use the wheel cache, because cached
        wheels, always built locally, have different hashes than the files
        downloaded from the index server and thus throw false hash mismatches.
        Furthermore, cached wheels at present have undeterministic contents due
        to file modification times.
        NzUsing cached wheel link: %s)rXZfind_requirementrWZcached_wheelrvr��debug)rk�finderZupgradeZrequire_hashesZold_linkr?r?r@�
populate_link	s

z InstallRequirement.populate_linkcCs|jjS)N)rJ�	specifier)rkr?r?r@r�szInstallRequirement.specifiercCs$|j}t|�dko"tt|��jdkS)z�Return whether I am pinned to an exact version.

        For example, some-package==1.2 is pinned; some-package>1.2 is not.
        r7�==�===)r�r�)r��len�next�iter�operator)rkrr?r?r@�	is_pinned!szInstallRequirement.is_pinnedcCsR|jdkrdSt|j�}|jrNt|jtj�r4|j}n
|jj�}|rN|d|7}|S)Nz->)rJr�rSrKrrLr�)rkr�rSr?r?r@r�+s


zInstallRequirement.from_pathcCs�|jdk	r|jS|jdkr<tjjtjdd��|_||_|jS|jrN|j	j
�}n|j	}tjj|�sttj
d|�t|�tjj||�S)Nz-buildzpip-zCreating directory %s)r`rJrMr<�realpath�tempfile�mkdtemprarVrv�lower�existsr�r�r)�join)rk�	build_dirrvr?r?r@�build_location8s

z!InstallRequirement.build_locationcCs�|jdk	rdS|j}d|_|j|j�}tjj|�rBtdt|���t	j
d|t|�t|��tj||�||_d|_||_d|_
dS)a�Move self._temp_build_dir to self._ideal_build_dir/self.req.name

        For some requirements (e.g. a path to a directory), the name of the
        package is not available until we run egg_info, so the build_location
        will return a temporary directory and store the _ideal_build_dir.

        This is only called by self.egg_info_path to fix the temporary build
        directory.
        Nz<A package already exists in %s; please remove it to continuez,Moving package %s from %s to new location %s)rUr`r�rarMr<r�rrr�r��shutil�mover])rkZold_locationZnew_locationr?r?r@�_correct_build_locationSs"

z*InstallRequirement._correct_build_locationcCs |jdkrdSttj|jj��S)N)rJrrZ	safe_namerv)rkr?r?r@rvss
zInstallRequirement.namecCstjj|j|jr|jjpd�S)N�)rMr<r�rUrXZsubdirectory_fragment)rkr?r?r@�setup_py_diryszInstallRequirement.setup_py_dircCs|yddl}Wn:tk
rFtd�dkr.d}ntj�}td|��YnXtjj|j	d�}t
jrxt|t
j
�rx|jtj��}|S)Nr�
setuptoolszPlease install setuptools.zWCould not import setuptools which is required to install from a source distribution.
%szsetup.py)r��ImportErrorr+rQrRrrMr<r�r�r�PY2rKZ	text_type�encode�sys�getfilesystemencoding)rkr�rm�setup_pyr?r?r@r�szInstallRequirement.setup_pyc
CsP|jrtjd|j|j�ntjd|j|j�t��xt|j}tjd|g}|j	rZ|dg7}|dg}|j
rpg}n tjj
|jd�}t|�ddg}t|||jdd	d
�WdQRX|j�stt|j�d�t�r�d}nd
}tdj
|j�d||j�dg��|_|j�nDt|j�d�}t|jj�|k�rLtjd|j|j||j�t|�|_dS)Nz2Running setup.py (path:%s) egg_info for package %sz7Running setup.py (path:%s) egg_info for package from %sz-cz
--no-user-cfg�egg_infozpip-egg-infoz
--egg-baseFzpython setup.py egg_info)�cwd�show_stdoutZcommand_descr
z==z===r��NamezuRunning setup.py (path:%s) egg_info for package %s produced metadata for project name %s. Fix your #egg=%s fragments.)rvr�r�r�rXr0r1r��
executablerjrVrMr<r�r�r*r&rJrK�
parse_version�pkg_infor
rr�rr�)rk�scriptZbase_cmdZegg_info_cmdZegg_base_option�egg_info_dirrIZ
metadata_namer?r?r@�run_egg_info�sN




zInstallRequirement.run_egg_infocCsL|jdk	r&|jj|�sdS|jj|�S|j|�}tjj|�s@dSt|�}|S)N)r^�has_metadata�get_metadata�
egg_info_pathrMr<r�r')rkr��datar?r?r@�
egg_info_data�s

z InstallRequirement.egg_info_datacsf|jdk�rV|jr|j}ntjj|jd�}tj|�}|j�rg}x�tj|�D]�\�}}x t	j
D]}||kr^|j|�q^Wxjt|�D]^}tjj
tjj�|dd��s�tjjtjj�|dd��r�|j|�q�|dks�|dkr�|j|�q�W|j�fdd	�|D��qLWd
d	�|D�}|�s$td||f��t|�dk�rB|jd
d�d�tjj||d�|_tjj|j|�S)Nzpip-egg-info�bin�pythonZScriptsz
Python.exeZtestZtestscsg|]}tjj�|��qSr?)rMr<r�)rB�dir)�rootr?r@�
<listcomp>�sz4InstallRequirement.egg_info_path.<locals>.<listcomp>cSsg|]}|jd�r|�qS)z	.egg-info)�endswith)rB�fr?r?r@r��sz$No files/directories in %s (from %s)r7cSs(|jtjj�tjjr"|jtjj�p$dS)Nr)�countrMr<rN�altsep)�xr?r?r@�<lambda>
sz2InstallRequirement.egg_info_path.<locals>.<lambda>)�keyr)r]rVrUrMr<r�r��listdir�walkr4�dirnames�remove�list�lexistsr��extendrr��sort)rkr��base�	filenames�dirs�filesr�r?)r�r@r��s>
z InstallRequirement.egg_info_pathcCs@t�}|jd�}|s*tjdt|jd���|j|p4d�|j�S)NzPKG-INFOzNo PKG-INFO file found in %sr�)rr�r�r�rr�Zfeed�close)rkr�r�r?r?r@r�s
zInstallRequirement.pkg_infoz	\[(.*?)\]cCs
t|j�S)N)r+rv)rkr?r?r@�installed_version sz$InstallRequirement.installed_versioncCsL|j�d}|jjr2||jjkr2tjd||j�ntjdt|j�||�dS)Nr�z'Requested %s, but installing version %sz;Source in %s has version %s, which satisfies requirement %s)	r�rJr�r�r�r�r�rrU)rkr�r?r?r@�assert_source_matches_version$s
z0InstallRequirement.assert_source_matches_versioncCs�|jstjd|j�dS|jjdkr(dS|js2dS|jjjdd�\}}tj	|�}|r�||jj�}|rr|j
|j�q�|j|j�ndS)Nz>Cannot update repository at %s; repository location is unknownr|�+r7)rXr�r�rUr�rbrwr~r4�get_backend�obtainZexport)rkr��vc_typerw�backendZvcs_backendr?r?r@�update_editable5s"
z"InstallRequirement.update_editablecsT|j�std|jf��|jp"|j}t|j�}t|�sTtj	d|j
|tj�d|_
dS|t�krxtj	d|j
|�d|_
dSt|�}t|�}djtj|j��}|jo�tjj|j�}t|jdd�}|o�|jjd�o�|jj|��r�|j|j�|jd	��r2x�|jd	�j�D](}	tjj tjj!|j|	��}
|j|
��qWn�|jd
��r�|jd��rV|jd��ng�xj�fdd
�|jd
�j�D�D]J}tjj!|j|�}
|j|
�|j|
d�|j|
d�|j|
d��qxW�n$|�r�t"j#dj|j�t$�|j|�n�|jjd��rF|j|j�tjj%|j�d}tjj!tjj&|j�d�}
|j'|
d|�n�|�r~|jjd��r~x�t(j)j*|�D]}
|j|
��qhWnp|�r�t+|d��}tjj,|j-�j.��}WdQRX|j|�tjj!tjj&|�d�}
|j'|
|j�ntj/d||j�|jd��rb|j0d��rbxZ|j1d�D]L}t2|��r&t3}nt4}|jtjj!||��t5�r|jtjj!||�d��qW|jd��r@t6j7�r|i}ndd"i}t8j9f|�}|j:t;|j<d���|j=d��r@x�|j>d�D]�\}}t2|��r�t3}nt4}|jtjj!||��t5�r�|jtjj!||�d�|jtjj!||�d �|jtjj!||�d!��q�W|j?|�||_@dS)#a�
        Uninstall the distribution currently satisfying this requirement.

        Prompts before removing or modifying files unless
        ``auto_confirm`` is True.

        Refuses to delete or modify files outside of ``sys.prefix`` -
        thus uninstallation within a virtual environment can only
        modify that virtual environment, even if the virtualenv is
        linked to global site-packages.

        z.Cannot uninstall requirement %s, not installedz1Not uninstalling %s at %s, outside environment %sTNz<Not uninstalling %s at %s, as it is in the standard library.z{0}.egg-infor<z	.egg-infozinstalled-files.txtz
top_level.txtznamespace_packages.txtcsg|]}|r|�kr|�qSr?r?)rBr�)�
namespacesr?r@r��sz0InstallRequirement.uninstall.<locals>.<listcomp>z.pyz.pycz.pyoz�Uninstalling a distutils installed project ({0}) has been deprecated and will be removed in a future version. This is due to the fact that uninstalling a distutils project will only partially uninstall the project.z.eggr7zeasy-install.pthz./z
.dist-info�rz)Not sure how to uninstall: %s - Check: %s�scriptsz.batzentry_points.txtZ
delimitersrHZconsole_scriptsz.exez
.exe.manifestz
-script.py)rH)A�check_if_existsrrvr^r_r,r�r-r��infor�r��prefixrerr3r%�formatrZto_filename�project_namer�rMr<r��getattrZ	_providerr��addr�r��
splitlinesr�r��warnings�warnr/r~�dirnameZadd_pth�pipr�Zuninstallation_paths�open�normcase�readlinerr�Zmetadata_isdirZmetadata_listdirr"rrrrr�rZSafeConfigParserZreadfpr(Zget_metadata_linesZhas_section�itemsr�rd)rkZauto_confirmZdistZ	dist_pathZpaths_to_removeZdevelop_egg_linkZdevelop_egg_link_egg_infoZegg_info_existsZdistutils_egg_infoZinstalled_filer<Z
top_level_pkgZeasy_install_eggZeasy_install_pthZfhZlink_pointerr�Zbin_dirrh�configrv�valuer?)r�r@�	uninstallRs�









zInstallRequirement.uninstallcCs$|jr|jj�ntjd|j�dS)Nz'Can't rollback %s, nothing uninstalled.)rdZrollbackr��errorrv)rkr?r?r@�rollback_uninstall�sz%InstallRequirement.rollback_uninstallcCs*|jr|jj�n|js&tjd|j�dS)Nz%Can't commit %s, nothing uninstalled.)rdZcommitrer�r�rv)rkr?r?r@�commit_uninstall�s
z#InstallRequirement.commit_uninstallcCs�d}d|j|j�df}tjj||�}tjj|�r�tdt|�d�}|dkrTd	}nj|dkrxtj	d
t|��tj
|�nF|dkr�t|�}tj	dt|�t|��tj
||�n|dkr�tjd�|�r�tj|dtjdd
�}tjjtjj|j��}x�tj|�D]�\}	}
}d|
k�r|
j
d�xR|
D]J}tjj|	|�}|j||�}
tj|jd|
d�}d|_|j|d��qWxL|D]D}|tk�r��qrtjj|	|�}|j||�}
|j||jd|
��qrW�q�W|j�tjdt|��dS)NTz	%s-%s.zipr�z8The file %s exists. (i)gnore, (w)ipe, (b)ackup, (a)bort �i�w�b�aFzDeleting %szBacking up %s to %sr7)Z
allowZip64zpip-egg-info�/i��r�zSaved %s)r�rrr���i�)rvr�rMr<r�r�rrr�r�r�r r�r�r��exit�zipfileZZipFileZZIP_DEFLATEDr�r�r�r��_clean_zip_nameZZipInfoZ
external_attrZwritestrr�writer�r�)rkr�Zcreate_archiveZarchive_nameZarchive_pathZresponseZ	dest_file�zipr��dirpathr�r�r�rvZzipdirr�r?r?r@�archivesV






"zInstallRequirement.archivecCs(|t|�dd�}|jtjjd�}|S)Nr7r)r��replacerMr<rN)rkrvr�r?r?r@r5sz"InstallRequirement._clean_zip_namecs0|sd}�jdk	r(t�fdd�|D��SdSdS)Nr�c3s|]}�jjd|i�VqdS)rCN)r[Zevaluate)rBrC)rkr?r@rDDsz3InstallRequirement.match_markers.<locals>.<genexpr>T)r�)r[rO)rkZextras_requestedr?)rkr@�
match_markers=s


z InstallRequirement.match_markersc,s`|jr|j|||d�dS|jr\tjj|j�}tjj||j�|j	|j�||d�d|_
dS||jjdg�7}||jjdg�7}|j
r�t|�dg}tjdd�}tjj|d	�}�z�|j||�|�}	d
|jf}
t|
��.}t��t|	||jd|d�WdQRXWdQRXtjj|��s(tjd
|�dSd|_
|j�r:dS�fdd�}t|��H}
x@|
D](}tjj|�}|jd��rV||�}P�qVWtj d|�dSWdQRXg}t|��P}
xH|
D]@}|j!�}tjj"|��r�|tjj#7}|j$tjj%||�|���q�WWdQRXtjj|d�}t|d��}
|
j&dj|�d�WdQRXWdtjj|��rRtj'|�t(|�XdS)N)r�)r�r��strip_file_prefixT�global_options�install_optionsz
--no-user-cfgz-recordzpip-zinstall-record.txtzRunning setup.py install for %sF)r�r��spinnerzRecord file %s not foundcs(�dkstjj|�r|St�|�SdS)N)rMr<�isabsr)r<)r�r?r@�prepend_root~sz0InstallRequirement.install.<locals>.prepend_rootz	.egg-infoz;Could not find .egg-info directory in install record for %szinstalled-files.txtr�
))rV�install_editabler�r�r�Z
wheel_versionrUZcheck_compatibilityrvr5rcrh�getrjr�r�r�rMr<r��get_install_argsr2r0r&r�r�r�r�rZr�r�r�r�rr�rN�append�relpathr	r�r)rkrrr�r�rr�Z
temp_location�record_filename�install_args�msgrrr��lineZ	directoryr�Z	new_linesr�Zinst_files_pathr?)r�r@�installIs~




"
zInstallRequirement.installcCs|jdkr|j|�|_|jS)aAEnsure that a source_dir is set.

        This will create a temporary build dir if the name of the requirement
        isn't known yet.

        :param parent_dir: The ideal pip parent_dir for the source_dir.
            Generally src_dir for editables and build_dir for sdists.
        :return: self.source_dir
        N)rUr�)rkZ
parent_dirr?r?r@�ensure_has_source_dir�s

z(InstallRequirement.ensure_has_source_dircCs�tjdg}|jd�|jt|j�|t|�dd|g7}|jsJ|dg7}|dk	r^|d|g7}|dk	rr|d|g7}|jr�|dg7}n
|d	g7}t�r�d
t	j
�}|dtjj
tjdd
||j�g7}|S)Nz-uz-crz--recordz#--single-version-externally-managedz--rootz--prefixz	--compilez--no-compiler�z--install-headers�includeZsite)r�r�rr1r�r�rZrirr�get_python_versionrMr<r�r�rv)rkrrr�r�rZ
py_ver_strr?r?r@r�s(



z#InstallRequirement.get_install_argscCsd|jr6tjjtjj|jt��r6tjd|j�t|j�d|_|j	rZtjj|j	�rZt|j	�d|_	dS)zVRemove the source files from this requirement, if they are marked
        for deletionzRemoving source in %sN)
rUrMr<r�r�rr�r�rr`)rkr?r?r@�remove_temporary_source�s

z*InstallRequirement.remove_temporary_sourcecCs�tjd|j�|jr"t|�dg}|r>dj|�g}t|�|}t��<ttj	dt
|jgt|�ddgt|�|jdd�WdQRXd	|_
dS)
NzRunning setup.py develop for %sz
--no-user-cfgz--prefix={0}z-cZdevelopz	--no-depsF)r�r�T)r�r�rvrjr�r�r0r&r�r�r1r�r�rc)rkrrr�Zprefix_paramr?r?r@r�s z#InstallRequirement.install_editablecCs�|jdkrdSyFtt|j��}d|_tjt|��|_|jrR|jrR|j|_d|_dSWn�tj	k
rjdStj
k
r�tj|jj�}|jr�t
|�r�||_q�t�r�t|�r�td|j|jf��nt|�r�||_YnXdS)z�Find an installed distribution that satisfies or conflicts
        with this requirement, and set self.satisfied_by or
        self.conflicts_with appropriately.
        NFTzVWill not install to the user site because it will lack sys.path precedence to %s in %s)rJrr�r\rZget_distributionr^rVr_ZDistributionNotFoundZVersionConflictrvrfr"rr#rr�r�r$)rkZ	no_markerZ
existing_distr?r?r@r��s4

z"InstallRequirement.check_if_existscCs|jo|jjS)N)rXr�)rkr?r?r@r� szInstallRequirement.is_wheelcCs,t|j|j||j|j|||j|j|d�
dS)N)�user�homer�r�rirjr)r5rvrJrfrgrirj)rkZwheeldirr�r�rr?r?r@r5$s
z#InstallRequirement.move_wheel_filescCsX|jd�jd�}tjj|�}tj||�}tjjtjj|��d}tj	tjj|�||d�S)zAReturn a pkg_resources.Distribution built from self.egg_info_pathr�rr)r��metadata)
r��rstriprMr<r�rZPathMetadata�splitext�basenameZDistribution)rkr�Zbase_dirr&Z	dist_namer?r?r@�get_dist0s
zInstallRequirement.get_distcCst|jjdi��S)z�Return whether any known-good hashes are specified as options.

        These activate --require-hashes mode; hashes specified as part of a
        URL do not.

        �hashes)�boolrhr)rkr?r?r@�has_hash_options;sz#InstallRequirement.has_hash_optionscCsJ|jjdi�j�}|r|jn|j}|rB|jrB|j|jg�j|j�t	|�S)a�Return a hash-comparer that considers my option- and URL-based
        hashes to be known-good.

        Hashes in URLs--ones embedded in the requirements file, not ones
        downloaded from an index server--are almost peers with ones from
        flags. They satisfy --require-hashes (whether it was implicitly or
        explicitly activated) but do not activate it. md5 and sha224 are not
        allowed in flags, which should nudge people toward good algos. We
        always OR all hashes together, even ones from URLs.

        :param trust_internet: Whether to trust URL-based (#md5=...) hashes
            downloaded from the internet, as by populate_link()

        r+)
rhr�copyrXrY�hash�
setdefaultZ	hash_namerr.)rkZtrust_internetZgood_hashesrXr?r?r@r+Es

zInstallRequirement.hashes)NFNFTTNFNNF)NNFNNF)NFNNF)T)F)N)NNN)T).r��
__module__�__qualname__rn�classmethodryr�r�r�r��propertyr�r�r�r�r�rvr�r�r�r�r�r�r9�compileZ_requirements_section_rer�r�r�r�r�r�rrrrr rr#rr�r�r5r*r-r+r?r?r?r@rGIs`
;M

 :6


$0
\
)

rGcCstjd|�}|r|jd�}|S)z2
        Strip req postfix ( -dev, 0.2, etc )
    z^(.*?)(?:-dev|-\d.*)$r7)r9r�r;)rJr:r?r?r@�_strip_postfix[s
r6cCs�ddlm}|}d}tjd|�}|r:|jd�}|jd�}n|}tjj|�rttjjtjj	|d��slt
d|��t|�}|j�j
d	�r�||�j}|r�||td
|j��jfS||dfSx,tD]$}|j�j
d|�r�d||f}Pq�Wd
|k�r|�r
tjdt�|d
|}nt
d|��|jd
d�dj�}	tj|	��s`d|dj	dd�tjD��d}
t
|
��||�j}|�sxt
d��|�s�t
d|��t|�|dfS)aParses an editable requirement into:
        - a requirement name
        - an URL
        - extras
        - editable options
    Accepted requirements:
        svn+http://blahblah@rev#egg=Foobar[baz]&subdirectory=version_subdir
        .[some_extra]
    r)roNz^(.+)(\[[^\]]+\])$r7r8zsetup.pyz;Directory %r is not installable. File 'setup.py' not found.zfile:r}z%s:z%s+%sr�zD--default-vcs has been deprecated and will be removed in the future.zb%s should either be a path to a local project or a VCS url beginning with svn+, git+, hg+, or bzr+zFor --editable=%s only z, cSsg|]}|jd�qS)z+URL)rv)rBr�r?r?r@r��sz"parse_editable.<locals>.<listcomp>z is currently supportedz@Could not detect requirement name, please specify one with #egg=z@--editable=%s is not the right format; it must have #egg=Package)rpror9r:r;rMr<r�r�r�rrr�rrr�rr>r4r�r�r/r~r�Zbackendsr6)rtrurorwr>r=Z
url_no_extrasZpackage_nameZversion_controlr�Z
error_messager?r?r@rqgs`





rq)N)eZ
__future__rZloggingrMr9r�r�r�rQr�rZ	distutilsrZdistutils.utilrZemail.parserrZpip._vendorrrZpip._vendor.packagingrZpip._vendor.packaging.markersr	Z"pip._vendor.packaging.requirementsr
rZpip._vendor.packaging.utilsrZpip._vendor.packaging.versionr
rr�Zpip._vendor.six.movesrZ	pip.wheelr�Z
pip.compatrrrZpip.downloadrrrrZpip.exceptionsrrZ
pip.locationsrrrrZ	pip.utilsrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-Zpip.utils.hashesr.Zpip.utils.deprecationr/Zpip.utils.loggingr0Zpip.utils.setuptools_buildr1Zpip.utils.uir2Zpip.req.req_uninstallr3Zpip.vcsr4r5r6Z	getLoggerr�r�Z	SpecifierZ
_operators�keysrPrArF�objectrGr6rqr?r?r?r@�<module>s`L
req/__pycache__/req_uninstall.cpython-36.pyc000064400000014171151733136160015063 0ustar003

�Pf��@s�ddlmZddlZddlZddlZddlmZmZmZddl	m
Z
ddlmZm
Z
mZmZmZddlmZeje�ZGdd�de�ZGd	d
�d
e�ZdS)�)�absolute_importN)�uses_pycache�WINDOWS�cache_from_source)�UninstallationError)�rmtree�ask�is_local�renames�normalize_path)�
indent_logc@sZeZdZdZdd�Zdd�Zdd�Zdd	�Zd
d�Zdd
�Z	ddd�Z
dd�Zdd�ZdS)�UninstallPathSetzMA set of file paths to be removed in the uninstallation of a
    requirement.cCs,t�|_t�|_i|_||_d|_g|_dS)N)�set�paths�_refuse�pth�dist�save_dir�_moved_paths)�selfr�r�#/usr/lib/python3.6/req_uninstall.py�__init__szUninstallPathSet.__init__cCst|�S)zs
        Return True if the given path is one we are permitted to
        remove/modify, False otherwise.

        )r	)r�pathrrr�
_permittedszUninstallPathSet._permittedcCs�tjj|�\}}tjjt|�tjj|��}tjj|�s:dS|j|�rR|jj	|�n|j
j	|�tjj|�ddkr�tr�|j	t
|��dS)N�z.py)�osr�split�joinr�normcase�existsrr�addr�splitextrr)rr�head�tailrrrr!#s
zUninstallPathSet.addcCsLt|�}|j|�r<||jkr*t|�|j|<|j|j|�n|jj|�dS)N)rrr�UninstallPthEntriesr!r)r�pth_file�entryrrr�add_pth6s

zUninstallPathSet.add_pthcs@t�}x4t|td�D]$�t�fdd�|D��s|j��qW|S)z�Compact a path set to contain the minimal number of paths
        necessary to contain all paths in the set. If /a/path/ and
        /a/path/to/a/file.txt are both in the set, leave only the
        shorter path.)�keycs4g|],}�j|�o.�t|jtjj��tjjk�qSr)�
startswith�len�rstriprr�sep)�.0Z	shortpath)rrr�
<listcomp>Gsz,UninstallPathSet.compact.<locals>.<listcomp>)r�sortedr+�anyr!)rrZshort_pathsr)rr�compact?s

zUninstallPathSet.compactcCs&tjj|jtjj|�djtjj��S)Nr)rrrr�
splitdrive�lstripr-)rrrrr�_stashMszUninstallPathSet._stashFcCs:|jstjd|jj�dStjd|jj|jj�t���t|j|j��}|rRd}n"x|D]}tj|�qXWt	dd
�}|j
r�tjd�x|j|j
�D]}tj|�q�W|dk�r,tjdd	d
�|_
x8|D]0}|j|�}tjd|�|jj|�t||�q�Wx|jj�D]}|j��qWtjd|jj|jj�WdQRXdS)z[Remove paths in ``self.paths`` with confirmation (unless
        ``auto_confirm`` is True).z7Can't uninstall '%s'. No files were found to uninstall.NzUninstalling %s-%s:�yzProceed (y/n)? �nz.Not removing or modifying (outside of prefix):z
-uninstallzpip-)�suffix�prefixzRemoving file or directory %szSuccessfully uninstalled %s-%s)r6r7)r�logger�infor�project_name�versionrr0r2rr�tempfileZmkdtemprr5�debugr�appendr
r�values�remove)rZauto_confirmrZresponser�new_pathrrrrrBQs@







zUninstallPathSet.removecCs~|jdkrtjd|jj�dStjd|jj�x.|jD]$}|j|�}tjd|�t	||�q6Wx|j
j�D]}|j�qjWdS)z1Rollback the changes previously made by remove().Nz'Can't roll back %s; was not uninstalledFzRolling back uninstall of %szReplacing %s)
rr:�errorrr<r;rr5r?r
rrA�rollback)rrZtmp_pathrrrrrE{s


zUninstallPathSet.rollbackcCs$|jdk	r t|j�d|_g|_dS)z?Remove temporary save dir: rollback will no longer be possible.N)rrr)rrrr�commit�s

zUninstallPathSet.commitN)F)
�__name__�
__module__�__qualname__�__doc__rrr!r(r2r5rBrErFrrrrr
s	
*r
c@s,eZdZdd�Zdd�Zdd�Zdd�Zd	S)
r%cCs0tjj|�std|��||_t�|_d|_dS)Nz.Cannot remove entries from nonexistent file %s)rr�isfiler�filer�entries�_saved_lines)rr&rrrr�s
zUninstallPthEntries.__init__cCs>tjj|�}tr.tjj|�dr.|jdd�}|jj|�dS)Nr�\�/)rrrrr3�replacerMr!)rr'rrrr!�szUninstallPthEntries.addcCs�tjd|j�t|jd��}|j�}||_WdQRXtdd�|D��rLd}nd}xH|jD]>}y$tjd|�|j||j	d��WqXt
k
r�YqXXqXWt|jd	��}|j|�WdQRXdS)
NzRemoving pth entries from %s:�rbcss|]}d|kVqdS)s
Nr)r.�linerrr�	<genexpr>�sz-UninstallPthEntries.remove.<locals>.<genexpr>z
�
zRemoving entry: %szutf-8�wb)r:r?rL�open�	readlinesrNr1rMrB�encode�
ValueError�
writelines)r�fh�linesZendliner'rrrrB�s
zUninstallPthEntries.removec	CsR|jdkrtjd|j�dStjd|j�t|jd��}|j|j�WdQRXdS)Nz.Cannot roll back changes to %s, none were madeFz!Rolling %s back to previous staterVT)rNr:rDrLr?rWr[)rr\rrrrE�s

zUninstallPthEntries.rollbackN)rGrHrIrr!rBrErrrrr%�s	
r%)Z
__future__rZloggingrr>Z
pip.compatrrrZpip.exceptionsrZ	pip.utilsrrr	r
rZpip.utils.loggingrZ	getLoggerrGr:�objectr
r%rrrr�<module>s
req/__pycache__/__init__.cpython-36.opt-1.pyc000064400000000603151733136160014674 0ustar003

�Pf�@sDddlmZddlmZddlmZmZddlmZdddd	gZ	d
S)�)�absolute_import�)�InstallRequirement)�RequirementSet�Requirements)�parse_requirementsrrrrN)
Z
__future__rZreq_installrZreq_setrrZreq_filer�__all__�r	r	�/usr/lib/python3.6/__init__.py�<module>s
req/__pycache__/req_file.cpython-36.pyc000064400000020322151733136160013764 0ustar003

�Pf�.�@sxdZddlmZddlZddlZddlZddlZddlZddlZddl	m
Zddlm
Z
ddlZddlmZddlmZddlmZdd	lmZdd
lmZdgZejdej�Zejd
�Zejejejej ej!ej"ej#ej$ej%ej&ej'ej(ej)ej*ej+ej,ej-ej.ej/ej0ej1gZ2ej3ej4ej5gZ6dd�e6D�Z7d dd�Z8dd�Z9d!dd�Z:dd�Z;dd�Z<dd�Z=dd�Z>dd�Z?dS)"z
Requirements file parsing
�)�absolute_importN)�parse)�filterfalse)�get_file_content)�InstallRequirement)�RequirementsFileParseError)�RemovedInPip10Warning)�
cmdoptions�parse_requirementsz^(http|https|file):z(^|\s)+#.*$cCsg|]}|�j�qS�)�dest)�.0�orr�/usr/lib/python3.6/req_file.py�
<listcomp>=srFccsp|dkrtd��t|||d�\}}t||�}	x>|	D]6\}
}t|||
||||||d�	}x|D]
}
|
VqZWq2WdS)a�Parse a requirements file and yield InstallRequirement instances.

    :param filename:    Path or url of requirements file.
    :param finder:      Instance of pip.index.PackageFinder.
    :param comes_from:  Origin description of requirements.
    :param options:     cli options.
    :param session:     Instance of pip.download.PipSession.
    :param constraint:  If true, parsing a constraint file rather than
        requirements file.
    :param wheel_cache: Instance of pip.wheel.WheelCache
    NzCparse_requirements() missing 1 required keyword argument: 'session')�
comes_from�session)�
constraint)�	TypeErrorr�
preprocess�process_line)�filename�finderr�optionsrr�wheel_cache�_�content�
lines_enum�line_number�lineZreq_iter�reqrrrr
@s



cCs.t|j�dd�}t|�}t|�}t||�}|S)z�Split, filter, and join lines, and return a line iterator

    :param content: the content of the requirements file
    :param options: cli options
    �)�start)�	enumerate�
splitlines�
join_lines�ignore_comments�
skip_regex)rrrrrrras

rc		cs
t�}	|	j�}
d|
_|r |j|
_t|�\}}tjdkr@|jd�}|	jt	j
|�|
�\}
}d|r`dnd||f}|r�|rz|jnd	}|r�tj
||
�i}x.tD]&}||
jkr�|
j|r�|
j|||<q�Wtj||||||d
�V�n(|
j�r&|r�|jnd	}|�r|jnd}tj|
jd|||||d�V�n�|
j�s6|
j�r�|
j�rN|
jd}d	}n|
jd}d
}tj|��rvtj||�}n"tj|��s�tjjtjj|�|�}t|||||||d�}	x|	D]}|V�q�W�n>|
j �r�|
j |_ �n*|�r|
j!�r�t"j#dt$�|
j%�r
t"j#dt$�|
j&�rt"j#dt$�|
j�r0|
jg|_'|
j(d	k�rPd	|_(t)j*j+|j�|
j,d
k�rbg|_'|
j-�rx|j'j.|
j-�|
j/�r�|
j/d}tjjtjj0|��}tjj||�}tjj1|��r�|}|j/j2|�|
j3�r�d
|_4|
j5�r�d
|_5|
j6�r|j7j.dd�|
j6D��dS)a#Process a single requirements line; This can result in creating/yielding
    requirements, or updating the finder.

    For lines that contain requirements, the only options that have an effect
    are from SUPPORTED_OPTIONS_REQ, and they are scoped to the
    requirement. Other options from SUPPORTED_OPTIONS may be present, but are
    ignored.

    For lines that do not contain requirements, the only options that have an
    effect are from SUPPORTED_OPTIONS. Options from SUPPORTED_OPTIONS_REQ may
    be present, but are ignored. These lines may contain multiple options
    (although our docs imply only one is supported), and all our parsed and
    affect the finder.

    :param constraint: If True, parsing a constraints file.
    :param options: OptionParser options that we may update
    N����utf8z%s %s (line %s)z-cz-rF)r�isolatedrrr)rr�default_vcsr,rT)rrz�--allow-external has been deprecated and will be removed in the future. Due to changes in the repository protocol, it no longer has any effect.z�--allow-all-external has been deprecated and will be removed in the future. Due to changes in the repository protocol, it no longer has any effect.z�--allow-unverified has been deprecated and will be removed in the future. Due to changes in the repository protocol, it no longer has any effect.css|]}d|dfVqdS)�*Nr)r
�hostrrr�	<genexpr>�szprocess_line.<locals>.<genexpr>)r(r)r*)8�build_parserZget_default_values�	index_urlZformat_control�break_args_options�sys�version_info�encode�
parse_args�shlex�splitZ
isolated_moder	Zcheck_install_build_global�SUPPORTED_OPTIONS_REQ_DEST�__dict__rZ	from_lineZ	editablesr-Z
from_editable�requirements�constraints�	SCHEME_RE�search�urllib_parseZurljoin�os�path�join�dirnamer
�require_hashes�allow_external�warnings�warnr�allow_all_externalZallow_unverifiedZ
index_urls�	use_wheel�pip�indexZfmt_ctl_no_use_wheel�no_indexZextra_index_urls�extend�
find_links�abspath�exists�append�preZallow_all_prereleases�process_dependency_linksZ
trusted_hostsZsecure_origins)rrrrrrrrr�parserZdefaultsZargs_strZoptions_strZoptsrZline_comes_fromr,Zreq_optionsrr-Zreq_pathZnested_constraintr �valueZreq_dirZrelative_to_reqs_filerrrrns�










rcCsh|jd�}g}|dd�}x8|D]0}|jd�s8|jd�r<Pq |j|�|jd�q Wdj|�dj|�fS)z�Break up the line into an args and options string.  We only want to shlex
    (and then optparse) the options, not the args.  args can contain markers
    which are corrupted by shlex.
    � N�-z--r)r9�
startswithrR�poprC)r�tokens�argsr�tokenrrrr3�s


r3cCsDtjdd�}tt}x|D]}|�}|j|�qWdd�}||_|S)z7
    Return a parser for parsing requirement lines
    F)Zadd_help_optioncSst|��dS)N)r)�self�msgrrr�parser_exitsz!build_parser.<locals>.parser_exit)�optparseZOptionParser�SUPPORTED_OPTIONS�SUPPORTED_OPTIONS_REQZ
add_option�exit)rUZoption_factoriesZoption_factoryZoptionr`rrrr1s
r1ccs�d}g}x�|D]x\}}|jd�s,tj|�rntj|�r>d|}|rb|j|�|dj|�fVg}q�||fVq|sv|}|j|jd��qW|r�|dj|�fVdS)z�Joins a line ending in '' with the previous line (except when following
    comments).  The joined line takes on the index of the first line.
    N�\rW�)�endswith�
COMMENT_RE�matchrRrC�strip)rZprimary_line_numberZnew_linerrrrrr%"s 

r%ccs8x2|D]*\}}tjd|�}|j�}|r||fVqWdS)z1
    Strips comments and filter empty lines.
    rfN)rh�subrj)rrrrrrr&?s
r&cs2|r
|jnd}|r.tj|��t�fdd�|�}|S)zs
    Skip lines that match '--skip-requirements-regex' pattern

    Note: the regex pattern is only built once
    Ncs�j|d�S)Nr!)r?)�e)�patternrr�<lambda>Tszskip_regex.<locals>.<lambda>)Zskip_requirements_regex�re�compiler)rrr'r)rmrr'Js

r')NNNNFN)NNNNNF)@�__doc__Z
__future__rrAror8r4rarGZpip._vendor.six.moves.urllibrr@Zpip._vendor.six.movesrrKZpip.downloadrZpip.req.req_installrZpip.exceptionsrZpip.utils.deprecationrr	�__all__rp�Ir>rhr=Zeditabler<rMr2rOZextra_index_urlrFrIZno_allow_externalZallow_unsafeZno_allow_unsaferJZno_use_wheelZalways_unzipZ	no_binaryZonly_binaryrSrTZtrusted_hostrErbZinstall_optionsZglobal_options�hashrcr:r
rrr3r1r%r&r'rrrr�<module>sn

 

req/__pycache__/req_set.cpython-36.opt-1.pyc000064400000050573151733136160014612 0ustar003

�Pf��@s~ddlmZddlmZddlmZddlZddlZddlm	Z	ddlm
Z
ddlmZddl
mZmZmZmZmZdd	lmZmZmZmZmZmZmZmZmZmZdd
lmZddl m!Z!m"Z"m#Z#m$Z$m%Z%ddl&m'Z'dd
l(m)Z)ddl*m+Z+ddl,m-Z-ddl.m/Z/ej0e1�Z2Gdd�de3�Z4Gdd�de3�Z5dd�Z6Gdd�de5�Z7Gdd�de5�Z8Gdd�de5�Z9Gdd�de3�Z:dS)�)�absolute_import)�defaultdict)�chainN)�
pkg_resources)�requests)�
expanduser)�is_file_url�
is_dir_url�
is_vcs_url�url_to_path�
unpack_url)
�InstallationError�BestVersionAlreadyInstalled�DistributionNotFound�PreviousBuildDirError�	HashError�
HashErrors�HashUnpinned�DirectoryUrlHashUnsupported�VcsHashUnsupported�UnsupportedPythonVersion)�InstallRequirement)�display_path�dist_in_usersite�dist_in_install_path�
ensure_dir�normalize_path)�
MissingHashes)�
indent_log)�check_dist_requires_python)�vcs)�Wheelc@sDeZdZdd�Zdd�Zdd�Zdd�Zd	d
�Zdd�Zd
d�Z	dS)�RequirementscCsg|_i|_dS)N)�_keys�_dict)�self�r&�/usr/lib/python3.6/req_set.py�__init__"szRequirements.__init__cCs|jS)N)r#)r%r&r&r'�keys&szRequirements.keyscs�fdd��jD�S)Ncsg|]}�j|�qSr&)r$)�.0�key)r%r&r'�
<listcomp>*sz'Requirements.values.<locals>.<listcomp>)r#)r%r&)r%r'�values)szRequirements.valuescCs
||jkS)N)r#)r%�itemr&r&r'�__contains__,szRequirements.__contains__cCs$||jkr|jj|�||j|<dS)N)r#�appendr$)r%r+�valuer&r&r'�__setitem__/s
zRequirements.__setitem__cCs
|j|S)N)r$)r%r+r&r&r'�__getitem__4szRequirements.__getitem__cs$�fdd��j�D�}ddj|�S)Ncs$g|]}dt|�t�|�f�qS)z%s: %s)�repr)r*�k)r%r&r'r,8sz)Requirements.__repr__.<locals>.<listcomp>zRequirements({%s})z, )r)�join)r%r-r&)r%r'�__repr__7szRequirements.__repr__N)
�__name__�
__module__�__qualname__r(r)r-r/r2r3r7r&r&r&r'r" sr"c@s(eZdZdZdd�Zdd�Zdd�ZdS)	�DistAbstractionatAbstracts out the wheel vs non-wheel prepare_files logic.

    The requirements for anything installable are as follows:
     - we must be able to determine the requirement name
       (or we can't correctly handle the non-upgrade case).
     - we must be able to generate a list of run-time dependencies
       without installing any additional packages (or we would
       have to either burn time by doing temporary isolated installs
       or alternatively violate pips 'don't start installing unless
       all requirements are available' rule - neither of which are
       desirable).
     - for packages with setup requirements, we must also be able
       to determine their requirements without installing additional
       packages (for the same reason as run-time dependencies)
     - we must be able to create a Distribution object exposing the
       above metadata.
    cCs
||_dS)N)�req_to_install)r%r<r&r&r'r(OszDistAbstraction.__init__cCst|j��dS)z Return a setuptools Dist object.N)�NotImplementedError�dist)r%�finderr&r&r'r>RszDistAbstraction.distcCst|j��dS)z3Ensure that we can get a Dist for this requirement.N)r=r>)r%r&r&r'�
prep_for_distVszDistAbstraction.prep_for_distN)r8r9r:�__doc__r(r>r@r&r&r&r'r;<sr;cCs0|jrt|�S|jr$|jjr$t|�St|�SdS)z�Factory to make an abstract dist object.

    Preconditions: Either an editable req with a source_dir, or satisfied_by or
    a wheel link, or a non-editable req with a source_dir.

    :return: A concrete DistAbstraction.
    N)�editable�IsSDist�link�is_wheel�IsWheel)r<r&r&r'�make_abstract_dist[s
rGc@seZdZdd�Zdd�ZdS)rFcCsttj|jj��dS)Nr)�listrZfind_distributionsr<�
source_dir)r%r?r&r&r'r>mszIsWheel.distcCsdS)Nr&)r%r&r&r'r@qszIsWheel.prep_for_distN)r8r9r:r>r@r&r&r&r'rFksrFc@seZdZdd�Zdd�ZdS)rCcCs(|jj�}|jd�r$|j|jd��|S)Nzdependency_links.txt)r<Zget_distZhas_metadataZadd_dependency_linksZget_metadata_lines)r%r?r>r&r&r'r>xs


zIsSDist.distcCs|jj�|jj�dS)N)r<Zrun_egg_infoZassert_source_matches_version)r%r&r&r'r@�s
zIsSDist.prep_for_distN)r8r9r:r>r@r&r&r&r'rCvs	rCc@seZdZdd�Zdd�ZdS)�	InstalledcCs|jjS)N)r<�satisfied_by)r%r?r&r&r'r>�szInstalled.distcCsdS)Nr&)r%r&r&r'r@�szInstalled.prep_for_distN)r8r9r:r>r@r&r&r&r'rJ�srJc@s�eZdZd$dd�Zdd�Zdd	�Zd%d
d�Zdd
�Zedd��Z	edd��Z
dd�Zd&dd�Zdd�Z
dd�Zdd�Zd'dd�Zdd�Zd d!�Zffd"d#�ZdS)(�RequirementSetFNTcCs�|dkrtd��||_||_||_||_||_||_|
|_t�|_	i|_
g|_|	|_||_
g|_g|_g|_||_||_||_||_|
|_||_|r�t|�}||_||_||_tt�|_dS)a3Create a RequirementSet.

        :param wheel_download_dir: Where still-packed .whl files should be
            written to. If None they are written to the download_dir parameter.
            Separate to download_dir to permit only keeping wheel archives for
            pip wheel.
        :param download_dir: Where still packed archives should be written to.
            If None they are not saved, and are deleted immediately after
            unpacking.
        :param wheel_cache: The pip wheel cache, for passing to
            InstallRequirement.
        Nz?RequirementSet() missing 1 required keyword argument: 'session')�	TypeError�	build_dir�src_dir�download_dir�upgrade�upgrade_strategy�ignore_installed�force_reinstallr"�requirements�requirement_aliases�unnamed_requirements�ignore_dependencies�ignore_requires_python�successfully_downloaded�successfully_installed�reqs_to_cleanup�as_egg�
use_user_site�
target_dir�session�	pycompile�isolatedr�wheel_download_dir�_wheel_cache�require_hashesrrH�
_dependencies)r%rNrOrPrQrRrSr]r_rXrTr^r`rarbrc�wheel_cachererYr&r&r'r(�s<zRequirementSet.__init__cCs8dd�|jj�D�}|jdd�d�djdd�|D��S)NcSsg|]}|js|�qSr&)Z
comes_from)r*�reqr&r&r'r,�sz*RequirementSet.__str__.<locals>.<listcomp>cSs
|jj�S)N)�name�lower)rhr&r&r'�<lambda>�sz(RequirementSet.__str__.<locals>.<lambda>)r+� cSsg|]}t|j��qSr&)�strrh)r*rhr&r&r'r,�s)rUr-�sortr6)r%�reqsr&r&r'�__str__�szRequirementSet.__str__cCsNdd�|jj�D�}|jdd�d�djdd�|D��}d|jjt|�|fS)	NcSsg|]}|�qSr&r&)r*rhr&r&r'r,�sz+RequirementSet.__repr__.<locals>.<listcomp>cSs
|jj�S)N)rirj)rhr&r&r'rk�sz)RequirementSet.__repr__.<locals>.<lambda>)r+z, cSsg|]}t|j��qSr&)rmrh)r*rhr&r&r'r,�sz"<%s object; %d requirement(s): %s>)rUr-rnr6�	__class__r8�len)r%roZreqs_strr&r&r'r7�s
zRequirementSet.__repr__c	
Cs�|j}|j|�s&tjd|j|j�gS|jrV|jjrVt|jj�}|j	�sVt
d|j��|j|_|j|_|j
|_
|j|_|dk|_|s�|jj|�|gSy|j|�}Wntk
r�d}YnX|dko�|o�|jo�|j|jko�|jj|jjk�rt
d|||f��|�s8||j|<|j�|k�r0||j|j�<|g}n�g}|j�r�|j�r�|j�r�|j�ol|jj|jjk�r�|jj|�t
d|��d|_ttt|j�j t|j����|_tj!d||j�|g}|}|�r�|j|�}|j"|j|�|SdS)a'Add install_req as a requirement to install.

        :param parent_req_name: The name of the requirement that needed this
            added. The name is used because when multiple unnamed requirements
            resolve to the same name, we could otherwise end up with dependency
            links that point outside the Requirements set. parent_req must
            already be added. Note that None implies that this is a user
            supplied requirement, vs an inferred one.
        :param extras_requested: an iterable of extras used to evaluate the
            environement markers.
        :return: Additional requirements to scan. That is either [] if
            the requirement is not applicable, or [install_req] if the
            requirement is applicable and has just been added.
        z6Ignoring %s: markers '%s' don't match your environmentz-%s is not a supported wheel on this platform.Nz5Double requirement given: %s (already in %s, name=%r)zhCould not satisfy constraints for '%s': installation from path or url cannot be constrained to a versionFzSetting %s extras to: %s)#riZ
match_markers�logger�warningZmarkersrDrEr!�filenameZ	supportedr
r]r^r_ra�	is_directrWr0�get_requirement�KeyError�
constraint�extrasrhZ	specifierrUrjrV�pathr\�tuple�sorted�set�union�debugrf)	r%�install_reqZparent_req_name�extras_requestedriZwheelZexisting_req�resultZ
parent_reqr&r&r'�add_requirement�sp






zRequirementSet.add_requirementcCsF|j�}||jkr |j|js>||jkrB|j|j|jrBdSdS)NTF)rjrUryrV)r%�project_namerir&r&r'�has_requirement4s

zRequirementSet.has_requirementcCstdd�|jj�D��p|jS)Ncss|]}|js|VqdS)N)ry)r*rhr&r&r'�	<genexpr>?sz2RequirementSet.has_requirements.<locals>.<genexpr>)rHrUr-rW)r%r&r&r'�has_requirements=szRequirementSet.has_requirementscCsD|jr@t|j�|_tjj|j�r$dStjd�tdt|j���dS)NTz!Could not find download directoryz0Could not find or access download directory '%s'F)	rPr�osr{�existsrs�criticalr
r)r%r&r&r'�is_downloadBs
zRequirementSet.is_downloadcCsTxB||j�fD]2}||jkr&|j|S||jkr|j|j|SqWtd|��dS)NzNo project with the name %r)rjrUrVrx)r%r�rir&r&r'rwOs


zRequirementSet.get_requirementcCs4x.|jj�D] }|jrq|j|d�|j�qWdS)N)�auto_confirm)rUr-ry�	uninstall�commit_uninstall)r%r�rhr&r&r'r�Ws
zRequirementSet.uninstallcCs�|jrt|j�|j|jj�}|jp6tdd�|D��}|rJ|jrJtd��g}t	�}xdt
||�D]V}y|j|j||||j
d��Wq`tk
r�}z||_|j|�WYdd}~Xq`Xq`W|r�|�dS)zY
        Prepare process. Create temp directories, download and/or unpack files.
        css|]}|jVqdS)N)Zhas_hash_options)r*rhr&r&r'r�jsz/RequirementSet.prepare_files.<locals>.<genexpr>z�--egg is not allowed with --require-hashes mode, since it delegates dependency resolution to setuptools and could thus result in installation of unhashed packages.)rerXN)rcrrWrUr-re�anyr]r
rr�extend�
_prepare_filerXrrhr0)r%r?Z	root_reqsreZdiscovered_reqsZhash_errorsrh�excr&r&r'�
prepare_files^s,

 zRequirementSet.prepare_filescCs |jo|jdkp|jdko|jS)NZeagerzonly-if-needed)rQrRrv)r%rhr&r&r'�_is_upgrade_allowed�s
z"RequirementSet._is_upgrade_allowedcCs�|j�|jr�|j|�}d}|r�|jp*|jshy|j||�Wn*tk
rTd}Yntk
rfYnX|s�|jr~t	|j�p�t
|j�s�|j|_d|_|r�d}n|jdkr�d}nd}|SdSdS)aCheck if req_to_install should be skipped.

        This will check if the req is installed, and whether we should upgrade
        or reinstall it, taking into account all the relevant user options.

        After calling this req_to_install will only have satisfied_by set to
        None if the req_to_install is to be upgraded/reinstalled etc. Any
        other value will be a dist recording the current thing installed that
        satisfies the requirement.

        Note that for vcs urls and the like we can't assess skipping in this
        routine - we simply identify that we need to pull the thing down,
        then later on it is pulled down and introspected to assess upgrade/
        reinstalls etc.

        :return: A text reason for why it was skipped, or None.
        FTNzalready up-to-datezonly-if-neededz%not upgraded as not directly requiredzalready satisfied)
�check_if_existsrKr�rTrDZfind_requirementrrr^rr�conflicts_withrR)r%r<r?Zupgrade_allowedZbest_installed�skip_reasonr&r&r'�_check_skip_installed�s4

z$RequirementSet._check_skip_installedc(st�js�jrgSd�_�jr*tjd��nd�js<�j�|�}�jrRtjd|��n<�jr��jj	dkr�t
�jj�}tjdt|��ntjd��t
�����jr�|r�td����j�j��j�j�t��}|j��jr�j�j��j��n$�j�r|�rtjd�t��}�n�j�j�tjjtjj�jd	���rRt d
��jf���j!|�j"��|��j}|�r�t#|��r�t$��nt%|��r�t&|��r�t'���j(�r��j)�r�t*���j+|d�}	|�r�|	�r�t,�}	yZ�j}
d}�jj-�r��j.�r��j.}
�jj-�r|
�rd}nd}t/�j�j|
|�j0|	d
�WnHt1j2k
�r|}z(tj3d�|�td�|�jf��WYdd}~XnXt��}|j��j�r��jj	t4j5k�r��j�j��j�sˆj��j�r�j6�sڈj�r�j7�r�t8�j��p�t9�j��s�j�_:d�_ntjd��|j;|�}
yt<|
�WnHt=k
�rx}z*�j>�r^tj?|j@d�n
�jA��WYdd}~XnXg����fdd�}�jB�jC��s��jD�d�|�s:�jE�r�tjddj�jE��tFtG�jE�tG|
jE��}x|D]}tj?d|
|��q�WtFtG|
jE�tG�jE�@�}x |
jH|�D]}|||d��q$W�jIjJ���j�rf�j�rf�jKjJ��WdQRX�S)zxPrepare a single requirements file.

        :return: A list of additional InstallRequirements to also install.
        TzObtaining %szRequirement %s: %s�filez
Processing %sz
Collecting %szoThe editable requirement %s cannot be installed when requiring hashes, because there is no single file to hash.z�Since it is already installed, we are trusting this package without checking its hash. To ensure a completely repeatable environment, install into an empty virtualenv.zsetup.pyz�pip can't proceed with requirements '%s' due to a pre-existing build directory (%s). This is likely due to a previous installation that failed. pip is being responsible and not assuming it can delete this. Please delete it and try again.)Ztrust_internetF)r`�hashesz4Could not install requirement %s because of error %szDCould not install requirement %s because of HTTP error %s for URL %sNz<Requirement already satisfied (use --upgrade to upgrade): %srcs4tt|���j�jd�}�j�j|�j|d��dS)N)rbrg)r�)rrmrbrdr�r�ri)�subreqr�Zsub_install_req)�	more_reqsr<r%r&r'�add_req�s
z-RequirementSet._prepare_file.<locals>.add_reqz!Installing extra requirements: %r�,z"%s does not provide the extra '%s')r�)LryZpreparedrBrs�inforSr�rKrD�schemerZurlrrr
Zensure_has_source_dirrOZupdate_editabler�rGr@�archiverPr�r�rJrNr�r{r�r6rIrZ
populate_linkr�r
rrr	rZ
original_linkZ	is_pinnedrr�rrErcrr`rZ	HTTPErrorr�r Zall_schemesrQr^rrr�r>rrrYrt�args�remove_temporary_sourcer�rir�rzr}r~Zrequiresr\r0rZ)r%r?r<rerXr�r{Z
abstract_distrDr�rPZautodelete_unpackedr�r>�er�Zmissing_requestedZmissingZavailable_requestedr�r&)r�r<r%r'r��s







"


zRequirementSet._prepare_filec	Cs8tjd�t��x|jD]}|j�qWWdQRXdS)zClean up files, remove builds.zCleaning up...N)rsr�rr\r�)r%rhr&r&r'�
cleanup_files�s
zRequirementSet.cleanup_filescs<g�t������fdd��x�jj�D]}�|�q(W�S)z�Create the installation order.

        The installation order is topological - requirements are installed
        before the requiring thing. We break cycles at an arbitrary point,
        and make no other guarantees.
        csP|js|�krdS|jrdS�j|�x�j|D]}�|�q2W�j|�dS)N)rKry�addrfr0)rhZdep)�order�ordered_reqs�scheduler%r&r'r��s
z,RequirementSet._to_install.<locals>.schedule)r~rUr-)r%r�r&)r�r�r�r%r'�_to_install�s
	zRequirementSet._to_installcOs�|j�}|r(tjddjdd�|D���t���x�|D]�}|jrltjd|j�t��|jdd�WdQRXy|j||f|�|�Wn$|jr�|jr�|j	��YnX|jr�|jr�|j
�|j�q6WWdQRX||_dS)	zl
        Install everything in this set (after having downloaded and unpacked
        the packages)
        z!Installing collected packages: %sz, cSsg|]
}|j�qSr&)ri)r*rhr&r&r'r,sz*RequirementSet.install.<locals>.<listcomp>zFound existing installation: %sT)r�N)
r�rsr�r6rr�r��installZinstall_succeededZrollback_uninstallr�r�r[)r%Zinstall_optionsZglobal_optionsr��kwargsZ
to_installZrequirementr&r&r'r��s:

zRequirementSet.install)FNFFNFFFNTFNNFF)NN)F)FF)r8r9r:r(rpr7r�r��propertyr�r�rwr�r�r�r�r�r�r�r�r&r&r&r'rL�s4
4
[	

'E
	rL);Z
__future__r�collectionsr�	itertoolsrZloggingr�Zpip._vendorrrZ
pip.compatrZpip.downloadrr	r
rrZpip.exceptionsr
rrrrrrrrrZpip.req.req_installrZ	pip.utilsrrrrrZpip.utils.hashesrZpip.utils.loggingrZpip.utils.packagingrZpip.vcsr Z	pip.wheelr!Z	getLoggerr8rs�objectr"r;rGrFrCrJrLr&r&r&r'�<module>s00
	req/__pycache__/req_install.cpython-36.pyc000064400000072766151733136160014536 0ustar003

�Pf���@s0ddlmZddlZddlZddlZddlZddlZddlZddlZddl	Z	ddl
Z
ddlmZddl
mZddlmZddlmZmZddlmZddlmZdd	lmZmZdd
lmZddlmZmZ ddl!m"Z"ddl#Z$dd
l%m&Z&m'Z'm(Z(ddl)m*Z*m+Z+m,Z,m-Z-ddl.m/Z/m0Z0ddl1m2Z2m3Z3m4Z4m5Z5ddl6m7Z7m8Z8m9Z9m:Z:m;Z;m<Z<m=Z=m>Z>m?Z?m@Z@mAZAmBZBmCZCmDZDmEZEmFZFmGZGddlHmIZIddlJmKZKddlLmMZMddlNmOZOddlPmQZQddlRmSZSddlTmUZUddl#mVZVmWZWejXeY�ZZej[j\j]�Z^dd�Z_dd�Z`Gdd�dea�Zbd d!�Zcd$d"d#�ZddS)%�)�absolute_importN)�	sysconfig)�change_root)�
FeedParser)�
pkg_resources�six)�
specifiers)�Marker)�InvalidRequirement�Requirement)�canonicalize_name)�Version�parse)�configparser)�
native_str�
get_stdlib�WINDOWS)�is_url�url_to_path�path_to_url�is_archive_file)�InstallationError�UninstallationError)�bin_py�running_under_virtualenv�PIP_DELETE_MARKER_FILENAME�bin_user)�display_path�rmtree�ask_path_exists�
backup_dir�is_installable_dir�dist_in_usersite�dist_in_site_packages�dist_in_install_path�
egg_link_path�call_subprocess�read_text_file�FakeFile�_make_build_dir�
ensure_dir�get_installed_version�normalize_path�
dist_is_local)�Hashes)�RemovedInPip10Warning)�
indent_log)�SETUPTOOLS_SHIM)�open_spinner)�UninstallPathSet)�vcs)�move_wheel_files�WheelcCs6tjd|�}d}|r*|jd�}|jd�}n|}||fS)Nz^(.+)(\[[^\]]+\])$��)�re�match�group)�path�m�extrasZpath_no_extras�r?�!/usr/lib/python3.6/req_install.py�
_strip_extras9s
rAcCstdd�|D��S)Ncss|]}tj|�VqdS)N)rZ
safe_extra)�.0�extrar?r?r@�	<genexpr>Fsz_safe_extras.<locals>.<genexpr>)�set)r>r?r?r@�_safe_extrasEsrFc@s�eZdZdQdd�ZedRdd��ZedSdd	��Zd
d�Zdd
�Zdd�Z	e
dd��Ze
dd��Zdd�Z
dd�Zdd�Ze
dd��Ze
dd��Ze
dd��Zd d!�Zd"d#�Zd$d%�Zd&d'�Zejd(�Ze
d)d*��Zd+d,�ZdTd-d.�ZdUd/d0�Zd1d2�Zd3d4�Zd5d6�Z d7d8�Z!dVd9d:�Z"gdddfd;d<�Z#d=d>�Z$d?d@�Z%dAdB�Z&fdfdCdD�Z'dEdF�Z(e
dGdH��Z)dWdIdJ�Z*dKdL�Z+e
dMdN��Z,dXdOdP�Z-dS)Y�InstallRequirementNFTcsJf|_t�tj�r�yt���Wndtk
r�tjj�kr@d}n.d�krft	�fdd�t
D��rfd}ntj�}t
d�|f��YnXt�j�|_�|_||_|
|_||_||_||_||_|_||_|	dk	r�|	|_n�oވj|_d|_d|_d|_d|_d|_||_d|_ d|_!d|_"d|_#d|_$|�r.|ni|_%||_&d|_'|
|_(dS)Nz%It looks like a path. Does it exist ?�=c3s|]}|�kVqdS)Nr?)rB�op)�reqr?r@rDVsz.InstallRequirement.__init__.<locals>.<genexpr>z,= is not a valid operator. Did you mean == ?zInvalid requirement: '%s'
%sF))r>�
isinstancer�string_typesrr
�osr<�sep�any�	operators�	traceback�
format_excrrFrJ�
comes_from�
constraint�
source_dir�editable�_wheel_cache�link�
original_link�as_egg�markers�marker�_egg_info_path�satisfied_by�conflicts_with�_temp_build_dir�_ideal_build_dir�update�install_succeeded�uninstalled�nothing_to_uninstall�
use_user_site�
target_dir�options�	pycompileZprepared�isolated)�selfrJrSrUrVrXrZrbrir[rjrh�wheel_cacherT�add_msgr?)rJr@�__init__KsN zInstallRequirement.__init__cCspddlm}t||�\}	}
}|
jd�r0t|
�}nd}||	||d||
�|||rP|ni|d�	}
|dk	rlt|�|
_|
S)Nr)�Linkzfile:T)rUrVrXrTrjrhrl)�	pip.indexro�parse_editable�
startswithrrFr>)�cls�editable_reqrS�default_vcsrjrhrlrTro�name�urlZextras_overriderU�resr?r?r@�
from_editable�s 



z InstallRequirement.from_editablec
Cs�ddlm}t|�rd}nd}||krR|j|d�\}}	|	j�}	|	sHd}	qVt|	�}	nd}	|j�}d}
tjjtjj	|��}d}d}
t|�r�||�}n�t
|�\}}
tjj|�r�tjj|ks�|j
d�r�t|�s�td|��|t|��}n0t|��rtjj|��stjd	|�|t|��}|�r||jd
k�rPtjd|j��rP|ttjjtjj	|j����}|j�rtt|j�}d|j|jf}
n|j}
n|}
|�r�|ni}||
|||	||||d
�}|
�r�tt d|
�j!�|_!|S)z�Creates an InstallRequirement from a name, which might be a
        requirement, directory containing 'setup.py', filename, or URL.
        r)roz; �;r7N�.z;Directory %r is not installable. File 'setup.py' not found.zARequirement %r looks like a filename, but the file does not exist�filez\.\./z%s==%s)rXr[rjrhrlrT�placeholder)"rpror�split�stripr	rMr<�normpath�abspathrA�isdirrNrrr!rrr�isfile�logger�warning�schemer9�searchrw�is_wheelr6�filenamerv�version�egg_fragmentrFrr>)rsrvrSrjrhrlrTroZ
marker_sepr[rJr<rXr>�p�wheelrxr?r?r@�	from_line�sb





zInstallRequirement.from_linecCs�|jr(t|j�}|jr:|d|jj7}n|jr6|jjnd}|jdk	rX|dt|jj�7}|jr�t|jt	j
�rt|j}n
|jj�}|r�|d|7}|S)Nz from %sz in %sz
 (from %s))rJ�strrXrwr^r�locationrSrKrrL�	from_path)rk�srSr?r?r@�__str__�s


zInstallRequirement.__str__cCsd|jjt|�|jfS)Nz<%s object: %s editable=%r>)�	__class__�__name__r�rV)rkr?r?r@�__repr__szInstallRequirement.__repr__cCs^|jdkr|j||�|_|jdk	rZ|rZ|j}|jj|j|j�|_||jkrZtjd|j�dS)aEnsure that if a link can be found for this, that it is found.

        Note that self.link may still be None - if Upgrade is False and the
        requirement is already installed.

        If require_hashes is True, don't use the wheel cache, because cached
        wheels, always built locally, have different hashes than the files
        downloaded from the index server and thus throw false hash mismatches.
        Furthermore, cached wheels at present have undeterministic contents due
        to file modification times.
        NzUsing cached wheel link: %s)rXZfind_requirementrWZcached_wheelrvr��debug)rk�finderZupgradeZrequire_hashesZold_linkr?r?r@�
populate_link	s

z InstallRequirement.populate_linkcCs|jjS)N)rJ�	specifier)rkr?r?r@r�szInstallRequirement.specifiercCs$|j}t|�dko"tt|��jdkS)z�Return whether I am pinned to an exact version.

        For example, some-package==1.2 is pinned; some-package>1.2 is not.
        r7�==�===)r�r�)r��len�next�iter�operator)rkrr?r?r@�	is_pinned!szInstallRequirement.is_pinnedcCsR|jdkrdSt|j�}|jrNt|jtj�r4|j}n
|jj�}|rN|d|7}|S)Nz->)rJr�rSrKrrLr�)rkr�rSr?r?r@r�+s


zInstallRequirement.from_pathcCs�|jdk	r|jS|jdkr<tjjtjdd��|_||_|jS|jrN|j	j
�}n|j	}tjj|�sttj
d|�t|�tjj||�S)Nz-buildzpip-zCreating directory %s)r`rJrMr<�realpath�tempfile�mkdtemprarVrv�lower�existsr�r�r)�join)rk�	build_dirrvr?r?r@�build_location8s

z!InstallRequirement.build_locationcCs�|jdk	rdS|jdk	st�|js&t�|js0t�|j}d|_|j|j�}tjj|�rdt	dt
|���tjd|t
|�t
|��t
j||�||_d|_||_d|_dS)a�Move self._temp_build_dir to self._ideal_build_dir/self.req.name

        For some requirements (e.g. a path to a directory), the name of the
        package is not available until we run egg_info, so the build_location
        will return a temporary directory and store the _ideal_build_dir.

        This is only called by self.egg_info_path to fix the temporary build
        directory.
        Nz<A package already exists in %s; please remove it to continuez,Moving package %s from %s to new location %s)rUrJ�AssertionErrorr`rar�rMr<r�rrr�r��shutil�mover])rkZold_locationZnew_locationr?r?r@�_correct_build_locationSs(



z*InstallRequirement._correct_build_locationcCs |jdkrdSttj|jj��S)N)rJrrZ	safe_namerv)rkr?r?r@rvss
zInstallRequirement.namecCstjj|j|jr|jjpd�S)N�)rMr<r�rUrXZsubdirectory_fragment)rkr?r?r@�setup_py_diryszInstallRequirement.setup_py_dircCs�|jstd|��yddl}Wn:tk
rXtd�dkr@d}ntj�}td|��YnXtj	j
|jd�}tj
r�t|tj�r�|jtj��}|S)NzNo source dir for %sr�
setuptoolszPlease install setuptools.zWCould not import setuptools which is required to install from a source distribution.
%szsetup.py)rUr�r��ImportErrorr+rQrRrrMr<r�r�r�PY2rKZ	text_type�encode�sys�getfilesystemencoding)rkr�rm�setup_pyr?r?r@r�szInstallRequirement.setup_pyc
CsZ|js
t�|jr$tjd|j|j�ntjd|j|j�t��xt|j}t	j
d|g}|jrd|dg7}|dg}|jrzg}n t
jj|jd�}t|�ddg}t|||jdd	d
�WdQRX|j�stt|j�d�t�r�d}nd
}tdj|j�d||j�dg��|_|j�nDt|j�d�}t|jj�|k�rVtjd|j|j||j�t|�|_dS)Nz2Running setup.py (path:%s) egg_info for package %sz7Running setup.py (path:%s) egg_info for package from %sz-cz
--no-user-cfg�egg_infozpip-egg-infoz
--egg-baseFzpython setup.py egg_info)�cwd�show_stdoutZcommand_descr
z==z===r��NamezuRunning setup.py (path:%s) egg_info for package %s produced metadata for project name %s. Fix your #egg=%s fragments.)rUr�rvr�r�r�rXr0r1r��
executablerjrVrMr<r�r�r*r&rJrK�
parse_version�pkg_infor
rr�rr�)rk�scriptZbase_cmdZegg_info_cmdZegg_base_option�egg_info_dirrIZ
metadata_namer?r?r@�run_egg_info�sP





zInstallRequirement.run_egg_infocCsV|jdk	r&|jj|�sdS|jj|�S|js0t�|j|�}tjj|�sJdSt	|�}|S)N)
r^�has_metadata�get_metadatarUr��
egg_info_pathrMr<r�r')rkr��datar?r?r@�
egg_info_data�s


z InstallRequirement.egg_info_datacs||jdk�rl|jr|j}ntjj|jd�}tj|�}|j�rg}x�tj|�D]�\�}}x t	j
D]}||kr^|j|�q^Wxjt|�D]^}tjj
tjj�|dd��s�tjjtjj�|dd��r�|j|�q�|dks�|dkr�|j|�q�W|j�fdd	�|D��qLWd
d	�|D�}|�s$td||f��|�s:td||f��t|�dk�rX|jd
d�d�tjj||d�|_tjj|j|�S)Nzpip-egg-info�bin�pythonZScriptsz
Python.exeZtestZtestscsg|]}tjj�|��qSr?)rMr<r�)rB�dir)�rootr?r@�
<listcomp>�sz4InstallRequirement.egg_info_path.<locals>.<listcomp>cSsg|]}|jd�r|�qS)z	.egg-info)�endswith)rB�fr?r?r@r��sz$No files/directories in %s (from %s)r7cSs(|jtjj�tjjr"|jtjj�p$dS)Nr)�countrMr<rN�altsep)�xr?r?r@�<lambda>
sz2InstallRequirement.egg_info_path.<locals>.<lambda>)�keyr)r]rVrUrMr<r�r��listdir�walkr4�dirnames�remove�list�lexistsr��extendrr�r��sort)rkr��base�	filenames�dirs�filesr�r?)r�r@r��sB
z InstallRequirement.egg_info_pathcCs@t�}|jd�}|s*tjdt|jd���|j|p4d�|j�S)NzPKG-INFOzNo PKG-INFO file found in %sr�)rr�r�r�rr�Zfeed�close)rkr�r�r?r?r@r�s
zInstallRequirement.pkg_infoz	\[(.*?)\]cCs
t|j�S)N)r+rv)rkr?r?r@�installed_version sz$InstallRequirement.installed_versioncCsV|js
t�|j�d}|jjr<||jjkr<tjd||j�ntjdt	|j�||�dS)Nr�z'Requested %s, but installing version %sz;Source in %s has version %s, which satisfies requirement %s)
rUr�r�rJr�r�r�r�r�r)rkr�r?r?r@�assert_source_matches_version$s

z0InstallRequirement.assert_source_matches_versioncCs�|jstjd|j�dS|js"t�|js,t�|jjdkr<dSd|jjksXtd|jj��|jsbdS|jjj	dd�\}}t
j|�}|r�||jj�}|r�|j|j�q�|j
|j�nds�td|j|f��dS)Nz>Cannot update repository at %s; repository location is unknownr|�+zbad url: %rr7rz+Unexpected version control type (in %s): %s)rXr�r�rUrVr�r�rwrbr~r4�get_backend�obtainZexport)rkr��vc_typerw�backendZvcs_backendr?r?r@�update_editable5s,


z"InstallRequirement.update_editablecsx|j�std|jf��|jp"|j}t|j�}t|�sTtj	d|j
|tj�d|_
dS|t�krxtj	d|j
|�d|_
dSt|�}t|�}djtj|j��}|jo�tjj|j�}t|jdd�}|o�|jjd�o�|jj|��r�|j|j�|jd	��r2x�|jd	�j�D](}	tjj tjj!|j|	��}
|j|
��qWn�|jd
��r|jd��rV|jd��ng�xj�fdd
�|jd
�j�D�D]J}tjj!|j|�}
|j|
�|j|
d�|j|
d�|j|
d��qxW�nH|�r�t"j#dj|j�t$�|j|��n |jjd��rH|j|j�tjj%|j�d}tjj!tjj&|j�d�}
|j'|
d|�n�|�r�|jjd��r�x�t(j)j*|�D]}
|j|
��qjWn�|�rt+|d��}tjj,|j-�j.��}WdQRX||jk�s�t/d||j|jf��|j|�tjj!tjj&|�d�}
|j'|
|j�ntj0d||j�|jd��r�|j1d��r�xZ|j2d�D]L}t3|��rJt4}nt5}|jtjj!||��t6�r6|jtjj!||�d��q6W|jd��rdt7j8�r�i}ndd#i}t9j:f|�}|j;t<|j=d���|j>d��rdx�|j?d�D]�\}}t3|��r�t4}nt5}|jtjj!||��t6�r�|jtjj!||�d �|jtjj!||�d!�|jtjj!||�d"��q�W|j@|�||_AdS)$a�
        Uninstall the distribution currently satisfying this requirement.

        Prompts before removing or modifying files unless
        ``auto_confirm`` is True.

        Refuses to delete or modify files outside of ``sys.prefix`` -
        thus uninstallation within a virtual environment can only
        modify that virtual environment, even if the virtualenv is
        linked to global site-packages.

        z.Cannot uninstall requirement %s, not installedz1Not uninstalling %s at %s, outside environment %sTNz<Not uninstalling %s at %s, as it is in the standard library.z{0}.egg-infor<z	.egg-infozinstalled-files.txtz
top_level.txtznamespace_packages.txtcsg|]}|r|�kr|�qSr?r?)rBr�)�
namespacesr?r@r��sz0InstallRequirement.uninstall.<locals>.<listcomp>z.pyz.pycz.pyoz�Uninstalling a distutils installed project ({0}) has been deprecated and will be removed in a future version. This is due to the fact that uninstalling a distutils project will only partially uninstall the project.z.eggr7zeasy-install.pthz./z
.dist-info�rz;Egg-link %s does not match installed location of %s (at %s)z)Not sure how to uninstall: %s - Check: %s�scriptsz.batzentry_points.txtZ
delimitersrHZconsole_scriptsz.exez
.exe.manifestz
-script.py)rH)B�check_if_existsrrvr^r_r,r�r-r��infor�r��prefixrerr3r%�formatrZto_filename�project_namer�rMr<r��getattrZ	_providerr��addr�r��
splitlinesr�r��warnings�warnr/r~�dirnameZadd_pth�pipr�Zuninstallation_paths�open�normcase�readlinerr�r�Zmetadata_isdirZmetadata_listdirr"rrrrr�rZSafeConfigParserZreadfpr(Zget_metadata_linesZhas_section�itemsr�rd)rkZauto_confirmZdistZ	dist_pathZpaths_to_removeZdevelop_egg_linkZdevelop_egg_link_egg_infoZegg_info_existsZdistutils_egg_infoZinstalled_filer<Z
top_level_pkgZeasy_install_eggZeasy_install_pthZfhZlink_pointerr�Zbin_dirrh�configrv�valuer?)r�r@�	uninstallRs�









zInstallRequirement.uninstallcCs$|jr|jj�ntjd|j�dS)Nz'Can't rollback %s, nothing uninstalled.)rdZrollbackr��errorrv)rkr?r?r@�rollback_uninstall�sz%InstallRequirement.rollback_uninstallcCs*|jr|jj�n|js&tjd|j�dS)Nz%Can't commit %s, nothing uninstalled.)rdZcommitrer�r�rv)rkr?r?r@�commit_uninstall�s
z#InstallRequirement.commit_uninstallcCs�|js
t�d}d|j|j�df}tjj||�}tjj|�r�tdt	|�d�}|dkr^d	}nj|dkr�t
jd
t	|��tj|�nF|dkr�t
|�}t
jdt	|�t	|��tj||�n|dkr�tjd�|�r�tj|dtjdd
�}tjjtjj|j��}x�tj|�D]�\}	}
}d|
k�r"|
jd�xR|
D]J}tjj|	|�}|j||�}
tj|jd|
d�}d|_|j|d��q(WxL|D]D}|tk�r��q|tjj|	|�}|j||�}
|j||jd|
��q|W�qW|j�t
j dt	|��dS)NTz	%s-%s.zipr�z8The file %s exists. (i)gnore, (w)ipe, (b)ackup, (a)bort �i�w�b�aFzDeleting %szBacking up %s to %sr7)Z
allowZip64zpip-egg-info�/i��r�zSaved %s)rrrr���i�)!rUr�rvr�rMr<r�r�rrr�r�r�r r�r�r��exit�zipfileZZipFileZZIP_DEFLATEDr�r�r�r��_clean_zip_nameZZipInfoZ
external_attrZwritestrr�writer�r�)rkr�Zcreate_archiveZarchive_nameZarchive_pathZresponseZ	dest_file�zipr��dirpathr�r�r�rvZzipdirr�r?r?r@�archivesX







"zInstallRequirement.archivecCsJ|j|tjj�s"td||f��|t|�dd�}|jtjjd�}|S)Nz$name %r doesn't start with prefix %rr7r)rrrMr<rNr�r��replace)rkrvr�r?r?r@r	5s
z"InstallRequirement._clean_zip_namecs0|sd}�jdk	r(t�fdd�|D��SdSdS)Nr�c3s|]}�jjd|i�VqdS)rCN)r[Zevaluate)rBrC)rkr?r@rDDsz3InstallRequirement.match_markers.<locals>.<genexpr>T)r�)r[rO)rkZextras_requestedr?)rkr@�
match_markers=s


z InstallRequirement.match_markersc,s`|jr|j|||d�dS|jr\tjj|j�}tjj||j�|j	|j�||d�d|_
dS||jjdg�7}||jjdg�7}|j
r�t|�dg}tjdd�}tjj|d	�}�z�|j||�|�}	d
|jf}
t|
��.}t��t|	||jd|d�WdQRXWdQRXtjj|��s(tjd
|�dSd|_
|j�r:dS�fdd�}t|��H}
x@|
D](}tjj|�}|jd��rV||�}P�qVWtj d|�dSWdQRXg}t|��P}
xH|
D]@}|j!�}tjj"|��r�|tjj#7}|j$tjj%||�|���q�WWdQRXtjj|d�}t|d��}
|
j&dj|�d�WdQRXWdtjj|��rRtj'|�t(|�XdS)N)r�)r�r��strip_file_prefixT�global_options�install_optionsz
--no-user-cfgz-recordzpip-zinstall-record.txtzRunning setup.py install for %sF)r�r��spinnerzRecord file %s not foundcs(�dkstjj|�r|St�|�SdS)N)rMr<�isabsr)r<)r�r?r@�prepend_root~sz0InstallRequirement.install.<locals>.prepend_rootz	.egg-infoz;Could not find .egg-info directory in install record for %szinstalled-files.txtr�
))rV�install_editabler�r�r�Z
wheel_versionrUZcheck_compatibilityrvr5rcrh�getrjr�r�r�rMr<r��get_install_argsr2r0r&r�r�r�r�rZr�r�r�r�rr�rN�append�relpathr
r�r)rkrrr�r�rr�Z
temp_location�record_filename�install_args�msgrrr��lineZ	directoryr�Z	new_linesr�Zinst_files_pathr?)r�r@�installIs~




"
zInstallRequirement.installcCs|jdkr|j|�|_|jS)aAEnsure that a source_dir is set.

        This will create a temporary build dir if the name of the requirement
        isn't known yet.

        :param parent_dir: The ideal pip parent_dir for the source_dir.
            Generally src_dir for editables and build_dir for sdists.
        :return: self.source_dir
        N)rUr�)rkZ
parent_dirr?r?r@�ensure_has_source_dir�s

z(InstallRequirement.ensure_has_source_dircCs�tjdg}|jd�|jt|j�|t|�dd|g7}|jsJ|dg7}|dk	r^|d|g7}|dk	rr|d|g7}|jr�|dg7}n
|d	g7}t�r�d
t	j
�}|dtjj
tjdd
||j�g7}|S)Nz-uz-cr z--recordz#--single-version-externally-managedz--rootz--prefixz	--compilez--no-compiler�z--install-headers�includeZsite)r�r�rr1r�r�rZrirr�get_python_versionrMr<r�r�rv)rkrrr�r�rZ
py_ver_strr?r?r@r�s(



z#InstallRequirement.get_install_argscCsd|jr6tjjtjj|jt��r6tjd|j�t|j�d|_|j	rZtjj|j	�rZt|j	�d|_	dS)zVRemove the source files from this requirement, if they are marked
        for deletionzRemoving source in %sN)
rUrMr<r�r�rr�r�rr`)rkr?r?r@�remove_temporary_source�s

z*InstallRequirement.remove_temporary_sourcecCs�tjd|j�|jr"t|�dg}|r>dj|�g}t|�|}t��<ttj	dt
|jgt|�ddgt|�|jdd�WdQRXd	|_
dS)
NzRunning setup.py develop for %sz
--no-user-cfgz--prefix={0}z-cZdevelopz	--no-depsF)r�r�T)r�r�rvrjr�r�r0r&r�r�r1r�r�rc)rkrrr�Zprefix_paramr?r?r@r�s z#InstallRequirement.install_editablecCs�|jdkrdSyFtt|j��}d|_tjt|��|_|jrR|jrR|j|_d|_dSWn�tj	k
rjdStj
k
r�tj|jj�}|jr�t
|�r�||_q�t�r�t|�r�td|j|jf��nt|�r�||_YnXdS)z�Find an installed distribution that satisfies or conflicts
        with this requirement, and set self.satisfied_by or
        self.conflicts_with appropriately.
        NFTzVWill not install to the user site because it will lack sys.path precedence to %s in %s)rJrr�r\rZget_distributionr^rVr_ZDistributionNotFoundZVersionConflictrvrfr"rr#rr�r�r$)rkZ	no_markerZ
existing_distr?r?r@r��s4

z"InstallRequirement.check_if_existscCs|jo|jjS)N)rXr�)rkr?r?r@r� szInstallRequirement.is_wheelcCs,t|j|j||j|j|||j|j|d�
dS)N)�user�homer�r�rirjr)r5rvrJrfrgrirj)rkZwheeldirr�r�rr?r?r@r5$s
z#InstallRequirement.move_wheel_filescCsX|jd�jd�}tjj|�}tj||�}tjjtjj|��d}tj	tjj|�||d�S)zAReturn a pkg_resources.Distribution built from self.egg_info_pathr�rr)r��metadata)
r��rstriprMr<r�rZPathMetadata�splitext�basenameZDistribution)rkr�Zbase_dirr'Z	dist_namer?r?r@�get_dist0s
zInstallRequirement.get_distcCst|jjdi��S)z�Return whether any known-good hashes are specified as options.

        These activate --require-hashes mode; hashes specified as part of a
        URL do not.

        �hashes)�boolrhr)rkr?r?r@�has_hash_options;sz#InstallRequirement.has_hash_optionscCsJ|jjdi�j�}|r|jn|j}|rB|jrB|j|jg�j|j�t	|�S)a�Return a hash-comparer that considers my option- and URL-based
        hashes to be known-good.

        Hashes in URLs--ones embedded in the requirements file, not ones
        downloaded from an index server--are almost peers with ones from
        flags. They satisfy --require-hashes (whether it was implicitly or
        explicitly activated) but do not activate it. md5 and sha224 are not
        allowed in flags, which should nudge people toward good algos. We
        always OR all hashes together, even ones from URLs.

        :param trust_internet: Whether to trust URL-based (#md5=...) hashes
            downloaded from the internet, as by populate_link()

        r,)
rhr�copyrXrY�hash�
setdefaultZ	hash_namerr.)rkZtrust_internetZgood_hashesrXr?r?r@r,Es

zInstallRequirement.hashes)NFNFTTNFNNF)NNFNNF)NFNNF)T)F)N)NNN)T).r��
__module__�__qualname__rn�classmethodryr�r�r�r��propertyr�r�r�r�r�rvr�r�r�r�r�r�r9�compileZ_requirements_section_rer�r�r�r�r�r�r
r	rr r!rr$rr�r�r5r+r.r,r?r?r?r@rGIs`
;M

 :6


$0
\
)

rGcCstjd|�}|r|jd�}|S)z2
        Strip req postfix ( -dev, 0.2, etc )
    z^(.*?)(?:-dev|-\d.*)$r7)r9r�r;)rJr:r?r?r@�_strip_postfix[s
r7cCs�ddlm}|}d}tjd|�}|r:|jd�}|jd�}n|}tjj|�rttjjtjj	|d��slt
d|��t|�}|j�j
d	�r�||�j}|r�||td
|j��jfS||dfSx,tD]$}|j�j
d|�r�d||f}Pq�Wd
|k�r|�r
tjdt�|d
|}nt
d|��|jd
d�dj�}	tj|	��s`d|dj	dd�tjD��d}
t
|
��||�j}|�sxt
d��|�s�t
d|��t|�|dfS)aParses an editable requirement into:
        - a requirement name
        - an URL
        - extras
        - editable options
    Accepted requirements:
        svn+http://blahblah@rev#egg=Foobar[baz]&subdirectory=version_subdir
        .[some_extra]
    r)roNz^(.+)(\[[^\]]+\])$r7r8zsetup.pyz;Directory %r is not installable. File 'setup.py' not found.zfile:r}z%s:z%s+%sr�zD--default-vcs has been deprecated and will be removed in the future.zb%s should either be a path to a local project or a VCS url beginning with svn+, git+, hg+, or bzr+zFor --editable=%s only z, cSsg|]}|jd�qS)z+URL)rv)rBr�r?r?r@r��sz"parse_editable.<locals>.<listcomp>z is currently supportedz@Could not detect requirement name, please specify one with #egg=z@--editable=%s is not the right format; it must have #egg=Package)rpror9r:r;rMr<r�r�r�rrr�rrr�rr>r4r�r�r/r~r�Zbackendsr7)rtrurorwr>r=Z
url_no_extrasZpackage_nameZversion_controlr�Z
error_messager?r?r@rqgs`





rq)N)eZ
__future__rZloggingrMr9r�r�r�rQr�rZ	distutilsrZdistutils.utilrZemail.parserrZpip._vendorrrZpip._vendor.packagingrZpip._vendor.packaging.markersr	Z"pip._vendor.packaging.requirementsr
rZpip._vendor.packaging.utilsrZpip._vendor.packaging.versionr
rr�Zpip._vendor.six.movesrZ	pip.wheelr�Z
pip.compatrrrZpip.downloadrrrrZpip.exceptionsrrZ
pip.locationsrrrrZ	pip.utilsrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-Zpip.utils.hashesr.Zpip.utils.deprecationr/Zpip.utils.loggingr0Zpip.utils.setuptools_buildr1Zpip.utils.uir2Zpip.req.req_uninstallr3Zpip.vcsr4r5r6Z	getLoggerr�r�Z	SpecifierZ
_operators�keysrPrArF�objectrGr7rqr?r?r?r@�<module>s`L
req/__pycache__/req_set.cpython-36.pyc000064400000051035151733136160013645 0ustar003

�Pf��@s~ddlmZddlmZddlmZddlZddlZddlm	Z	ddlm
Z
ddlmZddl
mZmZmZmZmZdd	lmZmZmZmZmZmZmZmZmZmZdd
lmZddl m!Z!m"Z"m#Z#m$Z$m%Z%ddl&m'Z'dd
l(m)Z)ddl*m+Z+ddl,m-Z-ddl.m/Z/ej0e1�Z2Gdd�de3�Z4Gdd�de3�Z5dd�Z6Gdd�de5�Z7Gdd�de5�Z8Gdd�de5�Z9Gdd�de3�Z:dS)�)�absolute_import)�defaultdict)�chainN)�
pkg_resources)�requests)�
expanduser)�is_file_url�
is_dir_url�
is_vcs_url�url_to_path�
unpack_url)
�InstallationError�BestVersionAlreadyInstalled�DistributionNotFound�PreviousBuildDirError�	HashError�
HashErrors�HashUnpinned�DirectoryUrlHashUnsupported�VcsHashUnsupported�UnsupportedPythonVersion)�InstallRequirement)�display_path�dist_in_usersite�dist_in_install_path�
ensure_dir�normalize_path)�
MissingHashes)�
indent_log)�check_dist_requires_python)�vcs)�Wheelc@sDeZdZdd�Zdd�Zdd�Zdd�Zd	d
�Zdd�Zd
d�Z	dS)�RequirementscCsg|_i|_dS)N)�_keys�_dict)�self�r&�/usr/lib/python3.6/req_set.py�__init__"szRequirements.__init__cCs|jS)N)r#)r%r&r&r'�keys&szRequirements.keyscs�fdd��jD�S)Ncsg|]}�j|�qSr&)r$)�.0�key)r%r&r'�
<listcomp>*sz'Requirements.values.<locals>.<listcomp>)r#)r%r&)r%r'�values)szRequirements.valuescCs
||jkS)N)r#)r%�itemr&r&r'�__contains__,szRequirements.__contains__cCs$||jkr|jj|�||j|<dS)N)r#�appendr$)r%r+�valuer&r&r'�__setitem__/s
zRequirements.__setitem__cCs
|j|S)N)r$)r%r+r&r&r'�__getitem__4szRequirements.__getitem__cs$�fdd��j�D�}ddj|�S)Ncs$g|]}dt|�t�|�f�qS)z%s: %s)�repr)r*�k)r%r&r'r,8sz)Requirements.__repr__.<locals>.<listcomp>zRequirements({%s})z, )r)�join)r%r-r&)r%r'�__repr__7szRequirements.__repr__N)
�__name__�
__module__�__qualname__r(r)r-r/r2r3r7r&r&r&r'r" sr"c@s(eZdZdZdd�Zdd�Zdd�ZdS)	�DistAbstractionatAbstracts out the wheel vs non-wheel prepare_files logic.

    The requirements for anything installable are as follows:
     - we must be able to determine the requirement name
       (or we can't correctly handle the non-upgrade case).
     - we must be able to generate a list of run-time dependencies
       without installing any additional packages (or we would
       have to either burn time by doing temporary isolated installs
       or alternatively violate pips 'don't start installing unless
       all requirements are available' rule - neither of which are
       desirable).
     - for packages with setup requirements, we must also be able
       to determine their requirements without installing additional
       packages (for the same reason as run-time dependencies)
     - we must be able to create a Distribution object exposing the
       above metadata.
    cCs
||_dS)N)�req_to_install)r%r<r&r&r'r(OszDistAbstraction.__init__cCst|j��dS)z Return a setuptools Dist object.N)�NotImplementedError�dist)r%�finderr&r&r'r>RszDistAbstraction.distcCst|j��dS)z3Ensure that we can get a Dist for this requirement.N)r=r>)r%r&r&r'�
prep_for_distVszDistAbstraction.prep_for_distN)r8r9r:�__doc__r(r>r@r&r&r&r'r;<sr;cCs0|jrt|�S|jr$|jjr$t|�St|�SdS)z�Factory to make an abstract dist object.

    Preconditions: Either an editable req with a source_dir, or satisfied_by or
    a wheel link, or a non-editable req with a source_dir.

    :return: A concrete DistAbstraction.
    N)�editable�IsSDist�link�is_wheel�IsWheel)r<r&r&r'�make_abstract_dist[s
rGc@seZdZdd�Zdd�ZdS)rFcCsttj|jj��dS)Nr)�listrZfind_distributionsr<�
source_dir)r%r?r&r&r'r>mszIsWheel.distcCsdS)Nr&)r%r&r&r'r@qszIsWheel.prep_for_distN)r8r9r:r>r@r&r&r&r'rFksrFc@seZdZdd�Zdd�ZdS)rCcCs(|jj�}|jd�r$|j|jd��|S)Nzdependency_links.txt)r<Zget_distZhas_metadataZadd_dependency_linksZget_metadata_lines)r%r?r>r&r&r'r>xs


zIsSDist.distcCs|jj�|jj�dS)N)r<Zrun_egg_infoZassert_source_matches_version)r%r&r&r'r@�s
zIsSDist.prep_for_distN)r8r9r:r>r@r&r&r&r'rCvs	rCc@seZdZdd�Zdd�ZdS)�	InstalledcCs|jjS)N)r<�satisfied_by)r%r?r&r&r'r>�szInstalled.distcCsdS)Nr&)r%r&r&r'r@�szInstalled.prep_for_distN)r8r9r:r>r@r&r&r&r'rJ�srJc@s�eZdZd$dd�Zdd�Zdd	�Zd%d
d�Zdd
�Zedd��Z	edd��Z
dd�Zd&dd�Zdd�Z
dd�Zdd�Zd'dd�Zdd�Zd d!�Zffd"d#�ZdS)(�RequirementSetFNTcCs�|dkrtd��||_||_||_||_||_||_|
|_t�|_	i|_
g|_|	|_||_
g|_g|_g|_||_||_||_||_|
|_||_|r�t|�}||_||_||_tt�|_dS)a3Create a RequirementSet.

        :param wheel_download_dir: Where still-packed .whl files should be
            written to. If None they are written to the download_dir parameter.
            Separate to download_dir to permit only keeping wheel archives for
            pip wheel.
        :param download_dir: Where still packed archives should be written to.
            If None they are not saved, and are deleted immediately after
            unpacking.
        :param wheel_cache: The pip wheel cache, for passing to
            InstallRequirement.
        Nz?RequirementSet() missing 1 required keyword argument: 'session')�	TypeError�	build_dir�src_dir�download_dir�upgrade�upgrade_strategy�ignore_installed�force_reinstallr"�requirements�requirement_aliases�unnamed_requirements�ignore_dependencies�ignore_requires_python�successfully_downloaded�successfully_installed�reqs_to_cleanup�as_egg�
use_user_site�
target_dir�session�	pycompile�isolatedr�wheel_download_dir�_wheel_cache�require_hashesrrH�
_dependencies)r%rNrOrPrQrRrSr]r_rXrTr^r`rarbrc�wheel_cachererYr&r&r'r(�s<zRequirementSet.__init__cCs8dd�|jj�D�}|jdd�d�djdd�|D��S)NcSsg|]}|js|�qSr&)Z
comes_from)r*�reqr&r&r'r,�sz*RequirementSet.__str__.<locals>.<listcomp>cSs
|jj�S)N)�name�lower)rhr&r&r'�<lambda>�sz(RequirementSet.__str__.<locals>.<lambda>)r+� cSsg|]}t|j��qSr&)�strrh)r*rhr&r&r'r,�s)rUr-�sortr6)r%�reqsr&r&r'�__str__�szRequirementSet.__str__cCsNdd�|jj�D�}|jdd�d�djdd�|D��}d|jjt|�|fS)	NcSsg|]}|�qSr&r&)r*rhr&r&r'r,�sz+RequirementSet.__repr__.<locals>.<listcomp>cSs
|jj�S)N)rirj)rhr&r&r'rk�sz)RequirementSet.__repr__.<locals>.<lambda>)r+z, cSsg|]}t|j��qSr&)rmrh)r*rhr&r&r'r,�sz"<%s object; %d requirement(s): %s>)rUr-rnr6�	__class__r8�len)r%roZreqs_strr&r&r'r7�s
zRequirementSet.__repr__c	
Cs�|j}|j|�s&tjd|j|j�gS|jrV|jjrVt|jj�}|j	�sVt
d|j��|j|_|j|_|j
|_
|j|_|dk|_|s�|jj|�|gSy|j|�}Wntk
r�d}YnX|dko�|o�|jo�|j|jko�|jj|jjk�rt
d|||f��|�s8||j|<|j�|k�r0||j|j�<|g}n�g}|j�r�|j�r�|j�r�|j�ol|jj|jjk�r�|jj|�t
d|��d|_ttt|j�j t|j����|_tj!d||j�|g}|}|�r�|j|�}|j"|j|�|SdS)a'Add install_req as a requirement to install.

        :param parent_req_name: The name of the requirement that needed this
            added. The name is used because when multiple unnamed requirements
            resolve to the same name, we could otherwise end up with dependency
            links that point outside the Requirements set. parent_req must
            already be added. Note that None implies that this is a user
            supplied requirement, vs an inferred one.
        :param extras_requested: an iterable of extras used to evaluate the
            environement markers.
        :return: Additional requirements to scan. That is either [] if
            the requirement is not applicable, or [install_req] if the
            requirement is applicable and has just been added.
        z6Ignoring %s: markers '%s' don't match your environmentz-%s is not a supported wheel on this platform.Nz5Double requirement given: %s (already in %s, name=%r)zhCould not satisfy constraints for '%s': installation from path or url cannot be constrained to a versionFzSetting %s extras to: %s)#riZ
match_markers�logger�warningZmarkersrDrEr!�filenameZ	supportedr
r]r^r_ra�	is_directrWr0�get_requirement�KeyError�
constraint�extrasrhZ	specifierrUrjrV�pathr\�tuple�sorted�set�union�debugrf)	r%�install_reqZparent_req_name�extras_requestedriZwheelZexisting_req�resultZ
parent_reqr&r&r'�add_requirement�sp






zRequirementSet.add_requirementcCsF|j�}||jkr |j|js>||jkrB|j|j|jrBdSdS)NTF)rjrUryrV)r%�project_namerir&r&r'�has_requirement4s

zRequirementSet.has_requirementcCstdd�|jj�D��p|jS)Ncss|]}|js|VqdS)N)ry)r*rhr&r&r'�	<genexpr>?sz2RequirementSet.has_requirements.<locals>.<genexpr>)rHrUr-rW)r%r&r&r'�has_requirements=szRequirementSet.has_requirementscCsD|jr@t|j�|_tjj|j�r$dStjd�tdt|j���dS)NTz!Could not find download directoryz0Could not find or access download directory '%s'F)	rPr�osr{�existsrs�criticalr
r)r%r&r&r'�is_downloadBs
zRequirementSet.is_downloadcCsTxB||j�fD]2}||jkr&|j|S||jkr|j|j|SqWtd|��dS)NzNo project with the name %r)rjrUrVrx)r%r�rir&r&r'rwOs


zRequirementSet.get_requirementcCs4x.|jj�D] }|jrq|j|d�|j�qWdS)N)�auto_confirm)rUr-ry�	uninstall�commit_uninstall)r%r�rhr&r&r'r�Ws
zRequirementSet.uninstallcCs�|jrt|j�|j|jj�}|jp6tdd�|D��}|rJ|jrJtd��g}t	�}xdt
||�D]V}y|j|j||||j
d��Wq`tk
r�}z||_|j|�WYdd}~Xq`Xq`W|r�|�dS)zY
        Prepare process. Create temp directories, download and/or unpack files.
        css|]}|jVqdS)N)Zhas_hash_options)r*rhr&r&r'r�jsz/RequirementSet.prepare_files.<locals>.<genexpr>z�--egg is not allowed with --require-hashes mode, since it delegates dependency resolution to setuptools and could thus result in installation of unhashed packages.)rerXN)rcrrWrUr-re�anyr]r
rr�extend�
_prepare_filerXrrhr0)r%r?Z	root_reqsreZdiscovered_reqsZhash_errorsrh�excr&r&r'�
prepare_files^s,

 zRequirementSet.prepare_filescCs |jo|jdkp|jdko|jS)NZeagerzonly-if-needed)rQrRrv)r%rhr&r&r'�_is_upgrade_allowed�s
z"RequirementSet._is_upgrade_allowedcCs�|j�|jr�|j|�}d}|r�|jp*|jshy|j||�Wn*tk
rTd}Yntk
rfYnX|s�|jr~t	|j�p�t
|j�s�|j|_d|_|r�d}n|jdkr�d}nd}|SdSdS)aCheck if req_to_install should be skipped.

        This will check if the req is installed, and whether we should upgrade
        or reinstall it, taking into account all the relevant user options.

        After calling this req_to_install will only have satisfied_by set to
        None if the req_to_install is to be upgraded/reinstalled etc. Any
        other value will be a dist recording the current thing installed that
        satisfies the requirement.

        Note that for vcs urls and the like we can't assess skipping in this
        routine - we simply identify that we need to pull the thing down,
        then later on it is pulled down and introspected to assess upgrade/
        reinstalls etc.

        :return: A text reason for why it was skipped, or None.
        FTNzalready up-to-datezonly-if-neededz%not upgraded as not directly requiredzalready satisfied)
�check_if_existsrKr�rTrDZfind_requirementrrr^rr�conflicts_withrR)r%r<r?Zupgrade_allowedZbest_installed�skip_reasonr&r&r'�_check_skip_installed�s4

z$RequirementSet._check_skip_installedc(s��js�jrgSd�_�jr*tjd��n��jdks8t��jsJ�j�|�}�jrx|dk	shtd�jf��tjd|��n<�j	r��j	j
dkr�t�j	j�}tjdt
|��ntjd��t�����j�r |r�td	����j�j��j�j�t��}|j��j�r�j�j��j��n0�j�rD|�r8tjd
�t��}�n�j�j�tjjtjj�j d���r|t!d��j f���j"|�j#��|��j	�s�t��j	}|�r�t$|��r�t%��nt&|��r�t'|��r�t(���j)�r�j*�r�t+���j,|d
�}	|�r|	�rt-�}	yZ�j}
d}�j	j.�r4�j/�r4�j/}
�j	j.�rN|
�rJd}nd}t0�j	�j |
|�j1|	d�WnHt2j3k
�r�}z(tj4d�|�td�|�j	f��WYdd}~XnXt��}|j��j�r�j	j
t5j6k�r�j�j��j�s��j��j�rP�j7�s�j�rD�j8�r&t9�j��p0t:�j��s<�j�_;d�_ntjd��|j<|�}
yt=|
�WnHt>k
�r�}z*�j?�r�tj@|jAd�n
�jB��WYdd}~XnXg����fdd�}�jC�jD��sވjE�d�|�sp�jF�rtjddj�jF��tGtH�jF�tH|
jF��}x|D]}tj@d|
|��qWtGtH|
jF�tH�jF�@�}x |
jI|�D]}|||d��qZW�jJjK���j�r��j�r��jLjK��WdQRX�S)zxPrepare a single requirements file.

        :return: A list of additional InstallRequirements to also install.
        TzObtaining %sNzP_check_skip_installed returned None but req_to_install.satisfied_by is set to %rzRequirement %s: %s�filez
Processing %sz
Collecting %szoThe editable requirement %s cannot be installed when requiring hashes, because there is no single file to hash.z�Since it is already installed, we are trusting this package without checking its hash. To ensure a completely repeatable environment, install into an empty virtualenv.zsetup.pyz�pip can't proceed with requirements '%s' due to a pre-existing build directory (%s). This is likely due to a previous installation that failed. pip is being responsible and not assuming it can delete this. Please delete it and try again.)Ztrust_internetF)r`�hashesz4Could not install requirement %s because of error %szDCould not install requirement %s because of HTTP error %s for URL %sz<Requirement already satisfied (use --upgrade to upgrade): %srcs4tt|���j�jd�}�j�j|�j|d��dS)N)rbrg)r�)rrmrbrdr�r�ri)�subreqr�Zsub_install_req)�	more_reqsr<r%r&r'�add_req�s
z-RequirementSet._prepare_file.<locals>.add_reqz!Installing extra requirements: %r�,z"%s does not provide the extra '%s')r�)MryZpreparedrBrs�inforK�AssertionErrorrSr�rD�schemerZurlrrr
Zensure_has_source_dirrOZupdate_editabler�rGr@�archiverPr�r�rJrNr�r{r�r6rIrZ
populate_linkr�r
rrr	rZ
original_linkZ	is_pinnedrr�rrErcrr`rZ	HTTPErrorr�r Zall_schemesrQr^rrr�r>rrrYrt�args�remove_temporary_sourcer�rir�rzr}r~Zrequiresr\r0rZ)r%r?r<rerXr�r{Z
abstract_distrDr�rPZautodelete_unpackedr�r>�er�Zmissing_requestedZmissingZavailable_requestedr�r&)r�r<r%r'r��s

	





"


zRequirementSet._prepare_filec	Cs8tjd�t��x|jD]}|j�qWWdQRXdS)zClean up files, remove builds.zCleaning up...N)rsr�rr\r�)r%rhr&r&r'�
cleanup_files�s
zRequirementSet.cleanup_filescs<g�t������fdd��x�jj�D]}�|�q(W�S)z�Create the installation order.

        The installation order is topological - requirements are installed
        before the requiring thing. We break cycles at an arbitrary point,
        and make no other guarantees.
        csP|js|�krdS|jrdS�j|�x�j|D]}�|�q2W�j|�dS)N)rKry�addrfr0)rhZdep)�order�ordered_reqs�scheduler%r&r'r��s
z,RequirementSet._to_install.<locals>.schedule)r~rUr-)r%r�r&)r�r�r�r%r'�_to_install�s
	zRequirementSet._to_installcOs�|j�}|r(tjddjdd�|D���t���x�|D]�}|jrltjd|j�t��|jdd�WdQRXy|j||f|�|�Wn$|jr�|jr�|j	��YnX|jr�|jr�|j
�|j�q6WWdQRX||_dS)	zl
        Install everything in this set (after having downloaded and unpacked
        the packages)
        z!Installing collected packages: %sz, cSsg|]
}|j�qSr&)ri)r*rhr&r&r'r,sz*RequirementSet.install.<locals>.<listcomp>zFound existing installation: %sT)r�N)
r�rsr�r6rr�r��installZinstall_succeededZrollback_uninstallr�r�r[)r%Zinstall_optionsZglobal_optionsr��kwargsZ
to_installZrequirementr&r&r'r��s:

zRequirementSet.install)FNFFNFFFNTFNNFF)NN)F)FF)r8r9r:r(rpr7r�r��propertyr�r�rwr�r�r�r�r�r�r�r�r&r&r&r'rL�s4
4
[	

'E
	rL);Z
__future__r�collectionsr�	itertoolsrZloggingr�Zpip._vendorrrZ
pip.compatrZpip.downloadrr	r
rrZpip.exceptionsr
rrrrrrrrrZpip.req.req_installrZ	pip.utilsrrrrrZpip.utils.hashesrZpip.utils.loggingrZpip.utils.packagingrZpip.vcsr Z	pip.wheelr!Z	getLoggerr8rs�objectr"r;rGrFrCrJrLr&r&r&r'�<module>s00
	req/req_install.py000064400000133225151733136160010236 0ustar00from __future__ import absolute_import

import logging
import os
import re
import shutil
import sys
import tempfile
import traceback
import warnings
import zipfile

from distutils import sysconfig
from distutils.util import change_root
from email.parser import FeedParser

from pip._vendor import pkg_resources, six
from pip._vendor.packaging import specifiers
from pip._vendor.packaging.markers import Marker
from pip._vendor.packaging.requirements import InvalidRequirement, Requirement
from pip._vendor.packaging.utils import canonicalize_name
from pip._vendor.packaging.version import Version, parse as parse_version
from pip._vendor.six.moves import configparser

import pip.wheel

from pip.compat import native_str, get_stdlib, WINDOWS
from pip.download import is_url, url_to_path, path_to_url, is_archive_file
from pip.exceptions import (
    InstallationError, UninstallationError,
)
from pip.locations import (
    bin_py, running_under_virtualenv, PIP_DELETE_MARKER_FILENAME, bin_user,
)
from pip.utils import (
    display_path, rmtree, ask_path_exists, backup_dir, is_installable_dir,
    dist_in_usersite, dist_in_site_packages, dist_in_install_path, egg_link_path,
    call_subprocess, read_text_file, FakeFile, _make_build_dir, ensure_dir,
    get_installed_version, normalize_path, dist_is_local,
)

from pip.utils.hashes import Hashes
from pip.utils.deprecation import RemovedInPip10Warning
from pip.utils.logging import indent_log
from pip.utils.setuptools_build import SETUPTOOLS_SHIM
from pip.utils.ui import open_spinner
from pip.req.req_uninstall import UninstallPathSet
from pip.vcs import vcs
from pip.wheel import move_wheel_files, Wheel


# Module-level logger for this module (pip.req.req_install).
logger = logging.getLogger(__name__)

# The comparison operators accepted in a version specifier (==, >=, ~=, ...).
# Used by InstallRequirement.__init__ to produce a helpful hint when a bare
# "=" was used instead of "==".
operators = specifiers.Specifier._operators.keys()


def _strip_extras(path):
    m = re.match(r'^(.+)(\[[^\]]+\])$', path)
    extras = None
    if m:
        path_no_extras = m.group(1)
        extras = m.group(2)
    else:
        path_no_extras = path

    return path_no_extras, extras


def _safe_extras(extras):
    """Normalise each extra name through ``pkg_resources.safe_extra``."""
    return {pkg_resources.safe_extra(extra) for extra in extras}


class InstallRequirement(object):

    def __init__(self, req, comes_from, source_dir=None, editable=False,
                 link=None, as_egg=False, update=True,
                 pycompile=True, markers=None, isolated=False, options=None,
                 wheel_cache=None, constraint=False):
        """Represent a single requirement to be installed.

        :param req: the requirement specifier -- either a
            ``packaging.requirements.Requirement`` or a string to be parsed
            into one.  May be None for "unnamed" (link-only) requirements.
        :param comes_from: the parent requirement (or a plain string) that
            caused this one to be added; used in error/log messages.
        :param source_dir: directory holding an unpacked source tree, if any.
        :param editable: True for ``-e`` / editable installs.
        :param link: a ``pip.index.Link`` to the distribution, if known.
        :param markers: environment markers; when None, the marker parsed
            from ``req`` itself (if any) is used.
        :param options: per-requirement options from a requirements file
            (e.g. ``--install-option``, ``--hash``).
        :param constraint: True when this came from a constraints file.
        :raises InstallationError: if ``req`` is a string that cannot be
            parsed as a requirement.
        """
        self.extras = ()
        if isinstance(req, six.string_types):
            try:
                req = Requirement(req)
            except InvalidRequirement:
                # Build a hint for the two most common parse failures:
                # a filesystem path, or "=" typed instead of "==".
                if os.path.sep in req:
                    add_msg = "It looks like a path. Does it exist ?"
                elif '=' in req and not any(op in req for op in operators):
                    add_msg = "= is not a valid operator. Did you mean == ?"
                else:
                    add_msg = traceback.format_exc()
                raise InstallationError(
                    "Invalid requirement: '%s'\n%s" % (req, add_msg))
            self.extras = _safe_extras(req.extras)

        self.req = req
        self.comes_from = comes_from
        self.constraint = constraint
        self.source_dir = source_dir
        self.editable = editable

        self._wheel_cache = wheel_cache
        # original_link keeps the link as given, even if self.link is later
        # swapped for a cached wheel by populate_link().
        self.link = self.original_link = link
        self.as_egg = as_egg
        if markers is not None:
            self.markers = markers
        else:
            self.markers = req and req.marker
        self._egg_info_path = None
        # This holds the pkg_resources.Distribution object if this requirement
        # is already available:
        self.satisfied_by = None
        # This holds the pkg_resources.Distribution object if this requirement
        # conflicts with another installed distribution:
        self.conflicts_with = None
        # Temporary build location
        self._temp_build_dir = None
        # Used to store the global directory where the _temp_build_dir should
        # have been created. Cf _correct_build_location method.
        self._ideal_build_dir = None
        # True if the editable should be updated:
        self.update = update
        # Set to True after successful installation
        self.install_succeeded = None
        # UninstallPathSet of uninstalled distribution (for possible rollback)
        self.uninstalled = None
        # Set True if a legitimate do-nothing-on-uninstall has happened - e.g.
        # system site packages, stdlib packages.
        self.nothing_to_uninstall = False
        self.use_user_site = False
        self.target_dir = None
        self.options = options if options else {}
        self.pycompile = pycompile
        # Set to True after successful preparation of this requirement
        self.prepared = False

        self.isolated = isolated

    @classmethod
    def from_editable(cls, editable_req, comes_from=None, default_vcs=None,
                      isolated=False, options=None, wheel_cache=None,
                      constraint=False):
        """Build an editable InstallRequirement from an ``-e`` spec.

        ``editable_req`` may be a local path or a VCS URL with an ``#egg=``
        fragment; ``parse_editable`` resolves it to a name, URL and optional
        extras override.
        """
        from pip.index import Link

        name, url, extras_override = parse_editable(editable_req, default_vcs)

        # Only local file: URLs have a source directory we can point at.
        source_dir = url_to_path(url) if url.startswith('file:') else None

        res = cls(
            name,
            comes_from,
            source_dir=source_dir,
            editable=True,
            link=Link(url),
            constraint=constraint,
            isolated=isolated,
            options=options or {},
            wheel_cache=wheel_cache,
        )

        if extras_override is not None:
            res.extras = _safe_extras(extras_override)

        return res

    @classmethod
    def from_line(
            cls, name, comes_from=None, isolated=False, options=None,
            wheel_cache=None, constraint=False):
        """Creates an InstallRequirement from a name, which might be a
        requirement, directory containing 'setup.py', filename, or URL.
        """
        from pip.index import Link

        # An environment marker may follow the requirement, separated by
        # ";".  A bare ";" can legitimately appear inside a URL, so for URLs
        # a space before the marker ("; ") is required as the separator.
        if is_url(name):
            marker_sep = '; '
        else:
            marker_sep = ';'
        if marker_sep in name:
            name, markers = name.split(marker_sep, 1)
            markers = markers.strip()
            if not markers:
                markers = None
            else:
                markers = Marker(markers)
        else:
            markers = None
        name = name.strip()
        req = None
        path = os.path.normpath(os.path.abspath(name))
        link = None
        extras = None

        if is_url(name):
            link = Link(name)
        else:
            p, extras = _strip_extras(path)
            # Only treat the string as a local directory when it also
            # "looks like" a path (contains a separator or starts with "."),
            # so plain names such as "requests" are never mistaken for dirs.
            if (os.path.isdir(p) and
                    (os.path.sep in name or name.startswith('.'))):

                if not is_installable_dir(p):
                    raise InstallationError(
                        "Directory %r is not installable. File 'setup.py' "
                        "not found." % name
                    )
                link = Link(path_to_url(p))
            elif is_archive_file(p):
                if not os.path.isfile(p):
                    logger.warning(
                        'Requirement %r looks like a filename, but the '
                        'file does not exist',
                        name
                    )
                link = Link(path_to_url(p))

        # it's a local file, dir, or url
        if link:
            # Handle relative file URLs
            if link.scheme == 'file' and re.search(r'\.\./', link.url):
                link = Link(
                    path_to_url(os.path.normpath(os.path.abspath(link.path))))
            # wheel file
            if link.is_wheel:
                wheel = Wheel(link.filename)  # can raise InvalidWheelFilename
                # A wheel filename encodes the exact name and version, so the
                # requirement is pinned to them.
                req = "%s==%s" % (wheel.name, wheel.version)
            else:
                # set the req to the egg fragment.  when it's not there, this
                # will become an 'unnamed' requirement
                req = link.egg_fragment

        # a requirement specifier
        else:
            req = name

        options = options if options else {}
        res = cls(req, comes_from, link=link, markers=markers,
                  isolated=isolated, options=options,
                  wheel_cache=wheel_cache, constraint=constraint)

        if extras:
            # Parse the bracketed extras by grafting them onto a placeholder
            # requirement name.
            res.extras = _safe_extras(
                Requirement('placeholder' + extras).extras)

        return res

    def __str__(self):
        if self.req:
            s = str(self.req)
            if self.link:
                s += ' from %s' % self.link.url
        else:
            s = self.link.url if self.link else None
        if self.satisfied_by is not None:
            s += ' in %s' % display_path(self.satisfied_by.location)
        if self.comes_from:
            if isinstance(self.comes_from, six.string_types):
                comes_from = self.comes_from
            else:
                comes_from = self.comes_from.from_path()
            if comes_from:
                s += ' (from %s)' % comes_from
        return s

    def __repr__(self):
        return '<%s object: %s editable=%r>' % (
            self.__class__.__name__, str(self), self.editable)

    def populate_link(self, finder, upgrade, require_hashes):
        """Ensure that if a link can be found for this, that it is found.

        Note that self.link may still be None - if Upgrade is False and the
        requirement is already installed.

        If require_hashes is True, don't use the wheel cache, because cached
        wheels, always built locally, have different hashes than the files
        downloaded from the index server and thus throw false hash mismatches.
        Furthermore, cached wheels at present have undeterministic contents due
        to file modification times.
        """
        if self.link is None:
            self.link = finder.find_requirement(self, upgrade)
        if self._wheel_cache is not None and not require_hashes:
            old_link = self.link
            self.link = self._wheel_cache.cached_wheel(self.link, self.name)
            if old_link != self.link:
                logger.debug('Using cached wheel link: %s', self.link)

    @property
    def specifier(self):
        """Version specifier of the underlying requirement.

        Assumes self.req is set; raises AttributeError when it is None.
        """
        return self.req.specifier

    @property
    def is_pinned(self):
        """Return whether I am pinned to an exact version.

        For example, some-package==1.2 is pinned; some-package>1.2 is not.
        """
        spec = self.specifier
        if len(spec) != 1:
            return False
        return next(iter(spec)).operator in ('==', '===')

    def from_path(self):
        """Return a ``req->origin`` chain string, or None when req is unset."""
        if self.req is None:
            return None
        chain = str(self.req)
        if self.comes_from:
            if isinstance(self.comes_from, six.string_types):
                origin = self.comes_from
            else:
                origin = self.comes_from.from_path()
            if origin:
                chain = '{0}->{1}'.format(chain, origin)
        return chain

    def build_location(self, build_dir):
        """Return the directory this requirement should be built in.

        An already-assigned temporary build dir is reused.  A requirement
        without a name (self.req is None) gets a fresh temp dir that is
        relocated later; a named one builds in a subdirectory of
        ``build_dir`` (created on demand).
        """
        if self._temp_build_dir is not None:
            return self._temp_build_dir
        if self.req is None:
            # Requirement given as a bare directory path: the package name
            # is unknown until run_egg_info has run, so build in a temp
            # directory and fix it up afterwards via
            # _correct_build_location.  realpath() because /tmp may be a
            # symlink, which confuses some custom builds (e.g. numpy).
            self._temp_build_dir = os.path.realpath(
                tempfile.mkdtemp('-build', 'pip-')
            )
            self._ideal_build_dir = build_dir
            return self._temp_build_dir
        name = self.name.lower() if self.editable else self.name
        # FIXME: Is there a better place to create the build_dir? (hg and bzr
        # need this)
        if not os.path.exists(build_dir):
            logger.debug('Creating directory %s', build_dir)
            _make_build_dir(build_dir)
        return os.path.join(build_dir, name)

    def _correct_build_location(self):
        """Move self._temp_build_dir to self._ideal_build_dir/self.req.name

        For some requirements (e.g. a path to a directory), the name of the
        package is not available until we run egg_info, so the build_location
        will return a temporary directory and store the _ideal_build_dir.

        This is only called by self.egg_info_path to fix the temporary build
        directory.
        """
        # Nothing to do once a real source_dir has been established.
        if self.source_dir is not None:
            return
        assert self.req is not None
        assert self._temp_build_dir
        assert self._ideal_build_dir
        old_location = self._temp_build_dir
        # Clear the temp dir first so build_location() computes the
        # name-based path instead of returning the temp dir again.
        self._temp_build_dir = None
        new_location = self.build_location(self._ideal_build_dir)
        if os.path.exists(new_location):
            raise InstallationError(
                'A package already exists in %s; please remove it to continue'
                % display_path(new_location))
        logger.debug(
            'Moving package %s from %s to new location %s',
            self, display_path(old_location), display_path(new_location),
        )
        shutil.move(old_location, new_location)
        self._temp_build_dir = new_location
        self._ideal_build_dir = None
        self.source_dir = new_location
        # Invalidate the cached egg-info path; it pointed into old_location.
        self._egg_info_path = None

    @property
    def name(self):
        """Safe project name from the requirement, or None when req is unset."""
        if self.req is None:
            return None
        return native_str(pkg_resources.safe_name(self.req.name))

    @property
    def setup_py_dir(self):
        """Directory containing setup.py: the source dir joined with any
        ``subdirectory`` fragment carried by the link."""
        if self.link:
            subdir = self.link.subdirectory_fragment or ''
        else:
            subdir = ''
        return os.path.join(self.source_dir, subdir)

    @property
    def setup_py(self):
        """Path to this requirement's setup.py.

        Raises InstallationError when setuptools cannot be imported,
        since it is required to build from a source distribution.
        """
        assert self.source_dir, "No source dir for %s" % self
        try:
            import setuptools  # noqa
        except ImportError:
            # Distinguish "not installed at all" from a broken install.
            if get_installed_version('setuptools') is None:
                add_msg = "Please install setuptools."
            else:
                add_msg = traceback.format_exc()
            raise InstallationError(
                "Could not import setuptools which is required to "
                "install from a source distribution.\n%s" % add_msg
            )

        path = os.path.join(self.setup_py_dir, 'setup.py')
        # Python2 __file__ should not be unicode
        if six.PY2 and isinstance(path, six.text_type):
            path = path.encode(sys.getfilesystemencoding())
        return path

    def run_egg_info(self):
        """Run ``setup.py egg_info`` to generate metadata for this requirement.

        For an unnamed requirement the generated metadata is used to set
        self.req (pinned to the discovered version) and to relocate the
        temporary build directory.  For a named one, a mismatch between
        the requirement name and the metadata name is warned about and
        the metadata name wins.
        """
        assert self.source_dir
        if self.name:
            logger.debug(
                'Running setup.py (path:%s) egg_info for package %s',
                self.setup_py, self.name,
            )
        else:
            logger.debug(
                'Running setup.py (path:%s) egg_info for package from %s',
                self.setup_py, self.link,
            )

        with indent_log():
            script = SETUPTOOLS_SHIM % self.setup_py
            base_cmd = [sys.executable, '-c', script]
            if self.isolated:
                base_cmd += ["--no-user-cfg"]
            egg_info_cmd = base_cmd + ['egg_info']
            # We can't put the .egg-info files at the root, because then the
            # source code will be mistaken for an installed egg, causing
            # problems
            if self.editable:
                egg_base_option = []
            else:
                egg_info_dir = os.path.join(self.setup_py_dir, 'pip-egg-info')
                ensure_dir(egg_info_dir)
                egg_base_option = ['--egg-base', 'pip-egg-info']
            call_subprocess(
                egg_info_cmd + egg_base_option,
                cwd=self.setup_py_dir,
                show_stdout=False,
                command_desc='python setup.py egg_info')

        if not self.req:
            # Pin to the metadata version; '===' (arbitrary equality) is
            # needed when the version string is not a valid PEP 440 Version.
            if isinstance(parse_version(self.pkg_info()["Version"]), Version):
                op = "=="
            else:
                op = "==="
            self.req = Requirement(
                "".join([
                    self.pkg_info()["Name"],
                    op,
                    self.pkg_info()["Version"],
                ])
            )
            self._correct_build_location()
        else:
            metadata_name = canonicalize_name(self.pkg_info()["Name"])
            if canonicalize_name(self.req.name) != metadata_name:
                logger.warning(
                    'Running setup.py (path:%s) egg_info for package %s '
                    'produced metadata for project name %s. Fix your '
                    '#egg=%s fragments.',
                    self.setup_py, self.name, metadata_name, self.name
                )
                self.req = Requirement(metadata_name)

    def egg_info_data(self, filename):
        """Return the text of an egg-info metadata file, or None if absent.

        Metadata from an already-installed distribution (satisfied_by)
        takes precedence over files in the source checkout.
        """
        if self.satisfied_by is not None:
            if self.satisfied_by.has_metadata(filename):
                return self.satisfied_by.get_metadata(filename)
            return None
        assert self.source_dir
        path = self.egg_info_path(filename)
        if not os.path.exists(path):
            return None
        return read_text_file(path)

    def egg_info_path(self, filename):
        """Return the absolute path of *filename* inside the egg-info dir.

        The egg-info directory is located once and cached in
        self._egg_info_path.  For non-editable installs it is the entry
        under ``<setup_py_dir>/pip-egg-info``; for editables the source
        tree is walked for ``*.egg-info`` directories, skipping VCS
        directories, anything that looks like a virtualenv, and test
        directories.

        :raises InstallationError: when no candidate is found.

        Fixes vs. original: ``os.listdir(base)`` is no longer executed
        and then discarded for editable installs, and the dead
        ``assert filenames`` that duplicated the raise was removed.
        """
        if self._egg_info_path is None:
            if self.editable:
                base = self.source_dir
                filenames = []
                for root, dirs, files in os.walk(base):
                    for dir in vcs.dirnames:
                        if dir in dirs:
                            dirs.remove(dir)
                    # Iterate over a copy of ``dirs``, since mutating
                    # a list while iterating over it can cause trouble.
                    # (See https://github.com/pypa/pip/pull/462.)
                    for dir in list(dirs):
                        # Don't search in anything that looks like a virtualenv
                        # environment
                        if (
                                os.path.lexists(
                                    os.path.join(root, dir, 'bin', 'python')
                                ) or
                                os.path.exists(
                                    os.path.join(
                                        root, dir, 'Scripts', 'Python.exe'
                                    )
                                )):
                            dirs.remove(dir)
                        # Also don't search through tests
                        elif dir == 'test' or dir == 'tests':
                            dirs.remove(dir)
                    filenames.extend([os.path.join(root, dir)
                                     for dir in dirs])
                filenames = [f for f in filenames if f.endswith('.egg-info')]
            else:
                base = os.path.join(self.setup_py_dir, 'pip-egg-info')
                filenames = os.listdir(base)

            if not filenames:
                raise InstallationError(
                    'No files/directories in %s (from %s)' % (base, filename)
                )

            # if we have more than one match, we pick the toplevel one.  This
            # can easily be the case if there is a dist folder which contains
            # an extracted tarball for testing purposes.
            if len(filenames) > 1:
                filenames.sort(
                    key=lambda x: x.count(os.path.sep) +
                    (os.path.altsep and x.count(os.path.altsep) or 0)
                )
            self._egg_info_path = os.path.join(base, filenames[0])
        return os.path.join(self._egg_info_path, filename)

    def pkg_info(self):
        """Parse the requirement's PKG-INFO metadata into a message object.

        Returns an empty message (after logging a warning) when no
        PKG-INFO data can be found.
        """
        data = self.egg_info_data('PKG-INFO')
        if not data:
            logger.warning(
                'No PKG-INFO file found in %s',
                display_path(self.egg_info_path('PKG-INFO')),
            )
        parser = FeedParser()
        parser.feed(data or '')
        return parser.close()

    # Matches bracketed section headers such as ``[extra]``.
    _requirements_section_re = re.compile(r'\[(.*?)\]')

    @property
    def installed_version(self):
        """Version string of the currently installed distribution, if any."""
        return get_installed_version(self.name)

    def assert_source_matches_version(self):
        """Warn when the unpacked source's version does not satisfy the
        requirement's specifier.

        The version is read from the source tree's PKG-INFO metadata.
        """
        assert self.source_dir
        version = self.pkg_info()['version']
        if self.req.specifier and version not in self.req.specifier:
            # Report the version found in the source tree (the one about
            # to be installed), not whatever happens to be installed
            # already -- the original logged self.installed_version here.
            logger.warning(
                'Requested %s, but installing version %s',
                self,
                version,
            )
        else:
            logger.debug(
                'Source in %s has version %s, which satisfies requirement %s',
                display_path(self.source_dir),
                version,
                self,
            )

    def update_editable(self, obtain=True):
        """Refresh an editable checkout from its version-control URL.

        With obtain=True the VCS backend obtains/updates the checkout;
        otherwise it exports the sources.  Static ``file:`` links and
        requirements whose ``update`` flag is unset are left untouched.
        """
        if not self.link:
            logger.debug(
                "Cannot update repository at %s; repository location is "
                "unknown",
                self.source_dir,
            )
            return
        assert self.editable
        assert self.source_dir
        if self.link.scheme == 'file':
            # Static paths don't get updated
            return
        assert '+' in self.link.url, "bad url: %r" % self.link.url
        if not self.update:
            return
        vc_type, url = self.link.url.split('+', 1)
        backend = vcs.get_backend(vc_type)
        if not backend:
            assert 0, (
                'Unexpected version control type (in %s): %s'
                % (self.link, vc_type))
        vcs_backend = backend(self.link.url)
        if obtain:
            vcs_backend.obtain(self.source_dir)
        else:
            vcs_backend.export(self.source_dir)

    def uninstall(self, auto_confirm=False):
        """
        Uninstall the distribution currently satisfying this requirement.

        Prompts before removing or modifying files unless
        ``auto_confirm`` is True.

        Refuses to delete or modify files outside of ``sys.prefix`` -
        thus uninstallation within a virtual environment can only
        modify that virtual environment, even if the virtualenv is
        linked to global site-packages.

        The collected paths are removed and stored in self.uninstalled
        so the operation can be rolled back or committed later.
        """
        if not self.check_if_exists():
            raise UninstallationError(
                "Cannot uninstall requirement %s, not installed" % (self.name,)
            )
        # Either the distribution that satisfies us, or the conflicting
        # one detected by check_if_exists().
        dist = self.satisfied_by or self.conflicts_with

        dist_path = normalize_path(dist.location)
        if not dist_is_local(dist):
            logger.info(
                "Not uninstalling %s at %s, outside environment %s",
                dist.key,
                dist_path,
                sys.prefix,
            )
            self.nothing_to_uninstall = True
            return

        if dist_path in get_stdlib():
            logger.info(
                "Not uninstalling %s at %s, as it is in the standard library.",
                dist.key,
                dist_path,
            )
            self.nothing_to_uninstall = True
            return

        paths_to_remove = UninstallPathSet(dist)
        develop_egg_link = egg_link_path(dist)
        develop_egg_link_egg_info = '{0}.egg-info'.format(
            pkg_resources.to_filename(dist.project_name))
        egg_info_exists = dist.egg_info and os.path.exists(dist.egg_info)
        # Special case for distutils installed package
        distutils_egg_info = getattr(dist._provider, 'path', None)

        # Uninstall cases order do matter as in the case of 2 installs of the
        # same package, pip needs to uninstall the currently detected version
        if (egg_info_exists and dist.egg_info.endswith('.egg-info') and
                not dist.egg_info.endswith(develop_egg_link_egg_info)):
            # if dist.egg_info.endswith(develop_egg_link_egg_info), we
            # are in fact in the develop_egg_link case
            paths_to_remove.add(dist.egg_info)
            if dist.has_metadata('installed-files.txt'):
                for installed_file in dist.get_metadata(
                        'installed-files.txt').splitlines():
                    path = os.path.normpath(
                        os.path.join(dist.egg_info, installed_file)
                    )
                    paths_to_remove.add(path)
            # FIXME: need a test for this elif block
            # occurs with --single-version-externally-managed/--record outside
            # of pip
            elif dist.has_metadata('top_level.txt'):
                if dist.has_metadata('namespace_packages.txt'):
                    namespaces = dist.get_metadata('namespace_packages.txt')
                else:
                    namespaces = []
                for top_level_pkg in [
                        p for p
                        in dist.get_metadata('top_level.txt').splitlines()
                        if p and p not in namespaces]:
                    path = os.path.join(dist.location, top_level_pkg)
                    paths_to_remove.add(path)
                    paths_to_remove.add(path + '.py')
                    paths_to_remove.add(path + '.pyc')
                    paths_to_remove.add(path + '.pyo')

        elif distutils_egg_info:
            warnings.warn(
                "Uninstalling a distutils installed project ({0}) has been "
                "deprecated and will be removed in a future version. This is "
                "due to the fact that uninstalling a distutils project will "
                "only partially uninstall the project.".format(self.name),
                RemovedInPip10Warning,
            )
            paths_to_remove.add(distutils_egg_info)

        elif dist.location.endswith('.egg'):
            # package installed by easy_install
            # We cannot match on dist.egg_name because it can slightly vary
            # i.e. setuptools-0.6c11-py2.6.egg vs setuptools-0.6rc11-py2.6.egg
            paths_to_remove.add(dist.location)
            easy_install_egg = os.path.split(dist.location)[1]
            easy_install_pth = os.path.join(os.path.dirname(dist.location),
                                            'easy-install.pth')
            paths_to_remove.add_pth(easy_install_pth, './' + easy_install_egg)

        elif egg_info_exists and dist.egg_info.endswith('.dist-info'):
            # wheel-installed package: the RECORD file lists everything
            for path in pip.wheel.uninstallation_paths(dist):
                paths_to_remove.add(path)

        elif develop_egg_link:
            # develop egg
            with open(develop_egg_link, 'r') as fh:
                link_pointer = os.path.normcase(fh.readline().strip())
            assert (link_pointer == dist.location), (
                'Egg-link %s does not match installed location of %s '
                '(at %s)' % (link_pointer, self.name, dist.location)
            )
            paths_to_remove.add(develop_egg_link)
            easy_install_pth = os.path.join(os.path.dirname(develop_egg_link),
                                            'easy-install.pth')
            paths_to_remove.add_pth(easy_install_pth, dist.location)

        else:
            logger.debug(
                'Not sure how to uninstall: %s - Check: %s',
                dist, dist.location)

        # find distutils scripts= scripts
        if dist.has_metadata('scripts') and dist.metadata_isdir('scripts'):
            for script in dist.metadata_listdir('scripts'):
                if dist_in_usersite(dist):
                    bin_dir = bin_user
                else:
                    bin_dir = bin_py
                paths_to_remove.add(os.path.join(bin_dir, script))
                if WINDOWS:
                    paths_to_remove.add(os.path.join(bin_dir, script) + '.bat')

        # find console_scripts
        if dist.has_metadata('entry_points.txt'):
            if six.PY2:
                options = {}
            else:
                # Restrict the parser to '=' so script names containing
                # ':' are not treated as delimiters.
                options = {"delimiters": ('=', )}
            config = configparser.SafeConfigParser(**options)
            config.readfp(
                FakeFile(dist.get_metadata_lines('entry_points.txt'))
            )
            if config.has_section('console_scripts'):
                for name, value in config.items('console_scripts'):
                    if dist_in_usersite(dist):
                        bin_dir = bin_user
                    else:
                        bin_dir = bin_py
                    paths_to_remove.add(os.path.join(bin_dir, name))
                    if WINDOWS:
                        paths_to_remove.add(
                            os.path.join(bin_dir, name) + '.exe'
                        )
                        paths_to_remove.add(
                            os.path.join(bin_dir, name) + '.exe.manifest'
                        )
                        paths_to_remove.add(
                            os.path.join(bin_dir, name) + '-script.py'
                        )

        paths_to_remove.remove(auto_confirm)
        self.uninstalled = paths_to_remove

    def rollback_uninstall(self):
        """Undo a previous uninstall(), restoring the removed files."""
        if not self.uninstalled:
            logger.error(
                "Can't rollback %s, nothing uninstalled.", self.name,
            )
            return
        self.uninstalled.rollback()

    def commit_uninstall(self):
        """Finalize a previous uninstall(), discarding the saved files."""
        if self.uninstalled:
            self.uninstalled.commit()
            return
        if not self.nothing_to_uninstall:
            logger.error(
                "Can't commit %s, nothing uninstalled.", self.name,
            )

    def archive(self, build_dir):
        """Create a ``<name>-<version>.zip`` of the source tree in build_dir.

        If the archive already exists the user is prompted to ignore,
        wipe, back up, or abort (abort exits the process).  The
        ``pip-egg-info`` directory and pip's delete-marker file are
        excluded from the archive.
        """
        assert self.source_dir
        create_archive = True
        archive_name = '%s-%s.zip' % (self.name, self.pkg_info()["version"])
        archive_path = os.path.join(build_dir, archive_name)
        if os.path.exists(archive_path):
            response = ask_path_exists(
                'The file %s exists. (i)gnore, (w)ipe, (b)ackup, (a)bort ' %
                display_path(archive_path), ('i', 'w', 'b', 'a'))
            if response == 'i':
                create_archive = False
            elif response == 'w':
                logger.warning('Deleting %s', display_path(archive_path))
                os.remove(archive_path)
            elif response == 'b':
                dest_file = backup_dir(archive_path)
                logger.warning(
                    'Backing up %s to %s',
                    display_path(archive_path),
                    display_path(dest_file),
                )
                shutil.move(archive_path, dest_file)
            elif response == 'a':
                sys.exit(-1)
        if create_archive:
            zip = zipfile.ZipFile(
                archive_path, 'w', zipfile.ZIP_DEFLATED,
                allowZip64=True
            )
            dir = os.path.normcase(os.path.abspath(self.setup_py_dir))
            for dirpath, dirnames, filenames in os.walk(dir):
                if 'pip-egg-info' in dirnames:
                    dirnames.remove('pip-egg-info')
                for dirname in dirnames:
                    dirname = os.path.join(dirpath, dirname)
                    name = self._clean_zip_name(dirname, dir)
                    # Emit an explicit directory entry with mode 0o755.
                    zipdir = zipfile.ZipInfo(self.name + '/' + name + '/')
                    zipdir.external_attr = 0x1ED << 16  # 0o755
                    zip.writestr(zipdir, '')
                for filename in filenames:
                    if filename == PIP_DELETE_MARKER_FILENAME:
                        continue
                    filename = os.path.join(dirpath, filename)
                    name = self._clean_zip_name(filename, dir)
                    zip.write(filename, self.name + '/' + name)
            zip.close()
            logger.info('Saved %s', display_path(archive_path))

    def _clean_zip_name(self, name, prefix):
        """Strip ``prefix`` plus one separator from *name* and normalize
        the remaining path to forward slashes for zip entries."""
        assert name.startswith(prefix + os.path.sep), (
            "name %r doesn't start with prefix %r" % (name, prefix)
        )
        relative = name[len(prefix) + 1:]
        return relative.replace(os.path.sep, '/')

    def match_markers(self, extras_requested=None):
        """Evaluate this requirement's environment markers.

        Returns True when there are no markers, or when they evaluate
        true for at least one requested extra.  When no extras are
        requested, a single empty extra is used so the markers can be
        evaluated safely without matching any real extra.
        """
        if self.markers is None:
            return True
        if not extras_requested:
            extras_requested = ('',)
        return any(
            self.markers.evaluate({'extra': extra})
            for extra in extras_requested)

    def install(self, install_options, global_options=None, root=None,
                prefix=None, strip_file_prefix=None):
        """Install this requirement: editable checkout, wheel, or sdist.

        :param install_options: extra args appended to ``setup.py install``.
        :param global_options: extra global setup.py options; defaults to
            none.  (The previous mutable default ``[]`` was mutated in
            place by ``+=`` below, leaking state across calls.)
        :param root: alternative root to relocate absolute paths under.
        :param prefix: alternative installation prefix.
        :param strip_file_prefix: prefix stripped from wheel file paths.
        """
        # Work on fresh lists so neither a shared default nor the
        # caller's lists are mutated by the += merging below.
        global_options = list(global_options) if global_options else []
        install_options = list(install_options)

        if self.editable:
            self.install_editable(
                install_options, global_options, prefix=prefix)
            return
        if self.is_wheel:
            version = pip.wheel.wheel_version(self.source_dir)
            pip.wheel.check_compatibility(version, self.name)

            self.move_wheel_files(
                self.source_dir,
                root=root,
                prefix=prefix,
                strip_file_prefix=strip_file_prefix
            )
            self.install_succeeded = True
            return

        # Extend the list of global and install options passed on to
        # the setup.py call with the ones from the requirements file.
        # Options specified in requirements file override those
        # specified on the command line, since the last option given
        # to setup.py is the one that is used.
        global_options += self.options.get('global_options', [])
        install_options += self.options.get('install_options', [])

        if self.isolated:
            global_options = list(global_options) + ["--no-user-cfg"]

        temp_location = tempfile.mkdtemp('-record', 'pip-')
        record_filename = os.path.join(temp_location, 'install-record.txt')
        try:
            install_args = self.get_install_args(
                global_options, record_filename, root, prefix)
            msg = 'Running setup.py install for %s' % (self.name,)
            with open_spinner(msg) as spinner:
                with indent_log():
                    call_subprocess(
                        install_args + install_options,
                        cwd=self.setup_py_dir,
                        show_stdout=False,
                        spinner=spinner,
                    )

            if not os.path.exists(record_filename):
                logger.debug('Record file %s not found', record_filename)
                return
            self.install_succeeded = True
            if self.as_egg:
                # there's no --always-unzip option we can pass to install
                # command so we unable to save the installed-files.txt
                return

            def prepend_root(path):
                # Only absolute paths get relocated under ``root``.
                if root is None or not os.path.isabs(path):
                    return path
                else:
                    return change_root(root, path)

            with open(record_filename) as f:
                for line in f:
                    directory = os.path.dirname(line)
                    if directory.endswith('.egg-info'):
                        egg_info_dir = prepend_root(directory)
                        break
                else:
                    logger.warning(
                        'Could not find .egg-info directory in install record'
                        ' for %s',
                        self,
                    )
                    # FIXME: put the record somewhere
                    # FIXME: should this be an error?
                    return
            new_lines = []
            with open(record_filename) as f:
                for line in f:
                    filename = line.strip()
                    if os.path.isdir(filename):
                        filename += os.path.sep
                    new_lines.append(
                        os.path.relpath(
                            prepend_root(filename), egg_info_dir)
                    )
            inst_files_path = os.path.join(egg_info_dir, 'installed-files.txt')
            with open(inst_files_path, 'w') as f:
                f.write('\n'.join(new_lines) + '\n')
        finally:
            if os.path.exists(record_filename):
                os.remove(record_filename)
            rmtree(temp_location)

    def ensure_has_source_dir(self, parent_dir):
        """Ensure that a source_dir is set.

        This will create a temporary build dir if the name of the requirement
        isn't known yet.

        :param parent_dir: The ideal pip parent_dir for the source_dir.
            Generally src_dir for editables and build_dir for sdists.
        :return: self.source_dir
        """
        if self.source_dir is not None:
            return self.source_dir
        self.source_dir = self.build_location(parent_dir)
        return self.source_dir

    def get_install_args(self, global_options, record_filename, root, prefix):
        """Build the argv for running ``setup.py install``.

        Includes the setuptools shim, the caller's global options, the
        install-record path, root/prefix relocation flags, the compile
        setting, and (inside a virtualenv) an --install-headers path.
        """
        args = [sys.executable, "-u", '-c', SETUPTOOLS_SHIM % self.setup_py]
        args += list(global_options)
        args += ['install', '--record', record_filename]

        if not self.as_egg:
            args += ['--single-version-externally-managed']

        if root is not None:
            args += ['--root', root]
        if prefix is not None:
            args += ['--prefix', prefix]

        if self.pycompile:
            args += ["--compile"]
        else:
            args += ["--no-compile"]

        if running_under_virtualenv():
            py_ver_str = 'python' + sysconfig.get_python_version()
            args += ['--install-headers',
                     os.path.join(sys.prefix, 'include', 'site',
                                  py_ver_str, self.name)]

        return args

    def remove_temporary_source(self):
        """Remove the source files from this requirement, if they are marked
        for deletion"""
        if self.source_dir:
            marker = os.path.join(self.source_dir, PIP_DELETE_MARKER_FILENAME)
            if os.path.exists(marker):
                logger.debug('Removing source in %s', self.source_dir)
                rmtree(self.source_dir)
        self.source_dir = None
        if self._temp_build_dir and os.path.exists(self._temp_build_dir):
            rmtree(self._temp_build_dir)
        self._temp_build_dir = None

    def install_editable(self, install_options,
                         global_options=(), prefix=None):
        """Install this requirement via ``setup.py develop --no-deps``."""
        logger.info('Running setup.py develop for %s', self.name)

        global_options = list(global_options)
        if self.isolated:
            global_options += ["--no-user-cfg"]

        install_options = list(install_options)
        if prefix:
            install_options += ['--prefix={0}'.format(prefix)]

        with indent_log():
            # FIXME: should we do --install-headers here too?
            command = [sys.executable, '-c', SETUPTOOLS_SHIM % self.setup_py]
            command += global_options
            command += ['develop', '--no-deps']
            command += install_options
            call_subprocess(
                command,
                cwd=self.setup_py_dir,
                show_stdout=False)

        self.install_succeeded = True

    def check_if_exists(self):
        """Find an installed distribution that satisfies or conflicts
        with this requirement, and set self.satisfied_by or
        self.conflicts_with appropriately.

        Returns True when a matching or conflicting distribution was
        found, False otherwise (including when self.req is unset).
        """
        if self.req is None:
            return False
        try:
            # get_distribution() will resolve the entire list of requirements
            # anyway, and we've already determined that we need the requirement
            # in question, so strip the marker so that we don't try to
            # evaluate it.
            no_marker = Requirement(str(self.req))
            no_marker.marker = None
            self.satisfied_by = pkg_resources.get_distribution(str(no_marker))
            if self.editable and self.satisfied_by:
                self.conflicts_with = self.satisfied_by
                # when installing editables, nothing pre-existing should ever
                # satisfy
                self.satisfied_by = None
                return True
        except pkg_resources.DistributionNotFound:
            return False
        except pkg_resources.VersionConflict:
            # Something is installed but at the wrong version; record it
            # as a conflict only when we would actually replace it.
            existing_dist = pkg_resources.get_distribution(
                self.req.name
            )
            if self.use_user_site:
                if dist_in_usersite(existing_dist):
                    self.conflicts_with = existing_dist
                elif (running_under_virtualenv() and
                        dist_in_site_packages(existing_dist)):
                    raise InstallationError(
                        "Will not install to the user site because it will "
                        "lack sys.path precedence to %s in %s" %
                        (existing_dist.project_name, existing_dist.location)
                    )
            elif dist_in_install_path(existing_dist):
                self.conflicts_with = existing_dist
        return True

    @property
    def is_wheel(self):
        # Truthy when self.link points at a wheel archive; note this
        # returns the falsy link itself (e.g. None) when there is no link.
        return self.link and self.link.is_wheel

    def move_wheel_files(self, wheeldir, root=None, prefix=None,
                         strip_file_prefix=None):
        """Install this requirement's wheel contents from *wheeldir*.

        Delegates to the module-level ``move_wheel_files`` helper,
        filling in the per-requirement settings.
        """
        move_wheel_files(
            self.name,
            self.req,
            wheeldir,
            user=self.use_user_site,
            home=self.target_dir,
            root=root,
            prefix=prefix,
            pycompile=self.pycompile,
            isolated=self.isolated,
            strip_file_prefix=strip_file_prefix,
        )

    def get_dist(self):
        """Return a pkg_resources.Distribution built from self.egg_info_path"""
        egg_info = self.egg_info_path('').rstrip('/')
        base_dir = os.path.dirname(egg_info)
        dist_name = os.path.splitext(os.path.basename(egg_info))[0]
        metadata = pkg_resources.PathMetadata(base_dir, egg_info)
        return pkg_resources.Distribution(
            base_dir,
            project_name=dist_name,
            metadata=metadata)

    @property
    def has_hash_options(self):
        """Return whether any known-good hashes are specified as options.

        These activate --require-hashes mode; hashes specified as part of a
        URL do not.
        """
        hash_options = self.options.get('hashes', {})
        return bool(hash_options)

    def hashes(self, trust_internet=True):
        """Return a hash-comparer that considers my option- and URL-based
        hashes to be known-good.

        Hashes in URLs--ones embedded in the requirements file, not ones
        downloaded from an index server--are almost peers with ones from
        flags. They satisfy --require-hashes (whether it was implicitly or
        explicitly activated) but do not activate it. md5 and sha224 are not
        allowed in flags, which should nudge people toward good algos. We
        always OR all hashes together, even ones from URLs.

        :param trust_internet: Whether to trust URL-based (#md5=...) hashes
            downloaded from the internet, as by populate_link()
        """
        known = self.options.get('hashes', {}).copy()
        if trust_internet:
            link = self.link
        else:
            link = self.original_link
        if link and link.hash:
            known.setdefault(link.hash_name, []).append(link.hash)
        return Hashes(known)


def _strip_postfix(req):
    """
        Strip req postfix ( -dev, 0.2, etc )
    """
    # FIXME: use package_to_requirement?
    match = re.search(r'^(.*?)(?:-dev|-\d.*)$', req)
    if match:
        # Strip off -dev, -0.2, etc.
        req = match.group(1)
    return req


def parse_editable(editable_req, default_vcs=None):
    """Parse an editable requirement into its parts.

    Returns a tuple ``(requirement name, URL, extras)``, where extras is a
    set of extra names or None.

    Accepted requirements:
        svn+http://blahblah@rev#egg=Foobar[baz]&subdirectory=version_subdir
        .[some_extra]

    :raises InstallationError: when the requirement is not a local project
        directory, a file: URL, or a recognizable VCS URL with an #egg part.
    """

    from pip.index import Link

    url = editable_req
    extras = None

    # If a file path is specified with extras, strip off the extras.
    m = re.match(r'^(.+)(\[[^\]]+\])$', url)
    if m:
        url_no_extras = m.group(1)
        extras = m.group(2)
    else:
        url_no_extras = url

    if os.path.isdir(url_no_extras):
        if not os.path.exists(os.path.join(url_no_extras, 'setup.py')):
            raise InstallationError(
                "Directory %r is not installable. File 'setup.py' not found." %
                url_no_extras
            )
        # Treating it as code that has already been checked out
        url_no_extras = path_to_url(url_no_extras)

    if url_no_extras.lower().startswith('file:'):
        package_name = Link(url_no_extras).egg_fragment
        if extras:
            return (
                package_name,
                url_no_extras,
                Requirement("placeholder" + extras.lower()).extras,
            )
        else:
            return package_name, url_no_extras, None

    # Prepend the VCS name to bare "<vcs>:..." URLs (e.g. "git:..." becomes
    # "git+git:...") so the '+' scheme check below recognizes them.
    for version_control in vcs:
        if url.lower().startswith('%s:' % version_control):
            url = '%s+%s' % (version_control, url)
            break

    if '+' not in url:
        if default_vcs:
            warnings.warn(
                "--default-vcs has been deprecated and will be removed in "
                "the future.",
                RemovedInPip10Warning,
            )
            url = default_vcs + '+' + url
        else:
            raise InstallationError(
                '%s should either be a path to a local project or a VCS url '
                'beginning with svn+, git+, hg+, or bzr+' %
                editable_req
            )

    vc_type = url.split('+', 1)[0].lower()

    if not vcs.get_backend(vc_type):
        error_message = 'For --editable=%s only ' % editable_req + \
            ', '.join([backend.name + '+URL' for backend in vcs.backends]) + \
            ' is currently supported'
        raise InstallationError(error_message)

    package_name = Link(url).egg_fragment
    if not package_name:
        # A second, identical check used to follow here with a different
        # message; it was unreachable dead code and has been removed.
        raise InstallationError(
            "Could not detect requirement name, please specify one with #egg="
        )
    return _strip_postfix(package_name), url, None
req/__init__.py000064400000000424151733136160007452 0ustar00from __future__ import absolute_import

from .req_install import InstallRequirement
from .req_set import RequirementSet, Requirements
from .req_file import parse_requirements

__all__ = [
    "RequirementSet", "Requirements", "InstallRequirement",
    "parse_requirements",
]
req/req_uninstall.py000064400000015361151733136170010602 0ustar00from __future__ import absolute_import

import logging
import os
import tempfile

from pip.compat import uses_pycache, WINDOWS, cache_from_source
from pip.exceptions import UninstallationError
from pip.utils import rmtree, ask, is_local, renames, normalize_path
from pip.utils.logging import indent_log


logger = logging.getLogger(__name__)


class UninstallPathSet(object):
    """A set of file paths to be removed in the uninstallation of a
    requirement.

    Files are moved into a temporary stash directory rather than deleted
    outright, so rollback() can restore them until commit() discards the
    stash.
    """
    def __init__(self, dist):
        # Local paths confirmed for removal.
        self.paths = set()
        # Paths we refuse to touch because they fall outside the local prefix.
        self._refuse = set()
        # Maps a .pth file path to the UninstallPthEntries that edits it.
        self.pth = {}
        # The distribution being uninstalled.
        self.dist = dist
        # Temporary stash directory; created lazily by remove().
        self.save_dir = None
        # Paths already moved into the stash, consumed by rollback().
        self._moved_paths = []

    def _permitted(self, path):
        """
        Return True if the given path is one we are permitted to
        remove/modify, False otherwise.

        """
        return is_local(path)

    def add(self, path):
        """Schedule ``path`` for removal (or record a refusal if non-local)."""
        head, tail = os.path.split(path)

        # we normalize the head to resolve parent directory symlinks, but not
        # the tail, since we only want to uninstall symlinks, not their targets
        path = os.path.join(normalize_path(head), os.path.normcase(tail))

        # Silently skip files that no longer exist on disk.
        if not os.path.exists(path):
            return
        if self._permitted(path):
            self.paths.add(path)
        else:
            self._refuse.add(path)

        # __pycache__ files can show up after 'installed-files.txt' is created,
        # due to imports
        if os.path.splitext(path)[1] == '.py' and uses_pycache:
            self.add(cache_from_source(path))

    def add_pth(self, pth_file, entry):
        """Schedule removal of ``entry`` from the .pth file ``pth_file``."""
        pth_file = normalize_path(pth_file)
        if self._permitted(pth_file):
            if pth_file not in self.pth:
                self.pth[pth_file] = UninstallPthEntries(pth_file)
            self.pth[pth_file].add(entry)
        else:
            self._refuse.add(pth_file)

    def compact(self, paths):
        """Compact a path set to contain the minimal number of paths
        necessary to contain all paths in the set. If /a/path/ and
        /a/path/to/a/file.txt are both in the set, leave only the
        shorter path."""
        short_paths = set()
        # Shortest-first, so any containing path is kept before its contents
        # are seen; the index test checks for a real separator at the prefix
        # boundary so that e.g. /a/pathx is not treated as inside /a/path.
        for path in sorted(paths, key=len):
            if not any([
                    (path.startswith(shortpath) and
                     path[len(shortpath.rstrip(os.path.sep))] == os.path.sep)
                    for shortpath in short_paths]):
                short_paths.add(path)
        return short_paths

    def _stash(self, path):
        # Mirror the absolute path (minus any drive letter) under save_dir.
        return os.path.join(
            self.save_dir, os.path.splitdrive(path)[1].lstrip(os.path.sep))

    def remove(self, auto_confirm=False):
        """Remove paths in ``self.paths`` with confirmation (unless
        ``auto_confirm`` is True)."""
        if not self.paths:
            logger.info(
                "Can't uninstall '%s'. No files were found to uninstall.",
                self.dist.project_name,
            )
            return
        logger.info(
            'Uninstalling %s-%s:',
            self.dist.project_name, self.dist.version
        )

        with indent_log():
            paths = sorted(self.compact(self.paths))

            if auto_confirm:
                response = 'y'
            else:
                for path in paths:
                    logger.info(path)
                response = ask('Proceed (y/n)? ', ('y', 'n'))
            if self._refuse:
                logger.info('Not removing or modifying (outside of prefix):')
                for path in self.compact(self._refuse):
                    logger.info(path)
            # Any answer other than 'y' leaves everything untouched.
            if response == 'y':
                self.save_dir = tempfile.mkdtemp(suffix='-uninstall',
                                                 prefix='pip-')
                # Move (not delete) each path into the stash so a failure can
                # be rolled back.
                for path in paths:
                    new_path = self._stash(path)
                    logger.debug('Removing file or directory %s', path)
                    self._moved_paths.append(path)
                    renames(path, new_path)
                for pth in self.pth.values():
                    pth.remove()
                logger.info(
                    'Successfully uninstalled %s-%s',
                    self.dist.project_name, self.dist.version
                )

    def rollback(self):
        """Rollback the changes previously made by remove()."""
        # Returns False when there is nothing to roll back; otherwise None.
        if self.save_dir is None:
            logger.error(
                "Can't roll back %s; was not uninstalled",
                self.dist.project_name,
            )
            return False
        logger.info('Rolling back uninstall of %s', self.dist.project_name)
        # Move every stashed path back to its original location.
        for path in self._moved_paths:
            tmp_path = self._stash(path)
            logger.debug('Replacing %s', path)
            renames(tmp_path, path)
        for pth in self.pth.values():
            pth.rollback()

    def commit(self):
        """Remove temporary save dir: rollback will no longer be possible."""
        if self.save_dir is not None:
            rmtree(self.save_dir)
            self.save_dir = None
            self._moved_paths = []


class UninstallPthEntries(object):
    """Tracks entries to delete from a single .pth file, performs the edit,
    and keeps a pristine copy of the file's lines for rollback()."""

    def __init__(self, pth_file):
        if not os.path.isfile(pth_file):
            raise UninstallationError(
                "Cannot remove entries from nonexistent file %s" % pth_file
            )
        self.file = pth_file
        # Normalized entries scheduled for removal.
        self.entries = set()
        # Original file lines, captured by remove() for rollback().
        self._saved_lines = None

    def add(self, entry):
        entry = os.path.normcase(entry)
        # On Windows, os.path.normcase converts the entry to use
        # backslashes.  This is correct for entries that describe absolute
        # paths outside of site-packages, but all the others use forward
        # slashes.
        if WINDOWS and not os.path.splitdrive(entry)[0]:
            entry = entry.replace('\\', '/')
        self.entries.add(entry)

    def remove(self):
        """Rewrite the .pth file without the registered entries."""
        logger.debug('Removing pth entries from %s:', self.file)
        with open(self.file, 'rb') as fh:
            # windows uses '\r\n' with py3k, but uses '\n' with py2.x
            lines = fh.readlines()
        # Save a *copy* for rollback().  Previously the original list object
        # was saved and then mutated below, so rollback() rewrote the
        # already-edited lines and could not restore the removed entries.
        self._saved_lines = list(lines)
        if any(b'\r\n' in line for line in lines):
            endline = '\r\n'
        else:
            endline = '\n'
        for entry in self.entries:
            try:
                logger.debug('Removing entry: %s', entry)
                lines.remove((entry + endline).encode("utf-8"))
            except ValueError:
                # Entry not present in the file; nothing to remove.
                pass
        with open(self.file, 'wb') as fh:
            fh.writelines(lines)

    def rollback(self):
        """Restore the content saved by remove(); False when impossible."""
        if self._saved_lines is None:
            logger.error(
                'Cannot roll back changes to %s, none were made', self.file
            )
            return False
        logger.debug('Rolling %s back to previous state', self.file)
        with open(self.file, 'wb') as fh:
            fh.writelines(self._saved_lines)
        return True
req/req_file.py000064400000027226151733136170007513 0ustar00"""
Requirements file parsing
"""

from __future__ import absolute_import

import os
import re
import shlex
import sys
import optparse
import warnings

from pip._vendor.six.moves.urllib import parse as urllib_parse
from pip._vendor.six.moves import filterfalse

import pip
from pip.download import get_file_content
from pip.req.req_install import InstallRequirement
from pip.exceptions import (RequirementsFileParseError)
from pip.utils.deprecation import RemovedInPip10Warning
from pip import cmdoptions

__all__ = ['parse_requirements']

# Matches URLs with a scheme we fetch ourselves (http/https/file).
SCHEME_RE = re.compile(r'^(http|https|file):', re.I)
# Matches a '#' comment (preceded by start-of-line or whitespace) to EOL.
COMMENT_RE = re.compile(r'(^|\s)+#.*$')

# Options that may appear on a non-requirement line of a requirements file
# and configure the finder / overall install (see process_line).
SUPPORTED_OPTIONS = [
    cmdoptions.constraints,
    cmdoptions.editable,
    cmdoptions.requirements,
    cmdoptions.no_index,
    cmdoptions.index_url,
    cmdoptions.find_links,
    cmdoptions.extra_index_url,
    cmdoptions.allow_external,
    cmdoptions.allow_all_external,
    cmdoptions.no_allow_external,
    cmdoptions.allow_unsafe,
    cmdoptions.no_allow_unsafe,
    cmdoptions.use_wheel,
    cmdoptions.no_use_wheel,
    cmdoptions.always_unzip,
    cmdoptions.no_binary,
    cmdoptions.only_binary,
    cmdoptions.pre,
    cmdoptions.process_dependency_links,
    cmdoptions.trusted_host,
    cmdoptions.require_hashes,
]

# options to be passed to requirements
SUPPORTED_OPTIONS_REQ = [
    cmdoptions.install_options,
    cmdoptions.global_options,
    cmdoptions.hash,
]

# the 'dest' string values
SUPPORTED_OPTIONS_REQ_DEST = [o().dest for o in SUPPORTED_OPTIONS_REQ]


def parse_requirements(filename, finder=None, comes_from=None, options=None,
                       session=None, constraint=False, wheel_cache=None):
    """Parse a requirements file, yielding one InstallRequirement per line.

    :param filename:    Path or url of requirements file.
    :param finder:      Instance of pip.index.PackageFinder.
    :param comes_from:  Origin description of requirements.
    :param options:     cli options.
    :param session:     Instance of pip.download.PipSession (required).
    :param constraint:  If true, parsing a constraint file rather than
        requirements file.
    :param wheel_cache: Instance of pip.wheel.WheelCache
    :raises TypeError:  when no session is supplied.
    """
    if session is None:
        raise TypeError(
            "parse_requirements() missing 1 required keyword argument: "
            "'session'"
        )

    _, content = get_file_content(
        filename, comes_from=comes_from, session=session
    )

    for line_number, line in preprocess(content, options):
        for req in process_line(line, filename, line_number, finder,
                                comes_from, options, session, wheel_cache,
                                constraint=constraint):
            yield req


def preprocess(content, options):
    """Turn raw requirements-file text into an iterator of
    (line number, cleaned line) pairs.

    :param content: the content of the requirements file
    :param options: cli options
    """
    numbered_lines = enumerate(content.splitlines(), start=1)
    return skip_regex(
        ignore_comments(join_lines(numbered_lines)), options)


def process_line(line, filename, line_number, finder=None, comes_from=None,
                 options=None, session=None, wheel_cache=None,
                 constraint=False):
    """Process a single requirements line; This can result in creating/yielding
    requirements, or updating the finder.

    For lines that contain requirements, the only options that have an effect
    are from SUPPORTED_OPTIONS_REQ, and they are scoped to the
    requirement. Other options from SUPPORTED_OPTIONS may be present, but are
    ignored.

    For lines that do not contain requirements, the only options that have an
    effect are from SUPPORTED_OPTIONS. Options from SUPPORTED_OPTIONS_REQ may
    be present, but are ignored. These lines may contain multiple options
    (although our docs imply only one is supported), and all are parsed and
    affect the finder.

    :param constraint: If True, parsing a constraints file.
    :param options: OptionParser options that we may update
    """
    parser = build_parser()
    defaults = parser.get_default_values()
    # Blank the default so we can tell whether this line set --index-url
    # explicitly.
    defaults.index_url = None
    if finder:
        # `finder.format_control` will be updated during parsing
        defaults.format_control = finder.format_control
    args_str, options_str = break_args_options(line)
    if sys.version_info < (2, 7, 3):
        # Prior to 2.7.3, shlex cannot deal with unicode entries
        options_str = options_str.encode('utf8')
    opts, _ = parser.parse_args(shlex.split(options_str), defaults)

    # preserve for the nested code path
    line_comes_from = '%s %s (line %s)' % (
        '-c' if constraint else '-r', filename, line_number)

    # yield a line requirement
    if args_str:
        isolated = options.isolated_mode if options else False
        if options:
            cmdoptions.check_install_build_global(options, opts)
        # get the options that apply to requirements
        req_options = {}
        for dest in SUPPORTED_OPTIONS_REQ_DEST:
            if dest in opts.__dict__ and opts.__dict__[dest]:
                req_options[dest] = opts.__dict__[dest]
        yield InstallRequirement.from_line(
            args_str, line_comes_from, constraint=constraint,
            isolated=isolated, options=req_options, wheel_cache=wheel_cache
        )

    # yield an editable requirement
    elif opts.editables:
        isolated = options.isolated_mode if options else False
        default_vcs = options.default_vcs if options else None
        yield InstallRequirement.from_editable(
            opts.editables[0], comes_from=line_comes_from,
            constraint=constraint, default_vcs=default_vcs, isolated=isolated,
            wheel_cache=wheel_cache
        )

    # parse a nested requirements file
    elif opts.requirements or opts.constraints:
        if opts.requirements:
            req_path = opts.requirements[0]
            nested_constraint = False
        else:
            req_path = opts.constraints[0]
            nested_constraint = True
        # original file is over http
        if SCHEME_RE.search(filename):
            # do a url join so relative paths work
            req_path = urllib_parse.urljoin(filename, req_path)
        # original file and nested file are paths
        elif not SCHEME_RE.search(req_path):
            # do a join so relative paths work
            req_path = os.path.join(os.path.dirname(filename), req_path)
        # TODO: Why not use `comes_from='-r {} (line {})'` here as well?
        parser = parse_requirements(
            req_path, finder, comes_from, options, session,
            constraint=nested_constraint, wheel_cache=wheel_cache
        )
        for req in parser:
            yield req

    # percolate hash-checking option upward
    elif opts.require_hashes:
        options.require_hashes = opts.require_hashes

    # set finder options
    elif finder:
        if opts.allow_external:
            warnings.warn(
                "--allow-external has been deprecated and will be removed in "
                "the future. Due to changes in the repository protocol, it no "
                "longer has any effect.",
                RemovedInPip10Warning,
            )

        if opts.allow_all_external:
            warnings.warn(
                "--allow-all-external has been deprecated and will be removed "
                "in the future. Due to changes in the repository protocol, it "
                "no longer has any effect.",
                RemovedInPip10Warning,
            )

        if opts.allow_unverified:
            warnings.warn(
                "--allow-unverified has been deprecated and will be removed "
                "in the future. Due to changes in the repository protocol, it "
                "no longer has any effect.",
                RemovedInPip10Warning,
            )

        if opts.index_url:
            finder.index_urls = [opts.index_url]
        if opts.use_wheel is False:
            finder.use_wheel = False
            pip.index.fmt_ctl_no_use_wheel(finder.format_control)
        if opts.no_index is True:
            finder.index_urls = []
        if opts.extra_index_urls:
            finder.index_urls.extend(opts.extra_index_urls)
        if opts.find_links:
            # FIXME: it would be nice to keep track of the source
            # of the find_links: support a find-links local path
            # relative to a requirements file.
            value = opts.find_links[0]
            req_dir = os.path.dirname(os.path.abspath(filename))
            relative_to_reqs_file = os.path.join(req_dir, value)
            if os.path.exists(relative_to_reqs_file):
                value = relative_to_reqs_file
            finder.find_links.append(value)
        if opts.pre:
            finder.allow_all_prereleases = True
        if opts.process_dependency_links:
            finder.process_dependency_links = True
        if opts.trusted_hosts:
            # NOTE(review): ('*', host, '*') appears to be a
            # (scheme, host, port) wildcard origin -- confirm against
            # PackageFinder.secure_origins semantics.
            finder.secure_origins.extend(
                ("*", host, "*") for host in opts.trusted_hosts)


def break_args_options(line):
    """Split ``line`` into an ``(args, options)`` pair of strings.

    Only the options part is later run through shlex/optparse; args may
    contain markers which would be corrupted by shlex.  Everything from the
    first token starting with '-' onward counts as options.
    """
    tokens = line.split(' ')
    args = []
    options = tokens[:]
    for token in tokens:
        # startswith('-') already covers '--'; the previous second test for
        # '--' was redundant.
        if token.startswith('-'):
            break
        args.append(token)
        options.pop(0)
    return ' '.join(args), ' '.join(options)


def build_parser():
    """
    Build the optparse parser used for individual requirement-file lines.
    """
    parser = optparse.OptionParser(add_help_option=False)

    for make_option in SUPPORTED_OPTIONS + SUPPORTED_OPTIONS_REQ:
        parser.add_option(make_option())

    # optparse calls sys.exit() on parse errors by default; raise our own
    # exception instead so callers can handle failures.
    def parser_exit(self, msg):
        raise RequirementsFileParseError(msg)
    parser.exit = parser_exit

    return parser


def join_lines(lines_enum):
    """Merge lines ending in '\\' with the lines that follow them.

    A joined logical line is reported under the number of its first
    physical line.  A comment line never acts as a continuation, and is
    given a leading space so it is still matched when comments are
    stripped later.
    """
    fragments = []
    start_number = None
    for number, text in lines_enum:
        is_comment = COMMENT_RE.match(text)
        if text.endswith('\\') and not is_comment:
            # Continuation: buffer it (sans backslash) and keep reading.
            if not fragments:
                start_number = number
            fragments.append(text.strip('\\'))
            continue
        if is_comment:
            # this ensures comments are always matched later
            text = ' ' + text
        if fragments:
            fragments.append(text)
            yield start_number, ''.join(fragments)
            fragments = []
        else:
            yield number, text

    # the final physical line ended with '\'
    if fragments:
        yield start_number, ''.join(fragments)

    # TODO: handle space after '\'.


def ignore_comments(lines_enum):
    """
    Strip '#' comments and drop any lines left empty afterwards.
    """
    for number, text in lines_enum:
        cleaned = COMMENT_RE.sub('', text).strip()
        if cleaned:
            yield number, cleaned


def skip_regex(lines_enum, options):
    """
    Filter out lines matching the '--skip-requirements-regex' pattern

    Note: the regex pattern is compiled exactly once
    """
    regex_source = options.skip_requirements_regex if options else None
    if not regex_source:
        return lines_enum
    matcher = re.compile(regex_source)
    return filterfalse(lambda pair: matcher.search(pair[1]), lines_enum)
download.py000064400000100117151733136170006734 0ustar00from __future__ import absolute_import

import cgi
import email.utils
import getpass
import json
import logging
import mimetypes
import os
import platform
import re
import shutil
import sys
import tempfile

try:
    import ssl  # noqa
    HAS_TLS = True
except ImportError:
    HAS_TLS = False

from pip._vendor.six.moves.urllib import parse as urllib_parse
from pip._vendor.six.moves.urllib import request as urllib_request

import pip

from pip.exceptions import InstallationError, HashMismatch
from pip.models import PyPI
from pip.utils import (splitext, rmtree, format_size, display_path,
                       backup_dir, ask_path_exists, unpack_file,
                       ARCHIVE_EXTENSIONS, consume, call_subprocess)
from pip.utils.encoding import auto_decode
from pip.utils.filesystem import check_path_owner
from pip.utils.logging import indent_log
from pip.utils.setuptools_build import SETUPTOOLS_SHIM
from pip.utils.glibc import libc_ver
from pip.utils.ui import DownloadProgressBar, DownloadProgressSpinner
from pip.locations import write_delete_marker_file
from pip.vcs import vcs
from pip._vendor import requests, six
from pip._vendor.requests.adapters import BaseAdapter, HTTPAdapter
from pip._vendor.requests.auth import AuthBase, HTTPBasicAuth
from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response
from pip._vendor.requests.utils import get_netrc_auth
from pip._vendor.requests.structures import CaseInsensitiveDict
from pip._vendor import urllib3
from pip._vendor.cachecontrol import CacheControlAdapter
from pip._vendor.cachecontrol.caches import FileCache
from pip._vendor.lockfile import LockError
from pip._vendor.six.moves import xmlrpc_client


__all__ = ['get_file_content',
           'is_url', 'url_to_path', 'path_to_url',
           'is_archive_file', 'unpack_vcs_link',
           'unpack_file_url', 'is_vcs_url', 'is_file_url',
           'unpack_http_url', 'unpack_url',
           'parse_content_disposition', 'sanitize_content_filename']


logger = logging.getLogger(__name__)


def user_agent():
    """
    Return a string representing the user agent.

    The result is "pip/<version> <json>", where the JSON payload describes
    the Python implementation, OS/distro, CPU, and (when available) the
    OpenSSL version.
    """
    data = {
        "installer": {"name": "pip", "version": pip.__version__},
        "python": platform.python_version(),
        "implementation": {
            "name": platform.python_implementation(),
        },
    }

    if data["implementation"]["name"] == 'CPython':
        data["implementation"]["version"] = platform.python_version()
    elif data["implementation"]["name"] == 'PyPy':
        # PyPy's own version differs from the Python level it implements;
        # append the release level unless it is 'final'.
        if sys.pypy_version_info.releaselevel == 'final':
            pypy_version_info = sys.pypy_version_info[:3]
        else:
            pypy_version_info = sys.pypy_version_info
        data["implementation"]["version"] = ".".join(
            [str(x) for x in pypy_version_info]
        )
    elif data["implementation"]["name"] == 'Jython':
        # Complete Guess
        data["implementation"]["version"] = platform.python_version()
    elif data["implementation"]["name"] == 'IronPython':
        # Complete Guess
        data["implementation"]["version"] = platform.python_version()

    if sys.platform.startswith("linux"):
        # The vendored `distro` package reports the distribution name,
        # version and id; empty values are filtered out.
        from pip._vendor import distro
        distro_infos = dict(filter(
            lambda x: x[1],
            zip(["name", "version", "id"], distro.linux_distribution()),
        ))
        libc = dict(filter(
            lambda x: x[1],
            zip(["lib", "version"], libc_ver()),
        ))
        if libc:
            distro_infos["libc"] = libc
        if distro_infos:
            data["distro"] = distro_infos

    if sys.platform.startswith("darwin") and platform.mac_ver()[0]:
        data["distro"] = {"name": "macOS", "version": platform.mac_ver()[0]}

    if platform.system():
        data.setdefault("system", {})["name"] = platform.system()

    if platform.release():
        data.setdefault("system", {})["release"] = platform.release()

    if platform.machine():
        data["cpu"] = platform.machine()

    # Python 2.6 doesn't have ssl.OPENSSL_VERSION.
    if HAS_TLS and sys.version_info[:2] > (2, 6):
        data["openssl_version"] = ssl.OPENSSL_VERSION

    # Compact, key-sorted JSON keeps the header deterministic.
    return "{data[installer][name]}/{data[installer][version]} {json}".format(
        data=data,
        json=json.dumps(data, separators=(",", ":"), sort_keys=True),
    )


class MultiDomainBasicAuth(AuthBase):
    """requests auth helper that remembers HTTP basic-auth credentials per
    netloc, sourcing them from the URL, a previous prompt, or netrc, and
    optionally prompting the user when a request receives a 401."""

    def __init__(self, prompting=True):
        # Whether we may interactively prompt for credentials on a 401.
        self.prompting = prompting
        # Maps netloc -> (username, password), reused across requests.
        self.passwords = {}

    def __call__(self, req):
        parsed = urllib_parse.urlparse(req.url)

        # Get the netloc without any embedded credentials
        netloc = parsed.netloc.rsplit("@", 1)[-1]

        # Set the url of the request to the url without any credentials
        req.url = urllib_parse.urlunparse(parsed[:1] + (netloc,) + parsed[2:])

        # Use any stored credentials that we have for this netloc
        username, password = self.passwords.get(netloc, (None, None))

        # Extract credentials embedded in the url if we have none stored
        if username is None:
            username, password = self.parse_credentials(parsed.netloc)

        # Get creds from netrc if we still don't have them
        if username is None and password is None:
            netrc_auth = get_netrc_auth(req.url)
            username, password = netrc_auth if netrc_auth else (None, None)

        if username or password:
            # Store the username and password
            self.passwords[netloc] = (username, password)

            # Send the basic auth with this request
            req = HTTPBasicAuth(username or "", password or "")(req)

        # Attach a hook to handle 401 responses
        req.register_hook("response", self.handle_401)

        return req

    def handle_401(self, resp, **kwargs):
        # We only care about 401 responses, anything else we want to just
        #   pass through the actual response
        if resp.status_code != 401:
            return resp

        # We are not able to prompt the user so simply return the response
        if not self.prompting:
            return resp

        parsed = urllib_parse.urlparse(resp.url)

        # Prompt the user for a new username and password
        username = six.moves.input("User for %s: " % parsed.netloc)
        password = getpass.getpass("Password: ")

        # Store the new username and password to use for future requests
        if username or password:
            self.passwords[parsed.netloc] = (username, password)

        # Consume content and release the original connection to allow our new
        #   request to reuse the same one.
        resp.content
        resp.raw.release_conn()

        # Add our new username and password to the request
        req = HTTPBasicAuth(username or "", password or "")(resp.request)

        # Send our new request
        new_resp = resp.connection.send(req, **kwargs)
        new_resp.history.append(resp)

        return new_resp

    def parse_credentials(self, netloc):
        # Pull username/password out of a "user:pass@host" netloc; each
        # element may be None when absent.
        if "@" in netloc:
            userinfo = netloc.rsplit("@", 1)[0]
            if ":" in userinfo:
                return userinfo.split(":", 1)
            return userinfo, None
        return None, None


class LocalFSAdapter(BaseAdapter):
    """A requests transport adapter that serves file:// URLs from disk."""

    def send(self, request, stream=None, timeout=None, verify=None, cert=None,
             proxies=None):
        local_path = url_to_path(request.url)

        response = Response()
        response.status_code = 200
        response.url = request.url

        try:
            file_stats = os.stat(local_path)
        except OSError as exc:
            # A missing/unreadable file maps to a plain 404.
            response.status_code = 404
            response.raw = exc
            return response

        guessed_type = mimetypes.guess_type(local_path)[0] or "text/plain"
        response.headers = CaseInsensitiveDict({
            "Content-Type": guessed_type,
            "Content-Length": file_stats.st_size,
            "Last-Modified": email.utils.formatdate(
                file_stats.st_mtime, usegmt=True),
        })

        response.raw = open(local_path, "rb")
        response.close = response.raw.close

        return response

    def close(self):
        pass


class SafeFileCache(FileCache):
    """
    A file based cache which is safe to use even when the target directory may
    not be accessible or writable.
    """

    def __init__(self, *args, **kwargs):
        super(SafeFileCache, self).__init__(*args, **kwargs)

        # Check to ensure that the directory containing our cache directory
        # is owned by the user current executing pip. If it does not exist
        # we will check the parent directory until we find one that does exist.
        # If it is not owned by the user executing pip then we will disable
        # the cache and log a warning.
        if not check_path_owner(self.directory):
            logger.warning(
                "The directory '%s' or its parent directory is not owned by "
                "the current user and the cache has been disabled. Please "
                "check the permissions and owner of that directory. If "
                "executing pip with sudo, you may want sudo's -H flag.",
                self.directory,
            )

            # Set our directory to None to disable the Cache
            self.directory = None

    def _suppressed(self, method, *args, **kwargs):
        """Call ``method``, treating a disabled cache and lock/filesystem
        errors as silent no-ops (previously this guard/try was copy-pasted
        into get/set/delete)."""
        # If we don't have a directory, then the cache should be a no-op.
        if self.directory is None:
            return None

        try:
            return method(*args, **kwargs)
        except (LockError, OSError, IOError):
            # We intentionally silence this error: if we can't access the
            # cache then we can just skip caching and process the request as
            # if caching wasn't enabled.
            return None

    def get(self, *args, **kwargs):
        return self._suppressed(
            super(SafeFileCache, self).get, *args, **kwargs)

    def set(self, *args, **kwargs):
        return self._suppressed(
            super(SafeFileCache, self).set, *args, **kwargs)

    def delete(self, *args, **kwargs):
        return self._suppressed(
            super(SafeFileCache, self).delete, *args, **kwargs)


class InsecureHTTPAdapter(HTTPAdapter):
    """Transport adapter that skips TLS certificate verification."""

    def cert_verify(self, conn, url, verify, cert):
        # Drop the CA bundle and stop requiring certificates so HTTPS
        # connections succeed even with invalid or unknown certs.
        conn.ca_certs = None
        conn.cert_reqs = 'CERT_NONE'


class PipSession(requests.Session):
    """A requests.Session pre-configured for pip's networking needs."""

    # Session-wide default timeout, applied in request() when the caller
    # does not supply one.
    timeout = None

    def __init__(self, *args, **kwargs):
        total_retries = kwargs.pop("retries", 0)
        cache_dir = kwargs.pop("cache", None)
        insecure_hosts = kwargs.pop("insecure_hosts", [])

        super(PipSession, self).__init__(*args, **kwargs)

        # Identify ourselves to servers.
        self.headers["User-Agent"] = user_agent()

        # Handle HTTP basic auth across multiple index domains.
        self.auth = MultiDomainBasicAuth()

        # Retry policy shared by every adapter below. A 503 from PyPI
        # typically means the Fastly -> Origin connection was interrupted,
        # a transient condition, so it is retried; the small backoff keeps
        # us from hammering the service.
        retry_config = urllib3.Retry(
            total=total_retries,
            status_forcelist=[503],
            backoff_factor=0.25,
        )

        # Only cache responses fetched over verified HTTPS. An insecurely
        # fetched response cannot be validated, and caching it would allow
        # cache poisoning that requires manual eviction to undo.
        if cache_dir:
            secure_adapter = CacheControlAdapter(
                cache=SafeFileCache(cache_dir, use_dir_lock=True),
                max_retries=retry_config,
            )
        else:
            secure_adapter = HTTPAdapter(max_retries=retry_config)

        # The insecure adapter disables HTTPS validation and never caches;
        # it serves plain http:// URLs plus any https:// host the user has
        # explicitly marked as trusted despite TLS errors.
        insecure_adapter = InsecureHTTPAdapter(max_retries=retry_config)

        self.mount("https://", secure_adapter)
        self.mount("http://", insecure_adapter)

        # Enable file:// urls via a local-filesystem adapter.
        self.mount("file://", LocalFSAdapter())

        # Route requests for explicitly-trusted hosts through the
        # non-validating adapter.
        for host in insecure_hosts:
            self.mount("https://{0}/".format(host), insecure_adapter)

    def request(self, method, url, *args, **kwargs):
        # Apply the session-wide default timeout unless one was given.
        kwargs.setdefault("timeout", self.timeout)
        return super(PipSession, self).request(method, url, *args, **kwargs)


def get_file_content(url, comes_from=None, session=None):
    """Return ``(location, content)`` for a local file or URL.

    ``url`` may be a plain filename, a file: URL, or an http(s): URL.
    The returned content is text (unicode).
    """
    if session is None:
        raise TypeError(
            "get_file_content() missing 1 required keyword argument: 'session'"
        )

    scheme_match = _scheme_re.search(url)
    if scheme_match:
        scheme = scheme_match.group(1).lower()
        # A requirements file fetched over HTTP must not reference local
        # file: URLs -- that would let a remote file read local data.
        if (scheme == 'file' and comes_from and
                comes_from.startswith('http')):
            raise InstallationError(
                'Requirements file %s references URL %s, which is local'
                % (comes_from, url))
        if scheme == 'file':
            path = url.split(':', 1)[1]
            path = path.replace('\\', '/')
            drive_match = _url_slash_drive_re.match(path)
            if drive_match:
                # Convert "/c|/..." into "c:/..." for Windows drives.
                path = drive_match.group(1) + ':' + path.split('|', 1)[1]
            path = urllib_parse.unquote(path)
            if path.startswith('/'):
                # Collapse any run of leading slashes down to one.
                path = '/' + path.lstrip('/')
            url = path
        else:
            # FIXME: catch some errors
            resp = session.get(url)
            resp.raise_for_status()
            return resp.url, resp.text
    try:
        with open(url, 'rb') as f:
            content = auto_decode(f.read())
    except IOError as exc:
        raise InstallationError(
            'Could not open requirements file: %s' % str(exc)
        )
    return url, content


# Matches a leading "http:", "https:" or "file:" scheme (case-insensitive).
_scheme_re = re.compile(r'^(http|https|file):', re.I)
# Matches a Windows drive letter written as "/c|" inside a file: URL path.
_url_slash_drive_re = re.compile(r'/*([a-z])\|', re.I)


def is_url(name):
    """Return True when `name` carries a recognised URL scheme."""
    if ':' not in name:
        return False
    scheme = name.partition(':')[0].lower()
    return scheme in ['http', 'https', 'file', 'ftp'] + vcs.all_schemes


def url_to_path(url):
    """
    Convert a file: URL to a local filesystem path.
    """
    assert url.startswith('file:'), (
        "You can only turn file: urls into filenames (not %r)" % url)

    _, netloc, path, _, _ = urllib_parse.urlsplit(url)

    # A non-empty netloc means a Windows UNC path; restore the \\host prefix.
    if netloc:
        netloc = '\\\\' + netloc

    return urllib_request.url2pathname(netloc + path)


def path_to_url(path):
    """
    Convert a path to a file: URL.  The path is made absolute and its
    components are percent-quoted.
    """
    abs_path = os.path.normpath(os.path.abspath(path))
    return urllib_parse.urljoin('file:', urllib_request.pathname2url(abs_path))


def is_archive_file(name):
    """Return True if `name` has a recognized archive file extension."""
    # `splitext` is pip's variant that understands multi-part extensions
    # such as ".tar.gz"; membership in ARCHIVE_EXTENSIONS is already a
    # bool, so return it directly instead of the if/True/False dance.
    return splitext(name)[1].lower() in ARCHIVE_EXTENSIONS


def unpack_vcs_link(link, location):
    """Check out / export the VCS `link` into `location`."""
    backend = _get_used_vcs_backend(link)
    backend.unpack(location)


def _get_used_vcs_backend(link):
    """Return a VCS backend instance for `link`, or None if none matches."""
    for backend in vcs.backends:
        if link.scheme in backend.schemes:
            # First backend claiming the scheme wins.
            return backend(link.url)
    return None


def is_vcs_url(link):
    """Return True when a registered VCS backend handles `link`."""
    return _get_used_vcs_backend(link) is not None


def is_file_url(link):
    """Return True when `link` uses the file: scheme."""
    lowered = link.url.lower()
    return lowered.startswith('file:')


def is_dir_url(link):
    """Return whether a file:// Link points at a directory.

    ``link`` must not have any other scheme but file://. Call is_file_url()
    first.
    """
    return os.path.isdir(url_to_path(link.url_without_fragment))


def _progress_indicator(iterable, *args, **kwargs):
    return iterable


def _download_url(resp, link, content_file, hashes):
    """Stream the body of ``resp`` into ``content_file``.

    Progress is reported for interactive, non-cached responses, and the
    streamed chunks are checked against ``hashes`` when any are supplied
    (raising on mismatch).
    """
    try:
        total_length = int(resp.headers['content-length'])
    except (ValueError, KeyError, TypeError):
        # Missing or malformed Content-Length: treat the size as unknown.
        total_length = 0

    # CacheControl marks cached responses with `from_cache`; plain
    # responses lack the attribute.
    cached_resp = getattr(resp, "from_cache", False)

    # Show progress only when logging at INFO or louder, the response is
    # not from the cache, and the download is large (>40KB) or of unknown
    # length.
    if logger.getEffectiveLevel() > logging.INFO:
        show_progress = False
    elif cached_resp:
        show_progress = False
    elif total_length > (40 * 1000):
        show_progress = True
    elif not total_length:
        show_progress = True
    else:
        show_progress = False

    show_url = link.show_url

    def resp_read(chunk_size):
        # Generator over raw (undecoded) chunks of the response body.
        try:
            # Special case for urllib3.
            for chunk in resp.raw.stream(
                    chunk_size,
                    # We use decode_content=False here because we don't
                    # want urllib3 to mess with the raw bytes we get
                    # from the server. If we decompress inside of
                    # urllib3 then we cannot verify the checksum
                    # because the checksum will be of the compressed
                    # file. This breakage will only occur if the
                    # server adds a Content-Encoding header, which
                    # depends on how the server was configured:
                    # - Some servers will notice that the file isn't a
                    #   compressible file and will leave the file alone
                    #   and with an empty Content-Encoding
                    # - Some servers will notice that the file is
                    #   already compressed and will leave the file
                    #   alone and will add a Content-Encoding: gzip
                    #   header
                    # - Some servers won't notice anything at all and
                    #   will take a file that's already been compressed
                    #   and compress it again and set the
                    #   Content-Encoding: gzip header
                    #
                    # By setting this not to decode automatically we
                    # hope to eliminate problems with the second case.
                    decode_content=False):
                yield chunk
        except AttributeError:
            # Standard file-like object.
            while True:
                chunk = resp.raw.read(chunk_size)
                if not chunk:
                    break
                yield chunk

    def written_chunks(chunks):
        # Write each chunk to disk as it passes through, re-yielding it so
        # that hash checking can consume the same stream.
        for chunk in chunks:
            content_file.write(chunk)
            yield chunk

    progress_indicator = _progress_indicator

    # Shorten PyPI URLs in log output; show the full URL for other hosts.
    if link.netloc == PyPI.netloc:
        url = show_url
    else:
        url = link.url_without_fragment

    if show_progress:  # We don't show progress on cached responses
        if total_length:
            logger.info("Downloading %s (%s)", url, format_size(total_length))
            progress_indicator = DownloadProgressBar(max=total_length).iter
        else:
            logger.info("Downloading %s", url)
            progress_indicator = DownloadProgressSpinner().iter
    elif cached_resp:
        logger.info("Using cached %s", url)
    else:
        logger.info("Downloading %s", url)

    logger.debug('Downloading from URL %s', link)

    # Build the lazy pipeline: read -> (progress) -> write-to-disk.
    downloaded_chunks = written_chunks(
        progress_indicator(
            resp_read(CONTENT_CHUNK_SIZE),
            CONTENT_CHUNK_SIZE
        )
    )
    # Consuming the generator performs the actual download; hash checking
    # doubles as the consumer when hashes were provided.
    if hashes:
        hashes.check_against_chunks(downloaded_chunks)
    else:
        consume(downloaded_chunks)


def _copy_file(filename, location, link):
    """Copy `filename` into `location`, named after `link.filename`.

    If the destination already exists, the user is asked whether to
    ignore it, overwrite it, back it up, or abort. Aborting exits the
    process with status -1.
    """
    copy = True
    download_location = os.path.join(location, link.filename)
    if os.path.exists(download_location):
        # Fix: the prompt previously read "(a)abort"; the accepted key is
        # 'a' so the hint should spell "(a)bort".
        response = ask_path_exists(
            'The file %s exists. (i)gnore, (w)ipe, (b)ackup, (a)bort' %
            display_path(download_location), ('i', 'w', 'b', 'a'))
        if response == 'i':
            copy = False
        elif response == 'w':
            logger.warning('Deleting %s', display_path(download_location))
            os.remove(download_location)
        elif response == 'b':
            dest_file = backup_dir(download_location)
            logger.warning(
                'Backing up %s to %s',
                display_path(download_location),
                display_path(dest_file),
            )
            shutil.move(download_location, dest_file)
        elif response == 'a':
            sys.exit(-1)
    if copy:
        shutil.copy(filename, download_location)
        logger.info('Saved %s', display_path(download_location))


def unpack_http_url(link, location, download_dir=None,
                    session=None, hashes=None):
    """Fetch `link` over HTTP (or reuse a prior download) and unpack it
    into `location`, optionally keeping the archive in `download_dir`."""
    if session is None:
        raise TypeError(
            "unpack_http_url() missing 1 required keyword argument: 'session'"
        )

    temp_dir = tempfile.mkdtemp('-unpack', 'pip-')

    # Reuse a previously downloaded (and hash-verified) archive if present.
    already_downloaded_path = None
    if download_dir:
        already_downloaded_path = _check_download_dir(link, download_dir,
                                                      hashes)

    if already_downloaded_path:
        from_path = already_downloaded_path
        content_type = mimetypes.guess_type(from_path)[0]
    else:
        # Nothing usable cached: download into the temporary directory.
        from_path, content_type = _download_http_url(link, session, temp_dir,
                                                     hashes)

    # Archives are always unpacked -- even for download-only runs --
    # because the unpacked tree is needed to read dependency metadata.
    unpack_file(from_path, location, content_type, link)

    # Keep a copy of the archive in download_dir when requested.
    if download_dir and not already_downloaded_path:
        _copy_file(from_path, download_dir, link)

    # Clean up the temporary download once it is no longer needed.
    if not already_downloaded_path:
        os.unlink(from_path)
    rmtree(temp_dir)


def unpack_file_url(link, location, download_dir=None, hashes=None):
    """Unpack a file: `link` into `location`.

    If `download_dir` is provided and the link points at a file, a copy
    of that file is also left in `download_dir`.
    """
    link_path = url_to_path(link.url_without_fragment)

    # Local directories are copied wholesale; there is nothing to download.
    if is_dir_url(link):
        if os.path.isdir(location):
            rmtree(location)
        shutil.copytree(link_path, location, symlinks=True)
        if download_dir:
            logger.info('Link is a directory, ignoring download_dir')
        return

    # If --require-hashes is off, `hashes` is either empty, the
    # link's embedded hash, or MissingHashes; it is required to
    # match. If --require-hashes is on, we are satisfied by any
    # hash in `hashes` matching: a URL-based or an option-based
    # one; no internet-sourced hash will be in `hashes`.
    if hashes:
        hashes.check_against_path(link_path)

    # Prefer a previously downloaded, already-verified copy if one exists.
    already_downloaded_path = None
    if download_dir:
        already_downloaded_path = _check_download_dir(link, download_dir,
                                                      hashes)

    from_path = already_downloaded_path or link_path

    content_type = mimetypes.guess_type(from_path)[0]

    # Always unpack: the tree is needed to read dependency metadata even
    # when only downloading.
    unpack_file(from_path, location, content_type, link)

    # Save a copy into download_dir unless it already came from there.
    if download_dir and not already_downloaded_path:
        _copy_file(from_path, download_dir, link)


def _copy_dist_from_dir(link_path, location):
    """Build an sdist from the project in `link_path` and unpack it into
    `location`.

    Invoked when user requests to install a local directory. E.g.:

        pip install .
        pip install ~/dev/git-repos/python-prompt-toolkit

    """

    # NOTE(review): an earlier comment here said the directory was copied
    # with `shutil.copytree`; the code below instead builds an sdist and
    # unpacks it (see https://github.com/pypa/pip/issues/2195 for the
    # history of this approach).

    # Clear out any previous build at `location`.
    if os.path.isdir(location):
        rmtree(location)

    # build an sdist: run "python -c <setuptools shim> sdist" in the
    # project directory, placing the archive directly into `location`.
    setup_py = 'setup.py'
    sdist_args = [sys.executable]
    sdist_args.append('-c')
    sdist_args.append(SETUPTOOLS_SHIM % setup_py)
    sdist_args.append('sdist')
    sdist_args += ['--dist-dir', location]
    logger.info('Running setup.py sdist for %s', link_path)

    with indent_log():
        call_subprocess(sdist_args, cwd=link_path, show_stdout=False)

    # `location` now contains exactly one entry -- the sdist archive --
    # which is unpacked in place so `location` holds the distribution tree.
    sdist = os.path.join(location, os.listdir(location)[0])
    logger.info('Unpacking sdist %s into %s', sdist, location)
    unpack_file(sdist, location, content_type=None, link=None)


class PipXmlrpcTransport(xmlrpc_client.Transport):
    """An `xmlrpclib.Transport` that performs its HTTP via a `PipSession`,
    so XML-RPC calls share pip's retry/auth/proxy configuration.
    """

    def __init__(self, index_url, session, use_datetime=False):
        xmlrpc_client.Transport.__init__(self, use_datetime)
        self._scheme = urllib_parse.urlparse(index_url).scheme
        self._session = session

    def request(self, host, handler, request_body, verbose=False):
        # Rebuild the full URL from the index scheme plus host/handler.
        url = urllib_parse.urlunparse(
            (self._scheme, host, handler, None, None, None)
        )
        try:
            response = self._session.post(
                url,
                data=request_body,
                headers={'Content-Type': 'text/xml'},
                stream=True,
            )
            response.raise_for_status()
            self.verbose = verbose
            # Stream the raw body straight into the XML-RPC parser.
            return self.parse_response(response.raw)
        except requests.HTTPError as exc:
            logger.critical(
                "HTTP error %s while getting %s",
                exc.response.status_code, url,
            )
            raise


def unpack_url(link, location, download_dir=None,
               only_download=False, session=None, hashes=None):
    """Unpack `link` into `location`, dispatching on the link type.

    VCS links are checked out (or exported) into `location`; file: and
    http(s): links are unpacked there, with a copy of the archive kept in
    `download_dir` when one is given. When `only_download` is set,
    `location` is marked for later deletion.

    :param hashes: A Hashes object, one of whose embedded hashes must match,
        or HashMismatch will be raised. If the Hashes is empty, no matches are
        required, and unhashable types of requirements (like VCS ones, which
        would ordinarily raise HashUnsupported) are allowed.
    """
    if is_vcs_url(link):
        # Non-editable VCS URL.
        unpack_vcs_link(link, location)
    elif is_file_url(link):
        # Local file or directory.
        unpack_file_url(link, location, download_dir, hashes=hashes)
    else:
        # Anything else is fetched over HTTP(S).
        if session is None:
            session = PipSession()
        unpack_http_url(
            link,
            location,
            download_dir,
            session,
            hashes=hashes
        )

    if only_download:
        write_delete_marker_file(location)


def sanitize_content_filename(filename):
    # type: (str) -> str
    """
    Sanitize the "filename" value from a Content-Disposition header.

    Dropping the directory portion prevents path traversal when a server
    supplies something like "../../evil.py".
    """
    _dirpart, basename = os.path.split(filename)
    return basename


def parse_content_disposition(content_disposition, default_filename):
    # type: (str, str) -> str
    """
    Extract a usable filename from a Content-Disposition header value,
    falling back to `default_filename` when the header carries none (or
    sanitizing leaves it empty).
    """
    _type, params = cgi.parse_header(content_disposition)
    raw_name = params.get('filename')
    if not raw_name:
        return default_filename
    # Strip directory components so a hostile server cannot traverse out
    # of the download directory with ".." parts.
    return sanitize_content_filename(raw_name) or default_filename


def _download_http_url(link, session, temp_dir, hashes):
    """Download link url into temp_dir using provided session.

    Returns ``(file_path, content_type)`` for the downloaded file, whose
    name is derived from the link, the Content-Disposition header, the
    Content-Type, or the final (post-redirect) URL, in that order.
    """
    # Strip any #fragment (e.g. an embedded hash) before requesting.
    target_url = link.url.split('#', 1)[0]
    try:
        resp = session.get(
            target_url,
            # We use Accept-Encoding: identity here because requests
            # defaults to accepting compressed responses. This breaks in
            # a variety of ways depending on how the server is configured.
            # - Some servers will notice that the file isn't a compressible
            #   file and will leave the file alone and with an empty
            #   Content-Encoding
            # - Some servers will notice that the file is already
            #   compressed and will leave the file alone and will add a
            #   Content-Encoding: gzip header
            # - Some servers won't notice anything at all and will take
            #   a file that's already been compressed and compress it again
            #   and set the Content-Encoding: gzip header
            # By setting this to request only the identity encoding We're
            # hoping to eliminate the third case. Hopefully there does not
            # exist a server which when given a file will notice it is
            # already compressed and that you're not asking for a
            # compressed file and will then decompress it before sending
            # because if that's the case I don't think it'll ever be
            # possible to make this work.
            headers={"Accept-Encoding": "identity"},
            stream=True,
        )
        resp.raise_for_status()
    except requests.HTTPError as exc:
        logger.critical(
            "HTTP error %s while getting %s", exc.response.status_code, link,
        )
        raise

    content_type = resp.headers.get('content-type', '')
    filename = link.filename  # fallback
    # Have a look at the Content-Disposition header for a better guess
    content_disposition = resp.headers.get('content-disposition')
    if content_disposition:
        filename = parse_content_disposition(content_disposition, filename)
    ext = splitext(filename)[1]
    # Still no extension: derive one from the Content-Type, then -- if the
    # request was redirected -- from the final response URL.
    if not ext:
        ext = mimetypes.guess_extension(content_type)
        if ext:
            filename += ext
    if not ext and link.url != resp.url:
        ext = os.path.splitext(resp.url)[1]
        if ext:
            filename += ext
    file_path = os.path.join(temp_dir, filename)
    with open(file_path, 'wb') as content_file:
        _download_url(resp, link, content_file, hashes)
    return file_path, content_type


def _check_download_dir(link, download_dir, hashes):
    """Return the path of a previously downloaded copy of `link` inside
    `download_dir`, or None when there is no (hash-valid) copy.

    A copy with a bad hash is deleted so it can be re-downloaded.
    """
    download_path = os.path.join(download_dir, link.filename)
    if not os.path.exists(download_path):
        return None
    # A candidate exists; it is only usable if its hash still matches.
    logger.info('File was already downloaded %s', download_path)
    if hashes:
        try:
            hashes.check_against_path(download_path)
        except HashMismatch:
            logger.warning(
                'Previously-downloaded file %s has bad hash. '
                'Re-downloading.',
                download_path
            )
            os.unlink(download_path)
            return None
    return download_path
baseparser.py000064400000024341151733136170007260 0ustar00"""Base option parser setup"""
from __future__ import absolute_import

import sys
import optparse
import os
import re
import textwrap
from distutils.util import strtobool

from pip._vendor.six import string_types
from pip._vendor.six.moves import configparser
from pip.locations import (
    legacy_config_file, config_basename, running_under_virtualenv,
    site_config_files
)
from pip.utils import appdirs, get_terminal_size


_environ_prefix_re = re.compile(r"^PIP_", re.I)


class PrettyHelpFormatter(optparse.IndentedHelpFormatter):
    """A prettier/less verbose help formatter for optparse."""

    def __init__(self, *args, **kwargs):
        # Help position must line up with __init__.parseopts.description.
        kwargs['max_help_position'] = 30
        kwargs['indent_increment'] = 1
        kwargs['width'] = get_terminal_size()[0] - 2
        optparse.IndentedHelpFormatter.__init__(self, *args, **kwargs)

    def format_option_strings(self, option):
        return self._format_option_strings(option, ' <%s>', ', ')

    def _format_option_strings(self, option, mvarfmt=' <%s>', optsep=', '):
        """
        Return a comma-separated list of option strings and metavars.

        :param option:  tuple of (short opt, long opt), e.g: ('-f', '--format')
        :param mvarfmt: metavar format string - evaluated as mvarfmt % metavar
        :param optsep:  separator
        """
        pieces = []
        if option._short_opts:
            pieces.append(option._short_opts[0])
        if option._long_opts:
            pieces.append(option._long_opts[0])
        if len(pieces) > 1:
            # Separate the short and long spellings.
            pieces.insert(1, optsep)
        if option.takes_value():
            metavar = option.metavar or option.dest.lower()
            pieces.append(mvarfmt % metavar.lower())
        return ''.join(pieces)

    def format_heading(self, heading):
        # The default "Options" heading is noise; drop it entirely.
        return '' if heading == 'Options' else heading + ':\n'

    def format_usage(self, usage):
        """
        Ensure there is only one newline between usage and the first heading
        if there is no description.
        """
        return '\nUsage: %s\n' % self.indent_lines(
            textwrap.dedent(usage), "  ")

    def format_description(self, description):
        # Take full control over the description formatting.
        if not description:
            return ''
        # Main parsers list subcommands here; others show a description.
        label = 'Commands' if hasattr(self.parser, 'main') else 'Description'
        # Doc strings may or may not carry leading newlines and trailing
        # whitespace; normalize both before re-indenting.
        cleaned = description.lstrip('\n').rstrip()
        cleaned = self.indent_lines(textwrap.dedent(cleaned), "  ")
        return '%s:\n%s\n' % (label, cleaned)

    def format_epilog(self, epilog):
        # Take full control over the epilog as well.
        return epilog if epilog else ''

    def indent_lines(self, text, indent):
        return "\n".join(indent + line for line in text.split('\n'))


class UpdatingDefaultsHelpFormatter(PrettyHelpFormatter):
    """Custom help formatter for use in ConfigOptionParser.

    Refreshes the parser's defaults before optparse expands them, so the
    help listing shows values picked up from config files and the
    environment.
    """

    def expand_default(self, option):
        parser = self.parser
        if parser is not None:
            parser._update_defaults(parser.defaults)
        return optparse.IndentedHelpFormatter.expand_default(self, option)


class CustomOptionParser(optparse.OptionParser):
    """OptionParser with group insertion and a flattened option listing."""

    def insert_option_group(self, idx, *args, **kwargs):
        """Insert an OptionGroup at a given position."""
        # add_option_group always appends; move the new group into place.
        group = self.add_option_group(*args, **kwargs)
        self.option_groups.remove(group)
        self.option_groups.insert(idx, group)
        return group

    @property
    def option_list_all(self):
        """Get a list of all options, including those in option groups."""
        options = list(self.option_list)
        for group in self.option_groups:
            options.extend(group.option_list)
        return options


class ConfigOptionParser(CustomOptionParser):
    """Custom option parser which updates its defaults by checking the
    configuration files and environmental variables"""

    # Isolated mode skips per-user config files and PIP_* env vars.
    isolated = False

    def __init__(self, *args, **kwargs):
        self.config = configparser.RawConfigParser()
        # `name` doubles as the config-file section read for this command.
        self.name = kwargs.pop('name')
        self.isolated = kwargs.pop("isolated", False)
        self.files = self.get_config_files()
        if self.files:
            self.config.read(self.files)
        assert self.name
        optparse.OptionParser.__init__(self, *args, **kwargs)

    def get_config_files(self):
        # the files returned by this method will be parsed in order with the
        # first files listed being overridden by later files in standard
        # ConfigParser fashion
        config_file = os.environ.get('PIP_CONFIG_FILE', False)
        if config_file == os.devnull:
            # PIP_CONFIG_FILE pointed at the null device disables all
            # configuration files.
            return []

        # at the base we have any site-wide configuration
        files = list(site_config_files)

        # per-user configuration next
        if not self.isolated:
            if config_file and os.path.exists(config_file):
                files.append(config_file)
            else:
                # This is the legacy config file, we consider it to be a lower
                # priority than the new file location.
                files.append(legacy_config_file)

                # This is the new config file, we consider it to be a higher
                # priority than the legacy file.
                files.append(
                    os.path.join(
                        appdirs.user_config_dir("pip"),
                        config_basename,
                    )
                )

        # finally virtualenv configuration first trumping others
        if running_under_virtualenv():
            venv_config_file = os.path.join(
                sys.prefix,
                config_basename,
            )
            if os.path.exists(venv_config_file):
                files.append(venv_config_file)

        return files

    def check_default(self, option, key, val):
        # Validate a config/environment-sourced value; a bad value is
        # reported and fatal (exit code 3).
        try:
            return option.check_value(key, val)
        except optparse.OptionValueError as exc:
            print("An error occurred during configuration: %s" % exc)
            sys.exit(3)

    def _update_defaults(self, defaults):
        """Updates the given defaults with values from the config files and
        the environ. Does a little special handling for certain types of
        options (lists)."""
        # Then go and look for the other sources of configuration:
        config = {}
        # 1. config files
        for section in ('global', self.name):
            config.update(
                self.normalize_keys(self.get_config_section(section))
            )
        # 2. environmental variables
        if not self.isolated:
            config.update(self.normalize_keys(self.get_environ_vars()))
        # Accumulate complex default state.
        self.values = optparse.Values(self.defaults)
        late_eval = set()
        # Then set the options with those values
        for key, val in config.items():
            # ignore empty values
            if not val:
                continue

            option = self.get_option(key)
            # Ignore options not present in this parser. E.g. non-globals put
            # in [global] by users that want them to apply to all applicable
            # commands.
            if option is None:
                continue

            if option.action in ('store_true', 'store_false', 'count'):
                # Boolean-ish actions accept "1"/"yes"/"true" etc.
                val = strtobool(val)
            elif option.action == 'append':
                # Append options take a whitespace-separated list of values.
                val = val.split()
                val = [self.check_default(option, key, v) for v in val]
            elif option.action == 'callback':
                # Callbacks may write into self.values; their results are
                # read back after the loop (via late_eval) rather than the
                # raw config value being used directly.
                late_eval.add(option.dest)
                opt_str = option.get_opt_string()
                val = option.convert_value(opt_str, val)
                # From take_action
                args = option.callback_args or ()
                kwargs = option.callback_kwargs or {}
                option.callback(option, opt_str, val, self, *args, **kwargs)
            else:
                val = self.check_default(option, key, val)

            defaults[option.dest] = val

        for key in late_eval:
            defaults[key] = getattr(self.values, key)
        self.values = None
        return defaults

    def normalize_keys(self, items):
        """Return a config dictionary with normalized keys regardless of
        whether the keys were specified in environment variables or in config
        files"""
        normalized = {}
        for key, val in items:
            key = key.replace('_', '-')
            if not key.startswith('--'):
                key = '--%s' % key  # only prefer long opts
            normalized[key] = val
        return normalized

    def get_config_section(self, name):
        """Get a section of a configuration"""
        if self.config.has_section(name):
            return self.config.items(name)
        return []

    def get_environ_vars(self):
        """Returns a generator with all environmental vars with prefix PIP_"""
        for key, val in os.environ.items():
            if _environ_prefix_re.search(key):
                yield (_environ_prefix_re.sub("", key).lower(), val)

    def get_default_values(self):
        """Overriding to make updating the defaults after instantiation of
        the option parser possible, _update_defaults() does the dirty work."""
        if not self.process_default_values:
            # Old, pre-Optik 1.5 behaviour.
            return optparse.Values(self.defaults)

        defaults = self._update_defaults(self.defaults.copy())  # ours
        for option in self._get_all_options():
            default = defaults.get(option.dest)
            if isinstance(default, string_types):
                # String defaults still need option-type conversion.
                opt_str = option.get_opt_string()
                defaults[option.dest] = option.check_value(opt_str, default)
        return optparse.Values(defaults)

    def error(self, msg):
        # Print usage to stderr and exit with status 2 on CLI errors.
        self.print_usage(sys.stderr)
        self.exit(2, "%s\n" % msg)
commands/search.py000064400000010626151733136170010200 0ustar00from __future__ import absolute_import

import logging
import sys
import textwrap

from pip.basecommand import Command, SUCCESS
from pip.compat import OrderedDict
from pip.download import PipXmlrpcTransport
from pip.models import PyPI
from pip.utils import get_terminal_size
from pip.utils.logging import indent_log
from pip.exceptions import CommandError
from pip.status_codes import NO_MATCHES_FOUND
from pip._vendor.packaging.version import parse as parse_version
from pip._vendor import pkg_resources
from pip._vendor.six.moves import xmlrpc_client


logger = logging.getLogger(__name__)


class SearchCommand(Command):
    """Search for PyPI packages whose name or summary contains <query>."""
    name = 'search'
    usage = """
      %prog [options] <query>"""
    summary = 'Search PyPI for packages.'

    def __init__(self, *args, **kw):
        super(SearchCommand, self).__init__(*args, **kw)
        opts = self.cmd_opts
        opts.add_option(
            '-i', '--index',
            dest='index',
            metavar='URL',
            default=PyPI.pypi_url,
            help='Base URL of Python Package Index (default %default)')

        self.parser.insert_option_group(0, opts)

    def run(self, options, args):
        """Query the index and pretty-print any matches."""
        if not args:
            raise CommandError('Missing required argument (search query).')

        raw_hits = self.search(args, options)
        grouped = transform_hits(raw_hits)

        # Only constrain output width when writing to a real terminal.
        if sys.stdout.isatty():
            width = get_terminal_size()[0]
        else:
            width = None

        print_results(grouped, terminal_width=width)
        return SUCCESS if raw_hits else NO_MATCHES_FOUND

    def search(self, query, options):
        """Run an XML-RPC 'search' call against the configured index."""
        index_url = options.index
        with self._build_session(options) as session:
            transport = PipXmlrpcTransport(index_url, session)
            pypi = xmlrpc_client.ServerProxy(index_url, transport)
            return pypi.search({'name': query, 'summary': query}, 'or')


def transform_hits(hits):
    """
    The list from pypi is really a list of versions. We want a list of
    packages with the list of versions stored inline. This converts the
    list from pypi into one we can use.

    Each returned entry is a dict with 'name', 'summary' (the summary of
    the highest version seen so far) and 'versions' (in feed order).
    First-seen order of package names is preserved.
    """
    packages = OrderedDict()
    for hit in hits:
        name = hit['name']
        summary = hit['summary']
        version = hit['version']

        # 'in packages' instead of 'in packages.keys()' — idiomatic and
        # avoids the extra view/list construction on Python 2.
        if name not in packages:
            packages[name] = {
                'name': name,
                'summary': summary,
                'versions': [version],
            }
        else:
            packages[name]['versions'].append(version)

            # if this is the highest version, replace summary and score
            if version == highest_version(packages[name]['versions']):
                packages[name]['summary'] = summary

    return list(packages.values())


def print_results(hits, name_column_width=None, terminal_width=None):
    """Log one line per hit ('name (version) - summary'), plus
    INSTALLED/LATEST details for packages present in the working set.

    :param hits: output of transform_hits() — dicts with 'name',
        'summary' and 'versions'.
    :param name_column_width: width of the name/version column; computed
        from the hits when None.
    :param terminal_width: when given, summaries are wrapped/indented to
        fit this width.
    """
    if not hits:
        return
    # BUGFIX: the version shown used to be hit['versions'][-1], i.e. the
    # last version listed by the XML-RPC feed, which is not guaranteed to
    # be the highest.  Use highest_version() so the displayed version
    # agrees with the 'latest' comparison performed below.
    if name_column_width is None:
        name_column_width = max([
            len(hit['name']) + len(highest_version(hit.get('versions', ['-'])))
            for hit in hits
        ]) + 4

    installed_packages = [p.project_name for p in pkg_resources.working_set]
    for hit in hits:
        name = hit['name']
        summary = hit['summary'] or ''
        latest = highest_version(hit.get('versions', ['-']))
        if terminal_width is not None:
            target_width = terminal_width - name_column_width - 5
            if target_width > 10:
                # wrap and indent summary to fit terminal
                summary = textwrap.wrap(summary, target_width)
                summary = ('\n' + ' ' * (name_column_width + 3)).join(summary)

        line = '%-*s - %s' % (name_column_width,
                              '%s (%s)' % (name, latest), summary)
        try:
            logger.info(line)
            if name in installed_packages:
                dist = pkg_resources.get_distribution(name)
                with indent_log():
                    if dist.version == latest:
                        logger.info('INSTALLED: %s (latest)', dist.version)
                    else:
                        logger.info('INSTALLED: %s', dist.version)
                        logger.info('LATEST:    %s', latest)
        # Some consoles cannot encode arbitrary package metadata; skip
        # the offending hit rather than abort the whole listing.
        except UnicodeEncodeError:
            pass


def highest_version(versions):
    """Return the element of *versions* that compares greatest under PEP 440
    ordering (first one wins on ties, like max())."""
    best = versions[0]
    for candidate in versions[1:]:
        if parse_version(candidate) > parse_version(best):
            best = candidate
    return best
commands/__pycache__/help.cpython-36.pyc000064400000002034151733136170014141 0ustar003

�Pf��@s<ddlmZddlmZmZddlmZGdd�de�ZdS)�)�absolute_import)�Command�SUCCESS)�CommandErrorc@s$eZdZdZdZdZdZdd�ZdS)�HelpCommandzShow help for commands�helpz
      %prog <command>zShow help for commands.c	Cs�ddlm}m}y|d}Wntk
r0tSX||krl||�}d|g}|r^|jd|�tdj|���||�}|jj	�tS)Nr)�
commands_dict�get_similar_commandszunknown command "%s"zmaybe you meant "%s"z - )
Zpip.commandsrr	�
IndexErrorr�appendr�join�parserZ
print_help)	�selfZoptions�argsrr	Zcmd_nameZguess�msgZcommand�r�/usr/lib/python3.6/help.py�runs


zHelpCommand.runN)�__name__�
__module__�__qualname__�__doc__�nameZusageZsummaryrrrrrrs
rN)Z
__future__rZpip.basecommandrrZpip.exceptionsrrrrrr�<module>scommands/__pycache__/search.cpython-36.opt-1.pyc000064400000010056151733136200015412 0ustar003

�Pf��@s�ddlmZddlZddlZddlZddlmZmZddlm	Z	ddl
mZddlm
Z
ddlmZddlmZdd	lmZdd
lmZddlmZddlmZdd
lmZeje�ZGdd�de�Z dd�Z!ddd�Z"dd�Z#dS)�)�absolute_importN)�Command�SUCCESS)�OrderedDict)�PipXmlrpcTransport)�PyPI)�get_terminal_size)�
indent_log)�CommandError)�NO_MATCHES_FOUND)�parse)�
pkg_resources)�
xmlrpc_clientcs<eZdZdZdZdZdZ�fdd�Zdd�Zd	d
�Z	�Z
S)�
SearchCommandz@Search for PyPI packages whose name or summary contains <query>.�searchz
      %prog [options] <query>zSearch PyPI for packages.cs@tt|�j||�|jjddddtjdd�|jjd|j�dS)Nz-iz--index�indexZURLz3Base URL of Python Package Index (default %default))�dest�metavar�default�helpr)	�superr�__init__Zcmd_optsZ
add_optionrZpypi_url�parserZinsert_option_group)�self�args�kw)�	__class__��/usr/lib/python3.6/search.pyrszSearchCommand.__init__cCsT|std��|}|j||�}t|�}d}tjj�r<t�d}t||d�|rPtSt	S)Nz)Missing required argument (search query).r)�terminal_width)
r
r�transform_hits�sys�stdout�isattyr�
print_resultsrr)r�optionsr�queryZ	pypi_hits�hitsrrrr�run)s

zSearchCommand.runcCsH|j}|j|��.}t||�}tj||�}|j||d�d�}|SQRXdS)N)�name�summary�or)rZ_build_sessionrrZServerProxyr)rr&r%Z	index_urlZsessionZ	transportZpypir'rrrr9s
zSearchCommand.search)�__name__�
__module__�__qualname__�__doc__r)Zusager*rr(r�
__classcell__rr)rrrsrcCs�t�}xv|D]n}|d}|d}|d}||j�krH|||gd�||<q||dj|�|t||d�kr|||d<qWt|j��S)z�
    The list from pypi is really a list of versions. We want a list of
    packages with the list of versions stored inline. This converts the
    list from pypi into one we can use.
    r)r*�version)r)r*�versionsr2)r�keys�append�highest_version�list�values)r'Zpackages�hitr)r*r1rrrr Bs
r cCsT|sdS|dkr&tdd�|D��d}dd�tjD�}�x|D�]}|d}|dpVd}|jdd	g�d}|dk	r�||d}|dkr�tj||�}d
d|dj|�}d|d||f|f}	yvtj|	�||k�r2tj	|�}
t
��Ht|d�}|
j|k�rtjd|
j�ntjd|
j�tjd|�WdQRXWq>t
k
�rJYq>Xq>WdS)NcSs.g|]&}t|d�t|jddg�d��qS)r)r2�-����)�len�get)�.0r8rrr�
<listcomp>csz!print_results.<locals>.<listcomp>�cSsg|]
}|j�qSr)Zproject_name)r>�prrrr?gsr)r*�r2r9r:��
�
� �z	%-*s - %sz%s (%s)zINSTALLED: %s (latest)z
INSTALLED: %sz
LATEST:    %sr;)�maxr
Zworking_setr=�textwrapZwrap�join�logger�infoZget_distributionr	r5r1�UnicodeEncodeError)r'Zname_column_widthrZinstalled_packagesr8r)r*r1Ztarget_width�lineZdistZlatestrrrr$^s>


r$cCst|td�S)N)�key)rH�
parse_version)r2rrrr5�sr5)NN)$Z
__future__rZloggingr!rIZpip.basecommandrrZ
pip.compatrZpip.downloadrZ
pip.modelsrZ	pip.utilsrZpip.utils.loggingr	Zpip.exceptionsr
Zpip.status_codesrZpip._vendor.packaging.versionrrPZpip._vendorr
Zpip._vendor.six.movesrZ	getLoggerr,rKrr r$r5rrrr�<module>s&
+
&commands/__pycache__/help.cpython-36.opt-1.pyc000064400000002034151733136200015072 0ustar003

�Pf��@s<ddlmZddlmZmZddlmZGdd�de�ZdS)�)�absolute_import)�Command�SUCCESS)�CommandErrorc@s$eZdZdZdZdZdZdd�ZdS)�HelpCommandzShow help for commands�helpz
      %prog <command>zShow help for commands.c	Cs�ddlm}m}y|d}Wntk
r0tSX||krl||�}d|g}|r^|jd|�tdj|���||�}|jj	�tS)Nr)�
commands_dict�get_similar_commandszunknown command "%s"zmaybe you meant "%s"z - )
Zpip.commandsrr	�
IndexErrorr�appendr�join�parserZ
print_help)	�selfZoptions�argsrr	Zcmd_nameZguess�msgZcommand�r�/usr/lib/python3.6/help.py�runs


zHelpCommand.runN)�__name__�
__module__�__qualname__�__doc__�nameZusageZsummaryrrrrrrs
rN)Z
__future__rZpip.basecommandrrZpip.exceptionsrrrrrr�<module>scommands/__pycache__/completion.cpython-36.pyc000064400000005022151733136200015354 0ustar003

�Pf�	�@sDddlmZddlZddlmZdZdddd�ZGd	d
�d
e�ZdS)�)�absolute_importN)�CommandzJ
# pip %(shell)s completion start%(script)s# pip %(shell)s completion end
z�
_pip_completion()
{
    COMPREPLY=( $( COMP_WORDS="${COMP_WORDS[*]}" \
                   COMP_CWORD=$COMP_CWORD \
                   PIP_AUTO_COMPLETE=1 $1 ) )
}
complete -o default -F _pip_completion pip
z�
function _pip_completion {
  local words cword
  read -Ac words
  read -cn cword
  reply=( $( COMP_WORDS="$words[*]" \
             COMP_CWORD=$(( cword-1 )) \
             PIP_AUTO_COMPLETE=1 $words[1] ) )
}
compctl -K _pip_completion pip
a
function __fish_complete_pip
    set -lx COMP_WORDS (commandline -o) ""
    set -lx COMP_CWORD (math (contains -i -- (commandline -t) $COMP_WORDS)-1)
    set -lx PIP_AUTO_COMPLETE 1
    string split \  -- (eval $COMP_WORDS[1])
end
complete -fa "(__fish_complete_pip)" -c pip
)�bash�zsh�fishcs0eZdZdZdZdZ�fdd�Zdd�Z�ZS)�CompletionCommandz3A helper command to be used for command completion.Z
completionz-A helper command used for command completion.csltt|�j||�|j}|jddddddd�|jdd	dd
ddd�|jdd
ddddd�|jjd|�dS)Nz--bashz-b�store_constr�shellzEmit completion code for bash)�action�const�dest�helpz--zshz-zrzEmit completion code for zshz--fishz-frzEmit completion code for fishr)�superr�__init__�cmd_optsZ
add_option�parserZinsert_option_group)�self�args�kwr)�	__class__�� /usr/lib/python3.6/completion.pyr-s*zCompletionCommand.__init__cCsbtj�}dd�t|�D�}|j|krHtj|jd�}tt||jd��ntjj	ddj
|��dS)z-Prints the completion code of the given shellcSsg|]}d|�qS)z--r)�.0r	rrr�
<listcomp>Jsz)CompletionCommand.run.<locals>.<listcomp>�)�scriptr	zERROR: You must pass %s
z or N)�COMPLETION_SCRIPTS�keys�sortedr	�get�print�BASE_COMPLETION�sys�stderr�write�join)rZoptionsrZshellsZ
shell_optionsrrrr�runGs
zCompletionCommand.run)	�__name__�
__module__�__qualname__�__doc__�nameZsummaryrr&�
__classcell__rr)rrr(s
r)Z
__future__rr"Zpip.basecommandrr!rrrrrr�<module>s
commands/__pycache__/wheel.cpython-36.pyc000064400000012430151733136200014310 0ustar003

�Pf1�@s�ddlmZddlZddlZddlZddlmZddlmZm	Z	ddl
mZddlm
Z
ddlmZddlmZdd	lmZmZdd
lmZeje�ZGdd�de�ZdS)
�)�absolute_importN)�RequirementCommand)�CommandError�PreviousBuildDirError)�RequirementSet)�import_or_raise)�BuildDirectory)�RemovedInPip10Warning)�
WheelCache�WheelBuilder)�
cmdoptionscs<eZdZdZdZdZdZ�fdd�Zdd�Zd	d
�Z	�Z
S)�WheelCommanda�
    Build Wheel archives for your requirements and dependencies.

    Wheel is a built-package format, and offers the advantage of not
    recompiling your software during every install. For more details, see the
    wheel docs: https://wheel.readthedocs.io/en/latest/

    Requirements: setuptools>=0.8, and wheel.

    'pip wheel' uses the bdist_wheel setuptools extension from the wheel
    package to build individual wheels.

    Zwheelz�
      %prog [options] <requirement specifier> ...
      %prog [options] -r <requirements file> ...
      %prog [options] [-e] <vcs project url> ...
      %prog [options] [-e] <local project path> ...
      %prog [options] <archive url/path> ...z$Build wheels from your requirements.csPtt|�j||�|j}|jddddtjdd�|jtj��|jtj	��|jtj
��|jtj��|jddd	d
dd�|jtj��|jtj
��|jtj��|jtj��|jtj��|jtj��|jtj��|jd
dd
d	dd�|jddddd�|jtj��|jtj��tjtj|j�}|jjd|�|jjd|�dS)Nz-wz--wheel-dir�	wheel_dir�dirzLBuild wheels into <dir>, where the default is the current working directory.)�dest�metavar�default�helpz--build-option�
build_options�options�appendz9Extra arguments to be supplied to 'setup.py bdist_wheel'.)rr�actionrz--global-option�global_optionszZExtra global options to be supplied to the setup.py call before the 'bdist_wheel' command.)rrrrz--pre�
store_trueFzYInclude pre-release and development versions. By default, pip only finds stable versions.)rrrr)�superr
�__init__�cmd_optsZ
add_option�os�curdirrZ	use_wheelZno_use_wheelZ	no_binaryZonly_binaryZconstraintsZeditableZrequirements�src�ignore_requires_pythonZno_deps�	build_dir�no_clean�require_hashesZmake_option_groupZindex_group�parserZinsert_option_group)�self�args�kwrZ
index_opts)�	__class__��/usr/lib/python3.6/wheel.pyr.sVzWheelCommand.__init__cCs.tdtd�tdtd�}t|d�s*td��dS)Nzwheel.bdist_wheelzM'pip wheel' requires the 'wheel' package. To fix this, run: pip install wheel�
pkg_resourceszp'pip wheel' requires setuptools >= 0.8 for dist-info support. To fix this, run: pip install --upgrade setuptoolsZDistInfoDistribution)rr�hasattr)r%r+r)r)r*�check_required_packageshs
z$WheelCommand.check_required_packagesc Cs�|j�tj|�tj|�|jr.tjdt�|jr@tjdt�|j	rRtjdt�|j
g|j}|jr|t
jddj|��g}|jr�tjj|j�|_tjj|j�|_|j|���}|j||�}|jp�|j}t|j|j�}t|j|d���}t||jd|jd|j|j|||j |j!d�}	|j"|	|||||j#|�|	j$�s6dSzZy6t%|	||j&�pJg|j'�pTgd	�}
|
j(��slt)d
��Wnt*k
�r�d|_�YnXWd|j�s�|	j+�XWdQRXWdQRXdS)Nz�--allow-external has been deprecated and will be removed in the future. Due to changes in the repository protocol, it no longer has any effect.z�--allow-all-external has been deprecated and will be removed in the future. Due to changes in the repository protocol, it no longer has any effect.z�--allow-unverified has been deprecated and will be removed in the future. Due to changes in the repository protocol, it no longer has any effect.zIgnoring indexes: %s�,)�deleteT)r!�src_dirZdownload_dir�ignore_dependenciesZignore_installedr �isolated�session�wheel_cacheZwheel_download_dirr#)rrz"Failed to build one or more wheels),r-rZresolve_wheel_no_use_binaryZcheck_install_build_globalZallow_external�warnings�warnr	Zallow_all_externalZallow_unverifiedZ	index_urlZextra_index_urlsZno_index�logger�debug�joinr!r�path�abspathr0Z_build_sessionZ_build_package_finderr"r
�	cache_dirZformat_controlrrr1r Z
isolated_moderr#Zpopulate_requirement_set�nameZhas_requirementsrrrZbuildrrZ
cleanup_files)r%rr&Z
index_urlsr3�finderZbuild_deleter4r!Zrequirement_set�wbr)r)r*�run|sv






zWheelCommand.run)�__name__�
__module__�__qualname__�__doc__r=ZusageZsummaryrr-r@�
__classcell__r)r))r(r*r
s
:r
)Z
__future__rZloggingrr5Zpip.basecommandrZpip.exceptionsrrZpip.reqrZ	pip.utilsrZpip.utils.buildrZpip.utils.deprecationr	Z	pip.wheelr
rZpiprZ	getLoggerrAr7r
r)r)r)r*�<module>s
commands/__pycache__/search.cpython-36.pyc000064400000010056151733136200014453 0ustar003

�Pf��@s�ddlmZddlZddlZddlZddlmZmZddlm	Z	ddl
mZddlm
Z
ddlmZddlmZdd	lmZdd
lmZddlmZddlmZdd
lmZeje�ZGdd�de�Z dd�Z!ddd�Z"dd�Z#dS)�)�absolute_importN)�Command�SUCCESS)�OrderedDict)�PipXmlrpcTransport)�PyPI)�get_terminal_size)�
indent_log)�CommandError)�NO_MATCHES_FOUND)�parse)�
pkg_resources)�
xmlrpc_clientcs<eZdZdZdZdZdZ�fdd�Zdd�Zd	d
�Z	�Z
S)�
SearchCommandz@Search for PyPI packages whose name or summary contains <query>.�searchz
      %prog [options] <query>zSearch PyPI for packages.cs@tt|�j||�|jjddddtjdd�|jjd|j�dS)Nz-iz--index�indexZURLz3Base URL of Python Package Index (default %default))�dest�metavar�default�helpr)	�superr�__init__Zcmd_optsZ
add_optionrZpypi_url�parserZinsert_option_group)�self�args�kw)�	__class__��/usr/lib/python3.6/search.pyrszSearchCommand.__init__cCsT|std��|}|j||�}t|�}d}tjj�r<t�d}t||d�|rPtSt	S)Nz)Missing required argument (search query).r)�terminal_width)
r
r�transform_hits�sys�stdout�isattyr�
print_resultsrr)r�optionsr�queryZ	pypi_hits�hitsrrrr�run)s

zSearchCommand.runcCsH|j}|j|��.}t||�}tj||�}|j||d�d�}|SQRXdS)N)�name�summary�or)rZ_build_sessionrrZServerProxyr)rr&r%Z	index_urlZsessionZ	transportZpypir'rrrr9s
zSearchCommand.search)�__name__�
__module__�__qualname__�__doc__r)Zusager*rr(r�
__classcell__rr)rrrsrcCs�t�}xv|D]n}|d}|d}|d}||j�krH|||gd�||<q||dj|�|t||d�kr|||d<qWt|j��S)z�
    The list from pypi is really a list of versions. We want a list of
    packages with the list of versions stored inline. This converts the
    list from pypi into one we can use.
    r)r*�version)r)r*�versionsr2)r�keys�append�highest_version�list�values)r'Zpackages�hitr)r*r1rrrr Bs
r cCsT|sdS|dkr&tdd�|D��d}dd�tjD�}�x|D�]}|d}|dpVd}|jdd	g�d}|dk	r�||d}|dkr�tj||�}d
d|dj|�}d|d||f|f}	yvtj|	�||k�r2tj	|�}
t
��Ht|d�}|
j|k�rtjd|
j�ntjd|
j�tjd|�WdQRXWq>t
k
�rJYq>Xq>WdS)NcSs.g|]&}t|d�t|jddg�d��qS)r)r2�-����)�len�get)�.0r8rrr�
<listcomp>csz!print_results.<locals>.<listcomp>�cSsg|]
}|j�qSr)Zproject_name)r>�prrrr?gsr)r*�r2r9r:��
�
� �z	%-*s - %sz%s (%s)zINSTALLED: %s (latest)z
INSTALLED: %sz
LATEST:    %sr;)�maxr
Zworking_setr=�textwrapZwrap�join�logger�infoZget_distributionr	r5r1�UnicodeEncodeError)r'Zname_column_widthrZinstalled_packagesr8r)r*r1Ztarget_width�lineZdistZlatestrrrr$^s>


r$cCst|td�S)N)�key)rH�
parse_version)r2rrrr5�sr5)NN)$Z
__future__rZloggingr!rIZpip.basecommandrrZ
pip.compatrZpip.downloadrZ
pip.modelsrZ	pip.utilsrZpip.utils.loggingr	Zpip.exceptionsr
Zpip.status_codesrZpip._vendor.packaging.versionrrPZpip._vendorr
Zpip._vendor.six.movesrZ	getLoggerr,rKrr r$r5rrrr�<module>s&
+
&commands/__pycache__/install.cpython-36.opt-1.pyc000064400000024302151733136200015612 0ustar003

�PfqG�@s(ddlmZddlZddlZddlZddlZddlZddlZddlZddlm	Z	yddl
Z
Wnek
rtdZ
YnXddlm
Z
ddlmZddlmZmZddlmZmZmZddlmZdd	lmZmZdd
lmZddlmZddl m!Z!dd
l"m#Z#m$Z$ej%e&�Z'Gdd�de�Z(dd�Z)dS)�)�absolute_importN)�path)�RequirementSet)�RequirementCommand)�virtualenv_no_global�distutils_scheme)�InstallationError�CommandError�PreviousBuildDirError)�
cmdoptions)�
ensure_dir�get_installed_version)�BuildDirectory)�RemovedInPip10Warning)�check_path_owner)�
WheelCache�WheelBuildercs4eZdZdZdZdZdZ�fdd�Zdd�Z�Z	S)	�InstallCommandaI
    Install packages from:

    - PyPI (and other indexes) using requirement specifiers.
    - VCS project urls.
    - Local project directories.
    - Local or remote source archives.

    pip also supports installing from "requirements files", which provide
    an easy way to specify a whole environment to be installed.
    �installa%
      %prog [options] <requirement specifier> [package-index-options] ...
      %prog [options] -r <requirements file> [package-index-options] ...
      %prog [options] [-e] <vcs project url> ...
      %prog [options] [-e] <local project path> ...
      %prog [options] <archive url/path> ...zInstall packages.c
s0tt|�j||�|j}|jtj��|jtj��|jtj��|jtj	��|jddddddd�|jddd	d
ddddd�|jtj
��|jd
ddddd�|jdddddgdd�|jddddd�|jdddddd�|jtj��|jtj��|jtj
��|jtj��|jd d!dd"d�|jd#d$dd%d�|jd&d'ddd(d�|jd)d*d+dd,d�|jd-d.ddd/d�|jd0dd1d2d3d4�|jd5d6d1d7d8�|jtj��|jtj��|jtj��|jtj��|jtj��|jtj��|jtj��tjtj|j�}|jjd9|�|jjd9|�dS):Nz-tz--target�
target_dir�dirz�Install packages into <dir>. By default this will not replace existing files/folders in <dir>. Use --upgrade to replace existing packages in <dir> with new versions.)�dest�metavar�default�helpz-dz
--downloadz--download-dirz--download-directory�download_dirz`Download packages into <dir> instead of installing them, regardless of what's already installed.z-Uz	--upgrade�upgrade�
store_truez�Upgrade all specified packages to the newest available version. The handling of dependencies depends on the upgrade-strategy used.)r�actionrz--upgrade-strategy�upgrade_strategyZeagerzonly-if-neededa3Determines how dependency upgrading should be handled. "eager" - dependencies are upgraded regardless of whether the currently installed version satisfies the requirements of the upgraded package(s). "only-if-needed" -  are upgraded only when they do not satisfy the requirements of the upgraded package(s).)rr�choicesrz--force-reinstall�force_reinstallzKWhen upgrading, reinstall all packages even if they are already up-to-date.z-Iz--ignore-installed�ignore_installedz5Ignore the installed packages (reinstalling instead).z--user�
use_user_sitez�Install to the Python user install directory for your platform. Typically ~/.local/, or %APPDATA%\Python on Windows. (See the Python documentation for site.USER_BASE for full details.)z--egg�as_eggz�Install packages as eggs, not 'flat', like pip normally does. This option is not about installing *from* eggs. (WARNING: Because this option overrides pip's normal install logic, requirements files may not behave as expected.)z--root�	root_pathz=Install everything relative to this alternate root directory.z--strip-file-prefix�strip_file_prefix�prefixz5Strip given prefix from script paths in wheel RECORD.z--prefix�prefix_pathzIInstallation prefix where lib, bin and other top-level folders are placedz	--compile�compileTzCompile py files to pyc)rrrrz--no-compileZstore_falsezDo not compile py files to pyc)rrrr)�superr�__init__�cmd_optsZ
add_optionrZconstraintsZeditableZrequirements�	build_dir�src�ignore_requires_pythonZno_deps�install_options�global_optionsZ	use_wheelZno_use_wheelZ	no_binaryZonly_binaryZpre�no_clean�require_hashesZmake_option_groupZindex_group�parserZinsert_option_group)�self�args�kwr,Z
index_opts)�	__class__��/usr/lib/python3.6/install.pyr+8s�zInstallCommand.__init__c&Cstj|�tj|�dd�}tj�dkrJ|�rJtjdtjt	j
d��|jr\tj
dt�|jrntj
dt�|jr�tj
dt�|jr�tj
dt�|jr�tj
d	t�d
|_|jr�tjj|j�|_tjj|j�|_|jp�g}|j�r|jr�td��t�r�td��|jd
�|jd�d}|j�rtd
|_tj �}tjj|j�|_tjj!|j��rftjj"|j��rftd��|jd|�|j#�p~g}|j$|���T}|j%||�}|j&�p�|j}	t'|j(|j)�}
|j(�r�t*|j(��r�tjd|j(�d|_(t+|j|	d����}t,||j|j|j-|j.|j|j|j/|j0|j1|j|||j2|j3|
|j4d�}|j5||||||j6|
�|j7�s\dS�z`�y:|j�s~t8�s~|j(�r�|j9|�nt:||ggd�}
|
j;d
d�|j�sr|j<|||j=|j|j>d�t?|j||j=|j|j3d�}t@|jAtBjCd�d�}g}xX|D]P}|j6}y"tD|j6|�}|�r*|d|7}WntEk
�rBYnX|j|��qWdjF|�}|�r�tjGd|�n(djFdd�|jHD��}|�r�tjGd|�WntIk
�r�d
|_&�YnXWd|j&�s�|jJ�XWdQRXWdQRX|j�rtK|j�g}tLd |d!�d"}tLd |d!�d#}tjj!|��r4|j|�tjj!|��rV||k�rV|j|�x�|D]�}x�tjM|�D]�}tjjF|j|�}tjj!|��r�|j-�s�tjd$|��qltjjN|��r�tjd%|��qltjj"|��r�tOjP|�n
tjQ|�tOjRtjjF||�|��qlW�q\WtOjP|�|S)&NcSs ttd�pttd�otjtjkS)NZreal_prefix�base_prefix)�hasattr�sysr;r'r9r9r9r:�is_venv�s
z#InstallCommand.run.<locals>.is_venvrzpWARNING: Running pip install with root privileges is generally not a good idea. Try `%s install --user` instead.z�--egg has been deprecated and will be removed in the future. This flag is mutually exclusive with large parts of pip, and actually using it invalidates pip's ability to manage the installation process.z�--allow-external has been deprecated and will be removed in the future. Due to changes in the repository protocol, it no longer has any effect.z�--allow-all-external has been deprecated and will be removed in the future. Due to changes in the repository protocol, it no longer has any effect.z�--allow-unverified has been deprecated and will be removed in the future. Due to changes in the repository protocol, it no longer has any effect.z�pip install --download has been deprecated and will be removed in the future. Pip now has a download command that should be used instead.TzVCan not combine '--user' and '--prefix' as they imply different installation locationszZCan not perform a '--user' install. User site-packages are not visible in this virtualenv.z--userz	--prefix=z=Target path exists but is not a directory, will not continue.z--home=z�The directory '%s' or its parent directory is not owned by the current user and caching wheels has been disabled. check the permissions and owner of that directory. If executing pip with sudo, you may want sudo's -H flag.)�delete)r-�src_dirrrrr$r"�ignore_dependenciesr/r!r#r�sessionZ	pycompile�isolated�wheel_cacher3)Z
build_optionsr1)Zautobuilding)�rootr'r&)�user�homerEr'rC�name)�key�-� zSuccessfully installed %scSsg|]
}|j�qSr9)rH)�.0�reqr9r9r:�
<listcomp>�sz&InstallCommand.run.<locals>.<listcomp>zSuccessfully downloaded %s�)rG�purelib�platlibzKTarget directory %s already exists. Specify --upgrade to force replacement.z�Target directory %s already exists and is a link. Pip will not automatically replace links, please remove if replacement is desired.)SrZresolve_wheel_no_use_binaryZcheck_install_build_global�os�getuid�loggerZwarningr�basenamer=�argvr$�warnings�warnrZallow_externalZallow_all_externalZallow_unverifiedrr"r-�abspathr@r0r#r(r	rr�appendr�tempfileZmkdtemp�exists�isdirr1Z_build_sessionZ_build_package_finderr2r�	cache_dirZformat_controlrrrrrrAr/r!r)Z
isolated_moder3Zpopulate_requirement_setrHZhas_requirements�wheelZ
prepare_filesrZbuildrr%r&�get_lib_location_guesses�sortedZsuccessfully_installed�operator�
attrgetterr
�	Exception�join�infoZsuccessfully_downloadedr
Z
cleanup_filesrr�listdir�islink�shutilZrmtree�removeZmove)r5Zoptionsr6r>r0Ztemp_target_dirr1rB�finderZbuild_deleterDr-Zrequirement_set�wbZpossible_lib_locationsZreqs�itemsrM�itemZinstalled_versionZ	installedZ
downloadedZlib_dir_listZpurelib_dirZplatlib_dirZlib_dirZtarget_item_dirr9r9r:�run�sP

















zInstallCommand.run)
�__name__�
__module__�__qualname__�__doc__rHZusageZsummaryr+ro�
__classcell__r9r9)r8r:r!srcOstd|�|�}|d|dgS)NrOrPrQ)rO)r)r6�kwargs�schemer9r9r:r`�sr`)*Z
__future__rZloggingrbrRr[rirWr=rr_�ImportErrorZpip.reqrZpip.basecommandrZ
pip.locationsrrZpip.exceptionsrr	r
ZpiprZ	pip.utilsrr
Zpip.utils.buildrZpip.utils.deprecationrZpip.utils.filesystemrZ	pip.wheelrrZ	getLoggerrprTrr`r9r9r9r:�<module>s8

,commands/__pycache__/freeze.cpython-36.pyc000064400000005000151733136200014457 0ustar003

�Pf�@sdddlmZddlZddlZddlmZddlmZddlm	Z	ddl
mZd
ZGdd�de�Z
dS)�)�absolute_importN)�stdlib_pkgs)�Command)�freeze)�
WheelCache�pip�
setuptools�
distribute�wheelcs8eZdZdZdZdZdZd
Z�fdd�Zdd	�Z	�Z
S)�
FreezeCommandzx
    Output installed packages in requirements format.

    packages are listed in a case-insensitive sorted order.
    rz
      %prog [options]z1Output installed packages in requirements format.�ext://sys.stderrc	s�tt|�j||�|jjddddgddd�|jjdd	d
dgddd�|jjd
dddddd�|jjdddddd�|jjdddddjt�d�|jjd|j�dS)Nz-rz
--requirement�requirements�append�filez}Use the order in the given requirements file and its comments when generating output. This option can be used multiple times.)�dest�action�default�metavar�helpz-fz--find-links�
find_linksZURLz<URL for finding packages, which will be added to the output.z-lz--local�local�
store_trueFzUIf in a virtualenv that has global access, do not output globally-installed packages.)rrrrz--user�userz,Only output packages installed in user-site.z--all�
freeze_allz,Do not skip these packages in the output: %sz, )rrrr)	�superr�__init__Zcmd_optsZ
add_option�join�DEV_PKGS�parserZinsert_option_group)�self�args�kw)�	__class__��/usr/lib/python3.6/freeze.pyrsDzFreezeCommand.__init__c
Cs�tjjt�t��}t|j|�}tt�}|js6|jt	�t
|j|j|j
|j|j|j||d�}x"tf|�D]}tjj|d�qfWdS)N)ZrequirementrZ
local_onlyZ	user_onlyZ
skip_regex�isolated�wheel_cache�skip�
)r�indexZ
FormatControl�setr�	cache_dirrr�updater�dictr
rrrZskip_requirements_regexZ
isolated_moder�sys�stdout�write)rZoptionsr Zformat_controlr&r'Z
freeze_kwargs�liner#r#r$�runEs 
zFreezeCommand.run)rr)�__name__�
__module__�__qualname__�__doc__�nameZusageZsummaryZlog_streamsrr2�
__classcell__r#r#)r"r$rs*r)rrr	r
)Z
__future__rr.rZ
pip.compatrZpip.basecommandrZpip.operations.freezerZ	pip.wheelrrrr#r#r#r$�<module>scommands/__pycache__/download.cpython-36.pyc000064400000012363151733136200015020 0ustar003

�Pf��@s�ddlmZddlZddlZddlmZddlmZddlm	Z	ddl
mZddlm
Z
ddlmZmZdd	lmZdd
lmZeje�ZGdd�de�ZdS)
�)�absolute_importN)�CommandError)�
FormatControl)�RequirementSet)�RequirementCommand)�
cmdoptions)�
ensure_dir�normalize_path)�BuildDirectory)�check_path_ownercs4eZdZdZdZdZdZ�fdd�Zdd�Z�Z	S)	�DownloadCommandaL
    Download packages from:

    - PyPI (and other indexes) using requirement specifiers.
    - VCS project urls.
    - Local project directories.
    - Local or remote source archives.

    pip also supports downloading from "requirements files", which provide
    an easy way to specify a whole environment to be downloaded.
    Zdownloada%
      %prog [options] <requirement specifier> [package-index-options] ...
      %prog [options] -r <requirements file> [package-index-options] ...
      %prog [options] [-e] <vcs project url> ...
      %prog [options] [-e] <local project path> ...
      %prog [options] <archive url/path> ...zDownload packages.c
s\tt|�j||�|j}|jtj��|jtj��|jtj��|jtj	��|jtj
��|jtj��|jtj��|jtj
��|jtj��|jtj��|jtj��|jtj��|jddddddtjdd�|jd	d
d
ddd�|jdd
d
ddd�|jdddddd�|jdddddd�tjtj|j�}|jjd|�|jjd|�dS)Nz-dz--destz--destination-dirz--destination-directory�download_dir�dirzDownload packages into <dir>.)�dest�metavar�default�helpz
--platform�platformz`Only download wheels compatible with <platform>. Defaults to the platform of the running system.z--python-version�python_versiona&Only download wheels compatible with Python interpreter version <version>. If not specified, then the current system interpreter minor version is used. A major version (e.g. '2') can be specified to match all minor revs of that major version.  A minor version (e.g. '34') can also be specified.z--implementation�implementationz�Only download wheels compatible with Python implementation <implementation>, e.g. 'pp', 'jy', 'cp',  or 'ip'. If not specified, then the current interpreter implementation is used.  Use 'py' to force implementation-agnostic wheels.z--abi�abiz�Only download wheels compatible with Python abi <abi>, e.g. 'pypy_41'.  If not specified, then the current interpreter abi tag is used.  Generally you will need to specify --implementation, --platform, and --python-version when using this option.r)�superr�__init__�cmd_optsZ
add_optionrZconstraintsZeditableZrequirements�	build_dirZno_depsZglobal_optionsZ	no_binaryZonly_binary�srcZpre�no_clean�require_hashes�os�curdirZmake_option_groupZnon_deprecated_index_group�parserZinsert_option_group)�self�args�kwrZ
index_opts)�	__class__��/usr/lib/python3.6/download.pyr*sbzDownloadCommand.__init__cCs�d|_|jr|jg}nd}t|j|j|j|jg�}tt�tdg��}|rZ|j|krZt	d��t
jj|j
�|_
t|j�|_t|j�|j|���}|j|||j||j|jd�}|jp�|j}|jr�t|j�r�tjd|j�d|_t|j|d���}	t|	|j
|jd|j||j|jd�}
|j|
|||||jd�|
j �s2dS|
j!|�dj"d	d
�|
j#D��}|�rdtj$d|�|j�st|
j%�WdQRXWdQRX|
S)NTz:all:z�--only-binary=:all: must be set and --no-binary must not be set (or must be set to :none:) when restricting platform and interpreter constraints using --python-version, --platform, --abi, or --implementation.)�options�sessionr�python_versionsrrz�The directory '%s' or its parent directory is not owned by the current user and caching wheels has been disabled. check the permissions and owner of that directory. If executing pip with sudo, you may want sudo's -H flag.)�delete)r�src_dirr
�ignore_installed�ignore_dependenciesr(�isolatedr� cSsg|]
}|j�qSr%)�name)�.0Zreqr%r%r&�
<listcomp>�sz'DownloadCommand.run.<locals>.<listcomp>zSuccessfully downloaded %s)&r,r�anyrrrr�setZformat_controlrr�path�abspathr+r	r
rZ_build_sessionZ_build_package_finderrr�	cache_dirr�loggerZwarningr
rr-Z
isolated_moderZpopulate_requirement_setr0Zhas_requirementsZ
prepare_files�joinZsuccessfully_downloaded�infoZ
cleanup_files)r!r'r"r)Zdist_restriction_setZbinary_onlyr(�finderZbuild_deleterZrequirement_setZ
downloadedr%r%r&�run{sx






zDownloadCommand.run)
�__name__�
__module__�__qualname__�__doc__r0ZusageZsummaryrr<�
__classcell__r%r%)r$r&rsQr)Z
__future__rZloggingrZpip.exceptionsrZ	pip.indexrZpip.reqrZpip.basecommandrZpiprZ	pip.utilsrr	Zpip.utils.buildr
Zpip.utils.filesystemrZ	getLoggerr=r8rr%r%r%r&�<module>s
commands/__pycache__/hash.cpython-36.opt-1.pyc000064400000003555151733136200015076 0ustar003

�Pf=�@s~ddlmZddlZddlZddlZddlmZddlmZddl	m
Z
ddlmZm
Z
eje�ZGdd�de�Zd	d
�ZdS)�)�absolute_importN)�Command)�ERROR)�read_chunks)�
FAVORITE_HASH�
STRONG_HASHEScs4eZdZdZdZdZdZ�fdd�Zdd�Z�Z	S)	�HashCommandz�
    Compute a hash of a local package archive.

    These can be used with --hash in a requirements file to do repeatable
    installs.

    �hashz%prog [options] <file> ...z#Compute hashes of package archives.c
sJtt|�j||�|jjdddtdtddjt�d�|jj	d|j�dS)	Nz-az--algorithm�	algorithmZstorez$The hash algorithm to use: one of %sz, )�dest�choices�action�default�helpr)
�superr�__init__Zcmd_optsZ
add_optionrr�join�parserZinsert_option_group)�self�args�kw)�	__class__��/usr/lib/python3.6/hash.pyrszHashCommand.__init__cCsD|s|jjtj�tS|j}x"|D]}tjd||t||��q"WdS)Nz%s:
--hash=%s:%s)	rZprint_usage�sys�stderrrr
�logger�info�
_hash_of_file)rZoptionsrr
�pathrrr�run(s
zHashCommand.run)
�__name__�
__module__�__qualname__�__doc__�nameZusageZsummaryrr �
__classcell__rr)rrrsrc
CsDt|d��,}tj|�}xt|�D]}|j|�q WWdQRX|j�S)z!Return the hash digest of a file.�rbN)�open�hashlib�newr�updateZ	hexdigest)rr
�archiver	�chunkrrrr3s

r)Z
__future__rr)ZloggingrZpip.basecommandrZpip.status_codesrZ	pip.utilsrZpip.utils.hashesrrZ	getLoggerr!rrrrrrr�<module>s
#commands/__pycache__/download.cpython-36.opt-1.pyc000064400000012363151733136200015757 0ustar003

�Pf��@s�ddlmZddlZddlZddlmZddlmZddlm	Z	ddl
mZddlm
Z
ddlmZmZdd	lmZdd
lmZeje�ZGdd�de�ZdS)
�)�absolute_importN)�CommandError)�
FormatControl)�RequirementSet)�RequirementCommand)�
cmdoptions)�
ensure_dir�normalize_path)�BuildDirectory)�check_path_ownercs4eZdZdZdZdZdZ�fdd�Zdd�Z�Z	S)	�DownloadCommandaL
    Download packages from:

    - PyPI (and other indexes) using requirement specifiers.
    - VCS project urls.
    - Local project directories.
    - Local or remote source archives.

    pip also supports downloading from "requirements files", which provide
    an easy way to specify a whole environment to be downloaded.
    Zdownloada%
      %prog [options] <requirement specifier> [package-index-options] ...
      %prog [options] -r <requirements file> [package-index-options] ...
      %prog [options] [-e] <vcs project url> ...
      %prog [options] [-e] <local project path> ...
      %prog [options] <archive url/path> ...zDownload packages.c
s\tt|�j||�|j}|jtj��|jtj��|jtj��|jtj	��|jtj
��|jtj��|jtj��|jtj
��|jtj��|jtj��|jtj��|jtj��|jddddddtjdd�|jd	d
d
ddd�|jdd
d
ddd�|jdddddd�|jdddddd�tjtj|j�}|jjd|�|jjd|�dS)Nz-dz--destz--destination-dirz--destination-directory�download_dir�dirzDownload packages into <dir>.)�dest�metavar�default�helpz
--platform�platformz`Only download wheels compatible with <platform>. Defaults to the platform of the running system.z--python-version�python_versiona&Only download wheels compatible with Python interpreter version <version>. If not specified, then the current system interpreter minor version is used. A major version (e.g. '2') can be specified to match all minor revs of that major version.  A minor version (e.g. '34') can also be specified.z--implementation�implementationz�Only download wheels compatible with Python implementation <implementation>, e.g. 'pp', 'jy', 'cp',  or 'ip'. If not specified, then the current interpreter implementation is used.  Use 'py' to force implementation-agnostic wheels.z--abi�abiz�Only download wheels compatible with Python abi <abi>, e.g. 'pypy_41'.  If not specified, then the current interpreter abi tag is used.  Generally you will need to specify --implementation, --platform, and --python-version when using this option.r)�superr�__init__�cmd_optsZ
add_optionrZconstraintsZeditableZrequirements�	build_dirZno_depsZglobal_optionsZ	no_binaryZonly_binary�srcZpre�no_clean�require_hashes�os�curdirZmake_option_groupZnon_deprecated_index_group�parserZinsert_option_group)�self�args�kwrZ
index_opts)�	__class__��/usr/lib/python3.6/download.pyr*sbzDownloadCommand.__init__cCs�d|_|jr|jg}nd}t|j|j|j|jg�}tt�tdg��}|rZ|j|krZt	d��t
jj|j
�|_
t|j�|_t|j�|j|���}|j|||j||j|jd�}|jp�|j}|jr�t|j�r�tjd|j�d|_t|j|d���}	t|	|j
|jd|j||j|jd�}
|j|
|||||jd�|
j �s2dS|
j!|�dj"d	d
�|
j#D��}|�rdtj$d|�|j�st|
j%�WdQRXWdQRX|
S)NTz:all:z�--only-binary=:all: must be set and --no-binary must not be set (or must be set to :none:) when restricting platform and interpreter constraints using --python-version, --platform, --abi, or --implementation.)�options�sessionr�python_versionsrrz�The directory '%s' or its parent directory is not owned by the current user and caching wheels has been disabled. check the permissions and owner of that directory. If executing pip with sudo, you may want sudo's -H flag.)�delete)r�src_dirr
�ignore_installed�ignore_dependenciesr(�isolatedr� cSsg|]
}|j�qSr%)�name)�.0Zreqr%r%r&�
<listcomp>�sz'DownloadCommand.run.<locals>.<listcomp>zSuccessfully downloaded %s)&r,r�anyrrrr�setZformat_controlrr�path�abspathr+r	r
rZ_build_sessionZ_build_package_finderrr�	cache_dirr�loggerZwarningr
rr-Z
isolated_moderZpopulate_requirement_setr0Zhas_requirementsZ
prepare_files�joinZsuccessfully_downloaded�infoZ
cleanup_files)r!r'r"r)Zdist_restriction_setZbinary_onlyr(�finderZbuild_deleterZrequirement_setZ
downloadedr%r%r&�run{sx






zDownloadCommand.run)
�__name__�
__module__�__qualname__�__doc__r0ZusageZsummaryrr<�
__classcell__r%r%)r$r&rsQr)Z
__future__rZloggingrZpip.exceptionsrZ	pip.indexrZpip.reqrZpip.basecommandrZpiprZ	pip.utilsrr	Zpip.utils.buildr
Zpip.utils.filesystemrZ	getLoggerr=r8rr%r%r%r&�<module>s
commands/__pycache__/__init__.cpython-36.pyc000064400000003726151733136200014753 0ustar003

�Pf��@s&dZddlmZddlmZddlmZddlmZddl	m
Z
ddlmZddl
mZdd	lmZdd
lmZddlmZddlmZdd
lmZddlmZejeejee
je
ejeejeejeejeejeejeejeejeejeiZeeeeeeeeee
eegZddd�Zdd�Zdd�Z dS)z%
Package containing all pip commands
�)�absolute_import)�CompletionCommand)�DownloadCommand)�
FreezeCommand)�HashCommand)�HelpCommand)�ListCommand)�CheckCommand)�
SearchCommand)�ShowCommand)�InstallCommand)�UninstallCommand)�WheelCommandTccs:|rttt�}ntj�}x|D]\}}||jfVqWdS)z5Yields sorted (command name, command summary) tuples.N)�_sort_commands�
commands_dict�commands_order�itemsZsummary)ZorderedZcmditems�nameZ
command_class�r�/usr/lib/python3.6/__init__.py�
get_summaries4s
rcCs6ddlm}|j�}||tj��}|r.|dSdSdS)zCommand name auto-correct.r)�get_close_matchesFN)Zdifflibr�lowerr�keys)rrZclose_commandsrrr�get_similar_commands@srcs�fdd�}t|j�|d�S)Ncs(y�j|d�Stk
r"dSXdS)N��)�index�
ValueError)�key)�orderrr�keyfnOsz_sort_commands.<locals>.keyfn)r)�sortedr)Zcmddictr r!r)r rrNsrN)T)!�__doc__Z
__future__rZpip.commands.completionrZpip.commands.downloadrZpip.commands.freezerZpip.commands.hashrZpip.commands.helprZpip.commands.listrZpip.commands.checkr	Zpip.commands.searchr
Zpip.commands.showrZpip.commands.installrZpip.commands.uninstallr
Zpip.commands.wheelrrrrrrrrrrr�<module>sP

commands/__pycache__/show.cpython-36.pyc000064400000012301151733136200014161 0ustar003

�Pf�@s�ddlmZddlmZddlZddlZddlmZddlm	Z	m
Z
ddlmZddl
mZeje�ZGdd	�d	e�Zd
d�Zdd
d�ZdS)�)�absolute_import)�
FeedParserN)�Command)�SUCCESS�ERROR)�
pkg_resources)�canonicalize_namecs4eZdZdZdZdZdZ�fdd�Zdd�Z�Z	S)	�ShowCommandz6Show information about one or more installed packages.Zshowz$
      %prog [options] <package> ...z*Show information about installed packages.cs>tt|�j||�|jjddddddd�|jjd|j�dS)	Nz-fz--files�files�
store_trueFz7Show the full list of installed files for each package.)�dest�action�default�helpr)�superr	�__init__Zcmd_optsZ
add_option�parserZinsert_option_group)�self�args�kw)�	__class__��/usr/lib/python3.6/show.pyrszShowCommand.__init__cCs8|stjd�tS|}t|�}t||j|jd�s4tStS)Nz.ERROR: Please provide a package name or names.)�
list_files�verbose)�loggerZwarningr�search_packages_info�
print_resultsr
rr)rZoptionsr�query�resultsrrr�run"s
zShowCommand.run)
�__name__�
__module__�__qualname__�__doc__�nameZusage�summaryrr �
__classcell__rr)rrr	sr	c#si�xtjD]}|�t|j�<qWdd�|D�}�x�fdd�|D�D�]Ή�j�j�jdd��j�D�d�}d}d}t�tj�rވj	d�rȈj
d�}dd�|D�}�fd	d�|D�}�fd
d�|D�}�j	d�r܈jd�}nP�j	d��r�j
d�}�fd
d�|D�}�fdd�|D�}�j	d��r.�jd�}�j	d��rL�j
d�}||d<�j	d��r�x,�j
d�D]}	|	j��rd|	j�|d<P�qdWt
�}
|
j|�|
j�}xdD]}|j|�||<�q�Wg}
x4|j�D](}	|	jd��r�|
j|	td�d���q�W|
|d<|�rt|�|d<|VqFWdS)z�
    Gather details from installed distributions. Print distribution name,
    version, location, and installed files. Installed files requires a
    pip generated 'installed-files.txt' in the distributions '.egg-info'
    directory.
    cSsg|]}t|��qSr)r)�.0r%rrr�
<listcomp>:sz(search_packages_info.<locals>.<listcomp>csg|]}|�kr�|�qSrr)r(Zpkg)�	installedrrr)<scSsg|]
}|j�qSr)�project_name)r(Zdeprrrr)As)r%�version�location�requiresNZRECORDcSsg|]}|jd�d�qS)�,r)�split)r(�lrrrr)Iscsg|]}tjj�j|��qSr)�os�path�joinr-)r(�p)�distrrr)Jscsg|]}tjj|�j��qSr)r2r3�relpathr-)r(r5)r6rrr)KsZMETADATAzinstalled-files.txtcsg|]}tjj�j|��qSr)r2r3r4Zegg_info)r(r5)r6rrr)Sscsg|]}tjj|�j��qSr)r2r3r7r-)r(r5)r6rrr)TszPKG-INFOzentry_points.txt�entry_pointsZ	INSTALLER�	installer�metadata-versionr&�	home-page�author�author-email�licensezClassifier: �classifiersr
)r:r&r;r<r=r>)rZworking_setrr+r,r-r.�
isinstanceZDistInfoDistributionZhas_metadataZget_metadata_linesZget_metadata�striprZfeed�close�get�
splitlines�
startswith�append�len�sorted)rr5Zquery_names�packageZ	file_listZmetadata�lines�pathsr8�lineZfeed_parserZ
pkg_info_dict�keyr?r)r6r*rr/s^







rFc	Cs�d}�x�t|�D�]�\}}d}|dkr0tjd�tjd|jdd��tjd|jd	d��tjd
|jdd��tjd|jd
d��tjd|jdd��tjd|jdd��tjd|jdd��tjd|jdd��tjddj|jdg���|�rxtjd|jdd��tjd|jdd��tjd�x"|jdg�D]}tjd|��q0Wtjd �x&|jd!g�D]}tjd|j���q^W|rtjd"�x&|jd#g�D]}tjd|j���q�Wd#|krtjd$�qW|S)%zD
    Print the informations from installed distributions found.
    FTrz---zName: %sr%�zVersion: %sr,zSummary: %sr&z
Home-page: %sz	home-pagez
Author: %sr<zAuthor-email: %szauthor-emailzLicense: %sr>zLocation: %sr-zRequires: %sz, r.zMetadata-Version: %szmetadata-versionz
Installer: %sr9zClassifiers:r?z  %sz
Entry-points:r8zFiles:r
z!Cannot locate installed-files.txt)�	enumerater�inforCr4rA)	Z
distributionsrrZresults_printed�ir6Z
classifier�entryrLrrrrxs>



r)FF)Z
__future__rZemail.parserrZloggingr2Zpip.basecommandrZpip.status_codesrrZpip._vendorrZpip._vendor.packaging.utilsrZ	getLoggerr!rr	rrrrrr�<module>s
Icommands/__pycache__/install.cpython-36.pyc000064400000024302151733136200014653 0ustar003

�PfqG�@s(ddlmZddlZddlZddlZddlZddlZddlZddlZddlm	Z	yddl
Z
Wnek
rtdZ
YnXddlm
Z
ddlmZddlmZmZddlmZmZmZddlmZdd	lmZmZdd
lmZddlmZddl m!Z!dd
l"m#Z#m$Z$ej%e&�Z'Gdd�de�Z(dd�Z)dS)�)�absolute_importN)�path)�RequirementSet)�RequirementCommand)�virtualenv_no_global�distutils_scheme)�InstallationError�CommandError�PreviousBuildDirError)�
cmdoptions)�
ensure_dir�get_installed_version)�BuildDirectory)�RemovedInPip10Warning)�check_path_owner)�
WheelCache�WheelBuildercs4eZdZdZdZdZdZ�fdd�Zdd�Z�Z	S)	�InstallCommandaI
    Install packages from:

    - PyPI (and other indexes) using requirement specifiers.
    - VCS project urls.
    - Local project directories.
    - Local or remote source archives.

    pip also supports installing from "requirements files", which provide
    an easy way to specify a whole environment to be installed.
    �installa%
      %prog [options] <requirement specifier> [package-index-options] ...
      %prog [options] -r <requirements file> [package-index-options] ...
      %prog [options] [-e] <vcs project url> ...
      %prog [options] [-e] <local project path> ...
      %prog [options] <archive url/path> ...zInstall packages.c
s0tt|�j||�|j}|jtj��|jtj��|jtj��|jtj	��|jddddddd�|jddd	d
ddddd�|jtj
��|jd
ddddd�|jdddddgdd�|jddddd�|jdddddd�|jtj��|jtj��|jtj
��|jtj��|jd d!dd"d�|jd#d$dd%d�|jd&d'ddd(d�|jd)d*d+dd,d�|jd-d.ddd/d�|jd0dd1d2d3d4�|jd5d6d1d7d8�|jtj��|jtj��|jtj��|jtj��|jtj��|jtj��|jtj��tjtj|j�}|jjd9|�|jjd9|�dS):Nz-tz--target�
target_dir�dirz�Install packages into <dir>. By default this will not replace existing files/folders in <dir>. Use --upgrade to replace existing packages in <dir> with new versions.)�dest�metavar�default�helpz-dz
--downloadz--download-dirz--download-directory�download_dirz`Download packages into <dir> instead of installing them, regardless of what's already installed.z-Uz	--upgrade�upgrade�
store_truez�Upgrade all specified packages to the newest available version. The handling of dependencies depends on the upgrade-strategy used.)r�actionrz--upgrade-strategy�upgrade_strategyZeagerzonly-if-neededa3Determines how dependency upgrading should be handled. "eager" - dependencies are upgraded regardless of whether the currently installed version satisfies the requirements of the upgraded package(s). "only-if-needed" -  are upgraded only when they do not satisfy the requirements of the upgraded package(s).)rr�choicesrz--force-reinstall�force_reinstallzKWhen upgrading, reinstall all packages even if they are already up-to-date.z-Iz--ignore-installed�ignore_installedz5Ignore the installed packages (reinstalling instead).z--user�
use_user_sitez�Install to the Python user install directory for your platform. Typically ~/.local/, or %APPDATA%\Python on Windows. (See the Python documentation for site.USER_BASE for full details.)z--egg�as_eggz�Install packages as eggs, not 'flat', like pip normally does. This option is not about installing *from* eggs. (WARNING: Because this option overrides pip's normal install logic, requirements files may not behave as expected.)z--root�	root_pathz=Install everything relative to this alternate root directory.z--strip-file-prefix�strip_file_prefix�prefixz5Strip given prefix from script paths in wheel RECORD.z--prefix�prefix_pathzIInstallation prefix where lib, bin and other top-level folders are placedz	--compile�compileTzCompile py files to pyc)rrrrz--no-compileZstore_falsezDo not compile py files to pyc)rrrr)�superr�__init__�cmd_optsZ
add_optionrZconstraintsZeditableZrequirements�	build_dir�src�ignore_requires_pythonZno_deps�install_options�global_optionsZ	use_wheelZno_use_wheelZ	no_binaryZonly_binaryZpre�no_clean�require_hashesZmake_option_groupZindex_group�parserZinsert_option_group)�self�args�kwr,Z
index_opts)�	__class__��/usr/lib/python3.6/install.pyr+8s�zInstallCommand.__init__c&Cstj|�tj|�dd�}tj�dkrJ|�rJtjdtjt	j
d��|jr\tj
dt�|jrntj
dt�|jr�tj
dt�|jr�tj
dt�|jr�tj
d	t�d
|_|jr�tjj|j�|_tjj|j�|_|jp�g}|j�r|jr�td��t�r�td��|jd
�|jd�d}|j�rtd
|_tj �}tjj|j�|_tjj!|j��rftjj"|j��rftd��|jd|�|j#�p~g}|j$|���T}|j%||�}|j&�p�|j}	t'|j(|j)�}
|j(�r�t*|j(��r�tjd|j(�d|_(t+|j|	d����}t,||j|j|j-|j.|j|j|j/|j0|j1|j|||j2|j3|
|j4d�}|j5||||||j6|
�|j7�s\dS�z`�y:|j�s~t8�s~|j(�r�|j9|�nt:||ggd�}
|
j;d
d�|j�sr|j<|||j=|j|j>d�t?|j||j=|j|j3d�}t@|jAtBjCd�d�}g}xX|D]P}|j6}y"tD|j6|�}|�r*|d|7}WntEk
�rBYnX|j|��qWdjF|�}|�r�tjGd|�n(djFdd�|jHD��}|�r�tjGd|�WntIk
�r�d
|_&�YnXWd|j&�s�|jJ�XWdQRXWdQRX|j�rtK|j�g}tLd |d!�d"}tLd |d!�d#}tjj!|��r4|j|�tjj!|��rV||k�rV|j|�x�|D]�}x�tjM|�D]�}tjjF|j|�}tjj!|��r�|j-�s�tjd$|��qltjjN|��r�tjd%|��qltjj"|��r�tOjP|�n
tjQ|�tOjRtjjF||�|��qlW�q\WtOjP|�|S)&NcSs ttd�pttd�otjtjkS)NZreal_prefix�base_prefix)�hasattr�sysr;r'r9r9r9r:�is_venv�s
z#InstallCommand.run.<locals>.is_venvrzpWARNING: Running pip install with root privileges is generally not a good idea. Try `%s install --user` instead.z�--egg has been deprecated and will be removed in the future. This flag is mutually exclusive with large parts of pip, and actually using it invalidates pip's ability to manage the installation process.z�--allow-external has been deprecated and will be removed in the future. Due to changes in the repository protocol, it no longer has any effect.z�--allow-all-external has been deprecated and will be removed in the future. Due to changes in the repository protocol, it no longer has any effect.z�--allow-unverified has been deprecated and will be removed in the future. Due to changes in the repository protocol, it no longer has any effect.z�pip install --download has been deprecated and will be removed in the future. Pip now has a download command that should be used instead.TzVCan not combine '--user' and '--prefix' as they imply different installation locationszZCan not perform a '--user' install. User site-packages are not visible in this virtualenv.z--userz	--prefix=z=Target path exists but is not a directory, will not continue.z--home=z�The directory '%s' or its parent directory is not owned by the current user and caching wheels has been disabled. check the permissions and owner of that directory. If executing pip with sudo, you may want sudo's -H flag.)�delete)r-�src_dirrrrr$r"�ignore_dependenciesr/r!r#r�sessionZ	pycompile�isolated�wheel_cacher3)Z
build_optionsr1)Zautobuilding)�rootr'r&)�user�homerEr'rC�name)�key�-� zSuccessfully installed %scSsg|]
}|j�qSr9)rH)�.0�reqr9r9r:�
<listcomp>�sz&InstallCommand.run.<locals>.<listcomp>zSuccessfully downloaded %s�)rG�purelib�platlibzKTarget directory %s already exists. Specify --upgrade to force replacement.z�Target directory %s already exists and is a link. Pip will not automatically replace links, please remove if replacement is desired.)SrZresolve_wheel_no_use_binaryZcheck_install_build_global�os�getuid�loggerZwarningr�basenamer=�argvr$�warnings�warnrZallow_externalZallow_all_externalZallow_unverifiedrr"r-�abspathr@r0r#r(r	rr�appendr�tempfileZmkdtemp�exists�isdirr1Z_build_sessionZ_build_package_finderr2r�	cache_dirZformat_controlrrrrrrAr/r!r)Z
isolated_moder3Zpopulate_requirement_setrHZhas_requirements�wheelZ
prepare_filesrZbuildrr%r&�get_lib_location_guesses�sortedZsuccessfully_installed�operator�
attrgetterr
�	Exception�join�infoZsuccessfully_downloadedr
Z
cleanup_filesrr�listdir�islink�shutilZrmtree�removeZmove)r5Zoptionsr6r>r0Ztemp_target_dirr1rB�finderZbuild_deleterDr-Zrequirement_set�wbZpossible_lib_locationsZreqs�itemsrM�itemZinstalled_versionZ	installedZ
downloadedZlib_dir_listZpurelib_dirZplatlib_dirZlib_dirZtarget_item_dirr9r9r:�run�sP

















zInstallCommand.run)
�__name__�
__module__�__qualname__�__doc__rHZusageZsummaryr+ro�
__classcell__r9r9)r8r:r!srcOstd|�|�}|d|dgS)NrOrPrQ)rO)r)r6�kwargs�schemer9r9r:r`�sr`)*Z
__future__rZloggingrbrRr[rirWr=rr_�ImportErrorZpip.reqrZpip.basecommandrZ
pip.locationsrrZpip.exceptionsrr	r
ZpiprZ	pip.utilsrr
Zpip.utils.buildrZpip.utils.deprecationrZpip.utils.filesystemrZ	pip.wheelrrZ	getLoggerrprTrr`r9r9r9r:�<module>s8

,commands/__pycache__/completion.cpython-36.opt-1.pyc000064400000005022151733136200016313 0ustar003

�Pf�	�@sDddlmZddlZddlmZdZdddd�ZGd	d
�d
e�ZdS)�)�absolute_importN)�CommandzJ
# pip %(shell)s completion start%(script)s# pip %(shell)s completion end
z�
_pip_completion()
{
    COMPREPLY=( $( COMP_WORDS="${COMP_WORDS[*]}" \
                   COMP_CWORD=$COMP_CWORD \
                   PIP_AUTO_COMPLETE=1 $1 ) )
}
complete -o default -F _pip_completion pip
z�
function _pip_completion {
  local words cword
  read -Ac words
  read -cn cword
  reply=( $( COMP_WORDS="$words[*]" \
             COMP_CWORD=$(( cword-1 )) \
             PIP_AUTO_COMPLETE=1 $words[1] ) )
}
compctl -K _pip_completion pip
a
function __fish_complete_pip
    set -lx COMP_WORDS (commandline -o) ""
    set -lx COMP_CWORD (math (contains -i -- (commandline -t) $COMP_WORDS)-1)
    set -lx PIP_AUTO_COMPLETE 1
    string split \  -- (eval $COMP_WORDS[1])
end
complete -fa "(__fish_complete_pip)" -c pip
)�bash�zsh�fishcs0eZdZdZdZdZ�fdd�Zdd�Z�ZS)�CompletionCommandz3A helper command to be used for command completion.Z
completionz-A helper command used for command completion.csltt|�j||�|j}|jddddddd�|jdd	dd
ddd�|jdd
ddddd�|jjd|�dS)Nz--bashz-b�store_constr�shellzEmit completion code for bash)�action�const�dest�helpz--zshz-zrzEmit completion code for zshz--fishz-frzEmit completion code for fishr)�superr�__init__�cmd_optsZ
add_option�parserZinsert_option_group)�self�args�kwr)�	__class__�� /usr/lib/python3.6/completion.pyr-s*zCompletionCommand.__init__cCsbtj�}dd�t|�D�}|j|krHtj|jd�}tt||jd��ntjj	ddj
|��dS)z-Prints the completion code of the given shellcSsg|]}d|�qS)z--r)�.0r	rrr�
<listcomp>Jsz)CompletionCommand.run.<locals>.<listcomp>�)�scriptr	zERROR: You must pass %s
z or N)�COMPLETION_SCRIPTS�keys�sortedr	�get�print�BASE_COMPLETION�sys�stderr�write�join)rZoptionsrZshellsZ
shell_optionsrrrr�runGs
zCompletionCommand.run)	�__name__�
__module__�__qualname__�__doc__�nameZsummaryrr&�
__classcell__rr)rrr(s
r)Z
__future__rr"Zpip.basecommandrr!rrrrrr�<module>s
commands/__pycache__/list.cpython-36.pyc000064400000022677151733136200014175 0ustar003

�Pfi,�@s�ddlmZddlZddlZddlZyddlmZWn ek
rTddlmZYnXddl	m
Z
ddlmZddl
mZddlmZdd	lmZmZdd
lmZddlmZmZeje�ZGdd
�d
e�Zdd�Zdd�Zdd�ZdS)�)�absolute_importN)�zip_longest)�izip_longest)�six)�Command)�CommandError)�
PackageFinder)�get_installed_distributions�dist_is_editable)�RemovedInPip10Warning)�make_option_group�index_groupcs|eZdZdZdZdZdZ�fdd�Zdd�Zd	d
�Z	dd�Z
d
d�Zdd�Zdd�Z
dd�Zdd�Zdd�Zdd�Z�ZS)�ListCommandzt
    List installed packages, including editables.

    Packages are listed in a case-insensitive sorted order.
    �listz
      %prog [options]zList installed packages.cs�tt|�j||�|j}|jdddddd�|jddddd	d�|jd
ddddd�|jd
ddddd�|jjdddddd�|jddddd�|jdddd$dd�|jddd d!d"�tt|j�}|jjd#|�|jjd#|�dS)%Nz-oz
--outdated�
store_trueFzList outdated packages)�action�default�helpz-uz
--uptodatezList uptodate packagesz-ez
--editablezList editable projects.z-lz--localzSIf in a virtualenv that has global access, do not list globally-installed packages.z--user�userz,Only output packages installed in user-site.)�destrrrz--prezYInclude pre-release and development versions. By default, pip only finds stable versions.z--formatZstore�list_format�legacy�columns�freeze�jsonzJSelect the output format among: legacy (default), columns, freeze or json.)rr�choicesrz--not-required�not_requiredz>List packages that are not dependencies of installed packages.)rrrr)rrrr)	�superr�__init__�cmd_optsZ
add_optionrr
�parserZinsert_option_group)�self�args�kwrZ
index_opts)�	__class__��/usr/lib/python3.6/list.pyr#s^zListCommand.__init__cCst|j||j|j|j|d�S)zK
        Create a package finder appropriate to this list command.
        )�
find_links�
index_urlsZallow_all_prereleases�
trusted_hosts�process_dependency_links�session)rr'�prer)r*)r!�optionsr(r+r%r%r&�_build_package_findercsz!ListCommand._build_package_findercCs�|jrtjdt�|jr$tjdt�|jr6tjdt�|jdkrLtjdt�|jr`|jr`t	d��t
|j|j|j
d�}|jr�|j||�}n|jr�|j||�}|jr�|j||�}|j||�dS)Nz�--allow-external has been deprecated and will be removed in the future. Due to changes in the repository protocol, it no longer has any effect.z�--allow-all-external has been deprecated and will be removed in the future. Due to changes in the repository protocol, it no longer has any effect.z�--allow-unverified has been deprecated and will be removed in the future. Due to changes in the repository protocol, it no longer has any effect.z�The default format will switch to columns in the future. You can use --format=(legacy|columns) (or define a format=(legacy|columns) in your pip.conf under the [list] section) to disable this warning.z5Options --outdated and --uptodate cannot be combined.)Z
local_onlyZ	user_onlyZeditables_only)Zallow_external�warnings�warnrZallow_all_externalZallow_unverifiedr�outdatedZuptodaterr	ZlocalrZeditable�get_outdated�get_uptodater�get_not_required�output_package_listing)r!r-r"�packagesr%r%r&�runps<

zListCommand.runcCsdd�|j||�D�S)NcSsg|]}|j|jkr|�qSr%)�latest_version�parsed_version)�.0�distr%r%r&�
<listcomp>�sz,ListCommand.get_outdated.<locals>.<listcomp>)�iter_packages_latest_infos)r!r6r-r%r%r&r2�szListCommand.get_outdatedcCsdd�|j||�D�S)NcSsg|]}|j|jkr|�qSr%)r8r9)r:r;r%r%r&r<�sz,ListCommand.get_uptodate.<locals>.<listcomp>)r=)r!r6r-r%r%r&r3�szListCommand.get_uptodatecsBt��x$|D]}�jdd�|j�D��qWt�fdd�|D��S)Ncss|]}|jVqdS)N)�key)r:Zrequirementr%r%r&�	<genexpr>�sz/ListCommand.get_not_required.<locals>.<genexpr>c3s|]}|j�kr|VqdS)N)r>)r:Zpkg)�dep_keysr%r&r?�s)�set�updateZrequires)r!r6r-r;r%)r@r&r4�s
zListCommand.get_not_requiredccs�|jg|j}|jr*tjddj|��g}g}x&|D]}|jd�r4|j|jd��q4W|j	|���}|j
|||�}|j|�xn|D]f}d}|j|j
�}	|js�dd�|	D�}	|	s�q�t|	|jd�}
|
j}|
jjr�d}nd	}||_||_|Vq�WWdQRXdS)
NzIgnoring indexes: %s�,zdependency_links.txt�unknowncSsg|]}|jjs|�qSr%)�versionZ
is_prerelease)r:�	candidater%r%r&r<�sz:ListCommand.iter_packages_latest_infos.<locals>.<listcomp>)r>ZwheelZsdist)Z	index_urlZextra_index_urlsZno_index�logger�debug�joinZhas_metadata�extendZget_metadata_linesZ_build_sessionr.Zadd_dependency_linksZfind_all_candidatesr>r,�maxZ_candidate_sort_keyrE�locationZis_wheelr8�latest_filetype)r!r6r-r(Zdependency_linksr;r+�finder�typZall_candidatesZbest_candidateZremote_versionr%r%r&r=�s8




z&ListCommand.iter_packages_latest_infoscCs0t|�rd|j|j|jfSd|j|jfSdS)Nz%s (%s, %s)z%s (%s))r
�project_namerErL)r!r;r%r%r&�
output_legacy�s
zListCommand.output_legacycCsd|j|�|j|jfS)Nz%s - Latest: %s [%s])rQr8rM)r!r;r%r%r&�output_legacy_latest�sz ListCommand.output_legacy_latestcCs�t|dd�d�}|jdkr:|r:t||�\}}|j||�n~|jdkrfxr|D]}tjd|j|j�qJWnR|jdkr�tjt||��n6x4|D],}|j	r�tj|j
|��q�tj|j|��q�WdS)NcSs
|jj�S)N)rP�lower)r;r%r%r&�<lambda>�sz4ListCommand.output_package_listing.<locals>.<lambda>)r>rrz%s==%sr)�sortedr�format_for_columns�output_package_listing_columnsrG�inforPrE�format_for_jsonr1rRrQ)r!r6r-�data�headerr;r%r%r&r5�s



z"ListCommand.output_package_listingcCsht|�dkr|jd|�t|�\}}t|�dkrL|jddjtdd�|���x|D]}tj|�qRWdS)Nr�� cSsd|S)N�-r%)�xr%r%r&rT
sz<ListCommand.output_package_listing_columns.<locals>.<lambda>)�len�insert�tabulaterI�maprGrX)r!rZr[Zpkg_strings�sizes�valr%r%r&rWs
z*ListCommand.output_package_listing_columns)�__name__�
__module__�__qualname__�__doc__�nameZusageZsummaryrr.r7r2r3r4r=rQrRr5rW�
__classcell__r%r%)r$r&rs@
6'
rcCs�t|�dkst�dgtdd�|D��}x |D]}dd�t||�D�}q.Wg}x0|D](}djdd�t||�D��}|j|�qTW||fS)Nrcss|]}t|�VqdS)N)r`)r:r_r%r%r&r?sztabulate.<locals>.<genexpr>cSs"g|]\}}t|tt|����qSr%)rKr`�str)r:�s�cr%r%r&r<sztabulate.<locals>.<listcomp>r]cSs*g|]"\}}|dk	r"t|�j|�nd�qS)N�)rl�ljust)r:rmrnr%r%r&r<s)r`�AssertionErrorrKrrI�append)�valsrd�row�resultZdisplayr%r%r&rbs


rbcCs�|j}|rddddg}nddg}g}tdd�|D��r@|jd�xR|D]J}|j|jg}|rr|j|j�|j|j�t|�r�|j|j�|j|�qFW||fS)z_
    Convert the package data into something usable
    by output_package_listing_columns.
    ZPackageZVersionZLatestZTypecss|]}t|�VqdS)N)r
)r:r_r%r%r&r?2sz%format_for_columns.<locals>.<genexpr>ZLocation)	r1�anyrrrPrEr8rMr
rL)Zpkgsr-Zrunning_outdatedr[rZZprojrtr%r%r&rV%s 

rVcCsZg}xJ|D]B}|jtj|j�d�}|jrBtj|j�|d<|j|d<|j|�q
Wtj	|�S)N)rjrEr8rM)
rPrZ	text_typerEr1r8rMrrr�dumps)r6r-rZr;rXr%r%r&rYFs

rY) Z
__future__rrZloggingr/�	itertoolsr�ImportErrorrZpip._vendorrZpip.basecommandrZpip.exceptionsrZ	pip.indexrZ	pip.utilsr	r
Zpip.utils.deprecationrZpip.cmdoptionsrr
Z	getLoggerrfrGrrbrVrYr%r%r%r&�<module>s(
|!commands/__pycache__/check.cpython-36.opt-1.pyc000064400000002337151733136200015225 0ustar003

�Pff�@sJddlZddlmZddlmZddlmZeje�Z	Gdd�de�Z
dS)�N)�Command)�check_requirements)�get_installed_distributionsc@s$eZdZdZdZdZdZdd�ZdS)�CheckCommandz7Verify installed packages have compatible dependencies.Zcheckz
      %prog [options]c
	Cs�tdfd�}t|�\}}x~|D]v}d|j|jf}x*|j|g�D]}tjd|j|j|j�q@Wx4|j|g�D]$\}}	tjd|j|j||	j|	j�qlWqW|s�|r�dStjd�dS)NF)Z
local_only�skipz%s==%sz*%s %s requires %s, which is not installed.z-%s %s has requirement %s, but you have %s %s.�zNo broken requirements found.)rrZproject_name�version�get�logger�info)
�selfZoptions�argsZdistsZmissing_reqs_dictZincompatible_reqs_dictZdist�keyZrequirement�actual�r�/usr/lib/python3.6/check.py�runs 

zCheckCommand.runN)�__name__�
__module__�__qualname__�__doc__�nameZusageZsummaryrrrrrrs
r)ZloggingZpip.basecommandrZpip.operations.checkrZ	pip.utilsrZ	getLoggerrr
rrrrr�<module>s

commands/__pycache__/uninstall.cpython-36.opt-1.pyc000064400000004752151733136200016164 0ustar003

�PfD�@s`ddlmZddlZddlmZddlmZmZmZddl	m
Z
ddlmZGdd�de
�Z
dS)	�)�absolute_importN)�
WheelCache)�InstallRequirement�RequirementSet�parse_requirements)�Command)�InstallationErrorcs4eZdZdZdZdZdZ�fdd�Zdd�Z�Z	S)	�UninstallCommandaB
    Uninstall packages.

    pip is able to uninstall most installed packages. Known exceptions are:

    - Pure distutils packages installed with ``python setup.py install``, which
      leave behind no metadata to determine what files were installed.
    - Script wrappers installed by ``python setup.py develop``.
    �	uninstallzU
      %prog [options] <package> ...
      %prog [options] -r <requirements file> ...zUninstall packages.c	sVtt|�j||�|jjddddgddd�|jjdd	d
ddd
�|jjd|j�dS)Nz-rz
--requirement�requirements�append�filezjUninstall all the packages listed in the given requirements file.  This option can be used multiple times.)�dest�action�default�metavar�helpz-yz--yes�yes�
store_truez2Don't ask for confirmation of uninstall deletions.)rrrr)�superr	�__init__Zcmd_optsZ
add_option�parserZinsert_option_group)�self�args�kw)�	__class__��/usr/lib/python3.6/uninstall.pyrszUninstallCommand.__init__c
Cs�|j|���}tjjt�t��}t|j|�}tddd|j||d�}x$|D]}|j	t
j||j|d��qFWx2|jD](}x"t
||||d�D]}	|j	|	�q�WqnW|js�tdt|jd���|j|jd�WdQRXdS)N)Z	build_dirZsrc_dirZdownload_dir�isolated�session�wheel_cache)rr )�optionsrr zLYou must give at least one requirement to %(name)s (see "pip help %(name)s"))�name)Zauto_confirm)Z_build_session�pip�indexZ
FormatControl�setr�	cache_dirrZ
isolated_modeZadd_requirementrZ	from_linerrZhas_requirementsr�dictr"r
r)
rr!rrZformat_controlr Zrequirement_setr"�filenameZreqrrr�run-s6
zUninstallCommand.run)
�__name__�
__module__�__qualname__�__doc__r"ZusageZsummaryrr)�
__classcell__rr)rrr	
s	r	)Z
__future__rr#Z	pip.wheelrZpip.reqrrrZpip.basecommandrZpip.exceptionsrr	rrrr�<module>scommands/__pycache__/list.cpython-36.opt-1.pyc000064400000022630151733136200015121 0ustar003

�Pfi,�@s�ddlmZddlZddlZddlZyddlmZWn ek
rTddlmZYnXddl	m
Z
ddlmZddl
mZddlmZdd	lmZmZdd
lmZddlmZmZeje�ZGdd
�d
e�Zdd�Zdd�Zdd�ZdS)�)�absolute_importN)�zip_longest)�izip_longest)�six)�Command)�CommandError)�
PackageFinder)�get_installed_distributions�dist_is_editable)�RemovedInPip10Warning)�make_option_group�index_groupcs|eZdZdZdZdZdZ�fdd�Zdd�Zd	d
�Z	dd�Z
d
d�Zdd�Zdd�Z
dd�Zdd�Zdd�Zdd�Z�ZS)�ListCommandzt
    List installed packages, including editables.

    Packages are listed in a case-insensitive sorted order.
    �listz
      %prog [options]zList installed packages.cs�tt|�j||�|j}|jdddddd�|jddddd	d�|jd
ddddd�|jd
ddddd�|jjdddddd�|jddddd�|jdddd$dd�|jddd d!d"�tt|j�}|jjd#|�|jjd#|�dS)%Nz-oz
--outdated�
store_trueFzList outdated packages)�action�default�helpz-uz
--uptodatezList uptodate packagesz-ez
--editablezList editable projects.z-lz--localzSIf in a virtualenv that has global access, do not list globally-installed packages.z--user�userz,Only output packages installed in user-site.)�destrrrz--prezYInclude pre-release and development versions. By default, pip only finds stable versions.z--formatZstore�list_format�legacy�columns�freeze�jsonzJSelect the output format among: legacy (default), columns, freeze or json.)rr�choicesrz--not-required�not_requiredz>List packages that are not dependencies of installed packages.)rrrr)rrrr)	�superr�__init__�cmd_optsZ
add_optionrr
�parserZinsert_option_group)�self�args�kwrZ
index_opts)�	__class__��/usr/lib/python3.6/list.pyr#s^zListCommand.__init__cCst|j||j|j|j|d�S)zK
        Create a package finder appropriate to this list command.
        )�
find_links�
index_urlsZallow_all_prereleases�
trusted_hosts�process_dependency_links�session)rr'�prer)r*)r!�optionsr(r+r%r%r&�_build_package_findercsz!ListCommand._build_package_findercCs�|jrtjdt�|jr$tjdt�|jr6tjdt�|jdkrLtjdt�|jr`|jr`t	d��t
|j|j|j
d�}|jr�|j||�}n|jr�|j||�}|jr�|j||�}|j||�dS)Nz�--allow-external has been deprecated and will be removed in the future. Due to changes in the repository protocol, it no longer has any effect.z�--allow-all-external has been deprecated and will be removed in the future. Due to changes in the repository protocol, it no longer has any effect.z�--allow-unverified has been deprecated and will be removed in the future. Due to changes in the repository protocol, it no longer has any effect.z�The default format will switch to columns in the future. You can use --format=(legacy|columns) (or define a format=(legacy|columns) in your pip.conf under the [list] section) to disable this warning.z5Options --outdated and --uptodate cannot be combined.)Z
local_onlyZ	user_onlyZeditables_only)Zallow_external�warnings�warnrZallow_all_externalZallow_unverifiedr�outdatedZuptodaterr	ZlocalrZeditable�get_outdated�get_uptodater�get_not_required�output_package_listing)r!r-r"�packagesr%r%r&�runps<

zListCommand.runcCsdd�|j||�D�S)NcSsg|]}|j|jkr|�qSr%)�latest_version�parsed_version)�.0�distr%r%r&�
<listcomp>�sz,ListCommand.get_outdated.<locals>.<listcomp>)�iter_packages_latest_infos)r!r6r-r%r%r&r2�szListCommand.get_outdatedcCsdd�|j||�D�S)NcSsg|]}|j|jkr|�qSr%)r8r9)r:r;r%r%r&r<�sz,ListCommand.get_uptodate.<locals>.<listcomp>)r=)r!r6r-r%r%r&r3�szListCommand.get_uptodatecsBt��x$|D]}�jdd�|j�D��qWt�fdd�|D��S)Ncss|]}|jVqdS)N)�key)r:Zrequirementr%r%r&�	<genexpr>�sz/ListCommand.get_not_required.<locals>.<genexpr>c3s|]}|j�kr|VqdS)N)r>)r:Zpkg)�dep_keysr%r&r?�s)�set�updateZrequires)r!r6r-r;r%)r@r&r4�s
zListCommand.get_not_requiredccs�|jg|j}|jr*tjddj|��g}g}x&|D]}|jd�r4|j|jd��q4W|j	|���}|j
|||�}|j|�xn|D]f}d}|j|j
�}	|js�dd�|	D�}	|	s�q�t|	|jd�}
|
j}|
jjr�d}nd	}||_||_|Vq�WWdQRXdS)
NzIgnoring indexes: %s�,zdependency_links.txt�unknowncSsg|]}|jjs|�qSr%)�versionZ
is_prerelease)r:�	candidater%r%r&r<�sz:ListCommand.iter_packages_latest_infos.<locals>.<listcomp>)r>ZwheelZsdist)Z	index_urlZextra_index_urlsZno_index�logger�debug�joinZhas_metadata�extendZget_metadata_linesZ_build_sessionr.Zadd_dependency_linksZfind_all_candidatesr>r,�maxZ_candidate_sort_keyrE�locationZis_wheelr8�latest_filetype)r!r6r-r(Zdependency_linksr;r+�finder�typZall_candidatesZbest_candidateZremote_versionr%r%r&r=�s8




z&ListCommand.iter_packages_latest_infoscCs0t|�rd|j|j|jfSd|j|jfSdS)Nz%s (%s, %s)z%s (%s))r
�project_namerErL)r!r;r%r%r&�
output_legacy�s
zListCommand.output_legacycCsd|j|�|j|jfS)Nz%s - Latest: %s [%s])rQr8rM)r!r;r%r%r&�output_legacy_latest�sz ListCommand.output_legacy_latestcCs�t|dd�d�}|jdkr:|r:t||�\}}|j||�n~|jdkrfxr|D]}tjd|j|j�qJWnR|jdkr�tjt||��n6x4|D],}|j	r�tj|j
|��q�tj|j|��q�WdS)NcSs
|jj�S)N)rP�lower)r;r%r%r&�<lambda>�sz4ListCommand.output_package_listing.<locals>.<lambda>)r>rrz%s==%sr)�sortedr�format_for_columns�output_package_listing_columnsrG�inforPrE�format_for_jsonr1rRrQ)r!r6r-�data�headerr;r%r%r&r5�s



z"ListCommand.output_package_listingcCsht|�dkr|jd|�t|�\}}t|�dkrL|jddjtdd�|���x|D]}tj|�qRWdS)Nr�� cSsd|S)N�-r%)�xr%r%r&rT
sz<ListCommand.output_package_listing_columns.<locals>.<lambda>)�len�insert�tabulaterI�maprGrX)r!rZr[Zpkg_strings�sizes�valr%r%r&rWs
z*ListCommand.output_package_listing_columns)�__name__�
__module__�__qualname__�__doc__�nameZusageZsummaryrr.r7r2r3r4r=rQrRr5rW�
__classcell__r%r%)r$r&rs@
6'
rcCsxdgtdd�|D��}x |D]}dd�t||�D�}qWg}x0|D](}djdd�t||�D��}|j|�qDW||fS)Nrcss|]}t|�VqdS)N)r`)r:r_r%r%r&r?sztabulate.<locals>.<genexpr>cSs"g|]\}}t|tt|����qSr%)rKr`�str)r:�s�cr%r%r&r<sztabulate.<locals>.<listcomp>r]cSs*g|]"\}}|dk	r"t|�j|�nd�qS)N�)rl�ljust)r:rmrnr%r%r&r<s)rKrrI�append)�valsrd�row�resultZdisplayr%r%r&rbs


rbcCs�|j}|rddddg}nddg}g}tdd�|D��r@|jd�xR|D]J}|j|jg}|rr|j|j�|j|j�t|�r�|j|j�|j|�qFW||fS)z_
    Convert the package data into something usable
    by output_package_listing_columns.
    ZPackageZVersionZLatestZTypecss|]}t|�VqdS)N)r
)r:r_r%r%r&r?2sz%format_for_columns.<locals>.<genexpr>ZLocation)	r1�anyrqrPrEr8rMr
rL)Zpkgsr-Zrunning_outdatedr[rZZprojrsr%r%r&rV%s 

rVcCsZg}xJ|D]B}|jtj|j�d�}|jrBtj|j�|d<|j|d<|j|�q
Wtj	|�S)N)rjrEr8rM)
rPrZ	text_typerEr1r8rMrqr�dumps)r6r-rZr;rXr%r%r&rYFs

rY) Z
__future__rrZloggingr/�	itertoolsr�ImportErrorrZpip._vendorrZpip.basecommandrZpip.exceptionsrZ	pip.indexrZ	pip.utilsr	r
Zpip.utils.deprecationrZpip.cmdoptionsrr
Z	getLoggerrfrGrrbrVrYr%r%r%r&�<module>s(
|!commands/__pycache__/__init__.cpython-36.opt-1.pyc000064400000003726151733136200015712 0ustar003

�Pf��@s&dZddlmZddlmZddlmZddlmZddl	m
Z
ddlmZddl
mZdd	lmZdd
lmZddlmZddlmZdd
lmZddlmZejeejee
je
ejeejeejeejeejeejeejeejeejeiZeeeeeeeeee
eegZddd�Zdd�Zdd�Z dS)z%
Package containing all pip commands
�)�absolute_import)�CompletionCommand)�DownloadCommand)�
FreezeCommand)�HashCommand)�HelpCommand)�ListCommand)�CheckCommand)�
SearchCommand)�ShowCommand)�InstallCommand)�UninstallCommand)�WheelCommandTccs:|rttt�}ntj�}x|D]\}}||jfVqWdS)z5Yields sorted (command name, command summary) tuples.N)�_sort_commands�
commands_dict�commands_order�itemsZsummary)ZorderedZcmditems�nameZ
command_class�r�/usr/lib/python3.6/__init__.py�
get_summaries4s
rcCs6ddlm}|j�}||tj��}|r.|dSdSdS)zCommand name auto-correct.r)�get_close_matchesFN)Zdifflibr�lowerr�keys)rrZclose_commandsrrr�get_similar_commands@srcs�fdd�}t|j�|d�S)Ncs(y�j|d�Stk
r"dSXdS)N��)�index�
ValueError)�key)�orderrr�keyfnOsz_sort_commands.<locals>.keyfn)r)�sortedr)Zcmddictr r!r)r rrNsrN)T)!�__doc__Z
__future__rZpip.commands.completionrZpip.commands.downloadrZpip.commands.freezerZpip.commands.hashrZpip.commands.helprZpip.commands.listrZpip.commands.checkr	Zpip.commands.searchr
Zpip.commands.showrZpip.commands.installrZpip.commands.uninstallr
Zpip.commands.wheelrrrrrrrrrrr�<module>sP

commands/__pycache__/freeze.cpython-36.opt-1.pyc000064400000005000151733136200015416 0ustar003

�Pf�@sdddlmZddlZddlZddlmZddlmZddlm	Z	ddl
mZd
ZGdd�de�Z
dS)�)�absolute_importN)�stdlib_pkgs)�Command)�freeze)�
WheelCache�pip�
setuptools�
distribute�wheelcs8eZdZdZdZdZdZd
Z�fdd�Zdd	�Z	�Z
S)�
FreezeCommandzx
    Output installed packages in requirements format.

    packages are listed in a case-insensitive sorted order.
    rz
      %prog [options]z1Output installed packages in requirements format.�ext://sys.stderrc	s�tt|�j||�|jjddddgddd�|jjdd	d
dgddd�|jjd
dddddd�|jjdddddd�|jjdddddjt�d�|jjd|j�dS)Nz-rz
--requirement�requirements�append�filez}Use the order in the given requirements file and its comments when generating output. This option can be used multiple times.)�dest�action�default�metavar�helpz-fz--find-links�
find_linksZURLz<URL for finding packages, which will be added to the output.z-lz--local�local�
store_trueFzUIf in a virtualenv that has global access, do not output globally-installed packages.)rrrrz--user�userz,Only output packages installed in user-site.z--all�
freeze_allz,Do not skip these packages in the output: %sz, )rrrr)	�superr�__init__Zcmd_optsZ
add_option�join�DEV_PKGS�parserZinsert_option_group)�self�args�kw)�	__class__��/usr/lib/python3.6/freeze.pyrsDzFreezeCommand.__init__c
Cs�tjjt�t��}t|j|�}tt�}|js6|jt	�t
|j|j|j
|j|j|j||d�}x"tf|�D]}tjj|d�qfWdS)N)ZrequirementrZ
local_onlyZ	user_onlyZ
skip_regex�isolated�wheel_cache�skip�
)r�indexZ
FormatControl�setr�	cache_dirrr�updater�dictr
rrrZskip_requirements_regexZ
isolated_moder�sys�stdout�write)rZoptionsr Zformat_controlr&r'Z
freeze_kwargs�liner#r#r$�runEs 
zFreezeCommand.run)rr)�__name__�
__module__�__qualname__�__doc__�nameZusageZsummaryZlog_streamsrr2�
__classcell__r#r#)r"r$rs*r)rrr	r
)Z
__future__rr.rZ
pip.compatrZpip.basecommandrZpip.operations.freezerZ	pip.wheelrrrr#r#r#r$�<module>scommands/__pycache__/check.cpython-36.pyc000064400000002337151733136200014266 0ustar003

�Pff�@sJddlZddlmZddlmZddlmZeje�Z	Gdd�de�Z
dS)�N)�Command)�check_requirements)�get_installed_distributionsc@s$eZdZdZdZdZdZdd�ZdS)�CheckCommandz7Verify installed packages have compatible dependencies.Zcheckz
      %prog [options]c
	Cs�tdfd�}t|�\}}x~|D]v}d|j|jf}x*|j|g�D]}tjd|j|j|j�q@Wx4|j|g�D]$\}}	tjd|j|j||	j|	j�qlWqW|s�|r�dStjd�dS)NF)Z
local_only�skipz%s==%sz*%s %s requires %s, which is not installed.z-%s %s has requirement %s, but you have %s %s.�zNo broken requirements found.)rrZproject_name�version�get�logger�info)
�selfZoptions�argsZdistsZmissing_reqs_dictZincompatible_reqs_dictZdist�keyZrequirement�actual�r�/usr/lib/python3.6/check.py�runs 

zCheckCommand.runN)�__name__�
__module__�__qualname__�__doc__�nameZusageZsummaryrrrrrrs
r)ZloggingZpip.basecommandrZpip.operations.checkrZ	pip.utilsrZ	getLoggerrr
rrrrr�<module>s

commands/__pycache__/uninstall.cpython-36.pyc000064400000004752151733136200015225 0ustar003

�PfD�@s`ddlmZddlZddlmZddlmZmZmZddl	m
Z
ddlmZGdd�de
�Z
dS)	�)�absolute_importN)�
WheelCache)�InstallRequirement�RequirementSet�parse_requirements)�Command)�InstallationErrorcs4eZdZdZdZdZdZ�fdd�Zdd�Z�Z	S)	�UninstallCommandaB
    Uninstall packages.

    pip is able to uninstall most installed packages. Known exceptions are:

    - Pure distutils packages installed with ``python setup.py install``, which
      leave behind no metadata to determine what files were installed.
    - Script wrappers installed by ``python setup.py develop``.
    �	uninstallzU
      %prog [options] <package> ...
      %prog [options] -r <requirements file> ...zUninstall packages.c	sVtt|�j||�|jjddddgddd�|jjdd	d
ddd
�|jjd|j�dS)Nz-rz
--requirement�requirements�append�filezjUninstall all the packages listed in the given requirements file.  This option can be used multiple times.)�dest�action�default�metavar�helpz-yz--yes�yes�
store_truez2Don't ask for confirmation of uninstall deletions.)rrrr)�superr	�__init__Zcmd_optsZ
add_option�parserZinsert_option_group)�self�args�kw)�	__class__��/usr/lib/python3.6/uninstall.pyrszUninstallCommand.__init__c
Cs�|j|���}tjjt�t��}t|j|�}tddd|j||d�}x$|D]}|j	t
j||j|d��qFWx2|jD](}x"t
||||d�D]}	|j	|	�q�WqnW|js�tdt|jd���|j|jd�WdQRXdS)N)Z	build_dirZsrc_dirZdownload_dir�isolated�session�wheel_cache)rr )�optionsrr zLYou must give at least one requirement to %(name)s (see "pip help %(name)s"))�name)Zauto_confirm)Z_build_session�pip�indexZ
FormatControl�setr�	cache_dirrZ
isolated_modeZadd_requirementrZ	from_linerrZhas_requirementsr�dictr"r
r)
rr!rrZformat_controlr Zrequirement_setr"�filenameZreqrrr�run-s6
zUninstallCommand.run)
�__name__�
__module__�__qualname__�__doc__r"ZusageZsummaryrr)�
__classcell__rr)rrr	
s	r	)Z
__future__rr#Z	pip.wheelrZpip.reqrrrZpip.basecommandrZpip.exceptionsrr	rrrr�<module>scommands/__pycache__/wheel.cpython-36.opt-1.pyc000064400000012430151733136200015247 0ustar003

�Pf1�@s�ddlmZddlZddlZddlZddlmZddlmZm	Z	ddl
mZddlm
Z
ddlmZddlmZdd	lmZmZdd
lmZeje�ZGdd�de�ZdS)
�)�absolute_importN)�RequirementCommand)�CommandError�PreviousBuildDirError)�RequirementSet)�import_or_raise)�BuildDirectory)�RemovedInPip10Warning)�
WheelCache�WheelBuilder)�
cmdoptionscs<eZdZdZdZdZdZ�fdd�Zdd�Zd	d
�Z	�Z
S)�WheelCommanda�
    Build Wheel archives for your requirements and dependencies.

    Wheel is a built-package format, and offers the advantage of not
    recompiling your software during every install. For more details, see the
    wheel docs: https://wheel.readthedocs.io/en/latest/

    Requirements: setuptools>=0.8, and wheel.

    'pip wheel' uses the bdist_wheel setuptools extension from the wheel
    package to build individual wheels.

    Zwheelz�
      %prog [options] <requirement specifier> ...
      %prog [options] -r <requirements file> ...
      %prog [options] [-e] <vcs project url> ...
      %prog [options] [-e] <local project path> ...
      %prog [options] <archive url/path> ...z$Build wheels from your requirements.csPtt|�j||�|j}|jddddtjdd�|jtj��|jtj	��|jtj
��|jtj��|jddd	d
dd�|jtj��|jtj
��|jtj��|jtj��|jtj��|jtj��|jtj��|jd
dd
d	dd�|jddddd�|jtj��|jtj��tjtj|j�}|jjd|�|jjd|�dS)Nz-wz--wheel-dir�	wheel_dir�dirzLBuild wheels into <dir>, where the default is the current working directory.)�dest�metavar�default�helpz--build-option�
build_options�options�appendz9Extra arguments to be supplied to 'setup.py bdist_wheel'.)rr�actionrz--global-option�global_optionszZExtra global options to be supplied to the setup.py call before the 'bdist_wheel' command.)rrrrz--pre�
store_trueFzYInclude pre-release and development versions. By default, pip only finds stable versions.)rrrr)�superr
�__init__�cmd_optsZ
add_option�os�curdirrZ	use_wheelZno_use_wheelZ	no_binaryZonly_binaryZconstraintsZeditableZrequirements�src�ignore_requires_pythonZno_deps�	build_dir�no_clean�require_hashesZmake_option_groupZindex_group�parserZinsert_option_group)�self�args�kwrZ
index_opts)�	__class__��/usr/lib/python3.6/wheel.pyr.sVzWheelCommand.__init__cCs.tdtd�tdtd�}t|d�s*td��dS)Nzwheel.bdist_wheelzM'pip wheel' requires the 'wheel' package. To fix this, run: pip install wheel�
pkg_resourceszp'pip wheel' requires setuptools >= 0.8 for dist-info support. To fix this, run: pip install --upgrade setuptoolsZDistInfoDistribution)rr�hasattr)r%r+r)r)r*�check_required_packageshs
z$WheelCommand.check_required_packagesc Cs�|j�tj|�tj|�|jr.tjdt�|jr@tjdt�|j	rRtjdt�|j
g|j}|jr|t
jddj|��g}|jr�tjj|j�|_tjj|j�|_|j|���}|j||�}|jp�|j}t|j|j�}t|j|d���}t||jd|jd|j|j|||j |j!d�}	|j"|	|||||j#|�|	j$�s6dSzZy6t%|	||j&�pJg|j'�pTgd	�}
|
j(��slt)d
��Wnt*k
�r�d|_�YnXWd|j�s�|	j+�XWdQRXWdQRXdS)Nz�--allow-external has been deprecated and will be removed in the future. Due to changes in the repository protocol, it no longer has any effect.z�--allow-all-external has been deprecated and will be removed in the future. Due to changes in the repository protocol, it no longer has any effect.z�--allow-unverified has been deprecated and will be removed in the future. Due to changes in the repository protocol, it no longer has any effect.zIgnoring indexes: %s�,)�deleteT)r!�src_dirZdownload_dir�ignore_dependenciesZignore_installedr �isolated�session�wheel_cacheZwheel_download_dirr#)rrz"Failed to build one or more wheels),r-rZresolve_wheel_no_use_binaryZcheck_install_build_globalZallow_external�warnings�warnr	Zallow_all_externalZallow_unverifiedZ	index_urlZextra_index_urlsZno_index�logger�debug�joinr!r�path�abspathr0Z_build_sessionZ_build_package_finderr"r
�	cache_dirZformat_controlrrr1r Z
isolated_moderr#Zpopulate_requirement_set�nameZhas_requirementsrrrZbuildrrZ
cleanup_files)r%rr&Z
index_urlsr3�finderZbuild_deleter4r!Zrequirement_set�wbr)r)r*�run|sv






zWheelCommand.run)�__name__�
__module__�__qualname__�__doc__r=ZusageZsummaryrr-r@�
__classcell__r)r))r(r*r
s
:r
)Z
__future__rZloggingrr5Zpip.basecommandrZpip.exceptionsrrZpip.reqrZ	pip.utilsrZpip.utils.buildrZpip.utils.deprecationr	Z	pip.wheelr
rZpiprZ	getLoggerrAr7r
r)r)r)r*�<module>s
commands/__pycache__/hash.cpython-36.pyc000064400000003555151733136200014137 0ustar003

�Pf=�@s~ddlmZddlZddlZddlZddlmZddlmZddl	m
Z
ddlmZm
Z
eje�ZGdd�de�Zd	d
�ZdS)�)�absolute_importN)�Command)�ERROR)�read_chunks)�
FAVORITE_HASH�
STRONG_HASHEScs4eZdZdZdZdZdZ�fdd�Zdd�Z�Z	S)	�HashCommandz�
    Compute a hash of a local package archive.

    These can be used with --hash in a requirements file to do repeatable
    installs.

    �hashz%prog [options] <file> ...z#Compute hashes of package archives.c
sJtt|�j||�|jjdddtdtddjt�d�|jj	d|j�dS)	Nz-az--algorithm�	algorithmZstorez$The hash algorithm to use: one of %sz, )�dest�choices�action�default�helpr)
�superr�__init__Zcmd_optsZ
add_optionrr�join�parserZinsert_option_group)�self�args�kw)�	__class__��/usr/lib/python3.6/hash.pyrszHashCommand.__init__cCsD|s|jjtj�tS|j}x"|D]}tjd||t||��q"WdS)Nz%s:
--hash=%s:%s)	rZprint_usage�sys�stderrrr
�logger�info�
_hash_of_file)rZoptionsrr
�pathrrr�run(s
zHashCommand.run)
�__name__�
__module__�__qualname__�__doc__�nameZusageZsummaryrr �
__classcell__rr)rrrsrc
CsDt|d��,}tj|�}xt|�D]}|j|�q WWdQRX|j�S)z!Return the hash digest of a file.�rbN)�open�hashlib�newr�updateZ	hexdigest)rr
�archiver	�chunkrrrr3s

r)Z
__future__rr)ZloggingrZpip.basecommandrZpip.status_codesrZ	pip.utilsrZpip.utils.hashesrrZ	getLoggerr!rrrrrrr�<module>s
#commands/__pycache__/show.cpython-36.opt-1.pyc000064400000012301151733136200015120 0ustar003

�Pf�@s�ddlmZddlmZddlZddlZddlmZddlm	Z	m
Z
ddlmZddl
mZeje�ZGdd	�d	e�Zd
d�Zdd
d�ZdS)�)�absolute_import)�
FeedParserN)�Command)�SUCCESS�ERROR)�
pkg_resources)�canonicalize_namecs4eZdZdZdZdZdZ�fdd�Zdd�Z�Z	S)	�ShowCommandz6Show information about one or more installed packages.Zshowz$
      %prog [options] <package> ...z*Show information about installed packages.cs>tt|�j||�|jjddddddd�|jjd|j�dS)	Nz-fz--files�files�
store_trueFz7Show the full list of installed files for each package.)�dest�action�default�helpr)�superr	�__init__Zcmd_optsZ
add_option�parserZinsert_option_group)�self�args�kw)�	__class__��/usr/lib/python3.6/show.pyrszShowCommand.__init__cCs8|stjd�tS|}t|�}t||j|jd�s4tStS)Nz.ERROR: Please provide a package name or names.)�
list_files�verbose)�loggerZwarningr�search_packages_info�
print_resultsr
rr)rZoptionsr�query�resultsrrr�run"s
zShowCommand.run)
�__name__�
__module__�__qualname__�__doc__�nameZusage�summaryrr �
__classcell__rr)rrr	sr	c#si�xtjD]}|�t|j�<qWdd�|D�}�x�fdd�|D�D�]Ή�j�j�jdd��j�D�d�}d}d}t�tj�rވj	d�rȈj
d�}dd�|D�}�fd	d�|D�}�fd
d�|D�}�j	d�r܈jd�}nP�j	d��r�j
d�}�fd
d�|D�}�fdd�|D�}�j	d��r.�jd�}�j	d��rL�j
d�}||d<�j	d��r�x,�j
d�D]}	|	j��rd|	j�|d<P�qdWt
�}
|
j|�|
j�}xdD]}|j|�||<�q�Wg}
x4|j�D](}	|	jd��r�|
j|	td�d���q�W|
|d<|�rt|�|d<|VqFWdS)z�
    Gather details from installed distributions. Print distribution name,
    version, location, and installed files. Installed files requires a
    pip generated 'installed-files.txt' in the distributions '.egg-info'
    directory.
    cSsg|]}t|��qSr)r)�.0r%rrr�
<listcomp>:sz(search_packages_info.<locals>.<listcomp>csg|]}|�kr�|�qSrr)r(Zpkg)�	installedrrr)<scSsg|]
}|j�qSr)�project_name)r(Zdeprrrr)As)r%�version�location�requiresNZRECORDcSsg|]}|jd�d�qS)�,r)�split)r(�lrrrr)Iscsg|]}tjj�j|��qSr)�os�path�joinr-)r(�p)�distrrr)Jscsg|]}tjj|�j��qSr)r2r3�relpathr-)r(r5)r6rrr)KsZMETADATAzinstalled-files.txtcsg|]}tjj�j|��qSr)r2r3r4Zegg_info)r(r5)r6rrr)Sscsg|]}tjj|�j��qSr)r2r3r7r-)r(r5)r6rrr)TszPKG-INFOzentry_points.txt�entry_pointsZ	INSTALLER�	installer�metadata-versionr&�	home-page�author�author-email�licensezClassifier: �classifiersr
)r:r&r;r<r=r>)rZworking_setrr+r,r-r.�
isinstanceZDistInfoDistributionZhas_metadataZget_metadata_linesZget_metadata�striprZfeed�close�get�
splitlines�
startswith�append�len�sorted)rr5Zquery_names�packageZ	file_listZmetadata�lines�pathsr8�lineZfeed_parserZ
pkg_info_dict�keyr?r)r6r*rr/s^







rFc	Cs�d}�x�t|�D�]�\}}d}|dkr0tjd�tjd|jdd��tjd|jd	d��tjd
|jdd��tjd|jd
d��tjd|jdd��tjd|jdd��tjd|jdd��tjd|jdd��tjddj|jdg���|�rxtjd|jdd��tjd|jdd��tjd�x"|jdg�D]}tjd|��q0Wtjd �x&|jd!g�D]}tjd|j���q^W|rtjd"�x&|jd#g�D]}tjd|j���q�Wd#|krtjd$�qW|S)%zD
    Print the informations from installed distributions found.
    FTrz---zName: %sr%�zVersion: %sr,zSummary: %sr&z
Home-page: %sz	home-pagez
Author: %sr<zAuthor-email: %szauthor-emailzLicense: %sr>zLocation: %sr-zRequires: %sz, r.zMetadata-Version: %szmetadata-versionz
Installer: %sr9zClassifiers:r?z  %sz
Entry-points:r8zFiles:r
z!Cannot locate installed-files.txt)�	enumerater�inforCr4rA)	Z
distributionsrrZresults_printed�ir6Z
classifier�entryrLrrrrxs>



r)FF)Z
__future__rZemail.parserrZloggingr2Zpip.basecommandrZpip.status_codesrrZpip._vendorrZpip._vendor.packaging.utilsrZ	getLoggerr!rr	rrrrrr�<module>s
Icommands/freeze.py000064400000005423151733136200010204 0ustar00from __future__ import absolute_import

import sys

import pip
from pip.compat import stdlib_pkgs
from pip.basecommand import Command
from pip.operations.freeze import freeze
from pip.wheel import WheelCache


DEV_PKGS = ('pip', 'setuptools', 'distribute', 'wheel')


class FreezeCommand(Command):
    """
    Output installed packages in requirements format.

    packages are listed in a case-insensitive sorted order.
    """
    name = 'freeze'
    usage = """
      %prog [options]"""
    summary = 'Output installed packages in requirements format.'
    # Route log output to stderr so stdout carries only the requirements list.
    log_streams = ("ext://sys.stderr", "ext://sys.stderr")

    def __init__(self, *args, **kw):
        super(FreezeCommand, self).__init__(*args, **kw)

        opts = self.cmd_opts
        opts.add_option(
            '-r', '--requirement',
            dest='requirements',
            action='append',
            default=[],
            metavar='file',
            help="Use the order in the given requirements file and its "
                 "comments when generating output. This option can be "
                 "used multiple times.")
        opts.add_option(
            '-f', '--find-links',
            dest='find_links',
            action='append',
            default=[],
            metavar='URL',
            help='URL for finding packages, which will be added to the '
                 'output.')
        opts.add_option(
            '-l', '--local',
            dest='local',
            action='store_true',
            default=False,
            help='If in a virtualenv that has global access, do not output '
                 'globally-installed packages.')
        opts.add_option(
            '--user',
            dest='user',
            action='store_true',
            default=False,
            help='Only output packages installed in user-site.')
        opts.add_option(
            '--all',
            dest='freeze_all',
            action='store_true',
            help='Do not skip these packages in the output: %s' %
                 ', '.join(DEV_PKGS))

        self.parser.insert_option_group(0, opts)

    def run(self, options, args):
        """Write one requirements line per installed package to stdout."""
        # Empty format control: freeze never needs to build or fetch wheels.
        fmt_ctl = pip.index.FormatControl(set(), set())
        cache = WheelCache(options.cache_dir, fmt_ctl)

        # Always hide stdlib packages; hide pip's own tooling unless --all.
        skip = set(stdlib_pkgs)
        if not options.freeze_all:
            skip.update(DEV_PKGS)

        for line in freeze(
                requirement=options.requirements,
                find_links=options.find_links,
                local_only=options.local,
                user_only=options.user,
                skip_regex=options.skip_requirements_regex,
                isolated=options.isolated_mode,
                wheel_cache=cache,
                skip=skip):
            sys.stdout.write(line + '\n')
commands/show.py000064400000013403151733136200007701 0ustar00from __future__ import absolute_import

from email.parser import FeedParser
import logging
import os

from pip.basecommand import Command
from pip.status_codes import SUCCESS, ERROR
from pip._vendor import pkg_resources
from pip._vendor.packaging.utils import canonicalize_name


logger = logging.getLogger(__name__)


class ShowCommand(Command):
    """Show information about one or more installed packages."""
    name = 'show'
    usage = """
      %prog [options] <package> ..."""
    summary = 'Show information about installed packages.'

    def __init__(self, *args, **kw):
        super(ShowCommand, self).__init__(*args, **kw)
        opts = self.cmd_opts
        opts.add_option(
            '-f', '--files',
            dest='files',
            action='store_true',
            default=False,
            help='Show the full list of installed files for each package.')

        self.parser.insert_option_group(0, opts)

    def run(self, options, args):
        """Look up each requested package and print its details.

        Returns ERROR when no package names were given or none matched.
        """
        if not args:
            logger.warning('ERROR: Please provide a package name or names.')
            return ERROR

        found_any = print_results(
            search_packages_info(args),
            list_files=options.files,
            verbose=options.verbose,
        )
        return SUCCESS if found_any else ERROR


def search_packages_info(query):
    """Yield a details dict for each queried installed distribution.

    Each dict carries name, version, location, requirements and, when
    available, metadata headers, classifiers, entry points, installer and
    the installed file list. The file list requires either a dist-info
    RECORD or a pip generated 'installed-files.txt' in the distribution's
    '.egg-info' directory.
    """
    installed = {}
    for p in pkg_resources.working_set:
        installed[canonicalize_name(p.project_name)] = p

    query_names = [canonicalize_name(name) for name in query]

    # Silently skip names that are not installed; callers detect the
    # empty-result case via print_results' return value.
    for dist in [installed[pkg] for pkg in query_names if pkg in installed]:
        package = {
            'name': dist.project_name,
            'version': dist.version,
            'location': dist.location,
            'requires': [dep.project_name for dep in dist.requires()],
        }
        file_list = None
        metadata = None
        if isinstance(dist, pkg_resources.DistInfoDistribution):
            # RECORDs should be part of .dist-info metadatas
            if dist.has_metadata('RECORD'):
                lines = dist.get_metadata_lines('RECORD')
                # Each RECORD row is "path,hash,size"; keep only the path.
                paths = [row.split(',')[0] for row in lines]
                paths = [os.path.join(dist.location, p) for p in paths]
                file_list = [os.path.relpath(p, dist.location) for p in paths]

            if dist.has_metadata('METADATA'):
                metadata = dist.get_metadata('METADATA')
        else:
            # Otherwise use pip's log for .egg-info's
            if dist.has_metadata('installed-files.txt'):
                paths = dist.get_metadata_lines('installed-files.txt')
                paths = [os.path.join(dist.egg_info, p) for p in paths]
                file_list = [os.path.relpath(p, dist.location) for p in paths]

            if dist.has_metadata('PKG-INFO'):
                metadata = dist.get_metadata('PKG-INFO')

        if dist.has_metadata('entry_points.txt'):
            entry_points = dist.get_metadata_lines('entry_points.txt')
            package['entry_points'] = entry_points

        if dist.has_metadata('INSTALLER'):
            # The INSTALLER file holds a single tool name; take the first
            # non-blank line.
            for line in dist.get_metadata_lines('INSTALLER'):
                if line.strip():
                    package['installer'] = line.strip()
                    break

        # Bug fix: a distribution shipping neither METADATA nor PKG-INFO
        # previously crashed here (FeedParser.feed(None)) and again below
        # (None.splitlines()); treat missing metadata as an empty document.
        if metadata is None:
            metadata = ''

        # @todo: Should pkg_resources.Distribution have a
        # `get_pkg_info` method?
        feed_parser = FeedParser()
        feed_parser.feed(metadata)
        pkg_info_dict = feed_parser.close()
        for key in ('metadata-version', 'summary',
                    'home-page', 'author', 'author-email', 'license'):
            package[key] = pkg_info_dict.get(key)

        # It looks like FeedParser cannot deal with repeated headers
        classifiers = []
        for line in metadata.splitlines():
            if line.startswith('Classifier: '):
                classifiers.append(line[len('Classifier: '):])
        package['classifiers'] = classifiers

        if file_list:
            package['files'] = sorted(file_list)
        yield package


def print_results(distributions, list_files=False, verbose=False):
    """Log the details of each distribution dict; return True if any printed.

    Entries are separated by a "---" line; *verbose* adds metadata and
    entry points, *list_files* adds the installed file list.
    """
    printed_any = False
    for index, dist in enumerate(distributions):
        printed_any = True
        if index:
            # Separator between consecutive packages.
            logger.info("---")
        logger.info("Name: %s", dist.get('name', ''))
        logger.info("Version: %s", dist.get('version', ''))
        logger.info("Summary: %s", dist.get('summary', ''))
        logger.info("Home-page: %s", dist.get('home-page', ''))
        logger.info("Author: %s", dist.get('author', ''))
        logger.info("Author-email: %s", dist.get('author-email', ''))
        logger.info("License: %s", dist.get('license', ''))
        logger.info("Location: %s", dist.get('location', ''))
        logger.info("Requires: %s", ', '.join(dist.get('requires', [])))
        if verbose:
            logger.info("Metadata-Version: %s",
                        dist.get('metadata-version', ''))
            logger.info("Installer: %s", dist.get('installer', ''))
            logger.info("Classifiers:")
            for tag in dist.get('classifiers', []):
                logger.info("  %s", tag)
            logger.info("Entry-points:")
            for ep in dist.get('entry_points', []):
                logger.info("  %s", ep.strip())
        if list_files:
            logger.info("Files:")
            for path in dist.get('files', []):
                logger.info("  %s", path.strip())
            if "files" not in dist:
                logger.info("Cannot locate installed-files.txt")
    return printed_any
commands/hash.py000064400000003075151733136200007650 0ustar00from __future__ import absolute_import

import hashlib
import logging
import sys

from pip.basecommand import Command
from pip.status_codes import ERROR
from pip.utils import read_chunks
from pip.utils.hashes import FAVORITE_HASH, STRONG_HASHES


logger = logging.getLogger(__name__)


class HashCommand(Command):
    """
    Compute a hash of a local package archive.

    These can be used with --hash in a requirements file to do repeatable
    installs.

    """
    name = 'hash'
    usage = '%prog [options] <file> ...'
    summary = 'Compute hashes of package archives.'

    def __init__(self, *args, **kw):
        super(HashCommand, self).__init__(*args, **kw)
        self.cmd_opts.add_option(
            '-a', '--algorithm',
            dest='algorithm',
            choices=STRONG_HASHES,
            action='store',
            default=FAVORITE_HASH,
            help='The hash algorithm to use: one of %s' %
                 ', '.join(STRONG_HASHES))
        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options, args):
        """Print a --hash line for every file argument; ERROR if none given."""
        if not args:
            self.parser.print_usage(sys.stderr)
            return ERROR

        for path in args:
            digest = _hash_of_file(path, options.algorithm)
            logger.info('%s:\n--hash=%s:%s', path, options.algorithm, digest)


def _hash_of_file(path, algorithm):
    """Return the hash digest of a file."""
    # `digest` instead of `hash` to avoid shadowing the builtin.
    digest = hashlib.new(algorithm)
    with open(path, 'rb') as archive:
        for chunk in read_chunks(archive):
            digest.update(chunk)
    return digest.hexdigest()
commands/uninstall.py000064400000005504151733136200010735 0ustar00from __future__ import absolute_import

import pip
from pip.wheel import WheelCache
from pip.req import InstallRequirement, RequirementSet, parse_requirements
from pip.basecommand import Command
from pip.exceptions import InstallationError


class UninstallCommand(Command):
    """
    Uninstall packages.

    pip is able to uninstall most installed packages. Known exceptions are:

    - Pure distutils packages installed with ``python setup.py install``, which
      leave behind no metadata to determine what files were installed.
    - Script wrappers installed by ``python setup.py develop``.
    """
    name = 'uninstall'
    usage = """
      %prog [options] <package> ...
      %prog [options] -r <requirements file> ..."""
    summary = 'Uninstall packages.'

    def __init__(self, *args, **kw):
        super(UninstallCommand, self).__init__(*args, **kw)
        opts = self.cmd_opts
        opts.add_option(
            '-r', '--requirement',
            dest='requirements',
            action='append',
            default=[],
            metavar='file',
            help='Uninstall all the packages listed in the given requirements '
                 'file.  This option can be used multiple times.',
        )
        opts.add_option(
            '-y', '--yes',
            dest='yes',
            action='store_true',
            help="Don't ask for confirmation of uninstall deletions.")

        self.parser.insert_option_group(0, opts)

    def run(self, options, args):
        """Collect requirements from args and -r files, then uninstall them.

        Raises InstallationError when nothing was requested.
        """
        with self._build_session(options) as session:
            fmt_ctl = pip.index.FormatControl(set(), set())
            cache = WheelCache(options.cache_dir, fmt_ctl)
            req_set = RequirementSet(
                build_dir=None,
                src_dir=None,
                download_dir=None,
                isolated=options.isolated_mode,
                session=session,
                wheel_cache=cache,
            )
            # Requirements named directly on the command line ...
            for name in args:
                req_set.add_requirement(
                    InstallRequirement.from_line(
                        name, isolated=options.isolated_mode,
                        wheel_cache=cache))
            # ... followed by those read from -r/--requirement files.
            for filename in options.requirements:
                for req in parse_requirements(
                        filename, options=options, session=session,
                        wheel_cache=cache):
                    req_set.add_requirement(req)
            if not req_set.has_requirements:
                raise InstallationError(
                    'You must give at least one requirement to %(name)s (see '
                    '"pip help %(name)s")' % {'name': self.name}
                )
            req_set.uninstall(auto_confirm=options.yes)
commands/__init__.py000064400000004304151733136200010460 0ustar00"""
Package containing all pip commands
"""
from __future__ import absolute_import

from pip.commands.completion import CompletionCommand
from pip.commands.download import DownloadCommand
from pip.commands.freeze import FreezeCommand
from pip.commands.hash import HashCommand
from pip.commands.help import HelpCommand
from pip.commands.list import ListCommand
from pip.commands.check import CheckCommand
from pip.commands.search import SearchCommand
from pip.commands.show import ShowCommand
from pip.commands.install import InstallCommand
from pip.commands.uninstall import UninstallCommand
from pip.commands.wheel import WheelCommand


# Mapping of command name -> command class.  This is the lookup table used
# to instantiate the implementation for the sub-command given on the
# command line.
commands_dict = {
    CompletionCommand.name: CompletionCommand,
    FreezeCommand.name: FreezeCommand,
    HashCommand.name: HashCommand,
    HelpCommand.name: HelpCommand,
    SearchCommand.name: SearchCommand,
    ShowCommand.name: ShowCommand,
    InstallCommand.name: InstallCommand,
    UninstallCommand.name: UninstallCommand,
    DownloadCommand.name: DownloadCommand,
    ListCommand.name: ListCommand,
    CheckCommand.name: CheckCommand,
    WheelCommand.name: WheelCommand,
}


# Desired display order for command summaries (consumed by get_summaries
# via _sort_commands); commands missing from this list sort to the end.
commands_order = [
    InstallCommand,
    DownloadCommand,
    UninstallCommand,
    FreezeCommand,
    ListCommand,
    ShowCommand,
    CheckCommand,
    SearchCommand,
    WheelCommand,
    HashCommand,
    CompletionCommand,
    HelpCommand,
]


def get_summaries(ordered=True):
    """Yield (command name, command summary) pairs.

    When *ordered* is true the pairs follow ``commands_order``; otherwise
    they come out in whatever order ``commands_dict`` iterates.
    """
    items = (
        _sort_commands(commands_dict, commands_order)
        if ordered
        else commands_dict.items()
    )
    for cmd_name, cmd_class in items:
        yield (cmd_name, cmd_class.summary)


def get_similar_commands(name):
    """Command name auto-correct.

    Return the closest known command name to *name*, or False when
    nothing is similar enough.
    """
    from difflib import get_close_matches

    matches = get_close_matches(name.lower(), commands_dict.keys())
    return matches[0] if matches else False


def _sort_commands(cmddict, order):
    def keyfn(key):
        try:
            return order.index(key[1])
        except ValueError:
            # unordered items should come last
            return 0xff

    return sorted(cmddict.items(), key=keyfn)
commands/download.py000064400000017202151733136200010531 0ustar00from __future__ import absolute_import

import logging
import os

from pip.exceptions import CommandError
from pip.index import FormatControl
from pip.req import RequirementSet
from pip.basecommand import RequirementCommand
from pip import cmdoptions
from pip.utils import ensure_dir, normalize_path
from pip.utils.build import BuildDirectory
from pip.utils.filesystem import check_path_owner


logger = logging.getLogger(__name__)


class DownloadCommand(RequirementCommand):
    """
    Download packages from:

    - PyPI (and other indexes) using requirement specifiers.
    - VCS project urls.
    - Local project directories.
    - Local or remote source archives.

    pip also supports downloading from "requirements files", which provide
    an easy way to specify a whole environment to be downloaded.
    """
    name = 'download'

    usage = """
      %prog [options] <requirement specifier> [package-index-options] ...
      %prog [options] -r <requirements file> [package-index-options] ...
      %prog [options] [-e] <vcs project url> ...
      %prog [options] [-e] <local project path> ...
      %prog [options] <archive url/path> ..."""

    summary = 'Download packages.'

    def __init__(self, *args, **kw):
        super(DownloadCommand, self).__init__(*args, **kw)

        cmd_opts = self.cmd_opts

        # Requirement-selection options shared with the other
        # requirement-processing commands; each cmdoptions factory returns
        # a freshly instantiated Option (see pip.cmdoptions).
        cmd_opts.add_option(cmdoptions.constraints())
        cmd_opts.add_option(cmdoptions.editable())
        cmd_opts.add_option(cmdoptions.requirements())
        cmd_opts.add_option(cmdoptions.build_dir())
        cmd_opts.add_option(cmdoptions.no_deps())
        cmd_opts.add_option(cmdoptions.global_options())
        cmd_opts.add_option(cmdoptions.no_binary())
        cmd_opts.add_option(cmdoptions.only_binary())
        cmd_opts.add_option(cmdoptions.src())
        cmd_opts.add_option(cmdoptions.pre())
        cmd_opts.add_option(cmdoptions.no_clean())
        cmd_opts.add_option(cmdoptions.require_hashes())

        cmd_opts.add_option(
            '-d', '--dest', '--destination-dir', '--destination-directory',
            dest='download_dir',
            metavar='dir',
            default=os.curdir,
            help=("Download packages into <dir>."),
        )

        # The four options below (--platform, --python-version,
        # --implementation, --abi) restrict downloads to an environment
        # other than the running one; run() rejects their use unless
        # --only-binary=:all: is in effect.
        cmd_opts.add_option(
            '--platform',
            dest='platform',
            metavar='platform',
            default=None,
            help=("Only download wheels compatible with <platform>. "
                  "Defaults to the platform of the running system."),
        )

        cmd_opts.add_option(
            '--python-version',
            dest='python_version',
            metavar='python_version',
            default=None,
            help=("Only download wheels compatible with Python "
                  "interpreter version <version>. If not specified, then the "
                  "current system interpreter minor version is used. A major "
                  "version (e.g. '2') can be specified to match all "
                  "minor revs of that major version.  A minor version "
                  "(e.g. '34') can also be specified."),
        )

        cmd_opts.add_option(
            '--implementation',
            dest='implementation',
            metavar='implementation',
            default=None,
            help=("Only download wheels compatible with Python "
                  "implementation <implementation>, e.g. 'pp', 'jy', 'cp', "
                  " or 'ip'. If not specified, then the current "
                  "interpreter implementation is used.  Use 'py' to force "
                  "implementation-agnostic wheels."),
        )

        cmd_opts.add_option(
            '--abi',
            dest='abi',
            metavar='abi',
            default=None,
            help=("Only download wheels compatible with Python "
                  "abi <abi>, e.g. 'pypy_41'.  If not specified, then the "
                  "current interpreter abi tag is used.  Generally "
                  "you will need to specify --implementation, "
                  "--platform, and --python-version when using "
                  "this option."),
        )

        index_opts = cmdoptions.make_option_group(
            cmdoptions.non_deprecated_index_group,
            self.parser,
        )

        self.parser.insert_option_group(0, index_opts)
        self.parser.insert_option_group(0, cmd_opts)

    def run(self, options, args):
        # Downloading always fetches a copy; what is installed locally is
        # irrelevant.
        options.ignore_installed = True

        # --python-version takes a single value, but the package finder
        # expects a list (or None for "current interpreter").
        if options.python_version:
            python_versions = [options.python_version]
        else:
            python_versions = None

        # True when any cross-environment restriction flag was supplied.
        dist_restriction_set = any([
            options.python_version,
            options.platform,
            options.abi,
            options.implementation,
        ])
        # The FormatControl state produced by --only-binary=:all: with no
        # --no-binary; cross-environment downloads are only permitted for
        # wheels, never sdists.
        binary_only = FormatControl(set(), set([':all:']))
        if dist_restriction_set and options.format_control != binary_only:
            raise CommandError(
                "--only-binary=:all: must be set and --no-binary must not "
                "be set (or must be set to :none:) when restricting platform "
                "and interpreter constraints using --python-version, "
                "--platform, --abi, or --implementation."
            )

        options.src_dir = os.path.abspath(options.src_dir)
        options.download_dir = normalize_path(options.download_dir)

        # Create the destination directory up front so failures surface early.
        ensure_dir(options.download_dir)

        with self._build_session(options) as session:
            finder = self._build_package_finder(
                options=options,
                session=session,
                platform=options.platform,
                python_versions=python_versions,
                abi=options.abi,
                implementation=options.implementation,
            )
            # Keep the build dir only when --no-clean was given or the user
            # supplied an explicit --build directory.
            build_delete = (not (options.no_clean or options.build_dir))
            # Disable wheel caching (rather than failing later) when the
            # cache directory is not owned by the current user.
            if options.cache_dir and not check_path_owner(options.cache_dir):
                logger.warning(
                    "The directory '%s' or its parent directory is not owned "
                    "by the current user and caching wheels has been "
                    "disabled. check the permissions and owner of that "
                    "directory. If executing pip with sudo, you may want "
                    "sudo's -H flag.",
                    options.cache_dir,
                )
                options.cache_dir = None

            with BuildDirectory(options.build_dir,
                                delete=build_delete) as build_dir:

                requirement_set = RequirementSet(
                    build_dir=build_dir,
                    src_dir=options.src_dir,
                    download_dir=options.download_dir,
                    ignore_installed=True,
                    ignore_dependencies=options.ignore_dependencies,
                    session=session,
                    isolated=options.isolated_mode,
                    require_hashes=options.require_hashes
                )
                # Last argument is the wheel cache: downloads bypass it.
                self.populate_requirement_set(
                    requirement_set,
                    args,
                    options,
                    finder,
                    session,
                    self.name,
                    None
                )

                # Nothing to download (e.g. only constraints were given).
                if not requirement_set.has_requirements:
                    return

                requirement_set.prepare_files(finder)

                downloaded = ' '.join([
                    req.name for req in requirement_set.successfully_downloaded
                ])
                if downloaded:
                    logger.info(
                        'Successfully downloaded %s', downloaded
                    )

                # Clean up
                if not options.no_clean:
                    requirement_set.cleanup_files()

        return requirement_set
commands/install.py000064400000043561151733136200010377 0ustar00from __future__ import absolute_import

import logging
import operator
import os
import tempfile
import shutil
import warnings
import sys
from os import path
try:
    import wheel
except ImportError:
    wheel = None

from pip.req import RequirementSet
from pip.basecommand import RequirementCommand
from pip.locations import virtualenv_no_global, distutils_scheme
from pip.exceptions import (
    InstallationError, CommandError, PreviousBuildDirError,
)
from pip import cmdoptions
from pip.utils import ensure_dir, get_installed_version
from pip.utils.build import BuildDirectory
from pip.utils.deprecation import RemovedInPip10Warning
from pip.utils.filesystem import check_path_owner
from pip.wheel import WheelCache, WheelBuilder


logger = logging.getLogger(__name__)


class InstallCommand(RequirementCommand):
    """
    Install packages from:

    - PyPI (and other indexes) using requirement specifiers.
    - VCS project urls.
    - Local project directories.
    - Local or remote source archives.

    pip also supports installing from "requirements files", which provide
    an easy way to specify a whole environment to be installed.
    """
    name = 'install'

    usage = """
      %prog [options] <requirement specifier> [package-index-options] ...
      %prog [options] -r <requirements file> [package-index-options] ...
      %prog [options] [-e] <vcs project url> ...
      %prog [options] [-e] <local project path> ...
      %prog [options] <archive url/path> ..."""

    summary = 'Install packages.'

    def __init__(self, *args, **kw):
        super(InstallCommand, self).__init__(*args, **kw)

        cmd_opts = self.cmd_opts

        # Requirement-selection options shared with other commands.
        cmd_opts.add_option(cmdoptions.constraints())
        cmd_opts.add_option(cmdoptions.editable())
        cmd_opts.add_option(cmdoptions.requirements())
        cmd_opts.add_option(cmdoptions.build_dir())

        cmd_opts.add_option(
            '-t', '--target',
            dest='target_dir',
            metavar='dir',
            default=None,
            help='Install packages into <dir>. '
                 'By default this will not replace existing files/folders in '
                 '<dir>. Use --upgrade to replace existing packages in <dir> '
                 'with new versions.'
        )

        cmd_opts.add_option(
            '-d', '--download', '--download-dir', '--download-directory',
            dest='download_dir',
            metavar='dir',
            default=None,
            help=("Download packages into <dir> instead of installing them, "
                  "regardless of what's already installed."),
        )

        cmd_opts.add_option(cmdoptions.src())

        cmd_opts.add_option(
            '-U', '--upgrade',
            dest='upgrade',
            action='store_true',
            help='Upgrade all specified packages to the newest available '
                 'version. The handling of dependencies depends on the '
                 'upgrade-strategy used.'
        )

        cmd_opts.add_option(
            '--upgrade-strategy',
            dest='upgrade_strategy',
            default='eager',
            choices=['only-if-needed', 'eager'],
            help='Determines how dependency upgrading should be handled. '
                 '"eager" - dependencies are upgraded regardless of '
                 'whether the currently installed version satisfies the '
                 'requirements of the upgraded package(s). '
                 '"only-if-needed" - dependencies are upgraded only when '
                 'they do not satisfy the requirements of the upgraded '
                 'package(s).'
        )

        cmd_opts.add_option(
            '--force-reinstall',
            dest='force_reinstall',
            action='store_true',
            help='When upgrading, reinstall all packages even if they are '
                 'already up-to-date.')

        cmd_opts.add_option(
            '-I', '--ignore-installed',
            dest='ignore_installed',
            action='store_true',
            help='Ignore the installed packages (reinstalling instead).')

        cmd_opts.add_option(cmdoptions.ignore_requires_python())
        cmd_opts.add_option(cmdoptions.no_deps())

        cmd_opts.add_option(cmdoptions.install_options())
        cmd_opts.add_option(cmdoptions.global_options())

        cmd_opts.add_option(
            '--user',
            dest='use_user_site',
            action='store_true',
            help="Install to the Python user install directory for your "
                 "platform. Typically ~/.local/, or %APPDATA%\\Python on "
                 "Windows. (See the Python documentation for site.USER_BASE "
                 "for full details.)")

        cmd_opts.add_option(
            '--egg',
            dest='as_egg',
            action='store_true',
            help="Install packages as eggs, not 'flat', like pip normally "
                 "does. This option is not about installing *from* eggs. "
                 "(WARNING: Because this option overrides pip's normal install"
                 " logic, requirements files may not behave as expected.)")

        cmd_opts.add_option(
            '--root',
            dest='root_path',
            metavar='dir',
            default=None,
            help="Install everything relative to this alternate root "
                 "directory.")

        cmd_opts.add_option(
            '--strip-file-prefix',
            dest='strip_file_prefix',
            metavar='prefix',
            default=None,
            help="Strip given prefix from script paths in wheel RECORD."
        )

        cmd_opts.add_option(
            '--prefix',
            dest='prefix_path',
            metavar='dir',
            default=None,
            help="Installation prefix where lib, bin and other top-level "
                 "folders are placed")

        cmd_opts.add_option(
            "--compile",
            action="store_true",
            dest="compile",
            default=True,
            help="Compile py files to pyc",
        )

        cmd_opts.add_option(
            "--no-compile",
            action="store_false",
            dest="compile",
            help="Do not compile py files to pyc",
        )

        cmd_opts.add_option(cmdoptions.use_wheel())
        cmd_opts.add_option(cmdoptions.no_use_wheel())
        cmd_opts.add_option(cmdoptions.no_binary())
        cmd_opts.add_option(cmdoptions.only_binary())
        cmd_opts.add_option(cmdoptions.pre())
        cmd_opts.add_option(cmdoptions.no_clean())
        cmd_opts.add_option(cmdoptions.require_hashes())

        index_opts = cmdoptions.make_option_group(
            cmdoptions.index_group,
            self.parser,
        )

        self.parser.insert_option_group(0, index_opts)
        self.parser.insert_option_group(0, cmd_opts)

    def run(self, options, args):
        # Reconcile the deprecated --(no-)use-wheel flags and per-setup.py
        # option flags with the binary format controls.
        cmdoptions.resolve_wheel_no_use_binary(options)
        cmdoptions.check_install_build_global(options)

        def is_venv():
            # True inside a virtualenv (sys.real_prefix is set) or a
            # PEP 405 venv (sys.base_prefix differs from sys.prefix).
            return hasattr(sys, 'real_prefix') or \
                (hasattr(sys, 'base_prefix') and
                 sys.base_prefix != sys.prefix)

        # Warn when running as root outside a venv/virtualenv.
        # os.getuid() only exists on POSIX, so guard it for Windows.
        if hasattr(os, 'getuid') and os.getuid() == 0 and not is_venv():
            logger.warning(
                "WARNING: Running pip install with root privileges is "
                "generally not a good idea. Try `%s install --user` instead.",
                path.basename(sys.argv[0])
            )

        if options.as_egg:
            warnings.warn(
                "--egg has been deprecated and will be removed in the future. "
                "This flag is mutually exclusive with large parts of pip, and "
                "actually using it invalidates pip's ability to manage the "
                "installation process.",
                RemovedInPip10Warning,
            )

        if options.allow_external:
            warnings.warn(
                "--allow-external has been deprecated and will be removed in "
                "the future. Due to changes in the repository protocol, it no "
                "longer has any effect.",
                RemovedInPip10Warning,
            )

        if options.allow_all_external:
            warnings.warn(
                "--allow-all-external has been deprecated and will be removed "
                "in the future. Due to changes in the repository protocol, it "
                "no longer has any effect.",
                RemovedInPip10Warning,
            )

        if options.allow_unverified:
            warnings.warn(
                "--allow-unverified has been deprecated and will be removed "
                "in the future. Due to changes in the repository protocol, it "
                "no longer has any effect.",
                RemovedInPip10Warning,
            )

        if options.download_dir:
            warnings.warn(
                "pip install --download has been deprecated and will be "
                "removed in the future. Pip now has a download command that "
                "should be used instead.",
                RemovedInPip10Warning,
            )
            options.ignore_installed = True

        if options.build_dir:
            options.build_dir = os.path.abspath(options.build_dir)

        options.src_dir = os.path.abspath(options.src_dir)
        install_options = options.install_options or []
        if options.use_user_site:
            if options.prefix_path:
                raise CommandError(
                    "Can not combine '--user' and '--prefix' as they imply "
                    "different installation locations"
                )
            if virtualenv_no_global():
                raise InstallationError(
                    "Can not perform a '--user' install. User site-packages "
                    "are not visible in this virtualenv."
                )
            install_options.append('--user')
            install_options.append('--prefix=')

        temp_target_dir = None
        if options.target_dir:
            # --target installs into a scratch directory first; the results
            # are moved into options.target_dir after the install (below).
            options.ignore_installed = True
            temp_target_dir = tempfile.mkdtemp()
            options.target_dir = os.path.abspath(options.target_dir)
            if (os.path.exists(options.target_dir) and not
                    os.path.isdir(options.target_dir)):
                raise CommandError(
                    "Target path exists but is not a directory, will not "
                    "continue."
                )
            install_options.append('--home=' + temp_target_dir)

        global_options = options.global_options or []

        with self._build_session(options) as session:

            finder = self._build_package_finder(options, session)
            build_delete = (not (options.no_clean or options.build_dir))
            # NOTE(review): the wheel cache is created before cache_dir may
            # be disabled below, so it keeps the original path — confirm
            # this is intentional.
            wheel_cache = WheelCache(options.cache_dir, options.format_control)
            if options.cache_dir and not check_path_owner(options.cache_dir):
                logger.warning(
                    "The directory '%s' or its parent directory is not owned "
                    "by the current user and caching wheels has been "
                    "disabled. check the permissions and owner of that "
                    "directory. If executing pip with sudo, you may want "
                    "sudo's -H flag.",
                    options.cache_dir,
                )
                options.cache_dir = None

            with BuildDirectory(options.build_dir,
                                delete=build_delete) as build_dir:
                requirement_set = RequirementSet(
                    build_dir=build_dir,
                    src_dir=options.src_dir,
                    download_dir=options.download_dir,
                    upgrade=options.upgrade,
                    upgrade_strategy=options.upgrade_strategy,
                    as_egg=options.as_egg,
                    ignore_installed=options.ignore_installed,
                    ignore_dependencies=options.ignore_dependencies,
                    ignore_requires_python=options.ignore_requires_python,
                    force_reinstall=options.force_reinstall,
                    use_user_site=options.use_user_site,
                    target_dir=temp_target_dir,
                    session=session,
                    pycompile=options.compile,
                    isolated=options.isolated_mode,
                    wheel_cache=wheel_cache,
                    require_hashes=options.require_hashes,
                )

                self.populate_requirement_set(
                    requirement_set, args, options, finder, session, self.name,
                    wheel_cache
                )

                # Nothing to install (e.g. only constraints were given).
                if not requirement_set.has_requirements:
                    return

                try:
                    if (options.download_dir or not wheel or not
                            options.cache_dir):
                        # on -d don't do complex things like building
                        # wheels, and don't try to build wheels when wheel is
                        # not installed.
                        requirement_set.prepare_files(finder)
                    else:
                        # build wheels before install.
                        wb = WheelBuilder(
                            requirement_set,
                            finder,
                            build_options=[],
                            global_options=[],
                        )
                        # Ignore the result: a failed wheel will be
                        # installed from the sdist/vcs whatever.
                        wb.build(autobuilding=True)

                    if not options.download_dir:
                        requirement_set.install(
                            install_options,
                            global_options,
                            root=options.root_path,
                            prefix=options.prefix_path,
                            strip_file_prefix=options.strip_file_prefix,
                        )

                        possible_lib_locations = get_lib_location_guesses(
                            user=options.use_user_site,
                            home=temp_target_dir,
                            root=options.root_path,
                            prefix=options.prefix_path,
                            isolated=options.isolated_mode,
                        )
                        reqs = sorted(
                            requirement_set.successfully_installed,
                            key=operator.attrgetter('name'))
                        items = []
                        for req in reqs:
                            item = req.name
                            try:
                                # Best effort only: the installed version may
                                # not be discoverable in every layout.
                                installed_version = get_installed_version(
                                    req.name, possible_lib_locations
                                )
                                if installed_version:
                                    item += '-' + installed_version
                            except Exception:
                                pass
                            items.append(item)
                        installed = ' '.join(items)
                        if installed:
                            logger.info('Successfully installed %s', installed)
                    else:
                        downloaded = ' '.join([
                            req.name
                            for req in requirement_set.successfully_downloaded
                        ])
                        if downloaded:
                            logger.info(
                                'Successfully downloaded %s', downloaded
                            )
                except PreviousBuildDirError:
                    # Keep the build dir so the user can inspect/resolve it.
                    options.no_clean = True
                    raise
                finally:
                    # Clean up
                    if not options.no_clean:
                        requirement_set.cleanup_files()

        if options.target_dir:
            ensure_dir(options.target_dir)

            # Checking both purelib and platlib directories for installed
            # packages to be moved to target directory
            lib_dir_list = []

            purelib_dir = distutils_scheme('', home=temp_target_dir)['purelib']
            platlib_dir = distutils_scheme('', home=temp_target_dir)['platlib']

            if os.path.exists(purelib_dir):
                lib_dir_list.append(purelib_dir)
            if os.path.exists(platlib_dir) and platlib_dir != purelib_dir:
                lib_dir_list.append(platlib_dir)

            for lib_dir in lib_dir_list:
                for item in os.listdir(lib_dir):
                    target_item_dir = os.path.join(options.target_dir, item)
                    if os.path.exists(target_item_dir):
                        if not options.upgrade:
                            logger.warning(
                                'Target directory %s already exists. Specify '
                                '--upgrade to force replacement.',
                                target_item_dir
                            )
                            continue
                        if os.path.islink(target_item_dir):
                            logger.warning(
                                'Target directory %s already exists and is '
                                'a link. Pip will not automatically replace '
                                'links, please remove if replacement is '
                                'desired.',
                                target_item_dir
                            )
                            continue
                        if os.path.isdir(target_item_dir):
                            shutil.rmtree(target_item_dir)
                        else:
                            os.remove(target_item_dir)

                    shutil.move(
                        os.path.join(lib_dir, item),
                        target_item_dir
                    )
            shutil.rmtree(temp_target_dir)
        return requirement_set


def get_lib_location_guesses(*args, **kwargs):
    """Return the purelib and platlib paths for the given scheme arguments."""
    scheme = distutils_scheme('', *args, **kwargs)
    return [scheme[key] for key in ('purelib', 'platlib')]
commands/help.py000064400000001726151733136200007656 0ustar00from __future__ import absolute_import

from pip.basecommand import Command, SUCCESS
from pip.exceptions import CommandError


class HelpCommand(Command):
    """Show help for commands"""
    name = 'help'
    usage = """
      %prog <command>"""
    summary = 'Show help for commands.'

    def run(self, options, args):
        # Imported lazily to avoid a circular import with pip.commands.
        from pip.commands import commands_dict, get_similar_commands

        # 'pip help' with no args is handled by pip.__init__.parseopt()
        if not args:
            return SUCCESS

        cmd_name = args[0]  # the command we need help for

        if cmd_name not in commands_dict:
            guess = get_similar_commands(cmd_name)

            parts = ['unknown command "%s"' % cmd_name]
            if guess:
                parts.append('maybe you meant "%s"' % guess)

            raise CommandError(' - '.join(parts))

        commands_dict[cmd_name]().parser.print_help()

        return SUCCESS
commands/check.py000064400000002546151733136200010004 0ustar00import logging

from pip.basecommand import Command
from pip.operations.check import check_requirements
from pip.utils import get_installed_distributions


logger = logging.getLogger(__name__)


class CheckCommand(Command):
    """Verify installed packages have compatible dependencies."""
    name = 'check'
    usage = """
      %prog [options]"""
    summary = 'Verify installed packages have compatible dependencies.'

    def run(self, options, args):
        # Check every distribution, including globally-installed ones.
        installed = get_installed_distributions(local_only=False, skip=())
        missing, incompatible = check_requirements(installed)

        for dist in installed:
            key = '%s==%s' % (dist.project_name, dist.version)

            for req in missing.get(key, []):
                logger.info(
                    "%s %s requires %s, which is not installed.",
                    dist.project_name, dist.version, req.project_name)

            for req, actual in incompatible.get(key, []):
                logger.info(
                    "%s %s has requirement %s, but you have %s %s.",
                    dist.project_name, dist.version, req,
                    actual.project_name, actual.version)

        if not (missing or incompatible):
            logger.info("No broken requirements found.")
            return None
        return 1
commands/list.py000064400000026151151733136200007700 0ustar00from __future__ import absolute_import

import json
import logging
import warnings
try:
    from itertools import zip_longest
except ImportError:
    from itertools import izip_longest as zip_longest

from pip._vendor import six

from pip.basecommand import Command
from pip.exceptions import CommandError
from pip.index import PackageFinder
from pip.utils import (
    get_installed_distributions, dist_is_editable)
from pip.utils.deprecation import RemovedInPip10Warning
from pip.cmdoptions import make_option_group, index_group

logger = logging.getLogger(__name__)


class ListCommand(Command):
    """
    List installed packages, including editables.

    Packages are listed in a case-insensitive sorted order.
    """
    name = 'list'
    usage = """
      %prog [options]"""
    summary = 'List installed packages.'

    def __init__(self, *args, **kw):
        """Register the command-line options understood by ``pip list``."""
        super(ListCommand, self).__init__(*args, **kw)

        cmd_opts = self.cmd_opts

        # Filtering options: which installed distributions to show.
        cmd_opts.add_option(
            '-o', '--outdated',
            action='store_true',
            default=False,
            help='List outdated packages')
        cmd_opts.add_option(
            '-u', '--uptodate',
            action='store_true',
            default=False,
            help='List uptodate packages')
        cmd_opts.add_option(
            '-e', '--editable',
            action='store_true',
            default=False,
            help='List editable projects.')
        cmd_opts.add_option(
            '-l', '--local',
            action='store_true',
            default=False,
            help=('If in a virtualenv that has global access, do not list '
                  'globally-installed packages.'),
        )
        self.cmd_opts.add_option(
            '--user',
            dest='user',
            action='store_true',
            default=False,
            help='Only output packages installed in user-site.')

        cmd_opts.add_option(
            '--pre',
            action='store_true',
            default=False,
            help=("Include pre-release and development versions. By default, "
                  "pip only finds stable versions."),
        )

        # Output options.  Note: default is None (not 'legacy') so run() can
        # tell whether the user picked a format and warn about the upcoming
        # default change.
        cmd_opts.add_option(
            '--format',
            action='store',
            dest='list_format',
            choices=('legacy', 'columns', 'freeze', 'json'),
            help="Select the output format among: legacy (default), columns, "
                 "freeze or json.",
        )

        cmd_opts.add_option(
            '--not-required',
            action='store_true',
            dest='not_required',
            help="List packages that are not dependencies of "
                 "installed packages.",
        )

        # Index options are needed because --outdated/--uptodate query PyPI.
        index_opts = make_option_group(index_group, self.parser)

        self.parser.insert_option_group(0, index_opts)
        self.parser.insert_option_group(0, cmd_opts)

    def _build_package_finder(self, options, index_urls, session):
        """
        Create a package finder appropriate to this list command.
        """
        return PackageFinder(
            find_links=options.find_links,
            index_urls=index_urls,
            allow_all_prereleases=options.pre,
            trusted_hosts=options.trusted_hosts,
            process_dependency_links=options.process_dependency_links,
            session=session,
        )

    def run(self, options, args):
        """Collect installed distributions, apply the requested filters and
        print them in the selected format."""
        # The --allow-* flags are accepted but no longer do anything.
        if options.allow_external:
            warnings.warn(
                "--allow-external has been deprecated and will be removed in "
                "the future. Due to changes in the repository protocol, it no "
                "longer has any effect.",
                RemovedInPip10Warning,
            )

        if options.allow_all_external:
            warnings.warn(
                "--allow-all-external has been deprecated and will be removed "
                "in the future. Due to changes in the repository protocol, it "
                "no longer has any effect.",
                RemovedInPip10Warning,
            )

        if options.allow_unverified:
            warnings.warn(
                "--allow-unverified has been deprecated and will be removed "
                "in the future. Due to changes in the repository protocol, it "
                "no longer has any effect.",
                RemovedInPip10Warning,
            )

        if options.list_format is None:
            warnings.warn(
                "The default format will switch to columns in the future. "
                "You can use --format=(legacy|columns) (or define a "
                "format=(legacy|columns) in your pip.conf under the [list] "
                "section) to disable this warning.",
                RemovedInPip10Warning,
            )

        # The two version filters are mutually exclusive.
        if options.outdated and options.uptodate:
            raise CommandError(
                "Options --outdated and --uptodate cannot be combined.")

        packages = get_installed_distributions(
            local_only=options.local,
            user_only=options.user,
            editables_only=options.editable,
        )

        if options.outdated:
            packages = self.get_outdated(packages, options)
        elif options.uptodate:
            packages = self.get_uptodate(packages, options)

        if options.not_required:
            packages = self.get_not_required(packages, options)

        self.output_package_listing(packages, options)

    def get_outdated(self, packages, options):
        """Return the dists whose latest index version is newer than the
        installed one."""
        return [
            dist for dist in self.iter_packages_latest_infos(packages, options)
            if dist.latest_version > dist.parsed_version
        ]

    def get_uptodate(self, packages, options):
        """Return the dists whose installed version matches the latest one
        available on the index."""
        return [
            dist for dist in self.iter_packages_latest_infos(packages, options)
            if dist.latest_version == dist.parsed_version
        ]

    def get_not_required(self, packages, options):
        """Return the subset of ``packages`` no other listed package
        depends on."""
        dep_keys = set()
        for dist in packages:
            dep_keys.update(requirement.key for requirement in dist.requires())
        return set(pkg for pkg in packages if pkg.key not in dep_keys)

    def iter_packages_latest_infos(self, packages, options):
        """Yield each dist that has candidates on the index, annotated with
        ``latest_version`` and ``latest_filetype`` attributes.

        Dists with no (matching) release on the index are skipped.
        """
        index_urls = [options.index_url] + options.extra_index_urls
        if options.no_index:
            logger.debug('Ignoring indexes: %s', ','.join(index_urls))
            index_urls = []

        dependency_links = []
        for dist in packages:
            if dist.has_metadata('dependency_links.txt'):
                dependency_links.extend(
                    dist.get_metadata_lines('dependency_links.txt'),
                )

        with self._build_session(options) as session:
            finder = self._build_package_finder(options, index_urls, session)
            finder.add_dependency_links(dependency_links)

            for dist in packages:
                typ = 'unknown'
                all_candidates = finder.find_all_candidates(dist.key)
                if not options.pre:
                    # Remove prereleases
                    all_candidates = [candidate for candidate in all_candidates
                                      if not candidate.version.is_prerelease]

                if not all_candidates:
                    continue
                best_candidate = max(all_candidates,
                                     key=finder._candidate_sort_key)
                remote_version = best_candidate.version
                if best_candidate.location.is_wheel:
                    typ = 'wheel'
                else:
                    typ = 'sdist'
                # This is dirty but makes the rest of the code much cleaner
                dist.latest_version = remote_version
                dist.latest_filetype = typ
                yield dist

    def output_legacy(self, dist):
        """Format one dist the pre-columns way: 'name (version[, location])'."""
        if dist_is_editable(dist):
            return '%s (%s, %s)' % (
                dist.project_name,
                dist.version,
                dist.location,
            )
        else:
            return '%s (%s)' % (dist.project_name, dist.version)

    def output_legacy_latest(self, dist):
        """Legacy format plus the latest available version and file type."""
        return '%s - Latest: %s [%s]' % (
            self.output_legacy(dist),
            dist.latest_version,
            dist.latest_filetype,
        )

    def output_package_listing(self, packages, options):
        """Sort the packages case-insensitively and print them in the
        format selected by --format (legacy when unset)."""
        packages = sorted(
            packages,
            key=lambda dist: dist.project_name.lower(),
        )
        if options.list_format == 'columns' and packages:
            data, header = format_for_columns(packages, options)
            self.output_package_listing_columns(data, header)
        elif options.list_format == 'freeze':
            for dist in packages:
                logger.info("%s==%s", dist.project_name, dist.version)
        elif options.list_format == 'json':
            logger.info(format_for_json(packages, options))
        else:  # legacy
            for dist in packages:
                if options.outdated:
                    logger.info(self.output_legacy_latest(dist))
                else:
                    logger.info(self.output_legacy(dist))

    def output_package_listing_columns(self, data, header):
        """Print ``data`` rows under ``header`` with aligned columns and a
        dashed separator line."""
        # insert the header first: we need to know the size of column names
        if len(data) > 0:
            data.insert(0, header)

        pkg_strings, sizes = tabulate(data)

        # Create and add a separator.
        if len(data) > 0:
            pkg_strings.insert(1, " ".join(map(lambda x: '-' * x, sizes)))

        for val in pkg_strings:
            logger.info(val)


def tabulate(vals):
    """Render ``vals`` (a list of rows) as aligned columns.

    :param vals: non-empty list of rows; rows may have differing lengths and
        cells are converted with ``str()``.
    :return: ``(lines, sizes)`` where ``lines`` is one space-joined string per
        row and ``sizes`` is the computed width of each column.
    """
    # From pfmoore on GitHub:
    # https://github.com/pypa/pip/issues/3651#issuecomment-216932564
    assert len(vals) > 0

    sizes = [0] * max(len(x) for x in vals)
    for row in vals:
        # zip_longest pads short rows with None.  A missing cell renders as
        # '' below, so it must contribute width 0 here -- not len('None').
        sizes = [max(s, len(str(c)) if c is not None else 0)
                 for s, c in zip_longest(sizes, row)]

    result = []
    for row in vals:
        display = " ".join([str(c).ljust(s) if c is not None else ''
                            for s, c in zip_longest(sizes, row)])
        result.append(display)

    return result, sizes


def format_for_columns(pkgs, options):
    """
    Convert the package data into something usable
    by output_package_listing_columns.

    Returns ``(data, header)`` where ``header`` is the column-title row and
    ``data`` holds one row per package.
    """
    running_outdated = options.outdated

    # The header grows with the amount of information being displayed.
    if running_outdated:
        header = ["Package", "Version", "Latest", "Type"]
    else:
        header = ["Package", "Version"]

    # A Location column is shown as soon as any package is editable.
    if any(dist_is_editable(x) for x in pkgs):
        header.append("Location")

    data = []
    for proj in pkgs:
        row = [proj.project_name, proj.version]

        # For the 'outdated' listing, add the latest version and file type.
        if running_outdated:
            row.extend([proj.latest_version, proj.latest_filetype])

        if dist_is_editable(proj):
            row.append(proj.location)

        data.append(row)

    return data, header


def format_for_json(packages, options):
    """Serialize ``packages`` to a JSON array of objects with ``name`` and
    ``version`` keys (plus latest-version info when --outdated is active)."""
    def _as_dict(dist):
        info = {
            'name': dist.project_name,
            'version': six.text_type(dist.version),
        }
        if options.outdated:
            info['latest_version'] = six.text_type(dist.latest_version)
            info['latest_filetype'] = dist.latest_filetype
        return info

    return json.dumps([_as_dict(dist) for dist in packages])
commands/wheel.py000064400000017061151733136200010031 0ustar00# -*- coding: utf-8 -*-
from __future__ import absolute_import

import logging
import os
import warnings

from pip.basecommand import RequirementCommand
from pip.exceptions import CommandError, PreviousBuildDirError
from pip.req import RequirementSet
from pip.utils import import_or_raise
from pip.utils.build import BuildDirectory
from pip.utils.deprecation import RemovedInPip10Warning
from pip.wheel import WheelCache, WheelBuilder
from pip import cmdoptions


logger = logging.getLogger(__name__)


class WheelCommand(RequirementCommand):
    """
    Build Wheel archives for your requirements and dependencies.

    Wheel is a built-package format, and offers the advantage of not
    recompiling your software during every install. For more details, see the
    wheel docs: https://wheel.readthedocs.io/en/latest/

    Requirements: setuptools>=0.8, and wheel.

    'pip wheel' uses the bdist_wheel setuptools extension from the wheel
    package to build individual wheels.

    """

    name = 'wheel'
    usage = """
      %prog [options] <requirement specifier> ...
      %prog [options] -r <requirements file> ...
      %prog [options] [-e] <vcs project url> ...
      %prog [options] [-e] <local project path> ...
      %prog [options] <archive url/path> ..."""

    summary = 'Build wheels from your requirements.'

    def __init__(self, *args, **kw):
        """Register the command-line options understood by ``pip wheel``."""
        super(WheelCommand, self).__init__(*args, **kw)

        cmd_opts = self.cmd_opts

        cmd_opts.add_option(
            '-w', '--wheel-dir',
            dest='wheel_dir',
            metavar='dir',
            default=os.curdir,
            help=("Build wheels into <dir>, where the default is the "
                  "current working directory."),
        )
        # Shared options defined once in pip.cmdoptions and instantiated here.
        cmd_opts.add_option(cmdoptions.use_wheel())
        cmd_opts.add_option(cmdoptions.no_use_wheel())
        cmd_opts.add_option(cmdoptions.no_binary())
        cmd_opts.add_option(cmdoptions.only_binary())
        cmd_opts.add_option(
            '--build-option',
            dest='build_options',
            metavar='options',
            action='append',
            help="Extra arguments to be supplied to 'setup.py bdist_wheel'.")
        cmd_opts.add_option(cmdoptions.constraints())
        cmd_opts.add_option(cmdoptions.editable())
        cmd_opts.add_option(cmdoptions.requirements())
        cmd_opts.add_option(cmdoptions.src())
        cmd_opts.add_option(cmdoptions.ignore_requires_python())
        cmd_opts.add_option(cmdoptions.no_deps())
        cmd_opts.add_option(cmdoptions.build_dir())

        cmd_opts.add_option(
            '--global-option',
            dest='global_options',
            action='append',
            metavar='options',
            help="Extra global options to be supplied to the setup.py "
            "call before the 'bdist_wheel' command.")

        cmd_opts.add_option(
            '--pre',
            action='store_true',
            default=False,
            help=("Include pre-release and development versions. By default, "
                  "pip only finds stable versions."),
        )

        cmd_opts.add_option(cmdoptions.no_clean())
        cmd_opts.add_option(cmdoptions.require_hashes())

        index_opts = cmdoptions.make_option_group(
            cmdoptions.index_group,
            self.parser,
        )

        self.parser.insert_option_group(0, index_opts)
        self.parser.insert_option_group(0, cmd_opts)

    def check_required_packages(self):
        """Raise CommandError unless the 'wheel' package and a dist-info
        capable setuptools (>= 0.8) are importable."""
        import_or_raise(
            'wheel.bdist_wheel',
            CommandError,
            "'pip wheel' requires the 'wheel' package. To fix this, run: "
            "pip install wheel"
        )
        pkg_resources = import_or_raise(
            'pkg_resources',
            CommandError,
            "'pip wheel' requires setuptools >= 0.8 for dist-info support."
            " To fix this, run: pip install --upgrade setuptools"
        )
        # DistInfoDistribution only exists in setuptools >= 0.8.
        if not hasattr(pkg_resources, 'DistInfoDistribution'):
            raise CommandError(
                "'pip wheel' requires setuptools >= 0.8 for dist-info "
                "support. To fix this, run: pip install --upgrade "
                "setuptools"
            )

    def run(self, options, args):
        """Resolve the requested requirements and build a wheel for each.

        Raises CommandError when any wheel fails to build.
        """
        self.check_required_packages()
        cmdoptions.resolve_wheel_no_use_binary(options)
        cmdoptions.check_install_build_global(options)

        # The --allow-* flags are accepted but no longer do anything.
        if options.allow_external:
            warnings.warn(
                "--allow-external has been deprecated and will be removed in "
                "the future. Due to changes in the repository protocol, it no "
                "longer has any effect.",
                RemovedInPip10Warning,
            )

        if options.allow_all_external:
            warnings.warn(
                "--allow-all-external has been deprecated and will be removed "
                "in the future. Due to changes in the repository protocol, it "
                "no longer has any effect.",
                RemovedInPip10Warning,
            )

        if options.allow_unverified:
            warnings.warn(
                "--allow-unverified has been deprecated and will be removed "
                "in the future. Due to changes in the repository protocol, it "
                "no longer has any effect.",
                RemovedInPip10Warning,
            )

        index_urls = [options.index_url] + options.extra_index_urls
        if options.no_index:
            logger.debug('Ignoring indexes: %s', ','.join(index_urls))
            index_urls = []

        if options.build_dir:
            options.build_dir = os.path.abspath(options.build_dir)

        options.src_dir = os.path.abspath(options.src_dir)

        with self._build_session(options) as session:
            finder = self._build_package_finder(options, session)
            # Only auto-delete the build dir when the user supplied neither
            # --no-clean nor an explicit --build directory.
            build_delete = (not (options.no_clean or options.build_dir))
            wheel_cache = WheelCache(options.cache_dir, options.format_control)
            with BuildDirectory(options.build_dir,
                                delete=build_delete) as build_dir:
                requirement_set = RequirementSet(
                    build_dir=build_dir,
                    src_dir=options.src_dir,
                    download_dir=None,
                    ignore_dependencies=options.ignore_dependencies,
                    ignore_installed=True,
                    ignore_requires_python=options.ignore_requires_python,
                    isolated=options.isolated_mode,
                    session=session,
                    wheel_cache=wheel_cache,
                    wheel_download_dir=options.wheel_dir,
                    require_hashes=options.require_hashes
                )

                self.populate_requirement_set(
                    requirement_set, args, options, finder, session, self.name,
                    wheel_cache
                )

                if not requirement_set.has_requirements:
                    return

                try:
                    # build wheels
                    wb = WheelBuilder(
                        requirement_set,
                        finder,
                        build_options=options.build_options or [],
                        global_options=options.global_options or [],
                    )
                    if not wb.build():
                        raise CommandError(
                            "Failed to build one or more wheels"
                        )
                except PreviousBuildDirError:
                    # Keep the build dir around so the user can inspect it.
                    options.no_clean = True
                    raise
                finally:
                    if not options.no_clean:
                        requirement_set.cleanup_files()
commands/completion.py000064400000004625151733136200011100 0ustar00from __future__ import absolute_import

import sys
from pip.basecommand import Command

# Wrapper template: brackets a shell-specific script with marker comments so
# an installed snippet can be recognised in a user's shell rc file.
BASE_COMPLETION = """
# pip %(shell)s completion start%(script)s# pip %(shell)s completion end
"""

# Shell-completion scripts keyed by shell name.  Each one re-invokes pip with
# PIP_AUTO_COMPLETE=1 so pip itself computes the completions.
COMPLETION_SCRIPTS = {
    'bash': """
_pip_completion()
{
    COMPREPLY=( $( COMP_WORDS="${COMP_WORDS[*]}" \\
                   COMP_CWORD=$COMP_CWORD \\
                   PIP_AUTO_COMPLETE=1 $1 ) )
}
complete -o default -F _pip_completion pip
""", 'zsh': """
function _pip_completion {
  local words cword
  read -Ac words
  read -cn cword
  reply=( $( COMP_WORDS="$words[*]" \\
             COMP_CWORD=$(( cword-1 )) \\
             PIP_AUTO_COMPLETE=1 $words[1] ) )
}
compctl -K _pip_completion pip
""", 'fish': """
function __fish_complete_pip
    set -lx COMP_WORDS (commandline -o) ""
    set -lx COMP_CWORD (math (contains -i -- (commandline -t) $COMP_WORDS)-1)
    set -lx PIP_AUTO_COMPLETE 1
    string split \  -- (eval $COMP_WORDS[1])
end
complete -fa "(__fish_complete_pip)" -c pip
"""}


class CompletionCommand(Command):
    """A helper command to be used for command completion."""
    name = 'completion'
    summary = 'A helper command used for command completion.'

    def __init__(self, *args, **kw):
        """Register one store_const option per supported shell."""
        super(CompletionCommand, self).__init__(*args, **kw)

        cmd_opts = self.cmd_opts

        # All three options write the shell's name into options.shell.
        for long_opt, short_opt, shell_name in (
                ('--bash', '-b', 'bash'),
                ('--zsh', '-z', 'zsh'),
                ('--fish', '-f', 'fish')):
            cmd_opts.add_option(
                long_opt, short_opt,
                action='store_const',
                const=shell_name,
                dest='shell',
                help='Emit completion code for %s' % shell_name)

        self.parser.insert_option_group(0, cmd_opts)

    def run(self, options, args):
        """Prints the completion code of the given shell"""
        known_shells = COMPLETION_SCRIPTS.keys()
        shell_flags = ['--' + name for name in sorted(known_shells)]
        if options.shell not in known_shells:
            sys.stderr.write(
                'ERROR: You must pass %s\n' % ' or '.join(shell_flags)
            )
            return
        script = COMPLETION_SCRIPTS.get(options.shell, '')
        print(BASE_COMPLETION % {'script': script, 'shell': options.shell})
pep425tags.py000064400000025344151733136200007025 0ustar00"""Generate and work with PEP 425 Compatibility Tags."""
from __future__ import absolute_import

import re
import sys
import warnings
import platform
import logging

try:
    import sysconfig
except ImportError:  # pragma nocover
    # Python < 2.7
    import distutils.sysconfig as sysconfig
import distutils.util

from pip.compat import OrderedDict
import pip.utils.glibc

logger = logging.getLogger(__name__)

_osx_arch_pat = re.compile(r'(.+)_(\d+)_(\d+)_(.+)')


def get_config_var(var):
    """Return ``sysconfig.get_config_var(var)``, turning an IOError into a
    RuntimeWarning and a None result (see pip issue #1074)."""
    try:
        value = sysconfig.get_config_var(var)
    except IOError as exc:
        warnings.warn("{0}".format(exc), RuntimeWarning)
        value = None
    return value


def get_abbr_impl():
    """Return abbreviated implementation name.

    'pp' for PyPy, 'jy' for Jython, 'ip' for IronPython, 'cp' for CPython.
    """
    if hasattr(sys, 'pypy_version_info'):
        return 'pp'
    if sys.platform.startswith('java'):
        return 'jy'
    if sys.platform == 'cli':
        return 'ip'
    return 'cp'


def get_impl_ver():
    """Return implementation version string, e.g. '27' or '36'.

    PyPy always derives the value from its own version info rather than
    the build-time config.
    """
    version = get_config_var("py_version_nodot")
    if version and get_abbr_impl() != 'pp':
        return version
    return ''.join(map(str, get_impl_version_info()))


def get_impl_version_info():
    """Return sys.version_info-like tuple for use in decrementing the minor
    version."""
    if get_abbr_impl() != 'pp':
        return sys.version_info[0], sys.version_info[1]
    # PyPy versions independently of the Python language level it targets;
    # see https://github.com/pypa/pip/issues/2882
    return (sys.version_info[0], sys.pypy_version_info.major,
            sys.pypy_version_info.minor)


def get_impl_tag():
    """Return the Tag for this specific implementation, e.g. 'cp27'."""
    return '%s%s' % (get_abbr_impl(), get_impl_ver())


def get_flag(var, fallback, expected=True, warn=True):
    """Use a fallback method for determining SOABI flags if the needed config
    var is unset or unavailable.

    :param fallback: zero-argument callable consulted when the config var
        is missing.
    :param expected: value the config var is compared against.
    :param warn: when True, log a debug note before falling back.
    """
    val = get_config_var(var)
    if val is not None:
        return val == expected
    if warn:
        logger.debug("Config variable '%s' is unset, Python ABI tag may "
                     "be incorrect", var)
    return fallback()


def get_abi_tag():
    """Return the ABI tag based on SOABI (if available) or emulate SOABI
    (CPython 2, PyPy)."""
    soabi = get_config_var('SOABI')
    impl = get_abbr_impl()
    if not soabi and impl in ('cp', 'pp') and hasattr(sys, 'maxunicode'):
        # No SOABI available: reconstruct the tag from the debug / pymalloc /
        # wide-unicode build flags.
        is_cpython = impl == 'cp'
        debug = 'd' if get_flag(
            'Py_DEBUG',
            lambda: hasattr(sys, 'gettotalrefcount'),
            warn=is_cpython) else ''
        pymalloc = 'm' if get_flag(
            'WITH_PYMALLOC',
            lambda: impl == 'cp',
            warn=is_cpython) else ''
        wide_unicode = get_flag(
            'Py_UNICODE_SIZE',
            lambda: sys.maxunicode == 0x10ffff,
            expected=4,
            warn=(is_cpython and sys.version_info < (3, 3)))
        # The 'u' suffix only ever applied before Python 3.3 (PEP 393).
        ucs4 = 'u' if wide_unicode and sys.version_info < (3, 3) else ''
        return '%s%s%s%s%s' % (impl, get_impl_ver(), debug, pymalloc, ucs4)
    if soabi and soabi.startswith('cpython-'):
        return 'cp' + soabi.split('-')[1]
    if soabi:
        return soabi.replace('.', '_').replace('-', '_')
    return None


def _is_running_32bit():
    return sys.maxsize == 2147483647


def get_platform():
    """Return our platform name 'win32', 'linux_x86_64'"""
    if sys.platform != 'darwin':
        # XXX remove distutils dependency
        plat = distutils.util.get_platform()
        result = plat.replace('.', '_').replace('-', '_')
        if result == "linux_x86_64" and _is_running_32bit():
            # 32 bit Python program (running on a 64 bit Linux): pip should
            # only install and run 32 bit compiled extensions in that case.
            result = "linux_i686"
        return result

    # distutils.util.get_platform() returns the release based on the value
    # of MACOSX_DEPLOYMENT_TARGET on which Python was built, which may
    # be significantly older than the user's current machine.
    release, _, machine = platform.mac_ver()
    split_ver = release.split('.')

    # A 32-bit interpreter on 64-bit hardware can only use 32-bit wheels.
    if _is_running_32bit():
        if machine == "x86_64":
            machine = "i386"
        elif machine == "ppc64":
            machine = "ppc"

    return 'macosx_{0}_{1}_{2}'.format(split_ver[0], split_ver[1], machine)


def is_manylinux1_compatible():
    """Whether this interpreter can run manylinux1 wheels."""
    # manylinux1 is only defined for x86 (32- and 64-bit) Linux.
    if get_platform() not in ("linux_x86_64", "linux_i686"):
        return False

    # A distro can explicitly declare (in)compatibility via the optional
    # _manylinux module.
    try:
        import _manylinux
    except ImportError:
        pass
    else:
        try:
            return bool(_manylinux.manylinux1_compatible)
        except AttributeError:
            # Fall through to heuristic check below
            pass

    # Check glibc version. CentOS 5 uses glibc 2.5.
    return pip.utils.glibc.have_compatible_glibc(2, 5)


def get_darwin_arches(major, minor, machine):
    """Return a list of supported arches (including group arches) for
    the given major, minor and machine architecture of an macOS machine.
    """
    # "Group" (fat binary) arches and the single arches each one contains,
    # in the order they should be emitted.
    groups = OrderedDict([
        ("fat", ("i386", "ppc")),
        ("intel", ("x86_64", "i386")),
        ("fat64", ("x86_64", "ppc64")),
        ("fat32", ("x86_64", "i386", "ppc")),
    ])

    def _supports_arch(major, minor, arch):
        # Looking at the application support for macOS versions in the chart
        # provided by https://en.wikipedia.org/wiki/OS_X#Versions it appears
        # our timeline looks roughly like:
        #
        # 10.0 - Introduces ppc support.
        # 10.4 - Introduces ppc64, i386, and x86_64 support, however the ppc64
        #        and x86_64 support is CLI only, and cannot be used for GUI
        #        applications.
        # 10.5 - Extends ppc64 and x86_64 support to cover GUI applications.
        # 10.6 - Drops support for ppc64
        # 10.7 - Drops support for ppc
        #
        # Given that we do not know if we're installing a CLI or a GUI
        # application, we must be conservative and assume it might be a GUI
        # application and behave as if ppc64 and x86_64 support did not occur
        # until 10.5.
        #
        # Note: The above information is taken from the "Application support"
        #       column in the chart not the "Processor support" since I believe
        #       that we care about what instruction sets an application can use
        #       not which processors the OS supports.
        if arch == 'ppc':
            return (major, minor) <= (10, 5)
        if arch == 'ppc64':
            return (major, minor) == (10, 5)
        if arch == 'i386':
            return (major, minor) >= (10, 4)
        if arch == 'x86_64':
            return (major, minor) >= (10, 5)
        if arch in groups:
            # A group arch works when any of its member arches does.
            return any(
                _supports_arch(major, minor, garch)
                for garch in groups[arch])
        return False

    arches = []
    if _supports_arch(major, minor, machine):
        arches.append(machine)
    # Group arches that contain this machine and are themselves supported.
    arches.extend(
        garch for garch in groups
        if machine in groups[garch] and _supports_arch(major, minor, garch))
    # 'universal' binaries run everywhere.
    arches.append('universal')

    return arches


def get_supported(versions=None, noarch=False, platform=None,
                  impl=None, abi=None):
    """Return a list of supported tags for each version specified in
    `versions`.

    :param versions: a list of string versions, of the form ["33", "32"],
        or None. The first version will be assumed to support our ABI.
    :param noarch: when True, emit only platform-independent tags.
    :param platform: specify the exact platform you want valid
        tags for, or None. If None, use the local system platform.
    :param impl: specify the exact implementation you want valid
        tags for, or None. If None, use the local interpreter impl.
    :param abi: specify the exact abi you want valid
        tags for, or None. If None, use the local interpreter abi.
    """
    supported = []

    # Versions must be given with respect to the preference
    if versions is None:
        versions = []
        version_info = get_impl_version_info()
        major = version_info[:-1]
        # Support all previous minor Python versions.
        for minor in range(version_info[-1], -1, -1):
            versions.append(''.join(map(str, major + (minor,))))

    impl = impl or get_abbr_impl()

    # ABIs in preference order: the explicit/detected ABI first, then any
    # abi3-style ABIs, then 'none'.
    abis = []

    abi = abi or get_abi_tag()
    if abi:
        abis[0:0] = [abi]

    # Collect stable-ABI suffixes (e.g. 'abi3') from the extension-module
    # suffixes the interpreter accepts.
    # NOTE(review): ``imp`` is deprecated on Python 3 (and removed in 3.12);
    # acceptable for the interpreters this code targets — confirm if porting.
    abi3s = set()
    import imp
    for suffix in imp.get_suffixes():
        if suffix[0].startswith('.abi'):
            abi3s.add(suffix[0].split('.', 2)[1])

    abis.extend(sorted(list(abi3s)))

    abis.append('none')

    if not noarch:
        arch = platform or get_platform()
        if arch.startswith('macosx'):
            # support macosx-10.6-intel on macosx-10.9-x86_64
            match = _osx_arch_pat.match(arch)
            if match:
                name, major, minor, actual_arch = match.groups()
                tpl = '{0}_{1}_%i_%s'.format(name, major)
                arches = []
                for m in reversed(range(int(minor) + 1)):
                    for a in get_darwin_arches(int(major), m, actual_arch):
                        arches.append(tpl % (m, a))
            else:
                # arch pattern didn't match (?!)
                arches = [arch]
        elif platform is None and is_manylinux1_compatible():
            # Prefer manylinux1 wheels over plain linux ones.
            arches = [arch.replace('linux', 'manylinux1'), arch]
        else:
            arches = [arch]

        # Current version, current API (built specifically for our Python):
        for abi in abis:
            for arch in arches:
                supported.append(('%s%s' % (impl, versions[0]), abi, arch))

        # abi3 modules compatible with older version of Python
        for version in versions[1:]:
            # abi3 was introduced in Python 3.2
            if version in ('31', '30'):
                break
            for abi in abi3s:   # empty set if not Python 3
                for arch in arches:
                    supported.append(("%s%s" % (impl, version), abi, arch))

        # Has binaries, does not use the Python API:
        for arch in arches:
            supported.append(('py%s' % (versions[0][0]), 'none', arch))

    # No abi / arch, but requires our implementation:
    supported.append(('%s%s' % (impl, versions[0]), 'none', 'any'))
    # Tagged specifically as being cross-version compatible
    # (with just the major version specified)
    supported.append(('%s%s' % (impl, versions[0][0]), 'none', 'any'))

    # No abi / arch, generic Python
    for i, version in enumerate(versions):
        supported.append(('py%s' % (version,), 'none', 'any'))
        if i == 0:
            supported.append(('py%s' % (version[0]), 'none', 'any'))

    return supported

# Tag lists for the running interpreter, pre-computed at import time.
supported_tags = get_supported()
supported_tags_noarch = get_supported(noarch=True)

# Implementation tag of the running interpreter, e.g. 'cp27'.
implementation_tag = get_impl_tag()
wheel.py000064400000077037151733136200006241 0ustar00"""
Support for installing and building the "wheel" binary package format.
"""
from __future__ import absolute_import

import compileall
import csv
import errno
import functools
import hashlib
import logging
import os
import os.path
import re
import shutil
import stat
import sys
import tempfile
import warnings

from base64 import urlsafe_b64encode
from email.parser import Parser

from pip._vendor.six import StringIO

import pip
from pip.compat import expanduser
from pip.download import path_to_url, unpack_url
from pip.exceptions import (
    InstallationError, InvalidWheelFilename, UnsupportedWheel)
from pip.locations import distutils_scheme, PIP_DELETE_MARKER_FILENAME
from pip import pep425tags
from pip.utils import (
    call_subprocess, ensure_dir, captured_stdout, rmtree, read_chunks,
)
from pip.utils.ui import open_spinner
from pip.utils.logging import indent_log
from pip.utils.setuptools_build import SETUPTOOLS_SHIM
from pip._vendor.distlib.scripts import ScriptMaker
from pip._vendor import pkg_resources
from pip._vendor.packaging.utils import canonicalize_name
from pip._vendor.six.moves import configparser


# File extension that identifies a wheel archive.
wheel_ext = '.whl'

# Highest Wheel-Version (major, minor) this code knows how to install;
# see check_compatibility() below.
VERSION_COMPATIBLE = (1, 0)


logger = logging.getLogger(__name__)


class WheelCache(object):
    """Looks up locally cached wheels built from previously seen sdists."""

    def __init__(self, cache_dir, format_control):
        """Initialise the cache.

        :param cache_dir: The root of the cache; a falsy value disables
            caching entirely.
        :param format_control: A pip.index.FormatControl object to limit
            binaries being read from the cache.
        """
        if cache_dir:
            self._cache_dir = expanduser(cache_dir)
        else:
            self._cache_dir = None
        self._format_control = format_control

    def cached_wheel(self, link, package_name):
        """Return a Link to a cached wheel for *link*, or *link* itself."""
        return cached_wheel(
            self._cache_dir,
            link,
            self._format_control,
            package_name,
        )


def _cache_for_link(cache_dir, link):
    """
    Return a directory to store cached wheels in for link.

    Because there are M wheels for any one sdist, we provide a directory
    to cache them in, and then consult that directory when looking up
    cache hits.

    We only insert things into the cache if they have plausible version
    numbers, so that we don't contaminate the cache with things that were not
    unique. E.g. ./package might have dozens of installs done for it and build
    a version of 0.0...and if we built and cached a wheel, we'd end up using
    the same wheel even if the source has been edited.

    :param cache_dir: The cache_dir being used by pip.
    :param link: The link of the sdist for which this will cache wheels.
    """

    # We want to generate an url to use as our cache key, we don't want to just
    # re-use the URL because it might have other items in the fragment and we
    # don't care about those.
    key_parts = [link.url_without_fragment]
    if link.hash_name is not None and link.hash is not None:
        key_parts.append("=".join([link.hash_name, link.hash]))
    key_url = "#".join(key_parts)

    # Encode our key url with sha224, we'll use this because it has similar
    # security properties to sha256, but with a shorter total output (and thus
    # less secure). However the differences don't make a lot of difference for
    # our use case here.
    hashed = hashlib.sha224(key_url.encode()).hexdigest()

    # We want to nest the directories some to prevent having a ton of top level
    # directories where we might run out of sub directories on some FS.
    parts = [hashed[:2], hashed[2:4], hashed[4:6], hashed[6:]]

    # Inside of the base location for cached wheels, expand our parts and join
    # them all together.
    return os.path.join(cache_dir, "wheels", *parts)


def cached_wheel(cache_dir, link, format_control, package_name):
    """Return a Link to the best cached wheel for *link*, if any.

    Returns *link* unchanged whenever the cache does not apply: caching is
    disabled, the link is already a wheel (or is not a concrete artifact),
    the package name is unknown, binaries are disabled for the package, or
    no compatible cached wheel exists.
    """
    # Caching needs a cache dir, a concrete non-wheel link, and a name.
    if not cache_dir or not link or not package_name:
        return link
    if link.is_wheel or not link.is_artifact:
        return link
    canonical_name = canonicalize_name(package_name)
    formats = pip.index.fmt_ctl_formats(format_control, canonical_name)
    if "binary" not in formats:
        return link
    root = _cache_for_link(cache_dir, link)
    try:
        entries = os.listdir(root)
    except OSError as exc:
        # A missing cache directory just means nothing was cached yet.
        if exc.errno in (errno.ENOENT, errno.ENOTDIR):
            return link
        raise
    candidates = []
    for entry in entries:
        try:
            candidate = Wheel(entry)
        except InvalidWheelFilename:
            continue
        if not candidate.supported():
            # Built for a different python/arch/etc
            continue
        candidates.append((candidate.support_index_min(), entry))
    if not candidates:
        return link
    # Pick the wheel whose tags rank best for this interpreter.
    best = min(candidates)
    return pip.index.Link(path_to_url(os.path.join(root, best[1])))


def rehash(path, algo='sha256', blocksize=1 << 20):
    """Return (encoded_digest, length) for the file at *path*.

    :param path: Path of the file to hash.
    :param algo: Any algorithm name accepted by ``hashlib.new``.
    :param blocksize: Chunk size used while streaming the file.
    :return: A 2-tuple of the RECORD-style digest string,
        ``<algo>=<urlsafe-base64-without-padding>``, and the file length
        in bytes.
    """
    h = hashlib.new(algo)
    length = 0
    with open(path, 'rb') as f:
        for block in read_chunks(f, size=blocksize):
            length += len(block)
            h.update(block)
    # Bug fix: the digest prefix must name the algorithm actually used.
    # It was previously hard-coded to 'sha256', so any caller passing a
    # different *algo* got a digest mislabeled as sha256 in RECORD.
    digest = algo + '=' + urlsafe_b64encode(
        h.digest()
    ).decode('latin1').rstrip('=')
    return (digest, length)


def open_for_csv(name, mode):
    """Open *name* with the options the csv module needs on this Python.

    The csv module wants binary mode on Python 2 but text mode with
    ``newline=''`` on Python 3.
    """
    if sys.version_info[0] < 3:
        return open(name, mode + 'b')
    return open(name, mode, newline='')


def fix_script(path):
    """Rewrite a ``#!python`` shebang in *path* to this interpreter's path.

    Return True if the file was changed, False if its first line is not a
    ``#!python`` shebang, and None if *path* is not a regular file.
    """
    # XXX RECORD hashes will need to be updated
    if not os.path.isfile(path):
        return None
    with open(path, 'rb') as script:
        firstline = script.readline()
        if not firstline.startswith(b'#!python'):
            return False
        exename = sys.executable.encode(sys.getfilesystemencoding())
        shebang = b'#!' + exename + os.linesep.encode("ascii")
        body = script.read()
    with open(path, 'wb') as script:
        script.write(shebang)
        script.write(body)
    return True

# Matches "<name>-<version>.dist-info" directory names (the version part
# is optional).
dist_info_re = re.compile(r"""^(?P<namever>(?P<name>.+?)(-(?P<ver>\d.+?))?)
                                \.dist-info$""", re.VERBOSE)


def root_is_purelib(name, wheeldir):
    """
    Return True if the wheel extracted at *wheeldir* declares
    Root-Is-Purelib in its WHEEL metadata, i.e. its files go into purelib.
    """
    folded = name.replace("-", "_")
    for entry in os.listdir(wheeldir):
        match = dist_info_re.match(entry)
        if match is None or match.group('name') != folded:
            continue
        metadata_path = os.path.join(wheeldir, entry, 'WHEEL')
        with open(metadata_path) as wheel:
            for line in wheel:
                if line.lower().rstrip() == "root-is-purelib: true":
                    return True
    return False


def get_entrypoints(filename):
    """Parse an entry_points.txt file.

    :return: Tuple of (console_scripts dict, gui_scripts dict); both are
        empty when *filename* does not exist.
    """
    if not os.path.exists(filename):
        return {}, {}

    # entry_points can be declared from an arbitrarily indented string,
    # which is not necessarily valid INI; strip each line so the parser
    # accepts the content.
    data = StringIO()
    with open(filename) as fp:
        for line in fp:
            data.write(line.strip())
            data.write("\n")
    data.seek(0)

    cp = configparser.RawConfigParser()
    # Keep option names case-sensitive: entry point names are identifiers.
    cp.optionxform = lambda option: option
    cp.readfp(data)

    console = {}
    gui = {}
    if cp.has_section('console_scripts'):
        console = dict(cp.items('console_scripts'))
    if cp.has_section('gui_scripts'):
        gui = dict(cp.items('gui_scripts'))
    return console, gui


def move_wheel_files(name, req, wheeldir, user=False, home=None, root=None,
                     pycompile=True, scheme=None, isolated=False, prefix=None, strip_file_prefix=None):
    """Install a wheel that has already been unpacked into *wheeldir*.

    :param name: Distribution name, used to locate the .dist-info dir.
    :param req: The requirement being installed (used in messages and to
        match the .dist-info directory name).
    :param wheeldir: Directory the wheel archive was unpacked into.
    :param user: Passed through to distutils_scheme() when no *scheme*.
    :param home: Passed through to distutils_scheme() when no *scheme*.
    :param root: Passed through to distutils_scheme() when no *scheme*.
    :param pycompile: Byte-compile the unpacked tree before copying it.
    :param scheme: Optional pre-computed install scheme dict; computed
        from the other location arguments when not given.
    :param isolated: Passed through to distutils_scheme() when no *scheme*.
    :param prefix: Passed through to distutils_scheme() when no *scheme*.
    :param strip_file_prefix: If set, generated-file paths starting with
        this prefix are re-rooted at os.sep in the RECORD written out.
    """

    if not scheme:
        scheme = distutils_scheme(
            name, user=user, home=home, root=root, isolated=isolated,
            prefix=prefix,
        )

    # Pure-python wheels go to purelib, wheels with binaries to platlib.
    if root_is_purelib(name, wheeldir):
        lib_dir = scheme['purelib']
    else:
        lib_dir = scheme['platlib']

    info_dir = []
    data_dirs = []
    source = wheeldir.rstrip(os.path.sep) + os.path.sep

    # Record details of the files moved
    #   installed = files copied from the wheel to the destination
    #   changed = files changed while installing (scripts #! line typically)
    #   generated = files newly generated during the install (script wrappers)
    installed = {}
    changed = set()
    generated = []

    # Compile all of the pyc files that we're going to be installing
    if pycompile:
        with captured_stdout() as stdout:
            with warnings.catch_warnings():
                warnings.filterwarnings('ignore')
                compileall.compile_dir(source, force=True, quiet=True)
        logger.debug(stdout.getvalue())

    def normpath(src, p):
        # Relative, forward-slash path as used inside RECORD.
        return os.path.relpath(src, p).replace(os.path.sep, '/')

    def record_installed(srcfile, destfile, modified=False):
        """Map archive RECORD paths to installation RECORD paths."""
        oldpath = normpath(srcfile, wheeldir)
        newpath = normpath(destfile, lib_dir)
        installed[oldpath] = newpath
        if modified:
            changed.add(destfile)

    def clobber(source, dest, is_base, fixer=None, filter=None):
        # Recursively copy *source* into *dest*, collecting the .data and
        # .dist-info dirs (when is_base), applying *fixer* to each copied
        # file and skipping files for which *filter* returns true.
        ensure_dir(dest)  # common for the 'include' path

        for dir, subdirs, files in os.walk(source):
            basedir = dir[len(source):].lstrip(os.path.sep)
            destdir = os.path.join(dest, basedir)
            if is_base and basedir.split(os.path.sep, 1)[0].endswith('.data'):
                continue
            for s in subdirs:
                destsubdir = os.path.join(dest, basedir, s)
                if is_base and basedir == '' and destsubdir.endswith('.data'):
                    data_dirs.append(s)
                    continue
                elif (is_base and
                        s.endswith('.dist-info') and
                        canonicalize_name(s).startswith(
                            canonicalize_name(req.name))):
                    assert not info_dir, ('Multiple .dist-info directories: ' +
                                          destsubdir + ', ' +
                                          ', '.join(info_dir))
                    info_dir.append(destsubdir)
            for f in files:
                # Skip unwanted files
                if filter and filter(f):
                    continue
                srcfile = os.path.join(dir, f)
                destfile = os.path.join(dest, basedir, f)
                # directory creation is lazy and after the file filtering above
                # to ensure we don't install empty dirs; empty dirs can't be
                # uninstalled.
                ensure_dir(destdir)

                # We use copyfile (not move, copy, or copy2) to be extra sure
                # that we are not moving directories over (copyfile fails for
                # directories) as well as to ensure that we are not copying
                # over any metadata because we want more control over what
                # metadata we actually copy over.
                shutil.copyfile(srcfile, destfile)

                # Copy over the metadata for the file, currently this only
                # includes the atime and mtime.
                st = os.stat(srcfile)
                if hasattr(os, "utime"):
                    os.utime(destfile, (st.st_atime, st.st_mtime))

                # If our file is executable, then make our destination file
                # executable.
                if os.access(srcfile, os.X_OK):
                    st = os.stat(srcfile)
                    permissions = (
                        st.st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
                    )
                    os.chmod(destfile, permissions)

                changed = False
                if fixer:
                    changed = fixer(destfile)
                record_installed(srcfile, destfile, changed)

    # Install the wheel root (everything outside the .data directories).
    clobber(source, lib_dir, True)

    assert info_dir, "%s .dist-info directory not found" % req

    # Get the defined entry points
    ep_file = os.path.join(info_dir[0], 'entry_points.txt')
    console, gui = get_entrypoints(ep_file)

    def is_entrypoint_wrapper(name):
        # EP, EP.exe and EP-script.py are scripts generated for
        # entry point EP by setuptools
        if name.lower().endswith('.exe'):
            matchname = name[:-4]
        elif name.lower().endswith('-script.py'):
            matchname = name[:-10]
        elif name.lower().endswith(".pya"):
            matchname = name[:-4]
        else:
            matchname = name
        # Ignore setuptools-generated scripts
        return (matchname in console or matchname in gui)

    # Install each subdirectory of the .data directories into the scheme
    # location of the same name (scripts, headers, data, ...).
    for datadir in data_dirs:
        fixer = None
        filter = None
        for subdir in os.listdir(os.path.join(wheeldir, datadir)):
            fixer = None
            if subdir == 'scripts':
                fixer = fix_script
                filter = is_entrypoint_wrapper
            source = os.path.join(wheeldir, datadir, subdir)
            dest = scheme[subdir]
            clobber(source, dest, False, fixer=fixer, filter=filter)

    maker = ScriptMaker(None, scheme['scripts'])

    # Ensure old scripts are overwritten.
    # See https://github.com/pypa/pip/issues/1800
    maker.clobber = True

    # Ensure we don't generate any variants for scripts because this is almost
    # never what somebody wants.
    # See https://bitbucket.org/pypa/distlib/issue/35/
    maker.variants = set(('', ))

    # This is required because otherwise distlib creates scripts that are not
    # executable.
    # See https://bitbucket.org/pypa/distlib/issue/32/
    maker.set_mode = True

    # Simplify the script and fix the fact that the default script swallows
    # every single stack trace.
    # See https://bitbucket.org/pypa/distlib/issue/34/
    # See https://bitbucket.org/pypa/distlib/issue/33/
    def _get_script_text(entry):
        if entry.suffix is None:
            raise InstallationError(
                "Invalid script entry point: %s for req: %s - A callable "
                "suffix is required. Cf https://packaging.python.org/en/"
                "latest/distributing.html#console-scripts for more "
                "information." % (entry, req)
            )
        return maker.script_template % {
            "module": entry.prefix,
            "import_name": entry.suffix.split(".")[0],
            "func": entry.suffix,
        }

    maker._get_script_text = _get_script_text
    maker.script_template = """# -*- coding: utf-8 -*-
import re
import sys

from %(module)s import %(import_name)s

if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(%(func)s())
"""

    # Special case pip and setuptools to generate versioned wrappers
    #
    # The issue is that some projects (specifically, pip and setuptools) use
    # code in setup.py to create "versioned" entry points - pip2.7 on Python
    # 2.7, pip3.3 on Python 3.3, etc. But these entry points are baked into
    # the wheel metadata at build time, and so if the wheel is installed with
    # a *different* version of Python the entry points will be wrong. The
    # correct fix for this is to enhance the metadata to be able to describe
    # such versioned entry points, but that won't happen till Metadata 2.0 is
    # available.
    # In the meantime, projects using versioned entry points will either have
    # incorrect versioned entry points, or they will not be able to distribute
    # "universal" wheels (i.e., they will need a wheel per Python version).
    #
    # Because setuptools and pip are bundled with _ensurepip and virtualenv,
    # we need to use universal wheels. So, as a stopgap until Metadata 2.0, we
    # override the versioned entry points in the wheel and generate the
    # correct ones. This code is purely a short-term measure until Metadata 2.0
    # is available.
    #
    # To add the level of hack in this section of code, in order to support
    # ensurepip this code will look for an ``ENSUREPIP_OPTIONS`` environment
    # variable which will control which version scripts get installed.
    #
    # ENSUREPIP_OPTIONS=altinstall
    #   - Only pipX.Y and easy_install-X.Y will be generated and installed
    # ENSUREPIP_OPTIONS=install
    #   - pipX.Y, pipX, easy_install-X.Y will be generated and installed. Note
    #     that this option is technically if ENSUREPIP_OPTIONS is set and is
    #     not altinstall
    # DEFAULT
    #   - The default behavior is to install pip, pipX, pipX.Y, easy_install
    #     and easy_install-X.Y.
    pip_script = console.pop('pip', None)
    if pip_script:
        if "ENSUREPIP_OPTIONS" not in os.environ:
            spec = 'pip = ' + pip_script
            generated.extend(maker.make(spec))

        if os.environ.get("ENSUREPIP_OPTIONS", "") != "altinstall":
            # sys.version starts with 'X.Y.Z...' so [:1] is the major
            # version, e.g. 'pip2'.
            spec = 'pip%s = %s' % (sys.version[:1], pip_script)
            generated.extend(maker.make(spec))

        # [:3] is 'X.Y', e.g. 'pip2.7'.
        spec = 'pip%s = %s' % (sys.version[:3], pip_script)
        generated.extend(maker.make(spec))
        # Delete any other versioned pip entry points
        pip_ep = [k for k in console if re.match(r'pip(\d(\.\d)?)?$', k)]
        for k in pip_ep:
            del console[k]
    easy_install_script = console.pop('easy_install', None)
    if easy_install_script:
        if "ENSUREPIP_OPTIONS" not in os.environ:
            spec = 'easy_install = ' + easy_install_script
            generated.extend(maker.make(spec))

        spec = 'easy_install-%s = %s' % (sys.version[:3], easy_install_script)
        generated.extend(maker.make(spec))
        # Delete any other versioned easy_install entry points
        easy_install_ep = [
            k for k in console if re.match(r'easy_install(-\d\.\d)?$', k)
        ]
        for k in easy_install_ep:
            del console[k]

    # Generate the console and GUI entry points specified in the wheel
    if len(console) > 0:
        generated.extend(
            maker.make_multiple(['%s = %s' % kv for kv in console.items()])
        )
    if len(gui) > 0:
        generated.extend(
            maker.make_multiple(
                ['%s = %s' % kv for kv in gui.items()],
                {'gui': True}
            )
        )

    # Record pip as the installer
    installer = os.path.join(info_dir[0], 'INSTALLER')
    temp_installer = os.path.join(info_dir[0], 'INSTALLER.pip')
    with open(temp_installer, 'wb') as installer_file:
        installer_file.write(b'pip\n')
    shutil.move(temp_installer, installer)
    generated.append(installer)

    # Record details of all files installed
    record = os.path.join(info_dir[0], 'RECORD')
    temp_record = os.path.join(info_dir[0], 'RECORD.pip')
    with open_for_csv(record, 'r') as record_in:
        with open_for_csv(temp_record, 'w+') as record_out:
            reader = csv.reader(record_in)
            writer = csv.writer(record_out)
            for row in reader:
                # Rewrite archive paths to installed paths; rehash files
                # the install changed (e.g. fixed-up script shebangs).
                row[0] = installed.pop(row[0], row[0])
                if row[0] in changed:
                    row[1], row[2] = rehash(row[0])
                writer.writerow(row)
            for f in generated:
                h, l = rehash(f)
                final_path = normpath(f, lib_dir)
                if strip_file_prefix and final_path.startswith(strip_file_prefix):
                    final_path = os.path.join(os.sep,
                            os.path.relpath(final_path, strip_file_prefix))
                writer.writerow((final_path, h, l))
            for f in installed:
                # Files present in the wheel's RECORD mapping but not in the
                # RECORD file itself are written with empty hash/size.
                writer.writerow((installed[f], '', ''))
    shutil.move(temp_record, record)


def _unique(fn):
    @functools.wraps(fn)
    def unique(*args, **kw):
        seen = set()
        for item in fn(*args, **kw):
            if item not in seen:
                seen.add(item)
                yield item
    return unique


# TODO: this goes somewhere besides the wheel module
@_unique
def uninstallation_paths(dist):
    """
    Yield every path to remove for *dist*, based on its RECORD.

    Every file listed in RECORD is yielded; for each ``.py`` file the
    sibling ``.pyc`` in the same directory is yielded as well.

    UninstallPathSet.add() takes care of the __pycache__ .pyc.
    """
    from pip.utils import FakeFile  # circular import
    for row in csv.reader(FakeFile(dist.get_metadata_lines('RECORD'))):
        path = os.path.join(dist.location, row[0])
        yield path
        if path.endswith('.py'):
            directory, filename = os.path.split(path)
            yield os.path.join(directory, filename[:-3] + '.pyc')


def wheel_version(source_dir):
    """
    Return the Wheel-Version of an extracted wheel, if possible.

    Otherwise, return False if we couldn't parse / extract it.

    :param source_dir: Directory containing the extracted wheel.
    :return: A tuple of ints such as ``(1, 0)``, or False on failure.
    """
    try:
        dist = [d for d in pkg_resources.find_on_path(None, source_dir)][0]

        wheel_data = dist.get_metadata('WHEEL')
        wheel_data = Parser().parsestr(wheel_data)

        version = wheel_data['Wheel-Version'].strip()
        version = tuple(map(int, version.split('.')))
        return version
    except Exception:
        # Was a bare ``except:``, which also swallowed SystemExit and
        # KeyboardInterrupt; any metadata/parsing problem is an Exception.
        return False


def check_compatibility(version, name):
    """
    Raises errors or warns if called with an incompatible Wheel-Version.

    Pip should refuse to install a Wheel-Version that's a major series
    ahead of what it's compatible with (e.g 2.0 > 1.1); and warn when
    installing a version only minor version ahead (e.g 1.2 > 1.1).

    version: a 2-tuple representing a Wheel-Version (Major, Minor)
    name: name of wheel or package to raise exception about

    :raises UnsupportedWheel: when an incompatible Wheel-Version is given
    """
    # A falsy version means the WHEEL metadata could not be parsed at all.
    if not version:
        raise UnsupportedWheel(
            "%s is in an unsupported or invalid wheel" % name
        )
    if version[0] > VERSION_COMPATIBLE[0]:
        # Major version ahead of ours: refuse outright.
        raise UnsupportedWheel(
            "%s's Wheel-Version (%s) is not compatible with this version "
            "of pip" % (name, '.'.join(map(str, version)))
        )
    if version > VERSION_COMPATIBLE:
        # Only the minor version is ahead: install, but warn.
        logger.warning(
            'Installing from a newer Wheel-Version (%s)',
            '.'.join(map(str, version)),
        )


class Wheel(object):
    """Parsed representation of a wheel filename."""

    # TODO: maybe move the install code into this class

    wheel_file_re = re.compile(
        r"""^(?P<namever>(?P<name>.+?)-(?P<ver>\d.*?))
        ((-(?P<build>\d.*?))?-(?P<pyver>.+?)-(?P<abi>.+?)-(?P<plat>.+?)
        \.whl|\.dist-info)$""",
        re.VERBOSE
    )

    def __init__(self, filename):
        """
        :raises InvalidWheelFilename: when the filename is invalid for a wheel
        """
        parsed = self.wheel_file_re.match(filename)
        if parsed is None:
            raise InvalidWheelFilename(
                "%s is not a valid wheel filename." % filename
            )
        self.filename = filename
        # we'll assume "_" means "-" due to wheel naming scheme
        # (https://github.com/pypa/pip/issues/1150)
        self.name = parsed.group('name').replace('_', '-')
        self.version = parsed.group('ver').replace('_', '-')
        self.pyversions = parsed.group('pyver').split('.')
        self.abis = parsed.group('abi').split('.')
        self.plats = parsed.group('plat').split('.')

        # All the tag combinations from this file
        self.file_tags = set()
        for py in self.pyversions:
            for abi in self.abis:
                for plat in self.plats:
                    self.file_tags.add((py, abi, plat))

    def support_index_min(self, tags=None):
        """
        Return the lowest index that one of this wheel's tags reaches in
        *tags* (default: the interpreter's supported-tags list); e.g. 0 when
        one of the file tags is the very first supported tag.  Return None
        when the wheel is not supported at all.
        """
        if tags is None:  # for mock
            tags = pep425tags.supported_tags
        indexes = [tags.index(tag) for tag in self.file_tags if tag in tags]
        if not indexes:
            return None
        return min(indexes)

    def supported(self, tags=None):
        """Is this wheel supported on this system?"""
        if tags is None:  # for mock
            tags = pep425tags.supported_tags
        return not set(tags).isdisjoint(self.file_tags)


class WheelBuilder(object):
    """Build wheels from a RequirementSet."""

    def __init__(self, requirement_set, finder, build_options=None,
                 global_options=None):
        """
        :param requirement_set: RequirementSet whose requirements are
            candidates for wheel building.
        :param finder: PackageFinder used while preparing requirements.
        :param build_options: Extra arguments appended after 'bdist_wheel'.
        :param global_options: Extra arguments placed before the setup.py
            command.
        """
        self.requirement_set = requirement_set
        self.finder = finder
        self._cache_root = requirement_set._wheel_cache._cache_dir
        self._wheel_dir = requirement_set.wheel_download_dir
        self.build_options = build_options or []
        self.global_options = global_options or []

    def _build_one(self, req, output_dir, python_tag=None):
        """Build one wheel.

        :return: The filename of the built wheel, or None if the build failed.
        """
        tempd = tempfile.mkdtemp('pip-wheel-')
        try:
            if self.__build_one(req, tempd, python_tag=python_tag):
                try:
                    wheel_name = os.listdir(tempd)[0]
                    wheel_path = os.path.join(output_dir, wheel_name)
                    shutil.move(os.path.join(tempd, wheel_name), wheel_path)
                    logger.info('Stored in directory: %s', output_dir)
                    return wheel_path
                except Exception:
                    # Best effort: fall through to cleanup.  This was a bare
                    # ``except:``, which also swallowed KeyboardInterrupt and
                    # SystemExit.
                    pass
            # Ignore return, we can't do anything else useful.
            self._clean_one(req)
            return None
        finally:
            rmtree(tempd)

    def _base_setup_args(self, req):
        # Command prefix shared by the build and clean invocations.
        # -u: unbuffered output so the spinner/logging stays live.
        return [
            sys.executable, "-u", '-c',
            SETUPTOOLS_SHIM % req.setup_py
        ] + list(self.global_options)

    def __build_one(self, req, tempd, python_tag=None):
        """Run 'setup.py bdist_wheel' for *req*; return True on success."""
        base_args = self._base_setup_args(req)

        spin_message = 'Running setup.py bdist_wheel for %s' % (req.name,)
        with open_spinner(spin_message) as spinner:
            logger.debug('Destination directory: %s', tempd)
            wheel_args = base_args + ['bdist_wheel', '-d', tempd] \
                + self.build_options

            if python_tag is not None:
                wheel_args += ["--python-tag", python_tag]

            try:
                call_subprocess(wheel_args, cwd=req.setup_py_dir,
                                show_stdout=False, spinner=spinner)
                return True
            except Exception:
                # Was a bare ``except:``; a failed build should be reported,
                # not mask KeyboardInterrupt/SystemExit.
                spinner.finish("error")
                logger.error('Failed building wheel for %s', req.name)
                return False

    def _clean_one(self, req):
        """Run 'setup.py clean --all' for *req*; return True on success."""
        base_args = self._base_setup_args(req)

        logger.info('Running setup.py clean for %s', req.name)
        clean_args = base_args + ['clean', '--all']
        try:
            call_subprocess(clean_args, cwd=req.source_dir, show_stdout=False)
            return True
        except Exception:
            # Was a bare ``except:`` (see __build_one).
            logger.error('Failed cleaning build dir for %s', req.name)
            return False

    def build(self, autobuilding=False):
        """Build wheels.

        :param autobuilding: If True, wheels are built into the wheel cache
            and each built wheel replaces the sdist it was built from, in
            preparation for installation.  (The docstring previously
            documented a nonexistent ``unpack`` parameter.)
        :return: True if all the wheels built correctly.
        """
        assert self._wheel_dir or (autobuilding and self._cache_root)
        # unpack sdists and constructs req set
        self.requirement_set.prepare_files(self.finder)

        reqset = self.requirement_set.requirements.values()

        buildset = []
        for req in reqset:
            if req.constraint:
                continue
            if req.is_wheel:
                if not autobuilding:
                    logger.info(
                        'Skipping %s, due to already being wheel.', req.name)
            elif autobuilding and req.editable:
                pass
            elif autobuilding and req.link and not req.link.is_artifact:
                pass
            elif autobuilding and not req.source_dir:
                pass
            else:
                if autobuilding:
                    link = req.link
                    base, ext = link.splitext()
                    if pip.index.egg_info_matches(base, None, link) is None:
                        # Doesn't look like a package - don't autobuild a wheel
                        # because we'll have no way to lookup the result sanely
                        continue
                    if "binary" not in pip.index.fmt_ctl_formats(
                            self.finder.format_control,
                            canonicalize_name(req.name)):
                        logger.info(
                            "Skipping bdist_wheel for %s, due to binaries "
                            "being disabled for it.", req.name)
                        continue
                buildset.append(req)

        if not buildset:
            return True

        # Build the wheels.
        logger.info(
            'Building wheels for collected packages: %s',
            ', '.join([req.name for req in buildset]),
        )
        with indent_log():
            build_success, build_failure = [], []
            for req in buildset:
                python_tag = None
                if autobuilding:
                    python_tag = pep425tags.implementation_tag
                    output_dir = _cache_for_link(self._cache_root, req.link)
                    try:
                        ensure_dir(output_dir)
                    except OSError as e:
                        logger.warning("Building wheel for %s failed: %s",
                                       req.name, e)
                        build_failure.append(req)
                        continue
                else:
                    output_dir = self._wheel_dir
                wheel_file = self._build_one(
                    req, output_dir,
                    python_tag=python_tag,
                )
                if wheel_file:
                    build_success.append(req)
                    if autobuilding:
                        # XXX: This is mildly duplicative with prepare_files,
                        # but not close enough to pull out to a single common
                        # method.
                        # The code below assumes temporary source dirs -
                        # prevent it doing bad things.
                        if req.source_dir and not os.path.exists(os.path.join(
                                req.source_dir, PIP_DELETE_MARKER_FILENAME)):
                            raise AssertionError(
                                "bad source dir - missing marker")
                        # Delete the source we built the wheel from
                        req.remove_temporary_source()
                        # set the build directory again - name is known from
                        # the work prepare_files did.
                        req.source_dir = req.build_location(
                            self.requirement_set.build_dir)
                        # Update the link for this.
                        req.link = pip.index.Link(
                            path_to_url(wheel_file))
                        assert req.link.is_wheel
                        # extract the wheel into the dir
                        unpack_url(
                            req.link, req.source_dir, None, False,
                            session=self.requirement_set.session)
                else:
                    build_failure.append(req)

        # notify success/failure
        if build_success:
            logger.info(
                'Successfully built %s',
                ' '.join([req.name for req in build_success]),
            )
        if build_failure:
            logger.info(
                'Failed to build %s',
                ' '.join([req.name for req in build_failure]),
            )
        # Return True if all builds were successful
        return len(build_failure) == 0
index.py000064400000121127151733136200006232 0ustar00"""Routines related to PyPI, indexes"""
from __future__ import absolute_import

import logging
import cgi
from collections import namedtuple
import itertools
import sys
import os
import re
import mimetypes
import posixpath
import warnings

from pip._vendor.six.moves.urllib import parse as urllib_parse
from pip._vendor.six.moves.urllib import request as urllib_request

from pip.compat import ipaddress
from pip.utils import (
    cached_property, splitext, normalize_path,
    ARCHIVE_EXTENSIONS, SUPPORTED_EXTENSIONS,
)
from pip.utils.deprecation import RemovedInPip10Warning
from pip.utils.logging import indent_log
from pip.utils.packaging import check_requires_python
from pip.exceptions import (
    DistributionNotFound, BestVersionAlreadyInstalled, InvalidWheelFilename,
    UnsupportedWheel,
)
from pip.download import HAS_TLS, is_url, path_to_url, url_to_path
from pip.wheel import Wheel, wheel_ext
from pip.pep425tags import get_supported
from pip._vendor import html5lib, requests, six
from pip._vendor.packaging.version import parse as parse_version
from pip._vendor.packaging.utils import canonicalize_name
from pip._vendor.packaging import specifiers
from pip._vendor.requests.exceptions import SSLError
from pip._vendor.distlib.compat import unescape


# Public API of this module when star-imported; FormatControl and
# fmt_ctl_handle_mutual_exclude are defined elsewhere in this file.
__all__ = ['FormatControl', 'fmt_ctl_handle_mutual_exclude', 'PackageFinder']


# Origins (scheme, host, port) that may be used without TLS; "*" is a
# wildcard and a None port matches only an absent port.
SECURE_ORIGINS = [
    # protocol, hostname, port
    # Taken from Chrome's list of secure origins (See: http://bit.ly/1qrySKC)
    ("https", "*", "*"),
    ("*", "localhost", "*"),
    ("*", "127.0.0.0/8", "*"),
    ("*", "::1/128", "*"),
    ("file", "*", None),
    # ssh is always secure.
    ("ssh", "*", "*"),
]


# Module-level logger; configuration (handlers, level) is the caller's job.
logger = logging.getLogger(__name__)


class InstallationCandidate(object):
    """One installable artifact for a project: name, parsed version, link.

    Candidates hash and rich-compare through a single
    (project, version, location) key, so lists of them can be sorted,
    deduplicated, and passed to max() directly.
    """

    def __init__(self, project, version, location):
        self.project = project
        self.version = parse_version(version)
        self.location = location
        # Single tuple backing __hash__ and every comparison below.
        self._key = (self.project, self.version, self.location)

    def __repr__(self):
        return "<InstallationCandidate({0!r}, {1!r}, {2!r})>".format(
            self.project, self.version, self.location,
        )

    def __hash__(self):
        return hash(self._key)

    def _compare(self, other, method):
        # Defer to the reflected operation for foreign types.
        if not isinstance(other, InstallationCandidate):
            return NotImplemented
        return method(self._key, other._key)

    def __eq__(self, other):
        return self._compare(other, lambda a, b: a == b)

    def __ne__(self, other):
        return self._compare(other, lambda a, b: a != b)

    def __lt__(self, other):
        return self._compare(other, lambda a, b: a < b)

    def __le__(self, other):
        return self._compare(other, lambda a, b: a <= b)

    def __ge__(self, other):
        return self._compare(other, lambda a, b: a >= b)

    def __gt__(self, other):
        return self._compare(other, lambda a, b: a > b)


class PackageFinder(object):
    """This finds packages.

    This is meant to match easy_install's technique for looking for
    packages, by reading pages and looking for appropriate links.
    """

    def __init__(self, find_links, index_urls, allow_all_prereleases=False,
                 trusted_hosts=None, process_dependency_links=False,
                 session=None, format_control=None, platform=None,
                 versions=None, abi=None, implementation=None):
        """Create a PackageFinder.

        :param find_links: Paths/urls searched directly for archives, in
            addition to the indexes; "~"-prefixed entries are expanded to
            the user's home directory when they exist there.
        :param index_urls: Base urls of the package indexes to consult.
        :param allow_all_prereleases: Whether pre-release versions are
            acceptable for every requirement.
        :param trusted_hosts: Hosts allowed over plain HTTP without the
            insecure-origin warning.
        :param process_dependency_links: Whether deprecated
            dependency_links metadata is honoured.
        :param session: The session used for all network access; required
            (a TypeError is raised when it is None).
        :param format_control: A FormatControl object or None. Used to control
            the selection of source packages / binary packages when consulting
            the index and links.
        :param platform: A string or None. If None, searches for packages
            that are supported by the current system. Otherwise, will find
            packages that can be built on the platform passed in. These
            packages will only be downloaded for distribution: they will
            not be built locally.
        :param versions: A list of strings or None. This is passed directly
            to pep425tags.py in the get_supported() method.
        :param abi: A string or None. This is passed directly
            to pep425tags.py in the get_supported() method.
        :param implementation: A string or None. This is passed directly
            to pep425tags.py in the get_supported() method.
        """
        if session is None:
            raise TypeError(
                "PackageFinder() missing 1 required keyword argument: "
                "'session'"
            )

        # Build find_links. If an argument starts with ~, it may be
        # a local file relative to a home directory. So try normalizing
        # it and if it exists, use the normalized version.
        # This is deliberately conservative - it might be fine just to
        # blindly normalize anything starting with a ~...
        self.find_links = []
        for link in find_links:
            if link.startswith('~'):
                new_link = normalize_path(link)
                if os.path.exists(new_link):
                    link = new_link
            self.find_links.append(link)

        self.index_urls = index_urls
        self.dependency_links = []

        # These are boring links that have already been logged somehow:
        self.logged_links = set()

        self.format_control = format_control or FormatControl(set(), set())

        # Domains that we won't emit warnings for when not using HTTPS
        self.secure_origins = [
            ("*", host, "*")
            for host in (trusted_hosts if trusted_hosts else [])
        ]

        # Do we want to allow _all_ pre-releases?
        self.allow_all_prereleases = allow_all_prereleases

        # Do we process dependency links?
        self.process_dependency_links = process_dependency_links

        # The Session we'll use to make requests
        self.session = session

        # The valid tags to check potential found wheel candidates against
        self.valid_tags = get_supported(
            versions=versions,
            platform=platform,
            abi=abi,
            impl=implementation,
        )

        # If we don't have TLS enabled, then WARN if anyplace we're looking
        # relies on TLS.
        if not HAS_TLS:
            for link in itertools.chain(self.index_urls, self.find_links):
                parsed = urllib_parse.urlparse(link)
                if parsed.scheme == "https":
                    logger.warning(
                        "pip is configured with locations that require "
                        "TLS/SSL, however the ssl module in Python is not "
                        "available."
                    )
                    break

    def add_dependency_links(self, links):
        # # FIXME: this shouldn't be global list this, it should only
        # # apply to requirements of the package that specifies the
        # # dependency_links value
        # # FIXME: also, we should track comes_from (i.e., use Link)
        if self.process_dependency_links:
            warnings.warn(
                "Dependency Links processing has been deprecated and will be "
                "removed in a future release.",
                RemovedInPip10Warning,
            )
            self.dependency_links.extend(links)

    @staticmethod
    def _sort_locations(locations, expand_dir=False):
        """
        Sort locations into "files" (archives) and "urls", and return
        a pair of lists (files,urls)
        """
        files = []
        urls = []

        # puts the url for the given file path into the appropriate list
        def sort_path(path):
            url = path_to_url(path)
            if mimetypes.guess_type(url, strict=False)[0] == 'text/html':
                urls.append(url)
            else:
                files.append(url)

        for url in locations:

            is_local_path = os.path.exists(url)
            is_file_url = url.startswith('file:')

            if is_local_path or is_file_url:
                if is_local_path:
                    path = url
                else:
                    path = url_to_path(url)
                if os.path.isdir(path):
                    if expand_dir:
                        path = os.path.realpath(path)
                        for item in os.listdir(path):
                            sort_path(os.path.join(path, item))
                    elif is_file_url:
                        urls.append(url)
                elif os.path.isfile(path):
                    sort_path(path)
                else:
                    logger.warning(
                        "Url '%s' is ignored: it is neither a file "
                        "nor a directory.", url)
            elif is_url(url):
                # Only add url with clear scheme
                urls.append(url)
            else:
                logger.warning(
                    "Url '%s' is ignored. It is either a non-existing "
                    "path or lacks a specific scheme.", url)

        return files, urls

    def _candidate_sort_key(self, candidate):
        """
        Function used to generate link sort key for link tuples.
        The greater the return value, the more preferred it is.
        If not finding wheels, then sorted by version only.
        If finding wheels, then the sort order is by version, then:
          1. existing installs
          2. wheels ordered via Wheel.support_index_min(self.valid_tags)
          3. source archives
        Note: it was considered to embed this logic into the Link
              comparison operators, but then different sdist links
              with the same version, would have to be considered equal
        """
        support_num = len(self.valid_tags)
        if candidate.location.is_wheel:
            # can raise InvalidWheelFilename
            wheel = Wheel(candidate.location.filename)
            if not wheel.supported(self.valid_tags):
                raise UnsupportedWheel(
                    "%s is not a supported wheel for this platform. It "
                    "can't be sorted." % wheel.filename
                )
            pri = -(wheel.support_index_min(self.valid_tags))
        else:  # sdist
            pri = -(support_num)
        return (candidate.version, pri)

    def _validate_secure_origin(self, logger, location):
        """Return True if *location* may be searched; warn and return False
        otherwise.

        A location is acceptable when its (scheme, host, port) matches one
        of the hardcoded SECURE_ORIGINS or one of the user-supplied
        trusted hosts collected in self.secure_origins.
        """
        # Determine if this url used a secure transport mechanism
        parsed = urllib_parse.urlparse(str(location))
        origin = (parsed.scheme, parsed.hostname, parsed.port)

        # The protocol to use to see if the protocol matches.
        # Don't count the repository type as part of the protocol: in
        # cases such as "git+ssh", only use "ssh". (I.e., Only verify against
        # the last scheme.)
        protocol = origin[0].rsplit('+', 1)[-1]

        # Determine if our origin is a secure origin by looking through our
        # hardcoded list of secure origins, as well as any additional ones
        # configured on this PackageFinder instance.
        for secure_origin in (SECURE_ORIGINS + self.secure_origins):
            if protocol != secure_origin[0] and secure_origin[0] != "*":
                continue

            try:
                # We need to do this decode dance to ensure that we have a
                # unicode object, even on Python 2.x.
                addr = ipaddress.ip_address(
                    origin[1]
                    if (
                        isinstance(origin[1], six.text_type) or
                        origin[1] is None
                    )
                    else origin[1].decode("utf8")
                )
                network = ipaddress.ip_network(
                    secure_origin[1]
                    if isinstance(secure_origin[1], six.text_type)
                    else secure_origin[1].decode("utf8")
                )
            except ValueError:
                # We don't have both a valid address or a valid network, so
                # we'll check this origin against hostnames.
                if (origin[1] and
                        origin[1].lower() != secure_origin[1].lower() and
                        secure_origin[1] != "*"):
                    continue
            else:
                # We have a valid address and network, so see if the address
                # is contained within the network.
                if addr not in network:
                    continue

            # Check to see if the port matches (a None entry matches only an
            # absent port, e.g. for file: urls).
            if (origin[2] != secure_origin[2] and
                    secure_origin[2] != "*" and
                    secure_origin[2] is not None):
                continue

            # If we've gotten here, then this origin matches the current
            # secure origin and we should return True
            return True

        # If we've gotten to this point, then the origin isn't secure and we
        # will not accept it as a valid location to search. We will however
        # log a warning that we are ignoring it.
        logger.warning(
            "The repository located at %s is not a trusted or secure host and "
            "is being ignored. If this repository is available via HTTPS it "
            "is recommended to use HTTPS instead, otherwise you may silence "
            "this warning and allow it anyways with '--trusted-host %s'.",
            parsed.hostname,
            parsed.hostname,
        )

        return False

    def _get_index_urls_locations(self, project_name):
        """Return one project-page url per configured index url.

        The project segment is the quoted canonical project name, and each
        returned url ends in a trailing slash: although this isn't in the
        spec (and PyPI can handle it without the slash), some other index
        implementations might break if they relied on easy_install's
        behavior.
        """
        project_segment = urllib_parse.quote(canonicalize_name(project_name))

        def project_url(index_url):
            joined = posixpath.join(index_url, project_segment)
            return joined if joined.endswith('/') else joined + '/'

        return [project_url(index_url) for index_url in self.index_urls]

    def find_all_candidates(self, project_name):
        """Find all available InstallationCandidate for project_name

        This checks index_urls, find_links and dependency_links.
        All versions found are returned as an InstallationCandidate list.

        The returned list concatenates, in this intentional priority
        order: local files, find_links, index pages, dependency_links.

        See _link_package_versions for details on which files are accepted
        """
        index_locations = self._get_index_urls_locations(project_name)
        index_file_loc, index_url_loc = self._sort_locations(index_locations)
        fl_file_loc, fl_url_loc = self._sort_locations(
            self.find_links, expand_dir=True)
        dep_file_loc, dep_url_loc = self._sort_locations(self.dependency_links)

        file_locations = (
            Link(url) for url in itertools.chain(
                index_file_loc, fl_file_loc, dep_file_loc)
        )

        # We trust every url that the user has given us whether it was given
        #   via --index-url or --find-links
        # We explicitly do not trust links that came from dependency_links
        # We want to filter out any thing which does not have a secure origin.
        url_locations = [
            link for link in itertools.chain(
                (Link(url) for url in index_url_loc),
                (Link(url) for url in fl_url_loc),
                (Link(url) for url in dep_url_loc),
            )
            if self._validate_secure_origin(logger, link)
        ]

        logger.debug('%d location(s) to search for versions of %s:',
                     len(url_locations), project_name)

        for location in url_locations:
            logger.debug('* %s', location)

        # Restrict the accepted formats (binary/source) per the
        # format-control settings for this project.
        canonical_name = canonicalize_name(project_name)
        formats = fmt_ctl_formats(self.format_control, canonical_name)
        search = Search(project_name, canonical_name, formats)
        find_links_versions = self._package_versions(
            # We trust every directly linked archive in find_links
            (Link(url, '-f') for url in self.find_links),
            search
        )

        page_versions = []
        for page in self._get_pages(url_locations, project_name):
            logger.debug('Analyzing links from page %s', page.url)
            with indent_log():
                page_versions.extend(
                    self._package_versions(page.links, search)
                )

        dependency_versions = self._package_versions(
            (Link(url) for url in self.dependency_links), search
        )
        if dependency_versions:
            logger.debug(
                'dependency_links found: %s',
                ', '.join([
                    version.location.url for version in dependency_versions
                ])
            )

        file_versions = self._package_versions(file_locations, search)
        if file_versions:
            file_versions.sort(reverse=True)
            logger.debug(
                'Local files found: %s',
                ', '.join([
                    url_to_path(candidate.location.url)
                    for candidate in file_versions
                ])
            )

        # This is an intentional priority ordering
        return (
            file_versions + find_links_versions + page_versions +
            dependency_versions
        )

    def find_requirement(self, req, upgrade):
        """Try to find a Link matching req.

        :param req: an InstallRequirement.
        :param upgrade: boolean; whether a newer-than-installed version
            should be selected when available.
        :return: the best candidate's Link, or None when the installed
            version already satisfies req and no upgrade is wanted.
        :raises DistributionNotFound: nothing matches req at all.
        :raises BestVersionAlreadyInstalled: an upgrade was requested but
            the installed version is already the best available.
        """
        all_candidates = self.find_all_candidates(req.name)

        # Filter out anything which doesn't match our specifier
        compatible_versions = set(
            req.specifier.filter(
                # We turn the version object into a str here because otherwise
                # when we're debundled but setuptools isn't, Python will see
                # packaging.version.Version and
                # pkg_resources._vendor.packaging.version.Version as different
                # types. This way we'll use a str as a common data interchange
                # format. If we stop using the pkg_resources provided specifier
                # and start using our own, we can drop the cast to str().
                [str(c.version) for c in all_candidates],
                prereleases=(
                    self.allow_all_prereleases
                    if self.allow_all_prereleases else None
                ),
            )
        )
        applicable_candidates = [
            # Again, converting to str to deal with debundling.
            c for c in all_candidates if str(c.version) in compatible_versions
        ]

        if applicable_candidates:
            best_candidate = max(applicable_candidates,
                                 key=self._candidate_sort_key)
            # If the best candidate is yanked (PEP 592), prefer the best
            # non-yanked candidate instead; only when every applicable
            # candidate is yanked do we keep the yanked best and warn.
            if getattr(best_candidate.location, "yanked", False):
                nonyanked_candidates = [
                    c for c in applicable_candidates
                    if not getattr(c.location, "yanked", False)
                ]

                if nonyanked_candidates:
                    best_candidate = max(nonyanked_candidates,
                                         key=self._candidate_sort_key)
                else:
                    warning_message = (
                        "WARNING: The candidate selected for download or "
                        "install is a yanked version: '{}' candidate "
                        "(version {} at {})"
                    ).format(best_candidate.project,
                             best_candidate.version,
                             best_candidate.location)
                    if best_candidate.location.yanked_reason:
                        warning_message += (
                            "\nReason for being yanked: {}".format(
                                best_candidate.location.yanked_reason)
                        )
                    logger.warning(warning_message)
        else:
            best_candidate = None

        if req.satisfied_by is not None:
            installed_version = parse_version(req.satisfied_by.version)
        else:
            installed_version = None

        if installed_version is None and best_candidate is None:
            logger.critical(
                'Could not find a version that satisfies the requirement %s '
                '(from versions: %s)',
                req,
                ', '.join(
                    sorted(
                        set(str(c.version) for c in all_candidates),
                        key=parse_version,
                    )
                )
            )

            raise DistributionNotFound(
                'No matching distribution found for %s' % req
            )

        best_installed = False
        if installed_version and (
                best_candidate is None or
                best_candidate.version <= installed_version):
            best_installed = True

        if not upgrade and installed_version is not None:
            if best_installed:
                logger.debug(
                    'Existing installed version (%s) is most up-to-date and '
                    'satisfies requirement',
                    installed_version,
                )
            else:
                logger.debug(
                    'Existing installed version (%s) satisfies requirement '
                    '(most up-to-date version is %s)',
                    installed_version,
                    best_candidate.version,
                )
            return None

        if best_installed:
            # We have an existing version, and its the best version
            logger.debug(
                'Installed version (%s) is most up-to-date (past versions: '
                '%s)',
                installed_version,
                ', '.join(sorted(compatible_versions, key=parse_version)) or
                "none",
            )
            raise BestVersionAlreadyInstalled

        logger.debug(
            'Using version %s (newest of versions: %s)',
            best_candidate.version,
            ', '.join(sorted(compatible_versions, key=parse_version))
        )
        return best_candidate.location

    def _get_pages(self, locations, project_name):
        """
        Yields (page, page_url) from the given locations, skipping
        locations that have errors.
        """
        seen = set()
        for location in locations:
            if location in seen:
                continue
            seen.add(location)

            page = self._get_page(location)
            if page is None:
                continue

            yield page

    # Matches a trailing "-pyX.Y" marker on a version string (e.g. "-py2.7").
    _py_version_re = re.compile(r'-py([123]\.?[0-9]?)$')

    def _sort_links(self, links):
        """
        Returns elements of links in order, non-egg links first, egg links
        second, while eliminating duplicates
        """
        eggs, no_eggs = [], []
        seen = set()
        for link in links:
            if link not in seen:
                seen.add(link)
                if link.egg_fragment:
                    eggs.append(link)
                else:
                    no_eggs.append(link)
        return no_eggs + eggs

    def _package_versions(self, links, search):
        result = []
        for link in self._sort_links(links):
            v = self._link_package_versions(link, search)
            if v is not None:
                result.append(v)
        return result

    def _log_skipped_link(self, link, reason):
        if link not in self.logged_links:
            logger.debug('Skipping link %s; %s', link, reason)
            self.logged_links.add(link)

    def _link_package_versions(self, link, search):
        """Return an InstallationCandidate or None.

        Applies the per-link filtering rules: supported extension, allowed
        format (binary/source), matching project name, compatible wheel
        tags, a matching trailing -pyX.Y marker, and the Requires-Python
        metadata. Skipped links are logged once with the reason.
        """
        version = None
        if link.egg_fragment:
            # An explicit #egg= fragment names the project directly.
            egg_info = link.egg_fragment
            ext = link.ext
        else:
            egg_info, ext = link.splitext()
            if not ext:
                self._log_skipped_link(link, 'not a file')
                return
            if ext not in SUPPORTED_EXTENSIONS:
                self._log_skipped_link(
                    link, 'unsupported archive format: %s' % ext)
                return
            if "binary" not in search.formats and ext == wheel_ext:
                self._log_skipped_link(
                    link, 'No binaries permitted for %s' % search.supplied)
                return
            if "macosx10" in link.path and ext == '.zip':
                self._log_skipped_link(link, 'macosx10 one')
                return
            if ext == wheel_ext:
                try:
                    wheel = Wheel(link.filename)
                except InvalidWheelFilename:
                    self._log_skipped_link(link, 'invalid wheel filename')
                    return
                if canonicalize_name(wheel.name) != search.canonical:
                    self._log_skipped_link(
                        link, 'wrong project name (not %s)' % search.supplied)
                    return

                if not wheel.supported(self.valid_tags):
                    self._log_skipped_link(
                        link, 'it is not compatible with this Python')
                    return

                version = wheel.version

        # This should be up by the search.ok_binary check, but see issue 2700.
        if "source" not in search.formats and ext != wheel_ext:
            self._log_skipped_link(
                link, 'No sources permitted for %s' % search.supplied)
            return

        if not version:
            version = egg_info_matches(egg_info, search.supplied, link)
        if version is None:
            self._log_skipped_link(
                link, 'wrong project name (not %s)' % search.supplied)
            return

        match = self._py_version_re.search(version)
        if match:
            version = version[:match.start()]
            py_version = match.group(1)
            if py_version != sys.version[:3]:
                self._log_skipped_link(
                    link, 'Python version is incorrect')
                return
        try:
            support_this_python = check_requires_python(link.requires_python)
        except specifiers.InvalidSpecifier:
            logger.debug("Package %s has an invalid Requires-Python entry: %s",
                         link.filename, link.requires_python)
            # An unparsable specifier is treated as "no restriction".
            support_this_python = True

        if not support_this_python:
            # Message fixed: the original concatenated "python" and
            # "version" without a separating space ("pythonversion").
            logger.debug("The package %s is incompatible with the python "
                         "version in use. Acceptable python versions are: %s",
                         link, link.requires_python)
            return
        logger.debug('Found link %s, version: %s', link, version)

        return InstallationCandidate(search.supplied, version, link)

    def _get_page(self, link):
        """Fetch and parse the page at *link* using this finder's session."""
        return HTMLPage.get_page(link, session=self.session)


def egg_info_matches(
        egg_info, search_name, link,
        _egg_info_re=re.compile(r'([a-z0-9_.]+)-([a-z0-9_.!+-]+)', re.I)):
    """Pull the version part out of a string.

    :param egg_info: The string to parse. E.g. foo-2.1
    :param search_name: The name of the package this belongs to. None to
        infer the name. Note that this cannot unambiguously parse strings
        like foo-2-2 which might be foo, 2-2 or foo-2, 2.
    :param link: The link the string came from, for logging on failure.
    """
    match = _egg_info_re.search(egg_info)
    if match is None:
        logger.debug('Could not parse version from link: %s', link)
        return None
    matched = match.group(0)
    if search_name is None:
        # No expected name: everything from the first dash on is returned.
        return matched[matched.index('-'):]
    # Compare against the "safe" name that pkg_resources would create.
    normalized = matched.lower().replace('_', '-')
    # project name and version must be separated by a dash
    prefix = search_name.lower() + "-"
    if not normalized.startswith(prefix):
        return None
    return matched[len(prefix):]


class HTMLPage(object):
    """Represents one page, along with its URL"""

    def __init__(self, content, url, headers=None):
        """Store and parse the raw HTML *content* retrieved from *url*.

        A charset advertised in the Content-Type response header, if any,
        is passed to html5lib as the transport encoding.
        """
        encoding = None
        if headers and "Content-Type" in headers:
            _, params = cgi.parse_header(headers["Content-Type"])
            encoding = params.get("charset", None)

        self.content = content
        self.parsed = html5lib.parse(
            self.content,
            transport_encoding=encoding,
            namespaceHTMLElements=False,
        )
        self.url = url
        self.headers = headers

    def __str__(self):
        return self.url

    @classmethod
    def get_page(cls, link, skip_archives=True, session=None):
        """Fetch *link* and return an HTMLPage, or None on failure.

        Returns None (after logging) for VCS urls, responses whose
        Content-Type is not text/html, and on HTTP/SSL/connection/timeout
        errors. *session* is a required keyword argument.
        """
        if session is None:
            raise TypeError(
                "get_page() missing 1 required keyword argument: 'session'"
            )

        url = link.url
        url = url.split('#', 1)[0]

        # Check for VCS schemes that do not support lookup as web pages.
        from pip.vcs import VcsSupport
        for scheme in VcsSupport.schemes:
            if url.lower().startswith(scheme) and url[len(scheme)] in '+:':
                logger.debug('Cannot look at %s URL %s', scheme, link)
                return None

        try:
            if skip_archives:
                # Cheap pre-check: for urls that look like archives, only
                # proceed when a HEAD request says the target is HTML.
                filename = link.filename
                for bad_ext in ARCHIVE_EXTENSIONS:
                    if filename.endswith(bad_ext):
                        content_type = cls._get_content_type(
                            url, session=session,
                        )
                        if content_type.lower().startswith('text/html'):
                            break
                        else:
                            logger.debug(
                                'Skipping page %s because of Content-Type: %s',
                                link,
                                content_type,
                            )
                            return

            logger.debug('Getting page %s', url)

            # Tack index.html onto file:// URLs that point to directories
            (scheme, netloc, path, params, query, fragment) = \
                urllib_parse.urlparse(url)
            if (scheme == 'file' and
                    os.path.isdir(urllib_request.url2pathname(path))):
                # add trailing slash if not present so urljoin doesn't trim
                # final segment
                if not url.endswith('/'):
                    url += '/'
                url = urllib_parse.urljoin(url, 'index.html')
                logger.debug(' file: URL is directory, getting %s', url)

            resp = session.get(
                url,
                headers={
                    "Accept": "text/html",
                    "Cache-Control": "max-age=600",
                },
            )
            resp.raise_for_status()

            # The check for archives above only works if the url ends with
            # something that looks like an archive. However that is not a
            # requirement of an url. Unless we issue a HEAD request on every
            # url we cannot know ahead of time for sure if something is HTML
            # or not. However we can check after we've downloaded it.
            content_type = resp.headers.get('Content-Type', 'unknown')
            if not content_type.lower().startswith("text/html"):
                logger.debug(
                    'Skipping page %s because of Content-Type: %s',
                    link,
                    content_type,
                )
                return

            inst = cls(resp.content, resp.url, resp.headers)
        except requests.HTTPError as exc:
            cls._handle_fail(link, exc, url)
        except SSLError as exc:
            reason = ("There was a problem confirming the ssl certificate: "
                      "%s" % exc)
            cls._handle_fail(link, reason, url, meth=logger.info)
        except requests.ConnectionError as exc:
            cls._handle_fail(link, "connection error: %s" % exc, url)
        except requests.Timeout:
            cls._handle_fail(link, "timed out", url)
        else:
            return inst

    @staticmethod
    def _handle_fail(link, reason, url, meth=None):
        if meth is None:
            meth = logger.debug

        meth("Could not fetch URL %s: %s - skipping", link, reason)

    @staticmethod
    def _get_content_type(url, session):
        """Get the Content-Type of the given url, using a HEAD request"""
        scheme, netloc, path, query, fragment = urllib_parse.urlsplit(url)
        if scheme not in ('http', 'https'):
            # FIXME: some warning or something?
            # assertion error?
            return ''

        resp = session.head(url, allow_redirects=True)
        resp.raise_for_status()

        return resp.headers.get("Content-Type", "")

    @cached_property
    def base_url(self):
        bases = [
            x for x in self.parsed.findall(".//base")
            if x.get("href") is not None
        ]
        if bases and bases[0].get("href"):
            return bases[0].get("href")
        else:
            return self.url

    @property
    def links(self):
        """Yields all links in the page"""
        for anchor in self.parsed.findall(".//a"):
            if anchor.get("href"):
                href = anchor.get("href")
                # Resolve relative hrefs against the page's base url and
                # percent-encode stray unsafe characters.
                url = self.clean_link(
                    urllib_parse.urljoin(self.base_url, href)
                )
                pyrequire = anchor.get('data-requires-python')
                pyrequire = unescape(pyrequire) if pyrequire else None
                # PEP 592: a data-yanked attribute marks the file as
                # yanked; its (possibly empty) value is the reason.
                yanked_reason = anchor.get('data-yanked', default=None)
                # Empty or valueless attribute are both parsed as empty string
                if yanked_reason is not None:
                    yanked_reason = unescape(yanked_reason)
                yield Link(url, self, requires_python=pyrequire, yanked_reason=yanked_reason)

    # Any character not safe to appear literally in a url; see clean_link.
    _clean_re = re.compile(r'[^a-z0-9$&+,/:;=?@.#%_\\|-]', re.I)

    def clean_link(self, url):
        """Makes sure a link is fully encoded.  That is, if a ' ' shows up in
        the link, it will be rewritten to %20 (while not over-quoting
        % or other characters)."""
        return self._clean_re.sub(
            lambda match: '%%%2x' % ord(match.group(0)), url)


class Link(object):
    """A hyperlink scraped from an index page.

    Wraps the raw URL with helpers for extracting the pieces pip cares
    about (filename, fragments, hash, wheel-ness).  Equality, ordering
    and hashing are all based solely on the URL string.
    """

    def __init__(self, url, comes_from=None, requires_python=None, yanked_reason=None):
        """
        Object representing a parsed link from https://pypi.python.org/simple/*

        url:
            url of the resource pointed to (href of the link)
        comes_from:
            instance of HTMLPage where the link was found, or string.
        requires_python:
            String containing the `Requires-Python` metadata field, specified
            in PEP 345. This may be specified by a data-requires-python
            attribute in the HTML link tag, as described in PEP 503.
        yanked_reason:
            Reason the file was yanked, taken from the data-yanked
            attribute (PEP 592); ``None`` when the link is not yanked.
        """

        # url can be a UNC windows share
        if url.startswith('\\\\'):
            url = path_to_url(url)

        self.url = url
        self.comes_from = comes_from
        # Collapse an empty string to None so callers can just test truthiness.
        self.requires_python = requires_python if requires_python else None
        self.yanked_reason = yanked_reason
        # A valueless data-yanked attribute ('' reason) still means yanked.
        self.yanked = yanked_reason is not None

    def __str__(self):
        if self.requires_python:
            rp = ' (requires-python:%s)' % self.requires_python
        else:
            rp = ''
        if self.comes_from:
            return '%s (from %s)%s' % (self.url, self.comes_from, rp)
        else:
            return str(self.url)

    def __repr__(self):
        return '<Link %s>' % self

    # All rich comparisons delegate to the URL string, so links sort and
    # deduplicate purely by URL; non-Link operands defer to the other side.
    def __eq__(self, other):
        if not isinstance(other, Link):
            return NotImplemented
        return self.url == other.url

    def __ne__(self, other):
        if not isinstance(other, Link):
            return NotImplemented
        return self.url != other.url

    def __lt__(self, other):
        if not isinstance(other, Link):
            return NotImplemented
        return self.url < other.url

    def __le__(self, other):
        if not isinstance(other, Link):
            return NotImplemented
        return self.url <= other.url

    def __gt__(self, other):
        if not isinstance(other, Link):
            return NotImplemented
        return self.url > other.url

    def __ge__(self, other):
        if not isinstance(other, Link):
            return NotImplemented
        return self.url >= other.url

    def __hash__(self):
        # Must stay in sync with __eq__ (both use only self.url).
        return hash(self.url)

    @property
    def filename(self):
        """Last path segment of the URL (netloc when the path is empty)."""
        _, netloc, path, _, _ = urllib_parse.urlsplit(self.url)
        name = posixpath.basename(path.rstrip('/')) or netloc
        name = urllib_parse.unquote(name)
        assert name, ('URL %r produced no filename' % self.url)
        return name

    @property
    def scheme(self):
        return urllib_parse.urlsplit(self.url)[0]

    @property
    def netloc(self):
        return urllib_parse.urlsplit(self.url)[1]

    @property
    def path(self):
        # Unquoted path component of the URL.
        return urllib_parse.unquote(urllib_parse.urlsplit(self.url)[2])

    def splitext(self):
        """Split the URL's basename into (root, extension)."""
        return splitext(posixpath.basename(self.path.rstrip('/')))

    @property
    def ext(self):
        return self.splitext()[1]

    @property
    def url_without_fragment(self):
        # Passing None for the fragment drops it (and its '#') entirely.
        scheme, netloc, path, query, fragment = urllib_parse.urlsplit(self.url)
        return urllib_parse.urlunsplit((scheme, netloc, path, query, None))

    # Matches '#egg=name' or '&egg=name' fragments.
    _egg_fragment_re = re.compile(r'[#&]egg=([^&]*)')

    @property
    def egg_fragment(self):
        match = self._egg_fragment_re.search(self.url)
        if not match:
            return None
        return match.group(1)

    # Matches '#subdirectory=path' or '&subdirectory=path' fragments.
    _subdirectory_fragment_re = re.compile(r'[#&]subdirectory=([^&]*)')

    @property
    def subdirectory_fragment(self):
        match = self._subdirectory_fragment_re.search(self.url)
        if not match:
            return None
        return match.group(1)

    # Matches 'algo=hexdigest' pairs embedded in the URL fragment.
    _hash_re = re.compile(
        r'(sha1|sha224|sha384|sha256|sha512|md5)=([a-f0-9]+)'
    )

    @property
    def hash(self):
        """The hex digest embedded in the URL, or None."""
        match = self._hash_re.search(self.url)
        if match:
            return match.group(2)
        return None

    @property
    def hash_name(self):
        """The hash algorithm name embedded in the URL, or None."""
        match = self._hash_re.search(self.url)
        if match:
            return match.group(1)
        return None

    @property
    def show_url(self):
        # Basename with any query string and fragment stripped.
        return posixpath.basename(self.url.split('#', 1)[0].split('?', 1)[0])

    @property
    def is_wheel(self):
        return self.ext == wheel_ext

    @property
    def is_artifact(self):
        """
        Determines if this points to an actual artifact (e.g. a tarball) or if
        it points to an "abstract" thing like a path or a VCS location.
        """
        # Imported here to avoid a circular import at module load time.
        from pip.vcs import vcs

        if self.scheme in vcs.all_schemes:
            return False

        return True


# A plain two-set record; both sets are mutated in place by the
# fmt_ctl_* helpers below.
FormatControl = namedtuple('FormatControl', 'no_binary only_binary')
"""This object has two fields, no_binary and only_binary.

If a field is falsy, it isn't set. If it is {':all:'}, it should match all
packages except those listed in the other field. Only one field can be set
to {':all:'} at a time. The rest of the time exact package name matches
are listed, with any given package only showing up in one field at a time.
"""


def fmt_ctl_handle_mutual_exclude(value, target, other):
    """Fold comma-separated *value* into *target*, keeping *target* and
    *other* mutually exclusive.

    ':all:' wipes both sets and claims everything for *target*;
    ':none:' empties *target* again; any concrete name is canonicalized,
    moved into *target* and removed from *other*.
    """
    entries = value.split(',')
    while ':all:' in entries:
        other.clear()
        target.clear()
        target.add(':all:')
        # Everything up to and including the first ':all:' is superseded.
        entries = entries[entries.index(':all:') + 1:]
        if ':none:' not in entries:
            # No later reset, so ':all:' covers everything that remains.
            return
    for entry in entries:
        if entry == ':none:':
            target.clear()
        else:
            canonical = canonicalize_name(entry)
            other.discard(canonical)
            target.add(canonical)


def fmt_ctl_formats(fmt_ctl, canonical_name):
    """Return the frozenset of formats ('binary'/'source') allowed for
    *canonical_name* under *fmt_ctl*.

    An exact package entry always outranks the ':all:' wildcard, and
    only_binary outranks no_binary at equal specificity.
    """
    everything = {'binary', 'source'}
    for selector in (canonical_name, ':all:'):
        if selector in fmt_ctl.only_binary:
            return frozenset(everything - {'source'})
        if selector in fmt_ctl.no_binary:
            return frozenset(everything - {'binary'})
    return frozenset(everything)


def fmt_ctl_no_binary(fmt_ctl):
    """Disallow wheels for every package by routing ':all:' into no_binary."""
    fmt_ctl_handle_mutual_exclude(
        ':all:', fmt_ctl.no_binary, fmt_ctl.only_binary)


def fmt_ctl_no_use_wheel(fmt_ctl):
    """Implement the deprecated --no-use-wheel flag as --no-binary :all:,
    warning the user about the rename.
    """
    fmt_ctl_no_binary(fmt_ctl)
    warnings.warn(
        '--no-use-wheel is deprecated and will be removed in the future. '
        ' Please use --no-binary :all: instead.', RemovedInPip10Warning,
        stacklevel=2)


# Immutable record describing one package lookup against an index.
Search = namedtuple('Search', 'supplied canonical formats')
"""Capture key aspects of a search.

:attribute supplied: The user supplied package.
:attribute canonical: The canonical package name.
:attribute formats: The formats allowed for this package. Should be a set
    with 'binary' or 'source' or both in it.
"""
__main__.py000064400000001110151733136200006630 0ustar00from __future__ import absolute_import

import os
import sys

# If we are running from a wheel, add the wheel to sys.path
# This allows the usage python pip-*.whl/pip install pip-*.whl
if __package__ == '':
    # __file__ is pip-*.whl/pip/__main__.py
    # first dirname call strips off '/__main__.py', second strips off '/pip'
    # Resulting path is the name of the wheel itself
    # Add that to sys.path so we can import pip
    path = os.path.dirname(os.path.dirname(__file__))
    sys.path.insert(0, path)

# Imported late so the sys.path tweak above can take effect first.
import pip  # noqa

if __name__ == '__main__':
    sys.exit(pip.main())
models/__pycache__/index.cpython-36.opt-1.pyc000064400000001447151733136200014742 0ustar003

�Pf��@s(ddlmZGdd�de�Zed�ZdS)�)�parsec@seZdZdd�Zdd�ZdS)�IndexcCs<||_tj|�j|_|jd�|_|jd�|_|jd�|_dS)NZsimpleZpypiz
pypi/pip/json)�url�urllib_parseZurlsplitZnetloc�url_to_pathZ
simple_urlZpypi_urlZpip_json_url)�selfr�r�/usr/lib/python3.6/index.py�__init__s
zIndex.__init__cCstj|j|�S)N)rZurljoinr)r�pathrrr	rszIndex.url_to_pathN)�__name__�
__module__�__qualname__r
rrrrr	rsrzhttps://pypi.python.org/N)Zpip._vendor.six.moves.urllibrr�objectrZPyPIrrrr	�<module>smodels/__pycache__/__init__.cpython-36.pyc000064400000000313151733136200014422 0ustar003

�PfG�@sddlmZmZddgZdS)�)�Index�PyPIrrN)Zpip.models.indexrr�__all__�rr�/usr/lib/python3.6/__init__.py�<module>smodels/__pycache__/__init__.cpython-36.opt-1.pyc000064400000000313151733136200015361 0ustar003

�PfG�@sddlmZmZddgZdS)�)�Index�PyPIrrN)Zpip.models.indexrr�__all__�rr�/usr/lib/python3.6/__init__.py�<module>smodels/__pycache__/index.cpython-36.pyc000064400000001447151733136200014003 0ustar003

�Pf��@s(ddlmZGdd�de�Zed�ZdS)�)�parsec@seZdZdd�Zdd�ZdS)�IndexcCs<||_tj|�j|_|jd�|_|jd�|_|jd�|_dS)NZsimpleZpypiz
pypi/pip/json)�url�urllib_parseZurlsplitZnetloc�url_to_pathZ
simple_urlZpypi_urlZpip_json_url)�selfr�r�/usr/lib/python3.6/index.py�__init__s
zIndex.__init__cCstj|j|�S)N)rZurljoinr)r�pathrrr	rszIndex.url_to_pathN)�__name__�
__module__�__qualname__r
rrrrr	rsrzhttps://pypi.python.org/N)Zpip._vendor.six.moves.urllibrr�objectrZPyPIrrrr	�<module>smodels/__init__.py000064400000000107151733136200010137 0ustar00from pip.models.index import Index, PyPI


# Explicit public API of pip.models.
__all__ = ["Index", "PyPI"]
models/index.py000064400000000747151733136200007521 0ustar00from pip._vendor.six.moves.urllib import parse as urllib_parse


class Index(object):
    """A package index rooted at *url*, with its well-known endpoints."""

    def __init__(self, url):
        self.url = url
        self.netloc = urllib_parse.urlsplit(url).netloc
        # Pre-compute the endpoints most callers need.
        for attr, suffix in (('simple_url', 'simple'),
                             ('pypi_url', 'pypi'),
                             ('pip_json_url', 'pypi/pip/json')):
            setattr(self, attr, self.url_to_path(suffix))

    def url_to_path(self, path):
        """Join *path* onto the index root URL."""
        return urllib_parse.urljoin(self.url, path)


# Shared default index instance used throughout pip.
PyPI = Index('https://pypi.python.org/')
_vendor/distro.py000064400000112715151733136210010067 0ustar00# Copyright 2015,2016 Nir Cohen
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""
The ``distro`` package (``distro`` stands for Linux Distribution) provides
information about the Linux distribution it runs on, such as a reliable
machine-readable distro ID, or version information.

It is a renewed alternative implementation for Python's original
:py:func:`platform.linux_distribution` function, but it provides much more
functionality. An alternative implementation became necessary because Python
3.5 deprecated this function, and Python 3.7 is expected to remove it
altogether. Its predecessor function :py:func:`platform.dist` was already
deprecated since Python 2.6 and is also expected to be removed in Python 3.7.
Still, there are many cases in which access to Linux distribution information
is needed. See `Python issue 1322 <https://bugs.python.org/issue1322>`_ for
more information.
"""

import os
import re
import sys
import json
import shlex
import logging
import subprocess


if not sys.platform.startswith('linux'):
    # This module is Linux-only by design; fail fast on other platforms.
    raise ImportError('Unsupported platform: {0}'.format(sys.platform))

_UNIXCONFDIR = '/etc'
_OS_RELEASE_BASENAME = 'os-release'

#: Translation table for normalizing the "ID" attribute defined in os-release
#: files, for use by the :func:`distro.id` method.
#:
#: * Key: Value as defined in the os-release file, translated to lower case,
#:   with blanks translated to underscores.
#:
#: * Value: Normalized value.
NORMALIZED_OS_ID = {}

#: Translation table for normalizing the "Distributor ID" attribute returned by
#: the lsb_release command, for use by the :func:`distro.id` method.
#:
#: * Key: Value as returned by the lsb_release command, translated to lower
#:   case, with blanks translated to underscores.
#:
#: * Value: Normalized value.
NORMALIZED_LSB_ID = {
    'enterpriseenterprise': 'oracle',  # Oracle Enterprise Linux
    'redhatenterpriseworkstation': 'rhel',  # RHEL 6.7
}

#: Translation table for normalizing the distro ID derived from the file name
#: of distro release files, for use by the :func:`distro.id` method.
#:
#: * Key: Value as derived from the file name of a distro release file,
#:   translated to lower case, with blanks translated to underscores.
#:
#: * Value: Normalized value.
NORMALIZED_DISTRO_ID = {
    'redhat': 'rhel',  # RHEL 6.x, 7.x
}

# Pattern for content of distro release file (reversed)
# NOTE: the file content is matched against its reverse, so 'STL' is
# 'LTS' backwards and 'esaeler' is 'release' backwards.
_DISTRO_RELEASE_CONTENT_REVERSED_PATTERN = re.compile(
    r'(?:[^)]*\)(.*)\()? *(?:STL )?([\d.+\-a-z]*\d) *(?:esaeler *)?(.+)')

# Pattern for base file name of distro release file
_DISTRO_RELEASE_BASENAME_PATTERN = re.compile(
    r'(\w+)[-_](release|version)$')

# Base file names to be ignored when searching for distro release file
_DISTRO_RELEASE_IGNORE_BASENAMES = (
    'debian_version',
    'lsb-release',
    'oem-release',
    _OS_RELEASE_BASENAME,
    'system-release'
)


def linux_distribution(full_distribution_name=True):
    """
    Return information about the current Linux distribution as a tuple
    ``(id_name, version, codename)`` with items as follows:

    * ``id_name``:  If *full_distribution_name* is false, the result of
      :func:`distro.id`. Otherwise, the result of :func:`distro.name`.

    * ``version``:  The result of :func:`distro.version`.

    * ``codename``:  The result of :func:`distro.codename`.

    The interface of this function is compatible with the original
    :py:func:`platform.linux_distribution` function, supporting a subset of
    its parameters.

    The data it returns may not exactly be the same, because it uses more data
    sources than the original function, and that may lead to different data if
    the Linux distribution is not consistent across multiple data sources it
    provides (there are indeed such distributions ...).

    Another reason for differences is the fact that the :func:`distro.id`
    method normalizes the distro ID string to a reliable machine-readable value
    for a number of popular Linux distributions.
    """
    # Delegate to the module-global LinuxDistribution instance.
    return _distro.linux_distribution(full_distribution_name)


def id():
    """
    Return the distro ID of the current Linux distribution, as a
    machine-readable string.

    For a number of Linux distributions, the returned distro ID value is
    *reliable*, in the sense that it is documented and that it does not change
    across releases of the distribution.

    This package maintains the following reliable distro ID values:

    ==============  =========================================
    Distro ID       Distribution
    ==============  =========================================
    "ubuntu"        Ubuntu
    "debian"        Debian
    "rhel"          RedHat Enterprise Linux
    "centos"        CentOS
    "fedora"        Fedora
    "sles"          SUSE Linux Enterprise Server
    "opensuse"      openSUSE
    "amazon"        Amazon Linux
    "arch"          Arch Linux
    "cloudlinux"    CloudLinux OS
    "exherbo"       Exherbo Linux
    "gentoo"        GenToo Linux
    "ibm_powerkvm"  IBM PowerKVM
    "kvmibm"        KVM for IBM z Systems
    "linuxmint"     Linux Mint
    "mageia"        Mageia
    "mandriva"      Mandriva Linux
    "parallels"     Parallels
    "pidora"        Pidora
    "raspbian"      Raspbian
    "oracle"        Oracle Linux (and Oracle Enterprise Linux)
    "scientific"    Scientific Linux
    "slackware"     Slackware
    "xenserver"     XenServer
    ==============  =========================================

    If you have a need to get distros for reliable IDs added into this set,
    or if you find that the :func:`distro.id` function returns a different
    distro ID for one of the listed distros, please create an issue in the
    `distro issue tracker`_.

    **Lookup hierarchy and transformations:**

    First, the ID is obtained from the following sources, in the specified
    order. The first available and non-empty value is used:

    * the value of the "ID" attribute of the os-release file,

    * the value of the "Distributor ID" attribute returned by the lsb_release
      command,

    * the first part of the file name of the distro release file,

    The so determined ID value then passes the following transformations,
    before it is returned by this method:

    * it is translated to lower case,

    * blanks (which should not be there anyway) are translated to underscores,

    * a normalization of the ID is performed, based upon
      `normalization tables`_. The purpose of this normalization is to ensure
      that the ID is as reliable as possible, even across incompatible changes
      in the Linux distributions. A common reason for an incompatible change is
      the addition of an os-release file, or the addition of the lsb_release
      command, with ID values that differ from what was previously determined
      from the distro release file name.
    """
    # Delegate to the module-global LinuxDistribution instance.
    return _distro.id()


def name(pretty=False):
    """
    Return the name of the current Linux distribution, as a human-readable
    string.

    If *pretty* is false, the name is returned without version or codename.
    (e.g. "CentOS Linux")

    If *pretty* is true, the version and codename are appended.
    (e.g. "CentOS Linux 7.1.1503 (Core)")

    **Lookup hierarchy:**

    The name is obtained from the following sources, in the specified order.
    The first available and non-empty value is used:

    * If *pretty* is false:

      - the value of the "NAME" attribute of the os-release file,

      - the value of the "Distributor ID" attribute returned by the lsb_release
        command,

      - the value of the "<name>" field of the distro release file.

    * If *pretty* is true:

      - the value of the "PRETTY_NAME" attribute of the os-release file,

      - the value of the "Description" attribute returned by the lsb_release
        command,

      - the value of the "<name>" field of the distro release file, appended
        with the value of the pretty version ("<version_id>" and "<codename>"
        fields) of the distro release file, if available.
    """
    # Delegate to the module-global LinuxDistribution instance.
    return _distro.name(pretty)


def version(pretty=False, best=False):
    """
    Return the version of the current Linux distribution, as a human-readable
    string.

    If *pretty* is false, the version is returned without codename (e.g.
    "7.0").

    If *pretty* is true, the codename in parenthesis is appended, if the
    codename is non-empty (e.g. "7.0 (Maipo)").

    Some distributions provide version numbers with different precisions in
    the different sources of distribution information. Examining the different
    sources in a fixed priority order does not always yield the most precise
    version (e.g. for Debian 8.2, or CentOS 7.1).

    The *best* parameter can be used to control the approach for the returned
    version:

    If *best* is false, the first non-empty version number in priority order of
    the examined sources is returned.

    If *best* is true, the most precise version number out of all examined
    sources is returned.

    **Lookup hierarchy:**

    In all cases, the version number is obtained from the following sources.
    If *best* is false, this order represents the priority order:

    * the value of the "VERSION_ID" attribute of the os-release file,
    * the value of the "Release" attribute returned by the lsb_release
      command,
    * the version number parsed from the "<version_id>" field of the first line
      of the distro release file,
    * the version number parsed from the "PRETTY_NAME" attribute of the
      os-release file, if it follows the format of the distro release files.
    * the version number parsed from the "Description" attribute returned by
      the lsb_release command, if it follows the format of the distro release
      files.
    """
    # Delegate to the module-global LinuxDistribution instance.
    return _distro.version(pretty, best)


def version_parts(best=False):
    """
    Return the version of the current Linux distribution as a tuple
    ``(major, minor, build_number)`` with items as follows:

    * ``major``:  The result of :func:`distro.major_version`.

    * ``minor``:  The result of :func:`distro.minor_version`.

    * ``build_number``:  The result of :func:`distro.build_number`.

    For a description of the *best* parameter, see the :func:`distro.version`
    method.
    """
    # Delegate to the module-global LinuxDistribution instance.
    return _distro.version_parts(best)


def major_version(best=False):
    """
    Return the major version of the current Linux distribution, as a string,
    if provided.
    Otherwise, the empty string is returned. The major version is the first
    part of the dot-separated version string.

    For a description of the *best* parameter, see the :func:`distro.version`
    method.
    """
    # Delegate to the module-global LinuxDistribution instance.
    return _distro.major_version(best)


def minor_version(best=False):
    """
    Return the minor version of the current Linux distribution, as a string,
    if provided.
    Otherwise, the empty string is returned. The minor version is the second
    part of the dot-separated version string.

    For a description of the *best* parameter, see the :func:`distro.version`
    method.
    """
    # Delegate to the module-global LinuxDistribution instance.
    return _distro.minor_version(best)


def build_number(best=False):
    """
    Return the build number of the current Linux distribution, as a string,
    if provided.
    Otherwise, the empty string is returned. The build number is the third part
    of the dot-separated version string.

    For a description of the *best* parameter, see the :func:`distro.version`
    method.
    """
    # Delegate to the module-global LinuxDistribution instance.
    return _distro.build_number(best)


def like():
    """
    Return a space-separated list of distro IDs of distributions that are
    closely related to the current Linux distribution in regards to packaging
    and programming interfaces, for example distributions the current
    distribution is a derivative from.

    **Lookup hierarchy:**

    This information item is only provided by the os-release file.
    For details, see the description of the "ID_LIKE" attribute in the
    `os-release man page
    <http://www.freedesktop.org/software/systemd/man/os-release.html>`_.
    """
    # Delegate to the module-global LinuxDistribution instance.
    return _distro.like()


def codename():
    """
    Return the codename for the release of the current Linux distribution,
    as a string.

    If the distribution does not have a codename, an empty string is returned.

    Note that the returned codename is not always really a codename. For
    example, openSUSE returns "x86_64". This function does not handle such
    cases in any special way and just returns the string it finds, if any.

    **Lookup hierarchy:**

    * the codename within the "VERSION" attribute of the os-release file, if
      provided,

    * the value of the "Codename" attribute returned by the lsb_release
      command,

    * the value of the "<codename>" field of the distro release file.
    """
    # Delegate to the module-global LinuxDistribution instance.
    return _distro.codename()


def info(pretty=False, best=False):
    """
    Return certain machine-readable information items about the current Linux
    distribution in a dictionary, as shown in the following example:

    .. sourcecode:: python

        {
            'id': 'rhel',
            'version': '7.0',
            'version_parts': {
                'major': '7',
                'minor': '0',
                'build_number': ''
            },
            'like': 'fedora',
            'codename': 'Maipo'
        }

    The dictionary structure and keys are always the same, regardless of which
    information items are available in the underlying data sources. The values
    for the various keys are as follows:

    * ``id``:  The result of :func:`distro.id`.

    * ``version``:  The result of :func:`distro.version`.

    * ``version_parts -> major``:  The result of :func:`distro.major_version`.

    * ``version_parts -> minor``:  The result of :func:`distro.minor_version`.

    * ``version_parts -> build_number``:  The result of
      :func:`distro.build_number`.

    * ``like``:  The result of :func:`distro.like`.

    * ``codename``:  The result of :func:`distro.codename`.

    For a description of the *pretty* and *best* parameters, see the
    :func:`distro.version` method.
    """
    # Delegate to the module-global LinuxDistribution instance.
    return _distro.info(pretty, best)


def os_release_info():
    """
    Return a dictionary containing key-value pairs for the information items
    from the os-release file data source of the current Linux distribution.

    See `os-release file`_ for details about these information items.
    """
    # Delegate to the module-global LinuxDistribution instance.
    return _distro.os_release_info()


def lsb_release_info():
    """
    Return a dictionary containing key-value pairs for the information items
    from the lsb_release command data source of the current Linux distribution.

    See `lsb_release command output`_ for details about these information
    items.
    """
    # Delegate to the module-global LinuxDistribution instance.
    return _distro.lsb_release_info()


def distro_release_info():
    """
    Return a dictionary containing key-value pairs for the information items
    from the distro release file data source of the current Linux distribution.

    See `distro release file`_ for details about these information items.
    """
    # Delegate to the module-global LinuxDistribution instance.
    return _distro.distro_release_info()


def os_release_attr(attribute):
    """
    Return a single named information item from the os-release file data source
    of the current Linux distribution.

    Parameters:

    * ``attribute`` (string): Key of the information item.

    Returns:

    * (string): Value of the information item, if the item exists.
      The empty string, if the item does not exist.

    See `os-release file`_ for details about these information items.
    """
    # Delegate to the module-global LinuxDistribution instance.
    return _distro.os_release_attr(attribute)


def lsb_release_attr(attribute):
    """
    Return a single named information item from the lsb_release command output
    data source of the current Linux distribution.

    Parameters:

    * ``attribute`` (string): Key of the information item.

    Returns:

    * (string): Value of the information item, if the item exists.
      The empty string, if the item does not exist.

    See `lsb_release command output`_ for details about these information
    items.
    """
    # Delegate to the module-global LinuxDistribution instance.
    return _distro.lsb_release_attr(attribute)


def distro_release_attr(attribute):
    """
    Return a single named information item from the distro release file
    data source of the current Linux distribution.

    Parameters:

    * ``attribute`` (string): Key of the information item.

    Returns:

    * (string): Value of the information item, if the item exists.
      The empty string, if the item does not exist.

    See `distro release file`_ for details about these information items.
    """
    # Delegate to the module-global LinuxDistribution instance.
    return _distro.distro_release_attr(attribute)


class LinuxDistribution(object):
    """
    Provides information about a Linux distribution.

    This package creates a private module-global instance of this class with
    default initialization arguments, that is used by the
    `consolidated accessor functions`_ and `single source accessor functions`_.
    By using default initialization arguments, that module-global instance
    returns data about the current Linux distribution (i.e. the distro this
    package runs on).

    Normally, it is not necessary to create additional instances of this class.
    However, in situations where control is needed over the exact data sources
    that are used, instances of this class can be created with a specific
    distro release file, or a specific os-release file, or without invoking the
    lsb_release command.
    """

    def __init__(self,
                 include_lsb=True,
                 os_release_file='',
                 distro_release_file=''):
        """
        The initialization method of this class gathers information from the
        available data sources, and stores that in private instance attributes.
        Subsequent access to the information items uses these private instance
        attributes, so that the data sources are read only once.

        Parameters:

        * ``include_lsb`` (bool): Controls whether the
          `lsb_release command output`_ is included as a data source.

          If the lsb_release command is not available in the program execution
          path, the data source for the lsb_release command will be empty.

        * ``os_release_file`` (string): The path name of the
          `os-release file`_ that is to be used as a data source.

          An empty string (the default) will cause the default path name to
          be used (see `os-release file`_ for details).

          If the specified or defaulted os-release file does not exist, the
          data source for the os-release file will be empty.

        * ``distro_release_file`` (string): The path name of the
          `distro release file`_ that is to be used as a data source.

          An empty string (the default) will cause a default search algorithm
          to be used (see `distro release file`_ for details).

          If the specified distro release file does not exist, or if no default
          distro release file can be found, the data source for the distro
          release file will be empty.

        Public instance attributes:

        * ``os_release_file`` (string): The path name of the
          `os-release file`_ that is actually used as a data source. The
          empty string if no os-release file is used as a data source.

        * ``distro_release_file`` (string): The path name of the
          `distro release file`_ that is actually used as a data source. The
          empty string if no distro release file is used as a data source.

        Raises:

        * :py:exc:`IOError`: Some I/O issue with an os-release file or distro
          release file.

        * :py:exc:`subprocess.CalledProcessError`: The lsb_release command had
          some issue (other than not being available in the program execution
          path).

        * :py:exc:`UnicodeError`: A data source has unexpected characters or
          uses an unexpected encoding.
        """
        # Each data source is read exactly once, here, and cached in a
        # private dict for all subsequent accessor calls.
        self.os_release_file = os_release_file or \
            os.path.join(_UNIXCONFDIR, _OS_RELEASE_BASENAME)
        self.distro_release_file = distro_release_file or ''  # updated later
        self._os_release_info = self._get_os_release_info()
        self._lsb_release_info = self._get_lsb_release_info() \
            if include_lsb else {}
        self._distro_release_info = self._get_distro_release_info()

    def __repr__(self):
        """Return repr of all info
        """
        return \
            "LinuxDistribution(" \
            "os_release_file={0!r}, " \
            "distro_release_file={1!r}, " \
            "_os_release_info={2!r}, " \
            "_lsb_release_info={3!r}, " \
            "_distro_release_info={4!r})".format(
                self.os_release_file,
                self.distro_release_file,
                self._os_release_info,
                self._lsb_release_info,
                self._distro_release_info)

    def linux_distribution(self, full_distribution_name=True):
        """
        Return information about the Linux distribution that is compatible
        with Python's :func:`platform.linux_distribution`, supporting a subset
        of its parameters.

        For details, see :func:`distro.linux_distribution`.
        """
        return (
            self.name() if full_distribution_name else self.id(),
            self.version(),
            self.codename()
        )

    def id(self):
        """Return the distro ID of the Linux distribution, as a string.

        For details, see :func:`distro.id`.
        """
        def normalize(distro_id, table):
            # Lower-case, turn spaces into underscores, then map known
            # aliases onto their normalized ID via the given table.
            distro_id = distro_id.lower().replace(' ', '_')
            return table.get(distro_id, distro_id)

        # Data sources are consulted in the precedence order used throughout
        # this class: os-release file, lsb_release output, distro release file.
        distro_id = self.os_release_attr('id')
        if distro_id:
            return normalize(distro_id, NORMALIZED_OS_ID)

        distro_id = self.lsb_release_attr('distributor_id')
        if distro_id:
            return normalize(distro_id, NORMALIZED_LSB_ID)

        distro_id = self.distro_release_attr('id')
        if distro_id:
            return normalize(distro_id, NORMALIZED_DISTRO_ID)

        return ''

    def name(self, pretty=False):
        """
        Return the name of the Linux distribution, as a string.

        For details, see :func:`distro.name`.
        """
        name = self.os_release_attr('name') \
            or self.lsb_release_attr('distributor_id') \
            or self.distro_release_attr('name')
        if pretty:
            name = self.os_release_attr('pretty_name') \
                or self.lsb_release_attr('description')
            if not name:
                # No pre-built pretty name available; synthesize one from
                # the plain name plus the pretty version.
                name = self.distro_release_attr('name')
                version = self.version(pretty=True)
                if version:
                    name = name + ' ' + version
        return name or ''

    def version(self, pretty=False, best=False):
        """
        Return the version of the Linux distribution, as a string.

        For details, see :func:`distro.version`.
        """
        # Candidate versions in priority order; entries are '' when a data
        # source does not provide a version.
        versions = [
            self.os_release_attr('version_id'),
            self.lsb_release_attr('release'),
            self.distro_release_attr('version_id'),
            self._parse_distro_release_content(
                self.os_release_attr('pretty_name')).get('version_id', ''),
            self._parse_distro_release_content(
                self.lsb_release_attr('description')).get('version_id', '')
        ]
        version = ''
        if best:
            # This algorithm uses the last version in priority order that has
            # the best precision. If the versions are not in conflict, that
            # does not matter; otherwise, using the last one instead of the
            # first one might be considered a surprise.
            for v in versions:
                if v.count(".") > version.count(".") or version == '':
                    version = v
        else:
            # First non-empty candidate wins.
            for v in versions:
                if v != '':
                    version = v
                    break
        if pretty and version and self.codename():
            version = u'{0} ({1})'.format(version, self.codename())
        return version

    def version_parts(self, best=False):
        """
        Return the version of the Linux distribution, as a tuple of version
        numbers.

        For details, see :func:`distro.version_parts`.
        """
        version_str = self.version(best=best)
        if version_str:
            # major[.minor[.build]] -- minor and build are optional.
            version_regex = re.compile(r'(\d+)\.?(\d+)?\.?(\d+)?')
            matches = version_regex.match(version_str)
            if matches:
                major, minor, build_number = matches.groups()
                return major, minor or '', build_number or ''
        return '', '', ''

    def major_version(self, best=False):
        """
        Return the major version number of the current distribution.

        For details, see :func:`distro.major_version`.
        """
        return self.version_parts(best)[0]

    def minor_version(self, best=False):
        """
        Return the minor version number of the Linux distribution.

        For details, see :func:`distro.minor_version`.
        """
        return self.version_parts(best)[1]

    def build_number(self, best=False):
        """
        Return the build number of the Linux distribution.

        For details, see :func:`distro.build_number`.
        """
        return self.version_parts(best)[2]

    def like(self):
        """
        Return the IDs of distributions that are like the Linux distribution.

        For details, see :func:`distro.like`.
        """
        return self.os_release_attr('id_like') or ''

    def codename(self):
        """
        Return the codename of the Linux distribution.

        For details, see :func:`distro.codename`.
        """
        return self.os_release_attr('codename') \
            or self.lsb_release_attr('codename') \
            or self.distro_release_attr('codename') \
            or ''

    def info(self, pretty=False, best=False):
        """
        Return certain machine-readable information about the Linux
        distribution.

        For details, see :func:`distro.info`.
        """
        return dict(
            id=self.id(),
            version=self.version(pretty, best),
            version_parts=dict(
                major=self.major_version(best),
                minor=self.minor_version(best),
                build_number=self.build_number(best)
            ),
            like=self.like(),
            codename=self.codename(),
        )

    def os_release_info(self):
        """
        Return a dictionary containing key-value pairs for the information
        items from the os-release file data source of the Linux distribution.

        For details, see :func:`distro.os_release_info`.
        """
        return self._os_release_info

    def lsb_release_info(self):
        """
        Return a dictionary containing key-value pairs for the information
        items from the lsb_release command data source of the Linux
        distribution.

        For details, see :func:`distro.lsb_release_info`.
        """
        return self._lsb_release_info

    def distro_release_info(self):
        """
        Return a dictionary containing key-value pairs for the information
        items from the distro release file data source of the Linux
        distribution.

        For details, see :func:`distro.distro_release_info`.
        """
        return self._distro_release_info

    def os_release_attr(self, attribute):
        """
        Return a single named information item from the os-release file data
        source of the Linux distribution.

        For details, see :func:`distro.os_release_attr`.
        """
        return self._os_release_info.get(attribute, '')

    def lsb_release_attr(self, attribute):
        """
        Return a single named information item from the lsb_release command
        output data source of the Linux distribution.

        For details, see :func:`distro.lsb_release_attr`.
        """
        return self._lsb_release_info.get(attribute, '')

    def distro_release_attr(self, attribute):
        """
        Return a single named information item from the distro release file
        data source of the Linux distribution.

        For details, see :func:`distro.distro_release_attr`.
        """
        return self._distro_release_info.get(attribute, '')

    def _get_os_release_info(self):
        """
        Get the information items from the specified os-release file.

        Returns:
            A dictionary containing all information items.
        """
        if os.path.isfile(self.os_release_file):
            with open(self.os_release_file) as release_file:
                return self._parse_os_release_content(release_file)
        return {}

    @staticmethod
    def _parse_os_release_content(lines):
        """
        Parse the lines of an os-release file.

        Parameters:

        * lines: Iterable through the lines in the os-release file.
                 Each line must be a unicode string or a UTF-8 encoded byte
                 string.

        Returns:
            A dictionary containing all information items.
        """
        props = {}
        lexer = shlex.shlex(lines, posix=True)
        lexer.whitespace_split = True

        # The shlex module defines its `wordchars` variable using literals,
        # making it dependent on the encoding of the Python source file.
        # In Python 2.6 and 2.7, the shlex source file is encoded in
        # 'iso-8859-1', and the `wordchars` variable is defined as a byte
        # string. This causes a UnicodeDecodeError to be raised when the
        # parsed content is a unicode object. The following fix resolves that
        # (... but it should be fixed in shlex...):
        if sys.version_info[0] == 2 and isinstance(lexer.wordchars, bytes):
            lexer.wordchars = lexer.wordchars.decode('iso-8859-1')

        tokens = list(lexer)
        for token in tokens:
            # At this point, all shell-like parsing has been done (i.e.
            # comments processed, quotes and backslash escape sequences
            # processed, multi-line values assembled, trailing newlines
            # stripped, etc.), so the tokens are now either:
            # * variable assignments: var=value
            # * commands or their arguments (not allowed in os-release)
            if '=' in token:
                k, v = token.split('=', 1)
                if isinstance(v, bytes):
                    v = v.decode('utf-8')
                props[k.lower()] = v
                if k == 'VERSION':
                    # this handles cases in which the codename is in
                    # the `(CODENAME)` (rhel, centos, fedora) format
                    # or in the `, CODENAME` format (Ubuntu).
                    codename = re.search(r'(\(\D+\))|,(\s+)?\D+', v)
                    if codename:
                        codename = codename.group()
                        codename = codename.strip('()')
                        codename = codename.strip(',')
                        codename = codename.strip()
                        # store the codename with parentheses/comma and
                        # surrounding whitespace stripped off.
                        props['codename'] = codename
                    else:
                        props['codename'] = ''
            else:
                # Ignore any tokens that are not variable assignments
                pass
        return props

    def _get_lsb_release_info(self):
        """
        Get the information items from the lsb_release command output.

        Returns:
            A dictionary containing all information items.
        """
        cmd = 'lsb_release -a'
        process = subprocess.Popen(
            cmd,
            shell=True,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE)
        stdout, stderr = process.communicate()
        stdout, stderr = stdout.decode('utf-8'), stderr.decode('utf-8')
        code = process.returncode
        if code == 0:
            content = stdout.splitlines()
            return self._parse_lsb_release_content(content)
        elif code == 127:  # Command not found
            # lsb_release not being installed is expected; treat the data
            # source as empty rather than as an error.
            return {}
        else:
            # Raise CalledProcessError with as much detail as the running
            # Python version's constructor supports (stderr arg: 3.5+,
            # output arg: 2.7+).
            if sys.version_info[:2] >= (3, 5):
                raise subprocess.CalledProcessError(code, cmd, stdout, stderr)
            elif sys.version_info[:2] >= (2, 7):
                raise subprocess.CalledProcessError(code, cmd, stdout)
            elif sys.version_info[:2] == (2, 6):
                raise subprocess.CalledProcessError(code, cmd)

    @staticmethod
    def _parse_lsb_release_content(lines):
        """
        Parse the output of the lsb_release command.

        Parameters:

        * lines: Iterable through the lines of the lsb_release output.
                 Each line must be a unicode string or a UTF-8 encoded byte
                 string.

        Returns:
            A dictionary containing all information items.
        """
        props = {}
        for line in lines:
            line = line.decode('utf-8') if isinstance(line, bytes) else line
            kv = line.strip('\n').split(':', 1)
            if len(kv) != 2:
                # Ignore lines without colon.
                continue
            k, v = kv
            # Normalize keys, e.g. 'Distributor ID' -> 'distributor_id'.
            props.update({k.replace(' ', '_').lower(): v.strip()})
        return props

    def _get_distro_release_info(self):
        """
        Get the information items from the specified distro release file.

        Returns:
            A dictionary containing all information items.
        """
        if self.distro_release_file:
            # If it was specified, we use it and parse what we can, even if
            # its file name or content does not match the expected pattern.
            distro_info = self._parse_distro_release_file(
                self.distro_release_file)
            basename = os.path.basename(self.distro_release_file)
            # The file name pattern for user-specified distro release files
            # is somewhat more tolerant (compared to when searching for the
            # file), because we want to use what was specified as best as
            # possible.
            match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename)
            if match:
                distro_info['id'] = match.group(1)
            return distro_info
        else:
            basenames = os.listdir(_UNIXCONFDIR)
            # We sort for repeatability in cases where there are multiple
            # distro specific files; e.g. CentOS, Oracle, Enterprise all
            # containing `redhat-release` on top of their own.
            basenames.sort()
            for basename in basenames:
                if basename in _DISTRO_RELEASE_IGNORE_BASENAMES:
                    continue
                match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename)
                if match:
                    filepath = os.path.join(_UNIXCONFDIR, basename)
                    distro_info = self._parse_distro_release_file(filepath)
                    if 'name' in distro_info:
                        # The name is always present if the pattern matches
                        self.distro_release_file = filepath
                        distro_info['id'] = match.group(1)
                        return distro_info
            return {}

    def _parse_distro_release_file(self, filepath):
        """
        Parse a distro release file.

        Parameters:

        * filepath: Path name of the distro release file.

        Returns:
            A dictionary containing all information items.
        """
        if os.path.isfile(filepath):
            with open(filepath) as fp:
                # Only parse the first line. For instance, on SLES there
                # are multiple lines. We don't want them...
                return self._parse_distro_release_content(fp.readline())
        return {}

    @staticmethod
    def _parse_distro_release_content(line):
        """
        Parse a line from a distro release file.

        Parameters:
        * line: Line from the distro release file. Must be a unicode string
                or a UTF-8 encoded byte string.

        Returns:
            A dictionary containing all information items.
        """
        if isinstance(line, bytes):
            line = line.decode('utf-8')
        # The line is matched in reversed form; the captured groups are
        # reversed back below.
        matches = _DISTRO_RELEASE_CONTENT_REVERSED_PATTERN.match(
            line.strip()[::-1])
        distro_info = {}
        if matches:
            # regexp ensures non-None
            distro_info['name'] = matches.group(3)[::-1]
            if matches.group(2):
                distro_info['version_id'] = matches.group(2)[::-1]
            if matches.group(1):
                distro_info['codename'] = matches.group(1)[::-1]
        elif line:
            distro_info['name'] = line.strip()
        return distro_info


# Module-global instance, created with default arguments so that it describes
# the distro this package runs on. Used by the module-level accessor functions
# (e.g. distro_release_info(), os_release_attr()).
_distro = LinuxDistribution()


def main():
    """Command-line entry point: print distro name/version/codename, or a
    JSON dump of info() when --json is given."""
    import argparse

    logger = logging.getLogger(__name__)
    logger.setLevel(logging.DEBUG)
    logger.addHandler(logging.StreamHandler(sys.stdout))

    arg_parser = argparse.ArgumentParser(description="Linux distro info tool")
    arg_parser.add_argument(
        '--json',
        '-j',
        help="Output in machine readable format",
        action="store_true")
    parsed_args = arg_parser.parse_args()

    if not parsed_args.json:
        # Human-readable output; version/codename lines are omitted when
        # the corresponding value is empty.
        logger.info('Name: %s', name(pretty=True))
        dist_version = version(pretty=True)
        if dist_version:
            logger.info('Version: %s', dist_version)
        dist_codename = codename()
        if dist_codename:
            logger.info('Codename: %s', dist_codename)
    else:
        logger.info(json.dumps(info(), indent=4, sort_keys=True))


if __name__ == '__main__':
    main()
_vendor/chardet/langthaimodel.py000064400000026032151733136210013001 0ustar00######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################

# 255: Control characters that usually do not exist in any text
# 254: Carriage/Return
# 253: symbol (punctuation) that does not belong to word
# 252: 0 - 9

# The following result for Thai was collected from a limited sample (1M).

# Character Mapping Table:
# Byte-value -> character-order table for TIS-620, indexed by byte value in
# rows of 16 (the trailing hex comments give the row's starting byte).
# Special codes follow the legend above (255 control, 254 CR/LF, 253 symbol,
# 252 digit).
TIS620CharToOrderMap = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255,  # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,  # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,  # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253,  # 30
253,182,106,107,100,183,184,185,101, 94,186,187,108,109,110,111,  # 40
188,189,190, 89, 95,112,113,191,192,193,194,253,253,253,253,253,  # 50
253, 64, 72, 73,114, 74,115,116,102, 81,201,117, 90,103, 78, 82,  # 60
 96,202, 91, 79, 84,104,105, 97, 98, 92,203,253,253,253,253,253,  # 70
209,210,211,212,213, 88,214,215,216,217,218,219,220,118,221,222,
223,224, 99, 85, 83,225,226,227,228,229,230,231,232,233,234,235,
236,  5, 30,237, 24,238, 75,  8, 26, 52, 34, 51,119, 47, 58, 57,
 49, 53, 55, 43, 20, 19, 44, 14, 48,  3, 17, 25, 39, 62, 31, 54,
 45,  9, 16,  2, 61, 15,239, 12, 42, 46, 18, 21, 76,  4, 66, 63,
 22, 10,  1, 36, 23, 13, 40, 27, 32, 35, 86,240,241,242,243,244,
 11, 28, 41, 29, 33,245, 50, 37,  6,  7, 67, 77, 38, 93,246,247,
 68, 56, 59, 65, 69, 60, 70, 80, 71, 87,248,249,250,251,252,253,
)

# Model Table:
# total sequences: 100%
# first 512 sequences: 92.6386%
# first 1024 sequences:7.3177%
# rest  sequences:     1.0230%
# negative sequences:  0.0436%
# Sequence-frequency categories (0-3) for pairs of character orders; consumed
# as the 'precedence_matrix' entry of TIS620ThaiModel below.
ThaiLangModel = (
0,1,3,3,3,3,0,0,3,3,0,3,3,0,3,3,3,3,3,3,3,3,0,0,3,3,3,0,3,3,3,3,
0,3,3,0,0,0,1,3,0,3,3,2,3,3,0,1,2,3,3,3,3,0,2,0,2,0,0,3,2,1,2,2,
3,0,3,3,2,3,0,0,3,3,0,3,3,0,3,3,3,3,3,3,3,3,3,0,3,2,3,0,2,2,2,3,
0,2,3,0,0,0,0,1,0,1,2,3,1,1,3,2,2,0,1,1,0,0,1,0,0,0,0,0,0,0,1,1,
3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2,2,2,2,3,3,2,3,2,3,3,2,2,2,
3,1,2,3,0,3,3,2,2,1,2,3,3,1,2,0,1,3,0,1,0,0,1,0,0,0,0,0,0,0,1,1,
3,3,2,2,3,3,3,3,1,2,3,3,3,3,3,2,2,2,2,3,3,2,2,3,3,2,2,3,2,3,2,2,
3,3,1,2,3,1,2,2,3,3,1,0,2,1,0,0,3,1,2,1,0,0,1,0,0,0,0,0,0,1,0,1,
3,3,3,3,3,3,2,2,3,3,3,3,2,3,2,2,3,3,2,2,3,2,2,2,2,1,1,3,1,2,1,1,
3,2,1,0,2,1,0,1,0,1,1,0,1,1,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,
3,3,3,2,3,2,3,3,2,2,3,2,3,3,2,3,1,1,2,3,2,2,2,3,2,2,2,2,2,1,2,1,
2,2,1,1,3,3,2,1,0,1,2,2,0,1,3,0,0,0,1,1,0,0,0,0,0,2,3,0,0,2,1,1,
3,3,2,3,3,2,0,0,3,3,0,3,3,0,2,2,3,1,2,2,1,1,1,0,2,2,2,0,2,2,1,1,
0,2,1,0,2,0,0,2,0,1,0,0,1,0,0,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,1,0,
3,3,2,3,3,2,0,0,3,3,0,2,3,0,2,1,2,2,2,2,1,2,0,0,2,2,2,0,2,2,1,1,
0,2,1,0,2,0,0,2,0,1,1,0,1,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,
3,3,2,3,2,3,2,0,2,2,1,3,2,1,3,2,1,2,3,2,2,3,0,2,3,2,2,1,2,2,2,2,
1,2,2,0,0,0,0,2,0,1,2,0,1,1,1,0,1,0,3,1,1,0,0,0,0,0,0,0,0,0,1,0,
3,3,2,3,3,2,3,2,2,2,3,2,2,3,2,2,1,2,3,2,2,3,1,3,2,2,2,3,2,2,2,3,
3,2,1,3,0,1,1,1,0,2,1,1,1,1,1,0,1,0,1,1,0,0,0,0,0,0,0,0,0,2,0,0,
1,0,0,3,0,3,3,3,3,3,0,0,3,0,2,2,3,3,3,3,3,0,0,0,1,1,3,0,0,0,0,2,
0,0,1,0,0,0,0,0,0,0,2,3,0,0,0,3,0,2,0,0,0,0,0,3,0,0,0,0,0,0,0,0,
2,0,3,3,3,3,0,0,2,3,0,0,3,0,3,3,2,3,3,3,3,3,0,0,3,3,3,0,0,0,3,3,
0,0,3,0,0,0,0,2,0,0,2,1,1,3,0,0,1,0,0,2,3,0,1,0,0,0,0,0,0,0,1,0,
3,3,3,3,2,3,3,3,3,3,3,3,1,2,1,3,3,2,2,1,2,2,2,3,1,1,2,0,2,1,2,1,
2,2,1,0,0,0,1,1,0,1,0,1,1,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,
3,0,2,1,2,3,3,3,0,2,0,2,2,0,2,1,3,2,2,1,2,1,0,0,2,2,1,0,2,1,2,2,
0,1,1,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,2,1,3,3,1,1,3,0,2,3,1,1,3,2,1,1,2,0,2,2,3,2,1,1,1,1,1,2,
3,0,0,1,3,1,2,1,2,0,3,0,0,0,1,0,3,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,
3,3,1,1,3,2,3,3,3,1,3,2,1,3,2,1,3,2,2,2,2,1,3,3,1,2,1,3,1,2,3,0,
2,1,1,3,2,2,2,1,2,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,
3,3,2,3,2,3,3,2,3,2,3,2,3,3,2,1,0,3,2,2,2,1,2,2,2,1,2,2,1,2,1,1,
2,2,2,3,0,1,3,1,1,1,1,0,1,1,0,2,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,2,3,2,2,1,1,3,2,3,2,3,2,0,3,2,2,1,2,0,2,2,2,1,2,2,2,2,1,
3,2,1,2,2,1,0,2,0,1,0,0,1,1,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,2,3,1,2,3,3,2,2,3,0,1,1,2,0,3,3,2,2,3,0,1,1,3,0,0,0,0,
3,1,0,3,3,0,2,0,2,1,0,0,3,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,2,3,2,3,3,0,1,3,1,1,2,1,2,1,1,3,1,1,0,2,3,1,1,1,1,1,1,1,1,
3,1,1,2,2,2,2,1,1,1,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
3,2,2,1,1,2,1,3,3,2,3,2,2,3,2,2,3,1,2,2,1,2,0,3,2,1,2,2,2,2,2,1,
3,2,1,2,2,2,1,1,1,1,0,0,1,1,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,1,3,3,0,2,1,0,3,2,0,0,3,1,0,1,1,0,1,0,0,0,0,0,1,
1,0,0,1,0,3,2,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,2,2,2,3,0,0,1,3,0,3,2,0,3,2,2,3,3,3,3,3,1,0,2,2,2,0,2,2,1,2,
0,2,3,0,0,0,0,1,0,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
3,0,2,3,1,3,3,2,3,3,0,3,3,0,3,2,2,3,2,3,3,3,0,0,2,2,3,0,1,1,1,3,
0,0,3,0,0,0,2,2,0,1,3,0,1,2,2,2,3,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,
3,2,3,3,2,0,3,3,2,2,3,1,3,2,1,3,2,0,1,2,2,0,2,3,2,1,0,3,0,0,0,0,
3,0,0,2,3,1,3,0,0,3,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,1,3,2,2,2,1,2,0,1,3,1,1,3,1,3,0,0,2,1,1,1,1,2,1,1,1,0,2,1,0,1,
1,2,0,0,0,3,1,1,0,0,0,0,1,0,1,0,0,1,0,1,0,0,0,0,0,3,1,0,0,0,1,0,
3,3,3,3,2,2,2,2,2,1,3,1,1,1,2,0,1,1,2,1,2,1,3,2,0,0,3,1,1,1,1,1,
3,1,0,2,3,0,0,0,3,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,2,3,0,3,3,0,2,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,2,3,1,3,0,0,1,2,0,0,2,0,3,3,2,3,3,3,2,3,0,0,2,2,2,0,0,0,2,2,
0,0,1,0,0,0,0,3,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
0,0,0,3,0,2,0,0,0,0,0,0,0,0,0,0,1,2,3,1,3,3,0,0,1,0,3,0,0,0,0,0,
0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,1,2,3,1,2,3,1,0,3,0,2,2,1,0,2,1,1,2,0,1,0,0,1,1,1,1,0,1,0,0,
1,0,0,0,0,1,1,0,3,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,2,1,0,1,1,1,3,1,2,2,2,2,2,2,1,1,1,1,0,3,1,0,1,3,1,1,1,1,
1,1,0,2,0,1,3,1,1,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,1,
3,0,2,2,1,3,3,2,3,3,0,1,1,0,2,2,1,2,1,3,3,1,0,0,3,2,0,0,0,0,2,1,
0,1,0,0,0,0,1,2,0,1,1,3,1,1,2,2,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
0,0,3,0,0,1,0,0,0,3,0,0,3,0,3,1,0,1,1,1,3,2,0,0,0,3,0,0,0,0,2,0,
0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,
3,3,1,3,2,1,3,3,1,2,2,0,1,2,1,0,1,2,0,0,0,0,0,3,0,0,0,3,0,0,0,0,
3,0,0,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,1,2,0,3,3,3,2,2,0,1,1,0,1,3,0,0,0,2,2,0,0,0,0,3,1,0,1,0,0,0,
0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,2,3,1,2,0,0,2,1,0,3,1,0,1,2,0,1,1,1,1,3,0,0,3,1,1,0,2,2,1,1,
0,2,0,0,0,0,0,1,0,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,0,3,1,2,0,0,2,2,0,1,2,0,1,0,1,3,1,2,1,0,0,0,2,0,3,0,0,0,1,0,
0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,1,1,2,2,0,0,0,2,0,2,1,0,1,1,0,1,1,1,2,1,0,0,1,1,1,0,2,1,1,1,
0,1,1,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,1,
0,0,0,2,0,1,3,1,1,1,1,0,0,0,0,3,2,0,1,0,0,0,1,2,0,0,0,1,0,0,0,0,
0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,3,3,3,3,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,2,3,2,2,0,0,0,1,0,0,0,0,2,3,2,1,2,2,3,0,0,0,2,3,1,0,0,0,1,1,
0,0,1,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,0,
3,3,2,2,0,1,0,0,0,0,2,0,2,0,1,0,0,0,1,1,0,0,0,2,1,0,1,0,1,1,0,0,
0,1,0,2,0,0,1,0,3,0,1,0,0,0,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,1,0,0,1,0,0,0,0,0,1,1,2,0,0,0,0,1,0,0,1,3,1,0,0,0,0,1,1,0,0,
0,1,0,0,0,0,3,0,0,0,0,0,0,3,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,
3,3,1,1,1,1,2,3,0,0,2,1,1,1,1,1,0,2,1,1,0,0,0,2,1,0,1,2,1,1,0,1,
2,1,0,3,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,3,1,0,0,0,0,0,0,0,3,0,0,0,3,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,1,
0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,2,0,0,0,0,0,0,1,2,1,0,1,1,0,2,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,2,0,0,0,1,3,0,1,0,0,0,2,0,0,0,0,0,0,0,1,2,0,0,0,0,0,
3,3,0,0,1,1,2,0,0,1,2,1,0,1,1,1,0,1,1,0,0,2,1,1,0,1,0,0,1,1,1,0,
0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,2,2,1,0,0,0,0,1,0,0,0,0,3,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,
2,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,3,0,0,1,1,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,1,0,1,2,0,1,2,0,0,1,1,0,2,0,1,0,0,1,0,0,0,0,1,0,0,0,2,0,0,0,0,
1,0,0,1,0,1,1,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,1,0,0,0,0,0,0,0,1,1,0,1,1,0,2,1,3,0,0,0,0,1,1,0,0,0,0,0,0,0,3,
1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,0,1,0,1,0,0,2,0,0,2,0,0,1,1,2,0,0,1,1,0,0,0,1,0,0,0,1,1,0,0,0,
1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
1,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,1,0,0,0,
2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,0,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,3,0,0,0,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,0,0,
1,0,0,0,0,0,0,0,0,1,0,0,0,0,2,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,1,1,0,0,2,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
)

# Model descriptor that ties the byte-order map and the sequence-frequency
# table together for the TIS-620 (Thai) charset prober.
TIS620ThaiModel = {
  'char_to_order_map': TIS620CharToOrderMap,
  'precedence_matrix': ThaiLangModel,
  'typical_positive_ratio': 0.926386,
  'keep_english_letter': False,
  'charset_name': "TIS-620",
  'language': 'Thai',
}
_vendor/chardet/__pycache__/escprober.cpython-36.pyc000064400000004742151733136210016445 0ustar003

�Pfn�@sXddlmZddlmZddlmZmZmZddlm	Z	m
Z
mZmZGdd�de�Z
dS)�)�
CharSetProber)�CodingStateMachine)�LanguageFilter�ProbingState�MachineState)�HZ_SM_MODEL�ISO2022CN_SM_MODEL�ISO2022JP_SM_MODEL�ISO2022KR_SM_MODELcsVeZdZdZd�fdd�	Z�fdd�Zedd��Zed	d
��Zdd�Z	d
d�Z
�ZS)�EscCharSetProberz�
    This CharSetProber uses a "code scheme" approach for detecting encodings,
    whereby easily recognizable escape or shift sequences are relied on to
    identify these encodings.
    Ncs�tt|�j|d�g|_|jtj@rD|jjtt	��|jjtt
��|jtj@r`|jjtt��|jtj
@r||jjtt��d|_d|_d|_d|_|j�dS)N)�lang_filter)�superr�__init__�	coding_smrrZCHINESE_SIMPLIFIED�appendrrrZJAPANESEr	ZKOREANr
�active_sm_count�_detected_charset�_detected_language�_state�reset)�selfr)�	__class__��/usr/lib/python3.6/escprober.pyr*szEscCharSetProber.__init__csNtt|�j�x"|jD]}|s qd|_|j�qWt|j�|_d|_d|_dS)NT)	r
rrr�active�lenrrr)rr)rrrr:szEscCharSetProber.resetcCs|jS)N)r)rrrr�charset_nameEszEscCharSetProber.charset_namecCs|jS)N)r)rrrr�languageIszEscCharSetProber.languagecCs|jr
dSdSdS)Ng�G�z��?g)r)rrrr�get_confidenceMszEscCharSetProber.get_confidencecCs�x�|D]�}x�|jD]�}|s|jr&q|j|�}|tjkrhd|_|jd8_|jdkr�tj|_|j	Sq|tj
krtj|_|j�|_
|j|_|j	SqWqW|j	S)NFr�)rrZ
next_staterZERRORrrZNOT_MEr�stateZITS_MEZFOUND_ITZget_coding_state_machinerrr)rZbyte_str�crZcoding_staterrr�feedSs"





zEscCharSetProber.feed)N)�__name__�
__module__�__qualname__�__doc__rr�propertyrrrr"�
__classcell__rr)rrr#srN)Z
charsetproberrZcodingstatemachinerZenumsrrrZescsmrrr	r
rrrrr�<module>s_vendor/chardet/__pycache__/euctwprober.cpython-36.pyc000064400000002031151733136210017007 0ustar003

�Pf��@sDddlmZddlmZddlmZddlmZGdd�de�ZdS)�)�MultiByteCharSetProber)�CodingStateMachine)�EUCTWDistributionAnalysis)�EUCTW_SM_MODELcs4eZdZ�fdd�Zedd��Zedd��Z�ZS)�EUCTWProbercs,tt|�j�tt�|_t�|_|j�dS)N)	�superr�__init__rrZ	coding_smrZdistribution_analyzer�reset)�self)�	__class__��!/usr/lib/python3.6/euctwprober.pyr"s
zEUCTWProber.__init__cCsdS)NzEUC-TWr)r
rrr
�charset_name(szEUCTWProber.charset_namecCsdS)NZTaiwanr)r
rrr
�language,szEUCTWProber.language)�__name__�
__module__�__qualname__r�propertyrr�
__classcell__rr)rr
r!srN)	ZmbcharsetproberrZcodingstatemachinerZchardistributionrZmbcssmrrrrrr
�<module>s_vendor/chardet/__pycache__/cp949prober.cpython-36.opt-1.pyc000064400000002030151733136210017466 0ustar003

�Pf?�@sDddlmZddlmZddlmZddlmZGdd�de�ZdS)�)�EUCKRDistributionAnalysis)�CodingStateMachine)�MultiByteCharSetProber)�CP949_SM_MODELcs4eZdZ�fdd�Zedd��Zedd��Z�ZS)�CP949Probercs,tt|�j�tt�|_t�|_|j�dS)N)	�superr�__init__rrZ	coding_smrZdistribution_analyzer�reset)�self)�	__class__��!/usr/lib/python3.6/cp949prober.pyr#s
zCP949Prober.__init__cCsdS)NZCP949r)r
rrr
�charset_name+szCP949Prober.charset_namecCsdS)NZKoreanr)r
rrr
�language/szCP949Prober.language)�__name__�
__module__�__qualname__r�propertyrr�
__classcell__rr)rr
r"srN)	ZchardistributionrZcodingstatemachinerZmbcharsetproberrZmbcssmrrrrrr
�<module>s_vendor/chardet/__pycache__/chardistribution.cpython-36.opt-1.pyc000064400000014135151733136210020772 0ustar003

�Pf�$�@s�ddlmZmZmZddlmZmZmZddlm	Z	m
Z
mZddlm
Z
mZmZddlmZmZmZGdd�de�ZGdd	�d	e�ZGd
d�de�ZGdd
�d
e�ZGdd�de�ZGdd�de�ZGdd�de�ZdS)�)�EUCTW_CHAR_TO_FREQ_ORDER�EUCTW_TABLE_SIZE� EUCTW_TYPICAL_DISTRIBUTION_RATIO)�EUCKR_CHAR_TO_FREQ_ORDER�EUCKR_TABLE_SIZE� EUCKR_TYPICAL_DISTRIBUTION_RATIO)�GB2312_CHAR_TO_FREQ_ORDER�GB2312_TABLE_SIZE�!GB2312_TYPICAL_DISTRIBUTION_RATIO)�BIG5_CHAR_TO_FREQ_ORDER�BIG5_TABLE_SIZE�BIG5_TYPICAL_DISTRIBUTION_RATIO)�JIS_CHAR_TO_FREQ_ORDER�JIS_TABLE_SIZE�JIS_TYPICAL_DISTRIBUTION_RATIOc@sLeZdZdZdZdZdZdd�Zdd�Zd	d
�Z	dd�Z
d
d�Zdd�ZdS)�CharDistributionAnalysisig�G�z��?g{�G�z�?�cCs0d|_d|_d|_d|_d|_d|_|j�dS)N)�_char_to_freq_order�_table_size�typical_distribution_ratio�_done�_total_chars�_freq_chars�reset)�self�r�&/usr/lib/python3.6/chardistribution.py�__init__.sz!CharDistributionAnalysis.__init__cCsd|_d|_d|_dS)zreset analyser, clear any stateF�N)rrr)rrrrr=szCharDistributionAnalysis.resetcCsX|dkr|j|�}nd}|dkrT|jd7_||jkrTd|j|krT|jd7_dS)z"feed a character with known length�rriN���)�	get_orderrrrr)r�charZchar_len�orderrrr�feedFs
zCharDistributionAnalysis.feedcCsT|jdks|j|jkr|jS|j|jkrN|j|j|j|j}||jkrN|S|jS)z(return confidence based on existing datar)rr�MINIMUM_DATA_THRESHOLD�SURE_NOr�SURE_YES)r�rrrr�get_confidenceTs

z'CharDistributionAnalysis.get_confidencecCs|j|jkS)N)r�ENOUGH_DATA_THRESHOLD)rrrr�got_enough_datadsz(CharDistributionAnalysis.got_enough_datacCsdS)Nrr r)r�byte_strrrrr!isz"CharDistributionAnalysis.get_orderN)
�__name__�
__module__�__qualname__r*r'r&r%rrr$r)r+r!rrrrr(s	rcs$eZdZ�fdd�Zdd�Z�ZS)�EUCTWDistributionAnalysiscs$tt|�j�t|_t|_t|_dS)N)	�superr0rrrrrrr)r)�	__class__rrrrsz"EUCTWDistributionAnalysis.__init__cCs0|d}|dkr(d|d|ddSdSdS)Nr���^r�r r)rr,�
first_charrrrr!xsz#EUCTWDistributionAnalysis.get_order)r-r.r/rr!�
__classcell__rr)r2rr0qsr0cs$eZdZ�fdd�Zdd�Z�ZS)�EUCKRDistributionAnalysiscs$tt|�j�t|_t|_t|_dS)N)	r1r8rrrrrrr)r)r2rrr�sz"EUCKRDistributionAnalysis.__init__cCs0|d}|dkr(d|d|ddSdSdS)Nr�r4rr5r r)rr,r6rrrr!�sz#EUCKRDistributionAnalysis.get_order)r-r.r/rr!r7rr)r2rr8�sr8cs$eZdZ�fdd�Zdd�Z�ZS)�GB2312DistributionAnalysiscs$tt|�j�t|_t|_t|_dS)N)	r1r:rrrr	rr
r)r)r2rrr�sz#GB2312DistributionAnalysis.__init__cCs>|d|d}}|dkr6|dkr6d|d|dSdSdS)Nrrr9r5r4r r)rr,r6�second_charrrrr!�sz$GB2312DistributionAnalysis.get_order)r-r.r/rr!r7rr)r2rr:�sr:cs$eZdZ�fdd�Zdd�Z�ZS)�Big5DistributionAnalysiscs$tt|�j�t|_t|_t|_dS)N)	r1r<rrrrrr
r)r)r2rrr�sz!Big5DistributionAnalysis.__init__cCsX|d|d}}|dkrP|dkr:d|d|ddSd|d|dSndSdS)	Nrr�r5��?�@r r)rr,r6r;rrrr!�sz"Big5DistributionAnalysis.get_order)r-r.r/rr!r7rr)r2rr<�sr<cs$eZdZ�fdd�Zdd�Z�ZS)�SJISDistributionAnalysiscs$tt|�j�t|_t|_t|_dS)N)	r1rArrrrrrr)r)r2rrr�sz!SJISDistributionAnalysis.__init__cCsr|d|d}}|dkr0|dkr0d|d}n&|dkrR|dkrRd|dd}ndS||d	}|d
krnd}|S)
Nrr��������r@�r r r)rr,r6r;r#rrrr!�sz"SJISDistributionAnalysis.get_order)r-r.r/rr!r7rr)r2rrA�srAcs$eZdZ�fdd�Zdd�Z�ZS)�EUCJPDistributionAnalysiscs$tt|�j�t|_t|_t|_dS)N)	r1rIrrrrrrr)r)r2rrr�sz"EUCJPDistributionAnalysis.__init__cCs0|d}|dkr(d|d|ddSdSdS)Nr�r4r5rr r)rr,r"rrrr!�sz#EUCJPDistributionAnalysis.get_order)r-r.r/rr!r7rr)r2rrI�srIN)Z	euctwfreqrrrZ	euckrfreqrrrZ
gb2312freqrr	r
Zbig5freqrrr
Zjisfreqrrr�objectrr0r8r:r<rArIrrrr�<module>sI_vendor/chardet/__pycache__/euckrprober.cpython-36.opt-1.pyc000064400000002031151733136210017730 0ustar003

�Pf��@sDddlmZddlmZddlmZddlmZGdd�de�ZdS)�)�MultiByteCharSetProber)�CodingStateMachine)�EUCKRDistributionAnalysis)�EUCKR_SM_MODELcs4eZdZ�fdd�Zedd��Zedd��Z�ZS)�EUCKRProbercs,tt|�j�tt�|_t�|_|j�dS)N)	�superr�__init__rrZ	coding_smrZdistribution_analyzer�reset)�self)�	__class__��!/usr/lib/python3.6/euckrprober.pyr#s
zEUCKRProber.__init__cCsdS)NzEUC-KRr)r
rrr
�charset_name)szEUCKRProber.charset_namecCsdS)NZKoreanr)r
rrr
�language-szEUCKRProber.language)�__name__�
__module__�__qualname__r�propertyrr�
__classcell__rr)rr
r"srN)	ZmbcharsetproberrZcodingstatemachinerZchardistributionrZmbcssmrrrrrr
�<module>s_vendor/chardet/__pycache__/sbcharsetprober.cpython-36.pyc000064400000005532151733136210017647 0ustar003

�Pf�@s4ddlmZddlmZmZmZGdd�de�ZdS)�)�
CharSetProber)�CharacterCategory�ProbingState�SequenceLikelihoodcsbeZdZdZdZdZdZd�fdd�	Z�fd	d
�Ze	dd��Z
e	d
d��Zdd�Zdd�Z
�ZS)�SingleByteCharSetProber�@igffffff�?g�������?FNcsJtt|�j�||_||_||_d|_d|_d|_d|_	d|_
|j�dS)N)�superr�__init__�_model�	_reversed�_name_prober�_last_order�
_seq_counters�_total_seqs�_total_char�
_freq_char�reset)�self�model�reversedZname_prober)�	__class__��%/usr/lib/python3.6/sbcharsetprober.pyr	'sz SingleByteCharSetProber.__init__cs:tt|�j�d|_dgtj�|_d|_d|_d|_	dS)N��)
rrrr
rZget_num_categoriesrrrr)r)rrrr5szSingleByteCharSetProber.resetcCs|jr|jjS|jdSdS)N�charset_name)rrr
)rrrrr?sz$SingleByteCharSetProber.charset_namecCs|jr|jjS|jjd�SdS)N�language)rrr
�get)rrrrrFsz SingleByteCharSetProber.languagec	Csn|jds|j|�}|s|jS|jd}x�t|�D]�\}}||}|tjkrZ|jd7_||jkr�|jd7_|j	|jkr�|j
d7_
|js�|j	|j|}|jd|}n||j|j	}|jd|}|j|d7<||_	q2W|jd}|jt
jk�rh|j
|jk�rh|j�}||jk�r@|jjd||�t
j|_n(||jk�rh|jjd|||j�t
j|_|jS)NZkeep_english_letter�char_to_order_maprZprecedence_matrixrz$%s confidence = %s, we have a winnerz9%s confidence = %s, below negative shortcut threshhold %s)r
Zfilter_international_words�state�	enumeraterZCONTROLr�SAMPLE_SIZErr
rrrrZ	DETECTING�SB_ENOUGH_REL_THRESHOLD�get_confidence�POSITIVE_SHORTCUT_THRESHOLDZlogger�debugZFOUND_ITZ_state�NEGATIVE_SHORTCUT_THRESHOLDZNOT_ME)	rZbyte_strr�i�c�orderrrZ
confidencerrr�feedMsF







zSingleByteCharSetProber.feedcCsNd}|jdkrJd|jtj|j|jd}||j|j}|dkrJd}|S)Ng{�G�z�?rg�?Ztypical_positive_ratiog�G�z��?)rrrZPOSITIVEr
rr)r�rrrrr#|s
 z&SingleByteCharSetProber.get_confidence)FN)�__name__�
__module__�__qualname__r!r"r$r&r	r�propertyrrr*r#�
__classcell__rr)rrr!s
/rN)Z
charsetproberrZenumsrrrrrrrr�<module>s_vendor/chardet/__pycache__/hebrewprober.cpython-36.pyc000064400000005513151733136210017144 0ustar003

�Pf6�@s,ddlmZddlmZGdd�de�ZdS)�)�
CharSetProber)�ProbingStatecs�eZdZdZdZdZdZdZdZdZ	dZ
d	Zd
ZdZ
dZd
ZdZ�fdd�Zdd�Zdd�Zdd�Zdd�Zdd�Zedd��Zedd��Zedd ��Z�ZS)!�HebrewProber�������������������g{�G�z�?z
ISO-8859-8zwindows-1255cs>tt|�j�d|_d|_d|_d|_d|_d|_|j	�dS)N)
�superr�__init__�_final_char_logical_score�_final_char_visual_score�_prev�_before_prev�_logical_prober�_visual_prober�reset)�self)�	__class__��"/usr/lib/python3.6/hebrewprober.pyr�szHebrewProber.__init__cCsd|_d|_d|_d|_dS)N�� )rrrr)rrrrr�szHebrewProber.resetcCs||_||_dS)N)rr)rZ
logicalProberZvisualProberrrr�set_model_probers�szHebrewProber.set_model_proberscCs||j|j|j|j|jgkS)N)�	FINAL_KAF�	FINAL_MEM�	FINAL_NUN�FINAL_PE�FINAL_TSADI)r�crrr�is_final�szHebrewProber.is_finalcCs||j|j|j|jgkS)N)�
NORMAL_KAF�
NORMAL_MEM�
NORMAL_NUN�	NORMAL_PE)rr%rrr�is_non_final�s
zHebrewProber.is_non_finalcCs�|jtjkrtjS|j|�}x�|D]�}|dkrp|jdkr�|j|j�rT|jd7_q�|j|j�r�|j	d7_	n,|jdkr�|j|j�r�|dkr�|j	d7_	|j|_||_q"Wtj
S)Nrr)�stater�NOT_MEZfilter_high_byte_onlyrr&rrr+r�	DETECTING)rZbyte_strZcurrrr�feed�s 




zHebrewProber.feedcCsx|j|j}||jkr|jS||jkr.|jS|jj�|jj�}||jkrR|jS||jkrd|jS|dkrr|jS|jS)Ng)	rr�MIN_FINAL_CHAR_DISTANCE�LOGICAL_HEBREW_NAME�VISUAL_HEBREW_NAMErZget_confidencer�MIN_MODEL_DISTANCE)rZfinalsubZmodelsubrrr�charset_name�s

zHebrewProber.charset_namecCsdS)NZHebrewr)rrrr�languageszHebrewProber.languagecCs(|jjtjkr"|jjtjkr"tjStjS)N)rr,rr-rr.)rrrrr,szHebrewProber.state)�__name__�
__module__�__qualname__r r'r!r(r"r)r#r*r$ZNORMAL_TSADIr0r3r2r1rrrr&r+r/�propertyr4r5r,�
__classcell__rr)rrr�s.

;rN)Z
charsetproberrZenumsrrrrrr�<module>sc_vendor/chardet/__pycache__/charsetprober.cpython-36.opt-1.pyc000064400000006456151733136210020267 0ustar003

�Pf��@s0ddlZddlZddlmZGdd�de�ZdS)�N�)�ProbingStatec@sneZdZdZddd�Zdd�Zedd��Zd	d
�Zedd��Z	d
d�Z
edd��Zedd��Z
edd��ZdS)�
CharSetProbergffffff�?NcCsd|_||_tjt�|_dS)N)�_state�lang_filter�loggingZ	getLogger�__name__Zlogger)�selfr�r
�#/usr/lib/python3.6/charsetprober.py�__init__'szCharSetProber.__init__cCstj|_dS)N)rZ	DETECTINGr)r	r
r
r�reset,szCharSetProber.resetcCsdS)Nr
)r	r
r
r�charset_name/szCharSetProber.charset_namecCsdS)Nr
)r	�bufr
r
r�feed3szCharSetProber.feedcCs|jS)N)r)r	r
r
r�state6szCharSetProber.statecCsdS)Ngr
)r	r
r
r�get_confidence:szCharSetProber.get_confidencecCstjdd|�}|S)Ns([-])+� )�re�sub)rr
r
r�filter_high_byte_only=sz#CharSetProber.filter_high_byte_onlycCsbt�}tjd|�}xJ|D]B}|j|dd��|dd�}|j�rP|dkrPd}|j|�qW|S)u9
        We define three types of bytes:
        alphabet: english alphabets [a-zA-Z]
        international: international characters [€-ÿ]
        marker: everything else [^a-zA-Z€-ÿ]

        The input buffer can be thought to contain a series of words delimited
        by markers. This function works to filter all words that contain at
        least one international character. All contiguous sequences of markers
        are replaced by a single space ascii character.

        This filter applies to all scripts which do not use English characters.
        s%[a-zA-Z]*[�-�]+[a-zA-Z]*[^a-zA-Z�-�]?Nr��r���r)�	bytearrayr�findall�extend�isalpha)r�filteredZwordsZwordZ	last_charr
r
r�filter_international_wordsBs
z(CharSetProber.filter_international_wordscCs�t�}d}d}x�tt|��D]r}|||d�}|dkr>d}n|dkrJd}|dkr|j�r||kr�|r�|j|||��|jd�|d}qW|s�|j||d	��|S)
a�
        Returns a copy of ``buf`` that retains only the sequences of English
        alphabet and high byte characters that are not between <> characters.
        Also retains English alphabet and high byte characters immediately
        before occurrences of >.

        This filter can be applied to all scripts which contain both English
        characters and extended ASCII characters, but is currently only used by
        ``Latin1Prober``.
        Frr�>�<TrrN)r�range�lenrr)rrZin_tag�prevZcurrZbuf_charr
r
r�filter_with_english_lettersgs"
z)CharSetProber.filter_with_english_letters)N)r�
__module__�__qualname__ZSHORTCUT_THRESHOLDrr
�propertyrrrr�staticmethodrrr$r
r
r
rr#s
%r)rrZenumsr�objectrr
r
r
r�<module>s_vendor/chardet/__pycache__/latin1prober.cpython-36.pyc000064400000005456151733136210017066 0ustar003

�Pf��@s^ddlmZddlmZdZdZdZdZdZdZ	dZ
dZd	Zd
Z
eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee
ee
ee
eeeeeeeeeeeeeeeee
eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee	e	e	e	e	e	e
e
e	e	e	e	e	e	e	e	e
e
e	e	e	e	e	ee	e	e	e	e	e
e
e
eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee�fZdZGdd�de�Zd
S)�)�
CharSetProber)�ProbingState��������csLeZdZ�fdd�Zdd�Zedd��Zedd��Zd	d
�Zdd�Z	�Z
S)
�Latin1Probercs&tt|�j�d|_d|_|j�dS)N)�superr�__init__�_last_char_class�
_freq_counter�reset)�self)�	__class__��"/usr/lib/python3.6/latin1prober.pyraszLatin1Prober.__init__cCs t|_dgt|_tj|�dS)Nr)�OTHr�FREQ_CAT_NUMrrr)rrrrrgszLatin1Prober.resetcCsdS)Nz
ISO-8859-1r)rrrr�charset_namelszLatin1Prober.charset_namecCsdS)N�r)rrrr�languagepszLatin1Prober.languagecCsb|j|�}xP|D]H}t|}t|jt|}|dkr@tj|_P|j|d7<||_qW|j	S)Nrr)
Zfilter_with_english_letters�Latin1_CharToClass�Latin1ClassModelr�	CLASS_NUMr�NOT_MEZ_stater�state)rZbyte_str�cZ
char_classZfreqrrr�feedts



zLatin1Prober.feedcCs\|jtjkrdSt|j�}|dkr(d}n|jd|jdd|}|dkrPd}|d}|S)Ng{�G�z�?grrg4@g\��(\�?)rrr�sumr)rZtotalZ
confidencerrr�get_confidence�s
zLatin1Prober.get_confidence)�__name__�
__module__�__qualname__rr�propertyrrr!r#�
__classcell__rr)rrr`srN)@rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr)Z
charsetproberrZenumsrrZUDFrZASCZASSZACVZACOZASVZASOrrrrrrrr�<module>sh	_vendor/chardet/__pycache__/enums.cpython-36.opt-1.pyc000064400000004753151733136210016551 0ustar003

�Pf}�@shdZGdd�de�ZGdd�de�ZGdd�de�ZGdd�de�ZGd	d
�d
e�ZGdd�de�Zd
S)zr
All of the Enums that are used throughout the chardet package.

:author: Dan Blanchard (dan.blanchard@gmail.com)
c@seZdZdZdZdZdZdS)�
InputStatezS
    This enum represents the different states a universal detector can be in.
    ���N)�__name__�
__module__�__qualname__�__doc__Z
PURE_ASCIIZ	ESC_ASCIIZ	HIGH_BYTE�r	r	�/usr/lib/python3.6/enums.pyrsrc@s<eZdZdZdZdZdZdZdZdZ	eeBZ
e
eBeBZdS)	�LanguageFilterzj
    This enum represents the different language filters we can apply to a
    ``UniversalDetector``.
    rr����N)rrrrZCHINESE_SIMPLIFIEDZCHINESE_TRADITIONALZJAPANESEZKOREANZNON_CJKZALLZCHINESEZCJKr	r	r	r
rsrc@seZdZdZdZdZdZdS)�ProbingStatezG
    This enum represents the different states a prober can be in.
    rrrN)rrrrZ	DETECTINGZFOUND_ITZNOT_MEr	r	r	r
r src@seZdZdZdZdZdZdS)�MachineStatezN
    This enum represents the different states a state machine can be in.
    rrrN)rrrrZSTARTZERRORZITS_MEr	r	r	r
r)src@s,eZdZdZdZdZdZdZedd��Z	dS)	�SequenceLikelihoodzX
    This enum represents the likelihood of a character following the previous one.
    rrr�cCsdS)z::returns: The number of likelihood categories in the enum.rr	)�clsr	r	r
�get_num_categories;sz%SequenceLikelihood.get_num_categoriesN)
rrrrZNEGATIVEZUNLIKELYZLIKELYZPOSITIVE�classmethodrr	r	r	r
r2src@s$eZdZdZdZdZdZdZdZdS)�CharacterCategoryz�
    This enum represents the different categories language models for
    ``SingleByteCharsetProber`` put characters into.

    Anything less than CONTROL is considered a letter.
    �����N)	rrrrZ	UNDEFINEDZ
LINE_BREAKZSYMBOLZDIGITZCONTROLr	r	r	r
rAsrN)r�objectrrrrrrr	r	r	r
�<module>s			_vendor/chardet/__pycache__/sbcharsetprober.cpython-36.opt-1.pyc000064400000005532151733136210020606 0ustar003

�Pf�@s4ddlmZddlmZmZmZGdd�de�ZdS)�)�
CharSetProber)�CharacterCategory�ProbingState�SequenceLikelihoodcsbeZdZdZdZdZdZd�fdd�	Z�fd	d
�Ze	dd��Z
e	d
d��Zdd�Zdd�Z
�ZS)�SingleByteCharSetProber�@igffffff�?g�������?FNcsJtt|�j�||_||_||_d|_d|_d|_d|_	d|_
|j�dS)N)�superr�__init__�_model�	_reversed�_name_prober�_last_order�
_seq_counters�_total_seqs�_total_char�
_freq_char�reset)�self�model�reversedZname_prober)�	__class__��%/usr/lib/python3.6/sbcharsetprober.pyr	'sz SingleByteCharSetProber.__init__cs:tt|�j�d|_dgtj�|_d|_d|_d|_	dS)N��)
rrrr
rZget_num_categoriesrrrr)r)rrrr5szSingleByteCharSetProber.resetcCs|jr|jjS|jdSdS)N�charset_name)rrr
)rrrrr?sz$SingleByteCharSetProber.charset_namecCs|jr|jjS|jjd�SdS)N�language)rrr
�get)rrrrrFsz SingleByteCharSetProber.languagec	Csn|jds|j|�}|s|jS|jd}x�t|�D]�\}}||}|tjkrZ|jd7_||jkr�|jd7_|j	|jkr�|j
d7_
|js�|j	|j|}|jd|}n||j|j	}|jd|}|j|d7<||_	q2W|jd}|jt
jk�rh|j
|jk�rh|j�}||jk�r@|jjd||�t
j|_n(||jk�rh|jjd|||j�t
j|_|jS)NZkeep_english_letter�char_to_order_maprZprecedence_matrixrz$%s confidence = %s, we have a winnerz9%s confidence = %s, below negative shortcut threshhold %s)r
Zfilter_international_words�state�	enumeraterZCONTROLr�SAMPLE_SIZErr
rrrrZ	DETECTING�SB_ENOUGH_REL_THRESHOLD�get_confidence�POSITIVE_SHORTCUT_THRESHOLDZlogger�debugZFOUND_ITZ_state�NEGATIVE_SHORTCUT_THRESHOLDZNOT_ME)	rZbyte_strr�i�c�orderrrZ
confidencerrr�feedMsF







zSingleByteCharSetProber.feedcCsNd}|jdkrJd|jtj|j|jd}||j|j}|dkrJd}|S)Ng{�G�z�?rg�?Ztypical_positive_ratiog�G�z��?)rrrZPOSITIVEr
rr)r�rrrrr#|s
 z&SingleByteCharSetProber.get_confidence)FN)�__name__�
__module__�__qualname__r!r"r$r&r	r�propertyrrr*r#�
__classcell__rr)rrr!s
/rN)Z
charsetproberrZenumsrrrrrrrr�<module>s_vendor/chardet/__pycache__/version.cpython-36.pyc000064400000000550151733136210016137 0ustar003

�Pf��@sdZdZejd�ZdS)z�
This module exists only to simplify retrieving the version number of chardet
from within setup.py and from chardet subpackages.

:author: Dan Blanchard (dan.blanchard@gmail.com)
z3.0.4�.N)�__doc__�__version__�split�VERSION�rr�/usr/lib/python3.6/version.py�<module>s_vendor/chardet/__pycache__/jpcntx.cpython-36.opt-1.pyc000064400000113272151733136210016725 0ustar003

�Pf�L��@s8d`ZGdd�de�ZGdd	�d	e�ZGd
d�de�ZdS)a������c@sPeZdZdZdZdZdZdZdd�Zdd	�Z	d
d�Z
dd
�Zdd�Zdd�Z
dS)�JapaneseContextAnalysis�r�di�rcCs*d|_d|_d|_d|_d|_|j�dS)N)�
_total_rel�_rel_sample�_need_to_skip_char_num�_last_char_order�_done�reset)�self�r�/usr/lib/python3.6/jpcntx.py�__init__{sz JapaneseContextAnalysis.__init__cCs*d|_dg|j|_d|_d|_d|_dS)NrrF���)r
�NUM_OF_CATEGORYrrr
r)rrrrr�s
zJapaneseContextAnalysis.resetcCs�|jr
dS|j}x�||kr�|j|||d��\}}||7}||krV|||_d|_q|dkr�|jdkr�|jd7_|j|jkr�d|_P|jt|j|d7<||_qWdS)NrrTrrr)rr�	get_orderr
r
�MAX_REL_THRESHOLDr�jp2CharContext)r�byte_strZ	num_bytes�i�order�char_lenrrr�feed�s 	

zJapaneseContextAnalysis.feedcCs|j|jkS)N)r
�ENOUGH_REL_THRESHOLD)rrrr�got_enough_data�sz'JapaneseContextAnalysis.got_enough_datacCs,|j|jkr"|j|jd|jS|jSdS)Nr)r
�MINIMUM_DATA_THRESHOLDr�	DONT_KNOW)rrrr�get_confidence�sz&JapaneseContextAnalysis.get_confidencecCsdS)Nrr)rrr)rrrrrr�sz!JapaneseContextAnalysis.get_orderNr)�__name__�
__module__�__qualname__rr!rrr rrrrr"rrrrrrtsrcs0eZdZ�fdd�Zedd��Zdd�Z�ZS)�SJISContextAnalysiscstt|�j�d|_dS)NZ	SHIFT_JIS)�superr&r�
_charset_name)r)�	__class__rrr�szSJISContextAnalysis.__init__cCs|jS)N)r()rrrr�charset_name�sz SJISContextAnalysis.charset_namecCs�|sdS|d}d|ko"dkns@d|ko:dknrld}|dksdd	|ko^dknrpd
|_nd}t|�dkr�|d}|dkr�d|ko�dknr�|d|fSd|fS)Nrr�����r��ZCP932����r)rrr)r(�len)rr�
first_charr�second_charrrrr�s0  zSJISContextAnalysis.get_order)r#r$r%r�propertyr*r�
__classcell__rr)r)rr&�sr&c@seZdZdd�ZdS)�EUCJPContextAnalysiscCs�|sdS|d}|dks0d|ko*dknr6d}n|dkrDd}nd}t|�dkr�|d}|d	kr�d|kovd
knr�|d|fSd
|fS)Nrr���r�r���r)rrr)r3)rrr4rr5rrrr�s  zEUCJPContextAnalysis.get_orderN)r#r$r%rrrrrr8�sr8N�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr
rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr
rrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr
rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr)Sr?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�)r�objectrr&r8rrrr�<module>s�C_vendor/chardet/__pycache__/mbcharsetprober.cpython-36.opt-1.pyc000064400000004151151733136210020574 0ustar003

�PfU
�@s0ddlmZddlmZmZGdd�de�ZdS)�)�
CharSetProber)�ProbingState�MachineStatecsVeZdZdZd�fdd�	Z�fdd�Zedd��Zed	d
��Zdd�Z	d
d�Z
�ZS)�MultiByteCharSetProberz 
    MultiByteCharSetProber
    Ncs,tt|�j|d�d|_d|_ddg|_dS)N)�lang_filter�)�superr�__init__�distribution_analyzer�	coding_sm�
_last_char)�selfr)�	__class__��%/usr/lib/python3.6/mbcharsetprober.pyr	'szMultiByteCharSetProber.__init__cs<tt|�j�|jr|jj�|jr.|jj�ddg|_dS)Nr)rr�resetrr
r)r
)rrrr-s

zMultiByteCharSetProber.resetcCst�dS)N)�NotImplementedError)r
rrr�charset_name5sz#MultiByteCharSetProber.charset_namecCst�dS)N)r)r
rrr�language9szMultiByteCharSetProber.languagecCsx�tt|��D]�}|jj||�}|tjkrN|jjd|j|j	|�t
j|_Pq|tj
krdt
j|_Pq|tjkr|jj�}|dkr�|d|jd<|jj|j|�q|jj||d|d�|�qW|d|jd<|jt
jkr�|jj�r�|j�|jkr�t
j|_|jS)Nz!%s %s prober hit error at byte %srr���)�range�lenrZ
next_staterZERRORZlogger�debugrrrZNOT_MEZ_stateZITS_MEZFOUND_ITZSTARTZget_current_charlenrr
�feed�stateZ	DETECTINGZgot_enough_data�get_confidenceZSHORTCUT_THRESHOLD)r
Zbyte_str�iZcoding_stateZchar_lenrrrr=s.





zMultiByteCharSetProber.feedcCs
|jj�S)N)r
r)r
rrrrZsz%MultiByteCharSetProber.get_confidence)N)�__name__�
__module__�__qualname__�__doc__r	r�propertyrrrr�
__classcell__rr)rrr"srN)Z
charsetproberrZenumsrrrrrrr�<module>s_vendor/chardet/__pycache__/utf8prober.cpython-36.opt-1.pyc000064400000003543151733136210017516 0ustar003

�Pf�
�@sHddlmZddlmZmZddlmZddlmZGdd�de�Z	dS)�)�
CharSetProber)�ProbingState�MachineState)�CodingStateMachine)�
UTF8_SM_MODELcsTeZdZdZ�fdd�Z�fdd�Zedd��Zedd	��Zd
d�Z	dd
�Z
�ZS)�
UTF8Proberg�?cs*tt|�j�tt�|_d|_|j�dS)N)�superr�__init__rr�	coding_sm�
_num_mb_chars�reset)�self)�	__class__�� /usr/lib/python3.6/utf8prober.pyr	&s
zUTF8Prober.__init__cs"tt|�j�|jj�d|_dS)N�)rrrr
r)r
)rrrr,s
zUTF8Prober.resetcCsdS)Nzutf-8r)r
rrr�charset_name1szUTF8Prober.charset_namecCsdS)N�r)r
rrr�language5szUTF8Prober.languagecCs�xj|D]b}|jj|�}|tjkr,tj|_Pq|tjkrBtj|_Pq|tj	kr|jj
�dkr|jd7_qW|jtj
kr�|j�|jkr�tj|_|jS)N�r)r
Z
next_staterZERRORrZNOT_MEZ_stateZITS_MEZFOUND_ITZSTARTZget_current_charlenr�stateZ	DETECTING�get_confidenceZSHORTCUT_THRESHOLD)r
Zbyte_str�cZcoding_staterrr�feed9s



zUTF8Prober.feedcCs.d}|jdkr&||j|j9}d|S|SdS)Ng�G�z��?�g�?)r�
ONE_CHAR_PROB)r
ZunlikerrrrLs

zUTF8Prober.get_confidence)�__name__�
__module__�__qualname__rr	r�propertyrrrr�
__classcell__rr)rrr#srN)
Z
charsetproberrZenumsrrZcodingstatemachinerZmbcssmrrrrrr�<module>s_vendor/chardet/__pycache__/charsetprober.cpython-36.pyc000064400000006456151733136220017331 0ustar003

�Pf��@s0ddlZddlZddlmZGdd�de�ZdS)�N�)�ProbingStatec@sneZdZdZddd�Zdd�Zedd��Zd	d
�Zedd��Z	d
d�Z
edd��Zedd��Z
edd��ZdS)�
CharSetProbergffffff�?NcCsd|_||_tjt�|_dS)N)�_state�lang_filter�loggingZ	getLogger�__name__Zlogger)�selfr�r
�#/usr/lib/python3.6/charsetprober.py�__init__'szCharSetProber.__init__cCstj|_dS)N)rZ	DETECTINGr)r	r
r
r�reset,szCharSetProber.resetcCsdS)Nr
)r	r
r
r�charset_name/szCharSetProber.charset_namecCsdS)Nr
)r	�bufr
r
r�feed3szCharSetProber.feedcCs|jS)N)r)r	r
r
r�state6szCharSetProber.statecCsdS)Ngr
)r	r
r
r�get_confidence:szCharSetProber.get_confidencecCstjdd|�}|S)Ns([-])+� )�re�sub)rr
r
r�filter_high_byte_only=sz#CharSetProber.filter_high_byte_onlycCsbt�}tjd|�}xJ|D]B}|j|dd��|dd�}|j�rP|dkrPd}|j|�qW|S)u9
        We define three types of bytes:
        alphabet: english alphabets [a-zA-Z]
        international: international characters [€-ÿ]
        marker: everything else [^a-zA-Z€-ÿ]

        The input buffer can be thought to contain a series of words delimited
        by markers. This function works to filter all words that contain at
        least one international character. All contiguous sequences of markers
        are replaced by a single space ascii character.

        This filter applies to all scripts which do not use English characters.
        s%[a-zA-Z]*[�-�]+[a-zA-Z]*[^a-zA-Z�-�]?Nr��r���r)�	bytearrayr�findall�extend�isalpha)r�filteredZwordsZwordZ	last_charr
r
r�filter_international_wordsBs
z(CharSetProber.filter_international_wordscCs�t�}d}d}x�tt|��D]r}|||d�}|dkr>d}n|dkrJd}|dkr|j�r||kr�|r�|j|||��|jd�|d}qW|s�|j||d	��|S)
a�
        Returns a copy of ``buf`` that retains only the sequences of English
        alphabet and high byte characters that are not between <> characters.
        Also retains English alphabet and high byte characters immediately
        before occurrences of >.

        This filter can be applied to all scripts which contain both English
        characters and extended ASCII characters, but is currently only used by
        ``Latin1Prober``.
        Frr�>�<TrrN)r�range�lenrr)rrZin_tag�prevZcurrZbuf_charr
r
r�filter_with_english_lettersgs"
z)CharSetProber.filter_with_english_letters)N)r�
__module__�__qualname__ZSHORTCUT_THRESHOLDrr
�propertyrrrr�staticmethodrrr$r
r
r
rr#s
%r)rrZenumsr�objectrr
r
r
r�<module>s_vendor/chardet/__pycache__/langhebrewmodel.cpython-36.opt-1.pyc000064400000055445151733136220020566 0ustar003

�PfQ,�@sd�Zd�Zeed�d�d�d�d��Zd�S)������E�[�O�P�\�Y�a�Z�D�o�p�R�I�_�U�N�y�V�G�C�f�k�T�r�g�s�2�J�<�=�*�L�F�@�5�i�]�8�A�6�1�B�n�3�+�,�?�Q�M�b�K�l�|���������(�:���������������������S�4�/�.�H� �^���q���m���������"�t���v�d�����u�w�h�}�����W�c���j�z�{���7�����e�����x���0�'�9����;�)�X�!�%�$���#���>����~�����&�-��������������������	������������������
�������
����`�gC��|�?Fzwindows-1255ZHebrew)Zchar_to_order_mapZprecedence_matrixZtypical_positive_ratioZkeep_english_letterZcharset_nameZlanguageN(rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8rrrrrr9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rr�r�r(r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r
�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r
�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r
�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r
�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�)ZWIN1255_CHAR_TO_ORDER_MAPZHEBREW_LANG_MODELZWin1255HebrewModel�r�r��%/usr/lib/python3.6/langhebrewmodel.py�<module>&s*
_vendor/chardet/__pycache__/mbcharsetprober.cpython-36.pyc000064400000004151151733136220017636 0ustar003

�PfU
�@s0ddlmZddlmZmZGdd�de�ZdS)�)�
CharSetProber)�ProbingState�MachineStatecsVeZdZdZd�fdd�	Z�fdd�Zedd��Zed	d
��Zdd�Z	d
d�Z
�ZS)�MultiByteCharSetProberz 
    MultiByteCharSetProber
    Ncs,tt|�j|d�d|_d|_ddg|_dS)N)�lang_filter�)�superr�__init__�distribution_analyzer�	coding_sm�
_last_char)�selfr)�	__class__��%/usr/lib/python3.6/mbcharsetprober.pyr	'szMultiByteCharSetProber.__init__cs<tt|�j�|jr|jj�|jr.|jj�ddg|_dS)Nr)rr�resetrr
r)r
)rrrr-s

zMultiByteCharSetProber.resetcCst�dS)N)�NotImplementedError)r
rrr�charset_name5sz#MultiByteCharSetProber.charset_namecCst�dS)N)r)r
rrr�language9szMultiByteCharSetProber.languagecCsx�tt|��D]�}|jj||�}|tjkrN|jjd|j|j	|�t
j|_Pq|tj
krdt
j|_Pq|tjkr|jj�}|dkr�|d|jd<|jj|j|�q|jj||d|d�|�qW|d|jd<|jt
jkr�|jj�r�|j�|jkr�t
j|_|jS)Nz!%s %s prober hit error at byte %srr���)�range�lenrZ
next_staterZERRORZlogger�debugrrrZNOT_MEZ_stateZITS_MEZFOUND_ITZSTARTZget_current_charlenrr
�feed�stateZ	DETECTINGZgot_enough_data�get_confidenceZSHORTCUT_THRESHOLD)r
Zbyte_str�iZcoding_stateZchar_lenrrrr=s.





zMultiByteCharSetProber.feedcCs
|jj�S)N)r
r)r
rrrrZsz%MultiByteCharSetProber.get_confidence)N)�__name__�
__module__�__qualname__�__doc__r	r�propertyrrrr�
__classcell__rr)rrr"srN)Z
charsetproberrZenumsrrrrrrr�<module>s_vendor/chardet/__pycache__/langhebrewmodel.cpython-36.pyc000064400000055445151733136220017627 0ustar003

�PfQ,�@sd�Zd�Zeed�d�d�d�d��Zd�S)������E�[�O�P�\�Y�a�Z�D�o�p�R�I�_�U�N�y�V�G�C�f�k�T�r�g�s�2�J�<�=�*�L�F�@�5�i�]�8�A�6�1�B�n�3�+�,�?�Q�M�b�K�l�|���������(�:���������������������S�4�/�.�H� �^���q���m���������"�t���v�d�����u�w�h�}�����W�c���j�z�{���7�����e�����x���0�'�9����;�)�X�!�%�$���#���>����~�����&�-��������������������	������������������
�������
����`�gC��|�?Fzwindows-1255ZHebrew)Zchar_to_order_mapZprecedence_matrixZtypical_positive_ratioZkeep_english_letterZcharset_nameZlanguageN(rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8rrrrrr9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rr�r�r(r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r
�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r
�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r
�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r
�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�)ZWIN1255_CHAR_TO_ORDER_MAPZHEBREW_LANG_MODELZWin1255HebrewModel�r�r��%/usr/lib/python3.6/langhebrewmodel.py�<module>&s*
_vendor/chardet/__pycache__/jisfreq.cpython-36.opt-1.pyc000064400000126637151733136220017074 0ustar003

�Pf�d�@sdZdZ�dZ�dS(g@i�(������'�O��}�������]�
�
���
���������������X�}�����k��g
�
��k������������%�&�0�1�,�-�������������<���p�������������g������W�X�����h
�"�������
�	��\����
�/
�����������0
��
�h�����������������������������������������������������������������������������	�
���
������������������� ��!�"�#�$�%�&�'�(�)�*�+�,�-�.�/�0�1�2�3�4�5�6�7��V�j�4����B���8�9�:�;�<�=�>�v�����S��e��f���d	�+����a�w�����I���~���
��?�@�A�B�C�D�
��-���i���
��E�^�.���3��i
�F�/�Y�������j��
��G�H�I�J�q��1
��Y��k�/��2���#�����*�������[��\�5��!�!�	�%�@�l��'�A����4�
�������<����-���7�S����~�K�=��E��;���7�7�8�m�&���
��O�K�=�~�d���L�M�N�O�P�Q�R�S�T�U�V�Y�>�J�"�p	�p������T�_���.�X���L�j�e��9�P� �l�y�����D�����)�h���F�?��+���g�c����B�]�N����8�j�:�5���7���R�4�G�d�����n��h�t�6�3�$�W�C���:
�x����*�V	��W�X�Y�Z�[�\�]�^�_�`��a�b�c�d�e�f�g�h�i�j�k�l�m�n�o�p�q�r�s�t�
�u�v�w�x�y�z�{�|�������l�}��~���	��
�����
�������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������k���
�>��
��
�	��
	�j
�����Z�[�\�]������	�
���
������������������� �!�"�#�$�%�&�'�(�)�*�+�,�-�.�/�0�1�2�3�4�5�6�7�8�9�:�;�<�=�>�?�@�A�B�C�D�E�F�G�H�I�J�K�L�M�N�O�P�Q�R�S�T�U�V�W�X�Y�Z�[�\�]�^�_�`�a�b�c�d�e�f�g�h�i�j�k�l�m�n�o�p�q�r�s�t�u�v�w�x�y�z�{�|�}�~����������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������	�
���
������������������� �!�"�#�$�%�&�'�(�)�*�+�,�-�.�/�0�1�2�3�4�5�6�7�8�9�:�;�<�=�>�?�@�A�B�C�D�E�F�G�H�I�J�K�L�M�N�O�P�Q�R�S�T�U�V�W�X�Y�Z�[�\�]�^�_�`�a�b�c�d�e�f�g�h�i�j�k�l�m�n�o�p�q�r�s�t�u�v�w�x�y�z�{�|�}�~����������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������	�
���
������������������� �!�"�#�$�%�&�'�(�)�*�+�,�-�.�/�0�1�2�3�4�5�6�7�8�9�:�;�<�=�>�?�@�A�B�C�D�E�F�G�H�I�J�K�L�M�N�O�P�Q�R�S�T�U�V�W�X�Y�Z�[�\�]�^�_�`�a�b�c�d�e�f�g�h�i�j�k�l�m�n�o�p�q�r�s�t�u�v�w�x�y�z�{�|�}�~����������������������������������������������������������������������������������������������������������������������������������������
����������2
�����{�T���a
�
�^�����|��=��m�k
�����	�V�y�	��
�q	�����
�	������!��	��������C�O�3��������r	���n���+�����
�p�q���;
�A������C�0��n�
����T�C�o�
��
�#	���~��P����������	���	��$	�z�����&���������������_���U���
�s	��	����I����������3
����$�W�1�5�P��	����������X����l
����H��	�������
�
�W���~�p��$����@�L����	�����u���J�I	�W	����_���	�<�����]�D�������h���������f�-��}�t	���	�������,���
�b����X���F��{�`����+�3�q��m��4
�������B�l�X	�
��%
���%	�2�u��	��	��J��|�r��@����������
���	�L
�������b
�J	��c�����������4	�R��m
�T��������������e	����?�
������-�	��*�E����+����&	�	����/�����.�s�[���+��
��
�B�c
��������	�)��,���u	��	��9�&
�������v	���Y���3��
�����D�����d���|�(�y������<�8��	�i����d
�5
�B��n���������|��	�!���)�@�n
�m��	�0�a�����	�z�'�����	�W���[�v�����	����	�o
���	��
���'
���"���9������J���h����e
���\�����p
�@��Q�9������	����w	�i�E��J�I��	��������f����
���1�����
����'	�r�
������
���c��	�q��x����
�2�������	�*���b�F��v���d�
����!�R����Q����P����������(	����_��`���f
��6
���A����a�����u�T��2��������^�C����
��[����
�v�%������n�!�F��Z�^������d������x	���]���B��c��
�����P�q�	��H�Y	����(
���
���g�}�K�	����4���W�����C�g
�-�d���k��4�}��q
�~�+�	����h�@	�	��@�A	�D�:���e��
���q�w�����������d���Z��*�M
�[����s�Z�
����>��'�-����
�����)	�������t�F�7
���t�#��������Y���f�k�b�1���
��6�o�c��q��	������
�u��:�����N
���U�=��	�v���l�N������]��;������
����l�L�	�}�������B	��8
�Q���#�`�T���	�x��r��	���������	���3�n���������{���@��������9
����5�h
�d�Z	����i������s�-�r
�������[���0������.�S�	�R�O
�������;��4�y	�
�;��z	��p��� ����:
�(�|�
��"����������O��e����
�C	�t�)
��6�K����8��P
��h���������3�$�����*
�=�b�e���
�4�����	�.�7��\����j����������/������=���^���5	����	�\�������9�#�+�
�s
�,��7���Y�����
�M���+
�i
�Q���6	���������y���s�#�{	��
�U�f�<����v����)��
�������m�<���	�����g��	�l��� �D	�b�u��
��D�B����
�[	���8����>�����������]���D�������6�Z���	�5���
��
�<�m��
�,�	��u�^�	����g����I���g����\�:�
�M�t�	���E��o��������E�R�����E�j
���g�W���K�	�C��=���]�$�!��	�����`�K�v���0�i��^����3���"��������a�k
��;
�w��������
�y�������P���	�w�����t
�����������J�a���]�������	�����L�h�������
�u
�j���#�C����	�a���!�s�|	��	�Y��
����h�O�5���z�i�
�S���L���.��&��	�<
�7���s�A���M��
��s���*�,
�%�
��	��k���&�f���O�j�"��(�-��[�������-�=��}	���M�S���������������
�
��
��~	��J�t�k�v�e�y�����<
���f��M�k��
���o�������w�v
�l���]���.��
������/�f	�q�$�g��G���n��>�6�=
�������N��	�x��e�*	��h������$�����o�b�,�����t����	�����
�V���
�i������w
���p�H���V��i�<�Z�	�8�r�w��&������/�������>��>
�E	�x���&�e���w�	����5�����
��\����S�i������2������+�'���%����x
�O��
�Q
���������_�H������g����y
��
�0�&�+	������l
�'��'�z��	�	��_����������?
��
����	��z
���m
�|�
��	�g	�0���� ��*���
�`�w���
�#���c�)�����R
�������T����r��V�����_�����r����
�`�x���
���f���n
�=
�*�A���	�(���x��S
��{�T
��9���M��r����;��(��%��[�a�b���D����-
�\	�����o
�E�m�)���!���c��,	�1��]	����>�I����	�T���������y��k�z�x���N�	�����	�5����"���H�<�d��
���h	��	��
�p
���:�L�1�����
���>
���q
�?�Z�M�I���B�K	�D��#�
�
�Y�>������`�������������� ����c���������T��a��2�y�/���"�U
�����b����/���	����
�{�
��j����5�*�������a�x����	��F������7����	�?��w���������+��8����
�N��
����,���n��
���o�s�_�?���	�����	����?�0��	�	��y�3�f��=���l��	��v�]��	�	�����z��
��y�j��u����p���j���	
�i�z����r
������������F��'��	�{
�n��x�u�$����M�����k��p��
�^	�q�
�`�|��	��

�|
�	��
���	�
�����	�G���-���Z�1�Y���-	�	�e����[�C����e��.�-�����	���
�
�7	�b�l�	������{���.�	����z���
�
��?
����9��
�����}��
�a��	�
�Q�	�d�;�V
���/�^�D���?�m��
��
�9�I�
�����
�����
�{�}� ������&���~���X���}
��
���9����:��"�\����
�_	���8�W�~
�^�%�P�s��w����8	��L����.
�����
����N�����P��S������:�R�'�0�b���
�������z��	�����K�������
�r��q�
�����n�L	�����������X�@
���g�i���4��:����r�	��Q��
��	�;�'��>���b�m���w�����s
�����A
��t
�9	�����	�o�^���2��c��������r����!��t��1�{���������s��������x��v�a�p����
���L�O���U��.����)�.	��E�	�"��y��	������(�
���q�E�6�
��
�2�u�6������.��i	����V���s��
���G�G�j�t��W��	���`����u����
���{�Q��	�O�C���
���J�	�	�	�$������/	����_�����j	�R��7���/
��3�D�u
�6�X�4��������>����0
�`�`	�
������U��� �
�r���)���5���H��� 
���~�r��
������	���]����������@
����
�a���v����/�!�6������7�w��	�3�c�8�x����B
�����M	���F	��g����(��
�:	����H���
�f����"��
�~������
�y������ �!�N��
�4�A
���d�e����z������������0	�����������{������
��f�(�l���G�	����^�B
�m�F�|��U����2�R�����#���
����F����U����1���c�k��N	�S�	�����
����$�O	���Y����
���G����W
�	�C
�?����~����U�F�����������
�C
�	�����e�
������/�Q�_�v
�;���P�)���G�m�	���G�,�I�z�_�	���	�9�n��
�A�	�
�X
���#�V�)�<�%��}���H�w
�5��p�.�����m�����
�#�h�|�b����@�(�^�z����A��
� ��"��g��|���������6��X���,�����		����$�#�I�k	�����:�	��Q��o�h�V�$�
���t����Y
�,����
��D
���	�~��
�
���%��� ��������
� �7�/�
	������	�E�V����
�E
�u��&�������	���o���Z��i���	�����!���n�8���'����������	��G��R�2����:��,����Z
�l��(�X�P	�?���	�o��������W�*����;�8��&�)����
�o������H�_���0���x
��0��;	��
���X��	�����1��Z�*�%��	���|�d�`���S�j�}�\�����G	���	�f�q�%�B��1
�p��"�������|������	���l	�2
��
��
���@�h��;����	��	���&�����}��%�����?��	���a	�y
��
����	��=�1��<��{�#��������
�A��
��t���	��>�
�K�3
���=����\�����'�A�+�g���b	�K�������
���
���4
��*��m	�	��
�o�N��0�(�i��$�4� �{����Q	����U�����,�`���	����
�j�)� ���-�6���	�F
�2�)��
�*��YN(rrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r	r	r	r	r	r	r	r	r	r		r
	r	r	r
	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r 	r!	r"	r#	r$	r%	r&	r'	r(	r)	r*	r+	r,	r-	r.	r/	r0	r1	r2	r3	r4	r5	r6	r7	r8	r9	r:	r;	r<	r=	r>	r?	r@	rA	rB	rC	rD	rE	rF	rG	rH	rI	rJ	rK	rL	rM	rN	rO	rP	rQ	rR	rS	rT	rU	rV	rW	rX	rY	rZ	r[	r\	r]	r^	r_	r`	ra	rb	rc	rd	re	rf	rg	rh	ri	rj	rk	rl	rm	rn	ro	rp	rq	rr	rs	rt	ru	rv	rw	rx	ry	rz	r{	r|	r}	r~	r	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r
r
r
r
r
r
r
r
r
r	
r

r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r 
r!
r"
r#
r$
r%
r&
r'
r(
r)
r*
r+
r,
r-
r.
r/
r0
r1
r2
r3
r4
r5
r6
r7
r8
r9
r:
r;
r<
r=
r>
r?
r@
rA
rB
rC
rD
rE
rF
rG
rH
rI
rJ
rK
rL
rM
rN
rO
rP
rQ
rR
rS
rT
rU
rV
rW
rX
rY
rZ
r[
r\
r]
r^
r_
r`
ra
rb
rc
rd
re
rf
rg
rh
ri
rj
rk
rl
rm
rn
ro
rp
rq
rr
rs
rt
ru
rv
rw
rx
ry
rz
r{
r|
r}
r~
r
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r
r
r
r
r
r
r
r
r
r	
r

r
r
r

r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r 
r!
r"
r#
r$
r%
r&
r'
r(
r)
r*
r+
r,
r-
r.
r/
r0
r1
r2
r3
r4
r5
r6
r7
r8
r9
r:
r;
r<
r=
r>
r?
r@
rA
rB
rC
rD
rE
rF
rG
rH
rI
rJ
rK
rL
rM
rN
rO
rP
rQ
rR
rS
rT
rU
rV
rW
rX
rY
rZ
r[
r\
r]
r^
r_
r`
ra
rb
rc
rd
re
rf
rg
rh
ri
rj
rk
rl
rm
rn
ro
rp
rq
rr
rs
rt
ru
rv
rw
rx
ry
rz
r{
r|
r}
r~
r
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrr)ZJIS_TYPICAL_DISTRIBUTION_RATIOZJIS_TABLE_SIZEZJIS_CHAR_TO_FREQ_ORDER�rr�/usr/lib/python3.6/jisfreq.py�<module>,s$_vendor/chardet/__pycache__/sbcsgroupprober.cpython-36.opt-1.pyc000064400000002776151733136220020647 0ustar003

�Pf�
�@s�ddlmZddlmZddlmZmZmZmZm	Z	m
Z
ddlmZm
Z
ddlmZmZddlmZddlmZddlmZdd	lmZGd
d�de�ZdS)
�)�CharSetGroupProber)�SingleByteCharSetProber)�Win1251CyrillicModel�
Koi8rModel�Latin5CyrillicModel�MacCyrillicModel�Ibm866Model�Ibm855Model)�Latin7GreekModel�Win1253GreekModel)�Latin5BulgarianModel�Win1251BulgarianModel)�TIS620ThaiModel)�Win1255HebrewModel)�HebrewProber)�Latin5TurkishModelcseZdZ�fdd�Z�ZS)�SBCSGroupProberc
s�tt|�j�tt�tt�tt�tt�tt�tt	�tt
�tt�tt�tt
�tt�tt�g|_t�}ttd|�}ttd|�}|j||�|jj|||g�|j�dS)NFT)�superr�__init__rrrrrrr	r
rrr
rrZprobersrrZset_model_probers�extend�reset)�selfZ
hebrew_proberZlogical_hebrew_proberZvisual_hebrew_prober)�	__class__��%/usr/lib/python3.6/sbcsgroupprober.pyr,s,
zSBCSGroupProber.__init__)�__name__�
__module__�__qualname__r�
__classcell__rr)rrr+srN)ZcharsetgroupproberrZsbcharsetproberrZlangcyrillicmodelrrrrrr	Zlanggreekmodelr
rZlangbulgarianmodelrr
Z
langthaimodelrZlanghebrewmodelrZhebrewproberrZlangturkishmodelrrrrrr�<module>s _vendor/chardet/__pycache__/eucjpprober.cpython-36.opt-1.pyc000064400000004436151733136220017741 0ustar003

�Pf��@s`ddlmZmZddlmZddlmZddlmZddl	m
Z
ddlmZGdd�de�Z
d	S)
�)�ProbingState�MachineState)�MultiByteCharSetProber)�CodingStateMachine)�EUCJPDistributionAnalysis)�EUCJPContextAnalysis)�EUCJP_SM_MODELcsPeZdZ�fdd�Z�fdd�Zedd��Zedd��Zd	d
�Zdd�Z	�Z
S)
�EUCJPProbercs4tt|�j�tt�|_t�|_t�|_	|j
�dS)N)�superr	�__init__rr�	coding_smr�distribution_analyzerr�context_analyzer�reset)�self)�	__class__��!/usr/lib/python3.6/eucjpprober.pyr%s

zEUCJPProber.__init__cstt|�j�|jj�dS)N)r
r	rr)r)rrrr,szEUCJPProber.resetcCsdS)NzEUC-JPr)rrrr�charset_name0szEUCJPProber.charset_namecCsdS)NZJapaneser)rrrr�language4szEUCJPProber.languagecCs6x�tt|��D]�}|jj||�}|tjkrN|jjd|j|j	|�t
j|_Pq|tj
krdt
j|_Pq|tjkr|jj�}|dkr�|d|jd<|jj|j|�|jj|j|�q|jj||d|d�|�|jj||d|d�|�qW|d|jd<|jt
jk�r0|jj��r0|j�|jk�r0t
j|_|jS)Nz!%s %s prober hit error at byte %s�r���)�range�lenrZ
next_staterZERRORZlogger�debugrrrZNOT_MEZ_stateZITS_MEZFOUND_ITZSTARTZget_current_charlenZ
_last_charr�feedr
�stateZ	DETECTINGZgot_enough_data�get_confidenceZSHORTCUT_THRESHOLD)rZbyte_str�iZcoding_stateZchar_lenrrrr8s4




zEUCJPProber.feedcCs|jj�}|jj�}t||�S)N)rrr
�max)rZcontext_confZdistrib_confrrrrYs

zEUCJPProber.get_confidence)�__name__�
__module__�__qualname__rr�propertyrrrr�
__classcell__rr)rrr	$s!r	N)ZenumsrrZmbcharsetproberrZcodingstatemachinerZchardistributionrZjpcntxrZmbcssmrr	rrrr�<module>s_vendor/chardet/__pycache__/universaldetector.cpython-36.pyc000064400000013173151733136220020222 0ustar003

�Pf�0�@s�dZddlZddlZddlZddlmZddlmZmZm	Z	ddl
mZddlm
Z
ddlmZdd	lmZGd
d�de�ZdS)a
Module containing the UniversalDetector detector class, which is the primary
class a user of ``chardet`` should use.

:author: Mark Pilgrim (initial port to Python)
:author: Shy Shalom (original C code)
:author: Dan Blanchard (major refactoring for 3.0)
:author: Ian Cordasco
�N�)�CharSetGroupProber)�
InputState�LanguageFilter�ProbingState)�EscCharSetProber)�Latin1Prober)�MBCSGroupProber)�SBCSGroupProberc	@sneZdZdZdZejd�Zejd�Zejd�Z	dddd	d
ddd
d�Z
ejfdd�Z
dd�Zdd�Zdd�ZdS)�UniversalDetectoraq
    The ``UniversalDetector`` class underlies the ``chardet.detect`` function
    and coordinates all of the different charset probers.

    To get a ``dict`` containing an encoding and its confidence, you can simply
    run:

    .. code::

            u = UniversalDetector()
            u.feed(some_bytes)
            u.close()
            detected = u.result

    g�������?s[�-�]s(|~{)s[�-�]zWindows-1252zWindows-1250zWindows-1251zWindows-1256zWindows-1253zWindows-1255zWindows-1254zWindows-1257)z
iso-8859-1z
iso-8859-2z
iso-8859-5z
iso-8859-6z
iso-8859-7z
iso-8859-8z
iso-8859-9ziso-8859-13cCsNd|_g|_d|_d|_d|_d|_d|_||_tj	t
�|_d|_|j
�dS)N)�_esc_charset_prober�_charset_probers�result�done�	_got_data�_input_state�
_last_char�lang_filter�loggingZ	getLogger�__name__�logger�_has_win_bytes�reset)�selfr�r�'/usr/lib/python3.6/universaldetector.py�__init__QszUniversalDetector.__init__cCsZdddd�|_d|_d|_d|_tj|_d|_|jr>|jj	�x|j
D]}|j	�qFWdS)z�
        Reset the UniversalDetector and all of its probers back to their
        initial states.  This is called by ``__init__``, so you only need to
        call this directly in between analyses of different documents.
        Ng)�encoding�
confidence�languageF�)rrrrr�
PURE_ASCIIrrrrr
)r�proberrrrr^s
zUniversalDetector.resetcCs>|jr
dSt|�sdSt|t�s(t|�}|js�|jtj�rJdddd�|_nv|jtj	tj
f�rldddd�|_nT|jd�r�dddd�|_n:|jd	�r�d
ddd�|_n |jtjtjf�r�dddd�|_d|_|jd
dk	r�d|_dS|j
tjk�r.|jj|��rtj|_
n*|j
tjk�r.|jj|j|��r.tj|_
|dd�|_|j
tjk�r�|j�s^t|j�|_|jj|�tjk�r:|jj|jj�|jjd�|_d|_n�|j
tjk�r:|j�s�t |j�g|_|jt!j"@�r�|jj#t$��|jj#t%��x@|jD]6}|j|�tjk�r�|j|j�|jd�|_d|_P�q�W|j&j|��r:d|_'dS)a�
        Takes a chunk of a document and feeds it through all of the relevant
        charset probers.

        After calling ``feed``, you can check the value of the ``done``
        attribute to see if you need to continue feeding the
        ``UniversalDetector`` more data, or if it has made a prediction
        (in the ``result`` attribute).

        .. note::
           You should always call ``close`` when you're done feeding in your
           document if ``done`` is not already ``True``.
        Nz	UTF-8-SIGg�?�)rrrzUTF-32s��zX-ISO-10646-UCS-4-3412s��zX-ISO-10646-UCS-4-2143zUTF-16Trr���)(r�len�
isinstance�	bytearrayr�
startswith�codecs�BOM_UTF8r�BOM_UTF32_LE�BOM_UTF32_BE�BOM_LE�BOM_BErrr!�HIGH_BYTE_DETECTOR�search�	HIGH_BYTE�ESC_DETECTORrZ	ESC_ASCIIrrr�feedrZFOUND_IT�charset_name�get_confidencerr
r	rZNON_CJK�appendr
r�WIN_BYTE_DETECTORr)rZbyte_strr"rrrr3os|





zUniversalDetector.feedc	Cs�|jr|jSd|_|js&|jjd�n�|jtjkrBdddd�|_n�|jtjkr�d}d}d}x,|j	D]"}|slqb|j
�}||krb|}|}qbW|r�||jkr�|j}|jj
�}|j
�}|jd	�r�|jr�|jj||�}|||jd�|_|jj�tjk�rz|jd
dk�rz|jjd�xn|j	D]d}|�s �qt|t��rZxF|jD] }|jjd|j|j|j
���q4Wn|jjd|j|j|j
���qW|jS)
z�
        Stop analyzing the current document and come up with a final
        prediction.

        :returns:  The ``result`` attribute, a ``dict`` with the keys
                   `encoding`, `confidence`, and `language`.
        Tzno data received!�asciig�?r#)rrrNgziso-8859rz no probers hit minimum thresholdz%s %s confidence = %s)rrrr�debugrrr!r1r
r5�MINIMUM_THRESHOLDr4�lowerr(r�ISO_WIN_MAP�getrZgetEffectiveLevelr�DEBUGr&rZprobers)	rZprober_confidenceZmax_prober_confidenceZ
max_proberr"r4Zlower_charset_namerZgroup_proberrrr�close�s`	

zUniversalDetector.closeN)r�
__module__�__qualname__�__doc__r:�re�compiler/r2r7r<rZALLrrr3r?rrrrr3s"



mr)rBr)rrCZcharsetgroupproberrZenumsrrrZ	escproberrZlatin1proberrZmbcsgroupproberr	Zsbcsgroupproberr
�objectrrrrr�<module>$s_vendor/chardet/__pycache__/enums.cpython-36.pyc000064400000004753151733136220015613 0ustar003

�Pf}�@shdZGdd�de�ZGdd�de�ZGdd�de�ZGdd�de�ZGd	d
�d
e�ZGdd�de�Zd
S)zr
All of the Enums that are used throughout the chardet package.

:author: Dan Blanchard (dan.blanchard@gmail.com)
c@seZdZdZdZdZdZdS)�
InputStatezS
    This enum represents the different states a universal detector can be in.
    ���N)�__name__�
__module__�__qualname__�__doc__Z
PURE_ASCIIZ	ESC_ASCIIZ	HIGH_BYTE�r	r	�/usr/lib/python3.6/enums.pyrsrc@s<eZdZdZdZdZdZdZdZdZ	eeBZ
e
eBeBZdS)	�LanguageFilterzj
    This enum represents the different language filters we can apply to a
    ``UniversalDetector``.
    rr����N)rrrrZCHINESE_SIMPLIFIEDZCHINESE_TRADITIONALZJAPANESEZKOREANZNON_CJKZALLZCHINESEZCJKr	r	r	r
rsrc@seZdZdZdZdZdZdS)�ProbingStatezG
    This enum represents the different states a prober can be in.
    rrrN)rrrrZ	DETECTINGZFOUND_ITZNOT_MEr	r	r	r
r src@seZdZdZdZdZdZdS)�MachineStatezN
    This enum represents the different states a state machine can be in.
    rrrN)rrrrZSTARTZERRORZITS_MEr	r	r	r
r)src@s,eZdZdZdZdZdZdZedd��Z	dS)	�SequenceLikelihoodzX
    This enum represents the likelihood of a character following the previous one.
    rrr�cCsdS)z::returns: The number of likelihood categories in the enum.rr	)�clsr	r	r
�get_num_categories;sz%SequenceLikelihood.get_num_categoriesN)
rrrrZNEGATIVEZUNLIKELYZLIKELYZPOSITIVE�classmethodrr	r	r	r
r2src@s$eZdZdZdZdZdZdZdZdS)�CharacterCategoryz�
    This enum represents the different categories language models for
    ``SingleByteCharsetProber`` put characters into.

    Anything less than CONTROL is considered a letter.
    �����N)	rrrrZ	UNDEFINEDZ
LINE_BREAKZSYMBOLZDIGITZCONTROLr	r	r	r
rAsrN)r�objectrrrrrrr	r	r	r
�<module>s			_vendor/chardet/__pycache__/jpcntx.cpython-36.pyc000064400000113272151733136220015767 0ustar003

�Pf�L��@s8d`ZGdd�de�ZGdd	�d	e�ZGd
d�de�ZdS)a������c@sPeZdZdZdZdZdZdZdd�Zdd	�Z	d
d�Z
dd
�Zdd�Zdd�Z
dS)�JapaneseContextAnalysis�r�di�rcCs*d|_d|_d|_d|_d|_|j�dS)N)�
_total_rel�_rel_sample�_need_to_skip_char_num�_last_char_order�_done�reset)�self�r�/usr/lib/python3.6/jpcntx.py�__init__{sz JapaneseContextAnalysis.__init__cCs*d|_dg|j|_d|_d|_d|_dS)NrrF���)r
�NUM_OF_CATEGORYrrr
r)rrrrr�s
zJapaneseContextAnalysis.resetcCs�|jr
dS|j}x�||kr�|j|||d��\}}||7}||krV|||_d|_q|dkr�|jdkr�|jd7_|j|jkr�d|_P|jt|j|d7<||_qWdS)NrrTrrr)rr�	get_orderr
r
�MAX_REL_THRESHOLDr�jp2CharContext)r�byte_strZ	num_bytes�i�order�char_lenrrr�feed�s 	

zJapaneseContextAnalysis.feedcCs|j|jkS)N)r
�ENOUGH_REL_THRESHOLD)rrrr�got_enough_data�sz'JapaneseContextAnalysis.got_enough_datacCs,|j|jkr"|j|jd|jS|jSdS)Nr)r
�MINIMUM_DATA_THRESHOLDr�	DONT_KNOW)rrrr�get_confidence�sz&JapaneseContextAnalysis.get_confidencecCsdS)Nrr)rrr)rrrrrr�sz!JapaneseContextAnalysis.get_orderNr)�__name__�
__module__�__qualname__rr!rrr rrrrr"rrrrrrtsrcs0eZdZ�fdd�Zedd��Zdd�Z�ZS)�SJISContextAnalysiscstt|�j�d|_dS)NZ	SHIFT_JIS)�superr&r�
_charset_name)r)�	__class__rrr�szSJISContextAnalysis.__init__cCs|jS)N)r()rrrr�charset_name�sz SJISContextAnalysis.charset_namecCs�|sdS|d}d|ko"dkns@d|ko:dknrld}|dksdd	|ko^dknrpd
|_nd}t|�dkr�|d}|dkr�d|ko�dknr�|d|fSd|fS)Nrr�����r��ZCP932����r)rrr)r(�len)rr�
first_charr�second_charrrrr�s0  zSJISContextAnalysis.get_order)r#r$r%r�propertyr*r�
__classcell__rr)r)rr&�sr&c@seZdZdd�ZdS)�EUCJPContextAnalysiscCs�|sdS|d}|dks0d|ko*dknr6d}n|dkrDd}nd}t|�dkr�|d}|d	kr�d|kovd
knr�|d|fSd
|fS)Nrr���r�r���r)rrr)r3)rrr4rr5rrrr�s  zEUCJPContextAnalysis.get_orderN)r#r$r%rrrrrr8�sr8N�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr
rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr
rrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr
rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr�Srrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr)Sr?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�)r�objectrr&r8rrrr�<module>s�C_vendor/chardet/__pycache__/mbcsgroupprober.cpython-36.pyc000064400000002024151733136220017664 0ustar003

�Pf��@s�ddlmZddlmZddlmZddlmZddlm	Z	ddl
mZddlm
Z
ddlmZdd	lmZGd
d�de�ZdS)
�)�CharSetGroupProber)�
UTF8Prober)�
SJISProber)�EUCJPProber)�GB2312Prober)�EUCKRProber)�CP949Prober)�
Big5Prober)�EUCTWProbercseZdZd�fdd�	Z�ZS)�MBCSGroupProberNcsDtt|�j|d�t�t�t�t�t�t�t	�t
�g|_|j�dS)N)�lang_filter)
�superr�__init__rrrrrrr	r
Zprobers�reset)�selfr)�	__class__��%/usr/lib/python3.6/mbcsgroupprober.pyr*s
zMBCSGroupProber.__init__)N)�__name__�
__module__�__qualname__r�
__classcell__rr)rrr)srN)ZcharsetgroupproberrZ
utf8proberrZ
sjisproberrZeucjpproberrZgb2312proberrZeuckrproberrZcp949proberrZ
big5proberr	Zeuctwproberr
rrrrr�<module>s_vendor/chardet/__pycache__/langturkishmodel.cpython-36.pyc000064400000055442151733136220020041 0ustar003

�Pf^+�@sd�Zd�Zeed�d�d�d�d��Zd�S)����%�/�'��4�$�-�5�<��1��.�*�0�E�,�#��3�&�>�A�+�8�����������
��
����@���	�� �9�:�������������������������e���������j��������d���������^�P�]��i���?�������~�}�|�h�I�c�O�U�{�6�z�b�\�y�x�[�g�w�D�v�u�a�t�s�2�Z�r�q�p�o�7�)�(�V�Y�F�;�N�G�R�X�!�M�B�T�S�n�K�=�`��C�m�J�W�f�"�_�Q�l�L�H����k�g�X4���?Tz
ISO-8859-9ZTurkish)Zchar_to_order_mapZprecedence_matrixZtypical_positive_ratioZkeep_english_letterZcharset_nameZlanguageN(rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5rrrrrr6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�(r$r r$r$r$rr$r$r$r$r$r$r$r$r rrr$r$rr$r$r�r$r$r$r$r$r�r$rr$r$r rr�r�rrr�r�r�rr�r�rrrrr�r�r�r�r�r�r�r r r�r�rr�r�rr$r r r$r$r�r$r$r$r$r$r$r$r r$rr�r$r$rr$r$r�r$r$r$r$r$r�r$r�r$r$rrr�rr�rr�r�r�r�r�r�rrrrr�r�r�r�r�r�r�r r r�r�r�rr�rr$r$r r$r$r�r$r$r$r$r$r$r$r r$rrr$r$r�r$r$rr r$r$r$r$r�r$r�r$r$rrr�r�r�rr�r�r�r�rrr�rr rr�r�r�rr�r�r�r�r r�r�r�r�r�rr$r$r$r$r$r$r r$r$r$r$r$r$r$r$rr$r$r r�r$r rr r rr$r$r�r�r�r r r r�rr�r�rr�r�rrr�r�r�r�r�r�r�r�r�r�r�r�rr�rrr�rr�r�rr$r$r$r r$r$rr r$r$r$r$r$r$r$rr$r rr�r$r r�rr r$r$r rr�r�r r rr�r�r�r�r�r�r�r�r�r�r�rr�r�r�r�r�r�r�r�r�r�r�r�r r�r r�r�r�rr�rr$r$rr$r$r$r$r$r$r$rr r�r�r r$r�r r$r�r�r r r r$r�r$r�rr rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r$r$r$r$r$r$r$r$r$r$r$r$r$r$r$r�r$r$r$r�r$r r�r r$r r$r$rr�r�r r$r r�r�rr�r�r�r�r�r�r r�r�rr�r�r�r�r�r�r�r�r�rrrr�r r�r�rr$r$r$r r$r$r r$r$r$r$r r$r$r$r�r$r$r�r�r rr�r�r r$r r r�r�r�r r r r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrr�r�r�rr�rr�r r�r�rr$r$r$r r$r$r$r$r$r$r$r r$r$r$r�r$r r�rr$r rrr$r r$r rr�r�r r r r�r�rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rr�rr�r�r�r�r�r$r$r$r r$r$r$r$r$r$r$r r$r$r$r�r$r r r�r r$r�r�r r r r r�r�r�r r$r$r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rr�r r�rr�r�r�r$r$r$r$r$r$r$r r r r r$r r$r$r�r$r$rrr r r�r�r r r$r r�r�rr$r�r$rr�r�r�rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rr�rr�r�r�r�rr$r$r$r r$r$r$r rr r r$r r$r$r�r$r r�r�rrr�rrr rr r�r�r�rr�r$r�r�r�r�r�r�r�r�r�rr�r�r�r�r�r�r�r�r�r�r�r�rr�rr�rr�r�r�r$r$r$r r$r$r r$r r r r$r$r$r$rr$rrr�r$r rrr$r$r r$rr�r�rrr�r�rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rr�r r�r�rr$r r r$r$r�r$r$r$r$r$r$r$r r rr�r$r$rr$r$r�rr$r$r r$r�r$r�r$r 
r�r�r�r�r�r�r�r�r�r�rr�r�r�r�r�r�r�r�r�r�r�r�r�rr�r�r�r�r�r�r r r r$r$r�r$r$r$r$r$r$r$r$r$r�r�r$r r�r$r$r�r$r r$r$r$r�r$rr$r r�r�r�r�r�r�r�r�r�r�rr�rr r�rr�r�r�r�r�r�r�r r r�r�rr�r�rr$r$r$rr r$r$rr�r�rr�r�r$r$r r$r�r�r r�r�r r�r r�r�r�r r�r r�r�r$rr�rr�r�r�r r rr�rrr rr r r r�r rrr�r�r�r r�r�r�r�r�rr rr$r$r�r$r$r$r$r$r r$r�r�r�r�r r$r�r r$rr�r r$rr$r�r$r�r r$r�r�r�r�r�r�r�r�rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r$r$r$rr$r$r r r$r r r�rr r$r�rr rr�rr�r�r�rr�r r r�r�r�rrrr�r�r�r�r�r�r�r�r�r�r�r�r�rr�r�r�r�r�r�r�r�rrr�r�rr�r�r�r$r$r$rr$r$rrr$r$rrr$r$rr�r rr r�r rr�r�rrr rr�r�r�r r rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r$r$r$rr�r rr$r�r�r r�r�r$r$r�r$r�r�rr�rr r�r�rrr r r�rr�r�rr rrr�rr�rrrrrr�rrrr r rr r�rr�r�r�r�r�r�rr�r�r$r$r$r r$r r$r$r�r r r r$r$r$r�r$r�r�r�r r r�rr rrrr�r�r�rr�r$r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rr�r�r�r$r$r$r$r$r$r rr r r$r$r$r$r r�r r�r�r�r r r�r�r rr$r$r�r�rrrrr�r�rr�r�r�r�r�r�rr�r�r�r�r�r�r�r�r�r�r�r�r�r�rr�rr�r�r�rrr r$r$r�r$r$r$r$r$r$r r r�r r�r r$r r$r r r r r r r rr$r r$r r�r rr r r r rrr r rr r rr r�r�r rrr�r rr�r�rr�r�r�rr r$r$rrrr�rrrr r$r rrr�r�r�r�r�r�r�r�r�r�rr�rr�r�r�r�r�rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r$r$r$r r r r$r r$r r rr$r$r$r�r rr r�r rr�r�rrrrrr�r�rr rr�r�r�r�r�r�r�r�r�r�r�r�r�r�rr�r�r�r�r�r�r�rr�r r�rr�r�r�r$r$r$r r$r$r$r$r$r r$rr r$r$rr r�r�r�r�r�r�r�r$r rrr�r�r�r�r r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rr�r�r�r�r�r�r$r$r$r r r$r$r rrrrrr$r$r�r$rr�r�rrr�r�r$rr rr�r�r�r�r�r$r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rr�r�r�r�r�r$r$r$r r r$r r r r$r rrr$r$r�r$r�r�r�r�rr�r�r$rrr r�r�r�rrr�r�rr�r�r r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrr$r$r�r$r$r$r$r$r r r rr r�r rr r rrr�rr r r r r r r r�r�r rr rr rr�rrr$rr rrr r�r�r r�rr�rr�rr�r�r�rr�rr$r$r$rr$r$r$r�rrr�r r r$rr�r$r�r�r�rr�r�r�rr�r�rr�rr�r�rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r$r$r r�r�r r rr�r�rr�r�r$r$rr$r�r�rrr�r r�r$r�r�r�r r�rrr�rr r�rr r 
r�r r r r rr�r rrr�r r�r rr r�r�r�r�r�r�r�r�r�r$r$r$rr$r r$r r�r r r rr$r r�r rr r�rr r�r�rr�r r r�r�r�r rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrr�rr�r�r�r$r$r$r�r$r$rrr r$rr�r$r r$r�r$r�r�r�rr�r�r�rr�rr�r�r�r�r�rr r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r$r$r�r$r$r r$r$r r r�r�r�r�rr r�rr$r�r�r�r$rrr�r$r�r r r�r�r�r�r�rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r$r$r$rr r rr�r$rrrrr$r$r r$r�r�rr�rr r�r r r�r r r�r rr�r r rrrrr�r rrr�rrrrr rr rr r�rr�rr�r�r�r�r�r�r�r$r$r$r�rrr$r�r�rrr�r�r r r�r$r�r�rrr�rr�r�r�r�r�r r�r�r�r�r$rr�rr�rr�r r�r�rr�rr�rrrr rrr�r r�r�r�r�r�r�r�r�r�r$r$r$r�r r�r r�rrrr�r�r$r$r�r r�r�rr�r�r rrr�rr�rr�rr�r�r r�rr r�r r�r rrr�rr�r rrr�r rrr�rr�r�r�rrr�r�r�r�r$r r$r�rr�r�r�r�r�r�r�r�rr r�rr�r�rr�r�rr�r�r�r�r�r r�r�r�r�r�rrr�r�rr�rr�r�rr�r�r�r rr�rr�r r�r�r�r�r�r�r�r�r�r�r�r$r$r$r�r�r r$r�r�rr�rr�r r$r r$r�r�rr$r�r rr�r�r�r�r r�rr�r�r rr�r�rrr�r rr�r�rr�r�rrr�rrr r�rr�r�r�r�rr�r�r�r�r$r r r�r�rrr�r�r�r�r�r�r$rrrr�r�r�r�r�rr�r�r�r�r�r r�rr�r�rr�r�r�r�r�r�rr�r�r�r�r�r�r�rr�r�r�rr�rr�r�r�r�r�r�r�r�r�r�r�r�r$r$r�r r$r r rr r rrr r�rr$r r r r�r�r r r�r�r�rr rr$r�r rrr�rrrr�rr r r rrr r�r�r�r�rr�rrr�r�r�r�r�r�r�r�rrr r$r�r$r$r$r r r r rr�rr�rr�rr r r�r�r r rr$rrr rr�r�rrr r�rrr�r�rr r�r rrr r�r�rr�r�r�rr�rr�rr�r�r�r�r$r$r r�r�r$rr�r�r�r�r�r�r$r rr r�r�rr�r�r r�r�r�r�r�r r�rr�r�r rrr�r�rr�rr r�r�rrr�r�r rrrrr�r r�r�r�r�r�r�r�r�r�r$r$r r�r�rr�r�r�r�rr�r�r$r$r r r�r�rr�r�r r�rr�r�r�r r�rr�r�r�rrr�r�r r�r rr�r�rrr rr r�r rr rrrr�r�rrr�r�r�r�r$r$r r�r�r r r�r�r�rrr�r r rr$rr�rr�rr r�r�r�r�r�rr�rr�r�rrr�r�r�r�r�rr�r�rr�r�r�rrr�rr�rr�r�r�r�r�r�r�r�r�r�r�r$r$r$r r�r�r�rr�r�rr�r�r r$rr r�r�rr�r�r r�r�r�rr�r r�r r�r�rrr r rr r�r rrr�r�rrr�rrrrr rrr�r�r�r�r�r�r�r�r�r$r$r$r�r rr rr�r�rrr�r$r$rr r�r�rr�r�r r�r r�rrr r�r�r�r�r�rrrrr r�rrr�rrrrr�r�r�rrrr�rr�r�r�rr�r�r�r�r�r$r$r$r�r r r$r r�r�rr�r�r r$rr�r�r�r�r�r�r r�r r�r�r�r r�r�r�r�rrr�r�r�rr�r�rr�rrr�rr�rrrr�rr�r�r�r�r�r�r�r�r�r�r�r$r r$r�r�r�r�r�r�r�rr�r�r r r r 
r�r�rr�r�r r�r�r�r�r�r r�rr�r�r�r rrr�rr�r rrr�r�rrr rr�r r�r r�rr�r�r�r r�r�r�r�r�r�r�r�r r r�r rrrrr r r�r�rr�rr�r�rr$r�r�r�r�rr�r�r rr�r�r�rr�rr�r�r�r�r�r rr�rr�r�r�r�r�r�r�r�r�r r�r�r�r�r�r�r�r�r r�r�r r$r�r r$rr r r�r r�r�r r�r rrrr rr�r�rr rrr rr�rr�r r�rr�rrr�r�r r rr rrr r�r�rr�r�r�r�r�r�r�r�r�r�r�r�r$r$r$r�r rr r�r�r�rr�r�r$r r�rr�r�rr�r�r r�r�r�rr rr�rr�r�r�r�r�rr�rr�r�rr�r�r�r�rr�rr�rrrr�rr�r�r�r�r�r�r�r�r�r�r�r�r r r�r r rrr�rrrrrr�r�rr rrrr�rr�r�r�rrrrr�r�r rr�rrrr�rrr rr rrr r�rrr rr�r r�r�r�r�r�r�r�r�r$r r r�r�r r�r�r�r�r�r�r�r r r�r r�r�rr�r�r r�r�r�r�r�r r�r�r�r�r rr�r�r�r�r�rr�r�r�r�r�r�r�rr�r�r�rr�rr�r�r�r�r�r�r�r�r�r�r�r�r$r r�r r r�rrr�rr�r�rr�r�r�rr�rr�r�r�r�r�rr�r�r�r�r r�rr�rr�rrr�r�rr r�rr�rrr�r�rr�rr�r r�r�r�r�r�r�r�r�r r r r�rrr�r�r�rr�r�r�rr r�rr�r�rr�r�rr�r�r�r�rr r�rr�r�r�rr�r�r�rr�r�rr�r�r�r�r�r�rr�rr�r r�r�r�r�r�r�r�r�r�r�r�r r r r rr�rrrr�r�r�r�rr r�r�rr�r�r�rr�r�rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r r�r�r�r�r�r�r�rrr r�rr�r�r�rr�rr�r�r�rr�r�rr�r�r�r�r�r�r�rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rr�r�r�r�r�r r�r�r�r�r�rr�r�rr r r�r rr rrr r r�r�r�r�rr�r�rrr�r�r r�r�r�r�rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rr�r�r�r r r r�r�r�rr�r�r�r�r�r�r r rrr�r�r�r�r�rr�r�r�r�r�r�r�r�r�r�r�r�r�rr�r�r�rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrr�r�r�rr�r�r�rr�r�r�r�r�r�r�rrr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r r r r�rr�rr�r�r�r�r�r�rrr�r�r�r�r�r�r�rr�rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rr�r�r�r�rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rr�r�r�r�r�r�r�r�r�r�r r 
r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�)ZLatin5_TurkishCharToOrderMapZTurkishLangModelZLatin5TurkishModel�r�r��&/usr/lib/python3.6/langturkishmodel.py�<module>%s*_vendor/chardet/__pycache__/euckrfreq.cpython-36.pyc000064400000056746151733136220016464 0ustar003

�Pf�4�0	@sdZdZ�	d2Z�	d1S(3	g@�0	�
��x�t�����H�a�������+��W�u��h��]�������������v�w�������m�F�!�p�������������x���/������������9�����t���-�y��K������������O��n������������0����<�4�{����������i���r����������������������X�X���������Y���&��P�������������^��������������9�������������Q���"��t������]�{�7����{��;��u���z�/��|�������7���.��������������{����#�|�}��~��t�8��_�	�
���
�!����_���������*��u��`�"���|�������a�������?��R�!� �/���!�"�=���#��$�%�&�'�(�)�,���'�b�$�*��+��-���,�������&�U����#���-�.�'���f�/�s��0�������� ��9�e�[�1����Z���:�����2�3���G����y�4�����5�6�7�,�w����s�8����9�:���~�;��<�;�}�=�>�?�o�)����@��A�B����2�Y�C�D��<�E�F�G�H�I�%�J�K�L�M�N�O��`�>�P��=�Q��R��S�T�;�������U��V�W�X�4�Y���Z�[��\�]���^���_�"�P���`�;��~�H�a��v��z�?����b���<�c�d�d�e�f�c�0��d��g�y���h�i�s�0�j�=�k��l�����<�b���U������I�m�n�o��p�q�r�s�t�u�������6�v�w��*��]�x�y���z�Z��-�:��b�
�{�|��&�'����5����>�}�~�w��g����6��%���(��v����w������E������������f���V��7�����B��N��[��'���������S�������e���x������������������������?�����q��f��(�)����~���\���������)��������������$����������l�����~�����C�����@��������2����K��z�V��������Q�r�f������ �h�+�3��1�������g�(����z��������������������A��<�j���M�g��2�������������������V�h����J�����0����b�������������Y���������a�!�*����������K�D�8��R��B��@����������y����������X�:����#������i���G����k����=��������!������J�����=����}����j�����������������������E����������j�O�4�����������������	�v�]��C���������o�����
����l�c�A������������T����k����������3�*��q�����>��������+������;��p��x�������������	��
����
������l�������)������m�8���D��������������7�L�B���D�������t���� �!�
�"�#�$�%�R�&�'�(�)�*��+�,��,�-�.�m���
�^�/�c��E�����a�m�0�E�1�2�3����4�T�����5������6�7��n���o�8�9�F����:�G�;�<�=�>�?�@�A�B�C�D�E�$�F�G�����%���p�H�I�J�K�L�����������M�N�O�P�Q�R���S�/�T��U�����V��I�H��������W��X��q�Y�Z�[�\���r��s�]�^�_�`��v�L�a����.��b�F�>�����j�c�Z����B�6��`�d�e����|��f���5�g��h��i�H��j����k��l���1�m�n�o�p�q�r�C�s�t�u�v�w���x�y�z�{���|�}�~���������O��������������E���q���I��\��-���S����
��e�����l��M���Q��P���^��
�-�F���������������������
�.���t�����J���������g�������������������������������������u�����9�	������	���$���������5�%��k�L����������������A��������������R��u���������)�����:��������������"�$�v�����c�Z�����������*�W��K���L���+���������������	�B�����?���������������M�[�5�������n����������C���'���������������
�������	��������������F�T�/����������8��u������K���(�M���i��������������T��?���������e� �(��������%�0����O��	�8�	�	�	�	�	�����	�	�	����#����		����V�
���P�� �M�W���
	��	��|�	�
	�S������������	�	�����	�	�	�	�	�	���	�	��9�D���������	�3����	�	�	�{�	����	�	�	�� 	�������!	�k�������N���Y���"	�#	�$	�%	�&	�'	�(	�]�)	�*	�+	�,	�-	�.	��/	�����>�����1	������\��2	��3	�����4	�5	�6	�w�7	��8	��3�����9	�:	�L�����;	���<	���������������=	��J�>	�?	�@	���A	���B	�C	�D	���E	�����F	�G	�H	�I	������������J	�K	����L	�M	�N	��I�O	�P	���Q	�R	�S	��T	���U	�V	�W	�X	���Y	�Z	�[	�\	�]	����^	�_	�`	�a	����b	�c	�d	�e	����f	�g	�U���x� �h	����i	�j	�����4�&�������!�����S�y������"�
���#�k	���l	�m	�n	�����p�Q��.�o	�U��O���p	���q	�r	�s	�x�t	��u	�v	�w	���$�x	��y	���`�z	�{	�|	�}	���~	�	�	�	�	�	�	�	���C�������%�y���o�	���	�	�	�	�	�	��	�	�	�	����	����	�	��@����	�G�	������2��������	�	�	�	�N�	���������	�	�	�	�	�r�	���z�n�����P�	�#�	�&�Q�	���m�	�c����	�'�	�w�	�	���(�l�@�������)���*�	�	�	�	���	�	�	�	���	����	�	�	�H��	�	��	���	�	�	���	�	�r�	���A�	�������	�}�,���:�����
��I�	�N��1�	�W������������	�+��	��	�1���b����	�q��	�,��	���e����_��d��	��	��	��	��	��	����2�����	���	��	��	��	���	���	��	��-��	�����	��	��	��	��	��	����	��	��	��	��	��	���	����	��	����7��	����	���	��	��	��	��	��	��	������������,�G�������	��^��	��	�.��	��g���	����	�	�	���	�����_��	�	�	���h�h��{�	�|����3�	�	�	�����	�
�
�\�����
�
�
�
�
�
�
��	
�

�
�
����
�j�
�i��4��
�
�
�
�
�
���
�
�
�
��6�d���/�
�
����o�
��R������	�&�
�k�n�z��������
��
����X��d��S��}�
�}����~������� 
�!
�N�"
����������#
���s�$
�%
�&
�f�D��1�'
���(
�@�)
�^�����*
���+
�,
�-
�.
�/
�J�+�0
�1
�2
���T��3
�4
�5
�6
���7
�A�.�����8
�9
��"�:
�;
���<
�=
�>
���0�?
��@
��A
�B
��C
�D
�E
�F
�G
���_�[�H
�I
���`�a�J
�����K
�L
�M
�N
��O
��i����P
�Q
�R
N(0	rrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxrryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r	r	r	r	r	r	r	r	r	r		r
	r	r	r
	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r 	r!	r"	r#	r$	r%	r&	r'	r(	r)	r*	r+	r,	r-	r.	r/	r0	)Z EUCKR_TYPICAL_DISTRIBUTION_RATIOZEUCKR_TABLE_SIZEZEUCKR_CHAR_TO_FREQ_ORDER�r1	r1	�/usr/lib/python3.6/euckrfreq.py�<module>)s(_vendor/chardet/__pycache__/hebrewprober.cpython-36.opt-1.pyc000064400000005513151733136220020104 0ustar003

�Pf6�@s,ddlmZddlmZGdd�de�ZdS)�)�
CharSetProber)�ProbingStatecs�eZdZdZdZdZdZdZdZdZ	dZ
d	Zd
ZdZ
dZd
ZdZ�fdd�Zdd�Zdd�Zdd�Zdd�Zdd�Zedd��Zedd��Zedd ��Z�ZS)!�HebrewProber�������������������g{�G�z�?z
ISO-8859-8zwindows-1255cs>tt|�j�d|_d|_d|_d|_d|_d|_|j	�dS)N)
�superr�__init__�_final_char_logical_score�_final_char_visual_score�_prev�_before_prev�_logical_prober�_visual_prober�reset)�self)�	__class__��"/usr/lib/python3.6/hebrewprober.pyr�szHebrewProber.__init__cCsd|_d|_d|_d|_dS)N�� )rrrr)rrrrr�szHebrewProber.resetcCs||_||_dS)N)rr)rZ
logicalProberZvisualProberrrr�set_model_probers�szHebrewProber.set_model_proberscCs||j|j|j|j|jgkS)N)�	FINAL_KAF�	FINAL_MEM�	FINAL_NUN�FINAL_PE�FINAL_TSADI)r�crrr�is_final�szHebrewProber.is_finalcCs||j|j|j|jgkS)N)�
NORMAL_KAF�
NORMAL_MEM�
NORMAL_NUN�	NORMAL_PE)rr%rrr�is_non_final�s
zHebrewProber.is_non_finalcCs�|jtjkrtjS|j|�}x�|D]�}|dkrp|jdkr�|j|j�rT|jd7_q�|j|j�r�|j	d7_	n,|jdkr�|j|j�r�|dkr�|j	d7_	|j|_||_q"Wtj
S)Nrr)�stater�NOT_MEZfilter_high_byte_onlyrr&rrr+r�	DETECTING)rZbyte_strZcurrrr�feed�s 




zHebrewProber.feedcCsx|j|j}||jkr|jS||jkr.|jS|jj�|jj�}||jkrR|jS||jkrd|jS|dkrr|jS|jS)Ng)	rr�MIN_FINAL_CHAR_DISTANCE�LOGICAL_HEBREW_NAME�VISUAL_HEBREW_NAMErZget_confidencer�MIN_MODEL_DISTANCE)rZfinalsubZmodelsubrrr�charset_name�s

zHebrewProber.charset_namecCsdS)NZHebrewr)rrrr�languageszHebrewProber.languagecCs(|jjtjkr"|jjtjkr"tjStjS)N)rr,rr-rr.)rrrrr,szHebrewProber.state)�__name__�
__module__�__qualname__r r'r!r(r"r)r#r*r$ZNORMAL_TSADIr0r3r2r1rrrr&r+r/�propertyr4r5r,�
__classcell__rr)rrr�s.

;rN)Z
charsetproberrZenumsrrrrrr�<module>sc_vendor/chardet/__pycache__/utf8prober.cpython-36.pyc000064400000003543151733136220016560 0ustar003

�Pf�
�@sHddlmZddlmZmZddlmZddlmZGdd�de�Z	dS)�)�
CharSetProber)�ProbingState�MachineState)�CodingStateMachine)�
UTF8_SM_MODELcsTeZdZdZ�fdd�Z�fdd�Zedd��Zedd	��Zd
d�Z	dd
�Z
�ZS)�
UTF8Proberg�?cs*tt|�j�tt�|_d|_|j�dS)N)�superr�__init__rr�	coding_sm�
_num_mb_chars�reset)�self)�	__class__�� /usr/lib/python3.6/utf8prober.pyr	&s
zUTF8Prober.__init__cs"tt|�j�|jj�d|_dS)N�)rrrr
r)r
)rrrr,s
zUTF8Prober.resetcCsdS)Nzutf-8r)r
rrr�charset_name1szUTF8Prober.charset_namecCsdS)N�r)r
rrr�language5szUTF8Prober.languagecCs�xj|D]b}|jj|�}|tjkr,tj|_Pq|tjkrBtj|_Pq|tj	kr|jj
�dkr|jd7_qW|jtj
kr�|j�|jkr�tj|_|jS)N�r)r
Z
next_staterZERRORrZNOT_MEZ_stateZITS_MEZFOUND_ITZSTARTZget_current_charlenr�stateZ	DETECTING�get_confidenceZSHORTCUT_THRESHOLD)r
Zbyte_str�cZcoding_staterrr�feed9s



zUTF8Prober.feedcCs.d}|jdkr&||j|j9}d|S|SdS)Ng�G�z��?�g�?)r�
ONE_CHAR_PROB)r
ZunlikerrrrLs

zUTF8Prober.get_confidence)�__name__�
__module__�__qualname__rr	r�propertyrrrr�
__classcell__rr)rrr#srN)
Z
charsetproberrZenumsrrZcodingstatemachinerZmbcssmrrrrrr�<module>s_vendor/chardet/__pycache__/langturkishmodel.cpython-36.opt-1.pyc000064400000055442151733136220021000 0ustar003

�Pf^+�@sd�Zd�Zeed�d�d�d�d��Zd�S)����%�/�'��4�$�-�5�<��1��.�*�0�E�,�#��3�&�>�A�+�8�����������
��
����@���	�� �9�:�������������������������e���������j��������d���������^�P�]��i���?�������~�}�|�h�I�c�O�U�{�6�z�b�\�y�x�[�g�w�D�v�u�a�t�s�2�Z�r�q�p�o�7�)�(�V�Y�F�;�N�G�R�X�!�M�B�T�S�n�K�=�`��C�m�J�W�f�"�_�Q�l�L�H����k�g�X4���?Tz
ISO-8859-9ZTurkish)Zchar_to_order_mapZprecedence_matrixZtypical_positive_ratioZkeep_english_letterZcharset_nameZlanguageN(rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5rrrrrr6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�(r$r r$r$r$rr$r$r$r$r$r$r$r$r rrr$r$rr$r$r�r$r$r$r$r$r�r$rr$r$r rr�r�rrr�r�r�rr�r�rrrrr�r�r�r�r�r�r�r r r�r�rr�r�rr$r r r$r$r�r$r$r$r$r$r$r$r r$rr�r$r$rr$r$r�r$r$r$r$r$r�r$r�r$r$rrr�rr�rr�r�r�r�r�r�rrrrr�r�r�r�r�r�r�r r r�r�r�rr�rr$r$r r$r$r�r$r$r$r$r$r$r$r r$rrr$r$r�r$r$rr r$r$r$r$r�r$r�r$r$rrr�r�r�rr�r�r�r�rrr�rr rr�r�r�rr�r�r�r�r r�r�r�r�r�rr$r$r$r$r$r$r r$r$r$r$r$r$r$r$rr$r$r r�r$r rr r rr$r$r�r�r�r r r r�rr�r�rr�r�rrr�r�r�r�r�r�r�r�r�r�r�r�rr�rrr�rr�r�rr$r$r$r r$r$rr r$r$r$r$r$r$r$rr$r rr�r$r r�rr r$r$r rr�r�r r rr�r�r�r�r�r�r�r�r�r�r�rr�r�r�r�r�r�r�r�r�r�r�r�r r�r r�r�r�rr�rr$r$rr$r$r$r$r$r$r$rr r�r�r r$r�r r$r�r�r r r r$r�r$r�rr rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r$r$r$r$r$r$r$r$r$r$r$r$r$r$r$r�r$r$r$r�r$r r�r r$r r$r$rr�r�r r$r r�r�rr�r�r�r�r�r�r r�r�rr�r�r�r�r�r�r�r�r�rrrr�r r�r�rr$r$r$r r$r$r r$r$r$r$r r$r$r$r�r$r$r�r�r rr�r�r r$r r r�r�r�r r r r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrr�r�r�rr�rr�r r�r�rr$r$r$r r$r$r$r$r$r$r$r r$r$r$r�r$r r�rr$r rrr$r r$r rr�r�r r r r�r�rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rr�rr�r�r�r�r�r$r$r$r r$r$r$r$r$r$r$r r$r$r$r�r$r r r�r r$r�r�r r r r r�r�r�r r$r$r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rr�r r�rr�r�r�r$r$r$r$r$r$r$r r r r r$r r$r$r�r$r$rrr r r�r�r r r$r r�r�rr$r�r$rr�r�r�rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rr�rr�r�r�r�rr$r$r$r r$r$r$r rr r r$r r$r$r�r$r r�r�rrr�rrr rr r�r�r�rr�r$r�r�r�r�r�r�r�r�r�rr�r�r�r�r�r�r�r�r�r�r�r�rr�rr�rr�r�r�r$r$r$r r$r$r r$r r r r$r$r$r$rr$rrr�r$r rrr$r$r r$rr�r�rrr�r�rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rr�r r�r�rr$r r r$r$r�r$r$r$r$r$r$r$r r rr�r$r$rr$r$r�rr$r$r r$r�r$r�r$r 
r�r�r�r�r�r�r�r�r�r�rr�r�r�r�r�r�r�r�r�r�r�r�r�rr�r�r�r�r�r�r r r r$r$r�r$r$r$r$r$r$r$r$r$r�r�r$r r�r$r$r�r$r r$r$r$r�r$rr$r r�r�r�r�r�r�r�r�r�r�rr�rr r�rr�r�r�r�r�r�r�r r r�r�rr�r�rr$r$r$rr r$r$rr�r�rr�r�r$r$r r$r�r�r r�r�r r�r r�r�r�r r�r r�r�r$rr�rr�r�r�r r rr�rrr rr r r r�r rrr�r�r�r r�r�r�r�r�rr rr$r$r�r$r$r$r$r$r r$r�r�r�r�r r$r�r r$rr�r r$rr$r�r$r�r r$r�r�r�r�r�r�r�r�rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r$r$r$rr$r$r r r$r r r�rr r$r�rr rr�rr�r�r�rr�r r r�r�r�rrrr�r�r�r�r�r�r�r�r�r�r�r�r�rr�r�r�r�r�r�r�r�rrr�r�rr�r�r�r$r$r$rr$r$rrr$r$rrr$r$rr�r rr r�r rr�r�rrr rr�r�r�r r rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r$r$r$rr�r rr$r�r�r r�r�r$r$r�r$r�r�rr�rr r�r�rrr r r�rr�r�rr rrr�rr�rrrrrr�rrrr r rr r�rr�r�r�r�r�r�rr�r�r$r$r$r r$r r$r$r�r r r r$r$r$r�r$r�r�r�r r r�rr rrrr�r�r�rr�r$r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rr�r�r�r$r$r$r$r$r$r rr r r$r$r$r$r r�r r�r�r�r r r�r�r rr$r$r�r�rrrrr�r�rr�r�r�r�r�r�rr�r�r�r�r�r�r�r�r�r�r�r�r�r�rr�rr�r�r�rrr r$r$r�r$r$r$r$r$r$r r r�r r�r r$r r$r r r r r r r rr$r r$r r�r rr r r r rrr r rr r rr r�r�r rrr�r rr�r�rr�r�r�rr r$r$rrrr�rrrr r$r rrr�r�r�r�r�r�r�r�r�r�rr�rr�r�r�r�r�rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r$r$r$r r r r$r r$r r rr$r$r$r�r rr r�r rr�r�rrrrrr�r�rr rr�r�r�r�r�r�r�r�r�r�r�r�r�r�rr�r�r�r�r�r�r�rr�r r�rr�r�r�r$r$r$r r$r$r$r$r$r r$rr r$r$rr r�r�r�r�r�r�r�r$r rrr�r�r�r�r r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rr�r�r�r�r�r�r$r$r$r r r$r$r rrrrrr$r$r�r$rr�r�rrr�r�r$rr rr�r�r�r�r�r$r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rr�r�r�r�r�r$r$r$r r r$r r r r$r rrr$r$r�r$r�r�r�r�rr�r�r$rrr r�r�r�rrr�r�rr�r�r r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrr$r$r�r$r$r$r$r$r r r rr r�r rr r rrr�rr r r r r r r r�r�r rr rr rr�rrr$rr rrr r�r�r r�rr�rr�rr�r�r�rr�rr$r$r$rr$r$r$r�rrr�r r r$rr�r$r�r�r�rr�r�r�rr�r�rr�rr�r�rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r$r$r r�r�r r rr�r�rr�r�r$r$rr$r�r�rrr�r r�r$r�r�r�r r�rrr�rr r�rr r 
r�r r r r rr�r rrr�r r�r rr r�r�r�r�r�r�r�r�r�r$r$r$rr$r r$r r�r r r rr$r r�r rr r�rr r�r�rr�r r r�r�r�r rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrr�rr�r�r�r$r$r$r�r$r$rrr r$rr�r$r r$r�r$r�r�r�rr�r�r�rr�rr�r�r�r�r�rr r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r$r$r�r$r$r r$r$r r r�r�r�r�rr r�rr$r�r�r�r$rrr�r$r�r r r�r�r�r�r�rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r$r$r$rr r rr�r$rrrrr$r$r r$r�r�rr�rr r�r r r�r r r�r rr�r r rrrrr�r rrr�rrrrr rr rr r�rr�rr�r�r�r�r�r�r�r$r$r$r�rrr$r�r�rrr�r�r r r�r$r�r�rrr�rr�r�r�r�r�r r�r�r�r�r$rr�rr�rr�r r�r�rr�rr�rrrr rrr�r r�r�r�r�r�r�r�r�r�r$r$r$r�r r�r r�rrrr�r�r$r$r�r r�r�rr�r�r rrr�rr�rr�rr�r�r r�rr r�r r�r rrr�rr�r rrr�r rrr�rr�r�r�rrr�r�r�r�r$r r$r�rr�r�r�r�r�r�r�r�rr r�rr�r�rr�r�rr�r�r�r�r�r r�r�r�r�r�rrr�r�rr�rr�r�rr�r�r�r rr�rr�r r�r�r�r�r�r�r�r�r�r�r�r$r$r$r�r�r r$r�r�rr�rr�r r$r r$r�r�rr$r�r rr�r�r�r�r r�rr�r�r rr�r�rrr�r rr�r�rr�r�rrr�rrr r�rr�r�r�r�rr�r�r�r�r$r r r�r�rrr�r�r�r�r�r�r$rrrr�r�r�r�r�rr�r�r�r�r�r r�rr�r�rr�r�r�r�r�r�rr�r�r�r�r�r�r�rr�r�r�rr�rr�r�r�r�r�r�r�r�r�r�r�r�r$r$r�r r$r r rr r rrr r�rr$r r r r�r�r r r�r�r�rr rr$r�r rrr�rrrr�rr r r rrr r�r�r�r�rr�rrr�r�r�r�r�r�r�r�rrr r$r�r$r$r$r r r r rr�rr�rr�rr r r�r�r r rr$rrr rr�r�rrr r�rrr�r�rr r�r rrr r�r�rr�r�r�rr�rr�rr�r�r�r�r$r$r r�r�r$rr�r�r�r�r�r�r$r rr r�r�rr�r�r r�r�r�r�r�r r�rr�r�r rrr�r�rr�rr r�r�rrr�r�r rrrrr�r r�r�r�r�r�r�r�r�r�r$r$r r�r�rr�r�r�r�rr�r�r$r$r r r�r�rr�r�r r�rr�r�r�r r�rr�r�r�rrr�r�r r�r rr�r�rrr rr r�r rr rrrr�r�rrr�r�r�r�r$r$r r�r�r r r�r�r�rrr�r r rr$rr�rr�rr r�r�r�r�r�rr�rr�r�rrr�r�r�r�r�rr�r�rr�r�r�rrr�rr�rr�r�r�r�r�r�r�r�r�r�r�r$r$r$r r�r�r�rr�r�rr�r�r r$rr r�r�rr�r�r r�r�r�rr�r r�r r�r�rrr r rr r�r rrr�r�rrr�rrrrr rrr�r�r�r�r�r�r�r�r�r$r$r$r�r rr rr�r�rrr�r$r$rr r�r�rr�r�r r�r r�rrr r�r�r�r�r�rrrrr r�rrr�rrrrr�r�r�rrrr�rr�r�r�rr�r�r�r�r�r$r$r$r�r r r$r r�r�rr�r�r r$rr�r�r�r�r�r�r r�r r�r�r�r r�r�r�r�rrr�r�r�rr�r�rr�rrr�rr�rrrr�rr�r�r�r�r�r�r�r�r�r�r�r$r r$r�r�r�r�r�r�r�rr�r�r r r r 
r�r�rr�r�r r�r�r�r�r�r r�rr�r�r�r rrr�rr�r rrr�r�rrr rr�r r�r r�rr�r�r�r r�r�r�r�r�r�r�r�r r r�r rrrrr r r�r�rr�rr�r�rr$r�r�r�r�rr�r�r rr�r�r�rr�rr�r�r�r�r�r rr�rr�r�r�r�r�r�r�r�r�r r�r�r�r�r�r�r�r�r r�r�r r$r�r r$rr r r�r r�r�r r�r rrrr rr�r�rr rrr rr�rr�r r�rr�rrr�r�r r rr rrr r�r�rr�r�r�r�r�r�r�r�r�r�r�r�r$r$r$r�r rr r�r�r�rr�r�r$r r�rr�r�rr�r�r r�r�r�rr rr�rr�r�r�r�r�rr�rr�r�rr�r�r�r�rr�rr�rrrr�rr�r�r�r�r�r�r�r�r�r�r�r�r r r�r r rrr�rrrrrr�r�rr rrrr�rr�r�r�rrrrr�r�r rr�rrrr�rrr rr rrr r�rrr rr�r r�r�r�r�r�r�r�r�r$r r r�r�r r�r�r�r�r�r�r�r r r�r r�r�rr�r�r r�r�r�r�r�r r�r�r�r�r rr�r�r�r�r�rr�r�r�r�r�r�r�rr�r�r�rr�rr�r�r�r�r�r�r�r�r�r�r�r�r$r r�r r r�rrr�rr�r�rr�r�r�rr�rr�r�r�r�r�rr�r�r�r�r r�rr�rr�rrr�r�rr r�rr�rrr�r�rr�rr�r r�r�r�r�r�r�r�r�r r r r�rrr�r�r�rr�r�r�rr r�rr�r�rr�r�rr�r�r�r�rr r�rr�r�r�rr�r�r�rr�r�rr�r�r�r�r�r�rr�rr�r r�r�r�r�r�r�r�r�r�r�r�r r r r rr�rrrr�r�r�r�rr r�r�rr�r�r�rr�r�rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r r�r�r�r�r�r�r�rrr r�rr�r�r�rr�rr�r�r�rr�r�rr�r�r�r�r�r�r�rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rr�r�r�r�r�r r�r�r�r�r�rr�r�rr r r�r rr rrr r r�r�r�r�rr�r�rrr�r�r r�r�r�r�rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rr�r�r�r r r r�r�r�rr�r�r�r�r�r�r r rrr�r�r�r�r�rr�r�r�r�r�r�r�r�r�r�r�r�r�rr�r�r�rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrr�r�r�rr�r�r�rr�r�r�r�r�r�r�rrr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r r r r�rr�rr�r�r�r�r�r�rrr�r�r�r�r�r�r�rr�rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rr�r�r�r�rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rr�r�r�r�r�r�r�r�r�r�r r 
r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�)ZLatin5_TurkishCharToOrderMapZTurkishLangModelZLatin5TurkishModel�r�r��&/usr/lib/python3.6/langturkishmodel.py�<module>%s*_vendor/chardet/__pycache__/codingstatemachine.cpython-36.pyc000064400000005365151733136220020315 0ustar003

�Pf�@s(ddlZddlmZGdd�de�ZdS)�N�)�MachineStatec@sDeZdZdZdd�Zdd�Zdd�Zdd	�Zd
d�Ze	dd
��Z
dS)�CodingStateMachinea�
    A state machine to verify a byte sequence for a particular encoding. For
    each byte the detector receives, it will feed that byte to every active
    state machine available, one byte at a time. The state machine changes its
    state based on its previous state and the byte it receives. There are 3
    states in a state machine that are of interest to an auto-detector:

    START state: This is the state to start with, or a legal byte sequence
                 (i.e. a valid code point) for character has been identified.

    ME state:  This indicates that the state machine identified a byte sequence
               that is specific to the charset it is designed for and that
               there is no other possible encoding which can contain this byte
               sequence. This will to lead to an immediate positive answer for
               the detector.

    ERROR state: This indicates the state machine identified an illegal byte
                 sequence for that encoding. This will lead to an immediate
                 negative answer for this encoding. Detector will exclude this
                 encoding from consideration from here on.
    cCs0||_d|_d|_d|_tjt�|_|j�dS)Nr)	�_model�_curr_byte_pos�_curr_char_len�_curr_state�loggingZ	getLogger�__name__Zlogger�reset)�selfZsm�r
�(/usr/lib/python3.6/codingstatemachine.py�__init__7szCodingStateMachine.__init__cCstj|_dS)N)r�STARTr)rr
r
rr?szCodingStateMachine.resetcCsh|jd|}|jtjkr0d|_|jd||_|j|jd|}|jd||_|jd7_|jS)NZclass_tablerZchar_len_tableZclass_factorZstate_tabler)rrrrrr)r�cZ
byte_classZ
curr_stater
r
r�
next_stateBszCodingStateMachine.next_statecCs|jS)N)r)rr
r
r�get_current_charlenPsz&CodingStateMachine.get_current_charlencCs
|jdS)N�name)r)rr
r
r�get_coding_state_machineSsz+CodingStateMachine.get_coding_state_machinecCs
|jdS)N�language)r)rr
r
rrVszCodingStateMachine.languageN)r
�
__module__�__qualname__�__doc__rrrrr�propertyrr
r
r
rr!sr)r	Zenumsr�objectrr
r
r
r�<module>s_vendor/chardet/__pycache__/langgreekmodel.cpython-36.opt-1.pyc000064400000057637151733136220020414 0ustar003

�Pf�1�@s4d�Zd�Zd�Zeed�d�d�d�d��Zeed�d�d�d�d��Zd�S)������R�d�h�^�b�e�t�f�o��u�\�X�q�U�O�v�i�S�C�r�w�_�c�m��H�F�P�Q�<�`�]�Y�D�x�a�M�V�E�7�N�s�A�B�:�L�j�g�W�k�p���Z�J���=�$�.�G�I�6�l�{�n��3�+�)�"�[�(�4�/�,�5�&�1�;�'�#�0��%�!�-�8�2�T�9�y�����|������ �
�����
����	��������*��@�K����g���s�?Fz
ISO-8859-7ZGreek)Zchar_to_order_mapZprecedence_matrixZtypical_positive_ratioZkeep_english_letterZcharset_nameZlanguagezwindows-1253N(rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr9r:rrrrrrrrrrr;rrrrrrr<r=r>r?r@rArBrrCrrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r(r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r(rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr9r>rrrrrrrrrrr;rrrrrrr<rrr?r@rArBrrCrrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r(r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r(r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rjryryrjrjrjrjrjrjrjrjrfrjrjrjr�ryryrjrjr�rjr�rjryr�rjrjrjr�rjr�r�r�ryr�r�r�r�r�ryr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rjrjrjrjrjr�rjrjr�rjryrjrjr�rjryrjrjrjr�r�rjr�rjr�rjrjryr�r�r�ryr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�ryr�r�r�r�r�r�r�r�r�ryrjryryrjrjrjrjrjrjrjrjr�rjrjrjrjr�ryrjrjr�rjrjrjrjryrjrjrjr�ryr�r�r�ryr�r�r�r�r�ryr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�ryrjrjryrjrjrjrjrjrjrjrjrjrjrjrjr�ryrfrjrjrjrjryrjrjryrjrjryr�r�r�r�r�ryr�r�r�r�r�ryr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rjrjrjrjr�rjrjrjrjrjrjr�rjrjr�rjrjrjrjrjrjrjrjrjrjr�rjryrjrjr�ryr�rfr�ryr�r�r�r�r�ryr�r�r�r�r�r�r�r�r�r�r�r�rfr�r�r�r�r�r�r�r�r�rjrjrjrjrjryrjr�r�r�r�rjrjr�rjrfrjrjrjr�rjrjr�rjrjrjrjr�r�r�r�ryr�r�r�ryr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rjrjrjrjrjr�rjr�rjrjrjrjrjr�rjryryryrjr�ryrjrjrjrjrjryrjrjr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rjrjrjrjrjrjryryryrjrjrjrjr�rjrfrjrjrjrjryrjrjrjrjrjrjrjryryr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rjrjrjrjrjryr�rjr�r�r�rjrjryrjrjrjrjrjr�r�rjryrjr�ryrjr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rjr�rjrjrjrjr�r�rjrjr�ryrjr�rjr�rjrjrjr�r�rjr�rjr�ryryrjrjr�r�r�r�rfr�r�r�r�r�r�r�ryr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rjrjrjrjrjryr�rjryrjrjrjrjr�rjrjrjrjrjr�rjrjryrjryrjrjryr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rjrjryrjryrjrjrjrjrjrjr�ryrjryrjryryryrjryrjrjryrjr�ryryryrjr�ryr�r�r�r�r�r�r�r�r�ryr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rjr�r�r�rjrjrjryrjrjr�r�rjr�rjr�r�r�rjryr�rjr�rjr�r
�ryr�ryr�r�r�r�r�ryr�r�r�r�r�ryr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�ryr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rjrjrjrjr�rjrjrjrjrjrjr�rjrjr�rjr�r�r�rjrjr�rjrjrjr�r�rfryrjr�rjr�r�r�r�r�r�r�r�r�ryr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rjrjrjrjrjryr�r�rjryryrjrjr�rjrjrjrjrjryrfrjr�rjryrjrjryrfr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rjrjr�ryrjrjrjrjrjrjr�r�rjr�rjr�r�r�rjrjr�rjryrjr�r�rjrjrjr�rjr�r�r�ryr�r�r�r�r�rjr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rjrjrjrjr�rjrjrjrjrjrjr�r�rjr�rjr�r�r�rjryr�rjryrjr�r�rjryrjr�ryr�r�r�r�r�r�r�r�r�rjr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rjrfryryrjrjrjrjrjrjr�ryrjr�rjr�r�r�rjrjr�rjr�ryr�r�ryrjrfr�ryr�r�r�r�r�r�r�r�r�ryr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rjr�rjrjrjrjr�rjr�rjrjryrjr�rjrjrjrjrjrjr�rjrjrjr�ryrjr�r�rjr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rjr�rjrjrjr�r�rjr�r�r�rjrjr�rjr�ryrjrjr�r�rjr�rjr�rjrjr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rjr�r�r�rjrjrjrjrjrjr�r�rjr�ryr�r�r�rjrjr�rjr�rjr�r�ryr�ryr�r�r�r�r�rfr�r�r�r�r�ryr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rjrjrjrjrjrjr�rjr�ryr�rjryr�rjryrjryrjr�r�rjryrjryrjrjr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rjr�r�ryrjrjrjrjrjr�r�r�rjr�ryrfr�r�rjryryryr�rjr�r�ryryr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rjr�rjrjrjryr�rjr�rjr�rjrjr�ryrfryrjrjr�r�rjr�rjr�rjrjr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�ryrjrjrjr�rjrjrjrjrjrjr�ryrjr�rjr�r�r�ryrfr�ryryrjr�r�ryryryr�r�r�r�r�r�r�r�r�r�r�ryr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rjr�r�ryrjrjrjryrjr�r�rfrjr�ryr�r�r�r�rjr�rfr�ryr�r�rfrfrfr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rjrjrjrjrjrfr�rjr�r�r�rjryr�rjryrjrjrjr�r�rjr�rjryryryrfr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rjr�r
jrjrjr�r�rjr�r�r�r�ryr�ryrjrjryryryryrjr�ryr�ryryr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rjrjrjrjryr�r�r�r�r�r�ryrjr�ryr�ryrjryr�r�rjr�rjr�rjrfr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rjryrjrjryryrjr�ryr�rjr�r�r�ryr�r�r�r�rfryr�ryr�ryr�r�ryr�ryr�ryryr�r�rfr�ryryryr�ryryryr�ryryryr�r�ryr�r�rfr�r�r�r�r�ryr�rjrjryr�r�r�r�r�r�rfrjr�ryr�ryryryr�r�ryr�rjr�r�ryr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rjr�ryrjryr�ryryr�ryr�ryryr�ryr�ryryryr�r�r�r�r�r�ryrjr�r�r�ryr�rfryr�r�r�r�ryryr�r�r�ryrfr�ryryr�r�r�r�r�r�rfr�ryr�r�r�r�r�r�r�r�ryrfr�ryrjryryrjryrjryr�r�rjrjrjr�r�rjryr�r�r�rfrfr�ryr�ryryr�ryr�ryr�ryryr�r�ryr�ryryryr�ryryryryr�r�ryr�r�r�ryr�rfr�r�r�r�r�rjr�rjrjryryr�rjr�r�r�ryryr�ryryryrfryr�r�rfryryr�r�rjr�r�r�ryr�rfryr�r�r�rfryr�r�r�r�r�r�r�ryryr�rfr�r�ryr�r�r�ryr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�ryrjrjryryr�r�r�ryr�ryrjrjr�ryr�r�r�r�r�r�ryryryr�ryryr�ryr�ryr�ryryr�r�ryryryryrfr�r�ryryr�ryr�r�ryr�r�r�r�r�r�ryr�r�r�r�r�r�r�ryr�rjryrjr�r�r�rjr�r�ryryr�ryr�ryryryr�r�ryr�r�r�r�r�r�r�r�ryr�r�ryryr�r�ryryryr�r�r�r�r�r�ryr�r�r�ryr�r�r�r�r�r�r�r�r�r�r�r�r�r�ryr�r�rjryr�ryryryryryr�r�r�ryr�r�r�r�ryr�rfr�r�ryr�rfr�r�r�r�ryryryr�ryryr�rfryr�ryryryr�ryryryryrfryryr�r�ryr�r�r�r�r�r�r�r�r�r�r�r�r�rfr�r�r�r�r�r�r�r�r�ryr�r�r�r�r�r�rfr�r�r�r�r�r�r�r�r�ryr�ryr�ryryr�r�r�r�rfryrfr�r�ryryr�r�ryr�r�r�r�r�r�r�r�r�r�r�r�r�r�rjryrjr�r�ryr�r�r�ryryr�ryr�r�r�rfr�r�ryr�ryr�ryryr�r�r�r�r�r�ryr�r�r�r�ryryr�r�r�r�r�r�ryr�r�r�r�r�r�r�r�r�ryr�r�r�r�r�r�r�ryryrjryryr�r�r�r�r�r�rfrjr�ryr�ryryr�r�r�rfr�ryr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�ryr�ryr�rjryr�ryr�r�r�r�r�r�ryryr�r�r�r�r�r�r�r�r�r�r�r�r�r�rfr�r�ryr�r�r�r�rfrfr�r�ryrfryr�ryryr�rfr�r�rfr�r�r�ryr�r�r�r�r�r�r�rjr�ryryryr�r�ryr�r�r�ryr�r�r�ryrjr�ryr�r�r�r�r�r�ryryr�r�r�ryr�rfryr�r�r�rfryryrfr�r
�r�ryr�r�ryr�r�r�r�r�r�r�r�rfr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rjr�r�r�r�r�r�ryr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�ryrfryr�ryryr�ryr�r�ryr�r�r�r�rfryrfr�ryrfr�r�r�r�r�r�r�r�r�r�r�r�ryr�r�r�rjrfryryr�ryr�r�r�r�ryr�r�r�ryr�r�rjr�r�r�r�ryryryr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�ryrfr�ryr�rfryr�r�r�r�r�r�r�r�r�r�r�r�r�ryr�r�rfr�r�r�r�r�r�ryr�ryryr�r�ryryryryryr�rfryr�r�r�ryryr�rfr�ryr�r�ryryr�r�r�r�r�r�r�r�r�r�rfr�r�r�r�r�r�r�rjr�r�ryr�r�r�r�r�r�r�r�ryr�ryr�r�r�r�ryr�rfryr�r�r�r�ryryrfr�rfr�rfr�ryryryrfr�r�r�r�r�r�rfr�r�r�r�r�r�r�ryr�rfryr�r�r�r�r�r�r�r�r�r�ryr�r�ryryr�r�r�r�rfr�r�r�r�r�r�ryr�ryryr�r�r�r�ryryr�r�r�r�r�r�ryr�r�r�r�r�r�r�r�r�ryr�r�ryr�r�r�r�ryryryryr�r�r�rjr�r�r�r�r�r�r�r�ryr�r�r�r�r�r�ryr�r�r�r�r�r�rfr�r�ryr�r�r�r�rfryr�r�r�r�r�r�ryryrfrfr�r�r�r�r�r�rfr�r�r�r�r�r�r�ryr�ryryryr�r�ryr�r�r�r�r�r�r�ryryryr�r�r�ryr�r�r�r�r�r�r�r�ryr�r�rfr�r�r�r�ryrfr�r�r�r�r�r�rfr�r�r�r�r�rfr�r�r�r�r�r�r�r�r�r�r�rjr�ryr�r�r�r�r�r�r�r�ryr�r�r�r�r�ryr�r�r�r�r�r�r�ryr�r�r�r�ryr�r�ryr�r�r�r�ryryr�r�r�r�rfr�r�rfr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�ryr�ryryrfr�r�r�r�r�r�ryr�r�ryr�ryryryr�r�r�r�r�r�ryr�r�r�r�ryr�r�ryr�r�ryr�ryryr�r�r�r�ryr�ryr�r�r�r�r�ryr�r�r�ryr�r�r�r�r�r�r�r�rjr�r�r�ryryr�ryryr�r�r�r�r�ryr�r�r�r�r�r�ryr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rfr�r�r�r�r�rfr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rfr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�ryr�r�ryr�r�r�r�r�r�ryryryryryr�r�r�r�r�r�ryr�r�r�r�r�r�r�r�r�r�r�r�r�rfrfr�r�r�rfr�r�r�r�r�r�r�ryrfr�r�r�r�r�r�ryr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�ryryr�r�r�r�r�ryr�r�r�r�r�r�r�r�rfr�r�r�r�r�r�r�r�r�ryr�r�r�ryr�r�r�r�r�rfr�r�r�r�ryryr�r�r�rfr�r�r�r�r�r�r�r�r�r�r�r�r�r�rfr�r�r�r�r�r�r�r�r�r�r�r�ryr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rfr�r�rfr�ryr�r�r�r�ryr�ryr�r�r�r�r�r�r�r�r�r�r�r�r�r�ryr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rfr�r�r�r�rfrfr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r
�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rfr�r�r�r�r�r�r�r�r�r�r�r�r�rfr�r�r�r�rfr�r�ryr�ryr�r�r�r�r�r�r�r�r�r�r�ryrfr�r�r�r�r�r�ryr�r�r�rfryr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�)ZLatin7_char_to_order_mapZwin1253_char_to_order_mapZGreekLangModelZLatin7GreekModelZWin1253GreekModel�r�r��$/usr/lib/python3.6/langgreekmodel.py�<module>#sV
_vendor/chardet/__pycache__/__init__.cpython-36.pyc000064400000001375151733136220016220 0ustar003

�Pf�@s8ddlmZmZddlmZddlmZmZdd�ZdS)�)�PY2�PY3)�UniversalDetector)�__version__�VERSIONcCsHt|t�s0t|t�s(tdjt|����nt|�}t�}|j|�|j�S)z�
    Detect the encoding of the given byte string.

    :param byte_str:     The byte sequence to examine.
    :type byte_str:      ``bytes`` or ``bytearray``
    z4Expected object of type bytes or bytearray, got: {0})	�
isinstance�	bytearray�bytes�	TypeError�format�typerZfeed�close)Zbyte_strZdetector�r�/usr/lib/python3.6/__init__.py�detects


rN)	�compatrrZuniversaldetectorr�versionrrrrrrr�<module>s_vendor/chardet/__pycache__/compat.cpython-36.opt-1.pyc000064400000000431151733136220016673 0ustar003

�Pfn�@s@ddlZejdkr(dZdZeefZeZndZdZeefZeZdS)�N�TF)rr)	�sys�version_infoZPY2ZPY3�strZunicodeZbase_strZ	text_type�bytes�rr�/usr/lib/python3.6/compat.py�<module>s
_vendor/chardet/__pycache__/euctwfreq.cpython-36.pyc000064400000152605151733136220016471 0ustar003

�Pf�{�@sdZdZ�dZ�dS(g�?i���������	�R���������
�n��!���L�,�A��s������L
�S
���������.�N�i�����:�����?���=�N�K������l	����
����
� ��
�����
�o�$��i���c�8����������z�|���t�"�
�e�@�\��	��������F��M
�Q�H���P�v���f�����D�T
����F�N
��E��O��/���s���3�<�2���&�L�����O
����G���M���?���`��F	�*���g�	�Z�
�:����K��	��	�������`�����g��	� �q��~����P
�	������!��u���*���	�
��~��������e���G�^�������U�C���B��������� �j�o�/���O�2	�[����
�&������S�(����p�]��6�i�
�'�������8��+�%�[���\��������X	�(������0��� �H�
�
�"�!���+��1�"���
�O�G	���f�1�����������2�9���l�,���	�������}�h�#�q
�Q�M�&��X���#����
�����j�����M�����%����$���'��	���N�i��7��J���!���������M��)�P�U
�����
��%�
�
��� ��
� �I���3	���
��r
���������m����$���x�������
������%�&���
��������&��'���'���.�����H	�������$�#���D�&�A��	��������U�G������
�Q
�P�S�'���d��0�F�����*��������J���U�����I	�R
���<��S
�:�7
��
���	�	�9���}���	���V�P�T
��)�C�����&�
��	�)���m	��������4	������n	�*��������O������	�+�(�
������U
�(���5�Y�j�
�l��u��)�
�*�+�V
�
�=�������������4��!����T�,�x�����e���	���J	�P��s
�5�A�V
�/�k���
��l�!�	���A��`��
���A��
����
���	���������M�
�������W
���
�t
����+�}��j�8
�����������-�)�m��	���W
���		��
��a��
�P�K	��,������7�'�u
���{�k�������
�9
������������1���b���	�
����o	���X
�,�Q����������
��
�X
�����5�D���l����[� 
����Y
��%��.�Y	��*����
���R������p����n�c�g�+����'�2�����{��l���m���:
���f�
	���|�5	�������Z
���
������,������;���I�Y�����[
�X�"���	������~�����,��k����-�������D�����>�]���,�������v�L�B�i�&�����
�����G������B���
�!
�����u�������	� �a�����v
�S���}�;
��D���=���0����\
�(������v� ���������.�����9�H�������]
�E���������!���/�������-���Q�*��.�/�������0�3��"
��*�R�����W�����/��b�.���	�����������R����)��2��	������������Z	���T��w
������	��������[	�O��	��`�7�x
�^
��������\	�������������g���n��
�_��~������0��
�a���y
�]	�������������	�;���	�-����L	������/��������`���#�"�������0�
�s�����d�s��
���Y
����k�w�o����������1��2���g�p�Q��U��v������C�S��^	���5���B���_��b�N�����X�����
��L�c�
�
�	�K�w�*�a�G�3��
����1�6�	�2�6	�N����	�3���:��\�q����-�����3�
�������x�
���r���4�
�_
��U����@��5���������/�+�6���������
�7	�Z
�����8�4��C������l��`
��������Y������5��	�{�����$�7��M�V�0�r�����g��V��8��9�,�Z����{�#�W�0���$��
��
��4��[��v���c��V�M	�$�-����X����d����W�[
�������q��
��1�����t����	����
�2��2�:�3� �.���2���
�������%�������
��������
��
�6��4���|���S����_	��
������`�����	�3���;�r����7�Y�L�;���	��	�����T�\�����<���V�5����
�����"�x�%��;�8	� ���2�E�=�!�����^��w��
�"�����s��#�<�Q����r�$��]��%��.�&��3�5�B��y�'��������>�Y�a
�b
�(�
�	�)�*�	�����:��6��^�����(��
�+�+���x�,�	�.�h�/�-����
�V����h���z�.�����<����;�/�?���
�	�0�8������5�6�
�_�)�1�2�	�3�y�	����
��^�4���?�$�+�\
��5�_�W����z
�6�0��
�]
�	���p	�7����8�^
�<
���V�9��6���I�w�}�c
�^��
�9�@��
�A�����	�:�e�1���B��������������7�;�<���=�{
���(�`�R�1�Y��I��	�����6��d
�>�����e
�?���o��q�Y�n���l��C�D�{���
�_
�E�P�U��������E�m��@���:�
��
�A������#
�	�N�
��B��C�W�T�T����
��f
�;�D��&��
�|���������
���<�=
��
����	�V���
��o���	�E��
�F�������������D�-�8���G�d��	���
�H�t����I�z�
��J��
��
�K���|
�����>
��������L�:�	�=���M��?
����H������N���	�O�P��F�k�Q�9�������
�	�u��v������R�S����}�
�>�������
�:�T�	���N	��C������U�~�� �W���G����,������O���|�����O�?�V�`	�� ��~��w����@�!����;������W���"��
�X��Y��R�g
�u���
�h
��
�x�=�
��Z��<����[��#��=� 	�\�$��]�H�I�^�7�O	�$
�
�_�%�v�J�� �!�w�t��`�a�b�9	�&���"�{����	�'�c���d�8������[�9���d�Z�	�e�����@
�!�(�"�f��������#�$�T�g�K���%��W�M�h���i�����%��������
�`
��y�7�m�L�j�k��:	�>�!�������B�)�l�|���
�z����i
���M�����
���m��n�u�q	�v����
�
�*�N�o�p���q�7�.�r���y��
���
��f����s�!	�y��
�O�	�����+�P�j
��
�>������Y��A�t��u�%
�
��v���z�2�w�,��	��	���3�Q����
�x��6�9�k
���&�6����-�
��R�y���Q�9�c��
�'�.�
�o�(�B�F�?�l
���)�e����
�z��
�S�T������{�/���"	�	�b�/���`���0�u��v���1��
�	��U��*���/�E��S��8�Z�|�}�F�+����!�~�2�,�F�P	��m
��
��e�s��C���r	������a�3�Z�%�-�����	�4�����A���
��
�i�
����D�.����	�n������
���	��3�5�J�0���s	��
�����@���V��/��	���6�W�A�7�o��
���P�0��1��n
���a�Q�2��p�:���P��b�������{�3�7�E���	����|�F�G�X�������]��
�4���}
�����
�5��&��8�	
�����	�������L�������
�	�Y�

�����4�
�B�9�8������	�H��
�/�3�Z�[���
�o
����:�0���n�\���]���^������	�����I������;��4��<��=�;�	��
����X�	�����5�_���
�
��������}�0�>�p
�J���K������&
�F�������
��
�#��[�����f��>�q
��?�r
��6�������
�������j�����@�-�p�9�;	�6�y�A������	�'
��������V�����`���4�����K�
�a��
�6����������"���K��

������	�Y�G�o��b�f�c�7��s
�p�r�	�8��������A
�Q	�
�C�,�
��������\�L��u��~
��
����9�����B��Z���
�C������j�d��0�h�g��e���M�}�7��
��
��D�	�����f�:����N�;�
�E�F��(
���<����G�M�=�w�g��~�t	�?�����
�t�m�#	��	��+���h�������[�
�O�	�����
���B
����$	�t
��
���a	���	��H������Q�"�<	�
����-�1��#�u
�'���D�����
��
�I���>�J�%	����P��
�i��
�=�a
��K��v
���������
�����_���j���?�E�1��
��
��k���8�Q����l�����
��m�@�
��b�������R������5�������L�	�w
��n�����4�H����M�����������\��o���N�]���
��\�x
�g��E�
������������p���
��q�
���������O�������r��Z��s�t������P�e�����������o�����^��}�������z�A�&	�������
�����n�-����
�Q��i�
������R���u��	�����B������
�y
����v�w�B�h�q�����<��*�i�S�"�T�C�>�)�
�x�U�����~������z���V�������C
�)�q����W�
	�������D���E�T�F�p�����#�#�y���
����G���z
������	�����S���X�
��������u	�
�=�H���_��R�������w�����Y�z�{�q����3���5���
���{
���|�}�S�~���
�
��Z�[�������`���F��]���n�����I���
����������G�T�J���K�z�S�r�\�)
��
�v	��5������
�b	���j��������
���]�^����c�������
����
�a�:�1�������A�h��a��
�6������b
�-���
�H�_���	��
����������|
��*
�f���	����
�D
��
�L�W����U��
���$���
�
���N���R	�k��E
�������M���S	�'	��`���s��F
���a�}
����
�(	�j��	�~
�V�B�[�b�W�����b�
�I��c
��
��{�����	����	�c�
��	��d�
�
�]������=	�e�������
����
�N�	����f�g�
��
�
��h������
�=��X���������d
��H�C�
�i��T�W�
�
��9��	�N���K�E�j�4�J�
�Y�
��G
�	�k���h�O��
�
��]�P�Z��l���Q�[�k��m�n�R�o�c	������S�X����\�+��������I�T�U�U����
���l��
�D����p��q�
�k�	�m��
�K��������V�8��
� ��r�@�W�r��
�C�G����
��^�
�����8�t��:���
�!��n�s��"��
�#�$�]�t�	�%��
�&�r�u�'�+
�(�)��e
�
�}�
�����*�4�1�+�����������,���
�>	�X�t�r�?	���
�-��v�u�{���w	�w���.�x�T	�,
����/�0�������1��
��2��
�G�R�
��^��Y�
�Z���
�[�3�^��������
�)	�\�	�4�@�v��5��6�7��d	�]��
���^�8��&��	����9�y�u���
���z�R��{�����(�_���<�9��l������_� 
�S�`�k���[�	���
�x�:��;�<�!
�$�=��|���>�d��
��?�e��������������
�}�@�A�B���.�~�a�O�
�����`�b��
�%�����
�;��
����C��D�������f
�"
�L�
�����
�E�;��F�&�G��
�
���x	�H�)����c��I�d�T���
�
�e��������	��J��g�~��U��e	�	����f�a�8�g��
�	�b�h�K�y	�����L��:�
�������*�M�i����c�t���M�N������	��
���f	���Y�d�e��*����O�H
��#
���#�;�
��	������I
�\�@	�����s�$
��
�P�Q�
�	�
�R�j�S�T���+���U��V����L�
�!��J
�\�4�W����X�Y�Z��[�>�w�k�f����+���%
����$��\����l�
�]��l�^�_�g
�%��`�a�g�����
���N�K
�b�c�d��
�c�w��
�e���b�y��f�h�g�h�h��'�i��m���z	�j�d�i����	�<����|����������O�k��
�l���
�a���m�����n�
��u���n�����e�&
�
�o�����	�<���p�q�P�Q��j��	��	����(��_�r�s�����<�E�R�f��������]���I�t�=��u��v��
���o�
�
�k�,�l�w�'
��	�m����F�`����f����
�S�J�x��y�z�n��L
���p�O�����
�{��
���A	�y����U	���|�}�~�F��q����
�	������������p����B	��o�	����p��-
���r���q�q���(
���r����s������
����
�(�.
�s�)
��t���	��*
���u���T�v�M
�{	���t��'�����
�����������g�Z�w�
�	�c��j�+
�p����x������h����
������,
�=����	��u������/
�y��
��	�y�v�	����w���������
�z������>���C�'�����{��Z�����������a����	��q�?��
��|��	�"���
����}�������=��A��1��)����?�
�
����x�	��	��	�
���h��S�����(��
���'�x�y��
���
��
������_�	����>��}�P���������~��
�N
��c��
����H��	�i��������������j���	�*	����
������h
���_���/��<��~�7�k�T���]��U�b�
�t���g	��z���
���|	����������{���	��0
���
��+	��,	����I����������B���4�s�
��������
���?�C���	��G��������������������|�1
���m�C	�����l���i
�}	���?����������h	�����I������V�����������^�������-	�.	���)���
����2
��i�����
�����A�����-
���������
���
���	�������
��
����
�����Q���
�O
���������z�K������[���J�����@��.
�����D	��}�/
�@��
�����
��@��R�����	�j
�~����J�����
�������
�0
��m�����m�~	���	����1
�����	������
�d�������X����9���������U��������L��3
������
�	�������	������
���$�(�v����&���b����������������{�����������������2
��	�������	���
����������^����V	�	�����������������2�W	��
�W�o�i	��k
�X���	�A�4
��
��3
����	������
����
���������	���H�����������D���I�s�
���
��� �����w��{�������/	�n�,������	��	���
�����
��4
�B����P
�5
��
��
�	���
��	���|��z�����
�'��5
����!���y����V�����	��������������	�
���-�������
��
��	�������
��"�%����J�C����x������	��l
�6
�����
��
����X����7
���	�
�N��m
����� �W���!�	��
��
�>�8
��"�#�9
�j	�D�����$�	
�%��
���X�&��'�E��(�F��)��x��	�k������*�+������D���,�������K�-�.�/�:
��0�J���1�|����������E�*�2�
�������H�x�����3������3������;
�p���<
��������4�5�	�	�6��O���
�7�8���9���:����;���������<�
��Y�=�>�
�	�?���@�A�B�
����=
��	����o�	�f�������
��
�	�	�C��D��#��
�>
��E���F�Z�E	����	�[�$�
���G�?
���	�����G������@
�H���A
�\�L�������
�I�	������i�J�	���
�	�K�L�]�M�N�
���j��Q
�
�B
�O�P����Q�
����R�
�S�T���(�����C
���U��V��W�X�����Y������Z����d�[�c�H��_��#�0	�\��]��n�

��	������^���_�k	�D
���`�a���b�����	�c������d����	�����d�^�E
�e�����
����f��_�7����� �
���F
���g�-����!����h�	�q�;������X�� �	����i�j������
�k�l�����~�m�G
�`�n�y�p����o�>��p���q�
���r���a�����s�t�"�u�|���?����v���������e���w�x�y������z�{�@�|���	�}��	����%�s��R
��.���M�?�
���~����b����@������������������������
��6
�I����������c����������2�����m��@����	����N����f����Z�
��������\�H
��d�����J����	���	��
��������K�����#�����������	����K������g�����h���>��.������������)�n
�t�����
�o
��
�I
���
���q�����r��������p
����	����z����U��J
���1	�K
�t��������
�����J����L�	�������N(rrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r	r	r	r	r	r	r	r	r	r		r
	r	r	r
	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r 	r!	r"	r#	r$	r%	r&	r'	r(	r)	r*	r+	r,	r-	r.	r/	r0	r1	r2	r3	r4	r5	r6	r7	r8	r9	r:	r;	r<	r=	r>	r?	r@	rA	rB	rC	rD	rE	rF	rG	rH	rI	rJ	rK	rL	rM	rN	rO	rP	rQ	rR	rS	rT	rU	rV	rW	rX	rY	rZ	r[	r\	r]	r^	r_	r`	ra	rb	rc	rd	re	rf	rg	rh	ri	rj	rk	rl	rm	rn	ro	rp	rq	rr	rs	rt	ru	rv	rw	rx	ry	rz	r{	r|	r}	r~	r	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r
r
r
r
r
r
r
r
r
r	
r

r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r 
r!
r"
r#
r$
r%
r&
r'
r(
r)
r*
r+
r,
r-
r.
r/
r0
r1
r2
r3
r4
r5
r6
r7
r8
r9
r:
r;
r<
r=
r>
r?
r@
rA
rB
rC
rD
rE
rF
rG
rH
rI
rJ
rK
rL
rM
rN
rO
rP
rQ
rR
rS
rT
rU
rV
rW
rX
rY
rZ
r[
r\
r]
r^
r_
r`
ra
rb
rc
rd
re
rf
rg
rh
ri
rj
rk
rl
rm
rn
ro
rp
rq
rr
rs
rt
ru
rv
rw
rx
ry
rz
r{
r|
r}
r~
r
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r
r
r
r
r
r
r
r
r
r	
r

r
r
r

r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r 
r!
r"
r#
r$
r%
r&
r'
r(
r)
r*
r+
r,
r-
r.
r/
r0
r1
r2
r3
r4
r5
r6
r7
r8
r9
r:
r;
r<
r=
r>
r?
r@
rA
rB
rC
rD
rE
rF
rG
rH
rI
rJ
rK
rL
rM
rN
rO
rP
rQ
rR
rS
rT
rU
rV
rW
rX
rY
rZ
r[
r\
r]
r^
r_
r`
ra
rb
rc
rd
re
rf
rg
rh
ri
rj
rk
rl
rm
rn
ro
rp
rq
rr
rs
rt
ru
rv
rw
rx
ry
rz
r{
r|
r}
r~
r
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r)Z EUCTW_TYPICAL_DISTRIBUTION_RATIOZEUCTW_TABLE_SIZEZEUCTW_CHAR_TO_FREQ_ORDER�rr�/usr/lib/python3.6/euctwfreq.py�<module>,s�_vendor/chardet/__pycache__/gb2312freq.cpython-36.opt-1.pyc000064400000112637151733136220017202 0ustar003

�Pf�P��@sdZdZ�d�Z�d�S(�g�������?i������<	�T���	�Q��
�
�����<�w��9	���	������Z����q���
�W
�y���e�o���v
����������L�B��Q
�����Q����E��f����������d�(�y���,�	���
�E�e��
�W�R����|	�R
��
���
�X
��
�j�E���+�P��������;���m���l���������e�H������^�k�6���"�F���
��
�
�^	���
������?�`�u�$�1��
����� �U���1����g�l����		��G���2���q	�� �P��L	�������y	����
���M���k��4	�������
�	�@	�`��5�����N
��7�3��n��u�G�&�b�h�0��M�N���	�
�Y	��
���	�
���
�&��
�Q�
�7�^�h����J��P�����r���������T	�-�G�,�u���	���$���<�B��$�>����x�L�W����n�
���T���Z���
��	�{���	����
�?�
�0�b�;�D�������C�S��U�>	�|�g����	��
�K���

�����%
�5
������l��h�
����?��� �U�����	��*�<
��
���
��
������
���
�!	�X
���	��Q�g������	���r�
����q��
���z
���	�3�������
�
�]�c�z���7���_���C��&�a�H�|
�l�
��=���I�P��
���~
�8�`��w	���
�T��
�������
�������[�q����	�g��|�C�	����N�I����5�
�(���/�H�K
�
�����9��I���|�������
���W�����^�~�����	��=�X��3�
�I����
������3�r������o������{��i�C���!�H��	������f��=
��1�[���|�D��%��t���
�� ��.����y��T���9
��
��#���J���n�����
���>�B�"����a�����M�K����W���	�	�!
�r	�F�+	��
�
����x
�H�J��������2�K���R
�$��9�
�t
���
����������������%�~��
�`	�������'�v������|��'���n
�*�����F��%��)�~��
���J�&���
�!�D���w��[���	���	�'	�	���k���	�
��
�����������h
�B�_��?��g�h���'��������������5�]�������������1	��)�a	�������;	����_����Q�/�u
��
�
�j�[���}��
�����	����Q�^
��G��z������V�O���������m�u�	�6�����-�$���R���}	�
�p�r�]
�
�D�������i������1�n������t�Y�G	���/�|�t�0�
������\��
�����v�����c����
�~�A�"��"	�!�8�C�8	���	�j�
�1��l�>��
��]
��
�~��L�(����K�d��v�D������%��	������	�,��2���
���j�-�.���m�K�����o�	�;�	����d�@�a�	�M	�1
�L�@�2��w��t�Q�
�/�� ���~�����
�/����
�_	�#���O��O�G���k�[�O�o����-
�
���	�!���I������{�r��.�h�
��T
����'�L
�a��
�@��������
�T�/
���������M���i
�l�����
�{�8�#�
����9��S�R�	�w��Z��0
�E�g�g���
���L�����p����S�
�������	�@��I	�L�;
�O�q����)	��:�F��5�J�d�A
���
�)
�E�L�Y�����
�m	������������c��R��!����
���(����]�I���
��	���e����������`�������
��[��C
�(�P�,��}�O	�'�L�\
�
������Z����s�'�	�U�������N
�����
���M��������������i���g�'����	���
��P�0�s	�i��p	�	�n������\�
���
�!�h����m�(�&�
�C��B�)�5	�������
�����>�������	���{�N�����E�3��J�#��c��
��$
���5��x��]�����k����������P
��
���r���L��z�d���#�
����[�S��h��G�����
�U�+��	�y��w�"��V�2
�&�z��	��	��J
�P	��	�������%	�Z�$�o�T�K�1�
��`�V����d�����+����A��������$��	�_�3�\���������b��f������
�A�n���x��F�
�O�[����(���
�#�7�2	�x��f	��	��
�!�6�{
����Y����+����� ������	�	�K�e����
�T�$����h��	�R���"���Z� 
�_��@���a�����3
�
�k�)�j����e��
�A�^���
�W	����h�:
�z
�)
��|�������8�j����
���3�	�
�z���p�y����2�o�D	�=���.��9�a�f��*�_��	�*����
�
���-��
�}��
�����*	�
�t��t�6
�q�������������S� 	���!�)��y�
�����j	��	���V�I���g
���L�����p�D�����%���M����$������E	�"�U	��e
�	�|��	�Q�������
�1
�������$� �]�4�&���z����`���w�
�����
�)�%
�&�����*���8�2�>
�S�
����1�#���
���#�	�
�������F
���,	�>�+�
���Z�3�q���
�$��
�V��	�;���N���m�K�W����5�
�	�R�4���I��D�������
���
������%�K�|���
������9��n	�����
�-�3�2
�p���?
��~��������C��[��
������3	�>���l�
�`�x	�������#
����0������	�L
���Q���R��?�(�m��`
���3����&
�
���s�D��0	�b	�f�@�	����
��=��
�<�������
���6����s������S�����
�8�Z
�����������
����
�����N��	��
�L�
�J	����	�b��g����[
������~��	�	����f�����
��`�4�=	�����l�?	��c�����i����������w����%���d	�^�+������Z��F�I
��t���	�/��	���y�*
�s�&�+����)����	����
��e	�����Q�]	��	�_�7	��0�c�u����L�k�
������#����������?�����
�n�
������	�����	�H��Y��u	���
�	��J���_�_�&���������Z�%��	�s���S����V���������1�'����F�\�
�|�	�W�����|��S�W��	�*�	�R�l�P�#���-�q
����	���4�������a�����@
���X�i�%����8
�3�,��}������d�x��	���#	��6��
��
�.���A��
���r
��
��R�d����+
������
�{�{�H�	�
���������c�	�
�����,��I
���c������=��J�.�,�>����L�e�]��
�C�����*�;�
�� ��p�
����R���9�
�
��
����?�:�	�����Q�'�s���w�[��
��-�
�)����M�������]�}�6�@�
��U����/���s
�	���k	�?���,
�=
�1�M����/�S
�d�����o
�6	�k�u����W�{��x�W���������x�^����[	�+�7�:���U���!����
�.�H�T���Y����	��@�"�C�s�	��	��G
�b��7�~�.
�U��q�	�����{�����Y�/�
����A�
���*�:�8�'��������A�_�0�������i��K��
���t�*����
�������	��
����D�����>����v��

��
�'��V�����
����k���\�����!����*�U
�������z��\�f
����	�
�����������������B	������{��=�
���&�
���V��	��"�!��
�	�����
�(	�=��
��
���9����4�a�����-�Q	��
���
�m�������C���7�}�q�D�4���u���	�����v���F	���u�a
��H�o��
�
	�l�K�A	�����	�z�4�	�!
�H
��	���5��j���`��
�B�:�O�a���#�����d���(�
�&
��^�m�g
�E
��o	���(�G�$�@�
�I���A���`�X��7���/��$�����h�����	�U�"�T�������m����!�������� ��M�	������	�2�q����9����������/�������p�@�X	��j��i
�^
�b��
���n��v������o�o���v�����0���9���$	���f�S�z�
��D�{���X�����V�3�Y�
����
�A�������q�U�
�:	��2�j����{	��	�F��
�	��	������c�i�R	�;�k�h�����
��Y�O�x��z�Z�<�v	��
�
�
���I���4
�M��������b��������C����
����O��8������	�.�]��V�c
��	����
�H	��T����	��>
�
���3�����,����f����l�T�W�H�.�'�>�p�����"��	�J���
��
�v�2����	�x�	�2���g� �4����
�
���{
�z	���	�
����	��������?�����	�J������
�	�8���E�j
�\��8�8
�� ����`��>�b�S����������0�����E����d����1�����n�@������T��	�����Q���C���K�;�k���������_�\	�e��w�������M��.	�r�a����
����
����g���'�
����	�T���4�o���	�X�y��x���	��
�
�U
�^�:�������"�#�������	������N�U���p��"
�-��	���:���
���	������	���W�
�G
�L��
�G�w�o�����M�r�����Q
���I�N�%�
�X����7
��
�a�����	�'
�i�h��}�N�y�?�t���D������l����	����e�J�s�2�*�y�����������q����5�}�s�$�����5
�B������#�d�s�}���~����b
��O
�/	�]����	�]�2����f�7���O�

�R��#
�	�[��E���f
�S
�	�d�F����C	�V
�>��	���B���	�1�6�m����	��������p
�����
��<�z�
�|
�X����s�e�]�����
��
��O���+��	�������:�	�A���������(
�
�m��4�P��	�n��
�o�o���m��y���8�	���<����<�c	�C�����&�w�
��	�$� 
�9�&����)�`�������G�b�E��:�Q�o�F�
�����	�
�F�|�;�
���3�4�%���:�	�c��p�6
�\�C��0�9
��������~	���"�h������	�)��	��
���*
���
�
�%��	�����
�.����9�	�����
���	��"�!��,
��g	�J�g�F
������
���;�i	�Z�E�^�A
���l�5�������4�'
���k�	��	�2���
��	�X�7���
�{���B�x�=�e�(�������'��������������D
�i�k�������-�,��
�e�7
������������n�}
�p�7�j��	��9���
�������0�P�����������O
�
���	���U�
����<
����
�J���t�N���)��.����*��A���r
�]�)������G��_
�p�k
���
	�%�
����
���>�b�_�^�?��:�Y�D�%����
�������Y��
��������l
�A���	��
�	�D�B�	�
���	�@
�(�v
������g��
�	�}�v����
����������������� �-�~��y�%�	��
�_���������j�M���G�+�	������	������3
�;�
�G�)��
���������
�
��,�I�	�����X�A�\������P�v���	�V��=�
�
�6�<���X��
�	��8�-	�Y
���������c�����9�	�-
�u���	��
�R�
���6���	��h	�}��?
�w
�����	�a�(���1��
���>�������]��
�X�r�P�0�,��+�j��`������.���
��&	�W�B
���|�7�U���	��W��Y��
���w
�
��������u���b��q
����b������=�$
��N�J����U�����	���i�K�,���4�r�{����H��p�5���=��	���E�9��;��<��5��
��i��\����p������r��N����^�����K����*��{�+
�	��*�8����	������j
��@�l	�6���2�8���	������'������B
����\�����S�V	��~���������
��V��
�z���^�	����_
�}
�G�q��I�����8�t�h�S	��}�R�	�9��S�n
�P��B�����s���X�}�f����������������n�I�^��T���x���r����_��b��O�P�Q�R�SN(�rrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r	r	r	r	r	r	r	r	r	r		r
	r	r	r
	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r 	r!	r"	r#	r$	r%	r&	r'	r(	r)	r*	r+	r,	r-	r.	r/	r0	r1	r2	r3	r4	r5	r6	r7	r8	r9	r:	r;	r<	r=	r>	r?	r@	rA	rB	rC	rD	rE	rF	rG	rH	rI	rJ	rK	rL	rM	rN	rO	rP	rQ	rR	rS	rT	rU	rV	rW	rX	rY	rZ	r[	r\	r]	r^	r_	r`	ra	rb	rc	rd	re	rf	rg	rh	ri	rj	rk	rl	rm	rn	ro	rp	rq	rr	rs	rt	ru	rv	rw	rx	ry	rz	r{	r|	r}	r~	r	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r
r
r
r
r
r
r
r
r
r	
r

r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r 
r!
r"
r#
r$
r%
r&
r'
r(
r)
r*
r+
r,
r-
r.
r/
r0
r1
r2
r3
r4
r5
r6
r7
r8
r9
r:
r;
r<
r=
r>
r?
r@
rA
rB
rC
rD
rE
rF
rG
rH
rI
rJ
rK
rL
rM
rN
rO
rP
rQ
rR
rS
rT
rU
rV
rW
rX
rY
rZ
r[
r\
r]
r^
r_
r`
ra
rb
rc
rd
re
rf
rg
rh
ri
rj
rk
rl
rm
rn
ro
rp
rq
rr
rs
rt
ru
rv
rw
rx
ry
rz
r{
r|
r}
r~
r
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r
r
r
r
r
r
r
r
r
r	
r

r
r
r

r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r 
r!
r"
r#
r$
r%
r&
r'
r(
r)
r*
r+
r,
r-
r.
r/
r0
r1
r2
r3
r4
r5
r6
r7
r8
r9
r:
r;
r<
r=
r>
r?
r@
rA
rB
rC
rD
rE
rF
rG
rH
rI
rJ
rK
rL
rM
rN
rO
rP
rQ
rR
rS
rT
rU
rV
rW
rX
rY
rZ
r[
r\
r]
r^
r_
r`
ra
rb
rc
rd
re
rf
rg
rh
ri
rj
rk
rl
rm
rn
ro
rp
rq
rr
rs
rt
ru
rv
rw
rx
ry
rz
r{
r|
r}
r~
r
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�)Z!GB2312_TYPICAL_DISTRIBUTION_RATIOZGB2312_TABLE_SIZEZGB2312_CHAR_TO_FREQ_ORDER�r�r�� /usr/lib/python3.6/gb2312freq.py�<module>*s�_vendor/chardet/__pycache__/langcyrillicmodel.cpython-36.opt-1.pyc000064400000073220151733136220021113 0ustar003

�PfF�@s�d�Zd�Zd�Zd�Zd�Zd�Zd�Zeed�d�d�d�d��Zeed�d�d�d�d��Zeed�d�d�d�d��Z	eed�d�d�d�d��Z
eed�d�d�d�d��Zeed�d�d�d�d��Zd�S)�����������������J��K�������������G��B��A��L��@���M�H��E�C��N�I���O������������������������������������������������������������������������D��������������������������������������������������
��'������������	�����
��������6�;�%�,�:�)�0�5�.�7�*�<�$�1�&��"�#�+�-� �(�4�8�!�=�>�3�9�/�?�2�F�gl���P@�?FzKOI8-RZRussian)Zchar_to_order_mapZprecedence_matrixZtypical_positive_ratioZkeep_english_letterZcharset_nameZlanguagezwindows-1251z
ISO-8859-5ZMacCyrillicZIBM866ZIBM855N(rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8rrrrrr9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrrrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�(rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8rrrrrr9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqr\rrrsrtrurvrrr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rxryr�r~r{r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r}rrzr�r�r�r�r�r�r�rwr�(rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8rrrrrr9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r]r^r_r`rarbrcrdrerfrgrhrir�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rxryr�r~r{r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r}rrzr�r�r�r�r�r�r�rwr�rjr\rkrlrmrnrorprqrrrsrtrurvrr(rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8rrrrrr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrr\r�rxryr�r~r{r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r}rrzr�r�r�r�r�r�r�rwr(rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8rrrrrr9r:r;r<r\r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrwr�r�r�rxr�ryr�rzr�r{r�r|r�r}r�r~r�rTrUrVrWrXrYrZrr�r�r�r[r]r^r_r�r�r`rarbrcrdrerfr�r�rgrhrirjrkrlrmrnr�r�r�r�r�r�r�r�r�rorprqrrr�r�rsr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rtrur�r�r�r�r�r�r�r�r�r�r�r�rvrr(rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8rrrrrr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rxryr�r~r{r|r�r�r�r�r�r�r�r�r�r�r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r]r^r_r`rarbrcrdrerfrgrhrir�r�r�r�r}rrzr�r�r�r�r�r�r�rwr�rjr\rkrlrmrnrorprqrrrsrtrurvrr(r�rxrxrxrxrxrxrxrxrxrxrxrxrxrxrxrxr�r�rxrxrxrxr�rxrxrxr|rxr|rxrxrxrxrxrxrxrxrxrxrxrxrxrxrxrxrxrxrxrxrxrxrxrxr�rxr|r|r|r|r|r�r�r|rxrxrxr|rxrxrxrxrxrxrxrxrxrxr|rxrxr�r�rxrxrxrxrxrxrxrxrxr|rxr|r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rxrxrxr|r|rxrxrxrxrxrxrxrxrxr|rxrxr�r�rxrxrxrxrxrxrxrxr|rxrxr�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rxr|rxr|rxrxrxrxrxrxrxrxrxrxrxrxrxr�r�rxrxrxrxrxrxrxrxrxrxrxr|r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rxrxrxrxrxrxrxrxrxrxrxrxrxrxr|rxrxr�r�rxrxrxrxrxrxrxrxrxrxrxr|r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rxrxrxrxrxrxrxrxr|r|r|rxr�rxrxr�rxrxrxrxr|r|rxr�r|r|r|rxrxr|r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rxrxrxrxrxrxr|rxrxrxrxrxr|r|rxr|rxrxrxr|r�r|r|r�r�r|r|r|r|r|r|r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�rxrxrxrxrxrxrxrxrxrxrxrxrxrxrxrxrxrxrxr|r|r|rxr�r|r|rxrxr|r�r|r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�rxrxrxrxrxrxr|rxrxr�r|rxr|r|rxr|rxrxrxrxr|r|rxr�rxr|r|rxr�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rxrxrxrxrxrxrxrxr|r|rxrxrxrxrxr|rxrxrxrxr|r|r|r�rxrxrxr|r|r|r|r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rxrxrxrxrxrxrxrxrxrxr|rxr|rxrxrxrxrxrxr|rxr|r|r�r�rxr|r�r|r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�rxrxrxrxrxrxrxrxrxrxrxr|r�r�rxr�r�r�r�r�r|r�r�r�r|r|r|r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rxrxrxrxrxrxr|rxrxr|r|r|r|r�rxr|rxr|rxr|r�r|r|r�r�r�r|r�r|r�r|r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rxr
xrxrxrxrxrxrxrxrxrxrxr|r|rxr|rxrxrxr|r|r|r|r�r|r|r|r|rxr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�rxr|rxr|r|rxrxrxrxrxrxrxrxrxr�rxr|r�r�rxrxrxrxr|rxrxrxrxr|rxr|r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|rxrxrxrxrxr|r|rxrxr�r|r�r�rxr|rxr|rxr�r�r�r|r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rxr�rxr�r|rxrxrxrxr|rxrxrxrxr�r|r|r�r�r|rxr|r|r|rxr|rxr|r|rxr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rxr|rxr�r|rxr|rxr�r�r|rxrxr|r�r|rxr�r�r|rxr|r|r�r�rxr�rxr|r|r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rxr�rxr�r|rxrxrxrxrxrxrxrxr|r�rxr|r�r�r|r|rxrxrxr|rxrxr�r|r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rxrxrxrxrxrxr|r|rxrxr|r|r|rxrxr�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rxrxrxrxrxrxr|r|rxrxrxrxrxrxrxr�rxr|rxrxr|rxr|r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�rxrxrxrxrxrxr|rxrxrxr|r|r|r|rxr�rxr|rxr�r�r|r�r�r|r|r|r|r�rxr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r|r|rxrxrxrxrxr�r|r|r�rxr�r�rxr�r�rxr�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rxr|r|r�r�rxrxrxr|r|r�r|r|rxr�r�r|r�r�r|r|r�rxr�r�r|r�r�r|r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rxr|rxrxrxrxr�r|r|r|r�r|r�rxrxr�r�r|r�r|r�r|r|r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|rxrxrxrxrxr|r�rxr|r|rxr|r�rxr|r�rxr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rxrxr|rxrxrxr|r|r|rxrxr�r|r�r|r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rxr�r�r|r�r|rxrxr|r|r�r|r|rxr�r|r�r�r�r|r|rxr|r�r|r|r|r|r|rxr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rxrxrxrxrxr�r�r�r�r�r|r|r�r�rxr�r�r�rxr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r
�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�rxrxrxr|r�r�r�r|r�r�r�r�r|r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r|rxr|r|r|r�r|r|r|r�r|r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|rxrxrxrxr�r�r�r�r�r�r�r�r�rxr�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r|rxr|r|r|rxr�r|r|r|r|r|r|r|r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rxrxrxr|r|r|r|rxr|r|r�r�r|r|r|r|r�r�rxr�r|r�r|r�r�r�r�r�r�r�r|r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�rxr|r|r|r|r�r|r�r|r�r|r�r�r�r|r�r|r|r�r�r|r|r�r�r�r�r|r�r�r�r�r�r�r�r�r�r|r�r�r�r|r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rxr|r|r|r�r�r�r|rxr�r�r�r�r|r�r|r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|rxr|rxr|r�r|r|r|r|r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r|r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rxr�r�r�r�r|r|r|rxr|r|r|r|r|r|r|r�r�r�r|r�r|r�r�r�r|r|r�r�r�r�r|r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|rxr|rxrxr|r�r�r�r�r�r�r�r�r|r�r�r�rxr�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|rxrxrxrxr�r|r|r|r|r�r�r�r�r|r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|rxrxrxr|r�r�r�r�r|r|r�r�r�r|r�r�r�rxr�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rxr|rxr|r�r�r�r|r|r|r�r�r�r|r�r�r�r�r�r�r�r�r�rxr�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rxr�r|r�r�r|r|r|r|r|r|r�r|r|r�r�r�r�r�r|r|r|r�r�r�r�r|r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rxr�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rxrxr|r|r�r�r�r|r|r�r�r�r�r|r�r�r
�r|r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|rxr|rxr|r�r�r�r�r�r�r�r�r�r|r�r|r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r|r�r�r�r�r|r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rxr|r|r|r�r�r�r|r|r�r�r�r|r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|rxr�r|r|r|r|r|r|r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r|r�r�r�r�rxr|r�r|r�r|r|r�r�r�r�r�r|r�r�r�r|r�r�r�r�r�r|r�r|r|r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r|r|r|r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r|r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r|r|r|r�rxr�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r|rxr|r|r�r�r�r�r|r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r|r�r�r|r�r|r|r|r|r|r�r|r|r�r�r�r�r�r�r|r|r|r�r|r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r|r|r|r�r�r�r|r|r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r|r|r|r�r�r�r|r|r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r|r|r|r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r|r|r�r�r|r�r|r�r�r�r�r�r|r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r
�r�r�r�r�r�r�r|r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�)
ZKOI8R_char_to_order_mapZwin1251_char_to_order_mapZlatin5_char_to_order_mapZmacCyrillic_char_to_order_mapZIBM855_char_to_order_mapZIBM866_char_to_order_mapZRussianLangModelZ
Koi8rModelZWin1251CyrillicModelZLatin5CyrillicModelZMacCyrillicModelZIbm866ModelZIbm855Model�r�r��'/usr/lib/python3.6/langcyrillicmodel.py�<module>s
_vendor/chardet/__pycache__/euctwprober.cpython-36.opt-1.pyc000064400000002031151733136220017747 0ustar003

�Pf��@sDddlmZddlmZddlmZddlmZGdd�de�ZdS)�)�MultiByteCharSetProber)�CodingStateMachine)�EUCTWDistributionAnalysis)�EUCTW_SM_MODELcs4eZdZ�fdd�Zedd��Zedd��Z�ZS)�EUCTWProbercs,tt|�j�tt�|_t�|_|j�dS)N)	�superr�__init__rrZ	coding_smrZdistribution_analyzer�reset)�self)�	__class__��!/usr/lib/python3.6/euctwprober.pyr"s
zEUCTWProber.__init__cCsdS)NzEUC-TWr)r
rrr
�charset_name(szEUCTWProber.charset_namecCsdS)NZTaiwanr)r
rrr
�language,szEUCTWProber.language)�__name__�
__module__�__qualname__r�propertyrr�
__classcell__rr)rr
r!srN)	ZmbcharsetproberrZcodingstatemachinerZchardistributionrZmbcssmrrrrrr
�<module>s_vendor/chardet/__pycache__/sbcsgroupprober.cpython-36.pyc000064400000002776151733136220017710 0ustar003

�Pf�
�@s�ddlmZddlmZddlmZmZmZmZm	Z	m
Z
ddlmZm
Z
ddlmZmZddlmZddlmZddlmZdd	lmZGd
d�de�ZdS)
�)�CharSetGroupProber)�SingleByteCharSetProber)�Win1251CyrillicModel�
Koi8rModel�Latin5CyrillicModel�MacCyrillicModel�Ibm866Model�Ibm855Model)�Latin7GreekModel�Win1253GreekModel)�Latin5BulgarianModel�Win1251BulgarianModel)�TIS620ThaiModel)�Win1255HebrewModel)�HebrewProber)�Latin5TurkishModelcseZdZ�fdd�Z�ZS)�SBCSGroupProberc
s�tt|�j�tt�tt�tt�tt�tt�tt	�tt
�tt�tt�tt
�tt�tt�g|_t�}ttd|�}ttd|�}|j||�|jj|||g�|j�dS)NFT)�superr�__init__rrrrrrr	r
rrr
rrZprobersrrZset_model_probers�extend�reset)�selfZ
hebrew_proberZlogical_hebrew_proberZvisual_hebrew_prober)�	__class__��%/usr/lib/python3.6/sbcsgroupprober.pyr,s,
zSBCSGroupProber.__init__)�__name__�
__module__�__qualname__r�
__classcell__rr)rrr+srN)ZcharsetgroupproberrZsbcharsetproberrZlangcyrillicmodelrrrrrr	Zlanggreekmodelr
rZlangbulgarianmodelrr
Z
langthaimodelrZlanghebrewmodelrZhebrewproberrZlangturkishmodelrrrrrr�<module>s _vendor/chardet/__pycache__/gb2312prober.cpython-36.pyc000064400000002041151733136220016562 0ustar003

�Pf��@sDddlmZddlmZddlmZddlmZGdd�de�ZdS)�)�MultiByteCharSetProber)�CodingStateMachine)�GB2312DistributionAnalysis)�GB2312_SM_MODELcs4eZdZ�fdd�Zedd��Zedd��Z�ZS)�GB2312Probercs,tt|�j�tt�|_t�|_|j�dS)N)	�superr�__init__rrZ	coding_smrZdistribution_analyzer�reset)�self)�	__class__��"/usr/lib/python3.6/gb2312prober.pyr"s
zGB2312Prober.__init__cCsdS)NZGB2312r)r
rrr
�charset_name(szGB2312Prober.charset_namecCsdS)NZChineser)r
rrr
�language,szGB2312Prober.language)�__name__�
__module__�__qualname__r�propertyrr�
__classcell__rr)rr
r!srN)	ZmbcharsetproberrZcodingstatemachinerZchardistributionrZmbcssmrrrrrr
�<module>s_vendor/chardet/__pycache__/universaldetector.cpython-36.opt-1.pyc000064400000013173151733136220021161 0ustar003

�Pf�0�@s�dZddlZddlZddlZddlmZddlmZmZm	Z	ddl
mZddlm
Z
ddlmZdd	lmZGd
d�de�ZdS)a
Module containing the UniversalDetector detector class, which is the primary
class a user of ``chardet`` should use.

:author: Mark Pilgrim (initial port to Python)
:author: Shy Shalom (original C code)
:author: Dan Blanchard (major refactoring for 3.0)
:author: Ian Cordasco
�N�)�CharSetGroupProber)�
InputState�LanguageFilter�ProbingState)�EscCharSetProber)�Latin1Prober)�MBCSGroupProber)�SBCSGroupProberc	@sneZdZdZdZejd�Zejd�Zejd�Z	dddd	d
ddd
d�Z
ejfdd�Z
dd�Zdd�Zdd�ZdS)�UniversalDetectoraq
    The ``UniversalDetector`` class underlies the ``chardet.detect`` function
    and coordinates all of the different charset probers.

    To get a ``dict`` containing an encoding and its confidence, you can simply
    run:

    .. code::

            u = UniversalDetector()
            u.feed(some_bytes)
            u.close()
            detected = u.result

    g�������?s[�-�]s(|~{)s[�-�]zWindows-1252zWindows-1250zWindows-1251zWindows-1256zWindows-1253zWindows-1255zWindows-1254zWindows-1257)z
iso-8859-1z
iso-8859-2z
iso-8859-5z
iso-8859-6z
iso-8859-7z
iso-8859-8z
iso-8859-9ziso-8859-13cCsNd|_g|_d|_d|_d|_d|_d|_||_tj	t
�|_d|_|j
�dS)N)�_esc_charset_prober�_charset_probers�result�done�	_got_data�_input_state�
_last_char�lang_filter�loggingZ	getLogger�__name__�logger�_has_win_bytes�reset)�selfr�r�'/usr/lib/python3.6/universaldetector.py�__init__QszUniversalDetector.__init__cCsZdddd�|_d|_d|_d|_tj|_d|_|jr>|jj	�x|j
D]}|j	�qFWdS)z�
        Reset the UniversalDetector and all of its probers back to their
        initial states.  This is called by ``__init__``, so you only need to
        call this directly in between analyses of different documents.
        Ng)�encoding�
confidence�languageF�)rrrrr�
PURE_ASCIIrrrrr
)r�proberrrrr^s
zUniversalDetector.resetcCs>|jr
dSt|�sdSt|t�s(t|�}|js�|jtj�rJdddd�|_nv|jtj	tj
f�rldddd�|_nT|jd�r�dddd�|_n:|jd	�r�d
ddd�|_n |jtjtjf�r�dddd�|_d|_|jd
dk	r�d|_dS|j
tjk�r.|jj|��rtj|_
n*|j
tjk�r.|jj|j|��r.tj|_
|dd�|_|j
tjk�r�|j�s^t|j�|_|jj|�tjk�r:|jj|jj�|jjd�|_d|_n�|j
tjk�r:|j�s�t |j�g|_|jt!j"@�r�|jj#t$��|jj#t%��x@|jD]6}|j|�tjk�r�|j|j�|jd�|_d|_P�q�W|j&j|��r:d|_'dS)a�
        Takes a chunk of a document and feeds it through all of the relevant
        charset probers.

        After calling ``feed``, you can check the value of the ``done``
        attribute to see if you need to continue feeding the
        ``UniversalDetector`` more data, or if it has made a prediction
        (in the ``result`` attribute).

        .. note::
           You should always call ``close`` when you're done feeding in your
           document if ``done`` is not already ``True``.
        Nz	UTF-8-SIGg�?�)rrrzUTF-32s��zX-ISO-10646-UCS-4-3412s��zX-ISO-10646-UCS-4-2143zUTF-16Trr���)(r�len�
isinstance�	bytearrayr�
startswith�codecs�BOM_UTF8r�BOM_UTF32_LE�BOM_UTF32_BE�BOM_LE�BOM_BErrr!�HIGH_BYTE_DETECTOR�search�	HIGH_BYTE�ESC_DETECTORrZ	ESC_ASCIIrrr�feedrZFOUND_IT�charset_name�get_confidencerr
r	rZNON_CJK�appendr
r�WIN_BYTE_DETECTORr)rZbyte_strr"rrrr3os|





zUniversalDetector.feedc	Cs�|jr|jSd|_|js&|jjd�n�|jtjkrBdddd�|_n�|jtjkr�d}d}d}x,|j	D]"}|slqb|j
�}||krb|}|}qbW|r�||jkr�|j}|jj
�}|j
�}|jd	�r�|jr�|jj||�}|||jd�|_|jj�tjk�rz|jd
dk�rz|jjd�xn|j	D]d}|�s �qt|t��rZxF|jD] }|jjd|j|j|j
���q4Wn|jjd|j|j|j
���qW|jS)
z�
        Stop analyzing the current document and come up with a final
        prediction.

        :returns:  The ``result`` attribute, a ``dict`` with the keys
                   `encoding`, `confidence`, and `language`.
        Tzno data received!�asciig�?r#)rrrNgziso-8859rz no probers hit minimum thresholdz%s %s confidence = %s)rrrr�debugrrr!r1r
r5�MINIMUM_THRESHOLDr4�lowerr(r�ISO_WIN_MAP�getrZgetEffectiveLevelr�DEBUGr&rZprobers)	rZprober_confidenceZmax_prober_confidenceZ
max_proberr"r4Zlower_charset_namerZgroup_proberrrr�close�s`	

zUniversalDetector.closeN)r�
__module__�__qualname__�__doc__r:�re�compiler/r2r7r<rZALLrrr3r?rrrrr3s"



mr)rBr)rrCZcharsetgroupproberrZenumsrrrZ	escproberrZlatin1proberrZmbcsgroupproberr	Zsbcsgroupproberr
�objectrrrrr�<module>$s_vendor/chardet/__pycache__/euctwfreq.cpython-36.opt-1.pyc000064400000152605151733136220017430 0ustar003

�Pf�{�@sdZdZ�dZ�dS(g�?i���������	�R���������
�n��!���L�,�A��s������L
�S
���������.�N�i�����:�����?���=�N�K������l	����
����
� ��
�����
�o�$��i���c�8����������z�|���t�"�
�e�@�\��	��������F��M
�Q�H���P�v���f�����D�T
����F�N
��E��O��/���s���3�<�2���&�L�����O
����G���M���?���`��F	�*���g�	�Z�
�:����K��	��	�������`�����g��	� �q��~����P
�	������!��u���*���	�
��~��������e���G�^�������U�C���B��������� �j�o�/���O�2	�[����
�&������S�(����p�]��6�i�
�'�������8��+�%�[���\��������X	�(������0��� �H�
�
�"�!���+��1�"���
�O�G	���f�1�����������2�9���l�,���	�������}�h�#�q
�Q�M�&��X���#����
�����j�����M�����%����$���'��	���N�i��7��J���!���������M��)�P�U
�����
��%�
�
��� ��
� �I���3	���
��r
���������m����$���x�������
������%�&���
��������&��'���'���.�����H	�������$�#���D�&�A��	��������U�G������
�Q
�P�S�'���d��0�F�����*��������J���U�����I	�R
���<��S
�:�7
��
���	�	�9���}���	���V�P�T
��)�C�����&�
��	�)���m	��������4	������n	�*��������O������	�+�(�
������U
�(���5�Y�j�
�l��u��)�
�*�+�V
�
�=�������������4��!����T�,�x�����e���	���J	�P��s
�5�A�V
�/�k���
��l�!�	���A��`��
���A��
����
���	���������M�
�������W
���
�t
����+�}��j�8
�����������-�)�m��	���W
���		��
��a��
�P�K	��,������7�'�u
���{�k�������
�9
������������1���b���	�
����o	���X
�,�Q����������
��
�X
�����5�D���l����[� 
����Y
��%��.�Y	��*����
���R������p����n�c�g�+����'�2�����{��l���m���:
���f�
	���|�5	�������Z
���
������,������;���I�Y�����[
�X�"���	������~�����,��k����-�������D�����>�]���,�������v�L�B�i�&�����
�����G������B���
�!
�����u�������	� �a�����v
�S���}�;
��D���=���0����\
�(������v� ���������.�����9�H�������]
�E���������!���/�������-���Q�*��.�/�������0�3��"
��*�R�����W�����/��b�.���	�����������R����)��2��	������������Z	���T��w
������	��������[	�O��	��`�7�x
�^
��������\	�������������g���n��
�_��~������0��
�a���y
�]	�������������	�;���	�-����L	������/��������`���#�"�������0�
�s�����d�s��
���Y
����k�w�o����������1��2���g�p�Q��U��v������C�S��^	���5���B���_��b�N�����X�����
��L�c�
�
�	�K�w�*�a�G�3��
����1�6�	�2�6	�N����	�3���:��\�q����-�����3�
�������x�
���r���4�
�_
��U����@��5���������/�+�6���������
�7	�Z
�����8�4��C������l��`
��������Y������5��	�{�����$�7��M�V�0�r�����g��V��8��9�,�Z����{�#�W�0���$��
��
��4��[��v���c��V�M	�$�-����X����d����W�[
�������q��
��1�����t����	����
�2��2�:�3� �.���2���
�������%�������
��������
��
�6��4���|���S����_	��
������`�����	�3���;�r����7�Y�L�;���	��	�����T�\�����<���V�5����
�����"�x�%��;�8	� ���2�E�=�!�����^��w��
�"�����s��#�<�Q����r�$��]��%��.�&��3�5�B��y�'��������>�Y�a
�b
�(�
�	�)�*�	�����:��6��^�����(��
�+�+���x�,�	�.�h�/�-����
�V����h���z�.�����<����;�/�?���
�	�0�8������5�6�
�_�)�1�2�	�3�y�	����
��^�4���?�$�+�\
��5�_�W����z
�6�0��
�]
�	���p	�7����8�^
�<
���V�9��6���I�w�}�c
�^��
�9�@��
�A�����	�:�e�1���B��������������7�;�<���=�{
���(�`�R�1�Y��I��	�����6��d
�>�����e
�?���o��q�Y�n���l��C�D�{���
�_
�E�P�U��������E�m��@���:�
��
�A������#
�	�N�
��B��C�W�T�T����
��f
�;�D��&��
�|���������
���<�=
��
����	�V���
��o���	�E��
�F�������������D�-�8���G�d��	���
�H�t����I�z�
��J��
��
�K���|
�����>
��������L�:�	�=���M��?
����H������N���	�O�P��F�k�Q�9�������
�	�u��v������R�S����}�
�>�������
�:�T�	���N	��C������U�~�� �W���G����,������O���|�����O�?�V�`	�� ��~��w����@�!����;������W���"��
�X��Y��R�g
�u���
�h
��
�x�=�
��Z��<����[��#��=� 	�\�$��]�H�I�^�7�O	�$
�
�_�%�v�J�� �!�w�t��`�a�b�9	�&���"�{����	�'�c���d�8������[�9���d�Z�	�e�����@
�!�(�"�f��������#�$�T�g�K���%��W�M�h���i�����%��������
�`
��y�7�m�L�j�k��:	�>�!�������B�)�l�|���
�z����i
���M�����
���m��n�u�q	�v����
�
�*�N�o�p���q�7�.�r���y��
���
��f����s�!	�y��
�O�	�����+�P�j
��
�>������Y��A�t��u�%
�
��v���z�2�w�,��	��	���3�Q����
�x��6�9�k
���&�6����-�
��R�y���Q�9�c��
�'�.�
�o�(�B�F�?�l
���)�e����
�z��
�S�T������{�/���"	�	�b�/���`���0�u��v���1��
�	��U��*���/�E��S��8�Z�|�}�F�+����!�~�2�,�F�P	��m
��
��e�s��C���r	������a�3�Z�%�-�����	�4�����A���
��
�i�
����D�.����	�n������
���	��3�5�J�0���s	��
�����@���V��/��	���6�W�A�7�o��
���P�0��1��n
���a�Q�2��p�:���P��b�������{�3�7�E���	����|�F�G�X�������]��
�4���}
�����
�5��&��8�	
�����	�������L�������
�	�Y�

�����4�
�B�9�8������	�H��
�/�3�Z�[���
�o
����:�0���n�\���]���^������	�����I������;��4��<��=�;�	��
����X�	�����5�_���
�
��������}�0�>�p
�J���K������&
�F�������
��
�#��[�����f��>�q
��?�r
��6�������
�������j�����@�-�p�9�;	�6�y�A������	�'
��������V�����`���4�����K�
�a��
�6����������"���K��

������	�Y�G�o��b�f�c�7��s
�p�r�	�8��������A
�Q	�
�C�,�
��������\�L��u��~
��
����9�����B��Z���
�C������j�d��0�h�g��e���M�}�7��
��
��D�	�����f�:����N�;�
�E�F��(
���<����G�M�=�w�g��~�t	�?�����
�t�m�#	��	��+���h�������[�
�O�	�����
���B
����$	�t
��
���a	���	��H������Q�"�<	�
����-�1��#�u
�'���D�����
��
�I���>�J�%	����P��
�i��
�=�a
��K��v
���������
�����_���j���?�E�1��
��
��k���8�Q����l�����
��m�@�
��b�������R������5�������L�	�w
��n�����4�H����M�����������\��o���N�]���
��\�x
�g��E�
������������p���
��q�
���������O�������r��Z��s�t������P�e�����������o�����^��}�������z�A�&	�������
�����n�-����
�Q��i�
������R���u��	�����B������
�y
����v�w�B�h�q�����<��*�i�S�"�T�C�>�)�
�x�U�����~������z���V�������C
�)�q����W�
	�������D���E�T�F�p�����#�#�y���
����G���z
������	�����S���X�
��������u	�
�=�H���_��R�������w�����Y�z�{�q����3���5���
���{
���|�}�S�~���
�
��Z�[�������`���F��]���n�����I���
����������G�T�J���K�z�S�r�\�)
��
�v	��5������
�b	���j��������
���]�^����c�������
����
�a�:�1�������A�h��a��
�6������b
�-���
�H�_���	��
����������|
��*
�f���	����
�D
��
�L�W����U��
���$���
�
���N���R	�k��E
�������M���S	�'	��`���s��F
���a�}
����
�(	�j��	�~
�V�B�[�b�W�����b�
�I��c
��
��{�����	����	�c�
��	��d�
�
�]������=	�e�������
����
�N�	����f�g�
��
�
��h������
�=��X���������d
��H�C�
�i��T�W�
�
��9��	�N���K�E�j�4�J�
�Y�
��G
�	�k���h�O��
�
��]�P�Z��l���Q�[�k��m�n�R�o�c	������S�X����\�+��������I�T�U�U����
���l��
�D����p��q�
�k�	�m��
�K��������V�8��
� ��r�@�W�r��
�C�G����
��^�
�����8�t��:���
�!��n�s��"��
�#�$�]�t�	�%��
�&�r�u�'�+
�(�)��e
�
�}�
�����*�4�1�+�����������,���
�>	�X�t�r�?	���
�-��v�u�{���w	�w���.�x�T	�,
����/�0�������1��
��2��
�G�R�
��^��Y�
�Z���
�[�3�^��������
�)	�\�	�4�@�v��5��6�7��d	�]��
���^�8��&��	����9�y�u���
���z�R��{�����(�_���<�9��l������_� 
�S�`�k���[�	���
�x�:��;�<�!
�$�=��|���>�d��
��?�e��������������
�}�@�A�B���.�~�a�O�
�����`�b��
�%�����
�;��
����C��D�������f
�"
�L�
�����
�E�;��F�&�G��
�
���x	�H�)����c��I�d�T���
�
�e��������	��J��g�~��U��e	�	����f�a�8�g��
�	�b�h�K�y	�����L��:�
�������*�M�i����c�t���M�N������	��
���f	���Y�d�e��*����O�H
��#
���#�;�
��	������I
�\�@	�����s�$
��
�P�Q�
�	�
�R�j�S�T���+���U��V����L�
�!��J
�\�4�W����X�Y�Z��[�>�w�k�f����+���%
����$��\����l�
�]��l�^�_�g
�%��`�a�g�����
���N�K
�b�c�d��
�c�w��
�e���b�y��f�h�g�h�h��'�i��m���z	�j�d�i����	�<����|����������O�k��
�l���
�a���m�����n�
��u���n�����e�&
�
�o�����	�<���p�q�P�Q��j��	��	����(��_�r�s�����<�E�R�f��������]���I�t�=��u��v��
���o�
�
�k�,�l�w�'
��	�m����F�`����f����
�S�J�x��y�z�n��L
���p�O�����
�{��
���A	�y����U	���|�}�~�F��q����
�	������������p����B	��o�	����p��-
���r���q�q���(
���r����s������
����
�(�.
�s�)
��t���	��*
���u���T�v�M
�{	���t��'�����
�����������g�Z�w�
�	�c��j�+
�p����x������h����
������,
�=����	��u������/
�y��
��	�y�v�	����w���������
�z������>���C�'�����{��Z�����������a����	��q�?��
��|��	�"���
����}�������=��A��1��)����?�
�
����x�	��	��	�
���h��S�����(��
���'�x�y��
���
��
������_�	����>��}�P���������~��
�N
��c��
����H��	�i��������������j���	�*	����
������h
���_���/��<��~�7�k�T���]��U�b�
�t���g	��z���
���|	����������{���	��0
���
��+	��,	����I����������B���4�s�
��������
���?�C���	��G��������������������|�1
���m�C	�����l���i
�}	���?����������h	�����I������V�����������^�������-	�.	���)���
����2
��i�����
�����A�����-
���������
���
���	�������
��
����
�����Q���
�O
���������z�K������[���J�����@��.
�����D	��}�/
�@��
�����
��@��R�����	�j
�~����J�����
�������
�0
��m�����m�~	���	����1
�����	������
�d�������X����9���������U��������L��3
������
�	�������	������
���$�(�v����&���b����������������{�����������������2
��	�������	���
����������^����V	�	�����������������2�W	��
�W�o�i	��k
�X���	�A�4
��
��3
����	������
����
���������	���H�����������D���I�s�
���
��� �����w��{�������/	�n�,������	��	���
�����
��4
�B����P
�5
��
��
�	���
��	���|��z�����
�'��5
����!���y����V�����	��������������	�
���-�������
��
��	�������
��"�%����J�C����x������	��l
�6
�����
��
����X����7
���	�
�N��m
����� �W���!�	��
��
�>�8
��"�#�9
�j	�D�����$�	
�%��
���X�&��'�E��(�F��)��x��	�k������*�+������D���,�������K�-�.�/�:
��0�J���1�|����������E�*�2�
�������H�x�����3������3������;
�p���<
��������4�5�	�	�6��O���
�7�8���9���:����;���������<�
��Y�=�>�
�	�?���@�A�B�
����=
��	����o�	�f�������
��
�	�	�C��D��#��
�>
��E���F�Z�E	����	�[�$�
���G�?
���	�����G������@
�H���A
�\�L�������
�I�	������i�J�	���
�	�K�L�]�M�N�
���j��Q
�
�B
�O�P����Q�
����R�
�S�T���(�����C
���U��V��W�X�����Y������Z����d�[�c�H��_��#�0	�\��]��n�

��	������^���_�k	�D
���`�a���b�����	�c������d����	�����d�^�E
�e�����
����f��_�7����� �
���F
���g�-����!����h�	�q�;������X�� �	����i�j������
�k�l�����~�m�G
�`�n�y�p����o�>��p���q�
���r���a�����s�t�"�u�|���?����v���������e���w�x�y������z�{�@�|���	�}��	����%�s��R
��.���M�?�
���~����b����@������������������������
��6
�I����������c����������2�����m��@����	����N����f����Z�
��������\�H
��d�����J����	���	��
��������K�����#�����������	����K������g�����h���>��.������������)�n
�t�����
�o
��
�I
���
���q�����r��������p
����	����z����U��J
���1	�K
�t��������
�����J����L�	�������N(rrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r	r	r	r	r	r	r	r	r	r		r
	r	r	r
	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r 	r!	r"	r#	r$	r%	r&	r'	r(	r)	r*	r+	r,	r-	r.	r/	r0	r1	r2	r3	r4	r5	r6	r7	r8	r9	r:	r;	r<	r=	r>	r?	r@	rA	rB	rC	rD	rE	rF	rG	rH	rI	rJ	rK	rL	rM	rN	rO	rP	rQ	rR	rS	rT	rU	rV	rW	rX	rY	rZ	r[	r\	r]	r^	r_	r`	ra	rb	rc	rd	re	rf	rg	rh	ri	rj	rk	rl	rm	rn	ro	rp	rq	rr	rs	rt	ru	rv	rw	rx	ry	rz	r{	r|	r}	r~	r	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r
r
r
r
r
r
r
r
r
r	
r

r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r 
r!
r"
r#
r$
r%
r&
r'
r(
r)
r*
r+
r,
r-
r.
r/
r0
r1
r2
r3
r4
r5
r6
r7
r8
r9
r:
r;
r<
r=
r>
r?
r@
rA
rB
rC
rD
rE
rF
rG
rH
rI
rJ
rK
rL
rM
rN
rO
rP
rQ
rR
rS
rT
rU
rV
rW
rX
rY
rZ
r[
r\
r]
r^
r_
r`
ra
rb
rc
rd
re
rf
rg
rh
ri
rj
rk
rl
rm
rn
ro
rp
rq
rr
rs
rt
ru
rv
rw
rx
ry
rz
r{
r|
r}
r~
r
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r
r
r
r
r
r
r
r
r
r	
r

r
r
r

r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r 
r!
r"
r#
r$
r%
r&
r'
r(
r)
r*
r+
r,
r-
r.
r/
r0
r1
r2
r3
r4
r5
r6
r7
r8
r9
r:
r;
r<
r=
r>
r?
r@
rA
rB
rC
rD
rE
rF
rG
rH
rI
rJ
rK
rL
rM
rN
rO
rP
rQ
rR
rS
rT
rU
rV
rW
rX
rY
rZ
r[
r\
r]
r^
r_
r`
ra
rb
rc
rd
re
rf
rg
rh
ri
rj
rk
rl
rm
rn
ro
rp
rq
rr
rs
rt
ru
rv
rw
rx
ry
rz
r{
r|
r}
r~
r
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r)Z EUCTW_TYPICAL_DISTRIBUTION_RATIOZEUCTW_TABLE_SIZEZEUCTW_CHAR_TO_FREQ_ORDER�rr�/usr/lib/python3.6/euctwfreq.py�<module>,s�_vendor/chardet/__pycache__/big5freq.cpython-36.opt-1.pyc000064400000152574151733136220017134 0ustar003

�Pfz�@sdZdZ�dZ�dS(g�?���	�������	�R������a����n���!����L�,�B��������{
�]
���
�j���.�N�i�����:����?���=�N�K����k�q	�����
�����b��
�����
�o�2��i����c�8���������{�|����"��
��@�\���	��������F��|
�Q�H���P�v��������D�^
����F�}
��E��O��0���s���4�<�2���&�M�����~
����G���[���?���a��K	�*��g��	�Z�
�:����K�	��	�������`�l�����
	� �q��~����
�	�������c��u���*���	��
��~�m������e��G�^��n���U�C���C��������� �j�o�/���P�7	�[������
�?����o�S�(����p�]��6�j��@�������8��+�3�[���\�������]	�A������1����H��
��
�d����+��2�����
�O�L	���f�1����������3�9���l�,���	���������e�z
�Q�M�&���X�������
�����k�p��M�����%�������'��	��\���7��J��!���������N��B�P�_
�q����
����
��
�� ��� �I���8	���
��{
����
����m����f���x�������
��
���g����
��������&��'������.������M	������$�#���D�h�A�	����r���U�G������
�
�Q�S�i���d��0�F�����C����� ���J����U�����N	�
���<��
�:�A
��
��	� 	�9���}���	���W�P�
��)�D����4��
��	�)����r	��s����t��9	�����s	�*�������]�����	�D�j�(
�
����
��u�5�Y�j�	�l��u����
���`
�
�=������������5��!����T�E�x���e���	���O	�P��|
�6�A�
�/�k���
��l�!�	���B��`��
����A�������
�v�	����w�����M�
�������a
����
�}
���x�,�}���B
����������F�k�m�	���
���	���
��b��)
�^�P	�
�,������7�5�~
�y�|��z������
�C
�{�����������1���b��	��
����t	���
��_�����������
��
�b
�����5�D���l��|�[�*
����
��%��G�^	��l�����
��`�����p����n�c�g�m����'�2�����{���������D
���f�	���|�:	�������
����
�
������n��������;���I�Y�}���
�X�"��
�����������-��l������������E��~��>�]���,�������v�L�B�i�&����������G������B���
�+
�����������	� �a����
�a���}�E
��D���=���0����
�6������v�!������������9�H�����
�F���������"��H�������
�o���R�*��.�������I�3��,
��*�S����X�����/��b�p��	�����������R����7��2���	�����������_	���b��
�����	�������`	�O��	���`�7�
�
��������a	�������������g������
�_��~��������
�a���
�b	�������������	�;���	�-�
���Q	������q��������`���#�#�������r�
�s�����d�t��
���c
����k�w���������������g��Q��U�
�������C�S��c	���5���B���_��c�N�����Y������
��L�d��
�
�	�K��8�a�G���
����s�6�	�t�;	�N����	�4���:��\�q����.�����u��
���������
��
�r�����
�
���c����@�����������/�9�����������
�<	�d
�����8�v��C����
���m��
��������Z��������w��	�{������$���M�d�0�r�����g��V�����:�Z����{�$�e�0���$��
�
��5��[���v���c��V�R	�$�;����f�����d����W�e
��������
��J�����u���	��
���
�K��2��L� �.���2����������%�������
����������
��
�x��M���}���T�����d	�
������`�����	�3���;�����y�Y�L����	��	�����U�\��������V�N����
��� 
��"�x�%��;�=	� ����2�E��!������^���"�w��
�#��������$�<�Q����r�%��]���&��<�'��3�6�B���(�����������g�
�
�)�
�	�*�+�!	������:��7��^�����(�
�,�+���x�-��/�h�=�.�������V����h����/�����<����;�0�����
�	�1�z������5�6�
�_�)�2�3���4�y�"	�����
��^�5����?�%�+�f
��6�_�W����
�7�>��
�g
�#	���u	�8����9�h
�F
���V�{���O���I�w�~�
�^���:�����������	�;�e�1������������������P�<�=���>�
���(�`�R�?�Y��I��	������7���
�?�����
�@���o��q�Y�n���l��������
�i
�E�P�V��!
�������n��A���|�
��
�B������-
�	�O��
��C��D�W�T�T�������
�}�E���&�
���������
���~�G
��
����$	�W�����o���
�F��G����"
�������#
�����D�-�Q���H�e��	���I�����J�z�
��K���L��$
�
�����H
����������M�:��	����N���I
������H������O����	�P�Q����k�R�R��������
�
������������S�T�����%
���������&
�S�U����S	��C������V�~��!�X�������,��������O���|�����P��e	���������������T������W�����X��Y��R�
�v���
���=���Z��U����[�����V�%	�\���]���^�8�T	�.
�'
�_��v�����w�t��`�a�b�>	����"�{����	��c���d�9������[�9���d�h�
�e�����J
�!���f����������T�g����%��W�M�h���i�������������(
�j
��y�7�m��j�k��?	�W�"�������B��l�|���)
�z����
����������m��n�u�v	�w���	�*
���o�p���q�7�.�r����
���
��f�����s�&	�y��� �������
��>�������Y���t��u�/
�+
��v����@�w���	��	���3�����,
�x���6�9�
�	���6�����!���y���Q�:�c�
���-
�o���F�X�
����f����z��
��������{�/���'	�	�b����`����u�	�v������
�
�
������0�E��S��8�[�|�}�G�����!�~���F�U	��
����e�s�����w	����"��a�	�Z�&������	�
�����A���
��i����������
�o������
�
��	��A��J�0���x	��
�����Y�������	�����Z�8�o����Q�����
���a�Q����p�;���P��b����������
����	��������������]������
��������'���.
�����
��������L�������	��/
�����B�0
�[��8������	���/�3������
�����0���n��������������	�����������4�����<�	��
����Y�	�����C�����
��������}�1��
����K������0
�F�������
��#��\�����f��>�
���
����������
�������j������-�p�9�@	�6�y�������
�1
���������V���������4�����K�1
����
�D����������#������2
�����	�Z�H�p����g������
�q�r�
����������K
�V	�
�\�,�
��������]���u��
������������Z���
������j����0�h�h��������E���	
���	���������������
����2
���������M���w�����y	�?������

�t�m�(	��	��+�����������i�
��	������
���L
����)	�
��
���f	���	��������R�"�A	�3
����-�1��$�
�(���]���#��$��
������ �*	������
����
�=�k
��!��
���������
�����_���������^�2���
������9������������4
������
��b�����������5��������"�	�
��������4�I����#����������j�����$�k���
��\�
�g��E�5
����������������
����6
�����	���%���������[�����������&�e�����������o�����l�	�}�������z���+	�����������n�-���
�'��i�
������(������	������������
��������B�i�q�����<��*�j�)�"�*���>�)�7
���+�����������z���,������M
�)�r����-�	�������������T���p�����#�#������������
�����	�	�����S���.�8
��������z	�9
�=�����m��S�������x�����/�����q����3���5���:
��
�������������
�;
��0�1�������n���_��]���n���������
�%���������`��������z�T�s�2�3
���
�{	��5������
�g	���k���������
���3�4����c� ����
�
����a�:�1�����!�A�h� �o��
�6�"����l
�-���
�a�5���	��
���#�!��$��
��4
�f���	�%�&�
�N
�
���W������
�'��$����<
��N���W	�k��O
�����������X	�,	��6�&�t���P
���7�
���
�-	�j��	�
��C�[�p���'�8�
�b���m
��
��{�(���	����	�9�
��	��:�
�
�^�������B	�;��������
����
���	���)�<�=�
��
��>������
�=�������������n
��H�D�
�?���U�W�
�=
��:��	�N��K�E�@�4�c�
��>
��Q
�
�A���h����
�
�(�]������B�*����l���C�D���E�h	���������X�����+������+�J�����U����
���m� �E����F��G�
�k�	�n��
�d����������F�!� ��r�@���H�"�C�G�����
��^�
����8�t�)�;��?
�!��o�I���"�#�#�$��J�	�%��
�&�r�K�'�5
�(�)��o
�@
�}�A
��,��*�4�1�+�������"��,��
�C	���u�r�D	�-�
�-��L�v�{���|	�M�.�.�N�Y	�6
����/�0�/����1�$��2�%�G�R�B
���_����C
����D
���3���������.	���	�4�@�w��5��6�7��i	���&�0���8�	�&��	�*��+�G�O�u��#�
�1�P�R��Q�����)�����=�9��l�������E
�S��k��\�	���
�y�:��;�<�F
�%�=��R���>�d�'�,�?�e�$��2�3�-����4��(�S�@�A�B���.�T��O�
��������
�&�����
�<�
����C��D���U���p
�G
�e�
����
�E�;�V�F�'�G�)�
���}	�H�*���W���I��T��
�
���������	��J�.�g�~��U��j	�	���X����8��*�	����K�~	�����L��H�
����%���+�f������t���M�N�������	�� 
���k	�Y��Y�����*�/���O�R
�Z�H
���#�I�
��	����5�S
�]�E	�[�\�
�s�I
�+�P�Q�,�	�
�R��S�T���,���U�]�V�����L�-�!��T
�\�4�W���X�Y�Z��[�>�w������+�^��J
���_�$��\�����l�.�]�`��^�_�q
�%��`�a�����
�6�g�U
�b�c�d�a�
�q�x�
�e�
�b�y��f�h�g�h���'�i�����	�j�r��
�0�	�=���b�|��������	��h�k��
�l���
�a��c�m�����n�
��u���d�e���s�K
�/�o�7��&�	�J�8�p�q�i�j�f���	�g�	����(��`�r�s�����<�F�k�f��������]���I�t�>��u��v��
����
�0��-��w�L
��	�����G�a��
�t���
�S�K�x��y�z���V
����O���1���
�{�
����F	�y��9�Z	���|�}�~�F�������
�	�:�����2�����p��;�G	���	�h�����7
������q����M
��������������
����
�(�8
��N
�	��3��	��O
�<����T��W
�	�����(�����1����������u�Z��
�	�c��j�P
�p���
�������v�i�j��
�����Q
�K����	�'��������9
���
��	�z��	�
���k���������2�	��l����>�=�C�'���>�
��Z�����m�n����b����	��q�?��
����	�"��
�?�
�}����o�@�>��A��1��)����?�
��
����x�	��	��	�3���h���l����)���
�p��'����4���
�
����A�_�	�B��L��
�P�q�����r�����X
��c�s�!
����H��	�w����C�D�t�������x�
��	�/	����5�
�4�u��r
���_���/��<��~�7�y�m���^�5�n�b�
�t���l	�����6���	�7��������v����	�8�:
��
��0	��1	��w��I������(����B����4�s��
�����
��
���?�C��x�	�E�H����F�y�z�G�����{����|��;
�}�9�m�H	�~���z���s
�	���M����������m	�����I������o�����������_��������2	�3	�H��*���
����<
��i�:����
�����A������R
���������
���"
���	�������
���
����
�����Q���
�Y
��������{�K�����[��J�����N��S
�����I	���T
�@���
�����
��@��R����	�t
�����J�I���
�������
�U
��m�����{�	���	�
�;�V
����	���<����
�d�������X����9����6�����U����� ���L�!�=
���)���
�	������	�����
��=�$�(�v����&���c������	�
���>�������{�"��������#��������W
��	��������	��$�
�%���������&�^����[	�	���'�����������(� ����2�\	�J�
�p�p�n	��u
�q�)���	�O�>
��
�*�X
�����	�+����,��
����
������K��
�		���I�-���!������.�D���J�s�#
����
���7����/�w��|������"�4	�|�,�������	���������?�Y
�P�0��1�Z
�Z
�
��
�	����
�#�@�2��|��z�����A�'��?
���	���8���y���L�V�
�3���	���$������M���%����	��
��
�.��4�N�����
��	�������
�5�9�%������K�Q����x�6�7��B��
	�O�v
�[
���&���
��
����X��*��\
�P��	�
�N�+�w
������W�C���	�Q�D��?�]
�����^
�o	�R�	
���E��
��F��'��X��G��S���T�R��8�x��	�k�S��T��� ������D��9�!�����(��L�"�#�$�%�_
��&�J�:�U��}��)��

�;�<�
�E�+�'�
��H�=�V�*�H�y�,�+�W�3�>����?�(�I�,�-�`
�q��a
�����
���)�*�X�	�+��O���Y�,�-��.��/�����0�.������1�
�J�r�2�3�$
�	�4��5�6�7��
��b
��	����}�	�f���K����
��
�	�	�8�Z�9���:����
�c
���:���;�s�J	�@����	�t�;�
�/�<�d
��	����U���0��e
�=���f
�u�M�������
�>�	�[��A�L��i�?�	�
��
�	�@�A�v�B�C�\���j��[
��
�g
�D�E����F��B��G��
��H���(���M�C�h
���I�N�J��K�L���M���1�2�N���d�O�d�V�D�`��#�5	�P�]�Q�E�n�
��	���^�R�_�F�S�p	�i
���T�U��

��V�����	��G�����e�H�O�	��P�`�W�w�j
�X� ��3�
�a��I�Y�J�x�7�����-��
�4�k
��b�Z�-���.�c��[�	�r�;���K�5�L�X�� �	����\�]���6�%
�^�_����~�`�l
�y�a�z�~�����b�?��c�7�d�
���e��z�����M�f�g�/�h�|��@�d���i�N�����8�f���j�k��O�P����l�m�@�n�9�	�o��	�����<�s��\
�Q�.�R�N�@��
��p��S�{�q��A���������T�
����:����r��
�s�@
�W�;�t�u��v�w�x�y�|�����z�����3�Q����m��A�{�|���	��}�O�R����g���~�Z�
��������\�m
��}���<�X����	��	��
����
���Y�����0��������=��	����L������h�����i�>�>��/�?�U�������e�)�x
�t��f�S���
�y
��
�n
�
���T��@����������������	����z�U�g��V���o
���6	�p
�t���������
�A���J�V�h�Z��	���W�����N(rrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r	r	r	r	r	r	r	r	r	r		r
	r	r	r
	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r 	r!	r"	r#	r$	r%	r&	r'	r(	r)	r*	r+	r,	r-	r.	r/	r0	r1	r2	r3	r4	r5	r6	r7	r8	r9	r:	r;	r<	r=	r>	r?	r@	rA	rB	rC	rD	rE	rF	rG	rH	rI	rJ	rK	rL	rM	rN	rO	rP	rQ	rR	rS	rT	rU	rV	rW	rX	rY	rZ	r[	r\	r]	r^	r_	r`	ra	rb	rc	rd	re	rf	rg	rh	ri	rj	rk	rl	rm	rn	ro	rp	rq	rr	rs	rt	ru	rv	rw	rx	ry	rz	r{	r|	r}	r~	r	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r
r
r
r
r
r
r
r
r
r	
r

r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r 
r!
r"
r#
r$
r%
r&
r'
r(
r)
r*
r+
r,
r-
r.
r/
r0
r1
r2
r3
r4
r5
r6
r7
r8
r9
r:
r;
r<
r=
r>
r?
r@
rA
rB
rC
rD
rE
rF
rG
rH
rI
rJ
rK
rL
rM
rN
rO
rP
rQ
rR
rS
rT
rU
rV
rW
rX
rY
rZ
r[
r\
r]
r^
r_
r`
ra
rb
rc
rd
re
rf
rg
rh
ri
rj
rk
rl
rm
rn
ro
rp
rq
rr
rs
rt
ru
rv
rw
rx
ry
rz
r{
r|
r}
r~
r
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r
r
r
r
r
r
r
r
r
r	
r

r
r
r

r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r 
r!
r"
r#
r$
r%
r&
r'
r(
r)
r*
r+
r,
r-
r.
r/
r0
r1
r2
r3
r4
r5
r6
r7
r8
r9
r:
r;
r<
r=
r>
r?
r@
rA
rB
rC
rD
rE
rF
rG
rH
rI
rJ
rK
rL
rM
rN
rO
rP
rQ
rR
rS
rT
rU
rV
rW
rX
rY
rZ
r[
r\
r]
r^
r_
r`
ra
rb
rc
rd
re
rf
rg
rh
ri
rj
rk
rl
rm
rn
ro
rp
rq
rr
rs
rt
ru
rv
rw
rx
ry
rz
r{
r|
r}
r~
r
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r)ZBIG5_TYPICAL_DISTRIBUTION_RATIOZBIG5_TABLE_SIZEZBIG5_CHAR_TO_FREQ_ORDER�rr�/usr/lib/python3.6/big5freq.py�<module>+s�_vendor/chardet/__pycache__/compat.cpython-36.pyc000064400000000431151733136220015734 0ustar003

�Pfn�@s@ddlZejdkr(dZdZeefZeZndZdZeefZeZdS)�N�TF)rr)	�sys�version_infoZPY2ZPY3�strZunicodeZbase_strZ	text_type�bytes�rr�/usr/lib/python3.6/compat.py�<module>s
_vendor/chardet/__pycache__/langcyrillicmodel.cpython-36.pyc000064400000073220151733136220020154 0ustar003

�PfF�@s�d�Zd�Zd�Zd�Zd�Zd�Zd�Zeed�d�d�d�d��Zeed�d�d�d�d��Zeed�d�d�d�d��Z	eed�d�d�d�d��Z
eed�d�d�d�d��Zeed�d�d�d�d��Zd�S)�����������������J��K�������������G��B��A��L��@���M�H��E�C��N�I���O������������������������������������������������������������������������D��������������������������������������������������
��'������������	�����
��������6�;�%�,�:�)�0�5�.�7�*�<�$�1�&��"�#�+�-� �(�4�8�!�=�>�3�9�/�?�2�F�gl���P@�?FzKOI8-RZRussian)Zchar_to_order_mapZprecedence_matrixZtypical_positive_ratioZkeep_english_letterZcharset_nameZlanguagezwindows-1251z
ISO-8859-5ZMacCyrillicZIBM866ZIBM855N(rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8rrrrrr9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrrrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�(rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8rrrrrr9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqr\rrrsrtrurvrrr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rxryr�r~r{r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r}rrzr�r�r�r�r�r�r�rwr�(rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8rrrrrr9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r]r^r_r`rarbrcrdrerfrgrhrir�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rxryr�r~r{r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r}rrzr�r�r�r�r�r�r�rwr�rjr\rkrlrmrnrorprqrrrsrtrurvrr(rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8rrrrrr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrr\r�rxryr�r~r{r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r}rrzr�r�r�r�r�r�r�rwr(rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8rrrrrr9r:r;r<r\r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrwr�r�r�rxr�ryr�rzr�r{r�r|r�r}r�r~r�rTrUrVrWrXrYrZrr�r�r�r[r]r^r_r�r�r`rarbrcrdrerfr�r�rgrhrirjrkrlrmrnr�r�r�r�r�r�r�r�r�rorprqrrr�r�rsr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rtrur�r�r�r�r�r�r�r�r�r�r�r�rvrr(rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8rrrrrr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rxryr�r~r{r|r�r�r�r�r�r�r�r�r�r�r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r]r^r_r`rarbrcrdrerfrgrhrir�r�r�r�r}rrzr�r�r�r�r�r�r�rwr�rjr\rkrlrmrnrorprqrrrsrtrurvrr(r�rxrxrxrxrxrxrxrxrxrxrxrxrxrxrxrxr�r�rxrxrxrxr�rxrxrxr|rxr|rxrxrxrxrxrxrxrxrxrxrxrxrxrxrxrxrxrxrxrxrxrxrxrxr�rxr|r|r|r|r|r�r�r|rxrxrxr|rxrxrxrxrxrxrxrxrxrxr|rxrxr�r�rxrxrxrxrxrxrxrxrxr|rxr|r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rxrxrxr|r|rxrxrxrxrxrxrxrxrxr|rxrxr�r�rxrxrxrxrxrxrxrxr|rxrxr�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rxr|rxr|rxrxrxrxrxrxrxrxrxrxrxrxrxr�r�rxrxrxrxrxrxrxrxrxrxrxr|r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rxrxrxrxrxrxrxrxrxrxrxrxrxrxr|rxrxr�r�rxrxrxrxrxrxrxrxrxrxrxr|r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rxrxrxrxrxrxrxrxr|r|r|rxr�rxrxr�rxrxrxrxr|r|rxr�r|r|r|rxrxr|r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rxrxrxrxrxrxr|rxrxrxrxrxr|r|rxr|rxrxrxr|r�r|r|r�r�r|r|r|r|r|r|r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�rxrxrxrxrxrxrxrxrxrxrxrxrxrxrxrxrxrxrxr|r|r|rxr�r|r|rxrxr|r�r|r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�rxrxrxrxrxrxr|rxrxr�r|rxr|r|rxr|rxrxrxrxr|r|rxr�rxr|r|rxr�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rxrxrxrxrxrxrxrxr|r|rxrxrxrxrxr|rxrxrxrxr|r|r|r�rxrxrxr|r|r|r|r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rxrxrxrxrxrxrxrxrxrxr|rxr|rxrxrxrxrxrxr|rxr|r|r�r�rxr|r�r|r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�rxrxrxrxrxrxrxrxrxrxrxr|r�r�rxr�r�r�r�r�r|r�r�r�r|r|r|r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rxrxrxrxrxrxr|rxrxr|r|r|r|r�rxr|rxr|rxr|r�r|r|r�r�r�r|r�r|r�r|r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rxr
xrxrxrxrxrxrxrxrxrxrxr|r|rxr|rxrxrxr|r|r|r|r�r|r|r|r|rxr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�rxr|rxr|r|rxrxrxrxrxrxrxrxrxr�rxr|r�r�rxrxrxrxr|rxrxrxrxr|rxr|r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|rxrxrxrxrxr|r|rxrxr�r|r�r�rxr|rxr|rxr�r�r�r|r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rxr�rxr�r|rxrxrxrxr|rxrxrxrxr�r|r|r�r�r|rxr|r|r|rxr|rxr|r|rxr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rxr|rxr�r|rxr|rxr�r�r|rxrxr|r�r|rxr�r�r|rxr|r|r�r�rxr�rxr|r|r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rxr�rxr�r|rxrxrxrxrxrxrxrxr|r�rxr|r�r�r|r|rxrxrxr|rxrxr�r|r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rxrxrxrxrxrxr|r|rxrxr|r|r|rxrxr�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rxrxrxrxrxrxr|r|rxrxrxrxrxrxrxr�rxr|rxrxr|rxr|r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�rxrxrxrxrxrxr|rxrxrxr|r|r|r|rxr�rxr|rxr�r�r|r�r�r|r|r|r|r�rxr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r|r|rxrxrxrxrxr�r|r|r�rxr�r�rxr�r�rxr�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rxr|r|r�r�rxrxrxr|r|r�r|r|rxr�r�r|r�r�r|r|r�rxr�r�r|r�r�r|r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rxr|rxrxrxrxr�r|r|r|r�r|r�rxrxr�r�r|r�r|r�r|r|r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|rxrxrxrxrxr|r�rxr|r|rxr|r�rxr|r�rxr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rxrxr|rxrxrxr|r|r|rxrxr�r|r�r|r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rxr�r�r|r�r|rxrxr|r|r�r|r|rxr�r|r�r�r�r|r|rxr|r�r|r|r|r|r|rxr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rxrxrxrxrxr�r�r�r�r�r|r|r�r�rxr�r�r�rxr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r
�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�rxrxrxr|r�r�r�r|r�r�r�r�r|r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r|rxr|r|r|r�r|r|r|r�r|r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|rxrxrxrxr�r�r�r�r�r�r�r�r�rxr�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r|rxr|r|r|rxr�r|r|r|r|r|r|r|r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rxrxrxr|r|r|r|rxr|r|r�r�r|r|r|r|r�r�rxr�r|r�r|r�r�r�r�r�r�r�r|r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�rxr|r|r|r|r�r|r�r|r�r|r�r�r�r|r�r|r|r�r�r|r|r�r�r�r�r|r�r�r�r�r�r�r�r�r�r|r�r�r�r|r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rxr|r|r|r�r�r�r|rxr�r�r�r�r|r�r|r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|rxr|rxr|r�r|r|r|r|r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r|r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rxr�r�r�r�r|r|r|rxr|r|r|r|r|r|r|r�r�r�r|r�r|r�r�r�r|r|r�r�r�r�r|r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|rxr|rxrxr|r�r�r�r�r�r�r�r�r|r�r�r�rxr�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|rxrxrxrxr�r|r|r|r|r�r�r�r�r|r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|rxrxrxr|r�r�r�r�r|r|r�r�r�r|r�r�r�rxr�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rxr|rxr|r�r�r�r|r|r|r�r�r�r|r�r�r�r�r�r�r�r�r�rxr�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rxr�r|r�r�r|r|r|r|r|r|r�r|r|r�r�r�r�r�r|r|r|r�r�r�r�r|r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rxr�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rxrxr|r|r�r�r�r|r|r�r�r�r�r|r�r�r
�r|r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|rxr|rxr|r�r�r�r�r�r�r�r�r�r|r�r|r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r|r�r�r�r�r|r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rxr|r|r|r�r�r�r|r|r�r�r�r|r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|rxr�r|r|r|r|r|r|r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r|r�r�r�r�rxr|r�r|r�r|r|r�r�r�r�r�r|r�r�r�r|r�r�r�r�r�r|r�r|r|r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r|r|r|r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r|r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r|r|r|r�rxr�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r|rxr|r|r�r�r�r�r|r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r|r�r�r|r�r|r|r|r|r|r�r|r|r�r�r�r�r�r�r|r|r|r�r|r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r|r|r|r�r�r�r|r|r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r|r|r|r�r�r�r|r|r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r|r|r|r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r|r|r�r�r|r�r|r�r�r�r�r�r|r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r
�r�r�r�r�r�r�r|r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�)
ZKOI8R_char_to_order_mapZwin1251_char_to_order_mapZlatin5_char_to_order_mapZmacCyrillic_char_to_order_mapZIBM855_char_to_order_mapZIBM866_char_to_order_mapZRussianLangModelZ
Koi8rModelZWin1251CyrillicModelZLatin5CyrillicModelZMacCyrillicModelZIbm866ModelZIbm855Model�r�r��'/usr/lib/python3.6/langcyrillicmodel.py�<module>s
_vendor/chardet/__pycache__/langthaimodel.cpython-36.pyc000064400000055420151733136220017271 0ustar003

�Pf,�@sd�Zd�Zeed�d�d�d�d��Zd�S)�������j�k�d����e�^���l�m�n�o����Y�_�p�q������@�H�I�r�J�s�t�f�Q���u�Z�g�N�R�`���[�O�T�h�i�a�b�\�������������X���������������v���������c�U�S��������������������������������K���4�"�3�w�/�:�9�1�5�7�+���,��0����'�>��6�-�	���=�����*�.���L��B�?��
��$��
�(�� �#�V�������������)��!��2�%���C�M�&�]���D�8�;�A�E�<�F�P�G�W�����g��@��?FzTIS-620ZThai)Zchar_to_order_mapZprecedence_matrixZtypical_positive_ratioZkeep_english_letterZcharset_nameZlanguageN(rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8rrrrrr9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rr(r�r�rrrrrrrrr�r�rrrrr�rrrrr�rrrrrrrrrrrrrrrrr�r�rrrrrrr�rrrrrrrrr�rrrrr�r�r�r�rrr�rrrrr|rrrrr�r�r|rrrrrrrrr�r|r�r|r�r�rrr|r�r|r|rrr�rrrrr|rrr�r�rrrrr�rrrrr�rrrrrrrrrrrrrrrrrrr�rrr|rrr�r|r|r|rrr�r|rrr�r�r�r�r�r�r�r|rrr�r�rrr|r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrr|rrrrrrrrrrrrrrrrrrrrrrr|r|r|r|r|r|r|rrrrr|rrr|rrrrr|r|r|rrr�r|rrr�rrrrr|r|r�r|rrrrr�r|r�r�rrr�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrr|r|rrrrrrrrr�r|rrrrrrrrrrr|r|r|r|rrrrr|r|rrrrr|r|rrr|rrr|r|rrrrr�r|rrr�r|r|rrrrr�r�r|r�r�r�rrr�r|r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrrrrr|r|rrrrrrrrr|rrr|r|rrrrr|r|rrr|r|r|r|r�r�rrr�r|r�r�rrr|r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrr|rrr|rrrrr|r|rrr|rrrrr|rrr�r�r|rrr|r|r|rrr|r|r|r|r|r�r|r�r|r|r�r�rrrrr|r�r�r�r|r|r�r�rrr�r�r�r�r�r�r�r�r�r�r|rrr�r�r|r�r�rrrrr|rrrrr|r�r�rrrrr�rrrrr�r|r|rrr�r|r|r�r�r�r�r|r|r|r�r|r|r�r�r�r|r�r�r|r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrr|rrrrr|r�r�rrrrr�r|rrr�r|r�r|r|r|r|r�r|r�r�r|r|r|r�r|r|r�r�r�r|r�r�r|r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrr|rrr|rrr|r�r|r|r�rrr|r�rrr|r�r|rrr|r|rrr�r|rrr|r|r�r|r|r|r|r�r|r|r�r�r�r�r|r�r�r|r�r�r�r�r�r�r�rrr�r�r�r�r�r�r�r�r�r�r�r�r�rrrrr|rrrrr|rrr|r|r|rrr|r|rrr|r|r�r|rrr|r|rrr�rrr|r|r|rrr|r|r|rrrrr|r�rrr�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�rrr�rrrrrrrrrrr�r�rrr�r|r|rrrrrrrrrrr�r�r�r�r�rrr�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r|rrr�r�r�rrr�r|r�r�r�r�r�rrr�r�r�r�r�r�r�r�r|r�rrrrrrrrr�r�r|rrr�r�rrr�rrrrr|rrrrrrrrrrr�r�rrrrrrr�r�r�rrrrr�r�rrr�r�r�r�r|r�r�r|r�r�rrr�r�r�r�r�r|rrr�r�r�r�r�r�r�r�r�r�r�rrrrrrrrr|rrrrrrrrrrrrrrr�r|r�rrrrr|r|r�r|r|r|rrr�r�r|r�r|r�r|r�r|r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrr
�r|r�r|rrrrrrr�r|r�r|r|r�r|r�rrr|r|r�r|r�r�r�r|r|r�r�r|r�r|r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrr|r�rrrrr�r�rrr�r|rrr�r�rrr|r�r�r|r�r|r|rrr|r�r�r�r�r�r|rrr�r�r�rrr�r|r�r|r�rrr�r�r�r�r�rrr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrr�r�rrr|rrrrrrr�rrr|r�rrr|r�rrr|r|r|r|r�rrrrr�r|r�rrr�r|rrr�r|r�r�rrr|r|r|r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|rrrrr|rrr|rrrrr|rrr|rrr|rrrrr|r�r�rrr|r|r|r�r|r|r|r�r|r|r�r|r�r�r|r|r|rrr�r�rrr�r�r�r�r�r�r�r�r|r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrr|rrr|r|r�r�rrr|rrr|rrr|r�rrr|r|r�r|r�r|r|r|r�r|r|r|r|r�rrr|r�r|r|r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrrr|rrr�r|rrrrr|r|rrr�r�r�r|r�rrrrr|r|rrr�r�r�rrr�r�r�r�rrr�r�rrrrr�r|r�r|r�r�r�rrr|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrr|rrr|rrrrr�r�rrr�r�r|r�r|r�r�rrr�r�r�r|rrr�r�r�r�r�r�r�r�rrr�r�r|r|r|r|r�r�r�r�r�r|r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrr|r|r�r�r|r�rrrrr|rrr|r|rrr|r|rrr�r|r|r�r|r�rrr|r�r|r|r|r|r|r�rrr|r�r|r|r|r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrrrrrrrrr�rrrrr�r|r�r�rrr|r�r�rrr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrr|r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrr�r|r|r|rrr�r�r�rrr�rrr|r�rrr|r|rrrrrrrrrrr�r�r|r|r|r�r|r|r�r|r�r|rrr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrr�r|rrr�rrrrr|rrrrr�rrrrr�rrr|r|rrr|rrrrrrr�r�r|r|rrr�r�r�r�rrr�r�rrr�r�r�r|r|r�r�rrr�r�r|r|r|rrr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrr|rrrrr|r�rrrrr|r|rrr�rrr|r�rrr|r�r�r|r|r�r|rrr|r�r�rrr�r�r�r�rrr�r�r|rrr�rrr�r�rrr�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrr�rrr|r|r|r�r|r�r�rrr�r�rrr�rrr�r�r|r�r�r�r�r|r�r�r�r�r|r�r�r�r�r|r�r�r�rrr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrr�r�r�r�r�r�rrrrrrrrr|r|r|r|r|r�rrr�r�r�r|r�r�r�r|r�r|r�rrr|r�r�rrr�r�r�r�r�rrr�r�r|rrr�r�r�rrr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|rrr�rrrrr�r|r�r�r�r�r�r�r�rrr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|rrr�rrr�r�r�r|r�r�r|r�rrrrr|rrrrrrr|rrr�r�r|r|r|r�r�r�r|r|r�r�r�r�r�r�r�rrr�r
�r�r�r|r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�rrr�r|r�r�r�r�r�r�r�r�r�r�r�r|rrr�rrrrr�r�r�r�rrr�r�r�r�r�r�r�rrr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrr�r|rrr�r|rrr�r�rrr�r|r|r�r�r|r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrr�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrr|r�r�r�r�r�rrr�r|r|r|r|r|r|r�r�r�r�r�rrr�r�r�rrr�r�r�r�r�r�r�r|r�r�rrr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�rrr�r|r|r�rrrrr|rrrrr�r�r�r�r|r|r�r|r�rrrrr�r�r�rrr|r�r�r�r�r|r�r�r�r�r�r�r�r�r|r�r�r�rrr�r�r|r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrr�r�r�r�r�r�rrr�r�rrr�rrr�r�r�r�r�rrr|r�r�r�rrr�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�rrrrr�rrr|r�rrrrr�r|r|r�r�r|r�r�r�r|r�r�r�r�r�rrr�r�r�rrr�r�r�r�rrr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrr�r�r|r�rrrrrrr|r|r�r�r�r�r�rrr�r�r�r|r|r�r�r�r�rrr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrr�r|rrr�r|r�r�r|r�r�rrr�r�r�r|r�r�r�r�r�rrr�r�rrr�r�r�r|r|r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrr�r�rrr�r|r�r�r|r|r�r�r|r�r�r�r�rrr�r|r�r�r�r�r|r�rrr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrr�r�r�r|r|r�r�r�r|r�r|r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�rrr�r�r�r�r�r�r�r�rrr|r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�rrr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|rrr|r|r�r�r�r�r�r�r�r�r|rrr|r�r|r|rrr�r�r�r|rrr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrr|r|r�r�r�r�r�r�r|r�r|r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�rrr�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrr�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�rrr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrr�r�r�r�r�r�rrr�r�r�r�r�r�r�rrr�r�r�r�r�r�r�r�r�r�rrrrr�r�r�r�r|rrr�r�r|r�r�r�r�r�r�r
|r�r�r�r�r�r|r�r�r�r|r�r�r�r�r|r�r�rrr�r�r�r�rrr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrr�r�r�r�r�r�r�r�rrr�r�r�rrr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrr|r�r�r�r�r�r�r�r|r�r�r�r�r�r|r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�rrr�r�r�r�r�r|r�r�r�r�r�r�r�r�r|r�r�r�r�r�rrrrr�r�r�r�r|r�r�r�r|r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r|r|r�r�r�r�r�r�r�r�r�r�rrr�r�r�r�r�r�r�r�r�rrr�r�r�r�r�r�r�r�r|r�r�r�r�r�rrr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|rrr�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r|r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�rrr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�rrr�r�r�r�r�r�r�r�r�r�r�r�r�rrr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r|r�r�r|r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�rrr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r|r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r
�r�r�rrr�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�)ZTIS620CharToOrderMapZ
ThaiLangModelZTIS620ThaiModel�r�r��#/usr/lib/python3.6/langthaimodel.py�<module>%s*
_vendor/chardet/__pycache__/eucjpprober.cpython-36.pyc000064400000004436151733136220017002 0ustar003

�Pf��@s`ddlmZmZddlmZddlmZddlmZddl	m
Z
ddlmZGdd�de�Z
d	S)
�)�ProbingState�MachineState)�MultiByteCharSetProber)�CodingStateMachine)�EUCJPDistributionAnalysis)�EUCJPContextAnalysis)�EUCJP_SM_MODELcsPeZdZ�fdd�Z�fdd�Zedd��Zedd��Zd	d
�Zdd�Z	�Z
S)
�EUCJPProbercs4tt|�j�tt�|_t�|_t�|_	|j
�dS)N)�superr	�__init__rr�	coding_smr�distribution_analyzerr�context_analyzer�reset)�self)�	__class__��!/usr/lib/python3.6/eucjpprober.pyr%s

zEUCJPProber.__init__cstt|�j�|jj�dS)N)r
r	rr)r)rrrr,szEUCJPProber.resetcCsdS)NzEUC-JPr)rrrr�charset_name0szEUCJPProber.charset_namecCsdS)NZJapaneser)rrrr�language4szEUCJPProber.languagecCs6x�tt|��D]�}|jj||�}|tjkrN|jjd|j|j	|�t
j|_Pq|tj
krdt
j|_Pq|tjkr|jj�}|dkr�|d|jd<|jj|j|�|jj|j|�q|jj||d|d�|�|jj||d|d�|�qW|d|jd<|jt
jk�r0|jj��r0|j�|jk�r0t
j|_|jS)Nz!%s %s prober hit error at byte %s�r���)�range�lenrZ
next_staterZERRORZlogger�debugrrrZNOT_MEZ_stateZITS_MEZFOUND_ITZSTARTZget_current_charlenZ
_last_charr�feedr
�stateZ	DETECTINGZgot_enough_data�get_confidenceZSHORTCUT_THRESHOLD)rZbyte_str�iZcoding_stateZchar_lenrrrr8s4




zEUCJPProber.feedcCs|jj�}|jj�}t||�S)N)rrr
�max)rZcontext_confZdistrib_confrrrrYs

zEUCJPProber.get_confidence)�__name__�
__module__�__qualname__rr�propertyrrrr�
__classcell__rr)rrr	$s!r	N)ZenumsrrZmbcharsetproberrZcodingstatemachinerZchardistributionrZjpcntxrZmbcssmrr	rrrr�<module>s_vendor/chardet/__pycache__/mbcsgroupprober.cpython-36.opt-1.pyc000064400000002024151733136220020623 0ustar003

�Pf��@s�ddlmZddlmZddlmZddlmZddlm	Z	ddl
mZddlm
Z
ddlmZdd	lmZGd
d�de�ZdS)
�)�CharSetGroupProber)�
UTF8Prober)�
SJISProber)�EUCJPProber)�GB2312Prober)�EUCKRProber)�CP949Prober)�
Big5Prober)�EUCTWProbercseZdZd�fdd�	Z�ZS)�MBCSGroupProberNcsDtt|�j|d�t�t�t�t�t�t�t	�t
�g|_|j�dS)N)�lang_filter)
�superr�__init__rrrrrrr	r
Zprobers�reset)�selfr)�	__class__��%/usr/lib/python3.6/mbcsgroupprober.pyr*s
zMBCSGroupProber.__init__)N)�__name__�
__module__�__qualname__r�
__classcell__rr)rrr)srN)ZcharsetgroupproberrZ
utf8proberrZ
sjisproberrZeucjpproberrZgb2312proberrZeuckrproberrZcp949proberrZ
big5proberr	Zeuctwproberr
rrrrr�<module>s_vendor/chardet/__pycache__/langhungarianmodel.cpython-36.opt-1.pyc000064400000060307151733136220021257 0ustar003

�Pf01�@s4d�Zd�Zd�Zeed�d�d�d�d��Zeed�d�d�d�d��Zd�S)�������(�6�-� �2�1�&�'�5�$�)�"�#�/�.�G�+�!�%�9�0�@�D�7�4���������	����
����C�
�����A�>����������������������������������������������K�����������������O���������������������������������3�Q���N���������,�������=�����������:���B�;�������<�E�?�������R��J���F�P���H����S�M�T��L�U�����I�*������8���V�W�g��(��P�?Tz
ISO-8859-2Z	Hungarian)Zchar_to_order_mapZprecedence_matrixZtypical_positive_ratioZkeep_english_letterZcharset_nameZlanguagezwindows-1250N(rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8rrrrrr9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rr(rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr	r
rrr
rrrrrrrr�rrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8rrrrrr;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNr}rOr�rPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r�rarbrcrdrerfrgrhr{rjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr�r|r�r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r`r�rr�r�r�r�r�r�rir�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rr(r�r2r2r2r2r2r2r2r2r2r2r2r2r2r2r2r#r2r2r2r2r2r2r2r2r2r2r2r2r2r2r2r2r2r2r2r2r2r2r2r2r2rr2r2r2r2r2r2r2r2rrr2r2r#r#rrrrrr#rr2rrr2r2r2r2r2rr2r2r2r2r2r2r#rr2r2r2r2rr2r2r#r#r2r2r�r#r#r#r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rr�r2rr#r2r2r2r2r2rr2r2r2r2r2r#r#rr2r2r2r2r2r2r2r#r#r2rr�r#r#r#r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r#r�r2r2r2r2r2r2r2r2r2r2r2r#r#rr2r2r2r#r2r2r2r2r2r#r2r2rrr�r2rr2r�r�r�r�r�r�r�r�r�r�r2r�r�r�r�r�r�r�r�r�r�r�r�r�rr�r�r�r�r�r�r�r2r2r2r2r2r2rr2r2r2rr2r2rr2r2r2r2r2rr2r2rrr2rr2rr�r2rrr�r�r�r�r�r�r�r�r�r�rr�r�r�r�r�r�r�r�r�r�r�r�r�r#r�r�r�r�r�r#r�r2r2r2r2r2r2rr2r2r2r2r2rr2r2r2r#rr2rrr2r#rr2r2rrr�r2r2r2r�r�r�r�r�r�r�r�r�r�rr�r�r�r�r�r�r�r�r�r�r�r�r�r#r�r�r�r�r�r�r�r2r2r2r2r2r2r2r2r2r2rrr2r2r2r2r2r2rr2r2r2r2rr2r2r2r2r�rr2rr�r�r�r#r#r�r�r�r�r�r2r�r�r�r�r�r�r�r�r�r�r�r�r�r#r�r�r�r�r�r�r�r2r2r2r2r2r2r2r2r2r2r2r#r#r#r2r2rr#r2rrr2rr#r2rrr#r�r2r2r#r�r�r�r�r�r�r�r�r�r�r#r�r�r�r�r�r�r�r�r�r�r�r�r�r#r�r�r�r�r�r�r�r2rrr2r2r2r2r2r#rr2r2r2r2r#rr#r2r2r2r2rrr2r#r#r2rr�r#r#r#r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r#r�r�r�r�r�r�r�r�r�r�r�r�r�r�r#r�r2r2r2r2r2r2r2r2rrr2r2r2r2r2rr#r2r2r2r2r2rrr#r2r2r2r�r#r#rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r#r�r�r�r�r�r#r�r2r2r2r2r2r2r2r2r2r2r2r2r2r2r2r2rr2r2r2rr2r2rr2r2r2rr�r2rr2r�r�r�r�r�r�r�r�r�r�rr�r�r�r�r�r�r�r�r�r�r�r�r�rr�r�r�r�r�r#r�r2r2r2r2r2r2rr2r2r2rr2rr2r2r2r#r2rrrr2r#r#r2r2r#r#r�r2r2rr�r�r�r�r�r�r�r�r�r�rr�r�r�r�r�r�r�r�r�r�r�r�r�r#r�r�r�r�r�r�r�r2r2r2r2r2r2r2rr2r2r2rr2rr2r2r2rr2r2r2r2r2r#rr2rrr�rrrr�r�r�r�r�r�r�r�r�r�rr�r�r�r�r�r�r�r�r�r�r�r�r�r#r�r�r�r�r�r�r�r2r2r2rrrr2r#r2r2rrr#r2r2r2r#r#r2r#rr2rr2rrrr#r�rrrr�r�r�r�r�r�r�r�r�r�r#r�r�r�r�r�r�r�r�r�r�r�r�r
�rr�r�r�r�r�r�r�r2r#r#r2r2r2r2r2r#rr2r2r2r2r#rr#r2r2r2rrr2rr#r�r2rr�r#r#r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r2r#r#r2r2r2r2r2r#rr2r2r2r2r#r#r�r2r2r2r2r�rr2r�r�rr#r�r#r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r2r2r2r2r2r2rrr2r2rrrrr2r2r�r#rr2rr2rrr2rr#rr�rrrr�r�r�r�r�r�r�r�r�r�r#r�r�r�r�r�r�r�r�r�r�r�r�r�rr�r�r�r�r�r�r�r2r2r2r2r2r2r#rr2r2r2rr#rr2r2rrrr2rr2r2r#r2r2r#r#r�rr2rr�r�r�r�r�r�r�r�r�r�rr�r�r�r�r�r�r�r�r�r�r�r�r�r#r�r�r�r�r�r�r�r2r2r2r#rrrrr2r2r2r#r#r#r2r2r#r#r2r#r#r2rr#rr2r#r#r�rrrr�r�r�r�r�r�r�r�r�r�rr�r�r�r�r�r�r�r�r�r�r�r�r�r#r�r�r�r�r�r�r�r2r2r2rr#rr#r#r2r2r#r#r#r#r2r2r#r#rrr#rr#r#rrr#r#r�rrr#r�r�r�r�r�r�r�r�r�r�r#r�r�r�r�r�r�r�r�r�r�r�r�r�r#r�r�r�r�r�r�r�r2r2r2r#r#rr#r#r2r2r#r�r#r#r2r2rr�r#r#rr2r#r�rrr#r�r�r#r2rr�r�r�r�r�r�r�r�r�r�r#r�r�r�r�r�r�r�r�r�r�r�r�r�r#r�r�r�r�r�r�r�r2rr#r2r2r2r2r2r#rr2rr2r2rr#r#r2rr2rr#rrr�r#rr#r�r�r#r#r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r#r�r2r2r2r2rrrrr2r#rrr#r#r2r2r�r2rr#rr2rr#r2r2r#r#r�rr#r2r�r�r�r�r�r�r�r�r�r�r#r�r�r�r�r�r�r�r�r�r�r�r�r�r#r�r�r�r�r�r�r�r2r2r2rrrr2rr2r2r2rr#r#r2r2r#r#r#rrr2rr2rrrr#r�rrr#r�r�r�r�r�r�r�r�r�r�r#r�r�r�r�r�r�r�r�r�r�r�r�r�r#r�r�r�r�r�r�r�r#r�r�r2r2r2r2r2r�r�r2r2rr2r�r�r�rr2r2r#r�r#rr�r�r#r#r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r2r#rr2r2r2r2r2r#rr2r2rrr#r#r�r2r2rrr#rrr#r�rrr�r#r#r#r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r2r2rrr#r2r#rr2r2rrr#r#rrr#r#r#r#r2rr#r#r#r#rr#r�r#rr#r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r#r�r�r�r�r#r�r�r�r�r�r�r�r�r�rr2r2r#r#r#r#r#r2r2r2r�r#r#r2r2r#r#r#r#r#rrr�r2r#r#rr�rr#r#r�r�r�r�r�r�r�r�r�r�r#r�r�r�r�r�r�r�r�r�r�r�r�r�r#r�r�r�r�r�r�r�r2r#r�r#rr#rrr�r#rr2r#rr�r�r�rr#r#r#r#r#rr�r�r#r#r�r�r�r�r#rr#rrrr#rr#rr�rr�rrr#r#rr#r#rr#r#r#r�r#r�r�r�r#r#r�r#r#r#rr2rr2r2r�r#rrr2r#r�r#r�rr#rrr�r#r#r�r�r#r#r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r#r�r�r2r2rrr#r�r�r2rr2rr�r�r�r#r#r2r�r�r#r#r�r�rr#r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�
r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r2r#r#rrr2r2r#r�r#r2rr2r#r#r#r�r#r#r#r#r#r2r#r�r�rrr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r2r#r#r#rrrr#r�r#rr2r2rr�r�r�rr#r#r#rr#r#r#r�r#r#r#r�r�r�r#rrrrrr#r#r#rr�rr#r#r#r#r#rr#r#r#r#r#r#r�r#r#r#r�r�r#r#r2rrr#r�r�r#r#rrr�r2r�r#rr#r#r�r�r#r#r#r�r#r#r#r#r�rr#r#r#rrr#r#r#rr#rr#r#r#r#r#r#r#rr#r#r#rr2r#r#r#r#r#r#r#r#r#r�r#rr2r2r�r#r�r�r�r2r2r#r�r�r#rrr#r�r�r�r�rr�r�r#r#r#r�rr#r#r#rr#r#r#r#r#r#rr#r#r�r#r#r�r#r#r#r�r#rr#r#r�r#r#r#r#r#r#r#r�r#rr2r2r�r#r�r�r�rrr�r�r�r�r#rrr�r�r�r�r#r�r�r#r#r�r�rr�r#r�rr#r#r#r#rr#r#r#r#r#r#r#rr#r#r#r#r#r#r#r#r#rr�r#r#r#r#r#r�r#r2rrr�r#r�r#r�rr2rr�r�r#rrr#r�r�r#r#r#r�r�rr#r�r#rrr#r#rr#r#r#r#r#r#rr#r#r#r#r#r#r�rr#r�r#r#r�r#r#r#r�r#r#rr#r#r�r#rrrr�r�r#r�r�rrr#r#r�r�rr#r#r�r�r�r#rr�r�rr#r�r�rr#r#r#rr#r#r#r#rr#rr#r#r#rrr#r#rr#r#r#rr#r#r#r#r#r#r#r#r#r#r�r#r#rr2r�r�r�r#r�r2rr#r�r�r#rr#r#r�r�r�r�rr#r�r#r#r�r�rr#rr#r#r#r�r�r�r#r�r#r#r#r#r#rr�r�r#r�r�r�rr�r�r#r#r#r#r#r#r#r#r�r#r2r�r�rr#rrr#r�r�rr#rrr�r�r�rr#r#r#r�r#r#r�r�r#r#rr�r�r�r#rr#rrr#r#rr#rr�r#r#r#r#r#r#r#r#r#rr#r#r�r�r#r#r#r#r�r�r#r#r2rr�r�r�r#r�rrrr�r�r�rrr#r�r�r�r�r2r#r#r#r#r�r�rr#r#r#rr#r�r#r#r#r�r#r#r#r#r#r#r#r�rr#r�r�r#r�r#r#r�r#r#r#r#r#r#r�r#rr2rr�r�r�r#r�rrr�r�r�r�rr#r#r�r�r�r�rr#r�r#r#r�r�rr#r#r�rr#r#r#r#rr#rr#rr�r#r#r#r�rr#r#r#rr#r#r#r#r�r#r#r#r#r#r�r#r2r#r#rrrr2rr#r#rrr#r#r�r#r�rrr#r#r#r#r#r�r�r#r#r�r#r#r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrr�r�r�r�r�rrr�r�r�r�rrr#r�r�r�r#r#r�r�r#rr�r�rr#r#r#rrr#r#r#rr#rr#r#r�r#r#r#r#rr#r#r#rr#r#r#r#r�r#rr#r#r#r�r#r#r�r�r#rr2rr#r�r�rr�r#r#r�r�r�r#r#r#r#r�r#r#r�r�r#r�r�r�r�r�r#rr#rr#rr#r#r#rr�rr#r#r#r�r#rr�r�r#r#r#r�r�r�r�r�r�r�r�r�rr2rr�r�r�r�r�r#r#rr#r�r�r#r#r#r�r�r�r�rr�r�r#r#r�r�rr#r#r#rr#r#r#r#r#r#rr#r�r#r#r#r#r�rr#r#r#r#r#r#r�r#r�r#r#r#r#r#r�r#r#rrr�r#r#r#r�rrrr�r�r�r2rr#r�r�r�r#r#r�r�r#r#r�r#r#r#r�r�r#r#r�r#r#r#r#r#r#r#r#rr#r#r#r#r#r#r#rr#r#r#r�r�r#r#r#r�r#r�r#rr#r�rr#r#rrr#r#rr#r#r#r�r�r�r#r#r�r#r#r#r#r�r�r#r#r#r�r�r�r#rrrrrr#r#r#rr�rr#r#r#
r#r#r#r#r#r#r#r#r#r�r#r#r�r�r�r#r�r#rr2r�r�r�r#r�rrr�r�r�r�rrr�r�r�r�r�r#r�r�r#r�r�r�rr�r#r�rr#r#r#r#r#r�rr�r�r�r#rr#r#r#r#r�r#rr�r#r�r#r�r#r#r#r�r#r�r#rrrr�r�r�r#r�rr#rr�r�r�r#r#rr�r�r�r�r#r�r�r#r#r�r�rr#r�r#rr#r#r#r#r#r#r#r#r#r#r#r#r#r#r#r#r#r#r#r#r#r#rr�r#r#r#r#r#r�r#r#rrr�r�r�r#r�rrrr�r�r�r#r#r�r�r�r�r�r#r#r�rr�r�r#r#r#r�r#r#r�r#r#r#r#r#r#r�r#r#r#r#r�r�r#r�r�r#r#r�r#r�r#r#r#r#r#r�r�r�r#r#r�r�r#r�r#rr#r�r�r#r#r#rr�r�r�r#r#r�r#r�r#r#r�r�r#r�r�r�r�r�r�rr#rr#r#r#r#r#rr�rr�r#r#r�r#rr#r�r#r#r#r�r�r�r�r�r�r#r�r�rr#r#r�r#rr�r�r#r#r#r�r�r�r#r#r�r�r�r�r�r#r�r�r#r�r�r�rr#r�r#rrr#r#r#r#r#rr#r#r�r#r#r#r#rr#r#r#rr#r#r�r#r�r#r#r#r#r#r�r#r#rrr�r�r�r�r�r#r#r�r�r�r�rr#r�r�r�r�r�rr�r�rrr�r�rr�r�r#rr#r#r#r#r#r#r#r�r#r#r�r#r#r�r#r�r�r�r#r#r#r#r�r�r#r#r#r#r�r�r#r#r#rr�r�r2r#r�rr#r#r#r�r�r#r#r#r�r�r�r#r#r�r�r�r#r�r�r#r�r#r�r#rr#r�r#r#r#rr#r#r�r#r#r#r#r#r�r�r�r#r#r#r#r#r�r#r�r�r�r#r�r�rr#r#r�r�r�r�r�r#r�r�r�r�r�r�r�r�r#r�r#r�r�r�r#r�r�r�r�rr�r�r�rr#r#r#r#r#r#r#r#r#r�r#r#r#r#r#r#r#r#r#rr#r#r�r�r#r#r#r#r#r�r#rr#r#r#rr#r#r#r�r#r#rr#r�r�r�r�r#r#r#r#r�r#r�r�r�r�r#r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r#r#r�r#r#r#r#r#r�r�r#r#rr#r�r�r�r#r#r�r�r�r#r#r�r�r#r�r#r�r�r�r#rr#r#r#r#r#r#r#r#r�r#r�r#r#r#r#r#r#r�r#r#r#r�r�r�r�r�r�r#r�r�rr�r�r�r#r#r#r#r�r�r#r#r�r�r�r�r�r#r#r#rr�r�r#r�r�r#r�r#r�r�r�r�r#r#r#r#r#r#r#r#rr�r#r#r#r#r�r#r#r#r�r#r#r#r�r�r�r�r�r�r�r�r�r#r�r�r#r#r#r#r#r�r�rr#r�r#r�r�r�r#r�r#r�r�r�r�r�r�r#r�r�r�r�r�r�r#r#r#r#r#r#r�r#r#r�r#r�r#r#r�r#r#r�r�r#r#r#r�r�r�r�r�r�r�r�r�r#r�r�r#r#r#r�r�r�r�r#r�rr�r�r�r�r�r�r�r�r�rr�r�r�r�r�r�r�r�r�r�r#r#r#r#r#r�r�r#r#r�r#r�r#r�r�r#r#r#r�r#r#r#r�r�r�r�r�r�r�r�r�r�r�r�r#r�r�r�r�r�r�r#r#rr#r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r#r#r#r�r#r�r�r#r#r�r#r�r#r#r�r#r#r#r�r#r#r#r�r�r�r�r�r�r�r�r�rr#r#r#r#r#r#r#r#r#r#r�r�r#r#r#r�r�r#r�r�r#r�r#r�r#r#r#r�r�r#r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r#r�r�r#r#r#r#r�r�r�r#r#r#r�r�r�r�r#r#r#r�r�r�r�r�r�r�r�r�r�r�r�r�r#r#r#r#r#r#r�r#r#r�r#r�r#r�r�r#r#r�r�r#r#r�r�r�r
�r�r�r�r�r�r�)ZLatin2_HungarianCharToOrderMapZwin1250HungarianCharToOrderMapZHungarianLangModelZLatin2HungarianModelZWin1250HungarianModel�r�r��(/usr/lib/python3.6/langhungarianmodel.py�<module>#sV
_vendor/chardet/__pycache__/big5prober.cpython-36.opt-1.pyc000064400000002021151733136220017445 0ustar003

�Pf��@sDddlmZddlmZddlmZddlmZGdd�de�ZdS)�)�MultiByteCharSetProber)�CodingStateMachine)�Big5DistributionAnalysis)�
BIG5_SM_MODELcs4eZdZ�fdd�Zedd��Zedd��Z�ZS)�
Big5Probercs,tt|�j�tt�|_t�|_|j�dS)N)	�superr�__init__rrZ	coding_smrZdistribution_analyzer�reset)�self)�	__class__�� /usr/lib/python3.6/big5prober.pyr#s
zBig5Prober.__init__cCsdS)NZBig5r)r
rrr
�charset_name)szBig5Prober.charset_namecCsdS)NZChineser)r
rrr
�language-szBig5Prober.language)�__name__�
__module__�__qualname__r�propertyrr�
__classcell__rr)rr
r"srN)	ZmbcharsetproberrZcodingstatemachinerZchardistributionrZmbcssmrrrrrr
�<module>s_vendor/chardet/__pycache__/langbulgarianmodel.cpython-36.pyc000064400000060341151733136220020306 0ustar003

�Pf'2�@s4d�Zd�Zd�Zeed�d�d�d�d��Zeed�d�d�d�d��Zd�S)������M�Z�c�d�H�m�k�e�O��Q�f�L�^�R�n��l�[�J�w�T�`�o��s�A�E�F�B�?�D�p�g�\���h�_�V�W�G�t���U�]�a�q�����������������������������������������������������������������������i�������������-������ �#�+�%�,�7�/�(�;�!�.�&�$�)��'��"�3�0�1�5�2�6�9�=���C���<�8���	���������
����
������������K�4���*��>�������:��b�������x�N�@�S�y�u�X�z�Y�j�I�P�v�r�g! _B�?Fz
ISO-8859-5Z	Bulgairan)Zchar_to_order_mapZprecedence_matrixZtypical_positive_ratioZkeep_english_letterZcharset_nameZlanguagezwindows-1251Z	BulgarianN(rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8rrrrrr(r/r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrr(rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8rrrrrr>r?r@rArBrCrDrEr�rFrGrHrIrJrKrLrMr�r�r�r�r�r�rWrNrOrPrQrRrSrTrUr�rVrXrYrZr�r�r�r[r\r]r_r`r^r|r~r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r�r}rrr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rr�r�(r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r
�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r
�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r
�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r
�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�)ZLatin5_BulgarianCharToOrderMapZwin1251BulgarianCharToOrderMapZBulgarianLangModelZLatin5BulgarianModelZWin1251BulgarianModel�r�r��(/usr/lib/python3.6/langbulgarianmodel.py�<module>&sV
_vendor/chardet/__pycache__/__init__.cpython-36.opt-1.pyc000064400000001375151733136220017157 0ustar003

�Pf�@s8ddlmZmZddlmZddlmZmZdd�ZdS)�)�PY2�PY3)�UniversalDetector)�__version__�VERSIONcCsHt|t�s0t|t�s(tdjt|����nt|�}t�}|j|�|j�S)z�
    Detect the encoding of the given byte string.

    :param byte_str:     The byte sequence to examine.
    :type byte_str:      ``bytes`` or ``bytearray``
    z4Expected object of type bytes or bytearray, got: {0})	�
isinstance�	bytearray�bytes�	TypeError�format�typerZfeed�close)Zbyte_strZdetector�r�/usr/lib/python3.6/__init__.py�detects


rN)	�compatrrZuniversaldetectorr�versionrrrrrrr�<module>s_vendor/chardet/__pycache__/charsetgroupprober.cpython-36.opt-1.pyc000064400000004144151733136220021335 0ustar003

�Pf��@s,ddlmZddlmZGdd�de�ZdS)�)�ProbingState)�
CharSetProbercsReZdZd�fdd�	Z�fdd�Zedd��Zedd	��Zd
d�Zdd
�Z	�Z
S)�CharSetGroupProberNcs(tt|�j|d�d|_g|_d|_dS)N)�lang_filter�)�superr�__init__�_active_num�probers�_best_guess_prober)�selfr)�	__class__��(/usr/lib/python3.6/charsetgroupprober.pyr!szCharSetGroupProber.__init__csNtt|�j�d|_x.|jD]$}|r|j�d|_|jd7_qWd|_dS)NrTr)rr�resetr	r
�activer)r�prober)r
rrr'szCharSetGroupProber.resetcCs |js|j�|jsdS|jjS)N)r�get_confidence�charset_name)rrrrr1s
zCharSetGroupProber.charset_namecCs |js|j�|jsdS|jjS)N)rr�language)rrrrr9s
zCharSetGroupProber.languagecCs�xx|jD]n}|sq|jsq|j|�}|s*q|tjkr@||_|jS|tjkrd|_|jd8_|jdkrtj|_	|jSqW|jS)NFrr)
r
r�feedr�FOUND_ITr�state�NOT_MEr	Z_state)rZbyte_strrrrrrrAs$




zCharSetGroupProber.feedcCs�|j}|tjkrdS|tjkr"dSd}d|_x\|jD]R}|s>q4|jsV|jjd|j	�q4|j
�}|jjd|j	|j|�||kr4|}||_q4W|js�dS|S)Ng�G�z��?g{�G�z�?gz
%s not activez%s %s confidence = %s)rrrrrr
rZlogger�debugrrr)rrZ	best_confrZconfrrrrUs*


z!CharSetGroupProber.get_confidence)N)�__name__�
__module__�__qualname__rr�propertyrrrr�
__classcell__rr)r
rr s
rN)ZenumsrZ
charsetproberrrrrrr�<module>s_vendor/chardet/__pycache__/sjisprober.cpython-36.opt-1.pyc000064400000004470151733136230017602 0ustar003

�Pf��@s`ddlmZddlmZddlmZddlmZddlm	Z	ddl
mZmZGdd�de�Z
d	S)
�)�MultiByteCharSetProber)�CodingStateMachine)�SJISDistributionAnalysis)�SJISContextAnalysis)�
SJIS_SM_MODEL)�ProbingState�MachineStatecsPeZdZ�fdd�Z�fdd�Zedd��Zedd��Zd	d
�Zdd�Z	�Z
S)
�
SJISProbercs4tt|�j�tt�|_t�|_t�|_	|j
�dS)N)�superr	�__init__rr�	coding_smr�distribution_analyzerr�context_analyzer�reset)�self)�	__class__�� /usr/lib/python3.6/sjisprober.pyr%s

zSJISProber.__init__cstt|�j�|jj�dS)N)r
r	rr)r)rrrr,szSJISProber.resetcCs|jjS)N)r�charset_name)rrrrr0szSJISProber.charset_namecCsdS)NZJapaneser)rrrr�language4szSJISProber.languagecCsL�xtt|��D]�}|jj||�}|tjkrP|jjd|j|j	|�t
j|_Pq|tj
krft
j|_Pq|tjkr|jj�}|dkr�|d|jd<|jj|jd|d�|�|jj|j|�q|jj||d||d|�|�|jj||d|d�|�qW|d|jd<|jt
jk�rF|jj��rF|j�|jk�rFt
j|_|jS)Nz!%s %s prober hit error at byte %s�r�����)�range�lenrZ
next_staterZERRORZlogger�debugrrrZNOT_MEZ_stateZITS_MEZFOUND_ITZSTARTZget_current_charlenZ
_last_charr�feedr
�stateZ	DETECTINGZgot_enough_data�get_confidenceZSHORTCUT_THRESHOLD)rZbyte_str�iZcoding_stateZchar_lenrrrr8s6




zSJISProber.feedcCs|jj�}|jj�}t||�S)N)rrr
�max)rZcontext_confZdistrib_confrrrrYs

zSJISProber.get_confidence)�__name__�
__module__�__qualname__rr�propertyrrrr�
__classcell__rr)rrr	$s!r	N)ZmbcharsetproberrZcodingstatemachinerZchardistributionrZjpcntxrZmbcssmrZenumsrrr	rrrr�<module>s_vendor/chardet/__pycache__/gb2312freq.cpython-36.pyc000064400000112637151733136230016244 0ustar003

�Pf�P��@sdZdZ�d�Z�d�S(�g�������?i������<	�T���	�Q��
�
�����<�w��9	���	������Z����q���
�W
�y���e�o���v
����������L�B��Q
�����Q����E��f����������d�(�y���,�	���
�E�e��
�W�R����|	�R
��
���
�X
��
�j�E���+�P��������;���m���l���������e�H������^�k�6���"�F���
��
�
�^	���
������?�`�u�$�1��
����� �U���1����g�l����		��G���2���q	�� �P��L	�������y	����
���M���k��4	�������
�	�@	�`��5�����N
��7�3��n��u�G�&�b�h�0��M�N���	�
�Y	��
���	�
���
�&��
�Q�
�7�^�h����J��P�����r���������T	�-�G�,�u���	���$���<�B��$�>����x�L�W����n�
���T���Z���
��	�{���	����
�?�
�0�b�;�D�������C�S��U�>	�|�g����	��
�K���

�����%
�5
������l��h�
����?��� �U�����	��*�<
��
���
��
������
���
�!	�X
���	��Q�g������	���r�
����q��
���z
���	�3�������
�
�]�c�z���7���_���C��&�a�H�|
�l�
��=���I�P��
���~
�8�`��w	���
�T��
�������
�������[�q����	�g��|�C�	����N�I����5�
�(���/�H�K
�
�����9��I���|�������
���W�����^�~�����	��=�X��3�
�I����
������3�r������o������{��i�C���!�H��	������f��=
��1�[���|�D��%��t���
�� ��.����y��T���9
��
��#���J���n�����
���>�B�"����a�����M�K����W���	�	�!
�r	�F�+	��
�
����x
�H�J��������2�K���R
�$��9�
�t
���
����������������%�~��
�`	�������'�v������|��'���n
�*�����F��%��)�~��
���J�&���
�!�D���w��[���	���	�'	�	���k���	�
��
�����������h
�B�_��?��g�h���'��������������5�]�������������1	��)�a	�������;	����_����Q�/�u
��
�
�j�[���}��
�����	����Q�^
��G��z������V�O���������m�u�	�6�����-�$���R���}	�
�p�r�]
�
�D�������i������1�n������t�Y�G	���/�|�t�0�
������\��
�����v�����c����
�~�A�"��"	�!�8�C�8	���	�j�
�1��l�>��
��]
��
�~��L�(����K�d��v�D������%��	������	�,��2���
���j�-�.���m�K�����o�	�;�	����d�@�a�	�M	�1
�L�@�2��w��t�Q�
�/�� ���~�����
�/����
�_	�#���O��O�G���k�[�O�o����-
�
���	�!���I������{�r��.�h�
��T
����'�L
�a��
�@��������
�T�/
���������M���i
�l�����
�{�8�#�
����9��S�R�	�w��Z��0
�E�g�g���
���L�����p����S�
�������	�@��I	�L�;
�O�q����)	��:�F��5�J�d�A
���
�)
�E�L�Y�����
�m	������������c��R��!����
���(����]�I���
��	���e����������`�������
��[��C
�(�P�,��}�O	�'�L�\
�
������Z����s�'�	�U�������N
�����
���M��������������i���g�'����	���
��P�0�s	�i��p	�	�n������\�
���
�!�h����m�(�&�
�C��B�)�5	�������
�����>�������	���{�N�����E�3��J�#��c��
��$
���5��x��]�����k����������P
��
���r���L��z�d���#�
����[�S��h��G�����
�U�+��	�y��w�"��V�2
�&�z��	��	��J
�P	��	�������%	�Z�$�o�T�K�1�
��`�V����d�����+����A��������$��	�_�3�\���������b��f������
�A�n���x��F�
�O�[����(���
�#�7�2	�x��f	��	��
�!�6�{
����Y����+����� ������	�	�K�e����
�T�$����h��	�R���"���Z� 
�_��@���a�����3
�
�k�)�j����e��
�A�^���
�W	����h�:
�z
�)
��|�������8�j����
���3�	�
�z���p�y����2�o�D	�=���.��9�a�f��*�_��	�*����
�
���-��
�}��
�����*	�
�t��t�6
�q�������������S� 	���!�)��y�
�����j	��	���V�I���g
���L�����p�D�����%���M����$������E	�"�U	��e
�	�|��	�Q�������
�1
�������$� �]�4�&���z����`���w�
�����
�)�%
�&�����*���8�2�>
�S�
����1�#���
���#�	�
�������F
���,	�>�+�
���Z�3�q���
�$��
�V��	�;���N���m�K�W����5�
�	�R�4���I��D�������
���
������%�K�|���
������9��n	�����
�-�3�2
�p���?
��~��������C��[��
������3	�>���l�
�`�x	�������#
����0������	�L
���Q���R��?�(�m��`
���3����&
�
���s�D��0	�b	�f�@�	����
��=��
�<�������
���6����s������S�����
�8�Z
�����������
����
�����N��	��
�L�
�J	����	�b��g����[
������~��	�	����f�����
��`�4�=	�����l�?	��c�����i����������w����%���d	�^�+������Z��F�I
��t���	�/��	���y�*
�s�&�+����)����	����
��e	�����Q�]	��	�_�7	��0�c�u����L�k�
������#����������?�����
�n�
������	�����	�H��Y��u	���
�	��J���_�_�&���������Z�%��	�s���S����V���������1�'����F�\�
�|�	�W�����|��S�W��	�*�	�R�l�P�#���-�q
����	���4�������a�����@
���X�i�%����8
�3�,��}������d�x��	���#	��6��
��
�.���A��
���r
��
��R�d����+
������
�{�{�H�	�
���������c�	�
�����,��I
���c������=��J�.�,�>����L�e�]��
�C�����*�;�
�� ��p�
����R���9�
�
��
����?�:�	�����Q�'�s���w�[��
��-�
�)����M�������]�}�6�@�
��U����/���s
�	���k	�?���,
�=
�1�M����/�S
�d�����o
�6	�k�u����W�{��x�W���������x�^����[	�+�7�:���U���!����
�.�H�T���Y����	��@�"�C�s�	��	��G
�b��7�~�.
�U��q�	�����{�����Y�/�
����A�
���*�:�8�'��������A�_�0�������i��K��
���t�*����
�������	��
����D�����>����v��

��
�'��V�����
����k���\�����!����*�U
�������z��\�f
����	�
�����������������B	������{��=�
���&�
���V��	��"�!��
�	�����
�(	�=��
��
���9����4�a�����-�Q	��
���
�m�������C���7�}�q�D�4���u���	�����v���F	���u�a
��H�o��
�
	�l�K�A	�����	�z�4�	�!
�H
��	���5��j���`��
�B�:�O�a���#�����d���(�
�&
��^�m�g
�E
��o	���(�G�$�@�
�I���A���`�X��7���/��$�����h�����	�U�"�T�������m����!�������� ��M�	������	�2�q����9����������/�������p�@�X	��j��i
�^
�b��
���n��v������o�o���v�����0���9���$	���f�S�z�
��D�{���X�����V�3�Y�
����
�A�������q�U�
�:	��2�j����{	��	�F��
�	��	������c�i�R	�;�k�h�����
��Y�O�x��z�Z�<�v	��
�
�
���I���4
�M��������b��������C����
����O��8������	�.�]��V�c
��	����
�H	��T����	��>
�
���3�����,����f����l�T�W�H�.�'�>�p�����"��	�J���
��
�v�2����	�x�	�2���g� �4����
�
���{
�z	���	�
����	��������?�����	�J������
�	�8���E�j
�\��8�8
�� ����`��>�b�S����������0�����E����d����1�����n�@������T��	�����Q���C���K�;�k���������_�\	�e��w�������M��.	�r�a����
����
����g���'�
����	�T���4�o���	�X�y��x���	��
�
�U
�^�:�������"�#�������	������N�U���p��"
�-��	���:���
���	������	���W�
�G
�L��
�G�w�o�����M�r�����Q
���I�N�%�
�X����7
��
�a�����	�'
�i�h��}�N�y�?�t���D������l����	����e�J�s�2�*�y�����������q����5�}�s�$�����5
�B������#�d�s�}���~����b
��O
�/	�]����	�]�2����f�7���O�

�R��#
�	�[��E���f
�S
�	�d�F����C	�V
�>��	���B���	�1�6�m����	��������p
�����
��<�z�
�|
�X����s�e�]�����
��
��O���+��	�������:�	�A���������(
�
�m��4�P��	�n��
�o�o���m��y���8�	���<����<�c	�C�����&�w�
��	�$� 
�9�&����)�`�������G�b�E��:�Q�o�F�
�����	�
�F�|�;�
���3�4�%���:�	�c��p�6
�\�C��0�9
��������~	���"�h������	�)��	��
���*
���
�
�%��	�����
�.����9�	�����
���	��"�!��,
��g	�J�g�F
������
���;�i	�Z�E�^�A
���l�5�������4�'
���k�	��	�2���
��	�X�7���
�{���B�x�=�e�(�������'��������������D
�i�k�������-�,��
�e�7
������������n�}
�p�7�j��	��9���
�������0�P�����������O
�
���	���U�
����<
����
�J���t�N���)��.����*��A���r
�]�)������G��_
�p�k
���
	�%�
����
���>�b�_�^�?��:�Y�D�%����
�������Y��
��������l
�A���	��
�	�D�B�	�
���	�@
�(�v
������g��
�	�}�v����
����������������� �-�~��y�%�	��
�_���������j�M���G�+�	������	������3
�;�
�G�)��
���������
�
��,�I�	�����X�A�\������P�v���	�V��=�
�
�6�<���X��
�	��8�-	�Y
���������c�����9�	�-
�u���	��
�R�
���6���	��h	�}��?
�w
�����	�a�(���1��
���>�������]��
�X�r�P�0�,��+�j��`������.���
��&	�W�B
���|�7�U���	��W��Y��
���w
�
��������u���b��q
����b������=�$
��N�J����U�����	���i�K�,���4�r�{����H��p�5���=��	���E�9��;��<��5��
��i��\����p������r��N����^�����K����*��{�+
�	��*�8����	������j
��@�l	�6���2�8���	������'������B
����\�����S�V	��~���������
��V��
�z���^�	����_
�}
�G�q��I�����8�t�h�S	��}�R�	�9��S�n
�P��B�����s���X�}�f����������������n�I�^��T���x���r����_��b��O�P�Q�R�SN(�rrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r	r	r	r	r	r	r	r	r	r		r
	r	r	r
	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r 	r!	r"	r#	r$	r%	r&	r'	r(	r)	r*	r+	r,	r-	r.	r/	r0	r1	r2	r3	r4	r5	r6	r7	r8	r9	r:	r;	r<	r=	r>	r?	r@	rA	rB	rC	rD	rE	rF	rG	rH	rI	rJ	rK	rL	rM	rN	rO	rP	rQ	rR	rS	rT	rU	rV	rW	rX	rY	rZ	r[	r\	r]	r^	r_	r`	ra	rb	rc	rd	re	rf	rg	rh	ri	rj	rk	rl	rm	rn	ro	rp	rq	rr	rs	rt	ru	rv	rw	rx	ry	rz	r{	r|	r}	r~	r	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r
r
r
r
r
r
r
r
r
r	
r

r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r 
r!
r"
r#
r$
r%
r&
r'
r(
r)
r*
r+
r,
r-
r.
r/
r0
r1
r2
r3
r4
r5
r6
r7
r8
r9
r:
r;
r<
r=
r>
r?
r@
rA
rB
rC
rD
rE
rF
rG
rH
rI
rJ
rK
rL
rM
rN
rO
rP
rQ
rR
rS
rT
rU
rV
rW
rX
rY
rZ
r[
r\
r]
r^
r_
r`
ra
rb
rc
rd
re
rf
rg
rh
ri
rj
rk
rl
rm
rn
ro
rp
rq
rr
rs
rt
ru
rv
rw
rx
ry
rz
r{
r|
r}
r~
r
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r
r
r
r
r
r
r
r
r
r	
r

r
r
r

r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r 
r!
r"
r#
r$
r%
r&
r'
r(
r)
r*
r+
r,
r-
r.
r/
r0
r1
r2
r3
r4
r5
r6
r7
r8
r9
r:
r;
r<
r=
r>
r?
r@
rA
rB
rC
rD
rE
rF
rG
rH
rI
rJ
rK
rL
rM
rN
rO
rP
rQ
rR
rS
rT
rU
rV
rW
rX
rY
rZ
r[
r\
r]
r^
r_
r`
ra
rb
rc
rd
re
rf
rg
rh
ri
rj
rk
rl
rm
rn
ro
rp
rq
rr
rs
rt
ru
rv
rw
rx
ry
rz
r{
r|
r}
r~
r
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�)Z!GB2312_TYPICAL_DISTRIBUTION_RATIOZGB2312_TABLE_SIZEZGB2312_CHAR_TO_FREQ_ORDER�r�r�� /usr/lib/python3.6/gb2312freq.py�<module>*s�_vendor/chardet/__pycache__/langhungarianmodel.cpython-36.pyc000064400000060307151733136230020321 0ustar003

�Pf01�@s4d�Zd�Zd�Zeed�d�d�d�d��Zeed�d�d�d�d��Zd�S)�������(�6�-� �2�1�&�'�5�$�)�"�#�/�.�G�+�!�%�9�0�@�D�7�4���������	����
����C�
�����A�>����������������������������������������������K�����������������O���������������������������������3�Q���N���������,�������=�����������:���B�;�������<�E�?�������R��J���F�P���H����S�M�T��L�U�����I�*������8���V�W�g��(��P�?Tz
ISO-8859-2Z	Hungarian)Zchar_to_order_mapZprecedence_matrixZtypical_positive_ratioZkeep_english_letterZcharset_nameZlanguagezwindows-1250N(rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8rrrrrr9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rr(rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr	r
rrr
rrrrrrrr�rrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8rrrrrr;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNr}rOr�rPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r�rarbrcrdrerfrgrhr{rjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr�r|r�r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r`r�rr�r�r�r�r�r�rir�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rr(r�r2r2r2r2r2r2r2r2r2r2r2r2r2r2r2r#r2r2r2r2r2r2r2r2r2r2r2r2r2r2r2r2r2r2r2r2r2r2r2r2r2rr2r2r2r2r2r2r2r2rrr2r2r#r#rrrrrr#rr2rrr2r2r2r2r2rr2r2r2r2r2r2r#rr2r2r2r2rr2r2r#r#r2r2r�r#r#r#r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rr�r2rr#r2r2r2r2r2rr2r2r2r2r2r#r#rr2r2r2r2r2r2r2r#r#r2rr�r#r#r#r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r#r�r2r2r2r2r2r2r2r2r2r2r2r#r#rr2r2r2r#r2r2r2r2r2r#r2r2rrr�r2rr2r�r�r�r�r�r�r�r�r�r�r2r�r�r�r�r�r�r�r�r�r�r�r�r�rr�r�r�r�r�r�r�r2r2r2r2r2r2rr2r2r2rr2r2rr2r2r2r2r2rr2r2rrr2rr2rr�r2rrr�r�r�r�r�r�r�r�r�r�rr�r�r�r�r�r�r�r�r�r�r�r�r�r#r�r�r�r�r�r#r�r2r2r2r2r2r2rr2r2r2r2r2rr2r2r2r#rr2rrr2r#rr2r2rrr�r2r2r2r�r�r�r�r�r�r�r�r�r�rr�r�r�r�r�r�r�r�r�r�r�r�r�r#r�r�r�r�r�r�r�r2r2r2r2r2r2r2r2r2r2rrr2r2r2r2r2r2rr2r2r2r2rr2r2r2r2r�rr2rr�r�r�r#r#r�r�r�r�r�r2r�r�r�r�r�r�r�r�r�r�r�r�r�r#r�r�r�r�r�r�r�r2r2r2r2r2r2r2r2r2r2r2r#r#r#r2r2rr#r2rrr2rr#r2rrr#r�r2r2r#r�r�r�r�r�r�r�r�r�r�r#r�r�r�r�r�r�r�r�r�r�r�r�r�r#r�r�r�r�r�r�r�r2rrr2r2r2r2r2r#rr2r2r2r2r#rr#r2r2r2r2rrr2r#r#r2rr�r#r#r#r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r#r�r�r�r�r�r�r�r�r�r�r�r�r�r�r#r�r2r2r2r2r2r2r2r2rrr2r2r2r2r2rr#r2r2r2r2r2rrr#r2r2r2r�r#r#rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r#r�r�r�r�r�r#r�r2r2r2r2r2r2r2r2r2r2r2r2r2r2r2r2rr2r2r2rr2r2rr2r2r2rr�r2rr2r�r�r�r�r�r�r�r�r�r�rr�r�r�r�r�r�r�r�r�r�r�r�r�rr�r�r�r�r�r#r�r2r2r2r2r2r2rr2r2r2rr2rr2r2r2r#r2rrrr2r#r#r2r2r#r#r�r2r2rr�r�r�r�r�r�r�r�r�r�rr�r�r�r�r�r�r�r�r�r�r�r�r�r#r�r�r�r�r�r�r�r2r2r2r2r2r2r2rr2r2r2rr2rr2r2r2rr2r2r2r2r2r#rr2rrr�rrrr�r�r�r�r�r�r�r�r�r�rr�r�r�r�r�r�r�r�r�r�r�r�r�r#r�r�r�r�r�r�r�r2r2r2rrrr2r#r2r2rrr#r2r2r2r#r#r2r#rr2rr2rrrr#r�rrrr�r�r�r�r�r�r�r�r�r�r#r�r�r�r�r�r�r�r�r�r�r�r�r
�rr�r�r�r�r�r�r�r2r#r#r2r2r2r2r2r#rr2r2r2r2r#rr#r2r2r2rrr2rr#r�r2rr�r#r#r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r2r#r#r2r2r2r2r2r#rr2r2r2r2r#r#r�r2r2r2r2r�rr2r�r�rr#r�r#r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r2r2r2r2r2r2rrr2r2rrrrr2r2r�r#rr2rr2rrr2rr#rr�rrrr�r�r�r�r�r�r�r�r�r�r#r�r�r�r�r�r�r�r�r�r�r�r�r�rr�r�r�r�r�r�r�r2r2r2r2r2r2r#rr2r2r2rr#rr2r2rrrr2rr2r2r#r2r2r#r#r�rr2rr�r�r�r�r�r�r�r�r�r�rr�r�r�r�r�r�r�r�r�r�r�r�r�r#r�r�r�r�r�r�r�r2r2r2r#rrrrr2r2r2r#r#r#r2r2r#r#r2r#r#r2rr#rr2r#r#r�rrrr�r�r�r�r�r�r�r�r�r�rr�r�r�r�r�r�r�r�r�r�r�r�r�r#r�r�r�r�r�r�r�r2r2r2rr#rr#r#r2r2r#r#r#r#r2r2r#r#rrr#rr#r#rrr#r#r�rrr#r�r�r�r�r�r�r�r�r�r�r#r�r�r�r�r�r�r�r�r�r�r�r�r�r#r�r�r�r�r�r�r�r2r2r2r#r#rr#r#r2r2r#r�r#r#r2r2rr�r#r#rr2r#r�rrr#r�r�r#r2rr�r�r�r�r�r�r�r�r�r�r#r�r�r�r�r�r�r�r�r�r�r�r�r�r#r�r�r�r�r�r�r�r2rr#r2r2r2r2r2r#rr2rr2r2rr#r#r2rr2rr#rrr�r#rr#r�r�r#r#r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r#r�r2r2r2r2rrrrr2r#rrr#r#r2r2r�r2rr#rr2rr#r2r2r#r#r�rr#r2r�r�r�r�r�r�r�r�r�r�r#r�r�r�r�r�r�r�r�r�r�r�r�r�r#r�r�r�r�r�r�r�r2r2r2rrrr2rr2r2r2rr#r#r2r2r#r#r#rrr2rr2rrrr#r�rrr#r�r�r�r�r�r�r�r�r�r�r#r�r�r�r�r�r�r�r�r�r�r�r�r�r#r�r�r�r�r�r�r�r#r�r�r2r2r2r2r2r�r�r2r2rr2r�r�r�rr2r2r#r�r#rr�r�r#r#r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r2r#rr2r2r2r2r2r#rr2r2rrr#r#r�r2r2rrr#rrr#r�rrr�r#r#r#r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r2r2rrr#r2r#rr2r2rrr#r#rrr#r#r#r#r2rr#r#r#r#rr#r�r#rr#r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r#r�r�r�r�r#r�r�r�r�r�r�r�r�r�rr2r2r#r#r#r#r#r2r2r2r�r#r#r2r2r#r#r#r#r#rrr�r2r#r#rr�rr#r#r�r�r�r�r�r�r�r�r�r�r#r�r�r�r�r�r�r�r�r�r�r�r�r�r#r�r�r�r�r�r�r�r2r#r�r#rr#rrr�r#rr2r#rr�r�r�rr#r#r#r#r#rr�r�r#r#r�r�r�r�r#rr#rrrr#rr#rr�rr�rrr#r#rr#r#rr#r#r#r�r#r�r�r�r#r#r�r#r#r#rr2rr2r2r�r#rrr2r#r�r#r�rr#rrr�r#r#r�r�r#r#r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r#r�r�r2r2rrr#r�r�r2rr2rr�r�r�r#r#r2r�r�r#r#r�r�rr#r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�
r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r2r#r#rrr2r2r#r�r#r2rr2r#r#r#r�r#r#r#r#r#r2r#r�r�rrr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r2r#r#r#rrrr#r�r#rr2r2rr�r�r�rr#r#r#rr#r#r#r�r#r#r#r�r�r�r#rrrrrr#r#r#rr�rr#r#r#r#r#rr#r#r#r#r#r#r�r#r#r#r�r�r#r#r2rrr#r�r�r#r#rrr�r2r�r#rr#r#r�r�r#r#r#r�r#r#r#r#r�rr#r#r#rrr#r#r#rr#rr#r#r#r#r#r#r#rr#r#r#rr2r#r#r#r#r#r#r#r#r#r�r#rr2r2r�r#r�r�r�r2r2r#r�r�r#rrr#r�r�r�r�rr�r�r#r#r#r�rr#r#r#rr#r#r#r#r#r#rr#r#r�r#r#r�r#r#r#r�r#rr#r#r�r#r#r#r#r#r#r#r�r#rr2r2r�r#r�r�r�rrr�r�r�r�r#rrr�r�r�r�r#r�r�r#r#r�r�rr�r#r�rr#r#r#r#rr#r#r#r#r#r#r#rr#r#r#r#r#r#r#r#r#rr�r#r#r#r#r#r�r#r2rrr�r#r�r#r�rr2rr�r�r#rrr#r�r�r#r#r#r�r�rr#r�r#rrr#r#rr#r#r#r#r#r#rr#r#r#r#r#r#r�rr#r�r#r#r�r#r#r#r�r#r#rr#r#r�r#rrrr�r�r#r�r�rrr#r#r�r�rr#r#r�r�r�r#rr�r�rr#r�r�rr#r#r#rr#r#r#r#rr#rr#r#r#rrr#r#rr#r#r#rr#r#r#r#r#r#r#r#r#r#r�r#r#rr2r�r�r�r#r�r2rr#r�r�r#rr#r#r�r�r�r�rr#r�r#r#r�r�rr#rr#r#r#r�r�r�r#r�r#r#r#r#r#rr�r�r#r�r�r�rr�r�r#r#r#r#r#r#r#r#r�r#r2r�r�rr#rrr#r�r�rr#rrr�r�r�rr#r#r#r�r#r#r�r�r#r#rr�r�r�r#rr#rrr#r#rr#rr�r#r#r#r#r#r#r#r#r#rr#r#r�r�r#r#r#r#r�r�r#r#r2rr�r�r�r#r�rrrr�r�r�rrr#r�r�r�r�r2r#r#r#r#r�r�rr#r#r#rr#r�r#r#r#r�r#r#r#r#r#r#r#r�rr#r�r�r#r�r#r#r�r#r#r#r#r#r#r�r#rr2rr�r�r�r#r�rrr�r�r�r�rr#r#r�r�r�r�rr#r�r#r#r�r�rr#r#r�rr#r#r#r#rr#rr#rr�r#r#r#r�rr#r#r#rr#r#r#r#r�r#r#r#r#r#r�r#r2r#r#rrrr2rr#r#rrr#r#r�r#r�rrr#r#r#r#r#r�r�r#r#r�r#r#r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrr�r�r�r�r�rrr�r�r�r�rrr#r�r�r�r#r#r�r�r#rr�r�rr#r#r#rrr#r#r#rr#rr#r#r�r#r#r#r#rr#r#r#rr#r#r#r#r�r#rr#r#r#r�r#r#r�r�r#rr2rr#r�r�rr�r#r#r�r�r�r#r#r#r#r�r#r#r�r�r#r�r�r�r�r�r#rr#rr#rr#r#r#rr�rr#r#r#r�r#rr�r�r#r#r#r�r�r�r�r�r�r�r�r�rr2rr�r�r�r�r�r#r#rr#r�r�r#r#r#r�r�r�r�rr�r�r#r#r�r�rr#r#r#rr#r#r#r#r#r#rr#r�r#r#r#r#r�rr#r#r#r#r#r#r�r#r�r#r#r#r#r#r�r#r#rrr�r#r#r#r�rrrr�r�r�r2rr#r�r�r�r#r#r�r�r#r#r�r#r#r#r�r�r#r#r�r#r#r#r#r#r#r#r#rr#r#r#r#r#r#r#rr#r#r#r�r�r#r#r#r�r#r�r#rr#r�rr#r#rrr#r#rr#r#r#r�r�r�r#r#r�r#r#r#r#r�r�r#r#r#r�r�r�r#rrrrrr#r#r#rr�rr#r#r#
r#r#r#r#r#r#r#r#r#r�r#r#r�r�r�r#r�r#rr2r�r�r�r#r�rrr�r�r�r�rrr�r�r�r�r�r#r�r�r#r�r�r�rr�r#r�rr#r#r#r#r#r�rr�r�r�r#rr#r#r#r#r�r#rr�r#r�r#r�r#r#r#r�r#r�r#rrrr�r�r�r#r�rr#rr�r�r�r#r#rr�r�r�r�r#r�r�r#r#r�r�rr#r�r#rr#r#r#r#r#r#r#r#r#r#r#r#r#r#r#r#r#r#r#r#r#r#rr�r#r#r#r#r#r�r#r#rrr�r�r�r#r�rrrr�r�r�r#r#r�r�r�r�r�r#r#r�rr�r�r#r#r#r�r#r#r�r#r#r#r#r#r#r�r#r#r#r#r�r�r#r�r�r#r#r�r#r�r#r#r#r#r#r�r�r�r#r#r�r�r#r�r#rr#r�r�r#r#r#rr�r�r�r#r#r�r#r�r#r#r�r�r#r�r�r�r�r�r�rr#rr#r#r#r#r#rr�rr�r#r#r�r#rr#r�r#r#r#r�r�r�r�r�r�r#r�r�rr#r#r�r#rr�r�r#r#r#r�r�r�r#r#r�r�r�r�r�r#r�r�r#r�r�r�rr#r�r#rrr#r#r#r#r#rr#r#r�r#r#r#r#rr#r#r#rr#r#r�r#r�r#r#r#r#r#r�r#r#rrr�r�r�r�r�r#r#r�r�r�r�rr#r�r�r�r�r�rr�r�rrr�r�rr�r�r#rr#r#r#r#r#r#r#r�r#r#r�r#r#r�r#r�r�r�r#r#r#r#r�r�r#r#r#r#r�r�r#r#r#rr�r�r2r#r�rr#r#r#r�r�r#r#r#r�r�r�r#r#r�r�r�r#r�r�r#r�r#r�r#rr#r�r#r#r#rr#r#r�r#r#r#r#r#r�r�r�r#r#r#r#r#r�r#r�r�r�r#r�r�rr#r#r�r�r�r�r�r#r�r�r�r�r�r�r�r�r#r�r#r�r�r�r#r�r�r�r�rr�r�r�rr#r#r#r#r#r#r#r#r#r�r#r#r#r#r#r#r#r#r#rr#r#r�r�r#r#r#r#r#r�r#rr#r#r#rr#r#r#r�r#r#rr#r�r�r�r�r#r#r#r#r�r#r�r�r�r�r#r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r#r#r�r#r#r#r#r#r�r�r#r#rr#r�r�r�r#r#r�r�r�r#r#r�r�r#r�r#r�r�r�r#rr#r#r#r#r#r#r#r#r�r#r�r#r#r#r#r#r#r�r#r#r#r�r�r�r�r�r�r#r�r�rr�r�r�r#r#r#r#r�r�r#r#r�r�r�r�r�r#r#r#rr�r�r#r�r�r#r�r#r�r�r�r�r#r#r#r#r#r#r#r#rr�r#r#r#r#r�r#r#r#r�r#r#r#r�r�r�r�r�r�r�r�r�r#r�r�r#r#r#r#r#r�r�rr#r�r#r�r�r�r#r�r#r�r�r�r�r�r�r#r�r�r�r�r�r�r#r#r#r#r#r#r�r#r#r�r#r�r#r#r�r#r#r�r�r#r#r#r�r�r�r�r�r�r�r�r�r#r�r�r#r#r#r�r�r�r�r#r�rr�r�r�r�r�r�r�r�r�rr�r�r�r�r�r�r�r�r�r�r#r#r#r#r#r�r�r#r#r�r#r�r#r�r�r#r#r#r�r#r#r#r�r�r�r�r�r�r�r�r�r�r�r�r#r�r�r�r�r�r�r#r#rr#r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r#r#r#r�r#r�r�r#r#r�r#r�r#r#r�r#r#r#r�r#r#r#r�r�r�r�r�r�r�r�r�rr#r#r#r#r#r#r#r#r#r#r�r�r#r#r#r�r�r#r�r�r#r�r#r�r#r#r#r�r�r#r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r#r�r�r#r#r#r#r�r�r�r#r#r#r�r�r�r�r#r#r#r�r�r�r�r�r�r�r�r�r�r�r�r�r#r#r#r#r#r#r�r#r#r�r#r�r#r�r�r#r#r�r�r#r#r�r�r�r
�r�r�r�r�r�r�)ZLatin2_HungarianCharToOrderMapZwin1250HungarianCharToOrderMapZHungarianLangModelZLatin2HungarianModelZWin1250HungarianModel�r�r��(/usr/lib/python3.6/langhungarianmodel.py�<module>#sV
_vendor/chardet/__pycache__/escsm.cpython-36.opt-1.pyc000064400000016167151733136230016540 0ustar003

�Pf)�@s�ddlmZdZejejdejejejejejejejejejejejejejejejejejejejdejdejdejdddejdejdddejdejdejejejejejejejf0ZdZedeedd	d
�ZdZ	ejdejejejejejejejejejejejejejejejejejejejejejejejejejejejejdejejejejejejejejejddejejejejejejejejejejejejejejejejejejejejejejf@Z
dZe	de
edd	d
�ZdZ
ejdejejejejejejejejejejejejejejejejejejejejejejejejejejejejejejejdejejejdejejejejejdejejejejejejejejejejejejejejejejejejejejejejejejejejejejfHZdZe
deeddd
�ZdZejdejejejejejejejejejejejejejejejejejejejdejejejejejejdejejejejejejejejejejejf(ZdZedeeddd
�ZdS)�)�MachineState������z
HZ-GB-2312ZChinese)Zclass_tableZclass_factorZstate_tableZchar_len_table�nameZlanguage�	zISO-2022-CN���
zISO-2022-JPZJapanesezISO-2022-KRZKoreanN(rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr)rrrrrr(rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr)	rrrrrrrrr(rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr
rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr)
rrrrrrrrrr(rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr)rrrrrr)ZenumsrZHZ_CLSZSTARTZERRORZITS_MEZHZ_STZHZ_CHAR_LEN_TABLEZHZ_SM_MODELZ
ISO2022CN_CLSZISO2022CN_STZISO2022CN_CHAR_LEN_TABLEZISO2022CN_SM_MODELZ
ISO2022JP_CLSZISO2022JP_STZISO2022JP_CHAR_LEN_TABLEZISO2022JP_SM_MODELZ
ISO2022KR_CLSZISO2022KR_STZISO2022KR_CHAR_LEN_TABLEZISO2022KR_SM_MODEL�rr�/usr/lib/python3.6/escsm.py�<module>sp "    $     $ $_vendor/chardet/__pycache__/langbulgarianmodel.cpython-36.opt-1.pyc000064400000060341151733136230021246 0ustar003

�Pf'2�@s4d�Zd�Zd�Zeed�d�d�d�d��Zeed�d�d�d�d��Zd�S)������M�Z�c�d�H�m�k�e�O��Q�f�L�^�R�n��l�[�J�w�T�`�o��s�A�E�F�B�?�D�p�g�\���h�_�V�W�G�t���U�]�a�q�����������������������������������������������������������������������i�������������-������ �#�+�%�,�7�/�(�;�!�.�&�$�)��'��"�3�0�1�5�2�6�9�=���C���<�8���	���������
����
������������K�4���*��>�������:��b�������x�N�@�S�y�u�X�z�Y�j�I�P�v�r�g! _B�?Fz
ISO-8859-5Z	Bulgairan)Zchar_to_order_mapZprecedence_matrixZtypical_positive_ratioZkeep_english_letterZcharset_nameZlanguagezwindows-1251Z	BulgarianN(rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8rrrrrr(r/r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrr(rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8rrrrrr>r?r@rArBrCrDrEr�rFrGrHrIrJrKrLrMr�r�r�r�r�r�rWrNrOrPrQrRrSrTrUr�rVrXrYrZr�r�r�r[r\r]r_r`r^r|r~r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r�r}rrr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rr�r�(r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r
�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r
�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r
�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r
�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�)ZLatin5_BulgarianCharToOrderMapZwin1251BulgarianCharToOrderMapZBulgarianLangModelZLatin5BulgarianModelZWin1251BulgarianModel�r�r��(/usr/lib/python3.6/langbulgarianmodel.py�<module>&sV
_vendor/chardet/__pycache__/sjisprober.cpython-36.pyc000064400000004470151733136230016643 0ustar003

�Pf��@s`ddlmZddlmZddlmZddlmZddlm	Z	ddl
mZmZGdd�de�Z
d	S)
�)�MultiByteCharSetProber)�CodingStateMachine)�SJISDistributionAnalysis)�SJISContextAnalysis)�
SJIS_SM_MODEL)�ProbingState�MachineStatecsPeZdZ�fdd�Z�fdd�Zedd��Zedd��Zd	d
�Zdd�Z	�Z
S)
�
SJISProbercs4tt|�j�tt�|_t�|_t�|_	|j
�dS)N)�superr	�__init__rr�	coding_smr�distribution_analyzerr�context_analyzer�reset)�self)�	__class__�� /usr/lib/python3.6/sjisprober.pyr%s

zSJISProber.__init__cstt|�j�|jj�dS)N)r
r	rr)r)rrrr,szSJISProber.resetcCs|jjS)N)r�charset_name)rrrrr0szSJISProber.charset_namecCsdS)NZJapaneser)rrrr�language4szSJISProber.languagecCsL�xtt|��D]�}|jj||�}|tjkrP|jjd|j|j	|�t
j|_Pq|tj
krft
j|_Pq|tjkr|jj�}|dkr�|d|jd<|jj|jd|d�|�|jj|j|�q|jj||d||d|�|�|jj||d|d�|�qW|d|jd<|jt
jk�rF|jj��rF|j�|jk�rFt
j|_|jS)Nz!%s %s prober hit error at byte %s�r�����)�range�lenrZ
next_staterZERRORZlogger�debugrrrZNOT_MEZ_stateZITS_MEZFOUND_ITZSTARTZget_current_charlenZ
_last_charr�feedr
�stateZ	DETECTINGZgot_enough_data�get_confidenceZSHORTCUT_THRESHOLD)rZbyte_str�iZcoding_stateZchar_lenrrrr8s6




zSJISProber.feedcCs|jj�}|jj�}t||�S)N)rrr
�max)rZcontext_confZdistrib_confrrrrYs

zSJISProber.get_confidence)�__name__�
__module__�__qualname__rr�propertyrrrr�
__classcell__rr)rrr	$s!r	N)ZmbcharsetproberrZcodingstatemachinerZchardistributionrZjpcntxrZmbcssmrZenumsrrr	rrrr�<module>s_vendor/chardet/__pycache__/euckrprober.cpython-36.pyc000064400000002031151733136230016773 0ustar003

�Pf��@sDddlmZddlmZddlmZddlmZGdd�de�ZdS)�)�MultiByteCharSetProber)�CodingStateMachine)�EUCKRDistributionAnalysis)�EUCKR_SM_MODELcs4eZdZ�fdd�Zedd��Zedd��Z�ZS)�EUCKRProbercs,tt|�j�tt�|_t�|_|j�dS)N)	�superr�__init__rrZ	coding_smrZdistribution_analyzer�reset)�self)�	__class__��!/usr/lib/python3.6/euckrprober.pyr#s
zEUCKRProber.__init__cCsdS)NzEUC-KRr)r
rrr
�charset_name)szEUCKRProber.charset_namecCsdS)NZKoreanr)r
rrr
�language-szEUCKRProber.language)�__name__�
__module__�__qualname__r�propertyrr�
__classcell__rr)rr
r"srN)	ZmbcharsetproberrZcodingstatemachinerZchardistributionrZmbcssmrrrrrr
�<module>s_vendor/chardet/__pycache__/gb2312prober.cpython-36.opt-1.pyc000064400000002041151733136230017522 0ustar003

�Pf��@sDddlmZddlmZddlmZddlmZGdd�de�ZdS)�)�MultiByteCharSetProber)�CodingStateMachine)�GB2312DistributionAnalysis)�GB2312_SM_MODELcs4eZdZ�fdd�Zedd��Zedd��Z�ZS)�GB2312Probercs,tt|�j�tt�|_t�|_|j�dS)N)	�superr�__init__rrZ	coding_smrZdistribution_analyzer�reset)�self)�	__class__��"/usr/lib/python3.6/gb2312prober.pyr"s
zGB2312Prober.__init__cCsdS)NZGB2312r)r
rrr
�charset_name(szGB2312Prober.charset_namecCsdS)NZChineser)r
rrr
�language,szGB2312Prober.language)�__name__�
__module__�__qualname__r�propertyrr�
__classcell__rr)rr
r!srN)	ZmbcharsetproberrZcodingstatemachinerZchardistributionrZmbcssmrrrrrr
�<module>s_vendor/chardet/__pycache__/mbcssm.cpython-36.opt-1.pyc000064400000042131151733136230016700 0ustar003

�Pf�c�@sl	ddlmZdZejejejdejejejejejejejejejejejejejejejejejejejejfZdZedeedd�Zd Z	ejejdejejejddejd	ejejejejejejejejejejejejejejejejejejejejejejejejejejejejejejejejejejejejejejejejejejejejejejejejejejejejejejejejejejejejfFZ
d!Ze	d
e
edd�Zd"Z
ddddejejejejejejejejejejejejejejejejejejejejejejejejejejdejdejejejejejejejf(Zd#Ze
d	eedd�Zd$ZejejdejejejejejejejejejejejejejfZd%Zedeedd�Zd&Zejejejddddejejejejejejejejejejejejejejejejejejejejejejejejejdejejejejejejejejejejejejejejejf0Zd'Zed
eedd�Zd(Zejejejejejejdejejejejejejejejejejejejejejejejejdejejejejejejejejejdejejejejejejejejejejejejejf0Zd)Zed
eedd�Zd*ZejejejdejejejejejejejejejejejejejejejejejejejejfZd+Zed	eedd�Z d,Z!dd
d
ejddejejejejejejejejejejejejd	d	d	d	ejejd	d	d	d	d	ejd	d	d	d	d	d	dd
d
ejddd	d	ejd	d	d	d	d	d	d	ejejejejf8Z"d-Z#e!d	e"e#dd�Z$d.Z%d	d	d
d	ddejejejejejejejejejejejejdddejejejdddejdejd	d	d
d	dddddejdddejejejdddddejdejejejf8Z&d/Z'e%d	e&e'dd�Z(d0Z)ejejejejejejdd
dddd
d	dddejejejejejejejejejejejejejejejejejejejejejejejejejejejejejejejejejejddddejejejejejejejejejejejejejdddejejejejejejejejejejejejd
d
d
d
ejejejejejejejejejejejejejejd
d
ejejejejejejejejejejejejddddejejejejejejejejejejejejejejejdejejejejejejejejejejejejddddejejejejejejejejejejejejejejejdejejejejejejejejejejejejdddejejejejejejejejejejejejejejejejejejejejejejejejejejejf�Z*d1Z+e)de*e+dd�Z,dS)2�)�MachineState�����ZBig5)Zclass_tableZclass_factorZstate_tableZchar_len_table�name����	�
ZCP949zEUC-JPzEUC-KRzx-euc-twZGB2312Z	Shift_JISzUTF-16BEzUTF-16LE���
���zUTF-8N(rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr)rrrrr(rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r
r
r
r
r
r
r
r
r
r
r
r
rrrr
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr)
rrrrrrrrrr(rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr)rrrrrr(rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr)rrrr(rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr	rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr)rrrrrrr(rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	
r)rrrrrrr(rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr)rrrrrr(rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr)rrrrrr(rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr)rrrrrr(rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r
rrrrrrrrrrrrrrrr
rrrrrrrrrrrrrrr)rrrrrrrrrrrrrrr	r	)-ZenumsrZBIG5_CLSZERRORZSTARTZITS_MEZBIG5_STZBIG5_CHAR_LEN_TABLEZ
BIG5_SM_MODELZ	CP949_CLSZCP949_STZCP949_CHAR_LEN_TABLEZCP949_SM_MODELZ	EUCJP_CLSZEUCJP_STZEUCJP_CHAR_LEN_TABLEZEUCJP_SM_MODELZ	EUCKR_CLSZEUCKR_STZEUCKR_CHAR_LEN_TABLEZEUCKR_SM_MODELZ	EUCTW_CLSZEUCTW_STZEUCTW_CHAR_LEN_TABLEZEUCTW_SM_MODELZ
GB2312_CLSZ	GB2312_STZGB2312_CHAR_LEN_TABLEZGB2312_SM_MODELZSJIS_CLSZSJIS_STZSJIS_CHAR_LEN_TABLEZ
SJIS_SM_MODELZ
UCS2BE_CLSZ	UCS2BE_STZUCS2BE_CHAR_LEN_TABLEZUCS2BE_SM_MODELZ
UCS2LE_CLSZ	UCS2LE_STZUCS2LE_CHAR_LEN_TABLEZUCS2LE_SM_MODELZUTF8_CLSZUTF8_STZUTF8_CHAR_LEN_TABLEZ
UTF8_SM_MODEL�rr�/usr/lib/python3.6/mbcssm.py�<module>sh $ (((((,  "$   $  $ $                $_vendor/chardet/__pycache__/langgreekmodel.cpython-36.pyc000064400000057637151733136230017456 0ustar003

�Pf�1�@s4d�Zd�Zd�Zeed�d�d�d�d��Zeed�d�d�d�d��Zd�S)������R�d�h�^�b�e�t�f�o��u�\�X�q�U�O�v�i�S�C�r�w�_�c�m��H�F�P�Q�<�`�]�Y�D�x�a�M�V�E�7�N�s�A�B�:�L�j�g�W�k�p���Z�J���=�$�.�G�I�6�l�{�n��3�+�)�"�[�(�4�/�,�5�&�1�;�'�#�0��%�!�-�8�2�T�9�y�����|������ �
�����
����	��������*��@�K����g���s�?Fz
ISO-8859-7ZGreek)Zchar_to_order_mapZprecedence_matrixZtypical_positive_ratioZkeep_english_letterZcharset_nameZlanguagezwindows-1253N(rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr9r:rrrrrrrrrrr;rrrrrrr<r=r>r?r@rArBrrCrrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r(r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r(rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr9r>rrrrrrrrrrr;rrrrrrr<rrr?r@rArBrrCrrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r(r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r(r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rjryryrjrjrjrjrjrjrjrjrfrjrjrjr�ryryrjrjr�rjr�rjryr�rjrjrjr�rjr�r�r�ryr�r�r�r�r�ryr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rjrjrjrjrjr�rjrjr�rjryrjrjr�rjryrjrjrjr�r�rjr�rjr�rjrjryr�r�r�ryr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�ryr�r�r�r�r�r�r�r�r�ryrjryryrjrjrjrjrjrjrjrjr�rjrjrjrjr�ryrjrjr�rjrjrjrjryrjrjrjr�ryr�r�r�ryr�r�r�r�r�ryr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�ryrjrjryrjrjrjrjrjrjrjrjrjrjrjrjr�ryrfrjrjrjrjryrjrjryrjrjryr�r�r�r�r�ryr�r�r�r�r�ryr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rjrjrjrjr�rjrjrjrjrjrjr�rjrjr�rjrjrjrjrjrjrjrjrjrjr�rjryrjrjr�ryr�rfr�ryr�r�r�r�r�ryr�r�r�r�r�r�r�r�r�r�r�r�rfr�r�r�r�r�r�r�r�r�rjrjrjrjrjryrjr�r�r�r�rjrjr�rjrfrjrjrjr�rjrjr�rjrjrjrjr�r�r�r�ryr�r�r�ryr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rjrjrjrjrjr�rjr�rjrjrjrjrjr�rjryryryrjr�ryrjrjrjrjrjryrjrjr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rjrjrjrjrjrjryryryrjrjrjrjr�rjrfrjrjrjrjryrjrjrjrjrjrjrjryryr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rjrjrjrjrjryr�rjr�r�r�rjrjryrjrjrjrjrjr�r�rjryrjr�ryrjr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rjr�rjrjrjrjr�r�rjrjr�ryrjr�rjr�rjrjrjr�r�rjr�rjr�ryryrjrjr�r�r�r�rfr�r�r�r�r�r�r�ryr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rjrjrjrjrjryr�rjryrjrjrjrjr�rjrjrjrjrjr�rjrjryrjryrjrjryr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rjrjryrjryrjrjrjrjrjrjr�ryrjryrjryryryrjryrjrjryrjr�ryryryrjr�ryr�r�r�r�r�r�r�r�r�ryr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rjr�r�r�rjrjrjryrjrjr�r�rjr�rjr�r�r�rjryr�rjr�rjr�r
�ryr�ryr�r�r�r�r�ryr�r�r�r�r�ryr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�ryr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rjrjrjrjr�rjrjrjrjrjrjr�rjrjr�rjr�r�r�rjrjr�rjrjrjr�r�rfryrjr�rjr�r�r�r�r�r�r�r�r�ryr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rjrjrjrjrjryr�r�rjryryrjrjr�rjrjrjrjrjryrfrjr�rjryrjrjryrfr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rjrjr�ryrjrjrjrjrjrjr�r�rjr�rjr�r�r�rjrjr�rjryrjr�r�rjrjrjr�rjr�r�r�ryr�r�r�r�r�rjr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rjrjrjrjr�rjrjrjrjrjrjr�r�rjr�rjr�r�r�rjryr�rjryrjr�r�rjryrjr�ryr�r�r�r�r�r�r�r�r�rjr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rjrfryryrjrjrjrjrjrjr�ryrjr�rjr�r�r�rjrjr�rjr�ryr�r�ryrjrfr�ryr�r�r�r�r�r�r�r�r�ryr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rjr�rjrjrjrjr�rjr�rjrjryrjr�rjrjrjrjrjrjr�rjrjrjr�ryrjr�r�rjr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rjr�rjrjrjr�r�rjr�r�r�rjrjr�rjr�ryrjrjr�r�rjr�rjr�rjrjr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rjr�r�r�rjrjrjrjrjrjr�r�rjr�ryr�r�r�rjrjr�rjr�rjr�r�ryr�ryr�r�r�r�r�rfr�r�r�r�r�ryr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rjrjrjrjrjrjr�rjr�ryr�rjryr�rjryrjryrjr�r�rjryrjryrjrjr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rjr�r�ryrjrjrjrjrjr�r�r�rjr�ryrfr�r�rjryryryr�rjr�r�ryryr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rjr�rjrjrjryr�rjr�rjr�rjrjr�ryrfryrjrjr�r�rjr�rjr�rjrjr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�ryrjrjrjr�rjrjrjrjrjrjr�ryrjr�rjr�r�r�ryrfr�ryryrjr�r�ryryryr�r�r�r�r�r�r�r�r�r�r�ryr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rjr�r�ryrjrjrjryrjr�r�rfrjr�ryr�r�r�r�rjr�rfr�ryr�r�rfrfrfr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rjrjrjrjrjrfr�rjr�r�r�rjryr�rjryrjrjrjr�r�rjr�rjryryryrfr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rjr�r
jrjrjr�r�rjr�r�r�r�ryr�ryrjrjryryryryrjr�ryr�ryryr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rjrjrjrjryr�r�r�r�r�r�ryrjr�ryr�ryrjryr�r�rjr�rjr�rjrfr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rjryrjrjryryrjr�ryr�rjr�r�r�ryr�r�r�r�rfryr�ryr�ryr�r�ryr�ryr�ryryr�r�rfr�ryryryr�ryryryr�ryryryr�r�ryr�r�rfr�r�r�r�r�ryr�rjrjryr�r�r�r�r�r�rfrjr�ryr�ryryryr�r�ryr�rjr�r�ryr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rjr�ryrjryr�ryryr�ryr�ryryr�ryr�ryryryr�r�r�r�r�r�ryrjr�r�r�ryr�rfryr�r�r�r�ryryr�r�r�ryrfr�ryryr�r�r�r�r�r�rfr�ryr�r�r�r�r�r�r�r�ryrfr�ryrjryryrjryrjryr�r�rjrjrjr�r�rjryr�r�r�rfrfr�ryr�ryryr�ryr�ryr�ryryr�r�ryr�ryryryr�ryryryryr�r�ryr�r�r�ryr�rfr�r�r�r�r�rjr�rjrjryryr�rjr�r�r�ryryr�ryryryrfryr�r�rfryryr�r�rjr�r�r�ryr�rfryr�r�r�rfryr�r�r�r�r�r�r�ryryr�rfr�r�ryr�r�r�ryr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�ryrjrjryryr�r�r�ryr�ryrjrjr�ryr�r�r�r�r�r�ryryryr�ryryr�ryr�ryr�ryryr�r�ryryryryrfr�r�ryryr�ryr�r�ryr�r�r�r�r�r�ryr�r�r�r�r�r�r�ryr�rjryrjr�r�r�rjr�r�ryryr�ryr�ryryryr�r�ryr�r�r�r�r�r�r�r�ryr�r�ryryr�r�ryryryr�r�r�r�r�r�ryr�r�r�ryr�r�r�r�r�r�r�r�r�r�r�r�r�r�ryr�r�rjryr�ryryryryryr�r�r�ryr�r�r�r�ryr�rfr�r�ryr�rfr�r�r�r�ryryryr�ryryr�rfryr�ryryryr�ryryryryrfryryr�r�ryr�r�r�r�r�r�r�r�r�r�r�r�r�rfr�r�r�r�r�r�r�r�r�ryr�r�r�r�r�r�rfr�r�r�r�r�r�r�r�r�ryr�ryr�ryryr�r�r�r�rfryrfr�r�ryryr�r�ryr�r�r�r�r�r�r�r�r�r�r�r�r�r�rjryrjr�r�ryr�r�r�ryryr�ryr�r�r�rfr�r�ryr�ryr�ryryr�r�r�r�r�r�ryr�r�r�r�ryryr�r�r�r�r�r�ryr�r�r�r�r�r�r�r�r�ryr�r�r�r�r�r�r�ryryrjryryr�r�r�r�r�r�rfrjr�ryr�ryryr�r�r�rfr�ryr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�ryr�ryr�rjryr�ryr�r�r�r�r�r�ryryr�r�r�r�r�r�r�r�r�r�r�r�r�r�rfr�r�ryr�r�r�r�rfrfr�r�ryrfryr�ryryr�rfr�r�rfr�r�r�ryr�r�r�r�r�r�r�rjr�ryryryr�r�ryr�r�r�ryr�r�r�ryrjr�ryr�r�r�r�r�r�ryryr�r�r�ryr�rfryr�r�r�rfryryrfr�r
�r�ryr�r�ryr�r�r�r�r�r�r�r�rfr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rjr�r�r�r�r�r�ryr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�ryrfryr�ryryr�ryr�r�ryr�r�r�r�rfryrfr�ryrfr�r�r�r�r�r�r�r�r�r�r�r�ryr�r�r�rjrfryryr�ryr�r�r�r�ryr�r�r�ryr�r�rjr�r�r�r�ryryryr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�ryrfr�ryr�rfryr�r�r�r�r�r�r�r�r�r�r�r�r�ryr�r�rfr�r�r�r�r�r�ryr�ryryr�r�ryryryryryr�rfryr�r�r�ryryr�rfr�ryr�r�ryryr�r�r�r�r�r�r�r�r�r�rfr�r�r�r�r�r�r�rjr�r�ryr�r�r�r�r�r�r�r�ryr�ryr�r�r�r�ryr�rfryr�r�r�r�ryryrfr�rfr�rfr�ryryryrfr�r�r�r�r�r�rfr�r�r�r�r�r�r�ryr�rfryr�r�r�r�r�r�r�r�r�r�ryr�r�ryryr�r�r�r�rfr�r�r�r�r�r�ryr�ryryr�r�r�r�ryryr�r�r�r�r�r�ryr�r�r�r�r�r�r�r�r�ryr�r�ryr�r�r�r�ryryryryr�r�r�rjr�r�r�r�r�r�r�r�ryr�r�r�r�r�r�ryr�r�r�r�r�r�rfr�r�ryr�r�r�r�rfryr�r�r�r�r�r�ryryrfrfr�r�r�r�r�r�rfr�r�r�r�r�r�r�ryr�ryryryr�r�ryr�r�r�r�r�r�r�ryryryr�r�r�ryr�r�r�r�r�r�r�r�ryr�r�rfr�r�r�r�ryrfr�r�r�r�r�r�rfr�r�r�r�r�rfr�r�r�r�r�r�r�r�r�r�r�rjr�ryr�r�r�r�r�r�r�r�ryr�r�r�r�r�ryr�r�r�r�r�r�r�ryr�r�r�r�ryr�r�ryr�r�r�r�ryryr�r�r�r�rfr�r�rfr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�ryr�ryryrfr�r�r�r�r�r�ryr�r�ryr�ryryryr�r�r�r�r�r�ryr�r�r�r�ryr�r�ryr�r�ryr�ryryr�r�r�r�ryr�ryr�r�r�r�r�ryr�r�r�ryr�r�r�r�r�r�r�r�rjr�r�r�ryryr�ryryr�r�r�r�r�ryr�r�r�r�r�r�ryr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rfr�r�r�r�r�rfr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rfr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�ryr�r�ryr�r�r�r�r�r�ryryryryryr�r�r�r�r�r�ryr�r�r�r�r�r�r�r�r�r�r�r�r�rfrfr�r�r�rfr�r�r�r�r�r�r�ryrfr�r�r�r�r�r�ryr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�ryryr�r�r�r�r�ryr�r�r�r�r�r�r�r�rfr�r�r�r�r�r�r�r�r�ryr�r�r�ryr�r�r�r�r�rfr�r�r�r�ryryr�r�r�rfr�r�r�r�r�r�r�r�r�r�r�r�r�r�rfr�r�r�r�r�r�r�r�r�r�r�r�ryr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rfr�r�rfr�ryr�r�r�r�ryr�ryr�r�r�r�r�r�r�r�r�r�r�r�r�r�ryr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rfr�r�r�r�rfrfr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r
�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rfr�r�r�r�r�r�r�r�r�r�r�r�r�rfr�r�r�r�rfr�r�ryr�ryr�r�r�r�r�r�r�r�r�r�r�ryrfr�r�r�r�r�r�ryr�r�r�rfryr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�)ZLatin7_char_to_order_mapZwin1253_char_to_order_mapZGreekLangModelZLatin7GreekModelZWin1253GreekModel�r�r��$/usr/lib/python3.6/langgreekmodel.py�<module>#sV
_vendor/chardet/__pycache__/big5freq.cpython-36.pyc000064400000152574151733136230016176 0ustar003

�Pfz�@sdZdZ�dZ�dS(g�?���	�������	�R������a����n���!����L�,�B��������{
�]
���
�j���.�N�i�����:����?���=�N�K����k�q	�����
�����b��
�����
�o�2��i����c�8���������{�|����"��
��@�\���	��������F��|
�Q�H���P�v��������D�^
����F�}
��E��O��0���s���4�<�2���&�M�����~
����G���[���?���a��K	�*��g��	�Z�
�:����K�	��	�������`�l�����
	� �q��~����
�	�������c��u���*���	��
��~�m������e��G�^��n���U�C���C��������� �j�o�/���P�7	�[������
�?����o�S�(����p�]��6�j��@�������8��+�3�[���\�������]	�A������1����H��
��
�d����+��2�����
�O�L	���f�1����������3�9���l�,���	���������e�z
�Q�M�&���X�������
�����k�p��M�����%�������'��	��\���7��J��!���������N��B�P�_
�q����
����
��
�� ��� �I���8	���
��{
����
����m����f���x�������
��
���g����
��������&��'������.������M	������$�#���D�h�A�	����r���U�G������
�
�Q�S�i���d��0�F�����C����� ���J����U�����N	�
���<��
�:�A
��
��	� 	�9���}���	���W�P�
��)�D����4��
��	�)����r	��s����t��9	�����s	�*�������]�����	�D�j�(
�
����
��u�5�Y�j�	�l��u����
���`
�
�=������������5��!����T�E�x���e���	���O	�P��|
�6�A�
�/�k���
��l�!�	���B��`��
����A�������
�v�	����w�����M�
�������a
����
�}
���x�,�}���B
����������F�k�m�	���
���	���
��b��)
�^�P	�
�,������7�5�~
�y�|��z������
�C
�{�����������1���b��	��
����t	���
��_�����������
��
�b
�����5�D���l��|�[�*
����
��%��G�^	��l�����
��`�����p����n�c�g�m����'�2�����{���������D
���f�	���|�:	�������
����
�
������n��������;���I�Y�}���
�X�"��
�����������-��l������������E��~��>�]���,�������v�L�B�i�&����������G������B���
�+
�����������	� �a����
�a���}�E
��D���=���0����
�6������v�!������������9�H�����
�F���������"��H�������
�o���R�*��.�������I�3��,
��*�S����X�����/��b�p��	�����������R����7��2���	�����������_	���b��
�����	�������`	�O��	���`�7�
�
��������a	�������������g������
�_��~��������
�a���
�b	�������������	�;���	�-�
���Q	������q��������`���#�#�������r�
�s�����d�t��
���c
����k�w���������������g��Q��U�
�������C�S��c	���5���B���_��c�N�����Y������
��L�d��
�
�	�K��8�a�G���
����s�6�	�t�;	�N����	�4���:��\�q����.�����u��
���������
��
�r�����
�
���c����@�����������/�9�����������
�<	�d
�����8�v��C����
���m��
��������Z��������w��	�{������$���M�d�0�r�����g��V�����:�Z����{�$�e�0���$��
�
��5��[���v���c��V�R	�$�;����f�����d����W�e
��������
��J�����u���	��
���
�K��2��L� �.���2����������%�������
����������
��
�x��M���}���T�����d	�
������`�����	�3���;�����y�Y�L����	��	�����U�\��������V�N����
��� 
��"�x�%��;�=	� ����2�E��!������^���"�w��
�#��������$�<�Q����r�%��]���&��<�'��3�6�B���(�����������g�
�
�)�
�	�*�+�!	������:��7��^�����(�
�,�+���x�-��/�h�=�.�������V����h����/�����<����;�0�����
�	�1�z������5�6�
�_�)�2�3���4�y�"	�����
��^�5����?�%�+�f
��6�_�W����
�7�>��
�g
�#	���u	�8����9�h
�F
���V�{���O���I�w�~�
�^���:�����������	�;�e�1������������������P�<�=���>�
���(�`�R�?�Y��I��	������7���
�?�����
�@���o��q�Y�n���l��������
�i
�E�P�V��!
�������n��A���|�
��
�B������-
�	�O��
��C��D�W�T�T�������
�}�E���&�
���������
���~�G
��
����$	�W�����o���
�F��G����"
�������#
�����D�-�Q���H�e��	���I�����J�z�
��K���L��$
�
�����H
����������M�:��	����N���I
������H������O����	�P�Q����k�R�R��������
�
������������S�T�����%
���������&
�S�U����S	��C������V�~��!�X�������,��������O���|�����P��e	���������������T������W�����X��Y��R�
�v���
���=���Z��U����[�����V�%	�\���]���^�8�T	�.
�'
�_��v�����w�t��`�a�b�>	����"�{����	��c���d�9������[�9���d�h�
�e�����J
�!���f����������T�g����%��W�M�h���i�������������(
�j
��y�7�m��j�k��?	�W�"�������B��l�|���)
�z����
����������m��n�u�v	�w���	�*
���o�p���q�7�.�r����
���
��f�����s�&	�y��� �������
��>�������Y���t��u�/
�+
��v����@�w���	��	���3�����,
�x���6�9�
�	���6�����!���y���Q�:�c�
���-
�o���F�X�
����f����z��
��������{�/���'	�	�b����`����u�	�v������
�
�
������0�E��S��8�[�|�}�G�����!�~���F�U	��
����e�s�����w	����"��a�	�Z�&������	�
�����A���
��i����������
�o������
�
��	��A��J�0���x	��
�����Y�������	�����Z�8�o����Q�����
���a�Q����p�;���P��b����������
����	��������������]������
��������'���.
�����
��������L�������	��/
�����B�0
�[��8������	���/�3������
�����0���n��������������	�����������4�����<�	��
����Y�	�����C�����
��������}�1��
����K������0
�F�������
��#��\�����f��>�
���
����������
�������j������-�p�9�@	�6�y�������
�1
���������V���������4�����K�1
����
�D����������#������2
�����	�Z�H�p����g������
�q�r�
����������K
�V	�
�\�,�
��������]���u��
������������Z���
������j����0�h�h��������E���	
���	���������������
����2
���������M���w�����y	�?������

�t�m�(	��	��+�����������i�
��	������
���L
����)	�
��
���f	���	��������R�"�A	�3
����-�1��$�
�(���]���#��$��
������ �*	������
����
�=�k
��!��
���������
�����_���������^�2���
������9������������4
������
��b�����������5��������"�	�
��������4�I����#����������j�����$�k���
��\�
�g��E�5
����������������
����6
�����	���%���������[�����������&�e�����������o�����l�	�}�������z���+	�����������n�-���
�'��i�
������(������	������������
��������B�i�q�����<��*�j�)�"�*���>�)�7
���+�����������z���,������M
�)�r����-�	�������������T���p�����#�#������������
�����	�	�����S���.�8
��������z	�9
�=�����m��S�������x�����/�����q����3���5���:
��
�������������
�;
��0�1�������n���_��]���n���������
�%���������`��������z�T�s�2�3
���
�{	��5������
�g	���k���������
���3�4����c� ����
�
����a�:�1�����!�A�h� �o��
�6�"����l
�-���
�a�5���	��
���#�!��$��
��4
�f���	�%�&�
�N
�
���W������
�'��$����<
��N���W	�k��O
�����������X	�,	��6�&�t���P
���7�
���
�-	�j��	�
��C�[�p���'�8�
�b���m
��
��{�(���	����	�9�
��	��:�
�
�^�������B	�;��������
����
���	���)�<�=�
��
��>������
�=�������������n
��H�D�
�?���U�W�
�=
��:��	�N��K�E�@�4�c�
��>
��Q
�
�A���h����
�
�(�]������B�*����l���C�D���E�h	���������X�����+������+�J�����U����
���m� �E����F��G�
�k�	�n��
�d����������F�!� ��r�@���H�"�C�G�����
��^�
����8�t�)�;��?
�!��o�I���"�#�#�$��J�	�%��
�&�r�K�'�5
�(�)��o
�@
�}�A
��,��*�4�1�+�������"��,��
�C	���u�r�D	�-�
�-��L�v�{���|	�M�.�.�N�Y	�6
����/�0�/����1�$��2�%�G�R�B
���_����C
����D
���3���������.	���	�4�@�w��5��6�7��i	���&�0���8�	�&��	�*��+�G�O�u��#�
�1�P�R��Q�����)�����=�9��l�������E
�S��k��\�	���
�y�:��;�<�F
�%�=��R���>�d�'�,�?�e�$��2�3�-����4��(�S�@�A�B���.�T��O�
��������
�&�����
�<�
����C��D���U���p
�G
�e�
����
�E�;�V�F�'�G�)�
���}	�H�*���W���I��T��
�
���������	��J�.�g�~��U��j	�	���X����8��*�	����K�~	�����L��H�
����%���+�f������t���M�N�������	�� 
���k	�Y��Y�����*�/���O�R
�Z�H
���#�I�
��	����5�S
�]�E	�[�\�
�s�I
�+�P�Q�,�	�
�R��S�T���,���U�]�V�����L�-�!��T
�\�4�W���X�Y�Z��[�>�w������+�^��J
���_�$��\�����l�.�]�`��^�_�q
�%��`�a�����
�6�g�U
�b�c�d�a�
�q�x�
�e�
�b�y��f�h�g�h���'�i�����	�j�r��
�0�	�=���b�|��������	��h�k��
�l���
�a��c�m�����n�
��u���d�e���s�K
�/�o�7��&�	�J�8�p�q�i�j�f���	�g�	����(��`�r�s�����<�F�k�f��������]���I�t�>��u��v��
����
�0��-��w�L
��	�����G�a��
�t���
�S�K�x��y�z���V
����O���1���
�{�
����F	�y��9�Z	���|�}�~�F�������
�	�:�����2�����p��;�G	���	�h�����7
������q����M
��������������
����
�(�8
��N
�	��3��	��O
�<����T��W
�	�����(�����1����������u�Z��
�	�c��j�P
�p���
�������v�i�j��
�����Q
�K����	�'��������9
���
��	�z��	�
���k���������2�	��l����>�=�C�'���>�
��Z�����m�n����b����	��q�?��
����	�"��
�?�
�}����o�@�>��A��1��)����?�
��
����x�	��	��	�3���h���l����)���
�p��'����4���
�
����A�_�	�B��L��
�P�q�����r�����X
��c�s�!
����H��	�w����C�D�t�������x�
��	�/	����5�
�4�u��r
���_���/��<��~�7�y�m���^�5�n�b�
�t���l	�����6���	�7��������v����	�8�:
��
��0	��1	��w��I������(����B����4�s��
�����
��
���?�C��x�	�E�H����F�y�z�G�����{����|��;
�}�9�m�H	�~���z���s
�	���M����������m	�����I������o�����������_��������2	�3	�H��*���
����<
��i�:����
�����A������R
���������
���"
���	�������
���
����
�����Q���
�Y
��������{�K�����[��J�����N��S
�����I	���T
�@���
�����
��@��R����	�t
�����J�I���
�������
�U
��m�����{�	���	�
�;�V
����	���<����
�d�������X����9����6�����U����� ���L�!�=
���)���
�	������	�����
��=�$�(�v����&���c������	�
���>�������{�"��������#��������W
��	��������	��$�
�%���������&�^����[	�	���'�����������(� ����2�\	�J�
�p�p�n	��u
�q�)���	�O�>
��
�*�X
�����	�+����,��
����
������K��
�		���I�-���!������.�D���J�s�#
����
���7����/�w��|������"�4	�|�,�������	���������?�Y
�P�0��1�Z
�Z
�
��
�	����
�#�@�2��|��z�����A�'��?
���	���8���y���L�V�
�3���	���$������M���%����	��
��
�.��4�N�����
��	�������
�5�9�%������K�Q����x�6�7��B��
	�O�v
�[
���&���
��
����X��*��\
�P��	�
�N�+�w
������W�C���	�Q�D��?�]
�����^
�o	�R�	
���E��
��F��'��X��G��S���T�R��8�x��	�k�S��T��� ������D��9�!�����(��L�"�#�$�%�_
��&�J�:�U��}��)��

�;�<�
�E�+�'�
��H�=�V�*�H�y�,�+�W�3�>����?�(�I�,�-�`
�q��a
�����
���)�*�X�	�+��O���Y�,�-��.��/�����0�.������1�
�J�r�2�3�$
�	�4��5�6�7��
��b
��	����}�	�f���K����
��
�	�	�8�Z�9���:����
�c
���:���;�s�J	�@����	�t�;�
�/�<�d
��	����U���0��e
�=���f
�u�M�������
�>�	�[��A�L��i�?�	�
��
�	�@�A�v�B�C�\���j��[
��
�g
�D�E����F��B��G��
��H���(���M�C�h
���I�N�J��K�L���M���1�2�N���d�O�d�V�D�`��#�5	�P�]�Q�E�n�
��	���^�R�_�F�S�p	�i
���T�U��

��V�����	��G�����e�H�O�	��P�`�W�w�j
�X� ��3�
�a��I�Y�J�x�7�����-��
�4�k
��b�Z�-���.�c��[�	�r�;���K�5�L�X�� �	����\�]���6�%
�^�_����~�`�l
�y�a�z�~�����b�?��c�7�d�
���e��z�����M�f�g�/�h�|��@�d���i�N�����8�f���j�k��O�P����l�m�@�n�9�	�o��	�����<�s��\
�Q�.�R�N�@��
��p��S�{�q��A���������T�
����:����r��
�s�@
�W�;�t�u��v�w�x�y�|�����z�����3�Q����m��A�{�|���	��}�O�R����g���~�Z�
��������\�m
��}���<�X����	��	��
����
���Y�����0��������=��	����L������h�����i�>�>��/�?�U�������e�)�x
�t��f�S���
�y
��
�n
�
���T��@����������������	����z�U�g��V���o
���6	�p
�t���������
�A���J�V�h�Z��	���W�����N(rrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r	r	r	r	r	r	r	r	r	r		r
	r	r	r
	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r 	r!	r"	r#	r$	r%	r&	r'	r(	r)	r*	r+	r,	r-	r.	r/	r0	r1	r2	r3	r4	r5	r6	r7	r8	r9	r:	r;	r<	r=	r>	r?	r@	rA	rB	rC	rD	rE	rF	rG	rH	rI	rJ	rK	rL	rM	rN	rO	rP	rQ	rR	rS	rT	rU	rV	rW	rX	rY	rZ	r[	r\	r]	r^	r_	r`	ra	rb	rc	rd	re	rf	rg	rh	ri	rj	rk	rl	rm	rn	ro	rp	rq	rr	rs	rt	ru	rv	rw	rx	ry	rz	r{	r|	r}	r~	r	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r
r
r
r
r
r
r
r
r
r	
r

r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r 
r!
r"
r#
r$
r%
r&
r'
r(
r)
r*
r+
r,
r-
r.
r/
r0
r1
r2
r3
r4
r5
r6
r7
r8
r9
r:
r;
r<
r=
r>
r?
r@
rA
rB
rC
rD
rE
rF
rG
rH
rI
rJ
rK
rL
rM
rN
rO
rP
rQ
rR
rS
rT
rU
rV
rW
rX
rY
rZ
r[
r\
r]
r^
r_
r`
ra
rb
rc
rd
re
rf
rg
rh
ri
rj
rk
rl
rm
rn
ro
rp
rq
rr
rs
rt
ru
rv
rw
rx
ry
rz
r{
r|
r}
r~
r
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r
r
r
r
r
r
r
r
r
r	
r

r
r
r

r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r 
r!
r"
r#
r$
r%
r&
r'
r(
r)
r*
r+
r,
r-
r.
r/
r0
r1
r2
r3
r4
r5
r6
r7
r8
r9
r:
r;
r<
r=
r>
r?
r@
rA
rB
rC
rD
rE
rF
rG
rH
rI
rJ
rK
rL
rM
rN
rO
rP
rQ
rR
rS
rT
rU
rV
rW
rX
rY
rZ
r[
r\
r]
r^
r_
r`
ra
rb
rc
rd
re
rf
rg
rh
ri
rj
rk
rl
rm
rn
ro
rp
rq
rr
rs
rt
ru
rv
rw
rx
ry
rz
r{
r|
r}
r~
r
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r)ZBIG5_TYPICAL_DISTRIBUTION_RATIOZBIG5_TABLE_SIZEZBIG5_CHAR_TO_FREQ_ORDER�rr�/usr/lib/python3.6/big5freq.py�<module>+s�_vendor/chardet/__pycache__/langthaimodel.cpython-36.opt-1.pyc000064400000055420151733136230020231 0ustar003

�Pf,�@sd�Zd�Zeed�d�d�d�d��Zd�S)�������j�k�d����e�^���l�m�n�o����Y�_�p�q������@�H�I�r�J�s�t�f�Q���u�Z�g�N�R�`���[�O�T�h�i�a�b�\�������������X���������������v���������c�U�S��������������������������������K���4�"�3�w�/�:�9�1�5�7�+���,��0����'�>��6�-�	���=�����*�.���L��B�?��
��$��
�(�� �#�V�������������)��!��2�%���C�M�&�]���D�8�;�A�E�<�F�P�G�W�����g��@��?FzTIS-620ZThai)Zchar_to_order_mapZprecedence_matrixZtypical_positive_ratioZkeep_english_letterZcharset_nameZlanguageN(rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8rrrrrr9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rr(r�r�rrrrrrrrr�r�rrrrr�rrrrr�rrrrrrrrrrrrrrrrr�r�rrrrrrr�rrrrrrrrr�rrrrr�r�r�r�rrr�rrrrr|rrrrr�r�r|rrrrrrrrr�r|r�r|r�r�rrr|r�r|r|rrr�rrrrr|rrr�r�rrrrr�rrrrr�rrrrrrrrrrrrrrrrrrr�rrr|rrr�r|r|r|rrr�r|rrr�r�r�r�r�r�r�r|rrr�r�rrr|r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrr|rrrrrrrrrrrrrrrrrrrrrrr|r|r|r|r|r|r|rrrrr|rrr|rrrrr|r|r|rrr�r|rrr�rrrrr|r|r�r|rrrrr�r|r�r�rrr�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrr|r|rrrrrrrrr�r|rrrrrrrrrrr|r|r|r|rrrrr|r|rrrrr|r|rrr|rrr|r|rrrrr�r|rrr�r|r|rrrrr�r�r|r�r�r�rrr�r|r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrrrrr|r|rrrrrrrrr|rrr|r|rrrrr|r|rrr|r|r|r|r�r�rrr�r|r�r�rrr|r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrr|rrr|rrrrr|r|rrr|rrrrr|rrr�r�r|rrr|r|r|rrr|r|r|r|r|r�r|r�r|r|r�r�rrrrr|r�r�r�r|r|r�r�rrr�r�r�r�r�r�r�r�r�r�r|rrr�r�r|r�r�rrrrr|rrrrr|r�r�rrrrr�rrrrr�r|r|rrr�r|r|r�r�r�r�r|r|r|r�r|r|r�r�r�r|r�r�r|r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrr|rrrrr|r�r�rrrrr�r|rrr�r|r�r|r|r|r|r�r|r�r�r|r|r|r�r|r|r�r�r�r|r�r�r|r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrr|rrr|rrr|r�r|r|r�rrr|r�rrr|r�r|rrr|r|rrr�r|rrr|r|r�r|r|r|r|r�r|r|r�r�r�r�r|r�r�r|r�r�r�r�r�r�r�rrr�r�r�r�r�r�r�r�r�r�r�r�r�rrrrr|rrrrr|rrr|r|r|rrr|r|rrr|r|r�r|rrr|r|rrr�rrr|r|r|rrr|r|r|rrrrr|r�rrr�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�rrr�rrrrrrrrrrr�r�rrr�r|r|rrrrrrrrrrr�r�r�r�r�rrr�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r|rrr�r�r�rrr�r|r�r�r�r�r�rrr�r�r�r�r�r�r�r�r|r�rrrrrrrrr�r�r|rrr�r�rrr�rrrrr|rrrrrrrrrrr�r�rrrrrrr�r�r�rrrrr�r�rrr�r�r�r�r|r�r�r|r�r�rrr�r�r�r�r�r|rrr�r�r�r�r�r�r�r�r�r�r�rrrrrrrrr|rrrrrrrrrrrrrrr�r|r�rrrrr|r|r�r|r|r|rrr�r�r|r�r|r�r|r�r|r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrr
�r|r�r|rrrrrrr�r|r�r|r|r�r|r�rrr|r|r�r|r�r�r�r|r|r�r�r|r�r|r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrr|r�rrrrr�r�rrr�r|rrr�r�rrr|r�r�r|r�r|r|rrr|r�r�r�r�r�r|rrr�r�r�rrr�r|r�r|r�rrr�r�r�r�r�rrr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrr�r�rrr|rrrrrrr�rrr|r�rrr|r�rrr|r|r|r|r�rrrrr�r|r�rrr�r|rrr�r|r�r�rrr|r|r|r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|rrrrr|rrr|rrrrr|rrr|rrr|rrrrr|r�r�rrr|r|r|r�r|r|r|r�r|r|r�r|r�r�r|r|r|rrr�r�rrr�r�r�r�r�r�r�r�r|r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrr|rrr|r|r�r�rrr|rrr|rrr|r�rrr|r|r�r|r�r|r|r|r�r|r|r|r|r�rrr|r�r|r|r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrrr|rrr�r|rrrrr|r|rrr�r�r�r|r�rrrrr|r|rrr�r�r�rrr�r�r�r�rrr�r�rrrrr�r|r�r|r�r�r�rrr|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrr|rrr|rrrrr�r�rrr�r�r|r�r|r�r�rrr�r�r�r|rrr�r�r�r�r�r�r�r�rrr�r�r|r|r|r|r�r�r�r�r�r|r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrr|r|r�r�r|r�rrrrr|rrr|r|rrr|r|rrr�r|r|r�r|r�rrr|r�r|r|r|r|r|r�rrr|r�r|r|r|r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrrrrrrrrr�rrrrr�r|r�r�rrr|r�r�rrr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrr|r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrr�r|r|r|rrr�r�r�rrr�rrr|r�rrr|r|rrrrrrrrrrr�r�r|r|r|r�r|r|r�r|r�r|rrr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrr�r|rrr�rrrrr|rrrrr�rrrrr�rrr|r|rrr|rrrrrrr�r�r|r|rrr�r�r�r�rrr�r�rrr�r�r�r|r|r�r�rrr�r�r|r|r|rrr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrr|rrrrr|r�rrrrr|r|rrr�rrr|r�rrr|r�r�r|r|r�r|rrr|r�r�rrr�r�r�r�rrr�r�r|rrr�rrr�r�rrr�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrr�rrr|r|r|r�r|r�r�rrr�r�rrr�rrr�r�r|r�r�r�r�r|r�r�r�r�r|r�r�r�r�r|r�r�r�rrr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrr�r�r�r�r�r�rrrrrrrrr|r|r|r|r|r�rrr�r�r�r|r�r�r�r|r�r|r�rrr|r�r�rrr�r�r�r�r�rrr�r�r|rrr�r�r�rrr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|rrr�rrrrr�r|r�r�r�r�r�r�r�rrr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|rrr�rrr�r�r�r|r�r�r|r�rrrrr|rrrrrrr|rrr�r�r|r|r|r�r�r�r|r|r�r�r�r�r�r�r�rrr�r
�r�r�r|r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�rrr�r|r�r�r�r�r�r�r�r�r�r�r�r|rrr�rrrrr�r�r�r�rrr�r�r�r�r�r�r�rrr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrr�r|rrr�r|rrr�r�rrr�r|r|r�r�r|r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrr�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrr|r�r�r�r�r�rrr�r|r|r|r|r|r|r�r�r�r�r�rrr�r�r�rrr�r�r�r�r�r�r�r|r�r�rrr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�rrr�r|r|r�rrrrr|rrrrr�r�r�r�r|r|r�r|r�rrrrr�r�r�rrr|r�r�r�r�r|r�r�r�r�r�r�r�r�r|r�r�r�rrr�r�r|r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrr�r�r�r�r�r�rrr�r�rrr�rrr�r�r�r�r�rrr|r�r�r�rrr�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�rrrrr�rrr|r�rrrrr�r|r|r�r�r|r�r�r�r|r�r�r�r�r�rrr�r�r�rrr�r�r�r�rrr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrr�r�r|r�rrrrrrr|r|r�r�r�r�r�rrr�r�r�r|r|r�r�r�r�rrr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrr�r|rrr�r|r�r�r|r�r�rrr�r�r�r|r�r�r�r�r�rrr�r�rrr�r�r�r|r|r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrr�r�rrr�r|r�r�r|r|r�r�r|r�r�r�r�rrr�r|r�r�r�r�r|r�rrr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrr�r�r�r|r|r�r�r�r|r�r|r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�rrr�r�r�r�r�r�r�r�rrr|r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�rrr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|rrr|r|r�r�r�r�r�r�r�r�r|rrr|r�r|r|rrr�r�r�r|rrr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrr|r|r�r�r�r�r�r�r|r�r|r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�rrr�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrr�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�rrr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrr�r�r�r�r�r�rrr�r�r�r�r�r�r�rrr�r�r�r�r�r�r�r�r�r�rrrrr�r�r�r�r|rrr�r�r|r�r�r�r�r�r�r
|r�r�r�r�r�r|r�r�r�r|r�r�r�r�r|r�r�rrr�r�r�r�rrr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrr�r�r�r�r�r�r�r�rrr�r�r�rrr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrr|r�r�r�r�r�r�r�r|r�r�r�r�r�r|r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�rrr�r�r�r�r�r|r�r�r�r�r�r�r�r�r|r�r�r�r�r�rrrrr�r�r�r�r|r�r�r�r|r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r|r|r�r�r�r�r�r�r�r�r�r�rrr�r�r�r�r�r�r�r�r�rrr�r�r�r�r�r�r�r�r|r�r�r�r�r�rrr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|rrr�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r|r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�rrr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�rrr�r�r�r�r�r�r�r�r�r�r�r�r�rrr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r|r�r�r|r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�rrr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r|r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r
�r�r�rrr�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r|r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�)ZTIS620CharToOrderMapZ
ThaiLangModelZTIS620ThaiModel�r�r��#/usr/lib/python3.6/langthaimodel.py�<module>%s*
_vendor/chardet/__pycache__/chardistribution.cpython-36.pyc000064400000014135151733136230020035 0ustar003

�Pf�$�@s�ddlmZmZmZddlmZmZmZddlm	Z	m
Z
mZddlm
Z
mZmZddlmZmZmZGdd�de�ZGdd	�d	e�ZGd
d�de�ZGdd
�d
e�ZGdd�de�ZGdd�de�ZGdd�de�ZdS)�)�EUCTW_CHAR_TO_FREQ_ORDER�EUCTW_TABLE_SIZE� EUCTW_TYPICAL_DISTRIBUTION_RATIO)�EUCKR_CHAR_TO_FREQ_ORDER�EUCKR_TABLE_SIZE� EUCKR_TYPICAL_DISTRIBUTION_RATIO)�GB2312_CHAR_TO_FREQ_ORDER�GB2312_TABLE_SIZE�!GB2312_TYPICAL_DISTRIBUTION_RATIO)�BIG5_CHAR_TO_FREQ_ORDER�BIG5_TABLE_SIZE�BIG5_TYPICAL_DISTRIBUTION_RATIO)�JIS_CHAR_TO_FREQ_ORDER�JIS_TABLE_SIZE�JIS_TYPICAL_DISTRIBUTION_RATIOc@sLeZdZdZdZdZdZdd�Zdd�Zd	d
�Z	dd�Z
d
d�Zdd�ZdS)�CharDistributionAnalysisig�G�z��?g{�G�z�?�cCs0d|_d|_d|_d|_d|_d|_|j�dS)N)�_char_to_freq_order�_table_size�typical_distribution_ratio�_done�_total_chars�_freq_chars�reset)�self�r�&/usr/lib/python3.6/chardistribution.py�__init__.sz!CharDistributionAnalysis.__init__cCsd|_d|_d|_dS)zreset analyser, clear any stateF�N)rrr)rrrrr=szCharDistributionAnalysis.resetcCsX|dkr|j|�}nd}|dkrT|jd7_||jkrTd|j|krT|jd7_dS)z"feed a character with known length�rriN���)�	get_orderrrrr)r�charZchar_len�orderrrr�feedFs
zCharDistributionAnalysis.feedcCsT|jdks|j|jkr|jS|j|jkrN|j|j|j|j}||jkrN|S|jS)z(return confidence based on existing datar)rr�MINIMUM_DATA_THRESHOLD�SURE_NOr�SURE_YES)r�rrrr�get_confidenceTs

z'CharDistributionAnalysis.get_confidencecCs|j|jkS)N)r�ENOUGH_DATA_THRESHOLD)rrrr�got_enough_datadsz(CharDistributionAnalysis.got_enough_datacCsdS)Nrr r)r�byte_strrrrr!isz"CharDistributionAnalysis.get_orderN)
�__name__�
__module__�__qualname__r*r'r&r%rrr$r)r+r!rrrrr(s	rcs$eZdZ�fdd�Zdd�Z�ZS)�EUCTWDistributionAnalysiscs$tt|�j�t|_t|_t|_dS)N)	�superr0rrrrrrr)r)�	__class__rrrrsz"EUCTWDistributionAnalysis.__init__cCs0|d}|dkr(d|d|ddSdSdS)Nr���^r�r r)rr,�
first_charrrrr!xsz#EUCTWDistributionAnalysis.get_order)r-r.r/rr!�
__classcell__rr)r2rr0qsr0cs$eZdZ�fdd�Zdd�Z�ZS)�EUCKRDistributionAnalysiscs$tt|�j�t|_t|_t|_dS)N)	r1r8rrrrrrr)r)r2rrr�sz"EUCKRDistributionAnalysis.__init__cCs0|d}|dkr(d|d|ddSdSdS)Nr�r4rr5r r)rr,r6rrrr!�sz#EUCKRDistributionAnalysis.get_order)r-r.r/rr!r7rr)r2rr8�sr8cs$eZdZ�fdd�Zdd�Z�ZS)�GB2312DistributionAnalysiscs$tt|�j�t|_t|_t|_dS)N)	r1r:rrrr	rr
r)r)r2rrr�sz#GB2312DistributionAnalysis.__init__cCs>|d|d}}|dkr6|dkr6d|d|dSdSdS)Nrrr9r5r4r r)rr,r6�second_charrrrr!�sz$GB2312DistributionAnalysis.get_order)r-r.r/rr!r7rr)r2rr:�sr:cs$eZdZ�fdd�Zdd�Z�ZS)�Big5DistributionAnalysiscs$tt|�j�t|_t|_t|_dS)N)	r1r<rrrrrr
r)r)r2rrr�sz!Big5DistributionAnalysis.__init__cCsX|d|d}}|dkrP|dkr:d|d|ddSd|d|dSndSdS)	Nrr�r5��?�@r r)rr,r6r;rrrr!�sz"Big5DistributionAnalysis.get_order)r-r.r/rr!r7rr)r2rr<�sr<cs$eZdZ�fdd�Zdd�Z�ZS)�SJISDistributionAnalysiscs$tt|�j�t|_t|_t|_dS)N)	r1rArrrrrrr)r)r2rrr�sz!SJISDistributionAnalysis.__init__cCsr|d|d}}|dkr0|dkr0d|d}n&|dkrR|dkrRd|dd}ndS||d	}|d
krnd}|S)
Nrr��������r@�r r r)rr,r6r;r#rrrr!�sz"SJISDistributionAnalysis.get_order)r-r.r/rr!r7rr)r2rrA�srAcs$eZdZ�fdd�Zdd�Z�ZS)�EUCJPDistributionAnalysiscs$tt|�j�t|_t|_t|_dS)N)	r1rIrrrrrrr)r)r2rrr�sz"EUCJPDistributionAnalysis.__init__cCs0|d}|dkr(d|d|ddSdSdS)Nr�r4r5rr r)rr,r"rrrr!�sz#EUCJPDistributionAnalysis.get_order)r-r.r/rr!r7rr)r2rrI�srIN)Z	euctwfreqrrrZ	euckrfreqrrrZ
gb2312freqrr	r
Zbig5freqrrr
Zjisfreqrrr�objectrr0r8r:r<rArIrrrr�<module>sI_vendor/chardet/__pycache__/jisfreq.cpython-36.pyc000064400000126637151733136230016136 0ustar003

�Pf�d�@sdZdZ�dZ�dS(g@i�(������'�O��}�������]�
�
���
���������������X�}�����k��g
�
��k������������%�&�0�1�,�-�������������<���p�������������g������W�X�����h
�"�������
�	��\����
�/
�����������0
��
�h�����������������������������������������������������������������������������	�
���
������������������� ��!�"�#�$�%�&�'�(�)�*�+�,�-�.�/�0�1�2�3�4�5�6�7��V�j�4����B���8�9�:�;�<�=�>�v�����S��e��f���d	�+����a�w�����I���~���
��?�@�A�B�C�D�
��-���i���
��E�^�.���3��i
�F�/�Y�������j��
��G�H�I�J�q��1
��Y��k�/��2���#�����*�������[��\�5��!�!�	�%�@�l��'�A����4�
�������<����-���7�S����~�K�=��E��;���7�7�8�m�&���
��O�K�=�~�d���L�M�N�O�P�Q�R�S�T�U�V�Y�>�J�"�p	�p������T�_���.�X���L�j�e��9�P� �l�y�����D�����)�h���F�?��+���g�c����B�]�N����8�j�:�5���7���R�4�G�d�����n��h�t�6�3�$�W�C���:
�x����*�V	��W�X�Y�Z�[�\�]�^�_�`��a�b�c�d�e�f�g�h�i�j�k�l�m�n�o�p�q�r�s�t�
�u�v�w�x�y�z�{�|�������l�}��~���	��
�����
�������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������k���
�>��
��
�	��
	�j
�����Z�[�\�]������	�
���
������������������� �!�"�#�$�%�&�'�(�)�*�+�,�-�.�/�0�1�2�3�4�5�6�7�8�9�:�;�<�=�>�?�@�A�B�C�D�E�F�G�H�I�J�K�L�M�N�O�P�Q�R�S�T�U�V�W�X�Y�Z�[�\�]�^�_�`�a�b�c�d�e�f�g�h�i�j�k�l�m�n�o�p�q�r�s�t�u�v�w�x�y�z�{�|�}�~����������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������	�
���
������������������� �!�"�#�$�%�&�'�(�)�*�+�,�-�.�/�0�1�2�3�4�5�6�7�8�9�:�;�<�=�>�?�@�A�B�C�D�E�F�G�H�I�J�K�L�M�N�O�P�Q�R�S�T�U�V�W�X�Y�Z�[�\�]�^�_�`�a�b�c�d�e�f�g�h�i�j�k�l�m�n�o�p�q�r�s�t�u�v�w�x�y�z�{�|�}�~����������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������	�
���
������������������� �!�"�#�$�%�&�'�(�)�*�+�,�-�.�/�0�1�2�3�4�5�6�7�8�9�:�;�<�=�>�?�@�A�B�C�D�E�F�G�H�I�J�K�L�M�N�O�P�Q�R�S�T�U�V�W�X�Y�Z�[�\�]�^�_�`�a�b�c�d�e�f�g�h�i�j�k�l�m�n�o�p�q�r�s�t�u�v�w�x�y�z�{�|�}�~����������������������������������������������������������������������������������������������������������������������������������������
����������2
�����{�T���a
�
�^�����|��=��m�k
�����	�V�y�	��
�q	�����
�	������!��	��������C�O�3��������r	���n���+�����
�p�q���;
�A������C�0��n�
����T�C�o�
��
�#	���~��P����������	���	��$	�z�����&���������������_���U���
�s	��	����I����������3
����$�W�1�5�P��	����������X����l
����H��	�������
�
�W���~�p��$����@�L����	�����u���J�I	�W	����_���	�<�����]�D�������h���������f�-��}�t	���	�������,���
�b����X���F��{�`����+�3�q��m��4
�������B�l�X	�
��%
���%	�2�u��	��	��J��|�r��@����������
���	�L
�������b
�J	��c�����������4	�R��m
�T��������������e	����?�
������-�	��*�E����+����&	�	����/�����.�s�[���+��
��
�B�c
��������	�)��,���u	��	��9�&
�������v	���Y���3��
�����D�����d���|�(�y������<�8��	�i����d
�5
�B��n���������|��	�!���)�@�n
�m��	�0�a�����	�z�'�����	�W���[�v�����	����	�o
���	��
���'
���"���9������J���h����e
���\�����p
�@��Q�9������	����w	�i�E��J�I��	��������f����
���1�����
����'	�r�
������
���c��	�q��x����
�2�������	�*���b�F��v���d�
����!�R����Q����P����������(	����_��`���f
��6
���A����a�����u�T��2��������^�C����
��[����
�v�%������n�!�F��Z�^������d������x	���]���B��c��
�����P�q�	��H�Y	����(
���
���g�}�K�	����4���W�����C�g
�-�d���k��4�}��q
�~�+�	����h�@	�	��@�A	�D�:���e��
���q�w�����������d���Z��*�M
�[����s�Z�
����>��'�-����
�����)	�������t�F�7
���t�#��������Y���f�k�b�1���
��6�o�c��q��	������
�u��:�����N
���U�=��	�v���l�N������]��;������
����l�L�	�}�������B	��8
�Q���#�`�T���	�x��r��	���������	���3�n���������{���@��������9
����5�h
�d�Z	����i������s�-�r
�������[���0������.�S�	�R�O
�������;��4�y	�
�;��z	��p��� ����:
�(�|�
��"����������O��e����
�C	�t�)
��6�K����8��P
��h���������3�$�����*
�=�b�e���
�4�����	�.�7��\����j����������/������=���^���5	����	�\�������9�#�+�
�s
�,��7���Y�����
�M���+
�i
�Q���6	���������y���s�#�{	��
�U�f�<����v����)��
�������m�<���	�����g��	�l��� �D	�b�u��
��D�B����
�[	���8����>�����������]���D�������6�Z���	�5���
��
�<�m��
�,�	��u�^�	����g����I���g����\�:�
�M�t�	���E��o��������E�R�����E�j
���g�W���K�	�C��=���]�$�!��	�����`�K�v���0�i��^����3���"��������a�k
��;
�w��������
�y�������P���	�w�����t
�����������J�a���]�������	�����L�h�������
�u
�j���#�C����	�a���!�s�|	��	�Y��
����h�O�5���z�i�
�S���L���.��&��	�<
�7���s�A���M��
��s���*�,
�%�
��	��k���&�f���O�j�"��(�-��[�������-�=��}	���M�S���������������
�
��
��~	��J�t�k�v�e�y�����<
���f��M�k��
���o�������w�v
�l���]���.��
������/�f	�q�$�g��G���n��>�6�=
�������N��	�x��e�*	��h������$�����o�b�,�����t����	�����
�V���
�i������w
���p�H���V��i�<�Z�	�8�r�w��&������/�������>��>
�E	�x���&�e���w�	����5�����
��\����S�i������2������+�'���%����x
�O��
�Q
���������_�H������g����y
��
�0�&�+	������l
�'��'�z��	�	��_����������?
��
����	��z
���m
�|�
��	�g	�0���� ��*���
�`�w���
�#���c�)�����R
�������T����r��V�����_�����r����
�`�x���
���f���n
�=
�*�A���	�(���x��S
��{�T
��9���M��r����;��(��%��[�a�b���D����-
�\	�����o
�E�m�)���!���c��,	�1��]	����>�I����	�T���������y��k�z�x���N�	�����	�5����"���H�<�d��
���h	��	��
�p
���:�L�1�����
���>
���q
�?�Z�M�I���B�K	�D��#�
�
�Y�>������`�������������� ����c���������T��a��2�y�/���"�U
�����b����/���	����
�{�
��j����5�*�������a�x����	��F������7����	�?��w���������+��8����
�N��
����,���n��
���o�s�_�?���	�����	����?�0��	�	��y�3�f��=���l��	��v�]��	�	�����z��
��y�j��u����p���j���	
�i�z����r
������������F��'��	�{
�n��x�u�$����M�����k��p��
�^	�q�
�`�|��	��

�|
�	��
���	�
�����	�G���-���Z�1�Y���-	�	�e����[�C����e��.�-�����	���
�
�7	�b�l�	������{���.�	����z���
�
��?
����9��
�����}��
�a��	�
�Q�	�d�;�V
���/�^�D���?�m��
��
�9�I�
�����
�����
�{�}� ������&���~���X���}
��
���9����:��"�\����
�_	���8�W�~
�^�%�P�s��w����8	��L����.
�����
����N�����P��S������:�R�'�0�b���
�������z��	�����K�������
�r��q�
�����n�L	�����������X�@
���g�i���4��:����r�	��Q��
��	�;�'��>���b�m���w�����s
�����A
��t
�9	�����	�o�^���2��c��������r����!��t��1�{���������s��������x��v�a�p����
���L�O���U��.����)�.	��E�	�"��y��	������(�
���q�E�6�
��
�2�u�6������.��i	����V���s��
���G�G�j�t��W��	���`����u����
���{�Q��	�O�C���
���J�	�	�	�$������/	����_�����j	�R��7���/
��3�D�u
�6�X�4��������>����0
�`�`	�
������U��� �
�r���)���5���H��� 
���~�r��
������	���]����������@
����
�a���v����/�!�6������7�w��	�3�c�8�x����B
�����M	���F	��g����(��
�:	����H���
�f����"��
�~������
�y������ �!�N��
�4�A
���d�e����z������������0	�����������{������
��f�(�l���G�	����^�B
�m�F�|��U����2�R�����#���
����F����U����1���c�k��N	�S�	�����
����$�O	���Y����
���G����W
�	�C
�?����~����U�F�����������
�C
�	�����e�
������/�Q�_�v
�;���P�)���G�m�	���G�,�I�z�_�	���	�9�n��
�A�	�
�X
���#�V�)�<�%��}���H�w
�5��p�.�����m�����
�#�h�|�b����@�(�^�z����A��
� ��"��g��|���������6��X���,�����		����$�#�I�k	�����:�	��Q��o�h�V�$�
���t����Y
�,����
��D
���	�~��
�
���%��� ��������
� �7�/�
	������	�E�V����
�E
�u��&�������	���o���Z��i���	�����!���n�8���'����������	��G��R�2����:��,����Z
�l��(�X�P	�?���	�o��������W�*����;�8��&�)����
�o������H�_���0���x
��0��;	��
���X��	�����1��Z�*�%��	���|�d�`���S�j�}�\�����G	���	�f�q�%�B��1
�p��"�������|������	���l	�2
��
��
���@�h��;����	��	���&�����}��%�����?��	���a	�y
��
����	��=�1��<��{�#��������
�A��
��t���	��>�
�K�3
���=����\�����'�A�+�g���b	�K�������
���
���4
��*��m	�	��
�o�N��0�(�i��$�4� �{����Q	����U�����,�`���	����
�j�)� ���-�6���	�F
�2�)��
�*��YN(rrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r	r	r	r	r	r	r	r	r	r		r
	r	r	r
	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r 	r!	r"	r#	r$	r%	r&	r'	r(	r)	r*	r+	r,	r-	r.	r/	r0	r1	r2	r3	r4	r5	r6	r7	r8	r9	r:	r;	r<	r=	r>	r?	r@	rA	rB	rC	rD	rE	rF	rG	rH	rI	rJ	rK	rL	rM	rN	rO	rP	rQ	rR	rS	rT	rU	rV	rW	rX	rY	rZ	r[	r\	r]	r^	r_	r`	ra	rb	rc	rd	re	rf	rg	rh	ri	rj	rk	rl	rm	rn	ro	rp	rq	rr	rs	rt	ru	rv	rw	rx	ry	rz	r{	r|	r}	r~	r	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r�	r
r
r
r
r
r
r
r
r
r	
r

r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r 
r!
r"
r#
r$
r%
r&
r'
r(
r)
r*
r+
r,
r-
r.
r/
r0
r1
r2
r3
r4
r5
r6
r7
r8
r9
r:
r;
r<
r=
r>
r?
r@
rA
rB
rC
rD
rE
rF
rG
rH
rI
rJ
rK
rL
rM
rN
rO
rP
rQ
rR
rS
rT
rU
rV
rW
rX
rY
rZ
r[
r\
r]
r^
r_
r`
ra
rb
rc
rd
re
rf
rg
rh
ri
rj
rk
rl
rm
rn
ro
rp
rq
rr
rs
rt
ru
rv
rw
rx
ry
rz
r{
r|
r}
r~
r
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r
r
r
r
r
r
r
r
r
r	
r

r
r
r

r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r 
r!
r"
r#
r$
r%
r&
r'
r(
r)
r*
r+
r,
r-
r.
r/
r0
r1
r2
r3
r4
r5
r6
r7
r8
r9
r:
r;
r<
r=
r>
r?
r@
rA
rB
rC
rD
rE
rF
rG
rH
rI
rJ
rK
rL
rM
rN
rO
rP
rQ
rR
rS
rT
rU
rV
rW
rX
rY
rZ
r[
r\
r]
r^
r_
r`
ra
rb
rc
rd
re
rf
rg
rh
ri
rj
rk
rl
rm
rn
ro
rp
rq
rr
rs
rt
ru
rv
rw
rx
ry
rz
r{
r|
r}
r~
r
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
r�
rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrr)ZJIS_TYPICAL_DISTRIBUTION_RATIOZJIS_TABLE_SIZEZJIS_CHAR_TO_FREQ_ORDER�rr�/usr/lib/python3.6/jisfreq.py�<module>,s$_vendor/chardet/__pycache__/latin1prober.cpython-36.opt-1.pyc000064400000005456151733136230020027 0ustar003

�Pf��@s^ddlmZddlmZdZdZdZdZdZdZ	dZ
dZd	Zd
Z
eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee
ee
ee
eeeeeeeeeeeeeeeee
eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee	e	e	e	e	e	e
e
e	e	e	e	e	e	e	e	e
e
e	e	e	e	e	ee	e	e	e	e	e
e
e
eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee�fZdZGdd�de�Zd
S)�)�
CharSetProber)�ProbingState��������csLeZdZ�fdd�Zdd�Zedd��Zedd��Zd	d
�Zdd�Z	�Z
S)
�Latin1Probercs&tt|�j�d|_d|_|j�dS)N)�superr�__init__�_last_char_class�
_freq_counter�reset)�self)�	__class__��"/usr/lib/python3.6/latin1prober.pyraszLatin1Prober.__init__cCs t|_dgt|_tj|�dS)Nr)�OTHr�FREQ_CAT_NUMrrr)rrrrrgszLatin1Prober.resetcCsdS)Nz
ISO-8859-1r)rrrr�charset_namelszLatin1Prober.charset_namecCsdS)N�r)rrrr�languagepszLatin1Prober.languagecCsb|j|�}xP|D]H}t|}t|jt|}|dkr@tj|_P|j|d7<||_qW|j	S)Nrr)
Zfilter_with_english_letters�Latin1_CharToClass�Latin1ClassModelr�	CLASS_NUMr�NOT_MEZ_stater�state)rZbyte_str�cZ
char_classZfreqrrr�feedts



zLatin1Prober.feedcCs\|jtjkrdSt|j�}|dkr(d}n|jd|jdd|}|dkrPd}|d}|S)Ng{�G�z�?grrg4@g\��(\�?)rrr�sumr)rZtotalZ
confidencerrr�get_confidence�s
zLatin1Prober.get_confidence)�__name__�
__module__�__qualname__rr�propertyrrr!r#�
__classcell__rr)rrr`srN)@rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr)Z
charsetproberrZenumsrrZUDFrZASCZASSZACVZACOZASVZASOrrrrrrrr�<module>sh	_vendor/chardet/__pycache__/euckrfreq.cpython-36.opt-1.pyc000064400000056746151733136230017424 0ustar003

�Pf�4�0	@sdZdZ�	d2Z�	d1S(3	g@�0	�
��x�t�����H�a�������+��W�u��h��]�������������v�w�������m�F�!�p�������������x���/������������9�����t���-�y��K������������O��n������������0����<�4�{����������i���r����������������������X�X���������Y���&��P�������������^��������������9�������������Q���"��t������]�{�7����{��;��u���z�/��|�������7���.��������������{����#�|�}��~��t�8��_�	�
���
�!����_���������*��u��`�"���|�������a�������?��R�!� �/���!�"�=���#��$�%�&�'�(�)�,���'�b�$�*��+��-���,�������&�U����#���-�.�'���f�/�s��0�������� ��9�e�[�1����Z���:�����2�3���G����y�4�����5�6�7�,�w����s�8����9�:���~�;��<�;�}�=�>�?�o�)����@��A�B����2�Y�C�D��<�E�F�G�H�I�%�J�K�L�M�N�O��`�>�P��=�Q��R��S�T�;�������U��V�W�X�4�Y���Z�[��\�]���^���_�"�P���`�;��~�H�a��v��z�?����b���<�c�d�d�e�f�c�0��d��g�y���h�i�s�0�j�=�k��l�����<�b���U������I�m�n�o��p�q�r�s�t�u�������6�v�w��*��]�x�y���z�Z��-�:��b�
�{�|��&�'����5����>�}�~�w��g����6��%���(��v����w������E������������f���V��7�����B��N��[��'���������S�������e���x������������������������?�����q��f��(�)����~���\���������)��������������$����������l�����~�����C�����@��������2����K��z�V��������Q�r�f������ �h�+�3��1�������g�(����z��������������������A��<�j���M�g��2�������������������V�h����J�����0����b�������������Y���������a�!�*����������K�D�8��R��B��@����������y����������X�:����#������i���G����k����=��������!������J�����=����}����j�����������������������E����������j�O�4�����������������	�v�]��C���������o�����
����l�c�A������������T����k����������3�*��q�����>��������+������;��p��x�������������	��
����
������l�������)������m�8���D��������������7�L�B���D�������t���� �!�
�"�#�$�%�R�&�'�(�)�*��+�,��,�-�.�m���
�^�/�c��E�����a�m�0�E�1�2�3����4�T�����5������6�7��n���o�8�9�F����:�G�;�<�=�>�?�@�A�B�C�D�E�$�F�G�����%���p�H�I�J�K�L�����������M�N�O�P�Q�R���S�/�T��U�����V��I�H��������W��X��q�Y�Z�[�\���r��s�]�^�_�`��v�L�a����.��b�F�>�����j�c�Z����B�6��`�d�e����|��f���5�g��h��i�H��j����k��l���1�m�n�o�p�q�r�C�s�t�u�v�w���x�y�z�{���|�}�~���������O��������������E���q���I��\��-���S����
��e�����l��M���Q��P���^��
�-�F���������������������
�.���t�����J���������g�������������������������������������u�����9�	������	���$���������5�%��k�L����������������A��������������R��u���������)�����:��������������"�$�v�����c�Z�����������*�W��K���L���+���������������	�B�����?���������������M�[�5�������n����������C���'���������������
�������	��������������F�T�/����������8��u������K���(�M���i��������������T��?���������e� �(��������%�0����O��	�8�	�	�	�	�	�����	�	�	����#����		����V�
���P�� �M�W���
	��	��|�	�
	�S������������	�	�����	�	�	�	�	�	���	�	��9�D���������	�3����	�	�	�{�	����	�	�	�� 	�������!	�k�������N���Y���"	�#	�$	�%	�&	�'	�(	�]�)	�*	�+	�,	�-	�.	��/	�����>�����1	������\��2	��3	�����4	�5	�6	�w�7	��8	��3�����9	�:	�L�����;	���<	���������������=	��J�>	�?	�@	���A	���B	�C	�D	���E	�����F	�G	�H	�I	������������J	�K	����L	�M	�N	��I�O	�P	���Q	�R	�S	��T	���U	�V	�W	�X	���Y	�Z	�[	�\	�]	����^	�_	�`	�a	����b	�c	�d	�e	����f	�g	�U���x� �h	����i	�j	�����4�&�������!�����S�y������"�
���#�k	���l	�m	�n	�����p�Q��.�o	�U��O���p	���q	�r	�s	�x�t	��u	�v	�w	���$�x	��y	���`�z	�{	�|	�}	���~	�	�	�	�	�	�	�	���C�������%�y���o�	���	�	�	�	�	�	��	�	�	�	����	����	�	��@����	�G�	������2��������	�	�	�	�N�	���������	�	�	�	�	�r�	���z�n�����P�	�#�	�&�Q�	���m�	�c����	�'�	�w�	�	���(�l�@�������)���*�	�	�	�	���	�	�	�	���	����	�	�	�H��	�	��	���	�	�	���	�	�r�	���A�	�������	�}�,���:�����
��I�	�N��1�	�W������������	�+��	��	�1���b����	�q��	�,��	���e����_��d��	��	��	��	��	��	����2�����	���	��	��	��	���	���	��	��-��	�����	��	��	��	��	��	����	��	��	��	��	��	���	����	��	����7��	����	���	��	��	��	��	��	��	������������,�G�������	��^��	��	�.��	��g���	����	�	�	���	�����_��	�	�	���h�h��{�	�|����3�	�	�	�����	�
�
�\�����
�
�
�
�
�
�
��	
�

�
�
����
�j�
�i��4��
�
�
�
�
�
���
�
�
�
��6�d���/�
�
����o�
��R������	�&�
�k�n�z��������
��
����X��d��S��}�
�}����~������� 
�!
�N�"
����������#
���s�$
�%
�&
�f�D��1�'
���(
�@�)
�^�����*
���+
�,
�-
�.
�/
�J�+�0
�1
�2
���T��3
�4
�5
�6
���7
�A�.�����8
�9
��"�:
�;
���<
�=
�>
���0�?
��@
��A
�B
��C
�D
�E
�F
�G
���_�[�H
�I
���`�a�J
�����K
�L
�M
�N
��O
��i����P
�Q
�R
N(0	rrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxrryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r	r	r	r	r	r	r	r	r	r		r
	r	r	r
	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r 	r!	r"	r#	r$	r%	r&	r'	r(	r)	r*	r+	r,	r-	r.	r/	r0	)Z EUCKR_TYPICAL_DISTRIBUTION_RATIOZEUCKR_TABLE_SIZEZEUCKR_CHAR_TO_FREQ_ORDER�r1	r1	�/usr/lib/python3.6/euckrfreq.py�<module>)s(_vendor/chardet/__pycache__/big5prober.cpython-36.pyc000064400000002021151733136230016507 0ustar003

�Pf��@sDddlmZddlmZddlmZddlmZGdd�de�ZdS)�)�MultiByteCharSetProber)�CodingStateMachine)�Big5DistributionAnalysis)�
BIG5_SM_MODELcs4eZdZ�fdd�Zedd��Zedd��Z�ZS)�
Big5Probercs,tt|�j�tt�|_t�|_|j�dS)N)	�superr�__init__rrZ	coding_smrZdistribution_analyzer�reset)�self)�	__class__�� /usr/lib/python3.6/big5prober.pyr#s
zBig5Prober.__init__cCsdS)NZBig5r)r
rrr
�charset_name)szBig5Prober.charset_namecCsdS)NZChineser)r
rrr
�language-szBig5Prober.language)�__name__�
__module__�__qualname__r�propertyrr�
__classcell__rr)rr
r"srN)	ZmbcharsetproberrZcodingstatemachinerZchardistributionrZmbcssmrrrrrr
�<module>s_vendor/chardet/__pycache__/escprober.cpython-36.opt-1.pyc000064400000004742151733136230017406 0ustar003

�Pfn�@sXddlmZddlmZddlmZmZmZddlm	Z	m
Z
mZmZGdd�de�Z
dS)�)�
CharSetProber)�CodingStateMachine)�LanguageFilter�ProbingState�MachineState)�HZ_SM_MODEL�ISO2022CN_SM_MODEL�ISO2022JP_SM_MODEL�ISO2022KR_SM_MODELcsVeZdZdZd�fdd�	Z�fdd�Zedd��Zed	d
��Zdd�Z	d
d�Z
�ZS)�EscCharSetProberz�
    This CharSetProber uses a "code scheme" approach for detecting encodings,
    whereby easily recognizable escape or shift sequences are relied on to
    identify these encodings.
    Ncs�tt|�j|d�g|_|jtj@rD|jjtt	��|jjtt
��|jtj@r`|jjtt��|jtj
@r||jjtt��d|_d|_d|_d|_|j�dS)N)�lang_filter)�superr�__init__�	coding_smrrZCHINESE_SIMPLIFIED�appendrrrZJAPANESEr	ZKOREANr
�active_sm_count�_detected_charset�_detected_language�_state�reset)�selfr)�	__class__��/usr/lib/python3.6/escprober.pyr*szEscCharSetProber.__init__csNtt|�j�x"|jD]}|s qd|_|j�qWt|j�|_d|_d|_dS)NT)	r
rrr�active�lenrrr)rr)rrrr:szEscCharSetProber.resetcCs|jS)N)r)rrrr�charset_nameEszEscCharSetProber.charset_namecCs|jS)N)r)rrrr�languageIszEscCharSetProber.languagecCs|jr
dSdSdS)Ng�G�z��?g)r)rrrr�get_confidenceMszEscCharSetProber.get_confidencecCs�x�|D]�}x�|jD]�}|s|jr&q|j|�}|tjkrhd|_|jd8_|jdkr�tj|_|j	Sq|tj
krtj|_|j�|_
|j|_|j	SqWqW|j	S)NFr�)rrZ
next_staterZERRORrrZNOT_MEr�stateZITS_MEZFOUND_ITZget_coding_state_machinerrr)rZbyte_str�crZcoding_staterrr�feedSs"





zEscCharSetProber.feed)N)�__name__�
__module__�__qualname__�__doc__rr�propertyrrrr"�
__classcell__rr)rrr#srN)Z
charsetproberrZcodingstatemachinerZenumsrrrZescsmrrr	r
rrrrr�<module>s_vendor/chardet/__pycache__/mbcssm.cpython-36.pyc000064400000042131151733136230015741 0ustar003

�Pf�c�@sl	ddlmZdZejejejdejejejejejejejejejejejejejejejejejejejejfZdZedeedd�Zd Z	ejejdejejejddejd	ejejejejejejejejejejejejejejejejejejejejejejejejejejejejejejejejejejejejejejejejejejejejejejejejejejejejejejejejejejejejfFZ
d!Ze	d
e
edd�Zd"Z
ddddejejejejejejejejejejejejejejejejejejejejejejejejejejdejdejejejejejejejf(Zd#Ze
d	eedd�Zd$ZejejdejejejejejejejejejejejejejfZd%Zedeedd�Zd&Zejejejddddejejejejejejejejejejejejejejejejejejejejejejejejejdejejejejejejejejejejejejejejejf0Zd'Zed
eedd�Zd(Zejejejejejejdejejejejejejejejejejejejejejejejejdejejejejejejejejejdejejejejejejejejejejejejejf0Zd)Zed
eedd�Zd*ZejejejdejejejejejejejejejejejejejejejejejejejejfZd+Zed	eedd�Z d,Z!dd
d
ejddejejejejejejejejejejejejd	d	d	d	ejejd	d	d	d	d	ejd	d	d	d	d	d	dd
d
ejddd	d	ejd	d	d	d	d	d	d	ejejejejf8Z"d-Z#e!d	e"e#dd�Z$d.Z%d	d	d
d	ddejejejejejejejejejejejejdddejejejdddejdejd	d	d
d	dddddejdddejejejdddddejdejejejf8Z&d/Z'e%d	e&e'dd�Z(d0Z)ejejejejejejdd
dddd
d	dddejejejejejejejejejejejejejejejejejejejejejejejejejejejejejejejejejejddddejejejejejejejejejejejejejdddejejejejejejejejejejejejd
d
d
d
ejejejejejejejejejejejejejejd
d
ejejejejejejejejejejejejddddejejejejejejejejejejejejejejejdejejejejejejejejejejejejddddejejejejejejejejejejejejejejejdejejejejejejejejejejejejdddejejejejejejejejejejejejejejejejejejejejejejejejejejejf�Z*d1Z+e)de*e+dd�Z,dS)2�)�MachineState�����ZBig5)Zclass_tableZclass_factorZstate_tableZchar_len_table�name����	�
ZCP949zEUC-JPzEUC-KRzx-euc-twZGB2312Z	Shift_JISzUTF-16BEzUTF-16LE���
���zUTF-8N(rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr)rrrrr(rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r
r
r
r
r
r
r
r
r
r
r
r
rrrr
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
r
rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr)
rrrrrrrrrr(rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr)rrrrrr(rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr)rrrr(rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr	rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr)rrrrrrr(rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	
r)rrrrrrr(rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr)rrrrrr(rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr)rrrrrr(rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr)rrrrrr(rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r	r
rrrrrrrrrrrrrrrr
rrrrrrrrrrrrrrr)rrrrrrrrrrrrrrr	r	)-ZenumsrZBIG5_CLSZERRORZSTARTZITS_MEZBIG5_STZBIG5_CHAR_LEN_TABLEZ
BIG5_SM_MODELZ	CP949_CLSZCP949_STZCP949_CHAR_LEN_TABLEZCP949_SM_MODELZ	EUCJP_CLSZEUCJP_STZEUCJP_CHAR_LEN_TABLEZEUCJP_SM_MODELZ	EUCKR_CLSZEUCKR_STZEUCKR_CHAR_LEN_TABLEZEUCKR_SM_MODELZ	EUCTW_CLSZEUCTW_STZEUCTW_CHAR_LEN_TABLEZEUCTW_SM_MODELZ
GB2312_CLSZ	GB2312_STZGB2312_CHAR_LEN_TABLEZGB2312_SM_MODELZSJIS_CLSZSJIS_STZSJIS_CHAR_LEN_TABLEZ
SJIS_SM_MODELZ
UCS2BE_CLSZ	UCS2BE_STZUCS2BE_CHAR_LEN_TABLEZUCS2BE_SM_MODELZ
UCS2LE_CLSZ	UCS2LE_STZUCS2LE_CHAR_LEN_TABLEZUCS2LE_SM_MODELZUTF8_CLSZUTF8_STZUTF8_CHAR_LEN_TABLEZ
UTF8_SM_MODEL�rr�/usr/lib/python3.6/mbcssm.py�<module>sh $ (((((,  "$   $  $ $                $_vendor/chardet/__pycache__/charsetgroupprober.cpython-36.pyc000064400000004144151733136230020377 0ustar003

�Pf��@s,ddlmZddlmZGdd�de�ZdS)�)�ProbingState)�
CharSetProbercsReZdZd�fdd�	Z�fdd�Zedd��Zedd	��Zd
d�Zdd
�Z	�Z
S)�CharSetGroupProberNcs(tt|�j|d�d|_g|_d|_dS)N)�lang_filter�)�superr�__init__�_active_num�probers�_best_guess_prober)�selfr)�	__class__��(/usr/lib/python3.6/charsetgroupprober.pyr!szCharSetGroupProber.__init__csNtt|�j�d|_x.|jD]$}|r|j�d|_|jd7_qWd|_dS)NrTr)rr�resetr	r
�activer)r�prober)r
rrr'szCharSetGroupProber.resetcCs |js|j�|jsdS|jjS)N)r�get_confidence�charset_name)rrrrr1s
zCharSetGroupProber.charset_namecCs |js|j�|jsdS|jjS)N)rr�language)rrrrr9s
zCharSetGroupProber.languagecCs�xx|jD]n}|sq|jsq|j|�}|s*q|tjkr@||_|jS|tjkrd|_|jd8_|jdkrtj|_	|jSqW|jS)NFrr)
r
r�feedr�FOUND_ITr�state�NOT_MEr	Z_state)rZbyte_strrrrrrrAs$




zCharSetGroupProber.feedcCs�|j}|tjkrdS|tjkr"dSd}d|_x\|jD]R}|s>q4|jsV|jjd|j	�q4|j
�}|jjd|j	|j|�||kr4|}||_q4W|js�dS|S)Ng�G�z��?g{�G�z�?gz
%s not activez%s %s confidence = %s)rrrrrr
rZlogger�debugrrr)rrZ	best_confrZconfrrrrUs*


z!CharSetGroupProber.get_confidence)N)�__name__�
__module__�__qualname__rr�propertyrrrr�
__classcell__rr)r
rr s
rN)ZenumsrZ
charsetproberrrrrrr�<module>s_vendor/chardet/__pycache__/escsm.cpython-36.pyc000064400000016167151733136230015601 0ustar003

�Pf)�@s�ddlmZdZejejdejejejejejejejejejejejejejejejejejejejdejdejdejdddejdejdddejdejdejejejejejejejf0ZdZedeedd	d
�ZdZ	ejdejejejejejejejejejejejejejejejejejejejejejejejejejejejejdejejejejejejejejejddejejejejejejejejejejejejejejejejejejejejejejf@Z
dZe	de
edd	d
�ZdZ
ejdejejejejejejejejejejejejejejejejejejejejejejejejejejejejejejejdejejejdejejejejejdejejejejejejejejejejejejejejejejejejejejejejejejejejejejfHZdZe
deeddd
�ZdZejdejejejejejejejejejejejejejejejejejejejdejejejejejejdejejejejejejejejejejejf(ZdZedeeddd
�ZdS)�)�MachineState������z
HZ-GB-2312ZChinese)Zclass_tableZclass_factorZstate_tableZchar_len_table�nameZlanguage�	zISO-2022-CN���
zISO-2022-JPZJapanesezISO-2022-KRZKoreanN(rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr)rrrrrr(rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr)	rrrrrrrrr(rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr
rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr)
rrrrrrrrrr(rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr)rrrrrr)ZenumsrZHZ_CLSZSTARTZERRORZITS_MEZHZ_STZHZ_CHAR_LEN_TABLEZHZ_SM_MODELZ
ISO2022CN_CLSZISO2022CN_STZISO2022CN_CHAR_LEN_TABLEZISO2022CN_SM_MODELZ
ISO2022JP_CLSZISO2022JP_STZISO2022JP_CHAR_LEN_TABLEZISO2022JP_SM_MODELZ
ISO2022KR_CLSZISO2022KR_STZISO2022KR_CHAR_LEN_TABLEZISO2022KR_SM_MODEL�rr�/usr/lib/python3.6/escsm.py�<module>sp "    $     $ $_vendor/chardet/__pycache__/codingstatemachine.cpython-36.opt-1.pyc000064400000005365151733136230021255 0ustar003

�Pf�@s(ddlZddlmZGdd�de�ZdS)�N�)�MachineStatec@sDeZdZdZdd�Zdd�Zdd�Zdd	�Zd
d�Ze	dd
��Z
dS)�CodingStateMachinea�
    A state machine to verify a byte sequence for a particular encoding. For
    each byte the detector receives, it will feed that byte to every active
    state machine available, one byte at a time. The state machine changes its
    state based on its previous state and the byte it receives. There are 3
    states in a state machine that are of interest to an auto-detector:

    START state: This is the state to start with, or a legal byte sequence
                 (i.e. a valid code point) for character has been identified.

    ME state:  This indicates that the state machine identified a byte sequence
               that is specific to the charset it is designed for and that
               there is no other possible encoding which can contain this byte
               sequence. This will to lead to an immediate positive answer for
               the detector.

    ERROR state: This indicates the state machine identified an illegal byte
                 sequence for that encoding. This will lead to an immediate
                 negative answer for this encoding. Detector will exclude this
                 encoding from consideration from here on.
    cCs0||_d|_d|_d|_tjt�|_|j�dS)Nr)	�_model�_curr_byte_pos�_curr_char_len�_curr_state�loggingZ	getLogger�__name__Zlogger�reset)�selfZsm�r
�(/usr/lib/python3.6/codingstatemachine.py�__init__7szCodingStateMachine.__init__cCstj|_dS)N)r�STARTr)rr
r
rr?szCodingStateMachine.resetcCsh|jd|}|jtjkr0d|_|jd||_|j|jd|}|jd||_|jd7_|jS)NZclass_tablerZchar_len_tableZclass_factorZstate_tabler)rrrrrr)r�cZ
byte_classZ
curr_stater
r
r�
next_stateBszCodingStateMachine.next_statecCs|jS)N)r)rr
r
r�get_current_charlenPsz&CodingStateMachine.get_current_charlencCs
|jdS)N�name)r)rr
r
r�get_coding_state_machineSsz+CodingStateMachine.get_coding_state_machinecCs
|jdS)N�language)r)rr
r
rrVszCodingStateMachine.languageN)r
�
__module__�__qualname__�__doc__rrrrr�propertyrr
r
r
rr!sr)r	Zenumsr�objectrr
r
r
r�<module>s_vendor/chardet/__pycache__/cp949prober.cpython-36.pyc000064400000002030151733136230016531 0ustar003

�Pf?�@sDddlmZddlmZddlmZddlmZGdd�de�ZdS)�)�EUCKRDistributionAnalysis)�CodingStateMachine)�MultiByteCharSetProber)�CP949_SM_MODELcs4eZdZ�fdd�Zedd��Zedd��Z�ZS)�CP949Probercs,tt|�j�tt�|_t�|_|j�dS)N)	�superr�__init__rrZ	coding_smrZdistribution_analyzer�reset)�self)�	__class__��!/usr/lib/python3.6/cp949prober.pyr#s
zCP949Prober.__init__cCsdS)NZCP949r)r
rrr
�charset_name+szCP949Prober.charset_namecCsdS)NZKoreanr)r
rrr
�language/szCP949Prober.language)�__name__�
__module__�__qualname__r�propertyrr�
__classcell__rr)rr
r"srN)	ZchardistributionrZcodingstatemachinerZmbcharsetproberrZmbcssmrrrrrr
�<module>s_vendor/chardet/__pycache__/version.cpython-36.opt-1.pyc000064400000000550151733136230017100 0ustar003

�Pf��@sdZdZejd�ZdS)z�
This module exists only to simplify retrieving the version number of chardet
from within setup.py and from chardet subpackages.

:author: Dan Blanchard (dan.blanchard@gmail.com)
z3.0.4�.N)�__doc__�__version__�split�VERSION�rr�/usr/lib/python3.6/version.py�<module>s_vendor/chardet/langhebrewmodel.py000064400000026121151733136230013331 0ustar00######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
#          Simon Montagu
# Portions created by the Initial Developer are Copyright (C) 2005
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Mark Pilgrim - port to Python
#   Shy Shalom - original C code
#   Shoshannah Forbes - original C code (?)
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################

# 255: Control characters that usually does not exist in any text
# 254: Carriage/Return
# 253: symbol (punctuation) that does not belong to word
# 252: 0 - 9

# Windows-1255 language model
# Character Mapping Table:
# One entry per Windows-1255 byte value (0x00-0xFF, 16 per row; the hex
# row offset is noted at the end of the first rows). Each byte maps to a
# frequency-order index; the sentinel values 255/254/253/252 are described
# in the header comments above (controls, CR/LF, symbols, digits).
WIN1255_CHAR_TO_ORDER_MAP = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255,  # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,  # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,  # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253,  # 30
253, 69, 91, 79, 80, 92, 89, 97, 90, 68,111,112, 82, 73, 95, 85,  # 40
 78,121, 86, 71, 67,102,107, 84,114,103,115,253,253,253,253,253,  # 50
253, 50, 74, 60, 61, 42, 76, 70, 64, 53,105, 93, 56, 65, 54, 49,  # 60
 66,110, 51, 43, 44, 63, 81, 77, 98, 75,108,253,253,253,253,253,  # 70
124,202,203,204,205, 40, 58,206,207,208,209,210,211,212,213,214,
215, 83, 52, 47, 46, 72, 32, 94,216,113,217,109,218,219,220,221,
 34,116,222,118,100,223,224,117,119,104,125,225,226, 87, 99,227,
106,122,123,228, 55,229,230,101,231,232,120,233, 48, 39, 57,234,
 30, 59, 41, 88, 33, 37, 36, 31, 29, 35,235, 62, 28,236,126,237,
238, 38, 45,239,240,241,242,243,127,244,245,246,247,248,249,250,
  9,  8, 20, 16,  3,  2, 24, 14, 22,  1, 25, 15,  4, 11,  6, 23,
 12, 19, 13, 26, 18, 27, 21, 17,  7, 10,  5,251,252,128, 96,253,
)

# Model Table:
# total sequences: 100%
# first 512 sequences: 98.4004%
# first 1024 sequences: 1.5981%
# rest  sequences:      0.087%
# negative sequences:   0.0015%
# Flattened sequence-likelihood table (128 rows x 32 values = 4096 entries).
# Values are small likelihood categories (0-3).  It is consumed as the
# 'precedence_matrix' of Win1255HebrewModel below; presumably it is indexed
# by pairs of the frequency orders from WIN1255_CHAR_TO_ORDER_MAP — confirm
# against the single-byte prober that reads it.  Generated data: do not
# hand-edit individual entries.
HEBREW_LANG_MODEL = (
0,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,2,3,2,1,2,0,1,0,0,
3,0,3,1,0,0,1,3,2,0,1,1,2,0,2,2,2,1,1,1,1,2,1,1,1,2,0,0,2,2,0,1,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2,
1,2,1,2,1,2,0,0,2,0,0,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,
1,2,1,3,1,1,0,0,2,0,0,0,1,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,0,1,2,2,1,3,
1,2,1,1,2,2,0,0,2,2,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,1,0,1,1,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,2,2,2,2,3,2,
1,2,1,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,2,3,2,2,3,2,2,2,1,2,2,2,2,
1,2,1,1,2,2,0,1,2,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,0,2,2,2,2,2,
0,2,0,2,2,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,0,2,2,2,
0,2,1,2,2,2,0,0,2,1,0,0,0,0,1,0,1,0,0,0,0,0,0,2,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,2,1,2,3,2,2,2,
1,2,1,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,1,0,
3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,1,0,2,0,2,
0,2,1,2,2,2,0,0,1,2,0,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,2,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,2,3,2,2,3,2,1,2,1,1,1,
0,1,1,1,1,1,3,0,1,0,0,0,0,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
3,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,1,1,0,0,1,0,0,1,0,0,0,0,
0,0,1,0,0,0,0,0,2,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2,2,2,2,
0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,2,3,3,3,2,1,2,3,3,2,3,3,3,3,2,3,2,1,2,0,2,1,2,
0,2,0,2,2,2,0,0,1,2,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,
3,3,3,3,3,3,3,3,3,2,3,3,3,1,2,2,3,3,2,3,2,3,2,2,3,1,2,2,0,2,2,2,
0,2,1,2,2,2,0,0,1,2,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,1,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,2,2,2,3,3,3,3,1,3,2,2,2,
0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,3,3,3,2,3,2,2,2,1,2,2,0,2,2,2,2,
0,2,0,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,1,3,2,3,3,2,3,3,2,2,1,2,2,2,2,2,2,
0,2,1,2,1,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,2,3,2,3,3,2,3,3,3,3,2,3,2,3,3,3,3,3,2,2,2,2,2,2,2,1,
0,2,0,1,2,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,2,1,2,3,3,3,3,3,3,3,2,3,2,3,2,1,2,3,0,2,1,2,2,
0,2,1,1,2,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,2,0,
3,3,3,3,3,3,3,3,3,2,3,3,3,3,2,1,3,1,2,2,2,1,2,3,3,1,2,1,2,2,2,2,
0,1,1,1,1,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,0,2,3,3,3,1,3,3,3,1,2,2,2,2,1,1,2,2,2,2,2,2,
0,2,0,1,1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,2,3,3,3,2,2,3,3,3,2,1,2,3,2,3,2,2,2,2,1,2,1,1,1,2,2,
0,2,1,1,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
3,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,1,0,0,0,0,0,
1,0,1,0,0,0,0,0,2,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,2,3,3,2,3,1,2,2,2,2,3,2,3,1,1,2,2,1,2,2,1,1,0,2,2,2,2,
0,1,0,1,2,2,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
3,0,0,1,1,0,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,2,2,0,
0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,1,0,1,0,1,1,0,1,1,0,0,0,1,1,0,1,1,1,0,0,0,0,0,0,1,0,0,0,0,0,
0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,0,0,1,1,0,1,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
3,2,2,1,2,2,2,2,2,2,2,1,2,2,1,2,2,1,1,1,1,1,1,1,1,2,1,1,0,3,3,3,
0,3,0,2,2,2,2,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
2,2,2,3,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,1,2,2,1,2,2,2,1,1,1,2,0,1,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,2,2,2,2,2,2,2,2,2,2,1,2,2,2,2,2,2,2,2,2,2,2,0,2,2,0,0,0,0,0,0,
0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,3,1,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,1,2,1,0,2,1,0,
0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,
0,3,1,1,2,2,2,2,2,1,2,2,2,1,1,2,2,2,2,2,2,2,1,2,2,1,0,1,1,1,1,0,
0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,2,1,1,1,1,2,1,1,2,1,0,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,0,0,0,0,
0,0,2,0,0,0,0,0,0,0,0,1,1,0,0,0,0,1,1,0,0,1,1,0,0,0,0,0,0,1,0,0,
2,1,1,2,2,2,2,2,2,2,2,2,2,2,1,2,2,2,2,2,1,2,1,2,1,1,1,1,0,0,0,0,
0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,2,1,2,2,2,2,2,2,2,2,2,2,1,2,1,2,1,1,2,1,1,1,2,1,2,1,2,0,1,0,1,
0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,3,1,2,2,2,1,2,2,2,2,2,2,2,2,1,2,1,1,1,1,1,1,2,1,2,1,1,0,1,0,1,
0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,1,2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2,
0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
3,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,1,1,1,1,1,1,1,0,1,1,0,1,0,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,2,0,1,1,1,0,1,0,0,0,1,1,0,1,1,0,0,0,0,0,1,1,0,0,
0,1,1,1,2,1,2,2,2,0,2,0,2,0,1,1,2,1,1,1,1,2,1,0,1,1,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,1,0,0,0,0,0,1,0,1,2,2,0,1,0,0,1,1,2,2,1,2,0,2,0,0,0,1,2,0,1,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,2,0,2,1,2,0,2,0,0,1,1,1,1,1,1,0,1,0,0,0,1,0,0,1,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,1,0,0,0,0,0,1,0,2,1,1,0,1,0,0,1,1,1,2,2,0,0,1,0,0,0,1,0,0,1,
1,1,2,1,0,1,1,1,0,1,0,1,1,1,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,2,2,1,
0,2,0,1,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,1,0,0,1,0,1,1,1,1,0,0,0,0,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,1,1,1,1,1,1,1,1,2,1,0,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,1,1,1,0,0,0,0,1,1,1,0,1,1,0,1,0,0,0,1,1,0,1,
2,0,1,0,1,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,1,0,1,1,1,0,1,0,0,1,1,2,1,1,2,0,1,0,0,0,1,1,0,1,
1,0,0,1,0,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,0,0,2,1,1,2,0,2,0,0,0,1,1,0,1,
1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,1,0,2,1,1,0,1,0,0,2,2,1,2,1,1,0,1,0,0,0,1,1,0,1,
2,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,1,2,2,0,0,0,0,0,1,1,0,1,0,0,1,0,0,0,0,1,0,1,
1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,1,2,2,0,0,0,0,2,1,1,1,0,2,1,1,0,0,0,2,1,0,1,
1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,1,1,0,2,1,1,0,1,0,0,0,1,1,0,1,
2,2,1,1,1,0,1,1,0,1,1,0,1,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,1,0,2,1,1,0,1,0,0,1,1,0,1,2,1,0,2,0,0,0,1,1,0,1,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,
0,1,0,0,2,0,2,1,1,0,1,0,1,0,0,1,0,0,0,0,1,0,0,0,1,0,0,0,0,0,1,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,1,1,1,0,1,0,0,1,0,0,0,1,0,0,1,
1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,0,0,0,0,0,0,1,0,1,1,0,0,1,0,0,2,1,1,1,1,1,0,1,0,0,0,0,1,0,1,
0,1,1,1,2,1,1,1,1,0,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,1,2,1,0,0,0,0,0,1,1,1,1,1,0,1,0,0,0,1,1,0,0,
)

# Model descriptor for the Windows-1255 (Hebrew) charset: bundles the
# byte-to-frequency-order map and the sequence-likelihood table above with
# the charset metadata the prober reports.
Win1255HebrewModel = dict(
    char_to_order_map=WIN1255_CHAR_TO_ORDER_MAP,
    precedence_matrix=HEBREW_LANG_MODEL,
    typical_positive_ratio=0.984004,
    keep_english_letter=False,
    charset_name="windows-1255",
    language='Hebrew',
)
_vendor/chardet/sjisprober.py000064400000007276151733136230012366 0ustar00######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################

from .mbcharsetprober import MultiByteCharSetProber
from .codingstatemachine import CodingStateMachine
from .chardistribution import SJISDistributionAnalysis
from .jpcntx import SJISContextAnalysis
from .mbcssm import SJIS_SM_MODEL
from .enums import ProbingState, MachineState


class SJISProber(MultiByteCharSetProber):
    """Prober for the Shift-JIS (Japanese) encoding.

    Combines a coding state machine (validates byte sequences), a character
    distribution analysis, and a contextual analysis to decide whether the
    input looks like Shift-JIS text.
    """

    def __init__(self):
        super(SJISProber, self).__init__()
        # State machine that validates Shift-JIS byte sequences.
        self.coding_sm = CodingStateMachine(SJIS_SM_MODEL)
        # Frequency-based analysis of complete characters.
        self.distribution_analyzer = SJISDistributionAnalysis()
        # Character-context analysis; also supplies the exact charset name.
        self.context_analyzer = SJISContextAnalysis()
        self.reset()

    def reset(self):
        """Reset the prober and its context analyzer to the initial state."""
        super(SJISProber, self).reset()
        self.context_analyzer.reset()

    @property
    def charset_name(self):
        # The exact charset name is reported by the context analyzer.
        return self.context_analyzer.charset_name

    @property
    def language(self):
        return "Japanese"

    def feed(self, byte_str):
        """Consume a chunk of input and update the probing state.

        :param byte_str: next chunk of the input (``bytes``/``bytearray``)
        :returns: the prober's state after consuming the chunk
        """
        for i in range(len(byte_str)):
            coding_state = self.coding_sm.next_state(byte_str[i])
            if coding_state == MachineState.ERROR:
                # Illegal byte sequence for Shift-JIS: rule this charset out.
                self.logger.debug('%s %s prober hit error at byte %s',
                                  self.charset_name, self.language, i)
                self._state = ProbingState.NOT_ME
                break
            elif coding_state == MachineState.ITS_ME:
                self._state = ProbingState.FOUND_IT
                break
            elif coding_state == MachineState.START:
                # A complete character just finished; char_len is its byte
                # length.  Feed it to both analyzers.
                char_len = self.coding_sm.get_current_charlen()
                if i == 0:
                    # The character may straddle the previous chunk; use the
                    # bytes buffered in self._last_char (maintained by the
                    # base class -- see MultiByteCharSetProber).
                    self._last_char[1] = byte_str[0]
                    self.context_analyzer.feed(self._last_char[2 - char_len:],
                                               char_len)
                    self.distribution_analyzer.feed(self._last_char, char_len)
                else:
                    self.context_analyzer.feed(byte_str[i + 1 - char_len:i + 3
                                                        - char_len], char_len)
                    self.distribution_analyzer.feed(byte_str[i - 1:i + 1],
                                                    char_len)

        # Remember the trailing byte in case a character spans chunk borders.
        self._last_char[0] = byte_str[-1]

        if self.state == ProbingState.DETECTING:
            # Shortcut: declare success early once confidence is high enough.
            if (self.context_analyzer.got_enough_data() and
               (self.get_confidence() > self.SHORTCUT_THRESHOLD)):
                self._state = ProbingState.FOUND_IT

        return self.state

    def get_confidence(self):
        """Return the higher of the context and distribution confidences."""
        context_conf = self.context_analyzer.get_confidence()
        distrib_conf = self.distribution_analyzer.get_confidence()
        return max(context_conf, distrib_conf)
_vendor/chardet/jisfreq.py000064400000062261151733136230011642 0ustar00######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################

# Sampling from about 20M text materials including literature and computer technology
#
# Japanese frequency table, applied to both S-JIS and EUC-JP
# They are sorted in order.

# 128  --> 0.77094
# 256  --> 0.85710
# 512  --> 0.92635
# 1024 --> 0.97130
# 2048 --> 0.99431
#
# Ideal Distribution Ratio = 0.92635 / (1-0.92635) = 12.58
# Random Distribution Ratio = 512 / (2965+62+83+86-512) = 0.191
#
# Typical Distribution Ratio, 25% of IDR

# Typical distribution ratio for Japanese text: 25% of the ideal
# distribution ratio computed in the comment block above.
JIS_TYPICAL_DISTRIBUTION_RATIO = 3.0

# Number of entries in the char-to-frequency-order table that follows.
JIS_TABLE_SIZE = 4368

JIS_CHAR_TO_FREQ_ORDER = (
  40,   1,   6, 182, 152, 180, 295,2127, 285, 381,3295,4304,3068,4606,3165,3510, #   16
3511,1822,2785,4607,1193,2226,5070,4608, 171,2996,1247,  18, 179,5071, 856,1661, #   32
1262,5072, 619, 127,3431,3512,3230,1899,1700, 232, 228,1294,1298, 284, 283,2041, #   48
2042,1061,1062,  48,  49,  44,  45, 433, 434,1040,1041, 996, 787,2997,1255,4305, #   64
2108,4609,1684,1648,5073,5074,5075,5076,5077,5078,3687,5079,4610,5080,3927,3928, #   80
5081,3296,3432, 290,2285,1471,2187,5082,2580,2825,1303,2140,1739,1445,2691,3375, #   96
1691,3297,4306,4307,4611, 452,3376,1182,2713,3688,3069,4308,5083,5084,5085,5086, #  112
5087,5088,5089,5090,5091,5092,5093,5094,5095,5096,5097,5098,5099,5100,5101,5102, #  128
5103,5104,5105,5106,5107,5108,5109,5110,5111,5112,4097,5113,5114,5115,5116,5117, #  144
5118,5119,5120,5121,5122,5123,5124,5125,5126,5127,5128,5129,5130,5131,5132,5133, #  160
5134,5135,5136,5137,5138,5139,5140,5141,5142,5143,5144,5145,5146,5147,5148,5149, #  176
5150,5151,5152,4612,5153,5154,5155,5156,5157,5158,5159,5160,5161,5162,5163,5164, #  192
5165,5166,5167,5168,5169,5170,5171,5172,5173,5174,5175,1472, 598, 618, 820,1205, #  208
1309,1412,1858,1307,1692,5176,5177,5178,5179,5180,5181,5182,1142,1452,1234,1172, #  224
1875,2043,2149,1793,1382,2973, 925,2404,1067,1241, 960,1377,2935,1491, 919,1217, #  240
1865,2030,1406,1499,2749,4098,5183,5184,5185,5186,5187,5188,2561,4099,3117,1804, #  256
2049,3689,4309,3513,1663,5189,3166,3118,3298,1587,1561,3433,5190,3119,1625,2998, #  272
3299,4613,1766,3690,2786,4614,5191,5192,5193,5194,2161,  26,3377,   2,3929,  20, #  288
3691,  47,4100,  50,  17,  16,  35, 268,  27, 243,  42, 155,  24, 154,  29, 184, #  304
   4,  91,  14,  92,  53, 396,  33, 289,   9,  37,  64, 620,  21,  39, 321,   5, #  320
  12,  11,  52,  13,   3, 208, 138,   0,   7,  60, 526, 141, 151,1069, 181, 275, #  336
1591,  83, 132,1475, 126, 331, 829,  15,  69, 160,  59,  22, 157,  55,1079, 312, #  352
 109,  38,  23,  25,  10,  19,  79,5195,  61, 382,1124,   8,  30,5196,5197,5198, #  368
5199,5200,5201,5202,5203,5204,5205,5206,  89,  62,  74,  34,2416, 112, 139, 196, #  384
 271, 149,  84, 607, 131, 765,  46,  88, 153, 683,  76, 874, 101, 258,  57,  80, #  400
  32, 364, 121,1508, 169,1547,  68, 235, 145,2999,  41, 360,3027,  70,  63,  31, #  416
  43, 259, 262,1383,  99, 533, 194,  66,  93, 846, 217, 192,  56, 106,  58, 565, #  432
 280, 272, 311, 256, 146,  82, 308,  71, 100, 128, 214, 655, 110, 261, 104,1140, #  448
  54,  51,  36,  87,  67,3070, 185,2618,2936,2020,  28,1066,2390,2059,5207,5208, #  464
5209,5210,5211,5212,5213,5214,5215,5216,4615,5217,5218,5219,5220,5221,5222,5223, #  480
5224,5225,5226,5227,5228,5229,5230,5231,5232,5233,5234,5235,5236,3514,5237,5238, #  496
5239,5240,5241,5242,5243,5244,2297,2031,4616,4310,3692,5245,3071,5246,3598,5247, #  512
4617,3231,3515,5248,4101,4311,4618,3808,4312,4102,5249,4103,4104,3599,5250,5251, #  528
5252,5253,5254,5255,5256,5257,5258,5259,5260,5261,5262,5263,5264,5265,5266,5267, #  544
5268,5269,5270,5271,5272,5273,5274,5275,5276,5277,5278,5279,5280,5281,5282,5283, #  560
5284,5285,5286,5287,5288,5289,5290,5291,5292,5293,5294,5295,5296,5297,5298,5299, #  576
5300,5301,5302,5303,5304,5305,5306,5307,5308,5309,5310,5311,5312,5313,5314,5315, #  592
5316,5317,5318,5319,5320,5321,5322,5323,5324,5325,5326,5327,5328,5329,5330,5331, #  608
5332,5333,5334,5335,5336,5337,5338,5339,5340,5341,5342,5343,5344,5345,5346,5347, #  624
5348,5349,5350,5351,5352,5353,5354,5355,5356,5357,5358,5359,5360,5361,5362,5363, #  640
5364,5365,5366,5367,5368,5369,5370,5371,5372,5373,5374,5375,5376,5377,5378,5379, #  656
5380,5381, 363, 642,2787,2878,2788,2789,2316,3232,2317,3434,2011, 165,1942,3930, #  672
3931,3932,3933,5382,4619,5383,4620,5384,5385,5386,5387,5388,5389,5390,5391,5392, #  688
5393,5394,5395,5396,5397,5398,5399,5400,5401,5402,5403,5404,5405,5406,5407,5408, #  704
5409,5410,5411,5412,5413,5414,5415,5416,5417,5418,5419,5420,5421,5422,5423,5424, #  720
5425,5426,5427,5428,5429,5430,5431,5432,5433,5434,5435,5436,5437,5438,5439,5440, #  736
5441,5442,5443,5444,5445,5446,5447,5448,5449,5450,5451,5452,5453,5454,5455,5456, #  752
5457,5458,5459,5460,5461,5462,5463,5464,5465,5466,5467,5468,5469,5470,5471,5472, #  768
5473,5474,5475,5476,5477,5478,5479,5480,5481,5482,5483,5484,5485,5486,5487,5488, #  784
5489,5490,5491,5492,5493,5494,5495,5496,5497,5498,5499,5500,5501,5502,5503,5504, #  800
5505,5506,5507,5508,5509,5510,5511,5512,5513,5514,5515,5516,5517,5518,5519,5520, #  816
5521,5522,5523,5524,5525,5526,5527,5528,5529,5530,5531,5532,5533,5534,5535,5536, #  832
5537,5538,5539,5540,5541,5542,5543,5544,5545,5546,5547,5548,5549,5550,5551,5552, #  848
5553,5554,5555,5556,5557,5558,5559,5560,5561,5562,5563,5564,5565,5566,5567,5568, #  864
5569,5570,5571,5572,5573,5574,5575,5576,5577,5578,5579,5580,5581,5582,5583,5584, #  880
5585,5586,5587,5588,5589,5590,5591,5592,5593,5594,5595,5596,5597,5598,5599,5600, #  896
5601,5602,5603,5604,5605,5606,5607,5608,5609,5610,5611,5612,5613,5614,5615,5616, #  912
5617,5618,5619,5620,5621,5622,5623,5624,5625,5626,5627,5628,5629,5630,5631,5632, #  928
5633,5634,5635,5636,5637,5638,5639,5640,5641,5642,5643,5644,5645,5646,5647,5648, #  944
5649,5650,5651,5652,5653,5654,5655,5656,5657,5658,5659,5660,5661,5662,5663,5664, #  960
5665,5666,5667,5668,5669,5670,5671,5672,5673,5674,5675,5676,5677,5678,5679,5680, #  976
5681,5682,5683,5684,5685,5686,5687,5688,5689,5690,5691,5692,5693,5694,5695,5696, #  992
5697,5698,5699,5700,5701,5702,5703,5704,5705,5706,5707,5708,5709,5710,5711,5712, # 1008
5713,5714,5715,5716,5717,5718,5719,5720,5721,5722,5723,5724,5725,5726,5727,5728, # 1024
5729,5730,5731,5732,5733,5734,5735,5736,5737,5738,5739,5740,5741,5742,5743,5744, # 1040
5745,5746,5747,5748,5749,5750,5751,5752,5753,5754,5755,5756,5757,5758,5759,5760, # 1056
5761,5762,5763,5764,5765,5766,5767,5768,5769,5770,5771,5772,5773,5774,5775,5776, # 1072
5777,5778,5779,5780,5781,5782,5783,5784,5785,5786,5787,5788,5789,5790,5791,5792, # 1088
5793,5794,5795,5796,5797,5798,5799,5800,5801,5802,5803,5804,5805,5806,5807,5808, # 1104
5809,5810,5811,5812,5813,5814,5815,5816,5817,5818,5819,5820,5821,5822,5823,5824, # 1120
5825,5826,5827,5828,5829,5830,5831,5832,5833,5834,5835,5836,5837,5838,5839,5840, # 1136
5841,5842,5843,5844,5845,5846,5847,5848,5849,5850,5851,5852,5853,5854,5855,5856, # 1152
5857,5858,5859,5860,5861,5862,5863,5864,5865,5866,5867,5868,5869,5870,5871,5872, # 1168
5873,5874,5875,5876,5877,5878,5879,5880,5881,5882,5883,5884,5885,5886,5887,5888, # 1184
5889,5890,5891,5892,5893,5894,5895,5896,5897,5898,5899,5900,5901,5902,5903,5904, # 1200
5905,5906,5907,5908,5909,5910,5911,5912,5913,5914,5915,5916,5917,5918,5919,5920, # 1216
5921,5922,5923,5924,5925,5926,5927,5928,5929,5930,5931,5932,5933,5934,5935,5936, # 1232
5937,5938,5939,5940,5941,5942,5943,5944,5945,5946,5947,5948,5949,5950,5951,5952, # 1248
5953,5954,5955,5956,5957,5958,5959,5960,5961,5962,5963,5964,5965,5966,5967,5968, # 1264
5969,5970,5971,5972,5973,5974,5975,5976,5977,5978,5979,5980,5981,5982,5983,5984, # 1280
5985,5986,5987,5988,5989,5990,5991,5992,5993,5994,5995,5996,5997,5998,5999,6000, # 1296
6001,6002,6003,6004,6005,6006,6007,6008,6009,6010,6011,6012,6013,6014,6015,6016, # 1312
6017,6018,6019,6020,6021,6022,6023,6024,6025,6026,6027,6028,6029,6030,6031,6032, # 1328
6033,6034,6035,6036,6037,6038,6039,6040,6041,6042,6043,6044,6045,6046,6047,6048, # 1344
6049,6050,6051,6052,6053,6054,6055,6056,6057,6058,6059,6060,6061,6062,6063,6064, # 1360
6065,6066,6067,6068,6069,6070,6071,6072,6073,6074,6075,6076,6077,6078,6079,6080, # 1376
6081,6082,6083,6084,6085,6086,6087,6088,6089,6090,6091,6092,6093,6094,6095,6096, # 1392
6097,6098,6099,6100,6101,6102,6103,6104,6105,6106,6107,6108,6109,6110,6111,6112, # 1408
6113,6114,2044,2060,4621, 997,1235, 473,1186,4622, 920,3378,6115,6116, 379,1108, # 1424
4313,2657,2735,3934,6117,3809, 636,3233, 573,1026,3693,3435,2974,3300,2298,4105, # 1440
 854,2937,2463, 393,2581,2417, 539, 752,1280,2750,2480, 140,1161, 440, 708,1569, # 1456
 665,2497,1746,1291,1523,3000, 164,1603, 847,1331, 537,1997, 486, 508,1693,2418, # 1472
1970,2227, 878,1220, 299,1030, 969, 652,2751, 624,1137,3301,2619,  65,3302,2045, # 1488
1761,1859,3120,1930,3694,3516, 663,1767, 852, 835,3695, 269, 767,2826,2339,1305, # 1504
 896,1150, 770,1616,6118, 506,1502,2075,1012,2519, 775,2520,2975,2340,2938,4314, # 1520
3028,2086,1224,1943,2286,6119,3072,4315,2240,1273,1987,3935,1557, 175, 597, 985, # 1536
3517,2419,2521,1416,3029, 585, 938,1931,1007,1052,1932,1685,6120,3379,4316,4623, # 1552
 804, 599,3121,1333,2128,2539,1159,1554,2032,3810, 687,2033,2904, 952, 675,1467, # 1568
3436,6121,2241,1096,1786,2440,1543,1924, 980,1813,2228, 781,2692,1879, 728,1918, # 1584
3696,4624, 548,1950,4625,1809,1088,1356,3303,2522,1944, 502, 972, 373, 513,2827, # 1600
 586,2377,2391,1003,1976,1631,6122,2464,1084, 648,1776,4626,2141, 324, 962,2012, # 1616
2177,2076,1384, 742,2178,1448,1173,1810, 222, 102, 301, 445, 125,2420, 662,2498, # 1632
 277, 200,1476,1165,1068, 224,2562,1378,1446, 450,1880, 659, 791, 582,4627,2939, # 1648
3936,1516,1274, 555,2099,3697,1020,1389,1526,3380,1762,1723,1787,2229, 412,2114, # 1664
1900,2392,3518, 512,2597, 427,1925,2341,3122,1653,1686,2465,2499, 697, 330, 273, # 1680
 380,2162, 951, 832, 780, 991,1301,3073, 965,2270,3519, 668,2523,2636,1286, 535, # 1696
1407, 518, 671, 957,2658,2378, 267, 611,2197,3030,6123, 248,2299, 967,1799,2356, # 1712
 850,1418,3437,1876,1256,1480,2828,1718,6124,6125,1755,1664,2405,6126,4628,2879, # 1728
2829, 499,2179, 676,4629, 557,2329,2214,2090, 325,3234, 464, 811,3001, 992,2342, # 1744
2481,1232,1469, 303,2242, 466,1070,2163, 603,1777,2091,4630,2752,4631,2714, 322, # 1760
2659,1964,1768, 481,2188,1463,2330,2857,3600,2092,3031,2421,4632,2318,2070,1849, # 1776
2598,4633,1302,2254,1668,1701,2422,3811,2905,3032,3123,2046,4106,1763,1694,4634, # 1792
1604, 943,1724,1454, 917, 868,2215,1169,2940, 552,1145,1800,1228,1823,1955, 316, # 1808
1080,2510, 361,1807,2830,4107,2660,3381,1346,1423,1134,4108,6127, 541,1263,1229, # 1824
1148,2540, 545, 465,1833,2880,3438,1901,3074,2482, 816,3937, 713,1788,2500, 122, # 1840
1575, 195,1451,2501,1111,6128, 859, 374,1225,2243,2483,4317, 390,1033,3439,3075, # 1856
2524,1687, 266, 793,1440,2599, 946, 779, 802, 507, 897,1081, 528,2189,1292, 711, # 1872
1866,1725,1167,1640, 753, 398,2661,1053, 246, 348,4318, 137,1024,3440,1600,2077, # 1888
2129, 825,4319, 698, 238, 521, 187,2300,1157,2423,1641,1605,1464,1610,1097,2541, # 1904
1260,1436, 759,2255,1814,2150, 705,3235, 409,2563,3304, 561,3033,2005,2564, 726, # 1920
1956,2343,3698,4109, 949,3812,3813,3520,1669, 653,1379,2525, 881,2198, 632,2256, # 1936
1027, 778,1074, 733,1957, 514,1481,2466, 554,2180, 702,3938,1606,1017,1398,6129, # 1952
1380,3521, 921, 993,1313, 594, 449,1489,1617,1166, 768,1426,1360, 495,1794,3601, # 1968
1177,3602,1170,4320,2344, 476, 425,3167,4635,3168,1424, 401,2662,1171,3382,1998, # 1984
1089,4110, 477,3169, 474,6130,1909, 596,2831,1842, 494, 693,1051,1028,1207,3076, # 2000
 606,2115, 727,2790,1473,1115, 743,3522, 630, 805,1532,4321,2021, 366,1057, 838, # 2016
 684,1114,2142,4322,2050,1492,1892,1808,2271,3814,2424,1971,1447,1373,3305,1090, # 2032
1536,3939,3523,3306,1455,2199, 336, 369,2331,1035, 584,2393, 902, 718,2600,6131, # 2048
2753, 463,2151,1149,1611,2467, 715,1308,3124,1268, 343,1413,3236,1517,1347,2663, # 2064
2093,3940,2022,1131,1553,2100,2941,1427,3441,2942,1323,2484,6132,1980, 872,2368, # 2080
2441,2943, 320,2369,2116,1082, 679,1933,3941,2791,3815, 625,1143,2023, 422,2200, # 2096
3816,6133, 730,1695, 356,2257,1626,2301,2858,2637,1627,1778, 937, 883,2906,2693, # 2112
3002,1769,1086, 400,1063,1325,3307,2792,4111,3077, 456,2345,1046, 747,6134,1524, # 2128
 884,1094,3383,1474,2164,1059, 974,1688,2181,2258,1047, 345,1665,1187, 358, 875, # 2144
3170, 305, 660,3524,2190,1334,1135,3171,1540,1649,2542,1527, 927, 968,2793, 885, # 2160
1972,1850, 482, 500,2638,1218,1109,1085,2543,1654,2034, 876,  78,2287,1482,1277, # 2176
 861,1675,1083,1779, 724,2754, 454, 397,1132,1612,2332, 893, 672,1237, 257,2259, # 2192
2370, 135,3384, 337,2244, 547, 352, 340, 709,2485,1400, 788,1138,2511, 540, 772, # 2208
1682,2260,2272,2544,2013,1843,1902,4636,1999,1562,2288,4637,2201,1403,1533, 407, # 2224
 576,3308,1254,2071, 978,3385, 170, 136,1201,3125,2664,3172,2394, 213, 912, 873, # 2240
3603,1713,2202, 699,3604,3699, 813,3442, 493, 531,1054, 468,2907,1483, 304, 281, # 2256
4112,1726,1252,2094, 339,2319,2130,2639, 756,1563,2944, 748, 571,2976,1588,2425, # 2272
2715,1851,1460,2426,1528,1392,1973,3237, 288,3309, 685,3386, 296, 892,2716,2216, # 2288
1570,2245, 722,1747,2217, 905,3238,1103,6135,1893,1441,1965, 251,1805,2371,3700, # 2304
2601,1919,1078,  75,2182,1509,1592,1270,2640,4638,2152,6136,3310,3817, 524, 706, # 2320
1075, 292,3818,1756,2602, 317,  98,3173,3605,3525,1844,2218,3819,2502, 814, 567, # 2336
 385,2908,1534,6137, 534,1642,3239, 797,6138,1670,1529, 953,4323, 188,1071, 538, # 2352
 178, 729,3240,2109,1226,1374,2000,2357,2977, 731,2468,1116,2014,2051,6139,1261, # 2368
1593, 803,2859,2736,3443, 556, 682, 823,1541,6140,1369,2289,1706,2794, 845, 462, # 2384
2603,2665,1361, 387, 162,2358,1740, 739,1770,1720,1304,1401,3241,1049, 627,1571, # 2400
2427,3526,1877,3942,1852,1500, 431,1910,1503, 677, 297,2795, 286,1433,1038,1198, # 2416
2290,1133,1596,4113,4639,2469,1510,1484,3943,6141,2442, 108, 712,4640,2372, 866, # 2432
3701,2755,3242,1348, 834,1945,1408,3527,2395,3243,1811, 824, 994,1179,2110,1548, # 2448
1453, 790,3003, 690,4324,4325,2832,2909,3820,1860,3821, 225,1748, 310, 346,1780, # 2464
2470, 821,1993,2717,2796, 828, 877,3528,2860,2471,1702,2165,2910,2486,1789, 453, # 2480
 359,2291,1676,  73,1164,1461,1127,3311, 421, 604, 314,1037, 589, 116,2487, 737, # 2496
 837,1180, 111, 244, 735,6142,2261,1861,1362, 986, 523, 418, 581,2666,3822, 103, # 2512
 855, 503,1414,1867,2488,1091, 657,1597, 979, 605,1316,4641,1021,2443,2078,2001, # 2528
1209,  96, 587,2166,1032, 260,1072,2153, 173,  94, 226,3244, 819,2006,4642,4114, # 2544
2203, 231,1744, 782,  97,2667, 786,3387, 887, 391, 442,2219,4326,1425,6143,2694, # 2560
 633,1544,1202, 483,2015, 592,2052,1958,2472,1655, 419, 129,4327,3444,3312,1714, # 2576
1257,3078,4328,1518,1098, 865,1310,1019,1885,1512,1734, 469,2444, 148, 773, 436, # 2592
1815,1868,1128,1055,4329,1245,2756,3445,2154,1934,1039,4643, 579,1238, 932,2320, # 2608
 353, 205, 801, 115,2428, 944,2321,1881, 399,2565,1211, 678, 766,3944, 335,2101, # 2624
1459,1781,1402,3945,2737,2131,1010, 844, 981,1326,1013, 550,1816,1545,2620,1335, # 2640
1008, 371,2881, 936,1419,1613,3529,1456,1395,2273,1834,2604,1317,2738,2503, 416, # 2656
1643,4330, 806,1126, 229, 591,3946,1314,1981,1576,1837,1666, 347,1790, 977,3313, # 2672
 764,2861,1853, 688,2429,1920,1462,  77, 595, 415,2002,3034, 798,1192,4115,6144, # 2688
2978,4331,3035,2695,2582,2072,2566, 430,2430,1727, 842,1396,3947,3702, 613, 377, # 2704
 278, 236,1417,3388,3314,3174, 757,1869, 107,3530,6145,1194, 623,2262, 207,1253, # 2720
2167,3446,3948, 492,1117,1935, 536,1838,2757,1246,4332, 696,2095,2406,1393,1572, # 2736
3175,1782, 583, 190, 253,1390,2230, 830,3126,3389, 934,3245,1703,1749,2979,1870, # 2752
2545,1656,2204, 869,2346,4116,3176,1817, 496,1764,4644, 942,1504, 404,1903,1122, # 2768
1580,3606,2945,1022, 515, 372,1735, 955,2431,3036,6146,2797,1110,2302,2798, 617, # 2784
6147, 441, 762,1771,3447,3607,3608,1904, 840,3037,  86, 939,1385, 572,1370,2445, # 2800
1336, 114,3703, 898, 294, 203,3315, 703,1583,2274, 429, 961,4333,1854,1951,3390, # 2816
2373,3704,4334,1318,1381, 966,1911,2322,1006,1155, 309, 989, 458,2718,1795,1372, # 2832
1203, 252,1689,1363,3177, 517,1936, 168,1490, 562, 193,3823,1042,4117,1835, 551, # 2848
 470,4645, 395, 489,3448,1871,1465,2583,2641, 417,1493, 279,1295, 511,1236,1119, # 2864
  72,1231,1982,1812,3004, 871,1564, 984,3449,1667,2696,2096,4646,2347,2833,1673, # 2880
3609, 695,3246,2668, 807,1183,4647, 890, 388,2333,1801,1457,2911,1765,1477,1031, # 2896
3316,3317,1278,3391,2799,2292,2526, 163,3450,4335,2669,1404,1802,6148,2323,2407, # 2912
1584,1728,1494,1824,1269, 298, 909,3318,1034,1632, 375, 776,1683,2061, 291, 210, # 2928
1123, 809,1249,1002,2642,3038, 206,1011,2132, 144, 975, 882,1565, 342, 667, 754, # 2944
1442,2143,1299,2303,2062, 447, 626,2205,1221,2739,2912,1144,1214,2206,2584, 760, # 2960
1715, 614, 950,1281,2670,2621, 810, 577,1287,2546,4648, 242,2168, 250,2643, 691, # 2976
 123,2644, 647, 313,1029, 689,1357,2946,1650, 216, 771,1339,1306, 808,2063, 549, # 2992
 913,1371,2913,2914,6149,1466,1092,1174,1196,1311,2605,2396,1783,1796,3079, 406, # 3008
2671,2117,3949,4649, 487,1825,2220,6150,2915, 448,2348,1073,6151,2397,1707, 130, # 3024
 900,1598, 329, 176,1959,2527,1620,6152,2275,4336,3319,1983,2191,3705,3610,2155, # 3040
3706,1912,1513,1614,6153,1988, 646, 392,2304,1589,3320,3039,1826,1239,1352,1340, # 3056
2916, 505,2567,1709,1437,2408,2547, 906,6154,2672, 384,1458,1594,1100,1329, 710, # 3072
 423,3531,2064,2231,2622,1989,2673,1087,1882, 333, 841,3005,1296,2882,2379, 580, # 3088
1937,1827,1293,2585, 601, 574, 249,1772,4118,2079,1120, 645, 901,1176,1690, 795, # 3104
2207, 478,1434, 516,1190,1530, 761,2080, 930,1264, 355, 435,1552, 644,1791, 987, # 3120
 220,1364,1163,1121,1538, 306,2169,1327,1222, 546,2645, 218, 241, 610,1704,3321, # 3136
1984,1839,1966,2528, 451,6155,2586,3707,2568, 907,3178, 254,2947, 186,1845,4650, # 3152
 745, 432,1757, 428,1633, 888,2246,2221,2489,3611,2118,1258,1265, 956,3127,1784, # 3168
4337,2490, 319, 510, 119, 457,3612, 274,2035,2007,4651,1409,3128, 970,2758, 590, # 3184
2800, 661,2247,4652,2008,3950,1420,1549,3080,3322,3951,1651,1375,2111, 485,2491, # 3200
1429,1156,6156,2548,2183,1495, 831,1840,2529,2446, 501,1657, 307,1894,3247,1341, # 3216
 666, 899,2156,1539,2549,1559, 886, 349,2208,3081,2305,1736,3824,2170,2759,1014, # 3232
1913,1386, 542,1397,2948, 490, 368, 716, 362, 159, 282,2569,1129,1658,1288,1750, # 3248
2674, 276, 649,2016, 751,1496, 658,1818,1284,1862,2209,2087,2512,3451, 622,2834, # 3264
 376, 117,1060,2053,1208,1721,1101,1443, 247,1250,3179,1792,3952,2760,2398,3953, # 3280
6157,2144,3708, 446,2432,1151,2570,3452,2447,2761,2835,1210,2448,3082, 424,2222, # 3296
1251,2449,2119,2836, 504,1581,4338, 602, 817, 857,3825,2349,2306, 357,3826,1470, # 3312
1883,2883, 255, 958, 929,2917,3248, 302,4653,1050,1271,1751,2307,1952,1430,2697, # 3328
2719,2359, 354,3180, 777, 158,2036,4339,1659,4340,4654,2308,2949,2248,1146,2232, # 3344
3532,2720,1696,2623,3827,6158,3129,1550,2698,1485,1297,1428, 637, 931,2721,2145, # 3360
 914,2550,2587,  81,2450, 612, 827,2646,1242,4655,1118,2884, 472,1855,3181,3533, # 3376
3534, 569,1353,2699,1244,1758,2588,4119,2009,2762,2171,3709,1312,1531,6159,1152, # 3392
1938, 134,1830, 471,3710,2276,1112,1535,3323,3453,3535, 982,1337,2950, 488, 826, # 3408
 674,1058,1628,4120,2017, 522,2399, 211, 568,1367,3454, 350, 293,1872,1139,3249, # 3424
1399,1946,3006,1300,2360,3324, 588, 736,6160,2606, 744, 669,3536,3828,6161,1358, # 3440
 199, 723, 848, 933, 851,1939,1505,1514,1338,1618,1831,4656,1634,3613, 443,2740, # 3456
3829, 717,1947, 491,1914,6162,2551,1542,4121,1025,6163,1099,1223, 198,3040,2722, # 3472
 370, 410,1905,2589, 998,1248,3182,2380, 519,1449,4122,1710, 947, 928,1153,4341, # 3488
2277, 344,2624,1511, 615, 105, 161,1212,1076,1960,3130,2054,1926,1175,1906,2473, # 3504
 414,1873,2801,6164,2309, 315,1319,3325, 318,2018,2146,2157, 963, 631, 223,4342, # 3520
4343,2675, 479,3711,1197,2625,3712,2676,2361,6165,4344,4123,6166,2451,3183,1886, # 3536
2184,1674,1330,1711,1635,1506, 799, 219,3250,3083,3954,1677,3713,3326,2081,3614, # 3552
1652,2073,4657,1147,3041,1752, 643,1961, 147,1974,3955,6167,1716,2037, 918,3007, # 3568
1994, 120,1537, 118, 609,3184,4345, 740,3455,1219, 332,1615,3830,6168,1621,2980, # 3584
1582, 783, 212, 553,2350,3714,1349,2433,2082,4124, 889,6169,2310,1275,1410, 973, # 3600
 166,1320,3456,1797,1215,3185,2885,1846,2590,2763,4658, 629, 822,3008, 763, 940, # 3616
1990,2862, 439,2409,1566,1240,1622, 926,1282,1907,2764, 654,2210,1607, 327,1130, # 3632
3956,1678,1623,6170,2434,2192, 686, 608,3831,3715, 903,3957,3042,6171,2741,1522, # 3648
1915,1105,1555,2552,1359, 323,3251,4346,3457, 738,1354,2553,2311,2334,1828,2003, # 3664
3832,1753,2351,1227,6172,1887,4125,1478,6173,2410,1874,1712,1847, 520,1204,2607, # 3680
 264,4659, 836,2677,2102, 600,4660,3833,2278,3084,6174,4347,3615,1342, 640, 532, # 3696
 543,2608,1888,2400,2591,1009,4348,1497, 341,1737,3616,2723,1394, 529,3252,1321, # 3712
 983,4661,1515,2120, 971,2592, 924, 287,1662,3186,4349,2700,4350,1519, 908,1948, # 3728
2452, 156, 796,1629,1486,2223,2055, 694,4126,1259,1036,3392,1213,2249,2742,1889, # 3744
1230,3958,1015, 910, 408, 559,3617,4662, 746, 725, 935,4663,3959,3009,1289, 563, # 3760
 867,4664,3960,1567,2981,2038,2626, 988,2263,2381,4351, 143,2374, 704,1895,6175, # 3776
1188,3716,2088, 673,3085,2362,4352, 484,1608,1921,2765,2918, 215, 904,3618,3537, # 3792
 894, 509, 976,3043,2701,3961,4353,2837,2982, 498,6176,6177,1102,3538,1332,3393, # 3808
1487,1636,1637, 233, 245,3962, 383, 650, 995,3044, 460,1520,1206,2352, 749,3327, # 3824
 530, 700, 389,1438,1560,1773,3963,2264, 719,2951,2724,3834, 870,1832,1644,1000, # 3840
 839,2474,3717, 197,1630,3394, 365,2886,3964,1285,2133, 734, 922, 818,1106, 732, # 3856
 480,2083,1774,3458, 923,2279,1350, 221,3086,  85,2233,2234,3835,1585,3010,2147, # 3872
1387,1705,2382,1619,2475, 133, 239,2802,1991,1016,2084,2383, 411,2838,1113, 651, # 3888
1985,1160,3328, 990,1863,3087,1048,1276,2647, 265,2627,1599,3253,2056, 150, 638, # 3904
2019, 656, 853, 326,1479, 680,1439,4354,1001,1759, 413,3459,3395,2492,1431, 459, # 3920
4355,1125,3329,2265,1953,1450,2065,2863, 849, 351,2678,3131,3254,3255,1104,1577, # 3936
 227,1351,1645,2453,2193,1421,2887, 812,2121, 634,  95,2435, 201,2312,4665,1646, # 3952
1671,2743,1601,2554,2702,2648,2280,1315,1366,2089,3132,1573,3718,3965,1729,1189, # 3968
 328,2679,1077,1940,1136, 558,1283, 964,1195, 621,2074,1199,1743,3460,3619,1896, # 3984
1916,1890,3836,2952,1154,2112,1064, 862, 378,3011,2066,2113,2803,1568,2839,6178, # 4000
3088,2919,1941,1660,2004,1992,2194, 142, 707,1590,1708,1624,1922,1023,1836,1233, # 4016
1004,2313, 789, 741,3620,6179,1609,2411,1200,4127,3719,3720,4666,2057,3721, 593, # 4032
2840, 367,2920,1878,6180,3461,1521, 628,1168, 692,2211,2649, 300, 720,2067,2571, # 4048
2953,3396, 959,2504,3966,3539,3462,1977, 701,6181, 954,1043, 800, 681, 183,3722, # 4064
1803,1730,3540,4128,2103, 815,2314, 174, 467, 230,2454,1093,2134, 755,3541,3397, # 4080
1141,1162,6182,1738,2039, 270,3256,2513,1005,1647,2185,3837, 858,1679,1897,1719, # 4096
2954,2324,1806, 402, 670, 167,4129,1498,2158,2104, 750,6183, 915, 189,1680,1551, # 4112
 455,4356,1501,2455, 405,1095,2955, 338,1586,1266,1819, 570, 641,1324, 237,1556, # 4128
2650,1388,3723,6184,1368,2384,1343,1978,3089,2436, 879,3724, 792,1191, 758,3012, # 4144
1411,2135,1322,4357, 240,4667,1848,3725,1574,6185, 420,3045,1546,1391, 714,4358, # 4160
1967, 941,1864, 863, 664, 426, 560,1731,2680,1785,2864,1949,2363, 403,3330,1415, # 4176
1279,2136,1697,2335, 204, 721,2097,3838,  90,6186,2085,2505, 191,3967, 124,2148, # 4192
1376,1798,1178,1107,1898,1405, 860,4359,1243,1272,2375,2983,1558,2456,1638, 113, # 4208
3621, 578,1923,2609, 880, 386,4130, 784,2186,2266,1422,2956,2172,1722, 497, 263, # 4224
2514,1267,2412,2610, 177,2703,3542, 774,1927,1344, 616,1432,1595,1018, 172,4360, # 4240
2325, 911,4361, 438,1468,3622, 794,3968,2024,2173,1681,1829,2957, 945, 895,3090, # 4256
 575,2212,2476, 475,2401,2681, 785,2744,1745,2293,2555,1975,3133,2865, 394,4668, # 4272
3839, 635,4131, 639, 202,1507,2195,2766,1345,1435,2572,3726,1908,1184,1181,2457, # 4288
3727,3134,4362, 843,2611, 437, 916,4669, 234, 769,1884,3046,3047,3623, 833,6187, # 4304
1639,2250,2402,1355,1185,2010,2047, 999, 525,1732,1290,1488,2612, 948,1578,3728, # 4320
2413,2477,1216,2725,2159, 334,3840,1328,3624,2921,1525,4132, 564,1056, 891,4363, # 4336
1444,1698,2385,2251,3729,1365,2281,2235,1717,6188, 864,3841,2515, 444, 527,2767, # 4352
2922,3625, 544, 461,6189, 566, 209,2437,3398,2098,1065,2068,3331,3626,3257,2137, # 4368  #last 512
)


_vendor/chardet/euctwprober.py000064400000003323151733136230012532 0ustar00######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################

from .mbcharsetprober import MultiByteCharSetProber
from .codingstatemachine import CodingStateMachine
from .chardistribution import EUCTWDistributionAnalysis
from .mbcssm import EUCTW_SM_MODEL

class EUCTWProber(MultiByteCharSetProber):
    """Prober for the EUC-TW encoding (Traditional Chinese, Taiwan)."""

    def __init__(self):
        super(EUCTWProber, self).__init__()
        self.distribution_analyzer = EUCTWDistributionAnalysis()
        self.coding_sm = CodingStateMachine(EUCTW_SM_MODEL)
        self.reset()

    @property
    def language(self):
        """Language associated with this charset."""
        return "Taiwan"

    @property
    def charset_name(self):
        """Name of the charset this prober detects."""
        return "EUC-TW"
_vendor/chardet/__init__.py000064400000003027151733136240011732 0ustar00######################## BEGIN LICENSE BLOCK ########################
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################


from .compat import PY2, PY3
from .universaldetector import UniversalDetector
from .version import __version__, VERSION


def detect(byte_str):
    """
    Detect the encoding of the given byte string.

    :param byte_str:     The byte sequence to examine.
    :type byte_str:      ``bytes`` or ``bytearray``
    """
    # Guard clauses: reject anything that is not bytes-like, then normalise
    # to bytearray so the detector always sees a single input type.
    if not isinstance(byte_str, (bytes, bytearray)):
        raise TypeError('Expected object of type bytes or bytearray, got: '
                        '{0}'.format(type(byte_str)))
    if not isinstance(byte_str, bytearray):
        byte_str = bytearray(byte_str)
    detector = UniversalDetector()
    detector.feed(byte_str)
    return detector.close()
_vendor/chardet/gb2312prober.py000064400000003332151733136240012304 0ustar00######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################

from .mbcharsetprober import MultiByteCharSetProber
from .codingstatemachine import CodingStateMachine
from .chardistribution import GB2312DistributionAnalysis
from .mbcssm import GB2312_SM_MODEL

class GB2312Prober(MultiByteCharSetProber):
    """Prober for the GB2312 (simplified Chinese) encoding.

    Pairs a GB2312 coding state machine with a GB2312 character
    distribution analyser; the generic ``MultiByteCharSetProber``
    machinery drives both when data is fed in.
    """
    def __init__(self):
        super(GB2312Prober, self).__init__()
        self.coding_sm = CodingStateMachine(GB2312_SM_MODEL)
        self.distribution_analyzer = GB2312DistributionAnalysis()
        # reset() last: base-class reset presumably touches the two
        # attributes assigned above — TODO confirm in mbcharsetprober.py.
        self.reset()

    @property
    def charset_name(self):
        # Canonical charset name reported for this prober.
        return "GB2312"

    @property
    def language(self):
        return "Chinese"
_vendor/chardet/charsetgroupprober.py000064400000007313151733136240014115 0ustar00######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################

from .enums import ProbingState
from .charsetprober import CharSetProber


class CharSetGroupProber(CharSetProber):
    """Runs a group of child probers over the same data and reports the best.

    Children that answer ``NOT_ME`` are deactivated; ``charset_name``,
    ``language`` and ``get_confidence`` reflect the highest-confidence
    child still in the running.
    """
    def __init__(self, lang_filter=None):
        super(CharSetGroupProber, self).__init__(lang_filter=lang_filter)
        # Count of child probers that have not ruled themselves out yet.
        self._active_num = 0
        # Child probers; populated by subclasses (may contain None entries,
        # hence the `if prober` guards below).
        self.probers = []
        # Cache of the best child found by the last get_confidence() call.
        self._best_guess_prober = None

    def reset(self):
        """Reset this prober and every child, reactivating all of them."""
        super(CharSetGroupProber, self).reset()
        self._active_num = 0
        for prober in self.probers:
            if prober:
                prober.reset()
                prober.active = True
                self._active_num += 1
        self._best_guess_prober = None

    @property
    def charset_name(self):
        # Lazily resolve the best guess; None if no child stands out.
        if not self._best_guess_prober:
            self.get_confidence()
            if not self._best_guess_prober:
                return None
        return self._best_guess_prober.charset_name

    @property
    def language(self):
        # Same lazy best-guess resolution as charset_name.
        if not self._best_guess_prober:
            self.get_confidence()
            if not self._best_guess_prober:
                return None
        return self._best_guess_prober.language

    def feed(self, byte_str):
        """Feed byte_str to every active child and return the group state."""
        for prober in self.probers:
            if not prober:
                continue
            if not prober.active:
                continue
            state = prober.feed(byte_str)
            if not state:
                continue
            if state == ProbingState.FOUND_IT:
                # A child is certain: remember it and stop feeding others.
                self._best_guess_prober = prober
                return self.state
            elif state == ProbingState.NOT_ME:
                # Child ruled itself out; drop it from further feeds.
                prober.active = False
                self._active_num -= 1
                if self._active_num <= 0:
                    # Every child said NOT_ME, so the whole group does too.
                    self._state = ProbingState.NOT_ME
                    return self.state
        return self.state

    def get_confidence(self):
        """Return the best child's confidence; caches that child as well."""
        state = self.state
        if state == ProbingState.FOUND_IT:
            return 0.99
        elif state == ProbingState.NOT_ME:
            return 0.01
        # Still detecting: scan active children for the highest confidence.
        best_conf = 0.0
        self._best_guess_prober = None
        for prober in self.probers:
            if not prober:
                continue
            if not prober.active:
                self.logger.debug('%s not active', prober.charset_name)
                continue
            conf = prober.get_confidence()
            self.logger.debug('%s %s confidence = %s', prober.charset_name, prober.language, conf)
            if best_conf < conf:
                best_conf = conf
                self._best_guess_prober = prober
        if not self._best_guess_prober:
            return 0.0
        return best_conf
_vendor/chardet/escsm.py000064400000024416151733136240011312 0ustar00######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################

from .enums import MachineState

# Escape-scheme state machines for ISO-2022-style encodings.
# For each encoding: a *_CLS table mapping all 256 byte values to a
# character class, a *_ST state-transition table (indexed by current
# state and byte class, consumed by CodingStateMachine), a char-length
# table, and a *_SM_MODEL dict bundling them together.

# HZ-GB-2312: byte -> character class.
HZ_CLS = (
1,0,0,0,0,0,0,0,  # 00 - 07
0,0,0,0,0,0,0,0,  # 08 - 0f
0,0,0,0,0,0,0,0,  # 10 - 17
0,0,0,1,0,0,0,0,  # 18 - 1f
0,0,0,0,0,0,0,0,  # 20 - 27
0,0,0,0,0,0,0,0,  # 28 - 2f
0,0,0,0,0,0,0,0,  # 30 - 37
0,0,0,0,0,0,0,0,  # 38 - 3f
0,0,0,0,0,0,0,0,  # 40 - 47
0,0,0,0,0,0,0,0,  # 48 - 4f
0,0,0,0,0,0,0,0,  # 50 - 57
0,0,0,0,0,0,0,0,  # 58 - 5f
0,0,0,0,0,0,0,0,  # 60 - 67
0,0,0,0,0,0,0,0,  # 68 - 6f
0,0,0,0,0,0,0,0,  # 70 - 77
0,0,0,4,0,5,2,0,  # 78 - 7f
1,1,1,1,1,1,1,1,  # 80 - 87
1,1,1,1,1,1,1,1,  # 88 - 8f
1,1,1,1,1,1,1,1,  # 90 - 97
1,1,1,1,1,1,1,1,  # 98 - 9f
1,1,1,1,1,1,1,1,  # a0 - a7
1,1,1,1,1,1,1,1,  # a8 - af
1,1,1,1,1,1,1,1,  # b0 - b7
1,1,1,1,1,1,1,1,  # b8 - bf
1,1,1,1,1,1,1,1,  # c0 - c7
1,1,1,1,1,1,1,1,  # c8 - cf
1,1,1,1,1,1,1,1,  # d0 - d7
1,1,1,1,1,1,1,1,  # d8 - df
1,1,1,1,1,1,1,1,  # e0 - e7
1,1,1,1,1,1,1,1,  # e8 - ef
1,1,1,1,1,1,1,1,  # f0 - f7
1,1,1,1,1,1,1,1,  # f8 - ff
)

# HZ-GB-2312 state-transition table; bare integers are intermediate states.
HZ_ST = (
MachineState.START,MachineState.ERROR,     3,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,# 00-07
MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 08-0f
MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,     4,MachineState.ERROR,# 10-17
     5,MachineState.ERROR,     6,MachineState.ERROR,     5,     5,     4,MachineState.ERROR,# 18-1f
     4,MachineState.ERROR,     4,     4,     4,MachineState.ERROR,     4,MachineState.ERROR,# 20-27
     4,MachineState.ITS_ME,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 28-2f
)

HZ_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0)

HZ_SM_MODEL = {'class_table': HZ_CLS,
               'class_factor': 6,
               'state_table': HZ_ST,
               'char_len_table': HZ_CHAR_LEN_TABLE,
               'name': "HZ-GB-2312",
               'language': 'Chinese'}

# ISO-2022-CN: byte -> character class.
ISO2022CN_CLS = (
2,0,0,0,0,0,0,0,  # 00 - 07
0,0,0,0,0,0,0,0,  # 08 - 0f
0,0,0,0,0,0,0,0,  # 10 - 17
0,0,0,1,0,0,0,0,  # 18 - 1f
0,0,0,0,0,0,0,0,  # 20 - 27
0,3,0,0,0,0,0,0,  # 28 - 2f
0,0,0,0,0,0,0,0,  # 30 - 37
0,0,0,0,0,0,0,0,  # 38 - 3f
0,0,0,4,0,0,0,0,  # 40 - 47
0,0,0,0,0,0,0,0,  # 48 - 4f
0,0,0,0,0,0,0,0,  # 50 - 57
0,0,0,0,0,0,0,0,  # 58 - 5f
0,0,0,0,0,0,0,0,  # 60 - 67
0,0,0,0,0,0,0,0,  # 68 - 6f
0,0,0,0,0,0,0,0,  # 70 - 77
0,0,0,0,0,0,0,0,  # 78 - 7f
2,2,2,2,2,2,2,2,  # 80 - 87
2,2,2,2,2,2,2,2,  # 88 - 8f
2,2,2,2,2,2,2,2,  # 90 - 97
2,2,2,2,2,2,2,2,  # 98 - 9f
2,2,2,2,2,2,2,2,  # a0 - a7
2,2,2,2,2,2,2,2,  # a8 - af
2,2,2,2,2,2,2,2,  # b0 - b7
2,2,2,2,2,2,2,2,  # b8 - bf
2,2,2,2,2,2,2,2,  # c0 - c7
2,2,2,2,2,2,2,2,  # c8 - cf
2,2,2,2,2,2,2,2,  # d0 - d7
2,2,2,2,2,2,2,2,  # d8 - df
2,2,2,2,2,2,2,2,  # e0 - e7
2,2,2,2,2,2,2,2,  # e8 - ef
2,2,2,2,2,2,2,2,  # f0 - f7
2,2,2,2,2,2,2,2,  # f8 - ff
)

# ISO-2022-CN state-transition table.
ISO2022CN_ST = (
MachineState.START,     3,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 00-07
MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 08-0f
MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 10-17
MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,     4,MachineState.ERROR,# 18-1f
MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 20-27
     5,     6,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 28-2f
MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 30-37
MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.START,# 38-3f
)

ISO2022CN_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0, 0, 0, 0)

ISO2022CN_SM_MODEL = {'class_table': ISO2022CN_CLS,
                      'class_factor': 9,
                      'state_table': ISO2022CN_ST,
                      'char_len_table': ISO2022CN_CHAR_LEN_TABLE,
                      'name': "ISO-2022-CN",
                      'language': 'Chinese'}

# ISO-2022-JP: byte -> character class.
ISO2022JP_CLS = (
2,0,0,0,0,0,0,0,  # 00 - 07
0,0,0,0,0,0,2,2,  # 08 - 0f
0,0,0,0,0,0,0,0,  # 10 - 17
0,0,0,1,0,0,0,0,  # 18 - 1f
0,0,0,0,7,0,0,0,  # 20 - 27
3,0,0,0,0,0,0,0,  # 28 - 2f
0,0,0,0,0,0,0,0,  # 30 - 37
0,0,0,0,0,0,0,0,  # 38 - 3f
6,0,4,0,8,0,0,0,  # 40 - 47
0,9,5,0,0,0,0,0,  # 48 - 4f
0,0,0,0,0,0,0,0,  # 50 - 57
0,0,0,0,0,0,0,0,  # 58 - 5f
0,0,0,0,0,0,0,0,  # 60 - 67
0,0,0,0,0,0,0,0,  # 68 - 6f
0,0,0,0,0,0,0,0,  # 70 - 77
0,0,0,0,0,0,0,0,  # 78 - 7f
2,2,2,2,2,2,2,2,  # 80 - 87
2,2,2,2,2,2,2,2,  # 88 - 8f
2,2,2,2,2,2,2,2,  # 90 - 97
2,2,2,2,2,2,2,2,  # 98 - 9f
2,2,2,2,2,2,2,2,  # a0 - a7
2,2,2,2,2,2,2,2,  # a8 - af
2,2,2,2,2,2,2,2,  # b0 - b7
2,2,2,2,2,2,2,2,  # b8 - bf
2,2,2,2,2,2,2,2,  # c0 - c7
2,2,2,2,2,2,2,2,  # c8 - cf
2,2,2,2,2,2,2,2,  # d0 - d7
2,2,2,2,2,2,2,2,  # d8 - df
2,2,2,2,2,2,2,2,  # e0 - e7
2,2,2,2,2,2,2,2,  # e8 - ef
2,2,2,2,2,2,2,2,  # f0 - f7
2,2,2,2,2,2,2,2,  # f8 - ff
)

# ISO-2022-JP state-transition table.
ISO2022JP_ST = (
MachineState.START,     3,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 00-07
MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 08-0f
MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 10-17
MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,# 18-1f
MachineState.ERROR,     5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,     4,MachineState.ERROR,MachineState.ERROR,# 20-27
MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,     6,MachineState.ITS_ME,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,# 28-2f
MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,# 30-37
MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 38-3f
MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.START,MachineState.START,# 40-47
)

ISO2022JP_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0, 0, 0, 0, 0)

ISO2022JP_SM_MODEL = {'class_table': ISO2022JP_CLS,
                      'class_factor': 10,
                      'state_table': ISO2022JP_ST,
                      'char_len_table': ISO2022JP_CHAR_LEN_TABLE,
                      'name': "ISO-2022-JP",
                      'language': 'Japanese'}

# ISO-2022-KR: byte -> character class.
ISO2022KR_CLS = (
2,0,0,0,0,0,0,0,  # 00 - 07
0,0,0,0,0,0,0,0,  # 08 - 0f
0,0,0,0,0,0,0,0,  # 10 - 17
0,0,0,1,0,0,0,0,  # 18 - 1f
0,0,0,0,3,0,0,0,  # 20 - 27
0,4,0,0,0,0,0,0,  # 28 - 2f
0,0,0,0,0,0,0,0,  # 30 - 37
0,0,0,0,0,0,0,0,  # 38 - 3f
0,0,0,5,0,0,0,0,  # 40 - 47
0,0,0,0,0,0,0,0,  # 48 - 4f
0,0,0,0,0,0,0,0,  # 50 - 57
0,0,0,0,0,0,0,0,  # 58 - 5f
0,0,0,0,0,0,0,0,  # 60 - 67
0,0,0,0,0,0,0,0,  # 68 - 6f
0,0,0,0,0,0,0,0,  # 70 - 77
0,0,0,0,0,0,0,0,  # 78 - 7f
2,2,2,2,2,2,2,2,  # 80 - 87
2,2,2,2,2,2,2,2,  # 88 - 8f
2,2,2,2,2,2,2,2,  # 90 - 97
2,2,2,2,2,2,2,2,  # 98 - 9f
2,2,2,2,2,2,2,2,  # a0 - a7
2,2,2,2,2,2,2,2,  # a8 - af
2,2,2,2,2,2,2,2,  # b0 - b7
2,2,2,2,2,2,2,2,  # b8 - bf
2,2,2,2,2,2,2,2,  # c0 - c7
2,2,2,2,2,2,2,2,  # c8 - cf
2,2,2,2,2,2,2,2,  # d0 - d7
2,2,2,2,2,2,2,2,  # d8 - df
2,2,2,2,2,2,2,2,  # e0 - e7
2,2,2,2,2,2,2,2,  # e8 - ef
2,2,2,2,2,2,2,2,  # f0 - f7
2,2,2,2,2,2,2,2,  # f8 - ff
)

# ISO-2022-KR state-transition table.
ISO2022KR_ST = (
MachineState.START,     3,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,# 00-07
MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 08-0f
MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,     4,MachineState.ERROR,MachineState.ERROR,# 10-17
MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,     5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 18-1f
MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 20-27
)

ISO2022KR_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0)

ISO2022KR_SM_MODEL = {'class_table': ISO2022KR_CLS,
                      'class_factor': 6,
                      'state_table': ISO2022KR_ST,
                      'char_len_table': ISO2022KR_CHAR_LEN_TABLE,
                      'name': "ISO-2022-KR",
                      'language': 'Korean'}


_vendor/chardet/langcyrillicmodel.py000064400000043034151733136240013672 0ustar00######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################

# KOI8-R language model
# Character Mapping Table:
# Each *_char_to_order_map maps all 256 byte values of one legacy
# Cyrillic encoding to a letter "order" (frequency rank) used by the
# shared RussianLangModel below.  High values mark non-letters:
# 255/254 control-ish bytes, 253 symbols, 252 digits.
KOI8R_char_to_order_map = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255,  # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,  # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,  # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253,  # 30
253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154,  # 40
155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253,  # 50
253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69,  # 60
 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253,  # 70
191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,  # 80
207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,  # 90
223,224,225, 68,226,227,228,229,230,231,232,233,234,235,236,237,  # a0
238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,  # b0
 27,  3, 21, 28, 13,  2, 39, 19, 26,  4, 23, 11,  8, 12,  5,  1,  # c0
 15, 16,  9,  7,  6, 14, 24, 10, 17, 18, 20, 25, 30, 29, 22, 54,  # d0
 59, 37, 44, 58, 41, 48, 53, 46, 55, 42, 60, 36, 49, 38, 31, 34,  # e0
 35, 43, 45, 32, 40, 52, 56, 33, 61, 62, 51, 57, 47, 63, 50, 70,  # f0
)

# windows-1251 byte -> letter order.
win1251_char_to_order_map = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255,  # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,  # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,  # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253,  # 30
253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154,  # 40
155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253,  # 50
253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69,  # 60
 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253,  # 70
191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,
207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,
223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,
239,240,241,242,243,244,245,246, 68,247,248,249,250,251,252,253,
 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35,
 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43,
  3, 21, 10, 19, 13,  2, 24, 20,  4, 23, 11,  8, 12,  5,  1, 15,
  9,  7,  6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16,
)

# ISO-8859-5 (latin5) byte -> letter order.
latin5_char_to_order_map = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255,  # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,  # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,  # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253,  # 30
253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154,  # 40
155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253,  # 50
253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69,  # 60
 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253,  # 70
191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,
207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,
223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,
 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35,
 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43,
  3, 21, 10, 19, 13,  2, 24, 20,  4, 23, 11,  8, 12,  5,  1, 15,
  9,  7,  6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16,
239, 68,240,241,242,243,244,245,246,247,248,249,250,251,252,255,
)

# MacCyrillic byte -> letter order.
macCyrillic_char_to_order_map = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255,  # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,  # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,  # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253,  # 30
253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154,  # 40
155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253,  # 50
253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69,  # 60
 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253,  # 70
 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35,
 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43,
191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,
207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,
223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,
239,240,241,242,243,244,245,246,247,248,249,250,251,252, 68, 16,
  3, 21, 10, 19, 13,  2, 24, 20,  4, 23, 11,  8, 12,  5,  1, 15,
  9,  7,  6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27,255,
)

# IBM855 byte -> letter order.
IBM855_char_to_order_map = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255,  # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,  # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,  # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253,  # 30
253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154,  # 40
155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253,  # 50
253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69,  # 60
 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253,  # 70
191,192,193,194, 68,195,196,197,198,199,200,201,202,203,204,205,
206,207,208,209,210,211,212,213,214,215,216,217, 27, 59, 54, 70,
  3, 37, 21, 44, 28, 58, 13, 41,  2, 48, 39, 53, 19, 46,218,219,
220,221,222,223,224, 26, 55,  4, 42,225,226,227,228, 23, 60,229,
230,231,232,233,234,235, 11, 36,236,237,238,239,240,241,242,243,
  8, 49, 12, 38,  5, 31,  1, 34, 15,244,245,246,247, 35, 16,248,
 43,  9, 45,  7, 32,  6, 40, 14, 52, 24, 56, 10, 33, 17, 61,249,
250, 18, 62, 20, 51, 25, 57, 30, 47, 29, 63, 22, 50,251,252,255,
)

# IBM866 byte -> letter order.
IBM866_char_to_order_map = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255,  # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,  # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,  # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253,  # 30
253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154,  # 40
155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253,  # 50
253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69,  # 60
 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253,  # 70
 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35,
 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43,
  3, 21, 10, 19, 13,  2, 24, 20,  4, 23, 11,  8, 12,  5,  1, 15,
191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,
207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,
223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,
  9,  7,  6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16,
239, 68,240,241,242,243,244,245,246,247,248,249,250,251,252,255,
)

# Model Table:
# total sequences: 100%
# first 512 sequences: 97.6601%
# first 1024 sequences: 2.3389%
# rest  sequences:      0.1237%
# negative sequences:   0.0009%
# Flat precedence matrix for Russian: frequency category (0-3) of letter
# sequences, indexed by the letter orders produced by the maps above
# (see the "Model Table" statistics in the comments just before this
# definition).  Generated data — do not edit by hand.
RussianLangModel = (
0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,1,3,3,3,3,1,3,3,3,2,3,2,3,3,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,2,2,2,2,2,0,0,2,
3,3,3,2,3,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,3,2,3,2,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,2,2,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,2,3,3,1,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,2,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,0,0,3,3,3,3,3,3,3,3,3,3,3,2,1,
0,0,0,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,3,3,3,2,1,
0,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,2,2,2,3,1,3,3,1,3,3,3,3,2,2,3,0,2,2,2,3,3,2,1,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,2,3,3,3,3,3,2,2,3,2,3,3,3,2,1,2,2,0,1,2,2,2,2,2,2,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,3,0,2,2,3,3,2,1,2,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,2,3,3,1,2,3,2,2,3,2,3,3,3,3,2,2,3,0,3,2,2,3,1,1,1,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,2,3,3,3,3,2,2,2,0,3,3,3,2,2,2,2,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,2,3,2,3,3,3,3,3,3,2,3,2,2,0,1,3,2,1,2,2,1,0,
0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,3,2,1,1,3,0,1,1,1,1,2,1,1,0,2,2,2,1,2,0,1,0,
0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,2,3,3,2,2,2,2,1,3,2,3,2,3,2,1,2,2,0,1,1,2,1,2,1,2,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,3,3,2,2,3,2,3,3,3,2,2,2,2,0,2,2,2,2,3,1,1,0,
0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
3,2,3,2,2,3,3,3,3,3,3,3,3,3,1,3,2,0,0,3,3,3,3,2,3,3,3,3,2,3,2,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,3,3,3,3,3,2,2,3,3,0,2,1,0,3,2,3,2,3,0,0,1,2,0,0,1,0,1,2,1,1,0,
0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,3,0,2,3,3,3,3,2,3,3,3,3,1,2,2,0,0,2,3,2,2,2,3,2,3,2,2,3,0,0,
0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,2,3,0,2,3,2,3,0,1,2,3,3,2,0,2,3,0,0,2,3,2,2,0,1,3,1,3,2,2,1,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,1,3,0,2,3,3,3,3,3,3,3,3,2,1,3,2,0,0,2,2,3,3,3,2,3,3,0,2,2,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,2,2,3,3,2,2,2,3,3,0,0,1,1,1,1,1,2,0,0,1,1,1,1,0,1,0,
0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,2,2,3,3,3,3,3,3,3,0,3,2,3,3,2,3,2,0,2,1,0,1,1,0,1,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,2,3,3,3,2,2,2,2,3,1,3,2,3,1,1,2,1,0,2,2,2,2,1,3,1,0,
0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
2,2,3,3,3,3,3,1,2,2,1,3,1,0,3,0,0,3,0,0,0,1,1,0,1,2,1,0,0,0,0,0,
0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,2,2,1,1,3,3,3,2,2,1,2,2,3,1,1,2,0,0,2,2,1,3,0,0,2,1,1,2,1,1,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,2,3,3,3,3,1,2,2,2,1,2,1,3,3,1,1,2,1,2,1,2,2,0,2,0,0,1,1,0,1,0,
0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,3,3,3,3,3,2,1,3,2,2,3,2,0,3,2,0,3,0,1,0,1,1,0,0,1,1,1,1,0,1,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,2,3,3,3,2,2,2,3,3,1,2,1,2,1,0,1,0,1,1,0,1,0,0,2,1,1,1,0,1,0,
0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,
3,1,1,2,1,2,3,3,2,2,1,2,2,3,0,2,1,0,0,2,2,3,2,1,2,2,2,2,2,3,1,0,
0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,1,1,0,1,1,2,2,1,1,3,0,0,1,3,1,1,1,0,0,0,1,0,1,1,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,1,3,3,3,2,0,0,0,2,1,0,1,0,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,0,1,0,0,2,3,2,2,2,1,2,2,2,1,2,1,0,0,1,1,1,0,2,0,1,1,1,0,0,1,1,
1,0,0,0,0,0,1,2,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,
2,3,3,3,3,0,0,0,0,1,0,0,0,0,3,0,1,2,1,0,0,0,0,0,0,0,1,1,0,0,1,1,
1,0,1,0,1,2,0,0,1,1,2,1,0,1,1,1,1,0,1,1,1,1,0,1,0,0,1,0,0,1,1,0,
2,2,3,2,2,2,3,1,2,2,2,2,2,2,2,2,1,1,1,1,1,1,1,0,1,0,1,1,1,0,2,1,
1,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1,1,1,0,1,0,1,1,0,1,1,1,0,1,1,0,
3,3,3,2,2,2,2,3,2,2,1,1,2,2,2,2,1,1,3,1,2,1,2,0,0,1,1,0,1,0,2,1,
1,1,1,1,1,2,1,0,1,1,1,1,0,1,0,0,1,1,0,0,1,0,1,0,0,1,0,0,0,1,1,0,
2,0,0,1,0,3,2,2,2,2,1,2,1,2,1,2,0,0,0,2,1,2,2,1,1,2,2,0,1,1,0,2,
1,1,1,1,1,0,1,1,1,2,1,1,1,2,1,0,1,2,1,1,1,1,0,1,1,1,0,0,1,0,0,1,
1,3,2,2,2,1,1,1,2,3,0,0,0,0,2,0,2,2,1,0,0,0,0,0,0,1,0,0,0,0,1,1,
1,0,1,1,0,1,0,1,1,0,1,1,0,2,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0,
2,3,2,3,2,1,2,2,2,2,1,0,0,0,2,0,0,1,1,0,0,0,0,0,0,0,1,1,0,0,2,1,
1,1,2,1,0,2,0,0,1,0,1,0,0,1,0,0,1,1,0,1,1,0,0,0,0,0,1,0,0,0,0,0,
3,0,0,1,0,2,2,2,3,2,2,2,2,2,2,2,0,0,0,2,1,2,1,1,1,2,2,0,0,0,1,2,
1,1,1,1,1,0,1,2,1,1,1,1,1,1,1,0,1,1,1,1,1,1,0,1,1,1,1,1,1,0,0,1,
2,3,2,3,3,2,0,1,1,1,0,0,1,0,2,0,1,1,3,1,0,0,0,0,0,0,0,1,0,0,2,1,
1,1,1,1,1,1,1,0,1,0,1,1,1,1,0,1,1,1,0,0,1,1,0,1,0,0,0,0,0,0,1,0,
2,3,3,3,3,1,2,2,2,2,0,1,1,0,2,1,1,1,2,1,0,1,1,0,0,1,0,1,0,0,2,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,3,3,3,2,0,0,1,1,2,2,1,0,0,2,0,1,1,3,0,0,1,0,0,0,0,0,1,0,1,2,1,
1,1,2,0,1,1,1,0,1,0,1,1,0,1,0,1,1,1,1,0,1,0,0,0,0,0,0,1,0,1,1,0,
1,3,2,3,2,1,0,0,2,2,2,0,1,0,2,0,1,1,1,0,1,0,0,0,3,0,1,1,0,0,2,1,
1,1,1,0,1,1,0,0,0,0,1,1,0,1,0,0,2,1,1,0,1,0,0,0,1,0,1,0,0,1,1,0,
3,1,2,1,1,2,2,2,2,2,2,1,2,2,1,1,0,0,0,2,2,2,0,0,0,1,2,1,0,1,0,1,
2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,2,1,1,1,0,1,0,1,1,0,1,1,1,0,0,1,
3,0,0,0,0,2,0,1,1,1,1,1,1,1,0,1,0,0,0,1,1,1,0,1,0,1,1,0,0,1,0,1,
1,1,0,0,1,0,0,0,1,0,1,1,0,0,1,0,1,0,1,0,0,0,0,1,0,0,0,1,0,0,0,1,
1,3,3,2,2,0,0,0,2,2,0,0,0,1,2,0,1,1,2,0,0,0,0,0,0,0,0,1,0,0,2,1,
0,1,1,0,0,1,1,0,0,0,1,1,0,1,1,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0,
2,3,2,3,2,0,0,0,0,1,1,0,0,0,2,0,2,0,2,0,0,0,0,0,1,0,0,1,0,0,1,1,
1,1,2,0,1,2,1,0,1,1,2,1,1,1,1,1,2,1,1,0,1,0,0,1,1,1,1,1,0,1,1,0,
1,3,2,2,2,1,0,0,2,2,1,0,1,2,2,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,1,1,
0,0,1,1,0,1,1,0,0,1,1,0,1,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
1,0,0,1,0,2,3,1,2,2,2,2,2,2,1,1,0,0,0,1,0,1,0,2,1,1,1,0,0,0,0,1,
1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,
2,0,2,0,0,1,0,3,2,1,2,1,2,2,0,1,0,0,0,2,1,0,0,2,1,1,1,1,0,2,0,2,
2,1,1,1,1,1,1,1,1,1,1,1,1,2,1,0,1,1,1,1,0,0,0,1,1,1,1,0,1,0,0,1,
1,2,2,2,2,1,0,0,1,0,0,0,0,0,2,0,1,1,1,1,0,0,0,0,1,0,1,2,0,0,2,0,
1,0,1,1,1,2,1,0,1,0,1,1,0,0,1,0,1,1,1,0,1,0,0,0,1,0,0,1,0,1,1,0,
2,1,2,2,2,0,3,0,1,1,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
0,0,0,1,1,1,0,0,1,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,
1,2,2,3,2,2,0,0,1,1,2,0,1,2,1,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,1,
0,1,1,0,0,1,1,0,0,1,1,0,0,1,1,0,1,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0,
2,2,1,1,2,1,2,2,2,2,2,1,2,2,0,1,0,0,0,1,2,2,2,1,2,1,1,1,1,1,2,1,
1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,0,1,1,1,0,0,0,0,1,1,1,0,1,1,0,0,1,
1,2,2,2,2,0,1,0,2,2,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,
0,0,1,0,0,1,0,0,0,0,1,0,1,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,2,2,2,2,0,0,0,2,2,2,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,1,
0,1,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,2,2,2,2,0,0,0,0,1,0,0,1,1,2,0,0,0,0,1,0,1,0,0,1,0,0,2,0,0,0,1,
0,0,1,0,0,1,0,0,0,1,1,0,0,0,0,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,
1,2,2,2,1,1,2,0,2,1,1,1,1,0,2,2,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1,1,
0,0,1,0,1,1,0,0,0,0,1,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
1,0,2,1,2,0,0,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,
0,0,1,0,1,1,0,0,0,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,
1,0,0,0,0,2,0,1,2,1,0,1,1,1,0,1,0,0,0,1,0,1,0,0,1,0,1,0,0,0,0,1,
0,0,0,0,0,1,0,0,1,1,0,0,1,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,
2,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
1,0,0,0,1,0,0,0,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,0,0,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
1,1,1,0,1,0,1,0,0,1,1,1,1,0,0,0,1,0,0,0,0,1,0,0,0,1,0,1,0,0,0,0,
1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
1,1,0,1,1,0,1,0,1,0,0,0,0,1,1,0,1,1,0,0,0,0,0,1,0,1,1,0,1,0,0,0,
0,1,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,
)

# One language model per legacy Cyrillic encoding: each pairs a
# byte->order map with the shared RussianLangModel precedence matrix.
# These dicts are the configuration consumed by SBCharSetProber-style
# single-byte probers — TODO confirm the consumer in sbcsgroupprober.py.
Koi8rModel = {
  'char_to_order_map': KOI8R_char_to_order_map,
  'precedence_matrix': RussianLangModel,
  'typical_positive_ratio': 0.976601,
  'keep_english_letter': False,
  'charset_name': "KOI8-R",
  'language': 'Russian',
}

Win1251CyrillicModel = {
  'char_to_order_map': win1251_char_to_order_map,
  'precedence_matrix': RussianLangModel,
  'typical_positive_ratio': 0.976601,
  'keep_english_letter': False,
  'charset_name': "windows-1251",
  'language': 'Russian',
}

Latin5CyrillicModel = {
  'char_to_order_map': latin5_char_to_order_map,
  'precedence_matrix': RussianLangModel,
  'typical_positive_ratio': 0.976601,
  'keep_english_letter': False,
  'charset_name': "ISO-8859-5",
  'language': 'Russian',
}

MacCyrillicModel = {
  'char_to_order_map': macCyrillic_char_to_order_map,
  'precedence_matrix': RussianLangModel,
  'typical_positive_ratio': 0.976601,
  'keep_english_letter': False,
  'charset_name': "MacCyrillic",
  'language': 'Russian',
}

Ibm866Model = {
  'char_to_order_map': IBM866_char_to_order_map,
  'precedence_matrix': RussianLangModel,
  'typical_positive_ratio': 0.976601,
  'keep_english_letter': False,
  'charset_name': "IBM866",
  'language': 'Russian',
}

Ibm855Model = {
  'char_to_order_map': IBM855_char_to_order_map,
  'precedence_matrix': RussianLangModel,
  'typical_positive_ratio': 0.976601,
  'keep_english_letter': False,
  'charset_name': "IBM855",
  'language': 'Russian',
}
_vendor/chardet/cp949prober.py000064400000003477151733136240012266 0ustar00######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################

from .chardistribution import EUCKRDistributionAnalysis
from .codingstatemachine import CodingStateMachine
from .mbcharsetprober import MultiByteCharSetProber
from .mbcssm import CP949_SM_MODEL


class CP949Prober(MultiByteCharSetProber):
    """Multi-byte charset prober for the Korean CP949 encoding."""

    def __init__(self):
        super(CP949Prober, self).__init__()
        self.coding_sm = CodingStateMachine(CP949_SM_MODEL)
        # NOTE: CP949 is a superset of EUC-KR, so the EUC-KR character
        #       frequency distribution applies to CP949 text as well.
        self.distribution_analyzer = EUCKRDistributionAnalysis()
        self.reset()

    @property
    def charset_name(self):
        # Name reported to callers when this prober wins.
        return "CP949"

    @property
    def language(self):
        return "Korean"
_vendor/chardet/jpcntx.py000064400000046273151733136240011513 0ustar00######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################


# This is the hiragana 2-char sequence table; the number in each cell represents the frequency category of that character pair.
jp2CharContext = (
# One row and one column per hiragana order as returned by get_order()
# (0-82); each cell holds the frequency category (0-5) of that 2-char
# sequence, 0 meaning the pair is rarely observed in Japanese text.
(0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1),
(2,4,0,4,0,3,0,4,0,3,4,4,4,2,4,3,3,4,3,2,3,3,4,2,3,3,3,2,4,1,4,3,3,1,5,4,3,4,3,4,3,5,3,0,3,5,4,2,0,3,1,0,3,3,0,3,3,0,1,1,0,4,3,0,3,3,0,4,0,2,0,3,5,5,5,5,4,0,4,1,0,3,4),
(0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2),
(0,4,0,5,0,5,0,4,0,4,5,4,4,3,5,3,5,1,5,3,4,3,4,4,3,4,3,3,4,3,5,4,4,3,5,5,3,5,5,5,3,5,5,3,4,5,5,3,1,3,2,0,3,4,0,4,2,0,4,2,1,5,3,2,3,5,0,4,0,2,0,5,4,4,5,4,5,0,4,0,0,4,4),
(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
(0,3,0,4,0,3,0,3,0,4,5,4,3,3,3,3,4,3,5,4,4,3,5,4,4,3,4,3,4,4,4,4,5,3,4,4,3,4,5,5,4,5,5,1,4,5,4,3,0,3,3,1,3,3,0,4,4,0,3,3,1,5,3,3,3,5,0,4,0,3,0,4,4,3,4,3,3,0,4,1,1,3,4),
(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
(0,4,0,3,0,3,0,4,0,3,4,4,3,2,2,1,2,1,3,1,3,3,3,3,3,4,3,1,3,3,5,3,3,0,4,3,0,5,4,3,3,5,4,4,3,4,4,5,0,1,2,0,1,2,0,2,2,0,1,0,0,5,2,2,1,4,0,3,0,1,0,4,4,3,5,4,3,0,2,1,0,4,3),
(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
(0,3,0,5,0,4,0,2,1,4,4,2,4,1,4,2,4,2,4,3,3,3,4,3,3,3,3,1,4,2,3,3,3,1,4,4,1,1,1,4,3,3,2,0,2,4,3,2,0,3,3,0,3,1,1,0,0,0,3,3,0,4,2,2,3,4,0,4,0,3,0,4,4,5,3,4,4,0,3,0,0,1,4),
(1,4,0,4,0,4,0,4,0,3,5,4,4,3,4,3,5,4,3,3,4,3,5,4,4,4,4,3,4,2,4,3,3,1,5,4,3,2,4,5,4,5,5,4,4,5,4,4,0,3,2,2,3,3,0,4,3,1,3,2,1,4,3,3,4,5,0,3,0,2,0,4,5,5,4,5,4,0,4,0,0,5,4),
(0,5,0,5,0,4,0,3,0,4,4,3,4,3,3,3,4,0,4,4,4,3,4,3,4,3,3,1,4,2,4,3,4,0,5,4,1,4,5,4,4,5,3,2,4,3,4,3,2,4,1,3,3,3,2,3,2,0,4,3,3,4,3,3,3,4,0,4,0,3,0,4,5,4,4,4,3,0,4,1,0,1,3),
(0,3,1,4,0,3,0,2,0,3,4,4,3,1,4,2,3,3,4,3,4,3,4,3,4,4,3,2,3,1,5,4,4,1,4,4,3,5,4,4,3,5,5,4,3,4,4,3,1,2,3,1,2,2,0,3,2,0,3,1,0,5,3,3,3,4,3,3,3,3,4,4,4,4,5,4,2,0,3,3,2,4,3),
(0,2,0,3,0,1,0,1,0,0,3,2,0,0,2,0,1,0,2,1,3,3,3,1,2,3,1,0,1,0,4,2,1,1,3,3,0,4,3,3,1,4,3,3,0,3,3,2,0,0,0,0,1,0,0,2,0,0,0,0,0,4,1,0,2,3,2,2,2,1,3,3,3,4,4,3,2,0,3,1,0,3,3),
(0,4,0,4,0,3,0,3,0,4,4,4,3,3,3,3,3,3,4,3,4,2,4,3,4,3,3,2,4,3,4,5,4,1,4,5,3,5,4,5,3,5,4,0,3,5,5,3,1,3,3,2,2,3,0,3,4,1,3,3,2,4,3,3,3,4,0,4,0,3,0,4,5,4,4,5,3,0,4,1,0,3,4),
(0,2,0,3,0,3,0,0,0,2,2,2,1,0,1,0,0,0,3,0,3,0,3,0,1,3,1,0,3,1,3,3,3,1,3,3,3,0,1,3,1,3,4,0,0,3,1,1,0,3,2,0,0,0,0,1,3,0,1,0,0,3,3,2,0,3,0,0,0,0,0,3,4,3,4,3,3,0,3,0,0,2,3),
(2,3,0,3,0,2,0,1,0,3,3,4,3,1,3,1,1,1,3,1,4,3,4,3,3,3,0,0,3,1,5,4,3,1,4,3,2,5,5,4,4,4,4,3,3,4,4,4,0,2,1,1,3,2,0,1,2,0,0,1,0,4,1,3,3,3,0,3,0,1,0,4,4,4,5,5,3,0,2,0,0,4,4),
(0,2,0,1,0,3,1,3,0,2,3,3,3,0,3,1,0,0,3,0,3,2,3,1,3,2,1,1,0,0,4,2,1,0,2,3,1,4,3,2,0,4,4,3,1,3,1,3,0,1,0,0,1,0,0,0,1,0,0,0,0,4,1,1,1,2,0,3,0,0,0,3,4,2,4,3,2,0,1,0,0,3,3),
(0,1,0,4,0,5,0,4,0,2,4,4,2,3,3,2,3,3,5,3,3,3,4,3,4,2,3,0,4,3,3,3,4,1,4,3,2,1,5,5,3,4,5,1,3,5,4,2,0,3,3,0,1,3,0,4,2,0,1,3,1,4,3,3,3,3,0,3,0,1,0,3,4,4,4,5,5,0,3,0,1,4,5),
(0,2,0,3,0,3,0,0,0,2,3,1,3,0,4,0,1,1,3,0,3,4,3,2,3,1,0,3,3,2,3,1,3,0,2,3,0,2,1,4,1,2,2,0,0,3,3,0,0,2,0,0,0,1,0,0,0,0,2,2,0,3,2,1,3,3,0,2,0,2,0,0,3,3,1,2,4,0,3,0,2,2,3),
(2,4,0,5,0,4,0,4,0,2,4,4,4,3,4,3,3,3,1,2,4,3,4,3,4,4,5,0,3,3,3,3,2,0,4,3,1,4,3,4,1,4,4,3,3,4,4,3,1,2,3,0,4,2,0,4,1,0,3,3,0,4,3,3,3,4,0,4,0,2,0,3,5,3,4,5,2,0,3,0,0,4,5),
(0,3,0,4,0,1,0,1,0,1,3,2,2,1,3,0,3,0,2,0,2,0,3,0,2,0,0,0,1,0,1,1,0,0,3,1,0,0,0,4,0,3,1,0,2,1,3,0,0,0,0,0,0,3,0,0,0,0,0,0,0,4,2,2,3,1,0,3,0,0,0,1,4,4,4,3,0,0,4,0,0,1,4),
(1,4,1,5,0,3,0,3,0,4,5,4,4,3,5,3,3,4,4,3,4,1,3,3,3,3,2,1,4,1,5,4,3,1,4,4,3,5,4,4,3,5,4,3,3,4,4,4,0,3,3,1,2,3,0,3,1,0,3,3,0,5,4,4,4,4,4,4,3,3,5,4,4,3,3,5,4,0,3,2,0,4,4),
(0,2,0,3,0,1,0,0,0,1,3,3,3,2,4,1,3,0,3,1,3,0,2,2,1,1,0,0,2,0,4,3,1,0,4,3,0,4,4,4,1,4,3,1,1,3,3,1,0,2,0,0,1,3,0,0,0,0,2,0,0,4,3,2,4,3,5,4,3,3,3,4,3,3,4,3,3,0,2,1,0,3,3),
(0,2,0,4,0,3,0,2,0,2,5,5,3,4,4,4,4,1,4,3,3,0,4,3,4,3,1,3,3,2,4,3,0,3,4,3,0,3,4,4,2,4,4,0,4,5,3,3,2,2,1,1,1,2,0,1,5,0,3,3,2,4,3,3,3,4,0,3,0,2,0,4,4,3,5,5,0,0,3,0,2,3,3),
(0,3,0,4,0,3,0,1,0,3,4,3,3,1,3,3,3,0,3,1,3,0,4,3,3,1,1,0,3,0,3,3,0,0,4,4,0,1,5,4,3,3,5,0,3,3,4,3,0,2,0,1,1,1,0,1,3,0,1,2,1,3,3,2,3,3,0,3,0,1,0,1,3,3,4,4,1,0,1,2,2,1,3),
(0,1,0,4,0,4,0,3,0,1,3,3,3,2,3,1,1,0,3,0,3,3,4,3,2,4,2,0,1,0,4,3,2,0,4,3,0,5,3,3,2,4,4,4,3,3,3,4,0,1,3,0,0,1,0,0,1,0,0,0,0,4,2,3,3,3,0,3,0,0,0,4,4,4,5,3,2,0,3,3,0,3,5),
(0,2,0,3,0,0,0,3,0,1,3,0,2,0,0,0,1,0,3,1,1,3,3,0,0,3,0,0,3,0,2,3,1,0,3,1,0,3,3,2,0,4,2,2,0,2,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,2,1,2,0,1,0,1,0,0,0,1,3,1,2,0,0,0,1,0,0,1,4),
(0,3,0,3,0,5,0,1,0,2,4,3,1,3,3,2,1,1,5,2,1,0,5,1,2,0,0,0,3,3,2,2,3,2,4,3,0,0,3,3,1,3,3,0,2,5,3,4,0,3,3,0,1,2,0,2,2,0,3,2,0,2,2,3,3,3,0,2,0,1,0,3,4,4,2,5,4,0,3,0,0,3,5),
(0,3,0,3,0,3,0,1,0,3,3,3,3,0,3,0,2,0,2,1,1,0,2,0,1,0,0,0,2,1,0,0,1,0,3,2,0,0,3,3,1,2,3,1,0,3,3,0,0,1,0,0,0,0,0,2,0,0,0,0,0,2,3,1,2,3,0,3,0,1,0,3,2,1,0,4,3,0,1,1,0,3,3),
(0,4,0,5,0,3,0,3,0,4,5,5,4,3,5,3,4,3,5,3,3,2,5,3,4,4,4,3,4,3,4,5,5,3,4,4,3,4,4,5,4,4,4,3,4,5,5,4,2,3,4,2,3,4,0,3,3,1,4,3,2,4,3,3,5,5,0,3,0,3,0,5,5,5,5,4,4,0,4,0,1,4,4),
(0,4,0,4,0,3,0,3,0,3,5,4,4,2,3,2,5,1,3,2,5,1,4,2,3,2,3,3,4,3,3,3,3,2,5,4,1,3,3,5,3,4,4,0,4,4,3,1,1,3,1,0,2,3,0,2,3,0,3,0,0,4,3,1,3,4,0,3,0,2,0,4,4,4,3,4,5,0,4,0,0,3,4),
(0,3,0,3,0,3,1,2,0,3,4,4,3,3,3,0,2,2,4,3,3,1,3,3,3,1,1,0,3,1,4,3,2,3,4,4,2,4,4,4,3,4,4,3,2,4,4,3,1,3,3,1,3,3,0,4,1,0,2,2,1,4,3,2,3,3,5,4,3,3,5,4,4,3,3,0,4,0,3,2,2,4,4),
(0,2,0,1,0,0,0,0,0,1,2,1,3,0,0,0,0,0,2,0,1,2,1,0,0,1,0,0,0,0,3,0,0,1,0,1,1,3,1,0,0,0,1,1,0,1,1,0,0,0,0,0,2,0,0,0,0,0,0,0,0,1,1,2,2,0,3,4,0,0,0,1,1,0,0,1,0,0,0,0,0,1,1),
(0,1,0,0,0,1,0,0,0,0,4,0,4,1,4,0,3,0,4,0,3,0,4,0,3,0,3,0,4,1,5,1,4,0,0,3,0,5,0,5,2,0,1,0,0,0,2,1,4,0,1,3,0,0,3,0,0,3,1,1,4,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0),
(1,4,0,5,0,3,0,2,0,3,5,4,4,3,4,3,5,3,4,3,3,0,4,3,3,3,3,3,3,2,4,4,3,1,3,4,4,5,4,4,3,4,4,1,3,5,4,3,3,3,1,2,2,3,3,1,3,1,3,3,3,5,3,3,4,5,0,3,0,3,0,3,4,3,4,4,3,0,3,0,2,4,3),
(0,1,0,4,0,0,0,0,0,1,4,0,4,1,4,2,4,0,3,0,1,0,1,0,0,0,0,0,2,0,3,1,1,1,0,3,0,0,0,1,2,1,0,0,1,1,1,1,0,1,0,0,0,1,0,0,3,0,0,0,0,3,2,0,2,2,0,1,0,0,0,2,3,2,3,3,0,0,0,0,2,1,0),
(0,5,1,5,0,3,0,3,0,5,4,4,5,1,5,3,3,0,4,3,4,3,5,3,4,3,3,2,4,3,4,3,3,0,3,3,1,4,4,3,4,4,4,3,4,5,5,3,2,3,1,1,3,3,1,3,1,1,3,3,2,4,5,3,3,5,0,4,0,3,0,4,4,3,5,3,3,0,3,4,0,4,3),
(0,5,0,5,0,3,0,2,0,4,4,3,5,2,4,3,3,3,4,4,4,3,5,3,5,3,3,1,4,0,4,3,3,0,3,3,0,4,4,4,4,5,4,3,3,5,5,3,2,3,1,2,3,2,0,1,0,0,3,2,2,4,4,3,1,5,0,4,0,3,0,4,3,1,3,2,1,0,3,3,0,3,3),
(0,4,0,5,0,5,0,4,0,4,5,5,5,3,4,3,3,2,5,4,4,3,5,3,5,3,4,0,4,3,4,4,3,2,4,4,3,4,5,4,4,5,5,0,3,5,5,4,1,3,3,2,3,3,1,3,1,0,4,3,1,4,4,3,4,5,0,4,0,2,0,4,3,4,4,3,3,0,4,0,0,5,5),
(0,4,0,4,0,5,0,1,1,3,3,4,4,3,4,1,3,0,5,1,3,0,3,1,3,1,1,0,3,0,3,3,4,0,4,3,0,4,4,4,3,4,4,0,3,5,4,1,0,3,0,0,2,3,0,3,1,0,3,1,0,3,2,1,3,5,0,3,0,1,0,3,2,3,3,4,4,0,2,2,0,4,4),
(2,4,0,5,0,4,0,3,0,4,5,5,4,3,5,3,5,3,5,3,5,2,5,3,4,3,3,4,3,4,5,3,2,1,5,4,3,2,3,4,5,3,4,1,2,5,4,3,0,3,3,0,3,2,0,2,3,0,4,1,0,3,4,3,3,5,0,3,0,1,0,4,5,5,5,4,3,0,4,2,0,3,5),
(0,5,0,4,0,4,0,2,0,5,4,3,4,3,4,3,3,3,4,3,4,2,5,3,5,3,4,1,4,3,4,4,4,0,3,5,0,4,4,4,4,5,3,1,3,4,5,3,3,3,3,3,3,3,0,2,2,0,3,3,2,4,3,3,3,5,3,4,1,3,3,5,3,2,0,0,0,0,4,3,1,3,3),
(0,1,0,3,0,3,0,1,0,1,3,3,3,2,3,3,3,0,3,0,0,0,3,1,3,0,0,0,2,2,2,3,0,0,3,2,0,1,2,4,1,3,3,0,0,3,3,3,0,1,0,0,2,1,0,0,3,0,3,1,0,3,0,0,1,3,0,2,0,1,0,3,3,1,3,3,0,0,1,1,0,3,3),
(0,2,0,3,0,2,1,4,0,2,2,3,1,1,3,1,1,0,2,0,3,1,2,3,1,3,0,0,1,0,4,3,2,3,3,3,1,4,2,3,3,3,3,1,0,3,1,4,0,1,1,0,1,2,0,1,1,0,1,1,0,3,1,3,2,2,0,1,0,0,0,2,3,3,3,1,0,0,0,0,0,2,3),
(0,5,0,4,0,5,0,2,0,4,5,5,3,3,4,3,3,1,5,4,4,2,4,4,4,3,4,2,4,3,5,5,4,3,3,4,3,3,5,5,4,5,5,1,3,4,5,3,1,4,3,1,3,3,0,3,3,1,4,3,1,4,5,3,3,5,0,4,0,3,0,5,3,3,1,4,3,0,4,0,1,5,3),
(0,5,0,5,0,4,0,2,0,4,4,3,4,3,3,3,3,3,5,4,4,4,4,4,4,5,3,3,5,2,4,4,4,3,4,4,3,3,4,4,5,5,3,3,4,3,4,3,3,4,3,3,3,3,1,2,2,1,4,3,3,5,4,4,3,4,0,4,0,3,0,4,4,4,4,4,1,0,4,2,0,2,4),
(0,4,0,4,0,3,0,1,0,3,5,2,3,0,3,0,2,1,4,2,3,3,4,1,4,3,3,2,4,1,3,3,3,0,3,3,0,0,3,3,3,5,3,3,3,3,3,2,0,2,0,0,2,0,0,2,0,0,1,0,0,3,1,2,2,3,0,3,0,2,0,4,4,3,3,4,1,0,3,0,0,2,4),
(0,0,0,4,0,0,0,0,0,0,1,0,1,0,2,0,0,0,0,0,1,0,2,0,1,0,0,0,0,0,3,1,3,0,3,2,0,0,0,1,0,3,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,4,0,2,0,0,0,0,0,0,2),
(0,2,1,3,0,2,0,2,0,3,3,3,3,1,3,1,3,3,3,3,3,3,4,2,2,1,2,1,4,0,4,3,1,3,3,3,2,4,3,5,4,3,3,3,3,3,3,3,0,1,3,0,2,0,0,1,0,0,1,0,0,4,2,0,2,3,0,3,3,0,3,3,4,2,3,1,4,0,1,2,0,2,3),
(0,3,0,3,0,1,0,3,0,2,3,3,3,0,3,1,2,0,3,3,2,3,3,2,3,2,3,1,3,0,4,3,2,0,3,3,1,4,3,3,2,3,4,3,1,3,3,1,1,0,1,1,0,1,0,1,0,1,0,0,0,4,1,1,0,3,0,3,1,0,2,3,3,3,3,3,1,0,0,2,0,3,3),
(0,0,0,0,0,0,0,0,0,0,3,0,2,0,3,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,3,0,3,0,3,1,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,2,0,2,3,0,0,0,0,0,0,0,0,3),
(0,2,0,3,1,3,0,3,0,2,3,3,3,1,3,1,3,1,3,1,3,3,3,1,3,0,2,3,1,1,4,3,3,2,3,3,1,2,2,4,1,3,3,0,1,4,2,3,0,1,3,0,3,0,0,1,3,0,2,0,0,3,3,2,1,3,0,3,0,2,0,3,4,4,4,3,1,0,3,0,0,3,3),
(0,2,0,1,0,2,0,0,0,1,3,2,2,1,3,0,1,1,3,0,3,2,3,1,2,0,2,0,1,1,3,3,3,0,3,3,1,1,2,3,2,3,3,1,2,3,2,0,0,1,0,0,0,0,0,0,3,0,1,0,0,2,1,2,1,3,0,3,0,0,0,3,4,4,4,3,2,0,2,0,0,2,4),
(0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,2,2,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,3,1,0,0,0,0,0,0,0,3),
(0,3,0,3,0,2,0,3,0,3,3,3,2,3,2,2,2,0,3,1,3,3,3,2,3,3,0,0,3,0,3,2,2,0,2,3,1,4,3,4,3,3,2,3,1,5,4,4,0,3,1,2,1,3,0,3,1,1,2,0,2,3,1,3,1,3,0,3,0,1,0,3,3,4,4,2,1,0,2,1,0,2,4),
(0,1,0,3,0,1,0,2,0,1,4,2,5,1,4,0,2,0,2,1,3,1,4,0,2,1,0,0,2,1,4,1,1,0,3,3,0,5,1,3,2,3,3,1,0,3,2,3,0,1,0,0,0,0,0,0,1,0,0,0,0,4,0,1,0,3,0,2,0,1,0,3,3,3,4,3,3,0,0,0,0,2,3),
(0,0,0,1,0,0,0,0,0,0,2,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,1,0,0,1,0,0,0,0,0,3),
(0,1,0,3,0,4,0,3,0,2,4,3,1,0,3,2,2,1,3,1,2,2,3,1,1,1,2,1,3,0,1,2,0,1,3,2,1,3,0,5,5,1,0,0,1,3,2,1,0,3,0,0,1,0,0,0,0,0,3,4,0,1,1,1,3,2,0,2,0,1,0,2,3,3,1,2,3,0,1,0,1,0,4),
(0,0,0,1,0,3,0,3,0,2,2,1,0,0,4,0,3,0,3,1,3,0,3,0,3,0,1,0,3,0,3,1,3,0,3,3,0,0,1,2,1,1,1,0,1,2,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,2,2,1,2,0,0,2,0,0,0,0,2,3,3,3,3,0,0,0,0,1,4),
(0,0,0,3,0,3,0,0,0,0,3,1,1,0,3,0,1,0,2,0,1,0,0,0,0,0,0,0,1,0,3,0,2,0,2,3,0,0,2,2,3,1,2,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,2,0,0,0,0,2,3),
(2,4,0,5,0,5,0,4,0,3,4,3,3,3,4,3,3,3,4,3,4,4,5,4,5,5,5,2,3,0,5,5,4,1,5,4,3,1,5,4,3,4,4,3,3,4,3,3,0,3,2,0,2,3,0,3,0,0,3,3,0,5,3,2,3,3,0,3,0,3,0,3,4,5,4,5,3,0,4,3,0,3,4),
(0,3,0,3,0,3,0,3,0,3,3,4,3,2,3,2,3,0,4,3,3,3,3,3,3,3,3,0,3,2,4,3,3,1,3,4,3,4,4,4,3,4,4,3,2,4,4,1,0,2,0,0,1,1,0,2,0,0,3,1,0,5,3,2,1,3,0,3,0,1,2,4,3,2,4,3,3,0,3,2,0,4,4),
(0,3,0,3,0,1,0,0,0,1,4,3,3,2,3,1,3,1,4,2,3,2,4,2,3,4,3,0,2,2,3,3,3,0,3,3,3,0,3,4,1,3,3,0,3,4,3,3,0,1,1,0,1,0,0,0,4,0,3,0,0,3,1,2,1,3,0,4,0,1,0,4,3,3,4,3,3,0,2,0,0,3,3),
(0,3,0,4,0,1,0,3,0,3,4,3,3,0,3,3,3,1,3,1,3,3,4,3,3,3,0,0,3,1,5,3,3,1,3,3,2,5,4,3,3,4,5,3,2,5,3,4,0,1,0,0,0,0,0,2,0,0,1,1,0,4,2,2,1,3,0,3,0,2,0,4,4,3,5,3,2,0,1,1,0,3,4),
(0,5,0,4,0,5,0,2,0,4,4,3,3,2,3,3,3,1,4,3,4,1,5,3,4,3,4,0,4,2,4,3,4,1,5,4,0,4,4,4,4,5,4,1,3,5,4,2,1,4,1,1,3,2,0,3,1,0,3,2,1,4,3,3,3,4,0,4,0,3,0,4,4,4,3,3,3,0,4,2,0,3,4),
(1,4,0,4,0,3,0,1,0,3,3,3,1,1,3,3,2,2,3,3,1,0,3,2,2,1,2,0,3,1,2,1,2,0,3,2,0,2,2,3,3,4,3,0,3,3,1,2,0,1,1,3,1,2,0,0,3,0,1,1,0,3,2,2,3,3,0,3,0,0,0,2,3,3,4,3,3,0,1,0,0,1,4),
(0,4,0,4,0,4,0,0,0,3,4,4,3,1,4,2,3,2,3,3,3,1,4,3,4,0,3,0,4,2,3,3,2,2,5,4,2,1,3,4,3,4,3,1,3,3,4,2,0,2,1,0,3,3,0,0,2,0,3,1,0,4,4,3,4,3,0,4,0,1,0,2,4,4,4,4,4,0,3,2,0,3,3),
(0,0,0,1,0,4,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,3,2,0,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,2),
(0,2,0,3,0,4,0,4,0,1,3,3,3,0,4,0,2,1,2,1,1,1,2,0,3,1,1,0,1,0,3,1,0,0,3,3,2,0,1,1,0,0,0,0,0,1,0,2,0,2,2,0,3,1,0,0,1,0,1,1,0,1,2,0,3,0,0,0,0,1,0,0,3,3,4,3,1,0,1,0,3,0,2),
(0,0,0,3,0,5,0,0,0,0,1,0,2,0,3,1,0,1,3,0,0,0,2,0,0,0,1,0,0,0,1,1,0,0,4,0,0,0,2,3,0,1,4,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,1,0,0,0,0,0,0,0,2,0,0,3,0,0,0,0,0,3),
(0,2,0,5,0,5,0,1,0,2,4,3,3,2,5,1,3,2,3,3,3,0,4,1,2,0,3,0,4,0,2,2,1,1,5,3,0,0,1,4,2,3,2,0,3,3,3,2,0,2,4,1,1,2,0,1,1,0,3,1,0,1,3,1,2,3,0,2,0,0,0,1,3,5,4,4,4,0,3,0,0,1,3),
(0,4,0,5,0,4,0,4,0,4,5,4,3,3,4,3,3,3,4,3,4,4,5,3,4,5,4,2,4,2,3,4,3,1,4,4,1,3,5,4,4,5,5,4,4,5,5,5,2,3,3,1,4,3,1,3,3,0,3,3,1,4,3,4,4,4,0,3,0,4,0,3,3,4,4,5,0,0,4,3,0,4,5),
(0,4,0,4,0,3,0,3,0,3,4,4,4,3,3,2,4,3,4,3,4,3,5,3,4,3,2,1,4,2,4,4,3,1,3,4,2,4,5,5,3,4,5,4,1,5,4,3,0,3,2,2,3,2,1,3,1,0,3,3,3,5,3,3,3,5,4,4,2,3,3,4,3,3,3,2,1,0,3,2,1,4,3),
(0,4,0,5,0,4,0,3,0,3,5,5,3,2,4,3,4,0,5,4,4,1,4,4,4,3,3,3,4,3,5,5,2,3,3,4,1,2,5,5,3,5,5,2,3,5,5,4,0,3,2,0,3,3,1,1,5,1,4,1,0,4,3,2,3,5,0,4,0,3,0,5,4,3,4,3,0,0,4,1,0,4,4),
(1,3,0,4,0,2,0,2,0,2,5,5,3,3,3,3,3,0,4,2,3,4,4,4,3,4,0,0,3,4,5,4,3,3,3,3,2,5,5,4,5,5,5,4,3,5,5,5,1,3,1,0,1,0,0,3,2,0,4,2,0,5,2,3,2,4,1,3,0,3,0,4,5,4,5,4,3,0,4,2,0,5,4),
(0,3,0,4,0,5,0,3,0,3,4,4,3,2,3,2,3,3,3,3,3,2,4,3,3,2,2,0,3,3,3,3,3,1,3,3,3,0,4,4,3,4,4,1,1,4,4,2,0,3,1,0,1,1,0,4,1,0,2,3,1,3,3,1,3,4,0,3,0,1,0,3,1,3,0,0,1,0,2,0,0,4,4),
(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
(0,3,0,3,0,2,0,3,0,1,5,4,3,3,3,1,4,2,1,2,3,4,4,2,4,4,5,0,3,1,4,3,4,0,4,3,3,3,2,3,2,5,3,4,3,2,2,3,0,0,3,0,2,1,0,1,2,0,0,0,0,2,1,1,3,1,0,2,0,4,0,3,4,4,4,5,2,0,2,0,0,1,3),
(0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,1,1,0,0,1,1,0,0,0,4,2,1,1,0,1,0,3,2,0,0,3,1,1,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,1,0,0,0,2,0,0,0,1,4,0,4,2,1,0,0,0,0,0,1),
(0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,1,0,1,0,0,0,0,3,1,0,0,0,2,0,2,1,0,0,1,2,1,0,1,1,0,0,3,0,0,0,0,0,0,0,0,0,0,0,1,3,1,0,0,0,0,0,1,0,0,2,1,0,0,0,0,0,0,0,0,2),
(0,4,0,4,0,4,0,3,0,4,4,3,4,2,4,3,2,0,4,4,4,3,5,3,5,3,3,2,4,2,4,3,4,3,1,4,0,2,3,4,4,4,3,3,3,4,4,4,3,4,1,3,4,3,2,1,2,1,3,3,3,4,4,3,3,5,0,4,0,3,0,4,3,3,3,2,1,0,3,0,0,3,3),
(0,4,0,3,0,3,0,3,0,3,5,5,3,3,3,3,4,3,4,3,3,3,4,4,4,3,3,3,3,4,3,5,3,3,1,3,2,4,5,5,5,5,4,3,4,5,5,3,2,2,3,3,3,3,2,3,3,1,2,3,2,4,3,3,3,4,0,4,0,2,0,4,3,2,2,1,2,0,3,0,0,4,1),
)

class JapaneseContextAnalysis(object):
    """Accumulate 2-character (hiragana) sequence statistics for Japanese text.

    Subclasses provide ``get_order`` for a concrete encoding; this base class
    counts how often consecutive characters fall into each frequency category
    of ``jp2CharContext`` and derives a confidence value from those counts.
    """

    NUM_OF_CATEGORY = 6
    DONT_KNOW = -1
    ENOUGH_REL_THRESHOLD = 100
    MAX_REL_THRESHOLD = 1000
    MINIMUM_DATA_THRESHOLD = 4

    def __init__(self):
        self._total_rel = None
        self._rel_sample = None
        self._need_to_skip_char_num = None
        self._last_char_order = None
        self._done = None
        self.reset()

    def reset(self):
        # Total number of character sequences received so far.
        self._total_rel = 0
        # One counter per frequency category.
        self._rel_sample = [0] * self.NUM_OF_CATEGORY
        # Bytes still owed to a character split across buffer boundaries.
        self._need_to_skip_char_num = 0
        # Order of the previous character; -1 means no usable previous char.
        self._last_char_order = -1
        # Once True, a conclusion has been reached and further input is ignored.
        self._done = False

    def feed(self, byte_str, num_bytes):
        """Consume *num_bytes* bytes of *byte_str* and update the counters."""
        if self._done:
            return

        # Input is byte oriented, so a character may straddle two buffers.
        # Skip however many bytes belong to the character left incomplete by
        # the previous call; dropping a single character barely affects the
        # statistics and keeps the bookkeeping simple.
        # NOTE(review): the skip count is only rewritten when a character
        # overruns the buffer, never cleared on an exact fit — preserved as-is.
        pos = self._need_to_skip_char_num
        while pos < num_bytes:
            order, char_len = self.get_order(byte_str[pos:pos + 2])
            pos += char_len
            if pos > num_bytes:
                # Character runs past this buffer; remember the overhang.
                self._need_to_skip_char_num = pos - num_bytes
                self._last_char_order = -1
            else:
                if order != -1 and self._last_char_order != -1:
                    self._total_rel += 1
                    if self._total_rel > self.MAX_REL_THRESHOLD:
                        self._done = True
                        break
                    category = jp2CharContext[self._last_char_order][order]
                    self._rel_sample[category] += 1
                self._last_char_order = order

    def got_enough_data(self):
        # Enough sequences seen to trust the statistics?
        return self._total_rel > self.ENOUGH_REL_THRESHOLD

    def get_confidence(self):
        """Return the fraction of sequences outside the 'never seen' category,
        or DONT_KNOW when too little data has been accumulated."""
        if self._total_rel <= self.MINIMUM_DATA_THRESHOLD:
            return self.DONT_KNOW
        return (self._total_rel - self._rel_sample[0]) / self._total_rel

    def get_order(self, byte_str):
        # Encoding-specific; overridden by subclasses.  The base class never
        # recognises a character, so no statistics accumulate here.
        return -1, 1

class SJISContextAnalysis(JapaneseContextAnalysis):
    """Japanese context analysis for SHIFT_JIS / CP932 byte streams."""

    def __init__(self):
        super(SJISContextAnalysis, self).__init__()
        self._charset_name = "SHIFT_JIS"

    @property
    def charset_name(self):
        # May be upgraded to "CP932" by get_order() when CP932-only lead
        # bytes are observed.
        return self._charset_name

    def get_order(self, byte_str):
        """Return (hiragana order, char byte length) for the char at byte_str[0].

        The order is -1 for anything that is not a hiragana character.
        """
        if not byte_str:
            return -1, 1
        # find out current char's byte length
        first_char = byte_str[0]
        if (0x81 <= first_char <= 0x9F) or (0xE0 <= first_char <= 0xFC):
            char_len = 2
            if (first_char == 0x87) or (0xFA <= first_char <= 0xFC):
                # These lead bytes only occur in the CP932 superset.
                self._charset_name = "CP932"
        else:
            char_len = 1

        # Return its order if it is hiragana.  In Shift_JIS, hiragana occupy
        # 0x82 0x9F .. 0x82 0xF1.  (BUGFIX: the lead byte was previously
        # compared against 202/0xCA, which can never match a hiragana lead
        # byte, so context analysis silently never accumulated data; compare
        # with the EUC-JP variant, which correctly tests its hiragana lead
        # byte 0xA4.)
        if len(byte_str) > 1:
            second_char = byte_str[1]
            if (first_char == 0x82) and (0x9F <= second_char <= 0xF1):
                return second_char - 0x9F, char_len

        return -1, char_len

class EUCJPContextAnalysis(JapaneseContextAnalysis):
    """Japanese context analysis for EUC-JP byte streams."""

    def get_order(self, byte_str):
        """Return (hiragana order, char byte length) for the leading character.

        The order is -1 for anything that is not a hiragana character.
        """
        if not byte_str:
            return -1, 1

        # Determine the byte length of the current character.
        lead = byte_str[0]
        if lead == 0x8F:
            char_len = 3
        elif lead == 0x8E or 0xA1 <= lead <= 0xFE:
            char_len = 2
        else:
            char_len = 1

        # Hiragana live at 0xA4 0xA1 .. 0xA4 0xF3 in EUC-JP.
        if len(byte_str) > 1:
            trail = byte_str[1]
            if lead == 0xA4 and 0xA1 <= trail <= 0xF3:
                return trail - 0xA1, char_len

        return -1, char_len


_vendor/chardet/mbcsgroupprober.py000064400000003734151733136240013413 0ustar00######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 2001
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Mark Pilgrim - port to Python
#   Shy Shalom - original C code
#   Proofpoint, Inc.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################

from .charsetgroupprober import CharSetGroupProber
from .utf8prober import UTF8Prober
from .sjisprober import SJISProber
from .eucjpprober import EUCJPProber
from .gb2312prober import GB2312Prober
from .euckrprober import EUCKRProber
from .cp949prober import CP949Prober
from .big5prober import Big5Prober
from .euctwprober import EUCTWProber


class MBCSGroupProber(CharSetGroupProber):
    """Group prober that runs every supported multi-byte charset prober."""

    def __init__(self, lang_filter=None):
        super(MBCSGroupProber, self).__init__(lang_filter=lang_filter)
        # Instantiation order defines prober priority; do not reorder.
        prober_classes = (
            UTF8Prober,
            SJISProber,
            EUCJPProber,
            GB2312Prober,
            EUCKRProber,
            CP949Prober,
            Big5Prober,
            EUCTWProber,
        )
        self.probers = [prober_cls() for prober_cls in prober_classes]
        self.reset()
_vendor/chardet/utf8prober.py000064400000005316151733136240012276 0ustar00######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################

from .charsetprober import CharSetProber
from .enums import ProbingState, MachineState
from .codingstatemachine import CodingStateMachine
from .mbcssm import UTF8_SM_MODEL



class UTF8Prober(CharSetProber):
    """Prober that recognises UTF-8 by running its coding state machine."""

    # Per-character probability discount applied while few multi-byte
    # characters have been seen.
    ONE_CHAR_PROB = 0.5

    def __init__(self):
        super(UTF8Prober, self).__init__()
        self.coding_sm = CodingStateMachine(UTF8_SM_MODEL)
        self._num_mb_chars = None
        self.reset()

    def reset(self):
        super(UTF8Prober, self).reset()
        self.coding_sm.reset()
        # Count of complete multi-byte sequences observed so far.
        self._num_mb_chars = 0

    @property
    def charset_name(self):
        return "utf-8"

    @property
    def language(self):
        # UTF-8 is language-neutral.
        return ""

    def feed(self, byte_str):
        """Feed bytes through the state machine; return the probing state."""
        for byte in byte_str:
            machine_state = self.coding_sm.next_state(byte)
            if machine_state == MachineState.ERROR:
                self._state = ProbingState.NOT_ME
                break
            if machine_state == MachineState.ITS_ME:
                self._state = ProbingState.FOUND_IT
                break
            if machine_state == MachineState.START:
                # A full character just completed; count it if multi-byte.
                if self.coding_sm.get_current_charlen() >= 2:
                    self._num_mb_chars += 1

        if self.state == ProbingState.DETECTING:
            if self.get_confidence() > self.SHORTCUT_THRESHOLD:
                self._state = ProbingState.FOUND_IT

        return self.state

    def get_confidence(self):
        """Confidence grows with each multi-byte character, capping at 0.99."""
        unlike = 0.99
        if self._num_mb_chars >= 6:
            return unlike
        return 1.0 - unlike * self.ONE_CHAR_PROB ** self._num_mb_chars
_vendor/chardet/langgreekmodel.py000064400000030620151733136240013152 0ustar00######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################

# 255: Control characters that usually does not exist in any text
# 254: Carriage/Return
# 253: symbol (punctuation) that does not belong to word
# 252: 0 - 9

# Character Mapping Table:
Latin7_char_to_order_map = (
# 256-entry table mapping each ISO-8859-7 (Latin/Greek) byte value to its
# frequency order (255 = control, 254 = CR/LF, 253 = symbol, 252 = digit).
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255,  # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,  # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,  # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253,  # 30
253, 82,100,104, 94, 98,101,116,102,111,187,117, 92, 88,113, 85,  # 40
 79,118,105, 83, 67,114,119, 95, 99,109,188,253,253,253,253,253,  # 50
253, 72, 70, 80, 81, 60, 96, 93, 89, 68,120, 97, 77, 86, 69, 55,  # 60
 78,115, 65, 66, 58, 76,106,103, 87,107,112,253,253,253,253,253,  # 70
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,  # 80
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,  # 90
253,233, 90,253,253,253,253,253,253,253,253,253,253, 74,253,253,  # a0
253,253,253,253,247,248, 61, 36, 46, 71, 73,253, 54,253,108,123,  # b0
110, 31, 51, 43, 41, 34, 91, 40, 52, 47, 44, 53, 38, 49, 59, 39,  # c0
 35, 48,250, 37, 33, 45, 56, 50, 84, 57,120,121, 17, 18, 22, 15,  # d0
124,  1, 29, 20, 21,  3, 32, 13, 25,  5, 11, 16, 10,  6, 30,  4,  # e0
  9,  8, 14,  7,  2, 12, 28, 23, 42, 24, 64, 75, 19, 26, 27,253,  # f0
)

win1253_char_to_order_map = (
# 256-entry table mapping each Windows-1253 (Greek) byte value to its
# frequency order (255 = control, 254 = CR/LF, 253 = symbol, 252 = digit).
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255,  # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,  # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,  # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253,  # 30
253, 82,100,104, 94, 98,101,116,102,111,187,117, 92, 88,113, 85,  # 40
 79,118,105, 83, 67,114,119, 95, 99,109,188,253,253,253,253,253,  # 50
253, 72, 70, 80, 81, 60, 96, 93, 89, 68,120, 97, 77, 86, 69, 55,  # 60
 78,115, 65, 66, 58, 76,106,103, 87,107,112,253,253,253,253,253,  # 70
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,  # 80
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,  # 90
253,233, 61,253,253,253,253,253,253,253,253,253,253, 74,253,253,  # a0
253,253,253,253,247,253,253, 36, 46, 71, 73,253, 54,253,108,123,  # b0
110, 31, 51, 43, 41, 34, 91, 40, 52, 47, 44, 53, 38, 49, 59, 39,  # c0
 35, 48,250, 37, 33, 45, 56, 50, 84, 57,120,121, 17, 18, 22, 15,  # d0
124,  1, 29, 20, 21,  3, 32, 13, 25,  5, 11, 16, 10,  6, 30,  4,  # e0
  9,  8, 14,  7,  2, 12, 28, 23, 42, 24, 64, 75, 19, 26, 27,253,  # f0
)

# Model Table:
# total sequences: 100%
# first 512 sequences: 98.2851%
# first 1024 sequences:1.7001%
# rest  sequences:     0.0359%
# negative sequences:  0.0148%
GreekLangModel = (
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,3,2,2,3,3,3,3,3,3,3,3,1,3,3,3,0,2,2,3,3,0,3,0,3,2,0,3,3,3,0,
3,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,3,3,3,3,3,0,3,3,0,3,2,3,3,0,3,2,3,3,3,0,0,3,0,3,0,3,3,2,0,0,0,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,
0,2,3,2,2,3,3,3,3,3,3,3,3,0,3,3,3,3,0,2,3,3,0,3,3,3,3,2,3,3,3,0,
2,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,2,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,0,2,1,3,3,3,3,2,3,3,2,3,3,2,0,
0,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,3,3,3,3,0,3,3,3,3,3,3,0,3,3,0,3,3,3,3,3,3,3,3,3,3,0,3,2,3,3,0,
2,0,1,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
0,3,3,3,3,3,2,3,0,0,0,0,3,3,0,3,1,3,3,3,0,3,3,0,3,3,3,3,0,0,0,0,
2,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,3,3,3,3,3,0,3,0,3,3,3,3,3,0,3,2,2,2,3,0,2,3,3,3,3,3,2,3,3,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,3,3,3,3,3,3,2,2,2,3,3,3,3,0,3,1,3,3,3,3,2,3,3,3,3,3,3,3,2,2,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,3,3,3,3,3,2,0,3,0,0,0,3,3,2,3,3,3,3,3,0,0,3,2,3,0,2,3,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,3,0,3,3,3,3,0,0,3,3,0,2,3,0,3,0,3,3,3,0,0,3,0,3,0,2,2,3,3,0,0,
0,0,1,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,3,3,3,3,3,2,0,3,2,3,3,3,3,0,3,3,3,3,3,0,3,3,2,3,2,3,3,2,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,3,3,2,3,2,3,3,3,3,3,3,0,2,3,2,3,2,2,2,3,2,3,3,2,3,0,2,2,2,3,0,
2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,3,0,0,0,3,3,3,2,3,3,0,0,3,0,3,0,0,0,3,2,0,3,0,3,0,0,2,0,2,0,
0,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,3,3,3,3,0,3,3,3,3,3,3,0,3,3,0,3,0,0,0,3,3,0,3,3,3,0,0,1,2,3,0,
3,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,3,3,3,3,3,2,0,0,3,2,2,3,3,0,3,3,3,3,3,2,1,3,0,3,2,3,3,2,1,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,3,3,0,2,3,3,3,3,3,3,0,0,3,0,3,0,0,0,3,3,0,3,2,3,0,0,3,3,3,0,
3,0,0,0,2,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,3,3,3,3,0,3,3,3,3,3,3,0,0,3,0,3,0,0,0,3,2,0,3,2,3,0,0,3,2,3,0,
2,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,3,1,2,2,3,3,3,3,3,3,0,2,3,0,3,0,0,0,3,3,0,3,0,2,0,0,2,3,1,0,
2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,3,0,3,3,3,3,0,3,0,3,3,2,3,0,3,3,3,3,3,3,0,3,3,3,0,2,3,0,0,3,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,3,0,3,3,3,0,0,3,0,0,0,3,3,0,3,0,2,3,3,0,0,3,0,3,0,3,3,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,3,0,0,0,3,3,3,3,3,3,0,0,3,0,2,0,0,0,3,3,0,3,0,3,0,0,2,0,2,0,
0,0,0,0,1,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,3,3,3,3,3,3,0,3,0,2,0,3,2,0,3,2,3,2,3,0,0,3,2,3,2,3,3,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,3,0,0,2,3,3,3,3,3,0,0,0,3,0,2,1,0,0,3,2,2,2,0,3,0,0,2,2,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,3,0,3,3,3,2,0,3,0,3,0,3,3,0,2,1,2,3,3,0,0,3,0,3,0,3,3,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,2,3,3,3,0,3,3,3,3,3,3,0,2,3,0,3,0,0,0,2,1,0,2,2,3,0,0,2,2,2,0,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,3,0,0,2,3,3,3,2,3,0,0,1,3,0,2,0,0,0,0,3,0,1,0,2,0,0,1,1,1,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,3,3,3,3,3,1,0,3,0,0,0,3,2,0,3,2,3,3,3,0,0,3,0,3,2,2,2,1,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,3,0,3,3,3,0,0,3,0,0,0,0,2,0,2,3,3,2,2,2,2,3,0,2,0,2,2,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,3,3,3,3,2,0,0,0,0,0,0,2,3,0,2,0,2,3,2,0,0,3,0,3,0,3,1,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,3,2,3,3,2,2,3,0,2,0,3,0,0,0,2,0,0,0,0,1,2,0,2,0,2,0,
0,2,0,2,0,2,2,0,0,1,0,2,2,2,0,2,2,2,0,2,2,2,0,0,2,0,0,1,0,0,0,0,
0,2,0,3,3,2,0,0,0,0,0,0,1,3,0,2,0,2,2,2,0,0,2,0,3,0,0,2,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,3,0,2,3,2,0,2,2,0,2,0,2,2,0,2,0,2,2,2,0,0,0,0,0,0,2,3,0,0,0,2,
0,1,2,0,0,0,0,2,2,0,0,0,2,1,0,2,2,0,0,0,0,0,0,1,0,2,0,0,0,0,0,0,
0,0,2,1,0,2,3,2,2,3,2,3,2,0,0,3,3,3,0,0,3,2,0,0,0,1,1,0,2,0,2,2,
0,2,0,2,0,2,2,0,0,2,0,2,2,2,0,2,2,2,2,0,0,2,0,0,0,2,0,1,0,0,0,0,
0,3,0,3,3,2,2,0,3,0,0,0,2,2,0,2,2,2,1,2,0,0,1,2,2,0,0,3,0,0,0,2,
0,1,2,0,0,0,1,2,0,0,0,0,0,0,0,2,2,0,1,0,0,2,0,0,0,2,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,2,3,3,2,2,0,0,0,2,0,2,3,3,0,2,0,0,0,0,0,0,2,2,2,0,2,2,0,2,0,2,
0,2,2,0,0,2,2,2,2,1,0,0,2,2,0,2,0,0,2,0,0,0,0,0,0,2,0,0,0,0,0,0,
0,2,0,3,2,3,0,0,0,3,0,0,2,2,0,2,0,2,2,2,0,0,2,0,0,0,0,0,0,0,0,2,
0,0,2,2,0,0,2,2,2,0,0,0,0,0,0,2,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,2,0,0,3,2,0,2,2,2,2,2,0,0,0,2,0,0,0,0,2,0,1,0,0,2,0,1,0,0,0,
0,2,2,2,0,2,2,0,1,2,0,2,2,2,0,2,2,2,2,1,2,2,0,0,2,0,0,0,0,0,0,0,
0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
0,2,0,2,0,2,2,0,0,0,0,1,2,1,0,0,2,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,3,2,3,0,0,2,0,0,0,2,2,0,2,0,0,0,1,0,0,2,0,2,0,2,2,0,0,0,0,
0,0,2,0,0,0,0,2,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,
0,2,2,3,2,2,0,0,0,0,0,0,1,3,0,2,0,2,2,0,0,0,1,0,2,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,2,0,2,0,3,2,0,2,0,0,0,0,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
0,0,2,0,0,0,0,1,1,0,0,2,1,2,0,2,2,0,1,0,0,1,0,0,0,2,0,0,0,0,0,0,
0,3,0,2,2,2,0,0,2,0,0,0,2,0,0,0,2,3,0,2,0,0,0,0,0,0,2,2,0,0,0,2,
0,1,2,0,0,0,1,2,2,1,0,0,0,2,0,0,2,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,2,1,2,0,2,2,0,2,0,0,2,0,0,0,0,1,2,1,0,2,1,0,0,0,0,0,0,0,0,0,0,
0,0,2,0,0,0,3,1,2,2,0,2,0,0,0,0,2,0,0,0,2,0,0,3,0,0,0,0,2,2,2,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,2,1,0,2,0,1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,0,0,0,0,2,
0,2,2,0,0,2,2,2,2,2,0,1,2,0,0,0,2,2,0,1,0,2,0,0,2,2,0,0,0,0,0,0,
0,0,0,0,1,0,0,0,0,0,0,0,3,0,0,2,0,0,0,0,0,0,0,0,2,0,2,0,0,0,0,2,
0,1,2,0,0,0,0,2,2,1,0,1,0,1,0,2,2,2,1,0,0,0,0,0,0,1,0,0,0,0,0,0,
0,2,0,1,2,0,0,0,0,0,0,0,0,0,0,2,0,0,2,2,0,0,0,0,1,0,0,0,0,0,0,2,
0,2,2,0,0,0,0,2,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,2,0,0,0,
0,2,2,2,2,0,0,0,3,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,2,0,0,0,0,0,0,1,
0,0,2,0,0,0,0,1,2,0,0,0,0,0,0,2,2,1,1,0,0,0,0,0,0,1,0,0,0,0,0,0,
0,2,0,2,2,2,0,0,2,0,0,0,0,0,0,0,2,2,2,0,0,0,2,0,0,0,0,0,0,0,0,2,
0,0,1,0,0,0,0,2,1,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,
0,3,0,2,0,0,0,0,0,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,2,
0,0,2,0,0,0,0,2,2,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,2,0,2,2,1,0,0,0,0,0,0,2,0,0,2,0,2,2,2,0,0,0,0,0,0,2,0,0,0,0,2,
0,0,2,0,0,2,0,2,2,0,0,0,0,2,0,2,0,0,0,0,0,2,0,0,0,2,0,0,0,0,0,0,
0,0,3,0,0,0,2,2,0,2,2,0,0,0,0,0,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,2,0,0,0,0,0,
0,2,2,2,2,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1,
0,0,0,0,0,0,0,2,1,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,2,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
0,2,0,0,0,2,0,0,0,0,0,1,0,0,0,0,2,2,0,0,0,1,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,2,0,0,0,
0,2,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,2,0,2,0,0,0,
0,0,0,0,0,0,0,0,2,1,0,0,0,0,0,0,2,0,0,0,1,2,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
)

# Single-byte language model for Greek text in the ISO-8859-7 (Latin-7)
# encoding.  Consumed by SingleByteCharSetProber (see sbcsgroupprober.py).
Latin7GreekModel = {
  # Maps each byte value to a character-frequency order.
  'char_to_order_map': Latin7_char_to_order_map,
  # Bigram sequence-likelihood table shared by all Greek encodings.
  'precedence_matrix': GreekLangModel,
  'typical_positive_ratio': 0.982851,
  # Greek detection does not keep ASCII English letters in the analysis.
  'keep_english_letter': False,
  'charset_name': "ISO-8859-7",
  'language': 'Greek',
}

# Single-byte language model for Greek text in the windows-1253 encoding.
# Identical to Latin7GreekModel except for the byte-to-order map and name.
Win1253GreekModel = {
  # Maps each byte value to a character-frequency order.
  'char_to_order_map': win1253_char_to_order_map,
  # Bigram sequence-likelihood table shared by all Greek encodings.
  'precedence_matrix': GreekLangModel,
  'typical_positive_ratio': 0.982851,
  # Greek detection does not keep ASCII English letters in the analysis.
  'keep_english_letter': False,
  'charset_name': "windows-1253",
  'language': 'Greek',
}
_vendor/chardet/codingstatemachine.py000064400000007006151733136240014025 0ustar00######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################

import logging

from .enums import MachineState


class CodingStateMachine(object):
    """
    A state machine used to verify a byte sequence against one particular
    encoding.  The detector feeds every byte it receives to each active
    state machine, one byte at a time; a machine's next state is a function
    of its current state and the incoming byte.  Three states matter to an
    auto-detector:

    START state: the initial state, also reached whenever a legal byte
                 sequence (i.e. a complete, valid code point) for a
                 character has been identified.

    ME state:  the machine recognized a byte sequence that is specific to
               the charset it models and cannot occur in any other
               encoding.  This gives the detector an immediate positive
               answer.

    ERROR state: the machine saw a byte sequence that is illegal for its
                 encoding, giving an immediate negative answer.  The
                 detector excludes this encoding from here on.
    """
    def __init__(self, sm):
        # ``sm`` is the model dict: class_table, class_factor, state_table,
        # char_len_table, name and language.
        self._model = sm
        self._curr_byte_pos = 0
        self._curr_char_len = 0
        self._curr_state = None
        self.logger = logging.getLogger(__name__)
        self.reset()

    def reset(self):
        """Return the machine to its START state."""
        self._curr_state = MachineState.START

    def next_state(self, c):
        """Feed one byte ``c`` and return the resulting machine state."""
        # Classify the incoming byte; on the first byte of a character this
        # also determines the character's length in bytes.
        byte_class = self._model['class_table'][c]
        if self._curr_state == MachineState.START:
            self._curr_byte_pos = 0
            self._curr_char_len = self._model['char_len_table'][byte_class]
        # The transition table is flattened: row = state, column = class.
        transition_index = (self._curr_state * self._model['class_factor']
                            + byte_class)
        self._curr_state = self._model['state_table'][transition_index]
        self._curr_byte_pos += 1
        return self._curr_state

    def get_current_charlen(self):
        """Byte length of the character currently being decoded."""
        return self._curr_char_len

    def get_coding_state_machine(self):
        """Name of the encoding this state machine models."""
        return self._model['name']

    @property
    def language(self):
        """Language associated with this machine's encoding."""
        return self._model['language']
_vendor/chardet/big5prober.py000064400000003335151733136240012235 0ustar00######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################

from .mbcharsetprober import MultiByteCharSetProber
from .codingstatemachine import CodingStateMachine
from .chardistribution import Big5DistributionAnalysis
from .mbcssm import BIG5_SM_MODEL


class Big5Prober(MultiByteCharSetProber):
    """Multi-byte prober for the Big5 (Traditional Chinese) encoding."""

    def __init__(self):
        super(Big5Prober, self).__init__()
        # Pair the Big5 structural state machine with its character
        # frequency-distribution analyser.
        self.distribution_analyzer = Big5DistributionAnalysis()
        self.coding_sm = CodingStateMachine(BIG5_SM_MODEL)
        self.reset()

    @property
    def charset_name(self):
        """Name of the charset this prober detects."""
        return "Big5"

    @property
    def language(self):
        """Language most commonly written in Big5."""
        return "Chinese"
_vendor/chardet/sbcsgroupprober.py000064400000006732151733136240013422 0ustar00######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 2001
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Mark Pilgrim - port to Python
#   Shy Shalom - original C code
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################

from .charsetgroupprober import CharSetGroupProber
from .sbcharsetprober import SingleByteCharSetProber
from .langcyrillicmodel import (Win1251CyrillicModel, Koi8rModel,
                                Latin5CyrillicModel, MacCyrillicModel,
                                Ibm866Model, Ibm855Model)
from .langgreekmodel import Latin7GreekModel, Win1253GreekModel
from .langbulgarianmodel import Latin5BulgarianModel, Win1251BulgarianModel
# from .langhungarianmodel import Latin2HungarianModel, Win1250HungarianModel
from .langthaimodel import TIS620ThaiModel
from .langhebrewmodel import Win1255HebrewModel
from .hebrewprober import HebrewProber
from .langturkishmodel import Latin5TurkishModel


class SBCSGroupProber(CharSetGroupProber):
    """Group prober that runs every single-byte charset model in parallel."""

    def __init__(self):
        super(SBCSGroupProber, self).__init__()
        # Order is preserved from the original list: probers are consulted
        # in this sequence.
        single_byte_models = (
            Win1251CyrillicModel,
            Koi8rModel,
            Latin5CyrillicModel,
            MacCyrillicModel,
            Ibm866Model,
            Ibm855Model,
            Latin7GreekModel,
            Win1253GreekModel,
            Latin5BulgarianModel,
            Win1251BulgarianModel,
            # TODO: Restore Hungarian encodings (iso-8859-2 and windows-1250)
            #       after we retrain model.
            # Latin2HungarianModel,
            # Win1250HungarianModel,
            TIS620ThaiModel,
            Latin5TurkishModel,
        )
        self.probers = [SingleByteCharSetProber(model)
                        for model in single_byte_models]

        # Hebrew needs a dedicated arbiter that decides between the logical
        # and visual variants of windows-1255.
        hebrew_prober = HebrewProber()
        logical_hebrew_prober = SingleByteCharSetProber(
            Win1255HebrewModel, False, hebrew_prober)
        visual_hebrew_prober = SingleByteCharSetProber(
            Win1255HebrewModel, True, hebrew_prober)
        hebrew_prober.set_model_probers(logical_hebrew_prober,
                                        visual_hebrew_prober)
        self.probers.extend([hebrew_prober, logical_hebrew_prober,
                             visual_hebrew_prober])

        self.reset()
_vendor/chardet/chardistribution.py000064400000022303151733136240013546 0ustar00######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################

from .euctwfreq import (EUCTW_CHAR_TO_FREQ_ORDER, EUCTW_TABLE_SIZE,
                        EUCTW_TYPICAL_DISTRIBUTION_RATIO)
from .euckrfreq import (EUCKR_CHAR_TO_FREQ_ORDER, EUCKR_TABLE_SIZE,
                        EUCKR_TYPICAL_DISTRIBUTION_RATIO)
from .gb2312freq import (GB2312_CHAR_TO_FREQ_ORDER, GB2312_TABLE_SIZE,
                         GB2312_TYPICAL_DISTRIBUTION_RATIO)
from .big5freq import (BIG5_CHAR_TO_FREQ_ORDER, BIG5_TABLE_SIZE,
                       BIG5_TYPICAL_DISTRIBUTION_RATIO)
from .jisfreq import (JIS_CHAR_TO_FREQ_ORDER, JIS_TABLE_SIZE,
                      JIS_TYPICAL_DISTRIBUTION_RATIO)


class CharDistributionAnalysis(object):
    """Base class for character frequency-distribution analysis.

    Subclasses supply an encoding-specific ``get_order`` plus a frequency
    table; this base accumulates counts and converts them into a
    confidence value.
    """

    ENOUGH_DATA_THRESHOLD = 1024
    SURE_YES = 0.99
    SURE_NO = 0.01
    MINIMUM_DATA_THRESHOLD = 3

    def __init__(self):
        # Mapping from character order (from get_order()) to frequency
        # order; populated by subclasses.
        self._char_to_freq_order = None
        self._table_size = None  # size of the table above
        # Language-dependent constant used in the confidence calculation;
        # see
        # http://www.mozilla.org/projects/intl/UniversalCharsetDetection.html
        # for details.
        self.typical_distribution_ratio = None
        self._done = None
        self._total_chars = None
        self._freq_chars = None
        self.reset()

    def reset(self):
        """reset analyser, clear any state"""
        # True once detection is done and a conclusion has been reached.
        self._done = False
        # Total characters encountered in our consideration range.
        self._total_chars = 0
        # Characters whose frequency order is below 512.
        self._freq_chars = 0

    def feed(self, char, char_len):
        """feed a character with known length"""
        # Only 2-byte characters participate in distribution analysis.
        order = self.get_order(char) if char_len == 2 else -1
        if order < 0:
            return
        self._total_chars += 1
        if order < self._table_size and self._char_to_freq_order[order] < 512:
            self._freq_chars += 1

    def get_confidence(self):
        """return confidence based on existing data"""
        # Without characters in our consideration range, answer negatively.
        if (self._total_chars <= 0
                or self._freq_chars <= self.MINIMUM_DATA_THRESHOLD):
            return self.SURE_NO

        if self._total_chars != self._freq_chars:
            ratio = (self._freq_chars
                     / ((self._total_chars - self._freq_chars)
                        * self.typical_distribution_ratio))
            if ratio < self.SURE_YES:
                return ratio

        # Cap the confidence; we never want to claim 100% certainty.
        return self.SURE_YES

    def got_enough_data(self):
        """True once enough characters were seen to draw a conclusion."""
        # A conclusion does not require all of the data; a fixed amount is
        # enough for charset detection.
        return self._total_chars > self.ENOUGH_DATA_THRESHOLD

    def get_order(self, byte_str):
        """Convert an encoded character to its numeric order.

        Characters are not handled as raw encoding strings; converting to
        a number ("order") lets several encodings of one language share a
        single frequency table.  The base class recognizes nothing.
        """
        return -1


class EUCTWDistributionAnalysis(CharDistributionAnalysis):
    """Distribution analysis specialised for the EUC-TW encoding."""

    def __init__(self):
        super(EUCTWDistributionAnalysis, self).__init__()
        self._char_to_freq_order = EUCTW_CHAR_TO_FREQ_ORDER
        self._table_size = EUCTW_TABLE_SIZE
        self.typical_distribution_ratio = EUCTW_TYPICAL_DISTRIBUTION_RATIO

    def get_order(self, byte_str):
        # For EUC-TW we are interested in characters whose
        #   first  byte is in 0xc4 -- 0xfe and
        #   second byte is in 0xa1 -- 0xfe.
        # No validation needed here; the state machine has already done it.
        lead_byte = byte_str[0]
        if lead_byte < 0xC4:
            return -1
        return 94 * (lead_byte - 0xC4) + byte_str[1] - 0xA1


class EUCKRDistributionAnalysis(CharDistributionAnalysis):
    """Distribution analysis specialised for the EUC-KR encoding."""

    def __init__(self):
        super(EUCKRDistributionAnalysis, self).__init__()
        self._char_to_freq_order = EUCKR_CHAR_TO_FREQ_ORDER
        self._table_size = EUCKR_TABLE_SIZE
        self.typical_distribution_ratio = EUCKR_TYPICAL_DISTRIBUTION_RATIO

    def get_order(self, byte_str):
        # For EUC-KR we are interested in characters whose
        #   first  byte is in 0xb0 -- 0xfe and
        #   second byte is in 0xa1 -- 0xfe.
        # No validation needed here; the state machine has already done it.
        lead_byte = byte_str[0]
        if lead_byte < 0xB0:
            return -1
        return 94 * (lead_byte - 0xB0) + byte_str[1] - 0xA1


class GB2312DistributionAnalysis(CharDistributionAnalysis):
    """Distribution analysis specialised for the GB2312 encoding."""

    def __init__(self):
        super(GB2312DistributionAnalysis, self).__init__()
        self._char_to_freq_order = GB2312_CHAR_TO_FREQ_ORDER
        self._table_size = GB2312_TABLE_SIZE
        self.typical_distribution_ratio = GB2312_TYPICAL_DISTRIBUTION_RATIO

    def get_order(self, byte_str):
        # For GB2312 we are interested in characters whose
        #   first  byte is in 0xb0 -- 0xfe and
        #   second byte is in 0xa1 -- 0xfe.
        # No validation needed here; the state machine has already done it.
        lead_byte, trail_byte = byte_str[0], byte_str[1]
        if lead_byte >= 0xB0 and trail_byte >= 0xA1:
            return 94 * (lead_byte - 0xB0) + trail_byte - 0xA1
        return -1


class Big5DistributionAnalysis(CharDistributionAnalysis):
    """Distribution analysis specialised for the Big5 encoding."""

    def __init__(self):
        super(Big5DistributionAnalysis, self).__init__()
        self._char_to_freq_order = BIG5_CHAR_TO_FREQ_ORDER
        self._table_size = BIG5_TABLE_SIZE
        self.typical_distribution_ratio = BIG5_TYPICAL_DISTRIBUTION_RATIO

    def get_order(self, byte_str):
        # For Big5 we are interested in characters whose
        #   first  byte is in 0xa4 -- 0xfe and
        #   second byte is in 0x40 -- 0x7e or 0xa1 -- 0xfe.
        # No validation needed here; the state machine has already done it.
        lead_byte, trail_byte = byte_str[0], byte_str[1]
        if lead_byte < 0xA4:
            return -1
        row_base = 157 * (lead_byte - 0xA4)
        if trail_byte >= 0xA1:
            # Upper trail-byte range; 63 = width of the 0x40--0x7e range.
            return row_base + trail_byte - 0xA1 + 63
        return row_base + trail_byte - 0x40


class SJISDistributionAnalysis(CharDistributionAnalysis):
    """Distribution analysis specialised for the Shift_JIS encoding."""

    def __init__(self):
        super(SJISDistributionAnalysis, self).__init__()
        self._char_to_freq_order = JIS_CHAR_TO_FREQ_ORDER
        self._table_size = JIS_TABLE_SIZE
        self.typical_distribution_ratio = JIS_TYPICAL_DISTRIBUTION_RATIO

    def get_order(self, byte_str):
        # For Shift_JIS we are interested in characters whose
        #   first  byte is in 0x81 -- 0x9f or 0xe0 -- 0xfe and
        #   second byte is in 0x40 -- 0x7e or 0x81 -- 0xfe.
        # No validation needed here; the state machine has already done it.
        lead_byte, trail_byte = byte_str[0], byte_str[1]
        if 0x81 <= lead_byte <= 0x9F:
            order = 188 * (lead_byte - 0x81)
        elif 0xE0 <= lead_byte <= 0xEF:
            order = 188 * (lead_byte - 0xE0 + 31)
        else:
            return -1
        order += trail_byte - 0x40
        if trail_byte > 0x7F:
            # NOTE(review): trail bytes above 0x7f are rejected here even
            # though the range comment lists 0x81 -- 0xfe as valid; this is
            # the original implementation's behavior, preserved as-is.
            order = -1
        return order


class EUCJPDistributionAnalysis(CharDistributionAnalysis):
    """Distribution analysis specialised for the EUC-JP encoding."""

    def __init__(self):
        super(EUCJPDistributionAnalysis, self).__init__()
        self._char_to_freq_order = JIS_CHAR_TO_FREQ_ORDER
        self._table_size = JIS_TABLE_SIZE
        self.typical_distribution_ratio = JIS_TYPICAL_DISTRIBUTION_RATIO

    def get_order(self, byte_str):
        # For EUC-JP we are interested in characters whose
        #   first  byte is in 0xa0 -- 0xfe and
        #   second byte is in 0xa1 -- 0xfe.
        # No validation needed here; the state machine has already done it.
        lead_byte = byte_str[0]
        if lead_byte < 0xA0:
            return -1
        return 94 * (lead_byte - 0xA1) + byte_str[1] - 0xA1
_vendor/chardet/charsetprober.py000064400000011766151733136240013047 0ustar00######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 2001
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Mark Pilgrim - port to Python
#   Shy Shalom - original C code
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################

import logging
import re

from .enums import ProbingState


class CharSetProber(object):
    """Base class for all charset probers.

    Concrete probers consume byte buffers through :meth:`feed` and report
    a probing state together with a confidence value for the charset they
    model.  The static ``filter_*`` helpers pre-process input buffers.
    """

    SHORTCUT_THRESHOLD = 0.95

    def __init__(self, lang_filter=None):
        self._state = None
        self.lang_filter = lang_filter
        self.logger = logging.getLogger(__name__)

    def reset(self):
        """Restart probing from a clean slate."""
        self._state = ProbingState.DETECTING

    @property
    def charset_name(self):
        """Name of the detected charset; ``None`` for the abstract base."""
        return None

    def feed(self, buf):
        """Consume a chunk of input; subclasses implement the analysis."""
        pass

    @property
    def state(self):
        """Current probing state of this prober."""
        return self._state

    def get_confidence(self):
        """Confidence in the detected charset; the base always reports 0."""
        return 0.0

    @staticmethod
    def filter_high_byte_only(buf):
        """Collapse every run of ASCII bytes in ``buf`` to a single space."""
        return re.sub(b'([\x00-\x7F])+', b' ', buf)

    @staticmethod
    def filter_international_words(buf):
        """
        We define three types of bytes:
        alphabet: english alphabets [a-zA-Z]
        international: international characters [\x80-\xFF]
        marker: everything else [^a-zA-Z\x80-\xFF]

        The input buffer can be thought of as a series of words delimited
        by markers.  This function keeps only the words that contain at
        least one international character; any marker attached to a kept
        word is replaced by a single ascii space.

        This filter applies to all scripts which do not use English
        characters.
        """
        filtered = bytearray()

        # Words with at least one international character, optionally
        # followed by a single marker byte.
        word_pattern = re.compile(b'[a-zA-Z]*[\x80-\xFF]+[a-zA-Z]*'
                                  b'[^a-zA-Z\x80-\xFF]?')
        for word in word_pattern.findall(buf):
            filtered.extend(word[:-1])

            # If the word ends in a marker, replace it with a space:
            # markers are used similarly across languages and should not
            # bias the frequency analysis.
            tail = word[-1:]
            if not tail.isalpha() and tail < b'\x80':
                tail = b' '
            filtered.extend(tail)

        return filtered

    @staticmethod
    def filter_with_english_letters(buf):
        """
        Returns a copy of ``buf`` that retains only the sequences of English
        alphabet and high byte characters that are not between <> characters.
        Also retains English alphabet and high byte characters immediately
        before occurrences of >.

        This filter can be applied to all scripts which contain both English
        characters and extended ASCII characters, but is currently only used
        by ``Latin1Prober``.
        """
        filtered = bytearray()
        in_tag = False
        kept_from = 0

        for pos in range(len(buf)):
            # Slice (instead of indexing) to get bytes on Python 3 too.
            cur = buf[pos:pos + 1]
            # Track whether we are leaving or entering an HTML tag.
            if cur == b'>':
                in_tag = False
            elif cur == b'<':
                in_tag = True

            # A byte that is neither extended-ASCII nor alphabetic ends the
            # stretch we have been keeping.
            if cur < b'\x80' and not cur.isalpha():
                # Flush the stretch unless it was inside a tag, then emit a
                # space to delimit it.
                if pos > kept_from and not in_tag:
                    filtered.extend(buf[kept_from:pos])
                    filtered.extend(b' ')
                kept_from = pos + 1

        # Keep whatever follows the last marker, unless a tag is still open.
        if not in_tag:
            filtered.extend(buf[kept_from:])

        return filtered
_vendor/chardet/version.py000064400000000362151733136240011657 0ustar00"""
This module exists only to simplify retrieving the version number of chardet
from within setup.py and from chardet subpackages.

:author: Dan Blanchard (dan.blanchard@gmail.com)
"""

# Full version string (major.minor.patch) for the vendored chardet package.
__version__ = "3.0.4"
# The version split into its dotted components, e.g. ['3', '0', '4'].
VERSION = __version__.split('.')
_vendor/chardet/langbulgarianmodel.py000064400000031047151733136240014025 0ustar00######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################

# 255: Control characters that usually do not exist in any text
# 254: Carriage/Return
# 253: symbol (punctuation) that does not belong to word
# 252: 0 - 9

# Character Mapping Table:
# this table is modified based on win1251BulgarianCharToOrderMap, so
# only numbers <64 are known to be valid

# Maps each ISO-8859-5 byte value (tuple index 0-255) to a Bulgarian
# character frequency order; 255/254/253/252 are the special markers
# described in the legend above.
Latin5_BulgarianCharToOrderMap = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255,  # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,  # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,  # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253,  # 30
253, 77, 90, 99,100, 72,109,107,101, 79,185, 81,102, 76, 94, 82,  # 40
110,186,108, 91, 74,119, 84, 96,111,187,115,253,253,253,253,253,  # 50
253, 65, 69, 70, 66, 63, 68,112,103, 92,194,104, 95, 86, 87, 71,  # 60
116,195, 85, 93, 97,113,196,197,198,199,200,253,253,253,253,253,  # 70
194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,  # 80
210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,  # 90
 81,226,227,228,229,230,105,231,232,233,234,235,236, 45,237,238,  # a0
 31, 32, 35, 43, 37, 44, 55, 47, 40, 59, 33, 46, 38, 36, 41, 30,  # b0
 39, 28, 34, 51, 48, 49, 53, 50, 54, 57, 61,239, 67,240, 60, 56,  # c0
  1, 18,  9, 20, 11,  3, 23, 15,  2, 26, 12, 10, 14,  6,  4, 13,  # d0
  7,  8,  5, 19, 29, 25, 22, 21, 27, 24, 17, 75, 52,241, 42, 16,  # e0
 62,242,243,244, 58,245, 98,246,247,248,249,250,251, 91,252,253,  # f0
)

# Same frequency-order mapping as above, but for the windows-1251 byte
# layout of the Cyrillic letters.
win1251BulgarianCharToOrderMap = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255,  # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,  # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,  # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253,  # 30
253, 77, 90, 99,100, 72,109,107,101, 79,185, 81,102, 76, 94, 82,  # 40
110,186,108, 91, 74,119, 84, 96,111,187,115,253,253,253,253,253,  # 50
253, 65, 69, 70, 66, 63, 68,112,103, 92,194,104, 95, 86, 87, 71,  # 60
116,195, 85, 93, 97,113,196,197,198,199,200,253,253,253,253,253,  # 70
206,207,208,209,210,211,212,213,120,214,215,216,217,218,219,220,  # 80
221, 78, 64, 83,121, 98,117,105,222,223,224,225,226,227,228,229,  # 90
 88,230,231,232,233,122, 89,106,234,235,236,237,238, 45,239,240,  # a0
 73, 80,118,114,241,242,243,244,245, 62, 58,246,247,248,249,250,  # b0
 31, 32, 35, 43, 37, 44, 55, 47, 40, 59, 33, 46, 38, 36, 41, 30,  # c0
 39, 28, 34, 51, 48, 49, 53, 50, 54, 57, 61,251, 67,252, 60, 56,  # d0
  1, 18,  9, 20, 11,  3, 23, 15,  2, 26, 12, 10, 14,  6,  4, 13,  # e0
  7,  8,  5, 19, 29, 25, 22, 21, 27, 24, 17, 75, 52,253, 42, 16,  # f0
)

# Model Table:
# total sequences: 100%
# first 512 sequences: 96.9392%
# first 1024 sequences:3.0618%
# rest  sequences:     0.2992%
# negative sequences:  0.0020%
# Machine-generated pair-frequency matrix for Bulgarian, flattened into a
# tuple: each value rates how typical a two-character sequence is
# (3 = frequent ... 0 = rare/negative).
# NOTE(review): indexing presumably uses the order values from the
# char-to-order maps above -- confirm against the single-byte prober code.
BulgarianLangModel = (
0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,2,3,3,3,3,3,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,2,2,3,2,2,1,2,2,
3,1,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,3,3,3,3,3,3,3,0,3,0,1,
0,0,0,0,0,0,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,2,3,3,3,3,3,3,3,3,0,3,1,0,
0,1,0,0,0,0,0,0,0,0,1,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
3,2,2,2,3,3,3,3,3,3,3,3,3,3,3,3,3,1,3,2,3,3,3,3,3,3,3,3,0,3,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,2,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,1,3,2,3,3,3,3,3,3,3,3,0,3,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,3,2,3,2,2,1,3,3,3,3,2,2,2,1,1,2,0,1,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,3,3,2,3,2,2,3,3,1,1,2,3,3,2,3,3,3,3,2,1,2,0,2,0,3,0,0,
0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,3,3,1,3,3,3,3,3,2,3,2,3,3,3,3,3,2,3,3,1,3,0,3,0,2,0,0,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,3,3,3,1,3,3,2,3,3,3,1,3,3,2,3,2,2,2,0,0,2,0,2,0,2,0,0,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,3,3,3,3,0,3,3,3,2,2,3,3,3,1,2,2,3,2,1,1,2,0,2,0,0,0,0,
1,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,3,3,2,3,3,1,2,3,2,2,2,3,3,3,3,3,2,2,3,1,2,0,2,1,2,0,0,
0,0,0,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,1,3,3,3,3,3,2,3,3,3,2,3,3,2,3,2,2,2,3,1,2,0,1,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,3,3,3,3,3,3,1,1,1,2,2,1,3,1,3,2,2,3,0,0,1,0,1,0,1,0,0,
0,0,0,1,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,2,2,3,2,2,3,1,2,1,1,1,2,3,1,3,1,2,2,0,1,1,1,1,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,1,3,2,2,3,3,1,2,3,1,1,3,3,3,3,1,2,2,1,1,1,0,2,0,2,0,1,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,2,2,3,3,3,2,2,1,1,2,0,2,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,0,1,2,1,3,3,2,3,3,3,3,3,2,3,2,1,0,3,1,2,1,2,1,2,3,2,1,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,1,1,2,3,3,3,3,3,3,3,3,3,3,3,3,0,0,3,1,3,3,2,3,3,2,2,2,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,3,3,3,3,0,3,3,3,3,3,2,1,1,2,1,3,3,0,3,1,1,1,1,3,2,0,1,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,3,2,2,2,3,3,3,3,3,3,3,3,3,3,3,1,1,3,1,3,3,2,3,2,2,2,3,0,2,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,2,3,3,2,2,3,2,1,1,1,1,1,3,1,3,1,1,0,0,0,1,0,0,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,2,3,2,0,3,2,0,3,0,2,0,0,2,1,3,1,0,0,1,0,0,0,1,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,2,1,1,1,1,2,1,1,2,1,1,1,2,2,1,2,1,1,1,0,1,1,0,1,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,2,1,3,1,1,2,1,3,2,1,1,0,1,2,3,2,1,1,1,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,3,3,3,3,2,2,1,0,1,0,0,1,0,0,0,2,1,0,3,0,0,1,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,2,3,2,3,3,1,3,2,1,1,1,2,1,1,2,1,3,0,1,0,0,0,1,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,1,1,2,2,3,3,2,3,2,2,2,3,1,2,2,1,1,2,1,1,2,2,0,1,1,0,1,0,2,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,2,1,3,1,0,2,2,1,3,2,1,0,0,2,0,2,0,1,0,0,0,0,0,0,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,3,1,2,0,2,3,1,2,3,2,0,1,3,1,2,1,1,1,0,0,1,0,0,2,2,2,3,
2,2,2,2,1,2,1,1,2,2,1,1,2,0,1,1,1,0,0,1,1,0,0,1,1,0,0,0,1,1,0,1,
3,3,3,3,3,2,1,2,2,1,2,0,2,0,1,0,1,2,1,2,1,1,0,0,0,1,0,1,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,
3,3,2,3,3,1,1,3,1,0,3,2,1,0,0,0,1,2,0,2,0,1,0,0,0,1,0,1,2,1,2,2,
1,1,1,1,1,1,1,2,2,2,1,1,1,1,1,1,1,0,1,2,1,1,1,0,0,0,0,0,1,1,0,0,
3,1,0,1,0,2,3,2,2,2,3,2,2,2,2,2,1,0,2,1,2,1,1,1,0,1,2,1,2,2,2,1,
1,1,2,2,2,2,1,2,1,1,0,1,2,1,2,2,2,1,1,1,0,1,1,1,1,2,0,1,0,0,0,0,
2,3,2,3,3,0,0,2,1,0,2,1,0,0,0,0,2,3,0,2,0,0,0,0,0,1,0,0,2,0,1,2,
2,1,2,1,2,2,1,1,1,2,1,1,1,0,1,2,2,1,1,1,1,1,0,1,1,1,0,0,1,2,0,0,
3,3,2,2,3,0,2,3,1,1,2,0,0,0,1,0,0,2,0,2,0,0,0,1,0,1,0,1,2,0,2,2,
1,1,1,1,2,1,0,1,2,2,2,1,1,1,1,1,1,1,0,1,1,1,0,0,0,0,0,0,1,1,0,0,
2,3,2,3,3,0,0,3,0,1,1,0,1,0,0,0,2,2,1,2,0,0,0,0,0,0,0,0,2,0,1,2,
2,2,1,1,1,1,1,2,2,2,1,0,2,0,1,0,1,0,0,1,0,1,0,0,1,0,0,0,0,1,0,0,
3,3,3,3,2,2,2,2,2,0,2,1,1,1,1,2,1,2,1,1,0,2,0,1,0,1,0,0,2,0,1,2,
1,1,1,1,1,1,1,2,2,1,1,0,2,0,1,0,2,0,0,1,1,1,0,0,2,0,0,0,1,1,0,0,
2,3,3,3,3,1,0,0,0,0,0,0,0,0,0,0,2,0,0,1,1,0,0,0,0,0,0,1,2,0,1,2,
2,2,2,1,1,2,1,1,2,2,2,1,2,0,1,1,1,1,1,1,0,1,1,1,1,0,0,1,1,1,0,0,
2,3,3,3,3,0,2,2,0,2,1,0,0,0,1,1,1,2,0,2,0,0,0,3,0,0,0,0,2,0,2,2,
1,1,1,2,1,2,1,1,2,2,2,1,2,0,1,1,1,0,1,1,1,1,0,2,1,0,0,0,1,1,0,0,
2,3,3,3,3,0,2,1,0,0,2,0,0,0,0,0,1,2,0,2,0,0,0,0,0,0,0,0,2,0,1,2,
1,1,1,2,1,1,1,1,2,2,2,0,1,0,1,1,1,0,0,1,1,1,0,0,1,0,0,0,0,1,0,0,
3,3,2,2,3,0,1,0,1,0,0,0,0,0,0,0,1,1,0,3,0,0,0,0,0,0,0,0,1,0,2,2,
1,1,1,1,1,2,1,1,2,2,1,2,2,1,0,1,1,1,1,1,0,1,0,0,1,0,0,0,1,1,0,0,
3,1,0,1,0,2,2,2,2,3,2,1,1,1,2,3,0,0,1,0,2,1,1,0,1,1,1,1,2,1,1,1,
1,2,2,1,2,1,2,2,1,1,0,1,2,1,2,2,1,1,1,0,0,1,1,1,2,1,0,1,0,0,0,0,
2,1,0,1,0,3,1,2,2,2,2,1,2,2,1,1,1,0,2,1,2,2,1,1,2,1,1,0,2,1,1,1,
1,2,2,2,2,2,2,2,1,2,0,1,1,0,2,1,1,1,1,1,0,0,1,1,1,1,0,1,0,0,0,0,
2,1,1,1,1,2,2,2,2,1,2,2,2,1,2,2,1,1,2,1,2,3,2,2,1,1,1,1,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,2,2,3,2,0,1,2,0,1,2,1,1,0,1,0,1,2,1,2,0,0,0,1,1,0,0,0,1,0,0,2,
1,1,0,0,1,1,0,1,1,1,1,0,2,0,1,1,1,0,0,1,1,0,0,0,0,1,0,0,0,1,0,0,
2,0,0,0,0,1,2,2,2,2,2,2,2,1,2,1,1,1,1,1,1,1,0,1,1,1,1,1,2,1,1,1,
1,2,2,2,2,1,1,2,1,2,1,1,1,0,2,1,2,1,1,1,0,2,1,1,1,1,0,1,0,0,0,0,
3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,
1,1,0,1,0,1,1,1,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,2,2,3,2,0,0,0,0,1,0,0,0,0,0,0,1,1,0,2,0,0,0,0,0,0,0,0,1,0,1,2,
1,1,1,1,1,1,0,0,2,2,2,2,2,0,1,1,0,1,1,1,1,1,0,0,1,0,0,0,1,1,0,1,
2,3,1,2,1,0,1,1,0,2,2,2,0,0,1,0,0,1,1,1,1,0,0,0,0,0,0,0,1,0,1,2,
1,1,1,1,2,1,1,1,1,1,1,1,1,0,1,1,0,1,0,1,0,1,0,0,1,0,0,0,0,1,0,0,
2,2,2,2,2,0,0,2,0,0,2,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,2,0,2,2,
1,1,1,1,1,0,0,1,2,1,1,0,1,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,
1,2,2,2,2,0,0,2,0,1,1,0,0,0,1,0,0,2,0,2,0,0,0,0,0,0,0,0,0,0,1,1,
0,0,0,1,1,1,1,1,1,1,1,1,1,0,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
1,2,2,3,2,0,0,1,0,0,1,0,0,0,0,0,0,1,0,2,0,0,0,1,0,0,0,0,0,0,0,2,
1,1,0,0,1,0,0,0,1,1,0,0,1,0,1,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,
2,1,2,2,2,1,2,1,2,2,1,1,2,1,1,1,0,1,1,1,1,2,0,1,0,1,1,1,1,0,1,1,
1,1,2,1,1,1,1,1,1,0,0,1,2,1,1,1,1,1,1,0,0,1,1,1,0,0,0,0,0,0,0,0,
1,0,0,1,3,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,2,2,2,1,0,0,1,0,2,0,0,0,0,0,1,1,1,0,1,0,0,0,0,0,0,0,0,2,0,0,1,
0,2,0,1,0,0,1,1,2,0,1,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,
1,2,2,2,2,0,1,1,0,2,1,0,1,1,1,0,0,1,0,2,0,1,0,0,0,0,0,0,0,0,0,1,
0,1,0,0,1,0,0,0,1,1,0,0,1,0,0,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,
2,2,2,2,2,0,0,1,0,0,0,1,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,1,
0,1,0,1,1,1,0,0,1,1,1,0,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,
2,0,1,0,0,1,2,1,1,1,1,1,1,2,2,1,0,0,1,0,1,0,0,0,0,1,1,1,1,0,0,0,
1,1,2,1,1,1,1,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,2,1,2,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0,1,
0,0,0,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,0,1,2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,
0,1,1,0,1,1,1,0,0,1,0,0,1,0,1,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,
1,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,2,0,0,2,0,1,0,0,1,0,0,1,
1,1,0,0,1,1,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,
1,1,1,1,1,1,1,2,0,0,0,0,0,0,2,1,0,1,1,0,0,1,1,1,0,1,0,0,0,0,0,0,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,0,1,1,0,1,1,1,1,1,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
)

# Single-byte charset model for Bulgarian text encoded as ISO-8859-5.
# Keys follow the standard chardet SBCS model layout: a byte-to-order map,
# the shared pair-frequency matrix, and metadata for the prober.
Latin5BulgarianModel = {
  'char_to_order_map': Latin5_BulgarianCharToOrderMap,
  'precedence_matrix': BulgarianLangModel,
  'typical_positive_ratio': 0.969392,
  'keep_english_letter': False,
  'charset_name': "ISO-8859-5",
  # Fixed misspelling 'Bulgairan' -> 'Bulgarian' so the reported language
  # name matches Win1251BulgarianModel below.
  'language': 'Bulgarian',
}

# Single-byte charset model for Bulgarian text encoded as windows-1251;
# shares the pair-frequency matrix with the ISO-8859-5 model above.
Win1251BulgarianModel = {
  'char_to_order_map': win1251BulgarianCharToOrderMap,
  'precedence_matrix': BulgarianLangModel,
  'typical_positive_ratio': 0.969392,
  'keep_english_letter': False,
  'charset_name': "windows-1251",
  'language': 'Bulgarian',
}
_vendor/chardet/escprober.py000064400000007556151733136240012172 0ustar00######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################

from .charsetprober import CharSetProber
from .codingstatemachine import CodingStateMachine
from .enums import LanguageFilter, ProbingState, MachineState
from .escsm import (HZ_SM_MODEL, ISO2022CN_SM_MODEL, ISO2022JP_SM_MODEL,
                    ISO2022KR_SM_MODEL)


class EscCharSetProber(CharSetProber):
    """
    This CharSetProber uses a "code scheme" approach for detecting encodings,
    whereby easily recognizable escape or shift sequences are relied on to
    identify these encodings.
    """

    def __init__(self, lang_filter=None):
        """Create one coding state machine per escape-encoded charset whose
        language group is enabled in ``lang_filter``."""
        super(EscCharSetProber, self).__init__(lang_filter=lang_filter)
        # One state machine per candidate escape encoding; machines are
        # switched on/off via their ``active`` attribute rather than removed.
        self.coding_sm = []
        if self.lang_filter & LanguageFilter.CHINESE_SIMPLIFIED:
            self.coding_sm.append(CodingStateMachine(HZ_SM_MODEL))
            self.coding_sm.append(CodingStateMachine(ISO2022CN_SM_MODEL))
        if self.lang_filter & LanguageFilter.JAPANESE:
            self.coding_sm.append(CodingStateMachine(ISO2022JP_SM_MODEL))
        if self.lang_filter & LanguageFilter.KOREAN:
            self.coding_sm.append(CodingStateMachine(ISO2022KR_SM_MODEL))
        # Remaining state is (re)initialized by reset() below.
        self.active_sm_count = None
        self._detected_charset = None
        self._detected_language = None
        self._state = None
        self.reset()

    def reset(self):
        """Reactivate every state machine and forget any prior detection."""
        super(EscCharSetProber, self).reset()
        for coding_sm in self.coding_sm:
            if not coding_sm:
                continue
            coding_sm.active = True
            coding_sm.reset()
        self.active_sm_count = len(self.coding_sm)
        self._detected_charset = None
        self._detected_language = None

    @property
    def charset_name(self):
        # Charset reported by the machine that matched; None before a match.
        return self._detected_charset

    @property
    def language(self):
        # Language of the machine that matched; None before a match.
        return self._detected_language

    def get_confidence(self):
        """Escape sequences are unambiguous, so confidence is all-or-nothing:
        0.99 once a charset has been detected, 0.00 otherwise."""
        if self._detected_charset:
            return 0.99
        else:
            return 0.00

    def feed(self, byte_str):
        """Run each byte of ``byte_str`` through every still-active state
        machine.

        A machine that reaches ``MachineState.ERROR`` is deactivated; when
        none remain the prober gives up (``NOT_ME``).  A machine reaching
        ``MachineState.ITS_ME`` settles detection (``FOUND_IT``) and records
        its charset and language.  Returns the current ``ProbingState``.
        """
        for c in byte_str:
            for coding_sm in self.coding_sm:
                if not coding_sm or not coding_sm.active:
                    continue
                coding_state = coding_sm.next_state(c)
                if coding_state == MachineState.ERROR:
                    coding_sm.active = False
                    self.active_sm_count -= 1
                    if self.active_sm_count <= 0:
                        self._state = ProbingState.NOT_ME
                        return self.state
                elif coding_state == MachineState.ITS_ME:
                    self._state = ProbingState.FOUND_IT
                    self._detected_charset = coding_sm.get_coding_state_machine()
                    self._detected_language = coding_sm.language
                    return self.state

        return self.state
_vendor/chardet/langhungarianmodel.py000064400000030460151733136240014033 0ustar00######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################

# 255: Control characters that usually do not exist in any text
# 254: Carriage/Return
# 253: symbol (punctuation) that does not belong to word
# 252: 0 - 9

# Character Mapping Table:
# Maps each ISO-8859-2 byte value (tuple index 0-255) to a Hungarian
# character frequency order; 255/254/253/252 are the special markers
# described in the legend above.
Latin2_HungarianCharToOrderMap = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255,  # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,  # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,  # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253,  # 30
253, 28, 40, 54, 45, 32, 50, 49, 38, 39, 53, 36, 41, 34, 35, 47,
 46, 71, 43, 33, 37, 57, 48, 64, 68, 55, 52,253,253,253,253,253,
253,  2, 18, 26, 17,  1, 27, 12, 20,  9, 22,  7,  6, 13,  4,  8,
 23, 67, 10,  5,  3, 21, 19, 65, 62, 16, 11,253,253,253,253,253,
159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,
175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,
191,192,193,194,195,196,197, 75,198,199,200,201,202,203,204,205,
 79,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,
221, 51, 81,222, 78,223,224,225,226, 44,227,228,229, 61,230,231,
232,233,234, 58,235, 66, 59,236,237,238, 60, 69, 63,239,240,241,
 82, 14, 74,242, 70, 80,243, 72,244, 15, 83, 77, 84, 30, 76, 85,
245,246,247, 25, 73, 42, 24,248,249,250, 31, 56, 29,251,252,253,
)

# Same frequency-order mapping as above, but for the windows-1250 byte
# layout of the accented Latin letters.
win1250HungarianCharToOrderMap = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255,  # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,  # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,  # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253,  # 30
253, 28, 40, 54, 45, 32, 50, 49, 38, 39, 53, 36, 41, 34, 35, 47,
 46, 72, 43, 33, 37, 57, 48, 64, 68, 55, 52,253,253,253,253,253,
253,  2, 18, 26, 17,  1, 27, 12, 20,  9, 22,  7,  6, 13,  4,  8,
 23, 67, 10,  5,  3, 21, 19, 65, 62, 16, 11,253,253,253,253,253,
161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,
177,178,179,180, 78,181, 69,182,183,184,185,186,187,188,189,190,
191,192,193,194,195,196,197, 76,198,199,200,201,202,203,204,205,
 81,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,
221, 51, 83,222, 80,223,224,225,226, 44,227,228,229, 61,230,231,
232,233,234, 58,235, 66, 59,236,237,238, 60, 70, 63,239,240,241,
 84, 14, 75,242, 71, 82,243, 73,244, 15, 85, 79, 86, 30, 77, 87,
245,246,247, 25, 74, 42, 24,248,249,250, 31, 56, 29,251,252,253,
)

# Model Table:
# total sequences: 100%
# first 512 sequences: 94.7368%
# first 1024 sequences:5.2623%
# rest  sequences:     0.8894%
# negative sequences:  0.0009%
# Machine-generated pair-frequency matrix for Hungarian, flattened into a
# tuple: each value rates how typical a two-character sequence is
# (3 = frequent ... 0 = rare/negative).
# NOTE(review): indexing presumably uses the order values from the
# char-to-order maps above -- confirm against the single-byte prober code.
HungarianLangModel = (
0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,
3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,2,2,3,3,1,1,2,2,2,2,2,1,2,
3,2,2,3,3,3,3,3,2,3,3,3,3,3,3,1,2,3,3,3,3,2,3,3,1,1,3,3,0,1,1,1,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,
3,2,1,3,3,3,3,3,2,3,3,3,3,3,1,1,2,3,3,3,3,3,3,3,1,1,3,2,0,1,1,1,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,1,1,2,3,3,3,1,3,3,3,3,3,1,3,3,2,2,0,3,2,3,
0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,
3,3,3,3,3,3,2,3,3,3,2,3,3,2,3,3,3,3,3,2,3,3,2,2,3,2,3,2,0,3,2,2,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,
3,3,3,3,3,3,2,3,3,3,3,3,2,3,3,3,1,2,3,2,2,3,1,2,3,3,2,2,0,3,3,3,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,3,2,3,3,3,3,2,3,3,3,3,0,2,3,2,
0,0,0,1,1,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,3,1,1,1,3,3,2,1,3,2,2,3,2,1,3,2,2,1,0,3,3,1,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,2,2,3,3,3,3,3,1,2,3,3,3,3,1,2,1,3,3,3,3,2,2,3,1,1,3,2,0,1,1,1,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,2,1,3,3,3,3,3,2,2,1,3,3,3,0,1,1,2,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,2,3,3,3,2,0,3,2,3,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,1,0,
3,3,3,3,3,3,2,3,3,3,2,3,2,3,3,3,1,3,2,2,2,3,1,1,3,3,1,1,0,3,3,2,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,2,3,3,3,2,3,2,3,3,3,2,3,3,3,3,3,1,2,3,2,2,0,2,2,2,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,3,3,2,2,2,3,1,3,3,2,2,1,3,3,3,1,1,3,1,2,3,2,3,2,2,2,1,0,2,2,2,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,
3,1,1,3,3,3,3,3,1,2,3,3,3,3,1,2,1,3,3,3,2,2,3,2,1,0,3,2,0,1,1,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,1,1,3,3,3,3,3,1,2,3,3,3,3,1,1,0,3,3,3,3,0,2,3,0,0,2,1,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,2,2,3,3,2,2,2,2,3,3,0,1,2,3,2,3,2,2,3,2,1,2,0,2,2,2,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,
3,3,3,3,3,3,1,2,3,3,3,2,1,2,3,3,2,2,2,3,2,3,3,1,3,3,1,1,0,2,3,2,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,3,3,1,2,2,2,2,3,3,3,1,1,1,3,3,1,1,3,1,1,3,2,1,2,3,1,1,0,2,2,2,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,3,3,2,1,2,1,1,3,3,1,1,1,1,3,3,1,1,2,2,1,2,1,1,2,2,1,1,0,2,2,1,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,3,3,1,1,2,1,1,3,3,1,0,1,1,3,3,2,0,1,1,2,3,1,0,2,2,1,0,0,1,3,2,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,2,1,3,3,3,3,3,1,2,3,2,3,3,2,1,1,3,2,3,2,1,2,2,0,1,2,1,0,0,1,1,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
3,3,3,3,2,2,2,2,3,1,2,2,1,1,3,3,0,3,2,1,2,3,2,1,3,3,1,1,0,2,1,3,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,3,3,2,2,2,3,2,3,3,3,2,1,1,3,3,1,1,1,2,2,3,2,3,2,2,2,1,0,2,2,1,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
1,0,0,3,3,3,3,3,0,0,3,3,2,3,0,0,0,2,3,3,1,0,1,2,0,0,1,1,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,1,2,3,3,3,3,3,1,2,3,3,2,2,1,1,0,3,3,2,2,1,2,2,1,0,2,2,0,1,1,1,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,2,2,1,3,1,2,3,3,2,2,1,1,2,2,1,1,1,1,3,2,1,1,1,1,2,1,0,1,2,1,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,
2,3,3,1,1,1,1,1,3,3,3,0,1,1,3,3,1,1,1,1,1,2,2,0,3,1,1,2,0,2,1,1,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,1,0,1,2,1,2,2,0,1,2,3,1,2,0,0,0,2,1,1,1,1,1,2,0,0,1,1,0,0,0,0,
1,2,1,2,2,2,1,2,1,2,0,2,0,2,2,1,1,2,1,1,2,1,1,1,0,1,0,0,0,1,1,0,
1,1,1,2,3,2,3,3,0,1,2,2,3,1,0,1,0,2,1,2,2,0,1,1,0,0,1,1,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,0,3,3,2,2,1,0,0,3,2,3,2,0,0,0,1,1,3,0,0,1,1,0,0,2,1,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,1,1,2,2,3,3,1,0,1,3,2,3,1,1,1,0,1,1,1,1,1,3,1,0,0,2,2,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,1,1,1,2,2,2,1,0,1,2,3,3,2,0,0,0,2,1,1,1,2,1,1,1,0,1,1,1,0,0,0,
1,2,2,2,2,2,1,1,1,2,0,2,1,1,1,1,1,2,1,1,1,1,1,1,0,1,1,1,0,0,1,1,
3,2,2,1,0,0,1,1,2,2,0,3,0,1,2,1,1,0,0,1,1,1,0,1,1,1,1,0,2,1,1,1,
2,2,1,1,1,2,1,2,1,1,1,1,1,1,1,2,1,1,1,2,3,1,1,1,1,1,1,1,1,1,0,1,
2,3,3,0,1,0,0,0,3,3,1,0,0,1,2,2,1,0,0,0,0,2,0,0,1,1,1,0,2,1,1,1,
2,1,1,1,1,1,1,2,1,1,0,1,1,0,1,1,1,0,1,2,1,1,0,1,1,1,1,1,1,1,0,1,
2,3,3,0,1,0,0,0,2,2,0,0,0,0,1,2,2,0,0,0,0,1,0,0,1,1,0,0,2,0,1,0,
2,1,1,1,1,2,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1,1,2,0,1,1,1,1,1,0,1,
3,2,2,0,1,0,1,0,2,3,2,0,0,1,2,2,1,0,0,1,1,1,0,0,2,1,0,1,2,2,1,1,
2,1,1,1,1,1,1,2,1,1,1,1,1,1,0,2,1,0,1,1,0,1,1,1,0,1,1,2,1,1,0,1,
2,2,2,0,0,1,0,0,2,2,1,1,0,0,2,1,1,0,0,0,1,2,0,0,2,1,0,0,2,1,1,1,
2,1,1,1,1,2,1,2,1,1,1,2,2,1,1,2,1,1,1,2,1,1,1,1,1,1,1,1,1,1,0,1,
1,2,3,0,0,0,1,0,3,2,1,0,0,1,2,1,1,0,0,0,0,2,1,0,1,1,0,0,2,1,2,1,
1,1,0,0,0,1,0,1,1,1,1,1,2,0,0,1,0,0,0,2,0,0,1,1,1,1,1,1,1,1,0,1,
3,0,0,2,1,2,2,1,0,0,2,1,2,2,0,0,0,2,1,1,1,0,1,1,0,0,1,1,2,0,0,0,
1,2,1,2,2,1,1,2,1,2,0,1,1,1,1,1,1,1,1,1,2,1,1,0,0,1,1,1,1,0,0,1,
1,3,2,0,0,0,1,0,2,2,2,0,0,0,2,2,1,0,0,0,0,3,1,1,1,1,0,0,2,1,1,1,
2,1,0,1,1,1,0,1,1,1,1,1,1,1,0,2,1,0,0,1,0,1,1,0,1,1,1,1,1,1,0,1,
2,3,2,0,0,0,1,0,2,2,0,0,0,0,2,1,1,0,0,0,0,2,1,0,1,1,0,0,2,1,1,0,
2,1,1,1,1,2,1,2,1,2,0,1,1,1,0,2,1,1,1,2,1,1,1,1,0,1,1,1,1,1,0,1,
3,1,1,2,2,2,3,2,1,1,2,2,1,1,0,1,0,2,2,1,1,1,1,1,0,0,1,1,0,1,1,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,2,2,0,0,0,0,0,2,2,0,0,0,0,2,2,1,0,0,0,1,1,0,0,1,2,0,0,2,1,1,1,
2,2,1,1,1,2,1,2,1,1,0,1,1,1,1,2,1,1,1,2,1,1,1,1,0,1,2,1,1,1,0,1,
1,0,0,1,2,3,2,1,0,0,2,0,1,1,0,0,0,1,1,1,1,0,1,1,0,0,1,0,0,0,0,0,
1,2,1,2,1,2,1,1,1,2,0,2,1,1,1,0,1,2,0,0,1,1,1,0,0,0,0,0,0,0,0,0,
2,3,2,0,0,0,0,0,1,1,2,1,0,0,1,1,1,0,0,0,0,2,0,0,1,1,0,0,2,1,1,1,
2,1,1,1,1,1,1,2,1,0,1,1,1,1,0,2,1,1,1,1,1,1,0,1,0,1,1,1,1,1,0,1,
1,2,2,0,1,1,1,0,2,2,2,0,0,0,3,2,1,0,0,0,1,1,0,0,1,1,0,1,1,1,0,0,
1,1,0,1,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,2,1,1,1,0,0,1,1,1,0,1,0,1,
2,1,0,2,1,1,2,2,1,1,2,1,1,1,0,0,0,1,1,0,1,1,1,1,0,0,1,1,1,0,0,0,
1,2,2,2,2,2,1,1,1,2,0,2,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,0,1,0,
1,2,3,0,0,0,1,0,2,2,0,0,0,0,2,2,0,0,0,0,0,1,0,0,1,0,0,0,2,0,1,0,
2,1,1,1,1,1,0,2,0,0,0,1,2,1,1,1,1,0,1,2,0,1,0,1,0,1,1,1,0,1,0,1,
2,2,2,0,0,0,1,0,2,1,2,0,0,0,1,1,2,0,0,0,0,1,0,0,1,1,0,0,2,1,0,1,
2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,0,1,1,1,1,1,0,1,
1,2,2,0,0,0,1,0,2,2,2,0,0,0,1,1,0,0,0,0,0,1,1,0,2,0,0,1,1,1,0,1,
1,0,1,1,1,1,1,1,0,1,1,1,1,0,0,1,0,0,1,1,0,1,0,1,1,1,1,1,0,0,0,1,
1,0,0,1,0,1,2,1,0,0,1,1,1,2,0,0,0,1,1,0,1,0,1,1,0,0,1,0,0,0,0,0,
0,2,1,2,1,1,1,1,1,2,0,2,0,1,1,0,1,2,1,0,1,1,1,0,0,0,0,0,0,1,0,0,
2,1,1,0,1,2,0,0,1,1,1,0,0,0,1,1,0,0,0,0,0,1,0,0,1,0,0,0,2,1,0,1,
2,2,1,1,1,1,1,2,1,1,0,1,1,1,1,2,1,1,1,2,1,1,0,1,0,1,1,1,1,1,0,1,
1,2,2,0,0,0,0,0,1,1,0,0,0,0,2,1,0,0,0,0,0,2,0,0,2,2,0,0,2,0,0,1,
2,1,1,1,1,1,1,1,0,1,1,0,1,1,0,1,0,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,
1,1,2,0,0,3,1,0,2,1,1,1,0,0,1,1,1,0,0,0,1,1,0,0,0,1,0,0,1,0,1,0,
1,2,1,0,1,1,1,2,1,1,0,1,1,1,1,1,0,0,0,1,1,1,1,1,0,1,0,0,0,1,0,0,
2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,2,0,0,0,
2,1,1,1,1,1,1,1,1,1,0,1,1,1,1,1,1,1,1,1,2,1,1,0,0,1,1,1,1,1,0,1,
2,1,1,1,2,1,1,1,0,1,1,2,1,0,0,0,0,1,1,1,1,0,1,0,0,0,0,1,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,1,0,1,1,1,1,1,0,0,1,1,2,1,0,0,0,1,1,0,0,0,1,1,0,0,1,0,1,0,0,0,
1,2,1,1,1,1,1,1,1,1,0,1,0,1,1,1,1,1,1,0,1,1,1,0,0,0,0,0,0,1,0,0,
2,0,0,0,1,1,1,1,0,0,1,1,0,0,0,0,0,1,1,1,2,0,0,1,0,0,1,0,1,0,0,0,
0,1,1,1,1,1,1,1,1,2,0,1,1,1,1,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0,
1,0,0,1,1,1,1,1,0,0,2,1,0,1,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,
0,1,1,1,1,1,1,0,1,1,0,1,0,1,1,0,1,1,0,0,1,1,1,0,0,0,0,0,0,0,0,0,
1,0,0,1,1,1,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
0,1,1,1,1,1,0,0,1,1,0,1,0,1,0,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0,
0,0,0,1,0,0,0,0,0,0,1,1,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,1,1,1,0,1,0,0,1,1,0,1,0,1,1,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0,
2,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,0,0,1,0,0,1,0,1,0,1,1,1,0,0,1,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,0,1,1,1,1,0,0,0,1,1,1,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,
0,1,1,1,1,1,1,0,1,1,0,1,0,1,0,0,1,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,
)

# Single-byte charset model for Hungarian text encoded as ISO-8859-2.
Latin2HungarianModel = {
  'char_to_order_map': Latin2_HungarianCharToOrderMap,
  'precedence_matrix': HungarianLangModel,
  'typical_positive_ratio': 0.947368,
  'keep_english_letter': True,
  'charset_name': "ISO-8859-2",
  'language': 'Hungarian',
}

# Single-byte charset model for Hungarian text encoded as windows-1250;
# shares the pair-frequency matrix with the ISO-8859-2 model above.
Win1250HungarianModel = {
  'char_to_order_map': win1250HungarianCharToOrderMap,
  'precedence_matrix': HungarianLangModel,
  'typical_positive_ratio': 0.947368,
  'keep_english_letter': True,
  'charset_name': "windows-1250",
  'language': 'Hungarian',
}
_vendor/chardet/mbcharsetprober.py000064400000006525151733136240013363 0ustar00######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 2001
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Mark Pilgrim - port to Python
#   Shy Shalom - original C code
#   Proofpoint, Inc.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################

from .charsetprober import CharSetProber
from .enums import ProbingState, MachineState


class MultiByteCharSetProber(CharSetProber):
    """
    Base class for multi-byte charset probers.

    Subclasses are expected to assign ``self.coding_sm`` (a coding state
    machine) and ``self.distribution_analyzer`` in their ``__init__`` and
    to implement the ``charset_name`` / ``language`` properties.
    """

    def __init__(self, lang_filter=None):
        super(MultiByteCharSetProber, self).__init__(lang_filter=lang_filter)
        # Both are populated by subclasses; left as None in the base class.
        self.distribution_analyzer = None
        self.coding_sm = None
        # Two-byte rolling window: [last byte of previous chunk, first
        # byte of current chunk], used to stitch chunks together in feed().
        self._last_char = [0, 0]

    def reset(self):
        """Restore the prober and its helpers to their initial state."""
        super(MultiByteCharSetProber, self).reset()
        if self.coding_sm:
            self.coding_sm.reset()
        if self.distribution_analyzer:
            self.distribution_analyzer.reset()
        self._last_char = [0, 0]

    @property
    def charset_name(self):
        raise NotImplementedError

    @property
    def language(self):
        raise NotImplementedError

    def feed(self, byte_str):
        """Run a chunk of the document through the coding state machine
        and the distribution analyzer; return the updated probing state.

        :param byte_str: a bytes-like chunk of the document being probed.
        """
        # Guard against an empty chunk: the original code would raise
        # IndexError on byte_str[-1] below without analyzing anything.
        if not byte_str:
            return self.state
        for i in range(len(byte_str)):
            coding_state = self.coding_sm.next_state(byte_str[i])
            if coding_state == MachineState.ERROR:
                self.logger.debug('%s %s prober hit error at byte %s',
                                  self.charset_name, self.language, i)
                self._state = ProbingState.NOT_ME
                break
            elif coding_state == MachineState.ITS_ME:
                self._state = ProbingState.FOUND_IT
                break
            elif coding_state == MachineState.START:
                char_len = self.coding_sm.get_current_charlen()
                if i == 0:
                    # First byte of this chunk completes the character
                    # started by the last byte of the previous chunk.
                    self._last_char[1] = byte_str[0]
                    self.distribution_analyzer.feed(self._last_char, char_len)
                else:
                    self.distribution_analyzer.feed(byte_str[i - 1:i + 1],
                                                    char_len)

        # Remember the final byte so the next chunk can pair with it.
        self._last_char[0] = byte_str[-1]

        if self.state == ProbingState.DETECTING:
            # Shortcut: stop probing early once we are confident enough.
            if (self.distribution_analyzer.got_enough_data() and
                    (self.get_confidence() > self.SHORTCUT_THRESHOLD)):
                self._state = ProbingState.FOUND_IT

        return self.state

    def get_confidence(self):
        """Confidence is delegated entirely to the distribution analyzer."""
        return self.distribution_analyzer.get_confidence()
_vendor/chardet/big5freq.py000064400000075026151733136240011707 0ustar00######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################

# Big5 frequency table
# by Taiwan's Mandarin Promotion Council
# <http://www.edu.tw:81/mandr/>
#
# 128  --> 0.42261
# 256  --> 0.57851
# 512  --> 0.74851
# 1024 --> 0.89384
# 2048 --> 0.97583
#
# Ideal Distribution Ratio = 0.74851/(1-0.74851) = 2.98
# Random Distribution Ratio = 512/(5401-512) = 0.105
#
# Typical Distribution Ratio about 25% of Ideal one, still much higher than RDR

# Typical distribution ratio for Big5 text — roughly 25% of the ideal
# ratio computed in the header comment above, still well above the
# random-distribution ratio.
BIG5_TYPICAL_DISTRIBUTION_RATIO = 0.75

# Char to FreqOrder table
# Size of BIG5_CHAR_TO_FREQ_ORDER below (presumably the number of ranked
# characters; entries beyond this are treated as unranked — TODO confirm
# against chardistribution.py).
BIG5_TABLE_SIZE = 5376

BIG5_CHAR_TO_FREQ_ORDER = (
   1,1801,1506, 255,1431, 198,   9,  82,   6,5008, 177, 202,3681,1256,2821, 110, #   16
3814,  33,3274, 261,  76,  44,2114,  16,2946,2187,1176, 659,3971,  26,3451,2653, #   32
1198,3972,3350,4202, 410,2215, 302, 590, 361,1964,   8, 204,  58,4510,5009,1932, #   48
  63,5010,5011, 317,1614,  75, 222, 159,4203,2417,1480,5012,3555,3091, 224,2822, #   64
3682,   3,  10,3973,1471,  29,2787,1135,2866,1940, 873, 130,3275,1123, 312,5013, #   80
4511,2052, 507, 252, 682,5014, 142,1915, 124, 206,2947,  34,3556,3204,  64, 604, #   96
5015,2501,1977,1978, 155,1991, 645, 641,1606,5016,3452, 337,  72, 406,5017,  80, #  112
 630, 238,3205,1509, 263, 939,1092,2654, 756,1440,1094,3453, 449,  69,2987, 591, #  128
 179,2096, 471, 115,2035,1844,  60,  50,2988, 134, 806,1869, 734,2036,3454, 180, #  144
 995,1607, 156, 537,2907, 688,5018, 319,1305, 779,2145, 514,2379, 298,4512, 359, #  160
2502,  90,2716,1338, 663,  11, 906,1099,2553,  20,2441, 182, 532,1716,5019, 732, #  176
1376,4204,1311,1420,3206,  25,2317,1056, 113, 399, 382,1950, 242,3455,2474, 529, #  192
3276, 475,1447,3683,5020, 117,  21, 656, 810,1297,2300,2334,3557,5021, 126,4205, #  208
 706, 456, 150, 613,4513,  71,1118,2037,4206, 145,3092,  85, 835, 486,2115,1246, #  224
1426, 428, 727,1285,1015, 800, 106, 623, 303,1281,5022,2128,2359, 347,3815, 221, #  240
3558,3135,5023,1956,1153,4207,  83, 296,1199,3093, 192, 624,  93,5024, 822,1898, #  256
2823,3136, 795,2065, 991,1554,1542,1592,  27,  43,2867, 859, 139,1456, 860,4514, #  272
 437, 712,3974, 164,2397,3137, 695, 211,3037,2097, 195,3975,1608,3559,3560,3684, #  288
3976, 234, 811,2989,2098,3977,2233,1441,3561,1615,2380, 668,2077,1638, 305, 228, #  304
1664,4515, 467, 415,5025, 262,2099,1593, 239, 108, 300, 200,1033, 512,1247,2078, #  320
5026,5027,2176,3207,3685,2682, 593, 845,1062,3277,  88,1723,2038,3978,1951, 212, #  336
 266, 152, 149, 468,1899,4208,4516,  77, 187,5028,3038,  37,   5,2990,5029,3979, #  352
5030,5031,  39,2524,4517,2908,3208,2079,  55, 148,  74,4518, 545, 483,1474,1029, #  368
1665, 217,1870,1531,3138,1104,2655,4209,  24, 172,3562, 900,3980,3563,3564,4519, #  384
  32,1408,2824,1312, 329, 487,2360,2251,2717, 784,2683,   4,3039,3351,1427,1789, #  400
 188, 109, 499,5032,3686,1717,1790, 888,1217,3040,4520,5033,3565,5034,3352,1520, #  416
3687,3981, 196,1034, 775,5035,5036, 929,1816, 249, 439,  38,5037,1063,5038, 794, #  432
3982,1435,2301,  46, 178,3278,2066,5039,2381,5040, 214,1709,4521, 804,  35, 707, #  448
 324,3688,1601,2554, 140, 459,4210,5041,5042,1365, 839, 272, 978,2262,2580,3456, #  464
2129,1363,3689,1423, 697, 100,3094,  48,  70,1231, 495,3139,2196,5043,1294,5044, #  480
2080, 462, 586,1042,3279, 853, 256, 988, 185,2382,3457,1698, 434,1084,5045,3458, #  496
 314,2625,2788,4522,2335,2336, 569,2285, 637,1817,2525, 757,1162,1879,1616,3459, #  512
 287,1577,2116, 768,4523,1671,2868,3566,2526,1321,3816, 909,2418,5046,4211, 933, #  528
3817,4212,2053,2361,1222,4524, 765,2419,1322, 786,4525,5047,1920,1462,1677,2909, #  544
1699,5048,4526,1424,2442,3140,3690,2600,3353,1775,1941,3460,3983,4213, 309,1369, #  560
1130,2825, 364,2234,1653,1299,3984,3567,3985,3986,2656, 525,1085,3041, 902,2001, #  576
1475, 964,4527, 421,1845,1415,1057,2286, 940,1364,3141, 376,4528,4529,1381,   7, #  592
2527, 983,2383, 336,1710,2684,1846, 321,3461, 559,1131,3042,2752,1809,1132,1313, #  608
 265,1481,1858,5049, 352,1203,2826,3280, 167,1089, 420,2827, 776, 792,1724,3568, #  624
4214,2443,3281,5050,4215,5051, 446, 229, 333,2753, 901,3818,1200,1557,4530,2657, #  640
1921, 395,2754,2685,3819,4216,1836, 125, 916,3209,2626,4531,5052,5053,3820,5054, #  656
5055,5056,4532,3142,3691,1133,2555,1757,3462,1510,2318,1409,3569,5057,2146, 438, #  672
2601,2910,2384,3354,1068, 958,3043, 461, 311,2869,2686,4217,1916,3210,4218,1979, #  688
 383, 750,2755,2627,4219, 274, 539, 385,1278,1442,5058,1154,1965, 384, 561, 210, #  704
  98,1295,2556,3570,5059,1711,2420,1482,3463,3987,2911,1257, 129,5060,3821, 642, #  720
 523,2789,2790,2658,5061, 141,2235,1333,  68, 176, 441, 876, 907,4220, 603,2602, #  736
 710, 171,3464, 404, 549,  18,3143,2398,1410,3692,1666,5062,3571,4533,2912,4534, #  752
5063,2991, 368,5064, 146, 366,  99, 871,3693,1543, 748, 807,1586,1185,  22,2263, #  768
 379,3822,3211,5065,3212, 505,1942,2628,1992,1382,2319,5066, 380,2362, 218, 702, #  784
1818,1248,3465,3044,3572,3355,3282,5067,2992,3694, 930,3283,3823,5068,  59,5069, #  800
 585, 601,4221, 497,3466,1112,1314,4535,1802,5070,1223,1472,2177,5071, 749,1837, #  816
 690,1900,3824,1773,3988,1476, 429,1043,1791,2236,2117, 917,4222, 447,1086,1629, #  832
5072, 556,5073,5074,2021,1654, 844,1090, 105, 550, 966,1758,2828,1008,1783, 686, #  848
1095,5075,2287, 793,1602,5076,3573,2603,4536,4223,2948,2302,4537,3825, 980,2503, #  864
 544, 353, 527,4538, 908,2687,2913,5077, 381,2629,1943,1348,5078,1341,1252, 560, #  880
3095,5079,3467,2870,5080,2054, 973, 886,2081, 143,4539,5081,5082, 157,3989, 496, #  896
4224,  57, 840, 540,2039,4540,4541,3468,2118,1445, 970,2264,1748,1966,2082,4225, #  912
3144,1234,1776,3284,2829,3695, 773,1206,2130,1066,2040,1326,3990,1738,1725,4226, #  928
 279,3145,  51,1544,2604, 423,1578,2131,2067, 173,4542,1880,5083,5084,1583, 264, #  944
 610,3696,4543,2444, 280, 154,5085,5086,5087,1739, 338,1282,3096, 693,2871,1411, #  960
1074,3826,2445,5088,4544,5089,5090,1240, 952,2399,5091,2914,1538,2688, 685,1483, #  976
4227,2475,1436, 953,4228,2055,4545, 671,2400,  79,4229,2446,3285, 608, 567,2689, #  992
3469,4230,4231,1691, 393,1261,1792,2401,5092,4546,5093,5094,5095,5096,1383,1672, # 1008
3827,3213,1464, 522,1119, 661,1150, 216, 675,4547,3991,1432,3574, 609,4548,2690, # 1024
2402,5097,5098,5099,4232,3045,   0,5100,2476, 315, 231,2447, 301,3356,4549,2385, # 1040
5101, 233,4233,3697,1819,4550,4551,5102,  96,1777,1315,2083,5103, 257,5104,1810, # 1056
3698,2718,1139,1820,4234,2022,1124,2164,2791,1778,2659,5105,3097, 363,1655,3214, # 1072
5106,2993,5107,5108,5109,3992,1567,3993, 718, 103,3215, 849,1443, 341,3357,2949, # 1088
1484,5110,1712, 127,  67, 339,4235,2403, 679,1412, 821,5111,5112, 834, 738, 351, # 1104
2994,2147, 846, 235,1497,1881, 418,1993,3828,2719, 186,1100,2148,2756,3575,1545, # 1120
1355,2950,2872,1377, 583,3994,4236,2581,2995,5113,1298,3699,1078,2557,3700,2363, # 1136
  78,3829,3830, 267,1289,2100,2002,1594,4237, 348, 369,1274,2197,2178,1838,4552, # 1152
1821,2830,3701,2757,2288,2003,4553,2951,2758, 144,3358, 882,4554,3995,2759,3470, # 1168
4555,2915,5114,4238,1726, 320,5115,3996,3046, 788,2996,5116,2831,1774,1327,2873, # 1184
3997,2832,5117,1306,4556,2004,1700,3831,3576,2364,2660, 787,2023, 506, 824,3702, # 1200
 534, 323,4557,1044,3359,2024,1901, 946,3471,5118,1779,1500,1678,5119,1882,4558, # 1216
 165, 243,4559,3703,2528, 123, 683,4239, 764,4560,  36,3998,1793, 589,2916, 816, # 1232
 626,1667,3047,2237,1639,1555,1622,3832,3999,5120,4000,2874,1370,1228,1933, 891, # 1248
2084,2917, 304,4240,5121, 292,2997,2720,3577, 691,2101,4241,1115,4561, 118, 662, # 1264
5122, 611,1156, 854,2386,1316,2875,   2, 386, 515,2918,5123,5124,3286, 868,2238, # 1280
1486, 855,2661, 785,2216,3048,5125,1040,3216,3578,5126,3146, 448,5127,1525,5128, # 1296
2165,4562,5129,3833,5130,4242,2833,3579,3147, 503, 818,4001,3148,1568, 814, 676, # 1312
1444, 306,1749,5131,3834,1416,1030, 197,1428, 805,2834,1501,4563,5132,5133,5134, # 1328
1994,5135,4564,5136,5137,2198,  13,2792,3704,2998,3149,1229,1917,5138,3835,2132, # 1344
5139,4243,4565,2404,3580,5140,2217,1511,1727,1120,5141,5142, 646,3836,2448, 307, # 1360
5143,5144,1595,3217,5145,5146,5147,3705,1113,1356,4002,1465,2529,2530,5148, 519, # 1376
5149, 128,2133,  92,2289,1980,5150,4003,1512, 342,3150,2199,5151,2793,2218,1981, # 1392
3360,4244, 290,1656,1317, 789, 827,2365,5152,3837,4566, 562, 581,4004,5153, 401, # 1408
4567,2252,  94,4568,5154,1399,2794,5155,1463,2025,4569,3218,1944,5156, 828,1105, # 1424
4245,1262,1394,5157,4246, 605,4570,5158,1784,2876,5159,2835, 819,2102, 578,2200, # 1440
2952,5160,1502, 436,3287,4247,3288,2836,4005,2919,3472,3473,5161,2721,2320,5162, # 1456
5163,2337,2068,  23,4571, 193, 826,3838,2103, 699,1630,4248,3098, 390,1794,1064, # 1472
3581,5164,1579,3099,3100,1400,5165,4249,1839,1640,2877,5166,4572,4573, 137,4250, # 1488
 598,3101,1967, 780, 104, 974,2953,5167, 278, 899, 253, 402, 572, 504, 493,1339, # 1504
5168,4006,1275,4574,2582,2558,5169,3706,3049,3102,2253, 565,1334,2722, 863,  41, # 1520
5170,5171,4575,5172,1657,2338,  19, 463,2760,4251, 606,5173,2999,3289,1087,2085, # 1536
1323,2662,3000,5174,1631,1623,1750,4252,2691,5175,2878, 791,2723,2663,2339, 232, # 1552
2421,5176,3001,1498,5177,2664,2630, 755,1366,3707,3290,3151,2026,1609, 119,1918, # 1568
3474, 862,1026,4253,5178,4007,3839,4576,4008,4577,2265,1952,2477,5179,1125, 817, # 1584
4254,4255,4009,1513,1766,2041,1487,4256,3050,3291,2837,3840,3152,5180,5181,1507, # 1600
5182,2692, 733,  40,1632,1106,2879, 345,4257, 841,2531, 230,4578,3002,1847,3292, # 1616
3475,5183,1263, 986,3476,5184, 735, 879, 254,1137, 857, 622,1300,1180,1388,1562, # 1632
4010,4011,2954, 967,2761,2665,1349, 592,2134,1692,3361,3003,1995,4258,1679,4012, # 1648
1902,2188,5185, 739,3708,2724,1296,1290,5186,4259,2201,2202,1922,1563,2605,2559, # 1664
1871,2762,3004,5187, 435,5188, 343,1108, 596,  17,1751,4579,2239,3477,3709,5189, # 1680
4580, 294,3582,2955,1693, 477, 979, 281,2042,3583, 643,2043,3710,2631,2795,2266, # 1696
1031,2340,2135,2303,3584,4581, 367,1249,2560,5190,3585,5191,4582,1283,3362,2005, # 1712
 240,1762,3363,4583,4584, 836,1069,3153, 474,5192,2149,2532, 268,3586,5193,3219, # 1728
1521,1284,5194,1658,1546,4260,5195,3587,3588,5196,4261,3364,2693,1685,4262, 961, # 1744
1673,2632, 190,2006,2203,3841,4585,4586,5197, 570,2504,3711,1490,5198,4587,2633, # 1760
3293,1957,4588, 584,1514, 396,1045,1945,5199,4589,1968,2449,5200,5201,4590,4013, # 1776
 619,5202,3154,3294, 215,2007,2796,2561,3220,4591,3221,4592, 763,4263,3842,4593, # 1792
5203,5204,1958,1767,2956,3365,3712,1174, 452,1477,4594,3366,3155,5205,2838,1253, # 1808
2387,2189,1091,2290,4264, 492,5206, 638,1169,1825,2136,1752,4014, 648, 926,1021, # 1824
1324,4595, 520,4596, 997, 847,1007, 892,4597,3843,2267,1872,3713,2405,1785,4598, # 1840
1953,2957,3103,3222,1728,4265,2044,3714,4599,2008,1701,3156,1551,  30,2268,4266, # 1856
5207,2027,4600,3589,5208, 501,5209,4267, 594,3478,2166,1822,3590,3479,3591,3223, # 1872
 829,2839,4268,5210,1680,3157,1225,4269,5211,3295,4601,4270,3158,2341,5212,4602, # 1888
4271,5213,4015,4016,5214,1848,2388,2606,3367,5215,4603, 374,4017, 652,4272,4273, # 1904
 375,1140, 798,5216,5217,5218,2366,4604,2269, 546,1659, 138,3051,2450,4605,5219, # 1920
2254, 612,1849, 910, 796,3844,1740,1371, 825,3845,3846,5220,2920,2562,5221, 692, # 1936
 444,3052,2634, 801,4606,4274,5222,1491, 244,1053,3053,4275,4276, 340,5223,4018, # 1952
1041,3005, 293,1168,  87,1357,5224,1539, 959,5225,2240, 721, 694,4277,3847, 219, # 1968
1478, 644,1417,3368,2666,1413,1401,1335,1389,4019,5226,5227,3006,2367,3159,1826, # 1984
 730,1515, 184,2840,  66,4607,5228,1660,2958, 246,3369, 378,1457, 226,3480, 975, # 2000
4020,2959,1264,3592, 674, 696,5229, 163,5230,1141,2422,2167, 713,3593,3370,4608, # 2016
4021,5231,5232,1186,  15,5233,1079,1070,5234,1522,3224,3594, 276,1050,2725, 758, # 2032
1126, 653,2960,3296,5235,2342, 889,3595,4022,3104,3007, 903,1250,4609,4023,3481, # 2048
3596,1342,1681,1718, 766,3297, 286,  89,2961,3715,5236,1713,5237,2607,3371,3008, # 2064
5238,2962,2219,3225,2880,5239,4610,2505,2533, 181, 387,1075,4024, 731,2190,3372, # 2080
5240,3298, 310, 313,3482,2304, 770,4278,  54,3054, 189,4611,3105,3848,4025,5241, # 2096
1230,1617,1850, 355,3597,4279,4612,3373, 111,4280,3716,1350,3160,3483,3055,4281, # 2112
2150,3299,3598,5242,2797,4026,4027,3009, 722,2009,5243,1071, 247,1207,2343,2478, # 2128
1378,4613,2010, 864,1437,1214,4614, 373,3849,1142,2220, 667,4615, 442,2763,2563, # 2144
3850,4028,1969,4282,3300,1840, 837, 170,1107, 934,1336,1883,5244,5245,2119,4283, # 2160
2841, 743,1569,5246,4616,4284, 582,2389,1418,3484,5247,1803,5248, 357,1395,1729, # 2176
3717,3301,2423,1564,2241,5249,3106,3851,1633,4617,1114,2086,4285,1532,5250, 482, # 2192
2451,4618,5251,5252,1492, 833,1466,5253,2726,3599,1641,2842,5254,1526,1272,3718, # 2208
4286,1686,1795, 416,2564,1903,1954,1804,5255,3852,2798,3853,1159,2321,5256,2881, # 2224
4619,1610,1584,3056,2424,2764, 443,3302,1163,3161,5257,5258,4029,5259,4287,2506, # 2240
3057,4620,4030,3162,2104,1647,3600,2011,1873,4288,5260,4289, 431,3485,5261, 250, # 2256
  97,  81,4290,5262,1648,1851,1558, 160, 848,5263, 866, 740,1694,5264,2204,2843, # 2272
3226,4291,4621,3719,1687, 950,2479, 426, 469,3227,3720,3721,4031,5265,5266,1188, # 2288
 424,1996, 861,3601,4292,3854,2205,2694, 168,1235,3602,4293,5267,2087,1674,4622, # 2304
3374,3303, 220,2565,1009,5268,3855, 670,3010, 332,1208, 717,5269,5270,3603,2452, # 2320
4032,3375,5271, 513,5272,1209,2882,3376,3163,4623,1080,5273,5274,5275,5276,2534, # 2336
3722,3604, 815,1587,4033,4034,5277,3605,3486,3856,1254,4624,1328,3058,1390,4035, # 2352
1741,4036,3857,4037,5278, 236,3858,2453,3304,5279,5280,3723,3859,1273,3860,4625, # 2368
5281, 308,5282,4626, 245,4627,1852,2480,1307,2583, 430, 715,2137,2454,5283, 270, # 2384
 199,2883,4038,5284,3606,2727,1753, 761,1754, 725,1661,1841,4628,3487,3724,5285, # 2400
5286, 587,  14,3305, 227,2608, 326, 480,2270, 943,2765,3607, 291, 650,1884,5287, # 2416
1702,1226, 102,1547,  62,3488, 904,4629,3489,1164,4294,5288,5289,1224,1548,2766, # 2432
 391, 498,1493,5290,1386,1419,5291,2056,1177,4630, 813, 880,1081,2368, 566,1145, # 2448
4631,2291,1001,1035,2566,2609,2242, 394,1286,5292,5293,2069,5294,  86,1494,1730, # 2464
4039, 491,1588, 745, 897,2963, 843,3377,4040,2767,2884,3306,1768, 998,2221,2070, # 2480
 397,1827,1195,1970,3725,3011,3378, 284,5295,3861,2507,2138,2120,1904,5296,4041, # 2496
2151,4042,4295,1036,3490,1905, 114,2567,4296, 209,1527,5297,5298,2964,2844,2635, # 2512
2390,2728,3164, 812,2568,5299,3307,5300,1559, 737,1885,3726,1210, 885,  28,2695, # 2528
3608,3862,5301,4297,1004,1780,4632,5302, 346,1982,2222,2696,4633,3863,1742, 797, # 2544
1642,4043,1934,1072,1384,2152, 896,4044,3308,3727,3228,2885,3609,5303,2569,1959, # 2560
4634,2455,1786,5304,5305,5306,4045,4298,1005,1308,3728,4299,2729,4635,4636,1528, # 2576
2610, 161,1178,4300,1983, 987,4637,1101,4301, 631,4046,1157,3229,2425,1343,1241, # 2592
1016,2243,2570, 372, 877,2344,2508,1160, 555,1935, 911,4047,5307, 466,1170, 169, # 2608
1051,2921,2697,3729,2481,3012,1182,2012,2571,1251,2636,5308, 992,2345,3491,1540, # 2624
2730,1201,2071,2406,1997,2482,5309,4638, 528,1923,2191,1503,1874,1570,2369,3379, # 2640
3309,5310, 557,1073,5311,1828,3492,2088,2271,3165,3059,3107, 767,3108,2799,4639, # 2656
1006,4302,4640,2346,1267,2179,3730,3230, 778,4048,3231,2731,1597,2667,5312,4641, # 2672
5313,3493,5314,5315,5316,3310,2698,1433,3311, 131,  95,1504,4049, 723,4303,3166, # 2688
1842,3610,2768,2192,4050,2028,2105,3731,5317,3013,4051,1218,5318,3380,3232,4052, # 2704
4304,2584, 248,1634,3864, 912,5319,2845,3732,3060,3865, 654,  53,5320,3014,5321, # 2720
1688,4642, 777,3494,1032,4053,1425,5322, 191, 820,2121,2846, 971,4643, 931,3233, # 2736
 135, 664, 783,3866,1998, 772,2922,1936,4054,3867,4644,2923,3234, 282,2732, 640, # 2752
1372,3495,1127, 922, 325,3381,5323,5324, 711,2045,5325,5326,4055,2223,2800,1937, # 2768
4056,3382,2224,2255,3868,2305,5327,4645,3869,1258,3312,4057,3235,2139,2965,4058, # 2784
4059,5328,2225, 258,3236,4646, 101,1227,5329,3313,1755,5330,1391,3314,5331,2924, # 2800
2057, 893,5332,5333,5334,1402,4305,2347,5335,5336,3237,3611,5337,5338, 878,1325, # 2816
1781,2801,4647, 259,1385,2585, 744,1183,2272,4648,5339,4060,2509,5340, 684,1024, # 2832
4306,5341, 472,3612,3496,1165,3315,4061,4062, 322,2153, 881, 455,1695,1152,1340, # 2848
 660, 554,2154,4649,1058,4650,4307, 830,1065,3383,4063,4651,1924,5342,1703,1919, # 2864
5343, 932,2273, 122,5344,4652, 947, 677,5345,3870,2637, 297,1906,1925,2274,4653, # 2880
2322,3316,5346,5347,4308,5348,4309,  84,4310, 112, 989,5349, 547,1059,4064, 701, # 2896
3613,1019,5350,4311,5351,3497, 942, 639, 457,2306,2456, 993,2966, 407, 851, 494, # 2912
4654,3384, 927,5352,1237,5353,2426,3385, 573,4312, 680, 921,2925,1279,1875, 285, # 2928
 790,1448,1984, 719,2168,5354,5355,4655,4065,4066,1649,5356,1541, 563,5357,1077, # 2944
5358,3386,3061,3498, 511,3015,4067,4068,3733,4069,1268,2572,3387,3238,4656,4657, # 2960
5359, 535,1048,1276,1189,2926,2029,3167,1438,1373,2847,2967,1134,2013,5360,4313, # 2976
1238,2586,3109,1259,5361, 700,5362,2968,3168,3734,4314,5363,4315,1146,1876,1907, # 2992
4658,2611,4070, 781,2427, 132,1589, 203, 147, 273,2802,2407, 898,1787,2155,4071, # 3008
4072,5364,3871,2803,5365,5366,4659,4660,5367,3239,5368,1635,3872, 965,5369,1805, # 3024
2699,1516,3614,1121,1082,1329,3317,4073,1449,3873,  65,1128,2848,2927,2769,1590, # 3040
3874,5370,5371,  12,2668,  45, 976,2587,3169,4661, 517,2535,1013,1037,3240,5372, # 3056
3875,2849,5373,3876,5374,3499,5375,2612, 614,1999,2323,3877,3110,2733,2638,5376, # 3072
2588,4316, 599,1269,5377,1811,3735,5378,2700,3111, 759,1060, 489,1806,3388,3318, # 3088
1358,5379,5380,2391,1387,1215,2639,2256, 490,5381,5382,4317,1759,2392,2348,5383, # 3104
4662,3878,1908,4074,2640,1807,3241,4663,3500,3319,2770,2349, 874,5384,5385,3501, # 3120
3736,1859,  91,2928,3737,3062,3879,4664,5386,3170,4075,2669,5387,3502,1202,1403, # 3136
3880,2969,2536,1517,2510,4665,3503,2511,5388,4666,5389,2701,1886,1495,1731,4076, # 3152
2370,4667,5390,2030,5391,5392,4077,2702,1216, 237,2589,4318,2324,4078,3881,4668, # 3168
4669,2703,3615,3504, 445,4670,5393,5394,5395,5396,2771,  61,4079,3738,1823,4080, # 3184
5397, 687,2046, 935, 925, 405,2670, 703,1096,1860,2734,4671,4081,1877,1367,2704, # 3200
3389, 918,2106,1782,2483, 334,3320,1611,1093,4672, 564,3171,3505,3739,3390, 945, # 3216
2641,2058,4673,5398,1926, 872,4319,5399,3506,2705,3112, 349,4320,3740,4082,4674, # 3232
3882,4321,3741,2156,4083,4675,4676,4322,4677,2408,2047, 782,4084, 400, 251,4323, # 3248
1624,5400,5401, 277,3742, 299,1265, 476,1191,3883,2122,4324,4325,1109, 205,5402, # 3264
2590,1000,2157,3616,1861,5403,5404,5405,4678,5406,4679,2573, 107,2484,2158,4085, # 3280
3507,3172,5407,1533, 541,1301, 158, 753,4326,2886,3617,5408,1696, 370,1088,4327, # 3296
4680,3618, 579, 327, 440, 162,2244, 269,1938,1374,3508, 968,3063,  56,1396,3113, # 3312
2107,3321,3391,5409,1927,2159,4681,3016,5410,3619,5411,5412,3743,4682,2485,5413, # 3328
2804,5414,1650,4683,5415,2613,5416,5417,4086,2671,3392,1149,3393,4087,3884,4088, # 3344
5418,1076,  49,5419, 951,3242,3322,3323, 450,2850, 920,5420,1812,2805,2371,4328, # 3360
1909,1138,2372,3885,3509,5421,3243,4684,1910,1147,1518,2428,4685,3886,5422,4686, # 3376
2393,2614, 260,1796,3244,5423,5424,3887,3324, 708,5425,3620,1704,5426,3621,1351, # 3392
1618,3394,3017,1887, 944,4329,3395,4330,3064,3396,4331,5427,3744, 422, 413,1714, # 3408
3325, 500,2059,2350,4332,2486,5428,1344,1911, 954,5429,1668,5430,5431,4089,2409, # 3424
4333,3622,3888,4334,5432,2307,1318,2512,3114, 133,3115,2887,4687, 629,  31,2851, # 3440
2706,3889,4688, 850, 949,4689,4090,2970,1732,2089,4335,1496,1853,5433,4091, 620, # 3456
3245, 981,1242,3745,3397,1619,3746,1643,3326,2140,2457,1971,1719,3510,2169,5434, # 3472
3246,5435,5436,3398,1829,5437,1277,4690,1565,2048,5438,1636,3623,3116,5439, 869, # 3488
2852, 655,3890,3891,3117,4092,3018,3892,1310,3624,4691,5440,5441,5442,1733, 558, # 3504
4692,3747, 335,1549,3065,1756,4336,3748,1946,3511,1830,1291,1192, 470,2735,2108, # 3520
2806, 913,1054,4093,5443,1027,5444,3066,4094,4693, 982,2672,3399,3173,3512,3247, # 3536
3248,1947,2807,5445, 571,4694,5446,1831,5447,3625,2591,1523,2429,5448,2090, 984, # 3552
4695,3749,1960,5449,3750, 852, 923,2808,3513,3751, 969,1519, 999,2049,2325,1705, # 3568
5450,3118, 615,1662, 151, 597,4095,2410,2326,1049, 275,4696,3752,4337, 568,3753, # 3584
3626,2487,4338,3754,5451,2430,2275, 409,3249,5452,1566,2888,3514,1002, 769,2853, # 3600
 194,2091,3174,3755,2226,3327,4339, 628,1505,5453,5454,1763,2180,3019,4096, 521, # 3616
1161,2592,1788,2206,2411,4697,4097,1625,4340,4341, 412,  42,3119, 464,5455,2642, # 3632
4698,3400,1760,1571,2889,3515,2537,1219,2207,3893,2643,2141,2373,4699,4700,3328, # 3648
1651,3401,3627,5456,5457,3628,2488,3516,5458,3756,5459,5460,2276,2092, 460,5461, # 3664
4701,5462,3020, 962, 588,3629, 289,3250,2644,1116,  52,5463,3067,1797,5464,5465, # 3680
5466,1467,5467,1598,1143,3757,4342,1985,1734,1067,4702,1280,3402, 465,4703,1572, # 3696
 510,5468,1928,2245,1813,1644,3630,5469,4704,3758,5470,5471,2673,1573,1534,5472, # 3712
5473, 536,1808,1761,3517,3894,3175,2645,5474,5475,5476,4705,3518,2929,1912,2809, # 3728
5477,3329,1122, 377,3251,5478, 360,5479,5480,4343,1529, 551,5481,2060,3759,1769, # 3744
2431,5482,2930,4344,3330,3120,2327,2109,2031,4706,1404, 136,1468,1479, 672,1171, # 3760
3252,2308, 271,3176,5483,2772,5484,2050, 678,2736, 865,1948,4707,5485,2014,4098, # 3776
2971,5486,2737,2227,1397,3068,3760,4708,4709,1735,2931,3403,3631,5487,3895, 509, # 3792
2854,2458,2890,3896,5488,5489,3177,3178,4710,4345,2538,4711,2309,1166,1010, 552, # 3808
 681,1888,5490,5491,2972,2973,4099,1287,1596,1862,3179, 358, 453, 736, 175, 478, # 3824
1117, 905,1167,1097,5492,1854,1530,5493,1706,5494,2181,3519,2292,3761,3520,3632, # 3840
4346,2093,4347,5495,3404,1193,2489,4348,1458,2193,2208,1863,1889,1421,3331,2932, # 3856
3069,2182,3521, 595,2123,5496,4100,5497,5498,4349,1707,2646, 223,3762,1359, 751, # 3872
3121, 183,3522,5499,2810,3021, 419,2374, 633, 704,3897,2394, 241,5500,5501,5502, # 3888
 838,3022,3763,2277,2773,2459,3898,1939,2051,4101,1309,3122,2246,1181,5503,1136, # 3904
2209,3899,2375,1446,4350,2310,4712,5504,5505,4351,1055,2615, 484,3764,5506,4102, # 3920
 625,4352,2278,3405,1499,4353,4103,5507,4104,4354,3253,2279,2280,3523,5508,5509, # 3936
2774, 808,2616,3765,3406,4105,4355,3123,2539, 526,3407,3900,4356, 955,5510,1620, # 3952
4357,2647,2432,5511,1429,3766,1669,1832, 994, 928,5512,3633,1260,5513,5514,5515, # 3968
1949,2293, 741,2933,1626,4358,2738,2460, 867,1184, 362,3408,1392,5516,5517,4106, # 3984
4359,1770,1736,3254,2934,4713,4714,1929,2707,1459,1158,5518,3070,3409,2891,1292, # 4000
1930,2513,2855,3767,1986,1187,2072,2015,2617,4360,5519,2574,2514,2170,3768,2490, # 4016
3332,5520,3769,4715,5521,5522, 666,1003,3023,1022,3634,4361,5523,4716,1814,2257, # 4032
 574,3901,1603, 295,1535, 705,3902,4362, 283, 858, 417,5524,5525,3255,4717,4718, # 4048
3071,1220,1890,1046,2281,2461,4107,1393,1599, 689,2575, 388,4363,5526,2491, 802, # 4064
5527,2811,3903,2061,1405,2258,5528,4719,3904,2110,1052,1345,3256,1585,5529, 809, # 4080
5530,5531,5532, 575,2739,3524, 956,1552,1469,1144,2328,5533,2329,1560,2462,3635, # 4096
3257,4108, 616,2210,4364,3180,2183,2294,5534,1833,5535,3525,4720,5536,1319,3770, # 4112
3771,1211,3636,1023,3258,1293,2812,5537,5538,5539,3905, 607,2311,3906, 762,2892, # 4128
1439,4365,1360,4721,1485,3072,5540,4722,1038,4366,1450,2062,2648,4367,1379,4723, # 4144
2593,5541,5542,4368,1352,1414,2330,2935,1172,5543,5544,3907,3908,4724,1798,1451, # 4160
5545,5546,5547,5548,2936,4109,4110,2492,2351, 411,4111,4112,3637,3333,3124,4725, # 4176
1561,2674,1452,4113,1375,5549,5550,  47,2974, 316,5551,1406,1591,2937,3181,5552, # 4192
1025,2142,3125,3182, 354,2740, 884,2228,4369,2412, 508,3772, 726,3638, 996,2433, # 4208
3639, 729,5553, 392,2194,1453,4114,4726,3773,5554,5555,2463,3640,2618,1675,2813, # 4224
 919,2352,2975,2353,1270,4727,4115,  73,5556,5557, 647,5558,3259,2856,2259,1550, # 4240
1346,3024,5559,1332, 883,3526,5560,5561,5562,5563,3334,2775,5564,1212, 831,1347, # 4256
4370,4728,2331,3909,1864,3073, 720,3910,4729,4730,3911,5565,4371,5566,5567,4731, # 4272
5568,5569,1799,4732,3774,2619,4733,3641,1645,2376,4734,5570,2938, 669,2211,2675, # 4288
2434,5571,2893,5572,5573,1028,3260,5574,4372,2413,5575,2260,1353,5576,5577,4735, # 4304
3183, 518,5578,4116,5579,4373,1961,5580,2143,4374,5581,5582,3025,2354,2355,3912, # 4320
 516,1834,1454,4117,2708,4375,4736,2229,2620,1972,1129,3642,5583,2776,5584,2976, # 4336
1422, 577,1470,3026,1524,3410,5585,5586, 432,4376,3074,3527,5587,2594,1455,2515, # 4352
2230,1973,1175,5588,1020,2741,4118,3528,4737,5589,2742,5590,1743,1361,3075,3529, # 4368
2649,4119,4377,4738,2295, 895, 924,4378,2171, 331,2247,3076, 166,1627,3077,1098, # 4384
5591,1232,2894,2231,3411,4739, 657, 403,1196,2377, 542,3775,3412,1600,4379,3530, # 4400
5592,4740,2777,3261, 576, 530,1362,4741,4742,2540,2676,3776,4120,5593, 842,3913, # 4416
5594,2814,2032,1014,4121, 213,2709,3413, 665, 621,4380,5595,3777,2939,2435,5596, # 4432
2436,3335,3643,3414,4743,4381,2541,4382,4744,3644,1682,4383,3531,1380,5597, 724, # 4448
2282, 600,1670,5598,1337,1233,4745,3126,2248,5599,1621,4746,5600, 651,4384,5601, # 4464
1612,4385,2621,5602,2857,5603,2743,2312,3078,5604, 716,2464,3079, 174,1255,2710, # 4480
4122,3645, 548,1320,1398, 728,4123,1574,5605,1891,1197,3080,4124,5606,3081,3082, # 4496
3778,3646,3779, 747,5607, 635,4386,4747,5608,5609,5610,4387,5611,5612,4748,5613, # 4512
3415,4749,2437, 451,5614,3780,2542,2073,4388,2744,4389,4125,5615,1764,4750,5616, # 4528
4390, 350,4751,2283,2395,2493,5617,4391,4126,2249,1434,4127, 488,4752, 458,4392, # 4544
4128,3781, 771,1330,2396,3914,2576,3184,2160,2414,1553,2677,3185,4393,5618,2494, # 4560
2895,2622,1720,2711,4394,3416,4753,5619,2543,4395,5620,3262,4396,2778,5621,2016, # 4576
2745,5622,1155,1017,3782,3915,5623,3336,2313, 201,1865,4397,1430,5624,4129,5625, # 4592
5626,5627,5628,5629,4398,1604,5630, 414,1866, 371,2595,4754,4755,3532,2017,3127, # 4608
4756,1708, 960,4399, 887, 389,2172,1536,1663,1721,5631,2232,4130,2356,2940,1580, # 4624
5632,5633,1744,4757,2544,4758,4759,5634,4760,5635,2074,5636,4761,3647,3417,2896, # 4640
4400,5637,4401,2650,3418,2815, 673,2712,2465, 709,3533,4131,3648,4402,5638,1148, # 4656
 502, 634,5639,5640,1204,4762,3649,1575,4763,2623,3783,5641,3784,3128, 948,3263, # 4672
 121,1745,3916,1110,5642,4403,3083,2516,3027,4132,3785,1151,1771,3917,1488,4133, # 4688
1987,5643,2438,3534,5644,5645,2094,5646,4404,3918,1213,1407,2816, 531,2746,2545, # 4704
3264,1011,1537,4764,2779,4405,3129,1061,5647,3786,3787,1867,2897,5648,2018, 120, # 4720
4406,4407,2063,3650,3265,2314,3919,2678,3419,1955,4765,4134,5649,3535,1047,2713, # 4736
1266,5650,1368,4766,2858, 649,3420,3920,2546,2747,1102,2859,2679,5651,5652,2000, # 4752
5653,1111,3651,2977,5654,2495,3921,3652,2817,1855,3421,3788,5655,5656,3422,2415, # 4768
2898,3337,3266,3653,5657,2577,5658,3654,2818,4135,1460, 856,5659,3655,5660,2899, # 4784
2978,5661,2900,3922,5662,4408, 632,2517, 875,3923,1697,3924,2296,5663,5664,4767, # 4800
3028,1239, 580,4768,4409,5665, 914, 936,2075,1190,4136,1039,2124,5666,5667,5668, # 4816
5669,3423,1473,5670,1354,4410,3925,4769,2173,3084,4137, 915,3338,4411,4412,3339, # 4832
1605,1835,5671,2748, 398,3656,4413,3926,4138, 328,1913,2860,4139,3927,1331,4414, # 4848
3029, 937,4415,5672,3657,4140,4141,3424,2161,4770,3425, 524, 742, 538,3085,1012, # 4864
5673,5674,3928,2466,5675, 658,1103, 225,3929,5676,5677,4771,5678,4772,5679,3267, # 4880
1243,5680,4142, 963,2250,4773,5681,2714,3658,3186,5682,5683,2596,2332,5684,4774, # 4896
5685,5686,5687,3536, 957,3426,2547,2033,1931,2941,2467, 870,2019,3659,1746,2780, # 4912
2781,2439,2468,5688,3930,5689,3789,3130,3790,3537,3427,3791,5690,1179,3086,5691, # 4928
3187,2378,4416,3792,2548,3188,3131,2749,4143,5692,3428,1556,2549,2297, 977,2901, # 4944
2034,4144,1205,3429,5693,1765,3430,3189,2125,1271, 714,1689,4775,3538,5694,2333, # 4960
3931, 533,4417,3660,2184, 617,5695,2469,3340,3539,2315,5696,5697,3190,5698,5699, # 4976
3932,1988, 618, 427,2651,3540,3431,5700,5701,1244,1690,5702,2819,4418,4776,5703, # 4992
3541,4777,5704,2284,1576, 473,3661,4419,3432, 972,5705,3662,5706,3087,5707,5708, # 5008
4778,4779,5709,3793,4145,4146,5710, 153,4780, 356,5711,1892,2902,4420,2144, 408, # 5024
 803,2357,5712,3933,5713,4421,1646,2578,2518,4781,4782,3934,5714,3935,4422,5715, # 5040
2416,3433, 752,5716,5717,1962,3341,2979,5718, 746,3030,2470,4783,4423,3794, 698, # 5056
4784,1893,4424,3663,2550,4785,3664,3936,5719,3191,3434,5720,1824,1302,4147,2715, # 5072
3937,1974,4425,5721,4426,3192, 823,1303,1288,1236,2861,3542,4148,3435, 774,3938, # 5088
5722,1581,4786,1304,2862,3939,4787,5723,2440,2162,1083,3268,4427,4149,4428, 344, # 5104
1173, 288,2316, 454,1683,5724,5725,1461,4788,4150,2597,5726,5727,4789, 985, 894, # 5120
5728,3436,3193,5729,1914,2942,3795,1989,5730,2111,1975,5731,4151,5732,2579,1194, # 5136
 425,5733,4790,3194,1245,3796,4429,5734,5735,2863,5736, 636,4791,1856,3940, 760, # 5152
1800,5737,4430,2212,1508,4792,4152,1894,1684,2298,5738,5739,4793,4431,4432,2213, # 5168
 479,5740,5741, 832,5742,4153,2496,5743,2980,2497,3797, 990,3132, 627,1815,2652, # 5184
4433,1582,4434,2126,2112,3543,4794,5744, 799,4435,3195,5745,4795,2113,1737,3031, # 5200
1018, 543, 754,4436,3342,1676,4796,4797,4154,4798,1489,5746,3544,5747,2624,2903, # 5216
4155,5748,5749,2981,5750,5751,5752,5753,3196,4799,4800,2185,1722,5754,3269,3270, # 5232
1843,3665,1715, 481, 365,1976,1857,5755,5756,1963,2498,4801,5757,2127,3666,3271, # 5248
 433,1895,2064,2076,5758, 602,2750,5759,5760,5761,5762,5763,3032,1628,3437,5764, # 5264
3197,4802,4156,2904,4803,2519,5765,2551,2782,5766,5767,5768,3343,4804,2905,5769, # 5280
4805,5770,2864,4806,4807,1221,2982,4157,2520,5771,5772,5773,1868,1990,5774,5775, # 5296
5776,1896,5777,5778,4808,1897,4158, 318,5779,2095,4159,4437,5780,5781, 485,5782, # 5312
 938,3941, 553,2680, 116,5783,3942,3667,5784,3545,2681,2783,3438,3344,2820,5785, # 5328
3668,2943,4160,1747,2944,2983,5786,5787, 207,5788,4809,5789,4810,2521,5790,3033, # 5344
 890,3669,3943,5791,1878,3798,3439,5792,2186,2358,3440,1652,5793,5794,5795, 941, # 5360
2299, 208,3546,4161,2020, 330,4438,3944,2906,2499,3799,4439,4811,5796,5797,5798, # 5376
)

_vendor/chardet/eucjpprober.py000064400000007245151733136240012521 0ustar00######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################

from .enums import ProbingState, MachineState
from .mbcharsetprober import MultiByteCharSetProber
from .codingstatemachine import CodingStateMachine
from .chardistribution import EUCJPDistributionAnalysis
from .jpcntx import EUCJPContextAnalysis
from .mbcssm import EUCJP_SM_MODEL


class EUCJPProber(MultiByteCharSetProber):
    """Prober for the EUC-JP encoding.

    Combines a coding state machine (byte-sequence validity), a character
    distribution analyzer and a contextual analyzer, and reports how likely
    the fed bytes are to be EUC-JP encoded Japanese text.
    """

    def __init__(self):
        super(EUCJPProber, self).__init__()
        self.coding_sm = CodingStateMachine(EUCJP_SM_MODEL)
        self.distribution_analyzer = EUCJPDistributionAnalysis()
        self.context_analyzer = EUCJPContextAnalysis()
        self.reset()

    def reset(self):
        """Reset prober state, including the contextual analyzer."""
        super(EUCJPProber, self).reset()
        self.context_analyzer.reset()

    @property
    def charset_name(self):
        return "EUC-JP"

    @property
    def language(self):
        return "Japanese"

    def feed(self, byte_str):
        """Feed a chunk of bytes and return the current probing state.

        :param byte_str: bytes-like chunk of the input stream (may be empty)
        :returns: the current ProbingState
        """
        # Robustness fix: the original indexed byte_str[-1] unconditionally,
        # which raised IndexError when fed an empty chunk.
        if not byte_str:
            return self.state

        for i in range(len(byte_str)):
            # PY3K: byte_str is a byte array, so byte_str[i] is an int, not a byte
            coding_state = self.coding_sm.next_state(byte_str[i])
            if coding_state == MachineState.ERROR:
                self.logger.debug('%s %s prober hit error at byte %s',
                                  self.charset_name, self.language, i)
                self._state = ProbingState.NOT_ME
                break
            elif coding_state == MachineState.ITS_ME:
                self._state = ProbingState.FOUND_IT
                break
            elif coding_state == MachineState.START:
                char_len = self.coding_sm.get_current_charlen()
                if i == 0:
                    # The first byte of this chunk may complete a character
                    # begun in the previous chunk; stitch it to the byte
                    # saved in _last_char.
                    self._last_char[1] = byte_str[0]
                    self.context_analyzer.feed(self._last_char, char_len)
                    self.distribution_analyzer.feed(self._last_char, char_len)
                else:
                    self.context_analyzer.feed(byte_str[i - 1:i + 1],
                                               char_len)
                    self.distribution_analyzer.feed(byte_str[i - 1:i + 1],
                                                    char_len)

        # Remember the last byte so a character split across chunk
        # boundaries can be reassembled on the next call.
        self._last_char[0] = byte_str[-1]

        if self.state == ProbingState.DETECTING:
            if (self.context_analyzer.got_enough_data() and
               (self.get_confidence() > self.SHORTCUT_THRESHOLD)):
                self._state = ProbingState.FOUND_IT

        return self.state

    def get_confidence(self):
        """Return the larger of the contextual and distribution confidences."""
        context_conf = self.context_analyzer.get_confidence()
        distrib_conf = self.distribution_analyzer.get_confidence()
        return max(context_conf, distrib_conf)
_vendor/chardet/compat.py000064400000002156151733136240011460 0ustar00######################## BEGIN LICENSE BLOCK ########################
# Contributor(s):
#   Dan Blanchard
#   Ian Cordasco
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################

import sys


# Interpreter-version flags and cross-version string-type aliases.
PY2 = sys.version_info < (3, 0)
PY3 = not PY2
if PY2:
    base_str = (str, unicode)
    text_type = unicode
else:
    base_str = (bytes, str)
    text_type = str
_vendor/chardet/euckrfreq.py000064400000032352151733136250012166 0ustar00######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################

# Sampled from about 20M of text material, including literature and computer technology

# 128  --> 0.79
# 256  --> 0.92
# 512  --> 0.986
# 1024 --> 0.99944
# 2048 --> 0.99999
#
# Ideal Distribution Ratio = 0.98653 / (1-0.98653) = 73.24
# Random Distribution Ratio = 512 / (2350-512) = 0.279.
#
# Typical Distribution Ratio

# Ratio between the aggregate frequency of the most common ("typical")
# characters and the rest; genuine EUC-KR text is expected to exceed this.
EUCKR_TYPICAL_DISTRIBUTION_RATIO = 6.0

# Number of entries in EUCKR_CHAR_TO_FREQ_ORDER below.
EUCKR_TABLE_SIZE = 2352

# Char to FreqOrder table: index is a character's position in the EUC-KR
# code table, value is its frequency rank in the sampled corpus
# (lower rank = more frequent).
EUCKR_CHAR_TO_FREQ_ORDER = (
  13, 130, 120,1396, 481,1719,1720, 328, 609, 212,1721, 707, 400, 299,1722,  87,
1397,1723, 104, 536,1117,1203,1724,1267, 685,1268, 508,1725,1726,1727,1728,1398,
1399,1729,1730,1731, 141, 621, 326,1057, 368,1732, 267, 488,  20,1733,1269,1734,
 945,1400,1735,  47, 904,1270,1736,1737, 773, 248,1738, 409, 313, 786, 429,1739,
 116, 987, 813,1401, 683,  75,1204, 145,1740,1741,1742,1743,  16, 847, 667, 622,
 708,1744,1745,1746, 966, 787, 304, 129,1747,  60, 820, 123, 676,1748,1749,1750,
1751, 617,1752, 626,1753,1754,1755,1756, 653,1757,1758,1759,1760,1761,1762, 856,
 344,1763,1764,1765,1766,  89, 401, 418, 806, 905, 848,1767,1768,1769, 946,1205,
 709,1770,1118,1771, 241,1772,1773,1774,1271,1775, 569,1776, 999,1777,1778,1779,
1780, 337, 751,1058,  28, 628, 254,1781, 177, 906, 270, 349, 891,1079,1782,  19,
1783, 379,1784, 315,1785, 629, 754,1402, 559,1786, 636, 203,1206,1787, 710, 567,
1788, 935, 814,1789,1790,1207, 766, 528,1791,1792,1208,1793,1794,1795,1796,1797,
1403,1798,1799, 533,1059,1404,1405,1156,1406, 936, 884,1080,1800, 351,1801,1802,
1803,1804,1805, 801,1806,1807,1808,1119,1809,1157, 714, 474,1407,1810, 298, 899,
 885,1811,1120, 802,1158,1812, 892,1813,1814,1408, 659,1815,1816,1121,1817,1818,
1819,1820,1821,1822, 319,1823, 594, 545,1824, 815, 937,1209,1825,1826, 573,1409,
1022,1827,1210,1828,1829,1830,1831,1832,1833, 556, 722, 807,1122,1060,1834, 697,
1835, 900, 557, 715,1836,1410, 540,1411, 752,1159, 294, 597,1211, 976, 803, 770,
1412,1837,1838,  39, 794,1413, 358,1839, 371, 925,1840, 453, 661, 788, 531, 723,
 544,1023,1081, 869,  91,1841, 392, 430, 790, 602,1414, 677,1082, 457,1415,1416,
1842,1843, 475, 327,1024,1417, 795, 121,1844, 733, 403,1418,1845,1846,1847, 300,
 119, 711,1212, 627,1848,1272, 207,1849,1850, 796,1213, 382,1851, 519,1852,1083,
 893,1853,1854,1855, 367, 809, 487, 671,1856, 663,1857,1858, 956, 471, 306, 857,
1859,1860,1160,1084,1861,1862,1863,1864,1865,1061,1866,1867,1868,1869,1870,1871,
 282,  96, 574,1872, 502,1085,1873,1214,1874, 907,1875,1876, 827, 977,1419,1420,
1421, 268,1877,1422,1878,1879,1880, 308,1881,   2, 537,1882,1883,1215,1884,1885,
 127, 791,1886,1273,1423,1887,  34, 336, 404, 643,1888, 571, 654, 894, 840,1889,
   0, 886,1274, 122, 575, 260, 908, 938,1890,1275, 410, 316,1891,1892, 100,1893,
1894,1123,  48,1161,1124,1025,1895, 633, 901,1276,1896,1897, 115, 816,1898, 317,
1899, 694,1900, 909, 734,1424, 572, 866,1425, 691,  85, 524,1010, 543, 394, 841,
1901,1902,1903,1026,1904,1905,1906,1907,1908,1909,  30, 451, 651, 988, 310,1910,
1911,1426, 810,1216,  93,1912,1913,1277,1217,1914, 858, 759,  45,  58, 181, 610,
 269,1915,1916, 131,1062, 551, 443,1000, 821,1427, 957, 895,1086,1917,1918, 375,
1919, 359,1920, 687,1921, 822,1922, 293,1923,1924,  40, 662, 118, 692,  29, 939,
 887, 640, 482, 174,1925,  69,1162, 728,1428, 910,1926,1278,1218,1279, 386, 870,
 217, 854,1163, 823,1927,1928,1929,1930, 834,1931,  78,1932, 859,1933,1063,1934,
1935,1936,1937, 438,1164, 208, 595,1938,1939,1940,1941,1219,1125,1942, 280, 888,
1429,1430,1220,1431,1943,1944,1945,1946,1947,1280, 150, 510,1432,1948,1949,1950,
1951,1952,1953,1954,1011,1087,1955,1433,1043,1956, 881,1957, 614, 958,1064,1065,
1221,1958, 638,1001, 860, 967, 896,1434, 989, 492, 553,1281,1165,1959,1282,1002,
1283,1222,1960,1961,1962,1963,  36, 383, 228, 753, 247, 454,1964, 876, 678,1965,
1966,1284, 126, 464, 490, 835, 136, 672, 529, 940,1088,1435, 473,1967,1968, 467,
  50, 390, 227, 587, 279, 378, 598, 792, 968, 240, 151, 160, 849, 882,1126,1285,
 639,1044, 133, 140, 288, 360, 811, 563,1027, 561, 142, 523,1969,1970,1971,   7,
 103, 296, 439, 407, 506, 634, 990,1972,1973,1974,1975, 645,1976,1977,1978,1979,
1980,1981, 236,1982,1436,1983,1984,1089, 192, 828, 618, 518,1166, 333,1127,1985,
 818,1223,1986,1987,1988,1989,1990,1991,1992,1993, 342,1128,1286, 746, 842,1994,
1995, 560, 223,1287,  98,   8, 189, 650, 978,1288,1996,1437,1997,  17, 345, 250,
 423, 277, 234, 512, 226,  97, 289,  42, 167,1998, 201,1999,2000, 843, 836, 824,
 532, 338, 783,1090, 182, 576, 436,1438,1439, 527, 500,2001, 947, 889,2002,2003,
2004,2005, 262, 600, 314, 447,2006, 547,2007, 693, 738,1129,2008,  71,1440, 745,
 619, 688,2009, 829,2010,2011, 147,2012,  33, 948,2013,2014,  74, 224,2015,  61,
 191, 918, 399, 637,2016,1028,1130, 257, 902,2017,2018,2019,2020,2021,2022,2023,
2024,2025,2026, 837,2027,2028,2029,2030, 179, 874, 591,  52, 724, 246,2031,2032,
2033,2034,1167, 969,2035,1289, 630, 605, 911,1091,1168,2036,2037,2038,1441, 912,
2039, 623,2040,2041, 253,1169,1290,2042,1442, 146, 620, 611, 577, 433,2043,1224,
 719,1170, 959, 440, 437, 534,  84, 388, 480,1131, 159, 220, 198, 679,2044,1012,
 819,1066,1443, 113,1225, 194, 318,1003,1029,2045,2046,2047,2048,1067,2049,2050,
2051,2052,2053,  59, 913, 112,2054, 632,2055, 455, 144, 739,1291,2056, 273, 681,
 499,2057, 448,2058,2059, 760,2060,2061, 970, 384, 169, 245,1132,2062,2063, 414,
1444,2064,2065,  41, 235,2066, 157, 252, 877, 568, 919, 789, 580,2067, 725,2068,
2069,1292,2070,2071,1445,2072,1446,2073,2074,  55, 588,  66,1447, 271,1092,2075,
1226,2076, 960,1013, 372,2077,2078,2079,2080,2081,1293,2082,2083,2084,2085, 850,
2086,2087,2088,2089,2090, 186,2091,1068, 180,2092,2093,2094, 109,1227, 522, 606,
2095, 867,1448,1093, 991,1171, 926, 353,1133,2096, 581,2097,2098,2099,1294,1449,
1450,2100, 596,1172,1014,1228,2101,1451,1295,1173,1229,2102,2103,1296,1134,1452,
 949,1135,2104,2105,1094,1453,1454,1455,2106,1095,2107,2108,2109,2110,2111,2112,
2113,2114,2115,2116,2117, 804,2118,2119,1230,1231, 805,1456, 405,1136,2120,2121,
2122,2123,2124, 720, 701,1297, 992,1457, 927,1004,2125,2126,2127,2128,2129,2130,
  22, 417,2131, 303,2132, 385,2133, 971, 520, 513,2134,1174,  73,1096, 231, 274,
 962,1458, 673,2135,1459,2136, 152,1137,2137,2138,2139,2140,1005,1138,1460,1139,
2141,2142,2143,2144,  11, 374, 844,2145, 154,1232,  46,1461,2146, 838, 830, 721,
1233, 106,2147,  90, 428, 462, 578, 566,1175, 352,2148,2149, 538,1234, 124,1298,
2150,1462, 761, 565,2151, 686,2152, 649,2153,  72, 173,2154, 460, 415,2155,1463,
2156,1235, 305,2157,2158,2159,2160,2161,2162, 579,2163,2164,2165,2166,2167, 747,
2168,2169,2170,2171,1464, 669,2172,2173,2174,2175,2176,1465,2177,  23, 530, 285,
2178, 335, 729,2179, 397,2180,2181,2182,1030,2183,2184, 698,2185,2186, 325,2187,
2188, 369,2189, 799,1097,1015, 348,2190,1069, 680,2191, 851,1466,2192,2193,  10,
2194, 613, 424,2195, 979, 108, 449, 589,  27, 172,  81,1031,  80, 774, 281, 350,
1032, 525, 301, 582,1176,2196, 674,1045,2197,2198,1467, 730, 762,2199,2200,2201,
2202,1468,2203, 993,2204,2205, 266,1070, 963,1140,2206,2207,2208, 664,1098, 972,
2209,2210,2211,1177,1469,1470, 871,2212,2213,2214,2215,2216,1471,2217,2218,2219,
2220,2221,2222,2223,2224,2225,2226,2227,1472,1236,2228,2229,2230,2231,2232,2233,
2234,2235,1299,2236,2237, 200,2238, 477, 373,2239,2240, 731, 825, 777,2241,2242,
2243, 521, 486, 548,2244,2245,2246,1473,1300,  53, 549, 137, 875,  76, 158,2247,
1301,1474, 469, 396,1016, 278, 712,2248, 321, 442, 503, 767, 744, 941,1237,1178,
1475,2249,  82, 178,1141,1179, 973,2250,1302,2251, 297,2252,2253, 570,2254,2255,
2256,  18, 450, 206,2257, 290, 292,1142,2258, 511, 162,  99, 346, 164, 735,2259,
1476,1477,   4, 554, 343, 798,1099,2260,1100,2261,  43, 171,1303, 139, 215,2262,
2263, 717, 775,2264,1033, 322, 216,2265, 831,2266, 149,2267,1304,2268,2269, 702,
1238, 135, 845, 347, 309,2270, 484,2271, 878, 655, 238,1006,1478,2272,  67,2273,
 295,2274,2275, 461,2276, 478, 942, 412,2277,1034,2278,2279,2280, 265,2281, 541,
2282,2283,2284,2285,2286,  70, 852,1071,2287,2288,2289,2290,  21,  56, 509, 117,
 432,2291,2292, 331, 980, 552,1101, 148, 284, 105, 393,1180,1239, 755,2293, 187,
2294,1046,1479,2295, 340,2296,  63,1047, 230,2297,2298,1305, 763,1306, 101, 800,
 808, 494,2299,2300,2301, 903,2302,  37,1072,  14,   5,2303,  79, 675,2304, 312,
2305,2306,2307,2308,2309,1480,   6,1307,2310,2311,2312,   1, 470,  35,  24, 229,
2313, 695, 210,  86, 778,  15, 784, 592, 779,  32,  77, 855, 964,2314, 259,2315,
 501, 380,2316,2317,  83, 981, 153, 689,1308,1481,1482,1483,2318,2319, 716,1484,
2320,2321,2322,2323,2324,2325,1485,2326,2327, 128,  57,  68, 261,1048, 211, 170,
1240,  31,2328,  51, 435, 742,2329,2330,2331, 635,2332, 264, 456,2333,2334,2335,
 425,2336,1486, 143, 507, 263, 943,2337, 363, 920,1487, 256,1488,1102, 243, 601,
1489,2338,2339,2340,2341,2342,2343,2344, 861,2345,2346,2347,2348,2349,2350, 395,
2351,1490,1491,  62, 535, 166, 225,2352,2353, 668, 419,1241, 138, 604, 928,2354,
1181,2355,1492,1493,2356,2357,2358,1143,2359, 696,2360, 387, 307,1309, 682, 476,
2361,2362, 332,  12, 222, 156,2363, 232,2364, 641, 276, 656, 517,1494,1495,1035,
 416, 736,1496,2365,1017, 586,2366,2367,2368,1497,2369, 242,2370,2371,2372,1498,
2373, 965, 713,2374,2375,2376,2377, 740, 982,1499, 944,1500,1007,2378,2379,1310,
1501,2380,2381,2382, 785, 329,2383,2384,1502,2385,2386,2387, 932,2388,1503,2389,
2390,2391,2392,1242,2393,2394,2395,2396,2397, 994, 950,2398,2399,2400,2401,1504,
1311,2402,2403,2404,2405,1049, 749,2406,2407, 853, 718,1144,1312,2408,1182,1505,
2409,2410, 255, 516, 479, 564, 550, 214,1506,1507,1313, 413, 239, 444, 339,1145,
1036,1508,1509,1314,1037,1510,1315,2411,1511,2412,2413,2414, 176, 703, 497, 624,
 593, 921, 302,2415, 341, 165,1103,1512,2416,1513,2417,2418,2419, 376,2420, 700,
2421,2422,2423, 258, 768,1316,2424,1183,2425, 995, 608,2426,2427,2428,2429, 221,
2430,2431,2432,2433,2434,2435,2436,2437, 195, 323, 726, 188, 897, 983,1317, 377,
 644,1050, 879,2438, 452,2439,2440,2441,2442,2443,2444, 914,2445,2446,2447,2448,
 915, 489,2449,1514,1184,2450,2451, 515,  64, 427, 495,2452, 583,2453, 483, 485,
1038, 562, 213,1515, 748, 666,2454,2455,2456,2457, 334,2458, 780, 996,1008, 705,
1243,2459,2460,2461,2462,2463, 114,2464, 493,1146, 366, 163,1516, 961,1104,2465,
 291,2466,1318,1105,2467,1517, 365,2468, 355, 951,1244,2469,1319,2470, 631,2471,
2472, 218,1320, 364, 320, 756,1518,1519,1321,1520,1322,2473,2474,2475,2476, 997,
2477,2478,2479,2480, 665,1185,2481, 916,1521,2482,2483,2484, 584, 684,2485,2486,
 797,2487,1051,1186,2488,2489,2490,1522,2491,2492, 370,2493,1039,1187,  65,2494,
 434, 205, 463,1188,2495, 125, 812, 391, 402, 826, 699, 286, 398, 155, 781, 771,
 585,2496, 590, 505,1073,2497, 599, 244, 219, 917,1018, 952, 646,1523,2498,1323,
2499,2500,  49, 984, 354, 741,2501, 625,2502,1324,2503,1019, 190, 357, 757, 491,
  95, 782, 868,2504,2505,2506,2507,2508,2509, 134,1524,1074, 422,1525, 898,2510,
 161,2511,2512,2513,2514, 769,2515,1526,2516,2517, 411,1325,2518, 472,1527,2519,
2520,2521,2522,2523,2524, 985,2525,2526,2527,2528,2529,2530, 764,2531,1245,2532,
2533,  25, 204, 311,2534, 496,2535,1052,2536,2537,2538,2539,2540,2541,2542, 199,
 704, 504, 468, 758, 657,1528, 196,  44, 839,1246, 272, 750,2543, 765, 862,2544,
2545,1326,2546, 132, 615, 933,2547, 732,2548,2549,2550,1189,1529,2551, 283,1247,
1053, 607, 929,2552,2553,2554, 930, 183, 872, 616,1040,1147,2555,1148,1020, 441,
 249,1075,2556,2557,2558, 466, 743,2559,2560,2561,  92, 514, 426, 420, 526,2562,
2563,2564,2565,2566,2567,2568, 185,2569,2570,2571,2572, 776,1530, 658,2573, 362,
2574, 361, 922,1076, 793,2575,2576,2577,2578,2579,2580,1531, 251,2581,2582,2583,
2584,1532,  54, 612, 237,1327,2585,2586, 275, 408, 647, 111,2587,1533,1106, 465,
   3, 458,   9,  38,2588, 107, 110, 890, 209,  26, 737, 498,2589,1534,2590, 431,
 202,  88,1535, 356, 287,1107, 660,1149,2591, 381,1536, 986,1150, 445,1248,1151,
 974,2592,2593, 846,2594, 446, 953, 184,1249,1250, 727,2595, 923, 193, 883,2596,
2597,2598, 102, 324, 539, 817,2599, 421,1041,2600, 832,2601,  94, 175, 197, 406,
2602, 459,2603,2604,2605,2606,2607, 330, 555,2608,2609,2610, 706,1108, 389,2611,
2612,2613,2614, 233,2615, 833, 558, 931, 954,1251,2616,2617,1537, 546,2618,2619,
1009,2620,2621,2622,1538, 690,1328,2623, 955,2624,1539,2625,2626, 772,2627,2628,
2629,2630,2631, 924, 648, 863, 603,2632,2633, 934,1540, 864, 865,2634, 642,1042,
 670,1190,2635,2636,2637,2638, 168,2639, 652, 873, 542,1054,1541,2640,2641,2642,  # 512, 256
)

_vendor/chardet/mbcssm.py000064400000061611151733136250011463 0ustar00######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################

from .enums import MachineState

# BIG5

# Byte-value (0x00-0xFF) to character-class map for Big5; one entry per byte.
BIG5_CLS = (
    1,1,1,1,1,1,1,1,  # 00 - 07    #allow 0x00 as legal value
    1,1,1,1,1,1,0,0,  # 08 - 0f
    1,1,1,1,1,1,1,1,  # 10 - 17
    1,1,1,0,1,1,1,1,  # 18 - 1f
    1,1,1,1,1,1,1,1,  # 20 - 27
    1,1,1,1,1,1,1,1,  # 28 - 2f
    1,1,1,1,1,1,1,1,  # 30 - 37
    1,1,1,1,1,1,1,1,  # 38 - 3f
    2,2,2,2,2,2,2,2,  # 40 - 47
    2,2,2,2,2,2,2,2,  # 48 - 4f
    2,2,2,2,2,2,2,2,  # 50 - 57
    2,2,2,2,2,2,2,2,  # 58 - 5f
    2,2,2,2,2,2,2,2,  # 60 - 67
    2,2,2,2,2,2,2,2,  # 68 - 6f
    2,2,2,2,2,2,2,2,  # 70 - 77
    2,2,2,2,2,2,2,1,  # 78 - 7f
    4,4,4,4,4,4,4,4,  # 80 - 87
    4,4,4,4,4,4,4,4,  # 88 - 8f
    4,4,4,4,4,4,4,4,  # 90 - 97
    4,4,4,4,4,4,4,4,  # 98 - 9f
    4,3,3,3,3,3,3,3,  # a0 - a7
    3,3,3,3,3,3,3,3,  # a8 - af
    3,3,3,3,3,3,3,3,  # b0 - b7
    3,3,3,3,3,3,3,3,  # b8 - bf
    3,3,3,3,3,3,3,3,  # c0 - c7
    3,3,3,3,3,3,3,3,  # c8 - cf
    3,3,3,3,3,3,3,3,  # d0 - d7
    3,3,3,3,3,3,3,3,  # d8 - df
    3,3,3,3,3,3,3,3,  # e0 - e7
    3,3,3,3,3,3,3,3,  # e8 - ef
    3,3,3,3,3,3,3,3,  # f0 - f7
    3,3,3,3,3,3,3,0  # f8 - ff
)

# State-transition table: rows correspond to the current state, columns to
# the incoming byte's class (row width = class_factor below).
BIG5_ST = (
    MachineState.ERROR,MachineState.START,MachineState.START,     3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07
    MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,#08-0f
    MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START#10-17
)

# Total byte length of a character whose first byte falls in each class
# (0 = that class cannot start a character).
BIG5_CHAR_LEN_TABLE = (0, 1, 1, 2, 0)

# Bundle consumed by CodingStateMachine.
BIG5_SM_MODEL = {'class_table': BIG5_CLS,
                 'class_factor': 5,
                 'state_table': BIG5_ST,
                 'char_len_table': BIG5_CHAR_LEN_TABLE,
                 'name': 'Big5'}

# CP949

# Byte-value (0x00-0xFF) to character-class map for CP949; one entry per byte.
CP949_CLS  = (
    1,1,1,1,1,1,1,1, 1,1,1,1,1,1,0,0,  # 00 - 0f
    1,1,1,1,1,1,1,1, 1,1,1,0,1,1,1,1,  # 10 - 1f
    1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1,  # 20 - 2f
    1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1,  # 30 - 3f
    1,4,4,4,4,4,4,4, 4,4,4,4,4,4,4,4,  # 40 - 4f
    4,4,5,5,5,5,5,5, 5,5,5,1,1,1,1,1,  # 50 - 5f
    1,5,5,5,5,5,5,5, 5,5,5,5,5,5,5,5,  # 60 - 6f
    5,5,5,5,5,5,5,5, 5,5,5,1,1,1,1,1,  # 70 - 7f
    0,6,6,6,6,6,6,6, 6,6,6,6,6,6,6,6,  # 80 - 8f
    6,6,6,6,6,6,6,6, 6,6,6,6,6,6,6,6,  # 90 - 9f
    6,7,7,7,7,7,7,7, 7,7,7,7,7,8,8,8,  # a0 - af
    7,7,7,7,7,7,7,7, 7,7,7,7,7,7,7,7,  # b0 - bf
    7,7,7,7,7,7,9,2, 2,3,2,2,2,2,2,2,  # c0 - cf
    2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,2,  # d0 - df
    2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,2,  # e0 - ef
    2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,0,  # f0 - ff
)

# State-transition table: each row is one current state, each column one
# byte class (row width = class_factor below).
CP949_ST = (
#cls=    0      1      2      3      4      5      6      7      8      9  # previous state =
    MachineState.ERROR,MachineState.START,     3,MachineState.ERROR,MachineState.START,MachineState.START,     4,     5,MachineState.ERROR,     6, # MachineState.START
    MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, # MachineState.ERROR
    MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME, # MachineState.ITS_ME
    MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START, # 3
    MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START, # 4
    MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START, # 5
    MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START, # 6
)

# Total byte length of a character whose first byte falls in each class
# (0 = that class cannot start a character).
CP949_CHAR_LEN_TABLE = (0, 1, 2, 0, 1, 1, 2, 2, 0, 2)

# Bundle consumed by CodingStateMachine.
CP949_SM_MODEL = {'class_table': CP949_CLS,
                  'class_factor': 10,
                  'state_table': CP949_ST,
                  'char_len_table': CP949_CHAR_LEN_TABLE,
                  'name': 'CP949'}

# EUC-JP

# Byte-value (0x00-0xFF) to character-class map for EUC-JP; one entry per byte.
EUCJP_CLS = (
    4,4,4,4,4,4,4,4,  # 00 - 07
    4,4,4,4,4,4,5,5,  # 08 - 0f
    4,4,4,4,4,4,4,4,  # 10 - 17
    4,4,4,5,4,4,4,4,  # 18 - 1f
    4,4,4,4,4,4,4,4,  # 20 - 27
    4,4,4,4,4,4,4,4,  # 28 - 2f
    4,4,4,4,4,4,4,4,  # 30 - 37
    4,4,4,4,4,4,4,4,  # 38 - 3f
    4,4,4,4,4,4,4,4,  # 40 - 47
    4,4,4,4,4,4,4,4,  # 48 - 4f
    4,4,4,4,4,4,4,4,  # 50 - 57
    4,4,4,4,4,4,4,4,  # 58 - 5f
    4,4,4,4,4,4,4,4,  # 60 - 67
    4,4,4,4,4,4,4,4,  # 68 - 6f
    4,4,4,4,4,4,4,4,  # 70 - 77
    4,4,4,4,4,4,4,4,  # 78 - 7f
    5,5,5,5,5,5,5,5,  # 80 - 87
    5,5,5,5,5,5,1,3,  # 88 - 8f
    5,5,5,5,5,5,5,5,  # 90 - 97
    5,5,5,5,5,5,5,5,  # 98 - 9f
    5,2,2,2,2,2,2,2,  # a0 - a7
    2,2,2,2,2,2,2,2,  # a8 - af
    2,2,2,2,2,2,2,2,  # b0 - b7
    2,2,2,2,2,2,2,2,  # b8 - bf
    2,2,2,2,2,2,2,2,  # c0 - c7
    2,2,2,2,2,2,2,2,  # c8 - cf
    2,2,2,2,2,2,2,2,  # d0 - d7
    2,2,2,2,2,2,2,2,  # d8 - df
    0,0,0,0,0,0,0,0,  # e0 - e7
    0,0,0,0,0,0,0,0,  # e8 - ef
    0,0,0,0,0,0,0,0,  # f0 - f7
    0,0,0,0,0,0,0,5  # f8 - ff
)

# State-transition table: rows correspond to the current state, columns to
# the incoming byte's class (row width = class_factor below).
EUCJP_ST = (
          3,     4,     3,     5,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07
     MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f
     MachineState.ITS_ME,MachineState.ITS_ME,MachineState.START,MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#10-17
     MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,     3,MachineState.ERROR,#18-1f
          3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START#20-27
)

# Total byte length of a character whose first byte falls in each class
# (0 = that class cannot start a character).
EUCJP_CHAR_LEN_TABLE = (2, 2, 2, 3, 1, 0)

# Bundle consumed by CodingStateMachine.
EUCJP_SM_MODEL = {'class_table': EUCJP_CLS,
                  'class_factor': 6,
                  'state_table': EUCJP_ST,
                  'char_len_table': EUCJP_CHAR_LEN_TABLE,
                  'name': 'EUC-JP'}

# EUC-KR

# Byte-value (0x00-0xFF) to character-class map for EUC-KR; one entry per byte.
EUCKR_CLS  = (
    1,1,1,1,1,1,1,1,  # 00 - 07
    1,1,1,1,1,1,0,0,  # 08 - 0f
    1,1,1,1,1,1,1,1,  # 10 - 17
    1,1,1,0,1,1,1,1,  # 18 - 1f
    1,1,1,1,1,1,1,1,  # 20 - 27
    1,1,1,1,1,1,1,1,  # 28 - 2f
    1,1,1,1,1,1,1,1,  # 30 - 37
    1,1,1,1,1,1,1,1,  # 38 - 3f
    1,1,1,1,1,1,1,1,  # 40 - 47
    1,1,1,1,1,1,1,1,  # 48 - 4f
    1,1,1,1,1,1,1,1,  # 50 - 57
    1,1,1,1,1,1,1,1,  # 58 - 5f
    1,1,1,1,1,1,1,1,  # 60 - 67
    1,1,1,1,1,1,1,1,  # 68 - 6f
    1,1,1,1,1,1,1,1,  # 70 - 77
    1,1,1,1,1,1,1,1,  # 78 - 7f
    0,0,0,0,0,0,0,0,  # 80 - 87
    0,0,0,0,0,0,0,0,  # 88 - 8f
    0,0,0,0,0,0,0,0,  # 90 - 97
    0,0,0,0,0,0,0,0,  # 98 - 9f
    0,2,2,2,2,2,2,2,  # a0 - a7
    2,2,2,2,2,3,3,3,  # a8 - af
    2,2,2,2,2,2,2,2,  # b0 - b7
    2,2,2,2,2,2,2,2,  # b8 - bf
    2,2,2,2,2,2,2,2,  # c0 - c7
    2,3,2,2,2,2,2,2,  # c8 - cf
    2,2,2,2,2,2,2,2,  # d0 - d7
    2,2,2,2,2,2,2,2,  # d8 - df
    2,2,2,2,2,2,2,2,  # e0 - e7
    2,2,2,2,2,2,2,2,  # e8 - ef
    2,2,2,2,2,2,2,2,  # f0 - f7
    2,2,2,2,2,2,2,0   # f8 - ff
)

# State-transition table: rows correspond to the current state, columns to
# the incoming byte's class (row width = class_factor below).
EUCKR_ST = (
    MachineState.ERROR,MachineState.START,     3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07
    MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START #08-0f
)

# Total byte length of a character whose first byte falls in each class
# (0 = that class cannot start a character).
EUCKR_CHAR_LEN_TABLE = (0, 1, 2, 0)

# Bundle consumed by CodingStateMachine.
EUCKR_SM_MODEL = {'class_table': EUCKR_CLS,
                'class_factor': 4,
                'state_table': EUCKR_ST,
                'char_len_table': EUCKR_CHAR_LEN_TABLE,
                'name': 'EUC-KR'}

# EUC-TW

# Byte-value (0x00-0xFF) to character-class map for EUC-TW; one entry per byte.
EUCTW_CLS = (
    2,2,2,2,2,2,2,2,  # 00 - 07
    2,2,2,2,2,2,0,0,  # 08 - 0f
    2,2,2,2,2,2,2,2,  # 10 - 17
    2,2,2,0,2,2,2,2,  # 18 - 1f
    2,2,2,2,2,2,2,2,  # 20 - 27
    2,2,2,2,2,2,2,2,  # 28 - 2f
    2,2,2,2,2,2,2,2,  # 30 - 37
    2,2,2,2,2,2,2,2,  # 38 - 3f
    2,2,2,2,2,2,2,2,  # 40 - 47
    2,2,2,2,2,2,2,2,  # 48 - 4f
    2,2,2,2,2,2,2,2,  # 50 - 57
    2,2,2,2,2,2,2,2,  # 58 - 5f
    2,2,2,2,2,2,2,2,  # 60 - 67
    2,2,2,2,2,2,2,2,  # 68 - 6f
    2,2,2,2,2,2,2,2,  # 70 - 77
    2,2,2,2,2,2,2,2,  # 78 - 7f
    0,0,0,0,0,0,0,0,  # 80 - 87
    0,0,0,0,0,0,6,0,  # 88 - 8f
    0,0,0,0,0,0,0,0,  # 90 - 97
    0,0,0,0,0,0,0,0,  # 98 - 9f
    0,3,4,4,4,4,4,4,  # a0 - a7
    5,5,1,1,1,1,1,1,  # a8 - af
    1,1,1,1,1,1,1,1,  # b0 - b7
    1,1,1,1,1,1,1,1,  # b8 - bf
    1,1,3,1,3,3,3,3,  # c0 - c7
    3,3,3,3,3,3,3,3,  # c8 - cf
    3,3,3,3,3,3,3,3,  # d0 - d7
    3,3,3,3,3,3,3,3,  # d8 - df
    3,3,3,3,3,3,3,3,  # e0 - e7
    3,3,3,3,3,3,3,3,  # e8 - ef
    3,3,3,3,3,3,3,3,  # f0 - f7
    3,3,3,3,3,3,3,0   # f8 - ff
)

# State-transition table: rows correspond to the current state, columns to
# the incoming byte's class (row width = class_factor below).
EUCTW_ST = (
    MachineState.ERROR,MachineState.ERROR,MachineState.START,     3,     3,     3,     4,MachineState.ERROR,#00-07
    MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f
    MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.START,MachineState.ERROR,#10-17
    MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#18-1f
         5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.START,MachineState.START,#20-27
    MachineState.START,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START #28-2f
)

# Total byte length of a character whose first byte falls in each class
# (0 = that class cannot start a character).
EUCTW_CHAR_LEN_TABLE = (0, 0, 1, 2, 2, 2, 3)

# Bundle consumed by CodingStateMachine.
EUCTW_SM_MODEL = {'class_table': EUCTW_CLS,
                'class_factor': 7,
                'state_table': EUCTW_ST,
                'char_len_table': EUCTW_CHAR_LEN_TABLE,
                'name': 'x-euc-tw'}

# GB2312

# Byte-value (0x00-0xFF) to character-class map for GB2312; one entry per byte.
GB2312_CLS = (
    1,1,1,1,1,1,1,1,  # 00 - 07
    1,1,1,1,1,1,0,0,  # 08 - 0f
    1,1,1,1,1,1,1,1,  # 10 - 17
    1,1,1,0,1,1,1,1,  # 18 - 1f
    1,1,1,1,1,1,1,1,  # 20 - 27
    1,1,1,1,1,1,1,1,  # 28 - 2f
    3,3,3,3,3,3,3,3,  # 30 - 37
    3,3,1,1,1,1,1,1,  # 38 - 3f
    2,2,2,2,2,2,2,2,  # 40 - 47
    2,2,2,2,2,2,2,2,  # 48 - 4f
    2,2,2,2,2,2,2,2,  # 50 - 57
    2,2,2,2,2,2,2,2,  # 58 - 5f
    2,2,2,2,2,2,2,2,  # 60 - 67
    2,2,2,2,2,2,2,2,  # 68 - 6f
    2,2,2,2,2,2,2,2,  # 70 - 77
    2,2,2,2,2,2,2,4,  # 78 - 7f
    5,6,6,6,6,6,6,6,  # 80 - 87
    6,6,6,6,6,6,6,6,  # 88 - 8f
    6,6,6,6,6,6,6,6,  # 90 - 97
    6,6,6,6,6,6,6,6,  # 98 - 9f
    6,6,6,6,6,6,6,6,  # a0 - a7
    6,6,6,6,6,6,6,6,  # a8 - af
    6,6,6,6,6,6,6,6,  # b0 - b7
    6,6,6,6,6,6,6,6,  # b8 - bf
    6,6,6,6,6,6,6,6,  # c0 - c7
    6,6,6,6,6,6,6,6,  # c8 - cf
    6,6,6,6,6,6,6,6,  # d0 - d7
    6,6,6,6,6,6,6,6,  # d8 - df
    6,6,6,6,6,6,6,6,  # e0 - e7
    6,6,6,6,6,6,6,6,  # e8 - ef
    6,6,6,6,6,6,6,6,  # f0 - f7
    6,6,6,6,6,6,6,0   # f8 - ff
)

# State-transition table: rows correspond to the current state, columns to
# the incoming byte's class (row width = class_factor below).
GB2312_ST = (
    MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,     3,MachineState.ERROR,#00-07
    MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f
    MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,#10-17
         4,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#18-1f
    MachineState.ERROR,MachineState.ERROR,     5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,#20-27
    MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START #28-2f
)

# To be accurate, the length of class 6 can be either 2 or 4.
# But it is not necessary to discriminate between the two since
# it is used for frequency analysis only, and we are validating
# each code range there as well. So it is safe to set it to be
# 2 here.
GB2312_CHAR_LEN_TABLE = (0, 1, 1, 1, 1, 1, 2)

# Bundle consumed by CodingStateMachine.
GB2312_SM_MODEL = {'class_table': GB2312_CLS,
                   'class_factor': 7,
                   'state_table': GB2312_ST,
                   'char_len_table': GB2312_CHAR_LEN_TABLE,
                   'name': 'GB2312'}

# Shift_JIS

# Byte-value (0x00-0xFF) to character-class map for Shift_JIS; one per byte.
SJIS_CLS = (
    1,1,1,1,1,1,1,1,  # 00 - 07
    1,1,1,1,1,1,0,0,  # 08 - 0f
    1,1,1,1,1,1,1,1,  # 10 - 17
    1,1,1,0,1,1,1,1,  # 18 - 1f
    1,1,1,1,1,1,1,1,  # 20 - 27
    1,1,1,1,1,1,1,1,  # 28 - 2f
    1,1,1,1,1,1,1,1,  # 30 - 37
    1,1,1,1,1,1,1,1,  # 38 - 3f
    2,2,2,2,2,2,2,2,  # 40 - 47
    2,2,2,2,2,2,2,2,  # 48 - 4f
    2,2,2,2,2,2,2,2,  # 50 - 57
    2,2,2,2,2,2,2,2,  # 58 - 5f
    2,2,2,2,2,2,2,2,  # 60 - 67
    2,2,2,2,2,2,2,2,  # 68 - 6f
    2,2,2,2,2,2,2,2,  # 70 - 77
    2,2,2,2,2,2,2,1,  # 78 - 7f
    3,3,3,3,3,2,2,3,  # 80 - 87
    3,3,3,3,3,3,3,3,  # 88 - 8f
    3,3,3,3,3,3,3,3,  # 90 - 97
    3,3,3,3,3,3,3,3,  # 98 - 9f
    # 0xa0 is illegal in Shift_JIS encoding, but some pages contain such a
    # byte anyway, so we need to be more forgiving of errors here.
    2,2,2,2,2,2,2,2,  # a0 - a7
    2,2,2,2,2,2,2,2,  # a8 - af
    2,2,2,2,2,2,2,2,  # b0 - b7
    2,2,2,2,2,2,2,2,  # b8 - bf
    2,2,2,2,2,2,2,2,  # c0 - c7
    2,2,2,2,2,2,2,2,  # c8 - cf
    2,2,2,2,2,2,2,2,  # d0 - d7
    2,2,2,2,2,2,2,2,  # d8 - df
    3,3,3,3,3,3,3,3,  # e0 - e7
    3,3,3,3,3,4,4,4,  # e8 - ef
    3,3,3,3,3,3,3,3,  # f0 - f7
    3,3,3,3,3,0,0,0)  # f8 - ff


# State-transition table: rows correspond to the current state, columns to
# the incoming byte's class (row width = class_factor below).
SJIS_ST = (
    MachineState.ERROR,MachineState.START,MachineState.START,     3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07
    MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f
    MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START #10-17
)

# Total byte length of a character whose first byte falls in each class
# (0 = that class cannot start a character).
SJIS_CHAR_LEN_TABLE = (0, 1, 1, 2, 0, 0)

# Bundle consumed by CodingStateMachine.
SJIS_SM_MODEL = {'class_table': SJIS_CLS,
               'class_factor': 6,
               'state_table': SJIS_ST,
               'char_len_table': SJIS_CHAR_LEN_TABLE,
               'name': 'Shift_JIS'}

# UCS2-BE

# Byte-class table for UTF-16BE detection: almost every byte is class 0;
# 0x0a, 0x0d, 0x1b, 0x29-0x2d and the byte-order-mark bytes 0xfe/0xff
# are assigned their own classes.
UCS2BE_CLS = (
    0,0,0,0,0,0,0,0,  # 00 - 07
    0,0,1,0,0,2,0,0,  # 08 - 0f
    0,0,0,0,0,0,0,0,  # 10 - 17
    0,0,0,3,0,0,0,0,  # 18 - 1f
    0,0,0,0,0,0,0,0,  # 20 - 27
    0,3,3,3,3,3,0,0,  # 28 - 2f
    0,0,0,0,0,0,0,0,  # 30 - 37
    0,0,0,0,0,0,0,0,  # 38 - 3f
    0,0,0,0,0,0,0,0,  # 40 - 47
    0,0,0,0,0,0,0,0,  # 48 - 4f
    0,0,0,0,0,0,0,0,  # 50 - 57
    0,0,0,0,0,0,0,0,  # 58 - 5f
    0,0,0,0,0,0,0,0,  # 60 - 67
    0,0,0,0,0,0,0,0,  # 68 - 6f
    0,0,0,0,0,0,0,0,  # 70 - 77
    0,0,0,0,0,0,0,0,  # 78 - 7f
    0,0,0,0,0,0,0,0,  # 80 - 87
    0,0,0,0,0,0,0,0,  # 88 - 8f
    0,0,0,0,0,0,0,0,  # 90 - 97
    0,0,0,0,0,0,0,0,  # 98 - 9f
    0,0,0,0,0,0,0,0,  # a0 - a7
    0,0,0,0,0,0,0,0,  # a8 - af
    0,0,0,0,0,0,0,0,  # b0 - b7
    0,0,0,0,0,0,0,0,  # b8 - bf
    0,0,0,0,0,0,0,0,  # c0 - c7
    0,0,0,0,0,0,0,0,  # c8 - cf
    0,0,0,0,0,0,0,0,  # d0 - d7
    0,0,0,0,0,0,0,0,  # d8 - df
    0,0,0,0,0,0,0,0,  # e0 - e7
    0,0,0,0,0,0,0,0,  # e8 - ef
    0,0,0,0,0,0,0,0,  # f0 - f7
    0,0,0,0,0,0,4,5   # f8 - ff
)

# UTF-16BE state-machine transition table ('state_table' in the model
# dict below).
UCS2BE_ST  = (
          5,     7,     7,MachineState.ERROR,     4,     3,MachineState.ERROR,MachineState.ERROR,#00-07
     MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f
     MachineState.ITS_ME,MachineState.ITS_ME,     6,     6,     6,     6,MachineState.ERROR,MachineState.ERROR,#10-17
          6,     6,     6,     6,     6,MachineState.ITS_ME,     6,     6,#18-1f
          6,     6,     6,     6,     5,     7,     7,MachineState.ERROR,#20-27
          5,     8,     6,     6,MachineState.ERROR,     6,     6,     6,#28-2f
          6,     6,     6,     6,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START #30-37
)

# Character length in bytes for each byte class above.
UCS2BE_CHAR_LEN_TABLE = (2, 2, 2, 0, 2, 2)

# Model dict consumed by CodingStateMachine.
UCS2BE_SM_MODEL = {'class_table': UCS2BE_CLS,
                   'class_factor': 6,
                   'state_table': UCS2BE_ST,
                   'char_len_table': UCS2BE_CHAR_LEN_TABLE,
                   'name': 'UTF-16BE'}

# UCS2-LE

# Byte-class table for UTF-16LE detection; identical class assignments to
# UCS2BE_CLS (0x0a, 0x0d, 0x1b, 0x29-0x2d and the BOM bytes 0xfe/0xff get
# distinct classes), only the state table below differs.
UCS2LE_CLS = (
    0,0,0,0,0,0,0,0,  # 00 - 07
    0,0,1,0,0,2,0,0,  # 08 - 0f
    0,0,0,0,0,0,0,0,  # 10 - 17
    0,0,0,3,0,0,0,0,  # 18 - 1f
    0,0,0,0,0,0,0,0,  # 20 - 27
    0,3,3,3,3,3,0,0,  # 28 - 2f
    0,0,0,0,0,0,0,0,  # 30 - 37
    0,0,0,0,0,0,0,0,  # 38 - 3f
    0,0,0,0,0,0,0,0,  # 40 - 47
    0,0,0,0,0,0,0,0,  # 48 - 4f
    0,0,0,0,0,0,0,0,  # 50 - 57
    0,0,0,0,0,0,0,0,  # 58 - 5f
    0,0,0,0,0,0,0,0,  # 60 - 67
    0,0,0,0,0,0,0,0,  # 68 - 6f
    0,0,0,0,0,0,0,0,  # 70 - 77
    0,0,0,0,0,0,0,0,  # 78 - 7f
    0,0,0,0,0,0,0,0,  # 80 - 87
    0,0,0,0,0,0,0,0,  # 88 - 8f
    0,0,0,0,0,0,0,0,  # 90 - 97
    0,0,0,0,0,0,0,0,  # 98 - 9f
    0,0,0,0,0,0,0,0,  # a0 - a7
    0,0,0,0,0,0,0,0,  # a8 - af
    0,0,0,0,0,0,0,0,  # b0 - b7
    0,0,0,0,0,0,0,0,  # b8 - bf
    0,0,0,0,0,0,0,0,  # c0 - c7
    0,0,0,0,0,0,0,0,  # c8 - cf
    0,0,0,0,0,0,0,0,  # d0 - d7
    0,0,0,0,0,0,0,0,  # d8 - df
    0,0,0,0,0,0,0,0,  # e0 - e7
    0,0,0,0,0,0,0,0,  # e8 - ef
    0,0,0,0,0,0,0,0,  # f0 - f7
    0,0,0,0,0,0,4,5   # f8 - ff
)

# UTF-16LE state-machine transition table ('state_table' in the model
# dict below).
UCS2LE_ST = (
          6,     6,     7,     6,     4,     3,MachineState.ERROR,MachineState.ERROR,#00-07
     MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f
     MachineState.ITS_ME,MachineState.ITS_ME,     5,     5,     5,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,#10-17
          5,     5,     5,MachineState.ERROR,     5,MachineState.ERROR,     6,     6,#18-1f
          7,     6,     8,     8,     5,     5,     5,MachineState.ERROR,#20-27
          5,     5,     5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,     5,     5,#28-2f
          5,     5,     5,MachineState.ERROR,     5,MachineState.ERROR,MachineState.START,MachineState.START #30-37
)

# Character length in bytes for each byte class above.
UCS2LE_CHAR_LEN_TABLE = (2, 2, 2, 2, 2, 2)

# Model dict consumed by CodingStateMachine.
UCS2LE_SM_MODEL = {'class_table': UCS2LE_CLS,
                 'class_factor': 6,
                 'state_table': UCS2LE_ST,
                 'char_len_table': UCS2LE_CHAR_LEN_TABLE,
                 'name': 'UTF-16LE'}

# UTF-8

# Byte-class table for UTF-8: maps each byte value 0x00-0xff to one of 16
# classes (lead bytes of different sequence lengths and the continuation
# byte ranges each get their own class).
UTF8_CLS = (
    1,1,1,1,1,1,1,1,  # 00 - 07  #allow 0x00 as a legal value
    1,1,1,1,1,1,0,0,  # 08 - 0f
    1,1,1,1,1,1,1,1,  # 10 - 17
    1,1,1,0,1,1,1,1,  # 18 - 1f
    1,1,1,1,1,1,1,1,  # 20 - 27
    1,1,1,1,1,1,1,1,  # 28 - 2f
    1,1,1,1,1,1,1,1,  # 30 - 37
    1,1,1,1,1,1,1,1,  # 38 - 3f
    1,1,1,1,1,1,1,1,  # 40 - 47
    1,1,1,1,1,1,1,1,  # 48 - 4f
    1,1,1,1,1,1,1,1,  # 50 - 57
    1,1,1,1,1,1,1,1,  # 58 - 5f
    1,1,1,1,1,1,1,1,  # 60 - 67
    1,1,1,1,1,1,1,1,  # 68 - 6f
    1,1,1,1,1,1,1,1,  # 70 - 77
    1,1,1,1,1,1,1,1,  # 78 - 7f
    2,2,2,2,3,3,3,3,  # 80 - 87
    4,4,4,4,4,4,4,4,  # 88 - 8f
    4,4,4,4,4,4,4,4,  # 90 - 97
    4,4,4,4,4,4,4,4,  # 98 - 9f
    5,5,5,5,5,5,5,5,  # a0 - a7
    5,5,5,5,5,5,5,5,  # a8 - af
    5,5,5,5,5,5,5,5,  # b0 - b7
    5,5,5,5,5,5,5,5,  # b8 - bf
    0,0,6,6,6,6,6,6,  # c0 - c7
    6,6,6,6,6,6,6,6,  # c8 - cf
    6,6,6,6,6,6,6,6,  # d0 - d7
    6,6,6,6,6,6,6,6,  # d8 - df
    7,8,8,8,8,8,8,8,  # e0 - e7
    8,8,8,8,8,9,8,8,  # e8 - ef
    10,11,11,11,11,11,11,11,  # f0 - f7
    12,13,13,13,14,15,0,0    # f8 - ff
)

# UTF-8 state-machine transition table ('state_table' in the model dict
# below).  Plain integers are intermediate states tracking how many
# continuation bytes are still expected.
UTF8_ST = (
    MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,     12,   10,#00-07
         9,     11,     8,     7,     6,     5,     4,    3,#08-0f
    MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#10-17
    MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#18-1f
    MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#20-27
    MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#28-2f
    MachineState.ERROR,MachineState.ERROR,     5,     5,     5,     5,MachineState.ERROR,MachineState.ERROR,#30-37
    MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#38-3f
    MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,     5,     5,     5,MachineState.ERROR,MachineState.ERROR,#40-47
    MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#48-4f
    MachineState.ERROR,MachineState.ERROR,     7,     7,     7,     7,MachineState.ERROR,MachineState.ERROR,#50-57
    MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#58-5f
    MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,     7,     7,MachineState.ERROR,MachineState.ERROR,#60-67
    MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#68-6f
    MachineState.ERROR,MachineState.ERROR,     9,     9,     9,     9,MachineState.ERROR,MachineState.ERROR,#70-77
    MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#78-7f
    MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,     9,MachineState.ERROR,MachineState.ERROR,#80-87
    MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#88-8f
    MachineState.ERROR,MachineState.ERROR,    12,    12,    12,    12,MachineState.ERROR,MachineState.ERROR,#90-97
    MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#98-9f
    MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,    12,MachineState.ERROR,MachineState.ERROR,#a0-a7
    MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#a8-af
    MachineState.ERROR,MachineState.ERROR,    12,    12,    12,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#b0-b7
    MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#b8-bf
    MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,#c0-c7
    MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR #c8-cf
)

# Character length in bytes for each of the 16 byte classes above.
UTF8_CHAR_LEN_TABLE = (0, 1, 0, 0, 0, 0, 2, 3, 3, 3, 4, 4, 5, 5, 6, 6)

# Model dict consumed by CodingStateMachine.
UTF8_SM_MODEL = {'class_table': UTF8_CLS,
                 'class_factor': 16,
                 'state_table': UTF8_ST,
                 'char_len_table': UTF8_CHAR_LEN_TABLE,
                 'name': 'UTF-8'}
_vendor/chardet/hebrewprober.py000064400000033016151733136250012663 0ustar00######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
#          Shy Shalom
# Portions created by the Initial Developer are Copyright (C) 2005
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################

from .charsetprober import CharSetProber
from .enums import ProbingState

# This prober doesn't actually recognize a language or a charset.
# It is a helper prober for the use of the Hebrew model probers

### General ideas of the Hebrew charset recognition ###
#
# Four main charsets exist in Hebrew:
# "ISO-8859-8" - Visual Hebrew
# "windows-1255" - Logical Hebrew
# "ISO-8859-8-I" - Logical Hebrew
# "x-mac-hebrew" - ?? Logical Hebrew ??
#
# Both "ISO" charsets use a completely identical set of code points, whereas
# "windows-1255" and "x-mac-hebrew" are two different proper supersets of
# these code points. windows-1255 defines additional characters in the range
# 0x80-0x9F as some misc punctuation marks as well as some Hebrew-specific
# diacritics and additional 'Yiddish' ligature letters in the range 0xc0-0xd6.
# x-mac-hebrew defines similar additional code points but with a different
# mapping.
#
# As far as an average Hebrew text with no diacritics is concerned, all four
# charsets are identical with respect to code points. Meaning that for the
# main Hebrew alphabet, all four map the same values to all 27 Hebrew letters
# (including final letters).
#
# The dominant difference between these charsets is their directionality.
# "Visual" directionality means that the text is ordered as if the renderer is
# not aware of a BIDI rendering algorithm. The renderer sees the text and
# draws it from left to right. The text itself when ordered naturally is read
# backwards. A buffer of Visual Hebrew generally looks like so:
# "[last word of first line spelled backwards] [whole line ordered backwards
# and spelled backwards] [first word of first line spelled backwards]
# [end of line] [last word of second line] ... etc' "
# adding punctuation marks, numbers and English text to visual text is
# naturally also "visual" and from left to right.
#
# "Logical" directionality means the text is ordered "naturally" according to
# the order it is read. It is the responsibility of the renderer to display
# the text from right to left. A BIDI algorithm is used to place general
# punctuation marks, numbers and English text in the text.
#
# Texts in x-mac-hebrew are almost impossible to find on the Internet. From
# what little evidence I could find, it seems that its general directionality
# is Logical.
#
# To sum up all of the above, the Hebrew probing mechanism knows about two
# charsets:
# Visual Hebrew - "ISO-8859-8" - backwards text - Words and sentences are
#    backwards while line order is natural. For charset recognition purposes
#    the line order is unimportant (In fact, for this implementation, even
#    word order is unimportant).
# Logical Hebrew - "windows-1255" - normal, naturally ordered text.
#
# "ISO-8859-8-I" is a subset of windows-1255 and doesn't need to be
#    specifically identified.
# "x-mac-hebrew" is also identified as windows-1255. A text in x-mac-hebrew
#    that contains special punctuation marks or diacritics is displayed with
#    some unconverted characters showing as question marks. This problem might
#    be corrected using another model prober for x-mac-hebrew. Due to the fact
#    that x-mac-hebrew texts are so rare, writing another model prober isn't
#    worth the effort and performance hit.
#
#### The Prober ####
#
# The prober is divided between two SBCharSetProbers and a HebrewProber,
# all of which are managed, created, fed data, inquired and deleted by the
# SBCSGroupProber. The two SBCharSetProbers identify that the text is in
# fact some kind of Hebrew, Logical or Visual. The final decision about which
# one is correct is made by the HebrewProber by combining final-letter scores
# with the scores of the two SBCharSetProbers to produce a final answer.
#
# The SBCSGroupProber is responsible for stripping the original text of HTML
# tags, English characters, numbers, low-ASCII punctuation characters, spaces
# and new lines. It reduces any sequence of such characters to a single space.
# The buffer fed to each prober in the SBCS group prober is pure text in
# high-ASCII.
# The two SBCharSetProbers (model probers) share the same language model:
# Win1255Model.
# The first SBCharSetProber uses the model normally as any other
# SBCharSetProber does, to recognize windows-1255, upon which this model was
# built. The second SBCharSetProber is told to make the pair-of-letter
# lookup in the language model backwards. This in practice exactly simulates
# a visual Hebrew model using the windows-1255 logical Hebrew model.
#
# The HebrewProber is not using any language model. All it does is look for
# final-letter evidence suggesting the text is either logical Hebrew or visual
# Hebrew. Disjointed from the model probers, the results of the HebrewProber
# alone are meaningless. HebrewProber always returns 0.00 as confidence
# since it never identifies a charset by itself. Instead, the pointer to the
# HebrewProber is passed to the model probers as a helper "Name Prober".
# When the Group prober receives a positive identification from any prober,
# it asks for the name of the charset identified. If the prober queried is a
# Hebrew model prober, the model prober forwards the call to the
# HebrewProber to make the final decision. In the HebrewProber, the
# decision is made according to the final-letter scores maintained and both
# model probers' scores. The answer is returned in the form of the name of the
# charset identified, either "windows-1255" or "ISO-8859-8".

class HebrewProber(CharSetProber):
    """Decide between logical (windows-1255) and visual (ISO-8859-8) Hebrew.

    This prober never identifies a charset on its own (its confidence is
    always 0.00): it accumulates final-letter evidence while text is fed,
    and combines that evidence with the confidences of the two
    windows-1255 model probers (installed via ``set_model_probers``) when
    ``charset_name`` is queried.
    """

    # windows-1255 / ISO-8859-8 code points of interest
    FINAL_KAF = 0xea
    NORMAL_KAF = 0xeb
    FINAL_MEM = 0xed
    NORMAL_MEM = 0xee
    FINAL_NUN = 0xef
    NORMAL_NUN = 0xf0
    FINAL_PE = 0xf3
    NORMAL_PE = 0xf4
    FINAL_TSADI = 0xf5
    NORMAL_TSADI = 0xf6

    # Membership sets hoisted to class level so the per-character loop in
    # feed() does O(1) lookups instead of rebuilding a list on every call.
    _FINAL_LETTERS = frozenset((FINAL_KAF, FINAL_MEM, FINAL_NUN,
                                FINAL_PE, FINAL_TSADI))
    # The normal Tsadi is deliberately excluded from the non-final set:
    # words like 'lechotet' (to chat) contain an apostrophe after the
    # tsadi, and that apostrophe is converted to a space in
    # FilterWithoutEnglishLetters, making a Non-Final tsadi appear at the
    # end of a word even though it isn't in the original text.
    # The letters Pe and Kaf rarely display a related behavior of not being
    # a good Non-Final letter. Words like 'Pop', 'Winamp' and 'Mubarak'
    # for example legally end with a Non-Final Pe or Kaf. However, the
    # benefit of these letters as Non-Final letters outweighs the damage
    # since these words are quite rare.
    _NON_FINAL_LETTERS = frozenset((NORMAL_KAF, NORMAL_MEM,
                                    NORMAL_NUN, NORMAL_PE))

    # Minimum Visual vs Logical final letter score difference.
    # If the difference is below this, don't rely solely on the final
    # letter score distance.
    MIN_FINAL_CHAR_DISTANCE = 5

    # Minimum Visual vs Logical model score difference.
    # If the difference is below this, don't rely at all on the model score
    # distance.
    MIN_MODEL_DISTANCE = 0.01

    VISUAL_HEBREW_NAME = "ISO-8859-8"
    LOGICAL_HEBREW_NAME = "windows-1255"

    def __init__(self):
        super(HebrewProber, self).__init__()
        # Evidence accumulators; see feed() for the scoring rules.
        self._final_char_logical_score = None
        self._final_char_visual_score = None
        # Last two characters seen, used for word-boundary analysis.
        self._prev = None
        self._before_prev = None
        # The logical/visual model probers, installed by the group prober
        # through set_model_probers().
        self._logical_prober = None
        self._visual_prober = None
        self.reset()

    def reset(self):
        """Reset the accumulated scores and the character context."""
        self._final_char_logical_score = 0
        self._final_char_visual_score = 0
        # The two last characters seen in the previous buffer,
        # mPrev and mBeforePrev are initialized to space in order to simulate
        # a word delimiter at the beginning of the data
        self._prev = ' '
        self._before_prev = ' '
        # These probers are owned by the group prober.

    def set_model_probers(self, logicalProber, visualProber):
        """Install the logical and visual windows-1255 model probers."""
        self._logical_prober = logicalProber
        self._visual_prober = visualProber

    def is_final(self, c):
        """Return True if *c* is a final-form Hebrew letter."""
        return c in self._FINAL_LETTERS

    def is_non_final(self, c):
        """Return True if *c* is a useful non-final-form Hebrew letter.

        Normal Tsadi is intentionally excluded; see the comment on
        ``_NON_FINAL_LETTERS`` above.
        """
        return c in self._NON_FINAL_LETTERS

    def feed(self, byte_str):
        """Accumulate final-letter evidence from *byte_str*.

        Final letter analysis for logical-visual decision.
        Look for evidence that the received buffer is either logical Hebrew
        or visual Hebrew.
        The following cases are checked:
        1) A word longer than 1 letter, ending with a final letter. This is
           an indication that the text is laid out "naturally" since the
           final letter really appears at the end. +1 for logical score.
        2) A word longer than 1 letter, ending with a Non-Final letter. In
           normal Hebrew, words ending with Kaf, Mem, Nun, Pe or Tsadi,
           should not end with the Non-Final form of that letter. Exceptions
           to this rule are mentioned above in is_non_final(). This is an
           indication that the text is laid out backwards. +1 for visual
           score.
        3) A word longer than 1 letter, starting with a final letter. Final
           letters should not appear at the beginning of a word. This is an
           indication that the text is laid out backwards. +1 for visual
           score.

        The visual score and logical score are accumulated throughout the
        text and are finally checked against each other in charset_name.
        No checking for final letters in the middle of words is done since
        that case is not an indication for either Logical or Visual text.

        We automatically filter out all 7-bit characters (replace them with
        spaces) so the word boundary detection works properly. [MAP]
        """
        if self.state == ProbingState.NOT_ME:
            # Both model probers say it's not them. No reason to continue.
            return ProbingState.NOT_ME

        byte_str = self.filter_high_byte_only(byte_str)

        for cur in byte_str:
            if cur == ' ':
                # We stand on a space - a word just ended
                if self._before_prev != ' ':
                    # next-to-last char was not a space so self._prev is not a
                    # 1 letter word
                    if self.is_final(self._prev):
                        # case (1) [-2:not space][-1:final letter][cur:space]
                        self._final_char_logical_score += 1
                    elif self.is_non_final(self._prev):
                        # case (2) [-2:not space][-1:Non-Final letter][
                        #  cur:space]
                        self._final_char_visual_score += 1
            else:
                # Not standing on a space
                if ((self._before_prev == ' ') and
                        (self.is_final(self._prev)) and (cur != ' ')):
                    # case (3) [-2:space][-1:final letter][cur:not space]
                    self._final_char_visual_score += 1
            self._before_prev = self._prev
            self._prev = cur

        # Forever detecting, till the end or until both model probers return
        # ProbingState.NOT_ME (handled above)
        return ProbingState.DETECTING

    @property
    def charset_name(self):
        """Make the decision: is it Logical or Visual Hebrew?"""
        # If the final letter score distance is dominant enough, rely on it.
        finalsub = (self._final_char_logical_score -
                    self._final_char_visual_score)
        if finalsub >= self.MIN_FINAL_CHAR_DISTANCE:
            return self.LOGICAL_HEBREW_NAME
        if finalsub <= -self.MIN_FINAL_CHAR_DISTANCE:
            return self.VISUAL_HEBREW_NAME

        # It's not dominant enough, try to rely on the model scores instead.
        modelsub = (self._logical_prober.get_confidence()
                    - self._visual_prober.get_confidence())
        if modelsub > self.MIN_MODEL_DISTANCE:
            return self.LOGICAL_HEBREW_NAME
        if modelsub < -self.MIN_MODEL_DISTANCE:
            return self.VISUAL_HEBREW_NAME

        # Still no good, back to final letter distance, maybe it'll save the
        # day.
        if finalsub < 0.0:
            return self.VISUAL_HEBREW_NAME

        # (finalsub > 0 - Logical) or (don't know what to do) default to
        # Logical.
        return self.LOGICAL_HEBREW_NAME

    @property
    def language(self):
        return 'Hebrew'

    @property
    def state(self):
        # Remain active as long as any of the model probers are active.
        if (self._logical_prober.state == ProbingState.NOT_ME) and \
           (self._visual_prober.state == ProbingState.NOT_ME):
            return ProbingState.NOT_ME
        return ProbingState.DETECTING
_vendor/chardet/euckrprober.py000064400000003324151733136250012517 0ustar00######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################

from .mbcharsetprober import MultiByteCharSetProber
from .codingstatemachine import CodingStateMachine
from .chardistribution import EUCKRDistributionAnalysis
from .mbcssm import EUCKR_SM_MODEL


class EUCKRProber(MultiByteCharSetProber):
    """Multi-byte charset prober for EUC-KR (Korean)."""

    def __init__(self):
        super(EUCKRProber, self).__init__()
        # Coding state machine built from the EUC-KR model (see mbcssm).
        self.coding_sm = CodingStateMachine(EUCKR_SM_MODEL)
        # NOTE(review): presumably used by the MultiByteCharSetProber base
        # to derive the confidence score -- confirm against the base class.
        self.distribution_analyzer = EUCKRDistributionAnalysis()
        self.reset()

    @property
    def charset_name(self):
        # Charset name reported when this prober matches.
        return "EUC-KR"

    @property
    def language(self):
        return "Korean"
_vendor/chardet/cli/__pycache__/chardetect.cpython-36.pyc000064400000005671151733136250017344 0ustar003

�Pf�
�@srdZddlmZmZmZddlZddlZddlmZddl	m
Z
ddlmZd
dd	�Z
dd
d�Zedkrne�dS)a
Script which takes one or more file paths and reports on their detected
encodings

Example::

    % chardetect somefile someotherfile
    somefile: windows-1252 with confidence 0.5
    someotherfile: ascii with confidence 1.0

If no paths are provided, it takes its input from stdin.

�)�absolute_import�print_function�unicode_literalsN)�__version__)�PY2)�UniversalDetector�stdincCs|t�}x&|D]}t|�}|j|�|jrPqW|j�|j}trP|jtj	�d�}|drndj
||d|d�Sdj
|�SdS)z�
    Return a string describing the probable encoding of a file or
    list of strings.

    :param lines: The lines to get the encoding of.
    :type lines: Iterable of bytes
    :param name: Name of file or collection of lines
    :type name: str
    �ignore�encodingz{0}: {1} with confidence {2}Z
confidencez{0}: no resultN)r�	bytearrayZfeed�done�close�resultr�decode�sys�getfilesystemencoding�format)�lines�name�u�liner�r� /usr/lib/python3.6/chardetect.py�description_ofs



rcCs�tjdd�}|jddtjd�dtr(tjntjjgd�|jdd	d
jt	�d�|j
|�}x4|jD]*}|j�rxt
dtjd�t
t||j��q^WdS)z�
    Handles command line arguments and gets things started.

    :param argv: List of arguments, as if specified on the command-line.
                 If None, ``sys.argv[1:]`` is used instead.
    :type argv: list of str
    zVTakes one or more file paths and reports their detected                      encodings)�description�inputz^File whose encoding we would like to determine.                               (default: stdin)�rb�*)�help�type�nargs�defaultz	--version�versionz%(prog)s {0})�actionr"z0You are running chardetect interactively. Press z8CTRL-D twice at the start of a blank line to signal the z4end of your input. If you want help, run chardetect z--help
)�fileNzhYou are running chardetect interactively. Press CTRL-D twice at the start of a blank line to signal the z�You are running chardetect interactively. Press CTRL-D twice at the start of a blank line to signal the end of your input. If you want help, run chardetect z�You are running chardetect interactively. Press CTRL-D twice at the start of a blank line to signal the end of your input. If you want help, run chardetect --help
)�argparse�ArgumentParser�add_argumentZFileTyperrr�bufferrr�
parse_argsr�isatty�print�stderrrr)�argv�parser�args�frrr�main6s	

r1�__main__)r)N)�__doc__Z
__future__rrrr%rZchardetrZchardet.compatrZchardet.universaldetectorrrr1�__name__rrrr�<module>s

_vendor/chardet/cli/__pycache__/__init__.cpython-36.pyc000064400000000161151733136250016762 0ustar003

�Pf�@sdS)N�rrr�/usr/lib/python3.6/__init__.py�<module>s_vendor/chardet/cli/__pycache__/__init__.cpython-36.opt-1.pyc000064400000000161151733136250017721 0ustar003

�Pf�@sdS)N�rrr�/usr/lib/python3.6/__init__.py�<module>s_vendor/chardet/cli/__pycache__/chardetect.cpython-36.opt-1.pyc000064400000005671151733136250020303 0ustar003

�Pf�
�@srdZddlmZmZmZddlZddlZddlmZddl	m
Z
ddlmZd
dd	�Z
dd
d�Zedkrne�dS)a
Script which takes one or more file paths and reports on their detected
encodings

Example::

    % chardetect somefile someotherfile
    somefile: windows-1252 with confidence 0.5
    someotherfile: ascii with confidence 1.0

If no paths are provided, it takes its input from stdin.

�)�absolute_import�print_function�unicode_literalsN)�__version__)�PY2)�UniversalDetector�stdincCs|t�}x&|D]}t|�}|j|�|jrPqW|j�|j}trP|jtj	�d�}|drndj
||d|d�Sdj
|�SdS)z�
    Return a string describing the probable encoding of a file or
    list of strings.

    :param lines: The lines to get the encoding of.
    :type lines: Iterable of bytes
    :param name: Name of file or collection of lines
    :type name: str
    �ignore�encodingz{0}: {1} with confidence {2}Z
confidencez{0}: no resultN)r�	bytearrayZfeed�done�close�resultr�decode�sys�getfilesystemencoding�format)�lines�name�u�liner�r� /usr/lib/python3.6/chardetect.py�description_ofs



rcCs�tjdd�}|jddtjd�dtr(tjntjjgd�|jdd	d
jt	�d�|j
|�}x4|jD]*}|j�rxt
dtjd�t
t||j��q^WdS)z�
    Handles command line arguments and gets things started.

    :param argv: List of arguments, as if specified on the command-line.
                 If None, ``sys.argv[1:]`` is used instead.
    :type argv: list of str
    zVTakes one or more file paths and reports their detected                      encodings)�description�inputz^File whose encoding we would like to determine.                               (default: stdin)�rb�*)�help�type�nargs�defaultz	--version�versionz%(prog)s {0})�actionr"z0You are running chardetect interactively. Press z8CTRL-D twice at the start of a blank line to signal the z4end of your input. If you want help, run chardetect z--help
)�fileNzhYou are running chardetect interactively. Press CTRL-D twice at the start of a blank line to signal the z�You are running chardetect interactively. Press CTRL-D twice at the start of a blank line to signal the end of your input. If you want help, run chardetect z�You are running chardetect interactively. Press CTRL-D twice at the start of a blank line to signal the end of your input. If you want help, run chardetect --help
)�argparse�ArgumentParser�add_argumentZFileTyperrr�bufferrr�
parse_argsr�isatty�print�stderrrr)�argv�parser�args�frrr�main6s	

r1�__main__)r)N)�__doc__Z
__future__rrrr%rZchardetrZchardet.compatrZchardet.universaldetectorrrr1�__name__rrrr�<module>s

_vendor/chardet/cli/__init__.py000064400000000001151733136250012467 0ustar00
_vendor/chardet/cli/chardetect.py000064400000005262151733136250013054 0ustar00#!/usr/bin/env python
"""
Script which takes one or more file paths and reports on their detected
encodings

Example::

    % chardetect somefile someotherfile
    somefile: windows-1252 with confidence 0.5
    someotherfile: ascii with confidence 1.0

If no paths are provided, it takes its input from stdin.

"""

from __future__ import absolute_import, print_function, unicode_literals

import argparse
import sys

from chardet import __version__
from chardet.compat import PY2
from chardet.universaldetector import UniversalDetector


def description_of(lines, name='stdin'):
    """
    Return a string describing the probable encoding of a file or
    list of strings.

    :param lines: The lines to get the encoding of.
    :type lines: Iterable of bytes
    :param name: Name of file or collection of lines
    :type name: str
    """
    detector = UniversalDetector()
    for raw_line in lines:
        detector.feed(bytearray(raw_line))
        # Stop reading as soon as the detector is certain -- particularly
        # useful if we read a BOM.
        if detector.done:
            break
    detector.close()
    outcome = detector.result
    if PY2:
        # On Python 2 ``name`` may be bytes; make it printable text.
        name = name.decode(sys.getfilesystemencoding(), 'ignore')
    if outcome['encoding']:
        return '{0}: {1} with confidence {2}'.format(name,
                                                     outcome['encoding'],
                                                     outcome['confidence'])
    return '{0}: no result'.format(name)


def main(argv=None):
    """
    Handles command line arguments and gets things started.

    :param argv: List of arguments, as if specified on the command-line.
                 If None, ``sys.argv[1:]`` is used instead.
    :type argv: list of str
    """
    # Assemble the CLI parser: positional input files plus --version.
    parser = argparse.ArgumentParser(
        description="Takes one or more file paths and reports their detected \
                     encodings")
    parser.add_argument('input',
                        help='File whose encoding we would like to determine. \
                              (default: stdin)',
                        type=argparse.FileType('rb'), nargs='*',
                        default=[sys.stdin if PY2 else sys.stdin.buffer])
    parser.add_argument('--version', action='version',
                        version='%(prog)s {0}'.format(__version__))
    parsed = parser.parse_args(argv)

    for infile in parsed.input:
        if infile.isatty():
            # Warn (on stderr, so it doesn't pollute the report) that we are
            # waiting on keyboard input rather than a file/pipe.
            print("You are running chardetect interactively. Press "
                  "CTRL-D twice at the start of a blank line to signal the "
                  "end of your input. If you want help, run chardetect "
                  "--help\n", file=sys.stderr)
        print(description_of(infile, infile.name))


if __name__ == '__main__':
    main()
_vendor/chardet/latin1prober.py000064400000012372151733136250012601 0ustar00######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 2001
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Mark Pilgrim - port to Python
#   Shy Shalom - original C code
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################

from .charsetprober import CharSetProber
from .enums import ProbingState

# Number of frequency categories tracked per character pair; indexes into
# the 0-3 values stored in Latin1ClassModel below.
FREQ_CAT_NUM = 4

# Character classes used by the Latin-1 prober's bigram model.
UDF = 0  # undefined
OTH = 1  # other
ASC = 2  # ascii capital letter
ASS = 3  # ascii small letter
ACV = 4  # accent capital vowel
ACO = 5  # accent capital other
ASV = 6  # accent small vowel
ASO = 7  # accent small other
CLASS_NUM = 8  # total classes

Latin1_CharToClass = (
    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,   # 00 - 07
    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,   # 08 - 0F
    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,   # 10 - 17
    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,   # 18 - 1F
    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,   # 20 - 27
    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,   # 28 - 2F
    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,   # 30 - 37
    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,   # 38 - 3F
    OTH, ASC, ASC, ASC, ASC, ASC, ASC, ASC,   # 40 - 47
    ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC,   # 48 - 4F
    ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC,   # 50 - 57
    ASC, ASC, ASC, OTH, OTH, OTH, OTH, OTH,   # 58 - 5F
    OTH, ASS, ASS, ASS, ASS, ASS, ASS, ASS,   # 60 - 67
    ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS,   # 68 - 6F
    ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS,   # 70 - 77
    ASS, ASS, ASS, OTH, OTH, OTH, OTH, OTH,   # 78 - 7F
    OTH, UDF, OTH, ASO, OTH, OTH, OTH, OTH,   # 80 - 87
    OTH, OTH, ACO, OTH, ACO, UDF, ACO, UDF,   # 88 - 8F
    UDF, OTH, OTH, OTH, OTH, OTH, OTH, OTH,   # 90 - 97
    OTH, OTH, ASO, OTH, ASO, UDF, ASO, ACO,   # 98 - 9F
    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,   # A0 - A7
    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,   # A8 - AF
    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,   # B0 - B7
    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,   # B8 - BF
    ACV, ACV, ACV, ACV, ACV, ACV, ACO, ACO,   # C0 - C7
    ACV, ACV, ACV, ACV, ACV, ACV, ACV, ACV,   # C8 - CF
    ACO, ACO, ACV, ACV, ACV, ACV, ACV, OTH,   # D0 - D7
    ACV, ACV, ACV, ACV, ACV, ACO, ACO, ACO,   # D8 - DF
    ASV, ASV, ASV, ASV, ASV, ASV, ASO, ASO,   # E0 - E7
    ASV, ASV, ASV, ASV, ASV, ASV, ASV, ASV,   # E8 - EF
    ASO, ASO, ASV, ASV, ASV, ASV, ASV, OTH,   # F0 - F7
    ASV, ASV, ASV, ASV, ASV, ASO, ASO, ASO,   # F8 - FF
)

# 0 : illegal
# 1 : very unlikely
# 2 : normal
# 3 : very likely
Latin1ClassModel = (
# UDF OTH ASC ASS ACV ACO ASV ASO
    0,  0,  0,  0,  0,  0,  0,  0,  # UDF
    0,  3,  3,  3,  3,  3,  3,  3,  # OTH
    0,  3,  3,  3,  3,  3,  3,  3,  # ASC
    0,  3,  3,  3,  1,  1,  3,  3,  # ASS
    0,  3,  3,  3,  1,  2,  1,  2,  # ACV
    0,  3,  3,  3,  3,  3,  3,  3,  # ACO
    0,  3,  1,  3,  1,  1,  1,  3,  # ASV
    0,  3,  1,  3,  1,  1,  3,  3,  # ASO
)


class Latin1Prober(CharSetProber):
    """Prober for ISO-8859-1 (Latin-1) text.

    Scores the input with a tiny bigram model over coarse character
    classes (see ``Latin1_CharToClass`` / ``Latin1ClassModel``).
    """

    def __init__(self):
        super(Latin1Prober, self).__init__()
        self._last_char_class = None
        self._freq_counter = None
        self.reset()

    def reset(self):
        """Return the prober to its pristine state."""
        self._last_char_class = OTH
        self._freq_counter = [0] * FREQ_CAT_NUM
        CharSetProber.reset(self)

    @property
    def charset_name(self):
        return "ISO-8859-1"

    @property
    def language(self):
        return ""

    def feed(self, byte_str):
        """Score a chunk of the document and return the probing state."""
        byte_str = self.filter_with_english_letters(byte_str)
        for code in byte_str:
            cur_class = Latin1_CharToClass[code]
            # Look up the likelihood category for the (previous, current)
            # class pair in the flattened CLASS_NUM x CLASS_NUM matrix.
            likelihood = Latin1ClassModel[self._last_char_class * CLASS_NUM
                                          + cur_class]
            if likelihood == 0:
                # Illegal pair: this cannot be Latin-1.
                self._state = ProbingState.NOT_ME
                break
            self._freq_counter[likelihood] += 1
            self._last_char_class = cur_class

        return self.state

    def get_confidence(self):
        if self.state == ProbingState.NOT_ME:
            return 0.01

        total = sum(self._freq_counter)
        if total < 0.01:
            return 0.0
        confidence = ((self._freq_counter[3] - self._freq_counter[1] * 20.0)
                      / total)
        confidence = max(confidence, 0.0)
        # Lower the confidence of latin1 so that other, more accurate
        # detectors can take priority.
        return confidence * 0.73
_vendor/chardet/enums.py000064400000003175151733136250011327 0ustar00"""
All of the Enums that are used throughout the chardet package.

:author: Dan Blanchard (dan.blanchard@gmail.com)
"""


class InputState(object):
    """
    This enum represents the different states a universal detector can be in.
    """
    PURE_ASCII = 0  # only plain ASCII bytes seen so far
    ESC_ASCII = 1   # ASCII plus escape sequences (ISO-2022 / HZ candidates)
    HIGH_BYTE = 2   # at least one byte with the high bit set was seen


class LanguageFilter(object):
    """
    This enum represents the different language filters we can apply to a
    ``UniversalDetector``.

    Values are bit flags and may be OR'd together.
    """
    CHINESE_SIMPLIFIED = 0x01
    CHINESE_TRADITIONAL = 0x02
    JAPANESE = 0x04
    KOREAN = 0x08
    NON_CJK = 0x10
    ALL = 0x1F  # union of all individual flags above
    CHINESE = CHINESE_SIMPLIFIED | CHINESE_TRADITIONAL
    CJK = CHINESE | JAPANESE | KOREAN


class ProbingState(object):
    """
    This enum represents the different states a prober can be in.
    """
    DETECTING = 0  # still gathering evidence
    FOUND_IT = 1   # prober is certain of its charset
    NOT_ME = 2     # input cannot be this prober's charset


class MachineState(object):
    """
    This enum represents the different states a state machine can be in.
    """
    START = 0   # initial / accepting state
    ERROR = 1   # sequence is invalid for this coding scheme
    ITS_ME = 2  # sequence uniquely identifies the coding scheme


class SequenceLikelihood(object):
    """
    This enum represents the likelihood of a character following the previous one.
    """
    NEGATIVE = 0  # pair essentially never occurs in the language
    UNLIKELY = 1
    LIKELY = 2
    POSITIVE = 3  # pair is very common in the language

    @classmethod
    def get_num_categories(cls):
        """:returns: The number of likelihood categories in the enum."""
        # Derive the count from the highest category rather than hard-coding
        # it, so the two cannot silently drift apart if a category is added.
        return cls.POSITIVE + 1


class CharacterCategory(object):
    """
    This enum represents the different categories language models for
    ``SingleByteCharsetProber`` put characters into.

    Anything less than CONTROL is considered a letter.
    """
    UNDEFINED = 255   # byte not defined in the charset
    LINE_BREAK = 254  # CR / LF
    SYMBOL = 253      # punctuation and other symbols
    DIGIT = 252       # 0 - 9
    CONTROL = 251     # control characters
_vendor/chardet/universaldetector.py000064400000030305151733136250013735 0ustar00######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 2001
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Mark Pilgrim - port to Python
#   Shy Shalom - original C code
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################
"""
Module containing the UniversalDetector detector class, which is the primary
class a user of ``chardet`` should use.

:author: Mark Pilgrim (initial port to Python)
:author: Shy Shalom (original C code)
:author: Dan Blanchard (major refactoring for 3.0)
:author: Ian Cordasco
"""


import codecs
import logging
import re

from .charsetgroupprober import CharSetGroupProber
from .enums import InputState, LanguageFilter, ProbingState
from .escprober import EscCharSetProber
from .latin1prober import Latin1Prober
from .mbcsgroupprober import MBCSGroupProber
from .sbcsgroupprober import SBCSGroupProber


class UniversalDetector(object):
    """
    The ``UniversalDetector`` class underlies the ``chardet.detect`` function
    and coordinates all of the different charset probers.

    To get a ``dict`` containing an encoding and its confidence, you can simply
    run:

    .. code::

            u = UniversalDetector()
            u.feed(some_bytes)
            u.close()
            detected = u.result

    """

    MINIMUM_THRESHOLD = 0.20
    HIGH_BYTE_DETECTOR = re.compile(b'[\x80-\xFF]')
    ESC_DETECTOR = re.compile(b'(\033|~{)')
    WIN_BYTE_DETECTOR = re.compile(b'[\x80-\x9F]')
    ISO_WIN_MAP = {'iso-8859-1': 'Windows-1252',
                   'iso-8859-2': 'Windows-1250',
                   'iso-8859-5': 'Windows-1251',
                   'iso-8859-6': 'Windows-1256',
                   'iso-8859-7': 'Windows-1253',
                   'iso-8859-8': 'Windows-1255',
                   'iso-8859-9': 'Windows-1254',
                   'iso-8859-13': 'Windows-1257'}

    def __init__(self, lang_filter=LanguageFilter.ALL):
        self._esc_charset_prober = None
        self._charset_probers = []
        self.result = None
        self.done = None
        self._got_data = None
        self._input_state = None
        self._last_char = None
        self.lang_filter = lang_filter
        self.logger = logging.getLogger(__name__)
        self._has_win_bytes = None
        self.reset()

    def reset(self):
        """
        Reset the UniversalDetector and all of its probers back to their
        initial states.  This is called by ``__init__``, so you only need to
        call this directly in between analyses of different documents.
        """
        self.result = {'encoding': None, 'confidence': 0.0, 'language': None}
        self.done = False
        self._got_data = False
        self._has_win_bytes = False
        self._input_state = InputState.PURE_ASCII
        self._last_char = b''
        if self._esc_charset_prober:
            self._esc_charset_prober.reset()
        for prober in self._charset_probers:
            prober.reset()

    def feed(self, byte_str):
        """
        Takes a chunk of a document and feeds it through all of the relevant
        charset probers.

        After calling ``feed``, you can check the value of the ``done``
        attribute to see if you need to continue feeding the
        ``UniversalDetector`` more data, or if it has made a prediction
        (in the ``result`` attribute).

        .. note::
           You should always call ``close`` when you're done feeding in your
           document if ``done`` is not already ``True``.
        """
        if self.done:
            return

        if not len(byte_str):
            return

        if not isinstance(byte_str, bytearray):
            byte_str = bytearray(byte_str)

        # First check for known BOMs, since these are guaranteed to be correct
        if not self._got_data:
            # If the data starts with BOM, we know it is UTF
            if byte_str.startswith(codecs.BOM_UTF8):
                # EF BB BF  UTF-8 with BOM
                self.result = {'encoding': "UTF-8-SIG",
                               'confidence': 1.0,
                               'language': ''}
            elif byte_str.startswith((codecs.BOM_UTF32_LE,
                                      codecs.BOM_UTF32_BE)):
                # FF FE 00 00  UTF-32, little-endian BOM
                # 00 00 FE FF  UTF-32, big-endian BOM
                self.result = {'encoding': "UTF-32",
                               'confidence': 1.0,
                               'language': ''}
            elif byte_str.startswith(b'\xFE\xFF\x00\x00'):
                # FE FF 00 00  UCS-4, unusual octet order BOM (3412)
                self.result = {'encoding': "X-ISO-10646-UCS-4-3412",
                               'confidence': 1.0,
                               'language': ''}
            elif byte_str.startswith(b'\x00\x00\xFF\xFE'):
                # 00 00 FF FE  UCS-4, unusual octet order BOM (2143)
                self.result = {'encoding': "X-ISO-10646-UCS-4-2143",
                               'confidence': 1.0,
                               'language': ''}
            elif byte_str.startswith((codecs.BOM_LE, codecs.BOM_BE)):
                # FF FE  UTF-16, little endian BOM
                # FE FF  UTF-16, big endian BOM
                self.result = {'encoding': "UTF-16",
                               'confidence': 1.0,
                               'language': ''}

            self._got_data = True
            if self.result['encoding'] is not None:
                self.done = True
                return

        # If none of those matched and we've only see ASCII so far, check
        # for high bytes and escape sequences
        if self._input_state == InputState.PURE_ASCII:
            if self.HIGH_BYTE_DETECTOR.search(byte_str):
                self._input_state = InputState.HIGH_BYTE
            elif self._input_state == InputState.PURE_ASCII and \
                    self.ESC_DETECTOR.search(self._last_char + byte_str):
                self._input_state = InputState.ESC_ASCII

        self._last_char = byte_str[-1:]

        # If we've seen escape sequences, use the EscCharSetProber, which
        # uses a simple state machine to check for known escape sequences in
        # HZ and ISO-2022 encodings, since those are the only encodings that
        # use such sequences.
        if self._input_state == InputState.ESC_ASCII:
            if not self._esc_charset_prober:
                self._esc_charset_prober = EscCharSetProber(self.lang_filter)
            if self._esc_charset_prober.feed(byte_str) == ProbingState.FOUND_IT:
                self.result = {'encoding':
                               self._esc_charset_prober.charset_name,
                               'confidence':
                               self._esc_charset_prober.get_confidence(),
                               'language':
                               self._esc_charset_prober.language}
                self.done = True
        # If we've seen high bytes (i.e., those with values greater than 127),
        # we need to do more complicated checks using all our multi-byte and
        # single-byte probers that are left.  The single-byte probers
        # use character bigram distributions to determine the encoding, whereas
        # the multi-byte probers use a combination of character unigram and
        # bigram distributions.
        elif self._input_state == InputState.HIGH_BYTE:
            if not self._charset_probers:
                self._charset_probers = [MBCSGroupProber(self.lang_filter)]
                # If we're checking non-CJK encodings, use single-byte prober
                if self.lang_filter & LanguageFilter.NON_CJK:
                    self._charset_probers.append(SBCSGroupProber())
                self._charset_probers.append(Latin1Prober())
            for prober in self._charset_probers:
                if prober.feed(byte_str) == ProbingState.FOUND_IT:
                    self.result = {'encoding': prober.charset_name,
                                   'confidence': prober.get_confidence(),
                                   'language': prober.language}
                    self.done = True
                    break
            if self.WIN_BYTE_DETECTOR.search(byte_str):
                self._has_win_bytes = True

    def close(self):
        """
        Stop analyzing the current document and come up with a final
        prediction.

        :returns:  The ``result`` attribute, a ``dict`` with the keys
                   `encoding`, `confidence`, and `language`.
        """
        # Don't bother with checks if we're already done
        if self.done:
            return self.result
        self.done = True

        if not self._got_data:
            self.logger.debug('no data received!')

        # Default to ASCII if it is all we've seen so far
        elif self._input_state == InputState.PURE_ASCII:
            self.result = {'encoding': 'ascii',
                           'confidence': 1.0,
                           'language': ''}

        # If we have seen non-ASCII, return the best that met MINIMUM_THRESHOLD
        elif self._input_state == InputState.HIGH_BYTE:
            prober_confidence = None
            max_prober_confidence = 0.0
            max_prober = None
            for prober in self._charset_probers:
                if not prober:
                    continue
                prober_confidence = prober.get_confidence()
                if prober_confidence > max_prober_confidence:
                    max_prober_confidence = prober_confidence
                    max_prober = prober
            if max_prober and (max_prober_confidence > self.MINIMUM_THRESHOLD):
                charset_name = max_prober.charset_name
                lower_charset_name = max_prober.charset_name.lower()
                confidence = max_prober.get_confidence()
                # Use Windows encoding name instead of ISO-8859 if we saw any
                # extra Windows-specific bytes
                if lower_charset_name.startswith('iso-8859'):
                    if self._has_win_bytes:
                        charset_name = self.ISO_WIN_MAP.get(lower_charset_name,
                                                            charset_name)
                self.result = {'encoding': charset_name,
                               'confidence': confidence,
                               'language': max_prober.language}

        # Log all prober confidences if none met MINIMUM_THRESHOLD
        if self.logger.getEffectiveLevel() == logging.DEBUG:
            if self.result['encoding'] is None:
                self.logger.debug('no probers hit minimum threshold')
                for group_prober in self._charset_probers:
                    if not group_prober:
                        continue
                    if isinstance(group_prober, CharSetGroupProber):
                        for prober in group_prober.probers:
                            self.logger.debug('%s %s confidence = %s',
                                              prober.charset_name,
                                              prober.language,
                                              prober.get_confidence())
                    else:
                        # BUGFIX: log the non-group prober itself.  This
                        # branch previously referenced the inner-loop name
                        # ``prober``, which is unbound if no group prober was
                        # iterated first (NameError) and otherwise logs the
                        # wrong prober's stats.
                        self.logger.debug('%s %s confidence = %s',
                                          group_prober.charset_name,
                                          group_prober.language,
                                          group_prober.get_confidence())
        return self.result
_vendor/chardet/langturkishmodel.py000064400000025536151733136250013561 0ustar00# -*- coding: utf-8 -*-
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Mark Pilgrim - port to Python
#   Özgür Baskın - Turkish Language Model
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################

# 255: Control characters that usually does not exist in any text
# 254: Carriage/Return
# 253: symbol (punctuation) that does not belong to word
# 252: 0 - 9

# Character Mapping Table:
Latin5_TurkishCharToOrderMap = (
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,
255, 23, 37, 47, 39, 29, 52, 36, 45, 53, 60, 16, 49, 20, 46, 42,
 48, 69, 44, 35, 31, 51, 38, 62, 65, 43, 56,255,255,255,255,255,
255,  1, 21, 28, 12,  2, 18, 27, 25,  3, 24, 10,  5, 13,  4, 15,
 26, 64,  7,  8,  9, 14, 32, 57, 58, 11, 22,255,255,255,255,255,
180,179,178,177,176,175,174,173,172,171,170,169,168,167,166,165,
164,163,162,161,160,159,101,158,157,156,155,154,153,152,151,106,
150,149,148,147,146,145,144,100,143,142,141,140,139,138,137,136,
 94, 80, 93,135,105,134,133, 63,132,131,130,129,128,127,126,125,
124,104, 73, 99, 79, 85,123, 54,122, 98, 92,121,120, 91,103,119,
 68,118,117, 97,116,115, 50, 90,114,113,112,111, 55, 41, 40, 86,
 89, 70, 59, 78, 71, 82, 88, 33, 77, 66, 84, 83,110, 75, 61, 96,
 30, 67,109, 74, 87,102, 34, 95, 81,108, 76, 72, 17,  6, 19,107,
)

TurkishLangModel = (
3,2,3,3,3,1,3,3,3,3,3,3,3,3,2,1,1,3,3,1,3,3,0,3,3,3,3,3,0,3,1,3,
3,2,1,0,0,1,1,0,0,0,1,0,0,1,1,1,1,0,0,0,0,0,0,0,2,2,0,0,1,0,0,1,
3,2,2,3,3,0,3,3,3,3,3,3,3,2,3,1,0,3,3,1,3,3,0,3,3,3,3,3,0,3,0,3,
3,1,1,0,1,0,1,0,0,0,0,0,0,1,1,1,1,0,0,0,0,0,0,0,2,2,0,0,0,1,0,1,
3,3,2,3,3,0,3,3,3,3,3,3,3,2,3,1,1,3,3,0,3,3,1,2,3,3,3,3,0,3,0,3,
3,1,1,0,0,0,1,0,0,0,0,1,1,0,1,2,1,0,0,0,1,0,0,0,0,2,0,0,0,0,0,1,
3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,1,3,3,2,0,3,2,1,2,2,1,3,3,0,0,0,2,
2,2,0,1,0,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,1,0,1,0,0,1,
3,3,3,2,3,3,1,2,3,3,3,3,3,3,3,1,3,2,1,0,3,2,0,1,2,3,3,2,1,0,0,2,
2,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,2,0,0,0,
1,0,1,3,3,1,3,3,3,3,3,3,3,1,2,0,0,2,3,0,2,3,0,0,2,2,2,3,0,3,0,1,
2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,0,3,2,0,2,3,2,3,3,1,0,0,2,
3,2,0,0,1,0,0,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,1,1,1,0,2,0,0,1,
3,3,3,2,3,3,2,3,3,3,3,2,3,3,3,0,3,3,0,0,2,1,0,0,2,3,2,2,0,0,0,2,
2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1,0,1,0,2,0,0,1,
3,3,3,2,3,3,3,3,3,3,3,2,3,3,3,0,3,2,0,1,3,2,1,1,3,2,3,2,1,0,0,2,
2,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,
3,3,3,2,3,3,3,3,3,3,3,2,3,3,3,0,3,2,2,0,2,3,0,0,2,2,2,2,0,0,0,2,
3,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,2,0,1,0,0,0,
3,3,3,3,3,3,3,2,2,2,2,3,2,3,3,0,3,3,1,1,2,2,0,0,2,2,3,2,0,0,1,3,
0,3,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,1,
3,3,3,2,3,3,3,2,1,2,2,3,2,3,3,0,3,2,0,0,1,1,0,1,1,2,1,2,0,0,0,1,
0,3,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,1,0,0,0,
3,3,3,2,3,3,2,3,2,2,2,3,3,3,3,1,3,1,1,0,3,2,1,1,3,3,2,3,1,0,0,1,
1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,2,0,0,1,
3,2,2,3,3,0,3,3,3,3,3,3,3,2,2,1,0,3,3,1,3,3,0,1,3,3,2,3,0,3,0,3,
2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,
2,2,2,3,3,0,3,3,3,3,3,3,3,3,3,0,0,3,2,0,3,3,0,3,2,3,3,3,0,3,1,3,
2,0,0,0,0,0,0,0,0,0,0,1,0,1,2,0,1,0,0,0,0,0,0,0,2,2,0,0,1,0,0,1,
3,3,3,1,2,3,3,1,0,0,1,0,0,3,3,2,3,0,0,2,0,0,2,0,2,0,0,0,2,0,2,0,
0,3,1,0,1,0,0,0,2,2,1,0,1,1,2,1,2,2,2,0,2,1,1,0,0,0,2,0,0,0,0,0,
1,2,1,3,3,0,3,3,3,3,3,2,3,0,0,0,0,2,3,0,2,3,1,0,2,3,1,3,0,3,0,2,
3,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,1,3,3,2,2,3,2,2,0,1,2,3,0,1,2,1,0,1,0,0,0,1,0,2,2,0,0,0,1,
1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,1,0,0,1,0,0,0,
3,3,3,1,3,3,1,1,3,3,1,1,3,3,1,0,2,1,2,0,2,1,0,0,1,1,2,1,0,0,0,2,
2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,1,0,2,1,3,0,0,2,0,0,3,3,0,3,0,0,1,0,1,2,0,0,1,1,2,2,0,1,0,
0,1,2,1,1,0,1,0,1,1,1,1,1,0,1,1,1,2,2,1,2,0,1,0,0,0,0,0,0,1,0,0,
3,3,3,2,3,2,3,3,0,2,2,2,3,3,3,0,3,0,0,0,2,2,0,1,2,1,1,1,0,0,0,1,
0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,
3,3,3,3,3,3,2,1,2,2,3,3,3,3,2,0,2,0,0,0,2,2,0,0,2,1,3,3,0,0,1,1,
1,1,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,
1,1,2,3,3,0,3,3,3,3,3,3,2,2,0,2,0,2,3,2,3,2,2,2,2,2,2,2,1,3,2,3,
2,0,2,1,2,2,2,2,1,1,2,2,1,2,2,1,2,0,0,2,1,1,0,2,1,0,0,1,0,0,0,1,
2,3,3,1,1,1,0,1,1,1,2,3,2,1,1,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,
0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,2,2,2,3,2,3,2,2,1,3,3,3,0,2,1,2,0,2,1,0,0,1,1,1,1,1,0,0,1,
2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,2,0,1,0,0,0,
3,3,3,2,3,3,3,3,3,2,3,1,2,3,3,1,2,0,0,0,0,0,0,0,3,2,1,1,0,0,0,0,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,
3,3,3,2,2,3,3,2,1,1,1,1,1,3,3,0,3,1,0,0,1,1,0,0,3,1,2,1,0,0,0,0,
0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,
3,3,3,2,2,3,2,2,2,3,2,1,1,3,3,0,3,0,0,0,0,1,0,0,3,1,1,2,0,0,0,1,
1,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
1,1,1,3,3,0,3,3,3,3,3,2,2,2,1,2,0,2,1,2,2,1,1,0,1,2,2,2,2,2,2,2,
0,0,2,1,2,1,2,1,0,1,1,3,1,2,1,1,2,0,0,2,0,1,0,1,0,1,0,0,0,1,0,1,
3,3,3,1,3,3,3,0,1,1,0,2,2,3,1,0,3,0,0,0,1,0,0,0,1,0,0,1,0,1,0,0,
1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,2,0,0,2,2,1,0,0,1,0,0,3,3,1,3,0,0,1,1,0,2,0,3,0,0,0,2,0,1,1,
0,1,2,0,1,2,2,0,2,2,2,2,1,0,2,1,1,0,2,0,2,1,2,0,0,0,0,0,0,0,0,0,
3,3,3,1,3,2,3,2,0,2,2,2,1,3,2,0,2,1,2,0,1,2,0,0,1,0,2,2,0,0,0,2,
1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,1,0,0,0,
3,3,3,0,3,3,1,1,2,3,1,0,3,2,3,0,3,0,0,0,1,0,0,0,1,0,1,0,0,0,0,0,
1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,3,3,0,3,3,2,3,3,2,2,0,0,0,0,1,2,0,1,3,0,0,0,3,1,1,0,3,0,2,
2,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,1,2,2,1,0,3,1,1,1,1,3,3,2,3,0,0,1,0,1,2,0,2,2,0,2,2,0,2,1,
0,2,2,1,1,1,1,0,2,1,1,0,1,1,1,1,2,1,2,1,2,0,1,0,1,0,0,0,0,0,0,0,
3,3,3,0,1,1,3,0,0,1,1,0,0,2,2,0,3,0,0,1,1,0,1,0,0,0,0,0,2,0,0,0,
0,3,1,0,1,0,1,0,2,0,0,1,0,1,0,1,1,1,2,1,1,0,2,0,0,0,0,0,0,0,0,0,
3,3,3,0,2,0,2,0,1,1,1,0,0,3,3,0,2,0,0,1,0,0,2,1,1,0,1,0,1,0,1,0,
0,2,0,1,2,0,2,0,2,1,1,0,1,0,2,1,1,0,2,1,1,0,1,0,0,0,1,1,0,0,0,0,
3,2,3,0,1,0,0,0,0,0,0,0,0,1,2,0,1,0,0,1,0,0,1,0,0,0,0,0,2,0,0,0,
0,0,1,1,0,0,1,0,1,0,0,1,0,0,0,2,1,0,1,0,2,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,0,0,2,3,0,0,1,0,1,0,2,3,2,3,0,0,1,3,0,2,1,0,0,0,0,2,0,1,0,
0,2,1,0,0,1,1,0,2,1,0,0,1,0,0,1,1,0,1,1,2,0,1,0,0,0,0,1,0,0,0,0,
3,2,2,0,0,1,1,0,0,0,0,0,0,3,1,1,1,0,0,0,0,0,1,0,0,0,0,0,2,0,1,0,
0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,
0,0,0,3,3,0,2,3,2,2,1,2,2,1,1,2,0,1,3,2,2,2,0,0,2,2,0,0,0,1,2,1,
3,0,2,1,1,0,1,1,1,0,1,2,2,2,1,1,2,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,
0,1,1,2,3,0,3,3,3,2,2,2,2,1,0,1,0,1,0,1,2,2,0,0,2,2,1,3,1,1,2,1,
0,0,1,1,2,0,1,1,0,0,1,2,0,2,1,1,2,0,0,1,0,0,0,1,0,1,0,1,0,0,0,0,
3,3,2,0,0,3,1,0,0,0,0,0,0,3,2,1,2,0,0,1,0,0,2,0,0,0,0,0,2,0,1,0,
0,2,1,1,0,0,1,0,1,2,0,0,1,1,0,0,2,1,1,1,1,0,2,0,0,0,0,0,0,0,0,0,
3,3,2,0,0,1,0,0,0,0,1,0,0,3,3,2,2,0,0,1,0,0,2,0,1,0,0,0,2,0,1,0,
0,0,1,1,0,0,2,0,2,1,0,0,1,1,2,1,2,0,2,1,2,1,1,1,0,0,1,1,0,0,0,0,
3,3,2,0,0,2,2,0,0,0,1,1,0,2,2,1,3,1,0,1,0,1,2,0,0,0,0,0,1,0,1,0,
0,1,1,0,0,0,0,0,1,0,0,1,0,0,0,1,1,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,2,0,0,0,1,0,0,1,0,0,2,3,1,2,0,0,1,0,0,2,0,0,0,1,0,2,0,2,0,
0,1,1,2,2,1,2,0,2,1,1,0,0,1,1,0,1,1,1,1,2,1,1,0,0,0,0,0,0,0,0,0,
3,3,3,0,2,1,2,1,0,0,1,1,0,3,3,1,2,0,0,1,0,0,2,0,2,0,1,1,2,0,0,0,
0,0,1,1,1,1,2,0,1,1,0,1,1,1,1,0,0,0,1,1,1,0,1,0,0,0,1,0,0,0,0,0,
3,3,3,0,2,2,3,2,0,0,1,0,0,2,3,1,0,0,0,0,0,0,2,0,2,0,0,0,2,0,0,0,
0,1,1,0,0,0,1,0,0,1,0,1,1,0,1,0,1,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0,
3,2,3,0,0,0,0,0,0,0,1,0,0,2,2,2,2,0,0,1,0,0,2,0,0,0,0,0,2,0,1,0,
0,0,2,1,1,0,1,0,2,1,1,0,0,1,1,2,1,0,2,0,2,0,1,0,0,0,2,0,0,0,0,0,
0,0,0,2,2,0,2,1,1,1,1,2,2,0,0,1,0,1,0,0,1,3,0,0,0,0,1,0,0,2,1,0,
0,0,1,0,1,0,0,0,0,0,2,1,0,1,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,
2,0,0,2,3,0,2,3,1,2,2,0,2,0,0,2,0,2,1,1,1,2,1,0,0,1,2,1,1,2,1,0,
1,0,2,0,1,0,1,1,0,0,2,2,1,2,1,1,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,0,2,1,2,0,0,0,1,0,0,3,2,0,1,0,0,1,0,0,2,0,0,0,1,2,1,0,1,0,
0,0,0,0,1,0,1,0,0,1,0,0,0,0,1,0,1,0,1,1,1,0,1,0,0,0,0,0,0,0,0,0,
0,0,0,2,2,0,2,2,1,1,0,1,1,1,1,1,0,0,1,2,1,1,1,0,1,0,0,0,1,1,1,1,
0,0,2,1,0,1,1,1,0,1,1,2,1,2,1,1,2,0,1,1,2,1,0,2,0,0,0,0,0,0,0,0,
3,2,2,0,0,2,0,0,0,0,0,0,0,2,2,0,2,0,0,1,0,0,2,0,0,0,0,0,2,0,0,0,
0,2,1,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,
0,0,0,3,2,0,2,2,0,1,1,0,1,0,0,1,0,0,0,1,0,1,0,0,0,0,0,1,0,0,0,0,
2,0,1,0,1,0,1,1,0,0,1,2,0,1,0,1,1,0,0,1,0,1,0,2,0,0,0,0,0,0,0,0,
2,2,2,0,1,1,0,0,0,1,0,0,0,1,2,0,1,0,0,1,0,0,1,0,0,0,0,1,2,0,1,0,
0,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,1,0,1,0,2,0,0,0,0,0,0,0,0,0,0,0,
2,2,2,2,1,0,1,1,1,0,0,0,0,1,2,0,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,
1,1,2,0,1,0,0,0,1,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,2,0,0,0,0,0,1,
0,0,1,2,2,0,2,1,2,1,1,2,2,0,0,0,0,1,0,0,1,1,0,0,2,0,0,0,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,
2,2,2,0,0,0,1,0,0,0,0,0,0,2,2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,
0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,1,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,2,2,0,1,0,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,1,0,0,0,0,0,0,0,0,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
)

Latin5TurkishModel = {
  'char_to_order_map': Latin5_TurkishCharToOrderMap,
  'precedence_matrix': TurkishLangModel,
  'typical_positive_ratio': 0.970290,
  'keep_english_letter': True,
  'charset_name': "ISO-8859-9",
  'language': 'Turkish',
}
_vendor/chardet/gb2312freq.py000064400000050353151733136250011756 0ustar00######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################

# GB2312 most frequently used character table
#
# Char to FreqOrder table , from hz6763

# 512  --> 0.79  -- 0.79
# 1024 --> 0.92  -- 0.13
# 2048 --> 0.98  -- 0.06
# 6768 --> 1.00  -- 0.02
#
# Ideal Distribution Ratio = 0.79135/(1-0.79135) = 3.79
# Random Distribution Ration = 512 / (3755 - 512) = 0.157
#
# Typical Distribution Ratio about 25% of Ideal one, still much higher that RDR

GB2312_TYPICAL_DISTRIBUTION_RATIO = 0.9

GB2312_TABLE_SIZE = 3760

GB2312_CHAR_TO_FREQ_ORDER = (
1671, 749,1443,2364,3924,3807,2330,3921,1704,3463,2691,1511,1515, 572,3191,2205,
2361, 224,2558, 479,1711, 963,3162, 440,4060,1905,2966,2947,3580,2647,3961,3842,
2204, 869,4207, 970,2678,5626,2944,2956,1479,4048, 514,3595, 588,1346,2820,3409,
 249,4088,1746,1873,2047,1774, 581,1813, 358,1174,3590,1014,1561,4844,2245, 670,
1636,3112, 889,1286, 953, 556,2327,3060,1290,3141, 613, 185,3477,1367, 850,3820,
1715,2428,2642,2303,2732,3041,2562,2648,3566,3946,1349, 388,3098,2091,1360,3585,
 152,1687,1539, 738,1559,  59,1232,2925,2267,1388,1249,1741,1679,2960, 151,1566,
1125,1352,4271, 924,4296, 385,3166,4459, 310,1245,2850,  70,3285,2729,3534,3575,
2398,3298,3466,1960,2265, 217,3647, 864,1909,2084,4401,2773,1010,3269,5152, 853,
3051,3121,1244,4251,1895, 364,1499,1540,2313,1180,3655,2268, 562, 715,2417,3061,
 544, 336,3768,2380,1752,4075, 950, 280,2425,4382, 183,2759,3272, 333,4297,2155,
1688,2356,1444,1039,4540, 736,1177,3349,2443,2368,2144,2225, 565, 196,1482,3406,
 927,1335,4147, 692, 878,1311,1653,3911,3622,1378,4200,1840,2969,3149,2126,1816,
2534,1546,2393,2760, 737,2494,  13, 447, 245,2747,  38,2765,2129,2589,1079, 606,
 360, 471,3755,2890, 404, 848, 699,1785,1236, 370,2221,1023,3746,2074,2026,2023,
2388,1581,2119, 812,1141,3091,2536,1519, 804,2053, 406,1596,1090, 784, 548,4414,
1806,2264,2936,1100, 343,4114,5096, 622,3358, 743,3668,1510,1626,5020,3567,2513,
3195,4115,5627,2489,2991,  24,2065,2697,1087,2719,  48,1634, 315,  68, 985,2052,
 198,2239,1347,1107,1439, 597,2366,2172, 871,3307, 919,2487,2790,1867, 236,2570,
1413,3794, 906,3365,3381,1701,1982,1818,1524,2924,1205, 616,2586,2072,2004, 575,
 253,3099,  32,1365,1182, 197,1714,2454,1201, 554,3388,3224,2748, 756,2587, 250,
2567,1507,1517,3529,1922,2761,2337,3416,1961,1677,2452,2238,3153, 615, 911,1506,
1474,2495,1265,1906,2749,3756,3280,2161, 898,2714,1759,3450,2243,2444, 563,  26,
3286,2266,3769,3344,2707,3677, 611,1402, 531,1028,2871,4548,1375, 261,2948, 835,
1190,4134, 353, 840,2684,1900,3082,1435,2109,1207,1674, 329,1872,2781,4055,2686,
2104, 608,3318,2423,2957,2768,1108,3739,3512,3271,3985,2203,1771,3520,1418,2054,
1681,1153, 225,1627,2929, 162,2050,2511,3687,1954, 124,1859,2431,1684,3032,2894,
 585,4805,3969,2869,2704,2088,2032,2095,3656,2635,4362,2209, 256, 518,2042,2105,
3777,3657, 643,2298,1148,1779, 190, 989,3544, 414,  11,2135,2063,2979,1471, 403,
3678, 126, 770,1563, 671,2499,3216,2877, 600,1179, 307,2805,4937,1268,1297,2694,
 252,4032,1448,1494,1331,1394, 127,2256, 222,1647,1035,1481,3056,1915,1048, 873,
3651, 210,  33,1608,2516, 200,1520, 415, 102,   0,3389,1287, 817,  91,3299,2940,
 836,1814, 549,2197,1396,1669,2987,3582,2297,2848,4528,1070, 687,  20,1819, 121,
1552,1364,1461,1968,2617,3540,2824,2083, 177, 948,4938,2291, 110,4549,2066, 648,
3359,1755,2110,2114,4642,4845,1693,3937,3308,1257,1869,2123, 208,1804,3159,2992,
2531,2549,3361,2418,1350,2347,2800,2568,1291,2036,2680,  72, 842,1990, 212,1233,
1154,1586,  75,2027,3410,4900,1823,1337,2710,2676, 728,2810,1522,3026,4995, 157,
 755,1050,4022, 710, 785,1936,2194,2085,1406,2777,2400, 150,1250,4049,1206, 807,
1910, 534, 529,3309,1721,1660, 274,  39,2827, 661,2670,1578, 925,3248,3815,1094,
4278,4901,4252,  41,1150,3747,2572,2227,4501,3658,4902,3813,3357,3617,2884,2258,
 887, 538,4187,3199,1294,2439,3042,2329,2343,2497,1255, 107, 543,1527, 521,3478,
3568, 194,5062,  15, 961,3870,1241,1192,2664,  66,5215,3260,2111,1295,1127,2152,
3805,4135, 901,1164,1976, 398,1278, 530,1460, 748, 904,1054,1966,1426,  53,2909,
 509, 523,2279,1534, 536,1019, 239,1685, 460,2353, 673,1065,2401,3600,4298,2272,
1272,2363, 284,1753,3679,4064,1695,  81, 815,2677,2757,2731,1386, 859, 500,4221,
2190,2566, 757,1006,2519,2068,1166,1455, 337,2654,3203,1863,1682,1914,3025,1252,
1409,1366, 847, 714,2834,2038,3209, 964,2970,1901, 885,2553,1078,1756,3049, 301,
1572,3326, 688,2130,1996,2429,1805,1648,2930,3421,2750,3652,3088, 262,1158,1254,
 389,1641,1812, 526,1719, 923,2073,1073,1902, 468, 489,4625,1140, 857,2375,3070,
3319,2863, 380, 116,1328,2693,1161,2244, 273,1212,1884,2769,3011,1775,1142, 461,
3066,1200,2147,2212, 790, 702,2695,4222,1601,1058, 434,2338,5153,3640,  67,2360,
4099,2502, 618,3472,1329, 416,1132, 830,2782,1807,2653,3211,3510,1662, 192,2124,
 296,3979,1739,1611,3684,  23, 118, 324, 446,1239,1225, 293,2520,3814,3795,2535,
3116,  17,1074, 467,2692,2201, 387,2922,  45,1326,3055,1645,3659,2817, 958, 243,
1903,2320,1339,2825,1784,3289, 356, 576, 865,2315,2381,3377,3916,1088,3122,1713,
1655, 935, 628,4689,1034,1327, 441, 800, 720, 894,1979,2183,1528,5289,2702,1071,
4046,3572,2399,1571,3281,  79, 761,1103, 327, 134, 758,1899,1371,1615, 879, 442,
 215,2605,2579, 173,2048,2485,1057,2975,3317,1097,2253,3801,4263,1403,1650,2946,
 814,4968,3487,1548,2644,1567,1285,   2, 295,2636,  97, 946,3576, 832, 141,4257,
3273, 760,3821,3521,3156,2607, 949,1024,1733,1516,1803,1920,2125,2283,2665,3180,
1501,2064,3560,2171,1592, 803,3518,1416, 732,3897,4258,1363,1362,2458, 119,1427,
 602,1525,2608,1605,1639,3175, 694,3064,  10, 465,  76,2000,4846,4208, 444,3781,
1619,3353,2206,1273,3796, 740,2483, 320,1723,2377,3660,2619,1359,1137,1762,1724,
2345,2842,1850,1862, 912, 821,1866, 612,2625,1735,2573,3369,1093, 844,  89, 937,
 930,1424,3564,2413,2972,1004,3046,3019,2011, 711,3171,1452,4178, 428, 801,1943,
 432, 445,2811, 206,4136,1472, 730, 349,  73, 397,2802,2547, 998,1637,1167, 789,
 396,3217, 154,1218, 716,1120,1780,2819,4826,1931,3334,3762,2139,1215,2627, 552,
3664,3628,3232,1405,2383,3111,1356,2652,3577,3320,3101,1703, 640,1045,1370,1246,
4996, 371,1575,2436,1621,2210, 984,4033,1734,2638,  16,4529, 663,2755,3255,1451,
3917,2257,1253,1955,2234,1263,2951, 214,1229, 617, 485, 359,1831,1969, 473,2310,
 750,2058, 165,  80,2864,2419, 361,4344,2416,2479,1134, 796,3726,1266,2943, 860,
2715, 938, 390,2734,1313,1384, 248, 202, 877,1064,2854, 522,3907, 279,1602, 297,
2357, 395,3740, 137,2075, 944,4089,2584,1267,3802,  62,1533,2285, 178, 176, 780,
2440, 201,3707, 590, 478,1560,4354,2117,1075,  30,  74,4643,4004,1635,1441,2745,
 776,2596, 238,1077,1692,1912,2844, 605, 499,1742,3947, 241,3053, 980,1749, 936,
2640,4511,2582, 515,1543,2162,5322,2892,2993, 890,2148,1924, 665,1827,3581,1032,
 968,3163, 339,1044,1896, 270, 583,1791,1720,4367,1194,3488,3669,  43,2523,1657,
 163,2167, 290,1209,1622,3378, 550, 634,2508,2510, 695,2634,2384,2512,1476,1414,
 220,1469,2341,2138,2852,3183,2900,4939,2865,3502,1211,3680, 854,3227,1299,2976,
3172, 186,2998,1459, 443,1067,3251,1495, 321,1932,3054, 909, 753,1410,1828, 436,
2441,1119,1587,3164,2186,1258, 227, 231,1425,1890,3200,3942, 247, 959, 725,5254,
2741, 577,2158,2079, 929, 120, 174, 838,2813, 591,1115, 417,2024,  40,3240,1536,
1037, 291,4151,2354, 632,1298,2406,2500,3535,1825,1846,3451, 205,1171, 345,4238,
  18,1163, 811, 685,2208,1217, 425,1312,1508,1175,4308,2552,1033, 587,1381,3059,
2984,3482, 340,1316,4023,3972, 792,3176, 519, 777,4690, 918, 933,4130,2981,3741,
  90,3360,2911,2200,5184,4550, 609,3079,2030, 272,3379,2736, 363,3881,1130,1447,
 286, 779, 357,1169,3350,3137,1630,1220,2687,2391, 747,1277,3688,2618,2682,2601,
1156,3196,5290,4034,3102,1689,3596,3128, 874, 219,2783, 798, 508,1843,2461, 269,
1658,1776,1392,1913,2983,3287,2866,2159,2372, 829,4076,  46,4253,2873,1889,1894,
 915,1834,1631,2181,2318, 298, 664,2818,3555,2735, 954,3228,3117, 527,3511,2173,
 681,2712,3033,2247,2346,3467,1652, 155,2164,3382, 113,1994, 450, 899, 494, 994,
1237,2958,1875,2336,1926,3727, 545,1577,1550, 633,3473, 204,1305,3072,2410,1956,
2471, 707,2134, 841,2195,2196,2663,3843,1026,4940, 990,3252,4997, 368,1092, 437,
3212,3258,1933,1829, 675,2977,2893, 412, 943,3723,4644,3294,3283,2230,2373,5154,
2389,2241,2661,2323,1404,2524, 593, 787, 677,3008,1275,2059, 438,2709,2609,2240,
2269,2246,1446,  36,1568,1373,3892,1574,2301,1456,3962, 693,2276,5216,2035,1143,
2720,1919,1797,1811,2763,4137,2597,1830,1699,1488,1198,2090, 424,1694, 312,3634,
3390,4179,3335,2252,1214, 561,1059,3243,2295,2561, 975,5155,2321,2751,3772, 472,
1537,3282,3398,1047,2077,2348,2878,1323,3340,3076, 690,2906,  51, 369, 170,3541,
1060,2187,2688,3670,2541,1083,1683, 928,3918, 459, 109,4427, 599,3744,4286, 143,
2101,2730,2490,  82,1588,3036,2121, 281,1860, 477,4035,1238,2812,3020,2716,3312,
1530,2188,2055,1317, 843, 636,1808,1173,3495, 649, 181,1002, 147,3641,1159,2414,
3750,2289,2795, 813,3123,2610,1136,4368,   5,3391,4541,2174, 420, 429,1728, 754,
1228,2115,2219, 347,2223,2733, 735,1518,3003,2355,3134,1764,3948,3329,1888,2424,
1001,1234,1972,3321,3363,1672,1021,1450,1584, 226, 765, 655,2526,3404,3244,2302,
3665, 731, 594,2184, 319,1576, 621, 658,2656,4299,2099,3864,1279,2071,2598,2739,
 795,3086,3699,3908,1707,2352,2402,1382,3136,2475,1465,4847,3496,3865,1085,3004,
2591,1084, 213,2287,1963,3565,2250, 822, 793,4574,3187,1772,1789,3050, 595,1484,
1959,2770,1080,2650, 456, 422,2996, 940,3322,4328,4345,3092,2742, 965,2784, 739,
4124, 952,1358,2498,2949,2565, 332,2698,2378, 660,2260,2473,4194,3856,2919, 535,
1260,2651,1208,1428,1300,1949,1303,2942, 433,2455,2450,1251,1946, 614,1269, 641,
1306,1810,2737,3078,2912, 564,2365,1419,1415,1497,4460,2367,2185,1379,3005,1307,
3218,2175,1897,3063, 682,1157,4040,4005,1712,1160,1941,1399, 394, 402,2952,1573,
1151,2986,2404, 862, 299,2033,1489,3006, 346, 171,2886,3401,1726,2932, 168,2533,
  47,2507,1030,3735,1145,3370,1395,1318,1579,3609,4560,2857,4116,1457,2529,1965,
 504,1036,2690,2988,2405, 745,5871, 849,2397,2056,3081, 863,2359,3857,2096,  99,
1397,1769,2300,4428,1643,3455,1978,1757,3718,1440,  35,4879,3742,1296,4228,2280,
 160,5063,1599,2013, 166, 520,3479,1646,3345,3012, 490,1937,1545,1264,2182,2505,
1096,1188,1369,1436,2421,1667,2792,2460,1270,2122, 727,3167,2143, 806,1706,1012,
1800,3037, 960,2218,1882, 805, 139,2456,1139,1521, 851,1052,3093,3089, 342,2039,
 744,5097,1468,1502,1585,2087, 223, 939, 326,2140,2577, 892,2481,1623,4077, 982,
3708, 135,2131,  87,2503,3114,2326,1106, 876,1616, 547,2997,2831,2093,3441,4530,
4314,   9,3256,4229,4148, 659,1462,1986,1710,2046,2913,2231,4090,4880,5255,3392,
3274,1368,3689,4645,1477, 705,3384,3635,1068,1529,2941,1458,3782,1509, 100,1656,
2548, 718,2339, 408,1590,2780,3548,1838,4117,3719,1345,3530, 717,3442,2778,3220,
2898,1892,4590,3614,3371,2043,1998,1224,3483, 891, 635, 584,2559,3355, 733,1766,
1729,1172,3789,1891,2307, 781,2982,2271,1957,1580,5773,2633,2005,4195,3097,1535,
3213,1189,1934,5693,3262, 586,3118,1324,1598, 517,1564,2217,1868,1893,4445,3728,
2703,3139,1526,1787,1992,3882,2875,1549,1199,1056,2224,1904,2711,5098,4287, 338,
1993,3129,3489,2689,1809,2815,1997, 957,1855,3898,2550,3275,3057,1105,1319, 627,
1505,1911,1883,3526, 698,3629,3456,1833,1431, 746,  77,1261,2017,2296,1977,1885,
 125,1334,1600, 525,1798,1109,2222,1470,1945, 559,2236,1186,3443,2476,1929,1411,
2411,3135,1777,3372,2621,1841,1613,3229, 668,1430,1839,2643,2916, 195,1989,2671,
2358,1387, 629,3205,2293,5256,4439, 123,1310, 888,1879,4300,3021,3605,1003,1162,
3192,2910,2010, 140,2395,2859,  55,1082,2012,2901, 662, 419,2081,1438, 680,2774,
4654,3912,1620,1731,1625,5035,4065,2328, 512,1344, 802,5443,2163,2311,2537, 524,
3399,  98,1155,2103,1918,2606,3925,2816,1393,2465,1504,3773,2177,3963,1478,4346,
 180,1113,4655,3461,2028,1698, 833,2696,1235,1322,1594,4408,3623,3013,3225,2040,
3022, 541,2881, 607,3632,2029,1665,1219, 639,1385,1686,1099,2803,3231,1938,3188,
2858, 427, 676,2772,1168,2025, 454,3253,2486,3556, 230,1950, 580, 791,1991,1280,
1086,1974,2034, 630, 257,3338,2788,4903,1017,  86,4790, 966,2789,1995,1696,1131,
 259,3095,4188,1308, 179,1463,5257, 289,4107,1248,  42,3413,1725,2288, 896,1947,
 774,4474,4254, 604,3430,4264, 392,2514,2588, 452, 237,1408,3018, 988,4531,1970,
3034,3310, 540,2370,1562,1288,2990, 502,4765,1147,   4,1853,2708, 207, 294,2814,
4078,2902,2509, 684,  34,3105,3532,2551, 644, 709,2801,2344, 573,1727,3573,3557,
2021,1081,3100,4315,2100,3681, 199,2263,1837,2385, 146,3484,1195,2776,3949, 997,
1939,3973,1008,1091,1202,1962,1847,1149,4209,5444,1076, 493, 117,5400,2521, 972,
1490,2934,1796,4542,2374,1512,2933,2657, 413,2888,1135,2762,2314,2156,1355,2369,
 766,2007,2527,2170,3124,2491,2593,2632,4757,2437, 234,3125,3591,1898,1750,1376,
1942,3468,3138, 570,2127,2145,3276,4131, 962, 132,1445,4196,  19, 941,3624,3480,
3366,1973,1374,4461,3431,2629, 283,2415,2275, 808,2887,3620,2112,2563,1353,3610,
 955,1089,3103,1053,  96,  88,4097, 823,3808,1583, 399, 292,4091,3313, 421,1128,
 642,4006, 903,2539,1877,2082, 596,  29,4066,1790, 722,2157, 130, 995,1569, 769,
1485, 464, 513,2213, 288,1923,1101,2453,4316, 133, 486,2445,  50, 625, 487,2207,
  57, 423, 481,2962, 159,3729,1558, 491, 303, 482, 501, 240,2837, 112,3648,2392,
1783, 362,   8,3433,3422, 610,2793,3277,1390,1284,1654,  21,3823, 734, 367, 623,
 193, 287, 374,1009,1483, 816, 476, 313,2255,2340,1262,2150,2899,1146,2581, 782,
2116,1659,2018,1880, 255,3586,3314,1110,2867,2137,2564, 986,2767,5185,2006, 650,
 158, 926, 762, 881,3157,2717,2362,3587, 306,3690,3245,1542,3077,2427,1691,2478,
2118,2985,3490,2438, 539,2305, 983, 129,1754, 355,4201,2386, 827,2923, 104,1773,
2838,2771, 411,2905,3919, 376, 767, 122,1114, 828,2422,1817,3506, 266,3460,1007,
1609,4998, 945,2612,4429,2274, 726,1247,1964,2914,2199,2070,4002,4108, 657,3323,
1422, 579, 455,2764,4737,1222,2895,1670, 824,1223,1487,2525, 558, 861,3080, 598,
2659,2515,1967, 752,2583,2376,2214,4180, 977, 704,2464,4999,2622,4109,1210,2961,
 819,1541, 142,2284,  44, 418, 457,1126,3730,4347,4626,1644,1876,3671,1864, 302,
1063,5694, 624, 723,1984,3745,1314,1676,2488,1610,1449,3558,3569,2166,2098, 409,
1011,2325,3704,2306, 818,1732,1383,1824,1844,3757, 999,2705,3497,1216,1423,2683,
2426,2954,2501,2726,2229,1475,2554,5064,1971,1794,1666,2014,1343, 783, 724, 191,
2434,1354,2220,5065,1763,2752,2472,4152, 131, 175,2885,3434,  92,1466,4920,2616,
3871,3872,3866, 128,1551,1632, 669,1854,3682,4691,4125,1230, 188,2973,3290,1302,
1213, 560,3266, 917, 763,3909,3249,1760, 868,1958, 764,1782,2097, 145,2277,3774,
4462,  64,1491,3062, 971,2132,3606,2442, 221,1226,1617, 218, 323,1185,3207,3147,
 571, 619,1473,1005,1744,2281, 449,1887,2396,3685, 275, 375,3816,1743,3844,3731,
 845,1983,2350,4210,1377, 773, 967,3499,3052,3743,2725,4007,1697,1022,3943,1464,
3264,2855,2722,1952,1029,2839,2467,  84,4383,2215, 820,1391,2015,2448,3672, 377,
1948,2168, 797,2545,3536,2578,2645,  94,2874,1678, 405,1259,3071, 771, 546,1315,
 470,1243,3083, 895,2468, 981, 969,2037, 846,4181, 653,1276,2928,  14,2594, 557,
3007,2474, 156, 902,1338,1740,2574, 537,2518, 973,2282,2216,2433,1928, 138,2903,
1293,2631,1612, 646,3457, 839,2935, 111, 496,2191,2847, 589,3186, 149,3994,2060,
4031,2641,4067,3145,1870,  37,3597,2136,1025,2051,3009,3383,3549,1121,1016,3261,
1301, 251,2446,2599,2153, 872,3246, 637, 334,3705, 831, 884, 921,3065,3140,4092,
2198,1944, 246,2964, 108,2045,1152,1921,2308,1031, 203,3173,4170,1907,3890, 810,
1401,2003,1690, 506, 647,1242,2828,1761,1649,3208,2249,1589,3709,2931,5156,1708,
 498, 666,2613, 834,3817,1231, 184,2851,1124, 883,3197,2261,3710,1765,1553,2658,
1178,2639,2351,  93,1193, 942,2538,2141,4402, 235,1821, 870,1591,2192,1709,1871,
3341,1618,4126,2595,2334, 603, 651,  69, 701, 268,2662,3411,2555,1380,1606, 503,
 448, 254,2371,2646, 574,1187,2309,1770, 322,2235,1292,1801, 305, 566,1133, 229,
2067,2057, 706, 167, 483,2002,2672,3295,1820,3561,3067, 316, 378,2746,3452,1112,
 136,1981, 507,1651,2917,1117, 285,4591, 182,2580,3522,1304, 335,3303,1835,2504,
1795,1792,2248, 674,1018,2106,2449,1857,2292,2845, 976,3047,1781,2600,2727,1389,
1281,  52,3152, 153, 265,3950, 672,3485,3951,4463, 430,1183, 365, 278,2169,  27,
1407,1336,2304, 209,1340,1730,2202,1852,2403,2883, 979,1737,1062, 631,2829,2542,
3876,2592, 825,2086,2226,3048,3625, 352,1417,3724, 542, 991, 431,1351,3938,1861,
2294, 826,1361,2927,3142,3503,1738, 463,2462,2723, 582,1916,1595,2808, 400,3845,
3891,2868,3621,2254,  58,2492,1123, 910,2160,2614,1372,1603,1196,1072,3385,1700,
3267,1980, 696, 480,2430, 920, 799,1570,2920,1951,2041,4047,2540,1321,4223,2469,
3562,2228,1271,2602, 401,2833,3351,2575,5157, 907,2312,1256, 410, 263,3507,1582,
 996, 678,1849,2316,1480, 908,3545,2237, 703,2322, 667,1826,2849,1531,2604,2999,
2407,3146,2151,2630,1786,3711, 469,3542, 497,3899,2409, 858, 837,4446,3393,1274,
 786, 620,1845,2001,3311, 484, 308,3367,1204,1815,3691,2332,1532,2557,1842,2020,
2724,1927,2333,4440, 567,  22,1673,2728,4475,1987,1858,1144,1597, 101,1832,3601,
  12, 974,3783,4391, 951,1412,   1,3720, 453,4608,4041, 528,1041,1027,3230,2628,
1129, 875,1051,3291,1203,2262,1069,2860,2799,2149,2615,3278, 144,1758,3040,  31,
 475,1680, 366,2685,3184, 311,1642,4008,2466,5036,1593,1493,2809, 216,1420,1668,
 233, 304,2128,3284, 232,1429,1768,1040,2008,3407,2740,2967,2543, 242,2133, 778,
1565,2022,2620, 505,2189,2756,1098,2273, 372,1614, 708, 553,2846,2094,2278, 169,
3626,2835,4161, 228,2674,3165, 809,1454,1309, 466,1705,1095, 900,3423, 880,2667,
3751,5258,2317,3109,2571,4317,2766,1503,1342, 866,4447,1118,  63,2076, 314,1881,
1348,1061, 172, 978,3515,1747, 532, 511,3970,   6, 601, 905,2699,3300,1751, 276,
1467,3725,2668,  65,4239,2544,2779,2556,1604, 578,2451,1802, 992,2331,2624,1320,
3446, 713,1513,1013, 103,2786,2447,1661, 886,1702, 916, 654,3574,2031,1556, 751,
2178,2821,2179,1498,1538,2176, 271, 914,2251,2080,1325, 638,1953,2937,3877,2432,
2754,  95,3265,1716, 260,1227,4083, 775, 106,1357,3254, 426,1607, 555,2480, 772,
1985, 244,2546, 474, 495,1046,2611,1851,2061,  71,2089,1675,2590, 742,3758,2843,
3222,1433, 267,2180,2576,2826,2233,2092,3913,2435, 956,1745,3075, 856,2113,1116,
 451,   3,1988,2896,1398, 993,2463,1878,2049,1341,2718,2721,2870,2108, 712,2904,
4363,2753,2324, 277,2872,2349,2649, 384, 987, 435, 691,3000, 922, 164,3939, 652,
1500,1184,4153,2482,3373,2165,4848,2335,3775,3508,3154,2806,2830,1554,2102,1664,
2530,1434,2408, 893,1547,2623,3447,2832,2242,2532,3169,2856,3223,2078,  49,3770,
3469, 462, 318, 656,2259,3250,3069, 679,1629,2758, 344,1138,1104,3120,1836,1283,
3115,2154,1437,4448, 934, 759,1999, 794,2862,1038, 533,2560,1722,2342, 855,2626,
1197,1663,4476,3127,  85,4240,2528,  25,1111,1181,3673, 407,3470,4561,2679,2713,
 768,1925,2841,3986,1544,1165, 932, 373,1240,2146,1930,2673, 721,4766, 354,4333,
 391,2963, 187,  61,3364,1442,1102, 330,1940,1767, 341,3809,4118, 393,2496,2062,
2211, 105, 331, 300, 439, 913,1332, 626, 379,3304,1557, 328, 689,3952, 309,1555,
 931, 317,2517,3027, 325, 569, 686,2107,3084,  60,1042,1333,2794, 264,3177,4014,
1628, 258,3712,   7,4464,1176,1043,1778, 683, 114,1975,  78,1492, 383,1886, 510,
 386, 645,5291,2891,2069,3305,4138,3867,2939,2603,2493,1935,1066,1848,3588,1015,
1282,1289,4609, 697,1453,3044,2666,3611,1856,2412,  54, 719,1330, 568,3778,2459,
1748, 788, 492, 551,1191,1000, 488,3394,3763, 282,1799, 348,2016,1523,3155,2390,
1049, 382,2019,1788,1170, 729,2968,3523, 897,3926,2785,2938,3292, 350,2319,3238,
1718,1717,2655,3453,3143,4465, 161,2889,2980,2009,1421,  56,1908,1640,2387,2232,
1917,1874,2477,4921, 148,  83,3438, 592,4245,2882,1822,1055, 741, 115,1496,1624,
 381,1638,4592,1020, 516,3214, 458, 947,4575,1432, 211,1514,2926,1865,2142, 189,
 852,1221,1400,1486, 882,2299,4036, 351,  28,1122, 700,6479,6480,6481,6482,6483,  #last 512
)

_vendor/chardet/euctwfreq.py000064400000075605151733136250012214 0ustar00######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################

# EUCTW frequency table
# Converted from big5 work
# by Taiwan's Mandarin Promotion Council
# <http:#www.edu.tw:81/mandr/>

# 128  --> 0.42261
# 256  --> 0.57851
# 512  --> 0.74851
# 1024 --> 0.89384
# 2048 --> 0.97583
#
# Idea Distribution Ratio = 0.74851/(1-0.74851) =2.98
# Random Distribution Ration = 512/(5401-512)=0.105
#
# Typical Distribution Ratio about 25% of Ideal one, still much higher than RDR

EUCTW_TYPICAL_DISTRIBUTION_RATIO = 0.75

# Char to FreqOrder table ,
EUCTW_TABLE_SIZE = 5376

EUCTW_CHAR_TO_FREQ_ORDER = (
   1,1800,1506, 255,1431, 198,   9,  82,   6,7310, 177, 202,3615,1256,2808, 110,  # 2742
3735,  33,3241, 261,  76,  44,2113,  16,2931,2184,1176, 659,3868,  26,3404,2643,  # 2758
1198,3869,3313,4060, 410,2211, 302, 590, 361,1963,   8, 204,  58,4296,7311,1931,  # 2774
  63,7312,7313, 317,1614,  75, 222, 159,4061,2412,1480,7314,3500,3068, 224,2809,  # 2790
3616,   3,  10,3870,1471,  29,2774,1135,2852,1939, 873, 130,3242,1123, 312,7315,  # 2806
4297,2051, 507, 252, 682,7316, 142,1914, 124, 206,2932,  34,3501,3173,  64, 604,  # 2822
7317,2494,1976,1977, 155,1990, 645, 641,1606,7318,3405, 337,  72, 406,7319,  80,  # 2838
 630, 238,3174,1509, 263, 939,1092,2644, 756,1440,1094,3406, 449,  69,2969, 591,  # 2854
 179,2095, 471, 115,2034,1843,  60,  50,2970, 134, 806,1868, 734,2035,3407, 180,  # 2870
 995,1607, 156, 537,2893, 688,7320, 319,1305, 779,2144, 514,2374, 298,4298, 359,  # 2886
2495,  90,2707,1338, 663,  11, 906,1099,2545,  20,2436, 182, 532,1716,7321, 732,  # 2902
1376,4062,1311,1420,3175,  25,2312,1056, 113, 399, 382,1949, 242,3408,2467, 529,  # 2918
3243, 475,1447,3617,7322, 117,  21, 656, 810,1297,2295,2329,3502,7323, 126,4063,  # 2934
 706, 456, 150, 613,4299,  71,1118,2036,4064, 145,3069,  85, 835, 486,2114,1246,  # 2950
1426, 428, 727,1285,1015, 800, 106, 623, 303,1281,7324,2127,2354, 347,3736, 221,  # 2966
3503,3110,7325,1955,1153,4065,  83, 296,1199,3070, 192, 624,  93,7326, 822,1897,  # 2982
2810,3111, 795,2064, 991,1554,1542,1592,  27,  43,2853, 859, 139,1456, 860,4300,  # 2998
 437, 712,3871, 164,2392,3112, 695, 211,3017,2096, 195,3872,1608,3504,3505,3618,  # 3014
3873, 234, 811,2971,2097,3874,2229,1441,3506,1615,2375, 668,2076,1638, 305, 228,  # 3030
1664,4301, 467, 415,7327, 262,2098,1593, 239, 108, 300, 200,1033, 512,1247,2077,  # 3046
7328,7329,2173,3176,3619,2673, 593, 845,1062,3244,  88,1723,2037,3875,1950, 212,  # 3062
 266, 152, 149, 468,1898,4066,4302,  77, 187,7330,3018,  37,   5,2972,7331,3876,  # 3078
7332,7333,  39,2517,4303,2894,3177,2078,  55, 148,  74,4304, 545, 483,1474,1029,  # 3094
1665, 217,1869,1531,3113,1104,2645,4067,  24, 172,3507, 900,3877,3508,3509,4305,  # 3110
  32,1408,2811,1312, 329, 487,2355,2247,2708, 784,2674,   4,3019,3314,1427,1788,  # 3126
 188, 109, 499,7334,3620,1717,1789, 888,1217,3020,4306,7335,3510,7336,3315,1520,  # 3142
3621,3878, 196,1034, 775,7337,7338, 929,1815, 249, 439,  38,7339,1063,7340, 794,  # 3158
3879,1435,2296,  46, 178,3245,2065,7341,2376,7342, 214,1709,4307, 804,  35, 707,  # 3174
 324,3622,1601,2546, 140, 459,4068,7343,7344,1365, 839, 272, 978,2257,2572,3409,  # 3190
2128,1363,3623,1423, 697, 100,3071,  48,  70,1231, 495,3114,2193,7345,1294,7346,  # 3206
2079, 462, 586,1042,3246, 853, 256, 988, 185,2377,3410,1698, 434,1084,7347,3411,  # 3222
 314,2615,2775,4308,2330,2331, 569,2280, 637,1816,2518, 757,1162,1878,1616,3412,  # 3238
 287,1577,2115, 768,4309,1671,2854,3511,2519,1321,3737, 909,2413,7348,4069, 933,  # 3254
3738,7349,2052,2356,1222,4310, 765,2414,1322, 786,4311,7350,1919,1462,1677,2895,  # 3270
1699,7351,4312,1424,2437,3115,3624,2590,3316,1774,1940,3413,3880,4070, 309,1369,  # 3286
1130,2812, 364,2230,1653,1299,3881,3512,3882,3883,2646, 525,1085,3021, 902,2000,  # 3302
1475, 964,4313, 421,1844,1415,1057,2281, 940,1364,3116, 376,4314,4315,1381,   7,  # 3318
2520, 983,2378, 336,1710,2675,1845, 321,3414, 559,1131,3022,2742,1808,1132,1313,  # 3334
 265,1481,1857,7352, 352,1203,2813,3247, 167,1089, 420,2814, 776, 792,1724,3513,  # 3350
4071,2438,3248,7353,4072,7354, 446, 229, 333,2743, 901,3739,1200,1557,4316,2647,  # 3366
1920, 395,2744,2676,3740,4073,1835, 125, 916,3178,2616,4317,7355,7356,3741,7357,  # 3382
7358,7359,4318,3117,3625,1133,2547,1757,3415,1510,2313,1409,3514,7360,2145, 438,  # 3398
2591,2896,2379,3317,1068, 958,3023, 461, 311,2855,2677,4074,1915,3179,4075,1978,  # 3414
 383, 750,2745,2617,4076, 274, 539, 385,1278,1442,7361,1154,1964, 384, 561, 210,  # 3430
  98,1295,2548,3515,7362,1711,2415,1482,3416,3884,2897,1257, 129,7363,3742, 642,  # 3446
 523,2776,2777,2648,7364, 141,2231,1333,  68, 176, 441, 876, 907,4077, 603,2592,  # 3462
 710, 171,3417, 404, 549,  18,3118,2393,1410,3626,1666,7365,3516,4319,2898,4320,  # 3478
7366,2973, 368,7367, 146, 366,  99, 871,3627,1543, 748, 807,1586,1185,  22,2258,  # 3494
 379,3743,3180,7368,3181, 505,1941,2618,1991,1382,2314,7369, 380,2357, 218, 702,  # 3510
1817,1248,3418,3024,3517,3318,3249,7370,2974,3628, 930,3250,3744,7371,  59,7372,  # 3526
 585, 601,4078, 497,3419,1112,1314,4321,1801,7373,1223,1472,2174,7374, 749,1836,  # 3542
 690,1899,3745,1772,3885,1476, 429,1043,1790,2232,2116, 917,4079, 447,1086,1629,  # 3558
7375, 556,7376,7377,2020,1654, 844,1090, 105, 550, 966,1758,2815,1008,1782, 686,  # 3574
1095,7378,2282, 793,1602,7379,3518,2593,4322,4080,2933,2297,4323,3746, 980,2496,  # 3590
 544, 353, 527,4324, 908,2678,2899,7380, 381,2619,1942,1348,7381,1341,1252, 560,  # 3606
3072,7382,3420,2856,7383,2053, 973, 886,2080, 143,4325,7384,7385, 157,3886, 496,  # 3622
4081,  57, 840, 540,2038,4326,4327,3421,2117,1445, 970,2259,1748,1965,2081,4082,  # 3638
3119,1234,1775,3251,2816,3629, 773,1206,2129,1066,2039,1326,3887,1738,1725,4083,  # 3654
 279,3120,  51,1544,2594, 423,1578,2130,2066, 173,4328,1879,7386,7387,1583, 264,  # 3670
 610,3630,4329,2439, 280, 154,7388,7389,7390,1739, 338,1282,3073, 693,2857,1411,  # 3686
1074,3747,2440,7391,4330,7392,7393,1240, 952,2394,7394,2900,1538,2679, 685,1483,  # 3702
4084,2468,1436, 953,4085,2054,4331, 671,2395,  79,4086,2441,3252, 608, 567,2680,  # 3718
3422,4087,4088,1691, 393,1261,1791,2396,7395,4332,7396,7397,7398,7399,1383,1672,  # 3734
3748,3182,1464, 522,1119, 661,1150, 216, 675,4333,3888,1432,3519, 609,4334,2681,  # 3750
2397,7400,7401,7402,4089,3025,   0,7403,2469, 315, 231,2442, 301,3319,4335,2380,  # 3766
7404, 233,4090,3631,1818,4336,4337,7405,  96,1776,1315,2082,7406, 257,7407,1809,  # 3782
3632,2709,1139,1819,4091,2021,1124,2163,2778,1777,2649,7408,3074, 363,1655,3183,  # 3798
7409,2975,7410,7411,7412,3889,1567,3890, 718, 103,3184, 849,1443, 341,3320,2934,  # 3814
1484,7413,1712, 127,  67, 339,4092,2398, 679,1412, 821,7414,7415, 834, 738, 351,  # 3830
2976,2146, 846, 235,1497,1880, 418,1992,3749,2710, 186,1100,2147,2746,3520,1545,  # 3846
1355,2935,2858,1377, 583,3891,4093,2573,2977,7416,1298,3633,1078,2549,3634,2358,  # 3862
  78,3750,3751, 267,1289,2099,2001,1594,4094, 348, 369,1274,2194,2175,1837,4338,  # 3878
1820,2817,3635,2747,2283,2002,4339,2936,2748, 144,3321, 882,4340,3892,2749,3423,  # 3894
4341,2901,7417,4095,1726, 320,7418,3893,3026, 788,2978,7419,2818,1773,1327,2859,  # 3910
3894,2819,7420,1306,4342,2003,1700,3752,3521,2359,2650, 787,2022, 506, 824,3636,  # 3926
 534, 323,4343,1044,3322,2023,1900, 946,3424,7421,1778,1500,1678,7422,1881,4344,  # 3942
 165, 243,4345,3637,2521, 123, 683,4096, 764,4346,  36,3895,1792, 589,2902, 816,  # 3958
 626,1667,3027,2233,1639,1555,1622,3753,3896,7423,3897,2860,1370,1228,1932, 891,  # 3974
2083,2903, 304,4097,7424, 292,2979,2711,3522, 691,2100,4098,1115,4347, 118, 662,  # 3990
7425, 611,1156, 854,2381,1316,2861,   2, 386, 515,2904,7426,7427,3253, 868,2234,  # 4006
1486, 855,2651, 785,2212,3028,7428,1040,3185,3523,7429,3121, 448,7430,1525,7431,  # 4022
2164,4348,7432,3754,7433,4099,2820,3524,3122, 503, 818,3898,3123,1568, 814, 676,  # 4038
1444, 306,1749,7434,3755,1416,1030, 197,1428, 805,2821,1501,4349,7435,7436,7437,  # 4054
1993,7438,4350,7439,7440,2195,  13,2779,3638,2980,3124,1229,1916,7441,3756,2131,  # 4070
7442,4100,4351,2399,3525,7443,2213,1511,1727,1120,7444,7445, 646,3757,2443, 307,  # 4086
7446,7447,1595,3186,7448,7449,7450,3639,1113,1356,3899,1465,2522,2523,7451, 519,  # 4102
7452, 128,2132,  92,2284,1979,7453,3900,1512, 342,3125,2196,7454,2780,2214,1980,  # 4118
3323,7455, 290,1656,1317, 789, 827,2360,7456,3758,4352, 562, 581,3901,7457, 401,  # 4134
4353,2248,  94,4354,1399,2781,7458,1463,2024,4355,3187,1943,7459, 828,1105,4101,  # 4150
1262,1394,7460,4102, 605,4356,7461,1783,2862,7462,2822, 819,2101, 578,2197,2937,  # 4166
7463,1502, 436,3254,4103,3255,2823,3902,2905,3425,3426,7464,2712,2315,7465,7466,  # 4182
2332,2067,  23,4357, 193, 826,3759,2102, 699,1630,4104,3075, 390,1793,1064,3526,  # 4198
7467,1579,3076,3077,1400,7468,4105,1838,1640,2863,7469,4358,4359, 137,4106, 598,  # 4214
3078,1966, 780, 104, 974,2938,7470, 278, 899, 253, 402, 572, 504, 493,1339,7471,  # 4230
3903,1275,4360,2574,2550,7472,3640,3029,3079,2249, 565,1334,2713, 863,  41,7473,  # 4246
7474,4361,7475,1657,2333,  19, 463,2750,4107, 606,7476,2981,3256,1087,2084,1323,  # 4262
2652,2982,7477,1631,1623,1750,4108,2682,7478,2864, 791,2714,2653,2334, 232,2416,  # 4278
7479,2983,1498,7480,2654,2620, 755,1366,3641,3257,3126,2025,1609, 119,1917,3427,  # 4294
 862,1026,4109,7481,3904,3760,4362,3905,4363,2260,1951,2470,7482,1125, 817,4110,  # 4310
4111,3906,1513,1766,2040,1487,4112,3030,3258,2824,3761,3127,7483,7484,1507,7485,  # 4326
2683, 733,  40,1632,1106,2865, 345,4113, 841,2524, 230,4364,2984,1846,3259,3428,  # 4342
7486,1263, 986,3429,7487, 735, 879, 254,1137, 857, 622,1300,1180,1388,1562,3907,  # 4358
3908,2939, 967,2751,2655,1349, 592,2133,1692,3324,2985,1994,4114,1679,3909,1901,  # 4374
2185,7488, 739,3642,2715,1296,1290,7489,4115,2198,2199,1921,1563,2595,2551,1870,  # 4390
2752,2986,7490, 435,7491, 343,1108, 596,  17,1751,4365,2235,3430,3643,7492,4366,  # 4406
 294,3527,2940,1693, 477, 979, 281,2041,3528, 643,2042,3644,2621,2782,2261,1031,  # 4422
2335,2134,2298,3529,4367, 367,1249,2552,7493,3530,7494,4368,1283,3325,2004, 240,  # 4438
1762,3326,4369,4370, 836,1069,3128, 474,7495,2148,2525, 268,3531,7496,3188,1521,  # 4454
1284,7497,1658,1546,4116,7498,3532,3533,7499,4117,3327,2684,1685,4118, 961,1673,  # 4470
2622, 190,2005,2200,3762,4371,4372,7500, 570,2497,3645,1490,7501,4373,2623,3260,  # 4486
1956,4374, 584,1514, 396,1045,1944,7502,4375,1967,2444,7503,7504,4376,3910, 619,  # 4502
7505,3129,3261, 215,2006,2783,2553,3189,4377,3190,4378, 763,4119,3763,4379,7506,  # 4518
7507,1957,1767,2941,3328,3646,1174, 452,1477,4380,3329,3130,7508,2825,1253,2382,  # 4534
2186,1091,2285,4120, 492,7509, 638,1169,1824,2135,1752,3911, 648, 926,1021,1324,  # 4550
4381, 520,4382, 997, 847,1007, 892,4383,3764,2262,1871,3647,7510,2400,1784,4384,  # 4566
1952,2942,3080,3191,1728,4121,2043,3648,4385,2007,1701,3131,1551,  30,2263,4122,  # 4582
7511,2026,4386,3534,7512, 501,7513,4123, 594,3431,2165,1821,3535,3432,3536,3192,  # 4598
 829,2826,4124,7514,1680,3132,1225,4125,7515,3262,4387,4126,3133,2336,7516,4388,  # 4614
4127,7517,3912,3913,7518,1847,2383,2596,3330,7519,4389, 374,3914, 652,4128,4129,  # 4630
 375,1140, 798,7520,7521,7522,2361,4390,2264, 546,1659, 138,3031,2445,4391,7523,  # 4646
2250, 612,1848, 910, 796,3765,1740,1371, 825,3766,3767,7524,2906,2554,7525, 692,  # 4662
 444,3032,2624, 801,4392,4130,7526,1491, 244,1053,3033,4131,4132, 340,7527,3915,  # 4678
1041,2987, 293,1168,  87,1357,7528,1539, 959,7529,2236, 721, 694,4133,3768, 219,  # 4694
1478, 644,1417,3331,2656,1413,1401,1335,1389,3916,7530,7531,2988,2362,3134,1825,  # 4710
 730,1515, 184,2827,  66,4393,7532,1660,2943, 246,3332, 378,1457, 226,3433, 975,  # 4726
3917,2944,1264,3537, 674, 696,7533, 163,7534,1141,2417,2166, 713,3538,3333,4394,  # 4742
3918,7535,7536,1186,  15,7537,1079,1070,7538,1522,3193,3539, 276,1050,2716, 758,  # 4758
1126, 653,2945,3263,7539,2337, 889,3540,3919,3081,2989, 903,1250,4395,3920,3434,  # 4774
3541,1342,1681,1718, 766,3264, 286,  89,2946,3649,7540,1713,7541,2597,3334,2990,  # 4790
7542,2947,2215,3194,2866,7543,4396,2498,2526, 181, 387,1075,3921, 731,2187,3335,  # 4806
7544,3265, 310, 313,3435,2299, 770,4134,  54,3034, 189,4397,3082,3769,3922,7545,  # 4822
1230,1617,1849, 355,3542,4135,4398,3336, 111,4136,3650,1350,3135,3436,3035,4137,  # 4838
2149,3266,3543,7546,2784,3923,3924,2991, 722,2008,7547,1071, 247,1207,2338,2471,  # 4854
1378,4399,2009, 864,1437,1214,4400, 373,3770,1142,2216, 667,4401, 442,2753,2555,  # 4870
3771,3925,1968,4138,3267,1839, 837, 170,1107, 934,1336,1882,7548,7549,2118,4139,  # 4886
2828, 743,1569,7550,4402,4140, 582,2384,1418,3437,7551,1802,7552, 357,1395,1729,  # 4902
3651,3268,2418,1564,2237,7553,3083,3772,1633,4403,1114,2085,4141,1532,7554, 482,  # 4918
2446,4404,7555,7556,1492, 833,1466,7557,2717,3544,1641,2829,7558,1526,1272,3652,  # 4934
4142,1686,1794, 416,2556,1902,1953,1803,7559,3773,2785,3774,1159,2316,7560,2867,  # 4950
4405,1610,1584,3036,2419,2754, 443,3269,1163,3136,7561,7562,3926,7563,4143,2499,  # 4966
3037,4406,3927,3137,2103,1647,3545,2010,1872,4144,7564,4145, 431,3438,7565, 250,  # 4982
  97,  81,4146,7566,1648,1850,1558, 160, 848,7567, 866, 740,1694,7568,2201,2830,  # 4998
3195,4147,4407,3653,1687, 950,2472, 426, 469,3196,3654,3655,3928,7569,7570,1188,  # 5014
 424,1995, 861,3546,4148,3775,2202,2685, 168,1235,3547,4149,7571,2086,1674,4408,  # 5030
3337,3270, 220,2557,1009,7572,3776, 670,2992, 332,1208, 717,7573,7574,3548,2447,  # 5046
3929,3338,7575, 513,7576,1209,2868,3339,3138,4409,1080,7577,7578,7579,7580,2527,  # 5062
3656,3549, 815,1587,3930,3931,7581,3550,3439,3777,1254,4410,1328,3038,1390,3932,  # 5078
1741,3933,3778,3934,7582, 236,3779,2448,3271,7583,7584,3657,3780,1273,3781,4411,  # 5094
7585, 308,7586,4412, 245,4413,1851,2473,1307,2575, 430, 715,2136,2449,7587, 270,  # 5110
 199,2869,3935,7588,3551,2718,1753, 761,1754, 725,1661,1840,4414,3440,3658,7589,  # 5126
7590, 587,  14,3272, 227,2598, 326, 480,2265, 943,2755,3552, 291, 650,1883,7591,  # 5142
1702,1226, 102,1547,  62,3441, 904,4415,3442,1164,4150,7592,7593,1224,1548,2756,  # 5158
 391, 498,1493,7594,1386,1419,7595,2055,1177,4416, 813, 880,1081,2363, 566,1145,  # 5174
4417,2286,1001,1035,2558,2599,2238, 394,1286,7596,7597,2068,7598,  86,1494,1730,  # 5190
3936, 491,1588, 745, 897,2948, 843,3340,3937,2757,2870,3273,1768, 998,2217,2069,  # 5206
 397,1826,1195,1969,3659,2993,3341, 284,7599,3782,2500,2137,2119,1903,7600,3938,  # 5222
2150,3939,4151,1036,3443,1904, 114,2559,4152, 209,1527,7601,7602,2949,2831,2625,  # 5238
2385,2719,3139, 812,2560,7603,3274,7604,1559, 737,1884,3660,1210, 885,  28,2686,  # 5254
3553,3783,7605,4153,1004,1779,4418,7606, 346,1981,2218,2687,4419,3784,1742, 797,  # 5270
1642,3940,1933,1072,1384,2151, 896,3941,3275,3661,3197,2871,3554,7607,2561,1958,  # 5286
4420,2450,1785,7608,7609,7610,3942,4154,1005,1308,3662,4155,2720,4421,4422,1528,  # 5302
2600, 161,1178,4156,1982, 987,4423,1101,4157, 631,3943,1157,3198,2420,1343,1241,  # 5318
1016,2239,2562, 372, 877,2339,2501,1160, 555,1934, 911,3944,7611, 466,1170, 169,  # 5334
1051,2907,2688,3663,2474,2994,1182,2011,2563,1251,2626,7612, 992,2340,3444,1540,  # 5350
2721,1201,2070,2401,1996,2475,7613,4424, 528,1922,2188,1503,1873,1570,2364,3342,  # 5366
3276,7614, 557,1073,7615,1827,3445,2087,2266,3140,3039,3084, 767,3085,2786,4425,  # 5382
1006,4158,4426,2341,1267,2176,3664,3199, 778,3945,3200,2722,1597,2657,7616,4427,  # 5398
7617,3446,7618,7619,7620,3277,2689,1433,3278, 131,  95,1504,3946, 723,4159,3141,  # 5414
1841,3555,2758,2189,3947,2027,2104,3665,7621,2995,3948,1218,7622,3343,3201,3949,  # 5430
4160,2576, 248,1634,3785, 912,7623,2832,3666,3040,3786, 654,  53,7624,2996,7625,  # 5446
1688,4428, 777,3447,1032,3950,1425,7626, 191, 820,2120,2833, 971,4429, 931,3202,  # 5462
 135, 664, 783,3787,1997, 772,2908,1935,3951,3788,4430,2909,3203, 282,2723, 640,  # 5478
1372,3448,1127, 922, 325,3344,7627,7628, 711,2044,7629,7630,3952,2219,2787,1936,  # 5494
3953,3345,2220,2251,3789,2300,7631,4431,3790,1258,3279,3954,3204,2138,2950,3955,  # 5510
3956,7632,2221, 258,3205,4432, 101,1227,7633,3280,1755,7634,1391,3281,7635,2910,  # 5526
2056, 893,7636,7637,7638,1402,4161,2342,7639,7640,3206,3556,7641,7642, 878,1325,  # 5542
1780,2788,4433, 259,1385,2577, 744,1183,2267,4434,7643,3957,2502,7644, 684,1024,  # 5558
4162,7645, 472,3557,3449,1165,3282,3958,3959, 322,2152, 881, 455,1695,1152,1340,  # 5574
 660, 554,2153,4435,1058,4436,4163, 830,1065,3346,3960,4437,1923,7646,1703,1918,  # 5590
7647, 932,2268, 122,7648,4438, 947, 677,7649,3791,2627, 297,1905,1924,2269,4439,  # 5606
2317,3283,7650,7651,4164,7652,4165,  84,4166, 112, 989,7653, 547,1059,3961, 701,  # 5622
3558,1019,7654,4167,7655,3450, 942, 639, 457,2301,2451, 993,2951, 407, 851, 494,  # 5638
4440,3347, 927,7656,1237,7657,2421,3348, 573,4168, 680, 921,2911,1279,1874, 285,  # 5654
 790,1448,1983, 719,2167,7658,7659,4441,3962,3963,1649,7660,1541, 563,7661,1077,  # 5670
7662,3349,3041,3451, 511,2997,3964,3965,3667,3966,1268,2564,3350,3207,4442,4443,  # 5686
7663, 535,1048,1276,1189,2912,2028,3142,1438,1373,2834,2952,1134,2012,7664,4169,  # 5702
1238,2578,3086,1259,7665, 700,7666,2953,3143,3668,4170,7667,4171,1146,1875,1906,  # 5718
4444,2601,3967, 781,2422, 132,1589, 203, 147, 273,2789,2402, 898,1786,2154,3968,  # 5734
3969,7668,3792,2790,7669,7670,4445,4446,7671,3208,7672,1635,3793, 965,7673,1804,  # 5750
2690,1516,3559,1121,1082,1329,3284,3970,1449,3794,  65,1128,2835,2913,2759,1590,  # 5766
3795,7674,7675,  12,2658,  45, 976,2579,3144,4447, 517,2528,1013,1037,3209,7676,  # 5782
3796,2836,7677,3797,7678,3452,7679,2602, 614,1998,2318,3798,3087,2724,2628,7680,  # 5798
2580,4172, 599,1269,7681,1810,3669,7682,2691,3088, 759,1060, 489,1805,3351,3285,  # 5814
1358,7683,7684,2386,1387,1215,2629,2252, 490,7685,7686,4173,1759,2387,2343,7687,  # 5830
4448,3799,1907,3971,2630,1806,3210,4449,3453,3286,2760,2344, 874,7688,7689,3454,  # 5846
3670,1858,  91,2914,3671,3042,3800,4450,7690,3145,3972,2659,7691,3455,1202,1403,  # 5862
3801,2954,2529,1517,2503,4451,3456,2504,7692,4452,7693,2692,1885,1495,1731,3973,  # 5878
2365,4453,7694,2029,7695,7696,3974,2693,1216, 237,2581,4174,2319,3975,3802,4454,  # 5894
4455,2694,3560,3457, 445,4456,7697,7698,7699,7700,2761,  61,3976,3672,1822,3977,  # 5910
7701, 687,2045, 935, 925, 405,2660, 703,1096,1859,2725,4457,3978,1876,1367,2695,  # 5926
3352, 918,2105,1781,2476, 334,3287,1611,1093,4458, 564,3146,3458,3673,3353, 945,  # 5942
2631,2057,4459,7702,1925, 872,4175,7703,3459,2696,3089, 349,4176,3674,3979,4460,  # 5958
3803,4177,3675,2155,3980,4461,4462,4178,4463,2403,2046, 782,3981, 400, 251,4179,  # 5974
1624,7704,7705, 277,3676, 299,1265, 476,1191,3804,2121,4180,4181,1109, 205,7706,  # 5990
2582,1000,2156,3561,1860,7707,7708,7709,4464,7710,4465,2565, 107,2477,2157,3982,  # 6006
3460,3147,7711,1533, 541,1301, 158, 753,4182,2872,3562,7712,1696, 370,1088,4183,  # 6022
4466,3563, 579, 327, 440, 162,2240, 269,1937,1374,3461, 968,3043,  56,1396,3090,  # 6038
2106,3288,3354,7713,1926,2158,4467,2998,7714,3564,7715,7716,3677,4468,2478,7717,  # 6054
2791,7718,1650,4469,7719,2603,7720,7721,3983,2661,3355,1149,3356,3984,3805,3985,  # 6070
7722,1076,  49,7723, 951,3211,3289,3290, 450,2837, 920,7724,1811,2792,2366,4184,  # 6086
1908,1138,2367,3806,3462,7725,3212,4470,1909,1147,1518,2423,4471,3807,7726,4472,  # 6102
2388,2604, 260,1795,3213,7727,7728,3808,3291, 708,7729,3565,1704,7730,3566,1351,  # 6118
1618,3357,2999,1886, 944,4185,3358,4186,3044,3359,4187,7731,3678, 422, 413,1714,  # 6134
3292, 500,2058,2345,4188,2479,7732,1344,1910, 954,7733,1668,7734,7735,3986,2404,  # 6150
4189,3567,3809,4190,7736,2302,1318,2505,3091, 133,3092,2873,4473, 629,  31,2838,  # 6166
2697,3810,4474, 850, 949,4475,3987,2955,1732,2088,4191,1496,1852,7737,3988, 620,  # 6182
3214, 981,1242,3679,3360,1619,3680,1643,3293,2139,2452,1970,1719,3463,2168,7738,  # 6198
3215,7739,7740,3361,1828,7741,1277,4476,1565,2047,7742,1636,3568,3093,7743, 869,  # 6214
2839, 655,3811,3812,3094,3989,3000,3813,1310,3569,4477,7744,7745,7746,1733, 558,  # 6230
4478,3681, 335,1549,3045,1756,4192,3682,1945,3464,1829,1291,1192, 470,2726,2107,  # 6246
2793, 913,1054,3990,7747,1027,7748,3046,3991,4479, 982,2662,3362,3148,3465,3216,  # 6262
3217,1946,2794,7749, 571,4480,7750,1830,7751,3570,2583,1523,2424,7752,2089, 984,  # 6278
4481,3683,1959,7753,3684, 852, 923,2795,3466,3685, 969,1519, 999,2048,2320,1705,  # 6294
7754,3095, 615,1662, 151, 597,3992,2405,2321,1049, 275,4482,3686,4193, 568,3687,  # 6310
3571,2480,4194,3688,7755,2425,2270, 409,3218,7756,1566,2874,3467,1002, 769,2840,  # 6326
 194,2090,3149,3689,2222,3294,4195, 628,1505,7757,7758,1763,2177,3001,3993, 521,  # 6342
1161,2584,1787,2203,2406,4483,3994,1625,4196,4197, 412,  42,3096, 464,7759,2632,  # 6358
4484,3363,1760,1571,2875,3468,2530,1219,2204,3814,2633,2140,2368,4485,4486,3295,  # 6374
1651,3364,3572,7760,7761,3573,2481,3469,7762,3690,7763,7764,2271,2091, 460,7765,  # 6390
4487,7766,3002, 962, 588,3574, 289,3219,2634,1116,  52,7767,3047,1796,7768,7769,  # 6406
7770,1467,7771,1598,1143,3691,4198,1984,1734,1067,4488,1280,3365, 465,4489,1572,  # 6422
 510,7772,1927,2241,1812,1644,3575,7773,4490,3692,7774,7775,2663,1573,1534,7776,  # 6438
7777,4199, 536,1807,1761,3470,3815,3150,2635,7778,7779,7780,4491,3471,2915,1911,  # 6454
2796,7781,3296,1122, 377,3220,7782, 360,7783,7784,4200,1529, 551,7785,2059,3693,  # 6470
1769,2426,7786,2916,4201,3297,3097,2322,2108,2030,4492,1404, 136,1468,1479, 672,  # 6486
1171,3221,2303, 271,3151,7787,2762,7788,2049, 678,2727, 865,1947,4493,7789,2013,  # 6502
3995,2956,7790,2728,2223,1397,3048,3694,4494,4495,1735,2917,3366,3576,7791,3816,  # 6518
 509,2841,2453,2876,3817,7792,7793,3152,3153,4496,4202,2531,4497,2304,1166,1010,  # 6534
 552, 681,1887,7794,7795,2957,2958,3996,1287,1596,1861,3154, 358, 453, 736, 175,  # 6550
 478,1117, 905,1167,1097,7796,1853,1530,7797,1706,7798,2178,3472,2287,3695,3473,  # 6566
3577,4203,2092,4204,7799,3367,1193,2482,4205,1458,2190,2205,1862,1888,1421,3298,  # 6582
2918,3049,2179,3474, 595,2122,7800,3997,7801,7802,4206,1707,2636, 223,3696,1359,  # 6598
 751,3098, 183,3475,7803,2797,3003, 419,2369, 633, 704,3818,2389, 241,7804,7805,  # 6614
7806, 838,3004,3697,2272,2763,2454,3819,1938,2050,3998,1309,3099,2242,1181,7807,  # 6630
1136,2206,3820,2370,1446,4207,2305,4498,7808,7809,4208,1055,2605, 484,3698,7810,  # 6646
3999, 625,4209,2273,3368,1499,4210,4000,7811,4001,4211,3222,2274,2275,3476,7812,  # 6662
7813,2764, 808,2606,3699,3369,4002,4212,3100,2532, 526,3370,3821,4213, 955,7814,  # 6678
1620,4214,2637,2427,7815,1429,3700,1669,1831, 994, 928,7816,3578,1260,7817,7818,  # 6694
7819,1948,2288, 741,2919,1626,4215,2729,2455, 867,1184, 362,3371,1392,7820,7821,  # 6710
4003,4216,1770,1736,3223,2920,4499,4500,1928,2698,1459,1158,7822,3050,3372,2877,  # 6726
1292,1929,2506,2842,3701,1985,1187,2071,2014,2607,4217,7823,2566,2507,2169,3702,  # 6742
2483,3299,7824,3703,4501,7825,7826, 666,1003,3005,1022,3579,4218,7827,4502,1813,  # 6758
2253, 574,3822,1603, 295,1535, 705,3823,4219, 283, 858, 417,7828,7829,3224,4503,  # 6774
4504,3051,1220,1889,1046,2276,2456,4004,1393,1599, 689,2567, 388,4220,7830,2484,  # 6790
 802,7831,2798,3824,2060,1405,2254,7832,4505,3825,2109,1052,1345,3225,1585,7833,  # 6806
 809,7834,7835,7836, 575,2730,3477, 956,1552,1469,1144,2323,7837,2324,1560,2457,  # 6822
3580,3226,4005, 616,2207,3155,2180,2289,7838,1832,7839,3478,4506,7840,1319,3704,  # 6838
3705,1211,3581,1023,3227,1293,2799,7841,7842,7843,3826, 607,2306,3827, 762,2878,  # 6854
1439,4221,1360,7844,1485,3052,7845,4507,1038,4222,1450,2061,2638,4223,1379,4508,  # 6870
2585,7846,7847,4224,1352,1414,2325,2921,1172,7848,7849,3828,3829,7850,1797,1451,  # 6886
7851,7852,7853,7854,2922,4006,4007,2485,2346, 411,4008,4009,3582,3300,3101,4509,  # 6902
1561,2664,1452,4010,1375,7855,7856,  47,2959, 316,7857,1406,1591,2923,3156,7858,  # 6918
1025,2141,3102,3157, 354,2731, 884,2224,4225,2407, 508,3706, 726,3583, 996,2428,  # 6934
3584, 729,7859, 392,2191,1453,4011,4510,3707,7860,7861,2458,3585,2608,1675,2800,  # 6950
 919,2347,2960,2348,1270,4511,4012,  73,7862,7863, 647,7864,3228,2843,2255,1550,  # 6966
1346,3006,7865,1332, 883,3479,7866,7867,7868,7869,3301,2765,7870,1212, 831,1347,  # 6982
4226,4512,2326,3830,1863,3053, 720,3831,4513,4514,3832,7871,4227,7872,7873,4515,  # 6998
7874,7875,1798,4516,3708,2609,4517,3586,1645,2371,7876,7877,2924, 669,2208,2665,  # 7014
2429,7878,2879,7879,7880,1028,3229,7881,4228,2408,7882,2256,1353,7883,7884,4518,  # 7030
3158, 518,7885,4013,7886,4229,1960,7887,2142,4230,7888,7889,3007,2349,2350,3833,  # 7046
 516,1833,1454,4014,2699,4231,4519,2225,2610,1971,1129,3587,7890,2766,7891,2961,  # 7062
1422, 577,1470,3008,1524,3373,7892,7893, 432,4232,3054,3480,7894,2586,1455,2508,  # 7078
2226,1972,1175,7895,1020,2732,4015,3481,4520,7896,2733,7897,1743,1361,3055,3482,  # 7094
2639,4016,4233,4521,2290, 895, 924,4234,2170, 331,2243,3056, 166,1627,3057,1098,  # 7110
7898,1232,2880,2227,3374,4522, 657, 403,1196,2372, 542,3709,3375,1600,4235,3483,  # 7126
7899,4523,2767,3230, 576, 530,1362,7900,4524,2533,2666,3710,4017,7901, 842,3834,  # 7142
7902,2801,2031,1014,4018, 213,2700,3376, 665, 621,4236,7903,3711,2925,2430,7904,  # 7158
2431,3302,3588,3377,7905,4237,2534,4238,4525,3589,1682,4239,3484,1380,7906, 724,  # 7174
2277, 600,1670,7907,1337,1233,4526,3103,2244,7908,1621,4527,7909, 651,4240,7910,  # 7190
1612,4241,2611,7911,2844,7912,2734,2307,3058,7913, 716,2459,3059, 174,1255,2701,  # 7206
4019,3590, 548,1320,1398, 728,4020,1574,7914,1890,1197,3060,4021,7915,3061,3062,  # 7222
3712,3591,3713, 747,7916, 635,4242,4528,7917,7918,7919,4243,7920,7921,4529,7922,  # 7238
3378,4530,2432, 451,7923,3714,2535,2072,4244,2735,4245,4022,7924,1764,4531,7925,  # 7254
4246, 350,7926,2278,2390,2486,7927,4247,4023,2245,1434,4024, 488,4532, 458,4248,  # 7270
4025,3715, 771,1330,2391,3835,2568,3159,2159,2409,1553,2667,3160,4249,7928,2487,  # 7286
2881,2612,1720,2702,4250,3379,4533,7929,2536,4251,7930,3231,4252,2768,7931,2015,  # 7302
2736,7932,1155,1017,3716,3836,7933,3303,2308, 201,1864,4253,1430,7934,4026,7935,  # 7318
7936,7937,7938,7939,4254,1604,7940, 414,1865, 371,2587,4534,4535,3485,2016,3104,  # 7334
4536,1708, 960,4255, 887, 389,2171,1536,1663,1721,7941,2228,4027,2351,2926,1580,  # 7350
7942,7943,7944,1744,7945,2537,4537,4538,7946,4539,7947,2073,7948,7949,3592,3380,  # 7366
2882,4256,7950,4257,2640,3381,2802, 673,2703,2460, 709,3486,4028,3593,4258,7951,  # 7382
1148, 502, 634,7952,7953,1204,4540,3594,1575,4541,2613,3717,7954,3718,3105, 948,  # 7398
3232, 121,1745,3837,1110,7955,4259,3063,2509,3009,4029,3719,1151,1771,3838,1488,  # 7414
4030,1986,7956,2433,3487,7957,7958,2093,7959,4260,3839,1213,1407,2803, 531,2737,  # 7430
2538,3233,1011,1537,7960,2769,4261,3106,1061,7961,3720,3721,1866,2883,7962,2017,  # 7446
 120,4262,4263,2062,3595,3234,2309,3840,2668,3382,1954,4542,7963,7964,3488,1047,  # 7462
2704,1266,7965,1368,4543,2845, 649,3383,3841,2539,2738,1102,2846,2669,7966,7967,  # 7478
1999,7968,1111,3596,2962,7969,2488,3842,3597,2804,1854,3384,3722,7970,7971,3385,  # 7494
2410,2884,3304,3235,3598,7972,2569,7973,3599,2805,4031,1460, 856,7974,3600,7975,  # 7510
2885,2963,7976,2886,3843,7977,4264, 632,2510, 875,3844,1697,3845,2291,7978,7979,  # 7526
4544,3010,1239, 580,4545,4265,7980, 914, 936,2074,1190,4032,1039,2123,7981,7982,  # 7542
7983,3386,1473,7984,1354,4266,3846,7985,2172,3064,4033, 915,3305,4267,4268,3306,  # 7558
1605,1834,7986,2739, 398,3601,4269,3847,4034, 328,1912,2847,4035,3848,1331,4270,  # 7574
3011, 937,4271,7987,3602,4036,4037,3387,2160,4546,3388, 524, 742, 538,3065,1012,  # 7590
7988,7989,3849,2461,7990, 658,1103, 225,3850,7991,7992,4547,7993,4548,7994,3236,  # 7606
1243,7995,4038, 963,2246,4549,7996,2705,3603,3161,7997,7998,2588,2327,7999,4550,  # 7622
8000,8001,8002,3489,3307, 957,3389,2540,2032,1930,2927,2462, 870,2018,3604,1746,  # 7638
2770,2771,2434,2463,8003,3851,8004,3723,3107,3724,3490,3390,3725,8005,1179,3066,  # 7654
8006,3162,2373,4272,3726,2541,3163,3108,2740,4039,8007,3391,1556,2542,2292, 977,  # 7670
2887,2033,4040,1205,3392,8008,1765,3393,3164,2124,1271,1689, 714,4551,3491,8009,  # 7686
2328,3852, 533,4273,3605,2181, 617,8010,2464,3308,3492,2310,8011,8012,3165,8013,  # 7702
8014,3853,1987, 618, 427,2641,3493,3394,8015,8016,1244,1690,8017,2806,4274,4552,  # 7718
8018,3494,8019,8020,2279,1576, 473,3606,4275,3395, 972,8021,3607,8022,3067,8023,  # 7734
8024,4553,4554,8025,3727,4041,4042,8026, 153,4555, 356,8027,1891,2888,4276,2143,  # 7750
 408, 803,2352,8028,3854,8029,4277,1646,2570,2511,4556,4557,3855,8030,3856,4278,  # 7766
8031,2411,3396, 752,8032,8033,1961,2964,8034, 746,3012,2465,8035,4279,3728, 698,  # 7782
4558,1892,4280,3608,2543,4559,3609,3857,8036,3166,3397,8037,1823,1302,4043,2706,  # 7798
3858,1973,4281,8038,4282,3167, 823,1303,1288,1236,2848,3495,4044,3398, 774,3859,  # 7814
8039,1581,4560,1304,2849,3860,4561,8040,2435,2161,1083,3237,4283,4045,4284, 344,  # 7830
1173, 288,2311, 454,1683,8041,8042,1461,4562,4046,2589,8043,8044,4563, 985, 894,  # 7846
8045,3399,3168,8046,1913,2928,3729,1988,8047,2110,1974,8048,4047,8049,2571,1194,  # 7862
 425,8050,4564,3169,1245,3730,4285,8051,8052,2850,8053, 636,4565,1855,3861, 760,  # 7878
1799,8054,4286,2209,1508,4566,4048,1893,1684,2293,8055,8056,8057,4287,4288,2210,  # 7894
 479,8058,8059, 832,8060,4049,2489,8061,2965,2490,3731, 990,3109, 627,1814,2642,  # 7910
4289,1582,4290,2125,2111,3496,4567,8062, 799,4291,3170,8063,4568,2112,1737,3013,  # 7926
1018, 543, 754,4292,3309,1676,4569,4570,4050,8064,1489,8065,3497,8066,2614,2889,  # 7942
4051,8067,8068,2966,8069,8070,8071,8072,3171,4571,4572,2182,1722,8073,3238,3239,  # 7958
1842,3610,1715, 481, 365,1975,1856,8074,8075,1962,2491,4573,8076,2126,3611,3240,  # 7974
 433,1894,2063,2075,8077, 602,2741,8078,8079,8080,8081,8082,3014,1628,3400,8083,  # 7990
3172,4574,4052,2890,4575,2512,8084,2544,2772,8085,8086,8087,3310,4576,2891,8088,  # 8006
4577,8089,2851,4578,4579,1221,2967,4053,2513,8090,8091,8092,1867,1989,8093,8094,  # 8022
8095,1895,8096,8097,4580,1896,4054, 318,8098,2094,4055,4293,8099,8100, 485,8101,  # 8038
 938,3862, 553,2670, 116,8102,3863,3612,8103,3498,2671,2773,3401,3311,2807,8104,  # 8054
3613,2929,4056,1747,2930,2968,8105,8106, 207,8107,8108,2672,4581,2514,8109,3015,  # 8070
 890,3614,3864,8110,1877,3732,3402,8111,2183,2353,3403,1652,8112,8113,8114, 941,  # 8086
2294, 208,3499,4057,2019, 330,4294,3865,2892,2492,3733,4295,8115,8116,8117,8118,  # 8102
)

_vendor/chardet/sbcharsetprober.py000064400000013031151733136250013360 0ustar00######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 2001
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Mark Pilgrim - port to Python
#   Shy Shalom - original C code
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################

from .charsetprober import CharSetProber
from .enums import CharacterCategory, ProbingState, SequenceLikelihood


class SingleByteCharSetProber(CharSetProber):
    """Prober that scores a byte stream against a single-byte-charset model.

    The model maps each byte to a frequency "order" and provides a
    precedence matrix of 2-character sequence likelihoods; confidence is
    derived from how often highly-likely sequences occur.
    """

    SAMPLE_SIZE = 64
    SB_ENOUGH_REL_THRESHOLD = 1024  #  0.25 * SAMPLE_SIZE^2
    POSITIVE_SHORTCUT_THRESHOLD = 0.95
    NEGATIVE_SHORTCUT_THRESHOLD = 0.05

    def __init__(self, model, reversed=False, name_prober=None):
        """Create a prober for one charset model.

        :param model: dict describing the charset (char-to-order map,
            precedence matrix, thresholds, names).
        :param reversed: when True, each character pair is looked up with
            its two members swapped in the model.
        :param name_prober: optional prober to which charset-name and
            language reporting is delegated.
        """
        super(SingleByteCharSetProber, self).__init__()
        self._model = model
        # TRUE if we need to reverse every pair in the model lookup
        self._reversed = reversed
        # Optional auxiliary prober for name decision
        self._name_prober = name_prober
        self._last_order = None
        self._seq_counters = None
        self._total_seqs = None
        self._total_char = None
        self._freq_char = None
        self.reset()

    def reset(self):
        """Clear all accumulated statistics back to the initial state."""
        super(SingleByteCharSetProber, self).reset()
        # 255 marks "no previous character seen yet"
        self._last_order = 255
        self._seq_counters = [0] * SequenceLikelihood.get_num_categories()
        self._total_seqs = 0
        self._total_char = 0
        # count of characters that fall inside our sampling range
        self._freq_char = 0

    @property
    def charset_name(self):
        # Delegate to the name prober when one was supplied.
        prober = self._name_prober
        return prober.charset_name if prober else self._model['charset_name']

    @property
    def language(self):
        # Delegate to the name prober when one was supplied.
        prober = self._name_prober
        return prober.language if prober else self._model.get('language')

    def feed(self, byte_str):
        """Accumulate sequence statistics for byte_str; return the new state."""
        if not self._model['keep_english_letter']:
            byte_str = self.filter_international_words(byte_str)
        if not byte_str:
            return self.state
        char_to_order_map = self._model['char_to_order_map']
        precedence_matrix = self._model['precedence_matrix']
        for char in byte_str:
            # XXX: Order is in range 1-64, so one would think we want 0-63 here,
            #      but that leads to 27 more test failures than before.
            order = char_to_order_map[char]
            # XXX: This was SYMBOL_CAT_ORDER before, with a value of 250, but
            #      CharacterCategory.SYMBOL is actually 253, so we use CONTROL
            #      to make it closer to the original intent. The only difference
            #      is whether or not we count digits and control characters for
            #      _total_char purposes.
            if order < CharacterCategory.CONTROL:
                self._total_char += 1
            if order < self.SAMPLE_SIZE:
                self._freq_char += 1
                if self._last_order < self.SAMPLE_SIZE:
                    self._total_seqs += 1
                    # Pick the matrix index, optionally with the pair swapped.
                    if self._reversed:
                        lookup_index = (order * self.SAMPLE_SIZE) + self._last_order
                    else:
                        lookup_index = (self._last_order * self.SAMPLE_SIZE) + order
                    likelihood = precedence_matrix[lookup_index]
                    self._seq_counters[likelihood] += 1
            self._last_order = order

        charset_name = self._model['charset_name']
        # Only take shortcuts once enough sequences have been observed.
        if (self.state == ProbingState.DETECTING and
                self._total_seqs > self.SB_ENOUGH_REL_THRESHOLD):
            confidence = self.get_confidence()
            if confidence > self.POSITIVE_SHORTCUT_THRESHOLD:
                self.logger.debug('%s confidence = %s, we have a winner',
                                  charset_name, confidence)
                self._state = ProbingState.FOUND_IT
            elif confidence < self.NEGATIVE_SHORTCUT_THRESHOLD:
                self.logger.debug('%s confidence = %s, below negative '
                                  'shortcut threshhold %s', charset_name,
                                  confidence,
                                  self.NEGATIVE_SHORTCUT_THRESHOLD)
                self._state = ProbingState.NOT_ME

        return self.state

    def get_confidence(self):
        """Return a confidence in [0.01, 0.99] based on positive-sequence rate."""
        if self._total_seqs <= 0:
            return 0.01
        confidence = ((1.0 * self._seq_counters[SequenceLikelihood.POSITIVE]) /
                      self._total_seqs / self._model['typical_positive_ratio'])
        confidence = confidence * self._freq_char / self._total_char
        # Cap just below certainty.
        return 0.99 if confidence >= 1.0 else confidence
_vendor/__pycache__/six.cpython-36.opt-1.pyc000064400000057532151733136250014622 0ustar003

�Pf�u�I@srdZddlmZddlZddlZddlZddlZddlZdZdZ	ej
ddkZej
ddkZej
dd��dzkZ
er�efZefZefZeZeZejZn�efZeefZeejfZeZeZejjd	�r�e�d|�ZnLGdd
�d
e�Z ye!e ��Wn e"k
�re�d~�ZYnXe�d��Z[ dd�Z#dd�Z$Gdd�de�Z%Gdd�de%�Z&Gdd�dej'�Z(Gdd�de%�Z)Gdd�de�Z*e*e+�Z,Gdd�de(�Z-e)ddd d!�e)d"d#d$d%d"�e)d&d#d#d'd&�e)d(d)d$d*d(�e)d+d)d,�e)d-d#d$d.d-�e)d/d0d0d1d/�e)d2d0d0d/d2�e)d3d)d$d4d3�e)d5d)e
�rd6nd7d8�e)d9d)d:�e)d;d<d=d>�e)d!d!d �e)d?d?d@�e)dAdAd@�e)dBdBd@�e)d4d)d$d4d3�e)dCd#d$dDdC�e)dEd#d#dFdE�e&d$d)�e&dGdH�e&dIdJ�e&dKdLdM�e&dNdOdN�e&dPdQdR�e&dSdTdU�e&dVdWdX�e&dYdZd[�e&d\d]d^�e&d_d`da�e&dbdcdd�e&dedfdg�e&dhdidj�e&dkdkdl�e&dmdmdl�e&dndndl�e&dododp�e&dqdr�e&dsdt�e&dudv�e&dwdxdw�e&dydz�e&d{d|d}�e&d~dd��e&d�d�d��e&d�d�d��e&d�d�d��e&d�d�d��e&d�d�d��e&d�d�d��e&d�d�d��e&d�d�d��e&d�d�d��e&d�d�d��e&d�d�d��e&d�d�d��e&d�e+d�d��e&d�e+d�d��e&d�e+d�e+d��e&d�d�d��e&d�d�d��e&d�d�d��g>Z.ejd�k�rZe.e&d�d��g7Z.x:e.D]2Z/e0e-e/j1e/�e2e/e&��r`e,j3e/d�e/j1��q`W[/e.e-_.e-e+d��Z4e,j3e4d��Gd�d��d�e(�Z5e)d�d�d��e)d�d�d��e)d�d�d��e)d�d�d��e)d�d�d��e)d�d�d��e)d�d�d��e)d�d�d��e)d�d�d��e)d�d�d��e)d>d�d��e)d�d�d��e)d�d�d��e)d�d�d��e)d�d�d��e)d�d�d��e)d�d�d��e)d�d�d��e)d�d�d��e)d�d�d��e)d�d�d��e)d�d�d��e)d�d�d��gZ6xe6D]Z/e0e5e/j1e/��q�W[/e6e5_.e,j3e5e+d��d�dӃGd�dՄd�e(�Z7e)d�d�d��e)d�d�d��e)d�d�d��gZ8xe8D]Z/e0e7e/j1e/��q$W[/e8e7_.e,j3e7e+d��d�d܃Gd�dބd�e(�Z9e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)�dd�d�g!Z:xe:D]Z/e0e9e/j1e/��q�W[/e:e9_.e,j3e9e+�d��d�d�G�d�d��de(�Z;e)�dd��d�e)�dd��d�e)�d	d��d�e)�d
d��d�gZ<xe<D]Z/e0e;e/j1e/��qTW[/e<e;_.e,j3e;e+�d��d�d
�G�d�d��de(�Z=e)�dd�d��gZ>xe>D]Z/e0e=e/j1e/��q�W[/e>e=_.e,j3e=e+�d��d�d�G�d�d��dej'�Z?e,j3e?e+d���d��d�d�Z@�d�d�ZAe�	rj�dZB�dZC�dZD�dZE�dZF�d ZGn$�d!ZB�d"ZC�d#ZD�d$ZE�d%ZF�d&ZGyeHZIWn"eJk
�	r��d'�d(�ZIYnXeIZHyeKZKWn"eJk
�	r��d)�d*�ZKYnXe�
r�d+�d,�ZLejMZN�d-�d.�ZOeZPn>�d/�d,�ZL�d0�d1�ZN�d2�d.�ZOG�d3�d4��d4e�ZPeKZKe#eL�d5�ejQeB�ZRejQeC�ZSejQeD�ZTejQeE�ZUejQeF�ZVejQeG�ZWe�
r��d6�d7�ZX�d8�d9�ZY�d:�d;�ZZ�d<�d=�Z[ej\�d>�Z]ej\�d?�Z^ej\�d@�Z_nT�dA�d7�ZX�dB�d9�ZY�dC�d;�ZZ�dD�d=�Z[ej\�dE�Z]ej\�dF�Z^ej\�dG�Z_e#eX�dH�e#eY�dI�e#eZ�dJ�e#e[�dK�e�r�dL�dM�Z`�dN�dO�ZaebZcddldZdedje�dP�jfZg[dejhd�ZiejjZkelZmddlnZnenjoZoenjpZp�dQZqej
d
d
k�r�dRZr�dSZsn�dTZr�dUZsnj�dV�dM�Z`�dW�dO�ZaecZcebZg�dX�dY�Zi�dZ�d[�Zkejtejuev�ZmddloZoeojoZoZp�d\Zq�dRZr�dSZse#e`�d]�e#ea�d^��d_�dQ�Zw�d`�dT�Zx�da�dU�Zye�r�eze4j{�db�Z|�d��dc�dd�Z}n�d��de�df�Z|e|�dg�ej
dd��d�k�
re|�dh�n.ej
dd��d�k�
r8e|�di�n�dj�dk�Z~eze4j{�dld�Zedk�
rj�dm�dn�Zej
dd��d�k�
r�eZ��do�dn�Ze#e}�dp�ej
dd��d�k�
r�ej�ej�f�dq�dr�Z�nej�Z��ds�dt�Z��du�dv�Z��dw�dx�Z�gZ�e+Z�e��j��dy�dk	�rge�_�ej��rbx>e�ej��D]0\Z�Z�ee��j+dk�r*e�j1e+k�r*ej�e�=P�q*W[�[�ej�j�e,�dS(�z6Utilities for writing code that runs on Python 2 and 3�)�absolute_importNz'Benjamin Peterson <benjamin@python.org>z1.10.0����java��c@seZdZdd�ZdS)�XcCsdS)Nrrl�)�selfr
r
�/usr/lib/python3.6/six.py�__len__>sz	X.__len__N)�__name__�
__module__�__qualname__r
r
r
r
rr	<sr	�?cCs
||_dS)z Add documentation to a function.N)�__doc__)�func�docr
r
r�_add_docKsrcCst|�tj|S)z7Import module, returning the module after the last dot.)�
__import__�sys�modules)�namer
r
r�_import_modulePsrc@seZdZdd�Zdd�ZdS)�
_LazyDescrcCs
||_dS)N)r)rrr
r
r�__init__Xsz_LazyDescr.__init__cCsB|j�}t||j|�yt|j|j�Wntk
r<YnX|S)N)�_resolve�setattrr�delattr�	__class__�AttributeError)r�obj�tp�resultr
r
r�__get__[sz_LazyDescr.__get__N)rrrrr%r
r
r
rrVsrcs.eZdZd�fdd�	Zdd�Zdd�Z�ZS)	�MovedModuleNcs2tt|�j|�tr(|dkr |}||_n||_dS)N)�superr&r�PY3�mod)rr�old�new)r r
rriszMovedModule.__init__cCs
t|j�S)N)rr))rr
r
rrrszMovedModule._resolvecCs"|j�}t||�}t|||�|S)N)r�getattrr)r�attr�_module�valuer
r
r�__getattr__us
zMovedModule.__getattr__)N)rrrrrr0�
__classcell__r
r
)r rr&gs	r&cs(eZdZ�fdd�Zdd�ZgZ�ZS)�_LazyModulecstt|�j|�|jj|_dS)N)r'r2rr r)rr)r r
rr~sz_LazyModule.__init__cCs ddg}|dd�|jD�7}|S)NrrcSsg|]
}|j�qSr
)r)�.0r-r
r
r�
<listcomp>�sz'_LazyModule.__dir__.<locals>.<listcomp>)�_moved_attributes)rZattrsr
r
r�__dir__�sz_LazyModule.__dir__)rrrrr6r5r1r
r
)r rr2|sr2cs&eZdZd�fdd�	Zdd�Z�ZS)�MovedAttributeNcsdtt|�j|�trH|dkr |}||_|dkr@|dkr<|}n|}||_n||_|dkrZ|}||_dS)N)r'r7rr(r)r-)rrZold_modZnew_modZold_attrZnew_attr)r r
rr�szMovedAttribute.__init__cCst|j�}t||j�S)N)rr)r,r-)r�moduler
r
rr�s
zMovedAttribute._resolve)NN)rrrrrr1r
r
)r rr7�sr7c@sVeZdZdZdd�Zdd�Zdd�Zdd	d
�Zdd�Zd
d�Z	dd�Z
dd�ZeZdS)�_SixMetaPathImporterz�
    A meta path importer to import six.moves and its submodules.

    This class implements a PEP302 finder and loader. It should be compatible
    with Python 2.5 and all existing versions of Python3
    cCs||_i|_dS)N)r�
known_modules)rZsix_module_namer
r
rr�sz_SixMetaPathImporter.__init__cGs&x |D]}||j|jd|<qWdS)N�.)r:r)rr)Z	fullnames�fullnamer
r
r�_add_module�s
z _SixMetaPathImporter._add_modulecCs|j|jd|S)Nr;)r:r)rr<r
r
r�_get_module�sz _SixMetaPathImporter._get_moduleNcCs||jkr|SdS)N)r:)rr<�pathr
r
r�find_module�s
z _SixMetaPathImporter.find_modulecCs0y
|j|Stk
r*td|��YnXdS)Nz!This loader does not know module )r:�KeyError�ImportError)rr<r
r
rZ__get_module�s
z!_SixMetaPathImporter.__get_modulecCsRy
tj|Stk
rYnX|j|�}t|t�r>|j�}n||_|tj|<|S)N)rrrA� _SixMetaPathImporter__get_module�
isinstancer&r�
__loader__)rr<r)r
r
r�load_module�s




z _SixMetaPathImporter.load_modulecCst|j|�d�S)z�
        Return true, if the named module is a package.

        We need this method to get correct spec objects with
        Python 3.4 (see PEP451)
        �__path__)�hasattrrC)rr<r
r
r�
is_package�sz_SixMetaPathImporter.is_packagecCs|j|�dS)z;Return None

        Required, if is_package is implementedN)rC)rr<r
r
r�get_code�s
z_SixMetaPathImporter.get_code)N)
rrrrrr=r>r@rCrFrIrJ�
get_sourcer
r
r
rr9�s
	r9c@seZdZdZgZdS)�_MovedItemszLazy loading of moved objectsN)rrrrrGr
r
r
rrL�srLZ	cStringIO�io�StringIO�filter�	itertools�builtinsZifilter�filterfalseZifilterfalse�inputZ__builtin__Z	raw_input�internr�map�imap�getcwd�osZgetcwdu�getcwdb�rangeZxrangeZ
reload_module�	importlibZimp�reload�reduce�	functoolsZshlex_quoteZpipesZshlexZquote�UserDict�collections�UserList�
UserString�zipZizip�zip_longestZizip_longestZconfigparserZConfigParser�copyregZcopy_regZdbm_gnuZgdbmzdbm.gnuZ
_dummy_threadZdummy_threadZhttp_cookiejarZ	cookielibzhttp.cookiejarZhttp_cookiesZCookiezhttp.cookiesZ
html_entitiesZhtmlentitydefsz
html.entitiesZhtml_parserZ
HTMLParserzhtml.parserZhttp_clientZhttplibzhttp.clientZemail_mime_multipartzemail.MIMEMultipartzemail.mime.multipartZemail_mime_nonmultipartzemail.MIMENonMultipartzemail.mime.nonmultipartZemail_mime_textzemail.MIMETextzemail.mime.textZemail_mime_basezemail.MIMEBasezemail.mime.baseZBaseHTTPServerzhttp.serverZ
CGIHTTPServerZSimpleHTTPServerZcPickle�pickleZqueueZQueue�reprlib�reprZsocketserverZSocketServer�_threadZthreadZtkinterZTkinterZtkinter_dialogZDialogztkinter.dialogZtkinter_filedialogZ
FileDialogztkinter.filedialogZtkinter_scrolledtextZScrolledTextztkinter.scrolledtextZtkinter_simpledialogZSimpleDialogztkinter.simpledialogZtkinter_tixZTixztkinter.tixZtkinter_ttkZttkztkinter.ttkZtkinter_constantsZTkconstantsztkinter.constantsZtkinter_dndZTkdndztkinter.dndZtkinter_colorchooserZtkColorChooserztkinter.colorchooserZtkinter_commondialogZtkCommonDialogztkinter.commondialogZtkinter_tkfiledialogZtkFileDialogZtkinter_fontZtkFontztkinter.fontZtkinter_messageboxZtkMessageBoxztkinter.messageboxZtkinter_tksimpledialogZtkSimpleDialogZurllib_parsez.moves.urllib_parsezurllib.parseZurllib_errorz.moves.urllib_errorzurllib.errorZurllibz
.moves.urllibZurllib_robotparser�robotparserzurllib.robotparserZ
xmlrpc_clientZ	xmlrpclibz
xmlrpc.clientZ
xmlrpc_serverZSimpleXMLRPCServerz
xmlrpc.serverZwin32�winreg�_winregzmoves.z.moves�movesc@seZdZdZdS)�Module_six_moves_urllib_parsez7Lazy loading of moved objects in six.moves.urllib_parseN)rrrrr
r
r
rrn@srnZParseResultZurlparseZSplitResultZparse_qsZ	parse_qslZ	urldefragZurljoinZurlsplitZ
urlunparseZ
urlunsplitZ
quote_plusZunquoteZunquote_plusZ	urlencodeZ
splitqueryZsplittagZ	splituserZ
uses_fragmentZuses_netlocZuses_paramsZ
uses_queryZ
uses_relativezmoves.urllib_parsezmoves.urllib.parsec@seZdZdZdS)�Module_six_moves_urllib_errorz7Lazy loading of moved objects in six.moves.urllib_errorN)rrrrr
r
r
rrohsroZURLErrorZurllib2Z	HTTPErrorZContentTooShortErrorz.moves.urllib.errorzmoves.urllib_errorzmoves.urllib.errorc@seZdZdZdS)�Module_six_moves_urllib_requestz9Lazy loading of moved objects in six.moves.urllib_requestN)rrrrr
r
r
rrp|srpZurlopenzurllib.requestZinstall_openerZbuild_openerZpathname2urlZurl2pathnameZ
getproxiesZRequestZOpenerDirectorZHTTPDefaultErrorHandlerZHTTPRedirectHandlerZHTTPCookieProcessorZProxyHandlerZBaseHandlerZHTTPPasswordMgrZHTTPPasswordMgrWithDefaultRealmZAbstractBasicAuthHandlerZHTTPBasicAuthHandlerZProxyBasicAuthHandlerZAbstractDigestAuthHandlerZHTTPDigestAuthHandlerZProxyDigestAuthHandlerZHTTPHandlerZHTTPSHandlerZFileHandlerZ
FTPHandlerZCacheFTPHandlerZUnknownHandlerZHTTPErrorProcessorZurlretrieveZ
urlcleanupZ	URLopenerZFancyURLopenerZproxy_bypassz.moves.urllib.requestzmoves.urllib_requestzmoves.urllib.requestc@seZdZdZdS)� Module_six_moves_urllib_responsez:Lazy loading of moved objects in six.moves.urllib_responseN)rrrrr
r
r
rrq�srqZaddbasezurllib.responseZaddclosehookZaddinfoZ
addinfourlz.moves.urllib.responsezmoves.urllib_responsezmoves.urllib.responsec@seZdZdZdS)�#Module_six_moves_urllib_robotparserz=Lazy loading of moved objects in six.moves.urllib_robotparserN)rrrrr
r
r
rrr�srrZRobotFileParserz.moves.urllib.robotparserzmoves.urllib_robotparserzmoves.urllib.robotparserc@sNeZdZdZgZejd�Zejd�Zejd�Z	ejd�Z
ejd�Zdd�Zd	S)
�Module_six_moves_urllibzICreate a six.moves.urllib namespace that resembles the Python 3 namespacezmoves.urllib_parsezmoves.urllib_errorzmoves.urllib_requestzmoves.urllib_responsezmoves.urllib_robotparsercCsdddddgS)N�parse�error�request�responserjr
)rr
r
rr6�szModule_six_moves_urllib.__dir__N)
rrrrrG�	_importerr>rtrurvrwrjr6r
r
r
rrs�s




rszmoves.urllibcCstt|j|�dS)zAdd an item to six.moves.N)rrLr)Zmover
r
r�add_move�srycCsXytt|�WnDtk
rRytj|=Wn"tk
rLtd|f��YnXYnXdS)zRemove item from six.moves.zno such move, %rN)rrLr!rm�__dict__rA)rr
r
r�remove_move�sr{�__func__�__self__�__closure__�__code__�__defaults__�__globals__�im_funcZim_selfZfunc_closureZ	func_codeZ
func_defaultsZfunc_globalscCs|j�S)N)�next)�itr
r
r�advance_iteratorsr�cCstdd�t|�jD��S)Ncss|]}d|jkVqdS)�__call__N)rz)r3�klassr
r
r�	<genexpr>szcallable.<locals>.<genexpr>)�any�type�__mro__)r"r
r
r�callablesr�cCs|S)Nr
)�unboundr
r
r�get_unbound_functionsr�cCs|S)Nr
)r�clsr
r
r�create_unbound_methodsr�cCs|jS)N)r�)r�r
r
rr�"scCstj|||j�S)N)�types�
MethodTyper )rr"r
r
r�create_bound_method%sr�cCstj|d|�S)N)r�r�)rr�r
r
rr�(sc@seZdZdd�ZdS)�IteratorcCst|�j|�S)N)r��__next__)rr
r
rr�-sz
Iterator.nextN)rrrr�r
r
r
rr�+sr�z3Get the function out of a possibly unbound functioncKst|jf|��S)N)�iter�keys)�d�kwr
r
r�iterkeys>sr�cKst|jf|��S)N)r��values)r�r�r
r
r�
itervaluesAsr�cKst|jf|��S)N)r��items)r�r�r
r
r�	iteritemsDsr�cKst|jf|��S)N)r�Zlists)r�r�r
r
r�	iterlistsGsr�r�r�r�cKs|jf|�S)N)r�)r�r�r
r
rr�PscKs|jf|�S)N)r�)r�r�r
r
rr�SscKs|jf|�S)N)r�)r�r�r
r
rr�VscKs|jf|�S)N)r�)r�r�r
r
rr�Ys�viewkeys�
viewvalues�	viewitemsz1Return an iterator over the keys of a dictionary.z3Return an iterator over the values of a dictionary.z?Return an iterator over the (key, value) pairs of a dictionary.zBReturn an iterator over the (key, [values]) pairs of a dictionary.cCs
|jd�S)Nzlatin-1)�encode)�sr
r
r�bksr�cCs|S)Nr
)r�r
r
r�unsr�z>B�assertCountEqualZassertRaisesRegexpZassertRegexpMatches�assertRaisesRegex�assertRegexcCs|S)Nr
)r�r
r
rr��scCst|jdd�d�S)Nz\\z\\\\Zunicode_escape)�unicode�replace)r�r
r
rr��scCst|d�S)Nr)�ord)Zbsr
r
r�byte2int�sr�cCst||�S)N)r�)Zbuf�ir
r
r�
indexbytes�sr�ZassertItemsEqualzByte literalzText literalcOst|t�||�S)N)r,�_assertCountEqual)r�args�kwargsr
r
rr��scOst|t�||�S)N)r,�_assertRaisesRegex)rr�r�r
r
rr��scOst|t�||�S)N)r,�_assertRegex)rr�r�r
r
rr��s�execcCs*|dkr|�}|j|k	r"|j|��|�dS)N)�
__traceback__�with_traceback)r#r/�tbr
r
r�reraise�s


r�cCsB|dkr*tjd�}|j}|dkr&|j}~n|dkr6|}td�dS)zExecute code in a namespace.Nrzexec _code_ in _globs_, _locs_)r�	_getframe�	f_globals�f_localsr�)Z_code_Z_globs_Z_locs_�framer
r
r�exec_�s
r�z9def reraise(tp, value, tb=None):
    raise tp, value, tb
zrdef raise_from(value, from_value):
    if from_value is None:
        raise value
    raise value from from_value
zCdef raise_from(value, from_value):
    raise value from from_value
cCs|�dS)Nr
)r/Z
from_valuer
r
r�
raise_from�sr��printc
s6|jdtj���dkrdS�fdd�}d}|jdd�}|dk	r`t|t�rNd}nt|t�s`td��|jd	d�}|dk	r�t|t�r�d}nt|t�s�td
��|r�td��|s�x|D]}t|t�r�d}Pq�W|r�td�}td
�}nd}d
}|dkr�|}|dk�r�|}x,t|�D] \}	}|	�r||�||��qW||�dS)z4The new-style print function for Python 2.4 and 2.5.�fileNcsdt|t�st|�}t�t�rVt|t�rV�jdk	rVt�dd�}|dkrHd}|j�j|�}�j|�dS)N�errors�strict)	rD�
basestring�strr�r��encodingr,r��write)�datar�)�fpr
rr��s



zprint_.<locals>.writeF�sepTzsep must be None or a string�endzend must be None or a stringz$invalid keyword arguments to print()�
� )�popr�stdoutrDr�r��	TypeError�	enumerate)
r�r�r�Zwant_unicoder�r��arg�newlineZspacer�r
)r�r�print_�sL







r�cOs<|jdtj�}|jdd�}t||�|r8|dk	r8|j�dS)Nr��flushF)�getrr�r��_printr�)r�r�r�r�r
r
rr�s

zReraise an exception.cs���fdd�}|S)Ncstj����|�}�|_|S)N)r^�wraps�__wrapped__)�f)�assigned�updated�wrappedr
r�wrapperszwraps.<locals>.wrapperr
)r�r�r�r�r
)r�r�r�rr�sr�cs&G��fdd�d��}tj|dfi�S)z%Create a base class with a metaclass.cseZdZ��fdd�ZdS)z!with_metaclass.<locals>.metaclasscs�|�|�S)Nr
)r�rZ
this_basesr�)�bases�metar
r�__new__'sz)with_metaclass.<locals>.metaclass.__new__N)rrrr�r
)r�r�r
r�	metaclass%sr�Ztemporary_class)r�r�)r�r�r�r
)r�r�r�with_metaclass sr�cs�fdd�}|S)z6Class decorator for creating a class with a metaclass.csl|jj�}|jd�}|dk	rDt|t�r,|g}x|D]}|j|�q2W|jdd�|jdd��|j|j|�S)N�	__slots__rz�__weakref__)rz�copyr�rDr�r�r�	__bases__)r�Z	orig_vars�slotsZ	slots_var)r�r
rr�.s



zadd_metaclass.<locals>.wrapperr
)r�r�r
)r�r�
add_metaclass,sr�cCs2tr.d|jkrtd|j��|j|_dd�|_|S)a
    A decorator that defines __unicode__ and __str__ methods under Python 2.
    Under Python 3 it does nothing.

    To support Python 2 and 3 with a single code base, define a __str__ method
    returning text and apply this decorator to the class.
    �__str__zY@python_2_unicode_compatible cannot be applied to %s because it doesn't define __str__().cSs|j�jd�S)Nzutf-8)�__unicode__r�)rr
r
r�<lambda>Jsz-python_2_unicode_compatible.<locals>.<lambda>)�PY2rz�
ValueErrorrr�r�)r�r
r
r�python_2_unicode_compatible<s


r��__spec__)rrli���li���ll����)N)NN)rr)rr)rr)rr)�rZ
__future__rr^rP�operatorrr��
__author__�__version__�version_infor�r(ZPY34r�Zstring_types�intZ
integer_typesr�Zclass_typesZ	text_type�bytesZbinary_type�maxsizeZMAXSIZEr�ZlongZ	ClassTyper��platform�
startswith�objectr	�len�
OverflowErrorrrrr&�
ModuleTyper2r7r9rrxrLr5r-rrrDr=rmrnZ_urllib_parse_moved_attributesroZ_urllib_error_moved_attributesrpZ _urllib_request_moved_attributesrqZ!_urllib_response_moved_attributesrrZ$_urllib_robotparser_moved_attributesrsryr{Z
_meth_funcZ
_meth_selfZ
_func_closureZ
_func_codeZ_func_defaultsZ
_func_globalsr�r��	NameErrorr�r�r�r�r�r��
attrgetterZget_method_functionZget_method_selfZget_function_closureZget_function_codeZget_function_defaultsZget_function_globalsr�r�r�r��methodcallerr�r�r�r�r��chrZunichr�struct�Struct�packZint2byte�
itemgetterr��getitemr�r�Z	iterbytesrMrN�BytesIOr�r�r��partialrVr�r�r�r�r,rQr�r�r�r�r��WRAPPER_ASSIGNMENTS�WRAPPER_UPDATESr�r�r�r�rG�__package__�globalsr�r��submodule_search_locations�	meta_pathr�r�Zimporter�appendr
r
r
r�<module>s�

>












































































































5_vendor/__pycache__/re-vendor.cpython-36.opt-1.pyc000064400000002010151733136250015675 0ustar003

�Pf�@s�ddlZddlZddlZddlZddlZejjejje��Z	dd�Z
dd�Zdd�Ze
dkr�eej�d	krpe
�ejd
dkr�e�nejd
dkr�e�ne
�dS)�NcCstd�tjd�dS)Nz"Usage: re-vendor.py [clean|vendor]�)�print�sys�exit�rr�/usr/lib/python3.6/re-vendor.py�usage	srcCsPx6tjt�D](}tjjt|�}tjj|�rtj|�qWtjtjjtd��dS)Nzsix.py)	�os�listdir�here�path�join�isdir�shutil�rmtree�unlink)�fn�dirnamerrr�clean
s
rcCs6tjddtddg�xtjd�D]}tj|�q WdS)NZinstallz-tz-rz
vendor.txtz
*.egg-info)�pip�mainr�globrr)rrrr�vendorsr�__main__�r)r	rrrrr�abspathr�__file__rrrr�__name__�len�argvrrrr�<module>s _vendor/__pycache__/six.cpython-36.pyc000064400000057532151733136250013663 0ustar003

�Pf�u�I@srdZddlmZddlZddlZddlZddlZddlZdZdZ	ej
ddkZej
ddkZej
dd��dzkZ
er�efZefZefZeZeZejZn�efZeefZeejfZeZeZejjd	�r�e�d|�ZnLGdd
�d
e�Z ye!e ��Wn e"k
�re�d~�ZYnXe�d��Z[ dd�Z#dd�Z$Gdd�de�Z%Gdd�de%�Z&Gdd�dej'�Z(Gdd�de%�Z)Gdd�de�Z*e*e+�Z,Gdd�de(�Z-e)ddd d!�e)d"d#d$d%d"�e)d&d#d#d'd&�e)d(d)d$d*d(�e)d+d)d,�e)d-d#d$d.d-�e)d/d0d0d1d/�e)d2d0d0d/d2�e)d3d)d$d4d3�e)d5d)e
�rd6nd7d8�e)d9d)d:�e)d;d<d=d>�e)d!d!d �e)d?d?d@�e)dAdAd@�e)dBdBd@�e)d4d)d$d4d3�e)dCd#d$dDdC�e)dEd#d#dFdE�e&d$d)�e&dGdH�e&dIdJ�e&dKdLdM�e&dNdOdN�e&dPdQdR�e&dSdTdU�e&dVdWdX�e&dYdZd[�e&d\d]d^�e&d_d`da�e&dbdcdd�e&dedfdg�e&dhdidj�e&dkdkdl�e&dmdmdl�e&dndndl�e&dododp�e&dqdr�e&dsdt�e&dudv�e&dwdxdw�e&dydz�e&d{d|d}�e&d~dd��e&d�d�d��e&d�d�d��e&d�d�d��e&d�d�d��e&d�d�d��e&d�d�d��e&d�d�d��e&d�d�d��e&d�d�d��e&d�d�d��e&d�d�d��e&d�d�d��e&d�e+d�d��e&d�e+d�d��e&d�e+d�e+d��e&d�d�d��e&d�d�d��e&d�d�d��g>Z.ejd�k�rZe.e&d�d��g7Z.x:e.D]2Z/e0e-e/j1e/�e2e/e&��r`e,j3e/d�e/j1��q`W[/e.e-_.e-e+d��Z4e,j3e4d��Gd�d��d�e(�Z5e)d�d�d��e)d�d�d��e)d�d�d��e)d�d�d��e)d�d�d��e)d�d�d��e)d�d�d��e)d�d�d��e)d�d�d��e)d�d�d��e)d>d�d��e)d�d�d��e)d�d�d��e)d�d�d��e)d�d�d��e)d�d�d��e)d�d�d��e)d�d�d��e)d�d�d��e)d�d�d��e)d�d�d��e)d�d�d��e)d�d�d��gZ6xe6D]Z/e0e5e/j1e/��q�W[/e6e5_.e,j3e5e+d��d�dӃGd�dՄd�e(�Z7e)d�d�d��e)d�d�d��e)d�d�d��gZ8xe8D]Z/e0e7e/j1e/��q$W[/e8e7_.e,j3e7e+d��d�d܃Gd�dބd�e(�Z9e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)�dd�d�g!Z:xe:D]Z/e0e9e/j1e/��q�W[/e:e9_.e,j3e9e+�d��d�d�G�d�d��de(�Z;e)�dd��d�e)�dd��d�e)�d	d��d�e)�d
d��d�gZ<xe<D]Z/e0e;e/j1e/��qTW[/e<e;_.e,j3e;e+�d��d�d
�G�d�d��de(�Z=e)�dd�d��gZ>xe>D]Z/e0e=e/j1e/��q�W[/e>e=_.e,j3e=e+�d��d�d�G�d�d��dej'�Z?e,j3e?e+d���d��d�d�Z@�d�d�ZAe�	rj�dZB�dZC�dZD�dZE�dZF�d ZGn$�d!ZB�d"ZC�d#ZD�d$ZE�d%ZF�d&ZGyeHZIWn"eJk
�	r��d'�d(�ZIYnXeIZHyeKZKWn"eJk
�	r��d)�d*�ZKYnXe�
r�d+�d,�ZLejMZN�d-�d.�ZOeZPn>�d/�d,�ZL�d0�d1�ZN�d2�d.�ZOG�d3�d4��d4e�ZPeKZKe#eL�d5�ejQeB�ZRejQeC�ZSejQeD�ZTejQeE�ZUejQeF�ZVejQeG�ZWe�
r��d6�d7�ZX�d8�d9�ZY�d:�d;�ZZ�d<�d=�Z[ej\�d>�Z]ej\�d?�Z^ej\�d@�Z_nT�dA�d7�ZX�dB�d9�ZY�dC�d;�ZZ�dD�d=�Z[ej\�dE�Z]ej\�dF�Z^ej\�dG�Z_e#eX�dH�e#eY�dI�e#eZ�dJ�e#e[�dK�e�r�dL�dM�Z`�dN�dO�ZaebZcddldZdedje�dP�jfZg[dejhd�ZiejjZkelZmddlnZnenjoZoenjpZp�dQZqej
d
d
k�r�dRZr�dSZsn�dTZr�dUZsnj�dV�dM�Z`�dW�dO�ZaecZcebZg�dX�dY�Zi�dZ�d[�Zkejtejuev�ZmddloZoeojoZoZp�d\Zq�dRZr�dSZse#e`�d]�e#ea�d^��d_�dQ�Zw�d`�dT�Zx�da�dU�Zye�r�eze4j{�db�Z|�d��dc�dd�Z}n�d��de�df�Z|e|�dg�ej
dd��d�k�
re|�dh�n.ej
dd��d�k�
r8e|�di�n�dj�dk�Z~eze4j{�dld�Zedk�
rj�dm�dn�Zej
dd��d�k�
r�eZ��do�dn�Ze#e}�dp�ej
dd��d�k�
r�ej�ej�f�dq�dr�Z�nej�Z��ds�dt�Z��du�dv�Z��dw�dx�Z�gZ�e+Z�e��j��dy�dk	�rge�_�ej��rbx>e�ej��D]0\Z�Z�ee��j+dk�r*e�j1e+k�r*ej�e�=P�q*W[�[�ej�j�e,�dS(�z6Utilities for writing code that runs on Python 2 and 3�)�absolute_importNz'Benjamin Peterson <benjamin@python.org>z1.10.0����java��c@seZdZdd�ZdS)�XcCsdS)Nrrl�)�selfr
r
�/usr/lib/python3.6/six.py�__len__>sz	X.__len__N)�__name__�
__module__�__qualname__r
r
r
r
rr	<sr	�?cCs
||_dS)z Add documentation to a function.N)�__doc__)�func�docr
r
r�_add_docKsrcCst|�tj|S)z7Import module, returning the module after the last dot.)�
__import__�sys�modules)�namer
r
r�_import_modulePsrc@seZdZdd�Zdd�ZdS)�
_LazyDescrcCs
||_dS)N)r)rrr
r
r�__init__Xsz_LazyDescr.__init__cCsB|j�}t||j|�yt|j|j�Wntk
r<YnX|S)N)�_resolve�setattrr�delattr�	__class__�AttributeError)r�obj�tp�resultr
r
r�__get__[sz_LazyDescr.__get__N)rrrrr%r
r
r
rrVsrcs.eZdZd�fdd�	Zdd�Zdd�Z�ZS)	�MovedModuleNcs2tt|�j|�tr(|dkr |}||_n||_dS)N)�superr&r�PY3�mod)rr�old�new)r r
rriszMovedModule.__init__cCs
t|j�S)N)rr))rr
r
rrrszMovedModule._resolvecCs"|j�}t||�}t|||�|S)N)r�getattrr)r�attr�_module�valuer
r
r�__getattr__us
zMovedModule.__getattr__)N)rrrrrr0�
__classcell__r
r
)r rr&gs	r&cs(eZdZ�fdd�Zdd�ZgZ�ZS)�_LazyModulecstt|�j|�|jj|_dS)N)r'r2rr r)rr)r r
rr~sz_LazyModule.__init__cCs ddg}|dd�|jD�7}|S)NrrcSsg|]
}|j�qSr
)r)�.0r-r
r
r�
<listcomp>�sz'_LazyModule.__dir__.<locals>.<listcomp>)�_moved_attributes)rZattrsr
r
r�__dir__�sz_LazyModule.__dir__)rrrrr6r5r1r
r
)r rr2|sr2cs&eZdZd�fdd�	Zdd�Z�ZS)�MovedAttributeNcsdtt|�j|�trH|dkr |}||_|dkr@|dkr<|}n|}||_n||_|dkrZ|}||_dS)N)r'r7rr(r)r-)rrZold_modZnew_modZold_attrZnew_attr)r r
rr�szMovedAttribute.__init__cCst|j�}t||j�S)N)rr)r,r-)r�moduler
r
rr�s
zMovedAttribute._resolve)NN)rrrrrr1r
r
)r rr7�sr7c@sVeZdZdZdd�Zdd�Zdd�Zdd	d
�Zdd�Zd
d�Z	dd�Z
dd�ZeZdS)�_SixMetaPathImporterz�
    A meta path importer to import six.moves and its submodules.

    This class implements a PEP302 finder and loader. It should be compatible
    with Python 2.5 and all existing versions of Python3
    cCs||_i|_dS)N)r�
known_modules)rZsix_module_namer
r
rr�sz_SixMetaPathImporter.__init__cGs&x |D]}||j|jd|<qWdS)N�.)r:r)rr)Z	fullnames�fullnamer
r
r�_add_module�s
z _SixMetaPathImporter._add_modulecCs|j|jd|S)Nr;)r:r)rr<r
r
r�_get_module�sz _SixMetaPathImporter._get_moduleNcCs||jkr|SdS)N)r:)rr<�pathr
r
r�find_module�s
z _SixMetaPathImporter.find_modulecCs0y
|j|Stk
r*td|��YnXdS)Nz!This loader does not know module )r:�KeyError�ImportError)rr<r
r
rZ__get_module�s
z!_SixMetaPathImporter.__get_modulecCsRy
tj|Stk
rYnX|j|�}t|t�r>|j�}n||_|tj|<|S)N)rrrA� _SixMetaPathImporter__get_module�
isinstancer&r�
__loader__)rr<r)r
r
r�load_module�s




z _SixMetaPathImporter.load_modulecCst|j|�d�S)z�
        Return true, if the named module is a package.

        We need this method to get correct spec objects with
        Python 3.4 (see PEP451)
        �__path__)�hasattrrC)rr<r
r
r�
is_package�sz_SixMetaPathImporter.is_packagecCs|j|�dS)z;Return None

        Required, if is_package is implementedN)rC)rr<r
r
r�get_code�s
z_SixMetaPathImporter.get_code)N)
rrrrrr=r>r@rCrFrIrJ�
get_sourcer
r
r
rr9�s
	r9c@seZdZdZgZdS)�_MovedItemszLazy loading of moved objectsN)rrrrrGr
r
r
rrL�srLZ	cStringIO�io�StringIO�filter�	itertools�builtinsZifilter�filterfalseZifilterfalse�inputZ__builtin__Z	raw_input�internr�map�imap�getcwd�osZgetcwdu�getcwdb�rangeZxrangeZ
reload_module�	importlibZimp�reload�reduce�	functoolsZshlex_quoteZpipesZshlexZquote�UserDict�collections�UserList�
UserString�zipZizip�zip_longestZizip_longestZconfigparserZConfigParser�copyregZcopy_regZdbm_gnuZgdbmzdbm.gnuZ
_dummy_threadZdummy_threadZhttp_cookiejarZ	cookielibzhttp.cookiejarZhttp_cookiesZCookiezhttp.cookiesZ
html_entitiesZhtmlentitydefsz
html.entitiesZhtml_parserZ
HTMLParserzhtml.parserZhttp_clientZhttplibzhttp.clientZemail_mime_multipartzemail.MIMEMultipartzemail.mime.multipartZemail_mime_nonmultipartzemail.MIMENonMultipartzemail.mime.nonmultipartZemail_mime_textzemail.MIMETextzemail.mime.textZemail_mime_basezemail.MIMEBasezemail.mime.baseZBaseHTTPServerzhttp.serverZ
CGIHTTPServerZSimpleHTTPServerZcPickle�pickleZqueueZQueue�reprlib�reprZsocketserverZSocketServer�_threadZthreadZtkinterZTkinterZtkinter_dialogZDialogztkinter.dialogZtkinter_filedialogZ
FileDialogztkinter.filedialogZtkinter_scrolledtextZScrolledTextztkinter.scrolledtextZtkinter_simpledialogZSimpleDialogztkinter.simpledialogZtkinter_tixZTixztkinter.tixZtkinter_ttkZttkztkinter.ttkZtkinter_constantsZTkconstantsztkinter.constantsZtkinter_dndZTkdndztkinter.dndZtkinter_colorchooserZtkColorChooserztkinter.colorchooserZtkinter_commondialogZtkCommonDialogztkinter.commondialogZtkinter_tkfiledialogZtkFileDialogZtkinter_fontZtkFontztkinter.fontZtkinter_messageboxZtkMessageBoxztkinter.messageboxZtkinter_tksimpledialogZtkSimpleDialogZurllib_parsez.moves.urllib_parsezurllib.parseZurllib_errorz.moves.urllib_errorzurllib.errorZurllibz
.moves.urllibZurllib_robotparser�robotparserzurllib.robotparserZ
xmlrpc_clientZ	xmlrpclibz
xmlrpc.clientZ
xmlrpc_serverZSimpleXMLRPCServerz
xmlrpc.serverZwin32�winreg�_winregzmoves.z.moves�movesc@seZdZdZdS)�Module_six_moves_urllib_parsez7Lazy loading of moved objects in six.moves.urllib_parseN)rrrrr
r
r
rrn@srnZParseResultZurlparseZSplitResultZparse_qsZ	parse_qslZ	urldefragZurljoinZurlsplitZ
urlunparseZ
urlunsplitZ
quote_plusZunquoteZunquote_plusZ	urlencodeZ
splitqueryZsplittagZ	splituserZ
uses_fragmentZuses_netlocZuses_paramsZ
uses_queryZ
uses_relativezmoves.urllib_parsezmoves.urllib.parsec@seZdZdZdS)�Module_six_moves_urllib_errorz7Lazy loading of moved objects in six.moves.urllib_errorN)rrrrr
r
r
rrohsroZURLErrorZurllib2Z	HTTPErrorZContentTooShortErrorz.moves.urllib.errorzmoves.urllib_errorzmoves.urllib.errorc@seZdZdZdS)�Module_six_moves_urllib_requestz9Lazy loading of moved objects in six.moves.urllib_requestN)rrrrr
r
r
rrp|srpZurlopenzurllib.requestZinstall_openerZbuild_openerZpathname2urlZurl2pathnameZ
getproxiesZRequestZOpenerDirectorZHTTPDefaultErrorHandlerZHTTPRedirectHandlerZHTTPCookieProcessorZProxyHandlerZBaseHandlerZHTTPPasswordMgrZHTTPPasswordMgrWithDefaultRealmZAbstractBasicAuthHandlerZHTTPBasicAuthHandlerZProxyBasicAuthHandlerZAbstractDigestAuthHandlerZHTTPDigestAuthHandlerZProxyDigestAuthHandlerZHTTPHandlerZHTTPSHandlerZFileHandlerZ
FTPHandlerZCacheFTPHandlerZUnknownHandlerZHTTPErrorProcessorZurlretrieveZ
urlcleanupZ	URLopenerZFancyURLopenerZproxy_bypassz.moves.urllib.requestzmoves.urllib_requestzmoves.urllib.requestc@seZdZdZdS)� Module_six_moves_urllib_responsez:Lazy loading of moved objects in six.moves.urllib_responseN)rrrrr
r
r
rrq�srqZaddbasezurllib.responseZaddclosehookZaddinfoZ
addinfourlz.moves.urllib.responsezmoves.urllib_responsezmoves.urllib.responsec@seZdZdZdS)�#Module_six_moves_urllib_robotparserz=Lazy loading of moved objects in six.moves.urllib_robotparserN)rrrrr
r
r
rrr�srrZRobotFileParserz.moves.urllib.robotparserzmoves.urllib_robotparserzmoves.urllib.robotparserc@sNeZdZdZgZejd�Zejd�Zejd�Z	ejd�Z
ejd�Zdd�Zd	S)
�Module_six_moves_urllibzICreate a six.moves.urllib namespace that resembles the Python 3 namespacezmoves.urllib_parsezmoves.urllib_errorzmoves.urllib_requestzmoves.urllib_responsezmoves.urllib_robotparsercCsdddddgS)N�parse�error�request�responserjr
)rr
r
rr6�szModule_six_moves_urllib.__dir__N)
rrrrrG�	_importerr>rtrurvrwrjr6r
r
r
rrs�s




rszmoves.urllibcCstt|j|�dS)zAdd an item to six.moves.N)rrLr)Zmover
r
r�add_move�srycCsXytt|�WnDtk
rRytj|=Wn"tk
rLtd|f��YnXYnXdS)zRemove item from six.moves.zno such move, %rN)rrLr!rm�__dict__rA)rr
r
r�remove_move�sr{�__func__�__self__�__closure__�__code__�__defaults__�__globals__�im_funcZim_selfZfunc_closureZ	func_codeZ
func_defaultsZfunc_globalscCs|j�S)N)�next)�itr
r
r�advance_iteratorsr�cCstdd�t|�jD��S)Ncss|]}d|jkVqdS)�__call__N)rz)r3�klassr
r
r�	<genexpr>szcallable.<locals>.<genexpr>)�any�type�__mro__)r"r
r
r�callablesr�cCs|S)Nr
)�unboundr
r
r�get_unbound_functionsr�cCs|S)Nr
)r�clsr
r
r�create_unbound_methodsr�cCs|jS)N)r�)r�r
r
rr�"scCstj|||j�S)N)�types�
MethodTyper )rr"r
r
r�create_bound_method%sr�cCstj|d|�S)N)r�r�)rr�r
r
rr�(sc@seZdZdd�ZdS)�IteratorcCst|�j|�S)N)r��__next__)rr
r
rr�-sz
Iterator.nextN)rrrr�r
r
r
rr�+sr�z3Get the function out of a possibly unbound functioncKst|jf|��S)N)�iter�keys)�d�kwr
r
r�iterkeys>sr�cKst|jf|��S)N)r��values)r�r�r
r
r�
itervaluesAsr�cKst|jf|��S)N)r��items)r�r�r
r
r�	iteritemsDsr�cKst|jf|��S)N)r�Zlists)r�r�r
r
r�	iterlistsGsr�r�r�r�cKs|jf|�S)N)r�)r�r�r
r
rr�PscKs|jf|�S)N)r�)r�r�r
r
rr�SscKs|jf|�S)N)r�)r�r�r
r
rr�VscKs|jf|�S)N)r�)r�r�r
r
rr�Ys�viewkeys�
viewvalues�	viewitemsz1Return an iterator over the keys of a dictionary.z3Return an iterator over the values of a dictionary.z?Return an iterator over the (key, value) pairs of a dictionary.zBReturn an iterator over the (key, [values]) pairs of a dictionary.cCs
|jd�S)Nzlatin-1)�encode)�sr
r
r�bksr�cCs|S)Nr
)r�r
r
r�unsr�z>B�assertCountEqualZassertRaisesRegexpZassertRegexpMatches�assertRaisesRegex�assertRegexcCs|S)Nr
)r�r
r
rr��scCst|jdd�d�S)Nz\\z\\\\Zunicode_escape)�unicode�replace)r�r
r
rr��scCst|d�S)Nr)�ord)Zbsr
r
r�byte2int�sr�cCst||�S)N)r�)Zbuf�ir
r
r�
indexbytes�sr�ZassertItemsEqualzByte literalzText literalcOst|t�||�S)N)r,�_assertCountEqual)r�args�kwargsr
r
rr��scOst|t�||�S)N)r,�_assertRaisesRegex)rr�r�r
r
rr��scOst|t�||�S)N)r,�_assertRegex)rr�r�r
r
rr��s�execcCs*|dkr|�}|j|k	r"|j|��|�dS)N)�
__traceback__�with_traceback)r#r/�tbr
r
r�reraise�s


r�cCsB|dkr*tjd�}|j}|dkr&|j}~n|dkr6|}td�dS)zExecute code in a namespace.Nrzexec _code_ in _globs_, _locs_)r�	_getframe�	f_globals�f_localsr�)Z_code_Z_globs_Z_locs_�framer
r
r�exec_�s
r�z9def reraise(tp, value, tb=None):
    raise tp, value, tb
zrdef raise_from(value, from_value):
    if from_value is None:
        raise value
    raise value from from_value
zCdef raise_from(value, from_value):
    raise value from from_value
cCs|�dS)Nr
)r/Z
from_valuer
r
r�
raise_from�sr��printc
s6|jdtj���dkrdS�fdd�}d}|jdd�}|dk	r`t|t�rNd}nt|t�s`td��|jd	d�}|dk	r�t|t�r�d}nt|t�s�td
��|r�td��|s�x|D]}t|t�r�d}Pq�W|r�td�}td
�}nd}d
}|dkr�|}|dk�r�|}x,t|�D] \}	}|	�r||�||��qW||�dS)z4The new-style print function for Python 2.4 and 2.5.�fileNcsdt|t�st|�}t�t�rVt|t�rV�jdk	rVt�dd�}|dkrHd}|j�j|�}�j|�dS)N�errors�strict)	rD�
basestring�strr�r��encodingr,r��write)�datar�)�fpr
rr��s



zprint_.<locals>.writeF�sepTzsep must be None or a string�endzend must be None or a stringz$invalid keyword arguments to print()�
� )�popr�stdoutrDr�r��	TypeError�	enumerate)
r�r�r�Zwant_unicoder�r��arg�newlineZspacer�r
)r�r�print_�sL







r�cOs<|jdtj�}|jdd�}t||�|r8|dk	r8|j�dS)Nr��flushF)�getrr�r��_printr�)r�r�r�r�r
r
rr�s

zReraise an exception.cs���fdd�}|S)Ncstj����|�}�|_|S)N)r^�wraps�__wrapped__)�f)�assigned�updated�wrappedr
r�wrapperszwraps.<locals>.wrapperr
)r�r�r�r�r
)r�r�r�rr�sr�cs&G��fdd�d��}tj|dfi�S)z%Create a base class with a metaclass.cseZdZ��fdd�ZdS)z!with_metaclass.<locals>.metaclasscs�|�|�S)Nr
)r�rZ
this_basesr�)�bases�metar
r�__new__'sz)with_metaclass.<locals>.metaclass.__new__N)rrrr�r
)r�r�r
r�	metaclass%sr�Ztemporary_class)r�r�)r�r�r�r
)r�r�r�with_metaclass sr�cs�fdd�}|S)z6Class decorator for creating a class with a metaclass.csl|jj�}|jd�}|dk	rDt|t�r,|g}x|D]}|j|�q2W|jdd�|jdd��|j|j|�S)N�	__slots__rz�__weakref__)rz�copyr�rDr�r�r�	__bases__)r�Z	orig_vars�slotsZ	slots_var)r�r
rr�.s



zadd_metaclass.<locals>.wrapperr
)r�r�r
)r�r�
add_metaclass,sr�cCs2tr.d|jkrtd|j��|j|_dd�|_|S)a
    A decorator that defines __unicode__ and __str__ methods under Python 2.
    Under Python 3 it does nothing.

    To support Python 2 and 3 with a single code base, define a __str__ method
    returning text and apply this decorator to the class.
    �__str__zY@python_2_unicode_compatible cannot be applied to %s because it doesn't define __str__().cSs|j�jd�S)Nzutf-8)�__unicode__r�)rr
r
r�<lambda>Jsz-python_2_unicode_compatible.<locals>.<lambda>)�PY2rz�
ValueErrorrr�r�)r�r
r
r�python_2_unicode_compatible<s


r��__spec__)rrli���li���ll����)N)NN)rr)rr)rr)rr)�rZ
__future__rr^rP�operatorrr��
__author__�__version__�version_infor�r(ZPY34r�Zstring_types�intZ
integer_typesr�Zclass_typesZ	text_type�bytesZbinary_type�maxsizeZMAXSIZEr�ZlongZ	ClassTyper��platform�
startswith�objectr	�len�
OverflowErrorrrrr&�
ModuleTyper2r7r9rrxrLr5r-rrrDr=rmrnZ_urllib_parse_moved_attributesroZ_urllib_error_moved_attributesrpZ _urllib_request_moved_attributesrqZ!_urllib_response_moved_attributesrrZ$_urllib_robotparser_moved_attributesrsryr{Z
_meth_funcZ
_meth_selfZ
_func_closureZ
_func_codeZ_func_defaultsZ
_func_globalsr�r��	NameErrorr�r�r�r�r�r��
attrgetterZget_method_functionZget_method_selfZget_function_closureZget_function_codeZget_function_defaultsZget_function_globalsr�r�r�r��methodcallerr�r�r�r�r��chrZunichr�struct�Struct�packZint2byte�
itemgetterr��getitemr�r�Z	iterbytesrMrN�BytesIOr�r�r��partialrVr�r�r�r�r,rQr�r�r�r�r��WRAPPER_ASSIGNMENTS�WRAPPER_UPDATESr�r�r�r�rG�__package__�globalsr�r��submodule_search_locations�	meta_pathr�r�Zimporter�appendr
r
r
r�<module>s�

>












































































































5_vendor/__pycache__/ipaddress.cpython-36.pyc000064400000201427151733136250015030 0ustar003

�Pf09�@sldZddlmZddlZddlZdZefZyeefZWne	k
rJYnXye
ZWn$e	k
rxeZe
ekstt�YnXdOdkr�dd�Zndd�Zy
ejZWnek
r�d	d
�ZYnXdd�Zeed
�r�dd�Zndd�ZdPdd�ZGdd�de�ZdZdZGdd�de�ZGdd�de�Zdd�ZdQdd �Zd!d"�Z d#d$�Z!d%d&�Z"d'd(�Z#d)d*�Z$d+d,�Z%d-d.�Z&d/d0�Z'd1d2�Z(d3d4�Z)Gd5d6�d6e�Z*Gd7d8�d8e*�Z+Gd9d:�d:e*�Z,Gd;d<�d<e�Z-Gd=d>�d>e-e+�Z.Gd?d@�d@e.�Z/GdAdB�dBe-e,�Z0GdCdD�dDe�Z1e1e._2GdEdF�dFe�Z3GdGdH�dHe3e+�Z4GdIdJ�dJe4�Z5GdKdL�dLe3e,�Z6GdMdN�dNe�Z7e7e4_2dS)Rz�A fast, lightweight IPv4/IPv6 manipulation library in Python.

This library is used to create/poke/manipulate IPv4 and IPv6 addresses
and networks.

�)�unicode_literalsNz1.0.17�cCs|S)N�)�bytrr�/usr/lib/python3.6/ipaddress.py�_compat_bytes_to_byte_valssrcCsdd�|D�S)NcSsg|]}tjd|�d�qS)s!Br)�struct�unpack)�.0�brrr�
<listcomp>#sz._compat_bytes_to_byte_vals.<locals>.<listcomp>r)rrrrr"scCs<|dkst�d}x&|D]}t|t�s(t�|d>|}qW|S)N�bigr�)�AssertionError�
isinstance�_compat_int_types)Zbytvals�	endianess�resZbvrrr�_compat_int_from_byte_vals's
rcCs�t|t�st�|dkst�|dkrH|dks2|dkr<tjd��tjd|�S|dkr�|dksd|dd	krntjd
��tjd|d?|d
@�St��dS)Nr
�r�� z(integer out of range for 'I' format codes!I��z)integer out of range for 'QQ' format codes!QQ�@l����l)rrrr�error�pack�NotImplementedError)ZintvalZlengthrrrr�_compat_to_bytes0s

r�
bit_lengthcCs|j�S)N)r)�irrr�_compat_bit_length?sr!cCs&x tj�D]}||?dkr
|Sq
WdS)Nr)�	itertools�count)r rrrrr!Bs�ccs0|dkst�|}x||kr*|V||7}qWdS)Nr)r)�start�end�stepr rrr�
_compat_rangeHs

r(c@s@eZdZfZdd�Zdd�Zdd�Zdd�Zd	d
�Zdd�Z	d
S)�_TotalOrderingMixincCst�dS)N)r)�self�otherrrr�__eq__Wsz_TotalOrderingMixin.__eq__cCs|j|�}|tkrtS|S)N)r,�NotImplemented)r*r+�equalrrr�__ne__Zs
z_TotalOrderingMixin.__ne__cCst�dS)N)r)r*r+rrr�__lt__`sz_TotalOrderingMixin.__lt__cCs&|j|�}|tks|r"|j|�S|S)N)r0r-r,)r*r+�lessrrr�__le__cs

z_TotalOrderingMixin.__le__cCs6|j|�}|tkrtS|j|�}|tkr,tS|p2|S)N)r0r-r,)r*r+r1r.rrr�__gt__is

z_TotalOrderingMixin.__gt__cCs|j|�}|tkrtS|S)N)r0r-)r*r+r1rrr�__ge__rs
z_TotalOrderingMixin.__ge__N)
�__name__�
__module__�__qualname__�	__slots__r,r/r0r2r3r4rrrrr)Ps	r)rrc@seZdZdZdS)�AddressValueErrorz%A Value Error related to the address.N)r5r6r7�__doc__rrrrr9}sr9c@seZdZdZdS)�NetmaskValueErrorz%A Value Error related to the netmask.N)r5r6r7r:rrrrr;�sr;cCsjyt|�Sttfk
r YnXyt|�Sttfk
rBYnXt|t�rZtd|��td|��dS)a�Take an IP string/int and return an object of the correct type.

    Args:
        address: A string or integer, the IP address.  Either IPv4 or
          IPv6 addresses may be supplied; integers less than 2**32 will
          be considered to be IPv4 by default.

    Returns:
        An IPv4Address or IPv6Address object.

    Raises:
        ValueError: if the *address* passed isn't either a v4 or a v6
          address

    zx%r does not appear to be an IPv4 or IPv6 address. Did you pass in a bytes (str in Python 2) instead of a unicode object?z0%r does not appear to be an IPv4 or IPv6 addressN)�IPv4Addressr9r;�IPv6Addressr�bytes�
ValueError)�addressrrr�
ip_address�s
rATcCsny
t||�Sttfk
r"YnXy
t||�Sttfk
rFYnXt|t�r^td|��td|��dS)a�Take an IP string/int and return an object of the correct type.

    Args:
        address: A string or integer, the IP network.  Either IPv4 or
          IPv6 networks may be supplied; integers less than 2**32 will
          be considered to be IPv4 by default.

    Returns:
        An IPv4Network or IPv6Network object.

    Raises:
        ValueError: if the string passed isn't either a v4 or a v6
          address. Or if the network has host bits set.

    zx%r does not appear to be an IPv4 or IPv6 network. Did you pass in a bytes (str in Python 2) instead of a unicode object?z0%r does not appear to be an IPv4 or IPv6 networkN)�IPv4Networkr9r;�IPv6Networkrr>r?)r@�strictrrr�
ip_network�s


rEcCsTyt|�Sttfk
r YnXyt|�Sttfk
rBYnXtd|��dS)agTake an IP string/int and return an object of the correct type.

    Args:
        address: A string or integer, the IP address.  Either IPv4 or
          IPv6 addresses may be supplied; integers less than 2**32 will
          be considered to be IPv4 by default.

    Returns:
        An IPv4Interface or IPv6Interface object.

    Raises:
        ValueError: if the string passed isn't either a v4 or a v6
          address.

    Notes:
        The IPv?Interface classes describe an Address on a particular
        Network, so they're basically a combination of both the Address
        and Network classes.

    z2%r does not appear to be an IPv4 or IPv6 interfaceN)�
IPv4Interfacer9r;�
IPv6Interfacer?)r@rrr�ip_interface�srHcCs4yt|dd�Stjtfk
r.td��YnXdS)a`Represent an address as 4 packed bytes in network (big-endian) order.

    Args:
        address: An integer representation of an IPv4 IP address.

    Returns:
        The integer address packed as 4 bytes in network (big-endian) order.

    Raises:
        ValueError: If the integer is negative or too large to be an
          IPv4 IP address.

    rr
z&Address negative or too large for IPv4N)rrr�
OverflowErrorr?)r@rrr�v4_int_to_packed�srJcCs4yt|dd�Stjtfk
r.td��YnXdS)z�Represent an address as 16 packed bytes in network (big-endian) order.

    Args:
        address: An integer representation of an IPv6 IP address.

    Returns:
        The integer address packed as 16 bytes in network (big-endian) order.

    rr
z&Address negative or too large for IPv6N)rrrrIr?)r@rrr�v6_int_to_packeds
rKcCs*t|�jd�}t|�dkr&td|��|S)zAHelper to split the netmask and raise AddressValueError if needed�/rzOnly one '/' permitted in %r)�_compat_str�split�lenr9)r@�addrrrr�_split_optional_netmasksrQccsRt|�}t|�}}x.|D]&}|j|jdkr<||fV|}|}qW||fVdS)z�Find a sequence of sorted deduplicated IPv#Address.

    Args:
        addresses: a list of IPv#Address objects.

    Yields:
        A tuple containing the first and last IP addresses in the sequence.

    r$N)�iter�next�_ip)�	addresses�it�first�last�iprrr�_find_address_ranges


rZcCs$|dkr|St|t||d@��S)z�Count the number of zero bits on the right hand side.

    Args:
        number: an integer.
        bits: maximum number of bits to count.

    Returns:
        The number of zero bits on the right hand side of the number.

    rr$)�minr!)Znumber�bitsrrr�_count_righthand_zero_bits0sr]ccs�t|t�ot|t�std��|j|jkr8td||f��||krHtd��|jdkrXt}n|jdkrht}ntd��|j}|j}|j}x^||kr�t	t
||�t||d�d�}||||f�}|V|d|>7}|d|jkr�Pq�WdS)	a�Summarize a network range given the first and last IP addresses.

    Example:
        >>> list(summarize_address_range(IPv4Address('192.0.2.0'),
        ...                              IPv4Address('192.0.2.130')))
        ...                                #doctest: +NORMALIZE_WHITESPACE
        [IPv4Network('192.0.2.0/25'), IPv4Network('192.0.2.128/31'),
         IPv4Network('192.0.2.130/32')]

    Args:
        first: the first IPv4Address or IPv6Address in the range.
        last: the last IPv4Address or IPv6Address in the range.

    Returns:
        An iterator of the summarized IPv(4|6) network objects.

    Raise:
        TypeError:
            If the first and last objects are not IP addresses.
            If the first and last objects are not the same version.
        ValueError:
            If the last object is not greater than the first.
            If the version of the first address is not 4 or 6.

    z1first and last must be IP addresses, not networksz%%s and %s are not of the same versionz*last IP address must be greater than firstr�zunknown IP versionr$N)
r�_BaseAddress�	TypeError�versionr?rBrC�_max_prefixlenrTr[r]r!�	_ALL_ONES)rWrXrYZip_bitsZ	first_intZlast_intZnbits�netrrr�summarize_address_range@s0





reccs�t|�}i}xL|rX|j�}|j�}|j|�}|dkr>|||<q||kr||=|j|�qWd}x4t|j��D]$}|dk	r�|j|jkr�ql|V|}qlWdS)auLoops through the addresses, collapsing concurrent netblocks.

    Example:

        ip1 = IPv4Network('192.0.2.0/26')
        ip2 = IPv4Network('192.0.2.64/26')
        ip3 = IPv4Network('192.0.2.128/26')
        ip4 = IPv4Network('192.0.2.192/26')

        _collapse_addresses_internal([ip1, ip2, ip3, ip4]) ->
          [IPv4Network('192.0.2.0/24')]

        This shouldn't be called directly; it is called via
          collapse_addresses([]).

    Args:
        addresses: A list of IPv4Network's or IPv6Network's

    Returns:
        A list of IPv4Network's or IPv6Network's depending on what we were
        passed.

    N)�list�pop�supernet�get�append�sorted�values�broadcast_address)rUZto_merge�subnetsrdrhZexistingrXrrr�_collapse_addresses_internalws$

rocCs8g}g}g}x�|D]�}t|t�rT|rH|dj|jkrHtd||df��|j|�q|j|jkr�|r�|dj|jkr�td||df��y|j|j�Wq�tk
r�|j|j	�Yq�Xq|r�|dj|jkr�td||df��|j|�qWt
t|��}|�r,x&t|�D]\}}|j
t||���qWt||�S)	a�Collapse a list of IP objects.

    Example:
        collapse_addresses([IPv4Network('192.0.2.0/25'),
                            IPv4Network('192.0.2.128/25')]) ->
                           [IPv4Network('192.0.2.0/24')]

    Args:
        addresses: An iterator of IPv4Network or IPv6Network objects.

    Returns:
        An iterator of the collapsed IPv(4|6)Network objects.

    Raises:
        TypeError: If passed a list of mixed version objects.

    r$z%%s and %s are not of the same version���rprprprprp)rr_�_versionr`rj�
_prefixlenrbrY�AttributeError�network_addressrk�setrZ�extendrero)rUZaddrsZipsZnetsrYrWrXrrr�collapse_addresses�s4

rwcCs(t|t�r|j�St|t�r$|j�StS)a2Return a key suitable for sorting between networks and addresses.

    Address and Network objects are not sortable by default; they're
    fundamentally different so the expression

        IPv4Address('192.0.2.0') <= IPv4Network('192.0.2.0/24')

    doesn't make any sense.  There are some times however, where you may wish
    to have ipaddress sort these for you anyway. If you need to do this, you
    can use this function as the key= argument to sorted().

    Args:
      obj: either a Network or Address object.
    Returns:
      appropriate key.

    )r�_BaseNetwork�_get_networks_keyr_�_get_address_keyr-)�objrrr�get_mixed_type_key�s


r|c@s�eZdZdZfZedd��Zedd��Zedd��Zedd	��Z	d
d�Z
dd
�Zedd��Z
edd��Zedd��Zedd��Zedd��Zdd�ZdS)�_IPAddressBasezThe mother class.cCs|j�S)z:Return the longhand version of the IP address as a string.)�_explode_shorthand_ip_string)r*rrr�exploded�sz_IPAddressBase.explodedcCst|�S)z;Return the shorthand version of the IP address as a string.)rM)r*rrr�
compressedsz_IPAddressBase.compressedcCs|j�S)aIThe name of the reverse DNS pointer for the IP address, e.g.:
            >>> ipaddress.ip_address("127.0.0.1").reverse_pointer
            '1.0.0.127.in-addr.arpa'
            >>> ipaddress.ip_address("2001:db8::1").reverse_pointer
            '1.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.8.b.d.0.1.0.0.2.ip6.arpa'

        )�_reverse_pointer)r*rrr�reverse_pointers	z_IPAddressBase.reverse_pointercCsdt|�f}t|��dS)Nz%200s has no version specified)�typer)r*�msgrrrrasz_IPAddressBase.versioncCsF|dkrd}t|||jf��||jkrBd}t|||j|jf��dS)Nrz-%d (< 0) is not permitted as an IPv%d addressz2%d (>= 2**%d) is not permitted as an IPv%d address)r9rqrcrb)r*r@r�rrr�_check_int_addresss

z!_IPAddressBase._check_int_addresscCs.t|�}||kr*d}t|||||jf��dS)Nz~%r (len %d != %d) is not permitted as an IPv%d address. Did you pass in a bytes (str in Python 2) instead of a unicode object?)rOr9rq)r*r@Zexpected_lenZaddress_lenr�rrr�_check_packed_address s
z$_IPAddressBase._check_packed_addresscCs|j|j|?AS)z�Turn the prefix length into a bitwise netmask

        Args:
            prefixlen: An integer, the prefix length.

        Returns:
            An integer.

        )rc)�cls�	prefixlenrrr�_ip_int_from_prefix+sz"_IPAddressBase._ip_int_from_prefixc	Cs\t||j�}|j|}||?}d|>d}||krX|jd}t||d�}d}t||��|S)aReturn prefix length from the bitwise netmask.

        Args:
            ip_int: An integer, the netmask in expanded bitwise format

        Returns:
            An integer, the prefix length.

        Raises:
            ValueError: If the input intermingles zeroes & ones
        r$rr
z&Netmask pattern %r mixes zeroes & ones)r]rbrr?)	r��ip_intZtrailing_zeroesr�Zleading_onesZall_onesZbyteslenZdetailsr�rrr�_prefix_from_ip_int8s


z"_IPAddressBase._prefix_from_ip_intcCsd|}t|��dS)Nz%r is not a valid netmask)r;)r�Znetmask_strr�rrr�_report_invalid_netmaskQsz&_IPAddressBase._report_invalid_netmaskcCsjtjj|�s|j|�yt|�}Wntk
r@|j|�YnXd|koV|jknsf|j|�|S)a	Return prefix length from a numeric string

        Args:
            prefixlen_str: The string to be converted

        Returns:
            An integer, the prefix length.

        Raises:
            NetmaskValueError: If the input is not a valid netmask
        r)�_BaseV4�_DECIMAL_DIGITS�
issupersetr��intr?rb)r�Z
prefixlen_strr�rrr�_prefix_from_prefix_stringVs

z)_IPAddressBase._prefix_from_prefix_stringcCs�y|j|�}Wntk
r,|j|�YnXy
|j|�Stk
rLYnX||jN}y
|j|�Stk
r�|j|�YnXdS)aTurn a netmask/hostmask string into a prefix length

        Args:
            ip_str: The netmask/hostmask to be converted

        Returns:
            An integer, the prefix length.

        Raises:
            NetmaskValueError: If the input is not a valid netmask/hostmask
        N)�_ip_int_from_stringr9r�r�r?rc)r��ip_strr�rrr�_prefix_from_ip_stringos


z%_IPAddressBase._prefix_from_ip_stringcCs|jt|�ffS)N)�	__class__rM)r*rrr�
__reduce__�sz_IPAddressBase.__reduce__N)r5r6r7r:r8�propertyrr�r�rar�r��classmethodr�r�r�r�r�r�rrrrr}�s	
"r}c@sdeZdZdZfZdd�Zdd�Zdd�Zdd	�Zd
d�Z	dd
�Z
dd�Zdd�Zdd�Z
dd�ZdS)r_z�A generic IP object.

    This IP class contains the version independent methods which are
    used by single IP addresses.
    cCs|jS)N)rT)r*rrr�__int__�sz_BaseAddress.__int__cCs2y|j|jko|j|jkStk
r,tSXdS)N)rTrqrsr-)r*r+rrrr,�s
z_BaseAddress.__eq__cCs`t|t�stSt|t�s(td||f��|j|jkrDtd||f��|j|jkr\|j|jkSdS)Nz"%s and %s are not of the same typez%%s and %s are not of the same versionF)rr}r-r_r`rqrT)r*r+rrrr0�s

z_BaseAddress.__lt__cCs t|t�stS|jt|�|�S)N)rrr-r�r�)r*r+rrr�__add__�s
z_BaseAddress.__add__cCs t|t�stS|jt|�|�S)N)rrr-r�r�)r*r+rrr�__sub__�s
z_BaseAddress.__sub__cCsd|jjt|�fS)Nz%s(%r))r�r5rM)r*rrr�__repr__�sz_BaseAddress.__repr__cCst|j|j��S)N)rM�_string_from_ip_intrT)r*rrr�__str__�sz_BaseAddress.__str__cCsttt|j���S)N)�hash�hexr�rT)r*rrr�__hash__�sz_BaseAddress.__hash__cCs
|j|fS)N)rq)r*rrrrz�sz_BaseAddress._get_address_keycCs|j|jffS)N)r�rT)r*rrrr��sz_BaseAddress.__reduce__N)r5r6r7r:r8r�r,r0r�r�r�r�r�rzr�rrrrr_�sr_c@sXeZdZdZdd�Zdd�Zdd�Zdd	�Zd
d�Zdd
�Z	dd�Z
dd�Zdd�Zdd�Z
dd�Zedd��Zedd��Zedd��Zedd��Zed d!��Zed"d#��Zed$d%��Zed&d'��Zd(d)�Zd*d+�Zd,d-�ZdFd0d1�ZdGd2d3�Zed4d5��Zd6d7�Zd8d9�Zed:d;��Z ed<d=��Z!ed>d?��Z"ed@dA��Z#edBdC��Z$edDdE��Z%d/S)Hrxz~A generic IP network object.

    This IP class contains the version independent methods which are
    used by networks.

    cCs
i|_dS)N)�_cache)r*r@rrr�__init__�sz_BaseNetwork.__init__cCsd|jjt|�fS)Nz%s(%r))r�r5rM)r*rrrr��sz_BaseNetwork.__repr__cCsd|j|jfS)Nz%s/%d)rtr�)r*rrrr��sz_BaseNetwork.__str__ccs<t|j�}t|j�}x"t|d|�D]}|j|�Vq$WdS)z�Generate Iterator over usable hosts in a network.

        This is like __iter__ except it doesn't return the network
        or broadcast addresses.

        r$N)r�rtrmr(�_address_class)r*�network�	broadcast�xrrr�hosts�s

z_BaseNetwork.hostsccs<t|j�}t|j�}x"t||d�D]}|j|�Vq$WdS)Nr$)r�rtrmr(r�)r*r�r�r�rrr�__iter__�s

z_BaseNetwork.__iter__cCslt|j�}t|j�}|dkr>|||kr0td��|j||�S|d7}|||krZtd��|j||�SdS)Nrzaddress out of ranger$)r�rtrm�
IndexErrorr�)r*�nr�r�rrr�__getitem__�s

z_BaseNetwork.__getitem__cCsxt|t�stSt|t�s(td||f��|j|jkrDtd||f��|j|jkr\|j|jkS|j|jkrt|j|jkSdS)Nz"%s and %s are not of the same typez%%s and %s are not of the same versionF)rr}r-rxr`rqrt�netmask)r*r+rrrr0s

z_BaseNetwork.__lt__cCsFy,|j|jko,|j|jko,t|j�t|j�kStk
r@tSXdS)N)rqrtr�r�rsr-)r*r+rrrr,sz_BaseNetwork.__eq__cCstt|j�t|j�A�S)N)r�r�rtr�)r*rrrr�sz_BaseNetwork.__hash__cCsL|j|jkrdSt|t�rdSt|j�t|j�koBt|j�kSSdS)NF)rqrrxr�rtrTrm)r*r+rrr�__contains__s
z_BaseNetwork.__contains__cCs(|j|kp&|j|kp&|j|kp&|j|kS)z*Tell if self is partly contained in other.)rtrm)r*r+rrr�overlaps)s


z_BaseNetwork.overlapscCs<|jjd�}|dkr8|jt|j�t|j�B�}||jd<|S)Nrm)r�rir�r�rt�hostmask)r*r�rrrrm0s
z_BaseNetwork.broadcast_addresscCs8|jjd�}|dkr4|jt|j�|jA�}||jd<|S)Nr�)r�rir�r�r�rc)r*r�rrrr�9s

z_BaseNetwork.hostmaskcCsd|j|jfS)Nz%s/%d)rtrr)r*rrr�with_prefixlenAsz_BaseNetwork.with_prefixlencCsd|j|jfS)Nz%s/%s)rtr�)r*rrr�with_netmaskEsz_BaseNetwork.with_netmaskcCsd|j|jfS)Nz%s/%s)rtr�)r*rrr�
with_hostmaskIsz_BaseNetwork.with_hostmaskcCst|j�t|j�dS)z&Number of hosts in the current subnet.r$)r�rmrt)r*rrr�
num_addressesMsz_BaseNetwork.num_addressescCsdt|�f}t|��dS)Nz%%200s has no associated address class)r�r)r*r�rrrr�Rsz_BaseNetwork._address_classcCs|jS)N)rr)r*rrrr�Zsz_BaseNetwork.prefixlenccs|j|jkstd||f��t|t�s2td|��|j|�sLtd||f��||krXdS|jd|j|jf�}|j	�\}}xb||kr�||kr�|j|�r�|V|j	�\}}q||j|�r�|V|j	�\}}q|t
d|||f��q|W||kr�|Vn$||k�r|Vnt
d|||f��dS)a�Remove an address from a larger block.

        For example:

            addr1 = ip_network('192.0.2.0/28')
            addr2 = ip_network('192.0.2.1/32')
            list(addr1.address_exclude(addr2)) =
                [IPv4Network('192.0.2.0/32'), IPv4Network('192.0.2.2/31'),
                 IPv4Network('192.0.2.4/30'), IPv4Network('192.0.2.8/29')]

        or IPv6:

            addr1 = ip_network('2001:db8::1/32')
            addr2 = ip_network('2001:db8::1/128')
            list(addr1.address_exclude(addr2)) =
                [ip_network('2001:db8::1/128'),
                 ip_network('2001:db8::2/127'),
                 ip_network('2001:db8::4/126'),
                 ip_network('2001:db8::8/125'),
                 ...
                 ip_network('2001:db8:8000::/33')]

        Args:
            other: An IPv4Network or IPv6Network object of the same type.

        Returns:
            An iterator of the IPv(4|6)Network objects which is self
            minus other.

        Raises:
            TypeError: If self and other are of differing address
              versions, or if other is not a network object.
            ValueError: If other is not completely contained by self.

        z%%s and %s are not of the same versionz%s is not a network objectz%s not contained in %sNz%s/%sz3Error performing exclusion: s1: %s s2: %s other: %s)rqr`rrx�	subnet_ofr?r�rtr�rnr)r*r+�s1�s2rrr�address_exclude^s6$





z_BaseNetwork.address_excludecCs`|j|jkrtd||f��|j|jkr,dS|j|jkr<dS|j|jkrLdS|j|jkr\dSdS)a�Compare two IP objects.

        This is only concerned about the comparison of the integer
        representation of the network addresses.  This means that the
        host bits aren't considered at all in this method.  If you want
        to compare host bits, you can easily enough do a
        'HostA._ip < HostB._ip'

        Args:
            other: An IP object.

        Returns:
            If the IP versions of self and other are the same, returns:

            -1 if self < other:
              eg: IPv4Network('192.0.2.0/25') < IPv4Network('192.0.2.128/25')
              IPv6Network('2001:db8::1000/124') <
                  IPv6Network('2001:db8::2000/124')
            0 if self == other
              eg: IPv4Network('192.0.2.0/24') == IPv4Network('192.0.2.0/24')
              IPv6Network('2001:db8::1000/124') ==
                  IPv6Network('2001:db8::1000/124')
            1 if self > other
              eg: IPv4Network('192.0.2.128/25') > IPv4Network('192.0.2.0/25')
                  IPv6Network('2001:db8::2000/124') >
                      IPv6Network('2001:db8::1000/124')

          Raises:
              TypeError if the IP versions are different.

        z"%s and %s are not of the same typer$rrprp)rqr`rtr�)r*r+rrr�compare_networks�s!z_BaseNetwork.compare_networkscCs|j|j|jfS)z�Network-only key function.

        Returns an object that identifies this address' network and
        netmask. This function is a suitable "key" argument for sorted()
        and list.sort().

        )rqrtr�)r*rrrry�sz_BaseNetwork._get_networks_keyr$Nc	cs�|j|jkr|VdS|dk	rJ||jkr0td��|dkr@td��||j}|dkrZtd��|j|}||jkr~td||f��t|j�}t|j�d}t|j�d|?}x(t|||�D]}|j||f�}|Vq�WdS)a�The subnets which join to make the current subnet.

        In the case that self contains only one IP
        (self._prefixlen == 32 for IPv4 or self._prefixlen == 128
        for IPv6), yield an iterator with just ourself.

        Args:
            prefixlen_diff: An integer, the amount the prefix length
              should be increased by. This should not be set if
              new_prefix is also set.
            new_prefix: The desired new prefix length. This must be a
              larger number (smaller prefix) than the existing prefix.
              This should not be set if prefixlen_diff is also set.

        Returns:
            An iterator of IPv(4|6) objects.

        Raises:
            ValueError: The prefixlen_diff is too small or too large.
                OR
            prefixlen_diff and new_prefix are both set or new_prefix
              is a smaller number than the current prefix (smaller
              number means a larger network)

        Nznew prefix must be longerr$z(cannot set prefixlen_diff and new_prefixrzprefix length diff must be > 0z0prefix length diff %d is invalid for netblock %s)	rrrbr?r�rtrmr�r(r�)	r*�prefixlen_diff�
new_prefix�
new_prefixlenr%r&r'Znew_addrZcurrentrrrrn�s,




z_BaseNetwork.subnetscCs�|jdkr|S|dk	rB||jkr(td��|dkr8td��|j|}|j|}|dkrftd|j|f��|jt|j�t|j�|>@|f�S)a�The supernet containing the current network.

        Args:
            prefixlen_diff: An integer, the amount the prefix length of
              the network should be decreased by.  For example, given a
              /24 network and a prefixlen_diff of 3, a supernet with a
              /21 netmask is returned.

        Returns:
            An IPv4 network object.

        Raises:
            ValueError: If self.prefixlen - prefixlen_diff < 0. I.e., you have
              a negative prefix length.
                OR
            If prefixlen_diff and new_prefix are both set or new_prefix is a
              larger number than the current prefix (larger number means a
              smaller network)

        rNznew prefix must be shorterr$z(cannot set prefixlen_diff and new_prefixz;current prefixlen is %d, cannot have a prefixlen_diff of %d)rrr?r�r�r�rtr�)r*r�r�r�rrrrhs 



z_BaseNetwork.supernetcCs|jjo|jjS)z�Test if the address is reserved for multicast use.

        Returns:
            A boolean, True if the address is a multicast address.
            See RFC 2373 2.7 for details.

        )rt�is_multicastrm)r*rrrr�As	z_BaseNetwork.is_multicastcCsP|j|jkrdSt|d�r<t|d�r<|j|jko:|j|jkStdt|���dS)NFrtrmz9Unable to test subnet containment with element of type %s)rq�hasattrrtrmr`r�)r*r+rrrr�Ms

z_BaseNetwork.subnet_ofcCsP|j|jkrdSt|d�r<t|d�r<|j|jko:|j|jkStdt|���dS)NFrtrmz9Unable to test subnet containment with element of type %s)rqr�rtrmr`r�)r*r+rrr�supernet_of[s

z_BaseNetwork.supernet_ofcCs|jjo|jjS)z�Test if the address is otherwise IETF reserved.

        Returns:
            A boolean, True if the address is within one of the
            reserved IPv6 Network ranges.

        )rt�is_reservedrm)r*rrrr�is	z_BaseNetwork.is_reservedcCs|jjo|jjS)z�Test if the address is reserved for link-local.

        Returns:
            A boolean, True if the address is reserved per RFC 4291.

        )rt�
is_link_localrm)r*rrrr�usz_BaseNetwork.is_link_localcCs|jjo|jjS)z�Test if this address is allocated for private networks.

        Returns:
            A boolean, True if the address is reserved per
            iana-ipv4-special-registry or iana-ipv6-special-registry.

        )rt�
is_privaterm)r*rrrr��s	z_BaseNetwork.is_privatecCs|jS)z�Test if this address is allocated for public networks.

        Returns:
            A boolean, True if the address is not reserved per
            iana-ipv4-special-registry or iana-ipv6-special-registry.

        )r�)r*rrr�	is_global�s	z_BaseNetwork.is_globalcCs|jjo|jjS)z�Test if the address is unspecified.

        Returns:
            A boolean, True if this is the unspecified address as defined in
            RFC 2373 2.5.2.

        )rt�is_unspecifiedrm)r*rrrr��s	z_BaseNetwork.is_unspecifiedcCs|jjo|jjS)z�Test if the address is a loopback address.

        Returns:
            A boolean, True if the address is a loopback address as defined in
            RFC 2373 2.5.3.

        )rt�is_loopbackrm)r*rrrr��s	z_BaseNetwork.is_loopback)r$N)r$N)&r5r6r7r:r�r�r�r�r�r�r0r,r�r�r�r�rmr�r�r�r�r�r�r�r�r�ryrnrhr�r�r�r�r�r�r�r�r�rrrrrx�sD

	K0

5
)rxc
@s�eZdZdZfZdZdedZed�Z	edddd	d
ddd
dg	�Z
eZiZdd�Z
edd��Zedd��Zedd��Zedd��Zdd�Zdd�Zedd��Zedd ��Zd!S)"r�zyBase IPv4 object.

    The following methods are used by IPv4 objects in both single IP
    addresses and networks.

    rrr$�
0123456789���������rrcCst|�S)N)rM)r*rrrr~�sz$_BaseV4._explode_shorthand_ip_stringcCsn||jkrdt|t�r|}n.y|j|�}Wntk
rF|j|�}YnXt|j|��}||f|j|<|j|S)aMake a (netmask, prefix_len) tuple from the given argument.

        Argument can be:
        - an integer (the prefix length)
        - a string representing the prefix length (e.g. "24")
        - a string representing the prefix netmask (e.g. "255.255.255.0")
        )�_netmask_cacherrr�r;r�r<r�)r��argr�r�rrr�
_make_netmask�s	

z_BaseV4._make_netmaskcCsx|std��|jd�}t|�dkr.td|��ytt|j|�d�Stk
rr}ztd||f��WYdd}~XnXdS)aTurn the given IP string into an integer for comparison.

        Args:
            ip_str: A string, the IP ip_str.

        Returns:
            The IP ip_str as an integer.

        Raises:
            AddressValueError: if ip_str isn't a valid IPv4 Address.

        zAddress cannot be empty�.rzExpected 4 octets in %rr
z%s in %rN)r9rNrOr�map�_parse_octetr?)r�r�Zoctets�excrrrr��s
z_BaseV4._ip_int_from_stringcCs�|std��|jj|�s(d}t||��t|�dkrDd}t||��t|d�}|dkrr|ddkrrd	}t||��|d
kr�td|��|S)aConvert a decimal octet into an integer.

        Args:
            octet_str: A string, the number to parse.

        Returns:
            The octet as an integer.

        Raises:
            ValueError: if the octet isn't strictly a decimal from [0..255].

        zEmpty octet not permittedz#Only decimal digits permitted in %r�z$At most 3 characters permitted in %r�
�r�0z3Ambiguous (octal/decimal) value in %r not permittedr�zOctet %d (> 255) not permitted)r?r�r�rOr�)r�Z	octet_strr�Z	octet_intrrrr��s
z_BaseV4._parse_octetcCsdjdd�t|dd�D��S)z�Turns a 32-bit integer into dotted decimal notation.

        Args:
            ip_int: An integer, the IP address.

        Returns:
            The IP address as a string in dotted decimal notation.

        r�css0|](}tt|t�r"tjd|�dn|�VqdS)s!BrN)rMrr>rr	)r
rrrr�	<genexpr>-sz._BaseV4._string_from_ip_int.<locals>.<genexpr>rr
)�joinr)r�r�rrrr�"s
z_BaseV4._string_from_ip_intcsh|jd�}y�fdd�tt|�D�}Wntk
r:dSXt|�t|�krPdS|d|dkrddSdS)	z�Test if the IP string is a hostmask (rather than a netmask).

        Args:
            ip_str: A string, the potential hostmask.

        Returns:
            A boolean, True if the IP string is a hostmask.

        r�csg|]}|�jkr|�qSr)�_valid_mask_octets)r
r�)r*rrr>sz(_BaseV4._is_hostmask.<locals>.<listcomp>Frr$Trp)rNr�r�r?rO)r*r�r\�partsr)r*r�_is_hostmask2s

z_BaseV4._is_hostmaskcCs&t|�jd�ddd�}dj|�dS)z�Return the reverse DNS pointer name for the IPv4 address.

        This implements the method described in RFC1035 3.5.

        r�Nr$z
.in-addr.arparp)rMrNr�)r*Zreverse_octetsrrrr�Gsz_BaseV4._reverse_pointercCs|jS)N)rb)r*rrr�
max_prefixlenPsz_BaseV4.max_prefixlencCs|jS)N)rq)r*rrrraTsz_BaseV4.versionN)r5r6r7r:r8rq�
IPV4LENGTHrc�	frozensetr�r�rbr�r~r�r�r�r�r�r�r�r�r�rarrrrr��s"%	r�c@s|eZdZdZdZdd�Zedd��Zedd	��Zed
d��Z	edd
��Z
edd��Zedd��Zedd��Z
edd��ZdS)r<z/Represent and manipulate single IPv4 Addresses.rT�__weakref__cCsxt|t�r|j|�||_dSt|t�rL|j|d�t|�}t|d�|_dSt|�}d|krht	d|��|j
|�|_dS)a�
        Args:
            address: A string or integer representing the IP

              Additionally, an integer can be passed, so
              IPv4Address('192.0.2.1') == IPv4Address(3221225985).
              or, more generally
              IPv4Address(int(IPv4Address('192.0.2.1'))) ==
                IPv4Address('192.0.2.1')

        Raises:
            AddressValueError: If ipaddress isn't a valid IPv4 address.

        Nrr
rLzUnexpected '/' in %r)rrr�rTr>r�rrrMr9r�)r*r@�bvs�addr_strrrrr�_s


zIPv4Address.__init__cCs
t|j�S)z*The binary representation of this address.)rJrT)r*rrr�packed�szIPv4Address.packedcCs||jjkS)z�Test if the address is otherwise IETF reserved.

         Returns:
             A boolean, True if the address is within the
             reserved IPv4 Network range.

        )�
_constants�_reserved_network)r*rrrr��s	zIPv4Address.is_reservedcst�fdd��jjD��S)z�Test if this address is allocated for private networks.

        Returns:
            A boolean, True if the address is reserved per
            iana-ipv4-special-registry.

        c3s|]}�|kVqdS)Nr)r
rd)r*rrr��sz)IPv4Address.is_private.<locals>.<genexpr>)�anyr��_private_networks)r*r)r*rr��s	zIPv4Address.is_privatecCs||jjko|jS)N)r��_public_networkr�)r*rrrr��szIPv4Address.is_globalcCs||jjkS)z�Test if the address is reserved for multicast use.

        Returns:
            A boolean, True if the address is multicast.
            See RFC 3171 for details.

        )r��_multicast_network)r*rrrr��s	zIPv4Address.is_multicastcCs||jjkS)z�Test if the address is unspecified.

        Returns:
            A boolean, True if this is the unspecified address as defined in
            RFC 5735 3.

        )r��_unspecified_address)r*rrrr��s	zIPv4Address.is_unspecifiedcCs||jjkS)z�Test if the address is a loopback address.

        Returns:
            A boolean, True if the address is a loopback per RFC 3330.

        )r��_loopback_network)r*rrrr��szIPv4Address.is_loopbackcCs||jjkS)z�Test if the address is reserved for link-local.

        Returns:
            A boolean, True if the address is link-local per RFC 3927.

        )r��_linklocal_network)r*rrrr��szIPv4Address.is_link_localN)rTr�)r5r6r7r:r8r�r�r�r�r�r�r�r�r�r�rrrrr<Ys$
r<c@sjeZdZdd�Zdd�Zdd�Zdd�Zd	d
�Zej	Z	e
dd��Ze
d
d��Ze
dd��Z
e
dd��ZdS)rFcCs�t|ttf�r2tj||�t|j�|_|j|_	dSt|t
�r�tj||d�t|�dkrht|d�|_	n|j|_	t|dd�|_|jj
|_
|jj|_dSt|�}tj||d�t|dd�|_|jj	|_	|jj
|_
|jj|_dS)Nrr$F)rD)rr>rr<r�rBrTr�rbrr�tuplerOr�r�r�rQ)r*r@rPrrrr��s(




zIPv4Interface.__init__cCsd|j|j�|jjfS)Nz%s/%d)r�rTr�r�)r*rrrr��szIPv4Interface.__str__cCsDtj||�}|s|tkr|Sy|j|jkStk
r>dSXdS)NF)r<r,r-r�rs)r*r+�
address_equalrrrr,�szIPv4Interface.__eq__cCs>tj||�}|tkrtSy|j|jkStk
r8dSXdS)NF)r<r0r-r�rs)r*r+�address_lessrrrr0�szIPv4Interface.__lt__cCs|j|jAt|jj�AS)N)rTrrr�r�rt)r*rrrr�szIPv4Interface.__hash__cCs
t|j�S)N)r<rT)r*rrrrY
szIPv4Interface.ipcCsd|j|j�|jfS)Nz%s/%s)r�rTrr)r*rrrr�szIPv4Interface.with_prefixlencCsd|j|j�|jfS)Nz%s/%s)r�rTr�)r*rrrr�szIPv4Interface.with_netmaskcCsd|j|j�|jfS)Nz%s/%s)r�rTr�)r*rrrr�szIPv4Interface.with_hostmaskN)r5r6r7r�r�r,r0r�r}r�r�rYr�r�r�rrrrrF�srFc@s*eZdZdZeZddd�Zedd��ZdS)	rBaeThis class represents and manipulates 32-bit IPv4 network + addresses..

    Attributes: [examples for IPv4Network('192.0.2.0/27')]
        .network_address: IPv4Address('192.0.2.0')
        .hostmask: IPv4Address('0.0.0.31')
        .broadcast_address: IPv4Address('192.0.2.32')
        .netmask: IPv4Address('255.255.255.224')
        .prefixlen: 27

    TcCs|tj||�t|ttf�r<t|�|_|j|j�\|_	|_
dSt|t�r�t|�dkr\|d}n|j}t|d�|_|j|�\|_	|_
t
|j�}|t
|j	�@|kr�|r�td|��nt|t
|j	�@�|_dSt|�}t|j|d��|_t|�dkr�|d}n|j}|j|�\|_	|_
|�rDtt
|j�t
|j	�@�|jk�rDtd|��tt
|j�t
|j	�@�|_|j
|jdk�rx|j|_dS)aInstantiate a new IPv4 network object.

        Args:
            address: A string or integer representing the IP [& network].
              '192.0.2.0/24'
              '192.0.2.0/255.255.255.0'
              '192.0.0.2/0.0.0.255'
              are all functionally the same in IPv4. Similarly,
              '192.0.2.1'
              '192.0.2.1/255.255.255.255'
              '192.0.2.1/32'
              are also functionally equivalent. That is to say, failing to
              provide a subnetmask will create an object with a mask of /32.

              If the mask (portion after the / in the argument) is given in
              dotted quad form, it is treated as a netmask if it starts with a
              non-zero field (e.g. /255.0.0.0 == /8) and as a hostmask if it
              starts with a zero field (e.g. 0.255.255.255 == /8), with the
              single exception of an all-zero mask which is treated as a
              netmask == /0. If no mask is given, a default of /32 is used.

              Additionally, an integer can be passed, so
              IPv4Network('192.0.2.1') == IPv4Network(3221225985)
              or, more generally
              IPv4Interface(int(IPv4Interface('192.0.2.1'))) ==
                IPv4Interface('192.0.2.1')

        Raises:
            AddressValueError: If ipaddress isn't a valid IPv4 address.
            NetmaskValueError: If the netmask isn't valid for
              an IPv4 address.
            ValueError: If strict is True and a network address is not
              supplied.

        Nr$rz%s has host bits setr)rxr�rrr>r<rtr�rbr�rrr�rOr�r?rQr�r�r�)r*r@rDr�r�rPrrrr�0sB%






zIPv4Network.__init__cCs&|jtd�ko|jtd�ko$|jS)z�Test if this address is allocated for public networks.

        Returns:
            A boolean, True if the address is not reserved per
            iana-ipv4-special-registry.

        z
100.64.0.0/10)rtrBrmr�)r*rrrr��s	zIPv4Network.is_globalN)T)	r5r6r7r:r<r�r�r�r�rrrrrB!s
UrBc@s�eZdZed�Zed�Zed�Zed�Zed�ed�ed�ed�ed�ed�ed	�ed
�ed�ed�ed
�ed�ed�ed�gZed�Z	e
d�ZdS)�_IPv4Constantsz169.254.0.0/16z127.0.0.0/8z224.0.0.0/4z
100.64.0.0/10z	0.0.0.0/8z
10.0.0.0/8z
172.16.0.0/12z192.0.0.0/29z192.0.0.170/31z192.0.2.0/24z192.168.0.0/16z
198.18.0.0/15z198.51.100.0/24z203.0.113.0/24z240.0.0.0/4z255.255.255.255/32z0.0.0.0N)r5r6r7rBr�r�r�r�r�r�r<r�rrrrr��s(
r�c@s�eZdZdZfZdZdedZdZe	d�Z
eZiZe
dd��Ze
d	d
��Ze
dd��Ze
d
d��Ze
ddd��Zdd�Zdd�Zedd��Zedd��ZdS)�_BaseV6zyBase IPv6 object.

    The following methods are used by IPv6 objects in both single IP
    addresses and networks.

    r^rr$rZ0123456789ABCDEFabcdefcCsJ||jkr@t|t�r|}n
|j|�}t|j|��}||f|j|<|j|S)aMake a (netmask, prefix_len) tuple from the given argument.

        Argument can be:
        - an integer (the prefix length)
        - a string representing the prefix length (e.g. "24")
        - a string representing the prefix netmask (e.g. "255.255.255.0")
        )r�rrr�r=r�)r�r�r�r�rrrr��s	


z_BaseV6._make_netmaskcCs�|std��|jd�}d}t|�|kr:d||f}t|��d|dkr�yt|j��j}Wn2tk
r�}ztd||f��WYdd}~XnX|jd	|d
?d@�|jd	|d@�|jd}t|�|kr�d|d|f}t|��d}x@tdt|�d�D]*}	||	s�|dk	�r d
|}t|��|	}q�W|dk	�r�|}
t|�|d}|d�sn|
d8}
|
�rnd}t||��|d�s�|d8}|�r�d}t||��|j|
|}|dk�r4d}t||jd|f��njt|�|jk�r�d}t||j|f��|d�s
d}t||��|d�s$d}t||��t|�}
d}d}ytd}
x,t	|
�D] }	|
d
K}
|
|j
||	�O}
�qDW|
d
|K}
x0t	|d�D] }	|
d
K}
|
|j
||	�O}
�q�W|
Stk
�r�}ztd||f��WYdd}~XnXdS)z�Turn an IPv6 ip_str into an integer.

        Args:
            ip_str: A string, the IPv6 ip_str.

        Returns:
            An int, the IPv6 address

        Raises:
            AddressValueError: if ip_str isn't a valid IPv6 Address.

        zAddress cannot be empty�:r�z At least %d parts expected in %rr�r$z%s in %rNz%xri��z!At most %d colons permitted in %rz At most one '::' permitted in %rrz0Leading ':' only permitted as part of '::' in %rz1Trailing ':' only permitted as part of '::' in %rz/Expected at most %d other parts with '::' in %rz,Exactly %d parts expected without '::' in %rrprprp)r9rNrOr<rgrTrj�
_HEXTET_COUNTr(�range�
_parse_hextetr?)r�r�r�Z
_min_partsr�Zipv4_intr�Z
_max_partsZ
skip_indexr Zparts_hiZparts_loZ
parts_skippedr�rrrr��s�
"







z_BaseV6._ip_int_from_stringcCs>|jj|�std|��t|�dkr4d}t||��t|d�S)a&Convert an IPv6 hextet string into an integer.

        Args:
            hextet_str: A string, the number to parse.

        Returns:
            The hextet as an integer.

        Raises:
            ValueError: if the input isn't strictly a hex number from
              [0..FFFF].

        zOnly hex digits permitted in %rrz$At most 4 characters permitted in %rr)�_HEX_DIGITSr�r?rOr�)r�Z
hextet_strr�rrrr�Esz_BaseV6._parse_hextetc	Cs�d}d}d}d}xJt|�D]>\}}|dkrP|d7}|dkr>|}||krX|}|}qd}d}qW|dkr�||}|t|�kr�|dg7}dg|||�<|dkr�dg|}|S)	a�Compresses a list of hextets.

        Compresses a list of strings, replacing the longest continuous
        sequence of "0" in the list with "" and adding empty strings at
        the beginning or at the end of the string such that subsequently
        calling ":".join(hextets) will produce the compressed version of
        the IPv6 address.

        Args:
            hextets: A list of strings, the hextets to compress.

        Returns:
            A list of strings.

        r$rr��rprprprp)�	enumeraterO)	r��hextetsZbest_doublecolon_startZbest_doublecolon_lenZdoublecolon_startZdoublecolon_len�indexZhextetZbest_doublecolon_endrrr�_compress_hextets_s.

z_BaseV6._compress_hextetsNcsZ|dkrt|j�}||jkr$td��d|��fdd�tddd�D�}|j|�}d	j|�S)
a,Turns a 128-bit integer into hexadecimal notation.

        Args:
            ip_int: An integer, the IP address.

        Returns:
            A string, the hexadecimal representation of the address.

        Raises:
            ValueError: The address is bigger than 128 bits of all ones.

        NzIPv6 address is too largez%032xcs&g|]}dt�||d�d��qS)z%xrr)r�)r
r�)�hex_strrrr�sz/_BaseV6._string_from_ip_int.<locals>.<listcomp>rrrr�)r�rTrcr?r�r�r�)r�r�r�r)r�rr��s


z_BaseV6._string_from_ip_intcs�t|t�rt|j�}nt|t�r,t|j�}nt|�}|j|�}d|��fdd�tddd�D�}t|ttf�r�ddj	|�|j
fSdj	|�S)	z�Expand a shortened IPv6 address.

        Args:
            ip_str: A string, the IPv6 address.

        Returns:
            A string, the expanded IPv6 address.

        z%032xcsg|]}�||d��qS)rr)r
r�)r�rrr�sz8_BaseV6._explode_shorthand_ip_string.<locals>.<listcomp>rrrz%s/%dr�)rrCrMrtrGrYr�r�rxr�rr)r*r�r�r�r)r�rr~�s



z$_BaseV6._explode_shorthand_ip_stringcCs&|jddd�jdd�}dj|�dS)z�Return the reverse DNS pointer name for the IPv6 address.

        This implements the method described in RFC3596 2.5.

        Nr$r�r�r�z	.ip6.arparp)r�replacer�)r*Z
reverse_charsrrrr��sz_BaseV6._reverse_pointercCs|jS)N)rb)r*rrrr��sz_BaseV6.max_prefixlencCs|jS)N)rq)r*rrrra�sz_BaseV6.version)N)r5r6r7r:r8rq�
IPV6LENGTHrcr�r�r�rbr�r�r�r�r�r�r�r~r�r�r�rarrrrr��s$i0	r�c@s�eZdZdZdZdd�Zedd��Zedd	��Zed
d��Z	edd
��Z
edd��Zedd��Zedd��Z
edd��Zedd��Zedd��Zedd��Zedd��ZdS) r=z/Represent and manipulate single IPv6 Addresses.rTr�cCsxt|t�r|j|�||_dSt|t�rL|j|d�t|�}t|d�|_dSt|�}d|krht	d|��|j
|�|_dS)aInstantiate a new IPv6 address object.

        Args:
            address: A string or integer representing the IP

              Additionally, an integer can be passed, so
              IPv6Address('2001:db8::') ==
                IPv6Address(42540766411282592856903984951653826560)
              or, more generally
              IPv6Address(int(IPv6Address('2001:db8::'))) ==
                IPv6Address('2001:db8::')

        Raises:
            AddressValueError: If address isn't a valid IPv6 address.

        Nrr
rLzUnexpected '/' in %r)rrr�rTr>r�rrrMr9r�)r*r@r�r�rrrr��s


zIPv6Address.__init__cCs
t|j�S)z*The binary representation of this address.)rKrT)r*rrrr��szIPv6Address.packedcCs||jjkS)z�Test if the address is reserved for multicast use.

        Returns:
            A boolean, True if the address is a multicast address.
            See RFC 2373 2.7 for details.

        )r�r�)r*rrrr�s	zIPv6Address.is_multicastcst�fdd��jjD��S)z�Test if the address is otherwise IETF reserved.

        Returns:
            A boolean, True if the address is within one of the
            reserved IPv6 Network ranges.

        c3s|]}�|kVqdS)Nr)r
r�)r*rrr�sz*IPv6Address.is_reserved.<locals>.<genexpr>)r�r��_reserved_networks)r*r)r*rr�s	zIPv6Address.is_reservedcCs||jjkS)z�Test if the address is reserved for link-local.

        Returns:
            A boolean, True if the address is reserved per RFC 4291.

        )r�r�)r*rrrr�szIPv6Address.is_link_localcCs||jjkS)a`Test if the address is reserved for site-local.

        Note that the site-local address space has been deprecated by RFC 3879.
        Use is_private to test if this address is in the space of unique local
        addresses as defined by RFC 4193.

        Returns:
            A boolean, True if the address is reserved per RFC 3513 2.5.6.

        )r��_sitelocal_network)r*rrr�
is_site_local#szIPv6Address.is_site_localcst�fdd��jjD��S)z�Test if this address is allocated for private networks.

        Returns:
            A boolean, True if the address is reserved per
            iana-ipv6-special-registry.

        c3s|]}�|kVqdS)Nr)r
rd)r*rrr�:sz)IPv6Address.is_private.<locals>.<genexpr>)r�r�r�)r*r)r*rr�1s	zIPv6Address.is_privatecCs|jS)z�Test if this address is allocated for public networks.

        Returns:
            A boolean, true if the address is not reserved per
            iana-ipv6-special-registry.

        )r�)r*rrrr�<s	zIPv6Address.is_globalcCs
|jdkS)z�Test if the address is unspecified.

        Returns:
            A boolean, True if this is the unspecified address as defined in
            RFC 2373 2.5.2.

        r)rT)r*rrrr�Gs	zIPv6Address.is_unspecifiedcCs
|jdkS)z�Test if the address is a loopback address.

        Returns:
            A boolean, True if the address is a loopback address as defined in
            RFC 2373 2.5.3.

        r$)rT)r*rrrr�Rs	zIPv6Address.is_loopbackcCs |jd?dkrdSt|jd@�S)z�Return the IPv4 mapped address.

        Returns:
            If the IPv6 address is a v4 mapped address, return the
            IPv4 mapped address. Return None otherwise.

        ri��Nl��)rTr<)r*rrr�ipv4_mapped]s	zIPv6Address.ipv4_mappedcCs4|jd?dkrdSt|jd?d@�t|jd@�fS)z�Tuple of embedded teredo IPs.

        Returns:
            Tuple of the (server, client) IPs or None if the address
            doesn't appear to be a teredo address (doesn't start with
            2001::/32)

        �`i Nrl��)rTr<)r*rrr�teredojs
zIPv6Address.teredocCs$|jd?dkrdSt|jd?d@�S)z�Return the IPv4 6to4 embedded address.

        Returns:
            The IPv4 6to4-embedded address if present or None if the
            address doesn't appear to contain a 6to4 embedded address.

        �pi N�Pl��)rTr<)r*rrr�	sixtofourys	zIPv6Address.sixtofourN)rTr�)r5r6r7r:r8r�r�r�r�r�r�rr�r�r�r�rrrrrrrr=�s%

r=c@s�eZdZdd�Zdd�Zdd�Zdd�Zd	d
�Zej	Z	e
dd��Ze
d
d��Ze
dd��Z
e
dd��Ze
dd��Ze
dd��ZdS)rGcCs�t|ttf�r2tj||�t|j�|_|j|_	dSt|t
�r�tj||d�t|�dkrht|d�|_	n|j|_	t|dd�|_|jj
|_
|jj|_dSt|�}tj||d�t|dd�|_|jj
|_
|jj	|_	|jj|_dS)Nrr$F)rD)rr>rr=r�rCrTr�rbrrr�rOr�r�r�rQ)r*r@rPrrrr��s(




zIPv6Interface.__init__cCsd|j|j�|jjfS)Nz%s/%d)r�rTr�r�)r*rrrr��szIPv6Interface.__str__cCsDtj||�}|s|tkr|Sy|j|jkStk
r>dSXdS)NF)r=r,r-r�rs)r*r+r�rrrr,�szIPv6Interface.__eq__cCs>tj||�}|tkrtSy|j|jkStk
r8dSXdS)NF)r=r0r-r�rs)r*r+r�rrrr0�szIPv6Interface.__lt__cCs|j|jAt|jj�AS)N)rTrrr�r�rt)r*rrrr��szIPv6Interface.__hash__cCs
t|j�S)N)r=rT)r*rrrrY�szIPv6Interface.ipcCsd|j|j�|jfS)Nz%s/%s)r�rTrr)r*rrrr��szIPv6Interface.with_prefixlencCsd|j|j�|jfS)Nz%s/%s)r�rTr�)r*rrrr��szIPv6Interface.with_netmaskcCsd|j|j�|jfS)Nz%s/%s)r�rTr�)r*rrrr��szIPv6Interface.with_hostmaskcCs|jdko|jjS)Nr)rTr�r�)r*rrrr��szIPv6Interface.is_unspecifiedcCs|jdko|jjS)Nr$)rTr�r�)r*rrrr��szIPv6Interface.is_loopbackN)r5r6r7r�r�r,r0r�r}r�r�rYr�r�r�r�r�rrrrrG�srGc@s2eZdZdZeZd
dd�Zdd�Zedd��Z	d	S)rCavThis class represents and manipulates 128-bit IPv6 networks.

    Attributes: [examples for IPv6('2001:db8::1000/124')]
        .network_address: IPv6Address('2001:db8::1000')
        .hostmask: IPv6Address('::f')
        .broadcast_address: IPv6Address('2001:db8::100f')
        .netmask: IPv6Address('ffff:ffff:ffff:ffff:ffff:ffff:ffff:fff0')
        .prefixlen: 124

    TcCs|tj||�t|ttf�r<t|�|_|j|j�\|_	|_
dSt|t�r�t|�dkr\|d}n|j}|j|�\|_	|_
t|d�|_t
|j�}|t
|j	�@|kr�|r�td|��nt|t
|j	�@�|_dSt|�}t|j|d��|_t|�dkr�|d}n|j}|j|�\|_	|_
|�rDtt
|j�t
|j	�@�|jk�rDtd|��tt
|j�t
|j	�@�|_|j
|jdk�rx|j|_dS)a�Instantiate a new IPv6 Network object.

        Args:
            address: A string or integer representing the IPv6 network or the
              IP and prefix/netmask.
              '2001:db8::/128'
              '2001:db8:0000:0000:0000:0000:0000:0000/128'
              '2001:db8::'
              are all functionally the same in IPv6.  That is to say,
              failing to provide a subnetmask will create an object with
              a mask of /128.

              Additionally, an integer can be passed, so
              IPv6Network('2001:db8::') ==
                IPv6Network(42540766411282592856903984951653826560)
              or, more generally
              IPv6Network(int(IPv6Network('2001:db8::'))) ==
                IPv6Network('2001:db8::')

            strict: A boolean. If true, ensure that we have been passed
              A true network address, eg, 2001:db8::1000/124 and not an
              IP address on a network, eg, 2001:db8::1/124.

        Raises:
            AddressValueError: If address isn't a valid IPv6 address.
            NetmaskValueError: If the netmask isn't valid for
              an IPv6 address.
            ValueError: If strict was True and a network address was not
              supplied.

        Nr$rz%s has host bits setr)rxr�rr>rr=rtr�rbr�rrr�rOr�r?rQr�r�r�)r*r@rDr�r�rPrrrr��sB 






zIPv6Network.__init__ccs@t|j�}t|j�}x&t|d|d�D]}|j|�Vq(WdS)z�Generate Iterator over usable hosts in a network.

          This is like __iter__ except it doesn't return the
          Subnet-Router anycast address.

        r$N)r�rtrmr(r�)r*r�r�r�rrrr�<	s

zIPv6Network.hostscCs|jjo|jjS)a`Test if the address is reserved for site-local.

        Note that the site-local address space has been deprecated by RFC 3879.
        Use is_private to test if this address is in the space of unique local
        addresses as defined by RFC 4193.

        Returns:
            A boolean, True if the address is reserved per RFC 3513 2.5.6.

        )rtrrm)r*rrrrH	szIPv6Network.is_site_localN)T)
r5r6r7r:r=r�r�r�r�rrrrrrC�s

OrCc@s�eZdZed�Zed�Zed�ed�ed�ed�ed�ed�ed	�ed
�ed�ed�g
Zed�ed
�ed�ed�ed�ed�ed�ed�ed�ed�ed�ed�ed�ed�ed�gZed�ZdS)�_IPv6Constantsz	fe80::/10zff00::/8z::1/128z::/128z
::ffff:0:0/96z100::/64z	2001::/23z2001:2::/48z
2001:db8::/32z2001:10::/28zfc00::/7z::/8z100::/8z200::/7z400::/6z800::/5z1000::/4z4000::/3z6000::/3z8000::/3zA000::/3zC000::/3zE000::/4zF000::/5zF800::/6zFE00::/9z	fec0::/10N)	r5r6r7rCr�r�r�rrrrrrr	X	s*

r	r)r$)T)8r:Z
__future__rr"r�__version__r�rZlong�	NameErrorZunicoderM�strr>rr�
from_bytesrrsrr�r!r(�objectr)r�r�r?r9r;rArErHrJrKrQrZr]rerorwr|r}r_rxr�r<rFrBr�r�r�r=rGrCr	rrrr�<module>	s�

	


)$
$#716=a*vRr 5V{!_vendor/__pycache__/ipaddress.cpython-36.opt-1.pyc000064400000201213151733136260015761 0ustar003

�Pf09�@s`dZddlmZddlZddlZdZefZyeefZWne	k
rJYnXye
ZWne	k
rleZYnXdOdkr�dd�Z
ndd�Z
y
ejZWnek
r�d	d
�ZYnXdd�Zeed
�r�dd�Zndd�ZdPdd�ZGdd�de�ZdZdZGdd�de�ZGdd�de�Zdd�ZdQdd �Zd!d"�Zd#d$�Zd%d&�Z d'd(�Z!d)d*�Z"d+d,�Z#d-d.�Z$d/d0�Z%d1d2�Z&d3d4�Z'Gd5d6�d6e�Z(Gd7d8�d8e(�Z)Gd9d:�d:e(�Z*Gd;d<�d<e�Z+Gd=d>�d>e+e)�Z,Gd?d@�d@e,�Z-GdAdB�dBe+e*�Z.GdCdD�dDe�Z/e/e,_0GdEdF�dFe�Z1GdGdH�dHe1e)�Z2GdIdJ�dJe2�Z3GdKdL�dLe1e*�Z4GdMdN�dNe�Z5e5e2_0dS)Rz�A fast, lightweight IPv4/IPv6 manipulation library in Python.

This library is used to create/poke/manipulate IPv4 and IPv6 addresses
and networks.

�)�unicode_literalsNz1.0.17�cCs|S)N�)�bytrr�/usr/lib/python3.6/ipaddress.py�_compat_bytes_to_byte_valssrcCsdd�|D�S)NcSsg|]}tjd|�d�qS)s!Br)�struct�unpack)�.0�brrr�
<listcomp>#sz._compat_bytes_to_byte_vals.<locals>.<listcomp>r)rrrrr"scCs"d}x|D]}|d>|}q
W|S)Nr�r)Zbytvals�	endianess�resZbvrrr�_compat_int_from_byte_vals's
rcCst|dkr.|dks|d
kr"tjd��tjd|�S|dkrj|dksJ|ddkrTtjd	��tjd
|d?|d@�St��dS)N�r�� z(integer out of range for 'I' format codes!I��z)integer out of range for 'QQ' format codes!QQ�@l����l)r�error�pack�NotImplementedError)ZintvalZlengthrrrr�_compat_to_bytes0s

r�
bit_lengthcCs|j�S)N)r)�irrr�_compat_bit_length?srcCs&x tj�D]}||?dkr
|Sq
WdS)Nr)�	itertools�count)rrrrrrBs�ccs$|}x||kr|V||7}qWdS)Nr)�start�end�steprrrr�
_compat_rangeHs
r$c@s@eZdZfZdd�Zdd�Zdd�Zdd�Zd	d
�Zdd�Z	d
S)�_TotalOrderingMixincCst�dS)N)r)�self�otherrrr�__eq__Wsz_TotalOrderingMixin.__eq__cCs|j|�}|tkrtS|S)N)r(�NotImplemented)r&r'�equalrrr�__ne__Zs
z_TotalOrderingMixin.__ne__cCst�dS)N)r)r&r'rrr�__lt__`sz_TotalOrderingMixin.__lt__cCs&|j|�}|tks|r"|j|�S|S)N)r,r)r()r&r'�lessrrr�__le__cs

z_TotalOrderingMixin.__le__cCs6|j|�}|tkrtS|j|�}|tkr,tS|p2|S)N)r,r)r()r&r'r-r*rrr�__gt__is

z_TotalOrderingMixin.__gt__cCs|j|�}|tkrtS|S)N)r,r))r&r'r-rrr�__ge__rs
z_TotalOrderingMixin.__ge__N)
�__name__�
__module__�__qualname__�	__slots__r(r+r,r.r/r0rrrrr%Ps	r%rrc@seZdZdZdS)�AddressValueErrorz%A Value Error related to the address.N)r1r2r3�__doc__rrrrr5}sr5c@seZdZdZdS)�NetmaskValueErrorz%A Value Error related to the netmask.N)r1r2r3r6rrrrr7�sr7cCsjyt|�Sttfk
r YnXyt|�Sttfk
rBYnXt|t�rZtd|��td|��dS)a�Take an IP string/int and return an object of the correct type.

    Args:
        address: A string or integer, the IP address.  Either IPv4 or
          IPv6 addresses may be supplied; integers less than 2**32 will
          be considered to be IPv4 by default.

    Returns:
        An IPv4Address or IPv6Address object.

    Raises:
        ValueError: if the *address* passed isn't either a v4 or a v6
          address

    zx%r does not appear to be an IPv4 or IPv6 address. Did you pass in a bytes (str in Python 2) instead of a unicode object?z0%r does not appear to be an IPv4 or IPv6 addressN)�IPv4Addressr5r7�IPv6Address�
isinstance�bytes�
ValueError)�addressrrr�
ip_address�s
r>TcCsny
t||�Sttfk
r"YnXy
t||�Sttfk
rFYnXt|t�r^td|��td|��dS)a�Take an IP string/int and return an object of the correct type.

    Args:
        address: A string or integer, the IP network.  Either IPv4 or
          IPv6 networks may be supplied; integers less than 2**32 will
          be considered to be IPv4 by default.

    Returns:
        An IPv4Network or IPv6Network object.

    Raises:
        ValueError: if the string passed isn't either a v4 or a v6
          address. Or if the network has host bits set.

    zx%r does not appear to be an IPv4 or IPv6 network. Did you pass in a bytes (str in Python 2) instead of a unicode object?z0%r does not appear to be an IPv4 or IPv6 networkN)�IPv4Networkr5r7�IPv6Networkr:r;r<)r=�strictrrr�
ip_network�s


rBcCsTyt|�Sttfk
r YnXyt|�Sttfk
rBYnXtd|��dS)agTake an IP string/int and return an object of the correct type.

    Args:
        address: A string or integer, the IP address.  Either IPv4 or
          IPv6 addresses may be supplied; integers less than 2**32 will
          be considered to be IPv4 by default.

    Returns:
        An IPv4Interface or IPv6Interface object.

    Raises:
        ValueError: if the string passed isn't either a v4 or a v6
          address.

    Notes:
        The IPv?Interface classes describe an Address on a particular
        Network, so they're basically a combination of both the Address
        and Network classes.

    z2%r does not appear to be an IPv4 or IPv6 interfaceN)�
IPv4Interfacer5r7�
IPv6Interfacer<)r=rrr�ip_interface�srEcCs4yt|dd�Stjtfk
r.td��YnXdS)a`Represent an address as 4 packed bytes in network (big-endian) order.

    Args:
        address: An integer representation of an IPv4 IP address.

    Returns:
        The integer address packed as 4 bytes in network (big-endian) order.

    Raises:
        ValueError: If the integer is negative or too large to be an
          IPv4 IP address.

    r�bigz&Address negative or too large for IPv4N)rrr�
OverflowErrorr<)r=rrr�v4_int_to_packed�srHcCs4yt|dd�Stjtfk
r.td��YnXdS)z�Represent an address as 16 packed bytes in network (big-endian) order.

    Args:
        address: An integer representation of an IPv6 IP address.

    Returns:
        The integer address packed as 16 bytes in network (big-endian) order.

    rrFz&Address negative or too large for IPv6N)rrrrGr<)r=rrr�v6_int_to_packeds
rIcCs*t|�jd�}t|�dkr&td|��|S)zAHelper to split the netmask and raise AddressValueError if needed�/rzOnly one '/' permitted in %r)�_compat_str�split�lenr5)r=�addrrrr�_split_optional_netmasksrOccsRt|�}t|�}}x.|D]&}|j|jdkr<||fV|}|}qW||fVdS)z�Find a sequence of sorted deduplicated IPv#Address.

    Args:
        addresses: a list of IPv#Address objects.

    Yields:
        A tuple containing the first and last IP addresses in the sequence.

    r N)�iter�next�_ip)�	addresses�it�first�last�iprrr�_find_address_ranges


rXcCs$|dkr|St|t||d@��S)z�Count the number of zero bits on the right hand side.

    Args:
        number: an integer.
        bits: maximum number of bits to count.

    Returns:
        The number of zero bits on the right hand side of the number.

    rr )�minr)Znumber�bitsrrr�_count_righthand_zero_bits0sr[ccs�t|t�ot|t�std��|j|jkr8td||f��||krHtd��|jdkrXt}n|jdkrht}ntd��|j}|j}|j}x^||kr�t	t
||�t||d�d�}||||f�}|V|d|>7}|d|jkr�Pq�WdS)	a�Summarize a network range given the first and last IP addresses.

    Example:
        >>> list(summarize_address_range(IPv4Address('192.0.2.0'),
        ...                              IPv4Address('192.0.2.130')))
        ...                                #doctest: +NORMALIZE_WHITESPACE
        [IPv4Network('192.0.2.0/25'), IPv4Network('192.0.2.128/31'),
         IPv4Network('192.0.2.130/32')]

    Args:
        first: the first IPv4Address or IPv6Address in the range.
        last: the last IPv4Address or IPv6Address in the range.

    Returns:
        An iterator of the summarized IPv(4|6) network objects.

    Raise:
        TypeError:
            If the first and last objects are not IP addresses.
            If the first and last objects are not the same version.
        ValueError:
            If the last object is not greater than the first.
            If the version of the first address is not 4 or 6.

    z1first and last must be IP addresses, not networksz%%s and %s are not of the same versionz*last IP address must be greater than firstr�zunknown IP versionr N)
r:�_BaseAddress�	TypeError�versionr<r?r@�_max_prefixlenrRrYr[r�	_ALL_ONES)rUrVrWZip_bitsZ	first_intZlast_intZnbits�netrrr�summarize_address_range@s0





rcccs�t|�}i}xL|rX|j�}|j�}|j|�}|dkr>|||<q||kr||=|j|�qWd}x4t|j��D]$}|dk	r�|j|jkr�ql|V|}qlWdS)auLoops through the addresses, collapsing concurrent netblocks.

    Example:

        ip1 = IPv4Network('192.0.2.0/26')
        ip2 = IPv4Network('192.0.2.64/26')
        ip3 = IPv4Network('192.0.2.128/26')
        ip4 = IPv4Network('192.0.2.192/26')

        _collapse_addresses_internal([ip1, ip2, ip3, ip4]) ->
          [IPv4Network('192.0.2.0/24')]

        This shouldn't be called directly; it is called via
          collapse_addresses([]).

    Args:
        addresses: A list of IPv4Network's or IPv6Network's

    Returns:
        A list of IPv4Network's or IPv6Network's depending on what we were
        passed.

    N)�list�pop�supernet�get�append�sorted�values�broadcast_address)rSZto_merge�subnetsrbrfZexistingrVrrr�_collapse_addresses_internalws$

rmcCs8g}g}g}x�|D]�}t|t�rT|rH|dj|jkrHtd||df��|j|�q|j|jkr�|r�|dj|jkr�td||df��y|j|j�Wq�tk
r�|j|j	�Yq�Xq|r�|dj|jkr�td||df��|j|�qWt
t|��}|�r,x&t|�D]\}}|j
t||���qWt||�S)	a�Collapse a list of IP objects.

    Example:
        collapse_addresses([IPv4Network('192.0.2.0/25'),
                            IPv4Network('192.0.2.128/25')]) ->
                           [IPv4Network('192.0.2.0/24')]

    Args:
        addresses: An iterator of IPv4Network or IPv6Network objects.

    Returns:
        An iterator of the collapsed IPv(4|6)Network objects.

    Raises:
        TypeError: If passed a list of mixed version objects.

    r z%%s and %s are not of the same version���rnrnrnrnrn)r:r]�_versionr^rh�
_prefixlenr`rW�AttributeError�network_addressri�setrX�extendrcrm)rSZaddrsZipsZnetsrWrUrVrrr�collapse_addresses�s4

rucCs(t|t�r|j�St|t�r$|j�StS)a2Return a key suitable for sorting between networks and addresses.

    Address and Network objects are not sortable by default; they're
    fundamentally different so the expression

        IPv4Address('192.0.2.0') <= IPv4Network('192.0.2.0/24')

    doesn't make any sense.  There are some times however, where you may wish
    to have ipaddress sort these for you anyway. If you need to do this, you
    can use this function as the key= argument to sorted().

    Args:
      obj: either a Network or Address object.
    Returns:
      appropriate key.

    )r:�_BaseNetwork�_get_networks_keyr]�_get_address_keyr))�objrrr�get_mixed_type_key�s


rzc@s�eZdZdZfZedd��Zedd��Zedd��Zedd	��Z	d
d�Z
dd
�Zedd��Z
edd��Zedd��Zedd��Zedd��Zdd�ZdS)�_IPAddressBasezThe mother class.cCs|j�S)z:Return the longhand version of the IP address as a string.)�_explode_shorthand_ip_string)r&rrr�exploded�sz_IPAddressBase.explodedcCst|�S)z;Return the shorthand version of the IP address as a string.)rK)r&rrr�
compressedsz_IPAddressBase.compressedcCs|j�S)aIThe name of the reverse DNS pointer for the IP address, e.g.:
            >>> ipaddress.ip_address("127.0.0.1").reverse_pointer
            '1.0.0.127.in-addr.arpa'
            >>> ipaddress.ip_address("2001:db8::1").reverse_pointer
            '1.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.8.b.d.0.1.0.0.2.ip6.arpa'

        )�_reverse_pointer)r&rrr�reverse_pointers	z_IPAddressBase.reverse_pointercCsdt|�f}t|��dS)Nz%200s has no version specified)�typer)r&�msgrrrr_sz_IPAddressBase.versioncCsF|dkrd}t|||jf��||jkrBd}t|||j|jf��dS)Nrz-%d (< 0) is not permitted as an IPv%d addressz2%d (>= 2**%d) is not permitted as an IPv%d address)r5rorar`)r&r=r�rrr�_check_int_addresss

z!_IPAddressBase._check_int_addresscCs.t|�}||kr*d}t|||||jf��dS)Nz~%r (len %d != %d) is not permitted as an IPv%d address. Did you pass in a bytes (str in Python 2) instead of a unicode object?)rMr5ro)r&r=Zexpected_lenZaddress_lenr�rrr�_check_packed_address s
z$_IPAddressBase._check_packed_addresscCs|j|j|?AS)z�Turn the prefix length into a bitwise netmask

        Args:
            prefixlen: An integer, the prefix length.

        Returns:
            An integer.

        )ra)�cls�	prefixlenrrr�_ip_int_from_prefix+sz"_IPAddressBase._ip_int_from_prefixc	Cs\t||j�}|j|}||?}d|>d}||krX|jd}t||d�}d}t||��|S)aReturn prefix length from the bitwise netmask.

        Args:
            ip_int: An integer, the netmask in expanded bitwise format

        Returns:
            An integer, the prefix length.

        Raises:
            ValueError: If the input intermingles zeroes & ones
        r r
rFz&Netmask pattern %r mixes zeroes & ones)r[r`rr<)	r��ip_intZtrailing_zeroesr�Zleading_onesZall_onesZbyteslenZdetailsr�rrr�_prefix_from_ip_int8s


z"_IPAddressBase._prefix_from_ip_intcCsd|}t|��dS)Nz%r is not a valid netmask)r7)r�Znetmask_strr�rrr�_report_invalid_netmaskQsz&_IPAddressBase._report_invalid_netmaskcCsjtjj|�s|j|�yt|�}Wntk
r@|j|�YnXd|koV|jknsf|j|�|S)a	Return prefix length from a numeric string

        Args:
            prefixlen_str: The string to be converted

        Returns:
            An integer, the prefix length.

        Raises:
            NetmaskValueError: If the input is not a valid netmask
        r)�_BaseV4�_DECIMAL_DIGITS�
issupersetr��intr<r`)r�Z
prefixlen_strr�rrr�_prefix_from_prefix_stringVs

z)_IPAddressBase._prefix_from_prefix_stringcCs�y|j|�}Wntk
r,|j|�YnXy
|j|�Stk
rLYnX||jN}y
|j|�Stk
r�|j|�YnXdS)aTurn a netmask/hostmask string into a prefix length

        Args:
            ip_str: The netmask/hostmask to be converted

        Returns:
            An integer, the prefix length.

        Raises:
            NetmaskValueError: If the input is not a valid netmask/hostmask
        N)�_ip_int_from_stringr5r�r�r<ra)r��ip_strr�rrr�_prefix_from_ip_stringos


z%_IPAddressBase._prefix_from_ip_stringcCs|jt|�ffS)N)�	__class__rK)r&rrr�
__reduce__�sz_IPAddressBase.__reduce__N)r1r2r3r6r4�propertyr}r~r�r_r�r��classmethodr�r�r�r�r�r�rrrrr{�s	
"r{c@sdeZdZdZfZdd�Zdd�Zdd�Zdd	�Zd
d�Z	dd
�Z
dd�Zdd�Zdd�Z
dd�ZdS)r]z�A generic IP object.

    This IP class contains the version independent methods which are
    used by single IP addresses.
    cCs|jS)N)rR)r&rrr�__int__�sz_BaseAddress.__int__cCs2y|j|jko|j|jkStk
r,tSXdS)N)rRrorqr))r&r'rrrr(�s
z_BaseAddress.__eq__cCs`t|t�stSt|t�s(td||f��|j|jkrDtd||f��|j|jkr\|j|jkSdS)Nz"%s and %s are not of the same typez%%s and %s are not of the same versionF)r:r{r)r]r^rorR)r&r'rrrr,�s

z_BaseAddress.__lt__cCs t|t�stS|jt|�|�S)N)r:�_compat_int_typesr)r�r�)r&r'rrr�__add__�s
z_BaseAddress.__add__cCs t|t�stS|jt|�|�S)N)r:r�r)r�r�)r&r'rrr�__sub__�s
z_BaseAddress.__sub__cCsd|jjt|�fS)Nz%s(%r))r�r1rK)r&rrr�__repr__�sz_BaseAddress.__repr__cCst|j|j��S)N)rK�_string_from_ip_intrR)r&rrr�__str__�sz_BaseAddress.__str__cCsttt|j���S)N)�hash�hexr�rR)r&rrr�__hash__�sz_BaseAddress.__hash__cCs
|j|fS)N)ro)r&rrrrx�sz_BaseAddress._get_address_keycCs|j|jffS)N)r�rR)r&rrrr��sz_BaseAddress.__reduce__N)r1r2r3r6r4r�r(r,r�r�r�r�r�rxr�rrrrr]�sr]c@sXeZdZdZdd�Zdd�Zdd�Zdd	�Zd
d�Zdd
�Z	dd�Z
dd�Zdd�Zdd�Z
dd�Zedd��Zedd��Zedd��Zedd��Zed d!��Zed"d#��Zed$d%��Zed&d'��Zd(d)�Zd*d+�Zd,d-�ZdFd0d1�ZdGd2d3�Zed4d5��Zd6d7�Zd8d9�Zed:d;��Z ed<d=��Z!ed>d?��Z"ed@dA��Z#edBdC��Z$edDdE��Z%d/S)Hrvz~A generic IP network object.

    This IP class contains the version independent methods which are
    used by networks.

    cCs
i|_dS)N)�_cache)r&r=rrr�__init__�sz_BaseNetwork.__init__cCsd|jjt|�fS)Nz%s(%r))r�r1rK)r&rrrr��sz_BaseNetwork.__repr__cCsd|j|jfS)Nz%s/%d)rrr�)r&rrrr��sz_BaseNetwork.__str__ccs<t|j�}t|j�}x"t|d|�D]}|j|�Vq$WdS)z�Generate Iterator over usable hosts in a network.

        This is like __iter__ except it doesn't return the network
        or broadcast addresses.

        r N)r�rrrkr$�_address_class)r&�network�	broadcast�xrrr�hosts�s

z_BaseNetwork.hostsccs<t|j�}t|j�}x"t||d�D]}|j|�Vq$WdS)Nr )r�rrrkr$r�)r&r�r�r�rrr�__iter__�s

z_BaseNetwork.__iter__cCslt|j�}t|j�}|dkr>|||kr0td��|j||�S|d7}|||krZtd��|j||�SdS)Nrzaddress out of ranger )r�rrrk�
IndexErrorr�)r&�nr�r�rrr�__getitem__�s

z_BaseNetwork.__getitem__cCsxt|t�stSt|t�s(td||f��|j|jkrDtd||f��|j|jkr\|j|jkS|j|jkrt|j|jkSdS)Nz"%s and %s are not of the same typez%%s and %s are not of the same versionF)r:r{r)rvr^rorr�netmask)r&r'rrrr,s

z_BaseNetwork.__lt__cCsFy,|j|jko,|j|jko,t|j�t|j�kStk
r@tSXdS)N)rorrr�r�rqr))r&r'rrrr(sz_BaseNetwork.__eq__cCstt|j�t|j�A�S)N)r�r�rrr�)r&rrrr�sz_BaseNetwork.__hash__cCsL|j|jkrdSt|t�rdSt|j�t|j�koBt|j�kSSdS)NF)ror:rvr�rrrRrk)r&r'rrr�__contains__s
z_BaseNetwork.__contains__cCs(|j|kp&|j|kp&|j|kp&|j|kS)z*Tell if self is partly contained in other.)rrrk)r&r'rrr�overlaps)s


z_BaseNetwork.overlapscCs<|jjd�}|dkr8|jt|j�t|j�B�}||jd<|S)Nrk)r�rgr�r�rr�hostmask)r&r�rrrrk0s
z_BaseNetwork.broadcast_addresscCs8|jjd�}|dkr4|jt|j�|jA�}||jd<|S)Nr�)r�rgr�r�r�ra)r&r�rrrr�9s

z_BaseNetwork.hostmaskcCsd|j|jfS)Nz%s/%d)rrrp)r&rrr�with_prefixlenAsz_BaseNetwork.with_prefixlencCsd|j|jfS)Nz%s/%s)rrr�)r&rrr�with_netmaskEsz_BaseNetwork.with_netmaskcCsd|j|jfS)Nz%s/%s)rrr�)r&rrr�
with_hostmaskIsz_BaseNetwork.with_hostmaskcCst|j�t|j�dS)z&Number of hosts in the current subnet.r )r�rkrr)r&rrr�
num_addressesMsz_BaseNetwork.num_addressescCsdt|�f}t|��dS)Nz%%200s has no associated address class)r�r)r&r�rrrr�Rsz_BaseNetwork._address_classcCs|jS)N)rp)r&rrrr�Zsz_BaseNetwork.prefixlenccs|j|jkstd||f��t|t�s2td|��|j|�sLtd||f��||krXdS|jd|j|jf�}|j	�\}}xb||kr�||kr�|j|�r�|V|j	�\}}q||j|�r�|V|j	�\}}q|t
d|||f��q|W||kr�|Vn$||k�r|Vnt
d|||f��dS)a�Remove an address from a larger block.

        For example:

            addr1 = ip_network('192.0.2.0/28')
            addr2 = ip_network('192.0.2.1/32')
            list(addr1.address_exclude(addr2)) =
                [IPv4Network('192.0.2.0/32'), IPv4Network('192.0.2.2/31'),
                 IPv4Network('192.0.2.4/30'), IPv4Network('192.0.2.8/29')]

        or IPv6:

            addr1 = ip_network('2001:db8::1/32')
            addr2 = ip_network('2001:db8::1/128')
            list(addr1.address_exclude(addr2)) =
                [ip_network('2001:db8::1/128'),
                 ip_network('2001:db8::2/127'),
                 ip_network('2001:db8::4/126'),
                 ip_network('2001:db8::8/125'),
                 ...
                 ip_network('2001:db8:8000::/33')]

        Args:
            other: An IPv4Network or IPv6Network object of the same type.

        Returns:
            An iterator of the IPv(4|6)Network objects which is self
            minus other.

        Raises:
            TypeError: If self and other are of differing address
              versions, or if other is not a network object.
            ValueError: If other is not completely contained by self.

        z%%s and %s are not of the same versionz%s is not a network objectz%s not contained in %sNz%s/%sz3Error performing exclusion: s1: %s s2: %s other: %s)ror^r:rv�	subnet_ofr<r�rrr�rl�AssertionError)r&r'�s1�s2rrr�address_exclude^s6$





z_BaseNetwork.address_excludecCs`|j|jkrtd||f��|j|jkr,dS|j|jkr<dS|j|jkrLdS|j|jkr\dSdS)a�Compare two IP objects.

        This is only concerned about the comparison of the integer
        representation of the network addresses.  This means that the
        host bits aren't considered at all in this method.  If you want
        to compare host bits, you can easily enough do a
        'HostA._ip < HostB._ip'

        Args:
            other: An IP object.

        Returns:
            If the IP versions of self and other are the same, returns:

            -1 if self < other:
              eg: IPv4Network('192.0.2.0/25') < IPv4Network('192.0.2.128/25')
              IPv6Network('2001:db8::1000/124') <
                  IPv6Network('2001:db8::2000/124')
            0 if self == other
              eg: IPv4Network('192.0.2.0/24') == IPv4Network('192.0.2.0/24')
              IPv6Network('2001:db8::1000/124') ==
                  IPv6Network('2001:db8::1000/124')
            1 if self > other
              eg: IPv4Network('192.0.2.128/25') > IPv4Network('192.0.2.0/25')
                  IPv6Network('2001:db8::2000/124') >
                      IPv6Network('2001:db8::1000/124')

          Raises:
              TypeError if the IP versions are different.

        z"%s and %s are not of the same typer rrnrn)ror^rrr�)r&r'rrr�compare_networks�s!z_BaseNetwork.compare_networkscCs|j|j|jfS)z�Network-only key function.

        Returns an object that identifies this address' network and
        netmask. This function is a suitable "key" argument for sorted()
        and list.sort().

        )rorrr�)r&rrrrw�sz_BaseNetwork._get_networks_keyr Nc	cs�|j|jkr|VdS|dk	rJ||jkr0td��|dkr@td��||j}|dkrZtd��|j|}||jkr~td||f��t|j�}t|j�d}t|j�d|?}x(t|||�D]}|j||f�}|Vq�WdS)a�The subnets which join to make the current subnet.

        In the case that self contains only one IP
        (self._prefixlen == 32 for IPv4 or self._prefixlen == 128
        for IPv6), yield an iterator with just ourself.

        Args:
            prefixlen_diff: An integer, the amount the prefix length
              should be increased by. This should not be set if
              new_prefix is also set.
            new_prefix: The desired new prefix length. This must be a
              larger number (smaller prefix) than the existing prefix.
              This should not be set if prefixlen_diff is also set.

        Returns:
            An iterator of IPv(4|6) objects.

        Raises:
            ValueError: The prefixlen_diff is too small or too large.
                OR
            prefixlen_diff and new_prefix are both set or new_prefix
              is a smaller number than the current prefix (smaller
              number means a larger network)

        Nznew prefix must be longerr z(cannot set prefixlen_diff and new_prefixrzprefix length diff must be > 0z0prefix length diff %d is invalid for netblock %s)	rpr`r<r�rrrkr�r$r�)	r&�prefixlen_diff�
new_prefix�
new_prefixlenr!r"r#Znew_addrZcurrentrrrrl�s,




z_BaseNetwork.subnetscCs�|jdkr|S|dk	rB||jkr(td��|dkr8td��|j|}|j|}|dkrftd|j|f��|jt|j�t|j�|>@|f�S)a�The supernet containing the current network.

        Args:
            prefixlen_diff: An integer, the amount the prefix length of
              the network should be decreased by.  For example, given a
              /24 network and a prefixlen_diff of 3, a supernet with a
              /21 netmask is returned.

        Returns:
            An IPv4 network object.

        Raises:
            ValueError: If self.prefixlen - prefixlen_diff < 0. I.e., you have
              a negative prefix length.
                OR
            If prefixlen_diff and new_prefix are both set or new_prefix is a
              larger number than the current prefix (larger number means a
              smaller network)

        rNznew prefix must be shorterr z(cannot set prefixlen_diff and new_prefixz;current prefixlen is %d, cannot have a prefixlen_diff of %d)rpr<r�r�r�rrr�)r&r�r�r�rrrrfs 



z_BaseNetwork.supernetcCs|jjo|jjS)z�Test if the address is reserved for multicast use.

        Returns:
            A boolean, True if the address is a multicast address.
            See RFC 2373 2.7 for details.

        )rr�is_multicastrk)r&rrrr�As	z_BaseNetwork.is_multicastcCsP|j|jkrdSt|d�r<t|d�r<|j|jko:|j|jkStdt|���dS)NFrrrkz9Unable to test subnet containment with element of type %s)ro�hasattrrrrkr^r�)r&r'rrrr�Ms

z_BaseNetwork.subnet_ofcCsP|j|jkrdSt|d�r<t|d�r<|j|jko:|j|jkStdt|���dS)NFrrrkz9Unable to test subnet containment with element of type %s)ror�rrrkr^r�)r&r'rrr�supernet_of[s

z_BaseNetwork.supernet_ofcCs|jjo|jjS)z�Test if the address is otherwise IETF reserved.

        Returns:
            A boolean, True if the address is within one of the
            reserved IPv6 Network ranges.

        )rr�is_reservedrk)r&rrrr�is	z_BaseNetwork.is_reservedcCs|jjo|jjS)z�Test if the address is reserved for link-local.

        Returns:
            A boolean, True if the address is reserved per RFC 4291.

        )rr�
is_link_localrk)r&rrrr�usz_BaseNetwork.is_link_localcCs|jjo|jjS)z�Test if this address is allocated for private networks.

        Returns:
            A boolean, True if the address is reserved per
            iana-ipv4-special-registry or iana-ipv6-special-registry.

        )rr�
is_privaterk)r&rrrr��s	z_BaseNetwork.is_privatecCs|jS)z�Test if this address is allocated for public networks.

        Returns:
            A boolean, True if the address is not reserved per
            iana-ipv4-special-registry or iana-ipv6-special-registry.

        )r�)r&rrr�	is_global�s	z_BaseNetwork.is_globalcCs|jjo|jjS)z�Test if the address is unspecified.

        Returns:
            A boolean, True if this is the unspecified address as defined in
            RFC 2373 2.5.2.

        )rr�is_unspecifiedrk)r&rrrr��s	z_BaseNetwork.is_unspecifiedcCs|jjo|jjS)z�Test if the address is a loopback address.

        Returns:
            A boolean, True if the address is a loopback address as defined in
            RFC 2373 2.5.3.

        )rr�is_loopbackrk)r&rrrr��s	z_BaseNetwork.is_loopback)r N)r N)&r1r2r3r6r�r�r�r�r�r�r,r(r�r�r�r�rkr�r�r�r�r�r�r�r�r�rwrlrfr�r�r�r�r�r�r�r�r�rrrrrv�sD

	K0

5
)rvc
@s�eZdZdZfZdZdedZed�Z	edddd	d
ddd
dg	�Z
eZiZdd�Z
edd��Zedd��Zedd��Zedd��Zdd�Zdd�Zedd��Zedd ��Zd!S)"r�zyBase IPv4 object.

    The following methods are used by IPv4 objects in both single IP
    addresses and networks.

    rrr �
0123456789���������rrcCst|�S)N)rK)r&rrrr|�sz$_BaseV4._explode_shorthand_ip_stringcCsn||jkrdt|t�r|}n.y|j|�}Wntk
rF|j|�}YnXt|j|��}||f|j|<|j|S)aMake a (netmask, prefix_len) tuple from the given argument.

        Argument can be:
        - an integer (the prefix length)
        - a string representing the prefix length (e.g. "24")
        - a string representing the prefix netmask (e.g. "255.255.255.0")
        )�_netmask_cacher:r�r�r7r�r8r�)r��argr�r�rrr�
_make_netmask�s	

z_BaseV4._make_netmaskcCsx|std��|jd�}t|�dkr.td|��ytt|j|�d�Stk
rr}ztd||f��WYdd}~XnXdS)aTurn the given IP string into an integer for comparison.

        Args:
            ip_str: A string, the IP ip_str.

        Returns:
            The IP ip_str as an integer.

        Raises:
            AddressValueError: if ip_str isn't a valid IPv4 Address.

        zAddress cannot be empty�.rzExpected 4 octets in %rrFz%s in %rN)r5rLrMr�map�_parse_octetr<)r�r�Zoctets�excrrrr��s
z_BaseV4._ip_int_from_stringcCs�|std��|jj|�s(d}t||��t|�dkrDd}t||��t|d�}|dkrr|ddkrrd	}t||��|d
kr�td|��|S)aConvert a decimal octet into an integer.

        Args:
            octet_str: A string, the number to parse.

        Returns:
            The octet as an integer.

        Raises:
            ValueError: if the octet isn't strictly a decimal from [0..255].

        zEmpty octet not permittedz#Only decimal digits permitted in %r�z$At most 3 characters permitted in %r�
�r�0z3Ambiguous (octal/decimal) value in %r not permittedr�zOctet %d (> 255) not permitted)r<r�r�rMr�)r�Z	octet_strr�Z	octet_intrrrr��s
z_BaseV4._parse_octetcCsdjdd�t|dd�D��S)z�Turns a 32-bit integer into dotted decimal notation.

        Args:
            ip_int: An integer, the IP address.

        Returns:
            The IP address as a string in dotted decimal notation.

        r�css0|](}tt|t�r"tjd|�dn|�VqdS)s!BrN)rKr:r;rr	)r
rrrr�	<genexpr>-sz._BaseV4._string_from_ip_int.<locals>.<genexpr>rrF)�joinr)r�r�rrrr�"s
z_BaseV4._string_from_ip_intcsh|jd�}y�fdd�tt|�D�}Wntk
r:dSXt|�t|�krPdS|d|dkrddSdS)	z�Test if the IP string is a hostmask (rather than a netmask).

        Args:
            ip_str: A string, the potential hostmask.

        Returns:
            A boolean, True if the IP string is a hostmask.

        r�csg|]}|�jkr|�qSr)�_valid_mask_octets)r
r�)r&rrr>sz(_BaseV4._is_hostmask.<locals>.<listcomp>Frr Trn)rLr�r�r<rM)r&r�rZ�partsr)r&r�_is_hostmask2s

z_BaseV4._is_hostmaskcCs&t|�jd�ddd�}dj|�dS)z�Return the reverse DNS pointer name for the IPv4 address.

        This implements the method described in RFC1035 3.5.

        r�Nr z
.in-addr.arparn)rKrLr�)r&Zreverse_octetsrrrrGsz_BaseV4._reverse_pointercCs|jS)N)r`)r&rrr�
max_prefixlenPsz_BaseV4.max_prefixlencCs|jS)N)ro)r&rrrr_Tsz_BaseV4.versionN)r1r2r3r6r4ro�
IPV4LENGTHra�	frozensetr�r�r`r�r|r�r�r�r�r�r�rr�r�r_rrrrr��s"%	r�c@s|eZdZdZdZdd�Zedd��Zedd	��Zed
d��Z	edd
��Z
edd��Zedd��Zedd��Z
edd��ZdS)r8z/Represent and manipulate single IPv4 Addresses.rR�__weakref__cCsxt|t�r|j|�||_dSt|t�rL|j|d�t|�}t|d�|_dSt|�}d|krht	d|��|j
|�|_dS)a�
        Args:
            address: A string or integer representing the IP

              Additionally, an integer can be passed, so
              IPv4Address('192.0.2.1') == IPv4Address(3221225985).
              or, more generally
              IPv4Address(int(IPv4Address('192.0.2.1'))) ==
                IPv4Address('192.0.2.1')

        Raises:
            AddressValueError: If ipaddress isn't a valid IPv4 address.

        NrrFrJzUnexpected '/' in %r)r:r�r�rRr;r�rrrKr5r�)r&r=�bvs�addr_strrrrr�_s


zIPv4Address.__init__cCs
t|j�S)z*The binary representation of this address.)rHrR)r&rrr�packed�szIPv4Address.packedcCs||jjkS)z�Test if the address is otherwise IETF reserved.

         Returns:
             A boolean, True if the address is within the
             reserved IPv4 Network range.

        )�
_constants�_reserved_network)r&rrrr��s	zIPv4Address.is_reservedcst�fdd��jjD��S)z�Test if this address is allocated for private networks.

        Returns:
            A boolean, True if the address is reserved per
            iana-ipv4-special-registry.

        c3s|]}�|kVqdS)Nr)r
rb)r&rrr��sz)IPv4Address.is_private.<locals>.<genexpr>)�anyr��_private_networks)r&r)r&rr��s	zIPv4Address.is_privatecCs||jjko|jS)N)r��_public_networkr�)r&rrrr��szIPv4Address.is_globalcCs||jjkS)z�Test if the address is reserved for multicast use.

        Returns:
            A boolean, True if the address is multicast.
            See RFC 3171 for details.

        )r��_multicast_network)r&rrrr��s	zIPv4Address.is_multicastcCs||jjkS)z�Test if the address is unspecified.

        Returns:
            A boolean, True if this is the unspecified address as defined in
            RFC 5735 3.

        )r��_unspecified_address)r&rrrr��s	zIPv4Address.is_unspecifiedcCs||jjkS)z�Test if the address is a loopback address.

        Returns:
            A boolean, True if the address is a loopback per RFC 3330.

        )r��_loopback_network)r&rrrr��szIPv4Address.is_loopbackcCs||jjkS)z�Test if the address is reserved for link-local.

        Returns:
            A boolean, True if the address is link-local per RFC 3927.

        )r��_linklocal_network)r&rrrr��szIPv4Address.is_link_localN)rRr�)r1r2r3r6r4r�r�r�r�r�r�r�r�r�r�rrrrr8Ys$
r8c@sjeZdZdd�Zdd�Zdd�Zdd�Zd	d
�Zej	Z	e
dd��Ze
d
d��Ze
dd��Z
e
dd��ZdS)rCcCs�t|ttf�r2tj||�t|j�|_|j|_	dSt|t
�r�tj||d�t|�dkrht|d�|_	n|j|_	t|dd�|_|jj
|_
|jj|_dSt|�}tj||d�t|dd�|_|jj	|_	|jj
|_
|jj|_dS)Nrr F)rA)r:r;r�r8r�r?rRr�r`rp�tuplerMr�r�r�rO)r&r=rNrrrr��s(




zIPv4Interface.__init__cCsd|j|j�|jjfS)Nz%s/%d)r�rRr�r�)r&rrrr��szIPv4Interface.__str__cCsDtj||�}|s|tkr|Sy|j|jkStk
r>dSXdS)NF)r8r(r)r�rq)r&r'�
address_equalrrrr(�szIPv4Interface.__eq__cCs>tj||�}|tkrtSy|j|jkStk
r8dSXdS)NF)r8r,r)r�rq)r&r'�address_lessrrrr,�szIPv4Interface.__lt__cCs|j|jAt|jj�AS)N)rRrpr�r�rr)r&rrrr�szIPv4Interface.__hash__cCs
t|j�S)N)r8rR)r&rrrrW
szIPv4Interface.ipcCsd|j|j�|jfS)Nz%s/%s)r�rRrp)r&rrrr�szIPv4Interface.with_prefixlencCsd|j|j�|jfS)Nz%s/%s)r�rRr�)r&rrrr�szIPv4Interface.with_netmaskcCsd|j|j�|jfS)Nz%s/%s)r�rRr�)r&rrrr�szIPv4Interface.with_hostmaskN)r1r2r3r�r�r(r,r�r{r�r�rWr�r�r�rrrrrC�srCc@s*eZdZdZeZddd�Zedd��ZdS)	r?aeThis class represents and manipulates 32-bit IPv4 network + addresses..

    Attributes: [examples for IPv4Network('192.0.2.0/27')]
        .network_address: IPv4Address('192.0.2.0')
        .hostmask: IPv4Address('0.0.0.31')
        .broadcast_address: IPv4Address('192.0.2.32')
        .netmask: IPv4Address('255.255.255.224')
        .prefixlen: 27

    TcCs|tj||�t|ttf�r<t|�|_|j|j�\|_	|_
dSt|t�r�t|�dkr\|d}n|j}t|d�|_|j|�\|_	|_
t
|j�}|t
|j	�@|kr�|r�td|��nt|t
|j	�@�|_dSt|�}t|j|d��|_t|�dkr�|d}n|j}|j|�\|_	|_
|�rDtt
|j�t
|j	�@�|jk�rDtd|��tt
|j�t
|j	�@�|_|j
|jdk�rx|j|_dS)aInstantiate a new IPv4 network object.

        Args:
            address: A string or integer representing the IP [& network].
              '192.0.2.0/24'
              '192.0.2.0/255.255.255.0'
              '192.0.0.2/0.0.0.255'
              are all functionally the same in IPv4. Similarly,
              '192.0.2.1'
              '192.0.2.1/255.255.255.255'
              '192.0.2.1/32'
              are also functionally equivalent. That is to say, failing to
              provide a subnetmask will create an object with a mask of /32.

              If the mask (portion after the / in the argument) is given in
              dotted quad form, it is treated as a netmask if it starts with a
              non-zero field (e.g. /255.0.0.0 == /8) and as a hostmask if it
              starts with a zero field (e.g. 0.255.255.255 == /8), with the
              single exception of an all-zero mask which is treated as a
              netmask == /0. If no mask is given, a default of /32 is used.

              Additionally, an integer can be passed, so
              IPv4Network('192.0.2.1') == IPv4Network(3221225985)
              or, more generally
              IPv4Interface(int(IPv4Interface('192.0.2.1'))) ==
                IPv4Interface('192.0.2.1')

        Raises:
            AddressValueError: If ipaddress isn't a valid IPv4 address.
            NetmaskValueError: If the netmask isn't valid for
              an IPv4 address.
            ValueError: If strict is True and a network address is not
              supplied.

        Nr rz%s has host bits setr)rvr�r:r�r;r8rrr�r`r�rpr�rMr�r<rOr�r�r�)r&r=rAr�r�rNrrrr�0sB%






zIPv4Network.__init__cCs&|jtd�ko|jtd�ko$|jS)z�Test if this address is allocated for public networks.

        Returns:
            A boolean, True if the address is not reserved per
            iana-ipv4-special-registry.

        z
100.64.0.0/10)rrr?rkr�)r&rrrr��s	zIPv4Network.is_globalN)T)	r1r2r3r6r8r�r�r�r�rrrrr?!s
Ur?c@s�eZdZed�Zed�Zed�Zed�Zed�ed�ed�ed�ed�ed�ed	�ed
�ed�ed�ed
�ed�ed�ed�gZed�Z	e
d�ZdS)�_IPv4Constantsz169.254.0.0/16z127.0.0.0/8z224.0.0.0/4z
100.64.0.0/10z	0.0.0.0/8z
10.0.0.0/8z
172.16.0.0/12z192.0.0.0/29z192.0.0.170/31z192.0.2.0/24z192.168.0.0/16z
198.18.0.0/15z198.51.100.0/24z203.0.113.0/24z240.0.0.0/4z255.255.255.255/32z0.0.0.0N)r1r2r3r?r�r�r�r�r�r�r8r�rrrrr��s(
r�c@s�eZdZdZfZdZdedZdZe	d�Z
eZiZe
dd��Ze
d	d
��Ze
dd��Ze
d
d��Ze
ddd��Zdd�Zdd�Zedd��Zedd��ZdS)�_BaseV6zyBase IPv6 object.

    The following methods are used by IPv6 objects in both single IP
    addresses and networks.

    r\rr r
Z0123456789ABCDEFabcdefcCsJ||jkr@t|t�r|}n
|j|�}t|j|��}||f|j|<|j|S)aMake a (netmask, prefix_len) tuple from the given argument.

        Argument can be:
        - an integer (the prefix length)
        - a string representing the prefix length (e.g. "24")
        - a string representing the prefix netmask (e.g. "255.255.255.0")
        )r�r:r�r�r9r�)r�r�r�r�rrrr��s	


z_BaseV6._make_netmaskcCs�|std��|jd�}d}t|�|kr:d||f}t|��d|dkr�yt|j��j}Wn2tk
r�}ztd||f��WYdd}~XnX|jd	|d
?d@�|jd	|d@�|jd}t|�|kr�d|d|f}t|��d}x@tdt|�d�D]*}	||	s�|dk	�r d
|}t|��|	}q�W|dk	�r�|}
t|�|d}|d�sn|
d8}
|
�rnd}t||��|d�s�|d8}|�r�d}t||��|j|
|}|dk�r4d}t||jd|f��njt|�|jk�r�d}t||j|f��|d�s
d}t||��|d�s$d}t||��t|�}
d}d}ytd}
x,t	|
�D] }	|
d
K}
|
|j
||	�O}
�qDW|
d
|K}
x0t	|d�D] }	|
d
K}
|
|j
||	�O}
�q�W|
Stk
�r�}ztd||f��WYdd}~XnXdS)z�Turn an IPv6 ip_str into an integer.

        Args:
            ip_str: A string, the IPv6 ip_str.

        Returns:
            An int, the IPv6 address

        Raises:
            AddressValueError: if ip_str isn't a valid IPv6 Address.

        zAddress cannot be empty�:r�z At least %d parts expected in %rr�r z%s in %rNz%xri��z!At most %d colons permitted in %rz At most one '::' permitted in %rrz0Leading ':' only permitted as part of '::' in %rz1Trailing ':' only permitted as part of '::' in %rz/Expected at most %d other parts with '::' in %rz,Exactly %d parts expected without '::' in %rrnrnrn)r5rLrMr8rerRrh�
_HEXTET_COUNTr$�range�
_parse_hextetr<)r�r�r�Z
_min_partsr�Zipv4_intr�Z
_max_partsZ
skip_indexrZparts_hiZparts_loZ
parts_skippedr�rrrr��s�
"







z_BaseV6._ip_int_from_stringcCs>|jj|�std|��t|�dkr4d}t||��t|d�S)a&Convert an IPv6 hextet string into an integer.

        Args:
            hextet_str: A string, the number to parse.

        Returns:
            The hextet as an integer.

        Raises:
            ValueError: if the input isn't strictly a hex number from
              [0..FFFF].

        zOnly hex digits permitted in %rrz$At most 4 characters permitted in %rr)�_HEX_DIGITSr�r<rMr�)r�Z
hextet_strr�rrrr�Esz_BaseV6._parse_hextetc	Cs�d}d}d}d}xJt|�D]>\}}|dkrP|d7}|dkr>|}||krX|}|}qd}d}qW|dkr�||}|t|�kr�|dg7}dg|||�<|dkr�dg|}|S)	a�Compresses a list of hextets.

        Compresses a list of strings, replacing the longest continuous
        sequence of "0" in the list with "" and adding empty strings at
        the beginning or at the end of the string such that subsequently
        calling ":".join(hextets) will produce the compressed version of
        the IPv6 address.

        Args:
            hextets: A list of strings, the hextets to compress.

        Returns:
            A list of strings.

        r rr��rnrnrnrn)�	enumeraterM)	r��hextetsZbest_doublecolon_startZbest_doublecolon_lenZdoublecolon_startZdoublecolon_len�indexZhextetZbest_doublecolon_endrrr�_compress_hextets_s.

z_BaseV6._compress_hextetsNcsZ|dkrt|j�}||jkr$td��d|��fdd�tddd�D�}|j|�}d	j|�S)
a,Turns a 128-bit integer into hexadecimal notation.

        Args:
            ip_int: An integer, the IP address.

        Returns:
            A string, the hexadecimal representation of the address.

        Raises:
            ValueError: The address is bigger than 128 bits of all ones.

        NzIPv6 address is too largez%032xcs&g|]}dt�||d�d��qS)z%xrr)r�)r
r�)�hex_strrrr�sz/_BaseV6._string_from_ip_int.<locals>.<listcomp>rrrr�)r�rRrar<r�r�r�)r�r�r�r)r�rr��s


z_BaseV6._string_from_ip_intcs�t|t�rt|j�}nt|t�r,t|j�}nt|�}|j|�}d|��fdd�tddd�D�}t|ttf�r�ddj	|�|j
fSdj	|�S)	z�Expand a shortened IPv6 address.

        Args:
            ip_str: A string, the IPv6 address.

        Returns:
            A string, the expanded IPv6 address.

        z%032xcsg|]}�||d��qS)rr)r
r�)r�rrr�sz8_BaseV6._explode_shorthand_ip_string.<locals>.<listcomp>rrrz%s/%dr�)r:r@rKrrrDrWr�r�rvr�rp)r&r�r�r�r)r�rr|�s



z$_BaseV6._explode_shorthand_ip_stringcCs&|jddd�jdd�}dj|�dS)z�Return the reverse DNS pointer name for the IPv6 address.

        This implements the method described in RFC3596 2.5.

        Nr r�r�r�z	.ip6.arparn)r}�replacer�)r&Z
reverse_charsrrrr�sz_BaseV6._reverse_pointercCs|jS)N)r`)r&rrrr��sz_BaseV6.max_prefixlencCs|jS)N)ro)r&rrrr_�sz_BaseV6.version)N)r1r2r3r6r4ro�
IPV6LENGTHrar�r�r�r`r�r�r�r�r�r�r�r|rr�r�r_rrrrr��s$i0	r�c@s�eZdZdZdZdd�Zedd��Zedd	��Zed
d��Z	edd
��Z
edd��Zedd��Zedd��Z
edd��Zedd��Zedd��Zedd��Zedd��ZdS) r9z/Represent and manipulate single IPv6 Addresses.rRr�cCsxt|t�r|j|�||_dSt|t�rL|j|d�t|�}t|d�|_dSt|�}d|krht	d|��|j
|�|_dS)aInstantiate a new IPv6 address object.

        Args:
            address: A string or integer representing the IP

              Additionally, an integer can be passed, so
              IPv6Address('2001:db8::') ==
                IPv6Address(42540766411282592856903984951653826560)
              or, more generally
              IPv6Address(int(IPv6Address('2001:db8::'))) ==
                IPv6Address('2001:db8::')

        Raises:
            AddressValueError: If address isn't a valid IPv6 address.

        NrrFrJzUnexpected '/' in %r)r:r�r�rRr;r�rrrKr5r�)r&r=r�r�rrrr��s


zIPv6Address.__init__cCs
t|j�S)z*The binary representation of this address.)rIrR)r&rrrr��szIPv6Address.packedcCs||jjkS)z�Test if the address is reserved for multicast use.

        Returns:
            A boolean, True if the address is a multicast address.
            See RFC 2373 2.7 for details.

        )r�r�)r&rrrr�s	zIPv6Address.is_multicastcst�fdd��jjD��S)z�Test if the address is otherwise IETF reserved.

        Returns:
            A boolean, True if the address is within one of the
            reserved IPv6 Network ranges.

        c3s|]}�|kVqdS)Nr)r
r�)r&rrr�sz*IPv6Address.is_reserved.<locals>.<genexpr>)r�r��_reserved_networks)r&r)r&rr�s	zIPv6Address.is_reservedcCs||jjkS)z�Test if the address is reserved for link-local.

        Returns:
            A boolean, True if the address is reserved per RFC 4291.

        )r�r�)r&rrrr�szIPv6Address.is_link_localcCs||jjkS)a`Test if the address is reserved for site-local.

        Note that the site-local address space has been deprecated by RFC 3879.
        Use is_private to test if this address is in the space of unique local
        addresses as defined by RFC 4193.

        Returns:
            A boolean, True if the address is reserved per RFC 3513 2.5.6.

        )r��_sitelocal_network)r&rrr�
is_site_local#szIPv6Address.is_site_localcst�fdd��jjD��S)z�Test if this address is allocated for private networks.

        Returns:
            A boolean, True if the address is reserved per
            iana-ipv6-special-registry.

        c3s|]}�|kVqdS)Nr)r
rb)r&rrr�:sz)IPv6Address.is_private.<locals>.<genexpr>)r�r�r�)r&r)r&rr�1s	zIPv6Address.is_privatecCs|jS)z�Test if this address is allocated for public networks.

        Returns:
            A boolean, true if the address is not reserved per
            iana-ipv6-special-registry.

        )r�)r&rrrr�<s	zIPv6Address.is_globalcCs
|jdkS)z�Test if the address is unspecified.

        Returns:
            A boolean, True if this is the unspecified address as defined in
            RFC 2373 2.5.2.

        r)rR)r&rrrr�Gs	zIPv6Address.is_unspecifiedcCs
|jdkS)z�Test if the address is a loopback address.

        Returns:
            A boolean, True if the address is a loopback address as defined in
            RFC 2373 2.5.3.

        r )rR)r&rrrr�Rs	zIPv6Address.is_loopbackcCs |jd?dkrdSt|jd@�S)z�Return the IPv4 mapped address.

        Returns:
            If the IPv6 address is a v4 mapped address, return the
            IPv4 mapped address. Return None otherwise.

        ri��Nl��)rRr8)r&rrr�ipv4_mapped]s	zIPv6Address.ipv4_mappedcCs4|jd?dkrdSt|jd?d@�t|jd@�fS)z�Tuple of embedded teredo IPs.

        Returns:
            Tuple of the (server, client) IPs or None if the address
            doesn't appear to be a teredo address (doesn't start with
            2001::/32)

        �`i Nrl��)rRr8)r&rrr�teredojs
zIPv6Address.teredocCs$|jd?dkrdSt|jd?d@�S)z�Return the IPv4 6to4 embedded address.

        Returns:
            The IPv4 6to4-embedded address if present or None if the
            address doesn't appear to contain a 6to4 embedded address.

        �pi N�Pl��)rRr8)r&rrr�	sixtofourys	zIPv6Address.sixtofourN)rRr�)r1r2r3r6r4r�r�r�r�r�r�rr�r�r�r�rrrrrrrr9�s%

r9c@s�eZdZdd�Zdd�Zdd�Zdd�Zd	d
�Zej	Z	e
dd��Ze
d
d��Ze
dd��Z
e
dd��Ze
dd��Ze
dd��ZdS)rDcCs�t|ttf�r2tj||�t|j�|_|j|_	dSt|t
�r�tj||d�t|�dkrht|d�|_	n|j|_	t|dd�|_|jj
|_
|jj|_dSt|�}tj||d�t|dd�|_|jj
|_
|jj	|_	|jj|_dS)Nrr F)rA)r:r;r�r9r�r@rRr�r`rpr�rMr�r�r�rO)r&r=rNrrrr��s(




zIPv6Interface.__init__cCsd|j|j�|jjfS)Nz%s/%d)r�rRr�r�)r&rrrr��szIPv6Interface.__str__cCsDtj||�}|s|tkr|Sy|j|jkStk
r>dSXdS)NF)r9r(r)r�rq)r&r'r�rrrr(�szIPv6Interface.__eq__cCs>tj||�}|tkrtSy|j|jkStk
r8dSXdS)NF)r9r,r)r�rq)r&r'r�rrrr,�szIPv6Interface.__lt__cCs|j|jAt|jj�AS)N)rRrpr�r�rr)r&rrrr��szIPv6Interface.__hash__cCs
t|j�S)N)r9rR)r&rrrrW�szIPv6Interface.ipcCsd|j|j�|jfS)Nz%s/%s)r�rRrp)r&rrrr��szIPv6Interface.with_prefixlencCsd|j|j�|jfS)Nz%s/%s)r�rRr�)r&rrrr��szIPv6Interface.with_netmaskcCsd|j|j�|jfS)Nz%s/%s)r�rRr�)r&rrrr��szIPv6Interface.with_hostmaskcCs|jdko|jjS)Nr)rRr�r�)r&rrrr��szIPv6Interface.is_unspecifiedcCs|jdko|jjS)Nr )rRr�r�)r&rrrr��szIPv6Interface.is_loopbackN)r1r2r3r�r�r(r,r�r{r�r�rWr�r�r�r�r�rrrrrD�srDc@s2eZdZdZeZd
dd�Zdd�Zedd��Z	d	S)r@avThis class represents and manipulates 128-bit IPv6 networks.

    Attributes: [examples for IPv6('2001:db8::1000/124')]
        .network_address: IPv6Address('2001:db8::1000')
        .hostmask: IPv6Address('::f')
        .broadcast_address: IPv6Address('2001:db8::100f')
        .netmask: IPv6Address('ffff:ffff:ffff:ffff:ffff:ffff:ffff:fff0')
        .prefixlen: 124

    TcCs|tj||�t|ttf�r<t|�|_|j|j�\|_	|_
dSt|t�r�t|�dkr\|d}n|j}|j|�\|_	|_
t|d�|_t
|j�}|t
|j	�@|kr�|r�td|��nt|t
|j	�@�|_dSt|�}t|j|d��|_t|�dkr�|d}n|j}|j|�\|_	|_
|�rDtt
|j�t
|j	�@�|jk�rDtd|��tt
|j�t
|j	�@�|_|j
|jdk�rx|j|_dS)a�Instantiate a new IPv6 Network object.

        Args:
            address: A string or integer representing the IPv6 network or the
              IP and prefix/netmask.
              '2001:db8::/128'
              '2001:db8:0000:0000:0000:0000:0000:0000/128'
              '2001:db8::'
              are all functionally the same in IPv6.  That is to say,
              failing to provide a subnetmask will create an object with
              a mask of /128.

              Additionally, an integer can be passed, so
              IPv6Network('2001:db8::') ==
                IPv6Network(42540766411282592856903984951653826560)
              or, more generally
              IPv6Network(int(IPv6Network('2001:db8::'))) ==
                IPv6Network('2001:db8::')

            strict: A boolean. If true, ensure that we have been passed
              A true network address, eg, 2001:db8::1000/124 and not an
              IP address on a network, eg, 2001:db8::1/124.

        Raises:
            AddressValueError: If address isn't a valid IPv6 address.
            NetmaskValueError: If the netmask isn't valid for
              an IPv6 address.
            ValueError: If strict was True and a network address was not
              supplied.

        Nr rz%s has host bits setr)rvr�r:r;r�r9rrr�r`r�rpr�rMr�r<rOr�r�r�)r&r=rAr�r�rNrrrr��sB 






zIPv6Network.__init__ccs@t|j�}t|j�}x&t|d|d�D]}|j|�Vq(WdS)z�Generate Iterator over usable hosts in a network.

          This is like __iter__ except it doesn't return the
          Subnet-Router anycast address.

        r N)r�rrrkr$r�)r&r�r�r�rrrr�<	s

zIPv6Network.hostscCs|jjo|jjS)a`Test if the address is reserved for site-local.

        Note that the site-local address space has been deprecated by RFC 3879.
        Use is_private to test if this address is in the space of unique local
        addresses as defined by RFC 4193.

        Returns:
            A boolean, True if the address is reserved per RFC 3513 2.5.6.

        )rrrrk)r&rrrrH	szIPv6Network.is_site_localN)T)
r1r2r3r6r9r�r�r�r�rrrrrr@�s

Or@c@s�eZdZed�Zed�Zed�ed�ed�ed�ed�ed�ed	�ed
�ed�ed�g
Zed�ed
�ed�ed�ed�ed�ed�ed�ed�ed�ed�ed�ed�ed�ed�gZed�ZdS)�_IPv6Constantsz	fe80::/10zff00::/8z::1/128z::/128z
::ffff:0:0/96z100::/64z	2001::/23z2001:2::/48z
2001:db8::/32z2001:10::/28zfc00::/7z::/8z100::/8z200::/7z400::/6z800::/5z1000::/4z4000::/3z6000::/3z8000::/3zA000::/3zC000::/3zE000::/4zF000::/5zF800::/6zFE00::/9z	fec0::/10N)	r1r2r3r@r�r�r�rrrrrrr	X	s*

r	r)r )T)6r6Z
__future__rrr�__version__r�r�Zlong�	NameErrorZunicoderK�strr�
from_bytesrrqrr�rr$�objectr%r�r�r<r5r7r>rBrErHrIrOrXr[rcrmrurzr{r]rvr�r8rCr?r�r�r�r9rDr@r	rrrr�<module>	s�

	


)$
$#716=a*vRr 5V{!_vendor/__pycache__/pyparsing.cpython-36.opt-1.pyc000064400000610513151733136260016026 0ustar003

�Pf�k�@s�dZdZdZdZddlZddlmZddlZddl	Z	ddl
Z
ddlZddlZddl
Z
ddlZddlZddlZddlmZyddlmZWn ek
r�ddlmZYnXydd	l
mZWn>ek
r�ydd	lmZWnek
r�dZYnXYnXd
ddd
ddddddddddddddddddd d!d"d#d$d%d&d'd(d)d*d+d,d-d.d/d0d1d2d3d4d5d6d7d8d9d:d;d<d=d>d?d@dAdBdCdDdEdFdGdHdIdJdKdLdMdNdOdPdQdRdSdTdUdVdWdXdYdZd[d\d]d^d_d`dadbdcdddedfdgdhdidjdkdldmdndodpdqdrgiZee	j�dds�ZeddskZe�r"e	jZe Z!e"Z#e Z$e%e&e'e(e)ee*e+e,e-e.gZ/nbe	j0Ze1Z2dtdu�Z$gZ/ddl3Z3xBdvj4�D]6Z5ye/j6e7e3e5��Wne8k
�r|�wJYnX�qJWe9dwdx�e2dy�D��Z:dzd{�Z;Gd|d}�d}e<�Z=ej>ej?Z@d~ZAeAdZBe@eAZCe"d��ZDd�jEd�dx�ejFD��ZGGd�d!�d!eH�ZIGd�d#�d#eI�ZJGd�d%�d%eI�ZKGd�d'�d'eK�ZLGd�d*�d*eH�ZMGd�d��d�e<�ZNGd�d&�d&e<�ZOe
jPjQeO�d�d=�ZRd�dN�ZSd�dK�ZTd�d��ZUd�d��ZVd�d��ZWd�dU�ZX�d/d�d��ZYGd�d(�d(e<�ZZGd�d0�d0eZ�Z[Gd�d�de[�Z\Gd�d�de[�Z]Gd�d�de[�Z^e^Z_e^eZ_`Gd�d�de[�ZaGd�d�de^�ZbGd�d�dea�ZcGd�dp�dpe[�ZdGd�d3�d3e[�ZeGd�d+�d+e[�ZfGd�d)�d)e[�ZgGd�d
�d
e[�ZhGd�d2�d2e[�ZiGd�d��d�e[�ZjGd�d�dej�ZkGd�d�dej�ZlGd�d�dej�ZmGd�d.�d.ej�ZnGd�d-�d-ej�ZoGd�d5�d5ej�ZpGd�d4�d4ej�ZqGd�d$�d$eZ�ZrGd�d
�d
er�ZsGd�d �d er�ZtGd�d�der�ZuGd�d�der�ZvGd�d"�d"eZ�ZwGd�d�dew�ZxGd�d�dew�ZyGd�d��d�ew�ZzGd�d�dez�Z{Gd�d6�d6ez�Z|Gd�d��d�e<�Z}e}�Z~Gd�d�dew�ZGd�d,�d,ew�Z�Gd�d�dew�Z�Gd�d��d�e��Z�Gd�d1�d1ew�Z�Gd�d�de��Z�Gd�d�de��Z�Gd�d�de��Z�Gd�d/�d/e��Z�Gd�d�de<�Z�d�df�Z��d0d�dD�Z��d1d�d@�Z�d�d΄Z�d�dS�Z�d�dR�Z�d�d҄Z��d2d�dW�Z�d�dE�Z��d3d�dk�Z�d�dl�Z�d�dn�Z�e\�j�dG�Z�el�j�dM�Z�em�j�dL�Z�en�j�de�Z�eo�j�dd�Z�eeeDd�d�dڍj�d�d܄�Z�efd݃j�d�d܄�Z�efd߃j�d�d܄�Z�e�e�Be�BeeeGd�dyd�Befd�ej��BZ�e�e�e�d�e��Z�e^d�ed�j�d�e�e{e�e�B��j�d�d�Z�d�dc�Z�d�dQ�Z�d�d`�Z�d�d^�Z�d�dq�Z�e�d�d܄�Z�e�d�d܄�Z�d�d�Z�d�dO�Z�d�dP�Z�d�di�Z�e<�e�_��d4d�do�Z�e=�Z�e<�e�_�e<�e�_�e�d��e�d��fd�dm�Z�e�Z�e�efd��d��j�d��Z�e�efd��d��j�d��Z�e�efd��d�efd��d�B�j��d�Z�e�e_�d�e�j��j��d�Z�d�d�de�j�f�ddT�Z��d5�ddj�Z�e��d�Z�e��d�Z�e�eee@eC�d�j��d��\Z�Z�e�e��d	j4��d
��Z�ef�d�djEe�j��d
�j��d�ZĐdd_�Z�e�ef�d��d�j��d�Z�ef�d�j��d�Z�ef�d�jȃj��d�Z�ef�d�j��d�Z�e�ef�d��de�B�j��d�Z�e�Z�ef�d�j��d�Z�e�e{eeeGdɐd�eee�d�e^dɃem����j΃j��d�Z�e�ee�j�e�Bd��d��j�d>�Z�G�d dr�dr�Z�eҐd!k�r�eb�d"�Z�eb�d#�Z�eee@eC�d$�Z�e�eՐd%dӐd&�j�e��Z�e�e�eփ�j��d'�Zאd(e�BZ�e�eՐd%dӐd&�j�e��Z�e�e�eك�j��d)�Z�eӐd*�eؐd'�e�eڐd)�Z�e�jܐd+�e�j�jܐd,�e�j�jܐd,�e�j�jܐd-�ddl�Z�e�j�j�e�e�j��e�j�jܐd.�dS(6aS
pyparsing module - Classes and methods to define and execute parsing grammars

The pyparsing module is an alternative approach to creating and executing simple grammars,
vs. the traditional lex/yacc approach, or the use of regular expressions.  With pyparsing, you
don't need to learn a new syntax for defining grammars or matching expressions - the parsing module
provides a library of classes that you use to construct the grammar directly in Python.

Here is a program to parse "Hello, World!" (or any greeting of the form 
C{"<salutation>, <addressee>!"}), built up using L{Word}, L{Literal}, and L{And} elements 
(L{'+'<ParserElement.__add__>} operator gives L{And} expressions, strings are auto-converted to
L{Literal} expressions)::

    from pyparsing import Word, alphas

    # define grammar of a greeting
    greet = Word(alphas) + "," + Word(alphas) + "!"

    hello = "Hello, World!"
    print (hello, "->", greet.parseString(hello))

The program outputs the following::

    Hello, World! -> ['Hello', ',', 'World', '!']

The Python representation of the grammar is quite readable, owing to the self-explanatory
class names, and the use of '+', '|' and '^' operators.

The L{ParseResults} object returned from L{ParserElement.parseString<ParserElement.parseString>} can be accessed as a nested list, a dictionary, or an
object with named attributes.

The pyparsing module handles some of the problems that are typically vexing when writing text parsers:
 - extra or missing whitespace (the above program will also handle "Hello,World!", "Hello  ,  World  !", etc.)
 - quoted strings
 - embedded comments
z2.1.10z07 Oct 2016 01:31 UTCz*Paul McGuire <ptmcg@users.sourceforge.net>�N)�ref)�datetime)�RLock)�OrderedDict�And�CaselessKeyword�CaselessLiteral�
CharsNotIn�Combine�Dict�Each�Empty�
FollowedBy�Forward�
GoToColumn�Group�Keyword�LineEnd�	LineStart�Literal�
MatchFirst�NoMatch�NotAny�	OneOrMore�OnlyOnce�Optional�Or�ParseBaseException�ParseElementEnhance�ParseException�ParseExpression�ParseFatalException�ParseResults�ParseSyntaxException�
ParserElement�QuotedString�RecursiveGrammarException�Regex�SkipTo�	StringEnd�StringStart�Suppress�Token�TokenConverter�White�Word�WordEnd�	WordStart�
ZeroOrMore�	alphanums�alphas�
alphas8bit�anyCloseTag�
anyOpenTag�
cStyleComment�col�commaSeparatedList�commonHTMLEntity�countedArray�cppStyleComment�dblQuotedString�dblSlashComment�
delimitedList�dictOf�downcaseTokens�empty�hexnums�htmlComment�javaStyleComment�line�lineEnd�	lineStart�lineno�makeHTMLTags�makeXMLTags�matchOnlyAtCol�matchPreviousExpr�matchPreviousLiteral�
nestedExpr�nullDebugAction�nums�oneOf�opAssoc�operatorPrecedence�
printables�punc8bit�pythonStyleComment�quotedString�removeQuotes�replaceHTMLEntity�replaceWith�
restOfLine�sglQuotedString�srange�	stringEnd�stringStart�traceParseAction�
unicodeString�upcaseTokens�
withAttribute�
indentedBlock�originalTextFor�ungroup�
infixNotation�locatedExpr�	withClass�
CloseMatch�tokenMap�pyparsing_common�cCs`t|t�r|Syt|�Stk
rZt|�jtj�d�}td�}|jdd��|j	|�SXdS)aDrop-in replacement for str(obj) that tries to be Unicode friendly. It first tries
           str(obj). If that fails with a UnicodeEncodeError, then it tries unicode(obj). It
           then < returns the unicode object | encodes it with the default encoding | ... >.
        �xmlcharrefreplacez&#\d+;cSs$dtt|ddd���dd�S)Nz\ur�����)�hex�int)�t�rw�/usr/lib/python3.6/pyparsing.py�<lambda>�sz_ustr.<locals>.<lambda>N)
�
isinstanceZunicode�str�UnicodeEncodeError�encode�sys�getdefaultencodingr'�setParseAction�transformString)�obj�retZ
xmlcharrefrwrwrx�_ustr�s
r�z6sum len sorted reversed list tuple set any all min maxccs|]
}|VqdS)Nrw)�.0�yrwrwrx�	<genexpr>�sr�rrcCs>d}dd�dj�D�}x"t||�D]\}}|j||�}q"W|S)z/Escape &, <, >, ", ', etc. in a string of data.z&><"'css|]}d|dVqdS)�&�;Nrw)r��srwrwrxr��sz_xml_escape.<locals>.<genexpr>zamp gt lt quot apos)�split�zip�replace)�dataZfrom_symbolsZ
to_symbolsZfrom_Zto_rwrwrx�_xml_escape�s
r�c@seZdZdS)�
_ConstantsN)�__name__�
__module__�__qualname__rwrwrwrxr��sr��
0123456789ZABCDEFabcdef�\�ccs|]}|tjkr|VqdS)N)�stringZ
whitespace)r��crwrwrxr��sc@sPeZdZdZddd�Zedd��Zdd	�Zd
d�Zdd
�Z	ddd�Z
dd�ZdS)rz7base exception class for all parsing runtime exceptionsrNcCs>||_|dkr||_d|_n||_||_||_|||f|_dS)Nr�)�loc�msg�pstr�
parserElement�args)�selfr�r�r��elemrwrwrx�__init__�szParseBaseException.__init__cCs||j|j|j|j�S)z�
        internal factory method to simplify creating one type of ParseException 
        from another - avoids having __init__ signature conflicts among subclasses
        )r�r�r�r�)�cls�perwrwrx�_from_exception�sz"ParseBaseException._from_exceptioncCsN|dkrt|j|j�S|dkr,t|j|j�S|dkrBt|j|j�St|��dS)z�supported attributes by name are:
            - lineno - returns the line number of the exception text
            - col - returns the column number of the exception text
            - line - returns the line containing the exception text
        rJr9�columnrGN)r9r�)rJr�r�r9rG�AttributeError)r�Zanamerwrwrx�__getattr__�szParseBaseException.__getattr__cCsd|j|j|j|jfS)Nz"%s (at char %d), (line:%d, col:%d))r�r�rJr�)r�rwrwrx�__str__�szParseBaseException.__str__cCst|�S)N)r�)r�rwrwrx�__repr__�szParseBaseException.__repr__�>!<cCs<|j}|jd}|r4dj|d|�|||d�f�}|j�S)z�Extracts the exception line from the input string, and marks
           the location of the exception with a special symbol.
        rrr�N)rGr��join�strip)r�ZmarkerStringZline_strZline_columnrwrwrx�
markInputline�s
z ParseBaseException.markInputlinecCsdj�tt|��S)Nzlineno col line)r��dir�type)r�rwrwrx�__dir__�szParseBaseException.__dir__)rNN)r�)r�r�r��__doc__r��classmethodr�r�r�r�r�r�rwrwrwrxr�s


c@seZdZdZdS)raN
    Exception thrown when parse expressions don't match class;
    supported attributes by name are:
     - lineno - returns the line number of the exception text
     - col - returns the column number of the exception text
     - line - returns the line containing the exception text
        
    Example::
        try:
            Word(nums).setName("integer").parseString("ABC")
        except ParseException as pe:
            print(pe)
            print("column: {}".format(pe.col))
            
    prints::
       Expected integer (at char 0), (line:1, col:1)
        column: 1
    N)r�r�r�r�rwrwrwrxr�sc@seZdZdZdS)r!znuser-throwable exception thrown when inconsistent parse content
       is found; stops all parsing immediatelyN)r�r�r�r�rwrwrwrxr!sc@seZdZdZdS)r#z�just like L{ParseFatalException}, but thrown internally when an
       L{ErrorStop<And._ErrorStop>} ('-' operator) indicates that parsing is to stop 
       immediately because an unbacktrackable syntax error has been foundN)r�r�r�r�rwrwrwrxr#sc@s eZdZdZdd�Zdd�ZdS)r&zZexception thrown by L{ParserElement.validate} if the grammar could be improperly recursivecCs
||_dS)N)�parseElementTrace)r��parseElementListrwrwrxr�sz"RecursiveGrammarException.__init__cCs
d|jS)NzRecursiveGrammarException: %s)r�)r�rwrwrxr� sz!RecursiveGrammarException.__str__N)r�r�r�r�r�r�rwrwrwrxr&sc@s,eZdZdd�Zdd�Zdd�Zdd�Zd	S)
�_ParseResultsWithOffsetcCs||f|_dS)N)�tup)r�Zp1Zp2rwrwrxr�$sz _ParseResultsWithOffset.__init__cCs
|j|S)N)r�)r��irwrwrx�__getitem__&sz#_ParseResultsWithOffset.__getitem__cCst|jd�S)Nr)�reprr�)r�rwrwrxr�(sz _ParseResultsWithOffset.__repr__cCs|jd|f|_dS)Nr)r�)r�r�rwrwrx�	setOffset*sz!_ParseResultsWithOffset.setOffsetN)r�r�r�r�r�r�r�rwrwrwrxr�#sr�c@s�eZdZdZd[dd�Zddddefdd�Zdd	�Zefd
d�Zdd
�Z	dd�Z
dd�Zdd�ZeZ
dd�Zdd�Zdd�Zdd�Zdd�Zer�eZeZeZn$eZeZeZdd�Zd d!�Zd"d#�Zd$d%�Zd&d'�Zd\d(d)�Zd*d+�Zd,d-�Zd.d/�Zd0d1�Z d2d3�Z!d4d5�Z"d6d7�Z#d8d9�Z$d:d;�Z%d<d=�Z&d]d?d@�Z'dAdB�Z(dCdD�Z)dEdF�Z*d^dHdI�Z+dJdK�Z,dLdM�Z-d_dOdP�Z.dQdR�Z/dSdT�Z0dUdV�Z1dWdX�Z2dYdZ�Z3dS)`r"aI
    Structured parse results, to provide multiple means of access to the parsed data:
       - as a list (C{len(results)})
       - by list index (C{results[0], results[1]}, etc.)
       - by attribute (C{results.<resultsName>} - see L{ParserElement.setResultsName})

    Example::
        integer = Word(nums)
        date_str = (integer.setResultsName("year") + '/' 
                        + integer.setResultsName("month") + '/' 
                        + integer.setResultsName("day"))
        # equivalent form:
        # date_str = integer("year") + '/' + integer("month") + '/' + integer("day")

        # parseString returns a ParseResults object
        result = date_str.parseString("1999/12/31")

        def test(s, fn=repr):
            print("%s -> %s" % (s, fn(eval(s))))
        test("list(result)")
        test("result[0]")
        test("result['month']")
        test("result.day")
        test("'month' in result")
        test("'minutes' in result")
        test("result.dump()", str)
    prints::
        list(result) -> ['1999', '/', '12', '/', '31']
        result[0] -> '1999'
        result['month'] -> '12'
        result.day -> '31'
        'month' in result -> True
        'minutes' in result -> False
        result.dump() -> ['1999', '/', '12', '/', '31']
        - day: 31
        - month: 12
        - year: 1999
    NTcCs"t||�r|Stj|�}d|_|S)NT)rz�object�__new__�_ParseResults__doinit)r��toklist�name�asList�modalZretobjrwrwrxr�Ts


zParseResults.__new__c
Cs`|jrvd|_d|_d|_i|_||_||_|dkr6g}||t�rP|dd�|_n||t�rft|�|_n|g|_t	�|_
|dk	o�|�r\|s�d|j|<||t�r�t|�}||_||t
d�ttf�o�|ddgfk�s\||t�r�|g}|�r&||t��rt|j�d�||<ntt|d�d�||<|||_n6y|d||<Wn$tttfk
�rZ|||<YnXdS)NFrr�)r��_ParseResults__name�_ParseResults__parent�_ParseResults__accumNames�_ParseResults__asList�_ParseResults__modal�list�_ParseResults__toklist�_generatorType�dict�_ParseResults__tokdictrur�r��
basestringr"r��copy�KeyError�	TypeError�
IndexError)r�r�r�r�r�rzrwrwrxr�]sB



$
zParseResults.__init__cCsPt|ttf�r|j|S||jkr4|j|ddStdd�|j|D��SdS)NrrrcSsg|]}|d�qS)rrw)r��vrwrwrx�
<listcomp>�sz,ParseResults.__getitem__.<locals>.<listcomp>rs)rzru�slicer�r�r�r")r�r�rwrwrxr��s


zParseResults.__getitem__cCs�||t�r0|jj|t��|g|j|<|d}nD||ttf�rN||j|<|}n&|jj|t��t|d�g|j|<|}||t�r�t|�|_	dS)Nr)
r�r��getr�rur�r�r"�wkrefr�)r��kr�rz�subrwrwrx�__setitem__�s


"
zParseResults.__setitem__c
Cs�t|ttf�r�t|j�}|j|=t|t�rH|dkr:||7}t||d�}tt|j|���}|j�x^|j	j
�D]F\}}x<|D]4}x.t|�D]"\}\}}	t||	|	|k�||<q�Wq|WqnWn|j	|=dS)Nrrr)
rzrur��lenr�r��range�indices�reverser��items�	enumerater�)
r�r�ZmylenZremovedr��occurrences�jr��value�positionrwrwrx�__delitem__�s


$zParseResults.__delitem__cCs
||jkS)N)r�)r�r�rwrwrx�__contains__�szParseResults.__contains__cCs
t|j�S)N)r�r�)r�rwrwrx�__len__�szParseResults.__len__cCs
|jS)N)r�)r�rwrwrx�__bool__�szParseResults.__bool__cCs
t|j�S)N)�iterr�)r�rwrwrx�__iter__�szParseResults.__iter__cCst|jddd��S)Nrrrs)r�r�)r�rwrwrx�__reversed__�szParseResults.__reversed__cCs$t|jd�r|jj�St|j�SdS)N�iterkeys)�hasattrr�r�r�)r�rwrwrx�	_iterkeys�s
zParseResults._iterkeyscs�fdd��j�D�S)Nc3s|]}�|VqdS)Nrw)r�r�)r�rwrxr��sz+ParseResults._itervalues.<locals>.<genexpr>)r�)r�rw)r�rx�_itervalues�szParseResults._itervaluescs�fdd��j�D�S)Nc3s|]}|�|fVqdS)Nrw)r�r�)r�rwrxr��sz*ParseResults._iteritems.<locals>.<genexpr>)r�)r�rw)r�rx�
_iteritems�szParseResults._iteritemscCst|j��S)zVReturns all named result keys (as a list in Python 2.x, as an iterator in Python 3.x).)r�r�)r�rwrwrx�keys�szParseResults.keyscCst|j��S)zXReturns all named result values (as a list in Python 2.x, as an iterator in Python 3.x).)r��
itervalues)r�rwrwrx�values�szParseResults.valuescCst|j��S)zfReturns all named result key-values (as a list of tuples in Python 2.x, as an iterator in Python 3.x).)r��	iteritems)r�rwrwrxr��szParseResults.itemscCs
t|j�S)z�Since keys() returns an iterator, this method is helpful in bypassing
           code that looks for the existence of any defined results names.)�boolr�)r�rwrwrx�haskeys�szParseResults.haskeyscOs�|s
dg}x6|j�D]*\}}|dkr2|d|f}qtd|��qWt|dt�sht|�dksh|d|kr�|d}||}||=|S|d}|SdS)a�
        Removes and returns item at specified index (default=C{last}).
        Supports both C{list} and C{dict} semantics for C{pop()}. If passed no
        argument or an integer argument, it will use C{list} semantics
        and pop tokens from the list of parsed tokens. If passed a 
        non-integer argument (most likely a string), it will use C{dict}
        semantics and pop the corresponding value from any defined 
        results names. A second default return value argument is 
        supported, just as in C{dict.pop()}.

        Example::
            def remove_first(tokens):
                tokens.pop(0)
            print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321']
            print(OneOrMore(Word(nums)).addParseAction(remove_first).parseString("0 123 321")) # -> ['123', '321']

            label = Word(alphas)
            patt = label("LABEL") + OneOrMore(Word(nums))
            print(patt.parseString("AAB 123 321").dump())

            # Use pop() in a parse action to remove named result (note that corresponding value is not
            # removed from list form of results)
            def remove_LABEL(tokens):
                tokens.pop("LABEL")
                return tokens
            patt.addParseAction(remove_LABEL)
            print(patt.parseString("AAB 123 321").dump())
        prints::
            ['AAB', '123', '321']
            - LABEL: AAB

            ['AAB', '123', '321']
        rr�defaultrz-pop() got an unexpected keyword argument '%s'Nrs)r�r�rzrur�)r�r��kwargsr�r��indexr�Zdefaultvaluerwrwrx�pop�s"zParseResults.popcCs||kr||S|SdS)ai
        Returns named result matching the given key, or if there is no
        such name, then returns the given C{defaultValue} or C{None} if no
        C{defaultValue} is specified.

        Similar to C{dict.get()}.
        
        Example::
            integer = Word(nums)
            date_str = integer("year") + '/' + integer("month") + '/' + integer("day")           

            result = date_str.parseString("1999/12/31")
            print(result.get("year")) # -> '1999'
            print(result.get("hour", "not specified")) # -> 'not specified'
            print(result.get("hour")) # -> None
        Nrw)r��key�defaultValuerwrwrxr�szParseResults.getcCsZ|jj||�xF|jj�D]8\}}x.t|�D]"\}\}}t||||k�||<q,WqWdS)a
        Inserts new element at location index in the list of parsed tokens.
        
        Similar to C{list.insert()}.

        Example::
            print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321']

            # use a parse action to insert the parse location in the front of the parsed results
            def insert_locn(locn, tokens):
                tokens.insert(0, locn)
            print(OneOrMore(Word(nums)).addParseAction(insert_locn).parseString("0 123 321")) # -> [0, '0', '123', '321']
        N)r��insertr�r�r�r�)r�r�ZinsStrr�r�r�r�r�rwrwrxr�2szParseResults.insertcCs|jj|�dS)a�
        Add single element to end of ParseResults list of elements.

        Example::
            print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321']
            
            # use a parse action to compute the sum of the parsed integers, and add it to the end
            def append_sum(tokens):
                tokens.append(sum(map(int, tokens)))
            print(OneOrMore(Word(nums)).addParseAction(append_sum).parseString("0 123 321")) # -> ['0', '123', '321', 444]
        N)r��append)r��itemrwrwrxr�FszParseResults.appendcCs$t|t�r||7}n|jj|�dS)a
        Add sequence of elements to end of ParseResults list of elements.

        Example::
            patt = OneOrMore(Word(alphas))
            
            # use a parse action to append the reverse of the matched strings, to make a palindrome
            def make_palindrome(tokens):
                tokens.extend(reversed([t[::-1] for t in tokens]))
                return ''.join(tokens)
            print(patt.addParseAction(make_palindrome).parseString("lskdj sdlkjf lksd")) # -> 'lskdjsdlkjflksddsklfjkldsjdksl'
        N)rzr"r��extend)r�Zitemseqrwrwrxr�Ts

zParseResults.extendcCs|jdd�=|jj�dS)z7
        Clear all elements and results names.
        N)r�r��clear)r�rwrwrxr�fszParseResults.clearcCsfy||Stk
rdSX||jkr^||jkrD|j|ddStdd�|j|D��SndSdS)Nr�rrrcSsg|]}|d�qS)rrw)r�r�rwrwrxr�wsz,ParseResults.__getattr__.<locals>.<listcomp>rs)r�r�r�r")r�r�rwrwrxr�ms

zParseResults.__getattr__cCs|j�}||7}|S)N)r�)r��otherr�rwrwrx�__add__{szParseResults.__add__cs�|jrnt|j���fdd��|jj�}�fdd�|D�}x4|D],\}}|||<t|dt�r>t|�|d_q>W|j|j7_|jj	|j�|S)Ncs|dkr�S|�S)Nrrw)�a)�offsetrwrxry�sz'ParseResults.__iadd__.<locals>.<lambda>c	s4g|],\}}|D]}|t|d�|d��f�qqS)rrr)r�)r�r��vlistr�)�	addoffsetrwrxr��sz)ParseResults.__iadd__.<locals>.<listcomp>r)
r�r�r�r�rzr"r�r�r��update)r�r�Z
otheritemsZotherdictitemsr�r�rw)rrrx�__iadd__�s


zParseResults.__iadd__cCs&t|t�r|dkr|j�S||SdS)Nr)rzrur�)r�r�rwrwrx�__radd__�szParseResults.__radd__cCsdt|j�t|j�fS)Nz(%s, %s))r�r�r�)r�rwrwrxr��szParseResults.__repr__cCsddjdd�|jD��dS)N�[z, css(|] }t|t�rt|�nt|�VqdS)N)rzr"r�r�)r�r�rwrwrxr��sz'ParseResults.__str__.<locals>.<genexpr>�])r�r�)r�rwrwrxr��szParseResults.__str__r�cCsPg}xF|jD]<}|r"|r"|j|�t|t�r:||j�7}q|jt|��qW|S)N)r�r�rzr"�
_asStringListr�)r��sep�outr�rwrwrxr
�s

zParseResults._asStringListcCsdd�|jD�S)a�
        Returns the parse results as a nested list of matching tokens, all converted to strings.

        Example::
            patt = OneOrMore(Word(alphas))
            result = patt.parseString("sldkj lsdkj sldkj")
            # even though the result prints in string-like form, it is actually a pyparsing ParseResults
            print(type(result), result) # -> <class 'pyparsing.ParseResults'> ['sldkj', 'lsdkj', 'sldkj']
            
            # Use asList() to create an actual list
            result_list = result.asList()
            print(type(result_list), result_list) # -> <class 'list'> ['sldkj', 'lsdkj', 'sldkj']
        cSs"g|]}t|t�r|j�n|�qSrw)rzr"r�)r��resrwrwrxr��sz'ParseResults.asList.<locals>.<listcomp>)r�)r�rwrwrxr��szParseResults.asListcs6tr|j}n|j}�fdd��t�fdd�|�D��S)a�
        Returns the named parse results as a nested dictionary.

        Example::
            integer = Word(nums)
            date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
            
            result = date_str.parseString('12/31/1999')
            print(type(result), repr(result)) # -> <class 'pyparsing.ParseResults'> (['12', '/', '31', '/', '1999'], {'day': [('1999', 4)], 'year': [('12', 0)], 'month': [('31', 2)]})
            
            result_dict = result.asDict()
            print(type(result_dict), repr(result_dict)) # -> <class 'dict'> {'day': '1999', 'year': '12', 'month': '31'}

            # even though a ParseResults supports dict-like access, sometime you just need to have a dict
            import json
            print(json.dumps(result)) # -> Exception: TypeError: ... is not JSON serializable
            print(json.dumps(result.asDict())) # -> {"month": "31", "day": "1999", "year": "12"}
        cs6t|t�r.|j�r|j�S�fdd�|D�Sn|SdS)Ncsg|]}�|��qSrwrw)r�r�)�toItemrwrxr��sz7ParseResults.asDict.<locals>.toItem.<locals>.<listcomp>)rzr"r��asDict)r�)rrwrxr�s

z#ParseResults.asDict.<locals>.toItemc3s|]\}}|�|�fVqdS)Nrw)r�r�r�)rrwrxr��sz&ParseResults.asDict.<locals>.<genexpr>)�PY_3r�r�r�)r�Zitem_fnrw)rrxr�s
	zParseResults.asDictcCs8t|j�}|jj�|_|j|_|jj|j�|j|_|S)zA
        Returns a new copy of a C{ParseResults} object.
        )r"r�r�r�r�r�rr�)r�r�rwrwrxr��s
zParseResults.copyFcCsPd}g}tdd�|jj�D��}|d}|s8d}d}d}d}	|dk	rJ|}	n|jrV|j}	|	sf|rbdSd}	|||d|	d	g7}x�t|j�D]�\}
}t|t�r�|
|kr�||j||
|o�|dk||�g7}n||jd|o�|dk||�g7}q�d}|
|kr�||
}|�s
|�rq�nd}t	t
|��}
|||d|d	|
d
|d	g	7}q�W|||d
|	d	g7}dj|�S)z�
        (Deprecated) Returns the parse results as XML. Tags are created for tokens and lists that have defined results names.
        �
css(|] \}}|D]}|d|fVqqdS)rrNrw)r�r�rr�rwrwrxr��sz%ParseResults.asXML.<locals>.<genexpr>z  r�NZITEM�<�>z</)r�r�r�r�r�r�rzr"�asXMLr�r�r�)r�ZdoctagZnamedItemsOnly�indentZ	formatted�nlrZ
namedItemsZnextLevelIndentZselfTagr�r
ZresTagZxmlBodyTextrwrwrxr�sT


zParseResults.asXMLcCs:x4|jj�D]&\}}x|D]\}}||kr|SqWqWdS)N)r�r�)r�r�r�rr�r�rwrwrxZ__lookup$s
zParseResults.__lookupcCs�|jr|jS|jr.|j�}|r(|j|�SdSnNt|�dkrxt|j�dkrxtt|jj���dddkrxtt|jj���SdSdS)a(
        Returns the results name for this token expression. Useful when several 
        different expressions might match at a particular location.

        Example::
            integer = Word(nums)
            ssn_expr = Regex(r"\d\d\d-\d\d-\d\d\d\d")
            house_number_expr = Suppress('#') + Word(nums, alphanums)
            user_data = (Group(house_number_expr)("house_number") 
                        | Group(ssn_expr)("ssn")
                        | Group(integer)("age"))
            user_info = OneOrMore(user_data)
            
            result = user_info.parseString("22 111-22-3333 #221B")
            for item in result:
                print(item.getName(), ':', item[0])
        prints::
            age : 22
            ssn : 111-22-3333
            house_number : 221B
        Nrrrrs)rrs)	r�r��_ParseResults__lookupr�r��nextr�r�r�)r��parrwrwrx�getName+s
zParseResults.getNamercCsbg}d}|j|t|j���|�rX|j�r�tdd�|j�D��}xz|D]r\}}|r^|j|�|jd|d||f�t|t�r�|r�|j|j||d��q�|jt|��qH|jt	|��qHWn�t
dd�|D���rX|}x~t|�D]r\}	}
t|
t��r*|jd|d||	|d|d|
j||d�f�q�|jd|d||	|d|dt|
�f�q�Wd	j|�S)
aH
        Diagnostic method for listing out the contents of a C{ParseResults}.
        Accepts an optional C{indent} argument so that this string can be embedded
        in a nested display of other data.

        Example::
            integer = Word(nums)
            date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
            
            result = date_str.parseString('12/31/1999')
            print(result.dump())
        prints::
            ['12', '/', '31', '/', '1999']
            - day: 1999
            - month: 31
            - year: 12
        rcss|]\}}t|�|fVqdS)N)r{)r�r�r�rwrwrxr�gsz$ParseResults.dump.<locals>.<genexpr>z
%s%s- %s: z  rrcss|]}t|t�VqdS)N)rzr")r��vvrwrwrxr�ssz
%s%s[%d]:
%s%s%sr�)
r�r�r�r��sortedr�rzr"�dumpr��anyr�r�)r�r�depth�fullr�NLr�r�r�r�rrwrwrxrPs,

4.zParseResults.dumpcOstj|j�f|�|�dS)a�
        Pretty-printer for parsed results as a list, using the C{pprint} module.
        Accepts additional positional or keyword args as defined for the 
        C{pprint.pprint} method. (U{http://docs.python.org/3/library/pprint.html#pprint.pprint})

        Example::
            ident = Word(alphas, alphanums)
            num = Word(nums)
            func = Forward()
            term = ident | num | Group('(' + func + ')')
            func <<= ident + Group(Optional(delimitedList(term)))
            result = func.parseString("fna a,b,(fnb c,d,200),100")
            result.pprint(width=40)
        prints::
            ['fna',
             ['a',
              'b',
              ['(', 'fnb', ['c', 'd', '200'], ')'],
              '100']]
        N)�pprintr�)r�r�r�rwrwrxr"}szParseResults.pprintcCs.|j|jj�|jdk	r|j�p d|j|jffS)N)r�r�r�r�r�r�)r�rwrwrx�__getstate__�s
zParseResults.__getstate__cCsN|d|_|d\|_}}|_i|_|jj|�|dk	rDt|�|_nd|_dS)Nrrr)r�r�r�r�rr�r�)r��staterZinAccumNamesrwrwrx�__setstate__�s
zParseResults.__setstate__cCs|j|j|j|jfS)N)r�r�r�r�)r�rwrwrx�__getnewargs__�szParseResults.__getnewargs__cCstt|��t|j��S)N)r�r�r�r�)r�rwrwrxr��szParseResults.__dir__)NNTT)N)r�)NFr�T)r�rT)4r�r�r�r�r�rzr�r�r�r�r�r�r��__nonzero__r�r�r�r�r�rr�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrr�r�r
r�rr�rrrrr"r#r%r&r�rwrwrwrxr"-sh&
	'	
4

#
=%
-
cCsF|}d|kot|�knr4||ddkr4dS||jdd|�S)aReturns current column within a string, counting newlines as line separators.
   The first column is number 1.

   Note: the default parsing behavior is to expand tabs in the input string
   before starting the parsing process.  See L{I{ParserElement.parseString}<ParserElement.parseString>} for more information
   on parsing strings containing C{<TAB>}s, and suggested methods to maintain a
   consistent view of the parsed string, the parse location, and line and column
   positions within the parsed string.
   rrrr)r��rfind)r��strgr�rwrwrxr9�s
cCs|jdd|�dS)aReturns current line number within a string, counting newlines as line separators.
   The first line is number 1.

   Note: the default parsing behavior is to expand tabs in the input string
   before starting the parsing process.  See L{I{ParserElement.parseString}<ParserElement.parseString>} for more information
   on parsing strings containing C{<TAB>}s, and suggested methods to maintain a
   consistent view of the parsed string, the parse location, and line and column
   positions within the parsed string.
   rrrr)�count)r�r)rwrwrxrJ�s
cCsF|jdd|�}|jd|�}|dkr2||d|�S||dd�SdS)zfReturns the line of text containing loc within a string, counting newlines as line separators.
       rrrrN)r(�find)r�r)ZlastCRZnextCRrwrwrxrG�s
cCs8tdt|�dt|�dt||�t||�f�dS)NzMatch z at loc z(%d,%d))�printr�rJr9)�instringr��exprrwrwrx�_defaultStartDebugAction�sr/cCs$tdt|�dt|j���dS)NzMatched z -> )r,r�r{r�)r-�startlocZendlocr.�toksrwrwrx�_defaultSuccessDebugAction�sr2cCstdt|��dS)NzException raised:)r,r�)r-r�r.�excrwrwrx�_defaultExceptionDebugAction�sr4cGsdS)zG'Do-nothing' debug action, to suppress debugging output during parsing.Nrw)r�rwrwrxrQ�srqcs��tkr�fdd�Sdg�dg�tdd�dkrFddd	�}dd
d��ntj}tj�d}|dd
�d}|d|d|f�������fdd�}d}yt�dt�d�j�}Wntk
r�t��}YnX||_|S)Ncs�|�S)Nrw)r��lrv)�funcrwrxry�sz_trim_arity.<locals>.<lambda>rFrqro�cSs8tdkrdnd	}tj||dd�|}|j|jfgS)
Nror7rrqrr)�limit)ror7r������)�system_version�	traceback�
extract_stack�filenamerJ)r8r�
frame_summaryrwrwrxr=sz"_trim_arity.<locals>.extract_stackcSs$tj||d�}|d}|j|jfgS)N)r8rrrs)r<�
extract_tbr>rJ)�tbr8Zframesr?rwrwrxr@sz_trim_arity.<locals>.extract_tb�)r8rrcs�x�y �|�dd��}d�d<|Stk
r��dr>�n4z.tj�d}�|dd�ddd��ksj�Wd~X�d�kr��dd7<w�YqXqWdS)NrTrrrq)r8rsrs)r�r~�exc_info)r�r�rA)r@�
foundArityr6r8�maxargs�pa_call_line_synthrwrx�wrappers"z_trim_arity.<locals>.wrapperz<parse action>r��	__class__)ror7)r)rrs)	�singleArgBuiltinsr;r<r=r@�getattrr��	Exceptionr{)r6rEr=Z	LINE_DIFFZ	this_linerG�	func_namerw)r@rDr6r8rErFrx�_trim_arity�s*
rMcs�eZdZdZdZdZedd��Zedd��Zd�dd	�Z	d
d�Z
dd
�Zd�dd�Zd�dd�Z
dd�Zdd�Zdd�Zdd�Zdd�Zdd�Zd�dd �Zd!d"�Zd�d#d$�Zd%d&�Zd'd(�ZGd)d*�d*e�Zed+k	r�Gd,d-�d-e�ZnGd.d-�d-e�ZiZe�Zd/d/gZ d�d0d1�Z!eZ"ed2d3��Z#dZ$ed�d5d6��Z%d�d7d8�Z&e'dfd9d:�Z(d;d<�Z)e'fd=d>�Z*e'dfd?d@�Z+dAdB�Z,dCdD�Z-dEdF�Z.dGdH�Z/dIdJ�Z0dKdL�Z1dMdN�Z2dOdP�Z3dQdR�Z4dSdT�Z5dUdV�Z6dWdX�Z7dYdZ�Z8d�d[d\�Z9d]d^�Z:d_d`�Z;dadb�Z<dcdd�Z=dedf�Z>dgdh�Z?d�didj�Z@dkdl�ZAdmdn�ZBdodp�ZCdqdr�ZDgfdsdt�ZEd�dudv�ZF�fdwdx�ZGdydz�ZHd{d|�ZId}d~�ZJdd��ZKd�d�d��ZLd�d�d��ZM�ZNS)�r$z)Abstract base level parser element class.z 
	
FcCs
|t_dS)a�
        Overrides the default whitespace chars

        Example::
            # default whitespace chars are space, <TAB> and newline
            OneOrMore(Word(alphas)).parseString("abc def\nghi jkl")  # -> ['abc', 'def', 'ghi', 'jkl']
            
            # change to just treat newline as significant
            ParserElement.setDefaultWhitespaceChars(" \t")
            OneOrMore(Word(alphas)).parseString("abc def\nghi jkl")  # -> ['abc', 'def']
        N)r$�DEFAULT_WHITE_CHARS)�charsrwrwrx�setDefaultWhitespaceChars=s
z'ParserElement.setDefaultWhitespaceCharscCs
|t_dS)a�
        Set class to be used for inclusion of string literals into a parser.
        
        Example::
            # default literal class used is Literal
            integer = Word(nums)
            date_str = integer("year") + '/' + integer("month") + '/' + integer("day")           

            date_str.parseString("1999/12/31")  # -> ['1999', '/', '12', '/', '31']


            # change to Suppress
            ParserElement.inlineLiteralsUsing(Suppress)
            date_str = integer("year") + '/' + integer("month") + '/' + integer("day")           

            date_str.parseString("1999/12/31")  # -> ['1999', '12', '31']
        N)r$�_literalStringClass)r�rwrwrx�inlineLiteralsUsingLsz!ParserElement.inlineLiteralsUsingcCs�t�|_d|_d|_d|_||_d|_tj|_	d|_
d|_d|_t�|_
d|_d|_d|_d|_d|_d|_d|_d|_d|_dS)NTFr�)NNN)r��parseAction�
failAction�strRepr�resultsName�
saveAsList�skipWhitespacer$rN�
whiteChars�copyDefaultWhiteChars�mayReturnEmpty�keepTabs�ignoreExprs�debug�streamlined�
mayIndexError�errmsg�modalResults�debugActions�re�callPreparse�
callDuringTry)r��savelistrwrwrxr�as(zParserElement.__init__cCs<tj|�}|jdd�|_|jdd�|_|jr8tj|_|S)a$
        Make a copy of this C{ParserElement}.  Useful for defining different parse actions
        for the same parsing pattern, using copies of the original parse element.
        
        Example::
            integer = Word(nums).setParseAction(lambda toks: int(toks[0]))
            integerK = integer.copy().addParseAction(lambda toks: toks[0]*1024) + Suppress("K")
            integerM = integer.copy().addParseAction(lambda toks: toks[0]*1024*1024) + Suppress("M")
            
            print(OneOrMore(integerK | integerM | integer).parseString("5K 100 640K 256M"))
        prints::
            [5120, 100, 655360, 268435456]
        Equivalent form of C{expr.copy()} is just C{expr()}::
            integerM = integer().addParseAction(lambda toks: toks[0]*1024*1024) + Suppress("M")
        N)r�rSr]rZr$rNrY)r�Zcpyrwrwrxr�xs
zParserElement.copycCs*||_d|j|_t|d�r&|j|j_|S)af
        Define name for this expression, makes debugging and exception messages clearer.
        
        Example::
            Word(nums).parseString("ABC")  # -> Exception: Expected W:(0123...) (at char 0), (line:1, col:1)
            Word(nums).setName("integer").parseString("ABC")  # -> Exception: Expected integer (at char 0), (line:1, col:1)
        z	Expected �	exception)r�rar�rhr�)r�r�rwrwrx�setName�s


zParserElement.setNamecCs4|j�}|jd�r"|dd�}d}||_||_|S)aP
        Define name for referencing matching tokens as a nested attribute
        of the returned parse results.
        NOTE: this returns a *copy* of the original C{ParserElement} object;
        this is so that the client can define a basic element, such as an
        integer, and reference it in multiple places with different names.

        You can also set results names using the abbreviated syntax,
        C{expr("name")} in place of C{expr.setResultsName("name")} - 
        see L{I{__call__}<__call__>}.

        Example::
            date_str = (integer.setResultsName("year") + '/' 
                        + integer.setResultsName("month") + '/' 
                        + integer.setResultsName("day"))

            # equivalent form:
            date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
        �*NrrTrs)r��endswithrVrb)r�r��listAllMatchesZnewselfrwrwrx�setResultsName�s
zParserElement.setResultsNameTcs@|r&|j�d�fdd�	}�|_||_nt|jd�r<|jj|_|S)z�Method to invoke the Python pdb debugger when this element is
           about to be parsed. Set C{breakFlag} to True to enable, False to
           disable.
        Tcsddl}|j��||||�S)Nr)�pdbZ	set_trace)r-r��	doActions�callPreParsern)�_parseMethodrwrx�breaker�sz'ParserElement.setBreak.<locals>.breaker�_originalParseMethod)TT)�_parsersr�)r�Z	breakFlagrrrw)rqrx�setBreak�s
zParserElement.setBreakcOs&tttt|���|_|jdd�|_|S)a
        Define action to perform when successfully matching parse element definition.
        Parse action fn is a callable method with 0-3 arguments, called as C{fn(s,loc,toks)},
        C{fn(loc,toks)}, C{fn(toks)}, or just C{fn()}, where:
         - s   = the original string being parsed (see note below)
         - loc = the location of the matching substring
         - toks = a list of the matched tokens, packaged as a C{L{ParseResults}} object
        If the functions in fns modify the tokens, they can return them as the return
        value from fn, and the modified list of tokens will replace the original.
        Otherwise, fn does not need to return any value.

        Optional keyword arguments:
         - callDuringTry = (default=C{False}) indicate if parse action should be run during lookaheads and alternate testing

        Note: the default parsing behavior is to expand tabs in the input string
        before starting the parsing process.  See L{I{parseString}<parseString>} for more information
        on parsing strings containing C{<TAB>}s, and suggested methods to maintain a
        consistent view of the parsed string, the parse location, and line and column
        positions within the parsed string.
        
        Example::
            integer = Word(nums)
            date_str = integer + '/' + integer + '/' + integer

            date_str.parseString("1999/12/31")  # -> ['1999', '/', '12', '/', '31']

            # use parse action to convert to ints at parse time
            integer = Word(nums).setParseAction(lambda toks: int(toks[0]))
            date_str = integer + '/' + integer + '/' + integer

            # note that integer fields are now ints, not strings
            date_str.parseString("1999/12/31")  # -> [1999, '/', 12, '/', 31]
        rfF)r��maprMrSr�rf)r��fnsr�rwrwrxr��s"zParserElement.setParseActioncOs4|jtttt|���7_|jp,|jdd�|_|S)z�
        Add parse action to expression's list of parse actions. See L{I{setParseAction}<setParseAction>}.
        
        See examples in L{I{copy}<copy>}.
        rfF)rSr�rvrMrfr�)r�rwr�rwrwrx�addParseAction�szParserElement.addParseActioncsb|jdd��|jdd�rtnt�x(|D] ����fdd�}|jj|�q&W|jpZ|jdd�|_|S)a�Add a boolean predicate function to expression's list of parse actions. See 
        L{I{setParseAction}<setParseAction>} for function call signatures. Unlike C{setParseAction}, 
        functions passed to C{addCondition} need to return boolean success/fail of the condition.

        Optional keyword arguments:
         - message = define a custom message to be used in the raised exception
         - fatal   = if True, will raise ParseFatalException to stop parsing immediately; otherwise will raise ParseException
         
        Example::
            integer = Word(nums).setParseAction(lambda toks: int(toks[0]))
            year_int = integer.copy()
            year_int.addCondition(lambda toks: toks[0] >= 2000, message="Only support years 2000 and later")
            date_str = year_int + '/' + integer + '/' + integer

            result = date_str.parseString("1999/12/31")  # -> Exception: Only support years 2000 and later (at char 0), (line:1, col:1)
        �messagezfailed user-defined condition�fatalFcs$tt��|||��s �||���dS)N)r�rM)r�r5rv)�exc_type�fnr�rwrx�pasz&ParserElement.addCondition.<locals>.parf)r�r!rrSr�rf)r�rwr�r}rw)r{r|r�rx�addCondition�s
zParserElement.addConditioncCs
||_|S)aDefine action to perform if parsing fails at this expression.
           Fail acton fn is a callable function that takes the arguments
           C{fn(s,loc,expr,err)} where:
            - s = string being parsed
            - loc = location where expression match was attempted and failed
            - expr = the parse expression that failed
            - err = the exception thrown
           The function returns no value.  It may throw C{L{ParseFatalException}}
           if it is desired to stop parsing immediately.)rT)r�r|rwrwrx�
setFailActions
zParserElement.setFailActioncCsZd}xP|rTd}xB|jD]8}yx|j||�\}}d}qWWqtk
rLYqXqWqW|S)NTF)r]rtr)r�r-r�Z
exprsFound�eZdummyrwrwrx�_skipIgnorables#szParserElement._skipIgnorablescCsL|jr|j||�}|jrH|j}t|�}x ||krF|||krF|d7}q(W|S)Nrr)r]r�rXrYr�)r�r-r�Zwt�instrlenrwrwrx�preParse0szParserElement.preParsecCs|gfS)Nrw)r�r-r�rorwrwrx�	parseImpl<szParserElement.parseImplcCs|S)Nrw)r�r-r��	tokenlistrwrwrx�	postParse?szParserElement.postParsec"Cs�|j}|s|jr�|jdr,|jd|||�|rD|jrD|j||�}n|}|}yDy|j|||�\}}Wn(tk
r�t|t|�|j	|��YnXWnXt
k
r�}	z<|jdr�|jd||||	�|jr�|j||||	��WYdd}	~	XnXn�|o�|j�r|j||�}n|}|}|j�s$|t|�k�rhy|j|||�\}}Wn*tk
�rdt|t|�|j	|��YnXn|j|||�\}}|j|||�}t
||j|j|jd�}
|j�r�|�s�|j�r�|�rVyRxL|jD]B}||||
�}|dk	�r�t
||j|j�o�t|t
tf�|jd�}
�q�WWnFt
k
�rR}	z(|jd�r@|jd||||	��WYdd}	~	XnXnNxL|jD]B}||||
�}|dk	�r^t
||j|j�o�t|t
tf�|jd�}
�q^W|�r�|jd�r�|jd|||||
�||
fS)Nrrq)r�r�rr)r^rTrcrer�r�r�rr�rarr`r�r"rVrWrbrSrfrzr�)r�r-r�rorpZ	debugging�prelocZtokensStart�tokens�errZ	retTokensr|rwrwrx�
_parseNoCacheCsp





zParserElement._parseNoCachecCs>y|j||dd�dStk
r8t|||j|��YnXdS)NF)ror)rtr!rra)r�r-r�rwrwrx�tryParse�szParserElement.tryParsecCs2y|j||�Wnttfk
r(dSXdSdS)NFT)r�rr�)r�r-r�rwrwrx�canParseNext�s
zParserElement.canParseNextc@seZdZdd�ZdS)zParserElement._UnboundedCachecsdi�t�|_���fdd�}�fdd�}�fdd�}tj||�|_tj||�|_tj||�|_dS)Ncs�j|��S)N)r�)r�r�)�cache�not_in_cacherwrxr��sz3ParserElement._UnboundedCache.__init__.<locals>.getcs|�|<dS)Nrw)r�r�r�)r�rwrx�set�sz3ParserElement._UnboundedCache.__init__.<locals>.setcs�j�dS)N)r�)r�)r�rwrxr��sz5ParserElement._UnboundedCache.__init__.<locals>.clear)r�r��types�
MethodTyper�r�r�)r�r�r�r�rw)r�r�rxr��sz&ParserElement._UnboundedCache.__init__N)r�r�r�r�rwrwrwrx�_UnboundedCache�sr�Nc@seZdZdd�ZdS)zParserElement._FifoCachecsht�|_�t����fdd�}��fdd�}�fdd�}tj||�|_tj||�|_tj||�|_dS)Ncs�j|��S)N)r�)r�r�)r�r�rwrxr��sz.ParserElement._FifoCache.__init__.<locals>.getcs"|�|<t���kr�jd�dS)NF)r��popitem)r�r�r�)r��sizerwrxr��sz.ParserElement._FifoCache.__init__.<locals>.setcs�j�dS)N)r�)r�)r�rwrxr��sz0ParserElement._FifoCache.__init__.<locals>.clear)r�r��_OrderedDictr�r�r�r�r�)r�r�r�r�r�rw)r�r�r�rxr��sz!ParserElement._FifoCache.__init__N)r�r�r�r�rwrwrwrx�
_FifoCache�sr�c@seZdZdd�ZdS)zParserElement._FifoCachecsvt�|_�i�tjg�����fdd�}���fdd�}��fdd�}tj||�|_tj||�|_tj||�|_dS)Ncs�j|��S)N)r�)r�r�)r�r�rwrxr��sz.ParserElement._FifoCache.__init__.<locals>.getcs2|�|<t���kr$�j�j�d��j|�dS)N)r�r��popleftr�)r�r�r�)r��key_fifor�rwrxr��sz.ParserElement._FifoCache.__init__.<locals>.setcs�j��j�dS)N)r�)r�)r�r�rwrxr��sz0ParserElement._FifoCache.__init__.<locals>.clear)	r�r��collections�dequer�r�r�r�r�)r�r�r�r�r�rw)r�r�r�r�rxr��sz!ParserElement._FifoCache.__init__N)r�r�r�r�rwrwrwrxr��srcCs�d\}}|||||f}tj��tj}|j|�}	|	|jkr�tj|d7<y|j||||�}	Wn8tk
r�}
z|j||
j	|
j
���WYdd}
~
Xq�X|j||	d|	dj�f�|	Sn4tj|d7<t|	t
�r�|	�|	d|	dj�fSWdQRXdS)Nrrr)rrr)r$�packrat_cache_lock�
packrat_cacher�r��packrat_cache_statsr�rr�rHr�r�rzrK)r�r-r�rorpZHITZMISS�lookupr�r�r�rwrwrx�_parseCache�s$


zParserElement._parseCachecCs(tjj�dgttj�tjdd�<dS)Nr)r$r�r�r�r�rwrwrwrx�
resetCache�s
zParserElement.resetCache�cCs8tjs4dt_|dkr tj�t_ntj|�t_tjt_dS)a�Enables "packrat" parsing, which adds memoizing to the parsing logic.
           Repeated parse attempts at the same string location (which happens
           often in many complex grammars) can immediately return a cached value,
           instead of re-executing parsing/validating code.  Memoizing is done of
           both valid results and parsing exceptions.
           
           Parameters:
            - cache_size_limit - (default=C{128}) - if an integer value is provided
              will limit the size of the packrat cache; if None is passed, then
              the cache size will be unbounded; if 0 is passed, the cache will
              be effectively disabled.
            
           This speedup may break existing programs that use parse actions that
           have side-effects.  For this reason, packrat parsing is disabled when
           you first import pyparsing.  To activate the packrat feature, your
           program must call the class method C{ParserElement.enablePackrat()}.  If
           your program uses C{psyco} to "compile as you go", you must call
           C{enablePackrat} before calling C{psyco.full()}.  If you do not do this,
           Python will crash.  For best results, call C{enablePackrat()} immediately
           after importing pyparsing.
           
           Example::
               import pyparsing
               pyparsing.ParserElement.enablePackrat()
        TN)r$�_packratEnabledr�r�r�r�rt)Zcache_size_limitrwrwrx�
enablePackratszParserElement.enablePackratcCs�tj�|js|j�x|jD]}|j�qW|js<|j�}y<|j|d�\}}|rv|j||�}t	�t
�}|j||�Wn0tk
r�}ztjr��n|�WYdd}~XnX|SdS)aB
        Execute the parse expression with the given string.
        This is the main interface to the client code, once the complete
        expression has been built.

        If you want the grammar to require that the entire input string be
        successfully parsed, then set C{parseAll} to True (equivalent to ending
        the grammar with C{L{StringEnd()}}).

        Note: C{parseString} implicitly calls C{expandtabs()} on the input string,
        in order to report proper column numbers in parse actions.
        If the input string contains tabs and
        the grammar uses parse actions that use the C{loc} argument to index into the
        string being parsed, you can ensure you have a consistent view of the input
        string by:
         - calling C{parseWithTabs} on your grammar before calling C{parseString}
           (see L{I{parseWithTabs}<parseWithTabs>})
         - define your parse action using the full C{(s,loc,toks)} signature, and
           reference the input string using the parse action's C{s} argument
         - explictly expand the tabs in your input string before calling
           C{parseString}
        
        Example::
            Word('a').parseString('aaaaabaaa')  # -> ['aaaaa']
            Word('a').parseString('aaaaabaaa', parseAll=True)  # -> Exception: Expected end of text
        rN)
r$r�r_�
streamliner]r\�
expandtabsrtr�r
r)r�verbose_stacktrace)r�r-�parseAllr�r�r�Zser3rwrwrx�parseString#s$zParserElement.parseStringccs@|js|j�x|jD]}|j�qW|js8t|�j�}t|�}d}|j}|j}t	j
�d}	y�x�||kon|	|k�ry |||�}
|||
dd�\}}Wntk
r�|
d}Yq`X||kr�|	d7}	||
|fV|r�|||�}
|
|kr�|}q�|d7}n|}q`|
d}q`WWn4tk
�r:}zt	j
�r&�n|�WYdd}~XnXdS)a�
        Scan the input string for expression matches.  Each match will return the
        matching tokens, start location, and end location.  May be called with optional
        C{maxMatches} argument, to clip scanning after 'n' matches are found.  If
        C{overlap} is specified, then overlapping matches will be reported.

        Note that the start and end locations are reported relative to the string
        being parsed.  See L{I{parseString}<parseString>} for more information on parsing
        strings with embedded tabs.

        Example::
            source = "sldjf123lsdjjkf345sldkjf879lkjsfd987"
            print(source)
            for tokens,start,end in Word(alphas).scanString(source):
                print(' '*start + '^'*(end-start))
                print(' '*start + tokens[0])
        
        prints::
        
            sldjf123lsdjjkf345sldkjf879lkjsfd987
            ^^^^^
            sldjf
                    ^^^^^^^
                    lsdjjkf
                              ^^^^^^
                              sldkjf
                                       ^^^^^^
                                       lkjsfd
        rF)rprrN)r_r�r]r\r�r�r�r�rtr$r�rrr�)r�r-�
maxMatchesZoverlapr�r�r�Z
preparseFnZparseFn�matchesr�ZnextLocr�Znextlocr3rwrwrx�
scanStringUsB


zParserElement.scanStringcCs�g}d}d|_y�xh|j|�D]Z\}}}|j|||��|rrt|t�rT||j�7}nt|t�rh||7}n
|j|�|}qW|j||d��dd�|D�}djtt	t
|���Stk
r�}ztj
rȂn|�WYdd}~XnXdS)af
        Extension to C{L{scanString}}, to modify matching text with modified tokens that may
        be returned from a parse action.  To use C{transformString}, define a grammar and
        attach a parse action to it that modifies the returned token list.
        Invoking C{transformString()} on a target string will then scan for matches,
        and replace the matched text patterns according to the logic in the parse
        action.  C{transformString()} returns the resulting transformed string.
        
        Example::
            wd = Word(alphas)
            wd.setParseAction(lambda toks: toks[0].title())
            
            print(wd.transformString("now is the winter of our discontent made glorious summer by this sun of york."))
        Prints::
            Now Is The Winter Of Our Discontent Made Glorious Summer By This Sun Of York.
        rTNcSsg|]}|r|�qSrwrw)r��orwrwrxr��sz1ParserElement.transformString.<locals>.<listcomp>r�)r\r�r�rzr"r�r�r�rvr��_flattenrr$r�)r�r-rZlastErvr�r�r3rwrwrxr��s(



zParserElement.transformStringcCsPytdd�|j||�D��Stk
rJ}ztjr6�n|�WYdd}~XnXdS)a~
        Another extension to C{L{scanString}}, simplifying the access to the tokens found
        to match the given parse expression.  May be called with optional
        C{maxMatches} argument, to clip searching after 'n' matches are found.
        
        Example::
            # a capitalized word starts with an uppercase letter, followed by zero or more lowercase letters
            cap_word = Word(alphas.upper(), alphas.lower())
            
            print(cap_word.searchString("More than Iron, more than Lead, more than Gold I need Electricity"))
        prints::
            ['More', 'Iron', 'Lead', 'Gold', 'I']
        cSsg|]\}}}|�qSrwrw)r�rvr�r�rwrwrxr��sz.ParserElement.searchString.<locals>.<listcomp>N)r"r�rr$r�)r�r-r�r3rwrwrx�searchString�szParserElement.searchStringc	csXd}d}x<|j||d�D]*\}}}|||�V|r>|dV|}qW||d�VdS)a[
        Generator method to split a string using the given expression as a separator.
        May be called with optional C{maxsplit} argument, to limit the number of splits;
        and the optional C{includeSeparators} argument (default=C{False}), if the separating
        matching text should be included in the split results.
        
        Example::        
            punc = oneOf(list(".,;:/-!?"))
            print(list(punc.split("This, this?, this sentence, is badly punctuated!")))
        prints::
            ['This', ' this', '', ' this sentence', ' is badly punctuated', '']
        r)r�N)r�)	r�r-�maxsplitZincludeSeparatorsZsplitsZlastrvr�r�rwrwrxr��s

zParserElement.splitcCsFt|t�rtj|�}t|t�s:tjdt|�tdd�dSt||g�S)a�
        Implementation of + operator - returns C{L{And}}. Adding strings to a ParserElement
        converts them to L{Literal}s by default.
        
        Example::
            greet = Word(alphas) + "," + Word(alphas) + "!"
            hello = "Hello, World!"
            print (hello, "->", greet.parseString(hello))
        Prints::
            Hello, World! -> ['Hello', ',', 'World', '!']
        z4Cannot combine element of type %s with ParserElementrq)�
stacklevelN)	rzr�r$rQ�warnings�warnr��
SyntaxWarningr)r�r�rwrwrxr�s



zParserElement.__add__cCsBt|t�rtj|�}t|t�s:tjdt|�tdd�dS||S)z]
        Implementation of + operator when left operand is not a C{L{ParserElement}}
        z4Cannot combine element of type %s with ParserElementrq)r�N)rzr�r$rQr�r�r�r�)r�r�rwrwrxrs



zParserElement.__radd__cCsLt|t�rtj|�}t|t�s:tjdt|�tdd�dSt|tj	�|g�S)zQ
        Implementation of - operator, returns C{L{And}} with error stop
        z4Cannot combine element of type %s with ParserElementrq)r�N)
rzr�r$rQr�r�r�r�r�
_ErrorStop)r�r�rwrwrx�__sub__s



zParserElement.__sub__cCsBt|t�rtj|�}t|t�s:tjdt|�tdd�dS||S)z]
        Implementation of - operator when left operand is not a C{L{ParserElement}}
        z4Cannot combine element of type %s with ParserElementrq)r�N)rzr�r$rQr�r�r�r�)r�r�rwrwrx�__rsub__ s



zParserElement.__rsub__cs�t|t�r|d}}n�t|t�r�|ddd�}|ddkrHd|df}t|dt�r�|ddkr�|ddkrvt��S|ddkr�t��S�|dt��SnJt|dt�r�t|dt�r�|\}}||8}ntdt|d�t|d���ntdt|���|dk�rtd��|dk�rtd��||k�o2dkn�rBtd	��|�r���fd
d��|�r�|dk�rt��|�}nt�g|��|�}n�|�}n|dk�r��}nt�g|�}|S)
a�
        Implementation of * operator, allows use of C{expr * 3} in place of
        C{expr + expr + expr}.  Expressions may also me multiplied by a 2-integer
        tuple, similar to C{{min,max}} multipliers in regular expressions.  Tuples
        may also include C{None} as in:
         - C{expr*(n,None)} or C{expr*(n,)} is equivalent
              to C{expr*n + L{ZeroOrMore}(expr)}
              (read as "at least n instances of C{expr}")
         - C{expr*(None,n)} is equivalent to C{expr*(0,n)}
              (read as "0 to n instances of C{expr}")
         - C{expr*(None,None)} is equivalent to C{L{ZeroOrMore}(expr)}
         - C{expr*(1,None)} is equivalent to C{L{OneOrMore}(expr)}

        Note that C{expr*(None,n)} does not raise an exception if
        more than n exprs exist in the input stream; that is,
        C{expr*(None,n)} does not enforce a maximum number of expr
        occurrences.  If this behavior is desired, then write
        C{expr*(None,n) + ~expr}
        rNrqrrz7cannot multiply 'ParserElement' and ('%s','%s') objectsz0cannot multiply 'ParserElement' and '%s' objectsz/cannot multiply ParserElement by negative valuez@second tuple value must be greater or equal to first tuple valuez+cannot multiply ParserElement by 0 or (0,0)cs(|dkrt��|d��St��SdS)Nrr)r)�n)�makeOptionalListr�rwrxr�]sz/ParserElement.__mul__.<locals>.makeOptionalList)NN)	rzru�tupler2rr�r��
ValueErrorr)r�r�ZminElementsZoptElementsr�rw)r�r�rx�__mul__,sD







zParserElement.__mul__cCs
|j|�S)N)r�)r�r�rwrwrx�__rmul__pszParserElement.__rmul__cCsFt|t�rtj|�}t|t�s:tjdt|�tdd�dSt||g�S)zI
        Implementation of | operator - returns C{L{MatchFirst}}
        z4Cannot combine element of type %s with ParserElementrq)r�N)	rzr�r$rQr�r�r�r�r)r�r�rwrwrx�__or__ss



zParserElement.__or__cCsBt|t�rtj|�}t|t�s:tjdt|�tdd�dS||BS)z]
        Implementation of | operator when left operand is not a C{L{ParserElement}}
        z4Cannot combine element of type %s with ParserElementrq)r�N)rzr�r$rQr�r�r�r�)r�r�rwrwrx�__ror__s



zParserElement.__ror__cCsFt|t�rtj|�}t|t�s:tjdt|�tdd�dSt||g�S)zA
        Implementation of ^ operator - returns C{L{Or}}
        z4Cannot combine element of type %s with ParserElementrq)r�N)	rzr�r$rQr�r�r�r�r)r�r�rwrwrx�__xor__�s



zParserElement.__xor__cCsBt|t�rtj|�}t|t�s:tjdt|�tdd�dS||AS)z]
        Implementation of ^ operator when left operand is not a C{L{ParserElement}}
        z4Cannot combine element of type %s with ParserElementrq)r�N)rzr�r$rQr�r�r�r�)r�r�rwrwrx�__rxor__�s



zParserElement.__rxor__cCsFt|t�rtj|�}t|t�s:tjdt|�tdd�dSt||g�S)zC
        Implementation of & operator - returns C{L{Each}}
        z4Cannot combine element of type %s with ParserElementrq)r�N)	rzr�r$rQr�r�r�r�r)r�r�rwrwrx�__and__�s



zParserElement.__and__cCsBt|t�rtj|�}t|t�s:tjdt|�tdd�dS||@S)z]
        Implementation of & operator when left operand is not a C{L{ParserElement}}
        z4Cannot combine element of type %s with ParserElementrq)r�N)rzr�r$rQr�r�r�r�)r�r�rwrwrx�__rand__�s



zParserElement.__rand__cCst|�S)zE
        Implementation of ~ operator - returns C{L{NotAny}}
        )r)r�rwrwrx�
__invert__�szParserElement.__invert__cCs|dk	r|j|�S|j�SdS)a

        Shortcut for C{L{setResultsName}}, with C{listAllMatches=False}.
        
        If C{name} is given with a trailing C{'*'} character, then C{listAllMatches} will be
        passed as C{True}.
           
        If C{name} is omitted, same as calling C{L{copy}}.

        Example::
            # these are equivalent
            userdata = Word(alphas).setResultsName("name") + Word(nums+"-").setResultsName("socsecno")
            userdata = Word(alphas)("name") + Word(nums+"-")("socsecno")             
        N)rmr�)r�r�rwrwrx�__call__�s
zParserElement.__call__cCst|�S)z�
        Suppresses the output of this C{ParserElement}; useful to keep punctuation from
        cluttering up returned output.
        )r+)r�rwrwrx�suppress�szParserElement.suppresscCs
d|_|S)a
        Disables the skipping of whitespace before matching the characters in the
        C{ParserElement}'s defined pattern.  This is normally only used internally by
        the pyparsing module, but may be needed in some whitespace-sensitive grammars.
        F)rX)r�rwrwrx�leaveWhitespace�szParserElement.leaveWhitespacecCsd|_||_d|_|S)z8
        Overrides the default whitespace chars
        TF)rXrYrZ)r�rOrwrwrx�setWhitespaceChars�sz ParserElement.setWhitespaceCharscCs
d|_|S)z�
        Overrides default behavior to expand C{<TAB>}s to spaces before parsing the input string.
        Must be called before C{parseString} when the input grammar contains elements that
        match C{<TAB>} characters.
        T)r\)r�rwrwrx�
parseWithTabs�szParserElement.parseWithTabscCsLt|t�rt|�}t|t�r4||jkrH|jj|�n|jjt|j���|S)a�
        Define expression to be ignored (e.g., comments) while doing pattern
        matching; may be called repeatedly, to define multiple comment or other
        ignorable patterns.
        
        Example::
            patt = OneOrMore(Word(alphas))
            patt.parseString('ablaj /* comment */ lskjd') # -> ['ablaj']
            
            patt.ignore(cStyleComment)
            patt.parseString('ablaj /* comment */ lskjd') # -> ['ablaj', 'lskjd']
        )rzr�r+r]r�r�)r�r�rwrwrx�ignore�s


zParserElement.ignorecCs"|pt|pt|ptf|_d|_|S)zT
        Enable display of debugging messages while doing pattern matching.
        T)r/r2r4rcr^)r�ZstartActionZ
successActionZexceptionActionrwrwrx�setDebugActions
s
zParserElement.setDebugActionscCs|r|jttt�nd|_|S)a�
        Enable display of debugging messages while doing pattern matching.
        Set C{flag} to True to enable, False to disable.

        Example::
            wd = Word(alphas).setName("alphaword")
            integer = Word(nums).setName("numword")
            term = wd | integer
            
            # turn on debugging for wd
            wd.setDebug()

            OneOrMore(term).parseString("abc 123 xyz 890")
        
        prints::
            Match alphaword at loc 0(1,1)
            Matched alphaword -> ['abc']
            Match alphaword at loc 3(1,4)
            Exception raised:Expected alphaword (at char 4), (line:1, col:5)
            Match alphaword at loc 7(1,8)
            Matched alphaword -> ['xyz']
            Match alphaword at loc 11(1,12)
            Exception raised:Expected alphaword (at char 12), (line:1, col:13)
            Match alphaword at loc 15(1,16)
            Exception raised:Expected alphaword (at char 15), (line:1, col:16)

        The output shown is that produced by the default debug actions - custom debug actions can be
        specified using L{setDebugActions}. Prior to attempting
        to match the C{wd} expression, the debugging message C{"Match <exprname> at loc <n>(<line>,<col>)"}
        is shown. Then if the parse succeeds, a C{"Matched"} message is shown, or an C{"Exception raised"}
        message is shown. Also note the use of L{setName} to assign a human-readable name to the expression,
        which makes debugging and exception messages easier to understand - for instance, the default
        name created for the C{Word} expression without calling C{setName} is C{"W:(ABCD...)"}.
        F)r�r/r2r4r^)r��flagrwrwrx�setDebugs#zParserElement.setDebugcCs|jS)N)r�)r�rwrwrxr�@szParserElement.__str__cCst|�S)N)r�)r�rwrwrxr�CszParserElement.__repr__cCsd|_d|_|S)NT)r_rU)r�rwrwrxr�FszParserElement.streamlinecCsdS)Nrw)r�r�rwrwrx�checkRecursionKszParserElement.checkRecursioncCs|jg�dS)zj
        Check defined expressions for valid structure, check for infinite recursive definitions.
        N)r�)r��
validateTracerwrwrx�validateNszParserElement.validatecCs�y|j�}Wn2tk
r>t|d��}|j�}WdQRXYnXy|j||�Stk
r|}ztjrh�n|�WYdd}~XnXdS)z�
        Execute the parse expression on the given file or filename.
        If a filename is specified (instead of a file object),
        the entire file is opened, read, and closed before parsing.
        �rN)�readr��openr�rr$r�)r�Zfile_or_filenamer�Z
file_contents�fr3rwrwrx�	parseFileTszParserElement.parseFilecsHt|t�r"||kp t|�t|�kSt|t�r6|j|�Stt|�|kSdS)N)rzr$�varsr�r��super)r�r�)rHrwrx�__eq__hs



zParserElement.__eq__cCs
||kS)Nrw)r�r�rwrwrx�__ne__pszParserElement.__ne__cCstt|��S)N)�hash�id)r�rwrwrx�__hash__sszParserElement.__hash__cCs||kS)Nrw)r�r�rwrwrx�__req__vszParserElement.__req__cCs
||kS)Nrw)r�r�rwrwrx�__rne__yszParserElement.__rne__cCs0y|jt|�|d�dStk
r*dSXdS)a�
        Method for quick testing of a parser against a test string. Good for simple 
        inline microtests of sub expressions while building up larger parser.
           
        Parameters:
         - testString - to test against this expression for a match
         - parseAll - (default=C{True}) - flag to pass to C{L{parseString}} when running tests
            
        Example::
            expr = Word(nums)
            assert expr.matches("100")
        )r�TFN)r�r�r)r�Z
testStringr�rwrwrxr�|s

zParserElement.matches�#cCs�t|t�r"tttj|j�j���}t|t�r4t|�}g}g}d}	�x�|D�]�}
|dk	rb|j	|
d�sl|rx|
rx|j
|
�qH|
s~qHdj|�|
g}g}y:|
jdd�}
|j
|
|d�}|j
|j|d��|	o�|}	Wn�tk
�rx}
z�t|
t�r�dnd	}d|
k�r0|j
t|
j|
��|j
d
t|
j|
�dd|�n|j
d
|
jd|�|j
d
t|
��|	�ob|}	|
}WYdd}
~
XnDtk
�r�}z&|j
dt|��|	�o�|}	|}WYdd}~XnX|�r�|�r�|j
d	�tdj|��|j
|
|f�qHW|	|fS)a3
        Execute the parse expression on a series of test strings, showing each
        test, the parsed results or where the parse failed. Quick and easy way to
        run a parse expression against a list of sample strings.
           
        Parameters:
         - tests - a list of separate test strings, or a multiline string of test strings
         - parseAll - (default=C{True}) - flag to pass to C{L{parseString}} when running tests           
         - comment - (default=C{'#'}) - expression for indicating embedded comments in the test 
              string; pass None to disable comment filtering
         - fullDump - (default=C{True}) - dump results as list followed by results names in nested outline;
              if False, only dump nested list
         - printResults - (default=C{True}) prints test output to stdout
         - failureTests - (default=C{False}) indicates if these tests are expected to fail parsing

        Returns: a (success, results) tuple, where success indicates that all tests succeeded
        (or failed if C{failureTests} is True), and the results contain a list of lines of each 
        test's output
        
        Example::
            number_expr = pyparsing_common.number.copy()

            result = number_expr.runTests('''
                # unsigned integer
                100
                # negative integer
                -100
                # float with scientific notation
                6.02e23
                # integer with scientific notation
                1e-12
                ''')
            print("Success" if result[0] else "Failed!")

            result = number_expr.runTests('''
                # stray character
                100Z
                # missing leading digit before '.'
                -.100
                # too many '.'
                3.14.159
                ''', failureTests=True)
            print("Success" if result[0] else "Failed!")
        prints::
            # unsigned integer
            100
            [100]

            # negative integer
            -100
            [-100]

            # float with scientific notation
            6.02e23
            [6.02e+23]

            # integer with scientific notation
            1e-12
            [1e-12]

            Success
            
            # stray character
            100Z
               ^
            FAIL: Expected end of text (at char 3), (line:1, col:4)

            # missing leading digit before '.'
            -.100
            ^
            FAIL: Expected {real number with scientific notation | real number | signed integer} (at char 0), (line:1, col:1)

            # too many '.'
            3.14.159
                ^
            FAIL: Expected end of text (at char 4), (line:1, col:5)

            Success

        Each test string must be on a single line. If you want to test a string that spans multiple
        lines, create a test like this::

            expr.runTest(r"this is a test\n of strings that spans \n 3 lines")
        
        (Note that this is a raw string literal, you must include the leading 'r'.)
        TNFrz\n)r�)r z(FATAL)r�� rr�^zFAIL: zFAIL-EXCEPTION: )rzr�r�rvr{r��rstrip�
splitlinesrr�r�r�r�r�rrr!rGr�r9rKr,)r�Ztestsr�ZcommentZfullDumpZprintResultsZfailureTestsZ
allResultsZcomments�successrvr�resultr�rzr3rwrwrx�runTests�sNW



$


zParserElement.runTests)F)F)T)T)TT)TT)r�)F)N)T)F)T)Tr�TTF)Or�r�r�r�rNr��staticmethodrPrRr�r�rirmrur�rxr~rr�r�r�r�r�r�r�r�r�r�r�r�rr�r�r�rtr�r�r�r��_MAX_INTr�r�r�r�rrr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r��
__classcell__rwrw)rHrxr$8s�


&




H
"
2G+D
			

)

cs eZdZdZ�fdd�Z�ZS)r,zT
    Abstract C{ParserElement} subclass, for defining atomic matching patterns.
    cstt|�jdd�dS)NF)rg)r�r,r�)r�)rHrwrxr�	szToken.__init__)r�r�r�r�r�r�rwrw)rHrxr,	scs eZdZdZ�fdd�Z�ZS)r
z,
    An empty token, will always match.
    cs$tt|�j�d|_d|_d|_dS)Nr
TF)r�r
r�r�r[r`)r�)rHrwrxr�	szEmpty.__init__)r�r�r�r�r�r�rwrw)rHrxr
	scs*eZdZdZ�fdd�Zddd�Z�ZS)rz(
    A token that will never match.
    cs*tt|�j�d|_d|_d|_d|_dS)NrTFzUnmatchable token)r�rr�r�r[r`ra)r�)rHrwrxr�*	s
zNoMatch.__init__TcCst|||j|��dS)N)rra)r�r-r�rorwrwrxr�1	szNoMatch.parseImpl)T)r�r�r�r�r�r�r�rwrw)rHrxr&	scs*eZdZdZ�fdd�Zddd�Z�ZS)ra�
    Token to exactly match a specified string.
    
    Example::
        Literal('blah').parseString('blah')  # -> ['blah']
        Literal('blah').parseString('blahfooblah')  # -> ['blah']
        Literal('blah').parseString('bla')  # -> Exception: Expected "blah"
    
    For case-insensitive matching, use L{CaselessLiteral}.
    
    For keyword matching (force word break before and after the matched string),
    use L{Keyword} or L{CaselessKeyword}.
    cs�tt|�j�||_t|�|_y|d|_Wn*tk
rVtj	dt
dd�t|_YnXdt
|j�|_d|j|_d|_d|_dS)Nrz2null string passed to Literal; use Empty() insteadrq)r�z"%s"z	Expected F)r�rr��matchr��matchLen�firstMatchCharr�r�r�r�r
rHr�r�rar[r`)r��matchString)rHrwrxr�C	s

zLiteral.__init__TcCsJ|||jkr6|jdks&|j|j|�r6||j|jfSt|||j|��dS)Nrr)r�r��
startswithr�rra)r�r-r�rorwrwrxr�V	szLiteral.parseImpl)T)r�r�r�r�r�r�r�rwrw)rHrxr5	s
csLeZdZdZedZd�fdd�	Zddd	�Z�fd
d�Ze	dd
��Z
�ZS)ra\
    Token to exactly match a specified string as a keyword, that is, it must be
    immediately followed by a non-keyword character.  Compare with C{L{Literal}}:
     - C{Literal("if")} will match the leading C{'if'} in C{'ifAndOnlyIf'}.
     - C{Keyword("if")} will not; it will only match the leading C{'if'} in C{'if x=1'}, or C{'if(y==2)'}
    Accepts two optional constructor arguments in addition to the keyword string:
     - C{identChars} is a string of characters that would be valid identifier characters,
          defaulting to all alphanumerics + "_" and "$"
     - C{caseless} allows case-insensitive matching, default is C{False}.
       
    Example::
        Keyword("start").parseString("start")  # -> ['start']
        Keyword("start").parseString("starting")  # -> Exception

    For case-insensitive matching, use L{CaselessKeyword}.
    z_$NFcs�tt|�j�|dkrtj}||_t|�|_y|d|_Wn$tk
r^t	j
dtdd�YnXd|j|_d|j|_
d|_d|_||_|r�|j�|_|j�}t|�|_dS)Nrz2null string passed to Keyword; use Empty() insteadrq)r�z"%s"z	Expected F)r�rr��DEFAULT_KEYWORD_CHARSr�r�r�r�r�r�r�r�r�rar[r`�caseless�upper�
caselessmatchr��
identChars)r�r�r�r�)rHrwrxr�q	s&

zKeyword.__init__TcCs|jr|||||j�j�|jkr�|t|�|jksL|||jj�|jkr�|dksj||dj�|jkr�||j|jfSnv|||jkr�|jdks�|j|j|�r�|t|�|jks�|||j|jkr�|dks�||d|jkr�||j|jfSt	|||j
|��dS)Nrrr)r�r�r�r�r�r�r�r�r�rra)r�r-r�rorwrwrxr��	s*&zKeyword.parseImplcstt|�j�}tj|_|S)N)r�rr�r�r�)r�r�)rHrwrxr��	szKeyword.copycCs
|t_dS)z,Overrides the default Keyword chars
        N)rr�)rOrwrwrx�setDefaultKeywordChars�	szKeyword.setDefaultKeywordChars)NF)T)r�r�r�r�r3r�r�r�r�r�r�r�rwrw)rHrxr^	s
cs*eZdZdZ�fdd�Zddd�Z�ZS)ral
    Token to match a specified string, ignoring case of letters.
    Note: the matched results will always be in the case of the given
    match string, NOT the case of the input text.

    Example::
        OneOrMore(CaselessLiteral("CMD")).parseString("cmd CMD Cmd10") # -> ['CMD', 'CMD', 'CMD']
        
    (Contrast with example for L{CaselessKeyword}.)
    cs6tt|�j|j��||_d|j|_d|j|_dS)Nz'%s'z	Expected )r�rr�r��returnStringr�ra)r�r�)rHrwrxr��	szCaselessLiteral.__init__TcCs@||||j�j�|jkr,||j|jfSt|||j|��dS)N)r�r�r�r�rra)r�r-r�rorwrwrxr��	szCaselessLiteral.parseImpl)T)r�r�r�r�r�r�r�rwrw)rHrxr�	s
cs,eZdZdZd�fdd�	Zd	dd�Z�ZS)
rz�
    Caseless version of L{Keyword}.

    Example::
        OneOrMore(CaselessKeyword("CMD")).parseString("cmd CMD Cmd10") # -> ['CMD', 'CMD']
        
    (Contrast with example for L{CaselessLiteral}.)
    Ncstt|�j||dd�dS)NT)r�)r�rr�)r�r�r�)rHrwrxr��	szCaselessKeyword.__init__TcCsj||||j�j�|jkrV|t|�|jksF|||jj�|jkrV||j|jfSt|||j|��dS)N)r�r�r�r�r�r�rra)r�r-r�rorwrwrxr��	s*zCaselessKeyword.parseImpl)N)T)r�r�r�r�r�r�r�rwrw)rHrxr�	scs,eZdZdZd�fdd�	Zd	dd�Z�ZS)
rlax
    A variation on L{Literal} which matches "close" matches, that is, 
    strings with at most 'n' mismatching characters. C{CloseMatch} takes parameters:
     - C{match_string} - string to be matched
     - C{maxMismatches} - (C{default=1}) maximum number of mismatches allowed to count as a match
    
    The results from a successful parse will contain the matched text from the input string and the following named results:
     - C{mismatches} - a list of the positions within the match_string where mismatches were found
     - C{original} - the original match_string used to compare against the input string
    
    If C{mismatches} is an empty list, then the match was an exact match.
    
    Example::
        patt = CloseMatch("ATCATCGAATGGA")
        patt.parseString("ATCATCGAAXGGA") # -> (['ATCATCGAAXGGA'], {'mismatches': [[9]], 'original': ['ATCATCGAATGGA']})
        patt.parseString("ATCAXCGAAXGGA") # -> Exception: Expected 'ATCATCGAATGGA' (with up to 1 mismatches) (at char 0), (line:1, col:1)

        # exact match
        patt.parseString("ATCATCGAATGGA") # -> (['ATCATCGAATGGA'], {'mismatches': [[]], 'original': ['ATCATCGAATGGA']})

        # close match allowing up to 2 mismatches
        patt = CloseMatch("ATCATCGAATGGA", maxMismatches=2)
        patt.parseString("ATCAXCGAAXGGA") # -> (['ATCAXCGAAXGGA'], {'mismatches': [[4, 9]], 'original': ['ATCATCGAATGGA']})
    rrcsBtt|�j�||_||_||_d|j|jf|_d|_d|_dS)Nz&Expected %r (with up to %d mismatches)F)	r�rlr�r��match_string�
maxMismatchesrar`r[)r�r�r�)rHrwrxr��	szCloseMatch.__init__TcCs�|}t|�}|t|j�}||kr�|j}d}g}	|j}
x�tt|||�|j��D]0\}}|\}}
||
krP|	j|�t|	�|
krPPqPW|d}t|||�g�}|j|d<|	|d<||fSt|||j|��dS)Nrrr�original�
mismatches)	r�r�r�r�r�r�r"rra)r�r-r�ro�startr��maxlocr�Zmatch_stringlocr�r�Zs_m�src�mat�resultsrwrwrxr��	s("

zCloseMatch.parseImpl)rr)T)r�r�r�r�r�r�r�rwrw)rHrxrl�	s	cs8eZdZdZd
�fdd�	Zdd	d
�Z�fdd�Z�ZS)r/a	
    Token for matching words composed of allowed character sets.
    Defined with string containing all allowed initial characters,
    an optional string containing allowed body characters (if omitted,
    defaults to the initial character set), and an optional minimum,
    maximum, and/or exact length.  The default value for C{min} is 1 (a
    minimum value < 1 is not valid); the default values for C{max} and C{exact}
    are 0, meaning no maximum or exact length restriction. An optional
    C{excludeChars} parameter can list characters that might be found in 
    the input C{bodyChars} string; useful to define a word of all printables
    except for one or two characters, for instance.
    
    L{srange} is useful for defining custom character set strings for defining 
    C{Word} expressions, using range notation from regular expression character sets.
    
    A common mistake is to use C{Word} to match a specific literal string, as in 
    C{Word("Address")}. Remember that C{Word} uses the string argument to define
    I{sets} of matchable characters. This expression would match "Add", "AAA",
    "dAred", or any other word made up of the characters 'A', 'd', 'r', 'e', and 's'.
    To match an exact literal string, use L{Literal} or L{Keyword}.

    pyparsing includes helper strings for building Words:
     - L{alphas}
     - L{nums}
     - L{alphanums}
     - L{hexnums}
     - L{alphas8bit} (alphabetic characters in ASCII range 128-255 - accented, tilded, umlauted, etc.)
     - L{punc8bit} (non-alphabetic characters in ASCII range 128-255 - currency, symbols, superscripts, diacriticals, etc.)
     - L{printables} (any non-whitespace character)

    Example::
        # a word composed of digits
        integer = Word(nums) # equivalent to Word("0123456789") or Word(srange("0-9"))
        
        # a word with a leading capital, and zero or more lowercase
        capital_word = Word(alphas.upper(), alphas.lower())

        # hostnames are alphanumeric, with leading alpha, and '-'
        hostname = Word(alphas, alphanums+'-')
        
        # roman numeral (not a strict parser, accepts invalid mix of characters)
        roman = Word("IVXLCDM")
        
        # any string of non-whitespace characters, except for ','
        csv_value = Word(printables, excludeChars=",")
    NrrrFcs�tt|�j��rFdj�fdd�|D��}|rFdj�fdd�|D��}||_t|�|_|rl||_t|�|_n||_t|�|_|dk|_	|dkr�t
d��||_|dkr�||_nt
|_|dkr�||_||_t|�|_d|j|_d	|_||_d
|j|jk�r�|dk�r�|dk�r�|dk�r�|j|jk�r8dt|j�|_nHt|j�dk�rfdtj|j�t|j�f|_nd
t|j�t|j�f|_|j�r�d|jd|_ytj|j�|_Wntk
�r�d|_YnXdS)Nr�c3s|]}|�kr|VqdS)Nrw)r�r�)�excludeCharsrwrxr�7
sz Word.__init__.<locals>.<genexpr>c3s|]}|�kr|VqdS)Nrw)r�r�)r�rwrxr�9
srrrzZcannot specify a minimum length < 1; use Optional(Word()) if zero-length word is permittedz	Expected Fr�z[%s]+z%s[%s]*z	[%s][%s]*z\b)r�r/r�r��
initCharsOrigr��	initChars�
bodyCharsOrig�	bodyChars�maxSpecifiedr��minLen�maxLenr�r�r�rar`�	asKeyword�_escapeRegexRangeChars�reStringr�rd�escape�compilerK)r�rr�min�max�exactrr�)rH)r�rxr�4
sT



0
z
Word.__init__Tc
CsD|jr<|jj||�}|s(t|||j|��|j�}||j�fS|||jkrZt|||j|��|}|d7}t|�}|j}||j	}t
||�}x ||kr�|||kr�|d7}q�Wd}	|||jkr�d}	|jr�||kr�|||kr�d}	|j
�r|dk�r||d|k�s||k�r|||k�rd}	|	�r4t|||j|��||||�fS)NrrFTr)rdr�rra�end�grouprr�rrrrrr)
r�r-r�ror�r�r�Z	bodycharsr�ZthrowExceptionrwrwrxr�j
s6

4zWord.parseImplcstytt|�j�Stk
r"YnX|jdkrndd�}|j|jkr^d||j�||j�f|_nd||j�|_|jS)NcSs$t|�dkr|dd�dS|SdS)N�z...)r�)r�rwrwrx�
charsAsStr�
sz Word.__str__.<locals>.charsAsStrz	W:(%s,%s)zW:(%s))r�r/r�rKrUr�r)r�r)rHrwrxr��
s
zWord.__str__)NrrrrFN)T)r�r�r�r�r�r�r�r�rwrw)rHrxr/
s.6
#csFeZdZdZeejd��Zd�fdd�	Zddd�Z	�fd	d
�Z
�ZS)
r'a�
    Token for matching strings that match a given regular expression.
    Defined with string specifying the regular expression in a form recognized by the inbuilt Python re module.
    If the given regex contains named groups (defined using C{(?P<name>...)}), these will be preserved as 
    named parse results.

    Example::
        realnum = Regex(r"[+-]?\d+\.\d*")
        date = Regex(r'(?P<year>\d{4})-(?P<month>\d\d?)-(?P<day>\d\d?)')
        # ref: http://stackoverflow.com/questions/267399/how-do-you-match-only-valid-roman-numerals-with-a-regular-expression
        roman = Regex(r"M{0,4}(CM|CD|D?C{0,3})(XC|XL|L?X{0,3})(IX|IV|V?I{0,3})")
    z[A-Z]rcs�tt|�j�t|t�r�|s,tjdtdd�||_||_	yt
j|j|j	�|_
|j|_Wq�t
jk
r�tjd|tdd��Yq�Xn2t|tj�r�||_
t|�|_|_||_	ntd��t|�|_d|j|_d|_d|_d	S)
z�The parameters C{pattern} and C{flags} are passed to the C{re.compile()} function as-is. See the Python C{re} module for an explanation of the acceptable patterns and flags.z0null string passed to Regex; use Empty() insteadrq)r�z$invalid pattern (%s) passed to RegexzCRegex may only be constructed with a string or a compiled RE objectz	Expected FTN)r�r'r�rzr�r�r�r��pattern�flagsrdr
r�
sre_constants�error�compiledREtyper{r�r�r�rar`r[)r�rr)rHrwrxr��
s.





zRegex.__init__TcCsd|jj||�}|s"t|||j|��|j�}|j�}t|j��}|r\x|D]}||||<qHW||fS)N)rdr�rrar�	groupdictr"r)r�r-r�ror��dr�r�rwrwrxr��
s
zRegex.parseImplcsDytt|�j�Stk
r"YnX|jdkr>dt|j�|_|jS)NzRe:(%s))r�r'r�rKrUr�r)r�)rHrwrxr��
s
z
Regex.__str__)r)T)r�r�r�r�r�rdr
rr�r�r�r�rwrw)rHrxr'�
s
"

cs8eZdZdZd�fdd�	Zddd�Z�fd	d
�Z�ZS)
r%a�
    Token for matching strings that are delimited by quoting characters.
    
    Defined with the following parameters:
        - quoteChar - string of one or more characters defining the quote delimiting string
        - escChar - character to escape quotes, typically backslash (default=C{None})
        - escQuote - special quote sequence to escape an embedded quote string (such as SQL's "" to escape an embedded ") (default=C{None})
        - multiline - boolean indicating whether quotes can span multiple lines (default=C{False})
        - unquoteResults - boolean indicating whether the matched text should be unquoted (default=C{True})
        - endQuoteChar - string of one or more characters defining the end of the quote delimited string (default=C{None} => same as quoteChar)
        - convertWhitespaceEscapes - convert escaped whitespace (C{'\t'}, C{'\n'}, etc.) to actual whitespace (default=C{True})

    Example::
        qs = QuotedString('"')
        print(qs.searchString('lsjdf "This is the quote" sldjf'))
        complex_qs = QuotedString('{{', endQuoteChar='}}')
        print(complex_qs.searchString('lsjdf {{This is the "quote"}} sldjf'))
        sql_qs = QuotedString('"', escQuote='""')
        print(sql_qs.searchString('lsjdf "This is the quote with ""embedded"" quotes" sldjf'))
    prints::
        [['This is the quote']]
        [['This is the "quote"']]
        [['This is the quote with "embedded" quotes']]
    NFTcsNtt��j�|j�}|s0tjdtdd�t��|dkr>|}n"|j�}|s`tjdtdd�t��|�_t	|��_
|d�_|�_t	|��_
|�_|�_|�_|�_|r�tjtjB�_dtj�j�t�jd�|dk	r�t|�p�df�_n<d�_dtj�j�t�jd�|dk	�rt|��pdf�_t	�j�d	k�rp�jd
dj�fdd
�tt	�j�d	dd�D��d7_|�r��jdtj|�7_|�r��jdtj|�7_tj�j�d�_�jdtj�j�7_ytj�j�j��_�j�_Wn0tjk
�r&tjd�jtdd��YnXt ���_!d�j!�_"d�_#d�_$dS)Nz$quoteChar cannot be the empty stringrq)r�z'endQuoteChar cannot be the empty stringrz%s(?:[^%s%s]r�z%s(?:[^%s\n\r%s]rrz|(?:z)|(?:c3s4|],}dtj�jd|��t�j|�fVqdS)z%s[^%s]N)rdr	�endQuoteCharr)r�r�)r�rwrxr�/sz(QuotedString.__init__.<locals>.<genexpr>�)z|(?:%s)z|(?:%s.)z(.)z)*%sz$invalid pattern (%s) passed to Regexz	Expected FTrs)%r�r%r�r�r�r�r��SyntaxError�	quoteCharr��quoteCharLen�firstQuoteCharr�endQuoteCharLen�escChar�escQuote�unquoteResults�convertWhitespaceEscapesrd�	MULTILINE�DOTALLrr	rrr�r��escCharReplacePatternr
rrrr�r�rar`r[)r�rr r!Z	multiliner"rr#)rH)r�rxr�sf




6

zQuotedString.__init__c	Cs�|||jkr|jj||�pd}|s4t|||j|��|j�}|j�}|jr�||j|j	�}t
|t�r�d|kr�|jr�ddddd�}x |j
�D]\}}|j||�}q�W|jr�tj|jd|�}|jr�|j|j|j�}||fS)N�\�	r��
)z\tz\nz\fz\rz\g<1>)rrdr�rrarrr"rrrzr�r#r�r�r r�r&r!r)	r�r-r�ror�r�Zws_mapZwslitZwscharrwrwrxr�Gs( 
zQuotedString.parseImplcsFytt|�j�Stk
r"YnX|jdkr@d|j|jf|_|jS)Nz.quoted string, starting with %s ending with %s)r�r%r�rKrUrr)r�)rHrwrxr�js
zQuotedString.__str__)NNFTNT)T)r�r�r�r�r�r�r�r�rwrw)rHrxr%�
sA
#cs8eZdZdZd�fdd�	Zddd�Z�fd	d
�Z�ZS)
r	a�
    Token for matching words composed of characters I{not} in a given set (will
    include whitespace in matched characters if not listed in the provided exclusion set - see example).
    Defined with string containing all disallowed characters, and an optional
    minimum, maximum, and/or exact length.  The default value for C{min} is 1 (a
    minimum value < 1 is not valid); the default values for C{max} and C{exact}
    are 0, meaning no maximum or exact length restriction.

    Example::
        # define a comma-separated-value as anything that is not a ','
        csv_value = CharsNotIn(',')
        print(delimitedList(csv_value).parseString("dkls,lsdkjf,s12 34,@!#,213"))
    prints::
        ['dkls', 'lsdkjf', 's12 34', '@!#', '213']
    rrrcs�tt|�j�d|_||_|dkr*td��||_|dkr@||_nt|_|dkrZ||_||_t	|�|_
d|j
|_|jdk|_d|_
dS)NFrrzfcannot specify a minimum length < 1; use Optional(CharsNotIn()) if zero-length char group is permittedrz	Expected )r�r	r�rX�notCharsr�rrr�r�r�rar[r`)r�r+rrr
)rHrwrxr��s 
zCharsNotIn.__init__TcCs�|||jkrt|||j|��|}|d7}|j}t||jt|��}x ||krd|||krd|d7}qFW|||jkr�t|||j|��||||�fS)Nrr)r+rrarrr�r)r�r-r�ror�Znotchars�maxlenrwrwrxr��s
zCharsNotIn.parseImplcsdytt|�j�Stk
r"YnX|jdkr^t|j�dkrRd|jdd�|_nd|j|_|jS)Nrz
!W:(%s...)z!W:(%s))r�r	r�rKrUr�r+)r�)rHrwrxr��s
zCharsNotIn.__str__)rrrr)T)r�r�r�r�r�r�r�r�rwrw)rHrxr	vs
cs<eZdZdZdddddd�Zd�fdd�	Zddd�Z�ZS)r.a�
    Special matching class for matching whitespace.  Normally, whitespace is ignored
    by pyparsing grammars.  This class is included when some whitespace structures
    are significant.  Define with a string containing the whitespace characters to be
    matched; default is C{" \t\r\n"}.  Also takes optional C{min}, C{max}, and C{exact} arguments,
    as defined for the C{L{Word}} class.
    z<SPC>z<TAB>z<LF>z<CR>z<FF>)r�r(rr*r)� 	
rrrcs�tt��j�|�_�jdj�fdd��jD���djdd��jD���_d�_d�j�_	|�_
|dkrt|�_nt�_|dkr�|�_|�_
dS)Nr�c3s|]}|�jkr|VqdS)N)�
matchWhite)r�r�)r�rwrxr��sz!White.__init__.<locals>.<genexpr>css|]}tj|VqdS)N)r.�	whiteStrs)r�r�rwrwrxr��sTz	Expected r)
r�r.r�r.r�r�rYr�r[rarrr�)r�Zwsrrr
)rH)r�rxr��s zWhite.__init__TcCs�|||jkrt|||j|��|}|d7}||j}t|t|��}x"||krd|||jkrd|d7}qDW|||jkr�t|||j|��||||�fS)Nrr)r.rrarrr�r)r�r-r�ror�r�rwrwrxr��s
zWhite.parseImpl)r-rrrr)T)r�r�r�r�r/r�r�r�rwrw)rHrxr.�scseZdZ�fdd�Z�ZS)�_PositionTokencs(tt|�j�|jj|_d|_d|_dS)NTF)r�r0r�rHr�r�r[r`)r�)rHrwrxr��s
z_PositionToken.__init__)r�r�r�r�r�rwrw)rHrxr0�sr0cs2eZdZdZ�fdd�Zdd�Zd	dd�Z�ZS)
rzb
    Token to advance to a specific column of input text; useful for tabular report scraping.
    cstt|�j�||_dS)N)r�rr�r9)r��colno)rHrwrxr��szGoToColumn.__init__cCs`t||�|jkr\t|�}|jr*|j||�}x0||krZ||j�rZt||�|jkrZ|d7}q,W|S)Nrr)r9r�r]r��isspace)r�r-r�r�rwrwrxr��s&zGoToColumn.preParseTcCsDt||�}||jkr"t||d|��||j|}|||�}||fS)NzText not in expected column)r9r)r�r-r�roZthiscolZnewlocr�rwrwrxr�s

zGoToColumn.parseImpl)T)r�r�r�r�r�r�r�r�rwrw)rHrxr�s	cs*eZdZdZ�fdd�Zddd�Z�ZS)ra�
    Matches if current position is at the beginning of a line within the parse string
    
    Example::
    
        test = '''        AAA this line
        AAA and this line
          AAA but not this one
        B AAA and definitely not this one
        '''

        for t in (LineStart() + 'AAA' + restOfLine).searchString(test):
            print(t)
    
    Prints::
        ['AAA', ' this line']
        ['AAA', ' and this line']    

    cstt|�j�d|_dS)NzExpected start of line)r�rr�ra)r�)rHrwrxr�&szLineStart.__init__TcCs*t||�dkr|gfSt|||j|��dS)Nrr)r9rra)r�r-r�rorwrwrxr�*szLineStart.parseImpl)T)r�r�r�r�r�r�r�rwrw)rHrxrscs*eZdZdZ�fdd�Zddd�Z�ZS)rzU
    Matches if current position is at the end of a line within the parse string
    cs,tt|�j�|jtjjdd��d|_dS)Nrr�zExpected end of line)r�rr�r�r$rNr�ra)r�)rHrwrxr�3szLineEnd.__init__TcCsb|t|�kr6||dkr$|ddfSt|||j|��n(|t|�krN|dgfSt|||j|��dS)Nrrr)r�rra)r�r-r�rorwrwrxr�8szLineEnd.parseImpl)T)r�r�r�r�r�r�r�rwrw)rHrxr/scs*eZdZdZ�fdd�Zddd�Z�ZS)r*zM
    Matches if current position is at the beginning of the parse string
    cstt|�j�d|_dS)NzExpected start of text)r�r*r�ra)r�)rHrwrxr�GszStringStart.__init__TcCs0|dkr(||j|d�kr(t|||j|��|gfS)Nr)r�rra)r�r-r�rorwrwrxr�KszStringStart.parseImpl)T)r�r�r�r�r�r�r�rwrw)rHrxr*Cscs*eZdZdZ�fdd�Zddd�Z�ZS)r)zG
    Matches if current position is at the end of the parse string
    cstt|�j�d|_dS)NzExpected end of text)r�r)r�ra)r�)rHrwrxr�VszStringEnd.__init__TcCs^|t|�krt|||j|��n<|t|�kr6|dgfS|t|�krJ|gfSt|||j|��dS)Nrr)r�rra)r�r-r�rorwrwrxr�ZszStringEnd.parseImpl)T)r�r�r�r�r�r�r�rwrw)rHrxr)Rscs.eZdZdZef�fdd�	Zddd�Z�ZS)r1ap
    Matches if the current position is at the beginning of a Word, and
    is not preceded by any character in a given set of C{wordChars}
    (default=C{printables}). To emulate the C{} behavior of regular expressions,
    use C{WordStart(alphanums)}. C{WordStart} will also match at the beginning of
    the string being parsed, or at the beginning of a line.
    cs"tt|�j�t|�|_d|_dS)NzNot at the start of a word)r�r1r�r��	wordCharsra)r�r3)rHrwrxr�ls
zWordStart.__init__TcCs@|dkr8||d|jks(|||jkr8t|||j|��|gfS)Nrrr)r3rra)r�r-r�rorwrwrxr�qs
zWordStart.parseImpl)T)r�r�r�r�rVr�r�r�rwrw)rHrxr1dscs.eZdZdZef�fdd�	Zddd�Z�ZS)r0aZ
    Matches if the current position is at the end of a Word, and
    is not followed by any character in a given set of C{wordChars}
    (default=C{printables}). To emulate the C{} behavior of regular expressions,
    use C{WordEnd(alphanums)}. C{WordEnd} will also match at the end of
    the string being parsed, or at the end of a line.
    cs(tt|�j�t|�|_d|_d|_dS)NFzNot at the end of a word)r�r0r�r�r3rXra)r�r3)rHrwrxr��s
zWordEnd.__init__TcCsPt|�}|dkrH||krH|||jks8||d|jkrHt|||j|��|gfS)Nrrr)r�r3rra)r�r-r�ror�rwrwrxr��szWordEnd.parseImpl)T)r�r�r�r�rVr�r�r�rwrw)rHrxr0xscs�eZdZdZd�fdd�	Zdd�Zdd�Zd	d
�Z�fdd�Z�fd
d�Z	�fdd�Z
d�fdd�	Zgfdd�Z�fdd�Z
�ZS)r z^
    Abstract subclass of ParserElement, for combining and post-processing parsed tokens.
    Fcs�tt|�j|�t|t�r"t|�}t|t�r<tj|�g|_	njt|t
j�rzt|�}tdd�|D��rnt
tj|�}t|�|_	n,yt|�|_	Wntk
r�|g|_	YnXd|_dS)Ncss|]}t|t�VqdS)N)rzr�)r�r.rwrwrxr��sz+ParseExpression.__init__.<locals>.<genexpr>F)r�r r�rzr�r�r�r$rQ�exprsr��Iterable�allrvr�re)r�r4rg)rHrwrxr��s

zParseExpression.__init__cCs
|j|S)N)r4)r�r�rwrwrxr��szParseExpression.__getitem__cCs|jj|�d|_|S)N)r4r�rU)r�r�rwrwrxr��szParseExpression.appendcCs4d|_dd�|jD�|_x|jD]}|j�q W|S)z~Extends C{leaveWhitespace} defined in base class, and also invokes C{leaveWhitespace} on
           all contained expressions.FcSsg|]}|j��qSrw)r�)r�r�rwrwrxr��sz3ParseExpression.leaveWhitespace.<locals>.<listcomp>)rXr4r�)r�r�rwrwrxr��s
zParseExpression.leaveWhitespacecszt|t�rF||jkrvtt|�j|�xP|jD]}|j|jd�q,Wn0tt|�j|�x|jD]}|j|jd�q^W|S)Nrrrsrs)rzr+r]r�r r�r4)r�r�r�)rHrwrxr��s

zParseExpression.ignorecsLytt|�j�Stk
r"YnX|jdkrFd|jjt|j�f|_|jS)Nz%s:(%s))	r�r r�rKrUrHr�r�r4)r�)rHrwrxr��s
zParseExpression.__str__cs0tt|�j�x|jD]}|j�qWt|j�dk�r|jd}t||j�r�|jr�|jdkr�|j	r�|jdd�|jdg|_d|_
|j|jO_|j|jO_|jd}t||j�o�|jo�|jdko�|j	�r|jdd�|jdd�|_d|_
|j|jO_|j|jO_dt
|�|_|S)Nrqrrrz	Expected rsrs)r�r r�r4r�rzrHrSrVr^rUr[r`r�ra)r�r�r�)rHrwrxr��s0




zParseExpression.streamlinecstt|�j||�}|S)N)r�r rm)r�r�rlr�)rHrwrxrm�szParseExpression.setResultsNamecCs:|dd�|g}x|jD]}|j|�qW|jg�dS)N)r4r�r�)r�r��tmpr�rwrwrxr��szParseExpression.validatecs$tt|�j�}dd�|jD�|_|S)NcSsg|]}|j��qSrw)r�)r�r�rwrwrxr��sz(ParseExpression.copy.<locals>.<listcomp>)r�r r�r4)r�r�)rHrwrxr��szParseExpression.copy)F)F)r�r�r�r�r�r�r�r�r�r�r�rmr�r�r�rwrw)rHrxr �s	
"csTeZdZdZGdd�de�Zd�fdd�	Zddd�Zd	d
�Zdd�Z	d
d�Z
�ZS)ra

    Requires all given C{ParseExpression}s to be found in the given order.
    Expressions may be separated by whitespace.
    May be constructed using the C{'+'} operator.
    May also be constructed using the C{'-'} operator, which will suppress backtracking.

    Example::
        integer = Word(nums)
        name_expr = OneOrMore(Word(alphas))

        expr = And([integer("id"),name_expr("name"),integer("age")])
        # more easily written as:
        expr = integer("id") + name_expr("name") + integer("age")
    cseZdZ�fdd�Z�ZS)zAnd._ErrorStopcs&ttj|�j||�d|_|j�dS)N�-)r�rr�r�r�r�)r�r�r�)rHrwrxr�
szAnd._ErrorStop.__init__)r�r�r�r�r�rwrw)rHrxr�
sr�TcsRtt|�j||�tdd�|jD��|_|j|jdj�|jdj|_d|_	dS)Ncss|]}|jVqdS)N)r[)r�r�rwrwrxr�
szAnd.__init__.<locals>.<genexpr>rT)
r�rr�r6r4r[r�rYrXre)r�r4rg)rHrwrxr�
s
zAnd.__init__c	Cs|jdj|||dd�\}}d}x�|jdd�D]�}t|tj�rFd}q0|r�y|j|||�\}}Wq�tk
rv�Yq�tk
r�}zd|_tj|��WYdd}~Xq�t	k
r�t|t
|�|j|��Yq�Xn|j|||�\}}|s�|j�r0||7}q0W||fS)NrF)rprrT)
r4rtrzrr�r#r�
__traceback__r�r�r�rar�)	r�r-r�ro�
resultlistZ	errorStopr�Z
exprtokensr�rwrwrxr�
s(z
And.parseImplcCst|t�rtj|�}|j|�S)N)rzr�r$rQr�)r�r�rwrwrxr5
s

zAnd.__iadd__cCs8|dd�|g}x |jD]}|j|�|jsPqWdS)N)r4r�r[)r�r��subRecCheckListr�rwrwrxr�:
s

zAnd.checkRecursioncCs@t|d�r|jS|jdkr:ddjdd�|jD��d|_|jS)Nr��{r�css|]}t|�VqdS)N)r�)r�r�rwrwrxr�F
szAnd.__str__.<locals>.<genexpr>�})r�r�rUr�r4)r�rwrwrxr�A
s


 zAnd.__str__)T)T)r�r�r�r�r
r�r�r�rr�r�r�rwrw)rHrxr�s
csDeZdZdZd�fdd�	Zddd�Zdd	�Zd
d�Zdd
�Z�Z	S)ra�
    Requires that at least one C{ParseExpression} is found.
    If two expressions match, the expression that matches the longest string will be used.
    May be constructed using the C{'^'} operator.

    Example::
        # construct Or using '^' operator
        
        number = Word(nums) ^ Combine(Word(nums) + '.' + Word(nums))
        print(number.searchString("123 3.1416 789"))
    prints::
        [['123'], ['3.1416'], ['789']]
    Fcs:tt|�j||�|jr0tdd�|jD��|_nd|_dS)Ncss|]}|jVqdS)N)r[)r�r�rwrwrxr�\
szOr.__init__.<locals>.<genexpr>T)r�rr�r4rr[)r�r4rg)rHrwrxr�Y
szOr.__init__TcCsTd}d}g}x�|jD]�}y|j||�}Wnvtk
rd}	z d|	_|	j|krT|	}|	j}WYdd}	~	Xqtk
r�t|�|kr�t|t|�|j|�}t|�}YqX|j||f�qW|�r*|j	dd�d�x`|D]X\}
}y|j
|||�Stk
�r$}	z"d|	_|	j|k�r|	}|	j}WYdd}	~	Xq�Xq�W|dk	�rB|j|_|�nt||d|��dS)NrrcSs
|dS)Nrrw)�xrwrwrxryu
szOr.parseImpl.<locals>.<lambda>)r�z no defined alternatives to matchrs)r4r�rr9r�r�r�rar��sortrtr�)r�r-r�ro�	maxExcLoc�maxExceptionr�r�Zloc2r��_rwrwrxr�`
s<

zOr.parseImplcCst|t�rtj|�}|j|�S)N)rzr�r$rQr�)r�r�rwrwrx�__ixor__�
s

zOr.__ixor__cCs@t|d�r|jS|jdkr:ddjdd�|jD��d|_|jS)Nr�r<z ^ css|]}t|�VqdS)N)r�)r�r�rwrwrxr��
szOr.__str__.<locals>.<genexpr>r=)r�r�rUr�r4)r�rwrwrxr��
s


 z
Or.__str__cCs0|dd�|g}x|jD]}|j|�qWdS)N)r4r�)r�r�r;r�rwrwrxr��
szOr.checkRecursion)F)T)
r�r�r�r�r�r�rCr�r�r�rwrw)rHrxrK
s

&	csDeZdZdZd�fdd�	Zddd�Zdd	�Zd
d�Zdd
�Z�Z	S)ra�
    Requires that at least one C{ParseExpression} is found.
    If two expressions match, the first one listed is the one that will match.
    May be constructed using the C{'|'} operator.

    Example::
        # construct MatchFirst using '|' operator
        
        # watch the order of expressions to match
        number = Word(nums) | Combine(Word(nums) + '.' + Word(nums))
        print(number.searchString("123 3.1416 789")) #  Fail! -> [['123'], ['3'], ['1416'], ['789']]

        # put more selective expression first
        number = Combine(Word(nums) + '.' + Word(nums)) | Word(nums)
        print(number.searchString("123 3.1416 789")) #  Better -> [['123'], ['3.1416'], ['789']]
    Fcs:tt|�j||�|jr0tdd�|jD��|_nd|_dS)Ncss|]}|jVqdS)N)r[)r�r�rwrwrxr��
sz&MatchFirst.__init__.<locals>.<genexpr>T)r�rr�r4rr[)r�r4rg)rHrwrxr��
szMatchFirst.__init__Tc	Cs�d}d}x�|jD]�}y|j|||�}|Stk
r\}z|j|krL|}|j}WYdd}~Xqtk
r�t|�|kr�t|t|�|j|�}t|�}YqXqW|dk	r�|j|_|�nt||d|��dS)Nrrz no defined alternatives to matchrs)r4rtrr�r�r�rar�)	r�r-r�ror@rAr�r�r�rwrwrxr��
s$
zMatchFirst.parseImplcCst|t�rtj|�}|j|�S)N)rzr�r$rQr�)r�r�rwrwrx�__ior__�
s

zMatchFirst.__ior__cCs@t|d�r|jS|jdkr:ddjdd�|jD��d|_|jS)Nr�r<z | css|]}t|�VqdS)N)r�)r�r�rwrwrxr��
sz%MatchFirst.__str__.<locals>.<genexpr>r=)r�r�rUr�r4)r�rwrwrxr��
s


 zMatchFirst.__str__cCs0|dd�|g}x|jD]}|j|�qWdS)N)r4r�)r�r�r;r�rwrwrxr��
szMatchFirst.checkRecursion)F)T)
r�r�r�r�r�r�rDr�r�r�rwrw)rHrxr�
s
	cs<eZdZdZd�fdd�	Zddd�Zdd�Zd	d
�Z�ZS)
ram
    Requires all given C{ParseExpression}s to be found, but in any order.
    Expressions may be separated by whitespace.
    May be constructed using the C{'&'} operator.

    Example::
        color = oneOf("RED ORANGE YELLOW GREEN BLUE PURPLE BLACK WHITE BROWN")
        shape_type = oneOf("SQUARE CIRCLE TRIANGLE STAR HEXAGON OCTAGON")
        integer = Word(nums)
        shape_attr = "shape:" + shape_type("shape")
        posn_attr = "posn:" + Group(integer("x") + ',' + integer("y"))("posn")
        color_attr = "color:" + color("color")
        size_attr = "size:" + integer("size")

        # use Each (using operator '&') to accept attributes in any order 
        # (shape and posn are required, color and size are optional)
        shape_spec = shape_attr & posn_attr & Optional(color_attr) & Optional(size_attr)

        shape_spec.runTests('''
            shape: SQUARE color: BLACK posn: 100, 120
            shape: CIRCLE size: 50 color: BLUE posn: 50,80
            color:GREEN size:20 shape:TRIANGLE posn:20,40
            '''
            )
    prints::
        shape: SQUARE color: BLACK posn: 100, 120
        ['shape:', 'SQUARE', 'color:', 'BLACK', 'posn:', ['100', ',', '120']]
        - color: BLACK
        - posn: ['100', ',', '120']
          - x: 100
          - y: 120
        - shape: SQUARE


        shape: CIRCLE size: 50 color: BLUE posn: 50,80
        ['shape:', 'CIRCLE', 'size:', '50', 'color:', 'BLUE', 'posn:', ['50', ',', '80']]
        - color: BLUE
        - posn: ['50', ',', '80']
          - x: 50
          - y: 80
        - shape: CIRCLE
        - size: 50


        color: GREEN size: 20 shape: TRIANGLE posn: 20,40
        ['color:', 'GREEN', 'size:', '20', 'shape:', 'TRIANGLE', 'posn:', ['20', ',', '40']]
        - color: GREEN
        - posn: ['20', ',', '40']
          - x: 20
          - y: 40
        - shape: TRIANGLE
        - size: 20
    Tcs8tt|�j||�tdd�|jD��|_d|_d|_dS)Ncss|]}|jVqdS)N)r[)r�r�rwrwrxr�sz Each.__init__.<locals>.<genexpr>T)r�rr�r6r4r[rX�initExprGroups)r�r4rg)rHrwrxr�sz
Each.__init__c
s�|jr�tdd�|jD��|_dd�|jD�}dd�|jD�}|||_dd�|jD�|_dd�|jD�|_dd�|jD�|_|j|j7_d	|_|}|jdd�}|jdd��g}d
}	x�|	�rp|�|j|j}
g}x~|
D]v}y|j||�}Wn t	k
�r|j
|�Yq�X|j
|jjt|�|��||k�rD|j
|�q�|�kr�j
|�q�Wt|�t|
�kr�d	}	q�W|�r�djdd�|D��}
t	||d
|
��|�fdd�|jD�7}g}x*|D]"}|j|||�\}}|j
|��q�Wt|tg��}||fS)Ncss&|]}t|t�rt|j�|fVqdS)N)rzrr�r.)r�r�rwrwrxr�sz!Each.parseImpl.<locals>.<genexpr>cSsg|]}t|t�r|j�qSrw)rzrr.)r�r�rwrwrxr�sz"Each.parseImpl.<locals>.<listcomp>cSs"g|]}|jrt|t�r|�qSrw)r[rzr)r�r�rwrwrxr�scSsg|]}t|t�r|j�qSrw)rzr2r.)r�r�rwrwrxr� scSsg|]}t|t�r|j�qSrw)rzrr.)r�r�rwrwrxr�!scSs g|]}t|tttf�s|�qSrw)rzrr2r)r�r�rwrwrxr�"sFTz, css|]}t|�VqdS)N)r�)r�r�rwrwrxr�=sz*Missing one or more required elements (%s)cs$g|]}t|t�r|j�kr|�qSrw)rzrr.)r�r�)�tmpOptrwrxr�As)rEr�r4Zopt1mapZ	optionalsZmultioptionalsZ
multirequiredZrequiredr�rr�r�r��remover�r�rt�sumr")r�r-r�roZopt1Zopt2ZtmpLocZtmpReqdZ
matchOrderZkeepMatchingZtmpExprsZfailedr�Zmissingr:r�ZfinalResultsrw)rFrxr�sP



zEach.parseImplcCs@t|d�r|jS|jdkr:ddjdd�|jD��d|_|jS)Nr�r<z & css|]}t|�VqdS)N)r�)r�r�rwrwrxr�PszEach.__str__.<locals>.<genexpr>r=)r�r�rUr�r4)r�rwrwrxr�Ks


 zEach.__str__cCs0|dd�|g}x|jD]}|j|�qWdS)N)r4r�)r�r�r;r�rwrwrxr�TszEach.checkRecursion)T)T)	r�r�r�r�r�r�r�r�r�rwrw)rHrxr�
s
5
1	csleZdZdZd�fdd�	Zddd�Zdd	�Z�fd
d�Z�fdd
�Zdd�Z	gfdd�Z
�fdd�Z�ZS)rza
    Abstract subclass of C{ParserElement}, for combining and post-processing parsed tokens.
    Fcs�tt|�j|�t|t�r@ttjt�r2tj|�}ntjt	|��}||_
d|_|dk	r�|j|_|j
|_
|j|j�|j|_|j|_|j|_|jj|j�dS)N)r�rr�rzr��
issubclassr$rQr,rr.rUr`r[r�rYrXrWrer]r�)r�r.rg)rHrwrxr�^s
zParseElementEnhance.__init__TcCs2|jdk	r|jj|||dd�Std||j|��dS)NF)rpr�)r.rtrra)r�r-r�rorwrwrxr�ps
zParseElementEnhance.parseImplcCs*d|_|jj�|_|jdk	r&|jj�|S)NF)rXr.r�r�)r�rwrwrxr�vs


z#ParseElementEnhance.leaveWhitespacecsrt|t�rB||jkrntt|�j|�|jdk	rn|jj|jd�n,tt|�j|�|jdk	rn|jj|jd�|S)Nrrrsrs)rzr+r]r�rr�r.)r�r�)rHrwrxr�}s



zParseElementEnhance.ignorecs&tt|�j�|jdk	r"|jj�|S)N)r�rr�r.)r�)rHrwrxr��s

zParseElementEnhance.streamlinecCsB||krt||g��|dd�|g}|jdk	r>|jj|�dS)N)r&r.r�)r�r�r;rwrwrxr��s

z"ParseElementEnhance.checkRecursioncCs6|dd�|g}|jdk	r(|jj|�|jg�dS)N)r.r�r�)r�r�r7rwrwrxr��s
zParseElementEnhance.validatecsVytt|�j�Stk
r"YnX|jdkrP|jdk	rPd|jjt|j�f|_|jS)Nz%s:(%s))	r�rr�rKrUr.rHr�r�)r�)rHrwrxr��szParseElementEnhance.__str__)F)T)
r�r�r�r�r�r�r�r�r�r�r�r�r�rwrw)rHrxrZs
cs*eZdZdZ�fdd�Zddd�Z�ZS)ra�
    Lookahead matching of the given parse expression.  C{FollowedBy}
    does I{not} advance the parsing position within the input string, it only
    verifies that the specified parse expression matches at the current
    position.  C{FollowedBy} always returns a null token list.

    Example::
        # use FollowedBy to match a label only if it is followed by a ':'
        data_word = Word(alphas)
        label = data_word + FollowedBy(':')
        attr_expr = Group(label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join))
        
        OneOrMore(attr_expr).parseString("shape: SQUARE color: BLACK posn: upper left").pprint()
    prints::
        [['shape', 'SQUARE'], ['color', 'BLACK'], ['posn', 'upper left']]
    cstt|�j|�d|_dS)NT)r�rr�r[)r�r.)rHrwrxr��szFollowedBy.__init__TcCs|jj||�|gfS)N)r.r�)r�r-r�rorwrwrxr��szFollowedBy.parseImpl)T)r�r�r�r�r�r�r�rwrw)rHrxr�scs2eZdZdZ�fdd�Zd	dd�Zdd�Z�ZS)
ra�
    Lookahead to disallow matching with the given parse expression.  C{NotAny}
    does I{not} advance the parsing position within the input string, it only
    verifies that the specified parse expression does I{not} match at the current
    position.  Also, C{NotAny} does I{not} skip over leading whitespace. C{NotAny}
    always returns a null token list.  May be constructed using the '~' operator.

    Example::
        
    cs0tt|�j|�d|_d|_dt|j�|_dS)NFTzFound unwanted token, )r�rr�rXr[r�r.ra)r�r.)rHrwrxr��szNotAny.__init__TcCs&|jj||�rt|||j|��|gfS)N)r.r�rra)r�r-r�rorwrwrxr��szNotAny.parseImplcCs4t|d�r|jS|jdkr.dt|j�d|_|jS)Nr�z~{r=)r�r�rUr�r.)r�rwrwrxr��s


zNotAny.__str__)T)r�r�r�r�r�r�r�r�rwrw)rHrxr�s

cs(eZdZd�fdd�	Zddd�Z�ZS)	�_MultipleMatchNcsFtt|�j|�d|_|}t|t�r.tj|�}|dk	r<|nd|_dS)NT)	r�rJr�rWrzr�r$rQ�	not_ender)r�r.�stopOnZender)rHrwrxr��s

z_MultipleMatch.__init__TcCs�|jj}|j}|jdk	}|r$|jj}|r2|||�||||dd�\}}yZ|j}	xJ|rb|||�|	rr|||�}
n|}
|||
|�\}}|s�|j�rT||7}qTWWnttfk
r�YnX||fS)NF)rp)	r.rtr�rKr�r]r�rr�)r�r-r�roZself_expr_parseZself_skip_ignorablesZcheck_enderZ
try_not_enderr�ZhasIgnoreExprsr�Z	tmptokensrwrwrxr��s,



z_MultipleMatch.parseImpl)N)T)r�r�r�r�r�r�rwrw)rHrxrJ�srJc@seZdZdZdd�ZdS)ra�
    Repetition of one or more of the given expression.
    
    Parameters:
     - expr - expression that must match one or more times
     - stopOn - (default=C{None}) - expression for a terminating sentinel
          (only required if the sentinel would ordinarily match the repetition 
          expression)          

    Example::
        data_word = Word(alphas)
        label = data_word + FollowedBy(':')
        attr_expr = Group(label + Suppress(':') + OneOrMore(data_word).setParseAction(' '.join))

        text = "shape: SQUARE posn: upper left color: BLACK"
        OneOrMore(attr_expr).parseString(text).pprint()  # Fail! read 'color' as data instead of next label -> [['shape', 'SQUARE color']]

        # use stopOn attribute for OneOrMore to avoid reading label string as part of the data
        attr_expr = Group(label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join))
        OneOrMore(attr_expr).parseString(text).pprint() # Better -> [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'BLACK']]
        
        # could also be written as
        (attr_expr * (1,)).parseString(text).pprint()
    cCs4t|d�r|jS|jdkr.dt|j�d|_|jS)Nr�r<z}...)r�r�rUr�r.)r�rwrwrxr�!s


zOneOrMore.__str__N)r�r�r�r�r�rwrwrwrxrscs8eZdZdZd
�fdd�	Zd�fdd�	Zdd	�Z�ZS)r2aw
    Optional repetition of zero or more of the given expression.
    
    Parameters:
     - expr - expression that must match zero or more times
     - stopOn - (default=C{None}) - expression for a terminating sentinel
          (only required if the sentinel would ordinarily match the repetition 
          expression)          

    Example: similar to L{OneOrMore}
    Ncstt|�j||d�d|_dS)N)rLT)r�r2r�r[)r�r.rL)rHrwrxr�6szZeroOrMore.__init__Tcs6ytt|�j|||�Sttfk
r0|gfSXdS)N)r�r2r�rr�)r�r-r�ro)rHrwrxr�:szZeroOrMore.parseImplcCs4t|d�r|jS|jdkr.dt|j�d|_|jS)Nr�rz]...)r�r�rUr�r.)r�rwrwrxr�@s


zZeroOrMore.__str__)N)T)r�r�r�r�r�r�r�r�rwrw)rHrxr2*sc@s eZdZdd�ZeZdd�ZdS)�
_NullTokencCsdS)NFrw)r�rwrwrxr�Jsz_NullToken.__bool__cCsdS)Nr�rw)r�rwrwrxr�Msz_NullToken.__str__N)r�r�r�r�r'r�rwrwrwrxrMIsrMcs6eZdZdZef�fdd�	Zd	dd�Zdd�Z�ZS)
raa
    Optional matching of the given expression.

    Parameters:
     - expr - expression that must match zero or more times
     - default (optional) - value to be returned if the optional expression is not found.

    Example::
        # US postal code can be a 5-digit zip, plus optional 4-digit qualifier
        zip = Combine(Word(nums, exact=5) + Optional('-' + Word(nums, exact=4)))
        zip.runTests('''
            # traditional ZIP code
            12345
            
            # ZIP+4 form
            12101-0001
            
            # invalid ZIP
            98765-
            ''')
    prints::
        # traditional ZIP code
        12345
        ['12345']

        # ZIP+4 form
        12101-0001
        ['12101-0001']

        # invalid ZIP
        98765-
             ^
        FAIL: Expected end of text (at char 5), (line:1, col:6)
    cs.tt|�j|dd�|jj|_||_d|_dS)NF)rgT)r�rr�r.rWr�r[)r�r.r�)rHrwrxr�ts
zOptional.__init__TcCszy|jj|||dd�\}}WnTttfk
rp|jtk	rh|jjr^t|jg�}|j||jj<ql|jg}ng}YnX||fS)NF)rp)r.rtrr�r��_optionalNotMatchedrVr")r�r-r�ror�rwrwrxr�zs


zOptional.parseImplcCs4t|d�r|jS|jdkr.dt|j�d|_|jS)Nr�rr	)r�r�rUr�r.)r�rwrwrxr��s


zOptional.__str__)T)	r�r�r�r�rNr�r�r�r�rwrw)rHrxrQs"
cs,eZdZdZd	�fdd�	Zd
dd�Z�ZS)r(a�	
    Token for skipping over all undefined text until the matched expression is found.

    Parameters:
     - expr - target expression marking the end of the data to be skipped
     - include - (default=C{False}) if True, the target expression is also parsed 
          (the skipped text and target expression are returned as a 2-element list).
     - ignore - (default=C{None}) used to define grammars (typically quoted strings and 
          comments) that might contain false matches to the target expression
     - failOn - (default=C{None}) define expressions that are not allowed to be 
          included in the skipped test; if found before the target expression is found, 
          the SkipTo is not a match

    Example::
        report = '''
            Outstanding Issues Report - 1 Jan 2000

               # | Severity | Description                               |  Days Open
            -----+----------+-------------------------------------------+-----------
             101 | Critical | Intermittent system crash                 |          6
              94 | Cosmetic | Spelling error on Login ('log|n')         |         14
              79 | Minor    | System slow when running too many reports |         47
            '''
        integer = Word(nums)
        SEP = Suppress('|')
        # use SkipTo to simply match everything up until the next SEP
        # - ignore quoted strings, so that a '|' character inside a quoted string does not match
        # - parse action will call token.strip() for each matched token, i.e., the description body
        string_data = SkipTo(SEP, ignore=quotedString)
        string_data.setParseAction(tokenMap(str.strip))
        ticket_expr = (integer("issue_num") + SEP 
                      + string_data("sev") + SEP 
                      + string_data("desc") + SEP 
                      + integer("days_open"))
        
        for tkt in ticket_expr.searchString(report):
            print tkt.dump()
    prints::
        ['101', 'Critical', 'Intermittent system crash', '6']
        - days_open: 6
        - desc: Intermittent system crash
        - issue_num: 101
        - sev: Critical
        ['94', 'Cosmetic', "Spelling error on Login ('log|n')", '14']
        - days_open: 14
        - desc: Spelling error on Login ('log|n')
        - issue_num: 94
        - sev: Cosmetic
        ['79', 'Minor', 'System slow when running too many reports', '47']
        - days_open: 47
        - desc: System slow when running too many reports
        - issue_num: 79
        - sev: Minor
    FNcs`tt|�j|�||_d|_d|_||_d|_t|t	�rFt
j|�|_n||_dt
|j�|_dS)NTFzNo match found for )r�r(r��
ignoreExprr[r`�includeMatchr�rzr�r$rQ�failOnr�r.ra)r�r��includer�rQ)rHrwrxr��s
zSkipTo.__init__TcCs,|}t|�}|j}|jj}|jdk	r,|jjnd}|jdk	rB|jjnd}	|}
x�|
|kr�|dk	rh|||
�rhP|	dk	r�x*y|	||
�}
Wqrtk
r�PYqrXqrWy|||
ddd�Wn tt	fk
r�|
d7}
YqLXPqLWt|||j
|��|
}|||�}t|�}|j�r$||||dd�\}}
||
7}||fS)NF)rorprr)rp)
r�r.rtrQr�rOr�rrr�rar"rP)r�r-r�ror0r�r.Z
expr_parseZself_failOn_canParseNextZself_ignoreExpr_tryParseZtmplocZskiptextZ
skipresultr�rwrwrxr��s<

zSkipTo.parseImpl)FNN)T)r�r�r�r�r�r�r�rwrw)rHrxr(�s6
csbeZdZdZd�fdd�	Zdd�Zdd�Zd	d
�Zdd�Zgfd
d�Z	dd�Z
�fdd�Z�ZS)raK
    Forward declaration of an expression to be defined later -
    used for recursive grammars, such as algebraic infix notation.
    When the expression is known, it is assigned to the C{Forward} variable using the '<<' operator.

    Note: take care when assigning to C{Forward} not to overlook precedence of operators.
    Specifically, '|' has a lower precedence than '<<', so that::
        fwdExpr << a | b | c
    will actually be evaluated as::
        (fwdExpr << a) | b | c
    thereby leaving b and c out as parseable alternatives.  It is recommended that you
    explicitly group the values inserted into the C{Forward}::
        fwdExpr << (a | b | c)
    Converting to use the '<<=' operator instead will avoid this problem.

    See L{ParseResults.pprint} for an example of a recursive parser created using
    C{Forward}.
    Ncstt|�j|dd�dS)NF)rg)r�rr�)r�r�)rHrwrxr�szForward.__init__cCsjt|t�rtj|�}||_d|_|jj|_|jj|_|j|jj	�|jj
|_
|jj|_|jj
|jj�|S)N)rzr�r$rQr.rUr`r[r�rYrXrWr]r�)r�r�rwrwrx�
__lshift__s





zForward.__lshift__cCs||>S)Nrw)r�r�rwrwrx�__ilshift__'szForward.__ilshift__cCs
d|_|S)NF)rX)r�rwrwrxr�*szForward.leaveWhitespacecCs$|js d|_|jdk	r |jj�|S)NT)r_r.r�)r�rwrwrxr�.s


zForward.streamlinecCs>||kr0|dd�|g}|jdk	r0|jj|�|jg�dS)N)r.r�r�)r�r�r7rwrwrxr�5s

zForward.validatecCs>t|d�r|jS|jjdSd}Wd|j|_X|jjd|S)Nr�z: ...�Nonez: )r�r�rHr�Z_revertClass�_ForwardNoRecurser.r�)r�Z	retStringrwrwrxr�<s

zForward.__str__cs.|jdk	rtt|�j�St�}||K}|SdS)N)r.r�rr�)r�r�)rHrwrxr�Ms

zForward.copy)N)
r�r�r�r�r�rSrTr�r�r�r�r�r�rwrw)rHrxrs
c@seZdZdd�ZdS)rVcCsdS)Nz...rw)r�rwrwrxr�Vsz_ForwardNoRecurse.__str__N)r�r�r�r�rwrwrwrxrVUsrVcs"eZdZdZd�fdd�	Z�ZS)r-zQ
    Abstract subclass of C{ParseExpression}, for converting parsed results.
    Fcstt|�j|�d|_dS)NF)r�r-r�rW)r�r.rg)rHrwrxr�]szTokenConverter.__init__)F)r�r�r�r�r�r�rwrw)rHrxr-Yscs6eZdZdZd
�fdd�	Z�fdd�Zdd	�Z�ZS)r
a�
    Converter to concatenate all matching tokens to a single string.
    By default, the matching patterns must also be contiguous in the input string;
    this can be disabled by specifying C{'adjacent=False'} in the constructor.

    Example::
        real = Word(nums) + '.' + Word(nums)
        print(real.parseString('3.1416')) # -> ['3', '.', '1416']
        # will also erroneously match the following
        print(real.parseString('3. 1416')) # -> ['3', '.', '1416']

        real = Combine(Word(nums) + '.' + Word(nums))
        print(real.parseString('3.1416')) # -> ['3.1416']
        # no match when there are internal spaces
        print(real.parseString('3. 1416')) # -> Exception: Expected W:(0123...)
    r�Tcs8tt|�j|�|r|j�||_d|_||_d|_dS)NT)r�r
r�r��adjacentrX�
joinStringre)r�r.rXrW)rHrwrxr�rszCombine.__init__cs(|jrtj||�ntt|�j|�|S)N)rWr$r�r�r
)r�r�)rHrwrxr�|szCombine.ignorecCsP|j�}|dd�=|tdj|j|j��g|jd�7}|jrH|j�rH|gS|SdS)Nr�)r�)r�r"r�r
rXrbrVr�)r�r-r�r�ZretToksrwrwrxr��s
"zCombine.postParse)r�T)r�r�r�r�r�r�r�r�rwrw)rHrxr
as
cs(eZdZdZ�fdd�Zdd�Z�ZS)ra�
    Converter to return the matched tokens as a list - useful for returning tokens of C{L{ZeroOrMore}} and C{L{OneOrMore}} expressions.

    Example::
        ident = Word(alphas)
        num = Word(nums)
        term = ident | num
        func = ident + Optional(delimitedList(term))
        print(func.parseString("fn a,b,100"))  # -> ['fn', 'a', 'b', '100']

        func = ident + Group(Optional(delimitedList(term)))
        print(func.parseString("fn a,b,100"))  # -> ['fn', ['a', 'b', '100']]
    cstt|�j|�d|_dS)NT)r�rr�rW)r�r.)rHrwrxr��szGroup.__init__cCs|gS)Nrw)r�r-r�r�rwrwrxr��szGroup.postParse)r�r�r�r�r�r�r�rwrw)rHrxr�s
cs(eZdZdZ�fdd�Zdd�Z�ZS)raW
    Converter to return a repetitive expression as a list, but also as a dictionary.
    Each element can also be referenced using the first token in the expression as its key.
    Useful for tabular report scraping when the first column can be used as a item key.

    Example::
        data_word = Word(alphas)
        label = data_word + FollowedBy(':')
        attr_expr = Group(label + Suppress(':') + OneOrMore(data_word).setParseAction(' '.join))

        text = "shape: SQUARE posn: upper left color: light blue texture: burlap"
        attr_expr = (label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join))
        
        # print attributes as plain groups
        print(OneOrMore(attr_expr).parseString(text).dump())
        
        # instead of OneOrMore(expr), parse using Dict(OneOrMore(Group(expr))) - Dict will auto-assign names
        result = Dict(OneOrMore(Group(attr_expr))).parseString(text)
        print(result.dump())
        
        # access named fields as dict entries, or output as dict
        print(result['shape'])        
        print(result.asDict())
    prints::
        ['shape', 'SQUARE', 'posn', 'upper left', 'color', 'light blue', 'texture', 'burlap']

        [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'light blue'], ['texture', 'burlap']]
        - color: light blue
        - posn: upper left
        - shape: SQUARE
        - texture: burlap
        SQUARE
        {'color': 'light blue', 'posn': 'upper left', 'texture': 'burlap', 'shape': 'SQUARE'}
    See more examples at L{ParseResults} of accessing fields by results name.
    cstt|�j|�d|_dS)NT)r�rr�rW)r�r.)rHrwrxr��sz
Dict.__init__cCs�x�t|�D]�\}}t|�dkr q
|d}t|t�rBt|d�j�}t|�dkr^td|�||<q
t|�dkr�t|dt�r�t|d|�||<q
|j�}|d=t|�dks�t|t�r�|j	�r�t||�||<q
t|d|�||<q
W|j
r�|gS|SdS)Nrrrr�rq)r�r�rzrur�r�r�r"r�r�rV)r�r-r�r�r��tokZikeyZ	dictvaluerwrwrxr��s$
zDict.postParse)r�r�r�r�r�r�r�rwrw)rHrxr�s#c@s eZdZdZdd�Zdd�ZdS)r+aV
    Converter for ignoring the results of a parsed expression.

    Example::
        source = "a, b, c,d"
        wd = Word(alphas)
        wd_list1 = wd + ZeroOrMore(',' + wd)
        print(wd_list1.parseString(source))

        # often, delimiters that are useful during parsing are just in the
        # way afterward - use Suppress to keep them out of the parsed output
        wd_list2 = wd + ZeroOrMore(Suppress(',') + wd)
        print(wd_list2.parseString(source))
    prints::
        ['a', ',', 'b', ',', 'c', ',', 'd']
        ['a', 'b', 'c', 'd']
    (See also L{delimitedList}.)
    cCsgS)Nrw)r�r-r�r�rwrwrxr��szSuppress.postParsecCs|S)Nrw)r�rwrwrxr��szSuppress.suppressN)r�r�r�r�r�r�rwrwrwrxr+�sc@s(eZdZdZdd�Zdd�Zdd�ZdS)	rzI
    Wrapper for parse actions, to ensure they are only called once.
    cCst|�|_d|_dS)NF)rM�callable�called)r�Z
methodCallrwrwrxr�s
zOnlyOnce.__init__cCs.|js|j|||�}d|_|St||d��dS)NTr�)r[rZr)r�r�r5rvr�rwrwrxr�s
zOnlyOnce.__call__cCs
d|_dS)NF)r[)r�rwrwrx�reset
szOnlyOnce.resetN)r�r�r�r�r�r�r\rwrwrwrxr�scs:t����fdd�}y�j|_Wntk
r4YnX|S)as
    Decorator for debugging parse actions. 
    
    When the parse action is called, this decorator will print C{">> entering I{method-name}(line:I{current_source_line}, I{parse_location}, I{matched_tokens})".}
    When the parse action completes, the decorator will print C{"<<"} followed by the returned value, or any exception that the parse action raised.

    Example::
        wd = Word(alphas)

        @traceParseAction
        def remove_duplicate_chars(tokens):
            return ''.join(sorted(set(''.join(tokens)))

        wds = OneOrMore(wd).setParseAction(remove_duplicate_chars)
        print(wds.parseString("slkdjs sld sldd sdlf sdljf"))
    prints::
        >>entering remove_duplicate_chars(line: 'slkdjs sld sldd sdlf sdljf', 0, (['slkdjs', 'sld', 'sldd', 'sdlf', 'sdljf'], {}))
        <<leaving remove_duplicate_chars (ret: 'dfjkls')
        ['dfjkls']
    cs��j}|dd�\}}}t|�dkr8|djjd|}tjjd|t||�||f�y�|�}Wn8tk
r�}ztjjd||f��WYdd}~XnXtjjd||f�|S)Nror�.z">>entering %s(line: '%s', %d, %r)
z<<leaving %s (exception: %s)
z<<leaving %s (ret: %r)
r9)r�r�rHr~�stderr�writerGrK)ZpaArgsZthisFuncr�r5rvr�r3)r�rwrx�z#sztraceParseAction.<locals>.z)rMr�r�)r�r`rw)r�rxrb
s
�,FcCs`t|�dt|�dt|�d}|rBt|t||��j|�S|tt|�|�j|�SdS)a�
    Helper to define a delimited list of expressions - the delimiter defaults to ','.
    By default, the list elements and delimiters can have intervening whitespace, and
    comments, but this can be overridden by passing C{combine=True} in the constructor.
    If C{combine} is set to C{True}, the matching tokens are returned as a single token
    string, with the delimiters included; otherwise, the matching tokens are returned
    as a list of tokens, with the delimiters suppressed.

    Example::
        delimitedList(Word(alphas)).parseString("aa,bb,cc") # -> ['aa', 'bb', 'cc']
        delimitedList(Word(hexnums), delim=':', combine=True).parseString("AA:BB:CC:DD:EE") # -> ['AA:BB:CC:DD:EE']
    z [r�z]...N)r�r
r2rir+)r.Zdelim�combineZdlNamerwrwrxr@9s
$csjt����fdd�}|dkr0tt�jdd��}n|j�}|jd�|j|dd�|�jd	t��d
�S)a:
    Helper to define a counted list of expressions.
    This helper defines a pattern of the form::
        integer expr expr expr...
    where the leading integer tells how many expr expressions follow.
    The matched tokens returns the array of expr tokens as a list - the leading count token is suppressed.
    
    If C{intExpr} is specified, it should be a pyparsing expression that produces an integer value.

    Example::
        countedArray(Word(alphas)).parseString('2 ab cd ef')  # -> ['ab', 'cd']

        # in this parser, the leading integer value is given in binary,
        # '10' indicating that 2 values are in the array
        binaryConstant = Word('01').setParseAction(lambda t: int(t[0], 2))
        countedArray(Word(alphas), intExpr=binaryConstant).parseString('10 ab cd ef')  # -> ['ab', 'cd']
    cs.|d}�|r tt�g|��p&tt�>gS)Nr)rrrC)r�r5rvr�)�	arrayExprr.rwrx�countFieldParseAction_s"z+countedArray.<locals>.countFieldParseActionNcSst|d�S)Nr)ru)rvrwrwrxrydszcountedArray.<locals>.<lambda>ZarrayLenT)rfz(len) z...)rr/rRr�r�rirxr�)r.ZintExprrdrw)rcr.rxr<Ls
cCs:g}x0|D](}t|t�r(|jt|��q
|j|�q
W|S)N)rzr�r�r�r�)�Lr�r�rwrwrxr�ks

r�cs6t���fdd�}|j|dd��jdt|���S)a*
    Helper to define an expression that is indirectly defined from
    the tokens matched in a previous expression, that is, it looks
    for a 'repeat' of a previous expression.  For example::
        first = Word(nums)
        second = matchPreviousLiteral(first)
        matchExpr = first + ":" + second
    will match C{"1:1"}, but not C{"1:2"}.  Because this matches a
    previous literal, will also match the leading C{"1:1"} in C{"1:10"}.
    If this is not desired, use C{matchPreviousExpr}.
    Do I{not} use with packrat parsing enabled.
    csP|rBt|�dkr�|d>qLt|j��}�tdd�|D��>n
�t�>dS)Nrrrcss|]}t|�VqdS)N)r)r��ttrwrwrxr��szDmatchPreviousLiteral.<locals>.copyTokenToRepeater.<locals>.<genexpr>)r�r�r�rr
)r�r5rvZtflat)�reprwrx�copyTokenToRepeater�sz1matchPreviousLiteral.<locals>.copyTokenToRepeaterT)rfz(prev) )rrxrir�)r.rhrw)rgrxrOts


csFt��|j�}�|K��fdd�}|j|dd��jdt|���S)aS
    Helper to define an expression that is indirectly defined from
    the tokens matched in a previous expression, that is, it looks
    for a 'repeat' of a previous expression.  For example::
        first = Word(nums)
        second = matchPreviousExpr(first)
        matchExpr = first + ":" + second
    will match C{"1:1"}, but not C{"1:2"}.  Because this matches by
    expressions, will I{not} match the leading C{"1:1"} in C{"1:10"};
    the expressions are evaluated first, and then compared, so
    C{"1"} is compared with C{"10"}.
    Do I{not} use with packrat parsing enabled.
    cs*t|j����fdd�}�j|dd�dS)Ncs$t|j��}|�kr tddd��dS)Nr�r)r�r�r)r�r5rvZtheseTokens)�matchTokensrwrx�mustMatchTheseTokens�szLmatchPreviousExpr.<locals>.copyTokenToRepeater.<locals>.mustMatchTheseTokensT)rf)r�r�r�)r�r5rvrj)rg)rirxrh�sz.matchPreviousExpr.<locals>.copyTokenToRepeaterT)rfz(prev) )rr�rxrir�)r.Ze2rhrw)rgrxrN�scCs>xdD]}|j|t|�}qW|jdd�}|jdd�}t|�S)Nz\^-]rz\nr(z\t)r��_bslashr�)r�r�rwrwrxr�s

rTc
s�|rdd�}dd�}t�ndd�}dd�}t�g}t|t�rF|j�}n&t|tj�r\t|�}ntj	dt
dd�|svt�Sd	}x�|t|�d
k�r||}xnt
||d
d��D]N\}}	||	|�r�|||d
=Pq�|||	�r�|||d
=|j||	�|	}Pq�W|d
7}q|W|�r�|�r�yht|�tdj|��k�rZtd
djdd�|D���jdj|��Stdjdd�|D���jdj|��SWn&tk
�r�tj	dt
dd�YnXt�fdd�|D��jdj|��S)a�
    Helper to quickly define a set of alternative Literals, and makes sure to do
    longest-first testing when there is a conflict, regardless of the input order,
    but returns a C{L{MatchFirst}} for best performance.

    Parameters:
     - strs - a string of space-delimited literals, or a collection of string literals
     - caseless - (default=C{False}) - treat all literals as caseless
     - useRegex - (default=C{True}) - as an optimization, will generate a Regex
          object; otherwise, will generate a C{MatchFirst} object (if C{caseless=True}, or
          if creating a C{Regex} raises an exception)

    Example::
        comp_oper = oneOf("< = > <= >= !=")
        var = Word(alphas)
        number = Word(nums)
        term = var | number
        comparison_expr = term + comp_oper + term
        print(comparison_expr.searchString("B = 12  AA=23 B<=AA AA>12"))
    prints::
        [['B', '=', '12'], ['AA', '=', '23'], ['B', '<=', 'AA'], ['AA', '>', '12']]
    cSs|j�|j�kS)N)r�)r�brwrwrxry�szoneOf.<locals>.<lambda>cSs|j�j|j��S)N)r�r�)rrlrwrwrxry�scSs||kS)Nrw)rrlrwrwrxry�scSs
|j|�S)N)r�)rrlrwrwrxry�sz6Invalid argument to oneOf, expected string or iterablerq)r�rrrNr�z[%s]css|]}t|�VqdS)N)r)r��symrwrwrxr��szoneOf.<locals>.<genexpr>z | �|css|]}tj|�VqdS)N)rdr	)r�rmrwrwrxr��sz7Exception creating Regex for oneOf, building MatchFirstc3s|]}�|�VqdS)Nrw)r�rm)�parseElementClassrwrxr��s)rrrzr�r�r�r5r�r�r�r�rr�r�r�r�r'rirKr)
Zstrsr�ZuseRegexZisequalZmasksZsymbolsr�Zcurr�r�rw)rorxrS�sL





((cCsttt||���S)a�
    Helper to easily and clearly define a dictionary by specifying the respective patterns
    for the key and value.  Takes care of defining the C{L{Dict}}, C{L{ZeroOrMore}}, and C{L{Group}} tokens
    in the proper order.  The key pattern can include delimiting markers or punctuation,
    as long as they are suppressed, thereby leaving the significant key text.  The value
    pattern can include named results, so that the C{Dict} results can include named token
    fields.

    Example::
        text = "shape: SQUARE posn: upper left color: light blue texture: burlap"
        attr_expr = (label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join))
        print(OneOrMore(attr_expr).parseString(text).dump())
        
        attr_label = label
        attr_value = Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)

        # similar to Dict, but simpler call format
        result = dictOf(attr_label, attr_value).parseString(text)
        print(result.dump())
        print(result['shape'])
        print(result.shape)  # object attribute access works too
        print(result.asDict())
    prints::
        [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'light blue'], ['texture', 'burlap']]
        - color: light blue
        - posn: upper left
        - shape: SQUARE
        - texture: burlap
        SQUARE
        SQUARE
        {'color': 'light blue', 'shape': 'SQUARE', 'posn': 'upper left', 'texture': 'burlap'}
    )rr2r)r�r�rwrwrxrA�s!cCs^t�jdd��}|j�}d|_|d�||d�}|r@dd�}ndd�}|j|�|j|_|S)	a�
    Helper to return the original, untokenized text for a given expression.  Useful to
    restore the parsed fields of an HTML start tag into the raw tag text itself, or to
    revert separate tokens with intervening whitespace back to the original matching
    input text. By default, returns astring containing the original parsed text.  
       
    If the optional C{asString} argument is passed as C{False}, then the return value is a 
    C{L{ParseResults}} containing any results names that were originally matched, and a 
    single token containing the original matched text from the input string.  So if 
    the expression passed to C{L{originalTextFor}} contains expressions with defined
    results names, you must set C{asString} to C{False} if you want to preserve those
    results name values.

    Example::
        src = "this is test <b> bold <i>text</i> </b> normal text "
        for tag in ("b","i"):
            opener,closer = makeHTMLTags(tag)
            patt = originalTextFor(opener + SkipTo(closer) + closer)
            print(patt.searchString(src)[0])
    prints::
        ['<b> bold <i>text</i> </b>']
        ['<i>text</i>']
    cSs|S)Nrw)r�r�rvrwrwrxry8sz!originalTextFor.<locals>.<lambda>F�_original_start�
_original_endcSs||j|j�S)N)rprq)r�r5rvrwrwrxry=scSs&||jd�|jd��g|dd�<dS)Nrprq)r�)r�r5rvrwrwrx�extractText?sz$originalTextFor.<locals>.extractText)r
r�r�rer])r.ZasStringZ	locMarkerZendlocMarker�	matchExprrrrwrwrxrg s

cCst|�jdd��S)zp
    Helper to undo pyparsing's default grouping of And expressions, even
    if all but one are non-empty.
    cSs|dS)Nrrw)rvrwrwrxryJszungroup.<locals>.<lambda>)r-r�)r.rwrwrxrhEscCs4t�jdd��}t|d�|d�|j�j�d��S)a�
    Helper to decorate a returned token with its starting and ending locations in the input string.
    This helper adds the following results names:
     - locn_start = location where matched expression begins
     - locn_end = location where matched expression ends
     - value = the actual parsed results

    Be careful if the input text contains C{<TAB>} characters, you may want to call
    C{L{ParserElement.parseWithTabs}}

    Example::
        wd = Word(alphas)
        for match in locatedExpr(wd).searchString("ljsdf123lksdjjf123lkkjj1222"):
            print(match)
    prints::
        [[0, 'ljsdf', 5]]
        [[8, 'lksdjjf', 15]]
        [[18, 'lkkjj', 23]]
    cSs|S)Nrw)r�r5rvrwrwrxry`szlocatedExpr.<locals>.<lambda>Z
locn_startr�Zlocn_end)r
r�rr�r�)r.ZlocatorrwrwrxrjLsz\[]-*.$+^?()~ )r
cCs|ddS)Nrrrrw)r�r5rvrwrwrxryksryz\\0?[xX][0-9a-fA-F]+cCstt|djd�d��S)Nrz\0x�)�unichrru�lstrip)r�r5rvrwrwrxrylsz	\\0[0-7]+cCstt|ddd�d��S)Nrrr�)ruru)r�r5rvrwrwrxrymsz\])r�r
z\wr8rr�Znegate�bodyr	csBdd��y dj�fdd�tj|�jD��Stk
r<dSXdS)a�
    Helper to easily define string ranges for use in Word construction.  Borrows
    syntax from regexp '[]' string range definitions::
        srange("[0-9]")   -> "0123456789"
        srange("[a-z]")   -> "abcdefghijklmnopqrstuvwxyz"
        srange("[a-z$_]") -> "abcdefghijklmnopqrstuvwxyz$_"
    The input string must be enclosed in []'s, and the returned string is the expanded
    character set joined into a single string.
    The values enclosed in the []'s may be:
     - a single character
     - an escaped character with a leading backslash (such as C{\-} or C{\]})
     - an escaped hex character with a leading C{'\x'} (C{\x21}, which is a C{'!'} character) 
         (C{\0x##} is also supported for backwards compatibility) 
     - an escaped octal character with a leading C{'\0'} (C{\041}, which is a C{'!'} character)
     - a range of any of the above, separated by a dash (C{'a-z'}, etc.)
     - any combination of the above (C{'aeiouy'}, C{'a-zA-Z0-9_$'}, etc.)
    cSs<t|t�s|Sdjdd�tt|d�t|d�d�D��S)Nr�css|]}t|�VqdS)N)ru)r�r�rwrwrxr��sz+srange.<locals>.<lambda>.<locals>.<genexpr>rrr)rzr"r�r��ord)�prwrwrxry�szsrange.<locals>.<lambda>r�c3s|]}�|�VqdS)Nrw)r��part)�	_expandedrwrxr��szsrange.<locals>.<genexpr>N)r��_reBracketExprr�rxrK)r�rw)r|rxr_rs
 cs�fdd�}|S)zt
    Helper method for defining parse actions that require matching at a specific
    column in the input text.
    cs"t||��krt||d���dS)Nzmatched token not at column %d)r9r)r)Zlocnr1)r�rwrx�	verifyCol�sz!matchOnlyAtCol.<locals>.verifyColrw)r�r~rw)r�rxrM�scs�fdd�S)a�
    Helper method for common parse actions that simply return a literal value.  Especially
    useful when used with C{L{transformString<ParserElement.transformString>}()}.

    Example::
        num = Word(nums).setParseAction(lambda toks: int(toks[0]))
        na = oneOf("N/A NA").setParseAction(replaceWith(math.nan))
        term = na | num
        
        OneOrMore(term).parseString("324 234 N/A 234") # -> [324, 234, nan, 234]
    cs�gS)Nrw)r�r5rv)�replStrrwrxry�szreplaceWith.<locals>.<lambda>rw)rrw)rrxr\�scCs|ddd�S)a
    Helper parse action for removing quotation marks from parsed quoted strings.

    Example::
        # by default, quotation marks are included in parsed results
        quotedString.parseString("'Now is the Winter of our Discontent'") # -> ["'Now is the Winter of our Discontent'"]

        # use removeQuotes to strip quotation marks from parsed results
        quotedString.setParseAction(removeQuotes)
        quotedString.parseString("'Now is the Winter of our Discontent'") # -> ["Now is the Winter of our Discontent"]
    rrrrsrw)r�r5rvrwrwrxrZ�scsN��fdd�}yt�dt�d�j�}Wntk
rBt��}YnX||_|S)aG
    Helper to define a parse action by mapping a function to all elements of a ParseResults list.If any additional 
    args are passed, they are forwarded to the given function as additional arguments after
    the token, as in C{hex_integer = Word(hexnums).setParseAction(tokenMap(int, 16))}, which will convert the
    parsed data to an integer using base 16.

    Example (compare the last to example in L{ParserElement.transformString}::
        hex_ints = OneOrMore(Word(hexnums)).setParseAction(tokenMap(int, 16))
        hex_ints.runTests('''
            00 11 22 aa FF 0a 0d 1a
            ''')
        
        upperword = Word(alphas).setParseAction(tokenMap(str.upper))
        OneOrMore(upperword).runTests('''
            my kingdom for a horse
            ''')

        wd = Word(alphas).setParseAction(tokenMap(str.title))
        OneOrMore(wd).setParseAction(' '.join).runTests('''
            now is the winter of our discontent made glorious summer by this sun of york
            ''')
    prints::
        00 11 22 aa FF 0a 0d 1a
        [0, 17, 34, 170, 255, 10, 13, 26]

        my kingdom for a horse
        ['MY', 'KINGDOM', 'FOR', 'A', 'HORSE']

        now is the winter of our discontent made glorious summer by this sun of york
        ['Now Is The Winter Of Our Discontent Made Glorious Summer By This Sun Of York']
    cs��fdd�|D�S)Ncsg|]}�|f����qSrwrw)r�Ztokn)r�r6rwrxr��sz(tokenMap.<locals>.pa.<locals>.<listcomp>rw)r�r5rv)r�r6rwrxr}�sztokenMap.<locals>.par�rH)rJr�rKr{)r6r�r}rLrw)r�r6rxrm�s cCst|�j�S)N)r�r�)rvrwrwrxry�scCst|�j�S)N)r��lower)rvrwrwrxry�scCs�t|t�r|}t||d�}n|j}tttd�}|r�tj�j	t
�}td�|d�tt
t|td�|���tddgd�jd	�j	d
d��td�}n�d
jdd�tD��}tj�j	t
�t|�B}td�|d�tt
t|j	t�ttd�|����tddgd�jd	�j	dd��td�}ttd�|d�}|jdd
j|jdd�j�j���jd|�}|jdd
j|jdd�j�j���jd|�}||_||_||fS)zRInternal helper to construct opening and closing tag expressions, given a tag name)r�z_-:r�tag�=�/F)r�rCcSs|ddkS)Nrr�rw)r�r5rvrwrwrxry�sz_makeTags.<locals>.<lambda>rr�css|]}|dkr|VqdS)rNrw)r�r�rwrwrxr��sz_makeTags.<locals>.<genexpr>cSs|ddkS)Nrr�rw)r�r5rvrwrwrxry�sz</r��:r�z<%s>rz</%s>)rzr�rr�r/r4r3r>r�r�rZr+rr2rrrmr�rVrYrBr
�_Lr��titler�rir�)�tagStrZxmlZresnameZtagAttrNameZtagAttrValueZopenTagZprintablesLessRAbrackZcloseTagrwrwrx�	_makeTags�s"
T\..r�cCs
t|d�S)a 
    Helper to construct opening and closing tag expressions for HTML, given a tag name. Matches
    tags in either upper or lower case, attributes with namespaces and with quoted or unquoted values.

    Example::
        text = '<td>More info at the <a href="http://pyparsing.wikispaces.com">pyparsing</a> wiki page</td>'
        # makeHTMLTags returns pyparsing expressions for the opening and closing tags as a 2-tuple
        a,a_end = makeHTMLTags("A")
        link_expr = a + SkipTo(a_end)("link_text") + a_end
        
        for link in link_expr.searchString(text):
            # attributes in the <A> tag (like "href" shown here) are also accessible as named results
            print(link.link_text, '->', link.href)
    prints::
        pyparsing -> http://pyparsing.wikispaces.com
    F)r�)r�rwrwrxrK�scCs
t|d�S)z�
    Helper to construct opening and closing tag expressions for XML, given a tag name. Matches
    tags only in the given upper/lower case.

    Example: similar to L{makeHTMLTags}
    T)r�)r�rwrwrxrLscs8|r|dd��n|j��dd��D���fdd�}|S)a<
    Helper to create a validating parse action to be used with start tags created
    with C{L{makeXMLTags}} or C{L{makeHTMLTags}}. Use C{withAttribute} to qualify a starting tag
    with a required attribute value, to avoid false matches on common tags such as
    C{<TD>} or C{<DIV>}.

    Call C{withAttribute} with a series of attribute names and values. Specify the list
    of filter attributes names and values as:
     - keyword arguments, as in C{(align="right")}, or
     - as an explicit dict with C{**} operator, when an attribute name is also a Python
          reserved word, as in C{**{"class":"Customer", "align":"right"}}
     - a list of name-value tuples, as in ( ("ns1:class", "Customer"), ("ns2:align","right") )
    For attribute names with a namespace prefix, you must use the second form.  Attribute
    names are matched insensitive to upper/lower case.
       
    If just testing for C{class} (with or without a namespace), use C{L{withClass}}.

    To verify that the attribute exists, but without specifying a value, pass
    C{withAttribute.ANY_VALUE} as the value.

    Example::
        html = '''
            <div>
            Some text
            <div type="grid">1 4 0 1 0</div>
            <div type="graph">1,3 2,3 1,1</div>
            <div>this has no type</div>
            </div>
                
        '''
        div,div_end = makeHTMLTags("div")

        # only match div tag having a type attribute with value "grid"
        div_grid = div().setParseAction(withAttribute(type="grid"))
        grid_expr = div_grid + SkipTo(div | div_end)("body")
        for grid_header in grid_expr.searchString(html):
            print(grid_header.body)
        
        # construct a match with any div tag having a type attribute, regardless of the value
        div_any_type = div().setParseAction(withAttribute(type=withAttribute.ANY_VALUE))
        div_expr = div_any_type + SkipTo(div | div_end)("body")
        for div_header in div_expr.searchString(html):
            print(div_header.body)
    prints::
        1 4 0 1 0

        1 4 0 1 0
        1,3 2,3 1,1
    NcSsg|]\}}||f�qSrwrw)r�r�r�rwrwrxr�Qsz!withAttribute.<locals>.<listcomp>cs^xX�D]P\}}||kr&t||d|��|tjkr|||krt||d||||f��qWdS)Nzno matching attribute z+attribute '%s' has value '%s', must be '%s')rre�	ANY_VALUE)r�r5r�ZattrNameZ	attrValue)�attrsrwrxr}RszwithAttribute.<locals>.pa)r�)r�ZattrDictr}rw)r�rxres2cCs|rd|nd}tf||i�S)a�
    Simplified version of C{L{withAttribute}} when matching on a div class - made
    difficult because C{class} is a reserved word in Python.

    Example::
        html = '''
            <div>
            Some text
            <div class="grid">1 4 0 1 0</div>
            <div class="graph">1,3 2,3 1,1</div>
            <div>this &lt;div&gt; has no class</div>
            </div>
                
        '''
        div,div_end = makeHTMLTags("div")
        div_grid = div().setParseAction(withClass("grid"))
        
        grid_expr = div_grid + SkipTo(div | div_end)("body")
        for grid_header in grid_expr.searchString(html):
            print(grid_header.body)
        
        div_any_type = div().setParseAction(withClass(withAttribute.ANY_VALUE))
        div_expr = div_any_type + SkipTo(div | div_end)("body")
        for div_header in div_expr.searchString(html):
            print(div_header.body)
    prints::
        1 4 0 1 0

        1 4 0 1 0
        1,3 2,3 1,1
    z%s:class�class)re)Z	classname�	namespaceZ	classattrrwrwrxrk\s �(rcCs�t�}||||B}�x`t|�D�]R\}}|ddd�\}}	}
}|	dkrTd|nd|}|	dkr�|dksxt|�dkr�td��|\}
}t�j|�}|
tjk�rd|	dkr�t||�t|t	|��}n�|	dk�r|dk	�rt|||�t|t	||��}nt||�t|t	|��}nD|	dk�rZt||
|||�t||
|||�}ntd	��n�|
tj
k�rH|	dk�r�t|t��s�t|�}t|j
|�t||�}n�|	dk�r|dk	�r�t|||�t|t	||��}nt||�t|t	|��}nD|	dk�r>t||
|||�t||
|||�}ntd	��ntd
��|�r`|j|�||j|�|BK}|}q"W||K}|S)a�	
    Helper method for constructing grammars of expressions made up of
    operators working in a precedence hierarchy.  Operators may be unary or
    binary, left- or right-associative.  Parse actions can also be attached
    to operator expressions. The generated parser will also recognize the use 
    of parentheses to override operator precedences (see example below).
    
    Note: if you define a deep operator list, you may see performance issues
    when using infixNotation. See L{ParserElement.enablePackrat} for a
    mechanism to potentially improve your parser performance.

    Parameters:
     - baseExpr - expression representing the most basic element for the nested
     - opList - list of tuples, one for each operator precedence level in the
      expression grammar; each tuple is of the form
      (opExpr, numTerms, rightLeftAssoc, parseAction), where:
       - opExpr is the pyparsing expression for the operator;
          may also be a string, which will be converted to a Literal;
          if numTerms is 3, opExpr is a tuple of two expressions, for the
          two operators separating the 3 terms
       - numTerms is the number of terms for this operator (must
          be 1, 2, or 3)
       - rightLeftAssoc is the indicator whether the operator is
          right or left associative, using the pyparsing-defined
          constants C{opAssoc.RIGHT} and C{opAssoc.LEFT}.
       - parseAction is the parse action to be associated with
          expressions matching this operator expression (the
          parse action tuple member may be omitted)
     - lpar - expression for matching left-parentheses (default=C{Suppress('(')})
     - rpar - expression for matching right-parentheses (default=C{Suppress(')')})

    Example::
        # simple example of four-function arithmetic with ints and variable names
        integer = pyparsing_common.signed_integer
        varname = pyparsing_common.identifier 
        
        arith_expr = infixNotation(integer | varname,
            [
            ('-', 1, opAssoc.RIGHT),
            (oneOf('* /'), 2, opAssoc.LEFT),
            (oneOf('+ -'), 2, opAssoc.LEFT),
            ])
        
        arith_expr.runTests('''
            5+3*6
            (5+3)*6
            -2--11
            ''', fullDump=False)
    prints::
        5+3*6
        [[5, '+', [3, '*', 6]]]

        (5+3)*6
        [[[5, '+', 3], '*', 6]]

        -2--11
        [[['-', 2], '-', ['-', 11]]]
    Nrroz%s termz	%s%s termrqz@if numterms=3, opExpr must be a tuple or list of two expressionsrrz6operator must be unary (1), binary (2), or ternary (3)z2operator must indicate right or left associativity)N)rr�r�r�rirT�LEFTrrr�RIGHTrzrr.r�)ZbaseExprZopListZlparZrparr�ZlastExprr�ZoperDefZopExprZarityZrightLeftAssocr}ZtermNameZopExpr1ZopExpr2ZthisExprrsrwrwrxri�sR;

&




&


z4"(?:[^"\n\r\\]|(?:"")|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*�"z string enclosed in double quotesz4'(?:[^'\n\r\\]|(?:'')|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*�'z string enclosed in single quotesz*quotedString using single or double quotes�uzunicode string literalcCs�||krtd��|dk�r(t|t�o,t|t��r t|�dkr�t|�dkr�|dk	r�tt|t||tjdd���j	dd��}n$t
j�t||tj�j	dd��}nx|dk	r�tt|t|�t|�ttjdd���j	dd��}n4ttt|�t|�ttjdd���j	d	d��}ntd
��t
�}|dk	�rb|tt|�t||B|B�t|��K}n$|tt|�t||B�t|��K}|jd||f�|S)a~	
    Helper method for defining nested lists enclosed in opening and closing
    delimiters ("(" and ")" are the default).

    Parameters:
     - opener - opening character for a nested list (default=C{"("}); can also be a pyparsing expression
     - closer - closing character for a nested list (default=C{")"}); can also be a pyparsing expression
     - content - expression for items within the nested lists (default=C{None})
     - ignoreExpr - expression for ignoring opening and closing delimiters (default=C{quotedString})

    If an expression is not provided for the content argument, the nested
    expression will capture all whitespace-delimited content between delimiters
    as a list of separate values.

    Use the C{ignoreExpr} argument to define expressions that may contain
    opening or closing characters that should not be treated as opening
    or closing characters for nesting, such as quotedString or a comment
    expression.  Specify multiple expressions using an C{L{Or}} or C{L{MatchFirst}}.
    The default is L{quotedString}, but if no expressions are to be ignored,
    then pass C{None} for this argument.

    Example::
        data_type = oneOf("void int short long char float double")
        decl_data_type = Combine(data_type + Optional(Word('*')))
        ident = Word(alphas+'_', alphanums+'_')
        number = pyparsing_common.number
        arg = Group(decl_data_type + ident)
        LPAR,RPAR = map(Suppress, "()")

        code_body = nestedExpr('{', '}', ignoreExpr=(quotedString | cStyleComment))

        c_function = (decl_data_type("type") 
                      + ident("name")
                      + LPAR + Optional(delimitedList(arg), [])("args") + RPAR 
                      + code_body("body"))
        c_function.ignore(cStyleComment)
        
        source_code = '''
            int is_odd(int x) { 
                return (x%2); 
            }
                
            int dec_to_hex(char hchar) { 
                if (hchar >= '0' && hchar <= '9') { 
                    return (ord(hchar)-ord('0')); 
                } else { 
                    return (10+ord(hchar)-ord('A'));
                } 
            }
        '''
        for func in c_function.searchString(source_code):
            print("%(name)s (%(type)s) args: %(args)s" % func)

    prints::
        is_odd (int) args: [['int', 'x']]
        dec_to_hex (int) args: [['char', 'hchar']]
    z.opening and closing strings cannot be the sameNrr)r
cSs|dj�S)Nr)r�)rvrwrwrxry9sznestedExpr.<locals>.<lambda>cSs|dj�S)Nr)r�)rvrwrwrxry<scSs|dj�S)Nr)r�)rvrwrwrxryBscSs|dj�S)Nr)r�)rvrwrwrxryFszOopening and closing arguments must be strings if no content expression is givenznested %s%s expression)r�rzr�r�r
rr	r$rNr�rCr�rrrr+r2ri)�openerZcloserZcontentrOr�rwrwrxrP�s4:

*$cs��fdd�}�fdd�}�fdd�}tt�jd�j��}t�t�j|�jd�}t�j|�jd	�}t�j|�jd
�}	|r�tt|�|t|t|�t|��|	�}
n$tt|�t|t|�t|���}
|j	t
t��|
jd�S)a
	
    Helper method for defining space-delimited indentation blocks, such as
    those used to define block statements in Python source code.

    Parameters:
     - blockStatementExpr - expression defining syntax of statement that
            is repeated within the indented block
     - indentStack - list created by caller to manage indentation stack
            (multiple statementWithIndentedBlock expressions within a single grammar
            should share a common indentStack)
     - indent - boolean indicating whether block must be indented beyond the
            the current level; set to False for block of left-most statements
            (default=C{True})

    A valid block must contain at least one C{blockStatement}.

    Example::
        data = '''
        def A(z):
          A1
          B = 100
          G = A2
          A2
          A3
        B
        def BB(a,b,c):
          BB1
          def BBA():
            bba1
            bba2
            bba3
        C
        D
        def spam(x,y):
             def eggs(z):
                 pass
        '''


        indentStack = [1]
        stmt = Forward()

        identifier = Word(alphas, alphanums)
        funcDecl = ("def" + identifier + Group( "(" + Optional( delimitedList(identifier) ) + ")" ) + ":")
        func_body = indentedBlock(stmt, indentStack)
        funcDef = Group( funcDecl + func_body )

        rvalue = Forward()
        funcCall = Group(identifier + "(" + Optional(delimitedList(rvalue)) + ")")
        rvalue << (funcCall | identifier | Word(nums))
        assignment = Group(identifier + "=" + rvalue)
        stmt << ( funcDef | assignment | identifier )

        module_body = OneOrMore(stmt)

        parseTree = module_body.parseString(data)
        parseTree.pprint()
    prints::
        [['def',
          'A',
          ['(', 'z', ')'],
          ':',
          [['A1'], [['B', '=', '100']], [['G', '=', 'A2']], ['A2'], ['A3']]],
         'B',
         ['def',
          'BB',
          ['(', 'a', 'b', 'c', ')'],
          ':',
          [['BB1'], [['def', 'BBA', ['(', ')'], ':', [['bba1'], ['bba2'], ['bba3']]]]]],
         'C',
         'D',
         ['def',
          'spam',
          ['(', 'x', 'y', ')'],
          ':',
          [[['def', 'eggs', ['(', 'z', ')'], ':', [['pass']]]]]]] 
    csN|t|�krdSt||�}|�dkrJ|�dkr>t||d��t||d��dS)Nrrzillegal nestingznot a peer entryrsrs)r�r9r!r)r�r5rv�curCol)�indentStackrwrx�checkPeerIndent�s
z&indentedBlock.<locals>.checkPeerIndentcs2t||�}|�dkr"�j|�nt||d��dS)Nrrznot a subentryrs)r9r�r)r�r5rvr�)r�rwrx�checkSubIndent�s
z%indentedBlock.<locals>.checkSubIndentcsN|t|�krdSt||�}�o4|�dko4|�dksBt||d���j�dS)Nrrrqznot an unindentrsr:)r�r9rr�)r�r5rvr�)r�rwrx�
checkUnindent�s
z$indentedBlock.<locals>.checkUnindentz	 �INDENTr�ZUNINDENTzindented block)rrr�r�r
r�rirrr�rk)ZblockStatementExprr�rr�r�r�r!r�ZPEERZUNDENTZsmExprrw)r�rxrfQsN,z#[\0xc0-\0xd6\0xd8-\0xf6\0xf8-\0xff]z[\0xa1-\0xbf\0xd7\0xf7]z_:zany tagzgt lt amp nbsp quot aposz><& "'z&(?P<entity>rnz);zcommon HTML entitycCstj|j�S)zRHelper parser action to replace common HTML entities with their special characters)�_htmlEntityMapr�Zentity)rvrwrwrxr[�sz/\*(?:[^*]|\*(?!/))*z*/zC style commentz<!--[\s\S]*?-->zHTML commentz.*zrest of linez//(?:\\\n|[^\n])*z
// commentzC++ style commentz#.*zPython style comment)r�z 	�	commaItem)r�c@s�eZdZdZee�Zee�Ze	e
�jd�je�Z
e	e�jd�jeed��Zed�jd�je�Ze�je�de�je�jd�Zejd	d
��eeeed�j�e�Bjd�Zeje�ed
�jd�je�Zed�jd�je�ZeeBeBj�Zed�jd�je�Ze	eded�jd�Zed�jd�Z ed�jd�Z!e!de!djd�Z"ee!de!d>�dee!de!d?�jd�Z#e#j$d d
��d!e jd"�Z%e&e"e%Be#Bjd#��jd#�Z'ed$�jd%�Z(e)d@d'd(��Z*e)dAd*d+��Z+ed,�jd-�Z,ed.�jd/�Z-ed0�jd1�Z.e/j�e0j�BZ1e)d2d3��Z2e&e3e4d4�e5�e	e6d4d5�ee7d6����j�jd7�Z8e9ee:j;�e8Bd8d9��jd:�Z<e)ed;d
���Z=e)ed<d
���Z>d=S)Brna�

    Here are some common low-level expressions that may be useful in jump-starting parser development:
     - numeric forms (L{integers<integer>}, L{reals<real>}, L{scientific notation<sci_real>})
     - common L{programming identifiers<identifier>}
     - network addresses (L{MAC<mac_address>}, L{IPv4<ipv4_address>}, L{IPv6<ipv6_address>})
     - ISO8601 L{dates<iso8601_date>} and L{datetime<iso8601_datetime>}
     - L{UUID<uuid>}
     - L{comma-separated list<comma_separated_list>}
    Parse actions:
     - C{L{convertToInteger}}
     - C{L{convertToFloat}}
     - C{L{convertToDate}}
     - C{L{convertToDatetime}}
     - C{L{stripHTMLTags}}
     - C{L{upcaseTokens}}
     - C{L{downcaseTokens}}

    Example::
        pyparsing_common.number.runTests('''
            # any int or real number, returned as the appropriate type
            100
            -100
            +100
            3.14159
            6.02e23
            1e-12
            ''')

        pyparsing_common.fnumber.runTests('''
            # any int or real number, returned as float
            100
            -100
            +100
            3.14159
            6.02e23
            1e-12
            ''')

        pyparsing_common.hex_integer.runTests('''
            # hex numbers
            100
            FF
            ''')

        pyparsing_common.fraction.runTests('''
            # fractions
            1/2
            -3/4
            ''')

        pyparsing_common.mixed_integer.runTests('''
            # mixed fractions
            1
            1/2
            -3/4
            1-3/4
            ''')

        import uuid
        pyparsing_common.uuid.setParseAction(tokenMap(uuid.UUID))
        pyparsing_common.uuid.runTests('''
            # uuid
            12345678-1234-5678-1234-567812345678
            ''')
    prints::
        # any int or real number, returned as the appropriate type
        100
        [100]

        -100
        [-100]

        +100
        [100]

        3.14159
        [3.14159]

        6.02e23
        [6.02e+23]

        1e-12
        [1e-12]

        # any int or real number, returned as float
        100
        [100.0]

        -100
        [-100.0]

        +100
        [100.0]

        3.14159
        [3.14159]

        6.02e23
        [6.02e+23]

        1e-12
        [1e-12]

        # hex numbers
        100
        [256]

        FF
        [255]

        # fractions
        1/2
        [0.5]

        -3/4
        [-0.75]

        # mixed fractions
        1
        [1]

        1/2
        [0.5]

        -3/4
        [-0.75]

        1-3/4
        [1.75]

        # uuid
        12345678-1234-5678-1234-567812345678
        [UUID('12345678-1234-5678-1234-567812345678')]
    �integerzhex integerrtz[+-]?\d+zsigned integerr��fractioncCs|d|dS)Nrrrrsrw)rvrwrwrxry�szpyparsing_common.<lambda>r8z"fraction or mixed integer-fractionz
[+-]?\d+\.\d*zreal numberz+[+-]?\d+([eE][+-]?\d+|\.\d*([eE][+-]?\d+)?)z$real number with scientific notationz[+-]?\d+\.?\d*([eE][+-]?\d+)?�fnumberrB�
identifierzK(25[0-5]|2[0-4][0-9]|1?[0-9]{1,2})(\.(25[0-5]|2[0-4][0-9]|1?[0-9]{1,2})){3}zIPv4 addressz[0-9a-fA-F]{1,4}�hex_integerr��zfull IPv6 addressrrBz::zshort IPv6 addresscCstdd�|D��dkS)Ncss|]}tjj|�rdVqdS)rrN)rn�
_ipv6_partr�)r�rfrwrwrxr��sz,pyparsing_common.<lambda>.<locals>.<genexpr>rw)rH)rvrwrwrxry�sz::ffff:zmixed IPv6 addresszIPv6 addressz:[0-9a-fA-F]{2}([:.-])[0-9a-fA-F]{2}(?:\1[0-9a-fA-F]{2}){4}zMAC address�%Y-%m-%dcs�fdd�}|S)a�
        Helper to create a parse action for converting parsed date string to Python datetime.date

        Params -
         - fmt - format to be passed to datetime.strptime (default=C{"%Y-%m-%d"})

        Example::
            date_expr = pyparsing_common.iso8601_date.copy()
            date_expr.setParseAction(pyparsing_common.convertToDate())
            print(date_expr.parseString("1999-12-31"))
        prints::
            [datetime.date(1999, 12, 31)]
        csLytj|d��j�Stk
rF}zt||t|���WYdd}~XnXdS)Nr)r�strptimeZdater�rr{)r�r5rv�ve)�fmtrwrx�cvt_fn�sz.pyparsing_common.convertToDate.<locals>.cvt_fnrw)r�r�rw)r�rx�
convertToDate�szpyparsing_common.convertToDate�%Y-%m-%dT%H:%M:%S.%fcs�fdd�}|S)a
        Helper to create a parse action for converting parsed datetime string to Python datetime.datetime

        Params -
         - fmt - format to be passed to datetime.strptime (default=C{"%Y-%m-%dT%H:%M:%S.%f"})

        Example::
            dt_expr = pyparsing_common.iso8601_datetime.copy()
            dt_expr.setParseAction(pyparsing_common.convertToDatetime())
            print(dt_expr.parseString("1999-12-31T23:59:59.999"))
        prints::
            [datetime.datetime(1999, 12, 31, 23, 59, 59, 999000)]
        csHytj|d��Stk
rB}zt||t|���WYdd}~XnXdS)Nr)rr�r�rr{)r�r5rvr�)r�rwrxr��sz2pyparsing_common.convertToDatetime.<locals>.cvt_fnrw)r�r�rw)r�rx�convertToDatetime�sz"pyparsing_common.convertToDatetimez7(?P<year>\d{4})(?:-(?P<month>\d\d)(?:-(?P<day>\d\d))?)?zISO8601 datez�(?P<year>\d{4})-(?P<month>\d\d)-(?P<day>\d\d)[T ](?P<hour>\d\d):(?P<minute>\d\d)(:(?P<second>\d\d(\.\d*)?)?)?(?P<tz>Z|[+-]\d\d:?\d\d)?zISO8601 datetimez2[0-9a-fA-F]{8}(-[0-9a-fA-F]{4}){3}-[0-9a-fA-F]{12}�UUIDcCstjj|d�S)a
        Parse action to remove HTML tags from web page HTML source

        Example::
            # strip HTML links from normal text 
            text = '<td>More info at the <a href="http://pyparsing.wikispaces.com">pyparsing</a> wiki page</td>'
            td,td_end = makeHTMLTags("TD")
            table_text = td + SkipTo(td_end).setParseAction(pyparsing_common.stripHTMLTags)("body") + td_end
            
            print(table_text.parseString(text).body) # -> 'More info at the pyparsing wiki page'
        r)rn�_html_stripperr�)r�r5r�rwrwrx�
stripHTMLTags�s
zpyparsing_common.stripHTMLTagsra)r�z 	r�r�)r�zcomma separated listcCst|�j�S)N)r�r�)rvrwrwrxry�scCst|�j�S)N)r�r�)rvrwrwrxry�sN)rrB)rrB)r�)r�)?r�r�r�r�rmruZconvertToInteger�floatZconvertToFloatr/rRrir�r�rDr�r'Zsigned_integerr�rxrr�Z
mixed_integerrH�realZsci_realr��numberr�r4r3r�Zipv4_addressr�Z_full_ipv6_addressZ_short_ipv6_addressr~Z_mixed_ipv6_addressr
Zipv6_addressZmac_addressr�r�r�Ziso8601_dateZiso8601_datetime�uuidr7r6r�r�rrrrVr.�
_commasepitemr@rYr�Zcomma_separated_listrdrBrwrwrwrxrn�sN""
28�__main__Zselect�fromz_$r])rb�columnsrjZtablesZcommandaK
        # '*' as column list and dotted table name
        select * from SYS.XYZZY

        # caseless match on "SELECT", and casts back to "select"
        SELECT * from XYZZY, ABC

        # list of column names, and mixed case SELECT keyword
        Select AA,BB,CC from Sys.dual

        # multiple tables
        Select A, B, C from Sys.dual, Table2

        # invalid SELECT keyword - should fail
        Xelect A, B, C from Sys.dual

        # incomplete command - should fail
        Select

        # invalid column name - should fail
        Select ^^^ frox Sys.dual

        z]
        100
        -100
        +100
        3.14159
        6.02e23
        1e-12
        z 
        100
        FF
        z6
        12345678-1234-5678-1234-567812345678
        )rq)raF)N)FT)T)r�)T)�r��__version__Z__versionTime__�
__author__r��weakrefrr�r�r~r�rdrr�r"r<r�r�_threadr�ImportErrorZ	threadingrr�Zordereddict�__all__r��version_infor;r�maxsizer�r{r��chrrur�rHr�r�reversedr�r�rr6rrrIZmaxintZxranger�Z__builtin__r�Zfnamer�rJr�r�r�r�r�r�Zascii_uppercaseZascii_lowercaser4rRrDr3rkr�Z	printablerVrKrrr!r#r&r�r"�MutableMapping�registerr9rJrGr/r2r4rQrMr$r,r
rrr�rQrrrrlr/r'r%r	r.r0rrrr*r)r1r0r rrrrrrrrJrr2rMrNrr(rrVr-r
rrr+rrbr@r<r�rOrNrrSrArgrhrjrirCrIrHrar`r�Z_escapedPuncZ_escapedHexCharZ_escapedOctChar�UNICODEZ_singleCharZ
_charRangermr}r_rMr\rZrmrdrBr�rKrLrer�rkrTr�r�rirUr>r^rYrcrPrfr5rWr7r6r�r�r�r�r;r[r8rEr�r]r?r=rFrXr�r�r:rnr�ZselectTokenZ	fromTokenZidentZ
columnNameZcolumnNameListZ
columnSpecZ	tableNameZ
tableNameListZ	simpleSQLr�r�r�r�r�r�rwrwrwrx�<module>=s�









8


@d&A= I
G3pLOD|M &#@sQ,A,	I#%&0
,	?#kZr

 (
 0 


"_vendor/__pycache__/retrying.cpython-36.opt-1.pyc000064400000017525151733136260015661 0ustar003

�Pf�&�@slddlZddlmZddlZddlZddlZdZdd�ZGdd�de�Z	Gdd	�d	e�Z
Gd
d�de�ZdS)�N)�sixi���?csBt��dkr,t�d�r,dd�}|�d�S��fdd�}|SdS)z�
    Decorator function that instantiates the Retrying object
    @param *dargs: positional arguments passed to Retrying object
    @param **dkw: keyword arguments passed to the Retrying object
    �rcstj���fdd��}|S)Ncst�j�f|�|�S)N)�Retrying�call)�args�kw)�f��/usr/lib/python3.6/retrying.py�	wrapped_f$sz-retry.<locals>.wrap_simple.<locals>.wrapped_f)r�wraps)rrr	)rr
�wrap_simple"szretry.<locals>.wrap_simplecstj�����fdd��}|S)Ncst���j�f|�|�S)N)rr)rr)�dargs�dkwrr	r
r/sz&retry.<locals>.wrap.<locals>.wrapped_f)rr)rr)rr)rr
�wrap-szretry.<locals>.wrapN)�len�callable)rrr
rr	)rrr
�retrys
rc@sneZdZddd�Zdd�Zdd�Zd	d
�Zdd�Zd
d�Zdd�Z	dd�Z
dd�Zdd�Zdd�Z
dd�ZdS)rNFcs|dkrdn||_|dkrdn||_|dkr0dn||_|dkrBdn||_|dkrTdn||_|dkrfdn||_|	dkrxdn|	|_|
dkr�dn|
|_|dkr�tn||_	|dkr�dn||_
g�|dk	r̈j|j�|dk	r�j|j
�|dk	r�||_n&|dk�r
�fdd�|_nt||�|_dd�g�|dk	�r6�j|j�|dk	�sJ|dk	�rV�j|j�|dk	�sj|	dk	�rv�j|j�|
dk	�s�|dk	�r��j|j�|dk	�r�||_n&|dk�r‡fd	d�|_nt||�|_|dk�r�|j|_n||_|
dk�r�|j|_n|
|_||_dS)
N��di�rrcst��fdd��D��S)Nc3s|]}|���VqdS)Nr	)�.0r)�attempts�delayr	r
�	<genexpr>asz6Retrying.__init__.<locals>.<lambda>.<locals>.<genexpr>)�any)rr)�
stop_funcs)rrr
�<lambda>asz#Retrying.__init__.<locals>.<lambda>c_sdS)Nrr	)r�kwargsr	r	r
rhscst��fdd��D��S)Nc3s|]}|���VqdS)Nr	)rr)rrr	r
rysz6Retrying.__init__.<locals>.<lambda>.<locals>.<genexpr>)�max)rr)�
wait_funcs)rrr
rys)�_stop_max_attempt_number�_stop_max_delay�_wait_fixed�_wait_random_min�_wait_random_max�_wait_incrementing_start�_wait_incrementing_increment�_wait_exponential_multiplier�MAX_WAIT�_wait_exponential_max�_wait_jitter_max�append�stop_after_attempt�stop_after_delay�stop�getattr�fixed_sleep�random_sleep�incrementing_sleep�exponential_sleep�wait�
always_reject�_retry_on_exception�never_reject�_retry_on_result�_wrap_exception)�selfr.r4Zstop_max_attempt_numberZstop_max_delayZ
wait_fixedZwait_random_minZwait_random_maxZwait_incrementing_startZwait_incrementing_incrementZwait_exponential_multiplierZwait_exponential_maxZretry_on_exceptionZretry_on_result�wrap_exceptionZ	stop_funcZ	wait_funcZwait_jitter_maxr	)rrr
�__init__:sR








zRetrying.__init__cCs
||jkS)z;Stop after the previous attempt >= stop_max_attempt_number.)r )r:�previous_attempt_number�delay_since_first_attempt_msr	r	r
r,�szRetrying.stop_after_attemptcCs
||jkS)z=Stop after the time from the first attempt >= stop_max_delay.)r!)r:r=r>r	r	r
r-�szRetrying.stop_after_delaycCsdS)z#Don't sleep at all before retrying.rr	)r:r=r>r	r	r
�no_sleep�szRetrying.no_sleepcCs|jS)z0Sleep a fixed amount of time between each retry.)r")r:r=r>r	r	r
r0�szRetrying.fixed_sleepcCstj|j|j�S)zISleep a random amount of time between wait_random_min and wait_random_max)�randomZrandintr#r$)r:r=r>r	r	r
r1�szRetrying.random_sleepcCs$|j|j|d}|dkr d}|S)z�
        Sleep an incremental amount of time after each attempt, starting at
        wait_incrementing_start and incrementing by wait_incrementing_increment
        rr)r%r&)r:r=r>�resultr	r	r
r2�szRetrying.incrementing_sleepcCs2d|}|j|}||jkr"|j}|dkr.d}|S)N�r)r'r))r:r=r>ZexprAr	r	r
r3�s

zRetrying.exponential_sleepcCsdS)NFr	)r:rAr	r	r
r7�szRetrying.never_rejectcCsdS)NTr	)r:rAr	r	r
r5�szRetrying.always_rejectcCs4d}|jr ||j|jd�O}n||j|j�O}|S)NFr)�
has_exceptionr6�valuer8)r:�attemptZrejectr	r	r
�
should_reject�s
zRetrying.should_rejectc
Os�tttj�d��}d}x�yt|||�|d�}Wn tj�}t||d�}YnX|j|�sh|j|j�Stttj�d��|}|j	||�r�|jr�|j
r�|j��q�t|��n<|j||�}	|j
r�tj�|j
}
|	td|
�}	tj|	d�|d7}qWdS)Ni�rFTrg@�@)�int�round�time�Attempt�sys�exc_inforF�getr9r.rC�
RetryErrorr4r*r@r�sleep)r:�fnrrZ
start_time�attempt_numberrE�tbr>rOZjitterr	r	r
r�s*


z
Retrying.call)NNNNNNNNNNNNNFNNN)�__name__�
__module__�__qualname__r<r,r-r?r0r1r2r3r7r5rFrr	r	r	r
r8s0
F
		rc@s*eZdZdZdd�Zd
dd�Zdd�Zd	S)rJz�
    An Attempt encapsulates a call to a target function that may end as a
    normal return value from the function or an Exception depending on what
    occurred during the execution.
    cCs||_||_||_dS)N)rDrQrC)r:rDrQrCr	r	r
r<�szAttempt.__init__FcCs@|jr6|rt|��q<tj|jd|jd|jd�n|jSdS)z�
        Return the return value of this Attempt instance or raise an Exception.
        If wrap_exception is true, this Attempt is wrapped inside of a
        RetryError before being raised.
        rrrBN)rCrNrZreraiserD)r:r;r	r	r
rM�s

"zAttempt.getcCs:|jr&dj|jdjtj|jd���Sdj|j|j�SdS)NzAttempts: {0}, Error:
{1}�rBzAttempts: {0}, Value: {1})rC�formatrQ�join�	traceback�	format_tbrD)r:r	r	r
�__repr__�s zAttempt.__repr__N)F)rSrTrU�__doc__r<rMr[r	r	r	r
rJ�s
rJc@s eZdZdZdd�Zdd�ZdS)rNzU
    A RetryError encapsulates the last Attempt instance right before giving up.
    cCs
||_dS)N)�last_attempt)r:r]r	r	r
r<szRetryError.__init__cCsdj|j�S)NzRetryError[{0}])rWr])r:r	r	r
�__str__
szRetryError.__str__N)rSrTrUr\r<r^r	r	r	r
rNsrN)
r@Zpip._vendorrrKrIrYr(r�objectrrJ�	ExceptionrNr	r	r	r
�<module>s*!_vendor/__pycache__/distro.cpython-36.pyc000064400000077377151733136260014376 0ustar003

�Pf͕�@sbdZddlZddlZddlZddlZddlZddlZddlZejj	d�sXe
djej���dZdZ
iZddd	�Zd
diZejd�Zejd�Zd
dde
dfZd:dd�Zdd�Zd;dd�Zd<dd�Zd=dd�Zd>dd�Zd?dd �Zd@d!d"�Zd#d$�Zd%d&�ZdAd'd(�Zd)d*�Z d+d,�Z!d-d.�Z"d/d0�Z#d1d2�Z$d3d4�Z%Gd5d6�d6e&�Z'e'�Z(d7d8�Z)e*d9k�r^e)�dS)Ba,
The ``distro`` package (``distro`` stands for Linux Distribution) provides
information about the Linux distribution it runs on, such as a reliable
machine-readable distro ID, or version information.

It is a renewed alternative implementation for Python's original
:py:func:`platform.linux_distribution` function, but it provides much more
functionality. An alternative implementation became necessary because Python
3.5 deprecated this function, and Python 3.7 is expected to remove it
altogether. Its predecessor function :py:func:`platform.dist` was already
deprecated since Python 2.6 and is also expected to be removed in Python 3.7.
Still, there are many cases in which access to Linux distribution information
is needed. See `Python issue 1322 <https://bugs.python.org/issue1322>`_ for
more information.
�N�linuxzUnsupported platform: {0}z/etcz
os-releaseZoracleZrhel)ZenterpriseenterpriseZredhatenterpriseworkstationZredhatzA(?:[^)]*\)(.*)\()? *(?:STL )?([\d.+\-a-z]*\d) *(?:esaeler *)?(.+)z(\w+)[-_](release|version)$Zdebian_versionzlsb-releasezoem-releasezsystem-releaseTcCs
tj|�S)a$
    Return information about the current Linux distribution as a tuple
    ``(id_name, version, codename)`` with items as follows:

    * ``id_name``:  If *full_distribution_name* is false, the result of
      :func:`distro.id`. Otherwise, the result of :func:`distro.name`.

    * ``version``:  The result of :func:`distro.version`.

    * ``codename``:  The result of :func:`distro.codename`.

    The interface of this function is compatible with the original
    :py:func:`platform.linux_distribution` function, supporting a subset of
    its parameters.

    The data it returns may not exactly be the same, because it uses more data
    sources than the original function, and that may lead to different data if
    the Linux distribution is not consistent across multiple data sources it
    provides (there are indeed such distributions ...).

    Another reason for differences is the fact that the :func:`distro.id`
    method normalizes the distro ID string to a reliable machine-readable value
    for a number of popular Linux distributions.
    )�_distro�linux_distribution)�full_distribution_name�r�/usr/lib/python3.6/distro.pyr`srcCstj�S)a�

    Return the distro ID of the current Linux distribution, as a
    machine-readable string.

    For a number of Linux distributions, the returned distro ID value is
    *reliable*, in the sense that it is documented and that it does not change
    across releases of the distribution.

    This package maintains the following reliable distro ID values:

    ==============  =========================================
    Distro ID       Distribution
    ==============  =========================================
    "ubuntu"        Ubuntu
    "debian"        Debian
    "rhel"          RedHat Enterprise Linux
    "centos"        CentOS
    "fedora"        Fedora
    "sles"          SUSE Linux Enterprise Server
    "opensuse"      openSUSE
    "amazon"        Amazon Linux
    "arch"          Arch Linux
    "cloudlinux"    CloudLinux OS
    "exherbo"       Exherbo Linux
    "gentoo"        GenToo Linux
    "ibm_powerkvm"  IBM PowerKVM
    "kvmibm"        KVM for IBM z Systems
    "linuxmint"     Linux Mint
    "mageia"        Mageia
    "mandriva"      Mandriva Linux
    "parallels"     Parallels
    "pidora"        Pidora
    "raspbian"      Raspbian
    "oracle"        Oracle Linux (and Oracle Enterprise Linux)
    "scientific"    Scientific Linux
    "slackware"     Slackware
    "xenserver"     XenServer
    ==============  =========================================

    If you have a need to get distros for reliable IDs added into this set,
    or if you find that the :func:`distro.id` function returns a different
    distro ID for one of the listed distros, please create an issue in the
    `distro issue tracker`_.

    **Lookup hierarchy and transformations:**

    First, the ID is obtained from the following sources, in the specified
    order. The first available and non-empty value is used:

    * the value of the "ID" attribute of the os-release file,

    * the value of the "Distributor ID" attribute returned by the lsb_release
      command,

    * the first part of the file name of the distro release file,

    The so determined ID value then passes the following transformations,
    before it is returned by this method:

    * it is translated to lower case,

    * blanks (which should not be there anyway) are translated to underscores,

    * a normalization of the ID is performed, based upon
      `normalization tables`_. The purpose of this normalization is to ensure
      that the ID is as reliable as possible, even across incompatible changes
      in the Linux distributions. A common reason for an incompatible change is
      the addition of an os-release file, or the addition of the lsb_release
      command, with ID values that differ from what was previously determined
      from the distro release file name.
    )r�idrrrrr|sHrFcCs
tj|�S)an
    Return the name of the current Linux distribution, as a human-readable
    string.

    If *pretty* is false, the name is returned without version or codename.
    (e.g. "CentOS Linux")

    If *pretty* is true, the version and codename are appended.
    (e.g. "CentOS Linux 7.1.1503 (Core)")

    **Lookup hierarchy:**

    The name is obtained from the following sources, in the specified order.
    The first available and non-empty value is used:

    * If *pretty* is false:

      - the value of the "NAME" attribute of the os-release file,

      - the value of the "Distributor ID" attribute returned by the lsb_release
        command,

      - the value of the "<name>" field of the distro release file.

    * If *pretty* is true:

      - the value of the "PRETTY_NAME" attribute of the os-release file,

      - the value of the "Description" attribute returned by the lsb_release
        command,

      - the value of the "<name>" field of the distro release file, appended
        with the value of the pretty version ("<version_id>" and "<codename>"
        fields) of the distro release file, if available.
    )r�name)�prettyrrrr	�s$r	cCstj||�S)ay
    Return the version of the current Linux distribution, as a human-readable
    string.

    If *pretty* is false, the version is returned without codename (e.g.
    "7.0").

    If *pretty* is true, the codename in parenthesis is appended, if the
    codename is non-empty (e.g. "7.0 (Maipo)").

    Some distributions provide version numbers with different precisions in
    the different sources of distribution information. Examining the different
    sources in a fixed priority order does not always yield the most precise
    version (e.g. for Debian 8.2, or CentOS 7.1).

    The *best* parameter can be used to control the approach for the returned
    version:

    If *best* is false, the first non-empty version number in priority order of
    the examined sources is returned.

    If *best* is true, the most precise version number out of all examined
    sources is returned.

    **Lookup hierarchy:**

    In all cases, the version number is obtained from the following sources.
    If *best* is false, this order represents the priority order:

    * the value of the "VERSION_ID" attribute of the os-release file,
    * the value of the "Release" attribute returned by the lsb_release
      command,
    * the version number parsed from the "<version_id>" field of the first line
      of the distro release file,
    * the version number parsed from the "PRETTY_NAME" attribute of the
      os-release file, if it follows the format of the distro release files.
    * the version number parsed from the "Description" attribute returned by
      the lsb_release command, if it follows the format of the distro release
      files.
    )r�version)r
�bestrrrr�s)rcCs
tj|�S)a�
    Return the version of the current Linux distribution as a tuple
    ``(major, minor, build_number)`` with items as follows:

    * ``major``:  The result of :func:`distro.major_version`.

    * ``minor``:  The result of :func:`distro.minor_version`.

    * ``build_number``:  The result of :func:`distro.build_number`.

    For a description of the *best* parameter, see the :func:`distro.version`
    method.
    )r�
version_parts)rrrrr
sr
cCs
tj|�S)a8
    Return the major version of the current Linux distribution, as a string,
    if provided.
    Otherwise, the empty string is returned. The major version is the first
    part of the dot-separated version string.

    For a description of the *best* parameter, see the :func:`distro.version`
    method.
    )r�
major_version)rrrrr+s
rcCs
tj|�S)a9
    Return the minor version of the current Linux distribution, as a string,
    if provided.
    Otherwise, the empty string is returned. The minor version is the second
    part of the dot-separated version string.

    For a description of the *best* parameter, see the :func:`distro.version`
    method.
    )r�
minor_version)rrrrr8s
rcCs
tj|�S)a6
    Return the build number of the current Linux distribution, as a string,
    if provided.
    Otherwise, the empty string is returned. The build number is the third part
    of the dot-separated version string.

    For a description of the *best* parameter, see the :func:`distro.version`
    method.
    )r�build_number)rrrrrEs
rcCstj�S)a
    Return a space-separated list of distro IDs of distributions that are
    closely related to the current Linux distribution in regards to packaging
    and programming interfaces, for example distributions the current
    distribution is a derivative from.

    **Lookup hierarchy:**

    This information item is only provided by the os-release file.
    For details, see the description of the "ID_LIKE" attribute in the
    `os-release man page
    <http://www.freedesktop.org/software/systemd/man/os-release.html>`_.
    )r�likerrrrrRsrcCstj�S)a�
    Return the codename for the release of the current Linux distribution,
    as a string.

    If the distribution does not have a codename, an empty string is returned.

    Note that the returned codename is not always really a codename. For
    example, openSUSE returns "x86_64". This function does not handle such
    cases in any special way and just returns the string it finds, if any.

    **Lookup hierarchy:**

    * the codename within the "VERSION" attribute of the os-release file, if
      provided,

    * the value of the "Codename" attribute returned by the lsb_release
      command,

    * the value of the "<codename>" field of the distro release file.
    )r�codenamerrrrrcsrcCstj||�S)a�
    Return certain machine-readable information items about the current Linux
    distribution in a dictionary, as shown in the following example:

    .. sourcecode:: python

        {
            'id': 'rhel',
            'version': '7.0',
            'version_parts': {
                'major': '7',
                'minor': '0',
                'build_number': ''
            },
            'like': 'fedora',
            'codename': 'Maipo'
        }

    The dictionary structure and keys are always the same, regardless of which
    information items are available in the underlying data sources. The values
    for the various keys are as follows:

    * ``id``:  The result of :func:`distro.id`.

    * ``version``:  The result of :func:`distro.version`.

    * ``version_parts -> major``:  The result of :func:`distro.major_version`.

    * ``version_parts -> minor``:  The result of :func:`distro.minor_version`.

    * ``version_parts -> build_number``:  The result of
      :func:`distro.build_number`.

    * ``like``:  The result of :func:`distro.like`.

    * ``codename``:  The result of :func:`distro.codename`.

    For a description of the *pretty* and *best* parameters, see the
    :func:`distro.version` method.
    )r�info)r
rrrrr{s)rcCstj�S)z�
    Return a dictionary containing key-value pairs for the information items
    from the os-release file data source of the current Linux distribution.

    See `os-release file`_ for details about these information items.
    )r�os_release_inforrrrr�srcCstj�S)z�
    Return a dictionary containing key-value pairs for the information items
    from the lsb_release command data source of the current Linux distribution.

    See `lsb_release command output`_ for details about these information
    items.
    )r�lsb_release_inforrrrr�srcCstj�S)z�
    Return a dictionary containing key-value pairs for the information items
    from the distro release file data source of the current Linux distribution.

    See `distro release file`_ for details about these information items.
    )r�distro_release_inforrrrr�srcCs
tj|�S)a�
    Return a single named information item from the os-release file data source
    of the current Linux distribution.

    Parameters:

    * ``attribute`` (string): Key of the information item.

    Returns:

    * (string): Value of the information item, if the item exists.
      The empty string, if the item does not exist.

    See `os-release file`_ for details about these information items.
    )r�os_release_attr)�	attributerrrr�srcCs
tj|�S)a�
    Return a single named information item from the lsb_release command output
    data source of the current Linux distribution.

    Parameters:

    * ``attribute`` (string): Key of the information item.

    Returns:

    * (string): Value of the information item, if the item exists.
      The empty string, if the item does not exist.

    See `lsb_release command output`_ for details about these information
    items.
    )r�lsb_release_attr)rrrrr�srcCs
tj|�S)a�
    Return a single named information item from the distro release file
    data source of the current Linux distribution.

    Parameters:

    * ``attribute`` (string): Key of the information item.

    Returns:

    * (string): Value of the information item, if the item exists.
      The empty string, if the item does not exist.

    See `distro release file`_ for details about these information items.
    )r�distro_release_attr)rrrrr�src@s�eZdZdZd:dd�Zdd�Zd;dd	�Zd
d�Zd<d
d�Zd=dd�Z	d>dd�Z
d?dd�Zd@dd�ZdAdd�Z
dd�Zdd�ZdBdd�Zdd �Zd!d"�Zd#d$�Zd%d&�Zd'd(�Zd)d*�Zd+d,�Zed-d.��Zd/d0�Zed1d2��Zd3d4�Zd5d6�Zed7d8��Zd9S)C�LinuxDistributiona
    Provides information about a Linux distribution.

    This package creates a private module-global instance of this class with
    default initialization arguments, that is used by the
    `consolidated accessor functions`_ and `single source accessor functions`_.
    By using default initialization arguments, that module-global instance
    returns data about the current Linux distribution (i.e. the distro this
    package runs on).

    Normally, it is not necessary to create additional instances of this class.
    However, in situations where control is needed over the exact data sources
    that are used, instances of this class can be created with a specific
    distro release file, or a specific os-release file, or without invoking the
    lsb_release command.
    T�cCsH|ptjjtt�|_|pd|_|j�|_|r4|j	�ni|_
|j�|_dS)a8	
        The initialization method of this class gathers information from the
        available data sources, and stores that in private instance attributes.
        Subsequent access to the information items uses these private instance
        attributes, so that the data sources are read only once.

        Parameters:

        * ``include_lsb`` (bool): Controls whether the
          `lsb_release command output`_ is included as a data source.

          If the lsb_release command is not available in the program execution
          path, the data source for the lsb_release command will be empty.

        * ``os_release_file`` (string): The path name of the
          `os-release file`_ that is to be used as a data source.

          An empty string (the default) will cause the default path name to
          be used (see `os-release file`_ for details).

          If the specified or defaulted os-release file does not exist, the
          data source for the os-release file will be empty.

        * ``distro_release_file`` (string): The path name of the
          `distro release file`_ that is to be used as a data source.

          An empty string (the default) will cause a default search algorithm
          to be used (see `distro release file`_ for details).

          If the specified distro release file does not exist, or if no default
          distro release file can be found, the data source for the distro
          release file will be empty.

        Public instance attributes:

        * ``os_release_file`` (string): The path name of the
          `os-release file`_ that is actually used as a data source. The
          empty string if no distro release file is used as a data source.

        * ``distro_release_file`` (string): The path name of the
          `distro release file`_ that is actually used as a data source. The
          empty string if no distro release file is used as a data source.

        Raises:

        * :py:exc:`IOError`: Some I/O issue with an os-release file or distro
          release file.

        * :py:exc:`subprocess.CalledProcessError`: The lsb_release command had
          some issue (other than not being available in the program execution
          path).

        * :py:exc:`UnicodeError`: A data source has unexpected characters or
          uses an unexpected encoding.
        rN)
�os�path�join�_UNIXCONFDIR�_OS_RELEASE_BASENAME�os_release_file�distro_release_file�_get_os_release_info�_os_release_info�_get_lsb_release_info�_lsb_release_info�_get_distro_release_info�_distro_release_info)�selfZinclude_lsbr"r#rrr�__init__s;

zLinuxDistribution.__init__cCsdj|j|j|j|j|j�S)z Return repr of all info
        z�LinuxDistribution(os_release_file={0!r}, distro_release_file={1!r}, _os_release_info={2!r}, _lsb_release_info={3!r}, _distro_release_info={4!r}))�formatr"r#r%r'r))r*rrr�__repr__UszLinuxDistribution.__repr__cCs"|r|j�n|j�|j�|j�fS)z�
        Return information about the Linux distribution that is compatible
        with Python's :func:`platform.linux_distribution`, supporting a subset
        of its parameters.

        For details, see :func:`distro.linux_distribution`.
        )r	rrr)r*rrrrres	z$LinuxDistribution.linux_distributioncCsTdd�}|jd�}|r ||t�S|jd�}|r8||t�S|jd�}|rP||t�SdS)zrReturn the distro ID of the Linux distribution, as a string.

        For details, see :func:`distro.id`.
        cSs|j�jdd�}|j||�S)N� �_)�lower�replace�get)�	distro_id�tablerrr�	normalizexsz'LinuxDistribution.id.<locals>.normalizer�distributor_idr)r�NORMALIZED_OS_IDr�NORMALIZED_LSB_IDr�NORMALIZED_DISTRO_ID)r*r5r3rrrrss





zLinuxDistribution.idFcCsh|jd�p|jd�p|jd�}|r`|jd�p4|jd�}|s`|jd�}|jdd�}|r`|d|}|pfdS)	zx
        Return the name of the Linux distribution, as a string.

        For details, see :func:`distro.name`.
        r	r6�pretty_name�descriptionT)r
r.r)rrrr)r*r
r	rrrrr	�s





zLinuxDistribution.namecCs�|jd�|jd�|jd�|j|jd��jdd�|j|jd��jdd�g}d}|r�xJ|D]$}|jd�|jd�ksv|dkrV|}qVWnx|D]}|dkr�|}Pq�W|r�|r�|j�r�dj||j��}|S)z~
        Return the version of the Linux distribution, as a string.

        For details, see :func:`distro.version`.
        �
version_id�releaser:rr;�.z	{0} ({1}))rrr�_parse_distro_release_contentr2�countrr,)r*r
rZversionsr�vrrrr�s&


zLinuxDistribution.versioncCsL|j|d�}|rHtjd�}|j|�}|rH|j�\}}}||p>d|pDdfSdS)z�
        Return the version of the Linux distribution, as a tuple of version
        numbers.

        For details, see :func:`distro.version_parts`.
        )rz(\d+)\.?(\d+)?\.?(\d+)?r)rrr)r�re�compile�match�groups)r*rZversion_strZ
version_regex�matches�major�minorrrrrr
�s

zLinuxDistribution.version_partscCs|j|�dS)z�
        Return the major version number of the current distribution.

        For details, see :func:`distro.major_version`.
        r)r
)r*rrrrr�szLinuxDistribution.major_versioncCs|j|�dS)z�
        Return the minor version number of the Linux distribution.

        For details, see :func:`distro.minor_version`.
        �)r
)r*rrrrr�szLinuxDistribution.minor_versioncCs|j|�dS)z{
        Return the build number of the Linux distribution.

        For details, see :func:`distro.build_number`.
        �)r
)r*rrrrr�szLinuxDistribution.build_numbercCs|jd�pdS)z�
        Return the IDs of distributions that are like the Linux distribution.

        For details, see :func:`distro.like`.
        Zid_liker)r)r*rrrr�szLinuxDistribution.likecCs"|jd�p |jd�p |jd�p dS)zs
        Return the codename of the Linux distribution.

        For details, see :func:`distro.codename`.
        rr)rrr)r*rrrr�s


zLinuxDistribution.codenamecCsBt|j�|j||�t|j|�|j|�|j|�d�|j�|j�d�S)z�
        Return certain machine-readable information about the Linux
        distribution.

        For details, see :func:`distro.info`.
        )rGrHr)rrr
rr)�dictrrrrrrr)r*r
rrrrr�s
zLinuxDistribution.infocCs|jS)z�
        Return a dictionary containing key-value pairs for the information
        items from the os-release file data source of the Linux distribution.

        For details, see :func:`distro.os_release_info`.
        )r%)r*rrrrsz!LinuxDistribution.os_release_infocCs|jS)z�
        Return a dictionary containing key-value pairs for the information
        items from the lsb_release command data source of the Linux
        distribution.

        For details, see :func:`distro.lsb_release_info`.
        )r')r*rrrrsz"LinuxDistribution.lsb_release_infocCs|jS)z�
        Return a dictionary containing key-value pairs for the information
        items from the distro release file data source of the Linux
        distribution.

        For details, see :func:`distro.distro_release_info`.
        )r))r*rrrrsz%LinuxDistribution.distro_release_infocCs|jj|d�S)z�
        Return a single named information item from the os-release file data
        source of the Linux distribution.

        For details, see :func:`distro.os_release_attr`.
        r)r%r2)r*rrrrr)sz!LinuxDistribution.os_release_attrcCs|jj|d�S)z�
        Return a single named information item from the lsb_release command
        output data source of the Linux distribution.

        For details, see :func:`distro.lsb_release_attr`.
        r)r'r2)r*rrrrr2sz"LinuxDistribution.lsb_release_attrcCs|jj|d�S)z�
        Return a single named information item from the distro release file
        data source of the Linux distribution.

        For details, see :func:`distro.distro_release_attr`.
        r)r)r2)r*rrrrr;sz%LinuxDistribution.distro_release_attrc	Cs.tjj|j�r*t|j��}|j|�SQRXiS)z�
        Get the information items from the specified os-release file.

        Returns:
            A dictionary containing all information items.
        N)rr�isfiler"�open�_parse_os_release_content)r*Zrelease_filerrrr$Dsz&LinuxDistribution._get_os_release_infocCs�i}tj|dd�}d|_tjddkr@t|jt�r@|jjd�|_t|�}x�|D]�}d|krN|j	dd�\}}t|t�r~|jd�}|||j
�<|d	kr�tjd
|�}|r�|j
�}|jd�}|jd�}|j�}||d
<q�d|d
<qNqNW|S)aD
        Parse the lines of an os-release file.

        Parameters:

        * lines: Iterable through the lines in the os-release file.
                 Each line must be a unicode string or a UTF-8 encoded byte
                 string.

        Returns:
            A dictionary containing all information items.
        T)�posixrrJz
iso-8859-1�=rIzutf-8�VERSIONz(\(\D+\))|,(\s+)?\D+z()�,rr)�shlexZwhitespace_split�sys�version_info�
isinstanceZ	wordchars�bytes�decode�list�splitr0rB�search�group�strip)�lines�propsZlexer�tokens�token�krArrrrrNPs.	






z+LinuxDistribution._parse_os_release_contentcCs�d}tj|dtjtjd�}|j�\}}|jd�|jd�}}|j}|dkr\|j�}|j|�S|dkrhiStj	dd�d
kr�tj
||||��n@tj	dd�dkr�tj
|||��ntj	dd�dkr�tj
||��dS)z�
        Get the information items from the lsb_release command output.

        Returns:
            A dictionary containing all information items.
        zlsb_release -aT)�shell�stdout�stderrzutf-8r�NrJ����)rgrh)rJri)rJrj)�
subprocess�Popen�PIPEZcommunicaterX�
returncode�
splitlines�_parse_lsb_release_contentrTrUZCalledProcessError)r*�cmdZprocessrdre�codeZcontentrrrr&�s(

z'LinuxDistribution._get_lsb_release_infocCsti}xj|D]b}t|t�r"|jd�n|}|jd�jdd�}t|�dkrFq
|\}}|j|jdd�j�|j�i�q
W|S)aM
        Parse the output of the lsb_release command.

        Parameters:

        * lines: Iterable through the lines of the lsb_release output.
                 Each line must be a unicode string or a UTF-8 encoded byte
                 string.

        Returns:
            A dictionary containing all information items.
        zutf-8�
�:rIrJr.r/)	rVrWrXr]rZ�len�updater1r0)r^r_�lineZkvrbrArrrrp�s
"z,LinuxDistribution._parse_lsb_release_contentcCs�|jr@|j|j�}tjj|j�}tj|�}|r<|jd�|d<|Stjt	�}|j
�x\|D]T}|tkrfqXtj|�}|rXtjjt	|�}|j|�}d|krX||_|jd�|d<|SqXWiSdS)z�
        Get the information items from the specified distro release file.

        Returns:
            A dictionary containing all information items.
        rIrr	N)
r#�_parse_distro_release_filerr�basename� _DISTRO_RELEASE_BASENAME_PATTERNrDr\�listdirr �sort� _DISTRO_RELEASE_IGNORE_BASENAMESr)r*�distro_inforyrDZ	basenames�filepathrrrr(�s,




z*LinuxDistribution._get_distro_release_infoc	Cs.tjj|�r*t|��}|j|j��SQRXiS)z�
        Parse a distro release file.

        Parameters:

        * filepath: Path name of the distro release file.

        Returns:
            A dictionary containing all information items.
        N)rrrLrMr?�readline)r*r�fprrrrx�s
z,LinuxDistribution._parse_distro_release_filecCs�t|t�r|jd�}tj|j�ddd	��}i}|r�|jd�ddd
�|d<|jd�rn|jd�ddd�|d<|jd�r�|jd�ddd�|d<n|r�|j�|d<|S)
a
        Parse a line from a distro release file.

        Parameters:
        * line: Line from the distro release file. Must be a unicode string
                or a UTF-8 encoded byte string.

        Returns:
            A dictionary containing all information items.
        zutf-8NrIrgr	rJr<r���r�r�r�)rVrWrX�(_DISTRO_RELEASE_CONTENT_REVERSED_PATTERNrDr]r\)rwrFr~rrrr?�s



z/LinuxDistribution._parse_distro_release_contentN)Trr)T)F)FF)F)F)F)F)FF)�__name__�
__module__�__qualname__�__doc__r+r-rrr	rr
rrrrrrrrrrrrr$�staticmethodrNr&rpr(rxr?rrrrrs:
@


!




	

			<)rcCs�ddl}tjt�}|jtj�|jtjtj	��|j
dd�}|jddddd�|j�}|j
rv|jt
jt�d	d
d��nB|jdtd
d
��td
d
�}|r�|jd|�t�}|r�|jd|�dS)NrzLinux distro info tool)r;z--jsonz-jz!Output in machine readable format�
store_true)�help�action�T)�indentZ	sort_keyszName: %s)r
zVersion: %szCodename: %s)�argparse�loggingZ	getLoggerr�ZsetLevel�DEBUGZ
addHandlerZ
StreamHandlerrTrd�ArgumentParser�add_argument�
parse_args�jsonr�dumpsr	rr)r�Zlogger�parser�argsZdistribution_versionZdistribution_codenamerrr�mains(

r��__main__)T)F)FF)F)F)F)F)FF)+r�rrBrTr�rSr�rk�platform�
startswith�ImportErrorr,r r!r7r8r9rCr�rzr}rrr	rr
rrrrrrrrrrrr�objectrrr�r�rrrr�<module>sd	

K
'
,





,


_vendor/__pycache__/appdirs.cpython-36.pyc000064400000044153151733136260014516 0ustar003

�Pf`W�@s�dZd1Zdjeee��ZddlZddlZejddkZ	e	r>eZ
ejjd�r�ddlZej
�ddZejd�rrd	Zq�ejd
�r�dZq�dZnejZd2dd�Zd3dd�Zd4dd�Zd5dd�Zd6dd�Zd7dd�ZGdd�de�Zdd�Zdd �Zd!d"�Zd#d$�Zed	k�r�yddlZeZWnnek
�r�ydd%l m!Z!eZWnBek
�r|yddl"Z#eZWnek
�rveZYnXYnXYnXe$d&k�r~d'Z%d(Z&d8Z'e(d)�ee%e&d*d+�Z)x$e'D]Z*e(d,e*e+e)e*�f��q�We(d-�ee%e&�Z)x$e'D]Z*e(d,e*e+e)e*�f��q�We(d.�ee%�Z)x$e'D]Z*e(d,e*e+e)e*�f��q$We(d/�ee%d
d0�Z)x$e'D]Z*e(d,e*e+e)e*�f��q^WdS)9zyUtilities for determining application-specific dirs.

See <http://github.com/ActiveState/appdirs> for details and usage.
����.N��javaZWindows�win32ZMac�darwinZlinux2FcCs�tdkr^|dkr|}|rdpd}tjjt|��}|r�|dk	rNtjj|||�}q�tjj||�}nNtdkr�tjjd�}|r�tjj||�}n&tjdtjjd	��}|r�tjj||�}|r�|r�tjj||�}|S)
aJReturn full path to the user-specific data dir for this application.

        "appname" is the name of application.
            If None, just the system directory is returned.
        "appauthor" (only used on Windows) is the name of the
            appauthor or distributing body for this application. Typically
            it is the owning company name. This falls back to appname. You may
            pass False to disable it.
        "version" is an optional version path element to append to the
            path. You might want to use this if you want multiple versions
            of your app to be able to run independently. If used, this
            would typically be "<major>.<minor>".
            Only applied when appname is present.
        "roaming" (boolean, default False) can be set True to use the Windows
            roaming appdata directory. That means that for users on a Windows
            network setup for roaming profiles, this user data will be
            sync'd on login. See
            <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
            for a discussion of issues.

    Typical user data directories are:
        macOS:                  ~/Library/Application Support/<AppName>
        Unix:                   ~/.local/share/<AppName>    # or in $XDG_DATA_HOME, if defined
        Win XP (not roaming):   C:\Documents and Settings\<username>\Application Data\<AppAuthor>\<AppName>
        Win XP (roaming):       C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>
        Win 7  (not roaming):   C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>
        Win 7  (roaming):       C:\Users\<username>\AppData\Roaming\<AppAuthor>\<AppName>

    For Unix, we follow the XDG spec and support $XDG_DATA_HOME.
    That means, by default "~/.local/share/<AppName>".
    rN�
CSIDL_APPDATA�CSIDL_LOCAL_APPDATAFrz~/Library/Application Support/Z
XDG_DATA_HOMEz~/.local/share)�system�os�path�normpath�_get_win_folder�join�
expanduser�getenv)�appname�	appauthor�version�roaming�constr
�r�/usr/lib/python3.6/appdirs.py�
user_data_dir-s& rcs
tdkrR|dkr�}tjjtd��}�r�|dk	rBtjj||��}q�tjj|��}n�tdkrztjjd�}�r�tjj|��}nttjdtjjdd	g��}d
d�|j	tj�D�}�r�|r�tjj�|���fdd�|D�}|r�tjj|�}n|d
}|S�o�|�rtjj||�}|S)aiReturn full path to the user-shared data dir for this application.

        "appname" is the name of application.
            If None, just the system directory is returned.
        "appauthor" (only used on Windows) is the name of the
            appauthor or distributing body for this application. Typically
            it is the owning company name. This falls back to appname. You may
            pass False to disable it.
        "version" is an optional version path element to append to the
            path. You might want to use this if you want multiple versions
            of your app to be able to run independently. If used, this
            would typically be "<major>.<minor>".
            Only applied when appname is present.
        "multipath" is an optional parameter only applicable to *nix
            which indicates that the entire list of data dirs should be
            returned. By default, the first item from XDG_DATA_DIRS is
            returned, or '/usr/local/share/<AppName>',
            if XDG_DATA_DIRS is not set

    Typical user data directories are:
        macOS:      /Library/Application Support/<AppName>
        Unix:       /usr/local/share/<AppName> or /usr/share/<AppName>
        Win XP:     C:\Documents and Settings\All Users\Application Data\<AppAuthor>\<AppName>
        Vista:      (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.)
        Win 7:      C:\ProgramData\<AppAuthor>\<AppName>   # Hidden, but writeable on Win 7.

    For Unix, this is using the $XDG_DATA_DIRS[0] default.

    WARNING: Do not use this on Windows. See the Vista-Fail note above for why.
    rN�CSIDL_COMMON_APPDATAFrz/Library/Application SupportZ
XDG_DATA_DIRSz/usr/local/sharez
/usr/sharecSs g|]}tjj|jtj���qSr)rr
r�rstrip�sep)�.0�xrrr�
<listcomp>�sz!site_data_dir.<locals>.<listcomp>csg|]}tjj|�g��qSr)rrr)rr)rrrr �sr)
rrr
rrrrr�pathsep�split)rrr�	multipathr
�pathlistr)rr�
site_data_dirds4
r%cCsXtdkrt||d|�}n&tjdtjjd��}|r>tjj||�}|rT|rTtjj||�}|S)a�Return full path to the user-specific config dir for this application.

        "appname" is the name of application.
            If None, just the system directory is returned.
        "appauthor" (only used on Windows) is the name of the
            appauthor or distributing body for this application. Typically
            it is the owning company name. This falls back to appname. You may
            pass False to disable it.
        "version" is an optional version path element to append to the
            path. You might want to use this if you want multiple versions
            of your app to be able to run independently. If used, this
            would typically be "<major>.<minor>".
            Only applied when appname is present.
        "roaming" (boolean, default False) can be set True to use the Windows
            roaming appdata directory. That means that for users on a Windows
            network setup for roaming profiles, this user data will be
            sync'd on login. See
            <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
            for a discussion of issues.

    Typical user data directories are:
        macOS:                  same as user_data_dir
        Unix:                   ~/.config/<AppName>     # or in $XDG_CONFIG_HOME, if defined
        Win *:                  same as user_data_dir

    For Unix, we follow the XDG spec and support $XDG_CONFIG_HOME.
    That means, by deafult "~/.config/<AppName>".
    rrNZXDG_CONFIG_HOMEz	~/.config)rr)rrrrr
rr)rrrrr
rrr�user_config_dir�sr&cs�td	kr*t�|�}�r�|r�tjj||�}ndtjdd�}dd�|jtj�D�}�rt|rbtjj�|���fdd�|D�}|r�tjj|�}n|d}|S)
aReturn full path to the user-shared data dir for this application.

        "appname" is the name of application.
            If None, just the system directory is returned.
        "appauthor" (only used on Windows) is the name of the
            appauthor or distributing body for this application. Typically
            it is the owning company name. This falls back to appname. You may
            pass False to disable it.
        "version" is an optional version path element to append to the
            path. You might want to use this if you want multiple versions
            of your app to be able to run independently. If used, this
            would typically be "<major>.<minor>".
            Only applied when appname is present.
        "multipath" is an optional parameter only applicable to *nix
            which indicates that the entire list of config dirs should be
            returned. By default, the first item from XDG_CONFIG_DIRS is
            returned, or '/etc/xdg/<AppName>', if XDG_CONFIG_DIRS is not set

    Typical user data directories are:
        macOS:      same as site_data_dir
        Unix:       /etc/xdg/<AppName> or $XDG_CONFIG_DIRS[i]/<AppName> for each value in
                    $XDG_CONFIG_DIRS
        Win *:      same as site_data_dir
        Vista:      (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.)

    For Unix, this is using the $XDG_CONFIG_DIRS[0] default, if multipath=False

    WARNING: Do not use this on Windows. See the Vista-Fail note above for why.
    rrZXDG_CONFIG_DIRSz/etc/xdgcSs g|]}tjj|jtj���qSr)rr
rrr)rrrrrr �sz#site_config_dir.<locals>.<listcomp>csg|]}tjj|�g��qSr)rrr)rr)rrrr �sr)rr)rr%rr
rrr"r!)rrrr#r
r$r)rr�site_config_dir�s
r'TcCs�tdkrd|dkr|}tjjtd��}|r�|dk	rBtjj|||�}ntjj||�}|r�tjj|d�}nNtdkr�tjjd�}|r�tjj||�}n&tjdtjjd	��}|r�tjj||�}|r�|r�tjj||�}|S)
aReturn full path to the user-specific cache dir for this application.

        "appname" is the name of application.
            If None, just the system directory is returned.
        "appauthor" (only used on Windows) is the name of the
            appauthor or distributing body for this application. Typically
            it is the owning company name. This falls back to appname. You may
            pass False to disable it.
        "version" is an optional version path element to append to the
            path. You might want to use this if you want multiple versions
            of your app to be able to run independently. If used, this
            would typically be "<major>.<minor>".
            Only applied when appname is present.
        "opinion" (boolean) can be False to disable the appending of
            "Cache" to the base app data dir for Windows. See
            discussion below.

    Typical user cache directories are:
        macOS:      ~/Library/Caches/<AppName>
        Unix:       ~/.cache/<AppName> (XDG default)
        Win XP:     C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>\Cache
        Vista:      C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>\Cache

    On Windows the only suggestion in the MSDN docs is that local settings go in
    the `CSIDL_LOCAL_APPDATA` directory. This is identical to the non-roaming
    app data dir (the default returned by `user_data_dir` above). Apps typically
    put cache data somewhere *under* the given dir here. Some examples:
        ...\Mozilla\Firefox\Profiles\<ProfileName>\Cache
        ...\Acme\SuperApp\Cache\1.0
    OPINION: This function appends "Cache" to the `CSIDL_LOCAL_APPDATA` value.
    This can be disabled with the `opinion=False` option.
    rNr
FZCacherz~/Library/CachesZXDG_CACHE_HOMEz~/.cache)rrr
rrrrr)rrr�opinionr
rrr�user_cache_dirs(!r)cCs�tdkr tjjtjjd�|�}nNtdkrLt|||�}d}|rntjj|d�}n"t|||�}d}|rntjj|d�}|r�|r�tjj||�}|S)a�Return full path to the user-specific log dir for this application.

        "appname" is the name of application.
            If None, just the system directory is returned.
        "appauthor" (only used on Windows) is the name of the
            appauthor or distributing body for this application. Typically
            it is the owning company name. This falls back to appname. You may
            pass False to disable it.
        "version" is an optional version path element to append to the
            path. You might want to use this if you want multiple versions
            of your app to be able to run independently. If used, this
            would typically be "<major>.<minor>".
            Only applied when appname is present.
        "opinion" (boolean) can be False to disable the appending of
            "Logs" to the base app data dir for Windows, and "log" to the
            base cache dir for Unix. See discussion below.

    Typical user cache directories are:
        macOS:      ~/Library/Logs/<AppName>
        Unix:       ~/.cache/<AppName>/log  # or under $XDG_CACHE_HOME if defined
        Win XP:     C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>\Logs
        Vista:      C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>\Logs

    On Windows the only suggestion in the MSDN docs is that local settings
    go in the `CSIDL_LOCAL_APPDATA` directory. (Note: I'm interested in
    examples of what some windows apps use for a logs dir.)

    OPINION: This function appends "Logs" to the `CSIDL_LOCAL_APPDATA`
    value for Windows and appends "log" to the user cache dir for Unix.
    This can be disabled with the `opinion=False` option.
    rz~/Library/LogsrFZLogs�log)rrr
rrrr))rrrr(r
rrr�user_log_dir:s  
r+c@sbeZdZdZddd�Zedd��Zedd	��Zed
d��Zedd
��Z	edd��Z
edd��ZdS)�AppDirsz1Convenience wrapper for getting application dirs.NFcCs"||_||_||_||_||_dS)N)rrrrr#)�selfrrrrr#rrr�__init__os
zAppDirs.__init__cCst|j|j|j|jd�S)N)rr)rrrrr)r-rrrrws
zAppDirs.user_data_dircCst|j|j|j|jd�S)N)rr#)r%rrrr#)r-rrrr%|s
zAppDirs.site_data_dircCst|j|j|j|jd�S)N)rr)r&rrrr)r-rrrr&�s
zAppDirs.user_config_dircCst|j|j|j|jd�S)N)rr#)r'rrrr#)r-rrrr'�s
zAppDirs.site_config_dircCst|j|j|jd�S)N)r)r)rrr)r-rrrr)�s
zAppDirs.user_cache_dircCst|j|j|jd�S)N)r)r+rrr)r-rrrr+�s
zAppDirs.user_log_dir)NNFF)�__name__�
__module__�__qualname__�__doc__r.�propertyrr%r&r'r)r+rrrrr,ms
r,cCs:ddl}dddd�|}|j|jd�}|j||�\}}|S)z�This is a fallback technique at best. I'm not sure if using the
    registry for this guarantees us the correct answer for all CSIDL_*
    names.
    rNZAppDatazCommon AppDataz
Local AppData)r	rr
z@Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders)�_winreg�OpenKey�HKEY_CURRENT_USERZQueryValueEx)�
csidl_namer4Zshell_folder_name�key�dir�typerrr�_get_win_folder_from_registry�sr;cCs�ddlm}m}|jdt||�dd�}y`t|�}d}x|D]}t|�dkr:d}Pq:W|r�yddl}|j|�}Wnt	k
r�YnXWnt
k
r�YnX|S)Nr)�shellcon�shellF�T)�win32com.shellr<r=�SHGetFolderPath�getattr�unicode�ord�win32api�GetShortPathName�ImportError�UnicodeError)r7r<r=r9�
has_high_char�crDrrr�_get_win_folder_with_pywin32�s$

rJcCs�ddl}dddd�|}|jd�}|jjjd|dd|�d}x|D]}t|�dkrBd	}PqBW|r�|jd�}|jjj|j|d�r�|}|jS)
Nr��#�)r	rr
iFr>T)	�ctypesZcreate_unicode_buffer�windllZshell32ZSHGetFolderPathWrCZkernel32ZGetShortPathNameW�value)r7rNZcsidl_const�bufrHrIZbuf2rrr�_get_win_folder_with_ctypes�s"


rRcCs�ddl}ddlm}ddlm}|jjd}|jd|�}|jj	}|j
dt|j|�d|jj
|�|jj|j��jd�}d}x|D]}	t|	�dkr~d	}Pq~W|r�|jd|�}|jj	}
tj|||�r�|jj|j��jd�}|S)
Nr)�jna)r�rI�Fr>T)�arrayZcom.sunrSZcom.sun.jna.platformrZWinDefZMAX_PATHZzerosZShell32ZINSTANCEr@rAZShlObjZSHGFP_TYPE_CURRENTZNativeZtoStringZtostringrrCZKernel32ZkernalrE)r7rVrSrZbuf_sizerQr=r9rHrIZkernelrrr�_get_win_folder_with_jna�s&
rW)rO�__main__ZMyAppZ	MyCompanyz%-- app dirs (with optional 'version')z1.0)rz%s: %sz)
-- app dirs (without optional 'version')z+
-- app dirs (without optional 'appauthor')z(
-- app dirs (with disabled 'appauthor'))r)rrr)NNNF)NNNF)NNNF)NNNF)NNNT)NNNT)rr%r&r'r)r+),r2Z__version_info__r�map�str�__version__�sysr�version_infoZPY3rB�platform�
startswithZjava_verZos_namerrr%r&r'r)r+�objectr,r;rJrRrWr?Zwin32comrrFrNrOZcom.sun.jnaZcomr/rrZprops�print�dirsZproprArrrr�<module>	s~


7
B
(
3
9
3+






_vendor/__pycache__/__init__.cpython-36.pyc000064400000005240151733136260014605 0ustar003

�Pf>�@s�dZddlmZddlZddlZddlZdZejj	ejj
e��Zdd�Z
e�r�ejejjed��ejejdd�<e
d�e
d	�e
d
�e
d�e
d�e
d
�e
d�e
d�e
d�e
d�e
d�e
d�e
d�e
d�e
d�e
d�e
d�e
d�e
d�e
d�e
d�e
d�e
d�e
d�e
d �e
d!�e
d"�e
d#�e
d$�e
d%�e
d&�e
d'�e
d(�e
d)�e
d*�e
d+�e
d,�e
d-�e
d.�e
d/�e
d0�e
d1�e
d2�dS)3z�
pip._vendor is for vendoring dependencies of pip to prevent needing pip to
depend on something external.

Files inside of pip._vendor should be considered immutable and should only be
updated to versions from upstream.
�)�absolute_importNFcCs�djt|�}yt|t�t�dd�Wnztk
r�yt|t�t�dd�Wntk
r`Yn:Xtj|tj|<|jdd�\}}t	tj||tj|�YnXdS)Nz{0}.{1}r)�level�.�)
�format�__name__�
__import__�globals�locals�ImportError�sys�modules�rsplit�setattr)Z
modulenameZ
vendored_name�base�head�r�/usr/lib/python3.6/__init__.py�vendoreds	rz*.whlZcachecontrolZcoloramaZdistlibZdistroZhtml5libZlockfileZsixz	six.moveszsix.moves.urllibZ	packagingzpackaging.versionzpackaging.specifiersZ
pkg_resourcesZprogressZretryingZrequestszrequests.packageszrequests.packages.urllib3z&requests.packages.urllib3._collectionsz$requests.packages.urllib3.connectionz(requests.packages.urllib3.connectionpoolz!requests.packages.urllib3.contribz*requests.packages.urllib3.contrib.ntlmpoolz+requests.packages.urllib3.contrib.pyopensslz$requests.packages.urllib3.exceptionsz requests.packages.urllib3.fieldsz"requests.packages.urllib3.filepostz"requests.packages.urllib3.packagesz/requests.packages.urllib3.packages.ordered_dictz&requests.packages.urllib3.packages.sixz5requests.packages.urllib3.packages.ssl_match_hostnamezErequests.packages.urllib3.packages.ssl_match_hostname._implementationz%requests.packages.urllib3.poolmanagerz!requests.packages.urllib3.requestz"requests.packages.urllib3.responsezrequests.packages.urllib3.utilz)requests.packages.urllib3.util.connectionz&requests.packages.urllib3.util.requestz'requests.packages.urllib3.util.responsez$requests.packages.urllib3.util.retryz#requests.packages.urllib3.util.ssl_z&requests.packages.urllib3.util.timeoutz"requests.packages.urllib3.util.url)�__doc__Z
__future__rZglobZos.path�osrZ	DEBUNDLED�path�abspath�dirname�__file__Z	WHEEL_DIRr�joinrrrr�<module>sh$_vendor/__pycache__/distro.cpython-36.opt-1.pyc000064400000077377151733136260015335 0ustar003

�Pf͕�@sbdZddlZddlZddlZddlZddlZddlZddlZejj	d�sXe
djej���dZdZ
iZddd	�Zd
diZejd�Zejd�Zd
dde
dfZd:dd�Zdd�Zd;dd�Zd<dd�Zd=dd�Zd>dd�Zd?dd �Zd@d!d"�Zd#d$�Zd%d&�ZdAd'd(�Zd)d*�Z d+d,�Z!d-d.�Z"d/d0�Z#d1d2�Z$d3d4�Z%Gd5d6�d6e&�Z'e'�Z(d7d8�Z)e*d9k�r^e)�dS)Ba,
The ``distro`` package (``distro`` stands for Linux Distribution) provides
information about the Linux distribution it runs on, such as a reliable
machine-readable distro ID, or version information.

It is a renewed alternative implementation for Python's original
:py:func:`platform.linux_distribution` function, but it provides much more
functionality. An alternative implementation became necessary because Python
3.5 deprecated this function, and Python 3.7 is expected to remove it
altogether. Its predecessor function :py:func:`platform.dist` was already
deprecated since Python 2.6 and is also expected to be removed in Python 3.7.
Still, there are many cases in which access to Linux distribution information
is needed. See `Python issue 1322 <https://bugs.python.org/issue1322>`_ for
more information.
�N�linuxzUnsupported platform: {0}z/etcz
os-releaseZoracleZrhel)ZenterpriseenterpriseZredhatenterpriseworkstationZredhatzA(?:[^)]*\)(.*)\()? *(?:STL )?([\d.+\-a-z]*\d) *(?:esaeler *)?(.+)z(\w+)[-_](release|version)$Zdebian_versionzlsb-releasezoem-releasezsystem-releaseTcCs
tj|�S)a$
    Return information about the current Linux distribution as a tuple
    ``(id_name, version, codename)`` with items as follows:

    * ``id_name``:  If *full_distribution_name* is false, the result of
      :func:`distro.id`. Otherwise, the result of :func:`distro.name`.

    * ``version``:  The result of :func:`distro.version`.

    * ``codename``:  The result of :func:`distro.codename`.

    The interface of this function is compatible with the original
    :py:func:`platform.linux_distribution` function, supporting a subset of
    its parameters.

    The data it returns may not exactly be the same, because it uses more data
    sources than the original function, and that may lead to different data if
    the Linux distribution is not consistent across multiple data sources it
    provides (there are indeed such distributions ...).

    Another reason for differences is the fact that the :func:`distro.id`
    method normalizes the distro ID string to a reliable machine-readable value
    for a number of popular Linux distributions.
    )�_distro�linux_distribution)�full_distribution_name�r�/usr/lib/python3.6/distro.pyr`srcCstj�S)a�

    Return the distro ID of the current Linux distribution, as a
    machine-readable string.

    For a number of Linux distributions, the returned distro ID value is
    *reliable*, in the sense that it is documented and that it does not change
    across releases of the distribution.

    This package maintains the following reliable distro ID values:

    ==============  =========================================
    Distro ID       Distribution
    ==============  =========================================
    "ubuntu"        Ubuntu
    "debian"        Debian
    "rhel"          RedHat Enterprise Linux
    "centos"        CentOS
    "fedora"        Fedora
    "sles"          SUSE Linux Enterprise Server
    "opensuse"      openSUSE
    "amazon"        Amazon Linux
    "arch"          Arch Linux
    "cloudlinux"    CloudLinux OS
    "exherbo"       Exherbo Linux
    "gentoo"        GenToo Linux
    "ibm_powerkvm"  IBM PowerKVM
    "kvmibm"        KVM for IBM z Systems
    "linuxmint"     Linux Mint
    "mageia"        Mageia
    "mandriva"      Mandriva Linux
    "parallels"     Parallels
    "pidora"        Pidora
    "raspbian"      Raspbian
    "oracle"        Oracle Linux (and Oracle Enterprise Linux)
    "scientific"    Scientific Linux
    "slackware"     Slackware
    "xenserver"     XenServer
    ==============  =========================================

    If you have a need to get distros for reliable IDs added into this set,
    or if you find that the :func:`distro.id` function returns a different
    distro ID for one of the listed distros, please create an issue in the
    `distro issue tracker`_.

    **Lookup hierarchy and transformations:**

    First, the ID is obtained from the following sources, in the specified
    order. The first available and non-empty value is used:

    * the value of the "ID" attribute of the os-release file,

    * the value of the "Distributor ID" attribute returned by the lsb_release
      command,

    * the first part of the file name of the distro release file,

    The so determined ID value then passes the following transformations,
    before it is returned by this method:

    * it is translated to lower case,

    * blanks (which should not be there anyway) are translated to underscores,

    * a normalization of the ID is performed, based upon
      `normalization tables`_. The purpose of this normalization is to ensure
      that the ID is as reliable as possible, even across incompatible changes
      in the Linux distributions. A common reason for an incompatible change is
      the addition of an os-release file, or the addition of the lsb_release
      command, with ID values that differ from what was previously determined
      from the distro release file name.
    )r�idrrrrr|sHrFcCs
tj|�S)an
    Return the name of the current Linux distribution, as a human-readable
    string.

    If *pretty* is false, the name is returned without version or codename.
    (e.g. "CentOS Linux")

    If *pretty* is true, the version and codename are appended.
    (e.g. "CentOS Linux 7.1.1503 (Core)")

    **Lookup hierarchy:**

    The name is obtained from the following sources, in the specified order.
    The first available and non-empty value is used:

    * If *pretty* is false:

      - the value of the "NAME" attribute of the os-release file,

      - the value of the "Distributor ID" attribute returned by the lsb_release
        command,

      - the value of the "<name>" field of the distro release file.

    * If *pretty* is true:

      - the value of the "PRETTY_NAME" attribute of the os-release file,

      - the value of the "Description" attribute returned by the lsb_release
        command,

      - the value of the "<name>" field of the distro release file, appended
        with the value of the pretty version ("<version_id>" and "<codename>"
        fields) of the distro release file, if available.
    )r�name)�prettyrrrr	�s$r	cCstj||�S)ay
    Return the version of the current Linux distribution, as a human-readable
    string.

    If *pretty* is false, the version is returned without codename (e.g.
    "7.0").

    If *pretty* is true, the codename in parenthesis is appended, if the
    codename is non-empty (e.g. "7.0 (Maipo)").

    Some distributions provide version numbers with different precisions in
    the different sources of distribution information. Examining the different
    sources in a fixed priority order does not always yield the most precise
    version (e.g. for Debian 8.2, or CentOS 7.1).

    The *best* parameter can be used to control the approach for the returned
    version:

    If *best* is false, the first non-empty version number in priority order of
    the examined sources is returned.

    If *best* is true, the most precise version number out of all examined
    sources is returned.

    **Lookup hierarchy:**

    In all cases, the version number is obtained from the following sources.
    If *best* is false, this order represents the priority order:

    * the value of the "VERSION_ID" attribute of the os-release file,
    * the value of the "Release" attribute returned by the lsb_release
      command,
    * the version number parsed from the "<version_id>" field of the first line
      of the distro release file,
    * the version number parsed from the "PRETTY_NAME" attribute of the
      os-release file, if it follows the format of the distro release files.
    * the version number parsed from the "Description" attribute returned by
      the lsb_release command, if it follows the format of the distro release
      files.
    )r�version)r
�bestrrrr�s)rcCs
tj|�S)a�
    Return the version of the current Linux distribution as a tuple
    ``(major, minor, build_number)`` with items as follows:

    * ``major``:  The result of :func:`distro.major_version`.

    * ``minor``:  The result of :func:`distro.minor_version`.

    * ``build_number``:  The result of :func:`distro.build_number`.

    For a description of the *best* parameter, see the :func:`distro.version`
    method.
    )r�
version_parts)rrrrr
sr
cCs
tj|�S)a8
    Return the major version of the current Linux distribution, as a string,
    if provided.
    Otherwise, the empty string is returned. The major version is the first
    part of the dot-separated version string.

    For a description of the *best* parameter, see the :func:`distro.version`
    method.
    )r�
major_version)rrrrr+s
rcCs
tj|�S)a9
    Return the minor version of the current Linux distribution, as a string,
    if provided.
    Otherwise, the empty string is returned. The minor version is the second
    part of the dot-separated version string.

    For a description of the *best* parameter, see the :func:`distro.version`
    method.
    )r�
minor_version)rrrrr8s
rcCs
tj|�S)a6
    Return the build number of the current Linux distribution, as a string,
    if provided.
    Otherwise, the empty string is returned. The build number is the third part
    of the dot-separated version string.

    For a description of the *best* parameter, see the :func:`distro.version`
    method.
    )r�build_number)rrrrrEs
rcCstj�S)a
    Return a space-separated list of distro IDs of distributions that are
    closely related to the current Linux distribution in regards to packaging
    and programming interfaces, for example distributions the current
    distribution is a derivative from.

    **Lookup hierarchy:**

    This information item is only provided by the os-release file.
    For details, see the description of the "ID_LIKE" attribute in the
    `os-release man page
    <http://www.freedesktop.org/software/systemd/man/os-release.html>`_.
    )r�likerrrrrRsrcCstj�S)a�
    Return the codename for the release of the current Linux distribution,
    as a string.

    If the distribution does not have a codename, an empty string is returned.

    Note that the returned codename is not always really a codename. For
    example, openSUSE returns "x86_64". This function does not handle such
    cases in any special way and just returns the string it finds, if any.

    **Lookup hierarchy:**

    * the codename within the "VERSION" attribute of the os-release file, if
      provided,

    * the value of the "Codename" attribute returned by the lsb_release
      command,

    * the value of the "<codename>" field of the distro release file.
    )r�codenamerrrrrcsrcCstj||�S)a�
    Return certain machine-readable information items about the current Linux
    distribution in a dictionary, as shown in the following example:

    .. sourcecode:: python

        {
            'id': 'rhel',
            'version': '7.0',
            'version_parts': {
                'major': '7',
                'minor': '0',
                'build_number': ''
            },
            'like': 'fedora',
            'codename': 'Maipo'
        }

    The dictionary structure and keys are always the same, regardless of which
    information items are available in the underlying data sources. The values
    for the various keys are as follows:

    * ``id``:  The result of :func:`distro.id`.

    * ``version``:  The result of :func:`distro.version`.

    * ``version_parts -> major``:  The result of :func:`distro.major_version`.

    * ``version_parts -> minor``:  The result of :func:`distro.minor_version`.

    * ``version_parts -> build_number``:  The result of
      :func:`distro.build_number`.

    * ``like``:  The result of :func:`distro.like`.

    * ``codename``:  The result of :func:`distro.codename`.

    For a description of the *pretty* and *best* parameters, see the
    :func:`distro.version` method.
    )r�info)r
rrrrr{s)rcCstj�S)z�
    Return a dictionary containing key-value pairs for the information items
    from the os-release file data source of the current Linux distribution.

    See `os-release file`_ for details about these information items.
    )r�os_release_inforrrrr�srcCstj�S)z�
    Return a dictionary containing key-value pairs for the information items
    from the lsb_release command data source of the current Linux distribution.

    See `lsb_release command output`_ for details about these information
    items.
    )r�lsb_release_inforrrrr�srcCstj�S)z�
    Return a dictionary containing key-value pairs for the information items
    from the distro release file data source of the current Linux distribution.

    See `distro release file`_ for details about these information items.
    )r�distro_release_inforrrrr�srcCs
tj|�S)a�
    Return a single named information item from the os-release file data source
    of the current Linux distribution.

    Parameters:

    * ``attribute`` (string): Key of the information item.

    Returns:

    * (string): Value of the information item, if the item exists.
      The empty string, if the item does not exist.

    See `os-release file`_ for details about these information items.
    )r�os_release_attr)�	attributerrrr�srcCs
tj|�S)a�
    Return a single named information item from the lsb_release command output
    data source of the current Linux distribution.

    Parameters:

    * ``attribute`` (string): Key of the information item.

    Returns:

    * (string): Value of the information item, if the item exists.
      The empty string, if the item does not exist.

    See `lsb_release command output`_ for details about these information
    items.
    )r�lsb_release_attr)rrrrr�srcCs
tj|�S)a�
    Return a single named information item from the distro release file
    data source of the current Linux distribution.

    Parameters:

    * ``attribute`` (string): Key of the information item.

    Returns:

    * (string): Value of the information item, if the item exists.
      The empty string, if the item does not exist.

    See `distro release file`_ for details about these information items.
    )r�distro_release_attr)rrrrr�src@s�eZdZdZd:dd�Zdd�Zd;dd	�Zd
d�Zd<d
d�Zd=dd�Z	d>dd�Z
d?dd�Zd@dd�ZdAdd�Z
dd�Zdd�ZdBdd�Zdd �Zd!d"�Zd#d$�Zd%d&�Zd'd(�Zd)d*�Zd+d,�Zed-d.��Zd/d0�Zed1d2��Zd3d4�Zd5d6�Zed7d8��Zd9S)C�LinuxDistributiona
    Provides information about a Linux distribution.

    This package creates a private module-global instance of this class with
    default initialization arguments, that is used by the
    `consolidated accessor functions`_ and `single source accessor functions`_.
    By using default initialization arguments, that module-global instance
    returns data about the current Linux distribution (i.e. the distro this
    package runs on).

    Normally, it is not necessary to create additional instances of this class.
    However, in situations where control is needed over the exact data sources
    that are used, instances of this class can be created with a specific
    distro release file, or a specific os-release file, or without invoking the
    lsb_release command.
    T�cCsH|ptjjtt�|_|pd|_|j�|_|r4|j	�ni|_
|j�|_dS)a8	
        The initialization method of this class gathers information from the
        available data sources, and stores that in private instance attributes.
        Subsequent access to the information items uses these private instance
        attributes, so that the data sources are read only once.

        Parameters:

        * ``include_lsb`` (bool): Controls whether the
          `lsb_release command output`_ is included as a data source.

          If the lsb_release command is not available in the program execution
          path, the data source for the lsb_release command will be empty.

        * ``os_release_file`` (string): The path name of the
          `os-release file`_ that is to be used as a data source.

          An empty string (the default) will cause the default path name to
          be used (see `os-release file`_ for details).

          If the specified or defaulted os-release file does not exist, the
          data source for the os-release file will be empty.

        * ``distro_release_file`` (string): The path name of the
          `distro release file`_ that is to be used as a data source.

          An empty string (the default) will cause a default search algorithm
          to be used (see `distro release file`_ for details).

          If the specified distro release file does not exist, or if no default
          distro release file can be found, the data source for the distro
          release file will be empty.

        Public instance attributes:

        * ``os_release_file`` (string): The path name of the
          `os-release file`_ that is actually used as a data source. The
          empty string if no distro release file is used as a data source.

        * ``distro_release_file`` (string): The path name of the
          `distro release file`_ that is actually used as a data source. The
          empty string if no distro release file is used as a data source.

        Raises:

        * :py:exc:`IOError`: Some I/O issue with an os-release file or distro
          release file.

        * :py:exc:`subprocess.CalledProcessError`: The lsb_release command had
          some issue (other than not being available in the program execution
          path).

        * :py:exc:`UnicodeError`: A data source has unexpected characters or
          uses an unexpected encoding.
        rN)
�os�path�join�_UNIXCONFDIR�_OS_RELEASE_BASENAME�os_release_file�distro_release_file�_get_os_release_info�_os_release_info�_get_lsb_release_info�_lsb_release_info�_get_distro_release_info�_distro_release_info)�selfZinclude_lsbr"r#rrr�__init__s;

zLinuxDistribution.__init__cCsdj|j|j|j|j|j�S)z Return repr of all info
        z�LinuxDistribution(os_release_file={0!r}, distro_release_file={1!r}, _os_release_info={2!r}, _lsb_release_info={3!r}, _distro_release_info={4!r}))�formatr"r#r%r'r))r*rrr�__repr__UszLinuxDistribution.__repr__cCs"|r|j�n|j�|j�|j�fS)z�
        Return information about the Linux distribution that is compatible
        with Python's :func:`platform.linux_distribution`, supporting a subset
        of its parameters.

        For details, see :func:`distro.linux_distribution`.
        )r	rrr)r*rrrrres	z$LinuxDistribution.linux_distributioncCsTdd�}|jd�}|r ||t�S|jd�}|r8||t�S|jd�}|rP||t�SdS)zrReturn the distro ID of the Linux distribution, as a string.

        For details, see :func:`distro.id`.
        cSs|j�jdd�}|j||�S)N� �_)�lower�replace�get)�	distro_id�tablerrr�	normalizexsz'LinuxDistribution.id.<locals>.normalizer�distributor_idr)r�NORMALIZED_OS_IDr�NORMALIZED_LSB_IDr�NORMALIZED_DISTRO_ID)r*r5r3rrrrss





zLinuxDistribution.idFcCsh|jd�p|jd�p|jd�}|r`|jd�p4|jd�}|s`|jd�}|jdd�}|r`|d|}|pfdS)	zx
        Return the name of the Linux distribution, as a string.

        For details, see :func:`distro.name`.
        r	r6�pretty_name�descriptionT)r
r.r)rrrr)r*r
r	rrrrr	�s





zLinuxDistribution.namecCs�|jd�|jd�|jd�|j|jd��jdd�|j|jd��jdd�g}d}|r�xJ|D]$}|jd�|jd�ksv|dkrV|}qVWnx|D]}|dkr�|}Pq�W|r�|r�|j�r�dj||j��}|S)z~
        Return the version of the Linux distribution, as a string.

        For details, see :func:`distro.version`.
        �
version_id�releaser:rr;�.z	{0} ({1}))rrr�_parse_distro_release_contentr2�countrr,)r*r
rZversionsr�vrrrr�s&


zLinuxDistribution.versioncCsL|j|d�}|rHtjd�}|j|�}|rH|j�\}}}||p>d|pDdfSdS)z�
        Return the version of the Linux distribution, as a tuple of version
        numbers.

        For details, see :func:`distro.version_parts`.
        )rz(\d+)\.?(\d+)?\.?(\d+)?r)rrr)r�re�compile�match�groups)r*rZversion_strZ
version_regex�matches�major�minorrrrrr
�s

zLinuxDistribution.version_partscCs|j|�dS)z�
        Return the major version number of the current distribution.

        For details, see :func:`distro.major_version`.
        r)r
)r*rrrrr�szLinuxDistribution.major_versioncCs|j|�dS)z�
        Return the minor version number of the Linux distribution.

        For details, see :func:`distro.minor_version`.
        �)r
)r*rrrrr�szLinuxDistribution.minor_versioncCs|j|�dS)z{
        Return the build number of the Linux distribution.

        For details, see :func:`distro.build_number`.
        �)r
)r*rrrrr�szLinuxDistribution.build_numbercCs|jd�pdS)z�
        Return the IDs of distributions that are like the Linux distribution.

        For details, see :func:`distro.like`.
        Zid_liker)r)r*rrrr�szLinuxDistribution.likecCs"|jd�p |jd�p |jd�p dS)zs
        Return the codename of the Linux distribution.

        For details, see :func:`distro.codename`.
        rr)rrr)r*rrrr�s


zLinuxDistribution.codenamecCsBt|j�|j||�t|j|�|j|�|j|�d�|j�|j�d�S)z�
        Return certain machine-readable information about the Linux
        distribution.

        For details, see :func:`distro.info`.
        )rGrHr)rrr
rr)�dictrrrrrrr)r*r
rrrrr�s
zLinuxDistribution.infocCs|jS)z�
        Return a dictionary containing key-value pairs for the information
        items from the os-release file data source of the Linux distribution.

        For details, see :func:`distro.os_release_info`.
        )r%)r*rrrrsz!LinuxDistribution.os_release_infocCs|jS)z�
        Return a dictionary containing key-value pairs for the information
        items from the lsb_release command data source of the Linux
        distribution.

        For details, see :func:`distro.lsb_release_info`.
        )r')r*rrrrsz"LinuxDistribution.lsb_release_infocCs|jS)z�
        Return a dictionary containing key-value pairs for the information
        items from the distro release file data source of the Linux
        distribution.

        For details, see :func:`distro.distro_release_info`.
        )r))r*rrrrsz%LinuxDistribution.distro_release_infocCs|jj|d�S)z�
        Return a single named information item from the os-release file data
        source of the Linux distribution.

        For details, see :func:`distro.os_release_attr`.
        r)r%r2)r*rrrrr)sz!LinuxDistribution.os_release_attrcCs|jj|d�S)z�
        Return a single named information item from the lsb_release command
        output data source of the Linux distribution.

        For details, see :func:`distro.lsb_release_attr`.
        r)r'r2)r*rrrrr2sz"LinuxDistribution.lsb_release_attrcCs|jj|d�S)z�
        Return a single named information item from the distro release file
        data source of the Linux distribution.

        For details, see :func:`distro.distro_release_attr`.
        r)r)r2)r*rrrrr;sz%LinuxDistribution.distro_release_attrc	Cs.tjj|j�r*t|j��}|j|�SQRXiS)z�
        Get the information items from the specified os-release file.

        Returns:
            A dictionary containing all information items.
        N)rr�isfiler"�open�_parse_os_release_content)r*Zrelease_filerrrr$Dsz&LinuxDistribution._get_os_release_infocCs�i}tj|dd�}d|_tjddkr@t|jt�r@|jjd�|_t|�}x�|D]�}d|krN|j	dd�\}}t|t�r~|jd�}|||j
�<|d	kr�tjd
|�}|r�|j
�}|jd�}|jd�}|j�}||d
<q�d|d
<qNqNW|S)aD
        Parse the lines of an os-release file.

        Parameters:

        * lines: Iterable through the lines in the os-release file.
                 Each line must be a unicode string or a UTF-8 encoded byte
                 string.

        Returns:
            A dictionary containing all information items.
        T)�posixrrJz
iso-8859-1�=rIzutf-8�VERSIONz(\(\D+\))|,(\s+)?\D+z()�,rr)�shlexZwhitespace_split�sys�version_info�
isinstanceZ	wordchars�bytes�decode�list�splitr0rB�search�group�strip)�lines�propsZlexer�tokens�token�krArrrrrNPs.	






z+LinuxDistribution._parse_os_release_contentcCs�d}tj|dtjtjd�}|j�\}}|jd�|jd�}}|j}|dkr\|j�}|j|�S|dkrhiStj	dd�d
kr�tj
||||��n@tj	dd�dkr�tj
|||��ntj	dd�dkr�tj
||��dS)z�
        Get the information items from the lsb_release command output.

        Returns:
            A dictionary containing all information items.
        zlsb_release -aT)�shell�stdout�stderrzutf-8r�NrJ����)rgrh)rJri)rJrj)�
subprocess�Popen�PIPEZcommunicaterX�
returncode�
splitlines�_parse_lsb_release_contentrTrUZCalledProcessError)r*�cmdZprocessrdre�codeZcontentrrrr&�s(

z'LinuxDistribution._get_lsb_release_infocCsti}xj|D]b}t|t�r"|jd�n|}|jd�jdd�}t|�dkrFq
|\}}|j|jdd�j�|j�i�q
W|S)aM
        Parse the output of the lsb_release command.

        Parameters:

        * lines: Iterable through the lines of the lsb_release output.
                 Each line must be a unicode string or a UTF-8 encoded byte
                 string.

        Returns:
            A dictionary containing all information items.
        zutf-8�
�:rIrJr.r/)	rVrWrXr]rZ�len�updater1r0)r^r_�lineZkvrbrArrrrp�s
"z,LinuxDistribution._parse_lsb_release_contentcCs�|jr@|j|j�}tjj|j�}tj|�}|r<|jd�|d<|Stjt	�}|j
�x\|D]T}|tkrfqXtj|�}|rXtjjt	|�}|j|�}d|krX||_|jd�|d<|SqXWiSdS)z�
        Get the information items from the specified distro release file.

        Returns:
            A dictionary containing all information items.
        rIrr	N)
r#�_parse_distro_release_filerr�basename� _DISTRO_RELEASE_BASENAME_PATTERNrDr\�listdirr �sort� _DISTRO_RELEASE_IGNORE_BASENAMESr)r*�distro_inforyrDZ	basenames�filepathrrrr(�s,




z*LinuxDistribution._get_distro_release_infoc	Cs.tjj|�r*t|��}|j|j��SQRXiS)z�
        Parse a distro release file.

        Parameters:

        * filepath: Path name of the distro release file.

        Returns:
            A dictionary containing all information items.
        N)rrrLrMr?�readline)r*r�fprrrrx�s
z,LinuxDistribution._parse_distro_release_filecCs�t|t�r|jd�}tj|j�ddd	��}i}|r�|jd�ddd
�|d<|jd�rn|jd�ddd�|d<|jd�r�|jd�ddd�|d<n|r�|j�|d<|S)
a
        Parse a line from a distro release file.

        Parameters:
        * line: Line from the distro release file. Must be a unicode string
                or a UTF-8 encoded byte string.

        Returns:
            A dictionary containing all information items.
        zutf-8NrIrgr	rJr<r���r�r�r�)rVrWrX�(_DISTRO_RELEASE_CONTENT_REVERSED_PATTERNrDr]r\)rwrFr~rrrr?�s



z/LinuxDistribution._parse_distro_release_contentN)Trr)T)F)FF)F)F)F)F)FF)�__name__�
__module__�__qualname__�__doc__r+r-rrr	rr
rrrrrrrrrrrrr$�staticmethodrNr&rpr(rxr?rrrrrs:
@


!




	

			<)rcCs�ddl}tjt�}|jtj�|jtjtj	��|j
dd�}|jddddd�|j�}|j
rv|jt
jt�d	d
d��nB|jdtd
d
��td
d
�}|r�|jd|�t�}|r�|jd|�dS)NrzLinux distro info tool)r;z--jsonz-jz!Output in machine readable format�
store_true)�help�action�T)�indentZ	sort_keyszName: %s)r
zVersion: %szCodename: %s)�argparse�loggingZ	getLoggerr�ZsetLevel�DEBUGZ
addHandlerZ
StreamHandlerrTrd�ArgumentParser�add_argument�
parse_args�jsonr�dumpsr	rr)r�Zlogger�parser�argsZdistribution_versionZdistribution_codenamerrr�mains(

r��__main__)T)F)FF)F)F)F)F)FF)+r�rrBrTr�rSr�rk�platform�
startswith�ImportErrorr,r r!r7r8r9rCr�rzr}rrr	rr
rrrrrrrrrrrr�objectrrr�r�rrrr�<module>sd	

K
'
,





,


_vendor/__pycache__/retrying.cpython-36.pyc000064400000017525151733136260014722 0ustar003

�Pf�&�@slddlZddlmZddlZddlZddlZdZdd�ZGdd�de�Z	Gdd	�d	e�Z
Gd
d�de�ZdS)�N)�sixi���?csBt��dkr,t�d�r,dd�}|�d�S��fdd�}|SdS)z�
    Decorator function that instantiates the Retrying object
    @param *dargs: positional arguments passed to Retrying object
    @param **dkw: keyword arguments passed to the Retrying object
    �rcstj���fdd��}|S)Ncst�j�f|�|�S)N)�Retrying�call)�args�kw)�f��/usr/lib/python3.6/retrying.py�	wrapped_f$sz-retry.<locals>.wrap_simple.<locals>.wrapped_f)r�wraps)rrr	)rr
�wrap_simple"szretry.<locals>.wrap_simplecstj�����fdd��}|S)Ncst���j�f|�|�S)N)rr)rr)�dargs�dkwrr	r
r/sz&retry.<locals>.wrap.<locals>.wrapped_f)rr)rr)rr)rr
�wrap-szretry.<locals>.wrapN)�len�callable)rrr
rr	)rrr
�retrys
rc@sneZdZddd�Zdd�Zdd�Zd	d
�Zdd�Zd
d�Zdd�Z	dd�Z
dd�Zdd�Zdd�Z
dd�ZdS)rNFcs|dkrdn||_|dkrdn||_|dkr0dn||_|dkrBdn||_|dkrTdn||_|dkrfdn||_|	dkrxdn|	|_|
dkr�dn|
|_|dkr�tn||_	|dkr�dn||_
g�|dk	r̈j|j�|dk	r�j|j
�|dk	r�||_n&|dk�r
�fdd�|_nt||�|_dd�g�|dk	�r6�j|j�|dk	�sJ|dk	�rV�j|j�|dk	�sj|	dk	�rv�j|j�|
dk	�s�|dk	�r��j|j�|dk	�r�||_n&|dk�r‡fd	d�|_nt||�|_|dk�r�|j|_n||_|
dk�r�|j|_n|
|_||_dS)
N��di�rrcst��fdd��D��S)Nc3s|]}|���VqdS)Nr	)�.0r)�attempts�delayr	r
�	<genexpr>asz6Retrying.__init__.<locals>.<lambda>.<locals>.<genexpr>)�any)rr)�
stop_funcs)rrr
�<lambda>asz#Retrying.__init__.<locals>.<lambda>c_sdS)Nrr	)r�kwargsr	r	r
rhscst��fdd��D��S)Nc3s|]}|���VqdS)Nr	)rr)rrr	r
rysz6Retrying.__init__.<locals>.<lambda>.<locals>.<genexpr>)�max)rr)�
wait_funcs)rrr
rys)�_stop_max_attempt_number�_stop_max_delay�_wait_fixed�_wait_random_min�_wait_random_max�_wait_incrementing_start�_wait_incrementing_increment�_wait_exponential_multiplier�MAX_WAIT�_wait_exponential_max�_wait_jitter_max�append�stop_after_attempt�stop_after_delay�stop�getattr�fixed_sleep�random_sleep�incrementing_sleep�exponential_sleep�wait�
always_reject�_retry_on_exception�never_reject�_retry_on_result�_wrap_exception)�selfr.r4Zstop_max_attempt_numberZstop_max_delayZ
wait_fixedZwait_random_minZwait_random_maxZwait_incrementing_startZwait_incrementing_incrementZwait_exponential_multiplierZwait_exponential_maxZretry_on_exceptionZretry_on_result�wrap_exceptionZ	stop_funcZ	wait_funcZwait_jitter_maxr	)rrr
�__init__:sR








zRetrying.__init__cCs
||jkS)z;Stop after the previous attempt >= stop_max_attempt_number.)r )r:�previous_attempt_number�delay_since_first_attempt_msr	r	r
r,�szRetrying.stop_after_attemptcCs
||jkS)z=Stop after the time from the first attempt >= stop_max_delay.)r!)r:r=r>r	r	r
r-�szRetrying.stop_after_delaycCsdS)z#Don't sleep at all before retrying.rr	)r:r=r>r	r	r
�no_sleep�szRetrying.no_sleepcCs|jS)z0Sleep a fixed amount of time between each retry.)r")r:r=r>r	r	r
r0�szRetrying.fixed_sleepcCstj|j|j�S)zISleep a random amount of time between wait_random_min and wait_random_max)�randomZrandintr#r$)r:r=r>r	r	r
r1�szRetrying.random_sleepcCs$|j|j|d}|dkr d}|S)z�
        Sleep an incremental amount of time after each attempt, starting at
        wait_incrementing_start and incrementing by wait_incrementing_increment
        rr)r%r&)r:r=r>�resultr	r	r
r2�szRetrying.incrementing_sleepcCs2d|}|j|}||jkr"|j}|dkr.d}|S)N�r)r'r))r:r=r>ZexprAr	r	r
r3�s

zRetrying.exponential_sleepcCsdS)NFr	)r:rAr	r	r
r7�szRetrying.never_rejectcCsdS)NTr	)r:rAr	r	r
r5�szRetrying.always_rejectcCs4d}|jr ||j|jd�O}n||j|j�O}|S)NFr)�
has_exceptionr6�valuer8)r:�attemptZrejectr	r	r
�
should_reject�s
zRetrying.should_rejectc
Os�tttj�d��}d}x�yt|||�|d�}Wn tj�}t||d�}YnX|j|�sh|j|j�Stttj�d��|}|j	||�r�|jr�|j
r�|j��q�t|��n<|j||�}	|j
r�tj�|j
}
|	td|
�}	tj|	d�|d7}qWdS)Ni�rFTrg@�@)�int�round�time�Attempt�sys�exc_inforF�getr9r.rC�
RetryErrorr4r*r@r�sleep)r:�fnrrZ
start_time�attempt_numberrE�tbr>rOZjitterr	r	r
r�s*


z
Retrying.call)NNNNNNNNNNNNNFNNN)�__name__�
__module__�__qualname__r<r,r-r?r0r1r2r3r7r5rFrr	r	r	r
r8s0
F
		rc@s*eZdZdZdd�Zd
dd�Zdd�Zd	S)rJz�
    An Attempt encapsulates a call to a target function that may end as a
    normal return value from the function or an Exception depending on what
    occurred during the execution.
    cCs||_||_||_dS)N)rDrQrC)r:rDrQrCr	r	r
r<�szAttempt.__init__FcCs@|jr6|rt|��q<tj|jd|jd|jd�n|jSdS)z�
        Return the return value of this Attempt instance or raise an Exception.
        If wrap_exception is true, this Attempt is wrapped inside of a
        RetryError before being raised.
        rrrBN)rCrNrZreraiserD)r:r;r	r	r
rM�s

"zAttempt.getcCs:|jr&dj|jdjtj|jd���Sdj|j|j�SdS)NzAttempts: {0}, Error:
{1}�rBzAttempts: {0}, Value: {1})rC�formatrQ�join�	traceback�	format_tbrD)r:r	r	r
�__repr__�s zAttempt.__repr__N)F)rSrTrU�__doc__r<rMr[r	r	r	r
rJ�s
rJc@s eZdZdZdd�Zdd�ZdS)rNzU
    A RetryError encapsulates the last Attempt instance right before giving up.
    cCs
||_dS)N)�last_attempt)r:r]r	r	r
r<szRetryError.__init__cCsdj|j�S)NzRetryError[{0}])rWr])r:r	r	r
�__str__
szRetryError.__str__N)rSrTrUr\r<r^r	r	r	r
rNsrN)
r@Zpip._vendorrrKrIrYr(r�objectrrJ�	ExceptionrNr	r	r	r
�<module>s*!_vendor/__pycache__/re-vendor.cpython-36.pyc000064400000002010151733136260014737 0ustar003

�Pf�@s�ddlZddlZddlZddlZddlZejjejje��Z	dd�Z
dd�Zdd�Ze
dkr�eej�d	krpe
�ejd
dkr�e�nejd
dkr�e�ne
�dS)�NcCstd�tjd�dS)Nz"Usage: re-vendor.py [clean|vendor]�)�print�sys�exit�rr�/usr/lib/python3.6/re-vendor.py�usage	srcCsPx6tjt�D](}tjjt|�}tjj|�rtj|�qWtjtjjtd��dS)Nzsix.py)	�os�listdir�here�path�join�isdir�shutil�rmtree�unlink)�fn�dirnamerrr�clean
s
rcCs6tjddtddg�xtjd�D]}tj|�q WdS)NZinstallz-tz-rz
vendor.txtz
*.egg-info)�pip�mainr�globrr)rrrr�vendorsr�__main__�r)r	rrrrr�abspathr�__file__rrrr�__name__�len�argvrrrr�<module>s _vendor/__pycache__/pyparsing.cpython-36.pyc000064400000610513151733136260015067 0ustar003

�Pf�k�@s�dZdZdZdZddlZddlmZddlZddl	Z	ddl
Z
ddlZddlZddl
Z
ddlZddlZddlZddlmZyddlmZWn ek
r�ddlmZYnXydd	l
mZWn>ek
r�ydd	lmZWnek
r�dZYnXYnXd
ddd
ddddddddddddddddddd d!d"d#d$d%d&d'd(d)d*d+d,d-d.d/d0d1d2d3d4d5d6d7d8d9d:d;d<d=d>d?d@dAdBdCdDdEdFdGdHdIdJdKdLdMdNdOdPdQdRdSdTdUdVdWdXdYdZd[d\d]d^d_d`dadbdcdddedfdgdhdidjdkdldmdndodpdqdrgiZee	j�dds�ZeddskZe�r"e	jZe Z!e"Z#e Z$e%e&e'e(e)ee*e+e,e-e.gZ/nbe	j0Ze1Z2dtdu�Z$gZ/ddl3Z3xBdvj4�D]6Z5ye/j6e7e3e5��Wne8k
�r|�wJYnX�qJWe9dwdx�e2dy�D��Z:dzd{�Z;Gd|d}�d}e<�Z=ej>ej?Z@d~ZAeAdZBe@eAZCe"d��ZDd�jEd�dx�ejFD��ZGGd�d!�d!eH�ZIGd�d#�d#eI�ZJGd�d%�d%eI�ZKGd�d'�d'eK�ZLGd�d*�d*eH�ZMGd�d��d�e<�ZNGd�d&�d&e<�ZOe
jPjQeO�d�d=�ZRd�dN�ZSd�dK�ZTd�d��ZUd�d��ZVd�d��ZWd�dU�ZX�d/d�d��ZYGd�d(�d(e<�ZZGd�d0�d0eZ�Z[Gd�d�de[�Z\Gd�d�de[�Z]Gd�d�de[�Z^e^Z_e^eZ_`Gd�d�de[�ZaGd�d�de^�ZbGd�d�dea�ZcGd�dp�dpe[�ZdGd�d3�d3e[�ZeGd�d+�d+e[�ZfGd�d)�d)e[�ZgGd�d
�d
e[�ZhGd�d2�d2e[�ZiGd�d��d�e[�ZjGd�d�dej�ZkGd�d�dej�ZlGd�d�dej�ZmGd�d.�d.ej�ZnGd�d-�d-ej�ZoGd�d5�d5ej�ZpGd�d4�d4ej�ZqGd�d$�d$eZ�ZrGd�d
�d
er�ZsGd�d �d er�ZtGd�d�der�ZuGd�d�der�ZvGd�d"�d"eZ�ZwGd�d�dew�ZxGd�d�dew�ZyGd�d��d�ew�ZzGd�d�dez�Z{Gd�d6�d6ez�Z|Gd�d��d�e<�Z}e}�Z~Gd�d�dew�ZGd�d,�d,ew�Z�Gd�d�dew�Z�Gd�d��d�e��Z�Gd�d1�d1ew�Z�Gd�d�de��Z�Gd�d�de��Z�Gd�d�de��Z�Gd�d/�d/e��Z�Gd�d�de<�Z�d�df�Z��d0d�dD�Z��d1d�d@�Z�d�d΄Z�d�dS�Z�d�dR�Z�d�d҄Z��d2d�dW�Z�d�dE�Z��d3d�dk�Z�d�dl�Z�d�dn�Z�e\�j�dG�Z�el�j�dM�Z�em�j�dL�Z�en�j�de�Z�eo�j�dd�Z�eeeDd�d�dڍj�d�d܄�Z�efd݃j�d�d܄�Z�efd߃j�d�d܄�Z�e�e�Be�BeeeGd�dyd�Befd�ej��BZ�e�e�e�d�e��Z�e^d�ed�j�d�e�e{e�e�B��j�d�d�Z�d�dc�Z�d�dQ�Z�d�d`�Z�d�d^�Z�d�dq�Z�e�d�d܄�Z�e�d�d܄�Z�d�d�Z�d�dO�Z�d�dP�Z�d�di�Z�e<�e�_��d4d�do�Z�e=�Z�e<�e�_�e<�e�_�e�d��e�d��fd�dm�Z�e�Z�e�efd��d��j�d��Z�e�efd��d��j�d��Z�e�efd��d�efd��d�B�j��d�Z�e�e_�d�e�j��j��d�Z�d�d�de�j�f�ddT�Z��d5�ddj�Z�e��d�Z�e��d�Z�e�eee@eC�d�j��d��\Z�Z�e�e��d	j4��d
��Z�ef�d�djEe�j��d
�j��d�ZĐdd_�Z�e�ef�d��d�j��d�Z�ef�d�j��d�Z�ef�d�jȃj��d�Z�ef�d�j��d�Z�e�ef�d��de�B�j��d�Z�e�Z�ef�d�j��d�Z�e�e{eeeGdɐd�eee�d�e^dɃem����j΃j��d�Z�e�ee�j�e�Bd��d��j�d>�Z�G�d dr�dr�Z�eҐd!k�r�eb�d"�Z�eb�d#�Z�eee@eC�d$�Z�e�eՐd%dӐd&�j�e��Z�e�e�eփ�j��d'�Zאd(e�BZ�e�eՐd%dӐd&�j�e��Z�e�e�eك�j��d)�Z�eӐd*�eؐd'�e�eڐd)�Z�e�jܐd+�e�j�jܐd,�e�j�jܐd,�e�j�jܐd-�ddl�Z�e�j�j�e�e�j��e�j�jܐd.�dS(6aS
pyparsing module - Classes and methods to define and execute parsing grammars

The pyparsing module is an alternative approach to creating and executing simple grammars,
vs. the traditional lex/yacc approach, or the use of regular expressions.  With pyparsing, you
don't need to learn a new syntax for defining grammars or matching expressions - the parsing module
provides a library of classes that you use to construct the grammar directly in Python.

Here is a program to parse "Hello, World!" (or any greeting of the form 
C{"<salutation>, <addressee>!"}), built up using L{Word}, L{Literal}, and L{And} elements 
(L{'+'<ParserElement.__add__>} operator gives L{And} expressions, strings are auto-converted to
L{Literal} expressions)::

    from pyparsing import Word, alphas

    # define grammar of a greeting
    greet = Word(alphas) + "," + Word(alphas) + "!"

    hello = "Hello, World!"
    print (hello, "->", greet.parseString(hello))

The program outputs the following::

    Hello, World! -> ['Hello', ',', 'World', '!']

The Python representation of the grammar is quite readable, owing to the self-explanatory
class names, and the use of '+', '|' and '^' operators.

The L{ParseResults} object returned from L{ParserElement.parseString<ParserElement.parseString>} can be accessed as a nested list, a dictionary, or an
object with named attributes.

The pyparsing module handles some of the problems that are typically vexing when writing text parsers:
 - extra or missing whitespace (the above program will also handle "Hello,World!", "Hello  ,  World  !", etc.)
 - quoted strings
 - embedded comments
z2.1.10z07 Oct 2016 01:31 UTCz*Paul McGuire <ptmcg@users.sourceforge.net>�N)�ref)�datetime)�RLock)�OrderedDict�And�CaselessKeyword�CaselessLiteral�
CharsNotIn�Combine�Dict�Each�Empty�
FollowedBy�Forward�
GoToColumn�Group�Keyword�LineEnd�	LineStart�Literal�
MatchFirst�NoMatch�NotAny�	OneOrMore�OnlyOnce�Optional�Or�ParseBaseException�ParseElementEnhance�ParseException�ParseExpression�ParseFatalException�ParseResults�ParseSyntaxException�
ParserElement�QuotedString�RecursiveGrammarException�Regex�SkipTo�	StringEnd�StringStart�Suppress�Token�TokenConverter�White�Word�WordEnd�	WordStart�
ZeroOrMore�	alphanums�alphas�
alphas8bit�anyCloseTag�
anyOpenTag�
cStyleComment�col�commaSeparatedList�commonHTMLEntity�countedArray�cppStyleComment�dblQuotedString�dblSlashComment�
delimitedList�dictOf�downcaseTokens�empty�hexnums�htmlComment�javaStyleComment�line�lineEnd�	lineStart�lineno�makeHTMLTags�makeXMLTags�matchOnlyAtCol�matchPreviousExpr�matchPreviousLiteral�
nestedExpr�nullDebugAction�nums�oneOf�opAssoc�operatorPrecedence�
printables�punc8bit�pythonStyleComment�quotedString�removeQuotes�replaceHTMLEntity�replaceWith�
restOfLine�sglQuotedString�srange�	stringEnd�stringStart�traceParseAction�
unicodeString�upcaseTokens�
withAttribute�
indentedBlock�originalTextFor�ungroup�
infixNotation�locatedExpr�	withClass�
CloseMatch�tokenMap�pyparsing_common�cCs`t|t�r|Syt|�Stk
rZt|�jtj�d�}td�}|jdd��|j	|�SXdS)aDrop-in replacement for str(obj) that tries to be Unicode friendly. It first tries
           str(obj). If that fails with a UnicodeEncodeError, then it tries unicode(obj). It
           then < returns the unicode object | encodes it with the default encoding | ... >.
        �xmlcharrefreplacez&#\d+;cSs$dtt|ddd���dd�S)Nz\ur�����)�hex�int)�t�rw�/usr/lib/python3.6/pyparsing.py�<lambda>�sz_ustr.<locals>.<lambda>N)
�
isinstanceZunicode�str�UnicodeEncodeError�encode�sys�getdefaultencodingr'�setParseAction�transformString)�obj�retZ
xmlcharrefrwrwrx�_ustr�s
r�z6sum len sorted reversed list tuple set any all min maxccs|]
}|VqdS)Nrw)�.0�yrwrwrx�	<genexpr>�sr�rrcCs>d}dd�dj�D�}x"t||�D]\}}|j||�}q"W|S)z/Escape &, <, >, ", ', etc. in a string of data.z&><"'css|]}d|dVqdS)�&�;Nrw)r��srwrwrxr��sz_xml_escape.<locals>.<genexpr>zamp gt lt quot apos)�split�zip�replace)�dataZfrom_symbolsZ
to_symbolsZfrom_Zto_rwrwrx�_xml_escape�s
r�c@seZdZdS)�
_ConstantsN)�__name__�
__module__�__qualname__rwrwrwrxr��sr��
0123456789ZABCDEFabcdef�\�ccs|]}|tjkr|VqdS)N)�stringZ
whitespace)r��crwrwrxr��sc@sPeZdZdZddd�Zedd��Zdd	�Zd
d�Zdd
�Z	ddd�Z
dd�ZdS)rz7base exception class for all parsing runtime exceptionsrNcCs>||_|dkr||_d|_n||_||_||_|||f|_dS)Nr�)�loc�msg�pstr�
parserElement�args)�selfr�r�r��elemrwrwrx�__init__�szParseBaseException.__init__cCs||j|j|j|j�S)z�
        internal factory method to simplify creating one type of ParseException 
        from another - avoids having __init__ signature conflicts among subclasses
        )r�r�r�r�)�cls�perwrwrx�_from_exception�sz"ParseBaseException._from_exceptioncCsN|dkrt|j|j�S|dkr,t|j|j�S|dkrBt|j|j�St|��dS)z�supported attributes by name are:
            - lineno - returns the line number of the exception text
            - col - returns the column number of the exception text
            - line - returns the line containing the exception text
        rJr9�columnrGN)r9r�)rJr�r�r9rG�AttributeError)r�Zanamerwrwrx�__getattr__�szParseBaseException.__getattr__cCsd|j|j|j|jfS)Nz"%s (at char %d), (line:%d, col:%d))r�r�rJr�)r�rwrwrx�__str__�szParseBaseException.__str__cCst|�S)N)r�)r�rwrwrx�__repr__�szParseBaseException.__repr__�>!<cCs<|j}|jd}|r4dj|d|�|||d�f�}|j�S)z�Extracts the exception line from the input string, and marks
           the location of the exception with a special symbol.
        rrr�N)rGr��join�strip)r�ZmarkerStringZline_strZline_columnrwrwrx�
markInputline�s
z ParseBaseException.markInputlinecCsdj�tt|��S)Nzlineno col line)r��dir�type)r�rwrwrx�__dir__�szParseBaseException.__dir__)rNN)r�)r�r�r��__doc__r��classmethodr�r�r�r�r�r�rwrwrwrxr�s


c@seZdZdZdS)raN
    Exception thrown when parse expressions don't match class;
    supported attributes by name are:
     - lineno - returns the line number of the exception text
     - col - returns the column number of the exception text
     - line - returns the line containing the exception text
        
    Example::
        try:
            Word(nums).setName("integer").parseString("ABC")
        except ParseException as pe:
            print(pe)
            print("column: {}".format(pe.col))
            
    prints::
       Expected integer (at char 0), (line:1, col:1)
        column: 1
    N)r�r�r�r�rwrwrwrxr�sc@seZdZdZdS)r!znuser-throwable exception thrown when inconsistent parse content
       is found; stops all parsing immediatelyN)r�r�r�r�rwrwrwrxr!sc@seZdZdZdS)r#z�just like L{ParseFatalException}, but thrown internally when an
       L{ErrorStop<And._ErrorStop>} ('-' operator) indicates that parsing is to stop 
       immediately because an unbacktrackable syntax error has been foundN)r�r�r�r�rwrwrwrxr#sc@s eZdZdZdd�Zdd�ZdS)r&zZexception thrown by L{ParserElement.validate} if the grammar could be improperly recursivecCs
||_dS)N)�parseElementTrace)r��parseElementListrwrwrxr�sz"RecursiveGrammarException.__init__cCs
d|jS)NzRecursiveGrammarException: %s)r�)r�rwrwrxr� sz!RecursiveGrammarException.__str__N)r�r�r�r�r�r�rwrwrwrxr&sc@s,eZdZdd�Zdd�Zdd�Zdd�Zd	S)
�_ParseResultsWithOffsetcCs||f|_dS)N)�tup)r�Zp1Zp2rwrwrxr�$sz _ParseResultsWithOffset.__init__cCs
|j|S)N)r�)r��irwrwrx�__getitem__&sz#_ParseResultsWithOffset.__getitem__cCst|jd�S)Nr)�reprr�)r�rwrwrxr�(sz _ParseResultsWithOffset.__repr__cCs|jd|f|_dS)Nr)r�)r�r�rwrwrx�	setOffset*sz!_ParseResultsWithOffset.setOffsetN)r�r�r�r�r�r�r�rwrwrwrxr�#sr�c@s�eZdZdZd[dd�Zddddefdd�Zdd	�Zefd
d�Zdd
�Z	dd�Z
dd�Zdd�ZeZ
dd�Zdd�Zdd�Zdd�Zdd�Zer�eZeZeZn$eZeZeZdd�Zd d!�Zd"d#�Zd$d%�Zd&d'�Zd\d(d)�Zd*d+�Zd,d-�Zd.d/�Zd0d1�Z d2d3�Z!d4d5�Z"d6d7�Z#d8d9�Z$d:d;�Z%d<d=�Z&d]d?d@�Z'dAdB�Z(dCdD�Z)dEdF�Z*d^dHdI�Z+dJdK�Z,dLdM�Z-d_dOdP�Z.dQdR�Z/dSdT�Z0dUdV�Z1dWdX�Z2dYdZ�Z3dS)`r"aI
    Structured parse results, to provide multiple means of access to the parsed data:
       - as a list (C{len(results)})
       - by list index (C{results[0], results[1]}, etc.)
       - by attribute (C{results.<resultsName>} - see L{ParserElement.setResultsName})

    Example::
        integer = Word(nums)
        date_str = (integer.setResultsName("year") + '/' 
                        + integer.setResultsName("month") + '/' 
                        + integer.setResultsName("day"))
        # equivalent form:
        # date_str = integer("year") + '/' + integer("month") + '/' + integer("day")

        # parseString returns a ParseResults object
        result = date_str.parseString("1999/12/31")

        def test(s, fn=repr):
            print("%s -> %s" % (s, fn(eval(s))))
        test("list(result)")
        test("result[0]")
        test("result['month']")
        test("result.day")
        test("'month' in result")
        test("'minutes' in result")
        test("result.dump()", str)
    prints::
        list(result) -> ['1999', '/', '12', '/', '31']
        result[0] -> '1999'
        result['month'] -> '12'
        result.day -> '31'
        'month' in result -> True
        'minutes' in result -> False
        result.dump() -> ['1999', '/', '12', '/', '31']
        - day: 31
        - month: 12
        - year: 1999
    NTcCs"t||�r|Stj|�}d|_|S)NT)rz�object�__new__�_ParseResults__doinit)r��toklist�name�asList�modalZretobjrwrwrxr�Ts


zParseResults.__new__c
Cs`|jrvd|_d|_d|_i|_||_||_|dkr6g}||t�rP|dd�|_n||t�rft|�|_n|g|_t	�|_
|dk	o�|�r\|s�d|j|<||t�r�t|�}||_||t
d�ttf�o�|ddgfk�s\||t�r�|g}|�r&||t��rt|j�d�||<ntt|d�d�||<|||_n6y|d||<Wn$tttfk
�rZ|||<YnXdS)NFrr�)r��_ParseResults__name�_ParseResults__parent�_ParseResults__accumNames�_ParseResults__asList�_ParseResults__modal�list�_ParseResults__toklist�_generatorType�dict�_ParseResults__tokdictrur�r��
basestringr"r��copy�KeyError�	TypeError�
IndexError)r�r�r�r�r�rzrwrwrxr�]sB



$
zParseResults.__init__cCsPt|ttf�r|j|S||jkr4|j|ddStdd�|j|D��SdS)NrrrcSsg|]}|d�qS)rrw)r��vrwrwrx�
<listcomp>�sz,ParseResults.__getitem__.<locals>.<listcomp>rs)rzru�slicer�r�r�r")r�r�rwrwrxr��s


zParseResults.__getitem__cCs�||t�r0|jj|t��|g|j|<|d}nD||ttf�rN||j|<|}n&|jj|t��t|d�g|j|<|}||t�r�t|�|_	dS)Nr)
r�r��getr�rur�r�r"�wkrefr�)r��kr�rz�subrwrwrx�__setitem__�s


"
zParseResults.__setitem__c
Cs�t|ttf�r�t|j�}|j|=t|t�rH|dkr:||7}t||d�}tt|j|���}|j�x^|j	j
�D]F\}}x<|D]4}x.t|�D]"\}\}}	t||	|	|k�||<q�Wq|WqnWn|j	|=dS)Nrrr)
rzrur��lenr�r��range�indices�reverser��items�	enumerater�)
r�r�ZmylenZremovedr��occurrences�jr��value�positionrwrwrx�__delitem__�s


$zParseResults.__delitem__cCs
||jkS)N)r�)r�r�rwrwrx�__contains__�szParseResults.__contains__cCs
t|j�S)N)r�r�)r�rwrwrx�__len__�szParseResults.__len__cCs
|jS)N)r�)r�rwrwrx�__bool__�szParseResults.__bool__cCs
t|j�S)N)�iterr�)r�rwrwrx�__iter__�szParseResults.__iter__cCst|jddd��S)Nrrrs)r�r�)r�rwrwrx�__reversed__�szParseResults.__reversed__cCs$t|jd�r|jj�St|j�SdS)N�iterkeys)�hasattrr�r�r�)r�rwrwrx�	_iterkeys�s
zParseResults._iterkeyscs�fdd��j�D�S)Nc3s|]}�|VqdS)Nrw)r�r�)r�rwrxr��sz+ParseResults._itervalues.<locals>.<genexpr>)r�)r�rw)r�rx�_itervalues�szParseResults._itervaluescs�fdd��j�D�S)Nc3s|]}|�|fVqdS)Nrw)r�r�)r�rwrxr��sz*ParseResults._iteritems.<locals>.<genexpr>)r�)r�rw)r�rx�
_iteritems�szParseResults._iteritemscCst|j��S)zVReturns all named result keys (as a list in Python 2.x, as an iterator in Python 3.x).)r�r�)r�rwrwrx�keys�szParseResults.keyscCst|j��S)zXReturns all named result values (as a list in Python 2.x, as an iterator in Python 3.x).)r��
itervalues)r�rwrwrx�values�szParseResults.valuescCst|j��S)zfReturns all named result key-values (as a list of tuples in Python 2.x, as an iterator in Python 3.x).)r��	iteritems)r�rwrwrxr��szParseResults.itemscCs
t|j�S)z�Since keys() returns an iterator, this method is helpful in bypassing
           code that looks for the existence of any defined results names.)�boolr�)r�rwrwrx�haskeys�szParseResults.haskeyscOs�|s
dg}x6|j�D]*\}}|dkr2|d|f}qtd|��qWt|dt�sht|�dksh|d|kr�|d}||}||=|S|d}|SdS)a�
        Removes and returns item at specified index (default=C{last}).
        Supports both C{list} and C{dict} semantics for C{pop()}. If passed no
        argument or an integer argument, it will use C{list} semantics
        and pop tokens from the list of parsed tokens. If passed a 
        non-integer argument (most likely a string), it will use C{dict}
        semantics and pop the corresponding value from any defined 
        results names. A second default return value argument is 
        supported, just as in C{dict.pop()}.

        Example::
            def remove_first(tokens):
                tokens.pop(0)
            print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321']
            print(OneOrMore(Word(nums)).addParseAction(remove_first).parseString("0 123 321")) # -> ['123', '321']

            label = Word(alphas)
            patt = label("LABEL") + OneOrMore(Word(nums))
            print(patt.parseString("AAB 123 321").dump())

            # Use pop() in a parse action to remove named result (note that corresponding value is not
            # removed from list form of results)
            def remove_LABEL(tokens):
                tokens.pop("LABEL")
                return tokens
            patt.addParseAction(remove_LABEL)
            print(patt.parseString("AAB 123 321").dump())
        prints::
            ['AAB', '123', '321']
            - LABEL: AAB

            ['AAB', '123', '321']
        rr�defaultrz-pop() got an unexpected keyword argument '%s'Nrs)r�r�rzrur�)r�r��kwargsr�r��indexr�Zdefaultvaluerwrwrx�pop�s"zParseResults.popcCs||kr||S|SdS)ai
        Returns named result matching the given key, or if there is no
        such name, then returns the given C{defaultValue} or C{None} if no
        C{defaultValue} is specified.

        Similar to C{dict.get()}.
        
        Example::
            integer = Word(nums)
            date_str = integer("year") + '/' + integer("month") + '/' + integer("day")           

            result = date_str.parseString("1999/12/31")
            print(result.get("year")) # -> '1999'
            print(result.get("hour", "not specified")) # -> 'not specified'
            print(result.get("hour")) # -> None
        Nrw)r��key�defaultValuerwrwrxr�szParseResults.getcCsZ|jj||�xF|jj�D]8\}}x.t|�D]"\}\}}t||||k�||<q,WqWdS)a
        Inserts new element at location index in the list of parsed tokens.
        
        Similar to C{list.insert()}.

        Example::
            print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321']

            # use a parse action to insert the parse location in the front of the parsed results
            def insert_locn(locn, tokens):
                tokens.insert(0, locn)
            print(OneOrMore(Word(nums)).addParseAction(insert_locn).parseString("0 123 321")) # -> [0, '0', '123', '321']
        N)r��insertr�r�r�r�)r�r�ZinsStrr�r�r�r�r�rwrwrxr�2szParseResults.insertcCs|jj|�dS)a�
        Add single element to end of ParseResults list of elements.

        Example::
            print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321']
            
            # use a parse action to compute the sum of the parsed integers, and add it to the end
            def append_sum(tokens):
                tokens.append(sum(map(int, tokens)))
            print(OneOrMore(Word(nums)).addParseAction(append_sum).parseString("0 123 321")) # -> ['0', '123', '321', 444]
        N)r��append)r��itemrwrwrxr�FszParseResults.appendcCs$t|t�r||7}n|jj|�dS)a
        Add sequence of elements to end of ParseResults list of elements.

        Example::
            patt = OneOrMore(Word(alphas))
            
            # use a parse action to append the reverse of the matched strings, to make a palindrome
            def make_palindrome(tokens):
                tokens.extend(reversed([t[::-1] for t in tokens]))
                return ''.join(tokens)
            print(patt.addParseAction(make_palindrome).parseString("lskdj sdlkjf lksd")) # -> 'lskdjsdlkjflksddsklfjkldsjdksl'
        N)rzr"r��extend)r�Zitemseqrwrwrxr�Ts

zParseResults.extendcCs|jdd�=|jj�dS)z7
        Clear all elements and results names.
        N)r�r��clear)r�rwrwrxr�fszParseResults.clearcCsfy||Stk
rdSX||jkr^||jkrD|j|ddStdd�|j|D��SndSdS)Nr�rrrcSsg|]}|d�qS)rrw)r�r�rwrwrxr�wsz,ParseResults.__getattr__.<locals>.<listcomp>rs)r�r�r�r")r�r�rwrwrxr�ms

zParseResults.__getattr__cCs|j�}||7}|S)N)r�)r��otherr�rwrwrx�__add__{szParseResults.__add__cs�|jrnt|j���fdd��|jj�}�fdd�|D�}x4|D],\}}|||<t|dt�r>t|�|d_q>W|j|j7_|jj	|j�|S)Ncs|dkr�S|�S)Nrrw)�a)�offsetrwrxry�sz'ParseResults.__iadd__.<locals>.<lambda>c	s4g|],\}}|D]}|t|d�|d��f�qqS)rrr)r�)r�r��vlistr�)�	addoffsetrwrxr��sz)ParseResults.__iadd__.<locals>.<listcomp>r)
r�r�r�r�rzr"r�r�r��update)r�r�Z
otheritemsZotherdictitemsr�r�rw)rrrx�__iadd__�s


zParseResults.__iadd__cCs&t|t�r|dkr|j�S||SdS)Nr)rzrur�)r�r�rwrwrx�__radd__�szParseResults.__radd__cCsdt|j�t|j�fS)Nz(%s, %s))r�r�r�)r�rwrwrxr��szParseResults.__repr__cCsddjdd�|jD��dS)N�[z, css(|] }t|t�rt|�nt|�VqdS)N)rzr"r�r�)r�r�rwrwrxr��sz'ParseResults.__str__.<locals>.<genexpr>�])r�r�)r�rwrwrxr��szParseResults.__str__r�cCsPg}xF|jD]<}|r"|r"|j|�t|t�r:||j�7}q|jt|��qW|S)N)r�r�rzr"�
_asStringListr�)r��sep�outr�rwrwrxr
�s

zParseResults._asStringListcCsdd�|jD�S)a�
        Returns the parse results as a nested list of matching tokens, all converted to strings.

        Example::
            patt = OneOrMore(Word(alphas))
            result = patt.parseString("sldkj lsdkj sldkj")
            # even though the result prints in string-like form, it is actually a pyparsing ParseResults
            print(type(result), result) # -> <class 'pyparsing.ParseResults'> ['sldkj', 'lsdkj', 'sldkj']
            
            # Use asList() to create an actual list
            result_list = result.asList()
            print(type(result_list), result_list) # -> <class 'list'> ['sldkj', 'lsdkj', 'sldkj']
        cSs"g|]}t|t�r|j�n|�qSrw)rzr"r�)r��resrwrwrxr��sz'ParseResults.asList.<locals>.<listcomp>)r�)r�rwrwrxr��szParseResults.asListcs6tr|j}n|j}�fdd��t�fdd�|�D��S)a�
        Returns the named parse results as a nested dictionary.

        Example::
            integer = Word(nums)
            date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
            
            result = date_str.parseString('12/31/1999')
            print(type(result), repr(result)) # -> <class 'pyparsing.ParseResults'> (['12', '/', '31', '/', '1999'], {'day': [('1999', 4)], 'year': [('12', 0)], 'month': [('31', 2)]})
            
            result_dict = result.asDict()
            print(type(result_dict), repr(result_dict)) # -> <class 'dict'> {'day': '1999', 'year': '12', 'month': '31'}

            # even though a ParseResults supports dict-like access, sometime you just need to have a dict
            import json
            print(json.dumps(result)) # -> Exception: TypeError: ... is not JSON serializable
            print(json.dumps(result.asDict())) # -> {"month": "31", "day": "1999", "year": "12"}
        cs6t|t�r.|j�r|j�S�fdd�|D�Sn|SdS)Ncsg|]}�|��qSrwrw)r�r�)�toItemrwrxr��sz7ParseResults.asDict.<locals>.toItem.<locals>.<listcomp>)rzr"r��asDict)r�)rrwrxr�s

z#ParseResults.asDict.<locals>.toItemc3s|]\}}|�|�fVqdS)Nrw)r�r�r�)rrwrxr��sz&ParseResults.asDict.<locals>.<genexpr>)�PY_3r�r�r�)r�Zitem_fnrw)rrxr�s
	zParseResults.asDictcCs8t|j�}|jj�|_|j|_|jj|j�|j|_|S)zA
        Returns a new copy of a C{ParseResults} object.
        )r"r�r�r�r�r�rr�)r�r�rwrwrxr��s
zParseResults.copyFcCsPd}g}tdd�|jj�D��}|d}|s8d}d}d}d}	|dk	rJ|}	n|jrV|j}	|	sf|rbdSd}	|||d|	d	g7}x�t|j�D]�\}
}t|t�r�|
|kr�||j||
|o�|dk||�g7}n||jd|o�|dk||�g7}q�d}|
|kr�||
}|�s
|�rq�nd}t	t
|��}
|||d|d	|
d
|d	g	7}q�W|||d
|	d	g7}dj|�S)z�
        (Deprecated) Returns the parse results as XML. Tags are created for tokens and lists that have defined results names.
        �
css(|] \}}|D]}|d|fVqqdS)rrNrw)r�r�rr�rwrwrxr��sz%ParseResults.asXML.<locals>.<genexpr>z  r�NZITEM�<�>z</)r�r�r�r�r�r�rzr"�asXMLr�r�r�)r�ZdoctagZnamedItemsOnly�indentZ	formatted�nlrZ
namedItemsZnextLevelIndentZselfTagr�r
ZresTagZxmlBodyTextrwrwrxr�sT


zParseResults.asXMLcCs:x4|jj�D]&\}}x|D]\}}||kr|SqWqWdS)N)r�r�)r�r�r�rr�r�rwrwrxZ__lookup$s
zParseResults.__lookupcCs�|jr|jS|jr.|j�}|r(|j|�SdSnNt|�dkrxt|j�dkrxtt|jj���dddkrxtt|jj���SdSdS)a(
        Returns the results name for this token expression. Useful when several 
        different expressions might match at a particular location.

        Example::
            integer = Word(nums)
            ssn_expr = Regex(r"\d\d\d-\d\d-\d\d\d\d")
            house_number_expr = Suppress('#') + Word(nums, alphanums)
            user_data = (Group(house_number_expr)("house_number") 
                        | Group(ssn_expr)("ssn")
                        | Group(integer)("age"))
            user_info = OneOrMore(user_data)
            
            result = user_info.parseString("22 111-22-3333 #221B")
            for item in result:
                print(item.getName(), ':', item[0])
        prints::
            age : 22
            ssn : 111-22-3333
            house_number : 221B
        Nrrrrs)rrs)	r�r��_ParseResults__lookupr�r��nextr�r�r�)r��parrwrwrx�getName+s
zParseResults.getNamercCsbg}d}|j|t|j���|�rX|j�r�tdd�|j�D��}xz|D]r\}}|r^|j|�|jd|d||f�t|t�r�|r�|j|j||d��q�|jt|��qH|jt	|��qHWn�t
dd�|D���rX|}x~t|�D]r\}	}
t|
t��r*|jd|d||	|d|d|
j||d�f�q�|jd|d||	|d|dt|
�f�q�Wd	j|�S)
aH
        Diagnostic method for listing out the contents of a C{ParseResults}.
        Accepts an optional C{indent} argument so that this string can be embedded
        in a nested display of other data.

        Example::
            integer = Word(nums)
            date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
            
            result = date_str.parseString('12/31/1999')
            print(result.dump())
        prints::
            ['12', '/', '31', '/', '1999']
            - day: 1999
            - month: 31
            - year: 12
        rcss|]\}}t|�|fVqdS)N)r{)r�r�r�rwrwrxr�gsz$ParseResults.dump.<locals>.<genexpr>z
%s%s- %s: z  rrcss|]}t|t�VqdS)N)rzr")r��vvrwrwrxr�ssz
%s%s[%d]:
%s%s%sr�)
r�r�r�r��sortedr�rzr"�dumpr��anyr�r�)r�r�depth�fullr�NLr�r�r�r�rrwrwrxrPs,

4.zParseResults.dumpcOstj|j�f|�|�dS)a�
        Pretty-printer for parsed results as a list, using the C{pprint} module.
        Accepts additional positional or keyword args as defined for the 
        C{pprint.pprint} method. (U{http://docs.python.org/3/library/pprint.html#pprint.pprint})

        Example::
            ident = Word(alphas, alphanums)
            num = Word(nums)
            func = Forward()
            term = ident | num | Group('(' + func + ')')
            func <<= ident + Group(Optional(delimitedList(term)))
            result = func.parseString("fna a,b,(fnb c,d,200),100")
            result.pprint(width=40)
        prints::
            ['fna',
             ['a',
              'b',
              ['(', 'fnb', ['c', 'd', '200'], ')'],
              '100']]
        N)�pprintr�)r�r�r�rwrwrxr"}szParseResults.pprintcCs.|j|jj�|jdk	r|j�p d|j|jffS)N)r�r�r�r�r�r�)r�rwrwrx�__getstate__�s
zParseResults.__getstate__cCsN|d|_|d\|_}}|_i|_|jj|�|dk	rDt|�|_nd|_dS)Nrrr)r�r�r�r�rr�r�)r��staterZinAccumNamesrwrwrx�__setstate__�s
zParseResults.__setstate__cCs|j|j|j|jfS)N)r�r�r�r�)r�rwrwrx�__getnewargs__�szParseResults.__getnewargs__cCstt|��t|j��S)N)r�r�r�r�)r�rwrwrxr��szParseResults.__dir__)NNTT)N)r�)NFr�T)r�rT)4r�r�r�r�r�rzr�r�r�r�r�r�r��__nonzero__r�r�r�r�r�rr�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrr�r�r
r�rr�rrrrr"r#r%r&r�rwrwrwrxr"-sh&
	'	
4

#
=%
-
cCsF|}d|kot|�knr4||ddkr4dS||jdd|�S)aReturns current column within a string, counting newlines as line separators.
   The first column is number 1.

   Note: the default parsing behavior is to expand tabs in the input string
   before starting the parsing process.  See L{I{ParserElement.parseString}<ParserElement.parseString>} for more information
   on parsing strings containing C{<TAB>}s, and suggested methods to maintain a
   consistent view of the parsed string, the parse location, and line and column
   positions within the parsed string.
   rrrr)r��rfind)r��strgr�rwrwrxr9�s
cCs|jdd|�dS)aReturns current line number within a string, counting newlines as line separators.
   The first line is number 1.

   Note: the default parsing behavior is to expand tabs in the input string
   before starting the parsing process.  See L{I{ParserElement.parseString}<ParserElement.parseString>} for more information
   on parsing strings containing C{<TAB>}s, and suggested methods to maintain a
   consistent view of the parsed string, the parse location, and line and column
   positions within the parsed string.
   rrrr)�count)r�r)rwrwrxrJ�s
cCsF|jdd|�}|jd|�}|dkr2||d|�S||dd�SdS)zfReturns the line of text containing loc within a string, counting newlines as line separators.
       rrrrN)r(�find)r�r)ZlastCRZnextCRrwrwrxrG�s
cCs8tdt|�dt|�dt||�t||�f�dS)NzMatch z at loc z(%d,%d))�printr�rJr9)�instringr��exprrwrwrx�_defaultStartDebugAction�sr/cCs$tdt|�dt|j���dS)NzMatched z -> )r,r�r{r�)r-�startlocZendlocr.�toksrwrwrx�_defaultSuccessDebugAction�sr2cCstdt|��dS)NzException raised:)r,r�)r-r�r.�excrwrwrx�_defaultExceptionDebugAction�sr4cGsdS)zG'Do-nothing' debug action, to suppress debugging output during parsing.Nrw)r�rwrwrxrQ�srqcs��tkr�fdd�Sdg�dg�tdd�dkrFddd	�}dd
d��ntj}tj�d}|dd
�d}|d|d|f�������fdd�}d}yt�dt�d�j�}Wntk
r�t��}YnX||_|S)Ncs�|�S)Nrw)r��lrv)�funcrwrxry�sz_trim_arity.<locals>.<lambda>rFrqro�cSs8tdkrdnd	}tj||dd�|}|j|jfgS)
Nror7rrqrr)�limit)ror7r������)�system_version�	traceback�
extract_stack�filenamerJ)r8r�
frame_summaryrwrwrxr=sz"_trim_arity.<locals>.extract_stackcSs$tj||d�}|d}|j|jfgS)N)r8rrrs)r<�
extract_tbr>rJ)�tbr8Zframesr?rwrwrxr@sz_trim_arity.<locals>.extract_tb�)r8rrcs�x�y �|�dd��}d�d<|Stk
r��dr>�n4z.tj�d}�|dd�ddd��ksj�Wd~X�d�kr��dd7<w�YqXqWdS)NrTrrrq)r8rsrs)r�r~�exc_info)r�r�rA)r@�
foundArityr6r8�maxargs�pa_call_line_synthrwrx�wrappers"z_trim_arity.<locals>.wrapperz<parse action>r��	__class__)ror7)r)rrs)	�singleArgBuiltinsr;r<r=r@�getattrr��	Exceptionr{)r6rEr=Z	LINE_DIFFZ	this_linerG�	func_namerw)r@rDr6r8rErFrx�_trim_arity�s*
rMcs�eZdZdZdZdZedd��Zedd��Zd�dd	�Z	d
d�Z
dd
�Zd�dd�Zd�dd�Z
dd�Zdd�Zdd�Zdd�Zdd�Zdd�Zd�dd �Zd!d"�Zd�d#d$�Zd%d&�Zd'd(�ZGd)d*�d*e�Zed+k	r�Gd,d-�d-e�ZnGd.d-�d-e�ZiZe�Zd/d/gZ d�d0d1�Z!eZ"ed2d3��Z#dZ$ed�d5d6��Z%d�d7d8�Z&e'dfd9d:�Z(d;d<�Z)e'fd=d>�Z*e'dfd?d@�Z+dAdB�Z,dCdD�Z-dEdF�Z.dGdH�Z/dIdJ�Z0dKdL�Z1dMdN�Z2dOdP�Z3dQdR�Z4dSdT�Z5dUdV�Z6dWdX�Z7dYdZ�Z8d�d[d\�Z9d]d^�Z:d_d`�Z;dadb�Z<dcdd�Z=dedf�Z>dgdh�Z?d�didj�Z@dkdl�ZAdmdn�ZBdodp�ZCdqdr�ZDgfdsdt�ZEd�dudv�ZF�fdwdx�ZGdydz�ZHd{d|�ZId}d~�ZJdd��ZKd�d�d��ZLd�d�d��ZM�ZNS)�r$z)Abstract base level parser element class.z 
	
FcCs
|t_dS)a�
        Overrides the default whitespace chars

        Example::
            # default whitespace chars are space, <TAB> and newline
            OneOrMore(Word(alphas)).parseString("abc def\nghi jkl")  # -> ['abc', 'def', 'ghi', 'jkl']
            
            # change to just treat newline as significant
            ParserElement.setDefaultWhitespaceChars(" \t")
            OneOrMore(Word(alphas)).parseString("abc def\nghi jkl")  # -> ['abc', 'def']
        N)r$�DEFAULT_WHITE_CHARS)�charsrwrwrx�setDefaultWhitespaceChars=s
z'ParserElement.setDefaultWhitespaceCharscCs
|t_dS)a�
        Set class to be used for inclusion of string literals into a parser.
        
        Example::
            # default literal class used is Literal
            integer = Word(nums)
            date_str = integer("year") + '/' + integer("month") + '/' + integer("day")           

            date_str.parseString("1999/12/31")  # -> ['1999', '/', '12', '/', '31']


            # change to Suppress
            ParserElement.inlineLiteralsUsing(Suppress)
            date_str = integer("year") + '/' + integer("month") + '/' + integer("day")           

            date_str.parseString("1999/12/31")  # -> ['1999', '12', '31']
        N)r$�_literalStringClass)r�rwrwrx�inlineLiteralsUsingLsz!ParserElement.inlineLiteralsUsingcCs�t�|_d|_d|_d|_||_d|_tj|_	d|_
d|_d|_t�|_
d|_d|_d|_d|_d|_d|_d|_d|_d|_dS)NTFr�)NNN)r��parseAction�
failAction�strRepr�resultsName�
saveAsList�skipWhitespacer$rN�
whiteChars�copyDefaultWhiteChars�mayReturnEmpty�keepTabs�ignoreExprs�debug�streamlined�
mayIndexError�errmsg�modalResults�debugActions�re�callPreparse�
callDuringTry)r��savelistrwrwrxr�as(zParserElement.__init__cCs<tj|�}|jdd�|_|jdd�|_|jr8tj|_|S)a$
        Make a copy of this C{ParserElement}.  Useful for defining different parse actions
        for the same parsing pattern, using copies of the original parse element.
        
        Example::
            integer = Word(nums).setParseAction(lambda toks: int(toks[0]))
            integerK = integer.copy().addParseAction(lambda toks: toks[0]*1024) + Suppress("K")
            integerM = integer.copy().addParseAction(lambda toks: toks[0]*1024*1024) + Suppress("M")
            
            print(OneOrMore(integerK | integerM | integer).parseString("5K 100 640K 256M"))
        prints::
            [5120, 100, 655360, 268435456]
        Equivalent form of C{expr.copy()} is just C{expr()}::
            integerM = integer().addParseAction(lambda toks: toks[0]*1024*1024) + Suppress("M")
        N)r�rSr]rZr$rNrY)r�Zcpyrwrwrxr�xs
zParserElement.copycCs*||_d|j|_t|d�r&|j|j_|S)af
        Define name for this expression, makes debugging and exception messages clearer.
        
        Example::
            Word(nums).parseString("ABC")  # -> Exception: Expected W:(0123...) (at char 0), (line:1, col:1)
            Word(nums).setName("integer").parseString("ABC")  # -> Exception: Expected integer (at char 0), (line:1, col:1)
        z	Expected �	exception)r�rar�rhr�)r�r�rwrwrx�setName�s


zParserElement.setNamecCs4|j�}|jd�r"|dd�}d}||_||_|S)aP
        Define name for referencing matching tokens as a nested attribute
        of the returned parse results.
        NOTE: this returns a *copy* of the original C{ParserElement} object;
        this is so that the client can define a basic element, such as an
        integer, and reference it in multiple places with different names.

        You can also set results names using the abbreviated syntax,
        C{expr("name")} in place of C{expr.setResultsName("name")} - 
        see L{I{__call__}<__call__>}.

        Example::
            date_str = (integer.setResultsName("year") + '/' 
                        + integer.setResultsName("month") + '/' 
                        + integer.setResultsName("day"))

            # equivalent form:
            date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
        �*NrrTrs)r��endswithrVrb)r�r��listAllMatchesZnewselfrwrwrx�setResultsName�s
zParserElement.setResultsNameTcs@|r&|j�d�fdd�	}�|_||_nt|jd�r<|jj|_|S)z�Method to invoke the Python pdb debugger when this element is
           about to be parsed. Set C{breakFlag} to True to enable, False to
           disable.
        Tcsddl}|j��||||�S)Nr)�pdbZ	set_trace)r-r��	doActions�callPreParsern)�_parseMethodrwrx�breaker�sz'ParserElement.setBreak.<locals>.breaker�_originalParseMethod)TT)�_parsersr�)r�Z	breakFlagrrrw)rqrx�setBreak�s
zParserElement.setBreakcOs&tttt|���|_|jdd�|_|S)a
        Define action to perform when successfully matching parse element definition.
        Parse action fn is a callable method with 0-3 arguments, called as C{fn(s,loc,toks)},
        C{fn(loc,toks)}, C{fn(toks)}, or just C{fn()}, where:
         - s   = the original string being parsed (see note below)
         - loc = the location of the matching substring
         - toks = a list of the matched tokens, packaged as a C{L{ParseResults}} object
        If the functions in fns modify the tokens, they can return them as the return
        value from fn, and the modified list of tokens will replace the original.
        Otherwise, fn does not need to return any value.

        Optional keyword arguments:
         - callDuringTry = (default=C{False}) indicate if parse action should be run during lookaheads and alternate testing

        Note: the default parsing behavior is to expand tabs in the input string
        before starting the parsing process.  See L{I{parseString}<parseString>} for more information
        on parsing strings containing C{<TAB>}s, and suggested methods to maintain a
        consistent view of the parsed string, the parse location, and line and column
        positions within the parsed string.
        
        Example::
            integer = Word(nums)
            date_str = integer + '/' + integer + '/' + integer

            date_str.parseString("1999/12/31")  # -> ['1999', '/', '12', '/', '31']

            # use parse action to convert to ints at parse time
            integer = Word(nums).setParseAction(lambda toks: int(toks[0]))
            date_str = integer + '/' + integer + '/' + integer

            # note that integer fields are now ints, not strings
            date_str.parseString("1999/12/31")  # -> [1999, '/', 12, '/', 31]
        rfF)r��maprMrSr�rf)r��fnsr�rwrwrxr��s"zParserElement.setParseActioncOs4|jtttt|���7_|jp,|jdd�|_|S)z�
        Add parse action to expression's list of parse actions. See L{I{setParseAction}<setParseAction>}.
        
        See examples in L{I{copy}<copy>}.
        rfF)rSr�rvrMrfr�)r�rwr�rwrwrx�addParseAction�szParserElement.addParseActioncsb|jdd��|jdd�rtnt�x(|D] ����fdd�}|jj|�q&W|jpZ|jdd�|_|S)a�Add a boolean predicate function to expression's list of parse actions. See 
        L{I{setParseAction}<setParseAction>} for function call signatures. Unlike C{setParseAction}, 
        functions passed to C{addCondition} need to return boolean success/fail of the condition.

        Optional keyword arguments:
         - message = define a custom message to be used in the raised exception
         - fatal   = if True, will raise ParseFatalException to stop parsing immediately; otherwise will raise ParseException
         
        Example::
            integer = Word(nums).setParseAction(lambda toks: int(toks[0]))
            year_int = integer.copy()
            year_int.addCondition(lambda toks: toks[0] >= 2000, message="Only support years 2000 and later")
            date_str = year_int + '/' + integer + '/' + integer

            result = date_str.parseString("1999/12/31")  # -> Exception: Only support years 2000 and later (at char 0), (line:1, col:1)
        �messagezfailed user-defined condition�fatalFcs$tt��|||��s �||���dS)N)r�rM)r�r5rv)�exc_type�fnr�rwrx�pasz&ParserElement.addCondition.<locals>.parf)r�r!rrSr�rf)r�rwr�r}rw)r{r|r�rx�addCondition�s
zParserElement.addConditioncCs
||_|S)aDefine action to perform if parsing fails at this expression.
           Fail acton fn is a callable function that takes the arguments
           C{fn(s,loc,expr,err)} where:
            - s = string being parsed
            - loc = location where expression match was attempted and failed
            - expr = the parse expression that failed
            - err = the exception thrown
           The function returns no value.  It may throw C{L{ParseFatalException}}
           if it is desired to stop parsing immediately.)rT)r�r|rwrwrx�
setFailActions
zParserElement.setFailActioncCsZd}xP|rTd}xB|jD]8}yx|j||�\}}d}qWWqtk
rLYqXqWqW|S)NTF)r]rtr)r�r-r�Z
exprsFound�eZdummyrwrwrx�_skipIgnorables#szParserElement._skipIgnorablescCsL|jr|j||�}|jrH|j}t|�}x ||krF|||krF|d7}q(W|S)Nrr)r]r�rXrYr�)r�r-r�Zwt�instrlenrwrwrx�preParse0szParserElement.preParsecCs|gfS)Nrw)r�r-r�rorwrwrx�	parseImpl<szParserElement.parseImplcCs|S)Nrw)r�r-r��	tokenlistrwrwrx�	postParse?szParserElement.postParsec"Cs�|j}|s|jr�|jdr,|jd|||�|rD|jrD|j||�}n|}|}yDy|j|||�\}}Wn(tk
r�t|t|�|j	|��YnXWnXt
k
r�}	z<|jdr�|jd||||	�|jr�|j||||	��WYdd}	~	XnXn�|o�|j�r|j||�}n|}|}|j�s$|t|�k�rhy|j|||�\}}Wn*tk
�rdt|t|�|j	|��YnXn|j|||�\}}|j|||�}t
||j|j|jd�}
|j�r�|�s�|j�r�|�rVyRxL|jD]B}||||
�}|dk	�r�t
||j|j�o�t|t
tf�|jd�}
�q�WWnFt
k
�rR}	z(|jd�r@|jd||||	��WYdd}	~	XnXnNxL|jD]B}||||
�}|dk	�r^t
||j|j�o�t|t
tf�|jd�}
�q^W|�r�|jd�r�|jd|||||
�||
fS)Nrrq)r�r�rr)r^rTrcrer�r�r�rr�rarr`r�r"rVrWrbrSrfrzr�)r�r-r�rorpZ	debugging�prelocZtokensStart�tokens�errZ	retTokensr|rwrwrx�
_parseNoCacheCsp





zParserElement._parseNoCachecCs>y|j||dd�dStk
r8t|||j|��YnXdS)NF)ror)rtr!rra)r�r-r�rwrwrx�tryParse�szParserElement.tryParsecCs2y|j||�Wnttfk
r(dSXdSdS)NFT)r�rr�)r�r-r�rwrwrx�canParseNext�s
zParserElement.canParseNextc@seZdZdd�ZdS)zParserElement._UnboundedCachecsdi�t�|_���fdd�}�fdd�}�fdd�}tj||�|_tj||�|_tj||�|_dS)Ncs�j|��S)N)r�)r�r�)�cache�not_in_cacherwrxr��sz3ParserElement._UnboundedCache.__init__.<locals>.getcs|�|<dS)Nrw)r�r�r�)r�rwrx�set�sz3ParserElement._UnboundedCache.__init__.<locals>.setcs�j�dS)N)r�)r�)r�rwrxr��sz5ParserElement._UnboundedCache.__init__.<locals>.clear)r�r��types�
MethodTyper�r�r�)r�r�r�r�rw)r�r�rxr��sz&ParserElement._UnboundedCache.__init__N)r�r�r�r�rwrwrwrx�_UnboundedCache�sr�Nc@seZdZdd�ZdS)zParserElement._FifoCachecsht�|_�t����fdd�}��fdd�}�fdd�}tj||�|_tj||�|_tj||�|_dS)Ncs�j|��S)N)r�)r�r�)r�r�rwrxr��sz.ParserElement._FifoCache.__init__.<locals>.getcs"|�|<t���kr�jd�dS)NF)r��popitem)r�r�r�)r��sizerwrxr��sz.ParserElement._FifoCache.__init__.<locals>.setcs�j�dS)N)r�)r�)r�rwrxr��sz0ParserElement._FifoCache.__init__.<locals>.clear)r�r��_OrderedDictr�r�r�r�r�)r�r�r�r�r�rw)r�r�r�rxr��sz!ParserElement._FifoCache.__init__N)r�r�r�r�rwrwrwrx�
_FifoCache�sr�c@seZdZdd�ZdS)zParserElement._FifoCachecsvt�|_�i�tjg�����fdd�}���fdd�}��fdd�}tj||�|_tj||�|_tj||�|_dS)Ncs�j|��S)N)r�)r�r�)r�r�rwrxr��sz.ParserElement._FifoCache.__init__.<locals>.getcs2|�|<t���kr$�j�j�d��j|�dS)N)r�r��popleftr�)r�r�r�)r��key_fifor�rwrxr��sz.ParserElement._FifoCache.__init__.<locals>.setcs�j��j�dS)N)r�)r�)r�r�rwrxr��sz0ParserElement._FifoCache.__init__.<locals>.clear)	r�r��collections�dequer�r�r�r�r�)r�r�r�r�r�rw)r�r�r�r�rxr��sz!ParserElement._FifoCache.__init__N)r�r�r�r�rwrwrwrxr��srcCs�d\}}|||||f}tj��tj}|j|�}	|	|jkr�tj|d7<y|j||||�}	Wn8tk
r�}
z|j||
j	|
j
���WYdd}
~
Xq�X|j||	d|	dj�f�|	Sn4tj|d7<t|	t
�r�|	�|	d|	dj�fSWdQRXdS)Nrrr)rrr)r$�packrat_cache_lock�
packrat_cacher�r��packrat_cache_statsr�rr�rHr�r�rzrK)r�r-r�rorpZHITZMISS�lookupr�r�r�rwrwrx�_parseCache�s$


zParserElement._parseCachecCs(tjj�dgttj�tjdd�<dS)Nr)r$r�r�r�r�rwrwrwrx�
resetCache�s
zParserElement.resetCache�cCs8tjs4dt_|dkr tj�t_ntj|�t_tjt_dS)a�Enables "packrat" parsing, which adds memoizing to the parsing logic.
           Repeated parse attempts at the same string location (which happens
           often in many complex grammars) can immediately return a cached value,
           instead of re-executing parsing/validating code.  Memoizing is done of
           both valid results and parsing exceptions.
           
           Parameters:
            - cache_size_limit - (default=C{128}) - if an integer value is provided
              will limit the size of the packrat cache; if None is passed, then
              the cache size will be unbounded; if 0 is passed, the cache will
              be effectively disabled.
            
           This speedup may break existing programs that use parse actions that
           have side-effects.  For this reason, packrat parsing is disabled when
           you first import pyparsing.  To activate the packrat feature, your
           program must call the class method C{ParserElement.enablePackrat()}.  If
           your program uses C{psyco} to "compile as you go", you must call
           C{enablePackrat} before calling C{psyco.full()}.  If you do not do this,
           Python will crash.  For best results, call C{enablePackrat()} immediately
           after importing pyparsing.
           
           Example::
               import pyparsing
               pyparsing.ParserElement.enablePackrat()
        TN)r$�_packratEnabledr�r�r�r�rt)Zcache_size_limitrwrwrx�
enablePackratszParserElement.enablePackratcCs�tj�|js|j�x|jD]}|j�qW|js<|j�}y<|j|d�\}}|rv|j||�}t	�t
�}|j||�Wn0tk
r�}ztjr��n|�WYdd}~XnX|SdS)aB
        Execute the parse expression with the given string.
        This is the main interface to the client code, once the complete
        expression has been built.

        If you want the grammar to require that the entire input string be
        successfully parsed, then set C{parseAll} to True (equivalent to ending
        the grammar with C{L{StringEnd()}}).

        Note: C{parseString} implicitly calls C{expandtabs()} on the input string,
        in order to report proper column numbers in parse actions.
        If the input string contains tabs and
        the grammar uses parse actions that use the C{loc} argument to index into the
        string being parsed, you can ensure you have a consistent view of the input
        string by:
         - calling C{parseWithTabs} on your grammar before calling C{parseString}
           (see L{I{parseWithTabs}<parseWithTabs>})
         - define your parse action using the full C{(s,loc,toks)} signature, and
           reference the input string using the parse action's C{s} argument
         - explictly expand the tabs in your input string before calling
           C{parseString}
        
        Example::
            Word('a').parseString('aaaaabaaa')  # -> ['aaaaa']
            Word('a').parseString('aaaaabaaa', parseAll=True)  # -> Exception: Expected end of text
        rN)
r$r�r_�
streamliner]r\�
expandtabsrtr�r
r)r�verbose_stacktrace)r�r-�parseAllr�r�r�Zser3rwrwrx�parseString#s$zParserElement.parseStringccs@|js|j�x|jD]}|j�qW|js8t|�j�}t|�}d}|j}|j}t	j
�d}	y�x�||kon|	|k�ry |||�}
|||
dd�\}}Wntk
r�|
d}Yq`X||kr�|	d7}	||
|fV|r�|||�}
|
|kr�|}q�|d7}n|}q`|
d}q`WWn4tk
�r:}zt	j
�r&�n|�WYdd}~XnXdS)a�
        Scan the input string for expression matches.  Each match will return the
        matching tokens, start location, and end location.  May be called with optional
        C{maxMatches} argument, to clip scanning after 'n' matches are found.  If
        C{overlap} is specified, then overlapping matches will be reported.

        Note that the start and end locations are reported relative to the string
        being parsed.  See L{I{parseString}<parseString>} for more information on parsing
        strings with embedded tabs.

        Example::
            source = "sldjf123lsdjjkf345sldkjf879lkjsfd987"
            print(source)
            for tokens,start,end in Word(alphas).scanString(source):
                print(' '*start + '^'*(end-start))
                print(' '*start + tokens[0])
        
        prints::
        
            sldjf123lsdjjkf345sldkjf879lkjsfd987
            ^^^^^
            sldjf
                    ^^^^^^^
                    lsdjjkf
                              ^^^^^^
                              sldkjf
                                       ^^^^^^
                                       lkjsfd
        rF)rprrN)r_r�r]r\r�r�r�r�rtr$r�rrr�)r�r-�
maxMatchesZoverlapr�r�r�Z
preparseFnZparseFn�matchesr�ZnextLocr�Znextlocr3rwrwrx�
scanStringUsB


zParserElement.scanStringcCs�g}d}d|_y�xh|j|�D]Z\}}}|j|||��|rrt|t�rT||j�7}nt|t�rh||7}n
|j|�|}qW|j||d��dd�|D�}djtt	t
|���Stk
r�}ztj
rȂn|�WYdd}~XnXdS)af
        Extension to C{L{scanString}}, to modify matching text with modified tokens that may
        be returned from a parse action.  To use C{transformString}, define a grammar and
        attach a parse action to it that modifies the returned token list.
        Invoking C{transformString()} on a target string will then scan for matches,
        and replace the matched text patterns according to the logic in the parse
        action.  C{transformString()} returns the resulting transformed string.
        
        Example::
            wd = Word(alphas)
            wd.setParseAction(lambda toks: toks[0].title())
            
            print(wd.transformString("now is the winter of our discontent made glorious summer by this sun of york."))
        Prints::
            Now Is The Winter Of Our Discontent Made Glorious Summer By This Sun Of York.
        rTNcSsg|]}|r|�qSrwrw)r��orwrwrxr��sz1ParserElement.transformString.<locals>.<listcomp>r�)r\r�r�rzr"r�r�r�rvr��_flattenrr$r�)r�r-rZlastErvr�r�r3rwrwrxr��s(



zParserElement.transformStringcCsPytdd�|j||�D��Stk
rJ}ztjr6�n|�WYdd}~XnXdS)a~
        Another extension to C{L{scanString}}, simplifying the access to the tokens found
        to match the given parse expression.  May be called with optional
        C{maxMatches} argument, to clip searching after 'n' matches are found.
        
        Example::
            # a capitalized word starts with an uppercase letter, followed by zero or more lowercase letters
            cap_word = Word(alphas.upper(), alphas.lower())
            
            print(cap_word.searchString("More than Iron, more than Lead, more than Gold I need Electricity"))
        prints::
            ['More', 'Iron', 'Lead', 'Gold', 'I']
        cSsg|]\}}}|�qSrwrw)r�rvr�r�rwrwrxr��sz.ParserElement.searchString.<locals>.<listcomp>N)r"r�rr$r�)r�r-r�r3rwrwrx�searchString�szParserElement.searchStringc	csXd}d}x<|j||d�D]*\}}}|||�V|r>|dV|}qW||d�VdS)a[
        Generator method to split a string using the given expression as a separator.
        May be called with optional C{maxsplit} argument, to limit the number of splits;
        and the optional C{includeSeparators} argument (default=C{False}), if the separating
        matching text should be included in the split results.
        
        Example::        
            punc = oneOf(list(".,;:/-!?"))
            print(list(punc.split("This, this?, this sentence, is badly punctuated!")))
        prints::
            ['This', ' this', '', ' this sentence', ' is badly punctuated', '']
        r)r�N)r�)	r�r-�maxsplitZincludeSeparatorsZsplitsZlastrvr�r�rwrwrxr��s

zParserElement.splitcCsFt|t�rtj|�}t|t�s:tjdt|�tdd�dSt||g�S)a�
        Implementation of + operator - returns C{L{And}}. Adding strings to a ParserElement
        converts them to L{Literal}s by default.
        
        Example::
            greet = Word(alphas) + "," + Word(alphas) + "!"
            hello = "Hello, World!"
            print (hello, "->", greet.parseString(hello))
        Prints::
            Hello, World! -> ['Hello', ',', 'World', '!']
        z4Cannot combine element of type %s with ParserElementrq)�
stacklevelN)	rzr�r$rQ�warnings�warnr��
SyntaxWarningr)r�r�rwrwrxr�s



zParserElement.__add__cCsBt|t�rtj|�}t|t�s:tjdt|�tdd�dS||S)z]
        Implementation of + operator when left operand is not a C{L{ParserElement}}
        z4Cannot combine element of type %s with ParserElementrq)r�N)rzr�r$rQr�r�r�r�)r�r�rwrwrxrs



zParserElement.__radd__cCsLt|t�rtj|�}t|t�s:tjdt|�tdd�dSt|tj	�|g�S)zQ
        Implementation of - operator, returns C{L{And}} with error stop
        z4Cannot combine element of type %s with ParserElementrq)r�N)
rzr�r$rQr�r�r�r�r�
_ErrorStop)r�r�rwrwrx�__sub__s



zParserElement.__sub__cCsBt|t�rtj|�}t|t�s:tjdt|�tdd�dS||S)z]
        Implementation of - operator when left operand is not a C{L{ParserElement}}
        z4Cannot combine element of type %s with ParserElementrq)r�N)rzr�r$rQr�r�r�r�)r�r�rwrwrx�__rsub__ s



zParserElement.__rsub__cs�t|t�r|d}}n�t|t�r�|ddd�}|ddkrHd|df}t|dt�r�|ddkr�|ddkrvt��S|ddkr�t��S�|dt��SnJt|dt�r�t|dt�r�|\}}||8}ntdt|d�t|d���ntdt|���|dk�rtd��|dk�rtd��||k�o2dkn�rBtd	��|�r���fd
d��|�r�|dk�rt��|�}nt�g|��|�}n�|�}n|dk�r��}nt�g|�}|S)
a�
        Implementation of * operator, allows use of C{expr * 3} in place of
        C{expr + expr + expr}.  Expressions may also me multiplied by a 2-integer
        tuple, similar to C{{min,max}} multipliers in regular expressions.  Tuples
        may also include C{None} as in:
         - C{expr*(n,None)} or C{expr*(n,)} is equivalent
              to C{expr*n + L{ZeroOrMore}(expr)}
              (read as "at least n instances of C{expr}")
         - C{expr*(None,n)} is equivalent to C{expr*(0,n)}
              (read as "0 to n instances of C{expr}")
         - C{expr*(None,None)} is equivalent to C{L{ZeroOrMore}(expr)}
         - C{expr*(1,None)} is equivalent to C{L{OneOrMore}(expr)}

        Note that C{expr*(None,n)} does not raise an exception if
        more than n exprs exist in the input stream; that is,
        C{expr*(None,n)} does not enforce a maximum number of expr
        occurrences.  If this behavior is desired, then write
        C{expr*(None,n) + ~expr}
        rNrqrrz7cannot multiply 'ParserElement' and ('%s','%s') objectsz0cannot multiply 'ParserElement' and '%s' objectsz/cannot multiply ParserElement by negative valuez@second tuple value must be greater or equal to first tuple valuez+cannot multiply ParserElement by 0 or (0,0)cs(|dkrt��|d��St��SdS)Nrr)r)�n)�makeOptionalListr�rwrxr�]sz/ParserElement.__mul__.<locals>.makeOptionalList)NN)	rzru�tupler2rr�r��
ValueErrorr)r�r�ZminElementsZoptElementsr�rw)r�r�rx�__mul__,sD







zParserElement.__mul__cCs
|j|�S)N)r�)r�r�rwrwrx�__rmul__pszParserElement.__rmul__cCsFt|t�rtj|�}t|t�s:tjdt|�tdd�dSt||g�S)zI
        Implementation of | operator - returns C{L{MatchFirst}}
        z4Cannot combine element of type %s with ParserElementrq)r�N)	rzr�r$rQr�r�r�r�r)r�r�rwrwrx�__or__ss



zParserElement.__or__cCsBt|t�rtj|�}t|t�s:tjdt|�tdd�dS||BS)z]
        Implementation of | operator when left operand is not a C{L{ParserElement}}
        z4Cannot combine element of type %s with ParserElementrq)r�N)rzr�r$rQr�r�r�r�)r�r�rwrwrx�__ror__s



zParserElement.__ror__cCsFt|t�rtj|�}t|t�s:tjdt|�tdd�dSt||g�S)zA
        Implementation of ^ operator - returns C{L{Or}}
        z4Cannot combine element of type %s with ParserElementrq)r�N)	rzr�r$rQr�r�r�r�r)r�r�rwrwrx�__xor__�s



zParserElement.__xor__cCsBt|t�rtj|�}t|t�s:tjdt|�tdd�dS||AS)z]
        Implementation of ^ operator when left operand is not a C{L{ParserElement}}
        z4Cannot combine element of type %s with ParserElementrq)r�N)rzr�r$rQr�r�r�r�)r�r�rwrwrx�__rxor__�s



zParserElement.__rxor__cCsFt|t�rtj|�}t|t�s:tjdt|�tdd�dSt||g�S)zC
        Implementation of & operator - returns C{L{Each}}
        z4Cannot combine element of type %s with ParserElementrq)r�N)	rzr�r$rQr�r�r�r�r)r�r�rwrwrx�__and__�s



zParserElement.__and__cCsBt|t�rtj|�}t|t�s:tjdt|�tdd�dS||@S)z]
        Implementation of & operator when left operand is not a C{L{ParserElement}}
        z4Cannot combine element of type %s with ParserElementrq)r�N)rzr�r$rQr�r�r�r�)r�r�rwrwrx�__rand__�s



zParserElement.__rand__cCst|�S)zE
        Implementation of ~ operator - returns C{L{NotAny}}
        )r)r�rwrwrx�
__invert__�szParserElement.__invert__cCs|dk	r|j|�S|j�SdS)a

        Shortcut for C{L{setResultsName}}, with C{listAllMatches=False}.
        
        If C{name} is given with a trailing C{'*'} character, then C{listAllMatches} will be
        passed as C{True}.
           
        If C{name} is omitted, same as calling C{L{copy}}.

        Example::
            # these are equivalent
            userdata = Word(alphas).setResultsName("name") + Word(nums+"-").setResultsName("socsecno")
            userdata = Word(alphas)("name") + Word(nums+"-")("socsecno")             
        N)rmr�)r�r�rwrwrx�__call__�s
zParserElement.__call__cCst|�S)z�
        Suppresses the output of this C{ParserElement}; useful to keep punctuation from
        cluttering up returned output.
        )r+)r�rwrwrx�suppress�szParserElement.suppresscCs
d|_|S)a
        Disables the skipping of whitespace before matching the characters in the
        C{ParserElement}'s defined pattern.  This is normally only used internally by
        the pyparsing module, but may be needed in some whitespace-sensitive grammars.
        F)rX)r�rwrwrx�leaveWhitespace�szParserElement.leaveWhitespacecCsd|_||_d|_|S)z8
        Overrides the default whitespace chars
        TF)rXrYrZ)r�rOrwrwrx�setWhitespaceChars�sz ParserElement.setWhitespaceCharscCs
d|_|S)z�
        Overrides default behavior to expand C{<TAB>}s to spaces before parsing the input string.
        Must be called before C{parseString} when the input grammar contains elements that
        match C{<TAB>} characters.
        T)r\)r�rwrwrx�
parseWithTabs�szParserElement.parseWithTabscCsLt|t�rt|�}t|t�r4||jkrH|jj|�n|jjt|j���|S)a�
        Define expression to be ignored (e.g., comments) while doing pattern
        matching; may be called repeatedly, to define multiple comment or other
        ignorable patterns.
        
        Example::
            patt = OneOrMore(Word(alphas))
            patt.parseString('ablaj /* comment */ lskjd') # -> ['ablaj']
            
            patt.ignore(cStyleComment)
            patt.parseString('ablaj /* comment */ lskjd') # -> ['ablaj', 'lskjd']
        )rzr�r+r]r�r�)r�r�rwrwrx�ignore�s


zParserElement.ignorecCs"|pt|pt|ptf|_d|_|S)zT
        Enable display of debugging messages while doing pattern matching.
        T)r/r2r4rcr^)r�ZstartActionZ
successActionZexceptionActionrwrwrx�setDebugActions
s
zParserElement.setDebugActionscCs|r|jttt�nd|_|S)a�
        Enable display of debugging messages while doing pattern matching.
        Set C{flag} to True to enable, False to disable.

        Example::
            wd = Word(alphas).setName("alphaword")
            integer = Word(nums).setName("numword")
            term = wd | integer
            
            # turn on debugging for wd
            wd.setDebug()

            OneOrMore(term).parseString("abc 123 xyz 890")
        
        prints::
            Match alphaword at loc 0(1,1)
            Matched alphaword -> ['abc']
            Match alphaword at loc 3(1,4)
            Exception raised:Expected alphaword (at char 4), (line:1, col:5)
            Match alphaword at loc 7(1,8)
            Matched alphaword -> ['xyz']
            Match alphaword at loc 11(1,12)
            Exception raised:Expected alphaword (at char 12), (line:1, col:13)
            Match alphaword at loc 15(1,16)
            Exception raised:Expected alphaword (at char 15), (line:1, col:16)

        The output shown is that produced by the default debug actions - custom debug actions can be
        specified using L{setDebugActions}. Prior to attempting
        to match the C{wd} expression, the debugging message C{"Match <exprname> at loc <n>(<line>,<col>)"}
        is shown. Then if the parse succeeds, a C{"Matched"} message is shown, or an C{"Exception raised"}
        message is shown. Also note the use of L{setName} to assign a human-readable name to the expression,
        which makes debugging and exception messages easier to understand - for instance, the default
        name created for the C{Word} expression without calling C{setName} is C{"W:(ABCD...)"}.
        F)r�r/r2r4r^)r��flagrwrwrx�setDebugs#zParserElement.setDebugcCs|jS)N)r�)r�rwrwrxr�@szParserElement.__str__cCst|�S)N)r�)r�rwrwrxr�CszParserElement.__repr__cCsd|_d|_|S)NT)r_rU)r�rwrwrxr�FszParserElement.streamlinecCsdS)Nrw)r�r�rwrwrx�checkRecursionKszParserElement.checkRecursioncCs|jg�dS)zj
        Check defined expressions for valid structure, check for infinite recursive definitions.
        N)r�)r��
validateTracerwrwrx�validateNszParserElement.validatecCs�y|j�}Wn2tk
r>t|d��}|j�}WdQRXYnXy|j||�Stk
r|}ztjrh�n|�WYdd}~XnXdS)z�
        Execute the parse expression on the given file or filename.
        If a filename is specified (instead of a file object),
        the entire file is opened, read, and closed before parsing.
        �rN)�readr��openr�rr$r�)r�Zfile_or_filenamer�Z
file_contents�fr3rwrwrx�	parseFileTszParserElement.parseFilecsHt|t�r"||kp t|�t|�kSt|t�r6|j|�Stt|�|kSdS)N)rzr$�varsr�r��super)r�r�)rHrwrx�__eq__hs



zParserElement.__eq__cCs
||kS)Nrw)r�r�rwrwrx�__ne__pszParserElement.__ne__cCstt|��S)N)�hash�id)r�rwrwrx�__hash__sszParserElement.__hash__cCs||kS)Nrw)r�r�rwrwrx�__req__vszParserElement.__req__cCs
||kS)Nrw)r�r�rwrwrx�__rne__yszParserElement.__rne__cCs0y|jt|�|d�dStk
r*dSXdS)a�
        Method for quick testing of a parser against a test string. Good for simple 
        inline microtests of sub expressions while building up larger parser.
           
        Parameters:
         - testString - to test against this expression for a match
         - parseAll - (default=C{True}) - flag to pass to C{L{parseString}} when running tests
            
        Example::
            expr = Word(nums)
            assert expr.matches("100")
        )r�TFN)r�r�r)r�Z
testStringr�rwrwrxr�|s

zParserElement.matches�#cCs�t|t�r"tttj|j�j���}t|t�r4t|�}g}g}d}	�x�|D�]�}
|dk	rb|j	|
d�sl|rx|
rx|j
|
�qH|
s~qHdj|�|
g}g}y:|
jdd�}
|j
|
|d�}|j
|j|d��|	o�|}	Wn�tk
�rx}
z�t|
t�r�dnd	}d|
k�r0|j
t|
j|
��|j
d
t|
j|
�dd|�n|j
d
|
jd|�|j
d
t|
��|	�ob|}	|
}WYdd}
~
XnDtk
�r�}z&|j
dt|��|	�o�|}	|}WYdd}~XnX|�r�|�r�|j
d	�tdj|��|j
|
|f�qHW|	|fS)a3
        Execute the parse expression on a series of test strings, showing each
        test, the parsed results or where the parse failed. Quick and easy way to
        run a parse expression against a list of sample strings.
           
        Parameters:
         - tests - a list of separate test strings, or a multiline string of test strings
         - parseAll - (default=C{True}) - flag to pass to C{L{parseString}} when running tests           
         - comment - (default=C{'#'}) - expression for indicating embedded comments in the test 
              string; pass None to disable comment filtering
         - fullDump - (default=C{True}) - dump results as list followed by results names in nested outline;
              if False, only dump nested list
         - printResults - (default=C{True}) prints test output to stdout
         - failureTests - (default=C{False}) indicates if these tests are expected to fail parsing

        Returns: a (success, results) tuple, where success indicates that all tests succeeded
        (or failed if C{failureTests} is True), and the results contain a list of lines of each 
        test's output
        
        Example::
            number_expr = pyparsing_common.number.copy()

            result = number_expr.runTests('''
                # unsigned integer
                100
                # negative integer
                -100
                # float with scientific notation
                6.02e23
                # integer with scientific notation
                1e-12
                ''')
            print("Success" if result[0] else "Failed!")

            result = number_expr.runTests('''
                # stray character
                100Z
                # missing leading digit before '.'
                -.100
                # too many '.'
                3.14.159
                ''', failureTests=True)
            print("Success" if result[0] else "Failed!")
        prints::
            # unsigned integer
            100
            [100]

            # negative integer
            -100
            [-100]

            # float with scientific notation
            6.02e23
            [6.02e+23]

            # integer with scientific notation
            1e-12
            [1e-12]

            Success
            
            # stray character
            100Z
               ^
            FAIL: Expected end of text (at char 3), (line:1, col:4)

            # missing leading digit before '.'
            -.100
            ^
            FAIL: Expected {real number with scientific notation | real number | signed integer} (at char 0), (line:1, col:1)

            # too many '.'
            3.14.159
                ^
            FAIL: Expected end of text (at char 4), (line:1, col:5)

            Success

        Each test string must be on a single line. If you want to test a string that spans multiple
        lines, create a test like this::

            expr.runTest(r"this is a test\n of strings that spans \n 3 lines")
        
        (Note that this is a raw string literal, you must include the leading 'r'.)
        TNFrz\n)r�)r z(FATAL)r�� rr�^zFAIL: zFAIL-EXCEPTION: )rzr�r�rvr{r��rstrip�
splitlinesrr�r�r�r�r�rrr!rGr�r9rKr,)r�Ztestsr�ZcommentZfullDumpZprintResultsZfailureTestsZ
allResultsZcomments�successrvr�resultr�rzr3rwrwrx�runTests�sNW



$


zParserElement.runTests)F)F)T)T)TT)TT)r�)F)N)T)F)T)Tr�TTF)Or�r�r�r�rNr��staticmethodrPrRr�r�rirmrur�rxr~rr�r�r�r�r�r�r�r�r�r�r�r�rr�r�r�rtr�r�r�r��_MAX_INTr�r�r�r�rrr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r��
__classcell__rwrw)rHrxr$8s�


&




H
"
2G+D
			

)

cs eZdZdZ�fdd�Z�ZS)r,zT
    Abstract C{ParserElement} subclass, for defining atomic matching patterns.
    cstt|�jdd�dS)NF)rg)r�r,r�)r�)rHrwrxr�	szToken.__init__)r�r�r�r�r�r�rwrw)rHrxr,	scs eZdZdZ�fdd�Z�ZS)r
z,
    An empty token, will always match.
    cs$tt|�j�d|_d|_d|_dS)Nr
TF)r�r
r�r�r[r`)r�)rHrwrxr�	szEmpty.__init__)r�r�r�r�r�r�rwrw)rHrxr
	scs*eZdZdZ�fdd�Zddd�Z�ZS)rz(
    A token that will never match.
    cs*tt|�j�d|_d|_d|_d|_dS)NrTFzUnmatchable token)r�rr�r�r[r`ra)r�)rHrwrxr�*	s
zNoMatch.__init__TcCst|||j|��dS)N)rra)r�r-r�rorwrwrxr�1	szNoMatch.parseImpl)T)r�r�r�r�r�r�r�rwrw)rHrxr&	scs*eZdZdZ�fdd�Zddd�Z�ZS)ra�
    Token to exactly match a specified string.
    
    Example::
        Literal('blah').parseString('blah')  # -> ['blah']
        Literal('blah').parseString('blahfooblah')  # -> ['blah']
        Literal('blah').parseString('bla')  # -> Exception: Expected "blah"
    
    For case-insensitive matching, use L{CaselessLiteral}.
    
    For keyword matching (force word break before and after the matched string),
    use L{Keyword} or L{CaselessKeyword}.
    cs�tt|�j�||_t|�|_y|d|_Wn*tk
rVtj	dt
dd�t|_YnXdt
|j�|_d|j|_d|_d|_dS)Nrz2null string passed to Literal; use Empty() insteadrq)r�z"%s"z	Expected F)r�rr��matchr��matchLen�firstMatchCharr�r�r�r�r
rHr�r�rar[r`)r��matchString)rHrwrxr�C	s

zLiteral.__init__TcCsJ|||jkr6|jdks&|j|j|�r6||j|jfSt|||j|��dS)Nrr)r�r��
startswithr�rra)r�r-r�rorwrwrxr�V	szLiteral.parseImpl)T)r�r�r�r�r�r�r�rwrw)rHrxr5	s
csLeZdZdZedZd�fdd�	Zddd	�Z�fd
d�Ze	dd
��Z
�ZS)ra\
    Token to exactly match a specified string as a keyword, that is, it must be
    immediately followed by a non-keyword character.  Compare with C{L{Literal}}:
     - C{Literal("if")} will match the leading C{'if'} in C{'ifAndOnlyIf'}.
     - C{Keyword("if")} will not; it will only match the leading C{'if'} in C{'if x=1'}, or C{'if(y==2)'}
    Accepts two optional constructor arguments in addition to the keyword string:
     - C{identChars} is a string of characters that would be valid identifier characters,
          defaulting to all alphanumerics + "_" and "$"
     - C{caseless} allows case-insensitive matching, default is C{False}.
       
    Example::
        Keyword("start").parseString("start")  # -> ['start']
        Keyword("start").parseString("starting")  # -> Exception

    For case-insensitive matching, use L{CaselessKeyword}.
    z_$NFcs�tt|�j�|dkrtj}||_t|�|_y|d|_Wn$tk
r^t	j
dtdd�YnXd|j|_d|j|_
d|_d|_||_|r�|j�|_|j�}t|�|_dS)Nrz2null string passed to Keyword; use Empty() insteadrq)r�z"%s"z	Expected F)r�rr��DEFAULT_KEYWORD_CHARSr�r�r�r�r�r�r�r�r�rar[r`�caseless�upper�
caselessmatchr��
identChars)r�r�r�r�)rHrwrxr�q	s&

zKeyword.__init__TcCs|jr|||||j�j�|jkr�|t|�|jksL|||jj�|jkr�|dksj||dj�|jkr�||j|jfSnv|||jkr�|jdks�|j|j|�r�|t|�|jks�|||j|jkr�|dks�||d|jkr�||j|jfSt	|||j
|��dS)Nrrr)r�r�r�r�r�r�r�r�r�rra)r�r-r�rorwrwrxr��	s*&zKeyword.parseImplcstt|�j�}tj|_|S)N)r�rr�r�r�)r�r�)rHrwrxr��	szKeyword.copycCs
|t_dS)z,Overrides the default Keyword chars
        N)rr�)rOrwrwrx�setDefaultKeywordChars�	szKeyword.setDefaultKeywordChars)NF)T)r�r�r�r�r3r�r�r�r�r�r�r�rwrw)rHrxr^	s
cs*eZdZdZ�fdd�Zddd�Z�ZS)ral
    Token to match a specified string, ignoring case of letters.
    Note: the matched results will always be in the case of the given
    match string, NOT the case of the input text.

    Example::
        OneOrMore(CaselessLiteral("CMD")).parseString("cmd CMD Cmd10") # -> ['CMD', 'CMD', 'CMD']
        
    (Contrast with example for L{CaselessKeyword}.)
    cs6tt|�j|j��||_d|j|_d|j|_dS)Nz'%s'z	Expected )r�rr�r��returnStringr�ra)r�r�)rHrwrxr��	szCaselessLiteral.__init__TcCs@||||j�j�|jkr,||j|jfSt|||j|��dS)N)r�r�r�r�rra)r�r-r�rorwrwrxr��	szCaselessLiteral.parseImpl)T)r�r�r�r�r�r�r�rwrw)rHrxr�	s
cs,eZdZdZd�fdd�	Zd	dd�Z�ZS)
rz�
    Caseless version of L{Keyword}.

    Example::
        OneOrMore(CaselessKeyword("CMD")).parseString("cmd CMD Cmd10") # -> ['CMD', 'CMD']
        
    (Contrast with example for L{CaselessLiteral}.)
    Ncstt|�j||dd�dS)NT)r�)r�rr�)r�r�r�)rHrwrxr��	szCaselessKeyword.__init__TcCsj||||j�j�|jkrV|t|�|jksF|||jj�|jkrV||j|jfSt|||j|��dS)N)r�r�r�r�r�r�rra)r�r-r�rorwrwrxr��	s*zCaselessKeyword.parseImpl)N)T)r�r�r�r�r�r�r�rwrw)rHrxr�	scs,eZdZdZd�fdd�	Zd	dd�Z�ZS)
rlax
    A variation on L{Literal} which matches "close" matches, that is, 
    strings with at most 'n' mismatching characters. C{CloseMatch} takes parameters:
     - C{match_string} - string to be matched
     - C{maxMismatches} - (C{default=1}) maximum number of mismatches allowed to count as a match
    
    The results from a successful parse will contain the matched text from the input string and the following named results:
     - C{mismatches} - a list of the positions within the match_string where mismatches were found
     - C{original} - the original match_string used to compare against the input string
    
    If C{mismatches} is an empty list, then the match was an exact match.
    
    Example::
        patt = CloseMatch("ATCATCGAATGGA")
        patt.parseString("ATCATCGAAXGGA") # -> (['ATCATCGAAXGGA'], {'mismatches': [[9]], 'original': ['ATCATCGAATGGA']})
        patt.parseString("ATCAXCGAAXGGA") # -> Exception: Expected 'ATCATCGAATGGA' (with up to 1 mismatches) (at char 0), (line:1, col:1)

        # exact match
        patt.parseString("ATCATCGAATGGA") # -> (['ATCATCGAATGGA'], {'mismatches': [[]], 'original': ['ATCATCGAATGGA']})

        # close match allowing up to 2 mismatches
        patt = CloseMatch("ATCATCGAATGGA", maxMismatches=2)
        patt.parseString("ATCAXCGAAXGGA") # -> (['ATCAXCGAAXGGA'], {'mismatches': [[4, 9]], 'original': ['ATCATCGAATGGA']})
    rrcsBtt|�j�||_||_||_d|j|jf|_d|_d|_dS)Nz&Expected %r (with up to %d mismatches)F)	r�rlr�r��match_string�
maxMismatchesrar`r[)r�r�r�)rHrwrxr��	szCloseMatch.__init__TcCs�|}t|�}|t|j�}||kr�|j}d}g}	|j}
x�tt|||�|j��D]0\}}|\}}
||
krP|	j|�t|	�|
krPPqPW|d}t|||�g�}|j|d<|	|d<||fSt|||j|��dS)Nrrr�original�
mismatches)	r�r�r�r�r�r�r"rra)r�r-r�ro�startr��maxlocr�Zmatch_stringlocr�r�Zs_m�src�mat�resultsrwrwrxr��	s("

zCloseMatch.parseImpl)rr)T)r�r�r�r�r�r�r�rwrw)rHrxrl�	s	cs8eZdZdZd
�fdd�	Zdd	d
�Z�fdd�Z�ZS)r/a	
    Token for matching words composed of allowed character sets.
    Defined with string containing all allowed initial characters,
    an optional string containing allowed body characters (if omitted,
    defaults to the initial character set), and an optional minimum,
    maximum, and/or exact length.  The default value for C{min} is 1 (a
    minimum value < 1 is not valid); the default values for C{max} and C{exact}
    are 0, meaning no maximum or exact length restriction. An optional
    C{excludeChars} parameter can list characters that might be found in 
    the input C{bodyChars} string; useful to define a word of all printables
    except for one or two characters, for instance.
    
    L{srange} is useful for defining custom character set strings for defining 
    C{Word} expressions, using range notation from regular expression character sets.
    
    A common mistake is to use C{Word} to match a specific literal string, as in 
    C{Word("Address")}. Remember that C{Word} uses the string argument to define
    I{sets} of matchable characters. This expression would match "Add", "AAA",
    "dAred", or any other word made up of the characters 'A', 'd', 'r', 'e', and 's'.
    To match an exact literal string, use L{Literal} or L{Keyword}.

    pyparsing includes helper strings for building Words:
     - L{alphas}
     - L{nums}
     - L{alphanums}
     - L{hexnums}
     - L{alphas8bit} (alphabetic characters in ASCII range 128-255 - accented, tilded, umlauted, etc.)
     - L{punc8bit} (non-alphabetic characters in ASCII range 128-255 - currency, symbols, superscripts, diacriticals, etc.)
     - L{printables} (any non-whitespace character)

    Example::
        # a word composed of digits
        integer = Word(nums) # equivalent to Word("0123456789") or Word(srange("0-9"))
        
        # a word with a leading capital, and zero or more lowercase
        capital_word = Word(alphas.upper(), alphas.lower())

        # hostnames are alphanumeric, with leading alpha, and '-'
        hostname = Word(alphas, alphanums+'-')
        
        # roman numeral (not a strict parser, accepts invalid mix of characters)
        roman = Word("IVXLCDM")
        
        # any string of non-whitespace characters, except for ','
        csv_value = Word(printables, excludeChars=",")
    NrrrFcs�tt|�j��rFdj�fdd�|D��}|rFdj�fdd�|D��}||_t|�|_|rl||_t|�|_n||_t|�|_|dk|_	|dkr�t
d��||_|dkr�||_nt
|_|dkr�||_||_t|�|_d|j|_d	|_||_d
|j|jk�r�|dk�r�|dk�r�|dk�r�|j|jk�r8dt|j�|_nHt|j�dk�rfdtj|j�t|j�f|_nd
t|j�t|j�f|_|j�r�d|jd|_ytj|j�|_Wntk
�r�d|_YnXdS)Nr�c3s|]}|�kr|VqdS)Nrw)r�r�)�excludeCharsrwrxr�7
sz Word.__init__.<locals>.<genexpr>c3s|]}|�kr|VqdS)Nrw)r�r�)r�rwrxr�9
srrrzZcannot specify a minimum length < 1; use Optional(Word()) if zero-length word is permittedz	Expected Fr�z[%s]+z%s[%s]*z	[%s][%s]*z\b)r�r/r�r��
initCharsOrigr��	initChars�
bodyCharsOrig�	bodyChars�maxSpecifiedr��minLen�maxLenr�r�r�rar`�	asKeyword�_escapeRegexRangeChars�reStringr�rd�escape�compilerK)r�rr�min�max�exactrr�)rH)r�rxr�4
sT



0
z
Word.__init__Tc
CsD|jr<|jj||�}|s(t|||j|��|j�}||j�fS|||jkrZt|||j|��|}|d7}t|�}|j}||j	}t
||�}x ||kr�|||kr�|d7}q�Wd}	|||jkr�d}	|jr�||kr�|||kr�d}	|j
�r|dk�r||d|k�s||k�r|||k�rd}	|	�r4t|||j|��||||�fS)NrrFTr)rdr�rra�end�grouprr�rrrrrr)
r�r-r�ror�r�r�Z	bodycharsr�ZthrowExceptionrwrwrxr�j
s6

4zWord.parseImplcstytt|�j�Stk
r"YnX|jdkrndd�}|j|jkr^d||j�||j�f|_nd||j�|_|jS)NcSs$t|�dkr|dd�dS|SdS)N�z...)r�)r�rwrwrx�
charsAsStr�
sz Word.__str__.<locals>.charsAsStrz	W:(%s,%s)zW:(%s))r�r/r�rKrUr�r)r�r)rHrwrxr��
s
zWord.__str__)NrrrrFN)T)r�r�r�r�r�r�r�r�rwrw)rHrxr/
s.6
#csFeZdZdZeejd��Zd�fdd�	Zddd�Z	�fd	d
�Z
�ZS)
r'a�
    Token for matching strings that match a given regular expression.
    Defined with string specifying the regular expression in a form recognized by the inbuilt Python re module.
    If the given regex contains named groups (defined using C{(?P<name>...)}), these will be preserved as 
    named parse results.

    Example::
        realnum = Regex(r"[+-]?\d+\.\d*")
        date = Regex(r'(?P<year>\d{4})-(?P<month>\d\d?)-(?P<day>\d\d?)')
        # ref: http://stackoverflow.com/questions/267399/how-do-you-match-only-valid-roman-numerals-with-a-regular-expression
        roman = Regex(r"M{0,4}(CM|CD|D?C{0,3})(XC|XL|L?X{0,3})(IX|IV|V?I{0,3})")
    z[A-Z]rcs�tt|�j�t|t�r�|s,tjdtdd�||_||_	yt
j|j|j	�|_
|j|_Wq�t
jk
r�tjd|tdd��Yq�Xn2t|tj�r�||_
t|�|_|_||_	ntd��t|�|_d|j|_d|_d|_d	S)
z�The parameters C{pattern} and C{flags} are passed to the C{re.compile()} function as-is. See the Python C{re} module for an explanation of the acceptable patterns and flags.z0null string passed to Regex; use Empty() insteadrq)r�z$invalid pattern (%s) passed to RegexzCRegex may only be constructed with a string or a compiled RE objectz	Expected FTN)r�r'r�rzr�r�r�r��pattern�flagsrdr
r�
sre_constants�error�compiledREtyper{r�r�r�rar`r[)r�rr)rHrwrxr��
s.





zRegex.__init__TcCsd|jj||�}|s"t|||j|��|j�}|j�}t|j��}|r\x|D]}||||<qHW||fS)N)rdr�rrar�	groupdictr"r)r�r-r�ror��dr�r�rwrwrxr��
s
zRegex.parseImplcsDytt|�j�Stk
r"YnX|jdkr>dt|j�|_|jS)NzRe:(%s))r�r'r�rKrUr�r)r�)rHrwrxr��
s
z
Regex.__str__)r)T)r�r�r�r�r�rdr
rr�r�r�r�rwrw)rHrxr'�
s
"

cs8eZdZdZd�fdd�	Zddd�Z�fd	d
�Z�ZS)
r%a�
    Token for matching strings that are delimited by quoting characters.
    
    Defined with the following parameters:
        - quoteChar - string of one or more characters defining the quote delimiting string
        - escChar - character to escape quotes, typically backslash (default=C{None})
        - escQuote - special quote sequence to escape an embedded quote string (such as SQL's "" to escape an embedded ") (default=C{None})
        - multiline - boolean indicating whether quotes can span multiple lines (default=C{False})
        - unquoteResults - boolean indicating whether the matched text should be unquoted (default=C{True})
        - endQuoteChar - string of one or more characters defining the end of the quote delimited string (default=C{None} => same as quoteChar)
        - convertWhitespaceEscapes - convert escaped whitespace (C{'\t'}, C{'\n'}, etc.) to actual whitespace (default=C{True})

    Example::
        qs = QuotedString('"')
        print(qs.searchString('lsjdf "This is the quote" sldjf'))
        complex_qs = QuotedString('{{', endQuoteChar='}}')
        print(complex_qs.searchString('lsjdf {{This is the "quote"}} sldjf'))
        sql_qs = QuotedString('"', escQuote='""')
        print(sql_qs.searchString('lsjdf "This is the quote with ""embedded"" quotes" sldjf'))
    prints::
        [['This is the quote']]
        [['This is the "quote"']]
        [['This is the quote with "embedded" quotes']]
    NFTcsNtt��j�|j�}|s0tjdtdd�t��|dkr>|}n"|j�}|s`tjdtdd�t��|�_t	|��_
|d�_|�_t	|��_
|�_|�_|�_|�_|r�tjtjB�_dtj�j�t�jd�|dk	r�t|�p�df�_n<d�_dtj�j�t�jd�|dk	�rt|��pdf�_t	�j�d	k�rp�jd
dj�fdd
�tt	�j�d	dd�D��d7_|�r��jdtj|�7_|�r��jdtj|�7_tj�j�d�_�jdtj�j�7_ytj�j�j��_�j�_Wn0tjk
�r&tjd�jtdd��YnXt ���_!d�j!�_"d�_#d�_$dS)Nz$quoteChar cannot be the empty stringrq)r�z'endQuoteChar cannot be the empty stringrz%s(?:[^%s%s]r�z%s(?:[^%s\n\r%s]rrz|(?:z)|(?:c3s4|],}dtj�jd|��t�j|�fVqdS)z%s[^%s]N)rdr	�endQuoteCharr)r�r�)r�rwrxr�/sz(QuotedString.__init__.<locals>.<genexpr>�)z|(?:%s)z|(?:%s.)z(.)z)*%sz$invalid pattern (%s) passed to Regexz	Expected FTrs)%r�r%r�r�r�r�r��SyntaxError�	quoteCharr��quoteCharLen�firstQuoteCharr�endQuoteCharLen�escChar�escQuote�unquoteResults�convertWhitespaceEscapesrd�	MULTILINE�DOTALLrr	rrr�r��escCharReplacePatternr
rrrr�r�rar`r[)r�rr r!Z	multiliner"rr#)rH)r�rxr�sf




6

zQuotedString.__init__c	Cs�|||jkr|jj||�pd}|s4t|||j|��|j�}|j�}|jr�||j|j	�}t
|t�r�d|kr�|jr�ddddd�}x |j
�D]\}}|j||�}q�W|jr�tj|jd|�}|jr�|j|j|j�}||fS)N�\�	r��
)z\tz\nz\fz\rz\g<1>)rrdr�rrarrr"rrrzr�r#r�r�r r�r&r!r)	r�r-r�ror�r�Zws_mapZwslitZwscharrwrwrxr�Gs( 
zQuotedString.parseImplcsFytt|�j�Stk
r"YnX|jdkr@d|j|jf|_|jS)Nz.quoted string, starting with %s ending with %s)r�r%r�rKrUrr)r�)rHrwrxr�js
zQuotedString.__str__)NNFTNT)T)r�r�r�r�r�r�r�r�rwrw)rHrxr%�
sA
#cs8eZdZdZd�fdd�	Zddd�Z�fd	d
�Z�ZS)
r	a�
    Token for matching words composed of characters I{not} in a given set (will
    include whitespace in matched characters if not listed in the provided exclusion set - see example).
    Defined with string containing all disallowed characters, and an optional
    minimum, maximum, and/or exact length.  The default value for C{min} is 1 (a
    minimum value < 1 is not valid); the default values for C{max} and C{exact}
    are 0, meaning no maximum or exact length restriction.

    Example::
        # define a comma-separated-value as anything that is not a ','
        csv_value = CharsNotIn(',')
        print(delimitedList(csv_value).parseString("dkls,lsdkjf,s12 34,@!#,213"))
    prints::
        ['dkls', 'lsdkjf', 's12 34', '@!#', '213']
    rrrcs�tt|�j�d|_||_|dkr*td��||_|dkr@||_nt|_|dkrZ||_||_t	|�|_
d|j
|_|jdk|_d|_
dS)NFrrzfcannot specify a minimum length < 1; use Optional(CharsNotIn()) if zero-length char group is permittedrz	Expected )r�r	r�rX�notCharsr�rrr�r�r�rar[r`)r�r+rrr
)rHrwrxr��s 
zCharsNotIn.__init__TcCs�|||jkrt|||j|��|}|d7}|j}t||jt|��}x ||krd|||krd|d7}qFW|||jkr�t|||j|��||||�fS)Nrr)r+rrarrr�r)r�r-r�ror�Znotchars�maxlenrwrwrxr��s
zCharsNotIn.parseImplcsdytt|�j�Stk
r"YnX|jdkr^t|j�dkrRd|jdd�|_nd|j|_|jS)Nrz
!W:(%s...)z!W:(%s))r�r	r�rKrUr�r+)r�)rHrwrxr��s
zCharsNotIn.__str__)rrrr)T)r�r�r�r�r�r�r�r�rwrw)rHrxr	vs
cs<eZdZdZdddddd�Zd�fdd�	Zddd�Z�ZS)r.a�
    Special matching class for matching whitespace.  Normally, whitespace is ignored
    by pyparsing grammars.  This class is included when some whitespace structures
    are significant.  Define with a string containing the whitespace characters to be
    matched; default is C{" \t\r\n"}.  Also takes optional C{min}, C{max}, and C{exact} arguments,
    as defined for the C{L{Word}} class.
    z<SPC>z<TAB>z<LF>z<CR>z<FF>)r�r(rr*r)� 	
rrrcs�tt��j�|�_�jdj�fdd��jD���djdd��jD���_d�_d�j�_	|�_
|dkrt|�_nt�_|dkr�|�_|�_
dS)Nr�c3s|]}|�jkr|VqdS)N)�
matchWhite)r�r�)r�rwrxr��sz!White.__init__.<locals>.<genexpr>css|]}tj|VqdS)N)r.�	whiteStrs)r�r�rwrwrxr��sTz	Expected r)
r�r.r�r.r�r�rYr�r[rarrr�)r�Zwsrrr
)rH)r�rxr��s zWhite.__init__TcCs�|||jkrt|||j|��|}|d7}||j}t|t|��}x"||krd|||jkrd|d7}qDW|||jkr�t|||j|��||||�fS)Nrr)r.rrarrr�r)r�r-r�ror�r�rwrwrxr��s
zWhite.parseImpl)r-rrrr)T)r�r�r�r�r/r�r�r�rwrw)rHrxr.�scseZdZ�fdd�Z�ZS)�_PositionTokencs(tt|�j�|jj|_d|_d|_dS)NTF)r�r0r�rHr�r�r[r`)r�)rHrwrxr��s
z_PositionToken.__init__)r�r�r�r�r�rwrw)rHrxr0�sr0cs2eZdZdZ�fdd�Zdd�Zd	dd�Z�ZS)
rzb
    Token to advance to a specific column of input text; useful for tabular report scraping.
    cstt|�j�||_dS)N)r�rr�r9)r��colno)rHrwrxr��szGoToColumn.__init__cCs`t||�|jkr\t|�}|jr*|j||�}x0||krZ||j�rZt||�|jkrZ|d7}q,W|S)Nrr)r9r�r]r��isspace)r�r-r�r�rwrwrxr��s&zGoToColumn.preParseTcCsDt||�}||jkr"t||d|��||j|}|||�}||fS)NzText not in expected column)r9r)r�r-r�roZthiscolZnewlocr�rwrwrxr�s

zGoToColumn.parseImpl)T)r�r�r�r�r�r�r�r�rwrw)rHrxr�s	cs*eZdZdZ�fdd�Zddd�Z�ZS)ra�
    Matches if current position is at the beginning of a line within the parse string
    
    Example::
    
        test = '''        AAA this line
        AAA and this line
          AAA but not this one
        B AAA and definitely not this one
        '''

        for t in (LineStart() + 'AAA' + restOfLine).searchString(test):
            print(t)
    
    Prints::
        ['AAA', ' this line']
        ['AAA', ' and this line']    

    cstt|�j�d|_dS)NzExpected start of line)r�rr�ra)r�)rHrwrxr�&szLineStart.__init__TcCs*t||�dkr|gfSt|||j|��dS)Nrr)r9rra)r�r-r�rorwrwrxr�*szLineStart.parseImpl)T)r�r�r�r�r�r�r�rwrw)rHrxrscs*eZdZdZ�fdd�Zddd�Z�ZS)rzU
    Matches if current position is at the end of a line within the parse string
    cs,tt|�j�|jtjjdd��d|_dS)Nrr�zExpected end of line)r�rr�r�r$rNr�ra)r�)rHrwrxr�3szLineEnd.__init__TcCsb|t|�kr6||dkr$|ddfSt|||j|��n(|t|�krN|dgfSt|||j|��dS)Nrrr)r�rra)r�r-r�rorwrwrxr�8szLineEnd.parseImpl)T)r�r�r�r�r�r�r�rwrw)rHrxr/scs*eZdZdZ�fdd�Zddd�Z�ZS)r*zM
    Matches if current position is at the beginning of the parse string
    cstt|�j�d|_dS)NzExpected start of text)r�r*r�ra)r�)rHrwrxr�GszStringStart.__init__TcCs0|dkr(||j|d�kr(t|||j|��|gfS)Nr)r�rra)r�r-r�rorwrwrxr�KszStringStart.parseImpl)T)r�r�r�r�r�r�r�rwrw)rHrxr*Cscs*eZdZdZ�fdd�Zddd�Z�ZS)r)zG
    Matches if current position is at the end of the parse string
    cstt|�j�d|_dS)NzExpected end of text)r�r)r�ra)r�)rHrwrxr�VszStringEnd.__init__TcCs^|t|�krt|||j|��n<|t|�kr6|dgfS|t|�krJ|gfSt|||j|��dS)Nrr)r�rra)r�r-r�rorwrwrxr�ZszStringEnd.parseImpl)T)r�r�r�r�r�r�r�rwrw)rHrxr)Rscs.eZdZdZef�fdd�	Zddd�Z�ZS)r1ap
    Matches if the current position is at the beginning of a Word, and
    is not preceded by any character in a given set of C{wordChars}
    (default=C{printables}). To emulate the C{} behavior of regular expressions,
    use C{WordStart(alphanums)}. C{WordStart} will also match at the beginning of
    the string being parsed, or at the beginning of a line.
    cs"tt|�j�t|�|_d|_dS)NzNot at the start of a word)r�r1r�r��	wordCharsra)r�r3)rHrwrxr�ls
zWordStart.__init__TcCs@|dkr8||d|jks(|||jkr8t|||j|��|gfS)Nrrr)r3rra)r�r-r�rorwrwrxr�qs
zWordStart.parseImpl)T)r�r�r�r�rVr�r�r�rwrw)rHrxr1dscs.eZdZdZef�fdd�	Zddd�Z�ZS)r0aZ
    Matches if the current position is at the end of a Word, and
    is not followed by any character in a given set of C{wordChars}
    (default=C{printables}). To emulate the C{} behavior of regular expressions,
    use C{WordEnd(alphanums)}. C{WordEnd} will also match at the end of
    the string being parsed, or at the end of a line.
    cs(tt|�j�t|�|_d|_d|_dS)NFzNot at the end of a word)r�r0r�r�r3rXra)r�r3)rHrwrxr��s
zWordEnd.__init__TcCsPt|�}|dkrH||krH|||jks8||d|jkrHt|||j|��|gfS)Nrrr)r�r3rra)r�r-r�ror�rwrwrxr��szWordEnd.parseImpl)T)r�r�r�r�rVr�r�r�rwrw)rHrxr0xscs�eZdZdZd�fdd�	Zdd�Zdd�Zd	d
�Z�fdd�Z�fd
d�Z	�fdd�Z
d�fdd�	Zgfdd�Z�fdd�Z
�ZS)r z^
    Abstract subclass of ParserElement, for combining and post-processing parsed tokens.
    Fcs�tt|�j|�t|t�r"t|�}t|t�r<tj|�g|_	njt|t
j�rzt|�}tdd�|D��rnt
tj|�}t|�|_	n,yt|�|_	Wntk
r�|g|_	YnXd|_dS)Ncss|]}t|t�VqdS)N)rzr�)r�r.rwrwrxr��sz+ParseExpression.__init__.<locals>.<genexpr>F)r�r r�rzr�r�r�r$rQ�exprsr��Iterable�allrvr�re)r�r4rg)rHrwrxr��s

zParseExpression.__init__cCs
|j|S)N)r4)r�r�rwrwrxr��szParseExpression.__getitem__cCs|jj|�d|_|S)N)r4r�rU)r�r�rwrwrxr��szParseExpression.appendcCs4d|_dd�|jD�|_x|jD]}|j�q W|S)z~Extends C{leaveWhitespace} defined in base class, and also invokes C{leaveWhitespace} on
           all contained expressions.FcSsg|]}|j��qSrw)r�)r�r�rwrwrxr��sz3ParseExpression.leaveWhitespace.<locals>.<listcomp>)rXr4r�)r�r�rwrwrxr��s
zParseExpression.leaveWhitespacecszt|t�rF||jkrvtt|�j|�xP|jD]}|j|jd�q,Wn0tt|�j|�x|jD]}|j|jd�q^W|S)Nrrrsrs)rzr+r]r�r r�r4)r�r�r�)rHrwrxr��s

zParseExpression.ignorecsLytt|�j�Stk
r"YnX|jdkrFd|jjt|j�f|_|jS)Nz%s:(%s))	r�r r�rKrUrHr�r�r4)r�)rHrwrxr��s
zParseExpression.__str__cs0tt|�j�x|jD]}|j�qWt|j�dk�r|jd}t||j�r�|jr�|jdkr�|j	r�|jdd�|jdg|_d|_
|j|jO_|j|jO_|jd}t||j�o�|jo�|jdko�|j	�r|jdd�|jdd�|_d|_
|j|jO_|j|jO_dt
|�|_|S)Nrqrrrz	Expected rsrs)r�r r�r4r�rzrHrSrVr^rUr[r`r�ra)r�r�r�)rHrwrxr��s0




zParseExpression.streamlinecstt|�j||�}|S)N)r�r rm)r�r�rlr�)rHrwrxrm�szParseExpression.setResultsNamecCs:|dd�|g}x|jD]}|j|�qW|jg�dS)N)r4r�r�)r�r��tmpr�rwrwrxr��szParseExpression.validatecs$tt|�j�}dd�|jD�|_|S)NcSsg|]}|j��qSrw)r�)r�r�rwrwrxr��sz(ParseExpression.copy.<locals>.<listcomp>)r�r r�r4)r�r�)rHrwrxr��szParseExpression.copy)F)F)r�r�r�r�r�r�r�r�r�r�r�rmr�r�r�rwrw)rHrxr �s	
"csTeZdZdZGdd�de�Zd�fdd�	Zddd�Zd	d
�Zdd�Z	d
d�Z
�ZS)ra

    Requires all given C{ParseExpression}s to be found in the given order.
    Expressions may be separated by whitespace.
    May be constructed using the C{'+'} operator.
    May also be constructed using the C{'-'} operator, which will suppress backtracking.

    Example::
        integer = Word(nums)
        name_expr = OneOrMore(Word(alphas))

        expr = And([integer("id"),name_expr("name"),integer("age")])
        # more easily written as:
        expr = integer("id") + name_expr("name") + integer("age")
    cseZdZ�fdd�Z�ZS)zAnd._ErrorStopcs&ttj|�j||�d|_|j�dS)N�-)r�rr�r�r�r�)r�r�r�)rHrwrxr�
szAnd._ErrorStop.__init__)r�r�r�r�r�rwrw)rHrxr�
sr�TcsRtt|�j||�tdd�|jD��|_|j|jdj�|jdj|_d|_	dS)Ncss|]}|jVqdS)N)r[)r�r�rwrwrxr�
szAnd.__init__.<locals>.<genexpr>rT)
r�rr�r6r4r[r�rYrXre)r�r4rg)rHrwrxr�
s
zAnd.__init__c	Cs|jdj|||dd�\}}d}x�|jdd�D]�}t|tj�rFd}q0|r�y|j|||�\}}Wq�tk
rv�Yq�tk
r�}zd|_tj|��WYdd}~Xq�t	k
r�t|t
|�|j|��Yq�Xn|j|||�\}}|s�|j�r0||7}q0W||fS)NrF)rprrT)
r4rtrzrr�r#r�
__traceback__r�r�r�rar�)	r�r-r�ro�
resultlistZ	errorStopr�Z
exprtokensr�rwrwrxr�
s(z
And.parseImplcCst|t�rtj|�}|j|�S)N)rzr�r$rQr�)r�r�rwrwrxr5
s

zAnd.__iadd__cCs8|dd�|g}x |jD]}|j|�|jsPqWdS)N)r4r�r[)r�r��subRecCheckListr�rwrwrxr�:
s

zAnd.checkRecursioncCs@t|d�r|jS|jdkr:ddjdd�|jD��d|_|jS)Nr��{r�css|]}t|�VqdS)N)r�)r�r�rwrwrxr�F
szAnd.__str__.<locals>.<genexpr>�})r�r�rUr�r4)r�rwrwrxr�A
s


 zAnd.__str__)T)T)r�r�r�r�r
r�r�r�rr�r�r�rwrw)rHrxr�s
csDeZdZdZd�fdd�	Zddd�Zdd	�Zd
d�Zdd
�Z�Z	S)ra�
    Requires that at least one C{ParseExpression} is found.
    If two expressions match, the expression that matches the longest string will be used.
    May be constructed using the C{'^'} operator.

    Example::
        # construct Or using '^' operator
        
        number = Word(nums) ^ Combine(Word(nums) + '.' + Word(nums))
        print(number.searchString("123 3.1416 789"))
    prints::
        [['123'], ['3.1416'], ['789']]
    Fcs:tt|�j||�|jr0tdd�|jD��|_nd|_dS)Ncss|]}|jVqdS)N)r[)r�r�rwrwrxr�\
szOr.__init__.<locals>.<genexpr>T)r�rr�r4rr[)r�r4rg)rHrwrxr�Y
szOr.__init__TcCsTd}d}g}x�|jD]�}y|j||�}Wnvtk
rd}	z d|	_|	j|krT|	}|	j}WYdd}	~	Xqtk
r�t|�|kr�t|t|�|j|�}t|�}YqX|j||f�qW|�r*|j	dd�d�x`|D]X\}
}y|j
|||�Stk
�r$}	z"d|	_|	j|k�r|	}|	j}WYdd}	~	Xq�Xq�W|dk	�rB|j|_|�nt||d|��dS)NrrcSs
|dS)Nrrw)�xrwrwrxryu
szOr.parseImpl.<locals>.<lambda>)r�z no defined alternatives to matchrs)r4r�rr9r�r�r�rar��sortrtr�)r�r-r�ro�	maxExcLoc�maxExceptionr�r�Zloc2r��_rwrwrxr�`
s<

zOr.parseImplcCst|t�rtj|�}|j|�S)N)rzr�r$rQr�)r�r�rwrwrx�__ixor__�
s

zOr.__ixor__cCs@t|d�r|jS|jdkr:ddjdd�|jD��d|_|jS)Nr�r<z ^ css|]}t|�VqdS)N)r�)r�r�rwrwrxr��
szOr.__str__.<locals>.<genexpr>r=)r�r�rUr�r4)r�rwrwrxr��
s


 z
Or.__str__cCs0|dd�|g}x|jD]}|j|�qWdS)N)r4r�)r�r�r;r�rwrwrxr��
szOr.checkRecursion)F)T)
r�r�r�r�r�r�rCr�r�r�rwrw)rHrxrK
s

&	csDeZdZdZd�fdd�	Zddd�Zdd	�Zd
d�Zdd
�Z�Z	S)ra�
    Requires that at least one C{ParseExpression} is found.
    If two expressions match, the first one listed is the one that will match.
    May be constructed using the C{'|'} operator.

    Example::
        # construct MatchFirst using '|' operator
        
        # watch the order of expressions to match
        number = Word(nums) | Combine(Word(nums) + '.' + Word(nums))
        print(number.searchString("123 3.1416 789")) #  Fail! -> [['123'], ['3'], ['1416'], ['789']]

        # put more selective expression first
        number = Combine(Word(nums) + '.' + Word(nums)) | Word(nums)
        print(number.searchString("123 3.1416 789")) #  Better -> [['123'], ['3.1416'], ['789']]
    Fcs:tt|�j||�|jr0tdd�|jD��|_nd|_dS)Ncss|]}|jVqdS)N)r[)r�r�rwrwrxr��
sz&MatchFirst.__init__.<locals>.<genexpr>T)r�rr�r4rr[)r�r4rg)rHrwrxr��
szMatchFirst.__init__Tc	Cs�d}d}x�|jD]�}y|j|||�}|Stk
r\}z|j|krL|}|j}WYdd}~Xqtk
r�t|�|kr�t|t|�|j|�}t|�}YqXqW|dk	r�|j|_|�nt||d|��dS)Nrrz no defined alternatives to matchrs)r4rtrr�r�r�rar�)	r�r-r�ror@rAr�r�r�rwrwrxr��
s$
zMatchFirst.parseImplcCst|t�rtj|�}|j|�S)N)rzr�r$rQr�)r�r�rwrwrx�__ior__�
s

zMatchFirst.__ior__cCs@t|d�r|jS|jdkr:ddjdd�|jD��d|_|jS)Nr�r<z | css|]}t|�VqdS)N)r�)r�r�rwrwrxr��
sz%MatchFirst.__str__.<locals>.<genexpr>r=)r�r�rUr�r4)r�rwrwrxr��
s


 zMatchFirst.__str__cCs0|dd�|g}x|jD]}|j|�qWdS)N)r4r�)r�r�r;r�rwrwrxr��
szMatchFirst.checkRecursion)F)T)
r�r�r�r�r�r�rDr�r�r�rwrw)rHrxr�
s
	cs<eZdZdZd�fdd�	Zddd�Zdd�Zd	d
�Z�ZS)
ram
    Requires all given C{ParseExpression}s to be found, but in any order.
    Expressions may be separated by whitespace.
    May be constructed using the C{'&'} operator.

    Example::
        color = oneOf("RED ORANGE YELLOW GREEN BLUE PURPLE BLACK WHITE BROWN")
        shape_type = oneOf("SQUARE CIRCLE TRIANGLE STAR HEXAGON OCTAGON")
        integer = Word(nums)
        shape_attr = "shape:" + shape_type("shape")
        posn_attr = "posn:" + Group(integer("x") + ',' + integer("y"))("posn")
        color_attr = "color:" + color("color")
        size_attr = "size:" + integer("size")

        # use Each (using operator '&') to accept attributes in any order 
        # (shape and posn are required, color and size are optional)
        shape_spec = shape_attr & posn_attr & Optional(color_attr) & Optional(size_attr)

        shape_spec.runTests('''
            shape: SQUARE color: BLACK posn: 100, 120
            shape: CIRCLE size: 50 color: BLUE posn: 50,80
            color:GREEN size:20 shape:TRIANGLE posn:20,40
            '''
            )
    prints::
        shape: SQUARE color: BLACK posn: 100, 120
        ['shape:', 'SQUARE', 'color:', 'BLACK', 'posn:', ['100', ',', '120']]
        - color: BLACK
        - posn: ['100', ',', '120']
          - x: 100
          - y: 120
        - shape: SQUARE


        shape: CIRCLE size: 50 color: BLUE posn: 50,80
        ['shape:', 'CIRCLE', 'size:', '50', 'color:', 'BLUE', 'posn:', ['50', ',', '80']]
        - color: BLUE
        - posn: ['50', ',', '80']
          - x: 50
          - y: 80
        - shape: CIRCLE
        - size: 50


        color: GREEN size: 20 shape: TRIANGLE posn: 20,40
        ['color:', 'GREEN', 'size:', '20', 'shape:', 'TRIANGLE', 'posn:', ['20', ',', '40']]
        - color: GREEN
        - posn: ['20', ',', '40']
          - x: 20
          - y: 40
        - shape: TRIANGLE
        - size: 20
    Tcs8tt|�j||�tdd�|jD��|_d|_d|_dS)Ncss|]}|jVqdS)N)r[)r�r�rwrwrxr�sz Each.__init__.<locals>.<genexpr>T)r�rr�r6r4r[rX�initExprGroups)r�r4rg)rHrwrxr�sz
Each.__init__c
s�|jr�tdd�|jD��|_dd�|jD�}dd�|jD�}|||_dd�|jD�|_dd�|jD�|_dd�|jD�|_|j|j7_d	|_|}|jdd�}|jdd��g}d
}	x�|	�rp|�|j|j}
g}x~|
D]v}y|j||�}Wn t	k
�r|j
|�Yq�X|j
|jjt|�|��||k�rD|j
|�q�|�kr�j
|�q�Wt|�t|
�kr�d	}	q�W|�r�djdd�|D��}
t	||d
|
��|�fdd�|jD�7}g}x*|D]"}|j|||�\}}|j
|��q�Wt|tg��}||fS)Ncss&|]}t|t�rt|j�|fVqdS)N)rzrr�r.)r�r�rwrwrxr�sz!Each.parseImpl.<locals>.<genexpr>cSsg|]}t|t�r|j�qSrw)rzrr.)r�r�rwrwrxr�sz"Each.parseImpl.<locals>.<listcomp>cSs"g|]}|jrt|t�r|�qSrw)r[rzr)r�r�rwrwrxr�scSsg|]}t|t�r|j�qSrw)rzr2r.)r�r�rwrwrxr� scSsg|]}t|t�r|j�qSrw)rzrr.)r�r�rwrwrxr�!scSs g|]}t|tttf�s|�qSrw)rzrr2r)r�r�rwrwrxr�"sFTz, css|]}t|�VqdS)N)r�)r�r�rwrwrxr�=sz*Missing one or more required elements (%s)cs$g|]}t|t�r|j�kr|�qSrw)rzrr.)r�r�)�tmpOptrwrxr�As)rEr�r4Zopt1mapZ	optionalsZmultioptionalsZ
multirequiredZrequiredr�rr�r�r��remover�r�rt�sumr")r�r-r�roZopt1Zopt2ZtmpLocZtmpReqdZ
matchOrderZkeepMatchingZtmpExprsZfailedr�Zmissingr:r�ZfinalResultsrw)rFrxr�sP



zEach.parseImplcCs@t|d�r|jS|jdkr:ddjdd�|jD��d|_|jS)Nr�r<z & css|]}t|�VqdS)N)r�)r�r�rwrwrxr�PszEach.__str__.<locals>.<genexpr>r=)r�r�rUr�r4)r�rwrwrxr�Ks


 zEach.__str__cCs0|dd�|g}x|jD]}|j|�qWdS)N)r4r�)r�r�r;r�rwrwrxr�TszEach.checkRecursion)T)T)	r�r�r�r�r�r�r�r�r�rwrw)rHrxr�
s
5
1	csleZdZdZd�fdd�	Zddd�Zdd	�Z�fd
d�Z�fdd
�Zdd�Z	gfdd�Z
�fdd�Z�ZS)rza
    Abstract subclass of C{ParserElement}, for combining and post-processing parsed tokens.
    Fcs�tt|�j|�t|t�r@ttjt�r2tj|�}ntjt	|��}||_
d|_|dk	r�|j|_|j
|_
|j|j�|j|_|j|_|j|_|jj|j�dS)N)r�rr�rzr��
issubclassr$rQr,rr.rUr`r[r�rYrXrWrer]r�)r�r.rg)rHrwrxr�^s
zParseElementEnhance.__init__TcCs2|jdk	r|jj|||dd�Std||j|��dS)NF)rpr�)r.rtrra)r�r-r�rorwrwrxr�ps
zParseElementEnhance.parseImplcCs*d|_|jj�|_|jdk	r&|jj�|S)NF)rXr.r�r�)r�rwrwrxr�vs


z#ParseElementEnhance.leaveWhitespacecsrt|t�rB||jkrntt|�j|�|jdk	rn|jj|jd�n,tt|�j|�|jdk	rn|jj|jd�|S)Nrrrsrs)rzr+r]r�rr�r.)r�r�)rHrwrxr�}s



zParseElementEnhance.ignorecs&tt|�j�|jdk	r"|jj�|S)N)r�rr�r.)r�)rHrwrxr��s

zParseElementEnhance.streamlinecCsB||krt||g��|dd�|g}|jdk	r>|jj|�dS)N)r&r.r�)r�r�r;rwrwrxr��s

z"ParseElementEnhance.checkRecursioncCs6|dd�|g}|jdk	r(|jj|�|jg�dS)N)r.r�r�)r�r�r7rwrwrxr��s
zParseElementEnhance.validatecsVytt|�j�Stk
r"YnX|jdkrP|jdk	rPd|jjt|j�f|_|jS)Nz%s:(%s))	r�rr�rKrUr.rHr�r�)r�)rHrwrxr��szParseElementEnhance.__str__)F)T)
r�r�r�r�r�r�r�r�r�r�r�r�r�rwrw)rHrxrZs
cs*eZdZdZ�fdd�Zddd�Z�ZS)ra�
    Lookahead matching of the given parse expression.  C{FollowedBy}
    does I{not} advance the parsing position within the input string, it only
    verifies that the specified parse expression matches at the current
    position.  C{FollowedBy} always returns a null token list.

    Example::
        # use FollowedBy to match a label only if it is followed by a ':'
        data_word = Word(alphas)
        label = data_word + FollowedBy(':')
        attr_expr = Group(label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join))
        
        OneOrMore(attr_expr).parseString("shape: SQUARE color: BLACK posn: upper left").pprint()
    prints::
        [['shape', 'SQUARE'], ['color', 'BLACK'], ['posn', 'upper left']]
    cstt|�j|�d|_dS)NT)r�rr�r[)r�r.)rHrwrxr��szFollowedBy.__init__TcCs|jj||�|gfS)N)r.r�)r�r-r�rorwrwrxr��szFollowedBy.parseImpl)T)r�r�r�r�r�r�r�rwrw)rHrxr�scs2eZdZdZ�fdd�Zd	dd�Zdd�Z�ZS)
ra�
    Lookahead to disallow matching with the given parse expression.  C{NotAny}
    does I{not} advance the parsing position within the input string, it only
    verifies that the specified parse expression does I{not} match at the current
    position.  Also, C{NotAny} does I{not} skip over leading whitespace. C{NotAny}
    always returns a null token list.  May be constructed using the '~' operator.

    Example::
        
    cs0tt|�j|�d|_d|_dt|j�|_dS)NFTzFound unwanted token, )r�rr�rXr[r�r.ra)r�r.)rHrwrxr��szNotAny.__init__TcCs&|jj||�rt|||j|��|gfS)N)r.r�rra)r�r-r�rorwrwrxr��szNotAny.parseImplcCs4t|d�r|jS|jdkr.dt|j�d|_|jS)Nr�z~{r=)r�r�rUr�r.)r�rwrwrxr��s


zNotAny.__str__)T)r�r�r�r�r�r�r�r�rwrw)rHrxr�s

cs(eZdZd�fdd�	Zddd�Z�ZS)	�_MultipleMatchNcsFtt|�j|�d|_|}t|t�r.tj|�}|dk	r<|nd|_dS)NT)	r�rJr�rWrzr�r$rQ�	not_ender)r�r.�stopOnZender)rHrwrxr��s

z_MultipleMatch.__init__TcCs�|jj}|j}|jdk	}|r$|jj}|r2|||�||||dd�\}}yZ|j}	xJ|rb|||�|	rr|||�}
n|}
|||
|�\}}|s�|j�rT||7}qTWWnttfk
r�YnX||fS)NF)rp)	r.rtr�rKr�r]r�rr�)r�r-r�roZself_expr_parseZself_skip_ignorablesZcheck_enderZ
try_not_enderr�ZhasIgnoreExprsr�Z	tmptokensrwrwrxr��s,



z_MultipleMatch.parseImpl)N)T)r�r�r�r�r�r�rwrw)rHrxrJ�srJc@seZdZdZdd�ZdS)ra�
    Repetition of one or more of the given expression.
    
    Parameters:
     - expr - expression that must match one or more times
     - stopOn - (default=C{None}) - expression for a terminating sentinel
          (only required if the sentinel would ordinarily match the repetition 
          expression)          

    Example::
        data_word = Word(alphas)
        label = data_word + FollowedBy(':')
        attr_expr = Group(label + Suppress(':') + OneOrMore(data_word).setParseAction(' '.join))

        text = "shape: SQUARE posn: upper left color: BLACK"
        OneOrMore(attr_expr).parseString(text).pprint()  # Fail! read 'color' as data instead of next label -> [['shape', 'SQUARE color']]

        # use stopOn attribute for OneOrMore to avoid reading label string as part of the data
        attr_expr = Group(label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join))
        OneOrMore(attr_expr).parseString(text).pprint() # Better -> [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'BLACK']]
        
        # could also be written as
        (attr_expr * (1,)).parseString(text).pprint()
    cCs4t|d�r|jS|jdkr.dt|j�d|_|jS)Nr�r<z}...)r�r�rUr�r.)r�rwrwrxr�!s


zOneOrMore.__str__N)r�r�r�r�r�rwrwrwrxrscs8eZdZdZd
�fdd�	Zd�fdd�	Zdd	�Z�ZS)r2aw
    Optional repetition of zero or more of the given expression.
    
    Parameters:
     - expr - expression that must match zero or more times
     - stopOn - (default=C{None}) - expression for a terminating sentinel
          (only required if the sentinel would ordinarily match the repetition 
          expression)          

    Example: similar to L{OneOrMore}
    Ncstt|�j||d�d|_dS)N)rLT)r�r2r�r[)r�r.rL)rHrwrxr�6szZeroOrMore.__init__Tcs6ytt|�j|||�Sttfk
r0|gfSXdS)N)r�r2r�rr�)r�r-r�ro)rHrwrxr�:szZeroOrMore.parseImplcCs4t|d�r|jS|jdkr.dt|j�d|_|jS)Nr�rz]...)r�r�rUr�r.)r�rwrwrxr�@s


zZeroOrMore.__str__)N)T)r�r�r�r�r�r�r�r�rwrw)rHrxr2*sc@s eZdZdd�ZeZdd�ZdS)�
_NullTokencCsdS)NFrw)r�rwrwrxr�Jsz_NullToken.__bool__cCsdS)Nr�rw)r�rwrwrxr�Msz_NullToken.__str__N)r�r�r�r�r'r�rwrwrwrxrMIsrMcs6eZdZdZef�fdd�	Zd	dd�Zdd�Z�ZS)
raa
    Optional matching of the given expression.

    Parameters:
     - expr - expression that must match zero or more times
     - default (optional) - value to be returned if the optional expression is not found.

    Example::
        # US postal code can be a 5-digit zip, plus optional 4-digit qualifier
        zip = Combine(Word(nums, exact=5) + Optional('-' + Word(nums, exact=4)))
        zip.runTests('''
            # traditional ZIP code
            12345
            
            # ZIP+4 form
            12101-0001
            
            # invalid ZIP
            98765-
            ''')
    prints::
        # traditional ZIP code
        12345
        ['12345']

        # ZIP+4 form
        12101-0001
        ['12101-0001']

        # invalid ZIP
        98765-
             ^
        FAIL: Expected end of text (at char 5), (line:1, col:6)
    cs.tt|�j|dd�|jj|_||_d|_dS)NF)rgT)r�rr�r.rWr�r[)r�r.r�)rHrwrxr�ts
zOptional.__init__TcCszy|jj|||dd�\}}WnTttfk
rp|jtk	rh|jjr^t|jg�}|j||jj<ql|jg}ng}YnX||fS)NF)rp)r.rtrr�r��_optionalNotMatchedrVr")r�r-r�ror�rwrwrxr�zs


zOptional.parseImplcCs4t|d�r|jS|jdkr.dt|j�d|_|jS)Nr�rr	)r�r�rUr�r.)r�rwrwrxr��s


zOptional.__str__)T)	r�r�r�r�rNr�r�r�r�rwrw)rHrxrQs"
cs,eZdZdZd	�fdd�	Zd
dd�Z�ZS)r(a�	
    Token for skipping over all undefined text until the matched expression is found.

    Parameters:
     - expr - target expression marking the end of the data to be skipped
     - include - (default=C{False}) if True, the target expression is also parsed 
          (the skipped text and target expression are returned as a 2-element list).
     - ignore - (default=C{None}) used to define grammars (typically quoted strings and 
          comments) that might contain false matches to the target expression
     - failOn - (default=C{None}) define expressions that are not allowed to be 
          included in the skipped test; if found before the target expression is found, 
          the SkipTo is not a match

    Example::
        report = '''
            Outstanding Issues Report - 1 Jan 2000

               # | Severity | Description                               |  Days Open
            -----+----------+-------------------------------------------+-----------
             101 | Critical | Intermittent system crash                 |          6
              94 | Cosmetic | Spelling error on Login ('log|n')         |         14
              79 | Minor    | System slow when running too many reports |         47
            '''
        integer = Word(nums)
        SEP = Suppress('|')
        # use SkipTo to simply match everything up until the next SEP
        # - ignore quoted strings, so that a '|' character inside a quoted string does not match
        # - parse action will call token.strip() for each matched token, i.e., the description body
        string_data = SkipTo(SEP, ignore=quotedString)
        string_data.setParseAction(tokenMap(str.strip))
        ticket_expr = (integer("issue_num") + SEP 
                      + string_data("sev") + SEP 
                      + string_data("desc") + SEP 
                      + integer("days_open"))
        
        for tkt in ticket_expr.searchString(report):
            print tkt.dump()
    prints::
        ['101', 'Critical', 'Intermittent system crash', '6']
        - days_open: 6
        - desc: Intermittent system crash
        - issue_num: 101
        - sev: Critical
        ['94', 'Cosmetic', "Spelling error on Login ('log|n')", '14']
        - days_open: 14
        - desc: Spelling error on Login ('log|n')
        - issue_num: 94
        - sev: Cosmetic
        ['79', 'Minor', 'System slow when running too many reports', '47']
        - days_open: 47
        - desc: System slow when running too many reports
        - issue_num: 79
        - sev: Minor
    FNcs`tt|�j|�||_d|_d|_||_d|_t|t	�rFt
j|�|_n||_dt
|j�|_dS)NTFzNo match found for )r�r(r��
ignoreExprr[r`�includeMatchr�rzr�r$rQ�failOnr�r.ra)r�r��includer�rQ)rHrwrxr��s
zSkipTo.__init__TcCs,|}t|�}|j}|jj}|jdk	r,|jjnd}|jdk	rB|jjnd}	|}
x�|
|kr�|dk	rh|||
�rhP|	dk	r�x*y|	||
�}
Wqrtk
r�PYqrXqrWy|||
ddd�Wn tt	fk
r�|
d7}
YqLXPqLWt|||j
|��|
}|||�}t|�}|j�r$||||dd�\}}
||
7}||fS)NF)rorprr)rp)
r�r.rtrQr�rOr�rrr�rar"rP)r�r-r�ror0r�r.Z
expr_parseZself_failOn_canParseNextZself_ignoreExpr_tryParseZtmplocZskiptextZ
skipresultr�rwrwrxr��s<

zSkipTo.parseImpl)FNN)T)r�r�r�r�r�r�r�rwrw)rHrxr(�s6
csbeZdZdZd�fdd�	Zdd�Zdd�Zd	d
�Zdd�Zgfd
d�Z	dd�Z
�fdd�Z�ZS)raK
    Forward declaration of an expression to be defined later -
    used for recursive grammars, such as algebraic infix notation.
    When the expression is known, it is assigned to the C{Forward} variable using the '<<' operator.

    Note: take care when assigning to C{Forward} not to overlook precedence of operators.
    Specifically, '|' has a lower precedence than '<<', so that::
        fwdExpr << a | b | c
    will actually be evaluated as::
        (fwdExpr << a) | b | c
    thereby leaving b and c out as parseable alternatives.  It is recommended that you
    explicitly group the values inserted into the C{Forward}::
        fwdExpr << (a | b | c)
    Converting to use the '<<=' operator instead will avoid this problem.

    See L{ParseResults.pprint} for an example of a recursive parser created using
    C{Forward}.
    Ncstt|�j|dd�dS)NF)rg)r�rr�)r�r�)rHrwrxr�szForward.__init__cCsjt|t�rtj|�}||_d|_|jj|_|jj|_|j|jj	�|jj
|_
|jj|_|jj
|jj�|S)N)rzr�r$rQr.rUr`r[r�rYrXrWr]r�)r�r�rwrwrx�
__lshift__s





zForward.__lshift__cCs||>S)Nrw)r�r�rwrwrx�__ilshift__'szForward.__ilshift__cCs
d|_|S)NF)rX)r�rwrwrxr�*szForward.leaveWhitespacecCs$|js d|_|jdk	r |jj�|S)NT)r_r.r�)r�rwrwrxr�.s


zForward.streamlinecCs>||kr0|dd�|g}|jdk	r0|jj|�|jg�dS)N)r.r�r�)r�r�r7rwrwrxr�5s

zForward.validatecCs>t|d�r|jS|jjdSd}Wd|j|_X|jjd|S)Nr�z: ...�Nonez: )r�r�rHr�Z_revertClass�_ForwardNoRecurser.r�)r�Z	retStringrwrwrxr�<s

zForward.__str__cs.|jdk	rtt|�j�St�}||K}|SdS)N)r.r�rr�)r�r�)rHrwrxr�Ms

zForward.copy)N)
r�r�r�r�r�rSrTr�r�r�r�r�r�rwrw)rHrxrs
c@seZdZdd�ZdS)rVcCsdS)Nz...rw)r�rwrwrxr�Vsz_ForwardNoRecurse.__str__N)r�r�r�r�rwrwrwrxrVUsrVcs"eZdZdZd�fdd�	Z�ZS)r-zQ
    Abstract subclass of C{ParseExpression}, for converting parsed results.
    Fcstt|�j|�d|_dS)NF)r�r-r�rW)r�r.rg)rHrwrxr�]szTokenConverter.__init__)F)r�r�r�r�r�r�rwrw)rHrxr-Yscs6eZdZdZd
�fdd�	Z�fdd�Zdd	�Z�ZS)r
a�
    Converter to concatenate all matching tokens to a single string.
    By default, the matching patterns must also be contiguous in the input string;
    this can be disabled by specifying C{'adjacent=False'} in the constructor.

    Example::
        real = Word(nums) + '.' + Word(nums)
        print(real.parseString('3.1416')) # -> ['3', '.', '1416']
        # will also erroneously match the following
        print(real.parseString('3. 1416')) # -> ['3', '.', '1416']

        real = Combine(Word(nums) + '.' + Word(nums))
        print(real.parseString('3.1416')) # -> ['3.1416']
        # no match when there are internal spaces
        print(real.parseString('3. 1416')) # -> Exception: Expected W:(0123...)
    r�Tcs8tt|�j|�|r|j�||_d|_||_d|_dS)NT)r�r
r�r��adjacentrX�
joinStringre)r�r.rXrW)rHrwrxr�rszCombine.__init__cs(|jrtj||�ntt|�j|�|S)N)rWr$r�r�r
)r�r�)rHrwrxr�|szCombine.ignorecCsP|j�}|dd�=|tdj|j|j��g|jd�7}|jrH|j�rH|gS|SdS)Nr�)r�)r�r"r�r
rXrbrVr�)r�r-r�r�ZretToksrwrwrxr��s
"zCombine.postParse)r�T)r�r�r�r�r�r�r�r�rwrw)rHrxr
as
cs(eZdZdZ�fdd�Zdd�Z�ZS)ra�
    Converter to return the matched tokens as a list - useful for returning tokens of C{L{ZeroOrMore}} and C{L{OneOrMore}} expressions.

    Example::
        ident = Word(alphas)
        num = Word(nums)
        term = ident | num
        func = ident + Optional(delimitedList(term))
        print(func.parseString("fn a,b,100"))  # -> ['fn', 'a', 'b', '100']

        func = ident + Group(Optional(delimitedList(term)))
        print(func.parseString("fn a,b,100"))  # -> ['fn', ['a', 'b', '100']]
    cstt|�j|�d|_dS)NT)r�rr�rW)r�r.)rHrwrxr��szGroup.__init__cCs|gS)Nrw)r�r-r�r�rwrwrxr��szGroup.postParse)r�r�r�r�r�r�r�rwrw)rHrxr�s
cs(eZdZdZ�fdd�Zdd�Z�ZS)raW
    Converter to return a repetitive expression as a list, but also as a dictionary.
    Each element can also be referenced using the first token in the expression as its key.
    Useful for tabular report scraping when the first column can be used as a item key.

    Example::
        data_word = Word(alphas)
        label = data_word + FollowedBy(':')
        attr_expr = Group(label + Suppress(':') + OneOrMore(data_word).setParseAction(' '.join))

        text = "shape: SQUARE posn: upper left color: light blue texture: burlap"
        attr_expr = (label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join))
        
        # print attributes as plain groups
        print(OneOrMore(attr_expr).parseString(text).dump())
        
        # instead of OneOrMore(expr), parse using Dict(OneOrMore(Group(expr))) - Dict will auto-assign names
        result = Dict(OneOrMore(Group(attr_expr))).parseString(text)
        print(result.dump())
        
        # access named fields as dict entries, or output as dict
        print(result['shape'])        
        print(result.asDict())
    prints::
        ['shape', 'SQUARE', 'posn', 'upper left', 'color', 'light blue', 'texture', 'burlap']

        [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'light blue'], ['texture', 'burlap']]
        - color: light blue
        - posn: upper left
        - shape: SQUARE
        - texture: burlap
        SQUARE
        {'color': 'light blue', 'posn': 'upper left', 'texture': 'burlap', 'shape': 'SQUARE'}
    See more examples at L{ParseResults} of accessing fields by results name.
    cstt|�j|�d|_dS)NT)r�rr�rW)r�r.)rHrwrxr��sz
Dict.__init__cCs�x�t|�D]�\}}t|�dkr q
|d}t|t�rBt|d�j�}t|�dkr^td|�||<q
t|�dkr�t|dt�r�t|d|�||<q
|j�}|d=t|�dks�t|t�r�|j	�r�t||�||<q
t|d|�||<q
W|j
r�|gS|SdS)Nrrrr�rq)r�r�rzrur�r�r�r"r�r�rV)r�r-r�r�r��tokZikeyZ	dictvaluerwrwrxr��s$
zDict.postParse)r�r�r�r�r�r�r�rwrw)rHrxr�s#c@s eZdZdZdd�Zdd�ZdS)r+aV
    Converter for ignoring the results of a parsed expression.

    Example::
        source = "a, b, c,d"
        wd = Word(alphas)
        wd_list1 = wd + ZeroOrMore(',' + wd)
        print(wd_list1.parseString(source))

        # often, delimiters that are useful during parsing are just in the
        # way afterward - use Suppress to keep them out of the parsed output
        wd_list2 = wd + ZeroOrMore(Suppress(',') + wd)
        print(wd_list2.parseString(source))
    prints::
        ['a', ',', 'b', ',', 'c', ',', 'd']
        ['a', 'b', 'c', 'd']
    (See also L{delimitedList}.)
    cCsgS)Nrw)r�r-r�r�rwrwrxr��szSuppress.postParsecCs|S)Nrw)r�rwrwrxr��szSuppress.suppressN)r�r�r�r�r�r�rwrwrwrxr+�sc@s(eZdZdZdd�Zdd�Zdd�ZdS)	rzI
    Wrapper for parse actions, to ensure they are only called once.
    cCst|�|_d|_dS)NF)rM�callable�called)r�Z
methodCallrwrwrxr�s
zOnlyOnce.__init__cCs.|js|j|||�}d|_|St||d��dS)NTr�)r[rZr)r�r�r5rvr�rwrwrxr�s
zOnlyOnce.__call__cCs
d|_dS)NF)r[)r�rwrwrx�reset
szOnlyOnce.resetN)r�r�r�r�r�r�r\rwrwrwrxr�scs:t����fdd�}y�j|_Wntk
r4YnX|S)as
    Decorator for debugging parse actions. 
    
    When the parse action is called, this decorator will print C{">> entering I{method-name}(line:I{current_source_line}, I{parse_location}, I{matched_tokens})".}
    When the parse action completes, the decorator will print C{"<<"} followed by the returned value, or any exception that the parse action raised.

    Example::
        wd = Word(alphas)

        @traceParseAction
        def remove_duplicate_chars(tokens):
            return ''.join(sorted(set(''.join(tokens)))

        wds = OneOrMore(wd).setParseAction(remove_duplicate_chars)
        print(wds.parseString("slkdjs sld sldd sdlf sdljf"))
    prints::
        >>entering remove_duplicate_chars(line: 'slkdjs sld sldd sdlf sdljf', 0, (['slkdjs', 'sld', 'sldd', 'sdlf', 'sdljf'], {}))
        <<leaving remove_duplicate_chars (ret: 'dfjkls')
        ['dfjkls']
    cs��j}|dd�\}}}t|�dkr8|djjd|}tjjd|t||�||f�y�|�}Wn8tk
r�}ztjjd||f��WYdd}~XnXtjjd||f�|S)Nror�.z">>entering %s(line: '%s', %d, %r)
z<<leaving %s (exception: %s)
z<<leaving %s (ret: %r)
r9)r�r�rHr~�stderr�writerGrK)ZpaArgsZthisFuncr�r5rvr�r3)r�rwrx�z#sztraceParseAction.<locals>.z)rMr�r�)r�r`rw)r�rxrb
s
�,FcCs`t|�dt|�dt|�d}|rBt|t||��j|�S|tt|�|�j|�SdS)a�
    Helper to define a delimited list of expressions - the delimiter defaults to ','.
    By default, the list elements and delimiters can have intervening whitespace, and
    comments, but this can be overridden by passing C{combine=True} in the constructor.
    If C{combine} is set to C{True}, the matching tokens are returned as a single token
    string, with the delimiters included; otherwise, the matching tokens are returned
    as a list of tokens, with the delimiters suppressed.

    Example::
        delimitedList(Word(alphas)).parseString("aa,bb,cc") # -> ['aa', 'bb', 'cc']
        delimitedList(Word(hexnums), delim=':', combine=True).parseString("AA:BB:CC:DD:EE") # -> ['AA:BB:CC:DD:EE']
    z [r�z]...N)r�r
r2rir+)r.Zdelim�combineZdlNamerwrwrxr@9s
$csjt����fdd�}|dkr0tt�jdd��}n|j�}|jd�|j|dd�|�jd	t��d
�S)a:
    Helper to define a counted list of expressions.
    This helper defines a pattern of the form::
        integer expr expr expr...
    where the leading integer tells how many expr expressions follow.
    The matched tokens returns the array of expr tokens as a list - the leading count token is suppressed.
    
    If C{intExpr} is specified, it should be a pyparsing expression that produces an integer value.

    Example::
        countedArray(Word(alphas)).parseString('2 ab cd ef')  # -> ['ab', 'cd']

        # in this parser, the leading integer value is given in binary,
        # '10' indicating that 2 values are in the array
        binaryConstant = Word('01').setParseAction(lambda t: int(t[0], 2))
        countedArray(Word(alphas), intExpr=binaryConstant).parseString('10 ab cd ef')  # -> ['ab', 'cd']
    cs.|d}�|r tt�g|��p&tt�>gS)Nr)rrrC)r�r5rvr�)�	arrayExprr.rwrx�countFieldParseAction_s"z+countedArray.<locals>.countFieldParseActionNcSst|d�S)Nr)ru)rvrwrwrxrydszcountedArray.<locals>.<lambda>ZarrayLenT)rfz(len) z...)rr/rRr�r�rirxr�)r.ZintExprrdrw)rcr.rxr<Ls
cCs:g}x0|D](}t|t�r(|jt|��q
|j|�q
W|S)N)rzr�r�r�r�)�Lr�r�rwrwrxr�ks

r�cs6t���fdd�}|j|dd��jdt|���S)a*
    Helper to define an expression that is indirectly defined from
    the tokens matched in a previous expression, that is, it looks
    for a 'repeat' of a previous expression.  For example::
        first = Word(nums)
        second = matchPreviousLiteral(first)
        matchExpr = first + ":" + second
    will match C{"1:1"}, but not C{"1:2"}.  Because this matches a
    previous literal, will also match the leading C{"1:1"} in C{"1:10"}.
    If this is not desired, use C{matchPreviousExpr}.
    Do I{not} use with packrat parsing enabled.
    csP|rBt|�dkr�|d>qLt|j��}�tdd�|D��>n
�t�>dS)Nrrrcss|]}t|�VqdS)N)r)r��ttrwrwrxr��szDmatchPreviousLiteral.<locals>.copyTokenToRepeater.<locals>.<genexpr>)r�r�r�rr
)r�r5rvZtflat)�reprwrx�copyTokenToRepeater�sz1matchPreviousLiteral.<locals>.copyTokenToRepeaterT)rfz(prev) )rrxrir�)r.rhrw)rgrxrOts


csFt��|j�}�|K��fdd�}|j|dd��jdt|���S)aS
    Helper to define an expression that is indirectly defined from
    the tokens matched in a previous expression, that is, it looks
    for a 'repeat' of a previous expression.  For example::
        first = Word(nums)
        second = matchPreviousExpr(first)
        matchExpr = first + ":" + second
    will match C{"1:1"}, but not C{"1:2"}.  Because this matches by
    expressions, will I{not} match the leading C{"1:1"} in C{"1:10"};
    the expressions are evaluated first, and then compared, so
    C{"1"} is compared with C{"10"}.
    Do I{not} use with packrat parsing enabled.
    cs*t|j����fdd�}�j|dd�dS)Ncs$t|j��}|�kr tddd��dS)Nr�r)r�r�r)r�r5rvZtheseTokens)�matchTokensrwrx�mustMatchTheseTokens�szLmatchPreviousExpr.<locals>.copyTokenToRepeater.<locals>.mustMatchTheseTokensT)rf)r�r�r�)r�r5rvrj)rg)rirxrh�sz.matchPreviousExpr.<locals>.copyTokenToRepeaterT)rfz(prev) )rr�rxrir�)r.Ze2rhrw)rgrxrN�scCs>xdD]}|j|t|�}qW|jdd�}|jdd�}t|�S)Nz\^-]rz\nr(z\t)r��_bslashr�)r�r�rwrwrxr�s

rTc
s�|rdd�}dd�}t�ndd�}dd�}t�g}t|t�rF|j�}n&t|tj�r\t|�}ntj	dt
dd�|svt�Sd	}x�|t|�d
k�r||}xnt
||d
d��D]N\}}	||	|�r�|||d
=Pq�|||	�r�|||d
=|j||	�|	}Pq�W|d
7}q|W|�r�|�r�yht|�tdj|��k�rZtd
djdd�|D���jdj|��Stdjdd�|D���jdj|��SWn&tk
�r�tj	dt
dd�YnXt�fdd�|D��jdj|��S)a�
    Helper to quickly define a set of alternative Literals, and makes sure to do
    longest-first testing when there is a conflict, regardless of the input order,
    but returns a C{L{MatchFirst}} for best performance.

    Parameters:
     - strs - a string of space-delimited literals, or a collection of string literals
     - caseless - (default=C{False}) - treat all literals as caseless
     - useRegex - (default=C{True}) - as an optimization, will generate a Regex
          object; otherwise, will generate a C{MatchFirst} object (if C{caseless=True}, or
          if creating a C{Regex} raises an exception)

    Example::
        comp_oper = oneOf("< = > <= >= !=")
        var = Word(alphas)
        number = Word(nums)
        term = var | number
        comparison_expr = term + comp_oper + term
        print(comparison_expr.searchString("B = 12  AA=23 B<=AA AA>12"))
    prints::
        [['B', '=', '12'], ['AA', '=', '23'], ['B', '<=', 'AA'], ['AA', '>', '12']]
    cSs|j�|j�kS)N)r�)r�brwrwrxry�szoneOf.<locals>.<lambda>cSs|j�j|j��S)N)r�r�)rrlrwrwrxry�scSs||kS)Nrw)rrlrwrwrxry�scSs
|j|�S)N)r�)rrlrwrwrxry�sz6Invalid argument to oneOf, expected string or iterablerq)r�rrrNr�z[%s]css|]}t|�VqdS)N)r)r��symrwrwrxr��szoneOf.<locals>.<genexpr>z | �|css|]}tj|�VqdS)N)rdr	)r�rmrwrwrxr��sz7Exception creating Regex for oneOf, building MatchFirstc3s|]}�|�VqdS)Nrw)r�rm)�parseElementClassrwrxr��s)rrrzr�r�r�r5r�r�r�r�rr�r�r�r�r'rirKr)
Zstrsr�ZuseRegexZisequalZmasksZsymbolsr�Zcurr�r�rw)rorxrS�sL





((cCsttt||���S)a�
    Helper to easily and clearly define a dictionary by specifying the respective patterns
    for the key and value.  Takes care of defining the C{L{Dict}}, C{L{ZeroOrMore}}, and C{L{Group}} tokens
    in the proper order.  The key pattern can include delimiting markers or punctuation,
    as long as they are suppressed, thereby leaving the significant key text.  The value
    pattern can include named results, so that the C{Dict} results can include named token
    fields.

    Example::
        text = "shape: SQUARE posn: upper left color: light blue texture: burlap"
        attr_expr = (label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join))
        print(OneOrMore(attr_expr).parseString(text).dump())
        
        attr_label = label
        attr_value = Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)

        # similar to Dict, but simpler call format
        result = dictOf(attr_label, attr_value).parseString(text)
        print(result.dump())
        print(result['shape'])
        print(result.shape)  # object attribute access works too
        print(result.asDict())
    prints::
        [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'light blue'], ['texture', 'burlap']]
        - color: light blue
        - posn: upper left
        - shape: SQUARE
        - texture: burlap
        SQUARE
        SQUARE
        {'color': 'light blue', 'shape': 'SQUARE', 'posn': 'upper left', 'texture': 'burlap'}
    )rr2r)r�r�rwrwrxrA�s!cCs^t�jdd��}|j�}d|_|d�||d�}|r@dd�}ndd�}|j|�|j|_|S)	a�
    Helper to return the original, untokenized text for a given expression.  Useful to
    restore the parsed fields of an HTML start tag into the raw tag text itself, or to
    revert separate tokens with intervening whitespace back to the original matching
    input text. By default, returns astring containing the original parsed text.  
       
    If the optional C{asString} argument is passed as C{False}, then the return value is a 
    C{L{ParseResults}} containing any results names that were originally matched, and a 
    single token containing the original matched text from the input string.  So if 
    the expression passed to C{L{originalTextFor}} contains expressions with defined
    results names, you must set C{asString} to C{False} if you want to preserve those
    results name values.

    Example::
        src = "this is test <b> bold <i>text</i> </b> normal text "
        for tag in ("b","i"):
            opener,closer = makeHTMLTags(tag)
            patt = originalTextFor(opener + SkipTo(closer) + closer)
            print(patt.searchString(src)[0])
    prints::
        ['<b> bold <i>text</i> </b>']
        ['<i>text</i>']
    cSs|S)Nrw)r�r�rvrwrwrxry8sz!originalTextFor.<locals>.<lambda>F�_original_start�
_original_endcSs||j|j�S)N)rprq)r�r5rvrwrwrxry=scSs&||jd�|jd��g|dd�<dS)Nrprq)r�)r�r5rvrwrwrx�extractText?sz$originalTextFor.<locals>.extractText)r
r�r�rer])r.ZasStringZ	locMarkerZendlocMarker�	matchExprrrrwrwrxrg s

cCst|�jdd��S)zp
    Helper to undo pyparsing's default grouping of And expressions, even
    if all but one are non-empty.
    cSs|dS)Nrrw)rvrwrwrxryJszungroup.<locals>.<lambda>)r-r�)r.rwrwrxrhEscCs4t�jdd��}t|d�|d�|j�j�d��S)a�
    Helper to decorate a returned token with its starting and ending locations in the input string.
    This helper adds the following results names:
     - locn_start = location where matched expression begins
     - locn_end = location where matched expression ends
     - value = the actual parsed results

    Be careful if the input text contains C{<TAB>} characters, you may want to call
    C{L{ParserElement.parseWithTabs}}

    Example::
        wd = Word(alphas)
        for match in locatedExpr(wd).searchString("ljsdf123lksdjjf123lkkjj1222"):
            print(match)
    prints::
        [[0, 'ljsdf', 5]]
        [[8, 'lksdjjf', 15]]
        [[18, 'lkkjj', 23]]
    cSs|S)Nrw)r�r5rvrwrwrxry`szlocatedExpr.<locals>.<lambda>Z
locn_startr�Zlocn_end)r
r�rr�r�)r.ZlocatorrwrwrxrjLsz\[]-*.$+^?()~ )r
cCs|ddS)Nrrrrw)r�r5rvrwrwrxryksryz\\0?[xX][0-9a-fA-F]+cCstt|djd�d��S)Nrz\0x�)�unichrru�lstrip)r�r5rvrwrwrxrylsz	\\0[0-7]+cCstt|ddd�d��S)Nrrr�)ruru)r�r5rvrwrwrxrymsz\])r�r
z\wr8rr�Znegate�bodyr	csBdd��y dj�fdd�tj|�jD��Stk
r<dSXdS)a�
    Helper to easily define string ranges for use in Word construction.  Borrows
    syntax from regexp '[]' string range definitions::
        srange("[0-9]")   -> "0123456789"
        srange("[a-z]")   -> "abcdefghijklmnopqrstuvwxyz"
        srange("[a-z$_]") -> "abcdefghijklmnopqrstuvwxyz$_"
    The input string must be enclosed in []'s, and the returned string is the expanded
    character set joined into a single string.
    The values enclosed in the []'s may be:
     - a single character
     - an escaped character with a leading backslash (such as C{\-} or C{\]})
     - an escaped hex character with a leading C{'\x'} (C{\x21}, which is a C{'!'} character) 
         (C{\0x##} is also supported for backwards compatibility) 
     - an escaped octal character with a leading C{'\0'} (C{\041}, which is a C{'!'} character)
     - a range of any of the above, separated by a dash (C{'a-z'}, etc.)
     - any combination of the above (C{'aeiouy'}, C{'a-zA-Z0-9_$'}, etc.)
    cSs<t|t�s|Sdjdd�tt|d�t|d�d�D��S)Nr�css|]}t|�VqdS)N)ru)r�r�rwrwrxr��sz+srange.<locals>.<lambda>.<locals>.<genexpr>rrr)rzr"r�r��ord)�prwrwrxry�szsrange.<locals>.<lambda>r�c3s|]}�|�VqdS)Nrw)r��part)�	_expandedrwrxr��szsrange.<locals>.<genexpr>N)r��_reBracketExprr�rxrK)r�rw)r|rxr_rs
 cs�fdd�}|S)zt
    Helper method for defining parse actions that require matching at a specific
    column in the input text.
    cs"t||��krt||d���dS)Nzmatched token not at column %d)r9r)r)Zlocnr1)r�rwrx�	verifyCol�sz!matchOnlyAtCol.<locals>.verifyColrw)r�r~rw)r�rxrM�scs�fdd�S)a�
    Helper method for common parse actions that simply return a literal value.  Especially
    useful when used with C{L{transformString<ParserElement.transformString>}()}.

    Example::
        num = Word(nums).setParseAction(lambda toks: int(toks[0]))
        na = oneOf("N/A NA").setParseAction(replaceWith(math.nan))
        term = na | num
        
        OneOrMore(term).parseString("324 234 N/A 234") # -> [324, 234, nan, 234]
    cs�gS)Nrw)r�r5rv)�replStrrwrxry�szreplaceWith.<locals>.<lambda>rw)rrw)rrxr\�scCs|ddd�S)a
    Helper parse action for removing quotation marks from parsed quoted strings.

    Example::
        # by default, quotation marks are included in parsed results
        quotedString.parseString("'Now is the Winter of our Discontent'") # -> ["'Now is the Winter of our Discontent'"]

        # use removeQuotes to strip quotation marks from parsed results
        quotedString.setParseAction(removeQuotes)
        quotedString.parseString("'Now is the Winter of our Discontent'") # -> ["Now is the Winter of our Discontent"]
    rrrrsrw)r�r5rvrwrwrxrZ�scsN��fdd�}yt�dt�d�j�}Wntk
rBt��}YnX||_|S)aG
    Helper to define a parse action by mapping a function to all elements of a ParseResults list.If any additional 
    args are passed, they are forwarded to the given function as additional arguments after
    the token, as in C{hex_integer = Word(hexnums).setParseAction(tokenMap(int, 16))}, which will convert the
    parsed data to an integer using base 16.

    Example (compare the last to example in L{ParserElement.transformString}::
        hex_ints = OneOrMore(Word(hexnums)).setParseAction(tokenMap(int, 16))
        hex_ints.runTests('''
            00 11 22 aa FF 0a 0d 1a
            ''')
        
        upperword = Word(alphas).setParseAction(tokenMap(str.upper))
        OneOrMore(upperword).runTests('''
            my kingdom for a horse
            ''')

        wd = Word(alphas).setParseAction(tokenMap(str.title))
        OneOrMore(wd).setParseAction(' '.join).runTests('''
            now is the winter of our discontent made glorious summer by this sun of york
            ''')
    prints::
        00 11 22 aa FF 0a 0d 1a
        [0, 17, 34, 170, 255, 10, 13, 26]

        my kingdom for a horse
        ['MY', 'KINGDOM', 'FOR', 'A', 'HORSE']

        now is the winter of our discontent made glorious summer by this sun of york
        ['Now Is The Winter Of Our Discontent Made Glorious Summer By This Sun Of York']
    cs��fdd�|D�S)Ncsg|]}�|f����qSrwrw)r�Ztokn)r�r6rwrxr��sz(tokenMap.<locals>.pa.<locals>.<listcomp>rw)r�r5rv)r�r6rwrxr}�sztokenMap.<locals>.par�rH)rJr�rKr{)r6r�r}rLrw)r�r6rxrm�s cCst|�j�S)N)r�r�)rvrwrwrxry�scCst|�j�S)N)r��lower)rvrwrwrxry�scCs�t|t�r|}t||d�}n|j}tttd�}|r�tj�j	t
�}td�|d�tt
t|td�|���tddgd�jd	�j	d
d��td�}n�d
jdd�tD��}tj�j	t
�t|�B}td�|d�tt
t|j	t�ttd�|����tddgd�jd	�j	dd��td�}ttd�|d�}|jdd
j|jdd�j�j���jd|�}|jdd
j|jdd�j�j���jd|�}||_||_||fS)zRInternal helper to construct opening and closing tag expressions, given a tag name)r�z_-:r�tag�=�/F)r�rCcSs|ddkS)Nrr�rw)r�r5rvrwrwrxry�sz_makeTags.<locals>.<lambda>rr�css|]}|dkr|VqdS)rNrw)r�r�rwrwrxr��sz_makeTags.<locals>.<genexpr>cSs|ddkS)Nrr�rw)r�r5rvrwrwrxry�sz</r��:r�z<%s>rz</%s>)rzr�rr�r/r4r3r>r�r�rZr+rr2rrrmr�rVrYrBr
�_Lr��titler�rir�)�tagStrZxmlZresnameZtagAttrNameZtagAttrValueZopenTagZprintablesLessRAbrackZcloseTagrwrwrx�	_makeTags�s"
T\..r�cCs
t|d�S)a 
    Helper to construct opening and closing tag expressions for HTML, given a tag name. Matches
    tags in either upper or lower case, attributes with namespaces and with quoted or unquoted values.

    Example::
        text = '<td>More info at the <a href="http://pyparsing.wikispaces.com">pyparsing</a> wiki page</td>'
        # makeHTMLTags returns pyparsing expressions for the opening and closing tags as a 2-tuple
        a,a_end = makeHTMLTags("A")
        link_expr = a + SkipTo(a_end)("link_text") + a_end
        
        for link in link_expr.searchString(text):
            # attributes in the <A> tag (like "href" shown here) are also accessible as named results
            print(link.link_text, '->', link.href)
    prints::
        pyparsing -> http://pyparsing.wikispaces.com
    F)r�)r�rwrwrxrK�scCs
t|d�S)z�
    Helper to construct opening and closing tag expressions for XML, given a tag name. Matches
    tags only in the given upper/lower case.

    Example: similar to L{makeHTMLTags}
    T)r�)r�rwrwrxrLscs8|r|dd��n|j��dd��D���fdd�}|S)a<
    Helper to create a validating parse action to be used with start tags created
    with C{L{makeXMLTags}} or C{L{makeHTMLTags}}. Use C{withAttribute} to qualify a starting tag
    with a required attribute value, to avoid false matches on common tags such as
    C{<TD>} or C{<DIV>}.

    Call C{withAttribute} with a series of attribute names and values. Specify the list
    of filter attributes names and values as:
     - keyword arguments, as in C{(align="right")}, or
     - as an explicit dict with C{**} operator, when an attribute name is also a Python
          reserved word, as in C{**{"class":"Customer", "align":"right"}}
     - a list of name-value tuples, as in ( ("ns1:class", "Customer"), ("ns2:align","right") )
    For attribute names with a namespace prefix, you must use the second form.  Attribute
    names are matched insensitive to upper/lower case.
       
    If just testing for C{class} (with or without a namespace), use C{L{withClass}}.

    To verify that the attribute exists, but without specifying a value, pass
    C{withAttribute.ANY_VALUE} as the value.

    Example::
        html = '''
            <div>
            Some text
            <div type="grid">1 4 0 1 0</div>
            <div type="graph">1,3 2,3 1,1</div>
            <div>this has no type</div>
            </div>
                
        '''
        div,div_end = makeHTMLTags("div")

        # only match div tag having a type attribute with value "grid"
        div_grid = div().setParseAction(withAttribute(type="grid"))
        grid_expr = div_grid + SkipTo(div | div_end)("body")
        for grid_header in grid_expr.searchString(html):
            print(grid_header.body)
        
        # construct a match with any div tag having a type attribute, regardless of the value
        div_any_type = div().setParseAction(withAttribute(type=withAttribute.ANY_VALUE))
        div_expr = div_any_type + SkipTo(div | div_end)("body")
        for div_header in div_expr.searchString(html):
            print(div_header.body)
    prints::
        1 4 0 1 0

        1 4 0 1 0
        1,3 2,3 1,1
    NcSsg|]\}}||f�qSrwrw)r�r�r�rwrwrxr�Qsz!withAttribute.<locals>.<listcomp>cs^xX�D]P\}}||kr&t||d|��|tjkr|||krt||d||||f��qWdS)Nzno matching attribute z+attribute '%s' has value '%s', must be '%s')rre�	ANY_VALUE)r�r5r�ZattrNameZ	attrValue)�attrsrwrxr}RszwithAttribute.<locals>.pa)r�)r�ZattrDictr}rw)r�rxres2cCs|rd|nd}tf||i�S)a�
    Simplified version of C{L{withAttribute}} when matching on a div class - made
    difficult because C{class} is a reserved word in Python.

    Example::
        html = '''
            <div>
            Some text
            <div class="grid">1 4 0 1 0</div>
            <div class="graph">1,3 2,3 1,1</div>
            <div>this &lt;div&gt; has no class</div>
            </div>
                
        '''
        div,div_end = makeHTMLTags("div")
        div_grid = div().setParseAction(withClass("grid"))
        
        grid_expr = div_grid + SkipTo(div | div_end)("body")
        for grid_header in grid_expr.searchString(html):
            print(grid_header.body)
        
        div_any_type = div().setParseAction(withClass(withAttribute.ANY_VALUE))
        div_expr = div_any_type + SkipTo(div | div_end)("body")
        for div_header in div_expr.searchString(html):
            print(div_header.body)
    prints::
        1 4 0 1 0

        1 4 0 1 0
        1,3 2,3 1,1
    z%s:class�class)re)Z	classname�	namespaceZ	classattrrwrwrxrk\s �(rcCs�t�}||||B}�x`t|�D�]R\}}|ddd�\}}	}
}|	dkrTd|nd|}|	dkr�|dksxt|�dkr�td��|\}
}t�j|�}|
tjk�rd|	dkr�t||�t|t	|��}n�|	dk�r|dk	�rt|||�t|t	||��}nt||�t|t	|��}nD|	dk�rZt||
|||�t||
|||�}ntd	��n�|
tj
k�rH|	dk�r�t|t��s�t|�}t|j
|�t||�}n�|	dk�r|dk	�r�t|||�t|t	||��}nt||�t|t	|��}nD|	dk�r>t||
|||�t||
|||�}ntd	��ntd
��|�r`|j|�||j|�|BK}|}q"W||K}|S)a�	
    Helper method for constructing grammars of expressions made up of
    operators working in a precedence hierarchy.  Operators may be unary or
    binary, left- or right-associative.  Parse actions can also be attached
    to operator expressions. The generated parser will also recognize the use 
    of parentheses to override operator precedences (see example below).
    
    Note: if you define a deep operator list, you may see performance issues
    when using infixNotation. See L{ParserElement.enablePackrat} for a
    mechanism to potentially improve your parser performance.

    Parameters:
     - baseExpr - expression representing the most basic element for the nested
     - opList - list of tuples, one for each operator precedence level in the
      expression grammar; each tuple is of the form
      (opExpr, numTerms, rightLeftAssoc, parseAction), where:
       - opExpr is the pyparsing expression for the operator;
          may also be a string, which will be converted to a Literal;
          if numTerms is 3, opExpr is a tuple of two expressions, for the
          two operators separating the 3 terms
       - numTerms is the number of terms for this operator (must
          be 1, 2, or 3)
       - rightLeftAssoc is the indicator whether the operator is
          right or left associative, using the pyparsing-defined
          constants C{opAssoc.RIGHT} and C{opAssoc.LEFT}.
       - parseAction is the parse action to be associated with
          expressions matching this operator expression (the
          parse action tuple member may be omitted)
     - lpar - expression for matching left-parentheses (default=C{Suppress('(')})
     - rpar - expression for matching right-parentheses (default=C{Suppress(')')})

    Example::
        # simple example of four-function arithmetic with ints and variable names
        integer = pyparsing_common.signed_integer
        varname = pyparsing_common.identifier 
        
        arith_expr = infixNotation(integer | varname,
            [
            ('-', 1, opAssoc.RIGHT),
            (oneOf('* /'), 2, opAssoc.LEFT),
            (oneOf('+ -'), 2, opAssoc.LEFT),
            ])
        
        arith_expr.runTests('''
            5+3*6
            (5+3)*6
            -2--11
            ''', fullDump=False)
    prints::
        5+3*6
        [[5, '+', [3, '*', 6]]]

        (5+3)*6
        [[[5, '+', 3], '*', 6]]

        -2--11
        [[['-', 2], '-', ['-', 11]]]
    Nrroz%s termz	%s%s termrqz@if numterms=3, opExpr must be a tuple or list of two expressionsrrz6operator must be unary (1), binary (2), or ternary (3)z2operator must indicate right or left associativity)N)rr�r�r�rirT�LEFTrrr�RIGHTrzrr.r�)ZbaseExprZopListZlparZrparr�ZlastExprr�ZoperDefZopExprZarityZrightLeftAssocr}ZtermNameZopExpr1ZopExpr2ZthisExprrsrwrwrxri�sR;

&




&


z4"(?:[^"\n\r\\]|(?:"")|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*�"z string enclosed in double quotesz4'(?:[^'\n\r\\]|(?:'')|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*�'z string enclosed in single quotesz*quotedString using single or double quotes�uzunicode string literalcCs�||krtd��|dk�r(t|t�o,t|t��r t|�dkr�t|�dkr�|dk	r�tt|t||tjdd���j	dd��}n$t
j�t||tj�j	dd��}nx|dk	r�tt|t|�t|�ttjdd���j	dd��}n4ttt|�t|�ttjdd���j	d	d��}ntd
��t
�}|dk	�rb|tt|�t||B|B�t|��K}n$|tt|�t||B�t|��K}|jd||f�|S)a~	
    Helper method for defining nested lists enclosed in opening and closing
    delimiters ("(" and ")" are the default).

    Parameters:
     - opener - opening character for a nested list (default=C{"("}); can also be a pyparsing expression
     - closer - closing character for a nested list (default=C{")"}); can also be a pyparsing expression
     - content - expression for items within the nested lists (default=C{None})
     - ignoreExpr - expression for ignoring opening and closing delimiters (default=C{quotedString})

    If an expression is not provided for the content argument, the nested
    expression will capture all whitespace-delimited content between delimiters
    as a list of separate values.

    Use the C{ignoreExpr} argument to define expressions that may contain
    opening or closing characters that should not be treated as opening
    or closing characters for nesting, such as quotedString or a comment
    expression.  Specify multiple expressions using an C{L{Or}} or C{L{MatchFirst}}.
    The default is L{quotedString}, but if no expressions are to be ignored,
    then pass C{None} for this argument.

    Example::
        data_type = oneOf("void int short long char float double")
        decl_data_type = Combine(data_type + Optional(Word('*')))
        ident = Word(alphas+'_', alphanums+'_')
        number = pyparsing_common.number
        arg = Group(decl_data_type + ident)
        LPAR,RPAR = map(Suppress, "()")

        code_body = nestedExpr('{', '}', ignoreExpr=(quotedString | cStyleComment))

        c_function = (decl_data_type("type") 
                      + ident("name")
                      + LPAR + Optional(delimitedList(arg), [])("args") + RPAR 
                      + code_body("body"))
        c_function.ignore(cStyleComment)
        
        source_code = '''
            int is_odd(int x) { 
                return (x%2); 
            }
                
            int dec_to_hex(char hchar) { 
                if (hchar >= '0' && hchar <= '9') { 
                    return (ord(hchar)-ord('0')); 
                } else { 
                    return (10+ord(hchar)-ord('A'));
                } 
            }
        '''
        for func in c_function.searchString(source_code):
            print("%(name)s (%(type)s) args: %(args)s" % func)

    prints::
        is_odd (int) args: [['int', 'x']]
        dec_to_hex (int) args: [['char', 'hchar']]
    z.opening and closing strings cannot be the sameNrr)r
cSs|dj�S)Nr)r�)rvrwrwrxry9sznestedExpr.<locals>.<lambda>cSs|dj�S)Nr)r�)rvrwrwrxry<scSs|dj�S)Nr)r�)rvrwrwrxryBscSs|dj�S)Nr)r�)rvrwrwrxryFszOopening and closing arguments must be strings if no content expression is givenznested %s%s expression)r�rzr�r�r
rr	r$rNr�rCr�rrrr+r2ri)�openerZcloserZcontentrOr�rwrwrxrP�s4:

*$cs��fdd�}�fdd�}�fdd�}tt�jd�j��}t�t�j|�jd�}t�j|�jd	�}t�j|�jd
�}	|r�tt|�|t|t|�t|��|	�}
n$tt|�t|t|�t|���}
|j	t
t��|
jd�S)a
	
    Helper method for defining space-delimited indentation blocks, such as
    those used to define block statements in Python source code.

    Parameters:
     - blockStatementExpr - expression defining syntax of statement that
            is repeated within the indented block
     - indentStack - list created by caller to manage indentation stack
            (multiple statementWithIndentedBlock expressions within a single grammar
            should share a common indentStack)
     - indent - boolean indicating whether block must be indented beyond the
            the current level; set to False for block of left-most statements
            (default=C{True})

    A valid block must contain at least one C{blockStatement}.

    Example::
        data = '''
        def A(z):
          A1
          B = 100
          G = A2
          A2
          A3
        B
        def BB(a,b,c):
          BB1
          def BBA():
            bba1
            bba2
            bba3
        C
        D
        def spam(x,y):
             def eggs(z):
                 pass
        '''


        indentStack = [1]
        stmt = Forward()

        identifier = Word(alphas, alphanums)
        funcDecl = ("def" + identifier + Group( "(" + Optional( delimitedList(identifier) ) + ")" ) + ":")
        func_body = indentedBlock(stmt, indentStack)
        funcDef = Group( funcDecl + func_body )

        rvalue = Forward()
        funcCall = Group(identifier + "(" + Optional(delimitedList(rvalue)) + ")")
        rvalue << (funcCall | identifier | Word(nums))
        assignment = Group(identifier + "=" + rvalue)
        stmt << ( funcDef | assignment | identifier )

        module_body = OneOrMore(stmt)

        parseTree = module_body.parseString(data)
        parseTree.pprint()
    prints::
        [['def',
          'A',
          ['(', 'z', ')'],
          ':',
          [['A1'], [['B', '=', '100']], [['G', '=', 'A2']], ['A2'], ['A3']]],
         'B',
         ['def',
          'BB',
          ['(', 'a', 'b', 'c', ')'],
          ':',
          [['BB1'], [['def', 'BBA', ['(', ')'], ':', [['bba1'], ['bba2'], ['bba3']]]]]],
         'C',
         'D',
         ['def',
          'spam',
          ['(', 'x', 'y', ')'],
          ':',
          [[['def', 'eggs', ['(', 'z', ')'], ':', [['pass']]]]]]] 
    csN|t|�krdSt||�}|�dkrJ|�dkr>t||d��t||d��dS)Nrrzillegal nestingznot a peer entryrsrs)r�r9r!r)r�r5rv�curCol)�indentStackrwrx�checkPeerIndent�s
z&indentedBlock.<locals>.checkPeerIndentcs2t||�}|�dkr"�j|�nt||d��dS)Nrrznot a subentryrs)r9r�r)r�r5rvr�)r�rwrx�checkSubIndent�s
z%indentedBlock.<locals>.checkSubIndentcsN|t|�krdSt||�}�o4|�dko4|�dksBt||d���j�dS)Nrrrqznot an unindentrsr:)r�r9rr�)r�r5rvr�)r�rwrx�
checkUnindent�s
z$indentedBlock.<locals>.checkUnindentz	 �INDENTr�ZUNINDENTzindented block)rrr�r�r
r�rirrr�rk)ZblockStatementExprr�rr�r�r�r!r�ZPEERZUNDENTZsmExprrw)r�rxrfQsN,z#[\0xc0-\0xd6\0xd8-\0xf6\0xf8-\0xff]z[\0xa1-\0xbf\0xd7\0xf7]z_:zany tagzgt lt amp nbsp quot aposz><& "'z&(?P<entity>rnz);zcommon HTML entitycCstj|j�S)zRHelper parser action to replace common HTML entities with their special characters)�_htmlEntityMapr�Zentity)rvrwrwrxr[�sz/\*(?:[^*]|\*(?!/))*z*/zC style commentz<!--[\s\S]*?-->zHTML commentz.*zrest of linez//(?:\\\n|[^\n])*z
// commentzC++ style commentz#.*zPython style comment)r�z 	�	commaItem)r�c@s�eZdZdZee�Zee�Ze	e
�jd�je�Z
e	e�jd�jeed��Zed�jd�je�Ze�je�de�je�jd�Zejd	d
��eeeed�j�e�Bjd�Zeje�ed
�jd�je�Zed�jd�je�ZeeBeBj�Zed�jd�je�Ze	eded�jd�Zed�jd�Z ed�jd�Z!e!de!djd�Z"ee!de!d>�dee!de!d?�jd�Z#e#j$d d
��d!e jd"�Z%e&e"e%Be#Bjd#��jd#�Z'ed$�jd%�Z(e)d@d'd(��Z*e)dAd*d+��Z+ed,�jd-�Z,ed.�jd/�Z-ed0�jd1�Z.e/j�e0j�BZ1e)d2d3��Z2e&e3e4d4�e5�e	e6d4d5�ee7d6����j�jd7�Z8e9ee:j;�e8Bd8d9��jd:�Z<e)ed;d
���Z=e)ed<d
���Z>d=S)Brna�

    Here are some common low-level expressions that may be useful in jump-starting parser development:
     - numeric forms (L{integers<integer>}, L{reals<real>}, L{scientific notation<sci_real>})
     - common L{programming identifiers<identifier>}
     - network addresses (L{MAC<mac_address>}, L{IPv4<ipv4_address>}, L{IPv6<ipv6_address>})
     - ISO8601 L{dates<iso8601_date>} and L{datetime<iso8601_datetime>}
     - L{UUID<uuid>}
     - L{comma-separated list<comma_separated_list>}
    Parse actions:
     - C{L{convertToInteger}}
     - C{L{convertToFloat}}
     - C{L{convertToDate}}
     - C{L{convertToDatetime}}
     - C{L{stripHTMLTags}}
     - C{L{upcaseTokens}}
     - C{L{downcaseTokens}}

    Example::
        pyparsing_common.number.runTests('''
            # any int or real number, returned as the appropriate type
            100
            -100
            +100
            3.14159
            6.02e23
            1e-12
            ''')

        pyparsing_common.fnumber.runTests('''
            # any int or real number, returned as float
            100
            -100
            +100
            3.14159
            6.02e23
            1e-12
            ''')

        pyparsing_common.hex_integer.runTests('''
            # hex numbers
            100
            FF
            ''')

        pyparsing_common.fraction.runTests('''
            # fractions
            1/2
            -3/4
            ''')

        pyparsing_common.mixed_integer.runTests('''
            # mixed fractions
            1
            1/2
            -3/4
            1-3/4
            ''')

        import uuid
        pyparsing_common.uuid.setParseAction(tokenMap(uuid.UUID))
        pyparsing_common.uuid.runTests('''
            # uuid
            12345678-1234-5678-1234-567812345678
            ''')
    prints::
        # any int or real number, returned as the appropriate type
        100
        [100]

        -100
        [-100]

        +100
        [100]

        3.14159
        [3.14159]

        6.02e23
        [6.02e+23]

        1e-12
        [1e-12]

        # any int or real number, returned as float
        100
        [100.0]

        -100
        [-100.0]

        +100
        [100.0]

        3.14159
        [3.14159]

        6.02e23
        [6.02e+23]

        1e-12
        [1e-12]

        # hex numbers
        100
        [256]

        FF
        [255]

        # fractions
        1/2
        [0.5]

        -3/4
        [-0.75]

        # mixed fractions
        1
        [1]

        1/2
        [0.5]

        -3/4
        [-0.75]

        1-3/4
        [1.75]

        # uuid
        12345678-1234-5678-1234-567812345678
        [UUID('12345678-1234-5678-1234-567812345678')]
    �integerzhex integerrtz[+-]?\d+zsigned integerr��fractioncCs|d|dS)Nrrrrsrw)rvrwrwrxry�szpyparsing_common.<lambda>r8z"fraction or mixed integer-fractionz
[+-]?\d+\.\d*zreal numberz+[+-]?\d+([eE][+-]?\d+|\.\d*([eE][+-]?\d+)?)z$real number with scientific notationz[+-]?\d+\.?\d*([eE][+-]?\d+)?�fnumberrB�
identifierzK(25[0-5]|2[0-4][0-9]|1?[0-9]{1,2})(\.(25[0-5]|2[0-4][0-9]|1?[0-9]{1,2})){3}zIPv4 addressz[0-9a-fA-F]{1,4}�hex_integerr��zfull IPv6 addressrrBz::zshort IPv6 addresscCstdd�|D��dkS)Ncss|]}tjj|�rdVqdS)rrN)rn�
_ipv6_partr�)r�rfrwrwrxr��sz,pyparsing_common.<lambda>.<locals>.<genexpr>rw)rH)rvrwrwrxry�sz::ffff:zmixed IPv6 addresszIPv6 addressz:[0-9a-fA-F]{2}([:.-])[0-9a-fA-F]{2}(?:\1[0-9a-fA-F]{2}){4}zMAC address�%Y-%m-%dcs�fdd�}|S)a�
        Helper to create a parse action for converting parsed date string to Python datetime.date

        Params -
         - fmt - format to be passed to datetime.strptime (default=C{"%Y-%m-%d"})

        Example::
            date_expr = pyparsing_common.iso8601_date.copy()
            date_expr.setParseAction(pyparsing_common.convertToDate())
            print(date_expr.parseString("1999-12-31"))
        prints::
            [datetime.date(1999, 12, 31)]
        csLytj|d��j�Stk
rF}zt||t|���WYdd}~XnXdS)Nr)r�strptimeZdater�rr{)r�r5rv�ve)�fmtrwrx�cvt_fn�sz.pyparsing_common.convertToDate.<locals>.cvt_fnrw)r�r�rw)r�rx�
convertToDate�szpyparsing_common.convertToDate�%Y-%m-%dT%H:%M:%S.%fcs�fdd�}|S)a
        Helper to create a parse action for converting parsed datetime string to Python datetime.datetime

        Params -
         - fmt - format to be passed to datetime.strptime (default=C{"%Y-%m-%dT%H:%M:%S.%f"})

        Example::
            dt_expr = pyparsing_common.iso8601_datetime.copy()
            dt_expr.setParseAction(pyparsing_common.convertToDatetime())
            print(dt_expr.parseString("1999-12-31T23:59:59.999"))
        prints::
            [datetime.datetime(1999, 12, 31, 23, 59, 59, 999000)]
        csHytj|d��Stk
rB}zt||t|���WYdd}~XnXdS)Nr)rr�r�rr{)r�r5rvr�)r�rwrxr��sz2pyparsing_common.convertToDatetime.<locals>.cvt_fnrw)r�r�rw)r�rx�convertToDatetime�sz"pyparsing_common.convertToDatetimez7(?P<year>\d{4})(?:-(?P<month>\d\d)(?:-(?P<day>\d\d))?)?zISO8601 datez�(?P<year>\d{4})-(?P<month>\d\d)-(?P<day>\d\d)[T ](?P<hour>\d\d):(?P<minute>\d\d)(:(?P<second>\d\d(\.\d*)?)?)?(?P<tz>Z|[+-]\d\d:?\d\d)?zISO8601 datetimez2[0-9a-fA-F]{8}(-[0-9a-fA-F]{4}){3}-[0-9a-fA-F]{12}�UUIDcCstjj|d�S)a
        Parse action to remove HTML tags from web page HTML source

        Example::
            # strip HTML links from normal text 
            text = '<td>More info at the <a href="http://pyparsing.wikispaces.com">pyparsing</a> wiki page</td>'
            td,td_end = makeHTMLTags("TD")
            table_text = td + SkipTo(td_end).setParseAction(pyparsing_common.stripHTMLTags)("body") + td_end
            
            print(table_text.parseString(text).body) # -> 'More info at the pyparsing wiki page'
        r)rn�_html_stripperr�)r�r5r�rwrwrx�
stripHTMLTags�s
zpyparsing_common.stripHTMLTagsra)r�z 	r�r�)r�zcomma separated listcCst|�j�S)N)r�r�)rvrwrwrxry�scCst|�j�S)N)r�r�)rvrwrwrxry�sN)rrB)rrB)r�)r�)?r�r�r�r�rmruZconvertToInteger�floatZconvertToFloatr/rRrir�r�rDr�r'Zsigned_integerr�rxrr�Z
mixed_integerrH�realZsci_realr��numberr�r4r3r�Zipv4_addressr�Z_full_ipv6_addressZ_short_ipv6_addressr~Z_mixed_ipv6_addressr
Zipv6_addressZmac_addressr�r�r�Ziso8601_dateZiso8601_datetime�uuidr7r6r�r�rrrrVr.�
_commasepitemr@rYr�Zcomma_separated_listrdrBrwrwrwrxrn�sN""
28�__main__Zselect�fromz_$r])rb�columnsrjZtablesZcommandaK
        # '*' as column list and dotted table name
        select * from SYS.XYZZY

        # caseless match on "SELECT", and casts back to "select"
        SELECT * from XYZZY, ABC

        # list of column names, and mixed case SELECT keyword
        Select AA,BB,CC from Sys.dual

        # multiple tables
        Select A, B, C from Sys.dual, Table2

        # invalid SELECT keyword - should fail
        Xelect A, B, C from Sys.dual

        # incomplete command - should fail
        Select

        # invalid column name - should fail
        Select ^^^ frox Sys.dual

        z]
        100
        -100
        +100
        3.14159
        6.02e23
        1e-12
        z 
        100
        FF
        z6
        12345678-1234-5678-1234-567812345678
        )rq)raF)N)FT)T)r�)T)�r��__version__Z__versionTime__�
__author__r��weakrefrr�r�r~r�rdrr�r"r<r�r�_threadr�ImportErrorZ	threadingrr�Zordereddict�__all__r��version_infor;r�maxsizer�r{r��chrrur�rHr�r�reversedr�r�rr6rrrIZmaxintZxranger�Z__builtin__r�Zfnamer�rJr�r�r�r�r�r�Zascii_uppercaseZascii_lowercaser4rRrDr3rkr�Z	printablerVrKrrr!r#r&r�r"�MutableMapping�registerr9rJrGr/r2r4rQrMr$r,r
rrr�rQrrrrlr/r'r%r	r.r0rrrr*r)r1r0r rrrrrrrrJrr2rMrNrr(rrVr-r
rrr+rrbr@r<r�rOrNrrSrArgrhrjrirCrIrHrar`r�Z_escapedPuncZ_escapedHexCharZ_escapedOctChar�UNICODEZ_singleCharZ
_charRangermr}r_rMr\rZrmrdrBr�rKrLrer�rkrTr�r�rirUr>r^rYrcrPrfr5rWr7r6r�r�r�r�r;r[r8rEr�r]r?r=rFrXr�r�r:rnr�ZselectTokenZ	fromTokenZidentZ
columnNameZcolumnNameListZ
columnSpecZ	tableNameZ
tableNameListZ	simpleSQLr�r�r�r�r�r�rwrwrwrx�<module>=s�









8


@d&A= I
G3pLOD|M &#@sQ,A,	I#%&0
,	?#kZr

 (
 0 


"_vendor/__pycache__/__init__.cpython-36.opt-1.pyc000064400000005240151733136260015544 0ustar003

�Pf>�@s�dZddlmZddlZddlZddlZdZejj	ejj
e��Zdd�Z
e�r�ejejjed��ejejdd�<e
d�e
d	�e
d
�e
d�e
d�e
d
�e
d�e
d�e
d�e
d�e
d�e
d�e
d�e
d�e
d�e
d�e
d�e
d�e
d�e
d�e
d�e
d�e
d�e
d�e
d �e
d!�e
d"�e
d#�e
d$�e
d%�e
d&�e
d'�e
d(�e
d)�e
d*�e
d+�e
d,�e
d-�e
d.�e
d/�e
d0�e
d1�e
d2�dS)3z�
pip._vendor is for vendoring dependencies of pip to prevent needing pip to
depend on something external.

Files inside of pip._vendor should be considered immutable and should only be
updated to versions from upstream.
�)�absolute_importNFcCs�djt|�}yt|t�t�dd�Wnztk
r�yt|t�t�dd�Wntk
r`Yn:Xtj|tj|<|jdd�\}}t	tj||tj|�YnXdS)Nz{0}.{1}r)�level�.�)
�format�__name__�
__import__�globals�locals�ImportError�sys�modules�rsplit�setattr)Z
modulenameZ
vendored_name�base�head�r�/usr/lib/python3.6/__init__.py�vendoreds	rz*.whlZcachecontrolZcoloramaZdistlibZdistroZhtml5libZlockfileZsixz	six.moveszsix.moves.urllibZ	packagingzpackaging.versionzpackaging.specifiersZ
pkg_resourcesZprogressZretryingZrequestszrequests.packageszrequests.packages.urllib3z&requests.packages.urllib3._collectionsz$requests.packages.urllib3.connectionz(requests.packages.urllib3.connectionpoolz!requests.packages.urllib3.contribz*requests.packages.urllib3.contrib.ntlmpoolz+requests.packages.urllib3.contrib.pyopensslz$requests.packages.urllib3.exceptionsz requests.packages.urllib3.fieldsz"requests.packages.urllib3.filepostz"requests.packages.urllib3.packagesz/requests.packages.urllib3.packages.ordered_dictz&requests.packages.urllib3.packages.sixz5requests.packages.urllib3.packages.ssl_match_hostnamezErequests.packages.urllib3.packages.ssl_match_hostname._implementationz%requests.packages.urllib3.poolmanagerz!requests.packages.urllib3.requestz"requests.packages.urllib3.responsezrequests.packages.urllib3.utilz)requests.packages.urllib3.util.connectionz&requests.packages.urllib3.util.requestz'requests.packages.urllib3.util.responsez$requests.packages.urllib3.util.retryz#requests.packages.urllib3.util.ssl_z&requests.packages.urllib3.util.timeoutz"requests.packages.urllib3.util.url)�__doc__Z
__future__rZglobZos.path�osrZ	DEBUNDLED�path�abspath�dirname�__file__Z	WHEEL_DIRr�joinrrrr�<module>sh$_vendor/__pycache__/appdirs.cpython-36.opt-1.pyc000064400000044153151733136260015455 0ustar003

�Pf`W�@s�dZd1Zdjeee��ZddlZddlZejddkZ	e	r>eZ
ejjd�r�ddlZej
�ddZejd�rrd	Zq�ejd
�r�dZq�dZnejZd2dd�Zd3dd�Zd4dd�Zd5dd�Zd6dd�Zd7dd�ZGdd�de�Zdd�Zdd �Zd!d"�Zd#d$�Zed	k�r�yddlZeZWnnek
�r�ydd%l m!Z!eZWnBek
�r|yddl"Z#eZWnek
�rveZYnXYnXYnXe$d&k�r~d'Z%d(Z&d8Z'e(d)�ee%e&d*d+�Z)x$e'D]Z*e(d,e*e+e)e*�f��q�We(d-�ee%e&�Z)x$e'D]Z*e(d,e*e+e)e*�f��q�We(d.�ee%�Z)x$e'D]Z*e(d,e*e+e)e*�f��q$We(d/�ee%d
d0�Z)x$e'D]Z*e(d,e*e+e)e*�f��q^WdS)9zyUtilities for determining application-specific dirs.

See <http://github.com/ActiveState/appdirs> for details and usage.
����.N��javaZWindows�win32ZMac�darwinZlinux2FcCs�tdkr^|dkr|}|rdpd}tjjt|��}|r�|dk	rNtjj|||�}q�tjj||�}nNtdkr�tjjd�}|r�tjj||�}n&tjdtjjd	��}|r�tjj||�}|r�|r�tjj||�}|S)
aJReturn full path to the user-specific data dir for this application.

        "appname" is the name of application.
            If None, just the system directory is returned.
        "appauthor" (only used on Windows) is the name of the
            appauthor or distributing body for this application. Typically
            it is the owning company name. This falls back to appname. You may
            pass False to disable it.
        "version" is an optional version path element to append to the
            path. You might want to use this if you want multiple versions
            of your app to be able to run independently. If used, this
            would typically be "<major>.<minor>".
            Only applied when appname is present.
        "roaming" (boolean, default False) can be set True to use the Windows
            roaming appdata directory. That means that for users on a Windows
            network setup for roaming profiles, this user data will be
            sync'd on login. See
            <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
            for a discussion of issues.

    Typical user data directories are:
        macOS:                  ~/Library/Application Support/<AppName>
        Unix:                   ~/.local/share/<AppName>    # or in $XDG_DATA_HOME, if defined
        Win XP (not roaming):   C:\Documents and Settings\<username>\Application Data\<AppAuthor>\<AppName>
        Win XP (roaming):       C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>
        Win 7  (not roaming):   C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>
        Win 7  (roaming):       C:\Users\<username>\AppData\Roaming\<AppAuthor>\<AppName>

    For Unix, we follow the XDG spec and support $XDG_DATA_HOME.
    That means, by default "~/.local/share/<AppName>".
    rN�
CSIDL_APPDATA�CSIDL_LOCAL_APPDATAFrz~/Library/Application Support/Z
XDG_DATA_HOMEz~/.local/share)�system�os�path�normpath�_get_win_folder�join�
expanduser�getenv)�appname�	appauthor�version�roaming�constr
�r�/usr/lib/python3.6/appdirs.py�
user_data_dir-s& rcs
tdkrR|dkr�}tjjtd��}�r�|dk	rBtjj||��}q�tjj|��}n�tdkrztjjd�}�r�tjj|��}nttjdtjjdd	g��}d
d�|j	tj�D�}�r�|r�tjj�|���fdd�|D�}|r�tjj|�}n|d
}|S�o�|�rtjj||�}|S)aiReturn full path to the user-shared data dir for this application.

        "appname" is the name of application.
            If None, just the system directory is returned.
        "appauthor" (only used on Windows) is the name of the
            appauthor or distributing body for this application. Typically
            it is the owning company name. This falls back to appname. You may
            pass False to disable it.
        "version" is an optional version path element to append to the
            path. You might want to use this if you want multiple versions
            of your app to be able to run independently. If used, this
            would typically be "<major>.<minor>".
            Only applied when appname is present.
        "multipath" is an optional parameter only applicable to *nix
            which indicates that the entire list of data dirs should be
            returned. By default, the first item from XDG_DATA_DIRS is
            returned, or '/usr/local/share/<AppName>',
            if XDG_DATA_DIRS is not set

    Typical user data directories are:
        macOS:      /Library/Application Support/<AppName>
        Unix:       /usr/local/share/<AppName> or /usr/share/<AppName>
        Win XP:     C:\Documents and Settings\All Users\Application Data\<AppAuthor>\<AppName>
        Vista:      (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.)
        Win 7:      C:\ProgramData\<AppAuthor>\<AppName>   # Hidden, but writeable on Win 7.

    For Unix, this is using the $XDG_DATA_DIRS[0] default.

    WARNING: Do not use this on Windows. See the Vista-Fail note above for why.
    rN�CSIDL_COMMON_APPDATAFrz/Library/Application SupportZ
XDG_DATA_DIRSz/usr/local/sharez
/usr/sharecSs g|]}tjj|jtj���qSr)rr
r�rstrip�sep)�.0�xrrr�
<listcomp>�sz!site_data_dir.<locals>.<listcomp>csg|]}tjj|�g��qSr)rrr)rr)rrrr �sr)
rrr
rrrrr�pathsep�split)rrr�	multipathr
�pathlistr)rr�
site_data_dirds4
r%cCsXtdkrt||d|�}n&tjdtjjd��}|r>tjj||�}|rT|rTtjj||�}|S)a�Return full path to the user-specific config dir for this application.

        "appname" is the name of application.
            If None, just the system directory is returned.
        "appauthor" (only used on Windows) is the name of the
            appauthor or distributing body for this application. Typically
            it is the owning company name. This falls back to appname. You may
            pass False to disable it.
        "version" is an optional version path element to append to the
            path. You might want to use this if you want multiple versions
            of your app to be able to run independently. If used, this
            would typically be "<major>.<minor>".
            Only applied when appname is present.
        "roaming" (boolean, default False) can be set True to use the Windows
            roaming appdata directory. That means that for users on a Windows
            network setup for roaming profiles, this user data will be
            sync'd on login. See
            <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
            for a discussion of issues.

    Typical user data directories are:
        macOS:                  same as user_data_dir
        Unix:                   ~/.config/<AppName>     # or in $XDG_CONFIG_HOME, if defined
        Win *:                  same as user_data_dir

    For Unix, we follow the XDG spec and support $XDG_CONFIG_HOME.
    That means, by deafult "~/.config/<AppName>".
    rrNZXDG_CONFIG_HOMEz	~/.config)rr)rrrrr
rr)rrrrr
rrr�user_config_dir�sr&cs�td	kr*t�|�}�r�|r�tjj||�}ndtjdd�}dd�|jtj�D�}�rt|rbtjj�|���fdd�|D�}|r�tjj|�}n|d}|S)
aReturn full path to the user-shared data dir for this application.

        "appname" is the name of application.
            If None, just the system directory is returned.
        "appauthor" (only used on Windows) is the name of the
            appauthor or distributing body for this application. Typically
            it is the owning company name. This falls back to appname. You may
            pass False to disable it.
        "version" is an optional version path element to append to the
            path. You might want to use this if you want multiple versions
            of your app to be able to run independently. If used, this
            would typically be "<major>.<minor>".
            Only applied when appname is present.
        "multipath" is an optional parameter only applicable to *nix
            which indicates that the entire list of config dirs should be
            returned. By default, the first item from XDG_CONFIG_DIRS is
            returned, or '/etc/xdg/<AppName>', if XDG_CONFIG_DIRS is not set

    Typical user data directories are:
        macOS:      same as site_data_dir
        Unix:       /etc/xdg/<AppName> or $XDG_CONFIG_DIRS[i]/<AppName> for each value in
                    $XDG_CONFIG_DIRS
        Win *:      same as site_data_dir
        Vista:      (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.)

    For Unix, this is using the $XDG_CONFIG_DIRS[0] default, if multipath=False

    WARNING: Do not use this on Windows. See the Vista-Fail note above for why.
    rrZXDG_CONFIG_DIRSz/etc/xdgcSs g|]}tjj|jtj���qSr)rr
rrr)rrrrrr �sz#site_config_dir.<locals>.<listcomp>csg|]}tjj|�g��qSr)rrr)rr)rrrr �sr)rr)rr%rr
rrr"r!)rrrr#r
r$r)rr�site_config_dir�s
r'TcCs�tdkrd|dkr|}tjjtd��}|r�|dk	rBtjj|||�}ntjj||�}|r�tjj|d�}nNtdkr�tjjd�}|r�tjj||�}n&tjdtjjd	��}|r�tjj||�}|r�|r�tjj||�}|S)
aReturn full path to the user-specific cache dir for this application.

        "appname" is the name of application.
            If None, just the system directory is returned.
        "appauthor" (only used on Windows) is the name of the
            appauthor or distributing body for this application. Typically
            it is the owning company name. This falls back to appname. You may
            pass False to disable it.
        "version" is an optional version path element to append to the
            path. You might want to use this if you want multiple versions
            of your app to be able to run independently. If used, this
            would typically be "<major>.<minor>".
            Only applied when appname is present.
        "opinion" (boolean) can be False to disable the appending of
            "Cache" to the base app data dir for Windows. See
            discussion below.

    Typical user cache directories are:
        macOS:      ~/Library/Caches/<AppName>
        Unix:       ~/.cache/<AppName> (XDG default)
        Win XP:     C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>\Cache
        Vista:      C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>\Cache

    On Windows the only suggestion in the MSDN docs is that local settings go in
    the `CSIDL_LOCAL_APPDATA` directory. This is identical to the non-roaming
    app data dir (the default returned by `user_data_dir` above). Apps typically
    put cache data somewhere *under* the given dir here. Some examples:
        ...\Mozilla\Firefox\Profiles\<ProfileName>\Cache
        ...\Acme\SuperApp\Cache\1.0
    OPINION: This function appends "Cache" to the `CSIDL_LOCAL_APPDATA` value.
    This can be disabled with the `opinion=False` option.
    rNr
FZCacherz~/Library/CachesZXDG_CACHE_HOMEz~/.cache)rrr
rrrrr)rrr�opinionr
rrr�user_cache_dirs(!r)cCs�tdkr tjjtjjd�|�}nNtdkrLt|||�}d}|rntjj|d�}n"t|||�}d}|rntjj|d�}|r�|r�tjj||�}|S)a�Return full path to the user-specific log dir for this application.

        "appname" is the name of application.
            If None, just the system directory is returned.
        "appauthor" (only used on Windows) is the name of the
            appauthor or distributing body for this application. Typically
            it is the owning company name. This falls back to appname. You may
            pass False to disable it.
        "version" is an optional version path element to append to the
            path. You might want to use this if you want multiple versions
            of your app to be able to run independently. If used, this
            would typically be "<major>.<minor>".
            Only applied when appname is present.
        "opinion" (boolean) can be False to disable the appending of
            "Logs" to the base app data dir for Windows, and "log" to the
            base cache dir for Unix. See discussion below.

    Typical user cache directories are:
        macOS:      ~/Library/Logs/<AppName>
        Unix:       ~/.cache/<AppName>/log  # or under $XDG_CACHE_HOME if defined
        Win XP:     C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>\Logs
        Vista:      C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>\Logs

    On Windows the only suggestion in the MSDN docs is that local settings
    go in the `CSIDL_LOCAL_APPDATA` directory. (Note: I'm interested in
    examples of what some windows apps use for a logs dir.)

    OPINION: This function appends "Logs" to the `CSIDL_LOCAL_APPDATA`
    value for Windows and appends "log" to the user cache dir for Unix.
    This can be disabled with the `opinion=False` option.
    rz~/Library/LogsrFZLogs�log)rrr
rrrr))rrrr(r
rrr�user_log_dir:s  
r+c@sbeZdZdZddd�Zedd��Zedd	��Zed
d��Zedd
��Z	edd��Z
edd��ZdS)�AppDirsz1Convenience wrapper for getting application dirs.NFcCs"||_||_||_||_||_dS)N)rrrrr#)�selfrrrrr#rrr�__init__os
zAppDirs.__init__cCst|j|j|j|jd�S)N)rr)rrrrr)r-rrrrws
zAppDirs.user_data_dircCst|j|j|j|jd�S)N)rr#)r%rrrr#)r-rrrr%|s
zAppDirs.site_data_dircCst|j|j|j|jd�S)N)rr)r&rrrr)r-rrrr&�s
zAppDirs.user_config_dircCst|j|j|j|jd�S)N)rr#)r'rrrr#)r-rrrr'�s
zAppDirs.site_config_dircCst|j|j|jd�S)N)r)r)rrr)r-rrrr)�s
zAppDirs.user_cache_dircCst|j|j|jd�S)N)r)r+rrr)r-rrrr+�s
zAppDirs.user_log_dir)NNFF)�__name__�
__module__�__qualname__�__doc__r.�propertyrr%r&r'r)r+rrrrr,ms
r,cCs:ddl}dddd�|}|j|jd�}|j||�\}}|S)z�This is a fallback technique at best. I'm not sure if using the
    registry for this guarantees us the correct answer for all CSIDL_*
    names.
    rNZAppDatazCommon AppDataz
Local AppData)r	rr
z@Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders)�_winreg�OpenKey�HKEY_CURRENT_USERZQueryValueEx)�
csidl_namer4Zshell_folder_name�key�dir�typerrr�_get_win_folder_from_registry�sr;cCs�ddlm}m}|jdt||�dd�}y`t|�}d}x|D]}t|�dkr:d}Pq:W|r�yddl}|j|�}Wnt	k
r�YnXWnt
k
r�YnX|S)Nr)�shellcon�shellF�T)�win32com.shellr<r=�SHGetFolderPath�getattr�unicode�ord�win32api�GetShortPathName�ImportError�UnicodeError)r7r<r=r9�
has_high_char�crDrrr�_get_win_folder_with_pywin32�s$

rJcCs�ddl}dddd�|}|jd�}|jjjd|dd|�d}x|D]}t|�dkrBd	}PqBW|r�|jd�}|jjj|j|d�r�|}|jS)
Nr��#�)r	rr
iFr>T)	�ctypesZcreate_unicode_buffer�windllZshell32ZSHGetFolderPathWrCZkernel32ZGetShortPathNameW�value)r7rNZcsidl_const�bufrHrIZbuf2rrr�_get_win_folder_with_ctypes�s"


rRcCs�ddl}ddlm}ddlm}|jjd}|jd|�}|jj	}|j
dt|j|�d|jj
|�|jj|j��jd�}d}x|D]}	t|	�dkr~d	}Pq~W|r�|jd|�}|jj	}
tj|||�r�|jj|j��jd�}|S)
Nr)�jna)r�rI�Fr>T)�arrayZcom.sunrSZcom.sun.jna.platformrZWinDefZMAX_PATHZzerosZShell32ZINSTANCEr@rAZShlObjZSHGFP_TYPE_CURRENTZNativeZtoStringZtostringrrCZKernel32ZkernalrE)r7rVrSrZbuf_sizerQr=r9rHrIZkernelrrr�_get_win_folder_with_jna�s&
rW)rO�__main__ZMyAppZ	MyCompanyz%-- app dirs (with optional 'version')z1.0)rz%s: %sz)
-- app dirs (without optional 'version')z+
-- app dirs (without optional 'appauthor')z(
-- app dirs (with disabled 'appauthor'))r)rrr)NNNF)NNNF)NNNF)NNNF)NNNT)NNNT)rr%r&r'r)r+),r2Z__version_info__r�map�str�__version__�sysr�version_infoZPY3rB�platform�
startswithZjava_verZos_namerrr%r&r'r)r+�objectr,r;rJrRrWr?Zwin32comrrFrNrOZcom.sun.jnaZcomr/rrZprops�print�dirsZproprArrrr�<module>	s~


7
B
(
3
9
3+






_vendor/re-vendor.py000064400000001405151733136260010462 0ustar00import os
import sys
import pip
import glob
import shutil

here = os.path.abspath(os.path.dirname(__file__))

def usage():
    """Show the accepted command forms, then terminate unsuccessfully."""
    message = "Usage: re-vendor.py [clean|vendor]"
    print(message)
    sys.exit(1)

def clean():
    """Remove all vendored packages from this directory.

    Every subdirectory of ``here`` is assumed to be a vendored package
    and is deleted recursively; the single-file six module is removed
    separately.
    """
    for fn in os.listdir(here):
        dirname = os.path.join(here, fn)
        if os.path.isdir(dirname):
            shutil.rmtree(dirname)
    # six is a single file, not a package.  Tolerate it already being
    # gone so clean() is idempotent (the original unconditional
    # os.unlink() raised OSError on a second run).
    six_path = os.path.join(here, 'six.py')
    if os.path.isfile(six_path):
        os.unlink(six_path)

def vendor():
    """Install everything listed in vendor.txt into this directory,
    then drop the ``*.egg-info`` metadata directories pip leaves behind.
    """
    install_args = ['install', '-t', here, '-r', 'vendor.txt']
    pip.main(install_args)
    for egg_info in glob.glob('*.egg-info'):
        shutil.rmtree(egg_info)

if __name__ == '__main__':
    if len(sys.argv) != 2:
        usage()
    # Dispatch on the single sub-command; anything unrecognized falls
    # back to usage(), which exits.
    action = {'clean': clean, 'vendor': vendor}.get(sys.argv[1], usage)
    action()
_vendor/lockfile/sqlitelockfile.py000064400000012602151733136260013364 0ustar00from __future__ import absolute_import, division

import time
import os

# Python 2/3 compatibility: Python 3 removed the ``unicode`` builtin,
# so alias it to ``str`` there.  SQLiteLockFile.__init__ uses it to
# normalize lock_file / unique_name to text for SQLite.
try:
    unicode
except NameError:
    unicode = str

from . import LockBase, NotLocked, NotMyLock, LockTimeout, AlreadyLocked


class SQLiteLockFile(LockBase):
    """Demonstrate SQL-based locking.

    Each held lock is a (lock_file, unique_name) row in a ``locks``
    table stored in a throw-away SQLite database.  The database file is
    created lazily on first instantiation (per process, cached in the
    class attribute ``testdb``) and scheduled for removal at
    interpreter exit.
    """

    # Path of the shared scratch database; populated lazily in __init__.
    testdb = None

    def __init__(self, path, threaded=True, timeout=None):
        """
        >>> lock = SQLiteLockFile('somefile')
        >>> lock = SQLiteLockFile('somefile', threaded=False)
        """
        LockBase.__init__(self, path, threaded, timeout)
        # SQLite wants text parameters, so normalize both names to
        # unicode (aliased to str on Python 3 by the module-level shim).
        self.lock_file = unicode(self.lock_file)
        self.unique_name = unicode(self.unique_name)

        if SQLiteLockFile.testdb is None:
            import tempfile
            _fd, testdb = tempfile.mkstemp()
            os.close(_fd)
            # Drop the placeholder file; sqlite3.connect() below will
            # recreate it as a real database file.
            os.unlink(testdb)
            del _fd, tempfile
            SQLiteLockFile.testdb = testdb

        import sqlite3
        self.connection = sqlite3.connect(SQLiteLockFile.testdb)

        c = self.connection.cursor()
        try:
            c.execute("create table locks"
                      "("
                      "   lock_file varchar(32),"
                      "   unique_name varchar(32)"
                      ")")
        except sqlite3.OperationalError:
            # Table already exists -- some earlier instance created it.
            pass
        else:
            # We created the table, so we also own cleanup of the
            # database file at interpreter exit.
            self.connection.commit()
            import atexit
            atexit.register(os.unlink, SQLiteLockFile.testdb)

    def acquire(self, timeout=None):
        """Acquire the lock, polling until success or timeout.

        Raises LockTimeout when a positive timeout expires, and
        AlreadyLocked immediately when timeout <= 0 and another holder
        has the lock.
        """
        timeout = timeout if timeout is not None else self.timeout
        end_time = time.time()
        if timeout is not None and timeout > 0:
            end_time += timeout

        # Poll interval: a fixed 0.1s when waiting forever, zero for a
        # single immediate attempt, otherwise a tenth of the timeout.
        if timeout is None:
            wait = 0.1
        elif timeout <= 0:
            wait = 0
        else:
            wait = timeout / 10

        cursor = self.connection.cursor()

        while True:
            if not self.is_locked():
                # Not locked.  Try to lock it.
                cursor.execute("insert into locks"
                               "  (lock_file, unique_name)"
                               "  values"
                               "  (?, ?)",
                               (self.lock_file, self.unique_name))
                self.connection.commit()

                # Check to see if we are the only lock holder.
                cursor.execute("select * from locks"
                               "  where unique_name = ?",
                               (self.unique_name,))
                rows = cursor.fetchall()
                if len(rows) > 1:
                    # Nope.  Someone else got there.  Remove our lock.
                    cursor.execute("delete from locks"
                                   "  where unique_name = ?",
                                   (self.unique_name,))
                    self.connection.commit()
                else:
                    # Yup.  We're done, so go home.
                    return
            else:
                # Check to see if we are the only lock holder.
                cursor.execute("select * from locks"
                               "  where unique_name = ?",
                               (self.unique_name,))
                rows = cursor.fetchall()
                if len(rows) == 1:
                    # We're the locker, so go home.
                    return

            # Maybe we should wait a bit longer.
            if timeout is not None and time.time() > end_time:
                if timeout > 0:
                    # No more waiting.
                    raise LockTimeout("Timeout waiting to acquire"
                                      " lock for %s" %
                                      self.path)
                else:
                    # Someone else has the lock and we are impatient..
                    raise AlreadyLocked("%s is already locked" % self.path)

            # Well, okay.  We'll give it a bit longer.
            time.sleep(wait)

    def release(self):
        """Release the lock by deleting our row.

        Raises NotLocked if nobody holds the lock, NotMyLock if it is
        held but not by us.
        """
        if not self.is_locked():
            raise NotLocked("%s is not locked" % self.path)
        if not self.i_am_locking():
            raise NotMyLock("%s is locked, but not by me (by %s)" %
                            (self.unique_name, self._who_is_locking()))
        cursor = self.connection.cursor()
        cursor.execute("delete from locks"
                       "  where unique_name = ?",
                       (self.unique_name,))
        self.connection.commit()

    def _who_is_locking(self):
        # Return the unique_name of a holder of our lock_file; used
        # only to build the NotMyLock error message in release().
        cursor = self.connection.cursor()
        cursor.execute("select unique_name from locks"
                       "  where lock_file = ?",
                       (self.lock_file,))
        return cursor.fetchone()[0]

    def is_locked(self):
        """Return True if any holder has a row for this lock file."""
        cursor = self.connection.cursor()
        cursor.execute("select * from locks"
                       "  where lock_file = ?",
                       (self.lock_file,))
        rows = cursor.fetchall()
        return not not rows

    def i_am_locking(self):
        """Return True if the row for this lock file carries our
        unique_name."""
        cursor = self.connection.cursor()
        cursor.execute("select * from locks"
                       "  where lock_file = ?"
                       "    and unique_name = ?",
                       (self.lock_file, self.unique_name))
        return not not cursor.fetchall()

    def break_lock(self):
        """Forcibly delete every row for this lock file, regardless of
        who holds it."""
        cursor = self.connection.cursor()
        cursor.execute("delete from locks"
                       "  where lock_file = ?",
                       (self.lock_file,))
        self.connection.commit()
_vendor/lockfile/__pycache__/symlinklockfile.cpython-36.pyc000064400000004056151733136260020041 0ustar003

�Pf8
�@sLddlmZddlZddlZddlmZmZmZmZm	Z	Gdd�de�Z
dS)�)�absolute_importN�)�LockBase�	NotLocked�	NotMyLock�LockTimeout�
AlreadyLockedc@sDeZdZdZddd�Zddd�Zdd	�Zd
d�Zdd
�Zdd�Z	dS)�SymlinkLockFilez'Lock access to a file using symlink(2).TNcCs(tj||||�tjj|j�d|_dS)Nr)r�__init__�os�path�split�unique_name)�selfrZthreaded�timeout�r�%/usr/lib/python3.6/symlinklockfile.pyr

szSymlinkLockFile.__init__cCs�|dk	r|n|j}tj�}|dk	r2|dkr2||7}x�ytj|j|j�Wnttk
r�|j�rddS|dk	r�tj�|kr�|dkr�td|j	��nt
d|j	��tj|dk	r�|dnd�Yq4XdSq4WdS)Nrz&Timeout waiting to acquire lock for %sz%s is already locked�
g�������?)r�timer�symlinkr�	lock_file�OSError�i_am_lockingrrrZsleep)rrZend_timerrr�acquires$
 zSymlinkLockFile.acquirecCs>|j�std|j��n|j�s.td|j��tj|j�dS)Nz%s is not lockedz%s is locked, but not by me)�	is_lockedrrrrr�unlinkr)rrrr�release6s
zSymlinkLockFile.releasecCstjj|j�S)N)rr�islinkr)rrrrr=szSymlinkLockFile.is_lockedcCs tjj|j�otj|j�|jkS)N)rrrr�readlinkr)rrrrr@szSymlinkLockFile.i_am_lockingcCstjj|j�rtj|j�dS)N)rrrrr)rrrr�
break_lockDszSymlinkLockFile.break_lock)TN)N)
�__name__�
__module__�__qualname__�__doc__r
rrrrrrrrrr	
s

#r	)Z
__future__rrr�rrrrrr	rrrr�<module>s_vendor/lockfile/__pycache__/sqlitelockfile.cpython-36.pyc000064400000007126151733136260017655 0ustar003

�Pf��@srddlmZmZddlZddlZyeWnek
r@eZYnXddlm	Z	m
Z
mZmZm
Z
Gdd�de	�ZdS)�)�absolute_import�divisionN�)�LockBase�	NotLocked�	NotMyLock�LockTimeout�
AlreadyLockedc@sPeZdZdZdZddd�Zddd�Zdd	�Zd
d�Zdd
�Z	dd�Z
dd�ZdS)�SQLiteLockFilezDemonstrate SQL-based locking.NTc
Cs�tj||||�t|j�|_t|j�|_tjdkrdddl}|j�\}}t	j
|�t	j|�~~|t_ddl}|j
tj�|_|jj�}y|jd�Wn|jk
r�Yn$X|jj�ddl}	|	jt	jtj�dS)zu
        >>> lock = SQLiteLockFile('somefile')
        >>> lock = SQLiteLockFile('somefile', threaded=False)
        NrzGcreate table locks(   lock_file varchar(32),   unique_name varchar(32)))r�__init__�unicode�	lock_file�unique_namer
�testdb�tempfileZmkstemp�os�close�unlink�sqlite3Zconnect�
connection�cursor�executeZOperationalError�commit�atexit�register)
�self�pathZthreaded�timeoutrZ_fdrr�cr�r�$/usr/lib/python3.6/sqlitelockfile.pyrs(




zSQLiteLockFile.__init__cCsH|dk	r|n|j}tj�}|dk	r2|dkr2||7}|dkr@d}n|dkrNd}n|d}|jj�}x�|j�s�|jd|j|jf�|jj�|jd|jf�|j	�}t
|�dkr�|jd|jf�|jj�q�dSn(|jd|jf�|j	�}t
|�dkr�dS|dk	�r6tj�|k�r6|dk�r(td|j��nt
d	|j��tj|�qbWdS)
Nrg�������?�
z;insert into locks  (lock_file, unique_name)  values  (?, ?)z*select * from locks  where unique_name = ?rz(delete from locks  where unique_name = ?z&Timeout waiting to acquire lock for %sz%s is already locked)r�timerr�	is_lockedrr
rr�fetchall�lenrrr	Zsleep)rrZend_time�waitr�rowsrrr �acquire5sD





zSQLiteLockFile.acquirecCs\|j�std|j��|j�s4td|j|j�f��|jj�}|j	d|jf�|jj
�dS)Nz%s is not lockedz#%s is locked, but not by me (by %s)z(delete from locks  where unique_name = ?)r#rr�i_am_lockingrr�_who_is_lockingrrrr)rrrrr �releasets

zSQLiteLockFile.releasecCs&|jj�}|jd|jf�|j�dS)Nz2select unique_name from locks  where lock_file = ?r)rrrr
Zfetchone)rrrrr r*�s

zSQLiteLockFile._who_is_lockingcCs*|jj�}|jd|jf�|j�}|S)Nz(select * from locks  where lock_file = ?)rrrr
r$)rrr'rrr r#�s


zSQLiteLockFile.is_lockedcCs*|jj�}|jd|j|jf�|j�S)Nz?select * from locks  where lock_file = ?    and unique_name = ?)rrrr
rr$)rrrrr r)�s
zSQLiteLockFile.i_am_lockingcCs(|jj�}|jd|jf�|jj�dS)Nz&delete from locks  where lock_file = ?)rrrr
r)rrrrr �
break_lock�s

zSQLiteLockFile.break_lock)TN)N)�__name__�
__module__�__qualname__�__doc__rrr(r+r*r#r)r,rrrr r
s
"
?r
)Z
__future__rrr"rr�	NameError�str�rrrrr	r
rrrr �<module>s
_vendor/lockfile/__pycache__/mkdirlockfile.cpython-36.pyc000064400000005013151733136260017453 0ustar003

�Pf�@sdddlmZmZddlZddlZddlZddlZddlmZm	Z	m
Z
mZmZm
Z
Gdd�de�ZdS)�)�absolute_import�divisionN�)�LockBase�
LockFailed�	NotLocked�	NotMyLock�LockTimeout�
AlreadyLockedc@sDeZdZdZddd�Zddd�Zdd	�Zd
d�Zdd
�Zdd�Z	dS)�
MkdirLockFilez"Lock file by creating a directory.TNcCs6tj||||�tjj|jd|j|j|jf�|_	dS)zs
        >>> lock = MkdirLockFile('somefile')
        >>> lock = MkdirLockFile('somefile', threaded=False)
        z%s.%s%sN)
r�__init__�os�path�join�	lock_fileZhostnameZtname�pid�unique_name)�selfrZthreaded�timeout�r�#/usr/lib/python3.6/mkdirlockfile.pyrs

zMkdirLockFile.__init__cCs|dk	r|n|j}tj�}|dk	r2|dkr2||7}|dkr@d}ntd|d�}x�ytj|j�Wn�tk
r�tj�d}|j	t	j
kr�tjj|j
�r�dS|dk	r�tj�|kr�|dkr�td|j��ntd|j��tj|�ntd|j��YqPXt|j
d�j�dSqPWdS)	Nrg�������?�
rz&Timeout waiting to acquire lock for %sz%s is already lockedzfailed to create %s�wb)r�time�maxr
�mkdirr�OSError�sys�exc_info�errnoZEEXISTr�existsrr	r
Zsleepr�open�close)rrZend_time�wait�errrrr�acquires2
zMkdirLockFile.acquirecCsP|j�std|j��ntjj|j�s4td|j��tj|j�tj|j	�dS)Nz%s is not lockedz%s is locked, but not by me)
�	is_lockedrrr
r rr�unlink�rmdirr)rrrr�releaseAszMkdirLockFile.releasecCstjj|j�S)N)r
rr r)rrrrr&IszMkdirLockFile.is_lockedcCs|j�otjj|j�S)N)r&r
rr r)rrrr�i_am_lockingLszMkdirLockFile.i_am_lockingcCsJtjj|j�rFx*tj|j�D]}tjtjj|j|��qWtj|j�dS)N)r
rr r�listdirr'rr()r�namerrr�
break_lockPszMkdirLockFile.break_lock)TN)N)
�__name__�
__module__�__qualname__�__doc__rr%r)r&r*r-rrrrrs

&r)Z
__future__rrrr
rr�rrrrr	r
rrrrr�<module>s _vendor/lockfile/__pycache__/sqlitelockfile.cpython-36.opt-1.pyc000064400000007126151733136260020614 0ustar003

�Pf��@srddlmZmZddlZddlZyeWnek
r@eZYnXddlm	Z	m
Z
mZmZm
Z
Gdd�de	�ZdS)�)�absolute_import�divisionN�)�LockBase�	NotLocked�	NotMyLock�LockTimeout�
AlreadyLockedc@sPeZdZdZdZddd�Zddd�Zdd	�Zd
d�Zdd
�Z	dd�Z
dd�ZdS)�SQLiteLockFilezDemonstrate SQL-based locking.NTc
Cs�tj||||�t|j�|_t|j�|_tjdkrdddl}|j�\}}t	j
|�t	j|�~~|t_ddl}|j
tj�|_|jj�}y|jd�Wn|jk
r�Yn$X|jj�ddl}	|	jt	jtj�dS)zu
        >>> lock = SQLiteLockFile('somefile')
        >>> lock = SQLiteLockFile('somefile', threaded=False)
        NrzGcreate table locks(   lock_file varchar(32),   unique_name varchar(32)))r�__init__�unicode�	lock_file�unique_namer
�testdb�tempfileZmkstemp�os�close�unlink�sqlite3Zconnect�
connection�cursor�executeZOperationalError�commit�atexit�register)
�self�pathZthreaded�timeoutrZ_fdrr�cr�r�$/usr/lib/python3.6/sqlitelockfile.pyrs(




zSQLiteLockFile.__init__cCsH|dk	r|n|j}tj�}|dk	r2|dkr2||7}|dkr@d}n|dkrNd}n|d}|jj�}x�|j�s�|jd|j|jf�|jj�|jd|jf�|j	�}t
|�dkr�|jd|jf�|jj�q�dSn(|jd|jf�|j	�}t
|�dkr�dS|dk	�r6tj�|k�r6|dk�r(td|j��nt
d	|j��tj|�qbWdS)
Nrg�������?�
z;insert into locks  (lock_file, unique_name)  values  (?, ?)z*select * from locks  where unique_name = ?rz(delete from locks  where unique_name = ?z&Timeout waiting to acquire lock for %sz%s is already locked)r�timerr�	is_lockedrr
rr�fetchall�lenrrr	Zsleep)rrZend_time�waitr�rowsrrr �acquire5sD





zSQLiteLockFile.acquirecCs\|j�std|j��|j�s4td|j|j�f��|jj�}|j	d|jf�|jj
�dS)Nz%s is not lockedz#%s is locked, but not by me (by %s)z(delete from locks  where unique_name = ?)r#rr�i_am_lockingrr�_who_is_lockingrrrr)rrrrr �releasets

zSQLiteLockFile.releasecCs&|jj�}|jd|jf�|j�dS)Nz2select unique_name from locks  where lock_file = ?r)rrrr
Zfetchone)rrrrr r*�s

zSQLiteLockFile._who_is_lockingcCs*|jj�}|jd|jf�|j�}|S)Nz(select * from locks  where lock_file = ?)rrrr
r$)rrr'rrr r#�s


zSQLiteLockFile.is_lockedcCs*|jj�}|jd|j|jf�|j�S)Nz?select * from locks  where lock_file = ?    and unique_name = ?)rrrr
rr$)rrrrr r)�s
zSQLiteLockFile.i_am_lockingcCs(|jj�}|jd|jf�|jj�dS)Nz&delete from locks  where lock_file = ?)rrrr
r)rrrrr �
break_lock�s

zSQLiteLockFile.break_lock)TN)N)�__name__�
__module__�__qualname__�__doc__rrr(r+r*r#r)r,rrrr r
s
"
?r
)Z
__future__rrr"rr�	NameError�str�rrrrr	r
rrrr �<module>s
_vendor/lockfile/__pycache__/pidlockfile.cpython-36.pyc000064400000011243151733136260017123 0ustar003

�Pf��@stdZddlmZddlZddlZddlZddlmZmZm	Z	m
Z
mZmZGdd�de�Z
dd	�Zd
d�Zdd
�ZdS)z8 Lockfile behaviour implemented via Unix PID files.
    �)�absolute_importN�)�LockBase�
AlreadyLocked�
LockFailed�	NotLocked�	NotMyLock�LockTimeoutc@sLeZdZdZddd�Zdd�Zdd	�Zd
d�Zddd
�Zdd�Z	dd�Z
dS)�PIDLockFileaA Lockfile implemented as a Unix PID file.

    The lock file is a normal file named by the attribute `path`.
    A lock's PID file contains a single line of text, containing
    the process ID (PID) of the process that acquired the lock.

    >>> lock = PIDLockFile('somefile')
    >>> lock = PIDLockFile('somefile')
    FNcCstj||d|�|j|_dS)NF)r�__init__�pathZunique_name)�selfrZthreaded�timeout�r�!/usr/lib/python3.6/pidlockfile.pyr$szPIDLockFile.__init__cCs
t|j�S)z- Get the PID from the lock file.
            )�read_pid_from_pidfiler)r
rrr�read_pid*szPIDLockFile.read_pidcCstjj|j�S)zv Test if the lock is currently held.

            The lock is held if the PID file for this lock exists.

            )�osr�exists)r
rrr�	is_locked/szPIDLockFile.is_lockedcCs|j�otj�|j�kS)z� Test if the lock is held by the current process.

        Returns ``True`` if the current process ID matches the
        number stored in the PID file.
        )rr�getpidr)r
rrr�i_am_locking7szPIDLockFile.i_am_lockingcCs�|dk	r|n|j}tj�}|dk	r2|dkr2||7}x�yt|j�Wn�tk
r�}zv|jtjkr�tj�|kr�|dk	r�|dkr�td|j��ntd|j��tj	|dk	r�|dp�d�nt
d|j��WYdd}~Xq4XdSq4WdS)z� Acquire the lock.

        Creates the PID file for this lock, or raises an error if
        the lock could not be acquired.
        Nrz&Timeout waiting to acquire lock for %sz%s is already locked�
g�������?zfailed to create %s)r�time�write_pid_to_pidfiler�OSError�errnoZEEXISTr	rZsleepr)r
rZend_time�excrrr�acquire?s$
 zPIDLockFile.acquirecCs:|j�std|j��|j�s,td|j��t|j�dS)z� Release the lock.

            Removes the PID file to release the lock, or raises an
            error if the current process does not hold the lock.

            z%s is not lockedz%s is locked, but not by meN)rrrrr�remove_existing_pidfile)r
rrr�release_s
zPIDLockFile.releasecCst|j�dS)z� Break an existing lock.

            Removes the PID file if it already exists, otherwise does
            nothing.

            N)rr)r
rrr�
break_locklszPIDLockFile.break_lock)FN)N)�__name__�
__module__�__qualname__�__doc__rrrrrr r!rrrrr
s	

 
r
cCsbd}yt|d�}Wntk
r&Yn8X|j�j�}yt|�}Wntk
rTYnX|j�|S)z� Read the PID recorded in the named PID file.

        Read and return the numeric PID recorded as text in the named
        PID file. If the PID file cannot be read, or if the content is
        not a valid PID, return ``None``.

        N�r)�open�IOError�readline�strip�int�
ValueError�close)�pidfile_path�pid�pidfile�linerrrrvsrcCsRtjtjBtjB}d}tj|||�}tj|d�}tj�}|jd|�|j�dS)u� Write the PID in the named PID file.

        Get the numeric process ID (“PID”) of the current process
        and write it to the named file as a line of text.

        i��wz%s
N)	r�O_CREAT�O_EXCL�O_WRONLYr'�fdopenr�writer-)r.Z
open_flagsZ	open_modeZ
pidfile_fdr0r/rrrr�s	rcCsFytj|�Wn2tk
r@}z|jtjkr.n�WYdd}~XnXdS)z� Remove the named PID file if it exists.

        Removing a PID file that doesn't already exist puts us in the
        desired state, so we ignore the condition if the file does not
        exist.

        N)r�removerr�ENOENT)r.rrrrr�sr)r%Z
__future__rrrr�rrrrrr	r
rrrrrrr�<module>
s ]"_vendor/lockfile/__pycache__/__init__.cpython-36.pyc000064400000023146151733136260016402 0ustar003

�Pf�$�
@s�dZddlmZddlZddlZddlZddlZddlZeed�sJej	e_
eejd�sbejjej_
dddd	d
ddd
dddddg
ZGdd�de�ZGdd�de�ZGdd�de�ZGdd	�d	e�ZGdd
�d
e�ZGdd�de�ZGdd�de�ZGdd
�d
e�ZGdd�de�ZGdd�de�Zdd�Zd d�Zd!d�Zd"d�Zd(d#d�Zeed$��rjd%d&l m!Z"e"j#Z$nd%d'l m%Z&e&j'Z$e$Z(dS))a
lockfile.py - Platform-independent advisory file locks.

Requires Python 2.5 unless you apply 2.4.diff
Locking is done on a per-thread basis instead of a per-process basis.

Usage:

>>> lock = LockFile('somefile')
>>> try:
...     lock.acquire()
... except AlreadyLocked:
...     print 'somefile', 'is locked already.'
... except LockFailed:
...     print 'somefile', 'can\'t be locked.'
... else:
...     print 'got lock'
got lock
>>> print lock.is_locked()
True
>>> lock.release()

>>> lock = LockFile('somefile')
>>> print lock.is_locked()
False
>>> with lock:
...    print lock.is_locked()
True
>>> print lock.is_locked()
False

>>> lock = LockFile('somefile')
>>> # It is okay to lock twice from the same thread...
>>> with lock:
...     lock.acquire()
...
>>> # Though no counter is kept, so you can't unlock multiple times...
>>> print lock.is_locked()
False

Exceptions:

    Error - base class for other exceptions
        LockError - base class for all locking exceptions
            AlreadyLocked - Another thread or process already holds the lock
            LockFailed - Lock failed for some other reason
        UnlockError - base class for all unlocking exceptions
            AlreadyUnlocked - File was not locked.
            NotMyLock - File was locked but not by the current thread/process
�)�absolute_importN�current_thread�get_name�Error�	LockError�LockTimeout�
AlreadyLocked�
LockFailed�UnlockError�	NotLocked�	NotMyLock�LinkFileLock�
MkdirFileLock�SQLiteFileLock�LockBase�lockedc@seZdZdZdS)rzw
    Base class for other exceptions.

    >>> try:
    ...   raise Error
    ... except Exception:
    ...   pass
    N)�__name__�
__module__�__qualname__�__doc__�rr�/usr/lib/python3.6/__init__.pyrJsc@seZdZdZdS)rz�
    Base class for error arising from attempts to acquire the lock.

    >>> try:
    ...   raise LockError
    ... except Error:
    ...   pass
    N)rrrrrrrrrVsc@seZdZdZdS)rz�Raised when lock creation fails within a user-defined period of time.

    >>> try:
    ...   raise LockTimeout
    ... except LockError:
    ...   pass
    N)rrrrrrrrrbsc@seZdZdZdS)rz�Some other thread/process is locking the file.

    >>> try:
    ...   raise AlreadyLocked
    ... except LockError:
    ...   pass
    N)rrrrrrrrrmsc@seZdZdZdS)r	z�Lock file creation failed for some other reason.

    >>> try:
    ...   raise LockFailed
    ... except LockError:
    ...   pass
    N)rrrrrrrrr	xsc@seZdZdZdS)r
z�
    Base class for errors arising from attempts to release the lock.

    >>> try:
    ...   raise UnlockError
    ... except Error:
    ...   pass
    N)rrrrrrrrr
�sc@seZdZdZdS)rz�Raised when an attempt is made to unlock an unlocked file.

    >>> try:
    ...   raise NotLocked
    ... except UnlockError:
    ...   pass
    N)rrrrrrrrr�sc@seZdZdZdS)rz�Raised when an attempt is made to unlock a file someone else locked.

    >>> try:
    ...   raise NotMyLock
    ... except UnlockError:
    ...   pass
    N)rrrrrrrrr�sc@s>eZdZdd�Zddd�Zdd�Zdd	�Zd
d�Zdd
�ZdS)�_SharedBasecCs
||_dS)N)�path)�selfrrrr�__init__�sz_SharedBase.__init__NcCstd��dS)a�
        Acquire the lock.

        * If timeout is omitted (or None), wait forever trying to lock the
          file.

        * If timeout > 0, try to acquire the lock for that many seconds.  If
          the lock period expires and the file is still locked, raise
          LockTimeout.

        * If timeout <= 0, raise AlreadyLocked immediately if the file is
          already locked.
        zimplement in subclassN)�NotImplemented)r�timeoutrrr�acquire�sz_SharedBase.acquirecCstd��dS)zX
        Release the lock.

        If the file is not locked, raise NotLocked.
        zimplement in subclassN)r)rrrr�release�sz_SharedBase.releasecCs|j�|S)z*
        Context manager support.
        )r)rrrr�	__enter__�sz_SharedBase.__enter__cGs|j�dS)z*
        Context manager support.
        N)r)rZ_excrrr�__exit__�sz_SharedBase.__exit__cCsd|jj|jfS)Nz<%s: %r>)�	__class__rr)rrrr�__repr__�sz_SharedBase.__repr__)N)	rrrrrrr r!r#rrrrr�s
rcsBeZdZdZd�fdd�	Zdd�Zdd	�Zd
d�Zdd
�Z�Z	S)rz.Base class for platform-specific lock classes.TNcs�tt|�j|�tjj|�d|_tj�|_	tj
�|_|rbtj
�}t|dt|��}d|d@|_nd|_tjj|j�}tjj|d|j	|j|jt|j�f�|_||_dS)zi
        >>> lock = LockBase('somefile')
        >>> lock = LockBase('somefile', threaded=False)
        z.lock�identz-%xl���z	%s%s.%s%sN)�superrr�osr�abspathZ	lock_file�socketZgethostnameZhostname�getpid�pid�	threadingr�getattr�hashZtname�dirname�join�unique_namer)rr�threadedr�tr$r/)r"rrr�s 

	zLockBase.__init__cCstd��dS)z9
        Tell whether or not the file is locked.
        zimplement in subclassN)r)rrrr�	is_locked�szLockBase.is_lockedcCstd��dS)zA
        Return True if this object is locking the file.
        zimplement in subclassN)r)rrrr�i_am_locking�szLockBase.i_am_lockingcCstd��dS)zN
        Remove a lock.  Useful if a locking thread failed to unlock.
        zimplement in subclassN)r)rrrr�
break_lockszLockBase.break_lockcCsd|jj|j|jfS)Nz<%s: %r -- %r>)r"rr1r)rrrrr#szLockBase.__repr__)TN)
rrrrrr4r5r6r#�
__classcell__rr)r"rr�s!cOsRtjd|tdd�t|dt�s.|dd�}t|�dkrH|rHd|d<|||�S)Nz1Import from %s module instead of lockfile package�)�
stacklevelr�Tr2)�warnings�warn�DeprecationWarning�
isinstance�str�len)�cls�mod�args�kwdsrrr�
_fl_helpers

rEcOs ddlm}t|jdf|�|�S)z�Factory function provided for backwards compatibility.

    Do not use in new code.  Instead, import LinkLockFile from the
    lockfile.linklockfile module.
    r:)�linklockfilezlockfile.linklockfile)r%rFrE�LinkLockFile)rCrDrFrrrr
s
cOs ddlm}t|jdf|�|�S)z�Factory function provided for backwards compatibility.

    Do not use in new code.  Instead, import MkdirLockFile from the
    lockfile.mkdirlockfile module.
    r:)�
mkdirlockfilezlockfile.mkdirlockfile)r%rHrE�
MkdirLockFile)rCrDrHrrrr%s
cOs ddlm}t|jdf|�|�S)z�Factory function provided for backwards compatibility.

    Do not use in new code.  Instead, import SQLiteLockFile from the
    lockfile.mkdirlockfile module.
    r:)�sqlitelockfilezlockfile.sqlitelockfile)r%rJrEZSQLiteLockFile)rCrDrJrrrr0s
cs��fdd�}|S)aDecorator which enables locks for decorated function.

    Arguments:
     - path: path for lockfile.
     - timeout (optional): Timeout for acquiring lock.

     Usage:
         @locked('/var/run/myname', timeout=0)
         def myname(...):
             ...
    cstj�����fdd��}|S)Nc
s.t��d�}|j�z
�||�S|j�XdS)N)r)�FileLockrr)rC�kwargs�lock)�funcrrrr�wrapperHs

z&locked.<locals>.decor.<locals>.wrapper)�	functools�wraps)rNrO)rr)rNr�decorGszlocked.<locals>.decorr)rrrRr)rrrr;s
�linkr:)rF)rH)N))rZ
__future__rrPr'r)r,r;�hasattrZ
currentThreadrZThreadZgetNamer�__all__�	Exceptionrrrrr	r
rr�objectrrrEr
rrrr%rFZ_llfrGZLockFilerHZ_mlfrIrKrrrr�<module>4sF
-:
_vendor/lockfile/__pycache__/symlinklockfile.cpython-36.opt-1.pyc000064400000004056151733136270021001 0ustar003

�Pf8
�@sLddlmZddlZddlZddlmZmZmZmZm	Z	Gdd�de�Z
dS)�)�absolute_importN�)�LockBase�	NotLocked�	NotMyLock�LockTimeout�
AlreadyLockedc@sDeZdZdZddd�Zddd�Zdd	�Zd
d�Zdd
�Zdd�Z	dS)�SymlinkLockFilez'Lock access to a file using symlink(2).TNcCs(tj||||�tjj|j�d|_dS)Nr)r�__init__�os�path�split�unique_name)�selfrZthreaded�timeout�r�%/usr/lib/python3.6/symlinklockfile.pyr

szSymlinkLockFile.__init__cCs�|dk	r|n|j}tj�}|dk	r2|dkr2||7}x�ytj|j|j�Wnttk
r�|j�rddS|dk	r�tj�|kr�|dkr�td|j	��nt
d|j	��tj|dk	r�|dnd�Yq4XdSq4WdS)Nrz&Timeout waiting to acquire lock for %sz%s is already locked�
g�������?)r�timer�symlinkr�	lock_file�OSError�i_am_lockingrrrZsleep)rrZend_timerrr�acquires$
 zSymlinkLockFile.acquirecCs>|j�std|j��n|j�s.td|j��tj|j�dS)Nz%s is not lockedz%s is locked, but not by me)�	is_lockedrrrrr�unlinkr)rrrr�release6s
zSymlinkLockFile.releasecCstjj|j�S)N)rr�islinkr)rrrrr=szSymlinkLockFile.is_lockedcCs tjj|j�otj|j�|jkS)N)rrrr�readlinkr)rrrrr@szSymlinkLockFile.i_am_lockingcCstjj|j�rtj|j�dS)N)rrrrr)rrrr�
break_lockDszSymlinkLockFile.break_lock)TN)N)
�__name__�
__module__�__qualname__�__doc__r
rrrrrrrrrr	
s

#r	)Z
__future__rrr�rrrrrr	rrrr�<module>s_vendor/lockfile/__pycache__/pidlockfile.cpython-36.opt-1.pyc000064400000011243151733136270020063 0ustar003

�Pf��@stdZddlmZddlZddlZddlZddlmZmZm	Z	m
Z
mZmZGdd�de�Z
dd	�Zd
d�Zdd
�ZdS)z8 Lockfile behaviour implemented via Unix PID files.
    �)�absolute_importN�)�LockBase�
AlreadyLocked�
LockFailed�	NotLocked�	NotMyLock�LockTimeoutc@sLeZdZdZddd�Zdd�Zdd	�Zd
d�Zddd
�Zdd�Z	dd�Z
dS)�PIDLockFileaA Lockfile implemented as a Unix PID file.

    The lock file is a normal file named by the attribute `path`.
    A lock's PID file contains a single line of text, containing
    the process ID (PID) of the process that acquired the lock.

    >>> lock = PIDLockFile('somefile')
    >>> lock = PIDLockFile('somefile')
    FNcCstj||d|�|j|_dS)NF)r�__init__�pathZunique_name)�selfrZthreaded�timeout�r�!/usr/lib/python3.6/pidlockfile.pyr$szPIDLockFile.__init__cCs
t|j�S)z- Get the PID from the lock file.
            )�read_pid_from_pidfiler)r
rrr�read_pid*szPIDLockFile.read_pidcCstjj|j�S)zv Test if the lock is currently held.

            The lock is held if the PID file for this lock exists.

            )�osr�exists)r
rrr�	is_locked/szPIDLockFile.is_lockedcCs|j�otj�|j�kS)z� Test if the lock is held by the current process.

        Returns ``True`` if the current process ID matches the
        number stored in the PID file.
        )rr�getpidr)r
rrr�i_am_locking7szPIDLockFile.i_am_lockingcCs�|dk	r|n|j}tj�}|dk	r2|dkr2||7}x�yt|j�Wn�tk
r�}zv|jtjkr�tj�|kr�|dk	r�|dkr�td|j��ntd|j��tj	|dk	r�|dp�d�nt
d|j��WYdd}~Xq4XdSq4WdS)z� Acquire the lock.

        Creates the PID file for this lock, or raises an error if
        the lock could not be acquired.
        Nrz&Timeout waiting to acquire lock for %sz%s is already locked�
g�������?zfailed to create %s)r�time�write_pid_to_pidfiler�OSError�errnoZEEXISTr	rZsleepr)r
rZend_time�excrrr�acquire?s$
 zPIDLockFile.acquirecCs:|j�std|j��|j�s,td|j��t|j�dS)z� Release the lock.

            Removes the PID file to release the lock, or raises an
            error if the current process does not hold the lock.

            z%s is not lockedz%s is locked, but not by meN)rrrrr�remove_existing_pidfile)r
rrr�release_s
zPIDLockFile.releasecCst|j�dS)z� Break an existing lock.

            Removes the PID file if it already exists, otherwise does
            nothing.

            N)rr)r
rrr�
break_locklszPIDLockFile.break_lock)FN)N)�__name__�
__module__�__qualname__�__doc__rrrrrr r!rrrrr
s	

 
r
cCsbd}yt|d�}Wntk
r&Yn8X|j�j�}yt|�}Wntk
rTYnX|j�|S)z� Read the PID recorded in the named PID file.

        Read and return the numeric PID recorded as text in the named
        PID file. If the PID file cannot be read, or if the content is
        not a valid PID, return ``None``.

        N�r)�open�IOError�readline�strip�int�
ValueError�close)�pidfile_path�pid�pidfile�linerrrrvsrcCsRtjtjBtjB}d}tj|||�}tj|d�}tj�}|jd|�|j�dS)u� Write the PID in the named PID file.

        Get the numeric process ID (“PID”) of the current process
        and write it to the named file as a line of text.

        i��wz%s
N)	r�O_CREAT�O_EXCL�O_WRONLYr'�fdopenr�writer-)r.Z
open_flagsZ	open_modeZ
pidfile_fdr0r/rrrr�s	rcCsFytj|�Wn2tk
r@}z|jtjkr.n�WYdd}~XnXdS)z� Remove the named PID file if it exists.

        Removing a PID file that doesn't already exist puts us in the
        desired state, so we ignore the condition if the file does not
        exist.

        N)r�removerr�ENOENT)r.rrrrr�sr)r%Z
__future__rrrr�rrrrrr	r
rrrrrrr�<module>
s ]"_vendor/lockfile/__pycache__/__init__.cpython-36.opt-1.pyc000064400000023146151733136270017342 0ustar003

�Pf�$�
@s�dZddlmZddlZddlZddlZddlZddlZeed�sJej	e_
eejd�sbejjej_
dddd	d
ddd
dddddg
ZGdd�de�ZGdd�de�ZGdd�de�ZGdd	�d	e�ZGdd
�d
e�ZGdd�de�ZGdd�de�ZGdd
�d
e�ZGdd�de�ZGdd�de�Zdd�Zd d�Zd!d�Zd"d�Zd(d#d�Zeed$��rjd%d&l m!Z"e"j#Z$nd%d'l m%Z&e&j'Z$e$Z(dS))a
lockfile.py - Platform-independent advisory file locks.

Requires Python 2.5 unless you apply 2.4.diff
Locking is done on a per-thread basis instead of a per-process basis.

Usage:

>>> lock = LockFile('somefile')
>>> try:
...     lock.acquire()
... except AlreadyLocked:
...     print 'somefile', 'is locked already.'
... except LockFailed:
...     print 'somefile', 'can\'t be locked.'
... else:
...     print 'got lock'
got lock
>>> print lock.is_locked()
True
>>> lock.release()

>>> lock = LockFile('somefile')
>>> print lock.is_locked()
False
>>> with lock:
...    print lock.is_locked()
True
>>> print lock.is_locked()
False

>>> lock = LockFile('somefile')
>>> # It is okay to lock twice from the same thread...
>>> with lock:
...     lock.acquire()
...
>>> # Though no counter is kept, so you can't unlock multiple times...
>>> print lock.is_locked()
False

Exceptions:

    Error - base class for other exceptions
        LockError - base class for all locking exceptions
            AlreadyLocked - Another thread or process already holds the lock
            LockFailed - Lock failed for some other reason
        UnlockError - base class for all unlocking exceptions
            AlreadyUnlocked - File was not locked.
            NotMyLock - File was locked but not by the current thread/process
�)�absolute_importN�current_thread�get_name�Error�	LockError�LockTimeout�
AlreadyLocked�
LockFailed�UnlockError�	NotLocked�	NotMyLock�LinkFileLock�
MkdirFileLock�SQLiteFileLock�LockBase�lockedc@seZdZdZdS)rzw
    Base class for other exceptions.

    >>> try:
    ...   raise Error
    ... except Exception:
    ...   pass
    N)�__name__�
__module__�__qualname__�__doc__�rr�/usr/lib/python3.6/__init__.pyrJsc@seZdZdZdS)rz�
    Base class for error arising from attempts to acquire the lock.

    >>> try:
    ...   raise LockError
    ... except Error:
    ...   pass
    N)rrrrrrrrrVsc@seZdZdZdS)rz�Raised when lock creation fails within a user-defined period of time.

    >>> try:
    ...   raise LockTimeout
    ... except LockError:
    ...   pass
    N)rrrrrrrrrbsc@seZdZdZdS)rz�Some other thread/process is locking the file.

    >>> try:
    ...   raise AlreadyLocked
    ... except LockError:
    ...   pass
    N)rrrrrrrrrmsc@seZdZdZdS)r	z�Lock file creation failed for some other reason.

    >>> try:
    ...   raise LockFailed
    ... except LockError:
    ...   pass
    N)rrrrrrrrr	xsc@seZdZdZdS)r
z�
    Base class for errors arising from attempts to release the lock.

    >>> try:
    ...   raise UnlockError
    ... except Error:
    ...   pass
    N)rrrrrrrrr
�sc@seZdZdZdS)rz�Raised when an attempt is made to unlock an unlocked file.

    >>> try:
    ...   raise NotLocked
    ... except UnlockError:
    ...   pass
    N)rrrrrrrrr�sc@seZdZdZdS)rz�Raised when an attempt is made to unlock a file someone else locked.

    >>> try:
    ...   raise NotMyLock
    ... except UnlockError:
    ...   pass
    N)rrrrrrrrr�sc@s>eZdZdd�Zddd�Zdd�Zdd	�Zd
d�Zdd
�ZdS)�_SharedBasecCs
||_dS)N)�path)�selfrrrr�__init__�sz_SharedBase.__init__NcCstd��dS)a�
        Acquire the lock.

        * If timeout is omitted (or None), wait forever trying to lock the
          file.

        * If timeout > 0, try to acquire the lock for that many seconds.  If
          the lock period expires and the file is still locked, raise
          LockTimeout.

        * If timeout <= 0, raise AlreadyLocked immediately if the file is
          already locked.
        zimplement in subclassN)�NotImplemented)r�timeoutrrr�acquire�sz_SharedBase.acquirecCstd��dS)zX
        Release the lock.

        If the file is not locked, raise NotLocked.
        zimplement in subclassN)r)rrrr�release�sz_SharedBase.releasecCs|j�|S)z*
        Context manager support.
        )r)rrrr�	__enter__�sz_SharedBase.__enter__cGs|j�dS)z*
        Context manager support.
        N)r)rZ_excrrr�__exit__�sz_SharedBase.__exit__cCsd|jj|jfS)Nz<%s: %r>)�	__class__rr)rrrr�__repr__�sz_SharedBase.__repr__)N)	rrrrrrr r!r#rrrrr�s
rcsBeZdZdZd�fdd�	Zdd�Zdd	�Zd
d�Zdd
�Z�Z	S)rz.Base class for platform-specific lock classes.TNcs�tt|�j|�tjj|�d|_tj�|_	tj
�|_|rbtj
�}t|dt|��}d|d@|_nd|_tjj|j�}tjj|d|j	|j|jt|j�f�|_||_dS)zi
        >>> lock = LockBase('somefile')
        >>> lock = LockBase('somefile', threaded=False)
        z.lock�identz-%xl���z	%s%s.%s%sN)�superrr�osr�abspathZ	lock_file�socketZgethostnameZhostname�getpid�pid�	threadingr�getattr�hashZtname�dirname�join�unique_namer)rr�threadedr�tr$r/)r"rrr�s 

	zLockBase.__init__cCstd��dS)z9
        Tell whether or not the file is locked.
        zimplement in subclassN)r)rrrr�	is_locked�szLockBase.is_lockedcCstd��dS)zA
        Return True if this object is locking the file.
        zimplement in subclassN)r)rrrr�i_am_locking�szLockBase.i_am_lockingcCstd��dS)zN
        Remove a lock.  Useful if a locking thread failed to unlock.
        zimplement in subclassN)r)rrrr�
break_lockszLockBase.break_lockcCsd|jj|j|jfS)Nz<%s: %r -- %r>)r"rr1r)rrrrr#szLockBase.__repr__)TN)
rrrrrr4r5r6r#�
__classcell__rr)r"rr�s!cOsRtjd|tdd�t|dt�s.|dd�}t|�dkrH|rHd|d<|||�S)Nz1Import from %s module instead of lockfile package�)�
stacklevelr�Tr2)�warnings�warn�DeprecationWarning�
isinstance�str�len)�cls�mod�args�kwdsrrr�
_fl_helpers

rEcOs ddlm}t|jdf|�|�S)z�Factory function provided for backwards compatibility.

    Do not use in new code.  Instead, import LinkLockFile from the
    lockfile.linklockfile module.
    r:)�linklockfilezlockfile.linklockfile)r%rFrE�LinkLockFile)rCrDrFrrrr
s
cOs ddlm}t|jdf|�|�S)z�Factory function provided for backwards compatibility.

    Do not use in new code.  Instead, import MkdirLockFile from the
    lockfile.mkdirlockfile module.
    r:)�
mkdirlockfilezlockfile.mkdirlockfile)r%rHrE�
MkdirLockFile)rCrDrHrrrr%s
cOs ddlm}t|jdf|�|�S)z�Factory function provided for backwards compatibility.

    Do not use in new code.  Instead, import SQLiteLockFile from the
    lockfile.mkdirlockfile module.
    r:)�sqlitelockfilezlockfile.sqlitelockfile)r%rJrEZSQLiteLockFile)rCrDrJrrrr0s
cs��fdd�}|S)aDecorator which enables locks for decorated function.

    Arguments:
     - path: path for lockfile.
     - timeout (optional): Timeout for acquiring lock.

     Usage:
         @locked('/var/run/myname', timeout=0)
         def myname(...):
             ...
    cstj�����fdd��}|S)Nc
s.t��d�}|j�z
�||�S|j�XdS)N)r)�FileLockrr)rC�kwargs�lock)�funcrrrr�wrapperHs

z&locked.<locals>.decor.<locals>.wrapper)�	functools�wraps)rNrO)rr)rNr�decorGszlocked.<locals>.decorr)rrrRr)rrrr;s
�linkr:)rF)rH)N))rZ
__future__rrPr'r)r,r;�hasattrZ
currentThreadrZThreadZgetNamer�__all__�	Exceptionrrrrr	r
rr�objectrrrEr
rrrr%rFZ_llfrGZLockFilerHZ_mlfrIrKrrrr�<module>4sF
-:
_vendor/lockfile/__pycache__/linklockfile.cpython-36.pyc000064400000004241151733136270017305 0ustar003

�Pf\
�@sPddlmZddlZddlZddlmZmZmZmZm	Z	m
Z
Gdd�de�ZdS)�)�absolute_importN�)�LockBase�
LockFailed�	NotLocked�	NotMyLock�LockTimeout�
AlreadyLockedc@s:eZdZdZd
dd�Zdd�Zdd�Zd	d
�Zdd�ZdS)�LinkLockFilez�Lock access to a file using atomic property of link(2).

    >>> lock = LinkLockFile('somefile')
    >>> lock = LinkLockFile('somefile', threaded=False)
    NcCs"yt|jd�j�Wn"tk
r6td|j��YnX|dk	rD|n|j}tj�}|dk	rj|dkrj||7}x�ytj|j|j	�Wn�t
k
�rtj|j�j}|dkr�dS|dk	r�tj�|kr�tj
|j�|dkr�td|j��ntd|j��tj|dk	�r
|d�pd�YqlXdSqlWdS)	N�wbzfailed to create %sr�z&Timeout waiting to acquire lock for %sz%s is already locked�
g�������?)�open�unique_name�close�IOErrorr�timeout�time�os�link�	lock_file�OSError�stat�st_nlink�unlinkr�pathr	Zsleep)�selfrZend_timeZnlinks�r�"/usr/lib/python3.6/linklockfile.py�acquires0
$zLinkLockFile.acquirecCsP|j�std|j��ntjj|j�s4td|j��tj|j�tj|j�dS)Nz%s is not lockedz%s is locked, but not by me)	�	is_lockedrrr�existsrrrr)rrrr�release7szLinkLockFile.releasecCstjj|j�S)N)rrr!r)rrrrr ?szLinkLockFile.is_lockedcCs(|j�o&tjj|j�o&tj|j�jdkS)Nr)r rrr!rrr)rrrr�i_am_lockingBszLinkLockFile.i_am_lockingcCstjj|j�rtj|j�dS)N)rrr!rr)rrrr�
break_lockGszLinkLockFile.break_lock)N)	�__name__�
__module__�__qualname__�__doc__rr"r r#r$rrrrr

s
&r
)Z
__future__rrr�rrrrrr	r
rrrr�<module>s _vendor/lockfile/__pycache__/mkdirlockfile.cpython-36.opt-1.pyc000064400000005013151733136270020413 0ustar003

�Pf�@sdddlmZmZddlZddlZddlZddlZddlmZm	Z	m
Z
mZmZm
Z
Gdd�de�ZdS)�)�absolute_import�divisionN�)�LockBase�
LockFailed�	NotLocked�	NotMyLock�LockTimeout�
AlreadyLockedc@sDeZdZdZddd�Zddd�Zdd	�Zd
d�Zdd
�Zdd�Z	dS)�
MkdirLockFilez"Lock file by creating a directory.TNcCs6tj||||�tjj|jd|j|j|jf�|_	dS)zs
        >>> lock = MkdirLockFile('somefile')
        >>> lock = MkdirLockFile('somefile', threaded=False)
        z%s.%s%sN)
r�__init__�os�path�join�	lock_fileZhostnameZtname�pid�unique_name)�selfrZthreaded�timeout�r�#/usr/lib/python3.6/mkdirlockfile.pyrs

zMkdirLockFile.__init__cCs|dk	r|n|j}tj�}|dk	r2|dkr2||7}|dkr@d}ntd|d�}x�ytj|j�Wn�tk
r�tj�d}|j	t	j
kr�tjj|j
�r�dS|dk	r�tj�|kr�|dkr�td|j��ntd|j��tj|�ntd|j��YqPXt|j
d�j�dSqPWdS)	Nrg�������?�
rz&Timeout waiting to acquire lock for %sz%s is already lockedzfailed to create %s�wb)r�time�maxr
�mkdirr�OSError�sys�exc_info�errnoZEEXISTr�existsrr	r
Zsleepr�open�close)rrZend_time�wait�errrrr�acquires2
zMkdirLockFile.acquirecCsP|j�std|j��ntjj|j�s4td|j��tj|j�tj|j	�dS)Nz%s is not lockedz%s is locked, but not by me)
�	is_lockedrrr
r rr�unlink�rmdirr)rrrr�releaseAszMkdirLockFile.releasecCstjj|j�S)N)r
rr r)rrrrr&IszMkdirLockFile.is_lockedcCs|j�otjj|j�S)N)r&r
rr r)rrrr�i_am_lockingLszMkdirLockFile.i_am_lockingcCsJtjj|j�rFx*tj|j�D]}tjtjj|j|��qWtj|j�dS)N)r
rr r�listdirr'rr()r�namerrr�
break_lockPszMkdirLockFile.break_lock)TN)N)
�__name__�
__module__�__qualname__�__doc__rr%r)r&r*r-rrrrrs

&r)Z
__future__rrrr
rr�rrrrr	r
rrrrr�<module>s _vendor/lockfile/__pycache__/linklockfile.cpython-36.opt-1.pyc000064400000004241151733136270020244 0ustar003

�Pf\
�@sPddlmZddlZddlZddlmZmZmZmZm	Z	m
Z
Gdd�de�ZdS)�)�absolute_importN�)�LockBase�
LockFailed�	NotLocked�	NotMyLock�LockTimeout�
AlreadyLockedc@s:eZdZdZd
dd�Zdd�Zdd�Zd	d
�Zdd�ZdS)�LinkLockFilez�Lock access to a file using atomic property of link(2).

    >>> lock = LinkLockFile('somefile')
    >>> lock = LinkLockFile('somefile', threaded=False)
    NcCs"yt|jd�j�Wn"tk
r6td|j��YnX|dk	rD|n|j}tj�}|dk	rj|dkrj||7}x�ytj|j|j	�Wn�t
k
�rtj|j�j}|dkr�dS|dk	r�tj�|kr�tj
|j�|dkr�td|j��ntd|j��tj|dk	�r
|d�pd�YqlXdSqlWdS)	N�wbzfailed to create %sr�z&Timeout waiting to acquire lock for %sz%s is already locked�
g�������?)�open�unique_name�close�IOErrorr�timeout�time�os�link�	lock_file�OSError�stat�st_nlink�unlinkr�pathr	Zsleep)�selfrZend_timeZnlinks�r�"/usr/lib/python3.6/linklockfile.py�acquires0
$zLinkLockFile.acquirecCsP|j�std|j��ntjj|j�s4td|j��tj|j�tj|j�dS)Nz%s is not lockedz%s is locked, but not by me)	�	is_lockedrrr�existsrrrr)rrrr�release7szLinkLockFile.releasecCstjj|j�S)N)rrr!r)rrrrr ?szLinkLockFile.is_lockedcCs(|j�o&tjj|j�o&tj|j�jdkS)Nr)r rrr!rrr)rrrr�i_am_lockingBszLinkLockFile.i_am_lockingcCstjj|j�rtj|j�dS)N)rrr!rr)rrrr�
break_lockGszLinkLockFile.break_lock)N)	�__name__�
__module__�__qualname__�__doc__rr"r r#r$rrrrr

s
&r
)Z
__future__rrr�rrrrrr	r
rrrr�<module>s _vendor/lockfile/symlinklockfile.py000064400000005070151733136270013553 0ustar00from __future__ import absolute_import

import os
import time

from . import (LockBase, NotLocked, NotMyLock, LockTimeout,
               AlreadyLocked)


class SymlinkLockFile(LockBase):
    """Lock access to a file using symlink(2).

    The lock is a symbolic link whose *target* encodes the owner's
    unique name; symlink creation is atomic, so whoever creates the
    link owns the lock.
    """

    def __init__(self, path, threaded=True, timeout=None):
        LockBase.__init__(self, path, threaded, timeout)
        # LockBase builds an absolute unique_name; keep only the final
        # path component so it can serve as a short symlink target.
        self.unique_name = os.path.split(self.unique_name)[1]

    def acquire(self, timeout=None):
        """Create the lock symlink, waiting up to *timeout* seconds.

        timeout=None means wait forever; timeout<=0 means fail
        immediately with AlreadyLocked if someone else holds the lock.
        """
        if timeout is None:
            timeout = self.timeout
        deadline = time.time()
        if timeout is not None and timeout > 0:
            deadline += timeout

        while True:
            try:
                # symlink(2) fails if lock_file already exists, which makes
                # lock acquisition atomic.
                os.symlink(self.unique_name, self.lock_file)
                return
            except OSError:
                # Creation failed -- perhaps we already hold the lock?
                if self.i_am_locking():
                    return
                # Held by someone else: honour the deadline, then retry.
                if timeout is not None and time.time() > deadline:
                    if timeout > 0:
                        raise LockTimeout("Timeout waiting to acquire"
                                          " lock for %s" % self.path)
                    raise AlreadyLocked("%s is already locked" % self.path)
                time.sleep(0.1 if timeout is None else timeout / 10)

    def release(self):
        """Remove the lock symlink; raise unless we currently hold it."""
        if not self.is_locked():
            raise NotLocked("%s is not locked" % self.path)
        if not self.i_am_locking():
            raise NotMyLock("%s is locked, but not by me" % self.path)
        os.unlink(self.lock_file)

    def is_locked(self):
        """True if the lock symlink exists (held by anyone)."""
        return os.path.islink(self.lock_file)

    def i_am_locking(self):
        """True if the lock symlink exists and points at our unique name."""
        return (os.path.islink(self.lock_file) and
                os.readlink(self.lock_file) == self.unique_name)

    def break_lock(self):
        """Unconditionally remove the lock symlink if present."""
        if os.path.islink(self.lock_file):  # exists && is a link
            os.unlink(self.lock_file)
_vendor/lockfile/pidlockfile.py000064400000013712151733136270012643 0ustar00# -*- coding: utf-8 -*-

# pidlockfile.py
#
# Copyright © 2008–2009 Ben Finney <ben+python@benfinney.id.au>
#
# This is free software: you may copy, modify, and/or distribute this work
# under the terms of the Python Software Foundation License, version 2 or
# later as published by the Python Software Foundation.
# No warranty expressed or implied. See the file LICENSE.PSF-2 for details.

""" Lockfile behaviour implemented via Unix PID files.
    """

from __future__ import absolute_import

import errno
import os
import time

from . import (LockBase, AlreadyLocked, LockFailed, NotLocked, NotMyLock,
               LockTimeout)


class PIDLockFile(LockBase):
    """ Lockfile implemented as a Unix PID file.

    The lock file is a normal file named by the attribute `path`.
    A lock's PID file contains a single line of text, containing
    the process ID (PID) of the process that acquired the lock.

    >>> lock = PIDLockFile('somefile')
    >>> lock = PIDLockFile('somefile')
    """

    def __init__(self, path, threaded=False, timeout=None):
        # pid lockfiles don't support threaded operation, so always force
        # False as the threaded arg.
        LockBase.__init__(self, path, False, timeout)
        # The PID file itself is the lock token, so the unique name is
        # simply the locked path.
        self.unique_name = self.path

    def read_pid(self):
        """ Get the PID from the lock file.
            """
        return read_pid_from_pidfile(self.path)

    def is_locked(self):
        """ Test if the lock is currently held.

            The lock is held if the PID file for this lock exists.

            """
        return os.path.exists(self.path)

    def i_am_locking(self):
        """ Test if the lock is held by the current process.

        Returns ``True`` if the current process ID matches the
        number stored in the PID file.
        """
        return self.is_locked() and os.getpid() == self.read_pid()

    def acquire(self, timeout=None):
        """ Acquire the lock.

        Creates the PID file for this lock, or raises an error if
        the lock could not be acquired.

        timeout=None waits forever; timeout > 0 raises LockTimeout once
        the deadline passes; timeout <= 0 raises AlreadyLocked on first
        contention.
        """

        timeout = timeout if timeout is not None else self.timeout
        end_time = time.time()
        if timeout is not None and timeout > 0:
            end_time += timeout

        while True:
            try:
                write_pid_to_pidfile(self.path)
            except OSError as exc:
                if exc.errno == errno.EEXIST:
                    # The lock creation failed.  Maybe sleep a bit.
                    # Bug fix: the deadline must only be enforced when a
                    # timeout was requested.  Previously the bare test
                    # `time.time() > end_time` fired immediately for
                    # timeout=None, raising AlreadyLocked instead of
                    # waiting forever as the other backends (and the
                    # acquire() contract) do.
                    if timeout is not None and time.time() > end_time:
                        if timeout > 0:
                            raise LockTimeout("Timeout waiting to acquire"
                                              " lock for %s" %
                                              self.path)
                        else:
                            raise AlreadyLocked("%s is already locked" %
                                                self.path)
                    time.sleep(timeout / 10 if timeout is not None else 0.1)
                else:
                    # Any other OS error means we cannot create the file.
                    raise LockFailed("failed to create %s" % self.path)
            else:
                return

    def release(self):
        """ Release the lock.

            Removes the PID file to release the lock, or raises an
            error if the current process does not hold the lock.

            """
        if not self.is_locked():
            raise NotLocked("%s is not locked" % self.path)
        if not self.i_am_locking():
            raise NotMyLock("%s is locked, but not by me" % self.path)
        remove_existing_pidfile(self.path)

    def break_lock(self):
        """ Break an existing lock.

            Removes the PID file if it already exists, otherwise does
            nothing.

            """
        remove_existing_pidfile(self.path)


def read_pid_from_pidfile(pidfile_path):
    """ Read the PID recorded in the named PID file.

        Read and return the numeric PID recorded as text in the named
        PID file. If the PID file cannot be read, or if the content is
        not a valid PID, return ``None``.

        """
    try:
        pidfile = open(pidfile_path, 'r')
    except IOError:
        # Unreadable/missing file: no PID to report.
        return None

    # According to the FHS 2.3 section on PID files in /var/run, the
    # file holds the ASCII-decimal process identifier followed by a
    # newline, but readers should be flexible: ignore surrounding
    # whitespace, leading zeroes, a missing trailing newline, and any
    # additional lines -- hence only the first line is examined.
    with pidfile:
        first_line = pidfile.readline().strip()
    try:
        return int(first_line)
    except ValueError:
        # Content is not a valid decimal PID.
        return None


def write_pid_to_pidfile(pidfile_path):
    """ Write the PID in the named PID file.

        Get the numeric process ID (“PID”) of the current process
        and write it to the named file as a line of text.

        """
    # O_CREAT|O_EXCL makes creation atomic: os.open raises OSError
    # (EEXIST) if the file already exists, which is how PIDLockFile
    # detects contention.
    creation_flags = os.O_CREAT | os.O_EXCL | os.O_WRONLY
    fd = os.open(pidfile_path, creation_flags, 0o644)

    # According to the FHS 2.3 section on PID files in /var/run, the
    # file must consist of the process identifier in ASCII-encoded
    # decimal, followed by a newline character.
    with os.fdopen(fd, 'w') as pidfile:
        pidfile.write("%s\n" % os.getpid())


def remove_existing_pidfile(pidfile_path):
    """ Remove the named PID file if it exists.

        Removing a PID file that doesn't already exist puts us in the
        desired state, so we ignore the condition if the file does not
        exist.

        """
    try:
        os.remove(pidfile_path)
    except OSError as exc:
        # A missing file already satisfies the postcondition; anything
        # else (e.g. permissions) is a real failure.
        if exc.errno != errno.ENOENT:
            raise
_vendor/lockfile/__init__.py000064400000022233151733136270012113 0ustar00# -*- coding: utf-8 -*-

"""
lockfile.py - Platform-independent advisory file locks.

Requires Python 2.5 unless you apply 2.4.diff
Locking is done on a per-thread basis instead of a per-process basis.

Usage:

>>> lock = LockFile('somefile')
>>> try:
...     lock.acquire()
... except AlreadyLocked:
...     print 'somefile', 'is locked already.'
... except LockFailed:
...     print 'somefile', 'can\\'t be locked.'
... else:
...     print 'got lock'
got lock
>>> print lock.is_locked()
True
>>> lock.release()

>>> lock = LockFile('somefile')
>>> print lock.is_locked()
False
>>> with lock:
...    print lock.is_locked()
True
>>> print lock.is_locked()
False

>>> lock = LockFile('somefile')
>>> # It is okay to lock twice from the same thread...
>>> with lock:
...     lock.acquire()
...
>>> # Though no counter is kept, so you can't unlock multiple times...
>>> print lock.is_locked()
False

Exceptions:

    Error - base class for other exceptions
        LockError - base class for all locking exceptions
            AlreadyLocked - Another thread or process already holds the lock
            LockFailed - Lock failed for some other reason
        UnlockError - base class for all unlocking exceptions
            AlreadyUnlocked - File was not locked.
            NotMyLock - File was locked but not by the current thread/process
"""

from __future__ import absolute_import

import functools
import os
import socket
import threading
import warnings

# Work with PEP8 and non-PEP8 versions of threading module: older
# Pythons expose only the camelCase spellings, so alias them to the
# PEP8 names the rest of this package uses.
if not hasattr(threading, "current_thread"):
    threading.current_thread = threading.currentThread
if not hasattr(threading.Thread, "get_name"):
    threading.Thread.get_name = threading.Thread.getName

__all__ = ['Error', 'LockError', 'LockTimeout', 'AlreadyLocked',
           'LockFailed', 'UnlockError', 'NotLocked', 'NotMyLock',
           'LinkFileLock', 'MkdirFileLock', 'SQLiteFileLock',
           'LockBase', 'locked']


class Error(Exception):
    """Root of the lockfile exception hierarchy.

    Every exception raised by this package derives from ``Error``, so
    callers may catch this one type to handle any locking failure.
    """


class LockError(Error):
    """Base class for errors raised while trying to acquire a lock."""


class LockTimeout(LockError):
    """Lock acquisition gave up because the caller's timeout elapsed."""


class AlreadyLocked(LockError):
    """The file is already locked by some other thread or process."""


class LockFailed(LockError):
    """Lock file creation failed for a reason other than contention."""


class UnlockError(Error):
    """Base class for errors raised while trying to release a lock."""


class NotLocked(UnlockError):
    """Release was attempted on a file that is not locked at all."""


class NotMyLock(UnlockError):
    """Release was attempted on a lock held by a different owner."""


class _SharedBase(object):
    """Common state and context-manager plumbing shared by all locks.

    Holds the locked ``path`` and defines the abstract acquire/release
    contract; concrete backends override both methods.
    """

    def __init__(self, path):
        # Path of the file being protected by this lock.
        self.path = path

    def acquire(self, timeout=None):
        """
        Acquire the lock.

        * If timeout is omitted (or None), wait forever trying to lock the
          file.

        * If timeout > 0, try to acquire the lock for that many seconds.  If
          the lock period expires and the file is still locked, raise
          LockTimeout.

        * If timeout <= 0, raise AlreadyLocked immediately if the file is
          already locked.
        """
        # Bug fix: ``raise NotImplemented(...)`` tried to *call* the
        # non-callable NotImplemented singleton, producing a confusing
        # TypeError; NotImplementedError is the correct abstract-method
        # signal.
        raise NotImplementedError("implement in subclass")

    def release(self):
        """
        Release the lock.

        If the file is not locked, raise NotLocked.
        """
        raise NotImplementedError("implement in subclass")

    def __enter__(self):
        """
        Context manager support.
        """
        self.acquire()
        return self

    def __exit__(self, *_exc):
        """
        Context manager support.
        """
        self.release()

    def __repr__(self):
        return "<%s: %r>" % (self.__class__.__name__, self.path)


class LockBase(_SharedBase):
    """Base class for platform-specific lock classes.

    Computes ``lock_file`` (the path with ``.lock`` appended) and a
    ``unique_name`` identifying this host/thread/process, which concrete
    backends use to implement ownership checks.
    """
    def __init__(self, path, threaded=True, timeout=None):
        """
        >>> lock = LockBase('somefile')
        >>> lock = LockBase('somefile', threaded=False)
        """
        super(LockBase, self).__init__(path)
        self.lock_file = os.path.abspath(path) + ".lock"
        self.hostname = socket.gethostname()
        self.pid = os.getpid()
        if threaded:
            t = threading.current_thread()
            # Thread objects in Python 2.4 and earlier do not have ident
            # attrs.  Worm around that.
            ident = getattr(t, "ident", hash(t))
            self.tname = "-%x" % (ident & 0xffffffff)
        else:
            self.tname = ""
        dirname = os.path.dirname(self.lock_file)

        # unique name is mostly about the current process, but must
        # also contain the path -- otherwise, two adjacent locked
        # files conflict (one file gets locked, creating lock-file and
        # unique file, the other one gets locked, creating lock-file
        # and overwriting the already existing lock-file, then one
        # gets unlocked, deleting both lock-file and unique file,
        # finally the last lock errors out upon releasing.
        self.unique_name = os.path.join(dirname,
                                        "%s%s.%s%s" % (self.hostname,
                                                       self.tname,
                                                       self.pid,
                                                       hash(self.path)))
        self.timeout = timeout

    def is_locked(self):
        """
        Tell whether or not the file is locked.
        """
        # Bug fix (here and below): raise NotImplementedError, the
        # exception class, instead of calling the non-callable
        # NotImplemented singleton (which raised TypeError).
        raise NotImplementedError("implement in subclass")

    def i_am_locking(self):
        """
        Return True if this object is locking the file.
        """
        raise NotImplementedError("implement in subclass")

    def break_lock(self):
        """
        Remove a lock.  Useful if a locking thread failed to unlock.
        """
        raise NotImplementedError("implement in subclass")

    def __repr__(self):
        return "<%s: %r -- %r>" % (self.__class__.__name__, self.unique_name,
                                   self.path)


def _fl_helper(cls, mod, *args, **kwds):
    """Shared body of the deprecated ``*FileLock`` factory functions."""
    message = "Import from %s module instead of lockfile package" % mod
    warnings.warn(message, DeprecationWarning, stacklevel=2)
    # This is a bit funky, but it's only for awhile.  The way the unit
    # tests are constructed this function winds up as an unbound method,
    # so it actually receives an extra leading (self) argument; drop it
    # so the real positional args line up.
    positional = args if isinstance(args[0], str) else args[1:]
    # Preserve the historical default of threaded=True for bare calls.
    if len(positional) == 1 and not kwds:
        kwds["threaded"] = True
    return cls(*positional, **kwds)


def LinkFileLock(*args, **kwds):
    """Factory function provided for backwards compatibility.

    Do not use in new code.  Instead, import LinkLockFile from the
    lockfile.linklockfile module.
    """
    from . import linklockfile
    lock_cls = linklockfile.LinkLockFile
    return _fl_helper(lock_cls, "lockfile.linklockfile", *args, **kwds)


def MkdirFileLock(*args, **kwds):
    """Factory function provided for backwards compatibility.

    Do not use in new code.  Instead, import MkdirLockFile from the
    lockfile.mkdirlockfile module.
    """
    from . import mkdirlockfile
    lock_cls = mkdirlockfile.MkdirLockFile
    return _fl_helper(lock_cls, "lockfile.mkdirlockfile", *args, **kwds)


def SQLiteFileLock(*args, **kwds):
    """Factory function provided for backwards compatibility.

    Do not use in new code.  Instead, import SQLiteLockFile from the
    lockfile.sqlitelockfile module.
    """
    from . import sqlitelockfile
    return _fl_helper(sqlitelockfile.SQLiteLockFile, "lockfile.sqlitelockfile",
                      *args, **kwds)


def locked(path, timeout=None):
    """Decorator that serializes calls to the wrapped function via a file lock.

    Arguments:
     - path: path for lockfile.
     - timeout (optional): Timeout for acquiring lock.

     Usage:
         @locked('/var/run/myname', timeout=0)
         def myname(...):
             ...
    """
    def _decorate(func):
        @functools.wraps(func)
        def _locked_call(*args, **kwargs):
            # Hold the lock only for the duration of the call and release
            # it even if the wrapped function raises.
            file_lock = FileLock(path, timeout=timeout)
            file_lock.acquire()
            try:
                return func(*args, **kwargs)
            finally:
                file_lock.release()
        return _locked_call
    return _decorate


# Pick the default LockFile implementation for this platform: use the
# atomic hard-link primitive where os.link exists, otherwise fall back to
# atomic directory creation.
if hasattr(os, "link"):
    from . import linklockfile as _llf
    LockFile = _llf.LinkLockFile
else:
    from . import mkdirlockfile as _mlf
    LockFile = _mlf.MkdirLockFile

# Backwards-compatible alias for the default lock class.
FileLock = LockFile
_vendor/lockfile/linklockfile.py000064400000005134151733136270013023 0ustar00from __future__ import absolute_import

import time
import os

from . import (LockBase, LockFailed, NotLocked, NotMyLock, LockTimeout,
               AlreadyLocked)


class LinkLockFile(LockBase):
    """Lock access to a file using atomic property of link(2).

    >>> lock = LinkLockFile('somefile')
    >>> lock = LinkLockFile('somefile', threaded=False)
    """

    def acquire(self, timeout=None):
        # Acquire the lock, polling until it is free or the timeout expires.
        # timeout semantics: None = wait indefinitely; > 0 = raise
        # LockTimeout after that many seconds; <= 0 = raise AlreadyLocked
        # immediately if the lock is held by someone else.
        try:
            open(self.unique_name, "wb").close()
        except IOError:
            raise LockFailed("failed to create %s" % self.unique_name)

        timeout = timeout if timeout is not None else self.timeout
        end_time = time.time()
        if timeout is not None and timeout > 0:
            end_time += timeout

        while True:
            # Try and create a hard link to it.
            try:
                os.link(self.unique_name, self.lock_file)
            except OSError:
                # Link creation failed.  Maybe we've double-locked?
                nlinks = os.stat(self.unique_name).st_nlink
                if nlinks == 2:
                    # The original link plus the one I created == 2.  We're
                    # good to go.
                    return
                else:
                    # Otherwise the lock creation failed.
                    if timeout is not None and time.time() > end_time:
                        os.unlink(self.unique_name)
                        if timeout > 0:
                            raise LockTimeout("Timeout waiting to acquire"
                                              " lock for %s" %
                                              self.path)
                        else:
                            raise AlreadyLocked("%s is already locked" %
                                                self.path)
                    # Poll at one-tenth of the timeout, or 10 Hz by default.
                    time.sleep(timeout is not None and timeout / 10 or 0.1)
            else:
                # Link creation succeeded.  We're good to go.
                return

    def release(self):
        # Remove both our unique file and the shared lock link; refuse to
        # release a lock we do not hold.
        if not self.is_locked():
            raise NotLocked("%s is not locked" % self.path)
        elif not os.path.exists(self.unique_name):
            raise NotMyLock("%s is locked, but not by me" % self.path)
        os.unlink(self.unique_name)
        os.unlink(self.lock_file)

    def is_locked(self):
        # The lock file existing means *someone* holds the lock.
        return os.path.exists(self.lock_file)

    def i_am_locking(self):
        # We hold the lock iff our unique file exists and is hard-linked to
        # the lock file (link count of exactly 2).
        return (self.is_locked() and
                os.path.exists(self.unique_name) and
                os.stat(self.unique_name).st_nlink == 2)

    def break_lock(self):
        # Forcibly remove the lock file regardless of who owns it.
        if os.path.exists(self.lock_file):
            os.unlink(self.lock_file)
_vendor/lockfile/mkdirlockfile.py000064400000006030151733136270013170 0ustar00from __future__ import absolute_import, division

import time
import os
import sys
import errno

from . import (LockBase, LockFailed, NotLocked, NotMyLock, LockTimeout,
               AlreadyLocked)


class MkdirLockFile(LockBase):
    """Lock file by creating a directory."""
    def __init__(self, path, threaded=True, timeout=None):
        """
        >>> lock = MkdirLockFile('somefile')
        >>> lock = MkdirLockFile('somefile', threaded=False)
        """
        LockBase.__init__(self, path, threaded, timeout)
        # Lock file itself is a directory.  Place the unique file name into
        # it.
        self.unique_name = os.path.join(self.lock_file,
                                        "%s.%s%s" % (self.hostname,
                                                     self.tname,
                                                     self.pid))

    def acquire(self, timeout=None):
        # Acquire the lock by atomically creating the lock directory.
        # timeout semantics: None = wait indefinitely; > 0 = raise
        # LockTimeout after that many seconds; <= 0 = raise AlreadyLocked
        # immediately if the lock is held by someone else.
        timeout = timeout if timeout is not None else self.timeout
        end_time = time.time()
        if timeout is not None and timeout > 0:
            end_time += timeout

        # Poll at one-tenth of the timeout, or 10 Hz when waiting forever.
        if timeout is None:
            wait = 0.1
        else:
            wait = max(0, timeout / 10)

        while True:
            try:
                os.mkdir(self.lock_file)
            except OSError:
                err = sys.exc_info()[1]
                if err.errno == errno.EEXIST:
                    # Already locked.
                    if os.path.exists(self.unique_name):
                        # Already locked by me.
                        return
                    if timeout is not None and time.time() > end_time:
                        if timeout > 0:
                            raise LockTimeout("Timeout waiting to acquire"
                                              " lock for %s" %
                                              self.path)
                        else:
                            # Someone else has the lock.
                            raise AlreadyLocked("%s is already locked" %
                                                self.path)
                    time.sleep(wait)
                else:
                    # Couldn't create the lock for some other reason
                    raise LockFailed("failed to create %s" % self.lock_file)
            else:
                # Claim ownership by dropping our unique marker file inside
                # the freshly created lock directory.
                open(self.unique_name, "wb").close()
                return

    def release(self):
        # Refuse to release a lock we do not hold.
        if not self.is_locked():
            raise NotLocked("%s is not locked" % self.path)
        elif not os.path.exists(self.unique_name):
            raise NotMyLock("%s is locked, but not by me" % self.path)
        os.unlink(self.unique_name)
        os.rmdir(self.lock_file)

    def is_locked(self):
        # The lock directory existing means *someone* holds the lock.
        return os.path.exists(self.lock_file)

    def i_am_locking(self):
        # We hold it if our unique marker file is inside the lock directory.
        return (self.is_locked() and
                os.path.exists(self.unique_name))

    def break_lock(self):
        # Forcibly remove the lock directory and any owner marker files.
        if os.path.exists(self.lock_file):
            for name in os.listdir(self.lock_file):
                os.unlink(os.path.join(self.lock_file, name))
            os.rmdir(self.lock_file)
_vendor/pyparsing.py000064400000665653151733136270010623 0ustar00# module pyparsing.py
#
# Copyright (c) 2003-2016  Paul T. McGuire
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#

__doc__ = \
"""
pyparsing module - Classes and methods to define and execute parsing grammars

The pyparsing module is an alternative approach to creating and executing simple grammars,
vs. the traditional lex/yacc approach, or the use of regular expressions.  With pyparsing, you
don't need to learn a new syntax for defining grammars or matching expressions - the parsing module
provides a library of classes that you use to construct the grammar directly in Python.

Here is a program to parse "Hello, World!" (or any greeting of the form 
C{"<salutation>, <addressee>!"}), built up using L{Word}, L{Literal}, and L{And} elements 
(L{'+'<ParserElement.__add__>} operator gives L{And} expressions, strings are auto-converted to
L{Literal} expressions)::

    from pyparsing import Word, alphas

    # define grammar of a greeting
    greet = Word(alphas) + "," + Word(alphas) + "!"

    hello = "Hello, World!"
    print (hello, "->", greet.parseString(hello))

The program outputs the following::

    Hello, World! -> ['Hello', ',', 'World', '!']

The Python representation of the grammar is quite readable, owing to the self-explanatory
class names, and the use of '+', '|' and '^' operators.

The L{ParseResults} object returned from L{ParserElement.parseString<ParserElement.parseString>} can be accessed as a nested list, a dictionary, or an
object with named attributes.

The pyparsing module handles some of the problems that are typically vexing when writing text parsers:
 - extra or missing whitespace (the above program will also handle "Hello,World!", "Hello  ,  World  !", etc.)
 - quoted strings
 - embedded comments
"""

# Library metadata reported by the module itself.
__version__ = "2.1.10"
__versionTime__ = "07 Oct 2016 01:31 UTC"
__author__ = "Paul McGuire <ptmcg@users.sourceforge.net>"

import string
from weakref import ref as wkref
import copy
import sys
import warnings
import re
import sre_constants
import collections
import pprint
import traceback
import types
from datetime import datetime

try:
    from _thread import RLock
except ImportError:
    from threading import RLock

try:
    from collections import OrderedDict as _OrderedDict
except ImportError:
    try:
        from ordereddict import OrderedDict as _OrderedDict
    except ImportError:
        _OrderedDict = None

#~ sys.stderr.write( "testing pyparsing module, version %s, %s\n" % (__version__,__versionTime__ ) )

# Explicit declaration of the module's public API (controls "import *").
__all__ = [
'And', 'CaselessKeyword', 'CaselessLiteral', 'CharsNotIn', 'Combine', 'Dict', 'Each', 'Empty',
'FollowedBy', 'Forward', 'GoToColumn', 'Group', 'Keyword', 'LineEnd', 'LineStart', 'Literal',
'MatchFirst', 'NoMatch', 'NotAny', 'OneOrMore', 'OnlyOnce', 'Optional', 'Or',
'ParseBaseException', 'ParseElementEnhance', 'ParseException', 'ParseExpression', 'ParseFatalException',
'ParseResults', 'ParseSyntaxException', 'ParserElement', 'QuotedString', 'RecursiveGrammarException',
'Regex', 'SkipTo', 'StringEnd', 'StringStart', 'Suppress', 'Token', 'TokenConverter', 
'White', 'Word', 'WordEnd', 'WordStart', 'ZeroOrMore',
'alphanums', 'alphas', 'alphas8bit', 'anyCloseTag', 'anyOpenTag', 'cStyleComment', 'col',
'commaSeparatedList', 'commonHTMLEntity', 'countedArray', 'cppStyleComment', 'dblQuotedString',
'dblSlashComment', 'delimitedList', 'dictOf', 'downcaseTokens', 'empty', 'hexnums',
'htmlComment', 'javaStyleComment', 'line', 'lineEnd', 'lineStart', 'lineno',
'makeHTMLTags', 'makeXMLTags', 'matchOnlyAtCol', 'matchPreviousExpr', 'matchPreviousLiteral',
'nestedExpr', 'nullDebugAction', 'nums', 'oneOf', 'opAssoc', 'operatorPrecedence', 'printables',
'punc8bit', 'pythonStyleComment', 'quotedString', 'removeQuotes', 'replaceHTMLEntity', 
'replaceWith', 'restOfLine', 'sglQuotedString', 'srange', 'stringEnd',
'stringStart', 'traceParseAction', 'unicodeString', 'upcaseTokens', 'withAttribute',
'indentedBlock', 'originalTextFor', 'ungroup', 'infixNotation','locatedExpr', 'withClass',
'CloseMatch', 'tokenMap', 'pyparsing_common',
]

# Python 2/3 compatibility shims.  PY_3 selects which aliases and helpers
# get defined below; the rest of the module uses them unconditionally.
system_version = tuple(sys.version_info)[:3]
PY_3 = system_version[0] == 3
if PY_3:
    _MAX_INT = sys.maxsize
    basestring = str
    unichr = chr
    _ustr = str

    # build list of single arg builtins, that can be used as parse actions
    singleArgBuiltins = [sum, len, sorted, reversed, list, tuple, set, any, all, min, max]

else:
    _MAX_INT = sys.maxint
    range = xrange

    def _ustr(obj):
        """Drop-in replacement for str(obj) that tries to be Unicode friendly. It first tries
           str(obj). If that fails with a UnicodeEncodeError, then it tries unicode(obj). It
           then < returns the unicode object | encodes it with the default encoding | ... >.
        """
        if isinstance(obj,unicode):
            return obj

        try:
            # If this works, then _ustr(obj) has the same behaviour as str(obj), so
            # it won't break any existing code.
            return str(obj)

        except UnicodeEncodeError:
            # Else encode it
            ret = unicode(obj).encode(sys.getdefaultencoding(), 'xmlcharrefreplace')
            xmlcharref = Regex('&#\d+;')
            xmlcharref.setParseAction(lambda t: '\\u' + hex(int(t[0][2:-1]))[2:])
            return xmlcharref.transformString(ret)

    # build list of single arg builtins, tolerant of Python version, that can be used as parse actions
    singleArgBuiltins = []
    import __builtin__
    for fname in "sum len sorted reversed list tuple set any all min max".split():
        try:
            singleArgBuiltins.append(getattr(__builtin__,fname))
        except AttributeError:
            continue

# Type of a generator expression, used to detect generator token lists.
_generatorType = type((y for y in range(1)))
 
def _xml_escape(data):
    """Escape &, <, >, ", ', etc. in a string of data."""
    # Ampersand must be replaced first so entities we insert are not
    # themselves re-escaped on a later pass.
    entity_names = "amp gt lt quot apos".split()
    pairs = zip('&><"\'', ("&%s;" % name for name in entity_names))
    for raw_char, entity in pairs:
        data = data.replace(raw_char, entity)
    return data

class _Constants(object):
    # Empty placeholder class used as a simple attribute namespace.
    pass

# Commonly used character classes for building word/token expressions.
alphas     = string.ascii_uppercase + string.ascii_lowercase
nums       = "0123456789"
hexnums    = nums + "ABCDEFabcdef"
alphanums  = alphas + nums
_bslash    = chr(92)  # backslash, written as chr(92) to avoid escape clutter
printables = "".join(c for c in string.printable if c not in string.whitespace)

class ParseBaseException(Exception):
    """Base exception class for all parsing runtime exceptions."""
    # Performance tuning: a *lot* of these are constructed during parsing,
    # so keep this constructor as small and fast as possible.
    def __init__( self, pstr, loc=0, msg=None, elem=None ):
        self.loc = loc
        if msg is not None:
            self.msg = msg
            self.pstr = pstr
        else:
            # Single-argument form: the string itself is the message.
            self.msg = pstr
            self.pstr = ""
        self.parserElement = elem
        self.args = (pstr, loc, msg)

    @classmethod
    def _from_exception(cls, pe):
        """
        Internal factory method to simplify creating one type of
        ParseException from another - avoids having __init__ signature
        conflicts among subclasses.
        """
        return cls(pe.pstr, pe.loc, pe.msg, pe.parserElement)

    def __getattr__( self, aname ):
        """supported attributes by name are:
            - lineno - returns the line number of the exception text
            - col - returns the column number of the exception text
            - line - returns the line containing the exception text
        """
        if aname == "lineno":
            return lineno( self.loc, self.pstr )
        if aname in ("col", "column"):
            return col( self.loc, self.pstr )
        if aname == "line":
            return line( self.loc, self.pstr )
        raise AttributeError(aname)

    def __str__( self ):
        return "%s (at char %d), (line:%d, col:%d)" % \
                ( self.msg, self.loc, self.lineno, self.column )

    def __repr__( self ):
        return _ustr(self)

    def markInputline( self, markerString = ">!<" ):
        """Extracts the exception line from the input string, and marks
           the location of the exception with a special symbol.
        """
        source_line = self.line
        marker_col = self.column - 1
        if markerString:
            source_line = "".join((source_line[:marker_col],
                                   markerString, source_line[marker_col:]))
        return source_line.strip()

    def __dir__(self):
        return "lineno col line".split() + dir(type(self))

class ParseException(ParseBaseException):
    """
    Exception thrown when parse expressions don't match class;
    supported attributes by name are:
     - lineno - returns the line number of the exception text
     - col - returns the column number of the exception text
     - line - returns the line containing the exception text
        
    Example::
        try:
            Word(nums).setName("integer").parseString("ABC")
        except ParseException as pe:
            print(pe)
            print("column: {}".format(pe.col))
            
    prints::
       Expected integer (at char 0), (line:1, col:1)
        column: 1
    """
    # All behavior is inherited from ParseBaseException.
    pass

class ParseFatalException(ParseBaseException):
    """user-throwable exception thrown when inconsistent parse content
       is found; stops all parsing immediately"""
    # All behavior is inherited from ParseBaseException.
    pass

class ParseSyntaxException(ParseFatalException):
    """just like L{ParseFatalException}, but thrown internally when an
       L{ErrorStop<And._ErrorStop>} ('-' operator) indicates that parsing is to stop 
       immediately because an unbacktrackable syntax error has been found"""
    # All behavior is inherited from ParseFatalException.
    pass

#~ class ReparseException(ParseBaseException):
    #~ """Experimental class - parse actions can raise this exception to cause
       #~ pyparsing to reparse the input string:
        #~ - with a modified input string, and/or
        #~ - with a modified start location
       #~ Set the values of the ReparseException in the constructor, and raise the
       #~ exception in a parse action to cause pyparsing to use the new string/location.
       #~ Setting the values as None causes no change to be made.
       #~ """
    #~ def __init_( self, newstring, restartLoc ):
        #~ self.newParseText = newstring
        #~ self.reparseLoc = restartLoc

class RecursiveGrammarException(Exception):
    """Exception thrown by L{ParserElement.validate} if the grammar could be improperly recursive."""

    def __init__(self, parseElementList):
        # Keep the chain of elements forming the recursion, for reporting.
        self.parseElementTrace = parseElementList

    def __str__(self):
        return "RecursiveGrammarException: %s" % self.parseElementTrace

class _ParseResultsWithOffset(object):
    """Pairs a named result value with its positional offset in the token list."""

    def __init__(self, p1, p2):
        self.tup = (p1, p2)

    def __getitem__(self, i):
        return self.tup[i]

    def __repr__(self):
        # Display only the value; the offset is internal bookkeeping.
        return repr(self.tup[0])

    def setOffset(self, i):
        self.tup = (self.tup[0], i)

class ParseResults(object):
    """
    Structured parse results, to provide multiple means of access to the parsed data:
       - as a list (C{len(results)})
       - by list index (C{results[0], results[1]}, etc.)
       - by attribute (C{results.<resultsName>} - see L{ParserElement.setResultsName})

    Example::
        integer = Word(nums)
        date_str = (integer.setResultsName("year") + '/' 
                        + integer.setResultsName("month") + '/' 
                        + integer.setResultsName("day"))
        # equivalent form:
        # date_str = integer("year") + '/' + integer("month") + '/' + integer("day")

        # parseString returns a ParseResults object
        result = date_str.parseString("1999/12/31")

        def test(s, fn=repr):
            print("%s -> %s" % (s, fn(eval(s))))
        test("list(result)")
        test("result[0]")
        test("result['month']")
        test("result.day")
        test("'month' in result")
        test("'minutes' in result")
        test("result.dump()", str)
    prints::
        list(result) -> ['1999', '/', '12', '/', '31']
        result[0] -> '1999'
        result['month'] -> '12'
        result.day -> '31'
        'month' in result -> True
        'minutes' in result -> False
        result.dump() -> ['1999', '/', '12', '/', '31']
        - day: 31
        - month: 12
        - year: 1999
    """
    def __new__(cls, toklist=None, name=None, asList=True, modal=True ):
        # ParseResults instances are passed around freely; wrapping an
        # existing instance returns it unchanged instead of copying it.
        if isinstance(toklist, cls):
            return toklist
        retobj = object.__new__(cls)
        # Flag consumed by __init__ so a recycled instance is not
        # re-initialized.
        retobj.__doinit = True
        return retobj

    # Performance tuning: we construct a *lot* of these, so keep this
    # constructor as small and fast as possible
    def __init__( self, toklist=None, name=None, asList=True, modal=True, isinstance=isinstance ):
        # __doinit is set by __new__; it is False when __new__ returned an
        # already-constructed instance, in which case we skip re-init.
        if self.__doinit:
            self.__doinit = False
            self.__name = None
            self.__parent = None
            self.__accumNames = {}
            self.__asList = asList
            self.__modal = modal
            if toklist is None:
                toklist = []
            if isinstance(toklist, list):
                # Copy so later mutations do not alias the caller's list.
                self.__toklist = toklist[:]
            elif isinstance(toklist, _generatorType):
                self.__toklist = list(toklist)
            else:
                self.__toklist = [toklist]
            self.__tokdict = dict()

        # Attach a results name, if one was given.  modal=False marks the
        # name as accumulating (values collect instead of replacing).
        if name is not None and name:
            if not modal:
                self.__accumNames[name] = 0
            if isinstance(name,int):
                name = _ustr(name) # will always return a str, but use _ustr for consistency
            self.__name = name
            if not (isinstance(toklist, (type(None), basestring, list)) and toklist in (None,'',[])):
                if isinstance(toklist,basestring):
                    toklist = [ toklist ]
                if asList:
                    if isinstance(toklist,ParseResults):
                        self[name] = _ParseResultsWithOffset(toklist.copy(),0)
                    else:
                        self[name] = _ParseResultsWithOffset(ParseResults(toklist[0]),0)
                    self[name].__name = name
                else:
                    try:
                        self[name] = toklist[0]
                    except (KeyError,TypeError,IndexError):
                        self[name] = toklist

    def __getitem__(self, i):
        """Index by int/slice (list semantics) or by results name (dict semantics)."""
        if isinstance(i, (int, slice)):
            return self.__toklist[i]
        # Named lookup: an accumulating name yields every stored value;
        # otherwise only the most recently stored value is returned.
        if i in self.__accumNames:
            return ParseResults([v[0] for v in self.__tokdict[i]])
        return self.__tokdict[i][-1][0]

    def __setitem__( self, k, v, isinstance=isinstance ):
        # Named values are stored in __tokdict as (value, offset) pairs so
        # later insertions/deletions can fix up their positions.
        if isinstance(v,_ParseResultsWithOffset):
            self.__tokdict[k] = self.__tokdict.get(k,list()) + [v]
            sub = v[0]
        elif isinstance(k,(int,slice)):
            # List-style assignment replaces tokens positionally.
            self.__toklist[k] = v
            sub = v
        else:
            self.__tokdict[k] = self.__tokdict.get(k,list()) + [_ParseResultsWithOffset(v,0)]
            sub = v
        if isinstance(sub,ParseResults):
            # Track the parent with a weakref so nested results can walk up
            # without creating reference cycles.
            sub.__parent = wkref(self)

    def __delitem__( self, i ):
        # Int/slice deletes tokens positionally and then re-bases the
        # offsets stored for named results; a name deletes all occurrences
        # of that results name.
        if isinstance(i,(int,slice)):
            mylen = len( self.__toklist )
            del self.__toklist[i]

            # convert int to slice
            if isinstance(i, int):
                if i < 0:
                    i += mylen
                i = slice(i, i+1)
            # get removed indices
            removed = list(range(*i.indices(mylen)))
            removed.reverse()
            # fixup indices in token dictionary
            for name,occurrences in self.__tokdict.items():
                for j in removed:
                    for k, (value, position) in enumerate(occurrences):
                        # Shift offsets left when they sit past a removed index.
                        occurrences[k] = _ParseResultsWithOffset(value, position - (position > j))
        else:
            del self.__tokdict[i]

    def __contains__(self, k):
        """Membership tests check results *names*, not token values."""
        return k in self.__tokdict

    # List-protocol dunders delegate to the underlying token list.
    def __len__( self ): return len( self.__toklist )
    def __bool__(self): return ( not not self.__toklist )
    __nonzero__ = __bool__  # Python 2 name for the truth-value protocol
    def __iter__( self ): return iter( self.__toklist )
    def __reversed__( self ): return iter( self.__toklist[::-1] )
    def _iterkeys( self ):
        # Works with both Py2 dicts (iterkeys) and Py3 dicts (plain iter).
        if hasattr(self.__tokdict, "iterkeys"):
            return self.__tokdict.iterkeys()
        else:
            return iter(self.__tokdict)

    def _itervalues( self ):
        return (self[k] for k in self._iterkeys())

    def _iteritems( self ):
        return ((k, self[k]) for k in self._iterkeys())

    # Expose the version-appropriate dict-style API: iterator views on
    # Python 3, iter*/list pairs on Python 2.
    if PY_3:
        keys = _iterkeys
        """Returns an iterator of all named result keys (Python 3.x only)."""

        values = _itervalues
        """Returns an iterator of all named result values (Python 3.x only)."""

        items = _iteritems
        """Returns an iterator of all named result key-value tuples (Python 3.x only)."""

    else:
        iterkeys = _iterkeys
        """Returns an iterator of all named result keys (Python 2.x only)."""

        itervalues = _itervalues
        """Returns an iterator of all named result values (Python 2.x only)."""

        iteritems = _iteritems
        """Returns an iterator of all named result key-value tuples (Python 2.x only)."""

        def keys( self ):
            """Returns all named result keys (as a list in Python 2.x, as an iterator in Python 3.x)."""
            return list(self.iterkeys())

        def values( self ):
            """Returns all named result values (as a list in Python 2.x, as an iterator in Python 3.x)."""
            return list(self.itervalues())

        def items( self ):
            """Returns all named result key-values (as a list of tuples in Python 2.x, as an iterator in Python 3.x)."""
            return list(self.iteritems())

    def haskeys( self ):
        """Since keys() returns an iterator, this method is helpful in bypassing
           code that looks for the existence of any defined results names."""
        return bool(self.__tokdict)
        
    def pop( self, *args, **kwargs):
        """
        Removes and returns item at specified index (default=C{last}).
        Supports both C{list} and C{dict} semantics for C{pop()}. If passed no
        argument or an integer argument, it will use C{list} semantics
        and pop tokens from the list of parsed tokens. If passed a 
        non-integer argument (most likely a string), it will use C{dict}
        semantics and pop the corresponding value from any defined 
        results names. A second default return value argument is 
        supported, just as in C{dict.pop()}.

        Example::
            def remove_first(tokens):
                tokens.pop(0)
            print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321']
            print(OneOrMore(Word(nums)).addParseAction(remove_first).parseString("0 123 321")) # -> ['123', '321']

            label = Word(alphas)
            patt = label("LABEL") + OneOrMore(Word(nums))
            print(patt.parseString("AAB 123 321").dump())

            # Use pop() in a parse action to remove named result (note that corresponding value is not
            # removed from list form of results)
            def remove_LABEL(tokens):
                tokens.pop("LABEL")
                return tokens
            patt.addParseAction(remove_LABEL)
            print(patt.parseString("AAB 123 321").dump())
        prints::
            ['AAB', '123', '321']
            - LABEL: AAB

            ['AAB', '123', '321']
        """
        if not args:
            # No index given - list semantics, pop the last token.
            args = [-1]
        for k,v in kwargs.items():
            if k == 'default':
                # Fold the keyword default into the positional args tuple.
                args = (args[0], v)
            else:
                raise TypeError("pop() got an unexpected keyword argument '%s'" % k)
        # Integer index, or a single argument naming a result: remove and
        # return it.  NOTE: a lone non-integer key that is missing also
        # takes this branch and raises KeyError via __getitem__, which
        # matches dict.pop() called without a default.
        if (isinstance(args[0], int) or
                        len(args) == 1 or
                        args[0] in self):
            index = args[0]
            ret = self[index]
            del self[index]
            return ret
        else:
            # Named result not present and a default was supplied.
            defaultvalue = args[1]
            return defaultvalue

    def get(self, key, defaultValue=None):
        """
        Returns named result matching the given key, or if there is no
        such name, then returns the given C{defaultValue} or C{None} if no
        C{defaultValue} is specified.

        Similar to C{dict.get()}.

        Example::
            integer = Word(nums)
            date_str = integer("year") + '/' + integer("month") + '/' + integer("day")

            result = date_str.parseString("1999/12/31")
            print(result.get("year")) # -> '1999'
            print(result.get("hour", "not specified")) # -> 'not specified'
            print(result.get("hour")) # -> None
        """
        # Guard clause instead of if/else: missing names fall back to the
        # default.  (Membership is checked explicitly because self[key]
        # would index the token list for integer keys.)
        if key not in self:
            return defaultValue
        return self[key]

    def insert( self, index, insStr ):
        """
        Inserts new element at location index in the list of parsed tokens.
        
        Similar to C{list.insert()}.

        Example::
            print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321']

            # use a parse action to insert the parse location in the front of the parsed results
            def insert_locn(locn, tokens):
                tokens.insert(0, locn)
            print(OneOrMore(Word(nums)).addParseAction(insert_locn).parseString("0 123 321")) # -> [0, '0', '123', '321']
        """
        self.__toklist.insert(index, insStr)
        # fixup indices in token dictionary
        for name,occurrences in self.__tokdict.items():
            for k, (value, position) in enumerate(occurrences):
                # Shift stored offsets right when they sit past the
                # insertion point.
                occurrences[k] = _ParseResultsWithOffset(value, position + (position > index))

    def append(self, item):
        """
        Add a single element to the end of this ParseResults' token list.

        Similar to C{list.append()}.
        """
        self.__toklist.append(item)

    def extend(self, itemseq):
        """
        Add a sequence of elements to the end of this ParseResults.

        Similar to C{list.extend()}; extending with another ParseResults
        also merges its named results.
        """
        if not isinstance(itemseq, ParseResults):
            self.__toklist.extend(itemseq)
        else:
            # += carries over the other result's names and offsets too.
            self += itemseq

    def clear(self):
        """Remove all tokens and forget all results names."""
        self.__tokdict.clear()
        del self.__toklist[:]

    def __getattr__( self, name ):
        """Attribute access falls back to named-result lookup; a name with
        no stored result returns an empty string."""
        # Cleanup: the original carried a duplicate lookup implementation
        # after this try/except, which was unreachable (both paths return).
        try:
            return self[name]
        except KeyError:
            return ""

    def __add__(self, other):
        """Return a new ParseResults combining this one with *other*."""
        combined = self.copy()
        combined += other
        return combined

    def __iadd__( self, other ):
        # Merge named results first: offsets from `other` are re-based onto
        # the end of our token list (negative offsets pin to the join point).
        if other.__tokdict:
            offset = len(self.__toklist)
            addoffset = lambda a: offset if a<0 else a+offset
            otheritems = other.__tokdict.items()
            otherdictitems = [(k, _ParseResultsWithOffset(v[0],addoffset(v[1])) )
                                for (k,vlist) in otheritems for v in vlist]
            for k,v in otherdictitems:
                self[k] = v
                if isinstance(v[0],ParseResults):
                    # Re-parent merged sub-results onto self (weakref avoids
                    # reference cycles).
                    v[0].__parent = wkref(self)

        self.__toklist += other.__toklist
        self.__accumNames.update( other.__accumNames )
        return self

    def __radd__(self, other):
        # support "other + self"; 0 + self returns a copy so that the sum()
        # builtin can merge a sequence of ParseResults
        if isinstance(other, int) and other == 0:
            return self.copy()
        # delegate to other's __add__ - a TypeError here is legitimate
        return other + self
        
    def __repr__( self ):
        # show both the raw token list and the name -> (value, offset) dict
        return "(%s, %s)" % (repr(self.__toklist), repr(self.__tokdict))

    def __str__( self ):
        # nested ParseResults render via _ustr; plain tokens via repr
        pieces = (_ustr(tok) if isinstance(tok, ParseResults) else repr(tok)
                  for tok in self.__toklist)
        return '[' + ', '.join(pieces) + ']'

    def _asStringList( self, sep='' ):
        # Flatten nested ParseResults into a flat list of token strings,
        # optionally inserting sep between successive items.
        out = []
        for tok in self.__toklist:
            if out and sep:
                out.append(sep)
            if isinstance(tok, ParseResults):
                out.extend(tok._asStringList())
            else:
                out.append(_ustr(tok))
        return out

    def asList( self ):
        """
        Return the parse results as a plain (possibly nested) Python list.

        Example::
            result = OneOrMore(Word(alphas)).parseString("sldkj lsdkj sldkj")
            # result is a ParseResults that merely prints like a list;
            # asList() produces an actual list:
            result.asList()  # -> ['sldkj', 'lsdkj', 'sldkj']
        """
        return [tok.asList() if isinstance(tok, ParseResults) else tok
                for tok in self.__toklist]

    def asDict( self ):
        """
        Return the named parse results as a nested plain dictionary.

        Useful when a real dict is required, e.g. for C{json.dumps}::

            result = date_str.parseString('12/31/1999')
            result.asDict()  # -> {'day': '1999', 'year': '12', 'month': '31'}
        """
        # Python 2 dicts need iteritems() to avoid building an intermediate list
        item_fn = self.items if PY_3 else self.iteritems

        def toItem(obj):
            # recursively convert nested ParseResults: keyed ones become dicts,
            # unkeyed ones become lists; everything else passes through
            if not isinstance(obj, ParseResults):
                return obj
            if obj.haskeys():
                return obj.asDict()
            return [toItem(v) for v in obj]

        return dict((k, toItem(v)) for k, v in item_fn())

    def copy( self ):
        """
        Return a new C{ParseResults} with the same tokens, results names,
        parent link and name as this one (token list is shared-element copy).
        """
        duplicate = ParseResults(self.__toklist)
        duplicate.__name = self.__name
        duplicate.__parent = self.__parent
        duplicate.__tokdict = self.__tokdict.copy()
        duplicate.__accumNames.update(self.__accumNames)
        return duplicate

    def asXML( self, doctag=None, namedItemsOnly=False, indent="", formatted=True ):
        """
        (Deprecated) Returns the parse results as XML. Tags are created for tokens and lists that have defined results names.

        :param doctag: tag name for the root element (defaults to this
            result's own name, else "ITEM")
        :param namedItemsOnly: if True, emit only tokens with results names
        :param indent: leading indent string for the root element
        :param formatted: if False, suppress all newlines and indentation
        """
        nl = "\n"
        out = []
        # map token-list offset -> results name, so individual tokens can be tagged
        namedItems = dict((v[1],k) for (k,vlist) in self.__tokdict.items()
                                                            for v in vlist)
        nextLevelIndent = indent + "  "

        # collapse out indents if formatting is not desired
        if not formatted:
            indent = ""
            nextLevelIndent = ""
            nl = ""

        selfTag = None
        if doctag is not None:
            selfTag = doctag
        else:
            if self.__name:
                selfTag = self.__name

        if not selfTag:
            if namedItemsOnly:
                return ""
            else:
                selfTag = "ITEM"

        out += [ nl, indent, "<", selfTag, ">" ]

        for i,res in enumerate(self.__toklist):
            if isinstance(res,ParseResults):
                # nested results recurse, using their offset's name if any
                if i in namedItems:
                    out += [ res.asXML(namedItems[i],
                                        namedItemsOnly and doctag is None,
                                        nextLevelIndent,
                                        formatted)]
                else:
                    out += [ res.asXML(None,
                                        namedItemsOnly and doctag is None,
                                        nextLevelIndent,
                                        formatted)]
            else:
                # individual token, see if there is a name for it
                resTag = None
                if i in namedItems:
                    resTag = namedItems[i]
                if not resTag:
                    if namedItemsOnly:
                        continue
                    else:
                        resTag = "ITEM"
                xmlBodyText = _xml_escape(_ustr(res))
                out += [ nl, nextLevelIndent, "<", resTag, ">",
                                                xmlBodyText,
                                                "</", resTag, ">" ]

        out += [ nl, indent, "</", selfTag, ">" ]
        return "".join(out)

    def __lookup(self,sub):
        # reverse lookup: return the results name whose stored value *is* sub
        for name, occurrences in self.__tokdict.items():
            for value, _loc in occurrences:
                if value is sub:
                    return name
        return None

    def getName(self):
        r"""
        Returns the results name for this token expression. Useful when several 
        different expressions might match at a particular location.

        Example::
            integer = Word(nums)
            ssn_expr = Regex(r"\d\d\d-\d\d-\d\d\d\d")
            house_number_expr = Suppress('#') + Word(nums, alphanums)
            user_data = (Group(house_number_expr)("house_number") 
                        | Group(ssn_expr)("ssn")
                        | Group(integer)("age"))
            user_info = OneOrMore(user_data)
            
            result = user_info.parseString("22 111-22-3333 #221B")
            for item in result:
                print(item.getName(), ':', item[0])
        prints::
            age : 22
            ssn : 111-22-3333
            house_number : 221B
        """
        if self.__name:
            return self.__name
        elif self.__parent:
            # ask the enclosing ParseResults which of its names maps to us
            par = self.__parent()
            if par:
                return par.__lookup(self)
            else:
                return None
        elif (len(self) == 1 and
               len(self.__tokdict) == 1 and
               next(iter(self.__tokdict.values()))[0][1] in (0,-1)):
            # single token carrying a single name that spans the whole result
            return next(iter(self.__tokdict.keys()))
        else:
            return None

    def dump(self, indent='', depth=0, full=True):
        """
        Diagnostic method for listing out the contents of a C{ParseResults}.
        Accepts an optional C{indent} argument so that this string can be embedded
        in a nested display of other data.

        Example::
            integer = Word(nums)
            date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
            
            result = date_str.parseString('12/31/1999')
            print(result.dump())
        prints::
            ['12', '/', '31', '/', '1999']
            - day: 1999
            - month: 31
            - year: 12
        """
        out = []
        NL = '\n'
        # first line: the flat token list
        out.append( indent+_ustr(self.asList()) )
        if full:
            if self.haskeys():
                # named results: list each as "- name: value", sorted by name
                items = sorted((str(k), v) for k,v in self.items())
                for k,v in items:
                    if out:
                        out.append(NL)
                    out.append( "%s%s- %s: " % (indent,('  '*depth), k) )
                    if isinstance(v,ParseResults):
                        if v:
                            out.append( v.dump(indent,depth+1) )
                        else:
                            out.append(_ustr(v))
                    else:
                        out.append(repr(v))
            elif any(isinstance(vv,ParseResults) for vv in self):
                # no names, but nested results: show each element by index
                v = self
                for i,vv in enumerate(v):
                    if isinstance(vv,ParseResults):
                        out.append("\n%s%s[%d]:\n%s%s%s" % (indent,('  '*(depth)),i,indent,('  '*(depth+1)),vv.dump(indent,depth+1) ))
                    else:
                        out.append("\n%s%s[%d]:\n%s%s%s" % (indent,('  '*(depth)),i,indent,('  '*(depth+1)),_ustr(vv)))
            
        return "".join(out)

    def pprint(self, *args, **kwargs):
        """
        Pretty-print the token list using the stdlib C{pprint} module.
        Extra positional/keyword arguments are forwarded to
        C{pprint.pprint} (U{http://docs.python.org/3/library/pprint.html#pprint.pprint}),
        e.g. C{result.pprint(width=40)}::

            ['fna',
             ['a',
              'b',
              ['(', 'fnb', ['c', 'd', '200'], ')'],
              '100']]
        """
        pprint.pprint(self.asList(), *args, **kwargs)

    # add support for pickle protocol
    def __getstate__(self):
        # dereference the parent weakref (or use None) so that the state
        # tuple contains only picklable objects
        parent = self.__parent is not None and self.__parent() or None
        return (self.__toklist,
                (self.__tokdict.copy(), parent, self.__accumNames, self.__name))

    def __setstate__(self, state):
        # restore tokens and named-results dict, then rebuild the transient
        # pieces: accumulated-names set and (re-weakref'd) parent link
        self.__toklist, (self.__tokdict, par, inAccumNames, self.__name) = state
        self.__accumNames = {}
        self.__accumNames.update(inAccumNames)
        self.__parent = wkref(par) if par is not None else None

    def __getnewargs__(self):
        # arguments passed to ParseResults.__new__ when unpickling
        return self.__toklist, self.__name, self.__asList, self.__modal

    def __dir__(self):
        # expose results names alongside the regular class attributes
        return (dir(type(self)) + list(self.keys()))

# ParseResults implements the mapping protocol; register it so that
# isinstance(x, MutableMapping) checks succeed.
# collections.MutableMapping was removed in Python 3.10 - prefer
# collections.abc (available since 3.3), falling back for Python 2.
try:
    collections.abc.MutableMapping.register(ParseResults)
except AttributeError:  # Python 2: ABCs live directly in collections
    collections.MutableMapping.register(ParseResults)

def col (loc,strg):
    """Returns the 1-based column number of location C{loc} within C{strg},
   counting newlines as line separators.

   Note: the default parsing behavior is to expand tabs in the input string
   before starting the parsing process.  See L{I{ParserElement.parseString}<ParserElement.parseString>} for more information
   on parsing strings containing C{<TAB>}s, and suggested methods to maintain a
   consistent view of the parsed string, the parse location, and line and column
   positions within the parsed string.
   """
    # a location just past a newline is column 1; otherwise measure the
    # distance back to the most recent newline (rfind yields -1 when there
    # is none, giving the correct 1-based column)
    if 0 < loc < len(strg) and strg[loc-1] == '\n':
        return 1
    return loc - strg.rfind("\n", 0, loc)

def lineno(loc,strg):
    """Returns the 1-based line number of location C{loc} within C{strg},
   counting newlines as line separators.

   Note: the default parsing behavior is to expand tabs in the input string
   before starting the parsing process.  See L{I{ParserElement.parseString}<ParserElement.parseString>} for more information
   on parsing strings containing C{<TAB>}s, and suggested methods to maintain a
   consistent view of the parsed string, the parse location, and line and column
   positions within the parsed string.
   """
    # one more than the number of newlines preceding loc
    return 1 + strg.count("\n", 0, loc)

def line( loc, strg ):
    """Returns the full line of text containing location C{loc} within C{strg},
       counting newlines as line separators.
       """
    # slice from just after the preceding newline up to the next newline
    # (rfind's -1 on "no newline" conveniently becomes start index 0)
    start = strg.rfind("\n", 0, loc) + 1
    end = strg.find("\n", loc)
    return strg[start:end] if end >= 0 else strg[start:]

def _defaultStartDebugAction( instring, loc, expr ):
    # default debug hook fired when an expression is about to try a match
    print (("Match " + _ustr(expr) + " at loc " + _ustr(loc) + "(%d,%d)" % ( lineno(loc,instring), col(loc,instring) )))

def _defaultSuccessDebugAction( instring, startloc, endloc, expr, toks ):
    # default debug hook fired when an expression matches successfully
    print ("Matched " + _ustr(expr) + " -> " + str(toks.asList()))

def _defaultExceptionDebugAction( instring, loc, expr, exc ):
    # default debug hook fired when an expression raises a parse exception
    print ("Exception raised:" + _ustr(exc))

def nullDebugAction(*args):
    """'Do-nothing' debug action, to suppress debugging output during parsing."""
    return None

# Only works on Python 3.x - nonlocal is toxic to Python 2 installs
#~ 'decorator to trim function calls to match the arity of the target'
#~ def _trim_arity(func, maxargs=3):
    #~ if func in singleArgBuiltins:
        #~ return lambda s,l,t: func(t)
    #~ limit = 0
    #~ foundArity = False
    #~ def wrapper(*args):
        #~ nonlocal limit,foundArity
        #~ while 1:
            #~ try:
                #~ ret = func(*args[limit:])
                #~ foundArity = True
                #~ return ret
            #~ except TypeError:
                #~ if limit == maxargs or foundArity:
                    #~ raise
                #~ limit += 1
                #~ continue
    #~ return wrapper

# this version is Python 2.x-3.x cross-compatible
'decorator to trim function calls to match the arity of the target'
def _trim_arity(func, maxargs=2):
    """Wrap *func* so it can always be called as a parse action with
    (s, loc, toks), trimming leading arguments until the call matches func's
    actual arity.  The arity is discovered at the first call by catching
    TypeError and retrying with fewer arguments; traceback inspection is used
    to distinguish a TypeError from our own mismatched call from one raised
    inside the user's function body.
    """
    if func in singleArgBuiltins:
        return lambda s,l,t: func(t)
    # single-element lists act as mutable cells so wrapper() can update
    # state without 'nonlocal' (which Python 2 lacks)
    limit = [0]
    foundArity = [False]
    
    # traceback return data structure changed in Py3.5 - normalize back to plain tuples
    if system_version[:2] >= (3,5):
        def extract_stack(limit=0):
            # special handling for Python 3.5.0 - extra deep call stack by 1
            offset = -3 if system_version == (3,5,0) else -2
            frame_summary = traceback.extract_stack(limit=-offset+limit-1)[offset]
            return [(frame_summary.filename, frame_summary.lineno)]
        def extract_tb(tb, limit=0):
            frames = traceback.extract_tb(tb, limit=limit)
            frame_summary = frames[-1]
            return [(frame_summary.filename, frame_summary.lineno)]
    else:
        extract_stack = traceback.extract_stack
        extract_tb = traceback.extract_tb
    
    # synthesize what would be returned by traceback.extract_stack at the call to 
    # user's parse action 'func', so that we don't incur call penalty at parse time
    
    LINE_DIFF = 6
    # IF ANY CODE CHANGES, EVEN JUST COMMENTS OR BLANK LINES, BETWEEN THE NEXT LINE AND 
    # THE CALL TO FUNC INSIDE WRAPPER, LINE_DIFF MUST BE MODIFIED!!!!
    this_line = extract_stack(limit=2)[-1]
    pa_call_line_synth = (this_line[0], this_line[1]+LINE_DIFF)

    def wrapper(*args):
        while 1:
            try:
                ret = func(*args[limit[0]:])
                foundArity[0] = True
                return ret
            except TypeError:
                # re-raise TypeErrors if they did not come from our arity testing
                if foundArity[0]:
                    raise
                else:
                    try:
                        tb = sys.exc_info()[-1]
                        if not extract_tb(tb, limit=2)[-1][:2] == pa_call_line_synth:
                            raise
                    finally:
                        del tb

                if limit[0] <= maxargs:
                    limit[0] += 1
                    continue
                raise

    # copy func name to wrapper for sensible debug output
    func_name = "<parse action>"
    try:
        func_name = getattr(func, '__name__', 
                            getattr(func, '__class__').__name__)
    except Exception:
        func_name = str(func)
    wrapper.__name__ = func_name

    return wrapper

class ParserElement(object):
    """Abstract base level parser element class."""
    # whitespace characters skipped before each match attempt, unless
    # overridden per-instance or via setDefaultWhitespaceChars()
    DEFAULT_WHITE_CHARS = " \n\t\r"
    # NOTE(review): presumably enables full stack traces on parse exceptions
    # when True - confirm against the exception-handling code elsewhere
    verbose_stacktrace = False

    @staticmethod
    def setDefaultWhitespaceChars( chars ):
        r"""
        Override the default set of whitespace characters (initially
        space, <TAB>, newline and carriage return) that are skipped
        between tokens by all subsequently created expressions.

        Example::
            # treat only space and tab as ignorable, making newlines significant
            ParserElement.setDefaultWhitespaceChars(" \t")
            OneOrMore(Word(alphas)).parseString("abc def\nghi jkl")  # -> ['abc', 'def']
        """
        ParserElement.DEFAULT_WHITE_CHARS = chars

    @staticmethod
    def inlineLiteralsUsing(cls):
        """
        Choose the class used to wrap plain string literals that are combined
        with parser expressions (the default is C{Literal}).

        Example::
            # change to Suppress so inline literals are dropped from results
            ParserElement.inlineLiteralsUsing(Suppress)
            date_str = integer("year") + '/' + integer("month") + '/' + integer("day")

            date_str.parseString("1999/12/31")  # -> ['1999', '12', '31']
        """
        ParserElement._literalStringClass = cls

    def __init__( self, savelist=False ):
        """Initialize parser-element state.

        :param savelist: if True, matched tokens are grouped as a sub-list
            in the results (stored as C{saveAsList}).
        """
        self.parseAction = list()
        self.failAction = None
        #~ self.name = "<unknown>"  # don't define self.name, let subclasses try/except upcall
        self.strRepr = None   # cached string representation
        self.resultsName = None
        self.saveAsList = savelist
        self.skipWhitespace = True
        self.whiteChars = ParserElement.DEFAULT_WHITE_CHARS
        self.copyDefaultWhiteChars = True
        self.mayReturnEmpty = False # used when checking for left-recursion
        self.keepTabs = False
        self.ignoreExprs = list()   # expressions to skip (comments, etc.)
        self.debug = False
        self.streamlined = False
        self.mayIndexError = True # used to optimize exception handling for subclasses that don't advance parse index
        self.errmsg = ""
        self.modalResults = True # used to mark results names as modal (report only last) or cumulative (list all)
        self.debugActions = ( None, None, None ) #custom debug actions
        self.re = None   # compiled regex, for regex-backed subclasses
        self.callPreparse = True # used to avoid redundant calls to preParse
        self.callDuringTry = False

    def copy( self ):
        """
        Make a copy of this C{ParserElement}.  Useful for defining different
        parse actions for the same parsing pattern, using copies of the
        original parse element.  C{expr()} is an equivalent shorthand for
        C{expr.copy()}.

        Example::
            integer = Word(nums).setParseAction(lambda toks: int(toks[0]))
            integerK = integer.copy().addParseAction(lambda toks: toks[0]*1024) + Suppress("K")
            integerM = integer.copy().addParseAction(lambda toks: toks[0]*1024*1024) + Suppress("M")
        """
        duplicate = copy.copy(self)
        # parse actions and ignore expressions must not be shared lists
        duplicate.parseAction = list(self.parseAction)
        duplicate.ignoreExprs = list(self.ignoreExprs)
        if self.copyDefaultWhiteChars:
            duplicate.whiteChars = ParserElement.DEFAULT_WHITE_CHARS
        return duplicate

    def setName( self, name ):
        """
        Assign a descriptive name to this expression, making debugging and
        exception messages clearer.  Returns self for chaining.

        Example::
            Word(nums).setName("integer").parseString("ABC")
            # -> Exception: Expected integer (at char 0), (line:1, col:1)
        """
        self.name = name
        self.errmsg = "Expected " + name
        # keep any already-created exception object's message in sync
        if hasattr(self, "exception"):
            self.exception.msg = self.errmsg
        return self

    def setResultsName( self, name, listAllMatches=False ):
        """
        Define a name for referencing matching tokens as a nested attribute
        of the returned parse results.

        NOTE: this returns a *copy* of the original C{ParserElement}, so a
        basic element can be reused under several different names.  The
        abbreviated form C{expr("name")} is equivalent, and a trailing C{"*"}
        on the name (C{expr("name*")}) implies C{listAllMatches=True}.

        Example::
            date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
        """
        clone = self.copy()
        if name.endswith("*"):
            # "name*" shorthand: accumulate all matches under this name
            name = name[:-1]
            listAllMatches = True
        clone.resultsName = name
        clone.modalResults = not listAllMatches
        return clone

    def setBreak(self,breakFlag = True):
        """Method to invoke the Python pdb debugger when this element is
           about to be parsed. Set C{breakFlag} to True to enable, False to
           disable.  Returns self.
        """
        if breakFlag:
            # wrap the bound _parse so pdb triggers just before matching
            _parseMethod = self._parse
            def breaker(instring, loc, doActions=True, callPreParse=True):
                import pdb
                pdb.set_trace()
                return _parseMethod( instring, loc, doActions, callPreParse )
            # remember the original so setBreak(False) can restore it
            breaker._originalParseMethod = _parseMethod
            self._parse = breaker
        else:
            if hasattr(self._parse,"_originalParseMethod"):
                self._parse = self._parse._originalParseMethod
        return self

    def setParseAction( self, *fns, **kwargs ):
        """
        Replace this expression's parse actions with C{fns}.  Returns self.

        Each parse action is a callable with 0-3 arguments, invoked as
        C{fn(s,loc,toks)}, C{fn(loc,toks)}, C{fn(toks)}, or C{fn()}, where:
         - s    = the original string being parsed (see note below)
         - loc  = the location of the matching substring
         - toks = the matched tokens, as a C{L{ParseResults}} object
        If a function returns a non-None value, that value replaces the
        matched tokens; otherwise the tokens pass through unchanged.

        Optional keyword arguments:
         - callDuringTry = (default=C{False}) run the action during
           lookaheads and alternate testing as well

        Note: the default parsing behavior is to expand tabs in the input
        string before parsing.  See L{I{parseString}<parseString>} for how to
        keep string/location/line/column views consistent when the input
        contains C{<TAB>}s.

        Example::
            # convert matched number fields to ints at parse time:
            integer = Word(nums).setParseAction(lambda toks: int(toks[0]))
            date_str = integer + '/' + integer + '/' + integer
            date_str.parseString("1999/12/31")  # -> [1999, '/', 12, '/', 31]
        """
        self.parseAction = [_trim_arity(fn) for fn in fns]
        self.callDuringTry = kwargs.get("callDuringTry", False)
        return self

    def addParseAction( self, *fns, **kwargs ):
        """
        Append C{fns} to this expression's existing parse actions (see
        L{I{setParseAction}<setParseAction>} for signatures).  Returns self.
        """
        self.parseAction += [_trim_arity(fn) for fn in fns]
        self.callDuringTry = self.callDuringTry or kwargs.get("callDuringTry", False)
        return self

    def addCondition(self, *fns, **kwargs):
        """Add a boolean predicate function to expression's list of parse actions. See 
        L{I{setParseAction}<setParseAction>} for function call signatures. Unlike C{setParseAction}, 
        functions passed to C{addCondition} need to return boolean success/fail of the condition.

        Optional keyword arguments:
         - message = define a custom message to be used in the raised exception
         - fatal   = if True, will raise ParseFatalException to stop parsing immediately; otherwise will raise ParseException
         
        Example::
            integer = Word(nums).setParseAction(lambda toks: int(toks[0]))
            year_int = integer.copy()
            year_int.addCondition(lambda toks: toks[0] >= 2000, message="Only support years 2000 and later")
            date_str = year_int + '/' + integer + '/' + integer

            result = date_str.parseString("1999/12/31")  # -> Exception: Only support years 2000 and later (at char 0), (line:1, col:1)
        """
        msg = kwargs.get("message", "failed user-defined condition")
        exc_type = ParseFatalException if kwargs.get("fatal", False) else ParseException
        for fn in fns:
            # BUGFIX: bind fn as a default argument.  A plain closure would
            # late-bind the loop variable, so with multiple fns every
            # registered condition would test only the *last* fn.
            def pa(s, l, t, fn=fn):
                if not bool(_trim_arity(fn)(s, l, t)):
                    raise exc_type(s, l, msg)
            self.parseAction.append(pa)
        self.callDuringTry = self.callDuringTry or kwargs.get("callDuringTry", False)
        return self

    def setFailAction( self, fn ):
        """Define action to perform if parsing fails at this expression.
           Fail action fn is a callable taking C{fn(s,loc,expr,err)} where:
            - s = string being parsed
            - loc = location where expression match was attempted and failed
            - expr = the parse expression that failed
            - err = the exception thrown
           The function returns no value; it may throw
           C{L{ParseFatalException}} to stop parsing immediately.
           Returns self."""
        self.failAction = fn
        return self

    def _skipIgnorables( self, instring, loc ):
        # Repeatedly consume matches of the ignore-expressions (comments and
        # the like) until a full pass makes no further progress.
        progressed = True
        while progressed:
            progressed = False
            for ignoreExpr in self.ignoreExprs:
                try:
                    while 1:
                        loc, _ = ignoreExpr._parse(instring, loc)
                        progressed = True
                except ParseException:
                    pass
        return loc

    def preParse( self, instring, loc ):
        # Advance past any ignorable expressions, then past leading whitespace.
        if self.ignoreExprs:
            loc = self._skipIgnorables(instring, loc)

        if self.skipWhitespace:
            white = self.whiteChars
            end = len(instring)
            while loc < end and instring[loc] in white:
                loc += 1

        return loc

    def parseImpl( self, instring, loc, doActions=True ):
        # default implementation: match the empty string (consume nothing)
        return loc, []

    def postParse( self, instring, loc, tokenlist ):
        # hook for subclasses to massage tokens after a match; default no-op
        return tokenlist

    #~ @profile
    def _parseNoCache( self, instring, loc, doActions=True, callPreParse=True ):
        """Core (uncached) match driver: pre-parse, parseImpl, postParse,
        then run parse actions and debug hooks.  Returns (new loc, ParseResults).
        """
        debugging = ( self.debug ) #and doActions )

        if debugging or self.failAction:
            # slow path: fire debug/fail hooks around the match attempt
            #~ print ("Match",self,"at loc",loc,"(%d,%d)" % ( lineno(loc,instring), col(loc,instring) ))
            if (self.debugActions[0] ):
                self.debugActions[0]( instring, loc, self )
            if callPreParse and self.callPreparse:
                preloc = self.preParse( instring, loc )
            else:
                preloc = loc
            tokensStart = preloc
            try:
                try:
                    loc,tokens = self.parseImpl( instring, preloc, doActions )
                except IndexError:
                    # ran off the end of instring - report as a normal failure
                    raise ParseException( instring, len(instring), self.errmsg, self )
            except ParseBaseException as err:
                #~ print ("Exception raised:", err)
                if self.debugActions[2]:
                    self.debugActions[2]( instring, tokensStart, self, err )
                if self.failAction:
                    self.failAction( instring, tokensStart, self, err )
                raise
        else:
            # fast path: no debugging and no fail action
            if callPreParse and self.callPreparse:
                preloc = self.preParse( instring, loc )
            else:
                preloc = loc
            tokensStart = preloc
            # guard with try/except only when the subclass may raise IndexError
            if self.mayIndexError or loc >= len(instring):
                try:
                    loc,tokens = self.parseImpl( instring, preloc, doActions )
                except IndexError:
                    raise ParseException( instring, len(instring), self.errmsg, self )
            else:
                loc,tokens = self.parseImpl( instring, preloc, doActions )

        tokens = self.postParse( instring, loc, tokens )

        retTokens = ParseResults( tokens, self.resultsName, asList=self.saveAsList, modal=self.modalResults )
        if self.parseAction and (doActions or self.callDuringTry):
            if debugging:
                try:
                    # a parse action returning non-None replaces the tokens
                    for fn in self.parseAction:
                        tokens = fn( instring, tokensStart, retTokens )
                        if tokens is not None:
                            retTokens = ParseResults( tokens,
                                                      self.resultsName,
                                                      asList=self.saveAsList and isinstance(tokens,(ParseResults,list)),
                                                      modal=self.modalResults )
                except ParseBaseException as err:
                    #~ print "Exception raised in user parse action:", err
                    if (self.debugActions[2] ):
                        self.debugActions[2]( instring, tokensStart, self, err )
                    raise
            else:
                for fn in self.parseAction:
                    tokens = fn( instring, tokensStart, retTokens )
                    if tokens is not None:
                        retTokens = ParseResults( tokens,
                                                  self.resultsName,
                                                  asList=self.saveAsList and isinstance(tokens,(ParseResults,list)),
                                                  modal=self.modalResults )

        if debugging:
            #~ print ("Matched",self,"->",retTokens.asList())
            if (self.debugActions[1] ):
                self.debugActions[1]( instring, tokensStart, loc, self, retTokens )

        return loc, retTokens

    def tryParse( self, instring, loc ):
        # Match without running parse actions; a fatal exception is downgraded
        # to an ordinary ParseException so alternatives can still be tried.
        try:
            newloc = self._parse( instring, loc, doActions=False )[0]
        except ParseFatalException:
            raise ParseException( instring, loc, self.errmsg, self )
        return newloc
    
    def canParseNext(self, instring, loc):
        """Return True if this expression can match at C{loc}, else False."""
        try:
            self.tryParse(instring, loc)
        except (ParseException, IndexError):
            return False
        return True

    class _UnboundedCache(object):
        """Packrat memo cache with no size limit."""
        def __init__(self):
            # Sentinel returned by get() when a key is absent, so that None
            # remains a storable cached value.
            self.not_in_cache = sentinel = object()
            data = {}

            def get(self, key):
                return data.get(key, sentinel)

            def set(self, key, value):
                data[key] = value

            def clear(self):
                data.clear()

            # Bind the closures as instance methods; 'data' stays private to
            # this __init__ call.
            self.get = types.MethodType(get, self)
            self.set = types.MethodType(set, self)
            self.clear = types.MethodType(clear, self)

    if _OrderedDict is not None:
        class _FifoCache(object):
            # Bounded memo cache that evicts its oldest entry once more than
            # `size` items are stored (OrderedDict-backed variant).
            def __init__(self, size):
                # Unique sentinel returned by get() for missing keys, so that
                # None can be cached as a legitimate value.
                self.not_in_cache = not_in_cache = object()

                cache = _OrderedDict()

                def get(self, key):
                    return cache.get(key, not_in_cache)

                def set(self, key, value):
                    cache[key] = value
                    if len(cache) > size:
                        # popitem(False) removes the oldest (FIFO) entry
                        cache.popitem(False)

                def clear(self):
                    cache.clear()

                # Bind the closures as instance methods; the cache mapping
                # itself stays private to this __init__ call.
                self.get = types.MethodType(get, self)
                self.set = types.MethodType(set, self)
                self.clear = types.MethodType(clear, self)

    else:
        # Fallback for Pythons without collections.OrderedDict: a plain dict
        # plus a deque recording insertion order for FIFO eviction.
        class _FifoCache(object):
            def __init__(self, size):
                self.not_in_cache = not_in_cache = object()

                cache = {}
                # NOTE(review): the deque is created with maxlen=size, so an
                # append can silently drop its oldest key without removing it
                # from `cache`; packrat parsing only set()s each key once per
                # miss, which appears to keep the two structures consistent --
                # confirm before reusing this cache elsewhere.
                key_fifo = collections.deque([], size)

                def get(self, key):
                    return cache.get(key, not_in_cache)

                def set(self, key, value):
                    cache[key] = value
                    if len(cache) > size:
                        cache.pop(key_fifo.popleft(), None)
                    key_fifo.append(key)

                def clear(self):
                    cache.clear()
                    key_fifo.clear()

                self.get = types.MethodType(get, self)
                self.set = types.MethodType(set, self)
                self.clear = types.MethodType(clear, self)

    # argument cache for optimizing repeated calls when backtracking through recursive expressions
    # packrat_cache is replaced with an _UnboundedCache/_FifoCache instance by
    # enablePackrat(); a plain dict is placed here only so that resetCache()
    # can be called safely before packrat parsing has been enabled
    packrat_cache = {} # this is set later by enablePackrat(); this is here so that resetCache() doesn't fail
    packrat_cache_lock = RLock()
    # [hit, miss] counters, updated under packrat_cache_lock in _parseCache()
    packrat_cache_stats = [0, 0]

    # this method gets repeatedly called during backtracking with the same arguments -
    # we can cache these arguments and save ourselves the trouble of re-parsing the contained expression
    def _parseCache( self, instring, loc, doActions=True, callPreParse=True ):
        """Memoizing wrapper around _parseNoCache, installed by enablePackrat()."""
        HIT, MISS = 0, 1
        lookup = (self, instring, loc, callPreParse, doActions)
        with ParserElement.packrat_cache_lock:
            cache = ParserElement.packrat_cache
            value = cache.get(lookup)
            if value is not cache.not_in_cache:
                # cache hit - either replay the stored exception or return a
                # fresh copy of the cached tokens
                ParserElement.packrat_cache_stats[HIT] += 1
                if isinstance(value, Exception):
                    raise value
                return (value[0], value[1].copy())
            ParserElement.packrat_cache_stats[MISS] += 1
            try:
                value = self._parseNoCache(instring, loc, doActions, callPreParse)
            except ParseBaseException as pe:
                # cache a copy of the exception, without the traceback
                cache.set(lookup, pe.__class__(*pe.args))
                raise
            else:
                cache.set(lookup, (value[0], value[1].copy()))
                return value

    # default to the non-memoizing parser; enablePackrat() rebinds this to _parseCache
    _parse = _parseNoCache

    @staticmethod
    def resetCache():
        """Empty the packrat cache and zero its hit/miss statistics."""
        ParserElement.packrat_cache.clear()
        ParserElement.packrat_cache_stats[:] = [0 for _ in ParserElement.packrat_cache_stats]

    # guard so that enablePackrat() installs the cache machinery only once
    _packratEnabled = False
    @staticmethod
    def enablePackrat(cache_size_limit=128):
        """Enables "packrat" parsing, which adds memoizing to the parsing logic.
           Repeated parse attempts at the same string location (common in
           complex grammars) then return a cached value instead of re-running
           the parsing/validating code.  Both successful results and parse
           exceptions are memoized.

           Parameters:
            - cache_size_limit - (default=C{128}) - if an integer value is provided
              will limit the size of the packrat cache; if None is passed, then
              the cache size will be unbounded; if 0 is passed, the cache will
              be effectively disabled.

           This speedup may break existing programs that use parse actions that
           have side-effects, so packrat parsing is disabled by default.  To
           activate it, your program must call the class method
           C{ParserElement.enablePackrat()}.  If your program uses C{psyco} to
           "compile as you go", you must call C{enablePackrat} before calling
           C{psyco.full()}, or Python will crash.  For best results, call
           C{enablePackrat()} immediately after importing pyparsing.

           Example::
               import pyparsing
               pyparsing.ParserElement.enablePackrat()
        """
        if ParserElement._packratEnabled:
            return
        ParserElement._packratEnabled = True
        if cache_size_limit is None:
            ParserElement.packrat_cache = ParserElement._UnboundedCache()
        else:
            ParserElement.packrat_cache = ParserElement._FifoCache(cache_size_limit)
        ParserElement._parse = ParserElement._parseCache

    def parseString( self, instring, parseAll=False ):
        """
        Run this parse expression against C{instring} and return the matched
        tokens.  This is the primary entry point for client code once the
        grammar has been assembled.

        Set C{parseAll} to True to require that the entire input string be
        consumed (equivalent to ending the grammar with C{L{StringEnd()}}).

        Note: unless C{L{parseWithTabs}} has been called, the input is passed
        through C{expandtabs()} first so reported column numbers stay
        consistent.  Parse actions that use C{loc} to index into the string
        should either call C{parseWithTabs}, use the parse action's C{s}
        argument, or pre-expand tabs themselves.

        Example::
            Word('a').parseString('aaaaabaaa')  # -> ['aaaaa']
            Word('a').parseString('aaaaabaaa', parseAll=True)  # -> Exception: Expected end of text
        """
        ParserElement.resetCache()
        if not self.streamlined:
            self.streamline()
        for ignorable in self.ignoreExprs:
            ignorable.streamline()
        if not self.keepTabs:
            instring = instring.expandtabs()
        try:
            loc, tokens = self._parse( instring, 0 )
            if parseAll:
                loc = self.preParse( instring, loc )
                ender = Empty() + StringEnd()
                ender._parse( instring, loc )
        except ParseBaseException as exc:
            if ParserElement.verbose_stacktrace:
                raise
            # re-raise from here to trim pyparsing's internal stack frames
            raise exc
        else:
            return tokens

    def scanString( self, instring, maxMatches=_MAX_INT, overlap=False ):
        """
        Scan the input string for expression matches.  Each match yields a
        (tokens, startLoc, endLoc) triple.  May be called with optional
        C{maxMatches} argument, to clip scanning after 'n' matches are found.
        If C{overlap} is specified, then overlapping matches will be reported.

        Note that the start and end locations are reported relative to the string
        being parsed.  See L{I{parseString}<parseString>} for more information on parsing
        strings with embedded tabs.

        Example::
            source = "sldjf123lsdjjkf345sldkjf879lkjsfd987"
            for tokens,start,end in Word(alphas).scanString(source):
                print(' '*start + tokens[0])
        """
        if not self.streamlined:
            self.streamline()
        for e in self.ignoreExprs:
            e.streamline()

        if not self.keepTabs:
            instring = _ustr(instring).expandtabs()
        instrlen = len(instring)
        loc = 0
        preparseFn = self.preParse
        parseFn = self._parse
        ParserElement.resetCache()
        matches = 0
        try:
            while loc <= instrlen and matches < maxMatches:
                try:
                    preloc = preparseFn( instring, loc )
                    nextLoc,tokens = parseFn( instring, preloc, callPreParse=False )
                except ParseException:
                    loc = preloc+1
                else:
                    if nextLoc > loc:
                        matches += 1
                        yield tokens, preloc, nextLoc
                        if overlap:
                            nextloc = preparseFn( instring, loc )
                            if nextloc > loc:
                                # BUGFIX: advance only to the preparse location
                                # (was 'loc = nextLoc', which jumped to the end
                                # of the match and effectively disabled
                                # overlapping scans whenever whitespace was
                                # skipped)
                                loc = nextloc
                            else:
                                loc += 1
                        else:
                            loc = nextLoc
                    else:
                        # zero-width match - step forward to avoid looping
                        loc = preloc+1
        except ParseBaseException as exc:
            if ParserElement.verbose_stacktrace:
                raise
            else:
                # catch and re-raise exception from here, clears out pyparsing internal stack trace
                raise exc

    def transformString( self, instring ):
        """
        Extension to C{L{scanString}}: rebuilds the input string, substituting
        the (possibly modified) tokens produced by parse actions for each
        matched span, and returns the transformed string.

        Example::
            wd = Word(alphas)
            wd.setParseAction(lambda toks: toks[0].title())

            print(wd.transformString("now is the winter of our discontent made glorious summer by this sun of york."))
        Prints::
            Now Is The Winter Of Our Discontent Made Glorious Summer By This Sun Of York.
        """
        pieces = []
        last_end = 0
        # force preservation of <TAB>s so the locations reported by scanString
        # line up with indices into the original string
        self.keepTabs = True
        try:
            for toks, start, end in self.scanString( instring ):
                pieces.append( instring[last_end:start] )
                if toks:
                    if isinstance(toks, ParseResults):
                        pieces += toks.asList()
                    elif isinstance(toks, list):
                        pieces += toks
                    else:
                        pieces.append(toks)
                last_end = end
            pieces.append(instring[last_end:])
            pieces = [p for p in pieces if p]
            return "".join(map(_ustr, _flatten(pieces)))
        except ParseBaseException as exc:
            if ParserElement.verbose_stacktrace:
                raise
            # re-raise from here to trim pyparsing's internal stack frames
            raise exc

    def searchString( self, instring, maxMatches=_MAX_INT ):
        """
        Extension to C{L{scanString}} that returns just the matched tokens,
        collected into a single C{ParseResults}.  May be called with optional
        C{maxMatches} argument, to clip searching after 'n' matches are found.

        Example::
            # a capitalized word starts with an uppercase letter, followed by zero or more lowercase letters
            cap_word = Word(alphas.upper(), alphas.lower())

            print(cap_word.searchString("More than Iron, more than Lead, more than Gold I need Electricity"))
        prints::
            ['More', 'Iron', 'Lead', 'Gold', 'I']
        """
        try:
            found = [toks for toks, start, end in self.scanString(instring, maxMatches)]
            return ParseResults(found)
        except ParseBaseException as exc:
            if ParserElement.verbose_stacktrace:
                raise
            # re-raise from here to trim pyparsing's internal stack frames
            raise exc

    def split(self, instring, maxsplit=_MAX_INT, includeSeparators=False):
        """
        Generator that splits C{instring} wherever this expression matches.
        C{maxsplit} limits the number of splits; set C{includeSeparators}
        (default=C{False}) to also yield the text matched by the separator.

        Example::
            punc = oneOf(list(".,;:/-!?"))
            print(list(punc.split("This, this?, this sentence, is badly punctuated!")))
        prints::
            ['This', ' this', '', ' this sentence', ' is badly punctuated', '']
        """
        last_end = 0
        for toks, start, end in self.scanString(instring, maxMatches=maxsplit):
            yield instring[last_end:start]
            if includeSeparators:
                yield toks[0]
            last_end = end
        yield instring[last_end:]

    def __add__(self, other ):
        """
        Implementation of + operator - returns C{L{And}}.  A plain string
        operand is first promoted to a parser element (C{L{Literal}} by default).

        Example::
            greet = Word(alphas) + "," + Word(alphas) + "!"
            greet.parseString("Hello, World!")  # -> ['Hello', ',', 'World', '!']
        """
        if isinstance( other, basestring ):
            other = ParserElement._literalStringClass( other )
        if isinstance( other, ParserElement ):
            return And( [ self, other ] )
        warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
                SyntaxWarning, stacklevel=2)
        return None

    def __radd__(self, other ):
        """
        Implementation of + operator when left operand is not a C{L{ParserElement}};
        promotes the left operand and delegates to its C{__add__}.
        """
        if isinstance( other, basestring ):
            other = ParserElement._literalStringClass( other )
        if isinstance( other, ParserElement ):
            return other + self
        warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
                SyntaxWarning, stacklevel=2)
        return None

    def __sub__(self, other):
        """
        Implementation of - operator, returns C{L{And}} with an error-stop
        marker so that a failure after this point is reported as fatal.
        """
        if isinstance( other, basestring ):
            other = ParserElement._literalStringClass( other )
        if isinstance( other, ParserElement ):
            return And( [ self, And._ErrorStop(), other ] )
        warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
                SyntaxWarning, stacklevel=2)
        return None

    def __rsub__(self, other ):
        """
        Implementation of - operator when left operand is not a C{L{ParserElement}};
        promotes the left operand and delegates to its C{__sub__}.
        """
        if isinstance( other, basestring ):
            other = ParserElement._literalStringClass( other )
        if isinstance( other, ParserElement ):
            return other - self
        warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
                SyntaxWarning, stacklevel=2)
        return None

    def __mul__(self,other):
        """
        Implementation of * operator, allows use of C{expr * 3} in place of
        C{expr + expr + expr}.  Expressions may also be multiplied by a 2-integer
        tuple, similar to C{{min,max}} multipliers in regular expressions.  Tuples
        may also include C{None} as in:
         - C{expr*(n,None)} or C{expr*(n,)} is equivalent
              to C{expr*n + L{ZeroOrMore}(expr)}
              (read as "at least n instances of C{expr}")
         - C{expr*(None,n)} is equivalent to C{expr*(0,n)}
              (read as "0 to n instances of C{expr}")
         - C{expr*(None,None)} is equivalent to C{L{ZeroOrMore}(expr)}
         - C{expr*(1,None)} is equivalent to C{L{OneOrMore}(expr)}

        Note that C{expr*(None,n)} does not raise an exception if
        more than n exprs exist in the input stream; that is,
        C{expr*(None,n)} does not enforce a maximum number of expr
        occurrences.  If this behavior is desired, then write
        C{expr*(None,n) + ~expr}

        Raises C{TypeError} for unsupported multiplier types and C{ValueError}
        for negative or zero repetition counts.
        """
        if isinstance(other,int):
            minElements, optElements = other,0
        elif isinstance(other,tuple):
            # normalize to a 2-tuple, padding with None
            other = (other + (None, None))[:2]
            if other[0] is None:
                other = (0, other[1])
            if isinstance(other[0],int) and other[1] is None:
                if other[0] == 0:
                    return ZeroOrMore(self)
                if other[0] == 1:
                    return OneOrMore(self)
                else:
                    return self*other[0] + ZeroOrMore(self)
            elif isinstance(other[0],int) and isinstance(other[1],int):
                minElements, optElements = other
                optElements -= minElements
            else:
                # BUGFIX: apply %-formatting to the message (the format string
                # and its arguments were previously passed as separate
                # TypeError args, so the placeholders were never filled in)
                raise TypeError("cannot multiply 'ParserElement' and ('%s','%s') objects"
                                % (type(other[0]), type(other[1])))
        else:
            # BUGFIX: same %-formatting fix as above
            raise TypeError("cannot multiply 'ParserElement' and '%s' objects"
                            % type(other))

        if minElements < 0:
            raise ValueError("cannot multiply ParserElement by negative value")
        if optElements < 0:
            raise ValueError("second tuple value must be greater or equal to first tuple value")
        if minElements == optElements == 0:
            raise ValueError("cannot multiply ParserElement by 0 or (0,0)")

        if (optElements):
            # build Optional(expr + Optional(expr + ...)) for the optional tail
            def makeOptionalList(n):
                if n>1:
                    return Optional(self + makeOptionalList(n-1))
                else:
                    return Optional(self)
            if minElements:
                if minElements == 1:
                    ret = self + makeOptionalList(optElements)
                else:
                    ret = And([self]*minElements) + makeOptionalList(optElements)
            else:
                ret = makeOptionalList(optElements)
        else:
            if minElements == 1:
                ret = self
            else:
                ret = And([self]*minElements)
        return ret

    def __rmul__(self, other):
        # Multiplication by an int/tuple is symmetric; delegate to __mul__.
        return self.__mul__(other)

    def __or__(self, other ):
        """
        Implementation of | operator - returns C{L{MatchFirst}}.  A plain
        string operand is first promoted to a parser element.
        """
        if isinstance( other, basestring ):
            other = ParserElement._literalStringClass( other )
        if isinstance( other, ParserElement ):
            return MatchFirst( [ self, other ] )
        warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
                SyntaxWarning, stacklevel=2)
        return None

    def __ror__(self, other ):
        """
        Implementation of | operator when left operand is not a C{L{ParserElement}};
        promotes the left operand and delegates to its C{__or__}.
        """
        if isinstance( other, basestring ):
            other = ParserElement._literalStringClass( other )
        if isinstance( other, ParserElement ):
            return other | self
        warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
                SyntaxWarning, stacklevel=2)
        return None

    def __xor__(self, other ):
        """
        Implementation of ^ operator - returns C{L{Or}}.  A plain string
        operand is first promoted to a parser element.
        """
        if isinstance( other, basestring ):
            other = ParserElement._literalStringClass( other )
        if isinstance( other, ParserElement ):
            return Or( [ self, other ] )
        warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
                SyntaxWarning, stacklevel=2)
        return None

    def __rxor__(self, other ):
        """
        Implementation of ^ operator when left operand is not a C{L{ParserElement}};
        promotes the left operand and delegates to its C{__xor__}.
        """
        if isinstance( other, basestring ):
            other = ParserElement._literalStringClass( other )
        if isinstance( other, ParserElement ):
            return other ^ self
        warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
                SyntaxWarning, stacklevel=2)
        return None

    def __and__(self, other ):
        """
        Implementation of & operator - returns C{L{Each}}.  A plain string
        operand is first promoted to a parser element.
        """
        if isinstance( other, basestring ):
            other = ParserElement._literalStringClass( other )
        if isinstance( other, ParserElement ):
            return Each( [ self, other ] )
        warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
                SyntaxWarning, stacklevel=2)
        return None

    def __rand__(self, other ):
        """
        Implementation of & operator when left operand is not a C{L{ParserElement}};
        promotes the left operand and delegates to its C{__and__}.
        """
        if isinstance( other, basestring ):
            other = ParserElement._literalStringClass( other )
        if isinstance( other, ParserElement ):
            return other & self
        warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
                SyntaxWarning, stacklevel=2)
        return None

    def __invert__( self ):
        """
        Implementation of ~ operator - returns C{L{NotAny}} (negative lookahead).
        """
        return NotAny( self )

    def __call__(self, name=None):
        """
        Shortcut for C{L{setResultsName}}, with C{listAllMatches=False}.

        If C{name} is given with a trailing C{'*'} character, then C{listAllMatches} will be
        passed as C{True}.

        If C{name} is omitted, same as calling C{L{copy}}.

        Example::
            # these are equivalent
            userdata = Word(alphas).setResultsName("name") + Word(nums+"-").setResultsName("socsecno")
            userdata = Word(alphas)("name") + Word(nums+"-")("socsecno")
        """
        if name is None:
            return self.copy()
        return self.setResultsName(name)

    def suppress( self ):
        """
        Suppresses the output of this C{ParserElement}; useful to keep punctuation from
        cluttering up returned output.  Returns a new C{Suppress} wrapping self.
        """
        return Suppress( self )

    def leaveWhitespace( self ):
        """
        Disable the skipping of whitespace before matching this expression's
        pattern.  Normally only used internally by pyparsing, but may be
        needed in whitespace-sensitive grammars.  Returns self for chaining.
        """
        self.skipWhitespace = False
        return self

    def setWhitespaceChars( self, chars ):
        """
        Replace the default set of skippable whitespace characters with
        C{chars}.  Returns self for chaining.
        """
        self.copyDefaultWhiteChars = False
        self.whiteChars = chars
        self.skipWhitespace = True
        return self

    def parseWithTabs( self ):
        """
        Keep C{<TAB>} characters intact instead of expanding them to spaces
        before parsing.  Must be called before C{parseString} when the grammar
        contains elements that match C{<TAB>}.  Returns self for chaining.
        """
        self.keepTabs = True
        return self

    def ignore( self, other ):
        """
        Register an expression (e.g. a comment) to be skipped while pattern
        matching; may be called repeatedly to register several ignorable
        patterns.  Returns self for chaining.

        Example::
            patt = OneOrMore(Word(alphas))
            patt.parseString('ablaj /* comment */ lskjd') # -> ['ablaj']

            patt.ignore(cStyleComment)
            patt.parseString('ablaj /* comment */ lskjd') # -> ['ablaj', 'lskjd']
        """
        if isinstance(other, basestring):
            other = Suppress(other)

        if not isinstance( other, Suppress ):
            # wrap a copy so the caller's expression is left untouched
            self.ignoreExprs.append( Suppress( other.copy() ) )
        elif other not in self.ignoreExprs:
            self.ignoreExprs.append(other)
        return self

    def setDebugActions( self, startAction, successAction, exceptionAction ):
        """
        Enable display of debugging messages while doing pattern matching,
        using the given callbacks for match-start, match-success, and
        match-failure events (None selects the module default for each).
        Returns self for chaining.
        """
        self.debugActions = (
            startAction or _defaultStartDebugAction,
            successAction or _defaultSuccessDebugAction,
            exceptionAction or _defaultExceptionDebugAction,
        )
        self.debug = True
        return self

    def setDebug( self, flag=True ):
        """
        Enable (C{flag}=True) or disable (C{flag}=False) display of debugging
        messages while doing pattern matching.  Returns self for chaining.

        Example::
            wd = Word(alphas).setName("alphaword")
            integer = Word(nums).setName("numword")
            term = wd | integer

            # turn on debugging for wd
            wd.setDebug()

            OneOrMore(term).parseString("abc 123 xyz 890")

        prints messages of the form::
            Match alphaword at loc 0(1,1)
            Matched alphaword -> ['abc']
            Exception raised:Expected alphaword (at char 4), (line:1, col:5)

        These come from the default debug actions - custom actions can be
        installed with L{setDebugActions}.  Before each match attempt a
        C{"Match <exprname> at loc <n>(<line>,<col>)"} line is shown, followed
        by either a C{"Matched"} or an C{"Exception raised"} line.  Use
        L{setName} to give expressions readable names (the auto-generated name
        for a C{Word} looks like C{"W:(ABCD...)"}).
        """
        if not flag:
            self.debug = False
        else:
            self.setDebugActions( _defaultStartDebugAction, _defaultSuccessDebugAction, _defaultExceptionDebugAction )
        return self

    def __str__( self ):
        # Human-readable form is the expression's name attribute.
        return self.name

    def __repr__( self ):
        # Same text as __str__, routed through the module's _ustr helper.
        return _ustr(self)

    def streamline( self ):
        """
        Mark this expression as streamlined and drop any cached string
        representation; subclasses extend this with real optimizations.
        Returns self for chaining.
        """
        self.strRepr = None
        self.streamlined = True
        return self

    def checkRecursion( self, parseElementList ):
        # Hook for detecting infinitely recursive grammars; a no-op for simple
        # elements (container subclasses provide the real implementation).
        pass

    def validate( self, validateTrace=[] ):
        """
        Check defined expressions for valid structure, check for infinite recursive definitions.

        NOTE(review): C{validateTrace} is a mutable default argument, but it is
        never read or written in this base implementation.
        """
        self.checkRecursion( [] )

    def parseFile( self, file_or_filename, parseAll=False ):
        """
        Execute the parse expression on the given file or filename.  A file
        object is read directly; a filename is opened, read, and closed before
        parsing.
        """
        try:
            contents = file_or_filename.read()
        except AttributeError:
            # not file-like - treat it as a path
            with open(file_or_filename, "r") as f:
                contents = f.read()
        try:
            return self.parseString(contents, parseAll)
        except ParseBaseException as exc:
            if ParserElement.verbose_stacktrace:
                raise
            # re-raise from here to trim pyparsing's internal stack frames
            raise exc

    def __eq__(self,other):
        # Two ParserElements are equal if identical or if all their attributes
        # match; comparing against a string tries a full parse of that string.
        if isinstance(other, ParserElement):
            return self is other or vars(self) == vars(other)
        elif isinstance(other, basestring):
            return self.matches(other)
        else:
            # NOTE(review): this compares the super() proxy object itself with
            # `other` (it is not a super().__eq__ call), so for unrelated types
            # it is effectively always False -- confirm before relying on it.
            return super(ParserElement,self)==other

    def __ne__(self,other):
        # Defined as the inverse of __eq__.
        return not (self == other)

    def __hash__(self):
        # Hash by identity (even though __eq__ compares by value), so
        # expressions can serve as dict/cache keys cheaply.
        return hash(id(self))

    def __req__(self,other):
        # Reflected equality - delegates to __eq__.
        return self == other

    def __rne__(self,other):
        # Reflected inequality - inverse of __eq__.
        return not (self == other)

    def matches(self, testString, parseAll=True):
        """
        Quick boolean test of this parser against C{testString}; handy for
        inline micro-tests while building up a larger grammar.

        Parameters:
         - testString - to test against this expression for a match
         - parseAll - (default=C{True}) - flag to pass to C{L{parseString}} when running tests

        Example::
            expr = Word(nums)
            assert expr.matches("100")
        """
        try:
            self.parseString(_ustr(testString), parseAll=parseAll)
        except ParseBaseException:
            return False
        return True
                
    def runTests(self, tests, parseAll=True, comment='#', fullDump=True, printResults=True, failureTests=False):
        """
        Execute the parse expression on a series of test strings, showing each
        test, the parsed results or where the parse failed. Quick and easy way to
        run a parse expression against a list of sample strings.
           
        Parameters:
         - tests - a list of separate test strings, or a multiline string of test strings
         - parseAll - (default=C{True}) - flag to pass to C{L{parseString}} when running tests           
         - comment - (default=C{'#'}) - expression for indicating embedded comments in the test 
              string; pass None to disable comment filtering
         - fullDump - (default=C{True}) - dump results as list followed by results names in nested outline;
              if False, only dump nested list
         - printResults - (default=C{True}) prints test output to stdout
         - failureTests - (default=C{False}) indicates if these tests are expected to fail parsing

        Returns: a (success, results) tuple, where success indicates that all tests succeeded
        (or failed if C{failureTests} is True), and the results contain a list of lines of each 
        test's output
        
        Example::
            number_expr = pyparsing_common.number.copy()

            result = number_expr.runTests('''
                # unsigned integer
                100
                # negative integer
                -100
                # float with scientific notation
                6.02e23
                # integer with scientific notation
                1e-12
                ''')
            print("Success" if result[0] else "Failed!")

            result = number_expr.runTests('''
                # stray character
                100Z
                # missing leading digit before '.'
                -.100
                # too many '.'
                3.14.159
                ''', failureTests=True)
            print("Success" if result[0] else "Failed!")
        prints::
            # unsigned integer
            100
            [100]

            # negative integer
            -100
            [-100]

            # float with scientific notation
            6.02e23
            [6.02e+23]

            # integer with scientific notation
            1e-12
            [1e-12]

            Success
            
            # stray character
            100Z
               ^
            FAIL: Expected end of text (at char 3), (line:1, col:4)

            # missing leading digit before '.'
            -.100
            ^
            FAIL: Expected {real number with scientific notation | real number | signed integer} (at char 0), (line:1, col:1)

            # too many '.'
            3.14.159
                ^
            FAIL: Expected end of text (at char 4), (line:1, col:5)

            Success

        Each test string must be on a single line. If you want to test a string that spans multiple
        lines, create a test like this::

            expr.runTest(r"this is a test\\n of strings that spans \\n 3 lines")
        
        (Note that this is a raw string literal, you must include the leading 'r'.)
        """
        # accept either a prepared list of tests or one multiline string
        if isinstance(tests, basestring):
            tests = list(map(str.strip, tests.rstrip().splitlines()))
        if isinstance(comment, basestring):
            comment = Literal(comment)
        allResults = []
        comments = []
        success = True
        for t in tests:
            # collect comment lines (and blank lines that follow comments) to
            # be echoed along with the next real test
            if comment is not None and comment.matches(t, False) or comments and not t:
                comments.append(t)
                continue
            if not t:
                continue
            out = ['\n'.join(comments), t]
            comments = []
            try:
                # literal r'\n' sequences in a test line become real newlines
                t = t.replace(r'\n','\n')
                result = self.parseString(t, parseAll=parseAll)
                out.append(result.dump(full=fullDump))
                # a successful parse counts as a failure when failureTests is set
                success = success and not failureTests
            except ParseBaseException as pe:
                fatal = "(FATAL)" if isinstance(pe, ParseFatalException) else ""
                if '\n' in t:
                    # multi-line test: show the offending line with a caret
                    out.append(line(pe.loc, t))
                    out.append(' '*(col(pe.loc,t)-1) + '^' + fatal)
                else:
                    out.append(' '*pe.loc + '^' + fatal)
                out.append("FAIL: " + str(pe))
                success = success and failureTests
                result = pe
            except Exception as exc:
                # non-parse exception (e.g. from a parse action)
                out.append("FAIL-EXCEPTION: " + str(exc))
                success = success and failureTests
                result = exc

            if printResults:
                if fullDump:
                    out.append('')
                print('\n'.join(out))

            allResults.append((t, result))
        
        return success, allResults

        
class Token(ParserElement):
    """
    Abstract C{ParserElement} subclass, for defining atomic matching patterns.
    """
    def __init__(self):
        # Atomic tokens never need list-saving of sub-results.
        super(Token, self).__init__(savelist=False)


class Empty(Token):
    """
    An empty token, will always match.
    """
    def __init__(self):
        super(Empty, self).__init__()
        self.name = "Empty"
        # Matches zero characters at any position, so it can never
        # index past the end of the input.
        self.mayIndexError = False
        self.mayReturnEmpty = True


class NoMatch(Token):
    """
    A token that will never match.
    """
    def __init__(self):
        super(NoMatch, self).__init__()
        self.name = "NoMatch"
        self.mayReturnEmpty = True
        self.mayIndexError = False
        self.errmsg = "Unmatchable token"

    def parseImpl(self, instring, loc, doActions=True):
        # Fail unconditionally, reporting the attempted position.
        raise ParseException(instring, loc, self.errmsg, self)


class Literal(Token):
    """
    Token to exactly match a specified string.
    
    Example::
        Literal('blah').parseString('blah')  # -> ['blah']
        Literal('blah').parseString('blahfooblah')  # -> ['blah']
        Literal('blah').parseString('bla')  # -> Exception: Expected "blah"
    
    For case-insensitive matching, use L{CaselessLiteral}.
    
    For keyword matching (force word break before and after the matched string),
    use L{Keyword} or L{CaselessKeyword}.
    """
    def __init__(self, matchString):
        super(Literal, self).__init__()
        self.match = matchString
        self.matchLen = len(matchString)
        if self.matchLen:
            self.firstMatchChar = matchString[0]
        else:
            warnings.warn("null string passed to Literal; use Empty() instead",
                            SyntaxWarning, stacklevel=2)
            # Degrade gracefully: behave as an Empty token from here on.
            self.__class__ = Empty
        self.name = '"%s"' % _ustr(self.match)
        self.errmsg = "Expected " + self.name
        self.mayReturnEmpty = False
        self.mayIndexError = False

    # Performance tuning: this routine gets called a *lot*. For a
    # single-character match string whose first character matches,
    # short-circuit without ever calling startswith.
    def parseImpl(self, instring, loc, doActions=True):
        if instring[loc] == self.firstMatchChar:
            if self.matchLen == 1 or instring.startswith(self.match, loc):
                return loc + self.matchLen, self.match
        raise ParseException(instring, loc, self.errmsg, self)
_L = Literal
ParserElement._literalStringClass = Literal

class Keyword(Token):
    """
    Token to exactly match a specified string as a keyword, that is, it must be
    immediately followed by a non-keyword character.  Compare with C{L{Literal}}:
     - C{Literal("if")} will match the leading C{'if'} in C{'ifAndOnlyIf'}.
     - C{Keyword("if")} will not; it will only match the leading C{'if'} in C{'if x=1'}, or C{'if(y==2)'}
    Accepts two optional constructor arguments in addition to the keyword string:
     - C{identChars} is a string of characters that would be valid identifier characters,
          defaulting to all alphanumerics + "_" and "$"
     - C{caseless} allows case-insensitive matching, default is C{False}.
       
    Example::
        Keyword("start").parseString("start")  # -> ['start']
        Keyword("start").parseString("starting")  # -> Exception

    For case-insensitive matching, use L{CaselessKeyword}.
    """
    # Class-wide default set of identifier characters; can be replaced via
    # setDefaultKeywordChars().
    DEFAULT_KEYWORD_CHARS = alphanums+"_$"

    def __init__( self, matchString, identChars=None, caseless=False ):
        super(Keyword,self).__init__()
        if identChars is None:
            identChars = Keyword.DEFAULT_KEYWORD_CHARS
        self.match = matchString
        self.matchLen = len(matchString)
        try:
            self.firstMatchChar = matchString[0]
        except IndexError:
            warnings.warn("null string passed to Keyword; use Empty() instead",
                            SyntaxWarning, stacklevel=2)
        self.name = '"%s"' % self.match
        self.errmsg = "Expected " + self.name
        self.mayReturnEmpty = False
        self.mayIndexError = False
        self.caseless = caseless
        if caseless:
            # Caseless comparison is done against upper-cased forms; the
            # original spelling of matchString is still what gets returned.
            self.caselessmatch = matchString.upper()
            identChars = identChars.upper()
        self.identChars = set(identChars)

    def parseImpl( self, instring, loc, doActions=True ):
        # A keyword matches only when the literal text matches AND the
        # characters immediately before and after it (if any) are not
        # identifier characters — i.e. the match is not embedded in a
        # larger identifier.
        if self.caseless:
            if ( (instring[ loc:loc+self.matchLen ].upper() == self.caselessmatch) and
                 (loc >= len(instring)-self.matchLen or instring[loc+self.matchLen].upper() not in self.identChars) and
                 (loc == 0 or instring[loc-1].upper() not in self.identChars) ):
                return loc+self.matchLen, self.match
        else:
            if (instring[loc] == self.firstMatchChar and
                (self.matchLen==1 or instring.startswith(self.match,loc)) and
                (loc >= len(instring)-self.matchLen or instring[loc+self.matchLen] not in self.identChars) and
                (loc == 0 or instring[loc-1] not in self.identChars) ):
                return loc+self.matchLen, self.match
        raise ParseException(instring, loc, self.errmsg, self)

    def copy(self):
        # NOTE(review): copies reset identChars to the class default (as a
        # plain string, not a set) — a custom identChars set is not carried
        # over; this mirrors upstream pyparsing behavior.
        c = super(Keyword,self).copy()
        c.identChars = Keyword.DEFAULT_KEYWORD_CHARS
        return c

    @staticmethod
    def setDefaultKeywordChars( chars ):
        """Overrides the default Keyword chars
        """
        Keyword.DEFAULT_KEYWORD_CHARS = chars

class CaselessLiteral(Literal):
    """
    Token to match a specified string, ignoring case of letters.
    Note: the matched results will always be in the case of the given
    match string, NOT the case of the input text.

    Example::
        OneOrMore(CaselessLiteral("CMD")).parseString("cmd CMD Cmd10") # -> ['CMD', 'CMD', 'CMD']
        
    (Contrast with example for L{CaselessKeyword}.)
    """
    def __init__(self, matchString):
        # Compare against the upper-cased text, but remember the original
        # spelling so results are returned in the defining literal's case.
        super(CaselessLiteral, self).__init__(matchString.upper())
        # Preserve the defining literal.
        self.returnString = matchString
        self.name = "'%s'" % self.returnString
        self.errmsg = "Expected " + self.name

    def parseImpl(self, instring, loc, doActions=True):
        candidate = instring[loc:loc + self.matchLen]
        if candidate.upper() == self.match:
            return loc + self.matchLen, self.returnString
        raise ParseException(instring, loc, self.errmsg, self)

class CaselessKeyword(Keyword):
    """
    Caseless version of L{Keyword}.

    Example::
        OneOrMore(CaselessKeyword("CMD")).parseString("cmd CMD Cmd10") # -> ['CMD', 'CMD']
        
    (Contrast with example for L{CaselessLiteral}.)
    """
    def __init__(self, matchString, identChars=None):
        super(CaselessKeyword, self).__init__(matchString, identChars, caseless=True)

    def parseImpl(self, instring, loc, doActions=True):
        endloc = loc + self.matchLen
        # Text must match caselessly and must not be followed by an
        # identifier character (which would make it part of a longer word).
        if instring[loc:endloc].upper() == self.caselessmatch:
            if loc >= len(instring) - self.matchLen or instring[endloc].upper() not in self.identChars:
                return endloc, self.match
        raise ParseException(instring, loc, self.errmsg, self)

class CloseMatch(Token):
    """
    A variation on L{Literal} which matches "close" matches, that is, 
    strings with at most 'n' mismatching characters. C{CloseMatch} takes parameters:
     - C{match_string} - string to be matched
     - C{maxMismatches} - (C{default=1}) maximum number of mismatches allowed to count as a match
    
    The results from a successful parse will contain the matched text from the input string and the following named results:
     - C{mismatches} - a list of the positions within the match_string where mismatches were found
     - C{original} - the original match_string used to compare against the input string
    
    If C{mismatches} is an empty list, then the match was an exact match.
    
    Example::
        patt = CloseMatch("ATCATCGAATGGA")
        patt.parseString("ATCATCGAAXGGA") # -> (['ATCATCGAAXGGA'], {'mismatches': [[9]], 'original': ['ATCATCGAATGGA']})
        patt.parseString("ATCAXCGAAXGGA") # -> Exception: Expected 'ATCATCGAATGGA' (with up to 1 mismatches) (at char 0), (line:1, col:1)

        # exact match
        patt.parseString("ATCATCGAATGGA") # -> (['ATCATCGAATGGA'], {'mismatches': [[]], 'original': ['ATCATCGAATGGA']})

        # close match allowing up to 2 mismatches
        patt = CloseMatch("ATCATCGAATGGA", maxMismatches=2)
        patt.parseString("ATCAXCGAAXGGA") # -> (['ATCAXCGAAXGGA'], {'mismatches': [[4, 9]], 'original': ['ATCATCGAATGGA']})
    """
    def __init__(self, match_string, maxMismatches=1):
        super(CloseMatch,self).__init__()
        self.name = match_string
        self.match_string = match_string
        self.maxMismatches = maxMismatches
        self.errmsg = "Expected %r (with up to %d mismatches)" % (self.match_string, self.maxMismatches)
        self.mayIndexError = False
        self.mayReturnEmpty = False

    def parseImpl( self, instring, loc, doActions=True ):
        start = loc
        instrlen = len(instring)
        maxloc = start + len(self.match_string)

        # Only attempt the comparison if there are enough characters left.
        if maxloc <= instrlen:
            match_string = self.match_string
            match_stringloc = 0
            mismatches = []
            maxMismatches = self.maxMismatches

            for match_stringloc,s_m in enumerate(zip(instring[loc:maxloc], match_string)):
                src,mat = s_m
                if src != mat:
                    mismatches.append(match_stringloc)
                    if len(mismatches) > maxMismatches:
                        # Too many mismatches - abandon this candidate.
                        break
            else:
                # Loop ran to completion within the mismatch budget.
                # BUG FIX: the new location must be computed relative to
                # 'start'; the original code used 'match_stringloc + 1'
                # (an index within match_string), which truncated the
                # matched text whenever the match began at loc > 0.
                loc = start + match_stringloc + 1
                results = ParseResults([instring[start:loc]])
                results['original'] = self.match_string
                results['mismatches'] = mismatches
                return loc, results

        raise ParseException(instring, loc, self.errmsg, self)


class Word(Token):
    """
    Token for matching words composed of allowed character sets.
    Defined with string containing all allowed initial characters,
    an optional string containing allowed body characters (if omitted,
    defaults to the initial character set), and an optional minimum,
    maximum, and/or exact length.  The default value for C{min} is 1 (a
    minimum value < 1 is not valid); the default values for C{max} and C{exact}
    are 0, meaning no maximum or exact length restriction. An optional
    C{excludeChars} parameter can list characters that might be found in 
    the input C{bodyChars} string; useful to define a word of all printables
    except for one or two characters, for instance.
    
    L{srange} is useful for defining custom character set strings for defining 
    C{Word} expressions, using range notation from regular expression character sets.
    
    A common mistake is to use C{Word} to match a specific literal string, as in 
    C{Word("Address")}. Remember that C{Word} uses the string argument to define
    I{sets} of matchable characters. This expression would match "Add", "AAA",
    "dAred", or any other word made up of the characters 'A', 'd', 'r', 'e', and 's'.
    To match an exact literal string, use L{Literal} or L{Keyword}.

    pyparsing includes helper strings for building Words:
     - L{alphas}
     - L{nums}
     - L{alphanums}
     - L{hexnums}
     - L{alphas8bit} (alphabetic characters in ASCII range 128-255 - accented, tilded, umlauted, etc.)
     - L{punc8bit} (non-alphabetic characters in ASCII range 128-255 - currency, symbols, superscripts, diacriticals, etc.)
     - L{printables} (any non-whitespace character)

    Example::
        # a word composed of digits
        integer = Word(nums) # equivalent to Word("0123456789") or Word(srange("0-9"))
        
        # a word with a leading capital, and zero or more lowercase
        capital_word = Word(alphas.upper(), alphas.lower())

        # hostnames are alphanumeric, with leading alpha, and '-'
        hostname = Word(alphas, alphanums+'-')
        
        # roman numeral (not a strict parser, accepts invalid mix of characters)
        roman = Word("IVXLCDM")
        
        # any string of non-whitespace characters, except for ','
        csv_value = Word(printables, excludeChars=",")
    """
    def __init__( self, initChars, bodyChars=None, min=1, max=0, exact=0, asKeyword=False, excludeChars=None ):
        super(Word,self).__init__()
        # Strip excluded characters from both character sets up front.
        if excludeChars:
            initChars = ''.join(c for c in initChars if c not in excludeChars)
            if bodyChars:
                bodyChars = ''.join(c for c in bodyChars if c not in excludeChars)
        self.initCharsOrig = initChars
        self.initChars = set(initChars)
        if bodyChars :
            self.bodyCharsOrig = bodyChars
            self.bodyChars = set(bodyChars)
        else:
            # Body characters default to the initial character set.
            self.bodyCharsOrig = initChars
            self.bodyChars = set(initChars)

        self.maxSpecified = max > 0

        if min < 1:
            raise ValueError("cannot specify a minimum length < 1; use Optional(Word()) if zero-length word is permitted")

        self.minLen = min

        if max > 0:
            self.maxLen = max
        else:
            self.maxLen = _MAX_INT

        if exact > 0:
            self.maxLen = exact
            self.minLen = exact

        self.name = _ustr(self)
        self.errmsg = "Expected " + self.name
        self.mayIndexError = False
        self.asKeyword = asKeyword

        # BUG FIX: self.re was previously assigned only inside the
        # optimization branch below, so a Word constructed with min/max/exact
        # restrictions (or char sets containing a space) had no 're'
        # attribute at all and parseImpl raised AttributeError. Default it
        # to None so the character-scan fallback path is taken.
        self.re = None

        # Fast path: an unrestricted Word (min=1, no max/exact, no spaces in
        # its character sets) can be matched with a single compiled regex.
        if ' ' not in self.initCharsOrig+self.bodyCharsOrig and (min==1 and max==0 and exact==0):
            if self.bodyCharsOrig == self.initCharsOrig:
                self.reString = "[%s]+" % _escapeRegexRangeChars(self.initCharsOrig)
            elif len(self.initCharsOrig) == 1:
                self.reString = "%s[%s]*" % \
                                      (re.escape(self.initCharsOrig),
                                      _escapeRegexRangeChars(self.bodyCharsOrig),)
            else:
                self.reString = "[%s][%s]*" % \
                                      (_escapeRegexRangeChars(self.initCharsOrig),
                                      _escapeRegexRangeChars(self.bodyCharsOrig),)
            if self.asKeyword:
                self.reString = r"\b"+self.reString+r"\b"
            try:
                self.re = re.compile( self.reString )
            except Exception:
                # Fall back to the character-scan path if compilation fails.
                self.re = None

    def parseImpl( self, instring, loc, doActions=True ):
        # Regex fast path, when one could be built in __init__.
        if self.re:
            result = self.re.match(instring,loc)
            if not result:
                raise ParseException(instring, loc, self.errmsg, self)

            loc = result.end()
            return loc, result.group()

        if not(instring[ loc ] in self.initChars):
            raise ParseException(instring, loc, self.errmsg, self)

        start = loc
        loc += 1
        instrlen = len(instring)
        bodychars = self.bodyChars
        maxloc = start + self.maxLen
        maxloc = min( maxloc, instrlen )
        # Consume body characters up to the maximum permitted length.
        while loc < maxloc and instring[loc] in bodychars:
            loc += 1

        throwException = False
        if loc - start < self.minLen:
            throwException = True
        if self.maxSpecified and loc < instrlen and instring[loc] in bodychars:
            # More body characters follow than the declared maximum allows.
            throwException = True
        if self.asKeyword:
            # Keyword mode: the word must not be embedded in a longer word.
            if (start>0 and instring[start-1] in bodychars) or (loc<instrlen and instring[loc] in bodychars):
                throwException = True

        if throwException:
            raise ParseException(instring, loc, self.errmsg, self)

        return loc, instring[start:loc]

    def __str__( self ):
        try:
            return super(Word,self).__str__()
        except Exception:
            pass

        if self.strRepr is None:

            def charsAsStr(s):
                # Abbreviate long character sets for display.
                if len(s)>4:
                    return s[:4]+"..."
                else:
                    return s

            if ( self.initCharsOrig != self.bodyCharsOrig ):
                self.strRepr = "W:(%s,%s)" % ( charsAsStr(self.initCharsOrig), charsAsStr(self.bodyCharsOrig) )
            else:
                self.strRepr = "W:(%s)" % charsAsStr(self.initCharsOrig)

        return self.strRepr


class Regex(Token):
    r"""
    Token for matching strings that match a given regular expression.
    Defined with string specifying the regular expression in a form recognized by the inbuilt Python re module.
    If the given regex contains named groups (defined using C{(?P<name>...)}), these will be preserved as 
    named parse results.

    Example::
        realnum = Regex(r"[+-]?\d+\.\d*")
        date = Regex(r'(?P<year>\d{4})-(?P<month>\d\d?)-(?P<day>\d\d?)')
        # ref: http://stackoverflow.com/questions/267399/how-do-you-match-only-valid-roman-numerals-with-a-regular-expression
        roman = Regex(r"M{0,4}(CM|CD|D?C{0,3})(XC|XL|L?X{0,3})(IX|IV|V?I{0,3})")
    """
    # Type of a compiled regular expression, used to recognize pre-compiled
    # pattern objects passed to the constructor.
    compiledREtype = type(re.compile("[A-Z]"))

    def __init__( self, pattern, flags=0):
        """The parameters C{pattern} and C{flags} are passed to the C{re.compile()} function as-is. See the Python C{re} module for an explanation of the acceptable patterns and flags."""
        super(Regex,self).__init__()

        if isinstance(pattern, basestring):
            if not pattern:
                warnings.warn("null string passed to Regex; use Empty() instead",
                        SyntaxWarning, stacklevel=2)

            self.pattern = pattern
            self.flags = flags
            try:
                self.re = re.compile(self.pattern, self.flags)
                self.reString = self.pattern
            except sre_constants.error:
                warnings.warn("invalid pattern (%s) passed to Regex" % pattern,
                    SyntaxWarning, stacklevel=2)
                raise
        elif isinstance(pattern, Regex.compiledREtype):
            # Already-compiled pattern: adopt it as-is.
            self.re = pattern
            self.pattern = self.reString = str(pattern)
            self.flags = flags
        else:
            raise ValueError("Regex may only be constructed with a string or a compiled RE object")

        self.name = _ustr(self)
        self.errmsg = "Expected " + self.name
        self.mayIndexError = False
        self.mayReturnEmpty = True

    def parseImpl( self, instring, loc, doActions=True ):
        match = self.re.match(instring, loc)
        if match is None:
            raise ParseException(instring, loc, self.errmsg, self)

        # Expose each named group as a named parse result.
        ret = ParseResults(match.group())
        for gname, gval in match.groupdict().items():
            ret[gname] = gval
        return match.end(), ret

    def __str__( self ):
        try:
            return super(Regex,self).__str__()
        except Exception:
            pass

        if self.strRepr is None:
            self.strRepr = "Re:(%s)" % repr(self.pattern)

        return self.strRepr


class QuotedString(Token):
    r"""
    Token for matching strings that are delimited by quoting characters.
    
    Defined with the following parameters:
        - quoteChar - string of one or more characters defining the quote delimiting string
        - escChar - character to escape quotes, typically backslash (default=C{None})
        - escQuote - special quote sequence to escape an embedded quote string (such as SQL's "" to escape an embedded ") (default=C{None})
        - multiline - boolean indicating whether quotes can span multiple lines (default=C{False})
        - unquoteResults - boolean indicating whether the matched text should be unquoted (default=C{True})
        - endQuoteChar - string of one or more characters defining the end of the quote delimited string (default=C{None} => same as quoteChar)
        - convertWhitespaceEscapes - convert escaped whitespace (C{'\t'}, C{'\n'}, etc.) to actual whitespace (default=C{True})

    Example::
        qs = QuotedString('"')
        print(qs.searchString('lsjdf "This is the quote" sldjf'))
        complex_qs = QuotedString('{{', endQuoteChar='}}')
        print(complex_qs.searchString('lsjdf {{This is the "quote"}} sldjf'))
        sql_qs = QuotedString('"', escQuote='""')
        print(sql_qs.searchString('lsjdf "This is the quote with ""embedded"" quotes" sldjf'))
    prints::
        [['This is the quote']]
        [['This is the "quote"']]
        [['This is the quote with "embedded" quotes']]
    """
    def __init__( self, quoteChar, escChar=None, escQuote=None, multiline=False, unquoteResults=True, endQuoteChar=None, convertWhitespaceEscapes=True):
        super(QuotedString,self).__init__()

        # remove white space from quote chars - wont work anyway
        quoteChar = quoteChar.strip()
        if not quoteChar:
            warnings.warn("quoteChar cannot be the empty string",SyntaxWarning,stacklevel=2)
            raise SyntaxError()

        # End delimiter defaults to the opening delimiter.
        if endQuoteChar is None:
            endQuoteChar = quoteChar
        else:
            endQuoteChar = endQuoteChar.strip()
            if not endQuoteChar:
                warnings.warn("endQuoteChar cannot be the empty string",SyntaxWarning,stacklevel=2)
                raise SyntaxError()

        self.quoteChar = quoteChar
        self.quoteCharLen = len(quoteChar)
        # First character of the opening quote; used as a cheap pre-test
        # in parseImpl before running the full regex.
        self.firstQuoteChar = quoteChar[0]
        self.endQuoteChar = endQuoteChar
        self.endQuoteCharLen = len(endQuoteChar)
        self.escChar = escChar
        self.escQuote = escQuote
        self.unquoteResults = unquoteResults
        self.convertWhitespaceEscapes = convertWhitespaceEscapes

        # Build a regex of the form:  <open>(?:[^<stop-chars>] ... )*<close>
        # The alternatives inside (?: ) are extended below for multi-char
        # end quotes, escaped quotes, and escaped characters.
        if multiline:
            self.flags = re.MULTILINE | re.DOTALL
            self.pattern = r'%s(?:[^%s%s]' % \
                ( re.escape(self.quoteChar),
                  _escapeRegexRangeChars(self.endQuoteChar[0]),
                  (escChar is not None and _escapeRegexRangeChars(escChar) or '') )
        else:
            # Single-line mode additionally stops at newlines.
            self.flags = 0
            self.pattern = r'%s(?:[^%s\n\r%s]' % \
                ( re.escape(self.quoteChar),
                  _escapeRegexRangeChars(self.endQuoteChar[0]),
                  (escChar is not None and _escapeRegexRangeChars(escChar) or '') )
        if len(self.endQuoteChar) > 1:
            # For multi-character end quotes, allow any prefix of the end
            # quote as long as it is not followed by the rest of it.
            self.pattern += (
                '|(?:' + ')|(?:'.join("%s[^%s]" % (re.escape(self.endQuoteChar[:i]),
                                               _escapeRegexRangeChars(self.endQuoteChar[i]))
                                    for i in range(len(self.endQuoteChar)-1,0,-1)) + ')'
                )
        if escQuote:
            # Doubled-quote style escape (e.g. SQL '').
            self.pattern += (r'|(?:%s)' % re.escape(escQuote))
        if escChar:
            # Backslash-style escape: escape char followed by any character.
            self.pattern += (r'|(?:%s.)' % re.escape(escChar))
            self.escCharReplacePattern = re.escape(self.escChar)+"(.)"
        self.pattern += (r')*%s' % re.escape(self.endQuoteChar))

        try:
            self.re = re.compile(self.pattern, self.flags)
            self.reString = self.pattern
        except sre_constants.error:
            warnings.warn("invalid pattern (%s) passed to Regex" % self.pattern,
                SyntaxWarning, stacklevel=2)
            raise

        self.name = _ustr(self)
        self.errmsg = "Expected " + self.name
        self.mayIndexError = False
        self.mayReturnEmpty = True

    def parseImpl( self, instring, loc, doActions=True ):
        # Cheap first-character test before invoking the regex engine.
        result = instring[loc] == self.firstQuoteChar and self.re.match(instring,loc) or None
        if not result:
            raise ParseException(instring, loc, self.errmsg, self)

        loc = result.end()
        ret = result.group()

        if self.unquoteResults:

            # strip off quotes
            ret = ret[self.quoteCharLen:-self.endQuoteCharLen]

            if isinstance(ret,basestring):
                # replace escaped whitespace
                if '\\' in ret and self.convertWhitespaceEscapes:
                    ws_map = {
                        r'\t' : '\t',
                        r'\n' : '\n',
                        r'\f' : '\f',
                        r'\r' : '\r',
                    }
                    for wslit,wschar in ws_map.items():
                        ret = ret.replace(wslit, wschar)

                # replace escaped characters
                # NOTE(review): "\g<1>" works because \g is not a recognized
                # string escape; a raw string would be clearer.
                if self.escChar:
                    ret = re.sub(self.escCharReplacePattern,"\g<1>",ret)

                # replace escaped quotes
                if self.escQuote:
                    ret = ret.replace(self.escQuote, self.endQuoteChar)

        return loc, ret

    def __str__( self ):
        try:
            return super(QuotedString,self).__str__()
        except Exception:
            pass

        if self.strRepr is None:
            self.strRepr = "quoted string, starting with %s ending with %s" % (self.quoteChar, self.endQuoteChar)

        return self.strRepr


class CharsNotIn(Token):
    """
    Token for matching words composed of characters I{not} in a given set (will
    include whitespace in matched characters if not listed in the provided exclusion set - see example).
    Defined with string containing all disallowed characters, and an optional
    minimum, maximum, and/or exact length.  The default value for C{min} is 1 (a
    minimum value < 1 is not valid); the default values for C{max} and C{exact}
    are 0, meaning no maximum or exact length restriction.

    Example::
        # define a comma-separated-value as anything that is not a ','
        csv_value = CharsNotIn(',')
        print(delimitedList(csv_value).parseString("dkls,lsdkjf,s12 34,@!#,213"))
    prints::
        ['dkls', 'lsdkjf', 's12 34', '@!#', '213']
    """
    def __init__(self, notChars, min=1, max=0, exact=0):
        super(CharsNotIn, self).__init__()
        # Whitespace is significant here - it may itself be "not in" the set.
        self.skipWhitespace = False
        self.notChars = notChars

        if min < 1:
            raise ValueError("cannot specify a minimum length < 1; use Optional(CharsNotIn()) if zero-length char group is permitted")

        self.minLen = min
        self.maxLen = max if max > 0 else _MAX_INT
        if exact > 0:
            self.minLen = self.maxLen = exact

        self.name = _ustr(self)
        self.errmsg = "Expected " + self.name
        self.mayReturnEmpty = ( self.minLen == 0 )
        self.mayIndexError = False

    def parseImpl(self, instring, loc, doActions=True):
        notchars = self.notChars
        if instring[loc] in notchars:
            raise ParseException(instring, loc, self.errmsg, self)

        start = loc
        loc += 1
        # Scan forward while characters stay outside the disallowed set,
        # up to the maximum permitted length.
        limit = min(start + self.maxLen, len(instring))
        while loc < limit and instring[loc] not in notchars:
            loc += 1

        if loc - start < self.minLen:
            raise ParseException(instring, loc, self.errmsg, self)

        return loc, instring[start:loc]

    def __str__(self):
        try:
            return super(CharsNotIn, self).__str__()
        except Exception:
            pass

        if self.strRepr is None:
            # Abbreviate long exclusion sets for display.
            if len(self.notChars) > 4:
                self.strRepr = "!W:(%s...)" % self.notChars[:4]
            else:
                self.strRepr = "!W:(%s)" % self.notChars

        return self.strRepr

class White(Token):
    """
    Special matching class for matching whitespace.  Normally, whitespace is ignored
    by pyparsing grammars.  This class is included when some whitespace structures
    are significant.  Define with a string containing the whitespace characters to be
    matched; default is C{" \\t\\r\\n"}.  Also takes optional C{min}, C{max}, and C{exact} arguments,
    as defined for the C{L{Word}} class.
    """
    # Display names for the whitespace characters, used to build self.name.
    whiteStrs = {
        " " : "<SPC>",
        "\t": "<TAB>",
        "\n": "<LF>",
        "\r": "<CR>",
        "\f": "<FF>",
        }
    def __init__(self, ws=" \t\r\n", min=1, max=0, exact=0):
        super(White, self).__init__()
        self.matchWhite = ws
        # Characters being matched must not also be skipped as whitespace.
        self.setWhitespaceChars("".join(c for c in self.whiteChars if c not in self.matchWhite))
        #~ self.leaveWhitespace()
        self.name = "".join(White.whiteStrs[c] for c in self.matchWhite)
        self.mayReturnEmpty = True
        self.errmsg = "Expected " + self.name

        self.minLen = min
        self.maxLen = max if max > 0 else _MAX_INT
        if exact > 0:
            self.minLen = self.maxLen = exact

    def parseImpl(self, instring, loc, doActions=True):
        if instring[loc] not in self.matchWhite:
            raise ParseException(instring, loc, self.errmsg, self)
        start = loc
        loc += 1
        # Consume matching whitespace up to the maximum permitted length.
        limit = min(start + self.maxLen, len(instring))
        while loc < limit and instring[loc] in self.matchWhite:
            loc += 1

        if loc - start < self.minLen:
            raise ParseException(instring, loc, self.errmsg, self)

        return loc, instring[start:loc]


class _PositionToken(Token):
    """Abstract base for tokens that match a position rather than text."""
    def __init__(self):
        super(_PositionToken, self).__init__()
        # Use the subclass name for display; position tokens consume no
        # input, so they can neither return text nor index past the end.
        self.name = self.__class__.__name__
        self.mayReturnEmpty = True
        self.mayIndexError = False

class GoToColumn(_PositionToken):
    """
    Token to advance to a specific column of input text; useful for tabular report scraping.
    """
    def __init__(self, colno):
        super(GoToColumn, self).__init__()
        self.col = colno

    def preParse(self, instring, loc):
        if col(loc, instring) != self.col:
            instrlen = len(instring)
            if self.ignoreExprs:
                loc = self._skipIgnorables(instring, loc)
            # Advance over whitespace until the target column is reached
            # (or a non-space character / end of input stops us).
            while loc < instrlen and instring[loc].isspace() and col(loc, instring) != self.col:
                loc += 1
        return loc

    def parseImpl(self, instring, loc, doActions=True):
        thiscol = col(loc, instring)
        if thiscol > self.col:
            # Already past the target column - cannot go backwards.
            raise ParseException(instring, loc, "Text not in expected column", self)
        newloc = loc + self.col - thiscol
        return newloc, instring[loc:newloc]


class LineStart(_PositionToken):
    """
    Matches if current position is at the beginning of a line within the parse string
    
    Example::
    
        test = '''\
        AAA this line
        AAA and this line
          AAA but not this one
        B AAA and definitely not this one
        '''

        for t in (LineStart() + 'AAA' + restOfLine).searchString(test):
            print(t)
    
    Prints::
        ['AAA', ' this line']
        ['AAA', ' and this line']    

    """
    def __init__(self):
        super(LineStart, self).__init__()
        self.errmsg = "Expected start of line"

    def parseImpl(self, instring, loc, doActions=True):
        # col() is 1-based: column 1 means loc sits just after a newline
        # (or at the very start of the string).
        if col(loc, instring) != 1:
            raise ParseException(instring, loc, self.errmsg, self)
        return loc, []

class LineEnd(_PositionToken):
    """
    Matches if current position is at the end of a line within the parse string
    """
    def __init__(self):
        super(LineEnd, self).__init__()
        # The newline itself must not be skipped as whitespace, or there
        # would be nothing left here to match.
        self.setWhitespaceChars(ParserElement.DEFAULT_WHITE_CHARS.replace("\n", ""))
        self.errmsg = "Expected end of line"

    def parseImpl(self, instring, loc, doActions=True):
        if loc == len(instring):
            # End of input also counts as end of line; advance past it so
            # repeated matches cannot loop at the same position.
            return loc + 1, []
        if loc < len(instring) and instring[loc] == "\n":
            return loc + 1, "\n"
        raise ParseException(instring, loc, self.errmsg, self)

class StringStart(_PositionToken):
    """
    Matches if current position is at the beginning of the parse string
    """
    def __init__(self):
        super(StringStart, self).__init__()
        self.errmsg = "Expected start of text"

    def parseImpl(self, instring, loc, doActions=True):
        # Accept loc 0 outright; otherwise accept only if everything before
        # loc is just skippable whitespace/ignorables.
        if loc != 0 and loc != self.preParse(instring, 0):
            raise ParseException(instring, loc, self.errmsg, self)
        return loc, []

class StringEnd(_PositionToken):
    """
    Matches if current position is at the end of the parse string
    """
    def __init__( self ):
        super(StringEnd,self).__init__()
        self.errmsg = "Expected end of text"

    def parseImpl( self, instring, loc, doActions=True ):
        end = len(instring)
        # any position before the end is a failure
        if loc < end:
            raise ParseException(instring, loc, self.errmsg, self)
        # exactly at the end: advance past-the-end; already past: stay put
        return (loc+1, []) if loc == end else (loc, [])

class WordStart(_PositionToken):
    """
    Matches if the current position is at the beginning of a Word, and
    is not preceded by any character in a given set of C{wordChars}
    (default=C{printables}). To emulate the C{\b} behavior of regular expressions,
    use C{WordStart(alphanums)}. C{WordStart} will also match at the beginning of
    the string being parsed, or at the beginning of a line.
    """
    def __init__(self, wordChars = printables):
        super(WordStart,self).__init__()
        self.wordChars = set(wordChars)
        self.errmsg = "Not at the start of a word"

    def parseImpl(self, instring, loc, doActions=True ):
        # position 0 is always a word start
        if loc == 0:
            return loc, []
        # De Morgan of the original failure test: succeed only when the
        # previous char is NOT a word char and the current char IS one
        # (short-circuit keeps instring[loc] unevaluated when prev is a word char)
        at_boundary = (instring[loc-1] not in self.wordChars
                       and instring[loc] in self.wordChars)
        if not at_boundary:
            raise ParseException(instring, loc, self.errmsg, self)
        return loc, []

class WordEnd(_PositionToken):
    """
    Matches if the current position is at the end of a Word, and
    is not followed by any character in a given set of C{wordChars}
    (default=C{printables}). To emulate the C{\b} behavior of regular expressions,
    use C{WordEnd(alphanums)}. C{WordEnd} will also match at the end of
    the string being parsed, or at the end of a line.
    """
    def __init__(self, wordChars = printables):
        super(WordEnd,self).__init__()
        self.wordChars = set(wordChars)
        # word-end tests must see the very next character, so never skip whitespace
        self.skipWhitespace = False
        self.errmsg = "Not at the end of a word"

    def parseImpl(self, instring, loc, doActions=True ):
        strlen = len(instring)
        # end-of-string positions always match; only interior positions are tested
        if 0 < strlen and loc < strlen:
            # De Morgan of the original failure test: succeed only when the
            # next char is NOT a word char and the previous char IS one
            # (note: at loc==0, instring[loc-1] wraps to the LAST char —
            # preserved quirk of the original)
            ends_word = (instring[loc] not in self.wordChars
                         and instring[loc-1] in self.wordChars)
            if not ends_word:
                raise ParseException(instring, loc, self.errmsg, self)
        return loc, []


class ParseExpression(ParserElement):
    """
    Abstract subclass of ParserElement, for combining and post-processing parsed tokens.

    Holds a list of contained expressions in ``self.exprs``; concrete
    subclasses (And, Or, MatchFirst, Each) define how they combine.
    """
    def __init__( self, exprs, savelist = False ):
        # Normalize the many accepted forms of `exprs` (single string,
        # single expression, generator, iterable of strings/expressions)
        # into a plain list of ParserElements in self.exprs.
        super(ParseExpression,self).__init__(savelist)
        if isinstance( exprs, _generatorType ):
            exprs = list(exprs)

        if isinstance( exprs, basestring ):
            # a lone string becomes a one-element list of the literal class
            self.exprs = [ ParserElement._literalStringClass( exprs ) ]
        elif isinstance( exprs, collections.Iterable ):
            # NOTE(review): collections.Iterable moved to collections.abc and
            # was removed in Python 3.10; this file targets older interpreters
            # (it still uses basestring)
            exprs = list(exprs)
            # if sequence of strings provided, wrap with Literal
            if all(isinstance(expr, basestring) for expr in exprs):
                exprs = map(ParserElement._literalStringClass, exprs)
            self.exprs = list(exprs)
        else:
            # last resort: try to iterate; fall back to a single-element list
            try:
                self.exprs = list( exprs )
            except TypeError:
                self.exprs = [ exprs ]
        self.callPreparse = False

    def __getitem__( self, i ):
        # index directly into the contained expressions
        return self.exprs[i]

    def append( self, other ):
        # add another contained expression; invalidate the cached repr
        self.exprs.append( other )
        self.strRepr = None
        return self

    def leaveWhitespace( self ):
        """Extends C{leaveWhitespace} defined in base class, and also invokes C{leaveWhitespace} on
           all contained expressions."""
        self.skipWhitespace = False
        # copy the children first so the originals are left untouched
        self.exprs = [ e.copy() for e in self.exprs ]
        for e in self.exprs:
            e.leaveWhitespace()
        return self

    def ignore( self, other ):
        # register an ignorable expression on self and propagate the
        # (possibly wrapped) registered form to every child
        if isinstance( other, Suppress ):
            # avoid registering the same Suppress twice
            if other not in self.ignoreExprs:
                super( ParseExpression, self).ignore( other )
                for e in self.exprs:
                    e.ignore( self.ignoreExprs[-1] )
        else:
            super( ParseExpression, self).ignore( other )
            for e in self.exprs:
                e.ignore( self.ignoreExprs[-1] )
        return self

    def __str__( self ):
        # prefer the base-class string (uses assigned name) when available
        try:
            return super(ParseExpression,self).__str__()
        except Exception:
            pass

        if self.strRepr is None:
            self.strRepr = "%s:(%s)" % ( self.__class__.__name__, _ustr(self.exprs) )
        return self.strRepr

    def streamline( self ):
        super(ParseExpression,self).streamline()

        for e in self.exprs:
            e.streamline()

        # collapse nested And's of the form And( And( And( a,b), c), d) to And( a,b,c,d )
        # but only if there are no parse actions or resultsNames on the nested And's
        # (likewise for Or's and MatchFirst's)
        if ( len(self.exprs) == 2 ):
            # try flattening a nested same-class expression on the left ...
            other = self.exprs[0]
            if ( isinstance( other, self.__class__ ) and
                  not(other.parseAction) and
                  other.resultsName is None and
                  not other.debug ):
                self.exprs = other.exprs[:] + [ self.exprs[1] ]
                self.strRepr = None
                self.mayReturnEmpty |= other.mayReturnEmpty
                self.mayIndexError  |= other.mayIndexError

            # ... then on the right (re-read, since the list may have changed)
            other = self.exprs[-1]
            if ( isinstance( other, self.__class__ ) and
                  not(other.parseAction) and
                  other.resultsName is None and
                  not other.debug ):
                self.exprs = self.exprs[:-1] + other.exprs[:]
                self.strRepr = None
                self.mayReturnEmpty |= other.mayReturnEmpty
                self.mayIndexError  |= other.mayIndexError

        self.errmsg = "Expected " + _ustr(self)

        return self

    def setResultsName( self, name, listAllMatches=False ):
        ret = super(ParseExpression,self).setResultsName(name,listAllMatches)
        return ret

    def validate( self, validateTrace=[] ):
        # mutable default is safe here: validateTrace is always copied
        tmp = validateTrace[:]+[self]
        for e in self.exprs:
            e.validate(tmp)
        self.checkRecursion( [] )

    def copy(self):
        # deep-copy the children so the clone can be modified independently
        ret = super(ParseExpression,self).copy()
        ret.exprs = [e.copy() for e in self.exprs]
        return ret

class And(ParseExpression):
    """
    Requires all given C{ParseExpression}s to be found in the given order.
    Expressions may be separated by whitespace.
    May be constructed using the C{'+'} operator.
    May also be constructed using the C{'-'} operator, which will suppress backtracking.

    Example::
        integer = Word(nums)
        name_expr = OneOrMore(Word(alphas))

        expr = And([integer("id"),name_expr("name"),integer("age")])
        # more easily written as:
        expr = integer("id") + name_expr("name") + integer("age")
    """

    class _ErrorStop(Empty):
        # marker element inserted by the '-' operator; every element after
        # it must match, or a non-backtrackable ParseSyntaxException is raised
        def __init__(self, *args, **kwargs):
            super(And._ErrorStop,self).__init__(*args, **kwargs)
            self.name = '-'
            self.leaveWhitespace()

    def __init__( self, exprs, savelist = True ):
        # NOTE(review): assumes exprs is non-empty (self.exprs[0] below)
        super(And,self).__init__(exprs, savelist)
        # an And can only produce an empty match if every child can
        self.mayReturnEmpty = all(e.mayReturnEmpty for e in self.exprs)
        # adopt leading-whitespace behavior from the first child
        self.setWhitespaceChars( self.exprs[0].whiteChars )
        self.skipWhitespace = self.exprs[0].skipWhitespace
        self.callPreparse = True

    def parseImpl( self, instring, loc, doActions=True ):
        # pass False as last arg to _parse for first element, since we already
        # pre-parsed the string as part of our And pre-parsing
        loc, resultlist = self.exprs[0]._parse( instring, loc, doActions, callPreParse=False )
        errorStop = False
        for e in self.exprs[1:]:
            if isinstance(e, And._ErrorStop):
                # from here on, a failure is fatal (no backtracking)
                errorStop = True
                continue
            if errorStop:
                try:
                    loc, exprtokens = e._parse( instring, loc, doActions )
                except ParseSyntaxException:
                    raise
                except ParseBaseException as pe:
                    # convert an ordinary parse failure into the fatal variety
                    pe.__traceback__ = None
                    raise ParseSyntaxException._from_exception(pe)
                except IndexError:
                    raise ParseSyntaxException(instring, len(instring), self.errmsg, self)
            else:
                loc, exprtokens = e._parse( instring, loc, doActions )
            # only accumulate tokens that carry content or named results
            if exprtokens or exprtokens.haskeys():
                resultlist += exprtokens
        return loc, resultlist

    def __iadd__(self, other ):
        # support `expr += "literal"` by promoting strings first
        if isinstance( other, basestring ):
            other = ParserElement._literalStringClass( other )
        return self.append( other ) #And( [ self, other ] )

    def checkRecursion( self, parseElementList ):
        subRecCheckList = parseElementList[:] + [ self ]
        for e in self.exprs:
            e.checkRecursion( subRecCheckList )
            # once a child must consume input, later children cannot recurse
            # back to the same position, so stop checking
            if not e.mayReturnEmpty:
                break

    def __str__( self ):
        if hasattr(self,"name"):
            return self.name

        if self.strRepr is None:
            self.strRepr = "{" + " ".join(_ustr(e) for e in self.exprs) + "}"

        return self.strRepr


class Or(ParseExpression):
    """
    Requires that at least one C{ParseExpression} is found.
    If two expressions match, the expression that matches the longest string will be used.
    May be constructed using the C{'^'} operator.

    Example::
        # construct Or using '^' operator
        
        number = Word(nums) ^ Combine(Word(nums) + '.' + Word(nums))
        print(number.searchString("123 3.1416 789"))
    prints::
        [['123'], ['3.1416'], ['789']]
    """
    def __init__( self, exprs, savelist = False ):
        super(Or,self).__init__(exprs, savelist)
        # Or may match empty if ANY alternative can (vacuously true when empty)
        if self.exprs:
            self.mayReturnEmpty = any(e.mayReturnEmpty for e in self.exprs)
        else:
            self.mayReturnEmpty = True

    def parseImpl( self, instring, loc, doActions=True ):
        # Two-pass longest-match strategy:
        #  1) tryParse every alternative (no actions) to rank them by how far
        #     they reach, remembering the furthest failure for error reporting
        #  2) re-parse candidates for real, longest first, returning the first
        #     that still succeeds
        maxExcLoc = -1
        maxException = None
        matches = []
        for e in self.exprs:
            try:
                loc2 = e.tryParse( instring, loc )
            except ParseException as err:
                # keep only the failure that got the furthest into the input
                err.__traceback__ = None
                if err.loc > maxExcLoc:
                    maxException = err
                    maxExcLoc = err.loc
            except IndexError:
                # ran off the end of the string; treat as failure at end
                if len(instring) > maxExcLoc:
                    maxException = ParseException(instring,len(instring),e.errmsg,self)
                    maxExcLoc = len(instring)
            else:
                # save match among all matches, to retry longest to shortest
                matches.append((loc2, e))

        if matches:
            # sort descending by end location (longest match first)
            matches.sort(key=lambda x: -x[0])
            for _,e in matches:
                try:
                    # re-parse for real (parse actions may still reject)
                    return e._parse( instring, loc, doActions )
                except ParseException as err:
                    err.__traceback__ = None
                    if err.loc > maxExcLoc:
                        maxException = err
                        maxExcLoc = err.loc

        if maxException is not None:
            # report using this Or's own message, at the furthest failure point
            maxException.msg = self.errmsg
            raise maxException
        else:
            raise ParseException(instring, loc, "no defined alternatives to match", self)


    def __ixor__(self, other ):
        # support `expr ^= "literal"` by promoting strings first
        if isinstance( other, basestring ):
            other = ParserElement._literalStringClass( other )
        return self.append( other ) #Or( [ self, other ] )

    def __str__( self ):
        if hasattr(self,"name"):
            return self.name

        if self.strRepr is None:
            self.strRepr = "{" + " ^ ".join(_ustr(e) for e in self.exprs) + "}"

        return self.strRepr

    def checkRecursion( self, parseElementList ):
        # every alternative starts at the same position, so check them all
        subRecCheckList = parseElementList[:] + [ self ]
        for e in self.exprs:
            e.checkRecursion( subRecCheckList )


class MatchFirst(ParseExpression):
    """
    Requires that at least one C{ParseExpression} is found.
    If two expressions match, the first one listed is the one that will match.
    May be constructed using the C{'|'} operator.

    Example::
        # construct MatchFirst using '|' operator
        
        # watch the order of expressions to match
        number = Word(nums) | Combine(Word(nums) + '.' + Word(nums))
        print(number.searchString("123 3.1416 789")) #  Fail! -> [['123'], ['3'], ['1416'], ['789']]

        # put more selective expression first
        number = Combine(Word(nums) + '.' + Word(nums)) | Word(nums)
        print(number.searchString("123 3.1416 789")) #  Better -> [['123'], ['3.1416'], ['789']]
    """
    def __init__( self, exprs, savelist = False ):
        super(MatchFirst,self).__init__(exprs, savelist)
        # MatchFirst may match empty if ANY alternative can (vacuously true when empty)
        if self.exprs:
            self.mayReturnEmpty = any(e.mayReturnEmpty for e in self.exprs)
        else:
            self.mayReturnEmpty = True

    def parseImpl( self, instring, loc, doActions=True ):
        # track the failure that made it furthest into the input, so the
        # reported error points at the most promising alternative
        maxExcLoc = -1
        maxException = None
        for e in self.exprs:
            try:
                # first alternative to parse wins outright
                ret = e._parse( instring, loc, doActions )
                return ret
            except ParseException as err:
                if err.loc > maxExcLoc:
                    maxException = err
                    maxExcLoc = err.loc
            except IndexError:
                # ran off the end of the string; treat as failure at end
                if len(instring) > maxExcLoc:
                    maxException = ParseException(instring,len(instring),e.errmsg,self)
                    maxExcLoc = len(instring)

        # only got here if no expression matched, raise exception for match that made it the furthest
        # (for/else: the else runs only when the loop completed without a return)
        else:
            if maxException is not None:
                maxException.msg = self.errmsg
                raise maxException
            else:
                raise ParseException(instring, loc, "no defined alternatives to match", self)

    def __ior__(self, other ):
        # support `expr |= "literal"` by promoting strings first
        if isinstance( other, basestring ):
            other = ParserElement._literalStringClass( other )
        return self.append( other ) #MatchFirst( [ self, other ] )

    def __str__( self ):
        if hasattr(self,"name"):
            return self.name

        if self.strRepr is None:
            self.strRepr = "{" + " | ".join(_ustr(e) for e in self.exprs) + "}"

        return self.strRepr

    def checkRecursion( self, parseElementList ):
        # every alternative starts at the same position, so check them all
        subRecCheckList = parseElementList[:] + [ self ]
        for e in self.exprs:
            e.checkRecursion( subRecCheckList )


class Each(ParseExpression):
    """
    Requires all given C{ParseExpression}s to be found, but in any order.
    Expressions may be separated by whitespace.
    May be constructed using the C{'&'} operator.

    Example::
        color = oneOf("RED ORANGE YELLOW GREEN BLUE PURPLE BLACK WHITE BROWN")
        shape_type = oneOf("SQUARE CIRCLE TRIANGLE STAR HEXAGON OCTAGON")
        integer = Word(nums)
        shape_attr = "shape:" + shape_type("shape")
        posn_attr = "posn:" + Group(integer("x") + ',' + integer("y"))("posn")
        color_attr = "color:" + color("color")
        size_attr = "size:" + integer("size")

        # use Each (using operator '&') to accept attributes in any order 
        # (shape and posn are required, color and size are optional)
        shape_spec = shape_attr & posn_attr & Optional(color_attr) & Optional(size_attr)

        shape_spec.runTests('''
            shape: SQUARE color: BLACK posn: 100, 120
            shape: CIRCLE size: 50 color: BLUE posn: 50,80
            color:GREEN size:20 shape:TRIANGLE posn:20,40
            '''
            )
    prints::
        shape: SQUARE color: BLACK posn: 100, 120
        ['shape:', 'SQUARE', 'color:', 'BLACK', 'posn:', ['100', ',', '120']]
        - color: BLACK
        - posn: ['100', ',', '120']
          - x: 100
          - y: 120
        - shape: SQUARE


        shape: CIRCLE size: 50 color: BLUE posn: 50,80
        ['shape:', 'CIRCLE', 'size:', '50', 'color:', 'BLUE', 'posn:', ['50', ',', '80']]
        - color: BLUE
        - posn: ['50', ',', '80']
          - x: 50
          - y: 80
        - shape: CIRCLE
        - size: 50


        color: GREEN size: 20 shape: TRIANGLE posn: 20,40
        ['color:', 'GREEN', 'size:', '20', 'shape:', 'TRIANGLE', 'posn:', ['20', ',', '40']]
        - color: GREEN
        - posn: ['20', ',', '40']
          - x: 20
          - y: 40
        - shape: TRIANGLE
        - size: 20
    """
    def __init__( self, exprs, savelist = True ):
        super(Each,self).__init__(exprs, savelist)
        # Each can only produce an empty match if every child can
        self.mayReturnEmpty = all(e.mayReturnEmpty for e in self.exprs)
        self.skipWhitespace = True
        # defer classification of children until first parse (see parseImpl)
        self.initExprGroups = True

    def parseImpl( self, instring, loc, doActions=True ):
        # Lazily classify the child expressions once:
        #   - Optionals (and other may-be-empty exprs) -> optionals
        #   - ZeroOrMore -> multioptionals (may match repeatedly)
        #   - OneOrMore  -> multirequired (must match at least once, may repeat)
        #   - everything else -> required
        if self.initExprGroups:
            # map from the inner expr's id back to its Optional wrapper
            self.opt1map = dict((id(e.expr),e) for e in self.exprs if isinstance(e,Optional))
            opt1 = [ e.expr for e in self.exprs if isinstance(e,Optional) ]
            opt2 = [ e for e in self.exprs if e.mayReturnEmpty and not isinstance(e,Optional)]
            self.optionals = opt1 + opt2
            self.multioptionals = [ e.expr for e in self.exprs if isinstance(e,ZeroOrMore) ]
            self.multirequired = [ e.expr for e in self.exprs if isinstance(e,OneOrMore) ]
            self.required = [ e for e in self.exprs if not isinstance(e,(Optional,ZeroOrMore,OneOrMore)) ]
            self.required += self.multirequired
            self.initExprGroups = False
        tmpLoc = loc
        tmpReqd = self.required[:]
        tmpOpt  = self.optionals[:]
        matchOrder = []

        # Round-robin matching: keep sweeping the remaining expressions with
        # tryParse until a full sweep produces no new match, recording the
        # order in which expressions actually matched.
        keepMatching = True
        while keepMatching:
            tmpExprs = tmpReqd + tmpOpt + self.multioptionals + self.multirequired
            failed = []
            for e in tmpExprs:
                try:
                    tmpLoc = e.tryParse( instring, tmpLoc )
                except ParseException:
                    failed.append(e)
                else:
                    # record the Optional wrapper (if any) rather than the bare expr
                    matchOrder.append(self.opt1map.get(id(e),e))
                    if e in tmpReqd:
                        tmpReqd.remove(e)
                    elif e in tmpOpt:
                        tmpOpt.remove(e)
            if len(failed) == len(tmpExprs):
                keepMatching = False

        # any required expression left unmatched is a parse failure
        if tmpReqd:
            missing = ", ".join(_ustr(e) for e in tmpReqd)
            raise ParseException(instring,loc,"Missing one or more required elements (%s)" % missing )

        # add any unmatched Optionals, in case they have default values defined
        matchOrder += [e for e in self.exprs if isinstance(e,Optional) and e.expr in tmpOpt]

        # now parse for real, in the discovered order, accumulating results
        resultlist = []
        for e in matchOrder:
            loc,results = e._parse(instring,loc,doActions)
            resultlist.append(results)

        finalResults = sum(resultlist, ParseResults([]))
        return loc, finalResults

    def __str__( self ):
        if hasattr(self,"name"):
            return self.name

        if self.strRepr is None:
            self.strRepr = "{" + " & ".join(_ustr(e) for e in self.exprs) + "}"

        return self.strRepr

    def checkRecursion( self, parseElementList ):
        # every child may start at the same position, so check them all
        subRecCheckList = parseElementList[:] + [ self ]
        for e in self.exprs:
            e.checkRecursion( subRecCheckList )


class ParseElementEnhance(ParserElement):
    """
    Abstract subclass of C{ParserElement}, for combining and post-processing parsed tokens.

    Wraps a single contained expression in ``self.expr`` (which may be None
    in abstract/forward-declared subclasses); most methods delegate to it,
    guarding against the None case.
    """
    def __init__( self, expr, savelist=False ):
        super(ParseElementEnhance,self).__init__(savelist)
        if isinstance( expr, basestring ):
            # promote a plain string to the configured literal class
            if issubclass(ParserElement._literalStringClass, Token):
                expr = ParserElement._literalStringClass(expr)
            else:
                expr = ParserElement._literalStringClass(Literal(expr))
        self.expr = expr
        self.strRepr = None
        if expr is not None:
            # inherit parsing characteristics from the wrapped expression
            self.mayIndexError = expr.mayIndexError
            self.mayReturnEmpty = expr.mayReturnEmpty
            self.setWhitespaceChars( expr.whiteChars )
            self.skipWhitespace = expr.skipWhitespace
            self.saveAsList = expr.saveAsList
            self.callPreparse = expr.callPreparse
            self.ignoreExprs.extend(expr.ignoreExprs)

    def parseImpl( self, instring, loc, doActions=True ):
        # delegate to the wrapped expression; preparse has already been done
        if self.expr is not None:
            return self.expr._parse( instring, loc, doActions, callPreParse=False )
        else:
            raise ParseException("",loc,self.errmsg,self)

    def leaveWhitespace( self ):
        """Disable whitespace skipping on this element and on (a private copy
        of) its contained expression.

        Fix: the original called ``self.expr.copy()`` unconditionally before
        its ``if self.expr is not None`` check, so a None expr raised
        AttributeError instead of being skipped as the guard intended; the
        copy now happens inside the guard, consistent with every other
        None-guarded method in this class.
        """
        self.skipWhitespace = False
        if self.expr is not None:
            # copy first so the caller's original expression is untouched
            self.expr = self.expr.copy()
            self.expr.leaveWhitespace()
        return self

    def ignore( self, other ):
        # register an ignorable expression on self and propagate the
        # (possibly wrapped) registered form to the contained expression
        if isinstance( other, Suppress ):
            # avoid registering the same Suppress twice
            if other not in self.ignoreExprs:
                super( ParseElementEnhance, self).ignore( other )
                if self.expr is not None:
                    self.expr.ignore( self.ignoreExprs[-1] )
        else:
            super( ParseElementEnhance, self).ignore( other )
            if self.expr is not None:
                self.expr.ignore( self.ignoreExprs[-1] )
        return self

    def streamline( self ):
        super(ParseElementEnhance,self).streamline()
        if self.expr is not None:
            self.expr.streamline()
        return self

    def checkRecursion( self, parseElementList ):
        # raise if this element is reachable from itself without consuming input
        if self in parseElementList:
            raise RecursiveGrammarException( parseElementList+[self] )
        subRecCheckList = parseElementList[:] + [ self ]
        if self.expr is not None:
            self.expr.checkRecursion( subRecCheckList )

    def validate( self, validateTrace=None ):
        # avoid the mutable-default-argument pitfall; behavior is unchanged
        # since the trace list was always copied before use
        if validateTrace is None:
            validateTrace = []
        tmp = validateTrace[:]+[self]
        if self.expr is not None:
            self.expr.validate(tmp)
        self.checkRecursion( [] )

    def __str__( self ):
        # prefer the base-class string (uses assigned name) when available
        try:
            return super(ParseElementEnhance,self).__str__()
        except Exception:
            pass

        if self.strRepr is None and self.expr is not None:
            self.strRepr = "%s:(%s)" % ( self.__class__.__name__, _ustr(self.expr) )
        return self.strRepr


class FollowedBy(ParseElementEnhance):
    """
    Lookahead matching of the given parse expression.  C{FollowedBy}
    does I{not} advance the parsing position within the input string, it only
    verifies that the specified parse expression matches at the current
    position.  C{FollowedBy} always returns a null token list.

    Example::
        # use FollowedBy to match a label only if it is followed by a ':'
        data_word = Word(alphas)
        label = data_word + FollowedBy(':')
        attr_expr = Group(label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join))
        
        OneOrMore(attr_expr).parseString("shape: SQUARE color: BLACK posn: upper left").pprint()
    prints::
        [['shape', 'SQUARE'], ['color', 'BLACK'], ['posn', 'upper left']]
    """
    def __init__( self, expr ):
        super(FollowedBy,self).__init__(expr)
        # a successful lookahead consumes nothing, hence "may return empty"
        self.mayReturnEmpty = True

    def parseImpl( self, instring, loc, doActions=True ):
        # tryParse raises on failure; on success, report a zero-width match
        self.expr.tryParse( instring, loc )
        return loc, []


class NotAny(ParseElementEnhance):
    """
    Lookahead to disallow matching with the given parse expression.  C{NotAny}
    does I{not} advance the parsing position within the input string, it only
    verifies that the specified parse expression does I{not} match at the current
    position.  Also, C{NotAny} does I{not} skip over leading whitespace. C{NotAny}
    always returns a null token list.  May be constructed using the '~' operator.

    Example::
        # match an identifier only if it is not a reserved keyword
        keyword = Keyword("if") | Keyword("else")
        identifier = ~keyword + Word(alphas)
    """
    def __init__( self, expr ):
        super(NotAny,self).__init__(expr)
        #~ self.leaveWhitespace()
        self.skipWhitespace = False  # do NOT use self.leaveWhitespace(), don't want to propagate to exprs
        # a successful negative lookahead consumes nothing
        self.mayReturnEmpty = True
        self.errmsg = "Found unwanted token, "+_ustr(self.expr)

    def parseImpl( self, instring, loc, doActions=True ):
        # succeed (zero-width) only when the wrapped expression does NOT match here
        if not self.expr.canParseNext(instring, loc):
            return loc, []
        raise ParseException(instring, loc, self.errmsg, self)

    def __str__( self ):
        try:
            return self.name
        except AttributeError:
            pass
        if self.strRepr is None:
            self.strRepr = "~{" + _ustr(self.expr) + "}"
        return self.strRepr

class _MultipleMatch(ParseElementEnhance):
    # Shared implementation for OneOrMore and ZeroOrMore: repeatedly match
    # self.expr, optionally stopping when a `stopOn` sentinel expression
    # would match. Requires at least one match; ZeroOrMore relaxes that in
    # its own parseImpl.
    def __init__( self, expr, stopOn=None):
        super(_MultipleMatch, self).__init__(expr)
        self.saveAsList = True
        ender = stopOn
        if isinstance(ender, basestring):
            # promote a string sentinel to the configured literal class
            ender = ParserElement._literalStringClass(ender)
        # negative lookahead: repetition stops where the sentinel matches
        self.not_ender = ~ender if ender is not None else None

    def parseImpl( self, instring, loc, doActions=True ):
        # bind bound-methods to locals: this loop is a parsing hot spot
        self_expr_parse = self.expr._parse
        self_skip_ignorables = self._skipIgnorables
        check_ender = self.not_ender is not None
        if check_ender:
            try_not_ender = self.not_ender.tryParse
        
        # must be at least one (but first see if we are the stopOn sentinel;
        # if so, fail)
        if check_ender:
            try_not_ender(instring, loc)
        loc, tokens = self_expr_parse( instring, loc, doActions, callPreParse=False )
        try:
            hasIgnoreExprs = (not not self.ignoreExprs)
            # keep matching until the expression (or the sentinel check) fails;
            # the exception ends the loop with `loc` at the last good position
            while 1:
                if check_ender:
                    try_not_ender(instring, loc)
                if hasIgnoreExprs:
                    preloc = self_skip_ignorables( instring, loc )
                else:
                    preloc = loc
                loc, tmptokens = self_expr_parse( instring, preloc, doActions )
                # only accumulate tokens that carry content or named results
                if tmptokens or tmptokens.haskeys():
                    tokens += tmptokens
        except (ParseException,IndexError):
            pass

        return loc, tokens
        
class OneOrMore(_MultipleMatch):
    """
    Repetition of one or more of the given expression.

    Parameters:
     - expr - expression that must match one or more times
     - stopOn - (default=C{None}) - expression for a terminating sentinel
          (only required if the sentinel would ordinarily match the repetition
          expression)

    Example::
        data_word = Word(alphas)
        label = data_word + FollowedBy(':')
        attr_expr = Group(label + Suppress(':') + OneOrMore(data_word).setParseAction(' '.join))

        text = "shape: SQUARE posn: upper left color: BLACK"
        OneOrMore(attr_expr).parseString(text).pprint()  # Fail! read 'color' as data instead of next label -> [['shape', 'SQUARE color']]

        # use stopOn attribute for OneOrMore to avoid reading label string as part of the data
        attr_expr = Group(label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join))
        OneOrMore(attr_expr).parseString(text).pprint() # Better -> [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'BLACK']]

        # could also be written as
        (attr_expr * (1,)).parseString(text).pprint()
    """

    def __str__( self ):
        # an explicitly assigned name takes precedence
        try:
            return self.name
        except AttributeError:
            pass
        if self.strRepr is None:
            self.strRepr = "{" + _ustr(self.expr) + "}..."
        return self.strRepr

class ZeroOrMore(_MultipleMatch):
    """
    Optional repetition of zero or more of the given expression.

    Parameters:
     - expr - expression that must match zero or more times
     - stopOn - (default=C{None}) - expression for a terminating sentinel
          (only required if the sentinel would ordinarily match the repetition
          expression)

    Example: similar to L{OneOrMore}
    """
    def __init__( self, expr, stopOn=None):
        super(ZeroOrMore,self).__init__(expr, stopOn=stopOn)
        # unlike OneOrMore, zero matches is a valid (empty) result
        self.mayReturnEmpty = True

    def parseImpl( self, instring, loc, doActions=True ):
        # delegate to the one-or-more machinery; a failure on the very first
        # match simply yields an empty token list at the original location
        try:
            return super(ZeroOrMore, self).parseImpl(instring, loc, doActions)
        except (ParseException,IndexError):
            return loc, []

    def __str__( self ):
        # an explicitly assigned name takes precedence
        try:
            return self.name
        except AttributeError:
            pass
        if self.strRepr is None:
            self.strRepr = "[" + _ustr(self.expr) + "]..."
        return self.strRepr

class _NullToken(object):
    def __bool__(self):
        return False
    __nonzero__ = __bool__
    def __str__(self):
        return ""

# module-level sentinel: Optional compares against this by identity to
# detect that no `default` argument was supplied
_optionalNotMatched = _NullToken()
class Optional(ParseElementEnhance):
    """
    Optional matching of the given expression.

    Parameters:
     - expr - expression that must match zero or more times
     - default (optional) - value to be returned if the optional expression is not found.

    Example::
        # US postal code can be a 5-digit zip, plus optional 4-digit qualifier
        zip = Combine(Word(nums, exact=5) + Optional('-' + Word(nums, exact=4)))
        zip.runTests('''
            # traditional ZIP code
            12345
            
            # ZIP+4 form
            12101-0001
            
            # invalid ZIP
            98765-
            ''')
    prints::
        # traditional ZIP code
        12345
        ['12345']

        # ZIP+4 form
        12101-0001
        ['12101-0001']

        # invalid ZIP
        98765-
             ^
        FAIL: Expected end of text (at char 5), (line:1, col:6)
    """
    def __init__( self, expr, default=_optionalNotMatched ):
        super(Optional,self).__init__( expr, savelist=False )
        # mirror the wrapped expression's list-saving behavior
        self.saveAsList = self.expr.saveAsList
        self.defaultValue = default
        self.mayReturnEmpty = True

    def parseImpl( self, instring, loc, doActions=True ):
        try:
            loc, tokens = self.expr._parse( instring, loc, doActions, callPreParse=False )
        except (ParseException,IndexError):
            # no match: produce the default (if one was supplied) or nothing
            if self.defaultValue is _optionalNotMatched:
                tokens = []
            elif self.expr.resultsName:
                # expose the default under the wrapped expr's results name too
                tokens = ParseResults([ self.defaultValue ])
                tokens[self.expr.resultsName] = self.defaultValue
            else:
                tokens = [ self.defaultValue ]
        return loc, tokens

    def __str__( self ):
        # an explicitly assigned name takes precedence
        try:
            return self.name
        except AttributeError:
            pass
        if self.strRepr is None:
            self.strRepr = "[" + _ustr(self.expr) + "]"
        return self.strRepr

class SkipTo(ParseElementEnhance):
    """
    Token for skipping over all undefined text until the matched expression is found.

    Parameters:
     - expr - target expression marking the end of the data to be skipped
     - include - (default=C{False}) if True, the target expression is also parsed 
          (the skipped text and target expression are returned as a 2-element list).
     - ignore - (default=C{None}) used to define grammars (typically quoted strings and 
          comments) that might contain false matches to the target expression
     - failOn - (default=C{None}) define expressions that are not allowed to be 
          included in the skipped test; if found before the target expression is found, 
          the SkipTo is not a match

    Example::
        report = '''
            Outstanding Issues Report - 1 Jan 2000

               # | Severity | Description                               |  Days Open
            -----+----------+-------------------------------------------+-----------
             101 | Critical | Intermittent system crash                 |          6
              94 | Cosmetic | Spelling error on Login ('log|n')         |         14
              79 | Minor    | System slow when running too many reports |         47
            '''
        integer = Word(nums)
        SEP = Suppress('|')
        # use SkipTo to simply match everything up until the next SEP
        # - ignore quoted strings, so that a '|' character inside a quoted string does not match
        # - parse action will call token.strip() for each matched token, i.e., the description body
        string_data = SkipTo(SEP, ignore=quotedString)
        string_data.setParseAction(tokenMap(str.strip))
        ticket_expr = (integer("issue_num") + SEP 
                      + string_data("sev") + SEP 
                      + string_data("desc") + SEP 
                      + integer("days_open"))
        
        for tkt in ticket_expr.searchString(report):
            print tkt.dump()
    prints::
        ['101', 'Critical', 'Intermittent system crash', '6']
        - days_open: 6
        - desc: Intermittent system crash
        - issue_num: 101
        - sev: Critical
        ['94', 'Cosmetic', "Spelling error on Login ('log|n')", '14']
        - days_open: 14
        - desc: Spelling error on Login ('log|n')
        - issue_num: 94
        - sev: Cosmetic
        ['79', 'Minor', 'System slow when running too many reports', '47']
        - days_open: 47
        - desc: System slow when running too many reports
        - issue_num: 79
        - sev: Minor
    """
    def __init__( self, other, include=False, ignore=None, failOn=None ):
        super( SkipTo, self ).__init__( other )
        self.ignoreExpr = ignore
        self.mayReturnEmpty = True
        self.mayIndexError = False
        self.includeMatch = include
        self.asList = False
        # accept failOn as either a plain string or a ParserElement
        if isinstance(failOn, basestring):
            self.failOn = ParserElement._literalStringClass(failOn)
        else:
            self.failOn = failOn
        self.errmsg = "No match found for "+_ustr(self.expr)

    def parseImpl( self, instring, loc, doActions=True ):
        startloc = loc
        instrlen = len(instring)
        expr = self.expr
        # hoist bound methods out of the character-by-character scan loop
        expr_parse = self.expr._parse
        self_failOn_canParseNext = self.failOn.canParseNext if self.failOn is not None else None
        self_ignoreExpr_tryParse = self.ignoreExpr.tryParse if self.ignoreExpr is not None else None

        tmploc = loc
        while tmploc <= instrlen:
            if self_failOn_canParseNext is not None:
                # break if failOn expression matches
                if self_failOn_canParseNext(instring, tmploc):
                    break

            if self_ignoreExpr_tryParse is not None:
                # advance past ignore expressions
                while 1:
                    try:
                        tmploc = self_ignoreExpr_tryParse(instring, tmploc)
                    except ParseBaseException:
                        break

            try:
                expr_parse(instring, tmploc, doActions=False, callPreParse=False)
            except (ParseException, IndexError):
                # no match, advance loc in string
                tmploc += 1
            else:
                # matched skipto expr, done
                break

        else:
            # ran off the end of the input string without matching skipto expr, fail
            raise ParseException(instring, loc, self.errmsg, self)

        # build up return values
        loc = tmploc
        skiptext = instring[startloc:loc]
        skipresult = ParseResults(skiptext)

        if self.includeMatch:
            # re-parse the target expression for real (doActions honored this time)
            loc, mat = expr_parse(instring,loc,doActions,callPreParse=False)
            skipresult += mat

        return loc, skipresult

class Forward(ParseElementEnhance):
    """
    Forward declaration of an expression to be defined later -
    used for recursive grammars, such as algebraic infix notation.
    When the expression is known, it is assigned to the C{Forward} variable using the '<<' operator.

    Note: take care when assigning to C{Forward} not to overlook precedence of operators.
    Specifically, '|' has a lower precedence than '<<', so that::
        fwdExpr << a | b | c
    will actually be evaluated as::
        (fwdExpr << a) | b | c
    thereby leaving b and c out as parseable alternatives.  It is recommended that you
    explicitly group the values inserted into the C{Forward}::
        fwdExpr << (a | b | c)
    Converting to use the '<<=' operator instead will avoid this problem.

    See L{ParseResults.pprint} for an example of a recursive parser created using
    C{Forward}.
    """
    def __init__( self, other=None ):
        super(Forward,self).__init__( other, savelist=False )

    def __lshift__( self, other ):
        if isinstance( other, basestring ):
            other = ParserElement._literalStringClass(other)
        self.expr = other
        self.strRepr = None
        # adopt the assigned expression's parsing characteristics
        self.mayIndexError = self.expr.mayIndexError
        self.mayReturnEmpty = self.expr.mayReturnEmpty
        self.setWhitespaceChars( self.expr.whiteChars )
        self.skipWhitespace = self.expr.skipWhitespace
        self.saveAsList = self.expr.saveAsList
        self.ignoreExprs.extend(self.expr.ignoreExprs)
        return self

    def __ilshift__(self, other):
        # '<<=' delegates to '<<'; preferred spelling to avoid precedence surprises
        return self << other

    def leaveWhitespace( self ):
        self.skipWhitespace = False
        return self

    def streamline( self ):
        if not self.streamlined:
            self.streamlined = True
            if self.expr is not None:
                self.expr.streamline()
        return self

    def validate( self, validateTrace=[] ):
        # validateTrace guards against infinite recursion through self-referential grammars
        if self not in validateTrace:
            tmp = validateTrace[:]+[self]
            if self.expr is not None:
                self.expr.validate(tmp)
        self.checkRecursion([])

    def __str__( self ):
        if hasattr(self,"name"):
            return self.name
        return self.__class__.__name__ + ": ..."

        # stubbed out for now - creates awful memory and perf issues
        # NOTE: everything below is intentionally unreachable (dead code kept
        # for reference); it rendered the contained expression recursively.
        self._revertClass = self.__class__
        self.__class__ = _ForwardNoRecurse
        try:
            if self.expr is not None:
                retString = _ustr(self.expr)
            else:
                retString = "None"
        finally:
            self.__class__ = self._revertClass
        return self.__class__.__name__ + ": " + retString

    def copy(self):
        if self.expr is not None:
            return super(Forward,self).copy()
        else:
            # an undefined Forward is copied by creating a new Forward
            # that forwards to this one
            ret = Forward()
            ret <<= self
            return ret

class _ForwardNoRecurse(Forward):
    # Internal helper: temporarily substituted for Forward's class during
    # string rendering, to break infinite recursion in self-referential grammars.
    def __str__( self ):
        return "..."

class TokenConverter(ParseElementEnhance):
    """
    Abstract subclass of C{ParseExpression}, for converting parsed results.

    Subclasses (Combine, Group, Dict, Suppress) override C{postParse} to
    transform the token list produced by the wrapped expression.
    """
    def __init__( self, expr, savelist=False ):
        super(TokenConverter,self).__init__( expr )#, savelist )
        self.saveAsList = False

class Combine(TokenConverter):
    """
    Converter to concatenate all matching tokens to a single string.
    By default, the matching patterns must also be contiguous in the input string;
    this can be disabled by specifying C{'adjacent=False'} in the constructor.

    Example::
        real = Word(nums) + '.' + Word(nums)
        print(real.parseString('3.1416')) # -> ['3', '.', '1416']
        # will also erroneously match the following
        print(real.parseString('3. 1416')) # -> ['3', '.', '1416']

        real = Combine(Word(nums) + '.' + Word(nums))
        print(real.parseString('3.1416')) # -> ['3.1416']
        # no match when there are internal spaces
        print(real.parseString('3. 1416')) # -> Exception: Expected W:(0123...)
    """
    def __init__( self, expr, joinString="", adjacent=True ):
        super(Combine,self).__init__( expr )
        # suppress whitespace-stripping in contained parse expressions, but re-enable it on the Combine itself
        if adjacent:
            self.leaveWhitespace()
        self.adjacent = adjacent
        # re-enabled here because leaveWhitespace() above turned it off
        self.skipWhitespace = True
        self.joinString = joinString
        self.callPreparse = True

    def ignore( self, other ):
        # in adjacent mode, skip ParseElementEnhance's ignore handling so that
        # ignorables are not applied between the contiguous sub-expressions
        if self.adjacent:
            ParserElement.ignore(self, other)
        else:
            super( Combine, self).ignore( other )
        return self

    def postParse( self, instring, loc, tokenlist ):
        # copy to preserve any results names, then replace the token contents
        # with the single joined string
        retToks = tokenlist.copy()
        del retToks[:]
        retToks += ParseResults([ "".join(tokenlist._asStringList(self.joinString)) ], modal=self.modalResults)

        if self.resultsName and retToks.haskeys():
            return [ retToks ]
        else:
            return retToks

class Group(TokenConverter):
    """
    Converter to return the matched tokens as a list - useful for returning tokens of C{L{ZeroOrMore}} and C{L{OneOrMore}} expressions.

    Example::
        ident = Word(alphas)
        num = Word(nums)
        term = ident | num
        func = ident + Optional(delimitedList(term))
        print(func.parseString("fn a,b,100"))  # -> ['fn', 'a', 'b', '100']

        func = ident + Group(Optional(delimitedList(term)))
        print(func.parseString("fn a,b,100"))  # -> ['fn', ['a', 'b', '100']]
    """
    def __init__( self, expr ):
        super(Group,self).__init__( expr )
        self.saveAsList = True

    def postParse( self, instring, loc, tokenlist ):
        # nest the matched tokens one level deeper
        return [ tokenlist ]

class Dict(TokenConverter):
    """
    Converter to return a repetitive expression as a list, but also as a dictionary.
    Each element can also be referenced using the first token in the expression as its key.
    Useful for tabular report scraping when the first column can be used as a item key.

    Example::
        data_word = Word(alphas)
        label = data_word + FollowedBy(':')
        attr_expr = Group(label + Suppress(':') + OneOrMore(data_word).setParseAction(' '.join))

        text = "shape: SQUARE posn: upper left color: light blue texture: burlap"
        attr_expr = (label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join))
        
        # print attributes as plain groups
        print(OneOrMore(attr_expr).parseString(text).dump())
        
        # instead of OneOrMore(expr), parse using Dict(OneOrMore(Group(expr))) - Dict will auto-assign names
        result = Dict(OneOrMore(Group(attr_expr))).parseString(text)
        print(result.dump())
        
        # access named fields as dict entries, or output as dict
        print(result['shape'])        
        print(result.asDict())
    prints::
        ['shape', 'SQUARE', 'posn', 'upper left', 'color', 'light blue', 'texture', 'burlap']

        [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'light blue'], ['texture', 'burlap']]
        - color: light blue
        - posn: upper left
        - shape: SQUARE
        - texture: burlap
        SQUARE
        {'color': 'light blue', 'posn': 'upper left', 'texture': 'burlap', 'shape': 'SQUARE'}
    See more examples at L{ParseResults} of accessing fields by results name.
    """
    def __init__( self, expr ):
        super(Dict,self).__init__( expr )
        self.saveAsList = True

    def postParse( self, instring, loc, tokenlist ):
        # each sub-token's first element becomes its key in the results
        for i,tok in enumerate(tokenlist):
            if len(tok) == 0:
                continue
            ikey = tok[0]
            if isinstance(ikey,int):
                # numeric keys are stored by their string form
                ikey = _ustr(tok[0]).strip()
            if len(tok)==1:
                # key with no accompanying value -> empty-string value
                tokenlist[ikey] = _ParseResultsWithOffset("",i)
            elif len(tok)==2 and not isinstance(tok[1],ParseResults):
                # simple key/scalar-value pair
                tokenlist[ikey] = _ParseResultsWithOffset(tok[1],i)
            else:
                # multi-token value: store everything after the key;
                # unwrap a single unnamed value, keep structure otherwise
                dictvalue = tok.copy() #ParseResults(i)
                del dictvalue[0]
                if len(dictvalue)!= 1 or (isinstance(dictvalue,ParseResults) and dictvalue.haskeys()):
                    tokenlist[ikey] = _ParseResultsWithOffset(dictvalue,i)
                else:
                    tokenlist[ikey] = _ParseResultsWithOffset(dictvalue[0],i)

        if self.resultsName:
            return [ tokenlist ]
        else:
            return tokenlist


class Suppress(TokenConverter):
    """
    Converter for ignoring the results of a parsed expression.

    Example::
        source = "a, b, c,d"
        wd = Word(alphas)
        wd_list1 = wd + ZeroOrMore(',' + wd)
        print(wd_list1.parseString(source))

        # often, delimiters that are useful during parsing are just in the
        # way afterward - use Suppress to keep them out of the parsed output
        wd_list2 = wd + ZeroOrMore(Suppress(',') + wd)
        print(wd_list2.parseString(source))
    prints::
        ['a', ',', 'b', ',', 'c', ',', 'd']
        ['a', 'b', 'c', 'd']
    (See also L{delimitedList}.)
    """
    def postParse( self, instring, loc, tokenlist ):
        # discard all matched tokens
        return []

    def suppress( self ):
        # already suppressed - no further wrapping needed
        return self


class OnlyOnce(object):
    """
    Wrapper for parse actions, to ensure they are only called once.

    A second invocation raises C{ParseException}; call C{reset()} to
    re-arm the wrapper.
    """
    def __init__(self, methodCall):
        # normalize the wrapped callable's arity, as setParseAction does
        self.callable = _trim_arity(methodCall)
        self.called = False

    def __call__(self, s, l, t):
        # guard clause: any repeat invocation fails the parse
        if self.called:
            raise ParseException(s, l, "")
        results = self.callable(s, l, t)
        # mark as used only after the wrapped action completed successfully
        self.called = True
        return results

    def reset(self):
        # allow the wrapped action to fire again
        self.called = False

def traceParseAction(f):
    """
    Decorator for debugging parse actions. 
    
    When the parse action is called, this decorator will print C{">> entering I{method-name}(line:I{current_source_line}, I{parse_location}, I{matched_tokens})".}
    When the parse action completes, the decorator will print C{"<<"} followed by the returned value, or any exception that the parse action raised.

    Example::
        wd = Word(alphas)

        @traceParseAction
        def remove_duplicate_chars(tokens):
            return ''.join(sorted(set(''.join(tokens))))

        wds = OneOrMore(wd).setParseAction(remove_duplicate_chars)
        print(wds.parseString("slkdjs sld sldd sdlf sdljf"))
    prints::
        >>entering remove_duplicate_chars(line: 'slkdjs sld sldd sdlf sdljf', 0, (['slkdjs', 'sld', 'sldd', 'sdlf', 'sdljf'], {}))
        <<leaving remove_duplicate_chars (ret: 'dfjkls')
        ['dfjkls']
    """
    f = _trim_arity(f)
    def z(*paArgs):
        thisFunc = f.__name__
        # the trailing three args are always (s, l, t); an extra leading arg
        # means this is a bound method - include the class name in the trace
        s,l,t = paArgs[-3:]
        if len(paArgs)>3:
            thisFunc = paArgs[0].__class__.__name__ + '.' + thisFunc
        sys.stderr.write( ">>entering %s(line: '%s', %d, %r)\n" % (thisFunc,line(l,s),l,t) )
        try:
            ret = f(*paArgs)
        except Exception as exc:
            sys.stderr.write( "<<leaving %s (exception: %s)\n" % (thisFunc,exc) )
            raise
        sys.stderr.write( "<<leaving %s (ret: %r)\n" % (thisFunc,ret) )
        return ret
    try:
        z.__name__ = f.__name__
    except AttributeError:
        pass
    return z

#
# global helpers
#
def delimitedList( expr, delim=",", combine=False ):
    """
    Helper to define a delimited list of expressions - the delimiter defaults to ','.
    By default, the list elements and delimiters can have intervening whitespace, and
    comments, but this can be overridden by passing C{combine=True} in the constructor.
    If C{combine} is set to C{True}, the matching tokens are returned as a single token
    string, with the delimiters included; otherwise, the matching tokens are returned
    as a list of tokens, with the delimiters suppressed.

    Example::
        delimitedList(Word(alphas)).parseString("aa,bb,cc") # -> ['aa', 'bb', 'cc']
        delimitedList(Word(hexnums), delim=':', combine=True).parseString("AA:BB:CC:DD:EE") # -> ['AA:BB:CC:DD:EE']
    """
    # display name: "expr [delim expr]..."
    dlName = "%s [%s %s]..." % (_ustr(expr), _ustr(delim), _ustr(expr))
    if combine:
        # keep delimiters and join everything into one token
        return Combine( expr + ZeroOrMore( delim + expr ) ).setName(dlName)
    # default: drop the delimiters from the results
    listExpr = expr + ZeroOrMore( Suppress( delim ) + expr )
    return listExpr.setName(dlName)

def countedArray( expr, intExpr=None ):
    """
    Helper to define a counted list of expressions.
    This helper defines a pattern of the form::
        integer expr expr expr...
    where the leading integer tells how many expr expressions follow.
    The matched tokens returns the array of expr tokens as a list - the leading count token is suppressed.
    
    If C{intExpr} is specified, it should be a pyparsing expression that produces an integer value.

    Example::
        countedArray(Word(alphas)).parseString('2 ab cd ef')  # -> ['ab', 'cd']

        # in this parser, the leading integer value is given in binary,
        # '10' indicating that 2 values are in the array
        binaryConstant = Word('01').setParseAction(lambda t: int(t[0], 2))
        countedArray(Word(alphas), intExpr=binaryConstant).parseString('10 ab cd ef')  # -> ['ab', 'cd']
    """
    arrayExpr = Forward()
    def countFieldParseAction(s,l,t):
        # once the count is parsed, define the Forward as exactly n copies
        # of expr (or an empty match when n == 0)
        n = t[0]
        arrayExpr << (n and Group(And([expr]*n)) or Group(empty))
        # suppress the count token from the results
        return []
    if intExpr is None:
        intExpr = Word(nums).setParseAction(lambda t:int(t[0]))
    else:
        # copy so the caller's expression is not mutated by setName/addParseAction
        intExpr = intExpr.copy()
    intExpr.setName("arrayLen")
    intExpr.addParseAction(countFieldParseAction, callDuringTry=True)
    return ( intExpr + arrayExpr ).setName('(len) ' + _ustr(expr) + '...')

def _flatten(L):
    ret = []
    for i in L:
        if isinstance(i,list):
            ret.extend(_flatten(i))
        else:
            ret.append(i)
    return ret

def matchPreviousLiteral(expr):
    """
    Helper to define an expression that is indirectly defined from
    the tokens matched in a previous expression, that is, it looks
    for a 'repeat' of a previous expression.  For example::
        first = Word(nums)
        second = matchPreviousLiteral(first)
        matchExpr = first + ":" + second
    will match C{"1:1"}, but not C{"1:2"}.  Because this matches a
    previous literal, will also match the leading C{"1:1"} in C{"1:10"}.
    If this is not desired, use C{matchPreviousExpr}.
    Do I{not} use with packrat parsing enabled.
    """
    rep = Forward()
    def copyTokenToRepeater(s,l,t):
        # redefine the Forward to literally match whatever text expr just matched
        if t:
            if len(t) == 1:
                rep << t[0]
            else:
                # flatten t tokens
                tflat = _flatten(t.asList())
                rep << And(Literal(tt) for tt in tflat)
        else:
            # expr matched nothing - repeat must also match nothing
            rep << Empty()
    expr.addParseAction(copyTokenToRepeater, callDuringTry=True)
    rep.setName('(prev) ' + _ustr(expr))
    return rep

def matchPreviousExpr(expr):
    """
    Helper to define an expression that is indirectly defined from
    the tokens matched in a previous expression, that is, it looks
    for a 'repeat' of a previous expression.  For example::
        first = Word(nums)
        second = matchPreviousExpr(first)
        matchExpr = first + ":" + second
    will match C{"1:1"}, but not C{"1:2"}.  Because this matches by
    expressions, will I{not} match the leading C{"1:1"} in C{"1:10"};
    the expressions are evaluated first, and then compared, so
    C{"1"} is compared with C{"10"}.
    Do I{not} use with packrat parsing enabled.
    """
    rep = Forward()
    # the repeat parses with a copy of the same grammar ...
    e2 = expr.copy()
    rep <<= e2
    def copyTokenToRepeater(s,l,t):
        # ... and a parse action then requires its tokens to equal the
        # tokens captured from the first occurrence
        matchTokens = _flatten(t.asList())
        def mustMatchTheseTokens(s,l,t):
            theseTokens = _flatten(t.asList())
            if  theseTokens != matchTokens:
                raise ParseException("",0,"")
        rep.setParseAction( mustMatchTheseTokens, callDuringTry=True )
    expr.addParseAction(copyTokenToRepeater, callDuringTry=True)
    rep.setName('(prev) ' + _ustr(expr))
    return rep

def _escapeRegexRangeChars(s):
    # Backslash-escape the characters that are special inside a regex
    # character class ( \ ^ - ] ), and render literal newline/tab as their
    # textual escape sequences.
    for special in r"\^-]":
        s = s.replace(special, _bslash + special)
    s = s.replace("\n", r"\n").replace("\t", r"\t")
    return _ustr(s)

def oneOf( strs, caseless=False, useRegex=True ):
    """
    Helper to quickly define a set of alternative Literals, and makes sure to do
    longest-first testing when there is a conflict, regardless of the input order,
    but returns a C{L{MatchFirst}} for best performance.

    Parameters:
     - strs - a string of space-delimited literals, or a collection of string literals
     - caseless - (default=C{False}) - treat all literals as caseless
     - useRegex - (default=C{True}) - as an optimization, will generate a Regex
          object; otherwise, will generate a C{MatchFirst} object (if C{caseless=True}, or
          if creating a C{Regex} raises an exception)

    Example::
        comp_oper = oneOf("< = > <= >= !=")
        var = Word(alphas)
        number = Word(nums)
        term = var | number
        comparison_expr = term + comp_oper + term
        print(comparison_expr.searchString("B = 12  AA=23 B<=AA AA>12"))
    prints::
        [['B', '=', '12'], ['AA', '=', '23'], ['B', '<=', 'AA'], ['AA', '>', '12']]
    """
    if caseless:
        isequal = ( lambda a,b: a.upper() == b.upper() )
        masks = ( lambda a,b: b.upper().startswith(a.upper()) )
        parseElementClass = CaselessLiteral
    else:
        isequal = ( lambda a,b: a == b )
        masks = ( lambda a,b: b.startswith(a) )
        parseElementClass = Literal

    symbols = []
    if isinstance(strs,basestring):
        symbols = strs.split()
    elif isinstance(strs, collections.Iterable):
        symbols = list(strs)
    else:
        warnings.warn("Invalid argument to oneOf, expected string or iterable",
                SyntaxWarning, stacklevel=2)
    if not symbols:
        return NoMatch()

    # remove duplicates, and reorder so that any symbol that is a prefix
    # ("mask") of a later symbol is moved after it - longest-first testing
    i = 0
    while i < len(symbols)-1:
        cur = symbols[i]
        for j,other in enumerate(symbols[i+1:]):
            if ( isequal(other, cur) ):
                # duplicate of the current symbol - drop it
                del symbols[i+j+1]
                break
            elif ( masks(cur, other) ):
                # cur is a prefix of other - move other ahead of cur
                del symbols[i+j+1]
                symbols.insert(i,other)
                cur = other
                break
        else:
            i += 1

    if not caseless and useRegex:
        #~ print (strs,"->", "|".join( [ _escapeRegexChars(sym) for sym in symbols] ))
        try:
            # all symbols are single characters -> a character class is enough
            if len(symbols)==len("".join(symbols)):
                return Regex( "[%s]" % "".join(_escapeRegexRangeChars(sym) for sym in symbols) ).setName(' | '.join(symbols))
            else:
                return Regex( "|".join(re.escape(sym) for sym in symbols) ).setName(' | '.join(symbols))
        except Exception:
            warnings.warn("Exception creating Regex for oneOf, building MatchFirst",
                    SyntaxWarning, stacklevel=2)


    # last resort, just use MatchFirst
    return MatchFirst(parseElementClass(sym) for sym in symbols).setName(' | '.join(symbols))

def dictOf( key, value ):
    """
    Helper to easily and clearly define a dictionary by specifying the respective patterns
    for the key and value.  Takes care of defining the C{L{Dict}}, C{L{ZeroOrMore}}, and C{L{Group}} tokens
    in the proper order.  The key pattern can include delimiting markers or punctuation,
    as long as they are suppressed, thereby leaving the significant key text.  The value
    pattern can include named results, so that the C{Dict} results can include named token
    fields.

    Example::
        text = "shape: SQUARE posn: upper left color: light blue texture: burlap"
        attr_expr = (label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join))
        print(OneOrMore(attr_expr).parseString(text).dump())
        
        attr_label = label
        attr_value = Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)

        # similar to Dict, but simpler call format
        result = dictOf(attr_label, attr_value).parseString(text)
        print(result.dump())
        print(result['shape'])
        print(result.shape)  # object attribute access works too
        print(result.asDict())
    prints::
        [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'light blue'], ['texture', 'burlap']]
        - color: light blue
        - posn: upper left
        - shape: SQUARE
        - texture: burlap
        SQUARE
        SQUARE
        {'color': 'light blue', 'shape': 'SQUARE', 'posn': 'upper left', 'texture': 'burlap'}
    """
    # Dict auto-names each grouped [key, value...] pair by its key token
    return Dict( ZeroOrMore( Group ( key + value ) ) )

def originalTextFor(expr, asString=True):
    """
    Helper to return the original, untokenized text for a given expression.  Useful to
    restore the parsed fields of an HTML start tag into the raw tag text itself, or to
    revert separate tokens with intervening whitespace back to the original matching
    input text. By default, returns a string containing the original parsed text.  
       
    If the optional C{asString} argument is passed as C{False}, then the return value is a 
    C{L{ParseResults}} containing any results names that were originally matched, and a 
    single token containing the original matched text from the input string.  So if 
    the expression passed to C{L{originalTextFor}} contains expressions with defined
    results names, you must set C{asString} to C{False} if you want to preserve those
    results name values.

    Example::
        src = "this is test <b> bold <i>text</i> </b> normal text "
        for tag in ("b","i"):
            opener,closer = makeHTMLTags(tag)
            patt = originalTextFor(opener + SkipTo(closer) + closer)
            print(patt.searchString(src)[0])
    prints::
        ['<b> bold <i>text</i> </b>']
        ['<i>text</i>']
    """
    # bracket expr with two zero-width markers that record their parse locations
    locMarker = Empty().setParseAction(lambda s,loc,t: loc)
    endlocMarker = locMarker.copy()
    # the end marker must not skip leading whitespace, or trailing whitespace
    # would be absorbed into the captured span
    endlocMarker.callPreparse = False
    matchExpr = locMarker("_original_start") + expr + endlocMarker("_original_end")
    if asString:
        extractText = lambda s,l,t: s[t._original_start:t._original_end]
    else:
        def extractText(s,l,t):
            # pop the marker names so they don't leak into the results
            t[:] = [s[t.pop('_original_start'):t.pop('_original_end')]]
    matchExpr.setParseAction(extractText)
    matchExpr.ignoreExprs = expr.ignoreExprs
    return matchExpr

def ungroup(expr): 
    """
    Helper to undo pyparsing's default grouping of And expressions, even
    if all but one are non-empty.
    """
    # a parse action that returns the first (and only) nested token list
    return TokenConverter(expr).setParseAction(lambda t:t[0])

def locatedExpr(expr):
    """
    Helper to decorate a returned token with its starting and ending locations in the input string.
    This helper adds the following results names:
     - locn_start = location where matched expression begins
     - locn_end = location where matched expression ends
     - value = the actual parsed results

    Be careful if the input text contains C{<TAB>} characters, you may want to call
    C{L{ParserElement.parseWithTabs}}

    Example::
        wd = Word(alphas)
        for match in locatedExpr(wd).searchString("ljsdf123lksdjjf123lkkjj1222"):
            print(match)
    prints::
        [[0, 'ljsdf', 5]]
        [[8, 'lksdjjf', 15]]
        [[18, 'lkkjj', 23]]
    """
    # zero-width marker whose parse action returns the current location;
    # the trailing copy uses leaveWhitespace so locn_end excludes whitespace
    locator = Empty().setParseAction(lambda s,l,t: l)
    return Group(locator("locn_start") + expr("value") + locator.copy().leaveWhitespace()("locn_end"))


# convenience constants for positional expressions
empty       = Empty().setName("empty")
lineStart   = LineStart().setName("lineStart")
lineEnd     = LineEnd().setName("lineEnd")
stringStart = StringStart().setName("stringStart")
stringEnd   = StringEnd().setName("stringEnd")

# internal grammar used by srange() to parse regex-style [] character sets:
# escaped punctuation (e.g. \]), escaped hex (\x41 / \0x41), escaped octal
# (\041), a single printable char, a char range (a-z), and the overall
# bracket expression with an optional leading '^' negation
_escapedPunc = Word( _bslash, r"\[]-*.$+^?()~ ", exact=2 ).setParseAction(lambda s,l,t:t[0][1])
_escapedHexChar = Regex(r"\\0?[xX][0-9a-fA-F]+").setParseAction(lambda s,l,t:unichr(int(t[0].lstrip(r'\0x'),16)))
_escapedOctChar = Regex(r"\\0[0-7]+").setParseAction(lambda s,l,t:unichr(int(t[0][1:],8)))
_singleChar = _escapedPunc | _escapedHexChar | _escapedOctChar | Word(printables, excludeChars=r'\]', exact=1) | Regex(r"\w", re.UNICODE)
_charRange = Group(_singleChar + Suppress("-") + _singleChar)
_reBracketExpr = Literal("[") + Optional("^").setResultsName("negate") + Group( OneOrMore( _charRange | _singleChar ) ).setResultsName("body") + "]"

def srange(s):
    r"""
    Helper to easily define string ranges for use in Word construction.  Borrows
    syntax from regexp '[]' string range definitions::
        srange("[0-9]")   -> "0123456789"
        srange("[a-z]")   -> "abcdefghijklmnopqrstuvwxyz"
        srange("[a-z$_]") -> "abcdefghijklmnopqrstuvwxyz$_"
    The input string must be enclosed in []'s, and the returned string is the expanded
    character set joined into a single string.
    The values enclosed in the []'s may be:
     - a single character
     - an escaped character with a leading backslash (such as C{\-} or C{\]})
     - an escaped hex character with a leading C{'\x'} (C{\x21}, which is a C{'!'} character) 
         (C{\0x##} is also supported for backwards compatibility) 
     - an escaped octal character with a leading C{'\0'} (C{\041}, which is a C{'!'} character)
     - a range of any of the above, separated by a dash (C{'a-z'}, etc.)
     - any combination of the above (C{'aeiouy'}, C{'a-zA-Z0-9_$'}, etc.)
    """
    # a ParseResults part is a [start, end] char-range group; expand it,
    # otherwise the part is already a literal character
    _expanded = lambda p: p if not isinstance(p,ParseResults) else ''.join(unichr(c) for c in range(ord(p[0]),ord(p[1])+1))
    try:
        return "".join(_expanded(part) for part in _reBracketExpr.parseString(s).body)
    except Exception:
        # malformed range spec - deliberately return empty rather than raise
        return ""

def matchOnlyAtCol(n):
    """
    Helper method for defining parse actions that require matching at a specific
    column in the input text.
    """
    def checkColumn(strg, locn, toks):
        # col() is 1-based; reject any match not starting at column n
        if col(locn, strg) != n:
            raise ParseException(strg, locn, "matched token not at column %d" % n)
    return checkColumn

def replaceWith(replStr):
    """
    Helper method for common parse actions that simply return a literal value.  Especially
    useful when used with C{L{transformString<ParserElement.transformString>}()}.

    Example::
        num = Word(nums).setParseAction(lambda toks: int(toks[0]))
        na = oneOf("N/A NA").setParseAction(replaceWith(math.nan))
        term = na | num
        
        OneOrMore(term).parseString("324 234 N/A 234") # -> [324, 234, nan, 234]
    """
    def _replacement(s, l, t):
        # ignore the matched tokens entirely; always yield the fixed value
        return [replStr]
    return _replacement

def removeQuotes(s,l,t):
    """
    Helper parse action for removing quotation marks from parsed quoted strings.

    Example::
        # by default, quotation marks are included in parsed results
        quotedString.parseString("'Now is the Winter of our Discontent'") # -> ["'Now is the Winter of our Discontent'"]

        # use removeQuotes to strip quotation marks from parsed results
        quotedString.setParseAction(removeQuotes)
        quotedString.parseString("'Now is the Winter of our Discontent'") # -> ["Now is the Winter of our Discontent"]
    """
    quoted = t[0]
    # strip the first and last characters (the enclosing quote marks)
    return quoted[1:-1]

def tokenMap(func, *args):
    """
    Helper to define a parse action by mapping a function to all elements of a ParseResults list. If any additional
    args are passed, they are forwarded to the given function as additional arguments after
    the token, as in C{hex_integer = Word(hexnums).setParseAction(tokenMap(int, 16))}, which will convert the
    parsed data to an integer using base 16.

    Example (compare the last to example in L{ParserElement.transformString})::
        hex_ints = OneOrMore(Word(hexnums)).setParseAction(tokenMap(int, 16))
        hex_ints.runTests('''
            00 11 22 aa FF 0a 0d 1a
            ''')

        upperword = Word(alphas).setParseAction(tokenMap(str.upper))
        OneOrMore(upperword).runTests('''
            my kingdom for a horse
            ''')

        wd = Word(alphas).setParseAction(tokenMap(str.title))
        OneOrMore(wd).setParseAction(' '.join).runTests('''
            now is the winter of our discontent made glorious summer by this sun of york
            ''')
    prints::
        00 11 22 aa FF 0a 0d 1a
        [0, 17, 34, 170, 255, 10, 13, 26]

        my kingdom for a horse
        ['MY', 'KINGDOM', 'FOR', 'A', 'HORSE']

        now is the winter of our discontent made glorious summer by this sun of york
        ['Now Is The Winter Of Our Discontent Made Glorious Summer By This Sun Of York']
    """
    def pa(s, l, t):
        # apply func to every token, forwarding any extra positional args
        return [func(item, *args) for item in t]

    # give the parse action a useful name for diagnostics; fall back to the
    # function's class name, then to its str() representation
    try:
        func_name = getattr(func, '__name__', func.__class__.__name__)
    except Exception:
        func_name = str(func)
    pa.__name__ = func_name

    return pa

# Backward-compatibility module-level casing parse actions; superseded by the
# identically-named members of pyparsing_common.
upcaseTokens = tokenMap(lambda t: _ustr(t).upper())
"""(Deprecated) Helper parse action to convert tokens to upper case. Deprecated in favor of L{pyparsing_common.upcaseTokens}"""

downcaseTokens = tokenMap(lambda t: _ustr(t).lower())
"""(Deprecated) Helper parse action to convert tokens to lower case. Deprecated in favor of L{pyparsing_common.downcaseTokens}"""
    
def _makeTags(tagStr, xml):
    """Internal helper to construct opening and closing tag expressions, given a tag name"""
    # accept either a bare string tag name or a pre-built parser expression
    if isinstance(tagStr,basestring):
        resname = tagStr
        tagStr = Keyword(tagStr, caseless=not xml)
    else:
        resname = tagStr.name

    tagAttrName = Word(alphas,alphanums+"_-:")
    if (xml):
        # XML mode: attribute values must be double-quoted; tag name is case-sensitive
        tagAttrValue = dblQuotedString.copy().setParseAction( removeQuotes )
        openTag = Suppress("<") + tagStr("tag") + \
                Dict(ZeroOrMore(Group( tagAttrName + Suppress("=") + tagAttrValue ))) + \
                Optional("/",default=[False]).setResultsName("empty").setParseAction(lambda s,l,t:t[0]=='/') + Suppress(">")
    else:
        # HTML mode: values may be quoted or bare words; attribute names are lowercased,
        # and a value-less attribute (e.g. "disabled") is allowed
        printablesLessRAbrack = "".join(c for c in printables if c not in ">")
        tagAttrValue = quotedString.copy().setParseAction( removeQuotes ) | Word(printablesLessRAbrack)
        openTag = Suppress("<") + tagStr("tag") + \
                Dict(ZeroOrMore(Group( tagAttrName.setParseAction(downcaseTokens) + \
                Optional( Suppress("=") + tagAttrValue ) ))) + \
                Optional("/",default=[False]).setResultsName("empty").setParseAction(lambda s,l,t:t[0]=='/') + Suppress(">")
    closeTag = Combine(_L("</") + tagStr + ">")

    # attach results names like "startDiv"/"endDiv" derived from the tag name
    openTag = openTag.setResultsName("start"+"".join(resname.replace(":"," ").title().split())).setName("<%s>" % resname)
    closeTag = closeTag.setResultsName("end"+"".join(resname.replace(":"," ").title().split())).setName("</%s>" % resname)
    openTag.tag = resname
    closeTag.tag = resname
    return openTag, closeTag

def makeHTMLTags(tagStr):
    """
    Helper to construct opening and closing tag expressions for HTML, given a tag name. Matches
    tags in either upper or lower case, attributes with namespaces and with quoted or unquoted values.

    Example::
        text = '<td>More info at the <a href="http://pyparsing.wikispaces.com">pyparsing</a> wiki page</td>'
        # makeHTMLTags returns pyparsing expressions for the opening and closing tags as a 2-tuple
        a,a_end = makeHTMLTags("A")
        link_expr = a + SkipTo(a_end)("link_text") + a_end
        
        for link in link_expr.searchString(text):
            # attributes in the <A> tag (like "href" shown here) are also accessible as named results
            print(link.link_text, '->', link.href)
    prints::
        pyparsing -> http://pyparsing.wikispaces.com
    """
    # HTML tags are matched caselessly, so xml=False
    return _makeTags(tagStr, False)

def makeXMLTags(tagStr):
    """
    Helper to construct opening and closing tag expressions for XML, given a tag name. Matches
    tags only in the given upper/lower case.

    Example: similar to L{makeHTMLTags}
    """
    # XML tag names are case-sensitive, so xml=True requests caseful matching
    return _makeTags(tagStr, True)

def withAttribute(*args, **attrDict):
    """
    Helper to create a validating parse action to be used with start tags created
    with C{L{makeXMLTags}} or C{L{makeHTMLTags}}. Use C{withAttribute} to qualify a starting tag
    with a required attribute value, to avoid false matches on common tags such as
    C{<TD>} or C{<DIV>}.

    Call C{withAttribute} with a series of attribute names and values. Specify the list
    of filter attributes names and values as:
     - keyword arguments, as in C{(align="right")}, or
     - as an explicit dict with C{**} operator, when an attribute name is also a Python
          reserved word, as in C{**{"class":"Customer", "align":"right"}}
     - a list of name-value tuples, as in ( ("ns1:class", "Customer"), ("ns2:align","right") )
    For attribute names with a namespace prefix, you must use the second form.  Attribute
    names are matched insensitive to upper/lower case.

    If just testing for C{class} (with or without a namespace), use C{L{withClass}}.

    To verify that the attribute exists, but without specifying a value, pass
    C{withAttribute.ANY_VALUE} as the value.

    Example::
        html = '''
            <div>
            Some text
            <div type="grid">1 4 0 1 0</div>
            <div type="graph">1,3 2,3 1,1</div>
            <div>this has no type</div>
            </div>
                
        '''
        div,div_end = makeHTMLTags("div")

        # only match div tag having a type attribute with value "grid"
        div_grid = div().setParseAction(withAttribute(type="grid"))
        grid_expr = div_grid + SkipTo(div | div_end)("body")
        for grid_header in grid_expr.searchString(html):
            print(grid_header.body)
        
        # construct a match with any div tag having a type attribute, regardless of the value
        div_any_type = div().setParseAction(withAttribute(type=withAttribute.ANY_VALUE))
        div_expr = div_any_type + SkipTo(div | div_end)("body")
        for div_header in div_expr.searchString(html):
            print(div_header.body)
    prints::
        1 4 0 1 0

        1 4 0 1 0
        1,3 2,3 1,1
    """
    # positional tuples take precedence; otherwise use the keyword dict
    source = args[:] if args else attrDict.items()
    # normalize to a concrete list of (name, value) pairs
    attrs = [(name, value) for name, value in source]

    def pa(s, l, tokens):
        for attrName, attrValue in attrs:
            if attrName not in tokens:
                raise ParseException(s, l, "no matching attribute " + attrName)
            # ANY_VALUE is a sentinel meaning "attribute must exist, value unchecked"
            if attrValue != withAttribute.ANY_VALUE and tokens[attrName] != attrValue:
                raise ParseException(s, l, "attribute '%s' has value '%s', must be '%s'" %
                                            (attrName, tokens[attrName], attrValue))
    return pa
withAttribute.ANY_VALUE = object()

def withClass(classname, namespace=''):
    """
    Simplified version of C{L{withAttribute}} when matching on a div class - made
    difficult because C{class} is a reserved word in Python.

    Example::
        html = '''
            <div>
            Some text
            <div class="grid">1 4 0 1 0</div>
            <div class="graph">1,3 2,3 1,1</div>
            <div>this &lt;div&gt; has no class</div>
            </div>
                
        '''
        div,div_end = makeHTMLTags("div")
        div_grid = div().setParseAction(withClass("grid"))
        
        grid_expr = div_grid + SkipTo(div | div_end)("body")
        for grid_header in grid_expr.searchString(html):
            print(grid_header.body)
        
        div_any_type = div().setParseAction(withClass(withAttribute.ANY_VALUE))
        div_expr = div_any_type + SkipTo(div | div_end)("body")
        for div_header in div_expr.searchString(html):
            print(div_header.body)
    prints::
        1 4 0 1 0

        1 4 0 1 0
        1,3 2,3 1,1
    """
    # qualify the attribute name with the namespace prefix, if one was given
    if namespace:
        classattr = "%s:class" % namespace
    else:
        classattr = "class"
    return withAttribute(**{classattr : classname})

# Associativity constants for infixNotation; arbitrary unique sentinel objects
# compared by identity.
opAssoc = _Constants()
opAssoc.LEFT = object()
opAssoc.RIGHT = object()

def infixNotation( baseExpr, opList, lpar=Suppress('('), rpar=Suppress(')') ):
    """
    Helper method for constructing grammars of expressions made up of
    operators working in a precedence hierarchy.  Operators may be unary or
    binary, left- or right-associative.  Parse actions can also be attached
    to operator expressions. The generated parser will also recognize the use 
    of parentheses to override operator precedences (see example below).
    
    Note: if you define a deep operator list, you may see performance issues
    when using infixNotation. See L{ParserElement.enablePackrat} for a
    mechanism to potentially improve your parser performance.

    Parameters:
     - baseExpr - expression representing the most basic element for the nested
     - opList - list of tuples, one for each operator precedence level in the
      expression grammar; each tuple is of the form
      (opExpr, numTerms, rightLeftAssoc, parseAction), where:
       - opExpr is the pyparsing expression for the operator;
          may also be a string, which will be converted to a Literal;
          if numTerms is 3, opExpr is a tuple of two expressions, for the
          two operators separating the 3 terms
       - numTerms is the number of terms for this operator (must
          be 1, 2, or 3)
       - rightLeftAssoc is the indicator whether the operator is
          right or left associative, using the pyparsing-defined
          constants C{opAssoc.RIGHT} and C{opAssoc.LEFT}.
       - parseAction is the parse action to be associated with
          expressions matching this operator expression (the
          parse action tuple member may be omitted)
     - lpar - expression for matching left-parentheses (default=C{Suppress('(')})
     - rpar - expression for matching right-parentheses (default=C{Suppress(')')})

    Example::
        # simple example of four-function arithmetic with ints and variable names
        integer = pyparsing_common.signed_integer
        varname = pyparsing_common.identifier 
        
        arith_expr = infixNotation(integer | varname,
            [
            ('-', 1, opAssoc.RIGHT),
            (oneOf('* /'), 2, opAssoc.LEFT),
            (oneOf('+ -'), 2, opAssoc.LEFT),
            ])
        
        arith_expr.runTests('''
            5+3*6
            (5+3)*6
            -2--11
            ''', fullDump=False)
    prints::
        5+3*6
        [[5, '+', [3, '*', 6]]]

        (5+3)*6
        [[[5, '+', 3], '*', 6]]

        -2--11
        [[['-', 2], '-', ['-', 11]]]
    """
    ret = Forward()
    # innermost operand: the base expression, or a parenthesized instance of
    # the complete expression (parens override precedence)
    lastExpr = baseExpr | ( lpar + ret + rpar )
    for i,operDef in enumerate(opList):
        # pad with None so a 3-tuple (no parse action) unpacks cleanly into 4 values
        opExpr,arity,rightLeftAssoc,pa = (operDef + (None,))[:4]
        # ternary operators supply a pair of operator expressions
        termName = "%s term" % opExpr if arity < 3 else "%s%s term" % opExpr
        if arity == 3:
            if opExpr is None or len(opExpr) != 2:
                raise ValueError("if numterms=3, opExpr must be a tuple or list of two expressions")
            opExpr1, opExpr2 = opExpr
        thisExpr = Forward().setName(termName)
        if rightLeftAssoc == opAssoc.LEFT:
            if arity == 1:
                # postfix unary: an operand followed by one or more operators
                matchExpr = FollowedBy(lastExpr + opExpr) + Group( lastExpr + OneOrMore( opExpr ) )
            elif arity == 2:
                if opExpr is not None:
                    matchExpr = FollowedBy(lastExpr + opExpr + lastExpr) + Group( lastExpr + OneOrMore( opExpr + lastExpr ) )
                else:
                    # a None operator means juxtaposition (implicit operator)
                    matchExpr = FollowedBy(lastExpr+lastExpr) + Group( lastExpr + OneOrMore(lastExpr) )
            elif arity == 3:
                matchExpr = FollowedBy(lastExpr + opExpr1 + lastExpr + opExpr2 + lastExpr) + \
                            Group( lastExpr + opExpr1 + lastExpr + opExpr2 + lastExpr )
            else:
                raise ValueError("operator must be unary (1), binary (2), or ternary (3)")
        elif rightLeftAssoc == opAssoc.RIGHT:
            if arity == 1:
                # try to avoid LR with this extra test
                if not isinstance(opExpr, Optional):
                    opExpr = Optional(opExpr)
                matchExpr = FollowedBy(opExpr.expr + thisExpr) + Group( opExpr + thisExpr )
            elif arity == 2:
                if opExpr is not None:
                    # right associativity: recurse into thisExpr on the right side
                    matchExpr = FollowedBy(lastExpr + opExpr + thisExpr) + Group( lastExpr + OneOrMore( opExpr + thisExpr ) )
                else:
                    matchExpr = FollowedBy(lastExpr + thisExpr) + Group( lastExpr + OneOrMore( thisExpr ) )
            elif arity == 3:
                matchExpr = FollowedBy(lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr) + \
                            Group( lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr )
            else:
                raise ValueError("operator must be unary (1), binary (2), or ternary (3)")
        else:
            raise ValueError("operator must indicate right or left associativity")
        if pa:
            matchExpr.setParseAction( pa )
        # each precedence level matches its own operators, or falls through to
        # the next-tighter level
        thisExpr <<= ( matchExpr.setName(termName) | lastExpr )
        lastExpr = thisExpr
    ret <<= lastExpr
    return ret

operatorPrecedence = infixNotation
"""(Deprecated) Former name of C{L{infixNotation}}, will be dropped in a future release."""

# Predefined quoted-string expressions.  In quotedString, '+' binds tighter
# than '|', so the expression is (double-quoted) | (single-quoted).
dblQuotedString = Combine(Regex(r'"(?:[^"\n\r\\]|(?:"")|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*')+'"').setName("string enclosed in double quotes")
sglQuotedString = Combine(Regex(r"'(?:[^'\n\r\\]|(?:'')|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*")+"'").setName("string enclosed in single quotes")
quotedString = Combine(Regex(r'"(?:[^"\n\r\\]|(?:"")|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*')+'"'|
                       Regex(r"'(?:[^'\n\r\\]|(?:'')|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*")+"'").setName("quotedString using single or double quotes")
unicodeString = Combine(_L('u') + quotedString.copy()).setName("unicode string literal")

def nestedExpr(opener="(", closer=")", content=None, ignoreExpr=quotedString.copy()):
    """
    Helper method for defining nested lists enclosed in opening and closing
    delimiters ("(" and ")" are the default).

    Parameters:
     - opener - opening character for a nested list (default=C{"("}); can also be a pyparsing expression
     - closer - closing character for a nested list (default=C{")"}); can also be a pyparsing expression
     - content - expression for items within the nested lists (default=C{None})
     - ignoreExpr - expression for ignoring opening and closing delimiters (default=C{quotedString})

    If an expression is not provided for the content argument, the nested
    expression will capture all whitespace-delimited content between delimiters
    as a list of separate values.

    Use the C{ignoreExpr} argument to define expressions that may contain
    opening or closing characters that should not be treated as opening
    or closing characters for nesting, such as quotedString or a comment
    expression.  Specify multiple expressions using an C{L{Or}} or C{L{MatchFirst}}.
    The default is L{quotedString}, but if no expressions are to be ignored,
    then pass C{None} for this argument.

    Example::
        data_type = oneOf("void int short long char float double")
        decl_data_type = Combine(data_type + Optional(Word('*')))
        ident = Word(alphas+'_', alphanums+'_')
        number = pyparsing_common.number
        arg = Group(decl_data_type + ident)
        LPAR,RPAR = map(Suppress, "()")

        code_body = nestedExpr('{', '}', ignoreExpr=(quotedString | cStyleComment))

        c_function = (decl_data_type("type") 
                      + ident("name")
                      + LPAR + Optional(delimitedList(arg), [])("args") + RPAR 
                      + code_body("body"))
        c_function.ignore(cStyleComment)
        
        source_code = '''
            int is_odd(int x) { 
                return (x%2); 
            }
                
            int dec_to_hex(char hchar) { 
                if (hchar >= '0' && hchar <= '9') { 
                    return (ord(hchar)-ord('0')); 
                } else { 
                    return (10+ord(hchar)-ord('A'));
                } 
            }
        '''
        for func in c_function.searchString(source_code):
            print("%(name)s (%(type)s) args: %(args)s" % func)

    prints::
        is_odd (int) args: [['int', 'x']]
        dec_to_hex (int) args: [['char', 'hchar']]
    """
    if opener == closer:
        raise ValueError("opening and closing strings cannot be the same")
    if content is None:
        # no content expression given - synthesize one from the delimiters
        if isinstance(opener,basestring) and isinstance(closer,basestring):
            if len(opener) == 1 and len(closer)==1:
                # single-character delimiters can simply be excluded character-wise
                if ignoreExpr is not None:
                    content = (Combine(OneOrMore(~ignoreExpr +
                                    CharsNotIn(opener+closer+ParserElement.DEFAULT_WHITE_CHARS,exact=1))
                                ).setParseAction(lambda t:t[0].strip()))
                else:
                    content = (empty.copy()+CharsNotIn(opener+closer+ParserElement.DEFAULT_WHITE_CHARS
                                ).setParseAction(lambda t:t[0].strip()))
            else:
                # multi-character delimiters must be excluded via negative lookahead
                if ignoreExpr is not None:
                    content = (Combine(OneOrMore(~ignoreExpr + 
                                    ~Literal(opener) + ~Literal(closer) +
                                    CharsNotIn(ParserElement.DEFAULT_WHITE_CHARS,exact=1))
                                ).setParseAction(lambda t:t[0].strip()))
                else:
                    content = (Combine(OneOrMore(~Literal(opener) + ~Literal(closer) +
                                    CharsNotIn(ParserElement.DEFAULT_WHITE_CHARS,exact=1))
                                ).setParseAction(lambda t:t[0].strip()))
        else:
            raise ValueError("opening and closing arguments must be strings if no content expression is given")
    # recursive definition: a group of (ignored | nested | content) items
    ret = Forward()
    if ignoreExpr is not None:
        ret <<= Group( Suppress(opener) + ZeroOrMore( ignoreExpr | ret | content ) + Suppress(closer) )
    else:
        ret <<= Group( Suppress(opener) + ZeroOrMore( ret | content )  + Suppress(closer) )
    ret.setName('nested %s%s expression' % (opener,closer))
    return ret

def indentedBlock(blockStatementExpr, indentStack, indent=True):
    """
    Helper method for defining space-delimited indentation blocks, such as
    those used to define block statements in Python source code.

    Parameters:
     - blockStatementExpr - expression defining syntax of statement that
            is repeated within the indented block
     - indentStack - list created by caller to manage indentation stack
            (multiple statementWithIndentedBlock expressions within a single grammar
            should share a common indentStack)
     - indent - boolean indicating whether block must be indented beyond the
            the current level; set to False for block of left-most statements
            (default=C{True})

    A valid block must contain at least one C{blockStatement}.

    Example::
        data = '''
        def A(z):
          A1
          B = 100
          G = A2
          A2
          A3
        B
        def BB(a,b,c):
          BB1
          def BBA():
            bba1
            bba2
            bba3
        C
        D
        def spam(x,y):
             def eggs(z):
                 pass
        '''


        indentStack = [1]
        stmt = Forward()

        identifier = Word(alphas, alphanums)
        funcDecl = ("def" + identifier + Group( "(" + Optional( delimitedList(identifier) ) + ")" ) + ":")
        func_body = indentedBlock(stmt, indentStack)
        funcDef = Group( funcDecl + func_body )

        rvalue = Forward()
        funcCall = Group(identifier + "(" + Optional(delimitedList(rvalue)) + ")")
        rvalue << (funcCall | identifier | Word(nums))
        assignment = Group(identifier + "=" + rvalue)
        stmt << ( funcDef | assignment | identifier )

        module_body = OneOrMore(stmt)

        parseTree = module_body.parseString(data)
        parseTree.pprint()
    prints::
        [['def',
          'A',
          ['(', 'z', ')'],
          ':',
          [['A1'], [['B', '=', '100']], [['G', '=', 'A2']], ['A2'], ['A3']]],
         'B',
         ['def',
          'BB',
          ['(', 'a', 'b', 'c', ')'],
          ':',
          [['BB1'], [['def', 'BBA', ['(', ')'], ':', [['bba1'], ['bba2'], ['bba3']]]]]],
         'C',
         'D',
         ['def',
          'spam',
          ['(', 'x', 'y', ')'],
          ':',
          [[['def', 'eggs', ['(', 'z', ')'], ':', [['pass']]]]]]] 
    """
    def checkPeerIndent(s,l,t):
        # at end of input there is nothing left to check
        if l >= len(s): return
        curCol = col(l,s)
        if curCol != indentStack[-1]:
            if curCol > indentStack[-1]:
                raise ParseFatalException(s,l,"illegal nesting")
            raise ParseException(s,l,"not a peer entry")

    def checkSubIndent(s,l,t):
        # a sub-block must be indented deeper than the current level;
        # push the new level onto the shared stack
        curCol = col(l,s)
        if curCol > indentStack[-1]:
            indentStack.append( curCol )
        else:
            raise ParseException(s,l,"not a subentry")

    def checkUnindent(s,l,t):
        if l >= len(s): return
        curCol = col(l,s)
        # an unindent must return to (at most) the enclosing level
        if not(indentStack and curCol < indentStack[-1] and curCol <= indentStack[-2]):
            raise ParseException(s,l,"not an unindent")
        indentStack.pop()

    # NL consumes line ends (but not leading spaces/tabs, which carry the indent info)
    NL = OneOrMore(LineEnd().setWhitespaceChars("\t ").suppress())
    INDENT = (Empty() + Empty().setParseAction(checkSubIndent)).setName('INDENT')
    PEER   = Empty().setParseAction(checkPeerIndent).setName('')
    UNDENT = Empty().setParseAction(checkUnindent).setName('UNINDENT')
    if indent:
        smExpr = Group( Optional(NL) +
            #~ FollowedBy(blockStatementExpr) +
            INDENT + (OneOrMore( PEER + Group(blockStatementExpr) + Optional(NL) )) + UNDENT)
    else:
        smExpr = Group( Optional(NL) +
            (OneOrMore( PEER + Group(blockStatementExpr) + Optional(NL) )) )
    # allow backslash-continued lines within block statements
    blockStatementExpr.ignore(_bslash + LineEnd())
    return smExpr.setName('indented block')

# Latin-1 letters and punctuation outside the ASCII range.
alphas8bit = srange(r"[\0xc0-\0xd6\0xd8-\0xf6\0xf8-\0xff]")
punc8bit = srange(r"[\0xa1-\0xbf\0xd7\0xf7]")

# Generic open/close tag matchers, and a small map of the most common HTML
# character entities to their replacement characters (used by replaceHTMLEntity).
anyOpenTag,anyCloseTag = makeHTMLTags(Word(alphas,alphanums+"_:").setName('any tag'))
_htmlEntityMap = dict(zip("gt lt amp nbsp quot apos".split(),'><& "\''))
commonHTMLEntity = Regex('&(?P<entity>' + '|'.join(_htmlEntityMap.keys()) +");").setName("common HTML entity")
def replaceHTMLEntity(t):
    """Helper parser action to replace common HTML entities with their special characters"""
    # 'entity' is the named group captured by commonHTMLEntity; unknown
    # entities map to None
    entity_name = t.entity
    return _htmlEntityMap.get(entity_name)

# it's easy to get these comment structures wrong - they're very common, so may as well make them available
cStyleComment = Combine(Regex(r"/\*(?:[^*]|\*(?!/))*") + '*/').setName("C style comment")
"Comment of the form C{/* ... */}"

htmlComment = Regex(r"<!--[\s\S]*?-->").setName("HTML comment")
"Comment of the form C{<!-- ... -->}"

# matches everything up to (but not including) the next newline
restOfLine = Regex(r".*").leaveWhitespace().setName("rest of line")
dblSlashComment = Regex(r"//(?:\\\n|[^\n])*").setName("// comment")
"Comment of the form C{// ... (to end of line)}"

cppStyleComment = Combine(Regex(r"/\*(?:[^*]|\*(?!/))*") + '*/'| dblSlashComment).setName("C++ style comment")
"Comment of either form C{L{cStyleComment}} or C{L{dblSlashComment}}"

javaStyleComment = cppStyleComment
"Same as C{L{cppStyleComment}}"

pythonStyleComment = Regex(r"#.*").setName("Python style comment")
"Comment of the form C{# ... (to end of line)}"

# one comma-separated item: words possibly containing embedded spaces,
# stopping before a comma or end of line
_commasepitem = Combine(OneOrMore(Word(printables, excludeChars=',') +
                                  Optional( Word(" \t") +
                                            ~Literal(",") + ~LineEnd() ) ) ).streamline().setName("commaItem")
commaSeparatedList = delimitedList( Optional( quotedString.copy() | _commasepitem, default="") ).setName("commaSeparatedList")
"""(Deprecated) Predefined expression of 1 or more printable words or quoted strings, separated by commas.
   This expression is deprecated in favor of L{pyparsing_common.comma_separated_list}."""

# some other useful expressions - using lower-case class name since we are really using this as a namespace
class pyparsing_common:
    """
    Here are some common low-level expressions that may be useful in jump-starting parser development:
     - numeric forms (L{integers<integer>}, L{reals<real>}, L{scientific notation<sci_real>})
     - common L{programming identifiers<identifier>}
     - network addresses (L{MAC<mac_address>}, L{IPv4<ipv4_address>}, L{IPv6<ipv6_address>})
     - ISO8601 L{dates<iso8601_date>} and L{datetime<iso8601_datetime>}
     - L{UUID<uuid>}
     - L{comma-separated list<comma_separated_list>}
    Parse actions:
     - C{L{convertToInteger}}
     - C{L{convertToFloat}}
     - C{L{convertToDate}}
     - C{L{convertToDatetime}}
     - C{L{stripHTMLTags}}
     - C{L{upcaseTokens}}
     - C{L{downcaseTokens}}

    Example::
        pyparsing_common.number.runTests('''
            # any int or real number, returned as the appropriate type
            100
            -100
            +100
            3.14159
            6.02e23
            1e-12
            ''')

        pyparsing_common.fnumber.runTests('''
            # any int or real number, returned as float
            100
            -100
            +100
            3.14159
            6.02e23
            1e-12
            ''')

        pyparsing_common.hex_integer.runTests('''
            # hex numbers
            100
            FF
            ''')

        pyparsing_common.fraction.runTests('''
            # fractions
            1/2
            -3/4
            ''')

        pyparsing_common.mixed_integer.runTests('''
            # mixed fractions
            1
            1/2
            -3/4
            1-3/4
            ''')

        import uuid
        pyparsing_common.uuid.setParseAction(tokenMap(uuid.UUID))
        pyparsing_common.uuid.runTests('''
            # uuid
            12345678-1234-5678-1234-567812345678
            ''')
    prints::
        # any int or real number, returned as the appropriate type
        100
        [100]

        -100
        [-100]

        +100
        [100]

        3.14159
        [3.14159]

        6.02e23
        [6.02e+23]

        1e-12
        [1e-12]

        # any int or real number, returned as float
        100
        [100.0]

        -100
        [-100.0]

        +100
        [100.0]

        3.14159
        [3.14159]

        6.02e23
        [6.02e+23]

        1e-12
        [1e-12]

        # hex numbers
        100
        [256]

        FF
        [255]

        # fractions
        1/2
        [0.5]

        -3/4
        [-0.75]

        # mixed fractions
        1
        [1]

        1/2
        [0.5]

        -3/4
        [-0.75]

        1-3/4
        [1.75]

        # uuid
        12345678-1234-5678-1234-567812345678
        [UUID('12345678-1234-5678-1234-567812345678')]
    """

    convertToInteger = tokenMap(int)
    """
    Parse action for converting parsed integers to Python int
    """

    convertToFloat = tokenMap(float)
    """
    Parse action for converting parsed numbers to Python float
    """

    integer = Word(nums).setName("integer").setParseAction(convertToInteger)
    """expression that parses an unsigned integer, returns an int"""

    hex_integer = Word(hexnums).setName("hex integer").setParseAction(tokenMap(int,16))
    """expression that parses a hexadecimal integer, returns an int"""

    signed_integer = Regex(r'[+-]?\d+').setName("signed integer").setParseAction(convertToInteger)
    """expression that parses an integer with optional leading sign, returns an int"""

    fraction = (signed_integer().setParseAction(convertToFloat) + '/' + signed_integer().setParseAction(convertToFloat)).setName("fraction")
    """fractional expression of an integer divided by an integer, returns a float"""
    # numerator and denominator are converted to float, so division is true division
    fraction.addParseAction(lambda t: t[0]/t[-1])

    mixed_integer = (fraction | signed_integer + Optional(Optional('-').suppress() + fraction)).setName("fraction or mixed integer-fraction")
    """mixed integer of the form 'integer - fraction', with optional leading integer, returns float"""
    mixed_integer.addParseAction(sum)

    real = Regex(r'[+-]?\d+\.\d*').setName("real number").setParseAction(convertToFloat)
    """expression that parses a floating point number and returns a float"""

    sci_real = Regex(r'[+-]?\d+([eE][+-]?\d+|\.\d*([eE][+-]?\d+)?)').setName("real number with scientific notation").setParseAction(convertToFloat)
    """expression that parses a floating point number with optional scientific notation and returns a float"""

    # streamlining this expression makes the docs nicer-looking
    number = (sci_real | real | signed_integer).streamline()
    """any numeric expression, returns the corresponding Python type"""

    fnumber = Regex(r'[+-]?\d+\.?\d*([eE][+-]?\d+)?').setName("fnumber").setParseAction(convertToFloat)
    """any int or real number, returned as float"""
    
    identifier = Word(alphas+'_', alphanums+'_').setName("identifier")
    """typical code identifier (leading alpha or '_', followed by 0 or more alphas, nums, or '_')"""
    
    ipv4_address = Regex(r'(25[0-5]|2[0-4][0-9]|1?[0-9]{1,2})(\.(25[0-5]|2[0-4][0-9]|1?[0-9]{1,2})){3}').setName("IPv4 address")
    "IPv4 address (C{0.0.0.0 - 255.255.255.255})"

    _ipv6_part = Regex(r'[0-9a-fA-F]{1,4}').setName("hex_integer")
    _full_ipv6_address = (_ipv6_part + (':' + _ipv6_part)*7).setName("full IPv6 address")
    _short_ipv6_address = (Optional(_ipv6_part + (':' + _ipv6_part)*(0,6)) + "::" + Optional(_ipv6_part + (':' + _ipv6_part)*(0,6))).setName("short IPv6 address")
    # a short-form address must contain fewer than 8 groups, or it would be full-form
    _short_ipv6_address.addCondition(lambda t: sum(1 for tt in t if pyparsing_common._ipv6_part.matches(tt)) < 8)
    _mixed_ipv6_address = ("::ffff:" + ipv4_address).setName("mixed IPv6 address")
    ipv6_address = Combine((_full_ipv6_address | _mixed_ipv6_address | _short_ipv6_address).setName("IPv6 address")).setName("IPv6 address")
    "IPv6 address (long, short, or mixed form)"
    
    mac_address = Regex(r'[0-9a-fA-F]{2}([:.-])[0-9a-fA-F]{2}(?:\1[0-9a-fA-F]{2}){4}').setName("MAC address")
    "MAC address xx:xx:xx:xx:xx (may also have '-' or '.' delimiters)"

    @staticmethod
    def convertToDate(fmt="%Y-%m-%d"):
        """
        Helper to create a parse action for converting parsed date string to Python datetime.date

        Params -
         - fmt - format to be passed to datetime.strptime (default=C{"%Y-%m-%d"})

        Example::
            date_expr = pyparsing_common.iso8601_date.copy()
            date_expr.setParseAction(pyparsing_common.convertToDate())
            print(date_expr.parseString("1999-12-31"))
        prints::
            [datetime.date(1999, 12, 31)]
        """
        def _parse_date(s, l, t):
            # re-raise strptime failures as parse errors at the match location
            try:
                return datetime.strptime(t[0], fmt).date()
            except ValueError as ve:
                raise ParseException(s, l, str(ve))
        return _parse_date

    @staticmethod
    def convertToDatetime(fmt="%Y-%m-%dT%H:%M:%S.%f"):
        """
        Helper to create a parse action for converting parsed datetime string to Python datetime.datetime

        Params -
         - fmt - format to be passed to datetime.strptime (default=C{"%Y-%m-%dT%H:%M:%S.%f"})

        Example::
            dt_expr = pyparsing_common.iso8601_datetime.copy()
            dt_expr.setParseAction(pyparsing_common.convertToDatetime())
            print(dt_expr.parseString("1999-12-31T23:59:59.999"))
        prints::
            [datetime.datetime(1999, 12, 31, 23, 59, 59, 999000)]
        """
        def _parse_datetime(s, l, t):
            # re-raise strptime failures as parse errors at the match location
            try:
                return datetime.strptime(t[0], fmt)
            except ValueError as ve:
                raise ParseException(s, l, str(ve))
        return _parse_datetime

    # Named groups (year/month/day) allow callers to inspect the match parts.
    iso8601_date = Regex(r'(?P<year>\d{4})(?:-(?P<month>\d\d)(?:-(?P<day>\d\d))?)?').setName("ISO8601 date")
    "ISO8601 date (C{yyyy-mm-dd})"

    # Seconds, fractional seconds, and timezone are all optional in the pattern.
    iso8601_datetime = Regex(r'(?P<year>\d{4})-(?P<month>\d\d)-(?P<day>\d\d)[T ](?P<hour>\d\d):(?P<minute>\d\d)(:(?P<second>\d\d(\.\d*)?)?)?(?P<tz>Z|[+-]\d\d:?\d\d)?').setName("ISO8601 datetime")
    "ISO8601 datetime (C{yyyy-mm-ddThh:mm:ss.s(Z|+-00:00)}) - trailing seconds, milliseconds, and timezone optional; accepts separating C{'T'} or C{' '}"

    uuid = Regex(r'[0-9a-fA-F]{8}(-[0-9a-fA-F]{4}){3}-[0-9a-fA-F]{12}').setName("UUID")
    "UUID (C{xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx})"

    # Matches (and suppresses) any HTML open or close tag; used by stripHTMLTags.
    _html_stripper = anyOpenTag.suppress() | anyCloseTag.suppress()
    @staticmethod
    def stripHTMLTags(s, l, tokens):
        """
        Parse action to remove HTML tags from web page HTML source

        Example::
            # strip HTML links from normal text 
            text = '<td>More info at the <a href="http://pyparsing.wikispaces.com">pyparsing</a> wiki page</td>'
            td,td_end = makeHTMLTags("TD")
            table_text = td + SkipTo(td_end).setParseAction(pyparsing_common.stripHTMLTags)("body") + td_end
            
            print(table_text.parseString(text).body) # -> 'More info at the pyparsing wiki page'
        """
        # Only the first token is transformed; standard (s, l, tokens) signature.
        return pyparsing_common._html_stripper.transformString(tokens[0])

    # One unquoted list item: printable chars (no commas), keeping internal spaces.
    _commasepitem = Combine(OneOrMore(~Literal(",") + ~LineEnd() + Word(printables, excludeChars=',') 
                                        + Optional( White(" \t") ) ) ).streamline().setName("commaItem")
    # default="" lets empty items (e.g. "a,,b") come through as empty strings.
    comma_separated_list = delimitedList( Optional( quotedString.copy() | _commasepitem, default="") ).setName("comma separated list")
    """Predefined expression of 1 or more printable words or quoted strings, separated by commas."""

    upcaseTokens = staticmethod(tokenMap(lambda t: _ustr(t).upper()))
    """Parse action to convert tokens to upper case."""

    downcaseTokens = staticmethod(tokenMap(lambda t: _ustr(t).lower()))
    """Parse action to convert tokens to lower case."""


if __name__ == "__main__":
    # Self-test / demo: a miniature SQL SELECT grammar plus runTests examples.

    selectToken    = CaselessLiteral("select")
    fromToken      = CaselessLiteral("from")

    ident          = Word(alphas, alphanums + "_$")

    # Dotted names are combined into a single token and upper-cased.
    columnName     = delimitedList(ident, ".", combine=True).setParseAction(upcaseTokens)
    columnNameList = Group(delimitedList(columnName)).setName("columns")
    columnSpec     = ('*' | columnNameList)

    tableName      = delimitedList(ident, ".", combine=True).setParseAction(upcaseTokens)
    tableNameList  = Group(delimitedList(tableName)).setName("tables")
    
    simpleSQL      = selectToken("command") + columnSpec("columns") + fromToken + tableNameList("tables")

    # demo runTests method, including embedded comments in test string
    simpleSQL.runTests("""
        # '*' as column list and dotted table name
        select * from SYS.XYZZY

        # caseless match on "SELECT", and casts back to "select"
        SELECT * from XYZZY, ABC

        # list of column names, and mixed case SELECT keyword
        Select AA,BB,CC from Sys.dual

        # multiple tables
        Select A, B, C from Sys.dual, Table2

        # invalid SELECT keyword - should fail
        Xelect A, B, C from Sys.dual

        # incomplete command - should fail
        Select

        # invalid column name - should fail
        Select ^^^ frox Sys.dual

        """)

    pyparsing_common.number.runTests("""
        100
        -100
        +100
        3.14159
        6.02e23
        1e-12
        """)

    # any int or real number, returned as float
    pyparsing_common.fnumber.runTests("""
        100
        -100
        +100
        3.14159
        6.02e23
        1e-12
        """)

    pyparsing_common.hex_integer.runTests("""
        100
        FF
        """)

    # Attach a parse action so matched UUID text is returned as uuid.UUID objects.
    import uuid
    pyparsing_common.uuid.setParseAction(tokenMap(uuid.UUID))
    pyparsing_common.uuid.runTests("""
        12345678-1234-5678-1234-567812345678
        """)
_vendor/ipaddress.py000064400000234460151733136270010551 0ustar00# Copyright 2007 Google Inc.
#  Licensed to PSF under a Contributor Agreement.

"""A fast, lightweight IPv4/IPv6 manipulation library in Python.

This library is used to create/poke/manipulate IPv4 and IPv6 addresses
and networks.

"""

from __future__ import unicode_literals


import itertools
import struct

__version__ = '1.0.17'

# Compatibility functions
# The shims below paper over Python 2 / Python 3 differences so the rest of
# the module can be written once.
_compat_int_types = (int,)
try:
    # On Python 2, 'long' exists and must also be accepted as an int type.
    _compat_int_types = (int, long)
except NameError:
    pass
try:
    # On Python 2, text strings are 'unicode'.
    _compat_str = unicode
except NameError:
    _compat_str = str
    assert bytes != str
if b'\0'[0] == 0:  # Python 3 semantics
    # Indexing bytes on Python 3 already yields integers.
    def _compat_bytes_to_byte_vals(byt):
        return byt
else:
    # Python 2: indexing a byte string yields 1-char strings; unpack each one.
    def _compat_bytes_to_byte_vals(byt):
        return [struct.unpack(b'!B', b)[0] for b in byt]
try:
    # int.from_bytes exists on Python 3.2+.
    _compat_int_from_byte_vals = int.from_bytes
except AttributeError:
    # Fallback: accumulate big-endian byte values manually.
    def _compat_int_from_byte_vals(bytvals, endianess):
        assert endianess == 'big'
        res = 0
        for bv in bytvals:
            assert isinstance(bv, _compat_int_types)
            res = (res << 8) + bv
        return res


def _compat_to_bytes(intval, length, endianess):
    """Serialize *intval* to big-endian bytes of the given length (4 or 16).

    Raises struct.error if the value does not fit the requested width.
    """
    assert isinstance(intval, _compat_int_types)
    assert endianess == 'big'
    if length == 4:
        if not 0 <= intval < 2 ** 32:
            raise struct.error("integer out of range for 'I' format code")
        return struct.pack(b'!I', intval)
    if length == 16:
        if not 0 <= intval < 2 ** 128:
            raise struct.error("integer out of range for 'QQ' format code")
        # Split the 128-bit value into two 64-bit halves for struct.
        return struct.pack(b'!QQ', intval >> 64, intval & 0xffffffffffffffff)
    raise NotImplementedError()
if hasattr(int, 'bit_length'):
    # Not int.bit_length , since that won't work in 2.7 where long exists
    def _compat_bit_length(i):
        return i.bit_length()
else:
    # Pre-2.7 fallback: the bit length is the smallest shift that clears i.
    def _compat_bit_length(i):
        for res in itertools.count():
            if i >> res == 0:
                return res


def _compat_range(start, end, step=1):
    assert step > 0
    i = start
    while i < end:
        yield i
        i += step


class _TotalOrderingMixin(object):
    __slots__ = ()

    # Helper that derives the other comparison operations from
    # __lt__ and __eq__
    # We avoid functools.total_ordering because it doesn't handle
    # NotImplemented correctly yet (http://bugs.python.org/issue10042)
    def __eq__(self, other):
        raise NotImplementedError

    def __ne__(self, other):
        equal = self.__eq__(other)
        if equal is NotImplemented:
            return NotImplemented
        return not equal

    def __lt__(self, other):
        raise NotImplementedError

    def __le__(self, other):
        less = self.__lt__(other)
        if less is NotImplemented or not less:
            return self.__eq__(other)
        return less

    def __gt__(self, other):
        less = self.__lt__(other)
        if less is NotImplemented:
            return NotImplemented
        equal = self.__eq__(other)
        if equal is NotImplemented:
            return NotImplemented
        return not (less or equal)

    def __ge__(self, other):
        less = self.__lt__(other)
        if less is NotImplemented:
            return NotImplemented
        return not less


# Address widths in bits for the two IP versions.
IPV4LENGTH = 32
IPV6LENGTH = 128


class AddressValueError(ValueError):
    """A Value Error related to the address."""


class NetmaskValueError(ValueError):
    """A Value Error related to the netmask."""


def ip_address(address):
    """Take an IP string/int and return an object of the correct type.

    Args:
        address: A string or integer, the IP address.  Either IPv4 or
          IPv6 addresses may be supplied; integers less than 2**32 will
          be considered to be IPv4 by default.

    Returns:
        An IPv4Address or IPv6Address object.

    Raises:
        ValueError: if the *address* passed isn't either a v4 or a v6
          address

    """
    # Try each concrete version in turn; parse failures simply fall through.
    for address_cls in (IPv4Address, IPv6Address):
        try:
            return address_cls(address)
        except (AddressValueError, NetmaskValueError):
            pass

    if isinstance(address, bytes):
        raise AddressValueError(
            '%r does not appear to be an IPv4 or IPv6 address. '
            'Did you pass in a bytes (str in Python 2) instead of'
            ' a unicode object?' % address)

    raise ValueError('%r does not appear to be an IPv4 or IPv6 address' %
                     address)


def ip_network(address, strict=True):
    """Take an IP string/int and return an object of the correct type.

    Args:
        address: A string or integer, the IP network.  Either IPv4 or
          IPv6 networks may be supplied; integers less than 2**32 will
          be considered to be IPv4 by default.
        strict: A boolean; when True, host bits set in *address* are an
          error.

    Returns:
        An IPv4Network or IPv6Network object.

    Raises:
        ValueError: if the string passed isn't either a v4 or a v6
          address. Or if the network has host bits set.

    """
    # Try each concrete version in turn; parse failures simply fall through.
    for network_cls in (IPv4Network, IPv6Network):
        try:
            return network_cls(address, strict)
        except (AddressValueError, NetmaskValueError):
            pass

    if isinstance(address, bytes):
        raise AddressValueError(
            '%r does not appear to be an IPv4 or IPv6 network. '
            'Did you pass in a bytes (str in Python 2) instead of'
            ' a unicode object?' % address)

    raise ValueError('%r does not appear to be an IPv4 or IPv6 network' %
                     address)


def ip_interface(address):
    """Take an IP string/int and return an object of the correct type.

    Args:
        address: A string or integer, the IP address.  Either IPv4 or
          IPv6 addresses may be supplied; integers less than 2**32 will
          be considered to be IPv4 by default.

    Returns:
        An IPv4Interface or IPv6Interface object.

    Raises:
        ValueError: if the string passed isn't either a v4 or a v6
          address.

    Notes:
        The IPv?Interface classes describe an Address on a particular
        Network, so they're basically a combination of both the Address
        and Network classes.

    """
    # Try each concrete version in turn; parse failures simply fall through.
    for interface_cls in (IPv4Interface, IPv6Interface):
        try:
            return interface_cls(address)
        except (AddressValueError, NetmaskValueError):
            pass

    raise ValueError('%r does not appear to be an IPv4 or IPv6 interface' %
                     address)


def v4_int_to_packed(address):
    """Represent an address as 4 packed bytes in network (big-endian) order.

    Args:
        address: An integer representation of an IPv4 IP address.

    Returns:
        The integer address packed as 4 bytes in network (big-endian) order.

    Raises:
        ValueError: If the integer is negative or too large to be an
          IPv4 IP address.

    """
    try:
        return _compat_to_bytes(address, 4, 'big')
    except (struct.error, OverflowError):
        # Normalize low-level packing failures to the documented ValueError.
        raise ValueError("Address negative or too large for IPv4")


def v6_int_to_packed(address):
    """Represent an address as 16 packed bytes in network (big-endian) order.

    Args:
        address: An integer representation of an IPv6 IP address.

    Returns:
        The integer address packed as 16 bytes in network (big-endian) order.

    Raises:
        ValueError: If the integer is negative or too large to be an
          IPv6 IP address.

    """
    try:
        return _compat_to_bytes(address, 16, 'big')
    except (struct.error, OverflowError):
        # Normalize low-level packing failures to the documented ValueError.
        raise ValueError("Address negative or too large for IPv6")


def _split_optional_netmask(address):
    """Helper to split the netmask and raise AddressValueError if needed"""
    parts = _compat_str(address).split('/')
    if len(parts) <= 2:
        return parts
    raise AddressValueError("Only one '/' permitted in %r" % address)


def _find_address_range(addresses):
    """Find a sequence of sorted deduplicated IPv#Address.

    Args:
        addresses: a list of IPv#Address objects.

    Yields:
        A tuple containing the first and last IP addresses in the sequence.

    """
    it = iter(addresses)
    first = last = next(it)
    for ip in it:
        if ip._ip != last._ip + 1:
            yield first, last
            first = ip
        last = ip
    yield first, last


def _count_righthand_zero_bits(number, bits):
    """Count the number of zero bits on the right hand side.

    Args:
        number: an integer.
        bits: maximum number of bits to count.

    Returns:
        The number of zero bits on the right hand side of the number.

    """
    if number == 0:
        # Zero has no set bits at all, so every counted position is zero.
        return bits
    # ~number & (number - 1) turns the trailing zeros into a run of ones,
    # whose bit length is exactly the trailing-zero count.
    trailing = _compat_bit_length(~number & (number - 1))
    return trailing if trailing < bits else bits


def summarize_address_range(first, last):
    """Summarize a network range given the first and last IP addresses.

    Example:
        >>> list(summarize_address_range(IPv4Address('192.0.2.0'),
        ...                              IPv4Address('192.0.2.130')))
        ...                                #doctest: +NORMALIZE_WHITESPACE
        [IPv4Network('192.0.2.0/25'), IPv4Network('192.0.2.128/31'),
         IPv4Network('192.0.2.130/32')]

    Args:
        first: the first IPv4Address or IPv6Address in the range.
        last: the last IPv4Address or IPv6Address in the range.

    Returns:
        An iterator of the summarized IPv(4|6) network objects.

    Raise:
        TypeError:
            If the first and last objects are not IP addresses.
            If the first and last objects are not the same version.
        ValueError:
            If the last object is not greater than the first.
            If the version of the first address is not 4 or 6.

    """
    if (not (isinstance(first, _BaseAddress) and
             isinstance(last, _BaseAddress))):
        raise TypeError('first and last must be IP addresses, not networks')
    if first.version != last.version:
        raise TypeError("%s and %s are not of the same version" % (
                        first, last))
    if first > last:
        raise ValueError('last IP address must be greater than first')

    if first.version == 4:
        ip = IPv4Network
    elif first.version == 6:
        ip = IPv6Network
    else:
        raise ValueError('unknown IP version')

    ip_bits = first._max_prefixlen
    first_int = first._ip
    last_int = last._ip
    while first_int <= last_int:
        # The next network is the largest block that is both aligned on
        # first_int and does not extend past last_int.
        nbits = min(_count_righthand_zero_bits(first_int, ip_bits),
                    _compat_bit_length(last_int - first_int + 1) - 1)
        net = ip((first_int, ip_bits - nbits))
        yield net
        first_int += 1 << nbits
        # Stop if we just emitted the all-ones address (would wrap around).
        if first_int - 1 == ip._ALL_ONES:
            break


def _collapse_addresses_internal(addresses):
    """Loops through the addresses, collapsing concurrent netblocks.

    Example:

        ip1 = IPv4Network('192.0.2.0/26')
        ip2 = IPv4Network('192.0.2.64/26')
        ip3 = IPv4Network('192.0.2.128/26')
        ip4 = IPv4Network('192.0.2.192/26')

        _collapse_addresses_internal([ip1, ip2, ip3, ip4]) ->
          [IPv4Network('192.0.2.0/24')]

        This shouldn't be called directly; it is called via
          collapse_addresses([]).

    Args:
        addresses: A list of IPv4Network's or IPv6Network's

    Returns:
        A list of IPv4Network's or IPv6Network's depending on what we were
        passed.

    """
    # First merge
    to_merge = list(addresses)
    subnets = {}
    while to_merge:
        net = to_merge.pop()
        supernet = net.supernet()
        existing = subnets.get(supernet)
        if existing is None:
            subnets[supernet] = net
        elif existing != net:
            # Merge consecutive subnets
            # Two distinct siblings under the same supernet collapse into it;
            # re-queue the supernet so it can merge further upward.
            del subnets[supernet]
            to_merge.append(supernet)
    # Then iterate over resulting networks, skipping subsumed subnets
    last = None
    for net in sorted(subnets.values()):
        if last is not None:
            # Since they are sorted,
            # last.network_address <= net.network_address is a given.
            if last.broadcast_address >= net.broadcast_address:
                continue
        yield net
        last = net


def collapse_addresses(addresses):
    """Collapse a list of IP objects.

    Example:
        collapse_addresses([IPv4Network('192.0.2.0/25'),
                            IPv4Network('192.0.2.128/25')]) ->
                           [IPv4Network('192.0.2.0/24')]

    Args:
        addresses: An iterator of IPv4Network or IPv6Network objects.

    Returns:
        An iterator of the collapsed IPv(4|6)Network objects.

    Raises:
        TypeError: If passed a list of mixed version objects.

    """
    addrs = []
    ips = []
    nets = []

    # split IP addresses and networks
    for ip in addresses:
        if isinstance(ip, _BaseAddress):
            if ips and ips[-1]._version != ip._version:
                raise TypeError("%s and %s are not of the same version" % (
                                ip, ips[-1]))
            ips.append(ip)
        elif ip._prefixlen == ip._max_prefixlen:
            # A /32 (or /128) network is treated as a single address.
            if ips and ips[-1]._version != ip._version:
                raise TypeError("%s and %s are not of the same version" % (
                                ip, ips[-1]))
            try:
                ips.append(ip.ip)
            except AttributeError:
                ips.append(ip.network_address)
        else:
            if nets and nets[-1]._version != ip._version:
                raise TypeError("%s and %s are not of the same version" % (
                                ip, nets[-1]))
            nets.append(ip)

    # sort and dedup
    ips = sorted(set(ips))

    # find consecutive address ranges in the sorted sequence and summarize them
    if ips:
        for first, last in _find_address_range(ips):
            addrs.extend(summarize_address_range(first, last))

    return _collapse_addresses_internal(addrs + nets)


def get_mixed_type_key(obj):
    """Return a key suitable for sorting between networks and addresses.

    Address and Network objects are not directly comparable to each
    other, but sorted() can still order a mixed list when given this
    function as its key= argument.

    Args:
      obj: either a Network or Address object.
    Returns:
      appropriate key.

    """
    if isinstance(obj, _BaseNetwork):
        return obj._get_networks_key()
    if isinstance(obj, _BaseAddress):
        return obj._get_address_key()
    return NotImplemented


class _IPAddressBase(_TotalOrderingMixin):

    """The mother class."""

    __slots__ = ()

    @property
    def exploded(self):
        """Return the longhand version of the IP address as a string."""
        return self._explode_shorthand_ip_string()

    @property
    def compressed(self):
        """Return the shorthand version of the IP address as a string."""
        return _compat_str(self)

    @property
    def reverse_pointer(self):
        """The name of the reverse DNS pointer for the IP address, e.g.:
            >>> ipaddress.ip_address("127.0.0.1").reverse_pointer
            '1.0.0.127.in-addr.arpa'
            >>> ipaddress.ip_address("2001:db8::1").reverse_pointer
            '1.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.8.b.d.0.1.0.0.2.ip6.arpa'

        """
        return self._reverse_pointer()

    @property
    def version(self):
        # Version-specific subclasses must override this property.
        msg = '%200s has no version specified' % (type(self),)
        raise NotImplementedError(msg)

    def _check_int_address(self, address):
        # Reject integers outside [0, 2**max_prefixlen - 1].
        if address < 0:
            msg = "%d (< 0) is not permitted as an IPv%d address"
            raise AddressValueError(msg % (address, self._version))
        if address > self._ALL_ONES:
            msg = "%d (>= 2**%d) is not permitted as an IPv%d address"
            raise AddressValueError(msg % (address, self._max_prefixlen,
                                           self._version))

    def _check_packed_address(self, address, expected_len):
        # Reject packed (bytes) addresses of the wrong length.
        address_len = len(address)
        if address_len != expected_len:
            msg = (
                '%r (len %d != %d) is not permitted as an IPv%d address. '
                'Did you pass in a bytes (str in Python 2) instead of'
                ' a unicode object?'
            )
            raise AddressValueError(msg % (address, address_len,
                                           expected_len, self._version))

    @classmethod
    def _ip_int_from_prefix(cls, prefixlen):
        """Turn the prefix length into a bitwise netmask

        Args:
            prefixlen: An integer, the prefix length.

        Returns:
            An integer.

        """
        return cls._ALL_ONES ^ (cls._ALL_ONES >> prefixlen)

    @classmethod
    def _prefix_from_ip_int(cls, ip_int):
        """Return prefix length from the bitwise netmask.

        Args:
            ip_int: An integer, the netmask in expanded bitwise format

        Returns:
            An integer, the prefix length.

        Raises:
            ValueError: If the input intermingles zeroes & ones
        """
        trailing_zeroes = _count_righthand_zero_bits(ip_int,
                                                     cls._max_prefixlen)
        prefixlen = cls._max_prefixlen - trailing_zeroes
        # A valid netmask is all ones followed by all zeroes; check that the
        # high part really is a solid run of ones.
        leading_ones = ip_int >> trailing_zeroes
        all_ones = (1 << prefixlen) - 1
        if leading_ones != all_ones:
            byteslen = cls._max_prefixlen // 8
            details = _compat_to_bytes(ip_int, byteslen, 'big')
            msg = 'Netmask pattern %r mixes zeroes & ones'
            raise ValueError(msg % details)
        return prefixlen

    @classmethod
    def _report_invalid_netmask(cls, netmask_str):
        msg = '%r is not a valid netmask' % netmask_str
        raise NetmaskValueError(msg)

    @classmethod
    def _prefix_from_prefix_string(cls, prefixlen_str):
        """Return prefix length from a numeric string

        Args:
            prefixlen_str: The string to be converted

        Returns:
            An integer, the prefix length.

        Raises:
            NetmaskValueError: If the input is not a valid netmask
        """
        # int allows a leading +/- as well as surrounding whitespace,
        # so we ensure that isn't the case
        if not _BaseV4._DECIMAL_DIGITS.issuperset(prefixlen_str):
            cls._report_invalid_netmask(prefixlen_str)
        try:
            prefixlen = int(prefixlen_str)
        except ValueError:
            cls._report_invalid_netmask(prefixlen_str)
        if not (0 <= prefixlen <= cls._max_prefixlen):
            cls._report_invalid_netmask(prefixlen_str)
        return prefixlen

    @classmethod
    def _prefix_from_ip_string(cls, ip_str):
        """Turn a netmask/hostmask string into a prefix length

        Args:
            ip_str: The netmask/hostmask to be converted

        Returns:
            An integer, the prefix length.

        Raises:
            NetmaskValueError: If the input is not a valid netmask/hostmask
        """
        # Parse the netmask/hostmask like an IP address.
        try:
            ip_int = cls._ip_int_from_string(ip_str)
        except AddressValueError:
            cls._report_invalid_netmask(ip_str)

        # Try matching a netmask (this would be /1*0*/ as a bitwise regexp).
        # Note that the two ambiguous cases (all-ones and all-zeroes) are
        # treated as netmasks.
        try:
            return cls._prefix_from_ip_int(ip_int)
        except ValueError:
            pass

        # Invert the bits, and try matching a /0+1+/ hostmask instead.
        ip_int ^= cls._ALL_ONES
        try:
            return cls._prefix_from_ip_int(ip_int)
        except ValueError:
            cls._report_invalid_netmask(ip_str)

    def __reduce__(self):
        # Pickle via the string form; the constructor reparses it.
        return self.__class__, (_compat_str(self),)


class _BaseAddress(_IPAddressBase):

    """A generic IP object.

    Version-independent behaviour shared by single IP addresses:
    integer conversion, equality, ordering and integer arithmetic.
    """

    __slots__ = ()

    def __int__(self):
        return self._ip

    def __eq__(self, other):
        try:
            return self._ip == other._ip and self._version == other._version
        except AttributeError:
            return NotImplemented

    def __lt__(self, other):
        if not isinstance(other, _IPAddressBase):
            return NotImplemented
        if not isinstance(other, _BaseAddress):
            raise TypeError('%s and %s are not of the same type' % (
                self, other))
        if self._version != other._version:
            raise TypeError('%s and %s are not of the same version' % (
                self, other))
        # _ip values are plain ints, so this is False for equal addresses.
        return self._ip < other._ip

    # Shorthand for Integer addition and subtraction. This is not
    # meant to ever support addition/subtraction of addresses.
    def __add__(self, other):
        if isinstance(other, _compat_int_types):
            return self.__class__(int(self) + other)
        return NotImplemented

    def __sub__(self, other):
        if isinstance(other, _compat_int_types):
            return self.__class__(int(self) - other)
        return NotImplemented

    def __repr__(self):
        return '%s(%r)' % (self.__class__.__name__, _compat_str(self))

    def __str__(self):
        return _compat_str(self._string_from_ip_int(self._ip))

    def __hash__(self):
        return hash(hex(int(self._ip)))

    def _get_address_key(self):
        return (self._version, self)

    def __reduce__(self):
        return self.__class__, (self._ip,)


class _BaseNetwork(_IPAddressBase):

    """A generic IP network object.

    This IP class contains the version independent methods which are
    used by networks.

    """
    def __init__(self, address):
        # The address argument is parsed by version-specific subclasses;
        # here we only set up the cache used by lazily computed properties
        # (broadcast_address, hostmask).
        self._cache = {}

    def __repr__(self):
        return '%s(%r)' % (self.__class__.__name__, _compat_str(self))

    def __str__(self):
        # 'network_address/prefixlen' form, e.g. '192.0.2.0/24'.
        return '%s/%d' % (self.network_address, self.prefixlen)

    def hosts(self):
        """Generate Iterator over usable hosts in a network.

        Like __iter__, but the network and broadcast addresses
        themselves are excluded.

        """
        first = int(self.network_address)
        last = int(self.broadcast_address)
        for host_int in _compat_range(first + 1, last):
            yield self._address_class(host_int)

    def __iter__(self):
        # Unlike hosts(), iteration includes the network and broadcast
        # addresses themselves.
        first = int(self.network_address)
        last = int(self.broadcast_address)
        for addr_int in _compat_range(first, last + 1):
            yield self._address_class(addr_int)

    def __getitem__(self, n):
        """Return the n-th address in the network; negative n indexes
        backwards from the broadcast address."""
        lo = int(self.network_address)
        hi = int(self.broadcast_address)
        if n >= 0:
            target = lo + n
            if target > hi:
                raise IndexError('address out of range')
        else:
            target = hi + n + 1
            if target < lo:
                raise IndexError('address out of range')
        return self._address_class(target)

    def __lt__(self, other):
        # Networks only order against networks of the same version; a bare
        # address is a TypeError, anything else defers to the other operand.
        if not isinstance(other, _IPAddressBase):
            return NotImplemented
        if not isinstance(other, _BaseNetwork):
            raise TypeError('%s and %s are not of the same type' % (
                            self, other))
        if self._version != other._version:
            raise TypeError('%s and %s are not of the same version' % (
                            self, other))
        # Order primarily by network address, then by netmask.
        if self.network_address != other.network_address:
            return self.network_address < other.network_address
        if self.netmask != other.netmask:
            return self.netmask < other.netmask
        return False

    def __eq__(self, other):
        try:
            return (self._version == other._version and
                    self.network_address == other.network_address and
                    int(self.netmask) == int(other.netmask))
        except AttributeError:
            return NotImplemented

    def __hash__(self):
        # Consistent with __eq__: derived from network address and netmask.
        return hash(int(self.network_address) ^ int(self.netmask))

    def __contains__(self, other):
        # always false if one is v4 and the other is v6.
        if self._version != other._version:
            return False
        # A network is never "in" another network; only single addresses
        # can be members.
        if isinstance(other, _BaseNetwork):
            return False
        return (int(self.network_address) <= int(other._ip) <=
                int(self.broadcast_address))

    def overlaps(self, other):
        """Tell if self is partly contained in other."""
        # Two networks overlap iff either endpoint of one lies in the other.
        return (self.network_address in other or
                self.broadcast_address in other or
                other.network_address in self or
                other.broadcast_address in self)

    @property
    def broadcast_address(self):
        # Computed lazily and memoized in self._cache.
        cached = self._cache.get('broadcast_address')
        if cached is None:
            cached = self._address_class(int(self.network_address) |
                                         int(self.hostmask))
            self._cache['broadcast_address'] = cached
        return cached

    @property
    def hostmask(self):
        # Computed lazily and memoized in self._cache; the hostmask is the
        # bitwise complement of the netmask.
        cached = self._cache.get('hostmask')
        if cached is None:
            cached = self._address_class(int(self.netmask) ^ self._ALL_ONES)
            self._cache['hostmask'] = cached
        return cached

    @property
    def with_prefixlen(self):
        # 'network/prefixlen' form, e.g. '192.0.2.0/24'.
        return '%s/%d' % (self.network_address, self._prefixlen)

    @property
    def with_netmask(self):
        # 'network/netmask' form, e.g. '192.0.2.0/255.255.255.0'.
        return '%s/%s' % (self.network_address, self.netmask)

    @property
    def with_hostmask(self):
        # 'network/hostmask' form, e.g. '192.0.2.0/0.0.0.255'.
        return '%s/%s' % (self.network_address, self.hostmask)

    @property
    def num_addresses(self):
        """Number of hosts in the current subnet."""
        return int(self.broadcast_address) - int(self.network_address) + 1

    @property
    def _address_class(self):
        # Returning bare address objects (rather than interfaces) allows for
        # more consistent behaviour across the network address, broadcast
        # address and individual host addresses.
        # Version-specific subclasses must override this property.
        msg = '%200s has no associated address class' % (type(self),)
        raise NotImplementedError(msg)

    @property
    def prefixlen(self):
        # Public accessor for the prefix length (e.g. 24 for a /24).
        return self._prefixlen

    def address_exclude(self, other):
        """Remove an address from a larger block.

        For example:

            addr1 = ip_network('192.0.2.0/28')
            addr2 = ip_network('192.0.2.1/32')
            list(addr1.address_exclude(addr2)) =
                [IPv4Network('192.0.2.0/32'), IPv4Network('192.0.2.2/31'),
                 IPv4Network('192.0.2.4/30'), IPv4Network('192.0.2.8/29')]

        or IPv6:

            addr1 = ip_network('2001:db8::1/32')
            addr2 = ip_network('2001:db8::1/128')
            list(addr1.address_exclude(addr2)) =
                [ip_network('2001:db8::1/128'),
                 ip_network('2001:db8::2/127'),
                 ip_network('2001:db8::4/126'),
                 ip_network('2001:db8::8/125'),
                 ...
                 ip_network('2001:db8:8000::/33')]

        Args:
            other: An IPv4Network or IPv6Network object of the same type.

        Returns:
            An iterator of the IPv(4|6)Network objects which is self
            minus other.

        Raises:
            TypeError: If self and other are of differing address
              versions, or if other is not a network object.
            ValueError: If other is not completely contained by self.

        """
        if not self._version == other._version:
            raise TypeError("%s and %s are not of the same version" % (
                            self, other))

        if not isinstance(other, _BaseNetwork):
            raise TypeError("%s is not a network object" % other)

        if not other.subnet_of(self):
            raise ValueError('%s not contained in %s' % (other, self))
        if other == self:
            return

        # Make sure we're comparing the network of other.
        other = other.__class__('%s/%s' % (other.network_address,
                                           other.prefixlen))

        # Repeatedly split self in half, yielding the half that does not
        # contain other and descending into the half that does, until one
        # of the halves equals other exactly.
        s1, s2 = self.subnets()
        while s1 != other and s2 != other:
            if other.subnet_of(s1):
                yield s2
                s1, s2 = s1.subnets()
            elif other.subnet_of(s2):
                yield s1
                s1, s2 = s2.subnets()
            else:
                # If we got here, there's a bug somewhere.
                raise AssertionError('Error performing exclusion: '
                                     's1: %s s2: %s other: %s' %
                                     (s1, s2, other))
        if s1 == other:
            yield s2
        elif s2 == other:
            yield s1
        else:
            # If we got here, there's a bug somewhere.
            raise AssertionError('Error performing exclusion: '
                                 's1: %s s2: %s other: %s' %
                                 (s1, s2, other))

    def compare_networks(self, other):
        """Compare two IP network objects.

        Only the integer forms of the network address and netmask are
        considered; host bits play no part here.  To compare host bits,
        compare 'HostA._ip < HostB._ip' directly.

        Args:
            other: An IP network object of the same version.

        Returns:
            -1 if self < other
              eg: IPv4Network('192.0.2.0/25') < IPv4Network('192.0.2.128/25')
            0 if self == other
              eg: IPv4Network('192.0.2.0/24') == IPv4Network('192.0.2.0/24')
            1 if self > other
              eg: IPv4Network('192.0.2.128/25') > IPv4Network('192.0.2.0/25')

        Raises:
            TypeError: if the IP versions differ.

        """
        if self._version != other._version:
            raise TypeError('%s and %s are not of the same type' % (
                            self, other))
        # Order by network address first, then break ties with the netmask.
        for mine, theirs in ((self.network_address, other.network_address),
                             (self.netmask, other.netmask)):
            if mine < theirs:
                return -1
            if mine > theirs:
                return 1
        return 0

    def _get_networks_key(self):
        """Network-only sort key.

        Builds a (version, network address, netmask) tuple identifying
        this network; suitable as the "key" argument for sorted() and
        list.sort().

        """
        key = (self._version, self.network_address, self.netmask)
        return key

    def subnets(self, prefixlen_diff=1, new_prefix=None):
        """Yield the subnets that join to make the current network.

        If this network holds a single address (prefix length already at
        the maximum, i.e. /32 for IPv4 or /128 for IPv6), yields just the
        network itself.

        Args:
            prefixlen_diff: An integer, how much to lengthen the prefix
              by.  Mutually exclusive with new_prefix.
            new_prefix: The desired new prefix length; must be a larger
              number (smaller network) than the current prefix.
              Mutually exclusive with prefixlen_diff.

        Returns:
            An iterator of IPv(4|6) network objects.

        Raises:
            ValueError: If the resulting prefix would be negative or
              exceed the maximum, or if both prefixlen_diff and
              new_prefix are supplied, or if new_prefix is shorter than
              the current prefix.

        """
        # A single-address network is its own only subnet.
        if self._prefixlen == self._max_prefixlen:
            yield self
            return

        if new_prefix is not None:
            if new_prefix < self._prefixlen:
                raise ValueError('new prefix must be longer')
            if prefixlen_diff != 1:
                raise ValueError('cannot set prefixlen_diff and new_prefix')
            prefixlen_diff = new_prefix - self._prefixlen

        if prefixlen_diff < 0:
            raise ValueError('prefix length diff must be > 0')
        target_prefixlen = self._prefixlen + prefixlen_diff

        if target_prefixlen > self._max_prefixlen:
            raise ValueError(
                'prefix length diff %d is invalid for netblock %s' % (
                    target_prefixlen, self))

        # Walk the address range in subnet-sized strides.
        first = int(self.network_address)
        last = int(self.broadcast_address) + 1
        stride = (int(self.hostmask) + 1) >> prefixlen_diff
        for base in _compat_range(first, last, stride):
            yield self.__class__((base, target_prefixlen))

    def supernet(self, prefixlen_diff=1, new_prefix=None):
        """Return the supernet containing the current network.

        Args:
            prefixlen_diff: An integer, how much to shorten the prefix
              by.  E.g. a /24 with prefixlen_diff=3 gives a /21.
              Mutually exclusive with new_prefix.
            new_prefix: The desired new prefix length; must be a smaller
              number (larger network) than the current prefix.
              Mutually exclusive with prefixlen_diff.

        Returns:
            An IPv4 network object.

        Raises:
            ValueError: If the resulting prefix length would be negative,
              or if both prefixlen_diff and new_prefix are supplied, or
              if new_prefix is longer than the current prefix.

        """
        # A /0 network has no supernet other than itself.
        if self._prefixlen == 0:
            return self

        if new_prefix is not None:
            if new_prefix > self._prefixlen:
                raise ValueError('new prefix must be shorter')
            if prefixlen_diff != 1:
                raise ValueError('cannot set prefixlen_diff and new_prefix')
            prefixlen_diff = self._prefixlen - new_prefix

        target_prefixlen = self.prefixlen - prefixlen_diff
        if target_prefixlen < 0:
            raise ValueError(
                'current prefixlen is %d, cannot have a prefixlen_diff of %d' %
                (self.prefixlen, prefixlen_diff))
        # Clear the host bits that fall outside the shortened prefix.
        base = int(self.network_address) & (int(self.netmask) << prefixlen_diff)
        return self.__class__((base, target_prefixlen))

    @property
    def is_multicast(self):
        """Whether the entire network is multicast space.

        Returns:
            A boolean, True if both ends of the network are multicast
            addresses.  See RFC 2373 2.7 for details.

        """
        first = self.network_address
        last = self.broadcast_address
        return first.is_multicast and last.is_multicast

    def subnet_of(self, other):
        """Return True if this network is wholly contained in *other*."""
        # Networks of different IP versions never contain one another.
        if self._version != other._version:
            return False
        if not (hasattr(other, 'network_address') and
                hasattr(other, 'broadcast_address')):
            # Not network-like (e.g. a bare address): containment is
            # undefined.
            raise TypeError('Unable to test subnet containment with element '
                            'of type %s' % type(other))
        # Contained iff other's range spans ours on both ends.
        return (other.network_address <= self.network_address and
                other.broadcast_address >= self.broadcast_address)

    def supernet_of(self, other):
        """Return True if this network wholly contains *other*."""
        # Networks of different IP versions never contain one another.
        if self._version != other._version:
            return False
        if not (hasattr(other, 'network_address') and
                hasattr(other, 'broadcast_address')):
            # Not network-like (e.g. a bare address): containment is
            # undefined.
            raise TypeError('Unable to test subnet containment with element '
                            'of type %s' % type(other))
        # We are the supernet iff other's range lies inside ours.
        return (other.network_address >= self.network_address and
                other.broadcast_address <= self.broadcast_address)

    @property
    def is_reserved(self):
        """Whether the whole network lies in IETF-reserved space.

        Returns:
            A boolean, True if both ends of the network are within one
            of the reserved network ranges.

        """
        first = self.network_address
        last = self.broadcast_address
        return first.is_reserved and last.is_reserved

    @property
    def is_link_local(self):
        """Whether the whole network is link-local space.

        Returns:
            A boolean, True if both ends of the network are reserved
            per RFC 4291.

        """
        first = self.network_address
        last = self.broadcast_address
        return first.is_link_local and last.is_link_local

    @property
    def is_private(self):
        """Whether the whole network is private-use space.

        Returns:
            A boolean, True if both ends of the network are reserved per
            iana-ipv4-special-registry or iana-ipv6-special-registry.

        """
        first = self.network_address
        last = self.broadcast_address
        return first.is_private and last.is_private

    @property
    def is_global(self):
        """Whether the network is allocated for public use.

        Returns:
            A boolean, True if the network is not reserved per
            iana-ipv4-special-registry or iana-ipv6-special-registry.

        """
        private = self.is_private
        return not private

    @property
    def is_unspecified(self):
        """Whether the whole network is the unspecified address.

        Returns:
            A boolean, True if both ends of the network are the
            unspecified address as defined in RFC 2373 2.5.2.

        """
        first = self.network_address
        last = self.broadcast_address
        return first.is_unspecified and last.is_unspecified

    @property
    def is_loopback(self):
        """Whether the whole network is loopback space.

        Returns:
            A boolean, True if both ends of the network are loopback
            addresses as defined in RFC 2373 2.5.3.

        """
        first = self.network_address
        last = self.broadcast_address
        return first.is_loopback and last.is_loopback


class _BaseV4(object):

    """Base IPv4 object.

    The following methods are used by IPv4 objects in both single IP
    addresses and networks.

    """

    __slots__ = ()
    _version = 4
    # Equivalent to 255.255.255.255 or 32 bits of 1's.
    _ALL_ONES = (2 ** IPV4LENGTH) - 1
    # The only characters permitted in a decimal octet; int() alone would
    # also accept oddities such as whitespace and sign characters.
    _DECIMAL_DIGITS = frozenset('0123456789')

    # the valid octets for host and netmasks. only useful for IPv4.
    _valid_mask_octets = frozenset([255, 254, 252, 248, 240, 224, 192, 128, 0])

    _max_prefixlen = IPV4LENGTH
    # There are only a handful of valid v4 netmasks, so we cache them all
    # when constructed (see _make_netmask()).
    _netmask_cache = {}

    def _explode_shorthand_ip_string(self):
        # IPv4 has no shorthand form; the "exploded" string is just the
        # normal dotted-quad representation.
        return _compat_str(self)

    @classmethod
    def _make_netmask(cls, arg):
        """Make a (netmask, prefix_len) tuple from the given argument.

        Argument can be:
        - an integer (the prefix length)
        - a string representing the prefix length (e.g. "24")
        - a string representing the prefix netmask (e.g. "255.255.255.0")
        """
        if arg not in cls._netmask_cache:
            if isinstance(arg, _compat_int_types):
                prefixlen = arg
            else:
                try:
                    # Check for a netmask in prefix length form
                    prefixlen = cls._prefix_from_prefix_string(arg)
                except NetmaskValueError:
                    # Check for a netmask or hostmask in dotted-quad form.
                    # This may raise NetmaskValueError.
                    prefixlen = cls._prefix_from_ip_string(arg)
            netmask = IPv4Address(cls._ip_int_from_prefix(prefixlen))
            cls._netmask_cache[arg] = netmask, prefixlen
        return cls._netmask_cache[arg]

    @classmethod
    def _ip_int_from_string(cls, ip_str):
        """Turn the given IP string into an integer for comparison.

        Args:
            ip_str: A string, the IP ip_str.

        Returns:
            The IP ip_str as an integer.

        Raises:
            AddressValueError: if ip_str isn't a valid IPv4 Address.

        """
        if not ip_str:
            raise AddressValueError('Address cannot be empty')

        octets = ip_str.split('.')
        if len(octets) != 4:
            raise AddressValueError("Expected 4 octets in %r" % ip_str)

        try:
            # Each parsed octet becomes one big-endian byte of the 32-bit
            # address value.
            return _compat_int_from_byte_vals(
                map(cls._parse_octet, octets), 'big')
        except ValueError as exc:
            # Re-raise octet parse errors with the whole address for context.
            raise AddressValueError("%s in %r" % (exc, ip_str))

    @classmethod
    def _parse_octet(cls, octet_str):
        """Convert a decimal octet into an integer.

        Args:
            octet_str: A string, the number to parse.

        Returns:
            The octet as an integer.

        Raises:
            ValueError: if the octet isn't strictly a decimal from [0..255].

        """
        if not octet_str:
            raise ValueError("Empty octet not permitted")
        # Whitelist the characters, since int() allows a lot of bizarre stuff.
        if not cls._DECIMAL_DIGITS.issuperset(octet_str):
            msg = "Only decimal digits permitted in %r"
            raise ValueError(msg % octet_str)
        # We do the length check second, since the invalid character error
        # is likely to be more informative for the user
        if len(octet_str) > 3:
            msg = "At most 3 characters permitted in %r"
            raise ValueError(msg % octet_str)
        # Convert to integer (we know digits are legal)
        octet_int = int(octet_str, 10)
        # Any octets that look like they *might* be written in octal,
        # and which don't look exactly the same in both octal and
        # decimal are rejected as ambiguous
        if octet_int > 7 and octet_str[0] == '0':
            msg = "Ambiguous (octal/decimal) value in %r not permitted"
            raise ValueError(msg % octet_str)
        if octet_int > 255:
            raise ValueError("Octet %d (> 255) not permitted" % octet_int)
        return octet_int

    @classmethod
    def _string_from_ip_int(cls, ip_int):
        """Turns a 32-bit integer into dotted decimal notation.

        Args:
            ip_int: An integer, the IP address.

        Returns:
            The IP address as a string in dotted decimal notation.

        """
        # On Python 2, iterating the packed bytes yields 1-char byte strings
        # that must go through struct.unpack; on Python 3 it yields ints,
        # which can be stringified directly.
        return '.'.join(_compat_str(struct.unpack(b'!B', b)[0]
                                    if isinstance(b, bytes)
                                    else b)
                        for b in _compat_to_bytes(ip_int, 4, 'big'))

    def _is_hostmask(self, ip_str):
        """Test if the IP string is a hostmask (rather than a netmask).

        Args:
            ip_str: A string, the potential hostmask.

        Returns:
            A boolean, True if the IP string is a hostmask.

        """
        bits = ip_str.split('.')
        try:
            # Keep only octets that are valid in a net/host mask.
            parts = [x for x in map(int, bits) if x in self._valid_mask_octets]
        except ValueError:
            # A non-numeric octet means this is not a mask at all.
            return False
        if len(parts) != len(bits):
            # Some octet was numeric but not a valid mask octet.
            return False
        if parts[0] < parts[-1]:
            # Ascending octet values (e.g. 0.0.0.255) indicate a hostmask.
            return True
        return False

    def _reverse_pointer(self):
        """Return the reverse DNS pointer name for the IPv4 address.

        This implements the method described in RFC1035 3.5.

        """
        # Reverse the octet order and append the standard suffix.
        reverse_octets = _compat_str(self).split('.')[::-1]
        return '.'.join(reverse_octets) + '.in-addr.arpa'

    @property
    def max_prefixlen(self):
        """The maximum prefix length for this IP version (32 bits)."""
        return self._max_prefixlen

    @property
    def version(self):
        """The IP version number (always 4 for this class)."""
        return self._version


class IPv4Address(_BaseV4, _BaseAddress):

    """Represent and manipulate single IPv4 Addresses."""

    __slots__ = ('_ip', '__weakref__')

    def __init__(self, address):

        """Create an IPv4 address from a string, integer or packed bytes.

        Args:
            address: The IP, given as a dotted-quad string ('192.0.2.1'),
              an integer (so IPv4Address('192.0.2.1') ==
              IPv4Address(3221225985)), or a packed 4-byte value.
              More generally,
              IPv4Address(int(IPv4Address('192.0.2.1'))) ==
                IPv4Address('192.0.2.1')

        Raises:
            AddressValueError: If address isn't a valid IPv4 address.

        """
        # Integers are stored as-is after a range check.
        if isinstance(address, _compat_int_types):
            self._check_int_address(address)
            self._ip = address
            return

        # Packed binary form: exactly four bytes, big-endian.
        if isinstance(address, bytes):
            self._check_packed_address(address, 4)
            byte_vals = _compat_bytes_to_byte_vals(address)
            self._ip = _compat_int_from_byte_vals(byte_vals, 'big')
            return

        # Anything else is stringified and parsed as a dotted quad.
        text = _compat_str(address)
        if '/' in text:
            raise AddressValueError("Unexpected '/' in %r" % address)
        self._ip = self._ip_int_from_string(text)

    @property
    def packed(self):
        """The binary (packed big-endian) representation of this address."""
        return v4_int_to_packed(self._ip)

    @property
    def is_reserved(self):
        """Test if the address is otherwise IETF reserved.

        Returns:
            A boolean, True if the address is within the reserved IPv4
            network range.

        """
        reserved = self._constants._reserved_network
        return self in reserved

    @property
    def is_private(self):
        """Test if this address is allocated for private networks.

        Returns:
            A boolean, True if the address is reserved per
            iana-ipv4-special-registry.

        """
        for net in self._constants._private_networks:
            if self in net:
                return True
        return False

    @property
    def is_global(self):
        """True if the address is globally reachable (neither in the
        shared-address-space network nor private)."""
        if self in self._constants._public_network:
            return False
        return not self.is_private

    @property
    def is_multicast(self):
        """Test if the address is reserved for multicast use.

        Returns:
            A boolean, True if the address is multicast.
            See RFC 3171 for details.

        """
        multicast = self._constants._multicast_network
        return self in multicast

    @property
    def is_unspecified(self):
        """Test if the address is unspecified.

        Returns:
            A boolean, True if this is the unspecified address as defined
            in RFC 5735 3.

        """
        unspecified = self._constants._unspecified_address
        return self == unspecified

    @property
    def is_loopback(self):
        """Test if the address is a loopback address.

        Returns:
            A boolean, True if the address is a loopback per RFC 3330.

        """
        loopback = self._constants._loopback_network
        return self in loopback

    @property
    def is_link_local(self):
        """Test if the address is reserved for link-local.

        Returns:
            A boolean, True if the address is link-local per RFC 3927.

        """
        linklocal = self._constants._linklocal_network
        return self in linklocal


class IPv4Interface(IPv4Address):

    """An IPv4 address together with its associated network.

    Construction accepts the same forms as IPv4Address plus a mask or
    prefix; the derived IPv4Network is kept in ``self.network`` and its
    mask attributes are mirrored on the interface.
    """

    def __init__(self, address):
        """
        Args:
            address: The interface, as a string ('192.0.2.1/24'), an
              (address, prefix) tuple, an integer, or packed bytes.  When
              no mask/prefix is given, /32 is assumed.

        Raises:
            AddressValueError: If the address portion is invalid.
            NetmaskValueError: If the mask portion is invalid.
        """
        # Integer / packed-bytes input: a host address with full prefix.
        if isinstance(address, (bytes, _compat_int_types)):
            IPv4Address.__init__(self, address)
            self.network = IPv4Network(self._ip)
            self._prefixlen = self._max_prefixlen
            # Bug fix: the other constructor branches expose .netmask and
            # .hostmask; previously this branch left them unset, so
            # accessing them raised AttributeError for int/bytes input.
            self.netmask = self.network.netmask
            self.hostmask = self.network.hostmask
            return

        # (address, prefix) tuple input; the prefix is optional.
        if isinstance(address, tuple):
            IPv4Address.__init__(self, address[0])
            if len(address) > 1:
                self._prefixlen = int(address[1])
            else:
                self._prefixlen = self._max_prefixlen

            self.network = IPv4Network(address, strict=False)
            self.netmask = self.network.netmask
            self.hostmask = self.network.hostmask
            return

        # String (or string-convertible) input, e.g. '192.0.2.1/24'.
        addr = _split_optional_netmask(address)
        IPv4Address.__init__(self, addr[0])

        self.network = IPv4Network(address, strict=False)
        self._prefixlen = self.network._prefixlen

        self.netmask = self.network.netmask
        self.hostmask = self.network.hostmask

    def __str__(self):
        return '%s/%d' % (self._string_from_ip_int(self._ip),
                          self.network.prefixlen)

    def __eq__(self, other):
        # Equal only when both the address and the associated network match.
        address_equal = IPv4Address.__eq__(self, other)
        if not address_equal or address_equal is NotImplemented:
            return address_equal
        try:
            return self.network == other.network
        except AttributeError:
            # An interface with an associated network is NOT the
            # same as an unassociated address. That's why the hash
            # takes the extra info into account.
            return False

    def __lt__(self, other):
        address_less = IPv4Address.__lt__(self, other)
        if address_less is NotImplemented:
            return NotImplemented
        try:
            return self.network < other.network
        except AttributeError:
            # We *do* allow addresses and interfaces to be sorted. The
            # unassociated address is considered less than all interfaces.
            return False

    def __hash__(self):
        # Mix the network in so an interface hashes differently from the
        # equivalent bare address (see __eq__).
        return self._ip ^ self._prefixlen ^ int(self.network.network_address)

    __reduce__ = _IPAddressBase.__reduce__

    @property
    def ip(self):
        """The bare IPv4Address, stripped of network information."""
        return IPv4Address(self._ip)

    @property
    def with_prefixlen(self):
        """The interface as an 'address/prefixlen' string."""
        return '%s/%s' % (self._string_from_ip_int(self._ip),
                          self._prefixlen)

    @property
    def with_netmask(self):
        """The interface as an 'address/netmask' string."""
        return '%s/%s' % (self._string_from_ip_int(self._ip),
                          self.netmask)

    @property
    def with_hostmask(self):
        """The interface as an 'address/hostmask' string."""
        return '%s/%s' % (self._string_from_ip_int(self._ip),
                          self.hostmask)


class IPv4Network(_BaseV4, _BaseNetwork):

    """This class represents and manipulates 32-bit IPv4 network + addresses.

    Attributes: [examples for IPv4Network('192.0.2.0/27')]
        .network_address: IPv4Address('192.0.2.0')
        .hostmask: IPv4Address('0.0.0.31')
        .broadcast_address: IPv4Address('192.0.2.32')
        .netmask: IPv4Address('255.255.255.224')
        .prefixlen: 27

    """
    # Class to use when creating address objects
    _address_class = IPv4Address

    def __init__(self, address, strict=True):

        """Instantiate a new IPv4 network object.

        Args:
            address: A string or integer representing the IP [& network].
              '192.0.2.0/24'
              '192.0.2.0/255.255.255.0'
              '192.0.0.2/0.0.0.255'
              are all functionally the same in IPv4. Similarly,
              '192.0.2.1'
              '192.0.2.1/255.255.255.255'
              '192.0.2.1/32'
              are also functionally equivalent. That is to say, failing to
              provide a subnetmask will create an object with a mask of /32.

              If the mask (portion after the / in the argument) is given in
              dotted quad form, it is treated as a netmask if it starts with a
              non-zero field (e.g. /255.0.0.0 == /8) and as a hostmask if it
              starts with a zero field (e.g. 0.255.255.255 == /8), with the
              single exception of an all-zero mask which is treated as a
              netmask == /0. If no mask is given, a default of /32 is used.

              Additionally, an integer can be passed, so
              IPv4Network('192.0.2.1') == IPv4Network(3221225985)
              or, more generally
              IPv4Interface(int(IPv4Interface('192.0.2.1'))) ==
                IPv4Interface('192.0.2.1')

        Raises:
            AddressValueError: If ipaddress isn't a valid IPv4 address.
            NetmaskValueError: If the netmask isn't valid for
              an IPv4 address.
            ValueError: If strict is True and a network address is not
              supplied.

        """
        _BaseNetwork.__init__(self, address)

        # Constructing from a packed address or integer: no mask can be
        # supplied this way, so the prefix defaults to /32.
        if isinstance(address, (_compat_int_types, bytes)):
            self.network_address = IPv4Address(address)
            self.netmask, self._prefixlen = self._make_netmask(
                self._max_prefixlen)
            # fixme: address/network test here.
            return

        # Constructing from an (address, mask) tuple; the mask element is
        # optional and may be a prefix length or dotted-quad mask string.
        if isinstance(address, tuple):
            if len(address) > 1:
                arg = address[1]
            else:
                # We weren't given an address[1]
                arg = self._max_prefixlen
            self.network_address = IPv4Address(address[0])
            self.netmask, self._prefixlen = self._make_netmask(arg)
            packed = int(self.network_address)
            # Host bits set: reject when strict, otherwise mask them off.
            if packed & int(self.netmask) != packed:
                if strict:
                    raise ValueError('%s has host bits set' % self)
                else:
                    self.network_address = IPv4Address(packed &
                                                       int(self.netmask))
            return

        # Assume input argument to be string or any object representation
        # which converts into a formatted IP prefix string.
        addr = _split_optional_netmask(address)
        self.network_address = IPv4Address(self._ip_int_from_string(addr[0]))

        if len(addr) == 2:
            arg = addr[1]
        else:
            arg = self._max_prefixlen
        self.netmask, self._prefixlen = self._make_netmask(arg)

        if strict:
            # Reject inputs like '192.0.2.1/24' whose host bits are set.
            if (IPv4Address(int(self.network_address) & int(self.netmask)) !=
                    self.network_address):
                raise ValueError('%s has host bits set' % self)
        # Mask off host bits unconditionally (a no-op when strict passed).
        self.network_address = IPv4Address(int(self.network_address) &
                                           int(self.netmask))

        if self._prefixlen == (self._max_prefixlen - 1):
            # For /31 networks, make hosts() yield every address.
            # NOTE(review): the default hosts() (defined in _BaseNetwork,
            # not visible here) presumably skips the network/broadcast
            # addresses; confirm before relying on this.
            self.hosts = self.__iter__

    @property
    def is_global(self):
        """Test if this address is allocated for public networks.

        Returns:
            A boolean, True if the address is not reserved per
            iana-ipv4-special-registry.

        """
        # 100.64.0.0/10 (the _public_network in _IPv4Constants) is treated
        # as neither private nor global, mirroring IPv4Address.is_global.
        return (not (self.network_address in IPv4Network('100.64.0.0/10') and
                self.broadcast_address in IPv4Network('100.64.0.0/10')) and
                not self.is_private)


class _IPv4Constants(object):
    """Well-known IPv4 networks consulted by IPv4Address's is_* properties.

    Defined on a separate class (and attached to IPv4Address afterwards)
    because the constants need IPv4Network, which is defined after
    IPv4Address.
    """

    # Consulted by IPv4Address.is_link_local.
    _linklocal_network = IPv4Network('169.254.0.0/16')

    # Consulted by IPv4Address.is_loopback.
    _loopback_network = IPv4Network('127.0.0.0/8')

    # Consulted by IPv4Address.is_multicast.
    _multicast_network = IPv4Network('224.0.0.0/4')

    # Consulted by IPv4Address.is_global: addresses here are excluded from
    # being global without being private.
    _public_network = IPv4Network('100.64.0.0/10')

    # Consulted by IPv4Address.is_private (per iana-ipv4-special-registry).
    _private_networks = [
        IPv4Network('0.0.0.0/8'),
        IPv4Network('10.0.0.0/8'),
        IPv4Network('127.0.0.0/8'),
        IPv4Network('169.254.0.0/16'),
        IPv4Network('172.16.0.0/12'),
        IPv4Network('192.0.0.0/29'),
        IPv4Network('192.0.0.170/31'),
        IPv4Network('192.0.2.0/24'),
        IPv4Network('192.168.0.0/16'),
        IPv4Network('198.18.0.0/15'),
        IPv4Network('198.51.100.0/24'),
        IPv4Network('203.0.113.0/24'),
        IPv4Network('240.0.0.0/4'),
        IPv4Network('255.255.255.255/32'),
    ]

    # Consulted by IPv4Address.is_reserved.
    _reserved_network = IPv4Network('240.0.0.0/4')

    # Consulted by IPv4Address.is_unspecified.
    _unspecified_address = IPv4Address('0.0.0.0')


# Wire the constant networks onto IPv4Address now that IPv4Network exists;
# the is_* properties on IPv4Address look these up via self._constants.
IPv4Address._constants = _IPv4Constants


class _BaseV6(object):

    """Base IPv6 object.

    The following methods are used by IPv6 objects in both single IP
    addresses and networks.

    """

    __slots__ = ()
    _version = 6
    _ALL_ONES = (2 ** IPV6LENGTH) - 1
    _HEXTET_COUNT = 8
    _HEX_DIGITS = frozenset('0123456789ABCDEFabcdef')
    _max_prefixlen = IPV6LENGTH

    # There are only a bunch of valid v6 netmasks, so we cache them all
    # when constructed (see _make_netmask()).
    _netmask_cache = {}

    @classmethod
    def _make_netmask(cls, arg):
        """Make a (netmask, prefix_len) tuple from the given argument.

        Argument can be:
        - an integer (the prefix length)
        - a string representing the prefix length (e.g. "24")
        - a string representing the prefix netmask (e.g. "255.255.255.0")
        """
        # The cache is keyed on the raw argument, so e.g. 24 and "24"
        # produce separate (but identical) entries.
        if arg not in cls._netmask_cache:
            if isinstance(arg, _compat_int_types):
                prefixlen = arg
            else:
                prefixlen = cls._prefix_from_prefix_string(arg)
            netmask = IPv6Address(cls._ip_int_from_prefix(prefixlen))
            cls._netmask_cache[arg] = netmask, prefixlen
        return cls._netmask_cache[arg]

    @classmethod
    def _ip_int_from_string(cls, ip_str):
        """Turn an IPv6 ip_str into an integer.

        Args:
            ip_str: A string, the IPv6 ip_str.

        Returns:
            An int, the IPv6 address

        Raises:
            AddressValueError: if ip_str isn't a valid IPv6 Address.

        """
        if not ip_str:
            raise AddressValueError('Address cannot be empty')

        parts = ip_str.split(':')

        # An IPv6 address needs at least 2 colons (3 parts).
        _min_parts = 3
        if len(parts) < _min_parts:
            msg = "At least %d parts expected in %r" % (_min_parts, ip_str)
            raise AddressValueError(msg)

        # If the address has an IPv4-style suffix, convert it to hexadecimal.
        if '.' in parts[-1]:
            try:
                ipv4_int = IPv4Address(parts.pop())._ip
            except AddressValueError as exc:
                raise AddressValueError("%s in %r" % (exc, ip_str))
            # Replace the dotted-quad suffix with its two 16-bit hextets.
            parts.append('%x' % ((ipv4_int >> 16) & 0xFFFF))
            parts.append('%x' % (ipv4_int & 0xFFFF))

        # An IPv6 address can't have more than 8 colons (9 parts).
        # The extra colon comes from using the "::" notation for a single
        # leading or trailing zero part.
        _max_parts = cls._HEXTET_COUNT + 1
        if len(parts) > _max_parts:
            msg = "At most %d colons permitted in %r" % (
                _max_parts - 1, ip_str)
            raise AddressValueError(msg)

        # Disregarding the endpoints, find '::' with nothing in between.
        # This indicates that a run of zeroes has been skipped.
        skip_index = None
        for i in _compat_range(1, len(parts) - 1):
            if not parts[i]:
                if skip_index is not None:
                    # Can't have more than one '::'
                    msg = "At most one '::' permitted in %r" % ip_str
                    raise AddressValueError(msg)
                skip_index = i

        # parts_hi is the number of parts to copy from above/before the '::'
        # parts_lo is the number of parts to copy from below/after the '::'
        if skip_index is not None:
            # If we found a '::', then check if it also covers the endpoints.
            parts_hi = skip_index
            parts_lo = len(parts) - skip_index - 1
            if not parts[0]:
                parts_hi -= 1
                if parts_hi:
                    msg = "Leading ':' only permitted as part of '::' in %r"
                    raise AddressValueError(msg % ip_str)  # ^: requires ^::
            if not parts[-1]:
                parts_lo -= 1
                if parts_lo:
                    msg = "Trailing ':' only permitted as part of '::' in %r"
                    raise AddressValueError(msg % ip_str)  # :$ requires ::$
            parts_skipped = cls._HEXTET_COUNT - (parts_hi + parts_lo)
            if parts_skipped < 1:
                msg = "Expected at most %d other parts with '::' in %r"
                raise AddressValueError(msg % (cls._HEXTET_COUNT - 1, ip_str))
        else:
            # Otherwise, allocate the entire address to parts_hi.  The
            # endpoints could still be empty, but _parse_hextet() will check
            # for that.
            if len(parts) != cls._HEXTET_COUNT:
                msg = "Exactly %d parts expected without '::' in %r"
                raise AddressValueError(msg % (cls._HEXTET_COUNT, ip_str))
            if not parts[0]:
                msg = "Leading ':' only permitted as part of '::' in %r"
                raise AddressValueError(msg % ip_str)  # ^: requires ^::
            if not parts[-1]:
                msg = "Trailing ':' only permitted as part of '::' in %r"
                raise AddressValueError(msg % ip_str)  # :$ requires ::$
            parts_hi = len(parts)
            parts_lo = 0
            parts_skipped = 0

        try:
            # Now, parse the hextets into a 128-bit integer.
            ip_int = 0
            for i in range(parts_hi):
                ip_int <<= 16
                ip_int |= cls._parse_hextet(parts[i])
            ip_int <<= 16 * parts_skipped
            for i in range(-parts_lo, 0):
                ip_int <<= 16
                ip_int |= cls._parse_hextet(parts[i])
            return ip_int
        except ValueError as exc:
            raise AddressValueError("%s in %r" % (exc, ip_str))

    @classmethod
    def _parse_hextet(cls, hextet_str):
        """Convert an IPv6 hextet string into an integer.

        Args:
            hextet_str: A string, the number to parse.

        Returns:
            The hextet as an integer.

        Raises:
            ValueError: if the input isn't strictly a hex number from
              [0..FFFF].

        """
        # Whitelist the characters, since int() allows a lot of bizarre stuff.
        if not cls._HEX_DIGITS.issuperset(hextet_str):
            raise ValueError("Only hex digits permitted in %r" % hextet_str)
        # We do the length check second, since the invalid character error
        # is likely to be more informative for the user
        if len(hextet_str) > 4:
            msg = "At most 4 characters permitted in %r"
            raise ValueError(msg % hextet_str)
        # Length check means we can skip checking the integer value
        return int(hextet_str, 16)

    @classmethod
    def _compress_hextets(cls, hextets):
        """Compresses a list of hextets.

        Compresses a list of strings, replacing the longest continuous
        sequence of "0" in the list with "" and adding empty strings at
        the beginning or at the end of the string such that subsequently
        calling ":".join(hextets) will produce the compressed version of
        the IPv6 address.

        Args:
            hextets: A list of strings, the hextets to compress.

        Returns:
            A list of strings.

        """
        # Single pass tracking the current run of "0" hextets and the best
        # (longest) run seen so far.
        best_doublecolon_start = -1
        best_doublecolon_len = 0
        doublecolon_start = -1
        doublecolon_len = 0
        for index, hextet in enumerate(hextets):
            if hextet == '0':
                doublecolon_len += 1
                if doublecolon_start == -1:
                    # Start of a sequence of zeros.
                    doublecolon_start = index
                if doublecolon_len > best_doublecolon_len:
                    # This is the longest sequence of zeros so far.
                    best_doublecolon_len = doublecolon_len
                    best_doublecolon_start = doublecolon_start
            else:
                doublecolon_len = 0
                doublecolon_start = -1

        # Only runs of two or more zeros are worth compressing with '::'.
        if best_doublecolon_len > 1:
            best_doublecolon_end = (best_doublecolon_start +
                                    best_doublecolon_len)
            # For zeros at the end of the address.
            if best_doublecolon_end == len(hextets):
                hextets += ['']
            hextets[best_doublecolon_start:best_doublecolon_end] = ['']
            # For zeros at the beginning of the address.
            if best_doublecolon_start == 0:
                hextets = [''] + hextets

        return hextets

    @classmethod
    def _string_from_ip_int(cls, ip_int=None):
        """Turns a 128-bit integer into hexadecimal notation.

        Args:
            ip_int: An integer, the IP address.

        Returns:
            A string, the hexadecimal representation of the address.

        Raises:
            ValueError: The address is bigger than 128 bits of all ones.

        """
        if ip_int is None:
            # NOTE(review): ``cls`` is the class in this classmethod, so
            # ``cls._ip`` looks dubious (``_ip`` is an instance slot);
            # callers in this module always pass ip_int explicitly, so
            # this branch appears unexercised -- confirm against upstream.
            ip_int = int(cls._ip)

        if ip_int > cls._ALL_ONES:
            raise ValueError('IPv6 address is too large')

        # Split the 32 hex digits into eight 16-bit hextets, dropping
        # leading zeros within each hextet via the int round-trip.
        hex_str = '%032x' % ip_int
        hextets = ['%x' % int(hex_str[x:x + 4], 16) for x in range(0, 32, 4)]

        hextets = cls._compress_hextets(hextets)
        return ':'.join(hextets)

    def _explode_shorthand_ip_string(self):
        """Expand a shortened IPv6 address.

        Returns:
            A string, the expanded IPv6 address (with a "/prefixlen"
            suffix for network and interface objects).

        """
        # Pick the address portion appropriate for this object's type.
        if isinstance(self, IPv6Network):
            ip_str = _compat_str(self.network_address)
        elif isinstance(self, IPv6Interface):
            ip_str = _compat_str(self.ip)
        else:
            ip_str = _compat_str(self)

        ip_int = self._ip_int_from_string(ip_str)
        hex_str = '%032x' % ip_int
        parts = [hex_str[x:x + 4] for x in range(0, 32, 4)]
        if isinstance(self, (_BaseNetwork, IPv6Interface)):
            return '%s/%d' % (':'.join(parts), self._prefixlen)
        return ':'.join(parts)

    def _reverse_pointer(self):
        """Return the reverse DNS pointer name for the IPv6 address.

        This implements the method described in RFC3596 2.5.

        """
        # One label per hex digit of the exploded address, in reverse.
        reverse_chars = self.exploded[::-1].replace(':', '')
        return '.'.join(reverse_chars) + '.ip6.arpa'

    @property
    def max_prefixlen(self):
        return self._max_prefixlen

    @property
    def version(self):
        return self._version


class IPv6Address(_BaseV6, _BaseAddress):

    """Represent and manipulate single IPv6 Addresses."""

    __slots__ = ('_ip', '__weakref__')

    def __init__(self, address):
        """Instantiate a new IPv6 address object.

        Args:
            address: A string or integer representing the IP

              Additionally, an integer can be passed, so
              IPv6Address('2001:db8::') ==
                IPv6Address(42540766411282592856903984951653826560)
              or, more generally
              IPv6Address(int(IPv6Address('2001:db8::'))) ==
                IPv6Address('2001:db8::')

        Raises:
            AddressValueError: If address isn't a valid IPv6 address.

        """
        # Efficient constructor from integer.
        if isinstance(address, _compat_int_types):
            self._check_int_address(address)
            self._ip = address
            return

        # Constructing from a packed address (16 big-endian bytes).
        if isinstance(address, bytes):
            self._check_packed_address(address, 16)
            bvs = _compat_bytes_to_byte_vals(address)
            self._ip = _compat_int_from_byte_vals(bvs, 'big')
            return

        # Assume input argument to be string or any object representation
        # which converts into a formatted IP string.
        addr_str = _compat_str(address)
        if '/' in addr_str:
            raise AddressValueError("Unexpected '/' in %r" % address)
        self._ip = self._ip_int_from_string(addr_str)

    @property
    def packed(self):
        """The binary representation of this address."""
        return v6_int_to_packed(self._ip)

    @property
    def is_multicast(self):
        """Test if the address is reserved for multicast use.

        Returns:
            A boolean, True if the address is a multicast address.
            See RFC 2373 2.7 for details.

        """
        return self in self._constants._multicast_network

    @property
    def is_reserved(self):
        """Test if the address is otherwise IETF reserved.

        Returns:
            A boolean, True if the address is within one of the
            reserved IPv6 Network ranges.

        """
        return any(self in x for x in self._constants._reserved_networks)

    @property
    def is_link_local(self):
        """Test if the address is reserved for link-local.

        Returns:
            A boolean, True if the address is reserved per RFC 4291.

        """
        return self in self._constants._linklocal_network

    @property
    def is_site_local(self):
        """Test if the address is reserved for site-local.

        Note that the site-local address space has been deprecated by RFC 3879.
        Use is_private to test if this address is in the space of unique local
        addresses as defined by RFC 4193.

        Returns:
            A boolean, True if the address is reserved per RFC 3513 2.5.6.

        """
        return self in self._constants._sitelocal_network

    @property
    def is_private(self):
        """Test if this address is allocated for private networks.

        Returns:
            A boolean, True if the address is reserved per
            iana-ipv6-special-registry.

        """
        return any(self in net for net in self._constants._private_networks)

    @property
    def is_global(self):
        """Test if this address is allocated for public networks.

        Returns:
            A boolean, true if the address is not reserved per
            iana-ipv6-special-registry.

        """
        return not self.is_private

    @property
    def is_unspecified(self):
        """Test if the address is unspecified.

        Returns:
            A boolean, True if this is the unspecified address as defined in
            RFC 2373 2.5.2.

        """
        # The unspecified address is '::' (all zero bits).
        return self._ip == 0

    @property
    def is_loopback(self):
        """Test if the address is a loopback address.

        Returns:
            A boolean, True if the address is a loopback address as defined in
            RFC 2373 2.5.3.

        """
        # The loopback address is '::1'.
        return self._ip == 1

    @property
    def ipv4_mapped(self):
        """Return the IPv4 mapped address.

        Returns:
            If the IPv6 address is a v4 mapped address, return the
            IPv4 mapped address. Return None otherwise.

        """
        # v4-mapped means bits 32-47 are all ones and everything above
        # is zero (i.e. '::ffff:a.b.c.d').
        if (self._ip >> 32) != 0xFFFF:
            return None
        return IPv4Address(self._ip & 0xFFFFFFFF)

    @property
    def teredo(self):
        """Tuple of embedded teredo IPs.

        Returns:
            Tuple of the (server, client) IPs or None if the address
            doesn't appear to be a teredo address (doesn't start with
            2001::/32)

        """
        if (self._ip >> 96) != 0x20010000:
            return None
        # Server address is stored as-is in bits 64-95; the client
        # address is stored bit-inverted in the low 32 bits.
        return (IPv4Address((self._ip >> 64) & 0xFFFFFFFF),
                IPv4Address(~self._ip & 0xFFFFFFFF))

    @property
    def sixtofour(self):
        """Return the IPv4 6to4 embedded address.

        Returns:
            The IPv4 6to4-embedded address if present or None if the
            address doesn't appear to contain a 6to4 embedded address.

        """
        # 6to4 addresses live under 2002::/16; the embedded IPv4 address
        # occupies the following 32 bits.
        if (self._ip >> 112) != 0x2002:
            return None
        return IPv4Address((self._ip >> 80) & 0xFFFFFFFF)


class IPv6Interface(IPv6Address):

    """An IPv6 address together with its associated network.

    Combines an IPv6Address with the IPv6Network it lives on; exposes
    the netmask/hostmask of that network alongside the host address.
    """

    def __init__(self, address):
        # Integer / packed-bytes input: a bare address with the maximum
        # (/128) prefix length.
        if isinstance(address, (bytes, _compat_int_types)):
            IPv6Address.__init__(self, address)
            self.network = IPv6Network(self._ip)
            self._prefixlen = self._max_prefixlen
            return
        # Tuple input: (address,) or (address, prefixlen-or-netmask).
        if isinstance(address, tuple):
            IPv6Address.__init__(self, address[0])
            if len(address) > 1:
                self._prefixlen = int(address[1])
            else:
                self._prefixlen = self._max_prefixlen
            self.network = IPv6Network(address, strict=False)
            self.netmask = self.network.netmask
            self.hostmask = self.network.hostmask
            return

        # String input: "addr" or "addr/prefix".
        addr = _split_optional_netmask(address)
        IPv6Address.__init__(self, addr[0])
        self.network = IPv6Network(address, strict=False)
        self.netmask = self.network.netmask
        self._prefixlen = self.network._prefixlen
        self.hostmask = self.network.hostmask

    def __str__(self):
        return '%s/%d' % (self._string_from_ip_int(self._ip),
                          self.network.prefixlen)

    def __eq__(self, other):
        address_equal = IPv6Address.__eq__(self, other)
        if not address_equal or address_equal is NotImplemented:
            return address_equal
        try:
            return self.network == other.network
        except AttributeError:
            # An interface with an associated network is NOT the
            # same as an unassociated address. That's why the hash
            # takes the extra info into account.
            return False

    def __lt__(self, other):
        address_less = IPv6Address.__lt__(self, other)
        if address_less is NotImplemented:
            return NotImplemented
        try:
            return self.network < other.network
        except AttributeError:
            # We *do* allow addresses and interfaces to be sorted. The
            # unassociated address is considered less than all interfaces.
            return False

    def __hash__(self):
        # Mix the network information into the hash so it agrees with
        # __eq__ (interface != bare address with the same value).
        return self._ip ^ self._prefixlen ^ int(self.network.network_address)

    __reduce__ = _IPAddressBase.__reduce__

    @property
    def ip(self):
        # The bare address, without the network association.
        return IPv6Address(self._ip)

    @property
    def with_prefixlen(self):
        return '%s/%s' % (self._string_from_ip_int(self._ip),
                          self._prefixlen)

    @property
    def with_netmask(self):
        return '%s/%s' % (self._string_from_ip_int(self._ip),
                          self.netmask)

    @property
    def with_hostmask(self):
        return '%s/%s' % (self._string_from_ip_int(self._ip),
                          self.hostmask)

    @property
    def is_unspecified(self):
        return self._ip == 0 and self.network.is_unspecified

    @property
    def is_loopback(self):
        return self._ip == 1 and self.network.is_loopback


class IPv6Network(_BaseV6, _BaseNetwork):

    """This class represents and manipulates 128-bit IPv6 networks.

    Attributes: [examples for IPv6Network('2001:db8::1000/124')]
        .network_address: IPv6Address('2001:db8::1000')
        .hostmask: IPv6Address('::f')
        .broadcast_address: IPv6Address('2001:db8::100f')
        .netmask: IPv6Address('ffff:ffff:ffff:ffff:ffff:ffff:ffff:fff0')
        .prefixlen: 124

    """

    # Class to use when creating address objects
    _address_class = IPv6Address

    def __init__(self, address, strict=True):
        """Instantiate a new IPv6 Network object.

        Args:
            address: A string or integer representing the IPv6 network or the
              IP and prefix/netmask.
              '2001:db8::/128'
              '2001:db8:0000:0000:0000:0000:0000:0000/128'
              '2001:db8::'
              are all functionally the same in IPv6.  That is to say,
              failing to provide a subnetmask will create an object with
              a mask of /128.

              Additionally, an integer can be passed, so
              IPv6Network('2001:db8::') ==
                IPv6Network(42540766411282592856903984951653826560)
              or, more generally
              IPv6Network(int(IPv6Network('2001:db8::'))) ==
                IPv6Network('2001:db8::')

            strict: A boolean. If true, ensure that we have been passed
              a true network address, eg, 2001:db8::1000/124 and not an
              IP address on a network, eg, 2001:db8::1/124.

        Raises:
            AddressValueError: If address isn't a valid IPv6 address.
            NetmaskValueError: If the netmask isn't valid for
              an IPv6 address.
            ValueError: If strict was True and a network address was not
              supplied.

        """
        _BaseNetwork.__init__(self, address)

        # Efficient constructor from integer or packed address
        if isinstance(address, (bytes, _compat_int_types)):
            self.network_address = IPv6Address(address)
            self.netmask, self._prefixlen = self._make_netmask(
                self._max_prefixlen)
            return

        # Tuple input: (address,) or (address, prefixlen-or-netmask).
        if isinstance(address, tuple):
            if len(address) > 1:
                arg = address[1]
            else:
                arg = self._max_prefixlen
            self.netmask, self._prefixlen = self._make_netmask(arg)
            self.network_address = IPv6Address(address[0])
            packed = int(self.network_address)
            # Host bits set: error under strict, masked off otherwise.
            if packed & int(self.netmask) != packed:
                if strict:
                    raise ValueError('%s has host bits set' % self)
                else:
                    self.network_address = IPv6Address(packed &
                                                       int(self.netmask))
            return

        # Assume input argument to be string or any object representation
        # which converts into a formatted IP prefix string.
        addr = _split_optional_netmask(address)

        self.network_address = IPv6Address(self._ip_int_from_string(addr[0]))

        if len(addr) == 2:
            arg = addr[1]
        else:
            arg = self._max_prefixlen
        self.netmask, self._prefixlen = self._make_netmask(arg)

        if strict:
            if (IPv6Address(int(self.network_address) & int(self.netmask)) !=
                    self.network_address):
                raise ValueError('%s has host bits set' % self)
        # Normalize to the true network address (no-op when strict).
        self.network_address = IPv6Address(int(self.network_address) &
                                           int(self.netmask))

        if self._prefixlen == (self._max_prefixlen - 1):
            # In a /127 both addresses are usable hosts, so don't skip
            # the first one the way hosts() normally does.
            self.hosts = self.__iter__

    def hosts(self):
        """Generate Iterator over usable hosts in a network.

          This is like __iter__ except it doesn't return the
          Subnet-Router anycast address.

        """
        network = int(self.network_address)
        broadcast = int(self.broadcast_address)
        # Skip the network address; unlike IPv4, the highest address is
        # included as a usable host.
        for x in _compat_range(network + 1, broadcast + 1):
            yield self._address_class(x)

    @property
    def is_site_local(self):
        """Test if the address is reserved for site-local.

        Note that the site-local address space has been deprecated by RFC 3879.
        Use is_private to test if this address is in the space of unique local
        addresses as defined by RFC 4193.

        Returns:
            A boolean, True if the address is reserved per RFC 3513 2.5.6.

        """
        return (self.network_address.is_site_local and
                self.broadcast_address.is_site_local)


class _IPv6Constants(object):
    # Well-known IPv6 networks backing the IPv6Address property tests
    # (is_private, is_multicast, ...).  Attached to IPv6Address below as
    # the ``_constants`` class attribute.

    _linklocal_network = IPv6Network('fe80::/10')

    _multicast_network = IPv6Network('ff00::/8')

    # Networks listed in the iana-ipv6-special-registry.
    _private_networks = [
        IPv6Network('::1/128'),
        IPv6Network('::/128'),
        IPv6Network('::ffff:0:0/96'),
        IPv6Network('100::/64'),
        IPv6Network('2001::/23'),
        IPv6Network('2001:2::/48'),
        IPv6Network('2001:db8::/32'),
        IPv6Network('2001:10::/28'),
        IPv6Network('fc00::/7'),
        IPv6Network('fe80::/10'),
    ]

    # IETF-reserved address space (see IPv6Address.is_reserved).
    _reserved_networks = [
        IPv6Network('::/8'), IPv6Network('100::/8'),
        IPv6Network('200::/7'), IPv6Network('400::/6'),
        IPv6Network('800::/5'), IPv6Network('1000::/4'),
        IPv6Network('4000::/3'), IPv6Network('6000::/3'),
        IPv6Network('8000::/3'), IPv6Network('A000::/3'),
        IPv6Network('C000::/3'), IPv6Network('E000::/4'),
        IPv6Network('F000::/5'), IPv6Network('F800::/6'),
        IPv6Network('FE00::/9'),
    ]

    _sitelocal_network = IPv6Network('fec0::/10')


# Expose the constant tables on the address class so the instance
# properties (is_private, is_multicast, ...) can reach them.
IPv6Address._constants = _IPv6Constants
_vendor/__init__.py000064400000011076151733136270010326 0ustar00"""
pip._vendor is for vendoring dependencies of pip to prevent needing pip to
depend on something external.

Files inside of pip._vendor should be considered immutable and should only be
updated to versions from upstream.
"""
from __future__ import absolute_import

import glob
import os.path
import sys

# Downstream redistributors which have debundled our dependencies should also
# patch this value to be true. This will trigger the additional patching
# to cause things like "six" to be available as pip._vendor.six.
DEBUNDLED = False

# By default, look in this directory for a bunch of .whl files which we will
# add to the beginning of sys.path before attempting to import anything. This
# is done to support downstream re-distributors like Debian and Fedora who
# wish to create their own Wheels for our dependencies to aid in debundling.
WHEEL_DIR = os.path.abspath(os.path.dirname(__file__))


# Define a small helper function to alias our vendored modules to the real ones
# if the vendored ones do not exist. This idea of this was taken from
# https://github.com/kennethreitz/requests/pull/2567.
def vendored(modulename):
    """Make ``<this package>.<modulename>`` importable.

    Try the bundled copy first; if it is missing (a debundled install),
    fall back to the real top-level module and register that module
    under the vendored name as well, so both import paths resolve to
    the same module object.
    """
    vendored_name = "{0}.{1}".format(__name__, modulename)

    # The bundled copy imported cleanly -- nothing more to do.
    try:
        __import__(vendored_name, globals(), locals(), level=0)
        return
    except ImportError:
        pass

    # Fall back to the real, top-level module.
    try:
        __import__(modulename, globals(), locals(), level=0)
    except ImportError:
        # Both imports failed.  Swallow the error: these names are only
        # imported upfront to set up aliases, and letting pip fail later,
        # when it *actually* tries to use the module, produces a far more
        # useful error message than raising here would.
        return

    # Alias the real module under the vendored name, both in sys.modules
    # and as an attribute of the parent package/module.
    real_module = sys.modules[modulename]
    sys.modules[vendored_name] = real_module
    base, head = vendored_name.rsplit(".", 1)
    setattr(sys.modules[base], head, real_module)


# If we're operating in a debundled setup, then we want to go ahead and trigger
# the aliasing of our vendored libraries as well as looking for wheels to add
# to our sys.path. This will cause all of this code to be a no-op typically.
# However, downstream redistributors can enable it in a consistent way across
# all platforms.
if DEBUNDLED:
    # Actually look inside of WHEEL_DIR to find .whl files and add them to the
    # front of our sys.path.
    sys.path[:] = glob.glob(os.path.join(WHEEL_DIR, "*.whl")) + sys.path

    # Actually alias all of our vendored dependencies.
    vendored("cachecontrol")
    vendored("colorama")
    vendored("distlib")
    vendored("distro")
    vendored("html5lib")
    vendored("lockfile")
    vendored("six")
    vendored("six.moves")
    vendored("six.moves.urllib")
    vendored("packaging")
    vendored("packaging.version")
    vendored("packaging.specifiers")
    vendored("pkg_resources")
    vendored("progress")
    vendored("retrying")
    vendored("requests")
    vendored("requests.packages")
    vendored("requests.packages.urllib3")
    vendored("requests.packages.urllib3._collections")
    vendored("requests.packages.urllib3.connection")
    vendored("requests.packages.urllib3.connectionpool")
    vendored("requests.packages.urllib3.contrib")
    vendored("requests.packages.urllib3.contrib.ntlmpool")
    vendored("requests.packages.urllib3.contrib.pyopenssl")
    vendored("requests.packages.urllib3.exceptions")
    vendored("requests.packages.urllib3.fields")
    vendored("requests.packages.urllib3.filepost")
    vendored("requests.packages.urllib3.packages")
    vendored("requests.packages.urllib3.packages.ordered_dict")
    vendored("requests.packages.urllib3.packages.six")
    vendored("requests.packages.urllib3.packages.ssl_match_hostname")
    vendored("requests.packages.urllib3.packages.ssl_match_hostname."
             "_implementation")
    vendored("requests.packages.urllib3.poolmanager")
    vendored("requests.packages.urllib3.request")
    vendored("requests.packages.urllib3.response")
    vendored("requests.packages.urllib3.util")
    vendored("requests.packages.urllib3.util.connection")
    vendored("requests.packages.urllib3.util.request")
    vendored("requests.packages.urllib3.util.response")
    vendored("requests.packages.urllib3.util.retry")
    vendored("requests.packages.urllib3.util.ssl_")
    vendored("requests.packages.urllib3.util.timeout")
    vendored("requests.packages.urllib3.util.url")
_vendor/distlib/manifest.py000064400000034732151733136270012033 0ustar00# -*- coding: utf-8 -*-
#
# Copyright (C) 2012-2013 Python Software Foundation.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
"""
Class representing the list of files in a distribution.

Equivalent to distutils.filelist, but fixes some problems.
"""
import fnmatch
import logging
import os
import re
import sys

from . import DistlibException
from .compat import fsdecode
from .util import convert_path


__all__ = ['Manifest']

logger = logging.getLogger(__name__)

# a \ followed by some spaces + EOL
_COLLAPSE_PATTERN = re.compile('\\\w*\n', re.M)
_COMMENTED_LINE = re.compile('#.*?(?=\n)|\n(?=$)', re.M | re.S)

#
# Due to the different results returned by fnmatch.translate, we need
# to do slightly different processing for Python 2.7 and 3.2 ... this needed
# to be brought in for Python 3.6 onwards.
#
_PYTHON_VERSION = sys.version_info[:2]

class Manifest(object):
    """A list of files built by exploring the filesystem and filtered by
    applying various patterns to what we find there.
    """

    def __init__(self, base=None):
        """
        Initialise an instance.

        :param base: The base directory to explore under.
        """
        self.base = os.path.abspath(os.path.normpath(base or os.getcwd()))
        # base + separator, used to recognise paths already under the base.
        self.prefix = self.base + os.sep
        # Lazily populated by findall(); None means "not yet scanned".
        self.allfiles = None
        # The set of selected files (absolute, normalised paths).
        self.files = set()

    #
    # Public API
    #

    def findall(self):
        """Find all files under the base and set ``allfiles`` to the absolute
        pathnames of files found.
        """
        from stat import S_ISREG, S_ISDIR, S_ISLNK

        self.allfiles = allfiles = []
        root = self.base
        # Iterative depth-first walk using an explicit stack (avoids
        # recursion limits on deep trees).
        stack = [root]
        pop = stack.pop
        push = stack.append

        while stack:
            root = pop()
            names = os.listdir(root)

            for name in names:
                fullname = os.path.join(root, name)

                # Avoid excess stat calls -- just one will do, thank you!
                stat = os.stat(fullname)
                mode = stat.st_mode
                if S_ISREG(mode):
                    allfiles.append(fsdecode(fullname))
                elif S_ISDIR(mode) and not S_ISLNK(mode):
                    # NOTE(review): os.stat() follows symlinks, so S_ISLNK()
                    # is always False on its result and symlinked directories
                    # ARE traversed; os.lstat() would be needed to skip them
                    # -- confirm whether that is intended.
                    push(fullname)

    def add(self, item):
        """
        Add a file to the manifest.

        :param item: The pathname to add. This can be relative to the base.
        """
        # Anything not already under the base directory is treated as
        # relative and anchored at the base.
        if not item.startswith(self.prefix):
            item = os.path.join(self.base, item)
        self.files.add(os.path.normpath(item))

    def add_many(self, items):
        """
        Add a list of files to the manifest.

        :param items: The pathnames to add. These can be relative to the base.
        """
        for item in items:
            self.add(item)

    def sorted(self, wantdirs=False):
        """
        Return sorted files in directory order

        :param wantdirs: If true, the ancestors of each file (up to the
                         base) are included in the result as well.
        """

        def add_dir(dirs, d):
            # Record d and, recursively, every ancestor up to the base.
            dirs.add(d)
            logger.debug('add_dir added %s', d)
            if d != self.base:
                parent, _ = os.path.split(d)
                assert parent not in ('', '/')
                add_dir(dirs, parent)

        result = set(self.files)    # make a copy!
        if wantdirs:
            dirs = set()
            for f in result:
                add_dir(dirs, os.path.dirname(f))
            result |= dirs
        # Sorting on (head, tail) pairs yields directory order; note
        # os.path.split only separates the last component, so the sort key
        # is (dirname, basename) rather than a full per-component split.
        return [os.path.join(*path_tuple) for path_tuple in
                sorted(os.path.split(path) for path in result)]

    def clear(self):
        """Clear all collected files."""
        self.files = set()
        self.allfiles = []

    def process_directive(self, directive):
        """
        Process a directive which either adds some files from ``allfiles`` to
        ``files``, or removes some files from ``files``.

        :param directive: The directive to process. This should be in a format
                     compatible with distutils ``MANIFEST.in`` files:

                     http://docs.python.org/distutils/sourcedist.html#commands
        """
        # Parse the line: split it up, make sure the right number of words
        # is there, and return the relevant words.  'action' is always
        # defined: it's the first word of the line.  Which of the other
        # three are defined depends on the action; it'll be either
        # patterns, (dir and patterns), or (dirpattern).
        action, patterns, thedir, dirpattern = self._parse_directive(directive)

        # OK, now we know that the action is valid and we have the
        # right number of words on the line for that action -- so we
        # can proceed with minimal error-checking.
        if action == 'include':
            for pattern in patterns:
                if not self._include_pattern(pattern, anchor=True):
                    logger.warning('no files found matching %r', pattern)

        elif action == 'exclude':
            for pattern in patterns:
                found = self._exclude_pattern(pattern, anchor=True)
                #if not found:
                #    logger.warning('no previously-included files '
                #                   'found matching %r', pattern)

        elif action == 'global-include':
            for pattern in patterns:
                if not self._include_pattern(pattern, anchor=False):
                    logger.warning('no files found matching %r '
                                   'anywhere in distribution', pattern)

        elif action == 'global-exclude':
            for pattern in patterns:
                found = self._exclude_pattern(pattern, anchor=False)
                #if not found:
                #    logger.warning('no previously-included files '
                #                   'matching %r found anywhere in '
                #                   'distribution', pattern)

        elif action == 'recursive-include':
            for pattern in patterns:
                if not self._include_pattern(pattern, prefix=thedir):
                    logger.warning('no files found matching %r '
                                   'under directory %r', pattern, thedir)

        elif action == 'recursive-exclude':
            for pattern in patterns:
                found = self._exclude_pattern(pattern, prefix=thedir)
                #if not found:
                #    logger.warning('no previously-included files '
                #                   'matching %r found under directory %r',
                #                   pattern, thedir)

        elif action == 'graft':
            if not self._include_pattern(None, prefix=dirpattern):
                logger.warning('no directories found matching %r',
                               dirpattern)

        elif action == 'prune':
            if not self._exclude_pattern(None, prefix=dirpattern):
                logger.warning('no previously-included directories found '
                               'matching %r', dirpattern)
        else:   # pragma: no cover
            # This should never happen, as it should be caught in
            # _parse_template_line
            raise DistlibException(
                'invalid action %r' % action)

    #
    # Private API
    #

    def _parse_directive(self, directive):
        """
        Validate a directive.
        :param directive: The directive to validate.
        :return: A tuple of action, patterns, thedir, dir_patterns
        :raises DistlibException: if the directive is malformed or the
                                  action is unknown.
        """
        words = directive.split()
        if len(words) == 1 and words[0] not in ('include', 'exclude',
                                                'global-include',
                                                'global-exclude',
                                                'recursive-include',
                                                'recursive-exclude',
                                                'graft', 'prune'):
            # no action given, let's use the default 'include'
            words.insert(0, 'include')

        action = words[0]
        patterns = thedir = dir_pattern = None

        if action in ('include', 'exclude',
                      'global-include', 'global-exclude'):
            if len(words) < 2:
                raise DistlibException(
                    '%r expects <pattern1> <pattern2> ...' % action)

            patterns = [convert_path(word) for word in words[1:]]

        elif action in ('recursive-include', 'recursive-exclude'):
            if len(words) < 3:
                raise DistlibException(
                    '%r expects <dir> <pattern1> <pattern2> ...' % action)

            thedir = convert_path(words[1])
            patterns = [convert_path(word) for word in words[2:]]

        elif action in ('graft', 'prune'):
            if len(words) != 2:
                raise DistlibException(
                    '%r expects a single <dir_pattern>' % action)

            dir_pattern = convert_path(words[1])

        else:
            raise DistlibException('unknown action %r' % action)

        return action, patterns, thedir, dir_pattern

    def _include_pattern(self, pattern, anchor=True, prefix=None,
                         is_regex=False):
        """Select strings (presumably filenames) from 'self.files' that
        match 'pattern', a Unix-style wildcard (glob) pattern.

        Patterns are not quite the same as implemented by the 'fnmatch'
        module: '*' and '?'  match non-special characters, where "special"
        is platform-dependent: slash on Unix; colon, slash, and backslash on
        DOS/Windows; and colon on Mac OS.

        If 'anchor' is true (the default), then the pattern match is more
        stringent: "*.py" will match "foo.py" but not "foo/bar.py".  If
        'anchor' is false, both of these will match.

        If 'prefix' is supplied, then only filenames starting with 'prefix'
        (itself a pattern) and ending with 'pattern', with anything in between
        them, will match.  'anchor' is ignored in this case.

        If 'is_regex' is true, 'anchor' and 'prefix' are ignored, and
        'pattern' is assumed to be either a string containing a regex or a
        regex object -- no translation is done, the regex is just compiled
        and used as-is.

        Selected strings will be added to self.files.

        Return True if files are found.
        """
        # XXX docstring lying about what the special chars are?
        found = False
        pattern_re = self._translate_pattern(pattern, anchor, prefix, is_regex)

        # delayed loading of allfiles list
        if self.allfiles is None:
            self.findall()

        for name in self.allfiles:
            if pattern_re.search(name):
                self.files.add(name)
                found = True
        return found

    def _exclude_pattern(self, pattern, anchor=True, prefix=None,
                         is_regex=False):
        """Remove strings (presumably filenames) from 'files' that match
        'pattern'.

        Other parameters are the same as for 'include_pattern()', above.
        The list 'self.files' is modified in place. Return True if files are
        found.

        This API is public to allow e.g. exclusion of SCM subdirs, e.g. when
        packaging source distributions
        """
        found = False
        pattern_re = self._translate_pattern(pattern, anchor, prefix, is_regex)
        # Iterate over a snapshot so we can remove from self.files safely.
        for f in list(self.files):
            if pattern_re.search(f):
                self.files.remove(f)
                found = True
        return found

    def _translate_pattern(self, pattern, anchor=True, prefix=None,
                           is_regex=False):
        """Translate a shell-like wildcard pattern to a compiled regular
        expression.

        Return the compiled regex.  If 'is_regex' true,
        then 'pattern' is directly compiled to a regex (if it's a string)
        or just returned as-is (assumes it's a regex object).
        """
        if is_regex:
            if isinstance(pattern, str):
                return re.compile(pattern)
            else:
                return pattern

        if _PYTHON_VERSION > (3, 2):
            # ditch start and end characters
            # (on these versions fnmatch.translate() wraps its result in a
            # prologue/epilogue; translating the single character '_' and
            # splitting on it recovers those wrappers as `start` and `end`)
            start, _, end = self._glob_to_re('_').partition('_')

        if pattern:
            pattern_re = self._glob_to_re(pattern)
            if _PYTHON_VERSION > (3, 2):
                assert pattern_re.startswith(start) and pattern_re.endswith(end)
        else:
            pattern_re = ''

        # Escaped base directory (with trailing separator) used to anchor
        # matches at self.base.
        base = re.escape(os.path.join(self.base, ''))
        if prefix is not None:
            # ditch end of pattern character
            if _PYTHON_VERSION <= (3, 2):
                empty_pattern = self._glob_to_re('')
                prefix_re = self._glob_to_re(prefix)[:-len(empty_pattern)]
            else:
                prefix_re = self._glob_to_re(prefix)
                assert prefix_re.startswith(start) and prefix_re.endswith(end)
                # Strip the translate() prologue/epilogue before composing.
                prefix_re = prefix_re[len(start): len(prefix_re) - len(end)]
            sep = os.sep
            if os.sep == '\\':
                sep = r'\\'
            if _PYTHON_VERSION <= (3, 2):
                pattern_re = '^' + base + sep.join((prefix_re,
                                                    '.*' + pattern_re))
            else:
                pattern_re = pattern_re[len(start): len(pattern_re) - len(end)]
                # Re-wrap: base + prefix + separator + anything + pattern.
                pattern_re = r'%s%s%s%s.*%s%s' % (start, base, prefix_re, sep,
                                                  pattern_re, end)
        else:  # no prefix -- respect anchor flag
            if anchor:
                if _PYTHON_VERSION <= (3, 2):
                    pattern_re = '^' + base + pattern_re
                else:
                    pattern_re = r'%s%s%s' % (start, base, pattern_re[len(start):])

        return re.compile(pattern_re)

    def _glob_to_re(self, pattern):
        """Translate a shell-like glob pattern to a regular expression.

        Return a string containing the regex.  Differs from
        'fnmatch.translate()' in that '*' does not match "special characters"
        (which are platform-specific).
        """
        pattern_re = fnmatch.translate(pattern)

        # '?' and '*' in the glob pattern become '.' and '.*' in the RE, which
        # IMHO is wrong -- '?' and '*' aren't supposed to match slash in Unix,
        # and by extension they shouldn't match such "special characters" under
        # any OS.  So change all non-escaped dots in the RE to match any
        # character except the special characters (currently: just os.sep).
        sep = os.sep
        if os.sep == '\\':
            # we're using a regex to manipulate a regex, so we need
            # to escape the backslash twice
            sep = r'\\\\'
        escaped = r'\1[^%s]' % sep
        pattern_re = re.sub(r'((?<!\\)(\\\\)*)\.', escaped, pattern_re)
        return pattern_re
_vendor/distlib/__pycache__/index.cpython-36.opt-1.pyc000064400000041620151733136270016551 0ustar003

�Pf]R�@s�ddlZddlZddlZddlZddlZddlZyddlmZWn ek
r`ddl	mZYnXddl
mZddlm
Z
mZmZmZmZmZddlmZmZmZeje�ZdZdZGd	d
�d
e�ZdS)�N)�Thread�)�DistlibException)�HTTPBasicAuthHandler�Request�HTTPPasswordMgr�urlparse�build_opener�string_types)�cached_property�zip_dir�ServerProxyzhttps://pypi.python.org/pypi�pypic@s�eZdZdZdZd*dd�Zdd�Zdd	�Zd
d�Zdd
�Z	dd�Z
dd�Zd+dd�Zd,dd�Z
d-dd�Zd.dd�Zdd�Zd/dd�Zd0d d!�Zd1d"d#�Zd$d%�Zd&d'�Zd2d(d)�ZdS)3�PackageIndexzc
    This class represents a package index compatible with PyPI, the Python
    Package Index.
    s.----------ThIs_Is_tHe_distlib_index_bouNdaRY_$NcCs�|pt|_|j�t|j�\}}}}}}|s<|s<|s<|dkrJtd|j��d|_d|_d|_d|_d|_	t
tjd��R}xJdD]B}	y(t
j|	dg||d	�}
|
d
kr�|	|_PWq|tk
r�Yq|Xq|WWdQRXdS)
z�
        Initialise an instance.

        :param url: The URL of the index. If not specified, the URL for PyPI is
                    used.
        �http�httpszinvalid repository: %sN�w�gpg�gpg2z	--version)�stdout�stderrr)rr)rr)�
DEFAULT_INDEX�url�read_configurationrr�password_handler�ssl_verifierr�gpg_home�	rpc_proxy�open�os�devnull�
subprocessZ
check_call�OSError)�selfr�scheme�netloc�pathZparamsZqueryZfragZsink�s�rc�r)�/usr/lib/python3.6/index.py�__init__$s(

zPackageIndex.__init__cCs&ddlm}ddlm}|�}||�S)zs
        Get the distutils command for interacting with PyPI configurations.
        :return: the command.
        r)�Distribution)�
PyPIRCCommand)Zdistutils.corer,Zdistutils.configr-)r#r,r-�dr)r)r*�_get_pypirc_commandBsz PackageIndex._get_pypirc_commandcCsR|j�}|j|_|j�}|jd�|_|jd�|_|jdd�|_|jd|j�|_dS)z�
        Read the PyPI access configuration as supported by distutils, getting
        PyPI to do the actual work. This populates ``username``, ``password``,
        ``realm`` and ``url`` attributes from the configuration.
        �username�password�realmr�
repositoryN)r/rr3Z_read_pypirc�getr0r1r2)r#�cZcfgr)r)r*rLszPackageIndex.read_configurationcCs$|j�|j�}|j|j|j�dS)z�
        Save the PyPI access configuration. You must have set ``username`` and
        ``password`` attributes before calling this method.

        Again, distutils is used to do the actual work.
        N)�check_credentialsr/Z
_store_pypircr0r1)r#r5r)r)r*�save_configuration[szPackageIndex.save_configurationcCs\|jdks|jdkrtd��t�}t|j�\}}}}}}|j|j||j|j�t|�|_	dS)zp
        Check that ``username`` and ``password`` have been set, and raise an
        exception if not.
        Nz!username and password must be set)
r0r1rrrrZadd_passwordr2rr)r#Zpm�_r%r)r)r*r6gszPackageIndex.check_credentialscCs\|j�|j�|j�}d|d<|j|j�g�}|j|�}d|d<|j|j�g�}|j|�S)aq
        Register a distribution on PyPI, using the provided metadata.

        :param metadata: A :class:`Metadata` instance defining at least a name
                         and version number for the distribution to be
                         registered.
        :return: The HTTP response received from PyPI upon submission of the
                request.
        Zverifyz:actionZsubmit)r6�validate�todict�encode_request�items�send_request)r#�metadatar.�requestZresponser)r)r*�registerss

zPackageIndex.registercCsJx<|j�}|sP|jd�j�}|j|�tjd||f�qW|j�dS)ar
        Thread runner for reading lines of from a subprocess into a buffer.

        :param name: The logical name of the stream (used for logging only).
        :param stream: The stream to read from. This will typically a pipe
                       connected to the output stream of a subprocess.
        :param outbuf: The list to append the read lines to.
        zutf-8z%s: %sN)�readline�decode�rstrip�append�logger�debug�close)r#�name�streamZoutbufr'r)r)r*�_reader�s	
zPackageIndex._readercCs�|jdddg}|dkr|j}|r.|jd|g�|dk	rF|jdddg�tj�}tjj|tjj|�d	�}|jd
dd|d
||g�t	j
ddj|��||fS)a�
        Return a suitable command for signing a file.

        :param filename: The pathname to the file to be signed.
        :param signer: The identifier of the signer of the file.
        :param sign_password: The passphrase for the signer's
                              private key used for signing.
        :param keystore: The path to a directory which contains the keys
                         used in verification. If not specified, the
                         instance's ``gpg_home`` attribute is used instead.
        :return: The signing command as a list suitable to be
                 passed to :class:`subprocess.Popen`.
        z--status-fd�2z--no-ttyNz	--homedirz--batchz--passphrase-fd�0z.ascz
--detach-signz--armorz--local-userz--outputzinvoking: %s� )rr�extend�tempfileZmkdtemprr&�join�basenamerErF)r#�filename�signer�
sign_password�keystore�cmdZtdZsfr)r)r*�get_sign_command�s
zPackageIndex.get_sign_commandc	Cs�tjtjd�}|dk	r tj|d<g}g}tj|f|�}t|jd|j|fd�}|j�t|jd|j|fd�}|j�|dk	r�|jj	|�|jj
�|j�|j�|j�|j
||fS)a�
        Run a command in a child process , passing it any input data specified.

        :param cmd: The command to run.
        :param input_data: If specified, this must be a byte string containing
                           data to be sent to the child process.
        :return: A tuple consisting of the subprocess' exit code, a list of
                 lines read from the subprocess' ``stdout``, and a list of
                 lines read from the subprocess' ``stderr``.
        )rrN�stdinr)�target�argsr)r!�PIPE�PopenrrJr�startrrX�writerG�waitrP�
returncode)	r#rVZ
input_data�kwargsrr�pZt1Zt2r)r)r*�run_command�s$


zPackageIndex.run_commandc
CsD|j||||�\}}|j||jd��\}}}	|dkr@td|��|S)aR
        Sign a file.

        :param filename: The pathname to the file to be signed.
        :param signer: The identifier of the signer of the file.
        :param sign_password: The passphrase for the signer's
                              private key used for signing.
        :param keystore: The path to a directory which contains the keys
                         used in signing. If not specified, the instance's
                         ``gpg_home`` attribute is used instead.
        :return: The absolute pathname of the file where the signature is
                 stored.
        zutf-8rz&sign command failed with error code %s)rWrc�encoder)
r#rRrSrTrUrV�sig_filer(rrr)r)r*�	sign_file�s

zPackageIndex.sign_file�sdist�sourcecCs(|j�tjj|�s td|��|j�|j�}d}	|rZ|jsJtj	d�n|j
||||�}	t|d��}
|
j�}WdQRXt
j|�j�}t
j|�j�}
|jdd||||
d��dtjj|�|fg}|	�rt|	d��}
|
j�}WdQRX|jd	tjj|	�|f�tjtjj|	��|j|j�|�}|j|�S)
a�
        Upload a release file to the index.

        :param metadata: A :class:`Metadata` instance defining at least a name
                         and version number for the file to be uploaded.
        :param filename: The pathname of the file to be uploaded.
        :param signer: The identifier of the signer of the file.
        :param sign_password: The passphrase for the signer's
                              private key used for signing.
        :param filetype: The type of the file being uploaded. This is the
                        distutils command which produced that file, e.g.
                        ``sdist`` or ``bdist_wheel``.
        :param pyversion: The version of Python which the release relates
                          to. For code compatible with any Python, this would
                          be ``source``, otherwise it would be e.g. ``3.2``.
        :param keystore: The path to a directory which contains the keys
                         used in signing. If not specified, the instance's
                         ``gpg_home`` attribute is used instead.
        :return: The HTTP response received from PyPI upon submission of the
                request.
        z
not found: %sNz)no signing program available - not signed�rbZfile_upload�1)z:actionZprotocol_version�filetype�	pyversion�
md5_digest�
sha256_digest�contentZ
gpg_signature)r6rr&�existsrr9r:rrEZwarningrfr�read�hashlib�md5�	hexdigestZsha256�updaterQrD�shutilZrmtree�dirnamer;r<r=)r#r>rRrSrTrkrlrUr.re�fZ	file_datarmrn�filesZsig_datar?r)r)r*�upload_file�s>

zPackageIndex.upload_filec
Cs�|j�tjj|�s td|��tjj|d�}tjj|�sFtd|��|j�|j|j	}}t
|�j�}d	d|fd|fg}d||fg}|j||�}	|j
|	�S)
a2
        Upload documentation to the index.

        :param metadata: A :class:`Metadata` instance defining at least a name
                         and version number for the documentation to be
                         uploaded.
        :param doc_dir: The pathname of the directory which contains the
                        documentation. This should be the directory that
                        contains the ``index.html`` for the documentation.
        :return: The HTTP response received from PyPI upon submission of the
                request.
        znot a directory: %rz
index.htmlz
not found: %r�:action�
doc_uploadrH�versionro)r{r|)r6rr&�isdirrrPrpr9rHr}r�getvaluer;r=)
r#r>Zdoc_dir�fnrHr}Zzip_data�fieldsryr?r)r)r*�upload_documentation)s
z!PackageIndex.upload_documentationcCsT|jdddg}|dkr|j}|r.|jd|g�|jd||g�tjddj|��|S)	a|
        Return a suitable command for verifying a file.

        :param signature_filename: The pathname to the file containing the
                                   signature.
        :param data_filename: The pathname to the file containing the
                              signed data.
        :param keystore: The path to a directory which contains the keys
                         used in verification. If not specified, the
                         instance's ``gpg_home`` attribute is used instead.
        :return: The verifying command as a list suitable to be
                 passed to :class:`subprocess.Popen`.
        z--status-fdrKz--no-ttyNz	--homedirz--verifyzinvoking: %srM)rrrNrErFrP)r#�signature_filename�
data_filenamerUrVr)r)r*�get_verify_commandEszPackageIndex.get_verify_commandcCsH|jstd��|j|||�}|j|�\}}}|dkr@td|��|dkS)a6
        Verify a signature for a file.

        :param signature_filename: The pathname to the file containing the
                                   signature.
        :param data_filename: The pathname to the file containing the
                              signed data.
        :param keystore: The path to a directory which contains the keys
                         used in verification. If not specified, the
                         instance's ``gpg_home`` attribute is used instead.
        :return: True if the signature was verified, else False.
        z0verification unavailable because gpg unavailablerrz(verify command failed with error code %s)rr)rrr�rc)r#r�r�rUrVr(rrr)r)r*�verify_signature]szPackageIndex.verify_signaturecCsp|dkrd}tjd�n6t|ttf�r0|\}}nd}tt|��}tjd|�t|d���}|jt	|��}z�|j
�}	d}
d}d}d}
d	|	kr�t|	d
�}|r�||
|
|�xP|j|
�}|s�P|t
|�7}|j|�|r�|j|�|
d7}
|r�||
|
|�q�WWd|j�XWdQRX|dk�r4||k�r4td||f��|�rl|j�}||k�r`td||||f��tjd
|�dS)a
        This is a convenience method for downloading a file from an URL.
        Normally, this will be a file from the index, though currently
        no check is made for this (i.e. a file can be downloaded from
        anywhere).

        The method is just like the :func:`urlretrieve` function in the
        standard library, except that it allows digest computation to be
        done during download and checking that the downloaded data
        matched any expected value.

        :param url: The URL of the file to be downloaded (assumed to be
                    available via an HTTP GET request).
        :param destfile: The pathname where the downloaded file is to be
                         saved.
        :param digest: If specified, this must be a (hasher, value)
                       tuple, where hasher is the algorithm used (e.g.
                       ``'md5'``) and ``value`` is the expected value.
        :param reporthook: The same as for :func:`urlretrieve` in the
                           standard library.
        NzNo digest specifiedrszDigest specified: %s�wbi rrzcontent-lengthzContent-Lengthz1retrieval incomplete: got only %d out of %d bytesz.%s digest mismatch for %s: expected %s, got %szDigest verified: %s���)rErF�
isinstance�list�tuple�getattrrrrr=r�info�intrq�lenr^rurGrrt)r#r�destfileZdigestZ
reporthookZdigesterZhasherZdfpZsfp�headersZ	blocksize�sizerqZblocknum�block�actualr)r)r*�
download_filevsV




zPackageIndex.download_filecCs:g}|jr|j|j�|jr(|j|j�t|�}|j|�S)z�
        Send a standard library :class:`Request` to PyPI and return its
        response.

        :param req: The request to send.
        :return: The HTTP response from PyPI (a standard library HTTPResponse).
        )rrDrr	r)r#ZreqZhandlers�openerr)r)r*r=�szPackageIndex.send_requestcCs�g}|j}xX|D]P\}}t|ttf�s,|g}x2|D]*}|jd|d|jd�d|jd�f�q2WqWx6|D].\}}	}
|jd|d||	fjd�d|
f�qjW|jd|ddf�dj|�}d|}|tt|��d�}
t	|j
||
�S)	a&
        Encode fields and files for posting to an HTTP server.

        :param fields: The fields to send as a list of (fieldname, value)
                       tuples.
        :param files: The files to send as a list of (fieldname, filename,
                      file_bytes) tuple.
        s--z)Content-Disposition: form-data; name="%s"zutf-8�z8Content-Disposition: form-data; name="%s"; filename="%s"s
smultipart/form-data; boundary=)zContent-typezContent-length)�boundaryr�r�r�rNrdrP�strr�rr)r#r�ry�partsr��k�values�v�keyrR�valueZbodyZctr�r)r)r*r;�s2


zPackageIndex.encode_requestcCs>t|t�rd|i}|jdkr,t|jdd�|_|jj||p:d�S)NrHg@)Ztimeout�and)r�r
rr
r�search)r#Zterms�operatorr)r)r*r��s


zPackageIndex.search)N)N)N)N)NNrgrhN)N)N)NN)N)�__name__�
__module__�__qualname__�__doc__r�r+r/rr7r6r@rJrWrcrfrzr�r�r�r�r=r;r�r)r)r)r*rs*



#

8


M+r)rrZloggingrrvr!rOZ	threadingr�ImportErrorZdummy_threading�r�compatrrrrr	r
�utilrrr
Z	getLoggerr�rErZ
DEFAULT_REALM�objectrr)r)r)r*�<module>s  
_vendor/distlib/__pycache__/wheel.cpython-36.pyc000064400000060601151733136270015607 0ustar003

�Pf˘�@s�ddlmZddlZddlZddlZddlZddlmZddl	Z	ddl
Z
ddlZddlZddl
Z
ddlZddlZddlZddlZddlZddlZddlmZmZddlmZmZmZmZmZddlmZddlm Z m!Z!dd	l"m#Z#m$Z$m%Z%m&Z&m'Z'm(Z(m)Z)m*Z*m+Z+dd
l,m-Z-m.Z.ej/e0�Z1da2e3ed��r4dZ4n*ej5j6d
��rHdZ4nej5dk�rZdZ4ndZ4ej7d�Z8e8�s�dej9dd�Z8de8Z:e4e8Z;ej"j<�j=dd�j=dd�Z>ej7d�Z?e?�r�e?j6d��r�e?j=dd�Z?ndd�Z@e@�Z?[@ejAdejBejCB�ZDejAdejBejCB�ZEejAd�ZFejAd �ZGd!ZHd"ZIe
jJd#k�r>d$d%�ZKnd&d%�ZKGd'd(�d(eL�ZMeM�ZNGd)d*�d*eL�ZOd+d,�ZPeP�ZQ[Pd/d-d.�ZRdS)0�)�unicode_literalsN)�message_from_file�)�__version__�DistlibException)�	sysconfig�ZipFile�fsdecode�	text_type�filter)�InstalledDistribution)�Metadata�METADATA_FILENAME)	�FileOperator�convert_path�	CSVReader�	CSVWriter�Cache�cached_property�get_cache_base�read_exports�tempdir)�NormalizedVersion�UnsupportedVersionErrorZpypy_version_infoZpp�javaZjyZcliZip�cp�py_version_nodotz%s%s��py�-�_�.�SOABIzcpython-cCsRdtg}tjd�r|jd�tjd�r0|jd�tjd�dkrH|jd�d	j|�S)
Nr�Py_DEBUG�d�
WITH_PYMALLOC�mZPy_UNICODE_SIZE��u�)�
VER_SUFFIXr�get_config_var�append�join)�parts�r/�/usr/lib/python3.6/wheel.py�_derive_abi;s




r1zz
(?P<nm>[^-]+)
-(?P<vn>\d+[^-]*)
(-(?P<bn>\d+[^-]*))?
-(?P<py>\w+\d+(\.\w+\d+)*)
-(?P<bi>\w+)
-(?P<ar>\w+(\.\w+)*)
\.whl$
z7
(?P<nm>[^-]+)
-(?P<vn>\d+[^-]*)
(-(?P<bn>\d+[^-]*))?$
s
\s*#![^\r\n]*s^(\s*#!("[^"]+"|\S+))\s+(.*)$s#!pythons	#!pythonw�/cCs|S)Nr/)�or/r/r0�<lambda>]sr4cCs|jtjd�S)Nr2)�replace�os�sep)r3r/r/r0r4_sc@s6eZdZdd�Zdd�Zdd�Zddd	�Zd
d�ZdS)
�MountercCsi|_i|_dS)N)�
impure_wheels�libs)�selfr/r/r0�__init__cszMounter.__init__cCs||j|<|jj|�dS)N)r9r:�update)r;�pathname�
extensionsr/r/r0�addgs
zMounter.addcCs4|jj|�}x"|D]\}}||jkr|j|=qWdS)N)r9�popr:)r;r>r?�k�vr/r/r0�removeks
zMounter.removeNcCs||jkr|}nd}|S)N)r:)r;�fullname�path�resultr/r/r0�find_moduleqs
zMounter.find_modulecCsj|tjkrtj|}nP||jkr,td|��tj||j|�}||_|jdd�}t|�dkrf|d|_	|S)Nzunable to find extension for %sr!rr)
�sys�modulesr:�ImportError�impZload_dynamic�
__loader__�rsplit�len�__package__)r;rErGr.r/r/r0�load_modulexs


zMounter.load_module)N)�__name__�
__module__�__qualname__r<r@rDrHrQr/r/r/r0r8bs

r8c@s�eZdZdZd2ZdZd3dd�Zedd	��Zed
d��Z	edd
��Z
edd��Zdd�Z
edd��Zdd�Zd4dd�Zdd�Zdd�Zdd�Zd5dd�Zd d!�Zd"d#�Zd$d%�Zd&d'�Zd(d)�Zd6d*d+�Zd,d-�Zd.d/�Zd7d0d1�ZdS)8�Wheelz@
    Class to build and install from Wheel files (PEP 427).
    rZsha256NFcCs8||_||_d|_tg|_dg|_dg|_tj�|_	|dkrRd|_
d|_|j|_
n�tj|�}|r�|jd�}|d|_
|djd	d
�|_|d|_|j|_
n�tjj|�\}}tj|�}|s�td|��|r�tjj|�|_	||_
|jd�}|d|_
|d|_|d|_|d
jd�|_|djd�|_|djd�|_dS)zB
        Initialise an instance using a (valid) filename.
        r)�none�anyNZdummyz0.1ZnmZvnr rZbnzInvalid name or filename: %rrr!Zbi�ar)�signZ
should_verify�buildver�PYVER�pyver�abi�archr6�getcwd�dirname�name�version�filenameZ	_filename�NAME_VERSION_RE�match�	groupdictr5rF�split�FILENAME_REr�abspath)r;rcrY�verifyr&�infor`r/r/r0r<�sB











zWheel.__init__cCs^|jrd|j}nd}dj|j�}dj|j�}dj|j�}|jjdd�}d|j|||||fS)zJ
        Build and return a filename from the various components.
        rr)r!r z%s-%s%s-%s-%s-%s.whl)rZr-r\r]r^rbr5ra)r;rZr\r]r^rbr/r/r0rc�s
zWheel.filenamecCstjj|j|j�}tjj|�S)N)r6rFr-r`rc�isfile)r;rFr/r/r0�exists�szWheel.existsccs@x:|jD]0}x*|jD] }x|jD]}|||fVq WqWqWdS)N)r\r]r^)r;r\r]r^r/r/r0�tags�sz
Wheel.tagscCs�tjj|j|j�}d|j|jf}d|}tjd�}t	|d���}|j
|�}|djdd�}tdd	�|D��}|d
krzd
}	nt
}	y8tj||	�}
|j|
��}||�}t|d�}
WdQRXWn tk
r�td|	��YnXWdQRX|
S)Nz%s-%sz%s.dist-infozutf-8�rz
Wheel-Versionr!rcSsg|]}t|��qSr/)�int)�.0�ir/r/r0�
<listcomp>�sz"Wheel.metadata.<locals>.<listcomp>ZMETADATA)Zfileobjz$Invalid wheel, because %s is missing)rr)r6rFr-r`rcrarb�codecs�	getreaderr�get_wheel_metadatarg�tupler�	posixpath�openr
�KeyError�
ValueError)r;r>�name_ver�info_dir�wrapper�zf�wheel_metadata�wv�file_version�fn�metadata_filename�bf�wfrGr/r/r0�metadata�s(

zWheel.metadatac	CsXd|j|jf}d|}tj|d�}|j|��}tjd�|�}t|�}WdQRXt|�S)Nz%s-%sz%s.dist-info�WHEELzutf-8)	rarbrxr-ryrtrur�dict)r;rr|r}r�r�r��messager/r/r0rv�szWheel.get_wheel_metadatac	Cs6tjj|j|j�}t|d��}|j|�}WdQRX|S)Nro)r6rFr-r`rcrrv)r;r>rrGr/r/r0rk�sz
Wheel.infocCs�tj|�}|r||j�}|d|�||d�}}d|j�krBt}nt}tj|�}|rfd|j�d
}nd}||}||}nT|jd�}|jd�}	|dks�||	kr�d}
n|||d�d	kr�d	}
nd}
t|
|}|S)Nspythonw� r��
�
rrs
���)	�
SHEBANG_REre�end�lower�SHEBANG_PYTHONW�SHEBANG_PYTHON�SHEBANG_DETAIL_RE�groups�find)r;�datar&r�ZshebangZdata_after_shebangZshebang_python�argsZcrZlfZtermr/r/r0�process_shebang�s,




zWheel.process_shebangcCsh|dkr|j}ytt|�}Wn tk
r<td|��YnX||�j�}tj|�jd�j	d�}||fS)NzUnsupported hash algorithm: %r�=�ascii)
�	hash_kind�getattr�hashlib�AttributeErrorr�digest�base64Zurlsafe_b64encode�rstrip�decode)r;r�r��hasherrGr/r/r0�get_hashszWheel.get_hashc
Csbt|�}ttjj||��}|j|ddf�|j�t|��}x|D]}|j|�qBWWdQRXdS)Nr))	�list�to_posixr6rF�relpathr,�sortrZwriterow)r;�recordsZrecord_path�base�p�writer�rowr/r/r0�write_record's

zWheel.write_recordcCs�g}|\}}tt|j�}xX|D]P\}}	t|	d��}
|
j�}WdQRXd|j|�}tjj|	�}
|j	|||
f�qWtjj
|d�}	|j||	|�ttjj
|d��}|j	||	f�dS)N�rbz%s=%s�RECORD)
r�r�r�ry�readr�r6rF�getsizer,r-r�r�)r;rk�libdir�
archive_pathsr��distinfor}r��apr��fr�r��sizer/r/r0�
write_records0szWheel.write_recordscCsJt|dtj��2}x*|D]"\}}tjd||�|j||�qWWdQRXdS)N�wzWrote %s to %s in wheel)r�zipfileZZIP_DEFLATED�logger�debug�write)r;r>r�rr�r�r/r/r0�	build_zip@szWheel.build_zipc!s�|dkri}tt�fdd�d%��d}|dkrFd}tg}tg}tg}nd}tg}d	g}d
g}|jd|�|_|jd|�|_|jd
|�|_	�|}	d|j
|jf}
d|
}d|
}g}
x�d&D]�}|�kr�q��|}tj
j|�r�x�tj|�D]�\}}}x�|D]�}ttj
j||��}tj
j||�}ttj
j|||��}|
j||f�|dk�r�|jd��r�t|d��}|j�}WdQRX|j|�}t|d��}|j|�WdQRX�q�Wq�Wq�W|	}d}x�tj|�D]�\}}}||k�r"x@t|�D]4\}}t|�}|jd��r�tj
j||�}||=P�q�W|�s"td��xP|D]H}t|�jd'��r@�q(tj
j||�}ttj
j||��}|
j||f��q(W�q�Wtj|�}xJ|D]B}|d(k�r�ttj
j||��}ttj
j||��}|
j||f��q�Wd|�p�|jd td!|g}x*|jD] \}}}|jd"|||f��q�Wtj
j|d�}t|d#��}|jd$j|��WdQRXttj
j|d��}|
j||f�|j ||f|	|
�tj
j|j!|j"�} |j#| |
�| S))z�
        Build a wheel from files in specified paths, and use any specified tags
        when determining the name of the wheel.
        Ncs|�kS)Nr/)r3)�pathsr/r0r4NszWheel.build.<locals>.<lambda>�purelib�platlibrZfalse�truerVrWr\r]r^z%s-%sz%s.dataz%s.dist-infor��headers�scriptsz.exer��wbz
.dist-infoz(.dist-info directory expected, not found�.pyc�.pyor��	INSTALLER�SHAREDr�zWheel-Version: %d.%dzGenerator: distlib %szRoot-Is-Purelib: %sz
Tag: %s-%s-%sr��
)r�r�)r�r�r�)r�r�)r�r�r�r�)$r�r�IMPVER�ABI�ARCHr[�getr\r]r^rarbr6rF�isdir�walkr	r-r�r�r,�endswithryr�r�r��	enumerate�AssertionError�listdir�
wheel_versionrrnr�r`rcr�)!r;r�rnr�ZlibkeyZis_pureZ
default_pyverZdefault_abiZdefault_archr�r|�data_dirr}r��keyrF�root�dirs�filesr�r��rpr�r�r�r�rr�dnr�r\r]r^r>r/)r�r0�buildFs�


"





zWheel.buildcBIKs`|j}|jd�}|jdd�}tjj|j|j�}d|j|jf}d|}	d|}
t	j|
t
�}t	j|
d�}t	j|
d�}
tjd	�}t
|d
����}|j|��}||�}t|�}WdQRX|djd
d�}tdd�|D��}||jkr�|r�||j|�|ddk�r|d}n|d}i}|j|
��<}t|d��&}x|D]}|d}|||<�q.WWdQRXWdQRXt	j|	d�}t	j|
d�}t	j|	dd�}t|d�}d|_tj}g} tj�}!|!|_d|_�z��y^�x�|j�D�]�}"|"j}#t|#t��r�|#}$n
|#jd	�}$|$j d��r��q�||$}|d�r0t!|"j"�|dk�r0t#d|$��|d�r�|djdd�\}%}&|j|#��}|j$�}'WdQRX|j%|'|%�\}(})|)|&k�r�t#d|#��|�r�|$j&||f��r�t'j(d |$��q�|$j&|��o�|$j d!�}*|$j&|��r|$jd"d�\}(}+},tjj||+t)|,��}-n$|$||
fk�r�q�tjj|t)|$��}-|*�s|j|#��}|j*||-�WdQRX| j+|-�|�r�|d�r�t|-d#��4}|j$�}'|j%|'|%�\}(}.|.|)k�r�t#d$|-��WdQRX|�rx|-j d%��rxy|j,|-�}/| j+|/�Wn$t-k
�rt'j.d&dd'�YnXnttjj/t)|#��}0tjj|!|0�}1|j|#��}|j*||1�WdQRXtjj|-�\}2}0|2|_|j0|0�}3|j1|3�| j2|3��q�W|�r�t'j(d(�d}4�n~d}5|j3d}|d)k�r~t	j|
d*�}6y�|j|6��}t4|�}7WdQRXi}5xxd<D]p}8d-|8}9|9|7k�r�i|5d.|8<}:xF|7|9j5�D]6};d/|;j6|;j7f}<|;j8�rB|<d0|;j87}<|<|:|;j<�qW�q�WWn t-k
�rzt'j.d1�YnXndyB|j|��.}||�}t9j:|�jd2�}5|5�r�|5jd3�}5WdQRXWn t-k
�r�t'j.d4�YnX|5�r�|5jd5i�}=|5jd6i�}>|=�s|>�r�|jdd�}?tjj;|?��s.t<d7��|?|_x6|=j=�D]*\}9};d8|9|;f}@|j0|@�}3|j1|3��q>W|>�r�d,di}Ax8|>j=�D],\}9};d8|9|;f}@|j0|@|A�}3|j1|3��q�Wtjj||
�}t>|�}4t?|�}|d=|d=||d9<|4j@||�}|�r| j+|�|4jA| |d:|�|4St-k
�r@t'jBd;�|jC��YnXWdtDjE|!�XWdQRXdS)=a�
        Install a wheel to the specified paths. If kwarg ``warner`` is
        specified, it should be a callable, which will be called with two
        tuples indicating the wheel version of this software and the wheel
        version in the file, if there is a discrepancy in the versions.
        This can be used to issue any warnings to raise any exceptions.
        If kwarg ``lib_only`` is True, only the purelib/platlib files are
        installed, and the headers, scripts, data and dist-info metadata are
        not written.

        The return value is a :class:`InstalledDistribution` instance unless
        ``options.lib_only`` is True, in which case the return value is ``None``.
        �warner�lib_onlyFz%s-%sz%s.dataz%s.dist-infor�r�zutf-8roNz
Wheel-Versionr!rcSsg|]}t|��qSr/)rp)rqrrr/r/r0rs�sz!Wheel.install.<locals>.<listcomp>zRoot-Is-Purelibr�r�r�)�streamrr)r�)�dry_runTz/RECORD.jwsrzsize mismatch for %s�=zdigest mismatch for %szlib_only: skipping %sz.exer2r�zdigest mismatch on write for %sz.pyzByte-compilation failed)�exc_infozlib_only: returning Nonez1.0zentry_points.txt�console�guiz
%s_scriptszwrap_%sz%s:%sz %szAUnable to read legacy script metadata, so cannot generate scriptsr?zpython.commandsz8Unable to read JSON metadata, so cannot generate scriptsZwrap_consoleZwrap_guizValid script path not specifiedz%s = %s�lib�prefixzinstallation failed.)r�r�)Fr�r�r6rFr-r`rcrarbrxrrtrurryrrgrwr�rr�recordrI�dont_write_bytecode�tempfileZmkdtempZ
source_dirZ
target_dir�infolist�
isinstancer
r�r��str�	file_sizerr�r��
startswithr�r�rZcopy_streamr,Zbyte_compile�	ExceptionZwarning�basenameZmakeZset_executable_mode�extendrkr�valuesr��suffix�flags�json�loadr�r{�itemsrr�Zwrite_shared_locationsZwrite_installed_filesZ	exceptionZrollback�shutilZrmtree)Br;r�Zmaker�kwargsr�r�r�r>r|r�r}�
metadata_name�wheel_metadata_name�record_namer~r�bwfr�r�r�r�r�r�r��readerr�r�Zdata_pfxZinfo_pfxZ
script_pfxZfileopZbcZoutfiles�workdir�zinfo�arcname�	u_arcname�kind�valuer�r r�Z	is_script�wherer�ZoutfileZ	newdigestZpycr�Zworknamer��	filenamesZdistZcommandsZepZepdatar�rBr$rC�sZconsole_scriptsZgui_scriptsZ
script_dirZscriptZoptionsr/r/r0�install�sB



"
















z
Wheel.installcCs4tdkr0tjjt�td�tjdd��}t|�atS)Nzdylib-cache�)	�cacher6rFr-rr�rIrbr)r;r�r/r/r0�_get_dylib_cache�s
zWheel._get_dylib_cachecCsltjj|j|j�}d|j|jf}d|}tj|d�}tj	d�}g}t
|d���}y�|j|���}||�}	tj
|	�}
|j�}|j|�}tjj|j|�}
tjj|
�s�tj|
�x�|
j�D]�\}}tjj|
t|��}tjj|�s�d}n6tj|�j}tjj|�}|j|�}tj|j�}||k}|�r(|j||
�|j||f�q�WWdQRXWntk
�r\YnXWdQRX|S)Nz%s-%sz%s.dist-infoZ
EXTENSIONSzutf-8roT)r6rFr-r`rcrarbrxrtrurryr�r�rZ
prefix_to_dirr�r��makedirsr�rrm�stat�st_mtime�datetimeZ
fromtimestampZgetinfoZ	date_time�extractr,rz)r;r>r|r}rr~rGrr�r�r?rr�Z
cache_baserar��destrZ	file_timerkZ
wheel_timer/r/r0�_get_extensions�s>




 zWheel._get_extensionscCst|�S)zM
        Determine if a wheel is compatible with the running system.
        )�
is_compatible)r;r/r/r0r�szWheel.is_compatiblecCsdS)zP
        Determine if a wheel is asserted as mountable by its metadata.
        Tr/)r;r/r/r0�is_mountable�szWheel.is_mountablecCs�tjjtjj|j|j��}|j�s2d|}t|��|j�sJd|}t|��|t	jkrbt
jd|�nN|rtt	jj|�nt	jj
d|�|j�}|r�tt	jkr�t	jjt�tj||�dS)Nz)Wheel %s not compatible with this Python.z$Wheel %s is marked as not mountable.z%s already in pathr)r6rFrir-r`rcrrrrIr�r�r,�insertr�_hook�	meta_pathr@)r;r,r>�msgr?r/r/r0�mount�s"

zWheel.mountcCsrtjjtjj|j|j��}|tjkr2tjd|�n<tjj	|�|t
jkrRt
j	|�t
jsnt
tjkrntjj	t
�dS)Nz%s not in path)
r6rFrir-r`rcrIr�r�rDrr9r)r;r>r/r/r0�unmount�s



z
Wheel.unmountc'Cstjj|j|j�}d|j|jf}d|}d|}tj|t�}tj|d�}tj|d�}t	j
d�}t|d����}	|	j|��}
||
�}t
|�}WdQRX|djd	d
�}
tdd�|
D��}i}|	j|��:}t|d
��$}x|D]}|d}|||<q�WWdQRXWdQRXx�|	j�D]�}|j}t|t��r*|}n
|jd�}d|k�rJtd|��|jd��rZ�q||}|d�r�t|j�|dk�r�td|��|d
�r|d
jdd
�\}}|	j|��}|j�}WdQRX|j||�\}}||k�rtd|���qWWdQRXdS)Nz%s-%sz%s.dataz%s.dist-infor�r�zutf-8roz
Wheel-Versionr!rcSsg|]}t|��qSr/)rp)rqrrr/r/r0rs�sz Wheel.verify.<locals>.<listcomp>)r�rz..zinvalid entry in wheel: %rz/RECORD.jwsrzsize mismatch for %sr�zdigest mismatch for %s)r6rFr-r`rcrarbrxrrtrurryrrgrwrr�r�r
r�rr�r�r�r�r�)r;r>r|r�r}r�r�rr~rrr�r�r�r�r�r�rr�r�rrrrrr�r r�r/r/r0rj�sT

 



zWheel.verifycKs�dd�}dd�}tjj|j|j�}d|j|jf}d|}tj|d�}	t����}
t	|d���}i}xt|j
�D]h}
|
j}t|t�r�|}n
|j
d	�}||	kr�qjd
|kr�td|��|j|
|
�tjj|
t|��}|||<qjWWdQRX|||�\}}||f|�}|�r�|||�\}}|�r(||k�r(|||�|dk�rRtjd
d|
d�\}}tj|�n*tjj|��sltd|��tjj||j�}t|j��}tjj|
|�}||f}|j||
|�|j||�|dk�r�tj||�WdQRX|S)a�
        Update the contents of a wheel in a generic way. The modifier should
        be a callable which expects a dictionary argument: its keys are
        archive-entry paths, and its values are absolute filesystem paths
        where the contents the corresponding archive entries can be found. The
        modifier is free to change the contents of the files pointed to, add
        new entries and remove entries, before returning. This method will
        extract the entire contents of the wheel to a temporary location, call
        the modifier, and then use the passed (and possibly updated)
        dictionary to write a new wheel. If ``dest_dir`` is specified, the new
        wheel is written there -- otherwise, the original wheel is overwritten.

        The modifier should return True if it updated the wheel, else False.
        This method returns the same value the modifier returns.
        cSsHd}}d|tf}||kr$d|}||kr@||}t|d�j}||fS)Nz%s/%sz%s/PKG-INFO)rF)rr
rb)�path_mapr}rbrFr�r/r/r0�get_version1sz!Wheel.update.<locals>.get_versioncSs�d}y|t|�}|jd�}|dkr*d|}nTdd�||dd�jd�D�}|dd7<d|d|�djd	d
�|D��f}Wn tk
r�tjd|�YnX|r�t|d�}||_|j	t
�}|j||d
�tjd||�dS)Nrrz%s+1cSsg|]}t|��qSr/)rp)rqrr/r/r0rsCsz8Wheel.update.<locals>.update_version.<locals>.<listcomp>rr!z%s+%scss|]}t|�VqdS)N)r�)rqrrr/r/r0�	<genexpr>Fsz7Wheel.update.<locals>.update_version.<locals>.<genexpr>z0Cannot update non-compliant (PEP-440) version %r)rF)rF�legacyzVersion updated from %r to %rr�)rr�rgr-rr�r�r
rbr�rr�)rbrF�updatedrCrrr.Zmdr"r/r/r0�update_version;s(

 
z$Wheel.update.<locals>.update_versionz%s-%sz%s.dist-infor�rozutf-8z..zinvalid entry in wheel: %rNz.whlz
wheel-update-)r�r��dirzNot a directory: %r)r6rFr-r`rcrarbrxrrr�r�r
r�rrrr�Zmkstemp�closer�r�r�r�r�r�Zcopyfile)r;ZmodifierZdest_dirr�r r$r>r|r}rrrrrrrrFZoriginal_versionr ZmodifiedZcurrent_version�fd�newpathr�r�rkr/r/r0r= sX






zWheel.update)rr)NFF)N)NN)F)N)rRrSrT�__doc__r�r�r<�propertyrcrmrnrr�rvrkr�r�r�r�r�r�rrrrrrrrjr=r/r/r/r0rU�s4
)	
	
he	"
6rUcCs�tg}td}x6ttjdddd�D]}|jdj|t|�g��q&Wg}x6tj�D]*\}}}|j	d�rT|j|j
dd�d�qTW|j�tdkr�|j
dt�|jd�g}tg}tjdk�r�tjd	t�}|�r�|j�\}	}}}
t|�}|
g}|
dk�r|jd�|
dk�r|jd�|
dk�r*|jd�|
dk�r>|jd�|
dk�rR|jd�xL|dk�r�x2|D]*}d|	|||f}
|
tk�rd|j|
��qdW|d8}�qTWx<|D]4}x,|D]$}
|jdjt|df�||
f��q�W�q�WxXt|�D]L\}}|jdjt|f�ddf�|dk�r�|jdjt|df�ddf��q�WxXt|�D]L\}}|jdjd|f�ddf�|dk�rB|jdjd|df�ddf��qBWt|�S)zG
    Return (pyver, abi, arch) tuples compatible with this Python.
    rrr)z.abir!rrV�darwinz(\w+)_(\d+)_(\d+)_(\w+)$�i386�ppcZfat�x86_64Zfat3�ppc64Zfat64�intelZ	universalz%s_%s_%s_%srWrr�r�)r,r-)r,r-r.)r/r.)r,r.)r,r.r0r-r/)r*�rangerI�version_infor,r-r�rLZget_suffixesr�rgr�r�rr��platform�rerer�rp�
IMP_PREFIXr��set)Zversions�major�minorZabisr�r rGZarchesr&rar^Zmatchesrerr]rrrbr/r/r0�compatible_tags�s`















*
$
$r9cCs^t|t�st|�}d}|dkr"t}x6|D].\}}}||jkr(||jkr(||jkr(d}Pq(W|S)NFT)r�rU�COMPATIBLE_TAGSr\r]r^)ZwheelrnrGZverr]r^r/r/r0r�s
r)N)SZ
__future__rr�rtrZdistutils.utilZ	distutilsZemailrr�rLr�Zloggingr6rxr4r�rIr�r�r)rr�compatrrr	r
rZdatabaserr�r
r�utilrrrrrrrrrrbrrZ	getLoggerrRr�r�hasattrr5r3r�r+r*r2r[r��get_platformr5r�r�r1�compile�
IGNORECASE�VERBOSErhrdr�r�r�r�r7r��objectr8rrUr9r:rr/r/r/r0�<module>s�,


	


#>_vendor/distlib/__pycache__/version.cpython-36.pyc000064400000050714151733136270016174 0ustar003

�Pf�\�@sZdZddlZddlZddlmZddddd	d
ddgZeje�ZGd
d�de	�Z
Gdd�de�ZGdd�de�Z
ejd�Zdd�ZeZGdd�de�Zdd�ZGdd�de
�Zejd�dfejd�dfejd�dfejd�dfejd �d!fejd"�d!fejd#�d$fejd%�d&fejd'�d(fejd)�d*ff
Zejd+�dfejd,�dfejd-�d$fejd#�d$fejd.�dffZejd/�Zd0d1�Zd2d3�Zejd4ej�Zd5d5d6d5d7ddd8�Zd9d:�ZGd;d�de�ZGd<d�de
�Zejd=ej�Z d>d?�Z!d@dA�Z"GdBd	�d	e�Z#GdCd
�d
e
�Z$GdDdE�dEe�Z%e%eee�e%eedFdG��e%e"e$e�dH�Z&e&dIe&dJ<dKd�Z'dS)Lz~
Implementation of a flexible versioning scheme providing support for PEP-440,
setuptools-compatible and semantic versioning.
�N�)�string_types�NormalizedVersion�NormalizedMatcher�
LegacyVersion�
LegacyMatcher�SemanticVersion�SemanticMatcher�UnsupportedVersionError�
get_schemec@seZdZdZdS)r
zThis is an unsupported version.N)�__name__�
__module__�__qualname__�__doc__�rr�/usr/lib/python3.6/version.pyr
sc@sxeZdZdd�Zdd�Zdd�Zdd�Zd	d
�Zdd�Zd
d�Z	dd�Z
dd�Zdd�Zdd�Z
dd�Zedd��ZdS)�VersioncCs@|j�|_}|j|�|_}t|t�s,t�t|�dks<t�dS)Nr)�strip�_string�parse�_parts�
isinstance�tuple�AssertionError�len)�self�s�partsrrr�__init__szVersion.__init__cCstd��dS)Nzplease implement in a subclass)�NotImplementedError)rrrrrr$sz
Version.parsecCs$t|�t|�kr td||f��dS)Nzcannot compare %r and %r)�type�	TypeError)r�otherrrr�_check_compatible'szVersion._check_compatiblecCs|j|�|j|jkS)N)r#r)rr"rrr�__eq__+s
zVersion.__eq__cCs|j|�S)N)r$)rr"rrr�__ne__/szVersion.__ne__cCs|j|�|j|jkS)N)r#r)rr"rrr�__lt__2s
zVersion.__lt__cCs|j|�p|j|�S)N)r&r$)rr"rrr�__gt__6szVersion.__gt__cCs|j|�p|j|�S)N)r&r$)rr"rrr�__le__9szVersion.__le__cCs|j|�p|j|�S)N)r'r$)rr"rrr�__ge__<szVersion.__ge__cCs
t|j�S)N)�hashr)rrrr�__hash__@szVersion.__hash__cCsd|jj|jfS)Nz%s('%s'))�	__class__rr)rrrr�__repr__CszVersion.__repr__cCs|jS)N)r)rrrr�__str__FszVersion.__str__cCstd��dS)NzPlease implement in subclasses.)r)rrrr�
is_prereleaseIszVersion.is_prereleaseN)rr
rrrr#r$r%r&r'r(r)r+r-r.�propertyr/rrrrrsrc	@s�eZdZdZejd�Zejd�Zejd�Zdd�dd�dd�d	d�d
d�dd�dd�d
d�d�Z	dd�Z
dd�Zedd��Z
dd�Zdd�Zdd�Zdd�Zdd�Zdd �ZdS)!�MatcherNz^(\w[\s\w'.-]*)(\((.*)\))?z'^(<=|>=|<|>|!=|={2,3}|~=)?\s*([^\s,]+)$z
^\d+(\.\d+)*$cCs||kS)Nr)�v�c�prrr�<lambda>WszMatcher.<lambda>cCs||kS)Nr)r2r3r4rrrr5XscCs||kp||kS)Nr)r2r3r4rrrr5YscCs||kp||kS)Nr)r2r3r4rrrr5ZscCs||kS)Nr)r2r3r4rrrr5[scCs||kS)Nr)r2r3r4rrrr5\scCs||kp||kS)Nr)r2r3r4rrrr5^scCs||kS)Nr)r2r3r4rrrr5_s)�<�>z<=z>=z==z===z~=z!=c
CsJ|jdkrtd��|j�|_}|jj|�}|s<td|��|jd�}|dj�|_|jj�|_	g}|d�r<dd�|dj
d�D�}x�|D]�}|jj|�}|s�td	||f��|j�}|dp�d
}|d}|jd��r|dkr�td|��|dd�d}}	|j
j|��s(|j|�n|j|�d}}	|j|||	f�q�Wt|�|_dS)NzPlease specify a version classz
Not valid: %r�r�cSsg|]}|j��qSr)r)�.0r3rrr�
<listcomp>nsz$Matcher.__init__.<locals>.<listcomp>�,zInvalid %r in %rz~=rz.*�==�!=z#'.*' not allowed for %r constraintsTF)r=r>���)�
version_class�
ValueErrorrr�dist_re�match�groups�name�lower�key�split�comp_re�endswith�num_re�appendrr)
rr�mrDZclistZconstraintsr3�opZvn�prefixrrrrbs:



zMatcher.__init__cCszt|t�r|j|�}x`|jD]V\}}}|jj|�}t|t�rFt||�}|sbd||jjf}t	|��||||�sdSqWdS)z�
        Check if the provided version matches the constraints.

        :param version: The version to match against this instance.
        :type version: String or :class:`Version` instance.
        z%r not implemented for %sFT)
rrr@r�
_operators�get�getattrr,rr)r�version�operator�
constraintrO�f�msgrrrrC�s



z
Matcher.matchcCs6d}t|j�dkr2|jdddkr2|jdd}|S)Nrr�==�===)rXrY)rr)r�resultrrr�
exact_version�s zMatcher.exact_versioncCs0t|�t|�ks|j|jkr,td||f��dS)Nzcannot compare %s and %s)r rEr!)rr"rrrr#�szMatcher._check_compatiblecCs"|j|�|j|jko |j|jkS)N)r#rGr)rr"rrrr$�s
zMatcher.__eq__cCs|j|�S)N)r$)rr"rrrr%�szMatcher.__ne__cCst|j�t|j�S)N)r*rGr)rrrrr+�szMatcher.__hash__cCsd|jj|jfS)Nz%s(%r))r,rr)rrrrr-�szMatcher.__repr__cCs|jS)N)r)rrrrr.�szMatcher.__str__)rr
rr@�re�compilerBrIrKrPrrCr0r[r#r$r%r+r-r.rrrrr1Ns*


%r1zk^v?(\d+!)?(\d+(\.\d+)*)((a|b|c|rc)(\d+))?(\.(post)(\d+))?(\.(dev)(\d+))?(\+([a-zA-Z\d]+(\.[a-zA-Z\d]+)?))?$cCs�|j�}tj|�}|s"td|��|j�}tdd�|djd�D��}x(t|�dkrn|ddkrn|dd�}qHW|ds~d}nt|d�}|dd�}|d	d
�}|dd�}|d
}|dkr�f}n|dt|d�f}|dkr�f}n|dt|d�f}|dk�r
f}n|dt|d�f}|dk�r.f}nLg}	x>|jd�D]0}
|
j	��rZdt|
�f}
nd|
f}
|	j
|
��q>Wt|	�}|�s�|�r�|�r�d}nd}|�s�d}|�s�d}||||||fS)NzNot a valid version: %scss|]}t|�VqdS)N)�int)r:r2rrr�	<genexpr>�sz_pep_440_key.<locals>.<genexpr>r�.r����	�
��
�a�z�_�final���rl)NN)NN)NNrl)rhrl)ri)rj)rk)r�PEP440_VERSION_RErCr
rDrrHrr^�isdigitrL)rrMrDZnumsZepoch�preZpost�devZlocalr�partrrr�_pep_440_key�sT



rrc@s6eZdZdZdd�Zedddddg�Zed	d
��ZdS)raIA rational version.

    Good:
        1.2         # equivalent to "1.2.0"
        1.2.0
        1.2a1
        1.2.3a2
        1.2.3b1
        1.2.3c1
        1.2.3.4
        TODO: fill this out

    Bad:
        1           # minimum two numbers
        1.2a        # release level must have a release serial
        1.2.3b
    cCs<t|�}tj|�}|j�}tdd�|djd�D��|_|S)Ncss|]}t|�VqdS)N)r^)r:r2rrrr_sz*NormalizedVersion.parse.<locals>.<genexpr>rr`)�_normalized_keyrmrCrDrrH�_release_clause)rrrZrMrDrrrrs

zNormalizedVersion.parserh�br3�rcrpcst�fdd��jD��S)Nc3s |]}|r|d�jkVqdS)rN)�PREREL_TAGS)r:�t)rrrr_sz2NormalizedVersion.is_prerelease.<locals>.<genexpr>)�anyr)rr)rrr/szNormalizedVersion.is_prereleaseN)	rr
rrr�setrwr0r/rrrrr�scCs>t|�}t|�}||krdS|j|�s*dSt|�}||dkS)NTFr`)�str�
startswithr)�x�y�nrrr�
_match_prefix"s
r�c	@sneZdZeZddddddddd	�Zd
d�Zdd
�Zdd�Zdd�Z	dd�Z
dd�Zdd�Zdd�Z
dd�ZdS)r�_match_compatible�	_match_lt�	_match_gt�	_match_le�	_match_ge�	_match_eq�_match_arbitrary�	_match_ne)z~=r6r7z<=z>=z==z===z!=cCsV|rd|ko|jd}n|jdo,|jd}|rN|jjdd�d}|j|�}||fS)N�+rrrlrlrl)rrrHr@)rrSrUrOZstrip_localrrrr�
_adjust_local<s
zNormalizedMatcher._adjust_localcCsD|j|||�\}}||krdS|j}djdd�|D��}t||�S)NFr`cSsg|]}t|��qSr)r{)r:�irrrr;Osz/NormalizedMatcher._match_lt.<locals>.<listcomp>)r�rt�joinr�)rrSrUrO�release_clause�pfxrrrr�JszNormalizedMatcher._match_ltcCsD|j|||�\}}||krdS|j}djdd�|D��}t||�S)NFr`cSsg|]}t|��qSr)r{)r:r�rrrr;Wsz/NormalizedMatcher._match_gt.<locals>.<listcomp>)r�rtr�r�)rrSrUrOr�r�rrrr�RszNormalizedMatcher._match_gtcCs|j|||�\}}||kS)N)r�)rrSrUrOrrrr�ZszNormalizedMatcher._match_lecCs|j|||�\}}||kS)N)r�)rrSrUrOrrrr�^szNormalizedMatcher._match_gecCs.|j|||�\}}|s ||k}n
t||�}|S)N)r�r�)rrSrUrOrZrrrr�bs


zNormalizedMatcher._match_eqcCst|�t|�kS)N)r{)rrSrUrOrrrr�jsz"NormalizedMatcher._match_arbitrarycCs0|j|||�\}}|s ||k}nt||�}|S)N)r�r�)rrSrUrOrZrrrr�ms

zNormalizedMatcher._match_necCsf|j|||�\}}||krdS||kr*dS|j}t|�dkrH|dd�}djdd�|D��}t||�S)NTFrr`cSsg|]}t|��qSr)r{)r:r�rrrr;�sz7NormalizedMatcher._match_compatible.<locals>.<listcomp>rl)r�rtrr�r�)rrSrUrOr�r�rrrr�usz#NormalizedMatcher._match_compatibleN)rr
rrr@rPr�r�r�r�r�r�r�r�r�rrrrr-s$z[.+-]$r8z^[.](\d)z0.\1z^[.-]z
^\((.*)\)$z\1z^v(ersion)?\s*(\d+)z\2z^r(ev)?\s*(\d+)z[.]{2,}r`z\b(alfa|apha)\b�alphaz\b(pre-alpha|prealpha)\bz	pre.alphaz	\(beta\)$�betaz
^[:~._+-]+z
[,*")([\]]z[~:+_ -]z\.$z
(\d+(\.\d+)*)cCsZ|j�j�}xtD]\}}|j||�}qW|s2d}tj|�}|sJd}|}n�|j�djd�}dd�|D�}xt|�dkr�|j	d�qlWt|�dkr�||j
�d�}n8djdd�|dd�D��||j
�d�}|dd�}djd	d�|D��}|j�}|�rxtD]\}}|j||�}�qW|�s*|}nd
|k�r8dnd}|||}t
|��sVd}|S)
z�
    Try to suggest a semantic form for a version for which
    _suggest_normalized_version couldn't come up with anything.
    z0.0.0rr`cSsg|]}t|��qSr)r^)r:r�rrrr;�sz-_suggest_semantic_version.<locals>.<listcomp>�NcSsg|]}t|��qSr)r{)r:r�rrrr;�scSsg|]}t|��qSr)r{)r:r�rrrr;�srp�-r�)rrF�
_REPLACEMENTS�sub�_NUMERIC_PREFIXrCrDrHrrL�endr��_SUFFIX_REPLACEMENTS�	is_semver)rrZZpat�replrMrO�suffix�seprrr�_suggest_semantic_version�s:
,
r�cCslyt|�|Stk
r YnX|j�}xdBD]\}}|j||�}q0Wtjdd|�}tjdd|�}tjdd|�}tjdd|�}tjdd|�}|jd��r�|d d!�}tjd"d|�}tjd#d$|�}tjd%d&|�}tjd'd|�}tjd(d)|�}tjd*d)|�}tjd+d
|�}tjd,d-|�}tjd.d&|�}tjd/d0|�}tjd1d2|�}yt|�Wntk
�rfd!}YnX|S)Ca�Suggest a normalized version close to the given version string.

    If you have a version string that isn't rational (i.e. NormalizedVersion
    doesn't like it) then you might be able to get an equivalent (or close)
    rational version from this function.

    This does a number of simple normalizations to the given string, based
    on observation of versions currently in use on PyPI. Given a dump of
    those version during PyCon 2009, 4287 of them:
    - 2312 (53.93%) match NormalizedVersion without change
      with the automatic suggestion
    - 3474 (81.04%) match when using this suggestion method

    @param s {str} An irrational version string.
    @returns A rational version string, or None, if couldn't determine one.
    �-alpharh�-betarur�r�rvr3�-finalr8�-pre�-release�.release�-stabler�r`rj� �.finalrkzpre$Zpre0zdev$Zdev0z([abc]|rc)[\-\.](\d+)$z\1\2z[\-\.](dev)[\-\.]?r?(\d+)$z.\1\2z[.~]?([abc])\.?z\1r2rNz\b0+(\d+)(?!\d)z(\d+[abc])$z\g<1>0z\.?(dev-r|dev\.r)\.?(\d+)$z.dev\2z-(a|b|c)(\d+)$z[\.\-](dev|devel)$z.dev0z(?![\.\-])dev$z(final|stable)$z\.?(r|-|-r)\.?(\d+)$z.post\2z\.?(dev|git|bzr)\.?(\d+)$z\.?(pre|preview|-c)(\d+)$zc\g<2>zp(\d+)$z.post\1�r�rh�r�ru�r�rh�r�ru�rvr3�r�r8�r�r3�r�r8�r�r8�r�r8�r�r`�rjr`�r�r8�r�r8�rkr8)r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�)rsr
rF�replacer\r�r|)rZrsZorigr�rrr�_suggest_normalized_version�sH	
r�z([a-z]+|\d+|[\.-])r3zfinal-�@)roZpreviewr�rvrpr8r`cCs~dd�}g}xh||�D]\}|jd�rh|dkrJx|rH|ddkrH|j�q.Wx|rf|d	dkrf|j�qLW|j|�qWt|�S)
NcSsxg}xdtj|j��D]R}tj||�}|rd|dd�koBdknrT|jd�}nd|}|j|�qW|jd�|S)N�0r�9��*z*final)�
_VERSION_PARTrHrF�_VERSION_REPLACErQ�zfillrL)rrZr4rrr�	get_partsIs 
z_legacy_key.<locals>.get_partsr�z*finalrz*final-Z00000000rlrl)r|�poprLr)rr�rZr4rrr�_legacy_keyHs

r�c@s eZdZdd�Zedd��ZdS)rcCst|�S)N)r�)rrrrrrcszLegacyVersion.parsecCs:d}x0|jD]&}t|t�r|jd�r|dkrd}PqW|S)NFr�z*finalT)rrrr|)rrZr}rrrr/fszLegacyVersion.is_prereleaseN)rr
rrr0r/rrrrrbsc@s4eZdZeZeej�Zded<ej	d�Z
dd�ZdS)rr�z~=z^(\d+(\.\d+)*)cCs`||krdS|jjt|��}|s2tjd||�dS|j�d}d|krV|jdd�d}t||�S)NFzACannot compute compatible match for version %s  and constraint %sTrr`r)�
numeric_rerCr{�loggerZwarningrD�rsplitr�)rrSrUrOrMrrrrr�yszLegacyMatcher._match_compatibleN)rr
rrr@�dictr1rPr\r]r�r�rrrrrqs


zN^(\d+)\.(\d+)\.(\d+)(-[a-z0-9]+(\.[a-z0-9-]+)*)?(\+[a-z0-9]+(\.[a-z0-9-]+)*)?$cCs
tj|�S)N)�
_SEMVER_RErC)rrrrr��sr�c	Csndd�}t|�}|st|��|j�}dd�|dd�D�\}}}||dd�||dd�}}|||f||fS)	NcSs8|dkr|f}n$|dd�jd�}tdd�|D��}|S)Nrr`cSs"g|]}|j�r|jd�n|�qS)r�)rnr�)r:r4rrrr;�sz5_semantic_key.<locals>.make_tuple.<locals>.<listcomp>)rHr)rZabsentrZrrrr�
make_tuple�s
z!_semantic_key.<locals>.make_tuplecSsg|]}t|��qSr)r^)r:r�rrrr;�sz!_semantic_key.<locals>.<listcomp>r��|�r�)r�r
rD)	rr�rMrD�major�minorZpatchroZbuildrrr�
_semantic_key�s
r�c@s eZdZdd�Zedd��ZdS)rcCst|�S)N)r�)rrrrrr�szSemanticVersion.parsecCs|jdddkS)Nrrr�)r)rrrrr/�szSemanticVersion.is_prereleaseN)rr
rrr0r/rrrrr�sc@seZdZeZdS)r	N)rr
rrr@rrrrr	�sc@s6eZdZddd�Zdd�Zdd�Zdd	�Zd
d�ZdS)
�
VersionSchemeNcCs||_||_||_dS)N)rG�matcher�	suggester)rrGr�r�rrrr�szVersionScheme.__init__cCs2y|jj|�d}Wntk
r,d}YnX|S)NTF)r�r@r
)rrrZrrr�is_valid_version�s
zVersionScheme.is_valid_versioncCs0y|j|�d}Wntk
r*d}YnX|S)NTF)r�r
)rrrZrrr�is_valid_matcher�s

zVersionScheme.is_valid_matchercCs|jd|�S)z:
        Used for processing some metadata fields
        zdummy_name (%s))r�)rrrrr�is_valid_constraint_list�sz&VersionScheme.is_valid_constraint_listcCs|jdkrd}n
|j|�}|S)N)r�)rrrZrrr�suggest�s

zVersionScheme.suggest)N)rr
rrr�r�r�r�rrrrr��s

r�cCs|S)Nr)rrrrrr5�sr5)�
normalized�legacyZsemanticr��defaultcCs|tkrtd|��t|S)Nzunknown scheme name: %r)�_SCHEMESrA)rErrrr�s)(rZloggingr\�compatr�__all__Z	getLoggerrr�rAr
�objectrr1r]rmrrrsrr�rr�r�r�r�r��Ir�r�r�rrr�r�r�rr	r�r�rrrrr�<module>	sz
1k
=$W
.r	$
_vendor/distlib/__pycache__/metadata.cpython-36.opt-1.pyc000064400000064462151733136270017233 0ustar003

�Pf���@sdZddlmZddlZddlmZddlZddlZddlZddl	m
Z
mZddlm
Z
mZmZddlmZdd	lmZmZdd
lmZmZeje�ZGdd�de
�ZGd
d�de
�ZGdd�de
�ZGdd�de
�ZdddgZdZ dZ!ej"d�Z#ej"d�Z$dGZ%dHZ&dIZ'dJZ(dKZ)dLZ*dMZ+e,�Z-e-j.e%�e-j.e&�e-j.e(�e-j.e*�ej"d8�Z/d9d:�Z0d;d<�Z1ddddd%ddd d!d"d#d+d,d$d&d'd-d/d0d5d1d2d*d)d(d.d3d4d6d7d=�Z2dNZ3dOZ4dPZ5dQZ6dRZ7dSZ8dTZ9e:�Z;ej"d>�Z<dUd@dA�Z=GdBdC�dCe:�Z>dDZ?dEZ@GdFd�de:�ZAdS)VzImplementation of the Metadata for Python packages PEPs.

Supports all metadata formats (1.0, 1.1, 1.2, and 2.0 experimental).
�)�unicode_literalsN)�message_from_file�)�DistlibException�__version__)�StringIO�string_types�	text_type)�	interpret)�extract_by_key�
get_extras)�
get_scheme�PEP440_VERSION_REc@seZdZdZdS)�MetadataMissingErrorzA required metadata is missingN)�__name__�
__module__�__qualname__�__doc__�rr�/usr/lib/python3.6/metadata.pyrsrc@seZdZdZdS)�MetadataConflictErrorz>Attempt to read or write metadata fields that are conflictual.N)rrrrrrrrr src@seZdZdZdS)� MetadataUnrecognizedVersionErrorz Unknown metadata version number.N)rrrrrrrrr$src@seZdZdZdS)�MetadataInvalidErrorzA metadata value is invalidN)rrrrrrrrr(sr�Metadata�PKG_INFO_ENCODING�PKG_INFO_PREFERRED_VERSIONzutf-8z1.1z

       \|z	
        �Metadata-Version�Name�Version�Platform�Summary�Description�Keywords�	Home-page�Author�Author-email�License�Supported-Platform�
Classifier�Download-URL�	Obsoletes�Provides�Requires�
Maintainer�Maintainer-email�Obsoletes-Dist�Project-URL�
Provides-Dist�
Requires-Dist�Requires-Python�Requires-External�Private-Version�Obsoleted-By�Setup-Requires-Dist�	Extension�Provides-Extraz"extra\s*==\s*("([^"]+)"|'([^']+)')cCs<|dkrtS|dkrtS|dkr$tS|dkr0tSt|��dS)Nz1.0z1.1z1.2z2.0)�_241_FIELDS�_314_FIELDS�_345_FIELDS�_426_FIELDSr)�versionrrr�_version2fieldlistgsr?c	Cs�dd�}g}x.|j�D]"\}}|gddfkr.q|j|�qWddddg}xt|D]l}|tkrld|krl|jd�|tkr�d|kr�|jd�|tkr�d|kr�|jd�|tkrNd|krN|jd�qNWt|�d	kr�|d
St|�d
kr�td��d|ko�||t	�}d|k�o
||t
�}d|k�o||t�}t|�t|�t|�d	k�rFtd��|�rl|�rl|�rlt
|k�rlt
S|�rvdS|�r�dSdS)
z5Detect the best version depending on the fields used.cSsx|D]}||krdSqWdS)NTFr)�keys�markers�markerrrr�_has_markerus
z"_best_version.<locals>._has_marker�UNKNOWNNz1.0z1.1z1.2z2.0rrzUnknown metadata setz(You used incompatible 1.1/1.2/2.0 fields)�items�appendr:�remover;r<r=�lenr�_314_MARKERS�_345_MARKERS�_426_MARKERS�intr)	�fieldsrCr@�key�valueZpossible_versionsZis_1_1Zis_1_2Zis_2_0rrr�
_best_versionssB




rP)�metadata_version�namer>�platformZsupported_platform�summary�description�keywords�	home_page�author�author_email�
maintainer�maintainer_email�license�
classifier�download_url�obsoletes_dist�
provides_dist�
requires_dist�setup_requires_dist�requires_python�requires_external�requires�provides�	obsoletes�project_urlZprivate_versionZobsoleted_by�	extensionZprovides_extraz[^A-Za-z0-9.]+FcCs0|r$tjd|�}tjd|jdd��}d||fS)zhReturn the distribution name with version.

    If for_filename is true, return a filename-escaped form.�-� �.z%s-%s)�	_FILESAFE�sub�replace)rRr>Zfor_filenamerrr�_get_name_and_version�srpc@s
eZdZdZd?dd�Zdd�Zdd	�Zd
d�Zdd
�Zdd�Z	dd�Z
dd�Zdd�Zdd�Z
dd�Zd@dd�Zdd�Zdd �Zd!d"�Zd#d$�ZdAd%d&�ZdBd'd(�ZdCd)d*�Zd+d,�Zefd-d.�ZdDd/d0�ZdEd1d2�Zd3d4�Zd5d6�Zd7d8�Zd9d:�Zd;d<�Z d=d>�Z!dS)F�LegacyMetadataaaThe legacy metadata of a release.

    Supports versions 1.0, 1.1 and 1.2 (auto-detected). You can
    instantiate the class with one of these arguments (or none):
    - *path*, the path to a metadata file
    - *fileobj* give a file-like object with metadata as content
    - *mapping* is a dict-like object
    - *scheme* is a version scheme name
    N�defaultcCsz|||gjd�dkrtd��i|_g|_d|_||_|dk	rH|j|�n.|dk	r\|j|�n|dk	rv|j|�|j	�dS)N�z'path, fileobj and mapping are exclusive)
�count�	TypeError�_fieldsZrequires_filesZ
_dependencies�scheme�read�	read_file�update�set_metadata_version)�self�path�fileobj�mappingrwrrr�__init__�s
zLegacyMetadata.__init__cCst|j�|jd<dS)NzMetadata-Version)rPrv)r|rrrr{sz#LegacyMetadata.set_metadata_versioncCs|jd||f�dS)Nz%s: %s
)�write)r|r~rRrOrrr�_write_fieldszLegacyMetadata._write_fieldcCs
|j|�S)N)�get)r|rRrrr�__getitem__szLegacyMetadata.__getitem__cCs|j||�S)N)�set)r|rRrOrrr�__setitem__szLegacyMetadata.__setitem__cCs8|j|�}y|j|=Wntk
r2t|��YnXdS)N)�
_convert_namerv�KeyError)r|rR�
field_namerrr�__delitem__s

zLegacyMetadata.__delitem__cCs||jkp|j|�|jkS)N)rvr�)r|rRrrr�__contains__s
zLegacyMetadata.__contains__cCs(|tkr|S|jdd�j�}tj||�S)Nrj�_)�_ALL_FIELDSro�lower�_ATTR2FIELDr�)r|rRrrrr�szLegacyMetadata._convert_namecCs|tks|tkrgSdS)NrD)�_LISTFIELDS�_ELEMENTSFIELD)r|rRrrr�_default_value%szLegacyMetadata._default_valuecCs&|jdkrtjd|�Stjd|�SdS)N�1.0�1.1�
)r�r�)rQ�_LINE_PREFIX_PRE_1_2rn�_LINE_PREFIX_1_2)r|rOrrr�_remove_line_prefix*s
z"LegacyMetadata._remove_line_prefixcCs|tkr||St|��dS)N)r��AttributeError)r|rRrrr�__getattr__0szLegacyMetadata.__getattr__FcCst|d|d|�S)zhReturn the distribution name with version.

        If filesafe is true, return a filename-escaped form.rr)rp)r|Zfilesaferrr�get_fullname;szLegacyMetadata.get_fullnamecCs|j|�}|tkS)z+return True if name is a valid metadata key)r�r�)r|rRrrr�is_fieldAs
zLegacyMetadata.is_fieldcCs|j|�}|tkS)N)r�r�)r|rRrrr�is_multi_fieldFs
zLegacyMetadata.is_multi_fieldc
Cs.tj|ddd�}z|j|�Wd|j�XdS)z*Read the metadata values from a file path.�rzutf-8)�encodingN)�codecs�openry�close)r|�filepath�fprrrrxJszLegacyMetadata.readcCs�t|�}|d|jd<xxtD]p}||kr*q|tkrh|j|�}|tkrZ|dk	rZdd�|D�}|j||�q||}|dk	r|dkr|j||�qW|j�dS)z,Read the metadata values from a file object.zmetadata-versionzMetadata-VersionNcSsg|]}t|jd���qS)�,)�tuple�split)�.0rOrrr�
<listcomp>_sz,LegacyMetadata.read_file.<locals>.<listcomp>rD)rrvr�r�Zget_all�_LISTTUPLEFIELDSr�r{)r|Zfileob�msg�field�valuesrOrrrryRs

zLegacyMetadata.read_filec
Cs0tj|ddd�}z|j||�Wd|j�XdS)z&Write the metadata fields to filepath.�wzutf-8)r�N)r�r��
write_filer�)r|r��skip_unknownr�rrrr�hszLegacyMetadata.writecCs�|j�x�t|d�D]�}|j|�}|r:|dgdgfkr:q|tkrX|j||dj|��q|tkr�|dkr�|jd
kr�|jdd�}n|jdd	�}|g}|t	kr�d
d�|D�}x|D]}|j|||�q�WqWdS)z0Write the PKG-INFO format data to a file object.zMetadata-VersionrDr�r!�1.0�1.1r�z	
        z	
       |cSsg|]}dj|��qS)r�)�join)r�rOrrrr��sz-LegacyMetadata.write_file.<locals>.<listcomp>N)r�r�)
r{r?r�r�r�r�r�rQror�)r|Z
fileobjectr�r�r�rOrrrr�ps$


zLegacyMetadata.write_filecs��fdd�}|snHt|d�r>x<|j�D]}||||�q&Wnx|D]\}}|||�qDW|r~x|j�D]\}}|||�qhWdS)a�Set metadata values from the given iterable `other` and kwargs.

        Behavior is like `dict.update`: If `other` has a ``keys`` method,
        they are looped over and ``self[key]`` is assigned ``other[key]``.
        Else, ``other`` is an iterable of ``(key, value)`` iterables.

        Keys that don't match a metadata field or that have an empty value are
        dropped.
        cs"|tkr|r�j�j|�|�dS)N)r�r�r�)rNrO)r|rr�_set�sz#LegacyMetadata.update.<locals>._setr@N)�hasattrr@rE)r|�other�kwargsr��k�vr)r|rrz�s

zLegacyMetadata.updatecCsn|j|�}|tks|dkrPt|ttf�rPt|t�rJdd�|jd�D�}q~g}n.|tkr~t|ttf�r~t|t�rz|g}ng}tj	t
j��rB|d}t|j
�}|tkr�|dk	r�x�|D](}|j|jd�d�s�tjd	|||�q�Wn`|tko�|dk	�r|j|��sBtjd
|||�n0|tk�rB|dk	�rB|j|��sBtjd
|||�|tk�r`|dk�r`|j|�}||j|<dS)z"Control then set a metadata field.rcSsg|]}|j��qSr)�strip)r�r�rrrr��sz&LegacyMetadata.set.<locals>.<listcomp>r�rN�;rz$'%s': '%s' is not valid (field '%s')z.'%s': '%s' is not a valid version (field '%s')r!)r�r��
isinstance�listr�rr�r��loggerZisEnabledFor�loggingZWARNINGr
rw�_PREDICATE_FIELDS�is_valid_matcher�warning�_VERSIONS_FIELDS�is_valid_constraint_list�_VERSION_FIELDS�is_valid_version�_UNICODEFIELDSr�rv)r|rRrOZproject_namerwr�rrrr��s@








zLegacyMetadata.setcCs�|j|�}||jkr*|tkr&|j|�}|S|tkr@|j|}|S|tkr�|j|}|dkr^gSg}x6|D].}|tkr�|j|�qh|j|d|df�qhW|S|tkr�|j|}t	|t
�r�|jd�S|j|S)zGet a metadata field.Nrrr�)r�rv�_MISSINGr�r�r�r�rFr�r�rr�)r|rRrrrO�res�valrrrr��s.








zLegacyMetadata.getcs|j�gg}}xd
D]}||kr|j|�qW|rT|gkrTddj|�}t|��xdD]}||krZ|j|�qZW|ddkr�||fSt|j���fd	d
�}xdt|ft�jft	�j
ffD]F\}}x<|D]4}	|j|	d�}
|
dk	o�||
�r�|jd|	|
f�q�Wq�W||fS)zkCheck if the metadata is compliant. If strict is True then raise if
        no Name or Version are providedrrzmissing required metadata: %sz, �	Home-pager$zMetadata-Versionz1.2cs*x$|D]}�j|jd�d�sdSqWdS)Nr�rFT)r�r�)rOr�)rwrr�are_valid_constraintss
z3LegacyMetadata.check.<locals>.are_valid_constraintsNzWrong value for '%s': %s)rr)r�r$)r{rFr�rr
rwr�r�r�r�r�r�)r|�strict�missing�warnings�attrr�r�rMZ
controllerr�rOr)rwr�check�s2




zLegacyMetadata.checkcCs�|j�dB}i}x,|D]$\}}|s.||jkr||||<qW|ddk�r�dK}x�|D]F\}}|sn||jkrV|d&k�r�||||<qVd,d-�||D�||<qVWnF|dd.k�r�dO}x2|D]*\}}|�s�||jk�r�||||<�q�W|S)Pz�Return fields as a dict.

        Field names will be converted to use the underscore-lowercase style
        instead of hyphen-mixed case (i.e. home_page instead of Home-page).
        rQ�Metadata-VersionrRrr>rrTr rW�	Home-pagerXr$rY�Author-emailr\r&rUr!rVr"rSr�classifiersr(r^�Download-URLz1.2ra�
Requires-Distrc�Requires-Pythonrd�Requires-Externalr`�
Provides-Distr_�Obsoletes-Distrh�Project-URLrZr-r[�Maintainer-emailcSsg|]}dj|��qS)r�)r�)r��urrrr�Gsz)LegacyMetadata.todict.<locals>.<listcomp>z1.1rfr+rer,rgr*�rQr��rRr�r>r�rTr �rWr��rXr$�rYr��r\r&�rUr!�rVr"�rSr�r�r(�r^r�)
r�r�r�r�r�r�r�r�r�r�r�r�r��rar��rcr��rdr��r`r��r_r��rhr��rZr-�r[r�)r�r�r�r�r�r�r�r��rfr+�rer,�rgr*)r�r�r�)r{rv)r|Zskip_missingZmapping_1_0�datarNr�Zmapping_1_2Zmapping_1_1rrr�todictsP
zLegacyMetadata.todictcCs<|ddkr(xdD]}||kr||=qW|d|7<dS)NzMetadata-Versionz1.1r*r,r+z
Requires-Dist)r*r,r+r)r|�requirementsr�rrr�add_requirementsUs


zLegacyMetadata.add_requirementscCstt|d��S)NzMetadata-Version)r�r?)r|rrrr@`szLegacyMetadata.keysccsx|j�D]
}|Vq
WdS)N)r@)r|rNrrr�__iter__cszLegacyMetadata.__iter__cs�fdd��j�D�S)Ncsg|]}�|�qSrr)r�rN)r|rrr�hsz)LegacyMetadata.values.<locals>.<listcomp>)r@)r|r)r|rr�gszLegacyMetadata.valuescs�fdd��j�D�S)Ncsg|]}|�|f�qSrr)r�rN)r|rrr�ksz(LegacyMetadata.items.<locals>.<listcomp>)r@)r|r)r|rrEjszLegacyMetadata.itemscCsd|jj|j|jfS)Nz
<%s %s %s>)�	__class__rrRr>)r|rrr�__repr__mszLegacyMetadata.__repr__)NNNrr)F)F)F)N)F)F)"rrrrr�r{r�r�r�r�r�r�r�r�r�r�r�r�rxryr�r�rzr�r�r�r�r�r�r@r�r�rEr�rrrrrq�s>	




,
,
;rqzpydist.jsonz
metadata.jsonc@s�eZdZdZejd�Zejdej�Ze	Z
ejd�ZdZde
ZffdId�Zd	Zd
ZeffedJfe
dKfedLfd�ZdMZdNdd�ZedO�ZdefZdefZdefdefeeedefeeeedefdPdQd�
Z[[dd �ZdRd!d"�Zd#d$�Zed%d&��Z ed'd(��Z!e!j"d)d(��Z!dSd*d+�Z#ed,d-��Z$ed.d/��Z%e%j"d0d/��Z%d1d2�Z&d3d4�Z'd5d6�Z(d7d8�Z)d9d:d;d<d=dd>�Z*d?d@�Z+dTdCdD�Z,dEdF�Z-dGdH�Z.dS)Urz�
    The metadata of a release. This implementation uses 2.0 (JSON)
    metadata where possible. If not possible, it wraps a LegacyMetadata
    instance which handles the key-value metadata format.
    z
^\d+(\.\d+)*$z!^[0-9A-Z]([0-9A-Z_.-]*[0-9A-Z])?$z	.{1,2047}z2.0zdistlib (%s)�legacy)rRr>rTzqname version license summary description author author_email keywords platform home_page classifiers download_urlzwextras run_requires test_requires build_requires dev_requires provides meta_requires obsoleted_by supports_environments)rQrRr>rT�_legacy�_datarwNrrcCs0|||gjd�dkrtd��d|_d|_||_|dk	rzy|j||�||_Wn*tk
rvt||d�|_|j�YnXn�d}|r�t	|d��}|j
�}WdQRXn|r�|j
�}|dkr�|j|jd�|_ndt
|t�s�|jd�}ytj|�|_|j|j|�Wn0tk
�r*tt|�|d�|_|j�YnXdS)Nrsz'path, fileobj and mapping are exclusive)rrw�rb)rQ�	generatorzutf-8)r~rw)rtrur�rrw�_validate_mappingrrq�validater�rx�METADATA_VERSION�	GENERATORr�r	�decode�json�loads�
ValueErrorr)r|r}r~rrwr��frrrr��s<



zMetadata.__init__rRr>r\rVrTz
Requires-DistzSetup-Requires-DistzProvides-Extrar(�Download-URL�Metadata-Version)
�run_requires�build_requires�dev_requiresZ
test_requires�
meta_requires�extras�modules�
namespaces�exports�commandsr�Z
source_urlrQc
CsZtj|d�}tj|d�}||k�r||\}}|jr^|dkrP|dkrHdn|�}n|jj|�}n�|dkrjdn|�}|dkr�|jj||�}n�t�}|}|jjd�}	|	�r|dkr�|	jd	|�}nR|dkr�|	jd
�}	|	r�|	j||�}n.|	jd�}	|	�s�|jjd�}	|	�r|	j||�}||k�rV|}n:||k�r4tj||�}n"|j�rJ|jj|�}n|jj|�}|S)
N�common_keys�mapped_keysrrrrr��
extensionszpython.commandszpython.detailszpython.exports)rrrrr�)�object�__getattribute__r�r�r)
r|rN�common�mapped�lkZmaker�resultrO�sentinel�drrrr�sF




zMetadata.__getattribute__cCsH||jkrD|j|\}}|p |j|krD|j|�}|sDtd||f��dS)Nz.'%s' is an invalid value for the '%s' property)�SYNTAX_VALIDATORSrw�matchr)r|rNrOrw�pattern�
exclusions�mrrr�_validate_values

zMetadata._validate_valuecCs*|j||�tj|d�}tj|d�}||kr�||\}}|jrV|dkrJt�||j|<nf|d
krj||j|<nR|jjdi�}|dkr�||d	<n2|dkr�|jd
i�}|||<n|jdi�}|||<nh||kr�tj|||�nP|dk�rt|t	��r|j
�}|�r|j�}ng}|j�r||j|<n
||j|<dS)Nrrrrrrr�rzpython.commandszpython.detailszpython.exportsrV)rrrrr�)r'rrr��NotImplementedErrorr�
setdefault�__setattr__r�rr�r�)r|rNrOrrrr�r!rrrr*s>




zMetadata.__setattr__cCst|j|jd�S)NT)rprRr>)r|rrr�name_and_version@szMetadata.name_and_versioncCsF|jr|jd}n|jjdg�}d|j|jf}||krB|j|�|S)Nz
Provides-Distrfz%s (%s))r�rr)rRr>rF)r|r�srrrrfDs
zMetadata.providescCs |jr||jd<n
||jd<dS)Nz
Provides-Distrf)r�r)r|rOrrrrfOsc
Cs�|jr|}n�g}t|pg|j�}xl|D]d}d|kr@d|kr@d}n8d|krNd}n|jd�|k}|rx|jd�}|rxt||�}|r&|j|d�q&WxNdD]F}d|}	|	|kr�|j|	�|jjd	|g�}|j|j|||d
��q�W|S)a�
        Base method to get dependencies, given a set of extras
        to satisfy and an optional environment context.
        :param reqts: A list of sometimes-wanted dependencies,
                      perhaps dependent on extras and environment.
        :param extras: A list of optional components being requested.
        :param env: An optional environment for marker evaluation.
        �extra�environmentTre�build�dev�testz:%s:z%s_requires)r�env)r/r0r1)	r�rrr�r
�extendrGr�get_requirements)
r|�reqtsrr2rr!�includerBrN�errrr4Vs0	




zMetadata.get_requirementscCs|jr|j�S|jS)N)r��_from_legacyr)r|rrr�
dictionary�szMetadata.dictionarycCs|jrt�nt|j|j�SdS)N)r�r(rr�DEPENDENCY_KEYS)r|rrr�dependencies�szMetadata.dependenciescCs|jrt�n|jj|�dS)N)r�r(rrz)r|rOrrrr;�sc	Cs�|jd�|jkrt��g}x0|jj�D]"\}}||kr&||kr&|j|�q&W|rfddj|�}t|��x"|j�D]\}}|j|||�qpWdS)NrQzMissing metadata items: %sz, )	r�rr�MANDATORY_KEYSrErFr�rr')	r|rrwr�rNr%r�r�r�rrrr�szMetadata._validate_mappingcCsB|jr.|jjd�\}}|s|r>tjd||�n|j|j|j�dS)NTz#Metadata: missing: %s, warnings: %s)r�r�r�r�rrrw)r|r�r�rrrr�s
zMetadata.validatecCs(|jr|jjd�St|j|j�}|SdS)NT)r�r�rr�
INDEX_KEYS)r|rrrrr��szMetadata.todictc
Cs�|j|jd�}|jjd�}x2dD]*}||kr |dkr:d	}n|}||||<q W|jd
g�}|dgkrhg}||d<d}x2|D]*\}}||krz||rzd||ig||<qzW|j|d<i}i}	|S)N)rQrTrRr>r\rTrUr]r�r"�rVrarrbrrerf)rRr>r\rTrUr]�rar�rbr)r?r@)rrr�r�r�rf)
r|rZlmdr��nk�kwr@�okrXrZrrrr8�s.


zMetadata._from_legacyrrr&r r!)rRr>r\rTrUr�cCs�dd�}t�}|j}x*|jj�D]\}}||kr ||||<q W||j|j�}||j|j�}|jrtt	|j�|d<t	|�|d<t	|�|d<|S)NcSs�t�}x�|D]�}|jd�}|jd�}|d}xb|D]Z}|rN|rN|j|�q2d}|r^d|}|rx|rtd||f}n|}|jdj||f��q2WqW|S)Nr-r.rer>z
extra == "%s"z(%s) and %sr�)r�r��addr�)Zentriesr5r7r-r2Zrlistr�rBrrr�process_entries�s"



z,Metadata._to_legacy.<locals>.process_entrieszProvides-Extraz
Requires-DistzSetup-Requires-Dist)
rqr�LEGACY_MAPPINGrErrrrr�sorted)r|rErZnmdrArCZr1Zr2rrr�
_to_legacy�szMetadata._to_legacyFTcCs�||gjd�dkrtd��|j�|r`|jr4|j}n|j�}|rP|j||d�q�|j||d�n^|jrp|j�}n|j}|r�t	j
||dddd�n.tj|dd��}t	j
||dddd�WdQRXdS)	Nrz)Exactly one of path and fileobj is needed)r�Trs)Zensure_ascii�indentZ	sort_keysr�zutf-8)
rtr
rr�rHr�r�r8rr�dumpr�r�)r|r}r~r�r�Z	legacy_mdr!rrrrr��s&

zMetadata.writecCs�|jr|jj|�nt|jjdg�}d}x"|D]}d|kr,d|kr,|}Pq,W|dkrhd|i}|jd|�n t|d�t|�B}t|�|d<dS)Nrr.r-rer)r�r�rr)�insertr�rG)r|r�r�always�entryZrsetrrrr�s
zMetadata.add_requirementscCs*|jpd}|jpd}d|jj|j||fS)Nz	(no name)z
no versionz<%s %s %s (%s)>)rRr>r�rrQ)r|rRr>rrrr�(s

zMetadata.__repr__)r�)r�)r�)r�)r�rrw)NNNrr)rRr>r\rVrT)rN)r
N)N)NN)NNFT)/rrrr�re�compileZMETADATA_VERSION_MATCHER�IZNAME_MATCHERrZVERSION_MATCHERZSUMMARY_MATCHERrrrr<r=r:r"�	__slots__r�r�rr�Z	none_list�dictZ	none_dictrrr'r*�propertyr+rf�setterr4r9r;rrr�r8rFrHr�r�r�rrrrrvsx


,+

'
*	%
)rrrrr r!r"r#r$r%r&)rrrrr'r r!r"r#r$r%r&r(r)r*r+r,)r*r+r,r(r))rrrrr'r r!r"r#r$r%r-r.r&r(r)r/r0r1r2r3r4)r1r2r3r/r4r-r.r0)rrrrr'r r!r"r#r$r%r-r.r&r(r)r/r0r1r2r3r4r5r6r7r8r9)r5r9r6r7r8)r2r/r1)r3)r)rr(r*r,r+r/r1r2r4r0r'r7r9r8)r0)r")r$r-r r!)F)BrZ
__future__rr�Zemailrrr�rNr>rr�compatrrr	rAr
�utilrrr>r
rZ	getLoggerrr�rrrr�__all__rrrOr�r�r:r;rIr<rJr=rKr�r�rzZEXTRA_REr?rPr�r�r�r�r�r�r�r�rr�rmrprqZMETADATA_FILENAMEZWHEEL_METADATA_FILENAMErrrrr�<module>	s�








9


	_vendor/distlib/__pycache__/manifest.cpython-36.opt-1.pyc000064400000023660151733136270017254 0ustar003

�Pf�9�@s�dZddlZddlZddlZddlZddlZddlmZddlm	Z	ddl
mZdgZej
e�Zejdej�Zejd	ejejB�Zejdd
�ZGdd�de�ZdS)zu
Class representing the list of files in a distribution.

Equivalent to distutils.filelist, but fixes some problems.
�N�)�DistlibException)�fsdecode)�convert_path�Manifestz\\w*
z#.*?(?=
)|
(?=$)�c@szeZdZdZddd�Zdd�Zdd�Zd	d
�Zddd
�Zdd�Z	dd�Z
dd�Zddd�Zd dd�Z
d!dd�Zdd�ZdS)"rz~A list of files built by on exploring the filesystem and filtered by
    applying various patterns to what we find there.
    NcCs>tjjtjj|ptj���|_|jtj|_d|_t	�|_
dS)zd
        Initialise an instance.

        :param base: The base directory to explore under.
        N)�os�path�abspath�normpath�getcwd�base�sep�prefix�allfiles�set�files)�selfr
�r�/usr/lib/python3.6/manifest.py�__init__*szManifest.__init__cCs�ddlm}m}m}g|_}|j}|g}|j}|j}xv|r�|�}tj	|�}	x\|	D]T}
tj
j||
�}tj|�}|j}
||
�r�|jt
|��qR||
�rR||
�rR||�qRWq8WdS)zmFind all files under the base and set ``allfiles`` to the absolute
        pathnames of files found.
        r)�S_ISREG�S_ISDIR�S_ISLNKN)�statrrrrr
�pop�appendr�listdirr	�join�st_moder)rrrrr�root�stackr�push�names�name�fullnamer�moderrr�findall9s"



zManifest.findallcCs4|j|j�stjj|j|�}|jjtjj|��dS)zz
        Add a file to the manifest.

        :param item: The pathname to add. This can be relative to the base.
        N)	�
startswithrrr	rr
r�addr)r�itemrrrr)TszManifest.addcCsx|D]}|j|�qWdS)z�
        Add a list of files to the manifest.

        :param items: The pathnames to add. These can be relative to the base.
        N)r))r�itemsr*rrr�add_many^s
zManifest.add_manyFcsf��fdd��t�j�}|rJt�}x|D]}�|tjj|��q(W||O}dd�tdd�|D��D�S)z8
        Return sorted files in directory order
        cs>|j|�tjd|�|�jkr:tjj|�\}}�||�dS)Nzadd_dir added %s)r)�logger�debugr
rr	�split)�dirs�d�parent�_)�add_dirrrrr4ls


z Manifest.sorted.<locals>.add_dircSsg|]}tjj|��qSr)rr	r)�.0Z
path_tuplerrr�
<listcomp>zsz#Manifest.sorted.<locals>.<listcomp>css|]}tjj|�VqdS)N)rr	r/)r5r	rrr�	<genexpr>{sz"Manifest.sorted.<locals>.<genexpr>)rrrr	�dirname�sorted)rZwantdirs�resultr0�fr)r4rrr9gs

zManifest.sortedcCst�|_g|_dS)zClear all collected files.N)rrr)rrrr�clear}szManifest.clearcCs�|j|�\}}}}|dkrFx&|D]}|j|dd�s tjd|�q W�n<|dkrnx|D]}|j|dd�}qTW�n|dkr�x&|D]}|j|dd�s|tjd|�q|Wn�|d	kr�x�|D]}|j|dd�}q�Wn�|d
k�r�x�|D] }|j||d�s�tjd||�q�Wn�|d
k�r&xz|D]}|j||d�}�qWn\|dk�rN|jd|d��s�tjd|�n4|dk�rv|jd|d��s�tjd|�ntd|��dS)av
        Process a directive which either adds some files from ``allfiles`` to
        ``files``, or removes some files from ``files``.

        :param directive: The directive to process. This should be in a format
                     compatible with distutils ``MANIFEST.in`` files:

                     http://docs.python.org/distutils/sourcedist.html#commands
        �includeT)�anchorzno files found matching %r�excludezglobal-includeFz3no files found matching %r anywhere in distributionzglobal-excludezrecursive-include)rz-no files found matching %r under directory %rzrecursive-exclude�graftNz no directories found matching %r�prunez4no previously-included directories found matching %rzinvalid action %r)�_parse_directive�_include_patternr-Zwarning�_exclude_patternr)r�	directive�action�patterns�thedirZ
dirpattern�pattern�foundrrr�process_directive�sD









zManifest.process_directivec	Cs|j�}t|�dkr,|ddkr,|jdd�|d}d}}}|dkrxt|�dkr`td
|��dd�|dd�D�}n�|dkr�t|�dkr�td|��t|d�}dd�|dd�D�}n<|dk�r�t|�dkr�td|��t|d�}ntd|��||||fS)z�
        Validate a directive.
        :param directive: The directive to validate.
        :return: A tuple of action, patterns, thedir, dir_patterns
        rrr=r?�global-include�global-exclude�recursive-include�recursive-excluder@rANrz$%r expects <pattern1> <pattern2> ...cSsg|]}t|��qSr)r)r5�wordrrrr6�sz-Manifest._parse_directive.<locals>.<listcomp>�z*%r expects <dir> <pattern1> <pattern2> ...cSsg|]}t|��qSr)r)r5rPrrrr6�sz!%r expects a single <dir_pattern>zunknown action %r)r=r?rLrMrNrOr@rA)r=r?rLrM)rNrO)r@rA)r/�len�insertrr)rrEZwordsrFrGrHZdir_patternrrrrB�s:



zManifest._parse_directiveTcCsTd}|j||||�}|jdkr&|j�x(|jD]}|j|�r.|jj|�d}q.W|S)a�Select strings (presumably filenames) from 'self.files' that
        match 'pattern', a Unix-style wildcard (glob) pattern.

        Patterns are not quite the same as implemented by the 'fnmatch'
        module: '*' and '?'  match non-special characters, where "special"
        is platform-dependent: slash on Unix; colon, slash, and backslash on
        DOS/Windows; and colon on Mac OS.

        If 'anchor' is true (the default), then the pattern match is more
        stringent: "*.py" will match "foo.py" but not "foo/bar.py".  If
        'anchor' is false, both of these will match.

        If 'prefix' is supplied, then only filenames starting with 'prefix'
        (itself a pattern) and ending with 'pattern', with anything in between
        them, will match.  'anchor' is ignored in this case.

        If 'is_regex' is true, 'anchor' and 'prefix' are ignored, and
        'pattern' is assumed to be either a string containing a regex or a
        regex object -- no translation is done, the regex is just compiled
        and used as-is.

        Selected strings will be added to self.files.

        Return True if files are found.
        FNT)�_translate_patternrr'�searchrr))rrIr>r�is_regexrJ�
pattern_rer$rrrrCs

zManifest._include_patterncCsFd}|j||||�}x,t|j�D]}|j|�r |jj|�d}q W|S)atRemove strings (presumably filenames) from 'files' that match
        'pattern'.

        Other parameters are the same as for 'include_pattern()', above.
        The list 'self.files' is modified in place. Return True if files are
        found.

        This API is public to allow e.g. exclusion of SCM subdirs, e.g. when
        packaging source distributions
        FT)rT�listrrU�remove)rrIr>rrVrJrWr;rrrrD)s
zManifest._exclude_patternc
Csv|rt|t�rtj|�S|Stdkr:|jd�jd�\}}}|rR|j|�}td
krVnd}tjtj	j
|jd��}	|dk	�r4tdkr�|jd�}
|j|�dt|
��}n&|j|�}|t|�t|�t|��}tj
}tj
dkr�d}tdk�rd|	|j
|d	|f�}n0|t|�t|�t|��}d
||	||||f}n8|�rltdk�rRd|	|}nd||	|t|�d�f}tj|�S)aTranslate a shell-like wildcard pattern to a compiled regular
        expression.

        Return the compiled regex.  If 'is_regex' true,
        then 'pattern' is directly compiled to a regex (if it's a string)
        or just returned as-is (assumes it's a regex object).
        rQrr3�N�\z\\�^z.*z%s%s%s%s.*%s%sz%s%s%s)rQr)rQr)rQr)rQr)rQr)�
isinstance�str�re�compile�_PYTHON_VERSION�_glob_to_re�	partition�escaperr	rr
rRr)
rrIr>rrV�startr3�endrWr
Z
empty_patternZ	prefix_rerrrrrT=s@	









zManifest._translate_patterncCs8tj|�}tj}tjdkrd}d|}tjd||�}|S)z�Translate a shell-like glob pattern to a regular expression.

        Return a string containing the regex.  Differs from
        'fnmatch.translate()' in that '*' does not match "special characters"
        (which are platform-specific).
        r[z\\\\z\1[^%s]z((?<!\\)(\\\\)*)\.)�fnmatch�	translaterrr_�sub)rrIrWrZescapedrrrrbts

zManifest._glob_to_re)N)F)TNF)TNF)TNF)�__name__�
__module__�__qualname__�__doc__rr'r)r,r9r<rKrBrCrDrTrbrrrrr%s 

	
O/
(

6)rmrgZloggingrr_�sysrZr�compatr�utilr�__all__Z	getLoggerrjr-r`�MZ_COLLAPSE_PATTERN�SZ_COMMENTED_LINE�version_infora�objectrrrrr�<module>
s
_vendor/distlib/__pycache__/locators.cpython-36.pyc000064400000113170151733136270016331 0ustar003

�PfE��@s@ddlZddlmZddlZddlZddlZddlZddlZyddlZWne	k
rdddl
ZYnXddlZddlm
Z
ddlmZmZmZmZmZmZmZmZmZmZmZmZmZmZmZddlm Z m!Z!m"Z"ddl#m$Z$ddl%m&Z&m'Z'm(Z(m)Z)m*Z*m+Z+m,Z,m-Z-m.Z.dd	l/m0Z0m1Z1dd
l2m3Z3m4Z4ej5e6�Z7ej8d�Z9ej8dej:�Z;ej8d
�Z<dZ=d-dd�Z>Gdd�de�Z?Gdd�de@�ZAGdd�deA�ZBGdd�deA�ZCGdd�de@�ZDGdd�deA�ZEGdd�deA�ZFGdd �d eA�ZGGd!d"�d"eA�ZHGd#d$�d$eA�ZIeIeG�eEd%d&d'�d(d)�ZJeJjKZKej8d*�ZLGd+d,�d,e@�ZMdS).�N)�BytesIO�)�DistlibException)�urljoin�urlparse�
urlunparse�url2pathname�pathname2url�queue�quote�unescape�string_types�build_opener�HTTPRedirectHandler�	text_type�Request�	HTTPError�URLError)�Distribution�DistributionPath�	make_dist)�Metadata)	�cached_property�parse_credentials�ensure_slash�split_filename�get_project_data�parse_requirement�parse_name_and_version�ServerProxy�normalize_name)�
get_scheme�UnsupportedVersionError)�Wheel�
is_compatiblez^(\w+)=([a-f0-9]+)z;\s*charset\s*=\s*(.*)\s*$ztext/html|application/x(ht)?mlzhttps://pypi.python.org/pypicCs |dkrt}t|dd�}|j�S)z�
    Return all distribution names known by an index.
    :param url: The URL of the index.
    :return: A list of all known distribution names.
    Ng@)�timeout)�
DEFAULT_INDEXr�
list_packages)�url�client�r*�/usr/lib/python3.6/locators.py�get_all_distribution_names)sr,c@s$eZdZdZdd�ZeZZZdS)�RedirectHandlerzE
    A class to work around a bug in some Python 3.2.x releases.
    c	Cs�d}xdD]}||kr
||}Pq
W|dkr0dSt|�}|jdkrpt|j�|�}t|d�rh|j||�n|||<tj||||||�S)N�location�uri��replace_header)r.r/)r�schemerZget_full_url�hasattrr1�BaseRedirectHandler�http_error_302)	�self�req�fp�code�msg�headersZnewurl�keyZurlpartsr*r*r+r5=s


zRedirectHandler.http_error_302N)�__name__�
__module__�__qualname__�__doc__r5Zhttp_error_301Zhttp_error_303Zhttp_error_307r*r*r*r+r-4sr-c@s�eZdZdZd/Zd0Zd1ZdZed2Zd3dd�Z	dd�Z
dd�Zdd�Zdd�Z
dd�Zee
e�Zdd�Zdd�Zdd�Zd d!�Zd"d#�Zd$d%�Zd&d'�Zd(d)�Zd*d+�Zd4d-d.�ZdS)5�LocatorzG
    A base class for locators - things that locate distributions.
    �.tar.gz�.tar.bz2�.tar�.zip�.tgz�.tbz�.egg�.exe�.whl�.pdfN�defaultcCs,i|_||_tt��|_d|_tj�|_dS)a^
        Initialise an instance.
        :param scheme: Because locators look for most recent versions, they
                       need to know the version scheme to use. This specifies
                       the current PEP-recommended scheme - use ``'legacy'``
                       if you need to support existing distributions on PyPI.
        N)	�_cacher2rr-�opener�matcherr
�Queue�errors)r6r2r*r*r+�__init__cs
zLocator.__init__cCsXg}xN|jj�sRy|jjd�}|j|�Wn|jjk
rDwYnX|jj�qW|S)z8
        Return any errors which have occurred.
        F)rQ�empty�get�appendZEmpty�	task_done)r6�result�er*r*r+�
get_errorsvszLocator.get_errorscCs|j�dS)z>
        Clear any errors which may have been logged.
        N)rY)r6r*r*r+�clear_errors�szLocator.clear_errorscCs|jj�dS)N)rM�clear)r6r*r*r+�clear_cache�szLocator.clear_cachecCs|jS)N)�_scheme)r6r*r*r+�_get_scheme�szLocator._get_schemecCs
||_dS)N)r])r6�valuer*r*r+�_set_scheme�szLocator._set_schemecCstd��dS)a=
        For a given project, get a dictionary mapping available versions to Distribution
        instances.

        This should be implemented in subclasses.

        If called from a locate() request, self.matcher will be set to a
        matcher for the requirement to satisfy, otherwise it will be None.
        z Please implement in the subclassN)�NotImplementedError)r6�namer*r*r+�_get_project�s
zLocator._get_projectcCstd��dS)zJ
        Return all the distribution names known to this locator.
        z Please implement in the subclassN)ra)r6r*r*r+�get_distribution_names�szLocator.get_distribution_namescCsL|jdkr|j|�}n2||jkr,|j|}n|j�|j|�}||j|<|S)z�
        For a given project, get a dictionary mapping available versions to Distribution
        instances.

        This calls _get_project to do all the work, and just implements a caching layer on top.
        N)rMrcrZ)r6rbrWr*r*r+�get_project�s



zLocator.get_projectcCsPt|�}tj|j�}d}|jd�}|r6tt|�|j�}|jdkd|j	k|||fS)zu
        Give an url a score which can be used to choose preferred URLs
        for a given project release.
        Tz.whl�httpszpypi.python.org)
r�	posixpath�basename�path�endswithr$r#�
wheel_tagsr2�netloc)r6r(�trhZ
compatibleZis_wheelr*r*r+�	score_url�s
zLocator.score_urlcCsR|}|rN|j|�}|j|�}||kr(|}||kr@tjd||�ntjd||�|S)a{
        Choose one of two URLs where both are candidates for distribution
        archives for the same version of a distribution (for example,
        .tar.gz vs. zip).

        The current implementation favours https:// URLs over http://, archives
        from PyPI over those from other locations, wheel compatibility (if a
        wheel) and then the archive name.
        zNot replacing %r with %rzReplacing %r with %r)rn�logger�debug)r6�url1�url2rW�s1�s2r*r*r+�
prefer_url�s


zLocator.prefer_urlcCs
t||�S)zZ
        Attempt to split a filename in project name, version and Python version.
        )r)r6�filename�project_namer*r*r+r�szLocator.split_filenamecCsdd�}d}t|�\}}}}}	}
|
j�jd�r<tjd||
�tj|
�}|rX|j�\}}
nd\}}
|}|r�|ddkr�|dd�}|jd��r2yrt	|�}t
||j��r�|dkr�d	}n||j|�}|�r�|j|j
|jt|||||	d
f�djdd
�|jD��d�}Wn0tk
�r.}ztjd|�WYdd}~XnXn�|j|j��r�tj|�}}x�|jD]�}|j|��rV|dt|��}|j||�}|�s�tjd|�nJ|\}}}|�s�|||��r�|||t|||||	d
f�d�}|�r�||d<P�qVW|�r|�r|
|d|<|S)a
        See if a URL is a candidate for a download URL for a project (the URL
        has typically been scraped from an HTML page).

        If it is, a dictionary is returned with keys "name", "version",
        "filename" and "url"; otherwise, None is returned.
        cSst|�t|�kS)N)r )Zname1Zname2r*r*r+�same_project�sz:Locator.convert_url_to_download_info.<locals>.same_projectNzegg=z %s: version hint in fragment: %rr�/z.whlTr0z, cSs"g|]}djt|dd����qS)�.�N)�join�list)�.0�vr*r*r+�
<listcomp>sz8Locator.convert_url_to_download_info.<locals>.<listcomp>)rb�versionrvr(zpython-versionzinvalid path for wheel: %sz No match for project/version: %s)rbr�rvr(zpython-versionz	%s_digest)NN���r�)r�lower�
startswithrorp�HASHER_HASH�match�groupsrjr#r$rkrbr�rvrr|�pyver�	Exception�warning�downloadable_extensionsrgrh�lenr)r6r(rwrxrWr2rlri�params�query�frag�m�algo�digestZorigpath�wheel�includerXrvZextrmrbr�r�r*r*r+�convert_url_to_download_info�sf

 
z$Locator.convert_url_to_download_infocCs4d}x*dD]"}d|}||kr
|||f}Pq
W|S)z�
        Get a digest from a dictionary by looking at keys of the form
        'algo_digest'.

        Returns a 2-tuple (algo, digest) if found, else None. Currently
        looks only for SHA256, then MD5.
        N�sha256�md5z	%s_digest)r�r�r*)r6�inforWr�r<r*r*r+�_get_digest)s
zLocator._get_digestc	Cs�|jd�}|jd�}||kr,||}|j}nt|||jd�}|j}|j|�|_}|d}||d|<|j|dkr�|j|j|�|_|dj|t	��j
|�||_|||<dS)z�
        Update a result dictionary (the final result from _get_project) with a
        dictionary for a specific version, which typically holds information
        gleaned from a filename or URL for an archive for the distribution.
        rbr�)r2r(�digests�urlsN)�pop�metadatarr2r�r��
source_urlru�
setdefault�set�add�locator)	r6rWr�rbr��dist�mdr�r(r*r*r+�_update_version_data9s

zLocator._update_version_dataFcCs�d}t|�}|dkr td|��t|j�}|j|j�|_}tjd|t|�j	�|j
|j�}t|�dk�r8g}|j
}	x�|D]|}
|
d
kr�qzyJ|j|
�s�tjd||
�n,|s�|	|
�jr�|j|
�ntjd|
|j�Wqztk
r�tjd	||
�YqzXqzWt|�d
k�rt||jd�}|�r8tjd|�|d}||}|�r�|j�rN|j|_|jdi�j|t��|_i}|jdi�}
x&|jD]}||
k�r~|
|||<�q~W||_d|_|S)a
        Find the most recent distribution which matches the given
        requirement.

        :param requirement: A requirement of the form 'foo (1.0)' or perhaps
                            'foo (>= 1.0, < 2.0, != 1.3)'
        :param prereleases: If ``True``, allow pre-release versions
                            to be located. Otherwise, pre-release versions
                            are not returned.
        :return: A :class:`Distribution` instance, or ``None`` if no such
                 distribution could be located.
        NzNot a valid requirement: %rzmatcher: %s (%s)r{r�r�z%s did not match %rz%skipping pre-release version %s of %szerror matching %s with %rr)r<zsorted list: %s)r�r�r�)rrr!r2rO�requirementrorp�typer=rerbr�Z
version_classr�Z
is_prereleaserUr�r��sortedr<ZextrasrTr��
download_urlsr�)r6r��prereleasesrW�rr2rO�versionsZslistZvcls�kr��dZsdr(r*r*r+�locatePsT





zLocator.locate)rBrCrDrErFrG)rHrIrJ)rK)rJ)rL)F)r=r>r?r@�source_extensions�binary_extensions�excluded_extensionsrkr�rRrYrZr\r^r`�propertyr2rcrdrernrurr�r�r�r�r*r*r*r+rASs.

FrAcs0eZdZdZ�fdd�Zdd�Zdd�Z�ZS)�PyPIRPCLocatorz�
    This locator uses XML-RPC to locate distributions. It therefore
    cannot be used with simple mirrors (that only mirror file content).
    cs*tt|�jf|�||_t|dd�|_dS)z�
        Initialise an instance.

        :param url: The URL to use for XML-RPC.
        :param kwargs: Passed to the superclass constructor.
        g@)r%N)�superr�rR�base_urlrr))r6r(�kwargs)�	__class__r*r+rR�szPyPIRPCLocator.__init__cCst|jj��S)zJ
        Return all the distribution names known to this locator.
        )r�r)r')r6r*r*r+rd�sz%PyPIRPCLocator.get_distribution_namescCsiid�}|jj|d�}x�|D]�}|jj||�}|jj||�}t|jd�}|d|_|d|_|jd�|_	|jdg�|_
|jd�|_t|�}|r|d	}	|	d
|_
|j|	�|_||_|||<xB|D]:}	|	d
}
|j|	�}|dj|t��j|
�||d|
<q�WqW|S)
N)r�r�T)r2rbr��license�keywords�summaryrr(r�r�)r)Zpackage_releasesZrelease_urlsZrelease_datarr2rbr�rTr�r�r�rr�r�r�r�r�r�r�)r6rbrWr�rr��datar�r�r�r(r�r*r*r+rc�s0






zPyPIRPCLocator._get_project)r=r>r?r@rRrdrc�
__classcell__r*r*)r�r+r��sr�cs0eZdZdZ�fdd�Zdd�Zdd�Z�ZS)�PyPIJSONLocatorzw
    This locator uses PyPI's JSON interface. It's very limited in functionality
    and probably not worth using.
    cs tt|�jf|�t|�|_dS)N)r�r�rRrr�)r6r(r�)r�r*r+rR�szPyPIJSONLocator.__init__cCstd��dS)zJ
        Return all the distribution names known to this locator.
        zNot available from this locatorN)ra)r6r*r*r+rd�sz&PyPIJSONLocator.get_distribution_namescCsiid�}t|jdt|��}�y�|jj|�}|j�j�}tj|�}t	|j
d�}|d}|d|_|d|_|j
d�|_|j
dg�|_|j
d	�|_t|�}||_|d
}	|||j<x`|d
D]T}
|
d}|jj|�|j|
�|j|<|d
j|jt��j|�|j|
�|d|<q�Wx�|d
j�D]�\}}||jk�r:�q"t	|j
d�}
|j|
_||
_t|
�}||_|||<x\|D]T}
|
d}|jj|�|j|
�|j|<|d
j|t��j|�|j|
�|d|<�qpW�q"WWn@tk
�r}z"|jjt|��tjd|�WYdd}~XnX|S)N)r�r�z%s/json)r2r�rbr�r�r�r�r�r(r�ZreleaseszJSON fetch failed: %s) rr�rrN�open�read�decode�json�loadsrr2rbr�rTr�r�r�rr�r�r�r�r�r�r��itemsr�rQ�putrro�	exception)r6rbrWr(�respr�r�r�r�r�r�r�ZinfosZomd�odistrXr*r*r+rc�sT





"	zPyPIJSONLocator._get_project)r=r>r?r@rRrdrcr�r*r*)r�r+r��sr�c@s`eZdZdZejdejejBejB�Z	ejdejejB�Z
dd�Zejdej�Ze
dd��Zd	S)
�Pagez4
    This class represents a scraped HTML page.
    z�
(rel\s*=\s*(?:"(?P<rel1>[^"]*)"|'(?P<rel2>[^']*)'|(?P<rel3>[^>\s
]*))\s+)?
href\s*=\s*(?:"(?P<url1>[^"]*)"|'(?P<url2>[^']*)'|(?P<url3>[^>\s
]*))
(\s+rel\s*=\s*(?:"(?P<rel4>[^"]*)"|'(?P<rel5>[^']*)'|(?P<rel6>[^>\s
]*)))?
z!<base\s+href\s*=\s*['"]?([^'">]+)cCs4||_||_|_|jj|j�}|r0|jd�|_dS)zk
        Initialise an instance with the Unicode page contents and the URL they
        came from.
        rN)r�r�r(�_base�search�group)r6r�r(r�r*r*r+rRs
z
Page.__init__z[^a-z0-9$&+,/:;=?@.#%_\\|-]cCs�dd�}t�}x�|jj|j�D]�}|jd�}|dpZ|dpZ|dpZ|dpZ|dpZ|d	}|d
pr|dpr|d}t|j|�}t|�}|jj	d
d�|�}|j
||f�qWt|dd�dd�}|S)z�
        Return the URLs of all the links on a page together with information
        about their "rel" attribute, for determining which ones to treat as
        downloads and which ones to queue for further scraping.
        cSs,t|�\}}}}}}t||t|�|||f�S)zTidy up an URL.)rrr)r(r2rlrir�r�r�r*r*r+�clean%szPage.links.<locals>.cleanr0Zrel1Zrel2Zrel3Zrel4Zrel5Zrel6rqrrZurl3cSsdt|jd��S)Nz%%%2xr)�ordr�)r�r*r*r+�<lambda>3szPage.links.<locals>.<lambda>cSs|dS)Nrr*)rmr*r*r+r�7sT)r<�reverse)r��_href�finditerr��	groupdictrr�r�	_clean_re�subr�r�)r6r�rWr�r��relr(r*r*r+�linkss
z
Page.linksN)r=r>r?r@�re�compile�I�S�Xr�r�rRr�rr�r*r*r*r+r�sr�cs�eZdZdZejdd�dd�d�Zd�fdd	�	Zd
d�Zdd
�Z	dd�Z
ejdej
�Zdd�Zdd�Zdd�Zdd�Zdd�Zejd�Zdd�Z�ZS)�SimpleScrapingLocatorz�
    A locator which scrapes HTML pages to locate downloads for a distribution.
    This runs multiple threads to do the I/O; performance is at least as good
    as pip's PackageFinder, which works in an analogous fashion.
    cCstjtt�d�j�S)N)Zfileobj)�gzipZGzipFilerr�r�)�br*r*r+r�EszSimpleScrapingLocator.<lambda>cCs|S)Nr*)r�r*r*r+r�Fs)Zdeflater�ZnoneN�
csftt|�jf|�t|�|_||_i|_t�|_t	j
�|_t�|_d|_
||_tj�|_tj�|_dS)a�
        Initialise an instance.
        :param url: The root URL to use for scraping.
        :param timeout: The timeout, in seconds, to be applied to requests.
                        This defaults to ``None`` (no timeout specified).
        :param num_workers: The number of worker threads you want to do I/O,
                            This defaults to 10.
        :param kwargs: Passed to the superclass.
        FN)r�r�rRrr�r%�_page_cacher��_seenr
rP�	_to_fetch�
_bad_hosts�skip_externals�num_workers�	threading�RLock�_lock�_gplock)r6r(r%r�r�)r�r*r+rRIs



zSimpleScrapingLocator.__init__cCsJg|_x>t|j�D]0}tj|jd�}|jd�|j�|jj|�qWdS)z�
        Threads are created only when get_project is called, and terminate
        before it returns. They are there primarily to parallelise I/O (i.e.
        fetching web pages).
        )�targetTN)	�_threads�ranger�r�ZThread�_fetchZ	setDaemon�startrU)r6�irmr*r*r+�_prepare_threadscs
z&SimpleScrapingLocator._prepare_threadscCs>x|jD]}|jjd�qWx|jD]}|j�q$Wg|_dS)zu
        Tell all the threads to terminate (by sending a sentinel value) and
        wait for them to do so.
        N)r�r�r�r|)r6rmr*r*r+�
_wait_threadsps
z#SimpleScrapingLocator._wait_threadscCs�iid�}|j�x||_||_t|jdt|��}|jj�|jj�|j	�z&t
jd|�|jj
|�|jj�Wd|j�X|`WdQRX|S)N)r�r�z%s/zQueueing %s)r�rWrwrr�rr�r[r�r�rorpr�r�r|r�)r6rbrWr(r*r*r+rc}s



z"SimpleScrapingLocator._get_projectz<\b(linux-(i\d86|x86_64|arm\w+)|win(32|-amd64)|macosx-?\d+)\bcCs|jj|�S)zD
        Does an URL refer to a platform-specific download?
        )�platform_dependentr�)r6r(r*r*r+�_is_platform_dependent�sz,SimpleScrapingLocator._is_platform_dependentc
CsT|j|�rd}n|j||j�}tjd||�|rP|j�|j|j|�WdQRX|S)a%
        See if an URL is a suitable download for a project.

        If it is, register information in the result dictionary (for
        _get_project) about the specific version it's for.

        Note that the return value isn't actually used other than as a boolean
        value.
        Nzprocess_download: %s -> %s)r�r�rwrorpr�r�rW)r6r(r�r*r*r+�_process_download�s

z'SimpleScrapingLocator._process_downloadc
Cs�t|�\}}}}}}|j|j|j|j�r2d}n~|jrL|j|j�rLd}nd|j|j�s^d}nR|d
krld}nD|dkrzd}n6|j|�r�d}n&|j	dd�d	}	|	j
�d
kr�d}nd}tjd||||�|S)z�
        Determine whether a link URL from a referring page and with a
        particular "rel" attribute should be queued for scraping.
        F�homepage�download�httprf�ftp�:rrZ	localhostTz#should_queue: %s (%s) from %s -> %s)r�r�)r�rfr�)
rrjr�r�r�r�r�r�r��splitr�rorp)
r6�linkZreferrerr�r2rlri�_rW�hostr*r*r+�
_should_queue�s*


z#SimpleScrapingLocator._should_queuecCs�x�|jj�}z�yz|r�|j|�}|dkr(wx\|jD]R\}}||jkr0|jj|�|j|�r0|j|||�r0tj	d||�|jj
|�q0WWn2tk
r�}z|jj
t
|��WYdd}~XnXWd|jj�X|sPqWdS)z�
        Get a URL to fetch from the work queue, get the HTML page, examine its
        links for download candidates and candidates for further scraping.

        This is a handy method to run in a thread.
        NzQueueing %s from %s)r�rT�get_pager�r�r�r�r�rorpr�r�rQrrV)r6r(�pager�r�rXr*r*r+r��s&


&zSimpleScrapingLocator._fetchcCsXt|�\}}}}}}|dkr:tjjt|��r:tt|�d�}||jkr`|j|}tj	d||��n�|j
dd�d}d}||jkr�tj	d||��n�t|d	d
id�}�z�y�tj	d|�|j
j||jd
�}	tj	d|�|	j�}
|
jdd�}tj|��r�|	j�}|	j�}
|
jd�}|�r"|j|}||
�}
d}tj|�}|�r@|jd�}y|
j|�}
Wn tk
�rn|
jd�}
YnXt|
|�}||j|<Wn�tk
�r�}z |jdk�r�tjd||�WYdd}~Xn�t k
�r}z2tjd||�|j!�|jj"|�WdQRXWYdd}~Xn2t#k
�rB}ztjd||�WYdd}~XnXWd||j|<X|S)a
        Get the HTML for an URL, possibly from an in-memory cache.

        XXX TODO Note: this cache is never actually cleared. It's assumed that
        the data won't get stale over the lifetime of a locator instance (not
        necessarily true for the default_locator).
        �filez
index.htmlzReturning %s from cache: %sr�rrNzSkipping %s due to bad host %szAccept-encodingZidentity)r;zFetching %s)r%z
Fetched %szContent-Typer0zContent-Encodingzutf-8zlatin-1i�zFetch failed: %s: %s)$r�osri�isdirrrrr�rorpr�r�rrNr�r%r�rT�HTML_CONTENT_TYPEr�Zgeturlr��decoders�CHARSETr�r�r��UnicodeErrorr�rr9r�rr�r�r�)r6r(r2rlrir�rWr�r7r�r;Zcontent_typeZ	final_urlr��encoding�decoderr�rXr*r*r+r�sZ	







&$zSimpleScrapingLocator.get_pagez<a href=[^>]*>([^<]+)<cCsPt�}|j|j�}|s$td|j��x&|jj|j�D]}|j|jd��q4W|S)zJ
        Return all the distribution names known to this locator.
        zUnable to get %sr)	r�rr�r�_distname_rer�r�r�r�)r6rWrr�r*r*r+rd$sz,SimpleScrapingLocator.get_distribution_names)Nr�)r=r>r?r@�zlibZ
decompressrrRr�r�rcr�r�r�r�r�r�r�r�rrrdr�r*r*)r�r+r�;s"

;
r�cs8eZdZdZ�fdd�Zdd�Zdd�Zdd	�Z�ZS)
�DirectoryLocatorz?
    This class locates distributions in a directory tree.
    csN|jdd�|_tt|�jf|�tjj|�}tjj|�sDt	d|��||_
dS)a�
        Initialise an instance.
        :param path: The root of the directory tree to search.
        :param kwargs: Passed to the superclass constructor,
                       except for:
                       * recursive - if True (the default), subdirectories are
                         recursed into. If False, only the top-level directory
                         is searched,
        �	recursiveTzNot a directory: %rN)r�rr�r
rRrri�abspathrr�base_dir)r6rir�)r�r*r+rR5s
zDirectoryLocator.__init__cCs|j|j�S)z�
        Should a filename be considered as a candidate for a distribution
        archive? As well as the filename, the directory which contains it
        is provided, though not used by the current implementation.
        )rjr�)r6rv�parentr*r*r+�should_includeFszDirectoryLocator.should_includec		Cs�iid�}x�tj|j�D]v\}}}xb|D]Z}|j||�r(tjj||�}tddttjj|��dddf�}|j	||�}|r(|j
||�q(W|jsPqW|S)N)r�r�rr0)r�walkrrrir|rr	rr�r�r)	r6rbrW�root�dirs�files�fnr(r�r*r*r+rcNs

zDirectoryLocator._get_projectc	Cs�t�}x�tj|j�D]x\}}}xd|D]\}|j||�r$tjj||�}tddttjj	|��dddf�}|j
|d�}|r$|j|d�q$W|jsPqW|S)zJ
        Return all the distribution names known to this locator.
        rr0Nrb)
r�rrrrrir|rr	rr�r�r)r6rWrrrrr(r�r*r*r+rd^s
z'DirectoryLocator.get_distribution_names)	r=r>r?r@rRrrcrdr�r*r*)r�r+r
0s
r
c@s eZdZdZdd�Zdd�ZdS)�JSONLocatora
    This locator uses special extended metadata (not available on PyPI) and is
    the basis of performant dependency resolution in distlib. Other locators
    require archive downloads before dependencies can be determined! As you
    might imagine, that can be slow.
    cCstd��dS)zJ
        Return all the distribution names known to this locator.
        zNot available from this locatorN)ra)r6r*r*r+rdxsz"JSONLocator.get_distribution_namescCs�iid�}t|�}|r�x�|jdg�D]�}|ddks$|ddkrBq$t|d|d|jd	d
�|jd�}|j}|d|_d
|kr�|d
r�d|d
f|_|jdi�|_|jdi�|_|||j	<|dj
|j	t��j|d�q$W|S)N)r�r�rZptypeZsdistZ	pyversion�sourcerbr�r�zPlaceholder for summary)r�r2r(r�r�Zrequirements�exportsr�)
rrTrr2r�r�r�Zdependenciesrr�r�r�r�)r6rbrWr�r�r�r�r*r*r+rc~s&



"zJSONLocator._get_projectN)r=r>r?r@rdrcr*r*r*r+rqsrcs(eZdZdZ�fdd�Zdd�Z�ZS)�DistPathLocatorz�
    This locator finds installed distributions in a path. It can be useful for
    adding to an :class:`AggregatingLocator`.
    cs*tt|�jf|�t|t�s t�||_dS)zs
        Initialise an instance.

        :param distpath: A :class:`DistributionPath` instance to search.
        N)r�rrR�
isinstancer�AssertionError�distpath)r6rr�)r�r*r+rR�szDistPathLocator.__init__cCsP|jj|�}|dkr iid�}n,|j|d|jt|jg�id|jtdg�ii}|S)N)r�r�r�r�)rZget_distributionr�r�r�)r6rbr�rWr*r*r+rc�szDistPathLocator._get_project)r=r>r?r@rRrcr�r*r*)r�r+r�s
rcsReZdZdZ�fdd�Z�fdd�Zdd�Zeej	j
e�Z	dd	�Zd
d�Z�Z
S)�AggregatingLocatorzI
    This class allows you to chain and/or merge a list of locators.
    cs*|jdd�|_||_tt|�jf|�dS)a�
        Initialise an instance.

        :param locators: The list of locators to search.
        :param kwargs: Passed to the superclass constructor,
                       except for:
                       * merge - if False (the default), the first successful
                         search from any of the locators is returned. If True,
                         the results from all locators are merged (this can be
                         slow).
        �mergeFN)r�r �locatorsr�rrR)r6r!r�)r�r*r+rR�szAggregatingLocator.__init__cs*tt|�j�x|jD]}|j�qWdS)N)r�rr\r!)r6r�)r�r*r+r\�szAggregatingLocator.clear_cachecCs ||_x|jD]
}||_qWdS)N)r]r!r2)r6r_r�r*r*r+r`�szAggregatingLocator._set_schemecCs�i}x�|jD]�}|j|�}|r|jr�|jdi�}|jdi�}|j|�|jd�}|r�|r�x6|j�D]*\}}	||kr�|||	O<qb|	||<qbW|jd�}
|r�|
r�|
j|�q|jdkr�d}n$d}x|D]}|jj|�r�d}Pq�W|r|}PqW|S)Nr�r�TF)r!rer rT�updater�rOr�)r6rbrWr�r�rr�Zdfr�rZdd�foundr*r*r+rc�s8





zAggregatingLocator._get_projectcCs@t�}x4|jD]*}y||j�O}Wqtk
r6YqXqW|S)zJ
        Return all the distribution names known to this locator.
        )r�r!rdra)r6rWr�r*r*r+rd�s
z)AggregatingLocator.get_distribution_names)r=r>r?r@rRr\r`r�rAr2�fgetrcrdr�r*r*)r�r+r�s,rzhttps://pypi.python.org/simple/g@)r%�legacy)r2z1(?P<name>[\w-]+)\s*\(\s*(==\s*)?(?P<ver>[^)]+)\)$c@sLeZdZdZddd�Zdd�Zdd�Zd	d
�Zdd�Zd
d�Z	ddd�Z
dS)�DependencyFinderz0
    Locate dependencies for distributions.
    NcCs|pt|_t|jj�|_dS)zf
        Initialise an instance, using the specified locator
        to locate distributions.
        N)�default_locatorr�r!r2)r6r�r*r*r+rRs
zDependencyFinder.__init__cCsvtjd|�|j}||j|<||j||jf<xD|jD]:}t|�\}}tjd|||�|jj	|t
��j||f�q4WdS)z�
        Add a distribution to the finder. This will update internal information
        about who provides what.
        :param dist: The distribution to add.
        zadding distribution %szAdd to provided: %s, %s, %sN)rorpr<�
dists_by_name�distsr��providesr�providedr�r�r�)r6r�rb�pr�r*r*r+�add_distribution&s
z!DependencyFinder.add_distributioncCs|tjd|�|j}|j|=|j||jf=xN|jD]D}t|�\}}tjd|||�|j|}|j	||f�|s0|j|=q0WdS)z�
        Remove a distribution from the finder. This will update internal
        information about who provides what.
        :param dist: The distribution to remove.
        zremoving distribution %sz Remove from provided: %s, %s, %sN)
rorpr<r(r)r�r*rr+�remove)r6r�rbr,r��sr*r*r+�remove_distribution5s
z$DependencyFinder.remove_distributioncCsBy|jj|�}Wn,tk
r<|j�d}|jj|�}YnX|S)z�
        Get a version matcher for a requirement.
        :param reqt: The requirement
        :type reqt: str
        :return: A version matcher (an instance of
                 :class:`distlib.version.Matcher`).
        r)r2rOr"r�)r6�reqtrOrbr*r*r+�get_matcherGszDependencyFinder.get_matcherc	Csv|j|�}|j}t�}|j}||krrxL||D]@\}}y|j|�}Wntk
r\d}YnX|r.|j|�Pq.W|S)z�
        Find the distributions which can fulfill a requirement.

        :param reqt: The requirement.
         :type reqt: str
        :return: A set of distribution which can fulfill the requirement.
        F)r2r<r�r+r�r"r�)	r6r1rOrbrWr+r��providerr�r*r*r+�find_providersWs


zDependencyFinder.find_providersc	Cs�|j|}t�}x,|D]$}|j|�}|j|j�s|j|�qW|r^|jd||t|�f�d}nD|j|�|j|=x"|D]}|jj|t��j|�qvW|j	|�d}|S)a�
        Attempt to replace one provider with another. This is typically used
        when resolving dependencies from multiple sources, e.g. A requires
        (B >= 1.0) while C requires (B >= 1.1).

        For successful replacement, ``provider`` must meet all the requirements
        which ``other`` fulfills.

        :param provider: The provider we are trying to replace with.
        :param other: The provider we're trying to replace.
        :param problems: If False is returned, this will contain what
                         problems prevented replacement. This is currently
                         a tuple of the literal string 'cantreplace',
                         ``provider``, ``other``  and the set of requirements
                         that ``provider`` couldn't fulfill.
        :return: True if we can replace ``other`` with ``provider``, else
                 False.
        ZcantreplaceFT)
�reqtsr�r2r�r�r��	frozensetr0r�r-)	r6r3�other�problemsZrlistZ	unmatchedr/rOrWr*r*r+�try_to_replaceos"






zDependencyFinder.try_to_replaceFcCsi|_i|_i|_i|_t|p g�}d|krH|jd�|tdddg�O}t|t�rh|}}tj	d|�n4|j
j||d�}}|dkr�td|��tj	d	|�d
|_
t�}t|g�}t|g�}�x�|�r�|j�}|j}	|	|jkr�|j|�n"|j|	}
|
|k�r|j||
|�|j|jB}|j}t�}
||k�rbx2dD]*}d|}||k�r4|
t|d|�O}
�q4W||B|
B}�x>|D�]4}|j|�}|�sNtj	d|�|j
j||d�}|dk�r�|�r�|j
j|d
d�}|dk�r�tj	d|�|jd|f�n^|j|j}}||f|jk�r|j|�|j|�||k�rN||k�rN|j|�tj	d|j�xZ|D]R}|j}	|	|jk�r�|jj|t��j|�n"|j|	}
|
|k�rT|j||
|��qTW�qvWq�Wt|jj��}x.|D]&}||k|_|j�r�tj	d|j��q�Wtj	d|�||fS)a�
        Find a distribution and all distributions it depends on.

        :param requirement: The requirement specifying the distribution to
                            find, or a Distribution instance.
        :param meta_extras: A list of meta extras such as :test:, :build: and
                            so on.
        :param prereleases: If ``True``, allow pre-release versions to be
                            returned - otherwise, don't return prereleases
                            unless they're all that's available.

        Return a set of :class:`Distribution` instances and a set of
        problems.

        The distributions returned should be such that they have the
        :attr:`required` attribute set to ``True`` if they were
        from the ``requirement`` passed to ``find()``, and they have the
        :attr:`build_time_dependency` attribute set to ``True`` unless they
        are post-installation dependencies of the ``requirement``.

        The problems should be a tuple consisting of the string
        ``'unsatisfied'`` and the requirement which couldn't be satisfied
        by any distribution known to the locator.
        z:*:z:test:z:build:z:dev:zpassed %s as requirement)r�NzUnable to locate %rz
located %sT�test�build�devz:%s:z%s_requireszNo providers found for %rzCannot satisfy %rZunsatisfiedzAdding %s to install_distsz#%s is a build-time dependency only.zfind done for %s)r:r;r<)r+r)r(r5r�r.rrrorpr�r�rZ	requestedr�r<r-r9Zrun_requiresZ
meta_requiresZbuild_requires�getattrr4r�r�Zname_and_versionr��valuesZbuild_time_dependency)r6r�Zmeta_extrasr�r�r�r8ZtodoZ
install_distsrbr7ZireqtsZsreqtsZereqtsr<rXZ	all_reqtsr�Z	providersr3�nrr,r)r*r*r+�find�s�




















zDependencyFinder.find)N)NF)r=r>r?r@rRr-r0r2r4r9r@r*r*r*r+r&s
(r&)N)Nr��iorr�Zloggingrrgr�r��ImportErrorZdummy_threadingrr0r�compatrrrrr	r
rrr
rrr4rrrrZdatabaserrrr�r�utilrrrrrrrrr r�r!r"r�r#r$Z	getLoggerr=ror�r�r�rrr&r,r-�objectrAr�r�r�r�r
rrrr'r�ZNAME_VERSION_REr&r*r*r*r+�<module>sZD,



;0E:vA&[
_vendor/distlib/__pycache__/resources.cpython-36.pyc000064400000025116151733136270016517 0ustar003

�Pf*�@s�ddlmZddlZddlZddlZddlZddlZddlZddlZddl	Z	ddl
Z
ddlmZddl
mZmZmZmZeje�ZdaGdd�de�ZGdd	�d	e�ZGd
d�de�ZGdd
�d
e�ZGdd�de�ZGdd�de�Zed�ee
jeiZyFyddl Z!Wne"k
�r$ddl#Z!YnXeee!j$<eee!j%<[!Wne"e&fk
�rXYnXdd�Z'iZ(dd�Z)e	j*e+d��Z,dd�Z-dS)�)�unicode_literalsN�)�DistlibException)�cached_property�get_cache_base�path_to_cache_dir�Cachecs.eZdZd�fdd�	Zdd�Zdd�Z�ZS)	�
ResourceCacheNcs0|dkrtjjt�td��}tt|�j|�dS)Nzresource-cache)�os�path�joinr�str�superr	�__init__)�self�base)�	__class__��/usr/lib/python3.6/resources.pyrszResourceCache.__init__cCsdS)z�
        Is the cache stale for the given resource?

        :param resource: The :class:`Resource` being cached.
        :param path: The path of the resource in the cache.
        :return: True if the cache is stale.
        Tr)r�resourcerrrr�is_stale#s	zResourceCache.is_stalec	Cs�|jj|�\}}|dkr|}n~tjj|j|j|�|�}tjj|�}tjj|�sXtj	|�tjj
|�sjd}n|j||�}|r�t|d��}|j
|j�WdQRX|S)z�
        Get a resource into the cache,

        :param resource: A :class:`Resource` instance.
        :return: The pathname of the resource in the cache.
        NT�wb)�finder�get_cache_infor
rrrZ
prefix_to_dir�dirname�isdir�makedirs�existsr�open�write�bytes)rr�prefixr�resultrZstale�frrr�get.s
zResourceCache.get)N)�__name__�
__module__�__qualname__rrr$�
__classcell__rr)rrr	sr	c@seZdZdd�ZdS)�ResourceBasecCs||_||_dS)N)r�name)rrr*rrrrIszResourceBase.__init__N)r%r&r'rrrrrr)Hsr)c@s@eZdZdZdZdd�Zedd��Zedd��Zed	d
��Z	dS)�Resourcez�
    A class representing an in-package resource, such as a data file. This is
    not normally instantiated by user code, but rather by a
    :class:`ResourceFinder` which manages the resource.
    FcCs|jj|�S)z�
        Get the resource as a stream.

        This is not a property to make it obvious that it returns a new stream
        each time.
        )r�
get_stream)rrrr�	as_streamVszResource.as_streamcCstdkrt�atj|�S)N)�cacher	r$)rrrr�	file_path_szResource.file_pathcCs|jj|�S)N)r�	get_bytes)rrrrr fszResource.bytescCs|jj|�S)N)r�get_size)rrrr�sizejsz
Resource.sizeN)
r%r&r'�__doc__�is_containerr-rr/r r2rrrrr+Ns	r+c@seZdZdZedd��ZdS)�ResourceContainerTcCs|jj|�S)N)r�
get_resources)rrrr�	resourcesrszResourceContainer.resourcesN)r%r&r'r4rr7rrrrr5osr5c@s�eZdZdZejjd�rdZnd Zdd�Zdd	�Z	d
d�Z
dd
�Zdd�Zdd�Z
dd�Zdd�Zdd�Zdd�Zdd�Zeejj�Zdd�ZdS)!�ResourceFinderz4
    Resource finder for file system resources.
    �java�.pyc�.pyo�.classcCs.||_t|dd�|_tjjt|dd��|_dS)N�
__loader__�__file__�)�module�getattr�loaderr
rrr)rr@rrrr�szResourceFinder.__init__cCstjj|�S)N)r
r�realpath)rrrrr�_adjust_path�szResourceFinder._adjust_pathcCsBt|t�rd}nd}|j|�}|jd|j�tjj|�}|j|�S)N�/�/r)	�
isinstancer �split�insertrr
rrrD)r�
resource_name�sep�partsr"rrr�
_make_path�s

zResourceFinder._make_pathcCstjj|�S)N)r
rr)rrrrr�_find�szResourceFinder._findcCs
d|jfS)N)r)rrrrrr�szResourceFinder.get_cache_infocCsD|j|�}|j|�sd}n&|j|�r0t||�}n
t||�}||_|S)N)rMrN�
_is_directoryr5r+r)rrJrr"rrr�find�s



zResourceFinder.findcCst|jd�S)N�rb)rr)rrrrrr,�szResourceFinder.get_streamc	Cs t|jd��
}|j�SQRXdS)NrQ)rr�read)rrr#rrrr0�szResourceFinder.get_bytescCstjj|j�S)N)r
r�getsize)rrrrrr1�szResourceFinder.get_sizecs*�fdd��t�fdd�tj|j�D��S)Ncs|dko|j�j�S)N�__pycache__)�endswith�skipped_extensions)r#)rrr�allowed�sz-ResourceFinder.get_resources.<locals>.allowedcsg|]}�|�r|�qSrr)�.0r#)rWrr�
<listcomp>�sz0ResourceFinder.get_resources.<locals>.<listcomp>)�setr
�listdirr)rrr)rWrrr6�szResourceFinder.get_resourcescCs|j|j�S)N)rOr)rrrrrr4�szResourceFinder.is_containerccs�|j|�}|dk	r�|g}xn|r�|jd�}|V|jr|j}xH|jD]>}|sP|}ndj||g�}|j|�}|jrz|j|�qB|VqBWqWdS)NrrF)rP�popr4r*r7r�append)rrJrZtodoZrnamer*�new_nameZchildrrr�iterator�s 


zResourceFinder.iteratorN)r:r;r<)r:r;)r%r&r'r3�sys�platform�
startswithrVrrDrMrNrrPr,r0r1r6r4�staticmethodr
rrrOr_rrrrr8ws"r8cs`eZdZdZ�fdd�Zdd�Zdd�Zdd	�Zd
d�Zdd
�Z	dd�Z
dd�Zdd�Z�Z
S)�ZipResourceFinderz6
    Resource finder for resources in .zip files.
    csZtt|�j|�|jj}dt|�|_t|jd�r>|jj|_nt	j
||_t|j�|_dS)Nr�_files)
rrdrrB�archive�len�
prefix_len�hasattrre�	zipimport�_zip_directory_cache�sorted�index)rr@rf)rrrr�szZipResourceFinder.__init__cCs|S)Nr)rrrrrrD�szZipResourceFinder._adjust_pathcCs�||jd�}||jkrd}nX|r:|dtjkr:|tj}tj|j|�}y|j|j|�}Wntk
rtd}YnX|s�tj	d||j
j�ntj	d||j
j�|S)NTrFz_find failed: %r %rz_find worked: %r %r���)rhrer
rK�bisectrmrb�
IndexError�logger�debugrBr!)rrr"�irrrrN�s


zZipResourceFinder._findcCs&|jj}|jdt|�d�}||fS)Nr)rBrfrrg)rrr!rrrrr�sz ZipResourceFinder.get_cache_infocCs|jj|j�S)N)rB�get_datar)rrrrrr0�szZipResourceFinder.get_bytescCstj|j|��S)N)�io�BytesIOr0)rrrrrr,�szZipResourceFinder.get_streamcCs|j|jd�}|j|dS)N�)rrhre)rrrrrrr1szZipResourceFinder.get_sizecCs�|j|jd�}|r,|dtjkr,|tj7}t|�}t�}tj|j|�}xV|t|j�kr�|j|j|�sjP|j||d�}|j	|j
tjd�d�|d7}qJW|S)Nrrrn)rrhr
rKrgrZrormrb�addrH)rrrZplenr"rs�srrrr6s
zZipResourceFinder.get_resourcescCsj||jd�}|r*|dtjkr*|tj7}tj|j|�}y|j|j|�}Wntk
rdd}YnX|S)NrFrn)rhr
rKrormrbrp)rrrsr"rrrrOs

zZipResourceFinder._is_directory)r%r&r'r3rrDrNrr0r,r1r6rOr(rr)rrrd�srdcCs|tt|�<dS)N)�_finder_registry�type)rB�finder_makerrrr�register_finder0sr}cCs�|tkrt|}nv|tjkr$t|�tj|}t|dd�}|dkrJtd��t|dd�}tjt|��}|dkrxtd|��||�}|t|<|S)z�
    Return a resource finder for a package.
    :param package: The name of the package.
    :return: A :class:`ResourceFinder` instance for the package.
    �__path__Nz8You cannot get a finder for a module, only for a packager=zUnable to locate finder for %r)	�
_finder_cacher`�modules�
__import__rArrzr$r{)�packager"r@rrBr|rrrr6s


rZ	__dummy__cCsRd}tj|�tjj|�}tjt|��}|rNt}tj	j
|d�|_||_||�}|S)z�
    Return a resource finder for a path, which should represent a container.

    :param path: The path.
    :return: A :class:`ResourceFinder` instance for the path.
    Nr?)
�pkgutilZget_importerr`�path_importer_cacher$rzr{�
_dummy_moduler
rrr>r=)rr"rBrr@rrr�finder_for_pathRs
r�).Z
__future__rroruZloggingr
r�Zshutilr`�typesrjr?r�utilrrrrZ	getLoggerr%rqr.r	�objectr)r+r5r8rdr{�zipimporterrz�_frozen_importlib_externalZ_fi�ImportError�_frozen_importlib�SourceFileLoader�
FileFinder�AttributeErrorr}rr�
ModuleTyper
r�r�rrrr�<module>sH
,!ZN


_vendor/distlib/__pycache__/util.cpython-36.pyc000064400000127326151733136300015462 0ustar003

�Pfi��@s>ddlZddlmZddlZddlZddlmZddlZddl	Z	ddl
Z
ddlZddlZddl
Z
ddlZddlZyddlZWnek
r�dZYnXddlZddlZddlZddlZddlZyddlZWnek
r�ddlZYnXddlZddlmZddlmZmZmZmZm Z m!Z!m"Z"m#Z#m$Z$m%Z%m&Z&m'Z'm(Z(m)Z)m*Z*m+Z+m,Z,m-Z-m.Z.m/Z/e
j0e1�Z2dZ3e
j4e3�Z5dZ6d	e6d
Z7e6dZ8dZ9d
e9de8de3d
e9de8dZ:dZ;de:de;de:dZ<e7d
e3e7dZ=de=dZ>de6de>de<dZ?e
j4e?�Z@de9de8d
ZAe
j4eA�ZBdd �ZCd!d"�ZDd#d$�ZEd%d&�ZFd�d'd(�ZGd)d*�ZHd+d,�ZId-d.�ZJejKd/d0��ZLejKd1d2��ZMejKd�d4d5��ZNGd6d7�d7eO�ZPd8d9�ZQGd:d;�d;eO�ZRd<d=�ZSGd>d?�d?eO�ZTe
j4d@e
jU�ZVdAdB�ZWd�dCdD�ZXdEdF�ZYdGdH�ZZdIdJ�Z[dKdL�Z\dMdN�Z]e
j4dOe
j^�Z_e
j4dP�Z`d�dQdR�Zae
j4dS�ZbdTdU�ZcdVdW�ZddXdY�ZedZZfd[d\�Zgd]d^�ZhGd_d`�d`eO�ZiGdadb�dbeO�ZjGdcdd�ddeO�Zkd�Zld�dmdn�Zmdodp�Znd�ZoGdwdx�dxeO�Zpe
j4dy�Zqe
j4dz�Zre
j4d{�Zsd|d}�Zd~d�Zte�r\dd�lmuZvmwZwmxZxGd�d��d�e$jy�ZyGd�d��d�ev�ZuGd�d��d�eue'�Zzej{dd��Z|e|d�k�r�Gd�d��d�e$j}�Z}e�r�Gd�d��d�e$j~�Z~Gd�d��d�e%j�Ze�r�Gd�d��d�e%j��Z�Gd�d��d�e%j��Z�d�d��Z�Gd�d��d�eO�Z�Gd�d��d�e��Z�Gd�d��d�e��Z�Gd�d��d�e(�Z�Gd�d��d�eO�Z�d�d��Z�dS)��N)�deque)�iglob�)�DistlibException)�string_types�	text_type�shutil�	raw_input�StringIO�cache_from_source�urlopen�urljoin�httplib�	xmlrpclib�	splittype�HTTPHandler�BaseConfigurator�valid_ident�	Container�configparser�URLError�ZipFile�fsdecode�unquotez\s*,\s*z
(\w|[.-])+z(\*|:(\*|\w+):|�)z\*?z([<>=!~]=)|[<>]�(z)?\s*(z)(z)\s*(z))*z(from\s+(?P<diref>.*))z\(\s*(?P<c1>�|z)\s*\)|(?P<c2>z\s*)z)*z\[\s*(?P<ex>z)?\s*\]z(?P<dn>z	\s*)?(\s*z)?$z(?P<op>z)\s*(?P<vn>cs�dd��d}tj|�}|r�|j�}|d}|dp8|d}|dsHd}nd}|dj�}|snd}d}|d}nL|dd	kr�d
|}tj|�}	�fdd�|	D�}d
|djdd�|D��f}|ds�d}
ntj|d�}
t	|||
|||d�}|S)NcSs|j�}|d|dfS)N�opZvn)�	groupdict)�m�d�r!�/usr/lib/python3.6/util.py�get_constraintYsz)parse_requirement.<locals>.get_constraintZdnZc1Zc2Zdiref�rz<>!=z~=csg|]}�|��qSr!r!)�.0r)r#r!r"�
<listcomp>qsz%parse_requirement.<locals>.<listcomp>z%s (%s)z, cSsg|]}d|�qS)z%s %sr!)r%Zconr!r!r"r&rsZex)�nameZconstraints�extrasZrequirement�source�url)
�REQUIREMENT_RE�matchr�strip�RELOP_IDENT_RE�finditer�join�COMMA_RE�splitr)�s�resultrr r'Zconsr*ZconstrZrs�iteratorr(r!)r#r"�parse_requirementWs4


r6cCs�dd�}i}x�|D]�\}}}tjj||�}x�t|�D]t}tjj||�}	x`t|	�D]T}
|||
�}|dkrt|j|d�qP|||
�}|jtjjd�jd�}
|
d|||<qPWq4WqW|S)z%Find destinations for resources filescSsD|jtjjd�}|jtjjd�}|j|�s.t�|t|�d�jd�S)N�/)�replace�os�path�sep�
startswith�AssertionError�len�lstrip)�baser:r!r!r"�get_rel_pathsz)get_resources_dests.<locals>.get_rel_pathNr7)r9r:r0r�popr8r;�rstrip)Zresources_rootZrulesrAZdestinationsr@�suffix�dest�prefixZabs_baseZabs_globZabs_pathZ
resource_fileZrel_pathZrel_destr!r!r"�get_resources_dests|s

rGcCs(ttd�rd}ntjttdtj�k}|S)NZreal_prefixT�base_prefix)�hasattr�sysrF�getattr)r4r!r!r"�in_venv�s
rLcCs$tjjtj�}t|t�s t|�}|S)N)r9r:�normcaserJ�
executable�
isinstancerr)r4r!r!r"�get_executable�s

rPcCsT|}xJt|�}|}|r |r |}|r|dj�}||kr:P|rd|||f}qW|S)Nrz	%c: %s
%s)r	�lower)�promptZ
allowed_charsZerror_prompt�default�pr3�cr!r!r"�proceed�s
rVcCs<t|t�r|j�}i}x |D]}||kr||||<qW|S)N)rOrr2)r �keysr4�keyr!r!r"�extract_by_key�s

rYcCs�tjddkrtjd�|�}|j�}t|�}yrtj|�}|ddd}xR|j�D]F\}}x<|j�D]0\}}d||f}t	|�}	|	dk	s�t
�|	||<qdWqRW|Stk
r�|jdd�YnXdd	�}
t
j�}y|
||�Wn<t
jk
�r|j�tj|�}t|�}|
||�YnXi}xb|j�D]V}i||<}xB|j|�D]4\}
}d|
|f}t	|�}	|	dk	�spt
�|	||
<�qFW�q*W|S)
Nr�zutf-8�
extensionszpython.exports�exportsz%s = %scSs$t|d�r|j|�n
|j|�dS)N�	read_file)rIr]Zreadfp)�cp�streamr!r!r"�read_stream�s
z!read_exports.<locals>.read_stream)rJ�version_info�codecs�	getreader�readr
�json�load�items�get_export_entryr=�	Exception�seekr�ConfigParserZMissingSectionHeaderError�close�textwrap�dedentZsections)r_�dataZjdatar4�groupZentries�k�vr3�entryr`r^rXr'�valuer!r!r"�read_exports�sD

rucCs�tjddkrtjd�|�}tj�}x||j�D]p\}}|j|�x\|j�D]P}|j	dkr`|j
}nd|j
|j	f}|jr�d|dj|j�f}|j
||j|�qJWq.W|j|�dS)NrrZzutf-8z%s:%sz%s [%s]z, )rJrarb�	getwriterrrkrgZadd_section�valuesrDrF�flagsr0�setr'�write)r\r_r^rqrrrsr3r!r!r"�
write_exports�s

r{ccs$tj�}z
|VWdtj|�XdS)N)�tempfile�mkdtempr�rmtree)Ztdr!r!r"�tempdirs
rccs.tj�}ztj|�dVWdtj|�XdS)N)r9�getcwd�chdir)r �cwdr!r!r"r�s


r��ccs.tj�}ztj|�dVWdtj|�XdS)N)�socketZgetdefaulttimeoutZsetdefaulttimeout)ZsecondsZctor!r!r"�socket_timeouts


r�c@seZdZdd�Zddd�ZdS)�cached_propertycCs
||_dS)N)�func)�selfr�r!r!r"�__init__)szcached_property.__init__NcCs,|dkr|S|j|�}tj||jj|�|S)N)r��object�__setattr__�__name__)r��obj�clsrtr!r!r"�__get__.s

zcached_property.__get__)N)r��
__module__�__qualname__r�r�r!r!r!r"r�(sr�cCs�tjdkr|S|s|S|ddkr.td|��|ddkrFtd|��|jd�}xtj|krj|jtj�qRW|svtjStjj|�S)a�Return 'pathname' as a name that will work on the native filesystem.

    The path is split on '/' and put back together again using the current
    directory separator.  Needed because filenames in the setup script are
    always supplied in Unix style, and have to be converted to the local
    convention before we can actually use them in the filesystem.  Raises
    ValueError on non-Unix-ish systems if 'pathname' either starts or
    ends with a slash.
    r7rzpath '%s' cannot be absoluterzpath '%s' cannot end with '/'���)r9r;�
ValueErrorr2�curdir�remover:r0)�pathname�pathsr!r!r"�convert_path6s


r�c@s�eZdZd$dd�Zdd�Zdd�Zdd	�Zd%dd�Zd&dd�Zdd�Z	dd�Z
dd�Zdd�Zdd�Z
d'dd�Zdd�Zdd�Zd d!�Zd"d#�Zd
S)(�FileOperatorFcCs||_t�|_|j�dS)N)�dry_runry�ensured�_init_record)r�r�r!r!r"r�RszFileOperator.__init__cCsd|_t�|_t�|_dS)NF)�recordry�
files_written�dirs_created)r�r!r!r"r�WszFileOperator._init_recordcCs|jr|jj|�dS)N)r�r��add)r�r:r!r!r"�record_as_written\szFileOperator.record_as_writtencCsHtjj|�s tdtjj|���tjj|�s0dStj|�jtj|�jkS)a�Tell if the target is newer than the source.

        Returns true if 'source' exists and is more recently modified than
        'target', or if 'source' exists and 'target' doesn't.

        Returns false if both exist and 'target' is the same age or younger
        than 'source'. Raise PackagingFileError if 'source' does not exist.

        Note that this test is not very accurate: files created in the same
        second will have the same "age".
        zfile '%r' does not existT)r9r:�existsr�abspath�stat�st_mtime)r�r)�targetr!r!r"�newer`szFileOperator.newerTcCs�|jtjj|��tjd||�|js�d}|rftjj|�rDd|}n"tjj|�rftjj	|�rfd|}|rvt
|d��tj||�|j
|�dS)z8Copy a file respecting dry-run and force flags.
        zCopying %s to %sNz%s is a symlinkz%s is a non-regular filez which would be overwritten)�
ensure_dirr9r:�dirname�logger�infor��islinkr��isfiler�rZcopyfiler�)r�Zinfile�outfile�check�msgr!r!r"�	copy_filets
zFileOperator.copy_fileNc
Cs�tjj|�st�|jtjj|��tjd||�|jsx|dkrLt	|d�}nt
j	|d|d�}ztj||�Wd|j
�X|j|�dS)NzCopying stream %s to %s�wb�w)�encoding)r9r:�isdirr=r�r�r�r�r��openrbrZcopyfileobjrlr�)r�Zinstreamr�r�Z	outstreamr!r!r"�copy_stream�s
zFileOperator.copy_streamc	CsF|jtjj|��|js8t|d��}|j|�WdQRX|j|�dS)Nr�)r�r9r:r�r�r�rzr�)r�r:ro�fr!r!r"�write_binary_file�s
zFileOperator.write_binary_filec
CsL|jtjj|��|js>t|d��}|j|j|��WdQRX|j|�dS)Nr�)	r�r9r:r�r�r�rz�encoder�)r�r:ror�r�r!r!r"�write_text_file�s
zFileOperator.write_text_filecCsrtjdkstjdkrntjdkrnxN|D]F}|jr<tjd|�q$tj|�j|B|@}tjd||�tj||�q$WdS)N�posix�javazchanging mode of %szchanging mode of %s to %o)	r9r'�_namer�r�r�r��st_mode�chmod)r��bits�mask�filesr��moder!r!r"�set_mode�s
zFileOperator.set_modecCs|jdd|�S)Nimi�)r�)r3r�r!r!r"�<lambda>�szFileOperator.<lambda>cCs~tjj|�}||jkrztjj|�rz|jj|�tjj|�\}}|j|�tj	d|�|j
shtj|�|jrz|j
j|�dS)NzCreating %s)r9r:r�r�r�r�r2r�r�r�r��mkdirr�r�)r�r:r r�r!r!r"r��s

zFileOperator.ensure_dircCsvt||�}tjd||�|jsh|s0|j||�rX|s:d}n|j|�sHt�|t|�d�}tj	|||d�|j
|�|S)NzByte-compiling %s to %sT)rr�r�r�r�r<r=r>�
py_compile�compiler�)r�r:�optimize�forcerFZdpathZdiagpathr!r!r"�byte_compile�s
zFileOperator.byte_compilecCs�tjj|�r�tjj|�r`tjj|�r`tjd|�|jsBtj	|�|j
r�||jkr�|jj|�nPtjj|�rrd}nd}tjd||�|js�tj|�|j
r�||j
kr�|j
j|�dS)NzRemoving directory tree at %s�link�filezRemoving %s %s)r9r:r�r�r�r��debugr�rr~r�r�r�r�)r�r:r3r!r!r"�ensure_removed�s"



zFileOperator.ensure_removedcCsHd}x>|sBtjj|�r&tj|tj�}Ptjj|�}||kr<P|}qW|S)NF)r9r:r��access�W_OKr�)r�r:r4�parentr!r!r"�is_writable�szFileOperator.is_writablecCs"|js
t�|j|jf}|j�|S)zV
        Commit recorded changes, turn off recording, return
        changes.
        )r�r=r�r�r�)r�r4r!r!r"�commit�s
zFileOperator.commitcCs�|js�x(t|j�D]}tjj|�rtj|�qWt|jdd�}xN|D]F}tj	|�}|r�|dgksdt
�tjj||d�}tj|�tj|�qDW|j
�dS)NT)�reverse�__pycache__r)r��listr�r9r:r�r��sortedr��listdirr=r0�rmdirr�)r�r��dirsr �flistZsdr!r!r"�rollback�s


zFileOperator.rollback)F)T)N)FFN)r�r�r�r�r�r�r�r�r�r�r�r�Zset_executable_moder�r�r�r�r�r�r!r!r!r"r�Qs 




r�cCsb|tjkrtj|}nt|�}|dkr,|}n2|jd�}t||jd��}x|D]}t||�}qLW|S)N�.r)rJ�modules�
__import__r2rKrB)Zmodule_nameZdotted_path�modr4�partsrTr!r!r"�resolves


r�c@s6eZdZdd�Zedd��Zdd�Zdd�Zej	Z	d	S)
�ExportEntrycCs||_||_||_||_dS)N)r'rFrDrx)r�r'rFrDrxr!r!r"r�szExportEntry.__init__cCst|j|j�S)N)r�rFrD)r�r!r!r"rtszExportEntry.valuecCsd|j|j|j|jfS)Nz<ExportEntry %s = %s:%s %s>)r'rFrDrx)r�r!r!r"�__repr__!s
zExportEntry.__repr__cCsDt|t�sd}n0|j|jko>|j|jko>|j|jko>|j|jk}|S)NF)rOr�r'rFrDrx)r��otherr4r!r!r"�__eq__%s
zExportEntry.__eq__N)
r�r�r�r�r�rtr�r�r��__hash__r!r!r!r"r�s

r�z�(?P<name>(\w|[-.+])+)
                      \s*=\s*(?P<callable>(\w+)([:\.]\w+)*)
                      \s*(\[\s*(?P<flags>\w+(=\w+)?(,\s*\w+(=\w+)?)*)\s*\])?
                      c
Cs�tj|�}|s0d}d|ks"d|kr�td|��n�|j�}|d}|d}|jd�}|dkrf|d}}n"|dkrztd|��|jd�\}}|d	}	|	dkr�d|ks�d|kr�td|��g}	nd
d�|	jd�D�}	t||||	�}|S)
N�[�]zInvalid specification '%s'r'�callable�:rrrxcSsg|]}|j��qSr!)r-)r%r�r!r!r"r&Qsz$get_export_entry.<locals>.<listcomp>�,)�ENTRY_RE�searchrr�countr2r�)
Z
specificationrr4r r'r:ZcolonsrFrDrxr!r!r"rh7s2


rhcCs�|dkrd}tjdkr.dtjkr.tjjd�}ntjjd�}tjj|�rftj|tj�}|s�t	j
d|�n<ytj|�d}Wn(tk
r�t	j
d	|dd
�d}YnX|s�t
j�}t	j
d|�tjj||�S)
a�
    Return the default base location for distlib caches. If the directory does
    not exist, it is created. Use the suffix provided for the base directory,
    and default to '.distlib' if it isn't provided.

    On Windows, if LOCALAPPDATA is defined in the environment, then it is
    assumed to be a directory, and will be the parent directory of the result.
    On POSIX, and on Windows if LOCALAPPDATA is not defined, the user's home
    directory - using os.expanduser('~') - will be the parent directory of
    the result.

    The result is just the directory '.distlib' in the parent directory as
    determined above, or with the name specified with ``suffix``.
    Nz.distlib�ntZLOCALAPPDATAz
$localappdata�~z(Directory exists but is not writable: %sTzUnable to create %s)�exc_infoFz#Default location unusable, using %s)r9r'�environr:�
expandvars�
expanduserr�r�r�r��warning�makedirs�OSErrorr|r}r0)rDr4Zusabler!r!r"�get_cache_baseVs&

r�cCsBtjjtjj|��\}}|r(|jdd�}|jtjd�}||dS)a
    Convert an absolute path to a directory name for use in a cache.

    The algorithm used is:

    #. On Windows, any ``':'`` in the drive is replaced with ``'---'``.
    #. Any occurrence of ``os.sep`` is replaced with ``'--'``.
    #. ``'.cache'`` is appended.
    r�z---z--z.cache)r9r:�
splitdriver�r8r;)r:r rTr!r!r"�path_to_cache_dirs

r�cCs|jd�s|dS|S)Nr7)�endswith)r3r!r!r"�ensure_slash�s
r�cCsHd}}d|kr>|jdd�\}}d|kr.|}n|jdd�\}}|||fS)N�@rr�)r2)ZnetlocZusernameZpasswordrFr!r!r"�parse_credentials�srcCstjd�}tj|�|S)N�)r9�umask)r4r!r!r"�get_process_umask�s

rcCs>d}d}x$t|�D]\}}t|t�sd}PqW|dk	s:t�|S)NTF)�	enumeraterOrr=)�seqr4�ir3r!r!r"�is_string_sequence�s
rz3([a-z0-9_]+([.-][a-z_][a-z0-9_]*)*)-([a-z0-9_.+-]+)z
-py(\d\.?\d?)cCs�d}d}t|�jdd�}tj|�}|r@|jd�}|d|j��}|r�t|�t|�dkr�tjtj	|�d|�}|r�|j
�}|d|�||dd�|f}|dkr�tj|�}|r�|jd�|jd�|f}|S)zw
    Extract name, version, python version from a filename (no extension)

    Return name, version, pyver or None
    N� �-rz\brZ)rr8�PYTHON_VERSIONr�rp�startr>�rer,�escape�end�PROJECT_NAME_AND_VERSION)�filenameZproject_namer4Zpyverr�nr!r!r"�split_filename�s"


rz-(?P<name>[\w .-]+)\s*\(\s*(?P<ver>[^\s)]+)\)$cCs:tj|�}|std|��|j�}|dj�j�|dfS)z�
    A utility method used to get name and version from a string.

    From e.g. a Provides-Dist value.

    :param p: A value in a form 'foo (1.0)'
    :return: The name and version as a tuple.
    z$Ill-formed name/version string: '%s'r'Zver)�NAME_VERSION_REr,rrr-rQ)rTrr r!r!r"�parse_name_and_version�s
	
rcCs�t�}t|pg�}t|pg�}d|kr8|jd�||O}x�|D]x}|dkrV|j|�q>|jd�r�|dd�}||kr�tjd|�||kr�|j|�q>||kr�tjd|�|j|�q>W|S)N�*r
rzundeclared extra: %s)ryr�r�r<r�r�)Z	requestedZ	availabler4�rZunwantedr!r!r"�
get_extras�s&


rcCs�i}yNt|�}|j�}|jd�}|jd�s8tjd|�ntjd�|�}tj	|�}Wn0t
k
r�}ztjd||�WYdd}~XnX|S)NzContent-Typezapplication/jsonz(Unexpected response for JSON request: %szutf-8z&Failed to get external data for %s: %s)rr��getr<r�r�rbrcrerfri�	exception)r*r4ZrespZheadersZct�reader�er!r!r"�_get_external_data�s

 rz'https://www.red-dove.com/pypi/projects/cCs*d|dj�|f}tt|�}t|�}|S)Nz%s/%s/project.jsonr)�upperr
�_external_data_base_urlr)r'r*r4r!r!r"�get_project_datas
r cCs(d|dj�||f}tt|�}t|�S)Nz%s/%s/package-%s.jsonr)rr
rr)r'�versionr*r!r!r"�get_package_datas
r"c@s(eZdZdZdd�Zdd�Zdd�ZdS)	�Cachez�
    A class implementing a cache for resources that need to live in the file system
    e.g. shared libraries. This class was moved from resources to here because it
    could be used by other modules, e.g. the wheel module.
    cCsPtjj|�stj|�tj|�jd@dkr6tjd|�tjjtjj	|��|_
dS)zu
        Initialise an instance.

        :param base: The base directory where the cache should be located.
        �?rzDirectory '%s' is not privateN)r9r:r�r�r�r�r�r�r��normpathr@)r�r@r!r!r"r�"s

zCache.__init__cCst|�S)zN
        Converts a resource prefix to a directory name in the cache.
        )r�)r�rFr!r!r"�
prefix_to_dir0szCache.prefix_to_dircCs�g}x�tj|j�D]r}tjj|j|�}y>tjj|�s@tjj|�rLtj|�ntjj|�rbt	j
|�Wqtk
r�|j|�YqXqW|S)z"
        Clear the cache.
        )
r9r�r@r:r0r�r�r�r�rr~ri�append)r�Znot_removed�fnr!r!r"�clear6szCache.clearN)r�r�r��__doc__r�r&r)r!r!r!r"r#sr#c@s:eZdZdZdd�Zddd�Zdd�Zd	d
�Zdd�Zd
S)�
EventMixinz1
    A very simple publish/subscribe system.
    cCs
i|_dS)N)�_subscribers)r�r!r!r"r�KszEventMixin.__init__TcCsD|j}||krt|g�||<n"||}|r6|j|�n
|j|�dS)a`
        Add a subscriber for an event.

        :param event: The name of an event.
        :param subscriber: The subscriber to be added (and called when the
                           event is published).
        :param append: Whether to append or prepend the subscriber to an
                       existing subscriber list for the event.
        N)r,rr'�
appendleft)r��event�
subscriberr'�subsZsqr!r!r"r�Ns
zEventMixin.addcCs,|j}||krtd|��||j|�dS)z�
        Remove a subscriber for an event.

        :param event: The name of an event.
        :param subscriber: The subscriber to be removed.
        zNo subscribers: %rN)r,r�r�)r�r.r/r0r!r!r"r�bszEventMixin.removecCst|jj|f��S)z�
        Return an iterator for the subscribers for an event.
        :param event: The event to return subscribers for.
        )�iterr,r)r�r.r!r!r"�get_subscribersnszEventMixin.get_subscriberscOspg}xT|j|�D]F}y||f|�|�}Wn"tk
rJtjd�d}YnX|j|�qWtjd||||�|S)a^
        Publish a event and return a list of values returned by its
        subscribers.

        :param event: The event to publish.
        :param args: The positional arguments to pass to the event's
                     subscribers.
        :param kwargs: The keyword arguments to pass to the event's
                       subscribers.
        z"Exception during event publicationNz/publish %s: args = %s, kwargs = %s, result = %s)r2rir�rr'r�)r�r.�args�kwargsr4r/rtr!r!r"�publishus

zEventMixin.publishN)T)	r�r�r�r*r�r�r�r2r5r!r!r!r"r+Gs
r+c@s^eZdZdd�Zdd�Zddd�Zdd	�Zd
d�Zdd
�Zdd�Z	e
dd��Ze
dd��ZdS)�	SequencercCsi|_i|_t�|_dS)N)�_preds�_succsry�_nodes)r�r!r!r"r��szSequencer.__init__cCs|jj|�dS)N)r9r�)r��noder!r!r"�add_node�szSequencer.add_nodeFcCs�||jkr|jj|�|r�x&t|jj|f��D]}|j||�q.Wx&t|jj|f��D]}|j||�qVWx&t|jj��D]\}}|sz|j|=qzWx&t|jj��D]\}}|s�|j|=q�WdS)N)r9r�ryr7rr8r�rg)r�r:ZedgesrTr3rqrrr!r!r"�remove_node�s
zSequencer.remove_nodecCs<||kst�|jj|t��j|�|jj|t��j|�dS)N)r=r7�
setdefaultryr�r8)r��pred�succr!r!r"r��sz
Sequencer.addcCs�||kst�y|j|}|j|}Wn tk
rDtd|��YnXy|j|�|j|�Wn$tk
r�td||f��YnXdS)Nz%r not a successor of anythingz%r not a successor of %r)r=r7r8�KeyErrorr�r�)r�r>r?�predsZsuccsr!r!r"r��s

zSequencer.removecCs||jkp||jkp||jkS)N)r7r8r9)r��stepr!r!r"�is_step�szSequencer.is_stepcCs�|j|�std|��g}g}t�}|j|�xd|r�|jd�}||krd||kr�|j|�|j|�q0|j|�|j|�|jj|f�}|j	|�q0Wt
|�S)NzUnknown: %rr)rCr�ryr'rBr�r�r7r�extend�reversed)r��finalr4Ztodo�seenrBrAr!r!r"�	get_steps�s"





zSequencer.get_stepscsVdg�g�i�i�g�|j��������fdd��x�D]}|�kr:�|�q:W�S)Nrc
s��d�|<�d�|<�dd7<�j|�y�|}Wntk
rVg}YnXxR|D]J}|�kr��|�t�|�|��|<q^|�kr^t�|�|��|<q^W�|�|kr�g}x �j�}|j|�||kr�Pq�Wt|�}�j|�dS)Nrr)r'ri�minrB�tuple)r:Z
successorsZ	successorZconnected_componentZ	component)�graph�index�
index_counter�lowlinksr4�stack�
strongconnectr!r"rP�s.



z3Sequencer.strong_connections.<locals>.strongconnect)r8)r�r:r!)rKrLrMrNr4rOrPr"�strong_connections�s"
zSequencer.strong_connectionscCsrdg}x8|jD].}|j|}x|D]}|jd||f�q"WqWx|jD]}|jd|�qHW|jd�dj|�S)Nzdigraph G {z  %s -> %s;z  %s;�}�
)r7r'r9r0)r�r4r?rAr>r:r!r!r"�dot
s


z
Sequencer.dotN)F)
r�r�r�r�r;r<r�r�rCrH�propertyrQrTr!r!r!r"r6�s

3r6�.tar.gz�.tar.bz2�.tar�.zip�.tgz�.tbz�.whlTc
sf��fdd�}tjj���t���d}|dkr�|jd�r>d}nH|jd�rRd}d	}n4|jd�rfd}d
}n |jd�rzd}d}ntd|��z�|dkr�t|d�}|r�|j�}xD|D]}||�q�Wn.tj	||�}|r�|j
�}x|D]}||�q�W|dk�r6tjddk�r6x.|j
�D]"}	t|	jt��s|	jjd�|	_�qWdd�}
|
|_|j��Wd|�r`|j�XdS)NcsTt|t�s|jd�}tjjtjj�|��}|j��sD|�tjkrPt	d|��dS)Nzutf-8zpath outside destination: %r)
rOr�decoder9r:r�r0r<r;r�)r:rT)�dest_dir�plenr!r"�
check_paths


zunarchive.<locals>.check_path�.zip�.whl�zip�.tar.gz�.tgzZtgzzr:gz�.tar.bz2�.tbzZtbzzr:bz2z.tarZtarrzUnknown format for %rrrZzutf-8cSsBytj||�Stjk
r<}ztt|���WYdd}~XnXdS)z:Run tarfile.tar_fillter, but raise the expected ValueErrorN)�tarfileZ
tar_filterZFilterErrorr��str)�memberr:�excr!r!r"�extraction_filterPsz$unarchive.<locals>.extraction_filter)rarb)rdre)rfrg)r9r:r�r>r�r�rZnamelistrhr�ZgetnamesrJraZ
getmembersrOr'rr]rlZ
extractallrl)Zarchive_filenamer^�formatr�r`�archiver��namesr'Ztarinforlr!)r^r_r"�	unarchivesL






rpcCs�tj�}t|�}t|d��b}xZtj|�D]L\}}}x@|D]8}tjj||�}||d�}	tjj|	|�}
|j||
�q8Wq(WWdQRX|S)z*zip a directory tree into a BytesIO objectr�N)	�io�BytesIOr>rr9�walkr:r0rz)Z	directoryr4ZdlenZzf�rootr�r�r'ZfullZrelrEr!r!r"�zip_dir`s
rur$�K�M�G�T�Pc@sreZdZdZddd�Zdd�Zdd	�Zd
d�Zdd
�Ze	dd��Z
e	dd��Zdd�Ze	dd��Z
e	dd��ZdS)�ProgressZUNKNOWNr�dcCs<|dks||kst�||_|_||_d|_d|_d|_dS)NrF)r=rI�cur�max�started�elapsed�done)r�ZminvalZmaxvalr!r!r"r�wszProgress.__init__cCsV|j|kst�|jdks&||jks&t�||_tj�}|jdkrF||_n||j|_dS)N)rIr=r~r}�timerr�)r�ZcurvalZnowr!r!r"�updates
zProgress.updatecCs |dkst�|j|j|�dS)Nr)r=r�r})r�Zincrr!r!r"�	increment�szProgress.incrementcCs|j|j�|S)N)r�rI)r�r!r!r"r�szProgress.startcCs |jdk	r|j|j�d|_dS)NT)r~r�r�)r�r!r!r"�stop�s
z
Progress.stopcCs|jdkr|jS|jS)N)r~�unknown)r�r!r!r"�maximum�szProgress.maximumcCsD|jrd}n4|jdkrd}n$d|j|j|j|j}d|}|S)Nz100 %z ?? %gY@z%3d %%)r�r~r}rI)r�r4rrr!r!r"�
percentage�s
zProgress.percentagecCs:|dkr|jdks|j|jkr$d}ntjdtj|��}|S)Nrz??:??:??z%H:%M:%S)r~r}rIr�ZstrftimeZgmtime)r�Zdurationr4r!r!r"�format_duration�szProgress.format_durationcCs�|jrd}|j}n^d}|jdkr&d}nJ|jdks<|j|jkrBd}n.t|j|j�}||j|j}|d|j}d||j|�fS)NZDonezETA rrz%s: %sr�)r�r�r~r}rI�floatr�)r�rF�tr!r!r"�ETA�s
zProgress.ETAcCsN|jdkrd}n|j|j|j}xtD]}|dkr6P|d}q(Wd||fS)Nrgi�g@�@z%d %sB/s)r�r}rI�UNITS)r�r4Zunitr!r!r"�speed�s

zProgress.speedN)rr|)r�r�r�r�r�r�r�rr�rUr�r�r�r�r�r!r!r!r"r{ts

	r{z\{([^}]*)\}z[^/\\,{]\*\*|\*\*[^/\\,}]z^[^{]*\}|\{[^}]*$cCs<tj|�rd}t||��tj|�r4d}t||��t|�S)zAExtended globbing function that supports ** and {opt1,opt2,opt3}.z7invalid glob %r: recursive glob "**" must be used alonez2invalid glob %r: mismatching set marker '{' or '}')�_CHECK_RECURSIVE_GLOBr�r��_CHECK_MISMATCH_SET�_iglob)�	path_globr�r!r!r"r�s

rccstj|d�}t|�dkrpt|�dks,t|��|\}}}x�|jd�D](}x"tdj|||f��D]
}|Vq\WqBWn�d|kr�x�t|�D]
}|Vq�Wn�|jdd�\}}|dkr�d}|dkr�d}n|jd�}|jd	�}xHtj	|�D]:\}}}	tj
j|�}x"ttj
j||��D]}
|
V�qWq�WdS)
NrrZr�r$z**r�rr7�\)�	RICH_GLOBr2r>r=r�r0�	std_iglobr?r9rsr:r%)r�Zrich_path_globrFryrD�itemr:Zradical�dirr�r(r!r!r"r��s*


r�)�HTTPSHandler�match_hostname�CertificateErrorc@seZdZdZdZdd�ZdS)�HTTPSConnectionNTcCsPtj|j|jf|j�}t|dd�r0||_|j�tt	d�sp|j
rHt	j}nt	j}t	j
||j|j|t	j|j
d�|_nxt	jt	j�}|jt	jO_|jr�|j|j|j�i}|j
r�t	j|_|j|j
d�tt	dd�r�|j|d<|j
|f|�|_|j
o�|j�rLy$t|jj�|j�tjd|j�Wn0tk
�rJ|jjtj�|jj��YnXdS)	NZ_tunnel_hostF�
SSLContext)�	cert_reqsZssl_version�ca_certs)ZcafileZHAS_SNIZserver_hostnamezHost verified: %s) r�Zcreate_connection�host�port�timeoutrK�sockZ_tunnelrI�sslr�Z
CERT_REQUIREDZ	CERT_NONEZwrap_socketZkey_fileZ	cert_fileZPROTOCOL_SSLv23r�ZoptionsZOP_NO_SSLv2Zload_cert_chainZverify_modeZload_verify_locations�check_domainr�Zgetpeercertr�r�r�ZshutdownZ	SHUT_RDWRrl)r�r�r��contextr4r!r!r"�connect
s>


zHTTPSConnection.connect)r�r�r�r�r�r�r!r!r!r"r�sr�c@s&eZdZd	dd�Zdd�Zdd�ZdS)
r�TcCstj|�||_||_dS)N)�BaseHTTPSHandlerr�r�r�)r�r�r�r!r!r"r�0s
zHTTPSHandler.__init__cOs$t||�}|jr |j|_|j|_|S)a
            This is called to create a connection instance. Normally you'd
            pass a connection class to do_open, but it doesn't actually check for
            a class, and just expects a callable. As long as we behave just as a
            constructor would have, we should be OK. If it ever changes so that
            we *must* pass a class, we'll create an UnsafeHTTPSConnection class
            which just sets check_domain to False in the class definition, and
            choose which one to pass to do_open.
            )r�r�r�)r�r3r4r4r!r!r"�_conn_maker5s


zHTTPSHandler._conn_makercCsVy|j|j|�Stk
rP}z&dt|j�kr>td|j��n�WYdd}~XnXdS)Nzcertificate verify failedz*Unable to verify server certificate for %s)Zdo_openr�rri�reasonr�r�)r��reqrr!r!r"�
https_openEszHTTPSHandler.https_openN)T)r�r�r�r�r�r�r!r!r!r"r�/s
r�c@seZdZdd�ZdS)�HTTPSOnlyHandlercCstd|��dS)NzAUnexpected HTTP request on what should be a secure connection: %s)r)r�r�r!r!r"�	http_openYszHTTPSOnlyHandler.http_openN)r�r�r�r�r!r!r!r"r�Xsr���c@seZdZddd�ZdS)�HTTPr$NcKs&|dkrd}|j|j||f|��dS)Nr)�_setup�_connection_class)r�r�r�r4r!r!r"r�esz
HTTP.__init__)r$N)r�r�r�r�r!r!r!r"r�dsr�c@seZdZddd�ZdS)�HTTPSr$NcKs&|dkrd}|j|j||f|��dS)Nr)r�r�)r�r�r�r4r!r!r"r�mszHTTPS.__init__)r$N)r�r�r�r�r!r!r!r"r�lsr�c@seZdZddd�Zdd�ZdS)�	TransportrcCs||_tjj||�dS)N)r�rr�r�)r�r��use_datetimer!r!r"r�tszTransport.__init__cCsb|j|�\}}}tdkr(t||jd�}n6|js>||jdkrT||_|tj|�f|_|jd}|S)Nr�r�)r�rr)r�r�)�
get_host_info�	_ver_infor�r��_connection�_extra_headersrZHTTPConnection)r�r��h�ehZx509r4r!r!r"�make_connectionxs
zTransport.make_connectionN)r)r�r�r�r�r�r!r!r!r"r�ss
r�c@seZdZddd�Zdd�ZdS)�
SafeTransportrcCs||_tjj||�dS)N)r�rr�r�)r�r�r�r!r!r"r��szSafeTransport.__init__cCsz|j|�\}}}|si}|j|d<tdkr:t|df|�}n<|jsP||jdkrl||_|tj|df|�f|_|jd}|S)Nr�r�r�rr)r�r�)r�r�r�r�r�r�rr�)r�r�r�r�r4r4r!r!r"r��s


zSafeTransport.make_connectionN)r)r�r�r�r�r�r!r!r!r"r��s
r�c@seZdZdd�ZdS)�ServerProxyc	Kst|jdd�|_}|dk	r^t|�\}}|jdd�}|dkr@t}nt}|||d�|d<}||_tjj	||f|�dS)Nr�r�rZhttps)r��	transport)
rBr�rrr�r�r�rr�r�)	r�Zurir4r��scheme�_r�Ztclsr�r!r!r"r��szServerProxy.__init__N)r�r�r�r�r!r!r!r"r��sr�cKs.tjddkr|d7}nd|d<t||f|�S)NrrZ�br$�newline)rJrar�)r(r�r4r!r!r"�	_csv_open�s
r�c@s4eZdZed�ed�ed�d�Zdd�Zdd�Zd	S)
�CSVBaser��"rS)Z	delimiterZ	quotecharZlineterminatorcCs|S)Nr!)r�r!r!r"�	__enter__�szCSVBase.__enter__cGs|jj�dS)N)r_rl)r�r�r!r!r"�__exit__�szCSVBase.__exit__N)r�r�r�ri�defaultsr�r�r!r!r!r"r��s
r�c@s(eZdZdd�Zdd�Zdd�ZeZdS)�	CSVReadercKs\d|kr4|d}tjddkr,tjd�|�}||_nt|dd�|_tj|jf|j�|_dS)Nr_rrZzutf-8r:r)	rJrarbrcr_r��csvrr�)r�r4r_r!r!r"r��szCSVReader.__init__cCs|S)Nr!)r�r!r!r"�__iter__�szCSVReader.__iter__cCsJt|j�}tjddkrFx,t|�D] \}}t|t�s"|jd�||<q"W|S)NrrZzutf-8)�nextrrJrarrOrr])r�r4rr�r!r!r"r��s

zCSVReader.nextN)r�r�r�r�r�r��__next__r!r!r!r"r��sr�c@seZdZdd�Zdd�ZdS)�	CSVWritercKs$t|d�|_tj|jf|j�|_dS)Nr�)r�r_r��writerr�)r�r(r4r!r!r"r��szCSVWriter.__init__cCsRtjddkrBg}x*|D]"}t|t�r0|jd�}|j|�qW|}|jj|�dS)NrrZzutf-8)rJrarOrr�r'r��writerow)r��rowrr�r!r!r"r��s


zCSVWriter.writerowN)r�r�r�r�r�r!r!r!r"r��sr�csHeZdZeej�Zded<d�fdd�	Zdd�Zdd	�Zd
d�Z	�Z
S)
�Configurator�inc_convertZincNcs"tt|�j|�|ptj�|_dS)N)�superr�r�r9r�r@)r��configr@)�	__class__r!r"r��szConfigurator.__init__c
s���fdd���jd�}t|�s*�j|�}�jdd�}�jdf�}|r\t�fdd�|D��}��fdd��D�}t|�}|||�}|r�x$|j�D]\}}	t||�|	��q�W|S)	Ncszt|ttf�r*t|��fdd�|D��}nLt|t�rld|krH�j|�}qvi}x(|D]}�||�||<qRWn
�j|�}|S)Ncsg|]}�|��qSr!r!)r%r)�convertr!r"r&�szBConfigurator.configure_custom.<locals>.convert.<locals>.<listcomp>z())rOr�rJ�type�dict�configure_customr�)�or4rq)r�r�r!r"r��s


z.Configurator.configure_custom.<locals>.convertz()r�z[]csg|]}�|��qSr!r!)r%r�)r�r!r"r&sz1Configurator.configure_custom.<locals>.<listcomp>cs$g|]}t|�r|��|�f�qSr!)r)r%rq)r�r�r!r"r&s)rBr�r�rJr�rg�setattr)
r�r�rUZpropsr3rgr4r4rrrr!)r�r�r�r"r��s


zConfigurator.configure_customcCs4|j|}t|t�r0d|kr0|j|�|j|<}|S)Nz())r�rOr�r�)r�rXr4r!r!r"�__getitem__s
zConfigurator.__getitem__c	CsFtjj|�stjj|j|�}tj|ddd��}tj|�}WdQRX|S)z*Default converter for the inc:// protocol.rzutf-8)r�N)	r9r:�isabsr0r@rbr�rerf)r�rtr�r4r!r!r"r�s
zConfigurator.inc_convert)N)r�r�r�r�rZvalue_convertersr�r�r�r��
__classcell__r!r!)r�r"r��s
r�c@s&eZdZd	dd�Zdd�Zdd�ZdS)
�SubprocessMixinFNcCs||_||_dS)N)�verbose�progress)r�r�r�r!r!r"r�+szSubprocessMixin.__init__cCsn|j}|j}xT|j�}|sP|dk	r0|||�q|sBtjjd�ntjj|jd��tjj�qW|j�dS)z�
        Read lines from a subprocess' output stream and either pass to a progress
        callable (if specified) or write progress information to sys.stderr.
        Nr�zutf-8)	r�r��readlinerJ�stderrrzr]�flushrl)r�r_r�r�r�r3r!r!r"r/szSubprocessMixin.readercKs�tj|ftjtjd�|��}tj|j|jdfd�}|j�tj|j|jdfd�}|j�|j	�|j
�|j
�|jdk	r�|jdd�n|jr�t
jjd�|S)N)�stdoutr�r�)r�r3r�zdone.�mainzdone.
)�
subprocess�Popen�PIPE�	threadingZThreadrr�rr��waitr0r�r�rJrz)r��cmdr4rTZt1Zt2r!r!r"�run_commandDs
zSubprocessMixin.run_command)FN)r�r�r�r�rr�r!r!r!r"r�*s
r�cCstjdd|�j�S)z,Normalize a python package name a la PEP 503z[-_.]+r
)r
�subrQ)r'r!r!r"�normalize_nameUsr�)NN)r�)N)N)rVrWrXrYrZr[r\)NT)r$rvrwrxryrz)r�r�)�rb�collectionsr�
contextlibr�Zglobrr�rqreZloggingr9r�r
rr�r��ImportErrorr�rJrhr|rmr�Zdummy_threadingr�r$r�compatrrr	r
rrr
rrrrrrrrrrrrZ	getLoggerr�r��COMMAr�r1ZIDENTZEXTRA_IDENTZVERSPECZRELOPZBARE_CONSTRAINTSZ
DIRECT_REFZCONSTRAINTSZ
EXTRA_LISTZEXTRASZREQUIREMENTr+ZRELOP_IDENTr.r6rGrLrPrVrYrur{�contextmanagerrr�r�r�r�r�r�r�r��VERBOSEr�rhr�r�r�rrr�Irrrrrrrrr r"r#r+r6ZARCHIVE_EXTENSIONSrprur�r{r�r�r�r�r�r�r�r�r�r�rar�r�r�r�r�r�r�r�r�r�r�r�r�r!r!r!r"�<module>s�
X

,

%

	/
	7

)



,H
C]


*)	
:+_vendor/distlib/__pycache__/metadata.cpython-36.pyc000064400000064571151733136300016267 0ustar003

�Pf���@sdZddlmZddlZddlmZddlZddlZddlZddl	m
Z
mZddlm
Z
mZmZddlmZdd	lmZmZdd
lmZmZeje�ZGdd�de
�ZGd
d�de
�ZGdd�de
�ZGdd�de
�ZdddgZdZ dZ!ej"d�Z#ej"d�Z$dGZ%dHZ&dIZ'dJZ(dKZ)dLZ*dMZ+e,�Z-e-j.e%�e-j.e&�e-j.e(�e-j.e*�ej"d8�Z/d9d:�Z0d;d<�Z1ddddd%ddd d!d"d#d+d,d$d&d'd-d/d0d5d1d2d*d)d(d.d3d4d6d7d=�Z2dNZ3dOZ4dPZ5dQZ6dRZ7dSZ8dTZ9e:�Z;ej"d>�Z<dUd@dA�Z=GdBdC�dCe:�Z>dDZ?dEZ@GdFd�de:�ZAdS)VzImplementation of the Metadata for Python packages PEPs.

Supports all metadata formats (1.0, 1.1, 1.2, and 2.0 experimental).
�)�unicode_literalsN)�message_from_file�)�DistlibException�__version__)�StringIO�string_types�	text_type)�	interpret)�extract_by_key�
get_extras)�
get_scheme�PEP440_VERSION_REc@seZdZdZdS)�MetadataMissingErrorzA required metadata is missingN)�__name__�
__module__�__qualname__�__doc__�rr�/usr/lib/python3.6/metadata.pyrsrc@seZdZdZdS)�MetadataConflictErrorz>Attempt to read or write metadata fields that are conflictual.N)rrrrrrrrr src@seZdZdZdS)� MetadataUnrecognizedVersionErrorz Unknown metadata version number.N)rrrrrrrrr$src@seZdZdZdS)�MetadataInvalidErrorzA metadata value is invalidN)rrrrrrrrr(sr�Metadata�PKG_INFO_ENCODING�PKG_INFO_PREFERRED_VERSIONzutf-8z1.1z

       \|z	
        �Metadata-Version�Name�Version�Platform�Summary�Description�Keywords�	Home-page�Author�Author-email�License�Supported-Platform�
Classifier�Download-URL�	Obsoletes�Provides�Requires�
Maintainer�Maintainer-email�Obsoletes-Dist�Project-URL�
Provides-Dist�
Requires-Dist�Requires-Python�Requires-External�Private-Version�Obsoleted-By�Setup-Requires-Dist�	Extension�Provides-Extraz"extra\s*==\s*("([^"]+)"|'([^']+)')cCs<|dkrtS|dkrtS|dkr$tS|dkr0tSt|��dS)Nz1.0z1.1z1.2z2.0)�_241_FIELDS�_314_FIELDS�_345_FIELDS�_426_FIELDSr)�versionrrr�_version2fieldlistgsr?c	Cs�dd�}g}x.|j�D]"\}}|gddfkr.q|j|�qWddddg}xt|D]l}|tkrld|krl|jd�|tkr�d|kr�|jd�|tkr�d|kr�|jd�|tkrNd|krN|jd�qNWt|�d	kr�|d
St|�d
kr�td��d|ko�||t	�}d|k�o
||t
�}d|k�o||t�}t|�t|�t|�d	k�rFtd��|�rl|�rl|�rlt
|k�rlt
S|�rvdS|�r�dSdS)
z5Detect the best version depending on the fields used.cSsx|D]}||krdSqWdS)NTFr)�keys�markers�markerrrr�_has_markerus
z"_best_version.<locals>._has_marker�UNKNOWNNz1.0z1.1z1.2z2.0rrzUnknown metadata setz(You used incompatible 1.1/1.2/2.0 fields)�items�appendr:�remover;r<r=�lenr�_314_MARKERS�_345_MARKERS�_426_MARKERS�intr)	�fieldsrCr@�key�valueZpossible_versionsZis_1_1Zis_1_2Zis_2_0rrr�
_best_versionssB




rP)�metadata_version�namer>�platformZsupported_platform�summary�description�keywords�	home_page�author�author_email�
maintainer�maintainer_email�license�
classifier�download_url�obsoletes_dist�
provides_dist�
requires_dist�setup_requires_dist�requires_python�requires_external�requires�provides�	obsoletes�project_urlZprivate_versionZobsoleted_by�	extensionZprovides_extraz[^A-Za-z0-9.]+FcCs0|r$tjd|�}tjd|jdd��}d||fS)zhReturn the distribution name with version.

    If for_filename is true, return a filename-escaped form.�-� �.z%s-%s)�	_FILESAFE�sub�replace)rRr>Zfor_filenamerrr�_get_name_and_version�srpc@s
eZdZdZd?dd�Zdd�Zdd	�Zd
d�Zdd
�Zdd�Z	dd�Z
dd�Zdd�Zdd�Z
dd�Zd@dd�Zdd�Zdd �Zd!d"�Zd#d$�ZdAd%d&�ZdBd'd(�ZdCd)d*�Zd+d,�Zefd-d.�ZdDd/d0�ZdEd1d2�Zd3d4�Zd5d6�Zd7d8�Zd9d:�Zd;d<�Z d=d>�Z!dS)F�LegacyMetadataaaThe legacy metadata of a release.

    Supports versions 1.0, 1.1 and 1.2 (auto-detected). You can
    instantiate the class with one of these arguments (or none):
    - *path*, the path to a metadata file
    - *fileobj* give a file-like object with metadata as content
    - *mapping* is a dict-like object
    - *scheme* is a version scheme name
    N�defaultcCsz|||gjd�dkrtd��i|_g|_d|_||_|dk	rH|j|�n.|dk	r\|j|�n|dk	rv|j|�|j	�dS)N�z'path, fileobj and mapping are exclusive)
�count�	TypeError�_fieldsZrequires_filesZ
_dependencies�scheme�read�	read_file�update�set_metadata_version)�self�path�fileobj�mappingrwrrr�__init__�s
zLegacyMetadata.__init__cCst|j�|jd<dS)NzMetadata-Version)rPrv)r|rrrr{sz#LegacyMetadata.set_metadata_versioncCs|jd||f�dS)Nz%s: %s
)�write)r|r~rRrOrrr�_write_fieldszLegacyMetadata._write_fieldcCs
|j|�S)N)�get)r|rRrrr�__getitem__szLegacyMetadata.__getitem__cCs|j||�S)N)�set)r|rRrOrrr�__setitem__szLegacyMetadata.__setitem__cCs8|j|�}y|j|=Wntk
r2t|��YnXdS)N)�
_convert_namerv�KeyError)r|rR�
field_namerrr�__delitem__s

zLegacyMetadata.__delitem__cCs||jkp|j|�|jkS)N)rvr�)r|rRrrr�__contains__s
zLegacyMetadata.__contains__cCs(|tkr|S|jdd�j�}tj||�S)Nrj�_)�_ALL_FIELDSro�lower�_ATTR2FIELDr�)r|rRrrrr�szLegacyMetadata._convert_namecCs|tks|tkrgSdS)NrD)�_LISTFIELDS�_ELEMENTSFIELD)r|rRrrr�_default_value%szLegacyMetadata._default_valuecCs&|jdkrtjd|�Stjd|�SdS)N�1.0�1.1�
)r�r�)rQ�_LINE_PREFIX_PRE_1_2rn�_LINE_PREFIX_1_2)r|rOrrr�_remove_line_prefix*s
z"LegacyMetadata._remove_line_prefixcCs|tkr||St|��dS)N)r��AttributeError)r|rRrrr�__getattr__0szLegacyMetadata.__getattr__FcCst|d|d|�S)zhReturn the distribution name with version.

        If filesafe is true, return a filename-escaped form.rr)rp)r|Zfilesaferrr�get_fullname;szLegacyMetadata.get_fullnamecCs|j|�}|tkS)z+return True if name is a valid metadata key)r�r�)r|rRrrr�is_fieldAs
zLegacyMetadata.is_fieldcCs|j|�}|tkS)N)r�r�)r|rRrrr�is_multi_fieldFs
zLegacyMetadata.is_multi_fieldc
Cs.tj|ddd�}z|j|�Wd|j�XdS)z*Read the metadata values from a file path.�rzutf-8)�encodingN)�codecs�openry�close)r|�filepath�fprrrrxJszLegacyMetadata.readcCs�t|�}|d|jd<xxtD]p}||kr*q|tkrh|j|�}|tkrZ|dk	rZdd�|D�}|j||�q||}|dk	r|dkr|j||�qW|j�dS)z,Read the metadata values from a file object.zmetadata-versionzMetadata-VersionNcSsg|]}t|jd���qS)�,)�tuple�split)�.0rOrrr�
<listcomp>_sz,LegacyMetadata.read_file.<locals>.<listcomp>rD)rrvr�r�Zget_all�_LISTTUPLEFIELDSr�r{)r|Zfileob�msg�field�valuesrOrrrryRs

zLegacyMetadata.read_filec
Cs0tj|ddd�}z|j||�Wd|j�XdS)z&Write the metadata fields to filepath.�wzutf-8)r�N)r�r��
write_filer�)r|r��skip_unknownr�rrrr�hszLegacyMetadata.writecCs�|j�x�t|d�D]�}|j|�}|r:|dgdgfkr:q|tkrX|j||dj|��q|tkr�|dkr�|jd
kr�|jdd�}n|jdd	�}|g}|t	kr�d
d�|D�}x|D]}|j|||�q�WqWdS)z0Write the PKG-INFO format data to a file object.zMetadata-VersionrDr�r!�1.0�1.1r�z	
        z	
       |cSsg|]}dj|��qS)r�)�join)r�rOrrrr��sz-LegacyMetadata.write_file.<locals>.<listcomp>N)r�r�)
r{r?r�r�r�r�r�rQror�)r|Z
fileobjectr�r�r�rOrrrr�ps$


zLegacyMetadata.write_filecs��fdd�}|snHt|d�r>x<|j�D]}||||�q&Wnx|D]\}}|||�qDW|r~x|j�D]\}}|||�qhWdS)a�Set metadata values from the given iterable `other` and kwargs.

        Behavior is like `dict.update`: If `other` has a ``keys`` method,
        they are looped over and ``self[key]`` is assigned ``other[key]``.
        Else, ``other`` is an iterable of ``(key, value)`` iterables.

        Keys that don't match a metadata field or that have an empty value are
        dropped.
        cs"|tkr|r�j�j|�|�dS)N)r�r�r�)rNrO)r|rr�_set�sz#LegacyMetadata.update.<locals>._setr@N)�hasattrr@rE)r|�other�kwargsr��k�vr)r|rrz�s

zLegacyMetadata.updatecCsn|j|�}|tks|dkrPt|ttf�rPt|t�rJdd�|jd�D�}q~g}n.|tkr~t|ttf�r~t|t�rz|g}ng}tj	t
j��rB|d}t|j
�}|tkr�|dk	r�x�|D](}|j|jd�d�s�tjd	|||�q�Wn`|tko�|dk	�r|j|��sBtjd
|||�n0|tk�rB|dk	�rB|j|��sBtjd
|||�|tk�r`|dk�r`|j|�}||j|<dS)z"Control then set a metadata field.rcSsg|]}|j��qSr)�strip)r�r�rrrr��sz&LegacyMetadata.set.<locals>.<listcomp>r�rN�;rz$'%s': '%s' is not valid (field '%s')z.'%s': '%s' is not a valid version (field '%s')r!)r�r��
isinstance�listr�rr�r��loggerZisEnabledFor�loggingZWARNINGr
rw�_PREDICATE_FIELDS�is_valid_matcher�warning�_VERSIONS_FIELDS�is_valid_constraint_list�_VERSION_FIELDS�is_valid_version�_UNICODEFIELDSr�rv)r|rRrOZproject_namerwr�rrrr��s@








zLegacyMetadata.setcCs�|j|�}||jkr*|tkr&|j|�}|S|tkr@|j|}|S|tkr�|j|}|dkr^gSg}x6|D].}|tkr�|j|�qh|j|d|df�qhW|S|tkr�|j|}t	|t
�r�|jd�S|j|S)zGet a metadata field.Nrrr�)r�rv�_MISSINGr�r�r�r�rFr�r�rr�)r|rRrrrO�res�valrrrr��s.








zLegacyMetadata.getcs|j�gg}}xd
D]}||kr|j|�qW|rT|gkrTddj|�}t|��xdD]}||krZ|j|�qZW|ddkr�||fSt|j���fd	d
�}xdt|ft�jft	�j
ffD]F\}}x<|D]4}	|j|	d�}
|
dk	o�||
�r�|jd|	|
f�q�Wq�W||fS)zkCheck if the metadata is compliant. If strict is True then raise if
        no Name or Version are providedrrzmissing required metadata: %sz, �	Home-pager$zMetadata-Versionz1.2cs*x$|D]}�j|jd�d�sdSqWdS)Nr�rFT)r�r�)rOr�)rwrr�are_valid_constraintss
z3LegacyMetadata.check.<locals>.are_valid_constraintsNzWrong value for '%s': %s)rr)r�r$)r{rFr�rr
rwr�r�r�r�r�r�)r|�strict�missing�warnings�attrr�r�rMZ
controllerr�rOr)rwr�check�s2




zLegacyMetadata.checkcCs�|j�dB}i}x,|D]$\}}|s.||jkr||||<qW|ddk�r�dK}x�|D]F\}}|sn||jkrV|d&k�r�||||<qVd,d-�||D�||<qVWnF|dd.k�r�dO}x2|D]*\}}|�s�||jk�r�||||<�q�W|S)Pz�Return fields as a dict.

        Field names will be converted to use the underscore-lowercase style
        instead of hyphen-mixed case (i.e. home_page instead of Home-page).
        rQ�Metadata-VersionrRrr>rrTr rW�	Home-pagerXr$rY�Author-emailr\r&rUr!rVr"rSr�classifiersr(r^�Download-URLz1.2ra�
Requires-Distrc�Requires-Pythonrd�Requires-Externalr`�
Provides-Distr_�Obsoletes-Distrh�Project-URLrZr-r[�Maintainer-emailcSsg|]}dj|��qS)r�)r�)r��urrrr�Gsz)LegacyMetadata.todict.<locals>.<listcomp>z1.1rfr+rer,rgr*�rQr��rRr�r>r�rTr �rWr��rXr$�rYr��r\r&�rUr!�rVr"�rSr�r�r(�r^r�)
r�r�r�r�r�r�r�r�r�r�r�r�r��rar��rcr��rdr��r`r��r_r��rhr��rZr-�r[r�)r�r�r�r�r�r�r�r��rfr+�rer,�rgr*)r�r�r�)r{rv)r|Zskip_missingZmapping_1_0�datarNr�Zmapping_1_2Zmapping_1_1rrr�todictsP
zLegacyMetadata.todictcCs<|ddkr(xdD]}||kr||=qW|d|7<dS)NzMetadata-Versionz1.1r*r,r+z
Requires-Dist)r*r,r+r)r|�requirementsr�rrr�add_requirementsUs


zLegacyMetadata.add_requirementscCstt|d��S)NzMetadata-Version)r�r?)r|rrrr@`szLegacyMetadata.keysccsx|j�D]
}|Vq
WdS)N)r@)r|rNrrr�__iter__cszLegacyMetadata.__iter__cs�fdd��j�D�S)Ncsg|]}�|�qSrr)r�rN)r|rrr�hsz)LegacyMetadata.values.<locals>.<listcomp>)r@)r|r)r|rr�gszLegacyMetadata.valuescs�fdd��j�D�S)Ncsg|]}|�|f�qSrr)r�rN)r|rrr�ksz(LegacyMetadata.items.<locals>.<listcomp>)r@)r|r)r|rrEjszLegacyMetadata.itemscCsd|jj|j|jfS)Nz
<%s %s %s>)�	__class__rrRr>)r|rrr�__repr__mszLegacyMetadata.__repr__)NNNrr)F)F)F)N)F)F)"rrrrr�r{r�r�r�r�r�r�r�r�r�r�r�r�rxryr�r�rzr�r�r�r�r�r�r@r�r�rEr�rrrrrq�s>	




,
,
;rqzpydist.jsonz
metadata.jsonc@s�eZdZdZejd�Zejdej�Ze	Z
ejd�ZdZde
ZffdId�Zd	Zd
ZeffedJfe
dKfedLfd�ZdMZdNdd�ZedO�ZdefZdefZdefdefeeedefeeeedefdPdQd�
Z[[dd �ZdRd!d"�Zd#d$�Zed%d&��Z ed'd(��Z!e!j"d)d(��Z!dSd*d+�Z#ed,d-��Z$ed.d/��Z%e%j"d0d/��Z%d1d2�Z&d3d4�Z'd5d6�Z(d7d8�Z)d9d:d;d<d=dd>�Z*d?d@�Z+dTdCdD�Z,dEdF�Z-dGdH�Z.dS)Urz�
    The metadata of a release. This implementation uses 2.0 (JSON)
    metadata where possible. If not possible, it wraps a LegacyMetadata
    instance which handles the key-value metadata format.
    z
^\d+(\.\d+)*$z!^[0-9A-Z]([0-9A-Z_.-]*[0-9A-Z])?$z	.{1,2047}z2.0zdistlib (%s)�legacy)rRr>rTzqname version license summary description author author_email keywords platform home_page classifiers download_urlzwextras run_requires test_requires build_requires dev_requires provides meta_requires obsoleted_by supports_environments)rQrRr>rT�_legacy�_datarwNrrcCs0|||gjd�dkrtd��d|_d|_||_|dk	rzy|j||�||_Wn*tk
rvt||d�|_|j�YnXn�d}|r�t	|d��}|j
�}WdQRXn|r�|j
�}|dkr�|j|jd�|_ndt
|t�s�|jd�}ytj|�|_|j|j|�Wn0tk
�r*tt|�|d�|_|j�YnXdS)Nrsz'path, fileobj and mapping are exclusive)rrw�rb)rQ�	generatorzutf-8)r~rw)rtrur�rrw�_validate_mappingrrq�validater�rx�METADATA_VERSION�	GENERATORr�r	�decode�json�loads�
ValueErrorr)r|r}r~rrwr��frrrr��s<



zMetadata.__init__rRr>r\rVrTz
Requires-DistzSetup-Requires-DistzProvides-Extrar(�Download-URL�Metadata-Version)
�run_requires�build_requires�dev_requiresZ
test_requires�
meta_requires�extras�modules�
namespaces�exports�commandsr�Z
source_urlrQc
CsZtj|d�}tj|d�}||k�r||\}}|jr^|dkrP|dkrHdn|�}n|jj|�}n�|dkrjdn|�}|dkr�|jj||�}n�t�}|}|jjd�}	|	�r|dkr�|	jd	|�}nR|dkr�|	jd
�}	|	r�|	j||�}n.|	jd�}	|	�s�|jjd�}	|	�r|	j||�}||k�rV|}n:||k�r4tj||�}n"|j�rJ|jj|�}n|jj|�}|S)
N�common_keys�mapped_keysrrrrr��
extensionszpython.commandszpython.detailszpython.exports)rrrrr�)�object�__getattribute__r�r�r)
r|rN�common�mapped�lkZmaker�resultrO�sentinel�drrrr�sF




zMetadata.__getattribute__cCsH||jkrD|j|\}}|p |j|krD|j|�}|sDtd||f��dS)Nz.'%s' is an invalid value for the '%s' property)�SYNTAX_VALIDATORSrw�matchr)r|rNrOrw�pattern�
exclusions�mrrr�_validate_values

zMetadata._validate_valuecCs*|j||�tj|d�}tj|d�}||kr�||\}}|jrV|dkrJt�||j|<nf|d
krj||j|<nR|jjdi�}|dkr�||d	<n2|dkr�|jd
i�}|||<n|jdi�}|||<nh||kr�tj|||�nP|dk�rt|t	��r|j
�}|�r|j�}ng}|j�r||j|<n
||j|<dS)Nrrrrrrr�rzpython.commandszpython.detailszpython.exportsrV)rrrrr�)r'rrr��NotImplementedErrorr�
setdefault�__setattr__r�rr�r�)r|rNrOrrrr�r!rrrr*s>




zMetadata.__setattr__cCst|j|jd�S)NT)rprRr>)r|rrr�name_and_version@szMetadata.name_and_versioncCsF|jr|jd}n|jjdg�}d|j|jf}||krB|j|�|S)Nz
Provides-Distrfz%s (%s))r�rr)rRr>rF)r|r�srrrrfDs
zMetadata.providescCs |jr||jd<n
||jd<dS)Nz
Provides-Distrf)r�r)r|rOrrrrfOsc
Cs�|jr|}n�g}t|pg|j�}xl|D]d}d|kr@d|kr@d}n8d|krNd}n|jd�|k}|rx|jd�}|rxt||�}|r&|j|d�q&WxNdD]F}d|}	|	|kr�|j|	�|jjd	|g�}|j|j|||d
��q�W|S)a�
        Base method to get dependencies, given a set of extras
        to satisfy and an optional environment context.
        :param reqts: A list of sometimes-wanted dependencies,
                      perhaps dependent on extras and environment.
        :param extras: A list of optional components being requested.
        :param env: An optional environment for marker evaluation.
        �extra�environmentTre�build�dev�testz:%s:z%s_requires)r�env)r/r0r1)	r�rrr�r
�extendrGr�get_requirements)
r|�reqtsrr2rr!�includerBrN�errrr4Vs0	




zMetadata.get_requirementscCs|jr|j�S|jS)N)r��_from_legacyr)r|rrr�
dictionary�szMetadata.dictionarycCs|jrt�nt|j|j�SdS)N)r�r(rr�DEPENDENCY_KEYS)r|rrr�dependencies�szMetadata.dependenciescCs|jrt�n|jj|�dS)N)r�r(rrz)r|rOrrrr;�sc	Cs�|jd�|jkrt��g}x0|jj�D]"\}}||kr&||kr&|j|�q&W|rfddj|�}t|��x"|j�D]\}}|j|||�qpWdS)NrQzMissing metadata items: %sz, )	r�rr�MANDATORY_KEYSrErFr�rr')	r|rrwr�rNr%r�r�r�rrrr�szMetadata._validate_mappingcCsB|jr.|jjd�\}}|s|r>tjd||�n|j|j|j�dS)NTz#Metadata: missing: %s, warnings: %s)r�r�r�r�rrrw)r|r�r�rrrr�s
zMetadata.validatecCs(|jr|jjd�St|j|j�}|SdS)NT)r�r�rr�
INDEX_KEYS)r|rrrrr��szMetadata.todictc
Cs�|jr|jst�|j|jd�}|jjd�}x2dD]*}||kr2|dkrLd	}n|}||||<q2W|jd
g�}|dgkrzg}||d<d}x2|D]*\}}||kr�||r�d||ig||<q�W|j|d<i}i}	|S)N)rQrTrRr>r\rTrUr]r�r"�rVrarrbrrerf)rRr>r\rTrUr]�rar�rbr)r?r@)r�r�AssertionErrorrrr�r�rf)
r|rZlmdr��nk�kwr@�okrXrZrrrr8�s0


zMetadata._from_legacyrrr&r r!)rRr>r\rTrUr�cCs�dd�}|jr|jst�t�}|j}x*|jj�D]\}}||kr2||||<q2W||j|j�}||j|j	�}|j
r�t|j
�|d<t|�|d<t|�|d<|S)NcSs�t�}x�|D]�}|jd�}|jd�}|d}xb|D]Z}|rN|rN|j|�q2d}|r^d|}|rx|rtd||f}n|}|jdj||f��q2WqW|S)Nr-r.rer>z
extra == "%s"z(%s) and %sr�)r�r��addr�)Zentriesr5r7r-r2Zrlistr�rBrrr�process_entries�s"



z,Metadata._to_legacy.<locals>.process_entrieszProvides-Extraz
Requires-DistzSetup-Requires-Dist)rr�rArq�LEGACY_MAPPINGrErrrrr�sorted)r|rFrZnmdrBrDZr1Zr2rrr�
_to_legacy�szMetadata._to_legacyFTcCs�||gjd�dkrtd��|j�|r`|jr4|j}n|j�}|rP|j||d�q�|j||d�n^|jrp|j�}n|j}|r�t	j
||dddd�n.tj|dd��}t	j
||dddd�WdQRXdS)	Nrz)Exactly one of path and fileobj is needed)r�Trs)Zensure_ascii�indentZ	sort_keysr�zutf-8)
rtr
rr�rIr�r�r8rr�dumpr�r�)r|r}r~r�r�Z	legacy_mdr!rrrrr��s&

zMetadata.writecCs�|jr|jj|�nt|jjdg�}d}x"|D]}d|kr,d|kr,|}Pq,W|dkrhd|i}|jd|�n t|d�t|�B}t|�|d<dS)Nrr.r-rer)r�r�rr)�insertr�rH)r|r�r�always�entryZrsetrrrr�s
zMetadata.add_requirementscCs*|jpd}|jpd}d|jj|j||fS)Nz	(no name)z
no versionz<%s %s %s (%s)>)rRr>r�rrQ)r|rRr>rrrr�(s

zMetadata.__repr__)r�)r�)r�)r�)r�rrw)NNNrr)rRr>r\rVrT)rN)r
N)N)NN)NNFT)/rrrr�re�compileZMETADATA_VERSION_MATCHER�IZNAME_MATCHERrZVERSION_MATCHERZSUMMARY_MATCHERrrrr<r=r:r"�	__slots__r�r�rr�Z	none_list�dictZ	none_dictrrr'r*�propertyr+rf�setterr4r9r;rrr�r8rGrIr�r�r�rrrrrvsx


,+

'
*	%
)rrrrr r!r"r#r$r%r&)rrrrr'r r!r"r#r$r%r&r(r)r*r+r,)r*r+r,r(r))rrrrr'r r!r"r#r$r%r-r.r&r(r)r/r0r1r2r3r4)r1r2r3r/r4r-r.r0)rrrrr'r r!r"r#r$r%r-r.r&r(r)r/r0r1r2r3r4r5r6r7r8r9)r5r9r6r7r8)r2r/r1)r3)r)rr(r*r,r+r/r1r2r4r0r'r7r9r8)r0)r")r$r-r r!)F)BrZ
__future__rr�Zemailrrr�rOr>rr�compatrrr	rAr
�utilrrr>r
rZ	getLoggerrr�rrrr�__all__rrrPr�r�r:r;rIr<rJr=rKr�r�rzZEXTRA_REr?rPr�r�r�r�r�r�r�r�rr�rmrprqZMETADATA_FILENAMEZWHEEL_METADATA_FILENAMErrrrr�<module>	s�








9


	_vendor/distlib/__pycache__/locators.cpython-36.opt-1.pyc000064400000113116151733136300017262 0ustar003

�PfE��@s@ddlZddlmZddlZddlZddlZddlZddlZyddlZWne	k
rdddl
ZYnXddlZddlm
Z
ddlmZmZmZmZmZmZmZmZmZmZmZmZmZmZmZddlm Z m!Z!m"Z"ddl#m$Z$ddl%m&Z&m'Z'm(Z(m)Z)m*Z*m+Z+m,Z,m-Z-m.Z.dd	l/m0Z0m1Z1dd
l2m3Z3m4Z4ej5e6�Z7ej8d�Z9ej8dej:�Z;ej8d
�Z<dZ=d-dd�Z>Gdd�de�Z?Gdd�de@�ZAGdd�deA�ZBGdd�deA�ZCGdd�de@�ZDGdd�deA�ZEGdd�deA�ZFGdd �d eA�ZGGd!d"�d"eA�ZHGd#d$�d$eA�ZIeIeG�eEd%d&d'�d(d)�ZJeJjKZKej8d*�ZLGd+d,�d,e@�ZMdS).�N)�BytesIO�)�DistlibException)�urljoin�urlparse�
urlunparse�url2pathname�pathname2url�queue�quote�unescape�string_types�build_opener�HTTPRedirectHandler�	text_type�Request�	HTTPError�URLError)�Distribution�DistributionPath�	make_dist)�Metadata)	�cached_property�parse_credentials�ensure_slash�split_filename�get_project_data�parse_requirement�parse_name_and_version�ServerProxy�normalize_name)�
get_scheme�UnsupportedVersionError)�Wheel�
is_compatiblez^(\w+)=([a-f0-9]+)z;\s*charset\s*=\s*(.*)\s*$ztext/html|application/x(ht)?mlzhttps://pypi.python.org/pypicCs |dkrt}t|dd�}|j�S)z�
    Return all distribution names known by an index.
    :param url: The URL of the index.
    :return: A list of all known distribution names.
    Ng@)�timeout)�
DEFAULT_INDEXr�
list_packages)�url�client�r*�/usr/lib/python3.6/locators.py�get_all_distribution_names)sr,c@s$eZdZdZdd�ZeZZZdS)�RedirectHandlerzE
    A class to work around a bug in some Python 3.2.x releases.
    c	Cs�d}xdD]}||kr
||}Pq
W|dkr0dSt|�}|jdkrpt|j�|�}t|d�rh|j||�n|||<tj||||||�S)N�location�uri��replace_header)r.r/)r�schemerZget_full_url�hasattrr1�BaseRedirectHandler�http_error_302)	�self�req�fp�code�msg�headersZnewurl�keyZurlpartsr*r*r+r5=s


zRedirectHandler.http_error_302N)�__name__�
__module__�__qualname__�__doc__r5Zhttp_error_301Zhttp_error_303Zhttp_error_307r*r*r*r+r-4sr-c@s�eZdZdZd/Zd0Zd1ZdZed2Zd3dd�Z	dd�Z
dd�Zdd�Zdd�Z
dd�Zee
e�Zdd�Zdd�Zdd�Zd d!�Zd"d#�Zd$d%�Zd&d'�Zd(d)�Zd*d+�Zd4d-d.�ZdS)5�LocatorzG
    A base class for locators - things that locate distributions.
    �.tar.gz�.tar.bz2�.tar�.zip�.tgz�.tbz�.egg�.exe�.whl�.pdfN�defaultcCs,i|_||_tt��|_d|_tj�|_dS)a^
        Initialise an instance.
        :param scheme: Because locators look for most recent versions, they
                       need to know the version scheme to use. This specifies
                       the current PEP-recommended scheme - use ``'legacy'``
                       if you need to support existing distributions on PyPI.
        N)	�_cacher2rr-�opener�matcherr
�Queue�errors)r6r2r*r*r+�__init__cs
zLocator.__init__cCsXg}xN|jj�sRy|jjd�}|j|�Wn|jjk
rDwYnX|jj�qW|S)z8
        Return any errors which have occurred.
        F)rQ�empty�get�appendZEmpty�	task_done)r6�result�er*r*r+�
get_errorsvszLocator.get_errorscCs|j�dS)z>
        Clear any errors which may have been logged.
        N)rY)r6r*r*r+�clear_errors�szLocator.clear_errorscCs|jj�dS)N)rM�clear)r6r*r*r+�clear_cache�szLocator.clear_cachecCs|jS)N)�_scheme)r6r*r*r+�_get_scheme�szLocator._get_schemecCs
||_dS)N)r])r6�valuer*r*r+�_set_scheme�szLocator._set_schemecCstd��dS)a=
        For a given project, get a dictionary mapping available versions to Distribution
        instances.

        This should be implemented in subclasses.

        If called from a locate() request, self.matcher will be set to a
        matcher for the requirement to satisfy, otherwise it will be None.
        z Please implement in the subclassN)�NotImplementedError)r6�namer*r*r+�_get_project�s
zLocator._get_projectcCstd��dS)zJ
        Return all the distribution names known to this locator.
        z Please implement in the subclassN)ra)r6r*r*r+�get_distribution_names�szLocator.get_distribution_namescCsL|jdkr|j|�}n2||jkr,|j|}n|j�|j|�}||j|<|S)z�
        For a given project, get a dictionary mapping available versions to Distribution
        instances.

        This calls _get_project to do all the work, and just implements a caching layer on top.
        N)rMrcrZ)r6rbrWr*r*r+�get_project�s



zLocator.get_projectcCsPt|�}tj|j�}d}|jd�}|r6tt|�|j�}|jdkd|j	k|||fS)zu
        Give an url a score which can be used to choose preferred URLs
        for a given project release.
        Tz.whl�httpszpypi.python.org)
r�	posixpath�basename�path�endswithr$r#�
wheel_tagsr2�netloc)r6r(�trhZ
compatibleZis_wheelr*r*r+�	score_url�s
zLocator.score_urlcCsR|}|rN|j|�}|j|�}||kr(|}||kr@tjd||�ntjd||�|S)a{
        Choose one of two URLs where both are candidates for distribution
        archives for the same version of a distribution (for example,
        .tar.gz vs. zip).

        The current implementation favours https:// URLs over http://, archives
        from PyPI over those from other locations, wheel compatibility (if a
        wheel) and then the archive name.
        zNot replacing %r with %rzReplacing %r with %r)rn�logger�debug)r6�url1�url2rW�s1�s2r*r*r+�
prefer_url�s


zLocator.prefer_urlcCs
t||�S)zZ
        Attempt to split a filename in project name, version and Python version.
        )r)r6�filename�project_namer*r*r+r�szLocator.split_filenamecCsdd�}d}t|�\}}}}}	}
|
j�jd�r<tjd||
�tj|
�}|rX|j�\}}
nd\}}
|}|r�|ddkr�|dd�}|jd��r2yrt	|�}t
||j��r�|dkr�d	}n||j|�}|�r�|j|j
|jt|||||	d
f�djdd
�|jD��d�}Wn0tk
�r.}ztjd|�WYdd}~XnXn�|j|j��r�tj|�}}x�|jD]�}|j|��rV|dt|��}|j||�}|�s�tjd|�nJ|\}}}|�s�|||��r�|||t|||||	d
f�d�}|�r�||d<P�qVW|�r|�r|
|d|<|S)a
        See if a URL is a candidate for a download URL for a project (the URL
        has typically been scraped from an HTML page).

        If it is, a dictionary is returned with keys "name", "version",
        "filename" and "url"; otherwise, None is returned.
        cSst|�t|�kS)N)r )Zname1Zname2r*r*r+�same_project�sz:Locator.convert_url_to_download_info.<locals>.same_projectNzegg=z %s: version hint in fragment: %rr�/z.whlTr0z, cSs"g|]}djt|dd����qS)�.�N)�join�list)�.0�vr*r*r+�
<listcomp>sz8Locator.convert_url_to_download_info.<locals>.<listcomp>)rb�versionrvr(zpython-versionzinvalid path for wheel: %sz No match for project/version: %s)rbr�rvr(zpython-versionz	%s_digest)NN���r�)r�lower�
startswithrorp�HASHER_HASH�match�groupsrjr#r$rkrbr�rvrr|�pyver�	Exception�warning�downloadable_extensionsrgrh�lenr)r6r(rwrxrWr2rlri�params�query�frag�m�algo�digestZorigpath�wheel�includerXrvZextrmrbr�r�r*r*r+�convert_url_to_download_info�sf

 
z$Locator.convert_url_to_download_infocCs4d}x*dD]"}d|}||kr
|||f}Pq
W|S)z�
        Get a digest from a dictionary by looking at keys of the form
        'algo_digest'.

        Returns a 2-tuple (algo, digest) if found, else None. Currently
        looks only for SHA256, then MD5.
        N�sha256�md5z	%s_digest)r�r�r*)r6�inforWr�r<r*r*r+�_get_digest)s
zLocator._get_digestc	Cs�|jd�}|jd�}||kr,||}|j}nt|||jd�}|j}|j|�|_}|d}||d|<|j|dkr�|j|j|�|_|dj|t	��j
|�||_|||<dS)z�
        Update a result dictionary (the final result from _get_project) with a
        dictionary for a specific version, which typically holds information
        gleaned from a filename or URL for an archive for the distribution.
        rbr�)r2r(�digests�urlsN)�pop�metadatarr2r�r��
source_urlru�
setdefault�set�add�locator)	r6rWr�rbr��dist�mdr�r(r*r*r+�_update_version_data9s

zLocator._update_version_dataFcCs�d}t|�}|dkr td|��t|j�}|j|j�|_}tjd|t|�j	�|j
|j�}t|�dk�r8g}|j
}	x�|D]|}
|
d
kr�qzyJ|j|
�s�tjd||
�n,|s�|	|
�jr�|j|
�ntjd|
|j�Wqztk
r�tjd	||
�YqzXqzWt|�d
k�rt||jd�}|�r8tjd|�|d}||}|�r�|j�rN|j|_|jdi�j|t��|_i}|jdi�}
x&|jD]}||
k�r~|
|||<�q~W||_d|_|S)a
        Find the most recent distribution which matches the given
        requirement.

        :param requirement: A requirement of the form 'foo (1.0)' or perhaps
                            'foo (>= 1.0, < 2.0, != 1.3)'
        :param prereleases: If ``True``, allow pre-release versions
                            to be located. Otherwise, pre-release versions
                            are not returned.
        :return: A :class:`Distribution` instance, or ``None`` if no such
                 distribution could be located.
        NzNot a valid requirement: %rzmatcher: %s (%s)r{r�r�z%s did not match %rz%skipping pre-release version %s of %szerror matching %s with %rr)r<zsorted list: %s)r�r�r�)rrr!r2rO�requirementrorp�typer=rerbr�Z
version_classr�Z
is_prereleaserUr�r��sortedr<ZextrasrTr��
download_urlsr�)r6r��prereleasesrW�rr2rO�versionsZslistZvcls�kr��dZsdr(r*r*r+�locatePsT





zLocator.locate)rBrCrDrErFrG)rHrIrJ)rK)rJ)rL)F)r=r>r?r@�source_extensions�binary_extensions�excluded_extensionsrkr�rRrYrZr\r^r`�propertyr2rcrdrernrurr�r�r�r�r*r*r*r+rASs.

FrAcs0eZdZdZ�fdd�Zdd�Zdd�Z�ZS)�PyPIRPCLocatorz�
    This locator uses XML-RPC to locate distributions. It therefore
    cannot be used with simple mirrors (that only mirror file content).
    cs*tt|�jf|�||_t|dd�|_dS)z�
        Initialise an instance.

        :param url: The URL to use for XML-RPC.
        :param kwargs: Passed to the superclass constructor.
        g@)r%N)�superr�rR�base_urlrr))r6r(�kwargs)�	__class__r*r+rR�szPyPIRPCLocator.__init__cCst|jj��S)zJ
        Return all the distribution names known to this locator.
        )r�r)r')r6r*r*r+rd�sz%PyPIRPCLocator.get_distribution_namescCsiid�}|jj|d�}x�|D]�}|jj||�}|jj||�}t|jd�}|d|_|d|_|jd�|_	|jdg�|_
|jd�|_t|�}|r|d	}	|	d
|_
|j|	�|_||_|||<xB|D]:}	|	d
}
|j|	�}|dj|t��j|
�||d|
<q�WqW|S)
N)r�r�T)r2rbr��license�keywords�summaryrr(r�r�)r)Zpackage_releasesZrelease_urlsZrelease_datarr2rbr�rTr�r�r�rr�r�r�r�r�r�r�)r6rbrWr�rr��datar�r�r�r(r�r*r*r+rc�s0






zPyPIRPCLocator._get_project)r=r>r?r@rRrdrc�
__classcell__r*r*)r�r+r��sr�cs0eZdZdZ�fdd�Zdd�Zdd�Z�ZS)�PyPIJSONLocatorzw
    This locator uses PyPI's JSON interface. It's very limited in functionality
    and probably not worth using.
    cs tt|�jf|�t|�|_dS)N)r�r�rRrr�)r6r(r�)r�r*r+rR�szPyPIJSONLocator.__init__cCstd��dS)zJ
        Return all the distribution names known to this locator.
        zNot available from this locatorN)ra)r6r*r*r+rd�sz&PyPIJSONLocator.get_distribution_namescCsiid�}t|jdt|��}�y�|jj|�}|j�j�}tj|�}t	|j
d�}|d}|d|_|d|_|j
d�|_|j
dg�|_|j
d	�|_t|�}||_|d
}	|||j<x`|d
D]T}
|
d}|jj|�|j|
�|j|<|d
j|jt��j|�|j|
�|d|<q�Wx�|d
j�D]�\}}||jk�r:�q"t	|j
d�}
|j|
_||
_t|
�}||_|||<x\|D]T}
|
d}|jj|�|j|
�|j|<|d
j|t��j|�|j|
�|d|<�qpW�q"WWn@tk
�r}z"|jjt|��tjd|�WYdd}~XnX|S)N)r�r�z%s/json)r2r�rbr�r�r�r�r�r(r�ZreleaseszJSON fetch failed: %s) rr�rrN�open�read�decode�json�loadsrr2rbr�rTr�r�r�rr�r�r�r�r�r�r��itemsr�rQ�putrro�	exception)r6rbrWr(�respr�r�r�r�r�r�r�ZinfosZomd�odistrXr*r*r+rc�sT





"	zPyPIJSONLocator._get_project)r=r>r?r@rRrdrcr�r*r*)r�r+r��sr�c@s`eZdZdZejdejejBejB�Z	ejdejejB�Z
dd�Zejdej�Ze
dd��Zd	S)
�Pagez4
    This class represents a scraped HTML page.
    z�
(rel\s*=\s*(?:"(?P<rel1>[^"]*)"|'(?P<rel2>[^']*)'|(?P<rel3>[^>\s
]*))\s+)?
href\s*=\s*(?:"(?P<url1>[^"]*)"|'(?P<url2>[^']*)'|(?P<url3>[^>\s
]*))
(\s+rel\s*=\s*(?:"(?P<rel4>[^"]*)"|'(?P<rel5>[^']*)'|(?P<rel6>[^>\s
]*)))?
z!<base\s+href\s*=\s*['"]?([^'">]+)cCs4||_||_|_|jj|j�}|r0|jd�|_dS)zk
        Initialise an instance with the Unicode page contents and the URL they
        came from.
        rN)r�r�r(�_base�search�group)r6r�r(r�r*r*r+rRs
z
Page.__init__z[^a-z0-9$&+,/:;=?@.#%_\\|-]cCs�dd�}t�}x�|jj|j�D]�}|jd�}|dpZ|dpZ|dpZ|dpZ|dpZ|d	}|d
pr|dpr|d}t|j|�}t|�}|jj	d
d�|�}|j
||f�qWt|dd�dd�}|S)z�
        Return the URLs of all the links on a page together with information
        about their "rel" attribute, for determining which ones to treat as
        downloads and which ones to queue for further scraping.
        cSs,t|�\}}}}}}t||t|�|||f�S)zTidy up an URL.)rrr)r(r2rlrir�r�r�r*r*r+�clean%szPage.links.<locals>.cleanr0Zrel1Zrel2Zrel3Zrel4Zrel5Zrel6rqrrZurl3cSsdt|jd��S)Nz%%%2xr)�ordr�)r�r*r*r+�<lambda>3szPage.links.<locals>.<lambda>cSs|dS)Nrr*)rmr*r*r+r�7sT)r<�reverse)r��_href�finditerr��	groupdictrr�r�	_clean_re�subr�r�)r6r�rWr�r��relr(r*r*r+�linkss
z
Page.linksN)r=r>r?r@�re�compile�I�S�Xr�r�rRr�rr�r*r*r*r+r�sr�cs�eZdZdZejdd�dd�d�Zd�fdd	�	Zd
d�Zdd
�Z	dd�Z
ejdej
�Zdd�Zdd�Zdd�Zdd�Zdd�Zejd�Zdd�Z�ZS)�SimpleScrapingLocatorz�
    A locator which scrapes HTML pages to locate downloads for a distribution.
    This runs multiple threads to do the I/O; performance is at least as good
    as pip's PackageFinder, which works in an analogous fashion.
    cCstjtt�d�j�S)N)Zfileobj)�gzipZGzipFilerr�r�)�br*r*r+r�EszSimpleScrapingLocator.<lambda>cCs|S)Nr*)r�r*r*r+r�Fs)Zdeflater�ZnoneN�
csftt|�jf|�t|�|_||_i|_t�|_t	j
�|_t�|_d|_
||_tj�|_tj�|_dS)a�
        Initialise an instance.
        :param url: The root URL to use for scraping.
        :param timeout: The timeout, in seconds, to be applied to requests.
                        This defaults to ``None`` (no timeout specified).
        :param num_workers: The number of worker threads you want to do I/O,
                            This defaults to 10.
        :param kwargs: Passed to the superclass.
        FN)r�r�rRrr�r%�_page_cacher��_seenr
rP�	_to_fetch�
_bad_hosts�skip_externals�num_workers�	threading�RLock�_lock�_gplock)r6r(r%r�r�)r�r*r+rRIs



zSimpleScrapingLocator.__init__cCsJg|_x>t|j�D]0}tj|jd�}|jd�|j�|jj|�qWdS)z�
        Threads are created only when get_project is called, and terminate
        before it returns. They are there primarily to parallelise I/O (i.e.
        fetching web pages).
        )�targetTN)	�_threads�ranger�r�ZThread�_fetchZ	setDaemon�startrU)r6�irmr*r*r+�_prepare_threadscs
z&SimpleScrapingLocator._prepare_threadscCs>x|jD]}|jjd�qWx|jD]}|j�q$Wg|_dS)zu
        Tell all the threads to terminate (by sending a sentinel value) and
        wait for them to do so.
        N)r�r�r�r|)r6rmr*r*r+�
_wait_threadsps
z#SimpleScrapingLocator._wait_threadscCs�iid�}|j�x||_||_t|jdt|��}|jj�|jj�|j	�z&t
jd|�|jj
|�|jj�Wd|j�X|`WdQRX|S)N)r�r�z%s/zQueueing %s)r�rWrwrr�rr�r[r�r�rorpr�r�r|r�)r6rbrWr(r*r*r+rc}s



z"SimpleScrapingLocator._get_projectz<\b(linux-(i\d86|x86_64|arm\w+)|win(32|-amd64)|macosx-?\d+)\bcCs|jj|�S)zD
        Does an URL refer to a platform-specific download?
        )�platform_dependentr�)r6r(r*r*r+�_is_platform_dependent�sz,SimpleScrapingLocator._is_platform_dependentc
CsT|j|�rd}n|j||j�}tjd||�|rP|j�|j|j|�WdQRX|S)a%
        See if an URL is a suitable download for a project.

        If it is, register information in the result dictionary (for
        _get_project) about the specific version it's for.

        Note that the return value isn't actually used other than as a boolean
        value.
        Nzprocess_download: %s -> %s)r�r�rwrorpr�r�rW)r6r(r�r*r*r+�_process_download�s

z'SimpleScrapingLocator._process_downloadc
Cs�t|�\}}}}}}|j|j|j|j�r2d}n~|jrL|j|j�rLd}nd|j|j�s^d}nR|d
krld}nD|dkrzd}n6|j|�r�d}n&|j	dd�d	}	|	j
�d
kr�d}nd}tjd||||�|S)z�
        Determine whether a link URL from a referring page and with a
        particular "rel" attribute should be queued for scraping.
        F�homepage�download�httprf�ftp�:rrZ	localhostTz#should_queue: %s (%s) from %s -> %s)r�r�)r�rfr�)
rrjr�r�r�r�r�r�r��splitr�rorp)
r6�linkZreferrerr�r2rlri�_rW�hostr*r*r+�
_should_queue�s*


z#SimpleScrapingLocator._should_queuecCs�x�|jj�}z�yz|r�|j|�}|dkr(wx\|jD]R\}}||jkr0|jj|�|j|�r0|j|||�r0tj	d||�|jj
|�q0WWn2tk
r�}z|jj
t
|��WYdd}~XnXWd|jj�X|sPqWdS)z�
        Get a URL to fetch from the work queue, get the HTML page, examine its
        links for download candidates and candidates for further scraping.

        This is a handy method to run in a thread.
        NzQueueing %s from %s)r�rT�get_pager�r�r�r�r�rorpr�r�rQrrV)r6r(�pager�r�rXr*r*r+r��s&


&zSimpleScrapingLocator._fetchcCsXt|�\}}}}}}|dkr:tjjt|��r:tt|�d�}||jkr`|j|}tj	d||��n�|j
dd�d}d}||jkr�tj	d||��n�t|d	d
id�}�z�y�tj	d|�|j
j||jd
�}	tj	d|�|	j�}
|
jdd�}tj|��r�|	j�}|	j�}
|
jd�}|�r"|j|}||
�}
d}tj|�}|�r@|jd�}y|
j|�}
Wn tk
�rn|
jd�}
YnXt|
|�}||j|<Wn�tk
�r�}z |jdk�r�tjd||�WYdd}~Xn�t k
�r}z2tjd||�|j!�|jj"|�WdQRXWYdd}~Xn2t#k
�rB}ztjd||�WYdd}~XnXWd||j|<X|S)a
        Get the HTML for an URL, possibly from an in-memory cache.

        XXX TODO Note: this cache is never actually cleared. It's assumed that
        the data won't get stale over the lifetime of a locator instance (not
        necessarily true for the default_locator).
        �filez
index.htmlzReturning %s from cache: %sr�rrNzSkipping %s due to bad host %szAccept-encodingZidentity)r;zFetching %s)r%z
Fetched %szContent-Typer0zContent-Encodingzutf-8zlatin-1i�zFetch failed: %s: %s)$r�osri�isdirrrrr�rorpr�r�rrNr�r%r�rT�HTML_CONTENT_TYPEr�Zgeturlr��decoders�CHARSETr�r�r��UnicodeErrorr�rr9r�rr�r�r�)r6r(r2rlrir�rWr�r7r�r;Zcontent_typeZ	final_urlr��encoding�decoderr�rXr*r*r+r�sZ	







&$zSimpleScrapingLocator.get_pagez<a href=[^>]*>([^<]+)<cCsPt�}|j|j�}|s$td|j��x&|jj|j�D]}|j|jd��q4W|S)zJ
        Return all the distribution names known to this locator.
        zUnable to get %sr)	r�rr�r�_distname_rer�r�r�r�)r6rWrr�r*r*r+rd$sz,SimpleScrapingLocator.get_distribution_names)Nr�)r=r>r?r@�zlibZ
decompressrrRr�r�rcr�r�r�r�r�r�r�r�rrrdr�r*r*)r�r+r�;s"

;
r�cs8eZdZdZ�fdd�Zdd�Zdd�Zdd	�Z�ZS)
�DirectoryLocatorz?
    This class locates distributions in a directory tree.
    csN|jdd�|_tt|�jf|�tjj|�}tjj|�sDt	d|��||_
dS)a�
        Initialise an instance.
        :param path: The root of the directory tree to search.
        :param kwargs: Passed to the superclass constructor,
                       except for:
                       * recursive - if True (the default), subdirectories are
                         recursed into. If False, only the top-level directory
                         is searched,
        �	recursiveTzNot a directory: %rN)r�rr�r
rRrri�abspathrr�base_dir)r6rir�)r�r*r+rR5s
zDirectoryLocator.__init__cCs|j|j�S)z�
        Should a filename be considered as a candidate for a distribution
        archive? As well as the filename, the directory which contains it
        is provided, though not used by the current implementation.
        )rjr�)r6rv�parentr*r*r+�should_includeFszDirectoryLocator.should_includec		Cs�iid�}x�tj|j�D]v\}}}xb|D]Z}|j||�r(tjj||�}tddttjj|��dddf�}|j	||�}|r(|j
||�q(W|jsPqW|S)N)r�r�rr0)r�walkrrrir|rr	rr�r�r)	r6rbrW�root�dirs�files�fnr(r�r*r*r+rcNs

zDirectoryLocator._get_projectc	Cs�t�}x�tj|j�D]x\}}}xd|D]\}|j||�r$tjj||�}tddttjj	|��dddf�}|j
|d�}|r$|j|d�q$W|jsPqW|S)zJ
        Return all the distribution names known to this locator.
        rr0Nrb)
r�rrrrrir|rr	rr�r�r)r6rWrrrrr(r�r*r*r+rd^s
z'DirectoryLocator.get_distribution_names)	r=r>r?r@rRrrcrdr�r*r*)r�r+r
0s
r
c@s eZdZdZdd�Zdd�ZdS)�JSONLocatora
    This locator uses special extended metadata (not available on PyPI) and is
    the basis of performant dependency resolution in distlib. Other locators
    require archive downloads before dependencies can be determined! As you
    might imagine, that can be slow.
    cCstd��dS)zJ
        Return all the distribution names known to this locator.
        zNot available from this locatorN)ra)r6r*r*r+rdxsz"JSONLocator.get_distribution_namescCs�iid�}t|�}|r�x�|jdg�D]�}|ddks$|ddkrBq$t|d|d|jd	d
�|jd�}|j}|d|_d
|kr�|d
r�d|d
f|_|jdi�|_|jdi�|_|||j	<|dj
|j	t��j|d�q$W|S)N)r�r�rZptypeZsdistZ	pyversion�sourcerbr�r�zPlaceholder for summary)r�r2r(r�r�Zrequirements�exportsr�)
rrTrr2r�r�r�Zdependenciesrr�r�r�r�)r6rbrWr�r�r�r�r*r*r+rc~s&



"zJSONLocator._get_projectN)r=r>r?r@rdrcr*r*r*r+rqsrcs(eZdZdZ�fdd�Zdd�Z�ZS)�DistPathLocatorz�
    This locator finds installed distributions in a path. It can be useful for
    adding to an :class:`AggregatingLocator`.
    cstt|�jf|�||_dS)zs
        Initialise an instance.

        :param distpath: A :class:`DistributionPath` instance to search.
        N)r�rrR�distpath)r6rr�)r�r*r+rR�szDistPathLocator.__init__cCsP|jj|�}|dkr iid�}n,|j|d|jt|jg�id|jtdg�ii}|S)N)r�r�r�r�)rZget_distributionr�r�r�)r6rbr�rWr*r*r+rc�szDistPathLocator._get_project)r=r>r?r@rRrcr�r*r*)r�r+r�s
rcsReZdZdZ�fdd�Z�fdd�Zdd�Zeej	j
e�Z	dd	�Zd
d�Z�Z
S)�AggregatingLocatorzI
    This class allows you to chain and/or merge a list of locators.
    cs*|jdd�|_||_tt|�jf|�dS)a�
        Initialise an instance.

        :param locators: The list of locators to search.
        :param kwargs: Passed to the superclass constructor,
                       except for:
                       * merge - if False (the default), the first successful
                         search from any of the locators is returned. If True,
                         the results from all locators are merged (this can be
                         slow).
        �mergeFN)r�r�locatorsr�rrR)r6rr�)r�r*r+rR�szAggregatingLocator.__init__cs*tt|�j�x|jD]}|j�qWdS)N)r�rr\r)r6r�)r�r*r+r\�szAggregatingLocator.clear_cachecCs ||_x|jD]
}||_qWdS)N)r]rr2)r6r_r�r*r*r+r`�szAggregatingLocator._set_schemecCs�i}x�|jD]�}|j|�}|r|jr�|jdi�}|jdi�}|j|�|jd�}|r�|r�x6|j�D]*\}}	||kr�|||	O<qb|	||<qbW|jd�}
|r�|
r�|
j|�q|jdkr�d}n$d}x|D]}|jj|�r�d}Pq�W|r|}PqW|S)Nr�r�TF)rrerrT�updater�rOr�)r6rbrWr�r�rr�Zdfr�rZdd�foundr*r*r+rc�s8





zAggregatingLocator._get_projectcCs@t�}x4|jD]*}y||j�O}Wqtk
r6YqXqW|S)zJ
        Return all the distribution names known to this locator.
        )r�rrdra)r6rWr�r*r*r+rd�s
z)AggregatingLocator.get_distribution_names)r=r>r?r@rRr\r`r�rAr2�fgetrcrdr�r*r*)r�r+r�s,rzhttps://pypi.python.org/simple/g@)r%�legacy)r2z1(?P<name>[\w-]+)\s*\(\s*(==\s*)?(?P<ver>[^)]+)\)$c@sLeZdZdZddd�Zdd�Zdd�Zd	d
�Zdd�Zd
d�Z	ddd�Z
dS)�DependencyFinderz0
    Locate dependencies for distributions.
    NcCs|pt|_t|jj�|_dS)zf
        Initialise an instance, using the specified locator
        to locate distributions.
        N)�default_locatorr�r!r2)r6r�r*r*r+rRs
zDependencyFinder.__init__cCsvtjd|�|j}||j|<||j||jf<xD|jD]:}t|�\}}tjd|||�|jj	|t
��j||f�q4WdS)z�
        Add a distribution to the finder. This will update internal information
        about who provides what.
        :param dist: The distribution to add.
        zadding distribution %szAdd to provided: %s, %s, %sN)rorpr<�
dists_by_name�distsr��providesr�providedr�r�r�)r6r�rb�pr�r*r*r+�add_distribution&s
z!DependencyFinder.add_distributioncCs|tjd|�|j}|j|=|j||jf=xN|jD]D}t|�\}}tjd|||�|j|}|j	||f�|s0|j|=q0WdS)z�
        Remove a distribution from the finder. This will update internal
        information about who provides what.
        :param dist: The distribution to remove.
        zremoving distribution %sz Remove from provided: %s, %s, %sN)
rorpr<r&r'r�r(rr)�remove)r6r�rbr*r��sr*r*r+�remove_distribution5s
z$DependencyFinder.remove_distributioncCsBy|jj|�}Wn,tk
r<|j�d}|jj|�}YnX|S)z�
        Get a version matcher for a requirement.
        :param reqt: The requirement
        :type reqt: str
        :return: A version matcher (an instance of
                 :class:`distlib.version.Matcher`).
        r)r2rOr"r�)r6�reqtrOrbr*r*r+�get_matcherGszDependencyFinder.get_matcherc	Csv|j|�}|j}t�}|j}||krrxL||D]@\}}y|j|�}Wntk
r\d}YnX|r.|j|�Pq.W|S)z�
        Find the distributions which can fulfill a requirement.

        :param reqt: The requirement.
         :type reqt: str
        :return: A set of distribution which can fulfill the requirement.
        F)r0r<r�r)r�r"r�)	r6r/rOrbrWr)r��providerr�r*r*r+�find_providersWs


zDependencyFinder.find_providersc	Cs�|j|}t�}x,|D]$}|j|�}|j|j�s|j|�qW|r^|jd||t|�f�d}nD|j|�|j|=x"|D]}|jj|t��j|�qvW|j	|�d}|S)a�
        Attempt to replace one provider with another. This is typically used
        when resolving dependencies from multiple sources, e.g. A requires
        (B >= 1.0) while C requires (B >= 1.1).

        For successful replacement, ``provider`` must meet all the requirements
        which ``other`` fulfills.

        :param provider: The provider we are trying to replace with.
        :param other: The provider we're trying to replace.
        :param problems: If False is returned, this will contain what
                         problems prevented replacement. This is currently
                         a tuple of the literal string 'cantreplace',
                         ``provider``, ``other``  and the set of requirements
                         that ``provider`` couldn't fulfill.
        :return: True if we can replace ``other`` with ``provider``, else
                 False.
        ZcantreplaceFT)
�reqtsr�r0r�r�r��	frozensetr.r�r+)	r6r1�other�problemsZrlistZ	unmatchedr-rOrWr*r*r+�try_to_replaceos"






zDependencyFinder.try_to_replaceFcCsi|_i|_i|_i|_t|p g�}d|krH|jd�|tdddg�O}t|t�rh|}}tj	d|�n4|j
j||d�}}|dkr�td|��tj	d	|�d
|_
t�}t|g�}t|g�}�x�|�r�|j�}|j}	|	|jkr�|j|�n"|j|	}
|
|k�r|j||
|�|j|jB}|j}t�}
||k�rbx2dD]*}d|}||k�r4|
t|d|�O}
�q4W||B|
B}�x>|D�]4}|j|�}|�sNtj	d|�|j
j||d�}|dk�r�|�r�|j
j|d
d�}|dk�r�tj	d|�|jd|f�n^|j|j}}||f|jk�r|j|�|j|�||k�rN||k�rN|j|�tj	d|j�xZ|D]R}|j}	|	|jk�r�|jj|t��j|�n"|j|	}
|
|k�rT|j||
|��qTW�qvWq�Wt|jj��}x.|D]&}||k|_|j�r�tj	d|j��q�Wtj	d|�||fS)a�
        Find a distribution and all distributions it depends on.

        :param requirement: The requirement specifying the distribution to
                            find, or a Distribution instance.
        :param meta_extras: A list of meta extras such as :test:, :build: and
                            so on.
        :param prereleases: If ``True``, allow pre-release versions to be
                            returned - otherwise, don't return prereleases
                            unless they're all that's available.

        Return a set of :class:`Distribution` instances and a set of
        problems.

        The distributions returned should be such that they have the
        :attr:`required` attribute set to ``True`` if they were
        from the ``requirement`` passed to ``find()``, and they have the
        :attr:`build_time_dependency` attribute set to ``True`` unless they
        are post-installation dependencies of the ``requirement``.

        The problems should be a tuple consisting of the string
        ``'unsatisfied'`` and the requirement which couldn't be satisfied
        by any distribution known to the locator.
        z:*:z:test:z:build:z:dev:zpassed %s as requirement)r�NzUnable to locate %rz
located %sT�test�build�devz:%s:z%s_requireszNo providers found for %rzCannot satisfy %rZunsatisfiedzAdding %s to install_distsz#%s is a build-time dependency only.zfind done for %s)r8r9r:)r)r'r&r3r�r,�
isinstancerrorpr�r�rZ	requestedr�r<r+r7Zrun_requiresZ
meta_requiresZbuild_requires�getattrr2r�r�Zname_and_versionr��valuesZbuild_time_dependency)r6r�Zmeta_extrasr�r�r�r6ZtodoZ
install_distsrbr5ZireqtsZsreqtsZereqtsr<rXZ	all_reqtsr�Z	providersr1�nrr*r'r*r*r+�find�s�




















zDependencyFinder.find)N)NF)r=r>r?r@rRr+r.r0r2r7r?r*r*r*r+r$s
(r$)N)Nr��iorr�Zloggingrrgr�r��ImportErrorZdummy_threadingrr0r�compatrrrrr	r
rrr
rrr4rrrrZdatabaserrrr�r�utilrrrrrrrrr r�r!r"r�r#r$Z	getLoggerr=ror�r�r�rrr&r,r-�objectrAr�r�r�r�r
rrrr%r�ZNAME_VERSION_REr$r*r*r*r+�<module>sZD,



;0E:vA&[
_vendor/distlib/__pycache__/__init__.cpython-36.pyc000064400000001703151733136300016232 0ustar003

�PfE�@snddlZdZGdd�de�ZyddlmZWn&ek
rRGdd�dej�ZYnXeje�Z	e	j
e��dS)�Nz0.2.4c@seZdZdS)�DistlibExceptionN)�__name__�
__module__�__qualname__�rr�/usr/lib/python3.6/__init__.pyrsr)�NullHandlerc@s$eZdZdd�Zdd�Zdd�ZdS)rcCsdS)Nr)�self�recordrrr�handleszNullHandler.handlecCsdS)Nr)r	r
rrr�emitszNullHandler.emitcCs
d|_dS)N)�lock)r	rrr�
createLockszNullHandler.createLockN)rrrrrrrrrrrsr)Zlogging�__version__�	Exceptionrr�ImportErrorZHandlerZ	getLoggerrZloggerZ
addHandlerrrrr�<module>s
_vendor/distlib/__pycache__/compat.cpython-36.opt-1.pyc000064400000076203151733136300016724 0ustar003

�Pfa���@sddlmZddlZddlZddlZyddlZWnek
rHdZYnXejddk�r~ddlmZe	fZ
eZddl
mZddlZddlZddlmZddlmZmZmZmZmZdd	lmZmZmZm Z m!Z!m"Z"m#Z#d
d�Zddl$Z$ddl$m%Z%m&Z&m'Z'm(Z(m)Z)m*Z*m+Z+m,Z,m-Z-e�r&dd
l$m.Z.ddl/Z/ddl0Z0ddl1Z2ddl3m3Z3ddl4Z4e5Z5ddl6m7Z8ddl6m9Z:da;dd�Z<�nddl=mZe>fZ
e>Zddl=m?ZddlZddlZddlZddl@mZmZmZm<Z<mZmZmZmZm#Z#ddlAm&Z&mZm%Z%m Z m!Z!m)Z)m*Z*m+Z+m,Z,m-Z-e�r&dd
lAm.Z.ddlBm(Z(m'Z'm"Z"ddlCjDZ/ddlAjEZ$ddlFjDZ0ddl2Z2ddlGm3Z3ddlHjIZ4eJZ5ddl6m:Z:e8Z8yddlmKZKmLZLWn8ek
�r�Gdd�deM�ZLdcdd�ZNdd�ZKYnXyddl
mOZPWn&ek
�rGd d!�d!eQ�ZPYnXydd"lmRZRWn,ek
�rLejSejTBdfd#d$�ZRYnXdd%lUmVZWeXeWd&��rleWZVn,dd'lUmYZZGd(d)�d)eZ�ZYGd*d+�d+eW�ZVydd,l[m\Z\Wnek
�r�d-d.�Z\YnXyddl]Z]Wn"ek
�r�dd/lm]Z]YnXye^Z^Wn*e_k
�r*dd0l`maZad1d2�Z^YnXyejbZbejcZcWnBedk
�r~eje�Zfefd3k�rfd4Zgnd5Zgd6d7�Zbd8d9�ZcYnXydd:lhmiZiWnHek
�r�dd;ljmkZkmlZlddlZejmd<�Znd=d>�Zod?d@�ZiYnXyddAlpmqZqWn"ek
�rddAlrmqZqYnXejddB�ddk�r,e3�jsZsnddDlpmsZsyddEl`mtZtWndek
�r�ddFl`muZuyddGlvmwZxWn ek
�r�dedIdJ�ZxYnXGdKdL�dLeu�ZtYnXyddMlymzZzWn ek
�r�dfdNdO�ZzYnXyddPl`m{Z{Wn�ek
�rzyddQl|m}Z~Wn"ek
�r4ddQlm}Z~YnXyddRl�m�Z�m�Z�m�Z�Wnek
�rdYnXGdSdT�dTe��Z{YnXyddUl�m�Z�m�Z�Wnvek
�rejmdVej��Z�dWdX�Z�GdYdZ�dZe��Z�dgd[d\�Z�Gd]d^�d^e��Z�Gd_d`�d`e��Z�Gdadb�dbeQ�Z�YnXdS)h�)�absolute_importN�)�StringIO)�FileType�)�shutil)�urlparse�
urlunparse�urljoin�urlsplit�
urlunsplit)�urlretrieve�quote�unquote�url2pathname�pathname2url�ContentTooShortError�	splittypecCst|t�r|jd�}t|�S)Nzutf-8)�
isinstance�unicode�encode�_quote)�s�r�/usr/lib/python3.6/compat.pyrs

r)	�Request�urlopen�URLError�	HTTPError�HTTPBasicAuthHandler�HTTPPasswordMgr�HTTPHandler�HTTPRedirectHandler�build_opener)�HTTPSHandler)�
HTMLParser)�ifilter)�ifilterfalsecCs<tdkrddl}|jd�atj|�}|r4|jdd�Sd|fS)zJsplituser('user[:passwd]@host[:port]') --> 'user[:passwd]', 'host[:port]'.Nrz^(.*)@(.*)$r�)�	_userprog�re�compile�match�group)�hostr*r,rrr�	splituser4s

r/)�
TextIOWrapper)	rr	r
r/rrrrr)
rr
rrrrr r!r"r#)rrr)�filterfalse)�match_hostname�CertificateErrorc@seZdZdS)r3N)�__name__�
__module__�__qualname__rrrrr3^sr3c
Cs�g}|sdS|jd�}|d|dd�}}|jd�}||krNtdt|���|sb|j�|j�kS|dkrv|jd�n>|jd	�s�|jd	�r�|jtj|��n|jtj|�j	d
d��x|D]}|jtj|��q�Wtj
dd
j|�dtj�}	|	j
|�S)zpMatching according to RFC 6125, section 6.4.3

        http://tools.ietf.org/html/rfc6125#section-6.4.3
        F�.rrN�*z,too many wildcards in certificate DNS name: z[^.]+zxn--z\*z[^.]*z\Az\.z\Z)�split�countr3�repr�lower�append�
startswithr*�escape�replacer+�join�
IGNORECASEr,)
Zdn�hostnameZ
max_wildcardsZpats�partsZleftmostZ	remainderZ	wildcards�fragZpatrrr�_dnsname_matchbs(


rFcCs�|std��g}|jdf�}x0|D](\}}|dkr"t||�r@dS|j|�q"W|s�xF|jdf�D]6}x0|D](\}}|dkrjt||�r�dS|j|�qjWq`Wt|�dkr�td|d	jtt|��f��n*t|�dkr�td
||df��ntd��dS)
a=Verify that *cert* (in decoded format as returned by
        SSLSocket.getpeercert()) matches the *hostname*.  RFC 2818 and RFC 6125
        rules are followed, but IP addresses are not accepted for *hostname*.

        CertificateError is raised on failure. On success, the function
        returns nothing.
        ztempty or no certificate, match_hostname needs a SSL socket or SSL context with either CERT_OPTIONAL or CERT_REQUIREDZsubjectAltNameZDNSNZsubjectZ
commonNamerz&hostname %r doesn't match either of %sz, zhostname %r doesn't match %rrz=no appropriate commonName or subjectAltName fields were found)	�
ValueError�getrFr=�lenr3rA�mapr;)ZcertrCZdnsnamesZsan�key�value�subrrrr2�s.

r2)�SimpleNamespacec@seZdZdZdd�ZdS)�	ContainerzR
        A generic container for when multiple values need to be returned
        cKs|jj|�dS)N)�__dict__�update)�self�kwargsrrr�__init__�szContainer.__init__N)r4r5r6�__doc__rTrrrrrO�srO)�whichcs"dd�}tjj��r&|�|�r"�SdS|dkr>tjjdtj�}|sFdS|jtj�}tj	dkr�tj
|krt|jdtj
�tjjdd�jtj�}t�fd	d
�|D��r��g}q‡fdd�|D�}n�g}t
�}xT|D]L}tjj|�}||kr�|j|�x(|D] }	tjj||	�}
||
|�r�|
Sq�Wq�WdS)
aKGiven a command, mode, and a PATH string, return the path which
        conforms to the given mode on the PATH, or None if there is no such
        file.

        `mode` defaults to os.F_OK | os.X_OK. `path` defaults to the result
        of os.environ.get("PATH"), or can be overridden with a custom search
        path.

        cSs&tjj|�o$tj||�o$tjj|�S)N)�os�path�exists�access�isdir)�fn�moderrr�
_access_check�szwhich.<locals>._access_checkN�PATHZwin32rZPATHEXT�c3s |]}�j�j|j��VqdS)N)r<�endswith)�.0�ext)�cmdrr�	<genexpr>�szwhich.<locals>.<genexpr>csg|]}�|�qSrr)rbrc)rdrr�
<listcomp>�szwhich.<locals>.<listcomp>)rWrX�dirname�environrH�defpathr9�pathsep�sys�platform�curdir�insert�any�set�normcase�addrA)rdr]rXr^Zpathext�files�seen�dirZnormdirZthefile�namer)rdrrV�s8







rV)�ZipFile�	__enter__)�
ZipExtFilec@s$eZdZdd�Zdd�Zdd�ZdS)rycCs|jj|j�dS)N)rPrQ)rR�baserrrrTszZipExtFile.__init__cCs|S)Nr)rRrrrrxszZipExtFile.__enter__cGs|j�dS)N)�close)rR�exc_inforrr�__exit__szZipExtFile.__exit__N)r4r5r6rTrxr}rrrrrysryc@s$eZdZdd�Zdd�Zdd�ZdS)rwcCs|S)Nr)rRrrrrx"szZipFile.__enter__cGs|j�dS)N)r{)rRr|rrrr}%szZipFile.__exit__cOstj|f|�|�}t|�S)N)�BaseZipFile�openry)rR�argsrSrzrrrr)szZipFile.openN)r4r5r6rxr}rrrrrrw!srw)�python_implementationcCs0dtjkrdStjdkrdStjjd�r,dSdS)z6Return a string identifying the Python implementation.ZPyPy�javaZJythonZ
IronPythonZCPython)rk�versionrWrvr>rrrrr�0s

r�)�	sysconfig)�CallablecCs
t|t�S)N)rr�)�objrrr�callableDsr��mbcs�strict�surrogateescapecCs:t|t�r|St|t�r$|jtt�Stdt|�j��dS)Nzexpect bytes or str, not %s)	r�bytes�	text_typer�_fsencoding�	_fserrors�	TypeError�typer4)�filenamerrr�fsencodeRs

r�cCs:t|t�r|St|t�r$|jtt�Stdt|�j��dS)Nzexpect bytes or str, not %s)	rr�r��decoder�r�r�r�r4)r�rrr�fsdecode[s

r�)�detect_encoding)�BOM_UTF8�lookupzcoding[:=]\s*([-\w.]+)cCsH|dd�j�jdd�}|dks*|jd�r.dS|d
ks@|jd�rDdS|S)z(Imitates get_normal_name in tokenizer.c.N��_�-zutf-8zutf-8-�latin-1�
iso-8859-1�iso-latin-1�latin-1-�iso-8859-1-�iso-latin-1-)r�r�r�)r�r�r�)r<r@r>)�orig_enc�encrrr�_get_normal_namels
r�cs�y�jj�Wntk
r$d�YnXd�d}d}�fdd�}��fdd�}|�}|jt�rpd�|d	d�}d
}|s||gfS||�}|r�||gfS|�}|s�||gfS||�}|r�|||gfS|||gfS)a?
        The detect_encoding() function is used to detect the encoding that should
        be used to decode a Python source file.  It requires one argument, readline,
        in the same way as the tokenize() generator.

        It will call readline a maximum of twice, and return the encoding used
        (as a string) and a list of any lines (left as bytes) it has read in.

        It detects the encoding from the presence of a utf-8 bom or an encoding
        cookie as specified in pep-0263.  If both a bom and a cookie are present,
        but disagree, a SyntaxError will be raised.  If the encoding cookie is an
        invalid charset, raise a SyntaxError.  Note that if a utf-8 bom is found,
        'utf-8-sig' is returned.

        If no encoding is specified, then the default of 'utf-8' will be returned.
        NFzutf-8cs y��Stk
rdSXdS)N�)�
StopIterationr)�readlinerr�read_or_stop�sz%detect_encoding.<locals>.read_or_stopcs�y|jd�}Wn4tk
rBd}�dk	r6dj|��}t|��YnXtj|�}|sVdSt|d�}yt|�}Wn:tk
r��dkr�d|}ndj�|�}t|��YnX�r�|j	dkr؈dkr�d}n
dj��}t|��|d	7}|S)
Nzutf-8z'invalid or missing encoding declarationz{} for {!r}rzunknown encoding: zunknown encoding for {!r}: {}zencoding problem: utf-8z encoding problem for {!r}: utf-8z-sig)
r��UnicodeDecodeError�format�SyntaxError�	cookie_re�findallr�r��LookupErrorrv)�line�line_string�msgZmatches�encoding�codec)�	bom_foundr�rr�find_cookie�s6



z$detect_encoding.<locals>.find_cookieTrz	utf-8-sig)�__self__rv�AttributeErrorr>r�)r�r��defaultr�r��first�secondr)r�r�r�rr�ws4
&


r�)r?r(�)�unescape)�ChainMap)�MutableMapping)�recursive_repr�...cs�fdd�}|S)zm
            Decorator to make a repr function return fillvalue for a recursive
            call
            csLt�����fdd�}t�d�|_t�d�|_t�d�|_t�di�|_|S)NcsBt|�t�f}|�kr�S�j|�z�|�}Wd�j|�X|S)N)�id�	get_identrr�discard)rRrK�result)�	fillvalue�repr_running�
user_functionrr�wrapper�s
z=_recursive_repr.<locals>.decorating_function.<locals>.wrapperr5rUr4�__annotations__)rp�getattrr5rUr4r�)r�r�)r�)r�r�r�decorating_function�sz,_recursive_repr.<locals>.decorating_functionr)r�r�r)r�r�_recursive_repr�sr�c@s�eZdZdZdd�Zdd�Zdd�Zd'd	d
�Zdd�Zd
d�Z	dd�Z
dd�Ze�dd��Z
edd��Zdd�ZeZdd�Zedd��Zdd�Zdd �Zd!d"�Zd#d$�Zd%d&�ZdS)(r�a� A ChainMap groups multiple dicts (or other mappings) together
        to create a single, updateable view.

        The underlying mappings are stored in a list.  That list is public and can
        accessed or updated using the *maps* attribute.  There is no other state.

        Lookups search the underlying mappings successively until a key is found.
        In contrast, writes, updates, and deletions only operate on the first
        mapping.

        cGst|�pig|_dS)z�Initialize a ChainMap by setting *maps* to the given mappings.
            If no mappings are provided, a single empty dictionary is used.

            N)�list�maps)rRr�rrrrT
szChainMap.__init__cCst|��dS)N)�KeyError)rRrKrrr�__missing__szChainMap.__missing__cCs8x,|jD]"}y||Stk
r(YqXqW|j|�S)N)r�r�r�)rRrK�mappingrrr�__getitem__s
zChainMap.__getitem__NcCs||kr||S|S)Nr)rRrKr�rrrrHszChainMap.getcCstt�j|j��S)N)rIrp�unionr�)rRrrr�__len__"szChainMap.__len__cCstt�j|j��S)N)�iterrpr�r�)rRrrr�__iter__%szChainMap.__iter__cst�fdd�|jD��S)Nc3s|]}�|kVqdS)Nr)rb�m)rKrrre)sz(ChainMap.__contains__.<locals>.<genexpr>)ror�)rRrKr)rKr�__contains__(szChainMap.__contains__cCs
t|j�S)N)ror�)rRrrr�__bool__+szChainMap.__bool__cCsdj|djtt|j���S)Nz{0.__class__.__name__}({1})z, )r�rArJr;r�)rRrrr�__repr__.szChainMap.__repr__cGs|tj|f|���S)z?Create a ChainMap with a single dict created from the iterable.)�dict�fromkeys)�cls�iterabler�rrrr�3szChainMap.fromkeyscCs$|j|jdj�f|jdd���S)zHNew ChainMap or subclass with a new copy of maps[0] and refs to maps[1:]rrN)�	__class__r��copy)rRrrrr�8sz
ChainMap.copycCs|jif|j��S)z;New ChainMap with a new dict followed by all previous maps.)r�r�)rRrrr�	new_child>szChainMap.new_childcCs|j|jdd��S)zNew ChainMap from maps[1:].rN)r�r�)rRrrr�parentsBszChainMap.parentscCs||jd|<dS)Nr)r�)rRrKrLrrr�__setitem__GszChainMap.__setitem__cCs8y|jd|=Wn"tk
r2tdj|���YnXdS)Nrz(Key not found in the first mapping: {!r})r�r�r�)rRrKrrr�__delitem__JszChainMap.__delitem__cCs0y|jdj�Stk
r*td��YnXdS)zPRemove and return an item pair from maps[0]. Raise KeyError is maps[0] is empty.rz#No keys found in the first mapping.N)r��popitemr�)rRrrrr�PszChainMap.popitemcGs>y|jdj|f|��Stk
r8tdj|���YnXdS)zWRemove *key* from maps[0] and return its value. Raise KeyError if *key* not in maps[0].rz(Key not found in the first mapping: {!r}N)r��popr�r�)rRrKr�rrrr�WszChainMap.popcCs|jdj�dS)z'Clear maps[0], leaving maps[1:] intact.rN)r��clear)rRrrrr�^szChainMap.clear)N)r4r5r6rUrTr�r�rHr�r�r�r�r�r��classmethodr�r��__copy__r��propertyr�r�r�r�r�r�rrrrr�s(
r�)�cache_from_sourcecCs"|dkrd}|rd}nd}||S)NF�c�or)rX�debug_override�suffixrrrr�esr�)�OrderedDict)r�)�KeysView�
ValuesView�	ItemsViewc@s�eZdZdZdd�Zejfdd�Zejfdd�Zdd	�Zd
d�Z	dd
�Z
d6dd�Zdd�Zdd�Z
dd�Zdd�Zdd�Zdd�Zdd�ZeZe�Zefdd �Zd7d"d#�Zd8d$d%�Zd&d'�Zd(d)�Zed9d*d+��Zd,d-�Zd.d/�Zd0d1�Zd2d3�Z d4d5�Z!d!S):r�z)Dictionary that remembers insertion ordercOsnt|�dkrtdt|���y
|jWn6tk
r\g|_}||dg|dd�<i|_YnX|j||�dS)z�Initialize an ordered dictionary.  Signature is the same as for
            regular dictionaries, but keyword arguments are not recommended
            because their insertion order is arbitrary.

            rz$expected at most 1 arguments, got %dN)rIr��_OrderedDict__rootr��_OrderedDict__map�_OrderedDict__update)rRr��kwds�rootrrrrT�s

zOrderedDict.__init__cCsF||kr6|j}|d}|||g|d<|d<|j|<||||�dS)z!od.__setitem__(i, y) <==> od[i]=yrrN)r�r�)rRrKrLZdict_setitemr��lastrrrr��s
 zOrderedDict.__setitem__cCs0|||�|jj|�\}}}||d<||d<dS)z od.__delitem__(y) <==> del od[y]rrN)r�r�)rRrKZdict_delitem�	link_prev�	link_nextrrrr��s
zOrderedDict.__delitem__ccs2|j}|d}x||k	r,|dV|d}qWdS)zod.__iter__() <==> iter(od)rr(N)r�)rRr��currrrrr��s


zOrderedDict.__iter__ccs2|j}|d}x||k	r,|dV|d}qWdS)z#od.__reversed__() <==> reversed(od)rr(N)r�)rRr�r�rrr�__reversed__�s


zOrderedDict.__reversed__cCshyDx|jj�D]}|dd�=qW|j}||dg|dd�<|jj�Wntk
rXYnXtj|�dS)z.od.clear() -> None.  Remove all items from od.N)r��
itervaluesr�r�r�r�)rRZnoder�rrrr��szOrderedDict.clearTcCs||std��|j}|r8|d}|d}||d<||d<n |d}|d}||d<||d<|d}|j|=tj||�}||fS)z�od.popitem() -> (k, v), return and remove a (key, value) pair.
            Pairs are returned in LIFO order if last is true or FIFO order if false.

            zdictionary is emptyrrr()r�r�r�r�r�)rRr�r��linkr�r�rKrLrrrr��s 
zOrderedDict.popitemcCst|�S)zod.keys() -> list of keys in od)r�)rRrrr�keys�szOrderedDict.keyscs�fdd��D�S)z#od.values() -> list of values in odcsg|]}�|�qSrr)rbrK)rRrrrf�sz&OrderedDict.values.<locals>.<listcomp>r)rRr)rRr�values�szOrderedDict.valuescs�fdd��D�S)z.od.items() -> list of (key, value) pairs in odcsg|]}|�|f�qSrr)rbrK)rRrrrf�sz%OrderedDict.items.<locals>.<listcomp>r)rRr)rRr�items�szOrderedDict.itemscCst|�S)z0od.iterkeys() -> an iterator over the keys in od)r�)rRrrr�iterkeys�szOrderedDict.iterkeysccsx|D]}||VqWdS)z2od.itervalues -> an iterator over the values in odNr)rR�krrrr��s
zOrderedDict.itervaluesccs x|D]}|||fVqWdS)z=od.iteritems -> an iterator over the (key, value) items in odNr)rRrrrr�	iteritems�s
zOrderedDict.iteritemscOs�t|�dkr tdt|�f��n|s,td��|d}f}t|�dkrL|d}t|t�rrx^|D]}||||<q\WnDt|d�r�x8|j�D]}||||<q�Wnx|D]\}}|||<q�Wx|j�D]\}}|||<q�WdS)a�od.update(E, **F) -> None.  Update od from dict/iterable E and F.

            If E is a dict instance, does:           for k in E: od[k] = E[k]
            If E has a .keys() method, does:         for k in E.keys(): od[k] = E[k]
            Or if E is an iterable of items, does:   for k, v in E: od[k] = v
            In either case, this is followed by:     for k, v in F.items(): od[k] = v

            r(z8update() takes at most 2 positional arguments (%d given)z,update() takes at least 1 argument (0 given)rrr�N)rIr�rr��hasattrr�r)r�r�rR�otherrKrLrrrrQ�s&	


zOrderedDict.updatecCs0||kr||}||=|S||jkr,t|��|S)z�od.pop(k[,d]) -> v, remove specified key and return the corresponding value.
            If key is not found, d is returned if given, otherwise KeyError is raised.

            )�_OrderedDict__markerr�)rRrKr�r�rrrr�!s
zOrderedDict.popNcCs||kr||S|||<|S)zDod.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in odr)rRrKr�rrr�
setdefault.szOrderedDict.setdefaultcCs^|si}t|�t�f}||kr"dSd||<z&|s>d|jjfSd|jj|j�fS||=XdS)zod.__repr__() <==> repr(od)z...rz%s()z%s(%r)N)r��
_get_identr�r4r)rRZ
_repr_runningZcall_keyrrrr�5szOrderedDict.__repr__cs\�fdd��D�}t��j�}xtt��D]}|j|d�q*W|rP�j|f|fS�j|ffS)z%Return state information for picklingcsg|]}|�|g�qSrr)rbr)rRrrrfEsz*OrderedDict.__reduce__.<locals>.<listcomp>N)�varsr�r�r�r�)rRrZ	inst_dictrr)rRr�
__reduce__CszOrderedDict.__reduce__cCs
|j|�S)z!od.copy() -> a shallow copy of od)r�)rRrrrr�MszOrderedDict.copycCs |�}x|D]}|||<qW|S)z�OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S
            and values equal to v (which defaults to None).

            r)r�r�rL�drKrrrr�Qs
zOrderedDict.fromkeyscCs6t|t�r*t|�t|�ko(|j�|j�kStj||�S)z�od.__eq__(y) <==> od==y.  Comparison to another OD is order-sensitive
            while comparison to a regular mapping is order-insensitive.

            )rr�rIrr��__eq__)rRrrrrr
\s
 zOrderedDict.__eq__cCs
||kS)Nr)rRrrrr�__ne__eszOrderedDict.__ne__cCst|�S)z@od.viewkeys() -> a set-like object providing a view on od's keys)r�)rRrrr�viewkeysjszOrderedDict.viewkeyscCst|�S)z<od.viewvalues() -> an object providing a view on od's values)r�)rRrrr�
viewvaluesnszOrderedDict.viewvaluescCst|�S)zBod.viewitems() -> a set-like object providing a view on od's items)r�)rRrrr�	viewitemsrszOrderedDict.viewitems)T)N)N)N)"r4r5r6rUrTr�r�r�r�r�r�r�r�rrrr�rrQr��objectrr�rr�rr�r�r�r
rrrrrrrrr��s:
	




	r�)�BaseConfigurator�valid_identz^[a-z_][a-z0-9_]*$cCstj|�}|std|��dS)Nz!Not a valid Python identifier: %rT)�
IDENTIFIERr,rG)rr�rrrr|s
rc@s"eZdZdZdd�Zddd�ZdS)�ConvertingDictz A converting dictionary wrapper.cCsJtj||�}|jj|�}||k	rF|||<t|�tttfkrF||_||_	|S)N)
r�r��configurator�convertr�r�ConvertingList�ConvertingTuple�parentrK)rRrKrLr�rrrr��s
zConvertingDict.__getitem__NcCsLtj|||�}|jj|�}||k	rH|||<t|�tttfkrH||_||_	|S)N)
r�rHrrr�rrrrrK)rRrKr�rLr�rrrrH�s
zConvertingDict.get)N)r4r5r6rUr�rHrrrrr�srcCsDtj|||�}|jj|�}||k	r@t|�tttfkr@||_||_	|S)N)
r�r�rrr�rrrrrK)rRrKr�rLr�rrrr��s
r�c@s"eZdZdZdd�Zd	dd�ZdS)
rzA converting list wrapper.cCsJtj||�}|jj|�}||k	rF|||<t|�tttfkrF||_||_	|S)N)
r�r�rrr�rrrrrK)rRrKrLr�rrrr��s
zConvertingList.__getitem__rcCs<tj||�}|jj|�}||k	r8t|�tttfkr8||_|S)N)	r�r�rrr�rrrr)rR�idxrLr�rrrr��s
zConvertingList.popN���)r)r4r5r6rUr�r�rrrrr�src@seZdZdZdd�ZdS)rzA converting tuple wrapper.cCsBtj||�}|jj|�}||k	r>t|�tttfkr>||_||_	|S)N)
�tupler�rrr�rrrrrK)rRrKrLr�rrrr��s
zConvertingTuple.__getitem__N)r4r5r6rUr�rrrrr�src@s�eZdZdZejd�Zejd�Zejd�Zejd�Z	ejd�Z
ddd	�Zee
�Zd
d�Zdd
�Zdd�Zdd�Zdd�Zdd�Zdd�ZdS)rzQ
        The configurator base class which defines some useful defaults.
        z%^(?P<prefix>[a-z]+)://(?P<suffix>.*)$z^\s*(\w+)\s*z^\.\s*(\w+)\s*z^\[\s*(\w+)\s*\]\s*z^\d+$�ext_convert�cfg_convert)rcZcfgcCst|�|_||j_dS)N)r�configr)rRr!rrrrT�s
zBaseConfigurator.__init__c	Cs�|jd�}|jd�}y`|j|�}xP|D]H}|d|7}yt||�}Wq&tk
rl|j|�t||�}Yq&Xq&W|Stk
r�tj�dd�\}}td||f�}|||_	|_
|�YnXdS)zl
            Resolve strings to objects using standard import and attribute
            syntax.
            r7rrNzCannot resolve %r: %s)r9r��importerr�r��ImportErrorrkr|rG�	__cause__�
__traceback__)	rRrrvZused�foundrE�e�tb�vrrr�resolve�s"




zBaseConfigurator.resolvecCs
|j|�S)z*Default converter for the ext:// protocol.)r*)rRrLrrrrszBaseConfigurator.ext_convertcCs|}|jj|�}|dkr&td|��n�||j�d�}|j|j�d}x�|r�|jj|�}|rp||j�d}nd|jj|�}|r�|j�d}|jj|�s�||}n2yt	|�}||}Wnt
k
r�||}YnX|r�||j�d�}qJtd||f��qJW|S)z*Default converter for the cfg:// protocol.NzUnable to convert %rrzUnable to convert %r at %r)�WORD_PATTERNr,rG�endr!�groups�DOT_PATTERN�
INDEX_PATTERN�
DIGIT_PATTERN�intr�)rRrL�restr�rr�nrrrr s2
zBaseConfigurator.cfg_convertcCs�t|t�r&t|t�r&t|�}||_n�t|t�rLt|t�rLt|�}||_n|t|t�rrt|t�rrt|�}||_nVt|t�r�|j	j
|�}|r�|j�}|d}|jj
|d�}|r�|d}t||�}||�}|S)z�
            Convert values to an appropriate type. dicts, lists and tuples are
            replaced by their converting alternatives. Strings are checked to
            see if they have a conversion format and are converted if they do.
            �prefixNr�)rrr�rrr�rr�string_types�CONVERT_PATTERNr,�	groupdict�value_convertersrHr�)rRrLr�rr4Z	converterr�rrrr)s*


zBaseConfigurator.convertcsr�jd�}t|�s|j|�}�jdd�}t�fdd��D��}|f|�}|rnx |j�D]\}}t|||�qVW|S)z1Configure an object with a user-supplied factory.z()r7Ncs g|]}t|�r|�|f�qSr)r)rbr)r!rrrfLsz5BaseConfigurator.configure_custom.<locals>.<listcomp>)r�r�r*r�r�setattr)rRr!r�ZpropsrSr�rvrLr)r!r�configure_customEs


z!BaseConfigurator.configure_customcCst|t�rt|�}|S)z0Utility function which converts lists to tuples.)rr�r)rRrLrrr�as_tupleSs
zBaseConfigurator.as_tupleN)r4r5r6rUr*r+r6r+r.r/r0r8�staticmethod�
__import__r"rTr*rr rr:r;rrrrr�s 




"r)r)rr�)r�)N)N)�Z
__future__rrWr*rkZsslr#�version_inforZ
basestringr5rr��typesrZ	file_typeZ__builtin__�builtinsZConfigParserZconfigparserZ	_backportrrr	r
rrZurllibr
rrrrrrrZurllib2rrrrrr r!r"r#r$ZhttplibZ	xmlrpclibZQueueZqueuer%ZhtmlentitydefsZ	raw_input�	itertoolsr&�filterr'r1r)r/�io�strr0Zurllib.parseZurllib.requestZurllib.errorZhttp.clientZclientZrequestZ
xmlrpc.clientZhtml.parserZ
html.entitiesZentities�inputr2r3rGrFrNrOrrV�F_OK�X_OKZzipfilerwr~rryZBaseZipExtFilerlr�r�r��	NameError�collectionsr�r�r�r��getfilesystemencodingr�r��tokenizer��codecsr�r�r+r�r�Zhtmlr?Zcgir�r�r��reprlibr�r�Zimpr�r�Zthreadr�r	Zdummy_threadZ_abcollr�r�r�r�Zlogging.configrr�Irrr�r�rrrrrrr�<module>s&
$,,0




2+A


		
[
b
w

_vendor/distlib/__pycache__/database.cpython-36.opt-1.pyc000064400000122116151733136300017200 0ustar003

�Pf��@s�dZddlmZddlZddlZddlZddlZddlZddlZddl	Z	ddl
Z
ddlZddlm
Z
mZddlmZddlmZmZddlmZmZmZdd	lmZmZmZmZmZmZmZd
ddd
dgZ ej!e"�Z#dZ$dZ%deddde$dfZ&dZ'Gdd�de(�Z)Gdd�de(�Z*Gdd
�d
e(�Z+Gdd�de+�Z,Gdd�de,�Z-Gdd
�d
e,�Z.e-Z/e.Z0Gdd�de(�Z1d)d!d"�Z2d#d$�Z3d%d&�Z4d'd(�Z5dS)*zPEP 376 implementation.�)�unicode_literalsN�)�DistlibException�	resources)�StringIO)�
get_scheme�UnsupportedVersionError)�Metadata�METADATA_FILENAME�WHEEL_METADATA_FILENAME)�parse_requirement�cached_property�parse_name_and_version�read_exports�
write_exports�	CSVReader�	CSVWriter�Distribution�BaseInstalledDistribution�InstalledDistribution�EggInfoDistribution�DistributionPathzpydist-exports.jsonzpydist-commands.jsonZ	INSTALLER�RECORD�	REQUESTED�	RESOURCES�SHAREDz
.dist-infoc@s(eZdZdZdd�Zdd�Zdd�ZdS)	�_CachezL
    A simple cache mapping names and .dist-info paths to distributions
    cCsi|_i|_d|_dS)zZ
        Initialise an instance. There is normally one for each DistributionPath.
        FN)�name�path�	generated)�self�r!�/usr/lib/python3.6/database.py�__init__0sz_Cache.__init__cCs|jj�|jj�d|_dS)zC
        Clear the cache, setting it to its initial state.
        FN)r�clearrr)r r!r!r"r$8s

z_Cache.clearcCs2|j|jkr.||j|j<|jj|jg�j|�dS)z`
        Add a distribution to the cache.
        :param dist: The distribution to add.
        N)rr�
setdefault�key�append)r �distr!r!r"�add@sz
_Cache.addN)�__name__�
__module__�__qualname__�__doc__r#r$r)r!r!r!r"r,src@s�eZdZdZddd�Zdd�Zdd	�Zeee�Zd
d�Z	dd
�Z
dd�Zedd��Z
dd�Zdd�Zddd�Zdd�Zddd�ZdS)rzU
    Represents a set of distributions installed on a path (typically sys.path).
    NFcCsD|dkrtj}||_d|_||_t�|_t�|_d|_td�|_	dS)a�
        Create an instance from a path, optionally including legacy (distutils/
        setuptools/distribute) distributions.
        :param path: The path to use, as a list of directories. If not specified,
                     sys.path is used.
        :param include_egg: If True, this instance will look for and return legacy
                            distributions as well as those based on PEP 376.
        NT�default)
�sysr�
_include_dist�_include_eggr�_cache�
_cache_egg�_cache_enabledr�_scheme)r rZinclude_eggr!r!r"r#Ns	zDistributionPath.__init__cCs|jS)N)r4)r r!r!r"�_get_cache_enabledbsz#DistributionPath._get_cache_enabledcCs
||_dS)N)r4)r �valuer!r!r"�_set_cache_enabledesz#DistributionPath._set_cache_enabledcCs|jj�|jj�dS)z,
        Clears the internal cache.
        N)r2r$r3)r r!r!r"�clear_cachejs
zDistributionPath.clear_cachec
csTt�}�xF|jD�]:}tj|�}|dkr*q|jd�}|s|jrDqt|j�}�x�|D]�}|j|�}|sV|j|krvqV|jo�|jt	��rt
tg}x*|D] }tj
||�}	|j|	�}
|
r�Pq�WqVtj|
j���}t|dd�}WdQRXtjd|j�|j|j�t|j||d�VqV|jrV|jd	�rVtjd|j�|j|j�t|j|�VqVWqWdS)
zD
        Yield .dist-info and/or .egg(-info) distributions.
        N��legacy)�fileobj�schemezFound %s)�metadata�env�	.egg-info�.egg)r@rA)�setrr�finder_for_path�findZis_container�sortedr0�endswith�DISTINFO_EXTr
r�	posixpath�join�
contextlib�closing�	as_streamr	�logger�debugr)�new_dist_classr1�old_dist_class)
r �seenr�finder�rZrset�entryZpossible_filenamesZmetadata_filenameZ
metadata_pathZpydist�streamr>r!r!r"�_yield_distributionsrs@






z%DistributionPath._yield_distributionscCst|jj}|jo|jj}|s"|rpx4|j�D](}t|t�rH|jj|�q,|jj|�q,W|rdd|j_|rpd|j_dS)zk
        Scan the path for distributions and populate the cache with
        those that are found.
        TN)r2rr1r3rV�
isinstancerr))r Zgen_distZgen_eggr(r!r!r"�_generate_cache�s

z DistributionPath._generate_cachecCs|jdd�}dj||g�tS)ao
        The *name* and *version* parameters are converted into their
        filename-escaped form, i.e. any ``'-'`` characters are replaced
        with ``'_'`` other than the one in ``'dist-info'`` and the one
        separating the name from the version number.

        :parameter name: is converted to a standard distribution name by replacing
                         any runs of non- alphanumeric characters with a single
                         ``'-'``.
        :type name: string
        :parameter version: is converted to a standard version string. Spaces
                            become dots, and all other non-alphanumeric characters
                            (except dots) become dashes, with runs of multiple
                            dashes condensed to a single dash.
        :type version: string
        :returns: directory name
        :rtype: string�-�_)�replacerIrG)�clsr�versionr!r!r"�distinfo_dirname�sz!DistributionPath.distinfo_dirnameccsj|js x^|j�D]
}|VqWnF|j�x|jjj�D]
}|Vq6W|jrfx|jjj�D]
}|VqXWdS)a5
        Provides an iterator that looks for distributions and returns
        :class:`InstalledDistribution` or
        :class:`EggInfoDistribution` instances for each one of them.

        :rtype: iterator of :class:`InstalledDistribution` and
                :class:`EggInfoDistribution` instances
        N)r4rVrXr2r�valuesr1r3)r r(r!r!r"�get_distributions�s	
z"DistributionPath.get_distributionscCs�d}|j�}|js6xj|j�D]}|j|kr|}PqWnH|j�||jjkr\|jj|d}n"|jr~||jjkr~|jj|d}|S)a=
        Looks for a named distribution on the path.

        This function only returns the first result found, as no more than one
        value is expected. If nothing is found, ``None`` is returned.

        :rtype: :class:`InstalledDistribution`, :class:`EggInfoDistribution`
                or ``None``
        Nr)	�lowerr4rVr&rXr2rr1r3)r r�resultr(r!r!r"�get_distribution�s

z!DistributionPath.get_distributionc	cs�d}|dk	rJy|jjd||f�}Wn$tk
rHtd||f��YnXxd|j�D]X}|j}xL|D]D}t|�\}}|dkr�||kr�|VPqd||krd|j|�rd|VPqdWqTWdS)a
        Iterates over all distributions to find which distributions provide *name*.
        If a *version* is provided, it will be used to filter the results.

        This function only returns the first result found, since no more than
        one values are expected. If the directory is not found, returns ``None``.

        :parameter version: a version specifier that indicates the version
                            required, conforming to the format in ``PEP-345``

        :type name: string
        :type version: string
        Nz%s (%s)zinvalid name or version: %r, %r)r5�matcher�
ValueErrorrr`�providesr�match)	r rr]rdr(�provided�p�p_name�p_verr!r!r"�provides_distribution�s$
z&DistributionPath.provides_distributioncCs(|j|�}|dkrtd|��|j|�S)z5
        Return the path to a resource file.
        Nzno distribution named %r found)rc�LookupError�get_resource_path)r r�
relative_pathr(r!r!r"�
get_file_paths
zDistributionPath.get_file_pathccs`xZ|j�D]N}|j}||kr
||}|dk	r@||krX||Vq
x|j�D]
}|VqJWq
WdS)z�
        Return all of the exported entries in a particular category.

        :param category: The category to search for entries.
        :param name: If specified, only entries with that name are returned.
        N)r`�exportsr_)r �categoryrr(rS�d�vr!r!r"�get_exported_entries"sz%DistributionPath.get_exported_entries)NF)N)N)r*r+r,r-r#r6r8�propertyZ
cache_enabledr9rVrX�classmethodr^r`rcrlrprur!r!r!r"rJs

*
$	c@s�eZdZdZdZdZdd�Zedd��ZeZ	edd��Z
ed	d
��Zdd�Zed
d��Z
edd��Zedd��Zedd��Zedd��Zdd�Zdd�Zdd�Zdd�ZdS) rz�
    A base class for distributions, whether installed or from indexes.
    Either way, it must have some metadata, so that's all that's needed
    for construction.
    FcCsL||_|j|_|jj�|_|j|_d|_d|_d|_d|_t	�|_
i|_dS)z�
        Initialise an instance.
        :param metadata: The instance of :class:`Metadata` describing this
        distribution.
        N)r>rrar&r]Zlocator�digest�extras�contextrBZ
download_urlsZdigests)r r>r!r!r"r#GszDistribution.__init__cCs|jjS)zH
        The source archive download URL for this distribution.
        )r>�
source_url)r r!r!r"r{XszDistribution.source_urlcCsd|j|jfS)zX
        A utility property which displays the name and version in parentheses.
        z%s (%s))rr])r r!r!r"�name_and_versionaszDistribution.name_and_versioncCs.|jj}d|j|jf}||kr*|j|�|S)z�
        A set of distribution names and versions provided by this distribution.
        :return: A set of "name (version)" strings.
        z%s (%s))r>rfrr]r')r Zplist�sr!r!r"rfhs

zDistribution.providescCs8|j}tjd|j��t||�}t|j||j|jd��S)Nz%Getting requirements from metadata %r)ryr?)	r>rMrNZtodict�getattrrBZget_requirementsryrz)r Zreq_attr�mdZreqtsr!r!r"�_get_requirementsts

zDistribution._get_requirementscCs
|jd�S)N�run_requires)r�)r r!r!r"r�{szDistribution.run_requirescCs
|jd�S)N�
meta_requires)r�)r r!r!r"r�szDistribution.meta_requirescCs
|jd�S)N�build_requires)r�)r r!r!r"r��szDistribution.build_requirescCs
|jd�S)N�
test_requires)r�)r r!r!r"r��szDistribution.test_requirescCs
|jd�S)N�dev_requires)r�)r r!r!r"r��szDistribution.dev_requiresc
Cs�t|�}t|jj�}y|j|j�}Wn6tk
rZtjd|�|j	�d}|j|�}YnX|j
}d}xJ|jD]@}t|�\}}	||kr�qny|j
|	�}PWqntk
r�YqnXqnW|S)z�
        Say if this instance matches (fulfills) a requirement.
        :param req: The requirement to match.
        :rtype req: str
        :return: True if it matches, else False.
        z+could not read version %r - using name onlyrF)rrr>r=rd�requirementrrM�warning�splitr&rfrrg)
r �reqrSr=rdrrbrirjrkr!r!r"�matches_requirement�s*	

z Distribution.matches_requirementcCs(|jrd|j}nd}d|j|j|fS)zC
        Return a textual representation of this instance,
        z [%s]r:z<Distribution %s (%s)%s>)r{rr])r �suffixr!r!r"�__repr__�szDistribution.__repr__cCs>t|�t|�k	rd}n$|j|jko8|j|jko8|j|jk}|S)a<
        See if this distribution is the same as another.
        :param other: The distribution to compare with. To be equal to one
                      another. distributions must have the same type, name,
                      version and source_url.
        :return: True if it is the same, else False.
        F)�typerr]r{)r �otherrbr!r!r"�__eq__�szDistribution.__eq__cCst|j�t|j�t|j�S)zH
        Compute hash in a way which matches the equality test.
        )�hashrr]r{)r r!r!r"�__hash__�szDistribution.__hash__N)r*r+r,r-Zbuild_time_dependency�	requestedr#rvr{Zdownload_urlr|rfr�r�r�r�r�r�r�r�r�r�r!r!r!r"r5s$"
cs0eZdZdZdZd�fdd�	Zddd�Z�ZS)	rz]
    This is the base class for installed distributions (whether PEP 376 or
    legacy).
    Ncs tt|�j|�||_||_dS)a
        Initialise an instance.
        :param metadata: An instance of :class:`Metadata` which describes the
                         distribution. This will normally have been initialised
                         from a metadata file in the ``path``.
        :param path:     The path of the ``.dist-info`` or ``.egg-info``
                         directory for the distribution.
        :param env:      This is normally the :class:`DistributionPath`
                         instance where this distribution was found.
        N)�superrr#r�	dist_path)r r>rr?)�	__class__r!r"r#�sz"BaseInstalledDistribution.__init__cCsd|dkr|j}|dkr"tj}d}ntt|�}d|j}||�j�}tj|�jd�jd�}d||fS)a�
        Get the hash of some data, using a particular hash algorithm, if
        specified.

        :param data: The data to be hashed.
        :type data: bytes
        :param hasher: The name of a hash implementation, supported by hashlib,
                       or ``None``. Examples of valid values are ``'sha1'``,
                       ``'sha224'``, ``'sha384'``, '``sha256'``, ``'md5'`` and
                       ``'sha512'``. If no hasher is specified, the ``hasher``
                       attribute of the :class:`InstalledDistribution` instance
                       is used. If the hasher is determined to be ``None``, MD5
                       is used as the hashing algorithm.
        :returns: The hash of the data. If a hasher was explicitly specified,
                  the returned hash will be prefixed with the specified hasher
                  followed by '='.
        :rtype: str
        Nr:z%s=�=�asciiz%s%s)	�hasher�hashlib�md5r~rx�base64Zurlsafe_b64encode�rstrip�decode)r �datar��prefixrxr!r!r"�get_hash�s

z"BaseInstalledDistribution.get_hash)N)N)r*r+r,r-r�r#r��
__classcell__r!r!)r�r"r�scs�eZdZdZdZd'�fdd�	Zdd�Zdd	�Zd
d�Ze	dd
��Z
dd�Zdd�Zdd�Z
dd�Zd(dd�Zdd�Ze	dd��Zd)dd�Zdd �Zd!d"�Zd#d$�Zd%d&�ZejZ�ZS)*ra
    Created with the *path* of the ``.dist-info`` directory provided to the
    constructor. It reads the metadata contained in ``pydist.json`` when it is
    instantiated., or uses a passed in Metadata instance (useful for when
    dry-run mode is being used).
    Zsha256Ncs0tj|�|_}|dkr(ddl}|j�|rN|jrN||jjkrN|jj|j}nt|dkr�|j	t
�}|dkrr|j	t�}|dkr�|j	d�}|dkr�tdt
|f��t
j|j���}t|dd�}WdQRXtt|�j|||�|r�|jr�|jj|�y|j	d�}Wn&tk
�r ddl}|j�YnX|dk	|_dS)NrZMETADATAzno %s found in %sr;)r<r=r)rrCrR�pdbZ	set_tracer4r2rr>rDr
rrerJrKrLr	r�rr#r)�AttributeErrorr�)r rr>r?rRr�rSrU)r�r!r"r#s4




zInstalledDistribution.__init__cCsd|j|j|jfS)Nz#<InstalledDistribution %r %s at %r>)rr]r)r r!r!r"r�2szInstalledDistribution.__repr__cCsd|j|jfS)Nz%s %s)rr])r r!r!r"�__str__6szInstalledDistribution.__str__c
Cs�g}|jd�}tj|j���`}t|d��J}xB|D]:}dd�tt|�d�D�}||\}}}	|j|||	f�q0WWdQRXWdQRX|S)a"
        Get the list of installed files for the distribution
        :return: A list of tuples of path, hash and size. Note that hash and
                 size might be ``None`` for some entries. The path is exactly
                 as stored in the file (which is as in PEP 376).
        r)rUcSsg|]}d�qS)Nr!)�.0�ir!r!r"�
<listcomp>Hsz6InstalledDistribution._get_records.<locals>.<listcomp>�N)�get_distinfo_resourcerJrKrLr�range�lenr')
r �resultsrSrUZ
record_reader�row�missingr�checksum�sizer!r!r"�_get_records9s

(z"InstalledDistribution._get_recordscCsi}|jt�}|r|j�}|S)a
        Return the information exported by this distribution.
        :return: A dictionary of exports, mapping an export category to a dict
                 of :class:`ExportEntry` instances describing the individual
                 export entries, and keyed by name.
        )r��EXPORTS_FILENAMEr)r rbrSr!r!r"rqPs

zInstalledDistribution.exportsc	Cs8i}|jt�}|r4tj|j���}t|�}WdQRX|S)z�
        Read exports data from a file in .ini format.

        :return: A dictionary of exports, mapping an export category to a list
                 of :class:`ExportEntry` instances describing the individual
                 export entries.
        N)r�r�rJrKrLr)r rbrSrUr!r!r"r^s
z"InstalledDistribution.read_exportsc
Cs.|jt�}t|d��}t||�WdQRXdS)a
        Write a dictionary of exports to a file in .ini format.
        :param exports: A dictionary of exports, mapping an export category to
                        a list of :class:`ExportEntry` instances describing the
                        individual export entries.
        �wN)�get_distinfo_filer��openr)r rqZrf�fr!r!r"rms
z#InstalledDistribution.write_exportscCsh|jd�}tj|j���:}t|d��$}x|D]\}}||kr,|Sq,WWdQRXWdQRXtd|��dS)aW
        NOTE: This API may change in the future.

        Return the absolute path to a resource file with the given relative
        path.

        :param relative_path: The path, relative to .dist-info, of the resource
                              of interest.
        :return: The absolute path where the resource is to be found.
        r)rUNz3no resource file with relative path %r is installed)r�rJrKrLr�KeyError)r rorSrUZresources_readerZrelativeZdestinationr!r!r"rnxs
z'InstalledDistribution.get_resource_pathccsx|j�D]
}|Vq
WdS)z�
        Iterates over the ``RECORD`` entries and returns a tuple
        ``(path, hash, size)`` for each line.

        :returns: iterator of (path, hash, size)
        N)r�)r rbr!r!r"�list_installed_files�sz*InstalledDistribution.list_installed_filesFcCs,tjj|d�}tjj|j�}|j|�}tjj|d�}|jd�}tjd|�|rRdSt|���}x�|D]�}tjj	|�s||j
d	�r�d}	}
n4dtjj|�}
t|d��}|j
|j��}	WdQRX|j|�s�|r�|j|�r�tjj||�}|j||	|
f�qbW|j|��rtjj||�}|j|ddf�WdQRX|S)
z�
        Writes the ``RECORD`` file, using the ``paths`` iterable passed in. Any
        existing ``RECORD`` file is silently overwritten.

        prefix is used to determine when to write absolute paths.
        r:rzcreating %sN�.pyc�.pyoz%d�rb)r�r�)�osrrI�dirname�
startswithr�rM�infor�isdirrF�getsizer�r��read�relpathZwriterow)r �pathsr��dry_run�baseZbase_under_prefix�record_path�writerr�
hash_valuer��fpr!r!r"�write_installed_files�s.





z+InstalledDistribution.write_installed_filesc
Csg}tjj|j�}|jd�}�x�|j�D]�\}}}tjj|�sLtjj||�}||krVq(tjj|�sv|j|dddf�q(tjj	|�r(t
tjj|��}|r�||kr�|j|d||f�q(|r(d|kr�|jdd�d}nd	}t
|d
��2}	|j|	j�|�}
|
|k�r|j|d||
f�Wd	QRXq(W|S)a�
        Checks that the hashes and sizes of the files in ``RECORD`` are
        matched by the files themselves. Returns a (possibly empty) list of
        mismatches. Each entry in the mismatch list will be a tuple consisting
        of the path, 'exists', 'size' or 'hash' according to what didn't match
        (existence is checked first, then size, then hash), the expected
        value and the actual value.
        r�existsTFr��=rrNr�r�)r�rr�r�r��isabsrIr�r'�isfile�strr�r�r�r�r�)r �
mismatchesr�r�rr�r�Zactual_sizer�r�Zactual_hashr!r!r"�check_installed_files�s.	

 z+InstalledDistribution.check_installed_filescCs�i}tjj|jd�}tjj|�r�tj|ddd��}|j�j�}WdQRXx@|D]8}|jdd�\}}|dkr~|j	|g�j
|�qN|||<qNW|S)	a�
        A dictionary of shared locations whose keys are in the set 'prefix',
        'purelib', 'platlib', 'scripts', 'headers', 'data' and 'namespace'.
        The corresponding value is the absolute path of that category for
        this distribution, and takes into account any paths selected by the
        user at installation time (e.g. via command-line arguments). In the
        case of the 'namespace' key, this would be a list of absolute paths
        for the roots of namespace packages in this distribution.

        The first time this property is accessed, the relevant information is
        read from the SHARED file in the .dist-info directory.
        rrSzutf-8)�encodingNr�r�	namespace)r�rrIr��codecsr�r��
splitlinesr�r%r')r rb�shared_pathr��lines�liner&r7r!r!r"�shared_locations�s
z&InstalledDistribution.shared_locationsc	
Cs�tjj|jd�}tjd|�|r$dSg}x6dD].}||}tjj||�r.|jd	||f�q.Wx"|jd
f�D]}|jd|�qnWtj	|dd
d��}|j
dj|��WdQRX|S)aa
        Write shared location information to the SHARED file in .dist-info.
        :param paths: A dictionary as described in the documentation for
        :meth:`shared_locations`.
        :param dry_run: If True, the action is logged but no file is actually
                        written.
        :return: The path of the file written to.
        rzcreating %sNr��lib�headers�scriptsr�z%s=%sr�znamespace=%sr�zutf-8)r��
)r�r�r�r�r�)r�rrIrMr�r�r'�getr�r��write)	r r�r�r�r�r&r�nsr�r!r!r"�write_shared_locations�s	
z,InstalledDistribution.write_shared_locationscCsF|tkrtd||jf��tj|j�}|dkr<td|j��|j|�S)Nz+invalid path for a dist-info file: %r at %rzUnable to get a finder for %s)�
DIST_FILESrrrrCrD)r rrRr!r!r"r�sz+InstalledDistribution.get_distinfo_resourcecCs~|jtj�dkrT|jtj�dd�\}}||jjtj�dkrTtd||j|jf��|tkrntd||jf��tjj	|j|�S)	a�
        Returns a path located under the ``.dist-info`` directory. Returns a
        string representing the path.

        :parameter path: a ``'/'``-separated path relative to the
                         ``.dist-info`` directory or an absolute path;
                         If *path* is an absolute path and doesn't start
                         with the ``.dist-info`` directory path,
                         a :class:`DistlibException` is raised
        :type path: str
        :rtype: str
        r�Nrz;dist-info file %r does not belong to the %r %s distributionz+invalid path for a dist-info file: %r at %r������)
rDr��sepr�rrrr]r�rI)r rr^r!r!r"r�sz'InstalledDistribution.get_distinfo_fileccsVtjj|j�}xB|j�D]6\}}}tjj|�s<tjj||�}|j|j�r|VqWdS)z�
        Iterates over the ``RECORD`` entries and returns paths for each line if
        the path is pointing to a file located in the ``.dist-info`` directory
        or one of its subdirectories.

        :returns: iterator of paths
        N)r�rr�r�r�rIr�)r r�rr�r�r!r!r"�list_distinfo_files6sz)InstalledDistribution.list_distinfo_filescCst|t�o|j|jkS)N)rWrr)r r�r!r!r"r�Fs
zInstalledDistribution.__eq__)NN)F)F)r*r+r,r-r�r#r�r�r�r
rqrrrnr�r�r�r�r�r�r�r�r��objectr�r�r!r!)r�r"r	s(

##
	csjeZdZdZdZiZd�fdd�	Zdd�Zdd	�Zd
d�Z	dd
�Z
dd�Zddd�Zdd�Z
ejZ�ZS)raCreated with the *path* of the ``.egg-info`` directory or file provided
    to the constructor. It reads the metadata contained in the file itself, or
    if the given path happens to be a directory, the metadata is read from the
    file ``PKG-INFO`` under that directory.TNcs�dd�}||_||_|rJ|jrJ||jjkrJ|jj|j}|||j|j�n0|j|�}|||j|j�|rz|jrz|jj|�t	t
|�j|||�dS)NcSs||_|j�|_||_dS)N)rrar&r])r}�nrtr!r!r"�set_name_and_versionXs
z:EggInfoDistribution.__init__.<locals>.set_name_and_version)rr�r4r3r>rr]�
_get_metadatar)r�rr#)r rr?r�r>)r�r!r"r#Ws

zEggInfoDistribution.__init__c
s2d}dd���fdd�}|jd�r�tjj|�rdtjj|dd�}t|dd	�}tjj|dd
�}||�}n`tj|�}t|j	d�j
d��}t|dd
�}y|j	d�}	�|	j
d��}Wntk
r�d}YnXnX|jd��rtjj|��rtjj|d
�}||�}tjj|d�}t|dd	�}ntd|��|�r.|j
|�|S)NcSs�g}|j�}x�|D]�}|j�}|jd�r6tjd|�Pt|�}|sPtjd|�q|jr`tjd�|jst|j|j	�qdj
dd�|jD��}|jd|j	|f�qW|S)	z�Create a list of dependencies from a requires.txt file.

            *data*: the contents of a setuptools-produced requires.txt file.
            �[z.Unexpected line: quitting requirement scan: %rz#Not recognised as a requirement: %rz4extra requirements in requires.txt are not supportedz, css|]}d|VqdS)z%s%sNr!)r��cr!r!r"�	<genexpr>�szQEggInfoDistribution._get_metadata.<locals>.parse_requires_data.<locals>.<genexpr>z%s (%s))r��stripr�rMr�rryZconstraintsr'rrI)r��reqsr�r�rSZconsr!r!r"�parse_requires_dataos&


z>EggInfoDistribution._get_metadata.<locals>.parse_requires_datacsHg}y*tj|dd��}�|j��}WdQRXWntk
rBYnX|S)z�Create a list of dependencies from a requires.txt file.

            *req_path*: the path to a setuptools-produced requires.txt file.
            rSzutf-8N)r�r�r��IOError)�req_pathr�r�)r�r!r"�parse_requires_path�sz>EggInfoDistribution._get_metadata.<locals>.parse_requires_pathz.eggzEGG-INFOzPKG-INFOr;)rr=zrequires.txtzEGG-INFO/PKG-INFO�utf8)r<r=zEGG-INFO/requires.txtzutf-8z	.egg-infoz,path must end with .egg-info or .egg, got %r)rFr�rr�rIr	�	zipimport�zipimporterr�get_datar�r�rZadd_requirements)
r r�requiresr��	meta_pathr>r�Zzipfr<r�r!)r�r"r�ls:




z!EggInfoDistribution._get_metadatacCsd|j|j|jfS)Nz!<EggInfoDistribution %r %s at %r>)rr]r)r r!r!r"r��szEggInfoDistribution.__repr__cCsd|j|jfS)Nz%s %s)rr])r r!r!r"r��szEggInfoDistribution.__str__cCsdg}tjj|jd�}tjj|�r`x>|j�D]2\}}}||kr>q*tjj|�s*|j|dddf�q*W|S)a�
        Checks that the hashes and sizes of the files in ``RECORD`` are
        matched by the files themselves. Returns a (possibly empty) list of
        mismatches. Each entry in the mismatch list will be a tuple consisting
        of the path, 'exists', 'size' or 'hash' according to what didn't match
        (existence is checked first, then size, then hash), the expected
        value and the actual value.
        zinstalled-files.txtr�TF)r�rrIr�r�r')r r�r�rrZr!r!r"r��s	z)EggInfoDistribution.check_installed_filesc
Cs�dd�}dd�}tjj|jd�}g}tjj|�r�tj|ddd��|}xt|D]l}|j�}tjjtjj|j|��}tjj|�s�tj	d	|�|j
d
�r�qHtjj|�sH|j|||�||�f�qHWWdQRX|j|ddf�|S)z�
        Iterates over the ``installed-files.txt`` entries and returns a tuple
        ``(path, hash, size)`` for each line.

        :returns: a list of (path, hash, size)
        c
Ss0t|d�}z|j�}Wd|j�Xtj|�j�S)Nr�)r�r��closer�r�Z	hexdigest)rr�Zcontentr!r!r"�_md5�s


z6EggInfoDistribution.list_installed_files.<locals>._md5cSstj|�jS)N)r��stat�st_size)rr!r!r"�_size�sz7EggInfoDistribution.list_installed_files.<locals>._sizezinstalled-files.txtrSzutf-8)r�zNon-existent file: %s�.pyc�.pyoN)rr)
r�rrIr�r�r�r��normpathrMr�rFr�r')r r�rr�rbr�r�rir!r!r"r��s"

&z(EggInfoDistribution.list_installed_filesFccs�tjj|jd�}d}tj|ddd��d}x\|D]T}|j�}|dkrFd}q,|s,tjjtjj|j|��}|j|j�r,|rz|Vq,|Vq,WWdQRXdS)	a
        Iterates over the ``installed-files.txt`` entries and returns paths for
        each line if the path is pointing to a file located in the
        ``.egg-info`` directory or one of its subdirectories.

        :parameter absolute: If *absolute* is ``True``, each returned path is
                          transformed into a local absolute path. Otherwise the
                          raw value from ``installed-files.txt`` is returned.
        :type absolute: boolean
        :returns: iterator of paths
        zinstalled-files.txtTrSzutf-8)r�z./FN)r�rrIr�r�r�rr�)r Zabsoluter��skipr�r�rir!r!r"r��s
z'EggInfoDistribution.list_distinfo_filescCst|t�o|j|jkS)N)rWrr)r r�r!r!r"r�s
zEggInfoDistribution.__eq__)N)F)r*r+r,r-r�r�r#r�r�r�r�r�r�r�r�r�r�r!r!)r�r"rNsK&
c@s^eZdZdZdd�Zdd�Zddd�Zd	d
�Zdd�Zddd�Z	ddd�Z
dd�Zdd�ZdS)�DependencyGrapha�
    Represents a dependency graph between distributions.

    The dependency relationships are stored in an ``adjacency_list`` that maps
    distributions to a list of ``(other, label)`` tuples where  ``other``
    is a distribution and the edge is labeled with ``label`` (i.e. the version
    specifier, if such was provided). Also, for more efficient traversal, for
    every distribution ``x``, a list of predecessors is kept in
    ``reverse_list[x]``. An edge from distribution ``a`` to
    distribution ``b`` means that ``a`` depends on ``b``. If any missing
    dependencies are found, they are stored in ``missing``, which is a
    dictionary that maps distributions to a list of requirements that were not
    provided by any other distributions.
    cCsi|_i|_i|_dS)N)�adjacency_list�reverse_listr�)r r!r!r"r#.szDependencyGraph.__init__cCsg|j|<g|j|<dS)z�Add the *distribution* to the graph.

        :type distribution: :class:`distutils2.database.InstalledDistribution`
                            or :class:`distutils2.database.EggInfoDistribution`
        N)rr)r �distributionr!r!r"�add_distribution3s
z DependencyGraph.add_distributionNcCs6|j|j||f�||j|kr2|j|j|�dS)a�Add an edge from distribution *x* to distribution *y* with the given
        *label*.

        :type x: :class:`distutils2.database.InstalledDistribution` or
                 :class:`distutils2.database.EggInfoDistribution`
        :type y: :class:`distutils2.database.InstalledDistribution` or
                 :class:`distutils2.database.EggInfoDistribution`
        :type label: ``str`` or ``None``
        N)rr'r)r �x�y�labelr!r!r"�add_edge=s
zDependencyGraph.add_edgecCs&tjd||�|jj|g�j|�dS)a
        Add a missing *requirement* for the given *distribution*.

        :type distribution: :class:`distutils2.database.InstalledDistribution`
                            or :class:`distutils2.database.EggInfoDistribution`
        :type requirement: ``str``
        z
%s missing %rN)rMrNr�r%r')r rr�r!r!r"�add_missingLszDependencyGraph.add_missingcCsd|j|jfS)Nz%s %s)rr])r r(r!r!r"�
_repr_distWszDependencyGraph._repr_distrcCs�|j|�g}xv|j|D]h\}}|j|�}|dk	r>d||f}|jd|t|��|j||d�}|jd�}|j|dd��qWdj|�S)zPrints only a subgraphNz%s [%s]z    rr�)rrr'r��	repr_noder��extendrI)r r(�level�outputr�rZ	suboutputZsubsr!r!r"rZs

zDependencyGraph.repr_nodeTcCs�g}|jd�x||jj�D]n\}}t|�dkr>|r>|j|�xH|D]@\}}|dk	rn|jd|j|j|f�qD|jd|j|jf�qDWqW|r�t|�dkr�|jd�|jd�|jd�x&|D]}|jd	|j�|jd
�q�W|jd�|jd�dS)a9Writes a DOT output for the graph to the provided file *f*.

        If *skip_disconnected* is set to ``True``, then all distributions
        that are not dependent on any other distribution are skipped.

        :type f: has to support ``file``-like operations
        :type skip_disconnected: ``bool``
        zdigraph dependencies {
rNz"%s" -> "%s" [label="%s"]
z
"%s" -> "%s"
zsubgraph disconnected {
zlabel = "Disconnected"
zbgcolor = red
z"%s"r�z}
)r�r�itemsr�r'r)r r�Zskip_disconnectedZdisconnectedr(�adjsr�rr!r!r"�to_dotgs&	






zDependencyGraph.to_dotcs�g}i}x&|jj�D]\}}|dd�||<qWx�g�x4t|j��dd�D]\}}|sL�j|�||=qLW�srPx*|j�D]\}}�fdd�|D�||<q|Wtjddd��D��|j��q2W|t|j��fS)aa
        Perform a topological sort of the graph.
        :return: A tuple, the first element of which is a topologically sorted
                 list of distributions, and the second element of which is a
                 list of distributions that cannot be sorted because they have
                 circular dependencies and so form a cycle.
        Ncs g|]\}}|�kr||f�qSr!r!)r�rsrS)�	to_remover!r"r��sz4DependencyGraph.topological_sort.<locals>.<listcomp>zMoving to result: %scSsg|]}d|j|jf�qS)z%s (%s))rr])r�rsr!r!r"r��s)rr�listr'rMrNr�keys)r rbZalist�krtr!)rr"�topological_sort�s$

z DependencyGraph.topological_sortcCs6g}x&|jj�D]\}}|j|j|��qWdj|�S)zRepresentation of the graphr�)rrr'rrI)r rr(rr!r!r"r��szDependencyGraph.__repr__)N)r)T)
r*r+r,r-r#r	r
rrrrrr�r!r!r!r"rs



 rr.cCsft|�}t�}i}xX|D]P}|j|�x@|jD]6}t|�\}}tjd|||�|j|g�j||f�q.WqWx�|D]�}|j	|j
B|jB|jB}x�|D]�}	y|j
|	�}
Wn6tk
r�tjd|	�|	j�d}|j
|�}
YnX|
j}d}||k�rJxV||D]J\}}y|
j|�}
Wntk
�r,d}
YnX|
r�|j|||	�d}Pq�W|s�|j||	�q�WqrW|S)a6Makes a dependency graph from the given distributions.

    :parameter dists: a list of distributions
    :type dists: list of :class:`distutils2.database.InstalledDistribution` and
                 :class:`distutils2.database.EggInfoDistribution` instances
    :rtype: a :class:`DependencyGraph` instance
    zAdd to provided: %s, %s, %sz+could not read version %r - using name onlyrFT)rrr	rfrrMrNr%r'r�r�r�r�rdrr�r�r&rgr
r)�distsr=�graphrhr(rirr]r�r�rdZmatchedZproviderrgr!r!r"�
make_graph�sD





rcCs~||krtd|j��t|�}|g}|j|}x@|rn|j�}|j|�x$|j|D]}||krR|j|�qRWq0W|jd�|S)z�Recursively generate a list of distributions from *dists* that are
    dependent on *dist*.

    :param dists: a list of distributions
    :param dist: a distribution, member of *dists* for which we are interested
    z1given distribution %r is not a member of the listr)rrrr�popr')rr(rZdep�todorsZsuccr!r!r"�get_dependent_dists�s



r!cCsv||krtd|j��t|�}g}|j|}xD|rp|j�d}|j|�x$|j|D]}||krT|j|�qTWq.W|S)z�Recursively generate a list of distributions from *dists* that are
    required by *dist*.

    :param dists: a list of distributions
    :param dist: a distribution, member of *dists* for which we are interested
    z1given distribution %r is not a member of the listr)rrrrrr')rr(rr�r rsZpredr!r!r"�get_required_dists�s


r"cKs4|jdd�}tf|�}||_||_|p(d|_t|�S)zO
    A convenience method for making a dist given just a name and version.
    �summaryzPlaceholder for summary)rr	rr]r#r)rr]�kwargsr#rr!r!r"�	make_dists

r%)r.)6r-Z
__future__rr�r�rJr�Zloggingr�rHr/r�r:rr�compatrr]rrr>r	r
r�utilrr
rrrrr�__all__Z	getLoggerr*rMr�ZCOMMANDS_FILENAMEr�rGr�rrrrrrrOrPrrr!r"r%r!r!r!r"�<module>sV$

l7GM
6_vendor/distlib/__pycache__/compat.cpython-36.pyc000064400000076252151733136300015771 0ustar003

�Pfa���@sddlmZddlZddlZddlZyddlZWnek
rHdZYnXejddk�r~ddlmZe	fZ
eZddl
mZddlZddlZddlmZddlmZmZmZmZmZdd	lmZmZmZm Z m!Z!m"Z"m#Z#d
d�Zddl$Z$ddl$m%Z%m&Z&m'Z'm(Z(m)Z)m*Z*m+Z+m,Z,m-Z-e�r&dd
l$m.Z.ddl/Z/ddl0Z0ddl1Z2ddl3m3Z3ddl4Z4e5Z5ddl6m7Z8ddl6m9Z:da;dd�Z<�nddl=mZe>fZ
e>Zddl=m?ZddlZddlZddlZddl@mZmZmZm<Z<mZmZmZmZm#Z#ddlAm&Z&mZm%Z%m Z m!Z!m)Z)m*Z*m+Z+m,Z,m-Z-e�r&dd
lAm.Z.ddlBm(Z(m'Z'm"Z"ddlCjDZ/ddlAjEZ$ddlFjDZ0ddl2Z2ddlGm3Z3ddlHjIZ4eJZ5ddl6m:Z:e8Z8yddlmKZKmLZLWn8ek
�r�Gdd�deM�ZLdcdd�ZNdd�ZKYnXyddl
mOZPWn&ek
�rGd d!�d!eQ�ZPYnXydd"lmRZRWn,ek
�rLejSejTBdfd#d$�ZRYnXdd%lUmVZWeXeWd&��rleWZVn,dd'lUmYZZGd(d)�d)eZ�ZYGd*d+�d+eW�ZVydd,l[m\Z\Wnek
�r�d-d.�Z\YnXyddl]Z]Wn"ek
�r�dd/lm]Z]YnXye^Z^Wn*e_k
�r*dd0l`maZad1d2�Z^YnXyejbZbejcZcWnBedk
�r~eje�Zfefd3k�rfd4Zgnd5Zgd6d7�Zbd8d9�ZcYnXydd:lhmiZiWnHek
�r�dd;ljmkZkmlZlddlZejmd<�Znd=d>�Zod?d@�ZiYnXyddAlpmqZqWn"ek
�rddAlrmqZqYnXejddB�ddk�r,e3�jsZsnddDlpmsZsyddEl`mtZtWndek
�r�ddFl`muZuyddGlvmwZxWn ek
�r�dedIdJ�ZxYnXGdKdL�dLeu�ZtYnXyddMlymzZzWn ek
�r�dfdNdO�ZzYnXyddPl`m{Z{Wn�ek
�rzyddQl|m}Z~Wn"ek
�r4ddQlm}Z~YnXyddRl�m�Z�m�Z�m�Z�Wnek
�rdYnXGdSdT�dTe��Z{YnXyddUl�m�Z�m�Z�Wnvek
�rejmdVej��Z�dWdX�Z�GdYdZ�dZe��Z�dgd[d\�Z�Gd]d^�d^e��Z�Gd_d`�d`e��Z�Gdadb�dbeQ�Z�YnXdS)h�)�absolute_importN�)�StringIO)�FileType�)�shutil)�urlparse�
urlunparse�urljoin�urlsplit�
urlunsplit)�urlretrieve�quote�unquote�url2pathname�pathname2url�ContentTooShortError�	splittypecCst|t�r|jd�}t|�S)Nzutf-8)�
isinstance�unicode�encode�_quote)�s�r�/usr/lib/python3.6/compat.pyrs

r)	�Request�urlopen�URLError�	HTTPError�HTTPBasicAuthHandler�HTTPPasswordMgr�HTTPHandler�HTTPRedirectHandler�build_opener)�HTTPSHandler)�
HTMLParser)�ifilter)�ifilterfalsecCs<tdkrddl}|jd�atj|�}|r4|jdd�Sd|fS)zJsplituser('user[:passwd]@host[:port]') --> 'user[:passwd]', 'host[:port]'.Nrz^(.*)@(.*)$r�)�	_userprog�re�compile�match�group)�hostr*r,rrr�	splituser4s

r/)�
TextIOWrapper)	rr	r
r/rrrrr)
rr
rrrrr r!r"r#)rrr)�filterfalse)�match_hostname�CertificateErrorc@seZdZdS)r3N)�__name__�
__module__�__qualname__rrrrr3^sr3c
Cs�g}|sdS|jd�}|d|dd�}}|jd�}||krNtdt|���|sb|j�|j�kS|dkrv|jd�n>|jd	�s�|jd	�r�|jtj|��n|jtj|�j	d
d��x|D]}|jtj|��q�Wtj
dd
j|�dtj�}	|	j
|�S)zpMatching according to RFC 6125, section 6.4.3

        http://tools.ietf.org/html/rfc6125#section-6.4.3
        F�.rrN�*z,too many wildcards in certificate DNS name: z[^.]+zxn--z\*z[^.]*z\Az\.z\Z)�split�countr3�repr�lower�append�
startswithr*�escape�replacer+�join�
IGNORECASEr,)
Zdn�hostnameZ
max_wildcardsZpats�partsZleftmostZ	remainderZ	wildcards�fragZpatrrr�_dnsname_matchbs(


rFcCs�|std��g}|jdf�}x0|D](\}}|dkr"t||�r@dS|j|�q"W|s�xF|jdf�D]6}x0|D](\}}|dkrjt||�r�dS|j|�qjWq`Wt|�dkr�td|d	jtt|��f��n*t|�dkr�td
||df��ntd��dS)
a=Verify that *cert* (in decoded format as returned by
        SSLSocket.getpeercert()) matches the *hostname*.  RFC 2818 and RFC 6125
        rules are followed, but IP addresses are not accepted for *hostname*.

        CertificateError is raised on failure. On success, the function
        returns nothing.
        ztempty or no certificate, match_hostname needs a SSL socket or SSL context with either CERT_OPTIONAL or CERT_REQUIREDZsubjectAltNameZDNSNZsubjectZ
commonNamerz&hostname %r doesn't match either of %sz, zhostname %r doesn't match %rrz=no appropriate commonName or subjectAltName fields were found)	�
ValueError�getrFr=�lenr3rA�mapr;)ZcertrCZdnsnamesZsan�key�value�subrrrr2�s.

r2)�SimpleNamespacec@seZdZdZdd�ZdS)�	ContainerzR
        A generic container for when multiple values need to be returned
        cKs|jj|�dS)N)�__dict__�update)�self�kwargsrrr�__init__�szContainer.__init__N)r4r5r6�__doc__rTrrrrrO�srO)�whichcs"dd�}tjj��r&|�|�r"�SdS|dkr>tjjdtj�}|sFdS|jtj�}tj	dkr�tj
|krt|jdtj
�tjjdd�jtj�}t�fd	d
�|D��r��g}q‡fdd�|D�}n�g}t
�}xT|D]L}tjj|�}||kr�|j|�x(|D] }	tjj||	�}
||
|�r�|
Sq�Wq�WdS)
aKGiven a command, mode, and a PATH string, return the path which
        conforms to the given mode on the PATH, or None if there is no such
        file.

        `mode` defaults to os.F_OK | os.X_OK. `path` defaults to the result
        of os.environ.get("PATH"), or can be overridden with a custom search
        path.

        cSs&tjj|�o$tj||�o$tjj|�S)N)�os�path�exists�access�isdir)�fn�moderrr�
_access_check�szwhich.<locals>._access_checkN�PATHZwin32rZPATHEXT�c3s |]}�j�j|j��VqdS)N)r<�endswith)�.0�ext)�cmdrr�	<genexpr>�szwhich.<locals>.<genexpr>csg|]}�|�qSrr)rbrc)rdrr�
<listcomp>�szwhich.<locals>.<listcomp>)rWrX�dirname�environrH�defpathr9�pathsep�sys�platform�curdir�insert�any�set�normcase�addrA)rdr]rXr^Zpathext�files�seen�dirZnormdirZthefile�namer)rdrrV�s8







rV)�ZipFile�	__enter__)�
ZipExtFilec@s$eZdZdd�Zdd�Zdd�ZdS)rycCs|jj|j�dS)N)rPrQ)rR�baserrrrTszZipExtFile.__init__cCs|S)Nr)rRrrrrxszZipExtFile.__enter__cGs|j�dS)N)�close)rR�exc_inforrr�__exit__szZipExtFile.__exit__N)r4r5r6rTrxr}rrrrrysryc@s$eZdZdd�Zdd�Zdd�ZdS)rwcCs|S)Nr)rRrrrrx"szZipFile.__enter__cGs|j�dS)N)r{)rRr|rrrr}%szZipFile.__exit__cOstj|f|�|�}t|�S)N)�BaseZipFile�openry)rR�argsrSrzrrrr)szZipFile.openN)r4r5r6rxr}rrrrrrw!srw)�python_implementationcCs0dtjkrdStjdkrdStjjd�r,dSdS)z6Return a string identifying the Python implementation.ZPyPy�javaZJythonZ
IronPythonZCPython)rk�versionrWrvr>rrrrr�0s

r�)�	sysconfig)�CallablecCs
t|t�S)N)rr�)�objrrr�callableDsr��mbcs�strict�surrogateescapecCs:t|t�r|St|t�r$|jtt�Stdt|�j��dS)Nzexpect bytes or str, not %s)	r�bytes�	text_typer�_fsencoding�	_fserrors�	TypeError�typer4)�filenamerrr�fsencodeRs

r�cCs:t|t�r|St|t�r$|jtt�Stdt|�j��dS)Nzexpect bytes or str, not %s)	rr�r��decoder�r�r�r�r4)r�rrr�fsdecode[s

r�)�detect_encoding)�BOM_UTF8�lookupzcoding[:=]\s*([-\w.]+)cCsH|dd�j�jdd�}|dks*|jd�r.dS|d
ks@|jd�rDdS|S)z(Imitates get_normal_name in tokenizer.c.N��_�-zutf-8zutf-8-�latin-1�
iso-8859-1�iso-latin-1�latin-1-�iso-8859-1-�iso-latin-1-)r�r�r�)r�r�r�)r<r@r>)�orig_enc�encrrr�_get_normal_namels
r�cs�y�jj�Wntk
r$d�YnXd�d}d}�fdd�}��fdd�}|�}|jt�rpd�|d	d�}d
}|s||gfS||�}|r�||gfS|�}|s�||gfS||�}|r�|||gfS|||gfS)a?
        The detect_encoding() function is used to detect the encoding that should
        be used to decode a Python source file.  It requires one argument, readline,
        in the same way as the tokenize() generator.

        It will call readline a maximum of twice, and return the encoding used
        (as a string) and a list of any lines (left as bytes) it has read in.

        It detects the encoding from the presence of a utf-8 bom or an encoding
        cookie as specified in pep-0263.  If both a bom and a cookie are present,
        but disagree, a SyntaxError will be raised.  If the encoding cookie is an
        invalid charset, raise a SyntaxError.  Note that if a utf-8 bom is found,
        'utf-8-sig' is returned.

        If no encoding is specified, then the default of 'utf-8' will be returned.
        NFzutf-8cs y��Stk
rdSXdS)N�)�
StopIterationr)�readlinerr�read_or_stop�sz%detect_encoding.<locals>.read_or_stopcs�y|jd�}Wn4tk
rBd}�dk	r6dj|��}t|��YnXtj|�}|sVdSt|d�}yt|�}Wn:tk
r��dkr�d|}ndj�|�}t|��YnX�r�|j	dkr؈dkr�d}n
dj��}t|��|d	7}|S)
Nzutf-8z'invalid or missing encoding declarationz{} for {!r}rzunknown encoding: zunknown encoding for {!r}: {}zencoding problem: utf-8z encoding problem for {!r}: utf-8z-sig)
r��UnicodeDecodeError�format�SyntaxError�	cookie_re�findallr�r��LookupErrorrv)�line�line_string�msgZmatches�encoding�codec)�	bom_foundr�rr�find_cookie�s6



z$detect_encoding.<locals>.find_cookieTrz	utf-8-sig)�__self__rv�AttributeErrorr>r�)r�r��defaultr�r��first�secondr)r�r�r�rr�ws4
&


r�)r?r(�)�unescape)�ChainMap)�MutableMapping)�recursive_repr�...cs�fdd�}|S)zm
            Decorator to make a repr function return fillvalue for a recursive
            call
            csLt�����fdd�}t�d�|_t�d�|_t�d�|_t�di�|_|S)NcsBt|�t�f}|�kr�S�j|�z�|�}Wd�j|�X|S)N)�id�	get_identrr�discard)rRrK�result)�	fillvalue�repr_running�
user_functionrr�wrapper�s
z=_recursive_repr.<locals>.decorating_function.<locals>.wrapperr5rUr4�__annotations__)rp�getattrr5rUr4r�)r�r�)r�)r�r�r�decorating_function�sz,_recursive_repr.<locals>.decorating_functionr)r�r�r)r�r�_recursive_repr�sr�c@s�eZdZdZdd�Zdd�Zdd�Zd'd	d
�Zdd�Zd
d�Z	dd�Z
dd�Ze�dd��Z
edd��Zdd�ZeZdd�Zedd��Zdd�Zdd �Zd!d"�Zd#d$�Zd%d&�ZdS)(r�a� A ChainMap groups multiple dicts (or other mappings) together
        to create a single, updateable view.

        The underlying mappings are stored in a list.  That list is public and can
        accessed or updated using the *maps* attribute.  There is no other state.

        Lookups search the underlying mappings successively until a key is found.
        In contrast, writes, updates, and deletions only operate on the first
        mapping.

        cGst|�pig|_dS)z�Initialize a ChainMap by setting *maps* to the given mappings.
            If no mappings are provided, a single empty dictionary is used.

            N)�list�maps)rRr�rrrrT
szChainMap.__init__cCst|��dS)N)�KeyError)rRrKrrr�__missing__szChainMap.__missing__cCs8x,|jD]"}y||Stk
r(YqXqW|j|�S)N)r�r�r�)rRrK�mappingrrr�__getitem__s
zChainMap.__getitem__NcCs||kr||S|S)Nr)rRrKr�rrrrHszChainMap.getcCstt�j|j��S)N)rIrp�unionr�)rRrrr�__len__"szChainMap.__len__cCstt�j|j��S)N)�iterrpr�r�)rRrrr�__iter__%szChainMap.__iter__cst�fdd�|jD��S)Nc3s|]}�|kVqdS)Nr)rb�m)rKrrre)sz(ChainMap.__contains__.<locals>.<genexpr>)ror�)rRrKr)rKr�__contains__(szChainMap.__contains__cCs
t|j�S)N)ror�)rRrrr�__bool__+szChainMap.__bool__cCsdj|djtt|j���S)Nz{0.__class__.__name__}({1})z, )r�rArJr;r�)rRrrr�__repr__.szChainMap.__repr__cGs|tj|f|���S)z?Create a ChainMap with a single dict created from the iterable.)�dict�fromkeys)�cls�iterabler�rrrr�3szChainMap.fromkeyscCs$|j|jdj�f|jdd���S)zHNew ChainMap or subclass with a new copy of maps[0] and refs to maps[1:]rrN)�	__class__r��copy)rRrrrr�8sz
ChainMap.copycCs|jif|j��S)z;New ChainMap with a new dict followed by all previous maps.)r�r�)rRrrr�	new_child>szChainMap.new_childcCs|j|jdd��S)zNew ChainMap from maps[1:].rN)r�r�)rRrrr�parentsBszChainMap.parentscCs||jd|<dS)Nr)r�)rRrKrLrrr�__setitem__GszChainMap.__setitem__cCs8y|jd|=Wn"tk
r2tdj|���YnXdS)Nrz(Key not found in the first mapping: {!r})r�r�r�)rRrKrrr�__delitem__JszChainMap.__delitem__cCs0y|jdj�Stk
r*td��YnXdS)zPRemove and return an item pair from maps[0]. Raise KeyError is maps[0] is empty.rz#No keys found in the first mapping.N)r��popitemr�)rRrrrr�PszChainMap.popitemcGs>y|jdj|f|��Stk
r8tdj|���YnXdS)zWRemove *key* from maps[0] and return its value. Raise KeyError if *key* not in maps[0].rz(Key not found in the first mapping: {!r}N)r��popr�r�)rRrKr�rrrr�WszChainMap.popcCs|jdj�dS)z'Clear maps[0], leaving maps[1:] intact.rN)r��clear)rRrrrr�^szChainMap.clear)N)r4r5r6rUrTr�r�rHr�r�r�r�r�r��classmethodr�r��__copy__r��propertyr�r�r�r�r�r�rrrrr�s(
r�)�cache_from_sourcecCs0|jd�st�|dkrd}|r$d}nd}||S)Nz.pyT�c�o)ra�AssertionError)rX�debug_override�suffixrrrr�esr�)�OrderedDict)r�)�KeysView�
ValuesView�	ItemsViewc@s�eZdZdZdd�Zejfdd�Zejfdd�Zdd	�Zd
d�Z	dd
�Z
d6dd�Zdd�Zdd�Z
dd�Zdd�Zdd�Zdd�Zdd�ZeZe�Zefdd �Zd7d"d#�Zd8d$d%�Zd&d'�Zd(d)�Zed9d*d+��Zd,d-�Zd.d/�Zd0d1�Zd2d3�Z d4d5�Z!d!S):r�z)Dictionary that remembers insertion ordercOsnt|�dkrtdt|���y
|jWn6tk
r\g|_}||dg|dd�<i|_YnX|j||�dS)z�Initialize an ordered dictionary.  Signature is the same as for
            regular dictionaries, but keyword arguments are not recommended
            because their insertion order is arbitrary.

            rz$expected at most 1 arguments, got %dN)rIr��_OrderedDict__rootr��_OrderedDict__map�_OrderedDict__update)rRr��kwds�rootrrrrT�s

zOrderedDict.__init__cCsF||kr6|j}|d}|||g|d<|d<|j|<||||�dS)z!od.__setitem__(i, y) <==> od[i]=yrrN)r�r�)rRrKrLZdict_setitemr��lastrrrr��s
 zOrderedDict.__setitem__cCs0|||�|jj|�\}}}||d<||d<dS)z od.__delitem__(y) <==> del od[y]rrN)r�r�)rRrKZdict_delitem�	link_prev�	link_nextrrrr��s
zOrderedDict.__delitem__ccs2|j}|d}x||k	r,|dV|d}qWdS)zod.__iter__() <==> iter(od)rr(N)r�)rRr��currrrrr��s


zOrderedDict.__iter__ccs2|j}|d}x||k	r,|dV|d}qWdS)z#od.__reversed__() <==> reversed(od)rr(N)r�)rRr�r�rrr�__reversed__�s


zOrderedDict.__reversed__cCshyDx|jj�D]}|dd�=qW|j}||dg|dd�<|jj�Wntk
rXYnXtj|�dS)z.od.clear() -> None.  Remove all items from od.N)r��
itervaluesr�r�r�r�)rRZnoder�rrrr��szOrderedDict.clearTcCs||std��|j}|r8|d}|d}||d<||d<n |d}|d}||d<||d<|d}|j|=tj||�}||fS)z�od.popitem() -> (k, v), return and remove a (key, value) pair.
            Pairs are returned in LIFO order if last is true or FIFO order if false.

            zdictionary is emptyrrr()r�r�r�r�r�)rRr�r��linkr�r�rKrLrrrr��s 
zOrderedDict.popitemcCst|�S)zod.keys() -> list of keys in od)r�)rRrrr�keys�szOrderedDict.keyscs�fdd��D�S)z#od.values() -> list of values in odcsg|]}�|�qSrr)rbrK)rRrrrf�sz&OrderedDict.values.<locals>.<listcomp>r)rRr)rRr�values�szOrderedDict.valuescs�fdd��D�S)z.od.items() -> list of (key, value) pairs in odcsg|]}|�|f�qSrr)rbrK)rRrrrf�sz%OrderedDict.items.<locals>.<listcomp>r)rRr)rRr�items�szOrderedDict.itemscCst|�S)z0od.iterkeys() -> an iterator over the keys in od)r�)rRrrr�iterkeys�szOrderedDict.iterkeysccsx|D]}||VqWdS)z2od.itervalues -> an iterator over the values in odNr)rR�krrrr��s
zOrderedDict.itervaluesccs x|D]}|||fVqWdS)z=od.iteritems -> an iterator over the (key, value) items in odNr)rRrrrr�	iteritems�s
zOrderedDict.iteritemscOs�t|�dkr tdt|�f��n|s,td��|d}f}t|�dkrL|d}t|t�rrx^|D]}||||<q\WnDt|d�r�x8|j�D]}||||<q�Wnx|D]\}}|||<q�Wx|j�D]\}}|||<q�WdS)a�od.update(E, **F) -> None.  Update od from dict/iterable E and F.

            If E is a dict instance, does:           for k in E: od[k] = E[k]
            If E has a .keys() method, does:         for k in E.keys(): od[k] = E[k]
            Or if E is an iterable of items, does:   for k, v in E: od[k] = v
            In either case, this is followed by:     for k, v in F.items(): od[k] = v

            r(z8update() takes at most 2 positional arguments (%d given)z,update() takes at least 1 argument (0 given)rrrN)rIr�rr��hasattrrr)r�r�rR�otherrKrLrrrrQ�s&	


zOrderedDict.updatecCs0||kr||}||=|S||jkr,t|��|S)z�od.pop(k[,d]) -> v, remove specified key and return the corresponding value.
            If key is not found, d is returned if given, otherwise KeyError is raised.

            )�_OrderedDict__markerr�)rRrKr�r�rrrr�!s
zOrderedDict.popNcCs||kr||S|||<|S)zDod.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in odr)rRrKr�rrr�
setdefault.szOrderedDict.setdefaultcCs^|si}t|�t�f}||kr"dSd||<z&|s>d|jjfSd|jj|j�fS||=XdS)zod.__repr__() <==> repr(od)z...rz%s()z%s(%r)N)r��
_get_identr�r4r)rRZ
_repr_runningZcall_keyrrrr�5szOrderedDict.__repr__cs\�fdd��D�}t��j�}xtt��D]}|j|d�q*W|rP�j|f|fS�j|ffS)z%Return state information for picklingcsg|]}|�|g�qSrr)rbr)rRrrrfEsz*OrderedDict.__reduce__.<locals>.<listcomp>N)�varsr�r�r�r�)rRrZ	inst_dictrr)rRr�
__reduce__CszOrderedDict.__reduce__cCs
|j|�S)z!od.copy() -> a shallow copy of od)r�)rRrrrr�MszOrderedDict.copycCs |�}x|D]}|||<qW|S)z�OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S
            and values equal to v (which defaults to None).

            r)r�r�rL�drKrrrr�Qs
zOrderedDict.fromkeyscCs6t|t�r*t|�t|�ko(|j�|j�kStj||�S)z�od.__eq__(y) <==> od==y.  Comparison to another OD is order-sensitive
            while comparison to a regular mapping is order-insensitive.

            )rr�rIrr��__eq__)rRrrrrr\s
 zOrderedDict.__eq__cCs
||kS)Nr)rRrrrr�__ne__eszOrderedDict.__ne__cCst|�S)z@od.viewkeys() -> a set-like object providing a view on od's keys)r�)rRrrr�viewkeysjszOrderedDict.viewkeyscCst|�S)z<od.viewvalues() -> an object providing a view on od's values)r�)rRrrr�
viewvaluesnszOrderedDict.viewvaluescCst|�S)zBod.viewitems() -> a set-like object providing a view on od's items)r�)rRrrr�	viewitemsrszOrderedDict.viewitems)T)N)N)N)"r4r5r6rUrTr�r�r�r�r�r�r�rrrrr�rrQr��objectrr�r	r�rr�r�r�rrrrrrrrrr��s:
	




	r�)�BaseConfigurator�valid_identz^[a-z_][a-z0-9_]*$cCstj|�}|std|��dS)Nz!Not a valid Python identifier: %rT)�
IDENTIFIERr,rG)rr�rrrr|s
rc@s"eZdZdZdd�Zddd�ZdS)�ConvertingDictz A converting dictionary wrapper.cCsJtj||�}|jj|�}||k	rF|||<t|�tttfkrF||_||_	|S)N)
r�r��configurator�convertr�r�ConvertingList�ConvertingTuple�parentrK)rRrKrLr�rrrr��s
zConvertingDict.__getitem__NcCsLtj|||�}|jj|�}||k	rH|||<t|�tttfkrH||_||_	|S)N)
r�rHrrr�rrrrrK)rRrKr�rLr�rrrrH�s
zConvertingDict.get)N)r4r5r6rUr�rHrrrrr�srcCsDtj|||�}|jj|�}||k	r@t|�tttfkr@||_||_	|S)N)
r�r�rrr�rrrrrK)rRrKr�rLr�rrrr��s
r�c@s"eZdZdZdd�Zd	dd�ZdS)
rzA converting list wrapper.cCsJtj||�}|jj|�}||k	rF|||<t|�tttfkrF||_||_	|S)N)
r�r�rrr�rrrrrK)rRrKrLr�rrrr��s
zConvertingList.__getitem__rcCs<tj||�}|jj|�}||k	r8t|�tttfkr8||_|S)N)	r�r�rrr�rrrr)rR�idxrLr�rrrr��s
zConvertingList.popN���)r)r4r5r6rUr�r�rrrrr�src@seZdZdZdd�ZdS)rzA converting tuple wrapper.cCsBtj||�}|jj|�}||k	r>t|�tttfkr>||_||_	|S)N)
�tupler�rrr�rrrrrK)rRrKrLr�rrrr��s
zConvertingTuple.__getitem__N)r4r5r6rUr�rrrrr�src@s�eZdZdZejd�Zejd�Zejd�Zejd�Z	ejd�Z
ddd	�Zee
�Zd
d�Zdd
�Zdd�Zdd�Zdd�Zdd�Zdd�ZdS)rzQ
        The configurator base class which defines some useful defaults.
        z%^(?P<prefix>[a-z]+)://(?P<suffix>.*)$z^\s*(\w+)\s*z^\.\s*(\w+)\s*z^\[\s*(\w+)\s*\]\s*z^\d+$�ext_convert�cfg_convert)rcZcfgcCst|�|_||j_dS)N)r�configr)rRr"rrrrT�s
zBaseConfigurator.__init__c	Cs�|jd�}|jd�}y`|j|�}xP|D]H}|d|7}yt||�}Wq&tk
rl|j|�t||�}Yq&Xq&W|Stk
r�tj�dd�\}}td||f�}|||_	|_
|�YnXdS)zl
            Resolve strings to objects using standard import and attribute
            syntax.
            r7rrNzCannot resolve %r: %s)r9r��importerr�r��ImportErrorrkr|rG�	__cause__�
__traceback__)	rRrrvZused�foundrE�e�tb�vrrr�resolve�s"




zBaseConfigurator.resolvecCs
|j|�S)z*Default converter for the ext:// protocol.)r+)rRrLrrrr szBaseConfigurator.ext_convertcCs|}|jj|�}|dkr&td|��n�||j�d�}|j|j�d}x�|r�|jj|�}|rp||j�d}nd|jj|�}|r�|j�d}|jj|�s�||}n2yt	|�}||}Wnt
k
r�||}YnX|r�||j�d�}qJtd||f��qJW|S)z*Default converter for the cfg:// protocol.NzUnable to convert %rrzUnable to convert %r at %r)�WORD_PATTERNr,rG�endr"�groups�DOT_PATTERN�
INDEX_PATTERN�
DIGIT_PATTERN�intr�)rRrL�restr�r
r�nrrrr!s2
zBaseConfigurator.cfg_convertcCs�t|t�r&t|t�r&t|�}||_n�t|t�rLt|t�rLt|�}||_n|t|t�rrt|t�rrt|�}||_nVt|t�r�|j	j
|�}|r�|j�}|d}|jj
|d�}|r�|d}t||�}||�}|S)z�
            Convert values to an appropriate type. dicts, lists and tuples are
            replaced by their converting alternatives. Strings are checked to
            see if they have a conversion format and are converted if they do.
            �prefixNr�)rrr�rrr�rr�string_types�CONVERT_PATTERNr,�	groupdict�value_convertersrHr�)rRrLr�r
r5Z	converterr�rrrr)s*


zBaseConfigurator.convertcsr�jd�}t|�s|j|�}�jdd�}t�fdd��D��}|f|�}|rnx |j�D]\}}t|||�qVW|S)z1Configure an object with a user-supplied factory.z()r7Ncs g|]}t|�r|�|f�qSr)r)rbr)r"rrrfLsz5BaseConfigurator.configure_custom.<locals>.<listcomp>)r�r�r+r�r�setattr)rRr"r�ZpropsrSr�rvrLr)r"r�configure_customEs


z!BaseConfigurator.configure_customcCst|t�rt|�}|S)z0Utility function which converts lists to tuples.)rr�r)rRrLrrr�as_tupleSs
zBaseConfigurator.as_tupleN)r4r5r6rUr*r+r7r,r/r0r1r9�staticmethod�
__import__r#rTr+r r!rr;r<rrrrr�s 




"r)r)rr�)r�)N)N)�Z
__future__rrWr*rkZsslr$�version_inforZ
basestringr6rr��typesrZ	file_typeZ__builtin__�builtinsZConfigParserZconfigparserZ	_backportrrr	r
rrZurllibr
rrrrrrrZurllib2rrrrrr r!r"r#r$ZhttplibZ	xmlrpclibZQueueZqueuer%ZhtmlentitydefsZ	raw_input�	itertoolsr&�filterr'r1r)r/�io�strr0Zurllib.parseZurllib.requestZurllib.errorZhttp.clientZclientZrequestZ
xmlrpc.clientZhtml.parserZ
html.entitiesZentities�inputr2r3rGrFrNrOrrV�F_OK�X_OKZzipfilerwr~rryZBaseZipExtFilerlr�r�r��	NameError�collectionsr�r�r�r��getfilesystemencodingr�r��tokenizer��codecsr�r�r+r�r�Zhtmlr?Zcgir�r�r��reprlibr�r�Zimpr�r�Zthreadr�r
Zdummy_threadZ_abcollr�r�r�r�Zlogging.configrr�Irrr�r�rrrrrrr�<module>s&
$,,0




2+A


		
[
b
w

_vendor/distlib/__pycache__/database.cpython-36.pyc000064400000122116151733136300016241 0ustar003

�Pf��@s�dZddlmZddlZddlZddlZddlZddlZddlZddl	Z	ddl
Z
ddlZddlm
Z
mZddlmZddlmZmZddlmZmZmZdd	lmZmZmZmZmZmZmZd
ddd
dgZ ej!e"�Z#dZ$dZ%deddde$dfZ&dZ'Gdd�de(�Z)Gdd�de(�Z*Gdd
�d
e(�Z+Gdd�de+�Z,Gdd�de,�Z-Gdd
�d
e,�Z.e-Z/e.Z0Gdd�de(�Z1d)d!d"�Z2d#d$�Z3d%d&�Z4d'd(�Z5dS)*zPEP 376 implementation.�)�unicode_literalsN�)�DistlibException�	resources)�StringIO)�
get_scheme�UnsupportedVersionError)�Metadata�METADATA_FILENAME�WHEEL_METADATA_FILENAME)�parse_requirement�cached_property�parse_name_and_version�read_exports�
write_exports�	CSVReader�	CSVWriter�Distribution�BaseInstalledDistribution�InstalledDistribution�EggInfoDistribution�DistributionPathzpydist-exports.jsonzpydist-commands.jsonZ	INSTALLER�RECORD�	REQUESTED�	RESOURCES�SHAREDz
.dist-infoc@s(eZdZdZdd�Zdd�Zdd�ZdS)	�_CachezL
    A simple cache mapping names and .dist-info paths to distributions
    cCsi|_i|_d|_dS)zZ
        Initialise an instance. There is normally one for each DistributionPath.
        FN)�name�path�	generated)�self�r!�/usr/lib/python3.6/database.py�__init__0sz_Cache.__init__cCs|jj�|jj�d|_dS)zC
        Clear the cache, setting it to its initial state.
        FN)r�clearrr)r r!r!r"r$8s

z_Cache.clearcCs2|j|jkr.||j|j<|jj|jg�j|�dS)z`
        Add a distribution to the cache.
        :param dist: The distribution to add.
        N)rr�
setdefault�key�append)r �distr!r!r"�add@sz
_Cache.addN)�__name__�
__module__�__qualname__�__doc__r#r$r)r!r!r!r"r,src@s�eZdZdZddd�Zdd�Zdd	�Zeee�Zd
d�Z	dd
�Z
dd�Zedd��Z
dd�Zdd�Zddd�Zdd�Zddd�ZdS)rzU
    Represents a set of distributions installed on a path (typically sys.path).
    NFcCsD|dkrtj}||_d|_||_t�|_t�|_d|_td�|_	dS)a�
        Create an instance from a path, optionally including legacy (distutils/
        setuptools/distribute) distributions.
        :param path: The path to use, as a list of directories. If not specified,
                     sys.path is used.
        :param include_egg: If True, this instance will look for and return legacy
                            distributions as well as those based on PEP 376.
        NT�default)
�sysr�
_include_dist�_include_eggr�_cache�
_cache_egg�_cache_enabledr�_scheme)r rZinclude_eggr!r!r"r#Ns	zDistributionPath.__init__cCs|jS)N)r4)r r!r!r"�_get_cache_enabledbsz#DistributionPath._get_cache_enabledcCs
||_dS)N)r4)r �valuer!r!r"�_set_cache_enabledesz#DistributionPath._set_cache_enabledcCs|jj�|jj�dS)z,
        Clears the internal cache.
        N)r2r$r3)r r!r!r"�clear_cachejs
zDistributionPath.clear_cachec
csTt�}�xF|jD�]:}tj|�}|dkr*q|jd�}|s|jrDqt|j�}�x�|D]�}|j|�}|sV|j|krvqV|jo�|jt	��rt
tg}x*|D] }tj
||�}	|j|	�}
|
r�Pq�WqVtj|
j���}t|dd�}WdQRXtjd|j�|j|j�t|j||d�VqV|jrV|jd	�rVtjd|j�|j|j�t|j|�VqVWqWdS)
zD
        Yield .dist-info and/or .egg(-info) distributions.
        N��legacy)�fileobj�schemezFound %s)�metadata�env�	.egg-info�.egg)r@rA)�setrr�finder_for_path�findZis_container�sortedr0�endswith�DISTINFO_EXTr
r�	posixpath�join�
contextlib�closing�	as_streamr	�logger�debugr)�new_dist_classr1�old_dist_class)
r �seenr�finder�rZrset�entryZpossible_filenamesZmetadata_filenameZ
metadata_pathZpydist�streamr>r!r!r"�_yield_distributionsrs@






z%DistributionPath._yield_distributionscCst|jj}|jo|jj}|s"|rpx4|j�D](}t|t�rH|jj|�q,|jj|�q,W|rdd|j_|rpd|j_dS)zk
        Scan the path for distributions and populate the cache with
        those that are found.
        TN)r2rr1r3rV�
isinstancerr))r Zgen_distZgen_eggr(r!r!r"�_generate_cache�s

z DistributionPath._generate_cachecCs|jdd�}dj||g�tS)ao
        The *name* and *version* parameters are converted into their
        filename-escaped form, i.e. any ``'-'`` characters are replaced
        with ``'_'`` other than the one in ``'dist-info'`` and the one
        separating the name from the version number.

        :parameter name: is converted to a standard distribution name by replacing
                         any runs of non- alphanumeric characters with a single
                         ``'-'``.
        :type name: string
        :parameter version: is converted to a standard version string. Spaces
                            become dots, and all other non-alphanumeric characters
                            (except dots) become dashes, with runs of multiple
                            dashes condensed to a single dash.
        :type version: string
        :returns: directory name
        :rtype: string�-�_)�replacerIrG)�clsr�versionr!r!r"�distinfo_dirname�sz!DistributionPath.distinfo_dirnameccsj|js x^|j�D]
}|VqWnF|j�x|jjj�D]
}|Vq6W|jrfx|jjj�D]
}|VqXWdS)a5
        Provides an iterator that looks for distributions and returns
        :class:`InstalledDistribution` or
        :class:`EggInfoDistribution` instances for each one of them.

        :rtype: iterator of :class:`InstalledDistribution` and
                :class:`EggInfoDistribution` instances
        N)r4rVrXr2r�valuesr1r3)r r(r!r!r"�get_distributions�s	
z"DistributionPath.get_distributionscCs�d}|j�}|js6xj|j�D]}|j|kr|}PqWnH|j�||jjkr\|jj|d}n"|jr~||jjkr~|jj|d}|S)a=
        Looks for a named distribution on the path.

        This function only returns the first result found, as no more than one
        value is expected. If nothing is found, ``None`` is returned.

        :rtype: :class:`InstalledDistribution`, :class:`EggInfoDistribution`
                or ``None``
        Nr)	�lowerr4rVr&rXr2rr1r3)r r�resultr(r!r!r"�get_distribution�s

z!DistributionPath.get_distributionc	cs�d}|dk	rJy|jjd||f�}Wn$tk
rHtd||f��YnXxd|j�D]X}|j}xL|D]D}t|�\}}|dkr�||kr�|VPqd||krd|j|�rd|VPqdWqTWdS)a
        Iterates over all distributions to find which distributions provide *name*.
        If a *version* is provided, it will be used to filter the results.

        This function only returns the first result found, since no more than
        one values are expected. If the directory is not found, returns ``None``.

        :parameter version: a version specifier that indicates the version
                            required, conforming to the format in ``PEP-345``

        :type name: string
        :type version: string
        Nz%s (%s)zinvalid name or version: %r, %r)r5�matcher�
ValueErrorrr`�providesr�match)	r rr]rdr(�provided�p�p_name�p_verr!r!r"�provides_distribution�s$
z&DistributionPath.provides_distributioncCs(|j|�}|dkrtd|��|j|�S)z5
        Return the path to a resource file.
        Nzno distribution named %r found)rc�LookupError�get_resource_path)r r�
relative_pathr(r!r!r"�
get_file_paths
zDistributionPath.get_file_pathccs`xZ|j�D]N}|j}||kr
||}|dk	r@||krX||Vq
x|j�D]
}|VqJWq
WdS)z�
        Return all of the exported entries in a particular category.

        :param category: The category to search for entries.
        :param name: If specified, only entries with that name are returned.
        N)r`�exportsr_)r �categoryrr(rS�d�vr!r!r"�get_exported_entries"sz%DistributionPath.get_exported_entries)NF)N)N)r*r+r,r-r#r6r8�propertyZ
cache_enabledr9rVrX�classmethodr^r`rcrlrprur!r!r!r"rJs

*
$	c@s�eZdZdZdZdZdd�Zedd��ZeZ	edd��Z
ed	d
��Zdd�Zed
d��Z
edd��Zedd��Zedd��Zedd��Zdd�Zdd�Zdd�Zdd�ZdS) rz�
    A base class for distributions, whether installed or from indexes.
    Either way, it must have some metadata, so that's all that's needed
    for construction.
    FcCsL||_|j|_|jj�|_|j|_d|_d|_d|_d|_t	�|_
i|_dS)z�
        Initialise an instance.
        :param metadata: The instance of :class:`Metadata` describing this
        distribution.
        N)r>rrar&r]Zlocator�digest�extras�contextrBZ
download_urlsZdigests)r r>r!r!r"r#GszDistribution.__init__cCs|jjS)zH
        The source archive download URL for this distribution.
        )r>�
source_url)r r!r!r"r{XszDistribution.source_urlcCsd|j|jfS)zX
        A utility property which displays the name and version in parentheses.
        z%s (%s))rr])r r!r!r"�name_and_versionaszDistribution.name_and_versioncCs.|jj}d|j|jf}||kr*|j|�|S)z�
        A set of distribution names and versions provided by this distribution.
        :return: A set of "name (version)" strings.
        z%s (%s))r>rfrr]r')r Zplist�sr!r!r"rfhs

zDistribution.providescCs8|j}tjd|j��t||�}t|j||j|jd��S)Nz%Getting requirements from metadata %r)ryr?)	r>rMrNZtodict�getattrrBZget_requirementsryrz)r Zreq_attr�mdZreqtsr!r!r"�_get_requirementsts

zDistribution._get_requirementscCs
|jd�S)N�run_requires)r�)r r!r!r"r�{szDistribution.run_requirescCs
|jd�S)N�
meta_requires)r�)r r!r!r"r�szDistribution.meta_requirescCs
|jd�S)N�build_requires)r�)r r!r!r"r��szDistribution.build_requirescCs
|jd�S)N�
test_requires)r�)r r!r!r"r��szDistribution.test_requirescCs
|jd�S)N�dev_requires)r�)r r!r!r"r��szDistribution.dev_requiresc
Cs�t|�}t|jj�}y|j|j�}Wn6tk
rZtjd|�|j	�d}|j|�}YnX|j
}d}xJ|jD]@}t|�\}}	||kr�qny|j
|	�}PWqntk
r�YqnXqnW|S)z�
        Say if this instance matches (fulfills) a requirement.
        :param req: The requirement to match.
        :rtype req: str
        :return: True if it matches, else False.
        z+could not read version %r - using name onlyrF)rrr>r=rd�requirementrrM�warning�splitr&rfrrg)
r �reqrSr=rdrrbrirjrkr!r!r"�matches_requirement�s*	

z Distribution.matches_requirementcCs(|jrd|j}nd}d|j|j|fS)zC
        Return a textual representation of this instance,
        z [%s]r:z<Distribution %s (%s)%s>)r{rr])r �suffixr!r!r"�__repr__�szDistribution.__repr__cCs>t|�t|�k	rd}n$|j|jko8|j|jko8|j|jk}|S)a<
        See if this distribution is the same as another.
        :param other: The distribution to compare with. To be equal to one
                      another. distributions must have the same type, name,
                      version and source_url.
        :return: True if it is the same, else False.
        F)�typerr]r{)r �otherrbr!r!r"�__eq__�szDistribution.__eq__cCst|j�t|j�t|j�S)zH
        Compute hash in a way which matches the equality test.
        )�hashrr]r{)r r!r!r"�__hash__�szDistribution.__hash__N)r*r+r,r-Zbuild_time_dependency�	requestedr#rvr{Zdownload_urlr|rfr�r�r�r�r�r�r�r�r�r�r!r!r!r"r5s$"
cs0eZdZdZdZd�fdd�	Zddd�Z�ZS)	rz]
    This is the base class for installed distributions (whether PEP 376 or
    legacy).
    Ncs tt|�j|�||_||_dS)a
        Initialise an instance.
        :param metadata: An instance of :class:`Metadata` which describes the
                         distribution. This will normally have been initialised
                         from a metadata file in the ``path``.
        :param path:     The path of the ``.dist-info`` or ``.egg-info``
                         directory for the distribution.
        :param env:      This is normally the :class:`DistributionPath`
                         instance where this distribution was found.
        N)�superrr#r�	dist_path)r r>rr?)�	__class__r!r"r#�sz"BaseInstalledDistribution.__init__cCsd|dkr|j}|dkr"tj}d}ntt|�}d|j}||�j�}tj|�jd�jd�}d||fS)a�
        Get the hash of some data, using a particular hash algorithm, if
        specified.

        :param data: The data to be hashed.
        :type data: bytes
        :param hasher: The name of a hash implementation, supported by hashlib,
                       or ``None``. Examples of valid values are ``'sha1'``,
                       ``'sha224'``, ``'sha384'``, '``sha256'``, ``'md5'`` and
                       ``'sha512'``. If no hasher is specified, the ``hasher``
                       attribute of the :class:`InstalledDistribution` instance
                       is used. If the hasher is determined to be ``None``, MD5
                       is used as the hashing algorithm.
        :returns: The hash of the data. If a hasher was explicitly specified,
                  the returned hash will be prefixed with the specified hasher
                  followed by '='.
        :rtype: str
        Nr:z%s=�=�asciiz%s%s)	�hasher�hashlib�md5r~rx�base64Zurlsafe_b64encode�rstrip�decode)r �datar��prefixrxr!r!r"�get_hash�s

z"BaseInstalledDistribution.get_hash)N)N)r*r+r,r-r�r#r��
__classcell__r!r!)r�r"r�scs�eZdZdZdZd'�fdd�	Zdd�Zdd	�Zd
d�Ze	dd
��Z
dd�Zdd�Zdd�Z
dd�Zd(dd�Zdd�Ze	dd��Zd)dd�Zdd �Zd!d"�Zd#d$�Zd%d&�ZejZ�ZS)*ra
    Created with the *path* of the ``.dist-info`` directory provided to the
    constructor. It reads the metadata contained in ``pydist.json`` when it is
    instantiated., or uses a passed in Metadata instance (useful for when
    dry-run mode is being used).
    Zsha256Ncs0tj|�|_}|dkr(ddl}|j�|rN|jrN||jjkrN|jj|j}nt|dkr�|j	t
�}|dkrr|j	t�}|dkr�|j	d�}|dkr�tdt
|f��t
j|j���}t|dd�}WdQRXtt|�j|||�|r�|jr�|jj|�y|j	d�}Wn&tk
�r ddl}|j�YnX|dk	|_dS)NrZMETADATAzno %s found in %sr;)r<r=r)rrCrR�pdbZ	set_tracer4r2rr>rDr
rrerJrKrLr	r�rr#r)�AttributeErrorr�)r rr>r?rRr�rSrU)r�r!r"r#s4




zInstalledDistribution.__init__cCsd|j|j|jfS)Nz#<InstalledDistribution %r %s at %r>)rr]r)r r!r!r"r�2szInstalledDistribution.__repr__cCsd|j|jfS)Nz%s %s)rr])r r!r!r"�__str__6szInstalledDistribution.__str__c
Cs�g}|jd�}tj|j���`}t|d��J}xB|D]:}dd�tt|�d�D�}||\}}}	|j|||	f�q0WWdQRXWdQRX|S)a"
        Get the list of installed files for the distribution
        :return: A list of tuples of path, hash and size. Note that hash and
                 size might be ``None`` for some entries. The path is exactly
                 as stored in the file (which is as in PEP 376).
        r)rUcSsg|]}d�qS)Nr!)�.0�ir!r!r"�
<listcomp>Hsz6InstalledDistribution._get_records.<locals>.<listcomp>�N)�get_distinfo_resourcerJrKrLr�range�lenr')
r �resultsrSrUZ
record_reader�row�missingr�checksum�sizer!r!r"�_get_records9s

(z"InstalledDistribution._get_recordscCsi}|jt�}|r|j�}|S)a
        Return the information exported by this distribution.
        :return: A dictionary of exports, mapping an export category to a dict
                 of :class:`ExportEntry` instances describing the individual
                 export entries, and keyed by name.
        )r��EXPORTS_FILENAMEr)r rbrSr!r!r"rqPs

zInstalledDistribution.exportsc	Cs8i}|jt�}|r4tj|j���}t|�}WdQRX|S)z�
        Read exports data from a file in .ini format.

        :return: A dictionary of exports, mapping an export category to a list
                 of :class:`ExportEntry` instances describing the individual
                 export entries.
        N)r�r�rJrKrLr)r rbrSrUr!r!r"r^s
z"InstalledDistribution.read_exportsc
Cs.|jt�}t|d��}t||�WdQRXdS)a
        Write a dictionary of exports to a file in .ini format.
        :param exports: A dictionary of exports, mapping an export category to
                        a list of :class:`ExportEntry` instances describing the
                        individual export entries.
        �wN)�get_distinfo_filer��openr)r rqZrf�fr!r!r"rms
z#InstalledDistribution.write_exportscCsh|jd�}tj|j���:}t|d��$}x|D]\}}||kr,|Sq,WWdQRXWdQRXtd|��dS)aW
        NOTE: This API may change in the future.

        Return the absolute path to a resource file with the given relative
        path.

        :param relative_path: The path, relative to .dist-info, of the resource
                              of interest.
        :return: The absolute path where the resource is to be found.
        r)rUNz3no resource file with relative path %r is installed)r�rJrKrLr�KeyError)r rorSrUZresources_readerZrelativeZdestinationr!r!r"rnxs
z'InstalledDistribution.get_resource_pathccsx|j�D]
}|Vq
WdS)z�
        Iterates over the ``RECORD`` entries and returns a tuple
        ``(path, hash, size)`` for each line.

        :returns: iterator of (path, hash, size)
        N)r�)r rbr!r!r"�list_installed_files�sz*InstalledDistribution.list_installed_filesFcCs,tjj|d�}tjj|j�}|j|�}tjj|d�}|jd�}tjd|�|rRdSt|���}x�|D]�}tjj	|�s||j
d	�r�d}	}
n4dtjj|�}
t|d��}|j
|j��}	WdQRX|j|�s�|r�|j|�r�tjj||�}|j||	|
f�qbW|j|��rtjj||�}|j|ddf�WdQRX|S)
z�
        Writes the ``RECORD`` file, using the ``paths`` iterable passed in. Any
        existing ``RECORD`` file is silently overwritten.

        prefix is used to determine when to write absolute paths.
        r:rzcreating %sN�.pyc�.pyoz%d�rb)r�r�)�osrrI�dirname�
startswithr�rM�infor�isdirrF�getsizer�r��read�relpathZwriterow)r �pathsr��dry_run�baseZbase_under_prefix�record_path�writerr�
hash_valuer��fpr!r!r"�write_installed_files�s.





z+InstalledDistribution.write_installed_filesc
Csg}tjj|j�}|jd�}�x�|j�D]�\}}}tjj|�sLtjj||�}||krVq(tjj|�sv|j|dddf�q(tjj	|�r(t
tjj|��}|r�||kr�|j|d||f�q(|r(d|kr�|jdd�d}nd	}t
|d
��2}	|j|	j�|�}
|
|k�r|j|d||
f�Wd	QRXq(W|S)a�
        Checks that the hashes and sizes of the files in ``RECORD`` are
        matched by the files themselves. Returns a (possibly empty) list of
        mismatches. Each entry in the mismatch list will be a tuple consisting
        of the path, 'exists', 'size' or 'hash' according to what didn't match
        (existence is checked first, then size, then hash), the expected
        value and the actual value.
        r�existsTFr��=rrNr�r�)r�rr�r�r��isabsrIr�r'�isfile�strr�r�r�r�r�)r �
mismatchesr�r�rr�r�Zactual_sizer�r�Zactual_hashr!r!r"�check_installed_files�s.	

 z+InstalledDistribution.check_installed_filescCs�i}tjj|jd�}tjj|�r�tj|ddd��}|j�j�}WdQRXx@|D]8}|jdd�\}}|dkr~|j	|g�j
|�qN|||<qNW|S)	a�
        A dictionary of shared locations whose keys are in the set 'prefix',
        'purelib', 'platlib', 'scripts', 'headers', 'data' and 'namespace'.
        The corresponding value is the absolute path of that category for
        this distribution, and takes into account any paths selected by the
        user at installation time (e.g. via command-line arguments). In the
        case of the 'namespace' key, this would be a list of absolute paths
        for the roots of namespace packages in this distribution.

        The first time this property is accessed, the relevant information is
        read from the SHARED file in the .dist-info directory.
        rrSzutf-8)�encodingNr�r�	namespace)r�rrIr��codecsr�r��
splitlinesr�r%r')r rb�shared_pathr��lines�liner&r7r!r!r"�shared_locations�s
z&InstalledDistribution.shared_locationsc	
Cs�tjj|jd�}tjd|�|r$dSg}x6dD].}||}tjj||�r.|jd	||f�q.Wx"|jd
f�D]}|jd|�qnWtj	|dd
d��}|j
dj|��WdQRX|S)aa
        Write shared location information to the SHARED file in .dist-info.
        :param paths: A dictionary as described in the documentation for
        :meth:`shared_locations`.
        :param dry_run: If True, the action is logged but no file is actually
                        written.
        :return: The path of the file written to.
        rzcreating %sNr��lib�headers�scriptsr�z%s=%sr�znamespace=%sr�zutf-8)r��
)r�r�r�r�r�)r�rrIrMr�r�r'�getr�r��write)	r r�r�r�r�r&r�nsr�r!r!r"�write_shared_locations�s	
z,InstalledDistribution.write_shared_locationscCsF|tkrtd||jf��tj|j�}|dkr<td|j��|j|�S)Nz+invalid path for a dist-info file: %r at %rzUnable to get a finder for %s)�
DIST_FILESrrrrCrD)r rrRr!r!r"r�sz+InstalledDistribution.get_distinfo_resourcecCs~|jtj�dkrT|jtj�dd�\}}||jjtj�dkrTtd||j|jf��|tkrntd||jf��tjj	|j|�S)	a�
        Returns a path located under the ``.dist-info`` directory. Returns a
        string representing the path.

        :parameter path: a ``'/'``-separated path relative to the
                         ``.dist-info`` directory or an absolute path;
                         If *path* is an absolute path and doesn't start
                         with the ``.dist-info`` directory path,
                         a :class:`DistlibException` is raised
        :type path: str
        :rtype: str
        r�Nrz;dist-info file %r does not belong to the %r %s distributionz+invalid path for a dist-info file: %r at %r������)
rDr��sepr�rrrr]r�rI)r rr^r!r!r"r�sz'InstalledDistribution.get_distinfo_fileccsVtjj|j�}xB|j�D]6\}}}tjj|�s<tjj||�}|j|j�r|VqWdS)z�
        Iterates over the ``RECORD`` entries and returns paths for each line if
        the path is pointing to a file located in the ``.dist-info`` directory
        or one of its subdirectories.

        :returns: iterator of paths
        N)r�rr�r�r�rIr�)r r�rr�r�r!r!r"�list_distinfo_files6sz)InstalledDistribution.list_distinfo_filescCst|t�o|j|jkS)N)rWrr)r r�r!r!r"r�Fs
zInstalledDistribution.__eq__)NN)F)F)r*r+r,r-r�r#r�r�r�r
rqrrrnr�r�r�r�r�r�r�r�r��objectr�r�r!r!)r�r"r	s(

##
	csjeZdZdZdZiZd�fdd�	Zdd�Zdd	�Zd
d�Z	dd
�Z
dd�Zddd�Zdd�Z
ejZ�ZS)raCreated with the *path* of the ``.egg-info`` directory or file provided
    to the constructor. It reads the metadata contained in the file itself, or
    if the given path happens to be a directory, the metadata is read from the
    file ``PKG-INFO`` under that directory.TNcs�dd�}||_||_|rJ|jrJ||jjkrJ|jj|j}|||j|j�n0|j|�}|||j|j�|rz|jrz|jj|�t	t
|�j|||�dS)NcSs||_|j�|_||_dS)N)rrar&r])r}�nrtr!r!r"�set_name_and_versionXs
z:EggInfoDistribution.__init__.<locals>.set_name_and_version)rr�r4r3r>rr]�
_get_metadatar)r�rr#)r rr?r�r>)r�r!r"r#Ws

zEggInfoDistribution.__init__c
s2d}dd���fdd�}|jd�r�tjj|�rdtjj|dd�}t|dd	�}tjj|dd
�}||�}n`tj|�}t|j	d�j
d��}t|dd
�}y|j	d�}	�|	j
d��}Wntk
r�d}YnXnX|jd��rtjj|��rtjj|d
�}||�}tjj|d�}t|dd	�}ntd|��|�r.|j
|�|S)NcSs�g}|j�}x�|D]�}|j�}|jd�r6tjd|�Pt|�}|sPtjd|�q|jr`tjd�|jst|j|j	�qdj
dd�|jD��}|jd|j	|f�qW|S)	z�Create a list of dependencies from a requires.txt file.

            *data*: the contents of a setuptools-produced requires.txt file.
            �[z.Unexpected line: quitting requirement scan: %rz#Not recognised as a requirement: %rz4extra requirements in requires.txt are not supportedz, css|]}d|VqdS)z%s%sNr!)r��cr!r!r"�	<genexpr>�szQEggInfoDistribution._get_metadata.<locals>.parse_requires_data.<locals>.<genexpr>z%s (%s))r��stripr�rMr�rryZconstraintsr'rrI)r��reqsr�r�rSZconsr!r!r"�parse_requires_dataos&


z>EggInfoDistribution._get_metadata.<locals>.parse_requires_datacsHg}y*tj|dd��}�|j��}WdQRXWntk
rBYnX|S)z�Create a list of dependencies from a requires.txt file.

            *req_path*: the path to a setuptools-produced requires.txt file.
            rSzutf-8N)r�r�r��IOError)�req_pathr�r�)r�r!r"�parse_requires_path�sz>EggInfoDistribution._get_metadata.<locals>.parse_requires_pathz.eggzEGG-INFOzPKG-INFOr;)rr=zrequires.txtzEGG-INFO/PKG-INFO�utf8)r<r=zEGG-INFO/requires.txtzutf-8z	.egg-infoz,path must end with .egg-info or .egg, got %r)rFr�rr�rIr	�	zipimport�zipimporterr�get_datar�r�rZadd_requirements)
r r�requiresr��	meta_pathr>r�Zzipfr<r�r!)r�r"r�ls:




z!EggInfoDistribution._get_metadatacCsd|j|j|jfS)Nz!<EggInfoDistribution %r %s at %r>)rr]r)r r!r!r"r��szEggInfoDistribution.__repr__cCsd|j|jfS)Nz%s %s)rr])r r!r!r"r��szEggInfoDistribution.__str__cCsdg}tjj|jd�}tjj|�r`x>|j�D]2\}}}||kr>q*tjj|�s*|j|dddf�q*W|S)a�
        Checks that the hashes and sizes of the files in ``RECORD`` are
        matched by the files themselves. Returns a (possibly empty) list of
        mismatches. Each entry in the mismatch list will be a tuple consisting
        of the path, 'exists', 'size' or 'hash' according to what didn't match
        (existence is checked first, then size, then hash), the expected
        value and the actual value.
        zinstalled-files.txtr�TF)r�rrIr�r�r')r r�r�rrZr!r!r"r��s	z)EggInfoDistribution.check_installed_filesc
Cs�dd�}dd�}tjj|jd�}g}tjj|�r�tj|ddd��|}xt|D]l}|j�}tjjtjj|j|��}tjj|�s�tj	d	|�|j
d
�r�qHtjj|�sH|j|||�||�f�qHWWdQRX|j|ddf�|S)z�
        Iterates over the ``installed-files.txt`` entries and returns a tuple
        ``(path, hash, size)`` for each line.

        :returns: a list of (path, hash, size)
        c
Ss0t|d�}z|j�}Wd|j�Xtj|�j�S)Nr�)r�r��closer�r�Z	hexdigest)rr�Zcontentr!r!r"�_md5�s


z6EggInfoDistribution.list_installed_files.<locals>._md5cSstj|�jS)N)r��stat�st_size)rr!r!r"�_size�sz7EggInfoDistribution.list_installed_files.<locals>._sizezinstalled-files.txtrSzutf-8)r�zNon-existent file: %s�.pyc�.pyoN)rr)
r�rrIr�r�r�r��normpathrMr�rFr�r')r r�rr�rbr�r�rir!r!r"r��s"

&z(EggInfoDistribution.list_installed_filesFccs�tjj|jd�}d}tj|ddd��d}x\|D]T}|j�}|dkrFd}q,|s,tjjtjj|j|��}|j|j�r,|rz|Vq,|Vq,WWdQRXdS)	a
        Iterates over the ``installed-files.txt`` entries and returns paths for
        each line if the path is pointing to a file located in the
        ``.egg-info`` directory or one of its subdirectories.

        :parameter absolute: If *absolute* is ``True``, each returned path is
                          transformed into a local absolute path. Otherwise the
                          raw value from ``installed-files.txt`` is returned.
        :type absolute: boolean
        :returns: iterator of paths
        zinstalled-files.txtTrSzutf-8)r�z./FN)r�rrIr�r�r�rr�)r Zabsoluter��skipr�r�rir!r!r"r��s
z'EggInfoDistribution.list_distinfo_filescCst|t�o|j|jkS)N)rWrr)r r�r!r!r"r�s
zEggInfoDistribution.__eq__)N)F)r*r+r,r-r�r�r#r�r�r�r�r�r�r�r�r�r�r!r!)r�r"rNsK&
c@s^eZdZdZdd�Zdd�Zddd�Zd	d
�Zdd�Zddd�Z	ddd�Z
dd�Zdd�ZdS)�DependencyGrapha�
    Represents a dependency graph between distributions.

    The dependency relationships are stored in an ``adjacency_list`` that maps
    distributions to a list of ``(other, label)`` tuples where  ``other``
    is a distribution and the edge is labeled with ``label`` (i.e. the version
    specifier, if such was provided). Also, for more efficient traversal, for
    every distribution ``x``, a list of predecessors is kept in
    ``reverse_list[x]``. An edge from distribution ``a`` to
    distribution ``b`` means that ``a`` depends on ``b``. If any missing
    dependencies are found, they are stored in ``missing``, which is a
    dictionary that maps distributions to a list of requirements that were not
    provided by any other distributions.
    cCsi|_i|_i|_dS)N)�adjacency_list�reverse_listr�)r r!r!r"r#.szDependencyGraph.__init__cCsg|j|<g|j|<dS)z�Add the *distribution* to the graph.

        :type distribution: :class:`distutils2.database.InstalledDistribution`
                            or :class:`distutils2.database.EggInfoDistribution`
        N)rr)r �distributionr!r!r"�add_distribution3s
z DependencyGraph.add_distributionNcCs6|j|j||f�||j|kr2|j|j|�dS)a�Add an edge from distribution *x* to distribution *y* with the given
        *label*.

        :type x: :class:`distutils2.database.InstalledDistribution` or
                 :class:`distutils2.database.EggInfoDistribution`
        :type y: :class:`distutils2.database.InstalledDistribution` or
                 :class:`distutils2.database.EggInfoDistribution`
        :type label: ``str`` or ``None``
        N)rr'r)r �x�y�labelr!r!r"�add_edge=s
zDependencyGraph.add_edgecCs&tjd||�|jj|g�j|�dS)a
        Add a missing *requirement* for the given *distribution*.

        :type distribution: :class:`distutils2.database.InstalledDistribution`
                            or :class:`distutils2.database.EggInfoDistribution`
        :type requirement: ``str``
        z
%s missing %rN)rMrNr�r%r')r rr�r!r!r"�add_missingLszDependencyGraph.add_missingcCsd|j|jfS)Nz%s %s)rr])r r(r!r!r"�
_repr_distWszDependencyGraph._repr_distrcCs�|j|�g}xv|j|D]h\}}|j|�}|dk	r>d||f}|jd|t|��|j||d�}|jd�}|j|dd��qWdj|�S)zPrints only a subgraphNz%s [%s]z    rr�)rrr'r��	repr_noder��extendrI)r r(�level�outputr�rZ	suboutputZsubsr!r!r"rZs

zDependencyGraph.repr_nodeTcCs�g}|jd�x||jj�D]n\}}t|�dkr>|r>|j|�xH|D]@\}}|dk	rn|jd|j|j|f�qD|jd|j|jf�qDWqW|r�t|�dkr�|jd�|jd�|jd�x&|D]}|jd	|j�|jd
�q�W|jd�|jd�dS)a9Writes a DOT output for the graph to the provided file *f*.

        If *skip_disconnected* is set to ``True``, then all distributions
        that are not dependent on any other distribution are skipped.

        :type f: has to support ``file``-like operations
        :type skip_disconnected: ``bool``
        zdigraph dependencies {
rNz"%s" -> "%s" [label="%s"]
z
"%s" -> "%s"
zsubgraph disconnected {
zlabel = "Disconnected"
zbgcolor = red
z"%s"r�z}
)r�r�itemsr�r'r)r r�Zskip_disconnectedZdisconnectedr(�adjsr�rr!r!r"�to_dotgs&	






zDependencyGraph.to_dotcs�g}i}x&|jj�D]\}}|dd�||<qWx�g�x4t|j��dd�D]\}}|sL�j|�||=qLW�srPx*|j�D]\}}�fdd�|D�||<q|Wtjddd��D��|j��q2W|t|j��fS)aa
        Perform a topological sort of the graph.
        :return: A tuple, the first element of which is a topologically sorted
                 list of distributions, and the second element of which is a
                 list of distributions that cannot be sorted because they have
                 circular dependencies and so form a cycle.
        Ncs g|]\}}|�kr||f�qSr!r!)r�rsrS)�	to_remover!r"r��sz4DependencyGraph.topological_sort.<locals>.<listcomp>zMoving to result: %scSsg|]}d|j|jf�qS)z%s (%s))rr])r�rsr!r!r"r��s)rr�listr'rMrNr�keys)r rbZalist�krtr!)rr"�topological_sort�s$

z DependencyGraph.topological_sortcCs6g}x&|jj�D]\}}|j|j|��qWdj|�S)zRepresentation of the graphr�)rrr'rrI)r rr(rr!r!r"r��szDependencyGraph.__repr__)N)r)T)
r*r+r,r-r#r	r
rrrrrr�r!r!r!r"rs



 rr.cCsft|�}t�}i}xX|D]P}|j|�x@|jD]6}t|�\}}tjd|||�|j|g�j||f�q.WqWx�|D]�}|j	|j
B|jB|jB}x�|D]�}	y|j
|	�}
Wn6tk
r�tjd|	�|	j�d}|j
|�}
YnX|
j}d}||k�rJxV||D]J\}}y|
j|�}
Wntk
�r,d}
YnX|
r�|j|||	�d}Pq�W|s�|j||	�q�WqrW|S)a6Makes a dependency graph from the given distributions.

    :parameter dists: a list of distributions
    :type dists: list of :class:`distutils2.database.InstalledDistribution` and
                 :class:`distutils2.database.EggInfoDistribution` instances
    :rtype: a :class:`DependencyGraph` instance
    zAdd to provided: %s, %s, %sz+could not read version %r - using name onlyrFT)rrr	rfrrMrNr%r'r�r�r�r�rdrr�r�r&rgr
r)�distsr=�graphrhr(rirr]r�r�rdZmatchedZproviderrgr!r!r"�
make_graph�sD





rcCs~||krtd|j��t|�}|g}|j|}x@|rn|j�}|j|�x$|j|D]}||krR|j|�qRWq0W|jd�|S)z�Recursively generate a list of distributions from *dists* that are
    dependent on *dist*.

    :param dists: a list of distributions
    :param dist: a distribution, member of *dists* for which we are interested
    z1given distribution %r is not a member of the listr)rrrr�popr')rr(rZdep�todorsZsuccr!r!r"�get_dependent_dists�s



r!cCsv||krtd|j��t|�}g}|j|}xD|rp|j�d}|j|�x$|j|D]}||krT|j|�qTWq.W|S)z�Recursively generate a list of distributions from *dists* that are
    required by *dist*.

    :param dists: a list of distributions
    :param dist: a distribution, member of *dists* for which we are interested
    z1given distribution %r is not a member of the listr)rrrrrr')rr(rr�r rsZpredr!r!r"�get_required_dists�s


r"cKs4|jdd�}tf|�}||_||_|p(d|_t|�S)zO
    A convenience method for making a dist given just a name and version.
    �summaryzPlaceholder for summary)rr	rr]r#r)rr]�kwargsr#rr!r!r"�	make_dists

r%)r.)6r-Z
__future__rr�r�rJr�Zloggingr�rHr/r�r:rr�compatrr]rrr>r	r
r�utilrr
rrrrr�__all__Z	getLoggerr*rMr�ZCOMMANDS_FILENAMEr�rGr�rrrrrrrOrPrrr!r"r%r!r!r!r"�<module>sV$

l7GM
6_vendor/distlib/__pycache__/scripts.cpython-36.opt-1.pyc000064400000023415151733136300017125 0ustar003

�Pfx;�@s�ddlmZddlZddlZddlZddlZddlZddlmZm	Z	m
Z
ddlmZddl
mZmZmZmZmZeje�Zdj�Zejd�Zd	Zd
d�ZGdd
�d
e�ZdS)�)�BytesION�)�	sysconfig�detect_encoding�ZipFile)�finder)�FileOperator�get_export_entry�convert_path�get_executable�in_venva�
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
 <assemblyIdentity version="1.0.0.0"
 processorArchitecture="X86"
 name="%s"
 type="win32"/>

 <!-- Identify the application security requirements. -->
 <trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
 <security>
 <requestedPrivileges>
 <requestedExecutionLevel level="asInvoker" uiAccess="false"/>
 </requestedPrivileges>
 </security>
 </trustInfo>
</assembly>s^#!.*pythonw?[0-9.]*([ 	].*)?$a|# -*- coding: utf-8 -*-
if __name__ == '__main__':
    import sys, re

    def _resolve(module, func):
        __import__(module)
        mod = sys.modules[module]
        parts = func.split('.')
        result = getattr(mod, parts.pop(0))
        for p in parts:
            result = getattr(result, p)
        return result

    try:
        sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])

        func = _resolve('%(module)s', '%(func)s')
        rc = func() # None interpreted as 0
    except Exception as e:  # only supporting Python >= 2.6
        sys.stderr.write('%%s\n' %% e)
        rc = 1
    sys.exit(rc)
cCsZd|krV|jd�rD|jdd�\}}d|krV|jd�rVd||f}n|jd�sVd|}|S)N� z
/usr/bin/env r�"z%s "%s"z"%s")�
startswith�split)�
executable�envZ_executable�r�/usr/lib/python3.6/scripts.py�_enquote_executableBs

rc@s�eZdZdZeZdZd%dd�Zdd�Ze	j
jd	�rBd
d�Zdd
�Z
d&dd�Zdd�ZeZdd�Zdd�Zd'dd�Zdd�Zedd��Zejdd��Zejdks�ejd	kr�ejdkr�dd �Zd(d!d"�Zd)d#d$�ZdS)*�ScriptMakerz_
    A class to copy or create scripts from source scripts or callable
    specifications.
    NTFcCsz||_||_||_d|_d|_tjdkp:tjdko:tjdk|_t	d�|_
|pRt|�|_tjdkprtjdkortjdk|_
dS)NF�posix�java��X.Y�nt)rr)�
source_dir�
target_dir�
add_launchers�force�clobber�os�name�_name�set_mode�set�variantsr�_fileop�_is_nt)�selfrrr�dry_runZfileoprrr�__init__[s

zScriptMaker.__init__cCs@|jdd�r<|jr<tjj|�\}}|jdd�}tjj||�}|S)N�guiF�pythonZpythonw)�getr(r!�pathr�replace�join)r)r�optionsZdn�fnrrr�_get_alternate_executableks
z%ScriptMaker._get_alternate_executablercCsLy"t|��}|jd�dkSQRXWn$ttfk
rFtjd|�dSXdS)zl
            Determine if the specified executable is a script
            (contains a #! line)
            �z#!NzFailed to open %sF)�open�read�OSError�IOError�logger�warning)r)r�fprrr�	_is_shellss
zScriptMaker._is_shellcCsD|j|�r*ddl}|jjjd�dkr<|Sn|j�jd�r<|Sd|S)Nrzos.nameZLinuxz
jython.exez/usr/bin/env %s)r=rZlangZSystemZgetProperty�lower�endswith)r)rrrrr�_fix_jython_executables
z"ScriptMaker._fix_jython_executable�cCsdd}|jr|j}d}n^tj�s&t�}nNt�rLtjjtjd�dtj	d��}n(tjjtj	d�dtj	d�tj	d�f�}|r�|j
||�}tjj
d	�r�|j|�}tjj|�}|r�t|�}|jd
�}tjdkr�d|kr�d
|kr�|d7}d||d}y|jd
�Wn"tk
�rtd|��YnX|d
k�r`y|j|�Wn&tk
�r^td||f��YnX|S)NTF�scriptszpython%s�EXE�BINDIRz
python%s%s�VERSIONrzutf-8Zcliz	-X:Framesz
-X:FullFramess
 -X:Framess#!�
z,The shebang (%r) is not decodable from utf-8z?The shebang (%r) is not decodable from the script encoding (%r))rr�is_python_buildrrr!r/r1�get_path�get_config_varr4�sys�platformrr@�normcaser�encode�decode�UnicodeDecodeError�
ValueError)r)�encoding�post_interpr2Zenquoter�shebangrrr�_get_shebang�sL



zScriptMaker._get_shebangcCs|jt|j|jd�S)N)�module�func)�script_template�dict�prefix�suffix)r)�entryrrr�_get_script_text�s
zScriptMaker._get_script_textcCstjj|�}|j|S)N)r!r/�basename�manifest)r)Zexename�baserrr�get_manifest�szScriptMaker.get_manifestcCs�|jo
|j}tjjd�}|s*|||}n^|dkr>|jd�}n
|jd�}t�}	t|	d��}
|
jd|�WdQRX|	j	�}||||}�xd|D�]Z}tj
j|j|�}
|�rrtj
j
|
�\}}|jd�r�|}
d|
}
y|jj|
|�Wn�tk
�rntjd�d	|
}tj
j|��r tj|�tj|
|�|jj|
|�tjd
�ytj|�Wntk
�rhYnXYnXnp|j�r�|
jd|��r�d|
|f}
tj
j|
��r�|j�r�tjd
|
�q�|jj|
|�|j�r�|jj|
g�|j|
�q�WdS)Nzutf-8�py�t�wz__main__.pyz.pyz%s.exez:Failed to write executable - trying to use .deleteme logicz%s.deletemez0Able to replace executable using .deleteme logic�.z%s.%szSkipping existing file %s)rr(r!�lineseprM�
_get_launcherrrZwritestr�getvaluer/r1r�splitextrr'Zwrite_binary_file�	Exceptionr:r;�exists�remove�rename�debugr?r r$�set_executable_mode�append)r)�namesrSZscript_bytes�	filenames�extZuse_launcherreZlauncher�streamZzfZzip_datar"�outname�n�eZdfnamerrr�
_write_script�sT




zScriptMaker._write_scriptcCs�d}|r0|jdg�}|r0ddj|�}|jd�}|jd||d�}|j|�jd�}|j}t�}	d|jkrp|	j|�d|jkr�|	jd	|t	j
d
f�d|jkr�|	jd|t	j
dd
�f�|r�|jdd�r�d}
nd}
|j|	||||
�dS)NrAZinterpreter_argsz %sr
zutf-8)r2r�Xz%s%srzX.Yz%s-%s�r,F�pywra)r.r1rMrTr\r"r%r&�addrJ�versionrw)r)r[rqr2rR�argsrS�scriptr"Zscriptnamesrrrrr�_make_script�s(




zScriptMaker._make_scriptcCs�d}tjj|jt|��}tjj|jtjj|��}|jrX|jj	||�rXt
jd|�dSyt|d�}Wn t
k
r�|js~�d}YnLX|j�}|s�t
jd|j�|�dStj|jdd��}|r�d}|jd�p�d	}|�s|r�|j�|jj||�|j�r|jj|g�|j|�n�t
jd
||j�|jj�s�t|j�\}	}
|jd�|j|	|�}d|k�rbd
}nd}tjj|�}
|j|
g||j �||�|�r�|j�dS)NFznot copying %s (up-to-date)�rbz"%s: %s is an empty file (skipping)s
rFTrrAzcopying and adjusting %s -> %srspythonwrzra)!r!r/r1rr
rr]rr'Znewerr:rmr6r9r*�readliner;Zget_command_name�
FIRST_LINE_RE�matchr0�group�closeZ	copy_filer$rnro�infor�seekrTrwr7)r)r~rqZadjustrt�fZ
first_liner�rRrQ�linesrSrrrurrr�_copy_scriptsR



zScriptMaker._copy_scriptcCs|jjS)N)r'r*)r)rrrr*JszScriptMaker.dry_runcCs||j_dS)N)r'r*)r)�valuerrrr*NsrcCsHtjd�dkrd}nd}d||f}tjdd�d}t|�j|�j}|S)	N�P�Z64Z32z%s%s.exerdrr)�struct�calcsize�__name__�rsplitr�find�bytes)r)Zkind�bitsr"Zdistlib_package�resultrrrrfVszScriptMaker._get_launchercCs6g}t|�}|dkr"|j||�n|j|||d�|S)a�
        Make a script.

        :param specification: The specification, which is either a valid export
                              entry specification (to make a script from a
                              callable) or a filename (to make a script by
                              copying from a source location).
        :param options: A dictionary of options controlling script generation.
        :return: A list of all absolute pathnames written to.
        N)r2)r	r�r)r)�
specificationr2rqr[rrr�makedszScriptMaker.makecCs(g}x|D]}|j|j||��q
W|S)z�
        Take a list of specifications and make scripts from them,
        :param specifications: A list of specifications.
        :return: A list of all absolute pathnames written to,
        )�extendr�)r)Zspecificationsr2rqr�rrr�
make_multiplews
zScriptMaker.make_multiple)TFN)rAN)N)N)N)r��
__module__�__qualname__�__doc__�SCRIPT_TEMPLATErWrr+r4rJrKrr=r@rTr\�_DEFAULT_MANIFESTr^r`rwrr��propertyr*�setterr!r"r#rfr�r�rrrrrRs,

82
4
r)�iorZloggingr!�rer�rJ�compatrrrZ	resourcesr�utilrr	r
rrZ	getLoggerr�r:�stripr��compiler�r�r�objectrrrrr�<module>s

_vendor/distlib/__pycache__/__init__.cpython-36.opt-1.pyc000064400000001703151733136300017171 0ustar003

�PfE�@snddlZdZGdd�de�ZyddlmZWn&ek
rRGdd�dej�ZYnXeje�Z	e	j
e��dS)�Nz0.2.4c@seZdZdS)�DistlibExceptionN)�__name__�
__module__�__qualname__�rr�/usr/lib/python3.6/__init__.pyrsr)�NullHandlerc@s$eZdZdd�Zdd�Zdd�ZdS)rcCsdS)Nr)�self�recordrrr�handleszNullHandler.handlecCsdS)Nr)r	r
rrr�emitszNullHandler.emitcCs
d|_dS)N)�lock)r	rrr�
createLockszNullHandler.createLockN)rrrrrrrrrrrsr)Zlogging�__version__�	Exceptionrr�ImportErrorZHandlerZ	getLoggerrZloggerZ
addHandlerrrrr�<module>s
_vendor/distlib/__pycache__/manifest.cpython-36.pyc000064400000024052151733136300016303 0ustar003

�Pf�9�@s�dZddlZddlZddlZddlZddlZddlmZddlm	Z	ddl
mZdgZej
e�Zejdej�Zejd	ejejB�Zejdd
�ZGdd�de�ZdS)zu
Class representing the list of files in a distribution.

Equivalent to distutils.filelist, but fixes some problems.
�N�)�DistlibException)�fsdecode)�convert_path�Manifestz\\w*
z#.*?(?=
)|
(?=$)�c@szeZdZdZddd�Zdd�Zdd�Zd	d
�Zddd
�Zdd�Z	dd�Z
dd�Zddd�Zd dd�Z
d!dd�Zdd�ZdS)"rz~A list of files built by on exploring the filesystem and filtered by
    applying various patterns to what we find there.
    NcCs>tjjtjj|ptj���|_|jtj|_d|_t	�|_
dS)zd
        Initialise an instance.

        :param base: The base directory to explore under.
        N)�os�path�abspath�normpath�getcwd�base�sep�prefix�allfiles�set�files)�selfr
�r�/usr/lib/python3.6/manifest.py�__init__*szManifest.__init__cCs�ddlm}m}m}g|_}|j}|g}|j}|j}xv|r�|�}tj	|�}	x\|	D]T}
tj
j||
�}tj|�}|j}
||
�r�|jt
|��qR||
�rR||
�rR||�qRWq8WdS)zmFind all files under the base and set ``allfiles`` to the absolute
        pathnames of files found.
        r)�S_ISREG�S_ISDIR�S_ISLNKN)�statrrrrr
�pop�appendr�listdirr	�join�st_moder)rrrrr�root�stackr�push�names�name�fullnamer�moderrr�findall9s"



zManifest.findallcCs4|j|j�stjj|j|�}|jjtjj|��dS)zz
        Add a file to the manifest.

        :param item: The pathname to add. This can be relative to the base.
        N)	�
startswithrrr	rr
r�addr)r�itemrrrr)TszManifest.addcCsx|D]}|j|�qWdS)z�
        Add a list of files to the manifest.

        :param items: The pathnames to add. These can be relative to the base.
        N)r))r�itemsr*rrr�add_many^s
zManifest.add_manyFcsf��fdd��t�j�}|rJt�}x|D]}�|tjj|��q(W||O}dd�tdd�|D��D�S)z8
        Return sorted files in directory order
        csJ|j|�tjd|�|�jkrFtjj|�\}}|dks<t��||�dS)Nzadd_dir added %s��/)r-r.)r)�logger�debugr
rr	�split�AssertionError)�dirs�d�parent�_)�add_dirrrrr7ls

z Manifest.sorted.<locals>.add_dircSsg|]}tjj|��qSr)rr	r)�.0Z
path_tuplerrr�
<listcomp>zsz#Manifest.sorted.<locals>.<listcomp>css|]}tjj|�VqdS)N)rr	r1)r8r	rrr�	<genexpr>{sz"Manifest.sorted.<locals>.<genexpr>)rrrr	�dirname�sorted)rZwantdirs�resultr3�fr)r7rrr<gs

zManifest.sortedcCst�|_g|_dS)zClear all collected files.N)rrr)rrrr�clear}szManifest.clearcCs�|j|�\}}}}|dkrFx&|D]}|j|dd�s tjd|�q W�n<|dkrnx|D]}|j|dd�}qTW�n|dkr�x&|D]}|j|dd�s|tjd|�q|Wn�|d	kr�x�|D]}|j|dd�}q�Wn�|d
k�r�x�|D] }|j||d�s�tjd||�q�Wn�|d
k�r&xz|D]}|j||d�}�qWn\|dk�rN|jd|d��s�tjd|�n4|dk�rv|jd|d��s�tjd|�ntd|��dS)av
        Process a directive which either adds some files from ``allfiles`` to
        ``files``, or removes some files from ``files``.

        :param directive: The directive to process. This should be in a format
                     compatible with distutils ``MANIFEST.in`` files:

                     http://docs.python.org/distutils/sourcedist.html#commands
        �includeT)�anchorzno files found matching %r�excludezglobal-includeFz3no files found matching %r anywhere in distributionzglobal-excludezrecursive-include)rz-no files found matching %r under directory %rzrecursive-exclude�graftNz no directories found matching %r�prunez4no previously-included directories found matching %rzinvalid action %r)�_parse_directive�_include_patternr/Zwarning�_exclude_patternr)r�	directive�action�patterns�thedirZ
dirpattern�pattern�foundrrr�process_directive�sD









zManifest.process_directivec	Cs|j�}t|�dkr,|ddkr,|jdd�|d}d}}}|dkrxt|�dkr`td
|��dd�|dd�D�}n�|dkr�t|�dkr�td|��t|d�}dd�|dd�D�}n<|dk�r�t|�dkr�td|��t|d�}ntd|��||||fS)z�
        Validate a directive.
        :param directive: The directive to validate.
        :return: A tuple of action, patterns, thedir, dir_patterns
        rrr@rB�global-include�global-exclude�recursive-include�recursive-excluderCrDNrz$%r expects <pattern1> <pattern2> ...cSsg|]}t|��qSr)r)r8�wordrrrr9�sz-Manifest._parse_directive.<locals>.<listcomp>�z*%r expects <dir> <pattern1> <pattern2> ...cSsg|]}t|��qSr)r)r8rSrrrr9�sz!%r expects a single <dir_pattern>zunknown action %r)r@rBrOrPrQrRrCrD)r@rBrOrP)rQrR)rCrD)r1�len�insertrr)rrHZwordsrIrJrKZdir_patternrrrrE�s:



zManifest._parse_directiveTcCsTd}|j||||�}|jdkr&|j�x(|jD]}|j|�r.|jj|�d}q.W|S)a�Select strings (presumably filenames) from 'self.files' that
        match 'pattern', a Unix-style wildcard (glob) pattern.

        Patterns are not quite the same as implemented by the 'fnmatch'
        module: '*' and '?'  match non-special characters, where "special"
        is platform-dependent: slash on Unix; colon, slash, and backslash on
        DOS/Windows; and colon on Mac OS.

        If 'anchor' is true (the default), then the pattern match is more
        stringent: "*.py" will match "foo.py" but not "foo/bar.py".  If
        'anchor' is false, both of these will match.

        If 'prefix' is supplied, then only filenames starting with 'prefix'
        (itself a pattern) and ending with 'pattern', with anything in between
        them, will match.  'anchor' is ignored in this case.

        If 'is_regex' is true, 'anchor' and 'prefix' are ignored, and
        'pattern' is assumed to be either a string containing a regex or a
        regex object -- no translation is done, the regex is just compiled
        and used as-is.

        Selected strings will be added to self.files.

        Return True if files are found.
        FNT)�_translate_patternrr'�searchrr))rrLrAr�is_regexrM�
pattern_rer$rrrrFs

zManifest._include_patterncCsFd}|j||||�}x,t|j�D]}|j|�r |jj|�d}q W|S)atRemove strings (presumably filenames) from 'files' that match
        'pattern'.

        Other parameters are the same as for 'include_pattern()', above.
        The list 'self.files' is modified in place. Return True if files are
        found.

        This API is public to allow e.g. exclusion of SCM subdirs, e.g. when
        packaging source distributions
        FT)rW�listrrX�remove)rrLrArrYrMrZr>rrrrG)s
zManifest._exclude_patternc
Cs�|rt|t�rtj|�S|Stdkr:|jd�jd�\}}}|rj|j|�}td
krn|j|�rd|j|�snt	�nd}tj
tjj
|jd��}	|dk	�rftdkr�|jd�}
|j|�dt|
��}n>|j|�}|j|�r�|j|�s�t	�|t|�t|�t|��}tj}tjdk�rd}tdk�r4d|	|j
|d	|f�}n0|t|�t|�t|��}d
||	||||f}n8|�r�tdk�r�d|	|}nd||	|t|�d�f}tj|�S)aTranslate a shell-like wildcard pattern to a compiled regular
        expression.

        Return the compiled regex.  If 'is_regex' true,
        then 'pattern' is directly compiled to a regex (if it's a string)
        or just returned as-is (assumes it's a regex object).
        rTrr6r-N�\z\\�^z.*z%s%s%s%s.*%s%sz%s%s%s)rTr)rTr)rTr)rTr)rTr)�
isinstance�str�re�compile�_PYTHON_VERSION�_glob_to_re�	partitionr(�endswithr2�escaperr	rr
rUr)
rrLrArrY�startr6�endrZr
Z
empty_patternZ	prefix_rerrrrrW=sB	








zManifest._translate_patterncCs8tj|�}tj}tjdkrd}d|}tjd||�}|S)z�Translate a shell-like glob pattern to a regular expression.

        Return a string containing the regex.  Differs from
        'fnmatch.translate()' in that '*' does not match "special characters"
        (which are platform-specific).
        r]z\\\\z\1[^%s]z((?<!\\)(\\\\)*)\.)�fnmatch�	translaterrra�sub)rrLrZrZescapedrrrrdts

zManifest._glob_to_re)N)F)TNF)TNF)TNF)�__name__�
__module__�__qualname__�__doc__rr'r)r,r<r?rNrErFrGrWrdrrrrr%s 

	
O/
(

6)rprjZloggingrra�sysr-r�compatr�utilr�__all__Z	getLoggerrmr/rb�MZ_COLLAPSE_PATTERN�SZ_COMMENTED_LINE�version_inforc�objectrrrrr�<module>
s
_vendor/distlib/__pycache__/util.cpython-36.opt-1.pyc000064400000126527151733136300016423 0ustar003

�Pfi��@s>ddlZddlmZddlZddlZddlmZddlZddl	Z	ddl
Z
ddlZddlZddl
Z
ddlZddlZyddlZWnek
r�dZYnXddlZddlZddlZddlZddlZyddlZWnek
r�ddlZYnXddlZddlmZddlmZmZmZmZm Z m!Z!m"Z"m#Z#m$Z$m%Z%m&Z&m'Z'm(Z(m)Z)m*Z*m+Z+m,Z,m-Z-m.Z.m/Z/e
j0e1�Z2dZ3e
j4e3�Z5dZ6d	e6d
Z7e6dZ8dZ9d
e9de8de3d
e9de8dZ:dZ;de:de;de:dZ<e7d
e3e7dZ=de=dZ>de6de>de<dZ?e
j4e?�Z@de9de8d
ZAe
j4eA�ZBdd �ZCd!d"�ZDd#d$�ZEd%d&�ZFd�d'd(�ZGd)d*�ZHd+d,�ZId-d.�ZJejKd/d0��ZLejKd1d2��ZMejKd�d4d5��ZNGd6d7�d7eO�ZPd8d9�ZQGd:d;�d;eO�ZRd<d=�ZSGd>d?�d?eO�ZTe
j4d@e
jU�ZVdAdB�ZWd�dCdD�ZXdEdF�ZYdGdH�ZZdIdJ�Z[dKdL�Z\dMdN�Z]e
j4dOe
j^�Z_e
j4dP�Z`d�dQdR�Zae
j4dS�ZbdTdU�ZcdVdW�ZddXdY�ZedZZfd[d\�Zgd]d^�ZhGd_d`�d`eO�ZiGdadb�dbeO�ZjGdcdd�ddeO�Zkd�Zld�dmdn�Zmdodp�Znd�ZoGdwdx�dxeO�Zpe
j4dy�Zqe
j4dz�Zre
j4d{�Zsd|d}�Zd~d�Zte�r\dd�lmuZvmwZwmxZxGd�d��d�e$jy�ZyGd�d��d�ev�ZuGd�d��d�eue'�Zzej{dd��Z|e|d�k�r�Gd�d��d�e$j}�Z}e�r�Gd�d��d�e$j~�Z~Gd�d��d�e%j�Ze�r�Gd�d��d�e%j��Z�Gd�d��d�e%j��Z�d�d��Z�Gd�d��d�eO�Z�Gd�d��d�e��Z�Gd�d��d�e��Z�Gd�d��d�e(�Z�Gd�d��d�eO�Z�d�d��Z�dS)��N)�deque)�iglob�)�DistlibException)�string_types�	text_type�shutil�	raw_input�StringIO�cache_from_source�urlopen�urljoin�httplib�	xmlrpclib�	splittype�HTTPHandler�BaseConfigurator�valid_ident�	Container�configparser�URLError�ZipFile�fsdecode�unquotez\s*,\s*z
(\w|[.-])+z(\*|:(\*|\w+):|�)z\*?z([<>=!~]=)|[<>]�(z)?\s*(z)(z)\s*(z))*z(from\s+(?P<diref>.*))z\(\s*(?P<c1>�|z)\s*\)|(?P<c2>z\s*)z)*z\[\s*(?P<ex>z)?\s*\]z(?P<dn>z	\s*)?(\s*z)?$z(?P<op>z)\s*(?P<vn>cs�dd��d}tj|�}|r�|j�}|d}|dp8|d}|dsHd}nd}|dj�}|snd}d}|d}nL|dd	kr�d
|}tj|�}	�fdd�|	D�}d
|djdd�|D��f}|ds�d}
ntj|d�}
t	|||
|||d�}|S)NcSs|j�}|d|dfS)N�opZvn)�	groupdict)�m�d�r!�/usr/lib/python3.6/util.py�get_constraintYsz)parse_requirement.<locals>.get_constraintZdnZc1Zc2Zdiref�rz<>!=z~=csg|]}�|��qSr!r!)�.0r)r#r!r"�
<listcomp>qsz%parse_requirement.<locals>.<listcomp>z%s (%s)z, cSsg|]}d|�qS)z%s %sr!)r%Zconr!r!r"r&rsZex)�nameZconstraints�extrasZrequirement�source�url)
�REQUIREMENT_RE�matchr�strip�RELOP_IDENT_RE�finditer�join�COMMA_RE�splitr)�s�resultrr r'Zconsr*ZconstrZrs�iteratorr(r!)r#r"�parse_requirementWs4


r6cCs�dd�}i}x�|D]�\}}}tjj||�}x�t|�D]t}tjj||�}	x`t|	�D]T}
|||
�}|dkrt|j|d�qP|||
�}|jtjjd�jd�}
|
d|||<qPWq4WqW|S)z%Find destinations for resources filescSs6|jtjjd�}|jtjjd�}|t|�d�jd�S)N�/)�replace�os�path�sep�len�lstrip)�baser:r!r!r"�get_rel_pathsz)get_resources_dests.<locals>.get_rel_pathNr7)r9r:r0r�popr8r;�rstrip)Zresources_rootZrulesr?Zdestinationsr>�suffix�dest�prefixZabs_baseZabs_globZabs_pathZ
resource_fileZrel_pathZrel_destr!r!r"�get_resources_dests|s

rEcCs(ttd�rd}ntjttdtj�k}|S)NZreal_prefixT�base_prefix)�hasattr�sysrD�getattr)r4r!r!r"�in_venv�s
rJcCs$tjjtj�}t|t�s t|�}|S)N)r9r:�normcaserH�
executable�
isinstancerr)r4r!r!r"�get_executable�s

rNcCsT|}xJt|�}|}|r |r |}|r|dj�}||kr:P|rd|||f}qW|S)Nrz	%c: %s
%s)r	�lower)�promptZ
allowed_charsZerror_prompt�default�pr3�cr!r!r"�proceed�s
rTcCs<t|t�r|j�}i}x |D]}||kr||||<qW|S)N)rMrr2)r �keysr4�keyr!r!r"�extract_by_key�s

rWcCsntjddkrtjd�|�}|j�}t|�}yftj|�}|ddd}xF|j�D]:\}}x0|j�D]$\}}d||f}t	|�}	|	||<qdWqRW|St
k
r�|jdd�YnXdd	�}
tj
�}y|
||�Wn<tjk
�r|j�tj|�}t|�}|
||�YnXi}xT|j�D]H}i||<}x4|j|�D]&\}
}d|
|f}t	|�}	|	||
<�q:W�qW|S)
Nr�zutf-8�
extensionszpython.exports�exportsz%s = %scSs$t|d�r|j|�n
|j|�dS)N�	read_file)rGr[Zreadfp)�cp�streamr!r!r"�read_stream�s
z!read_exports.<locals>.read_stream)rH�version_info�codecs�	getreader�readr
�json�load�items�get_export_entry�	Exception�seekr�ConfigParserZMissingSectionHeaderError�close�textwrap�dedentZsections)r]�dataZjdatar4�groupZentries�k�vr3�entryr^r\rVr'�valuer!r!r"�read_exports�s@

rscCs�tjddkrtjd�|�}tj�}x||j�D]p\}}|j|�x\|j�D]P}|j	dkr`|j
}nd|j
|j	f}|jr�d|dj|j�f}|j
||j|�qJWq.W|j|�dS)NrrXzutf-8z%s:%sz%s [%s]z, )rHr_r`�	getwriterrrireZadd_section�valuesrBrD�flagsr0�setr'�write)rZr]r\rorprqr3r!r!r"�
write_exports�s

ryccs$tj�}z
|VWdtj|�XdS)N)�tempfile�mkdtempr�rmtree)Ztdr!r!r"�tempdirs
r}ccs.tj�}ztj|�dVWdtj|�XdS)N)r9�getcwd�chdir)r �cwdr!r!r"rs


r�ccs.tj�}ztj|�dVWdtj|�XdS)N)�socketZgetdefaulttimeoutZsetdefaulttimeout)ZsecondsZctor!r!r"�socket_timeouts


r�c@seZdZdd�Zddd�ZdS)�cached_propertycCs
||_dS)N)�func)�selfr�r!r!r"�__init__)szcached_property.__init__NcCs,|dkr|S|j|�}tj||jj|�|S)N)r��object�__setattr__�__name__)r��obj�clsrrr!r!r"�__get__.s

zcached_property.__get__)N)r��
__module__�__qualname__r�r�r!r!r!r"r�(sr�cCs�tjdkr|S|s|S|ddkr.td|��|ddkrFtd|��|jd�}xtj|krj|jtj�qRW|svtjStjj|�S)a�Return 'pathname' as a name that will work on the native filesystem.

    The path is split on '/' and put back together again using the current
    directory separator.  Needed because filenames in the setup script are
    always supplied in Unix style, and have to be converted to the local
    convention before we can actually use them in the filesystem.  Raises
    ValueError on non-Unix-ish systems if 'pathname' either starts or
    ends with a slash.
    r7rzpath '%s' cannot be absoluterzpath '%s' cannot end with '/'���)r9r;�
ValueErrorr2�curdir�remover:r0)�pathname�pathsr!r!r"�convert_path6s


r�c@s�eZdZd$dd�Zdd�Zdd�Zdd	�Zd%dd�Zd&dd�Zdd�Z	dd�Z
dd�Zdd�Zdd�Z
d'dd�Zdd�Zdd�Zd d!�Zd"d#�Zd
S)(�FileOperatorFcCs||_t�|_|j�dS)N)�dry_runrw�ensured�_init_record)r�r�r!r!r"r�RszFileOperator.__init__cCsd|_t�|_t�|_dS)NF)�recordrw�
files_written�dirs_created)r�r!r!r"r�WszFileOperator._init_recordcCs|jr|jj|�dS)N)r�r��add)r�r:r!r!r"�record_as_written\szFileOperator.record_as_writtencCsHtjj|�s tdtjj|���tjj|�s0dStj|�jtj|�jkS)a�Tell if the target is newer than the source.

        Returns true if 'source' exists and is more recently modified than
        'target', or if 'source' exists and 'target' doesn't.

        Returns false if both exist and 'target' is the same age or younger
        than 'source'. Raise PackagingFileError if 'source' does not exist.

        Note that this test is not very accurate: files created in the same
        second will have the same "age".
        zfile '%r' does not existT)r9r:�existsr�abspath�stat�st_mtime)r�r)�targetr!r!r"�newer`szFileOperator.newerTcCs�|jtjj|��tjd||�|js�d}|rftjj|�rDd|}n"tjj|�rftjj	|�rfd|}|rvt
|d��tj||�|j
|�dS)z8Copy a file respecting dry-run and force flags.
        zCopying %s to %sNz%s is a symlinkz%s is a non-regular filez which would be overwritten)�
ensure_dirr9r:�dirname�logger�infor��islinkr��isfiler�rZcopyfiler�)r�Zinfile�outfile�check�msgr!r!r"�	copy_filets
zFileOperator.copy_fileNc
Cst|jtjj|��tjd||�|jsf|dkr:t|d�}ntj|d|d�}zt	j
||�Wd|j�X|j|�dS)NzCopying stream %s to %s�wb�w)�encoding)
r�r9r:r�r�r�r��openr`rZcopyfileobjrjr�)r�Zinstreamr�r�Z	outstreamr!r!r"�copy_stream�s
zFileOperator.copy_streamc	CsF|jtjj|��|js8t|d��}|j|�WdQRX|j|�dS)Nr�)r�r9r:r�r�r�rxr�)r�r:rm�fr!r!r"�write_binary_file�s
zFileOperator.write_binary_filec
CsL|jtjj|��|js>t|d��}|j|j|��WdQRX|j|�dS)Nr�)	r�r9r:r�r�r�rx�encoder�)r�r:rmr�r�r!r!r"�write_text_file�s
zFileOperator.write_text_filecCsrtjdkstjdkrntjdkrnxN|D]F}|jr<tjd|�q$tj|�j|B|@}tjd||�tj||�q$WdS)N�posix�javazchanging mode of %szchanging mode of %s to %o)	r9r'�_namer�r�r�r��st_mode�chmod)r��bits�mask�filesr��moder!r!r"�set_mode�s
zFileOperator.set_modecCs|jdd|�S)Nimi�)r�)r3r�r!r!r"�<lambda>�szFileOperator.<lambda>cCs~tjj|�}||jkrztjj|�rz|jj|�tjj|�\}}|j|�tj	d|�|j
shtj|�|jrz|j
j|�dS)NzCreating %s)r9r:r�r�r�r�r2r�r�r�r��mkdirr�r�)r�r:r r�r!r!r"r��s

zFileOperator.ensure_dircCsht||�}tjd||�|jsZ|s0|j||�rJ|s:d}n|t|�d�}tj|||d�|j|�|S)NzByte-compiling %s to %sT)	rr�r�r�r�r<�
py_compile�compiler�)r�r:�optimize�forcerDZdpathZdiagpathr!r!r"�byte_compile�s
zFileOperator.byte_compilecCs�tjj|�r�tjj|�r`tjj|�r`tjd|�|jsBtj	|�|j
r�||jkr�|jj|�nPtjj|�rrd}nd}tjd||�|js�tj|�|j
r�||j
kr�|j
j|�dS)NzRemoving directory tree at %s�link�filezRemoving %s %s)r9r:r��isdirr�r��debugr�rr|r�r�r�r�)r�r:r3r!r!r"�ensure_removed�s"



zFileOperator.ensure_removedcCsHd}x>|sBtjj|�r&tj|tj�}Ptjj|�}||kr<P|}qW|S)NF)r9r:r��access�W_OKr�)r�r:r4�parentr!r!r"�is_writable�szFileOperator.is_writablecCs|j|jf}|j�|S)zV
        Commit recorded changes, turn off recording, return
        changes.
        )r�r�r�)r�r4r!r!r"�commit�szFileOperator.commitcCs�|js�x(t|j�D]}tjj|�rtj|�qWt|jdd�}x@|D]8}tj	|�}|rrtjj
||d�}tj|�tj|�qDW|j�dS)NT)�reverser)
r��listr�r9r:r�r��sortedr��listdirr0�rmdirr�)r�r��dirsr �flistZsdr!r!r"�rollback�s


zFileOperator.rollback)F)T)N)FFN)r�r�r�r�r�r�r�r�r�r�r�r�Zset_executable_moder�r�r�r�r�r�r!r!r!r"r�Qs 




r�cCsb|tjkrtj|}nt|�}|dkr,|}n2|jd�}t||jd��}x|D]}t||�}qLW|S)N�.r)rH�modules�
__import__r2rIr@)Zmodule_nameZdotted_path�modr4�partsrRr!r!r"�resolves


r�c@s6eZdZdd�Zedd��Zdd�Zdd�Zej	Z	d	S)
�ExportEntrycCs||_||_||_||_dS)N)r'rDrBrv)r�r'rDrBrvr!r!r"r�szExportEntry.__init__cCst|j|j�S)N)r�rDrB)r�r!r!r"rrszExportEntry.valuecCsd|j|j|j|jfS)Nz<ExportEntry %s = %s:%s %s>)r'rDrBrv)r�r!r!r"�__repr__!s
zExportEntry.__repr__cCsDt|t�sd}n0|j|jko>|j|jko>|j|jko>|j|jk}|S)NF)rMr�r'rDrBrv)r��otherr4r!r!r"�__eq__%s
zExportEntry.__eq__N)
r�r�r�r�r�rrr�r�r��__hash__r!r!r!r"r�s

r�z�(?P<name>(\w|[-.+])+)
                      \s*=\s*(?P<callable>(\w+)([:\.]\w+)*)
                      \s*(\[\s*(?P<flags>\w+(=\w+)?(,\s*\w+(=\w+)?)*)\s*\])?
                      c
Cs�tj|�}|s0d}d|ks"d|kr�td|��n�|j�}|d}|d}|jd�}|dkrf|d}}n"|dkrztd|��|jd�\}}|d	}	|	dkr�d|ks�d|kr�td|��g}	nd
d�|	jd�D�}	t||||	�}|S)
N�[�]zInvalid specification '%s'r'�callable�:rrrvcSsg|]}|j��qSr!)r-)r%r�r!r!r"r&Qsz$get_export_entry.<locals>.<listcomp>�,)�ENTRY_RE�searchrr�countr2r�)
Z
specificationrr4r r'r:ZcolonsrDrBrvr!r!r"rf7s2


rfcCs�|dkrd}tjdkr.dtjkr.tjjd�}ntjjd�}tjj|�rftj|tj�}|s�t	j
d|�n<ytj|�d}Wn(tk
r�t	j
d	|dd
�d}YnX|s�t
j�}t	j
d|�tjj||�S)
a�
    Return the default base location for distlib caches. If the directory does
    not exist, it is created. Use the suffix provided for the base directory,
    and default to '.distlib' if it isn't provided.

    On Windows, if LOCALAPPDATA is defined in the environment, then it is
    assumed to be a directory, and will be the parent directory of the result.
    On POSIX, and on Windows if LOCALAPPDATA is not defined, the user's home
    directory - using os.expanduser('~') - will be the parent directory of
    the result.

    The result is just the directory '.distlib' in the parent directory as
    determined above, or with the name specified with ``suffix``.
    Nz.distlib�ntZLOCALAPPDATAz
$localappdata�~z(Directory exists but is not writable: %sTzUnable to create %s)�exc_infoFz#Default location unusable, using %s)r9r'�environr:�
expandvars�
expanduserr�r�r�r��warning�makedirs�OSErrorrzr{r0)rBr4Zusabler!r!r"�get_cache_baseVs&

r�cCsBtjjtjj|��\}}|r(|jdd�}|jtjd�}||dS)a
    Convert an absolute path to a directory name for use in a cache.

    The algorithm used is:

    #. On Windows, any ``':'`` in the drive is replaced with ``'---'``.
    #. Any occurrence of ``os.sep`` is replaced with ``'--'``.
    #. ``'.cache'`` is appended.
    r�z---z--z.cache)r9r:�
splitdriver�r8r;)r:r rRr!r!r"�path_to_cache_dirs

r�cCs|jd�s|dS|S)Nr7)�endswith)r3r!r!r"�ensure_slash�s
r�cCsHd}}d|kr>|jdd�\}}d|kr.|}n|jdd�\}}|||fS)N�@rr�)r2)ZnetlocZusernameZpasswordrDr!r!r"�parse_credentials�sr�cCstjd�}tj|�|S)N�)r9�umask)r4r!r!r"�get_process_umask�s

rcCs2d}d}x$t|�D]\}}t|t�sd}PqW|S)NTF)�	enumeraterMr)�seqr4�ir3r!r!r"�is_string_sequence�s
rz3([a-z0-9_]+([.-][a-z_][a-z0-9_]*)*)-([a-z0-9_.+-]+)z
-py(\d\.?\d?)cCs�d}d}t|�jdd�}tj|�}|r@|jd�}|d|j��}|r�t|�t|�dkr�tjtj	|�d|�}|r�|j
�}|d|�||dd�|f}|dkr�tj|�}|r�|jd�|jd�|f}|S)zw
    Extract name, version, python version from a filename (no extension)

    Return name, version, pyver or None
    N� �-rz\brX)rr8�PYTHON_VERSIONr�rn�startr<�rer,�escape�end�PROJECT_NAME_AND_VERSION)�filenameZproject_namer4Zpyverr�nr!r!r"�split_filename�s"


rz-(?P<name>[\w .-]+)\s*\(\s*(?P<ver>[^\s)]+)\)$cCs:tj|�}|std|��|j�}|dj�j�|dfS)z�
    A utility method used to get name and version from a string.

    From e.g. a Provides-Dist value.

    :param p: A value in a form 'foo (1.0)'
    :return: The name and version as a tuple.
    z$Ill-formed name/version string: '%s'r'Zver)�NAME_VERSION_REr,rrr-rO)rRrr r!r!r"�parse_name_and_version�s
	
rcCs�t�}t|pg�}t|pg�}d|kr8|jd�||O}x�|D]x}|dkrV|j|�q>|jd�r�|dd�}||kr�tjd|�||kr�|j|�q>||kr�tjd|�|j|�q>W|S)N�*rrzundeclared extra: %s)rwr�r��
startswithr�r�)Z	requestedZ	availabler4�rZunwantedr!r!r"�
get_extras�s&


rcCs�i}yNt|�}|j�}|jd�}|jd�s8tjd|�ntjd�|�}tj	|�}Wn0t
k
r�}ztjd||�WYdd}~XnX|S)NzContent-Typezapplication/jsonz(Unexpected response for JSON request: %szutf-8z&Failed to get external data for %s: %s)rr��getrr�r�r`rarcrdrg�	exception)r*r4ZrespZheadersZct�reader�er!r!r"�_get_external_data�s

 rz'https://www.red-dove.com/pypi/projects/cCs*d|dj�|f}tt|�}t|�}|S)Nz%s/%s/project.jsonr)�upperr
�_external_data_base_urlr)r'r*r4r!r!r"�get_project_datas
rcCs(d|dj�||f}tt|�}t|�S)Nz%s/%s/package-%s.jsonr)rr
rr)r'�versionr*r!r!r"�get_package_datas
r c@s(eZdZdZdd�Zdd�Zdd�ZdS)	�Cachez�
    A class implementing a cache for resources that need to live in the file system
    e.g. shared libraries. This class was moved from resources to here because it
    could be used by other modules, e.g. the wheel module.
    cCsPtjj|�stj|�tj|�jd@dkr6tjd|�tjjtjj	|��|_
dS)zu
        Initialise an instance.

        :param base: The base directory where the cache should be located.
        �?rzDirectory '%s' is not privateN)r9r:r�r�r�r�r�r�r��normpathr>)r�r>r!r!r"r�"s

zCache.__init__cCst|�S)zN
        Converts a resource prefix to a directory name in the cache.
        )r�)r�rDr!r!r"�
prefix_to_dir0szCache.prefix_to_dircCs�g}x�tj|j�D]r}tjj|j|�}y>tjj|�s@tjj|�rLtj|�ntjj|�rbt	j
|�Wqtk
r�|j|�YqXqW|S)z"
        Clear the cache.
        )
r9r�r>r:r0r�r�r�r�rr|rg�append)r�Znot_removed�fnr!r!r"�clear6szCache.clearN)r�r�r��__doc__r�r$r'r!r!r!r"r!sr!c@s:eZdZdZdd�Zddd�Zdd�Zd	d
�Zdd�Zd
S)�
EventMixinz1
    A very simple publish/subscribe system.
    cCs
i|_dS)N)�_subscribers)r�r!r!r"r�KszEventMixin.__init__TcCsD|j}||krt|g�||<n"||}|r6|j|�n
|j|�dS)a`
        Add a subscriber for an event.

        :param event: The name of an event.
        :param subscriber: The subscriber to be added (and called when the
                           event is published).
        :param append: Whether to append or prepend the subscriber to an
                       existing subscriber list for the event.
        N)r*rr%�
appendleft)r��event�
subscriberr%�subsZsqr!r!r"r�Ns
zEventMixin.addcCs,|j}||krtd|��||j|�dS)z�
        Remove a subscriber for an event.

        :param event: The name of an event.
        :param subscriber: The subscriber to be removed.
        zNo subscribers: %rN)r*r�r�)r�r,r-r.r!r!r"r�bszEventMixin.removecCst|jj|f��S)z�
        Return an iterator for the subscribers for an event.
        :param event: The event to return subscribers for.
        )�iterr*r)r�r,r!r!r"�get_subscribersnszEventMixin.get_subscriberscOspg}xT|j|�D]F}y||f|�|�}Wn"tk
rJtjd�d}YnX|j|�qWtjd||||�|S)a^
        Publish a event and return a list of values returned by its
        subscribers.

        :param event: The event to publish.
        :param args: The positional arguments to pass to the event's
                     subscribers.
        :param kwargs: The keyword arguments to pass to the event's
                       subscribers.
        z"Exception during event publicationNz/publish %s: args = %s, kwargs = %s, result = %s)r0rgr�rr%r�)r�r,�args�kwargsr4r-rrr!r!r"�publishus

zEventMixin.publishN)T)	r�r�r�r(r�r�r�r0r3r!r!r!r"r)Gs
r)c@s^eZdZdd�Zdd�Zddd�Zdd	�Zd
d�Zdd
�Zdd�Z	e
dd��Ze
dd��ZdS)�	SequencercCsi|_i|_t�|_dS)N)�_preds�_succsrw�_nodes)r�r!r!r"r��szSequencer.__init__cCs|jj|�dS)N)r7r�)r��noder!r!r"�add_node�szSequencer.add_nodeFcCs�||jkr|jj|�|r�x&t|jj|f��D]}|j||�q.Wx&t|jj|f��D]}|j||�qVWx&t|jj��D]\}}|sz|j|=qzWx&t|jj��D]\}}|s�|j|=q�WdS)N)r7r�rwr5rr6r�re)r�r8ZedgesrRr3rorpr!r!r"�remove_node�s
zSequencer.remove_nodecCs0|jj|t��j|�|jj|t��j|�dS)N)r5�
setdefaultrwr�r6)r��pred�succr!r!r"r��sz
Sequencer.addcCs|y|j|}|j|}Wn tk
r8td|��YnXy|j|�|j|�Wn$tk
rvtd||f��YnXdS)Nz%r not a successor of anythingz%r not a successor of %r)r5r6�KeyErrorr�r�)r�r<r=�predsZsuccsr!r!r"r��s

zSequencer.removecCs||jkp||jkp||jkS)N)r5r6r7)r��stepr!r!r"�is_step�szSequencer.is_stepcCs�|j|�std|��g}g}t�}|j|�xd|r�|jd�}||krd||kr�|j|�|j|�q0|j|�|j|�|jj|f�}|j	|�q0Wt
|�S)NzUnknown: %rr)rAr�rwr%r@r�r�r5r�extend�reversed)r��finalr4Ztodo�seenr@r?r!r!r"�	get_steps�s"





zSequencer.get_stepscsVdg�g�i�i�g�|j��������fdd��x�D]}|�kr:�|�q:W�S)Nrc
s��d�|<�d�|<�dd7<�j|�y�|}Wntk
rVg}YnXxR|D]J}|�kr��|�t�|�|��|<q^|�kr^t�|�|��|<q^W�|�|kr�g}x �j�}|j|�||kr�Pq�Wt|�}�j|�dS)Nrr)r%rg�minr@�tuple)r8Z
successorsZ	successorZconnected_componentZ	component)�graph�index�
index_counter�lowlinksr4�stack�
strongconnectr!r"rN�s.



z3Sequencer.strong_connections.<locals>.strongconnect)r6)r�r8r!)rIrJrKrLr4rMrNr"�strong_connections�s"
zSequencer.strong_connectionscCsrdg}x8|jD].}|j|}x|D]}|jd||f�q"WqWx|jD]}|jd|�qHW|jd�dj|�S)Nzdigraph G {z  %s -> %s;z  %s;�}�
)r5r%r7r0)r�r4r=r?r<r8r!r!r"�dot
s


z
Sequencer.dotN)F)
r�r�r�r�r9r:r�r�rArF�propertyrOrRr!r!r!r"r4�s

3r4�.tar.gz�.tar.bz2�.tar�.zip�.tgz�.tbz�.whlTc
sf��fdd�}tjj���t���d}|dkr�|jd�r>d}nH|jd�rRd}d	}n4|jd�rfd}d
}n |jd�rzd}d}ntd|��z�|dkr�t|d�}|r�|j�}xD|D]}||�q�Wn.tj	||�}|r�|j
�}x|D]}||�q�W|dk�r6tjddk�r6x.|j
�D]"}	t|	jt��s|	jjd�|	_�qWdd�}
|
|_|j��Wd|�r`|j�XdS)NcsTt|t�s|jd�}tjjtjj�|��}|j��sD|�tjkrPt	d|��dS)Nzutf-8zpath outside destination: %r)
rMr�decoder9r:r�r0rr;r�)r:rR)�dest_dir�plenr!r"�
check_paths


zunarchive.<locals>.check_path�.zip�.whl�zip�.tar.gz�.tgzZtgzzr:gz�.tar.bz2�.tbzZtbzzr:bz2z.tarZtarrzUnknown format for %rrrXzutf-8cSsBytj||�Stjk
r<}ztt|���WYdd}~XnXdS)z:Run tarfile.tar_fillter, but raise the expected ValueErrorN)�tarfileZ
tar_filterZFilterErrorr��str)�memberr:�excr!r!r"�extraction_filterPsz$unarchive.<locals>.extraction_filter)r_r`)rbrc)rdre)r9r:r�r<r�r�rZnamelistrfr�ZgetnamesrHr_Z
getmembersrMr'rr[rjZ
extractallrj)Zarchive_filenamer\�formatr�r^�archiver��namesr'Ztarinforjr!)r\r]r"�	unarchivesL






rncCs�tj�}t|�}t|d��b}xZtj|�D]L\}}}x@|D]8}tjj||�}||d�}	tjj|	|�}
|j||
�q8Wq(WWdQRX|S)z*zip a directory tree into a BytesIO objectr�N)	�io�BytesIOr<rr9�walkr:r0rx)Z	directoryr4ZdlenZzf�rootr�r�r'ZfullZrelrCr!r!r"�zip_dir`s
rsr$�K�M�G�T�Pc@sreZdZdZddd�Zdd�Zdd	�Zd
d�Zdd
�Ze	dd��Z
e	dd��Zdd�Ze	dd��Z
e	dd��ZdS)�ProgressZUNKNOWNr�dcCs(||_|_||_d|_d|_d|_dS)NrF)rG�cur�max�started�elapsed�done)r�ZminvalZmaxvalr!r!r"r�ws
zProgress.__init__cCs0||_tj�}|jdkr ||_n||j|_dS)N)r{�timer}r~)r�ZcurvalZnowr!r!r"�updates

zProgress.updatecCs|j|j|�dS)N)r�r{)r�Zincrr!r!r"�	increment�szProgress.incrementcCs|j|j�|S)N)r�rG)r�r!r!r"r	�szProgress.startcCs |jdk	r|j|j�d|_dS)NT)r|r�r)r�r!r!r"�stop�s
z
Progress.stopcCs|jdkr|jS|jS)N)r|�unknown)r�r!r!r"�maximum�szProgress.maximumcCsD|jrd}n4|jdkrd}n$d|j|j|j|j}d|}|S)Nz100 %z ?? %gY@z%3d %%)rr|r{rG)r�r4rpr!r!r"�
percentage�s
zProgress.percentagecCs:|dkr|jdks|j|jkr$d}ntjdtj|��}|S)Nrz??:??:??z%H:%M:%S)r|r{rGr�ZstrftimeZgmtime)r�Zdurationr4r!r!r"�format_duration�szProgress.format_durationcCs�|jrd}|j}n^d}|jdkr&d}nJ|jdks<|j|jkrBd}n.t|j|j�}||j|j}|d|j}d||j|�fS)NZDonezETA rrz%s: %sr�)rr~r|r{rG�floatr�)r�rD�tr!r!r"�ETA�s
zProgress.ETAcCsN|jdkrd}n|j|j|j}xtD]}|dkr6P|d}q(Wd||fS)Nrgi�g@�@z%d %sB/s)r~r{rG�UNITS)r�r4Zunitr!r!r"�speed�s

zProgress.speedN)rrz)r�r�r�r�r�r�r�r	r�rSr�r�r�r�r�r!r!r!r"ryts

	ryz\{([^}]*)\}z[^/\\,{]\*\*|\*\*[^/\\,}]z^[^{]*\}|\{[^}]*$cCs<tj|�rd}t||��tj|�r4d}t||��t|�S)zAExtended globbing function that supports ** and {opt1,opt2,opt3}.z7invalid glob %r: recursive glob "**" must be used alonez2invalid glob %r: mismatching set marker '{' or '}')�_CHECK_RECURSIVE_GLOBr�r��_CHECK_MISMATCH_SET�_iglob)�	path_globr�r!r!r"r�s

rccstj|d�}t|�dkr\|\}}}x�|jd�D](}x"tdj|||f��D]
}|VqHWq.Wn�d|kr~x�t|�D]
}|VqnWn�|jdd�\}}|dkr�d}|dkr�d}n|jd�}|jd�}xFtj|�D]8\}}}	tj	j
|�}x ttj	j||��D]
}
|
Vq�Wq�WdS)	Nrr�r$z**r�rr7�\)�	RICH_GLOBr2r<r�r0�	std_iglobr=r9rqr:r#)r�Zrich_path_globrDrwrB�itemr:Zradical�dirr�r&r!r!r"r��s(


r�)�HTTPSHandler�match_hostname�CertificateErrorc@seZdZdZdZdd�ZdS)�HTTPSConnectionNTcCsPtj|j|jf|j�}t|dd�r0||_|j�tt	d�sp|j
rHt	j}nt	j}t	j
||j|j|t	j|j
d�|_nxt	jt	j�}|jt	jO_|jr�|j|j|j�i}|j
r�t	j|_|j|j
d�tt	dd�r�|j|d<|j
|f|�|_|j
o�|j�rLy$t|jj�|j�tjd|j�Wn0tk
�rJ|jjtj�|jj��YnXdS)	NZ_tunnel_hostF�
SSLContext)�	cert_reqsZssl_version�ca_certs)ZcafileZHAS_SNIZserver_hostnamezHost verified: %s) r�Zcreate_connection�host�port�timeoutrI�sockZ_tunnelrG�sslr�Z
CERT_REQUIREDZ	CERT_NONEZwrap_socketZkey_fileZ	cert_fileZPROTOCOL_SSLv23r�ZoptionsZOP_NO_SSLv2Zload_cert_chainZverify_modeZload_verify_locations�check_domainr�Zgetpeercertr�r�r�ZshutdownZ	SHUT_RDWRrj)r�r�r��contextr2r!r!r"�connect
s>


zHTTPSConnection.connect)r�r�r�r�r�r�r!r!r!r"r�sr�c@s&eZdZd	dd�Zdd�Zdd�ZdS)
r�TcCstj|�||_||_dS)N)�BaseHTTPSHandlerr�r�r�)r�r�r�r!r!r"r�0s
zHTTPSHandler.__init__cOs$t||�}|jr |j|_|j|_|S)a
            This is called to create a connection instance. Normally you'd
            pass a connection class to do_open, but it doesn't actually check for
            a class, and just expects a callable. As long as we behave just as a
            constructor would have, we should be OK. If it ever changes so that
            we *must* pass a class, we'll create an UnsafeHTTPSConnection class
            which just sets check_domain to False in the class definition, and
            choose which one to pass to do_open.
            )r�r�r�)r�r1r2r4r!r!r"�_conn_maker5s


zHTTPSHandler._conn_makercCsVy|j|j|�Stk
rP}z&dt|j�kr>td|j��n�WYdd}~XnXdS)Nzcertificate verify failedz*Unable to verify server certificate for %s)Zdo_openr�rrg�reasonr�r�)r��reqrr!r!r"�
https_openEszHTTPSHandler.https_openN)T)r�r�r�r�r�r�r!r!r!r"r�/s
r�c@seZdZdd�ZdS)�HTTPSOnlyHandlercCstd|��dS)NzAUnexpected HTTP request on what should be a secure connection: %s)r)r�r�r!r!r"�	http_openYszHTTPSOnlyHandler.http_openN)r�r�r�r�r!r!r!r"r�Xsr���c@seZdZddd�ZdS)�HTTPr$NcKs&|dkrd}|j|j||f|��dS)Nr)�_setup�_connection_class)r�r�r�r2r!r!r"r�esz
HTTP.__init__)r$N)r�r�r�r�r!r!r!r"r�dsr�c@seZdZddd�ZdS)�HTTPSr$NcKs&|dkrd}|j|j||f|��dS)Nr)r�r�)r�r�r�r2r!r!r"r�mszHTTPS.__init__)r$N)r�r�r�r�r!r!r!r"r�lsr�c@seZdZddd�Zdd�ZdS)�	TransportrcCs||_tjj||�dS)N)r�rr�r�)r�r��use_datetimer!r!r"r�tszTransport.__init__cCsb|j|�\}}}tdkr(t||jd�}n6|js>||jdkrT||_|tj|�f|_|jd}|S)Nr�r�)r�rr)r�r�)�
get_host_info�	_ver_infor�r��_connection�_extra_headersrZHTTPConnection)r�r��h�ehZx509r4r!r!r"�make_connectionxs
zTransport.make_connectionN)r)r�r�r�r�r�r!r!r!r"r�ss
r�c@seZdZddd�Zdd�ZdS)�
SafeTransportrcCs||_tjj||�dS)N)r�rr�r�)r�r�r�r!r!r"r��szSafeTransport.__init__cCsz|j|�\}}}|si}|j|d<tdkr:t|df|�}n<|jsP||jdkrl||_|tj|df|�f|_|jd}|S)Nr�r�r�rr)r�r�)r�r�r�r�r�r�rr�)r�r�r�r�r2r4r!r!r"r��s


zSafeTransport.make_connectionN)r)r�r�r�r�r�r!r!r!r"r��s
r�c@seZdZdd�ZdS)�ServerProxyc	Kst|jdd�|_}|dk	r^t|�\}}|jdd�}|dkr@t}nt}|||d�|d<}||_tjj	||f|�dS)Nr�r�rZhttps)r��	transport)
r@r�rrr�r�r�rr�r�)	r�Zurir2r��scheme�_r�Ztclsr�r!r!r"r��szServerProxy.__init__N)r�r�r�r�r!r!r!r"r��sr�cKs.tjddkr|d7}nd|d<t||f|�S)NrrX�br$�newline)rHr_r�)r&r�r2r!r!r"�	_csv_open�s
r�c@s4eZdZed�ed�ed�d�Zdd�Zdd�Zd	S)
�CSVBaser��"rQ)Z	delimiterZ	quotecharZlineterminatorcCs|S)Nr!)r�r!r!r"�	__enter__�szCSVBase.__enter__cGs|jj�dS)N)r]rj)r�r�r!r!r"�__exit__�szCSVBase.__exit__N)r�r�r�rg�defaultsr�r�r!r!r!r"r��s
r�c@s(eZdZdd�Zdd�Zdd�ZeZdS)�	CSVReadercKs\d|kr4|d}tjddkr,tjd�|�}||_nt|dd�|_tj|jf|j�|_dS)Nr]rrXzutf-8r:r)	rHr_r`rar]r��csvrr�)r�r2r]r!r!r"r��szCSVReader.__init__cCs|S)Nr!)r�r!r!r"�__iter__�szCSVReader.__iter__cCsJt|j�}tjddkrFx,t|�D] \}}t|t�s"|jd�||<q"W|S)NrrXzutf-8)�nextrrHr_rrMrr[)r�r4rr�r!r!r"r��s

zCSVReader.nextN)r�r�r�r�r�r��__next__r!r!r!r"r��sr�c@seZdZdd�Zdd�ZdS)�	CSVWritercKs$t|d�|_tj|jf|j�|_dS)Nr�)r�r]r��writerr�)r�r&r2r!r!r"r��szCSVWriter.__init__cCsRtjddkrBg}x*|D]"}t|t�r0|jd�}|j|�qW|}|jj|�dS)NrrXzutf-8)rHr_rMrr�r%r��writerow)r��rowrr�r!r!r"r��s


zCSVWriter.writerowN)r�r�r�r�r�r!r!r!r"r��sr�csHeZdZeej�Zded<d�fdd�	Zdd�Zdd	�Zd
d�Z	�Z
S)
�Configurator�inc_convertZincNcs"tt|�j|�|ptj�|_dS)N)�superr�r�r9r~r>)r��configr>)�	__class__r!r"r��szConfigurator.__init__c
s���fdd���jd�}t|�s*�j|�}�jdd�}�jdf�}|r\t�fdd�|D��}��fdd��D�}t|�}|||�}|r�x$|j�D]\}}	t||�|	��q�W|S)	Ncszt|ttf�r*t|��fdd�|D��}nLt|t�rld|krH�j|�}qvi}x(|D]}�||�||<qRWn
�j|�}|S)Ncsg|]}�|��qSr!r!)r%r)�convertr!r"r&�szBConfigurator.configure_custom.<locals>.convert.<locals>.<listcomp>z())rMr�rH�type�dict�configure_customr�)�or4ro)r�r�r!r"r��s


z.Configurator.configure_custom.<locals>.convertz()r�z[]csg|]}�|��qSr!r!)r%r�)r�r!r"r&sz1Configurator.configure_custom.<locals>.<listcomp>cs$g|]}t|�r|��|�f�qSr!)r)r%ro)r�r�r!r"r&s)r@r�r�rHr�re�setattr)
r�r�rSZpropsr1rer2r4rrpr!)r�r�r�r"r��s


zConfigurator.configure_customcCs4|j|}t|t�r0d|kr0|j|�|j|<}|S)Nz())r�rMr�r�)r�rVr4r!r!r"�__getitem__s
zConfigurator.__getitem__c	CsFtjj|�stjj|j|�}tj|ddd��}tj|�}WdQRX|S)z*Default converter for the inc:// protocol.rzutf-8)r�N)	r9r:�isabsr0r>r`r�rcrd)r�rrr�r4r!r!r"r�s
zConfigurator.inc_convert)N)r�r�r�r�rZvalue_convertersr�r�r�r��
__classcell__r!r!)r�r"r��s
r�c@s&eZdZd	dd�Zdd�Zdd�ZdS)
�SubprocessMixinFNcCs||_||_dS)N)�verbose�progress)r�r�r�r!r!r"r�+szSubprocessMixin.__init__cCsn|j}|j}xT|j�}|sP|dk	r0|||�q|sBtjjd�ntjj|jd��tjj�qW|j�dS)z�
        Read lines from a subprocess' output stream and either pass to a progress
        callable (if specified) or write progress information to sys.stderr.
        Nr�zutf-8)	r�r��readlinerH�stderrrxr[�flushrj)r�r]r�r�r�r3r!r!r"r/szSubprocessMixin.readercKs�tj|ftjtjd�|��}tj|j|jdfd�}|j�tj|j|jdfd�}|j�|j	�|j
�|j
�|jdk	r�|jdd�n|jr�t
jjd�|S)N)�stdoutr�r�)r�r1r�zdone.�mainzdone.
)�
subprocess�Popen�PIPE�	threadingZThreadrr�r	r��waitr0r�r�rHrx)r��cmdr2rRZt1Zt2r!r!r"�run_commandDs
zSubprocessMixin.run_command)FN)r�r�r�r�rr�r!r!r!r"r�*s
r�cCstjdd|�j�S)z,Normalize a python package name a la PEP 503z[-_.]+r)r
�subrO)r'r!r!r"�normalize_nameUsr�)NN)r�)N)N)rTrUrVrWrXrYrZ)NT)r$rtrurvrwrx)r�r�)�r`�collectionsr�
contextlibr�Zglobrr�rorcZloggingr9r�r
rr�r��ImportErrorr�rHrfrzrkr�Zdummy_threadingr�r$r�compatrrr	r
rrr
rrrrrrrrrrrrZ	getLoggerr�r��COMMAr�r1ZIDENTZEXTRA_IDENTZVERSPECZRELOPZBARE_CONSTRAINTSZ
DIRECT_REFZCONSTRAINTSZ
EXTRA_LISTZEXTRASZREQUIREMENTr+ZRELOP_IDENTr.r6rErJrNrTrWrsry�contextmanagerr}rr�r�r�r�r�r�r��VERBOSEr�rfr�r�r�r�rr�Ir
rrrrrrrrr r!r)r4ZARCHIVE_EXTENSIONSrnrsr�ryr�r�r�r�r�r�r�r�r�r�r_r�r�r�r�r�r�r�r�r�r�r�r�r�r!r!r!r"�<module>s�
X

,

%

	/
	7

)



,H
C]


*)	
:+_vendor/distlib/__pycache__/resources.cpython-36.opt-1.pyc000064400000025116151733136300017450 0ustar003

�Pf*�@s�ddlmZddlZddlZddlZddlZddlZddlZddlZddl	Z	ddl
Z
ddlmZddl
mZmZmZmZeje�ZdaGdd�de�ZGdd	�d	e�ZGd
d�de�ZGdd
�d
e�ZGdd�de�ZGdd�de�Zed�ee
jeiZyFyddl Z!Wne"k
�r$ddl#Z!YnXeee!j$<eee!j%<[!Wne"e&fk
�rXYnXdd�Z'iZ(dd�Z)e	j*e+d��Z,dd�Z-dS)�)�unicode_literalsN�)�DistlibException)�cached_property�get_cache_base�path_to_cache_dir�Cachecs.eZdZd�fdd�	Zdd�Zdd�Z�ZS)	�
ResourceCacheNcs0|dkrtjjt�td��}tt|�j|�dS)Nzresource-cache)�os�path�joinr�str�superr	�__init__)�self�base)�	__class__��/usr/lib/python3.6/resources.pyrszResourceCache.__init__cCsdS)z�
        Is the cache stale for the given resource?

        :param resource: The :class:`Resource` being cached.
        :param path: The path of the resource in the cache.
        :return: True if the cache is stale.
        Tr)r�resourcerrrr�is_stale#s	zResourceCache.is_stalec	Cs�|jj|�\}}|dkr|}n~tjj|j|j|�|�}tjj|�}tjj|�sXtj	|�tjj
|�sjd}n|j||�}|r�t|d��}|j
|j�WdQRX|S)z�
        Get a resource into the cache,

        :param resource: A :class:`Resource` instance.
        :return: The pathname of the resource in the cache.
        NT�wb)�finder�get_cache_infor
rrrZ
prefix_to_dir�dirname�isdir�makedirs�existsr�open�write�bytes)rr�prefixr�resultrZstale�frrr�get.s
zResourceCache.get)N)�__name__�
__module__�__qualname__rrr$�
__classcell__rr)rrr	sr	c@seZdZdd�ZdS)�ResourceBasecCs||_||_dS)N)r�name)rrr*rrrrIszResourceBase.__init__N)r%r&r'rrrrrr)Hsr)c@s@eZdZdZdZdd�Zedd��Zedd��Zed	d
��Z	dS)�Resourcez�
    A class representing an in-package resource, such as a data file. This is
    not normally instantiated by user code, but rather by a
    :class:`ResourceFinder` which manages the resource.
    FcCs|jj|�S)z�
        Get the resource as a stream.

        This is not a property to make it obvious that it returns a new stream
        each time.
        )r�
get_stream)rrrr�	as_streamVszResource.as_streamcCstdkrt�atj|�S)N)�cacher	r$)rrrr�	file_path_szResource.file_pathcCs|jj|�S)N)r�	get_bytes)rrrrr fszResource.bytescCs|jj|�S)N)r�get_size)rrrr�sizejsz
Resource.sizeN)
r%r&r'�__doc__�is_containerr-rr/r r2rrrrr+Ns	r+c@seZdZdZedd��ZdS)�ResourceContainerTcCs|jj|�S)N)r�
get_resources)rrrr�	resourcesrszResourceContainer.resourcesN)r%r&r'r4rr7rrrrr5osr5c@s�eZdZdZejjd�rdZnd Zdd�Zdd	�Z	d
d�Z
dd
�Zdd�Zdd�Z
dd�Zdd�Zdd�Zdd�Zdd�Zeejj�Zdd�ZdS)!�ResourceFinderz4
    Resource finder for file system resources.
    �java�.pyc�.pyo�.classcCs.||_t|dd�|_tjjt|dd��|_dS)N�
__loader__�__file__�)�module�getattr�loaderr
rrr)rr@rrrr�szResourceFinder.__init__cCstjj|�S)N)r
r�realpath)rrrrr�_adjust_path�szResourceFinder._adjust_pathcCsBt|t�rd}nd}|j|�}|jd|j�tjj|�}|j|�S)N�/�/r)	�
isinstancer �split�insertrr
rrrD)r�
resource_name�sep�partsr"rrr�
_make_path�s

zResourceFinder._make_pathcCstjj|�S)N)r
rr)rrrrr�_find�szResourceFinder._findcCs
d|jfS)N)r)rrrrrr�szResourceFinder.get_cache_infocCsD|j|�}|j|�sd}n&|j|�r0t||�}n
t||�}||_|S)N)rMrN�
_is_directoryr5r+r)rrJrr"rrr�find�s



zResourceFinder.findcCst|jd�S)N�rb)rr)rrrrrr,�szResourceFinder.get_streamc	Cs t|jd��
}|j�SQRXdS)NrQ)rr�read)rrr#rrrr0�szResourceFinder.get_bytescCstjj|j�S)N)r
r�getsize)rrrrrr1�szResourceFinder.get_sizecs*�fdd��t�fdd�tj|j�D��S)Ncs|dko|j�j�S)N�__pycache__)�endswith�skipped_extensions)r#)rrr�allowed�sz-ResourceFinder.get_resources.<locals>.allowedcsg|]}�|�r|�qSrr)�.0r#)rWrr�
<listcomp>�sz0ResourceFinder.get_resources.<locals>.<listcomp>)�setr
�listdirr)rrr)rWrrr6�szResourceFinder.get_resourcescCs|j|j�S)N)rOr)rrrrrr4�szResourceFinder.is_containerccs�|j|�}|dk	r�|g}xn|r�|jd�}|V|jr|j}xH|jD]>}|sP|}ndj||g�}|j|�}|jrz|j|�qB|VqBWqWdS)NrrF)rP�popr4r*r7r�append)rrJrZtodoZrnamer*�new_nameZchildrrr�iterator�s 


zResourceFinder.iteratorN)r:r;r<)r:r;)r%r&r'r3�sys�platform�
startswithrVrrDrMrNrrPr,r0r1r6r4�staticmethodr
rrrOr_rrrrr8ws"r8cs`eZdZdZ�fdd�Zdd�Zdd�Zdd	�Zd
d�Zdd
�Z	dd�Z
dd�Zdd�Z�Z
S)�ZipResourceFinderz6
    Resource finder for resources in .zip files.
    csZtt|�j|�|jj}dt|�|_t|jd�r>|jj|_nt	j
||_t|j�|_dS)Nr�_files)
rrdrrB�archive�len�
prefix_len�hasattrre�	zipimport�_zip_directory_cache�sorted�index)rr@rf)rrrr�szZipResourceFinder.__init__cCs|S)Nr)rrrrrrD�szZipResourceFinder._adjust_pathcCs�||jd�}||jkrd}nX|r:|dtjkr:|tj}tj|j|�}y|j|j|�}Wntk
rtd}YnX|s�tj	d||j
j�ntj	d||j
j�|S)NTrFz_find failed: %r %rz_find worked: %r %r���)rhrer
rK�bisectrmrb�
IndexError�logger�debugrBr!)rrr"�irrrrN�s


zZipResourceFinder._findcCs&|jj}|jdt|�d�}||fS)Nr)rBrfrrg)rrr!rrrrr�sz ZipResourceFinder.get_cache_infocCs|jj|j�S)N)rB�get_datar)rrrrrr0�szZipResourceFinder.get_bytescCstj|j|��S)N)�io�BytesIOr0)rrrrrr,�szZipResourceFinder.get_streamcCs|j|jd�}|j|dS)N�)rrhre)rrrrrrr1szZipResourceFinder.get_sizecCs�|j|jd�}|r,|dtjkr,|tj7}t|�}t�}tj|j|�}xV|t|j�kr�|j|j|�sjP|j||d�}|j	|j
tjd�d�|d7}qJW|S)Nrrrn)rrhr
rKrgrZrormrb�addrH)rrrZplenr"rs�srrrr6s
zZipResourceFinder.get_resourcescCsj||jd�}|r*|dtjkr*|tj7}tj|j|�}y|j|j|�}Wntk
rdd}YnX|S)NrFrn)rhr
rKrormrbrp)rrrsr"rrrrOs

zZipResourceFinder._is_directory)r%r&r'r3rrDrNrr0r,r1r6rOr(rr)rrrd�srdcCs|tt|�<dS)N)�_finder_registry�type)rB�finder_makerrrr�register_finder0sr}cCs�|tkrt|}nv|tjkr$t|�tj|}t|dd�}|dkrJtd��t|dd�}tjt|��}|dkrxtd|��||�}|t|<|S)z�
    Return a resource finder for a package.
    :param package: The name of the package.
    :return: A :class:`ResourceFinder` instance for the package.
    �__path__Nz8You cannot get a finder for a module, only for a packager=zUnable to locate finder for %r)	�
_finder_cacher`�modules�
__import__rArrzr$r{)�packager"r@rrBr|rrrr6s


rZ	__dummy__cCsRd}tj|�tjj|�}tjt|��}|rNt}tj	j
|d�|_||_||�}|S)z�
    Return a resource finder for a path, which should represent a container.

    :param path: The path.
    :return: A :class:`ResourceFinder` instance for the path.
    Nr?)
�pkgutilZget_importerr`�path_importer_cacher$rzr{�
_dummy_moduler
rrr>r=)rr"rBrr@rrr�finder_for_pathRs
r�).Z
__future__rroruZloggingr
r�Zshutilr`�typesrjr?r�utilrrrrZ	getLoggerr%rqr.r	�objectr)r+r5r8rdr{�zipimporterrz�_frozen_importlib_externalZ_fi�ImportError�_frozen_importlib�SourceFileLoader�
FileFinder�AttributeErrorr}rr�
ModuleTyper
r�r�rrrr�<module>sH
,!ZN


_vendor/distlib/__pycache__/scripts.cpython-36.pyc000064400000023415151733136300016166 0ustar003

�Pfx;�@s�ddlmZddlZddlZddlZddlZddlZddlmZm	Z	m
Z
ddlmZddl
mZmZmZmZmZeje�Zdj�Zejd�Zd	Zd
d�ZGdd
�d
e�ZdS)�)�BytesION�)�	sysconfig�detect_encoding�ZipFile)�finder)�FileOperator�get_export_entry�convert_path�get_executable�in_venva�
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
 <assemblyIdentity version="1.0.0.0"
 processorArchitecture="X86"
 name="%s"
 type="win32"/>

 <!-- Identify the application security requirements. -->
 <trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
 <security>
 <requestedPrivileges>
 <requestedExecutionLevel level="asInvoker" uiAccess="false"/>
 </requestedPrivileges>
 </security>
 </trustInfo>
</assembly>s^#!.*pythonw?[0-9.]*([ 	].*)?$a|# -*- coding: utf-8 -*-
if __name__ == '__main__':
    import sys, re

    def _resolve(module, func):
        __import__(module)
        mod = sys.modules[module]
        parts = func.split('.')
        result = getattr(mod, parts.pop(0))
        for p in parts:
            result = getattr(result, p)
        return result

    try:
        sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])

        func = _resolve('%(module)s', '%(func)s')
        rc = func() # None interpreted as 0
    except Exception as e:  # only supporting Python >= 2.6
        sys.stderr.write('%%s\n' %% e)
        rc = 1
    sys.exit(rc)
cCsZd|krV|jd�rD|jdd�\}}d|krV|jd�rVd||f}n|jd�sVd|}|S)N� z
/usr/bin/env r�"z%s "%s"z"%s")�
startswith�split)�
executable�envZ_executable�r�/usr/lib/python3.6/scripts.py�_enquote_executableBs

rc@s�eZdZdZeZdZd%dd�Zdd�Ze	j
jd	�rBd
d�Zdd
�Z
d&dd�Zdd�ZeZdd�Zdd�Zd'dd�Zdd�Zedd��Zejdd��Zejdks�ejd	kr�ejdkr�dd �Zd(d!d"�Zd)d#d$�ZdS)*�ScriptMakerz_
    A class to copy or create scripts from source scripts or callable
    specifications.
    NTFcCsz||_||_||_d|_d|_tjdkp:tjdko:tjdk|_t	d�|_
|pRt|�|_tjdkprtjdkortjdk|_
dS)NF�posix�java��X.Y�nt)rr)�
source_dir�
target_dir�
add_launchers�force�clobber�os�name�_name�set_mode�set�variantsr�_fileop�_is_nt)�selfrrr�dry_runZfileoprrr�__init__[s

zScriptMaker.__init__cCs@|jdd�r<|jr<tjj|�\}}|jdd�}tjj||�}|S)N�guiF�pythonZpythonw)�getr(r!�pathr�replace�join)r)r�optionsZdn�fnrrr�_get_alternate_executableks
z%ScriptMaker._get_alternate_executablercCsLy"t|��}|jd�dkSQRXWn$ttfk
rFtjd|�dSXdS)zl
            Determine if the specified executable is a script
            (contains a #! line)
            �z#!NzFailed to open %sF)�open�read�OSError�IOError�logger�warning)r)r�fprrr�	_is_shellss
zScriptMaker._is_shellcCsD|j|�r*ddl}|jjjd�dkr<|Sn|j�jd�r<|Sd|S)Nrzos.nameZLinuxz
jython.exez/usr/bin/env %s)r=rZlangZSystemZgetProperty�lower�endswith)r)rrrrr�_fix_jython_executables
z"ScriptMaker._fix_jython_executable�cCsdd}|jr|j}d}n^tj�s&t�}nNt�rLtjjtjd�dtj	d��}n(tjjtj	d�dtj	d�tj	d�f�}|r�|j
||�}tjj
d	�r�|j|�}tjj|�}|r�t|�}|jd
�}tjdkr�d|kr�d
|kr�|d7}d||d}y|jd
�Wn"tk
�rtd|��YnX|d
k�r`y|j|�Wn&tk
�r^td||f��YnX|S)NTF�scriptszpython%s�EXE�BINDIRz
python%s%s�VERSIONrzutf-8Zcliz	-X:Framesz
-X:FullFramess
 -X:Framess#!�
z,The shebang (%r) is not decodable from utf-8z?The shebang (%r) is not decodable from the script encoding (%r))rr�is_python_buildrrr!r/r1�get_path�get_config_varr4�sys�platformrr@�normcaser�encode�decode�UnicodeDecodeError�
ValueError)r)�encoding�post_interpr2Zenquoter�shebangrrr�_get_shebang�sL



zScriptMaker._get_shebangcCs|jt|j|jd�S)N)�module�func)�script_template�dict�prefix�suffix)r)�entryrrr�_get_script_text�s
zScriptMaker._get_script_textcCstjj|�}|j|S)N)r!r/�basename�manifest)r)Zexename�baserrr�get_manifest�szScriptMaker.get_manifestcCs�|jo
|j}tjjd�}|s*|||}n^|dkr>|jd�}n
|jd�}t�}	t|	d��}
|
jd|�WdQRX|	j	�}||||}�xd|D�]Z}tj
j|j|�}
|�rrtj
j
|
�\}}|jd�r�|}
d|
}
y|jj|
|�Wn�tk
�rntjd�d	|
}tj
j|��r tj|�tj|
|�|jj|
|�tjd
�ytj|�Wntk
�rhYnXYnXnp|j�r�|
jd|��r�d|
|f}
tj
j|
��r�|j�r�tjd
|
�q�|jj|
|�|j�r�|jj|
g�|j|
�q�WdS)Nzutf-8�py�t�wz__main__.pyz.pyz%s.exez:Failed to write executable - trying to use .deleteme logicz%s.deletemez0Able to replace executable using .deleteme logic�.z%s.%szSkipping existing file %s)rr(r!�lineseprM�
_get_launcherrrZwritestr�getvaluer/r1r�splitextrr'Zwrite_binary_file�	Exceptionr:r;�exists�remove�rename�debugr?r r$�set_executable_mode�append)r)�namesrSZscript_bytes�	filenames�extZuse_launcherreZlauncher�streamZzfZzip_datar"�outname�n�eZdfnamerrr�
_write_script�sT




zScriptMaker._write_scriptcCs�d}|r0|jdg�}|r0ddj|�}|jd�}|jd||d�}|j|�jd�}|j}t�}	d|jkrp|	j|�d|jkr�|	jd	|t	j
d
f�d|jkr�|	jd|t	j
dd
�f�|r�|jdd�r�d}
nd}
|j|	||||
�dS)NrAZinterpreter_argsz %sr
zutf-8)r2r�Xz%s%srzX.Yz%s-%s�r,F�pywra)r.r1rMrTr\r"r%r&�addrJ�versionrw)r)r[rqr2rR�argsrS�scriptr"Zscriptnamesrrrrr�_make_script�s(




zScriptMaker._make_scriptcCs�d}tjj|jt|��}tjj|jtjj|��}|jrX|jj	||�rXt
jd|�dSyt|d�}Wn t
k
r�|js~�d}YnLX|j�}|s�t
jd|j�|�dStj|jdd��}|r�d}|jd�p�d	}|�s|r�|j�|jj||�|j�r|jj|g�|j|�n�t
jd
||j�|jj�s�t|j�\}	}
|jd�|j|	|�}d|k�rbd
}nd}tjj|�}
|j|
g||j �||�|�r�|j�dS)NFznot copying %s (up-to-date)�rbz"%s: %s is an empty file (skipping)s
rFTrrAzcopying and adjusting %s -> %srspythonwrzra)!r!r/r1rr
rr]rr'Znewerr:rmr6r9r*�readliner;Zget_command_name�
FIRST_LINE_RE�matchr0�group�closeZ	copy_filer$rnro�infor�seekrTrwr7)r)r~rqZadjustrt�fZ
first_liner�rRrQ�linesrSrrrurrr�_copy_scriptsR



zScriptMaker._copy_scriptcCs|jjS)N)r'r*)r)rrrr*JszScriptMaker.dry_runcCs||j_dS)N)r'r*)r)�valuerrrr*NsrcCsHtjd�dkrd}nd}d||f}tjdd�d}t|�j|�j}|S)	N�P�Z64Z32z%s%s.exerdrr)�struct�calcsize�__name__�rsplitr�find�bytes)r)Zkind�bitsr"Zdistlib_package�resultrrrrfVszScriptMaker._get_launchercCs6g}t|�}|dkr"|j||�n|j|||d�|S)a�
        Make a script.

        :param specification: The specification, which is either a valid export
                              entry specification (to make a script from a
                              callable) or a filename (to make a script by
                              copying from a source location).
        :param options: A dictionary of options controlling script generation.
        :return: A list of all absolute pathnames written to.
        N)r2)r	r�r)r)�
specificationr2rqr[rrr�makedszScriptMaker.makecCs(g}x|D]}|j|j||��q
W|S)z�
        Take a list of specifications and make scripts from them,
        :param specifications: A list of specifications.
        :return: A list of all absolute pathnames written to,
        )�extendr�)r)Zspecificationsr2rqr�rrr�
make_multiplews
zScriptMaker.make_multiple)TFN)rAN)N)N)N)r��
__module__�__qualname__�__doc__�SCRIPT_TEMPLATErWrr+r4rJrKrr=r@rTr\�_DEFAULT_MANIFESTr^r`rwrr��propertyr*�setterr!r"r#rfr�r�rrrrrRs,

82
4
r)�iorZloggingr!�rer�rJ�compatrrrZ	resourcesr�utilrr	r
rrZ	getLoggerr�r:�stripr��compiler�r�r�objectrrrrr�<module>s

_vendor/distlib/__pycache__/markers.cpython-36.opt-1.pyc000064400000013337151733136300017104 0ustar003

�Pf��@sddZddlZddlZddlZddlZddlmZmZddlm	Z	dgZ
Gdd�de�Zd
d	d�Z
dS)zEParser for the environment markers micro-language defined in PEP 345.�N�)�python_implementation�string_types)�in_venv�	interpretc
@s�eZdZdZdd�dd�dd�dd�dd�dd�d	d�d
d�dd�d�	Zejd
ejdd�ejj	dd�de
jee
��ej�ej�ej�e�d�	Zd*dd�Zdd�Zdd�Zd+dd�Zdd�Zdd�Zd d!�Zd"d#�Zd$d%�Zd&d'�Zd(d)�ZdS),�	Evaluatorz5
    A limited evaluator for Python expressions.
    cCs||kS)N�)�x�yrr�/usr/lib/python3.6/markers.py�<lambda>szEvaluator.<lambda>cCs||kS)Nr)r	r
rrrrscCs||kS)Nr)r	r
rrrrscCs||kS)Nr)r	r
rrrrscCs||kS)Nr)r	r
rrrrscCs||kS)Nr)r	r
rrrrscCs|S)Nr)r	rrrr scCs||kS)Nr)r	r
rrrr!scCs||kS)Nr)r	r
rrrr"s)	�eq�gtZgte�in�ltZlte�notZnoteqZnotinz%s.%sN�� rr)	Zsys_platformZpython_versionZpython_full_versionZos_nameZplatform_in_venvZplatform_releaseZplatform_versionZplatform_machineZplatform_python_implementationcCs|pi|_d|_dS)zu
        Initialise an instance.

        :param context: If specified, names are looked up in this mapping.
        N)�context�source)�selfrrrr�__init__3s
zEvaluator.__init__cCs8d}d|j|||�}||t|j�kr4|d7}|S)zH
        Get the part of the source which is causing a problem.
        �
z%rz...)r�len)r�offsetZfragment_len�srrr�get_fragment<s
zEvaluator.get_fragmentcCst|d|d�S)z@
        Get a handler for the specified AST node type.
        zdo_%sN)�getattr)r�	node_typerrr�get_handlerFszEvaluator.get_handlercCs�t|t�rr||_ddi}|r$||d<ytj|f|�}Wn:tk
rp}z|j|j�}td|��WYdd}~XnX|jj	j
�}|j|�}|dkr�|jdkr�d}n|j|j�}td||f��||�S)zf
        Evaluate a source string or node, using ``filename`` when
        displaying errors.
        �mode�eval�filenamezsyntax error %sNz(source not available)z don't know how to evaluate %r %s)
�
isinstancerr�ast�parse�SyntaxErrorrr�	__class__�__name__�lowerr�
col_offset)r�noder"�kwargs�errZhandlerrrr�evaluateLs&


zEvaluator.evaluatecCsd|jj|jfS)Nz%s.%s)�value�id�attr)rr+rrr�get_attr_keyfszEvaluator.get_attr_keycCsft|jtj�sd}n|j|�}||jkp0||jk}|sBtd|��||jkrX|j|}n
|j|}|S)NFzinvalid expression: %s)r#r/r$�Namer2r�allowed_valuesr&)rr+�valid�key�resultrrr�do_attributejs


zEvaluator.do_attributecCsx|j|jd�}|jjtjk}|jjtjk}|r4|s>|rt|rtx4|jdd�D]"}|j|�}|rd|sn|rN|rNPqNW|S)Nrr)r.�values�opr'r$ZOrZAnd)rr+r7Zis_orZis_and�nrrr�	do_boolopxs
zEvaluator.do_boolopc	s���fdd�}�j}�j|�}d}xnt�j�j�D]\\}}|||�|jjj�}|�jkrft	d|���j|�}�j|||�}|s�P|}|}q2W|S)Ncs@d}t|tj�r t|tj�r d}|s<�j�j�}td|��dS)NTFzInvalid comparison: %s)r#r$ZStrrr*r&)�lhsnode�rhsnoder5r)r+rrr�sanity_check�sz*Evaluator.do_compare.<locals>.sanity_checkTzunsupported operation: %r)
�leftr.�zipZopsZcomparatorsr'r(r)�	operatorsr&)	rr+r?r=Zlhsr7r:r>Zrhsr)r+rr�
do_compare�s 




zEvaluator.do_comparecCs|j|j�S)N)r.Zbody)rr+rrr�
do_expression�szEvaluator.do_expressioncCsTd}|j|jkr"d}|j|j}n|j|jkr>d}|j|j}|sPtd|j��|S)NFTzinvalid expression: %s)r0rr4r&)rr+r5r7rrr�do_name�szEvaluator.do_namecCs|jS)N)r)rr+rrr�do_str�szEvaluator.do_str)N)N)r(�
__module__�__qualname__�__doc__rB�sys�platform�version_info�version�split�os�name�strr�release�machinerr4rrrr.r2r8r<rCrDrErFrrrrrs<

	

rcCst|�j|j��S)z�
    Interpret a marker and return a result depending on environment.

    :param marker: The marker to interpret.
    :type marker: str
    :param execution_context: The context used for name lookup.
    :type execution_context: mapping
    )rr.�strip)ZmarkerZexecution_contextrrrr�s	)N)rIr$rOrJrK�compatrr�utilr�__all__�objectrrrrrr�<module>s"_vendor/distlib/__pycache__/markers.cpython-36.pyc000064400000013506151733136300016143 0ustar003

�Pf��@sddZddlZddlZddlZddlZddlmZmZddlm	Z	dgZ
Gdd�de�Zd
d	d�Z
dS)zEParser for the environment markers micro-language defined in PEP 345.�N�)�python_implementation�string_types)�in_venv�	interpretc
@s�eZdZdZdd�dd�dd�dd�dd�dd�d	d�d
d�dd�d�	Zejd
ejdd�ejj	dd�de
jee
��ej�ej�ej�e�d�	Zd*dd�Zdd�Zdd�Zd+dd�Zdd�Zdd�Zd d!�Zd"d#�Zd$d%�Zd&d'�Zd(d)�ZdS),�	Evaluatorz5
    A limited evaluator for Python expressions.
    cCs||kS)N�)�x�yrr�/usr/lib/python3.6/markers.py�<lambda>szEvaluator.<lambda>cCs||kS)Nr)r	r
rrrrscCs||kS)Nr)r	r
rrrrscCs||kS)Nr)r	r
rrrrscCs||kS)Nr)r	r
rrrrscCs||kS)Nr)r	r
rrrrscCs|S)Nr)r	rrrr scCs||kS)Nr)r	r
rrrr!scCs||kS)Nr)r	r
rrrr"s)	�eq�gtZgte�in�ltZlte�notZnoteqZnotinz%s.%sN�� rr)	Zsys_platformZpython_versionZpython_full_versionZos_nameZplatform_in_venvZplatform_releaseZplatform_versionZplatform_machineZplatform_python_implementationcCs|pi|_d|_dS)zu
        Initialise an instance.

        :param context: If specified, names are looked up in this mapping.
        N)�context�source)�selfrrrr�__init__3s
zEvaluator.__init__cCs8d}d|j|||�}||t|j�kr4|d7}|S)zH
        Get the part of the source which is causing a problem.
        �
z%rz...)r�len)r�offsetZfragment_len�srrr�get_fragment<s
zEvaluator.get_fragmentcCst|d|d�S)z@
        Get a handler for the specified AST node type.
        zdo_%sN)�getattr)r�	node_typerrr�get_handlerFszEvaluator.get_handlercCs�t|t�rr||_ddi}|r$||d<ytj|f|�}Wn:tk
rp}z|j|j�}td|��WYdd}~XnX|jj	j
�}|j|�}|dkr�|jdkr�d}n|j|j�}td||f��||�S)zf
        Evaluate a source string or node, using ``filename`` when
        displaying errors.
        �mode�eval�filenamezsyntax error %sNz(source not available)z don't know how to evaluate %r %s)
�
isinstancerr�ast�parse�SyntaxErrorrr�	__class__�__name__�lowerr�
col_offset)r�noder"�kwargs�errZhandlerrrr�evaluateLs&


zEvaluator.evaluatecCs&t|tj�std��d|jj|jfS)Nzattribute node expectedz%s.%s)r#r$Z	Attribute�AssertionError�value�id�attr)rr+rrr�get_attr_keyfszEvaluator.get_attr_keycCsft|jtj�sd}n|j|�}||jkp0||jk}|sBtd|��||jkrX|j|}n
|j|}|S)NFzinvalid expression: %s)r#r0r$�Namer3r�allowed_valuesr&)rr+�valid�key�resultrrr�do_attributejs


zEvaluator.do_attributecCs�|j|jd�}|jjtjk}|jjtjk}|s8|s8t�|r@|sJ|r�|r�x4|jdd�D]"}|j|�}|rp|sz|rZ|rZPqZW|S)Nrr)r.�values�opr'r$ZOrZAndr/)rr+r8Zis_orZis_and�nrrr�	do_boolopxs
zEvaluator.do_boolopc	s���fdd�}�j}�j|�}d}xnt�j�j�D]\\}}|||�|jjj�}|�jkrft	d|���j|�}�j|||�}|s�P|}|}q2W|S)Ncs@d}t|tj�r t|tj�r d}|s<�j�j�}td|��dS)NTFzInvalid comparison: %s)r#r$ZStrrr*r&)�lhsnode�rhsnoder6r)r+rrr�sanity_check�sz*Evaluator.do_compare.<locals>.sanity_checkTzunsupported operation: %r)
�leftr.�zipZopsZcomparatorsr'r(r)�	operatorsr&)	rr+r@r>Zlhsr8r;r?Zrhsr)r+rr�
do_compare�s 




zEvaluator.do_comparecCs|j|j�S)N)r.Zbody)rr+rrr�
do_expression�szEvaluator.do_expressioncCsTd}|j|jkr"d}|j|j}n|j|jkr>d}|j|j}|sPtd|j��|S)NFTzinvalid expression: %s)r1rr5r&)rr+r6r8rrr�do_name�szEvaluator.do_namecCs|jS)N)r)rr+rrr�do_str�szEvaluator.do_str)N)N)r(�
__module__�__qualname__�__doc__rC�sys�platform�version_info�version�split�os�name�strr�release�machinerr5rrrr.r3r9r=rDrErFrGrrrrrs<

	

rcCst|�j|j��S)z�
    Interpret a marker and return a result depending on environment.

    :param marker: The marker to interpret.
    :type marker: str
    :param execution_context: The context used for name lookup.
    :type execution_context: mapping
    )rr.�strip)ZmarkerZexecution_contextrrrr�s	)N)rJr$rPrKrL�compatrr�utilr�__all__�objectrrrrrr�<module>s"_vendor/distlib/__pycache__/wheel.cpython-36.opt-1.pyc000064400000060467151733136300016552 0ustar003

�Pf˘�@s�ddlmZddlZddlZddlZddlZddlmZddl	Z	ddl
Z
ddlZddlZddl
Z
ddlZddlZddlZddlZddlZddlZddlmZmZddlmZmZmZmZmZddlmZddlm Z m!Z!dd	l"m#Z#m$Z$m%Z%m&Z&m'Z'm(Z(m)Z)m*Z*m+Z+dd
l,m-Z-m.Z.ej/e0�Z1da2e3ed��r4dZ4n*ej5j6d
��rHdZ4nej5dk�rZdZ4ndZ4ej7d�Z8e8�s�dej9dd�Z8de8Z:e4e8Z;ej"j<�j=dd�j=dd�Z>ej7d�Z?e?�r�e?j6d��r�e?j=dd�Z?ndd�Z@e@�Z?[@ejAdejBejCB�ZDejAdejBejCB�ZEejAd�ZFejAd �ZGd!ZHd"ZIe
jJd#k�r>d$d%�ZKnd&d%�ZKGd'd(�d(eL�ZMeM�ZNGd)d*�d*eL�ZOd+d,�ZPeP�ZQ[Pd/d-d.�ZRdS)0�)�unicode_literalsN)�message_from_file�)�__version__�DistlibException)�	sysconfig�ZipFile�fsdecode�	text_type�filter)�InstalledDistribution)�Metadata�METADATA_FILENAME)	�FileOperator�convert_path�	CSVReader�	CSVWriter�Cache�cached_property�get_cache_base�read_exports�tempdir)�NormalizedVersion�UnsupportedVersionErrorZpypy_version_infoZpp�javaZjyZcliZip�cp�py_version_nodotz%s%s��py�-�_�.�SOABIzcpython-cCsRdtg}tjd�r|jd�tjd�r0|jd�tjd�dkrH|jd�d	j|�S)
Nr�Py_DEBUG�d�
WITH_PYMALLOC�mZPy_UNICODE_SIZE��u�)�
VER_SUFFIXr�get_config_var�append�join)�parts�r/�/usr/lib/python3.6/wheel.py�_derive_abi;s




r1zz
(?P<nm>[^-]+)
-(?P<vn>\d+[^-]*)
(-(?P<bn>\d+[^-]*))?
-(?P<py>\w+\d+(\.\w+\d+)*)
-(?P<bi>\w+)
-(?P<ar>\w+(\.\w+)*)
\.whl$
z7
(?P<nm>[^-]+)
-(?P<vn>\d+[^-]*)
(-(?P<bn>\d+[^-]*))?$
s
\s*#![^\r\n]*s^(\s*#!("[^"]+"|\S+))\s+(.*)$s#!pythons	#!pythonw�/cCs|S)Nr/)�or/r/r0�<lambda>]sr4cCs|jtjd�S)Nr2)�replace�os�sep)r3r/r/r0r4_sc@s6eZdZdd�Zdd�Zdd�Zddd	�Zd
d�ZdS)
�MountercCsi|_i|_dS)N)�
impure_wheels�libs)�selfr/r/r0�__init__cszMounter.__init__cCs||j|<|jj|�dS)N)r9r:�update)r;�pathname�
extensionsr/r/r0�addgs
zMounter.addcCs4|jj|�}x"|D]\}}||jkr|j|=qWdS)N)r9�popr:)r;r>r?�k�vr/r/r0�removeks
zMounter.removeNcCs||jkr|}nd}|S)N)r:)r;�fullname�path�resultr/r/r0�find_moduleqs
zMounter.find_modulecCsj|tjkrtj|}nP||jkr,td|��tj||j|�}||_|jdd�}t|�dkrf|d|_	|S)Nzunable to find extension for %sr!rr)
�sys�modulesr:�ImportError�impZload_dynamic�
__loader__�rsplit�len�__package__)r;rErGr.r/r/r0�load_modulexs


zMounter.load_module)N)�__name__�
__module__�__qualname__r<r@rDrHrQr/r/r/r0r8bs

r8c@s�eZdZdZd2ZdZd3dd�Zedd	��Zed
d��Z	edd
��Z
edd��Zdd�Z
edd��Zdd�Zd4dd�Zdd�Zdd�Zdd�Zd5dd�Zd d!�Zd"d#�Zd$d%�Zd&d'�Zd(d)�Zd6d*d+�Zd,d-�Zd.d/�Zd7d0d1�ZdS)8�Wheelz@
    Class to build and install from Wheel files (PEP 427).
    rZsha256NFcCs8||_||_d|_tg|_dg|_dg|_tj�|_	|dkrRd|_
d|_|j|_
n�tj|�}|r�|jd�}|d|_
|djd	d
�|_|d|_|j|_
n�tjj|�\}}tj|�}|s�td|��|r�tjj|�|_	||_
|jd�}|d|_
|d|_|d|_|d
jd�|_|djd�|_|djd�|_dS)zB
        Initialise an instance using a (valid) filename.
        r)�none�anyNZdummyz0.1ZnmZvnr rZbnzInvalid name or filename: %rrr!Zbi�ar)�signZ
should_verify�buildver�PYVER�pyver�abi�archr6�getcwd�dirname�name�version�filenameZ	_filename�NAME_VERSION_RE�match�	groupdictr5rF�split�FILENAME_REr�abspath)r;rcrY�verifyr&�infor`r/r/r0r<�sB











zWheel.__init__cCs^|jrd|j}nd}dj|j�}dj|j�}dj|j�}|jjdd�}d|j|||||fS)zJ
        Build and return a filename from the various components.
        rr)r!r z%s-%s%s-%s-%s-%s.whl)rZr-r\r]r^rbr5ra)r;rZr\r]r^rbr/r/r0rc�s
zWheel.filenamecCstjj|j|j�}tjj|�S)N)r6rFr-r`rc�isfile)r;rFr/r/r0�exists�szWheel.existsccs@x:|jD]0}x*|jD] }x|jD]}|||fVq WqWqWdS)N)r\r]r^)r;r\r]r^r/r/r0�tags�sz
Wheel.tagscCs�tjj|j|j�}d|j|jf}d|}tjd�}t	|d���}|j
|�}|djdd�}tdd	�|D��}|d
krzd
}	nt
}	y8tj||	�}
|j|
��}||�}t|d�}
WdQRXWn tk
r�td|	��YnXWdQRX|
S)Nz%s-%sz%s.dist-infozutf-8�rz
Wheel-Versionr!rcSsg|]}t|��qSr/)�int)�.0�ir/r/r0�
<listcomp>�sz"Wheel.metadata.<locals>.<listcomp>ZMETADATA)Zfileobjz$Invalid wheel, because %s is missing)rr)r6rFr-r`rcrarb�codecs�	getreaderr�get_wheel_metadatarg�tupler�	posixpath�openr
�KeyError�
ValueError)r;r>�name_ver�info_dir�wrapper�zf�wheel_metadata�wv�file_version�fn�metadata_filename�bf�wfrGr/r/r0�metadata�s(

zWheel.metadatac	CsXd|j|jf}d|}tj|d�}|j|��}tjd�|�}t|�}WdQRXt|�S)Nz%s-%sz%s.dist-info�WHEELzutf-8)	rarbrxr-ryrtrur�dict)r;rr|r}r�r�r��messager/r/r0rv�szWheel.get_wheel_metadatac	Cs6tjj|j|j�}t|d��}|j|�}WdQRX|S)Nro)r6rFr-r`rcrrv)r;r>rrGr/r/r0rk�sz
Wheel.infocCs�tj|�}|r||j�}|d|�||d�}}d|j�krBt}nt}tj|�}|rfd|j�d
}nd}||}||}nT|jd�}|jd�}	|dks�||	kr�d}
n|||d�d	kr�d	}
nd}
t|
|}|S)Nspythonw� r��
�
rrs
���)	�
SHEBANG_REre�end�lower�SHEBANG_PYTHONW�SHEBANG_PYTHON�SHEBANG_DETAIL_RE�groups�find)r;�datar&r�ZshebangZdata_after_shebangZshebang_python�argsZcrZlfZtermr/r/r0�process_shebang�s,




zWheel.process_shebangcCsh|dkr|j}ytt|�}Wn tk
r<td|��YnX||�j�}tj|�jd�j	d�}||fS)NzUnsupported hash algorithm: %r�=�ascii)
�	hash_kind�getattr�hashlib�AttributeErrorr�digest�base64Zurlsafe_b64encode�rstrip�decode)r;r�r��hasherrGr/r/r0�get_hashszWheel.get_hashc
Csbt|�}ttjj||��}|j|ddf�|j�t|��}x|D]}|j|�qBWWdQRXdS)Nr))	�list�to_posixr6rF�relpathr,�sortrZwriterow)r;�recordsZrecord_path�base�p�writer�rowr/r/r0�write_record's

zWheel.write_recordcCs�g}|\}}tt|j�}xX|D]P\}}	t|	d��}
|
j�}WdQRXd|j|�}tjj|	�}
|j	|||
f�qWtjj
|d�}	|j||	|�ttjj
|d��}|j	||	f�dS)N�rbz%s=%s�RECORD)
r�r�r�ry�readr�r6rF�getsizer,r-r�r�)r;rk�libdir�
archive_pathsr��distinfor}r��apr��fr�r��sizer/r/r0�
write_records0szWheel.write_recordscCsJt|dtj��2}x*|D]"\}}tjd||�|j||�qWWdQRXdS)N�wzWrote %s to %s in wheel)r�zipfileZZIP_DEFLATED�logger�debug�write)r;r>r�rr�r�r/r/r0�	build_zip@szWheel.build_zipc!s�|dkri}tt�fdd�d$��d}|dkrFd}tg}tg}tg}nd}tg}d	g}d
g}|jd|�|_|jd|�|_|jd
|�|_	�|}	d|j
|jf}
d|
}d|
}g}
x�d%D]�}|�kr�q��|}tj
j|�r�x�tj|�D]�\}}}x�|D]�}ttj
j||��}tj
j||�}ttj
j|||��}|
j||f�|dk�r�|jd��r�t|d��}|j�}WdQRX|j|�}t|d��}|j|�WdQRX�q�Wq�Wq�W|	}d}x�tj|�D]�\}}}||k�rx@t|�D]4\}}t|�}|jd��r�tj
j||�}||=P�q�WxP|D]H}t|�jd&��r2�qtj
j||�}ttj
j||��}|
j||f��qW�q�Wtj|�}xJ|D]B}|d'k�r|ttj
j||��}ttj
j||��}|
j||f��q|Wd|�p�|jdtd |g}x*|jD] \}}}|jd!|||f��q�Wtj
j|d�}t|d"��}|jd#j|��WdQRXttj
j|d��}|
j||f�|j||f|	|
�tj
j|j |j!�} |j"| |
�| S)(z�
        Build a wheel from files in specified paths, and use any specified tags
        when determining the name of the wheel.
        Ncs|�kS)Nr/)r3)�pathsr/r0r4NszWheel.build.<locals>.<lambda>�purelib�platlibrZfalse�truerVrWr\r]r^z%s-%sz%s.dataz%s.dist-infor��headers�scriptsz.exer��wbz
.dist-info�.pyc�.pyor��	INSTALLER�SHAREDr�zWheel-Version: %d.%dzGenerator: distlib %szRoot-Is-Purelib: %sz
Tag: %s-%s-%sr��
)r�r�)r�r�r�)r�r�)r�r�r�r�)#r�r�IMPVER�ABI�ARCHr[�getr\r]r^rarbr6rF�isdir�walkr	r-r�r�r,�endswithryr�r�r��	enumerate�listdir�
wheel_versionrrnr�r`rcr�)!r;r�rnr�ZlibkeyZis_pureZ
default_pyverZdefault_abiZdefault_archr�r|�data_dirr}r��keyrF�root�dirs�filesr�r��rpr�r�r�r�rr�dnr�r\r]r^r>r/)r�r0�buildFs�


"





zWheel.buildcBIKs`|j}|jd�}|jdd�}tjj|j|j�}d|j|jf}d|}	d|}
t	j|
t
�}t	j|
d�}t	j|
d�}
tjd	�}t
|d
����}|j|��}||�}t|�}WdQRX|djd
d�}tdd�|D��}||jkr�|r�||j|�|ddk�r|d}n|d}i}|j|
��<}t|d��&}x|D]}|d}|||<�q.WWdQRXWdQRXt	j|	d�}t	j|
d�}t	j|	dd�}t|d�}d|_tj}g} tj�}!|!|_d|_�z��y^�x�|j�D�]�}"|"j}#t|#t��r�|#}$n
|#jd	�}$|$j d��r��q�||$}|d�r0t!|"j"�|dk�r0t#d|$��|d�r�|djdd�\}%}&|j|#��}|j$�}'WdQRX|j%|'|%�\}(})|)|&k�r�t#d|#��|�r�|$j&||f��r�t'j(d |$��q�|$j&|��o�|$j d!�}*|$j&|��r|$jd"d�\}(}+},tjj||+t)|,��}-n$|$||
fk�r�q�tjj|t)|$��}-|*�s|j|#��}|j*||-�WdQRX| j+|-�|�r�|d�r�t|-d#��4}|j$�}'|j%|'|%�\}(}.|.|)k�r�t#d$|-��WdQRX|�rx|-j d%��rxy|j,|-�}/| j+|/�Wn$t-k
�rt'j.d&dd'�YnXnttjj/t)|#��}0tjj|!|0�}1|j|#��}|j*||1�WdQRXtjj|-�\}2}0|2|_|j0|0�}3|j1|3�| j2|3��q�W|�r�t'j(d(�d}4�n~d}5|j3d}|d)k�r~t	j|
d*�}6y�|j|6��}t4|�}7WdQRXi}5xxd<D]p}8d-|8}9|9|7k�r�i|5d.|8<}:xF|7|9j5�D]6};d/|;j6|;j7f}<|;j8�rB|<d0|;j87}<|<|:|;j<�qW�q�WWn t-k
�rzt'j.d1�YnXndyB|j|��.}||�}t9j:|�jd2�}5|5�r�|5jd3�}5WdQRXWn t-k
�r�t'j.d4�YnX|5�r�|5jd5i�}=|5jd6i�}>|=�s|>�r�|jdd�}?tjj;|?��s.t<d7��|?|_x6|=j=�D]*\}9};d8|9|;f}@|j0|@�}3|j1|3��q>W|>�r�d,di}Ax8|>j=�D],\}9};d8|9|;f}@|j0|@|A�}3|j1|3��q�Wtjj||
�}t>|�}4t?|�}|d=|d=||d9<|4j@||�}|�r| j+|�|4jA| |d:|�|4St-k
�r@t'jBd;�|jC��YnXWdtDjE|!�XWdQRXdS)=a�
        Install a wheel to the specified paths. If kwarg ``warner`` is
        specified, it should be a callable, which will be called with two
        tuples indicating the wheel version of this software and the wheel
        version in the file, if there is a discrepancy in the versions.
        This can be used to issue any warnings to raise any exceptions.
        If kwarg ``lib_only`` is True, only the purelib/platlib files are
        installed, and the headers, scripts, data and dist-info metadata are
        not written.

        The return value is a :class:`InstalledDistribution` instance unless
        ``options.lib_only`` is True, in which case the return value is ``None``.
        �warner�lib_onlyFz%s-%sz%s.dataz%s.dist-infor�r�zutf-8roNz
Wheel-Versionr!rcSsg|]}t|��qSr/)rp)rqrrr/r/r0rs�sz!Wheel.install.<locals>.<listcomp>zRoot-Is-Purelibr�r�r�)�streamrr)r�)�dry_runTz/RECORD.jwsrzsize mismatch for %s�=zdigest mismatch for %szlib_only: skipping %sz.exer2r�zdigest mismatch on write for %sz.pyzByte-compilation failed)�exc_infozlib_only: returning Nonez1.0zentry_points.txt�console�guiz
%s_scriptszwrap_%sz%s:%sz %szAUnable to read legacy script metadata, so cannot generate scriptsr?zpython.commandsz8Unable to read JSON metadata, so cannot generate scriptsZwrap_consoleZwrap_guizValid script path not specifiedz%s = %s�lib�prefixzinstallation failed.)r�r�)Fr�r�r6rFr-r`rcrarbrxrrtrurryrrgrwr�rr�recordrI�dont_write_bytecode�tempfileZmkdtempZ
source_dirZ
target_dir�infolist�
isinstancer
r�r��str�	file_sizerr�r��
startswithr�r�rZcopy_streamr,Zbyte_compile�	ExceptionZwarning�basenameZmakeZset_executable_mode�extendrkr�valuesr��suffix�flags�json�loadr�r{�itemsrr�Zwrite_shared_locationsZwrite_installed_filesZ	exceptionZrollback�shutilZrmtree)Br;r�Zmaker�kwargsr�r�r�r>r|r�r}�
metadata_name�wheel_metadata_name�record_namer~r�bwfr�r�r�r�r�r�r��readerr�r�Zdata_pfxZinfo_pfxZ
script_pfxZfileopZbcZoutfiles�workdir�zinfo�arcname�	u_arcname�kind�valuer�r r�Z	is_script�wherer�ZoutfileZ	newdigestZpycr�Zworknamer��	filenamesZdistZcommandsZepZepdatar�rBr$rC�sZconsole_scriptsZgui_scriptsZ
script_dirZscriptZoptionsr/r/r0�install�sB



"
















z
Wheel.installcCs4tdkr0tjjt�td�tjdd��}t|�atS)Nzdylib-cache�)	�cacher6rFr-rr�rIrbr)r;r�r/r/r0�_get_dylib_cache�s
zWheel._get_dylib_cachecCsltjj|j|j�}d|j|jf}d|}tj|d�}tj	d�}g}t
|d���}y�|j|���}||�}	tj
|	�}
|j�}|j|�}tjj|j|�}
tjj|
�s�tj|
�x�|
j�D]�\}}tjj|
t|��}tjj|�s�d}n6tj|�j}tjj|�}|j|�}tj|j�}||k}|�r(|j||
�|j||f�q�WWdQRXWntk
�r\YnXWdQRX|S)Nz%s-%sz%s.dist-infoZ
EXTENSIONSzutf-8roT)r6rFr-r`rcrarbrxrtrurryr�r�rZ
prefix_to_dirr�r��makedirsr�rrm�stat�st_mtime�datetimeZ
fromtimestampZgetinfoZ	date_time�extractr,rz)r;r>r|r}rr~rGrr�r�r?r
r�Z
cache_baserar��destrZ	file_timerkZ
wheel_timer/r/r0�_get_extensions�s>




 zWheel._get_extensionscCst|�S)zM
        Determine if a wheel is compatible with the running system.
        )�
is_compatible)r;r/r/r0r�szWheel.is_compatiblecCsdS)zP
        Determine if a wheel is asserted as mountable by its metadata.
        Tr/)r;r/r/r0�is_mountable�szWheel.is_mountablecCs�tjjtjj|j|j��}|j�s2d|}t|��|j�sJd|}t|��|t	jkrbt
jd|�nN|rtt	jj|�nt	jj
d|�|j�}|r�tt	jkr�t	jjt�tj||�dS)Nz)Wheel %s not compatible with this Python.z$Wheel %s is marked as not mountable.z%s already in pathr)r6rFrir-r`rcrrrrIr�r�r,�insertr�_hook�	meta_pathr@)r;r,r>�msgr?r/r/r0�mount�s"

zWheel.mountcCsrtjjtjj|j|j��}|tjkr2tjd|�n<tjj	|�|t
jkrRt
j	|�t
jsnt
tjkrntjj	t
�dS)Nz%s not in path)
r6rFrir-r`rcrIr�r�rDrr9r)r;r>r/r/r0�unmount�s



z
Wheel.unmountc'Cstjj|j|j�}d|j|jf}d|}d|}tj|t�}tj|d�}tj|d�}t	j
d�}t|d����}	|	j|��}
||
�}t
|�}WdQRX|djd	d
�}
tdd�|
D��}i}|	j|��:}t|d
��$}x|D]}|d}|||<q�WWdQRXWdQRXx�|	j�D]�}|j}t|t��r*|}n
|jd�}d|k�rJtd|��|jd��rZ�q||}|d�r�t|j�|dk�r�td|��|d
�r|d
jdd
�\}}|	j|��}|j�}WdQRX|j||�\}}||k�rtd|���qWWdQRXdS)Nz%s-%sz%s.dataz%s.dist-infor�r�zutf-8roz
Wheel-Versionr!rcSsg|]}t|��qSr/)rp)rqrrr/r/r0rs�sz Wheel.verify.<locals>.<listcomp>)r�rz..zinvalid entry in wheel: %rz/RECORD.jwsrzsize mismatch for %sr�zdigest mismatch for %s)r6rFr-r`rcrarbrxrrtrurryrrgrwrr�r�r
r�rr�r�r�r�r�)r;r>r|r�r}r�r�r�r~rrr�r�r�r�r�r�rr�r�rrrrrr�r r�r/r/r0rj�sT

 



zWheel.verifycKs�dd�}dd�}tjj|j|j�}d|j|jf}d|}tj|d�}	t����}
t	|d���}i}xt|j
�D]h}
|
j}t|t�r�|}n
|j
d	�}||	kr�qjd
|kr�td|��|j|
|
�tjj|
t|��}|||<qjWWdQRX|||�\}}||f|�}|�r�|||�\}}|�r(||k�r(|||�|dk�rRtjd
d|
d�\}}tj|�n*tjj|��sltd|��tjj||j�}t|j��}tjj|
|�}||f}|j||
|�|j||�|dk�r�tj||�WdQRX|S)a�
        Update the contents of a wheel in a generic way. The modifier should
        be a callable which expects a dictionary argument: its keys are
        archive-entry paths, and its values are absolute filesystem paths
        where the contents the corresponding archive entries can be found. The
        modifier is free to change the contents of the files pointed to, add
        new entries and remove entries, before returning. This method will
        extract the entire contents of the wheel to a temporary location, call
        the modifier, and then use the passed (and possibly updated)
        dictionary to write a new wheel. If ``dest_dir`` is specified, the new
        wheel is written there -- otherwise, the original wheel is overwritten.

        The modifier should return True if it updated the wheel, else False.
        This method returns the same value the modifier returns.
        cSsHd}}d|tf}||kr$d|}||kr@||}t|d�j}||fS)Nz%s/%sz%s/PKG-INFO)rF)rr
rb)�path_mapr}rbrFr�r/r/r0�get_version1sz!Wheel.update.<locals>.get_versioncSs�d}y|t|�}|jd�}|dkr*d|}nTdd�||dd�jd�D�}|dd7<d|d|�djd	d
�|D��f}Wn tk
r�tjd|�YnX|r�t|d�}||_|j	t
�}|j||d
�tjd||�dS)Nrrz%s+1cSsg|]}t|��qSr/)rp)rqr
r/r/r0rsCsz8Wheel.update.<locals>.update_version.<locals>.<listcomp>rr!z%s+%scss|]}t|�VqdS)N)r�)rqrrr/r/r0�	<genexpr>Fsz7Wheel.update.<locals>.update_version.<locals>.<genexpr>z0Cannot update non-compliant (PEP-440) version %r)rF)rF�legacyzVersion updated from %r to %rr�)rr�rgr-rr�r�r
rbr�rr�)rbrF�updatedrCrrr.Zmdr!r/r/r0�update_version;s(

 
z$Wheel.update.<locals>.update_versionz%s-%sz%s.dist-infor�rozutf-8z..zinvalid entry in wheel: %rNz.whlz
wheel-update-)r�r��dirzNot a directory: %r)r6rFr-r`rcrarbrxrrr�r�r
r�rrrr�Zmkstemp�closer�r�r�r�r�r�Zcopyfile)r;ZmodifierZdest_dirr�rr#r>r|r}r�rrrrrrrFZoriginal_versionr ZmodifiedZcurrent_version�fd�newpathr�r�rkr/r/r0r= sX






zWheel.update)rr)NFF)N)NN)F)N)rRrSrT�__doc__r�r�r<�propertyrcrmrnrr�rvrkr�r�r�r�r�r�rrrrrrrrjr=r/r/r/r0rU�s4
)	
	
he	"
6rUcCs�tg}td}x6ttjdddd�D]}|jdj|t|�g��q&Wg}x6tj�D]*\}}}|j	d�rT|j|j
dd�d�qTW|j�tdkr�|j
dt�|jd�g}tg}tjdk�r�tjd	t�}|�r�|j�\}	}}}
t|�}|
g}|
dk�r|jd�|
dk�r|jd�|
dk�r*|jd�|
dk�r>|jd�|
dk�rR|jd�xL|dk�r�x2|D]*}d|	|||f}
|
tk�rd|j|
��qdW|d8}�qTWx<|D]4}x,|D]$}
|jdjt|df�||
f��q�W�q�WxXt|�D]L\}}|jdjt|f�ddf�|dk�r�|jdjt|df�ddf��q�WxXt|�D]L\}}|jdjd|f�ddf�|dk�rB|jdjd|df�ddf��qBWt|�S)zG
    Return (pyver, abi, arch) tuples compatible with this Python.
    rrr)z.abir!rrV�darwinz(\w+)_(\d+)_(\d+)_(\w+)$�i386�ppcZfat�x86_64Zfat3�ppc64Zfat64�intelZ	universalz%s_%s_%s_%srWrr�r�)r+r,)r+r,r-)r.r-)r+r-)r+r-r/r,r.)r*�rangerI�version_infor,r-r�rLZget_suffixesr�rgr�r�rr��platform�rerer�rp�
IMP_PREFIXr��set)Zversions�major�minorZabisr�r rGZarchesr&rar^Zmatchesrer
r]rrrbr/r/r0�compatible_tags�s`















*
$
$r8cCs^t|t�st|�}d}|dkr"t}x6|D].\}}}||jkr(||jkr(||jkr(d}Pq(W|S)NFT)r�rU�COMPATIBLE_TAGSr\r]r^)ZwheelrnrGZverr]r^r/r/r0r�s
r)N)SZ
__future__rr�rtrZdistutils.utilZ	distutilsZemailrr�rLr�Zloggingr6rxr3r�rIr�r�r)rr�compatrrr	r
rZdatabaserr�r
r�utilrrrrrrrrrrbrrZ	getLoggerrRr�r
�hasattrr4r2r�r+r*r1r[r��get_platformr5r�r�r1�compile�
IGNORECASE�VERBOSErhrdr�r�r�r�r7r��objectr8rrUr8r9rr/r/r/r0�<module>s�,


	


#>_vendor/distlib/__pycache__/index.cpython-36.pyc000064400000041620151733136300015604 0ustar003

�Pf]R�@s�ddlZddlZddlZddlZddlZddlZyddlmZWn ek
r`ddl	mZYnXddl
mZddlm
Z
mZmZmZmZmZddlmZmZmZeje�ZdZdZGd	d
�d
e�ZdS)�N)�Thread�)�DistlibException)�HTTPBasicAuthHandler�Request�HTTPPasswordMgr�urlparse�build_opener�string_types)�cached_property�zip_dir�ServerProxyzhttps://pypi.python.org/pypi�pypic@s�eZdZdZdZd*dd�Zdd�Zdd	�Zd
d�Zdd
�Z	dd�Z
dd�Zd+dd�Zd,dd�Z
d-dd�Zd.dd�Zdd�Zd/dd�Zd0d d!�Zd1d"d#�Zd$d%�Zd&d'�Zd2d(d)�ZdS)3�PackageIndexzc
    This class represents a package index compatible with PyPI, the Python
    Package Index.
    s.----------ThIs_Is_tHe_distlib_index_bouNdaRY_$NcCs�|pt|_|j�t|j�\}}}}}}|s<|s<|s<|dkrJtd|j��d|_d|_d|_d|_d|_	t
tjd��R}xJdD]B}	y(t
j|	dg||d	�}
|
d
kr�|	|_PWq|tk
r�Yq|Xq|WWdQRXdS)
z�
        Initialise an instance.

        :param url: The URL of the index. If not specified, the URL for PyPI is
                    used.
        �http�httpszinvalid repository: %sN�w�gpg�gpg2z	--version)�stdout�stderrr)rr)rr)�
DEFAULT_INDEX�url�read_configurationrr�password_handler�ssl_verifierr�gpg_home�	rpc_proxy�open�os�devnull�
subprocessZ
check_call�OSError)�selfr�scheme�netloc�pathZparamsZqueryZfragZsink�s�rc�r)�/usr/lib/python3.6/index.py�__init__$s(

zPackageIndex.__init__cCs&ddlm}ddlm}|�}||�S)zs
        Get the distutils command for interacting with PyPI configurations.
        :return: the command.
        r)�Distribution)�
PyPIRCCommand)Zdistutils.corer,Zdistutils.configr-)r#r,r-�dr)r)r*�_get_pypirc_commandBsz PackageIndex._get_pypirc_commandcCsR|j�}|j|_|j�}|jd�|_|jd�|_|jdd�|_|jd|j�|_dS)z�
        Read the PyPI access configuration as supported by distutils, getting
        PyPI to do the actual work. This populates ``username``, ``password``,
        ``realm`` and ``url`` attributes from the configuration.
        �username�password�realmr�
repositoryN)r/rr3Z_read_pypirc�getr0r1r2)r#�cZcfgr)r)r*rLszPackageIndex.read_configurationcCs$|j�|j�}|j|j|j�dS)z�
        Save the PyPI access configuration. You must have set ``username`` and
        ``password`` attributes before calling this method.

        Again, distutils is used to do the actual work.
        N)�check_credentialsr/Z
_store_pypircr0r1)r#r5r)r)r*�save_configuration[szPackageIndex.save_configurationcCs\|jdks|jdkrtd��t�}t|j�\}}}}}}|j|j||j|j�t|�|_	dS)zp
        Check that ``username`` and ``password`` have been set, and raise an
        exception if not.
        Nz!username and password must be set)
r0r1rrrrZadd_passwordr2rr)r#Zpm�_r%r)r)r*r6gszPackageIndex.check_credentialscCs\|j�|j�|j�}d|d<|j|j�g�}|j|�}d|d<|j|j�g�}|j|�S)aq
        Register a distribution on PyPI, using the provided metadata.

        :param metadata: A :class:`Metadata` instance defining at least a name
                         and version number for the distribution to be
                         registered.
        :return: The HTTP response received from PyPI upon submission of the
                request.
        Zverifyz:actionZsubmit)r6�validate�todict�encode_request�items�send_request)r#�metadatar.�requestZresponser)r)r*�registerss

zPackageIndex.registercCsJx<|j�}|sP|jd�j�}|j|�tjd||f�qW|j�dS)ar
        Thread runner for reading lines of from a subprocess into a buffer.

        :param name: The logical name of the stream (used for logging only).
        :param stream: The stream to read from. This will typically a pipe
                       connected to the output stream of a subprocess.
        :param outbuf: The list to append the read lines to.
        zutf-8z%s: %sN)�readline�decode�rstrip�append�logger�debug�close)r#�name�streamZoutbufr'r)r)r*�_reader�s	
zPackageIndex._readercCs�|jdddg}|dkr|j}|r.|jd|g�|dk	rF|jdddg�tj�}tjj|tjj|�d	�}|jd
dd|d
||g�t	j
ddj|��||fS)a�
        Return a suitable command for signing a file.

        :param filename: The pathname to the file to be signed.
        :param signer: The identifier of the signer of the file.
        :param sign_password: The passphrase for the signer's
                              private key used for signing.
        :param keystore: The path to a directory which contains the keys
                         used in verification. If not specified, the
                         instance's ``gpg_home`` attribute is used instead.
        :return: The signing command as a list suitable to be
                 passed to :class:`subprocess.Popen`.
        z--status-fd�2z--no-ttyNz	--homedirz--batchz--passphrase-fd�0z.ascz
--detach-signz--armorz--local-userz--outputzinvoking: %s� )rr�extend�tempfileZmkdtemprr&�join�basenamerErF)r#�filename�signer�
sign_password�keystore�cmdZtdZsfr)r)r*�get_sign_command�s
zPackageIndex.get_sign_commandc	Cs�tjtjd�}|dk	r tj|d<g}g}tj|f|�}t|jd|j|fd�}|j�t|jd|j|fd�}|j�|dk	r�|jj	|�|jj
�|j�|j�|j�|j
||fS)a�
        Run a command in a child process , passing it any input data specified.

        :param cmd: The command to run.
        :param input_data: If specified, this must be a byte string containing
                           data to be sent to the child process.
        :return: A tuple consisting of the subprocess' exit code, a list of
                 lines read from the subprocess' ``stdout``, and a list of
                 lines read from the subprocess' ``stderr``.
        )rrN�stdinr)�target�argsr)r!�PIPE�PopenrrJr�startrrX�writerG�waitrP�
returncode)	r#rVZ
input_data�kwargsrr�pZt1Zt2r)r)r*�run_command�s$


zPackageIndex.run_commandc
CsD|j||||�\}}|j||jd��\}}}	|dkr@td|��|S)aR
        Sign a file.

        :param filename: The pathname to the file to be signed.
        :param signer: The identifier of the signer of the file.
        :param sign_password: The passphrase for the signer's
                              private key used for signing.
        :param keystore: The path to a directory which contains the keys
                         used in signing. If not specified, the instance's
                         ``gpg_home`` attribute is used instead.
        :return: The absolute pathname of the file where the signature is
                 stored.
        zutf-8rz&sign command failed with error code %s)rWrc�encoder)
r#rRrSrTrUrV�sig_filer(rrr)r)r*�	sign_file�s

zPackageIndex.sign_file�sdist�sourcecCs(|j�tjj|�s td|��|j�|j�}d}	|rZ|jsJtj	d�n|j
||||�}	t|d��}
|
j�}WdQRXt
j|�j�}t
j|�j�}
|jdd||||
d��dtjj|�|fg}|	�rt|	d��}
|
j�}WdQRX|jd	tjj|	�|f�tjtjj|	��|j|j�|�}|j|�S)
a�
        Upload a release file to the index.

        :param metadata: A :class:`Metadata` instance defining at least a name
                         and version number for the file to be uploaded.
        :param filename: The pathname of the file to be uploaded.
        :param signer: The identifier of the signer of the file.
        :param sign_password: The passphrase for the signer's
                              private key used for signing.
        :param filetype: The type of the file being uploaded. This is the
                        distutils command which produced that file, e.g.
                        ``sdist`` or ``bdist_wheel``.
        :param pyversion: The version of Python which the release relates
                          to. For code compatible with any Python, this would
                          be ``source``, otherwise it would be e.g. ``3.2``.
        :param keystore: The path to a directory which contains the keys
                         used in signing. If not specified, the instance's
                         ``gpg_home`` attribute is used instead.
        :return: The HTTP response received from PyPI upon submission of the
                request.
        z
not found: %sNz)no signing program available - not signed�rbZfile_upload�1)z:actionZprotocol_version�filetype�	pyversion�
md5_digest�
sha256_digest�contentZ
gpg_signature)r6rr&�existsrr9r:rrEZwarningrfr�read�hashlib�md5�	hexdigestZsha256�updaterQrD�shutilZrmtree�dirnamer;r<r=)r#r>rRrSrTrkrlrUr.re�fZ	file_datarmrn�filesZsig_datar?r)r)r*�upload_file�s>

zPackageIndex.upload_filec
Cs�|j�tjj|�s td|��tjj|d�}tjj|�sFtd|��|j�|j|j	}}t
|�j�}d	d|fd|fg}d||fg}|j||�}	|j
|	�S)
a2
        Upload documentation to the index.

        :param metadata: A :class:`Metadata` instance defining at least a name
                         and version number for the documentation to be
                         uploaded.
        :param doc_dir: The pathname of the directory which contains the
                        documentation. This should be the directory that
                        contains the ``index.html`` for the documentation.
        :return: The HTTP response received from PyPI upon submission of the
                request.
        znot a directory: %rz
index.htmlz
not found: %r�:action�
doc_uploadrH�versionro)r{r|)r6rr&�isdirrrPrpr9rHr}r�getvaluer;r=)
r#r>Zdoc_dir�fnrHr}Zzip_data�fieldsryr?r)r)r*�upload_documentation)s
z!PackageIndex.upload_documentationcCsT|jdddg}|dkr|j}|r.|jd|g�|jd||g�tjddj|��|S)	a|
        Return a suitable command for verifying a file.

        :param signature_filename: The pathname to the file containing the
                                   signature.
        :param data_filename: The pathname to the file containing the
                              signed data.
        :param keystore: The path to a directory which contains the keys
                         used in verification. If not specified, the
                         instance's ``gpg_home`` attribute is used instead.
        :return: The verifying command as a list suitable to be
                 passed to :class:`subprocess.Popen`.
        z--status-fdrKz--no-ttyNz	--homedirz--verifyzinvoking: %srM)rrrNrErFrP)r#�signature_filename�
data_filenamerUrVr)r)r*�get_verify_commandEszPackageIndex.get_verify_commandcCsH|jstd��|j|||�}|j|�\}}}|dkr@td|��|dkS)a6
        Verify a signature for a file.

        :param signature_filename: The pathname to the file containing the
                                   signature.
        :param data_filename: The pathname to the file containing the
                              signed data.
        :param keystore: The path to a directory which contains the keys
                         used in verification. If not specified, the
                         instance's ``gpg_home`` attribute is used instead.
        :return: True if the signature was verified, else False.
        z0verification unavailable because gpg unavailablerrz(verify command failed with error code %s)rr)rrr�rc)r#r�r�rUrVr(rrr)r)r*�verify_signature]szPackageIndex.verify_signaturecCsp|dkrd}tjd�n6t|ttf�r0|\}}nd}tt|��}tjd|�t|d���}|jt	|��}z�|j
�}	d}
d}d}d}
d	|	kr�t|	d
�}|r�||
|
|�xP|j|
�}|s�P|t
|�7}|j|�|r�|j|�|
d7}
|r�||
|
|�q�WWd|j�XWdQRX|dk�r4||k�r4td||f��|�rl|j�}||k�r`td||||f��tjd
|�dS)a
        This is a convenience method for downloading a file from an URL.
        Normally, this will be a file from the index, though currently
        no check is made for this (i.e. a file can be downloaded from
        anywhere).

        The method is just like the :func:`urlretrieve` function in the
        standard library, except that it allows digest computation to be
        done during download and checking that the downloaded data
        matched any expected value.

        :param url: The URL of the file to be downloaded (assumed to be
                    available via an HTTP GET request).
        :param destfile: The pathname where the downloaded file is to be
                         saved.
        :param digest: If specified, this must be a (hasher, value)
                       tuple, where hasher is the algorithm used (e.g.
                       ``'md5'``) and ``value`` is the expected value.
        :param reporthook: The same as for :func:`urlretrieve` in the
                           standard library.
        NzNo digest specifiedrszDigest specified: %s�wbi rrzcontent-lengthzContent-Lengthz1retrieval incomplete: got only %d out of %d bytesz.%s digest mismatch for %s: expected %s, got %szDigest verified: %s���)rErF�
isinstance�list�tuple�getattrrrrr=r�info�intrq�lenr^rurGrrt)r#r�destfileZdigestZ
reporthookZdigesterZhasherZdfpZsfp�headersZ	blocksize�sizerqZblocknum�block�actualr)r)r*�
download_filevsV




zPackageIndex.download_filecCs:g}|jr|j|j�|jr(|j|j�t|�}|j|�S)z�
        Send a standard library :class:`Request` to PyPI and return its
        response.

        :param req: The request to send.
        :return: The HTTP response from PyPI (a standard library HTTPResponse).
        )rrDrr	r)r#ZreqZhandlers�openerr)r)r*r=�szPackageIndex.send_requestcCs�g}|j}xX|D]P\}}t|ttf�s,|g}x2|D]*}|jd|d|jd�d|jd�f�q2WqWx6|D].\}}	}
|jd|d||	fjd�d|
f�qjW|jd|ddf�dj|�}d|}|tt|��d�}
t	|j
||
�S)	a&
        Encode fields and files for posting to an HTTP server.

        :param fields: The fields to send as a list of (fieldname, value)
                       tuples.
        :param files: The files to send as a list of (fieldname, filename,
                      file_bytes) tuple.
        s--z)Content-Disposition: form-data; name="%s"zutf-8�z8Content-Disposition: form-data; name="%s"; filename="%s"s
smultipart/form-data; boundary=)zContent-typezContent-length)�boundaryr�r�r�rNrdrP�strr�rr)r#r�ry�partsr��k�values�v�keyrR�valueZbodyZctr�r)r)r*r;�s2


zPackageIndex.encode_requestcCs>t|t�rd|i}|jdkr,t|jdd�|_|jj||p:d�S)NrHg@)Ztimeout�and)r�r
rr
r�search)r#Zterms�operatorr)r)r*r��s


zPackageIndex.search)N)N)N)N)NNrgrhN)N)N)NN)N)�__name__�
__module__�__qualname__�__doc__r�r+r/rr7r6r@rJrWrcrfrzr�r�r�r�r=r;r�r)r)r)r*rs*



#

8


M+r)rrZloggingrrvr!rOZ	threadingr�ImportErrorZdummy_threading�r�compatrrrrr	r
�utilrrr
Z	getLoggerr�rErZ
DEFAULT_REALM�objectrr)r)r)r*�<module>s  
_vendor/distlib/__pycache__/version.cpython-36.opt-1.pyc000064400000050610151733136300017120 0ustar003

�Pf�\�@sZdZddlZddlZddlmZddddd	d
ddgZeje�ZGd
d�de	�Z
Gdd�de�ZGdd�de�Z
ejd�Zdd�ZeZGdd�de�Zdd�ZGdd�de
�Zejd�dfejd�dfejd�dfejd�dfejd �d!fejd"�d!fejd#�d$fejd%�d&fejd'�d(fejd)�d*ff
Zejd+�dfejd,�dfejd-�d$fejd#�d$fejd.�dffZejd/�Zd0d1�Zd2d3�Zejd4ej�Zd5d5d6d5d7ddd8�Zd9d:�ZGd;d�de�ZGd<d�de
�Zejd=ej�Z d>d?�Z!d@dA�Z"GdBd	�d	e�Z#GdCd
�d
e
�Z$GdDdE�dEe�Z%e%eee�e%eedFdG��e%e"e$e�dH�Z&e&dIe&dJ<dKd�Z'dS)Lz~
Implementation of a flexible versioning scheme providing support for PEP-440,
setuptools-compatible and semantic versioning.
�N�)�string_types�NormalizedVersion�NormalizedMatcher�
LegacyVersion�
LegacyMatcher�SemanticVersion�SemanticMatcher�UnsupportedVersionError�
get_schemec@seZdZdZdS)r
zThis is an unsupported version.N)�__name__�
__module__�__qualname__�__doc__�rr�/usr/lib/python3.6/version.pyr
sc@sxeZdZdd�Zdd�Zdd�Zdd�Zd	d
�Zdd�Zd
d�Z	dd�Z
dd�Zdd�Zdd�Z
dd�Zedd��ZdS)�VersioncCs"|j�|_}|j|�|_}dS)N)�strip�_string�parse�_parts)�self�s�partsrrr�__init__szVersion.__init__cCstd��dS)Nzplease implement in a subclass)�NotImplementedError)rrrrrr$sz
Version.parsecCs$t|�t|�kr td||f��dS)Nzcannot compare %r and %r)�type�	TypeError)r�otherrrr�_check_compatible'szVersion._check_compatiblecCs|j|�|j|jkS)N)rr)rrrrr�__eq__+s
zVersion.__eq__cCs|j|�S)N)r )rrrrr�__ne__/szVersion.__ne__cCs|j|�|j|jkS)N)rr)rrrrr�__lt__2s
zVersion.__lt__cCs|j|�p|j|�S)N)r"r )rrrrr�__gt__6szVersion.__gt__cCs|j|�p|j|�S)N)r"r )rrrrr�__le__9szVersion.__le__cCs|j|�p|j|�S)N)r#r )rrrrr�__ge__<szVersion.__ge__cCs
t|j�S)N)�hashr)rrrr�__hash__@szVersion.__hash__cCsd|jj|jfS)Nz%s('%s'))�	__class__rr)rrrr�__repr__CszVersion.__repr__cCs|jS)N)r)rrrr�__str__FszVersion.__str__cCstd��dS)NzPlease implement in subclasses.)r)rrrr�
is_prereleaseIszVersion.is_prereleaseN)rr
rrrrr r!r"r#r$r%r'r)r*�propertyr+rrrrrsrc	@s�eZdZdZejd�Zejd�Zejd�Zdd�dd�dd�d	d�d
d�dd�dd�d
d�d�Z	dd�Z
dd�Zedd��Z
dd�Zdd�Zdd�Zdd�Zdd�Zdd �ZdS)!�MatcherNz^(\w[\s\w'.-]*)(\((.*)\))?z'^(<=|>=|<|>|!=|={2,3}|~=)?\s*([^\s,]+)$z
^\d+(\.\d+)*$cCs||kS)Nr)�v�c�prrr�<lambda>WszMatcher.<lambda>cCs||kS)Nr)r.r/r0rrrr1XscCs||kp||kS)Nr)r.r/r0rrrr1YscCs||kp||kS)Nr)r.r/r0rrrr1ZscCs||kS)Nr)r.r/r0rrrr1[scCs||kS)Nr)r.r/r0rrrr1\scCs||kp||kS)Nr)r.r/r0rrrr1^scCs||kS)Nr)r.r/r0rrrr1_s)�<�>z<=z>=z==z===z~=z!=c
CsJ|jdkrtd��|j�|_}|jj|�}|s<td|��|jd�}|dj�|_|jj�|_	g}|d�r<dd�|dj
d�D�}x�|D]�}|jj|�}|s�td	||f��|j�}|dp�d
}|d}|jd��r|dkr�td|��|dd�d}}	|j
j|��s(|j|�n|j|�d}}	|j|||	f�q�Wt|�|_dS)NzPlease specify a version classz
Not valid: %r�r�cSsg|]}|j��qSr)r)�.0r/rrr�
<listcomp>nsz$Matcher.__init__.<locals>.<listcomp>�,zInvalid %r in %rz~=rz.*�==�!=z#'.*' not allowed for %r constraintsTF)r9r:���)�
version_class�
ValueErrorrr�dist_re�match�groups�name�lower�key�split�comp_re�endswith�num_re�append�tupler)
rr�mr@ZclistZconstraintsr/�opZvn�prefixrrrrbs:



zMatcher.__init__cCszt|t�r|j|�}x`|jD]V\}}}|jj|�}t|t�rFt||�}|sbd||jjf}t	|��||||�sdSqWdS)z�
        Check if the provided version matches the constraints.

        :param version: The version to match against this instance.
        :type version: String or :class:`Version` instance.
        z%r not implemented for %sFT)
�
isinstancerr<r�
_operators�get�getattrr(rr)r�version�operator�
constraintrL�f�msgrrrr?�s



z
Matcher.matchcCs6d}t|j�dkr2|jdddkr2|jdd}|S)Nrr�==�===)rVrW)�lenr)r�resultrrr�
exact_version�s zMatcher.exact_versioncCs0t|�t|�ks|j|jkr,td||f��dS)Nzcannot compare %s and %s)rrAr)rrrrrr�szMatcher._check_compatiblecCs"|j|�|j|jko |j|jkS)N)rrCr)rrrrrr �s
zMatcher.__eq__cCs|j|�S)N)r )rrrrrr!�szMatcher.__ne__cCst|j�t|j�S)N)r&rCr)rrrrr'�szMatcher.__hash__cCsd|jj|jfS)Nz%s(%r))r(rr)rrrrr)�szMatcher.__repr__cCs|jS)N)r)rrrrr*�szMatcher.__str__)rr
rr<�re�compiler>rErGrNrr?r,rZrr r!r'r)r*rrrrr-Ns*


%r-zk^v?(\d+!)?(\d+(\.\d+)*)((a|b|c|rc)(\d+))?(\.(post)(\d+))?(\.(dev)(\d+))?(\+([a-zA-Z\d]+(\.[a-zA-Z\d]+)?))?$cCs�|j�}tj|�}|s"td|��|j�}tdd�|djd�D��}x(t|�dkrn|ddkrn|dd�}qHW|ds~d}nt|d�}|dd�}|d	d
�}|dd�}|d
}|dkr�f}n|dt|d�f}|dkr�f}n|dt|d�f}|dk�r
f}n|dt|d�f}|dk�r.f}nLg}	x>|jd�D]0}
|
j	��rZdt|
�f}
nd|
f}
|	j
|
��q>Wt|	�}|�s�|�r�|�r�d}nd}|�s�d}|�s�d}||||||fS)NzNot a valid version: %scss|]}t|�VqdS)N)�int)r6r.rrr�	<genexpr>�sz_pep_440_key.<locals>.<genexpr>r�.r����	�
��
�a�z�_�final���rk)NN)NN)NNrk)rgrk)rh)ri)rj)r�PEP440_VERSION_REr?r
r@rIrDrXr]�isdigitrH)rrJr@ZnumsZepoch�preZpost�devZlocalr�partrrr�_pep_440_key�sT



rqc@s6eZdZdZdd�Zedddddg�Zed	d
��ZdS)raIA rational version.

    Good:
        1.2         # equivalent to "1.2.0"
        1.2.0
        1.2a1
        1.2.3a2
        1.2.3b1
        1.2.3c1
        1.2.3.4
        TODO: fill this out

    Bad:
        1           # minimum two numbers
        1.2a        # release level must have a release serial
        1.2.3b
    cCs<t|�}tj|�}|j�}tdd�|djd�D��|_|S)Ncss|]}t|�VqdS)N)r])r6r.rrrr^sz*NormalizedVersion.parse.<locals>.<genexpr>rr_)�_normalized_keyrlr?r@rIrD�_release_clause)rrrYrJr@rrrrs

zNormalizedVersion.parserg�br/�rcrocst�fdd��jD��S)Nc3s |]}|r|d�jkVqdS)rN)�PREREL_TAGS)r6�t)rrrr^sz2NormalizedVersion.is_prerelease.<locals>.<genexpr>)�anyr)rr)rrr+szNormalizedVersion.is_prereleaseN)	rr
rrr�setrvr,r+rrrrr�scCs>t|�}t|�}||krdS|j|�s*dSt|�}||dkS)NTFr_)�str�
startswithrX)�x�y�nrrr�
_match_prefix"s
rc	@sneZdZeZddddddddd	�Zd
d�Zdd
�Zdd�Zdd�Z	dd�Z
dd�Zdd�Zdd�Z
dd�ZdS)r�_match_compatible�	_match_lt�	_match_gt�	_match_le�	_match_ge�	_match_eq�_match_arbitrary�	_match_ne)z~=r2r3z<=z>=z==z===z!=cCsV|rd|ko|jd}n|jdo,|jd}|rN|jjdd�d}|j|�}||fS)N�+rrrkrkrk)rrrDr<)rrQrSrLZstrip_localrrrr�
_adjust_local<s
zNormalizedMatcher._adjust_localcCsD|j|||�\}}||krdS|j}djdd�|D��}t||�S)NFr_cSsg|]}t|��qSr)rz)r6�irrrr7Osz/NormalizedMatcher._match_lt.<locals>.<listcomp>)r�rs�joinr)rrQrSrL�release_clause�pfxrrrr�JszNormalizedMatcher._match_ltcCsD|j|||�\}}||krdS|j}djdd�|D��}t||�S)NFr_cSsg|]}t|��qSr)rz)r6r�rrrr7Wsz/NormalizedMatcher._match_gt.<locals>.<listcomp>)r�rsr�r)rrQrSrLr�r�rrrr�RszNormalizedMatcher._match_gtcCs|j|||�\}}||kS)N)r�)rrQrSrLrrrr�ZszNormalizedMatcher._match_lecCs|j|||�\}}||kS)N)r�)rrQrSrLrrrr�^szNormalizedMatcher._match_gecCs.|j|||�\}}|s ||k}n
t||�}|S)N)r�r)rrQrSrLrYrrrr�bs


zNormalizedMatcher._match_eqcCst|�t|�kS)N)rz)rrQrSrLrrrr�jsz"NormalizedMatcher._match_arbitrarycCs0|j|||�\}}|s ||k}nt||�}|S)N)r�r)rrQrSrLrYrrrr�ms

zNormalizedMatcher._match_necCsf|j|||�\}}||krdS||kr*dS|j}t|�dkrH|dd�}djdd�|D��}t||�S)NTFrr_cSsg|]}t|��qSr)rz)r6r�rrrr7�sz7NormalizedMatcher._match_compatible.<locals>.<listcomp>rk)r�rsrXr�r)rrQrSrLr�r�rrrr�usz#NormalizedMatcher._match_compatibleN)rr
rrr<rNr�r�r�r�r�r�r�r�r�rrrrr-s$z[.+-]$r4z^[.](\d)z0.\1z^[.-]z
^\((.*)\)$z\1z^v(ersion)?\s*(\d+)z\2z^r(ev)?\s*(\d+)z[.]{2,}r_z\b(alfa|apha)\b�alphaz\b(pre-alpha|prealpha)\bz	pre.alphaz	\(beta\)$�betaz
^[:~._+-]+z
[,*")([\]]z[~:+_ -]z\.$z
(\d+(\.\d+)*)cCsZ|j�j�}xtD]\}}|j||�}qW|s2d}tj|�}|sJd}|}n�|j�djd�}dd�|D�}xt|�dkr�|j	d�qlWt|�dkr�||j
�d�}n8djdd�|dd�D��||j
�d�}|dd�}djd	d�|D��}|j�}|�rxtD]\}}|j||�}�qW|�s*|}nd
|k�r8dnd}|||}t
|��sVd}|S)
z�
    Try to suggest a semantic form for a version for which
    _suggest_normalized_version couldn't come up with anything.
    z0.0.0rr_cSsg|]}t|��qSr)r])r6r�rrrr7�sz-_suggest_semantic_version.<locals>.<listcomp>�NcSsg|]}t|��qSr)rz)r6r�rrrr7�scSsg|]}t|��qSr)rz)r6r�rrrr7�sro�-r�)rrB�
_REPLACEMENTS�sub�_NUMERIC_PREFIXr?r@rDrXrH�endr��_SUFFIX_REPLACEMENTS�	is_semver)rrYZpat�replrJrL�suffix�seprrr�_suggest_semantic_version�s:
,
r�cCslyt|�|Stk
r YnX|j�}xdBD]\}}|j||�}q0Wtjdd|�}tjdd|�}tjdd|�}tjdd|�}tjdd|�}|jd��r�|d d!�}tjd"d|�}tjd#d$|�}tjd%d&|�}tjd'd|�}tjd(d)|�}tjd*d)|�}tjd+d
|�}tjd,d-|�}tjd.d&|�}tjd/d0|�}tjd1d2|�}yt|�Wntk
�rfd!}YnX|S)Ca�Suggest a normalized version close to the given version string.

    If you have a version string that isn't rational (i.e. NormalizedVersion
    doesn't like it) then you might be able to get an equivalent (or close)
    rational version from this function.

    This does a number of simple normalizations to the given string, based
    on observation of versions currently in use on PyPI. Given a dump of
    those version during PyCon 2009, 4287 of them:
    - 2312 (53.93%) match NormalizedVersion without change
      with the automatic suggestion
    - 3474 (81.04%) match when using this suggestion method

    @param s {str} An irrational version string.
    @returns A rational version string, or None, if couldn't determine one.
    �-alpharg�-betartr�r�rur/�-finalr4�-pre�-release�.release�-stabler�r_ri� �.finalrjzpre$Zpre0zdev$Zdev0z([abc]|rc)[\-\.](\d+)$z\1\2z[\-\.](dev)[\-\.]?r?(\d+)$z.\1\2z[.~]?([abc])\.?z\1r.rNz\b0+(\d+)(?!\d)z(\d+[abc])$z\g<1>0z\.?(dev-r|dev\.r)\.?(\d+)$z.dev\2z-(a|b|c)(\d+)$z[\.\-](dev|devel)$z.dev0z(?![\.\-])dev$z(final|stable)$z\.?(r|-|-r)\.?(\d+)$z.post\2z\.?(dev|git|bzr)\.?(\d+)$z\.?(pre|preview|-c)(\d+)$zc\g<2>zp(\d+)$z.post\1�r�rg�r�rt�r�rg�r�rt�rur/�r�r4�r�r/�r�r4�r�r4�r�r4�r�r_�rir_�r�r4�r�r4�rjr4)r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�)rrr
rB�replacer[r�r{)rZrsZorigr�rrr�_suggest_normalized_version�sH	
r�z([a-z]+|\d+|[\.-])r/zfinal-�@)rnZpreviewr�ruror4r_cCs~dd�}g}xh||�D]\}|jd�rh|dkrJx|rH|ddkrH|j�q.Wx|rf|d	dkrf|j�qLW|j|�qWt|�S)
NcSsxg}xdtj|j��D]R}tj||�}|rd|dd�koBdknrT|jd�}nd|}|j|�qW|jd�|S)N�0r�9��*z*final)�
_VERSION_PARTrDrB�_VERSION_REPLACErO�zfillrH)rrYr0rrr�	get_partsIs 
z_legacy_key.<locals>.get_partsr�z*finalrz*final-Z00000000rkrk)r{�poprHrI)rr�rYr0rrr�_legacy_keyHs

r�c@s eZdZdd�Zedd��ZdS)rcCst|�S)N)r�)rrrrrrcszLegacyVersion.parsecCs:d}x0|jD]&}t|t�r|jd�r|dkrd}PqW|S)NFr�z*finalT)rrMrr{)rrYr|rrrr+fszLegacyVersion.is_prereleaseN)rr
rrr,r+rrrrrbsc@s4eZdZeZeej�Zded<ej	d�Z
dd�ZdS)rr�z~=z^(\d+(\.\d+)*)cCs`||krdS|jjt|��}|s2tjd||�dS|j�d}d|krV|jdd�d}t||�S)NFzACannot compute compatible match for version %s  and constraint %sTrr_r)�
numeric_rer?rz�loggerZwarningr@�rsplitr)rrQrSrLrJrrrrr�yszLegacyMatcher._match_compatibleN)rr
rrr<�dictr-rNr[r\r�r�rrrrrqs


zN^(\d+)\.(\d+)\.(\d+)(-[a-z0-9]+(\.[a-z0-9-]+)*)?(\+[a-z0-9]+(\.[a-z0-9-]+)*)?$cCs
tj|�S)N)�
_SEMVER_REr?)rrrrr��sr�c	Csndd�}t|�}|st|��|j�}dd�|dd�D�\}}}||dd�||dd�}}|||f||fS)	NcSs8|dkr|f}n$|dd�jd�}tdd�|D��}|S)Nrr_cSs"g|]}|j�r|jd�n|�qS)r�)rmr�)r6r0rrrr7�sz5_semantic_key.<locals>.make_tuple.<locals>.<listcomp>)rDrI)rZabsentrYrrrr�
make_tuple�s
z!_semantic_key.<locals>.make_tuplecSsg|]}t|��qSr)r])r6r�rrrr7�sz!_semantic_key.<locals>.<listcomp>r��|�r�)r�r
r@)	rr�rJr@�major�minorZpatchrnZbuildrrr�
_semantic_key�s
r�c@s eZdZdd�Zedd��ZdS)rcCst|�S)N)r�)rrrrrr�szSemanticVersion.parsecCs|jdddkS)Nrrr�)r)rrrrr+�szSemanticVersion.is_prereleaseN)rr
rrr,r+rrrrr�sc@seZdZeZdS)r	N)rr
rrr<rrrrr	�sc@s6eZdZddd�Zdd�Zdd�Zdd	�Zd
d�ZdS)
�
VersionSchemeNcCs||_||_||_dS)N)rC�matcher�	suggester)rrCr�r�rrrr�szVersionScheme.__init__cCs2y|jj|�d}Wntk
r,d}YnX|S)NTF)r�r<r
)rrrYrrr�is_valid_version�s
zVersionScheme.is_valid_versioncCs0y|j|�d}Wntk
r*d}YnX|S)NTF)r�r
)rrrYrrr�is_valid_matcher�s

zVersionScheme.is_valid_matchercCs|jd|�S)z:
        Used for processing some metadata fields
        zdummy_name (%s))r�)rrrrr�is_valid_constraint_list�sz&VersionScheme.is_valid_constraint_listcCs|jdkrd}n
|j|�}|S)N)r�)rrrYrrr�suggest�s

zVersionScheme.suggest)N)rr
rrr�r�r�r�rrrrr��s

r�cCs|S)Nr)rrrrrr1�sr1)�
normalized�legacyZsemanticr��defaultcCs|tkrtd|��t|S)Nzunknown scheme name: %r)�_SCHEMESr=)rArrrr�s)(rZloggingr[�compatr�__all__Z	getLoggerrr�r=r
�objectrr-r\rlrqrrrrrr�r�r�r�r��Ir�r�r�rrr�r�r�rr	r�r�rrrrr�<module>	sz
1k
=$W
.r	$
_vendor/distlib/__init__.py000064400000001105151733136300011742 0ustar00# -*- coding: utf-8 -*-
#
# Copyright (C) 2012-2016 Vinay Sajip.
# Licensed to the Python Software Foundation under a contributor agreement.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
import logging

__version__ = '0.2.4'

class DistlibException(Exception):
    """The base exception type for all errors raised by distlib."""

# logging.NullHandler is only available on Python 2.7/3.1+; fall back to a
# local no-op handler on older interpreters.
try:
    from logging import NullHandler
except ImportError: # pragma: no cover
    class NullHandler(logging.Handler):
        # No-op implementations: discard records and avoid creating a lock.
        def handle(self, record): pass
        def emit(self, record): pass
        def createLock(self): self.lock = None

# Library convention: attach a NullHandler so importing applications that
# don't configure logging aren't warned about missing handlers.
logger = logging.getLogger(__name__)
logger.addHandler(NullHandler())
_vendor/distlib/resources.py000064400000025016151733136300012224 0ustar00# -*- coding: utf-8 -*-
#
# Copyright (C) 2013-2016 Vinay Sajip.
# Licensed to the Python Software Foundation under a contributor agreement.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
from __future__ import unicode_literals

import bisect
import io
import logging
import os
import pkgutil
import shutil
import sys
import types
import zipimport

from . import DistlibException
from .util import cached_property, get_cache_base, path_to_cache_dir, Cache

logger = logging.getLogger(__name__)


cache = None    # module-level resource cache; presumably created lazily on first use by a finder — creation site not in view


class ResourceCache(Cache):
    """
    File-system cache for resources which have no directly usable path of
    their own (e.g. resources stored inside zip archives).
    """
    def __init__(self, base=None):
        """
        Initialise the cache under *base*, defaulting to a per-user
        'resource-cache' directory.
        """
        if base is None:
            # Use native string to avoid issues on 2.x: see Python #20140.
            base = os.path.join(get_cache_base(), str('resource-cache'))
        super(ResourceCache, self).__init__(base)

    def is_stale(self, resource, path):
        """
        Is the cache stale for the given resource?

        :param resource: The :class:`Resource` being cached.
        :param path: The path of the resource in the cache.
        :return: True if the cache is stale.
        """
        # Cache invalidation is a hard problem :-)
        return True

    def get(self, resource):
        """
        Ensure *resource* is present in the cache.

        :param resource: A :class:`Resource` instance.
        :return: The pathname of the resource in the cache.
        """
        prefix, path = resource.finder.get_cache_info(resource)
        if prefix is None:
            # The resource already lives on the file system; use it as-is.
            return path
        result = os.path.join(self.base, self.prefix_to_dir(prefix), path)
        dirname = os.path.dirname(result)
        if not os.path.isdir(dirname):
            os.makedirs(dirname)
        # Refresh the cached copy when it is missing or stale.
        if not os.path.exists(result) or self.is_stale(resource, path):
            with open(result, 'wb') as f:
                f.write(resource.bytes)
        return result


class ResourceBase(object):
    """Common state shared by :class:`Resource` and :class:`ResourceContainer`."""

    def __init__(self, finder, name):
        # The finder which located this resource, and the resource's
        # name within its package.
        self.finder = finder
        self.name = name


class Resource(ResourceBase):
    """
    A class representing an in-package resource, such as a data file. This is
    not normally instantiated by user code, but rather by a
    :class:`ResourceFinder` which manages the resource.
    """
    is_container = False        # Backwards compatibility

    def as_stream(self):
        """
        Get the resource as a stream.

        This is not a property to make it obvious that it returns a new stream
        each time.
        """
        return self.finder.get_stream(self)

    @cached_property
    def file_path(self):
        """A file-system path for the resource, materialised through the
        module-level ResourceCache (created lazily on first use)."""
        global cache
        if cache is None:
            cache = ResourceCache()
        return cache.get(self)

    @cached_property
    def bytes(self):
        """The resource's contents as bytes, fetched from its finder."""
        return self.finder.get_bytes(self)

    @cached_property
    def size(self):
        """The size of the resource in bytes, fetched from its finder."""
        return self.finder.get_size(self)


class ResourceContainer(ResourceBase):
    """
    A directory-like resource which contains other resources, accessible
    via :attr:`resources`.
    """
    is_container = True     # Backwards compatibility

    @cached_property
    def resources(self):
        """The set of names of resources inside this container."""
        return self.finder.get_resources(self)


class ResourceFinder(object):
    """
    Resource finder for file system resources.
    """

    # Compiled artefacts are never reported as resources.
    if sys.platform.startswith('java'):
        skipped_extensions = ('.pyc', '.pyo', '.class')
    else:
        skipped_extensions = ('.pyc', '.pyo')

    def __init__(self, module):
        """Initialise a finder rooted at *module*'s containing directory."""
        self.module = module
        self.loader = getattr(module, '__loader__', None)
        self.base = os.path.dirname(getattr(module, '__file__', ''))

    def _adjust_path(self, path):
        # Normalise to a canonical file-system path.
        return os.path.realpath(path)

    def _make_path(self, resource_name):
        """Map a '/'-separated resource name to a file-system path."""
        # Issue #50: need to preserve type of path on Python 2.x
        # like os.path._get_sep
        sep = b'/' if isinstance(resource_name, bytes) else '/'
        segments = resource_name.split(sep)
        joined = os.path.join(self.base, *segments)
        return self._adjust_path(joined)

    def _find(self, path):
        return os.path.exists(path)

    def get_cache_info(self, resource):
        # None prefix => the resource path is directly usable, no caching.
        return None, resource.path

    def find(self, resource_name):
        """Return a Resource/ResourceContainer for *resource_name*, or None."""
        path = self._make_path(resource_name)
        if not self._find(path):
            return None
        cls = ResourceContainer if self._is_directory(path) else Resource
        found = cls(self, resource_name)
        found.path = path
        return found

    def get_stream(self, resource):
        return open(resource.path, 'rb')

    def get_bytes(self, resource):
        with open(resource.path, 'rb') as f:
            return f.read()

    def get_size(self, resource):
        return os.path.getsize(resource.path)

    def get_resources(self, resource):
        """Return the set of entries under *resource*, minus compiled files."""
        skipped = self.skipped_extensions
        return {name for name in os.listdir(resource.path)
                if name != '__pycache__' and not name.endswith(skipped)}

    def is_container(self, resource):
        return self._is_directory(resource.path)

    _is_directory = staticmethod(os.path.isdir)

    def iterator(self, resource_name):
        """Yield *resource_name* and, breadth-first, everything below it."""
        root = self.find(resource_name)
        if root is None:
            return
        pending = [root]
        while pending:
            current = pending.pop(0)
            yield current
            if not current.is_container:
                continue
            prefix = current.name
            for entry in current.resources:
                child_name = entry if not prefix else '/'.join([prefix, entry])
                child = self.find(child_name)
                if child.is_container:
                    pending.append(child)
                else:
                    yield child


class ZipResourceFinder(ResourceFinder):
    """
    Resource finder for resources in .zip files.
    """
    def __init__(self, module):
        super(ZipResourceFinder, self).__init__(module)
        archive = self.loader.archive
        # Length of the archive path plus one separator; stripping this
        # prefix from a resource path yields the entry name inside the zip.
        self.prefix_len = 1 + len(archive)
        # PyPy doesn't have a _files attr on zipimporter, and you can't set one
        if hasattr(self.loader, '_files'):
            self._files = self.loader._files
        else:
            self._files = zipimport._zip_directory_cache[archive]
        # Sorted entry names enable the bisect-based prefix searches below.
        self.index = sorted(self._files)

    def _adjust_path(self, path):
        # Unlike the file-system finder, no realpath normalisation: zip
        # entry names must be matched verbatim.
        return path

    def _find(self, path):
        # A path exists if it is an exact entry, or (for directories) if
        # some entry sorts immediately after 'path' + separator and shares
        # that prefix.
        path = path[self.prefix_len:]
        if path in self._files:
            result = True
        else:
            if path and path[-1] != os.sep:
                path = path + os.sep
            i = bisect.bisect(self.index, path)
            try:
                result = self.index[i].startswith(path)
            except IndexError:
                result = False
        if not result:
            logger.debug('_find failed: %r %r', path, self.loader.prefix)
        else:
            logger.debug('_find worked: %r %r', path, self.loader.prefix)
        return result

    def get_cache_info(self, resource):
        # A non-None prefix tells ResourceCache the bytes must be written
        # out to the file system before a usable path can be returned.
        prefix = self.loader.archive
        path = resource.path[1 + len(prefix):]
        return prefix, path

    def get_bytes(self, resource):
        return self.loader.get_data(resource.path)

    def get_stream(self, resource):
        return io.BytesIO(self.get_bytes(resource))

    def get_size(self, resource):
        path = resource.path[self.prefix_len:]
        # NOTE(review): relies on zipimport's private TOC tuple layout;
        # index 3 is presumably the uncompressed size -- confirm.
        return self._files[path][3]

    def get_resources(self, resource):
        """Return the set of immediate child names under *resource*."""
        path = resource.path[self.prefix_len:]
        if path and path[-1] != os.sep:
            path += os.sep
        plen = len(path)
        result = set()
        # Scan the contiguous run of index entries sharing the prefix.
        i = bisect.bisect(self.index, path)
        while i < len(self.index):
            if not self.index[i].startswith(path):
                break
            s = self.index[i][plen:]
            result.add(s.split(os.sep, 1)[0])   # only immediate children
            i += 1
        return result

    def _is_directory(self, path):
        # NOTE(review): os.sep is used for zip entry paths throughout this
        # class; zip archives conventionally use '/' -- confirm behaviour
        # on Windows.
        path = path[self.prefix_len:]
        if path and path[-1] != os.sep:
            path += os.sep
        i = bisect.bisect(self.index, path)
        try:
            result = self.index[i].startswith(path)
        except IndexError:
            result = False
        return result

# Maps loader types to finder factories. type(None) covers modules whose
# __loader__ attribute is absent (plain file-system packages).
_finder_registry = {
    type(None): ResourceFinder,
    zipimport.zipimporter: ZipResourceFinder
}

try:
    # In Python 3.6, _frozen_importlib -> _frozen_importlib_external
    try:
        import _frozen_importlib_external as _fi
    except ImportError:
        import _frozen_importlib as _fi
    # The standard file-system loaders also map to the plain finder.
    _finder_registry[_fi.SourceFileLoader] = ResourceFinder
    _finder_registry[_fi.FileFinder] = ResourceFinder
    del _fi
except (ImportError, AttributeError):
    # Interpreters without these internals: the registry stays minimal.
    pass


def register_finder(loader, finder_maker):
    """
    Register *finder_maker* as the finder factory for loaders of the same
    type as *loader*.
    """
    _finder_registry[type(loader)] = finder_maker

# Cache of package name -> finder instance, populated by finder().
_finder_cache = {}


def finder(package):
    """
    Return a resource finder for a package.
    :param package: The name of the package.
    :return: A :class:`ResourceFinder` instance for the package.
    """
    try:
        # Fast path: one finder per package, created once and cached.
        return _finder_cache[package]
    except KeyError:
        pass
    if package not in sys.modules:
        __import__(package)
    module = sys.modules[package]
    if getattr(module, '__path__', None) is None:
        raise DistlibException('You cannot get a finder for a module, '
                               'only for a package')
    loader = getattr(module, '__loader__', None)
    finder_maker = _finder_registry.get(type(loader))
    if finder_maker is None:
        raise DistlibException('Unable to locate finder for %r' % package)
    result = finder_maker(module)
    _finder_cache[package] = result
    return result


# Throwaway module object reused by finder_for_path() to satisfy the
# finder constructors, which expect a module argument.
_dummy_module = types.ModuleType(str('__dummy__'))


def finder_for_path(path):
    """
    Return a resource finder for a path, which should represent a container.

    :param path: The path.
    :return: A :class:`ResourceFinder` instance for the path.
    """
    # calls any path hooks, gets importer into cache
    pkgutil.get_importer(path)
    loader = sys.path_importer_cache.get(path)
    maker = _finder_registry.get(type(loader))
    if not maker:
        return None
    module = _dummy_module
    module.__file__ = os.path.join(path, '')
    module.__loader__ = loader
    return maker(module)
_vendor/distlib/scripts.py000064400000035570151733136310011710 0ustar00# -*- coding: utf-8 -*-
#
# Copyright (C) 2013-2015 Vinay Sajip.
# Licensed to the Python Software Foundation under a contributor agreement.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
from io import BytesIO
import logging
import os
import re
import struct
import sys

from .compat import sysconfig, detect_encoding, ZipFile
from .resources import finder
from .util import (FileOperator, get_export_entry, convert_path,
                   get_executable, in_venv)

logger = logging.getLogger(__name__)

# Manifest XML embedded in Windows launcher executables; %s is replaced by
# the executable's base name (see ScriptMaker.get_manifest()).
_DEFAULT_MANIFEST = '''
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
 <assemblyIdentity version="1.0.0.0"
 processorArchitecture="X86"
 name="%s"
 type="win32"/>

 <!-- Identify the application security requirements. -->
 <trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
 <security>
 <requestedPrivileges>
 <requestedExecutionLevel level="asInvoker" uiAccess="false"/>
 </requestedPrivileges>
 </security>
 </trustInfo>
</assembly>'''.strip()

# check if Python is called on the first line with this expression
FIRST_LINE_RE = re.compile(b'^#!.*pythonw?[0-9.]*([ \t].*)?$')
# Template for generated entry-point scripts; %(module)s and %(func)s are
# filled in from an export entry (see ScriptMaker._get_script_text()).
SCRIPT_TEMPLATE = '''# -*- coding: utf-8 -*-
if __name__ == '__main__':
    import sys, re

    def _resolve(module, func):
        __import__(module)
        mod = sys.modules[module]
        parts = func.split('.')
        result = getattr(mod, parts.pop(0))
        for p in parts:
            result = getattr(result, p)
        return result

    try:
        sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])

        func = _resolve('%(module)s', '%(func)s')
        rc = func() # None interpreted as 0
    except Exception as e:  # only supporting Python >= 2.6
        sys.stderr.write('%%s\\n' %% e)
        rc = 1
    sys.exit(rc)
'''


def _enquote_executable(executable):
    if ' ' in executable:
        # make sure we quote only the executable in case of env
        # for example /usr/bin/env "/dir with spaces/bin/jython"
        # instead of "/usr/bin/env /dir with spaces/bin/jython"
        # otherwise whole
        if executable.startswith('/usr/bin/env '):
            env, _executable = executable.split(' ', 1)
            if ' ' in _executable and not _executable.startswith('"'):
                executable = '%s "%s"' % (env, _executable)
        else:
            if not executable.startswith('"'):
                executable = '"%s"' % executable
    return executable


class ScriptMaker(object):
    """
    A class to copy or create scripts from source scripts or callable
    specifications.
    """
    # Template for generated entry-point scripts; subclasses may override.
    script_template = SCRIPT_TEMPLATE

    executable = None  # for shebangs

    def __init__(self, source_dir, target_dir, add_launchers=True,
                 dry_run=False, fileop=None):
        """
        Initialise an instance.

        :param source_dir: Directory from which source scripts are copied.
        :param target_dir: Directory where scripts are written.
        :param add_launchers: If True, wrap scripts in .exe launchers on
                              Windows (has no effect elsewhere).
        :param dry_run: Passed to the default FileOperator when *fileop*
                        is not supplied.
        :param fileop: Optional FileOperator used for all file operations.
        """
        self.source_dir = source_dir
        self.target_dir = target_dir
        self.add_launchers = add_launchers
        self.force = False      # if True, copy scripts even when up to date
        self.clobber = False    # if True, overwrite existing target files
        # It only makes sense to set mode bits on POSIX.
        self.set_mode = (os.name == 'posix') or (os.name == 'java' and
                                                 os._name == 'posix')
        # '' -> plain script name; 'X.Y' -> name suffixed with '-X.Y'.
        self.variants = set(('', 'X.Y'))
        self._fileop = fileop or FileOperator(dry_run)

        self._is_nt = os.name == 'nt' or (
            os.name == 'java' and os._name == 'nt')

    def _get_alternate_executable(self, executable, options):
        # GUI scripts on Windows use pythonw so no console window opens.
        if options.get('gui', False) and self._is_nt:  # pragma: no cover
            dn, fn = os.path.split(executable)
            fn = fn.replace('python', 'pythonw')
            executable = os.path.join(dn, fn)
        return executable

    if sys.platform.startswith('java'):  # pragma: no cover
        def _is_shell(self, executable):
            """
            Determine if the specified executable is a script
            (contains a #! line)
            """
            try:
                with open(executable) as fp:
                    return fp.read(2) == '#!'
            except (OSError, IOError):
                logger.warning('Failed to open %s', executable)
                return False

        def _fix_jython_executable(self, executable):
            # Jython can't always be used directly in a shebang; fall back
            # to /usr/bin/env except in the cases handled below.
            if self._is_shell(executable):
                # Workaround for Jython is not needed on Linux systems.
                import java

                if java.lang.System.getProperty('os.name') == 'Linux':
                    return executable
            elif executable.lower().endswith('jython.exe'):
                # Use wrapper exe for Jython on Windows
                return executable
            return '/usr/bin/env %s' % executable

    def _get_shebang(self, encoding, post_interp=b'', options=None):
        """
        Build the shebang line (as bytes) for a generated script.

        :param encoding: Encoding of the script the shebang will head; the
                         shebang must also be decodable using it.
        :param post_interp: Bytes appended after the interpreter path
                            (e.g. interpreter arguments).
        :param options: Optional dict; 'gui' selects pythonw on Windows.
        :raises ValueError: If the shebang cannot be decoded as UTF-8 or
                            as *encoding*.
        """
        enquote = True
        if self.executable:
            executable = self.executable
            enquote = False     # assume this will be taken care of
        elif not sysconfig.is_python_build():
            executable = get_executable()
        elif in_venv():  # pragma: no cover
            executable = os.path.join(sysconfig.get_path('scripts'),
                            'python%s' % sysconfig.get_config_var('EXE'))
        else:  # pragma: no cover
            executable = os.path.join(
                sysconfig.get_config_var('BINDIR'),
               'python%s%s' % (sysconfig.get_config_var('VERSION'),
                               sysconfig.get_config_var('EXE')))
        if options:
            executable = self._get_alternate_executable(executable, options)

        if sys.platform.startswith('java'):  # pragma: no cover
            executable = self._fix_jython_executable(executable)
        # Normalise case for Windows
        executable = os.path.normcase(executable)
        # If the user didn't specify an executable, it may be necessary to
        # cater for executable paths with spaces (not uncommon on Windows)
        if enquote:
            executable = _enquote_executable(executable)
        # Issue #51: don't use fsencode, since we later try to
        # check that the shebang is decodable using utf-8.
        executable = executable.encode('utf-8')
        # in case of IronPython, play safe and enable frames support
        # NOTE(review): the str-in-bytes membership tests below only run on
        # IronPython ('cli', Python 2-based) thanks to short-circuiting; on
        # Python 3 they would raise TypeError.
        if (sys.platform == 'cli' and '-X:Frames' not in post_interp
            and '-X:FullFrames' not in post_interp):  # pragma: no cover
            post_interp += b' -X:Frames'
        shebang = b'#!' + executable + post_interp + b'\n'
        # Python parser starts to read a script using UTF-8 until
        # it gets a #coding:xxx cookie. The shebang has to be the
        # first line of a file, the #coding:xxx cookie cannot be
        # written before. So the shebang has to be decodable from
        # UTF-8.
        try:
            shebang.decode('utf-8')
        except UnicodeDecodeError:  # pragma: no cover
            raise ValueError(
                'The shebang (%r) is not decodable from utf-8' % shebang)
        # If the script is encoded to a custom encoding (use a
        # #coding:xxx cookie), the shebang has to be decodable from
        # the script encoding too.
        if encoding != 'utf-8':
            try:
                shebang.decode(encoding)
            except UnicodeDecodeError:  # pragma: no cover
                raise ValueError(
                    'The shebang (%r) is not decodable '
                    'from the script encoding (%r)' % (shebang, encoding))
        return shebang

    def _get_script_text(self, entry):
        # Fill the script template with the export entry's module and callable.
        return self.script_template % dict(module=entry.prefix,
                                           func=entry.suffix)

    # Default Windows manifest; %s is the executable's base name.
    manifest = _DEFAULT_MANIFEST

    def get_manifest(self, exename):
        """Return the Windows manifest XML for the named executable."""
        base = os.path.basename(exename)
        return self.manifest % base

    def _write_script(self, names, shebang, script_bytes, filenames, ext):
        """
        Write the script under each name in *names*, appending each path
        written to *filenames*.

        With launchers enabled on Windows, the script is zipped as
        __main__.py and appended to a launcher .exe; otherwise
        shebang + script is written directly.
        """
        use_launcher = self.add_launchers and self._is_nt
        linesep = os.linesep.encode('utf-8')
        if not use_launcher:
            script_bytes = shebang + linesep + script_bytes
        else:  # pragma: no cover
            if ext == 'py':
                launcher = self._get_launcher('t')  # console launcher
            else:
                launcher = self._get_launcher('w')  # GUI launcher
            stream = BytesIO()
            with ZipFile(stream, 'w') as zf:
                zf.writestr('__main__.py', script_bytes)
            zip_data = stream.getvalue()
            # Launcher exe + shebang + zip: an .exe that carries its script.
            script_bytes = launcher + shebang + linesep + zip_data
        for name in names:
            outname = os.path.join(self.target_dir, name)
            if use_launcher:  # pragma: no cover
                n, e = os.path.splitext(outname)
                if e.startswith('.py'):
                    outname = n
                outname = '%s.exe' % outname
                try:
                    self._fileop.write_binary_file(outname, script_bytes)
                except Exception:
                    # Failed writing an executable - it might be in use.
                    logger.warning('Failed to write executable - trying to '
                                   'use .deleteme logic')
                    dfname = '%s.deleteme' % outname
                    if os.path.exists(dfname):
                        os.remove(dfname)       # Not allowed to fail here
                    os.rename(outname, dfname)  # nor here
                    self._fileop.write_binary_file(outname, script_bytes)
                    logger.debug('Able to replace executable using '
                                 '.deleteme logic')
                    try:
                        os.remove(dfname)
                    except Exception:
                        pass    # still in use - ignore error
            else:
                if self._is_nt and not outname.endswith('.' + ext):  # pragma: no cover
                    outname = '%s.%s' % (outname, ext)
                if os.path.exists(outname) and not self.clobber:
                    logger.warning('Skipping existing file %s', outname)
                    continue
                self._fileop.write_binary_file(outname, script_bytes)
                if self.set_mode:
                    self._fileop.set_executable_mode([outname])
            filenames.append(outname)

    def _make_script(self, entry, filenames, options=None):
        """
        Generate a script from an export entry, writing one file per
        enabled variant and appending written paths to *filenames*.
        """
        post_interp = b''
        if options:
            args = options.get('interpreter_args', [])
            if args:
                args = ' %s' % ' '.join(args)
                post_interp = args.encode('utf-8')
        shebang = self._get_shebang('utf-8', post_interp, options=options)
        script = self._get_script_text(entry).encode('utf-8')
        name = entry.name
        scriptnames = set()
        if '' in self.variants:
            scriptnames.add(name)
        if 'X' in self.variants:
            scriptnames.add('%s%s' % (name, sys.version[0]))
        if 'X.Y' in self.variants:
            # NOTE(review): sys.version[:3] yields '3.1' for Python 3.10+;
            # confirm whether version_info-based naming is intended.
            scriptnames.add('%s-%s' % (name, sys.version[:3]))
        if options and options.get('gui', False):
            ext = 'pyw'
        else:
            ext = 'py'
        self._write_script(scriptnames, shebang, script, filenames, ext)

    def _copy_script(self, script, filenames):
        """
        Copy an existing script, rewriting its shebang when the first line
        invokes Python; append the written path to *filenames*.
        """
        adjust = False
        script = os.path.join(self.source_dir, convert_path(script))
        outname = os.path.join(self.target_dir, os.path.basename(script))
        if not self.force and not self._fileop.newer(script, outname):
            logger.debug('not copying %s (up-to-date)', script)
            return

        # Always open the file, but ignore failures in dry-run mode --
        # that way, we'll get accurate feedback if we can read the
        # script.
        try:
            f = open(script, 'rb')
        except IOError:  # pragma: no cover
            if not self.dry_run:
                raise
            f = None
        else:
            first_line = f.readline()
            if not first_line:  # pragma: no cover
                # NOTE(review): get_command_name() is not defined on this
                # class -- presumably provided by a distutils-command
                # subclass; this branch would raise AttributeError otherwise.
                logger.warning('%s: %s is an empty file (skipping)',
                               self.get_command_name(),  script)
                return

            match = FIRST_LINE_RE.match(first_line.replace(b'\r\n', b'\n'))
            if match:
                adjust = True
                post_interp = match.group(1) or b''

        if not adjust:
            # No shebang rewrite needed: plain copy.
            if f:
                f.close()
            self._fileop.copy_file(script, outname)
            if self.set_mode:
                self._fileop.set_executable_mode([outname])
            filenames.append(outname)
        else:
            logger.info('copying and adjusting %s -> %s', script,
                        self.target_dir)
            if not self._fileop.dry_run:
                encoding, lines = detect_encoding(f.readline)
                f.seek(0)
                shebang = self._get_shebang(encoding, post_interp)
                if b'pythonw' in first_line:  # pragma: no cover
                    ext = 'pyw'
                else:
                    ext = 'py'
                n = os.path.basename(outname)
                self._write_script([n], shebang, f.read(), filenames, ext)
            if f:
                f.close()

    @property
    def dry_run(self):
        # Delegates to the underlying FileOperator.
        return self._fileop.dry_run

    @dry_run.setter
    def dry_run(self, value):
        self._fileop.dry_run = value

    if os.name == 'nt' or (os.name == 'java' and os._name == 'nt'):  # pragma: no cover
        # Executable launcher support.
        # Launchers are from https://bitbucket.org/vinay.sajip/simple_launcher/

        def _get_launcher(self, kind):
            """
            Return launcher executable bytes for *kind* ('t' = console,
            'w' = GUI), matching the interpreter's pointer size.
            """
            if struct.calcsize('P') == 8:   # 64-bit
                bits = '64'
            else:
                bits = '32'
            name = '%s%s.exe' % (kind, bits)
            # Issue 31: don't hardcode an absolute package name, but
            # determine it relative to the current package
            distlib_package = __name__.rsplit('.', 1)[0]
            result = finder(distlib_package).find(name).bytes
            return result

    # Public API follows

    def make(self, specification, options=None):
        """
        Make a script.

        :param specification: The specification, which is either a valid export
                              entry specification (to make a script from a
                              callable) or a filename (to make a script by
                              copying from a source location).
        :param options: A dictionary of options controlling script generation.
        :return: A list of all absolute pathnames written to.
        """
        filenames = []
        entry = get_export_entry(specification)
        if entry is None:
            self._copy_script(specification, filenames)
        else:
            self._make_script(entry, filenames, options=options)
        return filenames

    def make_multiple(self, specifications, options=None):
        """
        Take a list of specifications and make scripts from them.

        :param specifications: A list of specifications.
        :param options: A dictionary of options controlling script generation.
        :return: A list of all absolute pathnames written to.
        """
        filenames = []
        for specification in specifications:
            filenames.extend(self.make(specification, options))
        return filenames
_vendor/distlib/database.py000064400000141010151733136310011750 0ustar00# -*- coding: utf-8 -*-
#
# Copyright (C) 2012-2016 The Python Software Foundation.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
"""PEP 376 implementation."""

from __future__ import unicode_literals

import base64
import codecs
import contextlib
import hashlib
import logging
import os
import posixpath
import sys
import zipimport

from . import DistlibException, resources
from .compat import StringIO
from .version import get_scheme, UnsupportedVersionError
from .metadata import Metadata, METADATA_FILENAME, WHEEL_METADATA_FILENAME
from .util import (parse_requirement, cached_property, parse_name_and_version,
                   read_exports, write_exports, CSVReader, CSVWriter)


__all__ = ['Distribution', 'BaseInstalledDistribution',
           'InstalledDistribution', 'EggInfoDistribution',
           'DistributionPath']


logger = logging.getLogger(__name__)

# Filenames of distlib-specific metadata stored in a .dist-info directory.
EXPORTS_FILENAME = 'pydist-exports.json'
COMMANDS_FILENAME = 'pydist-commands.json'

# Files which may appear in a PEP 376 .dist-info directory.
DIST_FILES = ('INSTALLER', METADATA_FILENAME, 'RECORD', 'REQUESTED',
              'RESOURCES', EXPORTS_FILENAME, 'SHARED')

DISTINFO_EXT = '.dist-info'


class _Cache(object):
    """
    A simple cache mapping names and .dist-info paths to distributions
    """
    def __init__(self):
        """
        Initialise an instance. There is normally one for each DistributionPath.
        """
        self.name = {}
        self.path = {}
        self.generated = False

    def clear(self):
        """
        Clear the cache, setting it to its initial state.
        """
        self.name.clear()
        self.path.clear()
        self.generated = False

    def add(self, dist):
        """
        Add a distribution to the cache.
        :param dist: The distribution to add.
        """
        if dist.path not in self.path:
            self.path[dist.path] = dist
            self.name.setdefault(dist.key, []).append(dist)


class DistributionPath(object):
    """
    Represents a set of distributions installed on a path (typically sys.path).
    """
    def __init__(self, path=None, include_egg=False):
        """
        Create an instance from a path, optionally including legacy (distutils/
        setuptools/distribute) distributions.
        :param path: The path to use, as a list of directories. If not specified,
                     sys.path is used.
        :param include_egg: If True, this instance will look for and return legacy
                            distributions as well as those based on PEP 376.
        """
        if path is None:
            path = sys.path
        self.path = path
        self._include_dist = True
        self._include_egg = include_egg

        self._cache = _Cache()
        self._cache_egg = _Cache()
        self._cache_enabled = True
        self._scheme = get_scheme('default')

    def _get_cache_enabled(self):
        return self._cache_enabled

    def _set_cache_enabled(self, value):
        self._cache_enabled = value

    cache_enabled = property(_get_cache_enabled, _set_cache_enabled)

    def clear_cache(self):
        """
        Clears the internal cache.
        """
        self._cache.clear()
        self._cache_egg.clear()

    def _yield_distributions(self):
        """
        Yield .dist-info and/or .egg(-info) distributions.
        """
        # We need to check if we've seen some resources already, because on
        # some Linux systems (e.g. some Debian/Ubuntu variants) there are
        # symlinks which alias other files in the environment.
        seen = set()
        for path in self.path:
            finder = resources.finder_for_path(path)
            if finder is None:
                continue
            r = finder.find('')
            if not r or not r.is_container:
                continue
            rset = sorted(r.resources)
            for entry in rset:
                r = finder.find(entry)
                if not r or r.path in seen:
                    continue
                if self._include_dist and entry.endswith(DISTINFO_EXT):
                    possible_filenames = [METADATA_FILENAME, WHEEL_METADATA_FILENAME]
                    for metadata_filename in possible_filenames:
                        metadata_path = posixpath.join(entry, metadata_filename)
                        pydist = finder.find(metadata_path)
                        if pydist:
                            break
                    else:
                        # No metadata file found under this .dist-info dir.
                        continue

                    with contextlib.closing(pydist.as_stream()) as stream:
                        metadata = Metadata(fileobj=stream, scheme='legacy')
                    logger.debug('Found %s', r.path)
                    seen.add(r.path)
                    yield new_dist_class(r.path, metadata=metadata,
                                         env=self)
                elif self._include_egg and entry.endswith(('.egg-info',
                                                          '.egg')):
                    logger.debug('Found %s', r.path)
                    seen.add(r.path)
                    yield old_dist_class(r.path, self)

    def _generate_cache(self):
        """
        Scan the path for distributions and populate the cache with
        those that are found.
        """
        gen_dist = not self._cache.generated
        gen_egg = self._include_egg and not self._cache_egg.generated
        if gen_dist or gen_egg:
            for dist in self._yield_distributions():
                if isinstance(dist, InstalledDistribution):
                    self._cache.add(dist)
                else:
                    self._cache_egg.add(dist)

            if gen_dist:
                self._cache.generated = True
            if gen_egg:
                self._cache_egg.generated = True

    @classmethod
    def distinfo_dirname(cls, name, version):
        """
        The *name* and *version* parameters are converted into their
        filename-escaped form, i.e. any ``'-'`` characters are replaced
        with ``'_'`` other than the one in ``'dist-info'`` and the one
        separating the name from the version number.

        :parameter name: is converted to a standard distribution name by replacing
                         any runs of non- alphanumeric characters with a single
                         ``'-'``.
        :type name: string
        :parameter version: is converted to a standard version string. Spaces
                            become dots, and all other non-alphanumeric characters
                            (except dots) become dashes, with runs of multiple
                            dashes condensed to a single dash.
        :type version: string
        :returns: directory name
        :rtype: string"""
        name = name.replace('-', '_')
        return '-'.join([name, version]) + DISTINFO_EXT

    def get_distributions(self):
        """
        Provides an iterator that looks for distributions and returns
        :class:`InstalledDistribution` or
        :class:`EggInfoDistribution` instances for each one of them.

        :rtype: iterator of :class:`InstalledDistribution` and
                :class:`EggInfoDistribution` instances
        """
        if not self._cache_enabled:
            for dist in self._yield_distributions():
                yield dist
        else:
            self._generate_cache()

            for dist in self._cache.path.values():
                yield dist

            if self._include_egg:
                for dist in self._cache_egg.path.values():
                    yield dist

    def get_distribution(self, name):
        """
        Looks for a named distribution on the path.

        This function only returns the first result found, as no more than one
        value is expected. If nothing is found, ``None`` is returned.

        :rtype: :class:`InstalledDistribution`, :class:`EggInfoDistribution`
                or ``None``
        """
        result = None
        name = name.lower()
        if not self._cache_enabled:
            for dist in self._yield_distributions():
                if dist.key == name:
                    result = dist
                    break
        else:
            self._generate_cache()

            if name in self._cache.name:
                result = self._cache.name[name][0]
            elif self._include_egg and name in self._cache_egg.name:
                result = self._cache_egg.name[name][0]
        return result

    def provides_distribution(self, name, version=None):
        """
        Iterate over all distributions, yielding each distribution which
        provides *name*. If a *version* is specified, it is used to filter
        the results.

        :parameter version: a version specifier that indicates the version
                            required, conforming to the format in ``PEP-345``

        :type name: string
        :type version: string
        :raises DistlibException: if *name*/*version* cannot be parsed into
                                  a valid matcher.
        """
        matcher = None
        if version is not None:
            try:
                matcher = self._scheme.matcher('%s (%s)' % (name, version))
            except ValueError:
                raise DistlibException('invalid name or version: %r, %r' %
                                      (name, version))

        for dist in self.get_distributions():
            provided = dist.provides

            for p in provided:
                p_name, p_ver = parse_name_and_version(p)
                if matcher is None:
                    if p_name == name:
                        yield dist
                        break
                else:
                    if p_name == name and matcher.match(p_ver):
                        yield dist
                        break

    def get_file_path(self, name, relative_path):
        """
        Return the path to a resource file.

        :raises LookupError: if no distribution called *name* is found.
        """
        dist = self.get_distribution(name)
        if dist is None:
            raise LookupError('no distribution named %r found' % name)
        return dist.get_resource_path(relative_path)

    def get_exported_entries(self, category, name=None):
        """
        Return all of the exported entries in a particular category.

        :param category: The category to search for entries.
        :param name: If specified, only entries with that name are returned.
        """
        for dist in self.get_distributions():
            r = dist.exports
            if category in r:
                d = r[category]
                if name is not None:
                    if name in d:
                        yield d[name]
                else:
                    for v in d.values():
                        yield v


class Distribution(object):
    """
    A base class for distributions, whether installed or from indexes.
    Either way, it must have some metadata, so that's all that's needed
    for construction.
    """

    # True when the distribution is known to be needed only at build time
    # (i.e. it is not needed after installation).
    build_time_dependency = False

    # Whether the ``REQUESTED`` metadata file is present - in other words,
    # whether the package was installed by explicit user request rather
    # than being pulled in as a dependency.
    requested = False

    def __init__(self, metadata):
        """
        Initialise an instance.
        :param metadata: The instance of :class:`Metadata` describing this
        distribution.
        """
        self.metadata = metadata
        self.name = metadata.name
        self.key = self.name.lower()    # for case-insensitive comparisons
        self.version = metadata.version
        # Fields filled in later by locator / resolution machinery.
        self.locator = None
        self.digest = None
        self.digests = {}
        self.download_urls = set()
        self.extras = None      # additional features requested
        self.context = None     # environment marker overrides

    @property
    def source_url(self):
        """
        The source archive download URL for this distribution.
        """
        return self.metadata.source_url

    download_url = source_url   # legacy alias, kept for backward compatibility

    @property
    def name_and_version(self):
        """
        The distribution displayed as ``name (version)``.
        """
        return '%s (%s)' % (self.name, self.version)

    @property
    def provides(self):
        """
        The distribution names and versions provided by this distribution,
        as a list of "name (version)" strings. This distribution itself is
        always included.
        """
        provided = self.metadata.provides
        own = '%s (%s)' % (self.name, self.version)
        if own not in provided:
            provided.append(own)
        return provided

    def _get_requirements(self, req_attr):
        # Resolve the named requirements attribute on the metadata, taking
        # the requested extras and environment-marker context into account.
        meta = self.metadata
        logger.debug('Getting requirements from metadata %r', meta.todict())
        return set(meta.get_requirements(getattr(meta, req_attr),
                                         extras=self.extras,
                                         env=self.context))

    @property
    def run_requires(self):
        return self._get_requirements('run_requires')

    @property
    def meta_requires(self):
        return self._get_requirements('meta_requires')

    @property
    def build_requires(self):
        return self._get_requirements('build_requires')

    @property
    def test_requires(self):
        return self._get_requirements('test_requires')

    @property
    def dev_requires(self):
        return self._get_requirements('dev_requires')

    def matches_requirement(self, req):
        """
        Say if this instance matches (fulfills) a requirement.
        :param req: The requirement to match.
        :rtype req: str
        :return: True if it matches, else False.
        """
        # A requirement may carry extras - parse it so they are not passed
        # on to the matcher.
        parsed = parse_requirement(req)
        scheme = get_scheme(self.metadata.scheme)
        try:
            matcher = scheme.matcher(parsed.requirement)
        except UnsupportedVersionError:
            # XXX compat-mode if cannot read the version
            logger.warning('could not read version %r - using name only',
                           req)
            matcher = scheme.matcher(req.split()[0])

        wanted = matcher.key   # case-insensitive

        for provided in self.provides:
            p_name, p_ver = parse_name_and_version(provided)
            if p_name != wanted:
                continue
            try:
                # Return the verdict for the first provide whose version
                # the matcher can interpret.
                return matcher.match(p_ver)
            except UnsupportedVersionError:
                pass
        return False

    def __repr__(self):
        """
        Return a textual representation of this instance,
        """
        url = self.source_url
        suffix = ' [%s]' % url if url else ''
        return '<Distribution %s (%s)%s>' % (self.name, self.version, suffix)

    def __eq__(self, other):
        """
        See if this distribution is the same as another.
        :param other: The distribution to compare with. To be equal to one
                      another. distributions must have the same type, name,
                      version and source_url.
        :return: True if it is the same, else False.
        """
        if type(other) is not type(self):
            return False
        return (self.name == other.name and
                self.version == other.version and
                self.source_url == other.source_url)

    def __hash__(self):
        """
        Compute hash in a way which matches the equality test.
        """
        return hash(self.name) + hash(self.version) + hash(self.source_url)


class BaseInstalledDistribution(Distribution):
    """
    This is the base class for installed distributions (whether PEP 376 or
    legacy).
    """

    # Default hash algorithm name; subclasses may override (e.g. 'sha256').
    hasher = None

    def __init__(self, metadata, path, env=None):
        """
        Initialise an instance.
        :param metadata: An instance of :class:`Metadata` which describes the
                         distribution. This will normally have been initialised
                         from a metadata file in the ``path``.
        :param path:     The path of the ``.dist-info`` or ``.egg-info``
                         directory for the distribution.
        :param env:      This is normally the :class:`DistributionPath`
                         instance where this distribution was found.
        """
        super(BaseInstalledDistribution, self).__init__(metadata)
        self.path = path
        self.dist_path = env

    def get_hash(self, data, hasher=None):
        """
        Get the hash of some data, using a particular hash algorithm, if
        specified.

        :param data: The data to be hashed.
        :type data: bytes
        :param hasher: The name of a hash implementation, supported by hashlib,
                       or ``None``. Examples of valid values are ``'sha1'``,
                       ``'sha224'``, ``'sha384'``, '``sha256'``, ``'md5'`` and
                       ``'sha512'``. If no hasher is specified, the ``hasher``
                       attribute of the :class:`InstalledDistribution` instance
                       is used. If the hasher is determined to be ``None``, MD5
                       is used as the hashing algorithm.
        :returns: The hash of the data. If a hasher was explicitly specified,
                  the returned hash will be prefixed with the specified hasher
                  followed by '='.
        :rtype: str
        """
        if hasher is None:
            hasher = self.hasher
        if hasher is None:
            hasher = hashlib.md5
            prefix = ''
        else:
            # Build the prefix from the hasher actually being used. The
            # previous code used self.hasher here, which produced a wrong
            # prefix (and hence spurious mismatches in check_installed_files)
            # whenever an explicit hasher differing from the instance default
            # was passed in.
            prefix = '%s=' % hasher
            hasher = getattr(hashlib, hasher)
        digest = hasher(data).digest()
        # PEP 376 style: urlsafe base64 without trailing '=' padding.
        digest = base64.urlsafe_b64encode(digest).rstrip(b'=').decode('ascii')
        return '%s%s' % (prefix, digest)


class InstalledDistribution(BaseInstalledDistribution):
    """
    Created with the *path* of the ``.dist-info`` directory provided to the
    constructor. It reads the metadata contained in ``pydist.json`` when it is
    instantiated, or uses a passed in Metadata instance (useful for when
    dry-run mode is being used).
    """

    hasher = 'sha256'

    def __init__(self, path, metadata=None, env=None):
        """
        Initialise an instance.

        :param path: The path of the ``.dist-info`` directory.
        :param metadata: An optional :class:`Metadata` instance. If not
                         supplied (and not available via the environment
                         cache), the metadata is read from the directory.
        :param env: The :class:`DistributionPath` in which the distribution
                    was found; used for caching, if enabled.
        :raises ValueError: if no finder is available for ``path``, or no
                            metadata file can be found.
        """
        self.finder = finder = resources.finder_for_path(path)
        if finder is None:
            # NOTE: this previously dropped into the debugger with
            # `import pdb; pdb.set_trace()` - leftover debug code. Raise a
            # meaningful error instead.
            raise ValueError('finder unavailable for %s' % path)
        if env and env._cache_enabled and path in env._cache.path:
            metadata = env._cache.path[path].metadata
        elif metadata is None:
            r = finder.find(METADATA_FILENAME)
            # Temporary - for Wheel 0.23 support
            if r is None:
                r = finder.find(WHEEL_METADATA_FILENAME)
            # Temporary - for legacy support
            if r is None:
                r = finder.find('METADATA')
            if r is None:
                raise ValueError('no %s found in %s' % (METADATA_FILENAME,
                                                        path))
            with contextlib.closing(r.as_stream()) as stream:
                metadata = Metadata(fileobj=stream, scheme='legacy')

        super(InstalledDistribution, self).__init__(metadata, path, env)

        if env and env._cache_enabled:
            env._cache.add(self)

        # Per PEP 376, a REQUESTED marker file indicates the distribution
        # was installed by explicit user request. (This lookup was
        # previously wrapped in a try/except that invoked pdb.set_trace() -
        # leftover debug code, now removed.)
        r = finder.find('REQUESTED')
        self.requested = r is not None

    def __repr__(self):
        return '<InstalledDistribution %r %s at %r>' % (
            self.name, self.version, self.path)

    def __str__(self):
        return "%s %s" % (self.name, self.version)

    def _get_records(self):
        """
        Get the list of installed files for the distribution
        :return: A list of tuples of path, hash and size. Note that hash and
                 size might be ``None`` for some entries. The path is exactly
                 as stored in the file (which is as in PEP 376).
        """
        results = []
        r = self.get_distinfo_resource('RECORD')
        with contextlib.closing(r.as_stream()) as stream:
            with CSVReader(stream=stream) as record_reader:
                # Base location is parent dir of .dist-info dir
                for row in record_reader:
                    # Pad short rows so each yields (path, checksum, size).
                    missing = [None for i in range(len(row), 3)]
                    path, checksum, size = row + missing
                    results.append((path, checksum, size))
        return results

    @cached_property
    def exports(self):
        """
        Return the information exported by this distribution.
        :return: A dictionary of exports, mapping an export category to a dict
                 of :class:`ExportEntry` instances describing the individual
                 export entries, and keyed by name.
        """
        result = {}
        r = self.get_distinfo_resource(EXPORTS_FILENAME)
        if r:
            result = self.read_exports()
        return result

    def read_exports(self):
        """
        Read exports data from a file in .ini format.

        :return: A dictionary of exports, mapping an export category to a list
                 of :class:`ExportEntry` instances describing the individual
                 export entries.
        """
        result = {}
        r = self.get_distinfo_resource(EXPORTS_FILENAME)
        if r:
            with contextlib.closing(r.as_stream()) as stream:
                result = read_exports(stream)
        return result

    def write_exports(self, exports):
        """
        Write a dictionary of exports to a file in .ini format.
        :param exports: A dictionary of exports, mapping an export category to
                        a list of :class:`ExportEntry` instances describing the
                        individual export entries.
        """
        rf = self.get_distinfo_file(EXPORTS_FILENAME)
        with open(rf, 'w') as f:
            write_exports(exports, f)

    def get_resource_path(self, relative_path):
        """
        NOTE: This API may change in the future.

        Return the absolute path to a resource file with the given relative
        path.

        :param relative_path: The path, relative to .dist-info, of the resource
                              of interest.
        :return: The absolute path where the resource is to be found.
        :raises KeyError: if no resource with that relative path is recorded.
        """
        r = self.get_distinfo_resource('RESOURCES')
        with contextlib.closing(r.as_stream()) as stream:
            with CSVReader(stream=stream) as resources_reader:
                for relative, destination in resources_reader:
                    if relative == relative_path:
                        return destination
        raise KeyError('no resource file with relative path %r '
                       'is installed' % relative_path)

    def list_installed_files(self):
        """
        Iterates over the ``RECORD`` entries and returns a tuple
        ``(path, hash, size)`` for each line.

        :returns: iterator of (path, hash, size)
        """
        for result in self._get_records():
            yield result

    def write_installed_files(self, paths, prefix, dry_run=False):
        """
        Writes the ``RECORD`` file, using the ``paths`` iterable passed in. Any
        existing ``RECORD`` file is silently overwritten.

        prefix is used to determine when to write absolute paths.
        """
        prefix = os.path.join(prefix, '')
        base = os.path.dirname(self.path)
        base_under_prefix = base.startswith(prefix)
        base = os.path.join(base, '')
        record_path = self.get_distinfo_file('RECORD')
        logger.info('creating %s', record_path)
        if dry_run:
            return None
        with CSVWriter(record_path) as writer:
            for path in paths:
                if os.path.isdir(path) or path.endswith(('.pyc', '.pyo')):
                    # do not put size and hash, as in PEP-376
                    hash_value = size = ''
                else:
                    size = '%d' % os.path.getsize(path)
                    with open(path, 'rb') as fp:
                        hash_value = self.get_hash(fp.read())
                # Store paths under the install base as relative paths.
                if path.startswith(base) or (base_under_prefix and
                                             path.startswith(prefix)):
                    path = os.path.relpath(path, base)
                writer.writerow((path, hash_value, size))

            # add the RECORD file itself
            if record_path.startswith(base):
                record_path = os.path.relpath(record_path, base)
            writer.writerow((record_path, '', ''))
        return record_path

    def check_installed_files(self):
        """
        Checks that the hashes and sizes of the files in ``RECORD`` are
        matched by the files themselves. Returns a (possibly empty) list of
        mismatches. Each entry in the mismatch list will be a tuple consisting
        of the path, 'exists', 'size' or 'hash' according to what didn't match
        (existence is checked first, then size, then hash), the expected
        value and the actual value.
        """
        mismatches = []
        base = os.path.dirname(self.path)
        record_path = self.get_distinfo_file('RECORD')
        for path, hash_value, size in self.list_installed_files():
            if not os.path.isabs(path):
                path = os.path.join(base, path)
            if path == record_path:
                # RECORD can't contain an up-to-date hash of itself.
                continue
            if not os.path.exists(path):
                mismatches.append((path, 'exists', True, False))
            elif os.path.isfile(path):
                actual_size = str(os.path.getsize(path))
                if size and actual_size != size:
                    mismatches.append((path, 'size', size, actual_size))
                elif hash_value:
                    # Hash entries may carry an explicit algorithm prefix,
                    # e.g. 'sha256=...'.
                    if '=' in hash_value:
                        hasher = hash_value.split('=', 1)[0]
                    else:
                        hasher = None

                    with open(path, 'rb') as f:
                        actual_hash = self.get_hash(f.read(), hasher)
                        if actual_hash != hash_value:
                            mismatches.append((path, 'hash', hash_value, actual_hash))
        return mismatches

    @cached_property
    def shared_locations(self):
        """
        A dictionary of shared locations whose keys are in the set 'prefix',
        'purelib', 'platlib', 'scripts', 'headers', 'data' and 'namespace'.
        The corresponding value is the absolute path of that category for
        this distribution, and takes into account any paths selected by the
        user at installation time (e.g. via command-line arguments). In the
        case of the 'namespace' key, this would be a list of absolute paths
        for the roots of namespace packages in this distribution.

        The first time this property is accessed, the relevant information is
        read from the SHARED file in the .dist-info directory.
        """
        result = {}
        shared_path = os.path.join(self.path, 'SHARED')
        if os.path.isfile(shared_path):
            with codecs.open(shared_path, 'r', encoding='utf-8') as f:
                lines = f.read().splitlines()
            for line in lines:
                key, value = line.split('=', 1)
                if key == 'namespace':
                    result.setdefault(key, []).append(value)
                else:
                    result[key] = value
        return result

    def write_shared_locations(self, paths, dry_run=False):
        """
        Write shared location information to the SHARED file in .dist-info.
        :param paths: A dictionary as described in the documentation for
        :meth:`shared_locations`.
        :param dry_run: If True, the action is logged but no file is actually
                        written.
        :return: The path of the file written to.
        """
        shared_path = os.path.join(self.path, 'SHARED')
        logger.info('creating %s', shared_path)
        if dry_run:
            return None
        lines = []
        for key in ('prefix', 'lib', 'headers', 'scripts', 'data'):
            path = paths[key]
            if os.path.isdir(paths[key]):
                lines.append('%s=%s' % (key, path))
        for ns in paths.get('namespace', ()):
            lines.append('namespace=%s' % ns)

        with codecs.open(shared_path, 'w', encoding='utf-8') as f:
            f.write('\n'.join(lines))
        return shared_path

    def get_distinfo_resource(self, path):
        """
        Return the resource for a file under the ``.dist-info`` directory.

        :param path: A filename from the PEP 376 ``DIST_FILES`` set.
        :raises DistlibException: if *path* is not a valid dist-info filename
                                  or no finder can be obtained.
        """
        if path not in DIST_FILES:
            raise DistlibException('invalid path for a dist-info file: '
                                   '%r at %r' % (path, self.path))
        finder = resources.finder_for_path(self.path)
        if finder is None:
            raise DistlibException('Unable to get a finder for %s' % self.path)
        return finder.find(path)

    def get_distinfo_file(self, path):
        """
        Returns a path located under the ``.dist-info`` directory. Returns a
        string representing the path.

        :parameter path: a ``'/'``-separated path relative to the
                         ``.dist-info`` directory or an absolute path;
                         If *path* is an absolute path and doesn't start
                         with the ``.dist-info`` directory path,
                         a :class:`DistlibException` is raised
        :type path: str
        :rtype: str
        """
        # Check if it is an absolute path  # XXX use relpath, add tests
        if path.find(os.sep) >= 0:
            # it's an absolute path?
            distinfo_dirname, path = path.split(os.sep)[-2:]
            if distinfo_dirname != self.path.split(os.sep)[-1]:
                raise DistlibException(
                    'dist-info file %r does not belong to the %r %s '
                    'distribution' % (path, self.name, self.version))

        # The file must be relative
        if path not in DIST_FILES:
            raise DistlibException('invalid path for a dist-info file: '
                                   '%r at %r' % (path, self.path))

        return os.path.join(self.path, path)

    def list_distinfo_files(self):
        """
        Iterates over the ``RECORD`` entries and returns paths for each line if
        the path is pointing to a file located in the ``.dist-info`` directory
        or one of its subdirectories.

        :returns: iterator of paths
        """
        base = os.path.dirname(self.path)
        for path, checksum, size in self._get_records():
            # XXX add separator or use real relpath algo
            if not os.path.isabs(path):
                path = os.path.join(base, path)
            if path.startswith(self.path):
                yield path

    def __eq__(self, other):
        # Two installed distributions are the same iff they live at the
        # same .dist-info path.
        return (isinstance(other, InstalledDistribution) and
                self.path == other.path)

    # See http://docs.python.org/reference/datamodel#object.__hash__
    __hash__ = object.__hash__


class EggInfoDistribution(BaseInstalledDistribution):
    """Created with the *path* of the ``.egg-info`` directory or file provided
    to the constructor. It reads the metadata contained in the file itself, or
    if the given path happens to be a directory, the metadata is read from the
    file ``PKG-INFO`` under that directory."""

    # There is no reliable way of knowing whether an egg was explicitly
    # requested by the user, so assume that it was.
    requested = True    # as we have no way of knowing, assume it was
    shared_locations = {}

    def __init__(self, path, env=None):
        """
        :param path: path of the ``.egg``/``.egg-info`` file or directory to
                     read metadata from.
        :param env: optional distribution path container, used for caching.
        """
        def set_name_and_version(s, n, v):
            s.name = n
            s.key = n.lower()   # for case-insensitive comparisons
            s.version = v

        self.path = path
        self.dist_path = env
        if env and env._cache_enabled and path in env._cache_egg.path:
            metadata = env._cache_egg.path[path].metadata
            set_name_and_version(self, metadata.name, metadata.version)
        else:
            metadata = self._get_metadata(path)

            # Need to be set before caching
            set_name_and_version(self, metadata.name, metadata.version)

            if env and env._cache_enabled:
                env._cache_egg.add(self)
        super(EggInfoDistribution, self).__init__(metadata, path, env)

    def _get_metadata(self, path):
        """
        Read legacy (setuptools-style) metadata from *path*, which may be a
        ``.egg`` file/directory or a ``.egg-info`` file/directory. Any
        dependencies found in ``requires.txt`` are merged into the returned
        metadata.

        :raises DistlibException: if *path* has an unrecognised extension.
        """
        requires = None

        def parse_requires_data(data):
            """Create a list of dependencies from a requires.txt file.

            *data*: the contents of a setuptools-produced requires.txt file.
            """
            reqs = []
            lines = data.splitlines()
            for line in lines:
                line = line.strip()
                # A '[' introduces an extras section; everything from there
                # on is ignored, as extras are not supported here.
                if line.startswith('['):
                    logger.warning('Unexpected line: quitting requirement scan: %r',
                                   line)
                    break
                r = parse_requirement(line)
                if not r:
                    logger.warning('Not recognised as a requirement: %r', line)
                    continue
                if r.extras:
                    logger.warning('extra requirements in requires.txt are '
                                   'not supported')
                if not r.constraints:
                    reqs.append(r.name)
                else:
                    cons = ', '.join('%s%s' % c for c in r.constraints)
                    reqs.append('%s (%s)' % (r.name, cons))
            return reqs

        def parse_requires_path(req_path):
            """Create a list of dependencies from a requires.txt file.

            *req_path*: the path to a setuptools-produced requires.txt file.
            """

            reqs = []
            try:
                with codecs.open(req_path, 'r', 'utf-8') as fp:
                    reqs = parse_requires_data(fp.read())
            except IOError:
                # A missing requires.txt simply means no dependencies.
                pass
            return reqs

        if path.endswith('.egg'):
            if os.path.isdir(path):
                meta_path = os.path.join(path, 'EGG-INFO', 'PKG-INFO')
                metadata = Metadata(path=meta_path, scheme='legacy')
                req_path = os.path.join(path, 'EGG-INFO', 'requires.txt')
                requires = parse_requires_path(req_path)
            else:
                # FIXME handle the case where zipfile is not available
                zipf = zipimport.zipimporter(path)
                fileobj = StringIO(
                    zipf.get_data('EGG-INFO/PKG-INFO').decode('utf8'))
                metadata = Metadata(fileobj=fileobj, scheme='legacy')
                try:
                    data = zipf.get_data('EGG-INFO/requires.txt')
                    requires = parse_requires_data(data.decode('utf-8'))
                except IOError:
                    requires = None
        elif path.endswith('.egg-info'):
            if os.path.isdir(path):
                req_path = os.path.join(path, 'requires.txt')
                requires = parse_requires_path(req_path)
                path = os.path.join(path, 'PKG-INFO')
            metadata = Metadata(path=path, scheme='legacy')
        else:
            raise DistlibException('path must end with .egg-info or .egg, '
                                   'got %r' % path)

        if requires:
            metadata.add_requirements(requires)
        return metadata

    def __repr__(self):
        return '<EggInfoDistribution %r %s at %r>' % (
            self.name, self.version, self.path)

    def __str__(self):
        return "%s %s" % (self.name, self.version)

    def check_installed_files(self):
        """
        Checks that the hashes and sizes of the files in ``RECORD`` are
        matched by the files themselves. Returns a (possibly empty) list of
        mismatches. Each entry in the mismatch list will be a tuple consisting
        of the path, 'exists', 'size' or 'hash' according to what didn't match
        (existence is checked first, then size, then hash), the expected
        value and the actual value.
        """
        mismatches = []
        record_path = os.path.join(self.path, 'installed-files.txt')
        if os.path.exists(record_path):
            for path, _, _ in self.list_installed_files():
                if path == record_path:
                    continue
                # Only existence is verified here; size/hash reads happen in
                # list_installed_files() when each entry is materialised.
                if not os.path.exists(path):
                    mismatches.append((path, 'exists', True, False))
        return mismatches

    def list_installed_files(self):
        """
        Iterates over the ``installed-files.txt`` entries and returns a tuple
        ``(path, hash, size)`` for each line.

        :returns: a list of (path, hash, size)
        """

        def _md5(path):
            # Context manager guarantees the file is closed even if the read
            # raises (previously hand-rolled with try/finally).
            with open(path, 'rb') as f:
                content = f.read()
            return hashlib.md5(content).hexdigest()

        def _size(path):
            return os.stat(path).st_size

        record_path = os.path.join(self.path, 'installed-files.txt')
        result = []
        if os.path.exists(record_path):
            with codecs.open(record_path, 'r', encoding='utf-8') as f:
                for line in f:
                    line = line.strip()
                    p = os.path.normpath(os.path.join(self.path, line))
                    # "./" is present as a marker between installed files
                    # and installation metadata files
                    if not os.path.exists(p):
                        logger.warning('Non-existent file: %s', p)
                        if p.endswith(('.pyc', '.pyo')):
                            continue
                        #otherwise fall through and fail
                    if not os.path.isdir(p):
                        result.append((p, _md5(p), _size(p)))
            result.append((record_path, None, None))
        return result

    def list_distinfo_files(self, absolute=False):
        """
        Iterates over the ``installed-files.txt`` entries and returns paths for
        each line if the path is pointing to a file located in the
        ``.egg-info`` directory or one of its subdirectories.

        :parameter absolute: If *absolute* is ``True``, each returned path is
                          transformed into a local absolute path. Otherwise the
                          raw value from ``installed-files.txt`` is returned.
        :type absolute: boolean
        :returns: iterator of paths
        """
        record_path = os.path.join(self.path, 'installed-files.txt')
        # Entries before the './' marker line are files installed elsewhere;
        # only entries after it are considered here.
        skip = True
        with codecs.open(record_path, 'r', encoding='utf-8') as f:
            for line in f:
                line = line.strip()
                if line == './':
                    skip = False
                    continue
                if not skip:
                    p = os.path.normpath(os.path.join(self.path, line))
                    if p.startswith(self.path):
                        if absolute:
                            yield p
                        else:
                            yield line

    def __eq__(self, other):
        """Two egg distributions are equal when their paths match."""
        return (isinstance(other, EggInfoDistribution) and
                self.path == other.path)

    # See http://docs.python.org/reference/datamodel#object.__hash__
    __hash__ = object.__hash__

# Backward-compatible aliases: "new"-style distributions carry PEP 376
# .dist-info metadata, "old"-style ones carry setuptools .egg-info metadata.
new_dist_class = InstalledDistribution
old_dist_class = EggInfoDistribution


class DependencyGraph(object):
    """
    Represents a dependency graph between distributions.

    The dependency relationships are stored in an ``adjacency_list`` that maps
    distributions to a list of ``(other, label)`` tuples where  ``other``
    is a distribution and the edge is labeled with ``label`` (i.e. the version
    specifier, if such was provided). Also, for more efficient traversal, for
    every distribution ``x``, a list of predecessors is kept in
    ``reverse_list[x]``. An edge from distribution ``a`` to
    distribution ``b`` means that ``a`` depends on ``b``. If any missing
    dependencies are found, they are stored in ``missing``, which is a
    dictionary that maps distributions to a list of requirements that were not
    provided by any other distributions.
    """

    def __init__(self):
        self.adjacency_list = {}
        self.reverse_list = {}
        self.missing = {}

    def add_distribution(self, distribution):
        """Add the *distribution* to the graph.

        :type distribution: :class:`distutils2.database.InstalledDistribution`
                            or :class:`distutils2.database.EggInfoDistribution`
        """
        self.adjacency_list[distribution] = []
        self.reverse_list[distribution] = []
        #self.missing[distribution] = []

    def add_edge(self, x, y, label=None):
        """Add an edge from distribution *x* to distribution *y* with the given
        *label*.

        :type x: :class:`distutils2.database.InstalledDistribution` or
                 :class:`distutils2.database.EggInfoDistribution`
        :type y: :class:`distutils2.database.InstalledDistribution` or
                 :class:`distutils2.database.EggInfoDistribution`
        :type label: ``str`` or ``None``
        """
        self.adjacency_list[x].append((y, label))
        # multiple edges are allowed, so be careful
        if x not in self.reverse_list[y]:
            self.reverse_list[y].append(x)

    def add_missing(self, distribution, requirement):
        """
        Add a missing *requirement* for the given *distribution*.

        :type distribution: :class:`distutils2.database.InstalledDistribution`
                            or :class:`distutils2.database.EggInfoDistribution`
        :type requirement: ``str``
        """
        logger.debug('%s missing %r', distribution, requirement)
        self.missing.setdefault(distribution, []).append(requirement)

    def _repr_dist(self, dist):
        # Single-line "name version" label shared by repr_node and to_dot.
        return '%s %s' % (dist.name, dist.version)

    def repr_node(self, dist, level=1):
        """Prints only a subgraph"""
        # NOTE(review): recursion has no cycle guard, so a cyclic graph would
        # recurse forever - confirm callers only pass acyclic subgraphs.
        output = [self._repr_dist(dist)]
        for other, label in self.adjacency_list[dist]:
            # Use a distinct name instead of rebinding the 'dist' parameter.
            line = self._repr_dist(other)
            if label is not None:
                line = '%s [%s]' % (line, label)
            output.append('    ' * level + str(line))
            suboutput = self.repr_node(other, level + 1)
            subs = suboutput.split('\n')
            output.extend(subs[1:])
        return '\n'.join(output)

    def to_dot(self, f, skip_disconnected=True):
        """Writes a DOT output for the graph to the provided file *f*.

        If *skip_disconnected* is set to ``True``, then all distributions
        that are not dependent on any other distribution are skipped.

        :type f: has to support ``file``-like operations
        :type skip_disconnected: ``bool``
        """
        disconnected = []

        f.write("digraph dependencies {\n")
        for dist, adjs in self.adjacency_list.items():
            if len(adjs) == 0 and not skip_disconnected:
                disconnected.append(dist)
            for other, label in adjs:
                if label is not None:
                    f.write('"%s" -> "%s" [label="%s"]\n' %
                            (dist.name, other.name, label))
                else:
                    f.write('"%s" -> "%s"\n' % (dist.name, other.name))
        if not skip_disconnected and len(disconnected) > 0:
            f.write('subgraph disconnected {\n')
            f.write('label = "Disconnected"\n')
            f.write('bgcolor = red\n')

            for dist in disconnected:
                f.write('"%s"' % dist.name)
                f.write('\n')
            f.write('}\n')
        f.write('}\n')

    def topological_sort(self):
        """
        Perform a topological sort of the graph.
        :return: A tuple, the first element of which is a topologically sorted
                 list of distributions, and the second element of which is a
                 list of distributions that cannot be sorted because they have
                 circular dependencies and so form a cycle.
        """
        result = []
        # Make a shallow copy of the adjacency list
        alist = {}
        for k, v in self.adjacency_list.items():
            alist[k] = v[:]
        while True:
            # See what we can remove in this run
            to_remove = []
            # Snapshot the items because entries are deleted while iterating.
            for k, v in list(alist.items()):
                if not v:
                    to_remove.append(k)
                    del alist[k]
            if not to_remove:
                # What's left in alist (if anything) is a cycle.
                break
            # Remove from the adjacency list of others
            for k, v in alist.items():
                alist[k] = [(d, r) for d, r in v if d not in to_remove]
            logger.debug('Moving to result: %s',
                         ['%s (%s)' % (d.name, d.version) for d in to_remove])
            result.extend(to_remove)
        return result, list(alist.keys())

    def __repr__(self):
        """Representation of the graph"""
        # Only the keys are needed; repr_node walks each node's edges itself.
        output = [self.repr_node(dist) for dist in self.adjacency_list]
        return '\n'.join(output)


def make_graph(dists, scheme='default'):
    """Makes a dependency graph from the given distributions.

    :parameter dists: a list of distributions
    :type dists: list of :class:`distutils2.database.InstalledDistribution` and
                 :class:`distutils2.database.EggInfoDistribution` instances
    :rtype: a :class:`DependencyGraph` instance
    """
    scheme = get_scheme(scheme)
    graph = DependencyGraph()
    provided = {}  # maps names to lists of (version, dist) tuples

    # Pass 1: register every distribution and index everything it provides.
    for dist in dists:
        graph.add_distribution(dist)
        for p in dist.provides:
            name, version = parse_name_and_version(p)
            logger.debug('Add to provided: %s, %s, %s', name, version, dist)
            provided.setdefault(name, []).append((version, dist))

    # Pass 2: resolve each distribution's requirements against the index,
    # adding an edge per satisfied requirement and recording the rest as
    # missing.
    for dist in dists:
        requires = (dist.run_requires | dist.meta_requires |
                    dist.build_requires | dist.dev_requires)
        for req in requires:
            try:
                matcher = scheme.matcher(req)
            except UnsupportedVersionError:
                # XXX compat-mode if cannot read the version
                logger.warning('could not read version %r - using name only',
                               req)
                name = req.split()[0]
                matcher = scheme.matcher(name)

            name = matcher.key   # case-insensitive

            matched = False
            for version, provider in provided.get(name, ()):
                try:
                    match = matcher.match(version)
                except UnsupportedVersionError:
                    match = False
                if match:
                    graph.add_edge(dist, provider, req)
                    matched = True
                    break
            if not matched:
                graph.add_missing(dist, req)
    return graph


def get_dependent_dists(dists, dist):
    """Recursively generate a list of distributions from *dists* that are
    dependent on *dist*.

    :param dists: a list of distributions
    :param dist: a distribution, member of *dists* for which we are interested
    """
    if dist not in dists:
        raise DistlibException('given distribution %r is not a member '
                               'of the list' % dist.name)
    graph = make_graph(dists)

    # Walk the reverse edges; seeding 'seen' with *dist* itself means the
    # membership check below also prevents looping back through it.
    seen = [dist]
    pending = graph.reverse_list[dist]
    while pending:
        current = pending.pop()
        seen.append(current)
        pending.extend(succ for succ in graph.reverse_list[current]
                       if succ not in seen)

    # Drop *dist*: it was only there to guard against infinite loops.
    return seen[1:]


def get_required_dists(dists, dist):
    """Recursively generate a list of distributions from *dists* that are
    required by *dist*.

    :param dists: a list of distributions
    :param dist: a distribution, member of *dists* for which we are interested
    :raises DistlibException: if *dist* is not a member of *dists*
    """
    if dist not in dists:
        raise DistlibException('given distribution %r is not a member '
                               'of the list' % dist.name)
    graph = make_graph(dists)

    req = []  # required distributions
    todo = graph.adjacency_list[dist]  # (dist, label) edges left to inspect

    while todo:
        d = todo.pop()[0]
        # A node can be enqueued via several parents before it is first
        # processed, so guard against duplicates in the result.
        if d not in req:
            req.append(d)
        for pred in graph.adjacency_list[d]:
            # pred is a (distribution, label) tuple, so compare its
            # distribution. (The previous code tested the whole tuple
            # against req, which never matched - duplicating work and
            # looping forever on cyclic dependency graphs.)
            if pred[0] not in req:
                todo.append(pred)

    return req


def make_dist(name, version, **kwargs):
    """
    A convenience method for making a dist given just a name and version.
    """
    placeholder = 'Placeholder for summary'
    # An explicit summary keyword wins; an empty one falls back below.
    summary = kwargs.pop('summary', placeholder)
    md = Metadata(**kwargs)
    md.name = name
    md.version = version
    md.summary = summary or placeholder
    return Distribution(md)
_vendor/distlib/locators.py000064400000143505151733136310012045 0ustar00# -*- coding: utf-8 -*-
#
# Copyright (C) 2012-2015 Vinay Sajip.
# Licensed to the Python Software Foundation under a contributor agreement.
# See LICENSE.txt and CONTRIBUTORS.txt.
#

import gzip
from io import BytesIO
import json
import logging
import os
import posixpath
import re
try:
    import threading
except ImportError:  # pragma: no cover
    import dummy_threading as threading
import zlib

from . import DistlibException
from .compat import (urljoin, urlparse, urlunparse, url2pathname, pathname2url,
                     queue, quote, unescape, string_types, build_opener,
                     HTTPRedirectHandler as BaseRedirectHandler, text_type,
                     Request, HTTPError, URLError)
from .database import Distribution, DistributionPath, make_dist
from .metadata import Metadata
from .util import (cached_property, parse_credentials, ensure_slash,
                   split_filename, get_project_data, parse_requirement,
                   parse_name_and_version, ServerProxy, normalize_name)
from .version import get_scheme, UnsupportedVersionError
from .wheel import Wheel, is_compatible

logger = logging.getLogger(__name__)

# URL fragment of the form "<algo>=<hexdigest>", e.g. "md5=abc123...".
# Raw strings prevent the invalid-escape-sequence DeprecationWarning that
# '\w' in a plain string triggers on Python 3.6+.
HASHER_HASH = re.compile(r'^(\w+)=([a-f0-9]+)')
# "charset=..." parameter of a Content-Type header (case-insensitive).
CHARSET = re.compile(r';\s*charset\s*=\s*(.*)\s*$', re.I)
# Content types treated as scrapable HTML/XHTML index pages.
HTML_CONTENT_TYPE = re.compile(r'text/html|application/x(ht)?ml')
DEFAULT_INDEX = 'https://pypi.python.org/pypi'

def get_all_distribution_names(url=None):
    """
    Return all distribution names known by an index.

    :param url: The URL of the index; defaults to the main PyPI XML-RPC
                endpoint when ``None``.
    :return: A list of all known distribution names.
    """
    if url is None:
        url = DEFAULT_INDEX
    proxy = ServerProxy(url, timeout=3.0)
    return proxy.list_packages()

class RedirectHandler(BaseRedirectHandler):
    """
    A class to work around a bug in some Python 3.2.x releases.
    """
    # There's a bug in the base version for some 3.2.x
    # (e.g. 3.2.2 on Ubuntu Oneiric). If a Location header
    # returns e.g. /abc, it bails because it says the scheme ''
    # is bogus, when actually it should use the request's
    # URL for the scheme. See Python issue #13696.
    def http_error_302(self, req, fp, code, msg, headers):
        # Some servers (incorrectly) return multiple Location headers
        # (so probably same goes for URI).  Use first header.
        newurl = None
        for key in ('location', 'uri'):
            if key in headers:
                newurl = headers[key]
                break
        if newurl is None:
            # No redirect target at all: return None so the redirect is
            # treated as unhandled by the opener machinery.
            return
        urlparts = urlparse(newurl)
        if urlparts.scheme == '':
            # Scheme-less (relative) target: resolve it against the original
            # request URL, then rewrite the header in place so the base
            # handler sees an absolute URL. 'key' is still bound to the
            # header name that matched in the loop above.
            newurl = urljoin(req.get_full_url(), newurl)
            if hasattr(headers, 'replace_header'):
                headers.replace_header(key, newurl)
            else:
                headers[key] = newurl
        return BaseRedirectHandler.http_error_302(self, req, fp, code, msg,
                                                  headers)

    # Apply the same workaround to every redirect status code.
    http_error_301 = http_error_303 = http_error_307 = http_error_302

class Locator(object):
    """
    A base class for locators - things that locate distributions.
    """
    source_extensions = ('.tar.gz', '.tar.bz2', '.tar', '.zip', '.tgz', '.tbz')
    binary_extensions = ('.egg', '.exe', '.whl')
    excluded_extensions = ('.pdf',)

    # A list of tags indicating which wheels you want to match. The default
    # value of None matches against the tags compatible with the running
    # Python. If you want to match other values, set wheel_tags on a locator
    # instance to a list of tuples (pyver, abi, arch) which you want to match.
    wheel_tags = None

    downloadable_extensions = source_extensions + ('.whl',)

    def __init__(self, scheme='default'):
        """
        Initialise an instance.
        :param scheme: Because locators look for most recent versions, they
                       need to know the version scheme to use. This specifies
                       the current PEP-recommended scheme - use ``'legacy'``
                       if you need to support existing distributions on PyPI.
        """
        self._cache = {}
        self.scheme = scheme
        # Because of bugs in some of the handlers on some of the platforms,
        # we use our own opener rather than just using urlopen.
        self.opener = build_opener(RedirectHandler())
        # If get_project() is called from locate(), the matcher instance
        # is set from the requirement passed to locate(). See issue #18 for
        # why this can be useful to know.
        self.matcher = None
        self.errors = queue.Queue()

    def get_errors(self):
        """
        Return any errors which have occurred.
        """
        result = []
        while not self.errors.empty():  # pragma: no cover
            try:
                e = self.errors.get(False)
                result.append(e)
            except queue.Empty:
                # The exception class lives on the queue *module*; Queue
                # instances have no 'Empty' attribute, so the previous
                # 'self.errors.Empty' spelling raised AttributeError if the
                # race it guards against ever occurred.
                continue
            self.errors.task_done()
        return result

    def clear_errors(self):
        """
        Clear any errors which may have been logged.
        """
        # Just get the errors and throw them away
        self.get_errors()

    def clear_cache(self):
        """Discard all cached get_project() results."""
        self._cache.clear()

    def _get_scheme(self):
        return self._scheme

    def _set_scheme(self, value):
        self._scheme = value

    scheme = property(_get_scheme, _set_scheme)

    def _get_project(self, name):
        """
        For a given project, get a dictionary mapping available versions to Distribution
        instances.

        This should be implemented in subclasses.

        If called from a locate() request, self.matcher will be set to a
        matcher for the requirement to satisfy, otherwise it will be None.
        """
        raise NotImplementedError('Please implement in the subclass')

    def get_distribution_names(self):
        """
        Return all the distribution names known to this locator.
        """
        raise NotImplementedError('Please implement in the subclass')

    def get_project(self, name):
        """
        For a given project, get a dictionary mapping available versions to Distribution
        instances.

        This calls _get_project to do all the work, and just implements a caching layer on top.
        """
        if self._cache is None:
            result = self._get_project(name)
        elif name in self._cache:
            result = self._cache[name]
        else:
            self.clear_errors()
            result = self._get_project(name)
            self._cache[name] = result
        return result

    def score_url(self, url):
        """
        Give an url a score which can be used to choose preferred URLs
        for a given project release.
        """
        t = urlparse(url)
        basename = posixpath.basename(t.path)
        compatible = True
        is_wheel = basename.endswith('.whl')
        if is_wheel:
            compatible = is_compatible(Wheel(basename), self.wheel_tags)
        # Tuples compare element-wise and prefer_url() keeps the *higher*
        # score, so https must map to True here. (The previous
        # "t.scheme != 'https'" inverted that, preferring http over https
        # and contradicting prefer_url()'s documented behaviour.)
        return (t.scheme == 'https', 'pypi.python.org' in t.netloc,
                is_wheel, compatible, basename)

    def prefer_url(self, url1, url2):
        """
        Choose one of two URLs where both are candidates for distribution
        archives for the same version of a distribution (for example,
        .tar.gz vs. zip).

        The current implementation favours https:// URLs over http://, archives
        from PyPI over those from other locations, wheel compatibility (if a
        wheel) and then the archive name.
        """
        result = url2
        if url1:
            s1 = self.score_url(url1)
            s2 = self.score_url(url2)
            if s1 > s2:
                result = url1
            if result != url2:
                logger.debug('Not replacing %r with %r', url1, url2)
            else:
                logger.debug('Replacing %r with %r', url1, url2)
        return result

    def split_filename(self, filename, project_name):
        """
        Attempt to split a filename in project name, version and Python version.
        """
        return split_filename(filename, project_name)

    def convert_url_to_download_info(self, url, project_name):
        """
        See if a URL is a candidate for a download URL for a project (the URL
        has typically been scraped from an HTML page).

        If it is, a dictionary is returned with keys "name", "version",
        "filename" and "url"; otherwise, None is returned.
        """
        def same_project(name1, name2):
            return normalize_name(name1) == normalize_name(name2)

        result = None
        scheme, netloc, path, params, query, frag = urlparse(url)
        if frag.lower().startswith('egg='):
            logger.debug('%s: version hint in fragment: %r',
                         project_name, frag)
        m = HASHER_HASH.match(frag)
        if m:
            algo, digest = m.groups()
        else:
            algo, digest = None, None
        origpath = path
        if path and path[-1] == '/':
            path = path[:-1]
        if path.endswith('.whl'):
            try:
                wheel = Wheel(path)
                if is_compatible(wheel, self.wheel_tags):
                    if project_name is None:
                        include = True
                    else:
                        include = same_project(wheel.name, project_name)
                    if include:
                        result = {
                            'name': wheel.name,
                            'version': wheel.version,
                            'filename': wheel.filename,
                            'url': urlunparse((scheme, netloc, origpath,
                                               params, query, '')),
                            'python-version': ', '.join(
                                ['.'.join(list(v[2:])) for v in wheel.pyver]),
                        }
            except Exception:  # pragma: no cover
                logger.warning('invalid path for wheel: %s', path)
        elif path.endswith(self.downloadable_extensions):
            path = filename = posixpath.basename(path)
            for ext in self.downloadable_extensions:
                if path.endswith(ext):
                    path = path[:-len(ext)]
                    t = self.split_filename(path, project_name)
                    if not t:
                        logger.debug('No match for project/version: %s', path)
                    else:
                        name, version, pyver = t
                        if not project_name or same_project(project_name, name):
                            result = {
                                'name': name,
                                'version': version,
                                'filename': filename,
                                'url': urlunparse((scheme, netloc, origpath,
                                                   params, query, '')),
                                #'packagetype': 'sdist',
                            }
                            if pyver:
                                result['python-version'] = pyver
                    break
        if result and algo:
            result['%s_digest' % algo] = digest
        return result

    def _get_digest(self, info):
        """
        Get a digest from a dictionary by looking at keys of the form
        'algo_digest'.

        Returns a 2-tuple (algo, digest) if found, else None. Currently
        looks only for SHA256, then MD5.
        """
        result = None
        for algo in ('sha256', 'md5'):
            key = '%s_digest' % algo
            if key in info:
                result = (algo, info[key])
                break
        return result

    def _update_version_data(self, result, info):
        """
        Update a result dictionary (the final result from _get_project) with a
        dictionary for a specific version, which typically holds information
        gleaned from a filename or URL for an archive for the distribution.
        """
        name = info.pop('name')
        version = info.pop('version')
        if version in result:
            dist = result[version]
            md = dist.metadata
        else:
            dist = make_dist(name, version, scheme=self.scheme)
            md = dist.metadata
        dist.digest = digest = self._get_digest(info)
        url = info['url']
        result['digests'][url] = digest
        # NOTE(review): the URL is only added to result['urls'] when it
        # differs from the current source_url - confirm that URLs equal to
        # source_url should really be omitted from the set.
        if md.source_url != info['url']:
            md.source_url = self.prefer_url(md.source_url, url)
            result['urls'].setdefault(version, set()).add(url)
        dist.locator = self
        result[version] = dist

    def locate(self, requirement, prereleases=False):
        """
        Find the most recent distribution which matches the given
        requirement.

        :param requirement: A requirement of the form 'foo (1.0)' or perhaps
                            'foo (>= 1.0, < 2.0, != 1.3)'
        :param prereleases: If ``True``, allow pre-release versions
                            to be located. Otherwise, pre-release versions
                            are not returned.
        :return: A :class:`Distribution` instance, or ``None`` if no such
                 distribution could be located.
        """
        result = None
        r = parse_requirement(requirement)
        if r is None:
            raise DistlibException('Not a valid requirement: %r' % requirement)
        scheme = get_scheme(self.scheme)
        self.matcher = matcher = scheme.matcher(r.requirement)
        logger.debug('matcher: %s (%s)', matcher, type(matcher).__name__)
        versions = self.get_project(r.name)
        if len(versions) > 2:   # urls and digests keys are present
            # sometimes, versions are invalid
            slist = []
            vcls = matcher.version_class
            for k in versions:
                if k in ('urls', 'digests'):
                    continue
                try:
                    if not matcher.match(k):
                        logger.debug('%s did not match %r', matcher, k)
                    else:
                        if prereleases or not vcls(k).is_prerelease:
                            slist.append(k)
                        else:
                            logger.debug('skipping pre-release '
                                         'version %s of %s', k, matcher.name)
                except Exception:  # pragma: no cover
                    logger.warning('error matching %s with %r', matcher, k)
                    pass # slist.append(k)
            if len(slist) > 1:
                slist = sorted(slist, key=scheme.key)
            if slist:
                logger.debug('sorted list: %s', slist)
                version = slist[-1]
                result = versions[version]
        if result:
            if r.extras:
                result.extras = r.extras
            result.download_urls = versions.get('urls', {}).get(version, set())
            d = {}
            sd = versions.get('digests', {})
            for url in result.download_urls:
                if url in sd:
                    d[url] = sd[url]
            result.digests = d
        self.matcher = None
        return result


class PyPIRPCLocator(Locator):
    """
    This locator uses XML-RPC to locate distributions. It therefore
    cannot be used with simple mirrors (that only mirror file content).
    """
    def __init__(self, url, **kwargs):
        """
        Initialise an instance.

        :param url: The URL to use for XML-RPC.
        :param kwargs: Passed to the superclass constructor.
        """
        super(PyPIRPCLocator, self).__init__(**kwargs)
        self.base_url = url
        self.client = ServerProxy(url, timeout=3.0)

    def get_distribution_names(self):
        """
        Return all the distribution names known to this locator.
        """
        return set(self.client.list_packages())

    def _get_project(self, name):
        """
        Query the XML-RPC endpoint for each release of ``name`` and build
        the standard result mapping: version -> Distribution, plus the
        'urls' (version -> set of URLs) and 'digests' (URL -> digest)
        entries.
        """
        result = {'urls': {}, 'digests': {}}
        for version in self.client.package_releases(name, True):
            release_urls = self.client.release_urls(name, version)
            release_data = self.client.release_data(name, version)
            md = Metadata(scheme=self.scheme)
            md.name = release_data['name']
            md.version = release_data['version']
            md.license = release_data.get('license')
            md.keywords = release_data.get('keywords', [])
            md.summary = release_data.get('summary')
            dist = Distribution(md)
            if not release_urls:
                # No files for this release - nothing to record.
                continue
            first = release_urls[0]
            md.source_url = first['url']
            dist.digest = self._get_digest(first)
            dist.locator = self
            result[version] = dist
            for entry in release_urls:
                entry_url = entry['url']
                result['urls'].setdefault(version, set()).add(entry_url)
                result['digests'][entry_url] = self._get_digest(entry)
        return result

class PyPIJSONLocator(Locator):
    """
    This locator uses PyPI's JSON interface. It's very limited in functionality
    and probably not worth using.
    """
    def __init__(self, url, **kwargs):
        """
        Initialise an instance.

        :param url: The base URL of the JSON API; a trailing slash is
                    ensured.
        :param kwargs: Passed to the superclass constructor.
        """
        super(PyPIJSONLocator, self).__init__(**kwargs)
        self.base_url = ensure_slash(url)

    def get_distribution_names(self):
        """
        Return all the distribution names known to this locator.
        """
        raise NotImplementedError('Not available from this locator')

    def _get_project(self, name):
        """
        Fetch ``<base_url>/<name>/json`` and convert the payload into the
        standard result mapping: version -> Distribution, plus 'urls'
        (version -> set of URLs) and 'digests' (URL -> digest) entries.

        Any fetch/parse failure is recorded in ``self.errors`` and logged;
        whatever was accumulated before the failure is returned.
        """
        result = {'urls': {}, 'digests': {}}
        url = urljoin(self.base_url, '%s/json' % quote(name))
        try:
            resp = self.opener.open(url)
            data = resp.read().decode() # for now
            d = json.loads(data)
            md = Metadata(scheme=self.scheme)
            data = d['info']
            md.name = data['name']
            md.version = data['version']
            md.license = data.get('license')
            md.keywords = data.get('keywords', [])
            md.summary = data.get('summary')
            dist = Distribution(md)
            dist.locator = self
            result[md.version] = dist
            # The 'urls' key holds the files for the latest version.
            for info in d['urls']:
                url = info['url']
                digest = self._get_digest(info)
                dist.download_urls.add(url)
                dist.digests[url] = digest
                result['urls'].setdefault(md.version, set()).add(url)
                result['digests'][url] = digest
            # Now get other releases
            for version, infos in d['releases'].items():
                if version == md.version:
                    continue    # already done
                omd = Metadata(scheme=self.scheme)
                omd.name = md.name
                omd.version = version
                odist = Distribution(omd)
                odist.locator = self
                result[version] = odist
                for info in infos:
                    url = info['url']
                    digest = self._get_digest(info)
                    odist.download_urls.add(url)
                    odist.digests[url] = digest
                    result['urls'].setdefault(version, set()).add(url)
                    result['digests'][url] = digest
        except Exception as e:
            # Best-effort: record the problem rather than propagate, so a
            # failing index doesn't abort an aggregated search.
            self.errors.put(text_type(e))
            logger.exception('JSON fetch failed: %s', e)
        return result


class Page(object):
    """
    This class represents a scraped HTML page.
    """
    # The following slightly hairy-looking regex just looks for the contents of
    # an anchor link, which has an attribute "href" either immediately preceded
    # or immediately followed by a "rel" attribute. The attribute values can be
    # declared with double quotes, single quotes or no quotes - which leads to
    # the length of the expression.
    # NOTE: raw strings are used so that sequences like \s reach the regex
    # engine verbatim, instead of relying on Python preserving invalid string
    # escapes (a DeprecationWarning since Python 3.6, SyntaxWarning in 3.12).
    _href = re.compile(r"""
(rel\s*=\s*(?:"(?P<rel1>[^"]*)"|'(?P<rel2>[^']*)'|(?P<rel3>[^>\s\n]*))\s+)?
href\s*=\s*(?:"(?P<url1>[^"]*)"|'(?P<url2>[^']*)'|(?P<url3>[^>\s\n]*))
(\s+rel\s*=\s*(?:"(?P<rel4>[^"]*)"|'(?P<rel5>[^']*)'|(?P<rel6>[^>\s\n]*)))?
""", re.I | re.S | re.X)
    _base = re.compile(r"""<base\s+href\s*=\s*['"]?([^'">]+)""", re.I | re.S)

    def __init__(self, data, url):
        """
        Initialise an instance with the Unicode page contents and the URL they
        came from.
        """
        self.data = data
        self.base_url = self.url = url
        # A <base href="..."> tag, if present, overrides the page URL as the
        # base for resolving relative links.
        m = self._base.search(self.data)
        if m:
            self.base_url = m.group(1)

    _clean_re = re.compile(r'[^a-z0-9$&+,/:;=?@.#%_\\|-]', re.I)

    @cached_property
    def links(self):
        """
        Return the URLs of all the links on a page together with information
        about their "rel" attribute, for determining which ones to treat as
        downloads and which ones to queue for further scraping.
        """
        def clean(url):
            "Tidy up an URL."
            scheme, netloc, path, params, query, frag = urlparse(url)
            return urlunparse((scheme, netloc, quote(path),
                               params, query, frag))

        result = set()
        for match in self._href.finditer(self.data):
            d = match.groupdict('')
            rel = (d['rel1'] or d['rel2'] or d['rel3'] or
                   d['rel4'] or d['rel5'] or d['rel6'])
            url = d['url1'] or d['url2'] or d['url3']
            url = urljoin(self.base_url, url)
            url = unescape(url)
            # Percent-encode any character not in the allowed set. Use %02x
            # (zero-padded): the previous %2x space-padded ordinals below 16,
            # producing invalid escapes like '% a'.
            url = self._clean_re.sub(lambda m: '%%%02x' % ord(m.group(0)), url)
            result.add((url, rel))
        # We sort the result, hoping to bring the most recent versions
        # to the front
        result = sorted(result, key=lambda t: t[0], reverse=True)
        return result


class SimpleScrapingLocator(Locator):
    """
    A locator which scrapes HTML pages to locate downloads for a distribution.
    This runs multiple threads to do the I/O; performance is at least as good
    as pip's PackageFinder, which works in an analogous fashion.
    """

    # These are used to deal with various Content-Encoding schemes.
    decoders = {
        'deflate': zlib.decompress,
        # BUG FIX: this lambda previously read an undefined name 'd' instead
        # of its parameter 'b', so any gzip-encoded response raised NameError.
        'gzip': lambda b: gzip.GzipFile(fileobj=BytesIO(b)).read(),
        'none': lambda b: b,
    }

    def __init__(self, url, timeout=None, num_workers=10, **kwargs):
        """
        Initialise an instance.
        :param url: The root URL to use for scraping.
        :param timeout: The timeout, in seconds, to be applied to requests.
                        This defaults to ``None`` (no timeout specified).
        :param num_workers: The number of worker threads you want to do I/O,
                            This defaults to 10.
        :param kwargs: Passed to the superclass.
        """
        super(SimpleScrapingLocator, self).__init__(**kwargs)
        self.base_url = ensure_slash(url)
        self.timeout = timeout
        self._page_cache = {}
        self._seen = set()
        self._to_fetch = queue.Queue()
        self._bad_hosts = set()
        self.skip_externals = False
        self.num_workers = num_workers
        self._lock = threading.RLock()
        # See issue #45: we need to be resilient when the locator is used
        # in a thread, e.g. with concurrent.futures. We can't use self._lock
        # as it is for coordinating our internal threads - the ones created
        # in _prepare_threads.
        self._gplock = threading.RLock()

    def _prepare_threads(self):
        """
        Threads are created only when get_project is called, and terminate
        before it returns. They are there primarily to parallelise I/O (i.e.
        fetching web pages).
        """
        self._threads = []
        for i in range(self.num_workers):
            t = threading.Thread(target=self._fetch)
            t.daemon = True  # attribute form; setDaemon() is deprecated
            t.start()
            self._threads.append(t)

    def _wait_threads(self):
        """
        Tell all the threads to terminate (by sending a sentinel value) and
        wait for them to do so.
        """
        # Note that you need two loops, since you can't say which
        # thread will get each sentinel
        for t in self._threads:
            self._to_fetch.put(None)    # sentinel
        for t in self._threads:
            t.join()
        self._threads = []

    def _get_project(self, name):
        """
        Scrape pages reachable from <base_url>/<name>/ and return the
        standard result mapping (version -> Distribution, plus 'urls' and
        'digests' entries). Worker threads populate ``self.result`` via
        :meth:`_process_download`; ``self._gplock`` serialises concurrent
        callers of get_project.
        """
        result = {'urls': {}, 'digests': {}}
        with self._gplock:
            self.result = result
            self.project_name = name
            url = urljoin(self.base_url, '%s/' % quote(name))
            self._seen.clear()
            self._page_cache.clear()
            self._prepare_threads()
            try:
                logger.debug('Queueing %s', url)
                self._to_fetch.put(url)
                self._to_fetch.join()   # wait until all queued URLs are done
            finally:
                self._wait_threads()
            del self.result
        return result

    platform_dependent = re.compile(r'\b(linux-(i\d86|x86_64|arm\w+)|'
                                    r'win(32|-amd64)|macosx-?\d+)\b', re.I)

    def _is_platform_dependent(self, url):
        """
        Does an URL refer to a platform-specific download?
        """
        return self.platform_dependent.search(url)

    def _process_download(self, url):
        """
        See if an URL is a suitable download for a project.

        If it is, register information in the result dictionary (for
        _get_project) about the specific version it's for.

        Note that the return value isn't actually used other than as a boolean
        value.
        """
        if self._is_platform_dependent(url):
            info = None
        else:
            info = self.convert_url_to_download_info(url, self.project_name)
        logger.debug('process_download: %s -> %s', url, info)
        if info:
            with self._lock:    # needed because self.result is shared
                self._update_version_data(self.result, info)
        return info

    def _should_queue(self, link, referrer, rel):
        """
        Determine whether a link URL from a referring page and with a
        particular "rel" attribute should be queued for scraping.
        """
        scheme, netloc, path, _, _, _ = urlparse(link)
        if path.endswith(self.source_extensions + self.binary_extensions +
                         self.excluded_extensions):
            # Archives are downloads, not pages to scrape.
            result = False
        elif self.skip_externals and not link.startswith(self.base_url):
            result = False
        elif not referrer.startswith(self.base_url):
            # Only follow links found on pages under the base URL.
            result = False
        elif rel not in ('homepage', 'download'):
            result = False
        elif scheme not in ('http', 'https', 'ftp'):
            result = False
        elif self._is_platform_dependent(link):
            result = False
        else:
            host = netloc.split(':', 1)[0]
            if host.lower() == 'localhost':
                result = False
            else:
                result = True
        logger.debug('should_queue: %s (%s) from %s -> %s', link, rel,
                     referrer, result)
        return result

    def _fetch(self):
        """
        Get a URL to fetch from the work queue, get the HTML page, examine its
        links for download candidates and candidates for further scraping.

        This is a handy method to run in a thread.
        """
        while True:
            url = self._to_fetch.get()
            try:
                if url:
                    page = self.get_page(url)
                    if page is None:    # e.g. after an error
                        continue
                    for link, rel in page.links:
                        if link not in self._seen:
                            self._seen.add(link)
                            if (not self._process_download(link) and
                                self._should_queue(link, url, rel)):
                                logger.debug('Queueing %s from %s', link, url)
                                self._to_fetch.put(link)
            except Exception as e:  # pragma: no cover
                self.errors.put(text_type(e))
            finally:
                # always do this, to avoid hangs :-)
                self._to_fetch.task_done()
            if not url:
                #logger.debug('Sentinel seen, quitting.')
                break

    def get_page(self, url):
        """
        Get the HTML for an URL, possibly from an in-memory cache.

        XXX TODO Note: this cache is never actually cleared. It's assumed that
        the data won't get stale over the lifetime of a locator instance (not
        necessarily true for the default_locator).
        """
        # http://peak.telecommunity.com/DevCenter/EasyInstall#package-index-api
        scheme, netloc, path, _, _, _ = urlparse(url)
        if scheme == 'file' and os.path.isdir(url2pathname(path)):
            url = urljoin(ensure_slash(url), 'index.html')

        if url in self._page_cache:
            result = self._page_cache[url]
            logger.debug('Returning %s from cache: %s', url, result)
        else:
            host = netloc.split(':', 1)[0]
            result = None
            if host in self._bad_hosts:
                logger.debug('Skipping %s due to bad host %s', url, host)
            else:
                req = Request(url, headers={'Accept-encoding': 'identity'})
                try:
                    logger.debug('Fetching %s', url)
                    resp = self.opener.open(req, timeout=self.timeout)
                    logger.debug('Fetched %s', url)
                    headers = resp.info()
                    content_type = headers.get('Content-Type', '')
                    if HTML_CONTENT_TYPE.match(content_type):
                        final_url = resp.geturl()
                        data = resp.read()
                        encoding = headers.get('Content-Encoding')
                        if encoding:
                            decoder = self.decoders[encoding]   # fail if not found
                            data = decoder(data)
                        encoding = 'utf-8'
                        m = CHARSET.search(content_type)
                        if m:
                            encoding = m.group(1)
                        try:
                            data = data.decode(encoding)
                        except UnicodeError:  # pragma: no cover
                            data = data.decode('latin-1')    # fallback
                        result = Page(data, final_url)
                        self._page_cache[final_url] = result
                except HTTPError as e:
                    if e.code != 404:
                        logger.exception('Fetch failed: %s: %s', url, e)
                except URLError as e:  # pragma: no cover
                    logger.exception('Fetch failed: %s: %s', url, e)
                    with self._lock:
                        # Avoid hammering a host that is unreachable.
                        self._bad_hosts.add(host)
                except Exception as e:  # pragma: no cover
                    logger.exception('Fetch failed: %s: %s', url, e)
                finally:
                    self._page_cache[url] = result   # even if None (failure)
        return result

    _distname_re = re.compile('<a href=[^>]*>([^<]+)<')

    def get_distribution_names(self):
        """
        Return all the distribution names known to this locator.
        """
        result = set()
        page = self.get_page(self.base_url)
        if not page:
            raise DistlibException('Unable to get %s' % self.base_url)
        for match in self._distname_re.finditer(page.data):
            result.add(match.group(1))
        return result

class DirectoryLocator(Locator):
    """
    This class locates distributions in a directory tree.
    """

    def __init__(self, path, **kwargs):
        """
        Initialise an instance.
        :param path: The root of the directory tree to search.
        :param kwargs: Passed to the superclass constructor,
                       except for:
                       * recursive - if True (the default), subdirectories are
                         recursed into. If False, only the top-level directory
                         is searched,
        """
        self.recursive = kwargs.pop('recursive', True)
        super(DirectoryLocator, self).__init__(**kwargs)
        path = os.path.abspath(path)
        if not os.path.isdir(path):  # pragma: no cover
            raise DistlibException('Not a directory: %r' % path)
        self.base_dir = path

    def should_include(self, filename, parent):
        """
        Should a filename be considered as a candidate for a distribution
        archive? As well as the filename, the directory which contains it
        is provided, though not used by the current implementation.
        """
        return filename.endswith(self.downloadable_extensions)

    def _iter_file_urls(self):
        """
        Yield a file:// URL for every candidate archive under the base
        directory, honouring the ``recursive`` setting. Shared by
        _get_project and get_distribution_names, which previously
        duplicated this walk.
        """
        for root, dirs, files in os.walk(self.base_dir):
            for fn in files:
                if self.should_include(fn, root):
                    fn = os.path.join(root, fn)
                    yield urlunparse(('file', '',
                                      pathname2url(os.path.abspath(fn)),
                                      '', '', ''))
            if not self.recursive:
                break

    def _get_project(self, name):
        """
        Build the standard result mapping for ``name`` from the archives
        found in the directory tree.
        """
        result = {'urls': {}, 'digests': {}}
        for url in self._iter_file_urls():
            info = self.convert_url_to_download_info(url, name)
            if info:
                self._update_version_data(result, info)
        return result

    def get_distribution_names(self):
        """
        Return all the distribution names known to this locator.
        """
        result = set()
        for url in self._iter_file_urls():
            info = self.convert_url_to_download_info(url, None)
            if info:
                result.add(info['name'])
        return result

class JSONLocator(Locator):
    """
    This locator uses special extended metadata (not available on PyPI) and is
    the basis of performant dependency resolution in distlib. Other locators
    require archive downloads before dependencies can be determined! As you
    might imagine, that can be slow.
    """
    def get_distribution_names(self):
        """
        Return all the distribution names known to this locator.
        """
        raise NotImplementedError('Not available from this locator')

    def _get_project(self, name):
        """
        Build the standard result mapping from the project's extended
        metadata, considering source distributions only.
        """
        result = {'urls': {}, 'digests': {}}
        data = get_project_data(name)
        for info in (data or {}).get('files', []):
            # Only source distributions are of interest here.
            if info['ptype'] != 'sdist' or info['pyversion'] != 'source':
                continue
            # We don't store summary in project metadata as it makes
            # the data bigger for no benefit during dependency
            # resolution
            dist = make_dist(data['name'], info['version'],
                             summary=data.get('summary',
                                              'Placeholder for summary'),
                             scheme=self.scheme)
            md = dist.metadata
            md.source_url = info['url']
            # TODO SHA256 digest
            if info.get('digest'):
                dist.digest = ('md5', info['digest'])
            md.dependencies = info.get('requirements', {})
            dist.exports = info.get('exports', {})
            result[dist.version] = dist
            result['urls'].setdefault(dist.version, set()).add(info['url'])
        return result

class DistPathLocator(Locator):
    """
    This locator finds installed distributions in a path. It can be useful for
    adding to an :class:`AggregatingLocator`.
    """
    def __init__(self, distpath, **kwargs):
        """
        Initialise an instance.

        :param distpath: A :class:`DistributionPath` instance to search.
        """
        super(DistPathLocator, self).__init__(**kwargs)
        assert isinstance(distpath, DistributionPath)
        self.distpath = distpath

    def _get_project(self, name):
        """
        Look up an installed distribution by name and return the standard
        result mapping (empty when nothing is installed under that name).
        """
        dist = self.distpath.get_distribution(name)
        if dist is None:
            return {'urls': {}, 'digests': {}}
        # NOTE(review): unlike the other locators, 'digests' here is keyed by
        # version (with a set containing None) rather than by URL - confirm
        # that consumers expect this shape.
        return {
            dist.version: dist,
            'urls': {dist.version: set([dist.source_url])},
            'digests': {dist.version: set([None])}
        }


class AggregatingLocator(Locator):
    """
    This class allows you to chain and/or merge a list of locators.
    """
    def __init__(self, *locators, **kwargs):
        """
        Initialise an instance.

        :param locators: The list of locators to search.
        :param kwargs: Passed to the superclass constructor,
                       except for:
                       * merge - if False (the default), the first successful
                         search from any of the locators is returned. If True,
                         the results from all locators are merged (this can be
                         slow).
        """
        self.merge = kwargs.pop('merge', False)
        self.locators = locators
        super(AggregatingLocator, self).__init__(**kwargs)

    def clear_cache(self):
        """Clear this locator's cache and that of every chained locator."""
        super(AggregatingLocator, self).clear_cache()
        for locator in self.locators:
            locator.clear_cache()

    def _set_scheme(self, value):
        # Propagate a scheme change to every chained locator so that they
        # all parse and match versions consistently.
        self._scheme = value
        for locator in self.locators:
            locator.scheme = value

    # Reuse the base class's getter but override the setter, so assigning
    # to .scheme fans out to all chained locators via _set_scheme.
    scheme = property(Locator.scheme.fget, _set_scheme)

    def _get_project(self, name):
        """
        Query the chained locators for ``name``. In merge mode, combine all
        their results (unioning 'urls' sets and updating 'digests');
        otherwise return the first result that satisfies the current
        matcher, if any.
        """
        result = {}
        for locator in self.locators:
            d = locator.get_project(name)
            if d:
                if self.merge:
                    # Save the accumulated 'urls'/'digests' before update(),
                    # which replaces those keys wholesale with d's values.
                    files = result.get('urls', {})
                    digests = result.get('digests', {})
                    # next line could overwrite result['urls'], result['digests']
                    result.update(d)
                    df = result.get('urls')
                    if files and df:
                        # Re-merge the saved per-version URL sets.
                        for k, v in files.items():
                            if k in df:
                                df[k] |= v
                            else:
                                df[k] = v
                    dd = result.get('digests')
                    if digests and dd:
                        dd.update(digests)
                else:
                    # See issue #18. If any dists are found and we're looking
                    # for specific constraints, we only return something if
                    # a match is found. For example, if a DirectoryLocator
                    # returns just foo (1.0) while we're looking for
                    # foo (>= 2.0), we'll pretend there was nothing there so
                    # that subsequent locators can be queried. Otherwise we
                    # would just return foo (1.0) which would then lead to a
                    # failure to find foo (>= 2.0), because other locators
                    # weren't searched. Note that this only matters when
                    # merge=False.
                    if self.matcher is None:
                        found = True
                    else:
                        found = False
                        for k in d:
                            if self.matcher.match(k):
                                found = True
                                break
                    if found:
                        result = d
                        break
        return result

    def get_distribution_names(self):
        """
        Return all the distribution names known to this locator.
        """
        result = set()
        for locator in self.locators:
            try:
                result |= locator.get_distribution_names()
            except NotImplementedError:
                # Some locators (e.g. JSON-based ones) can't enumerate names;
                # just skip them.
                pass
        return result


# We use a legacy scheme simply because most of the dists on PyPI use legacy
# versions which don't conform to PEP 426 / PEP 440.
default_locator = AggregatingLocator(
                    JSONLocator(),
                    SimpleScrapingLocator('https://pypi.python.org/simple/',
                                          timeout=3.0),
                    scheme='legacy')

# Module-level convenience: locate(requirement, ...) bound to the default
# locator.
locate = default_locator.locate

# Matches strings of the form "name (version)" or "name (== version)".
# Not referenced elsewhere in this chunk - presumably used by a
# name/version parsing helper; verify usage before changing.
NAME_VERSION_RE = re.compile(r'(?P<name>[\w-]+)\s*'
                             r'\(\s*(==\s*)?(?P<ver>[^)]+)\)$')

class DependencyFinder(object):
    """
    Locate dependencies for distributions.
    """

    def __init__(self, locator=None):
        """
        Initialise an instance, using the specified locator
        to locate distributions.

        :param locator: The locator to use; falls back to ``default_locator``
                        when not supplied.
        """
        chosen = locator or default_locator
        self.locator = chosen
        self.scheme = get_scheme(chosen.scheme)

    def add_distribution(self, dist):
        """
        Add a distribution to the finder. This will update internal information
        about who provides what.
        :param dist: The distribution to add.
        """
        logger.debug('adding distribution %s', dist)
        key = dist.key
        self.dists_by_name[key] = dist
        self.dists[(key, dist.version)] = dist
        for entry in dist.provides:
            pname, pver = parse_name_and_version(entry)
            logger.debug('Add to provided: %s, %s, %s', pname, pver, dist)
            self.provided.setdefault(pname, set()).add((pver, dist))

    def remove_distribution(self, dist):
        """
        Remove a distribution from the finder. This will update internal
        information about who provides what.
        :param dist: The distribution to remove.
        """
        logger.debug('removing distribution %s', dist)
        key = dist.key
        del self.dists_by_name[key]
        del self.dists[(key, dist.version)]
        for entry in dist.provides:
            pname, pver = parse_name_and_version(entry)
            logger.debug('Remove from provided: %s, %s, %s', pname, pver, dist)
            providers = self.provided[pname]
            providers.remove((pver, dist))
            if not providers:
                # No distribution provides this name any more.
                del self.provided[pname]

    def get_matcher(self, reqt):
        """
        Get a version matcher for a requirement.
        :param reqt: The requirement
        :type reqt: str
        :return: A version matcher (an instance of
                 :class:`distlib.version.Matcher`).
        """
        try:
            matcher = self.scheme.matcher(reqt)
        except UnsupportedVersionError:  # pragma: no cover
            # XXX compat-mode if cannot read the version
            name = reqt.split()[0]
            matcher = self.scheme.matcher(name)
        return matcher

    def find_providers(self, reqt):
        """
        Find the distributions which can fulfill a requirement.

        :param reqt: The requirement.
         :type reqt: str
        :return: A set of distribution which can fulfill the requirement.
        """
        matcher = self.get_matcher(reqt)
        name = matcher.key   # case-insensitive
        result = set()
        provided = self.provided
        if name in provided:
            for version, provider in provided[name]:
                try:
                    match = matcher.match(version)
                except UnsupportedVersionError:
                    match = False

                if match:
                    result.add(provider)
                    break
        return result

    def try_to_replace(self, provider, other, problems):
        """
        Attempt to replace one provider with another. This is typically used
        when resolving dependencies from multiple sources, e.g. A requires
        (B >= 1.0) while C requires (B >= 1.1).

        For successful replacement, ``provider`` must meet all the requirements
        which ``other`` fulfills.

        :param provider: The provider we are trying to replace with.
        :param other: The provider we're trying to replace.
        :param problems: If False is returned, this will contain what
                         problems prevented replacement. This is currently
                         a tuple of the literal string 'cantreplace',
                         ``provider``, ``other``  and the set of requirements
                         that ``provider`` couldn't fulfill.
        :return: True if we can replace ``other`` with ``provider``, else
                 False.
        """
        rlist = self.reqts[other]
        unmatched = set()
        for s in rlist:
            matcher = self.get_matcher(s)
            if not matcher.match(provider.version):
                unmatched.add(s)
        if unmatched:
            # can't replace other with provider
            problems.add(('cantreplace', provider, other,
                          frozenset(unmatched)))
            result = False
        else:
            # can replace other with provider
            self.remove_distribution(other)
            del self.reqts[other]
            for s in rlist:
                self.reqts.setdefault(provider, set()).add(s)
            self.add_distribution(provider)
            result = True
        return result

    def find(self, requirement, meta_extras=None, prereleases=False):
        """
        Find a distribution and all distributions it depends on.

        :param requirement: The requirement specifying the distribution to
                            find, or a Distribution instance.
        :param meta_extras: A list of meta extras such as :test:, :build: and
                            so on.
        :param prereleases: If ``True``, allow pre-release versions to be
                            returned - otherwise, don't return prereleases
                            unless they're all that's available.

        Return a set of :class:`Distribution` instances and a set of
        problems.

        The distributions returned should be such that they have the
        :attr:`required` attribute set to ``True`` if they were
        from the ``requirement`` passed to ``find()``, and they have the
        :attr:`build_time_dependency` attribute set to ``True`` unless they
        are post-installation dependencies of the ``requirement``.

        The problems should be a tuple consisting of the string
        ``'unsatisfied'`` and the requirement which couldn't be satisfied
        by any distribution known to the locator.
        """

        # Reset per-call resolution state; this method is not re-entrant.
        self.provided = {}
        self.dists = {}
        self.dists_by_name = {}
        self.reqts = {}

        meta_extras = set(meta_extras or [])
        if ':*:' in meta_extras:
            meta_extras.remove(':*:')
            # :meta: and :run: are implicitly included
            meta_extras |= set([':test:', ':build:', ':dev:'])

        if isinstance(requirement, Distribution):
            dist = odist = requirement
            logger.debug('passed %s as requirement', odist)
        else:
            dist = odist = self.locator.locate(requirement,
                                               prereleases=prereleases)
            if dist is None:
                raise DistlibException('Unable to locate %r' % requirement)
            logger.debug('located %s', odist)
        dist.requested = True
        problems = set()
        # Worklist traversal of the dependency graph starting from the
        # originally requested distribution.
        todo = set([dist])
        install_dists = set([odist])
        while todo:
            dist = todo.pop()
            name = dist.key     # case-insensitive
            if name not in self.dists_by_name:
                self.add_distribution(dist)
            else:
                #import pdb; pdb.set_trace()
                other = self.dists_by_name[name]
                if other != dist:
                    self.try_to_replace(dist, other, problems)

            # Install-time requirements are always followed; test/build/dev
            # requirements only for the requested meta extras, and only for
            # distributions that will actually be installed.
            ireqts = dist.run_requires | dist.meta_requires
            sreqts = dist.build_requires
            ereqts = set()
            if dist in install_dists:
                for key in ('test', 'build', 'dev'):
                    e = ':%s:' % key
                    if e in meta_extras:
                        ereqts |= getattr(dist, '%s_requires' % key)
            all_reqts = ireqts | sreqts | ereqts
            for r in all_reqts:
                providers = self.find_providers(r)
                if not providers:
                    logger.debug('No providers found for %r', r)
                    provider = self.locator.locate(r, prereleases=prereleases)
                    # If no provider is found and we didn't consider
                    # prereleases, consider them now.
                    if provider is None and not prereleases:
                        provider = self.locator.locate(r, prereleases=True)
                    if provider is None:
                        logger.debug('Cannot satisfy %r', r)
                        problems.add(('unsatisfied', r))
                    else:
                        n, v = provider.key, provider.version
                        if (n, v) not in self.dists:
                            todo.add(provider)
                        providers.add(provider)
                        if r in ireqts and dist in install_dists:
                            install_dists.add(provider)
                            logger.debug('Adding %s to install_dists',
                                         provider.name_and_version)
                for p in providers:
                    name = p.key
                    if name not in self.dists_by_name:
                        self.reqts.setdefault(p, set()).add(r)
                    else:
                        other = self.dists_by_name[name]
                        if other != p:
                            # see if other can be replaced by p
                            self.try_to_replace(p, other, problems)

        # Anything not reachable through install-time requirements is a
        # build-time-only dependency.
        dists = set(self.dists.values())
        for dist in dists:
            dist.build_time_dependency = dist not in install_dists
            if dist.build_time_dependency:
                logger.debug('%s is a build-time dependency only.',
                             dist.name_and_version)
        logger.debug('find done for %s', odist)
        return dists, problems
_vendor/distlib/version.py000064400000056237151733136310011711 0ustar00# -*- coding: utf-8 -*-
#
# Copyright (C) 2012-2016 The Python Software Foundation.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
"""
Implementation of a flexible versioning scheme providing support for PEP-440,
setuptools-compatible and semantic versioning.
"""

import logging
import re

from .compat import string_types

__all__ = ['NormalizedVersion', 'NormalizedMatcher',
           'LegacyVersion', 'LegacyMatcher',
           'SemanticVersion', 'SemanticMatcher',
           'UnsupportedVersionError', 'get_scheme']

logger = logging.getLogger(__name__)


class UnsupportedVersionError(ValueError):
    """Raised when a version string cannot be handled by a scheme."""


class Version(object):
    """
    Abstract base class for version objects.

    Subclasses implement :meth:`parse`, which converts a version string
    into a non-empty tuple (``_parts``); all comparisons and hashing are
    delegated to tuple comparison of ``_parts``.
    """

    def __init__(self, s):
        self._string = s = s.strip()
        self._parts = parts = self.parse(s)
        assert isinstance(parts, tuple)
        assert len(parts) > 0

    def parse(self, s):
        """Convert *s* into a comparison key tuple (subclass hook)."""
        raise NotImplementedError('please implement in a subclass')

    def _check_compatible(self, other):
        # Only versions of exactly the same class are comparable.
        if type(self) != type(other):
            raise TypeError('cannot compare %r and %r' % (self, other))

    def __eq__(self, other):
        self._check_compatible(other)
        return self._parts == other._parts

    def __ne__(self, other):
        return not (self == other)

    def __lt__(self, other):
        self._check_compatible(other)
        return self._parts < other._parts

    def __gt__(self, other):
        return not ((self < other) or (self == other))

    def __le__(self, other):
        return (self < other) or (self == other)

    def __ge__(self, other):
        return (self > other) or (self == other)

    # See http://docs.python.org/reference/datamodel#object.__hash__
    def __hash__(self):
        # Must agree with __eq__, which compares _parts.
        return hash(self._parts)

    def __repr__(self):
        return "%s('%s')" % (self.__class__.__name__, self._string)

    def __str__(self):
        return self._string

    @property
    def is_prerelease(self):
        """True if this version denotes a pre-release (subclass hook)."""
        raise NotImplementedError('Please implement in subclasses.')


class Matcher(object):
    """
    Parse and evaluate a requirement string such as ``name (>= 1.0, < 2.0)``.

    Subclasses must set :attr:`version_class` to the :class:`Version`
    subclass used to parse the versions appearing in constraints.
    """
    version_class = None

    # name, optionally followed by a parenthesised constraint list
    dist_re = re.compile(r"^(\w[\s\w'.-]*)(\((.*)\))?")
    # one constraint: optional comparison operator, then a version string
    comp_re = re.compile(r'^(<=|>=|<|>|!=|={2,3}|~=)?\s*([^\s,]+)$')
    # purely numeric dotted version (used to validate '.*' prefixes)
    num_re = re.compile(r'^\d+(\.\d+)*$')

    # value is either a callable or the name of a method
    _operators = {
        '<': lambda v, c, p: v < c,
        '>': lambda v, c, p: v > c,
        '<=': lambda v, c, p: v == c or v < c,
        '>=': lambda v, c, p: v == c or v > c,
        '==': lambda v, c, p: v == c,
        '===': lambda v, c, p: v == c,
        # by default, compatible => >=.
        '~=': lambda v, c, p: v == c or v > c,
        '!=': lambda v, c, p: v != c,
    }

    def __init__(self, s):
        """
        Parse requirement string *s* into :attr:`name`, :attr:`key` (the
        lower-cased name) and :attr:`_parts`, a tuple of
        (operator, version, is_prefix) constraint triples.

        :raises ValueError: if *s* is not a valid requirement string, or
            if :attr:`version_class` has not been set by a subclass.
        """
        if self.version_class is None:
            raise ValueError('Please specify a version class')
        self._string = s = s.strip()
        m = self.dist_re.match(s)
        if not m:
            raise ValueError('Not valid: %r' % s)
        groups = m.groups('')
        self.name = groups[0].strip()
        self.key = self.name.lower()    # for case-insensitive comparisons
        clist = []
        if groups[2]:
            constraints = [c.strip() for c in groups[2].split(',')]
            for c in constraints:
                m = self.comp_re.match(c)
                if not m:
                    raise ValueError('Invalid %r in %r' % (c, s))
                groups = m.groups()
                # A bare version with no operator means 'compatible' (~=).
                op = groups[0] or '~='
                s = groups[1]
                if s.endswith('.*'):
                    if op not in ('==', '!='):
                        raise ValueError('\'.*\' not allowed for '
                                         '%r constraints' % op)
                    # Could be a partial version (e.g. for '2.*') which
                    # won't parse as a version, so keep it as a string
                    vn, prefix = s[:-2], True
                    if not self.num_re.match(vn):
                        # Just to check that vn is a valid version
                        self.version_class(vn)
                else:
                    # Should parse as a version, so we can create an
                    # instance for the comparison
                    vn, prefix = self.version_class(s), False
                clist.append((op, vn, prefix))
        self._parts = tuple(clist)

    def match(self, version):
        """
        Check if the provided version matches the constraints.

        :param version: The version to match against this instance.
        :type version: String or :class:`Version` instance.
        """
        if isinstance(version, string_types):
            version = self.version_class(version)
        for operator, constraint, prefix in self._parts:
            f = self._operators.get(operator)
            # Subclasses may map an operator to a method name instead of
            # a callable; resolve it on the instance.
            if isinstance(f, string_types):
                f = getattr(self, f)
            if not f:
                msg = ('%r not implemented '
                       'for %s' % (operator, self.__class__.__name__))
                raise NotImplementedError(msg)
            if not f(version, constraint, prefix):
                return False
        return True

    @property
    def exact_version(self):
        # The pinned version if this matcher is a single '=='/'==='
        # constraint; otherwise None.
        result = None
        if len(self._parts) == 1 and self._parts[0][0] in ('==', '==='):
            result = self._parts[0][1]
        return result

    def _check_compatible(self, other):
        if type(self) != type(other) or self.name != other.name:
            raise TypeError('cannot compare %s and %s' % (self, other))

    def __eq__(self, other):
        self._check_compatible(other)
        return self.key == other.key and self._parts == other._parts

    def __ne__(self, other):
        return not self.__eq__(other)

    # See http://docs.python.org/reference/datamodel#object.__hash__
    def __hash__(self):
        return hash(self.key) + hash(self._parts)

    def __repr__(self):
        return "%s(%r)" % (self.__class__.__name__, self._string)

    def __str__(self):
        return self._string


PEP440_VERSION_RE = re.compile(r'^v?(\d+!)?(\d+(\.\d+)*)((a|b|c|rc)(\d+))?'
                               r'(\.(post)(\d+))?(\.(dev)(\d+))?'
                               r'(\+([a-zA-Z\d]+(\.[a-zA-Z\d]+)?))?$')


def _pep_440_key(s):
    s = s.strip()
    m = PEP440_VERSION_RE.match(s)
    if not m:
        raise UnsupportedVersionError('Not a valid version: %s' % s)
    groups = m.groups()
    nums = tuple(int(v) for v in groups[1].split('.'))
    while len(nums) > 1 and nums[-1] == 0:
        nums = nums[:-1]

    if not groups[0]:
        epoch = 0
    else:
        epoch = int(groups[0])
    pre = groups[4:6]
    post = groups[7:9]
    dev = groups[10:12]
    local = groups[13]
    if pre == (None, None):
        pre = ()
    else:
        pre = pre[0], int(pre[1])
    if post == (None, None):
        post = ()
    else:
        post = post[0], int(post[1])
    if dev == (None, None):
        dev = ()
    else:
        dev = dev[0], int(dev[1])
    if local is None:
        local = ()
    else:
        parts = []
        for part in local.split('.'):
            # to ensure that numeric compares as > lexicographic, avoid
            # comparing them directly, but encode a tuple which ensures
            # correct sorting
            if part.isdigit():
                part = (1, int(part))
            else:
                part = (0, part)
            parts.append(part)
        local = tuple(parts)
    if not pre:
        # either before pre-release, or final release and after
        if not post and dev:
            # before pre-release
            pre = ('a', -1)     # to sort before a0
        else:
            pre = ('z',)        # to sort after all pre-releases
    # now look at the state of post and dev.
    if not post:
        post = ('_',)   # sort before 'a'
    if not dev:
        dev = ('final',)

    #print('%s -> %s' % (s, m.groups()))
    return epoch, nums, pre, post, dev, local


_normalized_key = _pep_440_key


class NormalizedVersion(Version):
    """A rational version.

    Good:
        1.2         # equivalent to "1.2.0"
        1.2.0
        1.2a1
        1.2.3a2
        1.2.3b1
        1.2.3c1
        1.2.3.4
        TODO: fill this out

    Bad:
        1           # minimum two numbers
        1.2a        # release level must have a release serial
        1.2.3b
    """
    def parse(self, s):
        """Parse *s* as a PEP 440 version and return its sort key tuple.

        Also records ``_release_clause`` (the release numbers with
        trailing zeroes preserved) for use in prefix matching.
        Raises UnsupportedVersionError (from _normalized_key) if invalid.
        """
        result = _normalized_key(s)
        # _normalized_key loses trailing zeroes in the release
        # clause, since that's needed to ensure that X.Y == X.Y.0 == X.Y.0.0
        # However, PEP 440 prefix matching needs it: for example,
        # (~= 1.4.5.0) matches differently to (~= 1.4.5.0.0).
        m = PEP440_VERSION_RE.match(s)      # must succeed
        groups = m.groups()
        self._release_clause = tuple(int(v) for v in groups[1].split('.'))
        return result

    # Tags whose presence in the sort key marks a pre-release.
    PREREL_TAGS = set(['a', 'b', 'c', 'rc', 'dev'])

    @property
    def is_prerelease(self):
        # _parts is (epoch, nums, pre, post, dev, local); any non-empty
        # component starting with a pre-release tag makes this a
        # pre-release.
        return any(t[0] in self.PREREL_TAGS for t in self._parts if t)


def _match_prefix(x, y):
    x = str(x)
    y = str(y)
    if x == y:
        return True
    if not x.startswith(y):
        return False
    n = len(y)
    return x[n] == '.'


class NormalizedMatcher(Matcher):
    """Matcher implementing the PEP 440 comparison operators."""
    version_class = NormalizedVersion

    # value is either a callable or the name of a method
    _operators = {
        '~=': '_match_compatible',
        '<': '_match_lt',
        '>': '_match_gt',
        '<=': '_match_le',
        '>=': '_match_ge',
        '==': '_match_eq',
        '===': '_match_arbitrary',
        '!=': '_match_ne',
    }

    def _adjust_local(self, version, constraint, prefix):
        # Drop the version's local segment ('+...') when the constraint
        # doesn't mention one, so that e.g. 1.0+local satisfies '== 1.0'.
        if prefix:
            # Here constraint is still a string (from a '.*' clause).
            strip_local = '+' not in constraint and version._parts[-1]
        else:
            # both constraint and version are
            # NormalizedVersion instances.
            # If constraint does not have a local component,
            # ensure the version doesn't, either.
            strip_local = not constraint._parts[-1] and version._parts[-1]
        if strip_local:
            s = version._string.split('+', 1)[0]
            version = self.version_class(s)
        return version, constraint

    def _match_lt(self, version, constraint, prefix):
        # '< C': version must sort below C and must not lie within C's
        # release series (e.g. '< 1.7' does not match 1.7.0a1).
        version, constraint = self._adjust_local(version, constraint, prefix)
        if version >= constraint:
            return False
        release_clause = constraint._release_clause
        pfx = '.'.join([str(i) for i in release_clause])
        return not _match_prefix(version, pfx)

    def _match_gt(self, version, constraint, prefix):
        # '> C': version must sort above C and must not lie within C's
        # release series.
        version, constraint = self._adjust_local(version, constraint, prefix)
        if version <= constraint:
            return False
        release_clause = constraint._release_clause
        pfx = '.'.join([str(i) for i in release_clause])
        return not _match_prefix(version, pfx)

    def _match_le(self, version, constraint, prefix):
        version, constraint = self._adjust_local(version, constraint, prefix)
        return version <= constraint

    def _match_ge(self, version, constraint, prefix):
        version, constraint = self._adjust_local(version, constraint, prefix)
        return version >= constraint

    def _match_eq(self, version, constraint, prefix):
        # '== C' exact match, or '== C.*' series (prefix) match.
        version, constraint = self._adjust_local(version, constraint, prefix)
        if not prefix:
            result = (version == constraint)
        else:
            result = _match_prefix(version, constraint)
        return result

    def _match_arbitrary(self, version, constraint, prefix):
        # '===': plain string equality, no version semantics at all.
        return str(version) == str(constraint)

    def _match_ne(self, version, constraint, prefix):
        version, constraint = self._adjust_local(version, constraint, prefix)
        if not prefix:
            result = (version != constraint)
        else:
            result = not _match_prefix(version, constraint)
        return result

    def _match_compatible(self, version, constraint, prefix):
        # '~= C': at least C, and within the series obtained by dropping
        # C's final release component (e.g. '~= 2.2.3' allows 2.2.x).
        version, constraint = self._adjust_local(version, constraint, prefix)
        if version == constraint:
            return True
        if version < constraint:
            return False
#        if not prefix:
#            return True
        release_clause = constraint._release_clause
        if len(release_clause) > 1:
            release_clause = release_clause[:-1]
        pfx = '.'.join([str(i) for i in release_clause])
        return _match_prefix(version, pfx)

# Ordered clean-ups applied to a whole candidate version string before
# trying to interpret it as a semantic version.
_REPLACEMENTS = (
    (re.compile('[.+-]$'), ''),                     # remove trailing puncts
    (re.compile(r'^[.](\d)'), r'0.\1'),             # .N -> 0.N at start
    (re.compile('^[.-]'), ''),                      # remove leading puncts
    (re.compile(r'^\((.*)\)$'), r'\1'),             # remove parentheses
    (re.compile(r'^v(ersion)?\s*(\d+)'), r'\2'),    # remove leading v(ersion)
    (re.compile(r'^r(ev)?\s*(\d+)'), r'\2'),        # remove leading r(ev)
    (re.compile('[.]{2,}'), '.'),                   # multiple runs of '.'
    (re.compile(r'\b(alfa|apha)\b'), 'alpha'),      # misspelt alpha
    (re.compile(r'\b(pre-alpha|prealpha)\b'),
                'pre.alpha'),                       # standardise
    (re.compile(r'\(beta\)$'), 'beta'),             # remove parentheses
)

# Clean-ups applied only to the non-numeric remainder (suffix) of a
# candidate version string.
_SUFFIX_REPLACEMENTS = (
    (re.compile('^[:~._+-]+'), ''),                   # remove leading puncts
    (re.compile('[,*")([\]]'), ''),                   # remove unwanted chars
    (re.compile('[~:+_ -]'), '.'),                    # replace illegal chars
    (re.compile('[.]{2,}'), '.'),                   # multiple runs of '.'
    (re.compile(r'\.$'), ''),                       # trailing '.'
)

# Leading dotted-integer portion of a version, e.g. '1.2' in '1.2rc1'.
_NUMERIC_PREFIX = re.compile(r'(\d+(\.\d+)*)')


def _suggest_semantic_version(s):
    """
    Try to suggest a semantic form for a version for which
    _suggest_normalized_version couldn't come up with anything.

    Returns the suggested version string, or None if no valid semantic
    version could be derived.
    """
    cleaned = s.strip().lower()
    for pattern, replacement in _REPLACEMENTS:
        cleaned = pattern.sub(replacement, cleaned)
    if not cleaned:
        cleaned = '0.0.0'

    # Split into a numeric 'major.minor.patch' prefix and a textual
    # suffix, padding or folding components as needed.
    m = _NUMERIC_PREFIX.match(cleaned)
    if m is None:
        prefix = '0.0.0'
        suffix = cleaned
    else:
        nums = [int(i) for i in m.groups()[0].split('.')]
        while len(nums) < 3:
            nums.append(0)
        # Any numeric components beyond the first three are folded into
        # the suffix.
        extra, nums = nums[3:], nums[:3]
        suffix = cleaned[m.end():]
        if extra:
            suffix = '.'.join([str(i) for i in extra]) + suffix
        prefix = '.'.join([str(i) for i in nums])
        suffix = suffix.strip()
    if suffix:
        # massage the suffix.
        for pattern, replacement in _SUFFIX_REPLACEMENTS:
            suffix = pattern.sub(replacement, suffix)

    if suffix:
        # 'dev' suffixes become pre-release markers; anything else is
        # treated as build metadata.
        sep = '-' if 'dev' in suffix else '+'
        candidate = prefix + sep + suffix
    else:
        candidate = prefix
    return candidate if is_semver(candidate) else None


def _suggest_normalized_version(s):
    """Suggest a normalized version close to the given version string.

    If you have a version string that isn't rational (i.e. NormalizedVersion
    doesn't like it) then you might be able to get an equivalent (or close)
    rational version from this function.

    This does a number of simple normalizations to the given string, based
    on observation of versions currently in use on PyPI. Given a dump of
    those version during PyCon 2009, 4287 of them:
    - 2312 (53.93%) match NormalizedVersion without change
      with the automatic suggestion
    - 3474 (81.04%) match when using this suggestion method

    @param s {str} An irrational version string.
    @returns A rational version string, or None, if couldn't determine one.

    NOTE: the substitutions below are order-dependent; each rule assumes
    the rewrites performed by the rules before it.
    """
    try:
        _normalized_key(s)
        return s   # already rational
    except UnsupportedVersionError:
        pass

    rs = s.lower()

    # part of this could use maketrans
    for orig, repl in (('-alpha', 'a'), ('-beta', 'b'), ('alpha', 'a'),
                       ('beta', 'b'), ('rc', 'c'), ('-final', ''),
                       ('-pre', 'c'),
                       ('-release', ''), ('.release', ''), ('-stable', ''),
                       ('+', '.'), ('_', '.'), (' ', ''), ('.final', ''),
                       ('final', '')):
        rs = rs.replace(orig, repl)

    # if something ends with dev or pre, we add a 0
    rs = re.sub(r"pre$", r"pre0", rs)
    rs = re.sub(r"dev$", r"dev0", rs)

    # if we have something like "b-2" or "a.2" at the end of the
    # version, that is probably beta, alpha, etc
    # let's remove the dash or dot
    rs = re.sub(r"([abc]|rc)[\-\.](\d+)$", r"\1\2", rs)

    # 1.0-dev-r371 -> 1.0.dev371
    # 0.1-dev-r79 -> 0.1.dev79
    rs = re.sub(r"[\-\.](dev)[\-\.]?r?(\d+)$", r".\1\2", rs)

    # Clean: 2.0.a.3, 2.0.b1, 0.9.0~c1
    rs = re.sub(r"[.~]?([abc])\.?", r"\1", rs)

    # Clean: v0.3, v1.0
    if rs.startswith('v'):
        rs = rs[1:]

    # Clean leading '0's on numbers.
    #TODO: unintended side-effect on, e.g., "2003.05.09"
    # PyPI stats: 77 (~2%) better
    rs = re.sub(r"\b0+(\d+)(?!\d)", r"\1", rs)

    # Clean a/b/c with no version. E.g. "1.0a" -> "1.0a0". Setuptools infers
    # zero.
    # PyPI stats: 245 (7.56%) better
    rs = re.sub(r"(\d+[abc])$", r"\g<1>0", rs)

    # the 'dev-rNNN' tag is a dev tag
    rs = re.sub(r"\.?(dev-r|dev\.r)\.?(\d+)$", r".dev\2", rs)

    # clean the - when used as a pre delimiter
    rs = re.sub(r"-(a|b|c)(\d+)$", r"\1\2", rs)

    # a terminal "dev" or "devel" can be changed into ".dev0"
    rs = re.sub(r"[\.\-](dev|devel)$", r".dev0", rs)

    # a terminal "dev" can be changed into ".dev0"
    rs = re.sub(r"(?![\.\-])dev$", r".dev0", rs)

    # a terminal "final" or "stable" can be removed
    rs = re.sub(r"(final|stable)$", "", rs)

    # The 'r' and the '-' tags are post release tags
    #   0.4a1.r10       ->  0.4a1.post10
    #   0.9.33-17222    ->  0.9.33.post17222
    #   0.9.33-r17222   ->  0.9.33.post17222
    rs = re.sub(r"\.?(r|-|-r)\.?(\d+)$", r".post\2", rs)

    # Clean 'r' instead of 'dev' usage:
    #   0.9.33+r17222   ->  0.9.33.dev17222
    #   1.0dev123       ->  1.0.dev123
    #   1.0.git123      ->  1.0.dev123
    #   1.0.bzr123      ->  1.0.dev123
    #   0.1a0dev.123    ->  0.1a0.dev123
    # PyPI stats:  ~150 (~4%) better
    rs = re.sub(r"\.?(dev|git|bzr)\.?(\d+)$", r".dev\2", rs)

    # Clean '.pre' (normalized from '-pre' above) instead of 'c' usage:
    #   0.2.pre1        ->  0.2c1
    #   0.2-c1         ->  0.2c1
    #   1.0preview123   ->  1.0c123
    # PyPI stats: ~21 (0.62%) better
    rs = re.sub(r"\.?(pre|preview|-c)(\d+)$", r"c\g<2>", rs)

    # Tcl/Tk uses "px" for their post release markers
    rs = re.sub(r"p(\d+)$", r".post\1", rs)

    # Accept the suggestion only if it now parses as a normalized version.
    try:
        _normalized_key(rs)
    except UnsupportedVersionError:
        rs = None
    return rs

#
#   Legacy version processing (distribute-compatible)
#

_VERSION_PART = re.compile(r'([a-z]+|\d+|[\.-])', re.I)
_VERSION_REPLACE = {
    'pre': 'c',
    'preview': 'c',
    '-': 'final-',
    'rc': 'c',
    'dev': '@',
    '': None,
    '.': None,
}


def _legacy_key(s):
    def get_parts(s):
        result = []
        for p in _VERSION_PART.split(s.lower()):
            p = _VERSION_REPLACE.get(p, p)
            if p:
                if '0' <= p[:1] <= '9':
                    p = p.zfill(8)
                else:
                    p = '*' + p
                result.append(p)
        result.append('*final')
        return result

    result = []
    for p in get_parts(s):
        if p.startswith('*'):
            if p < '*final':
                while result and result[-1] == '*final-':
                    result.pop()
            while result and result[-1] == '00000000':
                result.pop()
        result.append(p)
    return tuple(result)


class LegacyVersion(Version):
    """Version using the setuptools-compatible (pre-PEP-440) scheme."""

    def parse(self, s):
        return _legacy_key(s)

    @property
    def is_prerelease(self):
        # Any '*'-prefixed part sorting before '*final' marks a
        # pre-release tag (e.g. '*a', '*beta', '*c', '*@' for dev).
        return any(isinstance(x, string_types) and x.startswith('*') and
                   x < '*final'
                   for x in self._parts)


class LegacyMatcher(Matcher):
    """Matcher for the setuptools-compatible legacy version scheme."""
    version_class = LegacyVersion

    # Inherit the default operators, but give '~=' prefix semantics.
    _operators = dict(Matcher._operators)
    _operators['~='] = '_match_compatible'

    # Raw string avoids the invalid '\d' escape in a plain string literal
    # (a DeprecationWarning on modern Pythons); the pattern is unchanged.
    numeric_re = re.compile(r'^(\d+(\.\d+)*)')

    def _match_compatible(self, version, constraint, prefix):
        # '~= C': at least C, and within the series obtained by dropping
        # the last numeric component of C's leading dotted-number prefix.
        if version < constraint:
            return False
        m = self.numeric_re.match(str(constraint))
        if not m:
            logger.warning('Cannot compute compatible match for version %s '
                           'and constraint %s', version, constraint)
            return True
        s = m.groups()[0]
        if '.' in s:
            s = s.rsplit('.', 1)[0]
        return _match_prefix(version, s)

#
#   Semantic versioning
#

_SEMVER_RE = re.compile(r'^(\d+)\.(\d+)\.(\d+)'
                        r'(-[a-z0-9]+(\.[a-z0-9-]+)*)?'
                        r'(\+[a-z0-9]+(\.[a-z0-9-]+)*)?$', re.I)


def is_semver(s):
    return _SEMVER_RE.match(s)


def _semantic_key(s):
    def make_tuple(s, absent):
        if s is None:
            result = (absent,)
        else:
            parts = s[1:].split('.')
            # We can't compare ints and strings on Python 3, so fudge it
            # by zero-filling numeric values so simulate a numeric comparison
            result = tuple([p.zfill(8) if p.isdigit() else p for p in parts])
        return result

    m = is_semver(s)
    if not m:
        raise UnsupportedVersionError(s)
    groups = m.groups()
    major, minor, patch = [int(i) for i in groups[:3]]
    # choose the '|' and '*' so that versions sort correctly
    pre, build = make_tuple(groups[3], '|'), make_tuple(groups[5], '*')
    return (major, minor, patch), pre, build


class SemanticVersion(Version):
    """Version implementing the semantic versioning scheme."""

    def parse(self, s):
        return _semantic_key(s)

    @property
    def is_prerelease(self):
        # The pre-release component of the key is ('|',) when absent;
        # anything else denotes a pre-release.
        pre = self._parts[1]
        return pre[0] != '|'


class SemanticMatcher(Matcher):
    # Uses the default operator set inherited from Matcher, applied to
    # semantic versions.
    version_class = SemanticVersion


class VersionScheme(object):
    """Bundle of a sort-key function, a matcher class and an optional
    suggester making up a named versioning scheme."""

    def __init__(self, key, matcher, suggester=None):
        self.key = key              # sort key function for version strings
        self.matcher = matcher      # Matcher subclass
        self.suggester = suggester  # maps an invalid version to a valid one

    def is_valid_version(self, s):
        """Return True if *s* parses under this scheme's version class."""
        try:
            self.matcher.version_class(s)
        except UnsupportedVersionError:
            return False
        return True

    def is_valid_matcher(self, s):
        """Return True if *s* parses as a requirement under this scheme."""
        try:
            self.matcher(s)
        except UnsupportedVersionError:
            return False
        return True

    def is_valid_constraint_list(self, s):
        """
        Used for processing some metadata fields
        """
        return self.is_valid_matcher('dummy_name (%s)' % s)

    def suggest(self, s):
        """Return a suggested valid version for *s*, or None when no
        suggester is configured."""
        if self.suggester is None:
            return None
        return self.suggester(s)

_SCHEMES = {
    'normalized': VersionScheme(_normalized_key, NormalizedMatcher,
                                _suggest_normalized_version),
    # VersionScheme.suggest() invokes self.suggester(s) with a single
    # argument (suggester is a plain attribute, not a bound method), so
    # the suggester must take exactly one parameter: the previous
    # 'lambda self, s: s' signature made legacy suggest() raise TypeError.
    # Legacy versions accept any string, so suggestion is the identity.
    'legacy': VersionScheme(_legacy_key, LegacyMatcher, lambda s: s),
    'semantic': VersionScheme(_semantic_key, SemanticMatcher,
                              _suggest_semantic_version),
}

# The default scheme is the PEP 440 ('normalized') one.
_SCHEMES['default'] = _SCHEMES['normalized']


def get_scheme(name):
    """Return the :class:`VersionScheme` registered under *name*.

    :raises ValueError: if *name* is not a known scheme name.
    """
    if name in _SCHEMES:
        return _SCHEMES[name]
    raise ValueError('unknown scheme name: %r' % name)
_vendor/distlib/wheel.py000064400000114313151733136310011316 0ustar00# -*- coding: utf-8 -*-
#
# Copyright (C) 2013-2016 Vinay Sajip.
# Licensed to the Python Software Foundation under a contributor agreement.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
from __future__ import unicode_literals

import base64
import codecs
import datetime
import distutils.util
from email import message_from_file
import hashlib
import imp
import json
import logging
import os
import posixpath
import re
import shutil
import sys
import tempfile
import zipfile

from . import __version__, DistlibException
from .compat import sysconfig, ZipFile, fsdecode, text_type, filter
from .database import InstalledDistribution
from .metadata import Metadata, METADATA_FILENAME
from .util import (FileOperator, convert_path, CSVReader, CSVWriter, Cache,
                   cached_property, get_cache_base, read_exports, tempdir)
from .version import NormalizedVersion, UnsupportedVersionError

logger = logging.getLogger(__name__)

cache = None    # created when needed

# Two-letter implementation prefix used in wheel tags (cp/pp/jy/ip).
if hasattr(sys, 'pypy_version_info'):
    IMP_PREFIX = 'pp'
elif sys.platform.startswith('java'):
    IMP_PREFIX = 'jy'
elif sys.platform == 'cli':
    IMP_PREFIX = 'ip'
else:
    IMP_PREFIX = 'cp'

# Python version without a dot, e.g. '27' or '35'.
VER_SUFFIX = sysconfig.get_config_var('py_version_nodot')
if not VER_SUFFIX:   # pragma: no cover
    VER_SUFFIX = '%s%s' % sys.version_info[:2]
PYVER = 'py' + VER_SUFFIX
IMPVER = IMP_PREFIX + VER_SUFFIX

# Platform tag with '-' and '.' normalised to '_', e.g. 'linux_x86_64'.
ARCH = distutils.util.get_platform().replace('-', '_').replace('.', '_')

ABI = sysconfig.get_config_var('SOABI')
if ABI and ABI.startswith('cpython-'):
    ABI = ABI.replace('cpython-', 'cp')
else:
    def _derive_abi():
        # Reconstruct the ABI tag from build-time configuration when
        # SOABI is unavailable (e.g. on Windows or older Pythons).
        parts = ['cp', VER_SUFFIX]
        if sysconfig.get_config_var('Py_DEBUG'):
            parts.append('d')
        if sysconfig.get_config_var('WITH_PYMALLOC'):
            parts.append('m')
        if sysconfig.get_config_var('Py_UNICODE_SIZE') == 4:
            parts.append('u')
        return ''.join(parts)
    ABI = _derive_abi()
    del _derive_abi

# Full wheel filename: name-version(-build)-pytag-abitag-platformtag.whl
FILENAME_RE = re.compile(r'''
(?P<nm>[^-]+)
-(?P<vn>\d+[^-]*)
(-(?P<bn>\d+[^-]*))?
-(?P<py>\w+\d+(\.\w+\d+)*)
-(?P<bi>\w+)
-(?P<ar>\w+(\.\w+)*)
\.whl$
''', re.IGNORECASE | re.VERBOSE)

# Just 'name-version(-build)', without tags or the .whl extension.
NAME_VERSION_RE = re.compile(r'''
(?P<nm>[^-]+)
-(?P<vn>\d+[^-]*)
(-(?P<bn>\d+[^-]*))?$
''', re.IGNORECASE | re.VERBOSE)

# Patterns and markers for recognising script shebang lines (bytes).
SHEBANG_RE = re.compile(br'\s*#![^\r\n]*')
SHEBANG_DETAIL_RE = re.compile(br'^(\s*#!("[^"]+"|\S+))\s+(.*)$')
SHEBANG_PYTHON = b'#!python'
SHEBANG_PYTHONW = b'#!pythonw'

# Normalise path separators to '/' (a no-op on POSIX systems).
if os.sep == '/':
    to_posix = lambda o: o
else:
    to_posix = lambda o: o.replace(os.sep, '/')


class Mounter(object):
    """
    Old-style (PEP 302) import finder/loader which serves C extension
    modules registered via :meth:`add`.
    """
    def __init__(self):
        # pathname -> the extensions registered for it (name/path pairs)
        self.impure_wheels = {}
        # module name -> extension file path, aggregated over all add()s
        self.libs = {}

    def add(self, pathname, extensions):
        # NOTE(review): extensions appears to be an iterable of
        # (module name, path) pairs - remove() iterates it that way.
        self.impure_wheels[pathname] = extensions
        self.libs.update(extensions)

    def remove(self, pathname):
        # Forget the extensions previously registered for pathname.
        extensions = self.impure_wheels.pop(pathname)
        for k, v in extensions:
            if k in self.libs:
                del self.libs[k]

    def find_module(self, fullname, path=None):
        # Finder protocol: claim the module only if we know its extension.
        if fullname in self.libs:
            result = self
        else:
            result = None
        return result

    def load_module(self, fullname):
        # Loader protocol. NOTE(review): relies on the deprecated `imp`
        # module (imp.load_dynamic).
        if fullname in sys.modules:
            result = sys.modules[fullname]
        else:
            if fullname not in self.libs:
                raise ImportError('unable to find extension for %s' % fullname)
            result = imp.load_dynamic(fullname, self.libs[fullname])
            result.__loader__ = self
            parts = fullname.rsplit('.', 1)
            if len(parts) > 1:
                # Set __package__ for submodules, per the loader protocol.
                result.__package__ = parts[0]
        return result

# Shared hook instance used when mounting wheels with extensions.
_hook = Mounter()


class Wheel(object):
    """
    Class to build and install from Wheel files (PEP 427).
    """

    # Wheel spec version written into generated WHEEL metadata.
    wheel_version = (1, 1)
    # Hash algorithm used when computing RECORD digests.
    hash_kind = 'sha256'

    def __init__(self, filename=None, sign=False, verify=False):
        """
        Initialise an instance using a (valid) filename.

        *filename* may be a full wheel filename, a bare ``name-version``
        or ``name-version-build`` string, or ``None`` (a dummy wheel).
        """
        self.sign = sign
        self.should_verify = verify
        self.buildver = ''
        self.pyver = [PYVER]
        self.abi = ['none']
        self.arch = ['any']
        self.dirname = os.getcwd()
        if filename is None:
            self.name = 'dummy'
            self.version = '0.1'
            self._filename = self.filename
        else:
            m = NAME_VERSION_RE.match(filename)
            if m:
                info = m.groupdict('')
                self.name = info['nm']
                # Reinstate the local version separator
                self.version = info['vn'].replace('_', '-')
                self.buildver = info['bn']
                self._filename = self.filename
            else:
                # Not a name-version string: treat it as a path to a
                # wheel file and parse the tags out of the filename.
                dirname, filename = os.path.split(filename)
                m = FILENAME_RE.match(filename)
                if not m:
                    raise DistlibException('Invalid name or '
                                           'filename: %r' % filename)
                if dirname:
                    self.dirname = os.path.abspath(dirname)
                self._filename = filename
                info = m.groupdict('')
                self.name = info['nm']
                self.version = info['vn']
                self.buildver = info['bn']
                self.pyver = info['py'].split('.')
                self.abi = info['bi'].split('.')
                self.arch = info['ar'].split('.')

    @property
    def filename(self):
        """
        Build and return a filename from the various components.
        """
        if self.buildver:
            buildver = '-' + self.buildver
        else:
            buildver = ''
        pyver = '.'.join(self.pyver)
        abi = '.'.join(self.abi)
        arch = '.'.join(self.arch)
        # replace - with _ as a local version separator
        version = self.version.replace('-', '_')
        return '%s-%s%s-%s-%s-%s.whl' % (self.name, version, buildver,
                                         pyver, abi, arch)

    @property
    def exists(self):
        # True if the wheel file is present in self.dirname.
        path = os.path.join(self.dirname, self.filename)
        return os.path.isfile(path)

    @property
    def tags(self):
        # Yield every (pyver, abi, arch) combination this wheel claims.
        for pyver in self.pyver:
            for abi in self.abi:
                for arch in self.arch:
                    yield pyver, abi, arch

    @cached_property
    def metadata(self):
        # Distribution metadata read from inside the wheel archive.
        pathname = os.path.join(self.dirname, self.filename)
        name_ver = '%s-%s' % (self.name, self.version)
        info_dir = '%s.dist-info' % name_ver
        wrapper = codecs.getreader('utf-8')
        with ZipFile(pathname, 'r') as zf:
            wheel_metadata = self.get_wheel_metadata(zf)
            wv = wheel_metadata['Wheel-Version'].split('.', 1)
            file_version = tuple([int(i) for i in wv])
            # Pre-1.1 wheels store metadata under the legacy name METADATA.
            if file_version < (1, 1):
                fn = 'METADATA'
            else:
                fn = METADATA_FILENAME
            try:
                metadata_filename = posixpath.join(info_dir, fn)
                with zf.open(metadata_filename) as bf:
                    wf = wrapper(bf)
                    result = Metadata(fileobj=wf)
            except KeyError:
                raise ValueError('Invalid wheel, because %s is '
                                 'missing' % fn)
        return result

    def get_wheel_metadata(self, zf):
        """
        Return the WHEEL file of the open ZipFile *zf* as a dict of
        header name -> value.
        """
        name_ver = '%s-%s' % (self.name, self.version)
        info_dir = '%s.dist-info' % name_ver
        metadata_filename = posixpath.join(info_dir, 'WHEEL')
        with zf.open(metadata_filename) as bf:
            wf = codecs.getreader('utf-8')(bf)
            message = message_from_file(wf)
        return dict(message)

    @cached_property
    def info(self):
        # The WHEEL metadata (wheel version, purelib flag, tags) as a dict.
        pathname = os.path.join(self.dirname, self.filename)
        with ZipFile(pathname, 'r') as zf:
            result = self.get_wheel_metadata(zf)
        return result

    def process_shebang(self, data):
        """
        Normalise the shebang of script *data* (bytes) to ``#!python`` /
        ``#!pythonw``, preserving interpreter arguments; if no shebang is
        present, prepend one using the file's own line terminator.
        """
        m = SHEBANG_RE.match(data)
        if m:
            end = m.end()
            shebang, data_after_shebang = data[:end], data[end:]
            # Preserve any arguments after the interpreter
            if b'pythonw' in shebang.lower():
                shebang_python = SHEBANG_PYTHONW
            else:
                shebang_python = SHEBANG_PYTHON
            m = SHEBANG_DETAIL_RE.match(shebang)
            if m:
                args = b' ' + m.groups()[-1]
            else:
                args = b''
            shebang = shebang_python + args
            data = shebang + data_after_shebang
        else:
            # No shebang: detect whether the file uses \n, \r\n or \r
            # and prepend a shebang with a matching terminator.
            cr = data.find(b'\r')
            lf = data.find(b'\n')
            if cr < 0 or cr > lf:
                term = b'\n'
            else:
                if data[cr:cr + 2] == b'\r\n':
                    term = b'\r\n'
                else:
                    term = b'\r'
            data = SHEBANG_PYTHON + term + data
        return data

    def get_hash(self, data, hash_kind=None):
        """
        Return ``(hash_kind, digest)`` for *data*, where the digest is the
        unpadded urlsafe-base64 form used in RECORD entries (PEP 427).
        """
        if hash_kind is None:
            hash_kind = self.hash_kind
        try:
            hasher = getattr(hashlib, hash_kind)
        except AttributeError:
            raise DistlibException('Unsupported hash algorithm: %r' % hash_kind)
        result = hasher(data).digest()
        result = base64.urlsafe_b64encode(result).rstrip(b'=').decode('ascii')
        return hash_kind, result

    def write_record(self, records, record_path, base):
        """
        Write *records* to *record_path*, appending an entry for RECORD
        itself (with empty hash and size, per the wheel spec).
        """
        records = list(records) # make a copy for sorting
        p = to_posix(os.path.relpath(record_path, base))
        records.append((p, '', ''))
        records.sort()
        with CSVWriter(record_path) as writer:
            for row in records:
                writer.writerow(row)

    def write_records(self, info, libdir, archive_paths):
        """
        Compute digest and size for each of *archive_paths*, write RECORD
        into the dist-info directory and append it to *archive_paths*.
        """
        records = []
        distinfo, info_dir = info
        # NOTE(review): hasher is never used below — get_hash() resolves
        # the algorithm itself.
        hasher = getattr(hashlib, self.hash_kind)
        for ap, p in archive_paths:
            with open(p, 'rb') as f:
                data = f.read()
            digest = '%s=%s' % self.get_hash(data)
            size = os.path.getsize(p)
            records.append((ap, digest, size))

        p = os.path.join(distinfo, 'RECORD')
        self.write_record(records, p, libdir)
        ap = to_posix(os.path.join(info_dir, 'RECORD'))
        archive_paths.append((ap, p))

    def build_zip(self, pathname, archive_paths):
        """
        Write the (archive path, filesystem path) pairs in *archive_paths*
        to a deflate-compressed zip at *pathname*.
        """
        with ZipFile(pathname, 'w', zipfile.ZIP_DEFLATED) as zf:
            for ap, p in archive_paths:
                logger.debug('Wrote %s to %s in wheel', p, ap)
                zf.write(p, ap)

    def build(self, paths, tags=None, wheel_version=None):
        """
        Build a wheel from files in specified paths, and use any specified tags
        when determining the name of the wheel.

        :param paths: dict with a 'purelib' or 'platlib' key, and optional
                      'data', 'headers' and 'scripts' keys, each mapping to
                      a directory of files to include.
        :param tags: optional overrides for the 'pyver', 'abi' and 'arch'
                     filename components.
        :param wheel_version: optional (major, minor) tuple recorded in the
                              generated WHEEL file.
        :return: the pathname of the wheel that was written.
        """
        if tags is None:
            tags = {}

        # Prefer 'purelib' when both purelib and platlib are supplied.
        libkey = list(filter(lambda o: o in paths, ('purelib', 'platlib')))[0]
        if libkey == 'platlib':
            is_pure = 'false'
            default_pyver = [IMPVER]
            default_abi = [ABI]
            default_arch = [ARCH]
        else:
            is_pure = 'true'
            default_pyver = [PYVER]
            default_abi = ['none']
            default_arch = ['any']

        self.pyver = tags.get('pyver', default_pyver)
        self.abi = tags.get('abi', default_abi)
        self.arch = tags.get('arch', default_arch)

        libdir = paths[libkey]

        name_ver = '%s-%s' % (self.name, self.version)
        data_dir = '%s.data' % name_ver
        info_dir = '%s.dist-info' % name_ver

        archive_paths = []

        # First, stuff which is not in site-packages
        for key in ('data', 'headers', 'scripts'):
            if key not in paths:
                continue
            path = paths[key]
            if os.path.isdir(path):
                for root, dirs, files in os.walk(path):
                    for fn in files:
                        p = fsdecode(os.path.join(root, fn))
                        rp = os.path.relpath(p, path)
                        ap = to_posix(os.path.join(data_dir, key, rp))
                        archive_paths.append((ap, p))
                        # Normalise script shebangs in place, but leave
                        # .exe launchers untouched.
                        if key == 'scripts' and not p.endswith('.exe'):
                            with open(p, 'rb') as f:
                                data = f.read()
                            data = self.process_shebang(data)
                            with open(p, 'wb') as f:
                                f.write(data)

        # Now, stuff which is in site-packages, other than the
        # distinfo stuff.
        path = libdir
        distinfo = None
        for root, dirs, files in os.walk(path):
            if root == path:
                # At the top level only, save distinfo for later
                # and skip it for now
                for i, dn in enumerate(dirs):
                    dn = fsdecode(dn)
                    if dn.endswith('.dist-info'):
                        distinfo = os.path.join(root, dn)
                        del dirs[i]
                        break
                assert distinfo, '.dist-info directory expected, not found'

            for fn in files:
                # comment out next suite to leave .pyc files in
                if fsdecode(fn).endswith(('.pyc', '.pyo')):
                    continue
                p = os.path.join(root, fn)
                rp = to_posix(os.path.relpath(p, path))
                archive_paths.append((rp, p))

        # Now distinfo. Assumed to be flat, i.e. os.listdir is enough.
        # RECORD and WHEEL are regenerated below; INSTALLER and SHARED
        # are installation artefacts and don't belong in a wheel.
        files = os.listdir(distinfo)
        for fn in files:
            if fn not in ('RECORD', 'INSTALLER', 'SHARED', 'WHEEL'):
                p = fsdecode(os.path.join(distinfo, fn))
                ap = to_posix(os.path.join(info_dir, fn))
                archive_paths.append((ap, p))

        wheel_metadata = [
            'Wheel-Version: %d.%d' % (wheel_version or self.wheel_version),
            'Generator: distlib %s' % __version__,
            'Root-Is-Purelib: %s' % is_pure,
        ]
        for pyver, abi, arch in self.tags:
            wheel_metadata.append('Tag: %s-%s-%s' % (pyver, abi, arch))
        p = os.path.join(distinfo, 'WHEEL')
        with open(p, 'w') as f:
            f.write('\n'.join(wheel_metadata))
        ap = to_posix(os.path.join(info_dir, 'WHEEL'))
        archive_paths.append((ap, p))

        # Now, at last, RECORD.
        # Paths in here are archive paths - nothing else makes sense.
        self.write_records((distinfo, info_dir), libdir, archive_paths)
        # Now, ready to build the zip file
        pathname = os.path.join(self.dirname, self.filename)
        self.build_zip(pathname, archive_paths)
        return pathname

    def install(self, paths, maker, **kwargs):
        """
        Install a wheel to the specified paths. If kwarg ``warner`` is
        specified, it should be a callable, which will be called with two
        tuples indicating the wheel version of this software and the wheel
        version in the file, if there is a discrepancy in the versions.
        This can be used to issue any warnings or raise any exceptions.
        If kwarg ``lib_only`` is True, only the purelib/platlib files are
        installed, and the headers, scripts, data and dist-info metadata are
        not written.

        The return value is a :class:`InstalledDistribution` instance unless
        ``lib_only`` is True, in which case the return value is ``None``.
        """

        dry_run = maker.dry_run
        warner = kwargs.get('warner')
        lib_only = kwargs.get('lib_only', False)

        pathname = os.path.join(self.dirname, self.filename)
        name_ver = '%s-%s' % (self.name, self.version)
        data_dir = '%s.data' % name_ver
        info_dir = '%s.dist-info' % name_ver

        metadata_name = posixpath.join(info_dir, METADATA_FILENAME)
        wheel_metadata_name = posixpath.join(info_dir, 'WHEEL')
        record_name = posixpath.join(info_dir, 'RECORD')

        wrapper = codecs.getreader('utf-8')

        with ZipFile(pathname, 'r') as zf:
            with zf.open(wheel_metadata_name) as bwf:
                wf = wrapper(bwf)
                message = message_from_file(wf)
            wv = message['Wheel-Version'].split('.', 1)
            file_version = tuple([int(i) for i in wv])
            if (file_version != self.wheel_version) and warner:
                warner(self.wheel_version, file_version)

            if message['Root-Is-Purelib'] == 'true':
                libdir = paths['purelib']
            else:
                libdir = paths['platlib']

            # Map archive path -> RECORD row, for size/digest checks below.
            records = {}
            with zf.open(record_name) as bf:
                with CSVReader(stream=bf) as reader:
                    for row in reader:
                        p = row[0]
                        records[p] = row

            data_pfx = posixpath.join(data_dir, '')
            info_pfx = posixpath.join(info_dir, '')
            script_pfx = posixpath.join(data_dir, 'scripts', '')

            # make a new instance rather than a copy of maker's,
            # as we mutate it
            fileop = FileOperator(dry_run=dry_run)
            fileop.record = True    # so we can rollback if needed

            bc = not sys.dont_write_bytecode    # Double negatives. Lovely!

            outfiles = []   # for RECORD writing

            # for script copying/shebang processing
            workdir = tempfile.mkdtemp()
            # set target dir later
            # we default add_launchers to False, as the
            # Python Launcher should be used instead
            maker.source_dir = workdir
            maker.target_dir = None
            try:
                for zinfo in zf.infolist():
                    arcname = zinfo.filename
                    if isinstance(arcname, text_type):
                        u_arcname = arcname
                    else:
                        u_arcname = arcname.decode('utf-8')
                    # The signature file won't be in RECORD,
                    # and we don't currently do anything with it
                    if u_arcname.endswith('/RECORD.jws'):
                        continue
                    row = records[u_arcname]
                    if row[2] and str(zinfo.file_size) != row[2]:
                        raise DistlibException('size mismatch for '
                                               '%s' % u_arcname)
                    if row[1]:
                        # RECORD digests are of the form 'kind=value'.
                        kind, value = row[1].split('=', 1)
                        with zf.open(arcname) as bf:
                            data = bf.read()
                        _, digest = self.get_hash(data, kind)
                        if digest != value:
                            raise DistlibException('digest mismatch for '
                                                   '%s' % arcname)

                    if lib_only and u_arcname.startswith((info_pfx, data_pfx)):
                        logger.debug('lib_only: skipping %s', u_arcname)
                        continue
                    is_script = (u_arcname.startswith(script_pfx)
                                 and not u_arcname.endswith('.exe'))

                    if u_arcname.startswith(data_pfx):
                        # data-dir entries map to the corresponding
                        # installation path (headers, scripts, data, ...).
                        _, where, rp = u_arcname.split('/', 2)
                        outfile = os.path.join(paths[where], convert_path(rp))
                    else:
                        # meant for site-packages.
                        if u_arcname in (wheel_metadata_name, record_name):
                            continue
                        outfile = os.path.join(libdir, convert_path(u_arcname))
                    if not is_script:
                        with zf.open(arcname) as bf:
                            fileop.copy_stream(bf, outfile)
                        outfiles.append(outfile)
                        # Double check the digest of the written file
                        if not dry_run and row[1]:
                            with open(outfile, 'rb') as bf:
                                data = bf.read()
                                _, newdigest = self.get_hash(data, kind)
                                if newdigest != digest:
                                    raise DistlibException('digest mismatch '
                                                           'on write for '
                                                           '%s' % outfile)
                        if bc and outfile.endswith('.py'):
                            try:
                                pyc = fileop.byte_compile(outfile)
                                outfiles.append(pyc)
                            except Exception:
                                # Don't give up if byte-compilation fails,
                                # but log it and perhaps warn the user
                                logger.warning('Byte-compilation failed',
                                               exc_info=True)
                    else:
                        # Scripts go through maker so shebangs/launchers
                        # are generated for the target platform.
                        fn = os.path.basename(convert_path(arcname))
                        workname = os.path.join(workdir, fn)
                        with zf.open(arcname) as bf:
                            fileop.copy_stream(bf, workname)

                        dn, fn = os.path.split(outfile)
                        maker.target_dir = dn
                        filenames = maker.make(fn)
                        fileop.set_executable_mode(filenames)
                        outfiles.extend(filenames)

                if lib_only:
                    logger.debug('lib_only: returning None')
                    dist = None
                else:
                    # Generate scripts

                    # Try to get pydist.json so we can see if there are
                    # any commands to generate. If this fails (e.g. because
                    # of a legacy wheel), log a warning but don't give up.
                    commands = None
                    file_version = self.info['Wheel-Version']
                    if file_version == '1.0':
                        # Use legacy info
                        ep = posixpath.join(info_dir, 'entry_points.txt')
                        try:
                            with zf.open(ep) as bwf:
                                epdata = read_exports(bwf)
                            commands = {}
                            for key in ('console', 'gui'):
                                k = '%s_scripts' % key
                                if k in epdata:
                                    commands['wrap_%s' % key] = d = {}
                                    for v in epdata[k].values():
                                        s = '%s:%s' % (v.prefix, v.suffix)
                                        if v.flags:
                                            s += ' %s' % v.flags
                                        d[v.name] = s
                        except Exception:
                            logger.warning('Unable to read legacy script '
                                           'metadata, so cannot generate '
                                           'scripts')
                    else:
                        try:
                            with zf.open(metadata_name) as bwf:
                                wf = wrapper(bwf)
                                commands = json.load(wf).get('extensions')
                                if commands:
                                    commands = commands.get('python.commands')
                        except Exception:
                            logger.warning('Unable to read JSON metadata, so '
                                           'cannot generate scripts')
                    if commands:
                        console_scripts = commands.get('wrap_console', {})
                        gui_scripts = commands.get('wrap_gui', {})
                        if console_scripts or gui_scripts:
                            script_dir = paths.get('scripts', '')
                            if not os.path.isdir(script_dir):
                                raise ValueError('Valid script path not '
                                                 'specified')
                            maker.target_dir = script_dir
                            for k, v in console_scripts.items():
                                script = '%s = %s' % (k, v)
                                filenames = maker.make(script)
                                fileop.set_executable_mode(filenames)

                            if gui_scripts:
                                options = {'gui': True }
                                for k, v in gui_scripts.items():
                                    script = '%s = %s' % (k, v)
                                    filenames = maker.make(script, options)
                                    fileop.set_executable_mode(filenames)

                    p = os.path.join(libdir, info_dir)
                    dist = InstalledDistribution(p)

                    # Write SHARED
                    paths = dict(paths)     # don't change passed in dict
                    del paths['purelib']
                    del paths['platlib']
                    paths['lib'] = libdir
                    p = dist.write_shared_locations(paths, dry_run)
                    if p:
                        outfiles.append(p)

                    # Write RECORD
                    dist.write_installed_files(outfiles, paths['prefix'],
                                               dry_run)
                return dist
            except Exception:  # pragma: no cover
                logger.exception('installation failed.')
                fileop.rollback()
                raise
            finally:
                shutil.rmtree(workdir)

    def _get_dylib_cache(self):
        """
        Return (creating if needed) the module-level Cache used for
        extracted extension modules.
        """
        global cache
        if cache is None:
            # Use native string to avoid issues on 2.x: see Python #20140.
            # NOTE(review): sys.version[:3] yields '3.1' for Python 3.10+,
            # so cache directories can collide across versions — confirm
            # against upstream distlib, which switched to sys.version_info.
            base = os.path.join(get_cache_base(), str('dylib-cache'),
                                sys.version[:3])
            cache = Cache(base)
        return cache

    def _get_extensions(self):
        """
        Return a list of (module fullname, extracted path) pairs for the
        extensions listed in the wheel's EXTENSIONS file, extracting them
        into the dylib cache when absent or older than the wheel entry.
        """
        pathname = os.path.join(self.dirname, self.filename)
        name_ver = '%s-%s' % (self.name, self.version)
        info_dir = '%s.dist-info' % name_ver
        arcname = posixpath.join(info_dir, 'EXTENSIONS')
        wrapper = codecs.getreader('utf-8')
        result = []
        with ZipFile(pathname, 'r') as zf:
            try:
                with zf.open(arcname) as bf:
                    wf = wrapper(bf)
                    extensions = json.load(wf)
                    cache = self._get_dylib_cache()
                    prefix = cache.prefix_to_dir(pathname)
                    cache_base = os.path.join(cache.base, prefix)
                    if not os.path.isdir(cache_base):
                        os.makedirs(cache_base)
                    for name, relpath in extensions.items():
                        dest = os.path.join(cache_base, convert_path(relpath))
                        if not os.path.exists(dest):
                            extract = True
                        else:
                            # Re-extract only if the wheel's copy is newer
                            # than the cached file.
                            file_time = os.stat(dest).st_mtime
                            file_time = datetime.datetime.fromtimestamp(file_time)
                            info = zf.getinfo(relpath)
                            wheel_time = datetime.datetime(*info.date_time)
                            extract = wheel_time > file_time
                        if extract:
                            zf.extract(relpath, cache_base)
                        result.append((name, dest))
            except KeyError:
                # No EXTENSIONS file: pure-Python wheel.
                pass
        return result

    def is_compatible(self):
        """
        Determine if a wheel is compatible with the running system.
        """
        return is_compatible(self)

    def is_mountable(self):
        """
        Determine if a wheel is asserted as mountable by its metadata.
        """
        return True # for now - metadata details TBD

    def mount(self, append=False):
        """
        Add the wheel to sys.path (at the front, or the back if *append*),
        registering any extension modules with the import hook.
        """
        pathname = os.path.abspath(os.path.join(self.dirname, self.filename))
        if not self.is_compatible():
            msg = 'Wheel %s not compatible with this Python.' % pathname
            raise DistlibException(msg)
        if not self.is_mountable():
            msg = 'Wheel %s is marked as not mountable.' % pathname
            raise DistlibException(msg)
        if pathname in sys.path:
            logger.debug('%s already in path', pathname)
        else:
            if append:
                sys.path.append(pathname)
            else:
                sys.path.insert(0, pathname)
            extensions = self._get_extensions()
            if extensions:
                if _hook not in sys.meta_path:
                    sys.meta_path.append(_hook)
                _hook.add(pathname, extensions)

    def unmount(self):
        """
        Remove the wheel from sys.path, unregistering its extensions and
        removing the import hook when no mounted wheels remain.
        """
        pathname = os.path.abspath(os.path.join(self.dirname, self.filename))
        if pathname not in sys.path:
            logger.debug('%s not in path', pathname)
        else:
            sys.path.remove(pathname)
            if pathname in _hook.impure_wheels:
                _hook.remove(pathname)
            if not _hook.impure_wheels:
                if _hook in sys.meta_path:
                    sys.meta_path.remove(_hook)

    def verify(self):
        """
        Verify sizes and digests of the wheel's contents against RECORD,
        raising DistlibException on any mismatch or unsafe entry.
        """
        pathname = os.path.join(self.dirname, self.filename)
        name_ver = '%s-%s' % (self.name, self.version)
        data_dir = '%s.data' % name_ver
        info_dir = '%s.dist-info' % name_ver

        metadata_name = posixpath.join(info_dir, METADATA_FILENAME)
        wheel_metadata_name = posixpath.join(info_dir, 'WHEEL')
        record_name = posixpath.join(info_dir, 'RECORD')

        wrapper = codecs.getreader('utf-8')

        with ZipFile(pathname, 'r') as zf:
            with zf.open(wheel_metadata_name) as bwf:
                wf = wrapper(bwf)
                message = message_from_file(wf)
            wv = message['Wheel-Version'].split('.', 1)
            file_version = tuple([int(i) for i in wv])
            # TODO version verification

            records = {}
            with zf.open(record_name) as bf:
                with CSVReader(stream=bf) as reader:
                    for row in reader:
                        p = row[0]
                        records[p] = row

            for zinfo in zf.infolist():
                arcname = zinfo.filename
                if isinstance(arcname, text_type):
                    u_arcname = arcname
                else:
                    u_arcname = arcname.decode('utf-8')
                # Reject path traversal attempts.
                if '..' in u_arcname:
                    raise DistlibException('invalid entry in '
                                           'wheel: %r' % u_arcname)

                # The signature file won't be in RECORD,
                # and we don't currently do anything with it
                if u_arcname.endswith('/RECORD.jws'):
                    continue
                row = records[u_arcname]
                if row[2] and str(zinfo.file_size) != row[2]:
                    raise DistlibException('size mismatch for '
                                           '%s' % u_arcname)
                if row[1]:
                    kind, value = row[1].split('=', 1)
                    with zf.open(arcname) as bf:
                        data = bf.read()
                    _, digest = self.get_hash(data, kind)
                    if digest != value:
                        raise DistlibException('digest mismatch for '
                                               '%s' % arcname)

    def update(self, modifier, dest_dir=None, **kwargs):
        """
        Update the contents of a wheel in a generic way. The modifier should
        be a callable which expects a dictionary argument: its keys are
        archive-entry paths, and its values are absolute filesystem paths
        where the contents the corresponding archive entries can be found. The
        modifier is free to change the contents of the files pointed to, add
        new entries and remove entries, before returning. This method will
        extract the entire contents of the wheel to a temporary location, call
        the modifier, and then use the passed (and possibly updated)
        dictionary to write a new wheel. If ``dest_dir`` is specified, the new
        wheel is written there -- otherwise, the original wheel is overwritten.

        The modifier should return True if it updated the wheel, else False.
        This method returns the same value the modifier returns.
        """

        def get_version(path_map, info_dir):
            # Read the distribution version from the extracted metadata
            # (falling back to legacy PKG-INFO); returns (version, path).
            version = path = None
            key = '%s/%s' % (info_dir, METADATA_FILENAME)
            if key not in path_map:
                key = '%s/PKG-INFO' % info_dir
            if key in path_map:
                path = path_map[key]
                version = Metadata(path=path).version
            return version, path

        def update_version(version, path):
            # Bump (or add) the local version segment so the rebuilt wheel
            # is distinguishable from the original.
            updated = None
            try:
                # Parsed only to validate: raises UnsupportedVersionError
                # for non-PEP-440 versions.
                v = NormalizedVersion(version)
                i = version.find('-')
                if i < 0:
                    updated = '%s+1' % version
                else:
                    parts = [int(s) for s in version[i + 1:].split('.')]
                    parts[-1] += 1
                    updated = '%s+%s' % (version[:i],
                                         '.'.join(str(i) for i in parts))
            except UnsupportedVersionError:
                logger.debug('Cannot update non-compliant (PEP-440) '
                             'version %r', version)
            if updated:
                md = Metadata(path=path)
                md.version = updated
                legacy = not path.endswith(METADATA_FILENAME)
                md.write(path=path, legacy=legacy)
                logger.debug('Version updated from %r to %r', version,
                             updated)

        pathname = os.path.join(self.dirname, self.filename)
        name_ver = '%s-%s' % (self.name, self.version)
        info_dir = '%s.dist-info' % name_ver
        record_name = posixpath.join(info_dir, 'RECORD')
        with tempdir() as workdir:
            with ZipFile(pathname, 'r') as zf:
                path_map = {}
                for zinfo in zf.infolist():
                    arcname = zinfo.filename
                    if isinstance(arcname, text_type):
                        u_arcname = arcname
                    else:
                        u_arcname = arcname.decode('utf-8')
                    # RECORD is regenerated by write_records below.
                    if u_arcname == record_name:
                        continue
                    if '..' in u_arcname:
                        raise DistlibException('invalid entry in '
                                               'wheel: %r' % u_arcname)
                    zf.extract(zinfo, workdir)
                    path = os.path.join(workdir, convert_path(u_arcname))
                    path_map[u_arcname] = path

            # Remember the version.
            original_version, _ = get_version(path_map, info_dir)
            # Files extracted. Call the modifier.
            modified = modifier(path_map, **kwargs)
            if modified:
                # Something changed - need to build a new wheel.
                current_version, path = get_version(path_map, info_dir)
                if current_version and (current_version == original_version):
                    # Add or update local version to signify changes.
                    update_version(current_version, path)
                # Decide where the new wheel goes.
                if dest_dir is None:
                    fd, newpath = tempfile.mkstemp(suffix='.whl',
                                                   prefix='wheel-update-',
                                                   dir=workdir)
                    os.close(fd)
                else:
                    if not os.path.isdir(dest_dir):
                        raise DistlibException('Not a directory: %r' % dest_dir)
                    newpath = os.path.join(dest_dir, self.filename)
                archive_paths = list(path_map.items())
                distinfo = os.path.join(workdir, info_dir)
                info = distinfo, info_dir
                self.write_records(info, workdir, archive_paths)
                self.build_zip(newpath, archive_paths)
                if dest_dir is None:
                    # Overwrite the original wheel in place.
                    shutil.copyfile(newpath, pathname)
        return modified

def compatible_tags():
    """
    Return (pyver, abi, arch) tuples compatible with this Python.
    """
    # Candidate version suffixes: the current interpreter's version first,
    # then every earlier minor release of the same major series.
    versions = [VER_SUFFIX]
    major = VER_SUFFIX[0]
    for minor in range(sys.version_info[1] - 1, - 1, -1):
        versions.append(''.join([major, str(minor)]))

    # Candidate ABIs: the '.abiN...' extension-module suffixes (sorted),
    # preceded by this interpreter's ABI and followed by 'none'.
    abis = []
    for suffix, _, _ in imp.get_suffixes():
        if suffix.startswith('.abi'):
            abis.append(suffix.split('.', 2)[1])
    abis.sort()
    if ABI != 'none':
        abis.insert(0, ABI)
    abis.append('none')
    result = []

    arches = [ARCH]
    if sys.platform == 'darwin':
        # On OS X, also accept equivalent/fatter platform tags with the same
        # or lower minor OS version. Raw string: '\w'/'\d' in a plain literal
        # are invalid escape sequences on modern Pythons.
        m = re.match(r'(\w+)_(\d+)_(\d+)_(\w+)$', ARCH)
        if m:
            name, major, minor, arch = m.groups()
            minor = int(minor)
            matches = [arch]
            if arch in ('i386', 'ppc'):
                matches.append('fat')
            if arch in ('i386', 'ppc', 'x86_64'):
                matches.append('fat3')
            if arch in ('ppc64', 'x86_64'):
                matches.append('fat64')
            if arch in ('i386', 'x86_64'):
                matches.append('intel')
            if arch in ('i386', 'x86_64', 'intel', 'ppc', 'ppc64'):
                matches.append('universal')
            while minor >= 0:
                for match in matches:
                    s = '%s_%s_%s_%s' % (name, major, minor, match)
                    if s != ARCH:   # already there
                        arches.append(s)
                minor -= 1

    # Most specific - our Python version, ABI and arch
    for abi in abis:
        for arch in arches:
            result.append((''.join((IMP_PREFIX, versions[0])), abi, arch))

    # where no ABI / arch dependency, but IMP_PREFIX dependency
    for i, version in enumerate(versions):
        result.append((''.join((IMP_PREFIX, version)), 'none', 'any'))
        if i == 0:
            result.append((''.join((IMP_PREFIX, version[0])), 'none', 'any'))

    # no IMP_PREFIX, ABI or arch dependency
    for i, version in enumerate(versions):
        result.append((''.join(('py', version)), 'none', 'any'))
        if i == 0:
            result.append((''.join(('py', version[0])), 'none', 'any'))
    return set(result)


# Compute the compatibility tags once at import time, then discard the
# helper -- only the precomputed set is part of this module's API.
COMPATIBLE_TAGS = compatible_tags()

del compatible_tags


def is_compatible(wheel, tags=None):
    """Return True if *wheel* matches any of *tags*.

    *wheel* may be a Wheel instance or a wheel filename; *tags* defaults to
    the tags computed for the running interpreter.
    """
    if not isinstance(wheel, Wheel):
        # Treat a plain string as a wheel filename.
        wheel = Wheel(wheel)
    if tags is None:
        tags = COMPATIBLE_TAGS
    return any(ver in wheel.pyver and abi in wheel.abi and arch in wheel.arch
               for ver, abi, arch in tags)
_vendor/distlib/compat.py000064400000117541151733136310011503 0ustar00# -*- coding: utf-8 -*-
#
# Copyright (C) 2013-2016 Vinay Sajip.
# Licensed to the Python Software Foundation under a contributor agreement.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
from __future__ import absolute_import

import os
import re
import sys

# ssl is optional: some interpreters are built without it, in which case the
# HTTPS-related imports below are skipped.
try:
    import ssl
except ImportError:
    ssl = None

if sys.version_info[0] < 3:  # pragma: no cover
    # Python 2: pull names from their 2.x locations, aliased to the
    # Python 3 names used throughout distlib.
    from StringIO import StringIO
    string_types = basestring,
    text_type = unicode
    from types import FileType as file_type
    import __builtin__ as builtins
    import ConfigParser as configparser
    from ._backport import shutil
    from urlparse import urlparse, urlunparse, urljoin, urlsplit, urlunsplit
    from urllib import (urlretrieve, quote as _quote, unquote, url2pathname,
                        pathname2url, ContentTooShortError, splittype)

    def quote(s):
        # Python 2's urllib.quote cannot handle unicode; encode to UTF-8
        # bytes before quoting.
        if isinstance(s, unicode):
            s = s.encode('utf-8')
        return _quote(s)

    import urllib2
    from urllib2 import (Request, urlopen, URLError, HTTPError,
                         HTTPBasicAuthHandler, HTTPPasswordMgr,
                         HTTPHandler, HTTPRedirectHandler,
                         build_opener)
    if ssl:
        from urllib2 import HTTPSHandler
    import httplib
    import xmlrpclib
    import Queue as queue
    from HTMLParser import HTMLParser
    import htmlentitydefs
    raw_input = raw_input
    from itertools import ifilter as filter
    from itertools import ifilterfalse as filterfalse

    # splituser has no public Python 2 home, so provide a local copy.
    _userprog = None
    def splituser(host):
        """splituser('user[:passwd]@host[:port]') --> 'user[:passwd]', 'host[:port]'."""
        # Compile the pattern lazily, only once, on first use.
        global _userprog
        if _userprog is None:
            import re
            _userprog = re.compile('^(.*)@(.*)$')

        match = _userprog.match(host)
        if match: return match.group(1, 2)
        return None, host

else:  # pragma: no cover
    # Python 3: mostly straight imports, plus aliases (httplib, urllib2,
    # xmlrpclib, queue) matching the Python 2 module names used above.
    from io import StringIO
    string_types = str,
    text_type = str
    from io import TextIOWrapper as file_type
    import builtins
    import configparser
    import shutil
    from urllib.parse import (urlparse, urlunparse, urljoin, splituser, quote,
                              unquote, urlsplit, urlunsplit, splittype)
    from urllib.request import (urlopen, urlretrieve, Request, url2pathname,
                                pathname2url,
                                HTTPBasicAuthHandler, HTTPPasswordMgr,
                                HTTPHandler, HTTPRedirectHandler,
                                build_opener)
    if ssl:
        from urllib.request import HTTPSHandler
    from urllib.error import HTTPError, URLError, ContentTooShortError
    import http.client as httplib
    import urllib.request as urllib2
    import xmlrpc.client as xmlrpclib
    import queue
    from html.parser import HTMLParser
    import html.entities as htmlentitydefs
    raw_input = input
    from itertools import filterfalse
    filter = filter

# Backport of ssl.match_hostname / CertificateError (copied from CPython's
# ssl module) for interpreters whose ssl module lacks them.
try:
    from ssl import match_hostname, CertificateError
except ImportError: # pragma: no cover
    class CertificateError(ValueError):
        # Raised when a certificate does not match the expected hostname.
        pass


    def _dnsname_match(dn, hostname, max_wildcards=1):
        """Matching according to RFC 6125, section 6.4.3

        http://tools.ietf.org/html/rfc6125#section-6.4.3
        """
        pats = []
        if not dn:
            return False

        parts = dn.split('.')
        leftmost, remainder = parts[0], parts[1:]

        wildcards = leftmost.count('*')
        if wildcards > max_wildcards:
            # Issue #17980: avoid denials of service by refusing more
            # than one wildcard per fragment.  A survey of established
            # policy among SSL implementations showed it to be a
            # reasonable choice.
            raise CertificateError(
                "too many wildcards in certificate DNS name: " + repr(dn))

        # speed up common case w/o wildcards
        if not wildcards:
            return dn.lower() == hostname.lower()

        # RFC 6125, section 6.4.3, subitem 1.
        # The client SHOULD NOT attempt to match a presented identifier in which
        # the wildcard character comprises a label other than the left-most label.
        if leftmost == '*':
            # When '*' is a fragment by itself, it matches a non-empty dotless
            # fragment.
            pats.append('[^.]+')
        elif leftmost.startswith('xn--') or hostname.startswith('xn--'):
            # RFC 6125, section 6.4.3, subitem 3.
            # The client SHOULD NOT attempt to match a presented identifier
            # where the wildcard character is embedded within an A-label or
            # U-label of an internationalized domain name.
            pats.append(re.escape(leftmost))
        else:
            # Otherwise, '*' matches any dotless string, e.g. www*
            pats.append(re.escape(leftmost).replace(r'\*', '[^.]*'))

        # add the remaining fragments, ignore any wildcards
        for frag in remainder:
            pats.append(re.escape(frag))

        pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE)
        return pat.match(hostname)


    def match_hostname(cert, hostname):
        """Verify that *cert* (in decoded format as returned by
        SSLSocket.getpeercert()) matches the *hostname*.  RFC 2818 and RFC 6125
        rules are followed, but IP addresses are not accepted for *hostname*.

        CertificateError is raised on failure. On success, the function
        returns nothing.
        """
        if not cert:
            raise ValueError("empty or no certificate, match_hostname needs a "
                             "SSL socket or SSL context with either "
                             "CERT_OPTIONAL or CERT_REQUIRED")
        dnsnames = []
        # Prefer subjectAltName dNSName entries (RFC 6125).
        san = cert.get('subjectAltName', ())
        for key, value in san:
            if key == 'DNS':
                if _dnsname_match(value, hostname):
                    return
                dnsnames.append(value)
        if not dnsnames:
            # The subject is only checked when there is no dNSName entry
            # in subjectAltName
            for sub in cert.get('subject', ()):
                for key, value in sub:
                    # XXX according to RFC 2818, the most specific Common Name
                    # must be used.
                    if key == 'commonName':
                        if _dnsname_match(value, hostname):
                            return
                        dnsnames.append(value)
        if len(dnsnames) > 1:
            raise CertificateError("hostname %r "
                "doesn't match either of %s"
                % (hostname, ', '.join(map(repr, dnsnames))))
        elif len(dnsnames) == 1:
            raise CertificateError("hostname %r "
                "doesn't match %r"
                % (hostname, dnsnames[0]))
        else:
            raise CertificateError("no appropriate commonName or "
                "subjectAltName fields were found")


try:
    from types import SimpleNamespace as Container
except ImportError:  # pragma: no cover
    class Container(object):
        """
        A generic container for when multiple values need to be returned
        """
        def __init__(self, **kwargs):
            # Expose every keyword argument as an instance attribute.
            for name, value in kwargs.items():
                setattr(self, name, value)


try:
    from shutil import which
except ImportError:  # pragma: no cover
    # Implementation from Python 3.3
    def which(cmd, mode=os.F_OK | os.X_OK, path=None):
        """Given a command, mode, and a PATH string, return the path which
        conforms to the given mode on the PATH, or None if there is no such
        file.

        `mode` defaults to os.F_OK | os.X_OK. `path` defaults to the result
        of os.environ.get("PATH"), or can be overridden with a custom search
        path.

        """
        def _usable(candidate, mode):
            # A candidate must exist, be accessible with `mode`, and not be
            # a directory (directories pass os.access on Windows).
            return (os.path.exists(candidate)
                    and os.access(candidate, mode)
                    and not os.path.isdir(candidate))

        # An explicit directory component (including './script') means:
        # check that path directly rather than searching PATH.
        if os.path.dirname(cmd):
            return cmd if _usable(cmd, mode) else None

        if path is None:
            path = os.environ.get("PATH", os.defpath)
        if not path:
            return None
        directories = path.split(os.pathsep)

        if sys.platform == "win32":
            # The current directory takes precedence on Windows.
            if os.curdir not in directories:
                directories.insert(0, os.curdir)

            # PATHEXT lists the executable extensions on Windows. If cmd
            # already carries one of them (e.g. "python.exe"), try it as-is;
            # otherwise try cmd with each extension appended.
            pathext = os.environ.get("PATHEXT", "").split(os.pathsep)
            if any(cmd.lower().endswith(ext.lower()) for ext in pathext):
                candidates = [cmd]
            else:
                candidates = [cmd + ext for ext in pathext]
        else:
            # Other platforms have no PATHEXT equivalent; use cmd as-is.
            candidates = [cmd]

        checked = set()
        for directory in directories:
            normalized = os.path.normcase(directory)
            # Skip directories already examined (PATH may repeat entries).
            if normalized in checked:
                continue
            checked.add(normalized)
            for candidate in candidates:
                full_path = os.path.join(directory, candidate)
                if _usable(full_path, mode):
                    return full_path
        return None


# ZipFile is a context manager in 2.7, but not in 2.6

from zipfile import ZipFile as BaseZipFile

if hasattr(BaseZipFile, '__enter__'):  # pragma: no cover
    ZipFile = BaseZipFile
else:
    # Wrap 2.6's ZipFile/ZipExtFile so both can be used in 'with' blocks.
    from zipfile import ZipExtFile as BaseZipExtFile

    class ZipExtFile(BaseZipExtFile):
        def __init__(self, base):
            # Adopt the state of an already-open ZipExtFile instance.
            self.__dict__.update(base.__dict__)

        def __enter__(self):
            return self

        def __exit__(self, *exc_info):
            self.close()
            # return None, so if an exception occurred, it will propagate

    class ZipFile(BaseZipFile):
        def __enter__(self):
            return self

        def __exit__(self, *exc_info):
            self.close()
            # return None, so if an exception occurred, it will propagate

        def open(self, *args, **kwargs):
            # Re-wrap the returned member file so it is a context manager too.
            base = BaseZipFile.open(self, *args, **kwargs)
            return ZipExtFile(base)

try:
    from platform import python_implementation
except ImportError: # pragma: no cover
    def python_implementation():
        """Return a string identifying the Python implementation."""
        # Fall through a series of implementation fingerprints; CPython is
        # the default when none of the others match.
        if 'PyPy' in sys.version:
            result = 'PyPy'
        elif os.name == 'java':
            result = 'Jython'
        elif sys.version.startswith('IronPython'):
            result = 'IronPython'
        else:
            result = 'CPython'
        return result

# Prefer the stdlib sysconfig (2.7 / 3.2+); fall back to the bundled backport.
try:
    import sysconfig
except ImportError: # pragma: no cover
    from ._backport import sysconfig

# callable() is missing in Python 3.0/3.1; emulate it via the Callable ABC.
try:
    callable = callable
except NameError:   # pragma: no cover
    from collections import Callable

    def callable(obj):
        return isinstance(obj, Callable)


try:
    fsencode = os.fsencode
    fsdecode = os.fsdecode
except AttributeError:  # pragma: no cover
    # Backport for interpreters without os.fsencode/os.fsdecode (< 3.2).
    _fsencoding = sys.getfilesystemencoding()
    # 'mbcs' cannot round-trip surrogates, so use strict errors there.
    _fserrors = 'strict' if _fsencoding == 'mbcs' else 'surrogateescape'

    def fsencode(filename):
        """Encode *filename* to bytes using the filesystem encoding."""
        if isinstance(filename, bytes):
            return filename
        if isinstance(filename, text_type):
            return filename.encode(_fsencoding, _fserrors)
        raise TypeError("expect bytes or str, not %s" %
                        type(filename).__name__)

    def fsdecode(filename):
        """Decode *filename* to text using the filesystem encoding."""
        if isinstance(filename, text_type):
            return filename
        if isinstance(filename, bytes):
            return filename.decode(_fsencoding, _fserrors)
        raise TypeError("expect bytes or str, not %s" %
                        type(filename).__name__)

try:
    from tokenize import detect_encoding
except ImportError: # pragma: no cover
    # Backport: tokenize.detect_encoding appeared in Python 3.
    from codecs import BOM_UTF8, lookup
    import re

    # PEP 263 coding cookie. Raw string: '\s'/'\w' in a plain literal are
    # invalid escape sequences on modern Pythons.
    cookie_re = re.compile(r"coding[:=]\s*([-\w.]+)")

    def _get_normal_name(orig_enc):
        """Imitates get_normal_name in tokenizer.c."""
        # Only care about the first 12 characters.
        enc = orig_enc[:12].lower().replace("_", "-")
        if enc == "utf-8" or enc.startswith("utf-8-"):
            return "utf-8"
        if enc in ("latin-1", "iso-8859-1", "iso-latin-1") or \
           enc.startswith(("latin-1-", "iso-8859-1-", "iso-latin-1-")):
            return "iso-8859-1"
        return orig_enc

    def detect_encoding(readline):
        """
        The detect_encoding() function is used to detect the encoding that should
        be used to decode a Python source file.  It requires one argument, readline,
        in the same way as the tokenize() generator.

        It will call readline a maximum of twice, and return the encoding used
        (as a string) and a list of any lines (left as bytes) it has read in.

        It detects the encoding from the presence of a utf-8 bom or an encoding
        cookie as specified in pep-0263.  If both a bom and a cookie are present,
        but disagree, a SyntaxError will be raised.  If the encoding cookie is an
        invalid charset, raise a SyntaxError.  Note that if a utf-8 bom is found,
        'utf-8-sig' is returned.

        If no encoding is specified, then the default of 'utf-8' will be returned.
        """
        # The filename is only used to enrich error messages, if available.
        try:
            filename = readline.__self__.name
        except AttributeError:
            filename = None
        bom_found = False
        encoding = None
        default = 'utf-8'
        def read_or_stop():
            # Treat end-of-input as an empty line.
            try:
                return readline()
            except StopIteration:
                return b''

        def find_cookie(line):
            """Return the normalized encoding from *line*'s cookie, or None."""
            try:
                # Decode as UTF-8. Either the line is an encoding declaration,
                # in which case it should be pure ASCII, or it must be UTF-8
                # per default encoding.
                line_string = line.decode('utf-8')
            except UnicodeDecodeError:
                msg = "invalid or missing encoding declaration"
                if filename is not None:
                    msg = '{} for {!r}'.format(msg, filename)
                raise SyntaxError(msg)

            matches = cookie_re.findall(line_string)
            if not matches:
                return None
            encoding = _get_normal_name(matches[0])
            try:
                codec = lookup(encoding)
            except LookupError:
                # This behaviour mimics the Python interpreter
                if filename is None:
                    msg = "unknown encoding: " + encoding
                else:
                    msg = "unknown encoding for {!r}: {}".format(filename,
                            encoding)
                raise SyntaxError(msg)

            if bom_found:
                # A BOM and a non-utf-8 cookie disagree: that's an error.
                if codec.name != 'utf-8':
                    # This behaviour mimics the Python interpreter
                    if filename is None:
                        msg = 'encoding problem: utf-8'
                    else:
                        msg = 'encoding problem for {!r}: utf-8'.format(filename)
                    raise SyntaxError(msg)
                encoding += '-sig'
            return encoding

        first = read_or_stop()
        if first.startswith(BOM_UTF8):
            bom_found = True
            first = first[3:]
            default = 'utf-8-sig'
        if not first:
            return default, []

        encoding = find_cookie(first)
        if encoding:
            return encoding, [first]

        second = read_or_stop()
        if not second:
            return default, [first]

        encoding = find_cookie(second)
        if encoding:
            return encoding, [first, second]

        return default, [first, second]

# For converting & <-> &amp; etc.
# html.escape is 3.2+; fall back to the older cgi.escape before that.
try:
    from html import escape
except ImportError:
    from cgi import escape
# html.unescape is 3.4+; use the HTMLParser method on older versions.
if sys.version_info[:2] < (3, 4):
    unescape = HTMLParser().unescape
else:
    from html import unescape

try:
    from collections import ChainMap
except ImportError: # pragma: no cover
    # Backport of collections.ChainMap for Pythons older than 3.3.
    from collections import MutableMapping

    try:
        from reprlib import recursive_repr as _recursive_repr
    except ImportError:
        # The wrapper below keys its re-entrancy guard on the current thread,
        # so get_ident must be available. The original backport omitted this
        # import, which made wrapper() raise NameError when this fallback
        # path ran; import it the same way the OrderedDict backport does.
        try:
            from thread import get_ident
        except ImportError:
            from dummy_thread import get_ident

        def _recursive_repr(fillvalue='...'):
            '''
            Decorator to make a repr function return fillvalue for a recursive
            call
            '''

            def decorating_function(user_function):
                repr_running = set()

                def wrapper(self):
                    # Guard against infinite recursion when an object
                    # (directly or indirectly) contains itself.
                    key = id(self), get_ident()
                    if key in repr_running:
                        return fillvalue
                    repr_running.add(key)
                    try:
                        result = user_function(self)
                    finally:
                        repr_running.discard(key)
                    return result

                # Can't use functools.wraps() here because of bootstrap issues
                wrapper.__module__ = getattr(user_function, '__module__')
                wrapper.__doc__ = getattr(user_function, '__doc__')
                wrapper.__name__ = getattr(user_function, '__name__')
                wrapper.__annotations__ = getattr(user_function, '__annotations__', {})
                return wrapper

            return decorating_function

    class ChainMap(MutableMapping):
        ''' A ChainMap groups multiple dicts (or other mappings) together
        to create a single, updateable view.

        The underlying mappings are stored in a list.  That list is public and can
        accessed or updated using the *maps* attribute.  There is no other state.

        Lookups search the underlying mappings successively until a key is found.
        In contrast, writes, updates, and deletions only operate on the first
        mapping.

        '''

        def __init__(self, *maps):
            '''Initialize a ChainMap by setting *maps* to the given mappings.
            If no mappings are provided, a single empty dictionary is used.

            '''
            self.maps = list(maps) or [{}]          # always at least one map

        def __missing__(self, key):
            raise KeyError(key)

        def __getitem__(self, key):
            for mapping in self.maps:
                try:
                    return mapping[key]             # can't use 'key in mapping' with defaultdict
                except KeyError:
                    pass
            return self.__missing__(key)            # support subclasses that define __missing__

        def get(self, key, default=None):
            return self[key] if key in self else default

        def __len__(self):
            return len(set().union(*self.maps))     # reuses stored hash values if possible

        def __iter__(self):
            return iter(set().union(*self.maps))

        def __contains__(self, key):
            return any(key in m for m in self.maps)

        def __bool__(self):
            return any(self.maps)

        @_recursive_repr()
        def __repr__(self):
            return '{0.__class__.__name__}({1})'.format(
                self, ', '.join(map(repr, self.maps)))

        @classmethod
        def fromkeys(cls, iterable, *args):
            'Create a ChainMap with a single dict created from the iterable.'
            return cls(dict.fromkeys(iterable, *args))

        def copy(self):
            'New ChainMap or subclass with a new copy of maps[0] and refs to maps[1:]'
            return self.__class__(self.maps[0].copy(), *self.maps[1:])

        __copy__ = copy

        def new_child(self):                        # like Django's Context.push()
            'New ChainMap with a new dict followed by all previous maps.'
            return self.__class__({}, *self.maps)

        @property
        def parents(self):                          # like Django's Context.pop()
            'New ChainMap from maps[1:].'
            return self.__class__(*self.maps[1:])

        def __setitem__(self, key, value):
            self.maps[0][key] = value

        def __delitem__(self, key):
            try:
                del self.maps[0][key]
            except KeyError:
                raise KeyError('Key not found in the first mapping: {!r}'.format(key))

        def popitem(self):
            'Remove and return an item pair from maps[0]. Raise KeyError is maps[0] is empty.'
            try:
                return self.maps[0].popitem()
            except KeyError:
                raise KeyError('No keys found in the first mapping.')

        def pop(self, key, *args):
            'Remove *key* from maps[0] and return its value. Raise KeyError if *key* not in maps[0].'
            try:
                return self.maps[0].pop(key, *args)
            except KeyError:
                raise KeyError('Key not found in the first mapping: {!r}'.format(key))

        def clear(self):
            'Clear maps[0], leaving maps[1:] intact.'
            self.maps[0].clear()

try:
    from imp import cache_from_source
except ImportError: # pragma: no cover
    def cache_from_source(path, debug_override=None):
        """Return the bytecode cache path for *path* (pre-PEP 3147 layout:
        'foo.py' -> 'foo.pyc' or 'foo.pyo')."""
        assert path.endswith('.py')
        # debug_override=None means: follow the interpreter's __debug__ flag.
        debug = __debug__ if debug_override is None else debug_override
        return path + ('c' if debug else 'o')

try:
    from collections import OrderedDict
except ImportError: # pragma: no cover
## {{{ http://code.activestate.com/recipes/576693/ (r9)
# Backport of OrderedDict() class that runs on Python 2.4, 2.5, 2.6, 2.7 and pypy.
# Passes Python2.7's test suite and incorporates all the latest updates.
    try:
        from thread import get_ident as _get_ident
    except ImportError:
        from dummy_thread import get_ident as _get_ident

    try:
        from _abcoll import KeysView, ValuesView, ItemsView
    except ImportError:
        pass


    class OrderedDict(dict):
        'Dictionary that remembers insertion order'
        # An inherited dict maps keys to values.
        # The inherited dict provides __getitem__, __len__, __contains__, and get.
        # The remaining methods are order-aware.
        # Big-O running times for all methods are the same as for regular dictionaries.

        # The internal self.__map dictionary maps keys to links in a doubly linked list.
        # The circular doubly linked list starts and ends with a sentinel element.
        # The sentinel element never gets deleted (this simplifies the algorithm).
        # Each link is stored as a list of length three:  [PREV, NEXT, KEY].

        def __init__(self, *args, **kwds):
            '''Initialize an ordered dictionary.  Signature is the same as for
            regular dictionaries, but keyword arguments are not recommended
            because their insertion order is arbitrary.

            '''
            if len(args) > 1:
                raise TypeError('expected at most 1 arguments, got %d' % len(args))
            try:
                self.__root
            except AttributeError:
                self.__root = root = []                     # sentinel node
                root[:] = [root, root, None]
                self.__map = {}
            self.__update(*args, **kwds)

        def __setitem__(self, key, value, dict_setitem=dict.__setitem__):
            'od.__setitem__(i, y) <==> od[i]=y'
            # Setting a new item creates a new link which goes at the end of the linked
            # list, and the inherited dictionary is updated with the new key/value pair.
            if key not in self:
                root = self.__root
                last = root[0]
                last[1] = root[0] = self.__map[key] = [last, root, key]
            dict_setitem(self, key, value)

        def __delitem__(self, key, dict_delitem=dict.__delitem__):
            'od.__delitem__(y) <==> del od[y]'
            # Deleting an existing item uses self.__map to find the link which is
            # then removed by updating the links in the predecessor and successor nodes.
            dict_delitem(self, key)
            link_prev, link_next, key = self.__map.pop(key)
            link_prev[1] = link_next
            link_next[0] = link_prev

        def __iter__(self):
            'od.__iter__() <==> iter(od)'
            root = self.__root
            curr = root[1]
            while curr is not root:
                yield curr[2]
                curr = curr[1]

        def __reversed__(self):
            'od.__reversed__() <==> reversed(od)'
            root = self.__root
            curr = root[0]
            while curr is not root:
                yield curr[2]
                curr = curr[0]

        def clear(self):
            'od.clear() -> None.  Remove all items from od.'
            try:
                for node in self.__map.itervalues():
                    del node[:]
                root = self.__root
                root[:] = [root, root, None]
                self.__map.clear()
            except AttributeError:
                pass
            dict.clear(self)

        def popitem(self, last=True):
            '''od.popitem() -> (k, v), return and remove a (key, value) pair.
            Pairs are returned in LIFO order if last is true or FIFO order if false.

            '''
            if not self:
                raise KeyError('dictionary is empty')
            root = self.__root
            if last:
                link = root[0]
                link_prev = link[0]
                link_prev[1] = root
                root[0] = link_prev
            else:
                link = root[1]
                link_next = link[1]
                root[1] = link_next
                link_next[0] = root
            key = link[2]
            del self.__map[key]
            value = dict.pop(self, key)
            return key, value

        # -- the following methods do not depend on the internal structure --

        def keys(self):
            'od.keys() -> list of keys in od'
            return list(self)

        def values(self):
            'od.values() -> list of values in od'
            return [self[key] for key in self]

        def items(self):
            'od.items() -> list of (key, value) pairs in od'
            return [(key, self[key]) for key in self]

        def iterkeys(self):
            'od.iterkeys() -> an iterator over the keys in od'
            return iter(self)

        def itervalues(self):
            'od.itervalues -> an iterator over the values in od'
            for k in self:
                yield self[k]

        def iteritems(self):
            'od.iteritems -> an iterator over the (key, value) items in od'
            for k in self:
                yield (k, self[k])

        def update(*args, **kwds):
            '''od.update(E, **F) -> None.  Update od from dict/iterable E and F.

            If E is a dict instance, does:           for k in E: od[k] = E[k]
            If E has a .keys() method, does:         for k in E.keys(): od[k] = E[k]
            Or if E is an iterable of items, does:   for k, v in E: od[k] = v
            In either case, this is followed by:     for k, v in F.items(): od[k] = v

            '''
            if len(args) > 2:
                raise TypeError('update() takes at most 2 positional '
                                'arguments (%d given)' % (len(args),))
            elif not args:
                raise TypeError('update() takes at least 1 argument (0 given)')
            self = args[0]
            # Make progressively weaker assumptions about "other"
            other = ()
            if len(args) == 2:
                other = args[1]
            if isinstance(other, dict):
                for key in other:
                    self[key] = other[key]
            elif hasattr(other, 'keys'):
                for key in other.keys():
                    self[key] = other[key]
            else:
                for key, value in other:
                    self[key] = value
            for key, value in kwds.items():
                self[key] = value

        __update = update  # let subclasses override update without breaking __init__

        __marker = object()

        def pop(self, key, default=__marker):
            '''od.pop(k[,d]) -> v, remove specified key and return the corresponding value.
            If key is not found, d is returned if given, otherwise KeyError is raised.

            '''
            if key in self:
                result = self[key]
                del self[key]
                return result
            if default is self.__marker:
                raise KeyError(key)
            return default

        def setdefault(self, key, default=None):
            'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od'
            try:
                return self[key]
            except KeyError:
                self[key] = default
                return default

        def __repr__(self, _repr_running=None):
            'od.__repr__() <==> repr(od)'
            # _repr_running guards against infinite recursion when an od
            # (directly or indirectly) contains itself; entries are keyed
            # per object *and* per thread.
            if not _repr_running: _repr_running = {}
            call_key = id(self), _get_ident()
            if call_key in _repr_running:
                return '...'
            _repr_running[call_key] = 1
            try:
                if not self:
                    return '%s()' % (self.__class__.__name__,)
                return '%s(%r)' % (self.__class__.__name__, self.items())
            finally:
                # Always clear the guard, even if items() repr raised.
                del _repr_running[call_key]

        def __reduce__(self):
            'Return state information for pickling'
            items = [[k, self[k]] for k in self]
            inst_dict = vars(self).copy()
            # Drop the attributes every fresh OrderedDict carries, so only
            # genuinely extra instance state is pickled alongside the items.
            for k in vars(OrderedDict()):
                inst_dict.pop(k, None)
            if inst_dict:
                return (self.__class__, (items,), inst_dict)
            return self.__class__, (items,)

        def copy(self):
            'od.copy() -> a shallow copy of od'
            # Preserve the concrete subclass of the receiver.
            cls = self.__class__
            return cls(self)

        @classmethod
        def fromkeys(cls, iterable, value=None):
            '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S
            and values equal to v (which defaults to None).

            '''
            new = cls()
            for key in iterable:
                new[key] = value
            return new

        def __eq__(self, other):
            '''od.__eq__(y) <==> od==y.  Comparison to another OD is order-sensitive
            while comparison to a regular mapping is order-insensitive.

            '''
            if isinstance(other, OrderedDict):
                return len(self)==len(other) and self.items() == other.items()
            return dict.__eq__(self, other)

        def __ne__(self, other):
            return not self == other

        # -- the following methods are only used in Python 2.7 --

        def viewkeys(self):
            "od.viewkeys() -> a set-like object providing a view on od's keys"
            # Python 2.7 dict-view API, backed by the ABC-based KeysView.
            return KeysView(self)

        def viewvalues(self):
            "od.viewvalues() -> an object providing a view on od's values"
            # Python 2.7 dict-view API, backed by the ABC-based ValuesView.
            return ValuesView(self)

        def viewitems(self):
            "od.viewitems() -> a set-like object providing a view on od's items"
            # Python 2.7 dict-view API, backed by the ABC-based ItemsView.
            return ItemsView(self)

try:
    from logging.config import BaseConfigurator, valid_ident
except ImportError: # pragma: no cover
    # Fallback definitions for Python versions whose logging.config does
    # not export these names.
    IDENTIFIER = re.compile('^[a-z_][a-z0-9_]*$', re.I)


    def valid_ident(s):
        """Return True if *s* is a valid Python identifier; raise ValueError otherwise."""
        if IDENTIFIER.match(s) is None:
            raise ValueError('Not a valid Python identifier: %r' % s)
        return True


    # The ConvertingXXX classes are wrappers around standard Python containers,
    # and they serve to convert any suitable values in the container. The
    # conversion converts base dicts, lists and tuples to their wrapped
    # equivalents, whereas strings which match a conversion format are converted
    # appropriately.
    #
    # Each wrapper should have a configurator attribute holding the actual
    # configurator to use for conversion.

    class ConvertingDict(dict):
        """A converting dictionary wrapper.

        ``configurator`` must be set on the instance; values fetched through
        ``__getitem__``/``get``/``pop`` are passed through
        ``configurator.convert`` and, where possible, written back so the
        conversion happens at most once per key.
        """

        def __getitem__(self, key):
            value = dict.__getitem__(self, key)
            result = self.configurator.convert(value)
            # If the converted value is different, save for next time
            if value is not result:
                self[key] = result
                if type(result) in (ConvertingDict, ConvertingList,
                                    ConvertingTuple):
                    result.parent = self
                    result.key = key
            return result

        def get(self, key, default=None):
            value = dict.get(self, key, default)
            result = self.configurator.convert(value)
            # If the converted value is different, save for next time
            if value is not result:
                self[key] = result
                if type(result) in (ConvertingDict, ConvertingList,
                                    ConvertingTuple):
                    result.parent = self
                    result.key = key
            return result

        def pop(self, key, default=None):
            # FIX: this method was accidentally dedented to module level, so
            # ConvertingDict instances fell back to plain dict.pop and
            # returned unconverted values; it belongs on the class.
            value = dict.pop(self, key, default)
            result = self.configurator.convert(value)
            if value is not result:
                # The key is already gone, so only record the parent link;
                # nothing is written back.
                if type(result) in (ConvertingDict, ConvertingList,
                                    ConvertingTuple):
                    result.parent = self
                    result.key = key
            return result

    class ConvertingList(list):
        """A converting list wrapper.

        ``configurator`` must be set on the instance; items read through
        ``__getitem__`` are converted and cached back into the list.
        """

        def __getitem__(self, key):
            raw = list.__getitem__(self, key)
            converted = self.configurator.convert(raw)
            # Cache the converted value so later reads skip conversion.
            if converted is not raw:
                self[key] = converted
                if type(converted) in (ConvertingDict, ConvertingList,
                                       ConvertingTuple):
                    converted.parent = self
                    converted.key = key
            return converted

        def pop(self, idx=-1):
            raw = list.pop(self, idx)
            converted = self.configurator.convert(raw)
            if converted is not raw:
                # The slot is gone, so only the parent link is recorded.
                if type(converted) in (ConvertingDict, ConvertingList,
                                       ConvertingTuple):
                    converted.parent = self
            return converted

    class ConvertingTuple(tuple):
        """A converting tuple wrapper.

        ``configurator`` must be set on the instance; tuples are immutable,
        so converted items are never written back.
        """

        def __getitem__(self, key):
            raw = tuple.__getitem__(self, key)
            converted = self.configurator.convert(raw)
            if converted is not raw:
                if type(converted) in (ConvertingDict, ConvertingList,
                                       ConvertingTuple):
                    converted.parent = self
                    converted.key = key
            return converted

    class BaseConfigurator(object):
        """
        The configurator base class which defines some useful defaults.

        Wraps a configuration dict in a ConvertingDict and provides the
        ``ext://`` (import an object) and ``cfg://`` (look up inside the
        config itself) string conversion protocols.
        """

        # Splits e.g. 'ext://sys.stderr' into prefix 'ext', suffix 'sys.stderr'.
        CONVERT_PATTERN = re.compile(r'^(?P<prefix>[a-z]+)://(?P<suffix>.*)$')

        # Patterns used by cfg_convert to walk a dotted / indexed path.
        WORD_PATTERN = re.compile(r'^\s*(\w+)\s*')
        DOT_PATTERN = re.compile(r'^\.\s*(\w+)\s*')
        INDEX_PATTERN = re.compile(r'^\[\s*(\w+)\s*\]\s*')
        DIGIT_PATTERN = re.compile(r'^\d+$')

        # Maps conversion prefixes to the method names handling them.
        value_converters = {
            'ext' : 'ext_convert',
            'cfg' : 'cfg_convert',
        }

        # We might want to use a different one, e.g. importlib
        importer = staticmethod(__import__)

        def __init__(self, config):
            # Wrap the config so nested values are converted lazily on access.
            self.config = ConvertingDict(config)
            self.config.configurator = self

        def resolve(self, s):
            """
            Resolve strings to objects using standard import and attribute
            syntax.
            """
            name = s.split('.')
            used = name.pop(0)
            try:
                found = self.importer(used)
                for frag in name:
                    used += '.' + frag
                    try:
                        found = getattr(found, frag)
                    except AttributeError:
                        # Not an attribute of the module: try importing the
                        # submodule, then retry the attribute lookup.
                        self.importer(used)
                        found = getattr(found, frag)
                return found
            except ImportError:
                e, tb = sys.exc_info()[1:]
                v = ValueError('Cannot resolve %r: %s' % (s, e))
                # Preserve the original cause and traceback for debugging.
                v.__cause__, v.__traceback__ = e, tb
                raise v

        def ext_convert(self, value):
            """Default converter for the ext:// protocol."""
            return self.resolve(value)

        def cfg_convert(self, value):
            """Default converter for the cfg:// protocol."""
            rest = value
            m = self.WORD_PATTERN.match(rest)
            if m is None:
                raise ValueError("Unable to convert %r" % value)
            else:
                rest = rest[m.end():]
                d = self.config[m.groups()[0]]
                # Walk the remainder of the path: '.name' performs a mapping
                # lookup, '[idx]' an index lookup (numeric when possible).
                while rest:
                    m = self.DOT_PATTERN.match(rest)
                    if m:
                        d = d[m.groups()[0]]
                    else:
                        m = self.INDEX_PATTERN.match(rest)
                        if m:
                            idx = m.groups()[0]
                            if not self.DIGIT_PATTERN.match(idx):
                                d = d[idx]
                            else:
                                try:
                                    n = int(idx) # try as number first (most likely)
                                    d = d[n]
                                except TypeError:
                                    # Container rejected the int key; fall
                                    # back to the string form.
                                    d = d[idx]
                    if m:
                        rest = rest[m.end():]
                    else:
                        raise ValueError('Unable to convert '
                                         '%r at %r' % (value, rest))
            # rest should be empty by now
            return d

        def convert(self, value):
            """
            Convert values to an appropriate type. dicts, lists and tuples are
            replaced by their converting alternatives. Strings are checked to
            see if they have a conversion format and are converted if they do.
            """
            if not isinstance(value, ConvertingDict) and isinstance(value, dict):
                value = ConvertingDict(value)
                value.configurator = self
            elif not isinstance(value, ConvertingList) and isinstance(value, list):
                value = ConvertingList(value)
                value.configurator = self
            elif not isinstance(value, ConvertingTuple) and\
                     isinstance(value, tuple):
                value = ConvertingTuple(value)
                value.configurator = self
            elif isinstance(value, string_types):
                # Strings of the form 'prefix://suffix' are dispatched to the
                # registered converter for that prefix, if any.
                m = self.CONVERT_PATTERN.match(value)
                if m:
                    d = m.groupdict()
                    prefix = d['prefix']
                    converter = self.value_converters.get(prefix, None)
                    if converter:
                        suffix = d['suffix']
                        converter = getattr(self, converter)
                        value = converter(suffix)
            return value

        def configure_custom(self, config):
            """Configure an object with a user-supplied factory."""
            c = config.pop('()')
            if not callable(c):
                c = self.resolve(c)
            # '.' holds attributes to set on the result after construction.
            props = config.pop('.', None)
            # Check for valid identifiers
            kwargs = dict([(k, config[k]) for k in config if valid_ident(k)])
            result = c(**kwargs)
            if props:
                for name, value in props.items():
                    setattr(result, name, value)
            return result

        def as_tuple(self, value):
            """Utility function which converts lists to tuples."""
            if isinstance(value, list):
                value = tuple(value)
            return value
_vendor/distlib/markers.py000064400000014212151733136310011653 0ustar00# -*- coding: utf-8 -*-
#
# Copyright (C) 2012-2013 Vinay Sajip.
# Licensed to the Python Software Foundation under a contributor agreement.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
"""Parser for the environment markers micro-language defined in PEP 345."""

import ast
import os
import sys
import platform

from .compat import python_implementation, string_types
from .util import in_venv

__all__ = ['interpret']


class Evaluator(object):
    """
    A limited evaluator for Python expressions.

    Only node types with a ``do_<type>`` handler are accepted, which
    restricts input to the comparison/boolean subset needed for PEP 345
    environment markers.
    """

    # Comparison/boolean operators, keyed by lowercased AST op class name.
    operators = {
        'eq': lambda x, y: x == y,
        'gt': lambda x, y: x > y,
        'gte': lambda x, y: x >= y,
        'in': lambda x, y: x in y,
        'lt': lambda x, y: x < y,
        'lte': lambda x, y: x <= y,
        'not': lambda x: not x,
        'noteq': lambda x, y: x != y,
        'notin': lambda x, y: x not in y,
    }

    # Names a marker may reference, with their values for this platform.
    allowed_values = {
        'sys_platform': sys.platform,
        'python_version': '%s.%s' % sys.version_info[:2],
        # parsing sys.platform is not reliable, but there is no other
        # way to get e.g. 2.7.2+, and the PEP is defined with sys.version
        'python_full_version': sys.version.split(' ', 1)[0],
        'os_name': os.name,
        'platform_in_venv': str(in_venv()),
        'platform_release': platform.release(),
        'platform_version': platform.version(),
        'platform_machine': platform.machine(),
        'platform_python_implementation': python_implementation(),
    }

    def __init__(self, context=None):
        """
        Initialise an instance.

        :param context: If specified, names are looked up in this mapping.
        """
        self.context = context or {}
        self.source = None

    def get_fragment(self, offset):
        """
        Get the part of the source which is causing a problem.
        """
        fragment_len = 10
        # Robustness: offset may be None on some SyntaxErrors; treat that
        # as the start of the source.
        offset = offset or 0
        s = '%r' % (self.source[offset:offset + fragment_len])
        if offset + fragment_len < len(self.source):
            s += '...'
        return s

    def get_handler(self, node_type):
        """
        Get a handler for the specified AST node type.
        """
        return getattr(self, 'do_%s' % node_type, None)

    def evaluate(self, node, filename=None):
        """
        Evaluate a source string or node, using ``filename`` when
        displaying errors.
        """
        if isinstance(node, string_types):
            self.source = node
            kwargs = {'mode': 'eval'}
            if filename:
                kwargs['filename'] = filename
            try:
                node = ast.parse(node, **kwargs)
            except SyntaxError as e:
                s = self.get_fragment(e.offset)
                raise SyntaxError('syntax error %s' % s)
        node_type = node.__class__.__name__.lower()
        handler = self.get_handler(node_type)
        if handler is None:
            if self.source is None:
                s = '(source not available)'
            else:
                s = self.get_fragment(node.col_offset)
            raise SyntaxError("don't know how to evaluate %r %s" % (
                node_type, s))
        return handler(node)

    def get_attr_key(self, node):
        assert isinstance(node, ast.Attribute), 'attribute node expected'
        return '%s.%s' % (node.value.id, node.attr)

    def do_attribute(self, node):
        """Evaluate a dotted name such as ``os.name``.

        FIX: the original referenced ``key`` before assignment when the
        attribute base was not a plain name, raising NameError instead of
        the intended SyntaxError.
        """
        if not isinstance(node.value, ast.Name):
            raise SyntaxError('invalid expression: attribute base '
                              'must be a name')
        key = self.get_attr_key(node)
        # The explicit context takes precedence over the built-in values.
        if key in self.context:
            return self.context[key]
        if key in self.allowed_values:
            return self.allowed_values[key]
        raise SyntaxError('invalid expression: %s' % key)

    def do_boolop(self, node):
        """Evaluate an 'and'/'or' chain with short-circuiting."""
        result = self.evaluate(node.values[0])
        is_or = node.op.__class__ is ast.Or
        is_and = node.op.__class__ is ast.And
        assert is_or or is_and
        # Keep evaluating operands only while the outcome is undecided.
        if (is_and and result) or (is_or and not result):
            for n in node.values[1:]:
                result = self.evaluate(n)
                if (is_or and result) or (is_and and not result):
                    break
        return result

    def do_compare(self, node):
        """Evaluate a (possibly chained) comparison left to right."""
        def sanity_check(lhsnode, rhsnode):
            # Comparing two string literals is meaningless in a marker.
            if isinstance(lhsnode, ast.Str) and isinstance(rhsnode, ast.Str):
                s = self.get_fragment(node.col_offset)
                raise SyntaxError('Invalid comparison: %s' % s)

        lhsnode = node.left
        lhs = self.evaluate(lhsnode)
        result = True
        for op, rhsnode in zip(node.ops, node.comparators):
            sanity_check(lhsnode, rhsnode)
            op = op.__class__.__name__.lower()
            if op not in self.operators:
                raise SyntaxError('unsupported operation: %r' % op)
            rhs = self.evaluate(rhsnode)
            result = self.operators[op](lhs, rhs)
            if not result:
                # Chained comparisons short-circuit on the first failure.
                break
            lhs = rhs
            lhsnode = rhsnode
        return result

    def do_expression(self, node):
        # 'eval' mode wraps everything in an Expression node.
        return self.evaluate(node.body)

    def do_name(self, node):
        """Resolve a bare name from the context or the allowed values."""
        if node.id in self.context:
            return self.context[node.id]
        if node.id in self.allowed_values:
            return self.allowed_values[node.id]
        raise SyntaxError('invalid expression: %s' % node.id)

    def do_str(self, node):
        return node.s


def interpret(marker, execution_context=None):
    """
    Interpret a marker and return a result depending on environment.

    :param marker: The marker to interpret.
    :type marker: str
    :param execution_context: The context used for name lookup.
    :type execution_context: mapping
    """
    evaluator = Evaluator(execution_context)
    return evaluator.evaluate(marker.strip())
_vendor/distlib/metadata.py000064400000113661151733136310011777 0ustar00# -*- coding: utf-8 -*-
#
# Copyright (C) 2012 The Python Software Foundation.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
"""Implementation of the Metadata for Python packages PEPs.

Supports all metadata formats (1.0, 1.1, 1.2, and 2.0 experimental).
"""
from __future__ import unicode_literals

import codecs
from email import message_from_file
import json
import logging
import re


from . import DistlibException, __version__
from .compat import StringIO, string_types, text_type
from .markers import interpret
from .util import extract_by_key, get_extras
from .version import get_scheme, PEP440_VERSION_RE

logger = logging.getLogger(__name__)


class MetadataMissingError(DistlibException):
    """A required metadata field is missing."""


class MetadataConflictError(DistlibException):
    """Attempt to read or write metadata fields that conflict."""


class MetadataUnrecognizedVersionError(DistlibException):
    """Unknown metadata version number."""


class MetadataInvalidError(DistlibException):
    """A metadata value is invalid."""

# public API of this module
__all__ = ['Metadata', 'PKG_INFO_ENCODING', 'PKG_INFO_PREFERRED_VERSION']

# Encoding used for the PKG-INFO files
PKG_INFO_ENCODING = 'utf-8'

# preferred version. Hopefully will be changed
# to 1.2 once PEP 345 is supported everywhere
PKG_INFO_PREFERRED_VERSION = '1.1'

_LINE_PREFIX_1_2 = re.compile('\n       \|')
_LINE_PREFIX_PRE_1_2 = re.compile('\n        ')
_241_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform',
               'Summary', 'Description',
               'Keywords', 'Home-page', 'Author', 'Author-email',
               'License')

_314_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform',
               'Supported-Platform', 'Summary', 'Description',
               'Keywords', 'Home-page', 'Author', 'Author-email',
               'License', 'Classifier', 'Download-URL', 'Obsoletes',
               'Provides', 'Requires')

_314_MARKERS = ('Obsoletes', 'Provides', 'Requires', 'Classifier',
                'Download-URL')

_345_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform',
               'Supported-Platform', 'Summary', 'Description',
               'Keywords', 'Home-page', 'Author', 'Author-email',
               'Maintainer', 'Maintainer-email', 'License',
               'Classifier', 'Download-URL', 'Obsoletes-Dist',
               'Project-URL', 'Provides-Dist', 'Requires-Dist',
               'Requires-Python', 'Requires-External')

_345_MARKERS = ('Provides-Dist', 'Requires-Dist', 'Requires-Python',
                'Obsoletes-Dist', 'Requires-External', 'Maintainer',
                'Maintainer-email', 'Project-URL')

_426_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform',
               'Supported-Platform', 'Summary', 'Description',
               'Keywords', 'Home-page', 'Author', 'Author-email',
               'Maintainer', 'Maintainer-email', 'License',
               'Classifier', 'Download-URL', 'Obsoletes-Dist',
               'Project-URL', 'Provides-Dist', 'Requires-Dist',
               'Requires-Python', 'Requires-External', 'Private-Version',
               'Obsoleted-By', 'Setup-Requires-Dist', 'Extension',
               'Provides-Extra')

_426_MARKERS = ('Private-Version', 'Provides-Extra', 'Obsoleted-By',
                'Setup-Requires-Dist', 'Extension')

_ALL_FIELDS = set()
_ALL_FIELDS.update(_241_FIELDS)
_ALL_FIELDS.update(_314_FIELDS)
_ALL_FIELDS.update(_345_FIELDS)
_ALL_FIELDS.update(_426_FIELDS)

EXTRA_RE = re.compile(r'''extra\s*==\s*("([^"]+)"|'([^']+)')''')


def _version2fieldlist(version):
    """Return the tuple of header fields defined for metadata *version*.

    Raises MetadataUnrecognizedVersionError for unknown versions.
    """
    for known, fields in (('1.0', _241_FIELDS), ('1.1', _314_FIELDS),
                          ('1.2', _345_FIELDS), ('2.0', _426_FIELDS)):
        if version == known:
            return fields
    raise MetadataUnrecognizedVersionError(version)


def _best_version(fields):
    """Detect the best version depending on the fields used."""
    def _has_marker(keys, markers):
        # True if any version-specific marker field is present.
        return any(marker in keys for marker in markers)

    # Ignore empty / unknown values when deciding.
    keys = [key for key, value in fields.items()
            if value not in ([], 'UNKNOWN', None)]

    # Eliminate versions whose field list does not cover a used key.
    possible_versions = ['1.0', '1.1', '1.2', '2.0']
    version_fields = (('1.0', _241_FIELDS), ('1.1', _314_FIELDS),
                      ('1.2', _345_FIELDS), ('2.0', _426_FIELDS))
    for key in keys:
        for version, allowed in version_fields:
            if key not in allowed and version in possible_versions:
                possible_versions.remove(version)

    # possible_versions now contains only qualified versions
    if len(possible_versions) == 1:
        return possible_versions[0]
    if not possible_versions:
        raise MetadataConflictError('Unknown metadata set')

    # See whether exactly one version's marker fields are in use.
    is_1_1 = '1.1' in possible_versions and _has_marker(keys, _314_MARKERS)
    is_1_2 = '1.2' in possible_versions and _has_marker(keys, _345_MARKERS)
    is_2_0 = '2.0' in possible_versions and _has_marker(keys, _426_MARKERS)
    if sum((is_1_1, is_1_2, is_2_0)) > 1:
        raise MetadataConflictError('You used incompatible 1.1/1.2/2.0 fields')

    # we have the choice, 1.0, or 1.2, or 2.0
    #   - 1.0 has a broken Summary field but works with all tools
    #   - 1.1 is to avoid
    #   - 1.2 fixes Summary but has little adoption
    #   - 2.0 adds more features and is very new
    if not (is_1_1 or is_1_2 or is_2_0):
        # No specific marker found: fall back to the preferred version.
        if PKG_INFO_PREFERRED_VERSION in possible_versions:
            return PKG_INFO_PREFERRED_VERSION
    if is_1_1:
        return '1.1'
    if is_1_2:
        return '1.2'
    return '2.0'

# Maps attribute-style names (snake_case) to canonical header field names.
_ATTR2FIELD = {
    'metadata_version': 'Metadata-Version',
    'name': 'Name',
    'version': 'Version',
    'platform': 'Platform',
    'supported_platform': 'Supported-Platform',
    'summary': 'Summary',
    'description': 'Description',
    'keywords': 'Keywords',
    'home_page': 'Home-page',
    'author': 'Author',
    'author_email': 'Author-email',
    'maintainer': 'Maintainer',
    'maintainer_email': 'Maintainer-email',
    'license': 'License',
    'classifier': 'Classifier',
    'download_url': 'Download-URL',
    'obsoletes_dist': 'Obsoletes-Dist',
    'provides_dist': 'Provides-Dist',
    'requires_dist': 'Requires-Dist',
    'setup_requires_dist': 'Setup-Requires-Dist',
    'requires_python': 'Requires-Python',
    'requires_external': 'Requires-External',
    'requires': 'Requires',
    'provides': 'Provides',
    'obsoletes': 'Obsoletes',
    'project_url': 'Project-URL',
    'private_version': 'Private-Version',
    'obsoleted_by': 'Obsoleted-By',
    'extension': 'Extension',
    'provides_extra': 'Provides-Extra',
}

# Fields whose values are version predicates (validated via the scheme).
_PREDICATE_FIELDS = ('Requires-Dist', 'Obsoletes-Dist', 'Provides-Dist')
# Fields holding a version-constraint list, and a plain version respectively.
_VERSIONS_FIELDS = ('Requires-Python',)
_VERSION_FIELDS = ('Version',)
# Fields that may occur multiple times and are stored as lists.
_LISTFIELDS = ('Platform', 'Classifier', 'Obsoletes',
               'Requires', 'Provides', 'Obsoletes-Dist',
               'Provides-Dist', 'Requires-Dist', 'Requires-External',
               'Project-URL', 'Supported-Platform', 'Setup-Requires-Dist',
               'Provides-Extra', 'Extension')
# List fields whose entries are comma-separated tuples.
_LISTTUPLEFIELDS = ('Project-URL',)

# Fields stored as a list but serialized as one comma-joined line.
_ELEMENTSFIELD = ('Keywords',)

_UNICODEFIELDS = ('Author', 'Maintainer', 'Summary', 'Description')

# Sentinel for "no default supplied".
_MISSING = object()

# Runs of characters unsafe in filenames; collapsed to a single '-'.
_FILESAFE = re.compile('[^A-Za-z0-9.]+')


def _get_name_and_version(name, version, for_filename=False):
    """Return the distribution name with version.

    If for_filename is true, return a filename-escaped form."""
    if not for_filename:
        return '%s-%s' % (name, version)
    # For both name and version any runs of non-alphanumeric or '.'
    # characters are replaced with a single '-'.  Additionally any
    # spaces in the version string become '.'
    safe_name = _FILESAFE.sub('-', name)
    safe_version = _FILESAFE.sub('-', version.replace(' ', '.'))
    return '%s-%s' % (safe_name, safe_version)


class LegacyMetadata(object):
    """The legacy metadata of a release.

    Supports versions 1.0, 1.1 and 1.2 (auto-detected). You can
    instantiate the class with one of these arguments (or none):
    - *path*, the path to a metadata file
    - *fileobj* give a file-like object with metadata as content
    - *mapping* is a dict-like object
    - *scheme* is a version scheme name
    """
    # TODO document the mapping API and UNKNOWN default key

    def __init__(self, path=None, fileobj=None, mapping=None,
                 scheme='default'):
        if [path, fileobj, mapping].count(None) < 2:
            raise TypeError('path, fileobj and mapping are exclusive')
        self._fields = {}
        self.requires_files = []
        self._dependencies = None
        self.scheme = scheme
        if path is not None:
            self.read(path)
        elif fileobj is not None:
            self.read_file(fileobj)
        elif mapping is not None:
            self.update(mapping)
            self.set_metadata_version()

    def set_metadata_version(self):
        # Recompute 'Metadata-Version' from the fields currently present.
        self._fields['Metadata-Version'] = _best_version(self._fields)

    def _write_field(self, fileobj, name, value):
        fileobj.write('%s: %s\n' % (name, value))

    def __getitem__(self, name):
        # Mapping-style access; delegates to get().
        return self.get(name)

    def __setitem__(self, name, value):
        # Mapping-style assignment; delegates to set().
        return self.set(name, value)

    def __delitem__(self, name):
        """Delete a field, accepting attribute-style names as well."""
        field_name = self._convert_name(name)
        if field_name not in self._fields:
            # Report the name the caller used, not the converted one.
            raise KeyError(name)
        del self._fields[field_name]

    def __contains__(self, name):
        # Accept both canonical field names and attribute-style names.
        if name in self._fields:
            return True
        return self._convert_name(name) in self._fields

    def _convert_name(self, name):
        """Normalize an attribute-style name to its canonical field name."""
        if name in _ALL_FIELDS:
            return name
        attr_name = name.replace('-', '_').lower()
        # Unknown names are returned lowercased/underscored unchanged.
        return _ATTR2FIELD.get(attr_name, attr_name)

    def _default_value(self, name):
        # List-valued fields default to an empty list, others to 'UNKNOWN'.
        is_list = name in _LISTFIELDS or name in _ELEMENTSFIELD
        return [] if is_list else 'UNKNOWN'

    def _remove_line_prefix(self, value):
        # Unfold Description continuation lines using the prefix style of
        # the current metadata version.
        if self.metadata_version in ('1.0', '1.1'):
            pattern = _LINE_PREFIX_PRE_1_2
        else:
            pattern = _LINE_PREFIX_1_2
        return pattern.sub('\n', value)

    def __getattr__(self, name):
        # Expose known fields as attributes (e.g. md.requires_dist).
        if name not in _ATTR2FIELD:
            raise AttributeError(name)
        return self[name]

    #
    # Public API
    #

#    dependencies = property(_get_dependencies, _set_dependencies)

    def get_fullname(self, filesafe=False):
        """Return the distribution name with version.

        If filesafe is true, return a filename-escaped form."""
        # Delegates to the module-level helper using the stored fields.
        return _get_name_and_version(self['Name'], self['Version'], filesafe)

    def is_field(self, name):
        """return True if name is a valid metadata key"""
        return self._convert_name(name) in _ALL_FIELDS

    def is_multi_field(self, name):
        """Return True if *name* refers to a list-valued field."""
        return self._convert_name(name) in _LISTFIELDS

    def read(self, filepath):
        """Read the metadata values from a file path (UTF-8 encoded)."""
        # codecs.open returns a context manager, closing the stream even
        # if read_file raises.
        with codecs.open(filepath, 'r', encoding='utf-8') as fp:
            self.read_file(fp)

    def read_file(self, fileob):
        """Read the metadata values from a file object (RFC 822 format)."""
        msg = message_from_file(fileob)
        self._fields['Metadata-Version'] = msg['metadata-version']

        # When reading, get all the fields we can
        for field in _ALL_FIELDS:
            if field not in msg:
                continue
            if field in _LISTFIELDS:
                # we can have multiple lines
                values = msg.get_all(field)
                # Project-URL style entries are 'label,url' pairs stored
                # as tuples.
                if field in _LISTTUPLEFIELDS and values is not None:
                    values = [tuple(value.split(',')) for value in values]
                self.set(field, values)
            else:
                # single line
                value = msg[field]
                if value is not None and value != 'UNKNOWN':
                    self.set(field, value)
        # Recompute the version now that the fields are known.
        self.set_metadata_version()

    def write(self, filepath, skip_unknown=False):
        """Write the metadata fields to *filepath* as UTF-8 text."""
        # codecs.open returns a context manager, closing the stream even
        # if write_file raises.
        with codecs.open(filepath, 'w', encoding='utf-8') as fp:
            self.write_file(fp, skip_unknown)

    def write_file(self, fileobject, skip_unknown=False):
        """Write the PKG-INFO format data to a file object.

        If *skip_unknown* is true, fields whose value is unset or
        'UNKNOWN' are omitted.
        """
        self.set_metadata_version()

        for field in _version2fieldlist(self['Metadata-Version']):
            values = self.get(field)
            if skip_unknown and values in ('UNKNOWN', [], ['UNKNOWN']):
                continue
            if field in _ELEMENTSFIELD:
                # Keywords are serialized as a single comma-joined line.
                self._write_field(fileobject, field, ','.join(values))
                continue
            if field not in _LISTFIELDS:
                if field == 'Description':
                    # Fold the multi-line description with the continuation
                    # prefix appropriate to the metadata version.
                    if self.metadata_version in ('1.0', '1.1'):
                        values = values.replace('\n', '\n        ')
                    else:
                        values = values.replace('\n', '\n       |')
                values = [values]

            if field in _LISTTUPLEFIELDS:
                # Tuple entries (e.g. Project-URL) are comma-joined.
                values = [','.join(value) for value in values]

            for value in values:
                self._write_field(fileobject, field, value)

    def update(self, other=None, **kwargs):
        """Set metadata values from the given iterable `other` and kwargs.

        Behavior is like `dict.update`: If `other` has a ``keys`` method,
        they are looped over and ``self[key]`` is assigned ``other[key]``.
        Else, ``other`` is an iterable of ``(key, value)`` iterables.

        Keys that don't match a metadata field or that have an empty value are
        dropped.
        """
        def _set(key, value):
            # Silently skip unknown keys and empty values.
            if key in _ATTR2FIELD and value:
                self.set(self._convert_name(key), value)

        if other:
            if hasattr(other, 'keys'):
                for key in other.keys():
                    _set(key, other[key])
            else:
                for key, value in other:
                    _set(key, value)

        for key, value in kwargs.items():
            _set(key, value)

    def set(self, name, value):
        """Control then set a metadata field.

        The value is normalised according to the field kind, checked
        (warnings only) against the version scheme, and stored.

        :param name: Field name, in attribute or field form.
        :param value: Value to store; strings may be split into lists for
            list-like fields.
        """
        name = self._convert_name(name)

        # Element fields and Platform accept a comma-separated string,
        # stored as a list of stripped items.
        if ((name in _ELEMENTSFIELD or name == 'Platform') and
            not isinstance(value, (list, tuple))):
            if isinstance(value, string_types):
                value = [v.strip() for v in value.split(',')]
            else:
                value = []
        elif (name in _LISTFIELDS and
              not isinstance(value, (list, tuple))):
            # Other list fields wrap a single string in a list.
            if isinstance(value, string_types):
                value = [value]
            else:
                value = []

        # Validation only emits warnings, and only when the logger would
        # actually record them.
        if logger.isEnabledFor(logging.WARNING):
            project_name = self['Name']

            scheme = get_scheme(self.scheme)
            if name in _PREDICATE_FIELDS and value is not None:
                for v in value:
                    # check that the values are valid
                    if not scheme.is_valid_matcher(v.split(';')[0]):
                        logger.warning(
                            "'%s': '%s' is not valid (field '%s')",
                            project_name, v, name)
            # FIXME this rejects UNKNOWN, is that right?
            elif name in _VERSIONS_FIELDS and value is not None:
                if not scheme.is_valid_constraint_list(value):
                    logger.warning("'%s': '%s' is not a valid version (field '%s')",
                                   project_name, value, name)
            elif name in _VERSION_FIELDS and value is not None:
                if not scheme.is_valid_version(value):
                    logger.warning("'%s': '%s' is not a valid version (field '%s')",
                                   project_name, value, name)

        if name in _UNICODEFIELDS:
            if name == 'Description':
                # Strip the leading continuation markers added on write.
                value = self._remove_line_prefix(value)

        self._fields[name] = value

    def get(self, name, default=_MISSING):
        """Get a metadata field, applying per-field-kind conversions."""
        name = self._convert_name(name)
        if name not in self._fields:
            # Fall back to the field's default when the caller gave none.
            return self._default_value(name) if default is _MISSING else default
        value = self._fields[name]
        if name in _UNICODEFIELDS:
            return value
        if name in _LISTFIELDS:
            if value is None:
                return []
            if name in _LISTTUPLEFIELDS:
                # That's for Project-URL: (label, url) pairs.
                return [(item[0], item[1]) for item in value]
            return list(value)
        if name in _ELEMENTSFIELD and isinstance(value, string_types):
            return value.split(',')
        return value

    def check(self, strict=False):
        """Check if the metadata is compliant. If strict is True then raise if
        no Name or Version are provided.

        :param strict: If true, a missing Name/Version raises
            MetadataMissingError instead of only being reported.
        :return: A ``(missing, warnings)`` pair of lists.
        """
        self.set_metadata_version()

        # XXX should check the versions (if the file was loaded)
        # NOTE: this local name shadows the module-level ``warnings`` import.
        missing, warnings = [], []

        for attr in ('Name', 'Version'):  # required by PEP 345
            if attr not in self:
                missing.append(attr)

        if strict and missing != []:
            msg = 'missing required metadata: %s' % ', '.join(missing)
            raise MetadataMissingError(msg)

        # Recommended but not mandatory fields - reported, never raised on.
        for attr in ('Home-page', 'Author'):
            if attr not in self:
                missing.append(attr)

        # checking metadata 1.2 (XXX needs to check 1.1, 1.0)
        if self['Metadata-Version'] != '1.2':
            return missing, warnings

        scheme = get_scheme(self.scheme)

        def are_valid_constraints(value):
            # Each entry may carry an environment marker after ';'.
            for v in value:
                if not scheme.is_valid_matcher(v.split(';')[0]):
                    return False
            return True

        for fields, controller in ((_PREDICATE_FIELDS, are_valid_constraints),
                                   (_VERSIONS_FIELDS,
                                    scheme.is_valid_constraint_list),
                                   (_VERSION_FIELDS,
                                    scheme.is_valid_version)):
            for field in fields:
                value = self.get(field, None)
                if value is not None and not controller(value):
                    warnings.append("Wrong value for '%s': %s" % (field, value))

        return missing, warnings

    def todict(self, skip_missing=False):
        """Return fields as a dict.

        Field names will be converted to use the underscore-lowercase style
        instead of hyphen-mixed case (i.e. home_page instead of Home-page).

        :param skip_missing: If true, only fields actually present in
            ``self._fields`` are included.
        """
        self.set_metadata_version()

        # Fields common to all metadata versions (1.0 and later).
        mapping_1_0 = (
            ('metadata_version', 'Metadata-Version'),
            ('name', 'Name'),
            ('version', 'Version'),
            ('summary', 'Summary'),
            ('home_page', 'Home-page'),
            ('author', 'Author'),
            ('author_email', 'Author-email'),
            ('license', 'License'),
            ('description', 'Description'),
            ('keywords', 'Keywords'),
            ('platform', 'Platform'),
            ('classifiers', 'Classifier'),
            ('download_url', 'Download-URL'),
        )

        data = {}
        for key, field_name in mapping_1_0:
            if not skip_missing or field_name in self._fields:
                data[key] = self[field_name]

        if self['Metadata-Version'] == '1.2':
            mapping_1_2 = (
                ('requires_dist', 'Requires-Dist'),
                ('requires_python', 'Requires-Python'),
                ('requires_external', 'Requires-External'),
                ('provides_dist', 'Provides-Dist'),
                ('obsoletes_dist', 'Obsoletes-Dist'),
                ('project_url', 'Project-URL'),
                ('maintainer', 'Maintainer'),
                ('maintainer_email', 'Maintainer-email'),
            )
            for key, field_name in mapping_1_2:
                if not skip_missing or field_name in self._fields:
                    if key != 'project_url':
                        data[key] = self[field_name]
                    else:
                        # Project-URL values are (label, url) pairs; flatten
                        # each one to a 'label,url' string.
                        data[key] = [','.join(u) for u in self[field_name]]

        elif self['Metadata-Version'] == '1.1':
            mapping_1_1 = (
                ('provides', 'Provides'),
                ('requires', 'Requires'),
                ('obsoletes', 'Obsoletes'),
            )
            for key, field_name in mapping_1_1:
                if not skip_missing or field_name in self._fields:
                    data[key] = self[field_name]

        return data

    def add_requirements(self, requirements):
        """Append *requirements* to Requires-Dist, dropping 1.1-only fields."""
        if self['Metadata-Version'] == '1.1':
            # we can't have 1.1 metadata *and* Setuptools requires
            for legacy_field in ('Obsoletes', 'Requires', 'Provides'):
                if legacy_field in self:
                    del self[legacy_field]
        self['Requires-Dist'] += requirements

    # Mapping API
    # TODO could add iter* variants

    def keys(self):
        """Return the field names valid for the current metadata version."""
        version = self['Metadata-Version']
        return list(_version2fieldlist(version))

    def __iter__(self):
        """Iterate over the known field names."""
        return iter(self.keys())

    def values(self):
        """Return the value of each known field, in key order."""
        return [self[field] for field in self.keys()]

    def items(self):
        """Return (field, value) pairs for each known field."""
        return [(field, self[field]) for field in self.keys()]

    def __repr__(self):
        """Show the class name plus the distribution name and version."""
        return '<%s %s %s>' % (type(self).__name__, self.name,
                               self.version)


# Canonical filename for JSON (2.0) metadata in a .dist-info directory.
METADATA_FILENAME = 'pydist.json'
# Filename used by wheels that ship JSON metadata.
WHEEL_METADATA_FILENAME = 'metadata.json'


class Metadata(object):
    """
    The metadata of a release. This implementation uses 2.0 (JSON)
    metadata where possible. If not possible, it wraps a LegacyMetadata
    instance which handles the key-value metadata format.
    """

    # Raw strings stop '\d' being treated as an (invalid) string escape.
    METADATA_VERSION_MATCHER = re.compile(r'^\d+(\.\d+)*$')

    NAME_MATCHER = re.compile('^[0-9A-Z]([0-9A-Z_.-]*[0-9A-Z])?$', re.I)

    VERSION_MATCHER = PEP440_VERSION_RE

    SUMMARY_MATCHER = re.compile('.{1,2047}')

    METADATA_VERSION = '2.0'

    GENERATOR = 'distlib (%s)' % __version__

    # Keys that must be present, mapped to the schemes for which each one
    # is allowed to be absent.
    MANDATORY_KEYS = {
        'name': (),
        'version': (),
        'summary': ('legacy',),
    }

    INDEX_KEYS = ('name version license summary description author '
                  'author_email keywords platform home_page classifiers '
                  'download_url')

    DEPENDENCY_KEYS = ('extras run_requires test_requires build_requires '
                       'dev_requires provides meta_requires obsoleted_by '
                       'supports_environments')

    SYNTAX_VALIDATORS = {
        'metadata_version': (METADATA_VERSION_MATCHER, ()),
        'name': (NAME_MATCHER, ('legacy',)),
        'version': (VERSION_MATCHER, ('legacy',)),
        'summary': (SUMMARY_MATCHER, ('legacy',)),
    }

    __slots__ = ('_legacy', '_data', 'scheme')

    def __init__(self, path=None, fileobj=None, mapping=None,
                 scheme='default'):
        """Initialise from at most one of *path*, *fileobj* or *mapping*.

        :param path: Path of a metadata file (JSON or legacy format).
        :param fileobj: A file-like object to read metadata from.
        :param mapping: A dict of already-parsed metadata.
        :param scheme: The version scheme name used for validation.
        :raises TypeError: if more than one data source is supplied.
        """
        if [path, fileobj, mapping].count(None) < 2:
            raise TypeError('path, fileobj and mapping are exclusive')
        self._legacy = None
        self._data = None
        self.scheme = scheme
        if mapping is not None:
            try:
                self._validate_mapping(mapping, scheme)
                self._data = mapping
            except MetadataUnrecognizedVersionError:
                # Not 2.0 metadata - wrap it in the legacy handler.
                self._legacy = LegacyMetadata(mapping=mapping, scheme=scheme)
                self.validate()
        else:
            data = None
            if path:
                with open(path, 'rb') as f:
                    data = f.read()
            elif fileobj:
                data = fileobj.read()
            if data is None:
                # Initialised with no args - to be added
                self._data = {
                    'metadata_version': self.METADATA_VERSION,
                    'generator': self.GENERATOR,
                }
            else:
                if not isinstance(data, text_type):
                    data = data.decode('utf-8')
                try:
                    self._data = json.loads(data)
                    self._validate_mapping(self._data, scheme)
                except ValueError:
                    # Note: MetadataUnrecognizedVersionError does not
                    # inherit from ValueError (it's a DistlibException,
                    # which should not inherit from ValueError).
                    # The ValueError comes from the json.load - if that
                    # succeeds and we get a validation error, we want
                    # that to propagate
                    self._legacy = LegacyMetadata(fileobj=StringIO(data),
                                                  scheme=scheme)
                    self.validate()

    # Keys handled identically in both storage formats.
    common_keys = set(('name', 'version', 'license', 'keywords', 'summary'))

    # (legacy field name, default factory) pairs: a None field name means
    # "no legacy equivalent"; a None factory means the default is None.
    none_list = (None, list)
    none_dict = (None, dict)

    mapped_keys = {
        'run_requires': ('Requires-Dist', list),
        'build_requires': ('Setup-Requires-Dist', list),
        'dev_requires': none_list,
        'test_requires': none_list,
        'meta_requires': none_list,
        'extras': ('Provides-Extra', list),
        'modules': none_list,
        'namespaces': none_list,
        'exports': none_dict,
        'commands': none_dict,
        'classifiers': ('Classifier', list),
        'source_url': ('Download-URL', None),
        'metadata_version': ('Metadata-Version', None),
    }

    del none_list, none_dict

    def __getattribute__(self, key):
        """Dispatch attribute reads through the key mappings above."""
        common = object.__getattribute__(self, 'common_keys')
        mapped = object.__getattribute__(self, 'mapped_keys')
        if key in mapped:
            lk, maker = mapped[key]
            if self._legacy:
                if lk is None:
                    result = None if maker is None else maker()
                else:
                    result = self._legacy.get(lk)
            else:
                value = None if maker is None else maker()
                if key not in ('commands', 'exports', 'modules', 'namespaces',
                               'classifiers'):
                    result = self._data.get(key, value)
                else:
                    # special cases for PEP 459
                    sentinel = object()
                    result = sentinel
                    d = self._data.get('extensions')
                    if d:
                        if key == 'commands':
                            result = d.get('python.commands', value)
                        elif key == 'classifiers':
                            d = d.get('python.details')
                            if d:
                                result = d.get(key, value)
                        else:
                            d = d.get('python.exports')
                            if not d:
                                d = self._data.get('python.exports')
                            if d:
                                result = d.get(key, value)
                    if result is sentinel:
                        result = value
        elif key not in common:
            # Ordinary attribute access (slots, methods, properties).
            result = object.__getattribute__(self, key)
        elif self._legacy:
            result = self._legacy.get(key)
        else:
            result = self._data.get(key)
        return result

    def _validate_value(self, key, value, scheme=None):
        """Raise MetadataInvalidError if *value* fails *key*'s syntax check."""
        if key in self.SYNTAX_VALIDATORS:
            pattern, exclusions = self.SYNTAX_VALIDATORS[key]
            if (scheme or self.scheme) not in exclusions:
                m = pattern.match(value)
                if not m:
                    raise MetadataInvalidError("'%s' is an invalid value for "
                                               "the '%s' property" % (value,
                                                                      key))

    def __setattr__(self, key, value):
        """Dispatch attribute writes through the key mappings above."""
        self._validate_value(key, value)
        common = object.__getattribute__(self, 'common_keys')
        mapped = object.__getattribute__(self, 'mapped_keys')
        if key in mapped:
            lk, _ = mapped[key]
            if self._legacy:
                if lk is None:
                    raise NotImplementedError
                self._legacy[lk] = value
            elif key not in ('commands', 'exports', 'modules', 'namespaces',
                             'classifiers'):
                self._data[key] = value
            else:
                # special cases for PEP 459
                d = self._data.setdefault('extensions', {})
                if key == 'commands':
                    d['python.commands'] = value
                elif key == 'classifiers':
                    d = d.setdefault('python.details', {})
                    d[key] = value
                else:
                    d = d.setdefault('python.exports', {})
                    d[key] = value
        elif key not in common:
            object.__setattr__(self, key, value)
        else:
            if key == 'keywords':
                # Accept a whitespace-separated string for keywords.
                if isinstance(value, string_types):
                    value = value.strip()
                    if value:
                        value = value.split()
                    else:
                        value = []
            if self._legacy:
                self._legacy[key] = value
            else:
                self._data[key] = value

    @property
    def name_and_version(self):
        """Return the 'name (version)' display form for this release."""
        return _get_name_and_version(self.name, self.version, True)

    @property
    def provides(self):
        """Return the provides list, always including this release itself."""
        if self._legacy:
            result = self._legacy['Provides-Dist']
        else:
            result = self._data.setdefault('provides', [])
        s = '%s (%s)' % (self.name, self.version)
        if s not in result:
            result.append(s)
        return result

    @provides.setter
    def provides(self, value):
        if self._legacy:
            self._legacy['Provides-Dist'] = value
        else:
            self._data['provides'] = value

    def get_requirements(self, reqts, extras=None, env=None):
        """
        Base method to get dependencies, given a set of extras
        to satisfy and an optional environment context.
        :param reqts: A list of sometimes-wanted dependencies,
                      perhaps dependent on extras and environment.
        :param extras: A list of optional components being requested.
        :param env: An optional environment for marker evaluation.
        """
        if self._legacy:
            result = reqts
        else:
            result = []
            extras = get_extras(extras or [], self.extras)
            for d in reqts:
                if 'extra' not in d and 'environment' not in d:
                    # unconditional
                    include = True
                else:
                    if 'extra' not in d:
                        # Not extra-dependent - only environment-dependent
                        include = True
                    else:
                        include = d.get('extra') in extras
                    if include:
                        # Not excluded because of extras, check environment
                        marker = d.get('environment')
                        if marker:
                            include = interpret(marker, env)
                if include:
                    result.extend(d['requires'])
            for key in ('build', 'dev', 'test'):
                e = ':%s:' % key
                if e in extras:
                    extras.remove(e)
                    # A recursive call, but it should terminate since 'test'
                    # has been removed from the extras
                    reqts = self._data.get('%s_requires' % key, [])
                    result.extend(self.get_requirements(reqts, extras=extras,
                                                        env=env))
        return result

    @property
    def dictionary(self):
        """Return the metadata as a 2.0-style dict (converting if legacy)."""
        if self._legacy:
            return self._from_legacy()
        return self._data

    @property
    def dependencies(self):
        """Return only the dependency-related keys of the metadata."""
        if self._legacy:
            raise NotImplementedError
        else:
            return extract_by_key(self._data, self.DEPENDENCY_KEYS)

    @dependencies.setter
    def dependencies(self, value):
        if self._legacy:
            raise NotImplementedError
        else:
            self._data.update(value)

    def _validate_mapping(self, mapping, scheme):
        """Validate a 2.0 metadata mapping, raising on any problem.

        :raises MetadataUnrecognizedVersionError: wrong metadata_version.
        :raises MetadataMissingError: mandatory keys are absent.
        :raises MetadataInvalidError: a value fails its syntax check.
        """
        if mapping.get('metadata_version') != self.METADATA_VERSION:
            raise MetadataUnrecognizedVersionError()
        missing = []
        for key, exclusions in self.MANDATORY_KEYS.items():
            if key not in mapping:
                if scheme not in exclusions:
                    missing.append(key)
        if missing:
            msg = 'Missing metadata items: %s' % ', '.join(missing)
            raise MetadataMissingError(msg)
        for k, v in mapping.items():
            self._validate_value(k, v, scheme)

    def validate(self):
        """Validate the metadata; legacy problems are logged, not raised."""
        if self._legacy:
            missing, warnings = self._legacy.check(True)
            if missing or warnings:
                logger.warning('Metadata: missing: %s, warnings: %s',
                               missing, warnings)
        else:
            self._validate_mapping(self._data, self.scheme)

    def todict(self):
        """Return a dict of the fields used for indexing."""
        if self._legacy:
            return self._legacy.todict(True)
        else:
            result = extract_by_key(self._data, self.INDEX_KEYS)
            return result

    def _from_legacy(self):
        """Convert the wrapped legacy metadata into a 2.0-style dict."""
        assert self._legacy and not self._data
        result = {
            'metadata_version': self.METADATA_VERSION,
            'generator': self.GENERATOR,
        }
        lmd = self._legacy.todict(True)     # skip missing ones
        for k in ('name', 'version', 'license', 'summary', 'description',
                  'classifier'):
            if k in lmd:
                if k == 'classifier':
                    nk = 'classifiers'
                else:
                    nk = k
                result[nk] = lmd[k]
        # todict() produces underscore-lowercase keys, so look up
        # 'keywords': the previous 'Keywords' lookup could never match,
        # silently dropping the keywords on conversion.
        kw = lmd.get('keywords', [])
        if kw == ['']:
            kw = []
        result['keywords'] = kw
        keys = (('requires_dist', 'run_requires'),
                ('setup_requires_dist', 'build_requires'))
        for ok, nk in keys:
            if ok in lmd and lmd[ok]:
                result[nk] = [{'requires': lmd[ok]}]
        result['provides'] = self.provides
        # TODO: contacts (author/maintainer) are not converted yet.
        return result

    LEGACY_MAPPING = {
        'name': 'Name',
        'version': 'Version',
        'license': 'License',
        'summary': 'Summary',
        'description': 'Description',
        'classifiers': 'Classifier',
    }

    def _to_legacy(self):
        """Convert 2.0 metadata into a LegacyMetadata instance."""
        def process_entries(entries):
            # Flatten requirement entries, attaching extra/environment
            # conditions as ';'-separated markers.
            reqts = set()
            for e in entries:
                extra = e.get('extra')
                env = e.get('environment')
                rlist = e['requires']
                for r in rlist:
                    if not env and not extra:
                        reqts.add(r)
                    else:
                        marker = ''
                        if extra:
                            marker = 'extra == "%s"' % extra
                        if env:
                            if marker:
                                marker = '(%s) and %s' % (env, marker)
                            else:
                                marker = env
                        reqts.add(';'.join((r, marker)))
            return reqts

        assert self._data and not self._legacy
        result = LegacyMetadata()
        nmd = self._data
        for nk, ok in self.LEGACY_MAPPING.items():
            if nk in nmd:
                result[ok] = nmd[nk]
        r1 = process_entries(self.run_requires + self.meta_requires)
        r2 = process_entries(self.build_requires + self.dev_requires)
        if self.extras:
            result['Provides-Extra'] = sorted(self.extras)
        result['Requires-Dist'] = sorted(r1)
        result['Setup-Requires-Dist'] = sorted(r2)
        # TODO: other fields such as contacts
        return result

    def write(self, path=None, fileobj=None, legacy=False, skip_unknown=True):
        """Write the metadata to *path* or *fileobj* (exactly one required).

        :param legacy: If true, write key-value (PKG-INFO) format instead
            of JSON.
        :param skip_unknown: Passed through to the legacy writer.
        :raises ValueError: unless exactly one destination is given.
        """
        if [path, fileobj].count(None) != 1:
            raise ValueError('Exactly one of path and fileobj is needed')
        self.validate()
        if legacy:
            if self._legacy:
                legacy_md = self._legacy
            else:
                legacy_md = self._to_legacy()
            if path:
                legacy_md.write(path, skip_unknown=skip_unknown)
            else:
                legacy_md.write_file(fileobj, skip_unknown=skip_unknown)
        else:
            if self._legacy:
                d = self._from_legacy()
            else:
                d = self._data
            if fileobj:
                json.dump(d, fileobj, ensure_ascii=True, indent=2,
                          sort_keys=True)
            else:
                with codecs.open(path, 'w', 'utf-8') as f:
                    json.dump(d, f, ensure_ascii=True, indent=2,
                              sort_keys=True)

    def add_requirements(self, requirements):
        """Add *requirements* to the unconditional run-time dependencies."""
        if self._legacy:
            self._legacy.add_requirements(requirements)
        else:
            run_requires = self._data.setdefault('run_requires', [])
            always = None
            for entry in run_requires:
                # The unconditional entry has neither extra nor environment.
                if 'environment' not in entry and 'extra' not in entry:
                    always = entry
                    break
            if always is None:
                always = {'requires': requirements}
                run_requires.insert(0, always)
            else:
                rset = set(always['requires']) | set(requirements)
                always['requires'] = sorted(rset)

    def __repr__(self):
        name = self.name or '(no name)'
        version = self.version or 'no version'
        return '<%s %s %s (%s)>' % (self.__class__.__name__,
                                    self.metadata_version, name, version)
_vendor/distlib/index.py000064400000051135151733136310011323 0ustar00# -*- coding: utf-8 -*-
#
# Copyright (C) 2013 Vinay Sajip.
# Licensed to the Python Software Foundation under a contributor agreement.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
import hashlib
import logging
import os
import shutil
import subprocess
import tempfile
try:
    from threading import Thread
except ImportError:
    from dummy_threading import Thread

from . import DistlibException
from .compat import (HTTPBasicAuthHandler, Request, HTTPPasswordMgr,
                     urlparse, build_opener, string_types)
from .util import cached_property, zip_dir, ServerProxy

logger = logging.getLogger(__name__)

# Default endpoint and authentication realm for PyPI's upload/legacy API.
DEFAULT_INDEX = 'https://pypi.python.org/pypi'
DEFAULT_REALM = 'pypi'

class PackageIndex(object):
    """
    This class represents a package index compatible with PyPI, the Python
    Package Index.
    """

    boundary = b'----------ThIs_Is_tHe_distlib_index_bouNdaRY_$'

    def __init__(self, url=None):
        """
        Initialise an instance.

        :param url: The URL of the index. If not specified, the URL for PyPI is
                    used.
        :raises DistlibException: if the URL is not a plain http(s) URL.
        """
        self.url = url or DEFAULT_INDEX
        self.read_configuration()
        scheme, netloc, path, params, query, frag = urlparse(self.url)
        if params or query or frag or scheme not in ('http', 'https'):
            raise DistlibException('invalid repository: %s' % self.url)
        self.password_handler = None
        self.ssl_verifier = None
        self.gpg = None
        self.gpg_home = None
        self.rpc_proxy = None
        with open(os.devnull, 'w') as sink:
            # Use gpg by default rather than gpg2, as gpg2 insists on
            # prompting for passwords
            for s in ('gpg', 'gpg2'):
                try:
                    # check_call raises CalledProcessError on a non-zero
                    # exit status, which previously escaped the OSError
                    # handler and aborted construction; treat it like a
                    # missing binary and try the next candidate instead.
                    subprocess.check_call([s, '--version'], stdout=sink,
                                          stderr=sink)
                    self.gpg = s
                    break
                except (OSError, subprocess.CalledProcessError):
                    pass

    def _get_pypirc_command(self):
        """
        Get the distutils command for interacting with PyPI configurations.
        :return: the command.
        """
        # Imported lazily: distutils is only needed for .pypirc handling.
        from distutils.core import Distribution
        from distutils.config import PyPIRCCommand
        return PyPIRCCommand(Distribution())

    def read_configuration(self):
        """
        Read the PyPI access configuration as supported by distutils, getting
        PyPI to do the actual work. This populates ``username``, ``password``,
        ``realm`` and ``url`` attributes from the configuration.
        """
        # get distutils to do the work
        # NOTE(review): _read_pypirc is a private distutils API; behaviour
        # may differ across Python versions - confirm when upgrading.
        c = self._get_pypirc_command()
        c.repository = self.url
        cfg = c._read_pypirc()
        self.username = cfg.get('username')
        self.password = cfg.get('password')
        self.realm = cfg.get('realm', 'pypi')
        # A repository URL found in the configuration overrides the one
        # passed to the constructor.
        self.url = cfg.get('repository', self.url)

    def save_configuration(self):
        """
        Save the PyPI access configuration. You must have set ``username`` and
        ``password`` attributes before calling this method.

        Again, distutils is used to do the actual work.
        """
        self.check_credentials()
        # distutils owns the .pypirc format, so delegate the write to it.
        command = self._get_pypirc_command()
        command._store_pypirc(self.username, self.password)

    def check_credentials(self):
        """
        Check that ``username`` and ``password`` have been set, and raise an
        exception if not. Also prepares a basic-auth handler for later
        requests.
        """
        if self.username is None or self.password is None:
            raise DistlibException('username and password must be set')
        manager = HTTPPasswordMgr()
        netloc = urlparse(self.url)[1]
        manager.add_password(self.realm, netloc, self.username, self.password)
        self.password_handler = HTTPBasicAuthHandler(manager)

    def register(self, metadata):
        """
        Register a distribution on PyPI, using the provided metadata.

        :param metadata: A :class:`Metadata` instance defining at least a name
                         and version number for the distribution to be
                         registered.
        :return: The HTTP response received from PyPI upon submission of the
                request.
        """
        self.check_credentials()
        metadata.validate()
        d = metadata.todict()
        # Ask the server to verify the metadata first, then submit it.
        d[':action'] = 'verify'
        request = self.encode_request(d.items(), [])
        response = self.send_request(request)
        d[':action'] = 'submit'
        request = self.encode_request(d.items(), [])
        return self.send_request(request)

    def _reader(self, name, stream, outbuf):
        """
        Thread runner for reading lines of from a subprocess into a buffer.

        :param name: The logical name of the stream (used for logging only).
        :param stream: The stream to read from. This will typically a pipe
                       connected to the output stream of a subprocess.
        :param outbuf: The list to append the read lines to.
        """
        # readline() returns b'' only at EOF, which is the iter sentinel.
        for raw in iter(stream.readline, b''):
            line = raw.decode('utf-8').rstrip()
            outbuf.append(line)
            logger.debug('%s: %s' % (name, line))
        stream.close()

    def get_sign_command(self, filename, signer, sign_password,
                         keystore=None):
        """
        Return a suitable command for signing a file.

        :param filename: The pathname to the file to be signed.
        :param signer: The identifier of the signer of the file.
        :param sign_password: The passphrase for the signer's
                              private key used for signing.
        :param keystore: The path to a directory which contains the keys
                         used in verification. If not specified, the
                         instance's ``gpg_home`` attribute is used instead.
        :return: The signing command as a list suitable to be
                 passed to :class:`subprocess.Popen`.
        """
        # --status-fd 2 sends machine-readable status to stderr; --no-tty
        # keeps gpg from trying to interact with a terminal.
        cmd = [self.gpg, '--status-fd', '2', '--no-tty']
        if keystore is None:
            keystore = self.gpg_home
        if keystore:
            cmd.extend(['--homedir', keystore])
        if sign_password is not None:
            # The passphrase will be fed on stdin (fd 0) in batch mode.
            cmd.extend(['--batch', '--passphrase-fd', '0'])
        # The detached signature is written into a fresh temporary
        # directory as <basename>.asc.
        td = tempfile.mkdtemp()
        sf = os.path.join(td, os.path.basename(filename) + '.asc')
        cmd.extend(['--detach-sign', '--armor', '--local-user',
                    signer, '--output', sf, filename])
        logger.debug('invoking: %s', ' '.join(cmd))
        return cmd, sf

    def run_command(self, cmd, input_data=None):
        """
        Run a command in a child process , passing it any input data specified.

        :param cmd: The command to run.
        :param input_data: If specified, this must be a byte string containing
                           data to be sent to the child process.
        :return: A tuple consisting of the subprocess' exit code, a list of
                 lines read from the subprocess' ``stdout``, and a list of
                 lines read from the subprocess' ``stderr``.
        """
        kwargs = {
            'stdout': subprocess.PIPE,
            'stderr': subprocess.PIPE,
        }
        if input_data is not None:
            kwargs['stdin'] = subprocess.PIPE
        stdout = []
        stderr = []
        p = subprocess.Popen(cmd, **kwargs)
        # We don't use communicate() here because we may need to
        # get clever with interacting with the command
        # Drain stdout and stderr on separate threads to avoid the child
        # blocking when either pipe's buffer fills up.
        t1 = Thread(target=self._reader, args=('stdout', p.stdout, stdout))
        t1.start()
        t2 = Thread(target=self._reader, args=('stderr', p.stderr, stderr))
        t2.start()
        if input_data is not None:
            p.stdin.write(input_data)
            p.stdin.close()

        # Wait for the process to exit, then for the reader threads to
        # finish consuming any remaining output.
        p.wait()
        t1.join()
        t2.join()
        return p.returncode, stdout, stderr

    def sign_file(self, filename, signer, sign_password, keystore=None):
        """
        Sign a file.

        :param filename: The pathname to the file to be signed.
        :param signer: The identifier of the signer of the file.
        :param sign_password: The passphrase for the signer's
                              private key used for signing.
        :param keystore: The path to a directory which contains the keys
                         used in signing. If not specified, the instance's
                         ``gpg_home`` attribute is used instead.
        :return: The absolute pathname of the file where the signature is
                 stored.
        """
        command, sig_file = self.get_sign_command(filename, signer,
                                                  sign_password, keystore)
        # The passphrase is sent to gpg on stdin, as arranged by
        # get_sign_command.
        status, _, _ = self.run_command(command,
                                        sign_password.encode('utf-8'))
        if status != 0:
            raise DistlibException('sign command failed with error '
                                   'code %s' % status)
        return sig_file

    def upload_file(self, metadata, filename, signer=None, sign_password=None,
                    filetype='sdist', pyversion='source', keystore=None):
        """
        Upload a release file to the index.

        :param metadata: A :class:`Metadata` instance defining at least a name
                         and version number for the file to be uploaded.
        :param filename: The pathname of the file to be uploaded.
        :param signer: The identifier of the signer of the file.
        :param sign_password: The passphrase for the signer's
                              private key used for signing.
        :param filetype: The type of the file being uploaded. This is the
                        distutils command which produced that file, e.g.
                        ``sdist`` or ``bdist_wheel``.
        :param pyversion: The version of Python which the release relates
                          to. For code compatible with any Python, this would
                          be ``source``, otherwise it would be e.g. ``3.2``.
        :param keystore: The path to a directory which contains the keys
                         used in signing. If not specified, the instance's
                         ``gpg_home`` attribute is used instead.
        :return: The HTTP response received from PyPI upon submission of the
                request.
        """
        self.check_credentials()
        if not os.path.exists(filename):
            raise DistlibException('not found: %s' % filename)
        metadata.validate()
        d = metadata.todict()
        sig_file = None
        if signer:
            # Signing is best-effort: if gpg is unavailable, warn and upload
            # the file unsigned rather than failing.
            if not self.gpg:
                logger.warning('no signing program available - not signed')
            else:
                sig_file = self.sign_file(filename, signer, sign_password,
                                          keystore)
        with open(filename, 'rb') as f:
            file_data = f.read()
        # Both digests are sent so the server can verify the upload.
        md5_digest = hashlib.md5(file_data).hexdigest()
        sha256_digest = hashlib.sha256(file_data).hexdigest()
        d.update({
            ':action': 'file_upload',
            'protocol_version': '1',
            'filetype': filetype,
            'pyversion': pyversion,
            'md5_digest': md5_digest,
            'sha256_digest': sha256_digest,
        })
        files = [('content', os.path.basename(filename), file_data)]
        if sig_file:
            with open(sig_file, 'rb') as f:
                sig_data = f.read()
            files.append(('gpg_signature', os.path.basename(sig_file),
                         sig_data))
            # Remove the temporary directory created by get_sign_command
            # now that the signature bytes have been read.
            shutil.rmtree(os.path.dirname(sig_file))
        request = self.encode_request(d.items(), files)
        return self.send_request(request)

    def upload_documentation(self, metadata, doc_dir):
        """
        Upload documentation to the index.

        :param metadata: A :class:`Metadata` instance defining at least a name
                         and version number for the documentation to be
                         uploaded.
        :param doc_dir: The pathname of the directory which contains the
                        documentation. This should be the directory that
                        contains the ``index.html`` for the documentation.
        :return: The HTTP response received from PyPI upon submission of the
                request.
        """
        self.check_credentials()
        if not os.path.isdir(doc_dir):
            raise DistlibException('not a directory: %r' % doc_dir)
        # The documentation root must contain an index.html.
        index_page = os.path.join(doc_dir, 'index.html')
        if not os.path.exists(index_page):
            raise DistlibException('not found: %r' % index_page)
        metadata.validate()
        name, version = metadata.name, metadata.version
        zip_data = zip_dir(doc_dir).getvalue()
        fields = [(':action', 'doc_upload'),
                  ('name', name), ('version', version)]
        files = [('content', name, zip_data)]
        return self.send_request(self.encode_request(fields, files))

    def get_verify_command(self, signature_filename, data_filename,
                           keystore=None):
        """
        Return a suitable command for verifying a file.

        :param signature_filename: The pathname to the file containing the
                                   signature.
        :param data_filename: The pathname to the file containing the
                              signed data.
        :param keystore: The path to a directory which contains the keys
                         used in verification. If not specified, the
                         instance's ``gpg_home`` attribute is used instead.
        :return: The verifying command as a list suitable to be
                 passed to :class:`subprocess.Popen`.
        """
        cmd = [self.gpg, '--status-fd', '2', '--no-tty']
        home = keystore if keystore is not None else self.gpg_home
        if home:
            cmd.extend(['--homedir', home])
        cmd.extend(['--verify', signature_filename, data_filename])
        logger.debug('invoking: %s', ' '.join(cmd))
        return cmd

    def verify_signature(self, signature_filename, data_filename,
                         keystore=None):
        """
        Verify a signature for a file.

        :param signature_filename: The pathname to the file containing the
                                   signature.
        :param data_filename: The pathname to the file containing the
                              signed data.
        :param keystore: The path to a directory which contains the keys
                         used in verification. If not specified, the
                         instance's ``gpg_home`` attribute is used instead.
        :return: True if the signature was verified, else False.
        """
        if not self.gpg:
            raise DistlibException('verification unavailable because gpg '
                                   'unavailable')
        command = self.get_verify_command(signature_filename, data_filename,
                                          keystore)
        status, _, _ = self.run_command(command)
        # gpg exits 0 for a good signature, 1 for a bad one; anything else
        # indicates the command itself failed.
        if status not in (0, 1):
            raise DistlibException('verify command failed with error '
                                   'code %s' % status)
        return status == 0

    def download_file(self, url, destfile, digest=None, reporthook=None):
        """
        This is a convenience method for downloading a file from an URL.
        Normally, this will be a file from the index, though currently
        no check is made for this (i.e. a file can be downloaded from
        anywhere).

        The method is just like the :func:`urlretrieve` function in the
        standard library, except that it allows digest computation to be
        done during download and checking that the downloaded data
        matched any expected value.

        :param url: The URL of the file to be downloaded (assumed to be
                    available via an HTTP GET request).
        :param destfile: The pathname where the downloaded file is to be
                         saved.
        :param digest: If specified, this must be a (hasher, value)
                       tuple, where hasher is the algorithm used (e.g.
                       ``'md5'``) and ``value`` is the expected value.
        :param reporthook: The same as for :func:`urlretrieve` in the
                           standard library.
        """
        if digest is None:
            digester = None
            logger.debug('No digest specified')
        else:
            # A bare digest string defaults to md5; otherwise the pair
            # names the hashlib algorithm to use.
            if isinstance(digest, (list, tuple)):
                hasher, digest = digest
            else:
                hasher = 'md5'
            digester = getattr(hashlib, hasher)()
            logger.debug('Digest specified: %s' % digest)
        # The following code is equivalent to urlretrieve.
        # We need to do it this way so that we can compute the
        # digest of the file as we go.
        with open(destfile, 'wb') as dfp:
            # addinfourl is not a context manager on 2.x
            # so we have to use try/finally
            sfp = self.send_request(Request(url))
            try:
                headers = sfp.info()
                blocksize = 8192
                size = -1       # -1 means "length unknown"
                read = 0
                blocknum = 0
                # NOTE(review): key case differs between the test and the
                # lookup; header mappings here are case-insensitive, which
                # is presumably why this works - confirm for both 2.x/3.x.
                if "content-length" in headers:
                    size = int(headers["Content-Length"])
                # Initial call reports block 0 before any data arrives,
                # matching urlretrieve's reporthook protocol.
                if reporthook:
                    reporthook(blocknum, blocksize, size)
                while True:
                    block = sfp.read(blocksize)
                    if not block:
                        break
                    read += len(block)
                    dfp.write(block)
                    # Digest is updated incrementally as blocks arrive.
                    if digester:
                        digester.update(block)
                    blocknum += 1
                    if reporthook:
                        reporthook(blocknum, blocksize, size)
            finally:
                sfp.close()

        # check that we got the whole file, if we can
        if size >= 0 and read < size:
            raise DistlibException(
                'retrieval incomplete: got only %d out of %d bytes'
                % (read, size))
        # if we have a digest, it must match.
        if digester:
            actual = digester.hexdigest()
            if digest != actual:
                raise DistlibException('%s digest mismatch for %s: expected '
                                       '%s, got %s' % (hasher, destfile,
                                                       digest, actual))
            logger.debug('Digest verified: %s', digest)

    def send_request(self, req):
        """
        Send a standard library :class:`Request` to PyPI and return its
        response.

        :param req: The request to send.
        :return: The HTTP response from PyPI (a standard library HTTPResponse).
        """
        # Install only the handlers that have actually been configured
        # (HTTP auth and/or SSL verification).
        handlers = [h for h in (self.password_handler, self.ssl_verifier) if h]
        return build_opener(*handlers).open(req)

    def encode_request(self, fields, files):
        """
        Encode fields and files for posting to an HTTP server as
        multipart/form-data.

        :param fields: The fields to send as a list of (fieldname, value)
                       tuples.
        :param files: The files to send as a list of (fieldname, filename,
                      file_bytes) tuple.
        """
        # Adapted from packaging, which in turn was adapted from
        # http://code.activestate.com/recipes/146306
        boundary = self.boundary
        parts = []
        for field_name, values in fields:
            # A scalar value is treated as a one-element list.
            if not isinstance(values, (list, tuple)):
                values = [values]
            for value in values:
                disposition = ('Content-Disposition: form-data; name="%s"' %
                               field_name).encode('utf-8')
                parts.extend((b'--' + boundary, disposition, b'',
                              value.encode('utf-8')))
        for field_name, filename, file_bytes in files:
            disposition = ('Content-Disposition: form-data; name="%s"; '
                           'filename="%s"' %
                           (field_name, filename)).encode('utf-8')
            parts.extend((b'--' + boundary, disposition, b'', file_bytes))
        # Closing boundary, then a trailing CRLF.
        parts.extend((b'--' + boundary + b'--', b''))
        body = b'\r\n'.join(parts)
        headers = {
            'Content-type': b'multipart/form-data; boundary=' + boundary,
            'Content-length': str(len(body))
        }
        return Request(self.url, body, headers)

    def search(self, terms, operator=None):
        """
        Search the index via XML-RPC; a bare string is treated as a name
        query, and ``operator`` defaults to 'and'.
        """
        if isinstance(terms, string_types):
            terms = {'name': terms}
        rpc = self.rpc_proxy
        if rpc is None:
            # Lazily create and cache the XML-RPC proxy.
            rpc = self.rpc_proxy = ServerProxy(self.url, timeout=3.0)
        return rpc.search(terms, operator or 'and')
_vendor/distlib/util.py000064400000150551151733136310011173 0ustar00#
# Copyright (C) 2012-2016 The Python Software Foundation.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
import codecs
from collections import deque
import contextlib
import csv
from glob import iglob as std_iglob
import io
import json
import logging
import os
import py_compile
import re
import shutil
import socket
try:
    import ssl
except ImportError:  # pragma: no cover
    ssl = None
import subprocess
import sys
import tarfile
import tempfile
import textwrap

try:
    import threading
except ImportError:  # pragma: no cover
    import dummy_threading as threading
import time

from . import DistlibException
from .compat import (string_types, text_type, shutil, raw_input, StringIO,
                     cache_from_source, urlopen, urljoin, httplib, xmlrpclib,
                     splittype, HTTPHandler, BaseConfigurator, valid_ident,
                     Container, configparser, URLError, ZipFile, fsdecode,
                     unquote)

logger = logging.getLogger(__name__)

#
# Requirement parsing code for name + optional constraints + optional extras
#
# e.g. 'foo >= 1.2, < 2.0 [bar, baz]'
#
# The regex can seem a bit hairy, so we build it up out of smaller pieces
# which are manageable.
#

COMMA = r'\s*,\s*'
COMMA_RE = re.compile(COMMA)

IDENT = r'(\w|[.-])+'
EXTRA_IDENT = r'(\*|:(\*|\w+):|' + IDENT + ')'
VERSPEC = IDENT + r'\*?'

RELOP = '([<>=!~]=)|[<>]'

#
# The first relop is optional - if absent, will be taken as '~='
#
BARE_CONSTRAINTS = ('(' + RELOP + r')?\s*(' + VERSPEC + ')(' + COMMA + '(' +
                    RELOP + r')\s*(' + VERSPEC + '))*')

# Raw string: '\s' in a non-raw literal is an invalid escape sequence
# (deprecated, later a SyntaxWarning); the pattern itself is unchanged.
DIRECT_REF = r'(from\s+(?P<diref>.*))'

#
# Either the bare constraints or the bare constraints in parentheses
#
CONSTRAINTS = (r'\(\s*(?P<c1>' + BARE_CONSTRAINTS + '|' + DIRECT_REF +
               r')\s*\)|(?P<c2>' + BARE_CONSTRAINTS + r'\s*)')

EXTRA_LIST = EXTRA_IDENT + '(' + COMMA + EXTRA_IDENT + ')*'
EXTRAS = r'\[\s*(?P<ex>' + EXTRA_LIST + r')?\s*\]'
REQUIREMENT = ('(?P<dn>' + IDENT + r')\s*(' + EXTRAS + r'\s*)?(\s*' +
               CONSTRAINTS + ')?$')
REQUIREMENT_RE = re.compile(REQUIREMENT)

#
# Used to scan through the constraints
#
RELOP_IDENT = '(?P<op>' + RELOP + r')\s*(?P<vn>' + VERSPEC + ')'
RELOP_IDENT_RE = re.compile(RELOP_IDENT)

def parse_requirement(s):
    """
    Parse a requirement such as 'foo (>= 1.2, < 2.0) [bar, baz]' or
    'foo (from URL)' and return a Container with name, constraints,
    extras, requirement string, source and url - or None if the string
    doesn't match the requirement grammar.
    """

    def as_pair(match):
        groups = match.groupdict()
        return groups['op'], groups['vn']

    m = REQUIREMENT_RE.match(s)
    if not m:
        return None
    d = m.groupdict()
    name = d['dn']
    cons = d['c1'] or d['c2']
    if d['diref']:
        # Direct URL reference: no version constraints apply.
        cons = None
        url = d['diref'].strip()
    else:
        url = None
    if not cons:
        cons = None
        rs = d['dn']
    else:
        if cons[0] not in '<>!=':
            # No leading relational operator: treat as compatible release.
            cons = '~=' + cons
        cons = [as_pair(m2) for m2 in RELOP_IDENT_RE.finditer(cons)]
        rs = '%s (%s)' % (name, ', '.join(['%s %s' % con for con in cons]))
    extras = COMMA_RE.split(d['ex']) if d['ex'] else None
    return Container(name=name, constraints=cons, extras=extras,
                     requirement=rs, source=s, url=url)


def get_resources_dests(resources_root, rules):
    """Find destinations for resources files"""

    def rel_path(base, path):
        # Return path relative to base, '/'-separated, no leading '/'.
        base = base.replace(os.path.sep, '/')
        path = path.replace(os.path.sep, '/')
        assert path.startswith(base)
        return path[len(base):].lstrip('/')

    destinations = {}
    for base, suffix, dest in rules:
        prefix = os.path.join(resources_root, base)
        for abs_base in iglob(prefix):
            for abs_path in iglob(os.path.join(abs_base, suffix)):
                resource_file = rel_path(resources_root, abs_path)
                if dest is None:
                    # A None destination removes any earlier mapping.
                    destinations.pop(resource_file, None)
                else:
                    suffix_part = rel_path(abs_base, abs_path)
                    dest_prefix = dest.replace(os.path.sep, '/').rstrip('/')
                    destinations[resource_file] = dest_prefix + '/' + suffix_part
    return destinations


def in_venv():
    """Return True if running inside a virtual environment."""
    if hasattr(sys, 'real_prefix'):
        # virtualenv-style environments set sys.real_prefix
        return True
    # PEP 405 venvs: prefix differs from base_prefix
    return sys.prefix != getattr(sys, 'base_prefix', sys.prefix)


def get_executable():
    """
    Return the case-normalized path of the running Python interpreter,
    decoded to text if necessary.

    The old ``__PYVENV_LAUNCHER__`` dance for macOS is no longer needed:
    changes to the stub launcher mean that sys.executable always points
    to the stub on macOS.
    """
    executable = os.path.normcase(sys.executable)
    if isinstance(executable, text_type):
        return executable
    return fsdecode(executable)


def proceed(prompt, allowed_chars, error_prompt=None, default=None):
    """
    Prompt the user repeatedly until the first character of the reply
    (lower-cased) is in ``allowed_chars``; return that character.

    :param prompt: The prompt to display.
    :param allowed_chars: Characters accepted as an answer.
    :param error_prompt: If given, prepended (with the rejected answer)
                         to the next prompt after an invalid reply.
    :param default: Used as the reply when the user enters nothing.
    """
    p = prompt
    while True:
        s = raw_input(p)
        # Reset to the plain prompt in case the previous pass used the
        # error-augmented one.
        p = prompt
        if not s and default:
            s = default
        if s:
            c = s[0].lower()
            if c in allowed_chars:
                break
            if error_prompt:
                # Show the rejected character and the explanation before
                # re-prompting.
                p = '%c: %s\n%s' % (c, error_prompt, prompt)
    return c


def extract_by_key(d, keys):
    """
    Return a new dict containing the entries of ``d`` whose keys are in
    ``keys``. ``keys`` may also be a whitespace-separated string.
    """
    if isinstance(keys, string_types):
        keys = keys.split()
    return {key: d[key] for key in keys if key in d}

def read_exports(stream):
    """
    Read an exports mapping ({group: {name: ExportEntry}}) from a binary
    stream. JSON is tried first; on failure the legacy INI format is
    parsed instead.
    """
    if sys.version_info[0] >= 3:
        # needs to be a text stream
        stream = codecs.getreader('utf-8')(stream)
    # Try to load as JSON, falling back on legacy format
    data = stream.read()
    stream = StringIO(data)
    try:
        jdata = json.load(stream)
        result = jdata['extensions']['python.exports']['exports']
        # Replace each "name = callable" string with a parsed ExportEntry.
        for group, entries in result.items():
            for k, v in entries.items():
                s = '%s = %s' % (k, v)
                entry = get_export_entry(s)
                assert entry is not None
                entries[k] = entry
        return result
    except Exception:
        # Not JSON (or not the expected shape): rewind and fall through
        # to the legacy INI parser below.
        stream.seek(0, 0)

    def read_stream(cp, stream):
        # ConfigParser API differs across Python versions.
        if hasattr(cp, 'read_file'):
            cp.read_file(stream)
        else:
            cp.readfp(stream)

    cp = configparser.ConfigParser()
    try:
        read_stream(cp, stream)
    except configparser.MissingSectionHeaderError:
        # Possibly indented legacy data: dedent and retry once.
        stream.close()
        data = textwrap.dedent(data)
        stream = StringIO(data)
        read_stream(cp, stream)

    result = {}
    for key in cp.sections():
        result[key] = entries = {}
        for name, value in cp.items(key):
            s = '%s = %s' % (name, value)
            entry = get_export_entry(s)
            assert entry is not None
            #entry.dist = self
            entries[name] = entry
    return result


def write_exports(exports, stream):
    """
    Write an exports mapping ({section: {name: entry}}) to a binary
    stream in INI format, one section per group.
    """
    if sys.version_info[0] >= 3:
        # ConfigParser on 3.x wants a text stream - wrap the binary one.
        stream = codecs.getwriter('utf-8')(stream)
    cp = configparser.ConfigParser()
    for section, entries in exports.items():
        # TODO check section, entries for valid values
        cp.add_section(section)
        for entry in entries.values():
            if entry.suffix is None:
                value = entry.prefix
            else:
                value = '%s:%s' % (entry.prefix, entry.suffix)
            if entry.flags:
                value = '%s [%s]' % (value, ', '.join(entry.flags))
            cp.set(section, entry.name, value)
    cp.write(stream)


@contextlib.contextmanager
def tempdir():
    """Context manager yielding a temporary directory, removed on exit."""
    path = tempfile.mkdtemp()
    try:
        yield path
    finally:
        shutil.rmtree(path)

@contextlib.contextmanager
def chdir(d):
    """Context manager running its body with ``d`` as working directory."""
    original = os.getcwd()
    try:
        os.chdir(d)
        yield
    finally:
        # Restore even if os.chdir(d) or the body raised.
        os.chdir(original)


@contextlib.contextmanager
def socket_timeout(seconds=15):
    """Temporarily set the global default socket timeout."""
    saved = socket.getdefaulttimeout()
    try:
        socket.setdefaulttimeout(seconds)
        yield
    finally:
        socket.setdefaulttimeout(saved)


class cached_property(object):
    """
    Non-data descriptor that computes the wrapped method's value on first
    access and stores it on the instance under the same name, so later
    accesses hit the instance attribute directly.
    """
    def __init__(self, func):
        self.func = func

    def __get__(self, obj, cls=None):
        if obj is None:
            # Accessed on the class itself: return the descriptor.
            return self
        value = self.func(obj)
        # object.__setattr__ bypasses any overridden __setattr__.
        object.__setattr__(obj, self.func.__name__, value)
        return value

def convert_path(pathname):
    """Return 'pathname' as a name that will work on the native filesystem.

    The path is split on '/' and put back together again using the current
    directory separator.  Needed because filenames in the setup script are
    always supplied in Unix style, and have to be converted to the local
    convention before we can actually use them in the filesystem.  Raises
    ValueError on non-Unix-ish systems if 'pathname' either starts or
    ends with a slash.
    """
    if os.sep == '/' or not pathname:
        # Already native (or empty) - nothing to convert.
        return pathname
    if pathname.startswith('/'):
        raise ValueError("path '%s' cannot be absolute" % pathname)
    if pathname.endswith('/'):
        raise ValueError("path '%s' cannot end with '/'" % pathname)
    # Drop any '.' components before rejoining with the local separator.
    components = [p for p in pathname.split('/') if p != os.curdir]
    if not components:
        return os.curdir
    return os.path.join(*components)


class FileOperator(object):
    """
    Perform filesystem operations (copy, write, byte-compile, remove) with
    support for a dry-run mode and optional recording of files written and
    directories created, so that recorded changes can be committed
    (returned) or rolled back.
    """
    def __init__(self, dry_run=False):
        # When dry_run is true, operations are logged but not performed.
        self.dry_run = dry_run
        # Directories already known (or ensured) to exist.
        self.ensured = set()
        self._init_record()

    def _init_record(self):
        # Reset all recording state; recording is off by default.
        self.record = False
        self.files_written = set()
        self.dirs_created = set()

    def record_as_written(self, path):
        # Track ``path`` as written, but only while recording is enabled.
        if self.record:
            self.files_written.add(path)

    def newer(self, source, target):
        """Tell if the target is newer than the source.

        Returns true if 'source' exists and is more recently modified than
        'target', or if 'source' exists and 'target' doesn't.

        Returns false if both exist and 'target' is the same age or younger
        than 'source'. Raise DistlibException if 'source' does not exist.

        Note that this test is not very accurate: files created in the same
        second will have the same "age".
        """
        if not os.path.exists(source):
            raise DistlibException("file '%r' does not exist" %
                                   os.path.abspath(source))
        if not os.path.exists(target):
            return True

        return os.stat(source).st_mtime > os.stat(target).st_mtime

    def copy_file(self, infile, outfile, check=True):
        """Copy a file respecting dry-run and force flags.

        With ``check`` true, refuse to overwrite a symlink or any
        non-regular file.
        """
        self.ensure_dir(os.path.dirname(outfile))
        logger.info('Copying %s to %s', infile, outfile)
        if not self.dry_run:
            msg = None
            if check:
                if os.path.islink(outfile):
                    msg = '%s is a symlink' % outfile
                elif os.path.exists(outfile) and not os.path.isfile(outfile):
                    msg = '%s is a non-regular file' % outfile
            if msg:
                raise ValueError(msg + ' which would be overwritten')
            shutil.copyfile(infile, outfile)
        self.record_as_written(outfile)

    def copy_stream(self, instream, outfile, encoding=None):
        """Copy a stream to ``outfile``; with ``encoding``, write as text."""
        assert not os.path.isdir(outfile)
        self.ensure_dir(os.path.dirname(outfile))
        logger.info('Copying stream %s to %s', instream, outfile)
        if not self.dry_run:
            if encoding is None:
                outstream = open(outfile, 'wb')
            else:
                outstream = codecs.open(outfile, 'w', encoding=encoding)
            try:
                shutil.copyfileobj(instream, outstream)
            finally:
                outstream.close()
        self.record_as_written(outfile)

    def write_binary_file(self, path, data):
        """Write ``data`` (bytes) to ``path``, creating parent dirs."""
        self.ensure_dir(os.path.dirname(path))
        if not self.dry_run:
            with open(path, 'wb') as f:
                f.write(data)
        self.record_as_written(path)

    def write_text_file(self, path, data, encoding):
        """Write ``data`` (text) to ``path`` encoded with ``encoding``."""
        self.ensure_dir(os.path.dirname(path))
        if not self.dry_run:
            with open(path, 'wb') as f:
                f.write(data.encode(encoding))
        self.record_as_written(path)

    def set_mode(self, bits, mask, files):
        """OR ``bits`` into each file's mode, then AND with ``mask``."""
        # 'java' covers Jython running on a posix platform.
        if os.name == 'posix' or (os.name == 'java' and os._name == 'posix'):
            # Set the executable bits (owner, group, and world) on
            # all the files specified.
            for f in files:
                if self.dry_run:
                    logger.info("changing mode of %s", f)
                else:
                    mode = (os.stat(f).st_mode | bits) & mask
                    logger.info("changing mode of %s to %o", f, mode)
                    os.chmod(f, mode)

    set_executable_mode = lambda s, f: s.set_mode(0o555, 0o7777, f)

    def ensure_dir(self, path):
        """Create ``path`` (and parents) if needed, caching the result."""
        path = os.path.abspath(path)
        if path not in self.ensured and not os.path.exists(path):
            self.ensured.add(path)
            d, f = os.path.split(path)
            self.ensure_dir(d)
            logger.info('Creating %s' % path)
            if not self.dry_run:
                os.mkdir(path)
            if self.record:
                self.dirs_created.add(path)

    def byte_compile(self, path, optimize=False, force=False, prefix=None):
        """
        Byte-compile ``path`` to its cache location and return that
        location.

        :param path: Source file to compile.
        :param optimize: Affects the cache filename chosen by
                         cache_from_source.
        :param force: Compile even if the cached file is up to date.
        :param prefix: If given, stripped from ``path`` to form the name
                       used in compile-error diagnostics; ``path`` must
                       start with it.
        """
        dpath = cache_from_source(path, not optimize)
        logger.info('Byte-compiling %s to %s', path, dpath)
        if not self.dry_run:
            if force or self.newer(path, dpath):
                if not prefix:
                    diagpath = None
                else:
                    assert path.startswith(prefix)
                    diagpath = path[len(prefix):]
                # FIX: this call previously sat outside the force/newer
                # check, so an up-to-date target still triggered a compile
                # with 'diagpath' unbound, raising NameError.
                py_compile.compile(path, dpath, diagpath, True)     # raise error
        self.record_as_written(dpath)
        return dpath

    def ensure_removed(self, path):
        """Remove ``path`` (file, link or tree) if it exists."""
        if os.path.exists(path):
            if os.path.isdir(path) and not os.path.islink(path):
                logger.debug('Removing directory tree at %s', path)
                if not self.dry_run:
                    shutil.rmtree(path)
                if self.record:
                    if path in self.dirs_created:
                        self.dirs_created.remove(path)
            else:
                if os.path.islink(path):
                    s = 'link'
                else:
                    s = 'file'
                logger.debug('Removing %s %s', s, path)
                if not self.dry_run:
                    os.remove(path)
                if self.record:
                    if path in self.files_written:
                        self.files_written.remove(path)

    def is_writable(self, path):
        """Return True if ``path`` (or its nearest existing ancestor) is
        writable."""
        result = False
        while not result:
            if os.path.exists(path):
                result = os.access(path, os.W_OK)
                break
            parent = os.path.dirname(path)
            if parent == path:
                # Reached the filesystem root without finding anything.
                break
            path = parent
        return result

    def commit(self):
        """
        Commit recorded changes, turn off recording, return
        changes.
        """
        assert self.record
        result = self.files_written, self.dirs_created
        self._init_record()
        return result

    def rollback(self):
        """Undo recorded changes: remove written files and created dirs."""
        if not self.dry_run:
            for f in list(self.files_written):
                if os.path.exists(f):
                    os.remove(f)
            # dirs should all be empty now, except perhaps for
            # __pycache__ subdirs
            # reverse so that subdirs appear before their parents
            dirs = sorted(self.dirs_created, reverse=True)
            for d in dirs:
                flist = os.listdir(d)
                if flist:
                    assert flist == ['__pycache__']
                    sd = os.path.join(d, flist[0])
                    os.rmdir(sd)
                os.rmdir(d)     # should fail if non-empty
        self._init_record()

def resolve(module_name, dotted_path):
    """
    Resolve a module, and optionally a dotted attribute path within it,
    to an object - e.g. resolve('os', 'path.join') -> os.path.join.
    """
    if module_name in sys.modules:
        mod = sys.modules[module_name]
    else:
        mod = __import__(module_name)
    if dotted_path is None:
        return mod
    # Walk the attribute chain starting from the module.
    obj = mod
    for part in dotted_path.split('.'):
        obj = getattr(obj, part)
    return obj


class ExportEntry(object):
    """
    A single export entry of the form ``name = prefix:suffix [flags]``,
    where prefix/suffix are resolved lazily to an object via resolve().
    """
    def __init__(self, name, prefix, suffix, flags):
        self.name = name
        self.prefix = prefix
        self.suffix = suffix
        self.flags = flags

    @cached_property
    def value(self):
        # Resolved on first access, then cached on the instance.
        return resolve(self.prefix, self.suffix)

    def __repr__(self):  # pragma: no cover
        return '<ExportEntry %s = %s:%s %s>' % (self.name, self.prefix,
                                                self.suffix, self.flags)

    def __eq__(self, other):
        if not isinstance(other, ExportEntry):
            return False
        return (self.name == other.name and
                self.prefix == other.prefix and
                self.suffix == other.suffix and
                self.flags == other.flags)

    __hash__ = object.__hash__


# Matches export specifications of the form
#     name = callable_spec [flag1, flag2=value]
# where callable_spec is a word path separated by ':' or '.'
# (e.g. 'pkg.mod:func') and the bracketed flag list is optional.
ENTRY_RE = re.compile(r'''(?P<name>(\w|[-.+])+)
                      \s*=\s*(?P<callable>(\w+)([:\.]\w+)*)
                      \s*(\[\s*(?P<flags>\w+(=\w+)?(,\s*\w+(=\w+)?)*)\s*\])?
                      ''', re.VERBOSE)

def get_export_entry(specification):
    m = ENTRY_RE.search(specification)
    if not m:
        result = None
        if '[' in specification or ']' in specification:
            raise DistlibException("Invalid specification "
                                   "'%s'" % specification)
    else:
        d = m.groupdict()
        name = d['name']
        path = d['callable']
        colons = path.count(':')
        if colons == 0:
            prefix, suffix = path, None
        else:
            if colons != 1:
                raise DistlibException("Invalid specification "
                                       "'%s'" % specification)
            prefix, suffix = path.split(':')
        flags = d['flags']
        if flags is None:
            if '[' in specification or ']' in specification:
                raise DistlibException("Invalid specification "
                                       "'%s'" % specification)
            flags = []
        else:
            flags = [f.strip() for f in flags.split(',')]
        result = ExportEntry(name, prefix, suffix, flags)
    return result


def get_cache_base(suffix=None):
    """
    Return the default base location for distlib caches. If the directory does
    not exist, it is created. Use the suffix provided for the base directory,
    and default to '.distlib' if it isn't provided.

    On Windows, if LOCALAPPDATA is defined in the environment, then it is
    assumed to be a directory, and will be the parent directory of the result.
    On POSIX, and on Windows if LOCALAPPDATA is not defined, the user's home
    directory - using os.expanduser('~') - will be the parent directory of
    the result.

    The result is just the directory '.distlib' in the parent directory as
    determined above, or with the name specified with ``suffix``.
    """
    if suffix is None:
        suffix = '.distlib'
    if os.name == 'nt' and 'LOCALAPPDATA' in os.environ:
        parent = os.path.expandvars('$localappdata')
    else:
        # Assume posix, or old Windows
        parent = os.path.expanduser('~')
    # 'isdir' rather than 'exists': a plain file of that name is unusable
    usable = False
    if os.path.isdir(parent):
        usable = os.access(parent, os.W_OK)
        if not usable:
            logger.warning('Directory exists but is not writable: %s', parent)
    else:
        try:
            os.makedirs(parent)
            usable = True
        except OSError:
            logger.warning('Unable to create %s', parent, exc_info=True)
    if not usable:
        # Last resort: a fresh temporary directory
        parent = tempfile.mkdtemp()
        logger.warning('Default location unusable, using %s', parent)
    return os.path.join(parent, suffix)


def path_to_cache_dir(path):
    """
    Convert an absolute path to a directory name for use in a cache.

    The algorithm used is:

    #. On Windows, any ``':'`` in the drive is replaced with ``'---'``.
    #. Any occurrence of ``os.sep`` is replaced with ``'--'``.
    #. ``'.cache'`` is appended.
    """
    drive, rest = os.path.splitdrive(os.path.abspath(path))
    return drive.replace(':', '---') + rest.replace(os.sep, '--') + '.cache'


def ensure_slash(s):
    """Return *s* with a '/' appended unless it already ends with one."""
    return s if s.endswith('/') else s + '/'


def parse_credentials(netloc):
    """Split a URL netloc into (username, password, host-part).

    Splits on the *last* '@' so that a userinfo section which itself
    contains '@' characters is handled correctly; everything after the
    final '@' is the host.  Missing components are returned as None.
    """
    username = password = None
    if '@' in netloc:
        # rsplit, not split: only the final '@' separates userinfo from host
        prefix, netloc = netloc.rsplit('@', 1)
        if ':' not in prefix:
            username = prefix
        else:
            username, password = prefix.split(':', 1)
    return username, password, netloc


def get_process_umask():
    """Return the current process umask without changing it.

    os.umask can only be read by writing, so set a throwaway value and
    immediately restore the original.
    """
    current = os.umask(0o22)
    os.umask(current)
    return current

def is_string_sequence(seq):
    """Return True if the non-empty sequence *seq* contains only strings.

    NOTE(review): an empty *seq* trips the assert below rather than
    returning a value — callers appear to rely on non-empty input.
    """
    all_strings = True
    index = None
    for index, item in enumerate(seq):
        if not isinstance(item, string_types):
            all_strings = False
            break
    assert index is not None
    return all_strings

PROJECT_NAME_AND_VERSION = re.compile('([a-z0-9_]+([.-][a-z_][a-z0-9_]*)*)-'
                                      '([a-z0-9_.+-]+)', re.I)
PYTHON_VERSION = re.compile(r'-py(\d\.?\d?)')


def split_filename(filename, project_name=None):
    """
    Extract name, version, python version from a filename (no extension)

    Return name, version, pyver or None
    """
    pyver = None
    filename = unquote(filename).replace(' ', '-')
    # Strip a trailing '-pyX.Y' marker first, remembering the version
    m = PYTHON_VERSION.search(filename)
    if m:
        pyver = m.group(1)
        filename = filename[:m.start()]
    # Prefer an exact project-name prefix match when one is supplied
    if project_name and len(filename) > len(project_name) + 1:
        m = re.match(re.escape(project_name) + r'\b', filename)
        if m:
            n = m.end()
            return filename[:n], filename[n + 1:], pyver
    # Fall back to the generic name-version pattern
    m = PROJECT_NAME_AND_VERSION.match(filename)
    if m:
        return m.group(1), m.group(3), pyver
    return None

# Allow spaces in name because of legacy dists like "Twisted Core"
NAME_VERSION_RE = re.compile(r'(?P<name>[\w .-]+)\s*'
                             r'\(\s*(?P<ver>[^\s)]+)\)$')

def parse_name_and_version(p):
    """
    A utility method used to get name and version from a string.

    From e.g. a Provides-Dist value.

    :param p: A value in a form 'foo (1.0)'
    :return: The name and version as a tuple.
    :raises DistlibException: if *p* is not in the expected form.
    """
    m = NAME_VERSION_RE.match(p)
    if m is None:
        raise DistlibException('Ill-formed name/version string: \'%s\'' % p)
    groups = m.groupdict()
    return groups['name'].strip().lower(), groups['ver']

def get_extras(requested, available):
    """Resolve a requested set of extras against those actually declared.

    '*' selects every available extra; '-name' removes a previously
    selected one; a bare '-' is kept literally.  Requesting an
    undeclared extra logs a warning but still includes it.
    """
    result = set()
    requested = set(requested or [])
    available = set(available or [])
    if '*' in requested:
        requested.remove('*')
        result |= available
    for r in requested:
        if r == '-':
            result.add(r)
        elif r.startswith('-'):
            unwanted = r[1:]
            if unwanted not in available:
                # lazy %-style args: formatting deferred to the log handler
                logger.warning('undeclared extra: %s', unwanted)
            if unwanted in result:
                result.remove(unwanted)
        else:
            if r not in available:
                logger.warning('undeclared extra: %s', r)
            result.add(r)
    return result
#
# Extended metadata functionality
#

def _get_external_data(url):
    """Fetch *url* and return its JSON payload as a dict ({} on any failure)."""
    result = {}
    try:
        # urlopen might fail if it runs into redirections,
        # because of Python issue #13696. Fixed in locators
        # using a custom redirect handler.
        resp = urlopen(url)
        headers = resp.info()
        ct = headers.get('Content-Type')
        # Guard against a missing Content-Type header: previously a None
        # value here raised AttributeError and was logged as an exception.
        if not ct or not ct.startswith('application/json'):
            logger.debug('Unexpected response for JSON request: %s', ct)
        else:
            reader = codecs.getreader('utf-8')(resp)
            result = json.load(reader)
    except Exception as e:
        logger.exception('Failed to get external data for %s: %s', url, e)
    return result

_external_data_base_url = 'https://www.red-dove.com/pypi/projects/'

def get_project_data(name):
    """Fetch the external project metadata JSON for *name*."""
    rel = '%s/%s/project.json' % (name[0].upper(), name)
    return _get_external_data(urljoin(_external_data_base_url, rel))

def get_package_data(name, version):
    """Fetch the external metadata JSON for one release of *name*."""
    rel = '%s/%s/package-%s.json' % (name[0].upper(), name, version)
    url = urljoin(_external_data_base_url, rel)
    return _get_external_data(url)


class Cache(object):
    """
    A class implementing a cache for resources that need to live in the file system
    e.g. shared libraries. This class was moved from resources to here because it
    could be used by other modules, e.g. the wheel module.
    """

    def __init__(self, base):
        """
        Initialise an instance.

        :param base: The base directory where the cache should be located.
        """
        # 'isdir' rather than 'exists': a plain file of that name is unusable
        if not os.path.isdir(base):  # pragma: no cover
            os.makedirs(base)
        # warn when any group/other permission bits are set
        if (os.stat(base).st_mode & 0o77) != 0:
            logger.warning('Directory \'%s\' is not private', base)
        self.base = os.path.abspath(os.path.normpath(base))

    def prefix_to_dir(self, prefix):
        """
        Converts a resource prefix to a directory name in the cache.
        """
        return path_to_cache_dir(prefix)

    def clear(self):
        """
        Clear the cache.

        :return: the entries which could not be removed.
        """
        failures = []
        for name in os.listdir(self.base):
            entry = os.path.join(self.base, name)
            try:
                if os.path.islink(entry) or os.path.isfile(entry):
                    os.remove(entry)
                elif os.path.isdir(entry):
                    shutil.rmtree(entry)
            except Exception:
                failures.append(entry)
        return failures


class EventMixin(object):
    """
    A very simple publish/subscribe system.
    """
    def __init__(self):
        self._subscribers = {}

    def add(self, event, subscriber, append=True):
        """
        Add a subscriber for an event.

        :param event: The name of an event.
        :param subscriber: The subscriber to be added (and called when the
                           event is published).
        :param append: Whether to append or prepend the subscriber to an
                       existing subscriber list for the event.
        """
        queue = self._subscribers.get(event)
        if queue is None:
            self._subscribers[event] = deque([subscriber])
        elif append:
            queue.append(subscriber)
        else:
            queue.appendleft(subscriber)

    def remove(self, event, subscriber):
        """
        Remove a subscriber for an event.

        :param event: The name of an event.
        :param subscriber: The subscriber to be removed.
        :raises ValueError: if the event has no subscribers at all.
        """
        registered = self._subscribers
        if event not in registered:
            raise ValueError('No subscribers: %r' % event)
        registered[event].remove(subscriber)

    def get_subscribers(self, event):
        """
        Return an iterator for the subscribers for an event.
        :param event: The event to return subscribers for.
        """
        return iter(self._subscribers.get(event, ()))

    def publish(self, event, *args, **kwargs):
        """
        Publish a event and return a list of values returned by its
        subscribers.

        :param event: The event to publish.
        :param args: The positional arguments to pass to the event's
                     subscribers.
        :param kwargs: The keyword arguments to pass to the event's
                       subscribers.
        """
        results = []
        for subscriber in self.get_subscribers(event):
            try:
                value = subscriber(event, *args, **kwargs)
            except Exception:
                # a failing subscriber contributes None rather than aborting
                logger.exception('Exception during event publication')
                value = None
            results.append(value)
        logger.debug('publish %s: args = %s, kwargs = %s, result = %s',
                     event, args, kwargs, results)
        return results

#
# Simple sequencing
#
class Sequencer(object):
    """Topological sequencing of steps linked by predecessor/successor
    edges, with support for isolated (edge-less) nodes."""

    def __init__(self):
        # step -> set of its predecessors
        self._preds = {}
        # step -> set of its successors
        self._succs = {}
        self._nodes = set()     # nodes with no preds/succs

    def add_node(self, node):
        """Register *node* as an isolated step (no edges)."""
        self._nodes.add(node)

    def remove_node(self, node, edges=False):
        """Remove *node*; with edges=True, also drop all edges touching it."""
        if node in self._nodes:
            self._nodes.remove(node)
        if edges:
            for p in set(self._preds.get(node, ())):
                self.remove(p, node)
            for s in set(self._succs.get(node, ())):
                self.remove(node, s)
            # Remove empties
            for k, v in list(self._preds.items()):
                if not v:
                    del self._preds[k]
            for k, v in list(self._succs.items()):
                if not v:
                    del self._succs[k]

    def add(self, pred, succ):
        """Record that *pred* must run before *succ*."""
        assert pred != succ
        self._preds.setdefault(succ, set()).add(pred)
        self._succs.setdefault(pred, set()).add(succ)

    def remove(self, pred, succ):
        """Remove the pred -> succ edge; raises ValueError if absent."""
        assert pred != succ
        try:
            preds = self._preds[succ]
            succs = self._succs[pred]
        except KeyError:  # pragma: no cover
            raise ValueError('%r not a successor of anything' % succ)
        try:
            preds.remove(pred)
            succs.remove(succ)
        except KeyError:  # pragma: no cover
            raise ValueError('%r not a successor of %r' % (succ, pred))

    def is_step(self, step):
        """Return True if *step* is known, either via an edge or as a node."""
        return (step in self._preds or step in self._succs or
                step in self._nodes)

    def get_steps(self, final):
        """Return the steps needed to reach *final*, in execution order.

        Performs a breadth-first walk over predecessors; a step reached
        via several paths is moved later in the working list (hence
        earlier after the final reversal) so prerequisites always
        precede their dependents.
        """
        if not self.is_step(final):
            raise ValueError('Unknown: %r' % final)
        result = []
        todo = []
        seen = set()
        todo.append(final)
        while todo:
            step = todo.pop(0)
            if step in seen:
                # if a step was already seen,
                # move it to the end (so it will appear earlier
                # when reversed on return) ... but not for the
                # final step, as that would be confusing for
                # users
                if step != final:
                    result.remove(step)
                    result.append(step)
            else:
                seen.add(step)
                result.append(step)
                preds = self._preds.get(step, ())
                todo.extend(preds)
        return reversed(result)

    @property
    def strong_connections(self):
        """Return the strongly connected components of the successor
        graph as a list of tuples (Tarjan's algorithm, recursive)."""
        #http://en.wikipedia.org/wiki/Tarjan%27s_strongly_connected_components_algorithm
        index_counter = [0]
        stack = []
        lowlinks = {}
        index = {}
        result = []

        graph = self._succs

        def strongconnect(node):
            # set the depth index for this node to the smallest unused index
            index[node] = index_counter[0]
            lowlinks[node] = index_counter[0]
            index_counter[0] += 1
            stack.append(node)

            # Consider successors
            try:
                successors = graph[node]
            except Exception:
                successors = []
            for successor in successors:
                if successor not in lowlinks:
                    # Successor has not yet been visited
                    strongconnect(successor)
                    lowlinks[node] = min(lowlinks[node],lowlinks[successor])
                elif successor in stack:
                    # the successor is in the stack and hence in the current
                    # strongly connected component (SCC)
                    lowlinks[node] = min(lowlinks[node],index[successor])

            # If `node` is a root node, pop the stack and generate an SCC
            if lowlinks[node] == index[node]:
                connected_component = []

                while True:
                    successor = stack.pop()
                    connected_component.append(successor)
                    if successor == node: break
                component = tuple(connected_component)
                # storing the result
                result.append(component)

        for node in graph:
            if node not in lowlinks:
                strongconnect(node)

        return result

    @property
    def dot(self):
        """Render the dependency graph in Graphviz 'dot' format."""
        result = ['digraph G {']
        for succ in self._preds:
            preds = self._preds[succ]
            for pred in preds:
                result.append('  %s -> %s;' % (pred, succ))
        for node in self._nodes:
            result.append('  %s;' % node)
        result.append('}')
        return '\n'.join(result)

#
# Unarchiving functionality for zip, tar, tgz, tbz, whl
#

ARCHIVE_EXTENSIONS = ('.tar.gz', '.tar.bz2', '.tar', '.zip',
                      '.tgz', '.tbz', '.whl')

def unarchive(archive_filename, dest_dir, format=None, check=True):
    """Extract an archive (zip/whl/tar/tgz/tbz) into *dest_dir*.

    :param archive_filename: path to the archive file.
    :param dest_dir: directory to extract into.
    :param format: one of 'zip', 'tgz', 'tbz', 'tar'; sniffed from the
                   filename extension when None.
    :param check: when True, verify that no member escapes *dest_dir*.
    :raises ValueError: for an unknown format or an unsafe member path.
    """

    def check_path(path):
        # Reject members that would be written outside dest_dir.
        if not isinstance(path, text_type):
            path = path.decode('utf-8')
        p = os.path.abspath(os.path.join(dest_dir, path))
        if not p.startswith(dest_dir) or p[plen] != os.sep:
            raise ValueError('path outside destination: %r' % p)

    dest_dir = os.path.abspath(dest_dir)
    plen = len(dest_dir)
    archive = None
    if format is None:
        if archive_filename.endswith(('.zip', '.whl')):
            format = 'zip'
        elif archive_filename.endswith(('.tar.gz', '.tgz')):
            format = 'tgz'
        elif archive_filename.endswith(('.tar.bz2', '.tbz')):
            format = 'tbz'
        elif archive_filename.endswith('.tar'):
            format = 'tar'
        else:  # pragma: no cover
            raise ValueError('Unknown format for %r' % archive_filename)
    # BUG FIX: the tarfile mode was previously assigned only while sniffing
    # the extension, so passing format= explicitly left 'mode' unbound and
    # raised NameError at tarfile.open below.
    mode = {'tgz': 'r:gz', 'tbz': 'r:bz2', 'tar': 'r'}.get(format)
    try:
        if format == 'zip':
            archive = ZipFile(archive_filename, 'r')
            if check:
                names = archive.namelist()
                for name in names:
                    check_path(name)
        else:
            archive = tarfile.open(archive_filename, mode)
            if check:
                names = archive.getnames()
                for name in names:
                    check_path(name)
        if format != 'zip' and sys.version_info[0] < 3:
            # See Python issue 17153. If the dest path contains Unicode,
            # tarfile extraction fails on Python 2.x if a member path name
            # contains non-ASCII characters - it leads to an implicit
            # bytes -> unicode conversion using ASCII to decode.
            for tarinfo in archive.getmembers():
                if not isinstance(tarinfo.name, text_type):
                    tarinfo.name = tarinfo.name.decode('utf-8')

        # Limit extraction of dangerous items, if this Python
        # allows it easily. If not, just trust the input.
        # See: https://docs.python.org/3/library/tarfile.html#extraction-filters
        def extraction_filter(member, path):
            """Run tarfile.tar_filter, but raise the expected ValueError"""
            # This is only called if the current Python has tarfile filters
            try:
                return tarfile.tar_filter(member, path)
            except tarfile.FilterError as exc:
                raise ValueError(str(exc))
        archive.extraction_filter = extraction_filter

        archive.extractall(dest_dir)

    finally:
        if archive:
            archive.close()


def zip_dir(directory):
    """zip a directory tree into a BytesIO object"""
    buf = io.BytesIO()
    prefix_len = len(directory)
    with ZipFile(buf, "w") as zf:
        for root, dirs, files in os.walk(directory):
            # archive names are paths relative to *directory*
            arc_root = root[prefix_len:]
            for fname in files:
                zf.write(os.path.join(root, fname),
                         os.path.join(arc_root, fname))
    return buf

#
# Simple progress bar
#

UNITS = ('', 'K', 'M', 'G', 'T', 'P')


class Progress(object):
    """Track the progress of an operation between a minimum and a maximum
    value.  A ``maxval`` of None means the total amount of work is
    unknown, and percentage/ETA are rendered as placeholders."""
    unknown = 'UNKNOWN'

    def __init__(self, minval=0, maxval=100):
        assert maxval is None or maxval >= minval
        self.min = self.cur = minval
        self.max = maxval
        self.started = None   # wall-clock time of the first update
        self.elapsed = 0      # seconds since the first update
        self.done = False

    def update(self, curval):
        """Record the current progress value and refresh elapsed time."""
        assert self.min <= curval
        assert self.max is None or curval <= self.max
        self.cur = curval
        now = time.time()
        if self.started is None:
            self.started = now
        else:
            self.elapsed = now - self.started

    def increment(self, incr):
        """Advance progress by a non-negative amount."""
        assert incr >= 0
        self.update(self.cur + incr)

    def start(self):
        """Mark the start of the operation; returns self for chaining."""
        self.update(self.min)
        return self

    def stop(self):
        """Mark the operation as finished."""
        if self.max is not None:
            self.update(self.max)
        self.done = True

    @property
    def maximum(self):
        """The maximum value, or 'UNKNOWN' when open-ended."""
        return self.unknown if self.max is None else self.max

    @property
    def percentage(self):
        """Progress rendered as a fixed-width percentage string."""
        if self.done:
            return '100 %'
        if self.max is None:
            return ' ?? %'
        fraction = 100.0 * (self.cur - self.min) / (self.max - self.min)
        return '%3d %%' % fraction

    def format_duration(self, duration):
        """Render *duration* (seconds) as HH:MM:SS, or '??:??:??' when it
        cannot be known yet."""
        # Placeholder when the task is open-ended with a non-positive
        # duration, or when no progress has been made at all.
        if (duration <= 0 and self.max is None) or self.cur == self.min:
            return '??:??:??'
        return time.strftime('%H:%M:%S', time.gmtime(duration))

    @property
    def ETA(self):
        """Estimated time remaining (or total elapsed time when done)."""
        if self.done:
            prefix = 'Done'
            t = self.elapsed
        else:
            prefix = 'ETA '
            if self.max is None:
                t = -1
            elif self.elapsed == 0 or (self.cur == self.min):
                t = 0
            else:
                # remaining = (total/done - 1) * elapsed
                t = float(self.max - self.min)
                t /= self.cur - self.min
                t = (t - 1) * self.elapsed
        return '%s: %s' % (prefix, self.format_duration(t))

    @property
    def speed(self):
        """Average rate as a human-readable string, e.g. '12 KB/s'."""
        if self.elapsed == 0:
            rate = 0.0
        else:
            rate = (self.cur - self.min) / self.elapsed
        for unit in UNITS:
            if rate < 1000:
                break
            rate /= 1000.0
        return '%d %sB/s' % (rate, unit)

#
# Glob functionality
#

RICH_GLOB = re.compile(r'\{([^}]*)\}')
_CHECK_RECURSIVE_GLOB = re.compile(r'[^/\\,{]\*\*|\*\*[^/\\,}]')
_CHECK_MISMATCH_SET = re.compile(r'^[^{]*\}|\{[^}]*$')


def iglob(path_glob):
    """Extended globbing function that supports ** and {opt1,opt2,opt3}."""
    if _CHECK_RECURSIVE_GLOB.search(path_glob):
        msg = """invalid glob %r: recursive glob "**" must be used alone"""
        raise ValueError(msg % path_glob)
    if _CHECK_MISMATCH_SET.search(path_glob):
        msg = """invalid glob %r: mismatching set marker '{' or '}'"""
        raise ValueError(msg % path_glob)
    return _iglob(path_glob)


def _iglob(path_glob):
    """Generator backing iglob: expands one {a,b,c} alternation, then
    either defers to the stdlib glob or walks the tree for '**'."""
    pieces = RICH_GLOB.split(path_glob, 1)
    if len(pieces) > 1:
        assert len(pieces) == 3, pieces
        prefix, alternatives, suffix = pieces
        for alt in alternatives.split(','):
            for path in _iglob(''.join((prefix, alt, suffix))):
                yield path
    elif '**' not in path_glob:
        for item in std_iglob(path_glob):
            yield item
    else:
        prefix, radical = path_glob.split('**', 1)
        if prefix == '':
            prefix = '.'
        if radical == '':
            radical = '*'
        else:
            # accept both separator styles after '**'
            radical = radical.lstrip('/')
            radical = radical.lstrip('\\')
        for path, dirs, files in os.walk(prefix):
            path = os.path.normpath(path)
            for fn in _iglob(os.path.join(path, radical)):
                yield fn

if ssl:
    from .compat import (HTTPSHandler as BaseHTTPSHandler, match_hostname,
                         CertificateError)


#
# HTTPSConnection which verifies certificates/matches domains
#

    class HTTPSConnection(httplib.HTTPSConnection):
        """HTTPSConnection which can verify the server certificate against
        ca_certs and check that the certificate matches the host."""
        ca_certs = None # set this to the path to the certs file (.pem)
        check_domain = True # only used if ca_certs is not None

        # noinspection PyPropertyAccess
        def connect(self):
            """Connect, wrap the socket in TLS, and optionally verify the
            peer certificate and hostname."""
            sock = socket.create_connection((self.host, self.port), self.timeout)
            if getattr(self, '_tunnel_host', False):
                self.sock = sock
                self._tunnel()

            if not hasattr(ssl, 'SSLContext'):
                # For 2.x
                # NOTE(review): ssl.wrap_socket is deprecated (removed in
                # Python 3.12) and PROTOCOL_SSLv23 is a legacy constant;
                # this branch only runs on stacks without SSLContext.
                if self.ca_certs:
                    cert_reqs = ssl.CERT_REQUIRED
                else:
                    cert_reqs = ssl.CERT_NONE
                self.sock = ssl.wrap_socket(sock, self.key_file, self.cert_file,
                                            cert_reqs=cert_reqs,
                                            ssl_version=ssl.PROTOCOL_SSLv23,
                                            ca_certs=self.ca_certs)
            else:  # pragma: no cover
                context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
                context.options |= ssl.OP_NO_SSLv2
                if self.cert_file:
                    context.load_cert_chain(self.cert_file, self.key_file)
                kwargs = {}
                if self.ca_certs:
                    context.verify_mode = ssl.CERT_REQUIRED
                    context.load_verify_locations(cafile=self.ca_certs)
                    if getattr(ssl, 'HAS_SNI', False):
                        # pass the hostname for SNI-capable TLS stacks
                        kwargs['server_hostname'] = self.host
                self.sock = context.wrap_socket(sock, **kwargs)
            if self.ca_certs and self.check_domain:
                try:
                    match_hostname(self.sock.getpeercert(), self.host)
                    logger.debug('Host verified: %s', self.host)
                except CertificateError:  # pragma: no cover
                    # tear the connection down before propagating the error
                    self.sock.shutdown(socket.SHUT_RDWR)
                    self.sock.close()
                    raise

    class HTTPSHandler(BaseHTTPSHandler):
        """urllib2-style handler which creates verifying HTTPSConnections."""
        def __init__(self, ca_certs, check_domain=True):
            BaseHTTPSHandler.__init__(self)
            self.ca_certs = ca_certs
            self.check_domain = check_domain

        def _conn_maker(self, *args, **kwargs):
            """
            This is called to create a connection instance. Normally you'd
            pass a connection class to do_open, but it doesn't actually check for
            a class, and just expects a callable. As long as we behave just as a
            constructor would have, we should be OK. If it ever changes so that
            we *must* pass a class, we'll create an UnsafeHTTPSConnection class
            which just sets check_domain to False in the class definition, and
            choose which one to pass to do_open.
            """
            result = HTTPSConnection(*args, **kwargs)
            if self.ca_certs:
                result.ca_certs = self.ca_certs
                result.check_domain = self.check_domain
            return result

        def https_open(self, req):
            """Open an HTTPS request, translating verification failures
            into CertificateError."""
            try:
                return self.do_open(self._conn_maker, req)
            except URLError as e:
                if 'certificate verify failed' in str(e.reason):
                    raise CertificateError('Unable to verify server certificate '
                                           'for %s' % req.host)
                else:
                    raise

    #
    # To prevent against mixing HTTP traffic with HTTPS (examples: A Man-In-The-
    # Middle proxy using HTTP listens on port 443, or an index mistakenly serves
    # HTML containing a http://xyz link when it should be https://xyz),
    # you can use the following handler class, which does not allow HTTP traffic.
    #
    # It works by inheriting from HTTPHandler - so build_opener won't add a
    # handler for HTTP itself.
    #
    class HTTPSOnlyHandler(HTTPSHandler, HTTPHandler):
        def http_open(self, req):
            """Refuse plain-HTTP requests outright."""
            raise URLError('Unexpected HTTP request on what should be a secure '
                           'connection: %s' % req)

#
# XML-RPC with timeouts
#

_ver_info = sys.version_info[:2]

if _ver_info == (2, 6):
    # Python 2.6's httplib.HTTP/HTTPS don't forward keyword arguments
    # (e.g. timeout) to the underlying connection class; these subclasses
    # pass them through via _setup.
    class HTTP(httplib.HTTP):
        def __init__(self, host='', port=None, **kwargs):
            if port == 0:   # 0 means use port 0, not the default port
                port = None
            self._setup(self._connection_class(host, port, **kwargs))


    if ssl:
        class HTTPS(httplib.HTTPS):
            def __init__(self, host='', port=None, **kwargs):
                if port == 0:   # 0 means use port 0, not the default port
                    port = None
                self._setup(self._connection_class(host, port, **kwargs))


class Transport(xmlrpclib.Transport):
    """An xmlrpclib Transport which applies a timeout to its connections.

    Used by ServerProxy below when a timeout keyword is supplied.
    """
    def __init__(self, timeout, use_datetime=0):
        self.timeout = timeout
        xmlrpclib.Transport.__init__(self, use_datetime)

    def make_connection(self, host):
        h, eh, x509 = self.get_host_info(host)
        # Inline the version check (equivalent to the module-level
        # _ver_info) so this method is self-contained.
        if sys.version_info[:2] == (2, 6):
            result = HTTP(h, timeout=self.timeout)
        else:
            if not self._connection or host != self._connection[0]:
                self._extra_headers = eh
                # BUG FIX: the stored timeout was previously never passed
                # to the connection, so XML-RPC calls could block forever.
                self._connection = host, httplib.HTTPConnection(
                    h, timeout=self.timeout)
            result = self._connection[1]
        return result

if ssl:
    class SafeTransport(xmlrpclib.SafeTransport):
        """HTTPS XML-RPC transport which applies a timeout to its
        connections (see Transport above)."""
        def __init__(self, timeout, use_datetime=0):
            self.timeout = timeout
            xmlrpclib.SafeTransport.__init__(self, use_datetime)

        def make_connection(self, host):
            # host info may carry x509 kwargs; fold our timeout into them
            h, eh, kwargs = self.get_host_info(host)
            if not kwargs:
                kwargs = {}
            kwargs['timeout'] = self.timeout
            if _ver_info == (2, 6):
                result = HTTPS(host, None, **kwargs)
            else:
                # cache the connection per host, as xmlrpclib itself does
                if not self._connection or host != self._connection[0]:
                    self._extra_headers = eh
                    self._connection = host, httplib.HTTPSConnection(h, None,
                                                                     **kwargs)
                result = self._connection[1]
            return result


class ServerProxy(xmlrpclib.ServerProxy):
    """ServerProxy variant honouring an optional timeout= keyword.

    When a timeout is given, a suitable Transport/SafeTransport is
    created and wired in before delegating to xmlrpclib.ServerProxy.
    """
    def __init__(self, uri, **kwargs):
        self.timeout = timeout = kwargs.pop('timeout', None)
        # The above classes only come into play if a timeout
        # is specified
        if timeout is not None:
            scheme, _ = splittype(uri)
            use_datetime = kwargs.get('use_datetime', 0)
            cls = SafeTransport if scheme == 'https' else Transport
            t = cls(timeout, use_datetime=use_datetime)
            kwargs['transport'] = t
            self.transport = t
        xmlrpclib.ServerProxy.__init__(self, uri, **kwargs)

#
# CSV functionality. This is provided because on 2.x, the csv module can't
# handle Unicode. However, we need to deal with Unicode in e.g. RECORD files.
#

def _csv_open(fn, mode, **kwargs):
    if sys.version_info[0] < 3:
        mode += 'b'
    else:
        kwargs['newline'] = ''
    return open(fn, mode, **kwargs)


class CSVBase(object):
    """Shared plumbing for CSVReader/CSVWriter: the dialect defaults
    passed to the csv module, plus context-manager support that closes
    the underlying stream."""

    # Native str objects are required by the csv API (2.x rejects Unicode).
    defaults = {
        'delimiter': str(','),
        'quotechar': str('"'),
        'lineterminator': str('\n'),
    }

    def __enter__(self):
        # participate in 'with' blocks
        return self

    def __exit__(self, *exc_info):
        # close the stream opened/attached by the subclass
        self.stream.close()


class CSVReader(CSVBase):
    """Iterate rows from a csv stream or path, yielding text cells."""
    def __init__(self, **kwargs):
        if 'stream' in kwargs:
            stream = kwargs['stream']
            if sys.version_info[0] >= 3:
                # needs to be a text stream
                stream = codecs.getreader('utf-8')(stream)
            self.stream = stream
        else:
            self.stream = _csv_open(kwargs['path'], 'r')
        self.reader = csv.reader(self.stream, **self.defaults)

    def __iter__(self):
        return self

    def next(self):
        row = next(self.reader)
        if sys.version_info[0] < 3:
            # decode byte cells to text in place on 2.x
            for idx, cell in enumerate(row):
                if not isinstance(cell, text_type):
                    row[idx] = cell.decode('utf-8')
        return row

    __next__ = next

class CSVWriter(CSVBase):
    """Write rows to a csv file, encoding text cells on 2.x."""
    def __init__(self, fn, **kwargs):
        self.stream = _csv_open(fn, 'w')
        self.writer = csv.writer(self.stream, **self.defaults)

    def writerow(self, row):
        if sys.version_info[0] < 3:
            # 2.x csv wants bytes, so encode any text cells first
            encoded = []
            for cell in row:
                if isinstance(cell, text_type):
                    cell = cell.encode('utf-8')
                encoded.append(cell)
            row = encoded
        self.writer.writerow(row)

#
#   Configurator functionality
#

class Configurator(BaseConfigurator):
    """BaseConfigurator extended with the ``inc://`` protocol for including
    JSON configuration fragments from other files."""

    value_converters = dict(BaseConfigurator.value_converters)
    value_converters['inc'] = 'inc_convert'

    def __init__(self, config, base=None):
        super(Configurator, self).__init__(config)
        # Relative inc:// paths are resolved against this directory.
        self.base = base or os.getcwd()

    def configure_custom(self, config):
        """Instantiate the object described by *config* via its '()' factory,
        converting nested values recursively."""
        def convert(value):
            if isinstance(value, (list, tuple)):
                return type(value)([convert(item) for item in value])
            if isinstance(value, dict):
                if '()' in value:
                    # Nested factory description: instantiate it too.
                    return self.configure_custom(value)
                return dict((key, convert(value[key])) for key in value)
            return self.convert(value)

        factory = config.pop('()')
        if not callable(factory):
            factory = self.resolve(factory)
        # '.' holds attributes to set on the result after construction.
        props = config.pop('.', None)
        # '[]' holds positional arguments; remaining valid identifiers
        # become keyword arguments.
        positional = config.pop('[]', ())
        if positional:
            positional = tuple([convert(item) for item in positional])
        kwargs = dict((key, convert(config[key]))
                      for key in config if valid_ident(key))
        result = factory(*positional, **kwargs)
        if props:
            for attr, raw in props.items():
                setattr(result, attr, convert(raw))
        return result

    def __getitem__(self, key):
        value = self.config[key]
        if isinstance(value, dict) and '()' in value:
            # Lazily instantiate and cache factory descriptions.
            value = self.configure_custom(value)
            self.config[key] = value
        return value

    def inc_convert(self, value):
        """Default converter for the inc:// protocol."""
        path = value if os.path.isabs(value) else os.path.join(self.base, value)
        with codecs.open(path, 'r', encoding='utf-8') as f:
            return json.load(f)

#
# Mixin for running subprocesses and capturing their output
#

class SubprocessMixin(object):
    """Mixin for running subprocesses and capturing their output.

    Output lines are handed to a ``progress`` callable when one was
    supplied, otherwise progress is echoed to sys.stderr.
    """

    def __init__(self, verbose=False, progress=None):
        self.verbose = verbose
        self.progress = progress

    def reader(self, stream, context):
        """
        Drain *stream* line by line. Each line goes to the progress
        callable (if any); otherwise a dot (or, when verbose, the decoded
        line itself) is written to sys.stderr. Closes the stream at EOF.
        """
        progress = self.progress
        verbose = self.verbose
        while True:
            line = stream.readline()
            if not line:
                break
            if progress is not None:
                progress(line, context)
            elif verbose:
                sys.stderr.write(line.decode('utf-8'))
                sys.stderr.flush()
            else:
                sys.stderr.write('.')
                sys.stderr.flush()
        stream.close()

    def run_command(self, cmd, **kwargs):
        """Run *cmd*, streaming stdout/stderr through reader threads, and
        return the finished Popen object."""
        proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE, **kwargs)
        threads = [
            threading.Thread(target=self.reader, args=(proc.stdout, 'stdout')),
            threading.Thread(target=self.reader, args=(proc.stderr, 'stderr')),
        ]
        for t in threads:
            t.start()
        proc.wait()
        for t in threads:
            t.join()
        if self.progress is not None:
            self.progress('done.', 'main')
        elif self.verbose:
            sys.stderr.write('done.\n')
        return proc


def normalize_name(name):
    """Return *name* normalized per PEP 503: lower-case, with every run of
    '-', '_' or '.' collapsed to a single '-'.

    See https://www.python.org/dev/peps/pep-0503/#normalized-names
    """
    return re.sub(r'[-_.]+', '-', name.lower())
_vendor/distlib/_backport/__pycache__/sysconfig.cpython-36.opt-1.pyc000064400000037031151733136310021406 0ustar003

�PfKi�@s�dZddlZddlZddlZddlZddlmZmZyddlZWne	k
r\ddl
ZYnXdddddd	d
ddd
dgZdd�Zej
r�ejjeej
��Zneej��Zejdkr�dedEd�j�kr�eejjee��Zejdko�dedFd�j�k�r
eejjeee��Zejdk�r@dedGd�j�k�r@eejjeee��Zdd�Ze�Zdadd�Zej�Zejd�Zdd�Zejj�dZ ejdd �Z!e de d!Z"ejj#ej$�Z%ejj#ej&�Z'da(dZ)d"d#�Z*d$d%�Z+d&d'�Z,d(d)�Z-d*d+�Z.d,d-�Z/dHd.d/�Z0d0d�Z1d1d2�Z2d3d4�Z3dId5d�Z4d6d�Z5d7d
�Z6d8d	�Z7e.�dd9fd:d
�Z8e.�dd9fd;d�Z9d<d�Z:d=d�Z;d>d�Z<d?d�Z=d@dA�Z>dBdC�Z?e@dDk�r�e?�dS)Jz-Access to Python's configuration information.�N)�pardir�realpath�get_config_h_filename�get_config_var�get_config_vars�get_makefile_filename�get_path�get_path_names�	get_paths�get_platform�get_python_version�get_scheme_names�parse_config_hcCs"yt|�Stk
r|SXdS)N)r�OSError)�path�r�/usr/lib/python3.6/sysconfig.py�_safe_realpath"sr�ntZpcbuild�z\pc\v�
z\pcbuild\amd64�cCs.x(dD] }tjjtjjtd|��rdSqWdS)N�
Setup.dist�Setup.local�ModulesTF)rr)�osr�isfile�join�
_PROJECT_BASE)�fnrrr�is_python_build:s
r FcCs�ts�ddlm}tjdd�d}||�}|jd�}|j��}tj|�WdQRXt	r~x(dD] }tj
|d	d
�tj
|dd�qZWd
adS)N�)�finder�.�rz
sysconfig.cfg�posix_prefix�
posix_home�includez{srcdir}/Include�platincludez{projectbase}/.T)r%r&)�	_cfg_readZ	resourcesr"�__name__�rsplit�findZ	as_stream�_SCHEMESZreadfp�
_PYTHON_BUILD�set)r"Zbackport_packageZ_finderZ_cfgfile�s�schemerrr�_ensure_cfg_readDs


r2z\{([^{]*?)\}cs�t�|jd�r|jd�}nt�}|j�}xD|D]<}|dkr>q0x,|D]$\}}|j||�rZqD|j|||�qDWq0W|jd�xX|j�D]L}t|j|����fdd�}x,|j|�D]\}}|j||t	j
||��q�Wq�WdS)N�globalscs$|jd�}|�kr�|S|jd�S)Nr$r)�group)�matchobj�name)�	variablesrr�	_replaceros
z"_expand_globals.<locals>._replacer)r2Zhas_section�items�tuple�sectionsZ
has_optionr/Zremove_section�dict�	_VAR_REPL�sub)�configr3r;ZsectionZoption�valuer8r)r7r�_expand_globalsYs$


rA�r!cs�fdd�}tj||�S)z�In the string `path`, replace tokens like {some.thing} with the
    corresponding value from the map `local_vars`.

    If there is no corresponding value, leave the token unchanged.
    cs8|jd�}|�kr�|S|tjkr.tj|S|jd�S)Nr$r)r4r�environ)r5r6)�
local_varsrrr8�s


z_subst_vars.<locals>._replacer)r=r>)rrDr8r)rDr�_subst_vars�srEcCs4|j�}x&|j�D]\}}||kr$q|||<qWdS)N)�keysr9)�target_dict�
other_dict�target_keys�keyr@rrr�_extend_dict�s
rKcCsdi}|dkri}t|t��xBtj|�D]4\}}tjdkrFtjj|�}tjjt	||��||<q(W|S)N�posixr)rLr)
rKrr-r9rr6r�
expanduser�normpathrE)r1�vars�resrJr@rrr�_expand_vars�s
rQcs�fdd�}tj||�S)Ncs$|jd�}|�kr�|S|jd�S)Nr$r)r4)r5r6)rOrrr8�s
zformat_value.<locals>._replacer)r=r>)r@rOr8r)rOr�format_value�srRcCstjdkrdStjS)NrLr%)rr6rrrr�_get_default_scheme�s
rScCs�tjjdd�}dd�}tjdkrBtjjd�p.d}|r8|S||d�Stjdkr|td	�}|r||r`|S|dd
|dtjdd��S|r�|S|dd
�SdS)N�PYTHONUSERBASEcWstjjtjj|��S)N)rrrMr)�argsrrr�joinuser�sz_getuserbase.<locals>.joinuserr�APPDATA�~�Python�darwin�PYTHONFRAMEWORK�Libraryz%d.%dr!z.local)rrC�getr6�sys�platformr�version_info)�env_baserV�base�	frameworkrrr�_getuserbase�s"



rdcCs"tjd�}tjd�}tjd�}|dkr*i}i}i}tj|ddd��}|j�}WdQRXx�|D]�}	|	jd�s\|	j�d	krxq\|j|	�}
|
r\|
jd
d�\}}|j�}|j	dd	�}
d
|
kr�|||<q\yt
|�}Wn$tk
r�|j	dd
�||<Yq\X|||<q\Wt|j
��}d}�x�t|�dk�r�x�t|�D�]�}||}|j|��pJ|j|�}
|
dk	�r�|
jd
�}d}||k�r|t||�}n�||k�r�d}nx|tjk�r�tj|}n`||k�r�|jd��r�|dd�|k�r�d	}n$d||k�r�d}nt|d|�}nd	||<}|�r�||
j�d�}|d|
j��||}d
|k�rF|||<n~yt
|�}Wn"tk
�rt|j�||<Yn
X|||<|j|�|jd��r�|dd�|k�r�|dd�}||k�r�|||<n|||<|j|��q(W�qWx.|j�D]"\}}t|t��r�|j�||<�q�W|j|�|S)z�Parse a Makefile-style file.

    A dictionary containing name/value pairs is returned.  If an
    optional dictionary is passed in as the second argument, it is
    used instead of a new dictionary.
    z"([a-zA-Z][a-zA-Z0-9_]+)\s*=\s*(.*)z\$\(([A-Za-z][A-Za-z0-9_]*)\)z\${([A-Za-z][A-Za-z0-9_]*)}Nzutf-8�surrogateescape)�encoding�errors�#�r$r!z$$�$�CFLAGS�LDFLAGS�CPPFLAGSrTF�PY_rB)rkrlrm)�re�compile�codecs�open�	readlines�
startswith�strip�matchr4�replace�int�
ValueError�listrF�lenr:�search�strrrC�end�start�remover9�
isinstance�update)�filenamerO�_variable_rx�_findvar1_rx�_findvar2_rx�done�notdone�f�lines�line�m�n�v�tmpvr7�renamed_variablesr6r@�found�item�after�krrr�_parse_makefile�s�	
















r�cCsDtrtjjtd�Sttd�r,dttjf}nd}tjjt	d�|d�S)z Return the path of the Makefile.�Makefile�abiflagszconfig-%s%sr?�stdlib)
r.rrrr�hasattrr^�_PY_VERSION_SHORTr�r)�config_dir_namerrrrMs
cCs�t�}yt||�WnJtk
r^}z.d|}t|d�rF|d|j}t|��WYdd}~XnXt�}y"t|��}t||�WdQRXWnJtk
r�}z.d|}t|d�r�|d|j}t|��WYdd}~XnXtr�|d|d<dS)z7Initialize the module as appropriate for POSIX systems.z.invalid Python installation: unable to open %s�strerrorz (%s)N�	BLDSHARED�LDSHARED)	rr��IOErrorr�r�rrrrr.)rO�makefile�e�msg�config_hr�rrr�_init_posixXs&


r�cCsVtd�|d<td�|d<td�|d<d|d<d	|d
<t|d<tjjttj��|d<d
S)z+Initialize the module as appropriate for NTr��LIBDEST�
platstdlib�
BINLIBDESTr'�	INCLUDEPYz.pyd�SOz.exe�EXE�VERSION�BINDIRN)r�_PY_VERSION_SHORT_NO_DOTrr�dirnamerr^�
executable)rOrrr�_init_non_posixtsr�cCs�|dkri}tjd�}tjd�}xx|j�}|s0P|j|�}|rz|jdd�\}}yt|�}Wntk
rnYnX|||<q"|j|�}|r"d||jd�<q"W|S)z�Parse a config.h-style file.

    A dictionary containing name/value pairs is returned.  If an
    optional dictionary is passed in as the second argument, it is
    used instead of a new dictionary.
    Nz"#define ([A-Z][A-Za-z0-9_]+) (.*)
z&/[*] #undef ([A-Z][A-Za-z0-9_]+) [*]/
r$r!r)rorp�readlinervr4rxry)�fprO�	define_rx�undef_rxr�r�r�r�rrrr�s(




cCs:tr$tjdkrtjjtd�}q,t}ntd�}tjj|d�S)zReturn the path of pyconfig.h.r�PCr(z
pyconfig.h)r.rr6rrrr)�inc_dirrrrr�s
cCstttj���S)z,Return a tuple containing the schemes names.)r:�sortedr-r;rrrrr
�scCs
tjd�S)z*Return a tuple containing the paths names.r%)r-Zoptionsrrrrr	�sTcCs&t�|rt||�Sttj|��SdS)z�Return a mapping containing an install scheme.

    ``scheme`` is the install scheme name. If not provided, it will
    return the default scheme for the current platform.
    N)r2rQr<r-r9)r1rO�expandrrrr
�s
cCst|||�|S)z[Return a path corresponding to the scheme.

    ``scheme`` is the install scheme name.
    )r
)r6r1rOr�rrrr�scGs�tdk�r�iattd<ttd<ttd<ttd<tdtdtd<ttd	<ttd
<ttd<ytjtd<Wntk
r�d
td<YnXt	j
d#kr�tt�t	j
dkr�tt�tj
dkr�t�td<dtkr�ttd<nttd�td<to�t	j
dk�r\t}yt	j�}Wntk
�rd}YnXt	jjtd��r\||k�r\t	jj|td�}t	jj|�td<tjdk�r�t	j�d}t|jd�d�}|dk�r�x:d$D]2}t|}tjdd|�}tjdd|�}|t|<�q�Wn�dt	jk�rt	jd}x8d%D]0}t|}tjdd|�}|d|}|t|<�q�Wtjdd
�}	tjd |	�}
|
dk	�r�|
j d!�}t	jj!|��s�x,d&D]$}t|}tjd"d|�}|t|<�q^W|�r�g}x|D]}
|j"tj|
���q�W|StSdS)'ayWith no arguments, return a dictionary of all configuration
    variables relevant for the current platform.

    On Unix, this means every variable defined in Python's installed Makefile;
    On Windows and Mac OS it's a much smaller set.

    With arguments, return a list of values that result from looking up
    each argument in the configuration variable dictionary.
    N�prefix�exec_prefix�
py_version�py_version_shortrr!�py_version_nodotrb�platbase�projectbaser�rir�os2rLz2.6�userbase�srcdirrZr#rrl�
BASECFLAGSrk�	PY_CFLAGSr�z
-arch\s+\w+\s� z-isysroot [^ 	]*Z	ARCHFLAGSz-isysroot\s+(\S+)r$z-isysroot\s+\S+(\s|$))rr�)rlr�rkr�r�)rlr�rkr�r�)rlr�rkr�r�)#�_CONFIG_VARS�_PREFIX�_EXEC_PREFIX�_PY_VERSIONr�rr^r��AttributeErrorrr6r�r��versionrdrr.�getcwdrr�isabsrrNr_�unamerx�splitror>rCr]r|r4�exists�append)rUrb�cwdr�Zkernel_versionZ
major_versionrJ�flagsZarchrkr�Zsdk�valsr6rrrr�s�












cCst�j|�S)z�Return the value of a single variable using the dictionary returned by
    'get_config_vars()'.

    Equivalent to get_config_vars().get(name)
    )rr])r6rrrrRscCs`tjdkrnd}tjj|�}|d:kr(tjStjjd|�}tj|t|�|�j�}|dkr\dS|dkrhdStjStjd	ks�ttd
�r�tjStj	�\}}}}}|j�j
dd�}|j
d
d�}|j
dd�}|dd�dkr�d||fS|dd�dk�r(|ddk�rRd}dt|d�d|dd�f}�n*|dd�dk�rFd||fS|dd�dk�rfd|||fS|dd�d k�r�d }tj
d!�}	|	j|�}
|
�rR|
j�}�n�|dd�d"k�rRt�}|jd#�}|}
ytd$�}Wntk
�r�YnJXztjd%|j��}
Wd|j�X|
dk	�r4d&j|
jd�jd&�dd��}
|�s>|
}|�rR|}d'}|
d&d(k�rd)t�jd*d�j�k�rd+}t�jd*�}tjd,|�}ttt|���}t|�dk�r�|d}n^|d;k�r�d+}nN|d<k�r�d0}n>|d=k�r�d1}n.|d>k�r�d3}n|d?k�rd4}ntd5|f��n<|d-k�r2tjd@k�rRd/}n |dAk�rRtjdBk�rNd2}nd.}d9|||fS)Ca�Return a string that identifies the current platform.

    This is used mainly to distinguish platform-specific build directories and
    platform-specific built distributions.  Typically includes the OS name
    and version and the architecture (as supplied by 'os.uname()'),
    although the exact information included depends on the OS; eg. for IRIX
    the architecture isn't particularly important (IRIX only runs on SGI
    hardware), but for Linux the kernel version isn't particularly
    important.

    Examples of returned values:
       linux-i586
       linux-alpha (?)
       solaris-2.6-sun4u
       irix-5.3
       irix64-6.2

    Windows will return one of:
       win-amd64 (64bit Windows on AMD64 (aka x86_64, Intel64, EM64T, etc)
       win-ia64 (64bit Windows on Itanium)
       win32 (all others - specifically, sys.platform is returned)

    For other non-POSIX platforms, currently just returns 'sys.platform'.
    rz bit (r$�)�amd64z	win-amd64�itaniumzwin-ia64rLr��/rir��_�-N��linuxz%s-%s�sunosr�5�solarisz%d.%srBr!��irix�aixz%s-%s.%s��cygwinz[\d.]+rZ�MACOSX_DEPLOYMENT_TARGETz0/System/Library/CoreServices/SystemVersion.plistz=<key>ProductUserVisibleVersion</key>\s*<string>(.*?)</string>r#Zmacosxz10.4.z-archrkZfatz
-arch\s+(\S+)�i386�ppc�x86_64ZintelZfat3�ppc64Zfat64Z	universalz%Don't know machine value for archs=%r� �PowerPC�Power_Macintoshz%s-%s-%s���)r�r�)r�r�)r�r�r�)r�r�)r�r�r�r�l)r�r�l) rr6r^r�r,r_r{�lowerr�r�rwrxrorprvr4rr]rrr�r|�read�closerr�ru�findallr:r�r/ry�maxsize)r��i�j�look�osname�host�releaser��machine�rel_rer�ZcfgvarsZmacverZ
macreleaser�ZcflagsZarchsrrrr[s�
$












cCstS)N)r�rrrrr�scCsJxDtt|j���D]0\}\}}|dkr2td|�td||f�qWdS)Nrz%s: z
	%s = "%s")�	enumerater�r9�print)�title�data�indexrJr@rrr�_print_dictsrcCsRtdt��tdt��tdt��t�tdt��t�tdt��dS)z*Display all information sysconfig detains.zPlatform: "%s"zPython version: "%s"z!Current installation scheme: "%s"�Paths�	VariablesN)r�rrrSrr
rrrrr�_mainsr�__main__i����i����i�)N)N)A�__doc__rqrror^Zos.pathrrZconfigparser�ImportErrorZConfigParser�__all__rr�rr�rr�r6r�rr r.r)r2ZRawConfigParserr-rpr=rAr�r�r�r�r�rNr�r�r�r�r��
_USER_BASErErKrQrRrSrdr�rr�r�rrr
r	r
rrrrrrrr*rrrr�<module>s� "
#
	
v

	#
_vendor/distlib/_backport/__pycache__/sysconfig.cpython-36.pyc000064400000037115151733136310020452 0ustar003

�PfKi�@s�dZddlZddlZddlZddlZddlmZmZyddlZWne	k
r\ddl
ZYnXdddddd	d
ddd
dgZdd�Zej
r�ejjeej
��Zneej��Zejdkr�dedEd�j�kr�eejjee��Zejdko�dedFd�j�k�r
eejjeee��Zejdk�r@dedGd�j�k�r@eejjeee��Zdd�Ze�Zdadd�Zej�Zejd�Zdd�Zejj�dZ ejdd �Z!e de d!Z"ejj#ej$�Z%ejj#ej&�Z'da(dZ)d"d#�Z*d$d%�Z+d&d'�Z,d(d)�Z-d*d+�Z.d,d-�Z/dHd.d/�Z0d0d�Z1d1d2�Z2d3d4�Z3dId5d�Z4d6d�Z5d7d
�Z6d8d	�Z7e.�dd9fd:d
�Z8e.�dd9fd;d�Z9d<d�Z:d=d�Z;d>d�Z<d?d�Z=d@dA�Z>dBdC�Z?e@dDk�r�e?�dS)Jz-Access to Python's configuration information.�N)�pardir�realpath�get_config_h_filename�get_config_var�get_config_vars�get_makefile_filename�get_path�get_path_names�	get_paths�get_platform�get_python_version�get_scheme_names�parse_config_hcCs"yt|�Stk
r|SXdS)N)r�OSError)�path�r�/usr/lib/python3.6/sysconfig.py�_safe_realpath"sr�ntZpcbuild�z\pc\v�
z\pcbuild\amd64�cCs.x(dD] }tjjtjjtd|��rdSqWdS)N�
Setup.dist�Setup.local�ModulesTF)rr)�osr�isfile�join�
_PROJECT_BASE)�fnrrr�is_python_build:s
r FcCs�ts�ddlm}tjdd�d}||�}|jd�}|s>td��|j��}tj	|�WdQRXt
r�x(dD] }tj|d
d�tj|dd
�qfWdadS)N�)�finder�.�rz
sysconfig.cfgzsysconfig.cfg exists�posix_prefix�
posix_home�includez{srcdir}/Include�platincludez{projectbase}/.T)r%r&)�	_cfg_readZ	resourcesr"�__name__�rsplit�find�AssertionErrorZ	as_stream�_SCHEMESZreadfp�
_PYTHON_BUILD�set)r"Zbackport_packageZ_finderZ_cfgfile�s�schemerrr�_ensure_cfg_readDs


r3z\{([^{]*?)\}cs�t�|jd�r|jd�}nt�}|j�}xD|D]<}|dkr>q0x,|D]$\}}|j||�rZqD|j|||�qDWq0W|jd�xX|j�D]L}t|j|����fdd�}x,|j|�D]\}}|j||t	j
||��q�Wq�WdS)N�globalscs$|jd�}|�kr�|S|jd�S)Nr$r)�group)�matchobj�name)�	variablesrr�	_replaceros
z"_expand_globals.<locals>._replacer)r3Zhas_section�items�tuple�sectionsZ
has_optionr0Zremove_section�dict�	_VAR_REPL�sub)�configr4r<ZsectionZoption�valuer9r)r8r�_expand_globalsYs$


rB�r!cs�fdd�}tj||�S)z�In the string `path`, replace tokens like {some.thing} with the
    corresponding value from the map `local_vars`.

    If there is no corresponding value, leave the token unchanged.
    cs8|jd�}|�kr�|S|tjkr.tj|S|jd�S)Nr$r)r5r�environ)r6r7)�
local_varsrrr9�s


z_subst_vars.<locals>._replacer)r>r?)rrEr9r)rEr�_subst_vars�srFcCs4|j�}x&|j�D]\}}||kr$q|||<qWdS)N)�keysr:)�target_dict�
other_dict�target_keys�keyrArrr�_extend_dict�s
rLcCsdi}|dkri}t|t��xBtj|�D]4\}}tjdkrFtjj|�}tjjt	||��||<q(W|S)N�posixr)rMr)
rLrr.r:rr7r�
expanduser�normpathrF)r2�vars�resrKrArrr�_expand_vars�s
rRcs�fdd�}tj||�S)Ncs$|jd�}|�kr�|S|jd�S)Nr$r)r5)r6r7)rPrrr9�s
zformat_value.<locals>._replacer)r>r?)rArPr9r)rPr�format_value�srScCstjdkrdStjS)NrMr%)rr7rrrr�_get_default_scheme�s
rTcCs�tjjdd�}dd�}tjdkrBtjjd�p.d}|r8|S||d�Stjdkr|td	�}|r||r`|S|dd
|dtjdd��S|r�|S|dd
�SdS)N�PYTHONUSERBASEcWstjjtjj|��S)N)rrrNr)�argsrrr�joinuser�sz_getuserbase.<locals>.joinuserr�APPDATA�~�Python�darwin�PYTHONFRAMEWORK�Libraryz%d.%dr!z.local)rrD�getr7�sys�platformr�version_info)�env_baserW�base�	frameworkrrr�_getuserbase�s"



recCs"tjd�}tjd�}tjd�}|dkr*i}i}i}tj|ddd��}|j�}WdQRXx�|D]�}	|	jd�s\|	j�d	krxq\|j|	�}
|
r\|
jd
d�\}}|j�}|j	dd	�}
d
|
kr�|||<q\yt
|�}Wn$tk
r�|j	dd
�||<Yq\X|||<q\Wt|j
��}d}�x�t|�dk�r�x�t|�D�]�}||}|j|��pJ|j|�}
|
dk	�r�|
jd
�}d}||k�r|t||�}n�||k�r�d}nx|tjk�r�tj|}n`||k�r�|jd��r�|dd�|k�r�d	}n$d||k�r�d}nt|d|�}nd	||<}|�r�||
j�d�}|d|
j��||}d
|k�rF|||<n~yt
|�}Wn"tk
�rt|j�||<Yn
X|||<|j|�|jd��r�|dd�|k�r�|dd�}||k�r�|||<n|||<|j|��q(W�qWx.|j�D]"\}}t|t��r�|j�||<�q�W|j|�|S)z�Parse a Makefile-style file.

    A dictionary containing name/value pairs is returned.  If an
    optional dictionary is passed in as the second argument, it is
    used instead of a new dictionary.
    z"([a-zA-Z][a-zA-Z0-9_]+)\s*=\s*(.*)z\$\(([A-Za-z][A-Za-z0-9_]*)\)z\${([A-Za-z][A-Za-z0-9_]*)}Nzutf-8�surrogateescape)�encoding�errors�#�r$r!z$$�$�CFLAGS�LDFLAGS�CPPFLAGSrTF�PY_rC)rlrmrn)�re�compile�codecs�open�	readlines�
startswith�strip�matchr5�replace�int�
ValueError�listrG�lenr;�search�strrrD�end�start�remover:�
isinstance�update)�filenamerP�_variable_rx�_findvar1_rx�_findvar2_rx�done�notdone�f�lines�line�m�n�v�tmpvr8�renamed_variablesr7rA�found�item�after�krrr�_parse_makefile�s�	
















r�cCsDtrtjjtd�Sttd�r,dttjf}nd}tjjt	d�|d�S)z Return the path of the Makefile.�Makefile�abiflagszconfig-%s%sr@�stdlib)
r/rrrr�hasattrr_�_PY_VERSION_SHORTr�r)�config_dir_namerrrrMs
cCs�t�}yt||�WnJtk
r^}z.d|}t|d�rF|d|j}t|��WYdd}~XnXt�}y"t|��}t||�WdQRXWnJtk
r�}z.d|}t|d�r�|d|j}t|��WYdd}~XnXtr�|d|d<dS)z7Initialize the module as appropriate for POSIX systems.z.invalid Python installation: unable to open %s�strerrorz (%s)N�	BLDSHARED�LDSHARED)	rr��IOErrorr�r�rrsrr/)rP�makefile�e�msg�config_hr�rrr�_init_posixXs&


r�cCsVtd�|d<td�|d<td�|d<d|d<d	|d
<t|d<tjjttj��|d<d
S)z+Initialize the module as appropriate for NTr��LIBDEST�
platstdlib�
BINLIBDESTr'�	INCLUDEPYz.pyd�SOz.exe�EXE�VERSION�BINDIRN)r�_PY_VERSION_SHORT_NO_DOTrr�dirnamerr_�
executable)rPrrr�_init_non_posixtsr�cCs�|dkri}tjd�}tjd�}xx|j�}|s0P|j|�}|rz|jdd�\}}yt|�}Wntk
rnYnX|||<q"|j|�}|r"d||jd�<q"W|S)z�Parse a config.h-style file.

    A dictionary containing name/value pairs is returned.  If an
    optional dictionary is passed in as the second argument, it is
    used instead of a new dictionary.
    Nz"#define ([A-Z][A-Za-z0-9_]+) (.*)
z&/[*] #undef ([A-Z][A-Za-z0-9_]+) [*]/
r$r!r)rprq�readlinerwr5ryrz)�fprP�	define_rx�undef_rxr�r�r�r�rrrr�s(




cCs:tr$tjdkrtjjtd�}q,t}ntd�}tjj|d�S)zReturn the path of pyconfig.h.r�PCr(z
pyconfig.h)r/rr7rrrr)�inc_dirrrrr�s
cCstttj���S)z,Return a tuple containing the schemes names.)r;�sortedr.r<rrrrr
�scCs
tjd�S)z*Return a tuple containing the paths names.r%)r.Zoptionsrrrrr	�sTcCs&t�|rt||�Sttj|��SdS)z�Return a mapping containing an install scheme.

    ``scheme`` is the install scheme name. If not provided, it will
    return the default scheme for the current platform.
    N)r3rRr=r.r:)r2rP�expandrrrr
�s
cCst|||�|S)z[Return a path corresponding to the scheme.

    ``scheme`` is the install scheme name.
    )r
)r7r2rPr�rrrr�scGs�tdk�r�iattd<ttd<ttd<ttd<tdtdtd<ttd	<ttd
<ttd<ytjtd<Wntk
r�d
td<YnXt	j
d#kr�tt�t	j
dkr�tt�tj
dkr�t�td<dtkr�ttd<nttd�td<to�t	j
dk�r\t}yt	j�}Wntk
�rd}YnXt	jjtd��r\||k�r\t	jj|td�}t	jj|�td<tjdk�r�t	j�d}t|jd�d�}|dk�r�x:d$D]2}t|}tjdd|�}tjdd|�}|t|<�q�Wn�dt	jk�rt	jd}x8d%D]0}t|}tjdd|�}|d|}|t|<�q�Wtjdd
�}	tjd |	�}
|
dk	�r�|
j d!�}t	jj!|��s�x,d&D]$}t|}tjd"d|�}|t|<�q^W|�r�g}x|D]}
|j"tj|
���q�W|StSdS)'ayWith no arguments, return a dictionary of all configuration
    variables relevant for the current platform.

    On Unix, this means every variable defined in Python's installed Makefile;
    On Windows and Mac OS it's a much smaller set.

    With arguments, return a list of values that result from looking up
    each argument in the configuration variable dictionary.
    N�prefix�exec_prefix�
py_version�py_version_shortrr!�py_version_nodotrc�platbase�projectbaser�rjr�os2rMz2.6�userbase�srcdirr[r#rrm�
BASECFLAGSrl�	PY_CFLAGSr�z
-arch\s+\w+\s� z-isysroot [^ 	]*Z	ARCHFLAGSz-isysroot\s+(\S+)r$z-isysroot\s+\S+(\s|$))rr�)rmr�rlr�r�)rmr�rlr�r�)rmr�rlr�r�)#�_CONFIG_VARS�_PREFIX�_EXEC_PREFIX�_PY_VERSIONr�rr_r��AttributeErrorrr7r�r��versionrerr/�getcwdrr�isabsrrOr`�unamery�splitrpr?rDr^r}r5�exists�append)rVrc�cwdr�Zkernel_versionZ
major_versionrK�flagsZarchrlr�Zsdk�valsr7rrrr�s�












cCst�j|�S)z�Return the value of a single variable using the dictionary returned by
    'get_config_vars()'.

    Equivalent to get_config_vars().get(name)
    )rr^)r7rrrrRscCs`tjdkrnd}tjj|�}|d:kr(tjStjjd|�}tj|t|�|�j�}|dkr\dS|dkrhdStjStjd	ks�ttd
�r�tjStj	�\}}}}}|j�j
dd�}|j
d
d�}|j
dd�}|dd�dkr�d||fS|dd�dk�r(|ddk�rRd}dt|d�d|dd�f}�n*|dd�dk�rFd||fS|dd�dk�rfd|||fS|dd�d k�r�d }tj
d!�}	|	j|�}
|
�rR|
j�}�n�|dd�d"k�rRt�}|jd#�}|}
ytd$�}Wntk
�r�YnJXztjd%|j��}
Wd|j�X|
dk	�r4d&j|
jd�jd&�dd��}
|�s>|
}|�rR|}d'}|
d&d(k�rd)t�jd*d�j�k�rd+}t�jd*�}tjd,|�}ttt|���}t|�dk�r�|d}n^|d;k�r�d+}nN|d<k�r�d0}n>|d=k�r�d1}n.|d>k�r�d3}n|d?k�rd4}ntd5|f��n<|d-k�r2tjd@k�rRd/}n |dAk�rRtjdBk�rNd2}nd.}d9|||fS)Ca�Return a string that identifies the current platform.

    This is used mainly to distinguish platform-specific build directories and
    platform-specific built distributions.  Typically includes the OS name
    and version and the architecture (as supplied by 'os.uname()'),
    although the exact information included depends on the OS; eg. for IRIX
    the architecture isn't particularly important (IRIX only runs on SGI
    hardware), but for Linux the kernel version isn't particularly
    important.

    Examples of returned values:
       linux-i586
       linux-alpha (?)
       solaris-2.6-sun4u
       irix-5.3
       irix64-6.2

    Windows will return one of:
       win-amd64 (64bit Windows on AMD64 (aka x86_64, Intel64, EM64T, etc)
       win-ia64 (64bit Windows on Itanium)
       win32 (all others - specifically, sys.platform is returned)

    For other non-POSIX platforms, currently just returns 'sys.platform'.
    rz bit (r$�)�amd64z	win-amd64�itaniumzwin-ia64rMr��/rjr��_�-N��linuxz%s-%s�sunosr�5�solarisz%d.%srCr!��irix�aixz%s-%s.%s��cygwinz[\d.]+r[�MACOSX_DEPLOYMENT_TARGETz0/System/Library/CoreServices/SystemVersion.plistz=<key>ProductUserVisibleVersion</key>\s*<string>(.*?)</string>r#Zmacosxz10.4.z-archrlZfatz
-arch\s+(\S+)�i386�ppc�x86_64ZintelZfat3�ppc64Zfat64Z	universalz%Don't know machine value for archs=%r� �PowerPC�Power_Macintoshz%s-%s-%s���)r�r�)r�r�)r�r�r�)r�r�)r�r�r�r�l)r�r�l) rr7r_r�r,r`r|�lowerr�r�rxryrprqrwr5rr^rsr�r}�read�closerr�rv�findallr;r�r0rz�maxsize)r��i�j�look�osname�host�releaser��machine�rel_rer�ZcfgvarsZmacverZ
macreleaser�ZcflagsZarchsrrrr[s�
$












cCstS)N)r�rrrrr�scCsJxDtt|j���D]0\}\}}|dkr2td|�td||f�qWdS)Nrz%s: z
	%s = "%s")�	enumerater�r:�print)�title�data�indexrKrArrr�_print_dictsrcCsRtdt��tdt��tdt��t�tdt��t�tdt��dS)z*Display all information sysconfig detains.zPlatform: "%s"zPython version: "%s"z!Current installation scheme: "%s"�Paths�	VariablesN)r�rrrTrr
rrrrr�_mainsr�__main__i����i����i�)N)N)A�__doc__rrrrpr_Zos.pathrrZconfigparser�ImportErrorZConfigParser�__all__rr�rr�rr�r7r�rr r/r)r3ZRawConfigParserr.rqr>rBr�r�r�r�r�rOr�r�r�r�r��
_USER_BASErFrLrRrSrTrer�rr�r�rrr
r	r
rrrrrrrr*rrrr�<module>s� "
#
	
v

	#
_vendor/distlib/_backport/__pycache__/__init__.cpython-36.pyc000064400000000613151733136310020176 0ustar003

�Pf�@sdZdS)aModules copied from Python 3 standard libraries, for internal use only.

Individual classes and functions are found in d2._backport.misc.  Intended
usage is to always import things missing from 3.1 from that module: the
built-in/stdlib objects will be used if found.
N)�__doc__�rr�/usr/lib/python3.6/__init__.py�<module>s_vendor/distlib/_backport/__pycache__/__init__.cpython-36.opt-1.pyc000064400000000613151733136310021135 0ustar003

�Pf�@sdZdS)aModules copied from Python 3 standard libraries, for internal use only.

Individual classes and functions are found in d2._backport.misc.  Intended
usage is to always import things missing from 3.1 from that module: the
built-in/stdlib objects will be used if found.
N)�__doc__�rr�/usr/lib/python3.6/__init__.py�<module>s_vendor/distlib/_backport/__pycache__/misc.cpython-36.pyc000064400000001740151733136310017374 0ustar003

�Pf��@s�dZddlZddlZdddgZyddlmZWnek
rLddd�ZYnXyeZWn(ek
r~dd	l	m
Z
d
d�ZYnXy
ejZWnek
r�dd�ZYnXdS)
z/Backports for individual classes and functions.�N�cache_from_source�callable�fsencode)rTcCs|rdp
d}||S)N�c�o�)Zpy_file�debugZextrr�/usr/lib/python3.6/misc.pyrs)�CallablecCs
t|t�S)N)�
isinstancer
)�objrrr	rscCs<t|t�r|St|t�r&|jtj��Stdt|�j��dS)Nzexpect bytes or str, not %s)	r�bytes�str�encode�sys�getfilesystemencoding�	TypeError�type�__name__)�filenamerrr	r"s

)T)
�__doc__�osr�__all__Zimpr�ImportErrorr�	NameError�collectionsr
r�AttributeErrorrrrr	�<module>s 

_vendor/distlib/_backport/__pycache__/misc.cpython-36.opt-1.pyc000064400000001740151733136310020333 0ustar003

�Pf��@s�dZddlZddlZdddgZyddlmZWnek
rLddd�ZYnXyeZWn(ek
r~dd	l	m
Z
d
d�ZYnXy
ejZWnek
r�dd�ZYnXdS)
z/Backports for individual classes and functions.�N�cache_from_source�callable�fsencode)rFcCs|rdp
d}||S)N�c�o�)Zpy_file�debugZextrr�/usr/lib/python3.6/misc.pyrs)�CallablecCs
t|t�S)N)�
isinstancer
)�objrrr	rscCs<t|t�r|St|t�r&|jtj��Stdt|�j��dS)Nzexpect bytes or str, not %s)	r�bytes�str�encode�sys�getfilesystemencoding�	TypeError�type�__name__)�filenamerrr	r"s

)F)
�__doc__�osr�__all__Zimpr�ImportErrorr�	NameError�collectionsr
r�AttributeErrorrrrr	�<module>s 

_vendor/distlib/_backport/__init__.py000064400000000422151733136310013710 0ustar00"""Modules copied from Python 3 standard libraries, for internal use only.

Individual classes and functions are found in d2._backport.misc.  Intended
usage is to always import things missing from 3.1 from that module: the
built-in/stdlib objects will be used if found.
"""
_vendor/distlib/_backport/misc.py000064400000001713151733136310013110 0ustar00# -*- coding: utf-8 -*-
#
# Copyright (C) 2012 The Python Software Foundation.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
"""Backports for individual classes and functions."""

import os
import sys

__all__ = ['cache_from_source', 'callable', 'fsencode']


try:
    from imp import cache_from_source
except ImportError:
    # Fallback for interpreters without imp.cache_from_source: map a .py
    # filename to its cached bytecode name.
    def cache_from_source(py_file, debug=__debug__):
        """Return the cached bytecode filename for *py_file*.

        Appends 'c' (.pyc) in debug mode, 'o' (.pyo) when optimized.
        """
        # A conditional expression instead of the fragile "x and a or b"
        # idiom, which silently misbehaves when a is falsy.
        return py_file + ('c' if debug else 'o')


# 'callable' was removed in Python 3.0/3.1; fall back to an isinstance()
# check against collections.Callable where the builtin is missing.
try:
    callable = callable
except NameError:
    from collections import Callable

    def callable(obj):
        # Mirror the removed builtin: True when obj can be called.
        return isinstance(obj, Callable)


try:
    fsencode = os.fsencode
except AttributeError:
    # Fallback for interpreters without os.fsencode (added in 3.2).
    def fsencode(filename):
        """Encode *filename* to bytes using the filesystem encoding."""
        if isinstance(filename, bytes):
            return filename
        if isinstance(filename, str):
            return filename.encode(sys.getfilesystemencoding())
        raise TypeError("expect bytes or str, not %s" %
                        type(filename).__name__)
_vendor/distlib/_backport/sysconfig.py000064400000064513151733136310014170 0ustar00# -*- coding: utf-8 -*-
#
# Copyright (C) 2012 The Python Software Foundation.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
"""Access to Python's configuration information."""

import codecs
import os
import re
import sys
from os.path import pardir, realpath
try:
    import configparser
except ImportError:
    import ConfigParser as configparser


__all__ = [
    'get_config_h_filename',
    'get_config_var',
    'get_config_vars',
    'get_makefile_filename',
    'get_path',
    'get_path_names',
    'get_paths',
    'get_platform',
    'get_python_version',
    'get_scheme_names',
    'parse_config_h',
]


def _safe_realpath(path):
    """Return realpath(path), or *path* unchanged if it cannot be resolved."""
    try:
        resolved = realpath(path)
    except OSError:
        return path
    return resolved


# Locate the project base: normally the directory holding the interpreter.
if sys.executable:
    _PROJECT_BASE = os.path.dirname(_safe_realpath(sys.executable))
else:
    # sys.executable can be empty if argv[0] has been changed and Python is
    # unable to retrieve the real program name
    _PROJECT_BASE = _safe_realpath(os.getcwd())

# On Windows in-tree builds the executable lives in a build subdirectory
# (PCbuild, PC/VS*, PCbuild/amd64); walk up to the real project root.
if os.name == "nt" and "pcbuild" in _PROJECT_BASE[-8:].lower():
    _PROJECT_BASE = _safe_realpath(os.path.join(_PROJECT_BASE, pardir))
# PC/VS7.1
if os.name == "nt" and "\\pc\\v" in _PROJECT_BASE[-10:].lower():
    _PROJECT_BASE = _safe_realpath(os.path.join(_PROJECT_BASE, pardir, pardir))
# PC/AMD64
if os.name == "nt" and "\\pcbuild\\amd64" in _PROJECT_BASE[-14:].lower():
    _PROJECT_BASE = _safe_realpath(os.path.join(_PROJECT_BASE, pardir, pardir))


def is_python_build():
    """Return True when running from a CPython source checkout.

    A source tree is identified by the presence of Modules/Setup.dist or
    Modules/Setup.local under the project base.
    """
    return any(
        os.path.isfile(os.path.join(_PROJECT_BASE, "Modules", name))
        for name in ("Setup.dist", "Setup.local"))

_PYTHON_BUILD = is_python_build()

# Tracks whether sysconfig.cfg has been loaded into _SCHEMES yet.
_cfg_read = False

def _ensure_cfg_read():
    """Load sysconfig.cfg into _SCHEMES on first use (idempotent)."""
    global _cfg_read
    if _cfg_read:
        return
    from ..resources import finder
    backport_package = __name__.rsplit('.', 1)[0]
    _cfgfile = finder(backport_package).find('sysconfig.cfg')
    assert _cfgfile, 'sysconfig.cfg exists'
    with _cfgfile.as_stream() as s:
        _SCHEMES.readfp(s)
    if _PYTHON_BUILD:
        # When building Python itself, headers come from the source tree
        # rather than an installed prefix.
        for scheme in ('posix_prefix', 'posix_home'):
            _SCHEMES.set(scheme, 'include', '{srcdir}/Include')
            _SCHEMES.set(scheme, 'platinclude', '{projectbase}/.')

    _cfg_read = True


# Parsed contents of sysconfig.cfg; populated lazily by _ensure_cfg_read().
_SCHEMES = configparser.RawConfigParser()
# Matches {name} substitution tokens in scheme path templates.
_VAR_REPL = re.compile(r'\{([^{]*?)\}')

def _expand_globals(config):
    """Fold [globals] defaults into every other section of *config* and
    expand {name} tokens using values local to each section."""
    _ensure_cfg_read()
    # Avoid shadowing the builtin 'globals' used by the original code.
    if config.has_section('globals'):
        defaults = config.items('globals')
    else:
        defaults = tuple()

    for section in config.sections():
        if section == 'globals':
            continue
        for option, value in defaults:
            # Options set explicitly in a section win over the defaults.
            if not config.has_option(section, option):
                config.set(section, option, value)
    config.remove_section('globals')

    # Now expand {name} references, section by section.
    for section in config.sections():
        variables = dict(config.items(section))

        def _replacer(matchobj, variables=variables):
            name = matchobj.group(1)
            if name in variables:
                return variables[name]
            return matchobj.group(0)

        for option, value in config.items(section):
            config.set(section, option, _VAR_REPL.sub(_replacer, value))

#_expand_globals(_SCHEMES)

_PY_VERSION = sys.version.split()[0]
# Build the short version strings from sys.version_info rather than slicing
# sys.version: string slicing breaks once a component has two digits
# (e.g. '3.10'[:3] == '3.1', and '3.10.1'[0] + '3.10.1'[2] == '31').
_PY_VERSION_SHORT = '%s.%s' % sys.version_info[:2]
_PY_VERSION_SHORT_NO_DOT = '%s%s' % sys.version_info[:2]
_PREFIX = os.path.normpath(sys.prefix)
_EXEC_PREFIX = os.path.normpath(sys.exec_prefix)
# Lazily-populated module caches (filled in on first use).
_CONFIG_VARS = None
_USER_BASE = None


def _subst_vars(path, local_vars):
    """In the string `path`, replace tokens like {some.thing} with the
    corresponding value from the map `local_vars`, then from os.environ.

    If there is no corresponding value, leave the token unchanged.
    """
    def _replacer(m):
        token = m.group(1)
        if token in local_vars:
            return local_vars[token]
        if token in os.environ:
            return os.environ[token]
        return m.group(0)

    return _VAR_REPL.sub(_replacer, path)


def _extend_dict(target_dict, other_dict):
    """Copy into *target_dict* every item of *other_dict* whose key is
    not already present; existing entries are never overwritten."""
    existing = target_dict.keys()
    for key, value in other_dict.items():
        if key not in existing:
            target_dict[key] = value


def _expand_vars(scheme, vars):
    """Return the paths of *scheme* with their {var} references expanded.

    *vars* (a dict, or None) is extended in place with the configuration
    variables before expansion; values it already holds take precedence.
    """
    if vars is None:
        vars = {}
    _extend_dict(vars, get_config_vars())

    expanded = {}
    for option, template in _SCHEMES.items(scheme):
        if os.name in ('posix', 'nt'):
            # Resolve a leading "~" before substituting variables.
            template = os.path.expanduser(template)
        expanded[option] = os.path.normpath(_subst_vars(template, vars))
    return expanded


def format_value(value, vars):
    """Expand ``{name}`` tokens in *value* using the *vars* mapping.

    Tokens with no entry in *vars* are left untouched.
    """
    def _sub(match):
        key = match.group(1)
        return vars[key] if key in vars else match.group(0)
    return _VAR_REPL.sub(_sub, value)


def _get_default_scheme():
    """Name of the install scheme used by default on this platform."""
    # The default scheme for posix is 'posix_prefix'; every other
    # platform uses its os.name ('nt', 'os2', ...) directly.
    return 'posix_prefix' if os.name == 'posix' else os.name


def _getuserbase():
    """Return the per-user base directory (PEP 370 style).

    $PYTHONUSERBASE, when set, wins on every platform.
    """
    env_base = os.environ.get("PYTHONUSERBASE", None)

    def joinuser(*args):
        return os.path.expanduser(os.path.join(*args))

    # what about 'os2emx', 'riscos' ?
    if os.name == "nt":
        if env_base:
            return env_base
        return joinuser(os.environ.get("APPDATA") or "~", "Python")

    if sys.platform == "darwin":
        framework = get_config_var("PYTHONFRAMEWORK")
        if framework:
            if env_base:
                return env_base
            return joinuser("~", "Library", framework,
                            "%d.%d" % sys.version_info[:2])

    # Plain POSIX (and non-framework darwin) default.
    return env_base if env_base else joinuser("~", ".local")


def _parse_makefile(filename, vars=None):
    """Parse a Makefile-style file.

    A dictionary containing name/value pairs is returned.  If an
    optional dictionary is passed in as the second argument, it is
    used instead of a new dictionary.
    """
    # Regexes needed for parsing Makefile (and similar syntaxes,
    # like old-style Setup files).  Raw strings: the previous plain
    # strings relied on "\s"/"\$" being invalid escapes, which raises
    # SyntaxWarning on modern Python.
    _variable_rx = re.compile(r"([a-zA-Z][a-zA-Z0-9_]+)\s*=\s*(.*)")
    _findvar1_rx = re.compile(r"\$\(([A-Za-z][A-Za-z0-9_]*)\)")
    _findvar2_rx = re.compile(r"\${([A-Za-z][A-Za-z0-9_]*)}")

    if vars is None:
        vars = {}
    done = {}        # fully resolved name -> value
    notdone = {}     # values still containing $(...)/${...} references

    # surrogateescape keeps undecodable bytes round-trippable.
    with codecs.open(filename, encoding='utf-8', errors="surrogateescape") as f:
        lines = f.readlines()

    for line in lines:
        if line.startswith('#') or line.strip() == '':
            continue
        m = _variable_rx.match(line)
        if m:
            n, v = m.group(1, 2)
            v = v.strip()
            # `$$' is a literal `$' in make
            tmpv = v.replace('$$', '')

            if "$" in tmpv:
                notdone[n] = v
            else:
                try:
                    v = int(v)
                except ValueError:
                    # insert literal `$'
                    done[n] = v.replace('$$', '$')
                else:
                    done[n] = v

    # do variable interpolation here
    variables = list(notdone.keys())

    # Variables with a 'PY_' prefix in the makefile. These need to
    # be made available without that prefix through sysconfig.
    # Special care is needed to ensure that variable expansion works, even
    # if the expansion uses the name without a prefix.
    renamed_variables = ('CFLAGS', 'LDFLAGS', 'CPPFLAGS')

    while len(variables) > 0:
        for name in tuple(variables):
            value = notdone[name]
            m = _findvar1_rx.search(value) or _findvar2_rx.search(value)
            if m is not None:
                n = m.group(1)
                found = True
                if n in done:
                    item = str(done[n])
                elif n in notdone:
                    # get it on a subsequent round
                    found = False
                elif n in os.environ:
                    # do it like make: fall back to environment
                    item = os.environ[n]

                elif n in renamed_variables:
                    if (name.startswith('PY_') and
                        name[3:] in renamed_variables):
                        item = ""

                    elif 'PY_' + n in notdone:
                        found = False

                    else:
                        item = str(done['PY_' + n])

                else:
                    done[n] = item = ""

                if found:
                    after = value[m.end():]
                    value = value[:m.start()] + item + after
                    if "$" in after:
                        # More references remain; retry on a later pass.
                        notdone[name] = value
                    else:
                        try:
                            value = int(value)
                        except ValueError:
                            done[name] = value.strip()
                        else:
                            done[name] = value
                        variables.remove(name)

                        # Also publish PY_FOO as FOO (without clobbering
                        # an explicit FOO).
                        if (name.startswith('PY_') and
                            name[3:] in renamed_variables):

                            name = name[3:]
                            if name not in done:
                                done[name] = value

            else:
                # bogus variable reference (e.g. "prefix=$/opt/python");
                # just drop it since we can't deal
                done[name] = value
                variables.remove(name)

    # strip spurious spaces
    for k, v in done.items():
        if isinstance(v, str):
            done[k] = v.strip()

    # save the results in the global dictionary
    vars.update(done)
    return vars


def get_makefile_filename():
    """Return the path of the Makefile."""
    if _PYTHON_BUILD:
        # Running from a source build: the Makefile sits in the project root.
        return os.path.join(_PROJECT_BASE, "Makefile")
    if hasattr(sys, 'abiflags'):
        config_dir_name = 'config-%s%s' % (_PY_VERSION_SHORT, sys.abiflags)
    else:
        config_dir_name = 'config'
    return os.path.join(get_path('stdlib'), config_dir_name, 'Makefile')


def _init_posix(vars):
    """Initialize the module as appropriate for POSIX systems."""
    def _invalid(path, exc):
        # Shared error wrapper for both files we must be able to read.
        message = "invalid Python installation: unable to open %s" % path
        if hasattr(exc, "strerror"):
            message = message + " (%s)" % exc.strerror
        raise IOError(message)

    # load the installed Makefile:
    makefile = get_makefile_filename()
    try:
        _parse_makefile(makefile, vars)
    except IOError as e:
        _invalid(makefile, e)
    # load the installed pyconfig.h:
    config_h = get_config_h_filename()
    try:
        with open(config_h) as f:
            parse_config_h(f, vars)
    except IOError as e:
        _invalid(config_h, e)
    # On AIX, there are wrong paths to the linker scripts in the Makefile
    # -- these paths are relative to the Python source, but when installed
    # the scripts are in another directory.
    if _PYTHON_BUILD:
        vars['LDSHARED'] = vars['BLDSHARED']


def _init_non_posix(vars):
    """Initialize the module as appropriate for NT"""
    # set basic install directories plus Windows-specific build values
    vars.update({
        'LIBDEST': get_path('stdlib'),
        'BINLIBDEST': get_path('platstdlib'),
        'INCLUDEPY': get_path('include'),
        'SO': '.pyd',
        'EXE': '.exe',
        'VERSION': _PY_VERSION_SHORT_NO_DOT,
        'BINDIR': os.path.dirname(_safe_realpath(sys.executable)),
    })

#
# public APIs
#


def parse_config_h(fp, vars=None):
    """Parse a config.h-style file.

    A dictionary containing name/value pairs is returned.  If an
    optional dictionary is passed in as the second argument, it is
    used instead of a new dictionary.
    """
    if vars is None:
        vars = {}
    define_rx = re.compile("#define ([A-Z][A-Za-z0-9_]+) (.*)\n")
    undef_rx = re.compile("/[*] #undef ([A-Z][A-Za-z0-9_]+) [*]/\n")

    # readline() returns '' at EOF, which terminates the iterator.
    for line in iter(fp.readline, ''):
        defined = define_rx.match(line)
        if defined:
            name, raw = defined.group(1, 2)
            try:
                raw = int(raw)
            except ValueError:
                pass  # keep the value as a string
            vars[name] = raw
            continue
        undefined = undef_rx.match(line)
        if undefined:
            vars[undefined.group(1)] = 0
    return vars


def get_config_h_filename():
    """Return the path of pyconfig.h."""
    if not _PYTHON_BUILD:
        # Installed Python: pyconfig.h lives in the platinclude directory.
        inc_dir = get_path('platinclude')
    elif os.name == "nt":
        inc_dir = os.path.join(_PROJECT_BASE, "PC")
    else:
        inc_dir = _PROJECT_BASE
    return os.path.join(inc_dir, 'pyconfig.h')


def get_scheme_names():
    """Return a tuple containing the schemes names."""
    names = sorted(_SCHEMES.sections())
    return tuple(names)


def get_path_names():
    """Return a tuple containing the paths names."""
    # xxx see if we want a static list
    # All schemes define the same path keys; posix_prefix is representative.
    reference_scheme = 'posix_prefix'
    return _SCHEMES.options(reference_scheme)


def get_paths(scheme=_get_default_scheme(), vars=None, expand=True):
    """Return a mapping containing an install scheme.

    ``scheme`` is the install scheme name. If not provided, it will
    return the default scheme for the current platform.  With
    ``expand`` false the raw, unexpanded templates are returned.
    """
    _ensure_cfg_read()
    if not expand:
        return dict(_SCHEMES.items(scheme))
    return _expand_vars(scheme, vars)


def get_path(name, scheme=_get_default_scheme(), vars=None, expand=True):
    """Return a path corresponding to the scheme.

    ``scheme`` is the install scheme name.
    """
    paths = get_paths(scheme, vars, expand)
    return paths[name]


def get_config_vars(*args):
    """With no arguments, return a dictionary of all configuration
    variables relevant for the current platform.

    On Unix, this means every variable defined in Python's installed Makefile;
    On Windows and Mac OS it's a much smaller set.

    With arguments, return a list of values that result from looking up
    each argument in the configuration variable dictionary.
    """
    global _CONFIG_VARS
    if _CONFIG_VARS is None:
        _CONFIG_VARS = {}
        # Normalized versions of prefix and exec_prefix are handy to have;
        # in fact, these are the standard versions used most places in the
        # distutils2 module.
        _CONFIG_VARS['prefix'] = _PREFIX
        _CONFIG_VARS['exec_prefix'] = _EXEC_PREFIX
        _CONFIG_VARS['py_version'] = _PY_VERSION
        _CONFIG_VARS['py_version_short'] = _PY_VERSION_SHORT
        # Use sys.version_info rather than slicing the version string:
        # "_PY_VERSION[0] + _PY_VERSION[2]" produced "31" for Python 3.10.
        _CONFIG_VARS['py_version_nodot'] = '%d%d' % sys.version_info[:2]
        _CONFIG_VARS['base'] = _PREFIX
        _CONFIG_VARS['platbase'] = _EXEC_PREFIX
        _CONFIG_VARS['projectbase'] = _PROJECT_BASE
        try:
            _CONFIG_VARS['abiflags'] = sys.abiflags
        except AttributeError:
            # sys.abiflags may not be defined on all platforms.
            _CONFIG_VARS['abiflags'] = ''

        if os.name in ('nt', 'os2'):
            _init_non_posix(_CONFIG_VARS)
        if os.name == 'posix':
            _init_posix(_CONFIG_VARS)
        # Setting 'userbase' is done below the call to the
        # init function to enable using 'get_config_var' in
        # the init-function.
        if sys.version >= '2.6':
            _CONFIG_VARS['userbase'] = _getuserbase()

        if 'srcdir' not in _CONFIG_VARS:
            _CONFIG_VARS['srcdir'] = _PROJECT_BASE
        else:
            _CONFIG_VARS['srcdir'] = _safe_realpath(_CONFIG_VARS['srcdir'])

        # Convert srcdir into an absolute path if it appears necessary.
        # Normally it is relative to the build directory.  However, during
        # testing, for example, we might be running a non-installed python
        # from a different directory.
        if _PYTHON_BUILD and os.name == "posix":
            base = _PROJECT_BASE
            try:
                cwd = os.getcwd()
            except OSError:
                cwd = None
            if (not os.path.isabs(_CONFIG_VARS['srcdir']) and
                base != cwd):
                # srcdir is relative and we are not in the same directory
                # as the executable. Assume executable is in the build
                # directory and make srcdir absolute.
                srcdir = os.path.join(base, _CONFIG_VARS['srcdir'])
                _CONFIG_VARS['srcdir'] = os.path.normpath(srcdir)

        if sys.platform == 'darwin':
            kernel_version = os.uname()[2]  # Kernel version (8.4.3)
            major_version = int(kernel_version.split('.')[0])

            if major_version < 8:
                # On macOS before 10.4, check if -arch and -isysroot
                # are in CFLAGS or LDFLAGS and remove them if they are.
                # This is needed when building extensions on a 10.3 system
                # using a universal build of python.
                for key in ('LDFLAGS', 'BASECFLAGS',
                        # a number of derived variables. These need to be
                        # patched up as well.
                        'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'):
                    flags = _CONFIG_VARS[key]
                    flags = re.sub(r'-arch\s+\w+\s', ' ', flags)
                    flags = re.sub(r'-isysroot [^ \t]*', ' ', flags)
                    _CONFIG_VARS[key] = flags
            else:
                # Allow the user to override the architecture flags using
                # an environment variable.
                # NOTE: This name was introduced by Apple in OSX 10.5 and
                # is used by several scripting languages distributed with
                # that OS release.
                if 'ARCHFLAGS' in os.environ:
                    arch = os.environ['ARCHFLAGS']
                    for key in ('LDFLAGS', 'BASECFLAGS',
                        # a number of derived variables. These need to be
                        # patched up as well.
                        'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'):

                        flags = _CONFIG_VARS[key]
                        flags = re.sub(r'-arch\s+\w+\s', ' ', flags)
                        flags = flags + ' ' + arch
                        _CONFIG_VARS[key] = flags

                # If we're on OSX 10.5 or later and the user tries to
                # compiles an extension using an SDK that is not present
                # on the current machine it is better to not use an SDK
                # than to fail.
                #
                # The major usecase for this is users using a Python.org
                # binary installer  on OSX 10.6: that installer uses
                # the 10.4u SDK, but that SDK is not installed by default
                # when you install Xcode.
                #
                CFLAGS = _CONFIG_VARS.get('CFLAGS', '')
                m = re.search(r'-isysroot\s+(\S+)', CFLAGS)
                if m is not None:
                    sdk = m.group(1)
                    if not os.path.exists(sdk):
                        for key in ('LDFLAGS', 'BASECFLAGS',
                             # a number of derived variables. These need to be
                             # patched up as well.
                            'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'):

                            flags = _CONFIG_VARS[key]
                            flags = re.sub(r'-isysroot\s+\S+(\s|$)', ' ', flags)
                            _CONFIG_VARS[key] = flags

    if args:
        vals = []
        for name in args:
            vals.append(_CONFIG_VARS.get(name))
        return vals
    else:
        return _CONFIG_VARS


def get_config_var(name):
    """Return the value of a single variable using the dictionary returned by
    'get_config_vars()'.

    Equivalent to get_config_vars().get(name)
    """
    all_vars = get_config_vars()
    return all_vars.get(name)


def get_platform():
    """Return a string that identifies the current platform.

    This is used mainly to distinguish platform-specific build directories and
    platform-specific built distributions.  Typically includes the OS name
    and version and the architecture (as supplied by 'os.uname()'),
    although the exact information included depends on the OS; eg. for IRIX
    the architecture isn't particularly important (IRIX only runs on SGI
    hardware), but for Linux the kernel version isn't particularly
    important.

    Examples of returned values:
       linux-i586
       linux-alpha (?)
       solaris-2.6-sun4u
       irix-5.3
       irix64-6.2

    Windows will return one of:
       win-amd64 (64bit Windows on AMD64 (aka x86_64, Intel64, EM64T, etc)
       win-ia64 (64bit Windows on Itanium)
       win32 (all others - specifically, sys.platform is returned)

    For other non-POSIX platforms, currently just returns 'sys.platform'.
    """
    if os.name == 'nt':
        # sniff sys.version for architecture.
        prefix = " bit ("
        i = sys.version.find(prefix)
        if i == -1:
            return sys.platform
        j = sys.version.find(")", i)
        look = sys.version[i+len(prefix):j].lower()
        if look == 'amd64':
            return 'win-amd64'
        if look == 'itanium':
            return 'win-ia64'
        return sys.platform

    if os.name != "posix" or not hasattr(os, 'uname'):
        # XXX what about the architecture? NT is Intel or Alpha,
        # Mac OS is M68k or PPC, etc.
        return sys.platform

    # Try to distinguish various flavours of Unix
    osname, host, release, version, machine = os.uname()

    # Convert the OS name to lowercase, remove '/' characters
    # (to accommodate BSD/OS), and translate spaces (for "Power Macintosh")
    osname = osname.lower().replace('/', '')
    machine = machine.replace(' ', '_')
    machine = machine.replace('/', '-')

    if osname[:5] == "linux":
        # At least on Linux/Intel, 'machine' is the processor --
        # i386, etc.
        # XXX what about Alpha, SPARC, etc?
        return "%s-%s" % (osname, machine)
    elif osname[:5] == "sunos":
        if release[0] >= "5":           # SunOS 5 == Solaris 2
            osname = "solaris"
            release = "%d.%s" % (int(release[0]) - 3, release[2:])
        # fall through to standard osname-release-machine representation
    elif osname[:4] == "irix":              # could be "irix64"!
        return "%s-%s" % (osname, release)
    elif osname[:3] == "aix":
        return "%s-%s.%s" % (osname, version, release)
    elif osname[:6] == "cygwin":
        osname = "cygwin"
        rel_re = re.compile(r'[\d.]+')
        m = rel_re.match(release)
        if m:
            release = m.group()
    elif osname[:6] == "darwin":
        #
        # For our purposes, we'll assume that the system version from
        # distutils' perspective is what MACOSX_DEPLOYMENT_TARGET is set
        # to. This makes the compatibility story a bit more sane because the
        # machine is going to compile and link as if it were
        # MACOSX_DEPLOYMENT_TARGET.
        cfgvars = get_config_vars()
        macver = cfgvars.get('MACOSX_DEPLOYMENT_TARGET')

        # Always calculate the release of the running machine,
        # needed to determine if we can build fat binaries or not.
        # (This used to be wrapped in a dead "if True:" block.)
        macrelease = macver
        # Get the system version. Reading this plist is a documented
        # way to get the system version (see the documentation for
        # the Gestalt Manager)
        try:
            f = open('/System/Library/CoreServices/SystemVersion.plist')
        except IOError:
            # We're on a plain darwin box, fall back to the default
            # behaviour.
            pass
        else:
            try:
                m = re.search(r'<key>ProductUserVisibleVersion</key>\s*'
                              r'<string>(.*?)</string>', f.read())
            finally:
                f.close()
            if m is not None:
                macrelease = '.'.join(m.group(1).split('.')[:2])
            # else: fall back to the default behaviour

        if not macver:
            macver = macrelease

        if macver:
            release = macver
            osname = "macosx"

            if ((macrelease + '.') >= '10.4.' and
                '-arch' in get_config_vars().get('CFLAGS', '').strip()):
                # The universal build will build fat binaries, but not on
                # systems before 10.4
                #
                # Try to detect 4-way universal builds, those have machine-type
                # 'universal' instead of 'fat'.

                machine = 'fat'
                cflags = get_config_vars().get('CFLAGS')

                archs = re.findall(r'-arch\s+(\S+)', cflags)
                archs = tuple(sorted(set(archs)))

                if len(archs) == 1:
                    machine = archs[0]
                elif archs == ('i386', 'ppc'):
                    machine = 'fat'
                elif archs == ('i386', 'x86_64'):
                    machine = 'intel'
                elif archs == ('i386', 'ppc', 'x86_64'):
                    machine = 'fat3'
                elif archs == ('ppc64', 'x86_64'):
                    machine = 'fat64'
                elif archs == ('i386', 'ppc', 'ppc64', 'x86_64'):
                    machine = 'universal'
                else:
                    raise ValueError(
                       "Don't know machine value for archs=%r" % (archs,))

            elif machine == 'i386':
                # On OSX the machine type returned by uname is always the
                # 32-bit variant, even if the executable architecture is
                # the 64-bit variant
                if sys.maxsize >= 2**32:
                    machine = 'x86_64'

            elif machine in ('PowerPC', 'Power_Macintosh'):
                # Pick a sane name for the PPC architecture.
                # See 'i386' case
                if sys.maxsize >= 2**32:
                    machine = 'ppc64'
                else:
                    machine = 'ppc'

    return "%s-%s-%s" % (osname, release, machine)


def get_python_version():
    """Return the 'major.minor' Python version string (e.g. '3.6')."""
    return _PY_VERSION_SHORT


def _print_dict(title, data):
    """Print *data*'s items sorted by key, under a *title* header.

    Nothing at all is printed (not even the title) for an empty mapping.
    """
    first = True
    for key, value in sorted(data.items()):
        if first:
            print('%s: ' % (title))
            first = False
        print('\t%s = "%s"' % (key, value))


def _main():
    """Display all the information this module detects."""
    for label, producer in (('Platform', get_platform),
                            ('Python version', get_python_version),
                            ('Current installation scheme',
                             _get_default_scheme)):
        print('%s: "%s"' % (label, producer()))
    print()
    _print_dict('Paths', get_paths())
    print()
    _print_dict('Variables', get_config_vars())


# Allow running the module directly to dump the detected configuration.
if __name__ == '__main__':
    _main()
_vendor/distlib/_backport/sysconfig.cfg000064400000005071151733136310014271 0ustar00[posix_prefix]
# Configuration directories.  Some of these come straight out of the
# configure script.  They are for implementing the other variables, not to
# be used directly in [resource_locations].
confdir = /etc
datadir = /usr/share
libdir = /usr/lib
statedir = /var
# User resource directory
local = ~/.local/{distribution.name}

stdlib = {base}/lib/python{py_version_short}
platstdlib = {platbase}/lib/python{py_version_short}
purelib = {base}/lib/python{py_version_short}/site-packages
platlib = {platbase}/lib/python{py_version_short}/site-packages
include = {base}/include/python{py_version_short}{abiflags}
platinclude = {platbase}/include/python{py_version_short}{abiflags}
data = {base}

[posix_home]
stdlib = {base}/lib/python
platstdlib = {base}/lib/python
purelib = {base}/lib/python
platlib = {base}/lib/python
include = {base}/include/python
platinclude = {base}/include/python
scripts = {base}/bin
data = {base}

[nt]
stdlib = {base}/Lib
platstdlib = {base}/Lib
purelib = {base}/Lib/site-packages
platlib = {base}/Lib/site-packages
include = {base}/Include
platinclude = {base}/Include
scripts = {base}/Scripts
data = {base}

[os2]
stdlib = {base}/Lib
platstdlib = {base}/Lib
purelib = {base}/Lib/site-packages
platlib = {base}/Lib/site-packages
include = {base}/Include
platinclude = {base}/Include
scripts = {base}/Scripts
data = {base}

[os2_home]
stdlib = {userbase}/lib/python{py_version_short}
platstdlib = {userbase}/lib/python{py_version_short}
purelib = {userbase}/lib/python{py_version_short}/site-packages
platlib = {userbase}/lib/python{py_version_short}/site-packages
include = {userbase}/include/python{py_version_short}
scripts = {userbase}/bin
data = {userbase}

[nt_user]
stdlib = {userbase}/Python{py_version_nodot}
platstdlib = {userbase}/Python{py_version_nodot}
purelib = {userbase}/Python{py_version_nodot}/site-packages
platlib = {userbase}/Python{py_version_nodot}/site-packages
include = {userbase}/Python{py_version_nodot}/Include
scripts = {userbase}/Scripts
data = {userbase}

[posix_user]
stdlib = {userbase}/lib/python{py_version_short}
platstdlib = {userbase}/lib/python{py_version_short}
purelib = {userbase}/lib/python{py_version_short}/site-packages
platlib = {userbase}/lib/python{py_version_short}/site-packages
include = {userbase}/include/python{py_version_short}
scripts = {userbase}/bin
data = {userbase}

[osx_framework_user]
stdlib = {userbase}/lib/python
platstdlib = {userbase}/lib/python
purelib = {userbase}/lib/python/site-packages
platlib = {userbase}/lib/python/site-packages
include = {userbase}/include
scripts = {userbase}/bin
data = {userbase}
_vendor/webencodings/__pycache__/x_user_defined.cpython-36.opt-1.pyc000064400000005025151733136310021434 0ustar003

�Pf��	@s�dZddlmZddlZGdd�dej�ZGdd�dej�ZGdd	�d	ej�ZGd
d�deej�ZGdd
�d
eej�Zej	de�j
e�jeeeed�ZdZ
eje
�ZdS)z�

    webencodings.x_user_defined
    ~~~~~~~~~~~~~~~~~~~~~~~~~~~

    An implementation of the x-user-defined encoding.

    :copyright: Copyright 2012 by Simon Sapin
    :license: BSD, see LICENSE for details.

�)�unicode_literalsNc@s eZdZddd�Zddd�ZdS)	�Codec�strictcCstj||t�S)N)�codecs�charmap_encode�encoding_table)�self�input�errors�r�$/usr/lib/python3.6/x_user_defined.py�encodeszCodec.encodecCstj||t�S)N)r�charmap_decode�decoding_table)rr	r
rrr�decodeszCodec.decodeN)r)r)�__name__�
__module__�__qualname__r
rrrrrrs
rc@seZdZddd�ZdS)�IncrementalEncoderFcCstj||jt�dS)Nr)rrr
r)rr	�finalrrrr
szIncrementalEncoder.encodeN)F)rrrr
rrrrrsrc@seZdZddd�ZdS)�IncrementalDecoderFcCstj||jt�dS)Nr)rrr
r)rr	rrrrr$szIncrementalDecoder.decodeN)F)rrrrrrrrr#src@seZdZdS)�StreamWriterN)rrrrrrrr(src@seZdZdS)�StreamReaderN)rrrrrrrr,srzx-user-defined)�namer
r�incrementalencoder�incrementaldecoder�streamreader�streamwriteru	

 !"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~)�__doc__Z
__future__rrrrrrr�	CodecInfor
rZ
codec_infor�
charmap_buildrrrrr�<module>s&		_vendor/webencodings/__pycache__/mklabels.cpython-36.opt-1.pyc000064400000003376151733136310020252 0ustar003

�Pf�@sfdZddlZyddlmZWn ek
r<ddlmZYnXdd�Zdd�Zedkrbe	ed	��dS)
z�

    webencodings.mklabels
    ~~~~~~~~~~~~~~~~~~~~~

    Regenarate the webencodings.labels module.

    :copyright: Copyright 2012 by Simon Sapin
    :license: BSD, see LICENSE for details.

�N)�urlopencCs|S)N�)�stringrr�/usr/lib/python3.6/mklabels.py�assert_lowersrcsfdg}dd�tjt|�j�jd��D�}tdd�|D���|j�fdd�|D��|jd�d	j|�S)
Na"""

    webencodings.labels
    ~~~~~~~~~~~~~~~~~~~

    Map encoding labels to their name.

    :copyright: Copyright 2012 by Simon Sapin
    :license: BSD, see LICENSE for details.

"""

# XXX Do not edit!
# This file is automatically generated by mklabels.py

LABELS = {
cSsLg|]D}|dD]6}|dD](}tt|��jd�t|d�jd�f�qqqS)Z	encodings�labels�u�name)�reprr�lstrip)�.0�category�encoding�labelrrr�
<listcomp>-szgenerate.<locals>.<listcomp>�asciicss|]\}}t|�VqdS)N)�len)rrr	rrr�	<genexpr>2szgenerate.<locals>.<genexpr>c3s,|]$\}}d|d�t|�|fVqdS)z    %s:%s %s,
� N)r)rrr	)�max_lenrrr4s�}�)	�json�loadsr�read�decode�max�extend�append�join)Zurl�partsrr)rr�generates


r!�__main__z.http://encoding.spec.whatwg.org/encodings.json)
�__doc__rZurllibr�ImportErrorZurllib.requestrr!�__name__�printrrrr�<module>s!_vendor/webencodings/__pycache__/__init__.cpython-36.pyc000064400000022570151733136310017255 0ustar003

�PfP)�@s�dZddlmZddlZddlmZdZddd	d
d�ZiZdd
�Z	dd�Z
dd�ZGdd�de�Z
e
d�Ze
d�Ze
d�Zd+dd�Zdd�Zedfdd�Zd,dd �Zd!d"�Zedfd#d$�Zd%d&�ZGd'd(�d(e�ZGd)d*�d*e�ZdS)-a

    webencodings
    ~~~~~~~~~~~~

    This is a Python implementation of the `WHATWG Encoding standard
    <http://encoding.spec.whatwg.org/>`. See README for details.

    :copyright: Copyright 2012 by Simon Sapin
    :license: BSD, see LICENSE for details.

�)�unicode_literalsN�)�LABELSz0.5z
iso-8859-8zmac-cyrillicz	mac-romanZcp874)ziso-8859-8-izx-mac-cyrillic�	macintoshzwindows-874cCs|jd�j�jd�S)a9Transform (only) ASCII letters to lower case: A-Z is mapped to a-z.

    :param string: An Unicode string.
    :returns: A new Unicode string.

    This is used for `ASCII case-insensitive
    <http://encoding.spec.whatwg.org/#ascii-case-insensitive>`_
    matching of encoding labels.
    The same matching is also used, among other things,
    for `CSS keywords <http://dev.w3.org/csswg/css-values/#keywords>`_.

    This is different from the :meth:`~py:str.lower` method of Unicode strings
    which also affect non-ASCII characters,
    sometimes mapping them into the ASCII range:

        >>> keyword = u'Bac\N{KELVIN SIGN}ground'
        >>> assert keyword.lower() == u'background'
        >>> assert ascii_lower(keyword) != keyword.lower()
        >>> assert ascii_lower(keyword) == u'bac\N{KELVIN SIGN}ground'

    �utf8)�encode�lower�decode)�string�r�/usr/lib/python3.6/__init__.py�ascii_lower#sr
cCsxt|jd��}tj|�}|dkr$dStj|�}|dkrt|dkrLddlm}ntj||�}tj	|�}t
||�}|t|<|S)u<
    Look for an encoding by its label.
    This is the spec’s `get an encoding
    <http://encoding.spec.whatwg.org/#concept-encoding-get>`_ algorithm.
    Supported labels are listed there.

    :param label: A string.
    :returns:
        An :class:`Encoding` object, or :obj:`None` for an unknown label.

    z	

 Nzx-user-definedr)�
codec_info)r
�stripr�get�CACHEZx_user_definedr�PYTHON_NAMES�codecs�lookup�Encoding)Zlabel�name�encodingrZpython_namerrrr=s




rcCs.t|d�r|St|�}|dkr*td|��|S)z�
    Accept either an encoding object or label.

    :param encoding: An :class:`Encoding` object or a label string.
    :returns: An :class:`Encoding` object.
    :raises: :exc:`~exceptions.LookupError` for an unknown label.

    rNzUnknown encoding label: %r)�hasattrr�LookupError)Zencoding_or_labelrrrr�
_get_encoding[s	
rc@s eZdZdZdd�Zdd�ZdS)raOReresents a character encoding such as UTF-8,
    that can be used for decoding or encoding.

    .. attribute:: name

        Canonical name of the encoding

    .. attribute:: codec_info

        The actual implementation of the encoding,
        a stdlib :class:`~codecs.CodecInfo` object.
        See :func:`codecs.register`.

    cCs||_||_dS)N)rr)�selfrrrrr�__init__|szEncoding.__init__cCs
d|jS)Nz
<Encoding %s>)r)rrrr�__repr__�szEncoding.__repr__N)�__name__�
__module__�__qualname__�__doc__rrrrrrrmsrzutf-8zutf-16lezutf-16be�replacecCs2t|�}t|�\}}|p|}|jj||�d|fS)a�
    Decode a single string.

    :param input: A byte string
    :param fallback_encoding:
        An :class:`Encoding` object or a label string.
        The encoding to use if :obj:`input` does note have a BOM.
    :param errors: Type of error handling. See :func:`codecs.register`.
    :raises: :exc:`~exceptions.LookupError` for an unknown encoding label.
    :return:
        A ``(output, encoding)`` tuple of an Unicode string
        and an :obj:`Encoding`.

    r)r�_detect_bomrr	)�input�fallback_encoding�errorsZbom_encodingrrrrr	�sr	cCsV|jd�rt|dd�fS|jd�r4t|dd�fS|jd�rNt|dd�fSd|fS)zBReturn (bom_encoding, input), with any BOM removed from the input.s���Ns��s�)�
startswith�_UTF16LE�_UTF16BE�UTF8)r$rrrr#�s


r#�strictcCst|�jj||�dS)a;
    Encode a single string.

    :param input: An Unicode string.
    :param encoding: An :class:`Encoding` object or a label string.
    :param errors: Type of error handling. See :func:`codecs.register`.
    :raises: :exc:`~exceptions.LookupError` for an unknown encoding label.
    :return: A byte string.

    r)rrr)r$rr&rrrr�srcCs$t||�}t||�}t|�}||fS)a�
    "Pull"-based decoder.

    :param input:
        An iterable of byte strings.

        The input is first consumed just enough to determine the encoding
        based on the precense of a BOM,
        then consumed on demand when the return value is.
    :param fallback_encoding:
        An :class:`Encoding` object or a label string.
        The encoding to use if :obj:`input` does note have a BOM.
    :param errors: Type of error handling. See :func:`codecs.register`.
    :raises: :exc:`~exceptions.LookupError` for an unknown encoding label.
    :returns:
        An ``(output, encoding)`` tuple.
        :obj:`output` is an iterable of Unicode strings,
        :obj:`encoding` is the :obj:`Encoding` that is being used.

    )�IncrementalDecoder�_iter_decode_generator�next)r$r%r&�decoder�	generatorrrrr�iter_decode�s

r3ccs�|j}t|�}xf|D].}||�}|r|jdk	s2t�|jV|VPqW|ddd�}|jdk	s`t�|jV|rr|VdSx|D]}||�}|r||Vq|W|ddd�}|r�|VdS)zqReturn a generator that first yields the :obj:`Encoding`,
    then yields output chukns as Unicode strings.

    N�T)�final)r	�iterr�AssertionError)r$r1r	�chunck�outputrrrr/�s,


r/cCst||�j}t||�S)uY
    “Pull”-based encoder.

    :param input: An iterable of Unicode strings.
    :param encoding: An :class:`Encoding` object or a label string.
    :param errors: Type of error handling. See :func:`codecs.register`.
    :raises: :exc:`~exceptions.LookupError` for an unknown encoding label.
    :returns: An iterable of byte strings.

    )�IncrementalEncoderr�_iter_encode_generator)r$rr&rrrr�iter_encode�sr<ccs:x|D]}||�}|r|VqW|ddd�}|r6|VdS)N�T)r5r)r$rr8r9rrrr;s

r;c@s$eZdZdZd	dd�Zd
dd�ZdS)r.uO
    “Push”-based decoder.

    :param fallback_encoding:
        An :class:`Encoding` object or a label string.
        The encoding to use if :obj:`input` does note have a BOM.
    :param errors: Type of error handling. See :func:`codecs.register`.
    :raises: :exc:`~exceptions.LookupError` for an unknown encoding label.

    r"cCs&t|�|_||_d|_d|_d|_dS)Nr4)r�_fallback_encoding�_errors�_buffer�_decoderr)rr%r&rrrrs

zIncrementalDecoder.__init__FcCs~|j}|dk	r|||�S|j|}t|�\}}|dkrXt|�dkrR|rR||_dS|j}|jj|j�j}||_||_	|||�S)z�Decode one chunk of the input.

        :param input: A byte string.
        :param final:
            Indicate that no more input is available.
            Must be :obj:`True` if this is the last call.
        :returns: An Unicode string.

        Nr(r=)
rAr@r#�lenr>r�incrementaldecoderr?r	r)rr$r5r1rrrrr	's


zIncrementalDecoder.decodeN)r")F)rrr r!rr	rrrrr.s

r.c@seZdZdZedfdd�ZdS)r:u�
    “Push”-based encoder.

    :param encoding: An :class:`Encoding` object or a label string.
    :param errors: Type of error handling. See :func:`codecs.register`.
    :raises: :exc:`~exceptions.LookupError` for an unknown encoding label.

    .. method:: encode(input, final=False)

        :param input: An Unicode string.
        :param final:
            Indicate that no more input is available.
            Must be :obj:`True` if this is the last call.
        :returns: A byte string.

    r-cCst|�}|jj|�j|_dS)N)rr�incrementalencoderr)rrr&rrrrTszIncrementalEncoder.__init__N)rrr r!r,rrrrrr:Csr:)r")r")r!Z
__future__rrZlabelsr�VERSIONrrr
rr�objectrr,r*r+r	r#rr3r/r<r;r.r:rrrr�<module>
s2

 
3_vendor/webencodings/__pycache__/mklabels.cpython-36.pyc000064400000003444151733136310017307 0ustar003

�Pf�@sfdZddlZyddlmZWn ek
r<ddlmZYnXdd�Zdd�Zedkrbe	ed	��dS)
z�

    webencodings.mklabels
    ~~~~~~~~~~~~~~~~~~~~~

    Regenarate the webencodings.labels module.

    :copyright: Copyright 2012 by Simon Sapin
    :license: BSD, see LICENSE for details.

�N)�urlopencCs||j�kst�|S)N)�lower�AssertionError)�string�r�/usr/lib/python3.6/mklabels.py�assert_lowersrcsfdg}dd�tjt|�j�jd��D�}tdd�|D���|j�fdd�|D��|jd�d	j|�S)
Na"""

    webencodings.labels
    ~~~~~~~~~~~~~~~~~~~

    Map encoding labels to their name.

    :copyright: Copyright 2012 by Simon Sapin
    :license: BSD, see LICENSE for details.

"""

# XXX Do not edit!
# This file is automatically generated by mklabels.py

LABELS = {
cSsLg|]D}|dD]6}|dD](}tt|��jd�t|d�jd�f�qqqS)Z	encodings�labels�u�name)�reprr�lstrip)�.0�category�encoding�labelrrr�
<listcomp>-szgenerate.<locals>.<listcomp>�asciicss|]\}}t|�VqdS)N)�len)rrrrrr�	<genexpr>2szgenerate.<locals>.<genexpr>c3s,|]$\}}d|d�t|�|fVqdS)z    %s:%s %s,
� N)r)rrr)�max_lenrrr4s�}�)	�json�loadsr�read�decode�max�extend�append�join)Zurl�partsr	r)rr�generates


r#�__main__z.http://encoding.spec.whatwg.org/encodings.json)
�__doc__rZurllibr�ImportErrorZurllib.requestrr#�__name__�printrrrr�<module>s!_vendor/webencodings/__pycache__/labels.cpython-36.opt-1.pyc000064400000007646151733136310017726 0ustar003

�Pf#��@s�dZddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddd	d	d	d	d	d	d	d	d	d	d	d
d
d
ddddddddddd
d
d
dddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddd d!d!d!d!d!d"d"d"d#d#d$d$d$d$d$d$d$d%d%d%d%d%d%d%d%d%d%d&d&d'd(d(d)d*��Zd+S),z�

    webencodings.labels
    ~~~~~~~~~~~~~~~~~~~

    Map encoding labels to their name.

    :copyright: Copyright 2012 by Simon Sapin
    :license: BSD, see LICENSE for details.

zutf-8�ibm866z
iso-8859-2z
iso-8859-3z
iso-8859-4z
iso-8859-5z
iso-8859-6z
iso-8859-7z
iso-8859-8ziso-8859-8-iziso-8859-10ziso-8859-13ziso-8859-14ziso-8859-15ziso-8859-16zkoi8-rzkoi8-u�	macintoshzwindows-874zwindows-1250zwindows-1251zwindows-1252zwindows-1253zwindows-1254zwindows-1255zwindows-1256zwindows-1257zwindows-1258zx-mac-cyrillic�gbk�gb18030z
hz-gb-2312�big5zeuc-jpziso-2022-jp�	shift_jiszeuc-krziso-2022-krzutf-16bezutf-16lezx-user-defined)�zunicode-1-1-utf-8zutf-8�utf8�866�cp866�csibm866r�csisolatin2z
iso-8859-2z
iso-ir-101z	iso8859-2Ziso88592z
iso_8859-2ziso_8859-2:1987�l2�latin2�csisolatin3z
iso-8859-3z
iso-ir-109z	iso8859-3Ziso88593z
iso_8859-3ziso_8859-3:1988�l3�latin3�csisolatin4z
iso-8859-4z
iso-ir-110z	iso8859-4Ziso88594z
iso_8859-4ziso_8859-4:1988�l4�latin4�csisolatincyrillic�cyrillicz
iso-8859-5z
iso-ir-144z	iso8859-5Ziso88595z
iso_8859-5ziso_8859-5:1988�arabiczasmo-708Zcsiso88596eZcsiso88596i�csisolatinarabiczecma-114z
iso-8859-6ziso-8859-6-eziso-8859-6-iz
iso-ir-127z	iso8859-6Ziso88596z
iso_8859-6ziso_8859-6:1987�csisolatingreekzecma-118�elot_928�greek�greek8z
iso-8859-7z
iso-ir-126z	iso8859-7Ziso88597z
iso_8859-7ziso_8859-7:1987Zsun_eu_greekZcsiso88598e�csisolatinhebrew�hebrewz
iso-8859-8ziso-8859-8-ez
iso-ir-138z	iso8859-8Ziso88598z
iso_8859-8ziso_8859-8:1988ZvisualZcsiso88598iziso-8859-8-iZlogical�csisolatin6ziso-8859-10z
iso-ir-157z
iso8859-10Z	iso885910�l6�latin6ziso-8859-13z
iso8859-13Z	iso885913ziso-8859-14z
iso8859-14Z	iso885914Zcsisolatin9ziso-8859-15z
iso8859-15Z	iso885915ziso_8859-15�l9ziso-8859-16�cskoi8rZkoiZkoi8zkoi8-r�koi8_rzkoi8-uZcsmacintoshZmacrzx-mac-romanzdos-874ziso-8859-11z
iso8859-11Z	iso885911ztis-620zwindows-874�cp1250zwindows-1250zx-cp1250�cp1251zwindows-1251zx-cp1251zansi_x3.4-1968�ascii�cp1252�cp819�csisolatin1�ibm819z
iso-8859-1z
iso-ir-100z	iso8859-1Ziso88591z
iso_8859-1ziso_8859-1:1987�l1�latin1zus-asciizwindows-1252zx-cp1252�cp1253zwindows-1253zx-cp1253�cp1254�csisolatin5z
iso-8859-9z
iso-ir-148z	iso8859-9Ziso88599z
iso_8859-9ziso_8859-9:1989�l5�latin5zwindows-1254zx-cp1254�cp1255zwindows-1255zx-cp1255�cp1256zwindows-1256zx-cp1256�cp1257zwindows-1257zx-cp1257�cp1258zwindows-1258zx-cp1258zx-mac-cyrilliczx-mac-ukrainian�chineseZcsgb2312�csiso58gb231280�gb2312Zgb_2312z
gb_2312-80rz	iso-ir-58zx-gbkrz
hz-gb-2312rz
big5-hkscszcn-big5�csbig5zx-x-big5Zcseucpkdfmtjapanesezeuc-jpzx-euc-jp�csiso2022jpziso-2022-jp�
csshiftjis�ms_kanjiz	shift-jisr�sjiszwindows-31jzx-sjisZcseuckrZ
csksc56011987zeuc-krz
iso-ir-149�koreanzks_c_5601-1987zks_c_5601-1989�ksc5601Zksc_5601zwindows-949�csiso2022krziso-2022-krzutf-16bezutf-16zutf-16lezx-user-definedN)�__doc__ZLABELS�rBrB�/usr/lib/python3.6/labels.py�<module>s�_vendor/webencodings/__pycache__/__init__.cpython-36.opt-1.pyc000064400000022510151733136310020206 0ustar003

�PfP)�@s�dZddlmZddlZddlmZdZddd	d
d�ZiZdd
�Z	dd�Z
dd�ZGdd�de�Z
e
d�Ze
d�Ze
d�Zd+dd�Zdd�Zedfdd�Zd,dd �Zd!d"�Zedfd#d$�Zd%d&�ZGd'd(�d(e�ZGd)d*�d*e�ZdS)-a

    webencodings
    ~~~~~~~~~~~~

    This is a Python implementation of the `WHATWG Encoding standard
    <http://encoding.spec.whatwg.org/>`. See README for details.

    :copyright: Copyright 2012 by Simon Sapin
    :license: BSD, see LICENSE for details.

�)�unicode_literalsN�)�LABELSz0.5z
iso-8859-8zmac-cyrillicz	mac-romanZcp874)ziso-8859-8-izx-mac-cyrillic�	macintoshzwindows-874cCs|jd�j�jd�S)a9Transform (only) ASCII letters to lower case: A-Z is mapped to a-z.

    :param string: An Unicode string.
    :returns: A new Unicode string.

    This is used for `ASCII case-insensitive
    <http://encoding.spec.whatwg.org/#ascii-case-insensitive>`_
    matching of encoding labels.
    The same matching is also used, among other things,
    for `CSS keywords <http://dev.w3.org/csswg/css-values/#keywords>`_.

    This is different from the :meth:`~py:str.lower` method of Unicode strings
    which also affect non-ASCII characters,
    sometimes mapping them into the ASCII range:

        >>> keyword = u'Bac\N{KELVIN SIGN}ground'
        >>> assert keyword.lower() == u'background'
        >>> assert ascii_lower(keyword) != keyword.lower()
        >>> assert ascii_lower(keyword) == u'bac\N{KELVIN SIGN}ground'

    �utf8)�encode�lower�decode)�string�r�/usr/lib/python3.6/__init__.py�ascii_lower#sr
cCsxt|jd��}tj|�}|dkr$dStj|�}|dkrt|dkrLddlm}ntj||�}tj	|�}t
||�}|t|<|S)u<
    Look for an encoding by its label.
    This is the spec’s `get an encoding
    <http://encoding.spec.whatwg.org/#concept-encoding-get>`_ algorithm.
    Supported labels are listed there.

    :param label: A string.
    :returns:
        An :class:`Encoding` object, or :obj:`None` for an unknown label.

    z	

 Nzx-user-definedr)�
codec_info)r
�stripr�get�CACHEZx_user_definedr�PYTHON_NAMES�codecs�lookup�Encoding)Zlabel�name�encodingrZpython_namerrrr=s




rcCs.t|d�r|St|�}|dkr*td|��|S)z�
    Accept either an encoding object or label.

    :param encoding: An :class:`Encoding` object or a label string.
    :returns: An :class:`Encoding` object.
    :raises: :exc:`~exceptions.LookupError` for an unknown label.

    rNzUnknown encoding label: %r)�hasattrr�LookupError)Zencoding_or_labelrrrr�
_get_encoding[s	
rc@s eZdZdZdd�Zdd�ZdS)raOReresents a character encoding such as UTF-8,
    that can be used for decoding or encoding.

    .. attribute:: name

        Canonical name of the encoding

    .. attribute:: codec_info

        The actual implementation of the encoding,
        a stdlib :class:`~codecs.CodecInfo` object.
        See :func:`codecs.register`.

    cCs||_||_dS)N)rr)�selfrrrrr�__init__|szEncoding.__init__cCs
d|jS)Nz
<Encoding %s>)r)rrrr�__repr__�szEncoding.__repr__N)�__name__�
__module__�__qualname__�__doc__rrrrrrrmsrzutf-8zutf-16lezutf-16be�replacecCs2t|�}t|�\}}|p|}|jj||�d|fS)a�
    Decode a single string.

    :param input: A byte string
    :param fallback_encoding:
        An :class:`Encoding` object or a label string.
        The encoding to use if :obj:`input` does note have a BOM.
    :param errors: Type of error handling. See :func:`codecs.register`.
    :raises: :exc:`~exceptions.LookupError` for an unknown encoding label.
    :return:
        A ``(output, encoding)`` tuple of an Unicode string
        and an :obj:`Encoding`.

    r)r�_detect_bomrr	)�input�fallback_encoding�errorsZbom_encodingrrrrr	�sr	cCsV|jd�rt|dd�fS|jd�r4t|dd�fS|jd�rNt|dd�fSd|fS)zBReturn (bom_encoding, input), with any BOM removed from the input.s���Ns��s�)�
startswith�_UTF16LE�_UTF16BE�UTF8)r$rrrr#�s


r#�strictcCst|�jj||�dS)a;
    Encode a single string.

    :param input: An Unicode string.
    :param encoding: An :class:`Encoding` object or a label string.
    :param errors: Type of error handling. See :func:`codecs.register`.
    :raises: :exc:`~exceptions.LookupError` for an unknown encoding label.
    :return: A byte string.

    r)rrr)r$rr&rrrr�srcCs$t||�}t||�}t|�}||fS)a�
    "Pull"-based decoder.

    :param input:
        An iterable of byte strings.

        The input is first consumed just enough to determine the encoding
        based on the precense of a BOM,
        then consumed on demand when the return value is.
    :param fallback_encoding:
        An :class:`Encoding` object or a label string.
        The encoding to use if :obj:`input` does note have a BOM.
    :param errors: Type of error handling. See :func:`codecs.register`.
    :raises: :exc:`~exceptions.LookupError` for an unknown encoding label.
    :returns:
        An ``(output, encoding)`` tuple.
        :obj:`output` is an iterable of Unicode strings,
        :obj:`encoding` is the :obj:`Encoding` that is being used.

    )�IncrementalDecoder�_iter_decode_generator�next)r$r%r&�decoder�	generatorrrrr�iter_decode�s

r3ccs�|j}t|�}xJ|D] }||�}|r|jV|VPqW|ddd�}|jV|rV|VdSx|D]}||�}|r`|Vq`W|ddd�}|r�|VdS)zqReturn a generator that first yields the :obj:`Encoding`,
    then yields output chukns as Unicode strings.

    �T)�finalN)r	�iterr)r$r1r	�chunck�outputrrrr/�s(


r/cCst||�j}t||�S)uY
    “Pull”-based encoder.

    :param input: An iterable of Unicode strings.
    :param encoding: An :class:`Encoding` object or a label string.
    :param errors: Type of error handling. See :func:`codecs.register`.
    :raises: :exc:`~exceptions.LookupError` for an unknown encoding label.
    :returns: An iterable of byte strings.

    )�IncrementalEncoderr�_iter_encode_generator)r$rr&rrrr�iter_encode�sr;ccs:x|D]}||�}|r|VqW|ddd�}|r6|VdS)N�T)r5r)r$rr7r8rrrr:s

r:c@s$eZdZdZd	dd�Zd
dd�ZdS)r.uO
    “Push”-based decoder.

    :param fallback_encoding:
        An :class:`Encoding` object or a label string.
        The encoding to use if :obj:`input` does note have a BOM.
    :param errors: Type of error handling. See :func:`codecs.register`.
    :raises: :exc:`~exceptions.LookupError` for an unknown encoding label.

    r"cCs&t|�|_||_d|_d|_d|_dS)Nr4)r�_fallback_encoding�_errors�_buffer�_decoderr)rr%r&rrrrs

zIncrementalDecoder.__init__FcCs~|j}|dk	r|||�S|j|}t|�\}}|dkrXt|�dkrR|rR||_dS|j}|jj|j�j}||_||_	|||�S)z�Decode one chunk of the input.

        :param input: A byte string.
        :param final:
            Indicate that no more input is available.
            Must be :obj:`True` if this is the last call.
        :returns: An Unicode string.

        Nr(r<)
r@r?r#�lenr=r�incrementaldecoderr>r	r)rr$r5r1rrrrr	's


zIncrementalDecoder.decodeN)r")F)rrr r!rr	rrrrr.s

r.c@seZdZdZedfdd�ZdS)r9u�
    “Push”-based encoder.

    :param encoding: An :class:`Encoding` object or a label string.
    :param errors: Type of error handling. See :func:`codecs.register`.
    :raises: :exc:`~exceptions.LookupError` for an unknown encoding label.

    .. method:: encode(input, final=False)

        :param input: An Unicode string.
        :param final:
            Indicate that no more input is available.
            Must be :obj:`True` if this is the last call.
        :returns: A byte string.

    r-cCst|�}|jj|�j|_dS)N)rr�incrementalencoderr)rrr&rrrrTszIncrementalEncoder.__init__N)rrr r!r,rrrrrr9Csr9)r")r")r!Z
__future__rrZlabelsr�VERSIONrrr
rr�objectrr,r*r+r	r#rr3r/r;r:r.r9rrrr�<module>
s2

 
3_vendor/webencodings/__pycache__/x_user_defined.cpython-36.pyc000064400000005025151733136310020475 0ustar003

�Pf��	@s�dZddlmZddlZGdd�dej�ZGdd�dej�ZGdd	�d	ej�ZGd
d�deej�ZGdd
�d
eej�Zej	de�j
e�jeeeed�ZdZ
eje
�ZdS)z�

    webencodings.x_user_defined
    ~~~~~~~~~~~~~~~~~~~~~~~~~~~

    An implementation of the x-user-defined encoding.

    :copyright: Copyright 2012 by Simon Sapin
    :license: BSD, see LICENSE for details.

�)�unicode_literalsNc@s eZdZddd�Zddd�ZdS)	�Codec�strictcCstj||t�S)N)�codecs�charmap_encode�encoding_table)�self�input�errors�r�$/usr/lib/python3.6/x_user_defined.py�encodeszCodec.encodecCstj||t�S)N)r�charmap_decode�decoding_table)rr	r
rrr�decodeszCodec.decodeN)r)r)�__name__�
__module__�__qualname__r
rrrrrrs
rc@seZdZddd�ZdS)�IncrementalEncoderFcCstj||jt�dS)Nr)rrr
r)rr	�finalrrrr
szIncrementalEncoder.encodeN)F)rrrr
rrrrrsrc@seZdZddd�ZdS)�IncrementalDecoderFcCstj||jt�dS)Nr)rrr
r)rr	rrrrr$szIncrementalDecoder.decodeN)F)rrrrrrrrr#src@seZdZdS)�StreamWriterN)rrrrrrrr(src@seZdZdS)�StreamReaderN)rrrrrrrr,srzx-user-defined)�namer
r�incrementalencoder�incrementaldecoder�streamreader�streamwriteru	

 !"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~)�__doc__Z
__future__rrrrrrr�	CodecInfor
rZ
codec_infor�
charmap_buildrrrrr�<module>s&		_vendor/webencodings/__pycache__/tests.cpython-36.pyc000064400000011570151733136310016656 0ustar003

�Pf��@s�dZddlmZddlmZmZmZmZmZm	Z	m
Z
mZmZdd�Z
dd�Zd	d
�Zdd�Zd
d�Zdd�Zdd�Zdd�Zdd�ZdS)z�

    webencodings.tests
    ~~~~~~~~~~~~~~~~~~

    A basic test suite for Encoding.

    :copyright: Copyright 2012 by Simon Sapin
    :license: BSD, see LICENSE for details.

�)�unicode_literals�)	�lookup�LABELS�decode�encode�iter_decode�iter_encode�IncrementalDecoder�IncrementalEncoder�UTF8cOs4y|||�Wn|k
r"dSXtd|��dS)NzDid not raise %s.)�AssertionError)Z	exceptionZfunction�args�kwargs�r�/usr/lib/python3.6/tests.py�
assert_raisess
rcCstd�jdkst�td�jdks$t�td�jdks6t�td�jdksHt�td�jdksZt�td�jdkslt�td�jdks~t�td�dks�t�td�dks�t�td	�jd
ks�t�td�jd
ks�t�td�jd
ks�t�td
�jd
ks�t�td�dks�t�td�dk�st�dS)Nzutf-8zUtf-8zUTF-8�utf8zutf8 z 
utf8	�u8uutf-8 zUS-ASCIIzwindows-1252z
iso-8859-1�latin1ZLATIN1zlatin-1uLATİN1)r�namer
rrrr�test_labelssrcCsx�tD]�}td|�dt|�fks$t�td|�dks6t�xLdD]D}tdg||�\}}t|�gksdt�ttdg||��gks<t�q<Wt|�}|jd�dks�t�|jddd�dks�t�t	|�}|jd�dks�t�|jddd�dkst�qWx&t
tj��D]}t|�j|ks�t�q�WdS)	N��rr�T)�final)rrr)
rrrr
rr�listr	r
r�set�valuesr)Zlabel�repeat�output�_�decoder�encoderrrrr�test_all_labels0s

 r$cCsTtttdd�tttdd�tttgd�tttgd�tttd�tttd�dS)NséZinvalid�é)r�LookupErrorrrrr	r
rrrrr�test_invalid_labelCsr'cCs�tdd�dtd�fkst�tdtd��dtd�fks8t�tdd�dtd�fksRt�tdt�dtd�fkslt�tdd�dtd�fks�t�td	d�dtd�fks�t�td
d�dtd�fks�t�tdd�dtd
�fks�t�tdd�dtd�fks�t�tdd�dtd
�fk�s
t�tdd�dtd�fk�s&t�tdd�dtd
�fk�sBt�tdd�dtd
�fk�s^t�tdd�dtd�fk�szt�tdd�dtd
�fk�s�t�tdd�dtd
�fk�s�t�dS)N��ru€sérr%�asciiuésés���zutf-16bes���zutf-16les���us���s�zUTF-16BEs�zUTF-16LEzUTF-16)rrr
rrrrr�test_decodeLs r*cCsptdd�dkst�tdd�dks$t�tdd�dks6t�tdd�dksHt�tdd�dksZt�tdd	�d
kslt�dS)Nr%r��rsézutf-16s�zutf-16lezutf-16bes�)rr
rrrr�test_encodebsr,cCs�dd�}|gd�dkst�|dgd�dks.t�|dgd�dksBt�|dgd�d	ksVt�|d
dgd�d	kslt�|dd
gd�d	ks�t�|dgd�dks�t�|dgd�dks�t�|dddgd�dks�t�|dddgd�dks�t�|ddddddgd�dks�t�|dgd�dk�st�|dgd�dk�s$t�|dgd�dk�s:t�|dgd�dk�sPt�|ddddddgd�dk�spt�|dddgd �d!k�s�t�dS)"NcSst||�\}}dj|�S)Nr)r�join)�inputZfallback_encodingr Z	_encodingrrr�iter_decode_to_stringlsz/test_iter_decode.<locals>.iter_decode_to_stringrrrr+r%shelloZhellosheslloshell�oséuésés�����aua���s���s�uï»s���s�����s���sh�zx-user-defineduhllo)r
)r/rrr�test_iter_decodeks.r7cCs�djtgd��dkst�djtdgd��dks2t�djtdgd��dksLt�djtddddgd��dkslt�djtddddgd��dks�t�djtddddgd��dks�t�djtddddgd	��d
ks�t�djtddddgd
��dks�t�dS)Nrrrr%r+zutf-16s�zutf-16lezutf-16bes�uhZllozx-user-definedsh�llo)r-r	r
rrrr�test_iter_encode�s    r8cCs@d}d}d}d}t|d�|td�fks*t�t|d�|ks<t�dS)Ns2,O�#�ɻtϨ�u2,O#tsaaZaazx-user-defined)rrr
r)ZencodedZdecodedrrr�test_x_user_defined�sr9N)�__doc__Z
__future__rrrrrrrr	r
rrrrr$r'r*r,r7r8r9rrrr�<module>s,			_vendor/webencodings/__pycache__/tests.cpython-36.opt-1.pyc000064400000004443151733136310017616 0ustar003

�Pf��@s�dZddlmZddlmZmZmZmZmZm	Z	m
Z
mZmZdd�Z
dd�Zd	d
�Zdd�Zd
d�Zdd�Zdd�Zdd�Zdd�ZdS)z�

    webencodings.tests
    ~~~~~~~~~~~~~~~~~~

    A basic test suite for Encoding.

    :copyright: Copyright 2012 by Simon Sapin
    :license: BSD, see LICENSE for details.

�)�unicode_literals�)	�lookup�LABELS�decode�encode�iter_decode�iter_encode�IncrementalDecoder�IncrementalEncoder�UTF8cOs4y|||�Wn|k
r"dSXtd|��dS)NzDid not raise %s.)�AssertionError)Z	exceptionZfunction�args�kwargs�r�/usr/lib/python3.6/tests.py�
assert_raisess
rcCsdS)Nrrrrr�test_labelssrcCsZx>tD]6}x dD]}tdg||�\}}qWt|�}t|�}qWxttj��D]}qNWdS)Nrr��)rrr)rrr
r�set�values)Zlabel�repeat�output�_�decoder�encoder�namerrr�test_all_labels0s

rcCsTtttdd�tttdd�tttgd�tttgd�tttd�tttd�dS)NséZinvalid�é)r�LookupErrorrrrr	r
rrrrr�test_invalid_labelCsr!cCsdS)Nrrrrr�test_decodeLsr"cCsdS)Nrrrrr�test_encodebsr#cCsdd�}dS)NcSst||�\}}dj|�S)N�)r�join)�inputZfallback_encodingrZ	_encodingrrr�iter_decode_to_stringlsz/test_iter_decode.<locals>.iter_decode_to_stringr)r'rrr�test_iter_decodeksr(cCsdS)Nrrrrr�test_iter_encode�sr)cCsd}d}d}d}dS)Ns2,O�#�ɻtϨ�u2,O#tsaaZaar)ZencodedZdecodedrrr�test_x_user_defined�s
r*N)�__doc__Z
__future__rr$rrrrrr	r
rrrrrr!r"r#r(r)r*rrrr�<module>s,			_vendor/webencodings/__pycache__/labels.cpython-36.pyc000064400000007646151733136310016767 0ustar003

�Pf#��@s�dZddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddd	d	d	d	d	d	d	d	d	d	d	d
d
d
ddddddddddd
d
d
dddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddd d!d!d!d!d!d"d"d"d#d#d$d$d$d$d$d$d$d%d%d%d%d%d%d%d%d%d%d&d&d'd(d(d)d*��Zd+S),z�

    webencodings.labels
    ~~~~~~~~~~~~~~~~~~~

    Map encoding labels to their name.

    :copyright: Copyright 2012 by Simon Sapin
    :license: BSD, see LICENSE for details.

zutf-8�ibm866z
iso-8859-2z
iso-8859-3z
iso-8859-4z
iso-8859-5z
iso-8859-6z
iso-8859-7z
iso-8859-8ziso-8859-8-iziso-8859-10ziso-8859-13ziso-8859-14ziso-8859-15ziso-8859-16zkoi8-rzkoi8-u�	macintoshzwindows-874zwindows-1250zwindows-1251zwindows-1252zwindows-1253zwindows-1254zwindows-1255zwindows-1256zwindows-1257zwindows-1258zx-mac-cyrillic�gbk�gb18030z
hz-gb-2312�big5zeuc-jpziso-2022-jp�	shift_jiszeuc-krziso-2022-krzutf-16bezutf-16lezx-user-defined)�zunicode-1-1-utf-8zutf-8�utf8�866�cp866�csibm866r�csisolatin2z
iso-8859-2z
iso-ir-101z	iso8859-2Ziso88592z
iso_8859-2ziso_8859-2:1987�l2�latin2�csisolatin3z
iso-8859-3z
iso-ir-109z	iso8859-3Ziso88593z
iso_8859-3ziso_8859-3:1988�l3�latin3�csisolatin4z
iso-8859-4z
iso-ir-110z	iso8859-4Ziso88594z
iso_8859-4ziso_8859-4:1988�l4�latin4�csisolatincyrillic�cyrillicz
iso-8859-5z
iso-ir-144z	iso8859-5Ziso88595z
iso_8859-5ziso_8859-5:1988�arabiczasmo-708Zcsiso88596eZcsiso88596i�csisolatinarabiczecma-114z
iso-8859-6ziso-8859-6-eziso-8859-6-iz
iso-ir-127z	iso8859-6Ziso88596z
iso_8859-6ziso_8859-6:1987�csisolatingreekzecma-118�elot_928�greek�greek8z
iso-8859-7z
iso-ir-126z	iso8859-7Ziso88597z
iso_8859-7ziso_8859-7:1987Zsun_eu_greekZcsiso88598e�csisolatinhebrew�hebrewz
iso-8859-8ziso-8859-8-ez
iso-ir-138z	iso8859-8Ziso88598z
iso_8859-8ziso_8859-8:1988ZvisualZcsiso88598iziso-8859-8-iZlogical�csisolatin6ziso-8859-10z
iso-ir-157z
iso8859-10Z	iso885910�l6�latin6ziso-8859-13z
iso8859-13Z	iso885913ziso-8859-14z
iso8859-14Z	iso885914Zcsisolatin9ziso-8859-15z
iso8859-15Z	iso885915ziso_8859-15�l9ziso-8859-16�cskoi8rZkoiZkoi8zkoi8-r�koi8_rzkoi8-uZcsmacintoshZmacrzx-mac-romanzdos-874ziso-8859-11z
iso8859-11Z	iso885911ztis-620zwindows-874�cp1250zwindows-1250zx-cp1250�cp1251zwindows-1251zx-cp1251zansi_x3.4-1968�ascii�cp1252�cp819�csisolatin1�ibm819z
iso-8859-1z
iso-ir-100z	iso8859-1Ziso88591z
iso_8859-1ziso_8859-1:1987�l1�latin1zus-asciizwindows-1252zx-cp1252�cp1253zwindows-1253zx-cp1253�cp1254�csisolatin5z
iso-8859-9z
iso-ir-148z	iso8859-9Ziso88599z
iso_8859-9ziso_8859-9:1989�l5�latin5zwindows-1254zx-cp1254�cp1255zwindows-1255zx-cp1255�cp1256zwindows-1256zx-cp1256�cp1257zwindows-1257zx-cp1257�cp1258zwindows-1258zx-cp1258zx-mac-cyrilliczx-mac-ukrainian�chineseZcsgb2312�csiso58gb231280�gb2312Zgb_2312z
gb_2312-80rz	iso-ir-58zx-gbkrz
hz-gb-2312rz
big5-hkscszcn-big5�csbig5zx-x-big5Zcseucpkdfmtjapanesezeuc-jpzx-euc-jp�csiso2022jpziso-2022-jp�
csshiftjis�ms_kanjiz	shift-jisr�sjiszwindows-31jzx-sjisZcseuckrZ
csksc56011987zeuc-krz
iso-ir-149�koreanzks_c_5601-1987zks_c_5601-1989�ksc5601Zksc_5601zwindows-949�csiso2022krziso-2022-krzutf-16bezutf-16zutf-16lezx-user-definedN)�__doc__ZLABELS�rBrB�/usr/lib/python3.6/labels.py�<module>s�_vendor/webencodings/tests.py000064400000014642151733136310012375 0ustar00# coding: utf8
"""

    webencodings.tests
    ~~~~~~~~~~~~~~~~~~

    A basic test suite for Encoding.

    :copyright: Copyright 2012 by Simon Sapin
    :license: BSD, see LICENSE for details.

"""

from __future__ import unicode_literals

from . import (lookup, LABELS, decode, encode, iter_decode, iter_encode,
               IncrementalDecoder, IncrementalEncoder, UTF8)


def assert_raises(exception, function, *args, **kwargs):
    """Fail with :exc:`AssertionError` unless calling *function* raises *exception*."""
    try:
        function(*args, **kwargs)
    except exception:
        # Expected failure: the call raised the right exception type.
        return
    raise AssertionError('Did not raise %s.' % exception)  # pragma: no cover


def test_labels():
    """Label lookup is ASCII-case-insensitive and strips only ASCII whitespace.

    Also checks that Python-only aliases and near-miss labels are rejected,
    and that several legacy labels map onto windows-1252 per the spec.
    """
    assert lookup('utf-8').name == 'utf-8'
    assert lookup('Utf-8').name == 'utf-8'
    assert lookup('UTF-8').name == 'utf-8'
    assert lookup('utf8').name == 'utf-8'
    assert lookup('utf8').name == 'utf-8'
    assert lookup('utf8 ').name == 'utf-8'
    assert lookup(' \r\nutf8\t').name == 'utf-8'
    assert lookup('u8') is None  # Python label.
    assert lookup('utf-8 ') is None  # Non-ASCII white space.

    # Per the Encoding standard, these legacy labels all map to windows-1252.
    assert lookup('US-ASCII').name == 'windows-1252'
    assert lookup('iso-8859-1').name == 'windows-1252'
    assert lookup('latin1').name == 'windows-1252'
    assert lookup('LATIN1').name == 'windows-1252'
    assert lookup('latin-1') is None
    assert lookup('LATİN1') is None  # ASCII-only case insensitivity.


def test_all_labels():
    """Smoke-test every registered label against all public entry points."""
    for label in LABELS:
        expected = lookup(label)
        assert decode(b'', label) == ('', expected)
        assert encode('', label) == b''
        for count in (0, 1, 12):
            pieces, _ = iter_decode([b''] * count, label)
            assert list(pieces) == []
            assert list(iter_encode([''] * count, label)) == []
        incremental_decoder = IncrementalDecoder(label)
        incremental_encoder = IncrementalEncoder(label)
        assert incremental_decoder.decode(b'') == ''
        assert incremental_decoder.decode(b'', final=True) == ''
        assert incremental_encoder.encode('') == b''
        assert incremental_encoder.encode('', final=True) == b''
    # Each canonical encoding name must itself be a valid label.
    for canonical_name in set(LABELS.values()):
        assert lookup(canonical_name).name == canonical_name


def test_invalid_label():
    """Every public entry point raises LookupError for an unknown label."""
    assert_raises(LookupError, decode, b'\xEF\xBB\xBF\xc3\xa9', 'invalid')
    assert_raises(LookupError, encode, 'é', 'invalid')
    for iterating_api in (iter_decode, iter_encode):
        assert_raises(LookupError, iterating_api, [], 'invalid')
    for incremental_api in (IncrementalDecoder, IncrementalEncoder):
        assert_raises(LookupError, incremental_api, 'invalid')


def test_decode():
    """decode() prefers a BOM over the fallback and reports the encoding used."""
    cases = [
        # (input bytes, fallback label or Encoding,
        #  expected text, expected encoding label)
        (b'\x80', 'latin1', '€', 'latin1'),
        (b'\x80', lookup('latin1'), '€', 'latin1'),
        (b'\xc3\xa9', 'utf8', 'é', 'utf8'),
        (b'\xc3\xa9', UTF8, 'é', 'utf8'),
        (b'\xc3\xa9', 'ascii', 'é', 'ascii'),
        # A BOM overrides the fallback encoding:
        (b'\xEF\xBB\xBF\xc3\xa9', 'ascii', 'é', 'utf8'),          # UTF-8 BOM
        (b'\xFE\xFF\x00\xe9', 'ascii', 'é', 'utf-16be'),          # UTF-16-BE BOM
        (b'\xFF\xFE\xe9\x00', 'ascii', 'é', 'utf-16le'),          # UTF-16-LE BOM
        (b'\xFE\xFF\xe9\x00', 'ascii', '\ue900', 'utf-16be'),
        (b'\xFF\xFE\x00\xe9', 'ascii', '\ue900', 'utf-16le'),
        (b'\x00\xe9', 'UTF-16BE', 'é', 'utf-16be'),
        (b'\xe9\x00', 'UTF-16LE', 'é', 'utf-16le'),
        # Per the spec, plain 'UTF-16' means little-endian:
        (b'\xe9\x00', 'UTF-16', 'é', 'utf-16le'),
        (b'\xe9\x00', 'UTF-16BE', '\ue900', 'utf-16be'),
        (b'\x00\xe9', 'UTF-16LE', '\ue900', 'utf-16le'),
        (b'\x00\xe9', 'UTF-16', '\ue900', 'utf-16le'),
    ]
    for input_bytes, fallback, expected_text, expected_label in cases:
        assert decode(input_bytes, fallback) == (
            expected_text, lookup(expected_label))


def test_encode():
    """encode() produces the expected bytes for each target label."""
    for text, label, expected_bytes in [
        ('é', 'latin1', b'\xe9'),
        ('é', 'utf8', b'\xc3\xa9'),
        ('é', 'utf8', b'\xc3\xa9'),
        # Plain 'utf-16' encodes little-endian, without a BOM:
        ('é', 'utf-16', b'\xe9\x00'),
        ('é', 'utf-16le', b'\xe9\x00'),
        ('é', 'utf-16be', b'\x00\xe9'),
    ]:
        assert encode(text, label) == expected_bytes


def test_iter_decode():
    """iter_decode() handles chunked input, BOM detection and truncated bytes."""
    def chunks_to_string(chunks, fallback_encoding):
        # Join the lazily-decoded output into one string for comparison.
        output, _encoding = iter_decode(chunks, fallback_encoding)
        return ''.join(output)

    latin1_cases = [
        ([], ''),
        ([b''], ''),
        ([b'\xe9'], 'é'),
        ([b'hello'], 'hello'),
        ([b'he', b'llo'], 'hello'),
        ([b'hell', b'o'], 'hello'),
        ([b'\xc3\xa9'], 'é'),
        # A UTF-8 BOM switches decoding to UTF-8, even split across chunks:
        ([b'\xEF\xBB\xBF\xc3\xa9'], 'é'),
        ([b'\xEF\xBB\xBF', b'\xc3', b'\xa9'], 'é'),
        ([b'\xEF\xBB\xBF', b'a', b'\xc3'], 'a\uFFFD'),
        ([b'', b'\xEF', b'', b'', b'\xBB\xBF\xc3', b'\xa9'], 'é'),
        ([b'\xEF\xBB\xBF'], ''),
        # Two bytes are not enough for a UTF-8 BOM; fall back to latin1:
        ([b'\xEF\xBB'], 'ï»'),
        # UTF-16 BOMs, including ones split across chunks:
        ([b'\xFE\xFF\x00\xe9'], 'é'),
        ([b'\xFF\xFE\xe9\x00'], 'é'),
        ([b'', b'\xFF', b'', b'', b'\xFE\xe9', b'\x00'], 'é'),
    ]
    for chunks, expected in latin1_cases:
        assert chunks_to_string(chunks, 'latin1') == expected
    assert chunks_to_string(
        [b'', b'h\xe9', b'llo'], 'x-user-defined') == 'h\uF7E9llo'


def test_iter_encode():
    """iter_encode() concatenates chunked output, ignoring empty chunks."""
    assert b''.join(iter_encode([], 'latin1')) == b''
    assert b''.join(iter_encode([''], 'latin1')) == b''
    assert b''.join(iter_encode(['é'], 'latin1')) == b'\xe9'
    padded_input = ['', 'é', '', '']
    for label, expected_bytes in [
        ('latin1', b'\xe9'),
        ('utf-16', b'\xe9\x00'),
        ('utf-16le', b'\xe9\x00'),
        ('utf-16be', b'\x00\xe9'),
    ]:
        assert b''.join(iter_encode(padded_input, label)) == expected_bytes
    assert b''.join(iter_encode(
        ['', 'h\uF7E9', '', 'llo'], 'x-user-defined')) == b'h\xe9llo'


def test_x_user_defined():
    """Round-trip bytes through the x-user-defined encoding.

    Bug fix: the original fixture pair (bytes >= 0x80 mapping into the
    U+F780-U+F7FF private-use area) was immediately clobbered by a later
    reassignment to ``b'aa'`` / ``'aa'``, so only a trivial ASCII round-trip
    was ever tested.  Both pairs are now asserted.
    """
    for encoded, decoded in [
        # Non-ASCII bytes map into the private-use area U+F780..U+F7FF:
        (b'2,\x0c\x0b\x1aO\xd9#\xcb\x0f\xc9\xbbt\xcf\xa8\xca',
         '2,\x0c\x0b\x1aO\uf7d9#\uf7cb\x0f\uf7c9\uf7bbt\uf7cf\uf7a8\uf7ca'),
        # Pure ASCII passes through unchanged:
        (b'aa', 'aa'),
    ]:
        assert decode(encoded, 'x-user-defined') == (
            decoded, lookup('x-user-defined'))
        assert encode(decoded, 'x-user-defined') == encoded
_vendor/webencodings/__init__.py000064400000024520151733136310012766 0ustar00# coding: utf8
"""

    webencodings
    ~~~~~~~~~~~~

    This is a Python implementation of the `WHATWG Encoding standard
    <http://encoding.spec.whatwg.org/>`. See README for details.

    :copyright: Copyright 2012 by Simon Sapin
    :license: BSD, see LICENSE for details.

"""

from __future__ import unicode_literals

import codecs

from .labels import LABELS


# Version of the vendored webencodings package.
VERSION = '0.5'


# Some names in Encoding are not valid Python aliases. Remap these.
PYTHON_NAMES = {
    'iso-8859-8-i': 'iso-8859-8',
    'x-mac-cyrillic': 'mac-cyrillic',
    'macintosh': 'mac-roman',
    'windows-874': 'cp874'}

# Canonical name -> Encoding instance; lazily populated by lookup().
CACHE = {}


def ascii_lower(string):
    r"""Transform (only) ASCII letters to lower case: A-Z is mapped to a-z.

    :param string: An Unicode string.
    :returns: A new Unicode string.

    Used for `ASCII case-insensitive
    <http://encoding.spec.whatwg.org/#ascii-case-insensitive>`_
    matching of encoding labels; the same matching is also used for
    `CSS keywords <http://dev.w3.org/csswg/css-values/#keywords>`_.

    Unlike :meth:`~py:str.lower`, non-ASCII characters are left
    untouched; ``str.lower`` can map some of them *into* the ASCII range:

        >>> keyword = u'Bac\N{KELVIN SIGN}ground'
        >>> assert keyword.lower() == u'background'
        >>> assert ascii_lower(keyword) != keyword.lower()
        >>> assert ascii_lower(keyword) == u'bac\N{KELVIN SIGN}ground'

    """
    # Round-trip through UTF-8: bytes.lower() only touches A-Z, and this
    # turns out to be faster than an equivalent unicode.translate() table.
    utf8_bytes = string.encode('utf8')
    return utf8_bytes.lower().decode('utf8')


def lookup(label):
    """
    Look for an encoding by its label.
    This is the spec’s `get an encoding
    <http://encoding.spec.whatwg.org/#concept-encoding-get>`_ algorithm.
    Supported labels are listed there.

    :param label: A string.
    :returns:
        An :class:`Encoding` object, or :obj:`None` for an unknown label.

    """
    # Per spec, strip only ASCII whitespace (TAB, LF, FF, CR, SPACE)
    # and match case-insensitively on ASCII letters only.
    label = ascii_lower(label.strip('\t\n\f\r '))
    name = LABELS.get(label)
    if name is None:
        return None
    try:
        return CACHE[name]
    except KeyError:
        pass
    # x-user-defined has no stdlib codec; it ships with this package.
    if name == 'x-user-defined':
        from .x_user_defined import codec_info
    else:
        # Any name that reaches this point should be a valid codec name
        # (possibly after remapping through PYTHON_NAMES).
        codec_info = codecs.lookup(PYTHON_NAMES.get(name, name))
    encoding = Encoding(name, codec_info)
    CACHE[name] = encoding
    return encoding


def _get_encoding(encoding_or_label):
    """
    Accept either an encoding object or label.

    :param encoding_or_label: An :class:`Encoding` object or a label string.
    :returns: An :class:`Encoding` object.
    :raises: :exc:`~exceptions.LookupError` for an unknown label.

    """
    # Anything with a codec_info attribute is treated as an Encoding
    # and passed through unchanged.
    if hasattr(encoding_or_label, 'codec_info'):
        return encoding_or_label

    found = lookup(encoding_or_label)
    if found is None:
        raise LookupError('Unknown encoding label: %r' % encoding_or_label)
    return found


class Encoding(object):
    """Represents a character encoding such as UTF-8,
    that can be used for decoding or encoding.

    .. attribute:: name

        Canonical name of the encoding

    .. attribute:: codec_info

        The actual implementation of the encoding,
        a stdlib :class:`~codecs.CodecInfo` object.
        See :func:`codecs.register`.

    """
    def __init__(self, name, codec_info):
        self.name = name
        self.codec_info = codec_info

    def __repr__(self):
        return '<Encoding {0}>'.format(self.name)


#: The UTF-8 encoding. Should be used for new content and formats.
UTF8 = lookup('utf-8')

# UTF-16 flavors, used internally by BOM detection only.
_UTF16LE = lookup('utf-16le')
_UTF16BE = lookup('utf-16be')


def decode(input, fallback_encoding, errors='replace'):
    """
    Decode a single string.

    :param input: A byte string
    :param fallback_encoding:
        An :class:`Encoding` object or a label string.
        The encoding to use if :obj:`input` does not have a BOM.
    :param errors: Type of error handling. See :func:`codecs.register`.
    :raises: :exc:`~exceptions.LookupError` for an unknown encoding label.
    :return:
        A ``(output, encoding)`` tuple of an Unicode string
        and an :obj:`Encoding`.

    """
    # Resolve the label first so an invalid one raises even when a BOM
    # would have made the fallback irrelevant.
    fallback_encoding = _get_encoding(fallback_encoding)
    bom_encoding, input = _detect_bom(input)
    if bom_encoding is not None:
        encoding = bom_encoding
    else:
        encoding = fallback_encoding
    output, _ = encoding.codec_info.decode(input, errors)
    return output, encoding


def _detect_bom(input):
    """Return (bom_encoding, input), with any BOM removed from the input."""
    # Check UTF-16 marks before UTF-8; the prefixes do not overlap, so
    # the order only mirrors the spec's presentation.
    for bom, encoding in ((b'\xFF\xFE', _UTF16LE),
                          (b'\xFE\xFF', _UTF16BE),
                          (b'\xEF\xBB\xBF', UTF8)):
        if input.startswith(bom):
            return encoding, input[len(bom):]
    return None, input


def encode(input, encoding=UTF8, errors='strict'):
    """
    Encode a single string.

    :param input: An Unicode string.
    :param encoding: An :class:`Encoding` object or a label string.
    :param errors: Type of error handling. See :func:`codecs.register`.
    :raises: :exc:`~exceptions.LookupError` for an unknown encoding label.
    :return: A byte string.

    """
    codec = _get_encoding(encoding).codec_info
    encoded, _ = codec.encode(input, errors)
    return encoded


def iter_decode(input, fallback_encoding, errors='replace'):
    """
    "Pull"-based decoder.

    :param input:
        An iterable of byte strings.

        The input is first consumed just enough to determine the encoding
        based on the presence of a BOM,
        then consumed on demand when the return value is.
    :param fallback_encoding:
        An :class:`Encoding` object or a label string.
        The encoding to use if :obj:`input` does not have a BOM.
    :param errors: Type of error handling. See :func:`codecs.register`.
    :raises: :exc:`~exceptions.LookupError` for an unknown encoding label.
    :returns:
        An ``(output, encoding)`` tuple.
        :obj:`output` is an iterable of Unicode strings,
        :obj:`encoding` is the :obj:`Encoding` that is being used.

    """
    # The generator yields the detected Encoding first; pull that single
    # value here so callers get (chunks, encoding) with the encoding known.
    generator = _iter_decode_generator(
        input, IncrementalDecoder(fallback_encoding, errors))
    encoding = next(generator)
    return generator, encoding


def _iter_decode_generator(input, decoder):
    """Return a generator that first yields the :obj:`Encoding`,
    then yields output chukns as Unicode strings.

    """
    decode = decoder.decode
    input = iter(input)
    for chunck in input:
        output = decode(chunck)
        if output:
            assert decoder.encoding is not None
            yield decoder.encoding
            yield output
            break
    else:
        # Input exhausted without determining the encoding
        output = decode(b'', final=True)
        assert decoder.encoding is not None
        yield decoder.encoding
        if output:
            yield output
        return

    for chunck in input:
        output = decode(chunck)
        if output:
            yield output
    output = decode(b'', final=True)
    if output:
        yield output


def iter_encode(input, encoding=UTF8, errors='strict'):
    """
    “Pull”-based encoder.

    :param input: An iterable of Unicode strings.
    :param encoding: An :class:`Encoding` object or a label string.
    :param errors: Type of error handling. See :func:`codecs.register`.
    :raises: :exc:`~exceptions.LookupError` for an unknown encoding label.
    :returns: An iterable of byte strings.

    """
    # Build the incremental encoder eagerly so an invalid label raises
    # here, not on the first iteration of the lazy generator.
    incremental = IncrementalEncoder(encoding, errors)
    return _iter_encode_generator(input, incremental.encode)


def _iter_encode_generator(input, encode):
    for chunck in input:
        output = encode(chunck)
        if output:
            yield output
    output = encode('', final=True)
    if output:
        yield output


class IncrementalDecoder(object):
    """
    “Push”-based decoder.

    :param fallback_encoding:
        An :class:`Encoding` object or a label string.
        The encoding to use if :obj:`input` does not have a BOM.
    :param errors: Type of error handling. See :func:`codecs.register`.
    :raises: :exc:`~exceptions.LookupError` for an unknown encoding label.

    """
    def __init__(self, fallback_encoding, errors='replace'):
        # Fail early if `encoding` is an invalid label.
        self._fallback_encoding = _get_encoding(fallback_encoding)
        self._errors = errors
        # Bytes accumulated while BOM detection is still undecided.
        self._buffer = b''
        # Bound decode method of the underlying codec; set once the
        # encoding has been determined.
        self._decoder = None
        #: The actual :class:`Encoding` that is being used,
        #: or :obj:`None` if that is not determined yet.
        #: (Ie. if there is not enough input yet to determine
        #: if there is a BOM.)
        self.encoding = None  # Not known yet.

    def decode(self, input, final=False):
        """Decode one chunk of the input.

        :param input: A byte string.
        :param final:
            Indicate that no more input is available.
            Must be :obj:`True` if this is the last call.
        :returns: An Unicode string.

        """
        decoder = self._decoder
        if decoder is not None:
            # Encoding already determined: delegate directly.
            return decoder(input, final)

        input = self._buffer + input
        encoding, input = _detect_bom(input)
        if encoding is None:
            # The longest BOM handled by _detect_bom is 3 bytes; with
            # fewer bytes and more input still to come, detection is
            # inconclusive, so buffer and wait for the next chunk.
            if len(input) < 3 and not final:  # Not enough data yet.
                self._buffer = input
                return ''
            else:  # No BOM
                encoding = self._fallback_encoding
        decoder = encoding.codec_info.incrementaldecoder(self._errors).decode
        self._decoder = decoder
        self.encoding = encoding
        return decoder(input, final)


class IncrementalEncoder(object):
    """
    “Push”-based encoder.

    :param encoding: An :class:`Encoding` object or a label string.
    :param errors: Type of error handling. See :func:`codecs.register`.
    :raises: :exc:`~exceptions.LookupError` for an unknown encoding label.

    .. method:: encode(input, final=False)

        :param input: An Unicode string.
        :param final:
            Indicate that no more input is available.
            Must be :obj:`True` if this is the last call.
        :returns: A byte string.

    """
    def __init__(self, encoding=UTF8, errors='strict'):
        # Resolving the label here makes an invalid one fail early;
        # binding the codec's encode method makes .encode() a direct call.
        codec_info = _get_encoding(encoding).codec_info
        self.encode = codec_info.incrementalencoder(errors).encode
_vendor/webencodings/mklabels.py000064400000002431151733136310013016 0ustar00"""

    webencodings.mklabels
    ~~~~~~~~~~~~~~~~~~~~~

    Regenerate the webencodings.labels module.

    :copyright: Copyright 2012 by Simon Sapin
    :license: BSD, see LICENSE for details.

"""

import json
try:
    from urllib import urlopen
except ImportError:
    from urllib.request import urlopen


def assert_lower(string):
    """Return *string* unchanged, asserting it is already lower-case."""
    lowered = string.lower()
    assert string == lowered
    return string


def generate(url):
    """Fetch the WHATWG encodings registry (JSON) from *url* and return
    the source text of the webencodings.labels module."""
    parts = ['''\
"""

    webencodings.labels
    ~~~~~~~~~~~~~~~~~~~

    Map encoding labels to their name.

    :copyright: Copyright 2012 by Simon Sapin
    :license: BSD, see LICENSE for details.

"""

# XXX Do not edit!
# This file is automatically generated by mklabels.py

LABELS = {
''']
    # Collect (label_repr, name_repr) pairs. lstrip('u') normalizes the
    # u'...' repr produced on Python 2 so output matches Python 3.
    labels = []
    for category in json.loads(urlopen(url).read().decode('ascii')):
        for encoding in category['encodings']:
            name_repr = repr(encoding['name']).lstrip('u')
            for label in encoding['labels']:
                labels.append(
                    (repr(assert_lower(label)).lstrip('u'), name_repr))
    # Pad after each label so the names line up in one column.
    max_len = max(len(label) for label, name in labels)
    for label, name in labels:
        parts.append(
            '    %s:%s %s,\n' % (label, ' ' * (max_len - len(label)), name))
    parts.append('}')
    return ''.join(parts)


# Regenerate labels.py from the canonical WHATWG encodings registry.
if __name__ == '__main__':
    print(generate('http://encoding.spec.whatwg.org/encodings.json'))
_vendor/webencodings/labels.py000064400000021423151733136310012470 0ustar00"""

    webencodings.labels
    ~~~~~~~~~~~~~~~~~~~

    Map encoding labels to their name.

    :copyright: Copyright 2012 by Simon Sapin
    :license: BSD, see LICENSE for details.

"""

# XXX Do not edit!
# This file is automatically generated by mklabels.py

LABELS = {
    'unicode-1-1-utf-8':   'utf-8',
    'utf-8':               'utf-8',
    'utf8':                'utf-8',
    '866':                 'ibm866',
    'cp866':               'ibm866',
    'csibm866':            'ibm866',
    'ibm866':              'ibm866',
    'csisolatin2':         'iso-8859-2',
    'iso-8859-2':          'iso-8859-2',
    'iso-ir-101':          'iso-8859-2',
    'iso8859-2':           'iso-8859-2',
    'iso88592':            'iso-8859-2',
    'iso_8859-2':          'iso-8859-2',
    'iso_8859-2:1987':     'iso-8859-2',
    'l2':                  'iso-8859-2',
    'latin2':              'iso-8859-2',
    'csisolatin3':         'iso-8859-3',
    'iso-8859-3':          'iso-8859-3',
    'iso-ir-109':          'iso-8859-3',
    'iso8859-3':           'iso-8859-3',
    'iso88593':            'iso-8859-3',
    'iso_8859-3':          'iso-8859-3',
    'iso_8859-3:1988':     'iso-8859-3',
    'l3':                  'iso-8859-3',
    'latin3':              'iso-8859-3',
    'csisolatin4':         'iso-8859-4',
    'iso-8859-4':          'iso-8859-4',
    'iso-ir-110':          'iso-8859-4',
    'iso8859-4':           'iso-8859-4',
    'iso88594':            'iso-8859-4',
    'iso_8859-4':          'iso-8859-4',
    'iso_8859-4:1988':     'iso-8859-4',
    'l4':                  'iso-8859-4',
    'latin4':              'iso-8859-4',
    'csisolatincyrillic':  'iso-8859-5',
    'cyrillic':            'iso-8859-5',
    'iso-8859-5':          'iso-8859-5',
    'iso-ir-144':          'iso-8859-5',
    'iso8859-5':           'iso-8859-5',
    'iso88595':            'iso-8859-5',
    'iso_8859-5':          'iso-8859-5',
    'iso_8859-5:1988':     'iso-8859-5',
    'arabic':              'iso-8859-6',
    'asmo-708':            'iso-8859-6',
    'csiso88596e':         'iso-8859-6',
    'csiso88596i':         'iso-8859-6',
    'csisolatinarabic':    'iso-8859-6',
    'ecma-114':            'iso-8859-6',
    'iso-8859-6':          'iso-8859-6',
    'iso-8859-6-e':        'iso-8859-6',
    'iso-8859-6-i':        'iso-8859-6',
    'iso-ir-127':          'iso-8859-6',
    'iso8859-6':           'iso-8859-6',
    'iso88596':            'iso-8859-6',
    'iso_8859-6':          'iso-8859-6',
    'iso_8859-6:1987':     'iso-8859-6',
    'csisolatingreek':     'iso-8859-7',
    'ecma-118':            'iso-8859-7',
    'elot_928':            'iso-8859-7',
    'greek':               'iso-8859-7',
    'greek8':              'iso-8859-7',
    'iso-8859-7':          'iso-8859-7',
    'iso-ir-126':          'iso-8859-7',
    'iso8859-7':           'iso-8859-7',
    'iso88597':            'iso-8859-7',
    'iso_8859-7':          'iso-8859-7',
    'iso_8859-7:1987':     'iso-8859-7',
    'sun_eu_greek':        'iso-8859-7',
    'csiso88598e':         'iso-8859-8',
    'csisolatinhebrew':    'iso-8859-8',
    'hebrew':              'iso-8859-8',
    'iso-8859-8':          'iso-8859-8',
    'iso-8859-8-e':        'iso-8859-8',
    'iso-ir-138':          'iso-8859-8',
    'iso8859-8':           'iso-8859-8',
    'iso88598':            'iso-8859-8',
    'iso_8859-8':          'iso-8859-8',
    'iso_8859-8:1988':     'iso-8859-8',
    'visual':              'iso-8859-8',
    'csiso88598i':         'iso-8859-8-i',
    'iso-8859-8-i':        'iso-8859-8-i',
    'logical':             'iso-8859-8-i',
    'csisolatin6':         'iso-8859-10',
    'iso-8859-10':         'iso-8859-10',
    'iso-ir-157':          'iso-8859-10',
    'iso8859-10':          'iso-8859-10',
    'iso885910':           'iso-8859-10',
    'l6':                  'iso-8859-10',
    'latin6':              'iso-8859-10',
    'iso-8859-13':         'iso-8859-13',
    'iso8859-13':          'iso-8859-13',
    'iso885913':           'iso-8859-13',
    'iso-8859-14':         'iso-8859-14',
    'iso8859-14':          'iso-8859-14',
    'iso885914':           'iso-8859-14',
    'csisolatin9':         'iso-8859-15',
    'iso-8859-15':         'iso-8859-15',
    'iso8859-15':          'iso-8859-15',
    'iso885915':           'iso-8859-15',
    'iso_8859-15':         'iso-8859-15',
    'l9':                  'iso-8859-15',
    'iso-8859-16':         'iso-8859-16',
    'cskoi8r':             'koi8-r',
    'koi':                 'koi8-r',
    'koi8':                'koi8-r',
    'koi8-r':              'koi8-r',
    'koi8_r':              'koi8-r',
    'koi8-u':              'koi8-u',
    'csmacintosh':         'macintosh',
    'mac':                 'macintosh',
    'macintosh':           'macintosh',
    'x-mac-roman':         'macintosh',
    'dos-874':             'windows-874',
    'iso-8859-11':         'windows-874',
    'iso8859-11':          'windows-874',
    'iso885911':           'windows-874',
    'tis-620':             'windows-874',
    'windows-874':         'windows-874',
    'cp1250':              'windows-1250',
    'windows-1250':        'windows-1250',
    'x-cp1250':            'windows-1250',
    'cp1251':              'windows-1251',
    'windows-1251':        'windows-1251',
    'x-cp1251':            'windows-1251',
    'ansi_x3.4-1968':      'windows-1252',
    'ascii':               'windows-1252',
    'cp1252':              'windows-1252',
    'cp819':               'windows-1252',
    'csisolatin1':         'windows-1252',
    'ibm819':              'windows-1252',
    'iso-8859-1':          'windows-1252',
    'iso-ir-100':          'windows-1252',
    'iso8859-1':           'windows-1252',
    'iso88591':            'windows-1252',
    'iso_8859-1':          'windows-1252',
    'iso_8859-1:1987':     'windows-1252',
    'l1':                  'windows-1252',
    'latin1':              'windows-1252',
    'us-ascii':            'windows-1252',
    'windows-1252':        'windows-1252',
    'x-cp1252':            'windows-1252',
    'cp1253':              'windows-1253',
    'windows-1253':        'windows-1253',
    'x-cp1253':            'windows-1253',
    'cp1254':              'windows-1254',
    'csisolatin5':         'windows-1254',
    'iso-8859-9':          'windows-1254',
    'iso-ir-148':          'windows-1254',
    'iso8859-9':           'windows-1254',
    'iso88599':            'windows-1254',
    'iso_8859-9':          'windows-1254',
    'iso_8859-9:1989':     'windows-1254',
    'l5':                  'windows-1254',
    'latin5':              'windows-1254',
    'windows-1254':        'windows-1254',
    'x-cp1254':            'windows-1254',
    'cp1255':              'windows-1255',
    'windows-1255':        'windows-1255',
    'x-cp1255':            'windows-1255',
    'cp1256':              'windows-1256',
    'windows-1256':        'windows-1256',
    'x-cp1256':            'windows-1256',
    'cp1257':              'windows-1257',
    'windows-1257':        'windows-1257',
    'x-cp1257':            'windows-1257',
    'cp1258':              'windows-1258',
    'windows-1258':        'windows-1258',
    'x-cp1258':            'windows-1258',
    'x-mac-cyrillic':      'x-mac-cyrillic',
    'x-mac-ukrainian':     'x-mac-cyrillic',
    'chinese':             'gbk',
    'csgb2312':            'gbk',
    'csiso58gb231280':     'gbk',
    'gb2312':              'gbk',
    'gb_2312':             'gbk',
    'gb_2312-80':          'gbk',
    'gbk':                 'gbk',
    'iso-ir-58':           'gbk',
    'x-gbk':               'gbk',
    'gb18030':             'gb18030',
    'hz-gb-2312':          'hz-gb-2312',
    'big5':                'big5',
    'big5-hkscs':          'big5',
    'cn-big5':             'big5',
    'csbig5':              'big5',
    'x-x-big5':            'big5',
    'cseucpkdfmtjapanese': 'euc-jp',
    'euc-jp':              'euc-jp',
    'x-euc-jp':            'euc-jp',
    'csiso2022jp':         'iso-2022-jp',
    'iso-2022-jp':         'iso-2022-jp',
    'csshiftjis':          'shift_jis',
    'ms_kanji':            'shift_jis',
    'shift-jis':           'shift_jis',
    'shift_jis':           'shift_jis',
    'sjis':                'shift_jis',
    'windows-31j':         'shift_jis',
    'x-sjis':              'shift_jis',
    'cseuckr':             'euc-kr',
    'csksc56011987':       'euc-kr',
    'euc-kr':              'euc-kr',
    'iso-ir-149':          'euc-kr',
    'korean':              'euc-kr',
    'ks_c_5601-1987':      'euc-kr',
    'ks_c_5601-1989':      'euc-kr',
    'ksc5601':             'euc-kr',
    'ksc_5601':            'euc-kr',
    'windows-949':         'euc-kr',
    'csiso2022kr':         'iso-2022-kr',
    'iso-2022-kr':         'iso-2022-kr',
    'utf-16be':            'utf-16be',
    'utf-16':              'utf-16le',
    'utf-16le':            'utf-16le',
    'x-user-defined':      'x-user-defined',
}
_vendor/webencodings/x_user_defined.py000064400000010322151733136310014205 0ustar00# coding: utf8
"""

    webencodings.x_user_defined
    ~~~~~~~~~~~~~~~~~~~~~~~~~~~

    An implementation of the x-user-defined encoding.

    :copyright: Copyright 2012 by Simon Sapin
    :license: BSD, see LICENSE for details.

"""

from __future__ import unicode_literals

import codecs


### Codec APIs

class Codec(codecs.Codec):
    """Stateless one-shot codec for x-user-defined, built on charmap."""

    def encode(self, input, errors='strict'):
        # charmap_encode returns a (bytes, length_consumed) tuple.
        return codecs.charmap_encode(input, errors, encoding_table)

    def decode(self, input, errors='strict'):
        # charmap_decode returns a (text, length_consumed) tuple.
        return codecs.charmap_decode(input, errors, decoding_table)


class IncrementalEncoder(codecs.IncrementalEncoder):
    """Chunk-by-chunk encoder; charmap coding keeps no state across chunks."""
    def encode(self, input, final=False):
        # [0] drops the length_consumed half of charmap_encode's result.
        return codecs.charmap_encode(input, self.errors, encoding_table)[0]


class IncrementalDecoder(codecs.IncrementalDecoder):
    """Chunk-by-chunk decoder; charmap coding keeps no state across chunks."""
    def decode(self, input, final=False):
        # [0] drops the length_consumed half of charmap_decode's result.
        return codecs.charmap_decode(input, self.errors, decoding_table)[0]


class StreamWriter(Codec, codecs.StreamWriter):
    """Stream writer: inherits Codec.encode and stdlib stream plumbing."""
    pass


class StreamReader(Codec, codecs.StreamReader):
    """Stream reader: inherits Codec.decode and stdlib stream plumbing."""
    pass


### encodings module API

# CodecInfo bundle in the shape expected by Python's codecs machinery;
# webencodings.lookup() uses this instead of registering the codec globally.
codec_info = codecs.CodecInfo(
    name='x-user-defined',
    encode=Codec().encode,
    decode=Codec().decode,
    incrementalencoder=IncrementalEncoder,
    incrementaldecoder=IncrementalDecoder,
    streamreader=StreamReader,
    streamwriter=StreamWriter,
)


### Decoding Table

# Python 3:
# for c in range(256): print('    %r' % chr(c if c < 128 else c + 0xF700))
decoding_table = (
    '\x00'
    '\x01'
    '\x02'
    '\x03'
    '\x04'
    '\x05'
    '\x06'
    '\x07'
    '\x08'
    '\t'
    '\n'
    '\x0b'
    '\x0c'
    '\r'
    '\x0e'
    '\x0f'
    '\x10'
    '\x11'
    '\x12'
    '\x13'
    '\x14'
    '\x15'
    '\x16'
    '\x17'
    '\x18'
    '\x19'
    '\x1a'
    '\x1b'
    '\x1c'
    '\x1d'
    '\x1e'
    '\x1f'
    ' '
    '!'
    '"'
    '#'
    '$'
    '%'
    '&'
    "'"
    '('
    ')'
    '*'
    '+'
    ','
    '-'
    '.'
    '/'
    '0'
    '1'
    '2'
    '3'
    '4'
    '5'
    '6'
    '7'
    '8'
    '9'
    ':'
    ';'
    '<'
    '='
    '>'
    '?'
    '@'
    'A'
    'B'
    'C'
    'D'
    'E'
    'F'
    'G'
    'H'
    'I'
    'J'
    'K'
    'L'
    'M'
    'N'
    'O'
    'P'
    'Q'
    'R'
    'S'
    'T'
    'U'
    'V'
    'W'
    'X'
    'Y'
    'Z'
    '['
    '\\'
    ']'
    '^'
    '_'
    '`'
    'a'
    'b'
    'c'
    'd'
    'e'
    'f'
    'g'
    'h'
    'i'
    'j'
    'k'
    'l'
    'm'
    'n'
    'o'
    'p'
    'q'
    'r'
    's'
    't'
    'u'
    'v'
    'w'
    'x'
    'y'
    'z'
    '{'
    '|'
    '}'
    '~'
    '\x7f'
    '\uf780'
    '\uf781'
    '\uf782'
    '\uf783'
    '\uf784'
    '\uf785'
    '\uf786'
    '\uf787'
    '\uf788'
    '\uf789'
    '\uf78a'
    '\uf78b'
    '\uf78c'
    '\uf78d'
    '\uf78e'
    '\uf78f'
    '\uf790'
    '\uf791'
    '\uf792'
    '\uf793'
    '\uf794'
    '\uf795'
    '\uf796'
    '\uf797'
    '\uf798'
    '\uf799'
    '\uf79a'
    '\uf79b'
    '\uf79c'
    '\uf79d'
    '\uf79e'
    '\uf79f'
    '\uf7a0'
    '\uf7a1'
    '\uf7a2'
    '\uf7a3'
    '\uf7a4'
    '\uf7a5'
    '\uf7a6'
    '\uf7a7'
    '\uf7a8'
    '\uf7a9'
    '\uf7aa'
    '\uf7ab'
    '\uf7ac'
    '\uf7ad'
    '\uf7ae'
    '\uf7af'
    '\uf7b0'
    '\uf7b1'
    '\uf7b2'
    '\uf7b3'
    '\uf7b4'
    '\uf7b5'
    '\uf7b6'
    '\uf7b7'
    '\uf7b8'
    '\uf7b9'
    '\uf7ba'
    '\uf7bb'
    '\uf7bc'
    '\uf7bd'
    '\uf7be'
    '\uf7bf'
    '\uf7c0'
    '\uf7c1'
    '\uf7c2'
    '\uf7c3'
    '\uf7c4'
    '\uf7c5'
    '\uf7c6'
    '\uf7c7'
    '\uf7c8'
    '\uf7c9'
    '\uf7ca'
    '\uf7cb'
    '\uf7cc'
    '\uf7cd'
    '\uf7ce'
    '\uf7cf'
    '\uf7d0'
    '\uf7d1'
    '\uf7d2'
    '\uf7d3'
    '\uf7d4'
    '\uf7d5'
    '\uf7d6'
    '\uf7d7'
    '\uf7d8'
    '\uf7d9'
    '\uf7da'
    '\uf7db'
    '\uf7dc'
    '\uf7dd'
    '\uf7de'
    '\uf7df'
    '\uf7e0'
    '\uf7e1'
    '\uf7e2'
    '\uf7e3'
    '\uf7e4'
    '\uf7e5'
    '\uf7e6'
    '\uf7e7'
    '\uf7e8'
    '\uf7e9'
    '\uf7ea'
    '\uf7eb'
    '\uf7ec'
    '\uf7ed'
    '\uf7ee'
    '\uf7ef'
    '\uf7f0'
    '\uf7f1'
    '\uf7f2'
    '\uf7f3'
    '\uf7f4'
    '\uf7f5'
    '\uf7f6'
    '\uf7f7'
    '\uf7f8'
    '\uf7f9'
    '\uf7fa'
    '\uf7fb'
    '\uf7fc'
    '\uf7fd'
    '\uf7fe'
    '\uf7ff'
)

### Encoding table
# Inverse of decoding_table: maps each character back to its byte value.
encoding_table = codecs.charmap_build(decoding_table)
_vendor/cachecontrol/__pycache__/wrapper.cpython-36.pyc000064400000000745151733136310017173 0ustar003

�Pf��@s&ddlmZddlmZddd�ZdS)�)�CacheControlAdapter)�	DictCacheNTcCs6|pt�}t||||d�}|jd|�|jd|�|S)N)�cache_etags�
serializer�	heuristiczhttp://zhttps://)rrZmount)Zsess�cacherrr�adapter�r	�/usr/lib/python3.6/wrapper.py�CacheControls
r)NTNN)rrrrrr	r	r	r
�<module>s
_vendor/cachecontrol/__pycache__/serialize.cpython-36.pyc000064400000010445151733136310017500 0ustar003

�Pf��@s|ddlZddlZddlZddlZddlmZddlmZmZm	Z	dd�Z
dd�Zd	d
�Zdd�Z
d
d�ZGdd�de�ZdS)�N)�CaseInsensitiveDict�)�HTTPResponse�pickle�	text_typecCstj|�jd�S)N�ascii)�base64Z	b64encode�decode)�b�r�/usr/lib/python3.6/serialize.py�_b64_encode_bytessr
cCst|jd��S)N�utf8)r
�encode)�srrr�_b64_encode_strsrcCst|t�rt|�St|�S)N)�
isinstancerrr
)rrrr�_b64_encodes
rcCstj|jd��S)Nr)rZ	b64decoder)r
rrr�_b64_decode_bytessrcCst|�jd�S)Nr)rr	)rrrr�_b64_decode_strsrc@s>eZdZddd�Zdd�Zdd�Zdd	�Zd
d�Zdd
�ZdS)�
SerializerNc	Cs�t|j�}|dkr*|jdd�}tj|�|_dt|�tdd�|jj�D��|j	|j
t|j�|j
|jd�i}i|d<d|kr�|djd�}x*|D]"}|j�}|jj|d�|d|<q�Wtd	d�|dj�D��|d<d
jdtjtj|dd
d�jd��g�S)NF)�decode_content�responsecss"|]\}}t|�t|�fVqdS)N)r)�.0�k�vrrr�	<genexpr>9sz#Serializer.dumps.<locals>.<genexpr>)�body�headers�status�version�reason�strictr�vary�,css.|]&\}}t|�|dk	r t|�n|fVqdS)N)r)rrrrrrrNs�,scc=2�:T)Z
separatorsZ	sort_keysr)r$r&)rr�read�io�BytesIOZ_fpr
�dict�itemsrr rr!r"r�split�strip�get�join�zlib�compress�json�dumpsr)�self�requestrrZresponse_headers�dataZvaried_headers�headerrrrr3#s:

zSerializer.dumpscCs�|sdSy|jdd�\}}Wntk
r4d}YnX|dd�dkrR||}d}|jdd�d	jd�}yt|dj|��||�Stk
r�dSXdS)
Nr%rscc=0�scc=�=rz_loads_v{0}���)r,�
ValueErrorr	�getattr�format�AttributeError)r4r5r6Zverrrr�loads[s
zSerializer.loadsc
Cs�d|jdi�krdSx2|jdi�j�D]\}}|jj|d�|kr&dSq&W|djd�}t|ddd�}|jdd	�d
kr�|jd�||dd<ytj|�}Wn$tk
r�tj|jd��}YnXt	f|dd
�|d��S)z`Verify our vary headers match and construct a real urllib3
        HTTPResponse object.
        �*r#Nrrr)r6ztransfer-encoding�ZchunkedrF)rZpreload_content)
r.r+r�poprr(r)�	TypeErrorrr)r4r5�cachedr7�valueZbody_rawrrrrr�prepare_responsexs$
zSerializer.prepare_responsecCsdS)Nr)r4r5r6rrr�	_loads_v0�szSerializer._loads_v0cCs0ytj|�}Wntk
r"dSX|j||�S)N)rr?r;rF)r4r5r6rDrrr�	_loads_v1�s
zSerializer._loads_v1cCs�ytjtj|�jd��}Wntk
r.dSXt|dd�|dd<tdd�|ddj�D��|dd<t	|dd�|dd<tdd�|d	j�D��|d	<|j
||�S)
Nrrrcss"|]\}}t|�t|�fVqdS)N)r)rrrrrrr�sz'Serializer._loads_v2.<locals>.<genexpr>rr!css.|]&\}}t|�|dk	r t|�n|fVqdS)N)r)rrrrrrr�sr#)r2r?r0Z
decompressr	r;rr*r+rrF)r4r5r6rDrrr�	_loads_v2�szSerializer._loads_v2)N)	�__name__�
__module__�__qualname__r3r?rFrGrHrIrrrrr!s
8(r)rr(r2r0Zpip._vendor.requests.structuresr�compatrrrr
rrrr�objectrrrrr�<module>s_vendor/cachecontrol/__pycache__/_cmd.cpython-36.pyc000064400000002703151733136310016411 0ustar003

�Pf(�@sxddlZddlmZddlmZddlmZddlmZddl	m
Z
dd�Zd	d
�Zdd�Z
dd
d�Zedkrte�dS)�N)�requests)�CacheControlAdapter)�	DictCache)�logger)�ArgumentParsercCs"tjtj�tj�}tj|�dS)N)rZsetLevel�logging�DEBUGZ
StreamHandlerZ
addHandler)Zhandler�r	�/usr/lib/python3.6/_cmd.py�
setup_loggingsrcCs>tt�dddd�}tj�}|jd|�|jd|�|j|_|S)NT)Zcache_etagsZ
serializerZ	heuristiczhttp://zhttps://)rrrZSessionZmountZ
controller�cache_controller)Zadapter�sessr	r	r
�get_sessionsrcCst�}|jddd�|j�S)N�urlzThe URL to try and cache)�help)r�add_argument�
parse_args)�parserr	r	r
�get_args!srcCsTt�}t�}|j|j�}t�|jj|j|j�|jj	|j�rHt
d�nt
d�dS)NzCached!z
Not cached :()rr�getrrrZcache_responseZrequest�rawZcached_request�print)�argsr
Zrespr	r	r
�main's
r�__main__)N)rZpip._vendorrZ pip._vendor.cachecontrol.adapterrZpip._vendor.cachecontrol.cacherZ#pip._vendor.cachecontrol.controllerr�argparserrrrr�__name__r	r	r	r
�<module>s
_vendor/cachecontrol/__pycache__/serialize.cpython-36.opt-1.pyc000064400000010445151733136310020437 0ustar003

�Pf��@s|ddlZddlZddlZddlZddlmZddlmZmZm	Z	dd�Z
dd�Zd	d
�Zdd�Z
d
d�ZGdd�de�ZdS)�N)�CaseInsensitiveDict�)�HTTPResponse�pickle�	text_typecCstj|�jd�S)N�ascii)�base64Z	b64encode�decode)�b�r�/usr/lib/python3.6/serialize.py�_b64_encode_bytessr
cCst|jd��S)N�utf8)r
�encode)�srrr�_b64_encode_strsrcCst|t�rt|�St|�S)N)�
isinstancerrr
)rrrr�_b64_encodes
rcCstj|jd��S)Nr)rZ	b64decoder)r
rrr�_b64_decode_bytessrcCst|�jd�S)Nr)rr	)rrrr�_b64_decode_strsrc@s>eZdZddd�Zdd�Zdd�Zdd	�Zd
d�Zdd
�ZdS)�
SerializerNc	Cs�t|j�}|dkr*|jdd�}tj|�|_dt|�tdd�|jj�D��|j	|j
t|j�|j
|jd�i}i|d<d|kr�|djd�}x*|D]"}|j�}|jj|d�|d|<q�Wtd	d�|dj�D��|d<d
jdtjtj|dd
d�jd��g�S)NF)�decode_content�responsecss"|]\}}t|�t|�fVqdS)N)r)�.0�k�vrrr�	<genexpr>9sz#Serializer.dumps.<locals>.<genexpr>)�body�headers�status�version�reason�strictr�vary�,css.|]&\}}t|�|dk	r t|�n|fVqdS)N)r)rrrrrrrNs�,scc=2�:T)Z
separatorsZ	sort_keysr)r$r&)rr�read�io�BytesIOZ_fpr
�dict�itemsrr rr!r"r�split�strip�get�join�zlib�compress�json�dumpsr)�self�requestrrZresponse_headers�dataZvaried_headers�headerrrrr3#s:

zSerializer.dumpscCs�|sdSy|jdd�\}}Wntk
r4d}YnX|dd�dkrR||}d}|jdd�d	jd�}yt|dj|��||�Stk
r�dSXdS)
Nr%rscc=0�scc=�=rz_loads_v{0}���)r,�
ValueErrorr	�getattr�format�AttributeError)r4r5r6Zverrrr�loads[s
zSerializer.loadsc
Cs�d|jdi�krdSx2|jdi�j�D]\}}|jj|d�|kr&dSq&W|djd�}t|ddd�}|jdd	�d
kr�|jd�||dd<ytj|�}Wn$tk
r�tj|jd��}YnXt	f|dd
�|d��S)z`Verify our vary headers match and construct a real urllib3
        HTTPResponse object.
        �*r#Nrrr)r6ztransfer-encoding�ZchunkedrF)rZpreload_content)
r.r+r�poprr(r)�	TypeErrorrr)r4r5�cachedr7�valueZbody_rawrrrrr�prepare_responsexs$
zSerializer.prepare_responsecCsdS)Nr)r4r5r6rrr�	_loads_v0�szSerializer._loads_v0cCs0ytj|�}Wntk
r"dSX|j||�S)N)rr?r;rF)r4r5r6rDrrr�	_loads_v1�s
zSerializer._loads_v1cCs�ytjtj|�jd��}Wntk
r.dSXt|dd�|dd<tdd�|ddj�D��|dd<t	|dd�|dd<tdd�|d	j�D��|d	<|j
||�S)
Nrrrcss"|]\}}t|�t|�fVqdS)N)r)rrrrrrr�sz'Serializer._loads_v2.<locals>.<genexpr>rr!css.|]&\}}t|�|dk	r t|�n|fVqdS)N)r)rrrrrrr�sr#)r2r?r0Z
decompressr	r;rr*r+rrF)r4r5r6rDrrr�	_loads_v2�szSerializer._loads_v2)N)	�__name__�
__module__�__qualname__r3r?rFrGrHrIrrrrr!s
8(r)rr(r2r0Zpip._vendor.requests.structuresr�compatrrrr
rrrr�objectrrrrr�<module>s_vendor/cachecontrol/__pycache__/_cmd.cpython-36.opt-1.pyc000064400000002703151733136310017350 0ustar003

�Pf(�@sxddlZddlmZddlmZddlmZddlmZddl	m
Z
dd�Zd	d
�Zdd�Z
dd
d�Zedkrte�dS)�N)�requests)�CacheControlAdapter)�	DictCache)�logger)�ArgumentParsercCs"tjtj�tj�}tj|�dS)N)rZsetLevel�logging�DEBUGZ
StreamHandlerZ
addHandler)Zhandler�r	�/usr/lib/python3.6/_cmd.py�
setup_loggingsrcCs>tt�dddd�}tj�}|jd|�|jd|�|j|_|S)NT)Zcache_etagsZ
serializerZ	heuristiczhttp://zhttps://)rrrZSessionZmountZ
controller�cache_controller)Zadapter�sessr	r	r
�get_sessionsrcCst�}|jddd�|j�S)N�urlzThe URL to try and cache)�help)r�add_argument�
parse_args)�parserr	r	r
�get_args!srcCsTt�}t�}|j|j�}t�|jj|j|j�|jj	|j�rHt
d�nt
d�dS)NzCached!z
Not cached :()rr�getrrrZcache_responseZrequest�rawZcached_request�print)�argsr
Zrespr	r	r
�main's
r�__main__)N)rZpip._vendorrZ pip._vendor.cachecontrol.adapterrZpip._vendor.cachecontrol.cacherZ#pip._vendor.cachecontrol.controllerr�argparserrrrr�__name__r	r	r	r
�<module>s
_vendor/cachecontrol/__pycache__/adapter.cpython-36.pyc000064400000005362151733136310017133 0ustar003

�Pf�@sTddlZddlZddlmZddlmZddlmZddlm	Z	Gdd�de�Z
dS)	�N)�HTTPAdapter�)�CacheController)�	DictCache)�CallbackFileWrappercsPeZdZeddg�Zd�fdd�	Z�fdd�Zd�fd
d�	Z�fdd
�Z�Z	S)�CacheControlAdapterZPUTZDELETENTc	sBtt|�j||�|pt�|_||_|p*t}||j||d�|_dS)N)�cache_etags�
serializer)�superr�__init__r�cache�	heuristicr�
controller)	�selfrrZcontroller_classr	r
�args�kwZcontroller_factory)�	__class__��/usr/lib/python3.6/adapter.pyrszCacheControlAdapter.__init__csV|jdkr>|jj|�}|r*|j||dd�S|jj|jj|��tt|�j	|f|�}|S)z�
        Send a request. Use the request information to see if it
        exists in the cache and cache the response if we need to and can.
        �GETT)�
from_cache)
�methodrZcached_request�build_responseZheaders�updateZconditional_headersr
r�send)r�requestr�cached_response�resp)rrrrs
zCacheControlAdapter.sendFcs|r�|jdkr�|jr"|jj|�}|jdkr`|jj||�}||k	rFd}|jdd�|j�|}n\|jdkrz|jj||�nBt	|j
tj|jj||��|_
|j
r�|j��fdd�}tj||�|_tt|�j||�}|j|jkr�|jr�|jj|j�}|jj|�||_|S)	z�
        Build a response by making a request or using the cache.

        This will end up calling send and returning a potentially
        cached response
        ri0TF)Zdecode_contenti-cs��|jdkr|jj�dS)Nr)Z
chunk_left�_fpZ_close)r)�super_update_chunk_lengthrr�_update_chunk_lengthgs
z@CacheControlAdapter.build_response.<locals>._update_chunk_length)rr
ZapplyZstatusrZupdate_cached_response�readZrelease_connZcache_responserr�	functools�partialZchunkedr �types�
MethodTyper
rr�invalidating_methods�ok�	cache_urlZurlr�deleter)rrZresponserrr rr()r)rrr3s<



z"CacheControlAdapter.build_responsecs|jj�tt|�j�dS)N)r�closer
r)r)rrrr*{s
zCacheControlAdapter.close)NTNNN)F)
�__name__�
__module__�__qualname__�setr&rrrr*�
__classcell__rr)rrrs
Hr)r$r"Zpip._vendor.requests.adaptersrrrrrZfilewrapperrrrrrr�<module>s_vendor/cachecontrol/__pycache__/__init__.cpython-36.pyc000064400000000722151733136310017245 0ustar003

�Pf.�@s8dZdZdZdZddlmZddlmZddlm	Z	dS)	zbCacheControl import Interface.

Make it easy to import from cachecontrol without long namespaces.
zEric Larsonzeric@ionrock.orgz0.11.7�)�CacheControl)�CacheControlAdapter)�CacheControllerN)
�__doc__�
__author__Z	__email__�__version__�wrapperrZadapterrZ
controllerr�r	r	�/usr/lib/python3.6/__init__.py�<module>s_vendor/cachecontrol/__pycache__/compat.cpython-36.opt-1.pyc000064400000000761151733136310017733 0ustar003

�Pf|�@s�yddlmZWn ek
r0ddlmZYnXyddlZWnek
rZddlZYnXddlmZddlm	Z	y
e
fZWnek
r�e
fZYnXdS)�)�urljoinN)�HTTPResponse)�is_fp_closed)Zurllib.parser�ImportErrorZurlparseZcPickle�pickleZpip._vendor.urllib3.responserZpip._vendor.urllib3.utilrZunicodeZ	text_type�	NameError�str�r	r	�/usr/lib/python3.6/compat.py�<module>s
_vendor/cachecontrol/__pycache__/filewrapper.cpython-36.opt-1.pyc000064400000004026151733136310020766 0ustar003

�Pf�	�@s ddlmZGdd�de�ZdS)�)�BytesIOc@sBeZdZdZdd�Zdd�Zdd�Zdd	�Zddd�Zd
d�Z	d
S)�CallbackFileWrapperav
    Small wrapper around a fp object which will tee everything read into a
    buffer, and when that file is closed it will execute a callback with the
    contents of that buffer.

    All attributes are proxied to the underlying file object.

    This class uses members with a double underscore (__) leading prefix so as
    not to accidentally shadow an attribute.
    cCst�|_||_||_dS)N)r�_CallbackFileWrapper__buf�_CallbackFileWrapper__fp�_CallbackFileWrapper__callback)�self�fp�callback�r
�!/usr/lib/python3.6/filewrapper.py�__init__szCallbackFileWrapper.__init__cCs|jd�}t||�S)Nr)�__getattribute__�getattr)r�namerr
r
r�__getattr__s	
zCallbackFileWrapper.__getattr__cCsDy|jjdkStk
r YnXy|jjStk
r>YnXdS)NF)rr�AttributeError�closed)rr
r
rZ__is_fp_closed!sz"CallbackFileWrapper.__is_fp_closedcCs |jr|j|jj��d|_dS)N)rr�getvalue)rr
r
r�_close0szCallbackFileWrapper._closeNcCs,|jj|�}|jj|�|j�r(|j�|S)N)r�readr�write�"_CallbackFileWrapper__is_fp_closedr)r�amt�datar
r
rr;s
zCallbackFileWrapper.readcCs@|jj|�}|dkr |dkr |S|jj|�|j�r<|j�|S)N�s
)r�
_safe_readrrrr)rrrr
r
rrCszCallbackFileWrapper._safe_read)N)
�__name__�
__module__�__qualname__�__doc__rrrrrrr
r
r
rrs

rN)�ior�objectrr
r
r
r�<module>s_vendor/cachecontrol/__pycache__/heuristics.cpython-36.opt-1.pyc000064400000011004151733136320020623 0ustar003

�Pf-�@s�ddlZddlZddlmZmZmZddlmZmZdZddd�Z	dd�Z
Gd	d
�d
e�ZGdd�de�Z
Gd
d�de�ZGdd�de�ZdS)�N)�
formatdate�	parsedate�parsedate_tz)�datetime�	timedeltaz%a, %d %b %Y %H:%M:%S GMTcCs|p
tj�}||S)N)r�now)�delta�date�r
� /usr/lib/python3.6/heuristics.py�expire_aftersrcCsttj|j���S)N)r�calendar�timegmZ	timetuple)Zdtr
r
r�datetime_to_headersrc@s$eZdZdd�Zdd�Zdd�ZdS)�
BaseHeuristiccCsdS)a!
        Return a valid 1xx warning header value describing the cache
        adjustments.

        The response is provided too allow warnings like 113
        http://tools.ietf.org/html/rfc7234#section-5.5.4 where we need
        to explicitly say response is over 24 hours old.
        z110 - "Response is Stale"r
)�self�responser
r
r�warnings	zBaseHeuristic.warningcCsiS)z�Update the response headers with any new headers.

        NOTE: This SHOULD always include some Warning header to
              signify that the response was cached by the client, not
              by way of the provided headers.
        r
)rrr
r
r�update_headers!szBaseHeuristic.update_headerscCs@|j|�}|r<|jj|�|j|�}|dk	r<|jjd|i�|S)N�Warning)r�headers�updater)rrZupdated_headersZwarning_header_valuer
r
r�apply*s

zBaseHeuristic.applyN)�__name__�
__module__�__qualname__rrrr
r
r
rrs	rc@seZdZdZdd�ZdS)�OneDayCachezM
    Cache the response by providing an expires 1 day in the
    future.
    cCsRi}d|jkrNt|jd�}ttdd�t|dd��d�}t|�|d<d|d<|S)	N�expiresr	�)Zdays�)r	�publicz
cache-control)rrrrrr)rrrr	rr
r
rr;s

zOneDayCache.update_headersN)rrr�__doc__rr
r
r
rr6src@s(eZdZdZdd�Zdd�Zdd�ZdS)	�ExpiresAfterz;
    Cache **all** requests for a defined time period.
    cKstf|�|_dS)N)rr)r�kwr
r
r�__init__LszExpiresAfter.__init__cCst|j�}t|�dd�S)Nr )rz
cache-control)rrr)rrrr
r
rrOs
zExpiresAfter.update_headerscCsd}||jS)Nz:110 - Automatically cached for %s. Response might be stale)r)rrZtmplr
r
rrVszExpiresAfter.warningN)rrrr!r$rrr
r
r
rr"Gsr"c@s>eZdZdZedddddddd	d
ddg�Zd
d�Zdd�ZdS)�LastModifieda�
    If there is no Expires header already, fall back on Last-Modified
    using the heuristic from
    http://tools.ietf.org/html/rfc7234#section-4.2.2
    to calculate a reasonable value.

    Firefox also does something like this per
    https://developer.mozilla.org/en-US/docs/Web/HTTP/Caching_FAQ
    http://lxr.mozilla.org/mozilla-release/source/netwerk/protocol/http/nsHttpResponseHead.cpp#397
    Unlike mozilla we limit this to 24-hr.
    ��������i,i-i�i�i�i�i�c
Cs�|j}d|kriSd|kr*|ddkr*iS|j|jkr:iSd|ksJd|krNiStjt|d��}t|d�}|dks||dkr�iStj�}td||�}|tj|�}tdt	|dd
��}||kr�iS||}	dtj
ttj|	��iS)Nrz
cache-controlr r	z
last-modifiedr�
�ii�Q)
rZstatus�cacheable_by_default_statusesr
rrr�time�max�minZstrftime�TIME_FMTZgmtime)
r�resprr	Z
last_modifiedrZcurrent_agerZfreshness_lifetimerr
r
rrks*zLastModified.update_headerscCsdS)Nr
)rr1r
r
rr�szLastModified.warningN)rrrr!�setr,rrr
r
r
rr%[s
r%)N)r
r-Zemail.utilsrrrrrr0rr�objectrrr"r%r
r
r
r�<module>s
"_vendor/cachecontrol/__pycache__/adapter.cpython-36.opt-1.pyc000064400000005362151733136320020073 0ustar003

�Pf�@sTddlZddlZddlmZddlmZddlmZddlm	Z	Gdd�de�Z
dS)	�N)�HTTPAdapter�)�CacheController)�	DictCache)�CallbackFileWrappercsPeZdZeddg�Zd�fdd�	Z�fdd�Zd�fd
d�	Z�fdd
�Z�Z	S)�CacheControlAdapterZPUTZDELETENTc	sBtt|�j||�|pt�|_||_|p*t}||j||d�|_dS)N)�cache_etags�
serializer)�superr�__init__r�cache�	heuristicr�
controller)	�selfrrZcontroller_classr	r
�args�kwZcontroller_factory)�	__class__��/usr/lib/python3.6/adapter.pyrszCacheControlAdapter.__init__csV|jdkr>|jj|�}|r*|j||dd�S|jj|jj|��tt|�j	|f|�}|S)z�
        Send a request. Use the request information to see if it
        exists in the cache and cache the response if we need to and can.
        �GETT)�
from_cache)
�methodrZcached_request�build_responseZheaders�updateZconditional_headersr
r�send)r�requestr�cached_response�resp)rrrrs
zCacheControlAdapter.sendFcs|r�|jdkr�|jr"|jj|�}|jdkr`|jj||�}||k	rFd}|jdd�|j�|}n\|jdkrz|jj||�nBt	|j
tj|jj||��|_
|j
r�|j��fdd�}tj||�|_tt|�j||�}|j|jkr�|jr�|jj|j�}|jj|�||_|S)	z�
        Build a response by making a request or using the cache.

        This will end up calling send and returning a potentially
        cached response
        ri0TF)Zdecode_contenti-cs��|jdkr|jj�dS)Nr)Z
chunk_left�_fpZ_close)r)�super_update_chunk_lengthrr�_update_chunk_lengthgs
z@CacheControlAdapter.build_response.<locals>._update_chunk_length)rr
ZapplyZstatusrZupdate_cached_response�readZrelease_connZcache_responserr�	functools�partialZchunkedr �types�
MethodTyper
rr�invalidating_methods�ok�	cache_urlZurlr�deleter)rrZresponserrr rr()r)rrr3s<



z"CacheControlAdapter.build_responsecs|jj�tt|�j�dS)N)r�closer
r)r)rrrr*{s
zCacheControlAdapter.close)NTNNN)F)
�__name__�
__module__�__qualname__�setr&rrrr*�
__classcell__rr)rrrs
Hr)r$r"Zpip._vendor.requests.adaptersrrrrrZfilewrapperrrrrrr�<module>s_vendor/cachecontrol/__pycache__/compat.cpython-36.pyc000064400000000761151733136320016775 0ustar003

�Pf|�@s�yddlmZWn ek
r0ddlmZYnXyddlZWnek
rZddlZYnXddlmZddlm	Z	y
e
fZWnek
r�e
fZYnXdS)�)�urljoinN)�HTTPResponse)�is_fp_closed)Zurllib.parser�ImportErrorZurlparseZcPickle�pickleZpip._vendor.urllib3.responserZpip._vendor.urllib3.utilrZunicodeZ	text_type�	NameError�str�r	r	�/usr/lib/python3.6/compat.py�<module>s
_vendor/cachecontrol/__pycache__/controller.cpython-36.pyc000064400000016775151733136320017711 0ustar003

�Pf�2�@s�dZddlZddlZddlZddlZddlmZddlmZddl	m
Z
ddlmZej
e�Zejd�Zd	d
�ZGdd�de�ZdS)
z7
The httplib2 algorithms ported for use with requests.
�N)�parsedate_tz)�CaseInsensitiveDict�)�	DictCache)�
Serializerz9^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?cCs0tj|�j�}|d|d|d|d|dfS)z�Parses a URI using the regex given in Appendix B of RFC 3986.

        (scheme, authority, path, query, fragment) = parse_uri(uri)
    r����)�URI�match�groups)�urir
�r� /usr/lib/python3.6/controller.py�	parse_urisrc@s\eZdZdZddd�Zedd��Zedd	��Zd
d�Zdd
�Z	dd�Z
ddd�Zdd�ZdS)�CacheControllerz9An interface to see if request should cached or not.
    NTcCs"|pt�|_||_|pt�|_dS)N)r�cache�cache_etagsr�
serializer)�selfrrrrrr�__init__!szCacheController.__init__c	Cslt|�\}}}}}|s|r*td|��|j�}|j�}|sBd}|rTdj||g�pV|}|d||}|S)z4Normalize the URL to create a safe key for the cachez(Only absolute URIs are allowed. uri = %s�/�?z://)r�	Exception�lower�join)	�clsr�schemeZ	authority�pathZqueryZfragmentZrequest_uriZ
defrag_urirrr�_urlnorm&szCacheController._urlnormcCs
|j|�S)N)r )rrrrr�	cache_url:szCacheController.cache_urlcCsVi}d}d|krd}||krR||jd�}dd�|D�}dd�|D�}t||�}|S)zz
        Parse the cache control headers returning a dictionary with values
        for the different directives.
        z
cache-controlz
Cache-Control�,cSs4g|],}d|jd�krtdd�|jdd�D���qS)r�=cSsg|]}|j�j��qSr)�stripr)�.0�xrrr�
<listcomp>LszBCacheController.parse_cache_control.<locals>.<listcomp>.<listcomp>���)�find�tuple�split)r%�partrrrr'Lsz7CacheController.parse_cache_control.<locals>.<listcomp>cSs*g|]"}d|jd�kr|j�j�df�qS)rr#r()r)r$r)r%�namerrrr'Ps)r+�dict)r�headersZretvalZ	cc_header�partsZparts_with_argsZ
parts_wo_argsrrr�parse_cache_control>sz#CacheController.parse_cache_controlcCs�|j|j�}tjd|�|j|j�}d|kr:tjd�dSd|kr\|ddkr\tjd�dS|jj|�}|dkr~tjd	�dS|jj	||�}|s�tj
d
�dS|jdkr�d}tj|�|St|j�}|s�d
|k�rd|kr�tjd�|jj
|�tjd�dStj�}tjt|d
��}	td||	�}
tjd|
�|j|�}d}d|k�rt|dj��rtt|d�}tjd|�nDd|k�r�t|d�}
|
dk	�r�tj|
�|	}td|�}tjd|�d|k�r�yt|d�}tjd|�Wntk
�r�d}YnXd|k�rDyt|d�}Wntk
�r.d}YnX|
|7}
tjd|
�||
k�rjtjd�tjd||
�|Sd|k�r�tjd�|jj
|�dS)ze
        Return a cached response if it exists in the cache, otherwise
        return False.
        zLooking up "%s" in the cachezno-cachez-Request header has "no-cache", cache bypassedFzmax-agerz1Request header has "max_age" as 0, cache bypassedNzNo cache entry availablez1Cache entry deserialization failed, entry ignoredi-zVReturning cached "301 Moved Permanently" response (ignoring date and etag information)�date�etagz(Purging cached response: no date or etagz!Ignoring cached response: no datezCurrent age based on date: %iz#Freshness lifetime from max-age: %i�expiresz#Freshness lifetime from expires: %iz+Freshness lifetime from request max-age: %iz	min-freshz'Adjusted current age from min-fresh: %iz2The response is "fresh", returning cached responsez%i > %iz4The cached response is "stale" with no etag, purging)r!�url�logger�debugr1r/r�getr�loadsZwarning�statusr�delete�time�calendarZtimegmr�max�isdigit�int�
ValueError)r�requestr!�ccZ
cache_data�resp�msgr/Znowr2Zcurrent_ageZresp_ccZfreshness_lifetimer4Zexpire_timeZ	min_freshrrr�cached_requestVs�





















zCacheController.cached_requestcCs`|j|j�}|jj||jj|��}i}|r\t|j�}d|krH|d|d<d|kr\|d|d<|S)Nr3ZETagz
If-None-Matchz
last-modifiedz
Last-ModifiedzIf-Modified-Since)r!r5rr9rr8rr/)rrBr!rDZnew_headersr/rrr�conditional_headers�s
z#CacheController.conditional_headersc
Cs�ddddg}|j|kr*tjd|j|�dSt|j�}|dk	rhd|krh|dj�rht|d�t|�krhdS|j|j�}|j|�}|j	|j
�}tjd|�d	}	|jd
�r�d}	tjd�|jd
�r�d}	tjd
�|	r�|jj|�r�tjd�|jj
|�|jo�d|k�r(tjd�|jj||jj|||d��n�|jdk�rXtjd�|jj||jj||��n�d|k�r�|�r�|jd��r�|dj��r�t|d�dk�r�tjd�|jj||jj|||d��n:d|k�r�|d�r�tjd�|jj||jj|||d��dS)zc
        Algorithm for caching requests.

        This assumes a requests Response object.
        ����i,i-zStatus code %s not in %sNzcontent-lengthz&Updating cache with response from "%s"Fzno-storeTzResponse header has "no-store"zRequest header has "no-store"z0Purging existing cache entry to honor "no-store"r3zCaching due to etag)�bodyzCaching permanant redirectr2zmax-agerz'Caching b/c date exists and max-age > 0r4zCaching b/c of expires header)r:r6r7rr/r?r@�lenr1r!r5r8rr;r�setr�dumps)
rrB�responserJZcacheable_status_codesZresponse_headersZcc_reqrCr!Zno_storerrr�cache_response�sd










 



zCacheController.cache_responsecsv|j|j�}|jj||jj|��}|s*|Sdg�|jjt�fdd�|jj	�D���d|_
|jj||jj||��|S)z�On a 304 we will get a new set of headers that we want to
        update our cached value with, assuming we have one.

        This should only ever be called when we've sent an ETag and
        gotten a 304 as the response.
        zcontent-lengthc3s&|]\}}|j��kr||fVqdS)N)r)r%�k�v)�excluded_headersrr�	<genexpr>Tsz9CacheController.update_cached_response.<locals>.<genexpr>rH)
r!r5rr9rr8r/�updater.�itemsr:rLrM)rrBrNr!Zcached_responser)rRr�update_cached_response6s
z&CacheController.update_cached_response)NTN)N)
�__name__�
__module__�__qualname__�__doc__r�classmethodr r!r1rFrGrOrVrrrrrs
y
Wr)rZZlogging�rer=r<Zemail.utilsrZpip._vendor.requests.structuresrrrZ	serializerZ	getLoggerrWr6�compilerr�objectrrrrr�<module>s

	_vendor/cachecontrol/__pycache__/filewrapper.cpython-36.pyc000064400000004026151733136320020030 0ustar003

�Pf�	�@s ddlmZGdd�de�ZdS)�)�BytesIOc@sBeZdZdZdd�Zdd�Zdd�Zdd	�Zddd�Zd
d�Z	d
S)�CallbackFileWrapperav
    Small wrapper around a fp object which will tee everything read into a
    buffer, and when that file is closed it will execute a callback with the
    contents of that buffer.

    All attributes are proxied to the underlying file object.

    This class uses members with a double underscore (__) leading prefix so as
    not to accidentally shadow an attribute.
    cCst�|_||_||_dS)N)r�_CallbackFileWrapper__buf�_CallbackFileWrapper__fp�_CallbackFileWrapper__callback)�self�fp�callback�r
�!/usr/lib/python3.6/filewrapper.py�__init__szCallbackFileWrapper.__init__cCs|jd�}t||�S)Nr)�__getattribute__�getattr)r�namerr
r
r�__getattr__s	
zCallbackFileWrapper.__getattr__cCsDy|jjdkStk
r YnXy|jjStk
r>YnXdS)NF)rr�AttributeError�closed)rr
r
rZ__is_fp_closed!sz"CallbackFileWrapper.__is_fp_closedcCs |jr|j|jj��d|_dS)N)rr�getvalue)rr
r
r�_close0szCallbackFileWrapper._closeNcCs,|jj|�}|jj|�|j�r(|j�|S)N)r�readr�write�"_CallbackFileWrapper__is_fp_closedr)r�amt�datar
r
rr;s
zCallbackFileWrapper.readcCs@|jj|�}|dkr |dkr |S|jj|�|j�r<|j�|S)N�s
)r�
_safe_readrrrr)rrrr
r
rrCszCallbackFileWrapper._safe_read)N)
�__name__�
__module__�__qualname__�__doc__rrrrrrr
r
r
rrs

rN)�ior�objectrr
r
r
r�<module>s_vendor/cachecontrol/__pycache__/cache.cpython-36.opt-1.pyc000064400000003215151733136320017511 0ustar003

�Pf�@s4dZddlmZGdd�de�ZGdd�de�ZdS)zb
The cache object API for implementing caches. The default is a thread
safe in-memory dictionary.
�)�Lockc@s,eZdZdd�Zdd�Zdd�Zdd�Zd	S)
�	BaseCachecCs
t��dS)N)�NotImplemented)�self�key�r�/usr/lib/python3.6/cache.py�get
sz
BaseCache.getcCs
t��dS)N)r)rr�valuerrr�set
sz
BaseCache.setcCs
t��dS)N)r)rrrrr�deleteszBaseCache.deletecCsdS)Nr)rrrr�closeszBaseCache.closeN)�__name__�
__module__�__qualname__r	rrr
rrrrrsrc@s.eZdZd
dd�Zdd�Zdd�Zdd	�ZdS)�	DictCacheNcCst�|_|pi|_dS)N)r�lock�data)rZ	init_dictrrr�__init__szDictCache.__init__cCs|jj|d�S)N)rr	)rrrrrr	sz
DictCache.getc
Cs&|j�|jj||i�WdQRXdS)N)rr�update)rrr
rrrr sz
DictCache.setc	Cs,|j�||jkr|jj|�WdQRXdS)N)rr�pop)rrrrrr$s
zDictCache.delete)N)rrrrr	rrrrrrrs
rN)�__doc__Z	threadingr�objectrrrrrr�<module>s_vendor/cachecontrol/__pycache__/__init__.cpython-36.opt-1.pyc000064400000000722151733136320020205 0ustar003

�Pf.�@s8dZdZdZdZddlmZddlmZddlm	Z	dS)	zbCacheControl import Interface.

Make it easy to import from cachecontrol without long namespaces.
zEric Larsonzeric@ionrock.orgz0.11.7�)�CacheControl)�CacheControlAdapter)�CacheControllerN)
�__doc__�
__author__Z	__email__�__version__�wrapperrZadapterrZ
controllerr�r	r	�/usr/lib/python3.6/__init__.py�<module>s_vendor/cachecontrol/__pycache__/heuristics.cpython-36.pyc000064400000011004151733136320017664 0ustar003

�Pf-�@s�ddlZddlZddlmZmZmZddlmZmZdZddd�Z	dd�Z
Gd	d
�d
e�ZGdd�de�Z
Gd
d�de�ZGdd�de�ZdS)�N)�
formatdate�	parsedate�parsedate_tz)�datetime�	timedeltaz%a, %d %b %Y %H:%M:%S GMTcCs|p
tj�}||S)N)r�now)�delta�date�r
� /usr/lib/python3.6/heuristics.py�expire_aftersrcCsttj|j���S)N)r�calendar�timegmZ	timetuple)Zdtr
r
r�datetime_to_headersrc@s$eZdZdd�Zdd�Zdd�ZdS)�
BaseHeuristiccCsdS)a!
        Return a valid 1xx warning header value describing the cache
        adjustments.

        The response is provided too allow warnings like 113
        http://tools.ietf.org/html/rfc7234#section-5.5.4 where we need
        to explicitly say response is over 24 hours old.
        z110 - "Response is Stale"r
)�self�responser
r
r�warnings	zBaseHeuristic.warningcCsiS)z�Update the response headers with any new headers.

        NOTE: This SHOULD always include some Warning header to
              signify that the response was cached by the client, not
              by way of the provided headers.
        r
)rrr
r
r�update_headers!szBaseHeuristic.update_headerscCs@|j|�}|r<|jj|�|j|�}|dk	r<|jjd|i�|S)N�Warning)r�headers�updater)rrZupdated_headersZwarning_header_valuer
r
r�apply*s

zBaseHeuristic.applyN)�__name__�
__module__�__qualname__rrrr
r
r
rrs	rc@seZdZdZdd�ZdS)�OneDayCachezM
    Cache the response by providing an expires 1 day in the
    future.
    cCsRi}d|jkrNt|jd�}ttdd�t|dd��d�}t|�|d<d|d<|S)	N�expiresr	�)Zdays�)r	�publicz
cache-control)rrrrrr)rrrr	rr
r
rr;s

zOneDayCache.update_headersN)rrr�__doc__rr
r
r
rr6src@s(eZdZdZdd�Zdd�Zdd�ZdS)	�ExpiresAfterz;
    Cache **all** requests for a defined time period.
    cKstf|�|_dS)N)rr)r�kwr
r
r�__init__LszExpiresAfter.__init__cCst|j�}t|�dd�S)Nr )rz
cache-control)rrr)rrrr
r
rrOs
zExpiresAfter.update_headerscCsd}||jS)Nz:110 - Automatically cached for %s. Response might be stale)r)rrZtmplr
r
rrVszExpiresAfter.warningN)rrrr!r$rrr
r
r
rr"Gsr"c@s>eZdZdZedddddddd	d
ddg�Zd
d�Zdd�ZdS)�LastModifieda�
    If there is no Expires header already, fall back on Last-Modified
    using the heuristic from
    http://tools.ietf.org/html/rfc7234#section-4.2.2
    to calculate a reasonable value.

    Firefox also does something like this per
    https://developer.mozilla.org/en-US/docs/Web/HTTP/Caching_FAQ
    http://lxr.mozilla.org/mozilla-release/source/netwerk/protocol/http/nsHttpResponseHead.cpp#397
    Unlike mozilla we limit this to 24-hr.
    ��������i,i-i�i�i�i�i�c
Cs�|j}d|kriSd|kr*|ddkr*iS|j|jkr:iSd|ksJd|krNiStjt|d��}t|d�}|dks||dkr�iStj�}td||�}|tj|�}tdt	|dd
��}||kr�iS||}	dtj
ttj|	��iS)Nrz
cache-controlr r	z
last-modifiedr�
�ii�Q)
rZstatus�cacheable_by_default_statusesr
rrr�time�max�minZstrftime�TIME_FMTZgmtime)
r�resprr	Z
last_modifiedrZcurrent_agerZfreshness_lifetimerr
r
rrks*zLastModified.update_headerscCsdS)Nr
)rr1r
r
rr�szLastModified.warningN)rrrr!�setr,rrr
r
r
rr%[s
r%)N)r
r-Zemail.utilsrrrrrr0rr�objectrrr"r%r
r
r
r�<module>s
"_vendor/cachecontrol/__pycache__/controller.cpython-36.opt-1.pyc000064400000016775151733136320020650 0ustar003

�Pf�2�@s�dZddlZddlZddlZddlZddlmZddlmZddl	m
Z
ddlmZej
e�Zejd�Zd	d
�ZGdd�de�ZdS)
z7
The httplib2 algorithms ported for use with requests.
�N)�parsedate_tz)�CaseInsensitiveDict�)�	DictCache)�
Serializerz9^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?cCs0tj|�j�}|d|d|d|d|dfS)z�Parses a URI using the regex given in Appendix B of RFC 3986.

        (scheme, authority, path, query, fragment) = parse_uri(uri)
    r����)�URI�match�groups)�urir
�r� /usr/lib/python3.6/controller.py�	parse_urisrc@s\eZdZdZddd�Zedd��Zedd	��Zd
d�Zdd
�Z	dd�Z
ddd�Zdd�ZdS)�CacheControllerz9An interface to see if request should cached or not.
    NTcCs"|pt�|_||_|pt�|_dS)N)r�cache�cache_etagsr�
serializer)�selfrrrrrr�__init__!szCacheController.__init__c	Cslt|�\}}}}}|s|r*td|��|j�}|j�}|sBd}|rTdj||g�pV|}|d||}|S)z4Normalize the URL to create a safe key for the cachez(Only absolute URIs are allowed. uri = %s�/�?z://)r�	Exception�lower�join)	�clsr�schemeZ	authority�pathZqueryZfragmentZrequest_uriZ
defrag_urirrr�_urlnorm&szCacheController._urlnormcCs
|j|�S)N)r )rrrrr�	cache_url:szCacheController.cache_urlcCsVi}d}d|krd}||krR||jd�}dd�|D�}dd�|D�}t||�}|S)zz
        Parse the cache control headers returning a dictionary with values
        for the different directives.
        z
cache-controlz
Cache-Control�,cSs4g|],}d|jd�krtdd�|jdd�D���qS)r�=cSsg|]}|j�j��qSr)�stripr)�.0�xrrr�
<listcomp>LszBCacheController.parse_cache_control.<locals>.<listcomp>.<listcomp>���)�find�tuple�split)r%�partrrrr'Lsz7CacheController.parse_cache_control.<locals>.<listcomp>cSs*g|]"}d|jd�kr|j�j�df�qS)rr#r()r)r$r)r%�namerrrr'Ps)r+�dict)r�headersZretvalZ	cc_header�partsZparts_with_argsZ
parts_wo_argsrrr�parse_cache_control>sz#CacheController.parse_cache_controlcCs�|j|j�}tjd|�|j|j�}d|kr:tjd�dSd|kr\|ddkr\tjd�dS|jj|�}|dkr~tjd	�dS|jj	||�}|s�tj
d
�dS|jdkr�d}tj|�|St|j�}|s�d
|k�rd|kr�tjd�|jj
|�tjd�dStj�}tjt|d
��}	td||	�}
tjd|
�|j|�}d}d|k�rt|dj��rtt|d�}tjd|�nDd|k�r�t|d�}
|
dk	�r�tj|
�|	}td|�}tjd|�d|k�r�yt|d�}tjd|�Wntk
�r�d}YnXd|k�rDyt|d�}Wntk
�r.d}YnX|
|7}
tjd|
�||
k�rjtjd�tjd||
�|Sd|k�r�tjd�|jj
|�dS)ze
        Return a cached response if it exists in the cache, otherwise
        return False.
        zLooking up "%s" in the cachezno-cachez-Request header has "no-cache", cache bypassedFzmax-agerz1Request header has "max_age" as 0, cache bypassedNzNo cache entry availablez1Cache entry deserialization failed, entry ignoredi-zVReturning cached "301 Moved Permanently" response (ignoring date and etag information)�date�etagz(Purging cached response: no date or etagz!Ignoring cached response: no datezCurrent age based on date: %iz#Freshness lifetime from max-age: %i�expiresz#Freshness lifetime from expires: %iz+Freshness lifetime from request max-age: %iz	min-freshz'Adjusted current age from min-fresh: %iz2The response is "fresh", returning cached responsez%i > %iz4The cached response is "stale" with no etag, purging)r!�url�logger�debugr1r/r�getr�loadsZwarning�statusr�delete�time�calendarZtimegmr�max�isdigit�int�
ValueError)r�requestr!�ccZ
cache_data�resp�msgr/Znowr2Zcurrent_ageZresp_ccZfreshness_lifetimer4Zexpire_timeZ	min_freshrrr�cached_requestVs�





















zCacheController.cached_requestcCs`|j|j�}|jj||jj|��}i}|r\t|j�}d|krH|d|d<d|kr\|d|d<|S)Nr3ZETagz
If-None-Matchz
last-modifiedz
Last-ModifiedzIf-Modified-Since)r!r5rr9rr8rr/)rrBr!rDZnew_headersr/rrr�conditional_headers�s
z#CacheController.conditional_headersc
Cs�ddddg}|j|kr*tjd|j|�dSt|j�}|dk	rhd|krh|dj�rht|d�t|�krhdS|j|j�}|j|�}|j	|j
�}tjd|�d	}	|jd
�r�d}	tjd�|jd
�r�d}	tjd
�|	r�|jj|�r�tjd�|jj
|�|jo�d|k�r(tjd�|jj||jj|||d��n�|jdk�rXtjd�|jj||jj||��n�d|k�r�|�r�|jd��r�|dj��r�t|d�dk�r�tjd�|jj||jj|||d��n:d|k�r�|d�r�tjd�|jj||jj|||d��dS)zc
        Algorithm for caching requests.

        This assumes a requests Response object.
        ����i,i-zStatus code %s not in %sNzcontent-lengthz&Updating cache with response from "%s"Fzno-storeTzResponse header has "no-store"zRequest header has "no-store"z0Purging existing cache entry to honor "no-store"r3zCaching due to etag)�bodyzCaching permanant redirectr2zmax-agerz'Caching b/c date exists and max-age > 0r4zCaching b/c of expires header)r:r6r7rr/r?r@�lenr1r!r5r8rr;r�setr�dumps)
rrB�responserJZcacheable_status_codesZresponse_headersZcc_reqrCr!Zno_storerrr�cache_response�sd










 



zCacheController.cache_responsecsv|j|j�}|jj||jj|��}|s*|Sdg�|jjt�fdd�|jj	�D���d|_
|jj||jj||��|S)z�On a 304 we will get a new set of headers that we want to
        update our cached value with, assuming we have one.

        This should only ever be called when we've sent an ETag and
        gotten a 304 as the response.
        zcontent-lengthc3s&|]\}}|j��kr||fVqdS)N)r)r%�k�v)�excluded_headersrr�	<genexpr>Tsz9CacheController.update_cached_response.<locals>.<genexpr>rH)
r!r5rr9rr8r/�updater.�itemsr:rLrM)rrBrNr!Zcached_responser)rRr�update_cached_response6s
z&CacheController.update_cached_response)NTN)N)
�__name__�
__module__�__qualname__�__doc__r�classmethodr r!r1rFrGrOrVrrrrrs
y
Wr)rZZlogging�rer=r<Zemail.utilsrZpip._vendor.requests.structuresrrrZ	serializerZ	getLoggerrWr6�compilerr�objectrrrrr�<module>s

	_vendor/cachecontrol/__pycache__/wrapper.cpython-36.opt-1.pyc000064400000000745151733136320020133 0ustar003

�Pf��@s&ddlmZddlmZddd�ZdS)�)�CacheControlAdapter)�	DictCacheNTcCs6|pt�}t||||d�}|jd|�|jd|�|S)N)�cache_etags�
serializer�	heuristiczhttp://zhttps://)rrZmount)Zsess�cacherrr�adapter�r	�/usr/lib/python3.6/wrapper.py�CacheControls
r)NTNN)rrrrrr	r	r	r
�<module>s
_vendor/cachecontrol/__pycache__/cache.cpython-36.pyc000064400000003215151733136320016552 0ustar003

�Pf�@s4dZddlmZGdd�de�ZGdd�de�ZdS)zb
The cache object API for implementing caches. The default is a thread
safe in-memory dictionary.
�)�Lockc@s,eZdZdd�Zdd�Zdd�Zdd�Zd	S)
�	BaseCachecCs
t��dS)N)�NotImplemented)�self�key�r�/usr/lib/python3.6/cache.py�get
sz
BaseCache.getcCs
t��dS)N)r)rr�valuerrr�set
sz
BaseCache.setcCs
t��dS)N)r)rrrrr�deleteszBaseCache.deletecCsdS)Nr)rrrr�closeszBaseCache.closeN)�__name__�
__module__�__qualname__r	rrr
rrrrrsrc@s.eZdZd
dd�Zdd�Zdd�Zdd	�ZdS)�	DictCacheNcCst�|_|pi|_dS)N)r�lock�data)rZ	init_dictrrr�__init__szDictCache.__init__cCs|jj|d�S)N)rr	)rrrrrr	sz
DictCache.getc
Cs&|j�|jj||i�WdQRXdS)N)rr�update)rrr
rrrr sz
DictCache.setc	Cs,|j�||jkr|jj|�WdQRXdS)N)rr�pop)rrrrrr$s
zDictCache.delete)N)rrrrr	rrrrrrrs
rN)�__doc__Z	threadingr�objectrrrrrr�<module>s_vendor/cachecontrol/heuristics.py000064400000010055151733136320013405 0ustar00import calendar
import time

from email.utils import formatdate, parsedate, parsedate_tz

from datetime import datetime, timedelta

TIME_FMT = "%a, %d %b %Y %H:%M:%S GMT"


def expire_after(delta, date=None):
    """Return *date* advanced by the timedelta *delta*.

    When *date* is not supplied, the current (naive, local) time is used
    as the starting point.
    """
    if not date:
        date = datetime.now()
    return date + delta


def datetime_to_header(dt):
    """Format datetime *dt* as an RFC 2822 date string (interpreted as UTC)."""
    epoch_seconds = calendar.timegm(dt.timetuple())
    return formatdate(epoch_seconds)


class BaseHeuristic(object):
    """Base class for cache heuristics: subclasses adjust response headers."""

    def warning(self, response):
        """
        Return a valid 1xx warning header value describing the cache
        adjustments, or None to emit no Warning header.

        The response is provided too allow warnings like 113
        http://tools.ietf.org/html/rfc7234#section-5.5.4 where we need
        to explicitly say response is over 24 hours old.
        """
        return '110 - "Response is Stale"'

    def update_headers(self, response):
        """Return a dict of header adjustments for *response*.

        NOTE: This SHOULD always include some Warning header to
              signify that the response was cached by the client, not
              by way of the provided headers.
        """
        return {}

    def apply(self, response):
        """Merge this heuristic's header adjustments into *response*."""
        adjustments = self.update_headers(response)
        if not adjustments:
            # Nothing to change; hand the response back untouched.
            return response

        response.headers.update(adjustments)
        warning_value = self.warning(response)
        if warning_value is not None:
            response.headers.update({'Warning': warning_value})
        return response


class OneDayCache(BaseHeuristic):
    """
    Cache the response by providing an expires 1 day in the
    future.
    """
    def update_headers(self, response):
        """Add a one-day ``expires`` header unless one is already present."""
        if 'expires' in response.headers:
            return {}

        # Anchor the expiry on the response's own Date header.
        parsed = parsedate(response.headers['date'])
        one_day_out = expire_after(timedelta(days=1),
                                   date=datetime(*parsed[:6]))
        return {
            'expires': datetime_to_header(one_day_out),
            'cache-control': 'public',
        }


class ExpiresAfter(BaseHeuristic):
    """
    Cache **all** requests for a defined time period.
    """

    def __init__(self, **kw):
        # Keyword arguments are forwarded verbatim to timedelta,
        # e.g. ExpiresAfter(days=1, hours=12).
        self.delta = timedelta(**kw)

    def update_headers(self, response):
        """Stamp every response with an expiry ``self.delta`` in the future."""
        return {
            'expires': datetime_to_header(expire_after(self.delta)),
            'cache-control': 'public',
        }

    def warning(self, response):
        return (
            '110 - Automatically cached for %s. Response might be stale'
            % self.delta
        )


class LastModified(BaseHeuristic):
    """
    If there is no Expires header already, fall back on Last-Modified
    using the heuristic from
    http://tools.ietf.org/html/rfc7234#section-4.2.2
    to calculate a reasonable value.

    Firefox also does something like this per
    https://developer.mozilla.org/en-US/docs/Web/HTTP/Caching_FAQ
    http://lxr.mozilla.org/mozilla-release/source/netwerk/protocol/http/nsHttpResponseHead.cpp#397
    Unlike mozilla we limit this to 24-hr.
    """
    # Status codes a cache may store by default (RFC 7231 section 6.1).
    cacheable_by_default_statuses = set([
        200, 203, 204, 206, 300, 301, 404, 405, 410, 414, 501
    ])

    def update_headers(self, resp):
        """Derive an ``expires`` header from ``date``/``last-modified``.

        Returns an empty dict (no adjustment) when the response already
        carries explicit freshness information, is not cacheable by
        default, or its date headers are missing or unparseable.
        """
        headers = resp.headers

        if 'expires' in headers:
            return {}

        if 'cache-control' in headers and headers['cache-control'] != 'public':
            return {}

        if resp.status not in self.cacheable_by_default_statuses:
            return {}

        if 'date' not in headers or 'last-modified' not in headers:
            return {}

        # BUGFIX: parsedate_tz()/parsedate() return None for malformed
        # header values. Previously the Date header was fed straight into
        # calendar.timegm(), so a malformed Date raised TypeError (the
        # old "date is None" check ran too late and was dead code).
        parsed_date = parsedate_tz(headers['date'])
        last_modified = parsedate(headers['last-modified'])
        if parsed_date is None or last_modified is None:
            return {}

        date = calendar.timegm(parsed_date)
        now = time.time()
        current_age = max(0, now - date)
        # Heuristic freshness: 10% of the Date/Last-Modified interval,
        # capped at 24 hours (RFC 7234 section 4.2.2).
        delta = date - calendar.timegm(last_modified)
        freshness_lifetime = max(0, min(delta / 10, 24 * 3600))
        if freshness_lifetime <= current_age:
            return {}

        expires = date + freshness_lifetime
        return {'expires': time.strftime(TIME_FMT, time.gmtime(expires))}

    def warning(self, resp):
        # RFC 7234 permits heuristic expiration without a Warning header
        # for responses younger than 24 hours, which this class guarantees.
        return None
_vendor/cachecontrol/controller.py000064400000031340151733136320013406 0ustar00"""
The httplib2 algorithms ported for use with requests.
"""
import logging
import re
import calendar
import time
from email.utils import parsedate_tz

from pip._vendor.requests.structures import CaseInsensitiveDict

from .cache import DictCache
from .serialize import Serializer


logger = logging.getLogger(__name__)

URI = re.compile(r"^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?")


def parse_uri(uri):
    """Parses a URI using the regex given in Appendix B of RFC 3986.

        (scheme, authority, path, query, fragment) = parse_uri(uri)
    """
    match = re.match(
        r"^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?", uri)
    # Groups 2/4/5/7/9 carry the component values; the odd-numbered
    # groups capture the surrounding punctuation and are discarded.
    return (match.group(2), match.group(4), match.group(5),
            match.group(7), match.group(9))


class CacheController(object):
    """An interface to see if request should cached or not.

    Implements the httplib2 caching algorithms on top of a pluggable
    cache backend and serializer.
    """
    def __init__(self, cache=None, cache_etags=True, serializer=None):
        self.cache = cache or DictCache()
        self.cache_etags = cache_etags
        self.serializer = serializer or Serializer()

    @classmethod
    def _urlnorm(cls, uri):
        """Normalize the URL to create a safe key for the cache"""
        (scheme, authority, path, query, fragment) = parse_uri(uri)
        if not scheme or not authority:
            raise Exception("Only absolute URIs are allowed. uri = %s" % uri)

        # Scheme and host are case-insensitive (RFC 3986 section 6.2.2.1).
        scheme = scheme.lower()
        authority = authority.lower()

        if not path:
            path = "/"

        # Could do syntax based normalization of the URI before
        # computing the digest. See Section 6.2.2 of Std 66.
        request_uri = query and "?".join([path, query]) or path
        defrag_uri = scheme + "://" + authority + request_uri

        return defrag_uri

    @classmethod
    def cache_url(cls, uri):
        """Return the normalized cache key for *uri*."""
        return cls._urlnorm(uri)

    def parse_cache_control(self, headers):
        """
        Parse the cache control headers returning a dictionary with values
        for the different directives.

        Valued directives (e.g. ``max-age=3600``) map to their *string*
        value; bare directives (e.g. ``no-store``) map to ``1``.
        """
        retval = {}

        cc_header = 'cache-control'
        if 'Cache-Control' in headers:
            cc_header = 'Cache-Control'

        if cc_header in headers:
            parts = headers[cc_header].split(',')
            parts_with_args = [
                tuple([x.strip().lower() for x in part.split("=", 1)])
                for part in parts if -1 != part.find("=")
            ]
            parts_wo_args = [
                (name.strip().lower(), 1)
                for name in parts if -1 == name.find("=")
            ]
            retval = dict(parts_with_args + parts_wo_args)
        return retval

    def cached_request(self, request):
        """
        Return a cached response if it exists in the cache, otherwise
        return False.
        """
        cache_url = self.cache_url(request.url)
        logger.debug('Looking up "%s" in the cache', cache_url)
        cc = self.parse_cache_control(request.headers)

        # Bail out if the request insists on fresh data
        if 'no-cache' in cc:
            logger.debug('Request header has "no-cache", cache bypassed')
            return False

        # BUG FIX: parse_cache_control() stores directive values as
        # strings, so the original comparison ``cc['max-age'] == 0`` was
        # always False and a "max-age=0" request never bypassed the
        # cache. Convert before comparing.
        if 'max-age' in cc:
            try:
                max_age = int(cc['max-age'])
            except ValueError:
                max_age = None
            if max_age == 0:
                logger.debug('Request header has "max_age" as 0, cache bypassed')
                return False

        # Request allows serving from the cache, let's see if we find something
        cache_data = self.cache.get(cache_url)
        if cache_data is None:
            logger.debug('No cache entry available')
            return False

        # Check whether it can be deserialized
        resp = self.serializer.loads(request, cache_data)
        if not resp:
            logger.warning('Cache entry deserialization failed, entry ignored')
            return False

        # If we have a cached 301, return it immediately. We don't
        # need to test our response for other headers b/c it is
        # intrinsically "cacheable" as it is Permanent.
        # See:
        #   https://tools.ietf.org/html/rfc7231#section-6.4.2
        #
        # Client can try to refresh the value by repeating the request
        # with cache busting headers as usual (ie no-cache).
        if resp.status == 301:
            msg = ('Returning cached "301 Moved Permanently" response '
                   '(ignoring date and etag information)')
            logger.debug(msg)
            return resp

        headers = CaseInsensitiveDict(resp.headers)
        if not headers or 'date' not in headers:
            if 'etag' not in headers:
                # Without date or etag, the cached response can never be used
                # and should be deleted.
                logger.debug('Purging cached response: no date or etag')
                self.cache.delete(cache_url)
            logger.debug('Ignoring cached response: no date')
            return False

        now = time.time()
        date = calendar.timegm(
            parsedate_tz(headers['date'])
        )
        current_age = max(0, now - date)
        logger.debug('Current age based on date: %i', current_age)

        # TODO: There is an assumption that the result will be a
        #       urllib3 response object. This may not be best since we
        #       could probably avoid instantiating or constructing the
        #       response until we know we need it.
        resp_cc = self.parse_cache_control(headers)

        # determine freshness
        freshness_lifetime = 0

        # Check the max-age pragma in the cache control header
        if 'max-age' in resp_cc and resp_cc['max-age'].isdigit():
            freshness_lifetime = int(resp_cc['max-age'])
            logger.debug('Freshness lifetime from max-age: %i',
                         freshness_lifetime)

        # If there isn't a max-age, check for an expires header
        elif 'expires' in headers:
            expires = parsedate_tz(headers['expires'])
            if expires is not None:
                expire_time = calendar.timegm(expires) - date
                freshness_lifetime = max(0, expire_time)
                logger.debug("Freshness lifetime from expires: %i",
                             freshness_lifetime)

        # Determine if we are setting freshness limit in the
        # request. Note, this overrides what was in the response.
        if 'max-age' in cc:
            try:
                freshness_lifetime = int(cc['max-age'])
                logger.debug('Freshness lifetime from request max-age: %i',
                             freshness_lifetime)
            except ValueError:
                freshness_lifetime = 0

        if 'min-fresh' in cc:
            try:
                min_fresh = int(cc['min-fresh'])
            except ValueError:
                min_fresh = 0
            # adjust our current age by our min fresh
            current_age += min_fresh
            logger.debug('Adjusted current age from min-fresh: %i',
                         current_age)

        # Return entry if it is fresh enough
        if freshness_lifetime > current_age:
            logger.debug('The response is "fresh", returning cached response')
            logger.debug('%i > %i', freshness_lifetime, current_age)
            return resp

        # we're not fresh. If we don't have an Etag, clear it out
        if 'etag' not in headers:
            logger.debug(
                'The cached response is "stale" with no etag, purging'
            )
            self.cache.delete(cache_url)

        # return the original handler
        return False

    def conditional_headers(self, request):
        """Return validation headers (If-None-Match / If-Modified-Since)
        built from any cached response for *request*'s URL."""
        cache_url = self.cache_url(request.url)
        resp = self.serializer.loads(request, self.cache.get(cache_url))
        new_headers = {}

        if resp:
            headers = CaseInsensitiveDict(resp.headers)

            if 'etag' in headers:
                new_headers['If-None-Match'] = headers['ETag']

            if 'last-modified' in headers:
                new_headers['If-Modified-Since'] = headers['Last-Modified']

        return new_headers

    def cache_response(self, request, response, body=None):
        """
        Algorithm for caching requests.

        This assumes a requests Response object.
        """
        # From httplib2: Don't cache 206's since we aren't going to
        #                handle byte range requests
        cacheable_status_codes = [200, 203, 300, 301]
        if response.status not in cacheable_status_codes:
            logger.debug(
                'Status code %s not in %s',
                response.status,
                cacheable_status_codes
            )
            return

        response_headers = CaseInsensitiveDict(response.headers)

        # If we've been given a body, our response has a Content-Length, that
        # Content-Length is valid then we can check to see if the body we've
        # been given matches the expected size, and if it doesn't we'll just
        # skip trying to cache it.
        if (body is not None and
                "content-length" in response_headers and
                response_headers["content-length"].isdigit() and
                int(response_headers["content-length"]) != len(body)):
            return

        cc_req = self.parse_cache_control(request.headers)
        cc = self.parse_cache_control(response_headers)

        cache_url = self.cache_url(request.url)
        logger.debug('Updating cache with response from "%s"', cache_url)

        # Delete it from the cache if we happen to have it stored there
        no_store = False
        if cc.get('no-store'):
            no_store = True
            logger.debug('Response header has "no-store"')
        if cc_req.get('no-store'):
            no_store = True
            logger.debug('Request header has "no-store"')
        if no_store and self.cache.get(cache_url):
            logger.debug('Purging existing cache entry to honor "no-store"')
            self.cache.delete(cache_url)

        # If we've been given an etag, then keep the response
        if self.cache_etags and 'etag' in response_headers:
            logger.debug('Caching due to etag')
            self.cache.set(
                cache_url,
                self.serializer.dumps(request, response, body=body),
            )

        # Add to the cache any 301s. We do this before looking at
        # the Date headers.
        elif response.status == 301:
            logger.debug('Caching permanent redirect')
            self.cache.set(
                cache_url,
                self.serializer.dumps(request, response)
            )

        # Add to the cache if the response headers demand it. If there
        # is no date header then we can't do anything about expiring
        # the cache.
        elif 'date' in response_headers:
            # cache when there is a max-age > 0
            if cc and cc.get('max-age'):
                if cc['max-age'].isdigit() and int(cc['max-age']) > 0:
                    logger.debug('Caching b/c date exists and max-age > 0')
                    self.cache.set(
                        cache_url,
                        self.serializer.dumps(request, response, body=body),
                    )

            # If the request can expire, it means we should cache it
            # in the meantime.
            elif 'expires' in response_headers:
                if response_headers['expires']:
                    logger.debug('Caching b/c of expires header')
                    self.cache.set(
                        cache_url,
                        self.serializer.dumps(request, response, body=body),
                    )

    def update_cached_response(self, request, response):
        """On a 304 we will get a new set of headers that we want to
        update our cached value with, assuming we have one.

        This should only ever be called when we've sent an ETag and
        gotten a 304 as the response.
        """
        cache_url = self.cache_url(request.url)

        cached_response = self.serializer.loads(
            request,
            self.cache.get(cache_url)
        )

        if not cached_response:
            # we didn't have a cached response
            return response

        # Lets update our headers with the headers from the new request:
        # http://tools.ietf.org/html/draft-ietf-httpbis-p4-conditional-26#section-4.1
        #
        # The server isn't supposed to send headers that would make
        # the cached body invalid. But... just in case, we'll be sure
        # to strip out ones we know that might be problematic due to
        # typical assumptions.
        excluded_headers = [
            "content-length",
        ]

        cached_response.headers.update(
            dict((k, v) for k, v in response.headers.items()
                 if k.lower() not in excluded_headers)
        )

        # we want a 200 b/c we have content via the cache
        cached_response.status = 200

        # update our cache
        self.cache.set(
            cache_url,
            self.serializer.dumps(request, cached_response),
        )

        return cached_response
_vendor/cachecontrol/serialize.py000064400000014610151733136320013213 0ustar00import base64
import io
import json
import zlib

from pip._vendor.requests.structures import CaseInsensitiveDict

from .compat import HTTPResponse, pickle, text_type


def _b64_encode_bytes(b):
    return base64.b64encode(b).decode("ascii")


def _b64_encode_str(s):
    """Base64-encode a text string by UTF-8 encoding it first."""
    raw = s.encode("utf8")
    return _b64_encode_bytes(raw)


def _b64_encode(s):
    """Base64-encode *s*, accepting either text or bytes."""
    if isinstance(s, text_type):
        encoder = _b64_encode_str
    else:
        encoder = _b64_encode_bytes
    return encoder(s)


def _b64_decode_bytes(b):
    return base64.b64decode(b.encode("ascii"))


def _b64_decode_str(s):
    """Decode a base64 ASCII str back into a UTF-8 text string."""
    raw = _b64_decode_bytes(s)
    return raw.decode("utf8")


class Serializer(object):
    """Serialize/deserialize (request, response) pairs for cache storage.

    The serialized payload carries a ``cc=N`` version prefix; ``loads``
    dispatches to the matching ``_loads_vN`` method so entries written by
    older library versions can still be read (or safely treated as misses).
    """

    def dumps(self, request, response, body=None):
        """Serialize *response* (a urllib3 HTTPResponse) to bytes.

        :param request: The request that produced *response*; used to
            record the request-header values named by any ``Vary`` header.
        :param body: The already-consumed response body, if available.
            When omitted the body is read from *response* without
            decoding and the file handle is reset.
        """
        response_headers = CaseInsensitiveDict(response.headers)

        if body is None:
            body = response.read(decode_content=False)

            # NOTE: 99% sure this is dead code. I'm only leaving it
            #       here b/c I don't have a test yet to prove
            #       it. Basically, before using
            #       `cachecontrol.filewrapper.CallbackFileWrapper`,
            #       this made an effort to reset the file handle. The
            #       `CallbackFileWrapper` short circuits this code by
            #       setting the body as the content is consumed, the
            #       result being a `body` argument is *always* passed
            #       into cache_response, and in turn,
            #       `Serializer.dump`.
            response._fp = io.BytesIO(body)

        # Body, headers, and reason are base64-encoded so the payload
        # survives the JSON round trip below.
        data = {
            "response": {
                "body": _b64_encode_bytes(body),
                "headers": dict(
                    (_b64_encode(k), _b64_encode(v))
                    for k, v in response.headers.items()
                ),
                "status": response.status,
                "version": response.version,
                "reason": _b64_encode_str(response.reason),
                "strict": response.strict,
                "decode_content": response.decode_content,
            },
        }

        # Construct our vary headers
        data["vary"] = {}
        if "vary" in response_headers:
            varied_headers = response_headers['vary'].split(',')
            for header in varied_headers:
                header = header.strip()
                data["vary"][header] = request.headers.get(header, None)

        # Encode our Vary headers to ensure they can be serialized as JSON
        data["vary"] = dict(
            (_b64_encode(k), _b64_encode(v) if v is not None else v)
            for k, v in data["vary"].items()
        )

        # Version tag + zlib-compressed JSON document (format v2).
        return b",".join([
            b"cc=2",
            zlib.compress(
                json.dumps(
                    data, separators=(",", ":"), sort_keys=True,
                ).encode("utf8"),
            ),
        ])

    def loads(self, request, data):
        """Deserialize cached *data* for *request*.

        Returns an HTTPResponse, or None when *data* is empty, in an
        unknown format version, or otherwise unusable (a cache miss).
        """
        # Short circuit if we've been given an empty set of data
        if not data:
            return

        # Determine what version of the serializer the data was serialized
        # with
        try:
            ver, data = data.split(b",", 1)
        except ValueError:
            ver = b"cc=0"

        # Make sure that our "ver" is actually a version and isn't a false
        # positive from a , being in the data stream.
        if ver[:3] != b"cc=":
            data = ver + data
            ver = b"cc=0"

        # Get the version number out of the cc=N
        ver = ver.split(b"=", 1)[-1].decode("ascii")

        # Dispatch to the actual load method for the given version
        try:
            return getattr(self, "_loads_v{0}".format(ver))(request, data)
        except AttributeError:
            # This is a version we don't have a loads function for, so we'll
            # just treat it as a miss and return None
            return

    def prepare_response(self, request, cached):
        """Verify our vary headers match and construct a real urllib3
        HTTPResponse object.

        Returns None (a miss) when the cached Vary values don't match the
        current request's headers.
        """
        # Special case the '*' Vary value as it means we cannot actually
        # determine if the cached response is suitable for this request.
        if "*" in cached.get("vary", {}):
            return

        # Ensure that the Vary headers for the cached response match our
        # request
        for header, value in cached.get("vary", {}).items():
            if request.headers.get(header, None) != value:
                return

        body_raw = cached["response"].pop("body")

        headers = CaseInsensitiveDict(data=cached['response']['headers'])
        if headers.get('transfer-encoding', '') == 'chunked':
            # The stored body is already de-chunked; advertising chunked
            # transfer-encoding again would confuse consumers.
            headers.pop('transfer-encoding')

        cached['response']['headers'] = headers

        try:
            body = io.BytesIO(body_raw)
        except TypeError:
            # This can happen if cachecontrol serialized to v1 format (pickle)
            # using Python 2. A Python 2 str(byte string) will be unpickled as
            # a Python 3 str (unicode string), which will cause the above to
            # fail with:
            #
            #     TypeError: 'str' does not support the buffer interface
            body = io.BytesIO(body_raw.encode('utf8'))

        return HTTPResponse(
            body=body,
            preload_content=False,
            **cached["response"]
        )

    def _loads_v0(self, request, data):
        # The original legacy cache data. This doesn't contain enough
        # information to construct everything we need, so we'll treat this as
        # a miss.
        return

    def _loads_v1(self, request, data):
        # Format v1: a Python pickle. Only entries written by this library
        # are ever read here, but corrupted data is treated as a miss.
        try:
            cached = pickle.loads(data)
        except ValueError:
            return

        return self.prepare_response(request, cached)

    def _loads_v2(self, request, data):
        # Format v2: zlib-compressed JSON with base64-encoded binary
        # fields (the mirror image of ``dumps``).
        try:
            cached = json.loads(zlib.decompress(data).decode("utf8"))
        except ValueError:
            return

        # We need to decode the items that we've base64 encoded
        cached["response"]["body"] = _b64_decode_bytes(
            cached["response"]["body"]
        )
        cached["response"]["headers"] = dict(
            (_b64_decode_str(k), _b64_decode_str(v))
            for k, v in cached["response"]["headers"].items()
        )
        cached["response"]["reason"] = _b64_decode_str(
            cached["response"]["reason"],
        )
        cached["vary"] = dict(
            (_b64_decode_str(k), _b64_decode_str(v) if v is not None else v)
            for k, v in cached["vary"].items()
        )

        return self.prepare_response(request, cached)
_vendor/cachecontrol/cache.py000064400000001426151733136320012270 0ustar00"""
The cache object API for implementing caches. The default is a thread
safe in-memory dictionary.
"""
from threading import Lock


class BaseCache(object):
    """Abstract cache interface; concrete caches implement get/set/delete.

    BUG FIX: the original methods raised ``NotImplemented()``, but
    ``NotImplemented`` is a constant and is not callable, so calling any
    unimplemented method produced ``TypeError: 'NotImplemented' object is
    not callable`` instead of the intended ``NotImplementedError``.
    """

    def get(self, key):
        """Return the cached value for *key*, or None if missing."""
        raise NotImplementedError()

    def set(self, key, value):
        """Store *value* under *key*."""
        raise NotImplementedError()

    def delete(self, key):
        """Remove *key* from the cache if present."""
        raise NotImplementedError()

    def close(self):
        """Release any resources held by the cache (no-op by default)."""
        pass


class DictCache(BaseCache):
    """Thread-safe in-memory cache backed by a plain dict."""

    def __init__(self, init_dict=None):
        self.lock = Lock()
        self.data = init_dict or {}

    def get(self, key):
        """Return the value for *key*, or None when absent."""
        return self.data.get(key, None)

    def set(self, key, value):
        """Store *value* under *key*, serialized by the lock."""
        with self.lock:
            self.data[key] = value

    def delete(self, key):
        """Remove *key* if present; silently ignore missing keys."""
        with self.lock:
            self.data.pop(key, None)
_vendor/cachecontrol/__init__.py000064400000000456151733136320012766 0ustar00"""CacheControl import Interface.

Make it easy to import from cachecontrol without long namespaces.
"""
__author__ = 'Eric Larson'
__email__ = 'eric@ionrock.org'
__version__ = '0.11.7'

from .wrapper import CacheControl
from .adapter import CacheControlAdapter
from .controller import CacheController
_vendor/cachecontrol/_cmd.py000064400000002450151733136320012125 0ustar00import logging

from pip._vendor import requests

from pip._vendor.cachecontrol.adapter import CacheControlAdapter
from pip._vendor.cachecontrol.cache import DictCache
from pip._vendor.cachecontrol.controller import logger

from argparse import ArgumentParser


def setup_logging():
    """Enable DEBUG-level output for the cachecontrol logger on stderr."""
    logger.setLevel(logging.DEBUG)
    logger.addHandler(logging.StreamHandler())


def get_session():
    """Build a requests Session whose HTTP(S) traffic flows through a
    caching adapter backed by an in-memory DictCache."""
    adapter = CacheControlAdapter(
        DictCache(),
        cache_etags=True,
        serializer=None,
        heuristic=None,
    )
    session = requests.Session()
    for prefix in ('http://', 'https://'):
        session.mount(prefix, adapter)

    # Expose the controller so the caller can inspect the cache directly.
    session.cache_controller = adapter.controller
    return session


def get_args():
    """Parse the command line: a single positional URL argument."""
    arg_parser = ArgumentParser()
    arg_parser.add_argument('url', help='The URL to try and cache')
    return arg_parser.parse_args()


def main(args=None):
    """Fetch ``args.url``, cache the response, and report whether a
    subsequent lookup hits the cache.

    :param args: Pre-parsed namespace with a ``url`` attribute. BUG FIX:
        the original unconditionally overwrote this parameter with
        ``get_args()``, making it impossible to drive ``main`` directly;
        now the command line is only parsed when *args* is omitted.
    """
    if args is None:
        args = get_args()
    sess = get_session()

    # Make a request to get a response
    resp = sess.get(args.url)

    # Turn on logging
    setup_logging()

    # try setting the cache
    sess.cache_controller.cache_response(resp.request, resp.raw)

    # Now try to get it
    if sess.cache_controller.cached_request(resp.request):
        print('Cached!')
    else:
        print('Not cached :(')


# Allow running this module directly as a cache-diagnostic script.
if __name__ == '__main__':
    main()
_vendor/cachecontrol/adapter.py000064400000011000151733136320012632 0ustar00import types
import functools

from pip._vendor.requests.adapters import HTTPAdapter

from .controller import CacheController
from .cache import DictCache
from .filewrapper import CallbackFileWrapper


class CacheControlAdapter(HTTPAdapter):
    """requests transport adapter that serves and populates an HTTP cache."""

    # Successful requests with these methods invalidate the cached entry
    # for their URL (see build_response).
    invalidating_methods = set(['PUT', 'DELETE'])

    def __init__(self, cache=None,
                 cache_etags=True,
                 controller_class=None,
                 serializer=None,
                 heuristic=None,
                 *args, **kw):
        """Create the adapter.

        :param cache: Cache backend; defaults to an in-memory DictCache.
        :param cache_etags: Whether responses with ETags are cached.
        :param controller_class: Alternate CacheController factory.
        :param serializer: Alternate serializer for cache entries.
        :param heuristic: Optional freshness heuristic applied to
            uncached GET responses before caching.
        """
        super(CacheControlAdapter, self).__init__(*args, **kw)
        self.cache = cache or DictCache()
        self.heuristic = heuristic

        controller_factory = controller_class or CacheController
        self.controller = controller_factory(
            self.cache,
            cache_etags=cache_etags,
            serializer=serializer,
        )

    def send(self, request, **kw):
        """
        Send a request. Use the request information to see if it
        exists in the cache and cache the response if we need to and can.
        """
        # Only GET requests are ever answered from the cache.
        if request.method == 'GET':
            cached_response = self.controller.cached_request(request)
            if cached_response:
                return self.build_response(request, cached_response,
                                           from_cache=True)

            # check for etags and add headers if appropriate
            request.headers.update(
                self.controller.conditional_headers(request)
            )

        resp = super(CacheControlAdapter, self).send(request, **kw)

        return resp

    def build_response(self, request, response, from_cache=False):
        """
        Build a response by making a request or using the cache.

        This will end up calling send and returning a potentially
        cached response
        """
        if not from_cache and request.method == 'GET':
            # Check for any heuristics that might update headers
            # before trying to cache.
            if self.heuristic:
                response = self.heuristic.apply(response)

            # apply any expiration heuristics
            if response.status == 304:
                # We must have sent an ETag request. This could mean
                # that we've been expired already or that we simply
                # have an etag. In either case, we want to try and
                # update the cache if that is the case.
                cached_response = self.controller.update_cached_response(
                    request, response
                )

                if cached_response is not response:
                    from_cache = True

                # We are done with the server response, read a
                # possible response body (compliant servers will
                # not return one, but we cannot be 100% sure) and
                # release the connection back to the pool.
                response.read(decode_content=False)
                response.release_conn()

                response = cached_response

            # We always cache the 301 responses
            elif response.status == 301:
                self.controller.cache_response(request, response)
            else:
                # Wrap the response file with a wrapper that will cache the
                #   response when the stream has been consumed.
                response._fp = CallbackFileWrapper(
                    response._fp,
                    functools.partial(
                        self.controller.cache_response,
                        request,
                        response,
                    )
                )
                if response.chunked:
                    # Chunked bodies are read via _update_chunk_length
                    # rather than closing the fp, so hook it to trigger
                    # the cache callback once the final chunk arrives
                    # (chunk_left == 0).
                    super_update_chunk_length = response._update_chunk_length

                    def _update_chunk_length(self):
                        super_update_chunk_length()
                        if self.chunk_left == 0:
                            self._fp._close()
                    response._update_chunk_length = types.MethodType(_update_chunk_length, response)

        resp = super(CacheControlAdapter, self).build_response(
            request, response
        )

        # See if we should invalidate the cache.
        if request.method in self.invalidating_methods and resp.ok:
            cache_url = self.controller.cache_url(request.url)
            self.cache.delete(cache_url)

        # Give the request a from_cache attr to let people use it
        resp.from_cache = from_cache

        return resp

    def close(self):
        """Close the cache backend, then the underlying HTTPAdapter."""
        self.cache.close()
        super(CacheControlAdapter, self).close()
_vendor/cachecontrol/wrapper.py000064400000000762151733136320012707 0ustar00from .adapter import CacheControlAdapter
from .cache import DictCache


def CacheControl(sess,
                 cache=None,
                 cache_etags=True,
                 serializer=None,
                 heuristic=None):
    """Wrap *sess* so that its HTTP and HTTPS requests are transparently
    cached, and return the same session object."""
    cache = cache or DictCache()
    adapter = CacheControlAdapter(
        cache,
        cache_etags=cache_etags,
        serializer=serializer,
        heuristic=heuristic,
    )
    for scheme in ('http://', 'https://'):
        sess.mount(scheme, adapter)

    return sess
_vendor/cachecontrol/compat.py000064400000000574151733136320012513 0ustar00try:
    from urllib.parse import urljoin
except ImportError:
    from urlparse import urljoin


try:
    import cPickle as pickle
except ImportError:
    import pickle


from pip._vendor.urllib3.response import HTTPResponse
from pip._vendor.urllib3.util import is_fp_closed

# Replicate some six behaviour
try:
    text_type = (unicode,)
except NameError:
    text_type = (str,)
_vendor/cachecontrol/filewrapper.py000064400000004743151733136320013552 0ustar00from io import BytesIO


class CallbackFileWrapper(object):
    """
    Small wrapper around a fp object which will tee everything read into a
    buffer, and when that file is closed it will execute a callback with the
    contents of that buffer.

    All attributes are proxied to the underlying file object.

    This class uses members with a double underscore (__) leading prefix so as
    not to accidentally shadow an attribute.
    """

    def __init__(self, fp, callback):
        # __buf accumulates every byte read so the callback can receive
        # the complete body once the stream is exhausted.
        self.__buf = BytesIO()
        self.__fp = fp
        self.__callback = callback

    def __getattr__(self, name):
        # The vagaries of garbage collection mean that self.__fp is
        # not always set.  By using __getattribute__ and the private
        # name[0] allows looking up the attribute value and raising an
        # AttributeError when it doesn't exist. This stops things from
        # infinitely recursing calls to getattr in the case where
        # self.__fp hasn't been set.
        #
        # [0] https://docs.python.org/2/reference/expressions.html#atom-identifiers
        fp = self.__getattribute__('_CallbackFileWrapper__fp')
        return getattr(fp, name)

    def __is_fp_closed(self):
        # Best-effort check for whether the wrapped stream is exhausted;
        # different fp implementations expose this differently.
        try:
            return self.__fp.fp is None
        except AttributeError:
            pass

        try:
            return self.__fp.closed
        except AttributeError:
            pass

        # We just don't cache it then.
        # TODO: Add some logging here...
        return False

    def _close(self):
        # Fire the callback (at most once) with everything that was read.
        if self.__callback:
            self.__callback(self.__buf.getvalue())

        # We assign this to None here, because otherwise we can get into
        # really tricky problems where the CPython interpreter dead locks
        # because the callback is holding a reference to something which
        # has a __del__ method. Setting this to None breaks the cycle
        # and allows the garbage collector to do its thing normally.
        self.__callback = None

    def read(self, amt=None):
        """Read from the wrapped fp, teeing the data into the buffer."""
        data = self.__fp.read(amt)
        self.__buf.write(data)
        if self.__is_fp_closed():
            self._close()

        return data

    def _safe_read(self, amt):
        """Mirror httplib's ``_safe_read``, teeing data into the buffer.

        The 2-byte chunk-terminating CRLF reads are passed through
        without buffering so they don't pollute the captured body.
        """
        data = self.__fp._safe_read(amt)
        if amt == 2 and data == b'\r\n':
            # urllib executes this read to toss the CRLF at the end
            # of the chunk.
            return data

        self.__buf.write(data)
        if self.__is_fp_closed():
            self._close()

        return data
_vendor/cachecontrol/caches/__pycache__/__init__.cpython-36.pyc000064400000001004151733136320020466 0ustar003

�Pfq�@stddlmZyddlmZWn$ek
r@ed�Zee�YnXyddlZddlm	Z	Wnek
rnYnXdS)�)�dedent�)�	FileCachez�
    NOTE: In order to use the FileCache you must have
    lockfile installed. You can install it via pip:
      pip install lockfile
    N)�
RedisCache)
�textwraprZ
file_cacher�ImportErrorZnotice�printZredisZredis_cacher�r	r	�/usr/lib/python3.6/__init__.py�<module>s_vendor/cachecontrol/caches/__pycache__/redis_cache.cpython-36.pyc000064400000003123151733136320021164 0ustar003

�Pf��@s4ddlmZddlmZdd�ZGdd�de�ZdS)�)�division)�datetimecCs<t|d�r|j�S|j}|j|jdd}||ddS)zPython 2.6 compatability�
total_seconds�i�
�i@Bi@B)�hasattrrZmicrosecondsZsecondsZdays)ZtdZmsZsecs�r	�!/usr/lib/python3.6/redis_cache.pyrs

rc@s>eZdZdd�Zdd�Zddd�Zdd	�Zd
d�Zdd
�ZdS)�
RedisCachecCs
||_dS)N)�conn)�selfrr	r	r
�__init__szRedisCache.__init__cCs|jj|�S)N)r�get)r
�keyr	r	r
rszRedisCache.getNcCs8|s|jj||�n |tj�}|jj|t|�|�dS)N)r�setrZnowZsetexr)r
r�valueZexpiresr	r	r
rszRedisCache.setcCs|jj|�dS)N)r�delete)r
rr	r	r
rszRedisCache.deletecCs$x|jj�D]}|jj|�qWdS)zIHelper for clearing all the keys in a database. Use with
        caution!N)r�keysr)r
rr	r	r
�clear"szRedisCache.clearcCs|jj�dS)N)rZ
disconnect)r
r	r	r
�close(szRedisCache.close)N)	�__name__�
__module__�__qualname__rrrrrrr	r	r	r
rs
rN)Z
__future__rrr�objectrr	r	r	r
�<module>s
_vendor/cachecontrol/caches/__pycache__/__init__.cpython-36.opt-1.pyc000064400000001004151733136320021425 0ustar003

�Pfq�@stddlmZyddlmZWn$ek
r@ed�Zee�YnXyddlZddlm	Z	Wnek
rnYnXdS)�)�dedent�)�	FileCachez�
    NOTE: In order to use the FileCache you must have
    lockfile installed. You can install it via pip:
      pip install lockfile
    N)�
RedisCache)
�textwraprZ
file_cacher�ImportErrorZnotice�printZredisZredis_cacher�r	r	�/usr/lib/python3.6/__init__.py�<module>s_vendor/cachecontrol/caches/__pycache__/file_cache.cpython-36.pyc000064400000005231151733136320020777 0ustar003

�Pf�
�@sdddlZddlZddlmZddlmZddlmZddlm	Z	dd�Z
Gd	d
�d
e�Zdd�ZdS)
�N)�LockFile)�
MkdirLockFile�)�	BaseCache)�CacheControllercCs�tj}|tjtjBO}ttd�r*|tjO}ttd�r>|tjO}ytj|�Wntt	fk
rdYnXtj
|||�}ytj|d�Stj|��YnXdS)N�
O_NOFOLLOW�O_BINARY�wb)
�os�O_WRONLY�O_CREAT�O_EXCL�hasattrrr�remove�IOError�OSError�open�fdopen�close)�filenameZfmode�flags�fd�r� /usr/lib/python3.6/file_cache.py�_secure_open_writes 




rc@sBeZdZddd�Zedd��Zd	d
�Zdd�Zd
d�Zdd�Z	dS)�	FileCacheF��NcCsN|dk	r|dk	rtd��|r t}|dkr,t}||_||_||_||_||_dS)Nz/Cannot use use_dir_lock and lock_class together)�
ValueErrorrr�	directory�forever�filemode�dirmode�
lock_class)�selfrr r!r"Zuse_dir_lockr#rrr�__init__4szFileCache.__init__cCstj|j��j�S)N)�hashlibZsha224�encodeZ	hexdigest)�xrrrr'GszFileCache.encodecCs4|j|�}t|dd��|g}tjj|jf|��S)N�)r'�listr
�path�joinr)r$�nameZhashed�partsrrr�_fnKs
z
FileCache._fnc	Cs8|j|�}tjj|�sdSt|d��
}|j�SQRXdS)N�rb)r/r
r+�existsr�read)r$�keyr-�fhrrr�getRs

z
FileCache.getcCs||j|�}ytjtjj|�|j�Wnttfk
r<YnX|j|��*}t	|j|j
��}|j|�WdQRXWdQRXdS)N)r/r
�makedirsr+�dirnamer"rrr#rr!�write)r$r3�valuer-�lockr4rrr�setZs
z
FileCache.setcCs|j|�}|jstj|�dS)N)r/r r
r)r$r3r-rrr�deletehs
zFileCache.delete)FrrNN)
�__name__�
__module__�__qualname__r%�staticmethodr'r/r5r;r<rrrrr3s
rcCstj|�}|j|�S)z\Return the file cache path based on the URL.

    This does not ensure the file exists!
    )rZ	cache_urlr/)ZurlZ	filecacher3rrr�url_to_file_pathns
rA)
r&r
Zpip._vendor.lockfilerZ"pip._vendor.lockfile.mkdirlockfiler�cacherZ
controllerrrrrArrrr�<module>s(;_vendor/cachecontrol/caches/__pycache__/redis_cache.cpython-36.opt-1.pyc000064400000003123151733136330022124 0ustar003

�Pf��@s4ddlmZddlmZdd�ZGdd�de�ZdS)�)�division)�datetimecCs<t|d�r|j�S|j}|j|jdd}||ddS)zPython 2.6 compatability�
total_seconds�i�
�i@Bi@B)�hasattrrZmicrosecondsZsecondsZdays)ZtdZmsZsecs�r	�!/usr/lib/python3.6/redis_cache.pyrs

rc@s>eZdZdd�Zdd�Zddd�Zdd	�Zd
d�Zdd
�ZdS)�
RedisCachecCs
||_dS)N)�conn)�selfrr	r	r
�__init__szRedisCache.__init__cCs|jj|�S)N)r�get)r
�keyr	r	r
rszRedisCache.getNcCs8|s|jj||�n |tj�}|jj|t|�|�dS)N)r�setrZnowZsetexr)r
r�valueZexpiresr	r	r
rszRedisCache.setcCs|jj|�dS)N)r�delete)r
rr	r	r
rszRedisCache.deletecCs$x|jj�D]}|jj|�qWdS)zIHelper for clearing all the keys in a database. Use with
        caution!N)r�keysr)r
rr	r	r
�clear"szRedisCache.clearcCs|jj�dS)N)rZ
disconnect)r
r	r	r
�close(szRedisCache.close)N)	�__name__�
__module__�__qualname__rrrrrrr	r	r	r
rs
rN)Z
__future__rrr�objectrr	r	r	r
�<module>s
_vendor/cachecontrol/caches/__pycache__/file_cache.cpython-36.opt-1.pyc000064400000005231151733136330021737 0ustar003

�Pf�
�@sdddlZddlZddlmZddlmZddlmZddlm	Z	dd�Z
Gd	d
�d
e�Zdd�ZdS)
�N)�LockFile)�
MkdirLockFile�)�	BaseCache)�CacheControllercCs�tj}|tjtjBO}ttd�r*|tjO}ttd�r>|tjO}ytj|�Wntt	fk
rdYnXtj
|||�}ytj|d�Stj|��YnXdS)N�
O_NOFOLLOW�O_BINARY�wb)
�os�O_WRONLY�O_CREAT�O_EXCL�hasattrrr�remove�IOError�OSError�open�fdopen�close)�filenameZfmode�flags�fd�r� /usr/lib/python3.6/file_cache.py�_secure_open_writes 




rc@sBeZdZddd�Zedd��Zd	d
�Zdd�Zd
d�Zdd�Z	dS)�	FileCacheF��NcCsN|dk	r|dk	rtd��|r t}|dkr,t}||_||_||_||_||_dS)Nz/Cannot use use_dir_lock and lock_class together)�
ValueErrorrr�	directory�forever�filemode�dirmode�
lock_class)�selfrr r!r"Zuse_dir_lockr#rrr�__init__4szFileCache.__init__cCstj|j��j�S)N)�hashlibZsha224�encodeZ	hexdigest)�xrrrr'GszFileCache.encodecCs4|j|�}t|dd��|g}tjj|jf|��S)N�)r'�listr
�path�joinr)r$�nameZhashed�partsrrr�_fnKs
z
FileCache._fnc	Cs8|j|�}tjj|�sdSt|d��
}|j�SQRXdS)N�rb)r/r
r+�existsr�read)r$�keyr-�fhrrr�getRs

z
FileCache.getcCs||j|�}ytjtjj|�|j�Wnttfk
r<YnX|j|��*}t	|j|j
��}|j|�WdQRXWdQRXdS)N)r/r
�makedirsr+�dirnamer"rrr#rr!�write)r$r3�valuer-�lockr4rrr�setZs
z
FileCache.setcCs|j|�}|jstj|�dS)N)r/r r
r)r$r3r-rrr�deletehs
zFileCache.delete)FrrNN)
�__name__�
__module__�__qualname__r%�staticmethodr'r/r5r;r<rrrrr3s
rcCstj|�}|j|�S)z\Return the file cache path based on the URL.

    This does not ensure the file exists!
    )rZ	cache_urlr/)ZurlZ	filecacher3rrr�url_to_file_pathns
rA)
r&r
Zpip._vendor.lockfilerZ"pip._vendor.lockfile.mkdirlockfiler�cacherZ
controllerrrrrArrrr�<module>s(;_vendor/cachecontrol/caches/__init__.py000064400000000561151733136330014212 0ustar00from textwrap import dedent

# Optional backend: FileCache depends on the "lockfile" package (imported by
# .file_cache).  When it is missing we print a notice instead of raising, so
# the rest of cachecontrol remains usable without file-based caching.
try:
    from .file_cache import FileCache
except ImportError:
    notice = dedent('''
    NOTE: In order to use the FileCache you must have
    lockfile installed. You can install it via pip:
      pip install lockfile
    ''')
    print(notice)


# Optional backend: RedisCache needs the "redis" client library.  Probe for
# the library first; silently skip this backend when it is not installed.
try:
    import redis
    from .redis_cache import RedisCache
except ImportError:
    pass
_vendor/cachecontrol/caches/redis_cache.py000064400000001715151733136330014706 0ustar00from __future__ import division

from datetime import datetime


def total_seconds(td):
    """Return the timedelta *td* expressed as a float number of seconds.

    Python 2.6 compatibility shim: delegates to the native
    ``timedelta.total_seconds`` when present, otherwise derives the value
    from the ``days``/``seconds``/``microseconds`` fields.
    """
    native = getattr(td, 'total_seconds', None)
    if native is not None:
        return native()

    whole_seconds = td.days * 24 * 3600 + td.seconds
    return (td.microseconds + whole_seconds * 10**6) / 10**6


class RedisCache(object):
    """Cache backend backed by a Redis server.

    *conn* is a redis-client-compatible object; the methods used here are
    ``get``, ``set``, ``setex``, ``delete``, ``keys`` and ``disconnect``.
    """

    def __init__(self, conn):
        self.conn = conn

    def get(self, key):
        """Return the cached value for *key* (None when absent)."""
        return self.conn.get(key)

    def set(self, key, value, expires=None):
        """Store *value* under *key*, optionally expiring at *expires*.

        *expires*, when given, is an absolute datetime; it is converted to
        a relative TTL for Redis' SETEX command.
        """
        if expires:
            ttl = expires - datetime.now()
            self.conn.setex(key, total_seconds(ttl), value)
        else:
            self.conn.set(key, value)

    def delete(self, key):
        """Remove *key* from the cache."""
        self.conn.delete(key)

    def clear(self):
        """Helper for clearing all the keys in a database. Use with
        caution!"""
        conn = self.conn
        for cached_key in conn.keys():
            conn.delete(cached_key)

    def close(self):
        """Drop the underlying server connection."""
        self.conn.disconnect()
_vendor/cachecontrol/caches/file_cache.py000064400000006714151733136330014523 0ustar00import hashlib
import os

from pip._vendor.lockfile import LockFile
from pip._vendor.lockfile.mkdirlockfile import MkdirLockFile

from ..cache import BaseCache
from ..controller import CacheController


def _secure_open_write(filename, fmode):
    # We only want to write to this file, so open it in write only mode
    flags = os.O_WRONLY

    # os.O_CREAT | os.O_EXCL will fail if the file already exists, so we only
    #  will open *new* files.
    # We specify this because we want to ensure that the mode we pass is the
    # mode of the file.
    flags |= os.O_CREAT | os.O_EXCL

    # Do not follow symlinks to prevent someone from making a symlink that
    # we follow and insecurely open a cache file.
    if hasattr(os, "O_NOFOLLOW"):
        flags |= os.O_NOFOLLOW

    # On Windows we'll mark this file as binary
    if hasattr(os, "O_BINARY"):
        flags |= os.O_BINARY

    # Before we open our file, we want to delete any existing file that is
    # there
    try:
        os.remove(filename)
    except (IOError, OSError):
        # The file must not exist already, so we can just skip ahead to opening
        pass

    # Open our file, the use of os.O_CREAT | os.O_EXCL will ensure that if a
    # race condition happens between the os.remove and this line, that an
    # error will be raised. Because we utilize a lockfile this should only
    # happen if someone is attempting to attack us.
    fd = os.open(filename, flags, fmode)
    try:
        return os.fdopen(fd, "wb")
    except:
        # An error occurred wrapping our FD in a file object
        os.close(fd)
        raise


class FileCache(BaseCache):
    """Cache backend that stores each entry in its own file on disk.

    Writes are serialized with a lockfile and performed through a securely
    created file descriptor (see ``_secure_open_write``).
    """

    def __init__(self, directory, forever=False, filemode=0o0600,
                 dirmode=0o0700, use_dir_lock=None, lock_class=None):
        """
        :param directory: Root directory for the cache files.
        :param forever: If True, ``delete`` becomes a no-op.
        :param filemode: Permission bits for created cache files.
        :param dirmode: Permission bits for created cache directories.
        :param use_dir_lock: Use a directory-based lock (``MkdirLockFile``);
            mutually exclusive with ``lock_class``.
        :param lock_class: Custom lock class; defaults to ``LockFile``.
        :raises ValueError: If both ``use_dir_lock`` and ``lock_class`` are
            supplied.
        """
        if use_dir_lock is not None and lock_class is not None:
            raise ValueError("Cannot use use_dir_lock and lock_class together")

        if use_dir_lock:
            lock_class = MkdirLockFile

        if lock_class is None:
            lock_class = LockFile

        self.directory = directory
        self.forever = forever
        self.filemode = filemode
        self.dirmode = dirmode
        self.lock_class = lock_class

    @staticmethod
    def encode(x):
        """Return the stable hex digest used as the on-disk name for *x*."""
        return hashlib.sha224(x.encode()).hexdigest()

    def _fn(self, name):
        """Map a cache key to its file path under ``self.directory``."""
        # NOTE: This method should not change as some may depend on it.
        #       See: https://github.com/ionrock/cachecontrol/issues/63
        hashed = self.encode(name)
        # Shard entries across 5 single-character subdirectories to avoid
        # huge flat directories.
        parts = list(hashed[:5]) + [hashed]
        return os.path.join(self.directory, *parts)

    def get(self, key):
        """Return the cached bytes for *key*, or None if not cached."""
        name = self._fn(key)
        if not os.path.exists(name):
            return None

        with open(name, 'rb') as fh:
            return fh.read()

    def set(self, key, value):
        """Write *value* (bytes) for *key* under the configured lock."""
        name = self._fn(key)

        # Make sure the directory exists
        try:
            os.makedirs(os.path.dirname(name), self.dirmode)
        except (IOError, OSError):
            pass

        with self.lock_class(name) as lock:
            # Write our actual file
            with _secure_open_write(lock.path, self.filemode) as fh:
                fh.write(value)

    def delete(self, key):
        """Remove the cache entry for *key* (no-op when ``forever`` is set)."""
        name = self._fn(key)
        if not self.forever:
            # Deleting a key that was never cached (or whose file was already
            # removed) should not raise at the caller.
            try:
                os.remove(name)
            except (IOError, OSError):
                pass


def url_to_file_path(url, filecache):
    """Map *url* to the on-disk path *filecache* would use for it.

    The path is derived from the canonical cache key; the file itself is
    neither created nor checked for existence.
    """
    cache_key = CacheController.cache_url(url)
    return filecache._fn(cache_key)
_vendor/urllib3/__pycache__/response.cpython-36.opt-1.pyc000064400000037433151733136330017226 0ustar003

�PfwY�@sddlmZddlmZddlZddlZddlZddlmZ	ddlm
Zddlm
Z
ddlmZmZmZmZmZmZmZdd	lmZmZmZdd
lmZddlmZm Z ddl!m"Z"m#Z#ej$e%�Z&Gd
d�de'�Z(Gdd�de'�Z)dd�Z*Gdd�dej+�Z,dS)�)�absolute_import)�contextmanagerN)�timeout)�error�)�HTTPHeaderDict)�BodyNotHttplibCompatible�
ProtocolError�DecodeError�ReadTimeoutError�ResponseNotChunked�IncompleteRead�
InvalidHeader)�string_types�binary_type�PY3)�http_client)�
HTTPException�BaseSSLError)�is_fp_closed�is_response_to_headc@s$eZdZdd�Zdd�Zdd�ZdS)�DeflateDecodercCsd|_t�|_tj�|_dS)NT)�
_first_tryr�_data�zlib�
decompressobj�_obj)�self�r�/usr/lib/python3.6/response.py�__init__szDeflateDecoder.__init__cCst|j|�S)N)�getattrr)r�namerrr�__getattr__szDeflateDecoder.__getattr__cCs�|s|S|js|jj|�S|j|7_y |jj|�}|rFd|_d|_|Stjk
r�d|_tjtj�|_z|j|j�Sd|_XYnXdS)NF)rr�
decompressrrrr�	MAX_WBITS)r�dataZdecompressedrrrr$ s"zDeflateDecoder.decompressN)�__name__�
__module__�__qualname__r r#r$rrrrrsrc@s$eZdZdd�Zdd�Zdd�ZdS)�GzipDecodercCstjdtj�|_dS)N�)rrr%r)rrrrr 9szGzipDecoder.__init__cCst|j|�S)N)r!r)rr"rrrr#<szGzipDecoder.__getattr__cCs|s|S|jj|�S)N)rr$)rr&rrrr$?szGzipDecoder.decompressN)r'r(r)r r#r$rrrrr*7sr*cCs|dkrt�St�S)N�gzip)r*r)�moderrr�_get_decoderEsr.c@seZdZdZddgZdddddgZdFdd�Zdd�Zdd�Ze	dd��Z
e	dd��Zdd�Zdd�Z
dd�Zdd�Zd d!�Zed"d#��ZdGd$d%�ZdId(d)�Zed*d+��Zd,d-�ZdJd.d/�Zd0d1�Zd2d3�Ze	d4d5��Zd6d7�Zd8d9�Zd:d;�Zd<d=�Zd>d?�Z d@dA�Z!dBdC�Z"dKdDdE�Z#d
S)L�HTTPResponsea	
    HTTP Response container.

    Backwards-compatible to httplib's HTTPResponse but the response ``body`` is
    loaded and decoded on-demand when the ``data`` property is accessed.  This
    class is also compatible with the Python standard library's :mod:`io`
    module, and can hence be treated as a readable object in the context of that
    framework.

    Extra parameters for behaviour not present in httplib.HTTPResponse:

    :param preload_content:
        If True, the response's body will be preloaded during construction.

    :param decode_content:
        If True, attempts to decode specific content-encoding's based on headers
        (like 'gzip' and 'deflate') will be skipped and raw data will be used
        instead.

    :param original_response:
        When this HTTPResponse wrapper is generated from an httplib.HTTPResponse
        object, it's convenient to include the original for debug purposes. It's
        otherwise unused.

    :param retries:
        The retries contains the last :class:`~urllib3.util.retry.Retry` that
        was used during the request.

    :param enforce_content_length:
        Enforce content length checking. Body returned by server must match
        value of Content-Length header, if present. Otherwise, raise error.
    r,Zdeflatei-i.i/i3i4�NrTFcCst|t�r||_n
t|�|_||_||_||_||_||_||_|
|_	d|_
d|_d|_|	|_
d|_|r|t|ttf�r|||_|
|_||_t|d�r�||_d|_d|_|jjdd�j�}dd�|jd�D�}d	|kr�d
|_|j|�|_|r�|jr�|j|d�|_dS)Nr�readFztransfer-encodingr0css|]}|j�VqdS)N)�strip)�.0�encrrr�	<genexpr>�sz(HTTPResponse.__init__.<locals>.<genexpr>�,�chunkedT)�decode_content)�
isinstancer�headers�status�version�reason�strictr8�retries�enforce_content_length�_decoder�_body�_fp�_original_response�_fp_bytes_read�
basestringr�_pool�_connection�hasattrr7�
chunk_left�get�lower�split�_init_length�length_remainingr1)r�bodyr:r;r<r=r>Zpreload_contentr8�original_responseZpool�
connectionr?r@�request_methodZtr_encZ	encodingsrrrr qs<


zHTTPResponse.__init__cCs|j|jkr|jjd�SdS)a
        Should we redirect and where to?

        :returns: Truthy redirect location string if we got a redirect status
            code and valid location. ``None`` if redirect status and no
            location. ``False`` if not a redirect status code.
        �locationF)r;�REDIRECT_STATUSESr:rK)rrrr�get_redirect_location�sz"HTTPResponse.get_redirect_locationcCs,|js|jrdS|jj|j�d|_dS)N)rGrHZ	_put_conn)rrrr�release_conn�szHTTPResponse.release_conncCs"|jr|jS|jr|jdd�SdS)NT)�
cache_content)rBrCr1)rrrrr&�szHTTPResponse.datacCs|jS)N)rH)rrrrrR�szHTTPResponse.connectioncCs|jS)z�
        Obtain the number of bytes pulled over the wire so far. May differ from
        the amount of content returned by :meth:``HTTPResponse.read`` if bytes
        are encoded on the wire (e.g, compressed).
        )rE)rrrr�tell�szHTTPResponse.tellcCs�|jjd�}|dk	r(|jr(tjd�dS|dk	r�y<tdd�|jd�D��}t|�dkrbtd|��|j	�}Wnt
k
r�d}YnX|d	kr�d}yt|j�}Wnt
k
r�d	}YnX|dks�d|ko�d
kns�|dkr�d	}|S)zM
        Set initial length value for Response content if available.
        zcontent-lengthNz�Received response with both Content-Length and Transfer-Encoding set. This is expressly forbidden by RFC 7230 sec 3.3.2. Ignoring Content-Length and attempting to process response as Transfer-Encoding: chunked.cSsg|]}t|��qSr)�int)r3�valrrr�
<listcomp>�sz-HTTPResponse._init_length.<locals>.<listcomp>r6rz8Content-Length contained multiple unmatching values (%s)r���0�d��ZHEAD)r]r^)
r:rKr7�logZwarning�setrM�lenr�pop�
ValueErrorrZr;)rrSZlengthZlengthsr;rrrrN�s,


(zHTTPResponse._init_lengthcCs4|jjdd�j�}|jdkr0||jkr0t|�|_dS)z=
        Set-up the _decoder attribute if necessary.
        zcontent-encodingr0N)r:rKrLrA�CONTENT_DECODERSr.)r�content_encodingrrr�
_init_decoder�szHTTPResponse._init_decodercCs|y|r|jr|jj|�}WnHttjfk
rb}z&|jjdd�j�}td||��WYdd}~XnX|rx|rx||j	�7}|S)zN
        Decode the data passed in and potentially flush the decoder.
        zcontent-encodingr0zEReceived response with content-encoding: %s, but failed to decode it.N)
rAr$�IOErrorrrr:rKrLr
�_flush_decoder)rr&r8�
flush_decoder�ergrrr�_decodes
zHTTPResponse._decodecCs$|jr |jjd�}||jj�SdS)zk
        Flushes the decoder. Should only be called if the decoder is actually
        being used.
        �)rAr$�flush)rZbufrrrrjszHTTPResponse._flush_decoderccs�d}z�y
dVWn�tk
r2t|jdd��Ynptk
rn}z"dt|�krP�t|jdd��WYdd}~Xn4ttfk
r�}ztd||��WYdd}~XnXd}Wd|s�|jr�|jj	�|j
r�|j
j	�|jr�|jj�r�|j�XdS)z�
        Catch low-level python exceptions, instead re-raising urllib3
        variants, so that low-level exceptions are not leaked in the
        high-level api.

        On exit, release the connection back to the pool.
        FNzRead timed out.zread operation timed outzConnection broken: %rT)
�
SocketTimeoutrrGr�strr�SocketErrorr	rD�closerH�isclosedrW)rZ
clean_exitrlrrr�_error_catcher!s(	
 

zHTTPResponse._error_catchercCs�|j�|dkr|j}|jdkr$dSd}d}|j��h|dkrN|jj�}d}nJd}|jj|�}|dkr�|r�|jj�d}|jr�|jdkr�t|j	|j��WdQRX|r�|j	t
|�7_	|jdk	r�|jt
|�8_|j|||�}|r�||_|S)aP
        Similar to :meth:`httplib.HTTPResponse.read`, but with two additional
        parameters: ``decode_content`` and ``cache_content``.

        :param amt:
            How much of the content to read. If specified, caching is skipped
            because it doesn't make sense to cache partial content as the full
            response.

        :param decode_content:
            If True, will attempt to decode the body based on the
            'content-encoding' header.

        :param cache_content:
            If True, will save the returned data such that the same result is
            returned despite of the state of the underlying file object. This
            is useful if you want the ``.data`` property to continue working
            after having ``.read()`` the file object. (Overridden if ``amt`` is
            set.)
        NFTr)rN)
rhr8rCrur1rsr@rOr
rErcrmrB)r�amtr8rXrkr&rrrr1Zs4




zHTTPResponse.read�r+ccsZ|jr.|j�r.xF|j||d�D]
}|VqWn(x&t|j�sT|j||d�}|r0|Vq0WdS)a_
        A generator wrapper for the read() method. A call will block until
        ``amt`` bytes have been read from the connection or until the
        connection is closed.

        :param amt:
            How much of the content to read. The generator will return up to
            much data per iteration, but may return less. This is particularly
            likely when using compressed data. However, the empty string will
            never be returned.

        :param decode_content:
            If True, will attempt to decode the body based on the
            'content-encoding' header.
        )r8)rvr8N)r7�supports_chunked_reads�read_chunkedrrCr1)rrvr8�liner&rrr�stream�szHTTPResponse.streamc
Ks`|j}t|t�s,tr"t|j��}n
tj|�}t|dd�}|f|||j|j|j	||d�|��}|S)a
        Given an :class:`httplib.HTTPResponse` instance ``r``, return a
        corresponding :class:`urllib3.response.HTTPResponse` object.

        Remaining parameters are passed to the HTTPResponse constructor, along
        with ``original_response=r``.
        r>r)rPr:r;r<r=r>rQ)
�msgr9rr�items�from_httplibr!r;r<r=)ZResponseCls�rZresponse_kwr:r>Zresprrrr~�s	

zHTTPResponse.from_httplibcCs|jS)N)r:)rrrr�
getheaders�szHTTPResponse.getheaderscCs|jj||�S)N)r:rK)rr"�defaultrrr�	getheader�szHTTPResponse.getheadercCs|jS)N)r:)rrrr�info�szHTTPResponse.infocCs$|js|jj�|jr |jj�dS)N)�closedrCrsrH)rrrrrs�s
zHTTPResponse.closecCs@|jdkrdSt|jd�r$|jj�St|jd�r8|jjSdSdS)NTrtr�)rCrIrtr�)rrrrr��s

zHTTPResponse.closedcCs6|jdkrtd��nt|jd�r*|jj�Std��dS)Nz-HTTPResponse has no file to get a fileno from�filenozOThe file-like object this HTTPResponse is wrapped around has no file descriptor)rCrirIr�)rrrrr��s



zHTTPResponse.filenocCs$|jdk	r t|jd�r |jj�SdS)Nro)rCrIro)rrrrro�szHTTPResponse.flushcCsdS)NTr)rrrr�readableszHTTPResponse.readablecCs:|jt|��}t|�dkrdS||dt|��<t|�SdS)Nr)r1rc)r�bZtemprrr�readintos
zHTTPResponse.readintocCst|jd�S)z�
        Checks if the underlying file-like object looks like a
        httplib.HTTPResponse object. We do this by testing for the fp
        attribute. If it is present we assume it returns raw chunks as
        processed by read_chunked().
        �fp)rIrC)rrrrrxsz#HTTPResponse.supports_chunked_readscCsf|jdk	rdS|jjj�}|jdd�d}yt|d�|_Wn&tk
r`|j�tj	|��YnXdS)N�;rrr+)
rJrCr��readlinerMrZrers�httplibr
)rrzrrr�_update_chunk_lengths
z!HTTPResponse._update_chunk_lengthcCs�d}|dkr2|jj|j�}|}|jjd�d|_nv||jkrZ|jj|�}|j||_|}nN||jkr�|jj|�}|jjd�d|_|}n |jj|j�}|jjd�d|_|S)Nrw)rCZ
_safe_readrJ)rrvZreturned_chunk�chunk�valuerrr�
_handle_chunk%s&

zHTTPResponse._handle_chunkccs�|j�|jstd��|j�s&td��|jrDt|j�rD|jj�dS|j���x<|j	�|j
dkrdP|j|�}|j||dd�}|rP|VqPW|r�|j
�}|r�|Vx |jjj�}|s�P|dkr�Pq�W|jr�|jj�WdQRXdS)z�
        Similar to :meth:`HTTPResponse.read`, but with an additional
        parameter: ``decode_content``.

        :param decode_content:
            If True, will attempt to decode the body based on the
            'content-encoding' header.
        zHResponse is not chunked. Header 'transfer-encoding: chunked' is missing.zgBody should be httplib.HTTPResponse like. It should have have an fp attribute which returns raw chunks.NrF)r8rks
)rhr7rrxrrDrrsrur�rJr�rmrjrCr�r�)rrvr8r�Zdecodedrzrrrry;s@	




zHTTPResponse.read_chunked)r0NrrNrTTNNNNFN)NNF�)r�N)N)NN)$r'r(r)�__doc__rfrUr rVrW�propertyr&rRrYrNrhrmrjrrur1r{�classmethodr~r�r�r�rsr�r�ror�r�rxr�r�ryrrrrr/LsB 
-
	0
9
E

			r/)-Z
__future__r�
contextlibrr�ioZloggingZsocketrrprrr�_collectionsr�
exceptionsrr	r
rrr
rZpackages.sixrrFrrZpackages.six.movesrr�rRrrZ
util.responserrZ	getLoggerr'ra�objectrr*r.�IOBaser/rrrr�<module>s"$
!_vendor/urllib3/__pycache__/connection.cpython-36.pyc000064400000021073151733136330016561 0ustar003

�Pf�2�@s�ddlmZddlZddlZddlZddlZddlZddlmZm	Z
ddlZddlm
Z
ddlmZddlmZyddlZejZWn,eefk
r�dZGdd	�d	e�ZYnXyeZWn$ek
r�Gd
d�de�ZYnXddlmZmZmZmZdd
l m!Z!m"Z"ddl#m$Z$m%Z%m&Z&m'Z'm(Z(ddl)m*Z*ddl+m,Z,ej-e.�Z/ddd�Z0ej1ddd�Z2Gdd�de3�Z4Gdd�dee3�ZGdd�de�Z5Gdd�de5�Z6dd�Z7e�r�e5Z8e6Z5ne4Z5dS)�)�absolute_importN)�error�timeout�)�six)�HTTPConnection)�
HTTPExceptionc@seZdZdS)�BaseSSLErrorN)�__name__�
__module__�__qualname__�r
r
� /usr/lib/python3.6/connection.pyr	sr	c@seZdZdS)�ConnectionErrorN)r
rrr
r
r
rrsr)�NewConnectionError�ConnectTimeoutError�SubjectAltNameWarning�SystemTimeWarning)�match_hostname�CertificateError)�resolve_cert_reqs�resolve_ssl_version�assert_fingerprint�create_urllib3_context�ssl_wrap_socket)�
connection)�HTTPHeaderDict�Pi�)�http�httpsi�c@seZdZdZdS)�DummyConnectionz-Used to detect a failed ConnectionCls import.N)r
rr�__doc__r
r
r
rr Asr c@sVeZdZdZedZejejdfgZ	dZ
dd�Zdd�Zd	d
�Z
dd�Zddd�Zd
S)ra{
    Based on httplib.HTTPConnection but provides an extra constructor
    backwards-compatibility layer between older and newer Pythons.

    Additional keyword parameters are used to configure attributes of the connection.
    Accepted parameters include:

      - ``strict``: See the documentation on :class:`urllib3.connectionpool.HTTPConnectionPool`
      - ``source_address``: Set the source address for the current connection.

        .. note:: This is ignored for Python 2.6. It is only applied for 2.7 and 3.x

      - ``socket_options``: Set specific options on the underlying socket. If not specified, then
        defaults are loaded from ``HTTPConnection.default_socket_options`` which includes disabling
        Nagle's algorithm (sets TCP_NODELAY to 1) unless the connection is behind a proxy.

        For example, if you wish to enable TCP Keep Alive in addition to the defaults,
        you might pass::

            HTTPConnection.default_socket_options + [
                (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1),
            ]

        Or you may want to disable the defaults by passing an empty list (e.g., ``[]``).
    rrFcOsZtjr|jdd�|jd�|_tjdkr4|jdd�|jd|j�|_t	j
|f|�|�dS)N�strict�source_address���socket_options)r$r%)rZPY3�pop�getr#�sys�version_info�default_socket_optionsr&�_HTTPConnection�__init__)�self�args�kwr
r
rr-js
zHTTPConnection.__init__cCs�i}|jr|j|d<|jr$|j|d<ytj|j|jf|jf|�}Wnftk
rz}zt|d|j|jf��WYdd}~Xn0t	k
r�}zt
|d|��WYdd}~XnX|S)zp Establish a socket connection and set nodelay settings on it.

        :return: New socket connection.
        r#r&z0Connection to %s timed out. (connect timeout=%s)Nz(Failed to establish a new connection: %s)r#r&rZcreate_connection�host�portr�
SocketTimeoutr�SocketErrorr)r.Zextra_kw�conn�er
r
r�	_new_conns 

 zHTTPConnection._new_conncCs$||_t|dd�r |j�d|_dS)N�_tunnel_hostr)�sock�getattr�_tunnel�	auto_open)r.r5r
r
r�
_prepare_conn�szHTTPConnection._prepare_conncCs|j�}|j|�dS)N)r7r=)r.r5r
r
r�connect�szHTTPConnection.connectNcCst|dk	r|ni�}d|k}d|k}|j||||d�x |j�D]\}}|j||�q@Wd|krl|jdd�|j�|dk	�rtjtjf}	t||	�r�|f}xj|D]b}
|
s�q�t|
tj�s�|
j	d�}
t
t|
��d	d�}|j|j	d
��|jd�|j|
�|jd�q�W|jd�dS)
z�
        Alternative to the common request method, which sends the
        body with chunked encoding and not as one block
        Nzaccept-encodingr1)�skip_accept_encoding�	skip_hostztransfer-encodingzTransfer-EncodingZchunked�utf8r$zutf-8s
s0

)
rZ
putrequest�itemsZ	putheaderZ
endheadersrZstring_typesZbinary_type�
isinstance�encode�hex�len�send)r.�methodZurlZbodyZheadersr?r@�header�valueZstringish_types�chunkZlen_strr
r
r�request_chunked�s8





zHTTPConnection.request_chunked)NN)r
rrr!�port_by_scheme�default_port�socketZIPPROTO_TCPZTCP_NODELAYr+�is_verifiedr-r7r=r>rLr
r
r
rrFsrc@s8eZdZedZdZddddejdfdd�Zdd�Z	dS)�HTTPSConnectionrNc	Ks8tj|||f||d�|��||_||_||_d|_dS)N)r"rr)rr-�key_file�	cert_file�ssl_contextZ	_protocol)	r.r1r2rRrSr"rrTr0r
r
rr-�szHTTPSConnection.__init__cCsN|j�}|j|�|jdkr2ttd�td�d�|_t||j|j|jd�|_	dS)N)�ssl_version�	cert_reqs)r9�keyfile�certfilerT)
r7r=rTrrrrrRrSr9)r.r5r
r
rr>�s

zHTTPSConnection.connect)
r
rrrMrNrUrOZ_GLOBAL_DEFAULT_TIMEOUTr-r>r
r
r
rrQ�s
rQc@s6eZdZdZdZdZdZdZdZddd�Z	dd�Z
dS)�VerifiedHTTPSConnectionz[
    Based on httplib.HTTPSConnection but wraps the socket with
    SSL certification.
    NcCsn|dkr(|s|rd}n|jdk	r(|jj}||_||_||_||_||_|oTtjj	|�|_
|oftjj	|�|_dS)zX
        This method should only be called once, before the connection is used.
        N�
CERT_REQUIRED)rT�verify_moderRrSrV�assert_hostnamer�os�path�
expanduser�ca_certs�ca_cert_dir)r.rRrSrVr`r\rrar
r
r�set_certs
z VerifiedHTTPSConnection.set_certc	CsL|j�}|j}t|dd�r4||_|j�d|_|j}tjj	�t
k}|rXtjdj
t
�t�|jdkr|tt|j�t|j�d�|_|j}t|j�|_t||j|j|j|j||d�|_|jr�t|jjdd�|j�nb|jtjko�t|dd	�o�|jd	k	�r.|jj�}|j d
f��stjdj
|�t!�t"||j�p*|�|jtj#k�pD|jdk	|_$dS)Nr8rzWSystem time is way off (before {0}). This will probably lead to SSL verification errors)rUrV)r9rWrXr`raZserver_hostnamerTT)Zbinary_formZcheck_hostnameFZsubjectAltNamez�Certificate for {0} has no `subjectAltName`, falling back to check for a `commonName` for now. This feature is being removed by major browsers and deprecated by RFC 2818. (See https://github.com/shazow/urllib3/issues/497 for details.))%r7r1r:r9r;r<r8�datetime�dateZtoday�RECENT_DATE�warnings�warn�formatrrTrrrUrrVr[rrRrSr`rarZgetpeercert�sslZ	CERT_NONEr\r(r�_match_hostnamerZrP)r.r5ZhostnameZis_time_off�context�certr
r
rr>sT



zVerifiedHTTPSConnection.connect)NNNNNNN)r
rrr!rVr`rarUrrbr>r
r
r
rrY�s
rYcCsLyt||�Wn8tk
rF}ztjd||�||_�WYdd}~XnXdS)Nz@Certificate did not match expected hostname: %s. Certificate: %s)rr�logrZ
_peer_cert)rlZasserted_hostnamer6r
r
rrjbsrj)9Z
__future__rrcZloggingr]r)rOrr4rr3rfZpackagesrZpackages.six.moves.http_clientrr,rriZSSLErrorr	�ImportError�AttributeError�
BaseExceptionr�	NameError�	Exception�
exceptionsrrrrZpackages.ssl_match_hostnamerrZ	util.ssl_rrrrr�utilr�_collectionsrZ	getLoggerr
rmrMrdre�objectr rQrYrjZUnverifiedHTTPSConnectionr
r
r
r�<module>sN
	
&l_vendor/urllib3/__pycache__/fields.cpython-36.opt-1.pyc000064400000013240151733136330016624 0ustar003

�Pf7�@sNddlmZddlZddlZddlmZddd�Zdd	�ZGd
d�de	�Z
dS)
�)�absolute_importN�)�six�application/octet-streamcCs|rtj|�dp|S|S)z�
    Guess the "Content-Type" of a file.

    :param filename:
        The filename to guess the "Content-Type" of using :mod:`mimetypes`.
    :param default:
        If no "Content-Type" can be guessed, default to `default`.
    r)�	mimetypesZ
guess_type)�filename�default�r	�/usr/lib/python3.6/fields.py�guess_content_types	rcs�t�fdd�dD��sNd|�f}y|jd�Wnttfk
rHYnX|Stjrlt�tj�rl�jd��tj	j
�d��d|�f��S)a�
    Helper function to format and quote a single header parameter.

    Particularly useful for header parameters which might contain
    non-ASCII values, like file names. This follows RFC 2231, as
    suggested by RFC 2388 Section 4.4.

    :param name:
        The name of the parameter, a string expected to be ASCII only.
    :param value:
        The value of the parameter, provided as a unicode string.
    c3s|]}|�kVqdS)Nr	)�.0Zch)�valuer	r
�	<genexpr>#sz&format_header_param.<locals>.<genexpr>z"\
z%s="%s"�asciizutf-8z%s*=%s)�any�encode�UnicodeEncodeError�UnicodeDecodeErrorrZPY3�
isinstanceZ	text_type�emailZutilsZencode_rfc2231)�namer
�resultr	)r
r
�format_header_params

rc@sHeZdZdZddd�Zedd��Zdd�Zd	d
�Zdd�Z	dd
d�Z
dS)�RequestFieldaK
    A data container for request body parameters.

    :param name:
        The name of this request field.
    :param data:
        The data/value body.
    :param filename:
        An optional filename of the request field.
    :param headers:
        An optional dict-like object of headers to initially use for the field.
    NcCs*||_||_||_i|_|r&t|�|_dS)N)�_name�	_filename�data�headers�dict)�selfrrrrr	r	r
�__init__?szRequestField.__init__cCs^t|t�r4t|�dkr"|\}}}q@|\}}t|�}nd}d}|}||||d�}|j|d�|S)a�
        A :class:`~urllib3.fields.RequestField` factory from old-style tuple parameters.

        Supports constructing :class:`~urllib3.fields.RequestField` from
        parameter of key/value strings AND key/filetuple. A filetuple is a
        (filename, data, MIME type) tuple where the MIME type is optional.
        For example::

            'foo': 'bar',
            'fakefile': ('foofile.txt', 'contents of foofile'),
            'realfile': ('barfile.txt', open('realfile').read()),
            'typedfile': ('bazfile.bin', open('bazfile').read(), 'image/jpeg'),
            'nonamefile': 'contents of nonamefile field',

        Field names and filenames must be unicode.
        �N)r)�content_type)r�tuple�lenr�make_multipart)�clsZ	fieldnamer
rrr"Z
request_paramr	r	r
�from_tuplesGs

zRequestField.from_tuplescCs
t||�S)a
        Overridable helper function to format a single header parameter.

        :param name:
            The name of the parameter, a string expected to be ASCII only.
        :param value:
            The value of the parameter, provided as a unicode string.
        )r)rrr
r	r	r
�_render_partis	zRequestField._render_partcCsPg}|}t|t�r|j�}x*|D]"\}}|dk	r |j|j||��q Wdj|�S)aO
        Helper function to format and quote a single header.

        Useful for single headers that are composed of multiple items. E.g.,
        'Content-Disposition' fields.

        :param header_parts:
            A sequence of (k, v) typles or a :class:`dict` of (k, v) to format
            as `k1="v1"; k2="v2"; ...`.
        Nz; )rr�items�appendr(�join)rZheader_parts�parts�iterablerr
r	r	r
�
_render_partsts
zRequestField._render_partscCs�g}dddg}x2|D]*}|jj|d�r|jd||j|f�qWx4|jj�D]&\}}||krN|rN|jd||f�qNW|jd�dj|�S)z=
        Renders the headers for this request field.
        zContent-DispositionzContent-TypezContent-LocationFz%s: %sz
)r�getr*r)r+)r�linesZ	sort_keysZsort_keyZheader_nameZheader_valuer	r	r
�render_headers�s


zRequestField.render_headersc	CsX|pd|jd<|jddjd|jd|jfd|jff�g�7<||jd<||jd<d	S)
a|
        Makes this request field into a multipart request field.

        This method overrides "Content-Disposition", "Content-Type" and
        "Content-Location" headers to the request parameter.

        :param content_type:
            The 'Content-Type' of the request body.
        :param content_location:
            The 'Content-Location' of the request body.

        z	form-datazContent-Dispositionz; �rrzContent-TypezContent-LocationN)rr+r.rr)rZcontent_dispositionr"Zcontent_locationr	r	r
r%�s
zRequestField.make_multipart)NN)NNN)�__name__�
__module__�__qualname__�__doc__r �classmethodr'r(r.r1r%r	r	r	r
r2s
"r)r)Z
__future__rZemail.utilsrrZpackagesrrr�objectrr	r	r	r
�<module>s
_vendor/urllib3/__pycache__/exceptions.cpython-36.pyc000064400000024121151733136330016600 0ustar003

�Pf��@sLddlmZddlmZGdd�de�ZGdd�de�ZGdd	�d	e�Z	Gd
d�de	�Z
Gdd
�d
e�ZGdd�de�ZGdd�de�Z
Gdd�de�ZeZGdd�de
�ZGdd�de
�ZGdd�de�ZGdd�de�ZGdd�dee
�ZGdd�de�ZGd d!�d!ee	�ZGd"d#�d#e	�ZGd$d%�d%e	�ZGd&d'�d'ee�ZGd(d)�d)e�ZGd*d+�d+e�ZGd,d-�d-e�ZGd.d/�d/e�ZGd0d1�d1e�ZGd2d3�d3e�Z Gd4d5�d5e�Z!Gd6d7�d7e�Z"Gd8d9�d9e�Z#Gd:d;�d;ee�Z$Gd<d=�d=e�Z%Gd>d?�d?ee�ZGd@dA�dAe�Z&GdBdC�dCe'e�Z(GdDdE�dEe�Z)GdFdG�dGe�Z*dHS)I�)�absolute_import�)�IncompleteReadc@seZdZdZdS)�	HTTPErrorz#Base exception used by this module.N)�__name__�
__module__�__qualname__�__doc__�r
r
� /usr/lib/python3.6/exceptions.pyrsrc@seZdZdZdS)�HTTPWarningz!Base warning used by this module.N)rrrr	r
r
r
rr
src@s eZdZdZdd�Zdd�ZdS)�	PoolErrorz/Base exception for errors caused within a pool.cCs||_tj|d||f�dS)Nz%s: %s)�poolr�__init__)�selfr�messager
r
rrszPoolError.__init__cCs
|jdfS)N)NN)�	__class__)rr
r
r�
__reduce__szPoolError.__reduce__N)rrrr	rrr
r
r
rr
sr
c@s eZdZdZdd�Zdd�ZdS)�RequestErrorz8Base exception for PoolErrors that have associated URLs.cCs||_tj|||�dS)N)�urlr
r)rrrrr
r
rrszRequestError.__init__cCs|jd|jdffS)N)rr)rr
r
rr#szRequestError.__reduce__N)rrrr	rrr
r
r
rrsrc@seZdZdZdS)�SSLErrorz9Raised when SSL certificate fails in an HTTPS connection.N)rrrr	r
r
r
rr(src@seZdZdZdS)�
ProxyErrorz,Raised when the connection to a proxy fails.N)rrrr	r
r
r
rr-src@seZdZdZdS)�DecodeErrorz;Raised when automatic decoding based on Content-Type fails.N)rrrr	r
r
r
rr2src@seZdZdZdS)�
ProtocolErrorz>Raised when something unexpected happens mid-request/response.N)rrrr	r
r
r
rr7src@seZdZdZddd�ZdS)�
MaxRetryErroraRaised when the maximum number of retries is exceeded.

    :param pool: The connection pool
    :type pool: :class:`~urllib3.connectionpool.HTTPConnectionPool`
    :param string url: The requested Url
    :param exceptions.Exception reason: The underlying error

    NcCs&||_d||f}tj||||�dS)Nz0Max retries exceeded with url: %s (Caused by %r))�reasonrr)rrrrrr
r
rrLs
zMaxRetryError.__init__)N)rrrr	rr
r
r
rrBsrc@seZdZdZddd�ZdS)�HostChangedErrorz?Raised when an existing pool gets a request for a foreign host.�cCs"d|}tj||||�||_dS)Nz)Tried to open a foreign host with url: %s)rr�retries)rrrrrr
r
rrXszHostChangedError.__init__N)r)rrrr	rr
r
r
rrUsrc@seZdZdZdS)�TimeoutStateErrorz3 Raised when passing an invalid state to a timeout N)rrrr	r
r
r
rr^src@seZdZdZdS)�TimeoutErrorz� Raised when a socket timeout error occurs.

    Catching this error will catch both :exc:`ReadTimeoutErrors
    <ReadTimeoutError>` and :exc:`ConnectTimeoutErrors <ConnectTimeoutError>`.
    N)rrrr	r
r
r
rr csr c@seZdZdZdS)�ReadTimeoutErrorzFRaised when a socket timeout occurs while receiving data from a serverN)rrrr	r
r
r
rr!lsr!c@seZdZdZdS)�ConnectTimeoutErrorz@Raised when a socket timeout occurs while connecting to a serverN)rrrr	r
r
r
rr"ssr"c@seZdZdZdS)�NewConnectionErrorzHRaised when we fail to establish a new connection. Usually ECONNREFUSED.N)rrrr	r
r
r
rr#xsr#c@seZdZdZdS)�EmptyPoolErrorzCRaised when a pool runs out of connections and no more are allowed.N)rrrr	r
r
r
rr$}sr$c@seZdZdZdS)�ClosedPoolErrorzCRaised when a request enters a pool after the pool has been closed.N)rrrr	r
r
r
rr%�sr%c@seZdZdZdS)�LocationValueErrorz<Raised when there is something wrong with a given URL input.N)rrrr	r
r
r
rr&�sr&c@seZdZdZdd�ZdS)�LocationParseErrorz=Raised when get_host or similar fails to parse the URL input.cCsd|}tj||�||_dS)NzFailed to parse: %s)rr�location)rr(rr
r
rr�szLocationParseError.__init__N)rrrr	rr
r
r
rr'�sr'c@seZdZdZdZdZdS)�
ResponseErrorzDUsed as a container for an error reason supplied in a MaxRetryError.ztoo many error responsesz&too many {status_code} error responsesN)rrrr	Z
GENERIC_ERRORZSPECIFIC_ERRORr
r
r
rr)�sr)c@seZdZdZdS)�SecurityWarningz/Warned when perfoming security reducing actionsN)rrrr	r
r
r
rr*�sr*c@seZdZdZdS)�SubjectAltNameWarningzBWarned when connecting to a host with a certificate missing a SAN.N)rrrr	r
r
r
rr+�sr+c@seZdZdZdS)�InsecureRequestWarningz/Warned when making an unverified HTTPS request.N)rrrr	r
r
r
rr,�sr,c@seZdZdZdS)�SystemTimeWarningz0Warned when system time is suspected to be wrongN)rrrr	r
r
r
rr-�sr-c@seZdZdZdS)�InsecurePlatformWarningzEWarned when certain SSL configuration is not available on a platform.N)rrrr	r
r
r
rr.�sr.c@seZdZdZdS)�SNIMissingWarningz9Warned when making a HTTPS request without SNI available.N)rrrr	r
r
r
rr/�sr/c@seZdZdZdS)�DependencyWarningzc
    Warned when an attempt is made to import a module with missing optional
    dependencies.
    N)rrrr	r
r
r
rr0�sr0c@seZdZdZdS)�ResponseNotChunkedz;Response needs to be chunked in order to read it as chunks.N)rrrr	r
r
r
rr1�sr1c@seZdZdZdS)�BodyNotHttplibCompatiblezz
    Body should be httplib.HTTPResponse like (have an fp attribute which
    returns raw chunks) for read_chunked().
    N)rrrr	r
r
r
rr2�sr2cs(eZdZdZ�fdd�Zdd�Z�ZS)rz�
    Response length doesn't match expected Content-Length

    Subclass of http_client.IncompleteRead to allow int value
    for `partial` to avoid creating large objects on streamed
    reads.
    cstt|�j||�dS)N)�superrr)r�partial�expected)rr
rr�szIncompleteRead.__init__cCsd|j|jfS)Nz/IncompleteRead(%i bytes read, %i more expected))r4r5)rr
r
r�__repr__�szIncompleteRead.__repr__)rrrr	rr6�
__classcell__r
r
)rrr�src@seZdZdZdS)�
InvalidHeaderz(The header provided was somehow invalid.N)rrrr	r
r
r
rr8�sr8cs eZdZdZ�fdd�Z�ZS)�ProxySchemeUnknownz1ProxyManager does not support the supplied schemecsd|}tt|�j|�dS)NzNot supported proxy scheme %s)r3r9r)r�schemer)rr
rr�szProxySchemeUnknown.__init__)rrrr	rr7r
r
)rrr9�sr9cs eZdZdZ�fdd�Z�ZS)�HeaderParsingErrorzNRaised by assert_header_parsing, but we convert it to a log.warning statement.cs$d|pd|f}tt|�j|�dS)Nz%s, unparsed data: %rZUnknown)r3r;r)rZdefectsZ
unparsed_datar)rr
rr�szHeaderParsingError.__init__)rrrr	rr7r
r
)rrr;�sr;c@seZdZdZdS)�UnrewindableBodyErrorz9urllib3 encountered an error when trying to rewind a bodyN)rrrr	r
r
r
rr<�sr<N)+Z
__future__rZpackages.six.moves.http_clientrZhttplib_IncompleteRead�	Exceptionr�Warningrr
rrrrr�ConnectionErrorrrrr r!r"r#r$r%�
ValueErrorr&r'r)r*r+r,r-r.r/r0r1r2r8�AssertionErrorr9r;r<r
r
r
r�<module>sH		
	_vendor/urllib3/__pycache__/poolmanager.cpython-36.opt-1.pyc000064400000031074151733136330017667 0ustar003

�Pf�A�@sddlmZddlZddlZddlZddlmZddlmZm	Z	ddlm
Z
ddlmZm
Z
mZddlmZdd	lmZdd
lmZddlmZdd
dgZeje�Zd4Zd5Zejd-e�Zd.d/�Zej ee�ej ee�d0�Z!ee	d0�Z"Gd1d�de�Z#Gd2d
�d
e#�Z$d3d�Z%dS)6�)�absolute_importN�)�RecentlyUsedContainer)�HTTPConnectionPool�HTTPSConnectionPool)�port_by_scheme)�LocationValueError�
MaxRetryError�ProxySchemeUnknown)�urljoin)�RequestMethods)�	parse_url)�Retry�PoolManager�ProxyManager�proxy_from_url�key_file�	cert_file�	cert_reqs�ca_certs�ssl_version�ca_cert_dir�ssl_context�
key_scheme�key_host�key_port�key_timeout�key_retries�
key_strict�	key_block�key_source_address�key_key_file�
key_cert_file�
key_cert_reqs�key_ca_certs�key_ssl_version�key_ca_cert_dir�key_ssl_context�key_maxsize�key_headers�
key__proxy�key__proxy_headers�key_socket_options�key__socks_options�key_assert_hostname�key_assert_fingerprint�PoolKeycCs�|j�}|dj�|d<|dj�|d<x4d	D],}||kr.||dk	r.t||j��||<q.W|jd�}|dk	r|t|�|d<x&t|j��D]}|j|�|d|<q�Wx|j	D]}||kr�d||<q�W|f|�S)
a�
    Create a pool key out of a request context dictionary.

    According to RFC 3986, both the scheme and host are case-insensitive.
    Therefore, this function normalizes both before constructing the pool
    key for an HTTPS request. If you wish to change this behaviour, provide
    alternate callables to ``key_fn_by_scheme``.

    :param key_class:
        The class to use when constructing the key. This should be a namedtuple
        with the ``scheme`` and ``host`` keys at a minimum.
    :type  key_class: namedtuple
    :param request_context:
        A dictionary-like object that contain the context for a request.
    :type  request_context: dict

    :return: A namedtuple that can be used as a connection pool key.
    :rtype:  PoolKey
    �scheme�host�headers�_proxy_headers�_socks_optionsNZsocket_optionsZkey_)r3r4r5)
�copy�lower�	frozenset�items�get�tuple�list�keys�pop�_fields)Z	key_class�request_context�context�keyZsocket_optsZfield�rC�!/usr/lib/python3.6/poolmanager.py�_default_key_normalizer9s

rE)�http�httpsc@sxeZdZdZdZddd�Zdd�Zdd	�Zdd
d�Zdd
�Z	ddd�Z
dd�Zddd�Zd dd�Z
dd�Zd!dd�ZdS)"ra$
    Allows for arbitrary requests while transparently keeping track of
    necessary connection pools for you.

    :param num_pools:
        Number of connection pools to cache before discarding the least
        recently used pool.

    :param headers:
        Headers to include with all requests, unless other headers are given
        explicitly.

    :param \**connection_pool_kw:
        Additional parameters are used to create fresh
        :class:`urllib3.connectionpool.ConnectionPool` instances.

    Example::

        >>> manager = PoolManager(num_pools=2)
        >>> r = manager.request('GET', 'http://google.com/')
        >>> r = manager.request('GET', 'http://google.com/mail')
        >>> r = manager.request('GET', 'http://yahoo.com/')
        >>> len(manager.pools)
        2

    N�
cKs8tj||�||_t|dd�d�|_t|_tj�|_dS)NcSs|j�S)N)�close)�prCrCrD�<lambda>�sz&PoolManager.__init__.<locals>.<lambda>)Zdispose_func)r�__init__�connection_pool_kwr�pools�pool_classes_by_scheme�key_fn_by_schemer6)�self�	num_poolsr3rMrCrCrDrL�szPoolManager.__init__cCs|S)NrC)rQrCrCrD�	__enter__�szPoolManager.__enter__cCs|j�dS)NF)�clear)rQ�exc_typeZexc_valZexc_tbrCrCrD�__exit__�szPoolManager.__exit__cCsf|j|}|dkr|jj�}xdD]}|j|d�q"W|dkrXxtD]}|j|d�qDW|||f|�S)a�
        Create a new :class:`ConnectionPool` based on host, port, scheme, and
        any additional pool keyword arguments.

        If ``request_context`` is provided, it is provided as keyword arguments
        to the pool class used. This method is used to actually create the
        connection pools handed out by :meth:`connection_from_url` and
        companion methods. It is intended to be overridden for customization.
        Nr1r2�portrF)r1r2rW)rOrMr6r>�SSL_KEYWORDS)rQr1r2rWr@Zpool_clsrB�kwrCrCrD�	_new_pool�s




zPoolManager._new_poolcCs|jj�dS)z�
        Empty our store of pools and direct them all to close.

        This will not affect in-flight connections, but they will not be
        re-used after completion.
        N)rNrT)rQrCrCrDrT�szPoolManager.clearrFcCsT|std��|j|�}|pd|d<|s:tj|dj�d�}||d<||d<|j|�S)a�
        Get a :class:`ConnectionPool` based on the host, port, and scheme.

        If ``port`` isn't given, it will be derived from the ``scheme`` using
        ``urllib3.connectionpool.port_by_scheme``. If ``pool_kwargs`` is
        provided, it is merged with the instance's ``connection_pool_kw``
        variable and used to create the new connection pool, if one is
        needed.
        zNo host specified.rFr1�PrWr2)r�_merge_pool_kwargsrr:r7�connection_from_context)rQr2rWr1�pool_kwargsr@rCrCrD�connection_from_host�s
z PoolManager.connection_from_hostcCs,|dj�}|j|}||�}|j||d�S)z�
        Get a :class:`ConnectionPool` based on the request context.

        ``request_context`` must at least contain the ``scheme`` key and its
        value must be a key in ``key_fn_by_scheme`` instance variable.
        r1)r@)r7rP�connection_from_pool_key)rQr@r1Zpool_key_constructor�pool_keyrCrCrDr]�s
z#PoolManager.connection_from_contextc
Cs`|jj�N|jj|�}|r|S|d}|d}|d}|j||||d�}||j|<WdQRX|S)z�
        Get a :class:`ConnectionPool` based on the provided pool key.

        ``pool_key`` should be a namedtuple that only contains immutable
        objects. At a minimum it must have the ``scheme``, ``host``, and
        ``port`` fields.
        r1r2rW)r@N)rN�lockr:rZ)rQrar@Zpoolr1r2rWrCrCrDr`�s
z$PoolManager.connection_from_pool_keycCs t|�}|j|j|j|j|d�S)a�
        Similar to :func:`urllib3.connectionpool.connection_from_url`.

        If ``pool_kwargs`` is not provided and a new pool needs to be
        constructed, ``self.connection_pool_kw`` is used to initialize
        the :class:`urllib3.connectionpool.ConnectionPool`. If ``pool_kwargs``
        is provided, it is used instead. Note that if a new pool does not
        need to be created for the request, the provided ``pool_kwargs`` are
        not used.
        )rWr1r^)r
r_r2rWr1)rQ�urlr^�urCrCrD�connection_from_url
szPoolManager.connection_from_urlcCsZ|jj�}|rVxF|j�D]:\}}|dkrJy
||=WqRtk
rFYqRXq|||<qW|S)a
        Merge a dictionary of override values for self.connection_pool_kw.

        This does not modify self.connection_pool_kw and returns a new dict.
        Any keys in the override dictionary with a value of ``None`` are
        removed from the merged dictionary.
        N)rMr6r9�KeyError)rQ�overrideZbase_pool_kwargsrB�valuerCrCrDr\s

zPoolManager._merge_pool_kwargsTcKsdt|�}|j|j|j|jd�}d|d<d|d<d|krD|jj�|d<|jdk	rj|jdkrj|j||f|�}n|j||j	f|�}|o�|j
�}|s�|St||�}|jdkr�d	}|j
d
�}	t|	t�s�tj|	|d�}	|	jo�|j|��r�x|	jD]}
|dj|
d�q�Wy|	j||||d�}	Wn tk
�r4|	j�r0�|SX|	|d
<||d<tjd
||�|j||f|�S)a]
        Same as :meth:`urllib3.connectionpool.HTTPConnectionPool.urlopen`
        with custom cross-host redirect logic and only sends the request-uri
        portion of the ``url``.

        The given ``url`` parameter must be absolute, such that an appropriate
        :class:`urllib3.connectionpool.ConnectionPool` can be chosen for it.
        )rWr1FZassert_same_host�redirectr3NrFi/ZGET�retries)ri)�responseZ_poolzRedirecting %s -> %s)r
r_r2rWr1r3r6�proxy�urlopenZrequest_uriZget_redirect_locationrZstatusr:�
isinstancerZfrom_intZremove_headers_on_redirectZis_same_hostr>Z	incrementr	Zraise_on_redirect�log�info)rQ�methodrcrirYrdZconnrkZredirect_locationrj�headerrCrCrDrm-s@	



zPoolManager.urlopen)rHN)N)NrFN)N)N)T)�__name__�
__module__�__qualname__�__doc__rlrLrSrVrZrTr_r]r`rer\rmrCrCrCrDrys

	


csHeZdZdZd�fdd�	Zd�fdd�	Zdd	d
�Zd�fdd
�	Z�ZS)raw
    Behaves just like :class:`PoolManager`, but sends all requests through
    the defined proxy, using the CONNECT method for HTTPS URLs.

    :param proxy_url:
        The URL of the proxy to be used.

    :param proxy_headers:
        A dictionary contaning headers that will be sent to the proxy. In case
        of HTTP they are being sent with each request, while in the
        HTTPS/CONNECT case they are sent only once. Could be used for proxy
        authentication.

    Example:
        >>> proxy = urllib3.ProxyManager('http://localhost:3128/')
        >>> r1 = proxy.request('GET', 'http://google.com/')
        >>> r2 = proxy.request('GET', 'http://httpbin.org/')
        >>> len(proxy.pools)
        1
        >>> r3 = proxy.request('GET', 'https://httpbin.org/')
        >>> r4 = proxy.request('GET', 'https://twitter.com/')
        >>> len(proxy.pools)
        3

    rHNcs�t|t�rd|j|j|jf}t|�}|jsFtj|jd�}|j|d�}|jdkrZt	|j��||_
|pfi|_|j
|d<|j|d<tt
|�j||f|�dS)	Nz
%s://%s:%ir[)rWrFrG�_proxyr4)rFrG)rnrr1r2rWr
rr:�_replacer
rl�
proxy_headers�superrrL)rQ�	proxy_urlrRr3ryrMrlrW)�	__class__rCrDrL�s








zProxyManager.__init__rFcsD|dkr tt|�j||||d�Stt|�j|jj|jj|jj|d�S)NrG)r^)rzrr_rlr2rWr1)rQr2rWr1r^)r|rCrDr_�s


z!ProxyManager.connection_from_hostcCs0ddi}t|�j}|r||d<|r,|j|�|S)z�
        Sets headers needed by proxies: specifically, the Accept and Host
        headers. Only sets headers not provided by the user.
        ZAcceptz*/*ZHost)r
�netloc�update)rQrcr3Zheaders_r}rCrCrD�_set_proxy_headers�s

zProxyManager._set_proxy_headersTcsNt|�}|jdkr0|jd|j�}|j||�|d<tt|�j||fd|i|��S)z@Same as HTTP(S)ConnectionPool.urlopen, ``url`` must be absolute.rFr3ri)r
r1r:r3rrzrrm)rQrqrcrirYrdr3)r|rCrDrm�s

zProxyManager.urlopen)rHNN)NrFN)N)T)	rsrtrurvrLr_rrm�
__classcell__rCrC)r|rDris
cKstfd|i|��S)Nr{)r)rcrYrCrCrDr�s)rrrrrrr)rrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/)&Z
__future__r�collections�	functoolsZlogging�_collectionsrZconnectionpoolrrr�
exceptionsrr	r
Zpackages.six.moves.urllib.parserZrequestrZutil.urlr
Z
util.retryr�__all__Z	getLoggerrsrorXZ_key_fields�
namedtupler0rE�partialrPrOrrrrCrCrCrD�<module>s`

6
qW_vendor/urllib3/__pycache__/request.cpython-36.opt-1.pyc000064400000012556151733136330017057 0ustar003

�Pf:�@s>ddlmZddlmZddlmZdgZGdd�de�ZdS)�)�absolute_import�)�encode_multipart_formdata)�	urlencode�RequestMethodsc@sReZdZdZeddddg�Zddd�Zdd
d�Zddd
�Zddd�Z	ddd�Z
dS)ra�
    Convenience mixin for classes who implement a :meth:`urlopen` method, such
    as :class:`~urllib3.connectionpool.HTTPConnectionPool` and
    :class:`~urllib3.poolmanager.PoolManager`.

    Provides behavior for making common types of HTTP request methods and
    decides which type of request field encoding to use.

    Specifically,

    :meth:`.request_encode_url` is for sending requests whose fields are
    encoded in the URL (such as GET, HEAD, DELETE).

    :meth:`.request_encode_body` is for sending requests whose fields are
    encoded in the *body* of the request using multipart or www-form-urlencoded
    (such as for POST, PUT, PATCH).

    :meth:`.request` is for making any kind of request, it will look up the
    appropriate encoding format and use one of the above two methods to make
    the request.

    Initializer parameters:

    :param headers:
        Headers to include with all requests, unless other headers are given
        explicitly.
    ZDELETEZGETZHEADZOPTIONSNcCs|pi|_dS)N)�headers)�selfr�r	�/usr/lib/python3.6/request.py�__init__)szRequestMethods.__init__TcKstd��dS)NzMClasses extending RequestMethods must implement their own ``urlopen`` method.)�NotImplemented)r�method�url�bodyr�encode_multipart�multipart_boundary�kwr	r	r
�urlopen,szRequestMethods.urlopencKsJ|j�}||jkr,|j||f||d�|��S|j||f||d�|��SdS)a�
        Make a request using :meth:`urlopen` with the appropriate encoding of
        ``fields`` based on the ``method`` used.

        This is a convenience method that requires the least amount of manual
        effort. It can be used in most situations, while still having the
        option to drop down to more specific methods when necessary, such as
        :meth:`request_encode_url`, :meth:`request_encode_body`,
        or even the lowest level :meth:`urlopen`.
        )�fieldsrN)�upper�_encode_url_methods�request_encode_url�request_encode_body)rr
rrr�
urlopen_kwr	r	r
�request2s
zRequestMethods.requestcKsD|dkr|j}d|i}|j|�|r4|dt|�7}|j||f|�S)z�
        Make a request using :meth:`urlopen` with the ``fields`` encoded in
        the url. This is useful for request methods like GET, HEAD, DELETE, etc.
        Nr�?)r�updaterr)rr
rrrr�extra_kwr	r	r
rHs
z!RequestMethods.request_encode_urlcKs�|dkr|j}dii}|rbd|kr*td��|r@t||d�\}	}
nt|�d}	}
|	|d<d|
i|d<|dj|�|j|�|j||f|�S)a�
        Make a request using :meth:`urlopen` with the ``fields`` encoded in
        the body. This is useful for request methods like POST, PUT, PATCH, etc.

        When ``encode_multipart=True`` (default), then
        :meth:`urllib3.filepost.encode_multipart_formdata` is used to encode
        the payload with the appropriate content type. Otherwise
        :meth:`urllib.urlencode` is used with the
        'application/x-www-form-urlencoded' content type.

        Multipart encoding must be used when posting files, and it's reasonably
        safe to use it in other times too. However, it may break request
        signing, such as with OAuth.

        Supports an optional ``fields`` parameter of key/value strings AND
        key/filetuple. A filetuple is a (filename, data, MIME type) tuple where
        the MIME type is optional. For example::

            fields = {
                'foo': 'bar',
                'fakefile': ('foofile.txt', 'contents of foofile'),
                'realfile': ('barfile.txt', open('realfile').read()),
                'typedfile': ('bazfile.bin', open('bazfile').read(),
                              'image/jpeg'),
                'nonamefile': 'contents of nonamefile field',
            }

        When uploading a file, providing a filename (the first parameter of the
        tuple) is optional but recommended to best mimick behavior of browsers.

        Note that if ``headers`` are supplied, the 'Content-Type' header will
        be overwritten because it depends on the dynamic random boundary string
        which is used to compose the body of the request. The random boundary
        string can be explicitly set with the ``multipart_boundary`` parameter.
        NrrzFrequest got values for both 'fields' and 'body', can only specify one.)�boundaryz!application/x-www-form-urlencodedzContent-Type)r�	TypeErrorrrrr)rr
rrrrrrrrZcontent_typer	r	r
rYs&
z"RequestMethods.request_encode_body)N)NNTN)NN)NN)NNTN)�__name__�
__module__�__qualname__�__doc__�setrrrrrrr	r	r	r
r
s



N)	Z
__future__rZfilepostrZpackages.six.moves.urllib.parser�__all__�objectrr	r	r	r
�<module>s_vendor/urllib3/__pycache__/__init__.cpython-36.pyc000064400000004576151733136330016172 0ustar003

�Pf%�@s`dZddlmZddlZddlmZmZmZddlm	Z	ddl
mZddlm
Z
mZmZdd	lmZdd
lmZddlmZddlmZdd
lmZddlZyddlmZWn&ek
r�Gdd�dej�ZYnXdZdZdZ d(Z!ej"e#�j$e��ej%fd"d�Z&[ej'd#e	j(d$d%�ej'd&e	j)d$d%�ej'd&e	j*d$d%�ej'd&e	j+d$d%�e	j,fd'd�Z-dS))z8
urllib3 - Thread-safe connection pooling and re-using.
�)�absolute_importN�)�HTTPConnectionPool�HTTPSConnectionPool�connection_from_url)�
exceptions)�encode_multipart_formdata)�PoolManager�ProxyManager�proxy_from_url)�HTTPResponse)�make_headers)�get_host)�Timeout)�Retry)�NullHandlerc@seZdZdd�ZdS)rcCsdS)N�)�self�recordrr�/usr/lib/python3.6/__init__.py�emitszNullHandler.emitN)�__name__�
__module__�__qualname__rrrrrrsrz(Andrey Petrov (andrey.petrov@shazow.net)ZMITz1.22rrr	r
rrr�add_stderr_loggerr�disable_warningsrrr
rcCsFtjt�}tj�}|jtjd��|j|�|j|�|jdt�|S)z�
    Helper for quickly adding a StreamHandler to the logger. Useful for
    debugging.

    Returns the handler after adding it.
    z%%(asctime)s %(levelname)s %(message)sz,Added a stderr logging handler to logger: %s)	�logging�	getLoggerrZ
StreamHandlerZsetFormatterZ	Formatter�
addHandlerZsetLevel�debug)�levelZloggerZhandlerrrrr9s	


�alwaysT)�append�defaultcCstjd|�dS)z<
    Helper for quickly disabling all urllib3 warnings.
    �ignoreN)�warnings�simplefilter)�categoryrrrr]s)rrr	r
rrrrrrrrr
r).�__doc__Z
__future__rr%Zconnectionpoolrrr�rZfilepostrZpoolmanagerr	r
rZresponserZutil.requestr
Zutil.urlrZutil.timeoutrZ
util.retryrrr�ImportErrorZHandler�
__author__Z__license__�__version__�__all__rrr�DEBUGrr&ZSecurityWarningZSubjectAltNameWarningZInsecurePlatformWarningZSNIMissingWarningZHTTPWarningrrrrr�<module>sT
_vendor/urllib3/__pycache__/_collections.cpython-36.opt-1.pyc000064400000024340151733136330020036 0ustar003

�Pf�'�@s�ddlmZddlmZmZyddlmZWn"ek
rNGdd�d�ZYnXyddlmZWn ek
r�ddl	mZYnXddl
mZmZm
Z
d	d
gZe�ZGdd	�d	e�ZGdd
�d
e�Zd
S)�)�absolute_import)�Mapping�MutableMapping)�RLockc@seZdZdd�Zdd�ZdS)rcCsdS)N�)�selfrr�"/usr/lib/python3.6/_collections.py�	__enter__szRLock.__enter__cCsdS)Nr)r�exc_type�	exc_value�	tracebackrrr�__exit__
szRLock.__exit__N)�__name__�
__module__�__qualname__r	r
rrrrrsr)�OrderedDict�)�iterkeys�
itervalues�PY3�RecentlyUsedContainer�HTTPHeaderDictc@sVeZdZdZeZddd�Zdd�Zdd	�Zd
d�Z	dd
�Z
dd�Zdd�Zdd�Z
dS)ra�
    Provides a thread-safe dict-like container which maintains up to
    ``maxsize`` keys while throwing away the least-recently-used keys beyond
    ``maxsize``.

    :param maxsize:
        Maximum number of recent elements to retain.

    :param dispose_func:
        Every time an item is evicted from the container,
        ``dispose_func(value)`` is called.  Callback which will get called
    �
NcCs"||_||_|j�|_t�|_dS)N)�_maxsize�dispose_func�ContainerCls�
_containerr�lock)r�maxsizerrrr�__init__+s
zRecentlyUsedContainer.__init__c
Cs,|j�|jj|�}||j|<|SQRXdS)N)rr�pop)r�key�itemrrr�__getitem__2s
z!RecentlyUsedContainer.__getitem__c
Cslt}|j�@|jj|t�}||j|<t|j�|jkrF|jjdd�\}}WdQRX|jrh|tk	rh|j|�dS)NF)Zlast)�_Nullrr�get�lenr�popitemr)rr!�valueZ
evicted_valueZ_keyrrr�__setitem__9s
z!RecentlyUsedContainer.__setitem__c	Cs2|j�|jj|�}WdQRX|jr.|j|�dS)N)rrr r)rr!r(rrr�__delitem__Hsz!RecentlyUsedContainer.__delitem__c	Cs|j�t|j�SQRXdS)N)rr&r)rrrr�__len__OszRecentlyUsedContainer.__len__cCstd��dS)Nz7Iteration over this class is unlikely to be threadsafe.)�NotImplementedError)rrrr�__iter__SszRecentlyUsedContainer.__iter__c
CsL|j�tt|j��}|jj�WdQRX|jrHx|D]}|j|�q6WdS)N)r�listrr�clearr)r�valuesr(rrrr/Vs
zRecentlyUsedContainer.clearc
Cs |j�tt|j��SQRXdS)N)rr.rr)rrrr�keys`szRecentlyUsedContainer.keys)rN)rrr�__doc__rrrr#r)r*r+r-r/r1rrrrrs

cs�eZdZdZd-�fdd�	Zdd�Zdd�Zd	d
�Zdd�Zd
d�Z	dd�Z
esZej
Z
ejZe�Zdd�Zdd�Zefdd�Zdd�Zdd�Zdd�Zefdd�ZeZeZeZeZdd �Zd!d"�Zd#d$�Zd%d&�Zd'd(�Z d)d*�Z!e"d+d,��Z#�Z$S).rap
    :param headers:
        An iterable of field-value pairs. Must not contain multiple field names
        when compared case-insensitively.

    :param kwargs:
        Additional field-value pairs to pass in to ``dict.update``.

    A ``dict`` like container for storing HTTP Headers.

    Field names are stored and compared case-insensitively in compliance with
    RFC 7230. Iteration provides the first case-sensitive key seen for each
    case-insensitive pair.

    Using ``__setitem__`` syntax overwrites fields that compare equal
    case-insensitively in order to maintain ``dict``'s api. For fields that
    compare equal, instead create a new ``HTTPHeaderDict`` and use ``.add``
    in a loop.

    If multiple fields that are equal case-insensitively are passed to the
    constructor or ``.update``, the behavior is undefined and some will be
    lost.

    >>> headers = HTTPHeaderDict()
    >>> headers.add('Set-Cookie', 'foo=bar')
    >>> headers.add('set-cookie', 'baz=quxx')
    >>> headers['content-length'] = '7'
    >>> headers['SET-cookie']
    'foo=bar, baz=quxx'
    >>> headers['Content-Length']
    '7'
    NcsPtt|�j�t�|_|dk	r>t|t�r4|j|�n
|j|�|rL|j|�dS)N)�superrrrr�
isinstance�
_copy_from�extend)r�headers�kwargs)�	__class__rrr�s

zHTTPHeaderDict.__init__cCs ||g|j|j�<|j|j�S)N)r�lower)rr!�valrrrr)�szHTTPHeaderDict.__setitem__cCs |j|j�}dj|dd��S)Nz, r)rr:�join)rr!r;rrrr#�szHTTPHeaderDict.__getitem__cCs|j|j�=dS)N)rr:)rr!rrrr*�szHTTPHeaderDict.__delitem__cCs|j�|jkS)N)r:r)rr!rrr�__contains__�szHTTPHeaderDict.__contains__cCsbt|t�rt|d�rdSt|t|��s6t|�|�}tdd�|j�D��tdd�|j�D��kS)Nr1Fcss|]\}}|j�|fVqdS)N)r:)�.0�k�vrrr�	<genexpr>�sz(HTTPHeaderDict.__eq__.<locals>.<genexpr>css|]\}}|j�|fVqdS)N)r:)r>r?r@rrrrA�s)r4r�hasattr�type�dict�
itermerged)r�otherrrr�__eq__�szHTTPHeaderDict.__eq__cCs|j|�S)N)rG)rrFrrr�__ne__�szHTTPHeaderDict.__ne__cCs
t|j�S)N)r&r)rrrrr+�szHTTPHeaderDict.__len__ccs"x|jj�D]}|dVqWdS)Nr)rr0)r�valsrrrr-�szHTTPHeaderDict.__iter__cCs<y||}Wn tk
r,||jkr(�|SX||=|SdS)z�D.pop(k[,d]) -> v, remove specified key and return the corresponding value.
          If key is not found, d is returned if given, otherwise KeyError is raised.
        N)�KeyError�_HTTPHeaderDict__marker)rr!�defaultr(rrrr �s
zHTTPHeaderDict.popcCs$y
||=Wntk
rYnXdS)N)rJ)rr!rrr�discard�s
zHTTPHeaderDict.discardcCs4|j�}||g}|jj||�}||k	r0|j|�dS)z�Adds a (name, value) pair, doesn't overwrite the value if it already
        exists.

        >>> headers = HTTPHeaderDict(foo='bar')
        >>> headers.add('Foo', 'baz')
        >>> headers['foo']
        'bar, baz'
        N)r:r�
setdefault�append)rr!r;Z	key_lowerZnew_valsrIrrr�add�s
	zHTTPHeaderDict.addcOst|�dkrtdjt|����t|�dkr2|dnf}t|t�rdx�|j�D]\}}|j||�qJWnvt|t�r�xj|D]}|j|||�qtWnLt|d�r�x@|j	�D]}|j|||�q�Wnx|D]\}}|j||�q�Wx |j
�D]\}}|j||�q�WdS)z�Generic import function for any type of header-like object.
        Adapted version of MutableMapping.update in order to insert items
        with self.add instead of self.__setitem__
        rz9extend() takes at most 1 positional arguments ({0} given)rr1N)r&�	TypeError�formatr4r�	iteritemsrPrrBr1�items)r�argsr8rFr!r;r(rrrr6�s"



zHTTPHeaderDict.extendcCsFy|j|j�}Wn"tk
r4||jkr0gS|SX|dd�SdS)zmReturns a list of all the values for the named field. Returns an
        empty list if the key doesn't exist.rN)rr:rJrK)rr!rLrIrrr�getlist�s
zHTTPHeaderDict.getlistcCsdt|�jt|j��fS)Nz%s(%s))rCrrDrE)rrrr�__repr__szHTTPHeaderDict.__repr__cCsBx<|D]4}|j|�}t|t�r&t|�}|g||j|j�<qWdS)N)rVr4r.rr:)rrFr!r;rrrr5s



zHTTPHeaderDict._copy_fromcCst|��}|j|�|S)N)rCr5)rZclonerrr�copys

zHTTPHeaderDict.copyccsDx>|D]6}|j|j�}x"|dd�D]}|d|fVq&WqWdS)z8Iterate over all header lines, including duplicate ones.rNr)rr:)rr!rIr;rrrrSs
zHTTPHeaderDict.iteritemsccs<x6|D].}|j|j�}|ddj|dd��fVqWdS)z:Iterate over all headers, merging duplicate ones together.rz, rN)rr:r<)rr!r;rrrrE%s
zHTTPHeaderDict.itermergedcCst|j��S)N)r.rS)rrrrrT+szHTTPHeaderDict.itemscCsng}x`|jD]V}|jd�r@|d\}}||d|j�f|d<q|jdd�\}}|j||j�f�qW||�S)	z4Read headers from a Python 2 httplib message object.� �	rz
�:)rYrZ���r\)r7�
startswith�rstrip�splitrO�strip)�cls�messager7�liner!r(rrr�from_httplib.s
zHTTPHeaderDict.from_httplib)N)%rrrr2rr)r#r*r=rGrHrrrr�objectrKr+r-r rMrPr6rVZ
getheadersZgetallmatchingheadersZigetZget_allrWr5rXrSrErT�classmethodrd�
__classcell__rr)r9rres< 
N)Z
__future__r�collectionsrrZ	threadingr�ImportErrorrZpackages.ordered_dictZpackages.sixrrr�__all__rer$rrrrrr�<module>sJ_vendor/urllib3/__pycache__/filepost.cpython-36.opt-1.pyc000064400000005037151733136330017210 0ustar003

�Pf	�@s�ddlmZddlZddlmZddlmZddlmZddl	m
Z
ddlmZej
d	�d
Zdd�Zd
d�Zdd�Zddd�ZdS)�)�absolute_importN)�uuid4)�BytesIO�)�six)�b)�RequestFieldzutf-8�cCst�jS)zN
    Our embarrassingly-simple replacement for mimetools.choose_boundary.
    )r�hex�rr�/usr/lib/python3.6/filepost.py�choose_boundarysr
ccsNt|t�rtj|�}nt|�}x*|D]"}t|t�r:|Vq$tj|�Vq$WdS)z�
    Iterate over fields.

    Supports list of (k, v) tuples and dicts, and lists of
    :class:`~urllib3.fields.RequestField`.

    N)�
isinstance�dictr�	iteritems�iterrZfrom_tuples)�fields�i�fieldrrr�iter_field_objectss


rcCs,t|t�rdd�tj|�D�Sdd�|D�S)a-
    .. deprecated:: 1.6

    Iterate over fields.

    The addition of :class:`~urllib3.fields.RequestField` makes this function
    obsolete. Instead, use :func:`iter_field_objects`, which returns
    :class:`~urllib3.fields.RequestField` objects.

    Supports list of (k, v) tuples and dicts.
    css|]\}}||fVqdS)Nr)�.0�k�vrrr�	<genexpr>6sziter_fields.<locals>.<genexpr>css|]\}}||fVqdS)Nr)rrrrrrr8s)rrrr)rrrr�iter_fields)s
rcCs�t�}|dkrt�}x|t|�D]p}|jtd|��t|�j|j��|j}t|t	�r^t
|�}t|tj�rzt|�j|�n
|j|�|jd�qW|jtd|��t
d|�}|j
�|fS)aJ
    Encode a dictionary of ``fields`` using the multipart/form-data MIME format.

    :param fields:
        Dictionary of fields or list of (key, :class:`~urllib3.fields.RequestField`).

    :param boundary:
        If not specified, then a random boundary will be generated using
        :func:`mimetools.choose_boundary`.
    Nz--%s
s
z--%s--
z multipart/form-data; boundary=%s)rr
r�writer�writerZrender_headers�datar�int�strrZ	text_type�getvalue)r�boundaryZbodyrrZcontent_typerrr�encode_multipart_formdata;s 

r")N)Z
__future__r�codecsZuuidr�iorZpackagesrZpackages.sixrrr�lookuprr
rrr"rrrr�<module>s_vendor/urllib3/__pycache__/_collections.cpython-36.pyc000064400000024340151733136330017077 0ustar003

�Pf�'�@s�ddlmZddlmZmZyddlmZWn"ek
rNGdd�d�ZYnXyddlmZWn ek
r�ddl	mZYnXddl
mZmZm
Z
d	d
gZe�ZGdd	�d	e�ZGdd
�d
e�Zd
S)�)�absolute_import)�Mapping�MutableMapping)�RLockc@seZdZdd�Zdd�ZdS)rcCsdS)N�)�selfrr�"/usr/lib/python3.6/_collections.py�	__enter__szRLock.__enter__cCsdS)Nr)r�exc_type�	exc_value�	tracebackrrr�__exit__
szRLock.__exit__N)�__name__�
__module__�__qualname__r	r
rrrrrsr)�OrderedDict�)�iterkeys�
itervalues�PY3�RecentlyUsedContainer�HTTPHeaderDictc@sVeZdZdZeZddd�Zdd�Zdd	�Zd
d�Z	dd
�Z
dd�Zdd�Zdd�Z
dS)ra�
    Provides a thread-safe dict-like container which maintains up to
    ``maxsize`` keys while throwing away the least-recently-used keys beyond
    ``maxsize``.

    :param maxsize:
        Maximum number of recent elements to retain.

    :param dispose_func:
        Every time an item is evicted from the container,
        ``dispose_func(value)`` is called.  Callback which will get called
    �
NcCs"||_||_|j�|_t�|_dS)N)�_maxsize�dispose_func�ContainerCls�
_containerr�lock)r�maxsizerrrr�__init__+s
zRecentlyUsedContainer.__init__c
Cs,|j�|jj|�}||j|<|SQRXdS)N)rr�pop)r�key�itemrrr�__getitem__2s
z!RecentlyUsedContainer.__getitem__c
Cslt}|j�@|jj|t�}||j|<t|j�|jkrF|jjdd�\}}WdQRX|jrh|tk	rh|j|�dS)NF)Zlast)�_Nullrr�get�lenr�popitemr)rr!�valueZ
evicted_valueZ_keyrrr�__setitem__9s
z!RecentlyUsedContainer.__setitem__c	Cs2|j�|jj|�}WdQRX|jr.|j|�dS)N)rrr r)rr!r(rrr�__delitem__Hsz!RecentlyUsedContainer.__delitem__c	Cs|j�t|j�SQRXdS)N)rr&r)rrrr�__len__OszRecentlyUsedContainer.__len__cCstd��dS)Nz7Iteration over this class is unlikely to be threadsafe.)�NotImplementedError)rrrr�__iter__SszRecentlyUsedContainer.__iter__c
CsL|j�tt|j��}|jj�WdQRX|jrHx|D]}|j|�q6WdS)N)r�listrr�clearr)r�valuesr(rrrr/Vs
zRecentlyUsedContainer.clearc
Cs |j�tt|j��SQRXdS)N)rr.rr)rrrr�keys`szRecentlyUsedContainer.keys)rN)rrr�__doc__rrrr#r)r*r+r-r/r1rrrrrs

cs�eZdZdZd-�fdd�	Zdd�Zdd�Zd	d
�Zdd�Zd
d�Z	dd�Z
esZej
Z
ejZe�Zdd�Zdd�Zefdd�Zdd�Zdd�Zdd�Zefdd�ZeZeZeZeZdd �Zd!d"�Zd#d$�Zd%d&�Zd'd(�Z d)d*�Z!e"d+d,��Z#�Z$S).rap
    :param headers:
        An iterable of field-value pairs. Must not contain multiple field names
        when compared case-insensitively.

    :param kwargs:
        Additional field-value pairs to pass in to ``dict.update``.

    A ``dict`` like container for storing HTTP Headers.

    Field names are stored and compared case-insensitively in compliance with
    RFC 7230. Iteration provides the first case-sensitive key seen for each
    case-insensitive pair.

    Using ``__setitem__`` syntax overwrites fields that compare equal
    case-insensitively in order to maintain ``dict``'s api. For fields that
    compare equal, instead create a new ``HTTPHeaderDict`` and use ``.add``
    in a loop.

    If multiple fields that are equal case-insensitively are passed to the
    constructor or ``.update``, the behavior is undefined and some will be
    lost.

    >>> headers = HTTPHeaderDict()
    >>> headers.add('Set-Cookie', 'foo=bar')
    >>> headers.add('set-cookie', 'baz=quxx')
    >>> headers['content-length'] = '7'
    >>> headers['SET-cookie']
    'foo=bar, baz=quxx'
    >>> headers['Content-Length']
    '7'
    NcsPtt|�j�t�|_|dk	r>t|t�r4|j|�n
|j|�|rL|j|�dS)N)�superrrrr�
isinstance�
_copy_from�extend)r�headers�kwargs)�	__class__rrr�s

zHTTPHeaderDict.__init__cCs ||g|j|j�<|j|j�S)N)r�lower)rr!�valrrrr)�szHTTPHeaderDict.__setitem__cCs |j|j�}dj|dd��S)Nz, r)rr:�join)rr!r;rrrr#�szHTTPHeaderDict.__getitem__cCs|j|j�=dS)N)rr:)rr!rrrr*�szHTTPHeaderDict.__delitem__cCs|j�|jkS)N)r:r)rr!rrr�__contains__�szHTTPHeaderDict.__contains__cCsbt|t�rt|d�rdSt|t|��s6t|�|�}tdd�|j�D��tdd�|j�D��kS)Nr1Fcss|]\}}|j�|fVqdS)N)r:)�.0�k�vrrr�	<genexpr>�sz(HTTPHeaderDict.__eq__.<locals>.<genexpr>css|]\}}|j�|fVqdS)N)r:)r>r?r@rrrrA�s)r4r�hasattr�type�dict�
itermerged)r�otherrrr�__eq__�szHTTPHeaderDict.__eq__cCs|j|�S)N)rG)rrFrrr�__ne__�szHTTPHeaderDict.__ne__cCs
t|j�S)N)r&r)rrrrr+�szHTTPHeaderDict.__len__ccs"x|jj�D]}|dVqWdS)Nr)rr0)r�valsrrrr-�szHTTPHeaderDict.__iter__cCs<y||}Wn tk
r,||jkr(�|SX||=|SdS)z�D.pop(k[,d]) -> v, remove specified key and return the corresponding value.
          If key is not found, d is returned if given, otherwise KeyError is raised.
        N)�KeyError�_HTTPHeaderDict__marker)rr!�defaultr(rrrr �s
zHTTPHeaderDict.popcCs$y
||=Wntk
rYnXdS)N)rJ)rr!rrr�discard�s
zHTTPHeaderDict.discardcCs4|j�}||g}|jj||�}||k	r0|j|�dS)z�Adds a (name, value) pair, doesn't overwrite the value if it already
        exists.

        >>> headers = HTTPHeaderDict(foo='bar')
        >>> headers.add('Foo', 'baz')
        >>> headers['foo']
        'bar, baz'
        N)r:r�
setdefault�append)rr!r;Z	key_lowerZnew_valsrIrrr�add�s
	zHTTPHeaderDict.addcOst|�dkrtdjt|����t|�dkr2|dnf}t|t�rdx�|j�D]\}}|j||�qJWnvt|t�r�xj|D]}|j|||�qtWnLt|d�r�x@|j	�D]}|j|||�q�Wnx|D]\}}|j||�q�Wx |j
�D]\}}|j||�q�WdS)z�Generic import function for any type of header-like object.
        Adapted version of MutableMapping.update in order to insert items
        with self.add instead of self.__setitem__
        rz9extend() takes at most 1 positional arguments ({0} given)rr1N)r&�	TypeError�formatr4r�	iteritemsrPrrBr1�items)r�argsr8rFr!r;r(rrrr6�s"



zHTTPHeaderDict.extendcCsFy|j|j�}Wn"tk
r4||jkr0gS|SX|dd�SdS)zmReturns a list of all the values for the named field. Returns an
        empty list if the key doesn't exist.rN)rr:rJrK)rr!rLrIrrr�getlist�s
zHTTPHeaderDict.getlistcCsdt|�jt|j��fS)Nz%s(%s))rCrrDrE)rrrr�__repr__szHTTPHeaderDict.__repr__cCsBx<|D]4}|j|�}t|t�r&t|�}|g||j|j�<qWdS)N)rVr4r.rr:)rrFr!r;rrrr5s



zHTTPHeaderDict._copy_fromcCst|��}|j|�|S)N)rCr5)rZclonerrr�copys

zHTTPHeaderDict.copyccsDx>|D]6}|j|j�}x"|dd�D]}|d|fVq&WqWdS)z8Iterate over all header lines, including duplicate ones.rNr)rr:)rr!rIr;rrrrSs
zHTTPHeaderDict.iteritemsccs<x6|D].}|j|j�}|ddj|dd��fVqWdS)z:Iterate over all headers, merging duplicate ones together.rz, rN)rr:r<)rr!r;rrrrE%s
zHTTPHeaderDict.itermergedcCst|j��S)N)r.rS)rrrrrT+szHTTPHeaderDict.itemscCsng}x`|jD]V}|jd�r@|d\}}||d|j�f|d<q|jdd�\}}|j||j�f�qW||�S)	z4Read headers from a Python 2 httplib message object.� �	rz
�:)rYrZ���r\)r7�
startswith�rstrip�splitrO�strip)�cls�messager7�liner!r(rrr�from_httplib.s
zHTTPHeaderDict.from_httplib)N)%rrrr2rr)r#r*r=rGrHrrrr�objectrKr+r-r rMrPr6rVZ
getheadersZgetallmatchingheadersZigetZget_allrWr5rXrSrErT�classmethodrd�
__classcell__rr)r9rres< 
N)Z
__future__r�collectionsrrZ	threadingr�ImportErrorrZpackages.ordered_dictZpackages.sixrrr�__all__rer$rrrrrr�<module>sJ_vendor/urllib3/__pycache__/exceptions.cpython-36.opt-1.pyc000064400000024121151733136330017537 0ustar003

�Pf��@sLddlmZddlmZGdd�de�ZGdd�de�ZGdd	�d	e�Z	Gd
d�de	�Z
Gdd
�d
e�ZGdd�de�ZGdd�de�Z
Gdd�de�ZeZGdd�de
�ZGdd�de
�ZGdd�de�ZGdd�de�ZGdd�dee
�ZGdd�de�ZGd d!�d!ee	�ZGd"d#�d#e	�ZGd$d%�d%e	�ZGd&d'�d'ee�ZGd(d)�d)e�ZGd*d+�d+e�ZGd,d-�d-e�ZGd.d/�d/e�ZGd0d1�d1e�ZGd2d3�d3e�Z Gd4d5�d5e�Z!Gd6d7�d7e�Z"Gd8d9�d9e�Z#Gd:d;�d;ee�Z$Gd<d=�d=e�Z%Gd>d?�d?ee�ZGd@dA�dAe�Z&GdBdC�dCe'e�Z(GdDdE�dEe�Z)GdFdG�dGe�Z*dHS)I�)�absolute_import�)�IncompleteReadc@seZdZdZdS)�	HTTPErrorz#Base exception used by this module.N)�__name__�
__module__�__qualname__�__doc__�r
r
� /usr/lib/python3.6/exceptions.pyrsrc@seZdZdZdS)�HTTPWarningz!Base warning used by this module.N)rrrr	r
r
r
rr
src@s eZdZdZdd�Zdd�ZdS)�	PoolErrorz/Base exception for errors caused within a pool.cCs||_tj|d||f�dS)Nz%s: %s)�poolr�__init__)�selfr�messager
r
rrszPoolError.__init__cCs
|jdfS)N)NN)�	__class__)rr
r
r�
__reduce__szPoolError.__reduce__N)rrrr	rrr
r
r
rr
sr
c@s eZdZdZdd�Zdd�ZdS)�RequestErrorz8Base exception for PoolErrors that have associated URLs.cCs||_tj|||�dS)N)�urlr
r)rrrrr
r
rrszRequestError.__init__cCs|jd|jdffS)N)rr)rr
r
rr#szRequestError.__reduce__N)rrrr	rrr
r
r
rrsrc@seZdZdZdS)�SSLErrorz9Raised when SSL certificate fails in an HTTPS connection.N)rrrr	r
r
r
rr(src@seZdZdZdS)�
ProxyErrorz,Raised when the connection to a proxy fails.N)rrrr	r
r
r
rr-src@seZdZdZdS)�DecodeErrorz;Raised when automatic decoding based on Content-Type fails.N)rrrr	r
r
r
rr2src@seZdZdZdS)�
ProtocolErrorz>Raised when something unexpected happens mid-request/response.N)rrrr	r
r
r
rr7src@seZdZdZddd�ZdS)�
MaxRetryErroraRaised when the maximum number of retries is exceeded.

    :param pool: The connection pool
    :type pool: :class:`~urllib3.connectionpool.HTTPConnectionPool`
    :param string url: The requested Url
    :param exceptions.Exception reason: The underlying error

    NcCs&||_d||f}tj||||�dS)Nz0Max retries exceeded with url: %s (Caused by %r))�reasonrr)rrrrrr
r
rrLs
zMaxRetryError.__init__)N)rrrr	rr
r
r
rrBsrc@seZdZdZddd�ZdS)�HostChangedErrorz?Raised when an existing pool gets a request for a foreign host.�cCs"d|}tj||||�||_dS)Nz)Tried to open a foreign host with url: %s)rr�retries)rrrrrr
r
rrXszHostChangedError.__init__N)r)rrrr	rr
r
r
rrUsrc@seZdZdZdS)�TimeoutStateErrorz3 Raised when passing an invalid state to a timeout N)rrrr	r
r
r
rr^src@seZdZdZdS)�TimeoutErrorz� Raised when a socket timeout error occurs.

    Catching this error will catch both :exc:`ReadTimeoutErrors
    <ReadTimeoutError>` and :exc:`ConnectTimeoutErrors <ConnectTimeoutError>`.
    N)rrrr	r
r
r
rr csr c@seZdZdZdS)�ReadTimeoutErrorzFRaised when a socket timeout occurs while receiving data from a serverN)rrrr	r
r
r
rr!lsr!c@seZdZdZdS)�ConnectTimeoutErrorz@Raised when a socket timeout occurs while connecting to a serverN)rrrr	r
r
r
rr"ssr"c@seZdZdZdS)�NewConnectionErrorzHRaised when we fail to establish a new connection. Usually ECONNREFUSED.N)rrrr	r
r
r
rr#xsr#c@seZdZdZdS)�EmptyPoolErrorzCRaised when a pool runs out of connections and no more are allowed.N)rrrr	r
r
r
rr$}sr$c@seZdZdZdS)�ClosedPoolErrorzCRaised when a request enters a pool after the pool has been closed.N)rrrr	r
r
r
rr%�sr%c@seZdZdZdS)�LocationValueErrorz<Raised when there is something wrong with a given URL input.N)rrrr	r
r
r
rr&�sr&c@seZdZdZdd�ZdS)�LocationParseErrorz=Raised when get_host or similar fails to parse the URL input.cCsd|}tj||�||_dS)NzFailed to parse: %s)rr�location)rr(rr
r
rr�szLocationParseError.__init__N)rrrr	rr
r
r
rr'�sr'c@seZdZdZdZdZdS)�
ResponseErrorzDUsed as a container for an error reason supplied in a MaxRetryError.ztoo many error responsesz&too many {status_code} error responsesN)rrrr	Z
GENERIC_ERRORZSPECIFIC_ERRORr
r
r
rr)�sr)c@seZdZdZdS)�SecurityWarningz/Warned when perfoming security reducing actionsN)rrrr	r
r
r
rr*�sr*c@seZdZdZdS)�SubjectAltNameWarningzBWarned when connecting to a host with a certificate missing a SAN.N)rrrr	r
r
r
rr+�sr+c@seZdZdZdS)�InsecureRequestWarningz/Warned when making an unverified HTTPS request.N)rrrr	r
r
r
rr,�sr,c@seZdZdZdS)�SystemTimeWarningz0Warned when system time is suspected to be wrongN)rrrr	r
r
r
rr-�sr-c@seZdZdZdS)�InsecurePlatformWarningzEWarned when certain SSL configuration is not available on a platform.N)rrrr	r
r
r
rr.�sr.c@seZdZdZdS)�SNIMissingWarningz9Warned when making a HTTPS request without SNI available.N)rrrr	r
r
r
rr/�sr/c@seZdZdZdS)�DependencyWarningzc
    Warned when an attempt is made to import a module with missing optional
    dependencies.
    N)rrrr	r
r
r
rr0�sr0c@seZdZdZdS)�ResponseNotChunkedz;Response needs to be chunked in order to read it as chunks.N)rrrr	r
r
r
rr1�sr1c@seZdZdZdS)�BodyNotHttplibCompatiblezz
    Body should be httplib.HTTPResponse like (have an fp attribute which
    returns raw chunks) for read_chunked().
    N)rrrr	r
r
r
rr2�sr2cs(eZdZdZ�fdd�Zdd�Z�ZS)rz�
    Response length doesn't match expected Content-Length

    Subclass of http_client.IncompleteRead to allow int value
    for `partial` to avoid creating large objects on streamed
    reads.
    cstt|�j||�dS)N)�superrr)r�partial�expected)rr
rr�szIncompleteRead.__init__cCsd|j|jfS)Nz/IncompleteRead(%i bytes read, %i more expected))r4r5)rr
r
r�__repr__�szIncompleteRead.__repr__)rrrr	rr6�
__classcell__r
r
)rrr�src@seZdZdZdS)�
InvalidHeaderz(The header provided was somehow invalid.N)rrrr	r
r
r
rr8�sr8cs eZdZdZ�fdd�Z�ZS)�ProxySchemeUnknownz1ProxyManager does not support the supplied schemecsd|}tt|�j|�dS)NzNot supported proxy scheme %s)r3r9r)r�schemer)rr
rr�szProxySchemeUnknown.__init__)rrrr	rr7r
r
)rrr9�sr9cs eZdZdZ�fdd�Z�ZS)�HeaderParsingErrorzNRaised by assert_header_parsing, but we convert it to a log.warning statement.cs$d|pd|f}tt|�j|�dS)Nz%s, unparsed data: %rZUnknown)r3r;r)rZdefectsZ
unparsed_datar)rr
rr�szHeaderParsingError.__init__)rrrr	rr7r
r
)rrr;�sr;c@seZdZdZdS)�UnrewindableBodyErrorz9urllib3 encountered an error when trying to rewind a bodyN)rrrr	r
r
r
rr<�sr<N)+Z
__future__rZpackages.six.moves.http_clientrZhttplib_IncompleteRead�	Exceptionr�Warningrr
rrrrr�ConnectionErrorrrrr r!r"r#r$r%�
ValueErrorr&r'r)r*r+r,r-r.r/r0r1r2r8�AssertionErrorr9r;r<r
r
r
r�<module>sH		
	_vendor/urllib3/__pycache__/fields.cpython-36.pyc000064400000013240151733136330015665 0ustar003

�Pf7�@sNddlmZddlZddlZddlmZddd�Zdd	�ZGd
d�de	�Z
dS)
�)�absolute_importN�)�six�application/octet-streamcCs|rtj|�dp|S|S)z�
    Guess the "Content-Type" of a file.

    :param filename:
        The filename to guess the "Content-Type" of using :mod:`mimetypes`.
    :param default:
        If no "Content-Type" can be guessed, default to `default`.
    r)�	mimetypesZ
guess_type)�filename�default�r	�/usr/lib/python3.6/fields.py�guess_content_types	rcs�t�fdd�dD��sNd|�f}y|jd�Wnttfk
rHYnX|Stjrlt�tj�rl�jd��tj	j
�d��d|�f��S)a�
    Helper function to format and quote a single header parameter.

    Particularly useful for header parameters which might contain
    non-ASCII values, like file names. This follows RFC 2231, as
    suggested by RFC 2388 Section 4.4.

    :param name:
        The name of the parameter, a string expected to be ASCII only.
    :param value:
        The value of the parameter, provided as a unicode string.
    c3s|]}|�kVqdS)Nr	)�.0Zch)�valuer	r
�	<genexpr>#sz&format_header_param.<locals>.<genexpr>z"\
z%s="%s"�asciizutf-8z%s*=%s)�any�encode�UnicodeEncodeError�UnicodeDecodeErrorrZPY3�
isinstanceZ	text_type�emailZutilsZencode_rfc2231)�namer
�resultr	)r
r
�format_header_params

rc@sHeZdZdZddd�Zedd��Zdd�Zd	d
�Zdd�Z	dd
d�Z
dS)�RequestFieldaK
    A data container for request body parameters.

    :param name:
        The name of this request field.
    :param data:
        The data/value body.
    :param filename:
        An optional filename of the request field.
    :param headers:
        An optional dict-like object of headers to initially use for the field.
    NcCs*||_||_||_i|_|r&t|�|_dS)N)�_name�	_filename�data�headers�dict)�selfrrrrr	r	r
�__init__?szRequestField.__init__cCs^t|t�r4t|�dkr"|\}}}q@|\}}t|�}nd}d}|}||||d�}|j|d�|S)a�
        A :class:`~urllib3.fields.RequestField` factory from old-style tuple parameters.

        Supports constructing :class:`~urllib3.fields.RequestField` from
        parameter of key/value strings AND key/filetuple. A filetuple is a
        (filename, data, MIME type) tuple where the MIME type is optional.
        For example::

            'foo': 'bar',
            'fakefile': ('foofile.txt', 'contents of foofile'),
            'realfile': ('barfile.txt', open('realfile').read()),
            'typedfile': ('bazfile.bin', open('bazfile').read(), 'image/jpeg'),
            'nonamefile': 'contents of nonamefile field',

        Field names and filenames must be unicode.
        �N)r)�content_type)r�tuple�lenr�make_multipart)�clsZ	fieldnamer
rrr"Z
request_paramr	r	r
�from_tuplesGs

zRequestField.from_tuplescCs
t||�S)a
        Overridable helper function to format a single header parameter.

        :param name:
            The name of the parameter, a string expected to be ASCII only.
        :param value:
            The value of the parameter, provided as a unicode string.
        )r)rrr
r	r	r
�_render_partis	zRequestField._render_partcCsPg}|}t|t�r|j�}x*|D]"\}}|dk	r |j|j||��q Wdj|�S)aO
        Helper function to format and quote a single header.

        Useful for single headers that are composed of multiple items. E.g.,
        'Content-Disposition' fields.

        :param header_parts:
            A sequence of (k, v) typles or a :class:`dict` of (k, v) to format
            as `k1="v1"; k2="v2"; ...`.
        Nz; )rr�items�appendr(�join)rZheader_parts�parts�iterablerr
r	r	r
�
_render_partsts
zRequestField._render_partscCs�g}dddg}x2|D]*}|jj|d�r|jd||j|f�qWx4|jj�D]&\}}||krN|rN|jd||f�qNW|jd�dj|�S)z=
        Renders the headers for this request field.
        zContent-DispositionzContent-TypezContent-LocationFz%s: %sz
)r�getr*r)r+)r�linesZ	sort_keysZsort_keyZheader_nameZheader_valuer	r	r
�render_headers�s


zRequestField.render_headersc	CsX|pd|jd<|jddjd|jd|jfd|jff�g�7<||jd<||jd<d	S)
a|
        Makes this request field into a multipart request field.

        This method overrides "Content-Disposition", "Content-Type" and
        "Content-Location" headers to the request parameter.

        :param content_type:
            The 'Content-Type' of the request body.
        :param content_location:
            The 'Content-Location' of the request body.

        z	form-datazContent-Dispositionz; �rrzContent-TypezContent-LocationN)rr+r.rr)rZcontent_dispositionr"Zcontent_locationr	r	r
r%�s
zRequestField.make_multipart)NN)NNN)�__name__�
__module__�__qualname__�__doc__r �classmethodr'r(r.r1r%r	r	r	r
r2s
"r)r)Z
__future__rZemail.utilsrrZpackagesrrr�objectrr	r	r	r
�<module>s
_vendor/urllib3/__pycache__/connection.cpython-36.opt-1.pyc000064400000021073151733136330017520 0ustar003

�Pf�2�@s�ddlmZddlZddlZddlZddlZddlZddlmZm	Z
ddlZddlm
Z
ddlmZddlmZyddlZejZWn,eefk
r�dZGdd	�d	e�ZYnXyeZWn$ek
r�Gd
d�de�ZYnXddlmZmZmZmZdd
l m!Z!m"Z"ddl#m$Z$m%Z%m&Z&m'Z'm(Z(ddl)m*Z*ddl+m,Z,ej-e.�Z/ddd�Z0ej1ddd�Z2Gdd�de3�Z4Gdd�dee3�ZGdd�de�Z5Gdd�de5�Z6dd�Z7e�r�e5Z8e6Z5ne4Z5dS)�)�absolute_importN)�error�timeout�)�six)�HTTPConnection)�
HTTPExceptionc@seZdZdS)�BaseSSLErrorN)�__name__�
__module__�__qualname__�r
r
� /usr/lib/python3.6/connection.pyr	sr	c@seZdZdS)�ConnectionErrorN)r
rrr
r
r
rrsr)�NewConnectionError�ConnectTimeoutError�SubjectAltNameWarning�SystemTimeWarning)�match_hostname�CertificateError)�resolve_cert_reqs�resolve_ssl_version�assert_fingerprint�create_urllib3_context�ssl_wrap_socket)�
connection)�HTTPHeaderDict�Pi�)�http�httpsi�c@seZdZdZdS)�DummyConnectionz-Used to detect a failed ConnectionCls import.N)r
rr�__doc__r
r
r
rr Asr c@sVeZdZdZedZejejdfgZ	dZ
dd�Zdd�Zd	d
�Z
dd�Zddd�Zd
S)ra{
    Based on httplib.HTTPConnection but provides an extra constructor
    backwards-compatibility layer between older and newer Pythons.

    Additional keyword parameters are used to configure attributes of the connection.
    Accepted parameters include:

      - ``strict``: See the documentation on :class:`urllib3.connectionpool.HTTPConnectionPool`
      - ``source_address``: Set the source address for the current connection.

        .. note:: This is ignored for Python 2.6. It is only applied for 2.7 and 3.x

      - ``socket_options``: Set specific options on the underlying socket. If not specified, then
        defaults are loaded from ``HTTPConnection.default_socket_options`` which includes disabling
        Nagle's algorithm (sets TCP_NODELAY to 1) unless the connection is behind a proxy.

        For example, if you wish to enable TCP Keep Alive in addition to the defaults,
        you might pass::

            HTTPConnection.default_socket_options + [
                (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1),
            ]

        Or you may want to disable the defaults by passing an empty list (e.g., ``[]``).
    rrFcOsZtjr|jdd�|jd�|_tjdkr4|jdd�|jd|j�|_t	j
|f|�|�dS)N�strict�source_address���socket_options)r$r%)rZPY3�pop�getr#�sys�version_info�default_socket_optionsr&�_HTTPConnection�__init__)�self�args�kwr
r
rr-js
zHTTPConnection.__init__cCs�i}|jr|j|d<|jr$|j|d<ytj|j|jf|jf|�}Wnftk
rz}zt|d|j|jf��WYdd}~Xn0t	k
r�}zt
|d|��WYdd}~XnX|S)zp Establish a socket connection and set nodelay settings on it.

        :return: New socket connection.
        r#r&z0Connection to %s timed out. (connect timeout=%s)Nz(Failed to establish a new connection: %s)r#r&rZcreate_connection�host�portr�
SocketTimeoutr�SocketErrorr)r.Zextra_kw�conn�er
r
r�	_new_conns 

 zHTTPConnection._new_conncCs$||_t|dd�r |j�d|_dS)N�_tunnel_hostr)�sock�getattr�_tunnel�	auto_open)r.r5r
r
r�
_prepare_conn�szHTTPConnection._prepare_conncCs|j�}|j|�dS)N)r7r=)r.r5r
r
r�connect�szHTTPConnection.connectNcCst|dk	r|ni�}d|k}d|k}|j||||d�x |j�D]\}}|j||�q@Wd|krl|jdd�|j�|dk	�rtjtjf}	t||	�r�|f}xj|D]b}
|
s�q�t|
tj�s�|
j	d�}
t
t|
��d	d�}|j|j	d
��|jd�|j|
�|jd�q�W|jd�dS)
z�
        Alternative to the common request method, which sends the
        body with chunked encoding and not as one block
        Nzaccept-encodingr1)�skip_accept_encoding�	skip_hostztransfer-encodingzTransfer-EncodingZchunked�utf8r$zutf-8s
s0

)
rZ
putrequest�itemsZ	putheaderZ
endheadersrZstring_typesZbinary_type�
isinstance�encode�hex�len�send)r.�methodZurlZbodyZheadersr?r@�header�valueZstringish_types�chunkZlen_strr
r
r�request_chunked�s8





zHTTPConnection.request_chunked)NN)r
rrr!�port_by_scheme�default_port�socketZIPPROTO_TCPZTCP_NODELAYr+�is_verifiedr-r7r=r>rLr
r
r
rrFsrc@s8eZdZedZdZddddejdfdd�Zdd�Z	dS)�HTTPSConnectionrNc	Ks8tj|||f||d�|��||_||_||_d|_dS)N)r"rr)rr-�key_file�	cert_file�ssl_contextZ	_protocol)	r.r1r2rRrSr"rrTr0r
r
rr-�szHTTPSConnection.__init__cCsN|j�}|j|�|jdkr2ttd�td�d�|_t||j|j|jd�|_	dS)N)�ssl_version�	cert_reqs)r9�keyfile�certfilerT)
r7r=rTrrrrrRrSr9)r.r5r
r
rr>�s

zHTTPSConnection.connect)
r
rrrMrNrUrOZ_GLOBAL_DEFAULT_TIMEOUTr-r>r
r
r
rrQ�s
rQc@s6eZdZdZdZdZdZdZdZddd�Z	dd�Z
dS)�VerifiedHTTPSConnectionz[
    Based on httplib.HTTPSConnection but wraps the socket with
    SSL certification.
    NcCsn|dkr(|s|rd}n|jdk	r(|jj}||_||_||_||_||_|oTtjj	|�|_
|oftjj	|�|_dS)zX
        This method should only be called once, before the connection is used.
        N�
CERT_REQUIRED)rT�verify_moderRrSrV�assert_hostnamer�os�path�
expanduser�ca_certs�ca_cert_dir)r.rRrSrVr`r\rrar
r
r�set_certs
z VerifiedHTTPSConnection.set_certc	CsL|j�}|j}t|dd�r4||_|j�d|_|j}tjj	�t
k}|rXtjdj
t
�t�|jdkr|tt|j�t|j�d�|_|j}t|j�|_t||j|j|j|j||d�|_|jr�t|jjdd�|j�nb|jtjko�t|dd	�o�|jd	k	�r.|jj�}|j d
f��stjdj
|�t!�t"||j�p*|�|jtj#k�pD|jdk	|_$dS)Nr8rzWSystem time is way off (before {0}). This will probably lead to SSL verification errors)rUrV)r9rWrXr`raZserver_hostnamerTT)Zbinary_formZcheck_hostnameFZsubjectAltNamez�Certificate for {0} has no `subjectAltName`, falling back to check for a `commonName` for now. This feature is being removed by major browsers and deprecated by RFC 2818. (See https://github.com/shazow/urllib3/issues/497 for details.))%r7r1r:r9r;r<r8�datetime�dateZtoday�RECENT_DATE�warnings�warn�formatrrTrrrUrrVr[rrRrSr`rarZgetpeercert�sslZ	CERT_NONEr\r(r�_match_hostnamerZrP)r.r5ZhostnameZis_time_off�context�certr
r
rr>sT



zVerifiedHTTPSConnection.connect)NNNNNNN)r
rrr!rVr`rarUrrbr>r
r
r
rrY�s
rYcCsLyt||�Wn8tk
rF}ztjd||�||_�WYdd}~XnXdS)Nz@Certificate did not match expected hostname: %s. Certificate: %s)rr�logrZ
_peer_cert)rlZasserted_hostnamer6r
r
rrjbsrj)9Z
__future__rrcZloggingr]r)rOrr4rr3rfZpackagesrZpackages.six.moves.http_clientrr,rriZSSLErrorr	�ImportError�AttributeError�
BaseExceptionr�	NameError�	Exception�
exceptionsrrrrZpackages.ssl_match_hostnamerrZ	util.ssl_rrrrr�utilr�_collectionsrZ	getLoggerr
rmrMrdre�objectr rQrYrjZUnverifiedHTTPSConnectionr
r
r
r�<module>sN
	
&l_vendor/urllib3/__pycache__/__init__.cpython-36.opt-1.pyc000064400000004576151733136330017131 0ustar003

�Pf%�@s`dZddlmZddlZddlmZmZmZddlm	Z	ddl
mZddlm
Z
mZmZdd	lmZdd
lmZddlmZddlmZdd
lmZddlZyddlmZWn&ek
r�Gdd�dej�ZYnXdZdZdZ d(Z!ej"e#�j$e��ej%fd"d�Z&[ej'd#e	j(d$d%�ej'd&e	j)d$d%�ej'd&e	j*d$d%�ej'd&e	j+d$d%�e	j,fd'd�Z-dS))z8
urllib3 - Thread-safe connection pooling and re-using.
�)�absolute_importN�)�HTTPConnectionPool�HTTPSConnectionPool�connection_from_url)�
exceptions)�encode_multipart_formdata)�PoolManager�ProxyManager�proxy_from_url)�HTTPResponse)�make_headers)�get_host)�Timeout)�Retry)�NullHandlerc@seZdZdd�ZdS)rcCsdS)N�)�self�recordrr�/usr/lib/python3.6/__init__.py�emitszNullHandler.emitN)�__name__�
__module__�__qualname__rrrrrrsrz(Andrey Petrov (andrey.petrov@shazow.net)ZMITz1.22rrr	r
rrr�add_stderr_loggerr�disable_warningsrrr
rcCsFtjt�}tj�}|jtjd��|j|�|j|�|jdt�|S)z�
    Helper for quickly adding a StreamHandler to the logger. Useful for
    debugging.

    Returns the handler after adding it.
    z%%(asctime)s %(levelname)s %(message)sz,Added a stderr logging handler to logger: %s)	�logging�	getLoggerrZ
StreamHandlerZsetFormatterZ	Formatter�
addHandlerZsetLevel�debug)�levelZloggerZhandlerrrrr9s	


�alwaysT)�append�defaultcCstjd|�dS)z<
    Helper for quickly disabling all urllib3 warnings.
    �ignoreN)�warnings�simplefilter)�categoryrrrr]s)rrr	r
rrrrrrrrr
r).�__doc__Z
__future__rr%Zconnectionpoolrrr�rZfilepostrZpoolmanagerr	r
rZresponserZutil.requestr
Zutil.urlrZutil.timeoutrZ
util.retryrrr�ImportErrorZHandler�
__author__Z__license__�__version__�__all__rrr�DEBUGrr&ZSecurityWarningZSubjectAltNameWarningZInsecurePlatformWarningZSNIMissingWarningZHTTPWarningrrrrr�<module>sT
_vendor/urllib3/__pycache__/connectionpool.cpython-36.pyc000064400000056152151733136330017461 0ustar003

�Pf��@s�ddlmZddlZddlZddlZddlZddlmZm	Z
ddlZddlmZm
Z
mZmZmZmZmZmZmZmZmZmZmZddlmZddlmZddlmZdd	lm Z m!Z!m"Z"m#Z#m$Z$m%Z%m&Z&dd
l'm(Z(ddl)m*Z*ddl+m,Z,dd
l-m.Z.ddl/m0Z0ddl1m2Z2ddl3m4Z4ddl5m6Z6m7Z7ej8�r<ddl9Z:ej;j<Z<ej=e>�Z?e@�ZAGdd�de@�ZBeCejDejEg�ZFGdd�deBe(�ZGGdd�deG�ZHdd�ZIdd�ZJdS)�)�absolute_importN)�error�timeout�)
�ClosedPoolError�
ProtocolError�EmptyPoolError�HeaderParsingError�HostChangedError�LocationValueError�
MaxRetryError�
ProxyError�ReadTimeoutError�SSLError�TimeoutError�InsecureRequestWarning�NewConnectionError)�CertificateError)�six)�queue)�port_by_scheme�DummyConnection�HTTPConnection�HTTPSConnection�VerifiedHTTPSConnection�
HTTPException�BaseSSLError)�RequestMethods)�HTTPResponse)�is_connection_dropped)�set_file_position)�assert_header_parsing)�Retry)�Timeout)�get_host�Urlc@sDeZdZdZdZejZd
dd�Zdd�Z	dd�Z
d	d
�Zdd�ZdS)�ConnectionPoolzz
    Base class for all connection pools, such as
    :class:`.HTTPConnectionPool` and :class:`.HTTPSConnectionPool`.
    NcCs.|std��t|�j�|_|j�|_||_dS)NzNo host specified.)r�
_ipv6_host�lower�host�_proxy_host�port)�selfr)r+�r-�$/usr/lib/python3.6/connectionpool.py�__init__Cs

zConnectionPool.__init__cCsdt|�j|j|jfS)Nz%s(host=%r, port=%r))�type�__name__r)r+)r,r-r-r.�__str__Ks
zConnectionPool.__str__cCs|S)Nr-)r,r-r-r.�	__enter__OszConnectionPool.__enter__cCs|j�dS)NF)�close)r,�exc_typeZexc_valZexc_tbr-r-r.�__exit__RszConnectionPool.__exit__cCsdS)zD
        Close all pooled connections and disable the pool.
        Nr-)r,r-r-r.r4WszConnectionPool.close)N)
r1�
__module__�__qualname__�__doc__�schemerZ	LifoQueue�QueueClsr/r2r3r6r4r-r-r-r.r&:s
r&c
@s�eZdZdZdZeZeZdde	j
ddddddf	dd�Zdd	�Zd!d
d�Z
dd
�Zdd�Zdd�Zdd�Zdd�Zedfdd�Zdd�Zdd�Zdd�Zdddddeddddf
dd �ZdS)"�HTTPConnectionPoolaN	
    Thread-safe connection pool for one host.

    :param host:
        Host used for this HTTP Connection (e.g. "localhost"), passed into
        :class:`httplib.HTTPConnection`.

    :param port:
        Port used for this HTTP Connection (None is equivalent to 80), passed
        into :class:`httplib.HTTPConnection`.

    :param strict:
        Causes BadStatusLine to be raised if the status line can't be parsed
        as a valid HTTP/1.0 or 1.1 status line, passed into
        :class:`httplib.HTTPConnection`.

        .. note::
           Only works in Python 2. This parameter is ignored in Python 3.

    :param timeout:
        Socket timeout in seconds for each individual connection. This can
        be a float or integer, which sets the timeout for the HTTP request,
        or an instance of :class:`urllib3.util.Timeout` which gives you more
        fine-grained control over request timeouts. After the constructor has
        been parsed, this is always a `urllib3.util.Timeout` object.

    :param maxsize:
        Number of connections to save that can be reused. More than 1 is useful
        in multithreaded situations. If ``block`` is set to False, more
        connections will be created but they will not be saved once they've
        been used.

    :param block:
        If set to True, no more than ``maxsize`` connections will be used at
        a time. When no free connections are available, the call will block
        until a connection has been released. This is a useful side effect for
        particular multithreaded situations where one does not want to use more
        than maxsize connections per host to prevent flooding.

    :param headers:
        Headers to include with all requests, unless other headers are given
        explicitly.

    :param retries:
        Retry configuration to use by default with requests in this pool.

    :param _proxy:
        Parsed proxy URL, should not be used directly, instead, see
        :class:`urllib3.connectionpool.ProxyManager`"

    :param _proxy_headers:
        A dictionary with proxy headers, should not be used directly,
        instead, see :class:`urllib3.connectionpool.ProxyManager`"

    :param \**conn_kw:
        Additional parameters are used to create fresh :class:`urllib3.connection.HTTPConnection`,
        :class:`urllib3.connection.HTTPSConnection` instances.
    �httpNFrc
Ks�tj|||�tj||�||_t|t�s4tj|�}|dkrBtj}||_	||_
|j|�|_||_
|	|_|
pli|_xt|�D]}|jjd�qzWd|_d|_||_|jr�|jjdg�dS)NrZsocket_options)r&r/r�strict�
isinstancer#�
from_floatr"ZDEFAULTr�retriesr;�pool�block�proxy�
proxy_headers�xrange�put�num_connections�num_requests�conn_kw�
setdefault)
r,r)r+r>r�maxsizerC�headersrA�_proxy�_proxy_headersrJ�_r-r-r.r/�s(


zHTTPConnectionPool.__init__cCsJ|jd7_tjd|j|j�|jf|j|j|jj|jd�|j	��}|S)z9
        Return a fresh :class:`HTTPConnection`.
        rz%Starting new HTTP connection (%d): %s)r)r+rr>)
rH�log�debugr)�
ConnectionClsr+r�connect_timeoutr>rJ)r,�connr-r-r.�	_new_conn�szHTTPConnectionPool._new_conncCs�d}y|jj|j|d�}WnBtk
r8t|d��Yn&tjk
r\|jrXt|d��YnX|r�t|�r�t	j
d|j�|j�t
|dd�dkr�d}|p�|j�S)	a�
        Get a connection. Will return a pooled connection if one is available.

        If no connections are available and :prop:`.block` is ``False``, then a
        fresh connection is returned.

        :param timeout:
            Seconds to wait before giving up and raising
            :class:`urllib3.exceptions.EmptyPoolError` if the pool is empty and
            :prop:`.block` is ``True``.
        N)rCrzPool is closed.z>Pool reached maximum size and no more connections are allowed.z Resetting dropped connection: %sZ	auto_openrr)rB�getrC�AttributeErrorrr�EmptyrrrQrRr)r4�getattrrV)r,rrUr-r-r.�	_get_conn�s zHTTPConnectionPool._get_conncCs\y|jj|dd�dStk
r(Yn$tjk
rJtjd|j�YnX|rX|j�dS)a�
        Put a connection back into the pool.

        :param conn:
            Connection object for the current host and port as returned by
            :meth:`._new_conn` or :meth:`._get_conn`.

        If the pool is already full, the connection is closed and discarded
        because we exceeded maxsize. If connections are discarded frequently,
        then maxsize should be increased.

        If the pool is closed, then the connection will be closed and discarded.
        F)rCNz2Connection pool is full, discarding connection: %s)	rBrGrXrZFullrQ�warningr)r4)r,rUr-r-r.�	_put_conn�szHTTPConnectionPool._put_conncCsdS)zU
        Called right before a request is made, after the socket is created.
        Nr-)r,rUr-r-r.�_validate_connsz!HTTPConnectionPool._validate_conncCsdS)Nr-)r,rUr-r-r.�_prepare_proxy!sz!HTTPConnectionPool._prepare_proxycCs2|tkr|jj�St|t�r$|j�Stj|�SdS)z< Helper that always returns a :class:`urllib3.util.Timeout` N)�_DefaultrZcloner?r#r@)r,rr-r-r.�_get_timeout%s


zHTTPConnectionPool._get_timeoutcCsjt|t�rt||d|��t|d�r>|jtkr>t||d|��dt|�ksVdt|�krft||d|��dS)zAIs the error actually a timeout? Will raise a ReadTimeout or passz!Read timed out. (read timeout=%s)�errnoz	timed outzdid not complete (read)N)r?�
SocketTimeoutr�hasattrrb�_blocking_errnos�str)r,�err�url�
timeout_valuer-r-r.�_raise_timeout1s
z!HTTPConnectionPool._raise_timeoutc
:Ks|jd7_|j|�}|j�|j|_y|j|�Wn:ttfk
rp}z|j|||jd��WYdd}~XnX|r�|j	||f|�n|j
||f|�|j}	t|dd�r�|	dkr�t
||d|	��|	tjkr�|jjtj��n|jj|	�yjy|jdd�}
WnTtk
�rPy|j�}
Wn0tk
�rJ}ztj|d�WYdd}~XnXYnXWn<tttfk
�r�}z|j|||	d��WYdd}~XnXt|d	d
�}tjd|j|j|j||||
j|
j �	yt!|
j"�Wn@t#tfk
�r}ztj$d|j%|�|dd
�WYdd}~XnX|
S)a
        Perform a request on a given urllib connection object taken from our
        pool.

        :param conn:
            a connection from one of our connection pools

        :param timeout:
            Socket timeout in seconds for the request. This can be a
            float or integer, which will set the same timeout value for
            the socket connect and the socket read, or an instance of
            :class:`urllib3.util.Timeout`, which gives you more fine-grained
            control over your timeouts.
        r)rgrhriN�sockrz!Read timed out. (read timeout=%s)T)�	bufferingZ
_http_vsn_strzHTTP/?z%s://%s:%s "%s %s %s" %s %sz$Failed to parse headers (url=%s): %s)�exc_info)&rIraZ
start_connectrTrr^rcrrjZrequest_chunked�request�read_timeoutrZrr#�DEFAULT_TIMEOUTrkZ
settimeout�socketZgetdefaulttimeoutZgetresponse�	TypeError�	ExceptionrZ
raise_from�SocketErrorrQrRr:r)r+�statusZlengthr!�msgr	r\�
_absolute_url)
r,rU�methodrhr�chunkedZhttplib_request_kw�timeout_obj�ero�httplib_responseZhttp_versionZhper-r-r.�
_make_requestBsT

(
$z HTTPConnectionPool._make_requestcCst|j|j|j|d�jS)N)r:r)r+�path)r%r:r)r+rh)r,r~r-r-r.rw�sz HTTPConnectionPool._absolute_urlcCsL|jd}|_y"x|jdd�}|r|j�qWWntjk
rFYnXdS)zD
        Close all pooled connections and disable the pool.
        NF)rC)rBrWr4rrY)r,Zold_poolrUr-r-r.r4�szHTTPConnectionPool.closecCst|jd�rdSt|�\}}}t|�j�}|jr@|r@tj|�}n|jrZ|tj|�krZd}|||f|j|j|jfkS)zj
        Check if the given ``url`` is a member of the same host as this
        connection pool.
        �/TN)	�
startswithr$r'r(r+rrWr:r))r,rhr:r)r+r-r-r.�is_same_host�s
zHTTPConnectionPool.is_same_hostTc
.Ks�|dkr|j}t|t�s*tj|||jd�}|
dkr>|
jdd�}
|rZ|j|�rZt|||��d}|
}|jdkr�|j	�}|j
|j�d}d}t||�}�zry�|j
|�}|j|	d�}|j|_|jdk	o�t|dd�}|r�|j|�|j|||||||d	�}|
�s�|nd}||
d
<|jj|f|||d�|
��}d}Wn�tjk
�rNt|d��Yn�ttttttt fk
�r}z�d}t|tt f��r�t|�}n>t|tt!f��r�|j�r�t"d
|�}nt|ttf��r�td|�}|j#||||t$j%�dd�}|j&�|}WYdd}~XnXWd|�s |�o|j'�}d}|�r0|j(|�X|�spt)j*d|||�|j+|||||||f||	|
|d�|
��Sdd�}|�o�|j,�}|�r$|j-dk�r�d}y|j#||||d�}Wn(t.k
�r�|j/�r�||��|SX||�|j0|�t)j1d||�|j+||||f|||||	|
|d�|
��St2|j3d��}|j4||j-|��r�y|j#||||d�}Wn(t.k
�r�|j5�r~||��|SX||�|j&|�t)j1d|�|j+||||f|||||	|
|d�|
��S|S)a�
        Get a connection from the pool and perform an HTTP request. This is the
        lowest level call for making a request, so you'll need to specify all
        the raw details.

        .. note::

           More commonly, it's appropriate to use a convenience method provided
           by :class:`.RequestMethods`, such as :meth:`request`.

        .. note::

           `release_conn` will only behave as expected if
           `preload_content=False` because we want to make
           `preload_content=False` the default behaviour someday soon without
           breaking backwards compatibility.

        :param method:
            HTTP request method (such as GET, POST, PUT, etc.)

        :param body:
            Data to send in the request body (useful for creating
            POST requests, see HTTPConnectionPool.post_url for
            more convenience).

        :param headers:
            Dictionary of custom headers to send, such as User-Agent,
            If-None-Match, etc. If None, pool headers are used. If provided,
            these headers completely replace any pool-specific headers.

        :param retries:
            Configure the number of retries to allow before raising a
            :class:`~urllib3.exceptions.MaxRetryError` exception.

            Pass ``None`` to retry until you receive a response. Pass a
            :class:`~urllib3.util.retry.Retry` object for fine-grained control
            over different types of retries.
            Pass an integer number to retry connection errors that many times,
            but no other types of errors. Pass zero to never retry.

            If ``False``, then retries are disabled and any exception is raised
            immediately. Also, instead of raising a MaxRetryError on redirects,
            the redirect response will be returned.

        :type retries: :class:`~urllib3.util.retry.Retry`, False, or an int.

        :param redirect:
            If True, automatically handle redirects (status codes 301, 302,
            303, 307, 308). Each redirect counts as a retry. Disabling retries
            will disable redirect, too.

        :param assert_same_host:
            If ``True``, will make sure that the host of the pool requests is
            consistent else will raise HostChangedError. When False, you can
            use the pool on an HTTP proxy and request foreign hosts.

        :param timeout:
            If specified, overrides the default timeout for this one
            request. It may be a float (in seconds) or an instance of
            :class:`urllib3.util.Timeout`.

        :param pool_timeout:
            If set and the pool is set to block=True, then this method will
            block for ``pool_timeout`` seconds and raise EmptyPoolError if no
            connection is available within the time period.

        :param release_conn:
            If False, then the urlopen call will not release the connection
            back into the pool once a response is received (but will release if
            you read the entire contents of the response such as when
            `preload_content=True`). This is useful if you're not preloading
            the response's content immediately. You will need to call
            ``r.release_conn()`` on the response ``r`` to return the connection
            back into the pool. If None, it takes the value of
            ``response_kw.get('preload_content', True)``.

        :param chunked:
            If True, urllib3 will send the body using chunked transfer
            encoding. Otherwise, urllib3 will send the body using the standard
            content-length form. Defaults to False.

        :param int body_pos:
            Position to seek to in file-like body in the event of a retry or
            redirect. Typically this won't need to be set because urllib3 will
            auto-populate the value when needed.

        :param \**response_kw:
            Additional parameters are passed to
            :meth:`urllib3.response.HTTPResponse.from_httplib`
        N)�redirect�defaultZpreload_contentTr=F)rrk)r�bodyrMryZrequest_method)rB�
connectionrAz"No pool connections are available.zCannot connect to proxy.zConnection aborted.�)r�_poolZ_stacktracez1Retrying (%r) after connection broken by '%r': %s)r�pool_timeout�release_conn�body_poscSs@y|j�Wn.ttttttfk
r:}zWYdd}~XnXdS)N)�readrrrtrrr)�responser{r-r-r.�drain_and_release_conn�s

z:HTTPConnectionPool.urlopen.<locals>.drain_and_release_conni/ZGET)r�r�zRedirecting %s -> %s)rAr��assert_same_hostrr�r�r�zRetry-Afterz	Retry: %s)6rMr?r"Zfrom_intrArWr�r
r:�copy�updaterEr rar[rTrrDrZr_r}�ResponseClsZfrom_httplibrrYrrrrtrrrrrr
Z	increment�sysrmZsleepr4r]rQr\�urlopenZget_redirect_locationrurZraise_on_redirectZsleep_for_retryrR�boolZ	getheaderZis_retryZraise_on_status)r,rxrhr�rMrAr�r�rr�r�ryr�Zresponse_kwrUZrelease_this_connrgZ
clean_exitrzZis_new_proxy_connr|Z
response_connr�r{r�Zredirect_locationZhas_retry_afterr-r-r.r��s�^















zHTTPConnectionPool.urlopen)N)r1r7r8r9r:rrSrr�r#rpr/rVr[r]r^r_rarjr`r}rwr4r�r�r-r-r-r.r<bs.:%
&Ur<csneZdZdZdZeZddejddddddddddddddfdd�Z	dd	�Z
d
d�Zdd
�Z�fdd�Z
�ZS)�HTTPSConnectionPoola�
    Same as :class:`.HTTPConnectionPool`, but HTTPS.

    When Python is compiled with the :mod:`ssl` module, then
    :class:`.VerifiedHTTPSConnection` is used, which *can* verify certificates,
    instead of :class:`.HTTPSConnection`.

    :class:`.VerifiedHTTPSConnection` uses one of ``assert_fingerprint``,
    ``assert_hostname`` and ``host`` in this order to verify connections.
    If ``assert_hostname`` is False, no verification is done.

    The ``key_file``, ``cert_file``, ``cert_reqs``, ``ca_certs``,
    ``ca_cert_dir``, and ``ssl_version`` are only used if :mod:`ssl` is
    available and are fed into :meth:`urllib3.util.ssl_wrap_socket` to upgrade
    the connection socket into an SSL socket.
    �httpsNFrcKsftj||||||||||	|
f|�|r2|
dkr2d}
||_||_|
|_||_||_||_||_||_	dS)NZ
CERT_REQUIRED)
r<r/�key_file�	cert_file�	cert_reqs�ca_certs�ca_cert_dir�ssl_version�assert_hostname�assert_fingerprint)r,r)r+r>rrLrCrMrArNrOr�r�r�r�r�r�r�r�rJr-r-r.r/�s	zHTTPSConnectionPool.__init__c	Cs<t|t�r8|j|j|j|j|j|j|j|j	d�|j
|_
|S)z�
        Prepare the ``connection`` for :meth:`urllib3.util.ssl_wrap_socket`
        and establish the tunnel if proxy is used.
        )r�r�r�r�r�r�r�)r?rZset_certr�r�r�r�r�r�r�r�)r,rUr-r-r.�
_prepare_conns

z!HTTPSConnectionPool._prepare_conncCsfy
|j}Wntk
r$|j}YnXtjdkrH|jrH||j|j�n||j|j|j�|j�dS)z�
        Establish tunnel connection early, because otherwise httplib
        would improperly set Host: header to proxy's IP:port.
        r���N)r�r�r�)	�
set_tunnelrXZ_set_tunnelr��version_inforEr*r+�connect)r,rUr�r-r-r.r_ s
z"HTTPSConnectionPool._prepare_proxycCs�|jd7_tjd|j|j�|js2|jtkr:td��|j}|j}|jdk	r`|jj}|jj}|jf|||j	j
|jd�|j��}|j
|�S)zB
        Return a fresh :class:`httplib.HTTPSConnection`.
        rz&Starting new HTTPS connection (%d): %szCCan't connect to HTTPS URL because the SSL module is not available.N)r)r+rr>)rHrQrRr)rSrrr+rDrrTr>rJr�)r,Zactual_hostZactual_portrUr-r-r.rV2s

zHTTPSConnectionPool._new_conncs:tt|�j|�t|dd�s$|j�|js6tjdt�dS)zU
        Called right before a request is made, after the socket is created.
        rkNz�Unverified HTTPS request is being made. Adding certificate verification is strongly advised. See: https://urllib3.readthedocs.io/en/latest/advanced-usage.html#ssl-warnings)	�superr�r^rZr�Zis_verified�warnings�warnr)r,rU)�	__class__r-r.r^Jsz"HTTPSConnectionPool._validate_conn)r1r7r8r9r:rrSr#rpr/r�r_rVr^�
__classcell__r-r-)r�r.r��sr�cKsRt|�\}}}|ptj|d�}|dkr:t|fd|i|��St|fd|i|��SdS)a�
    Given a url, return an :class:`.ConnectionPool` instance of its host.

    This is a shortcut for not having to parse out the scheme, host, and port
    of the url before creating an :class:`.ConnectionPool` instance.

    :param url:
        Absolute URL string that must include the scheme. Port is optional.

    :param \**kw:
        Passes additional parameters to the constructor of the appropriate
        :class:`.ConnectionPool`. Useful for specifying things like
        timeout, maxsize, headers, etc.

    Example::

        >>> conn = connection_from_url('http://google.com/')
        >>> r = conn.request('GET', '/')
    �Pr�r+N)r$rrWr�r<)rh�kwr:r)r+r-r-r.�connection_from_url]s
r�cCs*|jd�r&|jd�r&|jdd�jd�}|S)z'
    Process IPv6 address literals
    �[�]z%25�%z[])r��endswith�replace�strip)r)r-r-r.r'ysr')KZ
__future__rrbZloggingr�r�rqrrtrrc�
exceptionsrrrr	r
rrr
rrrrrZpackages.ssl_match_hostnamerZpackagesrZpackages.six.movesrr�rrrrrrrrnrr�rZutil.connectionrZutil.requestr Z
util.responser!Z
util.retryr"Zutil.timeoutr#Zutil.urlr$r%ZPY2ZQueueZ_unused_module_QueueZmovesrFZ	getLoggerr1rQ�objectr`r&�setZEAGAINZEWOULDBLOCKrer<r�r�r'r-r-r-r.�<module>sF<$
%|_vendor/urllib3/__pycache__/response.cpython-36.pyc000064400000037433151733136330016267 0ustar003

�PfwY�@sddlmZddlmZddlZddlZddlZddlmZ	ddlm
Zddlm
Z
ddlmZmZmZmZmZmZmZdd	lmZmZmZdd
lmZddlmZm Z ddl!m"Z"m#Z#ej$e%�Z&Gd
d�de'�Z(Gdd�de'�Z)dd�Z*Gdd�dej+�Z,dS)�)�absolute_import)�contextmanagerN)�timeout)�error�)�HTTPHeaderDict)�BodyNotHttplibCompatible�
ProtocolError�DecodeError�ReadTimeoutError�ResponseNotChunked�IncompleteRead�
InvalidHeader)�string_types�binary_type�PY3)�http_client)�
HTTPException�BaseSSLError)�is_fp_closed�is_response_to_headc@s$eZdZdd�Zdd�Zdd�ZdS)�DeflateDecodercCsd|_t�|_tj�|_dS)NT)�
_first_tryr�_data�zlib�
decompressobj�_obj)�self�r�/usr/lib/python3.6/response.py�__init__szDeflateDecoder.__init__cCst|j|�S)N)�getattrr)r�namerrr�__getattr__szDeflateDecoder.__getattr__cCs�|s|S|js|jj|�S|j|7_y |jj|�}|rFd|_d|_|Stjk
r�d|_tjtj�|_z|j|j�Sd|_XYnXdS)NF)rr�
decompressrrrr�	MAX_WBITS)r�dataZdecompressedrrrr$ s"zDeflateDecoder.decompressN)�__name__�
__module__�__qualname__r r#r$rrrrrsrc@s$eZdZdd�Zdd�Zdd�ZdS)�GzipDecodercCstjdtj�|_dS)N�)rrr%r)rrrrr 9szGzipDecoder.__init__cCst|j|�S)N)r!r)rr"rrrr#<szGzipDecoder.__getattr__cCs|s|S|jj|�S)N)rr$)rr&rrrr$?szGzipDecoder.decompressN)r'r(r)r r#r$rrrrr*7sr*cCs|dkrt�St�S)N�gzip)r*r)�moderrr�_get_decoderEsr.c@seZdZdZddgZdddddgZdFdd�Zdd�Zdd�Ze	dd��Z
e	dd��Zdd�Zdd�Z
dd�Zdd�Zd d!�Zed"d#��ZdGd$d%�ZdId(d)�Zed*d+��Zd,d-�ZdJd.d/�Zd0d1�Zd2d3�Ze	d4d5��Zd6d7�Zd8d9�Zd:d;�Zd<d=�Zd>d?�Z d@dA�Z!dBdC�Z"dKdDdE�Z#d
S)L�HTTPResponsea	
    HTTP Response container.

    Backwards-compatible to httplib's HTTPResponse but the response ``body`` is
    loaded and decoded on-demand when the ``data`` property is accessed.  This
    class is also compatible with the Python standard library's :mod:`io`
    module, and can hence be treated as a readable object in the context of that
    framework.

    Extra parameters for behaviour not present in httplib.HTTPResponse:

    :param preload_content:
        If True, the response's body will be preloaded during construction.

    :param decode_content:
        If True, attempts to decode specific content-encoding's based on headers
        (like 'gzip' and 'deflate') will be skipped and raw data will be used
        instead.

    :param original_response:
        When this HTTPResponse wrapper is generated from an httplib.HTTPResponse
        object, it's convenient to include the original for debug purposes. It's
        otherwise unused.

    :param retries:
        The retries contains the last :class:`~urllib3.util.retry.Retry` that
        was used during the request.

    :param enforce_content_length:
        Enforce content length checking. Body returned by server must match
        value of Content-Length header, if present. Otherwise, raise error.
    r,Zdeflatei-i.i/i3i4�NrTFcCst|t�r||_n
t|�|_||_||_||_||_||_||_|
|_	d|_
d|_d|_|	|_
d|_|r|t|ttf�r|||_|
|_||_t|d�r�||_d|_d|_|jjdd�j�}dd�|jd�D�}d	|kr�d
|_|j|�|_|r�|jr�|j|d�|_dS)Nr�readFztransfer-encodingr0css|]}|j�VqdS)N)�strip)�.0�encrrr�	<genexpr>�sz(HTTPResponse.__init__.<locals>.<genexpr>�,�chunkedT)�decode_content)�
isinstancer�headers�status�version�reason�strictr8�retries�enforce_content_length�_decoder�_body�_fp�_original_response�_fp_bytes_read�
basestringr�_pool�_connection�hasattrr7�
chunk_left�get�lower�split�_init_length�length_remainingr1)r�bodyr:r;r<r=r>Zpreload_contentr8�original_responseZpool�
connectionr?r@�request_methodZtr_encZ	encodingsrrrr qs<


zHTTPResponse.__init__cCs|j|jkr|jjd�SdS)a
        Should we redirect and where to?

        :returns: Truthy redirect location string if we got a redirect status
            code and valid location. ``None`` if redirect status and no
            location. ``False`` if not a redirect status code.
        �locationF)r;�REDIRECT_STATUSESr:rK)rrrr�get_redirect_location�sz"HTTPResponse.get_redirect_locationcCs,|js|jrdS|jj|j�d|_dS)N)rGrHZ	_put_conn)rrrr�release_conn�szHTTPResponse.release_conncCs"|jr|jS|jr|jdd�SdS)NT)�
cache_content)rBrCr1)rrrrr&�szHTTPResponse.datacCs|jS)N)rH)rrrrrR�szHTTPResponse.connectioncCs|jS)z�
        Obtain the number of bytes pulled over the wire so far. May differ from
        the amount of content returned by :meth:``HTTPResponse.read`` if bytes
        are encoded on the wire (e.g, compressed).
        )rE)rrrr�tell�szHTTPResponse.tellcCs�|jjd�}|dk	r(|jr(tjd�dS|dk	r�y<tdd�|jd�D��}t|�dkrbtd|��|j	�}Wnt
k
r�d}YnX|d	kr�d}yt|j�}Wnt
k
r�d	}YnX|dks�d|ko�d
kns�|dkr�d	}|S)zM
        Set initial length value for Response content if available.
        zcontent-lengthNz�Received response with both Content-Length and Transfer-Encoding set. This is expressly forbidden by RFC 7230 sec 3.3.2. Ignoring Content-Length and attempting to process response as Transfer-Encoding: chunked.cSsg|]}t|��qSr)�int)r3�valrrr�
<listcomp>�sz-HTTPResponse._init_length.<locals>.<listcomp>r6rz8Content-Length contained multiple unmatching values (%s)r���0�d��ZHEAD)r]r^)
r:rKr7�logZwarning�setrM�lenr�pop�
ValueErrorrZr;)rrSZlengthZlengthsr;rrrrN�s,


(zHTTPResponse._init_lengthcCs4|jjdd�j�}|jdkr0||jkr0t|�|_dS)z=
        Set-up the _decoder attribute if necessary.
        zcontent-encodingr0N)r:rKrLrA�CONTENT_DECODERSr.)r�content_encodingrrr�
_init_decoder�szHTTPResponse._init_decodercCs|y|r|jr|jj|�}WnHttjfk
rb}z&|jjdd�j�}td||��WYdd}~XnX|rx|rx||j	�7}|S)zN
        Decode the data passed in and potentially flush the decoder.
        zcontent-encodingr0zEReceived response with content-encoding: %s, but failed to decode it.N)
rAr$�IOErrorrrr:rKrLr
�_flush_decoder)rr&r8�
flush_decoder�ergrrr�_decodes
zHTTPResponse._decodecCs$|jr |jjd�}||jj�SdS)zk
        Flushes the decoder. Should only be called if the decoder is actually
        being used.
        �)rAr$�flush)rZbufrrrrjszHTTPResponse._flush_decoderccs�d}z�y
dVWn�tk
r2t|jdd��Ynptk
rn}z"dt|�krP�t|jdd��WYdd}~Xn4ttfk
r�}ztd||��WYdd}~XnXd}Wd|s�|jr�|jj	�|j
r�|j
j	�|jr�|jj�r�|j�XdS)z�
        Catch low-level python exceptions, instead re-raising urllib3
        variants, so that low-level exceptions are not leaked in the
        high-level api.

        On exit, release the connection back to the pool.
        FNzRead timed out.zread operation timed outzConnection broken: %rT)
�
SocketTimeoutrrGr�strr�SocketErrorr	rD�closerH�isclosedrW)rZ
clean_exitrlrrr�_error_catcher!s(	
 

zHTTPResponse._error_catchercCs�|j�|dkr|j}|jdkr$dSd}d}|j��h|dkrN|jj�}d}nJd}|jj|�}|dkr�|r�|jj�d}|jr�|jdkr�t|j	|j��WdQRX|r�|j	t
|�7_	|jdk	r�|jt
|�8_|j|||�}|r�||_|S)aP
        Similar to :meth:`httplib.HTTPResponse.read`, but with two additional
        parameters: ``decode_content`` and ``cache_content``.

        :param amt:
            How much of the content to read. If specified, caching is skipped
            because it doesn't make sense to cache partial content as the full
            response.

        :param decode_content:
            If True, will attempt to decode the body based on the
            'content-encoding' header.

        :param cache_content:
            If True, will save the returned data such that the same result is
            returned despite of the state of the underlying file object. This
            is useful if you want the ``.data`` property to continue working
            after having ``.read()`` the file object. (Overridden if ``amt`` is
            set.)
        NFTr)rN)
rhr8rCrur1rsr@rOr
rErcrmrB)r�amtr8rXrkr&rrrr1Zs4




zHTTPResponse.read�r+ccsZ|jr.|j�r.xF|j||d�D]
}|VqWn(x&t|j�sT|j||d�}|r0|Vq0WdS)a_
        A generator wrapper for the read() method. A call will block until
        ``amt`` bytes have been read from the connection or until the
        connection is closed.

        :param amt:
            How much of the content to read. The generator will return up to
            much data per iteration, but may return less. This is particularly
            likely when using compressed data. However, the empty string will
            never be returned.

        :param decode_content:
            If True, will attempt to decode the body based on the
            'content-encoding' header.
        )r8)rvr8N)r7�supports_chunked_reads�read_chunkedrrCr1)rrvr8�liner&rrr�stream�szHTTPResponse.streamc
Ks`|j}t|t�s,tr"t|j��}n
tj|�}t|dd�}|f|||j|j|j	||d�|��}|S)a
        Given an :class:`httplib.HTTPResponse` instance ``r``, return a
        corresponding :class:`urllib3.response.HTTPResponse` object.

        Remaining parameters are passed to the HTTPResponse constructor, along
        with ``original_response=r``.
        r>r)rPr:r;r<r=r>rQ)
�msgr9rr�items�from_httplibr!r;r<r=)ZResponseCls�rZresponse_kwr:r>Zresprrrr~�s	

zHTTPResponse.from_httplibcCs|jS)N)r:)rrrr�
getheaders�szHTTPResponse.getheaderscCs|jj||�S)N)r:rK)rr"�defaultrrr�	getheader�szHTTPResponse.getheadercCs|jS)N)r:)rrrr�info�szHTTPResponse.infocCs$|js|jj�|jr |jj�dS)N)�closedrCrsrH)rrrrrs�s
zHTTPResponse.closecCs@|jdkrdSt|jd�r$|jj�St|jd�r8|jjSdSdS)NTrtr�)rCrIrtr�)rrrrr��s

zHTTPResponse.closedcCs6|jdkrtd��nt|jd�r*|jj�Std��dS)Nz-HTTPResponse has no file to get a fileno from�filenozOThe file-like object this HTTPResponse is wrapped around has no file descriptor)rCrirIr�)rrrrr��s



zHTTPResponse.filenocCs$|jdk	r t|jd�r |jj�SdS)Nro)rCrIro)rrrrro�szHTTPResponse.flushcCsdS)NTr)rrrr�readableszHTTPResponse.readablecCs:|jt|��}t|�dkrdS||dt|��<t|�SdS)Nr)r1rc)r�bZtemprrr�readintos
zHTTPResponse.readintocCst|jd�S)z�
        Checks if the underlying file-like object looks like a
        httplib.HTTPResponse object. We do this by testing for the fp
        attribute. If it is present we assume it returns raw chunks as
        processed by read_chunked().
        �fp)rIrC)rrrrrxsz#HTTPResponse.supports_chunked_readscCsf|jdk	rdS|jjj�}|jdd�d}yt|d�|_Wn&tk
r`|j�tj	|��YnXdS)N�;rrr+)
rJrCr��readlinerMrZrers�httplibr
)rrzrrr�_update_chunk_lengths
z!HTTPResponse._update_chunk_lengthcCs�d}|dkr2|jj|j�}|}|jjd�d|_nv||jkrZ|jj|�}|j||_|}nN||jkr�|jj|�}|jjd�d|_|}n |jj|j�}|jjd�d|_|S)Nrw)rCZ
_safe_readrJ)rrvZreturned_chunk�chunk�valuerrr�
_handle_chunk%s&

zHTTPResponse._handle_chunkccs�|j�|jstd��|j�s&td��|jrDt|j�rD|jj�dS|j���x<|j	�|j
dkrdP|j|�}|j||dd�}|rP|VqPW|r�|j
�}|r�|Vx |jjj�}|s�P|dkr�Pq�W|jr�|jj�WdQRXdS)z�
        Similar to :meth:`HTTPResponse.read`, but with an additional
        parameter: ``decode_content``.

        :param decode_content:
            If True, will attempt to decode the body based on the
            'content-encoding' header.
        zHResponse is not chunked. Header 'transfer-encoding: chunked' is missing.zgBody should be httplib.HTTPResponse like. It should have have an fp attribute which returns raw chunks.NrF)r8rks
)rhr7rrxrrDrrsrur�rJr�rmrjrCr�r�)rrvr8r�Zdecodedrzrrrry;s@	




zHTTPResponse.read_chunked)r0NrrNrTTNNNNFN)NNF�)r�N)N)NN)$r'r(r)�__doc__rfrUr rVrW�propertyr&rRrYrNrhrmrjrrur1r{�classmethodr~r�r�r�rsr�r�ror�r�rxr�r�ryrrrrr/LsB 
-
	0
9
E

			r/)-Z
__future__r�
contextlibrr�ioZloggingZsocketrrprrr�_collectionsr�
exceptionsrr	r
rrr
rZpackages.sixrrFrrZpackages.six.movesrr�rRrrZ
util.responserrZ	getLoggerr'ra�objectrr*r.�IOBaser/rrrr�<module>s"$
!_vendor/urllib3/__pycache__/request.cpython-36.pyc000064400000012556151733136330016120 0ustar003

�Pf:�@s>ddlmZddlmZddlmZdgZGdd�de�ZdS)�)�absolute_import�)�encode_multipart_formdata)�	urlencode�RequestMethodsc@sReZdZdZeddddg�Zddd�Zdd
d�Zddd
�Zddd�Z	ddd�Z
dS)ra�
    Convenience mixin for classes who implement a :meth:`urlopen` method, such
    as :class:`~urllib3.connectionpool.HTTPConnectionPool` and
    :class:`~urllib3.poolmanager.PoolManager`.

    Provides behavior for making common types of HTTP request methods and
    decides which type of request field encoding to use.

    Specifically,

    :meth:`.request_encode_url` is for sending requests whose fields are
    encoded in the URL (such as GET, HEAD, DELETE).

    :meth:`.request_encode_body` is for sending requests whose fields are
    encoded in the *body* of the request using multipart or www-form-urlencoded
    (such as for POST, PUT, PATCH).

    :meth:`.request` is for making any kind of request, it will look up the
    appropriate encoding format and use one of the above two methods to make
    the request.

    Initializer parameters:

    :param headers:
        Headers to include with all requests, unless other headers are given
        explicitly.
    ZDELETEZGETZHEADZOPTIONSNcCs|pi|_dS)N)�headers)�selfr�r	�/usr/lib/python3.6/request.py�__init__)szRequestMethods.__init__TcKstd��dS)NzMClasses extending RequestMethods must implement their own ``urlopen`` method.)�NotImplemented)r�method�url�bodyr�encode_multipart�multipart_boundary�kwr	r	r
�urlopen,szRequestMethods.urlopencKsJ|j�}||jkr,|j||f||d�|��S|j||f||d�|��SdS)a�
        Make a request using :meth:`urlopen` with the appropriate encoding of
        ``fields`` based on the ``method`` used.

        This is a convenience method that requires the least amount of manual
        effort. It can be used in most situations, while still having the
        option to drop down to more specific methods when necessary, such as
        :meth:`request_encode_url`, :meth:`request_encode_body`,
        or even the lowest level :meth:`urlopen`.
        )�fieldsrN)�upper�_encode_url_methods�request_encode_url�request_encode_body)rr
rrr�
urlopen_kwr	r	r
�request2s
zRequestMethods.requestcKsD|dkr|j}d|i}|j|�|r4|dt|�7}|j||f|�S)z�
        Make a request using :meth:`urlopen` with the ``fields`` encoded in
        the url. This is useful for request methods like GET, HEAD, DELETE, etc.
        Nr�?)r�updaterr)rr
rrrr�extra_kwr	r	r
rHs
z!RequestMethods.request_encode_urlcKs�|dkr|j}dii}|rbd|kr*td��|r@t||d�\}	}
nt|�d}	}
|	|d<d|
i|d<|dj|�|j|�|j||f|�S)a�
        Make a request using :meth:`urlopen` with the ``fields`` encoded in
        the body. This is useful for request methods like POST, PUT, PATCH, etc.

        When ``encode_multipart=True`` (default), then
        :meth:`urllib3.filepost.encode_multipart_formdata` is used to encode
        the payload with the appropriate content type. Otherwise
        :meth:`urllib.urlencode` is used with the
        'application/x-www-form-urlencoded' content type.

        Multipart encoding must be used when posting files, and it's reasonably
        safe to use it in other times too. However, it may break request
        signing, such as with OAuth.

        Supports an optional ``fields`` parameter of key/value strings AND
        key/filetuple. A filetuple is a (filename, data, MIME type) tuple where
        the MIME type is optional. For example::

            fields = {
                'foo': 'bar',
                'fakefile': ('foofile.txt', 'contents of foofile'),
                'realfile': ('barfile.txt', open('realfile').read()),
                'typedfile': ('bazfile.bin', open('bazfile').read(),
                              'image/jpeg'),
                'nonamefile': 'contents of nonamefile field',
            }

        When uploading a file, providing a filename (the first parameter of the
        tuple) is optional but recommended to best mimick behavior of browsers.

        Note that if ``headers`` are supplied, the 'Content-Type' header will
        be overwritten because it depends on the dynamic random boundary string
        which is used to compose the body of the request. The random boundary
        string can be explicitly set with the ``multipart_boundary`` parameter.
        NrrzFrequest got values for both 'fields' and 'body', can only specify one.)�boundaryz!application/x-www-form-urlencodedzContent-Type)r�	TypeErrorrrrr)rr
rrrrrrrrZcontent_typer	r	r
rYs&
z"RequestMethods.request_encode_body)N)NNTN)NN)NN)NNTN)�__name__�
__module__�__qualname__�__doc__�setrrrrrrr	r	r	r
r
s



N)	Z
__future__rZfilepostrZpackages.six.moves.urllib.parser�__all__�objectrr	r	r	r
�<module>s_vendor/urllib3/__pycache__/poolmanager.cpython-36.pyc000064400000031074151733136330016730 0ustar003

�Pf�A�@sddlmZddlZddlZddlZddlmZddlmZm	Z	ddlm
Z
ddlmZm
Z
mZddlmZdd	lmZdd
lmZddlmZdd
dgZeje�Zd4Zd5Zejd-e�Zd.d/�Zej ee�ej ee�d0�Z!ee	d0�Z"Gd1d�de�Z#Gd2d
�d
e#�Z$d3d�Z%dS)6�)�absolute_importN�)�RecentlyUsedContainer)�HTTPConnectionPool�HTTPSConnectionPool)�port_by_scheme)�LocationValueError�
MaxRetryError�ProxySchemeUnknown)�urljoin)�RequestMethods)�	parse_url)�Retry�PoolManager�ProxyManager�proxy_from_url�key_file�	cert_file�	cert_reqs�ca_certs�ssl_version�ca_cert_dir�ssl_context�
key_scheme�key_host�key_port�key_timeout�key_retries�
key_strict�	key_block�key_source_address�key_key_file�
key_cert_file�
key_cert_reqs�key_ca_certs�key_ssl_version�key_ca_cert_dir�key_ssl_context�key_maxsize�key_headers�
key__proxy�key__proxy_headers�key_socket_options�key__socks_options�key_assert_hostname�key_assert_fingerprint�PoolKeycCs�|j�}|dj�|d<|dj�|d<x4d	D],}||kr.||dk	r.t||j��||<q.W|jd�}|dk	r|t|�|d<x&t|j��D]}|j|�|d|<q�Wx|j	D]}||kr�d||<q�W|f|�S)
a�
    Create a pool key out of a request context dictionary.

    According to RFC 3986, both the scheme and host are case-insensitive.
    Therefore, this function normalizes both before constructing the pool
    key for an HTTPS request. If you wish to change this behaviour, provide
    alternate callables to ``key_fn_by_scheme``.

    :param key_class:
        The class to use when constructing the key. This should be a namedtuple
        with the ``scheme`` and ``host`` keys at a minimum.
    :type  key_class: namedtuple
    :param request_context:
        A dictionary-like object that contain the context for a request.
    :type  request_context: dict

    :return: A namedtuple that can be used as a connection pool key.
    :rtype:  PoolKey
    �scheme�host�headers�_proxy_headers�_socks_optionsNZsocket_optionsZkey_)r3r4r5)
�copy�lower�	frozenset�items�get�tuple�list�keys�pop�_fields)Z	key_class�request_context�context�keyZsocket_optsZfield�rC�!/usr/lib/python3.6/poolmanager.py�_default_key_normalizer9s

rE)�http�httpsc@sxeZdZdZdZddd�Zdd�Zdd	�Zdd
d�Zdd
�Z	ddd�Z
dd�Zddd�Zd dd�Z
dd�Zd!dd�ZdS)"ra$
    Allows for arbitrary requests while transparently keeping track of
    necessary connection pools for you.

    :param num_pools:
        Number of connection pools to cache before discarding the least
        recently used pool.

    :param headers:
        Headers to include with all requests, unless other headers are given
        explicitly.

    :param \**connection_pool_kw:
        Additional parameters are used to create fresh
        :class:`urllib3.connectionpool.ConnectionPool` instances.

    Example::

        >>> manager = PoolManager(num_pools=2)
        >>> r = manager.request('GET', 'http://google.com/')
        >>> r = manager.request('GET', 'http://google.com/mail')
        >>> r = manager.request('GET', 'http://yahoo.com/')
        >>> len(manager.pools)
        2

    N�
cKs8tj||�||_t|dd�d�|_t|_tj�|_dS)NcSs|j�S)N)�close)�prCrCrD�<lambda>�sz&PoolManager.__init__.<locals>.<lambda>)Zdispose_func)r�__init__�connection_pool_kwr�pools�pool_classes_by_scheme�key_fn_by_schemer6)�self�	num_poolsr3rMrCrCrDrL�szPoolManager.__init__cCs|S)NrC)rQrCrCrD�	__enter__�szPoolManager.__enter__cCs|j�dS)NF)�clear)rQ�exc_typeZexc_valZexc_tbrCrCrD�__exit__�szPoolManager.__exit__cCsf|j|}|dkr|jj�}xdD]}|j|d�q"W|dkrXxtD]}|j|d�qDW|||f|�S)a�
        Create a new :class:`ConnectionPool` based on host, port, scheme, and
        any additional pool keyword arguments.

        If ``request_context`` is provided, it is provided as keyword arguments
        to the pool class used. This method is used to actually create the
        connection pools handed out by :meth:`connection_from_url` and
        companion methods. It is intended to be overridden for customization.
        Nr1r2�portrF)r1r2rW)rOrMr6r>�SSL_KEYWORDS)rQr1r2rWr@Zpool_clsrB�kwrCrCrD�	_new_pool�s




zPoolManager._new_poolcCs|jj�dS)z�
        Empty our store of pools and direct them all to close.

        This will not affect in-flight connections, but they will not be
        re-used after completion.
        N)rNrT)rQrCrCrDrT�szPoolManager.clearrFcCsT|std��|j|�}|pd|d<|s:tj|dj�d�}||d<||d<|j|�S)a�
        Get a :class:`ConnectionPool` based on the host, port, and scheme.

        If ``port`` isn't given, it will be derived from the ``scheme`` using
        ``urllib3.connectionpool.port_by_scheme``. If ``pool_kwargs`` is
        provided, it is merged with the instance's ``connection_pool_kw``
        variable and used to create the new connection pool, if one is
        needed.
        zNo host specified.rFr1�PrWr2)r�_merge_pool_kwargsrr:r7�connection_from_context)rQr2rWr1�pool_kwargsr@rCrCrD�connection_from_host�s
z PoolManager.connection_from_hostcCs,|dj�}|j|}||�}|j||d�S)z�
        Get a :class:`ConnectionPool` based on the request context.

        ``request_context`` must at least contain the ``scheme`` key and its
        value must be a key in ``key_fn_by_scheme`` instance variable.
        r1)r@)r7rP�connection_from_pool_key)rQr@r1Zpool_key_constructor�pool_keyrCrCrDr]�s
z#PoolManager.connection_from_contextc
Cs`|jj�N|jj|�}|r|S|d}|d}|d}|j||||d�}||j|<WdQRX|S)z�
        Get a :class:`ConnectionPool` based on the provided pool key.

        ``pool_key`` should be a namedtuple that only contains immutable
        objects. At a minimum it must have the ``scheme``, ``host``, and
        ``port`` fields.
        r1r2rW)r@N)rN�lockr:rZ)rQrar@Zpoolr1r2rWrCrCrDr`�s
z$PoolManager.connection_from_pool_keycCs t|�}|j|j|j|j|d�S)a�
        Similar to :func:`urllib3.connectionpool.connection_from_url`.

        If ``pool_kwargs`` is not provided and a new pool needs to be
        constructed, ``self.connection_pool_kw`` is used to initialize
        the :class:`urllib3.connectionpool.ConnectionPool`. If ``pool_kwargs``
        is provided, it is used instead. Note that if a new pool does not
        need to be created for the request, the provided ``pool_kwargs`` are
        not used.
        )rWr1r^)r
r_r2rWr1)rQ�urlr^�urCrCrD�connection_from_url
szPoolManager.connection_from_urlcCsZ|jj�}|rVxF|j�D]:\}}|dkrJy
||=WqRtk
rFYqRXq|||<qW|S)a
        Merge a dictionary of override values for self.connection_pool_kw.

        This does not modify self.connection_pool_kw and returns a new dict.
        Any keys in the override dictionary with a value of ``None`` are
        removed from the merged dictionary.
        N)rMr6r9�KeyError)rQ�overrideZbase_pool_kwargsrB�valuerCrCrDr\s

zPoolManager._merge_pool_kwargsTcKsdt|�}|j|j|j|jd�}d|d<d|d<d|krD|jj�|d<|jdk	rj|jdkrj|j||f|�}n|j||j	f|�}|o�|j
�}|s�|St||�}|jdkr�d	}|j
d
�}	t|	t�s�tj|	|d�}	|	jo�|j|��r�x|	jD]}
|dj|
d�q�Wy|	j||||d�}	Wn tk
�r4|	j�r0�|SX|	|d
<||d<tjd
||�|j||f|�S)a]
        Same as :meth:`urllib3.connectionpool.HTTPConnectionPool.urlopen`
        with custom cross-host redirect logic and only sends the request-uri
        portion of the ``url``.

        The given ``url`` parameter must be absolute, such that an appropriate
        :class:`urllib3.connectionpool.ConnectionPool` can be chosen for it.
        )rWr1FZassert_same_host�redirectr3NrFi/ZGET�retries)ri)�responseZ_poolzRedirecting %s -> %s)r
r_r2rWr1r3r6�proxy�urlopenZrequest_uriZget_redirect_locationrZstatusr:�
isinstancerZfrom_intZremove_headers_on_redirectZis_same_hostr>Z	incrementr	Zraise_on_redirect�log�info)rQ�methodrcrirYrdZconnrkZredirect_locationrj�headerrCrCrDrm-s@	



zPoolManager.urlopen)rHN)N)NrFN)N)N)T)�__name__�
__module__�__qualname__�__doc__rlrLrSrVrZrTr_r]r`rer\rmrCrCrCrDrys

	


csHeZdZdZd�fdd�	Zd�fdd�	Zdd	d
�Zd�fdd
�	Z�ZS)raw
    Behaves just like :class:`PoolManager`, but sends all requests through
    the defined proxy, using the CONNECT method for HTTPS URLs.

    :param proxy_url:
        The URL of the proxy to be used.

    :param proxy_headers:
        A dictionary contaning headers that will be sent to the proxy. In case
        of HTTP they are being sent with each request, while in the
        HTTPS/CONNECT case they are sent only once. Could be used for proxy
        authentication.

    Example:
        >>> proxy = urllib3.ProxyManager('http://localhost:3128/')
        >>> r1 = proxy.request('GET', 'http://google.com/')
        >>> r2 = proxy.request('GET', 'http://httpbin.org/')
        >>> len(proxy.pools)
        1
        >>> r3 = proxy.request('GET', 'https://httpbin.org/')
        >>> r4 = proxy.request('GET', 'https://twitter.com/')
        >>> len(proxy.pools)
        3

    rHNcs�t|t�rd|j|j|jf}t|�}|jsFtj|jd�}|j|d�}|jdkrZt	|j��||_
|pfi|_|j
|d<|j|d<tt
|�j||f|�dS)	Nz
%s://%s:%ir[)rWrFrG�_proxyr4)rFrG)rnrr1r2rWr
rr:�_replacer
rl�
proxy_headers�superrrL)rQ�	proxy_urlrRr3ryrMrlrW)�	__class__rCrDrL�s








zProxyManager.__init__rFcsD|dkr tt|�j||||d�Stt|�j|jj|jj|jj|d�S)NrG)r^)rzrr_rlr2rWr1)rQr2rWr1r^)r|rCrDr_�s


z!ProxyManager.connection_from_hostcCs0ddi}t|�j}|r||d<|r,|j|�|S)z�
        Sets headers needed by proxies: specifically, the Accept and Host
        headers. Only sets headers not provided by the user.
        ZAcceptz*/*ZHost)r
�netloc�update)rQrcr3Zheaders_r}rCrCrD�_set_proxy_headers�s

zProxyManager._set_proxy_headersTcsNt|�}|jdkr0|jd|j�}|j||�|d<tt|�j||fd|i|��S)z@Same as HTTP(S)ConnectionPool.urlopen, ``url`` must be absolute.rFr3ri)r
r1r:r3rrzrrm)rQrqrcrirYrdr3)r|rCrDrm�s

zProxyManager.urlopen)rHNN)NrFN)N)T)	rsrtrurvrLr_rrm�
__classcell__rCrC)r|rDris
cKstfd|i|��S)Nr{)r)rcrYrCrCrDr�s)rrrrrrr)rrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/)&Z
__future__r�collections�	functoolsZlogging�_collectionsrZconnectionpoolrrr�
exceptionsrr	r
Zpackages.six.moves.urllib.parserZrequestrZutil.urlr
Z
util.retryr�__all__Z	getLoggerrsrorXZ_key_fields�
namedtupler0rE�partialrPrOrrrrCrCrCrD�<module>s`

6
qW_vendor/urllib3/__pycache__/filepost.cpython-36.pyc000064400000005037151733136330016251 0ustar003

�Pf	�@s�ddlmZddlZddlmZddlmZddlmZddl	m
Z
ddlmZej
d	�d
Zdd�Zd
d�Zdd�Zddd�ZdS)�)�absolute_importN)�uuid4)�BytesIO�)�six)�b)�RequestFieldzutf-8�cCst�jS)zN
    Our embarrassingly-simple replacement for mimetools.choose_boundary.
    )r�hex�rr�/usr/lib/python3.6/filepost.py�choose_boundarysr
ccsNt|t�rtj|�}nt|�}x*|D]"}t|t�r:|Vq$tj|�Vq$WdS)z�
    Iterate over fields.

    Supports list of (k, v) tuples and dicts, and lists of
    :class:`~urllib3.fields.RequestField`.

    N)�
isinstance�dictr�	iteritems�iterrZfrom_tuples)�fields�i�fieldrrr�iter_field_objectss


rcCs,t|t�rdd�tj|�D�Sdd�|D�S)a-
    .. deprecated:: 1.6

    Iterate over fields.

    The addition of :class:`~urllib3.fields.RequestField` makes this function
    obsolete. Instead, use :func:`iter_field_objects`, which returns
    :class:`~urllib3.fields.RequestField` objects.

    Supports list of (k, v) tuples and dicts.
    css|]\}}||fVqdS)Nr)�.0�k�vrrr�	<genexpr>6sziter_fields.<locals>.<genexpr>css|]\}}||fVqdS)Nr)rrrrrrr8s)rrrr)rrrr�iter_fields)s
rcCs�t�}|dkrt�}x|t|�D]p}|jtd|��t|�j|j��|j}t|t	�r^t
|�}t|tj�rzt|�j|�n
|j|�|jd�qW|jtd|��t
d|�}|j
�|fS)aJ
    Encode a dictionary of ``fields`` using the multipart/form-data MIME format.

    :param fields:
        Dictionary of fields or list of (key, :class:`~urllib3.fields.RequestField`).

    :param boundary:
        If not specified, then a random boundary will be generated using
        :func:`mimetools.choose_boundary`.
    Nz--%s
s
z--%s--
z multipart/form-data; boundary=%s)rr
r�writer�writerZrender_headers�datar�int�strrZ	text_type�getvalue)r�boundaryZbodyrrZcontent_typerrr�encode_multipart_formdata;s 

r")N)Z
__future__r�codecsZuuidr�iorZpackagesrZpackages.sixrrr�lookuprr
rrr"rrrr�<module>s_vendor/urllib3/__pycache__/connectionpool.cpython-36.opt-1.pyc000064400000056152151733136330020420 0ustar003

�Pf��@s�ddlmZddlZddlZddlZddlZddlmZm	Z
ddlZddlmZm
Z
mZmZmZmZmZmZmZmZmZmZmZddlmZddlmZddlmZdd	lm Z m!Z!m"Z"m#Z#m$Z$m%Z%m&Z&dd
l'm(Z(ddl)m*Z*ddl+m,Z,dd
l-m.Z.ddl/m0Z0ddl1m2Z2ddl3m4Z4ddl5m6Z6m7Z7ej8�r<ddl9Z:ej;j<Z<ej=e>�Z?e@�ZAGdd�de@�ZBeCejDejEg�ZFGdd�deBe(�ZGGdd�deG�ZHdd�ZIdd�ZJdS)�)�absolute_importN)�error�timeout�)
�ClosedPoolError�
ProtocolError�EmptyPoolError�HeaderParsingError�HostChangedError�LocationValueError�
MaxRetryError�
ProxyError�ReadTimeoutError�SSLError�TimeoutError�InsecureRequestWarning�NewConnectionError)�CertificateError)�six)�queue)�port_by_scheme�DummyConnection�HTTPConnection�HTTPSConnection�VerifiedHTTPSConnection�
HTTPException�BaseSSLError)�RequestMethods)�HTTPResponse)�is_connection_dropped)�set_file_position)�assert_header_parsing)�Retry)�Timeout)�get_host�Urlc@sDeZdZdZdZejZd
dd�Zdd�Z	dd�Z
d	d
�Zdd�ZdS)�ConnectionPoolzz
    Base class for all connection pools, such as
    :class:`.HTTPConnectionPool` and :class:`.HTTPSConnectionPool`.
    NcCs.|std��t|�j�|_|j�|_||_dS)NzNo host specified.)r�
_ipv6_host�lower�host�_proxy_host�port)�selfr)r+�r-�$/usr/lib/python3.6/connectionpool.py�__init__Cs

zConnectionPool.__init__cCsdt|�j|j|jfS)Nz%s(host=%r, port=%r))�type�__name__r)r+)r,r-r-r.�__str__Ks
zConnectionPool.__str__cCs|S)Nr-)r,r-r-r.�	__enter__OszConnectionPool.__enter__cCs|j�dS)NF)�close)r,�exc_typeZexc_valZexc_tbr-r-r.�__exit__RszConnectionPool.__exit__cCsdS)zD
        Close all pooled connections and disable the pool.
        Nr-)r,r-r-r.r4WszConnectionPool.close)N)
r1�
__module__�__qualname__�__doc__�schemerZ	LifoQueue�QueueClsr/r2r3r6r4r-r-r-r.r&:s
r&c
@s�eZdZdZdZeZeZdde	j
ddddddf	dd�Zdd	�Zd!d
d�Z
dd
�Zdd�Zdd�Zdd�Zdd�Zedfdd�Zdd�Zdd�Zdd�Zdddddeddddf
dd �ZdS)"�HTTPConnectionPoolaN	
    Thread-safe connection pool for one host.

    :param host:
        Host used for this HTTP Connection (e.g. "localhost"), passed into
        :class:`httplib.HTTPConnection`.

    :param port:
        Port used for this HTTP Connection (None is equivalent to 80), passed
        into :class:`httplib.HTTPConnection`.

    :param strict:
        Causes BadStatusLine to be raised if the status line can't be parsed
        as a valid HTTP/1.0 or 1.1 status line, passed into
        :class:`httplib.HTTPConnection`.

        .. note::
           Only works in Python 2. This parameter is ignored in Python 3.

    :param timeout:
        Socket timeout in seconds for each individual connection. This can
        be a float or integer, which sets the timeout for the HTTP request,
        or an instance of :class:`urllib3.util.Timeout` which gives you more
        fine-grained control over request timeouts. After the constructor has
        been parsed, this is always a `urllib3.util.Timeout` object.

    :param maxsize:
        Number of connections to save that can be reused. More than 1 is useful
        in multithreaded situations. If ``block`` is set to False, more
        connections will be created but they will not be saved once they've
        been used.

    :param block:
        If set to True, no more than ``maxsize`` connections will be used at
        a time. When no free connections are available, the call will block
        until a connection has been released. This is a useful side effect for
        particular multithreaded situations where one does not want to use more
        than maxsize connections per host to prevent flooding.

    :param headers:
        Headers to include with all requests, unless other headers are given
        explicitly.

    :param retries:
        Retry configuration to use by default with requests in this pool.

    :param _proxy:
        Parsed proxy URL, should not be used directly, instead, see
        :class:`urllib3.connectionpool.ProxyManager`"

    :param _proxy_headers:
        A dictionary with proxy headers, should not be used directly,
        instead, see :class:`urllib3.connectionpool.ProxyManager`"

    :param \**conn_kw:
        Additional parameters are used to create fresh :class:`urllib3.connection.HTTPConnection`,
        :class:`urllib3.connection.HTTPSConnection` instances.
    �httpNFrc
Ks�tj|||�tj||�||_t|t�s4tj|�}|dkrBtj}||_	||_
|j|�|_||_
|	|_|
pli|_xt|�D]}|jjd�qzWd|_d|_||_|jr�|jjdg�dS)NrZsocket_options)r&r/r�strict�
isinstancer#�
from_floatr"ZDEFAULTr�retriesr;�pool�block�proxy�
proxy_headers�xrange�put�num_connections�num_requests�conn_kw�
setdefault)
r,r)r+r>r�maxsizerC�headersrA�_proxy�_proxy_headersrJ�_r-r-r.r/�s(


zHTTPConnectionPool.__init__cCsJ|jd7_tjd|j|j�|jf|j|j|jj|jd�|j	��}|S)z9
        Return a fresh :class:`HTTPConnection`.
        rz%Starting new HTTP connection (%d): %s)r)r+rr>)
rH�log�debugr)�
ConnectionClsr+r�connect_timeoutr>rJ)r,�connr-r-r.�	_new_conn�szHTTPConnectionPool._new_conncCs�d}y|jj|j|d�}WnBtk
r8t|d��Yn&tjk
r\|jrXt|d��YnX|r�t|�r�t	j
d|j�|j�t
|dd�dkr�d}|p�|j�S)	a�
        Get a connection. Will return a pooled connection if one is available.

        If no connections are available and :prop:`.block` is ``False``, then a
        fresh connection is returned.

        :param timeout:
            Seconds to wait before giving up and raising
            :class:`urllib3.exceptions.EmptyPoolError` if the pool is empty and
            :prop:`.block` is ``True``.
        N)rCrzPool is closed.z>Pool reached maximum size and no more connections are allowed.z Resetting dropped connection: %sZ	auto_openrr)rB�getrC�AttributeErrorrr�EmptyrrrQrRr)r4�getattrrV)r,rrUr-r-r.�	_get_conn�s zHTTPConnectionPool._get_conncCs\y|jj|dd�dStk
r(Yn$tjk
rJtjd|j�YnX|rX|j�dS)a�
        Put a connection back into the pool.

        :param conn:
            Connection object for the current host and port as returned by
            :meth:`._new_conn` or :meth:`._get_conn`.

        If the pool is already full, the connection is closed and discarded
        because we exceeded maxsize. If connections are discarded frequently,
        then maxsize should be increased.

        If the pool is closed, then the connection will be closed and discarded.
        F)rCNz2Connection pool is full, discarding connection: %s)	rBrGrXrZFullrQ�warningr)r4)r,rUr-r-r.�	_put_conn�szHTTPConnectionPool._put_conncCsdS)zU
        Called right before a request is made, after the socket is created.
        Nr-)r,rUr-r-r.�_validate_connsz!HTTPConnectionPool._validate_conncCsdS)Nr-)r,rUr-r-r.�_prepare_proxy!sz!HTTPConnectionPool._prepare_proxycCs2|tkr|jj�St|t�r$|j�Stj|�SdS)z< Helper that always returns a :class:`urllib3.util.Timeout` N)�_DefaultrZcloner?r#r@)r,rr-r-r.�_get_timeout%s


zHTTPConnectionPool._get_timeoutcCsjt|t�rt||d|��t|d�r>|jtkr>t||d|��dt|�ksVdt|�krft||d|��dS)zAIs the error actually a timeout? Will raise a ReadTimeout or passz!Read timed out. (read timeout=%s)�errnoz	timed outzdid not complete (read)N)r?�
SocketTimeoutr�hasattrrb�_blocking_errnos�str)r,�err�url�
timeout_valuer-r-r.�_raise_timeout1s
z!HTTPConnectionPool._raise_timeoutc
:Ks|jd7_|j|�}|j�|j|_y|j|�Wn:ttfk
rp}z|j|||jd��WYdd}~XnX|r�|j	||f|�n|j
||f|�|j}	t|dd�r�|	dkr�t
||d|	��|	tjkr�|jjtj��n|jj|	�yjy|jdd�}
WnTtk
�rPy|j�}
Wn0tk
�rJ}ztj|d�WYdd}~XnXYnXWn<tttfk
�r�}z|j|||	d��WYdd}~XnXt|d	d
�}tjd|j|j|j||||
j|
j �	yt!|
j"�Wn@t#tfk
�r}ztj$d|j%|�|dd
�WYdd}~XnX|
S)a
        Perform a request on a given urllib connection object taken from our
        pool.

        :param conn:
            a connection from one of our connection pools

        :param timeout:
            Socket timeout in seconds for the request. This can be a
            float or integer, which will set the same timeout value for
            the socket connect and the socket read, or an instance of
            :class:`urllib3.util.Timeout`, which gives you more fine-grained
            control over your timeouts.
        r)rgrhriN�sockrz!Read timed out. (read timeout=%s)T)�	bufferingZ
_http_vsn_strzHTTP/?z%s://%s:%s "%s %s %s" %s %sz$Failed to parse headers (url=%s): %s)�exc_info)&rIraZ
start_connectrTrr^rcrrjZrequest_chunked�request�read_timeoutrZrr#�DEFAULT_TIMEOUTrkZ
settimeout�socketZgetdefaulttimeoutZgetresponse�	TypeError�	ExceptionrZ
raise_from�SocketErrorrQrRr:r)r+�statusZlengthr!�msgr	r\�
_absolute_url)
r,rU�methodrhr�chunkedZhttplib_request_kw�timeout_obj�ero�httplib_responseZhttp_versionZhper-r-r.�
_make_requestBsT

(
$z HTTPConnectionPool._make_requestcCst|j|j|j|d�jS)N)r:r)r+�path)r%r:r)r+rh)r,r~r-r-r.rw�sz HTTPConnectionPool._absolute_urlcCsL|jd}|_y"x|jdd�}|r|j�qWWntjk
rFYnXdS)zD
        Close all pooled connections and disable the pool.
        NF)rC)rBrWr4rrY)r,Zold_poolrUr-r-r.r4�szHTTPConnectionPool.closecCst|jd�rdSt|�\}}}t|�j�}|jr@|r@tj|�}n|jrZ|tj|�krZd}|||f|j|j|jfkS)zj
        Check if the given ``url`` is a member of the same host as this
        connection pool.
        �/TN)	�
startswithr$r'r(r+rrWr:r))r,rhr:r)r+r-r-r.�is_same_host�s
zHTTPConnectionPool.is_same_hostTc
.Ks�|dkr|j}t|t�s*tj|||jd�}|
dkr>|
jdd�}
|rZ|j|�rZt|||��d}|
}|jdkr�|j	�}|j
|j�d}d}t||�}�zry�|j
|�}|j|	d�}|j|_|jdk	o�t|dd�}|r�|j|�|j|||||||d	�}|
�s�|nd}||
d
<|jj|f|||d�|
��}d}Wn�tjk
�rNt|d��Yn�ttttttt fk
�r}z�d}t|tt f��r�t|�}n>t|tt!f��r�|j�r�t"d
|�}nt|ttf��r�td|�}|j#||||t$j%�dd�}|j&�|}WYdd}~XnXWd|�s |�o|j'�}d}|�r0|j(|�X|�spt)j*d|||�|j+|||||||f||	|
|d�|
��Sdd�}|�o�|j,�}|�r$|j-dk�r�d}y|j#||||d�}Wn(t.k
�r�|j/�r�||��|SX||�|j0|�t)j1d||�|j+||||f|||||	|
|d�|
��St2|j3d��}|j4||j-|��r�y|j#||||d�}Wn(t.k
�r�|j5�r~||��|SX||�|j&|�t)j1d|�|j+||||f|||||	|
|d�|
��S|S)a�
        Get a connection from the pool and perform an HTTP request. This is the
        lowest level call for making a request, so you'll need to specify all
        the raw details.

        .. note::

           More commonly, it's appropriate to use a convenience method provided
           by :class:`.RequestMethods`, such as :meth:`request`.

        .. note::

           `release_conn` will only behave as expected if
           `preload_content=False` because we want to make
           `preload_content=False` the default behaviour someday soon without
           breaking backwards compatibility.

        :param method:
            HTTP request method (such as GET, POST, PUT, etc.)

        :param body:
            Data to send in the request body (useful for creating
            POST requests, see HTTPConnectionPool.post_url for
            more convenience).

        :param headers:
            Dictionary of custom headers to send, such as User-Agent,
            If-None-Match, etc. If None, pool headers are used. If provided,
            these headers completely replace any pool-specific headers.

        :param retries:
            Configure the number of retries to allow before raising a
            :class:`~urllib3.exceptions.MaxRetryError` exception.

            Pass ``None`` to retry until you receive a response. Pass a
            :class:`~urllib3.util.retry.Retry` object for fine-grained control
            over different types of retries.
            Pass an integer number to retry connection errors that many times,
            but no other types of errors. Pass zero to never retry.

            If ``False``, then retries are disabled and any exception is raised
            immediately. Also, instead of raising a MaxRetryError on redirects,
            the redirect response will be returned.

        :type retries: :class:`~urllib3.util.retry.Retry`, False, or an int.

        :param redirect:
            If True, automatically handle redirects (status codes 301, 302,
            303, 307, 308). Each redirect counts as a retry. Disabling retries
            will disable redirect, too.

        :param assert_same_host:
            If ``True``, will make sure that the host of the pool requests is
            consistent else will raise HostChangedError. When False, you can
            use the pool on an HTTP proxy and request foreign hosts.

        :param timeout:
            If specified, overrides the default timeout for this one
            request. It may be a float (in seconds) or an instance of
            :class:`urllib3.util.Timeout`.

        :param pool_timeout:
            If set and the pool is set to block=True, then this method will
            block for ``pool_timeout`` seconds and raise EmptyPoolError if no
            connection is available within the time period.

        :param release_conn:
            If False, then the urlopen call will not release the connection
            back into the pool once a response is received (but will release if
            you read the entire contents of the response such as when
            `preload_content=True`). This is useful if you're not preloading
            the response's content immediately. You will need to call
            ``r.release_conn()`` on the response ``r`` to return the connection
            back into the pool. If None, it takes the value of
            ``response_kw.get('preload_content', True)``.

        :param chunked:
            If True, urllib3 will send the body using chunked transfer
            encoding. Otherwise, urllib3 will send the body using the standard
            content-length form. Defaults to False.

        :param int body_pos:
            Position to seek to in file-like body in the event of a retry or
            redirect. Typically this won't need to be set because urllib3 will
            auto-populate the value when needed.

        :param \**response_kw:
            Additional parameters are passed to
            :meth:`urllib3.response.HTTPResponse.from_httplib`
        N)�redirect�defaultZpreload_contentTr=F)rrk)r�bodyrMryZrequest_method)rB�
connectionrAz"No pool connections are available.zCannot connect to proxy.zConnection aborted.�)r�_poolZ_stacktracez1Retrying (%r) after connection broken by '%r': %s)r�pool_timeout�release_conn�body_poscSs@y|j�Wn.ttttttfk
r:}zWYdd}~XnXdS)N)�readrrrtrrr)�responser{r-r-r.�drain_and_release_conn�s

z:HTTPConnectionPool.urlopen.<locals>.drain_and_release_conni/ZGET)r�r�zRedirecting %s -> %s)rAr��assert_same_hostrr�r�r�zRetry-Afterz	Retry: %s)6rMr?r"Zfrom_intrArWr�r
r:�copy�updaterEr rar[rTrrDrZr_r}�ResponseClsZfrom_httplibrrYrrrrtrrrrrr
Z	increment�sysrmZsleepr4r]rQr\�urlopenZget_redirect_locationrurZraise_on_redirectZsleep_for_retryrR�boolZ	getheaderZis_retryZraise_on_status)r,rxrhr�rMrAr�r�rr�r�ryr�Zresponse_kwrUZrelease_this_connrgZ
clean_exitrzZis_new_proxy_connr|Z
response_connr�r{r�Zredirect_locationZhas_retry_afterr-r-r.r��s�^















zHTTPConnectionPool.urlopen)N)r1r7r8r9r:rrSrr�r#rpr/rVr[r]r^r_rarjr`r}rwr4r�r�r-r-r-r.r<bs.:%
&Ur<csneZdZdZdZeZddejddddddddddddddfdd�Z	dd	�Z
d
d�Zdd
�Z�fdd�Z
�ZS)�HTTPSConnectionPoola�
    Same as :class:`.HTTPConnectionPool`, but HTTPS.

    When Python is compiled with the :mod:`ssl` module, then
    :class:`.VerifiedHTTPSConnection` is used, which *can* verify certificates,
    instead of :class:`.HTTPSConnection`.

    :class:`.VerifiedHTTPSConnection` uses one of ``assert_fingerprint``,
    ``assert_hostname`` and ``host`` in this order to verify connections.
    If ``assert_hostname`` is False, no verification is done.

    The ``key_file``, ``cert_file``, ``cert_reqs``, ``ca_certs``,
    ``ca_cert_dir``, and ``ssl_version`` are only used if :mod:`ssl` is
    available and are fed into :meth:`urllib3.util.ssl_wrap_socket` to upgrade
    the connection socket into an SSL socket.
    �httpsNFrcKsftj||||||||||	|
f|�|r2|
dkr2d}
||_||_|
|_||_||_||_||_||_	dS)NZ
CERT_REQUIRED)
r<r/�key_file�	cert_file�	cert_reqs�ca_certs�ca_cert_dir�ssl_version�assert_hostname�assert_fingerprint)r,r)r+r>rrLrCrMrArNrOr�r�r�r�r�r�r�r�rJr-r-r.r/�s	zHTTPSConnectionPool.__init__c	Cs<t|t�r8|j|j|j|j|j|j|j|j	d�|j
|_
|S)z�
        Prepare the ``connection`` for :meth:`urllib3.util.ssl_wrap_socket`
        and establish the tunnel if proxy is used.
        )r�r�r�r�r�r�r�)r?rZset_certr�r�r�r�r�r�r�r�)r,rUr-r-r.�
_prepare_conns

z!HTTPSConnectionPool._prepare_conncCsfy
|j}Wntk
r$|j}YnXtjdkrH|jrH||j|j�n||j|j|j�|j�dS)z�
        Establish tunnel connection early, because otherwise httplib
        would improperly set Host: header to proxy's IP:port.
        r���N)r�r�r�)	�
set_tunnelrXZ_set_tunnelr��version_inforEr*r+�connect)r,rUr�r-r-r.r_ s
z"HTTPSConnectionPool._prepare_proxycCs�|jd7_tjd|j|j�|js2|jtkr:td��|j}|j}|jdk	r`|jj}|jj}|jf|||j	j
|jd�|j��}|j
|�S)zB
        Return a fresh :class:`httplib.HTTPSConnection`.
        rz&Starting new HTTPS connection (%d): %szCCan't connect to HTTPS URL because the SSL module is not available.N)r)r+rr>)rHrQrRr)rSrrr+rDrrTr>rJr�)r,Zactual_hostZactual_portrUr-r-r.rV2s

zHTTPSConnectionPool._new_conncs:tt|�j|�t|dd�s$|j�|js6tjdt�dS)zU
        Called right before a request is made, after the socket is created.
        rkNz�Unverified HTTPS request is being made. Adding certificate verification is strongly advised. See: https://urllib3.readthedocs.io/en/latest/advanced-usage.html#ssl-warnings)	�superr�r^rZr�Zis_verified�warnings�warnr)r,rU)�	__class__r-r.r^Jsz"HTTPSConnectionPool._validate_conn)r1r7r8r9r:rrSr#rpr/r�r_rVr^�
__classcell__r-r-)r�r.r��sr�cKsRt|�\}}}|ptj|d�}|dkr:t|fd|i|��St|fd|i|��SdS)a�
    Given a url, return an :class:`.ConnectionPool` instance of its host.

    This is a shortcut for not having to parse out the scheme, host, and port
    of the url before creating an :class:`.ConnectionPool` instance.

    :param url:
        Absolute URL string that must include the scheme. Port is optional.

    :param \**kw:
        Passes additional parameters to the constructor of the appropriate
        :class:`.ConnectionPool`. Useful for specifying things like
        timeout, maxsize, headers, etc.

    Example::

        >>> conn = connection_from_url('http://google.com/')
        >>> r = conn.request('GET', '/')
    �Pr�r+N)r$rrWr�r<)rh�kwr:r)r+r-r-r.�connection_from_url]s
r�cCs*|jd�r&|jd�r&|jdd�jd�}|S)z'
    Process IPv6 address literals
    �[�]z%25�%z[])r��endswith�replace�strip)r)r-r-r.r'ysr')KZ
__future__rrbZloggingr�r�rqrrtrrc�
exceptionsrrrr	r
rrr
rrrrrZpackages.ssl_match_hostnamerZpackagesrZpackages.six.movesrr�rrrrrrrrnrr�rZutil.connectionrZutil.requestr Z
util.responser!Z
util.retryr"Zutil.timeoutr#Zutil.urlr$r%ZPY2ZQueueZ_unused_module_QueueZmovesrFZ	getLoggerr1rQ�objectr`r&�setZEAGAINZEWOULDBLOCKrer<r�r�r'r-r-r-r.�<module>sF<$
%|_vendor/urllib3/poolmanager.py000064400000040664151733136330012451 0ustar00from __future__ import absolute_import
import collections
import functools
import logging

from ._collections import RecentlyUsedContainer
from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool
from .connectionpool import port_by_scheme
from .exceptions import LocationValueError, MaxRetryError, ProxySchemeUnknown
from .packages.six.moves.urllib.parse import urljoin
from .request import RequestMethods
from .util.url import parse_url
from .util.retry import Retry


__all__ = ['PoolManager', 'ProxyManager', 'proxy_from_url']


# Module-level logger; handlers/levels are left to the application to configure.
log = logging.getLogger(__name__)

# Keyword arguments that are only meaningful for HTTPS pools/connections.
SSL_KEYWORDS = ('key_file', 'cert_file', 'cert_reqs', 'ca_certs',
                'ssl_version', 'ca_cert_dir', 'ssl_context')

# All known keyword arguments that could be provided to the pool manager, its
# pools, or the underlying connections. This is used to construct a pool key.
# NOTE: each entry carries a 'key_' prefix because namedtuple fields may not
# start with an underscore (see _default_key_normalizer, which does the mapping).
_key_fields = (
    'key_scheme',  # str
    'key_host',  # str
    'key_port',  # int
    'key_timeout',  # int or float or Timeout
    'key_retries',  # int or Retry
    'key_strict',  # bool
    'key_block',  # bool
    'key_source_address',  # str
    'key_key_file',  # str
    'key_cert_file',  # str
    'key_cert_reqs',  # str
    'key_ca_certs',  # str
    'key_ssl_version',  # str
    'key_ca_cert_dir',  # str
    'key_ssl_context',  # instance of ssl.SSLContext or urllib3.util.ssl_.SSLContext
    'key_maxsize',  # int
    'key_headers',  # dict
    'key__proxy',  # parsed proxy url
    'key__proxy_headers',  # dict
    'key_socket_options',  # list of (level (int), optname (int), value (int or str)) tuples
    'key__socks_options',  # dict
    'key_assert_hostname',  # bool or string
    'key_assert_fingerprint',  # str
)

#: The namedtuple class used to construct keys for the connection pool.
#: All custom key schemes should include the fields in this key at a minimum.
PoolKey = collections.namedtuple('PoolKey', _key_fields)


def _default_key_normalizer(key_class, request_context):
    """
    Create a pool key out of a request context dictionary.

    According to RFC 3986, both the scheme and host are case-insensitive.
    Therefore, this function normalizes both before constructing the pool
    key for an HTTPS request. If you wish to change this behaviour, provide
    alternate callables to ``key_fn_by_scheme``.

    :param key_class:
        The class to use when constructing the key. This should be a namedtuple
        with the ``scheme`` and ``host`` keys at a minimum.
    :type  key_class: namedtuple
    :param request_context:
        A dictionary-like object that contain the context for a request.
    :type  request_context: dict

    :return: A namedtuple that can be used as a connection pool key.
    :rtype:  PoolKey
    """
    # Since we mutate the dictionary, make a copy first
    context = request_context.copy()
    context['scheme'] = context['scheme'].lower()
    context['host'] = context['host'].lower()

    # These are both dictionaries and need to be transformed into frozensets
    for key in ('headers', '_proxy_headers', '_socks_options'):
        if key in context and context[key] is not None:
            context[key] = frozenset(context[key].items())

    # The socket_options key may be a list and needs to be transformed into a
    # tuple.
    socket_opts = context.get('socket_options')
    if socket_opts is not None:
        context['socket_options'] = tuple(socket_opts)

    # Map the kwargs to the names in the namedtuple - this is necessary since
    # namedtuples can't have fields starting with '_'.
    for key in list(context.keys()):
        context['key_' + key] = context.pop(key)

    # Default to ``None`` for keys missing from the context
    for field in key_class._fields:
        if field not in context:
            context[field] = None

    return key_class(**context)


#: A dictionary that maps a scheme to a callable that creates a pool key.
#: This can be used to alter the way pool keys are constructed, if desired.
#: Each PoolManager makes a copy of this dictionary so they can be configured
#: globally here, or individually on the instance.
key_fn_by_scheme = {
    'http': functools.partial(_default_key_normalizer, PoolKey),
    'https': functools.partial(_default_key_normalizer, PoolKey),
}

#: Maps a scheme to the ConnectionPool class that handles it. PoolManager
#: copies a reference per-instance (see PoolManager.__init__) so subclasses
#: may override the mapping without affecting this module-level default.
pool_classes_by_scheme = {
    'http': HTTPConnectionPool,
    'https': HTTPSConnectionPool,
}


class PoolManager(RequestMethods):
    """
    Allows for arbitrary requests while transparently keeping track of
    necessary connection pools for you.

    :param num_pools:
        Number of connection pools to cache before discarding the least
        recently used pool.

    :param headers:
        Headers to include with all requests, unless other headers are given
        explicitly.

    :param \\**connection_pool_kw:
        Additional parameters are used to create fresh
        :class:`urllib3.connectionpool.ConnectionPool` instances.

    Example::

        >>> manager = PoolManager(num_pools=2)
        >>> r = manager.request('GET', 'http://google.com/')
        >>> r = manager.request('GET', 'http://google.com/mail')
        >>> r = manager.request('GET', 'http://yahoo.com/')
        >>> len(manager.pools)
        2

    """

    # Replaced with the parsed proxy URL by ProxyManager; ``None`` means
    # requests are sent directly to the target host.
    proxy = None

    def __init__(self, num_pools=10, headers=None, **connection_pool_kw):
        RequestMethods.__init__(self, headers)
        self.connection_pool_kw = connection_pool_kw
        # LRU container of pools; an evicted pool is closed by dispose_func.
        self.pools = RecentlyUsedContainer(num_pools,
                                           dispose_func=lambda p: p.close())

        # Locally set the pool classes and keys so other PoolManagers can
        # override them.
        self.pool_classes_by_scheme = pool_classes_by_scheme
        self.key_fn_by_scheme = key_fn_by_scheme.copy()

    def __enter__(self):
        # Supports ``with PoolManager(...) as pm:`` usage.
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.clear()
        # Return False to re-raise any potential exceptions
        return False

    def _new_pool(self, scheme, host, port, request_context=None):
        """
        Create a new :class:`ConnectionPool` based on host, port, scheme, and
        any additional pool keyword arguments.

        If ``request_context`` is provided, it is provided as keyword arguments
        to the pool class used. This method is used to actually create the
        connection pools handed out by :meth:`connection_from_url` and
        companion methods. It is intended to be overridden for customization.
        """
        pool_cls = self.pool_classes_by_scheme[scheme]
        if request_context is None:
            request_context = self.connection_pool_kw.copy()

        # Although the context has everything necessary to create the pool,
        # this function has historically only used the scheme, host, and port
        # in the positional args. When an API change is acceptable these can
        # be removed.
        for key in ('scheme', 'host', 'port'):
            request_context.pop(key, None)

        if scheme == 'http':
            # SSL-related kwargs are meaningless for plain-HTTP pools.
            for kw in SSL_KEYWORDS:
                request_context.pop(kw, None)

        return pool_cls(host, port, **request_context)

    def clear(self):
        """
        Empty our store of pools and direct them all to close.

        This will not affect in-flight connections, but they will not be
        re-used after completion.
        """
        self.pools.clear()

    def connection_from_host(self, host, port=None, scheme='http', pool_kwargs=None):
        """
        Get a :class:`ConnectionPool` based on the host, port, and scheme.

        If ``port`` isn't given, it will be derived from the ``scheme`` using
        ``urllib3.connectionpool.port_by_scheme``. If ``pool_kwargs`` is
        provided, it is merged with the instance's ``connection_pool_kw``
        variable and used to create the new connection pool, if one is
        needed.
        """

        if not host:
            raise LocationValueError("No host specified.")

        request_context = self._merge_pool_kwargs(pool_kwargs)
        request_context['scheme'] = scheme or 'http'
        if not port:
            # Falls back to 80 when the scheme is unknown to port_by_scheme.
            port = port_by_scheme.get(request_context['scheme'].lower(), 80)
        request_context['port'] = port
        request_context['host'] = host

        return self.connection_from_context(request_context)

    def connection_from_context(self, request_context):
        """
        Get a :class:`ConnectionPool` based on the request context.

        ``request_context`` must at least contain the ``scheme`` key and its
        value must be a key in ``key_fn_by_scheme`` instance variable.
        """
        scheme = request_context['scheme'].lower()
        # The per-scheme key function normalizes the context into a hashable
        # pool key (see module-level ``key_fn_by_scheme``).
        pool_key_constructor = self.key_fn_by_scheme[scheme]
        pool_key = pool_key_constructor(request_context)

        return self.connection_from_pool_key(pool_key, request_context=request_context)

    def connection_from_pool_key(self, pool_key, request_context=None):
        """
        Get a :class:`ConnectionPool` based on the provided pool key.

        ``pool_key`` should be a namedtuple that only contains immutable
        objects. At a minimum it must have the ``scheme``, ``host``, and
        ``port`` fields.
        """
        # Hold the container lock so a concurrent caller cannot race us into
        # creating a duplicate pool for the same key.
        with self.pools.lock:
            # If the scheme, host, or port doesn't match existing open
            # connections, open a new ConnectionPool.
            pool = self.pools.get(pool_key)
            if pool:
                return pool

            # Make a fresh ConnectionPool of the desired type
            scheme = request_context['scheme']
            host = request_context['host']
            port = request_context['port']
            pool = self._new_pool(scheme, host, port, request_context=request_context)
            self.pools[pool_key] = pool

        return pool

    def connection_from_url(self, url, pool_kwargs=None):
        """
        Similar to :func:`urllib3.connectionpool.connection_from_url`.

        If ``pool_kwargs`` is not provided and a new pool needs to be
        constructed, ``self.connection_pool_kw`` is used to initialize
        the :class:`urllib3.connectionpool.ConnectionPool`. If ``pool_kwargs``
        is provided, it is used instead. Note that if a new pool does not
        need to be created for the request, the provided ``pool_kwargs`` are
        not used.
        """
        u = parse_url(url)
        return self.connection_from_host(u.host, port=u.port, scheme=u.scheme,
                                         pool_kwargs=pool_kwargs)

    def _merge_pool_kwargs(self, override):
        """
        Merge a dictionary of override values for self.connection_pool_kw.

        This does not modify self.connection_pool_kw and returns a new dict.
        Any keys in the override dictionary with a value of ``None`` are
        removed from the merged dictionary.
        """
        base_pool_kwargs = self.connection_pool_kw.copy()
        if override:
            for key, value in override.items():
                if value is None:
                    # ``None`` means "drop this key from the defaults".
                    try:
                        del base_pool_kwargs[key]
                    except KeyError:
                        pass
                else:
                    base_pool_kwargs[key] = value
        return base_pool_kwargs

    def urlopen(self, method, url, redirect=True, **kw):
        """
        Same as :meth:`urllib3.connectionpool.HTTPConnectionPool.urlopen`
        with custom cross-host redirect logic and only sends the request-uri
        portion of the ``url``.

        The given ``url`` parameter must be absolute, such that an appropriate
        :class:`urllib3.connectionpool.ConnectionPool` can be chosen for it.
        """
        u = parse_url(url)
        conn = self.connection_from_host(u.host, port=u.port, scheme=u.scheme)

        # Redirects are handled here (cross-host aware) rather than by the
        # pool, so disable the pool-level redirect handling.
        kw['assert_same_host'] = False
        kw['redirect'] = False

        if 'headers' not in kw:
            kw['headers'] = self.headers.copy()

        if self.proxy is not None and u.scheme == "http":
            # Plain-HTTP proxying sends the absolute URL in the request line.
            response = conn.urlopen(method, url, **kw)
        else:
            response = conn.urlopen(method, u.request_uri, **kw)

        redirect_location = redirect and response.get_redirect_location()
        if not redirect_location:
            return response

        # Support relative URLs for redirecting.
        redirect_location = urljoin(url, redirect_location)

        # RFC 7231, Section 6.4.4
        if response.status == 303:
            method = 'GET'

        retries = kw.get('retries')
        if not isinstance(retries, Retry):
            retries = Retry.from_int(retries, redirect=redirect)

        # Strip headers marked as unsafe to forward to the redirected location.
        # Check remove_headers_on_redirect to avoid a potential network call within
        # conn.is_same_host() which may use socket.gethostbyname() in the future.
        if (retries.remove_headers_on_redirect
                and not conn.is_same_host(redirect_location)):
            for header in retries.remove_headers_on_redirect:
                kw['headers'].pop(header, None)

        try:
            retries = retries.increment(method, url, response=response, _pool=conn)
        except MaxRetryError:
            if retries.raise_on_redirect:
                raise
            return response

        kw['retries'] = retries
        kw['redirect'] = redirect

        log.info("Redirecting %s -> %s", url, redirect_location)
        # Recurse to follow the redirect with the decremented retry budget.
        return self.urlopen(method, redirect_location, **kw)


class ProxyManager(PoolManager):
    """
    A :class:`PoolManager` that routes every request through a single proxy.

    Plain HTTP requests are sent to the proxy directly; HTTPS URLs are
    tunnelled through it using the CONNECT method.

    :param proxy_url:
        The URL of the proxy to be used.

    :param proxy_headers:
        A dictionary containing headers that will be sent to the proxy. For
        HTTP they are sent with every request, while for HTTPS/CONNECT they
        are sent only once. Can be used for proxy authentication.

    Example:
        >>> proxy = urllib3.ProxyManager('http://localhost:3128/')
        >>> r1 = proxy.request('GET', 'http://google.com/')
        >>> r2 = proxy.request('GET', 'http://httpbin.org/')
        >>> len(proxy.pools)
        1
        >>> r3 = proxy.request('GET', 'https://httpbin.org/')
        >>> r4 = proxy.request('GET', 'https://twitter.com/')
        >>> len(proxy.pools)
        3

    """

    def __init__(self, proxy_url, num_pools=10, headers=None,
                 proxy_headers=None, **connection_pool_kw):

        # An existing pool object may be passed in place of a URL string.
        if isinstance(proxy_url, HTTPConnectionPool):
            proxy_url = '%s://%s:%i' % (proxy_url.scheme, proxy_url.host,
                                        proxy_url.port)

        parsed = parse_url(proxy_url)
        if not parsed.port:
            # Fill in the scheme's default port when none was given.
            parsed = parsed._replace(
                port=port_by_scheme.get(parsed.scheme, 80))

        if parsed.scheme not in ("http", "https"):
            raise ProxySchemeUnknown(parsed.scheme)

        self.proxy = parsed
        self.proxy_headers = proxy_headers or {}

        # Hand the proxy details down to every pool this manager creates.
        connection_pool_kw['_proxy'] = self.proxy
        connection_pool_kw['_proxy_headers'] = self.proxy_headers

        super(ProxyManager, self).__init__(
            num_pools, headers, **connection_pool_kw)

    def connection_from_host(self, host, port=None, scheme='http', pool_kwargs=None):
        # HTTPS is tunnelled and therefore pooled per target host; everything
        # else shares the proxy's own connection pool.
        if scheme != "https":
            return super(ProxyManager, self).connection_from_host(
                self.proxy.host, self.proxy.port, self.proxy.scheme,
                pool_kwargs=pool_kwargs)

        return super(ProxyManager, self).connection_from_host(
            host, port, scheme, pool_kwargs=pool_kwargs)

    def _set_proxy_headers(self, url, headers=None):
        """
        Sets headers needed by proxies: specifically, the Accept and Host
        headers. Only sets headers not provided by the user.
        """
        merged = {'Accept': '*/*'}

        netloc = parse_url(url).netloc
        if netloc:
            merged['Host'] = netloc

        if headers:
            merged.update(headers)
        return merged

    def urlopen(self, method, url, redirect=True, **kw):
        "Same as HTTP(S)ConnectionPool.urlopen, ``url`` must be absolute."
        target = parse_url(url)

        if target.scheme == "http":
            # For proxied HTTPS requests, httplib sets the necessary headers
            # on the CONNECT to the proxy. For plain HTTP we must set at
            # least the Host header ourselves.
            kw['headers'] = self._set_proxy_headers(
                url, kw.get('headers', self.headers))

        return super(ProxyManager, self).urlopen(
            method, url, redirect=redirect, **kw)


def proxy_from_url(url, **kw):
    """Shortcut for constructing a :class:`ProxyManager` from a proxy URL."""
    manager = ProxyManager(proxy_url=url, **kw)
    return manager
_vendor/urllib3/__init__.py000064400000005445151733136330011702 0ustar00"""
urllib3 - Thread-safe connection pooling and re-using.
"""

from __future__ import absolute_import
import warnings

from .connectionpool import (
    HTTPConnectionPool,
    HTTPSConnectionPool,
    connection_from_url
)

from . import exceptions
from .filepost import encode_multipart_formdata
from .poolmanager import PoolManager, ProxyManager, proxy_from_url
from .response import HTTPResponse
from .util.request import make_headers
from .util.url import get_host
from .util.timeout import Timeout
from .util.retry import Retry


# Set default logging handler to avoid "No handler found" warnings.
import logging
try:  # Python 2.7+
    from logging import NullHandler
except ImportError:
    # Fallback for Python < 2.7, where logging.NullHandler does not exist.
    class NullHandler(logging.Handler):
        def emit(self, record):
            # Deliberately discard every record.
            pass

# Package metadata.
__author__ = 'Andrey Petrov (andrey.petrov@shazow.net)'
__license__ = 'MIT'
__version__ = '1.22'

# The package's public API, as re-exported by the imports above.
__all__ = (
    'HTTPConnectionPool',
    'HTTPSConnectionPool',
    'PoolManager',
    'ProxyManager',
    'HTTPResponse',
    'Retry',
    'Timeout',
    'add_stderr_logger',
    'connection_from_url',
    'disable_warnings',
    'encode_multipart_formdata',
    'get_host',
    'make_headers',
    'proxy_from_url',
)

# Attach a no-op handler so applications that haven't configured logging
# don't see "No handler found" warnings from this package.
logging.getLogger(__name__).addHandler(NullHandler())


def add_stderr_logger(level=logging.DEBUG):
    """
    Attach a :class:`logging.StreamHandler` to this package's logger.

    Handy for quick debugging; returns the handler that was added so the
    caller can later remove or reconfigure it.
    """
    # Lives in this __init__.py so __name__ resolves to the vendored package
    # path even when urllib3 is embedded inside another distribution.
    stream_handler = logging.StreamHandler()
    stream_handler.setFormatter(
        logging.Formatter('%(asctime)s %(levelname)s %(message)s'))
    pkg_logger = logging.getLogger(__name__)
    pkg_logger.addHandler(stream_handler)
    pkg_logger.setLevel(level)
    pkg_logger.debug('Added a stderr logging handler to logger: %s', __name__)
    return stream_handler


# Clean up: NullHandler was only needed for the handler registration above.
del NullHandler


# All warning filters *must* be appended unless you're really certain that they
# shouldn't be: otherwise, it's very hard for users to use most Python
# mechanisms to silence them.
# SecurityWarning's always go off by default.
warnings.simplefilter('always', exceptions.SecurityWarning, append=True)
# SubjectAltNameWarning's should go off once per host
warnings.simplefilter('default', exceptions.SubjectAltNameWarning, append=True)
# InsecurePlatformWarning's don't vary between requests, so we keep it default.
warnings.simplefilter('default', exceptions.InsecurePlatformWarning,
                      append=True)
# SNIMissingWarnings should go off only once.
warnings.simplefilter('default', exceptions.SNIMissingWarning, append=True)


def disable_warnings(category=exceptions.HTTPWarning):
    """
    Helper for quickly disabling all urllib3 warnings.

    :param category: Warning class to ignore; defaults to
        ``exceptions.HTTPWarning``, the base of all urllib3 warnings, which
        silences every one of them.
    """
    warnings.simplefilter('ignore', category)
_vendor/urllib3/response.py000064400000054567151733136330012012 0ustar00from __future__ import absolute_import
from contextlib import contextmanager
import zlib
import io
import logging
from socket import timeout as SocketTimeout
from socket import error as SocketError

from ._collections import HTTPHeaderDict
from .exceptions import (
    BodyNotHttplibCompatible, ProtocolError, DecodeError, ReadTimeoutError,
    ResponseNotChunked, IncompleteRead, InvalidHeader
)
from .packages.six import string_types as basestring, binary_type, PY3
from .packages.six.moves import http_client as httplib
from .connection import HTTPException, BaseSSLError
from .util.response import is_fp_closed, is_response_to_head

# Module-level logger for this module.
log = logging.getLogger(__name__)


class DeflateDecoder(object):
    """Streaming decoder for deflate-encoded response bodies.

    Handles both RFC 1950 zlib-wrapped streams and raw deflate streams:
    the first chunk is tried with the zlib wrapper, and on failure the
    decoder restarts in raw-deflate mode using the buffered input.
    """

    def __init__(self):
        # True until the first chunk decodes successfully (or fails and we
        # switch to raw deflate).
        self._first_try = True
        # Buffer of all input seen so far; kept only while _first_try holds
        # so it can be replayed in raw-deflate mode.
        self._data = binary_type()
        self._obj = zlib.decompressobj()

    def __getattr__(self, name):
        # Delegate everything else (e.g. ``flush``) to the zlib object.
        return getattr(self._obj, name)

    def decompress(self, data):
        if not data:
            return data

        if not self._first_try:
            return self._obj.decompress(data)

        self._data += data
        try:
            decompressed = self._obj.decompress(data)
            if decompressed:
                # Wrapped-deflate decoding succeeded; stop buffering input.
                self._first_try = False
                self._data = None
            return decompressed
        except zlib.error:
            # Not zlib-wrapped: retry the buffered input as raw deflate
            # (negative wbits means "no header/trailer").
            self._first_try = False
            self._obj = zlib.decompressobj(-zlib.MAX_WBITS)
            try:
                return self.decompress(self._data)
            finally:
                self._data = None


class GzipDecoder(object):
    """Streaming decoder for gzip-encoded response bodies."""

    def __init__(self):
        # 16 + MAX_WBITS tells zlib to expect a gzip header and trailer.
        self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS)

    def __getattr__(self, name):
        # Delegate everything else (e.g. ``flush``) to the zlib object.
        return getattr(self._obj, name)

    def decompress(self, data):
        """Decompress *data*; empty input passes through unchanged."""
        return self._obj.decompress(data) if data else data


def _get_decoder(mode):
    """Return the decoder matching the Content-Encoding value *mode*.

    Anything other than ``'gzip'`` is treated as deflate.
    """
    return GzipDecoder() if mode == 'gzip' else DeflateDecoder()


class HTTPResponse(io.IOBase):
    """
    HTTP Response container.

    Backwards-compatible to httplib's HTTPResponse but the response ``body`` is
    loaded and decoded on-demand when the ``data`` property is accessed.  This
    class is also compatible with the Python standard library's :mod:`io`
    module, and can hence be treated as a readable object in the context of that
    framework.

    Extra parameters for behaviour not present in httplib.HTTPResponse:

    :param preload_content:
        If True, the response's body will be preloaded during construction.

    :param decode_content:
        If True, attempts to decode specific content-encoding's based on headers
        (like 'gzip' and 'deflate') will be skipped and raw data will be used
        instead.

    :param original_response:
        When this HTTPResponse wrapper is generated from an httplib.HTTPResponse
        object, it's convenient to include the original for debug purposes. It's
        otherwise unused.

    :param retries:
        The retries contains the last :class:`~urllib3.util.retry.Retry` that
        was used during the request.

    :param enforce_content_length:
        Enforce content length checking. Body returned by server must match
        value of Content-Length header, if present. Otherwise, raise error.
    """

    CONTENT_DECODERS = ['gzip', 'deflate']
    REDIRECT_STATUSES = [301, 302, 303, 307, 308]

    def __init__(self, body='', headers=None, status=0, version=0, reason=None,
                 strict=0, preload_content=True, decode_content=True,
                 original_response=None, pool=None, connection=None,
                 retries=None, enforce_content_length=False, request_method=None):
        """Wrap a response body plus its metadata.

        ``body`` may be a str/bytes payload (cached immediately) or a
        file-like object (read on demand). ``request_method`` is used to
        decide whether a body is expected at all (e.g. HEAD).
        """

        # Normalize headers into a case-insensitive, multi-value-aware dict.
        if isinstance(headers, HTTPHeaderDict):
            self.headers = headers
        else:
            self.headers = HTTPHeaderDict(headers)
        self.status = status
        self.version = version
        self.reason = reason
        self.strict = strict
        self.decode_content = decode_content
        self.retries = retries
        self.enforce_content_length = enforce_content_length

        self._decoder = None  # lazily created by _init_decoder()
        self._body = None  # cached, fully-read body
        self._fp = None  # underlying file-like object, if any
        self._original_response = original_response
        self._fp_bytes_read = 0  # raw (wire) bytes consumed so far

        if body and isinstance(body, (basestring, binary_type)):
            self._body = body

        self._pool = pool
        self._connection = connection

        # A file-like body is kept for on-demand reading via read()/stream().
        if hasattr(body, 'read'):
            self._fp = body

        # Are we using the chunked-style of transfer encoding?
        self.chunked = False
        self.chunk_left = None
        tr_enc = self.headers.get('transfer-encoding', '').lower()
        # Don't incur the penalty of creating a list and then discarding it
        encodings = (enc.strip() for enc in tr_enc.split(","))
        if "chunked" in encodings:
            self.chunked = True

        # Determine length of response
        self.length_remaining = self._init_length(request_method)

        # If requested, preload the body.
        if preload_content and not self._body:
            self._body = self.read(decode_content=decode_content)

    def get_redirect_location(self):
        """
        Should we redirect and where to?

        :returns: Truthy redirect location string if we got a redirect status
            code and valid location. ``None`` if redirect status and no
            location. ``False`` if not a redirect status code.
        """
        if self.status not in self.REDIRECT_STATUSES:
            return False
        return self.headers.get('location')

    def release_conn(self):
        """Return the held connection to its pool, if both are present."""
        if self._pool and self._connection:
            self._pool._put_conn(self._connection)
            self._connection = None

    @property
    def data(self):
        """The full response body, reading and caching it on first access."""
        # For backwards-compat with urllib3 0.4 and earlier.
        if self._body:
            return self._body

        # NOTE(review): a cached-but-falsy body (e.g. b'') falls through to
        # read(); presumably that returns the same empty payload since the
        # file object is already drained — confirm.
        if self._fp:
            return self.read(cache_content=True)

    @property
    def connection(self):
        """The underlying connection object, or ``None`` once released."""
        return self._connection

    def tell(self):
        """
        Obtain the number of bytes pulled over the wire so far. May differ from
        the amount of content returned by :meth:`HTTPResponse.read` if bytes
        are encoded on the wire (e.g, compressed).
        """
        return self._fp_bytes_read

    def _init_length(self, request_method):
        """
        Set initial length value for Response content if available.

        :param request_method: Method of the originating request, used to
            detect HEAD responses, which carry no body.
        :returns: Expected body length in bytes, or ``None`` when unknown.
        """
        length = self.headers.get('content-length')

        if length is not None and self.chunked:
            # This Response will fail with an IncompleteRead if it can't be
            # received as chunked. This method falls back to attempt reading
            # the response before raising an exception.
            log.warning("Received response with both Content-Length and "
                        "Transfer-Encoding set. This is expressly forbidden "
                        "by RFC 7230 sec 3.3.2. Ignoring Content-Length and "
                        "attempting to process response as Transfer-Encoding: "
                        "chunked.")
            return None

        elif length is not None:
            try:
                # RFC 7230 section 3.3.2 specifies multiple content lengths can
                # be sent in a single Content-Length header
                # (e.g. Content-Length: 42, 42). This line ensures the values
                # are all valid ints and that as long as the `set` length is 1,
                # all values are the same. Otherwise, the header is invalid.
                lengths = set([int(val) for val in length.split(',')])
                if len(lengths) > 1:
                    raise InvalidHeader("Content-Length contained multiple "
                                        "unmatching values (%s)" % length)
                length = lengths.pop()
            except ValueError:
                # Non-integer Content-Length: treat the length as unknown.
                length = None
            else:
                if length < 0:
                    length = None

        # Convert status to int for comparison
        # In some cases, httplib returns a status of "_UNKNOWN"
        try:
            status = int(self.status)
        except ValueError:
            status = 0

        # Check for responses that shouldn't include a body
        if status in (204, 304) or 100 <= status < 200 or request_method == 'HEAD':
            length = 0

        return length

    def _init_decoder(self):
        """Create the content decoder on first use, based on the headers."""
        # Note: content-encoding value should be case-insensitive, per RFC 7230
        # Section 3.2
        encoding = self.headers.get('content-encoding', '').lower()
        if self._decoder is None and encoding in self.CONTENT_DECODERS:
            self._decoder = _get_decoder(encoding)

    def _decode(self, data, decode_content, flush_decoder):
        """Optionally decompress *data*, flushing the decoder when asked."""
        if decode_content and self._decoder:
            try:
                data = self._decoder.decompress(data)
            except (IOError, zlib.error) as e:
                encoding = self.headers.get('content-encoding', '').lower()
                raise DecodeError(
                    "Received response with content-encoding: %s, but "
                    "failed to decode it." % encoding, e)

        if flush_decoder and decode_content:
            data += self._flush_decoder()

        return data

    def _flush_decoder(self):
        """Drain the decoder; returns ``b''`` when no decoder is set."""
        if not self._decoder:
            return b''
        return self._decoder.decompress(b'') + self._decoder.flush()

    @contextmanager
    def _error_catcher(self):
        """
        Catch low-level python exceptions, instead re-raising urllib3
        variants, so that low-level exceptions are not leaked in the
        high-level api.

        On exit, release the connection back to the pool.
        """
        # Tracks whether the wrapped block completed without raising.
        clean_exit = False

        try:
            try:
                yield

            except SocketTimeout:
                # FIXME: Ideally we'd like to include the url in the ReadTimeoutError but
                # there is yet no clean way to get at it from this context.
                raise ReadTimeoutError(self._pool, None, 'Read timed out.')

            except BaseSSLError as e:
                # FIXME: Is there a better way to differentiate between SSLErrors?
                if 'read operation timed out' not in str(e):  # Defensive:
                    # This shouldn't happen but just in case we're missing an edge
                    # case, let's avoid swallowing SSL errors.
                    raise

                raise ReadTimeoutError(self._pool, None, 'Read timed out.')

            except (HTTPException, SocketError) as e:
                # This includes IncompleteRead.
                raise ProtocolError('Connection broken: %r' % e, e)

            # If no exception is thrown, we should avoid cleaning up
            # unnecessarily.
            clean_exit = True
        finally:
            # If we didn't terminate cleanly, we need to throw away our
            # connection.
            if not clean_exit:
                # The response may not be closed but we're not going to use it
                # anymore so close it now to ensure that the connection is
                # released back to the pool.
                if self._original_response:
                    self._original_response.close()

                # Closing the response may not actually be sufficient to close
                # everything, so if we have a hold of the connection close that
                # too.
                if self._connection:
                    self._connection.close()

            # If we hold the original response but it's closed now, we should
            # return the connection back to the pool.
            if self._original_response and self._original_response.isclosed():
                self.release_conn()

    def read(self, amt=None, decode_content=None, cache_content=False):
        """
        Similar to :meth:`httplib.HTTPResponse.read`, but with two additional
        parameters: ``decode_content`` and ``cache_content``.

        :param amt:
            How much of the content to read. If specified, caching is skipped
            because it doesn't make sense to cache partial content as the full
            response.

        :param decode_content:
            If True, will attempt to decode the body based on the
            'content-encoding' header.

        :param cache_content:
            If True, will save the returned data such that the same result is
            returned despite of the state of the underlying file object. This
            is useful if you want the ``.data`` property to continue working
            after having ``.read()`` the file object. (Overridden if ``amt`` is
            set.)
        """
        self._init_decoder()
        if decode_content is None:
            # Fall back to the per-response default set in __init__.
            decode_content = self.decode_content

        if self._fp is None:
            return

        flush_decoder = False
        data = None

        with self._error_catcher():
            if amt is None:
                # cStringIO doesn't like amt=None
                data = self._fp.read()
                flush_decoder = True
            else:
                cache_content = False
                data = self._fp.read(amt)
                if amt != 0 and not data:  # Platform-specific: Buggy versions of Python.
                    # Close the connection when no data is returned
                    #
                    # This is redundant to what httplib/http.client _should_
                    # already do.  However, versions of python released before
                    # December 15, 2012 (http://bugs.python.org/issue16298) do
                    # not properly close the connection in all cases. There is
                    # no harm in redundantly calling close.
                    self._fp.close()
                    flush_decoder = True
                    if self.enforce_content_length and self.length_remaining not in (0, None):
                        # This is an edge case that httplib failed to cover due
                        # to concerns of backward compatibility. We're
                        # addressing it here to make sure IncompleteRead is
                        # raised during streaming, so all calls with incorrect
                        # Content-Length are caught.
                        raise IncompleteRead(self._fp_bytes_read, self.length_remaining)

        if data:
            # Track raw wire bytes before any content decoding happens.
            self._fp_bytes_read += len(data)
            if self.length_remaining is not None:
                self.length_remaining -= len(data)

            data = self._decode(data, decode_content, flush_decoder)

            if cache_content:
                self._body = data

        return data

    def stream(self, amt=2**16, decode_content=None):
        """
        A generator wrapper for the read() method. A call will block until
        ``amt`` bytes have been read from the connection or until the
        connection is closed.

        :param amt:
            How much of the content to read. The generator will return up to
            much data per iteration, but may return less. This is particularly
            likely when using compressed data. However, the empty string will
            never be returned.

        :param decode_content:
            If True, will attempt to decode the body based on the
            'content-encoding' header.
        """
        if self.chunked and self.supports_chunked_reads():
            # Delegate to the chunk-aware reader for chunked bodies.
            for line in self.read_chunked(amt, decode_content=decode_content):
                yield line
        else:
            while not is_fp_closed(self._fp):
                data = self.read(amt=amt, decode_content=decode_content)

                # Empty reads (e.g. mid-decompression) are never yielded.
                if data:
                    yield data

    @classmethod
    def from_httplib(ResponseCls, r, **response_kw):
        """
        Given an :class:`httplib.HTTPResponse` instance ``r``, return a
        corresponding :class:`urllib3.response.HTTPResponse` object.

        Remaining parameters are passed to the HTTPResponse constructor, along
        with ``original_response=r``.
        """
        headers = r.msg

        # Normalize the header container across Python versions.
        if not isinstance(headers, HTTPHeaderDict):
            if PY3:  # Python 3
                headers = HTTPHeaderDict(headers.items())
            else:  # Python 2
                headers = HTTPHeaderDict.from_httplib(headers)

        # HTTPResponse objects in Python 3 don't have a .strict attribute
        strict = getattr(r, 'strict', 0)
        resp = ResponseCls(body=r,
                           headers=headers,
                           status=r.status,
                           version=r.version,
                           reason=r.reason,
                           strict=strict,
                           original_response=r,
                           **response_kw)
        return resp

    # Backwards-compatibility methods for httplib.HTTPResponse
    def getheaders(self):
        """Return the response headers (httplib compatibility shim)."""
        return self.headers

    def getheader(self, name, default=None):
        """Return a single header value, or ``default`` when absent."""
        return self.headers.get(name, default)

    # Backwards compatibility for http.cookiejar
    def info(self):
        """Return the headers, mirroring the mimetools message API."""
        return self.headers

    # Overrides from io.IOBase
    def close(self):
        """Close the wrapped file object and any held connection."""
        if not self.closed:
            self._fp.close()

        connection = self._connection
        if connection:
            connection.close()

    @property
    def closed(self):
        """Whether the underlying body stream is closed/exhausted."""
        fp = self._fp
        if fp is None:
            return True
        # httplib responses expose isclosed(); ordinary file objects
        # expose a .closed flag; anything else is treated as closed.
        if hasattr(fp, 'isclosed'):
            return fp.isclosed()
        if hasattr(fp, 'closed'):
            return fp.closed
        return True

    def fileno(self):
        """Return the file descriptor of the wrapped file object.

        :raises IOError: if there is no file object, or it exposes no
            file descriptor.
        """
        fp = self._fp
        if fp is None:
            raise IOError("HTTPResponse has no file to get a fileno from")
        if hasattr(fp, "fileno"):
            return fp.fileno()
        raise IOError("The file-like object this HTTPResponse is wrapped "
                      "around has no file descriptor")

    def flush(self):
        """Flush the wrapped file object, if present and flushable."""
        fp = self._fp
        if fp is not None and hasattr(fp, 'flush'):
            return fp.flush()

    def readable(self):
        # Part of the io.IOBase interface: this stream is always readable.
        return True

    def readinto(self, b):
        """Read up to ``len(b)`` bytes into buffer ``b`` (io compatibility).

        :returns: number of bytes copied into the buffer (0 at EOF).
        """
        chunk = self.read(len(b))
        count = len(chunk)
        if count:
            b[:count] = chunk
        return count

    def supports_chunked_reads(self):
        """
        Report whether the wrapped file-like object looks like a
        httplib.HTTPResponse, i.e. carries an ``fp`` attribute. When it
        does, read_chunked() can pull raw protocol chunks through it.
        """
        # NOTE: must stay a hasattr test -- httplib sets fp=None once the
        # body is exhausted, and that still counts as "supported".
        inner = self._fp
        return hasattr(inner, 'fp')

    def _update_chunk_length(self):
        # First, we'll figure out length of a chunk and then
        # we'll try to read it from socket.
        if self.chunk_left is not None:
            return
        line = self._fp.fp.readline()
        line = line.split(b';', 1)[0]
        try:
            self.chunk_left = int(line, 16)
        except ValueError:
            # Invalid chunked protocol response, abort.
            self.close()
            raise httplib.IncompleteRead(line)

    def _handle_chunk(self, amt):
        returned_chunk = None
        if amt is None:
            chunk = self._fp._safe_read(self.chunk_left)
            returned_chunk = chunk
            self._fp._safe_read(2)  # Toss the CRLF at the end of the chunk.
            self.chunk_left = None
        elif amt < self.chunk_left:
            value = self._fp._safe_read(amt)
            self.chunk_left = self.chunk_left - amt
            returned_chunk = value
        elif amt == self.chunk_left:
            value = self._fp._safe_read(amt)
            self._fp._safe_read(2)  # Toss the CRLF at the end of the chunk.
            self.chunk_left = None
            returned_chunk = value
        else:  # amt > self.chunk_left
            returned_chunk = self._fp._safe_read(self.chunk_left)
            self._fp._safe_read(2)  # Toss the CRLF at the end of the chunk.
            self.chunk_left = None
        return returned_chunk

    def read_chunked(self, amt=None, decode_content=None):
        """
        Similar to :meth:`HTTPResponse.read`, but with an additional
        parameter: ``decode_content``.

        :param amt:
            Maximum number of bytes to pull per protocol chunk; ``None``
            yields whole chunks as they arrive.

        :param decode_content:
            If True, will attempt to decode the body based on the
            'content-encoding' header.

        :raises ResponseNotChunked: when the response was not sent with
            ``Transfer-Encoding: chunked``.
        :raises BodyNotHttplibCompatible: when the wrapped body does not
            expose the raw-chunk ``fp`` attribute read_chunked() relies on.
        """
        self._init_decoder()
        # FIXME: Rewrite this method and make it a class with a better structured logic.
        if not self.chunked:
            raise ResponseNotChunked(
                "Response is not chunked. "
                "Header 'transfer-encoding: chunked' is missing.")
        if not self.supports_chunked_reads():
            # Fix: original message contained the duplicated word "have have".
            raise BodyNotHttplibCompatible(
                "Body should be httplib.HTTPResponse like. "
                "It should have an fp attribute which returns raw chunks.")

        # Don't bother reading the body of a HEAD request.
        if self._original_response and is_response_to_head(self._original_response):
            self._original_response.close()
            return

        with self._error_catcher():
            while True:
                self._update_chunk_length()
                if self.chunk_left == 0:
                    # A zero-length chunk marks the end of the body.
                    break
                chunk = self._handle_chunk(amt)
                decoded = self._decode(chunk, decode_content=decode_content,
                                       flush_decoder=False)
                if decoded:
                    yield decoded

            if decode_content:
                # On CPython and PyPy, we should never need to flush the
                # decoder. However, on Jython we *might* need to, so
                # lets defensively do it anyway.
                decoded = self._flush_decoder()
                if decoded:  # Platform-specific: Jython.
                    yield decoded

            # Chunk content ends with \r\n: discard it.
            while True:
                line = self._fp.fp.readline()
                if not line:
                    # Some sites may not end with '\r\n'.
                    break
                if line == b'\r\n':
                    break

            # We read everything; close the "file".
            if self._original_response:
                self._original_response.close()
_vendor/urllib3/contrib/__pycache__/pyopenssl.cpython-36.pyc000064400000033337151733136330020124 0ustar003

�Pf�;�@s,dZddlmZddlZddlmZddlmZ	ddl
mZddlm
Z
mZddlmZydd	lmZWn$ek
r�dZd
dlmZYnXddlZddlZd
dlmZddlZd
d
lmZddgZdZejej j!ej"ej j#iZ$e%ed�o�e%ej d��rej j&e$ej'<e%ed��r0e%ej d��r0ej j(e$ej)<ye$j*ej+ej j,i�Wne-k
�r^YnXej.ej j/ej0ej j1ej2ej j1ej j3iZ4e5dd�e4j6�D��Z7dZ8ejZ9ej:j;Z<ej=e>�Z?dd�Z@dd�ZAdd�ZBdd�ZCdd�ZDGd d!�d!eE�ZFe�rd*d#d$�ZGneZGeGeF_GGd%d&�d&eE�ZHd'd(�ZIdS)+ab
SSL with SNI_-support for Python 2. Follow these instructions if you would
like to verify SSL certificates in Python 2. Note, the default libraries do
*not* do certificate checking; you need to do additional work to validate
certificates yourself.

This needs the following packages installed:

* pyOpenSSL (tested with 16.0.0)
* cryptography (minimum 1.3.4, from pyopenssl)
* idna (minimum 2.0, from cryptography)

However, pyopenssl depends on cryptography, which depends on idna, so while we
use all three directly here we end up having relatively few packages required.

You can install them with the following command:

    pip install pyopenssl cryptography idna

To activate certificate checking, call
:func:`~urllib3.contrib.pyopenssl.inject_into_urllib3` from your Python code
before you begin making HTTP requests. This can be done in a ``sitecustomize``
module, or at any other time before your application begins using ``urllib3``,
like this::

    try:
        import urllib3.contrib.pyopenssl
        urllib3.contrib.pyopenssl.inject_into_urllib3()
    except ImportError:
        pass

Now you can use :mod:`urllib3` as you normally would, and it will support SNI
when the required modules are installed.

Activating this module also has the positive side effect of disabling SSL/TLS
compression in Python 2 (see `CRIME attack`_).

If you want to configure the default list of supported cipher suites, you can
set the ``urllib3.contrib.pyopenssl.DEFAULT_SSL_CIPHER_LIST`` variable.

.. _sni: https://en.wikipedia.org/wiki/Server_Name_Indication
.. _crime attack: https://en.wikipedia.org/wiki/CRIME_(security_exploit)
�)�absolute_importN)�x509)�backend)�_Certificate)�timeout�error)�BytesIO)�_fileobject�)�backport_makefile)�six)�util�inject_into_urllib3�extract_from_urllib3T�PROTOCOL_TLSv1_1�TLSv1_1_METHOD�PROTOCOL_TLSv1_2�TLSv1_2_METHODccs|]\}}||fVqdS)N�)�.0�k�vrr�/usr/lib/python3.6/pyopenssl.py�	<genexpr>`sri@cCs.t�ttj_tt_ttj_dt_dtj_dS)z7Monkey-patch urllib3 with PyOpenSSL-backed SSL-support.TN)�_validate_dependencies_met�PyOpenSSLContextr
�ssl_�
SSLContext�HAS_SNI�IS_PYOPENSSLrrrrrmscCs(ttj_tt_ttj_dt_dtj_dS)z4Undo monkey-patching by :func:`inject_into_urllib3`.FN)�orig_util_SSLContextr
rr�orig_util_HAS_SNIrrrrrrrys
cCsRddlm}t|dd�dkr$td��ddlm}|�}t|dd�dkrNtd��dS)	z{
    Verifies that PyOpenSSL's package-level dependencies have been met.
    Throws `ImportError` if they are not met.
    r)�
Extensions�get_extension_for_classNzX'cryptography' module missing required functionality.  Try upgrading to v1.3.4 or newer.)�X509�_x509zS'pyOpenSSL' module missing required functionality. Try upgrading to v0.14 or newer.)Zcryptography.x509.extensionsr"�getattr�ImportErrorZOpenSSL.cryptor$)r"r$rrrrr�srcCs(dd�}||�}tjdkr$|jd�}|S)a�
    Converts a dNSName SubjectAlternativeName field to the form used by the
    standard library on the given Python version.

    Cryptography produces a dNSName as a unicode string that was idna-decoded
    from ASCII bytes. We need to idna-encode that string to get it back, and
    then on Python 3 we also need to convert to unicode via UTF-8 (the stdlib
    uses PyUnicode_FromStringAndSize on it, which decodes via UTF-8).
    cSsNddl}x:dD]2}|j|�r|t|�d�}|jd�|j|�SqW|j|�S)z�
        Borrowed wholesale from the Python Cryptography Project. It turns out
        that we can't just safely call `idna.encode`: it can explode for
        wildcard names. This avoids that problem.
        rN�*.�.�ascii)r(r))�idna�
startswith�len�encode)�namer+�prefixrrr�idna_encode�s

z'_dnsname_to_stdlib.<locals>.idna_encode�rzutf-8)r2r)�sys�version_info�decode)r/r1rrr�_dnsname_to_stdlib�s



r6cCs�t|d�r|j�}ntt|j�}y|jjtj�j	}WnNtj
k
rJgStjtjtj
tfk
r�}ztjd|�gSd}~XnXdd�|jtj�D�}|jdd�|jtj�D��|S)zU
    Given an PyOpenSSL certificate, provides all the subject alternative names.
    �to_cryptographyz�A problem was encountered with the certificate that prevented urllib3 from finding the SubjectAlternativeName field. This can affect certificate validation. The error was %sNcSsg|]}dt|�f�qS)ZDNS)r6)rr/rrr�
<listcomp>�sz%get_subj_alt_name.<locals>.<listcomp>css|]}dt|�fVqdS)z
IP AddressN)�str)rr/rrrr�sz$get_subj_alt_name.<locals>.<genexpr>)�hasattrr7r�openssl_backendr%�
extensionsr#rZSubjectAlternativeName�valueZExtensionNotFoundZDuplicateExtensionZUnsupportedExtensionZUnsupportedGeneralNameType�UnicodeError�logZwarningZget_values_for_typeZDNSName�extendZ	IPAddress)Z	peer_certZcertZext�e�namesrrr�get_subj_alt_name�s(


	rCc@s|eZdZdZddd�Zdd�Zdd�Zd	d
�Zdd�Zd
d�Z	dd�Z
dd�Zdd�Zdd�Z
d dd�Zdd�Zdd�ZdS)!�
WrappedSocketz�API-compatibility wrapper for Python OpenSSL's Connection-class.

    Note: _makefile_refs, _drop() and _reuse() are needed for the garbage
    collector of pypy.
    TcCs"||_||_||_d|_d|_dS)NrF)�
connection�socket�suppress_ragged_eofs�_makefile_refs�_closed)�selfrErFrGrrr�__init__�s
zWrappedSocket.__init__cCs
|jj�S)N)rF�fileno)rJrrrrL�szWrappedSocket.filenocCs*|jdkr|jd8_|jr&|j�dS)Nr�)rHrI�close)rJrrr�_decref_socketios�s
zWrappedSocket._decref_socketioscOs�y|jj||�}Wn�tjjk
rX}z&|jr<|jdkr<dStt|���WYdd}~Xn�tjj	k
r�}z|jj
�tjjkr�dS�WYdd}~XnJtjjk
r�t
j|j|jj��}|s�td��n|j||�SYnX|SdS)NrM�Unexpected EOF�zThe read operation timed out���)rRrP)rE�recv�OpenSSL�SSL�SysCallErrorrG�args�SocketErrorr9�ZeroReturnError�get_shutdown�RECEIVED_SHUTDOWN�
WantReadErrorr
�
wait_for_readrF�
gettimeoutr)rJrW�kwargs�datarA�rdrrrrSs 
zWrappedSocket.recvcOs�y|jj||�Stjjk
rT}z&|jr8|jdkr8dStt|���WYdd}~Xn�tjj	k
r�}z|jj
�tjjkr~dS�WYdd}~XnFtjjk
r�t
j|j|jj��}|s�td��n|j||�SYnXdS)NrM�Unexpected EOFrzThe read operation timed outrR)rRrb)rE�	recv_intorTrUrVrGrWrXr9rYrZr[r\r
r]rFr^r)rJrWr_rArarrrrcs
zWrappedSocket.recv_intocCs|jj|�S)N)rF�
settimeout)rJrrrrrd*szWrappedSocket.settimeoutcCs�xzy|jj|�Stjjk
rFtj|j|jj��}|s@t	��wYqtjj
k
rv}ztt|���WYdd}~XqXqWdS)N)
rE�sendrTrUZWantWriteErrorr
Zwait_for_writerFr^rrVrXr9)rJr`�wrrArrr�_send_until_done-szWrappedSocket._send_until_donecCs8d}x.|t|�kr2|j|||t��}||7}qWdS)Nr)r-rg�SSL_WRITE_BLOCKSIZE)rJr`Z
total_sentZsentrrr�sendall9szWrappedSocket.sendallcCs|jj�dS)N)rE�shutdown)rJrrrrj?szWrappedSocket.shutdowncCsH|jdkr6yd|_|jj�Stjjk
r2dSXn|jd8_dS)NrMT)rHrIrErNrTrU�Error)rJrrrrNCs

zWrappedSocket.closeFcCsD|jj�}|s|S|r(tjjtjj|�Sd|j�jffft|�d�S)NZ
commonName)ZsubjectZsubjectAltName)	rEZget_peer_certificaterTZcryptoZdump_certificateZ
FILETYPE_ASN1Zget_subjectZCNrC)rJZbinary_formrrrr�getpeercertMs
zWrappedSocket.getpeercertcCs|jd7_dS)NrM)rH)rJrrr�_reuse_szWrappedSocket._reusecCs&|jdkr|j�n|jd8_dS)NrM)rHrN)rJrrr�_dropbs

zWrappedSocket._dropN)T)F)�__name__�
__module__�__qualname__�__doc__rKrLrOrSrcrdrgrirjrNrlrmrnrrrrrD�s


rDrMcCs|jd7_t|||dd�S)NrMT)rN)rHr	)rJ�mode�bufsizerrr�makefilejsruc@szeZdZdZdd�Zedd��Zejdd��Zedd��Zejd	d��Zd
d�Z	dd
�Z
ddd�Zddd�Zddd�Z
dS)rz�
    I am a wrapper class for the PyOpenSSL ``Context`` object. I am responsible
    for translating the interface of the standard library ``SSLContext`` object
    to calls into PyOpenSSL.
    cCs*t||_tjj|j�|_d|_d|_dS)NrF)�_openssl_versions�protocolrTrUZContext�_ctx�_optionsZcheck_hostname)rJrwrrrrKys
zPyOpenSSLContext.__init__cCs|jS)N)ry)rJrrr�optionsszPyOpenSSLContext.optionscCs||_|jj|�dS)N)ryrxZset_options)rJr=rrrrz�scCst|jj�S)N)�_openssl_to_stdlib_verifyrxZget_verify_mode)rJrrr�verify_mode�szPyOpenSSLContext.verify_modecCs|jjt|t�dS)N)rxZ
set_verify�_stdlib_to_openssl_verify�_verify_callback)rJr=rrrr|�scCs|jj�dS)N)rx�set_default_verify_paths)rJrrrr�sz)PyOpenSSLContext.set_default_verify_pathscCs&t|tj�r|jd�}|jj|�dS)Nzutf-8)�
isinstancer�	text_typer.rxZset_cipher_list)rJZciphersrrr�set_ciphers�s
zPyOpenSSLContext.set_ciphersNcCsN|dk	r|jd�}|dk	r$|jd�}|jj||�|dk	rJ|jjt|��dS)Nzutf-8)r.rx�load_verify_locationsr)rJZcafileZcapathZcadatarrrr��s

z&PyOpenSSLContext.load_verify_locationscs<|jj|��dk	r(|jj�fdd��|jj|p4|�dS)Ncs�S)Nr)Z
max_lengthZprompt_twiceZuserdata)�passwordrr�<lambda>�sz2PyOpenSSLContext.load_cert_chain.<locals>.<lambda>)rxZuse_certificate_fileZ
set_passwd_cbZuse_privatekey_file)rJZcertfileZkeyfiler�r)r�r�load_cert_chain�sz PyOpenSSLContext.load_cert_chainFTc	Cs�tjj|j|�}t|tj�r&|jd�}|dk	r8|j|�|j	�x|y|j
�Wnhtjjk
r�tj
||j��}|s~td��wBYn4tjjk
r�}ztjd|��WYdd}~XnXPqBWt||�S)Nzutf-8zselect timed outzbad handshake: %r)rTrUZ
Connectionrxr�rr�r.Zset_tlsext_host_nameZset_connect_stateZdo_handshaker\r
r]r^rrk�sslZSSLErrorrD)	rJZsockZserver_sideZdo_handshake_on_connectrGZserver_hostname�cnxrarArrr�wrap_socket�s$

 zPyOpenSSLContext.wrap_socket)NNN)NN)FTTN)rorprqrrrK�propertyrz�setterr|rr�r�r�r�rrrrrss
	
rcCs|dkS)Nrr)r�rZerr_noZ	err_depthZreturn_coderrrr~�sr~rR)rR)JrrZ
__future__rZOpenSSL.SSLrTZcryptographyrZ$cryptography.hazmat.backends.opensslrr;Z)cryptography.hazmat.backends.openssl.x509rrFrrrX�iorr	r'Zpackages.backports.makefilerZloggingr�Zpackagesrr3�r
�__all__rZPROTOCOL_SSLv23rUZ
SSLv23_METHODZPROTOCOL_TLSv1ZTLSv1_METHODrvr:rrrr�updateZPROTOCOL_SSLv3ZSSLv3_METHOD�AttributeErrorZ	CERT_NONEZVERIFY_NONEZ
CERT_OPTIONALZVERIFY_PEERZ
CERT_REQUIREDZVERIFY_FAIL_IF_NO_PEER_CERTr}�dict�itemsr{rhr!rrr Z	getLoggerror?rrrr6rC�objectrDrurr~rrrr�<module>+sh




3S_vendor/urllib3/contrib/__pycache__/ntlmpool.cpython-36.pyc000064400000006131151733136340017725 0ustar003

�Pf~�@s\dZddlmZddlmZddlmZddlmZddlm	Z	ee
�ZGdd	�d	e�Zd
S)z
NTLM authenticating pool, contributed by erikcederstran

Issue #10, see: http://code.google.com/p/urllib3/issues/detail?id=10
�)�absolute_import)�	getLogger)�ntlm�)�HTTPSConnectionPool)�HTTPSConnectioncs:eZdZdZdZ�fdd�Zdd�Zd�fd
d�	Z�ZS)
�NTLMConnectionPoolzQ
    Implements an NTLM authentication version of an urllib3 connection pool
    ZhttpscsLtt|�j||�||_||_|jdd�}|dj�|_|d|_||_	dS)z�
        authurl is a random URL on the server that is protected by NTLM.
        user is the Windows user, probably in the DOMAIN\username format.
        pw is the password for the user.
        �\�rN)
�superr�__init__�authurl�rawuser�split�upper�domain�user�pw)�selfrrr
�args�kwargsZ
user_parts)�	__class__��/usr/lib/python3.6/ntlmpool.pyrs
zNTLMConnectionPool.__init__c
Cs�|jd7_tjd|j|j|j�i}d|d<d}d}t|j|jd�}dtj|j	�||<tjd	|�|j
d
|jd|�|j�}t|j
��}tjd|j|j�tjd|�tjd
|jd��d|_||jd�}d}x(|D] }	|	dd�dkr�|	dd�}q�W|dk�rtd|||f��tj|�\}
}tj|
|j|j|j|�}d|||<tjd	|�|j
d
|jd|�|j�}tjd|j|j�tjdt|j
���tjd
|j�dd��|jdk�r�|jdk�r�td��td|j|jf��d|_tjd�|S)Nr
z3Starting NTLM HTTPS connection no. %d: https://%s%sz
Keep-Alive�
ConnectionZ
Authorizationzwww-authenticate)�host�portzNTLM %szRequest headers: %sZGETzResponse status: %s %szResponse headers: %szResponse data: %s [...]�dz, �zNTLM z!Unexpected %s response header: %s��i�z3Server rejected request: wrong username or passwordzWrong server response: %s %szConnection established)Znum_connections�log�debugrr
rrrZcreate_NTLM_NEGOTIATE_MESSAGErZrequestZgetresponse�dictZ
getheadersZstatus�reason�read�fpr�	ExceptionZparse_NTLM_CHALLENGE_MESSAGEZ create_NTLM_AUTHENTICATE_MESSAGErrr)
r�headersZ
req_headerZresp_headerZconn�resZreshdrZauth_header_valuesZauth_header_value�sZServerChallengeZNegotiateFlagsZauth_msgrrr�	_new_conn's\


zNTLMConnectionPool._new_connN�Tcs0|dkri}d|d<tt|�j|||||||�S)Nz
Keep-Aliver)rr�urlopen)r�methodZurlZbodyr'ZretriesZredirectZassert_same_host)rrrr,hszNTLMConnectionPool.urlopen)NNr+TT)	�__name__�
__module__�__qualname__�__doc__�schemerr*r,�
__classcell__rr)rrrsArN)
r1Z
__future__rZloggingrr�rZpackages.six.moves.http_clientrr.r rrrrr�<module>s_vendor/urllib3/contrib/__pycache__/socks.cpython-36.pyc000064400000011162151733136340017203 0ustar003

�Pf3�@s(dZddlmZyddlZWn6ek
rRddlZddlmZejde��YnXddl	m
ZmZ
ddlmZmZdd	lmZmZdd
lmZmZddlmZddlmZyddlZWnek
r�dZYnXGd
d�de�ZGdd�dee�ZGdd�de�ZGdd�de�ZGdd�de�ZdS)a�
This module contains provisional support for SOCKS proxies from within
urllib3. This module supports SOCKS4 (specifically the SOCKS4A variant) and
SOCKS5. To enable its functionality, either install PySocks or install this
module with the ``socks`` extra.

The SOCKS implementation supports the full range of urllib3 features. It also
supports the following SOCKS features:

- SOCKS4
- SOCKS4a
- SOCKS5
- Usernames and passwords for the SOCKS proxy

Known Limitations:

- Currently PySocks does not support contacting remote websites via literal
  IPv6 addresses. Any such connection attempt will fail. You must use a domain
  name.
- Currently PySocks does not support IPv6 connections to the SOCKS proxy. Any
  such connection attempt will fail.
�)�absolute_importN�)�DependencyWarningz�SOCKS support in urllib3 requires the installation of optional dependencies: specifically, PySocks.  For more information, see https://urllib3.readthedocs.io/en/latest/contrib.html#socks-proxies)�error�timeout)�HTTPConnection�HTTPSConnection)�HTTPConnectionPool�HTTPSConnectionPool)�ConnectTimeoutError�NewConnectionError)�PoolManager)�	parse_urlcs(eZdZdZ�fdd�Zdd�Z�ZS)�SOCKSConnectionzG
    A plain-text HTTP connection that connects via a SOCKS proxy.
    cs"|jd�|_tt|�j||�dS)N�_socks_options)�popr�superr�__init__)�self�args�kwargs)�	__class__��/usr/lib/python3.6/socks.pyr?szSOCKSConnection.__init__cCsXi}|jr|j|d<|jr$|j|d<yTtj|j|jff|jd|jd|jd|jd|jd|jd|jd	�|��}Wn�tk
r�}zt	|d
|j|jf��WYdd}~Xn�tj
k
�r"}zT|j�r|j}t|t�r�t	|d
|j|jf��nt
|d|��nt
|d|��WYdd}~Xn2tk
�rR}zt
|d|��WYdd}~XnX|S)
zA
        Establish a new connection via the SOCKS proxy.
        �source_address�socket_options�
socks_version�
proxy_host�
proxy_port�username�password�rdns)Z
proxy_typeZ
proxy_addrrZproxy_usernameZproxy_passwordZ
proxy_rdnsrz0Connection to %s timed out. (connect timeout=%s)Nz(Failed to establish a new connection: %s)rr�socksZcreate_connection�host�portrr�
SocketTimeoutrZ
ProxyErrorZ
socket_err�
isinstancer�SocketError)rZextra_kwZconn�errrr�	_new_connCsL

 
zSOCKSConnection._new_conn)�__name__�
__module__�__qualname__�__doc__rr)�
__classcell__rr)rrr;src@seZdZdS)�SOCKSHTTPSConnectionN)r*r+r,rrrrr/�sr/c@seZdZeZdS)�SOCKSHTTPConnectionPoolN)r*r+r,r�
ConnectionClsrrrrr0�sr0c@seZdZeZdS)�SOCKSHTTPSConnectionPoolN)r*r+r,r/r1rrrrr2�sr2cs,eZdZdZeed�Zd�fdd�	Z�ZS)�SOCKSProxyManagerzh
    A version of the urllib3 ProxyManager that routes connections via the
    defined SOCKS proxy.
    )ZhttpZhttpsN�
cs�t|�}|jdkrtj}d}	nN|jdkr4tj}d}	n8|jdkrJtj}d}	n"|jdkr`tj}d}	ntd|��||_||j|j|||	d�}
|
|d	<t	t
|�j||f|�t
j|_dS)
NZsocks5FZsocks5hTZsocks4Zsocks4az)Unable to determine SOCKS version from %s)rrrrr r!r)
r�schemer"ZPROXY_TYPE_SOCKS5ZPROXY_TYPE_SOCKS4�
ValueError�	proxy_urlr#r$rr3r�pool_classes_by_scheme)rr7rr Z	num_poolsZheadersZconnection_pool_kwZparsedrr!Z
socks_options)rrrr�s4





zSOCKSProxyManager.__init__)NNr4N)	r*r+r,r-r0r2r8rr.rr)rrr3�s
r3) r-Z
__future__rr"�ImportError�warnings�
exceptionsr�warnZsocketrr'rr%Z
connectionrrZconnectionpoolr	r
rrZpoolmanagerr
Zutil.urlrZsslrr/r0r2r3rrrr�<module>s2
F_vendor/urllib3/contrib/__pycache__/pyopenssl.cpython-36.opt-1.pyc000064400000033337151733136340021064 0ustar003

�Pf�;�@s,dZddlmZddlZddlmZddlmZ	ddl
mZddlm
Z
mZddlmZydd	lmZWn$ek
r�dZd
dlmZYnXddlZddlZd
dlmZddlZd
d
lmZddgZdZejej j!ej"ej j#iZ$e%ed�o�e%ej d��rej j&e$ej'<e%ed��r0e%ej d��r0ej j(e$ej)<ye$j*ej+ej j,i�Wne-k
�r^YnXej.ej j/ej0ej j1ej2ej j1ej j3iZ4e5dd�e4j6�D��Z7dZ8ejZ9ej:j;Z<ej=e>�Z?dd�Z@dd�ZAdd�ZBdd�ZCdd�ZDGd d!�d!eE�ZFe�rd*d#d$�ZGneZGeGeF_GGd%d&�d&eE�ZHd'd(�ZIdS)+ab
SSL with SNI_-support for Python 2. Follow these instructions if you would
like to verify SSL certificates in Python 2. Note, the default libraries do
*not* do certificate checking; you need to do additional work to validate
certificates yourself.

This needs the following packages installed:

* pyOpenSSL (tested with 16.0.0)
* cryptography (minimum 1.3.4, from pyopenssl)
* idna (minimum 2.0, from cryptography)

However, pyopenssl depends on cryptography, which depends on idna, so while we
use all three directly here we end up having relatively few packages required.

You can install them with the following command:

    pip install pyopenssl cryptography idna

To activate certificate checking, call
:func:`~urllib3.contrib.pyopenssl.inject_into_urllib3` from your Python code
before you begin making HTTP requests. This can be done in a ``sitecustomize``
module, or at any other time before your application begins using ``urllib3``,
like this::

    try:
        import urllib3.contrib.pyopenssl
        urllib3.contrib.pyopenssl.inject_into_urllib3()
    except ImportError:
        pass

Now you can use :mod:`urllib3` as you normally would, and it will support SNI
when the required modules are installed.

Activating this module also has the positive side effect of disabling SSL/TLS
compression in Python 2 (see `CRIME attack`_).

If you want to configure the default list of supported cipher suites, you can
set the ``urllib3.contrib.pyopenssl.DEFAULT_SSL_CIPHER_LIST`` variable.

.. _sni: https://en.wikipedia.org/wiki/Server_Name_Indication
.. _crime attack: https://en.wikipedia.org/wiki/CRIME_(security_exploit)
�)�absolute_importN)�x509)�backend)�_Certificate)�timeout�error)�BytesIO)�_fileobject�)�backport_makefile)�six)�util�inject_into_urllib3�extract_from_urllib3T�PROTOCOL_TLSv1_1�TLSv1_1_METHOD�PROTOCOL_TLSv1_2�TLSv1_2_METHODccs|]\}}||fVqdS)N�)�.0�k�vrr�/usr/lib/python3.6/pyopenssl.py�	<genexpr>`sri@cCs.t�ttj_tt_ttj_dt_dtj_dS)z7Monkey-patch urllib3 with PyOpenSSL-backed SSL-support.TN)�_validate_dependencies_met�PyOpenSSLContextr
�ssl_�
SSLContext�HAS_SNI�IS_PYOPENSSLrrrrrmscCs(ttj_tt_ttj_dt_dtj_dS)z4Undo monkey-patching by :func:`inject_into_urllib3`.FN)�orig_util_SSLContextr
rr�orig_util_HAS_SNIrrrrrrrys
cCsRddlm}t|dd�dkr$td��ddlm}|�}t|dd�dkrNtd��dS)	z{
    Verifies that PyOpenSSL's package-level dependencies have been met.
    Throws `ImportError` if they are not met.
    r)�
Extensions�get_extension_for_classNzX'cryptography' module missing required functionality.  Try upgrading to v1.3.4 or newer.)�X509�_x509zS'pyOpenSSL' module missing required functionality. Try upgrading to v0.14 or newer.)Zcryptography.x509.extensionsr"�getattr�ImportErrorZOpenSSL.cryptor$)r"r$rrrrr�srcCs(dd�}||�}tjdkr$|jd�}|S)a�
    Converts a dNSName SubjectAlternativeName field to the form used by the
    standard library on the given Python version.

    Cryptography produces a dNSName as a unicode string that was idna-decoded
    from ASCII bytes. We need to idna-encode that string to get it back, and
    then on Python 3 we also need to convert to unicode via UTF-8 (the stdlib
    uses PyUnicode_FromStringAndSize on it, which decodes via UTF-8).
    cSsNddl}x:dD]2}|j|�r|t|�d�}|jd�|j|�SqW|j|�S)z�
        Borrowed wholesale from the Python Cryptography Project. It turns out
        that we can't just safely call `idna.encode`: it can explode for
        wildcard names. This avoids that problem.
        rN�*.�.�ascii)r(r))�idna�
startswith�len�encode)�namer+�prefixrrr�idna_encode�s

z'_dnsname_to_stdlib.<locals>.idna_encode�rzutf-8)r2r)�sys�version_info�decode)r/r1rrr�_dnsname_to_stdlib�s



r6cCs�t|d�r|j�}ntt|j�}y|jjtj�j	}WnNtj
k
rJgStjtjtj
tfk
r�}ztjd|�gSd}~XnXdd�|jtj�D�}|jdd�|jtj�D��|S)zU
    Given an PyOpenSSL certificate, provides all the subject alternative names.
    �to_cryptographyz�A problem was encountered with the certificate that prevented urllib3 from finding the SubjectAlternativeName field. This can affect certificate validation. The error was %sNcSsg|]}dt|�f�qS)ZDNS)r6)rr/rrr�
<listcomp>�sz%get_subj_alt_name.<locals>.<listcomp>css|]}dt|�fVqdS)z
IP AddressN)�str)rr/rrrr�sz$get_subj_alt_name.<locals>.<genexpr>)�hasattrr7r�openssl_backendr%�
extensionsr#rZSubjectAlternativeName�valueZExtensionNotFoundZDuplicateExtensionZUnsupportedExtensionZUnsupportedGeneralNameType�UnicodeError�logZwarningZget_values_for_typeZDNSName�extendZ	IPAddress)Z	peer_certZcertZext�e�namesrrr�get_subj_alt_name�s(


	rCc@s|eZdZdZddd�Zdd�Zdd�Zd	d
�Zdd�Zd
d�Z	dd�Z
dd�Zdd�Zdd�Z
d dd�Zdd�Zdd�ZdS)!�
WrappedSocketz�API-compatibility wrapper for Python OpenSSL's Connection-class.

    Note: _makefile_refs, _drop() and _reuse() are needed for the garbage
    collector of pypy.
    TcCs"||_||_||_d|_d|_dS)NrF)�
connection�socket�suppress_ragged_eofs�_makefile_refs�_closed)�selfrErFrGrrr�__init__�s
zWrappedSocket.__init__cCs
|jj�S)N)rF�fileno)rJrrrrL�szWrappedSocket.filenocCs*|jdkr|jd8_|jr&|j�dS)Nr�)rHrI�close)rJrrr�_decref_socketios�s
zWrappedSocket._decref_socketioscOs�y|jj||�}Wn�tjjk
rX}z&|jr<|jdkr<dStt|���WYdd}~Xn�tjj	k
r�}z|jj
�tjjkr�dS�WYdd}~XnJtjjk
r�t
j|j|jj��}|s�td��n|j||�SYnX|SdS)NrM�Unexpected EOF�zThe read operation timed out���)rRrP)rE�recv�OpenSSL�SSL�SysCallErrorrG�args�SocketErrorr9�ZeroReturnError�get_shutdown�RECEIVED_SHUTDOWN�
WantReadErrorr
�
wait_for_readrF�
gettimeoutr)rJrW�kwargs�datarA�rdrrrrSs 
zWrappedSocket.recvcOs�y|jj||�Stjjk
rT}z&|jr8|jdkr8dStt|���WYdd}~Xn�tjj	k
r�}z|jj
�tjjkr~dS�WYdd}~XnFtjjk
r�t
j|j|jj��}|s�td��n|j||�SYnXdS)NrM�Unexpected EOFrzThe read operation timed outrR)rRrb)rE�	recv_intorTrUrVrGrWrXr9rYrZr[r\r
r]rFr^r)rJrWr_rArarrrrcs
zWrappedSocket.recv_intocCs|jj|�S)N)rF�
settimeout)rJrrrrrd*szWrappedSocket.settimeoutcCs�xzy|jj|�Stjjk
rFtj|j|jj��}|s@t	��wYqtjj
k
rv}ztt|���WYdd}~XqXqWdS)N)
rE�sendrTrUZWantWriteErrorr
Zwait_for_writerFr^rrVrXr9)rJr`�wrrArrr�_send_until_done-szWrappedSocket._send_until_donecCs8d}x.|t|�kr2|j|||t��}||7}qWdS)Nr)r-rg�SSL_WRITE_BLOCKSIZE)rJr`Z
total_sentZsentrrr�sendall9szWrappedSocket.sendallcCs|jj�dS)N)rE�shutdown)rJrrrrj?szWrappedSocket.shutdowncCsH|jdkr6yd|_|jj�Stjjk
r2dSXn|jd8_dS)NrMT)rHrIrErNrTrU�Error)rJrrrrNCs

zWrappedSocket.closeFcCsD|jj�}|s|S|r(tjjtjj|�Sd|j�jffft|�d�S)NZ
commonName)ZsubjectZsubjectAltName)	rEZget_peer_certificaterTZcryptoZdump_certificateZ
FILETYPE_ASN1Zget_subjectZCNrC)rJZbinary_formrrrr�getpeercertMs
zWrappedSocket.getpeercertcCs|jd7_dS)NrM)rH)rJrrr�_reuse_szWrappedSocket._reusecCs&|jdkr|j�n|jd8_dS)NrM)rHrN)rJrrr�_dropbs

zWrappedSocket._dropN)T)F)�__name__�
__module__�__qualname__�__doc__rKrLrOrSrcrdrgrirjrNrlrmrnrrrrrD�s


rDrMcCs|jd7_t|||dd�S)NrMT)rN)rHr	)rJ�mode�bufsizerrr�makefilejsruc@szeZdZdZdd�Zedd��Zejdd��Zedd��Zejd	d��Zd
d�Z	dd
�Z
ddd�Zddd�Zddd�Z
dS)rz�
    I am a wrapper class for the PyOpenSSL ``Context`` object. I am responsible
    for translating the interface of the standard library ``SSLContext`` object
    to calls into PyOpenSSL.
    cCs*t||_tjj|j�|_d|_d|_dS)NrF)�_openssl_versions�protocolrTrUZContext�_ctx�_optionsZcheck_hostname)rJrwrrrrKys
zPyOpenSSLContext.__init__cCs|jS)N)ry)rJrrr�optionsszPyOpenSSLContext.optionscCs||_|jj|�dS)N)ryrxZset_options)rJr=rrrrz�scCst|jj�S)N)�_openssl_to_stdlib_verifyrxZget_verify_mode)rJrrr�verify_mode�szPyOpenSSLContext.verify_modecCs|jjt|t�dS)N)rxZ
set_verify�_stdlib_to_openssl_verify�_verify_callback)rJr=rrrr|�scCs|jj�dS)N)rx�set_default_verify_paths)rJrrrr�sz)PyOpenSSLContext.set_default_verify_pathscCs&t|tj�r|jd�}|jj|�dS)Nzutf-8)�
isinstancer�	text_typer.rxZset_cipher_list)rJZciphersrrr�set_ciphers�s
zPyOpenSSLContext.set_ciphersNcCsN|dk	r|jd�}|dk	r$|jd�}|jj||�|dk	rJ|jjt|��dS)Nzutf-8)r.rx�load_verify_locationsr)rJZcafileZcapathZcadatarrrr��s

z&PyOpenSSLContext.load_verify_locationscs<|jj|��dk	r(|jj�fdd��|jj|p4|�dS)Ncs�S)Nr)Z
max_lengthZprompt_twiceZuserdata)�passwordrr�<lambda>�sz2PyOpenSSLContext.load_cert_chain.<locals>.<lambda>)rxZuse_certificate_fileZ
set_passwd_cbZuse_privatekey_file)rJZcertfileZkeyfiler�r)r�r�load_cert_chain�sz PyOpenSSLContext.load_cert_chainFTc	Cs�tjj|j|�}t|tj�r&|jd�}|dk	r8|j|�|j	�x|y|j
�Wnhtjjk
r�tj
||j��}|s~td��wBYn4tjjk
r�}ztjd|��WYdd}~XnXPqBWt||�S)Nzutf-8zselect timed outzbad handshake: %r)rTrUZ
Connectionrxr�rr�r.Zset_tlsext_host_nameZset_connect_stateZdo_handshaker\r
r]r^rrk�sslZSSLErrorrD)	rJZsockZserver_sideZdo_handshake_on_connectrGZserver_hostname�cnxrarArrr�wrap_socket�s$

 zPyOpenSSLContext.wrap_socket)NNN)NN)FTTN)rorprqrrrK�propertyrz�setterr|rr�r�r�r�rrrrrss
	
rcCs|dkS)Nrr)r�rZerr_noZ	err_depthZreturn_coderrrr~�sr~rR)rR)JrrZ
__future__rZOpenSSL.SSLrTZcryptographyrZ$cryptography.hazmat.backends.opensslrr;Z)cryptography.hazmat.backends.openssl.x509rrFrrrX�iorr	r'Zpackages.backports.makefilerZloggingr�Zpackagesrr3�r
�__all__rZPROTOCOL_SSLv23rUZ
SSLv23_METHODZPROTOCOL_TLSv1ZTLSv1_METHODrvr:rrrr�updateZPROTOCOL_SSLv3ZSSLv3_METHOD�AttributeErrorZ	CERT_NONEZVERIFY_NONEZ
CERT_OPTIONALZVERIFY_PEERZ
CERT_REQUIREDZVERIFY_FAIL_IF_NO_PEER_CERTr}�dict�itemsr{rhr!rrr Z	getLoggerror?rrrr6rC�objectrDrurr~rrrr�<module>+sh




3S_vendor/urllib3/contrib/__pycache__/__init__.cpython-36.pyc000064400000000161151733136340017615 0ustar003

�Pf�@sdS)N�rrr�/usr/lib/python3.6/__init__.py�<module>s_vendor/urllib3/contrib/__pycache__/securetransport.cpython-36.pyc000064400000043014151733136340021325 0ustar003

�Pf%w�1@s�dZddlmZddlZddlZddlZddlZddlZddl	Z	ddl
Z
ddlZddlZddl
mZddlmZmZmZddlmZmZmZmZydd	l	mZWn$ek
r�dZdd
lmZYnXyed�Wnek
r�ed��YnXd
dgZdZejZ ej!j"Z#ej$�Z%ej&�Z'dZ(ej)ej*ej+ej,ej-ej.ej/ej0ej1ej2ej3ej4ej5ej6ej7ej8ej9ej:ej;ej<ej=ej>ej?ej@ejAejBejCejDejEejFejGejHejIg!ZJe
jKejLejMfiZNeOe
d��r�ejPejPfeNe
jQ<eOe
d��r�ejRejRfeNe
jS<eOe
d��rejLejLfeNe
jT<eOe
d��r0ejUejUfeNe
jV<eOe
d��rNejMejMfeNe
jW<eOe
d��rjeNe
jKeNe
jX<dd
�ZYdd�ZZdd�Z[dd�Z\ej]e[�Z^ej_e\�Z`Gdd�dea�Zbe�r�d&dd �Zcn
d'd"d �Zceceb_cGd#d$�d$ea�ZddS)(aU
SecureTranport support for urllib3 via ctypes.

This makes platform-native TLS available to urllib3 users on macOS without the
use of a compiler. This is an important feature because the Python Package
Index is moving to become a TLSv1.2-or-higher server, and the default OpenSSL
that ships with macOS is not capable of doing TLSv1.2. The only way to resolve
this is to give macOS users an alternative solution to the problem, and that
solution is to use SecureTransport.

We use ctypes here because this solution must not require a compiler. That's
because pip is not allowed to require a compiler either.

This is not intended to be a seriously long-term solution to this problem.
The hope is that PEP 543 will eventually solve this issue for us, at which
point we can retire this contrib module. But in the short term, we need to
solve the impending tire fire that is Python on Mac without this kind of
contrib module. So...here we are.

To use this module, simply import and inject it::

    import urllib3.contrib.securetransport
    urllib3.contrib.securetransport.inject_into_urllib3()

Happy TLSing!
�)�absolute_importN�)�util�)�Security�
SecurityConst�CoreFoundation)�_assert_no_error�_cert_array_from_pem�_temporary_keychain�_load_client_cert_chain)�_fileobject)�backport_makefile�z5SecureTransport only works on Pythons with memoryview�inject_into_urllib3�extract_from_urllib3Ti@�PROTOCOL_SSLv2�PROTOCOL_SSLv3�PROTOCOL_TLSv1�PROTOCOL_TLSv1_1�PROTOCOL_TLSv1_2�PROTOCOL_TLScCs(ttj_tt_ttj_dt_dtj_dS)zG
    Monkey-patch urllib3 with SecureTransport-backed SSL-support.
    TN)�SecureTransportContextr�ssl_�
SSLContext�HAS_SNI�IS_SECURETRANSPORT�rr�%/usr/lib/python3.6/securetransport.pyr�s
cCs(ttj_tt_ttj_dt_dtj_dS)z>
    Undo monkey-patching by :func:`inject_into_urllib3`.
    FN)�orig_util_SSLContextrrr�orig_util_HAS_SNIrrrrrrr�s
cCsld}�y,tj|�}|dkr tjS|j}|d}|j�}d}d}tj|j|�}	t	|	�}
ylxf||kr�|dksr|dkr�t
j|g|�}|s�tjt
jd��|j|
||��}||7}|sZ|s�tjSPqZWWnTtjk
�r}
z4|
j
}|dk	o�|t
jk�r|t
jk�rtjS�WYdd}
~
XnX||d<||k�r0tjSdStk
�rf}
z|dk	�rV|
|_tjSd}
~
XnXdS)zs
    SecureTransport read callback. This is called by ST to request that data
    be returned from the socket.
    Nrz	timed out)�_connection_refs�getr�errSSLInternal�socket�
gettimeout�ctypes�c_charZfrom_address�
memoryviewrZ
wait_for_read�error�errno�EAGAIN�	recv_into�errSSLClosedGraceful�
ECONNRESET�errSSLClosedAbort�errSSLWouldBlock�	Exception�
_exception)�
connection_id�data_buffer�data_length_pointer�wrapped_socket�base_socketZrequested_length�timeoutr)Z
read_count�bufferZbuffer_viewZ	readablesZ
chunk_size�errr�_read_callback�sN




r;c
CsNd}�ytj|�}|dkr tjS|j}|d}tj||�}|j�}d}d}	y`xZ|	|kr�|dksf|dkr�tj	|g|�}
|
s�tj
tjd��|j
|�}|	|7}	||d�}qNWWnNtj
k
r�}z0|j}|dk	r�|tjkr�|tjkr�tjS�WYdd}~XnX|	|d<|	|k�rtjSdStk
�rH}z|dk	�r8||_tjSd}~XnXdS)zx
    SecureTransport write callback. This is called by ST to request that data
    actually be sent on the network.
    Nrz	timed out)r!r"rr#r$r&�	string_atr%rZwait_for_writer)r*r+�sendr.r/r0r1r2)
r3r4r5r6r7Zbytes_to_write�datar8r)�sentZ	writablesZ
chunk_sentr:rrr�_write_callback�sD





r@c@s�eZdZdZdd�Zejdd��Zdd�Zdd	�Z	d
d�Z
dd
�Zdd�Zdd�Z
d(dd�Zdd�Zdd�Zdd�Zdd�Zdd�Zdd �Zd)d"d#�Zd$d%�Zd&d'�ZdS)*�
WrappedSocketz�
    API-compatibility wrapper for Python's OpenSSL wrapped socket object.

    Note: _makefile_refs, _drop(), and _reuse() are needed for the garbage
    collector of PyPy.
    cCsL||_d|_d|_d|_d|_d|_d|_d|_|jj�|_	|jj
d�dS)NrF)r$�context�_makefile_refs�_closedr2�	_keychain�
_keychain_dir�_client_cert_chainr%�_timeout�
settimeout)�selfr$rrr�__init__.szWrappedSocket.__init__ccs4d|_dV|jdk	r0|jd}|_|j�|�dS)a]
        A context manager that can be used to wrap calls that do I/O from
        SecureTransport. If any of the I/O callbacks hit an exception, this
        context manager will correctly propagate the exception after the fact.
        This avoids silently swallowing those exceptions.

        It also correctly forces the socket closed.
        N)r2�close)rJZ	exceptionrrr�_raise_on_error@s

zWrappedSocket._raise_on_errorcCs2tjtt�t�}tj|j|tt��}t|�dS)a4
        Sets up the allowed ciphers. By default this matches the set in
        util.ssl_.DEFAULT_CIPHERS, at least as supported by macOS. This is done
        custom and doesn't allow changing at this time, mostly because parsing
        OpenSSL cipher strings is going to be a freaking nightmare.
        N)rZSSLCipherSuite�len�
CIPHER_SUITESZSSLSetEnabledCiphersrBr	)rJ�ciphers�resultrrr�_set_ciphersUszWrappedSocket._set_ciphersc	Cs|sdStjj|�r2t|d��}|j�}WdQRXd}tj�}z�t|�}tj|j	t
j|��}t|�|srt
jd��tj||�}t|�tj|d�}t|�tj�}tj|t
j|��}t|�Wd|r�tj|�|dkr�tj|�Xtjtjf}|j|k�r
t
jd|j��dS)z�
        Called when we have set custom validation. We do this in two cases:
        first, when cert validation is entirely disabled; and second, when
        using a custom trust DB.
        N�rbzFailed to copy trust referenceTz)certificate verify failed, error code: %d)�os�path�isfile�open�readr�SecTrustRefr
�SSLCopyPeerTrustrBr&�byrefr	�sslZSSLErrorZSecTrustSetAnchorCertificatesZ!SecTrustSetAnchorCertificatesOnlyZSecTrustResultTypeZSecTrustEvaluater�	CFReleaserZkSecTrustResultUnspecifiedZkSecTrustResultProceed�value)	rJ�verify�trust_bundle�fZ
cert_array�trustrQZtrust_resultZ	successesrrr�_custom_validatebs@

zWrappedSocket._custom_validatec	Cs�tjdtjtj�|_tj|jtt�}	t	|	�t
�4t|�d}
x|
tkrV|
dd}
q@W|t|
<WdQRXtj
|j|
�}	t	|	�|r�t|t�s�|jd�}tj|j|t|��}	t	|	�|j�tj|j|�}	t	|	�tj|j|�}	t	|	�|s�|dk	�rtj|jtjd�}	t	|	�|�rNt�\|_|_t|j||�|_tj|j|j�}	t	|	�xf|j��Rtj|j�}	|	tj k�r~t!j"d��n(|	tj#k�r�|j$||��wPn
t	|	�PWdQRX�qPWdS)z�
        Actually performs the TLS handshake. This is run automatically by
        wrapped socket, and shouldn't be needed in user code.
        Ni���rzutf-8Tzhandshake timed out)%rZSSLCreateContextrZkSSLClientSideZkSSLStreamTyperBZ
SSLSetIOFuncs�_read_callback_pointer�_write_callback_pointerr	�_connection_ref_lock�idr!ZSSLSetConnection�
isinstance�bytes�encodeZSSLSetPeerDomainNamerNrRZSSLSetProtocolVersionMinZSSLSetProtocolVersionMaxZSSLSetSessionOptionZ"kSSLSessionOptionBreakOnServerAuthrrErFrrGZSSLSetCertificaterMZSSLHandshaker0r$r8ZerrSSLServerAuthCompletedrc)rJ�server_hostnamer_r`Zmin_versionZmax_versionZclient_certZ
client_keyZclient_key_passphraserQZhandlerrr�	handshake�s\



zWrappedSocket.handshakecCs
|jj�S)N)r$�fileno)rJrrrrm�szWrappedSocket.filenocCs*|jdkr|jd8_|jr&|j�dS)Nrr)rCrDrL)rJrrr�_decref_socketios�s
zWrappedSocket._decref_socketioscCs&tj|�}|j||�}|d|�}|S)N)r&Zcreate_string_bufferr,)rJZbufsizr9Z
bytes_readr>rrr�recvs
zWrappedSocket.recvNc
Cs�|jr
dS|dkrt|�}tj|j|�}tjd�}|j��tj|j	||tj
|��}WdQRX|tjkr�|j
dkr�tjd��n"|tjtjfkr�|j�nt|�|j
S)Nrzrecv timed out)rDrNr&r'Zfrom_buffer�c_size_trMrZSSLReadrBr[rr0r^r$r8r-ZerrSSLClosedNoNotifyrLr	)rJr9�nbytes�processed_bytesrQrrrr,
s 




zWrappedSocket.recv_intocCs
||_dS)N)rH)rJr8rrrrI2szWrappedSocket.settimeoutcCs|jS)N)rH)rJrrrr%5szWrappedSocket.gettimeoutc
Cshtjd�}|j��"tj|j|t|�tj|��}WdQRX|tj	krZ|j
dkrZtjd��nt
|�|j
S)Nrzsend timed out)r&rprMrZSSLWriterBrNr[rr0r^r$r8r	)rJr>rrrQrrrr=8s

"zWrappedSocket.sendcCs8d}x.|t|�kr2|j|||t��}||7}qWdS)Nr)rNr=�SSL_WRITE_BLOCKSIZE)rJr>Z
total_sentr?rrr�sendallIszWrappedSocket.sendallc	Cs$|j��tj|j�WdQRXdS)N)rMrZSSLCloserB)rJrrr�shutdownOs
zWrappedSocket.shutdowncCs�|jdkr�d|_|jr(tj|j�d|_|jr@tj|j�d|_|jrvtj|j�tj|j�t	j
|j�d|_|_|jj
�S|jd8_dS)NrT)rCrDrBrr]rGrErZSecKeychainDelete�shutilZrmtreerFr$rL)rJrrrrLSs

zWrappedSocket.closeFc
Cs�|std��tj�}d}d}z�tj|jtj|��}t|�|sBdStj|�}|sTdStj	|d�}|sht
�tj|�}|szt
�tj
|�}tj|�}	tj|	|�}Wd|r�tj|�|r�tj|�X|S)Nz2SecureTransport only supports dumping binary certsr)�
ValueErrorrrYrZrBr&r[r	ZSecTrustGetCertificateCountZSecTrustGetCertificateAtIndex�AssertionErrorZSecCertificateCopyDatarZCFDataGetLengthZCFDataGetBytePtrr<r])
rJZbinary_formrbZcertdataZ	der_bytesrQZ
cert_countZleafZdata_lengthr4rrr�getpeercertfs6




zWrappedSocket.getpeercertcCs|jd7_dS)Nr)rC)rJrrr�_reuse�szWrappedSocket._reusecCs&|jdkr|j�n|jd8_dS)Nr)rCrL)rJrrr�_drop�s

zWrappedSocket._drop)N)F)�__name__�
__module__�__qualname__�__doc__rK�
contextlib�contextmanagerrMrRrcrlrmrnror,rIr%r=rtrurLryrzr{rrrrrA's&
>Z
(
>rAcCs|jd7_t|||dd�S)NrT)rL)rCr
)rJ�mode�bufsizerrr�makefile�sr��rcOsd}t|||f|�|�S)Nr)r)rJr��	buffering�args�kwargsrrrr��sc@s�eZdZdZdd�Zedd��Zejdd��Zedd��Zejd	d��Zed
d��Z	e	jdd��Z	d
d�Z
dd�Zdd�Zddd�Z
ddd�Zddd�ZdS)rz�
    I am a wrapper class for the SecureTransport library, to translate the
    interface of the standard library ``SSLContext`` object to calls into
    SecureTransport.
    cCs8t|\|_|_d|_d|_d|_d|_d|_d|_dS)NrF)	�_protocol_to_min_max�_min_version�_max_version�_options�_verify�
_trust_bundle�_client_cert�_client_key�_client_key_passphrase)rJZprotocolrrrrK�szSecureTransportContext.__init__cCsdS)z�
        SecureTransport cannot have its hostname checking disabled. For more,
        see the comment on getpeercert() in this file.
        Tr)rJrrr�check_hostname�sz%SecureTransportContext.check_hostnamecCsdS)z�
        SecureTransport cannot have its hostname checking disabled. For more,
        see the comment on getpeercert() in this file.
        Nr)rJr^rrrr��scCs|jS)N)r�)rJrrr�options�szSecureTransportContext.optionscCs
||_dS)N)r�)rJr^rrrr��scCs|jrtjStjS)N)r�r\�
CERT_REQUIREDZ	CERT_NONE)rJrrr�verify_mode�sz"SecureTransportContext.verify_modecCs|tjkrdnd|_dS)NTF)r\r�r�)rJr^rrrr��scCsdS)Nr)rJrrr�set_default_verify_paths�s
z/SecureTransportContext.set_default_verify_pathscCs|j�S)N)r�)rJrrr�load_default_certs�sz)SecureTransportContext.load_default_certscCs|tjjkrtd��dS)Nz5SecureTransport doesn't support custom cipher strings)rrZDEFAULT_CIPHERSrw)rJrPrrr�set_cipherssz"SecureTransportContext.set_ciphersNcCs|dk	rtd��|p||_dS)Nz1SecureTransport does not support cert directories)rwr�)rJZcafileZcapathZcadatarrr�load_verify_locationssz,SecureTransportContext.load_verify_locationscCs||_||_||_dS)N)r�r�Z_client_cert_passphrase)rJZcertfileZkeyfileZpasswordrrr�load_cert_chainsz&SecureTransportContext.load_cert_chainFTc	CsL|s
t�|st�|st�t|�}|j||j|j|j|j|j|j|j	�|S)N)
rxrArlr�r�r�r�r�r�r�)rJZsockZserver_sideZdo_handshake_on_connectZsuppress_ragged_eofsrkr6rrr�wrap_sockets

z"SecureTransportContext.wrap_socket)NNN)NN)FTTN)r|r}r~rrK�propertyr��setterr�r�r�r�r�r�r�r�rrrrr�s 	

	
r���)r�)r�N)erZ
__future__rr�r&r*Zos.pathrTrvr$r\Z	threading�weakref�rZ_securetransport.bindingsrrrZ_securetransport.low_levelr	r
rrr
�ImportErrorZpackages.backports.makefilerr(�	NameError�__all__rr rrr�WeakValueDictionaryr!ZLockrfrsZTLS_AES_256_GCM_SHA384ZTLS_CHACHA20_POLY1305_SHA256ZTLS_AES_128_GCM_SHA256Z'TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384Z%TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384Z'TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256Z%TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256Z#TLS_DHE_DSS_WITH_AES_256_GCM_SHA384Z#TLS_DHE_RSA_WITH_AES_256_GCM_SHA384Z#TLS_DHE_DSS_WITH_AES_128_GCM_SHA256Z#TLS_DHE_RSA_WITH_AES_128_GCM_SHA256Z'TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384Z%TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384Z$TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHAZ"TLS_ECDHE_RSA_WITH_AES_256_CBC_SHAZ#TLS_DHE_RSA_WITH_AES_256_CBC_SHA256Z#TLS_DHE_DSS_WITH_AES_256_CBC_SHA256Z TLS_DHE_RSA_WITH_AES_256_CBC_SHAZ TLS_DHE_DSS_WITH_AES_256_CBC_SHAZ'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256Z%TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256Z$TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHAZ"TLS_ECDHE_RSA_WITH_AES_128_CBC_SHAZ#TLS_DHE_RSA_WITH_AES_128_CBC_SHA256Z#TLS_DHE_DSS_WITH_AES_128_CBC_SHA256Z TLS_DHE_RSA_WITH_AES_128_CBC_SHAZ TLS_DHE_DSS_WITH_AES_128_CBC_SHAZTLS_RSA_WITH_AES_256_GCM_SHA384ZTLS_RSA_WITH_AES_128_GCM_SHA256ZTLS_RSA_WITH_AES_256_CBC_SHA256ZTLS_RSA_WITH_AES_128_CBC_SHA256ZTLS_RSA_WITH_AES_256_CBC_SHAZTLS_RSA_WITH_AES_128_CBC_SHArOZPROTOCOL_SSLv23Z
kTLSProtocol1ZkTLSProtocol12r��hasattrZ
kSSLProtocol2rZ
kSSLProtocol3rrZkTLSProtocol11rrrrrr;r@ZSSLReadFuncrdZSSLWriteFuncre�objectrAr�rrrrr�<module>s�95



_vendor/urllib3/contrib/__pycache__/appengine.cpython-36.pyc000064400000021027151733136340020030 0ustar003

�Pfq*�@s dZddlmZddlZddlZddlZddlmZddlm	Z	m
Z
mZmZm
Z
mZddlmZddlmZdd	lmZdd
lmZddlmZyddlmZWnek
r�dZYnXeje�ZGd
d�de
�ZGdd�de	�Z Gdd�de�Z!dd�Z"dd�Z#dd�Z$dd�Z%dd�Z&dS)aC
This module provides a pool manager that uses Google App Engine's
`URLFetch Service <https://cloud.google.com/appengine/docs/python/urlfetch>`_.

Example usage::

    from urllib3 import PoolManager
    from urllib3.contrib.appengine import AppEngineManager, is_appengine_sandbox

    if is_appengine_sandbox():
        # AppEngineManager uses AppEngine's URLFetch API behind the scenes
        http = AppEngineManager()
    else:
        # PoolManager uses a socket-level API behind the scenes
        http = PoolManager()

    r = http.request('GET', 'https://google.com/')

There are `limitations <https://cloud.google.com/appengine/docs/python/urlfetch/#Python_Quotas_and_limits>`_ to the URLFetch service and it may not be
the best choice for your application. There are three options for using
urllib3 on Google App Engine:

1. You can use :class:`AppEngineManager` with URLFetch. URLFetch is
   cost-effective in many circumstances as long as your usage is within the
   limitations.
2. You can use a normal :class:`~urllib3.PoolManager` by enabling sockets.
   Sockets also have `limitations and restrictions
   <https://cloud.google.com/appengine/docs/python/sockets/   #limitations-and-restrictions>`_ and have a lower free quota than URLFetch.
   To use sockets, be sure to specify the following in your ``app.yaml``::

        env_variables:
            GAE_USE_SOCKETS_HTTPLIB : 'true'

3. If you are using `App Engine Flexible
<https://cloud.google.com/appengine/docs/flexible/>`_, you can use the standard
:class:`PoolManager` without any configuration or special environment variables.
�)�absolute_importN�)�urljoin)�	HTTPError�HTTPWarning�
MaxRetryError�
ProtocolError�TimeoutError�SSLError)�BytesIO)�RequestMethods)�HTTPResponse)�Timeout)�Retry)�urlfetchc@seZdZdS)�AppEnginePlatformWarningN)�__name__�
__module__�__qualname__�rr�/usr/lib/python3.6/appengine.pyrGsrc@seZdZdS)�AppEnginePlatformErrorN)rrrrrrrrKsrc@sXeZdZdZddd�Zdd�Zdd	�Zddddejfd
d�Z	dd
�Z
dd�Zdd�ZdS)�AppEngineManagera
    Connection manager for Google App Engine sandbox applications.

    This manager uses the URLFetch service directly instead of using the
    emulated httplib, and is subject to URLFetch limitations as described in
    the App Engine documentation `here
    <https://cloud.google.com/appengine/docs/python/urlfetch>`_.

    Notably it will raise an :class:`AppEnginePlatformError` if:
        * URLFetch is not available.
        * If you attempt to use this on App Engine Flexible, as full socket
          support is available.
        * If a request size is more than 10 megabytes.
        * If a response size is more than 32 megabtyes.
        * If you use an unsupported request method such as OPTIONS.

    Beyond those cases, it will raise normal urllib3 errors.
    NTcCsNtstd��t�rtd��tjdt�tj||�||_||_	|pFt
j|_dS)Nz.URLFetch is not available in this environment.z�Use normal urllib3.PoolManager instead of AppEngineManageron Managed VMs, as using URLFetch is not necessary in this environment.z�urllib3 is using URLFetch on Google App Engine sandbox instead of sockets. To use sockets directly instead of URLFetch see https://urllib3.readthedocs.io/en/latest/reference/urllib3.contrib.html.)
rr�is_prod_appengine_mvms�warnings�warnrr�__init__�validate_certificate�urlfetch_retriesrZDEFAULT�retries)�self�headersrrrrrrrcszAppEngineManager.__init__cCs|S)Nr)r rrr�	__enter__{szAppEngineManager.__enter__cCsdS)NFr)r �exc_typeZexc_valZexc_tbrrr�__exit__~szAppEngineManager.__exit__cKs�|j||�}yF|o |jdko |j}	tj||||p2id|jo<|	|j|�|jd�}
W�nBtjk
r�}zt	||��WYdd}~X�ntj
k
r�}z$dt|�kr�td|��t
|��WYdd}~Xn�tjk
�r}z(dt|�kr�t|||d��t
|��WYdd}~Xn�tjk
�r6}ztd|��WYdd}~Xn`tjk
�rb}zt|��WYdd}~Xn4tjk
�r�}ztd	||��WYdd}~XnX|j|
fd
|i|��}|�o�|j�}
|
�rr|j�r�|j�r�t||d��n�|jdk�r�d
}y|j||||d�}Wn*tk
�r.|j�r*t||d��|SX|j|�tjd||
�t||
�}|j||||f|||d�|��St|jd��}|j ||j|��r�|j||||d�}tjd|�|j!|�|j||f|||||d�|��S|S)NrF)Zpayload�methodr!Zallow_truncated�follow_redirectsZdeadlinerz	too largezOURLFetch request too large, URLFetch only supports requests up to 10mb in size.zToo many redirects)�reasonzPURLFetch response too large, URLFetch only supportsresponses up to 32mb in size.z$URLFetch does not support method: %srztoo many redirectsi/ZGET)�responseZ_poolzRedirecting %s -> %s)r�redirect�timeoutzRetry-Afterz	Retry: %s)�bodyr!rr)r*)"�_get_retriesr)�totalrZfetchr�_get_absolute_timeoutrZDeadlineExceededErrorr	ZInvalidURLError�strrrZ
DownloadErrorrZResponseTooLargeErrorZSSLCertificateErrorr
ZInvalidMethodError�#_urlfetch_response_to_http_responseZget_redirect_locationZraise_on_redirect�statusZ	incrementZsleep_for_retry�log�debugr�urlopen�boolZ	getheaderZis_retryZsleep)r r%Zurlr+r!rr)r*�response_kwr&r(�eZ
http_responseZredirect_locationZredirect_urlZhas_retry_afterrrrr4�s�




zAppEngineManager.urlopencKszt�r"|jjd�}|dkr"|jd=|jjd�}|dkrZ|jd�}|jd�dj|�|jd<tft|j�|j|j	d�|��S)Nzcontent-encodingZdeflateztransfer-encodingZchunked�,)r+r!r1)
�is_prod_appenginer!�get�split�remove�joinr
rZcontentZstatus_code)r Z
urlfetch_respr6Zcontent_encodingZtransfer_encodingZ	encodingsrrrr0�s

z4AppEngineManager._urlfetch_response_to_http_responsecCsB|tjkrdSt|t�r>|jdk	s,|jdk	r8tjdt�|jS|S)NzdURLFetch does not support granular timeout settings, reverting to total or default URLFetch timeout.)	r�DEFAULT_TIMEOUT�
isinstanceZ_readZ_connectrrrr-)r r*rrrr.�s

z&AppEngineManager._get_absolute_timeoutcCs>t|t�stj|||jd�}|js.|js.|jr:tjdt	�|S)N)r)�defaultzhURLFetch only supports total retries and does not recognize connect, read, or redirect retry parameters.)
r?rZfrom_intrZconnect�readr)rrr)r rr)rrrr,s
zAppEngineManager._get_retries)NNTT)
rrr�__doc__rr"r$rr>r4r0r.r,rrrrrOs
ZrcCst�pt�pt�S)N)�is_local_appenginer9rrrrr�is_appenginesrDcCst�ot�S)N)rDrrrrr�is_appengine_sandboxsrEcCsdtjkodtjdkS)N�APPENGINE_RUNTIMEzDevelopment/�SERVER_SOFTWARE)�os�environrrrrrCs
rCcCs dtjkodtjdkot�S)NrFzGoogle App Engine/rG)rHrIrrrrrr9!s
r9cCstjjdd�dkS)NZGAE_VMF�true)rHrIr:rrrrr'sr)'rBZ
__future__rZloggingrHrZpackages.six.moves.urllib.parser�
exceptionsrrrrr	r
Zpackages.sixrZrequestrr(r
Zutil.timeoutrZ
util.retryrZgoogle.appengine.apir�ImportErrorZ	getLoggerrr2rrrrDrErCr9rrrrr�<module>'s2 	

D_vendor/urllib3/contrib/__pycache__/__init__.cpython-36.opt-1.pyc000064400000000161151733136340020554 0ustar003

�Pf�@sdS)N�rrr�/usr/lib/python3.6/__init__.py�<module>s_vendor/urllib3/contrib/__pycache__/appengine.cpython-36.opt-1.pyc000064400000021027151733136340020767 0ustar003

�Pfq*�@s dZddlmZddlZddlZddlZddlmZddlm	Z	m
Z
mZmZm
Z
mZddlmZddlmZdd	lmZdd
lmZddlmZyddlmZWnek
r�dZYnXeje�ZGd
d�de
�ZGdd�de	�Z Gdd�de�Z!dd�Z"dd�Z#dd�Z$dd�Z%dd�Z&dS)aC
This module provides a pool manager that uses Google App Engine's
`URLFetch Service <https://cloud.google.com/appengine/docs/python/urlfetch>`_.

Example usage::

    from urllib3 import PoolManager
    from urllib3.contrib.appengine import AppEngineManager, is_appengine_sandbox

    if is_appengine_sandbox():
        # AppEngineManager uses AppEngine's URLFetch API behind the scenes
        http = AppEngineManager()
    else:
        # PoolManager uses a socket-level API behind the scenes
        http = PoolManager()

    r = http.request('GET', 'https://google.com/')

There are `limitations <https://cloud.google.com/appengine/docs/python/urlfetch/#Python_Quotas_and_limits>`_ to the URLFetch service and it may not be
the best choice for your application. There are three options for using
urllib3 on Google App Engine:

1. You can use :class:`AppEngineManager` with URLFetch. URLFetch is
   cost-effective in many circumstances as long as your usage is within the
   limitations.
2. You can use a normal :class:`~urllib3.PoolManager` by enabling sockets.
   Sockets also have `limitations and restrictions
   <https://cloud.google.com/appengine/docs/python/sockets/   #limitations-and-restrictions>`_ and have a lower free quota than URLFetch.
   To use sockets, be sure to specify the following in your ``app.yaml``::

        env_variables:
            GAE_USE_SOCKETS_HTTPLIB : 'true'

3. If you are using `App Engine Flexible
<https://cloud.google.com/appengine/docs/flexible/>`_, you can use the standard
:class:`PoolManager` without any configuration or special environment variables.
�)�absolute_importN�)�urljoin)�	HTTPError�HTTPWarning�
MaxRetryError�
ProtocolError�TimeoutError�SSLError)�BytesIO)�RequestMethods)�HTTPResponse)�Timeout)�Retry)�urlfetchc@seZdZdS)�AppEnginePlatformWarningN)�__name__�
__module__�__qualname__�rr�/usr/lib/python3.6/appengine.pyrGsrc@seZdZdS)�AppEnginePlatformErrorN)rrrrrrrrKsrc@sXeZdZdZddd�Zdd�Zdd	�Zddddejfd
d�Z	dd
�Z
dd�Zdd�ZdS)�AppEngineManagera
    Connection manager for Google App Engine sandbox applications.

    This manager uses the URLFetch service directly instead of using the
    emulated httplib, and is subject to URLFetch limitations as described in
    the App Engine documentation `here
    <https://cloud.google.com/appengine/docs/python/urlfetch>`_.

    Notably it will raise an :class:`AppEnginePlatformError` if:
        * URLFetch is not available.
        * If you attempt to use this on App Engine Flexible, as full socket
          support is available.
        * If a request size is more than 10 megabytes.
        * If a response size is more than 32 megabtyes.
        * If you use an unsupported request method such as OPTIONS.

    Beyond those cases, it will raise normal urllib3 errors.
    NTcCsNtstd��t�rtd��tjdt�tj||�||_||_	|pFt
j|_dS)Nz.URLFetch is not available in this environment.z�Use normal urllib3.PoolManager instead of AppEngineManageron Managed VMs, as using URLFetch is not necessary in this environment.z�urllib3 is using URLFetch on Google App Engine sandbox instead of sockets. To use sockets directly instead of URLFetch see https://urllib3.readthedocs.io/en/latest/reference/urllib3.contrib.html.)
rr�is_prod_appengine_mvms�warnings�warnrr�__init__�validate_certificate�urlfetch_retriesrZDEFAULT�retries)�self�headersrrrrrrrcszAppEngineManager.__init__cCs|S)Nr)r rrr�	__enter__{szAppEngineManager.__enter__cCsdS)NFr)r �exc_typeZexc_valZexc_tbrrr�__exit__~szAppEngineManager.__exit__cKs�|j||�}yF|o |jdko |j}	tj||||p2id|jo<|	|j|�|jd�}
W�nBtjk
r�}zt	||��WYdd}~X�ntj
k
r�}z$dt|�kr�td|��t
|��WYdd}~Xn�tjk
�r}z(dt|�kr�t|||d��t
|��WYdd}~Xn�tjk
�r6}ztd|��WYdd}~Xn`tjk
�rb}zt|��WYdd}~Xn4tjk
�r�}ztd	||��WYdd}~XnX|j|
fd
|i|��}|�o�|j�}
|
�rr|j�r�|j�r�t||d��n�|jdk�r�d
}y|j||||d�}Wn*tk
�r.|j�r*t||d��|SX|j|�tjd||
�t||
�}|j||||f|||d�|��St|jd��}|j ||j|��r�|j||||d�}tjd|�|j!|�|j||f|||||d�|��S|S)NrF)Zpayload�methodr!Zallow_truncated�follow_redirectsZdeadlinerz	too largezOURLFetch request too large, URLFetch only supports requests up to 10mb in size.zToo many redirects)�reasonzPURLFetch response too large, URLFetch only supportsresponses up to 32mb in size.z$URLFetch does not support method: %srztoo many redirectsi/ZGET)�responseZ_poolzRedirecting %s -> %s)r�redirect�timeoutzRetry-Afterz	Retry: %s)�bodyr!rr)r*)"�_get_retriesr)�totalrZfetchr�_get_absolute_timeoutrZDeadlineExceededErrorr	ZInvalidURLError�strrrZ
DownloadErrorrZResponseTooLargeErrorZSSLCertificateErrorr
ZInvalidMethodError�#_urlfetch_response_to_http_responseZget_redirect_locationZraise_on_redirect�statusZ	incrementZsleep_for_retry�log�debugr�urlopen�boolZ	getheaderZis_retryZsleep)r r%Zurlr+r!rr)r*�response_kwr&r(�eZ
http_responseZredirect_locationZredirect_urlZhas_retry_afterrrrr4�s�




zAppEngineManager.urlopencKszt�r"|jjd�}|dkr"|jd=|jjd�}|dkrZ|jd�}|jd�dj|�|jd<tft|j�|j|j	d�|��S)Nzcontent-encodingZdeflateztransfer-encodingZchunked�,)r+r!r1)
�is_prod_appenginer!�get�split�remove�joinr
rZcontentZstatus_code)r Z
urlfetch_respr6Zcontent_encodingZtransfer_encodingZ	encodingsrrrr0�s

z4AppEngineManager._urlfetch_response_to_http_responsecCsB|tjkrdSt|t�r>|jdk	s,|jdk	r8tjdt�|jS|S)NzdURLFetch does not support granular timeout settings, reverting to total or default URLFetch timeout.)	r�DEFAULT_TIMEOUT�
isinstanceZ_readZ_connectrrrr-)r r*rrrr.�s

z&AppEngineManager._get_absolute_timeoutcCs>t|t�stj|||jd�}|js.|js.|jr:tjdt	�|S)N)r)�defaultzhURLFetch only supports total retries and does not recognize connect, read, or redirect retry parameters.)
r?rZfrom_intrZconnect�readr)rrr)r rr)rrrr,s
zAppEngineManager._get_retries)NNTT)
rrr�__doc__rr"r$rr>r4r0r.r,rrrrrOs
ZrcCst�pt�pt�S)N)�is_local_appenginer9rrrrr�is_appenginesrDcCst�ot�S)N)rDrrrrr�is_appengine_sandboxsrEcCsdtjkodtjdkS)N�APPENGINE_RUNTIMEzDevelopment/�SERVER_SOFTWARE)�os�environrrrrrCs
rCcCs dtjkodtjdkot�S)NrFzGoogle App Engine/rG)rHrIrrrrrr9!s
r9cCstjjdd�dkS)NZGAE_VMF�true)rHrIr:rrrrr'sr)'rBZ
__future__rZloggingrHrZpackages.six.moves.urllib.parser�
exceptionsrrrrr	r
Zpackages.sixrZrequestrr(r
Zutil.timeoutrZ
util.retryrZgoogle.appengine.apir�ImportErrorZ	getLoggerrr2rrrrDrErCr9rrrrr�<module>'s2 	

D_vendor/urllib3/contrib/__pycache__/socks.cpython-36.opt-1.pyc000064400000011162151733136340020142 0ustar003

�Pf3�@s(dZddlmZyddlZWn6ek
rRddlZddlmZejde��YnXddl	m
ZmZ
ddlmZmZdd	lmZmZdd
lmZmZddlmZddlmZyddlZWnek
r�dZYnXGd
d�de�ZGdd�dee�ZGdd�de�ZGdd�de�ZGdd�de�ZdS)a�
This module contains provisional support for SOCKS proxies from within
urllib3. This module supports SOCKS4 (specifically the SOCKS4A variant) and
SOCKS5. To enable its functionality, either install PySocks or install this
module with the ``socks`` extra.

The SOCKS implementation supports the full range of urllib3 features. It also
supports the following SOCKS features:

- SOCKS4
- SOCKS4a
- SOCKS5
- Usernames and passwords for the SOCKS proxy

Known Limitations:

- Currently PySocks does not support contacting remote websites via literal
  IPv6 addresses. Any such connection attempt will fail. You must use a domain
  name.
- Currently PySocks does not support IPv6 connections to the SOCKS proxy. Any
  such connection attempt will fail.
�)�absolute_importN�)�DependencyWarningz�SOCKS support in urllib3 requires the installation of optional dependencies: specifically, PySocks.  For more information, see https://urllib3.readthedocs.io/en/latest/contrib.html#socks-proxies)�error�timeout)�HTTPConnection�HTTPSConnection)�HTTPConnectionPool�HTTPSConnectionPool)�ConnectTimeoutError�NewConnectionError)�PoolManager)�	parse_urlcs(eZdZdZ�fdd�Zdd�Z�ZS)�SOCKSConnectionzG
    A plain-text HTTP connection that connects via a SOCKS proxy.
    cs"|jd�|_tt|�j||�dS)N�_socks_options)�popr�superr�__init__)�self�args�kwargs)�	__class__��/usr/lib/python3.6/socks.pyr?szSOCKSConnection.__init__cCsXi}|jr|j|d<|jr$|j|d<yTtj|j|jff|jd|jd|jd|jd|jd|jd|jd	�|��}Wn�tk
r�}zt	|d
|j|jf��WYdd}~Xn�tj
k
�r"}zT|j�r|j}t|t�r�t	|d
|j|jf��nt
|d|��nt
|d|��WYdd}~Xn2tk
�rR}zt
|d|��WYdd}~XnX|S)
zA
        Establish a new connection via the SOCKS proxy.
        �source_address�socket_options�
socks_version�
proxy_host�
proxy_port�username�password�rdns)Z
proxy_typeZ
proxy_addrrZproxy_usernameZproxy_passwordZ
proxy_rdnsrz0Connection to %s timed out. (connect timeout=%s)Nz(Failed to establish a new connection: %s)rr�socksZcreate_connection�host�portrr�
SocketTimeoutrZ
ProxyErrorZ
socket_err�
isinstancer�SocketError)rZextra_kwZconn�errrr�	_new_connCsL

 
zSOCKSConnection._new_conn)�__name__�
__module__�__qualname__�__doc__rr)�
__classcell__rr)rrr;src@seZdZdS)�SOCKSHTTPSConnectionN)r*r+r,rrrrr/�sr/c@seZdZeZdS)�SOCKSHTTPConnectionPoolN)r*r+r,r�
ConnectionClsrrrrr0�sr0c@seZdZeZdS)�SOCKSHTTPSConnectionPoolN)r*r+r,r/r1rrrrr2�sr2cs,eZdZdZeed�Zd�fdd�	Z�ZS)�SOCKSProxyManagerzh
    A version of the urllib3 ProxyManager that routes connections via the
    defined SOCKS proxy.
    )ZhttpZhttpsN�
cs�t|�}|jdkrtj}d}	nN|jdkr4tj}d}	n8|jdkrJtj}d}	n"|jdkr`tj}d}	ntd|��||_||j|j|||	d�}
|
|d	<t	t
|�j||f|�t
j|_dS)
NZsocks5FZsocks5hTZsocks4Zsocks4az)Unable to determine SOCKS version from %s)rrrrr r!r)
r�schemer"ZPROXY_TYPE_SOCKS5ZPROXY_TYPE_SOCKS4�
ValueError�	proxy_urlr#r$rr3r�pool_classes_by_scheme)rr7rr Z	num_poolsZheadersZconnection_pool_kwZparsedrr!Z
socks_options)rrrr�s4





zSOCKSProxyManager.__init__)NNr4N)	r*r+r,r-r0r2r8rr.rr)rrr3�s
r3) r-Z
__future__rr"�ImportError�warnings�
exceptionsr�warnZsocketrr'rr%Z
connectionrrZconnectionpoolr	r
rrZpoolmanagerr
Zutil.urlrZsslrr/r0r2r3rrrr�<module>s2
F_vendor/urllib3/contrib/__pycache__/ntlmpool.cpython-36.opt-1.pyc000064400000006131151733136340020664 0ustar003

�Pf~�@s\dZddlmZddlmZddlmZddlmZddlm	Z	ee
�ZGdd	�d	e�Zd
S)z
NTLM authenticating pool, contributed by erikcederstran

Issue #10, see: http://code.google.com/p/urllib3/issues/detail?id=10
�)�absolute_import)�	getLogger)�ntlm�)�HTTPSConnectionPool)�HTTPSConnectioncs:eZdZdZdZ�fdd�Zdd�Zd�fd
d�	Z�ZS)
�NTLMConnectionPoolzQ
    Implements an NTLM authentication version of an urllib3 connection pool
    ZhttpscsLtt|�j||�||_||_|jdd�}|dj�|_|d|_||_	dS)z�
        authurl is a random URL on the server that is protected by NTLM.
        user is the Windows user, probably in the DOMAIN\username format.
        pw is the password for the user.
        �\�rN)
�superr�__init__�authurl�rawuser�split�upper�domain�user�pw)�selfrrr
�args�kwargsZ
user_parts)�	__class__��/usr/lib/python3.6/ntlmpool.pyrs
zNTLMConnectionPool.__init__c
Cs�|jd7_tjd|j|j|j�i}d|d<d}d}t|j|jd�}dtj|j	�||<tjd	|�|j
d
|jd|�|j�}t|j
��}tjd|j|j�tjd|�tjd
|jd��d|_||jd�}d}x(|D] }	|	dd�dkr�|	dd�}q�W|dk�rtd|||f��tj|�\}
}tj|
|j|j|j|�}d|||<tjd	|�|j
d
|jd|�|j�}tjd|j|j�tjdt|j
���tjd
|j�dd��|jdk�r�|jdk�r�td��td|j|jf��d|_tjd�|S)Nr
z3Starting NTLM HTTPS connection no. %d: https://%s%sz
Keep-Alive�
ConnectionZ
Authorizationzwww-authenticate)�host�portzNTLM %szRequest headers: %sZGETzResponse status: %s %szResponse headers: %szResponse data: %s [...]�dz, �zNTLM z!Unexpected %s response header: %s��i�z3Server rejected request: wrong username or passwordzWrong server response: %s %szConnection established)Znum_connections�log�debugrr
rrrZcreate_NTLM_NEGOTIATE_MESSAGErZrequestZgetresponse�dictZ
getheadersZstatus�reason�read�fpr�	ExceptionZparse_NTLM_CHALLENGE_MESSAGEZ create_NTLM_AUTHENTICATE_MESSAGErrr)
r�headersZ
req_headerZresp_headerZconn�resZreshdrZauth_header_valuesZauth_header_value�sZServerChallengeZNegotiateFlagsZauth_msgrrr�	_new_conn's\


zNTLMConnectionPool._new_connN�Tcs0|dkri}d|d<tt|�j|||||||�S)Nz
Keep-Aliver)rr�urlopen)r�methodZurlZbodyr'ZretriesZredirectZassert_same_host)rrrr,hszNTLMConnectionPool.urlopen)NNr+TT)	�__name__�
__module__�__qualname__�__doc__�schemerr*r,�
__classcell__rr)rrrsArN)
r1Z
__future__rZloggingrr�rZpackages.six.moves.http_clientrr.r rrrrr�<module>s_vendor/urllib3/contrib/__pycache__/securetransport.cpython-36.opt-1.pyc000064400000042703151733136340022270 0ustar003

�Pf%w�1@s�dZddlmZddlZddlZddlZddlZddlZddl	Z	ddl
Z
ddlZddlZddl
mZddlmZmZmZddlmZmZmZmZydd	l	mZWn$ek
r�dZdd
lmZYnXyed�Wnek
r�ed��YnXd
dgZdZejZ ej!j"Z#ej$�Z%ej&�Z'dZ(ej)ej*ej+ej,ej-ej.ej/ej0ej1ej2ej3ej4ej5ej6ej7ej8ej9ej:ej;ej<ej=ej>ej?ej@ejAejBejCejDejEejFejGejHejIg!ZJe
jKejLejMfiZNeOe
d��r�ejPejPfeNe
jQ<eOe
d��r�ejRejRfeNe
jS<eOe
d��rejLejLfeNe
jT<eOe
d��r0ejUejUfeNe
jV<eOe
d��rNejMejMfeNe
jW<eOe
d��rjeNe
jKeNe
jX<dd
�ZYdd�ZZdd�Z[dd�Z\ej]e[�Z^ej_e\�Z`Gdd�dea�Zbe�r�d&dd �Zcn
d'd"d �Zceceb_cGd#d$�d$ea�ZddS)(aU
SecureTranport support for urllib3 via ctypes.

This makes platform-native TLS available to urllib3 users on macOS without the
use of a compiler. This is an important feature because the Python Package
Index is moving to become a TLSv1.2-or-higher server, and the default OpenSSL
that ships with macOS is not capable of doing TLSv1.2. The only way to resolve
this is to give macOS users an alternative solution to the problem, and that
solution is to use SecureTransport.

We use ctypes here because this solution must not require a compiler. That's
because pip is not allowed to require a compiler either.

This is not intended to be a seriously long-term solution to this problem.
The hope is that PEP 543 will eventually solve this issue for us, at which
point we can retire this contrib module. But in the short term, we need to
solve the impending tire fire that is Python on Mac without this kind of
contrib module. So...here we are.

To use this module, simply import and inject it::

    import urllib3.contrib.securetransport
    urllib3.contrib.securetransport.inject_into_urllib3()

Happy TLSing!
�)�absolute_importN�)�util�)�Security�
SecurityConst�CoreFoundation)�_assert_no_error�_cert_array_from_pem�_temporary_keychain�_load_client_cert_chain)�_fileobject)�backport_makefile�z5SecureTransport only works on Pythons with memoryview�inject_into_urllib3�extract_from_urllib3Ti@�PROTOCOL_SSLv2�PROTOCOL_SSLv3�PROTOCOL_TLSv1�PROTOCOL_TLSv1_1�PROTOCOL_TLSv1_2�PROTOCOL_TLScCs(ttj_tt_ttj_dt_dtj_dS)zG
    Monkey-patch urllib3 with SecureTransport-backed SSL-support.
    TN)�SecureTransportContextr�ssl_�
SSLContext�HAS_SNI�IS_SECURETRANSPORT�rr�%/usr/lib/python3.6/securetransport.pyr�s
cCs(ttj_tt_ttj_dt_dtj_dS)z>
    Undo monkey-patching by :func:`inject_into_urllib3`.
    FN)�orig_util_SSLContextrrr�orig_util_HAS_SNIrrrrrrr�s
cCsld}�y,tj|�}|dkr tjS|j}|d}|j�}d}d}tj|j|�}	t	|	�}
ylxf||kr�|dksr|dkr�t
j|g|�}|s�tjt
jd��|j|
||��}||7}|sZ|s�tjSPqZWWnTtjk
�r}
z4|
j
}|dk	o�|t
jk�r|t
jk�rtjS�WYdd}
~
XnX||d<||k�r0tjSdStk
�rf}
z|dk	�rV|
|_tjSd}
~
XnXdS)zs
    SecureTransport read callback. This is called by ST to request that data
    be returned from the socket.
    Nrz	timed out)�_connection_refs�getr�errSSLInternal�socket�
gettimeout�ctypes�c_charZfrom_address�
memoryviewrZ
wait_for_read�error�errno�EAGAIN�	recv_into�errSSLClosedGraceful�
ECONNRESET�errSSLClosedAbort�errSSLWouldBlock�	Exception�
_exception)�
connection_id�data_buffer�data_length_pointer�wrapped_socket�base_socketZrequested_length�timeoutr)Z
read_count�bufferZbuffer_viewZ	readablesZ
chunk_size�errr�_read_callback�sN




r;c
CsNd}�ytj|�}|dkr tjS|j}|d}tj||�}|j�}d}d}	y`xZ|	|kr�|dksf|dkr�tj	|g|�}
|
s�tj
tjd��|j
|�}|	|7}	||d�}qNWWnNtj
k
r�}z0|j}|dk	r�|tjkr�|tjkr�tjS�WYdd}~XnX|	|d<|	|k�rtjSdStk
�rH}z|dk	�r8||_tjSd}~XnXdS)zx
    SecureTransport write callback. This is called by ST to request that data
    actually be sent on the network.
    Nrz	timed out)r!r"rr#r$r&�	string_atr%rZwait_for_writer)r*r+�sendr.r/r0r1r2)
r3r4r5r6r7Zbytes_to_write�datar8r)�sentZ	writablesZ
chunk_sentr:rrr�_write_callback�sD





r@c@s�eZdZdZdd�Zejdd��Zdd�Zdd	�Z	d
d�Z
dd
�Zdd�Zdd�Z
d(dd�Zdd�Zdd�Zdd�Zdd�Zdd�Zdd �Zd)d"d#�Zd$d%�Zd&d'�ZdS)*�
WrappedSocketz�
    API-compatibility wrapper for Python's OpenSSL wrapped socket object.

    Note: _makefile_refs, _drop(), and _reuse() are needed for the garbage
    collector of PyPy.
    cCsL||_d|_d|_d|_d|_d|_d|_d|_|jj�|_	|jj
d�dS)NrF)r$�context�_makefile_refs�_closedr2�	_keychain�
_keychain_dir�_client_cert_chainr%�_timeout�
settimeout)�selfr$rrr�__init__.szWrappedSocket.__init__ccs4d|_dV|jdk	r0|jd}|_|j�|�dS)a]
        A context manager that can be used to wrap calls that do I/O from
        SecureTransport. If any of the I/O callbacks hit an exception, this
        context manager will correctly propagate the exception after the fact.
        This avoids silently swallowing those exceptions.

        It also correctly forces the socket closed.
        N)r2�close)rJZ	exceptionrrr�_raise_on_error@s

zWrappedSocket._raise_on_errorcCs2tjtt�t�}tj|j|tt��}t|�dS)a4
        Sets up the allowed ciphers. By default this matches the set in
        util.ssl_.DEFAULT_CIPHERS, at least as supported by macOS. This is done
        custom and doesn't allow changing at this time, mostly because parsing
        OpenSSL cipher strings is going to be a freaking nightmare.
        N)rZSSLCipherSuite�len�
CIPHER_SUITESZSSLSetEnabledCiphersrBr	)rJ�ciphers�resultrrr�_set_ciphersUszWrappedSocket._set_ciphersc	Cs|sdStjj|�r2t|d��}|j�}WdQRXd}tj�}z�t|�}tj|j	t
j|��}t|�|srt
jd��tj||�}t|�tj|d�}t|�tj�}tj|t
j|��}t|�Wd|r�tj|�|dkr�tj|�Xtjtjf}|j|k�r
t
jd|j��dS)z�
        Called when we have set custom validation. We do this in two cases:
        first, when cert validation is entirely disabled; and second, when
        using a custom trust DB.
        N�rbzFailed to copy trust referenceTz)certificate verify failed, error code: %d)�os�path�isfile�open�readr�SecTrustRefr
�SSLCopyPeerTrustrBr&�byrefr	�sslZSSLErrorZSecTrustSetAnchorCertificatesZ!SecTrustSetAnchorCertificatesOnlyZSecTrustResultTypeZSecTrustEvaluater�	CFReleaserZkSecTrustResultUnspecifiedZkSecTrustResultProceed�value)	rJ�verify�trust_bundle�fZ
cert_array�trustrQZtrust_resultZ	successesrrr�_custom_validatebs@

zWrappedSocket._custom_validatec	Cs�tjdtjtj�|_tj|jtt�}	t	|	�t
�4t|�d}
x|
tkrV|
dd}
q@W|t|
<WdQRXtj
|j|
�}	t	|	�|r�t|t�s�|jd�}tj|j|t|��}	t	|	�|j�tj|j|�}	t	|	�tj|j|�}	t	|	�|s�|dk	�rtj|jtjd�}	t	|	�|�rNt�\|_|_t|j||�|_tj|j|j�}	t	|	�xf|j��Rtj|j�}	|	tj k�r~t!j"d��n(|	tj#k�r�|j$||��wPn
t	|	�PWdQRX�qPWdS)z�
        Actually performs the TLS handshake. This is run automatically by
        wrapped socket, and shouldn't be needed in user code.
        Ni���rzutf-8Tzhandshake timed out)%rZSSLCreateContextrZkSSLClientSideZkSSLStreamTyperBZ
SSLSetIOFuncs�_read_callback_pointer�_write_callback_pointerr	�_connection_ref_lock�idr!ZSSLSetConnection�
isinstance�bytes�encodeZSSLSetPeerDomainNamerNrRZSSLSetProtocolVersionMinZSSLSetProtocolVersionMaxZSSLSetSessionOptionZ"kSSLSessionOptionBreakOnServerAuthrrErFrrGZSSLSetCertificaterMZSSLHandshaker0r$r8ZerrSSLServerAuthCompletedrc)rJ�server_hostnamer_r`Zmin_versionZmax_versionZclient_certZ
client_keyZclient_key_passphraserQZhandlerrr�	handshake�s\



zWrappedSocket.handshakecCs
|jj�S)N)r$�fileno)rJrrrrm�szWrappedSocket.filenocCs*|jdkr|jd8_|jr&|j�dS)Nrr)rCrDrL)rJrrr�_decref_socketios�s
zWrappedSocket._decref_socketioscCs&tj|�}|j||�}|d|�}|S)N)r&Zcreate_string_bufferr,)rJZbufsizr9Z
bytes_readr>rrr�recvs
zWrappedSocket.recvNc
Cs�|jr
dS|dkrt|�}tj|j|�}tjd�}|j��tj|j	||tj
|��}WdQRX|tjkr�|j
dkr�tjd��n"|tjtjfkr�|j�nt|�|j
S)Nrzrecv timed out)rDrNr&r'Zfrom_buffer�c_size_trMrZSSLReadrBr[rr0r^r$r8r-ZerrSSLClosedNoNotifyrLr	)rJr9�nbytes�processed_bytesrQrrrr,
s 




zWrappedSocket.recv_intocCs
||_dS)N)rH)rJr8rrrrI2szWrappedSocket.settimeoutcCs|jS)N)rH)rJrrrr%5szWrappedSocket.gettimeoutc
Cshtjd�}|j��"tj|j|t|�tj|��}WdQRX|tj	krZ|j
dkrZtjd��nt
|�|j
S)Nrzsend timed out)r&rprMrZSSLWriterBrNr[rr0r^r$r8r	)rJr>rrrQrrrr=8s

"zWrappedSocket.sendcCs8d}x.|t|�kr2|j|||t��}||7}qWdS)Nr)rNr=�SSL_WRITE_BLOCKSIZE)rJr>Z
total_sentr?rrr�sendallIszWrappedSocket.sendallc	Cs$|j��tj|j�WdQRXdS)N)rMrZSSLCloserB)rJrrr�shutdownOs
zWrappedSocket.shutdowncCs�|jdkr�d|_|jr(tj|j�d|_|jr@tj|j�d|_|jrvtj|j�tj|j�t	j
|j�d|_|_|jj
�S|jd8_dS)NrT)rCrDrBrr]rGrErZSecKeychainDelete�shutilZrmtreerFr$rL)rJrrrrLSs

zWrappedSocket.closeFc
Cs�|std��tj�}d}d}zptj|jtj|��}t|�|sBdStj|�}|sTdStj	|d�}tj
|�}tj|�}tj
|�}	tj|	|�}Wd|r�tj|�|r�tj|�X|S)Nz2SecureTransport only supports dumping binary certsr)�
ValueErrorrrYrZrBr&r[r	ZSecTrustGetCertificateCountZSecTrustGetCertificateAtIndexZSecCertificateCopyDatarZCFDataGetLengthZCFDataGetBytePtrr<r])
rJZbinary_formrbZcertdataZ	der_bytesrQZ
cert_countZleafZdata_lengthr4rrr�getpeercertfs2




zWrappedSocket.getpeercertcCs|jd7_dS)Nr)rC)rJrrr�_reuse�szWrappedSocket._reusecCs&|jdkr|j�n|jd8_dS)Nr)rCrL)rJrrr�_drop�s

zWrappedSocket._drop)N)F)�__name__�
__module__�__qualname__�__doc__rK�
contextlib�contextmanagerrMrRrcrlrmrnror,rIr%r=rtrurLrxryrzrrrrrA's&
>Z
(
>rAcCs|jd7_t|||dd�S)NrT)rL)rCr
)rJ�mode�bufsizerrr�makefile�sr��rcOsd}t|||f|�|�S)Nr)r)rJr��	buffering�args�kwargsrrrr��sc@s�eZdZdZdd�Zedd��Zejdd��Zedd��Zejd	d��Zed
d��Z	e	jdd��Z	d
d�Z
dd�Zdd�Zddd�Z
ddd�Zddd�ZdS)rz�
    I am a wrapper class for the SecureTransport library, to translate the
    interface of the standard library ``SSLContext`` object to calls into
    SecureTransport.
    cCs8t|\|_|_d|_d|_d|_d|_d|_d|_dS)NrF)	�_protocol_to_min_max�_min_version�_max_version�_options�_verify�
_trust_bundle�_client_cert�_client_key�_client_key_passphrase)rJZprotocolrrrrK�szSecureTransportContext.__init__cCsdS)z�
        SecureTransport cannot have its hostname checking disabled. For more,
        see the comment on getpeercert() in this file.
        Tr)rJrrr�check_hostname�sz%SecureTransportContext.check_hostnamecCsdS)z�
        SecureTransport cannot have its hostname checking disabled. For more,
        see the comment on getpeercert() in this file.
        Nr)rJr^rrrr��scCs|jS)N)r�)rJrrr�options�szSecureTransportContext.optionscCs
||_dS)N)r�)rJr^rrrr��scCs|jrtjStjS)N)r�r\�
CERT_REQUIREDZ	CERT_NONE)rJrrr�verify_mode�sz"SecureTransportContext.verify_modecCs|tjkrdnd|_dS)NTF)r\r�r�)rJr^rrrr��scCsdS)Nr)rJrrr�set_default_verify_paths�s
z/SecureTransportContext.set_default_verify_pathscCs|j�S)N)r�)rJrrr�load_default_certs�sz)SecureTransportContext.load_default_certscCs|tjjkrtd��dS)Nz5SecureTransport doesn't support custom cipher strings)rrZDEFAULT_CIPHERSrw)rJrPrrr�set_cipherssz"SecureTransportContext.set_ciphersNcCs|dk	rtd��|p||_dS)Nz1SecureTransport does not support cert directories)rwr�)rJZcafileZcapathZcadatarrr�load_verify_locationssz,SecureTransportContext.load_verify_locationscCs||_||_||_dS)N)r�r�Z_client_cert_passphrase)rJZcertfileZkeyfileZpasswordrrr�load_cert_chainsz&SecureTransportContext.load_cert_chainFTc	Cs2t|�}|j||j|j|j|j|j|j|j�|S)N)	rArlr�r�r�r�r�r�r�)rJZsockZserver_sideZdo_handshake_on_connectZsuppress_ragged_eofsrkr6rrr�wrap_sockets
z"SecureTransportContext.wrap_socket)NNN)NN)FTTN)r{r|r}r~rK�propertyr��setterr�r�r�r�r�r�r�r�rrrrr�s 	

	
r���)r�)r�N)er~Z
__future__rrr&r*Zos.pathrTrvr$r\Z	threading�weakref�rZ_securetransport.bindingsrrrZ_securetransport.low_levelr	r
rrr
�ImportErrorZpackages.backports.makefilerr(�	NameError�__all__rr rrr�WeakValueDictionaryr!ZLockrfrsZTLS_AES_256_GCM_SHA384ZTLS_CHACHA20_POLY1305_SHA256ZTLS_AES_128_GCM_SHA256Z'TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384Z%TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384Z'TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256Z%TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256Z#TLS_DHE_DSS_WITH_AES_256_GCM_SHA384Z#TLS_DHE_RSA_WITH_AES_256_GCM_SHA384Z#TLS_DHE_DSS_WITH_AES_128_GCM_SHA256Z#TLS_DHE_RSA_WITH_AES_128_GCM_SHA256Z'TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384Z%TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384Z$TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHAZ"TLS_ECDHE_RSA_WITH_AES_256_CBC_SHAZ#TLS_DHE_RSA_WITH_AES_256_CBC_SHA256Z#TLS_DHE_DSS_WITH_AES_256_CBC_SHA256Z TLS_DHE_RSA_WITH_AES_256_CBC_SHAZ TLS_DHE_DSS_WITH_AES_256_CBC_SHAZ'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256Z%TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256Z$TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHAZ"TLS_ECDHE_RSA_WITH_AES_128_CBC_SHAZ#TLS_DHE_RSA_WITH_AES_128_CBC_SHA256Z#TLS_DHE_DSS_WITH_AES_128_CBC_SHA256Z TLS_DHE_RSA_WITH_AES_128_CBC_SHAZ TLS_DHE_DSS_WITH_AES_128_CBC_SHAZTLS_RSA_WITH_AES_256_GCM_SHA384ZTLS_RSA_WITH_AES_128_GCM_SHA256ZTLS_RSA_WITH_AES_256_CBC_SHA256ZTLS_RSA_WITH_AES_128_CBC_SHA256ZTLS_RSA_WITH_AES_256_CBC_SHAZTLS_RSA_WITH_AES_128_CBC_SHArOZPROTOCOL_SSLv23Z
kTLSProtocol1ZkTLSProtocol12r��hasattrZ
kSSLProtocol2rZ
kSSLProtocol3rrZkTLSProtocol11rrrrrr;r@ZSSLReadFuncrdZSSLWriteFuncre�objectrAr�rrrrr�<module>s�95



_vendor/urllib3/contrib/securetransport.py000064400000073445151733136340015054 0ustar00"""
SecureTranport support for urllib3 via ctypes.

This makes platform-native TLS available to urllib3 users on macOS without the
use of a compiler. This is an important feature because the Python Package
Index is moving to become a TLSv1.2-or-higher server, and the default OpenSSL
that ships with macOS is not capable of doing TLSv1.2. The only way to resolve
this is to give macOS users an alternative solution to the problem, and that
solution is to use SecureTransport.

We use ctypes here because this solution must not require a compiler. That's
because pip is not allowed to require a compiler either.

This is not intended to be a seriously long-term solution to this problem.
The hope is that PEP 543 will eventually solve this issue for us, at which
point we can retire this contrib module. But in the short term, we need to
solve the impending tire fire that is Python on Mac without this kind of
contrib module. So...here we are.

To use this module, simply import and inject it::

    import urllib3.contrib.securetransport
    urllib3.contrib.securetransport.inject_into_urllib3()

Happy TLSing!
"""
from __future__ import absolute_import

import contextlib
import ctypes
import errno
import os.path
import shutil
import socket
import ssl
import threading
import weakref

from .. import util
from ._securetransport.bindings import (
    Security, SecurityConst, CoreFoundation
)
from ._securetransport.low_level import (
    _assert_no_error, _cert_array_from_pem, _temporary_keychain,
    _load_client_cert_chain
)

try:  # Platform-specific: Python 2
    from socket import _fileobject
except ImportError:  # Platform-specific: Python 3
    _fileobject = None
    from ..packages.backports.makefile import backport_makefile

try:
    memoryview(b'')
except NameError:
    raise ImportError("SecureTransport only works on Pythons with memoryview")

__all__ = ['inject_into_urllib3', 'extract_from_urllib3']

# SNI always works
HAS_SNI = True

# Saved at import time so extract_from_urllib3() can restore the originals.
orig_util_HAS_SNI = util.HAS_SNI
orig_util_SSLContext = util.ssl_.SSLContext

# This dictionary is used by the read callback to obtain a handle to the
# calling wrapped socket. This is a pretty silly approach, but for now it'll
# do. I feel like I should be able to smuggle a handle to the wrapped socket
# directly in the SSLConnectionRef, but for now this approach will work I
# guess.
#
# We need to lock around this structure for inserts, but we don't do it for
# reads/writes in the callbacks. The reasoning here goes as follows:
#
#    1. It is not possible to call into the callbacks before the dictionary is
#       populated, so once in the callback the id must be in the dictionary.
#    2. The callbacks don't mutate the dictionary, they only read from it, and
#       so cannot conflict with any of the insertions.
#
# This is good: if we had to lock in the callbacks we'd drastically slow down
# the performance of this code.
_connection_refs = weakref.WeakValueDictionary()
_connection_ref_lock = threading.Lock()

# Limit writes to 16kB. This is OpenSSL's limit, but we'll cargo-cult it over
# for no better reason than we need *a* limit, and this one is right there.
SSL_WRITE_BLOCKSIZE = 16384

# This is our equivalent of util.ssl_.DEFAULT_CIPHERS, but expanded out to
# individual cipher suites. We need to do this because this is how
# SecureTransport wants them.
CIPHER_SUITES = [
    SecurityConst.TLS_AES_256_GCM_SHA384,
    SecurityConst.TLS_CHACHA20_POLY1305_SHA256,
    SecurityConst.TLS_AES_128_GCM_SHA256,
    SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,
    SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384,
    SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,
    SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,
    SecurityConst.TLS_DHE_DSS_WITH_AES_256_GCM_SHA384,
    SecurityConst.TLS_DHE_RSA_WITH_AES_256_GCM_SHA384,
    SecurityConst.TLS_DHE_DSS_WITH_AES_128_GCM_SHA256,
    SecurityConst.TLS_DHE_RSA_WITH_AES_128_GCM_SHA256,
    SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384,
    SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384,
    SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA,
    SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA,
    SecurityConst.TLS_DHE_RSA_WITH_AES_256_CBC_SHA256,
    SecurityConst.TLS_DHE_DSS_WITH_AES_256_CBC_SHA256,
    SecurityConst.TLS_DHE_RSA_WITH_AES_256_CBC_SHA,
    SecurityConst.TLS_DHE_DSS_WITH_AES_256_CBC_SHA,
    SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256,
    SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256,
    SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA,
    SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA,
    SecurityConst.TLS_DHE_RSA_WITH_AES_128_CBC_SHA256,
    SecurityConst.TLS_DHE_DSS_WITH_AES_128_CBC_SHA256,
    SecurityConst.TLS_DHE_RSA_WITH_AES_128_CBC_SHA,
    SecurityConst.TLS_DHE_DSS_WITH_AES_128_CBC_SHA,
    SecurityConst.TLS_RSA_WITH_AES_256_GCM_SHA384,
    SecurityConst.TLS_RSA_WITH_AES_128_GCM_SHA256,
    SecurityConst.TLS_RSA_WITH_AES_256_CBC_SHA256,
    SecurityConst.TLS_RSA_WITH_AES_128_CBC_SHA256,
    SecurityConst.TLS_RSA_WITH_AES_256_CBC_SHA,
    SecurityConst.TLS_RSA_WITH_AES_128_CBC_SHA,
]

# Basically this is simple: for PROTOCOL_SSLv23 we turn it into a low of
# TLSv1 and a high of TLSv1.2. For everything else, we pin to that version.
_protocol_to_min_max = {
    ssl.PROTOCOL_SSLv23: (SecurityConst.kTLSProtocol1, SecurityConst.kTLSProtocol12),
}

# Not every PROTOCOL_* constant exists in every Python/OpenSSL build, hence
# the hasattr() guards below.
if hasattr(ssl, "PROTOCOL_SSLv2"):
    _protocol_to_min_max[ssl.PROTOCOL_SSLv2] = (
        SecurityConst.kSSLProtocol2, SecurityConst.kSSLProtocol2
    )
if hasattr(ssl, "PROTOCOL_SSLv3"):
    _protocol_to_min_max[ssl.PROTOCOL_SSLv3] = (
        SecurityConst.kSSLProtocol3, SecurityConst.kSSLProtocol3
    )
if hasattr(ssl, "PROTOCOL_TLSv1"):
    _protocol_to_min_max[ssl.PROTOCOL_TLSv1] = (
        SecurityConst.kTLSProtocol1, SecurityConst.kTLSProtocol1
    )
if hasattr(ssl, "PROTOCOL_TLSv1_1"):
    _protocol_to_min_max[ssl.PROTOCOL_TLSv1_1] = (
        SecurityConst.kTLSProtocol11, SecurityConst.kTLSProtocol11
    )
if hasattr(ssl, "PROTOCOL_TLSv1_2"):
    _protocol_to_min_max[ssl.PROTOCOL_TLSv1_2] = (
        SecurityConst.kTLSProtocol12, SecurityConst.kTLSProtocol12
    )
if hasattr(ssl, "PROTOCOL_TLS"):
    _protocol_to_min_max[ssl.PROTOCOL_TLS] = _protocol_to_min_max[ssl.PROTOCOL_SSLv23]


def inject_into_urllib3():
    """
    Monkey-patch urllib3 so TLS is provided by SecureTransport.

    Swaps ``util.ssl_.SSLContext`` for :class:`SecureTransportContext` and
    flips the SNI / SecureTransport feature flags on both ``util`` and
    ``util.ssl_``.
    """
    util.ssl_.SSLContext = SecureTransportContext
    for module in (util, util.ssl_):
        module.HAS_SNI = HAS_SNI
        module.IS_SECURETRANSPORT = True


def extract_from_urllib3():
    """
    Undo the monkey-patching performed by :func:`inject_into_urllib3`,
    restoring the originals captured at import time.
    """
    util.ssl_.SSLContext = orig_util_SSLContext
    for module in (util, util.ssl_):
        module.HAS_SNI = orig_util_HAS_SNI
        module.IS_SECURETRANSPORT = False


def _read_callback(connection_id, data_buffer, data_length_pointer):
    """
    SecureTransport read callback. This is called by ST to request that data
    be returned from the socket.

    :param connection_id: Handle previously registered in ``_connection_refs``
        identifying the calling :class:`WrappedSocket`.
    :param data_buffer: Address of the C buffer ST wants filled.
    :param data_length_pointer: One-element array; in: bytes requested,
        out: bytes actually read.
    :return: 0 on success, otherwise an ``SecurityConst.errSSL*`` status code.
    """
    wrapped_socket = None
    try:
        wrapped_socket = _connection_refs.get(connection_id)
        if wrapped_socket is None:
            return SecurityConst.errSSLInternal
        base_socket = wrapped_socket.socket

        requested_length = data_length_pointer[0]

        timeout = wrapped_socket.gettimeout()
        error = None
        read_count = 0
        buffer = (ctypes.c_char * requested_length).from_address(data_buffer)
        buffer_view = memoryview(buffer)

        try:
            while read_count < requested_length:
                if timeout is None or timeout >= 0:
                    readables = util.wait_for_read([base_socket], timeout)
                    if not readables:
                        raise socket.error(errno.EAGAIN, 'timed out')

                # We need to tell ctypes that we have a buffer that can be
                # written to. Upsettingly, we do that like this:
                chunk_size = base_socket.recv_into(
                    buffer_view[read_count:requested_length]
                )
                read_count += chunk_size
                if not chunk_size:
                    if not read_count:
                        return SecurityConst.errSSLClosedGraceful
                    break
        except (socket.error) as e:
            error = e.errno

            if error is not None and error != errno.EAGAIN:
                if error == errno.ECONNRESET:
                    return SecurityConst.errSSLClosedAbort
                raise

        data_length_pointer[0] = read_count

        if read_count != requested_length:
            return SecurityConst.errSSLWouldBlock

        return 0
    except Exception as e:
        # A C callback must not let a Python exception escape; stash it on
        # the wrapped socket so _raise_on_error() can re-raise it later.
        if wrapped_socket is not None:
            wrapped_socket._exception = e
        return SecurityConst.errSSLInternal


def _write_callback(connection_id, data_buffer, data_length_pointer):
    """
    SecureTransport write callback. This is called by ST to request that data
    actually be sent on the network.

    :param connection_id: Handle previously registered in ``_connection_refs``
        identifying the calling :class:`WrappedSocket`.
    :param data_buffer: Address of the C buffer holding the bytes to send.
    :param data_length_pointer: One-element array; in: bytes to write,
        out: bytes actually sent.
    :return: 0 on success, otherwise an ``SecurityConst.errSSL*`` status code.
    """
    wrapped_socket = None
    try:
        wrapped_socket = _connection_refs.get(connection_id)
        if wrapped_socket is None:
            return SecurityConst.errSSLInternal
        base_socket = wrapped_socket.socket

        bytes_to_write = data_length_pointer[0]
        data = ctypes.string_at(data_buffer, bytes_to_write)

        timeout = wrapped_socket.gettimeout()
        error = None
        sent = 0

        try:
            while sent < bytes_to_write:
                if timeout is None or timeout >= 0:
                    writables = util.wait_for_write([base_socket], timeout)
                    if not writables:
                        raise socket.error(errno.EAGAIN, 'timed out')
                chunk_sent = base_socket.send(data)
                sent += chunk_sent

                # This has some needless copying here, but I'm not sure there's
                # much value in optimising this data path.
                data = data[chunk_sent:]
        except (socket.error) as e:
            error = e.errno

            if error is not None and error != errno.EAGAIN:
                if error == errno.ECONNRESET:
                    return SecurityConst.errSSLClosedAbort
                raise

        data_length_pointer[0] = sent
        if sent != bytes_to_write:
            return SecurityConst.errSSLWouldBlock

        return 0
    except Exception as e:
        # A C callback must not let a Python exception escape; stash it on
        # the wrapped socket so _raise_on_error() can re-raise it later.
        if wrapped_socket is not None:
            wrapped_socket._exception = e
        return SecurityConst.errSSLInternal


# We need to keep these two object references alive: if they get GC'd while
# in use then SecureTransport could attempt to call a function that is in freed
# memory. That would be...uh...bad. Yeah, that's the word. Bad.
_read_callback_pointer = Security.SSLReadFunc(_read_callback)
_write_callback_pointer = Security.SSLWriteFunc(_write_callback)


class WrappedSocket(object):
    """
    API-compatibility wrapper for Python's OpenSSL wrapped socket object.

    Note: _makefile_refs, _drop(), and _reuse() are needed for the garbage
    collector of PyPy.
    """
    def __init__(self, socket):
        """Wrap *socket*; TLS state itself is created later by handshake()."""
        # NOTE: the parameter shadows the ``socket`` module within this method.
        self.socket = socket
        self.context = None              # SSLContextRef, set up in handshake()
        self._makefile_refs = 0          # PyPy GC bookkeeping (see class note)
        self._closed = False
        self._exception = None           # set by the I/O callbacks on error
        self._keychain = None            # temp keychain when a client cert is used
        self._keychain_dir = None
        self._client_cert_chain = None

        # We save off the previously-configured timeout and then set it to
        # zero. This is done because we use select and friends to handle the
        # timeouts, but if we leave the timeout set on the lower socket then
        # Python will "kindly" call select on that socket again for us. Avoid
        # that by forcing the timeout to zero.
        self._timeout = self.socket.gettimeout()
        self.socket.settimeout(0)

    @contextlib.contextmanager
    def _raise_on_error(self):
        """
        A context manager that can be used to wrap calls that do I/O from
        SecureTransport. If any of the I/O callbacks hit an exception, this
        context manager will correctly propagate the exception after the fact.
        This avoids silently swallowing those exceptions.

        It also correctly forces the socket closed.
        """
        self._exception = None

        # We explicitly don't catch around this yield because in the unlikely
        # event that an exception was hit in the block we don't want to swallow
        # it.
        yield
        if self._exception is not None:
            # The C callbacks can only stash exceptions, never raise them;
            # clear the stash, close the connection, then re-raise here.
            exception, self._exception = self._exception, None
            self.close()
            raise exception

    def _set_ciphers(self):
        """
        Restrict the connection to our fixed cipher-suite list.

        The list mirrors util.ssl_.DEFAULT_CIPHERS as far as macOS supports
        it. It is not configurable at this time, mostly because parsing
        OpenSSL cipher strings would be a freaking nightmare.
        """
        suite_count = len(CIPHER_SUITES)
        suite_array = (Security.SSLCipherSuite * suite_count)(*CIPHER_SUITES)
        status = Security.SSLSetEnabledCiphers(
            self.context, suite_array, suite_count
        )
        _assert_no_error(status)

    def _custom_validate(self, verify, trust_bundle):
        """
        Called when we have set custom validation. We do this in two cases:
        first, when cert validation is entirely disabled; and second, when
        using a custom trust DB.

        :param verify: Whether to validate the peer certificate at all.
        :param trust_bundle: Path to, or PEM contents of, the CA bundle to
            trust instead of the system anchors.
        :raises ssl.SSLError: if the peer's chain cannot be validated.
        """
        # If we disabled cert validation, just say: cool.
        if not verify:
            return

        # We want data in memory, so load it up.
        if os.path.isfile(trust_bundle):
            with open(trust_bundle, 'rb') as f:
                trust_bundle = f.read()

        cert_array = None
        trust = Security.SecTrustRef()

        try:
            # Get a CFArray that contains the certs we want.
            cert_array = _cert_array_from_pem(trust_bundle)

            # Ok, now the hard part. We want to get the SecTrustRef that ST has
            # created for this connection, shove our CAs into it, tell ST to
            # ignore everything else it knows, and then ask if it can build a
            # chain. This is a buuuunch of code.
            result = Security.SSLCopyPeerTrust(
                self.context, ctypes.byref(trust)
            )
            _assert_no_error(result)
            if not trust:
                raise ssl.SSLError("Failed to copy trust reference")

            result = Security.SecTrustSetAnchorCertificates(trust, cert_array)
            _assert_no_error(result)

            result = Security.SecTrustSetAnchorCertificatesOnly(trust, True)
            _assert_no_error(result)

            trust_result = Security.SecTrustResultType()
            result = Security.SecTrustEvaluate(
                trust, ctypes.byref(trust_result)
            )
            _assert_no_error(result)
        finally:
            if trust:
                CoreFoundation.CFRelease(trust)

            # BUG FIX: the original condition was `if cert_array is None`,
            # which both passed NULL to CFRelease and leaked every real
            # CFArray. Release the array only when it actually exists.
            if cert_array is not None:
                CoreFoundation.CFRelease(cert_array)

        # Ok, now we can look at what the result was.
        successes = (
            SecurityConst.kSecTrustResultUnspecified,
            SecurityConst.kSecTrustResultProceed
        )
        if trust_result.value not in successes:
            raise ssl.SSLError(
                "certificate verify failed, error code: %d" %
                trust_result.value
            )

    def handshake(self,
                  server_hostname,
                  verify,
                  trust_bundle,
                  min_version,
                  max_version,
                  client_cert,
                  client_key,
                  client_key_passphrase):
        """
        Actually performs the TLS handshake. This is run automatically by
        wrapped socket, and shouldn't be needed in user code.

        :param server_hostname: Hostname for SNI / peer-name checking;
            ``str`` or ``bytes`` (str is UTF-8 encoded here). May be falsy.
        :param verify: Whether to verify the server certificate.
        :param trust_bundle: Optional custom CA bundle (path or PEM data)
            forwarded to :meth:`_custom_validate`.
        :param min_version: Minimum SecureTransport protocol version constant.
        :param max_version: Maximum SecureTransport protocol version constant.
        :param client_cert: Optional client certificate.
        :param client_key: Optional client key.
        :param client_key_passphrase: Accepted for API symmetry; not used by
            the visible key-loading path here.
        """
        # First, we do the initial bits of connection setup. We need to create
        # a context, set its I/O funcs, and set the connection reference.
        self.context = Security.SSLCreateContext(
            None, SecurityConst.kSSLClientSide, SecurityConst.kSSLStreamType
        )
        result = Security.SSLSetIOFuncs(
            self.context, _read_callback_pointer, _write_callback_pointer
        )
        _assert_no_error(result)

        # Here we need to compute the handle to use. We do this by taking the
        # id of self modulo 2**31 - 1. If this is already in the dictionary, we
        # just keep incrementing by one until we find a free space.
        with _connection_ref_lock:
            handle = id(self) % 2147483647
            while handle in _connection_refs:
                handle = (handle + 1) % 2147483647
            _connection_refs[handle] = self

        result = Security.SSLSetConnection(self.context, handle)
        _assert_no_error(result)

        # If we have a server hostname, we should set that too.
        if server_hostname:
            if not isinstance(server_hostname, bytes):
                server_hostname = server_hostname.encode('utf-8')

            result = Security.SSLSetPeerDomainName(
                self.context, server_hostname, len(server_hostname)
            )
            _assert_no_error(result)

        # Setup the ciphers.
        self._set_ciphers()

        # Set the minimum and maximum TLS versions.
        result = Security.SSLSetProtocolVersionMin(self.context, min_version)
        _assert_no_error(result)
        result = Security.SSLSetProtocolVersionMax(self.context, max_version)
        _assert_no_error(result)

        # If there's a trust DB, we need to use it. We do that by telling
        # SecureTransport to break on server auth. We also do that if we don't
        # want to validate the certs at all: we just won't actually do any
        # authing in that case.
        if not verify or trust_bundle is not None:
            result = Security.SSLSetSessionOption(
                self.context,
                SecurityConst.kSSLSessionOptionBreakOnServerAuth,
                True
            )
            _assert_no_error(result)

        # If there's a client cert, we need to use it.
        if client_cert:
            self._keychain, self._keychain_dir = _temporary_keychain()
            self._client_cert_chain = _load_client_cert_chain(
                self._keychain, client_cert, client_key
            )
            result = Security.SSLSetCertificate(
                self.context, self._client_cert_chain
            )
            _assert_no_error(result)

        # Drive the handshake, pausing to run custom validation when ST
        # breaks on server auth, until it completes or fails.
        while True:
            with self._raise_on_error():
                result = Security.SSLHandshake(self.context)

                if result == SecurityConst.errSSLWouldBlock:
                    raise socket.timeout("handshake timed out")
                elif result == SecurityConst.errSSLServerAuthCompleted:
                    self._custom_validate(verify, trust_bundle)
                    continue
                else:
                    _assert_no_error(result)
                    break

    def fileno(self):
        return self.socket.fileno()

    # Copy-pasted from Python 3.5 source code
    def _decref_socketios(self):
        if self._makefile_refs > 0:
            self._makefile_refs -= 1
        if self._closed:
            self.close()

    def recv(self, bufsiz):
        buffer = ctypes.create_string_buffer(bufsiz)
        bytes_read = self.recv_into(buffer, bufsiz)
        data = buffer[:bytes_read]
        return data

    def recv_into(self, buffer, nbytes=None):
        """
        Read up to *nbytes* (default ``len(buffer)``) decrypted bytes into
        *buffer* and return the number of bytes read.

        Raises socket.timeout when SSLRead would block before any byte was
        read; returns short (possibly 0) on EOF or peer close.
        """
        # Read short on EOF.
        if self._closed:
            return 0

        if nbytes is None:
            nbytes = len(buffer)

        buffer = (ctypes.c_char * nbytes).from_buffer(buffer)
        processed_bytes = ctypes.c_size_t(0)

        with self._raise_on_error():
            result = Security.SSLRead(
                self.context, buffer, nbytes, ctypes.byref(processed_bytes)
            )

        # There are some result codes that we want to treat as "not always
        # errors". Specifically, those are errSSLWouldBlock,
        # errSSLClosedGraceful, and errSSLClosedNoNotify.
        if (result == SecurityConst.errSSLWouldBlock):
            # If we didn't process any bytes, then this was just a time out.
            # However, we can get errSSLWouldBlock in situations when we *did*
            # read some data, and in those cases we should just read "short"
            # and return.
            if processed_bytes.value == 0:
                # Timed out, no data read.
                raise socket.timeout("recv timed out")
        elif result in (SecurityConst.errSSLClosedGraceful, SecurityConst.errSSLClosedNoNotify):
            # The remote peer has closed this connection. We should do so as
            # well. Note that we don't actually return here because in
            # principle this could actually be fired along with return data.
            # It's unlikely though.
            self.close()
        else:
            _assert_no_error(result)

        # Ok, we read and probably succeeded. We should return whatever data
        # was actually read.
        return processed_bytes.value

    def settimeout(self, timeout):
        """Store the timeout used by subsequent reads and writes."""
        self._timeout = timeout

    def gettimeout(self):
        """Return the current timeout value."""
        return self._timeout

    def send(self, data):
        """Write ``data`` to the TLS connection; return the bytes written.

        Raises ``socket.timeout`` when SSLWrite reports errSSLWouldBlock
        with no bytes written at all.
        """
        processed_bytes = ctypes.c_size_t(0)

        with self._raise_on_error():
            result = Security.SSLWrite(
                self.context, data, len(data), ctypes.byref(processed_bytes)
            )

        if result == SecurityConst.errSSLWouldBlock and processed_bytes.value == 0:
            # Timed out
            raise socket.timeout("send timed out")
        else:
            _assert_no_error(result)

        # We sent, and probably succeeded. Tell them how much we sent.
        return processed_bytes.value

    def sendall(self, data):
        """Keep calling send() until every byte of ``data`` has gone out,
        advancing by however many bytes each send() call reports."""
        offset = 0
        while offset < len(data):
            chunk = data[offset:offset + SSL_WRITE_BLOCKSIZE]
            offset += self.send(chunk)

    def shutdown(self):
        """Close the TLS session via SecureTransport's SSLClose."""
        with self._raise_on_error():
            Security.SSLClose(self.context)

    def close(self):
        """Tear down TLS state and close the socket.

        While makefile() handles are outstanding, only the reference count
        is decremented; the actual teardown runs when the last reference
        is dropped.
        """
        # TODO: should I do clean shutdown here? Do I have to?
        if self._makefile_refs < 1:
            self._closed = True
            if self.context:
                CoreFoundation.CFRelease(self.context)
                self.context = None
            if self._client_cert_chain:
                CoreFoundation.CFRelease(self._client_cert_chain)
                self._client_cert_chain = None
            if self._keychain:
                # Delete the temporary keychain (and its backing directory)
                # that was created to hold the client certificate.
                Security.SecKeychainDelete(self._keychain)
                CoreFoundation.CFRelease(self._keychain)
                shutil.rmtree(self._keychain_dir)
                self._keychain = self._keychain_dir = None
            return self.socket.close()
        else:
            self._makefile_refs -= 1

    def getpeercert(self, binary_form=False):
        """Return the peer's leaf certificate as DER-encoded bytes, or None
        if no certificate is available (e.g. before the handshake).

        :raises ValueError: if ``binary_form`` is False; see the comment
            below for why the parsed-dict form is not implemented.
        """
        # Urgh, annoying.
        #
        # Here's how we do this:
        #
        # 1. Call SSLCopyPeerTrust to get hold of the trust object for this
        #    connection.
        # 2. Call SecTrustGetCertificateAtIndex for index 0 to get the leaf.
        # 3. To get the CN, call SecCertificateCopyCommonName and process that
        #    string so that it's of the appropriate type.
        # 4. To get the SAN, we need to do something a bit more complex:
        #    a. Call SecCertificateCopyValues to get the data, requesting
        #       kSecOIDSubjectAltName.
        #    b. Mess about with this dictionary to try to get the SANs out.
        #
        # This is gross. Really gross. It's going to be a few hundred LoC extra
        # just to repeat something that SecureTransport can *already do*. So my
        # operating assumption at this time is that what we want to do is
        # instead to just flag to urllib3 that it shouldn't do its own hostname
        # validation when using SecureTransport.
        if not binary_form:
            raise ValueError(
                "SecureTransport only supports dumping binary certs"
            )
        trust = Security.SecTrustRef()
        certdata = None
        der_bytes = None

        try:
            # Grab the trust store.
            result = Security.SSLCopyPeerTrust(
                self.context, ctypes.byref(trust)
            )
            _assert_no_error(result)
            if not trust:
                # Probably we haven't done the handshake yet. No biggie.
                return None

            cert_count = Security.SecTrustGetCertificateCount(trust)
            if not cert_count:
                # Also a case that might happen if we haven't handshaked.
                # Handshook? Handshaken?
                return None

            leaf = Security.SecTrustGetCertificateAtIndex(trust, 0)
            assert leaf

            # Ok, now we want the DER bytes.
            certdata = Security.SecCertificateCopyData(leaf)
            assert certdata

            # Copy the DER bytes out of the CFData before we release it.
            data_length = CoreFoundation.CFDataGetLength(certdata)
            data_buffer = CoreFoundation.CFDataGetBytePtr(certdata)
            der_bytes = ctypes.string_at(data_buffer, data_length)
        finally:
            # Balance the Copy* calls above so nothing leaks.
            if certdata:
                CoreFoundation.CFRelease(certdata)
            if trust:
                CoreFoundation.CFRelease(trust)

        return der_bytes

    def _reuse(self):
        # urllib3 connection-pool hook: record another holder of this socket.
        self._makefile_refs += 1

    def _drop(self):
        # urllib3 connection-pool hook: release one reference, closing the
        # socket once no makefile() handles remain.
        if self._makefile_refs < 1:
            self.close()
        else:
            self._makefile_refs -= 1


# ``makefile`` is defined at module level (it needs a different signature per
# Python major version) and attached to WrappedSocket below.
if _fileobject:  # Platform-specific: Python 2
    def makefile(self, mode, bufsize=-1):
        # Count the file object so close() defers teardown until the last
        # makefile() handle is dropped.
        self._makefile_refs += 1
        return _fileobject(self, mode, bufsize, close=True)
else:  # Platform-specific: Python 3
    def makefile(self, mode="r", buffering=None, *args, **kwargs):
        # We disable buffering with SecureTransport because it conflicts with
        # the buffering that ST does internally (see issue #1153 for more).
        buffering = 0
        return backport_makefile(self, mode, buffering, *args, **kwargs)

WrappedSocket.makefile = makefile


class SecureTransportContext(object):
    """
    I am a wrapper class for the SecureTransport library, to translate the
    interface of the standard library ``SSLContext`` object to calls into
    SecureTransport.
    """
    def __init__(self, protocol):
        self._min_version, self._max_version = _protocol_to_min_max[protocol]
        self._options = 0
        self._verify = False
        self._trust_bundle = None
        self._client_cert = None
        self._client_key = None
        self._client_key_passphrase = None

    @property
    def check_hostname(self):
        """
        SecureTransport cannot have its hostname checking disabled. For more,
        see the comment on getpeercert() in this file.
        """
        return True

    @check_hostname.setter
    def check_hostname(self, value):
        """
        SecureTransport cannot have its hostname checking disabled. For more,
        see the comment on getpeercert() in this file.
        """
        pass

    @property
    def options(self):
        # TODO: Well, crap.
        #
        # So this is the bit of the code that is the most likely to cause us
        # trouble. Essentially we need to enumerate all of the SSL options that
        # users might want to use and try to see if we can sensibly translate
        # them, or whether we should just ignore them.
        return self._options

    @options.setter
    def options(self, value):
        # TODO: Update in line with above.
        self._options = value

    @property
    def verify_mode(self):
        """Report ssl.CERT_REQUIRED or ssl.CERT_NONE from the verify flag."""
        return ssl.CERT_REQUIRED if self._verify else ssl.CERT_NONE

    @verify_mode.setter
    def verify_mode(self, value):
        # Anything other than CERT_REQUIRED is treated as "no verification".
        self._verify = True if value == ssl.CERT_REQUIRED else False

    def set_default_verify_paths(self):
        # So, this has to do something a bit weird. Specifically, what it does
        # is nothing.
        #
        # This means that, if we had previously had load_verify_locations
        # called, this does not undo that. We need to do that because it turns
        # out that the rest of the urllib3 code will attempt to load the
        # default verify paths if it hasn't been told about any paths, even if
        # the context itself was configured sometime earlier. We resolve that
        # by just ignoring it.
        pass

    def load_default_certs(self):
        return self.set_default_verify_paths()

    def set_ciphers(self, ciphers):
        # For now, we just require the default cipher string.
        if ciphers != util.ssl_.DEFAULT_CIPHERS:
            raise ValueError(
                "SecureTransport doesn't support custom cipher strings"
            )

    def load_verify_locations(self, cafile=None, capath=None, cadata=None):
        """Record the trust anchors used to validate the server certificate.

        :raises ValueError: if ``capath`` is given; SecureTransport has no
            notion of certificate directories.
        """
        # OK, we only really support cadata and cafile.
        if capath is not None:
            raise ValueError(
                "SecureTransport does not support cert directories"
            )

        self._trust_bundle = cafile or cadata

    def load_cert_chain(self, certfile, keyfile=None, password=None):
        """Store client certificate/key details for use at handshake time.

        Nothing is loaded eagerly; wrap_socket() hands these values to the
        WrappedSocket's handshake.
        """
        self._client_cert = certfile
        self._client_key = keyfile
        # Bug fix: this previously assigned to ``self._client_cert_passphrase``,
        # but __init__ and wrap_socket() both use ``_client_key_passphrase``,
        # so any password supplied here was silently dropped.
        self._client_key_passphrase = password

    def wrap_socket(self, sock, server_side=False,
                    do_handshake_on_connect=True, suppress_ragged_eofs=True,
                    server_hostname=None):
        """Wrap ``sock`` in a WrappedSocket and perform the TLS handshake."""
        # So, what do we do here? Firstly, we assert some properties. This is a
        # stripped down shim, so there is some functionality we don't support.
        # See PEP 543 for the real deal.
        assert not server_side
        assert do_handshake_on_connect
        assert suppress_ragged_eofs

        # Ok, we're good to go. Now we want to create the wrapped socket object
        # and store it in the appropriate place.
        wrapped_socket = WrappedSocket(sock)

        # Now we can handshake
        wrapped_socket.handshake(
            server_hostname, self._verify, self._trust_bundle,
            self._min_version, self._max_version, self._client_cert,
            self._client_key, self._client_key_passphrase
        )
        return wrapped_socket
_vendor/urllib3/contrib/socks.py000064400000014063151733136340012722 0ustar00# -*- coding: utf-8 -*-
"""
This module contains provisional support for SOCKS proxies from within
urllib3. This module supports SOCKS4 (specifically the SOCKS4A variant) and
SOCKS5. To enable its functionality, either install PySocks or install this
module with the ``socks`` extra.

The SOCKS implementation supports the full range of urllib3 features. It also
supports the following SOCKS features:

- SOCKS4
- SOCKS4a
- SOCKS5
- Usernames and passwords for the SOCKS proxy

Known Limitations:

- Currently PySocks does not support contacting remote websites via literal
  IPv6 addresses. Any such connection attempt will fail. You must use a domain
  name.
- Currently PySocks does not support IPv6 connections to the SOCKS proxy. Any
  such connection attempt will fail.
"""
from __future__ import absolute_import

try:
    import socks
except ImportError:
    import warnings
    from ..exceptions import DependencyWarning

    warnings.warn((
        'SOCKS support in urllib3 requires the installation of optional '
        'dependencies: specifically, PySocks.  For more information, see '
        'https://urllib3.readthedocs.io/en/latest/contrib.html#socks-proxies'
        ),
        DependencyWarning
    )
    raise

from socket import error as SocketError, timeout as SocketTimeout

from ..connection import (
    HTTPConnection, HTTPSConnection
)
from ..connectionpool import (
    HTTPConnectionPool, HTTPSConnectionPool
)
from ..exceptions import ConnectTimeoutError, NewConnectionError
from ..poolmanager import PoolManager
from ..util.url import parse_url

try:
    import ssl
except ImportError:
    ssl = None


class SOCKSConnection(HTTPConnection):
    """
    A plain-text HTTP connection that connects via a SOCKS proxy.
    """
    def __init__(self, *args, **kwargs):
        # ``_socks_options`` is injected by SOCKSProxyManager through the
        # connection pool kwargs; pop it so HTTPConnection never sees it.
        self._socks_options = kwargs.pop('_socks_options')
        super(SOCKSConnection, self).__init__(*args, **kwargs)

    def _new_conn(self):
        """
        Establish a new connection via the SOCKS proxy.

        :raises ConnectTimeoutError: on connect timeout (direct or reported
            through the proxy).
        :raises NewConnectionError: for any other connection failure.
        """
        extra_kw = {}
        if self.source_address:
            extra_kw['source_address'] = self.source_address

        if self.socket_options:
            extra_kw['socket_options'] = self.socket_options

        try:
            conn = socks.create_connection(
                (self.host, self.port),
                proxy_type=self._socks_options['socks_version'],
                proxy_addr=self._socks_options['proxy_host'],
                proxy_port=self._socks_options['proxy_port'],
                proxy_username=self._socks_options['username'],
                proxy_password=self._socks_options['password'],
                proxy_rdns=self._socks_options['rdns'],
                timeout=self.timeout,
                **extra_kw
            )

        except SocketTimeout as e:
            raise ConnectTimeoutError(
                self, "Connection to %s timed out. (connect timeout=%s)" %
                (self.host, self.timeout))

        except socks.ProxyError as e:
            # This is fragile as hell, but it seems to be the only way to raise
            # useful errors here.
            if e.socket_err:
                # Unwrap the underlying socket error PySocks captured so we
                # can distinguish a timeout from other failures.
                error = e.socket_err
                if isinstance(error, SocketTimeout):
                    raise ConnectTimeoutError(
                        self,
                        "Connection to %s timed out. (connect timeout=%s)" %
                        (self.host, self.timeout)
                    )
                else:
                    raise NewConnectionError(
                        self,
                        "Failed to establish a new connection: %s" % error
                    )
            else:
                raise NewConnectionError(
                    self,
                    "Failed to establish a new connection: %s" % e
                )

        except SocketError as e:  # Defensive: PySocks should catch all these.
            raise NewConnectionError(
                self, "Failed to establish a new connection: %s" % e)

        return conn


# We don't need to duplicate the Verified/Unverified distinction from
# urllib3/connection.py here because the HTTPSConnection will already have been
# correctly set to either the Verified or Unverified form by that module. This
# means the SOCKSHTTPSConnection will automatically be the correct type.
class SOCKSHTTPSConnection(SOCKSConnection, HTTPSConnection):
    """An HTTPS connection whose TCP transport goes through a SOCKS proxy."""
    pass


class SOCKSHTTPConnectionPool(HTTPConnectionPool):
    """HTTP connection pool that creates SOCKSConnection instances."""
    ConnectionCls = SOCKSConnection


class SOCKSHTTPSConnectionPool(HTTPSConnectionPool):
    """HTTPS connection pool that creates SOCKSHTTPSConnection instances."""
    ConnectionCls = SOCKSHTTPSConnection


class SOCKSProxyManager(PoolManager):
    """
    A version of the urllib3 ProxyManager that routes connections via the
    defined SOCKS proxy.
    """
    pool_classes_by_scheme = {
        'http': SOCKSHTTPConnectionPool,
        'https': SOCKSHTTPSConnectionPool,
    }

    def __init__(self, proxy_url, username=None, password=None,
                 num_pools=10, headers=None, **connection_pool_kw):
        parsed = parse_url(proxy_url)

        # Map each supported scheme onto (PySocks proxy type, remote DNS).
        # The "h"/"a" variants resolve hostnames on the proxy side (rdns).
        scheme_map = {
            'socks5': (socks.PROXY_TYPE_SOCKS5, False),
            'socks5h': (socks.PROXY_TYPE_SOCKS5, True),
            'socks4': (socks.PROXY_TYPE_SOCKS4, False),
            'socks4a': (socks.PROXY_TYPE_SOCKS4, True),
        }
        if parsed.scheme not in scheme_map:
            raise ValueError(
                "Unable to determine SOCKS version from %s" % proxy_url
            )
        socks_version, rdns = scheme_map[parsed.scheme]

        self.proxy_url = proxy_url

        # These options are threaded through the pool kwargs down to each
        # SOCKSConnection._new_conn() call.
        socks_options = {
            'socks_version': socks_version,
            'proxy_host': parsed.host,
            'proxy_port': parsed.port,
            'username': username,
            'password': password,
            'rdns': rdns,
        }
        connection_pool_kw['_socks_options'] = socks_options

        super(SOCKSProxyManager, self).__init__(
            num_pools, headers, **connection_pool_kw
        )

        self.pool_classes_by_scheme = SOCKSProxyManager.pool_classes_by_scheme
_vendor/urllib3/contrib/appengine.py000064400000025161151733136340013547 0ustar00"""
This module provides a pool manager that uses Google App Engine's
`URLFetch Service <https://cloud.google.com/appengine/docs/python/urlfetch>`_.

Example usage::

    from urllib3 import PoolManager
    from urllib3.contrib.appengine import AppEngineManager, is_appengine_sandbox

    if is_appengine_sandbox():
        # AppEngineManager uses AppEngine's URLFetch API behind the scenes
        http = AppEngineManager()
    else:
        # PoolManager uses a socket-level API behind the scenes
        http = PoolManager()

    r = http.request('GET', 'https://google.com/')

There are `limitations <https://cloud.google.com/appengine/docs/python/\
urlfetch/#Python_Quotas_and_limits>`_ to the URLFetch service and it may not be
the best choice for your application. There are three options for using
urllib3 on Google App Engine:

1. You can use :class:`AppEngineManager` with URLFetch. URLFetch is
   cost-effective in many circumstances as long as your usage is within the
   limitations.
2. You can use a normal :class:`~urllib3.PoolManager` by enabling sockets.
   Sockets also have `limitations and restrictions
   <https://cloud.google.com/appengine/docs/python/sockets/\
   #limitations-and-restrictions>`_ and have a lower free quota than URLFetch.
   To use sockets, be sure to specify the following in your ``app.yaml``::

        env_variables:
            GAE_USE_SOCKETS_HTTPLIB : 'true'

3. If you are using `App Engine Flexible
<https://cloud.google.com/appengine/docs/flexible/>`_, you can use the standard
:class:`PoolManager` without any configuration or special environment variables.
"""

from __future__ import absolute_import
import logging
import os
import warnings
from ..packages.six.moves.urllib.parse import urljoin

from ..exceptions import (
    HTTPError,
    HTTPWarning,
    MaxRetryError,
    ProtocolError,
    TimeoutError,
    SSLError
)

from ..packages.six import BytesIO
from ..request import RequestMethods
from ..response import HTTPResponse
from ..util.timeout import Timeout
from ..util.retry import Retry

try:
    from google.appengine.api import urlfetch
except ImportError:
    urlfetch = None


log = logging.getLogger(__name__)


class AppEnginePlatformWarning(HTTPWarning):
    """Warning category for App Engine / URLFetch platform notices."""
    pass


class AppEnginePlatformError(HTTPError):
    """Raised when URLFetch cannot satisfy a request (unavailable, request
    or response too large, or an unsupported HTTP method)."""
    pass


class AppEngineManager(RequestMethods):
    """
    Connection manager for Google App Engine sandbox applications.

    This manager uses the URLFetch service directly instead of using the
    emulated httplib, and is subject to URLFetch limitations as described in
    the App Engine documentation `here
    <https://cloud.google.com/appengine/docs/python/urlfetch>`_.

    Notably it will raise an :class:`AppEnginePlatformError` if:
        * URLFetch is not available.
        * If you attempt to use this on App Engine Flexible, as full socket
          support is available.
        * If a request size is more than 10 megabytes.
        * If a response size is more than 32 megabytes.
        * If you use an unsupported request method such as OPTIONS.

    Beyond those cases, it will raise normal urllib3 errors.
    """

    def __init__(self, headers=None, retries=None, validate_certificate=True,
                 urlfetch_retries=True):
        if not urlfetch:
            raise AppEnginePlatformError(
                "URLFetch is not available in this environment.")

        if is_prod_appengine_mvms():
            # Fixed missing space in the implicitly-concatenated message
            # (previously rendered as "...AppEngineManageron Managed VMs...").
            raise AppEnginePlatformError(
                "Use normal urllib3.PoolManager instead of AppEngineManager "
                "on Managed VMs, as using URLFetch is not necessary in "
                "this environment.")

        warnings.warn(
            "urllib3 is using URLFetch on Google App Engine sandbox instead "
            "of sockets. To use sockets directly instead of URLFetch see "
            "https://urllib3.readthedocs.io/en/latest/reference/urllib3.contrib.html.",
            AppEnginePlatformWarning)

        RequestMethods.__init__(self, headers)
        self.validate_certificate = validate_certificate
        self.urlfetch_retries = urlfetch_retries

        self.retries = retries or Retry.DEFAULT

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Return False to re-raise any potential exceptions
        return False

    def urlopen(self, method, url, body=None, headers=None,
                retries=None, redirect=True, timeout=Timeout.DEFAULT_TIMEOUT,
                **response_kw):
        """Issue the request through URLFetch, translating URLFetch errors
        into urllib3 exceptions and handling redirects/retries here when
        URLFetch's own redirect following is not used."""

        retries = self._get_retries(retries, redirect)

        try:
            # Only let URLFetch follow redirects itself when the retry
            # configuration permits redirects at all.
            follow_redirects = (
                    redirect and
                    retries.redirect != 0 and
                    retries.total)
            response = urlfetch.fetch(
                url,
                payload=body,
                method=method,
                headers=headers or {},
                allow_truncated=False,
                follow_redirects=self.urlfetch_retries and follow_redirects,
                deadline=self._get_absolute_timeout(timeout),
                validate_certificate=self.validate_certificate,
            )
        except urlfetch.DeadlineExceededError as e:
            raise TimeoutError(self, e)

        except urlfetch.InvalidURLError as e:
            # URLFetch signals an over-size request body with this error.
            if 'too large' in str(e):
                raise AppEnginePlatformError(
                    "URLFetch request too large, URLFetch only "
                    "supports requests up to 10mb in size.", e)
            raise ProtocolError(e)

        except urlfetch.DownloadError as e:
            if 'Too many redirects' in str(e):
                raise MaxRetryError(self, url, reason=e)
            raise ProtocolError(e)

        except urlfetch.ResponseTooLargeError as e:
            # Fixed missing space in the implicitly-concatenated message
            # (previously rendered as "...only supportsresponses...").
            raise AppEnginePlatformError(
                "URLFetch response too large, URLFetch only supports "
                "responses up to 32mb in size.", e)

        except urlfetch.SSLCertificateError as e:
            raise SSLError(e)

        except urlfetch.InvalidMethodError as e:
            raise AppEnginePlatformError(
                "URLFetch does not support method: %s" % method, e)

        http_response = self._urlfetch_response_to_http_response(
            response, retries=retries, **response_kw)

        # Handle redirect?
        redirect_location = redirect and http_response.get_redirect_location()
        if redirect_location:
            # Check for redirect response
            if (self.urlfetch_retries and retries.raise_on_redirect):
                raise MaxRetryError(self, url, "too many redirects")
            else:
                if http_response.status == 303:
                    # 303 See Other: the follow-up request must be a GET.
                    method = 'GET'

                try:
                    retries = retries.increment(method, url, response=http_response, _pool=self)
                except MaxRetryError:
                    if retries.raise_on_redirect:
                        raise MaxRetryError(self, url, "too many redirects")
                    return http_response

                retries.sleep_for_retry(http_response)
                log.debug("Redirecting %s -> %s", url, redirect_location)
                redirect_url = urljoin(url, redirect_location)
                return self.urlopen(
                    method, redirect_url, body, headers,
                    retries=retries, redirect=redirect,
                    timeout=timeout, **response_kw)

        # Check if we should retry the HTTP response.
        has_retry_after = bool(http_response.getheader('Retry-After'))
        if retries.is_retry(method, http_response.status, has_retry_after):
            retries = retries.increment(
                method, url, response=http_response, _pool=self)
            log.debug("Retry: %s", url)
            retries.sleep(http_response)
            return self.urlopen(
                method, url,
                body=body, headers=headers,
                retries=retries, redirect=redirect,
                timeout=timeout, **response_kw)

        return http_response

    def _urlfetch_response_to_http_response(self, urlfetch_resp, **response_kw):
        """Adapt a URLFetch response object into an urllib3 HTTPResponse."""

        if is_prod_appengine():
            # Production GAE handles deflate encoding automatically, but does
            # not remove the encoding header.
            content_encoding = urlfetch_resp.headers.get('content-encoding')

            if content_encoding == 'deflate':
                del urlfetch_resp.headers['content-encoding']

        transfer_encoding = urlfetch_resp.headers.get('transfer-encoding')
        # We have a full response's content,
        # so let's make sure we don't report ourselves as chunked data.
        if transfer_encoding == 'chunked':
            encodings = transfer_encoding.split(",")
            encodings.remove('chunked')
            urlfetch_resp.headers['transfer-encoding'] = ','.join(encodings)

        return HTTPResponse(
            # In order for decoding to work, we must present the content as
            # a file-like object.
            body=BytesIO(urlfetch_resp.content),
            headers=urlfetch_resp.headers,
            status=urlfetch_resp.status_code,
            **response_kw
        )

    def _get_absolute_timeout(self, timeout):
        """Collapse an urllib3 Timeout into the single deadline URLFetch
        accepts, warning when granular settings must be discarded."""
        if timeout is Timeout.DEFAULT_TIMEOUT:
            return None  # Defer to URLFetch's default.
        if isinstance(timeout, Timeout):
            if timeout._read is not None or timeout._connect is not None:
                warnings.warn(
                    "URLFetch does not support granular timeout settings, "
                    "reverting to total or default URLFetch timeout.",
                    AppEnginePlatformWarning)
            return timeout.total
        return timeout

    def _get_retries(self, retries, redirect):
        """Normalize ``retries`` to a Retry object, warning about retry
        options URLFetch cannot honour."""
        if not isinstance(retries, Retry):
            retries = Retry.from_int(
                retries, redirect=redirect, default=self.retries)

        if retries.connect or retries.read or retries.redirect:
            warnings.warn(
                "URLFetch only supports total retries and does not "
                "recognize connect, read, or redirect retry parameters.",
                AppEnginePlatformWarning)

        return retries


def is_appengine():
    """Return True when running in any App Engine environment (local dev
    server, production standard, or a production Managed VM)."""
    return (is_local_appengine() or
            is_prod_appengine() or
            is_prod_appengine_mvms())


def is_appengine_sandbox():
    """Return True when on App Engine but not on a Managed VM."""
    return is_appengine() and not is_prod_appengine_mvms()


def is_local_appengine():
    """Return True when running under the local App Engine dev server.

    Uses ``os.environ.get`` for SERVER_SOFTWARE so that a set
    APPENGINE_RUNTIME with a missing SERVER_SOFTWARE cannot raise KeyError.
    """
    return ('APPENGINE_RUNTIME' in os.environ and
            'Development/' in os.environ.get('SERVER_SOFTWARE', ''))


def is_prod_appengine():
    """Return True when running on production App Engine (standard env,
    not a Managed VM).

    Uses ``os.environ.get`` for SERVER_SOFTWARE so that a set
    APPENGINE_RUNTIME with a missing SERVER_SOFTWARE cannot raise KeyError.
    """
    return ('APPENGINE_RUNTIME' in os.environ and
            'Google App Engine/' in os.environ.get('SERVER_SOFTWARE', '') and
            not is_prod_appengine_mvms())


def is_prod_appengine_mvms():
    """Return True on a production Managed VM."""
    # The runtime sets GAE_VM to the literal string 'true' on Managed VMs.
    gae_vm = os.environ.get('GAE_VM', False)
    return gae_vm == 'true'
_vendor/urllib3/contrib/__init__.py000064400000000000151733136340013321 0ustar00_vendor/urllib3/contrib/pyopenssl.py000064400000035772151733136340013646 0ustar00"""
SSL with SNI_-support for Python 2. Follow these instructions if you would
like to verify SSL certificates in Python 2. Note, the default libraries do
*not* do certificate checking; you need to do additional work to validate
certificates yourself.

This needs the following packages installed:

* pyOpenSSL (tested with 16.0.0)
* cryptography (minimum 1.3.4, from pyopenssl)
* idna (minimum 2.0, from cryptography)

However, pyopenssl depends on cryptography, which depends on idna, so while we
use all three directly here we end up having relatively few packages required.

You can install them with the following command:

    pip install pyopenssl cryptography idna

To activate certificate checking, call
:func:`~urllib3.contrib.pyopenssl.inject_into_urllib3` from your Python code
before you begin making HTTP requests. This can be done in a ``sitecustomize``
module, or at any other time before your application begins using ``urllib3``,
like this::

    try:
        import urllib3.contrib.pyopenssl
        urllib3.contrib.pyopenssl.inject_into_urllib3()
    except ImportError:
        pass

Now you can use :mod:`urllib3` as you normally would, and it will support SNI
when the required modules are installed.

Activating this module also has the positive side effect of disabling SSL/TLS
compression in Python 2 (see `CRIME attack`_).

If you want to configure the default list of supported cipher suites, you can
set the ``urllib3.contrib.pyopenssl.DEFAULT_SSL_CIPHER_LIST`` variable.

.. _sni: https://en.wikipedia.org/wiki/Server_Name_Indication
.. _crime attack: https://en.wikipedia.org/wiki/CRIME_(security_exploit)
"""
from __future__ import absolute_import

import OpenSSL.SSL
from cryptography import x509
from cryptography.hazmat.backends.openssl import backend as openssl_backend
from cryptography.hazmat.backends.openssl.x509 import _Certificate

from socket import timeout, error as SocketError
from io import BytesIO

try:  # Platform-specific: Python 2
    from socket import _fileobject
except ImportError:  # Platform-specific: Python 3
    _fileobject = None
    from ..packages.backports.makefile import backport_makefile

import logging
import ssl
from ..packages import six
import sys

from .. import util

__all__ = ['inject_into_urllib3', 'extract_from_urllib3']

# SNI always works.
HAS_SNI = True

# Map from urllib3 to PyOpenSSL compatible parameter-values.
_openssl_versions = {
    ssl.PROTOCOL_SSLv23: OpenSSL.SSL.SSLv23_METHOD,
    ssl.PROTOCOL_TLSv1: OpenSSL.SSL.TLSv1_METHOD,
}

# TLSv1.1/1.2 entries are added only when both the stdlib ssl module and
# PyOpenSSL expose the corresponding constants.
if hasattr(ssl, 'PROTOCOL_TLSv1_1') and hasattr(OpenSSL.SSL, 'TLSv1_1_METHOD'):
    _openssl_versions[ssl.PROTOCOL_TLSv1_1] = OpenSSL.SSL.TLSv1_1_METHOD

if hasattr(ssl, 'PROTOCOL_TLSv1_2') and hasattr(OpenSSL.SSL, 'TLSv1_2_METHOD'):
    _openssl_versions[ssl.PROTOCOL_TLSv1_2] = OpenSSL.SSL.TLSv1_2_METHOD

try:
    _openssl_versions.update({ssl.PROTOCOL_SSLv3: OpenSSL.SSL.SSLv3_METHOD})
except AttributeError:
    # PROTOCOL_SSLv3 may be missing from the local ssl module.
    pass

_stdlib_to_openssl_verify = {
    ssl.CERT_NONE: OpenSSL.SSL.VERIFY_NONE,
    ssl.CERT_OPTIONAL: OpenSSL.SSL.VERIFY_PEER,
    ssl.CERT_REQUIRED:
        OpenSSL.SSL.VERIFY_PEER + OpenSSL.SSL.VERIFY_FAIL_IF_NO_PEER_CERT,
}
# Inverse mapping of the above.
_openssl_to_stdlib_verify = dict(
    (v, k) for k, v in _stdlib_to_openssl_verify.items()
)

# OpenSSL will only write 16K at a time
SSL_WRITE_BLOCKSIZE = 16384

# Saved originals so extract_from_urllib3() can undo the monkey-patching
# performed by inject_into_urllib3().
orig_util_HAS_SNI = util.HAS_SNI
orig_util_SSLContext = util.ssl_.SSLContext


log = logging.getLogger(__name__)


def inject_into_urllib3():
    'Monkey-patch urllib3 with PyOpenSSL-backed SSL-support.'

    _validate_dependencies_met()

    # Swap in the PyOpenSSL context class and mirror the capability flags
    # on both the util package and its ssl_ submodule.
    util.ssl_.SSLContext = PyOpenSSLContext
    util.HAS_SNI = HAS_SNI
    util.ssl_.HAS_SNI = HAS_SNI
    util.IS_PYOPENSSL = True
    util.ssl_.IS_PYOPENSSL = True


def extract_from_urllib3():
    'Undo monkey-patching by :func:`inject_into_urllib3`.'

    # Restore the originals saved at module import time.
    util.ssl_.SSLContext = orig_util_SSLContext
    util.HAS_SNI = orig_util_HAS_SNI
    util.ssl_.HAS_SNI = orig_util_HAS_SNI
    util.IS_PYOPENSSL = False
    util.ssl_.IS_PYOPENSSL = False


def _validate_dependencies_met():
    """
    Verify that pyOpenSSL's package-level dependencies are satisfied.

    Raises `ImportError` when either ``cryptography`` or ``pyOpenSSL`` is
    too old to provide the functionality this module needs.
    """
    # `Extensions.get_extension_for_class` first appeared in cryptography 1.1.
    from cryptography.x509.extensions import Extensions
    if getattr(Extensions, "get_extension_for_class", None) is None:
        raise ImportError("'cryptography' module missing required functionality.  "
                          "Try upgrading to v1.3.4 or newer.")

    # pyOpenSSL 0.14+ is backed by cryptography and exposes the private
    # `_x509` handle; older releases do not.
    from OpenSSL.crypto import X509
    if getattr(X509(), "_x509", None) is None:
        raise ImportError("'pyOpenSSL' module missing required functionality. "
                          "Try upgrading to v0.14 or newer.")


def _dnsname_to_stdlib(name):
    """
    Converts a dNSName SubjectAlternativeName field to the form used by the
    standard library on the given Python version.

    Cryptography produces a dNSName as a unicode string that was idna-decoded
    from ASCII bytes. We need to idna-encode that string to get it back, and
    then on Python 3 we also need to convert to unicode via UTF-8 (the stdlib
    uses PyUnicode_FromStringAndSize on it, which decodes via UTF-8).
    """
    def idna_encode(name):
        """
        Borrowed wholesale from the Python Cryptography Project. It turns out
        that we can't just safely call `idna.encode`: it can explode for
        wildcard names. This avoids that problem.
        """
        import idna

        # Strip a leading wildcard or dot before encoding, then glue the
        # (ASCII-only) prefix back on.
        prefix = next((p for p in (u'*.', u'.') if name.startswith(p)), None)
        if prefix is not None:
            return prefix.encode('ascii') + idna.encode(name[len(prefix):])
        return idna.encode(name)

    encoded = idna_encode(name)
    if sys.version_info >= (3, 0):
        encoded = encoded.decode('utf-8')
    return encoded


def get_subj_alt_name(peer_cert):
    """
    Given an PyOpenSSL certificate, provides all the subject alternative names.
    """
    # Hand the certificate to cryptography, whose x509 extension API is far
    # more convenient than pyOpenSSL's.
    if hasattr(peer_cert, "to_cryptography"):
        cert = peer_cert.to_cryptography()
    else:
        # This is technically using private APIs, but should work across all
        # relevant versions before PyOpenSSL got a proper API for this.
        cert = _Certificate(openssl_backend, peer_cert._x509)

    # Ask cryptography to locate the SAN extension for us (faster than
    # looping over extensions in Python).
    try:
        ext = cert.extensions.get_extension_for_class(
            x509.SubjectAlternativeName
        ).value
    except x509.ExtensionNotFound:
        # No such extension, return the empty list.
        return []
    except (x509.DuplicateExtension, x509.UnsupportedExtension,
            x509.UnsupportedGeneralNameType, UnicodeError) as e:
        # A problem has been found with the quality of the certificate. Assume
        # no SAN field is present.
        log.warning(
            "A problem was encountered with the certificate that prevented "
            "urllib3 from finding the SubjectAlternativeName field. This can "
            "affect certificate validation. The error was %s",
            e,
        )
        return []

    # Return dNSName and iPAddress fields. dNSNames must be idna-encoded
    # (and UTF-8 decoded on Python 3) to match what the standard library does
    # with certificates; IPs are stringified because match_hostname wants
    # strings.
    dns_names = [
        ('DNS', _dnsname_to_stdlib(dns))
        for dns in ext.get_values_for_type(x509.DNSName)
    ]
    ip_names = [
        ('IP Address', str(ip))
        for ip in ext.get_values_for_type(x509.IPAddress)
    ]
    return dns_names + ip_names


class WrappedSocket(object):
    '''API-compatibility wrapper for Python OpenSSL's Connection-class.

    Note: _makefile_refs, _drop() and _reuse() are needed for the garbage
    collector of pypy.
    '''

    def __init__(self, connection, socket, suppress_ragged_eofs=True):
        # connection: an OpenSSL.SSL.Connection wrapping `socket`.
        # socket: the underlying plain socket, kept for fileno()/timeout ops.
        # suppress_ragged_eofs: when True, an abrupt "Unexpected EOF" from
        # the peer is reported as a clean b'' instead of an error.
        self.connection = connection
        self.socket = socket
        self.suppress_ragged_eofs = suppress_ragged_eofs
        # Number of live file objects created via makefile(); close() is
        # deferred until this drops below 1.
        self._makefile_refs = 0
        self._closed = False

    def fileno(self):
        # Delegate to the raw socket so select()/poll() work on the wrapper.
        return self.socket.fileno()

    # Copy-pasted from Python 3.5 source code
    def _decref_socketios(self):
        # Called when a makefile()-produced file object is closed: drop one
        # reference and finish the deferred close if close() already ran.
        if self._makefile_refs > 0:
            self._makefile_refs -= 1
        if self._closed:
            self.close()

    def recv(self, *args, **kwargs):
        """Read decrypted data, translating OpenSSL errors to socket semantics."""
        try:
            data = self.connection.recv(*args, **kwargs)
        except OpenSSL.SSL.SysCallError as e:
            # A ragged EOF (peer vanished without close_notify) is treated as
            # a normal EOF when suppress_ragged_eofs is set.
            if self.suppress_ragged_eofs and e.args == (-1, 'Unexpected EOF'):
                return b''
            else:
                # NOTE(review): SocketError is imported above this chunk --
                # presumably an alias of socket.error; confirm at file top.
                raise SocketError(str(e))
        except OpenSSL.SSL.ZeroReturnError as e:
            # Clean TLS shutdown from the peer reads as EOF.
            if self.connection.get_shutdown() == OpenSSL.SSL.RECEIVED_SHUTDOWN:
                return b''
            else:
                raise
        except OpenSSL.SSL.WantReadError:
            # No TLS record ready yet: wait (respecting the socket timeout)
            # and retry via recursion.
            rd = util.wait_for_read(self.socket, self.socket.gettimeout())
            if not rd:
                raise timeout('The read operation timed out')
            else:
                return self.recv(*args, **kwargs)
        else:
            return data

    def recv_into(self, *args, **kwargs):
        """Like recv(), but reads into a caller-supplied buffer; returns byte count."""
        try:
            return self.connection.recv_into(*args, **kwargs)
        except OpenSSL.SSL.SysCallError as e:
            if self.suppress_ragged_eofs and e.args == (-1, 'Unexpected EOF'):
                return 0
            else:
                raise SocketError(str(e))
        except OpenSSL.SSL.ZeroReturnError as e:
            if self.connection.get_shutdown() == OpenSSL.SSL.RECEIVED_SHUTDOWN:
                return 0
            else:
                raise
        except OpenSSL.SSL.WantReadError:
            rd = util.wait_for_read(self.socket, self.socket.gettimeout())
            if not rd:
                raise timeout('The read operation timed out')
            else:
                return self.recv_into(*args, **kwargs)

    def settimeout(self, timeout):
        # Timeouts live on the raw socket; wait_for_read/write consult it.
        return self.socket.settimeout(timeout)

    def _send_until_done(self, data):
        # Retry connection.send() until OpenSSL accepts the write, waiting
        # for socket writability on WantWriteError. Returns bytes sent.
        while True:
            try:
                return self.connection.send(data)
            except OpenSSL.SSL.WantWriteError:
                wr = util.wait_for_write(self.socket, self.socket.gettimeout())
                if not wr:
                    raise timeout()
                continue
            except OpenSSL.SSL.SysCallError as e:
                raise SocketError(str(e))

    def sendall(self, data):
        # OpenSSL only writes 16K at a time (SSL_WRITE_BLOCKSIZE), so chunk
        # the payload and loop until everything is flushed.
        total_sent = 0
        while total_sent < len(data):
            sent = self._send_until_done(data[total_sent:total_sent + SSL_WRITE_BLOCKSIZE])
            total_sent += sent

    def shutdown(self):
        # FIXME rethrow compatible exceptions should we ever use this
        self.connection.shutdown()

    def close(self):
        # Only actually close once no makefile() file objects remain;
        # otherwise just drop one reference (pypy GC support).
        if self._makefile_refs < 1:
            try:
                self._closed = True
                return self.connection.close()
            except OpenSSL.SSL.Error:
                return
        else:
            self._makefile_refs -= 1

    def getpeercert(self, binary_form=False):
        """Return the peer cert in the stdlib ssl.getpeercert() format."""
        x509 = self.connection.get_peer_certificate()

        if not x509:
            return x509

        if binary_form:
            # DER bytes, matching ssl's binary_form=True behavior.
            return OpenSSL.crypto.dump_certificate(
                OpenSSL.crypto.FILETYPE_ASN1,
                x509)

        # Minimal dict: only commonName and subjectAltName are populated,
        # which is all hostname matching needs.
        return {
            'subject': (
                (('commonName', x509.get_subject().CN),),
            ),
            'subjectAltName': get_subj_alt_name(x509)
        }

    def _reuse(self):
        # pypy GC hook: another holder now shares this socket.
        self._makefile_refs += 1

    def _drop(self):
        # pypy GC hook: release one holder, closing when none remain.
        if self._makefile_refs < 1:
            self.close()
        else:
            self._makefile_refs -= 1


# Attach a platform-appropriate makefile() implementation to WrappedSocket:
# Python 2 has socket._fileobject; Python 3 uses the vendored backport.
if _fileobject:  # Platform-specific: Python 2
    def makefile(self, mode, bufsize=-1):
        # Bump the refcount so WrappedSocket.close() is deferred until the
        # file object created here is closed too.
        self._makefile_refs += 1
        return _fileobject(self, mode, bufsize, close=True)
else:  # Platform-specific: Python 3
    makefile = backport_makefile

WrappedSocket.makefile = makefile


class PyOpenSSLContext(object):
    """
    I am a wrapper class for the PyOpenSSL ``Context`` object. I am responsible
    for translating the interface of the standard library ``SSLContext`` object
    to calls into PyOpenSSL.
    """
    def __init__(self, protocol):
        # Translate the stdlib PROTOCOL_* constant to an OpenSSL *_METHOD;
        # raises KeyError for protocols this build does not support.
        self.protocol = _openssl_versions[protocol]
        self._ctx = OpenSSL.SSL.Context(self.protocol)
        self._options = 0
        self.check_hostname = False

    @property
    def options(self):
        """Bitmask of OP_* options applied to the context."""
        return self._options

    @options.setter
    def options(self, value):
        self._options = value
        self._ctx.set_options(value)

    @property
    def verify_mode(self):
        """Certificate verification mode, in stdlib CERT_* terms."""
        return _openssl_to_stdlib_verify[self._ctx.get_verify_mode()]

    @verify_mode.setter
    def verify_mode(self, value):
        self._ctx.set_verify(
            _stdlib_to_openssl_verify[value],
            _verify_callback
        )

    def set_default_verify_paths(self):
        self._ctx.set_default_verify_paths()

    def set_ciphers(self, ciphers):
        # pyOpenSSL wants the cipher string as bytes.
        if isinstance(ciphers, six.text_type):
            ciphers = ciphers.encode('utf-8')
        self._ctx.set_cipher_list(ciphers)

    def load_verify_locations(self, cafile=None, capath=None, cadata=None):
        """Load CA certificates from a file, directory, and/or PEM bytes."""
        if cafile is not None:
            cafile = cafile.encode('utf-8')
        if capath is not None:
            capath = capath.encode('utf-8')
        self._ctx.load_verify_locations(cafile, capath)
        if cadata is not None:
            self._ctx.load_verify_locations(BytesIO(cadata))

    def load_cert_chain(self, certfile, keyfile=None, password=None):
        """Load a client certificate and (optionally encrypted) private key."""
        self._ctx.use_certificate_file(certfile)
        if password is not None:
            # OpenSSL's passphrase callback must return bytes; a text
            # password would otherwise make key decryption fail.
            if not isinstance(password, six.binary_type):
                password = password.encode('utf-8')
            self._ctx.set_passwd_cb(lambda max_length, prompt_twice, userdata: password)
        self._ctx.use_privatekey_file(keyfile or certfile)

    def wrap_socket(self, sock, server_side=False,
                    do_handshake_on_connect=True, suppress_ragged_eofs=True,
                    server_hostname=None):
        """Perform the TLS handshake over ``sock`` and return a WrappedSocket."""
        cnx = OpenSSL.SSL.Connection(self._ctx, sock)

        if isinstance(server_hostname, six.text_type):  # Platform-specific: Python 3
            server_hostname = server_hostname.encode('utf-8')

        if server_hostname is not None:
            # Send the hostname in the SNI extension.
            cnx.set_tlsext_host_name(server_hostname)

        cnx.set_connect_state()

        # Drive the handshake to completion, waiting for the socket to become
        # readable whenever OpenSSL needs more data.
        while True:
            try:
                cnx.do_handshake()
            except OpenSSL.SSL.WantReadError:
                rd = util.wait_for_read(sock, sock.gettimeout())
                if not rd:
                    raise timeout('select timed out')
                continue
            except OpenSSL.SSL.Error as e:
                raise ssl.SSLError('bad handshake: %r' % e)
            break

        return WrappedSocket(cnx, sock, suppress_ragged_eofs)


def _verify_callback(cnx, x509, err_no, err_depth, return_code):
    return err_no == 0
_vendor/urllib3/contrib/ntlmpool.py000064400000010576151733136340013451 0ustar00"""
NTLM authenticating pool, contributed by erikcederstran

Issue #10, see: http://code.google.com/p/urllib3/issues/detail?id=10
"""
from __future__ import absolute_import

from logging import getLogger
from ntlm import ntlm

from .. import HTTPSConnectionPool
from ..packages.six.moves.http_client import HTTPSConnection


log = getLogger(__name__)


class NTLMConnectionPool(HTTPSConnectionPool):
    """
    Implements an NTLM authentication version of an urllib3 connection pool
    """

    scheme = 'https'

    def __init__(self, user, pw, authurl, *args, **kwargs):
        """
        authurl is a random URL on the server that is protected by NTLM.
        user is the Windows user, probably in the DOMAIN\\username format.
        pw is the password for the user.
        """
        super(NTLMConnectionPool, self).__init__(*args, **kwargs)
        self.authurl = authurl
        self.rawuser = user
        # Split "DOMAIN\user" once; domain is upper-cased as NTLM expects.
        # NOTE(review): a user string without a backslash raises IndexError
        # here -- presumably the DOMAIN part is mandatory.
        user_parts = user.split('\\', 1)
        self.domain = user_parts[0].upper()
        self.user = user_parts[1]
        self.pw = pw

    def _new_conn(self):
        # Performs the NTLM handshake that secures the connection. The socket
        # must be kept open while requests are performed.
        self.num_connections += 1
        log.debug('Starting NTLM HTTPS connection no. %d: https://%s%s',
                  self.num_connections, self.host, self.authurl)

        headers = {}
        headers['Connection'] = 'Keep-Alive'
        req_header = 'Authorization'
        resp_header = 'www-authenticate'

        conn = HTTPSConnection(host=self.host, port=self.port)

        # Send negotiation message
        headers[req_header] = (
            'NTLM %s' % ntlm.create_NTLM_NEGOTIATE_MESSAGE(self.rawuser))
        log.debug('Request headers: %s', headers)
        conn.request('GET', self.authurl, None, headers)
        res = conn.getresponse()
        reshdr = dict(res.getheaders())
        log.debug('Response status: %s %s', res.status, res.reason)
        log.debug('Response headers: %s', reshdr)
        log.debug('Response data: %s [...]', res.read(100))

        # Remove the reference to the socket, so that it can not be closed by
        # the response object (we want to keep the socket open)
        res.fp = None

        # Server should respond with a challenge message
        auth_header_values = reshdr[resp_header].split(', ')
        auth_header_value = None
        for s in auth_header_values:
            if s[:5] == 'NTLM ':
                auth_header_value = s[5:]
        if auth_header_value is None:
            raise Exception('Unexpected %s response header: %s' %
                            (resp_header, reshdr[resp_header]))

        # Send authentication message
        ServerChallenge, NegotiateFlags = \
            ntlm.parse_NTLM_CHALLENGE_MESSAGE(auth_header_value)
        auth_msg = ntlm.create_NTLM_AUTHENTICATE_MESSAGE(ServerChallenge,
                                                         self.user,
                                                         self.domain,
                                                         self.pw,
                                                         NegotiateFlags)
        headers[req_header] = 'NTLM %s' % auth_msg
        log.debug('Request headers: %s', headers)
        conn.request('GET', self.authurl, None, headers)
        res = conn.getresponse()
        log.debug('Response status: %s %s', res.status, res.reason)
        log.debug('Response headers: %s', dict(res.getheaders()))
        log.debug('Response data: %s [...]', res.read()[:100])
        if res.status != 200:
            if res.status == 401:
                raise Exception('Server rejected request: wrong '
                                'username or password')
            raise Exception('Wrong server response: %s %s' %
                            (res.status, res.reason))

        # Again detach the socket so closing the response keeps it alive.
        res.fp = None
        log.debug('Connection established')
        return conn

    def urlopen(self, method, url, body=None, headers=None, retries=3,
                redirect=True, assert_same_host=True):
        # Force Keep-Alive: the NTLM-authenticated state is bound to the
        # underlying TCP connection and would be lost if it closed.
        if headers is None:
            headers = {}
        headers['Connection'] = 'Keep-Alive'
        return super(NTLMConnectionPool, self).urlopen(method, url, body,
                                                       headers, retries,
                                                       redirect,
                                                       assert_same_host)
_vendor/urllib3/contrib/_securetransport/__pycache__/bindings.cpython-36.opt-1.pyc000064400000024130151733136340024216 0ustar003

�Pf�D�@s�dZddlmZddlZddlmZddlmZmZm	Z	m
Z
mZmZm
Z
mZmZddlmZmZmZed�Zesxed��ed	�Zes�ed
��ej�dZeeeejd���Zedkr�edededf��eedd�Zeedd�ZeZ eZ!eZ"eZ#eZ$eZ%eZ&eZ'eZ(eZ)e
Z*ee)�Z+eZ,eZ-ee#�Z.ee$�Z/ee%�Z0ee&�Z1ee'�Z2eZ3eZ4eZ5ee�Z6eZ7eZ8ee�Z9eZ:eZ;ee�Z<eZ=eZ>ee�Z?ee�Z@eZAeZBeZCeZDeZEeZF�y�e.e/ee7�ee8�e:ee;�e<ee0�gejG_He-ejG_IgejJ_He*ejJ_IgejK_He*ejK_IgejL_He*ejL_Ie,e.gejM_He6ejM_Ie6gejN_He.ejN_Ie-egejO_He/ejO_Ie+e6ee9�gejP_He-ejP_Ie	eee eee<�gejQ_He-ejQ_Ie<gejR_He-ejR_Ie.e2ee0�gejS_He-ejS_Iee-eAeee
��ZTee-eAee�ee
��ZUe?eTeUgejV_He-ejV_Ie?e	e
gejW_He-ejW_Ie?e0gejX_He-ejX_Ie?e+e gejY_He-ejY_Ie?eAgejZ_He-ejZ_Ie?e	e
gej[_He-ej[_Ie?gej\_He-ej\_Ie?e	e
ee
�gej]_He-ej]_Ie?e	e
ee
�gej^_He-ej^_Ie?gej__He-ej__Ie?ee
�gej`_He-ej`_Ie?ee>�ee
�geja_He-eja_Ie?ee>�e
gejb_He-ejb_Ie?ee
�gejc_de-ejc_Ie?ee>�ee
�geje_He-eje_Ie?ee>�gejf_He-ejf_Ie?ee=�gejg_He-ejg_Ie?ee@�gejh_He-ejh_Ie@e0geji_He-eji_Ie@e gejj_ke-ejj_Ie@eeB�gejl_He-ejl_Ie@gejm_He!ejm_Ie@e!gejn_He6ejn_Ie,eDeEgejo_He?ejo_Ie?eFe gejp_He-ejp_Ie?e=gejq_He-ejq_Ie?e=gejr_He-ejr_Ie-egejO_He/ejO_IeTe_TeUe_Ue?e_?e=e_=e>e_>e9e_9e<e_<e@e_@eBe_Be7e_7e-e_-e/jsed�e_te/jsed�e_ue+gejv_He+ejv_Ie+gejw_Hdejw_Ie+gejx_He*ejx_Ie,e	e"gejy_He/ejy_Ie/e"gejz_He	ejz_Ie/e	e!e"gej{_Heej{_Ie,e	e!gej|_He.ej|_Ie.gej}_He!ej}_Ie.gej~_Heej~_Ie,ee+�ee+�e!e4e5gej_He2ej_Ie2e+gej�_He+ej�_Ie,ee+�e!e3gej�_He0ej�_Ie,e!e3gej�_He1ej�_Ie1egej�_Hdej�_Ie0gej�_He!ej�_Ie0e!gej�_Heej�_Ie,jsed�e_�ejsed�e_�ejsed�e_�ejsed�e_�e+e_+e0e_0e/e_/e2e_2Wne�k
�rted��YnXGdd�de��Z�Gdd�de��Z�dS)ay
This module uses ctypes to bind a whole bunch of functions and constants from
SecureTransport. The goal here is to provide the low-level API to
SecureTransport. These are essentially the C-level functions and constants, and
they're pretty gross to work with.

This code is a bastardised version of the code found in Will Bond's oscrypto
library. An enormous debt is owed to him for blazing this trail for us. For
that reason, this code should be considered to be covered both by urllib3's
license and by oscrypto's:

    Copyright (c) 2015-2016 Will Bond <will@wbond.net>

    Permission is hereby granted, free of charge, to any person obtaining a
    copy of this software and associated documentation files (the "Software"),
    to deal in the Software without restriction, including without limitation
    the rights to use, copy, modify, merge, publish, distribute, sublicense,
    and/or sell copies of the Software, and to permit persons to whom the
    Software is furnished to do so, subject to the following conditions:

    The above copyright notice and this permission notice shall be included in
    all copies or substantial portions of the Software.

    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
    FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
    DEALINGS IN THE SOFTWARE.
�)�absolute_importN)�find_library)	�c_void_p�c_int32�c_char_p�c_size_t�c_byte�c_uint32�c_ulong�c_long�c_bool)�CDLL�POINTER�	CFUNCTYPE�Securityz'The library Security could not be found�CoreFoundationz-The library CoreFoundation could not be found�.�
�z1Only OS X 10.8 and newer are supported, not %s.%s�T)Z	use_errno�kSecImportExportPassphrase�kSecImportItemIdentity�kCFAllocatorDefault�kCFTypeArrayCallBacks�kCFTypeDictionaryKeyCallBacks�kCFTypeDictionaryValueCallBackszError initializing ctypesc@seZdZdZed�ZdS)�CFConstz_
    A class object that acts as essentially a namespace for CoreFoundation
    constants.
    iN)�__name__�
__module__�__qualname__�__doc__�CFStringEncodingZkCFStringEncodingUTF8�r"r"�/usr/lib/python3.6/bindings.pyr�src@s,eZdZdZdZdZdZdZdZdZ	dZ
dZdZdZ
dZd	ZdZd
ZdZdZdDZdEZdFZdGZdHZdIZdJZdKZdLZdMZdNZdOZdPZ dQZ!dRZ"dSZ#dTZ$dUZ%dVZ&dWZ'dXZ(dYZ)d"Z*d#Z+d$Z,d%Z-d&Z.d'Z/d(Z0d)Z1d*Z2d+Z3d,Z4d-Z5d.Z6d/Z7d0Z8d1Z9d2Z:d3Z;d4Z<d5Z=d6Z>d7Z?d8Z@d9ZAd:ZBd;ZCd<ZDd=ZEd>ZFd?ZGd@ZHdAZIdBZJdCS)Z�
SecurityConstzU
    A class object that acts as essentially a namespace for Security constants.
    rr���rr���iH&iK&iM&iX&iN&iO&iQ&iR&iV&iW&iT&iU&is&i`&io&iz&iq&iw&i�i�bi�bi�bi,�i0�i+�i/�����i$�i(�i
�i��k�j�9�8i#�i'�i	�i��g�@�3�2���=�<�5�/iiiNi���i���i���i���i���i���i���i���i���i���i���i���i���i���i���i���i���i���i ���iQ���i,���iR���)Krrrr Z"kSSLSessionOptionBreakOnServerAuthZ
kSSLProtocol2Z
kSSLProtocol3Z
kTLSProtocol1ZkTLSProtocol11ZkTLSProtocol12ZkSSLClientSideZkSSLStreamTypeZkSecFormatPEMSequenceZkSecTrustResultInvalidZkSecTrustResultProceedZkSecTrustResultDenyZkSecTrustResultUnspecifiedZ&kSecTrustResultRecoverableTrustFailureZ kSecTrustResultFatalTrustFailureZkSecTrustResultOtherErrorZerrSSLProtocolZerrSSLWouldBlockZerrSSLClosedGracefulZerrSSLClosedNoNotifyZerrSSLClosedAbortZerrSSLXCertChainInvalidZerrSSLCryptoZerrSSLInternalZerrSSLCertExpiredZerrSSLCertNotYetValidZerrSSLUnknownRootCertZerrSSLNoRootCertZerrSSLHostNameMismatchZerrSSLPeerHandshakeFailZerrSSLPeerUserCancelledZerrSSLWeakPeerEphemeralDHKeyZerrSSLServerAuthCompletedZerrSSLRecordOverflowZerrSecVerifyFailedZerrSecNoTrustSettingsZerrSecItemNotFoundZerrSecInvalidTrustSettingsZ'TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384Z%TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384Z'TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256Z%TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256Z#TLS_DHE_DSS_WITH_AES_256_GCM_SHA384Z#TLS_DHE_RSA_WITH_AES_256_GCM_SHA384Z#TLS_DHE_DSS_WITH_AES_128_GCM_SHA256Z#TLS_DHE_RSA_WITH_AES_128_GCM_SHA256Z'TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384Z%TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384Z$TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHAZ"TLS_ECDHE_RSA_WITH_AES_256_CBC_SHAZ#TLS_DHE_RSA_WITH_AES_256_CBC_SHA256Z#TLS_DHE_DSS_WITH_AES_256_CBC_SHA256Z TLS_DHE_RSA_WITH_AES_256_CBC_SHAZ TLS_DHE_DSS_WITH_AES_256_CBC_SHAZ'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256Z%TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256Z$TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHAZ"TLS_ECDHE_RSA_WITH_AES_128_CBC_SHAZ#TLS_DHE_RSA_WITH_AES_128_CBC_SHA256Z#TLS_DHE_DSS_WITH_AES_128_CBC_SHA256Z TLS_DHE_RSA_WITH_AES_128_CBC_SHAZ TLS_DHE_DSS_WITH_AES_128_CBC_SHAZTLS_RSA_WITH_AES_256_GCM_SHA384ZTLS_RSA_WITH_AES_128_GCM_SHA256ZTLS_RSA_WITH_AES_256_CBC_SHA256ZTLS_RSA_WITH_AES_128_CBC_SHA256ZTLS_RSA_WITH_AES_256_CBC_SHAZTLS_RSA_WITH_AES_128_CBC_SHAZTLS_AES_128_GCM_SHA256ZTLS_AES_256_GCM_SHA384ZTLS_CHACHA20_POLY1305_SHA256r"r"r"r#r$�s�r$)rr)�r Z
__future__r�platformZctypes.utilrZctypesrrrrrr	r
rrr
rrZ
security_path�ImportErrorZcore_foundation_pathZmac_ver�version�tuple�map�int�split�version_info�OSErrorrrZBooleanZCFIndexr!ZCFDataZCFStringZCFArrayZCFMutableArrayZCFDictionaryZCFErrorZCFTypeZCFTypeIDZ	CFTypeRefZCFAllocatorRefZOSStatusZ	CFDataRefZCFStringRefZ
CFArrayRefZCFMutableArrayRefZCFDictionaryRefZCFArrayCallBacksZCFDictionaryKeyCallBacksZCFDictionaryValueCallBacksZSecCertificateRefZSecExternalFormatZSecExternalItemTypeZSecIdentityRefZSecItemImportExportFlagsZ SecItemImportExportKeyParametersZSecKeychainRefZSSLProtocolZSSLCipherSuiteZ
SSLContextRefZSecTrustRefZSSLConnectionRefZSecTrustResultTypeZSecTrustOptionFlagsZSSLProtocolSideZSSLConnectionTypeZSSLSessionOptionZ
SecItemImportZargtypesZrestypeZSecCertificateGetTypeIDZSecIdentityGetTypeIDZSecKeyGetTypeIDZSecCertificateCreateWithDataZSecCertificateCopyDataZSecCopyErrorMessageStringZ SecIdentityCreateWithCertificateZSecKeychainCreateZSecKeychainDeleteZSecPKCS12ImportZSSLReadFuncZSSLWriteFuncZ
SSLSetIOFuncsZSSLSetPeerIDZSSLSetCertificateZSSLSetCertificateAuthoritiesZSSLSetConnectionZSSLSetPeerDomainNameZSSLHandshakeZSSLReadZSSLWriteZSSLCloseZSSLGetNumberSupportedCiphersZSSLGetSupportedCiphersZSSLSetEnabledCiphersZSSLGetNumberEnabledCiphersZargtypeZSSLGetEnabledCiphersZSSLGetNegotiatedCipherZSSLGetNegotiatedProtocolVersionZSSLCopyPeerTrustZSecTrustSetAnchorCertificatesZ!SecTrustSetAnchorCertificatesOnlyZ	argstypesZSecTrustEvaluateZSecTrustGetCertificateCountZSecTrustGetCertificateAtIndexZSSLCreateContextZSSLSetSessionOptionZSSLSetProtocolVersionMinZSSLSetProtocolVersionMaxZin_dllrrZCFRetainZ	CFReleaseZCFGetTypeIDZCFStringCreateWithCStringZCFStringGetCStringPtrZCFStringGetCStringZCFDataCreateZCFDataGetLengthZCFDataGetBytePtrZCFDictionaryCreateZCFDictionaryGetValueZ
CFArrayCreateZCFArrayCreateMutableZCFArrayAppendValueZCFArrayGetCountZCFArrayGetValueAtIndexrrrr�AttributeError�objectrr$r"r"r"r#�<module>s,,











































_vendor/urllib3/contrib/_securetransport/__pycache__/low_level.cpython-36.pyc000064400000016271151733136350023462 0ustar003

�Pf/�@s�dZddlZddlZddlZddlZddlZddlZddlZddlm	Z	m
Z
mZejdej
�Zdd�Zdd	�Zd
d�Zddd
�Zdd�Zdd�Zdd�Zdd�Zdd�Zdd�ZdS)a�
Low-level helpers for the SecureTransport bindings.

These are Python functions that are not directly related to the high-level APIs
but are necessary to get them to work. They include a whole bunch of low-level
CoreFoundation messing about and memory management. The concerns in this module
are almost entirely about trying to avoid memory leaks and providing
appropriate and useful assistance to the higher-level code.
�N�)�Security�CoreFoundation�CFConsts;-----BEGIN CERTIFICATE-----
(.*?)
-----END CERTIFICATE-----cCstjtj|t|��S)zv
    Given a bytestring, create a CFData object from it. This CFData object must
    be CFReleased by the caller.
    )r�CFDataCreate�kCFAllocatorDefault�len)Z
bytestring�r	�/usr/lib/python3.6/low_level.py�_cf_data_from_bytessrcCsZt|�}dd�|D�}dd�|D�}tj||�}tj||�}tjtj|||tjtj�S)zK
    Given a list of Python tuples, create an associated CFDictionary.
    css|]}|dVqdS)rNr	)�.0�tr	r	r
�	<genexpr>,sz-_cf_dictionary_from_tuples.<locals>.<genexpr>css|]}|dVqdS)rNr	)rr
r	r	r
r-s)rr�	CFTypeRefZCFDictionaryCreaterZkCFTypeDictionaryKeyCallBacksZkCFTypeDictionaryValueCallBacks)ZtuplesZdictionary_size�keys�valuesZcf_keysZ	cf_valuesr	r	r
�_cf_dictionary_from_tuples%srcCsntj|tjtj��}tj|tj�}|dkrXtjd�}tj	||dtj�}|sRt
d��|j}|dk	rj|jd�}|S)z�
    Creates a Unicode string from a CFString object. Used entirely for error
    reporting.

    Yes, it annoys me quite a lot that this function is this complex.
    Niz'Error copying C string from CFStringRefzutf-8)
�ctypes�castZPOINTERZc_void_prZCFStringGetCStringPtrrZkCFStringEncodingUTF8Zcreate_string_bufferZCFStringGetCString�OSError�value�decode)rZvalue_as_void_p�string�buffer�resultr	r	r
�_cf_string_to_unicode;s"

rcCs\|dkrdStj|d�}t|�}tj|�|dks:|dkrBd|}|dkrPtj}||��dS)z[
    Checks the return code and throws an exception if there is an error to
    report
    rN�zOSStatus %s)rZSecCopyErrorMessageStringrr�	CFRelease�ssl�SSLError)�errorZexception_classZcf_error_string�outputr	r	r
�_assert_no_errorXs
r"cCs�dd�tj|�D�}|s"tjd��tjtjdtjtj	��}|sHtjd��ydx^|D]V}t
|�}|sjtjd��tjtj|�}tj
|�|s�tjd��tj||�tj
|�qPWWntk
r�tj
|�YnX|S)z�
    Given a bundle of certs in PEM format, turns them into a CFArray of certs
    that can be used to validate a cert chain.
    cSsg|]}tj|jd���qS)r)�base64Z	b64decode�group)r�matchr	r	r
�
<listcomp>ssz(_cert_array_from_pem.<locals>.<listcomp>zNo root certificates specifiedrzUnable to allocate memory!zUnable to build cert object!)�
_PEM_CERTS_RE�finditerrrr�CFArrayCreateMutablerr�byref�kCFTypeArrayCallBacksrrZSecCertificateCreateWithDatar�CFArrayAppendValue�	Exception)Z
pem_bundleZ	der_certsZ
cert_arrayZ	der_bytesZcertdataZcertr	r	r
�_cert_array_from_pemms2






r.cCstj�}tj|�|kS)z=
    Returns True if a given CFTypeRef is a certificate.
    )rZSecCertificateGetTypeIDr�CFGetTypeID)�item�expectedr	r	r
�_is_cert�sr2cCstj�}tj|�|kS)z;
    Returns True if a given CFTypeRef is an identity.
    )rZSecIdentityGetTypeIDrr/)r0r1r	r	r
�_is_identity�sr3cCs�tjd�}tj|dd��jd�}tj|dd��}tj�}tjj||�j	d�}t
j�}t
j|t
|�|ddtj|��}t|�||fS)a�
    This function creates a temporary Mac keychain that we can use to work with
    credentials. This keychain uses a one-time password and a temporary file to
    store the data. We expect to have one keychain per socket. The returned
    SecKeychainRef must be freed by the caller, including calling
    SecKeychainDelete.

    Returns a tuple of the SecKeychainRef and the path to the temporary
    directory that contains it.
    �(N�zutf-8F)�os�urandomr#Z	b64encoder�tempfileZmkdtemp�path�join�encoderZSecKeychainRefZSecKeychainCreaterrr*r")Zrandom_bytes�filenameZpasswordZ
tempdirectoryZ
keychain_path�keychain�statusr	r	r
�_temporary_keychain�s
r?cCsg}g}d}t|d��}|j�}WdQRXz�tjtj|t|��}tj�}tj|ddddd|t	j
|��}t|�tj|�}	xdt
|	�D]X}
tj||
�}t	j|tj�}t|�r�tj|�|j|�q�t|�r�tj|�|j|�q�WWd|r�tj|�tj|�X||fS)z�
    Given a single file, loads all the trust objects from it into arrays and
    the keychain.
    Returns a tuple of lists: the first list is a list of identities, the
    second a list of certs.
    N�rbr)�open�readrrrrZ
CFArrayRefrZ
SecItemImportrr*r"ZCFArrayGetCount�rangeZCFArrayGetValueAtIndexrrr2ZCFRetain�appendr3r)r=r9�certificates�
identitiesZresult_array�fZraw_filedataZfiledatarZresult_count�indexr0r	r	r
�_load_items_from_file�sH




rIcGs�g}g}dd�|D�}z�x.|D]&}t||�\}}|j|�|j|�qW|s�tj�}tj||dtj|��}t|�|j|�t	j
|jd��t	jt	j
dtjt	j��}	x tj||�D]}
t	j|	|
�q�W|	Sxtj||�D]}t	j
|�q�WXdS)z�
    Load certificates and maybe keys from a number of files. Has the end goal
    of returning a CFArray containing one SecIdentityRef, and then zero or more
    SecCertificateRef objects, suitable for use as a client certificate trust
    chain.
    css|]}|r|VqdS)Nr	)rr9r	r	r
r/sz*_load_client_cert_chain.<locals>.<genexpr>rN)rI�extendrZSecIdentityRefZ SecIdentityCreateWithCertificaterr*r"rDrr�popr)rr+�	itertools�chainr,)r=�pathsrErFZ	file_pathZnew_identitiesZ	new_certsZnew_identityr>Ztrust_chainr0�objr	r	r
�_load_client_cert_chains6 


rP)N)�__doc__r#rrL�rer6rr8Zbindingsrrr�compile�DOTALLr'rrrr"r.r2r3r?rIrPr	r	r	r
�<module>	s(


+(;_vendor/urllib3/contrib/_securetransport/__pycache__/low_level.cpython-36.opt-1.pyc000064400000016271151733136350024421 0ustar003

�Pf/�@s�dZddlZddlZddlZddlZddlZddlZddlZddlm	Z	m
Z
mZejdej
�Zdd�Zdd	�Zd
d�Zddd
�Zdd�Zdd�Zdd�Zdd�Zdd�Zdd�ZdS)a�
Low-level helpers for the SecureTransport bindings.

These are Python functions that are not directly related to the high-level APIs
but are necessary to get them to work. They include a whole bunch of low-level
CoreFoundation messing about and memory management. The concerns in this module
are almost entirely about trying to avoid memory leaks and providing
appropriate and useful assistance to the higher-level code.
�N�)�Security�CoreFoundation�CFConsts;-----BEGIN CERTIFICATE-----
(.*?)
-----END CERTIFICATE-----cCstjtj|t|��S)zv
    Given a bytestring, create a CFData object from it. This CFData object must
    be CFReleased by the caller.
    )r�CFDataCreate�kCFAllocatorDefault�len)Z
bytestring�r	�/usr/lib/python3.6/low_level.py�_cf_data_from_bytessrcCsZt|�}dd�|D�}dd�|D�}tj||�}tj||�}tjtj|||tjtj�S)zK
    Given a list of Python tuples, create an associated CFDictionary.
    css|]}|dVqdS)rNr	)�.0�tr	r	r
�	<genexpr>,sz-_cf_dictionary_from_tuples.<locals>.<genexpr>css|]}|dVqdS)rNr	)rr
r	r	r
r-s)rr�	CFTypeRefZCFDictionaryCreaterZkCFTypeDictionaryKeyCallBacksZkCFTypeDictionaryValueCallBacks)ZtuplesZdictionary_size�keys�valuesZcf_keysZ	cf_valuesr	r	r
�_cf_dictionary_from_tuples%srcCsntj|tjtj��}tj|tj�}|dkrXtjd�}tj	||dtj�}|sRt
d��|j}|dk	rj|jd�}|S)z�
    Creates a Unicode string from a CFString object. Used entirely for error
    reporting.

    Yes, it annoys me quite a lot that this function is this complex.
    Niz'Error copying C string from CFStringRefzutf-8)
�ctypes�castZPOINTERZc_void_prZCFStringGetCStringPtrrZkCFStringEncodingUTF8Zcreate_string_bufferZCFStringGetCString�OSError�value�decode)rZvalue_as_void_p�string�buffer�resultr	r	r
�_cf_string_to_unicode;s"

rcCs\|dkrdStj|d�}t|�}tj|�|dks:|dkrBd|}|dkrPtj}||��dS)z[
    Checks the return code and throws an exception if there is an error to
    report
    rN�zOSStatus %s)rZSecCopyErrorMessageStringrr�	CFRelease�ssl�SSLError)�errorZexception_classZcf_error_string�outputr	r	r
�_assert_no_errorXs
r"cCs�dd�tj|�D�}|s"tjd��tjtjdtjtj	��}|sHtjd��ydx^|D]V}t
|�}|sjtjd��tjtj|�}tj
|�|s�tjd��tj||�tj
|�qPWWntk
r�tj
|�YnX|S)z�
    Given a bundle of certs in PEM format, turns them into a CFArray of certs
    that can be used to validate a cert chain.
    cSsg|]}tj|jd���qS)r)�base64Z	b64decode�group)r�matchr	r	r
�
<listcomp>ssz(_cert_array_from_pem.<locals>.<listcomp>zNo root certificates specifiedrzUnable to allocate memory!zUnable to build cert object!)�
_PEM_CERTS_RE�finditerrrr�CFArrayCreateMutablerr�byref�kCFTypeArrayCallBacksrrZSecCertificateCreateWithDatar�CFArrayAppendValue�	Exception)Z
pem_bundleZ	der_certsZ
cert_arrayZ	der_bytesZcertdataZcertr	r	r
�_cert_array_from_pemms2






r.cCstj�}tj|�|kS)z=
    Returns True if a given CFTypeRef is a certificate.
    )rZSecCertificateGetTypeIDr�CFGetTypeID)�item�expectedr	r	r
�_is_cert�sr2cCstj�}tj|�|kS)z;
    Returns True if a given CFTypeRef is an identity.
    )rZSecIdentityGetTypeIDrr/)r0r1r	r	r
�_is_identity�sr3cCs�tjd�}tj|dd��jd�}tj|dd��}tj�}tjj||�j	d�}t
j�}t
j|t
|�|ddtj|��}t|�||fS)a�
    This function creates a temporary Mac keychain that we can use to work with
    credentials. This keychain uses a one-time password and a temporary file to
    store the data. We expect to have one keychain per socket. The returned
    SecKeychainRef must be freed by the caller, including calling
    SecKeychainDelete.

    Returns a tuple of the SecKeychainRef and the path to the temporary
    directory that contains it.
    �(N�zutf-8F)�os�urandomr#Z	b64encoder�tempfileZmkdtemp�path�join�encoderZSecKeychainRefZSecKeychainCreaterrr*r")Zrandom_bytes�filenameZpasswordZ
tempdirectoryZ
keychain_path�keychain�statusr	r	r
�_temporary_keychain�s
r?cCsg}g}d}t|d��}|j�}WdQRXz�tjtj|t|��}tj�}tj|ddddd|t	j
|��}t|�tj|�}	xdt
|	�D]X}
tj||
�}t	j|tj�}t|�r�tj|�|j|�q�t|�r�tj|�|j|�q�WWd|r�tj|�tj|�X||fS)z�
    Given a single file, loads all the trust objects from it into arrays and
    the keychain.
    Returns a tuple of lists: the first list is a list of identities, the
    second a list of certs.
    N�rbr)�open�readrrrrZ
CFArrayRefrZ
SecItemImportrr*r"ZCFArrayGetCount�rangeZCFArrayGetValueAtIndexrrr2ZCFRetain�appendr3r)r=r9�certificates�
identitiesZresult_array�fZraw_filedataZfiledatarZresult_count�indexr0r	r	r
�_load_items_from_file�sH




rIcGs�g}g}dd�|D�}z�x.|D]&}t||�\}}|j|�|j|�qW|s�tj�}tj||dtj|��}t|�|j|�t	j
|jd��t	jt	j
dtjt	j��}	x tj||�D]}
t	j|	|
�q�W|	Sxtj||�D]}t	j
|�q�WXdS)z�
    Load certificates and maybe keys from a number of files. Has the end goal
    of returning a CFArray containing one SecIdentityRef, and then zero or more
    SecCertificateRef objects, suitable for use as a client certificate trust
    chain.
    css|]}|r|VqdS)Nr	)rr9r	r	r
r/sz*_load_client_cert_chain.<locals>.<genexpr>rN)rI�extendrZSecIdentityRefZ SecIdentityCreateWithCertificaterr*r"rDrr�popr)rr+�	itertools�chainr,)r=�pathsrErFZ	file_pathZnew_identitiesZ	new_certsZnew_identityr>Ztrust_chainr0�objr	r	r
�_load_client_cert_chains6 


rP)N)�__doc__r#rrL�rer6rr8Zbindingsrrr�compile�DOTALLr'rrrr"r.r2r3r?rIrPr	r	r	r
�<module>	s(


+(;_vendor/urllib3/contrib/_securetransport/__pycache__/__init__.cpython-36.pyc000064400000000161151733136350023220 0ustar003

�Pf�@sdS)N�rrr�/usr/lib/python3.6/__init__.py�<module>s_vendor/urllib3/contrib/_securetransport/__pycache__/bindings.cpython-36.pyc000064400000024130151733136350023260 0ustar003

�Pf�D�@s�dZddlmZddlZddlmZddlmZmZm	Z	m
Z
mZmZm
Z
mZmZddlmZmZmZed�Zesxed��ed	�Zes�ed
��ej�dZeeeejd���Zedkr�edededf��eedd�Zeedd�ZeZ eZ!eZ"eZ#eZ$eZ%eZ&eZ'eZ(eZ)e
Z*ee)�Z+eZ,eZ-ee#�Z.ee$�Z/ee%�Z0ee&�Z1ee'�Z2eZ3eZ4eZ5ee�Z6eZ7eZ8ee�Z9eZ:eZ;ee�Z<eZ=eZ>ee�Z?ee�Z@eZAeZBeZCeZDeZEeZF�y�e.e/ee7�ee8�e:ee;�e<ee0�gejG_He-ejG_IgejJ_He*ejJ_IgejK_He*ejK_IgejL_He*ejL_Ie,e.gejM_He6ejM_Ie6gejN_He.ejN_Ie-egejO_He/ejO_Ie+e6ee9�gejP_He-ejP_Ie	eee eee<�gejQ_He-ejQ_Ie<gejR_He-ejR_Ie.e2ee0�gejS_He-ejS_Iee-eAeee
��ZTee-eAee�ee
��ZUe?eTeUgejV_He-ejV_Ie?e	e
gejW_He-ejW_Ie?e0gejX_He-ejX_Ie?e+e gejY_He-ejY_Ie?eAgejZ_He-ejZ_Ie?e	e
gej[_He-ej[_Ie?gej\_He-ej\_Ie?e	e
ee
�gej]_He-ej]_Ie?e	e
ee
�gej^_He-ej^_Ie?gej__He-ej__Ie?ee
�gej`_He-ej`_Ie?ee>�ee
�geja_He-eja_Ie?ee>�e
gejb_He-ejb_Ie?ee
�gejc_de-ejc_Ie?ee>�ee
�geje_He-eje_Ie?ee>�gejf_He-ejf_Ie?ee=�gejg_He-ejg_Ie?ee@�gejh_He-ejh_Ie@e0geji_He-eji_Ie@e gejj_ke-ejj_Ie@eeB�gejl_He-ejl_Ie@gejm_He!ejm_Ie@e!gejn_He6ejn_Ie,eDeEgejo_He?ejo_Ie?eFe gejp_He-ejp_Ie?e=gejq_He-ejq_Ie?e=gejr_He-ejr_Ie-egejO_He/ejO_IeTe_TeUe_Ue?e_?e=e_=e>e_>e9e_9e<e_<e@e_@eBe_Be7e_7e-e_-e/jsed�e_te/jsed�e_ue+gejv_He+ejv_Ie+gejw_Hdejw_Ie+gejx_He*ejx_Ie,e	e"gejy_He/ejy_Ie/e"gejz_He	ejz_Ie/e	e!e"gej{_Heej{_Ie,e	e!gej|_He.ej|_Ie.gej}_He!ej}_Ie.gej~_Heej~_Ie,ee+�ee+�e!e4e5gej_He2ej_Ie2e+gej�_He+ej�_Ie,ee+�e!e3gej�_He0ej�_Ie,e!e3gej�_He1ej�_Ie1egej�_Hdej�_Ie0gej�_He!ej�_Ie0e!gej�_Heej�_Ie,jsed�e_�ejsed�e_�ejsed�e_�ejsed�e_�e+e_+e0e_0e/e_/e2e_2Wne�k
�rted��YnXGdd�de��Z�Gdd�de��Z�dS)ay
This module uses ctypes to bind a whole bunch of functions and constants from
SecureTransport. The goal here is to provide the low-level API to
SecureTransport. These are essentially the C-level functions and constants, and
they're pretty gross to work with.

This code is a bastardised version of the code found in Will Bond's oscrypto
library. An enormous debt is owed to him for blazing this trail for us. For
that reason, this code should be considered to be covered both by urllib3's
license and by oscrypto's:

    Copyright (c) 2015-2016 Will Bond <will@wbond.net>

    Permission is hereby granted, free of charge, to any person obtaining a
    copy of this software and associated documentation files (the "Software"),
    to deal in the Software without restriction, including without limitation
    the rights to use, copy, modify, merge, publish, distribute, sublicense,
    and/or sell copies of the Software, and to permit persons to whom the
    Software is furnished to do so, subject to the following conditions:

    The above copyright notice and this permission notice shall be included in
    all copies or substantial portions of the Software.

    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
    FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
    DEALINGS IN THE SOFTWARE.
�)�absolute_importN)�find_library)	�c_void_p�c_int32�c_char_p�c_size_t�c_byte�c_uint32�c_ulong�c_long�c_bool)�CDLL�POINTER�	CFUNCTYPE�Securityz'The library Security could not be found�CoreFoundationz-The library CoreFoundation could not be found�.�
�z1Only OS X 10.8 and newer are supported, not %s.%s�T)Z	use_errno�kSecImportExportPassphrase�kSecImportItemIdentity�kCFAllocatorDefault�kCFTypeArrayCallBacks�kCFTypeDictionaryKeyCallBacks�kCFTypeDictionaryValueCallBackszError initializing ctypesc@seZdZdZed�ZdS)�CFConstz_
    A class object that acts as essentially a namespace for CoreFoundation
    constants.
    iN)�__name__�
__module__�__qualname__�__doc__�CFStringEncodingZkCFStringEncodingUTF8�r"r"�/usr/lib/python3.6/bindings.pyr�src@s,eZdZdZdZdZdZdZdZdZ	dZ
dZdZdZ
dZd	ZdZd
ZdZdZdDZdEZdFZdGZdHZdIZdJZdKZdLZdMZdNZdOZdPZ dQZ!dRZ"dSZ#dTZ$dUZ%dVZ&dWZ'dXZ(dYZ)d"Z*d#Z+d$Z,d%Z-d&Z.d'Z/d(Z0d)Z1d*Z2d+Z3d,Z4d-Z5d.Z6d/Z7d0Z8d1Z9d2Z:d3Z;d4Z<d5Z=d6Z>d7Z?d8Z@d9ZAd:ZBd;ZCd<ZDd=ZEd>ZFd?ZGd@ZHdAZIdBZJdCS)Z�
SecurityConstzU
    A class object that acts as essentially a namespace for Security constants.
    rr���rr���iH&iK&iM&iX&iN&iO&iQ&iR&iV&iW&iT&iU&is&i`&io&iz&iq&iw&i�i�bi�bi�bi,�i0�i+�i/�����i$�i(�i
�i��k�j�9�8i#�i'�i	�i��g�@�3�2���=�<�5�/iiiNi���i���i���i���i���i���i���i���i���i���i���i���i���i���i���i���i���i���i ���iQ���i,���iR���)Krrrr Z"kSSLSessionOptionBreakOnServerAuthZ
kSSLProtocol2Z
kSSLProtocol3Z
kTLSProtocol1ZkTLSProtocol11ZkTLSProtocol12ZkSSLClientSideZkSSLStreamTypeZkSecFormatPEMSequenceZkSecTrustResultInvalidZkSecTrustResultProceedZkSecTrustResultDenyZkSecTrustResultUnspecifiedZ&kSecTrustResultRecoverableTrustFailureZ kSecTrustResultFatalTrustFailureZkSecTrustResultOtherErrorZerrSSLProtocolZerrSSLWouldBlockZerrSSLClosedGracefulZerrSSLClosedNoNotifyZerrSSLClosedAbortZerrSSLXCertChainInvalidZerrSSLCryptoZerrSSLInternalZerrSSLCertExpiredZerrSSLCertNotYetValidZerrSSLUnknownRootCertZerrSSLNoRootCertZerrSSLHostNameMismatchZerrSSLPeerHandshakeFailZerrSSLPeerUserCancelledZerrSSLWeakPeerEphemeralDHKeyZerrSSLServerAuthCompletedZerrSSLRecordOverflowZerrSecVerifyFailedZerrSecNoTrustSettingsZerrSecItemNotFoundZerrSecInvalidTrustSettingsZ'TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384Z%TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384Z'TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256Z%TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256Z#TLS_DHE_DSS_WITH_AES_256_GCM_SHA384Z#TLS_DHE_RSA_WITH_AES_256_GCM_SHA384Z#TLS_DHE_DSS_WITH_AES_128_GCM_SHA256Z#TLS_DHE_RSA_WITH_AES_128_GCM_SHA256Z'TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384Z%TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384Z$TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHAZ"TLS_ECDHE_RSA_WITH_AES_256_CBC_SHAZ#TLS_DHE_RSA_WITH_AES_256_CBC_SHA256Z#TLS_DHE_DSS_WITH_AES_256_CBC_SHA256Z TLS_DHE_RSA_WITH_AES_256_CBC_SHAZ TLS_DHE_DSS_WITH_AES_256_CBC_SHAZ'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256Z%TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256Z$TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHAZ"TLS_ECDHE_RSA_WITH_AES_128_CBC_SHAZ#TLS_DHE_RSA_WITH_AES_128_CBC_SHA256Z#TLS_DHE_DSS_WITH_AES_128_CBC_SHA256Z TLS_DHE_RSA_WITH_AES_128_CBC_SHAZ TLS_DHE_DSS_WITH_AES_128_CBC_SHAZTLS_RSA_WITH_AES_256_GCM_SHA384ZTLS_RSA_WITH_AES_128_GCM_SHA256ZTLS_RSA_WITH_AES_256_CBC_SHA256ZTLS_RSA_WITH_AES_128_CBC_SHA256ZTLS_RSA_WITH_AES_256_CBC_SHAZTLS_RSA_WITH_AES_128_CBC_SHAZTLS_AES_128_GCM_SHA256ZTLS_AES_256_GCM_SHA384ZTLS_CHACHA20_POLY1305_SHA256r"r"r"r#r$�s�r$)rr)�r Z
__future__r�platformZctypes.utilrZctypesrrrrrr	r
rrr
rrZ
security_path�ImportErrorZcore_foundation_pathZmac_ver�version�tuple�map�int�split�version_info�OSErrorrrZBooleanZCFIndexr!ZCFDataZCFStringZCFArrayZCFMutableArrayZCFDictionaryZCFErrorZCFTypeZCFTypeIDZ	CFTypeRefZCFAllocatorRefZOSStatusZ	CFDataRefZCFStringRefZ
CFArrayRefZCFMutableArrayRefZCFDictionaryRefZCFArrayCallBacksZCFDictionaryKeyCallBacksZCFDictionaryValueCallBacksZSecCertificateRefZSecExternalFormatZSecExternalItemTypeZSecIdentityRefZSecItemImportExportFlagsZ SecItemImportExportKeyParametersZSecKeychainRefZSSLProtocolZSSLCipherSuiteZ
SSLContextRefZSecTrustRefZSSLConnectionRefZSecTrustResultTypeZSecTrustOptionFlagsZSSLProtocolSideZSSLConnectionTypeZSSLSessionOptionZ
SecItemImportZargtypesZrestypeZSecCertificateGetTypeIDZSecIdentityGetTypeIDZSecKeyGetTypeIDZSecCertificateCreateWithDataZSecCertificateCopyDataZSecCopyErrorMessageStringZ SecIdentityCreateWithCertificateZSecKeychainCreateZSecKeychainDeleteZSecPKCS12ImportZSSLReadFuncZSSLWriteFuncZ
SSLSetIOFuncsZSSLSetPeerIDZSSLSetCertificateZSSLSetCertificateAuthoritiesZSSLSetConnectionZSSLSetPeerDomainNameZSSLHandshakeZSSLReadZSSLWriteZSSLCloseZSSLGetNumberSupportedCiphersZSSLGetSupportedCiphersZSSLSetEnabledCiphersZSSLGetNumberEnabledCiphersZargtypeZSSLGetEnabledCiphersZSSLGetNegotiatedCipherZSSLGetNegotiatedProtocolVersionZSSLCopyPeerTrustZSecTrustSetAnchorCertificatesZ!SecTrustSetAnchorCertificatesOnlyZ	argstypesZSecTrustEvaluateZSecTrustGetCertificateCountZSecTrustGetCertificateAtIndexZSSLCreateContextZSSLSetSessionOptionZSSLSetProtocolVersionMinZSSLSetProtocolVersionMaxZin_dllrrZCFRetainZ	CFReleaseZCFGetTypeIDZCFStringCreateWithCStringZCFStringGetCStringPtrZCFStringGetCStringZCFDataCreateZCFDataGetLengthZCFDataGetBytePtrZCFDictionaryCreateZCFDictionaryGetValueZ
CFArrayCreateZCFArrayCreateMutableZCFArrayAppendValueZCFArrayGetCountZCFArrayGetValueAtIndexrrrr�AttributeError�objectrr$r"r"r"r#�<module>s,,











































_vendor/urllib3/contrib/_securetransport/__pycache__/__init__.cpython-36.opt-1.pyc000064400000000161151733136350024157 0ustar003

�Pf�@sdS)N�rrr�/usr/lib/python3.6/__init__.py�<module>s_vendor/urllib3/contrib/_securetransport/__init__.py000064400000000000151733136350016724 0ustar00_vendor/urllib3/contrib/_securetransport/bindings.py000064400000042230151733136350016775 0ustar00"""
This module uses ctypes to bind a whole bunch of functions and constants from
SecureTransport. The goal here is to provide the low-level API to
SecureTransport. These are essentially the C-level functions and constants, and
they're pretty gross to work with.

This code is a bastardised version of the code found in Will Bond's oscrypto
library. An enormous debt is owed to him for blazing this trail for us. For
that reason, this code should be considered to be covered both by urllib3's
license and by oscrypto's:

    Copyright (c) 2015-2016 Will Bond <will@wbond.net>

    Permission is hereby granted, free of charge, to any person obtaining a
    copy of this software and associated documentation files (the "Software"),
    to deal in the Software without restriction, including without limitation
    the rights to use, copy, modify, merge, publish, distribute, sublicense,
    and/or sell copies of the Software, and to permit persons to whom the
    Software is furnished to do so, subject to the following conditions:

    The above copyright notice and this permission notice shall be included in
    all copies or substantial portions of the Software.

    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
    FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
    DEALINGS IN THE SOFTWARE.
"""
from __future__ import absolute_import

import platform
from ctypes.util import find_library
from ctypes import (
    c_void_p, c_int32, c_char_p, c_size_t, c_byte, c_uint32, c_ulong, c_long,
    c_bool
)
from ctypes import CDLL, POINTER, CFUNCTYPE


# Resolve the two macOS system frameworks these bindings need. find_library
# returns None when the framework is absent (i.e. we are not on macOS), in
# which case we fail loudly at import time.
security_path = find_library('Security')
if not security_path:
    raise ImportError('The library Security could not be found')


core_foundation_path = find_library('CoreFoundation')
if not core_foundation_path:
    raise ImportError('The library CoreFoundation could not be found')


# Refuse to load on OS X releases older than 10.8: the SecureTransport
# symbols bound below are not all available before that.
version = platform.mac_ver()[0]
version_info = tuple(map(int, version.split('.')))
if version_info < (10, 8):
    raise OSError(
        'Only OS X 10.8 and newer are supported, not %s.%s' % (
            version_info[0], version_info[1]
        )
    )

# use_errno=True makes ctypes preserve errno across the foreign calls.
Security = CDLL(security_path, use_errno=True)
CoreFoundation = CDLL(core_foundation_path, use_errno=True)

# Python-side aliases for the C typedefs used in the function signatures
# below. Most CF* object types are opaque pointers, so they alias c_void_p.
Boolean = c_bool
CFIndex = c_long
CFStringEncoding = c_uint32
CFData = c_void_p
CFString = c_void_p
CFArray = c_void_p
CFMutableArray = c_void_p
CFDictionary = c_void_p
CFError = c_void_p
CFType = c_void_p
CFTypeID = c_ulong

CFTypeRef = POINTER(CFType)
CFAllocatorRef = c_void_p

# Security framework return-code type (0 == success).
OSStatus = c_int32

# Pointer-to-opaque reference types for the CoreFoundation objects.
CFDataRef = POINTER(CFData)
CFStringRef = POINTER(CFString)
CFArrayRef = POINTER(CFArray)
CFMutableArrayRef = POINTER(CFMutableArray)
CFDictionaryRef = POINTER(CFDictionary)
CFArrayCallBacks = c_void_p
CFDictionaryKeyCallBacks = c_void_p
CFDictionaryValueCallBacks = c_void_p

# Security framework reference and enum types.
SecCertificateRef = POINTER(c_void_p)
SecExternalFormat = c_uint32
SecExternalItemType = c_uint32
SecIdentityRef = POINTER(c_void_p)
SecItemImportExportFlags = c_uint32
SecItemImportExportKeyParameters = c_void_p
SecKeychainRef = POINTER(c_void_p)
SSLProtocol = c_uint32
SSLCipherSuite = c_uint32
SSLContextRef = POINTER(c_void_p)
SecTrustRef = POINTER(c_void_p)
SSLConnectionRef = c_uint32
SecTrustResultType = c_uint32
SecTrustOptionFlags = c_uint32
SSLProtocolSide = c_uint32
SSLConnectionType = c_uint32
SSLSessionOption = c_uint32


# Declare argtypes/restype for every Security and CoreFoundation function
# this package calls, so ctypes marshals arguments and results correctly.
# If any symbol is missing from the loaded frameworks the resulting
# AttributeError is converted into an ImportError below.
try:
    Security.SecItemImport.argtypes = [
        CFDataRef,
        CFStringRef,
        POINTER(SecExternalFormat),
        POINTER(SecExternalItemType),
        SecItemImportExportFlags,
        POINTER(SecItemImportExportKeyParameters),
        SecKeychainRef,
        POINTER(CFArrayRef),
    ]
    Security.SecItemImport.restype = OSStatus

    Security.SecCertificateGetTypeID.argtypes = []
    Security.SecCertificateGetTypeID.restype = CFTypeID

    Security.SecIdentityGetTypeID.argtypes = []
    Security.SecIdentityGetTypeID.restype = CFTypeID

    Security.SecKeyGetTypeID.argtypes = []
    Security.SecKeyGetTypeID.restype = CFTypeID

    Security.SecCertificateCreateWithData.argtypes = [
        CFAllocatorRef,
        CFDataRef
    ]
    Security.SecCertificateCreateWithData.restype = SecCertificateRef

    Security.SecCertificateCopyData.argtypes = [
        SecCertificateRef
    ]
    Security.SecCertificateCopyData.restype = CFDataRef

    Security.SecCopyErrorMessageString.argtypes = [
        OSStatus,
        c_void_p
    ]
    Security.SecCopyErrorMessageString.restype = CFStringRef

    Security.SecIdentityCreateWithCertificate.argtypes = [
        CFTypeRef,
        SecCertificateRef,
        POINTER(SecIdentityRef)
    ]
    Security.SecIdentityCreateWithCertificate.restype = OSStatus

    Security.SecKeychainCreate.argtypes = [
        c_char_p,
        c_uint32,
        c_void_p,
        Boolean,
        c_void_p,
        POINTER(SecKeychainRef)
    ]
    Security.SecKeychainCreate.restype = OSStatus

    Security.SecKeychainDelete.argtypes = [
        SecKeychainRef
    ]
    Security.SecKeychainDelete.restype = OSStatus

    Security.SecPKCS12Import.argtypes = [
        CFDataRef,
        CFDictionaryRef,
        POINTER(CFArrayRef)
    ]
    Security.SecPKCS12Import.restype = OSStatus

    # Callback signatures SecureTransport uses to read/write our socket.
    SSLReadFunc = CFUNCTYPE(OSStatus, SSLConnectionRef, c_void_p, POINTER(c_size_t))
    SSLWriteFunc = CFUNCTYPE(OSStatus, SSLConnectionRef, POINTER(c_byte), POINTER(c_size_t))

    Security.SSLSetIOFuncs.argtypes = [
        SSLContextRef,
        SSLReadFunc,
        SSLWriteFunc
    ]
    Security.SSLSetIOFuncs.restype = OSStatus

    Security.SSLSetPeerID.argtypes = [
        SSLContextRef,
        c_char_p,
        c_size_t
    ]
    Security.SSLSetPeerID.restype = OSStatus

    Security.SSLSetCertificate.argtypes = [
        SSLContextRef,
        CFArrayRef
    ]
    Security.SSLSetCertificate.restype = OSStatus

    Security.SSLSetCertificateAuthorities.argtypes = [
        SSLContextRef,
        CFTypeRef,
        Boolean
    ]
    Security.SSLSetCertificateAuthorities.restype = OSStatus

    Security.SSLSetConnection.argtypes = [
        SSLContextRef,
        SSLConnectionRef
    ]
    Security.SSLSetConnection.restype = OSStatus

    Security.SSLSetPeerDomainName.argtypes = [
        SSLContextRef,
        c_char_p,
        c_size_t
    ]
    Security.SSLSetPeerDomainName.restype = OSStatus

    Security.SSLHandshake.argtypes = [
        SSLContextRef
    ]
    Security.SSLHandshake.restype = OSStatus

    Security.SSLRead.argtypes = [
        SSLContextRef,
        c_char_p,
        c_size_t,
        POINTER(c_size_t)
    ]
    Security.SSLRead.restype = OSStatus

    Security.SSLWrite.argtypes = [
        SSLContextRef,
        c_char_p,
        c_size_t,
        POINTER(c_size_t)
    ]
    Security.SSLWrite.restype = OSStatus

    Security.SSLClose.argtypes = [
        SSLContextRef
    ]
    Security.SSLClose.restype = OSStatus

    Security.SSLGetNumberSupportedCiphers.argtypes = [
        SSLContextRef,
        POINTER(c_size_t)
    ]
    Security.SSLGetNumberSupportedCiphers.restype = OSStatus

    Security.SSLGetSupportedCiphers.argtypes = [
        SSLContextRef,
        POINTER(SSLCipherSuite),
        POINTER(c_size_t)
    ]
    Security.SSLGetSupportedCiphers.restype = OSStatus

    Security.SSLSetEnabledCiphers.argtypes = [
        SSLContextRef,
        POINTER(SSLCipherSuite),
        c_size_t
    ]
    Security.SSLSetEnabledCiphers.restype = OSStatus

    # BUG FIX: this was previously misspelled "argtype", so the argument
    # types were silently never registered with ctypes.
    Security.SSLGetNumberEnabledCiphers.argtypes = [
        SSLContextRef,
        POINTER(c_size_t)
    ]
    Security.SSLGetNumberEnabledCiphers.restype = OSStatus

    Security.SSLGetEnabledCiphers.argtypes = [
        SSLContextRef,
        POINTER(SSLCipherSuite),
        POINTER(c_size_t)
    ]
    Security.SSLGetEnabledCiphers.restype = OSStatus

    Security.SSLGetNegotiatedCipher.argtypes = [
        SSLContextRef,
        POINTER(SSLCipherSuite)
    ]
    Security.SSLGetNegotiatedCipher.restype = OSStatus

    Security.SSLGetNegotiatedProtocolVersion.argtypes = [
        SSLContextRef,
        POINTER(SSLProtocol)
    ]
    Security.SSLGetNegotiatedProtocolVersion.restype = OSStatus

    Security.SSLCopyPeerTrust.argtypes = [
        SSLContextRef,
        POINTER(SecTrustRef)
    ]
    Security.SSLCopyPeerTrust.restype = OSStatus

    Security.SecTrustSetAnchorCertificates.argtypes = [
        SecTrustRef,
        CFArrayRef
    ]
    Security.SecTrustSetAnchorCertificates.restype = OSStatus

    # BUG FIX: this was previously misspelled "argstypes", so the argument
    # types were silently never registered with ctypes.
    Security.SecTrustSetAnchorCertificatesOnly.argtypes = [
        SecTrustRef,
        Boolean
    ]
    Security.SecTrustSetAnchorCertificatesOnly.restype = OSStatus

    Security.SecTrustEvaluate.argtypes = [
        SecTrustRef,
        POINTER(SecTrustResultType)
    ]
    Security.SecTrustEvaluate.restype = OSStatus

    Security.SecTrustGetCertificateCount.argtypes = [
        SecTrustRef
    ]
    Security.SecTrustGetCertificateCount.restype = CFIndex

    Security.SecTrustGetCertificateAtIndex.argtypes = [
        SecTrustRef,
        CFIndex
    ]
    Security.SecTrustGetCertificateAtIndex.restype = SecCertificateRef

    Security.SSLCreateContext.argtypes = [
        CFAllocatorRef,
        SSLProtocolSide,
        SSLConnectionType
    ]
    Security.SSLCreateContext.restype = SSLContextRef

    Security.SSLSetSessionOption.argtypes = [
        SSLContextRef,
        SSLSessionOption,
        Boolean
    ]
    Security.SSLSetSessionOption.restype = OSStatus

    Security.SSLSetProtocolVersionMin.argtypes = [
        SSLContextRef,
        SSLProtocol
    ]
    Security.SSLSetProtocolVersionMin.restype = OSStatus

    Security.SSLSetProtocolVersionMax.argtypes = [
        SSLContextRef,
        SSLProtocol
    ]
    Security.SSLSetProtocolVersionMax.restype = OSStatus

    # NOTE: a second, identical SecCopyErrorMessageString declaration used
    # to live here; it is declared once near the top of this block.

    # Re-export the callback/typedef objects on the Security namespace so
    # the higher-level modules can reference them from one place.
    Security.SSLReadFunc = SSLReadFunc
    Security.SSLWriteFunc = SSLWriteFunc
    Security.SSLContextRef = SSLContextRef
    Security.SSLProtocol = SSLProtocol
    Security.SSLCipherSuite = SSLCipherSuite
    Security.SecIdentityRef = SecIdentityRef
    Security.SecKeychainRef = SecKeychainRef
    Security.SecTrustRef = SecTrustRef
    Security.SecTrustResultType = SecTrustResultType
    Security.SecExternalFormat = SecExternalFormat
    Security.OSStatus = OSStatus

    # Framework-exported constant objects, resolved from the dylib.
    Security.kSecImportExportPassphrase = CFStringRef.in_dll(
        Security, 'kSecImportExportPassphrase'
    )
    Security.kSecImportItemIdentity = CFStringRef.in_dll(
        Security, 'kSecImportItemIdentity'
    )

    # CoreFoundation time!
    CoreFoundation.CFRetain.argtypes = [
        CFTypeRef
    ]
    CoreFoundation.CFRetain.restype = CFTypeRef

    CoreFoundation.CFRelease.argtypes = [
        CFTypeRef
    ]
    CoreFoundation.CFRelease.restype = None

    CoreFoundation.CFGetTypeID.argtypes = [
        CFTypeRef
    ]
    CoreFoundation.CFGetTypeID.restype = CFTypeID

    CoreFoundation.CFStringCreateWithCString.argtypes = [
        CFAllocatorRef,
        c_char_p,
        CFStringEncoding
    ]
    CoreFoundation.CFStringCreateWithCString.restype = CFStringRef

    CoreFoundation.CFStringGetCStringPtr.argtypes = [
        CFStringRef,
        CFStringEncoding
    ]
    CoreFoundation.CFStringGetCStringPtr.restype = c_char_p

    CoreFoundation.CFStringGetCString.argtypes = [
        CFStringRef,
        c_char_p,
        CFIndex,
        CFStringEncoding
    ]
    CoreFoundation.CFStringGetCString.restype = c_bool

    CoreFoundation.CFDataCreate.argtypes = [
        CFAllocatorRef,
        c_char_p,
        CFIndex
    ]
    CoreFoundation.CFDataCreate.restype = CFDataRef

    CoreFoundation.CFDataGetLength.argtypes = [
        CFDataRef
    ]
    CoreFoundation.CFDataGetLength.restype = CFIndex

    CoreFoundation.CFDataGetBytePtr.argtypes = [
        CFDataRef
    ]
    CoreFoundation.CFDataGetBytePtr.restype = c_void_p

    CoreFoundation.CFDictionaryCreate.argtypes = [
        CFAllocatorRef,
        POINTER(CFTypeRef),
        POINTER(CFTypeRef),
        CFIndex,
        CFDictionaryKeyCallBacks,
        CFDictionaryValueCallBacks
    ]
    CoreFoundation.CFDictionaryCreate.restype = CFDictionaryRef

    CoreFoundation.CFDictionaryGetValue.argtypes = [
        CFDictionaryRef,
        CFTypeRef
    ]
    CoreFoundation.CFDictionaryGetValue.restype = CFTypeRef

    CoreFoundation.CFArrayCreate.argtypes = [
        CFAllocatorRef,
        POINTER(CFTypeRef),
        CFIndex,
        CFArrayCallBacks,
    ]
    CoreFoundation.CFArrayCreate.restype = CFArrayRef

    CoreFoundation.CFArrayCreateMutable.argtypes = [
        CFAllocatorRef,
        CFIndex,
        CFArrayCallBacks
    ]
    CoreFoundation.CFArrayCreateMutable.restype = CFMutableArrayRef

    CoreFoundation.CFArrayAppendValue.argtypes = [
        CFMutableArrayRef,
        c_void_p
    ]
    CoreFoundation.CFArrayAppendValue.restype = None

    CoreFoundation.CFArrayGetCount.argtypes = [
        CFArrayRef
    ]
    CoreFoundation.CFArrayGetCount.restype = CFIndex

    CoreFoundation.CFArrayGetValueAtIndex.argtypes = [
        CFArrayRef,
        CFIndex
    ]
    CoreFoundation.CFArrayGetValueAtIndex.restype = c_void_p

    # Framework-exported constant objects, resolved from the dylib.
    CoreFoundation.kCFAllocatorDefault = CFAllocatorRef.in_dll(
        CoreFoundation, 'kCFAllocatorDefault'
    )
    CoreFoundation.kCFTypeArrayCallBacks = c_void_p.in_dll(CoreFoundation, 'kCFTypeArrayCallBacks')
    CoreFoundation.kCFTypeDictionaryKeyCallBacks = c_void_p.in_dll(
        CoreFoundation, 'kCFTypeDictionaryKeyCallBacks'
    )
    CoreFoundation.kCFTypeDictionaryValueCallBacks = c_void_p.in_dll(
        CoreFoundation, 'kCFTypeDictionaryValueCallBacks'
    )

    CoreFoundation.CFTypeRef = CFTypeRef
    CoreFoundation.CFArrayRef = CFArrayRef
    CoreFoundation.CFStringRef = CFStringRef
    CoreFoundation.CFDictionaryRef = CFDictionaryRef

except AttributeError:
    raise ImportError('Error initializing ctypes')


class CFConst(object):
    """Namespace class collecting CoreFoundation constants these bindings need."""
    # UTF-8 string encoding selector, used when converting CFStrings.
    kCFStringEncodingUTF8 = CFStringEncoding(0x08000100)


class SecurityConst(object):
    """
    A class object that acts as essentially a namespace for Security constants.
    """
    # SSLSessionOption: pause the handshake for manual server-cert checks.
    kSSLSessionOptionBreakOnServerAuth = 0

    # SSLProtocol version selectors.
    kSSLProtocol2 = 1
    kSSLProtocol3 = 2
    kTLSProtocol1 = 4
    kTLSProtocol11 = 7
    kTLSProtocol12 = 8

    # SSLProtocolSide / SSLConnectionType values for SSLCreateContext.
    kSSLClientSide = 1
    kSSLStreamType = 0

    # SecExternalFormat: sequence of PEM-encoded items (for SecItemImport).
    kSecFormatPEMSequence = 10

    # SecTrustResultType values returned by SecTrustEvaluate.
    kSecTrustResultInvalid = 0
    kSecTrustResultProceed = 1
    # This gap is present on purpose: this was kSecTrustResultConfirm, which
    # is deprecated.
    kSecTrustResultDeny = 3
    kSecTrustResultUnspecified = 4
    kSecTrustResultRecoverableTrustFailure = 5
    kSecTrustResultFatalTrustFailure = 6
    kSecTrustResultOtherError = 7

    # OSStatus error codes: connection lifecycle.
    errSSLProtocol = -9800
    errSSLWouldBlock = -9803
    errSSLClosedGraceful = -9805
    errSSLClosedNoNotify = -9816
    errSSLClosedAbort = -9806

    # OSStatus error codes: certificate / handshake failures.
    errSSLXCertChainInvalid = -9807
    errSSLCrypto = -9809
    errSSLInternal = -9810
    errSSLCertExpired = -9814
    errSSLCertNotYetValid = -9815
    errSSLUnknownRootCert = -9812
    errSSLNoRootCert = -9813
    errSSLHostNameMismatch = -9843
    errSSLPeerHandshakeFail = -9824
    errSSLPeerUserCancelled = -9839
    errSSLWeakPeerEphemeralDHKey = -9850
    errSSLServerAuthCompleted = -9841
    errSSLRecordOverflow = -9847

    # OSStatus error codes: trust-settings / keychain item lookups.
    errSecVerifyFailed = -67808
    errSecNoTrustSettings = -25263
    errSecItemNotFound = -25300
    errSecInvalidTrustSettings = -25262

    # Cipher suites. We only pick the ones our default cipher string allows.
    TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384 = 0xC02C
    TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384 = 0xC030
    TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256 = 0xC02B
    TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256 = 0xC02F
    TLS_DHE_DSS_WITH_AES_256_GCM_SHA384 = 0x00A3
    TLS_DHE_RSA_WITH_AES_256_GCM_SHA384 = 0x009F
    TLS_DHE_DSS_WITH_AES_128_GCM_SHA256 = 0x00A2
    TLS_DHE_RSA_WITH_AES_128_GCM_SHA256 = 0x009E
    TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384 = 0xC024
    TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384 = 0xC028
    TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA = 0xC00A
    TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA = 0xC014
    TLS_DHE_RSA_WITH_AES_256_CBC_SHA256 = 0x006B
    TLS_DHE_DSS_WITH_AES_256_CBC_SHA256 = 0x006A
    TLS_DHE_RSA_WITH_AES_256_CBC_SHA = 0x0039
    TLS_DHE_DSS_WITH_AES_256_CBC_SHA = 0x0038
    TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256 = 0xC023
    TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256 = 0xC027
    TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA = 0xC009
    TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA = 0xC013
    TLS_DHE_RSA_WITH_AES_128_CBC_SHA256 = 0x0067
    TLS_DHE_DSS_WITH_AES_128_CBC_SHA256 = 0x0040
    TLS_DHE_RSA_WITH_AES_128_CBC_SHA = 0x0033
    TLS_DHE_DSS_WITH_AES_128_CBC_SHA = 0x0032
    TLS_RSA_WITH_AES_256_GCM_SHA384 = 0x009D
    TLS_RSA_WITH_AES_128_GCM_SHA256 = 0x009C
    TLS_RSA_WITH_AES_256_CBC_SHA256 = 0x003D
    TLS_RSA_WITH_AES_128_CBC_SHA256 = 0x003C
    TLS_RSA_WITH_AES_256_CBC_SHA = 0x0035
    TLS_RSA_WITH_AES_128_CBC_SHA = 0x002F
    TLS_AES_128_GCM_SHA256 = 0x1301
    TLS_AES_256_GCM_SHA384 = 0x1302
    TLS_CHACHA20_POLY1305_SHA256 = 0x1303
_vendor/urllib3/contrib/_securetransport/low_level.py000064400000027436151733136350017203 0ustar00"""
Low-level helpers for the SecureTransport bindings.

These are Python functions that are not directly related to the high-level APIs
but are necessary to get them to work. They include a whole bunch of low-level
CoreFoundation messing about and memory management. The concerns in this module
are almost entirely about trying to avoid memory leaks and providing
appropriate and useful assistance to the higher-level code.
"""
import base64
import ctypes
import itertools
import re
import os
import ssl
import tempfile

from .bindings import Security, CoreFoundation, CFConst


# This regular expression is used to grab PEM data out of a PEM bundle.
# It is a bytes pattern (bundles are read as binary); DOTALL lets the
# lazy group capture a base64 body that spans multiple lines.
_PEM_CERTS_RE = re.compile(
    b"-----BEGIN CERTIFICATE-----\n(.*?)\n-----END CERTIFICATE-----", re.DOTALL
)


def _cf_data_from_bytes(bytestring):
    """
    Wrap a Python bytestring in a freshly allocated CFData object.

    The caller owns the returned object and must CFRelease it.
    """
    length = len(bytestring)
    return CoreFoundation.CFDataCreate(
        CoreFoundation.kCFAllocatorDefault, bytestring, length
    )


def _cf_dictionary_from_tuples(tuples):
    """
    Build a CFDictionary from a sequence of (key, value) tuples.

    Keys and values must already be CF objects; the dictionary is created
    with the standard CFType callbacks so CoreFoundation handles the
    retain/release bookkeeping.
    """
    size = len(tuples)

    # Split the pairs into two parallel sequences (order must match),
    # then pack each into a C array of CFTypeRef for the foreign call.
    keys = [pair[0] for pair in tuples]
    values = [pair[1] for pair in tuples]
    c_keys = (CoreFoundation.CFTypeRef * size)(*keys)
    c_values = (CoreFoundation.CFTypeRef * size)(*values)

    return CoreFoundation.CFDictionaryCreate(
        CoreFoundation.kCFAllocatorDefault,
        c_keys,
        c_values,
        size,
        CoreFoundation.kCFTypeDictionaryKeyCallBacks,
        CoreFoundation.kCFTypeDictionaryValueCallBacks,
    )


def _cf_string_to_unicode(value):
    """
    Convert a CFString object into a Python unicode string.

    Used entirely to turn Security.framework error strings into something
    we can embed in a Python exception message.
    """
    as_void_p = ctypes.cast(value, ctypes.POINTER(ctypes.c_void_p))

    # Fast path: CFStringGetCStringPtr may hand back an internal UTF-8
    # pointer without copying. It is allowed to return None.
    raw = CoreFoundation.CFStringGetCStringPtr(
        as_void_p,
        CFConst.kCFStringEncodingUTF8
    )
    if raw is None:
        # Slow path: copy the string contents into our own buffer.
        scratch = ctypes.create_string_buffer(1024)
        copied = CoreFoundation.CFStringGetCString(
            as_void_p,
            scratch,
            1024,
            CFConst.kCFStringEncodingUTF8
        )
        if not copied:
            raise OSError('Error copying C string from CFStringRef')
        raw = scratch.value
    return raw.decode('utf-8') if raw is not None else raw


def _assert_no_error(error, exception_class=None):
    """
    Raise if an OSStatus return code signals failure; a no-op on success.

    :param error: The OSStatus value returned by a Security.framework call.
    :param exception_class: Exception type to raise; defaults to
        ``ssl.SSLError``.
    """
    if error == 0:
        return

    # Ask Security.framework for a human-readable description of the code,
    # then immediately release the CFString it handed us.
    cf_error_string = Security.SecCopyErrorMessageString(error, None)
    message = _cf_string_to_unicode(cf_error_string)
    CoreFoundation.CFRelease(cf_error_string)

    if not message:
        message = u'OSStatus %s' % error

    if exception_class is None:
        exception_class = ssl.SSLError

    raise exception_class(message)


def _cert_array_from_pem(pem_bundle):
    """
    Given a bundle of certs in PEM format, turns them into a CFArray of certs
    that can be used to validate a cert chain.

    :param pem_bundle: PEM data as bytes (the regex operates on bytes).
    :returns: A mutable CFArray of SecCertificateRef objects. The caller
        owns the array and must CFRelease it.
    :raises ssl.SSLError: If the bundle contains no certificates, if
        allocation fails, or if a certificate cannot be parsed.
    """
    der_certs = [
        base64.b64decode(match.group(1))
        for match in _PEM_CERTS_RE.finditer(pem_bundle)
    ]
    if not der_certs:
        raise ssl.SSLError("No root certificates specified")

    cert_array = CoreFoundation.CFArrayCreateMutable(
        CoreFoundation.kCFAllocatorDefault,
        0,
        ctypes.byref(CoreFoundation.kCFTypeArrayCallBacks)
    )
    if not cert_array:
        raise ssl.SSLError("Unable to allocate memory!")

    try:
        for der_bytes in der_certs:
            certdata = _cf_data_from_bytes(der_bytes)
            if not certdata:
                raise ssl.SSLError("Unable to allocate memory!")
            cert = Security.SecCertificateCreateWithData(
                CoreFoundation.kCFAllocatorDefault, certdata
            )
            # The cert object retains the data it needs, so drop our ref.
            CoreFoundation.CFRelease(certdata)
            if not cert:
                raise ssl.SSLError("Unable to build cert object!")

            # CFArrayAppendValue retains the cert, so we can release ours.
            CoreFoundation.CFArrayAppendValue(cert_array, cert)
            CoreFoundation.CFRelease(cert)
    except Exception:
        # We need to free the array before the exception bubbles further.
        # We only want to do that if an error occurs: otherwise, the caller
        # should free.
        CoreFoundation.CFRelease(cert_array)
        # BUG FIX: previously the exception was swallowed here, so failures
        # returned an already-released CFArray (use-after-free). Re-raise so
        # the caller sees the error. Matches the upstream urllib3 fix.
        raise

    return cert_array


def _is_cert(item):
    """
    Return True when the given CFTypeRef is a SecCertificate.
    """
    return CoreFoundation.CFGetTypeID(item) == Security.SecCertificateGetTypeID()


def _is_identity(item):
    """
    Return True when the given CFTypeRef is a SecIdentity.
    """
    return CoreFoundation.CFGetTypeID(item) == Security.SecIdentityGetTypeID()


def _temporary_keychain():
    """
    Create a throwaway macOS keychain for holding per-socket credentials.

    The keychain lives in its own temporary directory and is protected by a
    one-time random password. The caller must free the returned
    SecKeychainRef, including calling SecKeychainDelete.

    :returns: A tuple of (SecKeychainRef, path to the temporary directory
        that contains the keychain file).
    """
    # SecKeychainCreate needs an on-disk path, so mkstemp is out: we make a
    # private temporary directory and invent a filename inside it. The first
    # 8 random bytes (base64-encoded) name the file; the remaining 32 become
    # the keychain password.
    entropy = os.urandom(40)
    filename = base64.b64encode(entropy[:8]).decode('utf-8')
    password = base64.b64encode(entropy[8:])  # Must be valid UTF-8
    tempdirectory = tempfile.mkdtemp()
    keychain_path = os.path.join(tempdirectory, filename).encode('utf-8')

    # Create the keychain itself and hand it back to the caller.
    keychain = Security.SecKeychainRef()
    status = Security.SecKeychainCreate(
        keychain_path,
        len(password),
        password,
        False,
        None,
        ctypes.byref(keychain)
    )
    _assert_no_error(status)

    return keychain, tempdirectory


def _load_items_from_file(keychain, path):
    """
    Given a single file, loads all the trust objects from it into arrays and
    the keychain.
    Returns a tuple of lists: the first list is a list of identities, the
    second a list of certs.

    :param keychain: A SecKeychainRef; SecItemImport may store keys from
        the file into it.
    :param path: Path of the file to import, read as raw bytes.

    Every returned item has been CFRetained here, so the caller owns those
    references and must CFRelease them when finished.
    """
    certificates = []
    identities = []
    result_array = None

    with open(path, 'rb') as f:
        raw_filedata = f.read()

    try:
        # Wrap the raw bytes in a CFData so Security.framework can read it.
        filedata = CoreFoundation.CFDataCreate(
            CoreFoundation.kCFAllocatorDefault,
            raw_filedata,
            len(raw_filedata)
        )
        result_array = CoreFoundation.CFArrayRef()
        result = Security.SecItemImport(
            filedata,  # cert data
            None,  # Filename, leaving it out for now
            None,  # What the type of the file is, we don't care
            None,  # what's in the file, we don't care
            0,  # import flags
            None,  # key params, can include passphrase in the future
            keychain,  # The keychain to insert into
            ctypes.byref(result_array)  # Results
        )
        _assert_no_error(result)

        # A CFArray is not very useful to us as an intermediary
        # representation, so we are going to extract the objects we want
        # and then free the array. We don't need to keep hold of keys: the
        # keychain already has them!
        result_count = CoreFoundation.CFArrayGetCount(result_array)
        for index in range(result_count):
            item = CoreFoundation.CFArrayGetValueAtIndex(
                result_array, index
            )
            item = ctypes.cast(item, CoreFoundation.CFTypeRef)

            # CFRetain before stashing: releasing result_array in the
            # finally block drops the array's own reference to each element.
            if _is_cert(item):
                CoreFoundation.CFRetain(item)
                certificates.append(item)
            elif _is_identity(item):
                CoreFoundation.CFRetain(item)
                identities.append(item)
    finally:
        if result_array:
            CoreFoundation.CFRelease(result_array)

        # NOTE(review): if CFDataCreate itself raised, ``filedata`` would be
        # unbound here and this release would NameError — confirm whether
        # that path is reachable in practice.
        CoreFoundation.CFRelease(filedata)

    return (identities, certificates)


def _load_client_cert_chain(keychain, *paths):
    """
    Load certificates and maybe keys from a number of files. Has the end goal
    of returning a CFArray containing one SecIdentityRef, and then zero or more
    SecCertificateRef objects, suitable for use as a client certificate trust
    chain.

    :param keychain: A SecKeychainRef that holds (or will hold) the private
        keys backing the identities.
    :param paths: File paths to load; falsy entries are silently skipped.
    :returns: A CFArray that the caller must keep alive for the whole
        connection and then CFRelease.
    """
    # Ok, the strategy.
    #
    # This relies on knowing that macOS will not give you a SecIdentityRef
    # unless you have imported a key into a keychain. This is a somewhat
    # artificial limitation of macOS (for example, it doesn't necessarily
    # affect iOS), but there is nothing inside Security.framework that lets you
    # get a SecIdentityRef without having a key in a keychain.
    #
    # So the policy here is we take all the files and iterate them in order.
    # Each one will use SecItemImport to have one or more objects loaded from
    # it. We will also point at a keychain that macOS can use to work with the
    # private key.
    #
    # Once we have all the objects, we'll check what we actually have. If we
    # already have a SecIdentityRef in hand, fab: we'll use that. Otherwise,
    # we'll take the first certificate (which we assume to be our leaf) and
    # ask the keychain to give us a SecIdentityRef with that cert's associated
    # key.
    #
    # We'll then return a CFArray containing the trust chain: one
    # SecIdentityRef and then zero-or-more SecCertificateRef objects. The
    # responsibility for freeing this CFArray will be with the caller. This
    # CFArray must remain alive for the entire connection, so in practice it
    # will be stored with a single SSLSocket, along with the reference to the
    # keychain.
    certificates = []
    identities = []

    # Filter out bad paths.
    paths = (path for path in paths if path)

    try:
        for file_path in paths:
            new_identities, new_certs = _load_items_from_file(
                keychain, file_path
            )
            identities.extend(new_identities)
            certificates.extend(new_certs)

        # Ok, we have everything. The question is: do we have an identity? If
        # not, we want to grab one from the first cert we have.
        if not identities:
            # NOTE(review): assumes at least one certificate was loaded; an
            # empty bundle would raise IndexError here — confirm callers
            # guarantee a non-empty chain.
            new_identity = Security.SecIdentityRef()
            status = Security.SecIdentityCreateWithCertificate(
                keychain,
                certificates[0],
                ctypes.byref(new_identity)
            )
            _assert_no_error(status)
            identities.append(new_identity)

            # We now want to release the original certificate, as we no longer
            # need it.
            CoreFoundation.CFRelease(certificates.pop(0))

        # We now need to build a new CFArray that holds the trust chain.
        trust_chain = CoreFoundation.CFArrayCreateMutable(
            CoreFoundation.kCFAllocatorDefault,
            0,
            ctypes.byref(CoreFoundation.kCFTypeArrayCallBacks),
        )
        for item in itertools.chain(identities, certificates):
            # ArrayAppendValue does a CFRetain on the item. That's fine,
            # because the finally block will release our other refs to them.
            CoreFoundation.CFArrayAppendValue(trust_chain, item)

        return trust_chain
    finally:
        for obj in itertools.chain(identities, certificates):
            CoreFoundation.CFRelease(obj)
_vendor/urllib3/fields.py000064400000013467151733136350011416 0ustar00from __future__ import absolute_import
import email.utils
import mimetypes

from .packages import six


def guess_content_type(filename, default='application/octet-stream'):
    """
    Determine the "Content-Type" for a file name.

    :param filename:
        The filename to inspect with :mod:`mimetypes`; may be falsy.
    :param default:
        Fallback value when no "Content-Type" can be determined.
    """
    if not filename:
        return default
    guessed, _ = mimetypes.guess_type(filename)
    return guessed or default


def format_header_param(name, value):
    """
    Format and quote a single header parameter.

    Plain ASCII values without quotes/backslashes/newlines are emitted as
    ``name="value"``. Anything else is encoded per RFC 2231 (as suggested
    by RFC 2388 Section 4.4) into ``name*=charset'lang'value`` form.

    :param name:
        The parameter name, a string expected to be ASCII only.
    :param value:
        The parameter value, provided as a unicode string.
    """
    needs_rfc2231 = any(ch in value for ch in '"\\\r\n')
    if not needs_rfc2231:
        candidate = '%s="%s"' % (name, value)
        try:
            candidate.encode('ascii')
        except (UnicodeEncodeError, UnicodeDecodeError):
            # Non-ASCII content: fall through to RFC 2231 encoding.
            pass
        else:
            return candidate
    if not six.PY3 and isinstance(value, six.text_type):  # Python 2:
        value = value.encode('utf-8')
    encoded = email.utils.encode_rfc2231(value, 'utf-8')
    return '%s*=%s' % (name, encoded)


class RequestField(object):
    """
    A single named parameter of a multipart request body.

    :param name:
        The name of this request field.
    :param data:
        The data/value body.
    :param filename:
        An optional filename of the request field.
    :param headers:
        An optional dict-like object of headers to initially use for the field.
    """
    def __init__(self, name, data, filename=None, headers=None):
        self._name = name
        self._filename = filename
        self.data = data
        # Copy so later header mutation never touches the caller's mapping.
        self.headers = dict(headers) if headers else {}

    @classmethod
    def from_tuples(cls, fieldname, value):
        """
        A :class:`~urllib3.fields.RequestField` factory from old-style tuple parameters.

        Accepts either a plain value, or a filetuple: a
        (filename, data[, MIME type]) tuple where the MIME type is optional.
        For example::

            'foo': 'bar',
            'fakefile': ('foofile.txt', 'contents of foofile'),
            'realfile': ('barfile.txt', open('realfile').read()),
            'typedfile': ('bazfile.bin', open('bazfile').read(), 'image/jpeg'),
            'nonamefile': 'contents of nonamefile field',

        Field names and filenames must be unicode.
        """
        if not isinstance(value, tuple):
            filename = None
            content_type = None
            data = value
        elif len(value) == 3:
            filename, data, content_type = value
        else:
            filename, data = value
            content_type = guess_content_type(filename)

        field = cls(fieldname, data, filename=filename)
        field.make_multipart(content_type=content_type)
        return field

    def _render_part(self, name, value):
        """
        Overridable hook used to format one header parameter.

        :param name:
            The name of the parameter, a string expected to be ASCII only.
        :param value:
            The value of the parameter, provided as a unicode string.
        """
        return format_header_param(name, value)

    def _render_parts(self, header_parts):
        """
        Format and quote the pieces of a single composite header, e.g. the
        parts of a 'Content-Disposition' field, as ``k1="v1"; k2="v2"; ...``.

        :param header_parts:
            A sequence of (k, v) tuples or a :class:`dict` of (k, v);
            entries whose value is ``None`` are skipped.
        """
        if isinstance(header_parts, dict):
            header_parts = header_parts.items()

        rendered = [
            self._render_part(part_name, part_value)
            for part_name, part_value in header_parts
            if part_value is not None
        ]
        return '; '.join(rendered)

    def render_headers(self):
        """
        Render all headers for this field, well-known ones first, ending
        with a blank line.
        """
        lines = []
        sort_keys = ['Content-Disposition', 'Content-Type', 'Content-Location']

        # Emit the well-known headers in a fixed, predictable order.
        for key in sort_keys:
            value = self.headers.get(key, False)
            if value:
                lines.append('%s: %s' % (key, value))

        # Then everything else, skipping empty values.
        for header_name, header_value in self.headers.items():
            if header_name not in sort_keys and header_value:
                lines.append('%s: %s' % (header_name, header_value))

        lines.append('\r\n')
        return '\r\n'.join(lines)

    def make_multipart(self, content_disposition=None, content_type=None,
                       content_location=None):
        """
        Turn this field into a multipart request field by overriding the
        "Content-Disposition", "Content-Type" and "Content-Location" headers.

        :param content_type:
            The 'Content-Type' of the request body.
        :param content_location:
            The 'Content-Location' of the request body.

        """
        disposition = content_disposition or 'form-data'
        disposition += '; ' + self._render_parts(
            (('name', self._name), ('filename', self._filename))
        )
        self.headers['Content-Disposition'] = disposition
        self.headers['Content-Type'] = content_type
        self.headers['Content-Location'] = content_location
_vendor/urllib3/util/timeout.py000064400000023035151733136350012603 0ustar00from __future__ import absolute_import
# The default socket timeout, used by httplib to indicate that no timeout was
# specified by the user
from socket import _GLOBAL_DEFAULT_TIMEOUT
import time

from ..exceptions import TimeoutStateError

# A sentinel value to indicate that no timeout was specified by the user in
# urllib3 (distinct from None, which explicitly disables a timeout).
_Default = object()


# Use time.monotonic if available (Python >= 3.3): unlike time.time it
# cannot jump backwards when the wall clock is adjusted.
current_time = getattr(time, "monotonic", time.time)


class Timeout(object):
    """ Timeout configuration.

    A :class:`Timeout` bundles up to three limits:

    * ``connect`` -- the longest we wait for a connection attempt to a
      server to succeed. ``None`` means wait forever; omitting it uses the
      system socket default.
    * ``read`` -- the longest we wait between consecutive read operations
      on the response socket. ``None`` means wait forever; omitting it uses
      the system socket default.
    * ``total`` -- a combined budget: the read allowance is whatever is
      left over after the connect attempt. When both a total and a
      connect (or read) value are given, the shorter one applies.
      Defaults to ``None``.

    All three accept an integer, a float, or ``None``.

    Use as a pool-wide default, or per request (overriding the pool)::

        timeout = Timeout(connect=2.0, read=7.0)
        http = PoolManager(timeout=timeout)
        response = http.request('GET', 'http://example.com/')

        response = http.request('GET', 'http://example.com/', timeout=Timeout(10))

    Disable timeouts entirely with ``Timeout(connect=None, read=None)``.

    .. note::

        These values bound individual socket operations, not wall-clock
        request time. DNS resolution ignores the socket timeout; a server
        that trickles one byte every few seconds will never trip a read
        timeout even though the request takes minutes. If you need a hard
        wall-clock cut-off, run a separate "watcher" thread.
    """

    #: A sentinel object representing the default timeout value
    DEFAULT_TIMEOUT = _GLOBAL_DEFAULT_TIMEOUT

    def __init__(self, total=None, connect=_Default, read=_Default):
        # Validate eagerly so bad values fail at construction time.
        self._connect = self._validate_timeout(connect, 'connect')
        self._read = self._validate_timeout(read, 'read')
        self.total = self._validate_timeout(total, 'total')
        # Set by start_connect(); None means the clock is not running.
        self._start_connect = None

    def __str__(self):
        template = '%s(connect=%r, read=%r, total=%r)'
        return template % (
            type(self).__name__, self._connect, self._read, self.total)

    @classmethod
    def _validate_timeout(cls, value, name):
        """ Check that a single timeout value is acceptable.

        :param value: The timeout value to validate
        :param name: The name of the timeout attribute to validate. This is
            used to specify in error messages.
        :return: The validated and casted version of the given value.
        :raises ValueError: If it is a numeric value less than or equal to
            zero, or the type is not an integer, float, or None.
        """
        # The _Default sentinel means "not supplied": use the system value.
        if value is _Default:
            return cls.DEFAULT_TIMEOUT

        if value is None or value is cls.DEFAULT_TIMEOUT:
            return value

        # bool is a subclass of int, so reject it explicitly before the
        # numeric checks below would accept it.
        if isinstance(value, bool):
            raise ValueError(
                "Timeout cannot be a boolean value. It must "
                "be an int, float or None.")
        try:
            float(value)
        except (TypeError, ValueError):
            raise ValueError(
                "Timeout value %s was %s, but it must be an "
                "int, float or None." % (name, value))

        try:
            if value <= 0:
                raise ValueError(
                    "Attempted to set %s timeout to %s, but the "
                    "timeout cannot be set to a value less "
                    "than or equal to 0." % (name, value))
        except TypeError:
            # Comparison unsupported for this type (Python 3 raises here).
            raise ValueError(
                "Timeout value %s was %s, but it must be an "
                "int, float or None." % (name, value))

        return value

    @classmethod
    def from_float(cls, timeout):
        """ Create a new Timeout from a legacy timeout value.

        httplib.py applies one number to both the connect() and recv()
        socket calls; this builds an equivalent :class:`Timeout` with that
        value used for both the connect and read limits.

        :param timeout: The legacy timeout value.
        :type timeout: integer, float, sentinel default object, or None
        :return: Timeout object
        :rtype: :class:`Timeout`
        """
        return Timeout(read=timeout, connect=timeout)

    def clone(self):
        """ Create a copy of the timeout object

        Timeout properties are stored per-pool but each request needs a
        fresh Timeout object with its own start/stop state.

        :return: a copy of the timeout object
        :rtype: :class:`Timeout`
        """
        # copy.deepcopy is off-limits: it would clone _GLOBAL_DEFAULT_TIMEOUT
        # too, breaking the identity check socket.py uses to detect the
        # user default.
        return Timeout(connect=self._connect, read=self._read,
                       total=self.total)

    def start_connect(self):
        """ Start the timeout clock, used during a connect() attempt

        :raises urllib3.exceptions.TimeoutStateError: if you attempt
            to start a timer that has been started already.
        """
        if self._start_connect is not None:
            raise TimeoutStateError("Timeout timer has already been started.")
        self._start_connect = current_time()
        return self._start_connect

    def get_connect_duration(self):
        """ Gets the time elapsed since the call to :meth:`start_connect`.

        :return: Elapsed time.
        :rtype: float
        :raises urllib3.exceptions.TimeoutStateError: if you attempt
            to get duration for a timer that hasn't been started.
        """
        if self._start_connect is None:
            raise TimeoutStateError("Can't get connect duration for timer "
                                    "that has not started.")
        return current_time() - self._start_connect

    @property
    def connect_timeout(self):
        """ Get the value to use when setting a connection timeout.

        This will be a positive float or integer, the value None
        (never timeout), or the default system timeout.

        :return: Connect timeout.
        :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None
        """
        if self.total is None:
            return self._connect
        if self._connect is None or self._connect is self.DEFAULT_TIMEOUT:
            return self.total
        return min(self._connect, self.total)

    @property
    def read_timeout(self):
        """ Get the value for the read timeout.

        When a total budget is set, the time already consumed by the
        connect phase (per :meth:`get_connect_duration`) is subtracted
        from the read allowance.

        :return: Value to use for the read timeout.
        :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None
        :raises urllib3.exceptions.TimeoutStateError: If :meth:`start_connect`
            has not yet been called on this object.
        """
        total_set = (self.total is not None and
                     self.total is not self.DEFAULT_TIMEOUT)
        read_set = (self._read is not None and
                    self._read is not self.DEFAULT_TIMEOUT)

        if total_set and read_set:
            # Before the connect clock starts there is nothing to subtract.
            if self._start_connect is None:
                return self._read
            return max(0, min(self.total - self.get_connect_duration(),
                              self._read))
        if total_set:
            return max(0, self.total - self.get_connect_duration())
        return self._read
_vendor/urllib3/util/__pycache__/response.cpython-36.opt-1.pyc000064400000003433151733136350020176 0ustar003

�Pf'	�@s@ddlmZddlmZddlmZdd�Zdd�Zd	d
�Z	dS)�)�absolute_import�)�http_client)�HeaderParsingErrorcCsfy|j�Stk
rYnXy|jStk
r8YnXy
|jdkStk
rXYnXtd��dS)zt
    Checks whether a given file-like object is closed.

    :param obj:
        The file-like object to check.
    Nz)Unable to determine whether fp is closed.)Zisclosed�AttributeError�closed�fp�
ValueError)�obj�r�/usr/lib/python3.6/response.py�is_fp_closeds
r
cCs\t|tj�stdjt|����t|dd�}t|dd�}d}|rD|�}|sL|rXt||d��dS)aP
    Asserts whether all headers have been successfully parsed.
    Extracts encountered errors from the result of parsing headers.

    Only works on Python 3.

    :param headers: Headers to verify.
    :type headers: `httplib.HTTPMessage`.

    :raises urllib3.exceptions.HeaderParsingError:
        If parsing errors are found.
    z"expected httplib.Message, got {0}.�defectsN�get_payload)r�
unparsed_data)�
isinstance�httplibZHTTPMessage�	TypeError�format�type�getattrr)Zheadersrrrrrr�assert_header_parsing&srcCs$|j}t|t�r|dkS|j�dkS)z�
    Checks whether the request of a response has been a HEAD-request.
    Handles the quirks of AppEngine.

    :param conn:
    :type conn: :class:`httplib.HTTPResponse`
    �ZHEAD)�_methodr�int�upper)Zresponse�methodrrr�is_response_to_headEs	
rN)
Z
__future__rZpackages.six.movesrr�
exceptionsrr
rrrrrr�<module>s
_vendor/urllib3/util/__pycache__/connection.cpython-36.pyc000064400000005744151733136350017547 0ustar003

�Pf��@snddlmZddlZddlmZddlmZmZdd�Zej	ddfdd	�Z
d
d�Zdd
�Zdd�Z
e
d�ZdS)�)�absolute_importN�)�
wait_for_read)�
HAS_SELECT�
SelectorErrorcCsVt|dd�}|dkrdS|dkr$dSts,dSytt|dd��Stk
rPdSXdS)a 
    Returns True if the connection is dropped and should be closed.

    :param conn:
        :class:`httplib.HTTPConnection` object.

    Note: For platforms like AppEngine, this will always return ``False`` to
    let the platform handle connection recycling transparently for us.
    �sockFNTg)�timeout)�getattrr�boolrr)Zconnr�r� /usr/lib/python3.6/connection.py�is_connection_droppeds
r
cCs�|\}}|jd�r|jd�}d}t�}x�tj|||tj�D]�}|\}	}
}}}
d}yHtj|	|
|�}t||�|tjk	r~|j|�|r�|j	|�|j
|
�|Stjk
r�}z|}|dk	r�|j�d}WYdd}~Xq:Xq:W|dk	r�|�tjd��dS)adConnect to *address* and return the socket object.

    Convenience function.  Connect to *address* (a 2-tuple ``(host,
    port)``) and return the socket object.  Passing the optional
    *timeout* parameter will set the timeout on the socket instance
    before attempting to connect.  If no *timeout* is supplied, the
    global default timeout setting returned by :func:`getdefaulttimeout`
    is used.  If *source_address* is set it must be a tuple of (host, port)
    for the socket to bind as a source address before making the connection.
    An host of '' or port 0 tells the OS to use the default.
    �[z[]Nz!getaddrinfo returns an empty list)
�
startswith�strip�allowed_gai_family�socketZgetaddrinfoZSOCK_STREAM�_set_socket_options�_GLOBAL_DEFAULT_TIMEOUTZ
settimeout�bindZconnect�error�close)ZaddressrZsource_addressZsocket_options�hostZport�err�family�resZafZsocktype�protoZ	canonnameZsar�errr�create_connection$s2






rcCs(|dkrdSx|D]}|j|�qWdS)N)Z
setsockopt)rZoptions�optrrrrXs
rcCstj}trtj}|S)z�This function is designed to work in the context of
    getaddrinfo, where family=socket.AF_UNSPEC is the default and
    will perform a DNS search for both IPv6 and IPv4 records.)rZAF_INET�HAS_IPV6Z	AF_UNSPEC)rrrrr`srcCsVd}d}tjrFy"tjtj�}|j|df�d}Wntk
rDYnX|rR|j�|S)z6 Returns True if the system can bind an IPv6 address. NFrT)r�has_ipv6ZAF_INET6r�	Exceptionr)rrr!rrr�	_has_ipv6ksr#z::1)Z
__future__rr�waitrZ	selectorsrrr
rrrrr#r rrrr�<module>s3_vendor/urllib3/util/__pycache__/timeout.cpython-36.opt-1.pyc000064400000020757151733136350020036 0ustar003

�Pf&�@sTddlmZddlmZddlZddlmZe�Ze	edej�Z
Gdd�de�ZdS)	�)�absolute_import)�_GLOBAL_DEFAULT_TIMEOUTN�)�TimeoutStateErrorZ	monotonicc@steZdZdZeZdeefdd�Zdd�Ze	dd��Z
e	d	d
��Zdd�Zd
d�Z
dd�Zedd��Zedd��ZdS)�Timeouta� Timeout configuration.

    Timeouts can be defined as a default for a pool::

        timeout = Timeout(connect=2.0, read=7.0)
        http = PoolManager(timeout=timeout)
        response = http.request('GET', 'http://example.com/')

    Or per-request (which overrides the default for the pool)::

        response = http.request('GET', 'http://example.com/', timeout=Timeout(10))

    Timeouts can be disabled by setting all the parameters to ``None``::

        no_timeout = Timeout(connect=None, read=None)
        response = http.request('GET', 'http://example.com/, timeout=no_timeout)


    :param total:
        This combines the connect and read timeouts into one; the read timeout
        will be set to the time leftover from the connect attempt. In the
        event that both a connect timeout and a total are specified, or a read
        timeout and a total are specified, the shorter timeout will be applied.

        Defaults to None.

    :type total: integer, float, or None

    :param connect:
        The maximum amount of time to wait for a connection attempt to a server
        to succeed. Omitting the parameter will default the connect timeout to
        the system default, probably `the global default timeout in socket.py
        <http://hg.python.org/cpython/file/603b4d593758/Lib/socket.py#l535>`_.
        None will set an infinite timeout for connection attempts.

    :type connect: integer, float, or None

    :param read:
        The maximum amount of time to wait between consecutive
        read operations for a response from the server. Omitting
        the parameter will default the read timeout to the system
        default, probably `the global default timeout in socket.py
        <http://hg.python.org/cpython/file/603b4d593758/Lib/socket.py#l535>`_.
        None will set an infinite timeout.

    :type read: integer, float, or None

    .. note::

        Many factors can affect the total amount of time for urllib3 to return
        an HTTP response.

        For example, Python's DNS resolver does not obey the timeout specified
        on the socket. Other factors that can affect total request time include
        high CPU load, high swap, the program running at a low priority level,
        or other behaviors.

        In addition, the read and total timeouts only measure the time between
        read operations on the socket connecting the client and the server,
        not the total amount of time for the request to return a complete
        response. For most requests, the timeout is raised because the server
        has not sent the first byte in the specified time. This is not always
        the case; if a server streams one byte every fifteen seconds, a timeout
        of 20 seconds will not trigger, even though the request will take
        several minutes to complete.

        If your goal is to cut off any request after a set amount of wall clock
        time, consider having a second "watcher" thread to cut off a slow
        request.
    NcCs4|j|d�|_|j|d�|_|j|d�|_d|_dS)N�connect�read�total)�_validate_timeout�_connect�_readr	�_start_connect)�selfr	rr�r�/usr/lib/python3.6/timeout.py�__init__]szTimeout.__init__cCsdt|�j|j|j|jfS)Nz!%s(connect=%r, read=%r, total=%r))�type�__name__rrr	)rrrr�__str__cszTimeout.__str__cCs�|tkr|jS|dks ||jkr$|St|t�r6td��yt|�Wn(ttfk
rjtd||f��YnXy|dkr�td||f��Wn$tk
r�td||f��YnX|S)a� Check that a timeout attribute is valid.

        :param value: The timeout value to validate
        :param name: The name of the timeout attribute to validate. This is
            used to specify in error messages.
        :return: The validated and casted version of the given value.
        :raises ValueError: If it is a numeric value less than or equal to
            zero, or the type is not an integer, float, or None.
        NzDTimeout cannot be a boolean value. It must be an int, float or None.z>Timeout value %s was %s, but it must be an int, float or None.rzdAttempted to set %s timeout to %s, but the timeout cannot be set to a value less than or equal to 0.)�_Default�DEFAULT_TIMEOUT�
isinstance�bool�
ValueError�float�	TypeError)�cls�value�namerrrr
gs&
zTimeout._validate_timeoutcCst||d�S)a� Create a new Timeout from a legacy timeout value.

        The timeout value used by httplib.py sets the same timeout on the
        connect(), and recv() socket requests. This creates a :class:`Timeout`
        object that sets the individual timeouts to the ``timeout`` value
        passed to this function.

        :param timeout: The legacy timeout value.
        :type timeout: integer, float, sentinel default object, or None
        :return: Timeout object
        :rtype: :class:`Timeout`
        )rr)r)rZtimeoutrrr�
from_float�szTimeout.from_floatcCst|j|j|jd�S)a Create a copy of the timeout object

        Timeout properties are stored per-pool but each request needs a fresh
        Timeout object to ensure each one has its own start/stop configured.

        :return: a copy of the timeout object
        :rtype: :class:`Timeout`
        )rrr	)rrrr	)rrrr�clone�s
z
Timeout.clonecCs |jdk	rtd��t�|_|jS)z� Start the timeout clock, used during a connect() attempt

        :raises urllib3.exceptions.TimeoutStateError: if you attempt
            to start a timer that has been started already.
        Nz'Timeout timer has already been started.)r
r�current_time)rrrr�
start_connect�s
zTimeout.start_connectcCs|jdkrtd��t�|jS)a Gets the time elapsed since the call to :meth:`start_connect`.

        :return: Elapsed time.
        :rtype: float
        :raises urllib3.exceptions.TimeoutStateError: if you attempt
            to get duration for a timer that hasn't been started.
        Nz:Can't get connect duration for timer that has not started.)r
rr!)rrrr�get_connect_duration�s
zTimeout.get_connect_durationcCs:|jdkr|jS|jdks&|j|jkr,|jSt|j|j�S)a" Get the value to use when setting a connection timeout.

        This will be a positive float or integer, the value None
        (never timeout), or the default system timeout.

        :return: Connect timeout.
        :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None
        N)r	rr�min)rrrr�connect_timeout�s


zTimeout.connect_timeoutcCs�|jdk	rX|j|jk	rX|jdk	rX|j|jk	rX|jdkr<|jStdt|j|j�|j��S|jdk	r�|j|jk	r�td|j|j��S|jSdS)a� Get the value for the read timeout.

        This assumes some time has elapsed in the connection timeout and
        computes the read timeout appropriately.

        If self.total is set, the read timeout is dependent on the amount of
        time taken by the connect timeout. If the connection time has not been
        established, a :exc:`~urllib3.exceptions.TimeoutStateError` will be
        raised.

        :return: Value to use for the read timeout.
        :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None
        :raises urllib3.exceptions.TimeoutStateError: If :meth:`start_connect`
            has not yet been called on this object.
        Nr)r	rrr
�maxr$r#)rrrr�read_timeout�s



zTimeout.read_timeout)r�
__module__�__qualname__�__doc__rrrrr�classmethodr
rr r"r#�propertyr%r'rrrrrsF%
r)Z
__future__rZsocketrZtime�
exceptionsr�objectr�getattrr!rrrrr�<module>s_vendor/urllib3/util/__pycache__/wait.cpython-36.opt-1.pyc000064400000003055151733136350017304 0ustar003

�Pf��@s:ddlmZmZmZmZd	dd�Zd
dd�Zddd�ZdS)�)�
HAS_SELECT�DefaultSelector�
EVENT_READ�EVENT_WRITENcsttstd��t|t�s0t|d�r(|g}nt|�}t��4}x|D]}|j|��q>W�fdd�|j|�D�SQRXdS)z� Waits for IO events to be available from a list of sockets
    or optionally a single socket if passed in. Returns a list of
    sockets that can be interacted with immediately. z!Platform does not have a selector�filenocs"g|]}|d�@r|dj�qS)r�)Zfileobj)�.0�key)�events��/usr/lib/python3.6/wait.py�
<listcomp>sz'_wait_for_io_events.<locals>.<listcomp>N)r�
ValueError�
isinstance�list�hasattrr�registerZselect)�socksr
�timeoutZselectorZsockr)r
r�_wait_for_io_events	s



rcCst|t|�S)z� Waits for reading to be available from a list of sockets
    or optionally a single socket if passed in. Returns a list of
    sockets that can be read from immediately. )rr)rrrrr�
wait_for_readsrcCst|t|�S)z� Waits for writing to be available from a list of sockets
    or optionally a single socket if passed in. Returns a list of
    sockets that can be written to immediately. )rr)rrrrr�wait_for_write$sr)N)N)N)Z	selectorsrrrrrrrrrrr�<module>s

_vendor/urllib3/util/__pycache__/url.cpython-36.pyc000064400000012430151733136350016200 0ustar003

�Pf��@s�ddlmZddlmZddlZddlmZdddd	d
ddgZdZej	d�Z
ddlmZGdd�dede��Z
dd�Zdd�Zdd�ZdS)�)�absolute_import)�
namedtupleN�)�LocationParseError�scheme�auth�host�port�path�query�fragment�http�httpsz[- ])�quotecs^eZdZdZfZd�fdd�	Zedd��Zedd��Zed	d
��Z	edd��Z
d
d�Z�ZS)�Urlz�
    Datastructure for representing an HTTP URL. Used as a return value for
    :func:`parse_url`. Both the scheme and host are normalized as they are
    both case-insensitive according to RFC 3986.
    Nc	sV|r|jd�rd|}|r$|j�}|r8|tkr8|j�}tt|�j||||||||�S)N�/)�
startswith�lower�NORMALIZABLE_SCHEMES�superr�__new__)�clsrrrr	r
rr)�	__class__��/usr/lib/python3.6/url.pyrszUrl.__new__cCs|jS)z@For backwards-compatibility with urlparse. We're nice like that.)r)�selfrrr�hostname$szUrl.hostnamecCs&|jpd}|jdk	r"|d|j7}|S)z)Absolute path including the query string.rN�?)r
r)rZurirrr�request_uri)s

zUrl.request_uricCs|jrd|j|jfS|jS)z(Network location including host and portz%s:%d)r	r)rrrr�netloc3sz
Url.netlocc	Cs�|\}}}}}}}d}|dk	r*||d7}|dk	r>||d7}|dk	rN||7}|dk	rf|dt|�7}|dk	rv||7}|dk	r�|d|7}|dk	r�|d|7}|S)a�
        Convert self into a url

        This function should more or less round-trip with :func:`.parse_url`. The
        returned url may not be exactly the same as the url inputted to
        :func:`.parse_url`, but it should be equivalent by the RFC (e.g., urls
        with a blank port will have : removed).

        Example: ::

            >>> U = parse_url('http://google.com/mail/')
            >>> U.url
            'http://google.com/mail/'
            >>> Url('http', 'username:password', 'host.com', 80,
            ... '/path', 'query', 'fragment').url
            'http://username:password@host.com:80/path?query#fragment'
        �Nz://�@�:r�#)�str)	rrrrr	r
rr�urlrrrr%:s"zUrl.urlcCs|jS)N)r%)rrrr�__str__bszUrl.__str__)NNNNNNN)
�__name__�
__module__�__qualname__�__doc__�	__slots__r�propertyrrrr%r&�
__classcell__rr)rrrs

(rcCszd}d}x8|D]0}|j|�}|dkr&q|dks6||kr|}|}qW|dksR|dkr\|ddfS|d|�||dd�|fS)a�
    Given a string and an iterable of delimiters, split on the first found
    delimiter. Return two split parts and the matched delimiter.

    If not found, then the first part is the full input string.

    Example::

        >>> split_first('foo/bar?baz', '?/=')
        ('foo', 'bar?baz', '/')
        >>> split_first('foo/bar?baz', '123')
        ('foo/bar?baz', '', None)

    Scales linearly with number of delims. Not ideal for large number of delims.
    Nrr �)�find)�sZdelimsZmin_idxZ	min_delim�d�idxrrr�split_firstfs


r3cCs�|s
t�Stjdd�|�}d}d}d}d}d}d}d}d|krN|jdd�\}}t|dddg�\}}}	|	rp|	|}d	|kr�|jd	d�\}}|r�|d
dkr�|jdd�\}}|d7}d
|k�r|jd
d�\}
}|s�|
}|�r|j�s�t|��yt|�}Wnt	k
�rt|��YnXnd}n|�r.|�r.|}|�sHt|||||||�Sd|k�rb|jdd�\}}d|k�r||jdd�\}}t|||||||�S)a:
    Given a url, return a parsed :class:`.Url` namedtuple. Best-effort is
    performed to parse incomplete urls. Fields not provided will be None.

    Partly backwards-compatible with :mod:`urlparse`.

    Example::

        >>> parse_url('http://google.com/mail/')
        Url(scheme='http', host='google.com', port=None, path='/mail/', ...)
        >>> parse_url('google.com:80')
        Url(scheme=None, host='google.com', port=80, path=None, ...)
        >>> parse_url('/foo?bar')
        Url(scheme=None, host=None, port=None, path='/foo', query='bar', ...)
    cSst|j��S)N)r�group)�matchrrr�<lambda>�szparse_url.<locals>.<lambda>Nz://r.rrr#r!r�[�]r")
r�!_contains_disallowed_url_pchar_re�sub�splitr3�rsplit�isdigitr�int�
ValueError)r%rrrr	r
rrZpath_ZdelimZ_hostrrr�	parse_url�sR


r@cCst|�}|jpd|j|jfS)z4
    Deprecated. Use :func:`parse_url` instead.
    r
)r@rrr	)r%�prrr�get_host�srB)r
rN)Z
__future__r�collectionsr�re�
exceptionsrZ	url_attrsr�compiler9Zpackages.six.moves.urllib.parserrr3r@rBrrrr�<module>s
U!a_vendor/urllib3/util/__pycache__/ssl_.cpython-36.opt-1.pyc000064400000021265151733136350017303 0ustar003

�Pf�/�!@s�ddlmZddlZddlZddlZddlmZmZddlm	Z	m
Z
mZddlm
Z
mZmZdZdZdZdZe	e
ed�Zd	d
�Zeede�Zy,ddlZddlmZmZmZdd
lmZWnek
r�YnXyddlmZmZmZWn"ek
�rd0\ZZdZYnXdj dddddddddddddd d!d"g�Z!ydd#lmZWn.ek
�rrddl"Z"Gd$d%�d%e#�ZYnXd&d'�Z$d(d)�Z%d*d+�Z&d1d,d-�Z'd2d.d/�Z(dS)3�)�absolute_importN)�hexlify�	unhexlify)�md5�sha1�sha256�)�SSLError�InsecurePlatformWarning�SNIMissingWarningF)� �(�@cCsHtt|�t|��}x*tt|�t|��D]\}}|||AO}q(W|dkS)z�
    Compare two digests of equal length in constant time.

    The digests must be of type str/bytes.
    Returns True if the digests match, and False otherwise.
    r)�abs�len�zip�	bytearray)�a�b�result�l�r�r�/usr/lib/python3.6/ssl_.py�_const_compare_digest_backportsrZcompare_digest)�wrap_socket�	CERT_NONE�PROTOCOL_SSLv23)�HAS_SNI)�OP_NO_SSLv2�OP_NO_SSLv3�OP_NO_COMPRESSION��i�:zTLS13-AES-256-GCM-SHA384zTLS13-CHACHA20-POLY1305-SHA256zTLS13-AES-128-GCM-SHA256zECDH+AESGCMz
ECDH+CHACHA20z	DH+AESGCMzDH+CHACHA20zECDH+AES256z	DH+AES256zECDH+AES128zDH+AESz
RSA+AESGCMzRSA+AESz!aNULLz!eNULLz!MD5)�
SSLContextc@s\eZdZdejkodknp*dejkZdd�Zdd�Zdd	d
�Zdd�Z	ddd�Z
dS)r%r��cCs6||_d|_tj|_d|_d|_d|_d|_d|_	dS)NFr)
�protocol�check_hostname�sslr�verify_mode�ca_certs�options�certfile�keyfile�ciphers)�selfZprotocol_versionrrr�__init__cszSSLContext.__init__cCs||_||_dS)N)r.r/)r1r.r/rrr�load_cert_chainnszSSLContext.load_cert_chainNcCs||_|dk	rtd��dS)Nz-CA directories not supported in older Pythons)r,r	)r1ZcafileZcapathrrr�load_verify_locationsrsz SSLContext.load_verify_locationscCs|jstd��||_dS)Nz�Your version of Python does not support setting a custom cipher suite. Please upgrade to Python 2.7, 3.2, or later if you need this functionality.)�supports_set_ciphers�	TypeErrorr0)r1Zcipher_suiterrr�set_ciphersxszSSLContext.set_ciphersFcCsTtjdt�|j|j|j|j|j|d�}|jrDt	|fd|j
i|��St	|f|�SdS)Na2A true SSLContext object is not available. This prevents urllib3 from configuring SSL appropriately and may cause certain SSL connections to fail. You can upgrade to a newer version of Python to solve this. For more information, see https://urllib3.readthedocs.io/en/latest/advanced-usage.html#ssl-warnings)r/r.r,�	cert_reqs�ssl_version�server_sider0)�warnings�warnr
r/r.r,r+r(r5rr0)r1Zsocket�server_hostnamer:�kwargsrrrr�szSSLContext.wrap_socket)rr&)r')r'r)NN)NF)�__name__�
__module__�__qualname__�sys�version_infor5r2r3r4r7rrrrrr%_s

	r%cCsn|jdd�j�}t|�}tj|�}|s4tdj|���t|j��}||�j	�}t
||�sjtdj|t|����dS)z�
    Checks if given fingerprint matches the supplied certificate.

    :param cert:
        Certificate as bytes object.
    :param fingerprint:
        Fingerprint as string of hexdigits, can be interspersed by colons.
    r$�z"Fingerprint of invalid length: {0}z6Fingerprints did not match. Expected "{0}", got "{1}".N)�replace�lowerr�HASHFUNC_MAP�getr	�formatr�encodeZdigest�_const_compare_digestr)ZcertZfingerprintZ
digest_lengthZhashfuncZfingerprint_bytesZcert_digestrrr�assert_fingerprint�s


rLcCs@|dkrtSt|t�r<tt|d�}|dkr8ttd|�}|S|S)a�
    Resolves the argument to a numeric constant, which can be passed to
    the wrap_socket function/method from the ssl module.
    Defaults to :data:`ssl.CERT_NONE`.
    If given a string it is assumed to be the name of the constant in the
    :mod:`ssl` module or its abbrevation.
    (So you can specify `REQUIRED` instead of `CERT_REQUIRED`.
    If it's neither `None` nor a string we assume it is already the numeric
    constant which can directly be passed to wrap_socket.
    NZCERT_)r�
isinstance�str�getattrr*)�	candidate�resrrr�resolve_cert_reqs�s
rRcCs@|dkrtSt|t�r<tt|d�}|dkr8ttd|�}|S|S)z 
    like resolve_cert_reqs
    NZ	PROTOCOL_)rrMrNrOr*)rPrQrrr�resolve_ssl_version�s
rScCs�t|p
tj�}|dkrtjn|}|dkrDd}|tO}|tO}|tO}|j|O_t|dd�rl|j	|pht
�||_t|dd�dk	r�d|_|S)a�All arguments have the same meaning as ``ssl_wrap_socket``.

    By default, this function does a lot of the same work that
    ``ssl.create_default_context`` does on Python 3.4+. It:

    - Disables SSLv2, SSLv3, and compression
    - Sets a restricted set of server ciphers

    If you wish to enable SSLv3, you can do::

        from urllib3.util import ssl_
        context = ssl_.create_urllib3_context()
        context.options &= ~ssl_.OP_NO_SSLv3

    You can do the same to enable compression (substituting ``COMPRESSION``
    for ``SSLv3`` in the last line above).

    :param ssl_version:
        The desired protocol version to use. This will default to
        PROTOCOL_SSLv23 which will negotiate the highest protocol that both
        the server and your installation of OpenSSL support.
    :param cert_reqs:
        Whether to require the certificate verification. This defaults to
        ``ssl.CERT_REQUIRED``.
    :param options:
        Specific OpenSSL options. These default to ``ssl.OP_NO_SSLv2``,
        ``ssl.OP_NO_SSLv3``, ``ssl.OP_NO_COMPRESSION``.
    :param ciphers:
        Which cipher suites to allow the server to select.
    :returns:
        Constructed SSLContext object with specified options
    :rtype: SSLContext
    Nrr5Tr)F)
r%r*rZ
CERT_REQUIREDrr r!r-rOr7�DEFAULT_CIPHERSr+r))r9r8r-r0�contextrrr�create_urllib3_context�s#rVc
Cs�|}
|
dkrt|||d�}
|s"|	r�y|
j||	�Wq�tk
r\}zt|��WYdd}~Xq�tk
r�}z|jtjkr�t|���WYdd}~Xq�Xn|dkr�t|
d�r�|
j�|r�|
j	||�t
r�|
j||d�Stj
dt�|
j|�S)a
    All arguments except for server_hostname, ssl_context, and ca_cert_dir have
    the same meaning as they do when using :func:`ssl.wrap_socket`.

    :param server_hostname:
        When SNI is supported, the expected hostname of the certificate
    :param ssl_context:
        A pre-made :class:`SSLContext` object. If none is provided, one will
        be created using :func:`create_urllib3_context`.
    :param ciphers:
        A string of ciphers we wish the client to support. This is not
        supported on Python 2.6 as the ssl module does not support it.
    :param ca_cert_dir:
        A directory containing CA certificates in multiple separate files, as
        supported by OpenSSL's -CApath flag or the capath argument to
        SSLContext.load_verify_locations().
    N)r0�load_default_certs)r=a�An HTTPS request has been made, but the SNI (Subject Name Indication) extension to TLS is not available on this platform. This may cause the server to present an incorrect TLS certificate, which can cause validation failures. You can upgrade to a newer version of Python to solve this. For more information, see https://urllib3.readthedocs.io/en/latest/advanced-usage.html#ssl-warnings)rVr4�IOErrorr	�OSError�errno�ENOENT�hasattrrWr3rrr;r<r)Zsockr/r.r8r,r=r9r0Zssl_contextZca_cert_dirrU�errr�ssl_wrap_sockets.r^)r"r#)NNNN)	NNNNNNNNN))Z
__future__rrZr;ZhmacZbinasciirrZhashlibrrr�
exceptionsr	r
rr%rZIS_PYOPENSSLZIS_SECURETRANSPORTrGrrOrKr*rrr�ImportErrorrr r!�joinrTrB�objectrLrRrSrVr^rrrr�<module>st

:
>_vendor/urllib3/util/__pycache__/request.cpython-36.opt-1.pyc000064400000006102151733136360020025 0ustar003

�Pfy�@s\ddlmZddlmZddlmZmZddlmZdZ	e
�Zddd	�Zd
d�Z
dd
�ZdS)�)�absolute_import)�	b64encode�)�b�
integer_types)�UnrewindableBodyErrorzgzip,deflateNcCs�i}|r6t|t�rnt|t�r*dj|�}nt}||d<|rB||d<|rNd|d<|rldtt|��jd�|d<|r�dtt|��jd�|d	<|r�d
|d<|S)a
    Shortcuts for generating request headers.

    :param keep_alive:
        If ``True``, adds 'connection: keep-alive' header.

    :param accept_encoding:
        Can be a boolean, list, or string.
        ``True`` translates to 'gzip,deflate'.
        List will get joined by comma.
        String will be used as provided.

    :param user_agent:
        String representing the user-agent you want, such as
        "python-urllib3/0.6"

    :param basic_auth:
        Colon-separated username:password string for 'authorization: basic ...'
        auth header.

    :param proxy_basic_auth:
        Colon-separated username:password string for 'proxy-authorization: basic ...'
        auth header.

    :param disable_cache:
        If ``True``, adds 'cache-control: no-cache' header.

    Example::

        >>> make_headers(keep_alive=True, user_agent="Batman/1.0")
        {'connection': 'keep-alive', 'user-agent': 'Batman/1.0'}
        >>> make_headers(accept_encoding=True)
        {'accept-encoding': 'gzip,deflate'}
    �,zaccept-encodingz
user-agentz
keep-aliveZ
connectionzBasic zutf-8Z
authorizationzproxy-authorizationzno-cachez
cache-control)�
isinstance�str�list�join�ACCEPT_ENCODINGrr�decode)Z
keep_aliveZaccept_encodingZ
user_agentZ
basic_authZproxy_basic_authZ
disable_cacheZheaders�r�/usr/lib/python3.6/request.py�make_headerss*$

rcCsR|dk	rt||�n:t|dd�dk	rNy|j�}Wnttfk
rLt}YnX|S)z
    If a position is provided, move file to that point.
    Otherwise, we'll attempt to record a position for future use.
    N�tell)�rewind_body�getattrr�IOError�OSError�_FAILEDTELL)�body�posrrr�set_file_positionMs
rcCstt|dd�}|dk	rNt|t�rNy||�Wqpttfk
rJtd��YqpXn"|tkr`td��ntdt|���dS)z�
    Attempt to rewind body to a certain position.
    Primarily used for request redirects and retries.

    :param body:
        File-like object that supports seek.

    :param int pos:
        Position to seek to in file.
    �seekNzAAn error occurred when rewinding request body for redirect/retry.zRUnable to record file position for rewinding request body during a redirect/retry.z4body_pos must be of type integer, instead it was %s.)	rr	rrrrr�
ValueError�type)rZbody_posZ	body_seekrrrr_s
r)NNNNNN)Z
__future__r�base64rZpackages.sixrr�
exceptionsrr
�objectrrrrrrrr�<module>s
A_vendor/urllib3/util/__pycache__/__init__.cpython-36.pyc000064400000002026151733136360017136 0ustar003

�Pf�@s�ddlmZddlmZddlmZddlmZddlm	Z	m
Z
mZmZm
Z
mZmZmZddlmZmZddlmZdd	lmZmZmZmZdd
lmZmZd ZdS)!�)�absolute_import�)�is_connection_dropped)�make_headers)�is_fp_closed)�
SSLContext�HAS_SNI�IS_PYOPENSSL�IS_SECURETRANSPORT�assert_fingerprint�resolve_cert_reqs�resolve_ssl_version�ssl_wrap_socket)�current_time�Timeout)�Retry)�get_host�	parse_url�split_first�Url)�
wait_for_read�wait_for_writerr	r
rrrrrrrrrrrrr
rrrrN)rr	r
rrrrrrrrrrrrr
rrrr)Z
__future__rZ
connectionrZrequestrZresponserZssl_rrr	r
rrr
rZtimeoutrrZretryrZurlrrrr�waitrr�__all__�rr�/usr/lib/python3.6/__init__.py�<module>s8(
_vendor/urllib3/util/__pycache__/ssl_.cpython-36.pyc000064400000021265151733136360016345 0ustar003

�Pf�/�!@s�ddlmZddlZddlZddlZddlmZmZddlm	Z	m
Z
mZddlm
Z
mZmZdZdZdZdZe	e
ed�Zd	d
�Zeede�Zy,ddlZddlmZmZmZdd
lmZWnek
r�YnXyddlmZmZmZWn"ek
�rd0\ZZdZYnXdj dddddddddddddd d!d"g�Z!ydd#lmZWn.ek
�rrddl"Z"Gd$d%�d%e#�ZYnXd&d'�Z$d(d)�Z%d*d+�Z&d1d,d-�Z'd2d.d/�Z(dS)3�)�absolute_importN)�hexlify�	unhexlify)�md5�sha1�sha256�)�SSLError�InsecurePlatformWarning�SNIMissingWarningF)� �(�@cCsHtt|�t|��}x*tt|�t|��D]\}}|||AO}q(W|dkS)z�
    Compare two digests of equal length in constant time.

    The digests must be of type str/bytes.
    Returns True if the digests match, and False otherwise.
    r)�abs�len�zip�	bytearray)�a�b�result�l�r�r�/usr/lib/python3.6/ssl_.py�_const_compare_digest_backportsrZcompare_digest)�wrap_socket�	CERT_NONE�PROTOCOL_SSLv23)�HAS_SNI)�OP_NO_SSLv2�OP_NO_SSLv3�OP_NO_COMPRESSION��i�:zTLS13-AES-256-GCM-SHA384zTLS13-CHACHA20-POLY1305-SHA256zTLS13-AES-128-GCM-SHA256zECDH+AESGCMz
ECDH+CHACHA20z	DH+AESGCMzDH+CHACHA20zECDH+AES256z	DH+AES256zECDH+AES128zDH+AESz
RSA+AESGCMzRSA+AESz!aNULLz!eNULLz!MD5)�
SSLContextc@s\eZdZdejkodknp*dejkZdd�Zdd�Zdd	d
�Zdd�Z	ddd�Z
dS)r%r��cCs6||_d|_tj|_d|_d|_d|_d|_d|_	dS)NFr)
�protocol�check_hostname�sslr�verify_mode�ca_certs�options�certfile�keyfile�ciphers)�selfZprotocol_versionrrr�__init__cszSSLContext.__init__cCs||_||_dS)N)r.r/)r1r.r/rrr�load_cert_chainnszSSLContext.load_cert_chainNcCs||_|dk	rtd��dS)Nz-CA directories not supported in older Pythons)r,r	)r1ZcafileZcapathrrr�load_verify_locationsrsz SSLContext.load_verify_locationscCs|jstd��||_dS)Nz�Your version of Python does not support setting a custom cipher suite. Please upgrade to Python 2.7, 3.2, or later if you need this functionality.)�supports_set_ciphers�	TypeErrorr0)r1Zcipher_suiterrr�set_ciphersxszSSLContext.set_ciphersFcCsTtjdt�|j|j|j|j|j|d�}|jrDt	|fd|j
i|��St	|f|�SdS)Na2A true SSLContext object is not available. This prevents urllib3 from configuring SSL appropriately and may cause certain SSL connections to fail. You can upgrade to a newer version of Python to solve this. For more information, see https://urllib3.readthedocs.io/en/latest/advanced-usage.html#ssl-warnings)r/r.r,�	cert_reqs�ssl_version�server_sider0)�warnings�warnr
r/r.r,r+r(r5rr0)r1Zsocket�server_hostnamer:�kwargsrrrr�szSSLContext.wrap_socket)rr&)r')r'r)NN)NF)�__name__�
__module__�__qualname__�sys�version_infor5r2r3r4r7rrrrrr%_s

	r%cCsn|jdd�j�}t|�}tj|�}|s4tdj|���t|j��}||�j	�}t
||�sjtdj|t|����dS)z�
    Checks if given fingerprint matches the supplied certificate.

    :param cert:
        Certificate as bytes object.
    :param fingerprint:
        Fingerprint as string of hexdigits, can be interspersed by colons.
    r$�z"Fingerprint of invalid length: {0}z6Fingerprints did not match. Expected "{0}", got "{1}".N)�replace�lowerr�HASHFUNC_MAP�getr	�formatr�encodeZdigest�_const_compare_digestr)ZcertZfingerprintZ
digest_lengthZhashfuncZfingerprint_bytesZcert_digestrrr�assert_fingerprint�s


rLcCs@|dkrtSt|t�r<tt|d�}|dkr8ttd|�}|S|S)a�
    Resolves the argument to a numeric constant, which can be passed to
    the wrap_socket function/method from the ssl module.
    Defaults to :data:`ssl.CERT_NONE`.
    If given a string it is assumed to be the name of the constant in the
    :mod:`ssl` module or its abbrevation.
    (So you can specify `REQUIRED` instead of `CERT_REQUIRED`.
    If it's neither `None` nor a string we assume it is already the numeric
    constant which can directly be passed to wrap_socket.
    NZCERT_)r�
isinstance�str�getattrr*)�	candidate�resrrr�resolve_cert_reqs�s
rRcCs@|dkrtSt|t�r<tt|d�}|dkr8ttd|�}|S|S)z 
    like resolve_cert_reqs
    NZ	PROTOCOL_)rrMrNrOr*)rPrQrrr�resolve_ssl_version�s
rScCs�t|p
tj�}|dkrtjn|}|dkrDd}|tO}|tO}|tO}|j|O_t|dd�rl|j	|pht
�||_t|dd�dk	r�d|_|S)a�All arguments have the same meaning as ``ssl_wrap_socket``.

    By default, this function does a lot of the same work that
    ``ssl.create_default_context`` does on Python 3.4+. It:

    - Disables SSLv2, SSLv3, and compression
    - Sets a restricted set of server ciphers

    If you wish to enable SSLv3, you can do::

        from urllib3.util import ssl_
        context = ssl_.create_urllib3_context()
        context.options &= ~ssl_.OP_NO_SSLv3

    You can do the same to enable compression (substituting ``COMPRESSION``
    for ``SSLv3`` in the last line above).

    :param ssl_version:
        The desired protocol version to use. This will default to
        PROTOCOL_SSLv23 which will negotiate the highest protocol that both
        the server and your installation of OpenSSL support.
    :param cert_reqs:
        Whether to require the certificate verification. This defaults to
        ``ssl.CERT_REQUIRED``.
    :param options:
        Specific OpenSSL options. These default to ``ssl.OP_NO_SSLv2``,
        ``ssl.OP_NO_SSLv3``, ``ssl.OP_NO_COMPRESSION``.
    :param ciphers:
        Which cipher suites to allow the server to select.
    :returns:
        Constructed SSLContext object with specified options
    :rtype: SSLContext
    Nrr5Tr)F)
r%r*rZ
CERT_REQUIREDrr r!r-rOr7�DEFAULT_CIPHERSr+r))r9r8r-r0�contextrrr�create_urllib3_context�s#rVc
Cs�|}
|
dkrt|||d�}
|s"|	r�y|
j||	�Wq�tk
r\}zt|��WYdd}~Xq�tk
r�}z|jtjkr�t|���WYdd}~Xq�Xn|dkr�t|
d�r�|
j�|r�|
j	||�t
r�|
j||d�Stj
dt�|
j|�S)a
    All arguments except for server_hostname, ssl_context, and ca_cert_dir have
    the same meaning as they do when using :func:`ssl.wrap_socket`.

    :param server_hostname:
        When SNI is supported, the expected hostname of the certificate
    :param ssl_context:
        A pre-made :class:`SSLContext` object. If none is provided, one will
        be created using :func:`create_urllib3_context`.
    :param ciphers:
        A string of ciphers we wish the client to support. This is not
        supported on Python 2.6 as the ssl module does not support it.
    :param ca_cert_dir:
        A directory containing CA certificates in multiple separate files, as
        supported by OpenSSL's -CApath flag or the capath argument to
        SSLContext.load_verify_locations().
    N)r0�load_default_certs)r=a�An HTTPS request has been made, but the SNI (Subject Name Indication) extension to TLS is not available on this platform. This may cause the server to present an incorrect TLS certificate, which can cause validation failures. You can upgrade to a newer version of Python to solve this. For more information, see https://urllib3.readthedocs.io/en/latest/advanced-usage.html#ssl-warnings)rVr4�IOErrorr	�OSError�errno�ENOENT�hasattrrWr3rrr;r<r)Zsockr/r.r8r,r=r9r0Zssl_contextZca_cert_dirrU�errr�ssl_wrap_sockets.r^)r"r#)NNNN)	NNNNNNNNN))Z
__future__rrZr;ZhmacZbinasciirrZhashlibrrr�
exceptionsr	r
rr%rZIS_PYOPENSSLZIS_SECURETRANSPORTrGrrOrKr*rrr�ImportErrorrr r!�joinrTrB�objectrLrRrSrVr^rrrr�<module>st

:
>_vendor/urllib3/util/__pycache__/url.cpython-36.opt-1.pyc000064400000012430151733136360017140 0ustar003

�Pf��@s�ddlmZddlmZddlZddlmZdddd	d
ddgZdZej	d�Z
ddlmZGdd�dede��Z
dd�Zdd�Zdd�ZdS)�)�absolute_import)�
namedtupleN�)�LocationParseError�scheme�auth�host�port�path�query�fragment�http�httpsz[- ])�quotecs^eZdZdZfZd�fdd�	Zedd��Zedd��Zed	d
��Z	edd��Z
d
d�Z�ZS)�Urlz�
    Datastructure for representing an HTTP URL. Used as a return value for
    :func:`parse_url`. Both the scheme and host are normalized as they are
    both case-insensitive according to RFC 3986.
    Nc	sV|r|jd�rd|}|r$|j�}|r8|tkr8|j�}tt|�j||||||||�S)N�/)�
startswith�lower�NORMALIZABLE_SCHEMES�superr�__new__)�clsrrrr	r
rr)�	__class__��/usr/lib/python3.6/url.pyrszUrl.__new__cCs|jS)z@For backwards-compatibility with urlparse. We're nice like that.)r)�selfrrr�hostname$szUrl.hostnamecCs&|jpd}|jdk	r"|d|j7}|S)z)Absolute path including the query string.rN�?)r
r)rZurirrr�request_uri)s

zUrl.request_uricCs|jrd|j|jfS|jS)z(Network location including host and portz%s:%d)r	r)rrrr�netloc3sz
Url.netlocc	Cs�|\}}}}}}}d}|dk	r*||d7}|dk	r>||d7}|dk	rN||7}|dk	rf|dt|�7}|dk	rv||7}|dk	r�|d|7}|dk	r�|d|7}|S)a�
        Convert self into a url

        This function should more or less round-trip with :func:`.parse_url`. The
        returned url may not be exactly the same as the url inputted to
        :func:`.parse_url`, but it should be equivalent by the RFC (e.g., urls
        with a blank port will have : removed).

        Example: ::

            >>> U = parse_url('http://google.com/mail/')
            >>> U.url
            'http://google.com/mail/'
            >>> Url('http', 'username:password', 'host.com', 80,
            ... '/path', 'query', 'fragment').url
            'http://username:password@host.com:80/path?query#fragment'
        �Nz://�@�:r�#)�str)	rrrrr	r
rr�urlrrrr%:s"zUrl.urlcCs|jS)N)r%)rrrr�__str__bszUrl.__str__)NNNNNNN)
�__name__�
__module__�__qualname__�__doc__�	__slots__r�propertyrrrr%r&�
__classcell__rr)rrrs

(rcCszd}d}x8|D]0}|j|�}|dkr&q|dks6||kr|}|}qW|dksR|dkr\|ddfS|d|�||dd�|fS)a�
    Given a string and an iterable of delimiters, split on the first found
    delimiter. Return two split parts and the matched delimiter.

    If not found, then the first part is the full input string.

    Example::

        >>> split_first('foo/bar?baz', '?/=')
        ('foo', 'bar?baz', '/')
        >>> split_first('foo/bar?baz', '123')
        ('foo/bar?baz', '', None)

    Scales linearly with number of delims. Not ideal for large number of delims.
    Nrr �)�find)�sZdelimsZmin_idxZ	min_delim�d�idxrrr�split_firstfs


r3cCs�|s
t�Stjdd�|�}d}d}d}d}d}d}d}d|krN|jdd�\}}t|dddg�\}}}	|	rp|	|}d	|kr�|jd	d�\}}|r�|d
dkr�|jdd�\}}|d7}d
|k�r|jd
d�\}
}|s�|
}|�r|j�s�t|��yt|�}Wnt	k
�rt|��YnXnd}n|�r.|�r.|}|�sHt|||||||�Sd|k�rb|jdd�\}}d|k�r||jdd�\}}t|||||||�S)a:
    Given a url, return a parsed :class:`.Url` namedtuple. Best-effort is
    performed to parse incomplete urls. Fields not provided will be None.

    Partly backwards-compatible with :mod:`urlparse`.

    Example::

        >>> parse_url('http://google.com/mail/')
        Url(scheme='http', host='google.com', port=None, path='/mail/', ...)
        >>> parse_url('google.com:80')
        Url(scheme=None, host='google.com', port=80, path=None, ...)
        >>> parse_url('/foo?bar')
        Url(scheme=None, host=None, port=None, path='/foo', query='bar', ...)
    cSst|j��S)N)r�group)�matchrrr�<lambda>�szparse_url.<locals>.<lambda>Nz://r.rrr#r!r�[�]r")
r�!_contains_disallowed_url_pchar_re�sub�splitr3�rsplit�isdigitr�int�
ValueError)r%rrrr	r
rrZpath_ZdelimZ_hostrrr�	parse_url�sR


r@cCst|�}|jpd|j|jfS)z4
    Deprecated. Use :func:`parse_url` instead.
    r
)r@rrr	)r%�prrr�get_host�srB)r
rN)Z
__future__r�collectionsr�re�
exceptionsrZ	url_attrsr�compiler9Zpackages.six.moves.urllib.parserrr3r@rBrrrr�<module>s
U!a_vendor/urllib3/util/__pycache__/retry.cpython-36.pyc000064400000030433151733136360016547 0ustar003

�Pf;�@s�ddlmZddlZddlZddlmZddlmZddlZddl	Z	ddl
mZmZm
Z
mZmZmZddlmZeje�Zedd	d
ddd
g�ZGdd�de�Zed�e_dS)�)�absolute_importN)�
namedtuple)�	takewhile�)�ConnectTimeoutError�
MaxRetryError�
ProtocolError�ReadTimeoutError�
ResponseError�
InvalidHeader)�six�RequestHistory�method�url�error�status�redirect_locationc
@s�eZdZdZeddddddg�Zedg�Zed	d
dg�ZdZd
ddddeddddddef
dd�Z	dd�Z
ed2dd��Zdd�Z
dd�Zdd�Zd3dd�Zdd �Zd4d!d"�Zd#d$�Zd%d&�Zd'd(�Zd5d*d+�Zd,d-�Zd6d.d/�Zd0d1�ZdS)7�Retrya2 Retry configuration.

    Each retry attempt will create a new Retry object with updated values, so
    they can be safely reused.

    Retries can be defined as a default for a pool::

        retries = Retry(connect=5, read=2, redirect=5)
        http = PoolManager(retries=retries)
        response = http.request('GET', 'http://example.com/')

    Or per-request (which overrides the default for the pool)::

        response = http.request('GET', 'http://example.com/', retries=Retry(10))

    Retries can be disabled by passing ``False``::

        response = http.request('GET', 'http://example.com/', retries=False)

    Errors will be wrapped in :class:`~urllib3.exceptions.MaxRetryError` unless
    retries are disabled, in which case the causing exception will be raised.

    :param int total:
        Total number of retries to allow. Takes precedence over other counts.

        Set to ``None`` to remove this constraint and fall back on other
        counts. It's a good idea to set this to some sensibly-high value to
        account for unexpected edge cases and avoid infinite retry loops.

        Set to ``0`` to fail on the first retry.

        Set to ``False`` to disable and imply ``raise_on_redirect=False``.

    :param int connect:
        How many connection-related errors to retry on.

        These are errors raised before the request is sent to the remote server,
        which we assume has not triggered the server to process the request.

        Set to ``0`` to fail on the first retry of this type.

    :param int read:
        How many times to retry on read errors.

        These errors are raised after the request was sent to the server, so the
        request may have side-effects.

        Set to ``0`` to fail on the first retry of this type.

    :param int redirect:
        How many redirects to perform. Limit this to avoid infinite redirect
        loops.

        A redirect is a HTTP response with a status code 301, 302, 303, 307 or
        308.

        Set to ``0`` to fail on the first retry of this type.

        Set to ``False`` to disable and imply ``raise_on_redirect=False``.

    :param int status:
        How many times to retry on bad status codes.

        These are retries made on responses, where status code matches
        ``status_forcelist``.

        Set to ``0`` to fail on the first retry of this type.

    :param iterable method_whitelist:
        Set of uppercased HTTP method verbs that we should retry on.

        By default, we only retry on methods which are considered to be
        idempotent (multiple requests with the same parameters end with the
        same state). See :attr:`Retry.DEFAULT_METHOD_WHITELIST`.

        Set to a ``False`` value to retry on any verb.

    :param iterable status_forcelist:
        A set of integer HTTP status codes that we should force a retry on.
        A retry is initiated if the request method is in ``method_whitelist``
        and the response status code is in ``status_forcelist``.

        By default, this is disabled with ``None``.

    :param float backoff_factor:
        A backoff factor to apply between attempts after the second try
        (most errors are resolved immediately by a second try without a
        delay). urllib3 will sleep for::

            {backoff factor} * (2 ^ ({number of total retries} - 1))

        seconds. If the backoff_factor is 0.1, then :func:`.sleep` will sleep
        for [0.0s, 0.2s, 0.4s, ...] between retries. It will never be longer
        than :attr:`Retry.BACKOFF_MAX`.

        By default, backoff is disabled (set to 0).

    :param bool raise_on_redirect: Whether, if the number of redirects is
        exhausted, to raise a MaxRetryError, or to return a response with a
        response code in the 3xx range.

    :param iterable remove_headers_on_redirect:
        Sequence of headers to remove from the request when a response
        indicating a redirect is returned before firing off the redirected
        request

    :param bool raise_on_status: Similar meaning to ``raise_on_redirect``:
        whether we should raise an exception, or return a response,
        if status falls in ``status_forcelist`` range and retries have
        been exhausted.

    :param tuple history: The history of the request encountered during
        each call to :meth:`~Retry.increment`. The list is in the order
        the requests occurred. Each list item is of class :class:`RequestHistory`.

    :param bool respect_retry_after_header:
        Whether to respect Retry-After header on status codes defined as
        :attr:`Retry.RETRY_AFTER_STATUS_CODES` or not.

    ZHEADZGETZPUTZDELETEZOPTIONSZTRACEZ
Authorizationi�i�i��x�
NrTcCsv||_||_||_||_|dks(|dkr0d}d}	||_|p>t�|_||_||_|	|_	|
|_
|pbt�|_||_
|
|_dS)NFr)�total�connect�readr�redirect�set�status_forcelist�method_whitelist�backoff_factor�raise_on_redirect�raise_on_status�tuple�history�respect_retry_after_header�remove_headers_on_redirect)�selfrrrrrrrrrrr!r"r#�r%�/usr/lib/python3.6/retry.py�__init__�s zRetry.__init__cKsPt|j|j|j|j|j|j|j|j|j	|j
|j|jd�}|j
|�t|�f|�S)N)rrrrrrrrrrr!r#)�dictrrrrrrrrrrr!r#�update�type)r$�kwZparamsr%r%r&�new�s

z	Retry.newcCsR|dkr|dk	r|n|j}t|t�r(|St|�o2d}|||d�}tjd||�|S)z4 Backwards-compatibility for the old retries format.N)rz!Converted retries value: %r -> %r)�DEFAULT�
isinstancer�bool�log�debug)�clsZretriesr�defaultZnew_retriesr%r%r&�from_int�s
zRetry.from_intcCsFtttdd�t|j����}|dkr(dS|jd|d}t|j|�S)zJ Formula for computing the current backoff

        :rtype: float
        cSs
|jdkS)N)r)�xr%r%r&�<lambda>�sz(Retry.get_backoff_time.<locals>.<lambda>�rr)�len�listr�reversedr!r�min�BACKOFF_MAX)r$Zconsecutive_errors_lenZ
backoff_valuer%r%r&�get_backoff_time�szRetry.get_backoff_timecCs\tjd|�rt|�}n6tjj|�}|dkr6td|��tj|�}|tj�}|dkrXd}|S)Nz^\s*[0-9]+\s*$zInvalid Retry-After header: %sr)	�re�match�int�emailZutilsZ	parsedater�timeZmktime)r$�retry_afterZsecondsZretry_date_tupleZ
retry_dater%r%r&�parse_retry_after�s

zRetry.parse_retry_aftercCs |jd�}|dkrdS|j|�S)z* Get the value of Retry-After in seconds. zRetry-AfterN)Z	getheaderrD)r$�responserCr%r%r&�get_retry_after�s
zRetry.get_retry_aftercCs |j|�}|rtj|�dSdS)NTF)rFrB�sleep)r$rErCr%r%r&�sleep_for_retry�s


zRetry.sleep_for_retrycCs"|j�}|dkrdStj|�dS)Nr)r=rBrG)r$Zbackoffr%r%r&�_sleep_backoffszRetry._sleep_backoffcCs"|r|j|�}|rdS|j�dS)aC Sleep between retry attempts.

        This method will respect a server's ``Retry-After`` response header
        and sleep the duration of the time requested. If that is not present, it
        will use an exponential backoff. By default, the backoff factor is 0 and
        this method will return immediately.
        N)rHrI)r$rEZsleptr%r%r&rGs
	
zRetry.sleepcCs
t|t�S)z{ Errors when we're fairly sure that the server did not receive the
        request, so it should be safe to retry.
        )r.r)r$�errr%r%r&�_is_connection_errorszRetry._is_connection_errorcCst|ttf�S)z� Errors that occur after the request has been started, so we should
        assume that the server began processing it.
        )r.r	r)r$rJr%r%r&�_is_read_error!szRetry._is_read_errorcCs|jr|j�|jkrdSdS)z| Checks if a given HTTP method should be retried upon, depending if
        it is included on the method whitelist.
        FT)r�upper)r$rr%r%r&�_is_method_retryable'szRetry._is_method_retryableFcCs<|j|�sdS|jr"||jkr"dS|jo:|jo:|o:||jkS)ax Is this method/status code retryable? (Based on whitelists and control
        variables such as the number of total retries to allow, whether to
        respect the Retry-After header, whether this header is present, and
        whether the returned status code is on the list of status codes to
        be retried upon on the presence of the aforementioned header)
        FT)rNrrr"�RETRY_AFTER_STATUS_CODES)r$r�status_codeZhas_retry_afterr%r%r&�is_retry0s
zRetry.is_retrycCs:|j|j|j|j|jf}ttd|��}|s.dSt|�dkS)z Are we out of retries? NFr)rrrrrr9�filterr;)r$Zretry_countsr%r%r&�is_exhausted@s
zRetry.is_exhaustedcCs�|jdkr |r tjt|�||��|j}|dk	r6|d8}|j}|j}	|j}
|j}d}d}
d}|r�|j|�r�|dkr�tjt|�||��n|dk	r�|d8}n�|r�|j	|�r�|	dks�|j
|�r�tjt|�||��n|	dk	r�|	d8}	nn|o�|j��r|
dk	r�|
d8}
d}|j�}|j}
n<tj
}|�rL|j�rL|dk	�r6|d8}tjj|jd�}|j}
|jt||||
|�f}|j|||	|
||d�}|j��r�t|||�p�t|���tjd||�|S)	a� Return a new Retry object with incremented retry counters.

        :param response: A response object, or None, if the server did not
            return a response.
        :type response: :class:`~urllib3.response.HTTPResponse`
        :param Exception error: An error encountered during the request, or
            None if the response was received successfully.

        :return: A new ``Retry`` object.
        FNr7�unknownztoo many redirects)rP)rrrrrr!z$Incremented Retry for (url='%s'): %r)rrZreraiser*rrrrrKrLrNZget_redirect_locationr
Z
GENERIC_ERRORZSPECIFIC_ERROR�formatr!r
r,rSrr0r1)r$rrrErZ_poolZ_stacktracerrrrZstatus_count�causerrr!Z	new_retryr%r%r&�	incrementIsX




zRetry.incrementcCsdjt|�|d�S)Nz|{cls.__name__}(total={self.total}, connect={self.connect}, read={self.read}, redirect={self.redirect}, status={self.status}))r2r$)rUr*)r$r%r%r&�__repr__�szRetry.__repr__)TN)N)N)F)NNNNNN)�__name__�
__module__�__qualname__�__doc__�	frozensetZDEFAULT_METHOD_WHITELISTZ"DEFAULT_REDIRECT_HEADERS_BLACKLISTrOr<r'r,�classmethodr4r=rDrFrHrIrGrKrLrNrQrSrWrXr%r%r%r&rs8x





	
	
Jr�)Z
__future__rrBZlogging�collectionsr�	itertoolsrrAr>�
exceptionsrrrr	r
rZpackagesrZ	getLoggerrYr0r
�objectrr-r%r%r%r&�<module>s  


_vendor/urllib3/util/__pycache__/wait.cpython-36.pyc000064400000003055151733136360016346 0ustar003

�Pf��@s:ddlmZmZmZmZd	dd�Zd
dd�Zddd�ZdS)�)�
HAS_SELECT�DefaultSelector�
EVENT_READ�EVENT_WRITENcsttstd��t|t�s0t|d�r(|g}nt|�}t��4}x|D]}|j|��q>W�fdd�|j|�D�SQRXdS)z� Waits for IO events to be available from a list of sockets
    or optionally a single socket if passed in. Returns a list of
    sockets that can be interacted with immediately. z!Platform does not have a selector�filenocs"g|]}|d�@r|dj�qS)r�)Zfileobj)�.0�key)�events��/usr/lib/python3.6/wait.py�
<listcomp>sz'_wait_for_io_events.<locals>.<listcomp>N)r�
ValueError�
isinstance�list�hasattrr�registerZselect)�socksr
�timeoutZselectorZsockr)r
r�_wait_for_io_events	s



rcCst|t|�S)z� Waits for reading to be available from a list of sockets
    or optionally a single socket if passed in. Returns a list of
    sockets that can be read from immediately. )rr)rrrrr�
wait_for_readsrcCst|t|�S)z� Waits for writing to be available from a list of sockets
    or optionally a single socket if passed in. Returns a list of
    sockets that can be written to immediately. )rr)rrrrr�wait_for_write$sr)N)N)N)Z	selectorsrrrrrrrrrrr�<module>s

_vendor/urllib3/util/__pycache__/selectors.cpython-36.pyc000064400000037715151733136360017417 0ustar003

�Pf�R�@szddlZddlZddlZddlZddlZddlZddlmZmZy
ej	Z	Wne
efk
rhejZ	YnXd(Zd)Z
dZe�ZdaGdd�de�Zdd�Zejd*kr�dd�Znd
d�Zedddddg�ZGdd�de�ZGdd�de�Zeed��rGdd�de�Zeed��rGdd�de�Zeed��r:Gdd�de�Zeed ��rVGd!d"�d"e�Zeed��sfd#Zd$d%�Zd&d'�Z dS)+�N)�
namedtuple�Mapping�Tcs,eZdZ�fdd�Zdd�Zdd�Z�ZS)�
SelectorErrorcstt|�j�||_dS)N)�superr�__init__�errno)�self�errcode)�	__class__��/usr/lib/python3.6/selectors.pyrszSelectorError.__init__cCsdj|j�S)Nz<SelectorError errno={0}>)�formatr)r	rrr
�__repr__"szSelectorError.__repr__cCs|j�S)N)r)r	rrr
�__str__%szSelectorError.__str__)�__name__�
__module__�__qualname__rrr�
__classcell__rr)rr
rsrc
Csdt|t�r|}n:yt|j��}Wn(tttfk
rHtdj|���YnX|dkr`tdj|���|S)zl Return a file descriptor from a file object. If
    given an integer will simply return that integer back. zInvalid file object: {0!r}rzInvalid file descriptor: {0})�
isinstance�int�fileno�AttributeError�	TypeError�
ValueErrorr)�fileobj�fdrrr
�_fileobj_to_fd)s
r��cOsVy
|||�Stttjfk
rP}z"d}t|d�r8|j}t|��WYdd}~XnXdS)z� This is the short-circuit version of the below logic
        because in Python 3.5+ all system calls automatically restart
        and recalculate their timeouts. Nr)�OSError�IOError�select�error�hasattrrr)�func�_�args�kwargs�er
rrr
�_syscall_wrapper;s

r*cOsR|jdd�}|dkrd}d}n t|�}|dkr4d}n
t�|}t|�}|rZd|krZtd��t}x�|tk�rLy|||�}Wq`tttj	fk
�rH}z�d}t
|d�r�|j}nt
|d�r�|jd}|tj
kp�t
td	�o�|tjk}	|	�r&|dk	�r$t�}
|
|k�rttjd
��|�r$d|k�r$||
|d<w`|�r6t|��n�WYdd}~Xq`Xq`W|S)z� Wrapper function for syscalls that could fail due to EINTR.
        All functions should be retried if there is time left in the timeout
        in accordance with PEP 475. �timeoutNFgz4Timeout must be in args or kwargs to be recalculatedrr'r�WSAEINTR)r)�get�float�	monotonic�listr�_SYSCALL_SENTINELr r!r"r#r$rr'ZEINTRr,Z	ETIMEDOUTr)r%Zrecalc_timeoutr'r(r+Zexpires�resultr)r
Zis_interruptZcurrent_timerrr
r*GsJ








�SelectorKeyrr�events�datac@s0eZdZdZdd�Zdd�Zdd�Zdd	�Zd
S)�_SelectorMappingz* Mapping of file objects to selector keys cCs
||_dS)N)�	_selector)r	Zselectorrrr
r�sz_SelectorMapping.__init__cCst|jj�S)N)�lenr7�
_fd_to_key)r	rrr
�__len__�sz_SelectorMapping.__len__cCs@y|jj|�}|jj|Stk
r:tdj|���YnXdS)Nz{0!r} is not registered.)r7�_fileobj_lookupr9�KeyErrorr)r	rrrrr
�__getitem__�s
z_SelectorMapping.__getitem__cCst|jj�S)N)�iterr7r9)r	rrr
�__iter__�sz_SelectorMapping.__iter__N)rrr�__doc__rr:r=r?rrrr
r6�s
r6c@sveZdZdZdd�Zdd�Zddd�Zd	d
�Zddd�Zdd
d�Z	dd�Z
dd�Zdd�Zdd�Z
dd�Zdd�ZdS)�BaseSelectora/ Abstract Selector class

    A selector supports registering file objects to be monitored
    for specific I/O events.

    A file object is a file descriptor or any object with a
    `fileno()` method. An arbitrary object can be attached to the
    file object which can be used for example to store context info,
    a callback, etc.

    A selector can use various implementations (select(), poll(), epoll(),
    and kqueue()) depending on the platform. The 'DefaultSelector' class uses
    the most efficient implementation for the current platform.
    cCsi|_t|�|_dS)N)r9r6�_map)r	rrr
r�szBaseSelector.__init__cCsHyt|�Stk
rBx"|jj�D]}|j|kr$|jSq$W�YnXdS)aa Return a file descriptor from a file object.
        This wraps _fileobj_to_fd() to do an exhaustive
        search in case the object is invalid but we still
        have it in our map. Used by unregister() so we can
        unregister an object that was previously registered
        even if it is closed. It is also used by _SelectorMapping
        N)rrr9�valuesrr)r	r�keyrrr
r;�s

zBaseSelector._fileobj_lookupNcCsd|s|ttB@r"tdj|���t||j|�||�}|j|jkrTtdj||j���||j|j<|S)z8 Register a file object for a set of events to monitor. zInvalid events: {0!r}z${0!r} (FD {1}) is already registered)	�
EVENT_READ�EVENT_WRITErrr3r;rr9r<)r	rr4r5rDrrr
�register�szBaseSelector.registercCs�y|jj|j|��}Wn�tk
r8tdj|���Ynptjk
r�}zR|jtjkrZ�n<x:|jj	�D]}|j
|krf|jj|j�PqfWtdj|���WYdd}~XnX|S)z0 Unregister a file object from being monitored. z{0!r} is not registeredN)r9�popr;r<r�socketr#rZEBADFrCrr)r	rrDr)rrr
�
unregister�s
 zBaseSelector.unregistercCs�y|j|j|�}Wn"tk
r6tdj|���YnX||jkr\|j|�|j|||�}n"||jkr~|j|d�}||j|j	<|S)z< Change a registered file object monitored events and data. z{0!r} is not registered)r5)
r9r;r<rr4rJrGr5�_replacer)r	rr4r5rDrrr
�modify�s


zBaseSelector.modifycCs
t��dS)zj Perform the actual selection until some monitored file objects
        are ready or the timeout expires. N)�NotImplementedError)r	r+rrr
r"�szBaseSelector.selectcCs|jj�d|_dS)zd Close the selector. This must be called to ensure that all
        underlying resources are freed. N)r9�clearrB)r	rrr
�close�s
zBaseSelector.closecCsH|j�}|dkrtd��y||Stk
rBtdj|���YnXdS)z: Return the key associated with a registered file object. NzSelector is closedz{0!r} is not registered)�get_map�RuntimeErrorr<r)r	r�mappingrrr
�get_keyszBaseSelector.get_keycCs|jS)z3 Return a mapping of file objects to selector keys )rB)r	rrr
rP
szBaseSelector.get_mapcCs$y
|j|Stk
rdSXdS)z_ Return the key associated to a given file descriptor
         Return None if it is not found. N)r9r<)r	rrrr
�_key_from_fds
zBaseSelector._key_from_fdcCs|S)Nr)r	rrr
�	__enter__szBaseSelector.__enter__cGs|j�dS)N)rO)r	r'rrr
�__exit__szBaseSelector.__exit__)N)N)N)rrrr@rr;rGrJrLr"rOrSrPrTrUrVrrrr
rA�s



rAr"csNeZdZdZ�fdd�Zd
�fdd�	Z�fdd�Zdd	d
�Zddd�Z�Z	S)�SelectSelectorz Select-based selector. cs"tt|�j�t�|_t�|_dS)N)rrWr�set�_readers�_writers)r	)rrr
r!szSelectSelector.__init__NcsDtt|�j|||�}|t@r*|jj|j�|t@r@|jj|j�|S)N)	rrWrGrErY�addrrFrZ)r	rr4r5rD)rrr
rG&szSelectSelector.registercs0tt|�j|�}|jj|j�|jj|j�|S)N)rrWrJrY�discardrrZ)r	rrD)rrr
rJ.szSelectSelector.unregistercCstj||g|�S)z? Wrapper for select.select because timeout is a positional arg )r")r	�r�wr+rrr
�_select4szSelectSelector._selectc	Cs�t|j�rt|j�rgS|dkr(dnt|d�}g}t|jd|j|j|�\}}}t|�}t|�}xV||BD]J}d}||kr�|tO}||kr�|tO}|j	|�}|rl|j
|||j@f�qlW|S)NgTr)r8rYrZ�maxr*r_rXrErFrT�appendr4)	r	r+�readyr]r^r&rr4rDrrr
r"8s$
zSelectSelector.select)N)N)N)
rrrr@rrGrJr_r"rrr)rr
rWs
rW�pollcsNeZdZdZ�fdd�Zd
�fdd�	Z�fdd�Zdd	d
�Zddd�Z�Z	S)�PollSelectorz Poll-based selector cstt|�j�tj�|_dS)N)rrdrr"rc�_poll)r	)rrr
rSszPollSelector.__init__NcsPtt|�j|||�}d}|t@r*|tjO}|t@r<|tjO}|jj|j	|�|S)Nr)
rrdrGrEr"�POLLINrF�POLLOUTrer)r	rr4r5rD�
event_mask)rrr
rGWs

zPollSelector.registercs"tt|�j|�}|jj|j�|S)N)rrdrJrer)r	rrD)rrr
rJaszPollSelector.unregistercCs4|dk	r$|dkrd}ntj|d�}|jj|�}|S)zj Wrapper function for select.poll.poll() so that
            _syscall_wrapper can work with only seconds. Nrg@�@)�math�ceilrerc)r	r+r2rrr
�
_wrap_pollfszPollSelector._wrap_pollcCsxg}t|jd|d�}x^|D]V\}}d}|tj@r:|tO}|tj@rN|tO}|j|�}|r|j|||j	@f�qW|S)NT)r+r)
r*rkr"rfrFrgrErTrar4)r	r+rb�	fd_eventsrrhr4rDrrr
r"ts
zPollSelector.select)N)N)N)
rrrr@rrGrJrkr"rrr)rr
rdQs

rd�epollcsXeZdZdZ�fdd�Zdd�Zd�fdd�	Z�fd	d
�Zddd�Z�fd
d�Z	�Z
S)�
EpollSelectorz Epoll-based selector cstt|�j�tj�|_dS)N)rrnrr"rm�_epoll)r	)rrr
r�szEpollSelector.__init__cCs
|jj�S)N)ror)r	rrr
r�szEpollSelector.filenoNcsTtt|�j|||�}d}|t@r*|tjO}|t@r<|tjO}t|j	jd|j
|�|S)NrF)rrnrGrEr"�EPOLLINrF�EPOLLOUTr*ror)r	rr4r5rDZevents_mask)rrr
rG�s

zEpollSelector.registercs@tt|�j|�}yt|jjd|j�Wntk
r:YnX|S)NF)rrnrJr*rorr)r	rrD)rrr
rJ�szEpollSelector.unregisterc	Cs�|dk	r2|dkrd}ntj|d�d}t|�}nd	}tt|j�d�}g}t|jjd||d�}x^|D]V\}}d}|t	j
@r�|tO}|t	j@r�|t
O}|j|�}|rd|j|||j@f�qdW|S)
Nrgg@�@g����MbP?g�?rT)r+Z	maxeventsg�)rirjr.r`r8r9r*rorcr"rprFrqrErTrar4)	r	r+�
max_eventsrbrlrrhr4rDrrr
r"�s*


zEpollSelector.selectcs|jj�tt|�j�dS)N)rorOrrn)r	)rrr
rO�s
zEpollSelector.close)N)N)rrrr@rrrGrJr"rOrrr)rr
rn�s
	
!rn�kqueuecsXeZdZdZ�fdd�Zdd�Zd�fdd�	Z�fd	d
�Zddd�Z�fd
d�Z	�Z
S)�KqueueSelectorz  Kqueue / Kevent-based selector cstt|�j�tj�|_dS)N)rrtrr"rs�_kqueue)r	)rrr
r�szKqueueSelector.__init__cCs
|jj�S)N)rur)r	rrr
r�szKqueueSelector.filenoNcs|tt|�j|||�}|t@rFtj|jtjtj�}t	|j
jd|gdd�|t@rxtj|jtj
tj�}t	|j
jd|gdd�|S)NFr)rrtrGrEr"�keventr�KQ_FILTER_READZ	KQ_EV_ADDr*ru�controlrF�KQ_FILTER_WRITE)r	rr4r5rDrv)rrr
rG�szKqueueSelector.registercs�tt|�j|�}|jt@r^tj|jtjtj	�}yt
|jjd|gdd�Wnt
k
r\YnX|jt@r�tj|jtjtj	�}yt
|jjd|gdd�Wnt
k
r�YnX|S)NFr)rrtrJr4rEr"rvrrwZKQ_EV_DELETEr*rurxrrFry)r	rrDrv)rrr
rJ�s$

zKqueueSelector.unregistercCs�|dk	rt|d�}t|j�d}i}t|jjdd||�}x�|D]�}|j}|j}d}|tj	krd|t
O}|tjkrv|tO}|j
|�}	|	r>|	j|kr�|	||	j@f||	j<q>||	jd}
|	||
B|	j@f||	j<q>Wt|j��S)Nr�Tr)r`r8r9r*rurxZident�filterr"rwrEryrFrTrr4r0rC)r	r+rrZ	ready_fdsZkevent_listrvrrhr4rDZ
old_eventsrrr
r"�s*







zKqueueSelector.selectcs|jj�tt|�j�dS)N)rurOrrt)r	)rrr
rOs
zKqueueSelector.close)N)N)rrrr@rrrGrJr"rOrrr)rr
rt�s
rtFcCsZy0|dkrtj�}|jd�ntt|��j�dSttfk
rT}zdSd}~XnXdS)a
 Checks that select structs can be allocated by the underlying
    operating system, not just advertised by the select module. We don't
    check select() because we'll be hopeful that most platforms that
    don't have it available will not advertise it. (ie: GAE) rcrTFN)r"rc�getattrrOr r)�struct�pr)rrr
�
_can_allocatesrcCsPtdkrJtd�rtan4td�r$tan&td�r2tanttd�rBtantd��t�S)z� This function serves as a first call for DefaultSelector to
    detect if the select module is being monkey-patched incorrectly
    by eventlet, greenlet, and preserve proper behavior. Nrsrmrcr"z!Platform does not have a selector)	�_DEFAULT_SELECTORrrtrnrdr$r"rWrrrrr
�DefaultSelector5s
r�rrz)rr)!rrir"rI�sysZtime�collectionsrrr/r�ImportErrorrErFZ
HAS_SELECT�objectr1r��	Exceptionrr�version_infor*r3r6rAr$rWrdrnrtrr�rrrr
�<module>sH


8	14BR_vendor/urllib3/util/__pycache__/timeout.cpython-36.pyc000064400000020757151733136360017100 0ustar003

�Pf&�@sTddlmZddlmZddlZddlmZe�Ze	edej�Z
Gdd�de�ZdS)	�)�absolute_import)�_GLOBAL_DEFAULT_TIMEOUTN�)�TimeoutStateErrorZ	monotonicc@steZdZdZeZdeefdd�Zdd�Ze	dd��Z
e	d	d
��Zdd�Zd
d�Z
dd�Zedd��Zedd��ZdS)�Timeouta� Timeout configuration.

    Timeouts can be defined as a default for a pool::

        timeout = Timeout(connect=2.0, read=7.0)
        http = PoolManager(timeout=timeout)
        response = http.request('GET', 'http://example.com/')

    Or per-request (which overrides the default for the pool)::

        response = http.request('GET', 'http://example.com/', timeout=Timeout(10))

    Timeouts can be disabled by setting all the parameters to ``None``::

        no_timeout = Timeout(connect=None, read=None)
        response = http.request('GET', 'http://example.com/, timeout=no_timeout)


    :param total:
        This combines the connect and read timeouts into one; the read timeout
        will be set to the time leftover from the connect attempt. In the
        event that both a connect timeout and a total are specified, or a read
        timeout and a total are specified, the shorter timeout will be applied.

        Defaults to None.

    :type total: integer, float, or None

    :param connect:
        The maximum amount of time to wait for a connection attempt to a server
        to succeed. Omitting the parameter will default the connect timeout to
        the system default, probably `the global default timeout in socket.py
        <http://hg.python.org/cpython/file/603b4d593758/Lib/socket.py#l535>`_.
        None will set an infinite timeout for connection attempts.

    :type connect: integer, float, or None

    :param read:
        The maximum amount of time to wait between consecutive
        read operations for a response from the server. Omitting
        the parameter will default the read timeout to the system
        default, probably `the global default timeout in socket.py
        <http://hg.python.org/cpython/file/603b4d593758/Lib/socket.py#l535>`_.
        None will set an infinite timeout.

    :type read: integer, float, or None

    .. note::

        Many factors can affect the total amount of time for urllib3 to return
        an HTTP response.

        For example, Python's DNS resolver does not obey the timeout specified
        on the socket. Other factors that can affect total request time include
        high CPU load, high swap, the program running at a low priority level,
        or other behaviors.

        In addition, the read and total timeouts only measure the time between
        read operations on the socket connecting the client and the server,
        not the total amount of time for the request to return a complete
        response. For most requests, the timeout is raised because the server
        has not sent the first byte in the specified time. This is not always
        the case; if a server streams one byte every fifteen seconds, a timeout
        of 20 seconds will not trigger, even though the request will take
        several minutes to complete.

        If your goal is to cut off any request after a set amount of wall clock
        time, consider having a second "watcher" thread to cut off a slow
        request.
    NcCs4|j|d�|_|j|d�|_|j|d�|_d|_dS)N�connect�read�total)�_validate_timeout�_connect�_readr	�_start_connect)�selfr	rr�r�/usr/lib/python3.6/timeout.py�__init__]szTimeout.__init__cCsdt|�j|j|j|jfS)Nz!%s(connect=%r, read=%r, total=%r))�type�__name__rrr	)rrrr�__str__cszTimeout.__str__cCs�|tkr|jS|dks ||jkr$|St|t�r6td��yt|�Wn(ttfk
rjtd||f��YnXy|dkr�td||f��Wn$tk
r�td||f��YnX|S)a� Check that a timeout attribute is valid.

        :param value: The timeout value to validate
        :param name: The name of the timeout attribute to validate. This is
            used to specify in error messages.
        :return: The validated and casted version of the given value.
        :raises ValueError: If it is a numeric value less than or equal to
            zero, or the type is not an integer, float, or None.
        NzDTimeout cannot be a boolean value. It must be an int, float or None.z>Timeout value %s was %s, but it must be an int, float or None.rzdAttempted to set %s timeout to %s, but the timeout cannot be set to a value less than or equal to 0.)�_Default�DEFAULT_TIMEOUT�
isinstance�bool�
ValueError�float�	TypeError)�cls�value�namerrrr
gs&
zTimeout._validate_timeoutcCst||d�S)a� Create a new Timeout from a legacy timeout value.

        The timeout value used by httplib.py sets the same timeout on the
        connect(), and recv() socket requests. This creates a :class:`Timeout`
        object that sets the individual timeouts to the ``timeout`` value
        passed to this function.

        :param timeout: The legacy timeout value.
        :type timeout: integer, float, sentinel default object, or None
        :return: Timeout object
        :rtype: :class:`Timeout`
        )rr)r)rZtimeoutrrr�
from_float�szTimeout.from_floatcCst|j|j|jd�S)a Create a copy of the timeout object

        Timeout properties are stored per-pool but each request needs a fresh
        Timeout object to ensure each one has its own start/stop configured.

        :return: a copy of the timeout object
        :rtype: :class:`Timeout`
        )rrr	)rrrr	)rrrr�clone�s
z
Timeout.clonecCs |jdk	rtd��t�|_|jS)z� Start the timeout clock, used during a connect() attempt

        :raises urllib3.exceptions.TimeoutStateError: if you attempt
            to start a timer that has been started already.
        Nz'Timeout timer has already been started.)r
r�current_time)rrrr�
start_connect�s
zTimeout.start_connectcCs|jdkrtd��t�|jS)a Gets the time elapsed since the call to :meth:`start_connect`.

        :return: Elapsed time.
        :rtype: float
        :raises urllib3.exceptions.TimeoutStateError: if you attempt
            to get duration for a timer that hasn't been started.
        Nz:Can't get connect duration for timer that has not started.)r
rr!)rrrr�get_connect_duration�s
zTimeout.get_connect_durationcCs:|jdkr|jS|jdks&|j|jkr,|jSt|j|j�S)a" Get the value to use when setting a connection timeout.

        This will be a positive float or integer, the value None
        (never timeout), or the default system timeout.

        :return: Connect timeout.
        :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None
        N)r	rr�min)rrrr�connect_timeout�s


zTimeout.connect_timeoutcCs�|jdk	rX|j|jk	rX|jdk	rX|j|jk	rX|jdkr<|jStdt|j|j�|j��S|jdk	r�|j|jk	r�td|j|j��S|jSdS)a� Get the value for the read timeout.

        This assumes some time has elapsed in the connection timeout and
        computes the read timeout appropriately.

        If self.total is set, the read timeout is dependent on the amount of
        time taken by the connect timeout. If the connection time has not been
        established, a :exc:`~urllib3.exceptions.TimeoutStateError` will be
        raised.

        :return: Value to use for the read timeout.
        :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None
        :raises urllib3.exceptions.TimeoutStateError: If :meth:`start_connect`
            has not yet been called on this object.
        Nr)r	rrr
�maxr$r#)rrrr�read_timeout�s



zTimeout.read_timeout)r�
__module__�__qualname__�__doc__rrrrr�classmethodr
rr r"r#�propertyr%r'rrrrrsF%
r)Z
__future__rZsocketrZtime�
exceptionsr�objectr�getattrr!rrrrr�<module>s_vendor/urllib3/util/__pycache__/connection.cpython-36.opt-1.pyc000064400000005744151733136360020507 0ustar003

�Pf��@snddlmZddlZddlmZddlmZmZdd�Zej	ddfdd	�Z
d
d�Zdd
�Zdd�Z
e
d�ZdS)�)�absolute_importN�)�
wait_for_read)�
HAS_SELECT�
SelectorErrorcCsVt|dd�}|dkrdS|dkr$dSts,dSytt|dd��Stk
rPdSXdS)a 
    Returns True if the connection is dropped and should be closed.

    :param conn:
        :class:`httplib.HTTPConnection` object.

    Note: For platforms like AppEngine, this will always return ``False`` to
    let the platform handle connection recycling transparently for us.
    �sockFNTg)�timeout)�getattrr�boolrr)Zconnr�r� /usr/lib/python3.6/connection.py�is_connection_droppeds
r
cCs�|\}}|jd�r|jd�}d}t�}x�tj|||tj�D]�}|\}	}
}}}
d}yHtj|	|
|�}t||�|tjk	r~|j|�|r�|j	|�|j
|
�|Stjk
r�}z|}|dk	r�|j�d}WYdd}~Xq:Xq:W|dk	r�|�tjd��dS)adConnect to *address* and return the socket object.

    Convenience function.  Connect to *address* (a 2-tuple ``(host,
    port)``) and return the socket object.  Passing the optional
    *timeout* parameter will set the timeout on the socket instance
    before attempting to connect.  If no *timeout* is supplied, the
    global default timeout setting returned by :func:`getdefaulttimeout`
    is used.  If *source_address* is set it must be a tuple of (host, port)
    for the socket to bind as a source address before making the connection.
    An host of '' or port 0 tells the OS to use the default.
    �[z[]Nz!getaddrinfo returns an empty list)
�
startswith�strip�allowed_gai_family�socketZgetaddrinfoZSOCK_STREAM�_set_socket_options�_GLOBAL_DEFAULT_TIMEOUTZ
settimeout�bindZconnect�error�close)ZaddressrZsource_addressZsocket_options�hostZport�err�family�resZafZsocktype�protoZ	canonnameZsar�errr�create_connection$s2






rcCs(|dkrdSx|D]}|j|�qWdS)N)Z
setsockopt)rZoptions�optrrrrXs
rcCstj}trtj}|S)z�This function is designed to work in the context of
    getaddrinfo, where family=socket.AF_UNSPEC is the default and
    will perform a DNS search for both IPv6 and IPv4 records.)rZAF_INET�HAS_IPV6Z	AF_UNSPEC)rrrrr`srcCsVd}d}tjrFy"tjtj�}|j|df�d}Wntk
rDYnX|rR|j�|S)z6 Returns True if the system can bind an IPv6 address. NFrT)r�has_ipv6ZAF_INET6r�	Exceptionr)rrr!rrr�	_has_ipv6ksr#z::1)Z
__future__rr�waitrZ	selectorsrrr
rrrrr#r rrrr�<module>s3_vendor/urllib3/util/__pycache__/__init__.cpython-36.opt-1.pyc000064400000002026151733136360020075 0ustar003

�Pf�@s�ddlmZddlmZddlmZddlmZddlm	Z	m
Z
mZmZm
Z
mZmZmZddlmZmZddlmZdd	lmZmZmZmZdd
lmZmZd ZdS)!�)�absolute_import�)�is_connection_dropped)�make_headers)�is_fp_closed)�
SSLContext�HAS_SNI�IS_PYOPENSSL�IS_SECURETRANSPORT�assert_fingerprint�resolve_cert_reqs�resolve_ssl_version�ssl_wrap_socket)�current_time�Timeout)�Retry)�get_host�	parse_url�split_first�Url)�
wait_for_read�wait_for_writerr	r
rrrrrrrrrrrrr
rrrrN)rr	r
rrrrrrrrrrrrr
rrrr)Z
__future__rZ
connectionrZrequestrZresponserZssl_rrr	r
rrr
rZtimeoutrrZretryrZurlrrrr�waitrr�__all__�rr�/usr/lib/python3.6/__init__.py�<module>s8(
_vendor/urllib3/util/__pycache__/response.cpython-36.pyc000064400000003433151733136360017240 0ustar003

�Pf'	�@s@ddlmZddlmZddlmZdd�Zdd�Zd	d
�Z	dS)�)�absolute_import�)�http_client)�HeaderParsingErrorcCsfy|j�Stk
rYnXy|jStk
r8YnXy
|jdkStk
rXYnXtd��dS)zt
    Checks whether a given file-like object is closed.

    :param obj:
        The file-like object to check.
    Nz)Unable to determine whether fp is closed.)Zisclosed�AttributeError�closed�fp�
ValueError)�obj�r�/usr/lib/python3.6/response.py�is_fp_closeds
r
cCs\t|tj�stdjt|����t|dd�}t|dd�}d}|rD|�}|sL|rXt||d��dS)aP
    Asserts whether all headers have been successfully parsed.
    Extracts encountered errors from the result of parsing headers.

    Only works on Python 3.

    :param headers: Headers to verify.
    :type headers: `httplib.HTTPMessage`.

    :raises urllib3.exceptions.HeaderParsingError:
        If parsing errors are found.
    z"expected httplib.Message, got {0}.�defectsN�get_payload)r�
unparsed_data)�
isinstance�httplibZHTTPMessage�	TypeError�format�type�getattrr)Zheadersrrrrrr�assert_header_parsing&srcCs$|j}t|t�r|dkS|j�dkS)z�
    Checks whether the request of a response has been a HEAD-request.
    Handles the quirks of AppEngine.

    :param conn:
    :type conn: :class:`httplib.HTTPResponse`
    �ZHEAD)�_methodr�int�upper)Zresponse�methodrrr�is_response_to_headEs	
rN)
Z
__future__rZpackages.six.movesrr�
exceptionsrr
rrrrrr�<module>s
_vendor/urllib3/util/__pycache__/selectors.cpython-36.opt-1.pyc000064400000037715151733136360020356 0ustar003

�Pf�R�@szddlZddlZddlZddlZddlZddlZddlmZmZy
ej	Z	Wne
efk
rhejZ	YnXd(Zd)Z
dZe�ZdaGdd�de�Zdd�Zejd*kr�dd�Znd
d�Zedddddg�ZGdd�de�ZGdd�de�Zeed��rGdd�de�Zeed��rGdd�de�Zeed��r:Gdd�de�Zeed ��rVGd!d"�d"e�Zeed��sfd#Zd$d%�Zd&d'�Z dS)+�N)�
namedtuple�Mapping�Tcs,eZdZ�fdd�Zdd�Zdd�Z�ZS)�
SelectorErrorcstt|�j�||_dS)N)�superr�__init__�errno)�self�errcode)�	__class__��/usr/lib/python3.6/selectors.pyrszSelectorError.__init__cCsdj|j�S)Nz<SelectorError errno={0}>)�formatr)r	rrr
�__repr__"szSelectorError.__repr__cCs|j�S)N)r)r	rrr
�__str__%szSelectorError.__str__)�__name__�
__module__�__qualname__rrr�
__classcell__rr)rr
rsrc
Csdt|t�r|}n:yt|j��}Wn(tttfk
rHtdj|���YnX|dkr`tdj|���|S)zl Return a file descriptor from a file object. If
    given an integer will simply return that integer back. zInvalid file object: {0!r}rzInvalid file descriptor: {0})�
isinstance�int�fileno�AttributeError�	TypeError�
ValueErrorr)�fileobj�fdrrr
�_fileobj_to_fd)s
r��cOsVy
|||�Stttjfk
rP}z"d}t|d�r8|j}t|��WYdd}~XnXdS)z� This is the short-circuit version of the below logic
        because in Python 3.5+ all system calls automatically restart
        and recalculate their timeouts. Nr)�OSError�IOError�select�error�hasattrrr)�func�_�args�kwargs�er
rrr
�_syscall_wrapper;s

r*cOsR|jdd�}|dkrd}d}n t|�}|dkr4d}n
t�|}t|�}|rZd|krZtd��t}x�|tk�rLy|||�}Wq`tttj	fk
�rH}z�d}t
|d�r�|j}nt
|d�r�|jd}|tj
kp�t
td	�o�|tjk}	|	�r&|dk	�r$t�}
|
|k�rttjd
��|�r$d|k�r$||
|d<w`|�r6t|��n�WYdd}~Xq`Xq`W|S)z� Wrapper function for syscalls that could fail due to EINTR.
        All functions should be retried if there is time left in the timeout
        in accordance with PEP 475. �timeoutNFgz4Timeout must be in args or kwargs to be recalculatedrr'r�WSAEINTR)r)�get�float�	monotonic�listr�_SYSCALL_SENTINELr r!r"r#r$rr'ZEINTRr,Z	ETIMEDOUTr)r%Zrecalc_timeoutr'r(r+Zexpires�resultr)r
Zis_interruptZcurrent_timerrr
r*GsJ








�SelectorKeyrr�events�datac@s0eZdZdZdd�Zdd�Zdd�Zdd	�Zd
S)�_SelectorMappingz* Mapping of file objects to selector keys cCs
||_dS)N)�	_selector)r	Zselectorrrr
r�sz_SelectorMapping.__init__cCst|jj�S)N)�lenr7�
_fd_to_key)r	rrr
�__len__�sz_SelectorMapping.__len__cCs@y|jj|�}|jj|Stk
r:tdj|���YnXdS)Nz{0!r} is not registered.)r7�_fileobj_lookupr9�KeyErrorr)r	rrrrr
�__getitem__�s
z_SelectorMapping.__getitem__cCst|jj�S)N)�iterr7r9)r	rrr
�__iter__�sz_SelectorMapping.__iter__N)rrr�__doc__rr:r=r?rrrr
r6�s
r6c@sveZdZdZdd�Zdd�Zddd�Zd	d
�Zddd�Zdd
d�Z	dd�Z
dd�Zdd�Zdd�Z
dd�Zdd�ZdS)�BaseSelectora/ Abstract Selector class

    A selector supports registering file objects to be monitored
    for specific I/O events.

    A file object is a file descriptor or any object with a
    `fileno()` method. An arbitrary object can be attached to the
    file object which can be used for example to store context info,
    a callback, etc.

    A selector can use various implementations (select(), poll(), epoll(),
    and kqueue()) depending on the platform. The 'DefaultSelector' class uses
    the most efficient implementation for the current platform.
    cCsi|_t|�|_dS)N)r9r6�_map)r	rrr
r�szBaseSelector.__init__cCsHyt|�Stk
rBx"|jj�D]}|j|kr$|jSq$W�YnXdS)aa Return a file descriptor from a file object.
        This wraps _fileobj_to_fd() to do an exhaustive
        search in case the object is invalid but we still
        have it in our map. Used by unregister() so we can
        unregister an object that was previously registered
        even if it is closed. It is also used by _SelectorMapping
        N)rrr9�valuesrr)r	r�keyrrr
r;�s

zBaseSelector._fileobj_lookupNcCsd|s|ttB@r"tdj|���t||j|�||�}|j|jkrTtdj||j���||j|j<|S)z8 Register a file object for a set of events to monitor. zInvalid events: {0!r}z${0!r} (FD {1}) is already registered)	�
EVENT_READ�EVENT_WRITErrr3r;rr9r<)r	rr4r5rDrrr
�register�szBaseSelector.registercCs�y|jj|j|��}Wn�tk
r8tdj|���Ynptjk
r�}zR|jtjkrZ�n<x:|jj	�D]}|j
|krf|jj|j�PqfWtdj|���WYdd}~XnX|S)z0 Unregister a file object from being monitored. z{0!r} is not registeredN)r9�popr;r<r�socketr#rZEBADFrCrr)r	rrDr)rrr
�
unregister�s
 zBaseSelector.unregistercCs�y|j|j|�}Wn"tk
r6tdj|���YnX||jkr\|j|�|j|||�}n"||jkr~|j|d�}||j|j	<|S)z< Change a registered file object monitored events and data. z{0!r} is not registered)r5)
r9r;r<rr4rJrGr5�_replacer)r	rr4r5rDrrr
�modify�s


zBaseSelector.modifycCs
t��dS)zj Perform the actual selection until some monitored file objects
        are ready or the timeout expires. N)�NotImplementedError)r	r+rrr
r"�szBaseSelector.selectcCs|jj�d|_dS)zd Close the selector. This must be called to ensure that all
        underlying resources are freed. N)r9�clearrB)r	rrr
�close�s
zBaseSelector.closecCsH|j�}|dkrtd��y||Stk
rBtdj|���YnXdS)z: Return the key associated with a registered file object. NzSelector is closedz{0!r} is not registered)�get_map�RuntimeErrorr<r)r	r�mappingrrr
�get_keyszBaseSelector.get_keycCs|jS)z3 Return a mapping of file objects to selector keys )rB)r	rrr
rP
szBaseSelector.get_mapcCs$y
|j|Stk
rdSXdS)z_ Return the key associated to a given file descriptor
         Return None if it is not found. N)r9r<)r	rrrr
�_key_from_fds
zBaseSelector._key_from_fdcCs|S)Nr)r	rrr
�	__enter__szBaseSelector.__enter__cGs|j�dS)N)rO)r	r'rrr
�__exit__szBaseSelector.__exit__)N)N)N)rrrr@rr;rGrJrLr"rOrSrPrTrUrVrrrr
rA�s



rAr"csNeZdZdZ�fdd�Zd
�fdd�	Z�fdd�Zdd	d
�Zddd�Z�Z	S)�SelectSelectorz Select-based selector. cs"tt|�j�t�|_t�|_dS)N)rrWr�set�_readers�_writers)r	)rrr
r!szSelectSelector.__init__NcsDtt|�j|||�}|t@r*|jj|j�|t@r@|jj|j�|S)N)	rrWrGrErY�addrrFrZ)r	rr4r5rD)rrr
rG&szSelectSelector.registercs0tt|�j|�}|jj|j�|jj|j�|S)N)rrWrJrY�discardrrZ)r	rrD)rrr
rJ.szSelectSelector.unregistercCstj||g|�S)z? Wrapper for select.select because timeout is a positional arg )r")r	�r�wr+rrr
�_select4szSelectSelector._selectc	Cs�t|j�rt|j�rgS|dkr(dnt|d�}g}t|jd|j|j|�\}}}t|�}t|�}xV||BD]J}d}||kr�|tO}||kr�|tO}|j	|�}|rl|j
|||j@f�qlW|S)NgTr)r8rYrZ�maxr*r_rXrErFrT�appendr4)	r	r+�readyr]r^r&rr4rDrrr
r"8s$
zSelectSelector.select)N)N)N)
rrrr@rrGrJr_r"rrr)rr
rWs
rW�pollcsNeZdZdZ�fdd�Zd
�fdd�	Z�fdd�Zdd	d
�Zddd�Z�Z	S)�PollSelectorz Poll-based selector cstt|�j�tj�|_dS)N)rrdrr"rc�_poll)r	)rrr
rSszPollSelector.__init__NcsPtt|�j|||�}d}|t@r*|tjO}|t@r<|tjO}|jj|j	|�|S)Nr)
rrdrGrEr"�POLLINrF�POLLOUTrer)r	rr4r5rD�
event_mask)rrr
rGWs

zPollSelector.registercs"tt|�j|�}|jj|j�|S)N)rrdrJrer)r	rrD)rrr
rJaszPollSelector.unregistercCs4|dk	r$|dkrd}ntj|d�}|jj|�}|S)zj Wrapper function for select.poll.poll() so that
            _syscall_wrapper can work with only seconds. Nrg@�@)�math�ceilrerc)r	r+r2rrr
�
_wrap_pollfszPollSelector._wrap_pollcCsxg}t|jd|d�}x^|D]V\}}d}|tj@r:|tO}|tj@rN|tO}|j|�}|r|j|||j	@f�qW|S)NT)r+r)
r*rkr"rfrFrgrErTrar4)r	r+rb�	fd_eventsrrhr4rDrrr
r"ts
zPollSelector.select)N)N)N)
rrrr@rrGrJrkr"rrr)rr
rdQs

rd�epollcsXeZdZdZ�fdd�Zdd�Zd�fdd�	Z�fd	d
�Zddd�Z�fd
d�Z	�Z
S)�
EpollSelectorz Epoll-based selector cstt|�j�tj�|_dS)N)rrnrr"rm�_epoll)r	)rrr
r�szEpollSelector.__init__cCs
|jj�S)N)ror)r	rrr
r�szEpollSelector.filenoNcsTtt|�j|||�}d}|t@r*|tjO}|t@r<|tjO}t|j	jd|j
|�|S)NrF)rrnrGrEr"�EPOLLINrF�EPOLLOUTr*ror)r	rr4r5rDZevents_mask)rrr
rG�s

zEpollSelector.registercs@tt|�j|�}yt|jjd|j�Wntk
r:YnX|S)NF)rrnrJr*rorr)r	rrD)rrr
rJ�szEpollSelector.unregisterc	Cs�|dk	r2|dkrd}ntj|d�d}t|�}nd	}tt|j�d�}g}t|jjd||d�}x^|D]V\}}d}|t	j
@r�|tO}|t	j@r�|t
O}|j|�}|rd|j|||j@f�qdW|S)
Nrgg@�@g����MbP?g�?rT)r+Z	maxeventsg�)rirjr.r`r8r9r*rorcr"rprFrqrErTrar4)	r	r+�
max_eventsrbrlrrhr4rDrrr
r"�s*


zEpollSelector.selectcs|jj�tt|�j�dS)N)rorOrrn)r	)rrr
rO�s
zEpollSelector.close)N)N)rrrr@rrrGrJr"rOrrr)rr
rn�s
	
!rn�kqueuecsXeZdZdZ�fdd�Zdd�Zd�fdd�	Z�fd	d
�Zddd�Z�fd
d�Z	�Z
S)�KqueueSelectorz  Kqueue / Kevent-based selector cstt|�j�tj�|_dS)N)rrtrr"rs�_kqueue)r	)rrr
r�szKqueueSelector.__init__cCs
|jj�S)N)rur)r	rrr
r�szKqueueSelector.filenoNcs|tt|�j|||�}|t@rFtj|jtjtj�}t	|j
jd|gdd�|t@rxtj|jtj
tj�}t	|j
jd|gdd�|S)NFr)rrtrGrEr"�keventr�KQ_FILTER_READZ	KQ_EV_ADDr*ru�controlrF�KQ_FILTER_WRITE)r	rr4r5rDrv)rrr
rG�szKqueueSelector.registercs�tt|�j|�}|jt@r^tj|jtjtj	�}yt
|jjd|gdd�Wnt
k
r\YnX|jt@r�tj|jtjtj	�}yt
|jjd|gdd�Wnt
k
r�YnX|S)NFr)rrtrJr4rEr"rvrrwZKQ_EV_DELETEr*rurxrrFry)r	rrDrv)rrr
rJ�s$

zKqueueSelector.unregistercCs�|dk	rt|d�}t|j�d}i}t|jjdd||�}x�|D]�}|j}|j}d}|tj	krd|t
O}|tjkrv|tO}|j
|�}	|	r>|	j|kr�|	||	j@f||	j<q>||	jd}
|	||
B|	j@f||	j<q>Wt|j��S)Nr�Tr)r`r8r9r*rurxZident�filterr"rwrEryrFrTrr4r0rC)r	r+rrZ	ready_fdsZkevent_listrvrrhr4rDZ
old_eventsrrr
r"�s*







zKqueueSelector.selectcs|jj�tt|�j�dS)N)rurOrrt)r	)rrr
rOs
zKqueueSelector.close)N)N)rrrr@rrrGrJr"rOrrr)rr
rt�s
rtFcCsZy0|dkrtj�}|jd�ntt|��j�dSttfk
rT}zdSd}~XnXdS)a
 Checks that select structs can be allocated by the underlying
    operating system, not just advertised by the select module. We don't
    check select() because we'll be hopeful that most platforms that
    don't have it available will not advertise it. (ie: GAE) rcrTFN)r"rc�getattrrOr r)�struct�pr)rrr
�
_can_allocatesrcCsPtdkrJtd�rtan4td�r$tan&td�r2tanttd�rBtantd��t�S)z� This function serves as a first call for DefaultSelector to
    detect if the select module is being monkey-patched incorrectly
    by eventlet, greenlet, and preserve proper behavior. Nrsrmrcr"z!Platform does not have a selector)	�_DEFAULT_SELECTORrrtrnrdr$r"rWrrrrr
�DefaultSelector5s
r�rrz)rr)!rrir"rI�sysZtime�collectionsrrr/r�ImportErrorrErFZ
HAS_SELECT�objectr1r��	Exceptionrr�version_infor*r3r6rAr$rWrdrnrtrr�rrrr
�<module>sH


8	14BR_vendor/urllib3/util/__pycache__/request.cpython-36.pyc000064400000006102151733136360017066 0ustar003

�Pfy�@s\ddlmZddlmZddlmZmZddlmZdZ	e
�Zddd	�Zd
d�Z
dd
�ZdS)�)�absolute_import)�	b64encode�)�b�
integer_types)�UnrewindableBodyErrorzgzip,deflateNcCs�i}|r6t|t�rnt|t�r*dj|�}nt}||d<|rB||d<|rNd|d<|rldtt|��jd�|d<|r�dtt|��jd�|d	<|r�d
|d<|S)a
    Shortcuts for generating request headers.

    :param keep_alive:
        If ``True``, adds 'connection: keep-alive' header.

    :param accept_encoding:
        Can be a boolean, list, or string.
        ``True`` translates to 'gzip,deflate'.
        List will get joined by comma.
        String will be used as provided.

    :param user_agent:
        String representing the user-agent you want, such as
        "python-urllib3/0.6"

    :param basic_auth:
        Colon-separated username:password string for 'authorization: basic ...'
        auth header.

    :param proxy_basic_auth:
        Colon-separated username:password string for 'proxy-authorization: basic ...'
        auth header.

    :param disable_cache:
        If ``True``, adds 'cache-control: no-cache' header.

    Example::

        >>> make_headers(keep_alive=True, user_agent="Batman/1.0")
        {'connection': 'keep-alive', 'user-agent': 'Batman/1.0'}
        >>> make_headers(accept_encoding=True)
        {'accept-encoding': 'gzip,deflate'}
    �,zaccept-encodingz
user-agentz
keep-aliveZ
connectionzBasic zutf-8Z
authorizationzproxy-authorizationzno-cachez
cache-control)�
isinstance�str�list�join�ACCEPT_ENCODINGrr�decode)Z
keep_aliveZaccept_encodingZ
user_agentZ
basic_authZproxy_basic_authZ
disable_cacheZheaders�r�/usr/lib/python3.6/request.py�make_headerss*$

rcCsR|dk	rt||�n:t|dd�dk	rNy|j�}Wnttfk
rLt}YnX|S)z
    If a position is provided, move file to that point.
    Otherwise, we'll attempt to record a position for future use.
    N�tell)�rewind_body�getattrr�IOError�OSError�_FAILEDTELL)�body�posrrr�set_file_positionMs
rcCstt|dd�}|dk	rNt|t�rNy||�Wqpttfk
rJtd��YqpXn"|tkr`td��ntdt|���dS)z�
    Attempt to rewind body to a certain position.
    Primarily used for request redirects and retries.

    :param body:
        File-like object that supports seek.

    :param int pos:
        Position to seek to in file.
    �seekNzAAn error occurred when rewinding request body for redirect/retry.zRUnable to record file position for rewinding request body during a redirect/retry.z4body_pos must be of type integer, instead it was %s.)	rr	rrrrr�
ValueError�type)rZbody_posZ	body_seekrrrr_s
r)NNNNNN)Z
__future__r�base64rZpackages.sixrr�
exceptionsrr
�objectrrrrrrrr�<module>s
A_vendor/urllib3/util/__pycache__/retry.cpython-36.opt-1.pyc000064400000030433151733136360017506 0ustar003

�Pf;�@s�ddlmZddlZddlZddlmZddlmZddlZddl	Z	ddl
mZmZm
Z
mZmZmZddlmZeje�Zedd	d
ddd
g�ZGdd�de�Zed�e_dS)�)�absolute_importN)�
namedtuple)�	takewhile�)�ConnectTimeoutError�
MaxRetryError�
ProtocolError�ReadTimeoutError�
ResponseError�
InvalidHeader)�six�RequestHistory�method�url�error�status�redirect_locationc
@s�eZdZdZeddddddg�Zedg�Zed	d
dg�ZdZd
ddddeddddddef
dd�Z	dd�Z
ed2dd��Zdd�Z
dd�Zdd�Zd3dd�Zdd �Zd4d!d"�Zd#d$�Zd%d&�Zd'd(�Zd5d*d+�Zd,d-�Zd6d.d/�Zd0d1�ZdS)7�Retrya2 Retry configuration.

    Each retry attempt will create a new Retry object with updated values, so
    they can be safely reused.

    Retries can be defined as a default for a pool::

        retries = Retry(connect=5, read=2, redirect=5)
        http = PoolManager(retries=retries)
        response = http.request('GET', 'http://example.com/')

    Or per-request (which overrides the default for the pool)::

        response = http.request('GET', 'http://example.com/', retries=Retry(10))

    Retries can be disabled by passing ``False``::

        response = http.request('GET', 'http://example.com/', retries=False)

    Errors will be wrapped in :class:`~urllib3.exceptions.MaxRetryError` unless
    retries are disabled, in which case the causing exception will be raised.

    :param int total:
        Total number of retries to allow. Takes precedence over other counts.

        Set to ``None`` to remove this constraint and fall back on other
        counts. It's a good idea to set this to some sensibly-high value to
        account for unexpected edge cases and avoid infinite retry loops.

        Set to ``0`` to fail on the first retry.

        Set to ``False`` to disable and imply ``raise_on_redirect=False``.

    :param int connect:
        How many connection-related errors to retry on.

        These are errors raised before the request is sent to the remote server,
        which we assume has not triggered the server to process the request.

        Set to ``0`` to fail on the first retry of this type.

    :param int read:
        How many times to retry on read errors.

        These errors are raised after the request was sent to the server, so the
        request may have side-effects.

        Set to ``0`` to fail on the first retry of this type.

    :param int redirect:
        How many redirects to perform. Limit this to avoid infinite redirect
        loops.

        A redirect is a HTTP response with a status code 301, 302, 303, 307 or
        308.

        Set to ``0`` to fail on the first retry of this type.

        Set to ``False`` to disable and imply ``raise_on_redirect=False``.

    :param int status:
        How many times to retry on bad status codes.

        These are retries made on responses, where status code matches
        ``status_forcelist``.

        Set to ``0`` to fail on the first retry of this type.

    :param iterable method_whitelist:
        Set of uppercased HTTP method verbs that we should retry on.

        By default, we only retry on methods which are considered to be
        idempotent (multiple requests with the same parameters end with the
        same state). See :attr:`Retry.DEFAULT_METHOD_WHITELIST`.

        Set to a ``False`` value to retry on any verb.

    :param iterable status_forcelist:
        A set of integer HTTP status codes that we should force a retry on.
        A retry is initiated if the request method is in ``method_whitelist``
        and the response status code is in ``status_forcelist``.

        By default, this is disabled with ``None``.

    :param float backoff_factor:
        A backoff factor to apply between attempts after the second try
        (most errors are resolved immediately by a second try without a
        delay). urllib3 will sleep for::

            {backoff factor} * (2 ^ ({number of total retries} - 1))

        seconds. If the backoff_factor is 0.1, then :func:`.sleep` will sleep
        for [0.0s, 0.2s, 0.4s, ...] between retries. It will never be longer
        than :attr:`Retry.BACKOFF_MAX`.

        By default, backoff is disabled (set to 0).

    :param bool raise_on_redirect: Whether, if the number of redirects is
        exhausted, to raise a MaxRetryError, or to return a response with a
        response code in the 3xx range.

    :param iterable remove_headers_on_redirect:
        Sequence of headers to remove from the request when a response
        indicating a redirect is returned before firing off the redirected
        request

    :param bool raise_on_status: Similar meaning to ``raise_on_redirect``:
        whether we should raise an exception, or return a response,
        if status falls in ``status_forcelist`` range and retries have
        been exhausted.

    :param tuple history: The history of the request encountered during
        each call to :meth:`~Retry.increment`. The list is in the order
        the requests occurred. Each list item is of class :class:`RequestHistory`.

    :param bool respect_retry_after_header:
        Whether to respect Retry-After header on status codes defined as
        :attr:`Retry.RETRY_AFTER_STATUS_CODES` or not.

    ZHEADZGETZPUTZDELETEZOPTIONSZTRACEZ
Authorizationi�i�i��x�
NrTcCsv||_||_||_||_|dks(|dkr0d}d}	||_|p>t�|_||_||_|	|_	|
|_
|pbt�|_||_
|
|_dS)NFr)�total�connect�readr�redirect�set�status_forcelist�method_whitelist�backoff_factor�raise_on_redirect�raise_on_status�tuple�history�respect_retry_after_header�remove_headers_on_redirect)�selfrrrrrrrrrrr!r"r#�r%�/usr/lib/python3.6/retry.py�__init__�s zRetry.__init__cKsPt|j|j|j|j|j|j|j|j|j	|j
|j|jd�}|j
|�t|�f|�S)N)rrrrrrrrrrr!r#)�dictrrrrrrrrrrr!r#�update�type)r$�kwZparamsr%r%r&�new�s

z	Retry.newcCsR|dkr|dk	r|n|j}t|t�r(|St|�o2d}|||d�}tjd||�|S)z4 Backwards-compatibility for the old retries format.N)rz!Converted retries value: %r -> %r)�DEFAULT�
isinstancer�bool�log�debug)�clsZretriesr�defaultZnew_retriesr%r%r&�from_int�s
zRetry.from_intcCsFtttdd�t|j����}|dkr(dS|jd|d}t|j|�S)zJ Formula for computing the current backoff

        :rtype: float
        cSs
|jdkS)N)r)�xr%r%r&�<lambda>�sz(Retry.get_backoff_time.<locals>.<lambda>�rr)�len�listr�reversedr!r�min�BACKOFF_MAX)r$Zconsecutive_errors_lenZ
backoff_valuer%r%r&�get_backoff_time�szRetry.get_backoff_timecCs\tjd|�rt|�}n6tjj|�}|dkr6td|��tj|�}|tj�}|dkrXd}|S)Nz^\s*[0-9]+\s*$zInvalid Retry-After header: %sr)	�re�match�int�emailZutilsZ	parsedater�timeZmktime)r$�retry_afterZsecondsZretry_date_tupleZ
retry_dater%r%r&�parse_retry_after�s

zRetry.parse_retry_aftercCs |jd�}|dkrdS|j|�S)z* Get the value of Retry-After in seconds. zRetry-AfterN)Z	getheaderrD)r$�responserCr%r%r&�get_retry_after�s
zRetry.get_retry_aftercCs |j|�}|rtj|�dSdS)NTF)rFrB�sleep)r$rErCr%r%r&�sleep_for_retry�s


zRetry.sleep_for_retrycCs"|j�}|dkrdStj|�dS)Nr)r=rBrG)r$Zbackoffr%r%r&�_sleep_backoffszRetry._sleep_backoffcCs"|r|j|�}|rdS|j�dS)aC Sleep between retry attempts.

        This method will respect a server's ``Retry-After`` response header
        and sleep the duration of the time requested. If that is not present, it
        will use an exponential backoff. By default, the backoff factor is 0 and
        this method will return immediately.
        N)rHrI)r$rEZsleptr%r%r&rGs
	
zRetry.sleepcCs
t|t�S)z{ Errors when we're fairly sure that the server did not receive the
        request, so it should be safe to retry.
        )r.r)r$�errr%r%r&�_is_connection_errorszRetry._is_connection_errorcCst|ttf�S)z� Errors that occur after the request has been started, so we should
        assume that the server began processing it.
        )r.r	r)r$rJr%r%r&�_is_read_error!szRetry._is_read_errorcCs|jr|j�|jkrdSdS)z| Checks if a given HTTP method should be retried upon, depending if
        it is included on the method whitelist.
        FT)r�upper)r$rr%r%r&�_is_method_retryable'szRetry._is_method_retryableFcCs<|j|�sdS|jr"||jkr"dS|jo:|jo:|o:||jkS)ax Is this method/status code retryable? (Based on whitelists and control
        variables such as the number of total retries to allow, whether to
        respect the Retry-After header, whether this header is present, and
        whether the returned status code is on the list of status codes to
        be retried upon on the presence of the aforementioned header)
        FT)rNrrr"�RETRY_AFTER_STATUS_CODES)r$r�status_codeZhas_retry_afterr%r%r&�is_retry0s
zRetry.is_retrycCs:|j|j|j|j|jf}ttd|��}|s.dSt|�dkS)z Are we out of retries? NFr)rrrrrr9�filterr;)r$Zretry_countsr%r%r&�is_exhausted@s
zRetry.is_exhaustedcCs�|jdkr |r tjt|�||��|j}|dk	r6|d8}|j}|j}	|j}
|j}d}d}
d}|r�|j|�r�|dkr�tjt|�||��n|dk	r�|d8}n�|r�|j	|�r�|	dks�|j
|�r�tjt|�||��n|	dk	r�|	d8}	nn|o�|j��r|
dk	r�|
d8}
d}|j�}|j}
n<tj
}|�rL|j�rL|dk	�r6|d8}tjj|jd�}|j}
|jt||||
|�f}|j|||	|
||d�}|j��r�t|||�p�t|���tjd||�|S)	a� Return a new Retry object with incremented retry counters.

        :param response: A response object, or None, if the server did not
            return a response.
        :type response: :class:`~urllib3.response.HTTPResponse`
        :param Exception error: An error encountered during the request, or
            None if the response was received successfully.

        :return: A new ``Retry`` object.
        FNr7�unknownztoo many redirects)rP)rrrrrr!z$Incremented Retry for (url='%s'): %r)rrZreraiser*rrrrrKrLrNZget_redirect_locationr
Z
GENERIC_ERRORZSPECIFIC_ERROR�formatr!r
r,rSrr0r1)r$rrrErZ_poolZ_stacktracerrrrZstatus_count�causerrr!Z	new_retryr%r%r&�	incrementIsX




zRetry.incrementcCsdjt|�|d�S)Nz|{cls.__name__}(total={self.total}, connect={self.connect}, read={self.read}, redirect={self.redirect}, status={self.status}))r2r$)rUr*)r$r%r%r&�__repr__�szRetry.__repr__)TN)N)N)F)NNNNNN)�__name__�
__module__�__qualname__�__doc__�	frozensetZDEFAULT_METHOD_WHITELISTZ"DEFAULT_REDIRECT_HEADERS_BLACKLISTrOr<r'r,�classmethodr4r=rDrFrHrIrGrKrLrNrQrSrWrXr%r%r%r&rs8x





	
	
Jr�)Z
__future__rrBZlogging�collectionsr�	itertoolsrrAr>�
exceptionsrrrr	r
rZpackagesrZ	getLoggerrYr0r
�objectrr-r%r%r%r&�<module>s  


_vendor/urllib3/util/__init__.py000064400000002024151733136360012650 0ustar00from __future__ import absolute_import
# For backwards compatibility, provide imports that used to be here.
from .connection import is_connection_dropped
from .request import make_headers
from .response import is_fp_closed
from .ssl_ import (
    SSLContext,
    HAS_SNI,
    IS_PYOPENSSL,
    IS_SECURETRANSPORT,
    assert_fingerprint,
    resolve_cert_reqs,
    resolve_ssl_version,
    ssl_wrap_socket,
)
from .timeout import (
    current_time,
    Timeout,
)

from .retry import Retry
from .url import (
    get_host,
    parse_url,
    split_first,
    Url,
)
from .wait import (
    wait_for_read,
    wait_for_write
)

__all__ = (
    'HAS_SNI',
    'IS_PYOPENSSL',
    'IS_SECURETRANSPORT',
    'SSLContext',
    'Retry',
    'Timeout',
    'Url',
    'assert_fingerprint',
    'current_time',
    'is_connection_dropped',
    'is_fp_closed',
    'get_host',
    'parse_url',
    'make_headers',
    'resolve_cert_reqs',
    'resolve_ssl_version',
    'split_first',
    'ssl_wrap_socket',
    'wait_for_read',
    'wait_for_write'
)
_vendor/urllib3/util/ssl_.py000064400000027666151733136360012074 0ustar00from __future__ import absolute_import
import errno
import warnings
import hmac

from binascii import hexlify, unhexlify
from hashlib import md5, sha1, sha256

from ..exceptions import SSLError, InsecurePlatformWarning, SNIMissingWarning


# Conservative defaults; ``SSLContext`` and ``HAS_SNI`` are re-imported below
# when the stdlib ``ssl`` module provides them.  NOTE(review): the ``IS_*``
# flags are presumably flipped by contrib modules (pyopenssl /
# securetransport) — not visible in this file.
SSLContext = None
HAS_SNI = False
IS_PYOPENSSL = False
IS_SECURETRANSPORT = False

# Maps the length of a digest to a possible hash function producing this digest
# (length counted in hex characters: md5=32, sha1=40, sha256=64; see
# assert_fingerprint below, which keys on len() of the hex fingerprint).
HASHFUNC_MAP = {
    32: md5,
    40: sha1,
    64: sha256,
}


def _const_compare_digest_backport(a, b):
    """
    Compare two digests of equal length in constant time.

    The digests must be of type str/bytes.
    Returns True if the digests match, and False otherwise.
    """
    result = abs(len(a) - len(b))
    for l, r in zip(bytearray(a), bytearray(b)):
        result |= l ^ r
    return result == 0


# Prefer the stdlib's C-implemented constant-time comparison when available
# (hmac.compare_digest); otherwise fall back to the pure-Python backport above.
_const_compare_digest = getattr(hmac, 'compare_digest',
                                _const_compare_digest_backport)


try:  # Test for SSL features
    import ssl
    from ssl import wrap_socket, CERT_NONE, PROTOCOL_SSLv23
    from ssl import HAS_SNI  # Has SNI?
except ImportError:
    pass


try:
    from ssl import OP_NO_SSLv2, OP_NO_SSLv3, OP_NO_COMPRESSION
except ImportError:
    OP_NO_SSLv2, OP_NO_SSLv3 = 0x1000000, 0x2000000
    OP_NO_COMPRESSION = 0x20000

# A secure default.
# Sources for more information on TLS ciphers:
#
# - https://wiki.mozilla.org/Security/Server_Side_TLS
# - https://www.ssllabs.com/projects/best-practices/index.html
# - https://hynek.me/articles/hardening-your-web-servers-ssl-ciphers/
#
# The general intent is:
# - Prefer TLS 1.3 cipher suites
# - prefer cipher suites that offer perfect forward secrecy (DHE/ECDHE),
# - prefer ECDHE over DHE for better performance,
# - prefer any AES-GCM and ChaCha20 over any AES-CBC for better performance and
#   security,
# - prefer AES-GCM over ChaCha20 because hardware-accelerated AES is common,
# - disable NULL authentication, MD5 MACs and DSS for security reasons.
# Colon-separated OpenSSL cipher-suite string; entries earlier in the list are
# preferred over later ones (ordering rationale in the comment block above).
DEFAULT_CIPHERS = ':'.join([
    'TLS13-AES-256-GCM-SHA384',
    'TLS13-CHACHA20-POLY1305-SHA256',
    'TLS13-AES-128-GCM-SHA256',
    'ECDH+AESGCM',
    'ECDH+CHACHA20',
    'DH+AESGCM',
    'DH+CHACHA20',
    'ECDH+AES256',
    'DH+AES256',
    'ECDH+AES128',
    'DH+AES',
    'RSA+AESGCM',
    'RSA+AES',
    '!aNULL',
    '!eNULL',
    '!MD5',
])

try:
    from ssl import SSLContext  # Modern SSL?
except ImportError:
    import sys

    class SSLContext(object):  # Platform-specific: Python 2 & 3.1
        """Minimal stand-in for :class:`ssl.SSLContext` on Pythons without one.

        Stores the TLS configuration as plain attributes and replays it as
        keyword arguments to :func:`ssl.wrap_socket` when a socket is
        actually wrapped.
        """

        # ssl.wrap_socket() only accepts a ``ciphers`` argument on
        # Python 2.7+ / 3.2+.
        supports_set_ciphers = ((2, 7) <= sys.version_info < (3,) or
                                (3, 2) <= sys.version_info)

        def __init__(self, protocol_version):
            # ``protocol_version`` is expected to be an ``ssl.PROTOCOL_*``
            # constant; it is passed through as ``ssl_version`` below.
            self.protocol = protocol_version
            # Use default values from a real SSLContext
            self.check_hostname = False
            self.verify_mode = ssl.CERT_NONE
            self.ca_certs = None
            self.options = 0
            self.certfile = None
            self.keyfile = None
            self.ciphers = None

        def load_cert_chain(self, certfile, keyfile):
            """Record the certificate and private-key file paths."""
            self.certfile = certfile
            self.keyfile = keyfile

        def load_verify_locations(self, cafile=None, capath=None):
            """Record the CA bundle used for peer verification.

            Only a single ``cafile`` is supported; directory-based lookup
            (``capath``) requires a real SSLContext.
            """
            self.ca_certs = cafile

            if capath is not None:
                raise SSLError("CA directories not supported in older Pythons")

        def set_ciphers(self, cipher_suite):
            """Remember the OpenSSL cipher string, if this Python supports it."""
            if not self.supports_set_ciphers:
                raise TypeError(
                    'Your version of Python does not support setting '
                    'a custom cipher suite. Please upgrade to Python '
                    '2.7, 3.2, or later if you need this functionality.'
                )
            self.ciphers = cipher_suite

        def wrap_socket(self, socket, server_hostname=None, server_side=False):
            """Wrap ``socket`` via ``ssl.wrap_socket`` with the stored config.

            ``server_hostname`` is accepted for API compatibility with the
            real SSLContext but is unused here.  Always emits
            InsecurePlatformWarning, since this shim cannot configure SSL
            as safely as a true SSLContext.
            """
            warnings.warn(
                'A true SSLContext object is not available. This prevents '
                'urllib3 from configuring SSL appropriately and may cause '
                'certain SSL connections to fail. You can upgrade to a newer '
                'version of Python to solve this. For more information, see '
                'https://urllib3.readthedocs.io/en/latest/advanced-usage.html'
                '#ssl-warnings',
                InsecurePlatformWarning
            )
            kwargs = {
                'keyfile': self.keyfile,
                'certfile': self.certfile,
                'ca_certs': self.ca_certs,
                'cert_reqs': self.verify_mode,
                'ssl_version': self.protocol,
                'server_side': server_side,
            }
            if self.supports_set_ciphers:  # Platform-specific: Python 2.7+
                return wrap_socket(socket, ciphers=self.ciphers, **kwargs)
            else:  # Platform-specific: Python 2.6
                return wrap_socket(socket, **kwargs)


def assert_fingerprint(cert, fingerprint):
    """
    Verify that a certificate matches the expected fingerprint.

    :param cert:
        Certificate as bytes object.
    :param fingerprint:
        Fingerprint as string of hexdigits, can be interspersed by colons.
    :raises SSLError:
        If the fingerprint length matches no known hash, or the digests
        do not match.
    """
    normalized = fingerprint.replace(':', '').lower()
    # The hex length selects the hash algorithm (md5/sha1/sha256).
    hashfunc = HASHFUNC_MAP.get(len(normalized))
    if hashfunc is None:
        raise SSLError(
            'Fingerprint of invalid length: {0}'.format(normalized))

    # encode() keeps this working on py32 as well as py2/py33.
    expected = unhexlify(normalized.encode())
    actual = hashfunc(cert).digest()

    # Constant-time comparison to avoid leaking match position via timing.
    if not _const_compare_digest(actual, expected):
        raise SSLError('Fingerprints did not match. Expected "{0}", got "{1}".'
                       .format(normalized, hexlify(actual)))


def resolve_cert_reqs(candidate):
    """
    Map *candidate* to an ``ssl.CERT_*`` numeric constant usable by
    the wrap_socket function/method from the ssl module.

    ``None`` resolves to :data:`ssl.CERT_NONE`.  A string is looked up as an
    attribute of the :mod:`ssl` module, first verbatim and then with a
    ``CERT_`` prefix (so ``'REQUIRED'`` means ``CERT_REQUIRED``).  Any other
    value is assumed to already be a numeric constant and is returned as-is.
    """
    if candidate is None:
        return CERT_NONE

    if not isinstance(candidate, str):
        return candidate

    resolved = getattr(ssl, candidate, None)
    if resolved is not None:
        return resolved
    # Allow the abbreviated form without the CERT_ prefix.
    return getattr(ssl, 'CERT_' + candidate)


def resolve_ssl_version(candidate):
    """
    Map *candidate* to an ``ssl.PROTOCOL_*`` constant, analogous to
    :func:`resolve_cert_reqs`.

    ``None`` resolves to :data:`ssl.PROTOCOL_SSLv23`; strings are looked up
    on the :mod:`ssl` module, first verbatim and then with a ``PROTOCOL_``
    prefix; any other value is returned unchanged.
    """
    if candidate is None:
        return PROTOCOL_SSLv23

    if not isinstance(candidate, str):
        return candidate

    by_name = getattr(ssl, candidate, None)
    if by_name is not None:
        return by_name
    # Allow the abbreviated form without the PROTOCOL_ prefix.
    return getattr(ssl, 'PROTOCOL_' + candidate)


def create_urllib3_context(ssl_version=None, cert_reqs=None,
                           options=None, ciphers=None):
    """All arguments have the same meaning as ``ssl_wrap_socket``.

    By default, this function does a lot of the same work that
    ``ssl.create_default_context`` does on Python 3.4+. It:

    - Disables SSLv2, SSLv3, and compression
    - Sets a restricted set of server ciphers

    If you wish to enable SSLv3, you can do::

        from urllib3.util import ssl_
        context = ssl_.create_urllib3_context()
        context.options &= ~ssl_.OP_NO_SSLv3

    You can do the same to enable compression (substituting ``COMPRESSION``
    for ``SSLv3`` in the last line above).

    :param ssl_version:
        The desired protocol version to use. This will default to
        PROTOCOL_SSLv23 which will negotiate the highest protocol that both
        the server and your installation of OpenSSL support.
    :param cert_reqs:
        Whether to require the certificate verification. This defaults to
        ``ssl.CERT_REQUIRED``.
    :param options:
        Specific OpenSSL options. These default to ``ssl.OP_NO_SSLv2``,
        ``ssl.OP_NO_SSLv3``, ``ssl.OP_NO_COMPRESSION``.
    :param ciphers:
        Which cipher suites to allow the server to select.
    :returns:
        Constructed SSLContext object with specified options
    :rtype: SSLContext
    """
    context = SSLContext(ssl_version or ssl.PROTOCOL_SSLv23)

    # Setting the default here, as we may have no ssl module on import
    if cert_reqs is None:
        cert_reqs = ssl.CERT_REQUIRED

    if options is None:
        # SSLv2 is easily broken and is considered harmful and dangerous;
        # SSLv3 has several problems and is now dangerous; compression is
        # disabled to prevent CRIME attacks for OpenSSL 1.0+ (issue #309).
        options = OP_NO_SSLv2 | OP_NO_SSLv3 | OP_NO_COMPRESSION

    context.options |= options

    if getattr(context, 'supports_set_ciphers', True):  # Platform-specific: Python 2.6
        context.set_ciphers(ciphers or DEFAULT_CIPHERS)

    context.verify_mode = cert_reqs
    if getattr(context, 'check_hostname', None) is not None:  # Platform-specific: Python 3.2
        # We do our own verification, including fingerprints and alternative
        # hostnames. So disable it here
        context.check_hostname = False
    return context


def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None,
                    ca_certs=None, server_hostname=None,
                    ssl_version=None, ciphers=None, ssl_context=None,
                    ca_cert_dir=None):
    """
    All arguments except for server_hostname, ssl_context, and ca_cert_dir have
    the same meaning as they do when using :func:`ssl.wrap_socket`.

    :param server_hostname:
        When SNI is supported, the expected hostname of the certificate
    :param ssl_context:
        A pre-made :class:`SSLContext` object. If none is provided, one will
        be created using :func:`create_urllib3_context`.
    :param ciphers:
        A string of ciphers we wish the client to support. This is not
        supported on Python 2.6 as the ssl module does not support it.
    :param ca_cert_dir:
        A directory containing CA certificates in multiple separate files, as
        supported by OpenSSL's -CApath flag or the capath argument to
        SSLContext.load_verify_locations().
    :returns: The TLS-wrapped socket.
    :raises SSLError: If the CA verification locations cannot be loaded.
    """
    context = ssl_context
    if context is None:
        # Note: This branch of code and all the variables in it are no longer
        # used by urllib3 itself. We should consider deprecating and removing
        # this code.
        context = create_urllib3_context(ssl_version, cert_reqs,
                                         ciphers=ciphers)

    if ca_certs or ca_cert_dir:
        try:
            context.load_verify_locations(ca_certs, ca_cert_dir)
        except IOError as e:  # Platform-specific: Python 2.6, 2.7, 3.2
            raise SSLError(e)
        # Py33 raises FileNotFoundError which subclasses OSError
        # These are not equivalent unless we check the errno attribute
        except OSError as e:  # Platform-specific: Python 3.3 and beyond
            # Only wrap "file not found"; re-raise any other OS error as-is.
            if e.errno == errno.ENOENT:
                raise SSLError(e)
            raise
    elif ssl_context is None and hasattr(context, 'load_default_certs'):
        # try to load OS default certs; works well on Windows (require Python3.4+)
        context.load_default_certs()

    if certfile:
        context.load_cert_chain(certfile, keyfile)
    if HAS_SNI:  # Platform-specific: OpenSSL with enabled SNI
        return context.wrap_socket(sock, server_hostname=server_hostname)

    # SNI is unavailable: name-based virtual hosting may present the wrong
    # certificate to this client, so warn the caller before wrapping anyway.
    warnings.warn(
        'An HTTPS request has been made, but the SNI (Subject Name '
        'Indication) extension to TLS is not available on this platform. '
        'This may cause the server to present an incorrect TLS '
        'certificate, which can cause validation failures. You can upgrade to '
        'a newer version of Python to solve this. For more information, see '
        'https://urllib3.readthedocs.io/en/latest/advanced-usage.html'
        '#ssl-warnings',
        SNIMissingWarning
    )
    return context.wrap_socket(sock)
_vendor/urllib3/util/response.py000064400000004447151733136360012762 0ustar00from __future__ import absolute_import
from ..packages.six.moves import http_client as httplib

from ..exceptions import HeaderParsingError


def is_fp_closed(obj):
    """
    Checks whether a given file-like object is closed.

    :param obj:
        The file-like object to check.
    """
    _missing = object()

    # Check `isclosed()` first, in case Python3 doesn't set `closed`.
    # GH Issue #928
    isclosed = getattr(obj, 'isclosed', _missing)
    if isclosed is not _missing:
        return isclosed()

    # Check via the official file-like-object way.
    closed = getattr(obj, 'closed', _missing)
    if closed is not _missing:
        return closed

    # Check if the object is a container for another file-like object that
    # gets released on exhaustion (e.g. HTTPResponse).
    fp = getattr(obj, 'fp', _missing)
    if fp is not _missing:
        return fp is None

    raise ValueError("Unable to determine whether fp is closed.")


def assert_header_parsing(headers):
    """
    Asserts whether all headers have been successfully parsed.
    Extracts encountered errors from the result of parsing headers.

    Only works on Python 3.

    :param headers: Headers to verify.
    :type headers: `httplib.HTTPMessage`.

    :raises urllib3.exceptions.HeaderParsingError:
        If parsing errors are found.
    """

    # Parsing would fail silently with the wrong parameter kind, so be
    # explicit about the expected type up front.
    if not isinstance(headers, httplib.HTTPMessage):
        raise TypeError('expected httplib.Message, got {0}.'.format(
            type(headers)))

    defects = getattr(headers, 'defects', None)
    get_payload = getattr(headers, 'get_payload', None)

    # get_payload only exists on Python 3; leftover payload means the header
    # block was not fully consumed by the parser.
    unparsed_data = get_payload() if get_payload else None

    if defects or unparsed_data:
        raise HeaderParsingError(defects=defects, unparsed_data=unparsed_data)


def is_response_to_head(response):
    """
    Checks whether the request of a response has been a HEAD-request.
    Handles the quirks of AppEngine.

    :param conn:
    :type conn: :class:`httplib.HTTPResponse`
    """
    # FIXME: Can we do this somehow without accessing private httplib _method?
    req_method = response._method
    if isinstance(req_method, int):  # Platform-specific: Appengine stores an int
        return req_method == 3
    return req_method.upper() == 'HEAD'
_vendor/urllib3/util/wait.py000064400000002653151733136360012065 0ustar00from .selectors import (
    HAS_SELECT,
    DefaultSelector,
    EVENT_READ,
    EVENT_WRITE
)


def _wait_for_io_events(socks, events, timeout=None):
    """ Waits for IO events to be available from a list of sockets
    or optionally a single socket if passed in. Returns a list of
    sockets that can be interacted with immediately. """
    if not HAS_SELECT:
        raise ValueError('Platform does not have a selector')
    if not isinstance(socks, list):
        if hasattr(socks, "fileno"):
            # A single socket-like object was passed in.
            socks = [socks]
        else:
            # A non-list iterable of sockets.
            socks = list(socks)
    with DefaultSelector() as selector:
        for sock in socks:
            selector.register(sock, events)
        # select() yields (SelectorKey, event_mask) pairs; keep only the
        # file objects whose ready mask overlaps the requested events.
        ready = selector.select(timeout)
        return [key.fileobj for key, mask in ready if mask & events]


def wait_for_read(socks, timeout=None):
    """ Waits until reading is possible on a list of sockets (or on a
    single socket passed bare). Returns the sockets that can be read
    from immediately. """
    return _wait_for_io_events(socks, EVENT_READ, timeout)


def wait_for_write(socks, timeout=None):
    """ Waits until writing is possible on a list of sockets (or on a
    single socket passed bare). Returns the sockets that can be written
    to immediately. """
    return _wait_for_io_events(socks, EVENT_WRITE, timeout)
_vendor/urllib3/util/connection.py000064400000010215151733136360013251 0ustar00from __future__ import absolute_import
import socket
from .wait import wait_for_read
from .selectors import HAS_SELECT, SelectorError


def is_connection_dropped(conn):  # Platform-specific
    """
    Returns True if the connection is dropped and should be closed.

    :param conn:
        :class:`httplib.HTTPConnection` object.

    Note: For platforms like AppEngine, this will always return ``False`` to
    let the platform handle connection recycling transparently for us.
    """
    sock = getattr(conn, 'sock', False)
    if sock is False:  # Platform-specific: AppEngine exposes no real socket.
        return False
    if sock is None:
        # Connection already closed (such as by httplib).
        return True

    if not HAS_SELECT:
        return False

    try:
        # A socket readable with zero timeout while no response is pending
        # indicates EOF, i.e. the peer dropped the connection.
        return bool(wait_for_read(sock, timeout=0.0))
    except SelectorError:
        return True


# This function is copied from socket.py in the Python 2.7 standard
# library test suite. Added to its signature is only `socket_options`.
# One additional modification is that we avoid binding to IPv6 servers
# discovered in DNS if the system doesn't have IPv6 functionality.
def create_connection(address, timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
                      source_address=None, socket_options=None):
    """Connect to *address* and return the socket object.

    Convenience function.  Connect to *address* (a 2-tuple ``(host,
    port)``) and return the socket object.  Passing the optional
    *timeout* parameter will set the timeout on the socket instance
    before attempting to connect.  If no *timeout* is supplied, the
    global default timeout setting returned by :func:`getdefaulttimeout`
    is used.  If *source_address* is set it must be a tuple of (host, port)
    for the socket to bind as a source address before making the connection.
    An host of '' or port 0 tells the OS to use the default.

    :param socket_options:
        Iterable of (level, optname, value) tuples applied to the socket
        before connecting.
    :raises socket.error: The last connection error if all candidate
        addresses fail, or if getaddrinfo returns no results.
    """

    host, port = address
    if host.startswith('['):
        # Strip the brackets from IPv6 literals such as "[::1]".
        host = host.strip('[]')
    # Remember the most recent failure so it can be re-raised if every
    # resolved address fails.
    err = None

    # Using the value from allowed_gai_family() in the context of getaddrinfo lets
    # us select whether to work with IPv4 DNS records, IPv6 records, or both.
    # The original create_connection function always returns all records.
    family = allowed_gai_family()

    for res in socket.getaddrinfo(host, port, family, socket.SOCK_STREAM):
        af, socktype, proto, canonname, sa = res
        sock = None
        try:
            sock = socket.socket(af, socktype, proto)

            # If provided, set socket level options before connecting.
            _set_socket_options(sock, socket_options)

            if timeout is not socket._GLOBAL_DEFAULT_TIMEOUT:
                sock.settimeout(timeout)
            if source_address:
                sock.bind(source_address)
            sock.connect(sa)
            # First successfully connected address wins.
            return sock

        except socket.error as e:
            # Remember the failure and fall through to the next candidate.
            err = e
            if sock is not None:
                sock.close()
                sock = None

    if err is not None:
        raise err

    raise socket.error("getaddrinfo returns an empty list")


def _set_socket_options(sock, options):
    """Apply each (level, optname, value) tuple in *options* to *sock*.

    A ``None`` value for *options* means there is nothing to apply.
    """
    if options is None:
        return

    for option_tuple in options:
        sock.setsockopt(*option_tuple)


def allowed_gai_family():
    """This function is designed to work in the context of
    getaddrinfo, where family=socket.AF_UNSPEC is the default and
    will perform a DNS search for both IPv6 and IPv4 records."""

    # Only ask getaddrinfo for IPv6 records when the system can actually
    # use them; otherwise restrict the lookup to IPv4.
    if HAS_IPV6:
        return socket.AF_UNSPEC
    return socket.AF_INET


def _has_ipv6(host):
    """ Returns True if the system can bind an IPv6 address. """
    # socket.has_ipv6 only tells us that cPython was *compiled* with IPv6
    # support.  It does not tell us if the system has IPv6 support enabled;
    # to determine that we must actually bind to an IPv6 address.
    # https://github.com/shazow/urllib3/pull/611
    # https://bugs.python.org/issue658327
    if not socket.has_ipv6:
        return False

    sock = None
    try:
        sock = socket.socket(socket.AF_INET6)
        sock.bind((host, 0))
        return True
    except Exception:
        return False
    finally:
        if sock:
            sock.close()


HAS_IPV6 = _has_ipv6('::1')
_vendor/urllib3/util/retry.py000064400000035400151733136360012262 0ustar00from __future__ import absolute_import
import time
import logging
from collections import namedtuple
from itertools import takewhile
import email
import re

from ..exceptions import (
    ConnectTimeoutError,
    MaxRetryError,
    ProtocolError,
    ReadTimeoutError,
    ResponseError,
    InvalidHeader,
)
from ..packages import six


log = logging.getLogger(__name__)

# Metadata recorded for each request attempt that resulted in a retry.
RequestHistory = namedtuple(
    'RequestHistory',
    ['method', 'url', 'error', 'status', 'redirect_location'],
)


class Retry(object):
    """ Retry configuration.

    Each retry attempt will create a new Retry object with updated values, so
    they can be safely reused.

    Retries can be defined as a default for a pool::

        retries = Retry(connect=5, read=2, redirect=5)
        http = PoolManager(retries=retries)
        response = http.request('GET', 'http://example.com/')

    Or per-request (which overrides the default for the pool)::

        response = http.request('GET', 'http://example.com/', retries=Retry(10))

    Retries can be disabled by passing ``False``::

        response = http.request('GET', 'http://example.com/', retries=False)

    Errors will be wrapped in :class:`~urllib3.exceptions.MaxRetryError` unless
    retries are disabled, in which case the causing exception will be raised.

    :param int total:
        Total number of retries to allow. Takes precedence over other counts.

        Set to ``None`` to remove this constraint and fall back on other
        counts. It's a good idea to set this to some sensibly-high value to
        account for unexpected edge cases and avoid infinite retry loops.

        Set to ``0`` to fail on the first retry.

        Set to ``False`` to disable and imply ``raise_on_redirect=False``.

    :param int connect:
        How many connection-related errors to retry on.

        These are errors raised before the request is sent to the remote server,
        which we assume has not triggered the server to process the request.

        Set to ``0`` to fail on the first retry of this type.

    :param int read:
        How many times to retry on read errors.

        These errors are raised after the request was sent to the server, so the
        request may have side-effects.

        Set to ``0`` to fail on the first retry of this type.

    :param int redirect:
        How many redirects to perform. Limit this to avoid infinite redirect
        loops.

        A redirect is a HTTP response with a status code 301, 302, 303, 307 or
        308.

        Set to ``0`` to fail on the first retry of this type.

        Set to ``False`` to disable and imply ``raise_on_redirect=False``.

    :param int status:
        How many times to retry on bad status codes.

        These are retries made on responses, where status code matches
        ``status_forcelist``.

        Set to ``0`` to fail on the first retry of this type.

    :param iterable method_whitelist:
        Set of uppercased HTTP method verbs that we should retry on.

        By default, we only retry on methods which are considered to be
        idempotent (multiple requests with the same parameters end with the
        same state). See :attr:`Retry.DEFAULT_METHOD_WHITELIST`.

        Set to a ``False`` value to retry on any verb.

    :param iterable status_forcelist:
        A set of integer HTTP status codes that we should force a retry on.
        A retry is initiated if the request method is in ``method_whitelist``
        and the response status code is in ``status_forcelist``.

        By default, this is disabled with ``None``.

    :param float backoff_factor:
        A backoff factor to apply between attempts after the second try
        (most errors are resolved immediately by a second try without a
        delay). urllib3 will sleep for::

            {backoff factor} * (2 ^ ({number of total retries} - 1))

        seconds. If the backoff_factor is 0.1, then :func:`.sleep` will sleep
        for [0.0s, 0.2s, 0.4s, ...] between retries. It will never be longer
        than :attr:`Retry.BACKOFF_MAX`.

        By default, backoff is disabled (set to 0).

    :param bool raise_on_redirect: Whether, if the number of redirects is
        exhausted, to raise a MaxRetryError, or to return a response with a
        response code in the 3xx range.

    :param iterable remove_headers_on_redirect:
        Sequence of headers to remove from the request when a response
        indicating a redirect is returned before firing off the redirected
        request

    :param bool raise_on_status: Similar meaning to ``raise_on_redirect``:
        whether we should raise an exception, or return a response,
        if status falls in ``status_forcelist`` range and retries have
        been exhausted.

    :param tuple history: The history of the request encountered during
        each call to :meth:`~Retry.increment`. The list is in the order
        the requests occurred. Each list item is of class :class:`RequestHistory`.

    :param bool respect_retry_after_header:
        Whether to respect Retry-After header on status codes defined as
        :attr:`Retry.RETRY_AFTER_STATUS_CODES` or not.

    """

    DEFAULT_METHOD_WHITELIST = frozenset([
        'HEAD', 'GET', 'PUT', 'DELETE', 'OPTIONS', 'TRACE'])

    DEFAULT_REDIRECT_HEADERS_BLACKLIST = frozenset(['Authorization'])

    RETRY_AFTER_STATUS_CODES = frozenset([413, 429, 503])

    #: Maximum backoff time.
    BACKOFF_MAX = 120

    def __init__(self, total=10, connect=None, read=None, redirect=None, status=None,
                 method_whitelist=DEFAULT_METHOD_WHITELIST, status_forcelist=None,
                 backoff_factor=0, raise_on_redirect=True, raise_on_status=True,
                 history=None, respect_retry_after_header=True,
                 remove_headers_on_redirect=DEFAULT_REDIRECT_HEADERS_BLACKLIST):

        self.total = total
        self.connect = connect
        self.read = read
        self.status = status

        if redirect is False or total is False:
            redirect = 0
            raise_on_redirect = False

        self.redirect = redirect
        self.status_forcelist = status_forcelist or set()
        self.method_whitelist = method_whitelist
        self.backoff_factor = backoff_factor
        self.raise_on_redirect = raise_on_redirect
        self.raise_on_status = raise_on_status
        self.history = history or tuple()
        self.respect_retry_after_header = respect_retry_after_header
        self.remove_headers_on_redirect = remove_headers_on_redirect

    def new(self, **kw):
        """Return a copy of this Retry with the given attributes overridden.

        Every configuration attribute must be carried over here, otherwise
        it silently reverts to its default on each :meth:`increment` call.
        """
        params = dict(
            total=self.total,
            connect=self.connect, read=self.read, redirect=self.redirect, status=self.status,
            method_whitelist=self.method_whitelist,
            status_forcelist=self.status_forcelist,
            backoff_factor=self.backoff_factor,
            raise_on_redirect=self.raise_on_redirect,
            raise_on_status=self.raise_on_status,
            history=self.history,
            remove_headers_on_redirect=self.remove_headers_on_redirect,
            # Bug fix: this was previously omitted, so a Retry created with
            # respect_retry_after_header=False lost that setting (reverting
            # to True) after the first increment().
            respect_retry_after_header=self.respect_retry_after_header,
        )
        params.update(kw)
        return type(self)(**params)

    @classmethod
    def from_int(cls, retries, redirect=True, default=None):
        """ Backwards-compatibility for the old retries format."""
        if retries is None:
            retries = default if default is not None else cls.DEFAULT

        if isinstance(retries, Retry):
            return retries

        redirect = bool(redirect) and None
        new_retries = cls(retries, redirect=redirect)
        log.debug("Converted retries value: %r -> %r", retries, new_retries)
        return new_retries

    def get_backoff_time(self):
        """ Formula for computing the current backoff

        :rtype: float
        """
        # We want to consider only the last consecutive errors sequence (Ignore redirects).
        consecutive_errors_len = len(list(takewhile(lambda x: x.redirect_location is None,
                                                    reversed(self.history))))
        if consecutive_errors_len <= 1:
            return 0

        backoff_value = self.backoff_factor * (2 ** (consecutive_errors_len - 1))
        return min(self.BACKOFF_MAX, backoff_value)

    def parse_retry_after(self, retry_after):
        """Parse a Retry-After header value into a non-negative number
        of seconds.

        :raises InvalidHeader: If the value is neither an integer nor an
            HTTP date.
        """
        # Whitespace: https://tools.ietf.org/html/rfc7230#section-3.2.4
        if re.match(r"^\s*[0-9]+\s*$", retry_after):
            seconds = int(retry_after)
        else:
            retry_date_tuple = email.utils.parsedate(retry_after)
            if retry_date_tuple is None:
                raise InvalidHeader("Invalid Retry-After header: %s" % retry_after)
            retry_date = time.mktime(retry_date_tuple)
            seconds = retry_date - time.time()

        # A date in the past means "retry now", never a negative sleep.
        if seconds < 0:
            seconds = 0

        return seconds

    def get_retry_after(self, response):
        """ Get the value of Retry-After in seconds. """

        retry_after = response.getheader("Retry-After")

        if retry_after is None:
            return None

        return self.parse_retry_after(retry_after)

    def sleep_for_retry(self, response=None):
        """Sleep for the server-requested Retry-After duration, if any.

        :returns: True if a sleep was performed, False otherwise.
        """
        retry_after = self.get_retry_after(response)
        if retry_after:
            time.sleep(retry_after)
            return True

        return False

    def _sleep_backoff(self):
        # Exponential backoff based on the retry history; 0 means no sleep.
        backoff = self.get_backoff_time()
        if backoff <= 0:
            return
        time.sleep(backoff)

    def sleep(self, response=None):
        """ Sleep between retry attempts.

        This method will respect a server's ``Retry-After`` response header
        and sleep the duration of the time requested. If that is not present, it
        will use an exponential backoff. By default, the backoff factor is 0 and
        this method will return immediately.
        """

        if response:
            slept = self.sleep_for_retry(response)
            if slept:
                return

        self._sleep_backoff()

    def _is_connection_error(self, err):
        """ Errors when we're fairly sure that the server did not receive the
        request, so it should be safe to retry.
        """
        return isinstance(err, ConnectTimeoutError)

    def _is_read_error(self, err):
        """ Errors that occur after the request has been started, so we should
        assume that the server began processing it.
        """
        return isinstance(err, (ReadTimeoutError, ProtocolError))

    def _is_method_retryable(self, method):
        """ Checks if a given HTTP method should be retried upon, depending if
        it is included on the method whitelist.
        """
        if self.method_whitelist and method.upper() not in self.method_whitelist:
            return False

        return True

    def is_retry(self, method, status_code, has_retry_after=False):
        """ Is this method/status code retryable? (Based on whitelists and control
        variables such as the number of total retries to allow, whether to
        respect the Retry-After header, whether this header is present, and
        whether the returned status code is on the list of status codes to
        be retried upon on the presence of the aforementioned header)
        """
        if not self._is_method_retryable(method):
            return False

        if self.status_forcelist and status_code in self.status_forcelist:
            return True

        return (self.total and self.respect_retry_after_header and
                has_retry_after and (status_code in self.RETRY_AFTER_STATUS_CODES))

    def is_exhausted(self):
        """ Are we out of retries? """
        retry_counts = (self.total, self.connect, self.read, self.redirect, self.status)
        # Drop disabled (None/False) counters; exhaustion is signalled by a
        # counter that has been decremented below zero.
        retry_counts = list(filter(None, retry_counts))
        if not retry_counts:
            return False

        return min(retry_counts) < 0

    def increment(self, method=None, url=None, response=None, error=None,
                  _pool=None, _stacktrace=None):
        """ Return a new Retry object with incremented retry counters.

        :param response: A response object, or None, if the server did not
            return a response.
        :type response: :class:`~urllib3.response.HTTPResponse`
        :param Exception error: An error encountered during the request, or
            None if the response was received successfully.

        :return: A new ``Retry`` object.
        :raises MaxRetryError: If the new counters are exhausted.
        """
        if self.total is False and error:
            # Disabled, indicate to re-raise the error.
            raise six.reraise(type(error), error, _stacktrace)

        total = self.total
        if total is not None:
            total -= 1

        connect = self.connect
        read = self.read
        redirect = self.redirect
        status_count = self.status
        cause = 'unknown'
        status = None
        redirect_location = None

        if error and self._is_connection_error(error):
            # Connect retry?
            if connect is False:
                raise six.reraise(type(error), error, _stacktrace)
            elif connect is not None:
                connect -= 1

        elif error and self._is_read_error(error):
            # Read retry?
            if read is False or not self._is_method_retryable(method):
                raise six.reraise(type(error), error, _stacktrace)
            elif read is not None:
                read -= 1

        elif response and response.get_redirect_location():
            # Redirect retry?
            if redirect is not None:
                redirect -= 1
            cause = 'too many redirects'
            redirect_location = response.get_redirect_location()
            status = response.status

        else:
            # Incrementing because of a server error like a 500 in
            # status_forcelist and a the given method is in the whitelist
            cause = ResponseError.GENERIC_ERROR
            if response and response.status:
                if status_count is not None:
                    status_count -= 1
                cause = ResponseError.SPECIFIC_ERROR.format(
                    status_code=response.status)
                status = response.status

        history = self.history + (RequestHistory(method, url, error, status, redirect_location),)

        new_retry = self.new(
            total=total,
            connect=connect, read=read, redirect=redirect, status=status_count,
            history=history)

        if new_retry.is_exhausted():
            raise MaxRetryError(_pool, url, error or ResponseError(cause))

        log.debug("Incremented Retry for (url='%s'): %r", url, new_retry)

        return new_retry

    def __repr__(self):
        return ('{cls.__name__}(total={self.total}, connect={self.connect}, '
                'read={self.read}, redirect={self.redirect}, status={self.status})').format(
                    cls=type(self), self=self)


# For backwards compatibility (equivalent to pre-v1.9):
Retry.DEFAULT = Retry(3)
_vendor/urllib3/util/request.py000064400000007171151733136360012611 0ustar00from __future__ import absolute_import
from base64 import b64encode

from ..packages.six import b, integer_types
from ..exceptions import UnrewindableBodyError

# Default value used for the accept-encoding header when the caller passes
# accept_encoding=True to make_headers().
ACCEPT_ENCODING = 'gzip,deflate'
# Sentinel recorded by set_file_position() when body.tell() raised, so a
# later rewind attempt can distinguish "tell failed" from "no position".
_FAILEDTELL = object()


def make_headers(keep_alive=None, accept_encoding=None, user_agent=None,
                 basic_auth=None, proxy_basic_auth=None, disable_cache=None):
    """
    Shortcuts for generating request headers.

    :param keep_alive:
        If ``True``, adds 'connection: keep-alive' header.

    :param accept_encoding:
        Can be a boolean, list, or string.
        ``True`` translates to 'gzip,deflate'.
        List will get joined by comma.
        String will be used as provided.

    :param user_agent:
        String representing the user-agent you want, such as
        "python-urllib3/0.6"

    :param basic_auth:
        Colon-separated username:password string for 'authorization: basic ...'
        auth header.

    :param proxy_basic_auth:
        Colon-separated username:password string for 'proxy-authorization: basic ...'
        auth header.

    :param disable_cache:
        If ``True``, adds 'cache-control: no-cache' header.

    Example::

        >>> make_headers(keep_alive=True, user_agent="Batman/1.0")
        {'connection': 'keep-alive', 'user-agent': 'Batman/1.0'}
        >>> make_headers(accept_encoding=True)
        {'accept-encoding': 'gzip,deflate'}
    """
    headers = {}

    if accept_encoding:
        # Normalize list/bool forms to a single comma-separated string.
        if isinstance(accept_encoding, list):
            encoding_value = ','.join(accept_encoding)
        elif isinstance(accept_encoding, str):
            encoding_value = accept_encoding
        else:
            encoding_value = ACCEPT_ENCODING
        headers['accept-encoding'] = encoding_value

    if user_agent:
        headers['user-agent'] = user_agent

    if keep_alive:
        headers['connection'] = 'keep-alive'

    if basic_auth:
        headers['authorization'] = (
            'Basic ' + b64encode(b(basic_auth)).decode('utf-8'))

    if proxy_basic_auth:
        headers['proxy-authorization'] = (
            'Basic ' + b64encode(b(proxy_basic_auth)).decode('utf-8'))

    if disable_cache:
        headers['cache-control'] = 'no-cache'

    return headers


def set_file_position(body, pos):
    """
    If a position is provided, move file to that point.
    Otherwise, we'll attempt to record a position for future use.
    """
    if pos is not None:
        rewind_body(body, pos)
        return pos

    if getattr(body, 'tell', None) is None:
        # Not seekable/tellable; nothing to record.
        return None

    try:
        return body.tell()
    except (IOError, OSError):
        # This differentiates from None, allowing us to catch
        # a failed `tell()` later when trying to rewind the body.
        return _FAILEDTELL


def rewind_body(body, body_pos):
    """
    Attempt to rewind body to a certain position.
    Primarily used for request redirects and retries.

    :param body:
        File-like object that supports seek.

    :param int pos:
        Position to seek to in file.
    """
    seek_fn = getattr(body, 'seek', None)
    if seek_fn is not None and isinstance(body_pos, integer_types):
        try:
            seek_fn(body_pos)
        except (IOError, OSError):
            raise UnrewindableBodyError("An error occurred when rewinding request "
                                        "body for redirect/retry.")
        return

    if body_pos is _FAILEDTELL:
        # set_file_position() recorded that tell() failed earlier.
        raise UnrewindableBodyError("Unable to record file position for rewinding "
                                    "request body during a redirect/retry.")

    raise ValueError("body_pos must be of type integer, "
                     "instead it was %s." % type(body_pos))
_vendor/urllib3/util/url.py000064400000015216151733136360011722 0ustar00from __future__ import absolute_import
from collections import namedtuple
import re

from ..exceptions import LocationParseError


url_attrs = ['scheme', 'auth', 'host', 'port', 'path', 'query', 'fragment']

# We only want to normalize urls with an HTTP(S) scheme.
# urllib3 infers URLs without a scheme (None) to be http.
NORMALIZABLE_SCHEMES = ('http', 'https', None)

_contains_disallowed_url_pchar_re = re.compile('[\x00-\x20\x7f]')
from ..packages.six.moves.urllib.parse import quote

class Url(namedtuple('Url', url_attrs)):
    """
    Immutable datastructure representing an HTTP URL; returned by
    :func:`parse_url`. The scheme and host are stored lower-cased, since
    both are case-insensitive according to RFC 3986.
    """
    __slots__ = ()

    def __new__(cls, scheme=None, auth=None, host=None, port=None, path=None,
                query=None, fragment=None):
        # Normalize on construction: non-empty paths always begin with '/',
        # and the case-insensitive components are folded to lower case.
        if path and not path.startswith('/'):
            path = '/' + path
        if scheme:
            scheme = scheme.lower()
        if host and scheme in NORMALIZABLE_SCHEMES:
            host = host.lower()
        return super(Url, cls).__new__(cls, scheme, auth, host, port, path,
                                       query, fragment)

    @property
    def hostname(self):
        """Alias of :attr:`host`, kept for urlparse backwards-compatibility."""
        return self.host

    @property
    def request_uri(self):
        """Absolute path including the query string."""
        if self.query is None:
            return self.path or '/'
        return (self.path or '/') + '?' + self.query

    @property
    def netloc(self):
        """Network location including host and port"""
        if not self.port:
            return self.host
        return '%s:%d' % (self.host, self.port)

    @property
    def url(self):
        """
        Convert self into a url

        This function should more or less round-trip with :func:`.parse_url`. The
        returned url may not be exactly the same as the url inputted to
        :func:`.parse_url`, but it should be equivalent by the RFC (e.g., urls
        with a blank port will have : removed).

        Example: ::

            >>> U = parse_url('http://google.com/mail/')
            >>> U.url
            'http://google.com/mail/'
            >>> Url('http', 'username:password', 'host.com', 80,
            ... '/path', 'query', 'fragment').url
            'http://username:password@host.com:80/path?query#fragment'
        """
        scheme, auth, host, port, path, query, fragment = self
        pieces = []

        # "is not None" checks deliberately let empty strings (and a 0 port)
        # through, so they still appear in the assembled url.
        if scheme is not None:
            pieces.append(scheme + '://')
        if auth is not None:
            pieces.append(auth + '@')
        if host is not None:
            pieces.append(host)
        if port is not None:
            pieces.append(':' + str(port))
        if path is not None:
            pieces.append(path)
        if query is not None:
            pieces.append('?' + query)
        if fragment is not None:
            pieces.append('#' + fragment)

        return ''.join(pieces)

    def __str__(self):
        return self.url


def split_first(s, delims):
    """
    Split ``s`` on whichever delimiter from ``delims`` occurs earliest,
    returning ``(head, tail, delimiter)``.

    If no delimiter is found, the head is the full input string.

    Example::

        >>> split_first('foo/bar?baz', '?/=')
        ('foo', 'bar?baz', '/')
        >>> split_first('foo/bar?baz', '123')
        ('foo/bar?baz', '', None)

    Scales linearly with number of delims. Not ideal for large number of delims.
    """
    best_idx = None
    best_delim = None
    for delim in delims:
        found = s.find(delim)
        # find() returns -1 when absent; keep the earliest hit seen so far.
        if found >= 0 and (best_idx is None or found < best_idx):
            best_idx = found
            best_delim = delim

    if best_idx is None:
        return s, '', None

    return s[:best_idx], s[best_idx + 1:], best_delim


def parse_url(url):
    """
    Given a url, return a parsed :class:`.Url` namedtuple. Best-effort is
    performed to parse incomplete urls. Fields not provided will be None.

    Partly backwards-compatible with :mod:`urlparse`.

    Example::

        >>> parse_url('http://google.com/mail/')
        Url(scheme='http', host='google.com', port=None, path='/mail/', ...)
        >>> parse_url('google.com:80')
        Url(scheme=None, host='google.com', port=80, path=None, ...)
        >>> parse_url('/foo?bar')
        Url(scheme=None, host=None, port=None, path='/foo', query='bar', ...)
    """

    # While this code has overlap with stdlib's urlparse, it is much
    # simplified for our needs and less annoying.
    # Additionally, this implementation does silly things to be optimal
    # on CPython.

    if not url:
        # Empty
        return Url()

    # Prevent CVE-2019-9740: percent-encode control characters and spaces
    # (\x00-\x20, \x7f) so they cannot smuggle extra data into the request.
    # adapted from https://github.com/python/cpython/pull/12755
    url = _contains_disallowed_url_pchar_re.sub(lambda match: quote(match.group()), url)

    scheme = None
    auth = None
    host = None
    port = None
    path = None
    fragment = None
    query = None

    # Scheme
    if '://' in url:
        scheme, url = url.split('://', 1)

    # Find the earliest Authority Terminator
    # (http://tools.ietf.org/html/rfc3986#section-3.2)
    url, path_, delim = split_first(url, ['/', '?', '#'])

    if delim:
        # Reassemble the path: split_first consumed the delimiter itself.
        path = delim + path_

    # Auth
    if '@' in url:
        # Last '@' denotes end of auth part
        auth, url = url.rsplit('@', 1)

    # IPv6: bracketed literal, e.g. '[::1]'; restore the ']' that the
    # split consumed.
    if url and url[0] == '[':
        host, url = url.split(']', 1)
        host += ']'

    # Port
    if ':' in url:
        _host, port = url.split(':', 1)

        if not host:
            host = _host

        if port:
            # If given, ports must be integers. No whitespace, no plus or
            # minus prefixes, no non-integer digits such as ^2 (superscript).
            if not port.isdigit():
                raise LocationParseError(url)
            try:
                # isdigit() accepts some Unicode digits (e.g. superscripts)
                # that int() rejects, so ValueError is still possible here.
                port = int(port)
            except ValueError:
                raise LocationParseError(url)
        else:
            # Blank ports are cool, too. (rfc3986#section-3.2.3)
            port = None

    elif not host and url:
        host = url

    if not path:
        # Everything after the authority lives in `path` at this point, so
        # with no path there can be no query or fragment either.
        return Url(scheme, auth, host, port, path, query, fragment)

    # Fragment
    if '#' in path:
        path, fragment = path.split('#', 1)

    # Query
    if '?' in path:
        path, query = path.split('?', 1)

    return Url(scheme, auth, host, port, path, query, fragment)


def get_host(url):
    """
    Deprecated. Use :func:`parse_url` instead.
    """
    parsed = parse_url(url)
    scheme = parsed.scheme or 'http'
    return scheme, parsed.hostname, parsed.port
_vendor/urllib3/util/selectors.py000064400000051233151733136360013122 0ustar00# Backport of selectors.py from Python 3.5+ to support Python < 3.4
# Also has the behavior specified in PEP 475 which is to retry syscalls
# in the case of an EINTR error. This module is required because selectors34
# does not follow this behavior and instead returns that no file descriptor
# events have occurred rather than retry the syscall. The decision to drop
# support for select.devpoll is made to maintain 100% test coverage.

import errno
import math
import select
import socket
import sys
import time
from collections import namedtuple, Mapping

try:
    monotonic = time.monotonic
except (AttributeError, ImportError):  # Python 3.3<
    monotonic = time.time

EVENT_READ = (1 << 0)
EVENT_WRITE = (1 << 1)

HAS_SELECT = True  # Variable that shows whether the platform has a selector.
_SYSCALL_SENTINEL = object()  # Sentinel in case a system call returns None.
_DEFAULT_SELECTOR = None


class SelectorError(Exception):
    """Raised when an underlying select syscall fails; carries the errno."""

    def __init__(self, errcode):
        super(SelectorError, self).__init__()
        # The errno of the failed syscall (may be None when unknown).
        self.errno = errcode

    def __repr__(self):
        return "<SelectorError errno={0}>".format(self.errno)

    def __str__(self):
        return repr(self)


def _fileobj_to_fd(fileobj):
    """ Return a file descriptor from a file object. If
    given an integer will simply return that integer back. """
    if not isinstance(fileobj, int):
        try:
            fd = int(fileobj.fileno())
        except (AttributeError, TypeError, ValueError):
            raise ValueError("Invalid file object: {0!r}".format(fileobj))
    else:
        fd = fileobj

    # Descriptors are non-negative by definition.
    if fd < 0:
        raise ValueError("Invalid file descriptor: {0}".format(fd))
    return fd


# Determine which function to use to wrap system calls because Python 3.5+
# already handles the case when system calls are interrupted.
if sys.version_info >= (3, 5):
    def _syscall_wrapper(func, _, *args, **kwargs):
        """ This is the short-circuit version of the below logic
        because in Python 3.5+ all system calls automatically restart
        and recalculate their timeouts. """
        try:
            return func(*args, **kwargs)
        except (OSError, IOError, select.error) as e:
            # Normalize every syscall failure into a SelectorError carrying
            # the underlying errno (or None when it is unavailable).
            errcode = None
            if hasattr(e, "errno"):
                errcode = e.errno
            raise SelectorError(errcode)
else:
    def _syscall_wrapper(func, recalc_timeout, *args, **kwargs):
        """ Wrapper function for syscalls that could fail due to EINTR.
        All functions should be retried if there is time left in the timeout
        in accordance with PEP 475. """
        timeout = kwargs.get("timeout", None)
        if timeout is None:
            expires = None
            recalc_timeout = False
        else:
            timeout = float(timeout)
            if timeout < 0.0:  # Timeout less than 0 treated as no timeout.
                expires = None
            else:
                expires = monotonic() + timeout

        args = list(args)
        if recalc_timeout and "timeout" not in kwargs:
            raise ValueError(
                "Timeout must be in args or kwargs to be recalculated")

        result = _SYSCALL_SENTINEL
        while result is _SYSCALL_SENTINEL:
            try:
                result = func(*args, **kwargs)
            # OSError is thrown by select.select
            # IOError is thrown by select.epoll.poll
            # select.error is thrown by select.poll.poll
            # Aren't we thankful for Python 3.x rework for exceptions?
            except (OSError, IOError, select.error) as e:
                # select.error wasn't a subclass of OSError in the past.
                errcode = None
                if hasattr(e, "errno"):
                    errcode = e.errno
                elif hasattr(e, "args"):
                    errcode = e.args[0]

                # Also test for the Windows equivalent of EINTR.
                is_interrupt = (errcode == errno.EINTR or (hasattr(errno, "WSAEINTR") and
                                                           errcode == errno.WSAEINTR))

                if is_interrupt:
                    if expires is not None:
                        current_time = monotonic()
                        if current_time > expires:
                            # Bug fix: OSError accepts (errno, strerror)
                            # positionally only; OSError(errno=...) raised
                            # TypeError instead of the intended timeout error.
                            raise OSError(errno.ETIMEDOUT,
                                          "Connection timed out")
                        if recalc_timeout:
                            if "timeout" in kwargs:
                                kwargs["timeout"] = expires - current_time
                    continue
                if errcode:
                    raise SelectorError(errcode)
                else:
                    raise
        return result


SelectorKey = namedtuple('SelectorKey', ['fileobj', 'fd', 'events', 'data'])


class _SelectorMapping(Mapping):
    """ Read-only view of a selector's registrations, mapping each
    registered file object to its SelectorKey. """

    def __init__(self, selector):
        self._selector = selector

    def __len__(self):
        return len(self._selector._fd_to_key)

    def __getitem__(self, fileobj):
        try:
            fd = self._selector._fileobj_lookup(fileobj)
            return self._selector._fd_to_key[fd]
        except KeyError:
            # Re-raise with a friendlier message naming the file object.
            raise KeyError("{0!r} is not registered.".format(fileobj))

    def __iter__(self):
        # Iterates file descriptors, mirroring the selector's internal map.
        return iter(self._selector._fd_to_key)


class BaseSelector(object):
    """ Abstract Selector class

    A selector supports registering file objects to be monitored
    for specific I/O events.

    A file object is a file descriptor or any object with a
    `fileno()` method. An arbitrary object can be attached to the
    file object which can be used for example to store context info,
    a callback, etc.

    A selector can use various implementations (select(), poll(), epoll(),
    and kqueue()) depending on the platform. The 'DefaultSelector' class uses
    the most efficient implementation for the current platform.
    """
    def __init__(self):
        # Maps file descriptors to keys.
        self._fd_to_key = {}

        # Read-only mapping returned by get_map()
        self._map = _SelectorMapping(self)

    def _fileobj_lookup(self, fileobj):
        """ Return a file descriptor from a file object.
        This wraps _fileobj_to_fd() to do an exhaustive
        search in case the object is invalid but we still
        have it in our map. Used by unregister() so we can
        unregister an object that was previously registered
        even if it is closed. It is also used by _SelectorMapping
        """
        try:
            return _fileobj_to_fd(fileobj)
        except ValueError:

            # Search through all our mapped keys.
            for key in self._fd_to_key.values():
                if key.fileobj is fileobj:
                    return key.fd

            # Raise ValueError after all.
            raise

    def register(self, fileobj, events, data=None):
        """ Register a file object for a set of events to monitor.

        Raises ValueError for an empty mask or for bits outside
        EVENT_READ | EVENT_WRITE, and KeyError if the fd is already
        registered. Returns the new SelectorKey. """
        if (not events) or (events & ~(EVENT_READ | EVENT_WRITE)):
            raise ValueError("Invalid events: {0!r}".format(events))

        key = SelectorKey(fileobj, self._fileobj_lookup(fileobj), events, data)

        if key.fd in self._fd_to_key:
            raise KeyError("{0!r} (FD {1}) is already registered"
                           .format(fileobj, key.fd))

        self._fd_to_key[key.fd] = key
        return key

    def unregister(self, fileobj):
        """ Unregister a file object from being monitored.

        Returns the SelectorKey that was removed; raises KeyError when the
        object was never registered. """
        try:
            key = self._fd_to_key.pop(self._fileobj_lookup(fileobj))
        except KeyError:
            raise KeyError("{0!r} is not registered".format(fileobj))

        # Getting the fileno of a closed socket on Windows errors with EBADF.
        except socket.error as e:  # Platform-specific: Windows.
            if e.errno != errno.EBADF:
                raise
            else:
                # Fall back to an identity scan, since the fd can no longer
                # be derived from the closed socket object itself.
                for key in self._fd_to_key.values():
                    if key.fileobj is fileobj:
                        self._fd_to_key.pop(key.fd)
                        break
                else:
                    raise KeyError("{0!r} is not registered".format(fileobj))
        return key

    def modify(self, fileobj, events, data=None):
        """ Change a registered file object monitored events and data. """
        # NOTE: Some subclasses optimize this operation even further.
        try:
            key = self._fd_to_key[self._fileobj_lookup(fileobj)]
        except KeyError:
            raise KeyError("{0!r} is not registered".format(fileobj))

        if events != key.events:
            # Event mask changed: re-register so subclasses update their
            # platform-specific structures too.
            self.unregister(fileobj)
            key = self.register(fileobj, events, data)

        elif data != key.data:
            # Use a shortcut to update the data.
            key = key._replace(data=data)
            self._fd_to_key[key.fd] = key

        return key

    def select(self, timeout=None):
        """ Perform the actual selection until some monitored file objects
        are ready or the timeout expires. """
        raise NotImplementedError()

    def close(self):
        """ Close the selector. This must be called to ensure that all
        underlying resources are freed. """
        self._fd_to_key.clear()
        # get_key() uses self._map being None to detect a closed selector.
        self._map = None

    def get_key(self, fileobj):
        """ Return the key associated with a registered file object. """
        mapping = self.get_map()
        if mapping is None:
            raise RuntimeError("Selector is closed")
        try:
            return mapping[fileobj]
        except KeyError:
            raise KeyError("{0!r} is not registered".format(fileobj))

    def get_map(self):
        """ Return a mapping of file objects to selector keys """
        return self._map

    def _key_from_fd(self, fd):
        """ Return the key associated to a given file descriptor
         Return None if it is not found. """
        try:
            return self._fd_to_key[fd]
        except KeyError:
            return None

    def __enter__(self):
        return self

    def __exit__(self, *args):
        self.close()


# Almost all platforms have select.select()
if hasattr(select, "select"):
    class SelectSelector(BaseSelector):
        """ Select-based selector. """
        def __init__(self):
            super(SelectSelector, self).__init__()
            # Track read and write interest as separate fd sets, the shape
            # select.select() expects.
            self._readers = set()
            self._writers = set()

        def register(self, fileobj, events, data=None):
            key = super(SelectSelector, self).register(fileobj, events, data)
            if events & EVENT_READ:
                self._readers.add(key.fd)
            if events & EVENT_WRITE:
                self._writers.add(key.fd)
            return key

        def unregister(self, fileobj):
            key = super(SelectSelector, self).unregister(fileobj)
            self._readers.discard(key.fd)
            self._writers.discard(key.fd)
            return key

        def _select(self, r, w, timeout=None):
            """ Wrapper for select.select because timeout is a positional arg """
            return select.select(r, w, [], timeout)

        def select(self, timeout=None):
            # Selecting on empty lists on Windows errors out.
            if not self._readers and not self._writers:
                return []

            if timeout is not None:
                timeout = max(timeout, 0.0)

            readable, writable, _ = _syscall_wrapper(
                self._select, True, self._readers, self._writers, timeout)
            readable = set(readable)
            writable = set(writable)

            ready = []
            for fd in readable | writable:
                mask = 0
                if fd in readable:
                    mask |= EVENT_READ
                if fd in writable:
                    mask |= EVENT_WRITE

                key = self._key_from_fd(fd)
                if key:
                    # Only report events the caller registered interest in.
                    ready.append((key, mask & key.events))
            return ready


if hasattr(select, "poll"):
    class PollSelector(BaseSelector):
        """ Poll-based selector """
        def __init__(self):
            super(PollSelector, self).__init__()
            self._poll = select.poll()

        def register(self, fileobj, events, data=None):
            key = super(PollSelector, self).register(fileobj, events, data)
            mask = 0
            if events & EVENT_READ:
                mask |= select.POLLIN
            if events & EVENT_WRITE:
                mask |= select.POLLOUT
            self._poll.register(key.fd, mask)
            return key

        def unregister(self, fileobj):
            key = super(PollSelector, self).unregister(fileobj)
            self._poll.unregister(key.fd)
            return key

        def _wrap_poll(self, timeout=None):
            """ Wrapper function for select.poll.poll() so that
            _syscall_wrapper can work with only seconds. """
            if timeout is not None:
                # poll() takes milliseconds; clamp negatives to zero and
                # round up so we wait *at least* the requested time.
                timeout = 0 if timeout <= 0 else math.ceil(timeout * 1e3)
            return self._poll.poll(timeout)

        def select(self, timeout=None):
            ready = []
            for fd, mask in _syscall_wrapper(self._wrap_poll, True,
                                             timeout=timeout):
                events = 0
                if mask & ~select.POLLIN:
                    events |= EVENT_WRITE
                if mask & ~select.POLLOUT:
                    events |= EVENT_READ

                key = self._key_from_fd(fd)
                if key:
                    ready.append((key, events & key.events))
            return ready


if hasattr(select, "epoll"):
    class EpollSelector(BaseSelector):
        """ Epoll-based selector """
        def __init__(self):
            super(EpollSelector, self).__init__()
            self._epoll = select.epoll()

        def fileno(self):
            return self._epoll.fileno()

        def register(self, fileobj, events, data=None):
            key = super(EpollSelector, self).register(fileobj, events, data)
            mask = 0
            if events & EVENT_READ:
                mask |= select.EPOLLIN
            if events & EVENT_WRITE:
                mask |= select.EPOLLOUT
            _syscall_wrapper(self._epoll.register, False, key.fd, mask)
            return key

        def unregister(self, fileobj):
            key = super(EpollSelector, self).unregister(fileobj)
            try:
                _syscall_wrapper(self._epoll.unregister, False, key.fd)
            except SelectorError:
                # This can occur when the fd was closed since registry.
                pass
            return key

        def select(self, timeout=None):
            if timeout is None:
                timeout = -1.0  # epoll.poll() must have a float.
            elif timeout <= 0:
                timeout = 0.0
            else:
                # epoll.poll() has millisecond resolution but takes seconds;
                # round up to the next whole millisecond.
                timeout = float(math.ceil(timeout * 1e3) * 1e-3)

            # Ask for at least one event so that epoll.poll() can be called
            # even when no file descriptors are registered.
            max_events = max(len(self._fd_to_key), 1)

            ready = []
            fd_events = _syscall_wrapper(self._epoll.poll, True,
                                         timeout=timeout,
                                         maxevents=max_events)
            for fd, mask in fd_events:
                events = 0
                if mask & ~select.EPOLLIN:
                    events |= EVENT_WRITE
                if mask & ~select.EPOLLOUT:
                    events |= EVENT_READ

                key = self._key_from_fd(fd)
                if key:
                    ready.append((key, events & key.events))
            return ready

        def close(self):
            self._epoll.close()
            super(EpollSelector, self).close()


if hasattr(select, "kqueue"):
    class KqueueSelector(BaseSelector):
        """ Kqueue / Kevent-based selector """
        def __init__(self):
            super(KqueueSelector, self).__init__()
            self._kqueue = select.kqueue()

        def fileno(self):
            # Expose the kqueue's own descriptor.
            return self._kqueue.fileno()

        def register(self, fileobj, events, data=None):
            key = super(KqueueSelector, self).register(fileobj, events, data)
            # kqueue tracks read and write interest as two separate kevents.
            if events & EVENT_READ:
                kevent = select.kevent(key.fd,
                                       select.KQ_FILTER_READ,
                                       select.KQ_EV_ADD)

                _syscall_wrapper(self._kqueue.control, False, [kevent], 0, 0)

            if events & EVENT_WRITE:
                kevent = select.kevent(key.fd,
                                       select.KQ_FILTER_WRITE,
                                       select.KQ_EV_ADD)

                _syscall_wrapper(self._kqueue.control, False, [kevent], 0, 0)

            return key

        def unregister(self, fileobj):
            key = super(KqueueSelector, self).unregister(fileobj)
            if key.events & EVENT_READ:
                kevent = select.kevent(key.fd,
                                       select.KQ_FILTER_READ,
                                       select.KQ_EV_DELETE)
                try:
                    _syscall_wrapper(self._kqueue.control, False, [kevent], 0, 0)
                except SelectorError:
                    # Deleting is best-effort: the fd may already be closed.
                    pass
            if key.events & EVENT_WRITE:
                kevent = select.kevent(key.fd,
                                       select.KQ_FILTER_WRITE,
                                       select.KQ_EV_DELETE)
                try:
                    _syscall_wrapper(self._kqueue.control, False, [kevent], 0, 0)
                except SelectorError:
                    # See above: best-effort delete.
                    pass

            return key

        def select(self, timeout=None):
            if timeout is not None:
                timeout = max(timeout, 0)

            # Each registered fd can fire one read and one write kevent.
            max_events = len(self._fd_to_key) * 2
            ready_fds = {}

            kevent_list = _syscall_wrapper(self._kqueue.control, True,
                                           None, max_events, timeout)

            for kevent in kevent_list:
                fd = kevent.ident
                event_mask = kevent.filter
                events = 0
                if event_mask == select.KQ_FILTER_READ:
                    events |= EVENT_READ
                if event_mask == select.KQ_FILTER_WRITE:
                    events |= EVENT_WRITE

                key = self._key_from_fd(fd)
                if key:
                    # Coalesce the separate read/write kevents for the same
                    # fd into a single (key, events) result entry.
                    if key.fd not in ready_fds:
                        ready_fds[key.fd] = (key, events & key.events)
                    else:
                        old_events = ready_fds[key.fd][1]
                        ready_fds[key.fd] = (key, (events | old_events) & key.events)

            return list(ready_fds.values())

        def close(self):
            self._kqueue.close()
            super(KqueueSelector, self).close()


# Some platforms (e.g. Google App Engine) expose a select module without
# select(); advertise that no selector implementation is usable there.
if not hasattr(select, 'select'):  # Platform-specific: AppEngine
    HAS_SELECT = False


def _can_allocate(struct):
    """ Checks that select structs can be allocated by the underlying
    operating system, not just advertised by the select module. We don't
    check select() because we'll be hopeful that most platforms that
    don't have it available will not advertise it. (ie: GAE)

    :param struct: name of the ``select`` attribute to probe
        ('poll', 'epoll', 'kqueue', ...).
    :returns: True if the structure can actually be allocated.
    """
    try:
        # select.poll() objects won't fail until used.
        if struct == 'poll':
            p = select.poll()
            p.poll(0)

        # All others will fail on allocation.
        else:
            getattr(select, struct)().close()
        return True
    except (OSError, AttributeError):
        # Either the OS refused the allocation, or the select module does
        # not expose this structure at all. (The exception value was bound
        # to an unused name before; it is intentionally discarded now.)
        return False


# Choose the best implementation, roughly:
# kqueue == epoll > poll > select. Devpoll not supported. (See above)
# select() also can't accept a FD > FD_SETSIZE (usually around 1024)
def DefaultSelector():
    """ Return an instance of the most capable selector available.

    The choice is made lazily on first call so that monkey-patching of
    the select module (eventlet, greenlet) is detected and proper
    behavior preserved. """
    global _DEFAULT_SELECTOR
    if _DEFAULT_SELECTOR is not None:
        return _DEFAULT_SELECTOR()

    if _can_allocate('kqueue'):
        selector_cls = KqueueSelector
    elif _can_allocate('epoll'):
        selector_cls = EpollSelector
    elif _can_allocate('poll'):
        selector_cls = PollSelector
    elif hasattr(select, 'select'):
        selector_cls = SelectSelector
    else:  # Platform-specific: AppEngine
        raise ValueError('Platform does not have a selector')

    _DEFAULT_SELECTOR = selector_cls
    return _DEFAULT_SELECTOR()
_vendor/urllib3/connectionpool.py000064400000105036151733136360013174 0ustar00from __future__ import absolute_import
import errno
import logging
import sys
import warnings

from socket import error as SocketError, timeout as SocketTimeout
import socket


from .exceptions import (
    ClosedPoolError,
    ProtocolError,
    EmptyPoolError,
    HeaderParsingError,
    HostChangedError,
    LocationValueError,
    MaxRetryError,
    ProxyError,
    ReadTimeoutError,
    SSLError,
    TimeoutError,
    InsecureRequestWarning,
    NewConnectionError,
)
from .packages.ssl_match_hostname import CertificateError
from .packages import six
from .packages.six.moves import queue
from .connection import (
    port_by_scheme,
    DummyConnection,
    HTTPConnection, HTTPSConnection, VerifiedHTTPSConnection,
    HTTPException, BaseSSLError,
)
from .request import RequestMethods
from .response import HTTPResponse

from .util.connection import is_connection_dropped
from .util.request import set_file_position
from .util.response import assert_header_parsing
from .util.retry import Retry
from .util.timeout import Timeout
from .util.url import get_host, Url


if six.PY2:
    # Queue is imported for side effects on MS Windows
    import Queue as _unused_module_Queue  # noqa: F401

# Py2/Py3 compatibility alias used by the pool code below.
xrange = six.moves.xrange

log = logging.getLogger(__name__)

# Sentinel distinguishing "argument not supplied" from an explicit None.
_Default = object()


# Pool objects
class ConnectionPool(object):
    """
    Base class for all connection pools, such as
    :class:`.HTTPConnectionPool` and :class:`.HTTPSConnectionPool`.
    """

    scheme = None
    QueueCls = queue.LifoQueue

    def __init__(self, host, port=None):
        if not host:
            raise LocationValueError("No host specified.")

        self.host = _ipv6_host(host).lower()
        self._proxy_host = host.lower()
        self.port = port

    def __str__(self):
        return '%s(host=%r, port=%r)' % (type(self).__name__,
                                         self.host, self.port)

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.close()
        # Return False to re-raise any potential exceptions
        return False

    def close(self):
        """
        Close all pooled connections and disable the pool.
        """
        pass


# This is taken from http://hg.python.org/cpython/file/7aaba721ebc0/Lib/socket.py#l252
# errnos indicating that a non-blocking socket simply has no data ready yet.
_blocking_errnos = set([errno.EAGAIN, errno.EWOULDBLOCK])


class HTTPConnectionPool(ConnectionPool, RequestMethods):
    """
    Thread-safe connection pool for one host.

    :param host:
        Host used for this HTTP Connection (e.g. "localhost"), passed into
        :class:`httplib.HTTPConnection`.

    :param port:
        Port used for this HTTP Connection (None is equivalent to 80), passed
        into :class:`httplib.HTTPConnection`.

    :param strict:
        Causes BadStatusLine to be raised if the status line can't be parsed
        as a valid HTTP/1.0 or 1.1 status line, passed into
        :class:`httplib.HTTPConnection`.

        .. note::
           Only works in Python 2. This parameter is ignored in Python 3.

    :param timeout:
        Socket timeout in seconds for each individual connection. This can
        be a float or integer, which sets the timeout for the HTTP request,
        or an instance of :class:`urllib3.util.Timeout` which gives you more
        fine-grained control over request timeouts. After the constructor has
        been parsed, this is always a `urllib3.util.Timeout` object.

    :param maxsize:
        Number of connections to save that can be reused. More than 1 is useful
        in multithreaded situations. If ``block`` is set to False, more
        connections will be created but they will not be saved once they've
        been used.

    :param block:
        If set to True, no more than ``maxsize`` connections will be used at
        a time. When no free connections are available, the call will block
        until a connection has been released. This is a useful side effect for
        particular multithreaded situations where one does not want to use more
        than maxsize connections per host to prevent flooding.

    :param headers:
        Headers to include with all requests, unless other headers are given
        explicitly.

    :param retries:
        Retry configuration to use by default with requests in this pool.

    :param _proxy:
        Parsed proxy URL, should not be used directly, instead, see
        :class:`urllib3.connectionpool.ProxyManager`"

    :param _proxy_headers:
        A dictionary with proxy headers, should not be used directly,
        instead, see :class:`urllib3.connectionpool.ProxyManager`"

    :param \\**conn_kw:
        Additional parameters are used to create fresh :class:`urllib3.connection.HTTPConnection`,
        :class:`urllib3.connection.HTTPSConnection` instances.
    """

    # Overridden by HTTPSConnectionPool to provide TLS behaviour.
    scheme = 'http'
    ConnectionCls = HTTPConnection
    ResponseCls = HTTPResponse

    def __init__(self, host, port=None, strict=False,
                 timeout=Timeout.DEFAULT_TIMEOUT, maxsize=1, block=False,
                 headers=None, retries=None,
                 _proxy=None, _proxy_headers=None,
                 **conn_kw):
        ConnectionPool.__init__(self, host, port)
        RequestMethods.__init__(self, headers)

        self.strict = strict

        # Normalize int/float timeouts to a Timeout object once, up front.
        if not isinstance(timeout, Timeout):
            timeout = Timeout.from_float(timeout)

        if retries is None:
            retries = Retry.DEFAULT

        self.timeout = timeout
        self.retries = retries

        self.pool = self.QueueCls(maxsize)
        self.block = block

        self.proxy = _proxy
        self.proxy_headers = _proxy_headers or {}

        # Fill the queue up so that doing get() on it will block properly
        for _ in xrange(maxsize):
            self.pool.put(None)

        # These are mostly for testing and debugging purposes.
        self.num_connections = 0
        self.num_requests = 0
        self.conn_kw = conn_kw

        if self.proxy:
            # Enable Nagle's algorithm for proxies, to avoid packet fragmentation.
            # We cannot know if the user has added default socket options, so we cannot replace the
            # list.
            self.conn_kw.setdefault('socket_options', [])

    def _new_conn(self):
        """
        Return a fresh :class:`HTTPConnection`.
        """
        self.num_connections += 1
        log.debug("Starting new HTTP connection (%d): %s",
                  self.num_connections, self.host)

        conn = self.ConnectionCls(host=self.host, port=self.port,
                                  timeout=self.timeout.connect_timeout,
                                  strict=self.strict, **self.conn_kw)
        return conn

    def _get_conn(self, timeout=None):
        """
        Get a connection. Will return a pooled connection if one is available.

        If no connections are available and :prop:`.block` is ``False``, then a
        fresh connection is returned.

        :param timeout:
            Seconds to wait before giving up and raising
            :class:`urllib3.exceptions.EmptyPoolError` if the pool is empty and
            :prop:`.block` is ``True``.
        """
        conn = None
        try:
            conn = self.pool.get(block=self.block, timeout=timeout)

        except AttributeError:  # self.pool is None
            raise ClosedPoolError(self, "Pool is closed.")

        except queue.Empty:
            if self.block:
                raise EmptyPoolError(self,
                                     "Pool reached maximum size and no more "
                                     "connections are allowed.")
            pass  # Oh well, we'll create a new connection then

        # If this is a persistent connection, check if it got disconnected
        if conn and is_connection_dropped(conn):
            log.debug("Resetting dropped connection: %s", self.host)
            conn.close()
            if getattr(conn, 'auto_open', 1) == 0:
                # This is a proxied connection that has been mutated by
                # httplib._tunnel() and cannot be reused (since it would
                # attempt to bypass the proxy)
                conn = None

        # A None slot from the queue means "make a fresh connection".
        return conn or self._new_conn()

    def _put_conn(self, conn):
        """
        Put a connection back into the pool.

        :param conn:
            Connection object for the current host and port as returned by
            :meth:`._new_conn` or :meth:`._get_conn`.

        If the pool is already full, the connection is closed and discarded
        because we exceeded maxsize. If connections are discarded frequently,
        then maxsize should be increased.

        If the pool is closed, then the connection will be closed and discarded.
        """
        try:
            self.pool.put(conn, block=False)
            return  # Everything is dandy, done.
        except AttributeError:
            # self.pool is None.
            pass
        except queue.Full:
            # This should never happen if self.block == True
            log.warning(
                "Connection pool is full, discarding connection: %s",
                self.host)

        # Connection never got put back into the pool, close it.
        if conn:
            conn.close()

    def _validate_conn(self, conn):
        """
        Called right before a request is made, after the socket is created.
        """
        pass

    def _prepare_proxy(self, conn):
        # Nothing to do for HTTP connections.
        pass

    def _get_timeout(self, timeout):
        """ Helper that always returns a :class:`urllib3.util.Timeout` """
        if timeout is _Default:
            return self.timeout.clone()

        if isinstance(timeout, Timeout):
            return timeout.clone()
        else:
            # User passed us an int/float. This is for backwards compatibility,
            # can be removed later
            return Timeout.from_float(timeout)

    def _raise_timeout(self, err, url, timeout_value):
        """Is the error actually a timeout? Will raise a ReadTimeout or pass"""

        if isinstance(err, SocketTimeout):
            raise ReadTimeoutError(self, url, "Read timed out. (read timeout=%s)" % timeout_value)

        # See the above comment about EAGAIN in Python 3. In Python 2 we have
        # to specifically catch it and throw the timeout error
        if hasattr(err, 'errno') and err.errno in _blocking_errnos:
            raise ReadTimeoutError(self, url, "Read timed out. (read timeout=%s)" % timeout_value)

        # Catch possible read timeouts thrown as SSL errors. If not the
        # case, rethrow the original. We need to do this because of:
        # http://bugs.python.org/issue10272
        if 'timed out' in str(err) or 'did not complete (read)' in str(err):  # Python 2.6
            raise ReadTimeoutError(self, url, "Read timed out. (read timeout=%s)" % timeout_value)

    def _make_request(self, conn, method, url, timeout=_Default, chunked=False,
                      **httplib_request_kw):
        """
        Perform a request on a given urllib connection object taken from our
        pool.

        :param conn:
            a connection from one of our connection pools

        :param timeout:
            Socket timeout in seconds for the request. This can be a
            float or integer, which will set the same timeout value for
            the socket connect and the socket read, or an instance of
            :class:`urllib3.util.Timeout`, which gives you more fine-grained
            control over your timeouts.

        :param chunked:
            If True, send the body via ``conn.request_chunked`` (chunked
            transfer encoding) instead of ``conn.request``.
        """
        self.num_requests += 1

        timeout_obj = self._get_timeout(timeout)
        timeout_obj.start_connect()
        conn.timeout = timeout_obj.connect_timeout

        # Trigger any extra validation we need to do.
        try:
            self._validate_conn(conn)
        except (SocketTimeout, BaseSSLError) as e:
            # Py2 raises this as a BaseSSLError, Py3 raises it as socket timeout.
            self._raise_timeout(err=e, url=url, timeout_value=conn.timeout)
            raise

        # conn.request() calls httplib.*.request, not the method in
        # urllib3.request. It also calls makefile (recv) on the socket.
        if chunked:
            conn.request_chunked(method, url, **httplib_request_kw)
        else:
            conn.request(method, url, **httplib_request_kw)

        # Reset the timeout for the recv() on the socket
        read_timeout = timeout_obj.read_timeout

        # App Engine doesn't have a sock attr
        if getattr(conn, 'sock', None):
            # In Python 3 socket.py will catch EAGAIN and return None when you
            # try and read into the file pointer created by http.client, which
            # instead raises a BadStatusLine exception. Instead of catching
            # the exception and assuming all BadStatusLine exceptions are read
            # timeouts, check for a zero timeout before making the request.
            if read_timeout == 0:
                raise ReadTimeoutError(
                    self, url, "Read timed out. (read timeout=%s)" % read_timeout)
            if read_timeout is Timeout.DEFAULT_TIMEOUT:
                conn.sock.settimeout(socket.getdefaulttimeout())
            else:  # None or a value
                conn.sock.settimeout(read_timeout)

        # Receive the response from the server
        try:
            try:  # Python 2.7, use buffering of HTTP responses
                httplib_response = conn.getresponse(buffering=True)
            except TypeError:  # Python 2.6 and older, Python 3
                try:
                    httplib_response = conn.getresponse()
                except Exception as e:
                    # Remove the TypeError from the exception chain in Python 3;
                    # otherwise it looks like a programming error was the cause.
                    six.raise_from(e, None)
        except (SocketTimeout, BaseSSLError, SocketError) as e:
            self._raise_timeout(err=e, url=url, timeout_value=read_timeout)
            raise

        # AppEngine doesn't have a version attr.
        http_version = getattr(conn, '_http_vsn_str', 'HTTP/?')
        log.debug("%s://%s:%s \"%s %s %s\" %s %s", self.scheme, self.host, self.port,
                  method, url, http_version, httplib_response.status,
                  httplib_response.length)

        try:
            assert_header_parsing(httplib_response.msg)
        except (HeaderParsingError, TypeError) as hpe:  # Platform-specific: Python 3
            log.warning(
                'Failed to parse headers (url=%s): %s',
                self._absolute_url(url), hpe, exc_info=True)

        return httplib_response

    def _absolute_url(self, path):
        """Return the absolute URL (scheme://host[:port]/path) for ``path``."""
        return Url(scheme=self.scheme, host=self.host, port=self.port, path=path).url

    def close(self):
        """
        Close all pooled connections and disable the pool.
        """
        # Disable access to the pool
        old_pool, self.pool = self.pool, None

        try:
            while True:
                conn = old_pool.get(block=False)
                if conn:
                    conn.close()

        except queue.Empty:
            pass  # Done.

    def is_same_host(self, url):
        """
        Check if the given ``url`` is a member of the same host as this
        connection pool.
        """
        # Relative URLs are always on the same host.
        if url.startswith('/'):
            return True

        # TODO: Add optional support for socket.gethostbyname checking.
        scheme, host, port = get_host(url)

        host = _ipv6_host(host).lower()

        # Use explicit default port for comparison when none is given
        if self.port and not port:
            port = port_by_scheme.get(scheme)
        elif not self.port and port == port_by_scheme.get(scheme):
            port = None

        return (scheme, host, port) == (self.scheme, self.host, self.port)

    def urlopen(self, method, url, body=None, headers=None, retries=None,
                redirect=True, assert_same_host=True, timeout=_Default,
                pool_timeout=None, release_conn=None, chunked=False,
                body_pos=None, **response_kw):
        """
        Get a connection from the pool and perform an HTTP request. This is the
        lowest level call for making a request, so you'll need to specify all
        the raw details.

        .. note::

           More commonly, it's appropriate to use a convenience method provided
           by :class:`.RequestMethods`, such as :meth:`request`.

        .. note::

           `release_conn` will only behave as expected if
           `preload_content=False` because we want to make
           `preload_content=False` the default behaviour someday soon without
           breaking backwards compatibility.

        :param method:
            HTTP request method (such as GET, POST, PUT, etc.)

        :param body:
            Data to send in the request body (useful for creating
            POST requests, see HTTPConnectionPool.post_url for
            more convenience).

        :param headers:
            Dictionary of custom headers to send, such as User-Agent,
            If-None-Match, etc. If None, pool headers are used. If provided,
            these headers completely replace any pool-specific headers.

        :param retries:
            Configure the number of retries to allow before raising a
            :class:`~urllib3.exceptions.MaxRetryError` exception.

            Pass ``None`` to retry until you receive a response. Pass a
            :class:`~urllib3.util.retry.Retry` object for fine-grained control
            over different types of retries.
            Pass an integer number to retry connection errors that many times,
            but no other types of errors. Pass zero to never retry.

            If ``False``, then retries are disabled and any exception is raised
            immediately. Also, instead of raising a MaxRetryError on redirects,
            the redirect response will be returned.

        :type retries: :class:`~urllib3.util.retry.Retry`, False, or an int.

        :param redirect:
            If True, automatically handle redirects (status codes 301, 302,
            303, 307, 308). Each redirect counts as a retry. Disabling retries
            will disable redirect, too.

        :param assert_same_host:
            If ``True``, will make sure that the host of the pool requests is
            consistent else will raise HostChangedError. When False, you can
            use the pool on an HTTP proxy and request foreign hosts.

        :param timeout:
            If specified, overrides the default timeout for this one
            request. It may be a float (in seconds) or an instance of
            :class:`urllib3.util.Timeout`.

        :param pool_timeout:
            If set and the pool is set to block=True, then this method will
            block for ``pool_timeout`` seconds and raise EmptyPoolError if no
            connection is available within the time period.

        :param release_conn:
            If False, then the urlopen call will not release the connection
            back into the pool once a response is received (but will release if
            you read the entire contents of the response such as when
            `preload_content=True`). This is useful if you're not preloading
            the response's content immediately. You will need to call
            ``r.release_conn()`` on the response ``r`` to return the connection
            back into the pool. If None, it takes the value of
            ``response_kw.get('preload_content', True)``.

        :param chunked:
            If True, urllib3 will send the body using chunked transfer
            encoding. Otherwise, urllib3 will send the body using the standard
            content-length form. Defaults to False.

        :param int body_pos:
            Position to seek to in file-like body in the event of a retry or
            redirect. Typically this won't need to be set because urllib3 will
            auto-populate the value when needed.

        :param \\**response_kw:
            Additional parameters are passed to
            :meth:`urllib3.response.HTTPResponse.from_httplib`
        """
        if headers is None:
            headers = self.headers

        if not isinstance(retries, Retry):
            retries = Retry.from_int(retries, redirect=redirect, default=self.retries)

        if release_conn is None:
            release_conn = response_kw.get('preload_content', True)

        # Check host
        if assert_same_host and not self.is_same_host(url):
            raise HostChangedError(self, url, retries)

        conn = None

        # Track whether `conn` needs to be released before
        # returning/raising/recursing. Update this variable if necessary, and
        # leave `release_conn` constant throughout the function. That way, if
        # the function recurses, the original value of `release_conn` will be
        # passed down into the recursive call, and its value will be respected.
        #
        # See issue #651 [1] for details.
        #
        # [1] <https://github.com/shazow/urllib3/issues/651>
        release_this_conn = release_conn

        # Merge the proxy headers. Only do this in HTTP. We have to copy the
        # headers dict so we can safely change it without those changes being
        # reflected in anyone else's copy.
        if self.scheme == 'http':
            headers = headers.copy()
            headers.update(self.proxy_headers)

        # Must keep the exception bound to a separate variable or else Python 3
        # complains about UnboundLocalError.
        err = None

        # Keep track of whether we cleanly exited the except block. This
        # ensures we do proper cleanup in finally.
        clean_exit = False

        # Rewind body position, if needed. Record current position
        # for future rewinds in the event of a redirect/retry.
        body_pos = set_file_position(body, body_pos)

        try:
            # Request a connection from the queue.
            timeout_obj = self._get_timeout(timeout)
            conn = self._get_conn(timeout=pool_timeout)

            conn.timeout = timeout_obj.connect_timeout

            is_new_proxy_conn = self.proxy is not None and not getattr(conn, 'sock', None)
            if is_new_proxy_conn:
                self._prepare_proxy(conn)

            # Make the request on the httplib connection object.
            httplib_response = self._make_request(conn, method, url,
                                                  timeout=timeout_obj,
                                                  body=body, headers=headers,
                                                  chunked=chunked)

            # If we're going to release the connection in ``finally:``, then
            # the response doesn't need to know about the connection. Otherwise
            # it will also try to release it and we'll have a double-release
            # mess.
            response_conn = conn if not release_conn else None

            # Pass method to Response for length checking
            response_kw['request_method'] = method

            # Import httplib's response into our own wrapper object
            response = self.ResponseCls.from_httplib(httplib_response,
                                                     pool=self,
                                                     connection=response_conn,
                                                     retries=retries,
                                                     **response_kw)

            # Everything went great!
            clean_exit = True

        except queue.Empty:
            # Timed out by queue.
            raise EmptyPoolError(self, "No pool connections are available.")

        except (TimeoutError, HTTPException, SocketError, ProtocolError,
                BaseSSLError, SSLError, CertificateError) as e:
            # Discard the connection for these exceptions. It will be
            # replaced during the next _get_conn() call.
            clean_exit = False
            if isinstance(e, (BaseSSLError, CertificateError)):
                e = SSLError(e)
            elif isinstance(e, (SocketError, NewConnectionError)) and self.proxy:
                e = ProxyError('Cannot connect to proxy.', e)
            elif isinstance(e, (SocketError, HTTPException)):
                e = ProtocolError('Connection aborted.', e)

            # increment() raises MaxRetryError once the retry budget is spent.
            retries = retries.increment(method, url, error=e, _pool=self,
                                        _stacktrace=sys.exc_info()[2])
            retries.sleep()

            # Keep track of the error for the retry warning.
            err = e

        finally:
            if not clean_exit:
                # We hit some kind of exception, handled or otherwise. We need
                # to throw the connection away unless explicitly told not to.
                # Close the connection, set the variable to None, and make sure
                # we put the None back in the pool to avoid leaking it.
                conn = conn and conn.close()
                release_this_conn = True

            if release_this_conn:
                # Put the connection back to be reused. If the connection is
                # expired then it will be None, which will get replaced with a
                # fresh connection during _get_conn.
                self._put_conn(conn)

        if not conn:
            # Try again
            log.warning("Retrying (%r) after connection "
                        "broken by '%r': %s", retries, err, url)
            return self.urlopen(method, url, body, headers, retries,
                                redirect, assert_same_host,
                                timeout=timeout, pool_timeout=pool_timeout,
                                release_conn=release_conn, body_pos=body_pos,
                                **response_kw)

        # Helper: fully read the response so its connection is released back
        # to the pool (or closed) before we recurse for a redirect/retry.
        def drain_and_release_conn(response):
            try:
                # discard any remaining response body, the connection will be
                # released back to the pool once the entire response is read
                response.read()
            except (TimeoutError, HTTPException, SocketError, ProtocolError,
                    BaseSSLError, SSLError) as e:
                pass

        # Handle redirect?
        redirect_location = redirect and response.get_redirect_location()
        if redirect_location:
            if response.status == 303:
                method = 'GET'

            try:
                retries = retries.increment(method, url, response=response, _pool=self)
            except MaxRetryError:
                if retries.raise_on_redirect:
                    # Drain and release the connection for this response, since
                    # we're not returning it to be released manually.
                    drain_and_release_conn(response)
                    raise
                return response

            # drain and return the connection to the pool before recursing
            drain_and_release_conn(response)

            retries.sleep_for_retry(response)
            log.debug("Redirecting %s -> %s", url, redirect_location)
            return self.urlopen(
                method, redirect_location, body, headers,
                retries=retries, redirect=redirect,
                assert_same_host=assert_same_host,
                timeout=timeout, pool_timeout=pool_timeout,
                release_conn=release_conn, body_pos=body_pos,
                **response_kw)

        # Check if we should retry the HTTP response.
        has_retry_after = bool(response.getheader('Retry-After'))
        if retries.is_retry(method, response.status, has_retry_after):
            try:
                retries = retries.increment(method, url, response=response, _pool=self)
            except MaxRetryError:
                if retries.raise_on_status:
                    # Drain and release the connection for this response, since
                    # we're not returning it to be released manually.
                    drain_and_release_conn(response)
                    raise
                return response

            # drain and return the connection to the pool before recursing
            drain_and_release_conn(response)

            retries.sleep(response)
            log.debug("Retry: %s", url)
            return self.urlopen(
                method, url, body, headers,
                retries=retries, redirect=redirect,
                assert_same_host=assert_same_host,
                timeout=timeout, pool_timeout=pool_timeout,
                release_conn=release_conn,
                body_pos=body_pos, **response_kw)

        return response


class HTTPSConnectionPool(HTTPConnectionPool):
    """
    Same as :class:`.HTTPConnectionPool`, but HTTPS.

    When Python is compiled with the :mod:`ssl` module, then
    :class:`.VerifiedHTTPSConnection` is used, which *can* verify certificates,
    instead of :class:`.HTTPSConnection`.

    :class:`.VerifiedHTTPSConnection` uses one of ``assert_fingerprint``,
    ``assert_hostname`` and ``host`` in this order to verify connections.
    If ``assert_hostname`` is False, no verification is done.

    The ``key_file``, ``cert_file``, ``cert_reqs``, ``ca_certs``,
    ``ca_cert_dir``, and ``ssl_version`` are only used if :mod:`ssl` is
    available and are fed into :meth:`urllib3.util.ssl_wrap_socket` to upgrade
    the connection socket into an SSL socket.
    """

    scheme = 'https'
    ConnectionCls = HTTPSConnection

    def __init__(self, host, port=None,
                 strict=False, timeout=Timeout.DEFAULT_TIMEOUT, maxsize=1,
                 block=False, headers=None, retries=None,
                 _proxy=None, _proxy_headers=None,
                 key_file=None, cert_file=None, cert_reqs=None,
                 ca_certs=None, ssl_version=None,
                 assert_hostname=None, assert_fingerprint=None,
                 ca_cert_dir=None, **conn_kw):

        HTTPConnectionPool.__init__(self, host, port, strict, timeout, maxsize,
                                    block, headers, retries, _proxy, _proxy_headers,
                                    **conn_kw)

        # Supplying a CA bundle implies certificate verification unless the
        # caller explicitly set cert_reqs.
        if ca_certs and cert_reqs is None:
            cert_reqs = 'CERT_REQUIRED'

        self.key_file = key_file
        self.cert_file = cert_file
        self.cert_reqs = cert_reqs
        self.ca_certs = ca_certs
        self.ca_cert_dir = ca_cert_dir
        self.ssl_version = ssl_version
        self.assert_hostname = assert_hostname
        self.assert_fingerprint = assert_fingerprint

    def _prepare_conn(self, conn):
        """
        Prepare the ``connection`` for :meth:`urllib3.util.ssl_wrap_socket`
        and establish the tunnel if proxy is used.
        """

        # Certificate settings only apply to VerifiedHTTPSConnection; a plain
        # HTTPSConnection (no ssl module) is returned untouched.
        if isinstance(conn, VerifiedHTTPSConnection):
            conn.set_cert(key_file=self.key_file,
                          cert_file=self.cert_file,
                          cert_reqs=self.cert_reqs,
                          ca_certs=self.ca_certs,
                          ca_cert_dir=self.ca_cert_dir,
                          assert_hostname=self.assert_hostname,
                          assert_fingerprint=self.assert_fingerprint)
            conn.ssl_version = self.ssl_version
        return conn

    def _prepare_proxy(self, conn):
        """
        Establish tunnel connection early, because otherwise httplib
        would improperly set Host: header to proxy's IP:port.
        """
        # Python 2.7+
        try:
            set_tunnel = conn.set_tunnel
        except AttributeError:  # Platform-specific: Python 2.6
            set_tunnel = conn._set_tunnel

        if sys.version_info <= (2, 6, 4) and not self.proxy_headers:  # Python 2.6.4 and older
            set_tunnel(self._proxy_host, self.port)
        else:
            set_tunnel(self._proxy_host, self.port, self.proxy_headers)

        conn.connect()

    def _new_conn(self):
        """
        Return a fresh :class:`httplib.HTTPSConnection`.
        """
        self.num_connections += 1
        log.debug("Starting new HTTPS connection (%d): %s",
                  self.num_connections, self.host)

        if not self.ConnectionCls or self.ConnectionCls is DummyConnection:
            raise SSLError("Can't connect to HTTPS URL because the SSL "
                           "module is not available.")

        # With a proxy, the TCP connection goes to the proxy address; the
        # tunnel to self.host is set up later in _prepare_proxy().
        actual_host = self.host
        actual_port = self.port
        if self.proxy is not None:
            actual_host = self.proxy.host
            actual_port = self.proxy.port

        conn = self.ConnectionCls(host=actual_host, port=actual_port,
                                  timeout=self.timeout.connect_timeout,
                                  strict=self.strict, **self.conn_kw)

        return self._prepare_conn(conn)

    def _validate_conn(self, conn):
        """
        Called right before a request is made, after the socket is created.
        """
        super(HTTPSConnectionPool, self)._validate_conn(conn)

        # Force connect early to allow us to validate the connection.
        if not getattr(conn, 'sock', None):  # AppEngine might not have  `.sock`
            conn.connect()

        if not conn.is_verified:
            warnings.warn((
                'Unverified HTTPS request is being made. '
                'Adding certificate verification is strongly advised. See: '
                'https://urllib3.readthedocs.io/en/latest/advanced-usage.html'
                '#ssl-warnings'),
                InsecureRequestWarning)

def connection_from_url(url, **kw):
    """
    Given a url, return an :class:`.ConnectionPool` instance of its host.

    This is a shortcut for not having to parse out the scheme, host, and port
    of the url before creating an :class:`.ConnectionPool` instance.

    :param url:
        Absolute URL string that must include the scheme. Port is optional.

    :param \\**kw:
        Passes additional parameters to the constructor of the appropriate
        :class:`.ConnectionPool`. Useful for specifying things like
        timeout, maxsize, headers, etc.

    Example::

        >>> conn = connection_from_url('http://google.com/')
        >>> r = conn.request('GET', '/')
    """
    scheme, host, port = get_host(url)
    # Fall back to the scheme's well-known port (or 80) when none is given.
    if not port:
        port = port_by_scheme.get(scheme, 80)
    pool_cls = HTTPSConnectionPool if scheme == 'https' else HTTPConnectionPool
    return pool_cls(host, port=port, **kw)


def _ipv6_host(host):
    """
    Process IPv6 address literals
    """

    # httplib doesn't like it when we include brackets in IPv6 addresses
    # Specifically, if we include brackets but also pass the port then
    # httplib crazily doubles up the square brackets on the Host header.
    # Instead, we need to make sure we never pass ``None`` as the port.
    # However, for backward compatibility reasons we can't actually
    # *assert* that.  See http://bugs.python.org/issue28539
    #
    # Also if an IPv6 address literal has a zone identifier, the
    # percent sign might be URIencoded, convert it back into ASCII
    if host.startswith('[') and host.endswith(']'):
        host = host.replace('%25', '%').strip('[]')
    return host
_vendor/urllib3/connection.py000064400000031313151733136360012276 0ustar00from __future__ import absolute_import
import datetime
import logging
import os
import sys
import socket
from socket import error as SocketError, timeout as SocketTimeout
import warnings
from .packages import six
from .packages.six.moves.http_client import HTTPConnection as _HTTPConnection
from .packages.six.moves.http_client import HTTPException  # noqa: F401

try:  # Compiled with SSL?
    import ssl
    BaseSSLError = ssl.SSLError
except (ImportError, AttributeError):  # Platform-specific: No SSL.
    ssl = None

    # Stand-in so ``except BaseSSLError`` clauses stay valid without ssl.
    class BaseSSLError(BaseException):
        pass


try:  # Python 3:
    # Not a no-op, we're adding this to the namespace so it can be imported.
    ConnectionError = ConnectionError
except NameError:  # Python 2:
    # Python 2 has no builtin ConnectionError; provide a compatible shim.
    class ConnectionError(Exception):
        pass


from .exceptions import (
    NewConnectionError,
    ConnectTimeoutError,
    SubjectAltNameWarning,
    SystemTimeWarning,
)
from .packages.ssl_match_hostname import match_hostname, CertificateError

from .util.ssl_ import (
    resolve_cert_reqs,
    resolve_ssl_version,
    assert_fingerprint,
    create_urllib3_context,
    ssl_wrap_socket
)


from .util import connection

from ._collections import HTTPHeaderDict

# Module-level logger for this connection module.
log = logging.getLogger(__name__)

# Default TCP ports keyed by URL scheme.
port_by_scheme = {
    'http': 80,
    'https': 443,
}

# When updating RECENT_DATE, move it to
# within two years of the current date, and no
# earlier than 6 months ago.
# Used as a sanity check below: a system clock earlier than this date
# would make certificate validity checks unreliable.
RECENT_DATE = datetime.date(2016, 1, 1)


class DummyConnection(object):
    """Used to detect a failed ConnectionCls import.

    Assigned in place of ``HTTPSConnection`` at the bottom of this module
    when the ``ssl`` module is unavailable, so misuse fails loudly.
    """
    pass


class HTTPConnection(_HTTPConnection, object):
    """
    Based on httplib.HTTPConnection but provides an extra constructor
    backwards-compatibility layer between older and newer Pythons.

    Additional keyword parameters are used to configure attributes of the connection.
    Accepted parameters include:

      - ``strict``: See the documentation on :class:`urllib3.connectionpool.HTTPConnectionPool`
      - ``source_address``: Set the source address for the current connection.

        .. note:: This is ignored for Python 2.6. It is only applied for 2.7 and 3.x

      - ``socket_options``: Set specific options on the underlying socket. If not specified, then
        defaults are loaded from ``HTTPConnection.default_socket_options`` which includes disabling
        Nagle's algorithm (sets TCP_NODELAY to 1) unless the connection is behind a proxy.

        For example, if you wish to enable TCP Keep Alive in addition to the defaults,
        you might pass::

            HTTPConnection.default_socket_options + [
                (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1),
            ]

        Or you may want to disable the defaults by passing an empty list (e.g., ``[]``).
    """

    default_port = port_by_scheme['http']

    #: Disable Nagle's algorithm by default.
    #: ``[(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]``
    default_socket_options = [(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]

    #: Whether this connection verifies the host's certificate.
    is_verified = False

    def __init__(self, *args, **kw):
        if six.PY3:  # Python 3
            # ``strict`` has no meaning on Python 3's http.client; drop it
            # before the superclass constructor sees it.
            kw.pop('strict', None)

        # Pre-set source_address in case we have an older Python like 2.6.
        self.source_address = kw.get('source_address')

        if sys.version_info < (2, 7):  # Python 2.6
            # _HTTPConnection on Python 2.6 will balk at this keyword arg, but
            # not newer versions. We can still use it when creating a
            # connection though, so we pop it *after* we have saved it as
            # self.source_address.
            kw.pop('source_address', None)

        #: The socket options provided by the user. If no options are
        #: provided, we use the default options.
        self.socket_options = kw.pop('socket_options', self.default_socket_options)

        # Superclass also sets self.source_address in Python 2.7+.
        _HTTPConnection.__init__(self, *args, **kw)

    def _new_conn(self):
        """ Establish a socket connection and set nodelay settings on it.

        :return: New socket connection.
        :raises ConnectTimeoutError: if the connection attempt timed out.
        :raises NewConnectionError: for any other socket-level failure.
        """
        extra_kw = {}
        if self.source_address:
            extra_kw['source_address'] = self.source_address

        if self.socket_options:
            extra_kw['socket_options'] = self.socket_options

        try:
            conn = connection.create_connection(
                (self.host, self.port), self.timeout, **extra_kw)

        # The caught timeout carries no useful detail; report the configured
        # timeout value instead. (Previously bound to an unused ``e``.)
        except SocketTimeout:
            raise ConnectTimeoutError(
                self, "Connection to %s timed out. (connect timeout=%s)" %
                (self.host, self.timeout))

        except SocketError as e:
            raise NewConnectionError(
                self, "Failed to establish a new connection: %s" % e)

        return conn

    def _prepare_conn(self, conn):
        """Adopt a freshly created socket and perform tunnel setup if needed."""
        self.sock = conn
        # the _tunnel_host attribute was added in python 2.6.3 (via
        # http://hg.python.org/cpython/rev/0f57b30a152f) so pythons 2.6(0-2) do
        # not have them.
        if getattr(self, '_tunnel_host', None):
            # TODO: Fix tunnel so it doesn't depend on self.sock state.
            self._tunnel()
            # Mark this connection as not reusable
            self.auto_open = 0

    def connect(self):
        """Create the raw socket and prepare it for use."""
        conn = self._new_conn()
        self._prepare_conn(conn)

    def request_chunked(self, method, url, body=None, headers=None):
        """
        Alternative to the common request method, which sends the
        body with chunked encoding and not as one block
        """
        headers = HTTPHeaderDict(headers if headers is not None else {})
        skip_accept_encoding = 'accept-encoding' in headers
        skip_host = 'host' in headers
        self.putrequest(
            method,
            url,
            skip_accept_encoding=skip_accept_encoding,
            skip_host=skip_host
        )
        for header, value in headers.items():
            self.putheader(header, value)
        if 'transfer-encoding' not in headers:
            self.putheader('Transfer-Encoding', 'chunked')
        self.endheaders()

        if body is not None:
            stringish_types = six.string_types + (six.binary_type,)
            if isinstance(body, stringish_types):
                body = (body,)
            for chunk in body:
                if not chunk:
                    continue
                if not isinstance(chunk, six.binary_type):
                    chunk = chunk.encode('utf8')
                # Frame each chunk as: <hex length>\r\n<data>\r\n
                len_str = hex(len(chunk))[2:]
                self.send(len_str.encode('utf-8'))
                self.send(b'\r\n')
                self.send(chunk)
                self.send(b'\r\n')

        # After the if clause, to always have a closed body
        self.send(b'0\r\n\r\n')


class HTTPSConnection(HTTPConnection):
    """HTTPConnection subclass that wraps its socket with TLS on connect."""

    default_port = port_by_scheme['https']

    ssl_version = None

    def __init__(self, host, port=None, key_file=None, cert_file=None,
                 strict=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
                 ssl_context=None, **kw):

        HTTPConnection.__init__(self, host, port, strict=strict,
                                timeout=timeout, **kw)

        self.ssl_context = ssl_context
        self.cert_file = cert_file
        self.key_file = key_file

        # Required property for Google AppEngine 1.9.0 which otherwise causes
        # HTTPS requests to go out as HTTP. (See Issue #356)
        self._protocol = 'https'

    def connect(self):
        raw_sock = self._new_conn()
        self._prepare_conn(raw_sock)

        if self.ssl_context is None:
            # No caller-supplied context: build a default one lazily.
            self.ssl_context = create_urllib3_context(
                ssl_version=resolve_ssl_version(None),
                cert_reqs=resolve_cert_reqs(None),
            )

        # Replace the plain socket with its TLS-wrapped counterpart.
        self.sock = ssl_wrap_socket(
            sock=raw_sock,
            keyfile=self.key_file,
            certfile=self.cert_file,
            ssl_context=self.ssl_context,
        )


class VerifiedHTTPSConnection(HTTPSConnection):
    """
    Based on httplib.HTTPSConnection but wraps the socket with
    SSL certification.
    """
    # Per-instance certificate configuration; populated via set_cert().
    cert_reqs = None
    ca_certs = None
    ca_cert_dir = None
    ssl_version = None
    assert_fingerprint = None

    def set_cert(self, key_file=None, cert_file=None,
                 cert_reqs=None, ca_certs=None,
                 assert_hostname=None, assert_fingerprint=None,
                 ca_cert_dir=None):
        """
        This method should only be called once, before the connection is used.
        """
        # If cert_reqs is not provided, we can try to guess. If the user gave
        # us a cert database, we assume they want to use it: otherwise, if
        # they gave us an SSL Context object we should use whatever is set for
        # it.
        if cert_reqs is None:
            if ca_certs or ca_cert_dir:
                cert_reqs = 'CERT_REQUIRED'
            elif self.ssl_context is not None:
                cert_reqs = self.ssl_context.verify_mode

        self.key_file = key_file
        self.cert_file = cert_file
        self.cert_reqs = cert_reqs
        self.assert_hostname = assert_hostname
        self.assert_fingerprint = assert_fingerprint
        # Expand '~' so callers may pass home-relative certificate paths.
        self.ca_certs = ca_certs and os.path.expanduser(ca_certs)
        self.ca_cert_dir = ca_cert_dir and os.path.expanduser(ca_cert_dir)

    def connect(self):
        # Add certificate verification
        conn = self._new_conn()

        hostname = self.host
        if getattr(self, '_tunnel_host', None):
            # _tunnel_host was added in Python 2.6.3
            # (See: http://hg.python.org/cpython/rev/0f57b30a152f)

            self.sock = conn
            # Calls self._set_hostport(), so self.host is
            # self._tunnel_host below.
            self._tunnel()
            # Mark this connection as not reusable
            self.auto_open = 0

            # Override the host with the one we're requesting data from.
            hostname = self._tunnel_host

        # A clock earlier than RECENT_DATE makes certificate date checks
        # meaningless; warn rather than fail.
        is_time_off = datetime.date.today() < RECENT_DATE
        if is_time_off:
            warnings.warn((
                'System time is way off (before {0}). This will probably '
                'lead to SSL verification errors').format(RECENT_DATE),
                SystemTimeWarning
            )

        # Wrap socket using verification with the root certs in
        # trusted_root_certs
        if self.ssl_context is None:
            self.ssl_context = create_urllib3_context(
                ssl_version=resolve_ssl_version(self.ssl_version),
                cert_reqs=resolve_cert_reqs(self.cert_reqs),
            )

        context = self.ssl_context
        # Re-resolve in case set_cert() changed cert_reqs after the context
        # was created.
        context.verify_mode = resolve_cert_reqs(self.cert_reqs)
        self.sock = ssl_wrap_socket(
            sock=conn,
            keyfile=self.key_file,
            certfile=self.cert_file,
            ca_certs=self.ca_certs,
            ca_cert_dir=self.ca_cert_dir,
            server_hostname=hostname,
            ssl_context=context)

        if self.assert_fingerprint:
            # Fingerprint pinning takes precedence over hostname matching.
            assert_fingerprint(self.sock.getpeercert(binary_form=True),
                               self.assert_fingerprint)
        elif context.verify_mode != ssl.CERT_NONE \
                and not getattr(context, 'check_hostname', False) \
                and self.assert_hostname is not False:
            # While urllib3 attempts to always turn off hostname matching from
            # the TLS library, this cannot always be done. So we check whether
            # the TLS Library still thinks it's matching hostnames.
            cert = self.sock.getpeercert()
            if not cert.get('subjectAltName', ()):
                warnings.warn((
                    'Certificate for {0} has no `subjectAltName`, falling back to check for a '
                    '`commonName` for now. This feature is being removed by major browsers and '
                    'deprecated by RFC 2818. (See https://github.com/shazow/urllib3/issues/497 '
                    'for details.)'.format(hostname)),
                    SubjectAltNameWarning
                )
            _match_hostname(cert, self.assert_hostname or hostname)

        # Verified only when the context required certificates or a
        # fingerprint pin was checked above.
        self.is_verified = (
            context.verify_mode == ssl.CERT_REQUIRED or
            self.assert_fingerprint is not None
        )


def _match_hostname(cert, asserted_hostname):
    """Match *cert* against *asserted_hostname*.

    On mismatch, log the failure and re-raise the :class:`CertificateError`
    with the peer certificate attached as ``_peer_cert`` so client code can
    inspect it when catching the exception.
    """
    try:
        match_hostname(cert, asserted_hostname)
    except CertificateError as exc:
        # Attach the cert before logging so the exception is fully
        # populated even if logging itself raises.
        exc._peer_cert = cert
        log.error(
            'Certificate did not match expected hostname: %s. '
            'Certificate: %s', asserted_hostname, cert
        )
        raise


if ssl:
    # Make a copy for testing.
    UnverifiedHTTPSConnection = HTTPSConnection
    HTTPSConnection = VerifiedHTTPSConnection
else:
    # No ssl module available: any attempt to use HTTPS fails loudly.
    HTTPSConnection = DummyConnection
_vendor/urllib3/request.py000064400000013472151733136360011635 0ustar00from __future__ import absolute_import

from .filepost import encode_multipart_formdata
from .packages.six.moves.urllib.parse import urlencode


__all__ = ['RequestMethods']


class RequestMethods(object):
    """
    Convenience mixin for classes who implement a :meth:`urlopen` method, such
    as :class:`~urllib3.connectionpool.HTTPConnectionPool` and
    :class:`~urllib3.poolmanager.PoolManager`.

    Provides behavior for making common types of HTTP request methods and
    decides which type of request field encoding to use.

    Specifically,

    :meth:`.request_encode_url` is for sending requests whose fields are
    encoded in the URL (such as GET, HEAD, DELETE).

    :meth:`.request_encode_body` is for sending requests whose fields are
    encoded in the *body* of the request using multipart or www-form-urlencoded
    (such as for POST, PUT, PATCH).

    :meth:`.request` is for making any kind of request, it will look up the
    appropriate encoding format and use one of the above two methods to make
    the request.

    Initializer parameters:

    :param headers:
        Headers to include with all requests, unless other headers are given
        explicitly.
    """

    #: Methods whose fields belong in the URL rather than the body.
    _encode_url_methods = set(['DELETE', 'GET', 'HEAD', 'OPTIONS'])

    def __init__(self, headers=None):
        self.headers = headers or {}

    def urlopen(self, method, url, body=None, headers=None,
                encode_multipart=True, multipart_boundary=None,
                **kw):  # Abstract
        # BUG FIX: ``NotImplemented`` is the rich-comparison sentinel, not an
        # exception type; calling it raised a confusing TypeError. Raise the
        # intended NotImplementedError instead.
        raise NotImplementedError(
            "Classes extending RequestMethods must implement "
            "their own ``urlopen`` method.")

    def request(self, method, url, fields=None, headers=None, **urlopen_kw):
        """
        Make a request using :meth:`urlopen` with the appropriate encoding of
        ``fields`` based on the ``method`` used.

        This is a convenience method that requires the least amount of manual
        effort. It can be used in most situations, while still having the
        option to drop down to more specific methods when necessary, such as
        :meth:`request_encode_url`, :meth:`request_encode_body`,
        or even the lowest level :meth:`urlopen`.
        """
        method = method.upper()

        if method in self._encode_url_methods:
            return self.request_encode_url(method, url, fields=fields,
                                           headers=headers,
                                           **urlopen_kw)
        else:
            return self.request_encode_body(method, url, fields=fields,
                                            headers=headers,
                                            **urlopen_kw)

    def request_encode_url(self, method, url, fields=None, headers=None,
                           **urlopen_kw):
        """
        Make a request using :meth:`urlopen` with the ``fields`` encoded in
        the url. This is useful for request methods like GET, HEAD, DELETE, etc.
        """
        if headers is None:
            headers = self.headers

        extra_kw = {'headers': headers}
        extra_kw.update(urlopen_kw)

        if fields:
            url += '?' + urlencode(fields)

        return self.urlopen(method, url, **extra_kw)

    def request_encode_body(self, method, url, fields=None, headers=None,
                            encode_multipart=True, multipart_boundary=None,
                            **urlopen_kw):
        """
        Make a request using :meth:`urlopen` with the ``fields`` encoded in
        the body. This is useful for request methods like POST, PUT, PATCH, etc.

        When ``encode_multipart=True`` (default), then
        :meth:`urllib3.filepost.encode_multipart_formdata` is used to encode
        the payload with the appropriate content type. Otherwise
        :meth:`urllib.urlencode` is used with the
        'application/x-www-form-urlencoded' content type.

        Multipart encoding must be used when posting files, and it's reasonably
        safe to use it in other times too. However, it may break request
        signing, such as with OAuth.

        Supports an optional ``fields`` parameter of key/value strings AND
        key/filetuple. A filetuple is a (filename, data, MIME type) tuple where
        the MIME type is optional. For example::

            fields = {
                'foo': 'bar',
                'fakefile': ('foofile.txt', 'contents of foofile'),
                'realfile': ('barfile.txt', open('realfile').read()),
                'typedfile': ('bazfile.bin', open('bazfile').read(),
                              'image/jpeg'),
                'nonamefile': 'contents of nonamefile field',
            }

        When uploading a file, providing a filename (the first parameter of the
        tuple) is optional but recommended to best mimic behavior of browsers.

        Note that if ``headers`` are supplied, the 'Content-Type' header will
        be overwritten because it depends on the dynamic random boundary string
        which is used to compose the body of the request. The random boundary
        string can be explicitly set with the ``multipart_boundary`` parameter.
        """
        if headers is None:
            headers = self.headers

        extra_kw = {'headers': {}}

        if fields:
            # ``fields`` and ``body`` are mutually exclusive encodings.
            if 'body' in urlopen_kw:
                raise TypeError(
                    "request got values for both 'fields' and 'body', can only specify one.")

            if encode_multipart:
                body, content_type = encode_multipart_formdata(fields, boundary=multipart_boundary)
            else:
                body, content_type = urlencode(fields), 'application/x-www-form-urlencoded'

            extra_kw['body'] = body
            extra_kw['headers'] = {'Content-Type': content_type}

        # Caller-supplied headers are merged on top of the generated ones.
        extra_kw['headers'].update(headers)
        extra_kw.update(urlopen_kw)

        return self.urlopen(method, url, **extra_kw)
_vendor/urllib3/packages/__pycache__/ordered_dict.cpython-36.pyc000064400000020173151733136360020632 0ustar003

�Pf�"�@styddlmZWn ek
r0ddlmZYnXyddlmZmZmZWnek
r^YnXGdd�de	�Z
dS)�)�	get_ident)�KeysView�
ValuesView�	ItemsViewc@seZdZdZdd�Zejfdd�Zejfdd�Zdd	�Zd
d�Z	dd
�Z
d6dd�Zdd�Zdd�Z
dd�Zdd�Zdd�Zdd�Zdd�ZeZe�Zefdd �Zd7d"d#�Zifd$d%�Zd&d'�Zd(d)�Zed8d*d+��Zd,d-�Zd.d/�Zd0d1�Zd2d3�Z d4d5�Z!d!S)9�OrderedDictz)Dictionary that remembers insertion ordercOsnt|�dkrtdt|���y
|jWn6tk
r\g|_}||dg|dd�<i|_YnX|j||�dS)z�Initialize an ordered dictionary.  Signature is the same as for
        regular dictionaries, but keyword arguments are not recommended
        because their insertion order is arbitrary.

        �z$expected at most 1 arguments, got %dN)�len�	TypeError�_OrderedDict__root�AttributeError�_OrderedDict__map�_OrderedDict__update)�self�args�kwds�root�r�"/usr/lib/python3.6/ordered_dict.py�__init__s

zOrderedDict.__init__cCsF||kr6|j}|d}|||g|d<|d<|j|<||||�dS)z!od.__setitem__(i, y) <==> od[i]=yrrN)r
r)r�key�valueZdict_setitemr�lastrrr�__setitem__,s
 zOrderedDict.__setitem__cCs0|||�|jj|�\}}}||d<||d<dS)z od.__delitem__(y) <==> del od[y]rrN)r�pop)rrZdict_delitem�	link_prev�	link_nextrrr�__delitem__6s
zOrderedDict.__delitem__ccs2|j}|d}x||k	r,|dV|d}qWdS)zod.__iter__() <==> iter(od)r�N)r
)rr�currrrr�__iter__?s


zOrderedDict.__iter__ccs2|j}|d}x||k	r,|dV|d}qWdS)z#od.__reversed__() <==> reversed(od)rrN)r
)rrrrrr�__reversed__Gs


zOrderedDict.__reversed__cCshyDx|jj�D]}|dd�=qW|j}||dg|dd�<|jj�Wntk
rXYnXtj|�dS)z.od.clear() -> None.  Remove all items from od.N)r�
itervaluesr
�clearr�dict)rZnoderrrrr"OszOrderedDict.clearTcCs||std��|j}|r8|d}|d}||d<||d<n |d}|d}||d<||d<|d}|j|=tj||�}||fS)z�od.popitem() -> (k, v), return and remove a (key, value) pair.
        Pairs are returned in LIFO order if last is true or FIFO order if false.

        zdictionary is emptyrrr)�KeyErrorr
rr#r)rrr�linkrrrrrrr�popitem[s 
zOrderedDict.popitemcCst|�S)zod.keys() -> list of keys in od)�list)rrrr�keystszOrderedDict.keyscs�fdd��D�S)z#od.values() -> list of values in odcsg|]}�|�qSrr)�.0r)rrr�
<listcomp>zsz&OrderedDict.values.<locals>.<listcomp>r)rr)rr�valuesxszOrderedDict.valuescs�fdd��D�S)z.od.items() -> list of (key, value) pairs in odcsg|]}|�|f�qSrr)r)r)rrrr*~sz%OrderedDict.items.<locals>.<listcomp>r)rr)rr�items|szOrderedDict.itemscCst|�S)z0od.iterkeys() -> an iterator over the keys in od)�iter)rrrr�iterkeys�szOrderedDict.iterkeysccsx|D]}||VqWdS)z2od.itervalues -> an iterator over the values in odNr)r�krrrr!�s
zOrderedDict.itervaluesccs x|D]}|||fVqWdS)z=od.iteritems -> an iterator over the (key, value) items in odNr)rr/rrr�	iteritems�s
zOrderedDict.iteritemscOs�t|�dkr tdt|�f��n|s,td��|d}f}t|�dkrL|d}t|t�rrx^|D]}||||<q\WnDt|d�r�x8|j�D]}||||<q�Wnx|D]\}}|||<q�Wx|j�D]\}}|||<q�WdS)a�od.update(E, **F) -> None.  Update od from dict/iterable E and F.

        If E is a dict instance, does:           for k in E: od[k] = E[k]
        If E has a .keys() method, does:         for k in E.keys(): od[k] = E[k]
        Or if E is an iterable of items, does:   for k, v in E: od[k] = v
        In either case, this is followed by:     for k, v in F.items(): od[k] = v

        rz8update() takes at most 2 positional arguments (%d given)z,update() takes at least 1 argument (0 given)rrr(N)rr	�
isinstancer#�hasattrr(r,)rrr�otherrrrrr�update�s&	


zOrderedDict.updatecCs0||kr||}||=|S||jkr,t|��|S)z�od.pop(k[,d]) -> v, remove specified key and return the corresponding value.
        If key is not found, d is returned if given, otherwise KeyError is raised.

        )�_OrderedDict__markerr$)rr�default�resultrrrr�s
zOrderedDict.popNcCs||kr||S|||<|S)zDod.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in odr)rrr6rrr�
setdefault�szOrderedDict.setdefaultcCsVt|�t�f}||krdSd||<z&|s6d|jjfSd|jj|j�fS||=XdS)zod.__repr__() <==> repr(od)z...rz%s()z%s(%r)N)�id�
_get_ident�	__class__�__name__r,)rZ
_repr_runningZcall_keyrrr�__repr__�szOrderedDict.__repr__cs\�fdd��D�}t��j�}xtt��D]}|j|d�q*W|rP�j|f|fS�j|ffS)z%Return state information for picklingcsg|]}|�|g�qSrr)r)r/)rrrr*�sz*OrderedDict.__reduce__.<locals>.<listcomp>N)�vars�copyrrr;)rr,Z	inst_dictr/r)rr�
__reduce__�szOrderedDict.__reduce__cCs
|j|�S)z!od.copy() -> a shallow copy of od)r;)rrrrr?�szOrderedDict.copycCs |�}x|D]}|||<qW|S)z�OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S
        and values equal to v (which defaults to None).

        r)�cls�iterabler�drrrr�fromkeys�s
zOrderedDict.fromkeyscCs6t|t�r*t|�t|�ko(|j�|j�kStj||�S)z�od.__eq__(y) <==> od==y.  Comparison to another OD is order-sensitive
        while comparison to a regular mapping is order-insensitive.

        )r1rrr,r#�__eq__)rr3rrrrE�s
 zOrderedDict.__eq__cCs
||kS)Nr)rr3rrr�__ne__�szOrderedDict.__ne__cCst|�S)z@od.viewkeys() -> a set-like object providing a view on od's keys)r)rrrr�viewkeys�szOrderedDict.viewkeyscCst|�S)z<od.viewvalues() -> an object providing a view on od's values)r)rrrr�
viewvalues�szOrderedDict.viewvaluescCst|�S)zBod.viewitems() -> a set-like object providing a view on od's items)r)rrrr�	viewitemsszOrderedDict.viewitems)T)N)N)"r<�
__module__�__qualname__�__doc__rr#rrrr r"r&r(r+r,r.r!r0r4r
�objectr5rr8r=r@r?�classmethodrDrErFrGrHrIrrrrrs:
	




	rN)Zthreadrr:�ImportErrorZdummy_threadZ_abcollrrrr#rrrrr�<module>s_vendor/urllib3/packages/__pycache__/six.cpython-36.opt-1.pyc000064400000057532151733136360017756 0ustar003

�Pf�u�I@srdZddlmZddlZddlZddlZddlZddlZdZdZ	ej
ddkZej
ddkZej
dd��dzkZ
er�efZefZefZeZeZejZn�efZeefZeejfZeZeZejjd	�r�e�d|�ZnLGdd
�d
e�Z ye!e ��Wn e"k
�re�d~�ZYnXe�d��Z[ dd�Z#dd�Z$Gdd�de�Z%Gdd�de%�Z&Gdd�dej'�Z(Gdd�de%�Z)Gdd�de�Z*e*e+�Z,Gdd�de(�Z-e)ddd d!�e)d"d#d$d%d"�e)d&d#d#d'd&�e)d(d)d$d*d(�e)d+d)d,�e)d-d#d$d.d-�e)d/d0d0d1d/�e)d2d0d0d/d2�e)d3d)d$d4d3�e)d5d)e
�rd6nd7d8�e)d9d)d:�e)d;d<d=d>�e)d!d!d �e)d?d?d@�e)dAdAd@�e)dBdBd@�e)d4d)d$d4d3�e)dCd#d$dDdC�e)dEd#d#dFdE�e&d$d)�e&dGdH�e&dIdJ�e&dKdLdM�e&dNdOdN�e&dPdQdR�e&dSdTdU�e&dVdWdX�e&dYdZd[�e&d\d]d^�e&d_d`da�e&dbdcdd�e&dedfdg�e&dhdidj�e&dkdkdl�e&dmdmdl�e&dndndl�e&dododp�e&dqdr�e&dsdt�e&dudv�e&dwdxdw�e&dydz�e&d{d|d}�e&d~dd��e&d�d�d��e&d�d�d��e&d�d�d��e&d�d�d��e&d�d�d��e&d�d�d��e&d�d�d��e&d�d�d��e&d�d�d��e&d�d�d��e&d�d�d��e&d�d�d��e&d�e+d�d��e&d�e+d�d��e&d�e+d�e+d��e&d�d�d��e&d�d�d��e&d�d�d��g>Z.ejd�k�rZe.e&d�d��g7Z.x:e.D]2Z/e0e-e/j1e/�e2e/e&��r`e,j3e/d�e/j1��q`W[/e.e-_.e-e+d��Z4e,j3e4d��Gd�d��d�e(�Z5e)d�d�d��e)d�d�d��e)d�d�d��e)d�d�d��e)d�d�d��e)d�d�d��e)d�d�d��e)d�d�d��e)d�d�d��e)d�d�d��e)d>d�d��e)d�d�d��e)d�d�d��e)d�d�d��e)d�d�d��e)d�d�d��e)d�d�d��e)d�d�d��e)d�d�d��e)d�d�d��e)d�d�d��e)d�d�d��e)d�d�d��gZ6xe6D]Z/e0e5e/j1e/��q�W[/e6e5_.e,j3e5e+d��d�dӃGd�dՄd�e(�Z7e)d�d�d��e)d�d�d��e)d�d�d��gZ8xe8D]Z/e0e7e/j1e/��q$W[/e8e7_.e,j3e7e+d��d�d܃Gd�dބd�e(�Z9e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)�dd�d�g!Z:xe:D]Z/e0e9e/j1e/��q�W[/e:e9_.e,j3e9e+�d��d�d�G�d�d��de(�Z;e)�dd��d�e)�dd��d�e)�d	d��d�e)�d
d��d�gZ<xe<D]Z/e0e;e/j1e/��qTW[/e<e;_.e,j3e;e+�d��d�d
�G�d�d��de(�Z=e)�dd�d��gZ>xe>D]Z/e0e=e/j1e/��q�W[/e>e=_.e,j3e=e+�d��d�d�G�d�d��dej'�Z?e,j3e?e+d���d��d�d�Z@�d�d�ZAe�	rj�dZB�dZC�dZD�dZE�dZF�d ZGn$�d!ZB�d"ZC�d#ZD�d$ZE�d%ZF�d&ZGyeHZIWn"eJk
�	r��d'�d(�ZIYnXeIZHyeKZKWn"eJk
�	r��d)�d*�ZKYnXe�
r�d+�d,�ZLejMZN�d-�d.�ZOeZPn>�d/�d,�ZL�d0�d1�ZN�d2�d.�ZOG�d3�d4��d4e�ZPeKZKe#eL�d5�ejQeB�ZRejQeC�ZSejQeD�ZTejQeE�ZUejQeF�ZVejQeG�ZWe�
r��d6�d7�ZX�d8�d9�ZY�d:�d;�ZZ�d<�d=�Z[ej\�d>�Z]ej\�d?�Z^ej\�d@�Z_nT�dA�d7�ZX�dB�d9�ZY�dC�d;�ZZ�dD�d=�Z[ej\�dE�Z]ej\�dF�Z^ej\�dG�Z_e#eX�dH�e#eY�dI�e#eZ�dJ�e#e[�dK�e�r�dL�dM�Z`�dN�dO�ZaebZcddldZdedje�dP�jfZg[dejhd�ZiejjZkelZmddlnZnenjoZoenjpZp�dQZqej
d
d
k�r�dRZr�dSZsn�dTZr�dUZsnj�dV�dM�Z`�dW�dO�ZaecZcebZg�dX�dY�Zi�dZ�d[�Zkejtejuev�ZmddloZoeojoZoZp�d\Zq�dRZr�dSZse#e`�d]�e#ea�d^��d_�dQ�Zw�d`�dT�Zx�da�dU�Zye�r�eze4j{�db�Z|�d��dc�dd�Z}n�d��de�df�Z|e|�dg�ej
dd��d�k�
re|�dh�n.ej
dd��d�k�
r8e|�di�n�dj�dk�Z~eze4j{�dld�Zedk�
rj�dm�dn�Zej
dd��d�k�
r�eZ��do�dn�Ze#e}�dp�ej
dd��d�k�
r�ej�ej�f�dq�dr�Z�nej�Z��ds�dt�Z��du�dv�Z��dw�dx�Z�gZ�e+Z�e��j��dy�dk	�rge�_�ej��rbx>e�ej��D]0\Z�Z�ee��j+dk�r*e�j1e+k�r*ej�e�=P�q*W[�[�ej�j�e,�dS(�z6Utilities for writing code that runs on Python 2 and 3�)�absolute_importNz'Benjamin Peterson <benjamin@python.org>z1.10.0����java��c@seZdZdd�ZdS)�XcCsdS)Nrrl�)�selfr
r
�/usr/lib/python3.6/six.py�__len__>sz	X.__len__N)�__name__�
__module__�__qualname__r
r
r
r
rr	<sr	�?cCs
||_dS)z Add documentation to a function.N)�__doc__)�func�docr
r
r�_add_docKsrcCst|�tj|S)z7Import module, returning the module after the last dot.)�
__import__�sys�modules)�namer
r
r�_import_modulePsrc@seZdZdd�Zdd�ZdS)�
_LazyDescrcCs
||_dS)N)r)rrr
r
r�__init__Xsz_LazyDescr.__init__cCsB|j�}t||j|�yt|j|j�Wntk
r<YnX|S)N)�_resolve�setattrr�delattr�	__class__�AttributeError)r�obj�tp�resultr
r
r�__get__[sz_LazyDescr.__get__N)rrrrr%r
r
r
rrVsrcs.eZdZd�fdd�	Zdd�Zdd�Z�ZS)	�MovedModuleNcs2tt|�j|�tr(|dkr |}||_n||_dS)N)�superr&r�PY3�mod)rr�old�new)r r
rriszMovedModule.__init__cCs
t|j�S)N)rr))rr
r
rrrszMovedModule._resolvecCs"|j�}t||�}t|||�|S)N)r�getattrr)r�attr�_module�valuer
r
r�__getattr__us
zMovedModule.__getattr__)N)rrrrrr0�
__classcell__r
r
)r rr&gs	r&cs(eZdZ�fdd�Zdd�ZgZ�ZS)�_LazyModulecstt|�j|�|jj|_dS)N)r'r2rr r)rr)r r
rr~sz_LazyModule.__init__cCs ddg}|dd�|jD�7}|S)NrrcSsg|]
}|j�qSr
)r)�.0r-r
r
r�
<listcomp>�sz'_LazyModule.__dir__.<locals>.<listcomp>)�_moved_attributes)rZattrsr
r
r�__dir__�sz_LazyModule.__dir__)rrrrr6r5r1r
r
)r rr2|sr2cs&eZdZd�fdd�	Zdd�Z�ZS)�MovedAttributeNcsdtt|�j|�trH|dkr |}||_|dkr@|dkr<|}n|}||_n||_|dkrZ|}||_dS)N)r'r7rr(r)r-)rrZold_modZnew_modZold_attrZnew_attr)r r
rr�szMovedAttribute.__init__cCst|j�}t||j�S)N)rr)r,r-)r�moduler
r
rr�s
zMovedAttribute._resolve)NN)rrrrrr1r
r
)r rr7�sr7c@sVeZdZdZdd�Zdd�Zdd�Zdd	d
�Zdd�Zd
d�Z	dd�Z
dd�ZeZdS)�_SixMetaPathImporterz�
    A meta path importer to import six.moves and its submodules.

    This class implements a PEP302 finder and loader. It should be compatible
    with Python 2.5 and all existing versions of Python3
    cCs||_i|_dS)N)r�
known_modules)rZsix_module_namer
r
rr�sz_SixMetaPathImporter.__init__cGs&x |D]}||j|jd|<qWdS)N�.)r:r)rr)Z	fullnames�fullnamer
r
r�_add_module�s
z _SixMetaPathImporter._add_modulecCs|j|jd|S)Nr;)r:r)rr<r
r
r�_get_module�sz _SixMetaPathImporter._get_moduleNcCs||jkr|SdS)N)r:)rr<�pathr
r
r�find_module�s
z _SixMetaPathImporter.find_modulecCs0y
|j|Stk
r*td|��YnXdS)Nz!This loader does not know module )r:�KeyError�ImportError)rr<r
r
rZ__get_module�s
z!_SixMetaPathImporter.__get_modulecCsRy
tj|Stk
rYnX|j|�}t|t�r>|j�}n||_|tj|<|S)N)rrrA� _SixMetaPathImporter__get_module�
isinstancer&r�
__loader__)rr<r)r
r
r�load_module�s




z _SixMetaPathImporter.load_modulecCst|j|�d�S)z�
        Return true, if the named module is a package.

        We need this method to get correct spec objects with
        Python 3.4 (see PEP451)
        �__path__)�hasattrrC)rr<r
r
r�
is_package�sz_SixMetaPathImporter.is_packagecCs|j|�dS)z;Return None

        Required, if is_package is implementedN)rC)rr<r
r
r�get_code�s
z_SixMetaPathImporter.get_code)N)
rrrrrr=r>r@rCrFrIrJ�
get_sourcer
r
r
rr9�s
	r9c@seZdZdZgZdS)�_MovedItemszLazy loading of moved objectsN)rrrrrGr
r
r
rrL�srLZ	cStringIO�io�StringIO�filter�	itertools�builtinsZifilter�filterfalseZifilterfalse�inputZ__builtin__Z	raw_input�internr�map�imap�getcwd�osZgetcwdu�getcwdb�rangeZxrangeZ
reload_module�	importlibZimp�reload�reduce�	functoolsZshlex_quoteZpipesZshlexZquote�UserDict�collections�UserList�
UserString�zipZizip�zip_longestZizip_longestZconfigparserZConfigParser�copyregZcopy_regZdbm_gnuZgdbmzdbm.gnuZ
_dummy_threadZdummy_threadZhttp_cookiejarZ	cookielibzhttp.cookiejarZhttp_cookiesZCookiezhttp.cookiesZ
html_entitiesZhtmlentitydefsz
html.entitiesZhtml_parserZ
HTMLParserzhtml.parserZhttp_clientZhttplibzhttp.clientZemail_mime_multipartzemail.MIMEMultipartzemail.mime.multipartZemail_mime_nonmultipartzemail.MIMENonMultipartzemail.mime.nonmultipartZemail_mime_textzemail.MIMETextzemail.mime.textZemail_mime_basezemail.MIMEBasezemail.mime.baseZBaseHTTPServerzhttp.serverZ
CGIHTTPServerZSimpleHTTPServerZcPickle�pickleZqueueZQueue�reprlib�reprZsocketserverZSocketServer�_threadZthreadZtkinterZTkinterZtkinter_dialogZDialogztkinter.dialogZtkinter_filedialogZ
FileDialogztkinter.filedialogZtkinter_scrolledtextZScrolledTextztkinter.scrolledtextZtkinter_simpledialogZSimpleDialogztkinter.simpledialogZtkinter_tixZTixztkinter.tixZtkinter_ttkZttkztkinter.ttkZtkinter_constantsZTkconstantsztkinter.constantsZtkinter_dndZTkdndztkinter.dndZtkinter_colorchooserZtkColorChooserztkinter.colorchooserZtkinter_commondialogZtkCommonDialogztkinter.commondialogZtkinter_tkfiledialogZtkFileDialogZtkinter_fontZtkFontztkinter.fontZtkinter_messageboxZtkMessageBoxztkinter.messageboxZtkinter_tksimpledialogZtkSimpleDialogZurllib_parsez.moves.urllib_parsezurllib.parseZurllib_errorz.moves.urllib_errorzurllib.errorZurllibz
.moves.urllibZurllib_robotparser�robotparserzurllib.robotparserZ
xmlrpc_clientZ	xmlrpclibz
xmlrpc.clientZ
xmlrpc_serverZSimpleXMLRPCServerz
xmlrpc.serverZwin32�winreg�_winregzmoves.z.moves�movesc@seZdZdZdS)�Module_six_moves_urllib_parsez7Lazy loading of moved objects in six.moves.urllib_parseN)rrrrr
r
r
rrn@srnZParseResultZurlparseZSplitResultZparse_qsZ	parse_qslZ	urldefragZurljoinZurlsplitZ
urlunparseZ
urlunsplitZ
quote_plusZunquoteZunquote_plusZ	urlencodeZ
splitqueryZsplittagZ	splituserZ
uses_fragmentZuses_netlocZuses_paramsZ
uses_queryZ
uses_relativezmoves.urllib_parsezmoves.urllib.parsec@seZdZdZdS)�Module_six_moves_urllib_errorz7Lazy loading of moved objects in six.moves.urllib_errorN)rrrrr
r
r
rrohsroZURLErrorZurllib2Z	HTTPErrorZContentTooShortErrorz.moves.urllib.errorzmoves.urllib_errorzmoves.urllib.errorc@seZdZdZdS)�Module_six_moves_urllib_requestz9Lazy loading of moved objects in six.moves.urllib_requestN)rrrrr
r
r
rrp|srpZurlopenzurllib.requestZinstall_openerZbuild_openerZpathname2urlZurl2pathnameZ
getproxiesZRequestZOpenerDirectorZHTTPDefaultErrorHandlerZHTTPRedirectHandlerZHTTPCookieProcessorZProxyHandlerZBaseHandlerZHTTPPasswordMgrZHTTPPasswordMgrWithDefaultRealmZAbstractBasicAuthHandlerZHTTPBasicAuthHandlerZProxyBasicAuthHandlerZAbstractDigestAuthHandlerZHTTPDigestAuthHandlerZProxyDigestAuthHandlerZHTTPHandlerZHTTPSHandlerZFileHandlerZ
FTPHandlerZCacheFTPHandlerZUnknownHandlerZHTTPErrorProcessorZurlretrieveZ
urlcleanupZ	URLopenerZFancyURLopenerZproxy_bypassz.moves.urllib.requestzmoves.urllib_requestzmoves.urllib.requestc@seZdZdZdS)� Module_six_moves_urllib_responsez:Lazy loading of moved objects in six.moves.urllib_responseN)rrrrr
r
r
rrq�srqZaddbasezurllib.responseZaddclosehookZaddinfoZ
addinfourlz.moves.urllib.responsezmoves.urllib_responsezmoves.urllib.responsec@seZdZdZdS)�#Module_six_moves_urllib_robotparserz=Lazy loading of moved objects in six.moves.urllib_robotparserN)rrrrr
r
r
rrr�srrZRobotFileParserz.moves.urllib.robotparserzmoves.urllib_robotparserzmoves.urllib.robotparserc@sNeZdZdZgZejd�Zejd�Zejd�Z	ejd�Z
ejd�Zdd�Zd	S)
�Module_six_moves_urllibzICreate a six.moves.urllib namespace that resembles the Python 3 namespacezmoves.urllib_parsezmoves.urllib_errorzmoves.urllib_requestzmoves.urllib_responsezmoves.urllib_robotparsercCsdddddgS)N�parse�error�request�responserjr
)rr
r
rr6�szModule_six_moves_urllib.__dir__N)
rrrrrG�	_importerr>rtrurvrwrjr6r
r
r
rrs�s




rszmoves.urllibcCstt|j|�dS)zAdd an item to six.moves.N)rrLr)Zmover
r
r�add_move�srycCsXytt|�WnDtk
rRytj|=Wn"tk
rLtd|f��YnXYnXdS)zRemove item from six.moves.zno such move, %rN)rrLr!rm�__dict__rA)rr
r
r�remove_move�sr{�__func__�__self__�__closure__�__code__�__defaults__�__globals__�im_funcZim_selfZfunc_closureZ	func_codeZ
func_defaultsZfunc_globalscCs|j�S)N)�next)�itr
r
r�advance_iteratorsr�cCstdd�t|�jD��S)Ncss|]}d|jkVqdS)�__call__N)rz)r3�klassr
r
r�	<genexpr>szcallable.<locals>.<genexpr>)�any�type�__mro__)r"r
r
r�callablesr�cCs|S)Nr
)�unboundr
r
r�get_unbound_functionsr�cCs|S)Nr
)r�clsr
r
r�create_unbound_methodsr�cCs|jS)N)r�)r�r
r
rr�"scCstj|||j�S)N)�types�
MethodTyper )rr"r
r
r�create_bound_method%sr�cCstj|d|�S)N)r�r�)rr�r
r
rr�(sc@seZdZdd�ZdS)�IteratorcCst|�j|�S)N)r��__next__)rr
r
rr�-sz
Iterator.nextN)rrrr�r
r
r
rr�+sr�z3Get the function out of a possibly unbound functioncKst|jf|��S)N)�iter�keys)�d�kwr
r
r�iterkeys>sr�cKst|jf|��S)N)r��values)r�r�r
r
r�
itervaluesAsr�cKst|jf|��S)N)r��items)r�r�r
r
r�	iteritemsDsr�cKst|jf|��S)N)r�Zlists)r�r�r
r
r�	iterlistsGsr�r�r�r�cKs|jf|�S)N)r�)r�r�r
r
rr�PscKs|jf|�S)N)r�)r�r�r
r
rr�SscKs|jf|�S)N)r�)r�r�r
r
rr�VscKs|jf|�S)N)r�)r�r�r
r
rr�Ys�viewkeys�
viewvalues�	viewitemsz1Return an iterator over the keys of a dictionary.z3Return an iterator over the values of a dictionary.z?Return an iterator over the (key, value) pairs of a dictionary.zBReturn an iterator over the (key, [values]) pairs of a dictionary.cCs
|jd�S)Nzlatin-1)�encode)�sr
r
r�bksr�cCs|S)Nr
)r�r
r
r�unsr�z>B�assertCountEqualZassertRaisesRegexpZassertRegexpMatches�assertRaisesRegex�assertRegexcCs|S)Nr
)r�r
r
rr��scCst|jdd�d�S)Nz\\z\\\\Zunicode_escape)�unicode�replace)r�r
r
rr��scCst|d�S)Nr)�ord)Zbsr
r
r�byte2int�sr�cCst||�S)N)r�)Zbuf�ir
r
r�
indexbytes�sr�ZassertItemsEqualzByte literalzText literalcOst|t�||�S)N)r,�_assertCountEqual)r�args�kwargsr
r
rr��scOst|t�||�S)N)r,�_assertRaisesRegex)rr�r�r
r
rr��scOst|t�||�S)N)r,�_assertRegex)rr�r�r
r
rr��s�execcCs*|dkr|�}|j|k	r"|j|��|�dS)N)�
__traceback__�with_traceback)r#r/�tbr
r
r�reraise�s


r�cCsB|dkr*tjd�}|j}|dkr&|j}~n|dkr6|}td�dS)zExecute code in a namespace.Nrzexec _code_ in _globs_, _locs_)r�	_getframe�	f_globals�f_localsr�)Z_code_Z_globs_Z_locs_�framer
r
r�exec_�s
r�z9def reraise(tp, value, tb=None):
    raise tp, value, tb
zrdef raise_from(value, from_value):
    if from_value is None:
        raise value
    raise value from from_value
zCdef raise_from(value, from_value):
    raise value from from_value
cCs|�dS)Nr
)r/Z
from_valuer
r
r�
raise_from�sr��printc
s6|jdtj���dkrdS�fdd�}d}|jdd�}|dk	r`t|t�rNd}nt|t�s`td��|jd	d�}|dk	r�t|t�r�d}nt|t�s�td
��|r�td��|s�x|D]}t|t�r�d}Pq�W|r�td�}td
�}nd}d
}|dkr�|}|dk�r�|}x,t|�D] \}	}|	�r||�||��qW||�dS)z4The new-style print function for Python 2.4 and 2.5.�fileNcsdt|t�st|�}t�t�rVt|t�rV�jdk	rVt�dd�}|dkrHd}|j�j|�}�j|�dS)N�errors�strict)	rD�
basestring�strr�r��encodingr,r��write)�datar�)�fpr
rr��s



zprint_.<locals>.writeF�sepTzsep must be None or a string�endzend must be None or a stringz$invalid keyword arguments to print()�
� )�popr�stdoutrDr�r��	TypeError�	enumerate)
r�r�r�Zwant_unicoder�r��arg�newlineZspacer�r
)r�r�print_�sL







r�cOs<|jdtj�}|jdd�}t||�|r8|dk	r8|j�dS)Nr��flushF)�getrr�r��_printr�)r�r�r�r�r
r
rr�s

zReraise an exception.cs���fdd�}|S)Ncstj����|�}�|_|S)N)r^�wraps�__wrapped__)�f)�assigned�updated�wrappedr
r�wrapperszwraps.<locals>.wrapperr
)r�r�r�r�r
)r�r�r�rr�sr�cs&G��fdd�d��}tj|dfi�S)z%Create a base class with a metaclass.cseZdZ��fdd�ZdS)z!with_metaclass.<locals>.metaclasscs�|�|�S)Nr
)r�rZ
this_basesr�)�bases�metar
r�__new__'sz)with_metaclass.<locals>.metaclass.__new__N)rrrr�r
)r�r�r
r�	metaclass%sr�Ztemporary_class)r�r�)r�r�r�r
)r�r�r�with_metaclass sr�cs�fdd�}|S)z6Class decorator for creating a class with a metaclass.csl|jj�}|jd�}|dk	rDt|t�r,|g}x|D]}|j|�q2W|jdd�|jdd��|j|j|�S)N�	__slots__rz�__weakref__)rz�copyr�rDr�r�r�	__bases__)r�Z	orig_vars�slotsZ	slots_var)r�r
rr�.s



zadd_metaclass.<locals>.wrapperr
)r�r�r
)r�r�
add_metaclass,sr�cCs2tr.d|jkrtd|j��|j|_dd�|_|S)a
    A decorator that defines __unicode__ and __str__ methods under Python 2.
    Under Python 3 it does nothing.

    To support Python 2 and 3 with a single code base, define a __str__ method
    returning text and apply this decorator to the class.
    �__str__zY@python_2_unicode_compatible cannot be applied to %s because it doesn't define __str__().cSs|j�jd�S)Nzutf-8)�__unicode__r�)rr
r
r�<lambda>Jsz-python_2_unicode_compatible.<locals>.<lambda>)�PY2rz�
ValueErrorrr�r�)r�r
r
r�python_2_unicode_compatible<s


r��__spec__)rrli���li���ll����)N)NN)rr)rr)rr)rr)�rZ
__future__rr^rP�operatorrr��
__author__�__version__�version_infor�r(ZPY34r�Zstring_types�intZ
integer_typesr�Zclass_typesZ	text_type�bytesZbinary_type�maxsizeZMAXSIZEr�ZlongZ	ClassTyper��platform�
startswith�objectr	�len�
OverflowErrorrrrr&�
ModuleTyper2r7r9rrxrLr5r-rrrDr=rmrnZ_urllib_parse_moved_attributesroZ_urllib_error_moved_attributesrpZ _urllib_request_moved_attributesrqZ!_urllib_response_moved_attributesrrZ$_urllib_robotparser_moved_attributesrsryr{Z
_meth_funcZ
_meth_selfZ
_func_closureZ
_func_codeZ_func_defaultsZ
_func_globalsr�r��	NameErrorr�r�r�r�r�r��
attrgetterZget_method_functionZget_method_selfZget_function_closureZget_function_codeZget_function_defaultsZget_function_globalsr�r�r�r��methodcallerr�r�r�r�r��chrZunichr�struct�Struct�packZint2byte�
itemgetterr��getitemr�r�Z	iterbytesrMrN�BytesIOr�r�r��partialrVr�r�r�r�r,rQr�r�r�r�r��WRAPPER_ASSIGNMENTS�WRAPPER_UPDATESr�r�r�r�rG�__package__�globalsr�r��submodule_search_locations�	meta_pathr�r�Zimporter�appendr
r
r
r�<module>s�

>












































































































5_vendor/urllib3/packages/__pycache__/six.cpython-36.pyc000064400000057532151733136360017017 0ustar003

�Pf�u�I@srdZddlmZddlZddlZddlZddlZddlZdZdZ	ej
ddkZej
ddkZej
dd��dzkZ
er�efZefZefZeZeZejZn�efZeefZeejfZeZeZejjd	�r�e�d|�ZnLGdd
�d
e�Z ye!e ��Wn e"k
�re�d~�ZYnXe�d��Z[ dd�Z#dd�Z$Gdd�de�Z%Gdd�de%�Z&Gdd�dej'�Z(Gdd�de%�Z)Gdd�de�Z*e*e+�Z,Gdd�de(�Z-e)ddd d!�e)d"d#d$d%d"�e)d&d#d#d'd&�e)d(d)d$d*d(�e)d+d)d,�e)d-d#d$d.d-�e)d/d0d0d1d/�e)d2d0d0d/d2�e)d3d)d$d4d3�e)d5d)e
�rd6nd7d8�e)d9d)d:�e)d;d<d=d>�e)d!d!d �e)d?d?d@�e)dAdAd@�e)dBdBd@�e)d4d)d$d4d3�e)dCd#d$dDdC�e)dEd#d#dFdE�e&d$d)�e&dGdH�e&dIdJ�e&dKdLdM�e&dNdOdN�e&dPdQdR�e&dSdTdU�e&dVdWdX�e&dYdZd[�e&d\d]d^�e&d_d`da�e&dbdcdd�e&dedfdg�e&dhdidj�e&dkdkdl�e&dmdmdl�e&dndndl�e&dododp�e&dqdr�e&dsdt�e&dudv�e&dwdxdw�e&dydz�e&d{d|d}�e&d~dd��e&d�d�d��e&d�d�d��e&d�d�d��e&d�d�d��e&d�d�d��e&d�d�d��e&d�d�d��e&d�d�d��e&d�d�d��e&d�d�d��e&d�d�d��e&d�d�d��e&d�e+d�d��e&d�e+d�d��e&d�e+d�e+d��e&d�d�d��e&d�d�d��e&d�d�d��g>Z.ejd�k�rZe.e&d�d��g7Z.x:e.D]2Z/e0e-e/j1e/�e2e/e&��r`e,j3e/d�e/j1��q`W[/e.e-_.e-e+d��Z4e,j3e4d��Gd�d��d�e(�Z5e)d�d�d��e)d�d�d��e)d�d�d��e)d�d�d��e)d�d�d��e)d�d�d��e)d�d�d��e)d�d�d��e)d�d�d��e)d�d�d��e)d>d�d��e)d�d�d��e)d�d�d��e)d�d�d��e)d�d�d��e)d�d�d��e)d�d�d��e)d�d�d��e)d�d�d��e)d�d�d��e)d�d�d��e)d�d�d��e)d�d�d��gZ6xe6D]Z/e0e5e/j1e/��q�W[/e6e5_.e,j3e5e+d��d�dӃGd�dՄd�e(�Z7e)d�d�d��e)d�d�d��e)d�d�d��gZ8xe8D]Z/e0e7e/j1e/��q$W[/e8e7_.e,j3e7e+d��d�d܃Gd�dބd�e(�Z9e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)d�d�d�e)�dd�d�g!Z:xe:D]Z/e0e9e/j1e/��q�W[/e:e9_.e,j3e9e+�d��d�d�G�d�d��de(�Z;e)�dd��d�e)�dd��d�e)�d	d��d�e)�d
d��d�gZ<xe<D]Z/e0e;e/j1e/��qTW[/e<e;_.e,j3e;e+�d��d�d
�G�d�d��de(�Z=e)�dd�d��gZ>xe>D]Z/e0e=e/j1e/��q�W[/e>e=_.e,j3e=e+�d��d�d�G�d�d��dej'�Z?e,j3e?e+d���d��d�d�Z@�d�d�ZAe�	rj�dZB�dZC�dZD�dZE�dZF�d ZGn$�d!ZB�d"ZC�d#ZD�d$ZE�d%ZF�d&ZGyeHZIWn"eJk
�	r��d'�d(�ZIYnXeIZHyeKZKWn"eJk
�	r��d)�d*�ZKYnXe�
r�d+�d,�ZLejMZN�d-�d.�ZOeZPn>�d/�d,�ZL�d0�d1�ZN�d2�d.�ZOG�d3�d4��d4e�ZPeKZKe#eL�d5�ejQeB�ZRejQeC�ZSejQeD�ZTejQeE�ZUejQeF�ZVejQeG�ZWe�
r��d6�d7�ZX�d8�d9�ZY�d:�d;�ZZ�d<�d=�Z[ej\�d>�Z]ej\�d?�Z^ej\�d@�Z_nT�dA�d7�ZX�dB�d9�ZY�dC�d;�ZZ�dD�d=�Z[ej\�dE�Z]ej\�dF�Z^ej\�dG�Z_e#eX�dH�e#eY�dI�e#eZ�dJ�e#e[�dK�e�r�dL�dM�Z`�dN�dO�ZaebZcddldZdedje�dP�jfZg[dejhd�ZiejjZkelZmddlnZnenjoZoenjpZp�dQZqej
d
d
k�r�dRZr�dSZsn�dTZr�dUZsnj�dV�dM�Z`�dW�dO�ZaecZcebZg�dX�dY�Zi�dZ�d[�Zkejtejuev�ZmddloZoeojoZoZp�d\Zq�dRZr�dSZse#e`�d]�e#ea�d^��d_�dQ�Zw�d`�dT�Zx�da�dU�Zye�r�eze4j{�db�Z|�d��dc�dd�Z}n�d��de�df�Z|e|�dg�ej
dd��d�k�
re|�dh�n.ej
dd��d�k�
r8e|�di�n�dj�dk�Z~eze4j{�dld�Zedk�
rj�dm�dn�Zej
dd��d�k�
r�eZ��do�dn�Ze#e}�dp�ej
dd��d�k�
r�ej�ej�f�dq�dr�Z�nej�Z��ds�dt�Z��du�dv�Z��dw�dx�Z�gZ�e+Z�e��j��dy�dk	�rge�_�ej��rbx>e�ej��D]0\Z�Z�ee��j+dk�r*e�j1e+k�r*ej�e�=P�q*W[�[�ej�j�e,�dS(�z6Utilities for writing code that runs on Python 2 and 3�)�absolute_importNz'Benjamin Peterson <benjamin@python.org>z1.10.0����java��c@seZdZdd�ZdS)�XcCsdS)Nrrl�)�selfr
r
�/usr/lib/python3.6/six.py�__len__>sz	X.__len__N)�__name__�
__module__�__qualname__r
r
r
r
rr	<sr	�?cCs
||_dS)z Add documentation to a function.N)�__doc__)�func�docr
r
r�_add_docKsrcCst|�tj|S)z7Import module, returning the module after the last dot.)�
__import__�sys�modules)�namer
r
r�_import_modulePsrc@seZdZdd�Zdd�ZdS)�
_LazyDescrcCs
||_dS)N)r)rrr
r
r�__init__Xsz_LazyDescr.__init__cCsB|j�}t||j|�yt|j|j�Wntk
r<YnX|S)N)�_resolve�setattrr�delattr�	__class__�AttributeError)r�obj�tp�resultr
r
r�__get__[sz_LazyDescr.__get__N)rrrrr%r
r
r
rrVsrcs.eZdZd�fdd�	Zdd�Zdd�Z�ZS)	�MovedModuleNcs2tt|�j|�tr(|dkr |}||_n||_dS)N)�superr&r�PY3�mod)rr�old�new)r r
rriszMovedModule.__init__cCs
t|j�S)N)rr))rr
r
rrrszMovedModule._resolvecCs"|j�}t||�}t|||�|S)N)r�getattrr)r�attr�_module�valuer
r
r�__getattr__us
zMovedModule.__getattr__)N)rrrrrr0�
__classcell__r
r
)r rr&gs	r&cs(eZdZ�fdd�Zdd�ZgZ�ZS)�_LazyModulecstt|�j|�|jj|_dS)N)r'r2rr r)rr)r r
rr~sz_LazyModule.__init__cCs ddg}|dd�|jD�7}|S)NrrcSsg|]
}|j�qSr
)r)�.0r-r
r
r�
<listcomp>�sz'_LazyModule.__dir__.<locals>.<listcomp>)�_moved_attributes)rZattrsr
r
r�__dir__�sz_LazyModule.__dir__)rrrrr6r5r1r
r
)r rr2|sr2cs&eZdZd�fdd�	Zdd�Z�ZS)�MovedAttributeNcsdtt|�j|�trH|dkr |}||_|dkr@|dkr<|}n|}||_n||_|dkrZ|}||_dS)N)r'r7rr(r)r-)rrZold_modZnew_modZold_attrZnew_attr)r r
rr�szMovedAttribute.__init__cCst|j�}t||j�S)N)rr)r,r-)r�moduler
r
rr�s
zMovedAttribute._resolve)NN)rrrrrr1r
r
)r rr7�sr7c@sVeZdZdZdd�Zdd�Zdd�Zdd	d
�Zdd�Zd
d�Z	dd�Z
dd�ZeZdS)�_SixMetaPathImporterz�
    A meta path importer to import six.moves and its submodules.

    This class implements a PEP302 finder and loader. It should be compatible
    with Python 2.5 and all existing versions of Python3
    cCs||_i|_dS)N)r�
known_modules)rZsix_module_namer
r
rr�sz_SixMetaPathImporter.__init__cGs&x |D]}||j|jd|<qWdS)N�.)r:r)rr)Z	fullnames�fullnamer
r
r�_add_module�s
z _SixMetaPathImporter._add_modulecCs|j|jd|S)Nr;)r:r)rr<r
r
r�_get_module�sz _SixMetaPathImporter._get_moduleNcCs||jkr|SdS)N)r:)rr<�pathr
r
r�find_module�s
z _SixMetaPathImporter.find_modulecCs0y
|j|Stk
r*td|��YnXdS)Nz!This loader does not know module )r:�KeyError�ImportError)rr<r
r
rZ__get_module�s
z!_SixMetaPathImporter.__get_modulecCsRy
tj|Stk
rYnX|j|�}t|t�r>|j�}n||_|tj|<|S)N)rrrA� _SixMetaPathImporter__get_module�
isinstancer&r�
__loader__)rr<r)r
r
r�load_module�s




z _SixMetaPathImporter.load_modulecCst|j|�d�S)z�
        Return true, if the named module is a package.

        We need this method to get correct spec objects with
        Python 3.4 (see PEP451)
        �__path__)�hasattrrC)rr<r
r
r�
is_package�sz_SixMetaPathImporter.is_packagecCs|j|�dS)z;Return None

        Required, if is_package is implementedN)rC)rr<r
r
r�get_code�s
z_SixMetaPathImporter.get_code)N)
rrrrrr=r>r@rCrFrIrJ�
get_sourcer
r
r
rr9�s
	r9c@seZdZdZgZdS)�_MovedItemszLazy loading of moved objectsN)rrrrrGr
r
r
rrL�srLZ	cStringIO�io�StringIO�filter�	itertools�builtinsZifilter�filterfalseZifilterfalse�inputZ__builtin__Z	raw_input�internr�map�imap�getcwd�osZgetcwdu�getcwdb�rangeZxrangeZ
reload_module�	importlibZimp�reload�reduce�	functoolsZshlex_quoteZpipesZshlexZquote�UserDict�collections�UserList�
UserString�zipZizip�zip_longestZizip_longestZconfigparserZConfigParser�copyregZcopy_regZdbm_gnuZgdbmzdbm.gnuZ
_dummy_threadZdummy_threadZhttp_cookiejarZ	cookielibzhttp.cookiejarZhttp_cookiesZCookiezhttp.cookiesZ
html_entitiesZhtmlentitydefsz
html.entitiesZhtml_parserZ
HTMLParserzhtml.parserZhttp_clientZhttplibzhttp.clientZemail_mime_multipartzemail.MIMEMultipartzemail.mime.multipartZemail_mime_nonmultipartzemail.MIMENonMultipartzemail.mime.nonmultipartZemail_mime_textzemail.MIMETextzemail.mime.textZemail_mime_basezemail.MIMEBasezemail.mime.baseZBaseHTTPServerzhttp.serverZ
CGIHTTPServerZSimpleHTTPServerZcPickle�pickleZqueueZQueue�reprlib�reprZsocketserverZSocketServer�_threadZthreadZtkinterZTkinterZtkinter_dialogZDialogztkinter.dialogZtkinter_filedialogZ
FileDialogztkinter.filedialogZtkinter_scrolledtextZScrolledTextztkinter.scrolledtextZtkinter_simpledialogZSimpleDialogztkinter.simpledialogZtkinter_tixZTixztkinter.tixZtkinter_ttkZttkztkinter.ttkZtkinter_constantsZTkconstantsztkinter.constantsZtkinter_dndZTkdndztkinter.dndZtkinter_colorchooserZtkColorChooserztkinter.colorchooserZtkinter_commondialogZtkCommonDialogztkinter.commondialogZtkinter_tkfiledialogZtkFileDialogZtkinter_fontZtkFontztkinter.fontZtkinter_messageboxZtkMessageBoxztkinter.messageboxZtkinter_tksimpledialogZtkSimpleDialogZurllib_parsez.moves.urllib_parsezurllib.parseZurllib_errorz.moves.urllib_errorzurllib.errorZurllibz
.moves.urllibZurllib_robotparser�robotparserzurllib.robotparserZ
xmlrpc_clientZ	xmlrpclibz
xmlrpc.clientZ
xmlrpc_serverZSimpleXMLRPCServerz
xmlrpc.serverZwin32�winreg�_winregzmoves.z.moves�movesc@seZdZdZdS)�Module_six_moves_urllib_parsez7Lazy loading of moved objects in six.moves.urllib_parseN)rrrrr
r
r
rrn@srnZParseResultZurlparseZSplitResultZparse_qsZ	parse_qslZ	urldefragZurljoinZurlsplitZ
urlunparseZ
urlunsplitZ
quote_plusZunquoteZunquote_plusZ	urlencodeZ
splitqueryZsplittagZ	splituserZ
uses_fragmentZuses_netlocZuses_paramsZ
uses_queryZ
uses_relativezmoves.urllib_parsezmoves.urllib.parsec@seZdZdZdS)�Module_six_moves_urllib_errorz7Lazy loading of moved objects in six.moves.urllib_errorN)rrrrr
r
r
rrohsroZURLErrorZurllib2Z	HTTPErrorZContentTooShortErrorz.moves.urllib.errorzmoves.urllib_errorzmoves.urllib.errorc@seZdZdZdS)�Module_six_moves_urllib_requestz9Lazy loading of moved objects in six.moves.urllib_requestN)rrrrr
r
r
rrp|srpZurlopenzurllib.requestZinstall_openerZbuild_openerZpathname2urlZurl2pathnameZ
getproxiesZRequestZOpenerDirectorZHTTPDefaultErrorHandlerZHTTPRedirectHandlerZHTTPCookieProcessorZProxyHandlerZBaseHandlerZHTTPPasswordMgrZHTTPPasswordMgrWithDefaultRealmZAbstractBasicAuthHandlerZHTTPBasicAuthHandlerZProxyBasicAuthHandlerZAbstractDigestAuthHandlerZHTTPDigestAuthHandlerZProxyDigestAuthHandlerZHTTPHandlerZHTTPSHandlerZFileHandlerZ
FTPHandlerZCacheFTPHandlerZUnknownHandlerZHTTPErrorProcessorZurlretrieveZ
urlcleanupZ	URLopenerZFancyURLopenerZproxy_bypassz.moves.urllib.requestzmoves.urllib_requestzmoves.urllib.requestc@seZdZdZdS)� Module_six_moves_urllib_responsez:Lazy loading of moved objects in six.moves.urllib_responseN)rrrrr
r
r
rrq�srqZaddbasezurllib.responseZaddclosehookZaddinfoZ
addinfourlz.moves.urllib.responsezmoves.urllib_responsezmoves.urllib.responsec@seZdZdZdS)�#Module_six_moves_urllib_robotparserz=Lazy loading of moved objects in six.moves.urllib_robotparserN)rrrrr
r
r
rrr�srrZRobotFileParserz.moves.urllib.robotparserzmoves.urllib_robotparserzmoves.urllib.robotparserc@sNeZdZdZgZejd�Zejd�Zejd�Z	ejd�Z
ejd�Zdd�Zd	S)
�Module_six_moves_urllibzICreate a six.moves.urllib namespace that resembles the Python 3 namespacezmoves.urllib_parsezmoves.urllib_errorzmoves.urllib_requestzmoves.urllib_responsezmoves.urllib_robotparsercCsdddddgS)N�parse�error�request�responserjr
)rr
r
rr6�szModule_six_moves_urllib.__dir__N)
rrrrrG�	_importerr>rtrurvrwrjr6r
r
r
rrs�s




rszmoves.urllibcCstt|j|�dS)zAdd an item to six.moves.N)rrLr)Zmover
r
r�add_move�srycCsXytt|�WnDtk
rRytj|=Wn"tk
rLtd|f��YnXYnXdS)zRemove item from six.moves.zno such move, %rN)rrLr!rm�__dict__rA)rr
r
r�remove_move�sr{�__func__�__self__�__closure__�__code__�__defaults__�__globals__�im_funcZim_selfZfunc_closureZ	func_codeZ
func_defaultsZfunc_globalscCs|j�S)N)�next)�itr
r
r�advance_iteratorsr�cCstdd�t|�jD��S)Ncss|]}d|jkVqdS)�__call__N)rz)r3�klassr
r
r�	<genexpr>szcallable.<locals>.<genexpr>)�any�type�__mro__)r"r
r
r�callablesr�cCs|S)Nr
)�unboundr
r
r�get_unbound_functionsr�cCs|S)Nr
)r�clsr
r
r�create_unbound_methodsr�cCs|jS)N)r�)r�r
r
rr�"scCstj|||j�S)N)�types�
MethodTyper )rr"r
r
r�create_bound_method%sr�cCstj|d|�S)N)r�r�)rr�r
r
rr�(sc@seZdZdd�ZdS)�IteratorcCst|�j|�S)N)r��__next__)rr
r
rr�-sz
Iterator.nextN)rrrr�r
r
r
rr�+sr�z3Get the function out of a possibly unbound functioncKst|jf|��S)N)�iter�keys)�d�kwr
r
r�iterkeys>sr�cKst|jf|��S)N)r��values)r�r�r
r
r�
itervaluesAsr�cKst|jf|��S)N)r��items)r�r�r
r
r�	iteritemsDsr�cKst|jf|��S)N)r�Zlists)r�r�r
r
r�	iterlistsGsr�r�r�r�cKs|jf|�S)N)r�)r�r�r
r
rr�PscKs|jf|�S)N)r�)r�r�r
r
rr�SscKs|jf|�S)N)r�)r�r�r
r
rr�VscKs|jf|�S)N)r�)r�r�r
r
rr�Ys�viewkeys�
viewvalues�	viewitemsz1Return an iterator over the keys of a dictionary.z3Return an iterator over the values of a dictionary.z?Return an iterator over the (key, value) pairs of a dictionary.zBReturn an iterator over the (key, [values]) pairs of a dictionary.cCs
|jd�S)Nzlatin-1)�encode)�sr
r
r�bksr�cCs|S)Nr
)r�r
r
r�unsr�z>B�assertCountEqualZassertRaisesRegexpZassertRegexpMatches�assertRaisesRegex�assertRegexcCs|S)Nr
)r�r
r
rr��scCst|jdd�d�S)Nz\\z\\\\Zunicode_escape)�unicode�replace)r�r
r
rr��scCst|d�S)Nr)�ord)Zbsr
r
r�byte2int�sr�cCst||�S)N)r�)Zbuf�ir
r
r�
indexbytes�sr�ZassertItemsEqualzByte literalzText literalcOst|t�||�S)N)r,�_assertCountEqual)r�args�kwargsr
r
rr��scOst|t�||�S)N)r,�_assertRaisesRegex)rr�r�r
r
rr��scOst|t�||�S)N)r,�_assertRegex)rr�r�r
r
rr��s�execcCs*|dkr|�}|j|k	r"|j|��|�dS)N)�
__traceback__�with_traceback)r#r/�tbr
r
r�reraise�s


r�cCsB|dkr*tjd�}|j}|dkr&|j}~n|dkr6|}td�dS)zExecute code in a namespace.Nrzexec _code_ in _globs_, _locs_)r�	_getframe�	f_globals�f_localsr�)Z_code_Z_globs_Z_locs_�framer
r
r�exec_�s
r�z9def reraise(tp, value, tb=None):
    raise tp, value, tb
zrdef raise_from(value, from_value):
    if from_value is None:
        raise value
    raise value from from_value
zCdef raise_from(value, from_value):
    raise value from from_value
cCs|�dS)Nr
)r/Z
from_valuer
r
r�
raise_from�sr��printc
s6|jdtj���dkrdS�fdd�}d}|jdd�}|dk	r`t|t�rNd}nt|t�s`td��|jd	d�}|dk	r�t|t�r�d}nt|t�s�td
��|r�td��|s�x|D]}t|t�r�d}Pq�W|r�td�}td
�}nd}d
}|dkr�|}|dk�r�|}x,t|�D] \}	}|	�r||�||��qW||�dS)z4The new-style print function for Python 2.4 and 2.5.�fileNcsdt|t�st|�}t�t�rVt|t�rV�jdk	rVt�dd�}|dkrHd}|j�j|�}�j|�dS)N�errors�strict)	rD�
basestring�strr�r��encodingr,r��write)�datar�)�fpr
rr��s



zprint_.<locals>.writeF�sepTzsep must be None or a string�endzend must be None or a stringz$invalid keyword arguments to print()�
� )�popr�stdoutrDr�r��	TypeError�	enumerate)
r�r�r�Zwant_unicoder�r��arg�newlineZspacer�r
)r�r�print_�sL







r�cOs<|jdtj�}|jdd�}t||�|r8|dk	r8|j�dS)Nr��flushF)�getrr�r��_printr�)r�r�r�r�r
r
rr�s

zReraise an exception.cs���fdd�}|S)Ncstj����|�}�|_|S)N)r^�wraps�__wrapped__)�f)�assigned�updated�wrappedr
r�wrapperszwraps.<locals>.wrapperr
)r�r�r�r�r
)r�r�r�rr�sr�cs&G��fdd�d��}tj|dfi�S)z%Create a base class with a metaclass.cseZdZ��fdd�ZdS)z!with_metaclass.<locals>.metaclasscs�|�|�S)Nr
)r�rZ
this_basesr�)�bases�metar
r�__new__'sz)with_metaclass.<locals>.metaclass.__new__N)rrrr�r
)r�r�r
r�	metaclass%sr�Ztemporary_class)r�r�)r�r�r�r
)r�r�r�with_metaclass sr�cs�fdd�}|S)z6Class decorator for creating a class with a metaclass.csl|jj�}|jd�}|dk	rDt|t�r,|g}x|D]}|j|�q2W|jdd�|jdd��|j|j|�S)N�	__slots__rz�__weakref__)rz�copyr�rDr�r�r�	__bases__)r�Z	orig_vars�slotsZ	slots_var)r�r
rr�.s



zadd_metaclass.<locals>.wrapperr
)r�r�r
)r�r�
add_metaclass,sr�cCs2tr.d|jkrtd|j��|j|_dd�|_|S)a
    A decorator that defines __unicode__ and __str__ methods under Python 2.
    Under Python 3 it does nothing.

    To support Python 2 and 3 with a single code base, define a __str__ method
    returning text and apply this decorator to the class.
    �__str__zY@python_2_unicode_compatible cannot be applied to %s because it doesn't define __str__().cSs|j�jd�S)Nzutf-8)�__unicode__r�)rr
r
r�<lambda>Jsz-python_2_unicode_compatible.<locals>.<lambda>)�PY2rz�
ValueErrorrr�r�)r�r
r
r�python_2_unicode_compatible<s


r��__spec__)rrli���li���ll����)N)NN)rr)rr)rr)rr)�rZ
__future__rr^rP�operatorrr��
__author__�__version__�version_infor�r(ZPY34r�Zstring_types�intZ
integer_typesr�Zclass_typesZ	text_type�bytesZbinary_type�maxsizeZMAXSIZEr�ZlongZ	ClassTyper��platform�
startswith�objectr	�len�
OverflowErrorrrrr&�
ModuleTyper2r7r9rrxrLr5r-rrrDr=rmrnZ_urllib_parse_moved_attributesroZ_urllib_error_moved_attributesrpZ _urllib_request_moved_attributesrqZ!_urllib_response_moved_attributesrrZ$_urllib_robotparser_moved_attributesrsryr{Z
_meth_funcZ
_meth_selfZ
_func_closureZ
_func_codeZ_func_defaultsZ
_func_globalsr�r��	NameErrorr�r�r�r�r�r��
attrgetterZget_method_functionZget_method_selfZget_function_closureZget_function_codeZget_function_defaultsZget_function_globalsr�r�r�r��methodcallerr�r�r�r�r��chrZunichr�struct�Struct�packZint2byte�
itemgetterr��getitemr�r�Z	iterbytesrMrN�BytesIOr�r�r��partialrVr�r�r�r�r,rQr�r�r�r�r��WRAPPER_ASSIGNMENTS�WRAPPER_UPDATESr�r�r�r�rG�__package__�globalsr�r��submodule_search_locations�	meta_pathr�r�Zimporter�appendr
r
r
r�<module>s�

>












































































































5_vendor/urllib3/packages/__pycache__/ordered_dict.cpython-36.opt-1.pyc000064400000020173151733136360021571 0ustar003

�Pf�"�@styddlmZWn ek
r0ddlmZYnXyddlmZmZmZWnek
r^YnXGdd�de	�Z
dS)�)�	get_ident)�KeysView�
ValuesView�	ItemsViewc@seZdZdZdd�Zejfdd�Zejfdd�Zdd	�Zd
d�Z	dd
�Z
d6dd�Zdd�Zdd�Z
dd�Zdd�Zdd�Zdd�Zdd�ZeZe�Zefdd �Zd7d"d#�Zifd$d%�Zd&d'�Zd(d)�Zed8d*d+��Zd,d-�Zd.d/�Zd0d1�Zd2d3�Z d4d5�Z!d!S)9�OrderedDictz)Dictionary that remembers insertion ordercOsnt|�dkrtdt|���y
|jWn6tk
r\g|_}||dg|dd�<i|_YnX|j||�dS)z�Initialize an ordered dictionary.  Signature is the same as for
        regular dictionaries, but keyword arguments are not recommended
        because their insertion order is arbitrary.

        �z$expected at most 1 arguments, got %dN)�len�	TypeError�_OrderedDict__root�AttributeError�_OrderedDict__map�_OrderedDict__update)�self�args�kwds�root�r�"/usr/lib/python3.6/ordered_dict.py�__init__s

zOrderedDict.__init__cCsF||kr6|j}|d}|||g|d<|d<|j|<||||�dS)z!od.__setitem__(i, y) <==> od[i]=yrrN)r
r)r�key�valueZdict_setitemr�lastrrr�__setitem__,s
 zOrderedDict.__setitem__cCs0|||�|jj|�\}}}||d<||d<dS)z od.__delitem__(y) <==> del od[y]rrN)r�pop)rrZdict_delitem�	link_prev�	link_nextrrr�__delitem__6s
zOrderedDict.__delitem__ccs2|j}|d}x||k	r,|dV|d}qWdS)zod.__iter__() <==> iter(od)r�N)r
)rr�currrrr�__iter__?s


zOrderedDict.__iter__ccs2|j}|d}x||k	r,|dV|d}qWdS)z#od.__reversed__() <==> reversed(od)rrN)r
)rrrrrr�__reversed__Gs


zOrderedDict.__reversed__cCshyDx|jj�D]}|dd�=qW|j}||dg|dd�<|jj�Wntk
rXYnXtj|�dS)z.od.clear() -> None.  Remove all items from od.N)r�
itervaluesr
�clearr�dict)rZnoderrrrr"OszOrderedDict.clearTcCs||std��|j}|r8|d}|d}||d<||d<n |d}|d}||d<||d<|d}|j|=tj||�}||fS)z�od.popitem() -> (k, v), return and remove a (key, value) pair.
        Pairs are returned in LIFO order if last is true or FIFO order if false.

        zdictionary is emptyrrr)�KeyErrorr
rr#r)rrr�linkrrrrrrr�popitem[s 
zOrderedDict.popitemcCst|�S)zod.keys() -> list of keys in od)�list)rrrr�keystszOrderedDict.keyscs�fdd��D�S)z#od.values() -> list of values in odcsg|]}�|�qSrr)�.0r)rrr�
<listcomp>zsz&OrderedDict.values.<locals>.<listcomp>r)rr)rr�valuesxszOrderedDict.valuescs�fdd��D�S)z.od.items() -> list of (key, value) pairs in odcsg|]}|�|f�qSrr)r)r)rrrr*~sz%OrderedDict.items.<locals>.<listcomp>r)rr)rr�items|szOrderedDict.itemscCst|�S)z0od.iterkeys() -> an iterator over the keys in od)�iter)rrrr�iterkeys�szOrderedDict.iterkeysccsx|D]}||VqWdS)z2od.itervalues -> an iterator over the values in odNr)r�krrrr!�s
zOrderedDict.itervaluesccs x|D]}|||fVqWdS)z=od.iteritems -> an iterator over the (key, value) items in odNr)rr/rrr�	iteritems�s
zOrderedDict.iteritemscOs�t|�dkr tdt|�f��n|s,td��|d}f}t|�dkrL|d}t|t�rrx^|D]}||||<q\WnDt|d�r�x8|j�D]}||||<q�Wnx|D]\}}|||<q�Wx|j�D]\}}|||<q�WdS)a�od.update(E, **F) -> None.  Update od from dict/iterable E and F.

        If E is a dict instance, does:           for k in E: od[k] = E[k]
        If E has a .keys() method, does:         for k in E.keys(): od[k] = E[k]
        Or if E is an iterable of items, does:   for k, v in E: od[k] = v
        In either case, this is followed by:     for k, v in F.items(): od[k] = v

        rz8update() takes at most 2 positional arguments (%d given)z,update() takes at least 1 argument (0 given)rrr(N)rr	�
isinstancer#�hasattrr(r,)rrr�otherrrrrr�update�s&	


zOrderedDict.updatecCs0||kr||}||=|S||jkr,t|��|S)z�od.pop(k[,d]) -> v, remove specified key and return the corresponding value.
        If key is not found, d is returned if given, otherwise KeyError is raised.

        )�_OrderedDict__markerr$)rr�default�resultrrrr�s
zOrderedDict.popNcCs||kr||S|||<|S)zDod.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in odr)rrr6rrr�
setdefault�szOrderedDict.setdefaultcCsVt|�t�f}||krdSd||<z&|s6d|jjfSd|jj|j�fS||=XdS)zod.__repr__() <==> repr(od)z...rz%s()z%s(%r)N)�id�
_get_ident�	__class__�__name__r,)rZ
_repr_runningZcall_keyrrr�__repr__�szOrderedDict.__repr__cs\�fdd��D�}t��j�}xtt��D]}|j|d�q*W|rP�j|f|fS�j|ffS)z%Return state information for picklingcsg|]}|�|g�qSrr)r)r/)rrrr*�sz*OrderedDict.__reduce__.<locals>.<listcomp>N)�vars�copyrrr;)rr,Z	inst_dictr/r)rr�
__reduce__�szOrderedDict.__reduce__cCs
|j|�S)z!od.copy() -> a shallow copy of od)r;)rrrrr?�szOrderedDict.copycCs |�}x|D]}|||<qW|S)z�OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S
        and values equal to v (which defaults to None).

        r)�cls�iterabler�drrrr�fromkeys�s
zOrderedDict.fromkeyscCs6t|t�r*t|�t|�ko(|j�|j�kStj||�S)z�od.__eq__(y) <==> od==y.  Comparison to another OD is order-sensitive
        while comparison to a regular mapping is order-insensitive.

        )r1rrr,r#�__eq__)rr3rrrrE�s
 zOrderedDict.__eq__cCs
||kS)Nr)rr3rrr�__ne__�szOrderedDict.__ne__cCst|�S)z@od.viewkeys() -> a set-like object providing a view on od's keys)r)rrrr�viewkeys�szOrderedDict.viewkeyscCst|�S)z<od.viewvalues() -> an object providing a view on od's values)r)rrrr�
viewvalues�szOrderedDict.viewvaluescCst|�S)zBod.viewitems() -> a set-like object providing a view on od's items)r)rrrr�	viewitemsszOrderedDict.viewitems)T)N)N)"r<�
__module__�__qualname__�__doc__rr#rrrr r"r&r(r+r,r.r!r0r4r
�objectr5rr8r=r@r?�classmethodrDrErFrGrHrIrrrrrs:
	




	rN)Zthreadrr:�ImportErrorZdummy_threadZ_abcollrrrr#rrrrr�<module>s_vendor/urllib3/packages/__pycache__/__init__.cpython-36.pyc000064400000000356151733136360017743 0ustar003

�Pfm�@s ddlmZddlmZdZdS)�)�absolute_import�)�ssl_match_hostnamerN)r)Z
__future__r�r�__all__�rr�/usr/lib/python3.6/__init__.py�<module>s_vendor/urllib3/packages/__pycache__/__init__.cpython-36.opt-1.pyc000064400000000356151733136360020702 0ustar003

�Pfm�@s ddlmZddlmZdZdS)�)�absolute_import�)�ssl_match_hostnamerN)r)Z
__future__r�r�__all__�rr�/usr/lib/python3.6/__init__.py�<module>s_vendor/urllib3/packages/ssl_match_hostname/__pycache__/__init__.cpython-36.pyc000064400000000740151733136360023613 0ustar003

�Pf��@s�ddlZy&ejd	kred��ddlmZmZWnNek
r|yddlmZmZWn$ek
rvddlmZmZYnXYnXd
ZdS)�N��zFallback to vendored code)�CertificateError�match_hostname�rr)rr)rr)	�sys�version_info�ImportErrorZsslrrZbackports.ssl_match_hostnameZ_implementation�__all__�rr�/usr/lib/python3.6/__init__.py�<module>s
_vendor/urllib3/packages/ssl_match_hostname/__pycache__/_implementation.cpython-36.opt-1.pyc000064400000006157151733136360026207 0ustar003

�PfF�@stdZddlZddlZyddlZWnek
r8dZYnXdZGdd�de�Zddd�Zd	d
�Z	dd�Z
d
d�ZdS)zJThe match_hostname() function from Python 3.3.3, essential when using SSL.�Nz3.5.0.1c@seZdZdS)�CertificateErrorN)�__name__�
__module__�__qualname__�rr�%/usr/lib/python3.6/_implementation.pyrsr�c
Cs�g}|sdS|jd�}|d}|dd�}|jd�}||krLtdt|���|s`|j�|j�kS|dkrt|jd�n>|jd	�s�|jd	�r�|jtj|��n|jtj|�j	d
d��x|D]}|jtj|��q�Wtj
dd
j|�dtj�}	|	j
|�S)zhMatching according to RFC 6125, section 6.4.3

    http://tools.ietf.org/html/rfc6125#section-6.4.3
    F�.rrN�*z,too many wildcards in certificate DNS name: z[^.]+zxn--z\*z[^.]*z\Az\.z\Z)�split�countr�repr�lower�append�
startswith�re�escape�replace�compile�join�
IGNORECASE�match)
Zdn�hostnameZ
max_wildcardsZpats�partsZleftmostZ	remainderZ	wildcardsZfragZpatrrr�_dnsname_matchs*


rcCs&t|t�r"tjdkr"t|ddd�}|S)N��ascii�strict)�encoding�errors)r)�
isinstance�str�sys�version_infoZunicode)�objrrr�_to_unicodeOsr%cCstjt|�j��}||kS)z�Exact matching of IP addresses.

    RFC 6125 explicitly doesn't define an algorithm for this
    (section 1.7.2 - "Out of Scope").
    )�	ipaddress�
ip_addressr%�rstrip)Zipname�host_ipZiprrr�_ipaddress_matchTsr*cCs�|std��ytjt|��}WnPtk
r6d}Yn:tk
rLd}Yn$tk
rntdkrhd}n�YnXg}|jdf�}xb|D]Z\}}|dkr�|dkr�t||�r�dS|j|�q�|dkr�|dk	r�t	||�r�dS|j|�q�W|�s8xL|jdf�D]<}x6|D].\}}|dk�rt||��r$dS|j|��qWq�Wt
|�dk�rdtd	|d
jt
t|��f��n,t
|�dk�r�td||df��ntd
��dS)a)Verify that *cert* (in decoded format as returned by
    SSLSocket.getpeercert()) matches the *hostname*.  RFC 2818 and RFC 6125
    rules are followed, but IP addresses are not accepted for *hostname*.

    CertificateError is raised on failure. On success, the function
    returns nothing.
    ztempty or no certificate, match_hostname needs a SSL socket or SSL context with either CERT_OPTIONAL or CERT_REQUIREDNZsubjectAltNameZDNSz
IP AddressZsubjectZ
commonNamerz&hostname %r doesn't match either of %sz, zhostname %r doesn't match %rrz=no appropriate commonName or subjectAltName fields were found)�
ValueErrorr&r'r%�UnicodeError�AttributeError�getrrr*�lenrr�mapr
)Zcertrr)ZdnsnamesZsan�key�value�subrrr�match_hostname`sJ
r4)r)�__doc__rr"r&�ImportError�__version__r+rrr%r*r4rrrr�<module>s

5_vendor/urllib3/packages/ssl_match_hostname/__pycache__/__init__.cpython-36.opt-1.pyc000064400000000740151733136360024552 0ustar003

�Pf��@s�ddlZy&ejd	kred��ddlmZmZWnNek
r|yddlmZmZWn$ek
rvddlmZmZYnXYnXd
ZdS)�N��zFallback to vendored code)�CertificateError�match_hostname�rr)rr)rr)	�sys�version_info�ImportErrorZsslrrZbackports.ssl_match_hostnameZ_implementation�__all__�rr�/usr/lib/python3.6/__init__.py�<module>s
_vendor/urllib3/packages/ssl_match_hostname/__pycache__/_implementation.cpython-36.pyc000064400000006157151733136360025250 0ustar003

�PfF�@stdZddlZddlZyddlZWnek
r8dZYnXdZGdd�de�Zddd�Zd	d
�Z	dd�Z
d
d�ZdS)zJThe match_hostname() function from Python 3.3.3, essential when using SSL.�Nz3.5.0.1c@seZdZdS)�CertificateErrorN)�__name__�
__module__�__qualname__�rr�%/usr/lib/python3.6/_implementation.pyrsr�c
Cs�g}|sdS|jd�}|d}|dd�}|jd�}||krLtdt|���|s`|j�|j�kS|dkrt|jd�n>|jd	�s�|jd	�r�|jtj|��n|jtj|�j	d
d��x|D]}|jtj|��q�Wtj
dd
j|�dtj�}	|	j
|�S)zhMatching according to RFC 6125, section 6.4.3

    http://tools.ietf.org/html/rfc6125#section-6.4.3
    F�.rrN�*z,too many wildcards in certificate DNS name: z[^.]+zxn--z\*z[^.]*z\Az\.z\Z)�split�countr�repr�lower�append�
startswith�re�escape�replace�compile�join�
IGNORECASE�match)
Zdn�hostnameZ
max_wildcardsZpats�partsZleftmostZ	remainderZ	wildcardsZfragZpatrrr�_dnsname_matchs*


rcCs&t|t�r"tjdkr"t|ddd�}|S)N��ascii�strict)�encoding�errors)r)�
isinstance�str�sys�version_infoZunicode)�objrrr�_to_unicodeOsr%cCstjt|�j��}||kS)z�Exact matching of IP addresses.

    RFC 6125 explicitly doesn't define an algorithm for this
    (section 1.7.2 - "Out of Scope").
    )�	ipaddress�
ip_addressr%�rstrip)Zipname�host_ipZiprrr�_ipaddress_matchTsr*cCs�|std��ytjt|��}WnPtk
r6d}Yn:tk
rLd}Yn$tk
rntdkrhd}n�YnXg}|jdf�}xb|D]Z\}}|dkr�|dkr�t||�r�dS|j|�q�|dkr�|dk	r�t	||�r�dS|j|�q�W|�s8xL|jdf�D]<}x6|D].\}}|dk�rt||��r$dS|j|��qWq�Wt
|�dk�rdtd	|d
jt
t|��f��n,t
|�dk�r�td||df��ntd
��dS)a)Verify that *cert* (in decoded format as returned by
    SSLSocket.getpeercert()) matches the *hostname*.  RFC 2818 and RFC 6125
    rules are followed, but IP addresses are not accepted for *hostname*.

    CertificateError is raised on failure. On success, the function
    returns nothing.
    ztempty or no certificate, match_hostname needs a SSL socket or SSL context with either CERT_OPTIONAL or CERT_REQUIREDNZsubjectAltNameZDNSz
IP AddressZsubjectZ
commonNamerz&hostname %r doesn't match either of %sz, zhostname %r doesn't match %rrz=no appropriate commonName or subjectAltName fields were found)�
ValueErrorr&r'r%�UnicodeError�AttributeError�getrrr*�lenrr�mapr
)Zcertrr)ZdnsnamesZsan�key�value�subrrr�match_hostname`sJ
r4)r)�__doc__rr"r&�ImportError�__version__r+rrr%r*r4rrrr�<module>s

5_vendor/urllib3/packages/ssl_match_hostname/__init__.py000064400000001260151733136360017325 0ustar00import sys

try:
    # Our match_hostname function is the same as 3.5's, so we only want to
    # import the match_hostname function if it's at least that good.
    if sys.version_info < (3, 5):
        raise ImportError("Fallback to vendored code")

    from ssl import CertificateError, match_hostname
except ImportError:
    try:
        # Backport of the function from a pypi module
        from backports.ssl_match_hostname import CertificateError, match_hostname
    except ImportError:
        # Our vendored copy
        from ._implementation import CertificateError, match_hostname

# Not needed, but documenting what we provide.
__all__ = ('CertificateError', 'match_hostname')
_vendor/urllib3/packages/ssl_match_hostname/_implementation.py000064400000013106151733136360020754 0ustar00"""The match_hostname() function from Python 3.3.3, essential when using SSL."""

# Note: This file is under the PSF license as the code comes from the python
# stdlib.   http://docs.python.org/3/license.html

import re
import sys

# ipaddress has been backported to 2.6+ in pypi.  If it is installed on the
# system, use it to handle IPAddress ServerAltnames (this was added in
# python-3.5) otherwise only do DNS matching.  This allows
# backports.ssl_match_hostname to continue to be used all the way back to
# python-2.4.
try:
    import ipaddress
except ImportError:
    ipaddress = None

__version__ = '3.5.0.1'


class CertificateError(ValueError):
    """Raised when a certificate does not match the expected hostname."""


def _dnsname_match(dn, hostname, max_wildcards=1):
    """Matching according to RFC 6125, section 6.4.3

    http://tools.ietf.org/html/rfc6125#section-6.4.3
    """
    pats = []
    if not dn:
        return False

    # Ported from python3-syntax:
    # leftmost, *remainder = dn.split(r'.')
    parts = dn.split(r'.')
    leftmost = parts[0]
    remainder = parts[1:]

    wildcards = leftmost.count('*')
    if wildcards > max_wildcards:
        # Issue #17980: avoid denials of service by refusing more
        # than one wildcard per fragment.  A survey of established
        # policy among SSL implementations showed it to be a
        # reasonable choice.
        raise CertificateError(
            "too many wildcards in certificate DNS name: " + repr(dn))

    # speed up common case w/o wildcards
    if not wildcards:
        return dn.lower() == hostname.lower()

    # RFC 6125, section 6.4.3, subitem 1.
    # The client SHOULD NOT attempt to match a presented identifier in which
    # the wildcard character comprises a label other than the left-most label.
    if leftmost == '*':
        # When '*' is a fragment by itself, it matches a non-empty dotless
        # fragment.
        pats.append('[^.]+')
    elif leftmost.startswith('xn--') or hostname.startswith('xn--'):
        # RFC 6125, section 6.4.3, subitem 3.
        # The client SHOULD NOT attempt to match a presented identifier
        # where the wildcard character is embedded within an A-label or
        # U-label of an internationalized domain name.
        pats.append(re.escape(leftmost))
    else:
        # Otherwise, '*' matches any dotless string, e.g. www*
        pats.append(re.escape(leftmost).replace(r'\*', '[^.]*'))

    # add the remaining fragments, ignore any wildcards
    for frag in remainder:
        pats.append(re.escape(frag))

    pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE)
    return pat.match(hostname)


def _to_unicode(obj):
    if isinstance(obj, str) and sys.version_info < (3,):
        obj = unicode(obj, encoding='ascii', errors='strict')
    return obj

def _ipaddress_match(ipname, host_ip):
    """Exact matching of IP addresses.

    RFC 6125 explicitly doesn't define an algorithm for this
    (section 1.7.2 - "Out of Scope").
    """
    # OpenSSL may append a trailing newline to a subjectAltName IP, so
    # strip it; _to_unicode works around ipaddress rejecting byte str.
    candidate = _to_unicode(ipname).rstrip()
    return ipaddress.ip_address(candidate) == host_ip


def match_hostname(cert, hostname):
    """Verify that *cert* (in decoded format as returned by
    SSLSocket.getpeercert()) matches the *hostname*.  RFC 2818 and RFC 6125
    rules are followed, but IP addresses are not accepted for *hostname*.

    CertificateError is raised on failure. On success, the function
    returns nothing.
    """
    if not cert:
        raise ValueError("empty or no certificate, match_hostname needs a "
                         "SSL socket or SSL context with either "
                         "CERT_OPTIONAL or CERT_REQUIRED")
    # First try to parse *hostname* as an IP address; when it is one, only
    # 'IP Address' SAN entries are eligible to match below.
    try:
        # Divergence from upstream: ipaddress can't handle byte str
        host_ip = ipaddress.ip_address(_to_unicode(hostname))
    except ValueError:
        # Not an IP address (common case)
        host_ip = None
    except UnicodeError:
        # Divergence from upstream: Have to deal with ipaddress not taking
        # byte strings.  addresses should be all ascii, so we consider it not
        # an ipaddress in this case
        host_ip = None
    except AttributeError:
        # Divergence from upstream: Make ipaddress library optional
        if ipaddress is None:
            host_ip = None
        else:
            raise
    # Candidate names that failed to match; reported in the error message.
    dnsnames = []
    san = cert.get('subjectAltName', ())
    for key, value in san:
        if key == 'DNS':
            if host_ip is None and _dnsname_match(value, hostname):
                return
            dnsnames.append(value)
        elif key == 'IP Address':
            if host_ip is not None and _ipaddress_match(value, host_ip):
                return
            dnsnames.append(value)
    if not dnsnames:
        # The subject is only checked when there is no dNSName entry
        # in subjectAltName
        for sub in cert.get('subject', ()):
            for key, value in sub:
                # XXX according to RFC 2818, the most specific Common Name
                # must be used.
                if key == 'commonName':
                    if _dnsname_match(value, hostname):
                        return
                    dnsnames.append(value)
    if len(dnsnames) > 1:
        raise CertificateError("hostname %r "
            "doesn't match either of %s"
            % (hostname, ', '.join(map(repr, dnsnames))))
    elif len(dnsnames) == 1:
        raise CertificateError("hostname %r "
            "doesn't match %r"
            % (hostname, dnsnames[0]))
    else:
        raise CertificateError("no appropriate commonName or "
            "subjectAltName fields were found")
_vendor/urllib3/packages/__init__.py000064400000000155151733136360013454 0ustar00from __future__ import absolute_import

from . import ssl_match_hostname

__all__ = ('ssl_match_hostname', )
_vendor/urllib3/packages/six.py000064400000072622151733136360012530 0ustar00"""Utilities for writing code that runs on Python 2 and 3"""

# Copyright (c) 2010-2015 Benjamin Peterson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.

from __future__ import absolute_import

import functools
import itertools
import operator
import sys
import types

__author__ = "Benjamin Peterson <benjamin@python.org>"
__version__ = "1.10.0"


# Useful for very coarse version differentiation.
PY2 = sys.version_info[0] == 2
PY3 = sys.version_info[0] == 3
PY34 = sys.version_info[0:2] >= (3, 4)

if PY3:
    string_types = str,
    integer_types = int,
    class_types = type,
    text_type = str
    binary_type = bytes

    MAXSIZE = sys.maxsize
else:
    string_types = basestring,
    integer_types = (int, long)
    class_types = (type, types.ClassType)
    text_type = unicode
    binary_type = str

    if sys.platform.startswith("java"):
        # Jython always uses 32 bits.
        MAXSIZE = int((1 << 31) - 1)
    else:
        # It's possible to have sizeof(long) != sizeof(Py_ssize_t).
        class X(object):

            def __len__(self):
                return 1 << 31
        # Probe the platform word size: len() raises OverflowError when
        # __len__ returns a value exceeding Py_ssize_t.
        try:
            len(X())
        except OverflowError:
            # 32-bit
            MAXSIZE = int((1 << 31) - 1)
        else:
            # 64-bit
            MAXSIZE = int((1 << 63) - 1)
        del X


def _add_doc(func, doc):
    """Add documentation to a function."""
    func.__doc__ = doc


def _import_module(name):
    """Import module, returning the module after the last dot."""
    __import__(name)
    return sys.modules[name]


class _LazyDescr(object):
    # Base for lazy attribute descriptors: subclasses implement _resolve()
    # to produce the real object on first access.

    def __init__(self, name):
        # name: the attribute name this descriptor is installed under.
        self.name = name

    def __get__(self, obj, tp):
        result = self._resolve()
        setattr(obj, self.name, result)  # Invokes __set__.
        try:
            # This is a bit ugly, but it avoids running this again by
            # removing this descriptor.
            delattr(obj.__class__, self.name)
        except AttributeError:
            pass
        return result


class MovedModule(_LazyDescr):
    """Lazy descriptor for a module that was renamed between Python 2 and 3."""

    def __init__(self, name, old, new=None):
        # old: Python 2 module name; new: Python 3 name (defaults to *name*).
        super(MovedModule, self).__init__(name)
        if PY3:
            if new is None:
                new = name
            self.mod = new
        else:
            self.mod = old

    def _resolve(self):
        return _import_module(self.mod)

    def __getattr__(self, attr):
        _module = self._resolve()
        value = getattr(_module, attr)
        # Cache the looked-up attribute so __getattr__ is not hit again.
        setattr(self, attr, value)
        return value


class _LazyModule(types.ModuleType):

    def __init__(self, name):
        super(_LazyModule, self).__init__(name)
        self.__doc__ = self.__class__.__doc__

    def __dir__(self):
        attrs = ["__doc__", "__name__"]
        attrs += [attr.name for attr in self._moved_attributes]
        return attrs

    # Subclasses should override this
    _moved_attributes = []


class MovedAttribute(_LazyDescr):
    """Lazy descriptor for an attribute that moved between modules in Py2/Py3."""

    def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
        # old_mod/new_mod: module holding the attribute on Py2/Py3.
        # old_attr/new_attr: attribute names there; each defaults to *name*.
        super(MovedAttribute, self).__init__(name)
        if PY3:
            if new_mod is None:
                new_mod = name
            self.mod = new_mod
            if new_attr is None:
                # Fall back to the Py2 attribute name, then to *name*.
                if old_attr is None:
                    new_attr = name
                else:
                    new_attr = old_attr
            self.attr = new_attr
        else:
            self.mod = old_mod
            if old_attr is None:
                old_attr = name
            self.attr = old_attr

    def _resolve(self):
        module = _import_module(self.mod)
        return getattr(module, self.attr)


class _SixMetaPathImporter(object):

    """
    A meta path importer to import six.moves and its submodules.

    This class implements a PEP302 finder and loader. It should be compatible
    with Python 2.5 and all existing versions of Python3
    """

    def __init__(self, six_module_name):
        self.name = six_module_name
        self.known_modules = {}

    def _add_module(self, mod, *fullnames):
        for fullname in fullnames:
            self.known_modules[self.name + "." + fullname] = mod

    def _get_module(self, fullname):
        return self.known_modules[self.name + "." + fullname]

    def find_module(self, fullname, path=None):
        if fullname in self.known_modules:
            return self
        return None

    def __get_module(self, fullname):
        try:
            return self.known_modules[fullname]
        except KeyError:
            raise ImportError("This loader does not know module " + fullname)

    def load_module(self, fullname):
        try:
            # in case of a reload
            return sys.modules[fullname]
        except KeyError:
            pass
        mod = self.__get_module(fullname)
        if isinstance(mod, MovedModule):
            mod = mod._resolve()
        else:
            mod.__loader__ = self
        sys.modules[fullname] = mod
        return mod

    def is_package(self, fullname):
        """
        Return true, if the named module is a package.

        We need this method to get correct spec objects with
        Python 3.4 (see PEP451)
        """
        return hasattr(self.__get_module(fullname), "__path__")

    def get_code(self, fullname):
        """Return None

        Required, if is_package is implemented"""
        self.__get_module(fullname)  # eventually raises ImportError
        return None
    get_source = get_code  # same as get_code

_importer = _SixMetaPathImporter(__name__)


class _MovedItems(_LazyModule):

    """Lazy loading of moved objects"""
    # The MovedAttribute/MovedModule entries are attached to this class
    # via setattr() at import time (see the _moved_attributes loop below).
    __path__ = []  # mark as package


_moved_attributes = [
    MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
    MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
    MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"),
    MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
    MovedAttribute("intern", "__builtin__", "sys"),
    MovedAttribute("map", "itertools", "builtins", "imap", "map"),
    MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"),
    MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"),
    MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
    MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"),
    MovedAttribute("reduce", "__builtin__", "functools"),
    MovedAttribute("shlex_quote", "pipes", "shlex", "quote"),
    MovedAttribute("StringIO", "StringIO", "io"),
    MovedAttribute("UserDict", "UserDict", "collections"),
    MovedAttribute("UserList", "UserList", "collections"),
    MovedAttribute("UserString", "UserString", "collections"),
    MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
    MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
    MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"),
    MovedModule("builtins", "__builtin__"),
    MovedModule("configparser", "ConfigParser"),
    MovedModule("copyreg", "copy_reg"),
    MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),
    MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"),
    MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
    MovedModule("http_cookies", "Cookie", "http.cookies"),
    MovedModule("html_entities", "htmlentitydefs", "html.entities"),
    MovedModule("html_parser", "HTMLParser", "html.parser"),
    MovedModule("http_client", "httplib", "http.client"),
    MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
    MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"),
    MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
    MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
    MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
    MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
    MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
    MovedModule("cPickle", "cPickle", "pickle"),
    MovedModule("queue", "Queue"),
    MovedModule("reprlib", "repr"),
    MovedModule("socketserver", "SocketServer"),
    MovedModule("_thread", "thread", "_thread"),
    MovedModule("tkinter", "Tkinter"),
    MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
    MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
    MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
    MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
    MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
    MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"),
    MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
    MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
    MovedModule("tkinter_colorchooser", "tkColorChooser",
                "tkinter.colorchooser"),
    MovedModule("tkinter_commondialog", "tkCommonDialog",
                "tkinter.commondialog"),
    MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
    MovedModule("tkinter_font", "tkFont", "tkinter.font"),
    MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
    MovedModule("tkinter_tksimpledialog", "tkSimpleDialog",
                "tkinter.simpledialog"),
    MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"),
    MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"),
    MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"),
    MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
    MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"),
    MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"),
]
# Add windows specific modules.
if sys.platform == "win32":
    _moved_attributes += [
        MovedModule("winreg", "_winreg"),
    ]

for attr in _moved_attributes:
    setattr(_MovedItems, attr.name, attr)
    if isinstance(attr, MovedModule):
        _importer._add_module(attr, "moves." + attr.name)
del attr

_MovedItems._moved_attributes = _moved_attributes

moves = _MovedItems(__name__ + ".moves")
_importer._add_module(moves, "moves")


class Module_six_moves_urllib_parse(_LazyModule):

    """Lazy loading of moved objects in six.moves.urllib_parse"""
    # Descriptors are attached after the class statement via setattr()
    # from _urllib_parse_moved_attributes.


_urllib_parse_moved_attributes = [
    MovedAttribute("ParseResult", "urlparse", "urllib.parse"),
    MovedAttribute("SplitResult", "urlparse", "urllib.parse"),
    MovedAttribute("parse_qs", "urlparse", "urllib.parse"),
    MovedAttribute("parse_qsl", "urlparse", "urllib.parse"),
    MovedAttribute("urldefrag", "urlparse", "urllib.parse"),
    MovedAttribute("urljoin", "urlparse", "urllib.parse"),
    MovedAttribute("urlparse", "urlparse", "urllib.parse"),
    MovedAttribute("urlsplit", "urlparse", "urllib.parse"),
    MovedAttribute("urlunparse", "urlparse", "urllib.parse"),
    MovedAttribute("urlunsplit", "urlparse", "urllib.parse"),
    MovedAttribute("quote", "urllib", "urllib.parse"),
    MovedAttribute("quote_plus", "urllib", "urllib.parse"),
    MovedAttribute("unquote", "urllib", "urllib.parse"),
    MovedAttribute("unquote_plus", "urllib", "urllib.parse"),
    MovedAttribute("urlencode", "urllib", "urllib.parse"),
    MovedAttribute("splitquery", "urllib", "urllib.parse"),
    MovedAttribute("splittag", "urllib", "urllib.parse"),
    MovedAttribute("splituser", "urllib", "urllib.parse"),
    MovedAttribute("uses_fragment", "urlparse", "urllib.parse"),
    MovedAttribute("uses_netloc", "urlparse", "urllib.parse"),
    MovedAttribute("uses_params", "urlparse", "urllib.parse"),
    MovedAttribute("uses_query", "urlparse", "urllib.parse"),
    MovedAttribute("uses_relative", "urlparse", "urllib.parse"),
]
for attr in _urllib_parse_moved_attributes:
    setattr(Module_six_moves_urllib_parse, attr.name, attr)
del attr

Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes

_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"),
                      "moves.urllib_parse", "moves.urllib.parse")


class Module_six_moves_urllib_error(_LazyModule):

    """Lazy loading of moved objects in six.moves.urllib_error"""
    # Descriptors are attached after the class statement via setattr()
    # from _urllib_error_moved_attributes.


_urllib_error_moved_attributes = [
    MovedAttribute("URLError", "urllib2", "urllib.error"),
    MovedAttribute("HTTPError", "urllib2", "urllib.error"),
    MovedAttribute("ContentTooShortError", "urllib", "urllib.error"),
]
for attr in _urllib_error_moved_attributes:
    setattr(Module_six_moves_urllib_error, attr.name, attr)
del attr

Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes

_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"),
                      "moves.urllib_error", "moves.urllib.error")


class Module_six_moves_urllib_request(_LazyModule):

    """Lazy loading of moved objects in six.moves.urllib_request"""
    # Descriptors are attached after the class statement via setattr()
    # from _urllib_request_moved_attributes.


_urllib_request_moved_attributes = [
    MovedAttribute("urlopen", "urllib2", "urllib.request"),
    MovedAttribute("install_opener", "urllib2", "urllib.request"),
    MovedAttribute("build_opener", "urllib2", "urllib.request"),
    MovedAttribute("pathname2url", "urllib", "urllib.request"),
    MovedAttribute("url2pathname", "urllib", "urllib.request"),
    MovedAttribute("getproxies", "urllib", "urllib.request"),
    MovedAttribute("Request", "urllib2", "urllib.request"),
    MovedAttribute("OpenerDirector", "urllib2", "urllib.request"),
    MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"),
    MovedAttribute("ProxyHandler", "urllib2", "urllib.request"),
    MovedAttribute("BaseHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"),
    MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"),
    MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"),
    MovedAttribute("FileHandler", "urllib2", "urllib.request"),
    MovedAttribute("FTPHandler", "urllib2", "urllib.request"),
    MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"),
    MovedAttribute("UnknownHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"),
    MovedAttribute("urlretrieve", "urllib", "urllib.request"),
    MovedAttribute("urlcleanup", "urllib", "urllib.request"),
    MovedAttribute("URLopener", "urllib", "urllib.request"),
    MovedAttribute("FancyURLopener", "urllib", "urllib.request"),
    MovedAttribute("proxy_bypass", "urllib", "urllib.request"),
]
for attr in _urllib_request_moved_attributes:
    setattr(Module_six_moves_urllib_request, attr.name, attr)
del attr

Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes

_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"),
                      "moves.urllib_request", "moves.urllib.request")


class Module_six_moves_urllib_response(_LazyModule):

    """Lazy loading of moved objects in six.moves.urllib_response"""
    # Descriptors are attached after the class statement via setattr()
    # from _urllib_response_moved_attributes.


_urllib_response_moved_attributes = [
    MovedAttribute("addbase", "urllib", "urllib.response"),
    MovedAttribute("addclosehook", "urllib", "urllib.response"),
    MovedAttribute("addinfo", "urllib", "urllib.response"),
    MovedAttribute("addinfourl", "urllib", "urllib.response"),
]
for attr in _urllib_response_moved_attributes:
    setattr(Module_six_moves_urllib_response, attr.name, attr)
del attr

Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes

_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"),
                      "moves.urllib_response", "moves.urllib.response")


class Module_six_moves_urllib_robotparser(_LazyModule):

    """Lazy loading of moved objects in six.moves.urllib_robotparser"""
    # Descriptors are attached after the class statement via setattr()
    # from _urllib_robotparser_moved_attributes.


_urllib_robotparser_moved_attributes = [
    MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"),
]
for attr in _urllib_robotparser_moved_attributes:
    setattr(Module_six_moves_urllib_robotparser, attr.name, attr)
del attr

Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes

_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"),
                      "moves.urllib_robotparser", "moves.urllib.robotparser")


class Module_six_moves_urllib(types.ModuleType):

    """Create a six.moves.urllib namespace that resembles the Python 3 namespace"""
    __path__ = []  # mark as package
    # Submodules are resolved eagerly at class-creation time from the
    # registry populated by the _importer._add_module calls above.
    parse = _importer._get_module("moves.urllib_parse")
    error = _importer._get_module("moves.urllib_error")
    request = _importer._get_module("moves.urllib_request")
    response = _importer._get_module("moves.urllib_response")
    robotparser = _importer._get_module("moves.urllib_robotparser")

    def __dir__(self):
        return ['parse', 'error', 'request', 'response', 'robotparser']

_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"),
                      "moves.urllib")


def add_move(move):
    """Add an item to six.moves."""
    # Registers *move* (a MovedAttribute/MovedModule) on the _MovedItems
    # class, making it visible as six.moves.<name>.
    setattr(_MovedItems, move.name, move)


def remove_move(name):
    """Remove item from six.moves.

    Looks first on the registered moves class, then on attributes set
    directly on the ``moves`` module; raises AttributeError when *name*
    is found in neither place.
    """
    try:
        delattr(_MovedItems, name)
    except AttributeError:
        try:
            del moves.__dict__[name]
        except KeyError:
            raise AttributeError("no such move, %r" % (name,))


# Attribute names for function/method introspection, which were renamed from
# the ``im_*``/``func_*`` spellings (Python 2) to dunder names (Python 3).
if PY3:
    _meth_func = "__func__"
    _meth_self = "__self__"

    _func_closure = "__closure__"
    _func_code = "__code__"
    _func_defaults = "__defaults__"
    _func_globals = "__globals__"
else:
    _meth_func = "im_func"
    _meth_self = "im_self"

    _func_closure = "func_closure"
    _func_code = "func_code"
    _func_defaults = "func_defaults"
    _func_globals = "func_globals"


# ``next()`` builtin: fall back to calling ``it.next()`` on interpreters
# that do not provide the builtin.
try:
    advance_iterator = next
except NameError:
    def advance_iterator(it):
        return it.next()
next = advance_iterator


# ``callable()`` shim: emulate it via the MRO on versions where the builtin
# is absent.
try:
    callable = callable
except NameError:
    def callable(obj):
        return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)


if PY3:
    def get_unbound_function(unbound):
        # Python 3 has no unbound methods; the function is stored as-is.
        return unbound

    create_bound_method = types.MethodType

    def create_unbound_method(func, cls):
        # Plain functions already behave like unbound methods on Python 3.
        return func

    Iterator = object
else:
    def get_unbound_function(unbound):
        return unbound.im_func

    def create_bound_method(func, obj):
        return types.MethodType(func, obj, obj.__class__)

    def create_unbound_method(func, cls):
        return types.MethodType(func, None, cls)

    class Iterator(object):
        # Maps Python 3's ``__next__`` protocol onto Python 2's ``next``.

        def next(self):
            return type(self).__next__(self)

    callable = callable
_add_doc(get_unbound_function,
         """Get the function out of a possibly unbound function""")


# Accessors built on the version-specific attribute names defined above.
get_method_function = operator.attrgetter(_meth_func)
get_method_self = operator.attrgetter(_meth_self)
get_function_closure = operator.attrgetter(_func_closure)
get_function_code = operator.attrgetter(_func_code)
get_function_defaults = operator.attrgetter(_func_defaults)
get_function_globals = operator.attrgetter(_func_globals)


# Version-neutral dict iteration helpers: on Python 3 wrap the view methods
# in iter(); on Python 2 delegate to the iter*/view* methods directly.
if PY3:
    def iterkeys(d, **kw):
        return iter(d.keys(**kw))

    def itervalues(d, **kw):
        return iter(d.values(**kw))

    def iteritems(d, **kw):
        return iter(d.items(**kw))

    def iterlists(d, **kw):
        return iter(d.lists(**kw))

    viewkeys = operator.methodcaller("keys")

    viewvalues = operator.methodcaller("values")

    viewitems = operator.methodcaller("items")
else:
    def iterkeys(d, **kw):
        return d.iterkeys(**kw)

    def itervalues(d, **kw):
        return d.itervalues(**kw)

    def iteritems(d, **kw):
        return d.iteritems(**kw)

    def iterlists(d, **kw):
        return d.iterlists(**kw)

    viewkeys = operator.methodcaller("viewkeys")

    viewvalues = operator.methodcaller("viewvalues")

    viewitems = operator.methodcaller("viewitems")

_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.")
_add_doc(itervalues, "Return an iterator over the values of a dictionary.")
_add_doc(iteritems,
         "Return an iterator over the (key, value) pairs of a dictionary.")
_add_doc(iterlists,
         "Return an iterator over the (key, [values]) pairs of a dictionary.")


# Text/bytes helpers that paper over the str/unicode split.
if PY3:
    def b(s):
        return s.encode("latin-1")

    def u(s):
        return s
    unichr = chr
    import struct
    int2byte = struct.Struct(">B").pack
    del struct
    byte2int = operator.itemgetter(0)
    indexbytes = operator.getitem
    iterbytes = iter
    import io
    StringIO = io.StringIO
    BytesIO = io.BytesIO
    _assertCountEqual = "assertCountEqual"
    # 3.0/3.1 only had the old "Regexp" spellings of these unittest methods.
    if sys.version_info[1] <= 1:
        _assertRaisesRegex = "assertRaisesRegexp"
        _assertRegex = "assertRegexpMatches"
    else:
        _assertRaisesRegex = "assertRaisesRegex"
        _assertRegex = "assertRegex"
else:
    def b(s):
        return s
    # Workaround for standalone backslash

    def u(s):
        return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape")
    unichr = unichr
    int2byte = chr

    def byte2int(bs):
        return ord(bs[0])

    def indexbytes(buf, i):
        return ord(buf[i])
    iterbytes = functools.partial(itertools.imap, ord)
    import StringIO
    StringIO = BytesIO = StringIO.StringIO
    _assertCountEqual = "assertItemsEqual"
    _assertRaisesRegex = "assertRaisesRegexp"
    _assertRegex = "assertRegexpMatches"
_add_doc(b, """Byte literal""")
_add_doc(u, """Text literal""")


def assertCountEqual(self, *args, **kwargs):
    """Call the version-appropriate ``assertCountEqual`` method on *self*."""
    return getattr(self, _assertCountEqual)(*args, **kwargs)


def assertRaisesRegex(self, *args, **kwargs):
    """Call the version-appropriate ``assertRaisesRegex`` method on *self*."""
    return getattr(self, _assertRaisesRegex)(*args, **kwargs)


def assertRegex(self, *args, **kwargs):
    """Call the version-appropriate ``assertRegex`` method on *self*."""
    return getattr(self, _assertRegex)(*args, **kwargs)


if PY3:
    # On Python 3 ``exec`` is a real function we can simply alias.
    exec_ = getattr(moves.builtins, "exec")

    def reraise(tp, value, tb=None):
        if value is None:
            value = tp()
        if value.__traceback__ is not tb:
            raise value.with_traceback(tb)
        raise value

else:
    def exec_(_code_, _globs_=None, _locs_=None):
        """Execute code in a namespace."""
        if _globs_ is None:
            # Default to the caller's globals/locals.
            frame = sys._getframe(1)
            _globs_ = frame.f_globals
            if _locs_ is None:
                _locs_ = frame.f_locals
            del frame
        elif _locs_ is None:
            _locs_ = _globs_
        exec("""exec _code_ in _globs_, _locs_""")

    # Defined via exec_ because Python 2's three-argument raise syntax is a
    # SyntaxError on Python 3.
    exec_("""def reraise(tp, value, tb=None):
    raise tp, value, tb
""")


# ``raise X from Y`` is a SyntaxError on Python 2, so the 3.x variants are
# compiled through exec_.
if sys.version_info[:2] == (3, 2):
    # 3.2 needs the explicit None guard before using ``from`` — hence the
    # separate branch.
    exec_("""def raise_from(value, from_value):
    if from_value is None:
        raise value
    raise value from from_value
""")
elif sys.version_info[:2] > (3, 2):
    exec_("""def raise_from(value, from_value):
    raise value from from_value
""")
else:
    # Python 2 has no exception chaining; the cause is dropped.
    def raise_from(value, from_value):
        raise value


print_ = getattr(moves.builtins, "print", None)
if print_ is None:
    def print_(*args, **kwargs):
        """The new-style print function for Python 2.4 and 2.5."""
        fp = kwargs.pop("file", sys.stdout)
        if fp is None:
            return

        def write(data):
            if not isinstance(data, basestring):
                data = str(data)
            # If the file has an encoding, encode unicode with it.
            if (isinstance(fp, file) and
                    isinstance(data, unicode) and
                    fp.encoding is not None):
                errors = getattr(fp, "errors", None)
                if errors is None:
                    errors = "strict"
                data = data.encode(fp.encoding, errors)
            fp.write(data)
        want_unicode = False
        sep = kwargs.pop("sep", None)
        if sep is not None:
            if isinstance(sep, unicode):
                want_unicode = True
            elif not isinstance(sep, str):
                raise TypeError("sep must be None or a string")
        end = kwargs.pop("end", None)
        if end is not None:
            if isinstance(end, unicode):
                want_unicode = True
            elif not isinstance(end, str):
                raise TypeError("end must be None or a string")
        if kwargs:
            raise TypeError("invalid keyword arguments to print()")
        # Use unicode separators whenever any piece of the output is unicode
        # so the writes do not mix str and unicode.
        if not want_unicode:
            for arg in args:
                if isinstance(arg, unicode):
                    want_unicode = True
                    break
        if want_unicode:
            newline = unicode("\n")
            space = unicode(" ")
        else:
            newline = "\n"
            space = " "
        if sep is None:
            sep = space
        if end is None:
            end = newline
        for i, arg in enumerate(args):
            if i:
                write(sep)
            write(arg)
        write(end)
if sys.version_info[:2] < (3, 3):
    # Before 3.3, print() had no ``flush`` keyword; emulate it by wrapping.
    _print = print_

    def print_(*args, **kwargs):
        fp = kwargs.get("file", sys.stdout)
        flush = kwargs.pop("flush", False)
        _print(*args, **kwargs)
        if flush and fp is not None:
            fp.flush()

_add_doc(reraise, """Reraise an exception.""")

if sys.version_info[0:2] < (3, 4):
    # Ensure ``__wrapped__`` always points at the original function;
    # functools.wraps only guarantees this from Python 3.4 onwards.
    def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS,
              updated=functools.WRAPPER_UPDATES):
        def wrapper(f):
            f = functools.wraps(wrapped, assigned, updated)(f)
            f.__wrapped__ = wrapped
            return f
        return wrapper
else:
    wraps = functools.wraps


def with_metaclass(meta, *bases):
    """Create a base class with a metaclass."""
    # This requires a bit of explanation: the basic idea is to make a dummy
    # metaclass for one level of class instantiation that replaces itself with
    # the actual metaclass.
    class metaclass(meta):

        def __new__(cls, name, this_bases, d):
            # ``this_bases`` holds the dummy temporary_class; rebuild from the
            # originally requested bases instead.
            return meta(name, bases, d)
    return type.__new__(metaclass, 'temporary_class', (), {})


def add_metaclass(metaclass):
    """Class decorator for creating a class with a metaclass."""
    def wrapper(cls):
        # Recreate the class through ``metaclass`` from a copy of its
        # namespace, minus the attributes the new type builds for itself.
        body = dict(cls.__dict__)
        slots = body.get('__slots__')
        if slots is not None:
            if isinstance(slots, str):
                slots = [slots]
            # Slot descriptors are regenerated by the new type; drop them.
            for slot_name in slots:
                body.pop(slot_name)
        body.pop('__dict__', None)
        body.pop('__weakref__', None)
        return metaclass(cls.__name__, cls.__bases__, body)
    return wrapper


def python_2_unicode_compatible(klass):
    """
    A decorator that defines __unicode__ and __str__ methods under Python 2.
    Under Python 3 it does nothing.

    To support Python 2 and 3 with a single code base, define a __str__ method
    returning text and apply this decorator to the class.
    """
    if not PY2:
        # Nothing to do on Python 3: __str__ already returns text.
        return klass
    if '__str__' not in klass.__dict__:
        raise ValueError("@python_2_unicode_compatible cannot be applied "
                         "to %s because it doesn't define __str__()." %
                         klass.__name__)
    # On Python 2, move the text-returning __str__ to __unicode__ and make
    # __str__ return UTF-8 encoded bytes.
    klass.__unicode__ = klass.__str__
    klass.__str__ = lambda self: self.__unicode__().encode('utf-8')
    return klass


# Complete the moves implementation.
# This code is at the end of this module to speed up module loading.
# Turn this module into a package.
__path__ = []  # required for PEP 302 and PEP 451
__package__ = __name__  # see PEP 366 @ReservedAssignment
if globals().get("__spec__") is not None:
    __spec__.submodule_search_locations = []  # PEP 451 @UndefinedVariable
# Remove other six meta path importers, since they cause problems. This can
# happen if six is removed from sys.modules and then reloaded. (Setuptools does
# this for some reason.)
if sys.meta_path:
    for i, importer in enumerate(sys.meta_path):
        # Here's some real nastiness: Another "instance" of the six module might
        # be floating around. Therefore, we can't use isinstance() to check for
        # the six meta path importer, since the other six instance will have
        # inserted an importer with different class.
        if (type(importer).__name__ == "_SixMetaPathImporter" and
                importer.name == __name__):
            del sys.meta_path[i]
            break
    # Drop the loop variables so they do not linger as module globals.
    del i, importer
# Finally, add the importer to the meta path import hook.
sys.meta_path.append(_importer)
_vendor/urllib3/packages/ordered_dict.py000064400000021347151733136360014352 0ustar00# Backport of OrderedDict() class that runs on Python 2.4, 2.5, 2.6, 2.7 and pypy.
# Passes Python2.7's test suite and incorporates all the latest updates.
# Copyright 2009 Raymond Hettinger, released under the MIT License.
# http://code.activestate.com/recipes/576693/
try:
    from thread import get_ident as _get_ident
except ImportError:
    from dummy_thread import get_ident as _get_ident

try:
    from _abcoll import KeysView, ValuesView, ItemsView
except ImportError:
    pass


class OrderedDict(dict):
    'Dictionary that remembers insertion order'
    # An inherited dict maps keys to values.
    # The inherited dict provides __getitem__, __len__, __contains__, and get.
    # The remaining methods are order-aware.
    # Big-O running times for all methods are the same as for regular dictionaries.

    # The internal self.__map dictionary maps keys to links in a doubly linked list.
    # The circular doubly linked list starts and ends with a sentinel element.
    # The sentinel element never gets deleted (this simplifies the algorithm).
    # Each link is stored as a list of length three:  [PREV, NEXT, KEY].

    def __init__(self, *args, **kwds):
        '''Initialize an ordered dictionary.  Signature is the same as for
        regular dictionaries, but keyword arguments are not recommended
        because their insertion order is arbitrary.

        '''
        if len(args) > 1:
            raise TypeError('expected at most 1 arguments, got %d' % len(args))
        try:
            self.__root
        except AttributeError:
            # First initialization: build the circular sentinel node and the
            # key -> link map used to track insertion order.
            self.__root = root = []                     # sentinel node
            root[:] = [root, root, None]
            self.__map = {}
        self.__update(*args, **kwds)

    def __setitem__(self, key, value, dict_setitem=dict.__setitem__):
        'od.__setitem__(i, y) <==> od[i]=y'
        # Setting a new item creates a new link which goes at the end of the linked
        # list, and the inherited dictionary is updated with the new key/value pair.
        if key not in self:
            root = self.__root
            last = root[0]
            last[1] = root[0] = self.__map[key] = [last, root, key]
        dict_setitem(self, key, value)

    def __delitem__(self, key, dict_delitem=dict.__delitem__):
        'od.__delitem__(y) <==> del od[y]'
        # Deleting an existing item uses self.__map to find the link which is
        # then removed by updating the links in the predecessor and successor nodes.
        dict_delitem(self, key)
        link_prev, link_next, key = self.__map.pop(key)
        link_prev[1] = link_next
        link_next[0] = link_prev

    def __iter__(self):
        'od.__iter__() <==> iter(od)'
        # Walk the linked list forward, yielding the key stored in each link.
        root = self.__root
        curr = root[1]
        while curr is not root:
            yield curr[2]
            curr = curr[1]

    def __reversed__(self):
        'od.__reversed__() <==> reversed(od)'
        # Walk the linked list backward.
        root = self.__root
        curr = root[0]
        while curr is not root:
            yield curr[2]
            curr = curr[0]

    def clear(self):
        'od.clear() -> None.  Remove all items from od.'
        try:
            # Break the reference cycles held by the links before resetting.
            for node in self.__map.itervalues():
                del node[:]
            root = self.__root
            root[:] = [root, root, None]
            self.__map.clear()
        except AttributeError:
            pass
        dict.clear(self)

    def popitem(self, last=True):
        '''od.popitem() -> (k, v), return and remove a (key, value) pair.
        Pairs are returned in LIFO order if last is true or FIFO order if false.

        '''
        if not self:
            raise KeyError('dictionary is empty')
        root = self.__root
        if last:
            # Unlink the node at the tail of the list.
            link = root[0]
            link_prev = link[0]
            link_prev[1] = root
            root[0] = link_prev
        else:
            # Unlink the node at the head of the list.
            link = root[1]
            link_next = link[1]
            root[1] = link_next
            link_next[0] = root
        key = link[2]
        del self.__map[key]
        value = dict.pop(self, key)
        return key, value

    # -- the following methods do not depend on the internal structure --

    def keys(self):
        'od.keys() -> list of keys in od'
        return list(self)

    def values(self):
        'od.values() -> list of values in od'
        return [self[key] for key in self]

    def items(self):
        'od.items() -> list of (key, value) pairs in od'
        return [(key, self[key]) for key in self]

    def iterkeys(self):
        'od.iterkeys() -> an iterator over the keys in od'
        return iter(self)

    def itervalues(self):
        'od.itervalues() -> an iterator over the values in od'
        for k in self:
            yield self[k]

    def iteritems(self):
        'od.iteritems() -> an iterator over the (key, value) items in od'
        for k in self:
            yield (k, self[k])

    def update(*args, **kwds):
        '''od.update(E, **F) -> None.  Update od from dict/iterable E and F.

        If E is a dict instance, does:           for k in E: od[k] = E[k]
        If E has a .keys() method, does:         for k in E.keys(): od[k] = E[k]
        Or if E is an iterable of items, does:   for k, v in E: od[k] = v
        In either case, this is followed by:     for k, v in F.items(): od[k] = v

        '''
        # ``self`` is extracted from *args so a keyword argument literally
        # named 'self' still works.
        if len(args) > 2:
            raise TypeError('update() takes at most 2 positional '
                            'arguments (%d given)' % (len(args),))
        elif not args:
            raise TypeError('update() takes at least 1 argument (0 given)')
        self = args[0]
        # Make progressively weaker assumptions about "other"
        other = ()
        if len(args) == 2:
            other = args[1]
        if isinstance(other, dict):
            for key in other:
                self[key] = other[key]
        elif hasattr(other, 'keys'):
            for key in other.keys():
                self[key] = other[key]
        else:
            for key, value in other:
                self[key] = value
        for key, value in kwds.items():
            self[key] = value

    __update = update  # let subclasses override update without breaking __init__

    # Sentinel distinguishing "no default supplied" from an explicit None.
    __marker = object()

    def pop(self, key, default=__marker):
        '''od.pop(k[,d]) -> v, remove specified key and return the corresponding value.
        If key is not found, d is returned if given, otherwise KeyError is raised.

        '''
        if key in self:
            result = self[key]
            del self[key]
            return result
        if default is self.__marker:
            raise KeyError(key)
        return default

    def setdefault(self, key, default=None):
        'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od'
        if key in self:
            return self[key]
        self[key] = default
        return default

    def __repr__(self, _repr_running={}):
        'od.__repr__() <==> repr(od)'
        # _repr_running guards against infinite recursion on self-referencing
        # dictionaries; keyed per instance and per thread.
        call_key = id(self), _get_ident()
        if call_key in _repr_running:
            return '...'
        _repr_running[call_key] = 1
        try:
            if not self:
                return '%s()' % (self.__class__.__name__,)
            return '%s(%r)' % (self.__class__.__name__, self.items())
        finally:
            del _repr_running[call_key]

    def __reduce__(self):
        'Return state information for pickling'
        items = [[k, self[k]] for k in self]
        inst_dict = vars(self).copy()
        # Drop the bookkeeping attributes; order is rebuilt from ``items``.
        for k in vars(OrderedDict()):
            inst_dict.pop(k, None)
        if inst_dict:
            return (self.__class__, (items,), inst_dict)
        return self.__class__, (items,)

    def copy(self):
        'od.copy() -> a shallow copy of od'
        return self.__class__(self)

    @classmethod
    def fromkeys(cls, iterable, value=None):
        '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S
        and values equal to v (which defaults to None).

        '''
        d = cls()
        for key in iterable:
            d[key] = value
        return d

    def __eq__(self, other):
        '''od.__eq__(y) <==> od==y.  Comparison to another OD is order-sensitive
        while comparison to a regular mapping is order-insensitive.

        '''
        if isinstance(other, OrderedDict):
            return len(self)==len(other) and self.items() == other.items()
        return dict.__eq__(self, other)

    def __ne__(self, other):
        return not self == other

    # -- the following methods are only used in Python 2.7 --

    def viewkeys(self):
        "od.viewkeys() -> a set-like object providing a view on od's keys"
        return KeysView(self)

    def viewvalues(self):
        "od.viewvalues() -> an object providing a view on od's values"
        return ValuesView(self)

    def viewitems(self):
        "od.viewitems() -> a set-like object providing a view on od's items"
        return ItemsView(self)
_vendor/urllib3/packages/backports/__pycache__/makefile.cpython-36.opt-1.pyc000064400000002223151733136360022703 0ustar003

�Pf��@s&dZddlZddlmZddd�ZdS)z�
backports.makefile
~~~~~~~~~~~~~~~~~~

Backports the Python 3 ``socket.makefile`` method for use with anything that
wants to create a "fake" socket object.
�N)�SocketIO�rc
Cst|�tdddg�ks$td|f��d|k}d|kp8|}d|k}d}	|rR|	d7}	|r^|	d7}	t||	�}
|jd7_|dkr�d
}|dkr�tj}|dkr�|s�td	��|
S|r�|r�tj|
|
|�}n|r�tj|
|�}ntj|
|�}|r�|Stj	||||�}||_
|S)z:
    Backport of ``socket.makefile`` from Python 3.5.
    r�w�bz&invalid mode %r (only r, w, b allowed)��Nrz!unbuffered streams must be binary���)�set�
ValueErrorrZ_makefile_refs�io�DEFAULT_BUFFER_SIZE�BufferedRWPair�BufferedReader�BufferedWriter�
TextIOWrapper�mode)
�selfr�	buffering�encoding�errors�newlineZwritingZreadingZbinaryZrawmode�raw�buffer�text�r�/usr/lib/python3.6/makefile.py�backport_makefiles>
r)rNNNN)�__doc__rZsocketrrrrrr�<module>s_vendor/urllib3/packages/backports/__pycache__/__init__.cpython-36.pyc000064400000000161151733136360021725 0ustar003

�Pf�@sdS)N�rrr�/usr/lib/python3.6/__init__.py�<module>s_vendor/urllib3/packages/backports/__pycache__/__init__.cpython-36.opt-1.pyc000064400000000161151733136360022664 0ustar003

�Pf�@sdS)N�rrr�/usr/lib/python3.6/__init__.py�<module>s_vendor/urllib3/packages/backports/__pycache__/makefile.cpython-36.pyc000064400000002275151733136360021753 0ustar003

�Pf��@s&dZddlZddlmZddd�ZdS)z�
backports.makefile
~~~~~~~~~~~~~~~~~~

Backports the Python 3 ``socket.makefile`` method for use with anything that
wants to create a "fake" socket object.
�N)�SocketIO�rc
Cst|�tdddg�ks$td|f��d|k}d|kp8|}|sF|sFt�d|k}d}	|r^|	d7}	|rj|	d7}	t||	�}
|jd7_|dkr�d
}|dkr�tj}|dkr�|s�td	��|
S|r�|r�tj|
|
|�}n&|r�tj|
|�}n|s�t�tj	|
|�}|�r�|Stj
||||�}||_|S)z:
    Backport of ``socket.makefile`` from Python 3.5.
    r�w�bz&invalid mode %r (only r, w, b allowed)��Nrz!unbuffered streams must be binary���)�set�
ValueError�AssertionErrorrZ_makefile_refs�io�DEFAULT_BUFFER_SIZE�BufferedRWPair�BufferedReader�BufferedWriter�
TextIOWrapper�mode)
�selfr�	buffering�encoding�errors�newlineZwritingZreadingZbinaryZrawmode�raw�buffer�text�r�/usr/lib/python3.6/makefile.py�backport_makefilesB
r)rNNNN)�__doc__rZsocketrrrrrr�<module>s_vendor/urllib3/packages/backports/__init__.py000064400000000000151733136360015431 0ustar00_vendor/urllib3/packages/backports/makefile.py000064400000002665151733136360015472 0ustar00# -*- coding: utf-8 -*-
"""
backports.makefile
~~~~~~~~~~~~~~~~~~

Backports the Python 3 ``socket.makefile`` method for use with anything that
wants to create a "fake" socket object.
"""
import io

from socket import SocketIO


def backport_makefile(self, mode="r", buffering=None, encoding=None,
                      errors=None, newline=None):
    """
    Backport of ``socket.makefile`` from Python 3.5.

    Wraps *self* (a socket-like object exposing ``_makefile_refs``) in the
    appropriate raw/buffered/text io layers for the requested *mode*.
    """
    if set(mode) - set(["r", "w", "b"]):
        raise ValueError(
            "invalid mode %r (only r, w, b allowed)" % (mode,)
        )
    writing = "w" in mode
    # A mode without "w" (or an empty mode) defaults to reading.
    reading = "r" in mode or not writing
    assert reading or writing
    binary = "b" in mode
    rawmode = ("r" if reading else "") + ("w" if writing else "")
    raw = SocketIO(self, rawmode)
    # Track how many file objects reference this socket.
    self._makefile_refs += 1
    buffering = -1 if buffering is None else buffering
    if buffering < 0:
        buffering = io.DEFAULT_BUFFER_SIZE
    if buffering == 0:
        # Unbuffered access hands back the raw SocketIO directly.
        if not binary:
            raise ValueError("unbuffered streams must be binary")
        return raw
    if reading and writing:
        buffer = io.BufferedRWPair(raw, raw, buffering)
    elif reading:
        buffer = io.BufferedReader(raw, buffering)
    else:
        assert writing
        buffer = io.BufferedWriter(raw, buffering)
    if binary:
        return buffer
    # Text mode: layer a TextIOWrapper over the buffered stream.
    text = io.TextIOWrapper(buffer, encoding, errors, newline)
    text.mode = mode
    return text
_vendor/urllib3/_collections.py000064400000023734151733136370012625 0ustar00from __future__ import absolute_import
from collections import Mapping, MutableMapping
try:
    from threading import RLock
except ImportError:  # Platform-specific: No threads available
    # No-op stand-in lock for platforms without threading; it only needs to
    # satisfy the context-manager protocol used by this module.
    class RLock:
        def __enter__(self):
            pass

        def __exit__(self, exc_type, exc_value, traceback):
            pass


try:  # Python 2.7+
    from collections import OrderedDict
except ImportError:
    from .packages.ordered_dict import OrderedDict
from .packages.six import iterkeys, itervalues, PY3


__all__ = ['RecentlyUsedContainer', 'HTTPHeaderDict']


# Sentinel distinguishing "no value present" from a stored None.
_Null = object()


class RecentlyUsedContainer(MutableMapping):
    """
    Thread-safe dict-like container that keeps at most ``maxsize`` entries,
    discarding the least-recently-used ones once the limit is exceeded.

    :param maxsize:
        Maximum number of recent elements to retain.

    :param dispose_func:
        Optional callback; ``dispose_func(value)`` is invoked for every value
        evicted from or removed out of the container.
    """

    # Ordered mapping used for storage; insertion order doubles as the
    # recency order (oldest first).
    ContainerCls = OrderedDict

    def __init__(self, maxsize=10, dispose_func=None):
        self._maxsize = maxsize
        self.dispose_func = dispose_func
        self._container = self.ContainerCls()
        self.lock = RLock()

    def __getitem__(self, key):
        # A successful lookup counts as a "use": pop and re-insert the entry
        # so it becomes the most recently used one.
        with self.lock:
            value = self._container.pop(key)
            self._container[key] = value
        return value

    def __setitem__(self, key, value):
        displaced = _Null
        with self.lock:
            # Remember any value we overwrite so it can be disposed of.
            displaced = self._container.get(key, _Null)
            self._container[key] = value
            # Evict the oldest entry once we grow past the size limit.
            if len(self._container) > self._maxsize:
                _key, displaced = self._container.popitem(last=False)
        # Run the callback outside the lock.
        if self.dispose_func and displaced is not _Null:
            self.dispose_func(displaced)

    def __delitem__(self, key):
        with self.lock:
            value = self._container.pop(key)
        if self.dispose_func:
            self.dispose_func(value)

    def __len__(self):
        with self.lock:
            return len(self._container)

    def __iter__(self):
        raise NotImplementedError('Iteration over this class is unlikely to be threadsafe.')

    def clear(self):
        with self.lock:
            # Snapshot the values, then wipe the mapping.
            values = list(itervalues(self._container))
            self._container.clear()
        if self.dispose_func:
            for value in values:
                self.dispose_func(value)

    def keys(self):
        with self.lock:
            return list(iterkeys(self._container))


class HTTPHeaderDict(MutableMapping):
    """
    :param headers:
        An iterable of field-value pairs. Must not contain multiple field names
        when compared case-insensitively.

    :param kwargs:
        Additional field-value pairs to pass in to ``dict.update``.

    A ``dict`` like container for storing HTTP Headers.

    Field names are stored and compared case-insensitively in compliance with
    RFC 7230. Iteration provides the first case-sensitive key seen for each
    case-insensitive pair.

    Using ``__setitem__`` syntax overwrites fields that compare equal
    case-insensitively in order to maintain ``dict``'s api. For fields that
    compare equal, instead create a new ``HTTPHeaderDict`` and use ``.add``
    in a loop.

    If multiple fields that are equal case-insensitively are passed to the
    constructor or ``.update``, the behavior is undefined and some will be
    lost.

    >>> headers = HTTPHeaderDict()
    >>> headers.add('Set-Cookie', 'foo=bar')
    >>> headers.add('set-cookie', 'baz=quxx')
    >>> headers['content-length'] = '7'
    >>> headers['SET-cookie']
    'foo=bar, baz=quxx'
    >>> headers['Content-Length']
    '7'
    """

    # Internal storage format: ``self._container`` maps the lowercased field
    # name to a list ``[original_key, value1, value2, ...]``.

    def __init__(self, headers=None, **kwargs):
        super(HTTPHeaderDict, self).__init__()
        self._container = OrderedDict()
        if headers is not None:
            if isinstance(headers, HTTPHeaderDict):
                self._copy_from(headers)
            else:
                self.extend(headers)
        if kwargs:
            self.extend(kwargs)

    def __setitem__(self, key, val):
        # Replaces any existing values for the (case-insensitive) key.
        self._container[key.lower()] = [key, val]
        return self._container[key.lower()]

    def __getitem__(self, key):
        # Multiple stored values are joined per the HTTP comma convention.
        val = self._container[key.lower()]
        return ', '.join(val[1:])

    def __delitem__(self, key):
        del self._container[key.lower()]

    def __contains__(self, key):
        return key.lower() in self._container

    def __eq__(self, other):
        # Compare lowercased-key merged views so equality is case- and
        # representation-insensitive.
        if not isinstance(other, Mapping) and not hasattr(other, 'keys'):
            return False
        if not isinstance(other, type(self)):
            other = type(self)(other)
        return (dict((k.lower(), v) for k, v in self.itermerged()) ==
                dict((k.lower(), v) for k, v in other.itermerged()))

    def __ne__(self, other):
        return not self.__eq__(other)

    if not PY3:  # Python 2
        iterkeys = MutableMapping.iterkeys
        itervalues = MutableMapping.itervalues

    # Sentinel distinguishing "no default supplied" from an explicit None.
    __marker = object()

    def __len__(self):
        return len(self._container)

    def __iter__(self):
        # Only provide the originally cased names
        for vals in self._container.values():
            yield vals[0]

    def pop(self, key, default=__marker):
        '''D.pop(k[,d]) -> v, remove specified key and return the corresponding value.
          If key is not found, d is returned if given, otherwise KeyError is raised.
        '''
        # Using the MutableMapping function directly fails due to the private marker.
        # Using ordinary dict.pop would expose the internal structures.
        # So let's reinvent the wheel.
        try:
            value = self[key]
        except KeyError:
            if default is self.__marker:
                raise
            return default
        else:
            del self[key]
            return value

    def discard(self, key):
        # Remove the key if present; silently do nothing otherwise.
        try:
            del self[key]
        except KeyError:
            pass

    def add(self, key, val):
        """Adds a (name, value) pair, doesn't overwrite the value if it already
        exists.

        >>> headers = HTTPHeaderDict(foo='bar')
        >>> headers.add('Foo', 'baz')
        >>> headers['foo']
        'bar, baz'
        """
        key_lower = key.lower()
        new_vals = [key, val]
        # Keep the common case aka no item present as fast as possible
        vals = self._container.setdefault(key_lower, new_vals)
        if new_vals is not vals:
            # The key already existed; append to the stored value list.
            vals.append(val)

    def extend(self, *args, **kwargs):
        """Generic import function for any type of header-like object.
        Adapted version of MutableMapping.update in order to insert items
        with self.add instead of self.__setitem__
        """
        if len(args) > 1:
            raise TypeError("extend() takes at most 1 positional "
                            "arguments ({0} given)".format(len(args)))
        other = args[0] if len(args) >= 1 else ()

        if isinstance(other, HTTPHeaderDict):
            for key, val in other.iteritems():
                self.add(key, val)
        elif isinstance(other, Mapping):
            for key in other:
                self.add(key, other[key])
        elif hasattr(other, "keys"):
            for key in other.keys():
                self.add(key, other[key])
        else:
            for key, value in other:
                self.add(key, value)

        for key, value in kwargs.items():
            self.add(key, value)

    def getlist(self, key, default=__marker):
        """Returns a list of all the values for the named field. Returns an
        empty list if the key doesn't exist."""
        try:
            vals = self._container[key.lower()]
        except KeyError:
            if default is self.__marker:
                return []
            return default
        else:
            # Skip element 0, which holds the originally cased key.
            return vals[1:]

    # Backwards compatibility for httplib
    getheaders = getlist
    getallmatchingheaders = getlist
    iget = getlist

    # Backwards compatibility for http.cookiejar
    get_all = getlist

    def __repr__(self):
        return "%s(%s)" % (type(self).__name__, dict(self.itermerged()))

    def _copy_from(self, other):
        # Copy the internal [key, value, ...] lists without sharing them.
        for key in other:
            val = other.getlist(key)
            if isinstance(val, list):
                # Don't need to convert tuples
                val = list(val)
            self._container[key.lower()] = [key] + val

    def copy(self):
        clone = type(self)()
        clone._copy_from(self)
        return clone

    def iteritems(self):
        """Iterate over all header lines, including duplicate ones."""
        for key in self:
            vals = self._container[key.lower()]
            for val in vals[1:]:
                yield vals[0], val

    def itermerged(self):
        """Iterate over all headers, merging duplicate ones together."""
        for key in self:
            val = self._container[key.lower()]
            yield val[0], ', '.join(val[1:])

    def items(self):
        return list(self.iteritems())

    @classmethod
    def from_httplib(cls, message):  # Python 2
        """Read headers from a Python 2 httplib message object."""
        # python2.7 does not expose a proper API for exporting multiheaders
        # efficiently. This function re-reads raw lines from the message
        # object and extracts the multiheaders properly.
        headers = []

        for line in message.headers:
            # Leading whitespace marks a continuation of the previous header.
            if line.startswith((' ', '\t')):
                key, value = headers[-1]
                headers[-1] = (key, value + '\r\n' + line.rstrip())
                continue

            key, value = line.split(':', 1)
            headers.append((key, value.strip()))

        return cls(headers)
_vendor/urllib3/filepost.py000064400000004421151733136370011765 0ustar00from __future__ import absolute_import
import codecs

from uuid import uuid4
from io import BytesIO

from .packages import six
from .packages.six import b
from .fields import RequestField

writer = codecs.lookup('utf-8')[3]


def choose_boundary():
    """Return a random 32-char hex string usable as a multipart boundary.

    Embarrassingly-simple replacement for mimetools.choose_boundary.
    """
    token = uuid4()
    return token.hex


def iter_field_objects(fields):
    """
    Iterate over fields.

    Supports list of (k, v) tuples and dicts, and lists of
    :class:`~urllib3.fields.RequestField`.

    """
    # Dicts iterate as (key, value) pairs; anything else is taken as an
    # iterable of tuples or ready-made RequestField objects.
    if isinstance(fields, dict):
        source = six.iteritems(fields)
    else:
        source = iter(fields)

    for item in source:
        if isinstance(item, RequestField):
            yield item
        else:
            yield RequestField.from_tuples(*item)


def iter_fields(fields):
    """
    .. deprecated:: 1.6

    Iterate over fields, yielding (key, value) pairs.

    Superseded by :func:`iter_field_objects`, which yields
    :class:`~urllib3.fields.RequestField` objects instead.

    Supports list of (k, v) tuples and dicts.
    """
    if isinstance(fields, dict):
        return ((key, value) for key, value in six.iteritems(fields))
    return ((key, value) for key, value in fields)


def encode_multipart_formdata(fields, boundary=None):
    """
    Encode a dictionary of ``fields`` using the multipart/form-data MIME format.

    :param fields:
        Dictionary of fields or list of (key, :class:`~urllib3.fields.RequestField`).

    :param boundary:
        If not specified, then a random boundary will be generated using
        :func:`mimetools.choose_boundary`.
    """
    if boundary is None:
        boundary = choose_boundary()

    body = BytesIO()
    for part in iter_field_objects(fields):
        body.write(b('--%s\r\n' % (boundary)))
        writer(body).write(part.render_headers())

        payload = part.data
        if isinstance(payload, int):
            payload = str(payload)  # Backwards compatibility

        # Text goes through the UTF-8 stream writer; bytes are written raw.
        if isinstance(payload, six.text_type):
            writer(body).write(payload)
        else:
            body.write(payload)

        body.write(b'\r\n')

    # Closing boundary marker.
    body.write(b('--%s--\r\n' % (boundary)))

    content_type = str('multipart/form-data; boundary=%s' % boundary)

    return body.getvalue(), content_type
_vendor/urllib3/exceptions.py000064400000014713151733136370012326 0ustar00from __future__ import absolute_import
from .packages.six.moves.http_client import (
    IncompleteRead as httplib_IncompleteRead
)
# Base Exceptions
#
# Every error raised by this module derives from HTTPError and every warning
# from HTTPWarning, so callers can catch the whole family with one clause.


class HTTPError(Exception):
    "Base exception used by this module."
    pass


class HTTPWarning(Warning):
    "Base warning used by this module."
    pass


class PoolError(HTTPError):
    "Base exception for errors caused within a pool."
    def __init__(self, pool, message):
        # Keep a reference to the offending pool for inspection by callers.
        self.pool = pool
        HTTPError.__init__(self, "%s: %s" % (pool, message))

    def __reduce__(self):
        # For pickling purposes. The pool object itself is not picklable,
        # so reconstruct with placeholder arguments instead.
        return self.__class__, (None, None)


class RequestError(PoolError):
    "Base exception for PoolErrors that have associated URLs."
    def __init__(self, pool, url, message):
        # The URL of the request that failed, for inspection by callers.
        self.url = url
        PoolError.__init__(self, pool, message)

    def __reduce__(self):
        # For pickling purposes. Only the URL is preserved; the pool is not
        # picklable (see PoolError.__reduce__).
        return self.__class__, (None, self.url, None)


class SSLError(HTTPError):
    "Raised when SSL certificate fails in an HTTPS connection."
    pass


class ProxyError(HTTPError):
    "Raised when the connection to a proxy fails."
    pass


class DecodeError(HTTPError):
    "Raised when automatic decoding based on Content-Type fails."
    pass


class ProtocolError(HTTPError):
    "Raised when something unexpected happens mid-request/response."
    pass


#: Renamed to ProtocolError but aliased for backwards compatibility.
#: NOTE: within this module the name shadows the Python 3 builtin
#: ConnectionError.
ConnectionError = ProtocolError


# Leaf Exceptions

class MaxRetryError(RequestError):
    """Raised when the maximum number of retries is exceeded.

    :param pool: The connection pool
    :type pool: :class:`~urllib3.connectionpool.HTTPConnectionPool`
    :param string url: The requested Url
    :param exceptions.Exception reason: The underlying error

    """

    def __init__(self, pool, url, reason=None):
        # The underlying exception (may be None) that triggered the retries.
        self.reason = reason

        message = "Max retries exceeded with url: %s (Caused by %r)" % (
            url, reason)

        RequestError.__init__(self, pool, url, message)


class HostChangedError(RequestError):
    "Raised when an existing pool gets a request for a foreign host."

    def __init__(self, pool, url, retries=3):
        message = "Tried to open a foreign host with url: %s" % url
        RequestError.__init__(self, pool, url, message)
        # Remaining retries, so the caller can decide whether to re-dispatch.
        self.retries = retries


class TimeoutStateError(HTTPError):
    """ Raised when passing an invalid state to a timeout """
    pass


class TimeoutError(HTTPError):
    """ Raised when a socket timeout error occurs.

    Catching this error will catch both :exc:`ReadTimeoutErrors
    <ReadTimeoutError>` and :exc:`ConnectTimeoutErrors <ConnectTimeoutError>`.
    """
    pass


# Inherits RequestError as well, so instances carry the pool and URL.
class ReadTimeoutError(TimeoutError, RequestError):
    "Raised when a socket timeout occurs while receiving data from a server"
    pass


# This timeout error does not have a URL attached and needs to inherit from the
# base HTTPError
class ConnectTimeoutError(TimeoutError):
    "Raised when a socket timeout occurs while connecting to a server"
    pass


# NOTE(review): despite subclassing ConnectTimeoutError, construction follows
# PoolError's (pool, message) signature.
class NewConnectionError(ConnectTimeoutError, PoolError):
    "Raised when we fail to establish a new connection. Usually ECONNREFUSED."
    pass


class EmptyPoolError(PoolError):
    "Raised when a pool runs out of connections and no more are allowed."
    pass


class ClosedPoolError(PoolError):
    "Raised when a request enters a pool after the pool has been closed."
    pass


class LocationValueError(ValueError, HTTPError):
    "Raised when there is something wrong with a given URL input."
    pass


class LocationParseError(LocationValueError):
    "Raised when get_host or similar fails to parse the URL input."

    def __init__(self, location):
        message = "Failed to parse: %s" % location
        # Calls HTTPError.__init__ directly, bypassing ValueError's
        # initializer in the MRO.
        HTTPError.__init__(self, message)

        # The unparseable input, preserved for callers to inspect.
        self.location = location


class ResponseError(HTTPError):
    "Used as a container for an error reason supplied in a MaxRetryError."
    # Message templates used when retries are exhausted by status codes.
    GENERIC_ERROR = 'too many error responses'
    SPECIFIC_ERROR = 'too many {status_code} error responses'


class SecurityWarning(HTTPWarning):
    "Warned when performing security reducing actions"
    pass


class SubjectAltNameWarning(SecurityWarning):
    "Warned when connecting to a host with a certificate missing a SAN."
    pass


class InsecureRequestWarning(SecurityWarning):
    "Warned when making an unverified HTTPS request."
    pass


class SystemTimeWarning(SecurityWarning):
    "Warned when system time is suspected to be wrong"
    pass


class InsecurePlatformWarning(SecurityWarning):
    "Warned when certain SSL configuration is not available on a platform."
    pass


class SNIMissingWarning(HTTPWarning):
    "Warned when making a HTTPS request without SNI available."
    pass


class DependencyWarning(HTTPWarning):
    """
    Warned when an attempt is made to import a module with missing optional
    dependencies.
    """
    pass


class ResponseNotChunked(ProtocolError, ValueError):
    "Response needs to be chunked in order to read it as chunks."
    pass


class BodyNotHttplibCompatible(HTTPError):
    """
    Body should be httplib.HTTPResponse like (have an fp attribute which
    returns raw chunks) for read_chunked().
    """
    pass


class IncompleteRead(HTTPError, httplib_IncompleteRead):
    """
    Response length doesn't match expected Content-Length

    Subclass of http_client.IncompleteRead to allow int value
    for `partial` to avoid creating large objects on streamed
    reads.
    """
    def __init__(self, partial, expected):
        super(IncompleteRead, self).__init__(partial, expected)

    def __repr__(self):
        # %i formatting relies on partial/expected being ints (see class
        # docstring), unlike the stdlib class which stores the partial bytes.
        return ('IncompleteRead(%i bytes read, '
                '%i more expected)' % (self.partial, self.expected))


class InvalidHeader(HTTPError):
    "The header provided was somehow invalid."
    pass


class ProxySchemeUnknown(AssertionError, ValueError):
    "ProxyManager does not support the supplied scheme"
    # TODO(t-8ch): Stop inheriting from AssertionError in v2.0.

    def __init__(self, scheme):
        message = "Not supported proxy scheme %s" % scheme
        super(ProxySchemeUnknown, self).__init__(message)


class HeaderParsingError(HTTPError):
    "Raised by assert_header_parsing, but we convert it to a log.warning statement."
    def __init__(self, defects, unparsed_data):
        # `defects` comes from the email parser; fall back to 'Unknown' when
        # it is empty/falsy so the message is never blank.
        message = '%s, unparsed data: %r' % (defects or 'Unknown', unparsed_data)
        super(HeaderParsingError, self).__init__(message)


class UnrewindableBodyError(HTTPError):
    "urllib3 encountered an error when trying to rewind a body"
    pass
_vendor/appdirs.py000064400000053540151733136370010234 0ustar00#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2005-2010 ActiveState Software Inc.
# Copyright (c) 2013 Eddy Petrișor

"""Utilities for determining application-specific dirs.

See <http://github.com/ActiveState/appdirs> for details and usage.
"""
# Dev Notes:
# - MSDN on where to store app data files:
#   http://support.microsoft.com/default.aspx?scid=kb;en-us;310294#XSLTH3194121123120121120120
# - macOS: http://developer.apple.com/documentation/MacOSX/Conceptual/BPFileSystem/index.html
# - XDG spec for Un*x: http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html

__version_info__ = (1, 4, 0)
__version__ = '.'.join(map(str, __version_info__))


import sys
import os

# True when running under Python 3.
PY3 = sys.version_info[0] == 3

if PY3:
    unicode = str  # Py2/Py3 shim so `unicode(...)` works below.

# Normalize the platform name to a sys.platform-style string; Jython reports
# 'java...' so the underlying OS must be detected via the platform module.
if sys.platform.startswith('java'):
    import platform
    os_name = platform.java_ver()[3][0]
    if os_name.startswith('Windows'): # "Windows XP", "Windows 7", etc.
        system = 'win32'
    elif os_name.startswith('Mac'): # "macOS", etc.
        system = 'darwin'
    else: # "Linux", "SunOS", "FreeBSD", etc.
        # Setting this to "linux2" is not ideal, but only Windows or Mac
        # are actually checked for and the rest of the module expects
        # *sys.platform* style strings.
        system = 'linux2'
else:
    system = sys.platform



def user_data_dir(appname=None, appauthor=None, version=None, roaming=False):
    r"""Return full path to the user-specific data dir for this application.

    appname: application name; if None, just the system directory is returned.
    appauthor: Windows-only author/company path segment; defaults to appname,
        pass False to omit it.
    version: optional version path segment, applied only when appname is given.
    roaming: Windows-only; use the roaming (network-sync'd) AppData directory.

    Typical locations: macOS ~/Library/Application Support/<AppName>;
    Unix $XDG_DATA_HOME (default ~/.local/share)/<AppName>;
    Windows %APPDATA% or %LOCALAPPDATA% under <AppAuthor>\<AppName>.
    """
    if system == "win32":
        if appauthor is None:
            appauthor = appname
        csidl = "CSIDL_APPDATA" if roaming else "CSIDL_LOCAL_APPDATA"
        path = os.path.normpath(_get_win_folder(csidl))
        if appname:
            segments = [appname] if appauthor is False else [appauthor, appname]
            path = os.path.join(path, *segments)
    elif system == 'darwin':
        path = os.path.expanduser('~/Library/Application Support/')
        if appname:
            path = os.path.join(path, appname)
    else:
        # XDG spec: honour $XDG_DATA_HOME, defaulting to ~/.local/share.
        path = os.getenv('XDG_DATA_HOME', os.path.expanduser("~/.local/share"))
        if appname:
            path = os.path.join(path, appname)
    if appname and version:
        path = os.path.join(path, version)
    return path


def site_data_dir(appname=None, appauthor=None, version=None, multipath=False):
    """Return full path to the user-shared data dir for this application.

    appname/appauthor/version behave as in user_data_dir. On *nix,
    multipath=True returns every entry of $XDG_DATA_DIRS joined with
    os.pathsep instead of just the first (spec default:
    /usr/local/share and /usr/share).

    WARNING: Do not use this on Windows; on Vista the common-appdata
    location is a hidden *system* directory.
    """
    if system == "win32":
        if appauthor is None:
            appauthor = appname
        path = os.path.normpath(_get_win_folder("CSIDL_COMMON_APPDATA"))
        if appname:
            segments = [appname] if appauthor is False else [appauthor, appname]
            path = os.path.join(path, *segments)
    elif system == 'darwin':
        path = os.path.expanduser('/Library/Application Support')
        if appname:
            path = os.path.join(path, appname)
    else:
        # XDG: fan appname (plus version) out over every $XDG_DATA_DIRS entry.
        raw = os.getenv('XDG_DATA_DIRS',
                        os.pathsep.join(['/usr/local/share', '/usr/share']))
        candidates = [os.path.expanduser(entry.rstrip(os.sep))
                      for entry in raw.split(os.pathsep)]
        if appname:
            if version:
                appname = os.path.join(appname, version)
            candidates = [os.sep.join([entry, appname]) for entry in candidates]

        # version (if any) was already folded in above, so return directly.
        return os.pathsep.join(candidates) if multipath else candidates[0]

    if appname and version:
        path = os.path.join(path, version)
    return path


def user_config_dir(appname=None, appauthor=None, version=None, roaming=False):
    r"""Return full path to the user-specific config dir for this application.

    appname/appauthor/version/roaming behave as in user_data_dir. On Windows
    and macOS this is the same location as user_data_dir; elsewhere it
    follows the XDG spec ($XDG_CONFIG_HOME, by default ~/.config/<AppName>).
    """
    if system in ["win32", "darwin"]:
        path = user_data_dir(appname, appauthor, None, roaming)
    else:
        base = os.getenv('XDG_CONFIG_HOME', os.path.expanduser("~/.config"))
        path = os.path.join(base, appname) if appname else base
    if appname and version:
        path = os.path.join(path, version)
    return path


def site_config_dir(appname=None, appauthor=None, version=None, multipath=False):
    """Return full path to the machine-wide config dir for this application.

    appname/appauthor/version behave as in user_data_dir. On *nix,
    multipath=True returns every entry of $XDG_CONFIG_DIRS joined with
    os.pathsep instead of just the first ('/etc/xdg' when unset). On
    Windows and macOS this mirrors site_data_dir.

    WARNING: Do not use this on Windows; on Vista the common-appdata
    location is a hidden *system* directory.
    """
    if system in ["win32", "darwin"]:
        path = site_data_dir(appname, appauthor)
        if appname and version:
            path = os.path.join(path, version)
    else:
        # XDG: fan appname (plus version) out over every $XDG_CONFIG_DIRS
        # entry.
        raw = os.getenv('XDG_CONFIG_DIRS', '/etc/xdg')
        candidates = [os.path.expanduser(entry.rstrip(os.sep))
                      for entry in raw.split(os.pathsep)]
        if appname:
            if version:
                appname = os.path.join(appname, version)
            candidates = [os.sep.join([entry, appname]) for entry in candidates]

        path = os.pathsep.join(candidates) if multipath else candidates[0]
    return path


def user_cache_dir(appname=None, appauthor=None, version=None, opinion=True):
    r"""Return full path to the user-specific cache dir for this application.

    appname/appauthor/version behave as in user_data_dir. On Windows the
    base is the non-roaming CSIDL_LOCAL_APPDATA dir (shared with
    user_data_dir), so "Cache" is appended unless opinion=False.

    Typical locations: macOS ~/Library/Caches/<AppName>;
    Unix $XDG_CACHE_HOME (default ~/.cache)/<AppName>.
    """
    if system == "win32":
        if appauthor is None:
            appauthor = appname
        path = os.path.normpath(_get_win_folder("CSIDL_LOCAL_APPDATA"))
        if appname:
            segments = [appname] if appauthor is False else [appauthor, appname]
            path = os.path.join(path, *segments)
            if opinion:
                path = os.path.join(path, "Cache")
    elif system == 'darwin':
        path = os.path.expanduser('~/Library/Caches')
        if appname:
            path = os.path.join(path, appname)
    else:
        path = os.getenv('XDG_CACHE_HOME', os.path.expanduser('~/.cache'))
        if appname:
            path = os.path.join(path, appname)
    if appname and version:
        path = os.path.join(path, version)
    return path


def user_log_dir(appname=None, appauthor=None, version=None, opinion=True):
    r"""Return full path to the user-specific log dir for this application.

    appname/appauthor/version behave as in user_data_dir. When opinion is
    true, "Logs" is appended on Windows and "log" on other *nix; macOS uses
    ~/Library/Logs/<AppName> directly.
    """
    if system == "darwin":
        path = os.path.join(
            os.path.expanduser('~/Library/Logs'),
            appname)
        if appname and version:
            path = os.path.join(path, version)
        return path

    if system == "win32":
        base = user_data_dir(appname, appauthor, version)
        suffix = "Logs"
    else:
        base = user_cache_dir(appname, appauthor, version)
        suffix = "log"
    # version (if any) is already included by the call above, so only the
    # opinionated suffix remains to be applied.
    return os.path.join(base, suffix) if opinion else base


class AppDirs(object):
    """Convenience wrapper for getting application dirs.

    Stores appname/appauthor/version/roaming/multipath once and exposes
    each directory kind as a read-only property delegating to the
    module-level functions.
    """
    def __init__(self, appname, appauthor=None, version=None, roaming=False,
                 multipath=False):
        self.appname = appname
        self.appauthor = appauthor
        self.version = version
        # Windows only: use the roaming AppData directory.
        self.roaming = roaming
        # *nix only: return all XDG dirs instead of just the first.
        self.multipath = multipath

    @property
    def user_data_dir(self):
        """Per-user data directory."""
        return user_data_dir(self.appname, self.appauthor,
                             version=self.version, roaming=self.roaming)

    @property
    def site_data_dir(self):
        """Shared (site-wide) data directory."""
        return site_data_dir(self.appname, self.appauthor,
                             version=self.version, multipath=self.multipath)

    @property
    def user_config_dir(self):
        """Per-user config directory."""
        return user_config_dir(self.appname, self.appauthor,
                               version=self.version, roaming=self.roaming)

    @property
    def site_config_dir(self):
        """Shared (site-wide) config directory."""
        return site_config_dir(self.appname, self.appauthor,
                             version=self.version, multipath=self.multipath)

    @property
    def user_cache_dir(self):
        """Per-user cache directory."""
        return user_cache_dir(self.appname, self.appauthor,
                              version=self.version)

    @property
    def user_log_dir(self):
        """Per-user log directory."""
        return user_log_dir(self.appname, self.appauthor,
                            version=self.version)


#---- internal support stuff

def _get_win_folder_from_registry(csidl_name):
    """This is a fallback technique at best. I'm not sure if using the
    registry for this guarantees us the correct answer for all CSIDL_*
    names.
    """
    # NOTE(review): `_winreg` is the Python 2 module name (`winreg` on Py3);
    # this path is only reached on Windows when pywin32/ctypes/JNA are all
    # unavailable.
    import _winreg

    # Map the CSIDL constant to its value name under the "Shell Folders" key.
    shell_folder_name = {
        "CSIDL_APPDATA": "AppData",
        "CSIDL_COMMON_APPDATA": "Common AppData",
        "CSIDL_LOCAL_APPDATA": "Local AppData",
    }[csidl_name]

    key = _winreg.OpenKey(
        _winreg.HKEY_CURRENT_USER,
        r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders"
    )
    dir, type = _winreg.QueryValueEx(key, shell_folder_name)
    return dir


def _get_win_folder_with_pywin32(csidl_name):
    """Resolve a CSIDL folder path using the pywin32 shell bindings."""
    from win32com.shell import shellcon, shell
    dir = shell.SHGetFolderPath(0, getattr(shellcon, csidl_name), 0, 0)
    # Try to make this a unicode path because SHGetFolderPath does
    # not return unicode strings when there is unicode data in the
    # path.
    try:
        dir = unicode(dir)

        # Downgrade to short path name if have highbit chars. See
        # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
        has_high_char = False
        for c in dir:
            if ord(c) > 255:
                has_high_char = True
                break
        if has_high_char:
            try:
                import win32api
                dir = win32api.GetShortPathName(dir)
            except ImportError:
                # win32api missing: keep the long (possibly highbit) path.
                pass
    except UnicodeError:
        pass
    return dir


def _get_win_folder_with_ctypes(csidl_name):
    """Resolve a CSIDL folder path via ctypes calls into shell32."""
    import ctypes

    # Numeric CSIDL constants accepted by SHGetFolderPathW.
    csidl_const = {
        "CSIDL_APPDATA": 26,
        "CSIDL_COMMON_APPDATA": 35,
        "CSIDL_LOCAL_APPDATA": 28,
    }[csidl_name]

    buf = ctypes.create_unicode_buffer(1024)
    ctypes.windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf)

    # Downgrade to the DOS short path name when the result contains highbit
    # chars. See <http://bugs.activestate.com/show_bug.cgi?id=85099>.
    if any(ord(ch) > 255 for ch in buf):
        short = ctypes.create_unicode_buffer(1024)
        if ctypes.windll.kernel32.GetShortPathNameW(buf.value, short, 1024):
            buf = short

    return buf.value

def _get_win_folder_with_jna(csidl_name):
    """Resolve a CSIDL folder path via JNA (Jython on Windows).

    Mirrors the ctypes variant: fetch the folder path, then downgrade to
    the DOS short path name when it contains high-bit characters.
    """
    import array
    from com.sun import jna
    from com.sun.jna.platform import win32

    buf_size = win32.WinDef.MAX_PATH * 2
    buf = array.zeros('c', buf_size)
    shell = win32.Shell32.INSTANCE
    shell.SHGetFolderPath(None, getattr(win32.ShlObj, csidl_name), None, win32.ShlObj.SHGFP_TYPE_CURRENT, buf)
    dir = jna.Native.toString(buf.tostring()).rstrip("\0")

    # Downgrade to short path name if have highbit chars. See
    # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
    has_high_char = False
    for c in dir:
        if ord(c) > 255:
            has_high_char = True
            break
    if has_high_char:
        buf = array.zeros('c', buf_size)
        kernel = win32.Kernel32.INSTANCE
        # BUG FIX: this previously referenced the undefined name `kernal`,
        # raising NameError whenever the resolved path had high-bit chars.
        if kernel.GetShortPathName(dir, buf, buf_size):
            dir = jna.Native.toString(buf.tostring()).rstrip("\0")

    return dir

if system == "win32":
    # Pick the best available backend for resolving shell folders:
    # pywin32, then ctypes, then JNA (Jython), then the registry fallback.
    try:
        import win32com.shell
        _get_win_folder = _get_win_folder_with_pywin32
    except ImportError:
        try:
            from ctypes import windll
            _get_win_folder = _get_win_folder_with_ctypes
        except ImportError:
            try:
                import com.sun.jna
                _get_win_folder = _get_win_folder_with_jna
            except ImportError:
                _get_win_folder = _get_win_folder_from_registry


#---- self test code

if __name__ == "__main__":
    # Smoke test: print every directory property for a sample app under four
    # configurations (with version, without version, without appauthor, and
    # with appauthor explicitly disabled).
    appname = "MyApp"
    appauthor = "MyCompany"

    props = ("user_data_dir", "site_data_dir",
             "user_config_dir", "site_config_dir",
             "user_cache_dir", "user_log_dir")

    print("-- app dirs (with optional 'version')")
    dirs = AppDirs(appname, appauthor, version="1.0")
    for prop in props:
        print("%s: %s" % (prop, getattr(dirs, prop)))

    print("\n-- app dirs (without optional 'version')")
    dirs = AppDirs(appname, appauthor)
    for prop in props:
        print("%s: %s" % (prop, getattr(dirs, prop)))

    print("\n-- app dirs (without optional 'appauthor')")
    dirs = AppDirs(appname)
    for prop in props:
        print("%s: %s" % (prop, getattr(dirs, prop)))

    print("\n-- app dirs (with disabled 'appauthor')")
    dirs = AppDirs(appname, appauthor=False)
    for prop in props:
        print("%s: %s" % (prop, getattr(dirs, prop)))
_vendor/colorama/initialise.py000064400000003575151733136370012524 0ustar00# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
import atexit
import contextlib
import sys

from .ansitowin32 import AnsiToWin32


# Original stdout/stderr saved by init() so deinit() can restore them.
orig_stdout = None
orig_stderr = None

# The (possibly AnsiToWin32-wrapped) streams installed by init().
wrapped_stdout = None
wrapped_stderr = None

# Guards against registering the reset_all atexit hook more than once.
atexit_done = False


def reset_all():
    """Write a full ANSI reset to the original stdout (used at exit)."""
    # Issue #74: module globals may already have been cleared to None during
    # interpreter shutdown, so guard before instantiating.
    if AnsiToWin32 is not None:
        AnsiToWin32(orig_stdout).reset_all()


def init(autoreset=False, convert=None, strip=None, wrap=True):
    """Install colorama's stream wrappers around sys.stdout / sys.stderr.

    ``wrap=False`` is only meaningful on its own; combining it with any
    other truthy option is a usage error and raises ValueError.
    """
    if not wrap and any([autoreset, convert, strip]):
        raise ValueError('wrap=False conflicts with any other arg=True')

    global orig_stdout, orig_stderr
    global wrapped_stdout, wrapped_stderr
    global atexit_done

    # Remember the current streams so deinit()/reset_all() can restore them.
    orig_stdout = sys.stdout
    orig_stderr = sys.stderr

    if sys.stdout is None:
        wrapped_stdout = None
    else:
        wrapped_stdout = wrap_stream(orig_stdout, convert, strip, autoreset, wrap)
        sys.stdout = wrapped_stdout
    if sys.stderr is None:
        wrapped_stderr = None
    else:
        wrapped_stderr = wrap_stream(orig_stderr, convert, strip, autoreset, wrap)
        sys.stderr = wrapped_stderr

    # Ensure colors are reset at interpreter exit, registering the hook at
    # most once no matter how many times init() is called.
    if not atexit_done:
        atexit.register(reset_all)
        atexit_done = True


def deinit():
    """Undo init(): put the original (unwrapped) streams back on sys."""
    for attr, saved in (('stdout', orig_stdout), ('stderr', orig_stderr)):
        if saved is not None:
            setattr(sys, attr, saved)


@contextlib.contextmanager
def colorama_text(*args, **kwargs):
    """Context manager form of init()/deinit().

    Forwards all arguments to init() on entry and guarantees deinit() runs
    on exit, even if the body raises.
    """
    init(*args, **kwargs)
    try:
        yield
    finally:
        deinit()


def reinit():
    """Re-install the wrapped streams saved by the most recent init()."""
    for attr, wrapper in (('stdout', wrapped_stdout), ('stderr', wrapped_stderr)):
        if wrapper is not None:
            setattr(sys, attr, wrapper)


def wrap_stream(stream, convert, strip, autoreset, wrap):
    """Return the stream to install on sys: either the original stream, or
    an AnsiToWin32 proxy around it when wrapping is requested and needed."""
    if not wrap:
        return stream
    wrapper = AnsiToWin32(stream,
        convert=convert, strip=strip, autoreset=autoreset)
    return wrapper.stream if wrapper.should_wrap() else stream


_vendor/colorama/__pycache__/win32.cpython-36.pyc000064400000006743151733136370015620 0ustar003

�Pf��@s�dZdZy.ddlZddlmZeej�ZddlmZWn.eefk
rddZdd�Z	dd�Z
Y�nfXdd	lmZmZm
Z
mZejZGd
d�de�ZejjZejge_eje_ejjZejee�ge_eje_ejj	Zejejge_eje_ejjZejege_eje_ejj Z!eje
ejeeej�ge!_eje!_ejj"Z#ejejejeeej�ge#_eje#_ejj$Z%ej&ge%_eje%_eee�eee�iZ'dd
�Z
efdd�Zdd�Z	ddd�Zdd�Z(dd�Z"dd�Z)dS)���N)�
LibraryLoader)�wintypescGsdS)N�)�_rr�/usr/lib/python3.6/win32.py�<lambda>sr	cGsdS)Nr)rrrrr	s)�byref�	Structure�c_char�POINTERc@s>eZdZdZdefdefdejfdejfdefgZdd�Z	d	S)
�CONSOLE_SCREEN_BUFFER_INFOzstruct in wincon.h.�dwSize�dwCursorPosition�wAttributes�srWindow�dwMaximumWindowSizecCsHd|jj|jj|jj|jj|j|jj|jj|jj|jj	|j
j|j
jfS)Nz"(%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d))r�Y�Xrrr�Top�LeftZBottomZRightr)�selfrrr�__str__sz"CONSOLE_SCREEN_BUFFER_INFO.__str__N)
�__name__�
__module__�__qualname__�__doc__�COORDr�WORDZ
SMALL_RECTZ_fields_rrrrrrs
rcCs$tt}t�}t|t|��}t|�S)N)�handles�STDOUTr�_GetConsoleScreenBufferInfor
�bool)�handle�csbi�successrrr�winapi_testas
r'cCs t|}t�}t|t|��}|S)N)r rr"r
)�	stream_idr$r%r&rrr�GetConsoleScreenBufferInfohs
r)cCst|}t||�S)N)r �_SetConsoleTextAttribute)r(Zattrsr$rrr�SetConsoleTextAttributeosr+TcCsvt|�}|jdks|jdkr dSt|jd|jd�}|rdtt�j}|j|j7_|j|j7_t|}t	||�S)Nr�)
rrrr)r!rrrr �_SetConsoleCursorPosition)r(ZpositionZadjustZadjusted_position�srr$rrr�SetConsoleCursorPositionss
r/cCsBt|}t|j��}tj|�}tjd�}t||||t|��}|jS)Nr)r r�encoder�DWORD�_FillConsoleOutputCharacterAr
�value)r(�char�length�startr$�num_writtenr&rrr�FillConsoleOutputCharacter�s

r8cCs:t|}tj|�}tj|�}tjd�}t||||t|��S)za FillConsoleOutputAttribute( hConsole, csbi.wAttributes, dwConSize, coordScreen, &cCharsWritten )r)r rrr1�_FillConsoleOutputAttributer
)r(�attrr5r6r$Z	attributer7rrr�FillConsoleOutputAttribute�s


r;cCst|�S)N)�_SetConsoleTitleW)�titlerrr�SetConsoleTitle�sr>i����i�)T)*r!ZSTDERRZctypesrZWinDLLZwindllr�AttributeError�ImportErrorr+r'r
rrr
Z_COORDrrZkernel32ZGetStdHandleZ
_GetStdHandler1ZargtypesZHANDLEZrestyper)r"ZBOOLr*rr/r-ZFillConsoleOutputCharacterAr2r;r9ZSetConsoleTitleAr<ZLPCSTRr r8r>rrrr�<module>sl






_vendor/colorama/__pycache__/win32.cpython-36.opt-1.pyc000064400000006743151733136370016557 0ustar003

�Pf��@s�dZdZy.ddlZddlmZeej�ZddlmZWn.eefk
rddZdd�Z	dd�Z
Y�nfXdd	lmZmZm
Z
mZejZGd
d�de�ZejjZejge_eje_ejjZejee�ge_eje_ejj	Zejejge_eje_ejjZejege_eje_ejj Z!eje
ejeeej�ge!_eje!_ejj"Z#ejejejeeej�ge#_eje#_ejj$Z%ej&ge%_eje%_eee�eee�iZ'dd
�Z
efdd�Zdd�Z	ddd�Zdd�Z(dd�Z"dd�Z)dS)���N)�
LibraryLoader)�wintypescGsdS)N�)�_rr�/usr/lib/python3.6/win32.py�<lambda>sr	cGsdS)Nr)rrrrr	s)�byref�	Structure�c_char�POINTERc@s>eZdZdZdefdefdejfdejfdefgZdd�Z	d	S)
�CONSOLE_SCREEN_BUFFER_INFOzstruct in wincon.h.�dwSize�dwCursorPosition�wAttributes�srWindow�dwMaximumWindowSizecCsHd|jj|jj|jj|jj|j|jj|jj|jj|jj	|j
j|j
jfS)Nz"(%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d))r�Y�Xrrr�Top�LeftZBottomZRightr)�selfrrr�__str__sz"CONSOLE_SCREEN_BUFFER_INFO.__str__N)
�__name__�
__module__�__qualname__�__doc__�COORDr�WORDZ
SMALL_RECTZ_fields_rrrrrrs
rcCs$tt}t�}t|t|��}t|�S)N)�handles�STDOUTr�_GetConsoleScreenBufferInfor
�bool)�handle�csbi�successrrr�winapi_testas
r'cCs t|}t�}t|t|��}|S)N)r rr"r
)�	stream_idr$r%r&rrr�GetConsoleScreenBufferInfohs
r)cCst|}t||�S)N)r �_SetConsoleTextAttribute)r(Zattrsr$rrr�SetConsoleTextAttributeosr+TcCsvt|�}|jdks|jdkr dSt|jd|jd�}|rdtt�j}|j|j7_|j|j7_t|}t	||�S)Nr�)
rrrr)r!rrrr �_SetConsoleCursorPosition)r(ZpositionZadjustZadjusted_position�srr$rrr�SetConsoleCursorPositionss
r/cCsBt|}t|j��}tj|�}tjd�}t||||t|��}|jS)Nr)r r�encoder�DWORD�_FillConsoleOutputCharacterAr
�value)r(�char�length�startr$�num_writtenr&rrr�FillConsoleOutputCharacter�s

r8cCs:t|}tj|�}tj|�}tjd�}t||||t|��S)za FillConsoleOutputAttribute( hConsole, csbi.wAttributes, dwConSize, coordScreen, &cCharsWritten )r)r rrr1�_FillConsoleOutputAttributer
)r(�attrr5r6r$Z	attributer7rrr�FillConsoleOutputAttribute�s


r;cCst|�S)N)�_SetConsoleTitleW)�titlerrr�SetConsoleTitle�sr>i����i�)T)*r!ZSTDERRZctypesrZWinDLLZwindllr�AttributeError�ImportErrorr+r'r
rrr
Z_COORDrrZkernel32ZGetStdHandleZ
_GetStdHandler1ZargtypesZHANDLEZrestyper)r"ZBOOLr*rr/r-ZFillConsoleOutputCharacterAr2r;r9ZSetConsoleTitleAr<ZLPCSTRr r8r>rrrr�<module>sl






_vendor/colorama/__pycache__/ansi.cpython-36.opt-1.pyc000064400000006276151733136370016550 0ustar003

�Pf�	�@s�dZdZdZdZdd�Zdd�Zdd	d
�Zddd�ZGd
d�de�Z	Gdd�de�Z
Gdd�de	�ZGdd�de	�ZGdd�de	�Z
e�Ze�Ze
�Ze
�ZdS)z�
This module generates ANSI character codes to printing colors to terminals.
See: http://en.wikipedia.org/wiki/ANSI_escape_code
z]�cCstt|�dS)N�m)�CSI�str)�code�r�/usr/lib/python3.6/ansi.py�
code_to_charssrcCstd|tS)Nz2;)�OSC�BEL)�titlerrr�	set_titlesr�cCstt|�dS)N�J)rr)�moderrr�clear_screensrcCstt|�dS)N�K)rr)rrrr�
clear_linesrc@seZdZdd�ZdS)�	AnsiCodescCs:x4t|�D](}|jd�s
t||�}t||t|��q
WdS)N�_)�dir�
startswith�getattr�setattrr)�self�name�valuerrr�__init__s

zAnsiCodes.__init__N)�__name__�
__module__�__qualname__rrrrrrsrc@s>eZdZd
dd�Zddd�Zddd�Zddd	�Zdd
d�ZdS)�
AnsiCursor�cCstt|�dS)N�A)rr)r�nrrr�UP%sz
AnsiCursor.UPcCstt|�dS)N�B)rr)rr#rrr�DOWN'szAnsiCursor.DOWNcCstt|�dS)N�C)rr)rr#rrr�FORWARD)szAnsiCursor.FORWARDcCstt|�dS)N�D)rr)rr#rrr�BACK+szAnsiCursor.BACKcCstt|�dt|�dS)N�;�H)rr)r�x�yrrr�POS-szAnsiCursor.POSN)r!)r!)r!)r!)r!r!)rrrr$r&r(r*r/rrrrr $s




r c@sPeZdZdZdZdZdZdZdZdZ	dZ
d	Zd
ZdZ
dZd
ZdZdZdZdZdS)�AnsiFore��� �!�"�#�$�%�'�Z�[�\�]�^�_�`�aN)rrr�BLACK�RED�GREEN�YELLOW�BLUE�MAGENTA�CYAN�WHITE�RESET�
LIGHTBLACK_EX�LIGHTRED_EX�
LIGHTGREEN_EX�LIGHTYELLOW_EX�LIGHTBLUE_EX�LIGHTMAGENTA_EX�LIGHTCYAN_EX�
LIGHTWHITE_EXrrrrr01s"r0c@sPeZdZdZdZdZdZdZdZdZ	dZ
d	Zd
ZdZ
dZd
ZdZdZdZdZdS)�AnsiBack�(�)�*�+�,�-�.�/�1�d�e�f�g�h�i�j�kN)rrrrBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrrrrrSGs"rSc@seZdZdZdZdZdZdS)�	AnsiStyler!r
��N)rrrZBRIGHTZDIMZNORMALZ	RESET_ALLrrrrre]sreN)r
)r
)�__doc__rr	r
rrrr�objectrr r0rSreZForeZBackZStyleZCursorrrrr�<module>s 


_vendor/colorama/__pycache__/winterm.cpython-36.pyc000064400000010607151733136370016335 0ustar003

�Pf��@s@ddlmZGdd�de�ZGdd�de�ZGdd�de�ZdS)	�)�win32c@s,eZdZdZdZdZdZdZdZdZ	dZ
d	S)
�WinColor�r������N)�__name__�
__module__�__qualname__ZBLACKZBLUEZGREENZCYANZREDZMAGENTAZYELLOWZGREY�rr�/usr/lib/python3.6/winterm.pyrsrc@seZdZdZdZdZdS)�WinStyler��N)rrr
ZNORMAL�BRIGHT�BRIGHT_BACKGROUNDrrrrrsrc@s�eZdZdd�Zdd�Zdd�Zd dd	�Zd!dd�Zd"d
d�Zd#dd�Z	d$dd�Z
dd�Zd%dd�Zd&dd�Z
d'dd�Zd(dd�Zdd�ZdS))�WinTermcCs>tjtj�j|_|j|j�|j|_|j|_	|j
|_d|_dS)Nr)
r�GetConsoleScreenBufferInfo�STDOUTZwAttributes�_default�	set_attrs�_fore�
_default_fore�_back�
_default_back�_style�_default_style�_light)�selfrrr�__init__szWinTerm.__init__cCs|j|jd|j|jBS)N�)rrrr )r!rrr�	get_attrs$szWinTerm.get_attrscCs.|d@|_|d?d@|_|tjtjB@|_dS)Nr
r)rrrrrr)r!�valuerrrr's
zWinTerm.set_attrsNcCs|j|j�|j|jd�dS)N)�attrs)rr�set_console)r!�	on_stderrrrr�	reset_all,szWinTerm.reset_allFcCsL|dkr|j}||_|r*|jtjO_n|jtjM_|j|d�dS)N)r()rrr rrr')r!�fore�lightr(rrrr*0szWinTerm.forecCsL|dkr|j}||_|r*|jtjO_n|jtjM_|j|d�dS)N)r()rrr rrr')r!�backr+r(rrrr,;szWinTerm.backcCs$|dkr|j}||_|j|d�dS)N)r()rrr')r!�styler(rrrr-Fsz
WinTerm.stylecCs0|dkr|j�}tj}|r tj}tj||�dS)N)r$rr�STDERRZSetConsoleTextAttribute)r!r&r(�handlerrrr'LszWinTerm.set_consolecCs,tj|�j}|jd7_|jd7_|S)Nr)rr�dwCursorPosition�X�Y)r!r/�positionrrr�get_positionTszWinTerm.get_positioncCs,|dkrdStj}|rtj}tj||�dS)N)rrr.�SetConsoleCursorPosition)r!r3r(r/rrr�set_cursor_position\szWinTerm.set_cursor_positioncCsBtj}|rtj}|j|�}|j||j|f}tj||dd�dS)NF)Zadjust)rrr.r4r2r1r5)r!�x�yr(r/r3Zadjusted_positionrrr�
cursor_adjustfs
zWinTerm.cursor_adjustrc	Cs�tj}|rtj}tj|�}|jj|jj}|jj|jj|jj}|dkrX|j}||}|dkrrtjdd�}|}n|dkr�tjdd�}|}tj	|d||�tj
||j�||�|dkr�tj|d�dS)Nrrr� )rr)
rrr.r�dwSizer1r2r0�COORD�FillConsoleOutputCharacter�FillConsoleOutputAttributer$r5)	r!�moder(r/�csbiZcells_in_screenZcells_before_cursor�
from_coord�cells_to_eraserrr�erase_screenns&
zWinTerm.erase_screencCs�tj}|rtj}tj|�}|dkr8|j}|jj|jj}|dkrZtjd|jj�}|jj}n |dkrztjd|jj�}|jj}tj	|d||�tj
||j�||�dS)Nrrrr:)rrr.rr0r;r1r<r2r=r>r$)r!r?r(r/r@rArBrrr�
erase_line�s

zWinTerm.erase_linecCstj|�dS)N)rZSetConsoleTitle)r!�titlerrr�	set_title�szWinTerm.set_title)N)NFF)NFF)NF)NF)NF)F)rF)rF)rrr
r"r$rr)r*r,r-r'r4r6r9rCrDrFrrrrrs









rN)�r�objectrrrrrrr�<module>s_vendor/colorama/__pycache__/ansitowin32.cpython-36.pyc000064400000015522151733136370017031 0ustar003

�Pf�%�@s�ddlZddlZddlZddlmZmZmZmZddlm	Z	m
Z
mZddlm
Z
mZdZe
dk	rfe	�Zdd�Zdd	�ZGd
d�de�ZGdd
�d
e�ZdS)�N�)�AnsiFore�AnsiBack�	AnsiStyle�Style)�WinTerm�WinColor�WinStyle)�windll�winapi_testcCst|d�p|jS)N�closed)�hasattrr)�stream�r�!/usr/lib/python3.6/ansitowin32.py�is_stream_closedsrcCst|d�o|j�S)N�isatty)r
r)rrrr�is_a_ttysrc@s(eZdZdZdd�Zdd�Zdd�ZdS)	�
StreamWrapperz�
    Wraps a stream (such as stdout), acting as a transparent proxy for all
    attribute access apart from method 'write()', which is delegated to our
    Converter instance.
    cCs||_||_dS)N)�_StreamWrapper__wrapped�_StreamWrapper__convertor)�self�wrappedZ	converterrrr�__init__szStreamWrapper.__init__cCst|j|�S)N)�getattrr)r�namerrr�__getattr__$szStreamWrapper.__getattr__cCs|jj|�dS)N)r�write)r�textrrrr'szStreamWrapper.writeN)�__name__�
__module__�__qualname__�__doc__rrrrrrrrsrc@s~eZdZdZejd�Zejd�Zddd�Zdd	�Z	d
d�Z
dd
�Zdd�Zdd�Z
dd�Zdd�Zdd�Zdd�Zdd�ZdS)�AnsiToWin32z�
    Implements a 'write()' method which, on Windows, will strip ANSI character
    sequences from the text, and if outputting to a tty, will convert them into
    win32 function calls.
    z?\[((?:\d|;)*)([a-zA-Z])?z?\]((?:.|;)*?)()?NFcCs�||_||_t||�|_tjdk}|o*t�}|dkrL|pJt|�oJt|�}||_	|dkrp|ont|�ont|�}||_
|j�|_|jt
jk|_dS)N�nt)r�	autoresetrr�osrrrr�strip�convert�get_win32_calls�win32_calls�sys�stderr�	on_stderr)rrr(r'r%Z
on_windowsZconversion_supportedrrrr4s


zAnsiToWin32.__init__cCs|jp|jp|jS)aj
        True if this class is actually needed. If false, then the output
        stream will not be affected, nor will win32 calls be issued, so
        wrapping stdout is not actually required. This will generally be
        False on non-Windows platforms, unless optional functionality like
        autoreset has been requested using kwargs to init()
        )r(r'r%)rrrr�should_wrapUszAnsiToWin32.should_wrapcNCs>|jot�r8tjtjftjtjtjftjtjtj	ftj	tjtj	ft
jtjt
jft
jtjt
jft
jtjt
jft
jtjt
jft
jtjt
jft
jtjt
jft
jtjt
jft
jtjt
jft
jtjft
jtjt
jdft
jtjt
jdft
jtjt
jdft
jtjt
jdft
jtjt
jdft
jtjt
jdft
jtjt
jdft
jtjt
jdftjtj t
jftjtj t
jftjtj t
jftjtj t
jftjtj t
jftjtj t
jftjtj t
jftjtj t
jftjtj ftjtj t
jdftjtj t
jdftjtj t
jdftjtj t
jdftjtj t
jdftjtj t
jdftjtj t
jdftjtj t
jdfi&St!�S)NT)"r(�wintermr�	RESET_ALL�	reset_allZBRIGHTZstyler	ZDIMZNORMALrZBLACKZforerZREDZGREENZYELLOWZBLUEZMAGENTAZCYANZWHITEZGREYZRESETZ
LIGHTBLACK_EXZLIGHTRED_EXZ
LIGHTGREEN_EXZLIGHTYELLOW_EXZLIGHTBLUE_EXZLIGHTMAGENTA_EXZLIGHTCYAN_EXZ
LIGHTWHITE_EXrZback�dict)rrrrr)_sP


zAnsiToWin32.get_win32_callscCs@|js|jr|j|�n|jj|�|jj�|jr<|j�dS)N)r'r(�write_and_convertrr�flushr%r1)rrrrrr�s
zAnsiToWin32.writecCs:|jr|jdd�n"|jr6t|j�r6|jjtj�dS)N�mr)r)r(�
call_win32r'rrrrr0)rrrrr1�szAnsiToWin32.reset_allcCsfd}|j|�}x@|jj|�D]0}|j�\}}|j|||�|j|j��|}qW|j||t|��dS)z�
        Write the given text to our wrapped stream, stripping any ANSI
        sequences from the text, and optionally converting them into win32
        calls.
        rN)�convert_osc�ANSI_CSI_RE�finditer�span�write_plain_text�convert_ansi�groups�len)rrZcursor�match�start�endrrrr3�s
zAnsiToWin32.write_and_convertcCs*||kr&|jj|||��|jj�dS)N)rrr4)rrr@rArrrr;�szAnsiToWin32.write_plain_textcCs"|jr|j||�}|j||�dS)N)r(�extract_paramsr6)r�paramstring�command�paramsrrrr<�szAnsiToWin32.convert_ansicCs~|dkr<tdd�|jd�D��}xXt|�dkr8|d}q"Wn>tdd�|jd�D��}t|�dkrz|d	krnd}n|d
krzd
}|S)N�Hfcss&|]}t|�dkrt|�ndVqdS)rrN)r>�int)�.0�prrr�	<genexpr>�sz-AnsiToWin32.extract_params.<locals>.<genexpr>�;�rcss"|]}t|�dkrt|�VqdS)rN)r>rG)rHrIrrrrJ�srZJKm�ABCD)r)r)r)�tuple�splitr>)rrDrCrErrrrB�szAnsiToWin32.extract_paramscCs�|dkrVx�|D]B}||jkr|j|}|d}|dd�}t|jd�}|||�qWn�|dkrttj|d|jd�n�|dkr�tj|d|jd�nf|dkr�tj||jd�nL|dkr�|d}d|fd|f|df|dfd	�|\}	}
tj|	|
|jd�dS)
Nr5rr)r-�J�KrFrM)�A�B�C�D)r*r2r-r/Zerase_screenZ
erase_lineZset_cursor_positionZ
cursor_adjust)rrDrEZparamZ	func_args�func�args�kwargs�n�x�yrrrr6�s$


*zAnsiToWin32.call_win32cCsvxp|jj|�D]`}|j�\}}|d|�||d�}|j�\}}|dkr|jd�}|ddkrtj|d�qW|S)N�rKrZ02r)�ANSI_OSC_REr9r:r=rOr/Z	set_title)rrr?r@rArCrDrErrrr7�s
zAnsiToWin32.convert_osc)NNF)rr r!r"�re�compiler8r]rr.r)rr1r3r;r<rBr6r7rrrrr#+s


!
,
r#)r^r+r&�ansirrrrr/rrr	Zwin32r
rrr�objectrr#rrrr�<module>s_vendor/colorama/__pycache__/initialise.cpython-36.pyc000064400000003061151733136370016776 0ustar003

�Pf}�@stddlZddlZddlZddlmZdadadadada	dd�Z
ddd	�Zd
d�Zej
dd
��Zdd�Zdd�ZdS)�N�)�AnsiToWin32FcCstdk	rtt�j�dS)N)r�orig_stdout�	reset_all�rr� /usr/lib/python3.6/initialise.pyrsrTcCs�|rt|||g�rtd��tjatjatjdkr8dantt||||�t_atjdkr^da	ntt||||�t_a	t
s�tjt
�da
dS)Nz,wrap=False conflicts with any other arg=TrueT)�any�
ValueError�sys�stdoutr�stderr�orig_stderr�wrapped_stdout�wrap_stream�wrapped_stderr�atexit_done�atexit�registerr)�	autoreset�convert�strip�wraprrr�inits


rcCs tdk	rtt_tdk	rtt_dS)N)rr
rr
rrrrr�deinit3src
os"t||�z
dVWdt�XdS)N)rr)�args�kwargsrrr�
colorama_text:s

rcCs tdk	rtt_tdk	rtt_dS)N)rr
rrrrrrr�reinitCsrcCs&|r"t||||d�}|j�r"|j}|S)N)rrr)rZshould_wrap�stream)rrrrr�wrapperrrrrJsr)FNNT)r�
contextlibr
Zansitowin32rrr
rrrrrr�contextmanagerrrrrrrr�<module>s
	_vendor/colorama/__pycache__/__init__.cpython-36.pyc000064400000000554151733136370016407 0ustar003

�Pf��@sDddlmZmZmZmZddlmZmZmZm	Z	ddl
mZdZdS)�)�init�deinit�reinit�
colorama_text)�Fore�Back�Style�Cursor)�AnsiToWin32z0.3.7N)
Z
initialiserrrr�ansirrrr	Zansitowin32r
�__version__�r
r
�/usr/lib/python3.6/__init__.py�<module>s_vendor/colorama/__pycache__/ansitowin32.cpython-36.opt-1.pyc000064400000015522151733136370017770 0ustar003

�Pf�%�@s�ddlZddlZddlZddlmZmZmZmZddlm	Z	m
Z
mZddlm
Z
mZdZe
dk	rfe	�Zdd�Zdd	�ZGd
d�de�ZGdd
�d
e�ZdS)�N�)�AnsiFore�AnsiBack�	AnsiStyle�Style)�WinTerm�WinColor�WinStyle)�windll�winapi_testcCst|d�p|jS)N�closed)�hasattrr)�stream�r�!/usr/lib/python3.6/ansitowin32.py�is_stream_closedsrcCst|d�o|j�S)N�isatty)r
r)rrrr�is_a_ttysrc@s(eZdZdZdd�Zdd�Zdd�ZdS)	�
StreamWrapperz�
    Wraps a stream (such as stdout), acting as a transparent proxy for all
    attribute access apart from method 'write()', which is delegated to our
    Converter instance.
    cCs||_||_dS)N)�_StreamWrapper__wrapped�_StreamWrapper__convertor)�self�wrappedZ	converterrrr�__init__szStreamWrapper.__init__cCst|j|�S)N)�getattrr)r�namerrr�__getattr__$szStreamWrapper.__getattr__cCs|jj|�dS)N)r�write)r�textrrrr'szStreamWrapper.writeN)�__name__�
__module__�__qualname__�__doc__rrrrrrrrsrc@s~eZdZdZejd�Zejd�Zddd�Zdd	�Z	d
d�Z
dd
�Zdd�Zdd�Z
dd�Zdd�Zdd�Zdd�Zdd�ZdS)�AnsiToWin32z�
    Implements a 'write()' method which, on Windows, will strip ANSI character
    sequences from the text, and if outputting to a tty, will convert them into
    win32 function calls.
    z?\[((?:\d|;)*)([a-zA-Z])?z?\]((?:.|;)*?)()?NFcCs�||_||_t||�|_tjdk}|o*t�}|dkrL|pJt|�oJt|�}||_	|dkrp|ont|�ont|�}||_
|j�|_|jt
jk|_dS)N�nt)r�	autoresetrr�osrrrr�strip�convert�get_win32_calls�win32_calls�sys�stderr�	on_stderr)rrr(r'r%Z
on_windowsZconversion_supportedrrrr4s


zAnsiToWin32.__init__cCs|jp|jp|jS)aj
        True if this class is actually needed. If false, then the output
        stream will not be affected, nor will win32 calls be issued, so
        wrapping stdout is not actually required. This will generally be
        False on non-Windows platforms, unless optional functionality like
        autoreset has been requested using kwargs to init()
        )r(r'r%)rrrr�should_wrapUszAnsiToWin32.should_wrapcNCs>|jot�r8tjtjftjtjtjftjtjtj	ftj	tjtj	ft
jtjt
jft
jtjt
jft
jtjt
jft
jtjt
jft
jtjt
jft
jtjt
jft
jtjt
jft
jtjt
jft
jtjft
jtjt
jdft
jtjt
jdft
jtjt
jdft
jtjt
jdft
jtjt
jdft
jtjt
jdft
jtjt
jdft
jtjt
jdftjtj t
jftjtj t
jftjtj t
jftjtj t
jftjtj t
jftjtj t
jftjtj t
jftjtj t
jftjtj ftjtj t
jdftjtj t
jdftjtj t
jdftjtj t
jdftjtj t
jdftjtj t
jdftjtj t
jdftjtj t
jdfi&St!�S)NT)"r(�wintermr�	RESET_ALL�	reset_allZBRIGHTZstyler	ZDIMZNORMALrZBLACKZforerZREDZGREENZYELLOWZBLUEZMAGENTAZCYANZWHITEZGREYZRESETZ
LIGHTBLACK_EXZLIGHTRED_EXZ
LIGHTGREEN_EXZLIGHTYELLOW_EXZLIGHTBLUE_EXZLIGHTMAGENTA_EXZLIGHTCYAN_EXZ
LIGHTWHITE_EXrZback�dict)rrrrr)_sP


zAnsiToWin32.get_win32_callscCs@|js|jr|j|�n|jj|�|jj�|jr<|j�dS)N)r'r(�write_and_convertrr�flushr%r1)rrrrrr�s
zAnsiToWin32.writecCs:|jr|jdd�n"|jr6t|j�r6|jjtj�dS)N�mr)r)r(�
call_win32r'rrrrr0)rrrrr1�szAnsiToWin32.reset_allcCsfd}|j|�}x@|jj|�D]0}|j�\}}|j|||�|j|j��|}qW|j||t|��dS)z�
        Write the given text to our wrapped stream, stripping any ANSI
        sequences from the text, and optionally converting them into win32
        calls.
        rN)�convert_osc�ANSI_CSI_RE�finditer�span�write_plain_text�convert_ansi�groups�len)rrZcursor�match�start�endrrrr3�s
zAnsiToWin32.write_and_convertcCs*||kr&|jj|||��|jj�dS)N)rrr4)rrr@rArrrr;�szAnsiToWin32.write_plain_textcCs"|jr|j||�}|j||�dS)N)r(�extract_paramsr6)r�paramstring�command�paramsrrrr<�szAnsiToWin32.convert_ansicCs~|dkr<tdd�|jd�D��}xXt|�dkr8|d}q"Wn>tdd�|jd�D��}t|�dkrz|d	krnd}n|d
krzd
}|S)N�Hfcss&|]}t|�dkrt|�ndVqdS)rrN)r>�int)�.0�prrr�	<genexpr>�sz-AnsiToWin32.extract_params.<locals>.<genexpr>�;�rcss"|]}t|�dkrt|�VqdS)rN)r>rG)rHrIrrrrJ�srZJKm�ABCD)r)r)r)�tuple�splitr>)rrDrCrErrrrB�szAnsiToWin32.extract_paramscCs�|dkrVx�|D]B}||jkr|j|}|d}|dd�}t|jd�}|||�qWn�|dkrttj|d|jd�n�|dkr�tj|d|jd�nf|dkr�tj||jd�nL|dkr�|d}d|fd|f|df|dfd	�|\}	}
tj|	|
|jd�dS)
Nr5rr)r-�J�KrFrM)�A�B�C�D)r*r2r-r/Zerase_screenZ
erase_lineZset_cursor_positionZ
cursor_adjust)rrDrEZparamZ	func_args�func�args�kwargs�n�x�yrrrr6�s$


*zAnsiToWin32.call_win32cCsvxp|jj|�D]`}|j�\}}|d|�||d�}|j�\}}|dkr|jd�}|ddkrtj|d�qW|S)N�rKrZ02r)�ANSI_OSC_REr9r:r=rOr/Z	set_title)rrr?r@rArCrDrErrrr7�s
zAnsiToWin32.convert_osc)NNF)rr r!r"�re�compiler8r]rr.r)rr1r3r;r<rBr6r7rrrrr#+s


!
,
r#)r^r+r&�ansirrrrr/rrr	Zwin32r
rrr�objectrr#rrrr�<module>s_vendor/colorama/__pycache__/__init__.cpython-36.opt-1.pyc000064400000000554151733136370017346 0ustar003

�Pf��@sDddlmZmZmZmZddlmZmZmZm	Z	ddl
mZdZdS)�)�init�deinit�reinit�
colorama_text)�Fore�Back�Style�Cursor)�AnsiToWin32z0.3.7N)
Z
initialiserrrr�ansirrrr	Zansitowin32r
�__version__�r
r
�/usr/lib/python3.6/__init__.py�<module>s_vendor/colorama/__pycache__/winterm.cpython-36.opt-1.pyc000064400000010607151733136370017274 0ustar003

�Pf��@s@ddlmZGdd�de�ZGdd�de�ZGdd�de�ZdS)	�)�win32c@s,eZdZdZdZdZdZdZdZdZ	dZ
d	S)
�WinColor�r������N)�__name__�
__module__�__qualname__ZBLACKZBLUEZGREENZCYANZREDZMAGENTAZYELLOWZGREY�rr�/usr/lib/python3.6/winterm.pyrsrc@seZdZdZdZdZdS)�WinStyler��N)rrr
ZNORMAL�BRIGHT�BRIGHT_BACKGROUNDrrrrrsrc@s�eZdZdd�Zdd�Zdd�Zd dd	�Zd!dd�Zd"d
d�Zd#dd�Z	d$dd�Z
dd�Zd%dd�Zd&dd�Z
d'dd�Zd(dd�Zdd�ZdS))�WinTermcCs>tjtj�j|_|j|j�|j|_|j|_	|j
|_d|_dS)Nr)
r�GetConsoleScreenBufferInfo�STDOUTZwAttributes�_default�	set_attrs�_fore�
_default_fore�_back�
_default_back�_style�_default_style�_light)�selfrrr�__init__szWinTerm.__init__cCs|j|jd|j|jBS)N�)rrrr )r!rrr�	get_attrs$szWinTerm.get_attrscCs.|d@|_|d?d@|_|tjtjB@|_dS)Nr
r)rrrrrr)r!�valuerrrr's
zWinTerm.set_attrsNcCs|j|j�|j|jd�dS)N)�attrs)rr�set_console)r!�	on_stderrrrr�	reset_all,szWinTerm.reset_allFcCsL|dkr|j}||_|r*|jtjO_n|jtjM_|j|d�dS)N)r()rrr rrr')r!�fore�lightr(rrrr*0szWinTerm.forecCsL|dkr|j}||_|r*|jtjO_n|jtjM_|j|d�dS)N)r()rrr rrr')r!�backr+r(rrrr,;szWinTerm.backcCs$|dkr|j}||_|j|d�dS)N)r()rrr')r!�styler(rrrr-Fsz
WinTerm.stylecCs0|dkr|j�}tj}|r tj}tj||�dS)N)r$rr�STDERRZSetConsoleTextAttribute)r!r&r(�handlerrrr'LszWinTerm.set_consolecCs,tj|�j}|jd7_|jd7_|S)Nr)rr�dwCursorPosition�X�Y)r!r/�positionrrr�get_positionTszWinTerm.get_positioncCs,|dkrdStj}|rtj}tj||�dS)N)rrr.�SetConsoleCursorPosition)r!r3r(r/rrr�set_cursor_position\szWinTerm.set_cursor_positioncCsBtj}|rtj}|j|�}|j||j|f}tj||dd�dS)NF)Zadjust)rrr.r4r2r1r5)r!�x�yr(r/r3Zadjusted_positionrrr�
cursor_adjustfs
zWinTerm.cursor_adjustrc	Cs�tj}|rtj}tj|�}|jj|jj}|jj|jj|jj}|dkrX|j}||}|dkrrtjdd�}|}n|dkr�tjdd�}|}tj	|d||�tj
||j�||�|dkr�tj|d�dS)Nrrr� )rr)
rrr.r�dwSizer1r2r0�COORD�FillConsoleOutputCharacter�FillConsoleOutputAttributer$r5)	r!�moder(r/�csbiZcells_in_screenZcells_before_cursor�
from_coord�cells_to_eraserrr�erase_screenns&
zWinTerm.erase_screencCs�tj}|rtj}tj|�}|dkr8|j}|jj|jj}|dkrZtjd|jj�}|jj}n |dkrztjd|jj�}|jj}tj	|d||�tj
||j�||�dS)Nrrrr:)rrr.rr0r;r1r<r2r=r>r$)r!r?r(r/r@rArBrrr�
erase_line�s

zWinTerm.erase_linecCstj|�dS)N)rZSetConsoleTitle)r!�titlerrr�	set_title�szWinTerm.set_title)N)NFF)NFF)NF)NF)NF)F)rF)rF)rrr
r"r$rr)r*r,r-r'r4r6r9rCrDrFrrrrrs









rN)�r�objectrrrrrrr�<module>s_vendor/colorama/__pycache__/initialise.cpython-36.opt-1.pyc000064400000003061151733136370017735 0ustar003

�Pf}�@stddlZddlZddlZddlmZdadadadada	dd�Z
ddd	�Zd
d�Zej
dd
��Zdd�Zdd�ZdS)�N�)�AnsiToWin32FcCstdk	rtt�j�dS)N)r�orig_stdout�	reset_all�rr� /usr/lib/python3.6/initialise.pyrsrTcCs�|rt|||g�rtd��tjatjatjdkr8dantt||||�t_atjdkr^da	ntt||||�t_a	t
s�tjt
�da
dS)Nz,wrap=False conflicts with any other arg=TrueT)�any�
ValueError�sys�stdoutr�stderr�orig_stderr�wrapped_stdout�wrap_stream�wrapped_stderr�atexit_done�atexit�registerr)�	autoreset�convert�strip�wraprrr�inits


rcCs tdk	rtt_tdk	rtt_dS)N)rr
rr
rrrrr�deinit3src
os"t||�z
dVWdt�XdS)N)rr)�args�kwargsrrr�
colorama_text:s

rcCs tdk	rtt_tdk	rtt_dS)N)rr
rrrrrrr�reinitCsrcCs&|r"t||||d�}|j�r"|j}|S)N)rrr)rZshould_wrap�stream)rrrrr�wrapperrrrrJsr)FNNT)r�
contextlibr
Zansitowin32rrr
rrrrrr�contextmanagerrrrrrrr�<module>s
	_vendor/colorama/__pycache__/ansi.cpython-36.pyc000064400000006276151733136370015611 0ustar003

�Pf�	�@s�dZdZdZdZdd�Zdd�Zdd	d
�Zddd�ZGd
d�de�Z	Gdd�de�Z
Gdd�de	�ZGdd�de	�ZGdd�de	�Z
e�Ze�Ze
�Ze
�ZdS)z�
This module generates ANSI character codes to printing colors to terminals.
See: http://en.wikipedia.org/wiki/ANSI_escape_code
z]�cCstt|�dS)N�m)�CSI�str)�code�r�/usr/lib/python3.6/ansi.py�
code_to_charssrcCstd|tS)Nz2;)�OSC�BEL)�titlerrr�	set_titlesr�cCstt|�dS)N�J)rr)�moderrr�clear_screensrcCstt|�dS)N�K)rr)rrrr�
clear_linesrc@seZdZdd�ZdS)�	AnsiCodescCs:x4t|�D](}|jd�s
t||�}t||t|��q
WdS)N�_)�dir�
startswith�getattr�setattrr)�self�name�valuerrr�__init__s

zAnsiCodes.__init__N)�__name__�
__module__�__qualname__rrrrrrsrc@s>eZdZd
dd�Zddd�Zddd�Zddd	�Zdd
d�ZdS)�
AnsiCursor�cCstt|�dS)N�A)rr)r�nrrr�UP%sz
AnsiCursor.UPcCstt|�dS)N�B)rr)rr#rrr�DOWN'szAnsiCursor.DOWNcCstt|�dS)N�C)rr)rr#rrr�FORWARD)szAnsiCursor.FORWARDcCstt|�dS)N�D)rr)rr#rrr�BACK+szAnsiCursor.BACKcCstt|�dt|�dS)N�;�H)rr)r�x�yrrr�POS-szAnsiCursor.POSN)r!)r!)r!)r!)r!r!)rrrr$r&r(r*r/rrrrr $s




r c@sPeZdZdZdZdZdZdZdZdZ	dZ
d	Zd
ZdZ
dZd
ZdZdZdZdZdS)�AnsiFore��� �!�"�#�$�%�'�Z�[�\�]�^�_�`�aN)rrr�BLACK�RED�GREEN�YELLOW�BLUE�MAGENTA�CYAN�WHITE�RESET�
LIGHTBLACK_EX�LIGHTRED_EX�
LIGHTGREEN_EX�LIGHTYELLOW_EX�LIGHTBLUE_EX�LIGHTMAGENTA_EX�LIGHTCYAN_EX�
LIGHTWHITE_EXrrrrr01s"r0c@sPeZdZdZdZdZdZdZdZdZ	dZ
d	Zd
ZdZ
dZd
ZdZdZdZdZdS)�AnsiBack�(�)�*�+�,�-�.�/�1�d�e�f�g�h�i�j�kN)rrrrBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrrrrrSGs"rSc@seZdZdZdZdZdZdS)�	AnsiStyler!r
��N)rrrZBRIGHTZDIMZNORMALZ	RESET_ALLrrrrre]sreN)r
)r
)�__doc__rr	r
rrrr�objectrr r0rSreZForeZBackZStyleZCursorrrrr�<module>s 


_vendor/colorama/ansi.py000064400000004734151733136370011322 0ustar00# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
'''
This module generates ANSI character codes to printing colors to terminals.
See: http://en.wikipedia.org/wiki/ANSI_escape_code
'''

CSI = '\033['  # Control Sequence Introducer: prefix for color/cursor codes
OSC = '\033]'  # Operating System Command: prefix for e.g. title changes
BEL = '\007'   # terminates an OSC sequence


def code_to_chars(code):
    """Wrap a numeric SGR code in a complete ANSI escape sequence."""
    return ''.join((CSI, str(code), 'm'))

def set_title(title):
    """Return the OSC sequence that sets the terminal window title."""
    return ''.join((OSC, '2;', title, BEL))

def clear_screen(mode=2):
    """Return the CSI 'erase display' sequence (mode 2 = entire screen)."""
    return ''.join((CSI, str(mode), 'J'))

def clear_line(mode=2):
    """Return the CSI 'erase line' sequence (mode 2 = whole line)."""
    return ''.join((CSI, str(mode), 'K'))


class AnsiCodes(object):
    def __init__(self):
        # Subclasses declare plain numeric class attributes. On
        # instantiation, shadow each public one with an instance attribute
        # holding the full ANSI escape sequence for that code.
        for name in dir(self):
            if name.startswith('_'):
                continue
            setattr(self, name, code_to_chars(getattr(self, name)))


class AnsiCursor(object):
    """Builds ANSI escape sequences that move the terminal cursor."""

    def _move(self, n, letter):
        # Generic single-argument cursor-movement sequence.
        return CSI + str(n) + letter

    def UP(self, n=1):
        return self._move(n, 'A')

    def DOWN(self, n=1):
        return self._move(n, 'B')

    def FORWARD(self, n=1):
        return self._move(n, 'C')

    def BACK(self, n=1):
        return self._move(n, 'D')

    def POS(self, x=1, y=1):
        # Note the row;column order and the trailing 'H'.
        return CSI + str(y) + ';' + str(x) + 'H'


class AnsiFore(AnsiCodes):
    """SGR foreground color codes (30-37 standard, 90-97 bright).

    Instantiation (via AnsiCodes.__init__) replaces each number with its
    full escape-sequence string.
    """
    BLACK           = 30
    RED             = 31
    GREEN           = 32
    YELLOW          = 33
    BLUE            = 34
    MAGENTA         = 35
    CYAN            = 36
    WHITE           = 37
    RESET           = 39

    # These are fairly well supported, but not part of the standard.
    LIGHTBLACK_EX   = 90
    LIGHTRED_EX     = 91
    LIGHTGREEN_EX   = 92
    LIGHTYELLOW_EX  = 93
    LIGHTBLUE_EX    = 94
    LIGHTMAGENTA_EX = 95
    LIGHTCYAN_EX    = 96
    LIGHTWHITE_EX   = 97


class AnsiBack(AnsiCodes):
    """SGR background color codes (40-47 standard, 100-107 bright).

    Instantiation (via AnsiCodes.__init__) replaces each number with its
    full escape-sequence string.
    """
    BLACK           = 40
    RED             = 41
    GREEN           = 42
    YELLOW          = 43
    BLUE            = 44
    MAGENTA         = 45
    CYAN            = 46
    WHITE           = 47
    RESET           = 49

    # These are fairly well supported, but not part of the standard.
    LIGHTBLACK_EX   = 100
    LIGHTRED_EX     = 101
    LIGHTGREEN_EX   = 102
    LIGHTYELLOW_EX  = 103
    LIGHTBLUE_EX    = 104
    LIGHTMAGENTA_EX = 105
    LIGHTCYAN_EX    = 106
    LIGHTWHITE_EX   = 107


class AnsiStyle(AnsiCodes):
    """SGR intensity/style codes; RESET_ALL (0) clears colors and styles."""
    BRIGHT    = 1
    DIM       = 2
    NORMAL    = 22
    RESET_ALL = 0

# Ready-to-use singletons; constructing them converts each numeric code
# into its escape-sequence string (see AnsiCodes.__init__).
Fore   = AnsiFore()
Back   = AnsiBack()
Style  = AnsiStyle()
Cursor = AnsiCursor()
_vendor/colorama/win32.py000064400000012365151733136370011331 0ustar00# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.

# from winbase.h
STDOUT = -11
STDERR = -12

try:
    import ctypes
    from ctypes import LibraryLoader
    windll = LibraryLoader(ctypes.WinDLL)
    from ctypes import wintypes
except (AttributeError, ImportError):
    # ctypes or its Windows pieces are unavailable (e.g. non-Windows
    # platform, where ctypes.WinDLL does not exist, raising AttributeError).
    # Expose no-op stand-ins so this module can still be imported and its
    # public names called safely. Plain 'def' instead of assigned lambdas
    # (PEP 8 E731) -- behavior is identical.
    windll = None

    def SetConsoleTextAttribute(*_):
        return None

    def winapi_test(*_):
        return None
else:
    # ctypes loaded successfully: build real Win32 console bindings below.
    from ctypes import byref, Structure, c_char, POINTER

    COORD = wintypes._COORD  # reuse wintypes' (X, Y) COORD structure

    class CONSOLE_SCREEN_BUFFER_INFO(Structure):
        """struct in wincon.h."""
        # Field order and types must match the Win32 struct layout exactly.
        _fields_ = [
            ("dwSize", COORD),
            ("dwCursorPosition", COORD),
            ("wAttributes", wintypes.WORD),
            ("srWindow", wintypes.SMALL_RECT),
            ("dwMaximumWindowSize", COORD),
        ]
        def __str__(self):
            # Render every field in declaration order, for debugging.
            return '(%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d)' % (
                self.dwSize.Y, self.dwSize.X
                , self.dwCursorPosition.Y, self.dwCursorPosition.X
                , self.wAttributes
                , self.srWindow.Top, self.srWindow.Left, self.srWindow.Bottom, self.srWindow.Right
                , self.dwMaximumWindowSize.Y, self.dwMaximumWindowSize.X
            )

    # Prototype the kernel32 functions we call (argtypes/restype) so ctypes
    # can marshal arguments correctly and catch type errors early.
    _GetStdHandle = windll.kernel32.GetStdHandle
    _GetStdHandle.argtypes = [
        wintypes.DWORD,
    ]
    _GetStdHandle.restype = wintypes.HANDLE

    _GetConsoleScreenBufferInfo = windll.kernel32.GetConsoleScreenBufferInfo
    _GetConsoleScreenBufferInfo.argtypes = [
        wintypes.HANDLE,
        POINTER(CONSOLE_SCREEN_BUFFER_INFO),
    ]
    _GetConsoleScreenBufferInfo.restype = wintypes.BOOL

    _SetConsoleTextAttribute = windll.kernel32.SetConsoleTextAttribute
    _SetConsoleTextAttribute.argtypes = [
        wintypes.HANDLE,
        wintypes.WORD,
    ]
    _SetConsoleTextAttribute.restype = wintypes.BOOL

    _SetConsoleCursorPosition = windll.kernel32.SetConsoleCursorPosition
    _SetConsoleCursorPosition.argtypes = [
        wintypes.HANDLE,
        COORD,
    ]
    _SetConsoleCursorPosition.restype = wintypes.BOOL

    _FillConsoleOutputCharacterA = windll.kernel32.FillConsoleOutputCharacterA
    _FillConsoleOutputCharacterA.argtypes = [
        wintypes.HANDLE,
        c_char,
        wintypes.DWORD,
        COORD,
        POINTER(wintypes.DWORD),
    ]
    _FillConsoleOutputCharacterA.restype = wintypes.BOOL

    _FillConsoleOutputAttribute = windll.kernel32.FillConsoleOutputAttribute
    _FillConsoleOutputAttribute.argtypes = [
        wintypes.HANDLE,
        wintypes.WORD,
        wintypes.DWORD,
        COORD,
        POINTER(wintypes.DWORD),
    ]
    _FillConsoleOutputAttribute.restype = wintypes.BOOL

    # NOTE(review): despite the trailing 'W' in the Python name, this binds
    # the ANSI variant SetConsoleTitleA (LPCSTR argument) -- confirm whether
    # callers pass bytes or unicode titles.
    _SetConsoleTitleW = windll.kernel32.SetConsoleTitleA
    _SetConsoleTitleW.argtypes = [
        wintypes.LPCSTR
    ]
    _SetConsoleTitleW.restype = wintypes.BOOL

    # Cache the stdout/stderr console handles once at import time.
    handles = {
        STDOUT: _GetStdHandle(STDOUT),
        STDERR: _GetStdHandle(STDERR),
    }

    def winapi_test():
        """Return True when querying stdout's screen buffer succeeds, i.e.
        a real Win32 console is attached."""
        csbi = CONSOLE_SCREEN_BUFFER_INFO()
        ok = _GetConsoleScreenBufferInfo(handles[STDOUT], byref(csbi))
        return bool(ok)

    def GetConsoleScreenBufferInfo(stream_id=STDOUT):
        """Query and return the CONSOLE_SCREEN_BUFFER_INFO for the stream."""
        csbi = CONSOLE_SCREEN_BUFFER_INFO()
        # The struct is returned regardless of the API call's success flag.
        _GetConsoleScreenBufferInfo(handles[stream_id], byref(csbi))
        return csbi

    def SetConsoleTextAttribute(stream_id, attrs):
        """Apply the given character attributes (colors/intensity) to the
        console behind the stream."""
        return _SetConsoleTextAttribute(handles[stream_id], attrs)

    def SetConsoleCursorPosition(stream_id, position, adjust=True):
        """Move the console cursor.

        `position` is an ANSI-style 1-based (y, x) pair; non-positive
        coordinates are silently ignored.
        """
        position = COORD(*position)
        if position.Y <= 0 or position.X <= 0:
            return
        # Win32's SetConsoleCursorPosition is 0-based and takes (x, y),
        # whereas ANSI is 1-based and uses (y, x) -- translate here.
        target = COORD(position.Y - 1, position.X - 1)
        if adjust:
            # Account for the viewport's scroll offset.
            window = GetConsoleScreenBufferInfo(STDOUT).srWindow
            target.Y += window.Top
            target.X += window.Left
        return _SetConsoleCursorPosition(handles[stream_id], target)

    def FillConsoleOutputCharacter(stream_id, char, length, start):
        """Write `char` repeated `length` times starting at coordinate
        `start`; return how many cells were actually written."""
        handle = handles[stream_id]
        written = wintypes.DWORD(0)
        # Hard-coded for the ANSI (single-byte) variant of the API.
        _FillConsoleOutputCharacterA(
            handle, c_char(char.encode()), wintypes.DWORD(length),
            start, byref(written))
        return written.value

    def FillConsoleOutputAttribute(stream_id, attr, length, start):
        """Stamp the attribute word `attr` onto `length` cells beginning at
        coordinate `start` (cf. Win32 FillConsoleOutputAttribute)."""
        written = wintypes.DWORD(0)
        # ANSI (vs wide) variant, matching FillConsoleOutputCharacter above.
        return _FillConsoleOutputAttribute(
            handles[stream_id], wintypes.WORD(attr),
            wintypes.DWORD(length), start, byref(written))

    def SetConsoleTitle(title):
        # NOTE(review): _SetConsoleTitleW actually binds SetConsoleTitleA
        # (see prototypes above), so `title` is marshalled as LPCSTR --
        # confirm callers pass an appropriate string type.
        return _SetConsoleTitleW(title)
_vendor/colorama/__init__.py000064400000000360151733136370012116 0ustar00# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
from .initialise import init, deinit, reinit, colorama_text
from .ansi import Fore, Back, Style, Cursor
from .ansitowin32 import AnsiToWin32

__version__ = '0.3.7'

_vendor/colorama/ansitowin32.py000064400000022704151733136370012545 0ustar00# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
import re
import sys
import os

from .ansi import AnsiFore, AnsiBack, AnsiStyle, Style
from .winterm import WinTerm, WinColor, WinStyle
from .win32 import windll, winapi_test


winterm = None
if windll is not None:
    winterm = WinTerm()


def is_stream_closed(stream):
    '''Return a truthy value when *stream* is closed or has no ``closed``
    attribute (streams we cannot inspect are treated as closed).'''
    return getattr(stream, 'closed', True)


def is_a_tty(stream):
    '''Return True when *stream* has an ``isatty`` method reporting a tty.'''
    if not hasattr(stream, 'isatty'):
        return False
    return stream.isatty()


class StreamWrapper(object):
    '''
    Transparent proxy around a stream (such as stdout).

    Every attribute access is forwarded to the wrapped stream except
    'write()', which is routed through the supplied converter instance.
    '''

    def __init__(self, wrapped, converter):
        # Name-mangled (double-underscore) attributes avoid clashing with
        # attribute names on the wrapped stream object.
        self.__wrapped = wrapped
        self.__convertor = converter

    def __getattr__(self, name):
        # Only invoked for names not found on the proxy itself, so
        # everything but write() falls through to the real stream.
        return getattr(self.__wrapped, name)

    def write(self, text):
        # Delegate to the converter, which decides what actually reaches
        # the wrapped stream.
        self.__convertor.write(text)


class AnsiToWin32(object):
    '''
    Implements a 'write()' method which, on Windows, will strip ANSI character
    sequences from the text, and if outputting to a tty, will convert them into
    win32 function calls.
    '''
    # Raw strings: '\[' and '\]' are invalid escape sequences in ordinary
    # string literals (a DeprecationWarning since Python 3.6).  The \001,
    # \033 and \x07 escapes are interpreted identically by the re module.
    ANSI_CSI_RE = re.compile(r'\001?\033\[((?:\d|;)*)([a-zA-Z])\002?')    # Control Sequence Introducer
    ANSI_OSC_RE = re.compile(r'\001?\033\]((?:.|;)*?)(\x07)\002?')        # Operating System Command

    def __init__(self, wrapped, convert=None, strip=None, autoreset=False):
        # The wrapped stream (normally sys.stdout or sys.stderr)
        self.wrapped = wrapped

        # should we reset colors to defaults after every .write()
        self.autoreset = autoreset

        # create the proxy wrapping our output stream
        self.stream = StreamWrapper(wrapped, self)

        on_windows = os.name == 'nt'
        # We test if the WinAPI works, because even if we are on Windows
        # we may be using a terminal that doesn't support the WinAPI
        # (e.g. Cygwin Terminal). In this case it's up to the terminal
        # to support the ANSI codes.
        conversion_supported = on_windows and winapi_test()

        # should we strip ANSI sequences from our output?
        if strip is None:
            strip = conversion_supported or (not is_stream_closed(wrapped) and not is_a_tty(wrapped))
        self.strip = strip

        # should we convert ANSI sequences into win32 calls?
        if convert is None:
            convert = conversion_supported and not is_stream_closed(wrapped) and is_a_tty(wrapped)
        self.convert = convert

        # dict of ansi codes to win32 functions and parameters
        self.win32_calls = self.get_win32_calls()

        # are we wrapping stderr?
        self.on_stderr = self.wrapped is sys.stderr

    def should_wrap(self):
        '''
        True if this class is actually needed. If false, then the output
        stream will not be affected, nor will win32 calls be issued, so
        wrapping stdout is not actually required. This will generally be
        False on non-Windows platforms, unless optional functionality like
        autoreset has been requested using kwargs to init()
        '''
        return self.convert or self.strip or self.autoreset

    def get_win32_calls(self):
        '''Build the mapping from ANSI style/colour codes to
        (winterm function, args...) tuples; empty when conversion is
        disabled or the winterm backend is unavailable.'''
        if self.convert and winterm:
            return {
                AnsiStyle.RESET_ALL: (winterm.reset_all, ),
                AnsiStyle.BRIGHT: (winterm.style, WinStyle.BRIGHT),
                AnsiStyle.DIM: (winterm.style, WinStyle.NORMAL),
                AnsiStyle.NORMAL: (winterm.style, WinStyle.NORMAL),
                AnsiFore.BLACK: (winterm.fore, WinColor.BLACK),
                AnsiFore.RED: (winterm.fore, WinColor.RED),
                AnsiFore.GREEN: (winterm.fore, WinColor.GREEN),
                AnsiFore.YELLOW: (winterm.fore, WinColor.YELLOW),
                AnsiFore.BLUE: (winterm.fore, WinColor.BLUE),
                AnsiFore.MAGENTA: (winterm.fore, WinColor.MAGENTA),
                AnsiFore.CYAN: (winterm.fore, WinColor.CYAN),
                AnsiFore.WHITE: (winterm.fore, WinColor.GREY),
                AnsiFore.RESET: (winterm.fore, ),
                AnsiFore.LIGHTBLACK_EX: (winterm.fore, WinColor.BLACK, True),
                AnsiFore.LIGHTRED_EX: (winterm.fore, WinColor.RED, True),
                AnsiFore.LIGHTGREEN_EX: (winterm.fore, WinColor.GREEN, True),
                AnsiFore.LIGHTYELLOW_EX: (winterm.fore, WinColor.YELLOW, True),
                AnsiFore.LIGHTBLUE_EX: (winterm.fore, WinColor.BLUE, True),
                AnsiFore.LIGHTMAGENTA_EX: (winterm.fore, WinColor.MAGENTA, True),
                AnsiFore.LIGHTCYAN_EX: (winterm.fore, WinColor.CYAN, True),
                AnsiFore.LIGHTWHITE_EX: (winterm.fore, WinColor.GREY, True),
                AnsiBack.BLACK: (winterm.back, WinColor.BLACK),
                AnsiBack.RED: (winterm.back, WinColor.RED),
                AnsiBack.GREEN: (winterm.back, WinColor.GREEN),
                AnsiBack.YELLOW: (winterm.back, WinColor.YELLOW),
                AnsiBack.BLUE: (winterm.back, WinColor.BLUE),
                AnsiBack.MAGENTA: (winterm.back, WinColor.MAGENTA),
                AnsiBack.CYAN: (winterm.back, WinColor.CYAN),
                AnsiBack.WHITE: (winterm.back, WinColor.GREY),
                AnsiBack.RESET: (winterm.back, ),
                AnsiBack.LIGHTBLACK_EX: (winterm.back, WinColor.BLACK, True),
                AnsiBack.LIGHTRED_EX: (winterm.back, WinColor.RED, True),
                AnsiBack.LIGHTGREEN_EX: (winterm.back, WinColor.GREEN, True),
                AnsiBack.LIGHTYELLOW_EX: (winterm.back, WinColor.YELLOW, True),
                AnsiBack.LIGHTBLUE_EX: (winterm.back, WinColor.BLUE, True),
                AnsiBack.LIGHTMAGENTA_EX: (winterm.back, WinColor.MAGENTA, True),
                AnsiBack.LIGHTCYAN_EX: (winterm.back, WinColor.CYAN, True),
                AnsiBack.LIGHTWHITE_EX: (winterm.back, WinColor.GREY, True),
            }
        return dict()

    def write(self, text):
        '''Write *text*, stripping/converting ANSI sequences as configured,
        then reset colours afterwards when autoreset is enabled.'''
        if self.strip or self.convert:
            self.write_and_convert(text)
        else:
            self.wrapped.write(text)
            self.wrapped.flush()
        if self.autoreset:
            self.reset_all()

    def reset_all(self):
        '''Restore default colours: via win32 when converting, otherwise by
        emitting the ANSI RESET_ALL sequence (unless stripping).'''
        if self.convert:
            self.call_win32('m', (0,))
        elif not self.strip and not is_stream_closed(self.wrapped):
            self.wrapped.write(Style.RESET_ALL)

    def write_and_convert(self, text):
        '''
        Write the given text to our wrapped stream, stripping any ANSI
        sequences from the text, and optionally converting them into win32
        calls.
        '''
        cursor = 0
        text = self.convert_osc(text)
        for match in self.ANSI_CSI_RE.finditer(text):
            start, end = match.span()
            self.write_plain_text(text, cursor, start)
            self.convert_ansi(*match.groups())
            cursor = end
        self.write_plain_text(text, cursor, len(text))

    def write_plain_text(self, text, start, end):
        '''Write text[start:end] straight through to the wrapped stream.'''
        if start < end:
            self.wrapped.write(text[start:end])
            self.wrapped.flush()

    def convert_ansi(self, paramstring, command):
        '''Translate one CSI sequence into a win32 call (when converting).'''
        if self.convert:
            params = self.extract_params(command, paramstring)
            self.call_win32(command, params)

    def extract_params(self, command, paramstring):
        '''Parse the numeric parameters of a CSI sequence into a tuple,
        applying the ANSI defaults appropriate to *command*.'''
        if command in 'Hf':
            # Cursor-position commands default missing row/column to 1.
            params = tuple(int(p) if len(p) != 0 else 1 for p in paramstring.split(';'))
            while len(params) < 2:
                # defaults:
                params = params + (1,)
        else:
            params = tuple(int(p) for p in paramstring.split(';') if len(p) != 0)
            if len(params) == 0:
                # defaults:
                if command in 'JKm':
                    params = (0,)
                elif command in 'ABCD':
                    params = (1,)

        return params

    def call_win32(self, command, params):
        '''Dispatch one parsed CSI command to the matching winterm call.'''
        if command == 'm':
            for param in params:
                if param in self.win32_calls:
                    func_args = self.win32_calls[param]
                    func = func_args[0]
                    args = func_args[1:]
                    kwargs = dict(on_stderr=self.on_stderr)
                    func(*args, **kwargs)
        elif command in 'J':
            winterm.erase_screen(params[0], on_stderr=self.on_stderr)
        elif command in 'K':
            winterm.erase_line(params[0], on_stderr=self.on_stderr)
        elif command in 'Hf':     # cursor position - absolute
            winterm.set_cursor_position(params, on_stderr=self.on_stderr)
        elif command in 'ABCD':   # cursor position - relative
            n = params[0]
            # A - up, B - down, C - forward, D - back
            x, y = {'A': (0, -n), 'B': (0, n), 'C': (n, 0), 'D': (-n, 0)}[command]
            winterm.cursor_adjust(x, y, on_stderr=self.on_stderr)

    def convert_osc(self, text):
        '''Strip OSC sequences from *text*, honouring title-change commands
        by forwarding them to winterm.set_title().'''
        for match in self.ANSI_OSC_RE.finditer(text):
            start, end = match.span()
            text = text[:start] + text[end:]
            paramstring, command = match.groups()
            if command in '\x07':       # \x07 = BEL
                params = paramstring.split(";")
                # 0 - change title and icon (we will only change title)
                # 1 - change icon (we don't support this)
                # 2 - change title
                if params[0] in '02':
                    winterm.set_title(params[1])
        return text
_vendor/colorama/winterm.py000064400000014222151733136370012046 0ustar00# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
from . import win32


# from wincon.h
class WinColor(object):
    # Win32 console colour indices.  These occupy the low three bits of a
    # character-attribute word (foreground), or bits 4-6 (background) --
    # see WinTerm.set_attrs, which masks with `& 7`.
    BLACK   = 0
    BLUE    = 1
    GREEN   = 2
    CYAN    = 3
    RED     = 4
    MAGENTA = 5
    YELLOW  = 6
    GREY    = 7

# from wincon.h
class WinStyle(object):
    # Brightness bits of the Win32 character-attribute word; OR'ed with the
    # colour bits by WinTerm.get_attrs.
    NORMAL              = 0x00 # dim text, dim background
    BRIGHT              = 0x08 # bright text, dim background
    BRIGHT_BACKGROUND   = 0x80 # dim text, bright background

class WinTerm(object):
    """Tracks the console text attributes (foreground colour, background
    colour, brightness style) and mirrors them to the Win32 console through
    the win32 wrapper module."""

    def __init__(self):
        # Capture the console's startup attributes so reset_all() can
        # restore them later.
        self._default = win32.GetConsoleScreenBufferInfo(win32.STDOUT).wAttributes
        self.set_attrs(self._default)
        self._default_fore = self._fore
        self._default_back = self._back
        self._default_style = self._style
        # In order to emulate LIGHT_EX in windows, we borrow the BRIGHT style.
        # So that LIGHT_EX colors and BRIGHT style do not clobber each other,
        # we track them separately, since LIGHT_EX is overwritten by Fore/Back
        # and BRIGHT is overwritten by Style codes.
        self._light = 0

    def get_attrs(self):
        """Pack the tracked state into a single Win32 attribute word."""
        # fore = bits 0-2, back = bits 4-6, brightness flags OR'ed on top.
        return self._fore + self._back * 16 + (self._style | self._light)

    def set_attrs(self, value):
        """Unpack a Win32 attribute word into fore/back/style components."""
        self._fore = value & 7
        self._back = (value >> 4) & 7
        self._style = value & (WinStyle.BRIGHT | WinStyle.BRIGHT_BACKGROUND)

    def reset_all(self, on_stderr=None):
        """Restore the attributes captured at construction time."""
        # NOTE(review): on_stderr is accepted but not forwarded to
        # set_console(), so the reset always targets stdout here -- confirm
        # whether that is intentional.
        self.set_attrs(self._default)
        self.set_console(attrs=self._default)

    def fore(self, fore=None, light=False, on_stderr=False):
        """Set the foreground colour; ``None`` restores the default."""
        if fore is None:
            fore = self._default_fore
        self._fore = fore
        # Emulate LIGHT_EX with BRIGHT Style
        if light:
            self._light |= WinStyle.BRIGHT
        else:
            self._light &= ~WinStyle.BRIGHT
        self.set_console(on_stderr=on_stderr)

    def back(self, back=None, light=False, on_stderr=False):
        """Set the background colour; ``None`` restores the default."""
        if back is None:
            back = self._default_back
        self._back = back
        # Emulate LIGHT_EX with BRIGHT_BACKGROUND Style
        if light:
            self._light |= WinStyle.BRIGHT_BACKGROUND
        else:
            self._light &= ~WinStyle.BRIGHT_BACKGROUND
        self.set_console(on_stderr=on_stderr)

    def style(self, style=None, on_stderr=False):
        """Set the brightness style; ``None`` restores the default."""
        if style is None:
            style = self._default_style
        self._style = style
        self.set_console(on_stderr=on_stderr)

    def set_console(self, attrs=None, on_stderr=False):
        """Push *attrs* (default: the current tracked state) to the console."""
        if attrs is None:
            attrs = self.get_attrs()
        handle = win32.STDOUT
        if on_stderr:
            handle = win32.STDERR
        win32.SetConsoleTextAttribute(handle, attrs)

    def get_position(self, handle):
        """Return the cursor position of *handle*, converted to 1-based."""
        position = win32.GetConsoleScreenBufferInfo(handle).dwCursorPosition
        # Because Windows coordinates are 0-based,
        # and win32.SetConsoleCursorPosition expects 1-based.
        position.X += 1
        position.Y += 1
        return position

    def set_cursor_position(self, position=None, on_stderr=False):
        """Move the cursor to the 1-based (row, column) *position*."""
        if position is None:
            # I'm not currently tracking the position, so there is no default.
            # position = self.get_position()
            return
        handle = win32.STDOUT
        if on_stderr:
            handle = win32.STDERR
        win32.SetConsoleCursorPosition(handle, position)

    def cursor_adjust(self, x, y, on_stderr=False):
        """Move the cursor by relative offsets *x* (columns) and *y* (rows)."""
        handle = win32.STDOUT
        if on_stderr:
            handle = win32.STDERR
        position = self.get_position(handle)
        # adjust=False: get_position() already returns 1-based coordinates.
        adjusted_position = (position.Y + y, position.X + x)
        win32.SetConsoleCursorPosition(handle, adjusted_position, adjust=False)

    def erase_screen(self, mode=0, on_stderr=False):
        # 0 should clear from the cursor to the end of the screen.
        # 1 should clear from the cursor to the beginning of the screen.
        # 2 should clear the entire screen, and move cursor to (1,1)
        handle = win32.STDOUT
        if on_stderr:
            handle = win32.STDERR
        csbi = win32.GetConsoleScreenBufferInfo(handle)
        # get the number of character cells in the current buffer
        cells_in_screen = csbi.dwSize.X * csbi.dwSize.Y
        # get number of character cells before current cursor position
        cells_before_cursor = csbi.dwSize.X * csbi.dwCursorPosition.Y + csbi.dwCursorPosition.X
        # NOTE(review): this is an 'if'/'if'/'elif' chain, not fully 'elif';
        # modes 0/1/2 behave correctly, but an unrecognized mode leaves
        # from_coord/cells_to_erase unbound (NameError below).
        if mode == 0:
            from_coord = csbi.dwCursorPosition
            cells_to_erase = cells_in_screen - cells_before_cursor
        if mode == 1:
            from_coord = win32.COORD(0, 0)
            cells_to_erase = cells_before_cursor
        elif mode == 2:
            from_coord = win32.COORD(0, 0)
            cells_to_erase = cells_in_screen
        # fill the region with blanks
        win32.FillConsoleOutputCharacter(handle, ' ', cells_to_erase, from_coord)
        # now set the buffer's attributes accordingly
        win32.FillConsoleOutputAttribute(handle, self.get_attrs(), cells_to_erase, from_coord)
        if mode == 2:
            # put the cursor where needed
            win32.SetConsoleCursorPosition(handle, (1, 1))

    def erase_line(self, mode=0, on_stderr=False):
        # 0 should clear from the cursor to the end of the line.
        # 1 should clear from the cursor to the beginning of the line.
        # 2 should clear the entire line.
        handle = win32.STDOUT
        if on_stderr:
            handle = win32.STDERR
        csbi = win32.GetConsoleScreenBufferInfo(handle)
        # NOTE(review): same 'if'/'if'/'elif' pattern as erase_screen above.
        if mode == 0:
            from_coord = csbi.dwCursorPosition
            cells_to_erase = csbi.dwSize.X - csbi.dwCursorPosition.X
        if mode == 1:
            from_coord = win32.COORD(0, csbi.dwCursorPosition.Y)
            cells_to_erase = csbi.dwCursorPosition.X
        elif mode == 2:
            from_coord = win32.COORD(0, csbi.dwCursorPosition.Y)
            cells_to_erase = csbi.dwSize.X
        # fill the region with blanks
        win32.FillConsoleOutputCharacter(handle, ' ', cells_to_erase, from_coord)
        # now set the buffer's attributes accordingly
        win32.FillConsoleOutputAttribute(handle, self.get_attrs(), cells_to_erase, from_coord)

    def set_title(self, title):
        """Set the console window title."""
        win32.SetConsoleTitle(title)
_vendor/requests/structures.py000064400000005704151733136370012667 0ustar00# -*- coding: utf-8 -*-

"""
requests.structures
~~~~~~~~~~~~~~~~~~~

Data structures that power Requests.
"""

import collections

from .compat import OrderedDict


try:
    # collections.Mapping / collections.MutableMapping were deprecated in
    # Python 3.3 and removed in 3.10; prefer the collections.abc location
    # and fall back for Python 2.
    from collections.abc import Mapping as _Mapping
    from collections.abc import MutableMapping as _MutableMapping
except ImportError:  # Python 2
    from collections import Mapping as _Mapping
    from collections import MutableMapping as _MutableMapping


class CaseInsensitiveDict(_MutableMapping):
    """A case-insensitive ``dict``-like object.

    Implements all methods and operations of
    ``MutableMapping`` as well as dict's ``copy``. Also
    provides ``lower_items``.

    All keys are expected to be strings. The structure remembers the
    case of the last key to be set, and ``iter(instance)``,
    ``keys()``, ``items()``, ``iterkeys()``, and ``iteritems()``
    will contain case-sensitive keys. However, querying and contains
    testing is case insensitive::

        cid = CaseInsensitiveDict()
        cid['Accept'] = 'application/json'
        cid['aCCEPT'] == 'application/json'  # True
        list(cid) == ['Accept']  # True

    For example, ``headers['content-encoding']`` will return the
    value of a ``'Content-Encoding'`` response header, regardless
    of how the header name was originally stored.

    If the constructor, ``.update``, or equality comparison
    operations are given keys that have equal ``.lower()``s, the
    behavior is undefined.
    """

    def __init__(self, data=None, **kwargs):
        # Maps lowercased key -> (original-cased key, value).
        self._store = OrderedDict()
        if data is None:
            data = {}
        self.update(data, **kwargs)

    def __setitem__(self, key, value):
        # Use the lowercased key for lookups, but store the actual
        # key alongside the value.
        self._store[key.lower()] = (key, value)

    def __getitem__(self, key):
        return self._store[key.lower()][1]

    def __delitem__(self, key):
        del self._store[key.lower()]

    def __iter__(self):
        # Yield keys with the casing they were last set with.
        return (casedkey for casedkey, mappedvalue in self._store.values())

    def __len__(self):
        return len(self._store)

    def lower_items(self):
        """Like iteritems(), but with all lowercase keys."""
        return (
            (lowerkey, keyval[1])
            for (lowerkey, keyval)
            in self._store.items()
        )

    def __eq__(self, other):
        if isinstance(other, _Mapping):
            other = CaseInsensitiveDict(other)
        else:
            return NotImplemented
        # Compare insensitively
        return dict(self.lower_items()) == dict(other.lower_items())

    # Copy is required
    def copy(self):
        # _store.values() is an iterable of (cased key, value) pairs, which
        # the constructor accepts directly.
        return CaseInsensitiveDict(self._store.values())

    def __repr__(self):
        return str(dict(self.items()))


class LookupDict(dict):
    """Dictionary lookup object.

    Item access resolves against the instance's attribute namespace and
    returns ``None`` for missing keys instead of raising ``KeyError``.
    """

    def __init__(self, name=None):
        self.name = name
        super(LookupDict, self).__init__()

    def __repr__(self):
        return "<lookup '%s'>" % self.name

    def __getitem__(self, key):
        # We allow fall-through here, so values default to None
        return self.__dict__.get(key, None)

    def get(self, key, default=None):
        return self.__dict__.get(key, default)
_vendor/requests/api.py000064400000014135151733136370011213 0ustar00# -*- coding: utf-8 -*-

"""
requests.api
~~~~~~~~~~~~

This module implements the Requests API.

:copyright: (c) 2012 by Kenneth Reitz.
:license: Apache2, see LICENSE for more details.
"""

from . import sessions


def request(method, url, **kwargs):
    """Constructs and sends a :class:`Request <Request>`.

    :param method: method for the new :class:`Request` object.
    :param url: URL for the new :class:`Request` object.
    :param params: (optional) Dictionary or bytes to be sent in the query string for the :class:`Request`.
    :param data: (optional) Dictionary or list of tuples ``[(key, value)]`` (will be form-encoded), bytes, or file-like object to send in the body of the :class:`Request`.
    :param json: (optional) json data to send in the body of the :class:`Request`.
    :param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`.
    :param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`.
    :param files: (optional) Dictionary of ``'name': file-like-objects`` (or ``{'name': file-tuple}``) for multipart encoding upload.
        ``file-tuple`` can be a 2-tuple ``('filename', fileobj)``, 3-tuple ``('filename', fileobj, 'content_type')``
        or a 4-tuple ``('filename', fileobj, 'content_type', custom_headers)``, where ``'content-type'`` is a string
        defining the content type of the given file and ``custom_headers`` a dict-like object containing additional headers
        to add for the file.
    :param auth: (optional) Auth tuple to enable Basic/Digest/Custom HTTP Auth.
    :param timeout: (optional) How many seconds to wait for the server to send data
        before giving up, as a float, or a :ref:`(connect timeout, read
        timeout) <timeouts>` tuple.
    :type timeout: float or tuple
    :param allow_redirects: (optional) Boolean. Enable/disable GET/OPTIONS/POST/PUT/PATCH/DELETE/HEAD redirection. Defaults to ``True``.
    :type allow_redirects: bool
    :param proxies: (optional) Dictionary mapping protocol to the URL of the proxy.
    :param verify: (optional) Either a boolean, in which case it controls whether we verify
            the server's TLS certificate, or a string, in which case it must be a path
            to a CA bundle to use. Defaults to ``True``.
    :param stream: (optional) if ``False``, the response content will be immediately downloaded.
    :param cert: (optional) if String, path to ssl client cert file (.pem). If Tuple, ('cert', 'key') pair.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response

    Usage::

      >>> import requests
      >>> req = requests.request('GET', 'http://httpbin.org/get')
      >>> req
      <Response [200]>
    """

    # By using the 'with' statement we are sure the session is closed, thus we
    # avoid leaving sockets open which can trigger a ResourceWarning in some
    # cases, and look like a memory leak in others.
    with sessions.Session() as session:
        return session.request(method=method, url=url, **kwargs)


def get(url, params=None, **kwargs):
    r"""Sends a GET request.

    :param url: URL for the new :class:`Request` object.
    :param params: (optional) Dictionary or bytes to be sent in the query string for the :class:`Request`.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    """

    # GET requests follow redirects unless the caller explicitly opted out.
    if 'allow_redirects' not in kwargs:
        kwargs['allow_redirects'] = True
    return request('get', url, params=params, **kwargs)


def options(url, **kwargs):
    r"""Sends an OPTIONS request.

    :param url: URL for the new :class:`Request` object.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    """

    # OPTIONS requests follow redirects unless the caller explicitly opted out.
    if 'allow_redirects' not in kwargs:
        kwargs['allow_redirects'] = True
    return request('options', url, **kwargs)


def head(url, **kwargs):
    r"""Sends a HEAD request.

    :param url: URL for the new :class:`Request` object.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    """

    # Unlike GET, HEAD requests do not follow redirects by default.
    if 'allow_redirects' not in kwargs:
        kwargs['allow_redirects'] = False
    return request('head', url, **kwargs)


def post(url, data=None, json=None, **kwargs):
    r"""Sends a POST request.

    :param url: URL for the new :class:`Request` object.
    :param data: (optional) Dictionary (will be form-encoded), bytes, or file-like object to send in the body of the :class:`Request`.
    :param json: (optional) json data to send in the body of the :class:`Request`.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    """

    # Fold the body arguments back into the keyword set for ``request``.
    kwargs.update(data=data, json=json)
    return request('post', url, **kwargs)


def put(url, data=None, **kwargs):
    r"""Sends a PUT request.

    :param url: URL for the new :class:`Request` object.
    :param data: (optional) Dictionary (will be form-encoded), bytes, or file-like object to send in the body of the :class:`Request`.
    :param json: (optional) json data to send in the body of the :class:`Request`.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    """

    # Fold the body argument back into the keyword set for ``request``.
    kwargs.update(data=data)
    return request('put', url, **kwargs)


def patch(url, data=None, **kwargs):
    r"""Sends a PATCH request.

    :param url: URL for the new :class:`Request` object.
    :param data: (optional) Dictionary (will be form-encoded), bytes, or file-like object to send in the body of the :class:`Request`.
    :param json: (optional) json data to send in the body of the :class:`Request`.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    """

    # Fold the body argument back into the keyword set for ``request``.
    kwargs.update(data=data)
    return request('patch', url, **kwargs)


def delete(url, **kwargs):
    r"""Issue an HTTP DELETE request and return the server's response.

    :param url: URL for the new :class:`Request` object.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    """

    # Delegate to the generic ``request`` dispatcher with the DELETE verb.
    response = request('delete', url, **kwargs)
    return response
_vendor/requests/__pycache__/api.cpython-36.pyc000064400000014374151733136370015504 0ustar003

�Pf]�@s\dZddlmZdd�Zddd�Zdd	�Zd
d�Zddd
�Zddd�Zddd�Z	dd�Z
dS)z�
requests.api
~~~~~~~~~~~~

This module implements the Requests API.

:copyright: (c) 2012 by Kenneth Reitz.
:license: Apache2, see LICENSE for more details.
�)�sessionscKs*tj��}|jf||d�|��SQRXdS)a�	Constructs and sends a :class:`Request <Request>`.

    :param method: method for the new :class:`Request` object.
    :param url: URL for the new :class:`Request` object.
    :param params: (optional) Dictionary or bytes to be sent in the query string for the :class:`Request`.
    :param data: (optional) Dictionary or list of tuples ``[(key, value)]`` (will be form-encoded), bytes, or file-like object to send in the body of the :class:`Request`.
    :param json: (optional) json data to send in the body of the :class:`Request`.
    :param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`.
    :param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`.
    :param files: (optional) Dictionary of ``'name': file-like-objects`` (or ``{'name': file-tuple}``) for multipart encoding upload.
        ``file-tuple`` can be a 2-tuple ``('filename', fileobj)``, 3-tuple ``('filename', fileobj, 'content_type')``
        or a 4-tuple ``('filename', fileobj, 'content_type', custom_headers)``, where ``'content-type'`` is a string
        defining the content type of the given file and ``custom_headers`` a dict-like object containing additional headers
        to add for the file.
    :param auth: (optional) Auth tuple to enable Basic/Digest/Custom HTTP Auth.
    :param timeout: (optional) How many seconds to wait for the server to send data
        before giving up, as a float, or a :ref:`(connect timeout, read
        timeout) <timeouts>` tuple.
    :type timeout: float or tuple
    :param allow_redirects: (optional) Boolean. Enable/disable GET/OPTIONS/POST/PUT/PATCH/DELETE/HEAD redirection. Defaults to ``True``.
    :type allow_redirects: bool
    :param proxies: (optional) Dictionary mapping protocol to the URL of the proxy.
    :param verify: (optional) Either a boolean, in which case it controls whether we verify
            the server's TLS certificate, or a string, in which case it must be a path
            to a CA bundle to use. Defaults to ``True``.
    :param stream: (optional) if ``False``, the response content will be immediately downloaded.
    :param cert: (optional) if String, path to ssl client cert file (.pem). If Tuple, ('cert', 'key') pair.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response

    Usage::

      >>> import requests
      >>> req = requests.request('GET', 'http://httpbin.org/get')
      <Response [200]>
    )�method�urlN)rZSession�request)rr�kwargsZsession�r�/usr/lib/python3.6/api.pyrs)
rNcKs"|jdd�td|fd|i|��S)aOSends a GET request.

    :param url: URL for the new :class:`Request` object.
    :param params: (optional) Dictionary or bytes to be sent in the query string for the :class:`Request`.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    �allow_redirectsT�get�params)�
setdefaultr)rrrrrrr
=s
r
cKs|jdd�td|f|�S)z�Sends an OPTIONS request.

    :param url: URL for the new :class:`Request` object.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    r	T�options)rr)rrrrrr
Ks	r
cKs|jdd�td|f|�S)z�Sends a HEAD request.

    :param url: URL for the new :class:`Request` object.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    r	F�head)rr)rrrrrrXs	rcKstd|f||d�|��S)a�Sends a POST request.

    :param url: URL for the new :class:`Request` object.
    :param data: (optional) Dictionary (will be form-encoded), bytes, or file-like object to send in the body of the :class:`Request`.
    :param json: (optional) json data to send in the body of the :class:`Request`.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    �post)�data�json)r)rrrrrrrresrcKstd|fd|i|��S)a�Sends a PUT request.

    :param url: URL for the new :class:`Request` object.
    :param data: (optional) Dictionary (will be form-encoded), bytes, or file-like object to send in the body of the :class:`Request`.
    :param json: (optional) json data to send in the body of the :class:`Request`.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    �putr)r)rrrrrrrssrcKstd|fd|i|��S)a�Sends a PATCH request.

    :param url: URL for the new :class:`Request` object.
    :param data: (optional) Dictionary (will be form-encoded), bytes, or file-like object to send in the body of the :class:`Request`.
    :param json: (optional) json data to send in the body of the :class:`Request`.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    �patchr)r)rrrrrrr�srcKstd|f|�S)z�Sends a DELETE request.

    :param url: URL for the new :class:`Request` object.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    �delete)r)rrrrrr�s	r)N)NN)N)N)�__doc__�rrr
r
rrrrrrrrr�<module>s-




_vendor/requests/__pycache__/hooks.cpython-36.pyc000064400000001626151733136370016052 0ustar003

�Pf��@sdZdgZdd�Zdd�ZdS)z�
requests.hooks
~~~~~~~~~~~~~~

This module provides the capabilities for the Requests hooks system.

Available hooks:

``response``:
    The response generated from a Request.
ZresponsecCstdd�tD��S)Ncss|]}|gfVqdS)N�)�.0Zeventrr�/usr/lib/python3.6/hooks.py�	<genexpr>sz default_hooks.<locals>.<genexpr>)�dict�HOOKSrrrr�
default_hookssrcKsR|pt�}|j|�}|rNt|d�r(|g}x$|D]}||f|�}|dk	r.|}q.W|S)z6Dispatches a hook dictionary on a given piece of data.�__call__N)r�get�hasattr)�keyZhooksZ	hook_data�kwargs�hookZ
_hook_datarrr�
dispatch_hooks



rN)�__doc__rrrrrrr�<module>
s_vendor/requests/__pycache__/packages.cpython-36.pyc000064400000000676151733136370016511 0ustar003

�Pf��@s~ddlZxpdD]hZdeZee�e�e<xLeej�D]>ZeeksNejed�r4ee	d�d�Z
ejeejde
<q4WqWdS)	�N�urllib3�idna�chardetzpip._vendor.�.zpip._vendor.requests.packages.)rrr)�sys�packageZvendored_package�
__import__�locals�list�modules�mod�
startswith�lenZunprefixed_mod�rr�/usr/lib/python3.6/packages.py�<module>s
_vendor/requests/__pycache__/adapters.cpython-36.pyc000064400000040074151733136370016532 0ustar003

�PfR�@s�dZddlZddlZddlmZmZddlmZddl	m
Zddlm
Z
ddlmZddlmZdd	lmZdd
lmZddlmZddlmZdd
lmZddlmZddlmZddlmZddlmZddlmZm Z ddl!m"Z"m#Z#m$Z$m%Z%m&Z&m'Z'ddl(m)Z)ddl*m+Z+ddl,m-Z-m.Z.m/Z/mZmZm0Z0m1Z1ddl2m3Z3yddl4m5Z5Wne6k
�rrdd�Z5YnXdZ7dZ8dZ9dZ:Gdd�de;�Z<Gd d!�d!e<�Z=dS)"z�
requests.adapters
~~~~~~~~~~~~~~~~~

This module contains the transport adapters that Requests uses to define
and maintain connections.
�N)�PoolManager�proxy_from_url)�HTTPResponse)�Timeout)�Retry)�ClosedPoolError)�ConnectTimeoutError)�	HTTPError)�
MaxRetryError)�NewConnectionError)�
ProxyError)�
ProtocolError)�ReadTimeoutError)�SSLError)�
ResponseError�)�Response)�urlparse�
basestring)�DEFAULT_CA_BUNDLE_PATH�get_encoding_from_headers�prepend_scheme_if_needed�get_auth_from_url�
urldefragauth�select_proxy)�CaseInsensitiveDict)�extract_cookies_to_jar)�ConnectionError�ConnectTimeout�ReadTimeoutrr�
RetryError�
InvalidSchema)�_basic_auth_str)�SOCKSProxyManagercOstd��dS)Nz'Missing dependencies for SOCKS support.)r!)�args�kwargs�r&�/usr/lib/python3.6/adapters.pyr#+sr#F�
cs2eZdZdZ�fdd�Zddd�Zd	d
�Z�ZS)�BaseAdapterzThe Base Transport Adaptercstt|�j�dS)N)�superr)�__init__)�self)�	__class__r&r'r+7szBaseAdapter.__init__FNTcCst�dS)aCSends PreparedRequest object. Returns Response object.

        :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
        :param stream: (optional) Whether to stream the request content.
        :param timeout: (optional) How long to wait for the server to send
            data before giving up, as a float, or a :ref:`(connect timeout,
            read timeout) <timeouts>` tuple.
        :type timeout: float or tuple
        :param verify: (optional) Either a boolean, in which case it controls whether we verify
            the server's TLS certificate, or a string, in which case it must be a path
            to a CA bundle to use
        :param cert: (optional) Any user-provided SSL certificate to be trusted.
        :param proxies: (optional) The proxies dictionary to apply to the request.
        N)�NotImplementedError)r,�request�stream�timeout�verify�cert�proxiesr&r&r'�send:szBaseAdapter.sendcCst�dS)z!Cleans up adapter specific items.N)r.)r,r&r&r'�closeLszBaseAdapter.close)FNTNN)�__name__�
__module__�__qualname__�__doc__r+r5r6�
__classcell__r&r&)r-r'r)4s

r)cs�eZdZdZdddddgZeeeef�fdd�	Zd	d
�Z	dd�Z
efd
d�Zdd�Zdd�Z
dd�Zd$dd�Zdd�Zdd�Zdd�Zdd�Zd%d"d#�Z�ZS)&�HTTPAdaptera�The built-in HTTP Adapter for urllib3.

    Provides a general-case interface for Requests sessions to contact HTTP and
    HTTPS urls by implementing the Transport Adapter interface. This class will
    usually be created by the :class:`Session <Session>` class under the
    covers.

    :param pool_connections: The number of urllib3 connection pools to cache.
    :param pool_maxsize: The maximum number of connections to save in the pool.
    :param max_retries: The maximum number of retries each connection
        should attempt. Note, this applies only to failed DNS lookups, socket
        connections and connection timeouts, never to requests where data has
        made it to the server. By default, Requests does not retry failed
        connections. If you need granular control over the conditions under
        which we retry a request, import urllib3's ``Retry`` class and pass
        that instead.
    :param pool_block: Whether the connection pool should block for connections.

    Usage::

      >>> import requests
      >>> s = requests.Session()
      >>> a = requests.adapters.HTTPAdapter(max_retries=3)
      >>> s.mount('http://', a)
    �max_retries�config�_pool_connections�
_pool_maxsize�_pool_blockcsd|tkrtddd�|_ntj|�|_i|_i|_tt|�j�||_	||_
||_|j|||d�dS)NrF)�read)�block)
�DEFAULT_RETRIESrr=Zfrom_intr>�
proxy_managerr*r<r+r?r@rA�init_poolmanager)r,Zpool_connectionsZpool_maxsizer=Z
pool_block)r-r&r'r+nszHTTPAdapter.__init__cst�fdd��jD��S)Nc3s|]}|t�|d�fVqdS)N)�getattr)�.0�attr)r,r&r'�	<genexpr>�sz+HTTPAdapter.__getstate__.<locals>.<genexpr>)�dict�	__attrs__)r,r&)r,r'�__getstate__�szHTTPAdapter.__getstate__cCsHi|_i|_x |j�D]\}}t|||�qW|j|j|j|jd�dS)N)rC)rEr>�items�setattrrFr?r@rA)r,�staterI�valuer&r&r'�__setstate__�szHTTPAdapter.__setstate__cKs0||_||_||_tf|||dd�|��|_dS)aInitializes a urllib3 PoolManager.

        This method should not be called from user code, and is only
        exposed for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param connections: The number of urllib3 connection pools to cache.
        :param maxsize: The maximum number of connections to save in the pool.
        :param block: Block when no free connections are available.
        :param pool_kwargs: Extra keyword arguments used to initialize the Pool Manager.
        T)�	num_pools�maxsizerC�strictN)r?r@rAr�poolmanager)r,ZconnectionsrTrCZpool_kwargsr&r&r'rF�s

zHTTPAdapter.init_poolmanagercKs�||jkr|j|}n||j�jd�r^t|�\}}t|f|||j|j|jd�|��}|j|<n4|j|�}t	|f||j|j|jd�|��}|j|<|S)a�Return urllib3 ProxyManager for the given proxy.

        This method should not be called from user code, and is only
        exposed for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param proxy: The proxy to return a urllib3 ProxyManager for.
        :param proxy_kwargs: Extra keyword arguments used to configure the Proxy Manager.
        :returns: ProxyManager
        :rtype: urllib3.ProxyManager
        �socks)�username�passwordrSrTrC)�
proxy_headersrSrTrC)
rE�lower�
startswithrr#r?r@rArZr)r,�proxyZproxy_kwargsZmanagerrXrYrZr&r&r'�proxy_manager_for�s*

zHTTPAdapter.proxy_manager_forcCs|j�jd�rn|rnd}|dk	r"|}|s*t}|s>tjj|�rLtdj|���d|_tjj	|�sf||_
q�||_nd|_d|_
d|_|�rt|t
�s�|d|_|d|_n||_d|_|jr�tjj|j�r�td	j|j���|jo�tjj|j��rtd
j|j���dS)aAVerify a SSL certificate. This method should not be called from user
        code, and is only exposed for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param conn: The urllib3 connection object associated with the cert.
        :param url: The requested URL.
        :param verify: Either a boolean, in which case it controls whether we verify
            the server's TLS certificate, or a string, in which case it must be a path
            to a CA bundle to use
        :param cert: The SSL certificate to verify.
        �httpsNTzFCould not find a suitable TLS CA certificate bundle, invalid path: {0}Z
CERT_REQUIREDZ	CERT_NONErrz:Could not find the TLS certificate file, invalid path: {0}z2Could not find the TLS key file, invalid path: {0})r[r\r�os�path�exists�IOError�formatZ	cert_reqs�isdirZca_certsZca_cert_dir�
isinstancerZ	cert_fileZkey_file)r,�conn�urlr2r3Zcert_locr&r&r'�cert_verify�s8


zHTTPAdapter.cert_verifycCs�t�}t|dd�|_tt|di��|_t|j�|_||_|jj|_t	|j
t�r^|j
jd�|_
n|j
|_
t
|j||�||_||_|S)a�Builds a :class:`Response <requests.Response>` object from a urllib3
        response. This should not be called from user code, and is only exposed
        for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`

        :param req: The :class:`PreparedRequest <PreparedRequest>` used to generate the response.
        :param resp: The urllib3 response object.
        :rtype: requests.Response
        ZstatusN�headerszutf-8)rrGZstatus_coderrjr�encoding�raw�reasonrfrh�bytes�decoder�cookiesr/�
connection)r,Zreq�respZresponser&r&r'�build_response�s

zHTTPAdapter.build_responseNcCsNt||�}|r.t|d�}|j|�}|j|�}nt|�}|j�}|jj|�}|S)a�Returns a urllib3 connection for the given URL. This should not be
        called from user code, and is only exposed for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param url: The URL to connect to.
        :param proxies: (optional) A Requests-style dictionary of proxies used on this request.
        :rtype: urllib3.ConnectionPool
        Zhttp)rrr^Zconnection_from_urlrZgeturlrV)r,rhr4r]rErgZparsedr&r&r'�get_connection"s	


zHTTPAdapter.get_connectioncCs*|jj�x|jj�D]}|j�qWdS)z�Disposes of any internal state.

        Currently, this closes the PoolManager and any active ProxyManager,
        which closes any pooled connections.
        N)rV�clearrE�values)r,r]r&r&r'r69s
zHTTPAdapter.closec	Csbt|j|�}t|j�j}|o"|dk}d}|rDt|�jj�}|jd�}|j}|r^|r^t|j�}|S)a?Obtain the url to use when making the final request.

        If the message is being sent through a HTTP proxy, the full URL has to
        be used. Otherwise, we should only use the path portion of the URL.

        This should not be called from user code, and is only exposed for use
        when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
        :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs.
        :rtype: str
        r_FrW)rrhr�schemer[r\Zpath_urlr)	r,r/r4r]rwZis_proxied_http_requestZusing_socks_proxyZproxy_schemerhr&r&r'�request_urlCs


zHTTPAdapter.request_urlcKsdS)a"Add any headers needed by the connection. As of v2.0 this does
        nothing by default, but is left for overriding by users that subclass
        the :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        This should not be called from user code, and is only exposed for use
        when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param request: The :class:`PreparedRequest <PreparedRequest>` to add headers to.
        :param kwargs: The keyword arguments from the call to send().
        Nr&)r,r/r%r&r&r'�add_headers`szHTTPAdapter.add_headerscCs&i}t|�\}}|r"t||�|d<|S)a
Returns a dictionary of the headers to add to any request sent
        through a proxy. This works with urllib3 magic to ensure that they are
        correctly sent to the proxy, rather than in a tunnelled request if
        CONNECT is being used.

        This should not be called from user code, and is only exposed for use
        when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param proxies: The url of the proxy being used for this request.
        :rtype: dict
        zProxy-Authorization)rr")r,r]rjrXrYr&r&r'rZns

zHTTPAdapter.proxy_headersFTc)Cs�|j|j|�}|j||j||�|j||�}|j|�|jdkpHd|jk}	t|t�r�y|\}
}t	|
|d�}Wq�t
k
r�}zdj|�}
t
|
��WYdd}~Xq�Xnt|t	�r�nt	||d�}�yL|	s�|j|j
||j|jdddd|j|d�
}�nt|d��r|j}|jtd�}y�|j|j
|d	d
�x$|jj�D]\}}|j||��q.W|j�xN|jD]D}|jtt|��dd�jd��|jd
�|j|�|jd
��qXW|jd�y|jd	d�}Wntk
�r�|j�}YnXtj|||ddd�}Wn|j��YnXW�n�t t!j"fk
�rD}
zt#|
|d��WYdd}
~
X�nZt$k
�r�}z�t|j%t&��r~t|j%t'��s~t(||d��t|j%t)��r�t*||d��t|j%t+��r�t,||d��t|j%t-��r�t.||d��t#||d��WYdd}~Xn�t/k
�r}zt#||d��WYdd}~Xn�t+k
�r@}zt,|��WYdd}~Xn^t-t0fk
�r�}z<t|t-��rpt.||d��nt|t1��r�t2||d��n�WYdd}~XnX|j3||�S)aSends PreparedRequest object. Returns Response object.

        :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
        :param stream: (optional) Whether to stream the request content.
        :param timeout: (optional) How long to wait for the server to send
            data before giving up, as a float, or a :ref:`(connect timeout,
            read timeout) <timeouts>` tuple.
        :type timeout: float or tuple or urllib3 Timeout object
        :param verify: (optional) Either a boolean, in which case it controls whether
            we verify the server's TLS certificate, or a string, in which case it
            must be a path to a CA bundle to use
        :param cert: (optional) Any user-provided SSL certificate to be trusted.
        :param proxies: (optional) The proxies dictionary to apply to the request.
        :rtype: requests.Response
        NzContent-Length)�connectrBzsInvalid timeout {0}. Pass a (connect, read) timeout tuple, or a single float to set both timeouts to the same valueF)
�methodrh�bodyrjZredirectZassert_same_host�preload_content�decode_contentZretriesr1�
proxy_pool)r1T)Zskip_accept_encoding�zutf-8s
s0

)�	buffering)Zpoolrqr}r~)r/)4rtrhrirxryr|rjrf�tuple�TimeoutSauce�
ValueErrorrdZurlopenr{r=�hasattrrZ	_get_conn�DEFAULT_POOL_TIMEOUTZ
putrequestrNZ	putheaderZ
endheadersr5�hex�len�encodeZgetresponse�	TypeErrorrZfrom_httplibr6r
�socket�errorrr
rmrrrrr �_ProxyErrorr�	_SSLErrorrr�
_HTTPErrorrrrs)r,r/r0r1r2r3r4rgrhZchunkedrzrB�e�errrrZlow_conn�headerrQ�i�rr&r&r'r5�s�


 


zHTTPAdapter.send)N)FNTNN)r7r8r9r:rL�DEFAULT_POOLSIZErD�DEFAULT_POOLBLOCKr+rMrRrFr^rirsrtr6rxryrZr5r;r&r&)r-r'r<Qs$%4%

r<)>r:Zos.pathr`r�Zpip._vendor.urllib3.poolmanagerrrZpip._vendor.urllib3.responserZpip._vendor.urllib3.utilrr�Zpip._vendor.urllib3.util.retryrZpip._vendor.urllib3.exceptionsrrr	r�r
rrr�r
rrr�rZmodelsr�compatrrZutilsrrrrrrZ
structuresrrpr�
exceptionsrrrr r!Zauthr"Z!pip._vendor.urllib3.contrib.socksr#�ImportErrorr�r�rDr��objectr)r<r&r&r&r'�<module>	sB $_vendor/requests/__pycache__/help.cpython-36.pyc000064400000005067151733136370015662 0ustar003

�PfS�@s�dZddlmZddlZddlZddlZddlZddlmZddlm	Z	ddlm
Z
ddlmZ
ydd	lmZWn ek
r�dZdZdZYnXddlZddlZd
d�Zdd
�Zdd�Zedkr�e�dS)z'Module containing bug report helper(s).�)�print_functionN)�idna)�urllib3)�chardet�)�__version__)�	pyopensslcCs�tj�}|dkrtj�}nj|dkr\dtjjtjjtjjf}tjjdkr�dj	|tjjg�}n(|dkrntj�}n|dkr�tj�}nd}||d	�S)
a�Return a dict with the Python implementation and version.

    Provide both the name and the version of the Python implementation
    currently running. For example, on CPython 2.7.5 it will return
    {'name': 'CPython', 'version': '2.7.5'}.

    This function works best on CPython and PyPy: in particular, it probably
    doesn't work for Jython or IronPython. Future investigation should be done
    to work out the correct shape of the code for those platforms.
    ZCPythonZPyPyz%s.%s.%s�final�ZJythonZ
IronPython�Unknown)�name�version)
�platformZpython_implementationZpython_version�sysZpypy_version_info�major�minor�micro�releaselevel�join)�implementationZimplementation_version�r�/usr/lib/python3.6/help.py�_implementations 


rc	Cs�ytj�tj�d�}Wntk
r4ddd�}YnXt�}dtji}dtji}ddd�}trrtjdtj	j
d�}dttdd�i}dtt
dd�i}ttd	d�}d|dk	r�d|ndi}|||tdk	|||||dtid
�
S)z&Generate information for a bug report.)�system�releaserr
Nr
)r
Zopenssl_versionz%xr�OPENSSL_VERSION_NUMBER)
rr�
system_sslZusing_pyopensslZ	pyOpenSSLrr�cryptographyrZrequests)rrr�IOErrorrrrr�OpenSSLZSSLr�getattrrr�sslr�requests_version)	Z
platform_infoZimplementation_infoZurllib3_infoZchardet_infoZpyopenssl_infoZcryptography_infoZ	idna_inforZsystem_ssl_inforrr�info;s8

r#cCsttjt�ddd��dS)z)Pretty-print the bug information as JSON.T�)Z	sort_keys�indentN)�print�json�dumpsr#rrrr�mainrsr)�__main__)�__doc__Z
__future__rr'rrr!Zpip._vendorrrrr
rr"Zpackages.urllib3.contribr�ImportErrorrrrr#r)�__name__rrrr�<module>s,
!7_vendor/requests/__pycache__/api.cpython-36.opt-1.pyc000064400000014374151733136370016443 0ustar003

�Pf]�@s\dZddlmZdd�Zddd�Zdd	�Zd
d�Zddd
�Zddd�Zddd�Z	dd�Z
dS)z�
requests.api
~~~~~~~~~~~~

This module implements the Requests API.

:copyright: (c) 2012 by Kenneth Reitz.
:license: Apache2, see LICENSE for more details.
�)�sessionscKs*tj��}|jf||d�|��SQRXdS)a�	Constructs and sends a :class:`Request <Request>`.

    :param method: method for the new :class:`Request` object.
    :param url: URL for the new :class:`Request` object.
    :param params: (optional) Dictionary or bytes to be sent in the query string for the :class:`Request`.
    :param data: (optional) Dictionary or list of tuples ``[(key, value)]`` (will be form-encoded), bytes, or file-like object to send in the body of the :class:`Request`.
    :param json: (optional) json data to send in the body of the :class:`Request`.
    :param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`.
    :param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`.
    :param files: (optional) Dictionary of ``'name': file-like-objects`` (or ``{'name': file-tuple}``) for multipart encoding upload.
        ``file-tuple`` can be a 2-tuple ``('filename', fileobj)``, 3-tuple ``('filename', fileobj, 'content_type')``
        or a 4-tuple ``('filename', fileobj, 'content_type', custom_headers)``, where ``'content-type'`` is a string
        defining the content type of the given file and ``custom_headers`` a dict-like object containing additional headers
        to add for the file.
    :param auth: (optional) Auth tuple to enable Basic/Digest/Custom HTTP Auth.
    :param timeout: (optional) How many seconds to wait for the server to send data
        before giving up, as a float, or a :ref:`(connect timeout, read
        timeout) <timeouts>` tuple.
    :type timeout: float or tuple
    :param allow_redirects: (optional) Boolean. Enable/disable GET/OPTIONS/POST/PUT/PATCH/DELETE/HEAD redirection. Defaults to ``True``.
    :type allow_redirects: bool
    :param proxies: (optional) Dictionary mapping protocol to the URL of the proxy.
    :param verify: (optional) Either a boolean, in which case it controls whether we verify
            the server's TLS certificate, or a string, in which case it must be a path
            to a CA bundle to use. Defaults to ``True``.
    :param stream: (optional) if ``False``, the response content will be immediately downloaded.
    :param cert: (optional) if String, path to ssl client cert file (.pem). If Tuple, ('cert', 'key') pair.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response

    Usage::

      >>> import requests
      >>> req = requests.request('GET', 'http://httpbin.org/get')
      <Response [200]>
    )�method�urlN)rZSession�request)rr�kwargsZsession�r�/usr/lib/python3.6/api.pyrs)
rNcKs"|jdd�td|fd|i|��S)aOSends a GET request.

    :param url: URL for the new :class:`Request` object.
    :param params: (optional) Dictionary or bytes to be sent in the query string for the :class:`Request`.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    �allow_redirectsT�get�params)�
setdefaultr)rrrrrrr
=s
r
cKs|jdd�td|f|�S)z�Sends an OPTIONS request.

    :param url: URL for the new :class:`Request` object.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    r	T�options)rr)rrrrrr
Ks	r
cKs|jdd�td|f|�S)z�Sends a HEAD request.

    :param url: URL for the new :class:`Request` object.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    r	F�head)rr)rrrrrrXs	rcKstd|f||d�|��S)a�Sends a POST request.

    :param url: URL for the new :class:`Request` object.
    :param data: (optional) Dictionary (will be form-encoded), bytes, or file-like object to send in the body of the :class:`Request`.
    :param json: (optional) json data to send in the body of the :class:`Request`.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    �post)�data�json)r)rrrrrrrresrcKstd|fd|i|��S)a�Sends a PUT request.

    :param url: URL for the new :class:`Request` object.
    :param data: (optional) Dictionary (will be form-encoded), bytes, or file-like object to send in the body of the :class:`Request`.
    :param json: (optional) json data to send in the body of the :class:`Request`.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    �putr)r)rrrrrrrssrcKstd|fd|i|��S)a�Sends a PATCH request.

    :param url: URL for the new :class:`Request` object.
    :param data: (optional) Dictionary (will be form-encoded), bytes, or file-like object to send in the body of the :class:`Request`.
    :param json: (optional) json data to send in the body of the :class:`Request`.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    �patchr)r)rrrrrrr�srcKstd|f|�S)z�Sends a DELETE request.

    :param url: URL for the new :class:`Request` object.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    �delete)r)rrrrrr�s	r)N)NN)N)N)�__doc__�rrr
r
rrrrrrrrr�<module>s-




_vendor/requests/__pycache__/help.cpython-36.opt-1.pyc000064400000005067151733136370016621 0ustar003

�PfS�@s�dZddlmZddlZddlZddlZddlZddlmZddlm	Z	ddlm
Z
ddlmZ
ydd	lmZWn ek
r�dZdZdZYnXddlZddlZd
d�Zdd
�Zdd�Zedkr�e�dS)z'Module containing bug report helper(s).�)�print_functionN)�idna)�urllib3)�chardet�)�__version__)�	pyopensslcCs�tj�}|dkrtj�}nj|dkr\dtjjtjjtjjf}tjjdkr�dj	|tjjg�}n(|dkrntj�}n|dkr�tj�}nd}||d	�S)
a�Return a dict with the Python implementation and version.

    Provide both the name and the version of the Python implementation
    currently running. For example, on CPython 2.7.5 it will return
    {'name': 'CPython', 'version': '2.7.5'}.

    This function works best on CPython and PyPy: in particular, it probably
    doesn't work for Jython or IronPython. Future investigation should be done
    to work out the correct shape of the code for those platforms.
    ZCPythonZPyPyz%s.%s.%s�final�ZJythonZ
IronPython�Unknown)�name�version)
�platformZpython_implementationZpython_version�sysZpypy_version_info�major�minor�micro�releaselevel�join)�implementationZimplementation_version�r�/usr/lib/python3.6/help.py�_implementations 


rc	Cs�ytj�tj�d�}Wntk
r4ddd�}YnXt�}dtji}dtji}ddd�}trrtjdtj	j
d�}dttdd�i}dtt
dd�i}ttd	d�}d|dk	r�d|ndi}|||tdk	|||||dtid
�
S)z&Generate information for a bug report.)�system�releaserr
Nr
)r
Zopenssl_versionz%xr�OPENSSL_VERSION_NUMBER)
rr�
system_sslZusing_pyopensslZ	pyOpenSSLrr�cryptographyrZrequests)rrr�IOErrorrrrr�OpenSSLZSSLr�getattrrr�sslr�requests_version)	Z
platform_infoZimplementation_infoZurllib3_infoZchardet_infoZpyopenssl_infoZcryptography_infoZ	idna_inforZsystem_ssl_inforrr�info;s8

r#cCsttjt�ddd��dS)z)Pretty-print the bug information as JSON.T�)Z	sort_keys�indentN)�print�json�dumpsr#rrrr�mainrsr)�__main__)�__doc__Z
__future__rr'rrr!Zpip._vendorrrrr
rr"Zpackages.urllib3.contribr�ImportErrorrrrr#r)�__name__rrrr�<module>s,
!7_vendor/requests/__pycache__/cookies.cpython-36.opt-1.pyc000064400000044057151733136370017327 0ustar003

�Pf G�
@sdZddlZddlZddlZddlZddlmZddlmZm	Z	m
Z
mZyddlZWne
k
rpddlZYnXGdd�de�ZGdd	�d	e�Zd
d�Zdd
�Zddd�ZGdd�de�ZGdd�dejej�Zdd�Zdd�Zdd�Zd dd�Zdd�ZdS)!z�
requests.cookies
~~~~~~~~~~~~~~~~

Compatibility code to be able to use `cookielib.CookieJar` with requests.

requests.utils imports from here, so be careful with imports.
�N�)�to_native_string)�	cookielib�urlparse�
urlunparse�Morselc@s�eZdZdZdd�Zdd�Zdd�Zdd	�Zd
d�Zdd
�Z	dd�Z
ddd�Zdd�Zdd�Z
dd�Zedd��Zedd��Zedd��ZdS) �MockRequesta�Wraps a `requests.Request` to mimic a `urllib2.Request`.

    The code in `cookielib.CookieJar` expects this interface in order to correctly
    manage cookie policies, i.e., determine whether a cookie can be set, given the
    domains of the request and the cookie.

    The original request object is read-only. The client is responsible for collecting
    the new headers via `get_new_headers()` and interpreting them appropriately. You
    probably want `get_cookie_header`, defined below.
    cCs ||_i|_t|jj�j|_dS)N)�_r�_new_headersr�url�scheme�type)�self�request�r�/usr/lib/python3.6/cookies.py�__init__&szMockRequest.__init__cCs|jS)N)r
)rrrr�get_type+szMockRequest.get_typecCst|jj�jS)N)rr	rZnetloc)rrrr�get_host.szMockRequest.get_hostcCs|j�S)N)r)rrrr�get_origin_req_host1szMockRequest.get_origin_req_hostcCsT|jjjd�s|jjSt|jjddd�}t|jj�}t|j||j|j	|j
|jg�S)NZHostzutf-8)�encoding)r	�headers�getrrrrr�pathZparamsZqueryZfragment)r�hostZparsedrrr�get_full_url4szMockRequest.get_full_urlcCsdS)NTr)rrrr�is_unverifiableBszMockRequest.is_unverifiablecCs||jjkp||jkS)N)r	rr
)r�namerrr�
has_headerEszMockRequest.has_headerNcCs|jjj||jj||��S)N)r	rrr
)rr�defaultrrr�
get_headerHszMockRequest.get_headercCstd��dS)zMcookielib has no legitimate use for this method; add it back if you find one.z=Cookie headers should be added with add_unredirected_header()N)�NotImplementedError)r�key�valrrr�
add_headerKszMockRequest.add_headercCs||j|<dS)N)r
)rr�valuerrr�add_unredirected_headerOsz#MockRequest.add_unredirected_headercCs|jS)N)r
)rrrr�get_new_headersRszMockRequest.get_new_headerscCs|j�S)N)r)rrrr�unverifiableUszMockRequest.unverifiablecCs|j�S)N)r)rrrr�origin_req_hostYszMockRequest.origin_req_hostcCs|j�S)N)r)rrrrr]szMockRequest.host)N)�__name__�
__module__�__qualname__�__doc__rrrrrrrr r$r&r'�propertyr(r)rrrrrrs

rc@s(eZdZdZdd�Zdd�Zdd�ZdS)	�MockResponsez�Wraps a `httplib.HTTPMessage` to mimic a `urllib.addinfourl`.

    ...what? Basically, expose the parsed HTTP headers from the server response
    the way `cookielib` expects to see them.
    cCs
||_dS)z�Make a MockResponse for `cookielib` to read.

        :param headers: a httplib.HTTPMessage or analogous carrying the headers
        N)�_headers)rrrrrriszMockResponse.__init__cCs|jS)N)r0)rrrr�infopszMockResponse.infocCs|jj|�dS)N)r0�
getheaders)rrrrrr2sszMockResponse.getheadersN)r*r+r,r-rr1r2rrrrr/bsr/cCs8t|d�o|jsdSt|�}t|jj�}|j||�dS)z�Extract the cookies from the response into a CookieJar.

    :param jar: cookielib.CookieJar (not necessarily a RequestsCookieJar)
    :param request: our own requests.Request object
    :param response: urllib3.HTTPResponse object
    �_original_responseN)�hasattrr3rr/�msgZextract_cookies)�jarrZresponseZreq�resrrr�extract_cookies_to_jarws
r8cCs t|�}|j|�|j�jd�S)zj
    Produce an appropriate Cookie header string to be sent with `request`, or None.

    :rtype: str
    �Cookie)rZadd_cookie_headerr'r)r6r�rrrr�get_cookie_header�s
r;cCs�g}xV|D]N}|j|krq
|dk	r.||jkr.q
|dk	rB||jkrBq
|j|j|j|jf�q
Wx |D]\}}}|j|||�qbWdS)zkUnsets a cookie by name, by default over all domains and paths.

    Wraps CookieJar.clear(), is O(n).
    N)r�domainr�append�clear)�	cookiejarrr<rZ
clearables�cookierrr�remove_cookie_by_name�s

rAc@seZdZdZdS)�CookieConflictErrorz�There are two cookies that meet the criteria specified in the cookie jar.
    Use .get and .set and include domain and path args in order to be more specific.
    N)r*r+r,r-rrrrrB�srBcs�eZdZdZd1dd�Zdd�Zdd�Zd	d
�Zdd�Zd
d�Z	dd�Z
dd�Zdd�Zdd�Z
dd�Zd2dd�Z�fdd�Zdd�Zdd �Zd!d"�Z�fd#d$�Z�fd%d&�Zd3d'd(�Zd4d)d*�Zd+d,�Zd-d.�Zd/d0�Z�ZS)5�RequestsCookieJara�Compatibility class; is a cookielib.CookieJar, but exposes a dict
    interface.

    This is the CookieJar we create by default for requests and sessions that
    don't specify one, since some clients may expect response.cookies and
    session.cookies to support dict operations.

    Requests does not use the dict interface internally; it's just for
    compatibility with external client code. All requests code should work
    out of the box with externally provided instances of ``CookieJar``, e.g.
    ``LWPCookieJar`` and ``FileCookieJar``.

    Unlike a regular CookieJar, this class is pickleable.

    .. warning:: dictionary operations that are normally O(1) may be O(n).
    NcCs(y|j|||�Stk
r"|SXdS)z�Dict-like get() that also supports optional domain and path args in
        order to resolve naming collisions from using one cookie jar over
        multiple domains.

        .. warning:: operation is O(n), not O(1).
        N)�_find_no_duplicates�KeyError)rrrr<rrrrr�szRequestsCookieJar.getcKsX|dkr(t|||jd�|jd�d�dSt|t�r<t|�}nt||f|�}|j|�|S)z�Dict-like set() that also supports optional domain and path args in
        order to resolve naming collisions from using one cookie jar over
        multiple domains.
        Nr<r)r<r)rAr�
isinstancer�morsel_to_cookie�
create_cookie�
set_cookie)rrr%�kwargs�crrr�set�s


zRequestsCookieJar.setccsxt|�D]}|jVq
WdS)z�Dict-like iterkeys() that returns an iterator of names of cookies
        from the jar.

        .. seealso:: itervalues() and iteritems().
        N)�iterr)rr@rrr�iterkeys�szRequestsCookieJar.iterkeyscCst|j��S)z�Dict-like keys() that returns a list of names of cookies from the
        jar.

        .. seealso:: values() and items().
        )�listrN)rrrr�keys�szRequestsCookieJar.keysccsxt|�D]}|jVq
WdS)z�Dict-like itervalues() that returns an iterator of values of cookies
        from the jar.

        .. seealso:: iterkeys() and iteritems().
        N)rMr%)rr@rrr�
itervalues�szRequestsCookieJar.itervaluescCst|j��S)z�Dict-like values() that returns a list of values of cookies from the
        jar.

        .. seealso:: keys() and items().
        )rOrQ)rrrr�values�szRequestsCookieJar.valuesccs$xt|�D]}|j|jfVq
WdS)z�Dict-like iteritems() that returns an iterator of name-value tuples
        from the jar.

        .. seealso:: iterkeys() and itervalues().
        N)rMrr%)rr@rrr�	iteritems�szRequestsCookieJar.iteritemscCst|j��S)z�Dict-like items() that returns a list of name-value tuples from the
        jar. Allows client-code to call ``dict(RequestsCookieJar)`` and get a
        vanilla python dict of key value pairs.

        .. seealso:: keys() and values().
        )rOrS)rrrr�itemsszRequestsCookieJar.itemscCs0g}x&t|�D]}|j|kr|j|j�qW|S)z2Utility method to list all the domains in the jar.)rMr<r=)r�domainsr@rrr�list_domainss

zRequestsCookieJar.list_domainscCs0g}x&t|�D]}|j|kr|j|j�qW|S)z0Utility method to list all the paths in the jar.)rMrr=)r�pathsr@rrr�
list_pathss

zRequestsCookieJar.list_pathscCs>g}x4t|�D](}|jdk	r*|j|kr*dS|j|j�qWdS)zvReturns True if there are multiple domains in the jar.
        Returns False otherwise.

        :rtype: bool
        NTF)rMr<r=)rrUr@rrr�multiple_domainssz"RequestsCookieJar.multiple_domainscCsJi}x@t|�D]4}|dks$|j|kr|dks6|j|kr|j||j<qW|S)z�Takes as an argument an optional domain and path and returns a plain
        old Python dict of name-value pairs of cookies that meet the
        requirements.

        :rtype: dict
        N)rMr<rr%r)rr<rZ
dictionaryr@rrr�get_dict,szRequestsCookieJar.get_dictcs*ytt|�j|�Stk
r$dSXdS)NT)�superrC�__contains__rB)rr)�	__class__rrr\<szRequestsCookieJar.__contains__cCs
|j|�S)z�Dict-like __getitem__() for compatibility with client code. Throws
        exception if there are more than one cookie with name. In that case,
        use the more explicit get() method instead.

        .. warning:: operation is O(n), not O(1).
        )rD)rrrrr�__getitem__BszRequestsCookieJar.__getitem__cCs|j||�dS)z�Dict-like __setitem__ for compatibility with client code. Throws
        exception if there is already a cookie of that name in the jar. In that
        case, use the more explicit set() method instead.
        N)rL)rrr%rrr�__setitem__KszRequestsCookieJar.__setitem__cCst||�dS)zlDeletes a cookie given a name. Wraps ``cookielib.CookieJar``'s
        ``remove_cookie_by_name()``.
        N)rA)rrrrr�__delitem__RszRequestsCookieJar.__delitem__csLt|jd�r4|jjd�r4|jjd�r4|jjdd�|_tt|�j|f|�|�S)N�
startswith�"z\"�)r4r%ra�endswith�replacer[rCrI)rr@�argsrJ)r]rrrIXs$zRequestsCookieJar.set_cookiecs@t|tj�r,x.|D]}|jtj|��qWntt|�j|�dS)zAUpdates this jar with cookies from another CookieJar or dict-likeN)rFr�	CookieJarrI�copyr[rC�update)r�otherr@)r]rrri]s
zRequestsCookieJar.updatecCs\xDt|�D]8}|j|kr
|dks*|j|kr
|dks<|j|kr
|jSq
Wtd|||f��dS)a�Requests uses this method internally to get cookie values.

        If there are conflicting cookies, _find arbitrarily chooses one.
        See _find_no_duplicates if you want an exception thrown if there are
        conflicting cookies.

        :param name: a string containing name of cookie
        :param domain: (optional) string containing domain of cookie
        :param path: (optional) string containing path of cookie
        :return: cookie.value
        Nzname=%r, domain=%r, path=%r)rMrr<rr%rE)rrr<rr@rrr�_findes

zRequestsCookieJar._findcCs|d}xXt|�D]L}|j|kr|dks.|j|kr|dks@|j|kr|dk	rTtd|��|j}qW|rf|Std|||f��dS)a�Both ``__get_item__`` and ``get`` call this function: it's never
        used elsewhere in Requests.

        :param name: a string containing name of cookie
        :param domain: (optional) string containing domain of cookie
        :param path: (optional) string containing path of cookie
        :raises KeyError: if cookie is not found
        :raises CookieConflictError: if there are multiple cookies
            that match name and optionally domain and path
        :return: cookie.value
        Nz(There are multiple cookies with name, %rzname=%r, domain=%r, path=%r)rMrr<rrBr%rE)rrr<rZtoReturnr@rrrrDys

z%RequestsCookieJar._find_no_duplicatescCs|jj�}|jd�|S)z4Unlike a normal CookieJar, this class is pickleable.�
_cookies_lock)�__dict__rh�pop)r�staterrr�__getstate__�s

zRequestsCookieJar.__getstate__cCs$|jj|�d|jkr tj�|_dS)z4Unlike a normal CookieJar, this class is pickleable.rlN)rmri�	threading�RLockrl)rrorrr�__setstate__�s
zRequestsCookieJar.__setstate__cCst�}|j|�|S)z(Return a copy of this RequestsCookieJar.)rCri)rZnew_cjrrrrh�s
zRequestsCookieJar.copy)NNN)NN)NN)NN)r*r+r,r-rrLrNrPrQrRrSrTrVrXrYrZr\r^r_r`rIrirkrDrprsrh�
__classcell__rr)r]rrC�s0
				
	

rCcCsR|dkrdSt|d�r|j�Stj|�}|j�x|D]}|jtj|��q6W|S)Nrh)r4rhr>rI)r6Znew_jarr@rrr�_copy_cookie_jar�s


rucKs�td||ddddddddddidd�
}t|�t|�}|rNd	}t|t|���|j|�t|d
�|d<t|d�|d
<|djd�|d<t|d�|d<tjf|�S)z�Make a cookie from underspecified parameters.

    By default, the pair of `name` and `value` will be set for the domain ''
    and sent on every request (this is sometimes called a "supercookie").
    rNrc�/FT�HttpOnly)
�versionrr%�portr<r�secure�expires�discard�comment�comment_url�rest�rfc2109z4create_cookie() got unexpected keyword arguments: %sryZport_specifiedr<Zdomain_specified�.Zdomain_initial_dotrZpath_specified)	�dictrL�	TypeErrorrOri�boolrarr9)rr%rJ�resultZbadargs�errrrrrH�s0
rHcCs�d}|drPyttj�t|d��}Wqrtk
rLtd|d��YqrXn"|drrd}tjtj|d|��}t|dt|d�d|d||j	|d	dd
|didt|d�|j
|d
p�dd�
S)zBConvert a Morsel object into a Cookie containing the one k/v pair.Nzmax-agezmax-age: %s must be integerr{z%a, %d-%b-%Y %H:%M:%S GMTr}Fr<rrwZhttponlyrzrxr)
r}r~r|r<r{rrryrr�rzr%rx)�int�time�
ValueErrorr��calendarZtimegmZstrptimerHr�r"r%)Zmorselr{Z
time_templaterrrrG�s0


rGTcCsV|dkrt�}|dk	rRdd�|D�}x,|D]$}|s:||kr*|jt|||��q*W|S)a-Returns a CookieJar from a key/value dictionary.

    :param cookie_dict: Dict of key/values to insert into CookieJar.
    :param cookiejar: (optional) A cookiejar to add the cookies to.
    :param overwrite: (optional) If False, will not replace cookies
        already in the jar with new ones.
    NcSsg|]
}|j�qSr)r)�.0r@rrr�
<listcomp>sz'cookiejar_from_dict.<locals>.<listcomp>)rCrIrH)Zcookie_dictr?�	overwriteZnames_from_jarrrrr�cookiejar_from_dict�s
r�cCszt|tj�std��t|t�r.t||dd�}nHt|tj�rvy|j|�Wn,tk
rtx|D]}|j|�q^WYnX|S)z�Add cookies to cookiejar and returns a merged CookieJar.

    :param cookiejar: CookieJar object to add the cookies to.
    :param cookies: Dictionary or CookieJar object to be added.
    z!You can only merge into CookieJarF)r?r�)	rFrrgr�r�r�ri�AttributeErrorrI)r?ZcookiesZ
cookie_in_jarrrr�
merge_cookiess

r�)NN)NT)r-rhr�r��collectionsZ_internal_utilsr�compatrrrrrq�ImportErrorZdummy_threading�objectrr/r8r;rA�RuntimeErrorrBrg�MutableMappingrCrurHrGr�r�rrrr�<module>
s.H
{#
_vendor/requests/__pycache__/hooks.cpython-36.opt-1.pyc000064400000001626151733136370017011 0ustar003

�Pf��@sdZdgZdd�Zdd�ZdS)z�
requests.hooks
~~~~~~~~~~~~~~

This module provides the capabilities for the Requests hooks system.

Available hooks:

``response``:
    The response generated from a Request.
ZresponsecCstdd�tD��S)Ncss|]}|gfVqdS)N�)�.0Zeventrr�/usr/lib/python3.6/hooks.py�	<genexpr>sz default_hooks.<locals>.<genexpr>)�dict�HOOKSrrrr�
default_hookssrcKsR|pt�}|j|�}|rNt|d�r(|g}x$|D]}||f|�}|dk	r.|}q.W|S)z6Dispatches a hook dictionary on a given piece of data.�__call__N)r�get�hasattr)�keyZhooksZ	hook_data�kwargs�hookZ
_hook_datarrr�
dispatch_hooks



rN)�__doc__rrrrrrr�<module>
s_vendor/requests/__pycache__/models.cpython-36.opt-1.pyc000064400000056505151733136370017157 0ustar003

�Pf��@s�dZddlZddlZddlZddlZddlmZddlm	Z	ddl
mZddlm
Z
mZmZmZddlmZdd	lmZdd
lmZddlmZddlmZmZmZdd
lmZmZm Z m!Z!m"Z"m#Z#m$Z$ddl%m&Z&m'Z'ddl(m)Z)m*Z*m+Z+m,Z,m-Z-m.Z.m/Z/m0Z0m1Z1m2Z2ddl3m4Z4m5Z5m6Z6m7Z7m8Z8m9Z9m:Z:m;Z;m<Z<m=Z=ddl3m>Z?ddl@mAZAeAjBeAjCeAjDeAjEeAjFfZGdZHd!ZIdZJGdd�deK�ZLGdd�deK�ZMGdd�deM�ZNGdd�deLeM�ZOGdd �d eK�ZPdS)"z`
requests.models
~~~~~~~~~~~~~~~

This module contains the primary objects that power Requests.
�N)�RequestField)�encode_multipart_formdata)�	parse_url)�DecodeError�ReadTimeoutError�
ProtocolError�LocationParseError)�UnsupportedOperation�)�
default_hooks)�CaseInsensitiveDict)�
HTTPBasicAuth)�cookiejar_from_dict�get_cookie_header�_copy_cookie_jar)�	HTTPError�
MissingSchema�
InvalidURL�ChunkedEncodingError�ContentDecodingError�ConnectionError�StreamConsumedError)�to_native_string�unicode_is_ascii)
�guess_filename�get_auth_from_url�requote_uri�stream_decode_response_unicode�to_key_val_list�parse_header_links�iter_slices�guess_json_utf�	super_len�check_header_validity)
�	cookielib�
urlunparse�urlsplit�	urlencode�str�bytes�is_py2�chardet�builtin_str�
basestring)�json)�codes��
iic@s0eZdZedd��Zedd��Zedd��ZdS)�RequestEncodingMixincCsNg}t|j�}|j}|sd}|j|�|j}|rD|jd�|j|�dj|�S)zBuild the path URL to use.�/�?�)r&�url�path�append�query�join)�selfr6�pr7r9�r=�/usr/lib/python3.6/models.py�path_url=s



zRequestEncodingMixin.path_urlcCs�t|ttf�r|St|d�r |St|d�r�g}x|t|�D]p\}}t|t�sVt|d�r\|g}xJ|D]B}|dk	rb|jt|t�r�|jd�n|t|t�r�|jd�n|f�qbWq8Wt|dd�S|SdS)z�Encode parameters in a piece of data.

        Will successfully encode parameters when passed as a dict or a list of
        2-tuples. Order is retained if data is a list of 2-tuples but arbitrary
        if parameters are supplied as a dict.
        �read�__iter__Nzutf-8T)Zdoseq)	�
isinstancer(r)�hasattrrr-r8�encoder')�data�result�kZvs�vr=r=r>�_encode_paramsRs 	


$z#RequestEncodingMixin._encode_paramscCs�|std��nt|t�r td��g}t|p,i�}t|p8i�}x�|D]�\}}t|t�s`t|d�rf|g}x\|D]T}|dk	rlt|t�s�t|�}|jt|t�r�|jd�n|t|t�r�|j	d�n|f�qlWqBWx�|D]�\}}d}d}	t|t
tf��r.t|�dk�r|\}
}n&t|�dk�r |\}
}}n|\}
}}}	nt
|��p:|}
|}t|tttf��rX|}n|j�}t|||
|	d�}
|
j|d	�|j|
�q�Wt|�\}}||fS)
a�Build the body for a multipart/form-data request.

        Will successfully encode files when passed as a dict or a list of
        tuples. Order is retained if data is a list of tuples but arbitrary
        if parameters are supplied as a dict.
        The tuples may be 2-tuples (filename, fileobj), 3-tuples (filename, fileobj, contentype)
        or 4-tuples (filename, fileobj, contentype, custom_headers).
        zFiles must be provided.zData must not be a string.rANzutf-8��)�namerE�filename�headers)�content_type)�
ValueErrorrBr-rrCr)r(r8�decoderD�tuple�list�lenr�	bytearrayr@rZmake_multipartr)�filesrEZ
new_fieldsZfieldsZfield�valrHrGZftZfh�fn�fpZfdataZrf�bodyrOr=r=r>�
_encode_filesmsH




$
z"RequestEncodingMixin._encode_filesN)�__name__�
__module__�__qualname__�propertyr?�staticmethodrIr[r=r=r=r>r2<sr2c@seZdZdd�Zdd�ZdS)�RequestHooksMixincCs\||jkrtd|��t|tj�r4|j|j|�n$t|d�rX|j|jdd�|D��dS)zProperly register a hook.z1Unsupported event specified, with event name "%s"rAcss|]}t|tj�r|VqdS)N)rB�collections�Callable)�.0�hr=r=r>�	<genexpr>�sz2RequestHooksMixin.register_hook.<locals>.<genexpr>N)�hooksrPrBrbrcr8rC�extend)r;�event�hookr=r=r>�
register_hook�s

zRequestHooksMixin.register_hookcCs.y|j|j|�dStk
r(dSXdS)ziDeregister a previously registered hook.
        Returns True if the hook existed, False if not.
        TFN)rg�removerP)r;rirjr=r=r>�deregister_hook�s
z!RequestHooksMixin.deregister_hookN)r\r]r^rkrmr=r=r=r>ra�srac
@s*eZdZdZd	dd�Zdd�Zdd�ZdS)
�Requesta�A user-created :class:`Request <Request>` object.

    Used to prepare a :class:`PreparedRequest <PreparedRequest>`, which is sent to the server.

    :param method: HTTP method to use.
    :param url: URL to send.
    :param headers: dictionary of headers to send.
    :param files: dictionary of {filename: fileobject} files to multipart upload.
    :param data: the body to attach to the request. If a dictionary is provided, form-encoding will take place.
    :param json: json for the body to attach to the request (if files or data is not specified).
    :param params: dictionary of URL parameters to append to the URL.
    :param auth: Auth handler or (user, pass) tuple.
    :param cookies: dictionary or CookieJar of cookies to attach to this request.
    :param hooks: dictionary of callback hooks, for internal usage.

    Usage::

      >>> import requests
      >>> req = requests.Request('GET', 'http://httpbin.org/get')
      >>> req.prepare()
      <PreparedRequest [GET]>
    Nc
Cs�|dkrgn|}|dkrgn|}|dkr,in|}|dkr<in|}|	dkrLin|	}	t�|_x&t|	j��D]\}}|j||d�qfW||_||_||_||_||_	|
|_
||_||_||_
dS)N)rirj)rrgrS�itemsrk�methodr6rNrVrEr.�params�auth�cookies)
r;rpr6rNrVrErqrrrsrgr.rGrHr=r=r>�__init__�s"zRequest.__init__cCs
d|jS)Nz<Request [%s]>)rp)r;r=r=r>�__repr__�szRequest.__repr__cCs<t�}|j|j|j|j|j|j|j|j|j	|j
|jd�
|S)zXConstructs a :class:`PreparedRequest <PreparedRequest>` for transmission and returns it.)
rpr6rNrVrEr.rqrrrsrg)�PreparedRequest�preparerpr6rNrVrEr.rqrrrsrg)r;r<r=r=r>rw�s
zRequest.prepare)
NNNNNNNNNN)r\r]r^�__doc__rtrurwr=r=r=r>rn�s

rnc
@s�eZdZdZdd�Zddd�Zdd�Zd	d
�Zdd�Ze	d
d��Z
dd�Zdd�Zddd�Z
dd�Zd dd�Zdd�Zdd�ZdS)!rva�The fully mutable :class:`PreparedRequest <PreparedRequest>` object,
    containing the exact bytes that will be sent to the server.

    Generated from either a :class:`Request <Request>` object or manually.

    Usage::

      >>> import requests
      >>> req = requests.Request('GET', 'http://httpbin.org/get')
      >>> r = req.prepare()
      <PreparedRequest [GET]>

      >>> s = requests.Session()
      >>> s.send(r)
      <Response [200]>
    cCs0d|_d|_d|_d|_d|_t�|_d|_dS)N)rpr6rN�_cookiesrZrrg�_body_position)r;r=r=r>rtszPreparedRequest.__init__NcCsR|j|�|j||�|j|�|j|�|j|||
�|j||�|j|	�dS)z6Prepares the entire request with the given parameters.N)�prepare_method�prepare_url�prepare_headers�prepare_cookies�prepare_body�prepare_auth�
prepare_hooks)r;rpr6rNrVrErqrrrsrgr.r=r=r>rw+s


zPreparedRequest.preparecCs
d|jS)Nz<PreparedRequest [%s]>)rp)r;r=r=r>ru=szPreparedRequest.__repr__cCsXt�}|j|_|j|_|jdk	r*|jj�nd|_t|j�|_|j|_|j|_|j	|_	|S)N)
rvrpr6rN�copyrryrZrgrz)r;r<r=r=r>r�@szPreparedRequest.copycCs$||_|jdk	r t|jj��|_dS)zPrepares the given HTTP method.N)rpr�upper)r;rpr=r=r>r{Ks
zPreparedRequest.prepare_methodcCs@ddl}y|j|dd�jd�}Wn|jk
r:t�YnX|S)NrT)Zuts46zutf-8)�idnarDrQZ	IDNAError�UnicodeError)�hostr�r=r=r>�_get_idna_encoded_hostQs
z&PreparedRequest._get_idna_encoded_hostcCs0t|t�r|jd�}ntr"t|�nt|�}|j�}d|krT|j�jd�rT||_	dSyt
|�\}}}}}}}	Wn,tk
r�}
zt|
j
��WYdd}
~
XnX|s�d}|jt|d��}t|��|s�td|��t|��sy|j|�}Wntk
�rtd��YnXn|jd��rtd��|�p"d	}|�r2|d
7}||7}|�rP|dt|�7}|�sZd}t�r�t|t��rv|jd�}t|t��r�|jd�}t|t��r�|jd�}t|t��r�|jd�}t|	t��r�|	jd�}	t|ttf��r�t|�}|j|�}
|
�r|�r
d
||
f}n|
}tt|||d||	g��}||_	dS)zPrepares the given HTTP URL.�utf8�:ZhttpNzDInvalid URL {0!r}: No schema supplied. Perhaps you meant http://{0}?z Invalid URL %r: No host suppliedzURL has an invalid label.�*r5�@r3zutf-8z%s&%s)rBr)rQr*Zunicoder(�lstrip�lower�
startswithr6rrr�args�formatrrrr�r�rDrIrr%)r;r6rq�schemerrr�Zportr7r9Zfragment�e�errorZnetlocZ
enc_paramsr=r=r>r|[sh








zPreparedRequest.prepare_urlcCs@t�|_|r<x.|j�D]"}t|�|\}}||jt|�<qWdS)z Prepares the given HTTP headers.N)rrNror#r)r;rN�headerrL�valuer=r=r>r}�szPreparedRequest.prepare_headerscCsvd}d}|r8|dk	r8d}tj|�}t|t�s8|jd�}tt|d�t|ttt	t
jf�g�}yt|�}Wnt
ttfk
r�d}YnX|r�|}t|dd�dk	r�y|j�|_Wn ttfk
r�t�|_YnX|r�td��|r�t|�|jd<n
d|jd	<np|�r|j||�\}}n2|�rF|j|�}t|t��s<t|d
��rBd}nd}|j|�|�rld|jk�rl||jd
<||_dS)z"Prepares the given HTTP body data.Nzapplication/jsonzutf-8rA�tellz1Streamed bodies and files are mutually exclusive.zContent-LengthZchunkedzTransfer-Encodingr@z!application/x-www-form-urlencodedzcontent-typezContent-Type)�complexjson�dumpsrBr)rD�allrCr-rSrRrb�Mappingr"�	TypeError�AttributeErrorr	�getattrr�rz�IOError�OSError�object�NotImplementedErrorr,rNr[rI�prepare_content_lengthrZ)r;rErVr.rZrOZ	is_stream�lengthr=r=r>r�sJ






zPreparedRequest.prepare_bodycCsL|dk	r$t|�}|rHt|�|jd<n$|jdkrH|jjd�dkrHd|jd<dS)z>Prepare Content-Length header based on request method and bodyNzContent-Length�GET�HEAD�0)r�r�)r"r,rNrp�get)r;rZr�r=r=r>r�sz&PreparedRequest.prepare_content_lengthr5cCsj|dkr"t|j�}t|�r|nd}|rft|t�rDt|�dkrDt|�}||�}|jj|j�|j	|j
�dS)z"Prepares the given HTTP auth data.NrJ)rr6�anyrBrRrTr
�__dict__�updater�rZ)r;rrr6Zurl_auth�rr=r=r>r�s
zPreparedRequest.prepare_authcCs@t|tj�r||_n
t|�|_t|j|�}|dk	r<||jd<dS)aPrepares the given HTTP cookie data.

        This function eventually generates a ``Cookie`` header from the
        given cookies using cookielib. Due to cookielib's design, the header
        will not be regenerated if it already exists, meaning this function
        can only be called once for the life of the
        :class:`PreparedRequest <PreparedRequest>` object. Any subsequent calls
        to ``prepare_cookies`` will have no actual effect, unless the "Cookie"
        header is removed beforehand.
        NZCookie)rBr$Z	CookieJarryrrrN)r;rsZ
cookie_headerr=r=r>r~$s
zPreparedRequest.prepare_cookiescCs*|pg}x|D]}|j|||�qWdS)zPrepares the given hooks.N)rk)r;rgrir=r=r>r�8s
zPreparedRequest.prepare_hooks)
NNNNNNNNNN)N)r5)r\r]r^rxrtrwrur�r{r`r�r|r}rr�r�r~r�r=r=r=r>rvs

V
E
rvc
@seZdZdZdddddddd	d
dg
Zdd
�Zdd�Zdd�Zdd�Zdd�Z	dd�Z
dd�Zdd�Zdd�Z
edd��Zed d!��Zed"d#��Zed$d%��Zed&d'��Zd;d*d+�Zed,d,fd-d.�Zed/d0��Zed1d2��Zd3d4�Zed5d6��Zd7d8�Zd9d:�Zd,S)<�ResponsezhThe :class:`Response <Response>` object, which contains a
    server's response to an HTTP request.
    �_content�status_coderNr6�history�encoding�reasonrs�elapsed�requestcCs^d|_d|_d|_d|_t�|_d|_d|_d|_g|_	d|_
ti�|_t
jd�|_d|_dS)NFr)r��_content_consumed�_nextr�rrN�rawr6r�r�r�rrs�datetimeZ	timedeltar�r�)r;r=r=r>rtLs
zResponse.__init__cCs|S)Nr=)r;r=r=r>�	__enter__{szResponse.__enter__cGs|j�dS)N)�close)r;r�r=r=r>�__exit__~szResponse.__exit__cs$�js�jt�fdd��jD��S)Nc3s|]}|t�|d�fVqdS)N)r�)rd�attr)r;r=r>rf�sz(Response.__getstate__.<locals>.<genexpr>)r��content�dict�	__attrs__)r;r=)r;r>�__getstate__�s

zResponse.__getstate__cCs>x |j�D]\}}t|||�q
Wt|dd�t|dd�dS)Nr�Tr�)ro�setattr)r;�staterLr�r=r=r>�__setstate__�szResponse.__setstate__cCs
d|jS)Nz<Response [%s]>)r�)r;r=r=r>ru�szResponse.__repr__cCs|jS)akReturns True if :attr:`status_code` is less than 400.

        This attribute checks if the status code of the response is between
        400 and 600 to see if there was a client error or a server error. If
        the status code, is between 200 and 400, this will return True. This
        is **not** a check to see if the response code is ``200 OK``.
        )�ok)r;r=r=r>�__bool__�szResponse.__bool__cCs|jS)akReturns True if :attr:`status_code` is less than 400.

        This attribute checks if the status code of the response is between
        400 and 600 to see if there was a client error or a server error. If
        the status code, is between 200 and 400, this will return True. This
        is **not** a check to see if the response code is ``200 OK``.
        )r�)r;r=r=r>�__nonzero__�szResponse.__nonzero__cCs
|jd�S)z,Allows you to use a response as an iterator.�)�iter_content)r;r=r=r>rA�szResponse.__iter__cCs&y|j�Wntk
r dSXdS)akReturns True if :attr:`status_code` is less than 400.

        This attribute checks if the status code of the response is between
        400 and 600 to see if there was a client error or a server error. If
        the status code, is between 200 and 400, this will return True. This
        is **not** a check to see if the response code is ``200 OK``.
        FT)�raise_for_statusr)r;r=r=r>r��s
	zResponse.okcCsd|jko|jtkS)z�True if this Response is a well-formed HTTP redirect that could have
        been processed automatically (by :meth:`Session.resolve_redirects`).
        �location)rNr��REDIRECT_STATI)r;r=r=r>�is_redirect�szResponse.is_redirectcCsd|jko|jtjtjfkS)z@True if this Response one of the permanent versions of redirect.r�)rNr�r/Zmoved_permanently�permanent_redirect)r;r=r=r>�is_permanent_redirect�szResponse.is_permanent_redirectcCs|jS)zTReturns a PreparedRequest for the next request in a redirect chain, if there is one.)r�)r;r=r=r>�next�sz
Response.nextcCstj|j�dS)z7The apparent encoding, provided by the chardet library.r�)r+Zdetectr�)r;r=r=r>�apparent_encoding�szResponse.apparent_encodingr
Fcs~��fdd�}�jr(t�jt�r(t��n$�dk	rLt�t�rLtdt����t�j��}|�}�jrh|n|}|rzt	|��}|S)a�Iterates over the response data.  When stream=True is set on the
        request, this avoids reading the content at once into memory for
        large responses.  The chunk size is the number of bytes it should
        read into memory.  This is not necessarily the length of each item
        returned as decoding can take place.

        chunk_size must be of type int or None. A value of None will
        function differently depending on the value of `stream`.
        stream=True will read data as it arrives in whatever size the
        chunks are received. If stream=False, data is returned as
        a single chunk.

        If decode_unicode is True, content will be decoded using the best
        available encoding based on the response.
        c3s�t�jd�r�y$x�jj�dd�D]
}|Vq WWq�tk
rZ}zt|��WYdd}~Xq�tk
r�}zt|��WYdd}~Xq�tk
r�}zt|��WYdd}~Xq�Xnx�jj	��}|s�P|Vq�Wd�_
dS)N�streamT)Zdecode_content)rCr�r�rrrrrrr@r�)�chunkr�)�
chunk_sizer;r=r>�generate�s 
z'Response.iter_content.<locals>.generateNz.chunk_size must be an int, it is instead a %s.)
r�rBr��boolr�intr��typer r)r;r��decode_unicoder�Z
reused_chunksZ
stream_chunksZchunksr=)r�r;r>r��s
zResponse.iter_contentNccs�d}x�|j||d�D]r}|dk	r(||}|r8|j|�}n|j�}|rn|drn|rn|dd|dkrn|j�}nd}x|D]
}|VqxWqW|dk	r�|VdS)z�Iterates over the response data, one line at a time.  When
        stream=True is set on the request, this avoids reading the
        content at once into memory for large responses.

        .. note:: This method is not reentrant safe.
        N)r�r�r
���r�r�r�)r��split�
splitlines�pop)r;r�r�Z	delimiter�pendingr��lines�liner=r=r>�
iter_liness$

zResponse.iter_linescCsZ|jdkrN|jrtd��|jdks,|jdkr4d|_nt�j|jt��pJt�|_d|_|jS)z"Content of the response, in bytes.Fz2The content for this response was already consumedrNT)	r�r��RuntimeErrorr�r�r)r:r��CONTENT_CHUNK_SIZE)r;r=r=r>r�*s
zResponse.contentcCshd}|j}|jstd�S|jdkr(|j}yt|j|dd�}Wn&ttfk
rbt|jdd�}YnX|S)a�Content of the response, in unicode.

        If Response.encoding is None, encoding will be guessed using
        ``chardet``.

        The encoding of the response content is determined based solely on HTTP
        headers, following RFC 2616 to the letter. If you can take advantage of
        non-HTTP knowledge to make a better guess at the encoding, you should
        set ``r.encoding`` appropriately before accessing this property.
        Nr5�replace)�errors)r�r�r(r��LookupErrorr�)r;r�r�r=r=r>�text>s
z
Response.textcKsj|jrZ|jrZt|j�dkrZt|j�}|dk	rZytj|jj|�f|�Stk
rXYnXtj|jf|�S)z�Returns the json-encoded content of a response, if any.

        :param \*\*kwargs: Optional arguments that ``json.loads`` takes.
        :raises ValueError: If the response body does not contain valid json.
        rKN)	r�r�rTr!r��loadsrQ�UnicodeDecodeErrorr�)r;�kwargsr�r=r=r>r.ds
z
Response.jsoncCsJ|jjd�}i}|rFt|�}x(|D] }|jd�p8|jd�}|||<q"W|S)z8Returns the parsed header links of the response, if any.�linkZrelr6)rNr�r)r;r��l�linksr��keyr=r=r>r�~s
zResponse.linkscCs�d}t|jt�rDy|jjd�}WqJtk
r@|jjd�}YqJXn|j}d|jko^dknrxd|j||jf}n,d|jko�dknr�d|j||jf}|r�t||d	��d
S)z2Raises stored :class:`HTTPError`, if one occurred.r5zutf-8z
iso-8859-1i�i�z%s Client Error: %s for url: %siXz%s Server Error: %s for url: %s)ZresponseN)rBr�r)rQr�r�r6r)r;Zhttp_error_msgr�r=r=r>r��szResponse.raise_for_statuscCs0|js|jj�t|jdd�}|dk	r,|�dS)z�Releases the connection back to the pool. Once this method has been
        called the underlying ``raw`` object must not be accessed again.

        *Note: Should not normally need to be called explicitly.*
        �release_connN)r�r�r�r�)r;r�r=r=r>r��s

zResponse.close)r
F)r\r]r^rxr�rtr�r�r�r�rur�r�rAr_r�r�r�r�r�r��ITER_CHUNK_SIZEr�r�r�r.r�r�r�r=r=r=r>r�Bs2
/


7&r�i()Qrxrbr��sysZencodings.idnaZ	encodingsZpip._vendor.urllib3.fieldsrZpip._vendor.urllib3.filepostrZpip._vendor.urllib3.utilrZpip._vendor.urllib3.exceptionsrrrr�ior	rgrZ
structuresrrrr
rsrrr�
exceptionsrrrrrrrZ_internal_utilsrrZutilsrrrrrrr r!r"r#�compatr$r%r&r'r(r)r*r+r,r-r.r�Zstatus_codesr/Zmoved�found�otherZtemporary_redirectr�r�ZDEFAULT_REDIRECT_LIMITr�r�r�r2rarnrvr�r=r=r=r>�<module>sD$00nF<_vendor/requests/__pycache__/_internal_utils.cpython-36.opt-1.pyc000064400000002243151733136370021055 0ustar003

�PfH�@s.dZddlmZmZmZd	dd�Zdd�ZdS)
z�
requests._internal_utils
~~~~~~~~~~~~~~

Provides utility functions that are consumed internally by Requests
which depend on extremely few external helpers (such as compat)
�)�is_py2�builtin_str�str�asciicCs.t|t�r|}ntr |j|�}n
|j|�}|S)z�Given a string object, regardless of type, returns a representation of
    that string in the native string type, encoding and decoding where
    necessary. This assumes ASCII unless told otherwise.
    )�
isinstancerr�encode�decode)�string�encoding�out�r�%/usr/lib/python3.6/_internal_utils.py�to_native_strings

rcCs(y|jd�dStk
r"dSXdS)z�Determine if unicode string only contains ASCII characters.

    :param str u_string: unicode string to check. Must be unicode
        and not Python 2 `str`.
    :rtype: bool
    rTFN)r�UnicodeEncodeError)Zu_stringrrr
�unicode_is_asciis

rN)r)�__doc__�compatrrrrrrrrr
�<module>	s
_vendor/requests/__pycache__/cookies.cpython-36.pyc000064400000044057151733136370016370 0ustar003

�Pf G�
@sdZddlZddlZddlZddlZddlmZddlmZm	Z	m
Z
mZyddlZWne
k
rpddlZYnXGdd�de�ZGdd	�d	e�Zd
d�Zdd
�Zddd�ZGdd�de�ZGdd�dejej�Zdd�Zdd�Zdd�Zd dd�Zdd�ZdS)!z�
requests.cookies
~~~~~~~~~~~~~~~~

Compatibility code to be able to use `cookielib.CookieJar` with requests.

requests.utils imports from here, so be careful with imports.
�N�)�to_native_string)�	cookielib�urlparse�
urlunparse�Morselc@s�eZdZdZdd�Zdd�Zdd�Zdd	�Zd
d�Zdd
�Z	dd�Z
ddd�Zdd�Zdd�Z
dd�Zedd��Zedd��Zedd��ZdS) �MockRequesta�Wraps a `requests.Request` to mimic a `urllib2.Request`.

    The code in `cookielib.CookieJar` expects this interface in order to correctly
    manage cookie policies, i.e., determine whether a cookie can be set, given the
    domains of the request and the cookie.

    The original request object is read-only. The client is responsible for collecting
    the new headers via `get_new_headers()` and interpreting them appropriately. You
    probably want `get_cookie_header`, defined below.
    cCs ||_i|_t|jj�j|_dS)N)�_r�_new_headersr�url�scheme�type)�self�request�r�/usr/lib/python3.6/cookies.py�__init__&szMockRequest.__init__cCs|jS)N)r
)rrrr�get_type+szMockRequest.get_typecCst|jj�jS)N)rr	rZnetloc)rrrr�get_host.szMockRequest.get_hostcCs|j�S)N)r)rrrr�get_origin_req_host1szMockRequest.get_origin_req_hostcCsT|jjjd�s|jjSt|jjddd�}t|jj�}t|j||j|j	|j
|jg�S)NZHostzutf-8)�encoding)r	�headers�getrrrrr�pathZparamsZqueryZfragment)r�hostZparsedrrr�get_full_url4szMockRequest.get_full_urlcCsdS)NTr)rrrr�is_unverifiableBszMockRequest.is_unverifiablecCs||jjkp||jkS)N)r	rr
)r�namerrr�
has_headerEszMockRequest.has_headerNcCs|jjj||jj||��S)N)r	rrr
)rr�defaultrrr�
get_headerHszMockRequest.get_headercCstd��dS)zMcookielib has no legitimate use for this method; add it back if you find one.z=Cookie headers should be added with add_unredirected_header()N)�NotImplementedError)r�key�valrrr�
add_headerKszMockRequest.add_headercCs||j|<dS)N)r
)rr�valuerrr�add_unredirected_headerOsz#MockRequest.add_unredirected_headercCs|jS)N)r
)rrrr�get_new_headersRszMockRequest.get_new_headerscCs|j�S)N)r)rrrr�unverifiableUszMockRequest.unverifiablecCs|j�S)N)r)rrrr�origin_req_hostYszMockRequest.origin_req_hostcCs|j�S)N)r)rrrrr]szMockRequest.host)N)�__name__�
__module__�__qualname__�__doc__rrrrrrrr r$r&r'�propertyr(r)rrrrrrs

rc@s(eZdZdZdd�Zdd�Zdd�ZdS)	�MockResponsez�Wraps a `httplib.HTTPMessage` to mimic a `urllib.addinfourl`.

    ...what? Basically, expose the parsed HTTP headers from the server response
    the way `cookielib` expects to see them.
    cCs
||_dS)z�Make a MockResponse for `cookielib` to read.

        :param headers: a httplib.HTTPMessage or analogous carrying the headers
        N)�_headers)rrrrrriszMockResponse.__init__cCs|jS)N)r0)rrrr�infopszMockResponse.infocCs|jj|�dS)N)r0�
getheaders)rrrrrr2sszMockResponse.getheadersN)r*r+r,r-rr1r2rrrrr/bsr/cCs8t|d�o|jsdSt|�}t|jj�}|j||�dS)z�Extract the cookies from the response into a CookieJar.

    :param jar: cookielib.CookieJar (not necessarily a RequestsCookieJar)
    :param request: our own requests.Request object
    :param response: urllib3.HTTPResponse object
    �_original_responseN)�hasattrr3rr/�msgZextract_cookies)�jarrZresponseZreq�resrrr�extract_cookies_to_jarws
r8cCs t|�}|j|�|j�jd�S)zj
    Produce an appropriate Cookie header string to be sent with `request`, or None.

    :rtype: str
    �Cookie)rZadd_cookie_headerr'r)r6r�rrrr�get_cookie_header�s
r;cCs�g}xV|D]N}|j|krq
|dk	r.||jkr.q
|dk	rB||jkrBq
|j|j|j|jf�q
Wx |D]\}}}|j|||�qbWdS)zkUnsets a cookie by name, by default over all domains and paths.

    Wraps CookieJar.clear(), is O(n).
    N)r�domainr�append�clear)�	cookiejarrr<rZ
clearables�cookierrr�remove_cookie_by_name�s

rAc@seZdZdZdS)�CookieConflictErrorz�There are two cookies that meet the criteria specified in the cookie jar.
    Use .get and .set and include domain and path args in order to be more specific.
    N)r*r+r,r-rrrrrB�srBcs�eZdZdZd1dd�Zdd�Zdd�Zd	d
�Zdd�Zd
d�Z	dd�Z
dd�Zdd�Zdd�Z
dd�Zd2dd�Z�fdd�Zdd�Zdd �Zd!d"�Z�fd#d$�Z�fd%d&�Zd3d'd(�Zd4d)d*�Zd+d,�Zd-d.�Zd/d0�Z�ZS)5�RequestsCookieJara�Compatibility class; is a cookielib.CookieJar, but exposes a dict
    interface.

    This is the CookieJar we create by default for requests and sessions that
    don't specify one, since some clients may expect response.cookies and
    session.cookies to support dict operations.

    Requests does not use the dict interface internally; it's just for
    compatibility with external client code. All requests code should work
    out of the box with externally provided instances of ``CookieJar``, e.g.
    ``LWPCookieJar`` and ``FileCookieJar``.

    Unlike a regular CookieJar, this class is pickleable.

    .. warning:: dictionary operations that are normally O(1) may be O(n).
    NcCs(y|j|||�Stk
r"|SXdS)z�Dict-like get() that also supports optional domain and path args in
        order to resolve naming collisions from using one cookie jar over
        multiple domains.

        .. warning:: operation is O(n), not O(1).
        N)�_find_no_duplicates�KeyError)rrrr<rrrrr�szRequestsCookieJar.getcKsX|dkr(t|||jd�|jd�d�dSt|t�r<t|�}nt||f|�}|j|�|S)z�Dict-like set() that also supports optional domain and path args in
        order to resolve naming collisions from using one cookie jar over
        multiple domains.
        Nr<r)r<r)rAr�
isinstancer�morsel_to_cookie�
create_cookie�
set_cookie)rrr%�kwargs�crrr�set�s


zRequestsCookieJar.setccsxt|�D]}|jVq
WdS)z�Dict-like iterkeys() that returns an iterator of names of cookies
        from the jar.

        .. seealso:: itervalues() and iteritems().
        N)�iterr)rr@rrr�iterkeys�szRequestsCookieJar.iterkeyscCst|j��S)z�Dict-like keys() that returns a list of names of cookies from the
        jar.

        .. seealso:: values() and items().
        )�listrN)rrrr�keys�szRequestsCookieJar.keysccsxt|�D]}|jVq
WdS)z�Dict-like itervalues() that returns an iterator of values of cookies
        from the jar.

        .. seealso:: iterkeys() and iteritems().
        N)rMr%)rr@rrr�
itervalues�szRequestsCookieJar.itervaluescCst|j��S)z�Dict-like values() that returns a list of values of cookies from the
        jar.

        .. seealso:: keys() and items().
        )rOrQ)rrrr�values�szRequestsCookieJar.valuesccs$xt|�D]}|j|jfVq
WdS)z�Dict-like iteritems() that returns an iterator of name-value tuples
        from the jar.

        .. seealso:: iterkeys() and itervalues().
        N)rMrr%)rr@rrr�	iteritems�szRequestsCookieJar.iteritemscCst|j��S)z�Dict-like items() that returns a list of name-value tuples from the
        jar. Allows client-code to call ``dict(RequestsCookieJar)`` and get a
        vanilla python dict of key value pairs.

        .. seealso:: keys() and values().
        )rOrS)rrrr�itemsszRequestsCookieJar.itemscCs0g}x&t|�D]}|j|kr|j|j�qW|S)z2Utility method to list all the domains in the jar.)rMr<r=)r�domainsr@rrr�list_domainss

zRequestsCookieJar.list_domainscCs0g}x&t|�D]}|j|kr|j|j�qW|S)z0Utility method to list all the paths in the jar.)rMrr=)r�pathsr@rrr�
list_pathss

zRequestsCookieJar.list_pathscCs>g}x4t|�D](}|jdk	r*|j|kr*dS|j|j�qWdS)zvReturns True if there are multiple domains in the jar.
        Returns False otherwise.

        :rtype: bool
        NTF)rMr<r=)rrUr@rrr�multiple_domainssz"RequestsCookieJar.multiple_domainscCsJi}x@t|�D]4}|dks$|j|kr|dks6|j|kr|j||j<qW|S)z�Takes as an argument an optional domain and path and returns a plain
        old Python dict of name-value pairs of cookies that meet the
        requirements.

        :rtype: dict
        N)rMr<rr%r)rr<rZ
dictionaryr@rrr�get_dict,szRequestsCookieJar.get_dictcs*ytt|�j|�Stk
r$dSXdS)NT)�superrC�__contains__rB)rr)�	__class__rrr\<szRequestsCookieJar.__contains__cCs
|j|�S)z�Dict-like __getitem__() for compatibility with client code. Throws
        exception if there are more than one cookie with name. In that case,
        use the more explicit get() method instead.

        .. warning:: operation is O(n), not O(1).
        )rD)rrrrr�__getitem__BszRequestsCookieJar.__getitem__cCs|j||�dS)z�Dict-like __setitem__ for compatibility with client code. Throws
        exception if there is already a cookie of that name in the jar. In that
        case, use the more explicit set() method instead.
        N)rL)rrr%rrr�__setitem__KszRequestsCookieJar.__setitem__cCst||�dS)zlDeletes a cookie given a name. Wraps ``cookielib.CookieJar``'s
        ``remove_cookie_by_name()``.
        N)rA)rrrrr�__delitem__RszRequestsCookieJar.__delitem__csLt|jd�r4|jjd�r4|jjd�r4|jjdd�|_tt|�j|f|�|�S)N�
startswith�"z\"�)r4r%ra�endswith�replacer[rCrI)rr@�argsrJ)r]rrrIXs$zRequestsCookieJar.set_cookiecs@t|tj�r,x.|D]}|jtj|��qWntt|�j|�dS)zAUpdates this jar with cookies from another CookieJar or dict-likeN)rFr�	CookieJarrI�copyr[rC�update)r�otherr@)r]rrri]s
zRequestsCookieJar.updatecCs\xDt|�D]8}|j|kr
|dks*|j|kr
|dks<|j|kr
|jSq
Wtd|||f��dS)a�Requests uses this method internally to get cookie values.

        If there are conflicting cookies, _find arbitrarily chooses one.
        See _find_no_duplicates if you want an exception thrown if there are
        conflicting cookies.

        :param name: a string containing name of cookie
        :param domain: (optional) string containing domain of cookie
        :param path: (optional) string containing path of cookie
        :return: cookie.value
        Nzname=%r, domain=%r, path=%r)rMrr<rr%rE)rrr<rr@rrr�_findes

zRequestsCookieJar._findcCs|d}xXt|�D]L}|j|kr|dks.|j|kr|dks@|j|kr|dk	rTtd|��|j}qW|rf|Std|||f��dS)a�Both ``__get_item__`` and ``get`` call this function: it's never
        used elsewhere in Requests.

        :param name: a string containing name of cookie
        :param domain: (optional) string containing domain of cookie
        :param path: (optional) string containing path of cookie
        :raises KeyError: if cookie is not found
        :raises CookieConflictError: if there are multiple cookies
            that match name and optionally domain and path
        :return: cookie.value
        Nz(There are multiple cookies with name, %rzname=%r, domain=%r, path=%r)rMrr<rrBr%rE)rrr<rZtoReturnr@rrrrDys

z%RequestsCookieJar._find_no_duplicatescCs|jj�}|jd�|S)z4Unlike a normal CookieJar, this class is pickleable.�
_cookies_lock)�__dict__rh�pop)r�staterrr�__getstate__�s

zRequestsCookieJar.__getstate__cCs$|jj|�d|jkr tj�|_dS)z4Unlike a normal CookieJar, this class is pickleable.rlN)rmri�	threading�RLockrl)rrorrr�__setstate__�s
zRequestsCookieJar.__setstate__cCst�}|j|�|S)z(Return a copy of this RequestsCookieJar.)rCri)rZnew_cjrrrrh�s
zRequestsCookieJar.copy)NNN)NN)NN)NN)r*r+r,r-rrLrNrPrQrRrSrTrVrXrYrZr\r^r_r`rIrirkrDrprsrh�
__classcell__rr)r]rrC�s0
				
	

rCcCsR|dkrdSt|d�r|j�Stj|�}|j�x|D]}|jtj|��q6W|S)Nrh)r4rhr>rI)r6Znew_jarr@rrr�_copy_cookie_jar�s


rucKs�td||ddddddddddidd�
}t|�t|�}|rNd	}t|t|���|j|�t|d
�|d<t|d�|d
<|djd�|d<t|d�|d<tjf|�S)z�Make a cookie from underspecified parameters.

    By default, the pair of `name` and `value` will be set for the domain ''
    and sent on every request (this is sometimes called a "supercookie").
    rNrc�/FT�HttpOnly)
�versionrr%�portr<r�secure�expires�discard�comment�comment_url�rest�rfc2109z4create_cookie() got unexpected keyword arguments: %sryZport_specifiedr<Zdomain_specified�.Zdomain_initial_dotrZpath_specified)	�dictrL�	TypeErrorrOri�boolrarr9)rr%rJ�resultZbadargs�errrrrrH�s0
rHcCs�d}|drPyttj�t|d��}Wqrtk
rLtd|d��YqrXn"|drrd}tjtj|d|��}t|dt|d�d|d||j	|d	dd
|didt|d�|j
|d
p�dd�
S)zBConvert a Morsel object into a Cookie containing the one k/v pair.Nzmax-agezmax-age: %s must be integerr{z%a, %d-%b-%Y %H:%M:%S GMTr}Fr<rrwZhttponlyrzrxr)
r}r~r|r<r{rrryrr�rzr%rx)�int�time�
ValueErrorr��calendarZtimegmZstrptimerHr�r"r%)Zmorselr{Z
time_templaterrrrG�s0


rGTcCsV|dkrt�}|dk	rRdd�|D�}x,|D]$}|s:||kr*|jt|||��q*W|S)a-Returns a CookieJar from a key/value dictionary.

    :param cookie_dict: Dict of key/values to insert into CookieJar.
    :param cookiejar: (optional) A cookiejar to add the cookies to.
    :param overwrite: (optional) If False, will not replace cookies
        already in the jar with new ones.
    NcSsg|]
}|j�qSr)r)�.0r@rrr�
<listcomp>sz'cookiejar_from_dict.<locals>.<listcomp>)rCrIrH)Zcookie_dictr?�	overwriteZnames_from_jarrrrr�cookiejar_from_dict�s
r�cCszt|tj�std��t|t�r.t||dd�}nHt|tj�rvy|j|�Wn,tk
rtx|D]}|j|�q^WYnX|S)z�Add cookies to cookiejar and returns a merged CookieJar.

    :param cookiejar: CookieJar object to add the cookies to.
    :param cookies: Dictionary or CookieJar object to be added.
    z!You can only merge into CookieJarF)r?r�)	rFrrgr�r�r�ri�AttributeErrorrI)r?ZcookiesZ
cookie_in_jarrrr�
merge_cookiess

r�)NN)NT)r-rhr�r��collectionsZ_internal_utilsr�compatrrrrrq�ImportErrorZdummy_threading�objectrr/r8r;rA�RuntimeErrorrBrg�MutableMappingrCrurHrGr�r�rrrr�<module>
s.H
{#
_vendor/requests/__pycache__/_internal_utils.cpython-36.pyc000064400000002315151733136370020116 0ustar003

�PfH�@s.dZddlmZmZmZd	dd�Zdd�ZdS)
z�
requests._internal_utils
~~~~~~~~~~~~~~

Provides utility functions that are consumed internally by Requests
which depend on extremely few external helpers (such as compat)
�)�is_py2�builtin_str�str�asciicCs.t|t�r|}ntr |j|�}n
|j|�}|S)z�Given a string object, regardless of type, returns a representation of
    that string in the native string type, encoding and decoding where
    necessary. This assumes ASCII unless told otherwise.
    )�
isinstancerr�encode�decode)�string�encoding�out�r�%/usr/lib/python3.6/_internal_utils.py�to_native_strings

rcCs6t|t�st�y|jd�dStk
r0dSXdS)z�Determine if unicode string only contains ASCII characters.

    :param str u_string: unicode string to check. Must be unicode
        and not Python 2 `str`.
    :rtype: bool
    rTFN)rr�AssertionErrorr�UnicodeEncodeError)Zu_stringrrr
�unicode_is_asciis
rN)r)�__doc__�compatrrrrrrrrr
�<module>	s
_vendor/requests/__pycache__/structures.cpython-36.opt-1.pyc000064400000010346151733136400020102 0ustar003

�Pf��@s>dZddlZddlmZGdd�dej�ZGdd�de�ZdS)	zO
requests.structures
~~~~~~~~~~~~~~~~~~~

Data structures that power Requests.
�N�)�OrderedDictc@sbeZdZdZddd�Zdd�Zdd�Zd	d
�Zdd�Zd
d�Z	dd�Z
dd�Zdd�Zdd�Z
dS)�CaseInsensitiveDicta�A case-insensitive ``dict``-like object.

    Implements all methods and operations of
    ``collections.MutableMapping`` as well as dict's ``copy``. Also
    provides ``lower_items``.

    All keys are expected to be strings. The structure remembers the
    case of the last key to be set, and ``iter(instance)``,
    ``keys()``, ``items()``, ``iterkeys()``, and ``iteritems()``
    will contain case-sensitive keys. However, querying and contains
    testing is case insensitive::

        cid = CaseInsensitiveDict()
        cid['Accept'] = 'application/json'
        cid['aCCEPT'] == 'application/json'  # True
        list(cid) == ['Accept']  # True

    For example, ``headers['content-encoding']`` will return the
    value of a ``'Content-Encoding'`` response header, regardless
    of how the header name was originally stored.

    If the constructor, ``.update``, or equality comparison
    operations are given keys that have equal ``.lower()``s, the
    behavior is undefined.
    NcKs&t�|_|dkri}|j|f|�dS)N)r�_store�update)�self�data�kwargs�r
� /usr/lib/python3.6/structures.py�__init__*szCaseInsensitiveDict.__init__cCs||f|j|j�<dS)N)r�lower)r�key�valuer
r
r�__setitem__0szCaseInsensitiveDict.__setitem__cCs|j|j�dS)Nr)rr
)rrr
r
r�__getitem__5szCaseInsensitiveDict.__getitem__cCs|j|j�=dS)N)rr
)rrr
r
r�__delitem__8szCaseInsensitiveDict.__delitem__cCsdd�|jj�D�S)Ncss|]\}}|VqdS)Nr
)�.0ZcasedkeyZmappedvaluer
r
r�	<genexpr><sz/CaseInsensitiveDict.__iter__.<locals>.<genexpr>)r�values)rr
r
r�__iter__;szCaseInsensitiveDict.__iter__cCs
t|j�S)N)�lenr)rr
r
r�__len__>szCaseInsensitiveDict.__len__cCsdd�|jj�D�S)z.Like iteritems(), but with all lowercase keys.css|]\}}||dfVqdS)rNr
)rZlowerkeyZkeyvalr
r
rrDsz2CaseInsensitiveDict.lower_items.<locals>.<genexpr>)r�items)rr
r
r�lower_itemsAszCaseInsensitiveDict.lower_itemscCs2t|tj�rt|�}ntSt|j��t|j��kS)N)�
isinstance�collections�Mappingr�NotImplemented�dictr)r�otherr
r
r�__eq__Is
zCaseInsensitiveDict.__eq__cCst|jj��S)N)rrr)rr
r
r�copyRszCaseInsensitiveDict.copycCstt|j���S)N)�strrr)rr
r
r�__repr__UszCaseInsensitiveDict.__repr__)N)�__name__�
__module__�__qualname__�__doc__rrrrrrrr!r"r$r
r
r
rrs
	rcs<eZdZdZd�fdd�	Zdd�Zdd�Zdd	d
�Z�ZS)
�
LookupDictzDictionary lookup object.Ncs||_tt|�j�dS)N)�name�superr)r)rr*)�	__class__r
rr\szLookupDict.__init__cCs
d|jS)Nz
<lookup '%s'>)r*)rr
r
rr$`szLookupDict.__repr__cCs|jj|d�S)N)�__dict__�get)rrr
r
rrcszLookupDict.__getitem__cCs|jj||�S)N)r-r.)rr�defaultr
r
rr.hszLookupDict.get)N)N)	r%r&r'r(rr$rr.�
__classcell__r
r
)r,rr)Ys
r))r(r�compatr�MutableMappingrrr)r
r
r
r�<module>sJ_vendor/requests/__pycache__/status_codes.cpython-36.opt-1.pyc000064400000007012151733136400020353 0ustar003

�Pf��F@s�ddlmZd�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�dz�DZed{d|�ZxNej�D]B\ZZx6eD].Zeeee�ej	dă�s�eeej
�e��q�W�q�WdS)��)�
LookupDict�continue�switching_protocols�
processing�
checkpoint�uri_too_long�request_uri_too_long�ok�okay�all_ok�all_okay�all_good�\o/�✓�created�accepted�non_authoritative_info�non_authoritative_information�
no_content�
reset_content�reset�partial_content�partial�multi_status�multiple_status�multi_stati�multiple_stati�already_reported�im_used�multiple_choices�moved_permanently�moved�\o-�found�	see_other�other�not_modified�	use_proxy�switch_proxy�temporary_redirect�temporary_moved�	temporary�permanent_redirect�resume_incomplete�resume�bad_request�bad�unauthorized�payment_required�payment�	forbidden�	not_found�-o-�method_not_allowed�not_allowed�not_acceptable�proxy_authentication_required�
proxy_auth�proxy_authentication�request_timeout�timeout�conflict�gone�length_required�precondition_failed�precondition�request_entity_too_large�request_uri_too_large�unsupported_media_type�unsupported_media�
media_type�requested_range_not_satisfiable�requested_range�range_not_satisfiable�expectation_failed�im_a_teapot�teapot�
i_am_a_teapot�misdirected_request�unprocessable_entity�
unprocessable�locked�failed_dependency�
dependency�unordered_collection�	unordered�upgrade_required�upgrade�precondition_required�too_many_requests�too_many�header_fields_too_large�fields_too_large�no_response�none�
retry_with�retry�$blocked_by_windows_parental_controls�parental_controls�unavailable_for_legal_reasons�
legal_reasons�client_closed_request�internal_server_error�server_error�/o\�✗�not_implemented�bad_gateway�service_unavailable�unavailable�gateway_timeout�http_version_not_supported�http_version�variant_also_negotiates�insufficient_storage�bandwidth_limit_exceeded�	bandwidth�not_extended�network_authentication_required�network_auth�network_authentication)D�d�e�f�g�z��������������������i,i-i.i/i0i1i2i3i4i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�Zstatus_codes)�name�\�/N)r)r)r)r)rr)r	r
rrr
rr)r)r)rr)r)rr)rr)rrrr)r)r)r)r r!r")r#)r$r%)r&)r')r()r)r*r+)r,r-r.)r/r0)r1)r2r3)r4)r5r6)r7r8)r9)r:r;r<)r=r>)r?)r@)rA)rBrC)rD)rE)rFrGrH)rIrJrK)rL)rMrNrO)rP)rQrR)rS)rTrU)rVrW)rXrY)rZrC)r[r\)r]r^)r_r`)rarb)rcrd)rerf)rg)rhrirjrk)rl)rm)rnro)rp)rqrr)rs)rt)rurv)rw)rxryrz)r�r�)Z
structuresrZ_codesZcodes�items�codeZtitles�title�setattr�
startswith�upper�r�r��"/usr/lib/python3.6/status_codes.py�<module>s�

_vendor/requests/__pycache__/exceptions.cpython-36.pyc000064400000012174151733136400017102 0ustar003

�Pf+�@s�dZddlmZGdd�de�ZGdd�de�ZGdd�de�ZGd	d
�d
e�ZGdd�de�ZGd
d�de�Z	Gdd�dee	�Z
Gdd�de	�ZGdd�de�ZGdd�de�Z
Gdd�dee�ZGdd�dee�ZGdd�dee�ZGdd�dee�ZGdd �d e�ZGd!d"�d"ee�ZGd#d$�d$ee�ZGd%d&�d&e�ZGd'd(�d(e�ZGd)d*�d*e�ZGd+d,�d,ee�ZGd-d.�d.e�Zd/S)0z`
requests.exceptions
~~~~~~~~~~~~~~~~~~~

This module contains the set of Requests' exceptions.
�)�	HTTPErrorcs eZdZdZ�fdd�Z�ZS)�RequestExceptionzTThere was an ambiguous exception that occurred while handling your
    request.
    csZ|jdd�}||_|jdd�|_|dk	rD|jrDt|d�rD|jj|_tt|�j||�dS)zBInitialize RequestException with `request` and `response` objects.�responseN�request)�poprr�hasattr�superr�__init__)�self�args�kwargsr)�	__class__�� /usr/lib/python3.6/exceptions.pyr	s

zRequestException.__init__)�__name__�
__module__�__qualname__�__doc__r	�
__classcell__rr)r
rrsrc@seZdZdZdS)rzAn HTTP error occurred.N)rrrrrrrrrsrc@seZdZdZdS)�ConnectionErrorzA Connection error occurred.N)rrrrrrrrr src@seZdZdZdS)�
ProxyErrorzA proxy error occurred.N)rrrrrrrrr$src@seZdZdZdS)�SSLErrorzAn SSL error occurred.N)rrrrrrrrr(src@seZdZdZdS)�Timeoutz�The request timed out.

    Catching this error will catch both
    :exc:`~requests.exceptions.ConnectTimeout` and
    :exc:`~requests.exceptions.ReadTimeout` errors.
    N)rrrrrrrrr,src@seZdZdZdS)�ConnectTimeoutz�The request timed out while trying to connect to the remote server.

    Requests that produced this error are safe to retry.
    N)rrrrrrrrr5src@seZdZdZdS)�ReadTimeoutz@The server did not send any data in the allotted amount of time.N)rrrrrrrrr<src@seZdZdZdS)�URLRequiredz*A valid URL is required to make a request.N)rrrrrrrrr@src@seZdZdZdS)�TooManyRedirectszToo many redirects.N)rrrrrrrrrDsrc@seZdZdZdS)�
MissingSchemaz/The URL schema (e.g. http or https) is missing.N)rrrrrrrrrHsrc@seZdZdZdS)�
InvalidSchemaz"See defaults.py for valid schemas.N)rrrrrrrrrLsrc@seZdZdZdS)�
InvalidURLz%The URL provided was somehow invalid.N)rrrrrrrrrPsrc@seZdZdZdS)�
InvalidHeaderz.The header value provided was somehow invalid.N)rrrrrrrrr Tsr c@seZdZdZdS)�ChunkedEncodingErrorz?The server declared chunked encoding but sent an invalid chunk.N)rrrrrrrrr!Xsr!c@seZdZdZdS)�ContentDecodingErrorz!Failed to decode response contentN)rrrrrrrrr"\sr"c@seZdZdZdS)�StreamConsumedErrorz2The content for this response was already consumedN)rrrrrrrrr#`sr#c@seZdZdZdS)�
RetryErrorzCustom retries logic failedN)rrrrrrrrr$dsr$c@seZdZdZdS)�UnrewindableBodyErrorz:Requests encountered an error when trying to rewind a bodyN)rrrrrrrrr%hsr%c@seZdZdZdS)�RequestsWarningzBase warning for Requests.N)rrrrrrrrr&nsr&c@seZdZdZdS)�FileModeWarningzJA file was opened in text mode, but Requests determined its binary length.N)rrrrrrrrr'ssr'c@seZdZdZdS)�RequestsDependencyWarningz@An imported dependency doesn't match the expected version range.N)rrrrrrrrr(xsr(N)rZpip._vendor.urllib3.exceptionsrZ
BaseHTTPError�IOErrorrrrrrrrrr�
ValueErrorrrrr r!r"�	TypeErrorr#r$r%�Warningr&�DeprecationWarningr'r(rrrr�<module>s.	_vendor/requests/__pycache__/models.cpython-36.pyc000064400000056505151733136400016212 0ustar003

�Pf��@s�dZddlZddlZddlZddlZddlmZddlm	Z	ddl
mZddlm
Z
mZmZmZddlmZdd	lmZdd
lmZddlmZddlmZmZmZdd
lmZmZm Z m!Z!m"Z"m#Z#m$Z$ddl%m&Z&m'Z'ddl(m)Z)m*Z*m+Z+m,Z,m-Z-m.Z.m/Z/m0Z0m1Z1m2Z2ddl3m4Z4m5Z5m6Z6m7Z7m8Z8m9Z9m:Z:m;Z;m<Z<m=Z=ddl3m>Z?ddl@mAZAeAjBeAjCeAjDeAjEeAjFfZGdZHd!ZIdZJGdd�deK�ZLGdd�deK�ZMGdd�deM�ZNGdd�deLeM�ZOGdd �d eK�ZPdS)"z`
requests.models
~~~~~~~~~~~~~~~

This module contains the primary objects that power Requests.
�N)�RequestField)�encode_multipart_formdata)�	parse_url)�DecodeError�ReadTimeoutError�
ProtocolError�LocationParseError)�UnsupportedOperation�)�
default_hooks)�CaseInsensitiveDict)�
HTTPBasicAuth)�cookiejar_from_dict�get_cookie_header�_copy_cookie_jar)�	HTTPError�
MissingSchema�
InvalidURL�ChunkedEncodingError�ContentDecodingError�ConnectionError�StreamConsumedError)�to_native_string�unicode_is_ascii)
�guess_filename�get_auth_from_url�requote_uri�stream_decode_response_unicode�to_key_val_list�parse_header_links�iter_slices�guess_json_utf�	super_len�check_header_validity)
�	cookielib�
urlunparse�urlsplit�	urlencode�str�bytes�is_py2�chardet�builtin_str�
basestring)�json)�codes��
iic@s0eZdZedd��Zedd��Zedd��ZdS)�RequestEncodingMixincCsNg}t|j�}|j}|sd}|j|�|j}|rD|jd�|j|�dj|�S)zBuild the path URL to use.�/�?�)r&�url�path�append�query�join)�selfr6�pr7r9�r=�/usr/lib/python3.6/models.py�path_url=s



zRequestEncodingMixin.path_urlcCs�t|ttf�r|St|d�r |St|d�r�g}x|t|�D]p\}}t|t�sVt|d�r\|g}xJ|D]B}|dk	rb|jt|t�r�|jd�n|t|t�r�|jd�n|f�qbWq8Wt|dd�S|SdS)z�Encode parameters in a piece of data.

        Will successfully encode parameters when passed as a dict or a list of
        2-tuples. Order is retained if data is a list of 2-tuples but arbitrary
        if parameters are supplied as a dict.
        �read�__iter__Nzutf-8T)Zdoseq)	�
isinstancer(r)�hasattrrr-r8�encoder')�data�result�kZvs�vr=r=r>�_encode_paramsRs 	


$z#RequestEncodingMixin._encode_paramscCs�|std��nt|t�r td��g}t|p,i�}t|p8i�}x�|D]�\}}t|t�s`t|d�rf|g}x\|D]T}|dk	rlt|t�s�t|�}|jt|t�r�|jd�n|t|t�r�|j	d�n|f�qlWqBWx�|D]�\}}d}d}	t|t
tf��r.t|�dk�r|\}
}n&t|�dk�r |\}
}}n|\}
}}}	nt
|��p:|}
|}t|tttf��rX|}n|j�}t|||
|	d�}
|
j|d	�|j|
�q�Wt|�\}}||fS)
a�Build the body for a multipart/form-data request.

        Will successfully encode files when passed as a dict or a list of
        tuples. Order is retained if data is a list of tuples but arbitrary
        if parameters are supplied as a dict.
        The tuples may be 2-tuples (filename, fileobj), 3-tuples (filename, fileobj, contentype)
        or 4-tuples (filename, fileobj, contentype, custom_headers).
        zFiles must be provided.zData must not be a string.rANzutf-8��)�namerE�filename�headers)�content_type)�
ValueErrorrBr-rrCr)r(r8�decoderD�tuple�list�lenr�	bytearrayr@rZmake_multipartr)�filesrEZ
new_fieldsZfieldsZfield�valrHrGZftZfh�fn�fpZfdataZrf�bodyrOr=r=r>�
_encode_filesmsH




$
z"RequestEncodingMixin._encode_filesN)�__name__�
__module__�__qualname__�propertyr?�staticmethodrIr[r=r=r=r>r2<sr2c@seZdZdd�Zdd�ZdS)�RequestHooksMixincCs\||jkrtd|��t|tj�r4|j|j|�n$t|d�rX|j|jdd�|D��dS)zProperly register a hook.z1Unsupported event specified, with event name "%s"rAcss|]}t|tj�r|VqdS)N)rB�collections�Callable)�.0�hr=r=r>�	<genexpr>�sz2RequestHooksMixin.register_hook.<locals>.<genexpr>N)�hooksrPrBrbrcr8rC�extend)r;�event�hookr=r=r>�
register_hook�s

zRequestHooksMixin.register_hookcCs.y|j|j|�dStk
r(dSXdS)ziDeregister a previously registered hook.
        Returns True if the hook existed, False if not.
        TFN)rg�removerP)r;rirjr=r=r>�deregister_hook�s
z!RequestHooksMixin.deregister_hookN)r\r]r^rkrmr=r=r=r>ra�srac
@s*eZdZdZd	dd�Zdd�Zdd�ZdS)
�Requesta�A user-created :class:`Request <Request>` object.

    Used to prepare a :class:`PreparedRequest <PreparedRequest>`, which is sent to the server.

    :param method: HTTP method to use.
    :param url: URL to send.
    :param headers: dictionary of headers to send.
    :param files: dictionary of {filename: fileobject} files to multipart upload.
    :param data: the body to attach to the request. If a dictionary is provided, form-encoding will take place.
    :param json: json for the body to attach to the request (if files or data is not specified).
    :param params: dictionary of URL parameters to append to the URL.
    :param auth: Auth handler or (user, pass) tuple.
    :param cookies: dictionary or CookieJar of cookies to attach to this request.
    :param hooks: dictionary of callback hooks, for internal usage.

    Usage::

      >>> import requests
      >>> req = requests.Request('GET', 'http://httpbin.org/get')
      >>> req.prepare()
      <PreparedRequest [GET]>
    Nc
Cs�|dkrgn|}|dkrgn|}|dkr,in|}|dkr<in|}|	dkrLin|	}	t�|_x&t|	j��D]\}}|j||d�qfW||_||_||_||_||_	|
|_
||_||_||_
dS)N)rirj)rrgrS�itemsrk�methodr6rNrVrEr.�params�auth�cookies)
r;rpr6rNrVrErqrrrsrgr.rGrHr=r=r>�__init__�s"zRequest.__init__cCs
d|jS)Nz<Request [%s]>)rp)r;r=r=r>�__repr__�szRequest.__repr__cCs<t�}|j|j|j|j|j|j|j|j|j	|j
|jd�
|S)zXConstructs a :class:`PreparedRequest <PreparedRequest>` for transmission and returns it.)
rpr6rNrVrEr.rqrrrsrg)�PreparedRequest�preparerpr6rNrVrEr.rqrrrsrg)r;r<r=r=r>rw�s
zRequest.prepare)
NNNNNNNNNN)r\r]r^�__doc__rtrurwr=r=r=r>rn�s

rnc
@s�eZdZdZdd�Zddd�Zdd�Zd	d
�Zdd�Ze	d
d��Z
dd�Zdd�Zddd�Z
dd�Zd dd�Zdd�Zdd�ZdS)!rva�The fully mutable :class:`PreparedRequest <PreparedRequest>` object,
    containing the exact bytes that will be sent to the server.

    Generated from either a :class:`Request <Request>` object or manually.

    Usage::

      >>> import requests
      >>> req = requests.Request('GET', 'http://httpbin.org/get')
      >>> r = req.prepare()
      <PreparedRequest [GET]>

      >>> s = requests.Session()
      >>> s.send(r)
      <Response [200]>
    cCs0d|_d|_d|_d|_d|_t�|_d|_dS)N)rpr6rN�_cookiesrZrrg�_body_position)r;r=r=r>rtszPreparedRequest.__init__NcCsR|j|�|j||�|j|�|j|�|j|||
�|j||�|j|	�dS)z6Prepares the entire request with the given parameters.N)�prepare_method�prepare_url�prepare_headers�prepare_cookies�prepare_body�prepare_auth�
prepare_hooks)r;rpr6rNrVrErqrrrsrgr.r=r=r>rw+s


zPreparedRequest.preparecCs
d|jS)Nz<PreparedRequest [%s]>)rp)r;r=r=r>ru=szPreparedRequest.__repr__cCsXt�}|j|_|j|_|jdk	r*|jj�nd|_t|j�|_|j|_|j|_|j	|_	|S)N)
rvrpr6rN�copyrryrZrgrz)r;r<r=r=r>r�@szPreparedRequest.copycCs$||_|jdk	r t|jj��|_dS)zPrepares the given HTTP method.N)rpr�upper)r;rpr=r=r>r{Ks
zPreparedRequest.prepare_methodcCs@ddl}y|j|dd�jd�}Wn|jk
r:t�YnX|S)NrT)Zuts46zutf-8)�idnarDrQZ	IDNAError�UnicodeError)�hostr�r=r=r>�_get_idna_encoded_hostQs
z&PreparedRequest._get_idna_encoded_hostcCs0t|t�r|jd�}ntr"t|�nt|�}|j�}d|krT|j�jd�rT||_	dSyt
|�\}}}}}}}	Wn,tk
r�}
zt|
j
��WYdd}
~
XnX|s�d}|jt|d��}t|��|s�td|��t|��sy|j|�}Wntk
�rtd��YnXn|jd��rtd��|�p"d	}|�r2|d
7}||7}|�rP|dt|�7}|�sZd}t�r�t|t��rv|jd�}t|t��r�|jd�}t|t��r�|jd�}t|t��r�|jd�}t|	t��r�|	jd�}	t|ttf��r�t|�}|j|�}
|
�r|�r
d
||
f}n|
}tt|||d||	g��}||_	dS)zPrepares the given HTTP URL.�utf8�:ZhttpNzDInvalid URL {0!r}: No schema supplied. Perhaps you meant http://{0}?z Invalid URL %r: No host suppliedzURL has an invalid label.�*r5�@r3zutf-8z%s&%s)rBr)rQr*Zunicoder(�lstrip�lower�
startswithr6rrr�args�formatrrrr�r�rDrIrr%)r;r6rq�schemerrr�Zportr7r9Zfragment�e�errorZnetlocZ
enc_paramsr=r=r>r|[sh








zPreparedRequest.prepare_urlcCs@t�|_|r<x.|j�D]"}t|�|\}}||jt|�<qWdS)z Prepares the given HTTP headers.N)rrNror#r)r;rN�headerrL�valuer=r=r>r}�szPreparedRequest.prepare_headerscCsvd}d}|r8|dk	r8d}tj|�}t|t�s8|jd�}tt|d�t|ttt	t
jf�g�}yt|�}Wnt
ttfk
r�d}YnX|r�|}t|dd�dk	r�y|j�|_Wn ttfk
r�t�|_YnX|r�td��|r�t|�|jd<n
d|jd	<np|�r|j||�\}}n2|�rF|j|�}t|t��s<t|d
��rBd}nd}|j|�|�rld|jk�rl||jd
<||_dS)z"Prepares the given HTTP body data.Nzapplication/jsonzutf-8rA�tellz1Streamed bodies and files are mutually exclusive.zContent-LengthZchunkedzTransfer-Encodingr@z!application/x-www-form-urlencodedzcontent-typezContent-Type)�complexjson�dumpsrBr)rD�allrCr-rSrRrb�Mappingr"�	TypeError�AttributeErrorr	�getattrr�rz�IOError�OSError�object�NotImplementedErrorr,rNr[rI�prepare_content_lengthrZ)r;rErVr.rZrOZ	is_stream�lengthr=r=r>r�sJ






zPreparedRequest.prepare_bodycCsL|dk	r$t|�}|rHt|�|jd<n$|jdkrH|jjd�dkrHd|jd<dS)z>Prepare Content-Length header based on request method and bodyNzContent-Length�GET�HEAD�0)r�r�)r"r,rNrp�get)r;rZr�r=r=r>r�sz&PreparedRequest.prepare_content_lengthr5cCsj|dkr"t|j�}t|�r|nd}|rft|t�rDt|�dkrDt|�}||�}|jj|j�|j	|j
�dS)z"Prepares the given HTTP auth data.NrJ)rr6�anyrBrRrTr
�__dict__�updater�rZ)r;rrr6Zurl_auth�rr=r=r>r�s
zPreparedRequest.prepare_authcCs@t|tj�r||_n
t|�|_t|j|�}|dk	r<||jd<dS)aPrepares the given HTTP cookie data.

        This function eventually generates a ``Cookie`` header from the
        given cookies using cookielib. Due to cookielib's design, the header
        will not be regenerated if it already exists, meaning this function
        can only be called once for the life of the
        :class:`PreparedRequest <PreparedRequest>` object. Any subsequent calls
        to ``prepare_cookies`` will have no actual effect, unless the "Cookie"
        header is removed beforehand.
        NZCookie)rBr$Z	CookieJarryrrrN)r;rsZ
cookie_headerr=r=r>r~$s
zPreparedRequest.prepare_cookiescCs*|pg}x|D]}|j|||�qWdS)zPrepares the given hooks.N)rk)r;rgrir=r=r>r�8s
zPreparedRequest.prepare_hooks)
NNNNNNNNNN)N)r5)r\r]r^rxrtrwrur�r{r`r�r|r}rr�r�r~r�r=r=r=r>rvs

V
E
rvc
@seZdZdZdddddddd	d
dg
Zdd
�Zdd�Zdd�Zdd�Zdd�Z	dd�Z
dd�Zdd�Zdd�Z
edd��Zed d!��Zed"d#��Zed$d%��Zed&d'��Zd;d*d+�Zed,d,fd-d.�Zed/d0��Zed1d2��Zd3d4�Zed5d6��Zd7d8�Zd9d:�Zd,S)<�ResponsezhThe :class:`Response <Response>` object, which contains a
    server's response to an HTTP request.
    �_content�status_coderNr6�history�encoding�reasonrs�elapsed�requestcCs^d|_d|_d|_d|_t�|_d|_d|_d|_g|_	d|_
ti�|_t
jd�|_d|_dS)NFr)r��_content_consumed�_nextr�rrN�rawr6r�r�r�rrs�datetimeZ	timedeltar�r�)r;r=r=r>rtLs
zResponse.__init__cCs|S)Nr=)r;r=r=r>�	__enter__{szResponse.__enter__cGs|j�dS)N)�close)r;r�r=r=r>�__exit__~szResponse.__exit__cs$�js�jt�fdd��jD��S)Nc3s|]}|t�|d�fVqdS)N)r�)rd�attr)r;r=r>rf�sz(Response.__getstate__.<locals>.<genexpr>)r��content�dict�	__attrs__)r;r=)r;r>�__getstate__�s

zResponse.__getstate__cCs>x |j�D]\}}t|||�q
Wt|dd�t|dd�dS)Nr�Tr�)ro�setattr)r;�staterLr�r=r=r>�__setstate__�szResponse.__setstate__cCs
d|jS)Nz<Response [%s]>)r�)r;r=r=r>ru�szResponse.__repr__cCs|jS)akReturns True if :attr:`status_code` is less than 400.

        This attribute checks if the status code of the response is between
        400 and 600 to see if there was a client error or a server error. If
        the status code, is between 200 and 400, this will return True. This
        is **not** a check to see if the response code is ``200 OK``.
        )�ok)r;r=r=r>�__bool__�szResponse.__bool__cCs|jS)akReturns True if :attr:`status_code` is less than 400.

        This attribute checks if the status code of the response is between
        400 and 600 to see if there was a client error or a server error. If
        the status code, is between 200 and 400, this will return True. This
        is **not** a check to see if the response code is ``200 OK``.
        )r�)r;r=r=r>�__nonzero__�szResponse.__nonzero__cCs
|jd�S)z,Allows you to use a response as an iterator.�)�iter_content)r;r=r=r>rA�szResponse.__iter__cCs&y|j�Wntk
r dSXdS)akReturns True if :attr:`status_code` is less than 400.

        This attribute checks if the status code of the response is between
        400 and 600 to see if there was a client error or a server error. If
        the status code, is between 200 and 400, this will return True. This
        is **not** a check to see if the response code is ``200 OK``.
        FT)�raise_for_statusr)r;r=r=r>r��s
	zResponse.okcCsd|jko|jtkS)z�True if this Response is a well-formed HTTP redirect that could have
        been processed automatically (by :meth:`Session.resolve_redirects`).
        �location)rNr��REDIRECT_STATI)r;r=r=r>�is_redirect�szResponse.is_redirectcCsd|jko|jtjtjfkS)z@True if this Response one of the permanent versions of redirect.r�)rNr�r/Zmoved_permanently�permanent_redirect)r;r=r=r>�is_permanent_redirect�szResponse.is_permanent_redirectcCs|jS)zTReturns a PreparedRequest for the next request in a redirect chain, if there is one.)r�)r;r=r=r>�next�sz
Response.nextcCstj|j�dS)z7The apparent encoding, provided by the chardet library.r�)r+Zdetectr�)r;r=r=r>�apparent_encoding�szResponse.apparent_encodingr
Fcs~��fdd�}�jr(t�jt�r(t��n$�dk	rLt�t�rLtdt����t�j��}|�}�jrh|n|}|rzt	|��}|S)a�Iterates over the response data.  When stream=True is set on the
        request, this avoids reading the content at once into memory for
        large responses.  The chunk size is the number of bytes it should
        read into memory.  This is not necessarily the length of each item
        returned as decoding can take place.

        chunk_size must be of type int or None. A value of None will
        function differently depending on the value of `stream`.
        stream=True will read data as it arrives in whatever size the
        chunks are received. If stream=False, data is returned as
        a single chunk.

        If decode_unicode is True, content will be decoded using the best
        available encoding based on the response.
        c3s�t�jd�r�y$x�jj�dd�D]
}|Vq WWq�tk
rZ}zt|��WYdd}~Xq�tk
r�}zt|��WYdd}~Xq�tk
r�}zt|��WYdd}~Xq�Xnx�jj	��}|s�P|Vq�Wd�_
dS)N�streamT)Zdecode_content)rCr�r�rrrrrrr@r�)�chunkr�)�
chunk_sizer;r=r>�generate�s 
z'Response.iter_content.<locals>.generateNz.chunk_size must be an int, it is instead a %s.)
r�rBr��boolr�intr��typer r)r;r��decode_unicoder�Z
reused_chunksZ
stream_chunksZchunksr=)r�r;r>r��s
zResponse.iter_contentNccs�d}x�|j||d�D]r}|dk	r(||}|r8|j|�}n|j�}|rn|drn|rn|dd|dkrn|j�}nd}x|D]
}|VqxWqW|dk	r�|VdS)z�Iterates over the response data, one line at a time.  When
        stream=True is set on the request, this avoids reading the
        content at once into memory for large responses.

        .. note:: This method is not reentrant safe.
        N)r�r�r
���r�r�r�)r��split�
splitlines�pop)r;r�r�Z	delimiter�pendingr��lines�liner=r=r>�
iter_liness$

zResponse.iter_linescCsZ|jdkrN|jrtd��|jdks,|jdkr4d|_nt�j|jt��pJt�|_d|_|jS)z"Content of the response, in bytes.Fz2The content for this response was already consumedrNT)	r�r��RuntimeErrorr�r�r)r:r��CONTENT_CHUNK_SIZE)r;r=r=r>r�*s
zResponse.contentcCshd}|j}|jstd�S|jdkr(|j}yt|j|dd�}Wn&ttfk
rbt|jdd�}YnX|S)a�Content of the response, in unicode.

        If Response.encoding is None, encoding will be guessed using
        ``chardet``.

        The encoding of the response content is determined based solely on HTTP
        headers, following RFC 2616 to the letter. If you can take advantage of
        non-HTTP knowledge to make a better guess at the encoding, you should
        set ``r.encoding`` appropriately before accessing this property.
        Nr5�replace)�errors)r�r�r(r��LookupErrorr�)r;r�r�r=r=r>�text>s
z
Response.textcKsj|jrZ|jrZt|j�dkrZt|j�}|dk	rZytj|jj|�f|�Stk
rXYnXtj|jf|�S)z�Returns the json-encoded content of a response, if any.

        :param \*\*kwargs: Optional arguments that ``json.loads`` takes.
        :raises ValueError: If the response body does not contain valid json.
        rKN)	r�r�rTr!r��loadsrQ�UnicodeDecodeErrorr�)r;�kwargsr�r=r=r>r.ds
z
Response.jsoncCsJ|jjd�}i}|rFt|�}x(|D] }|jd�p8|jd�}|||<q"W|S)z8Returns the parsed header links of the response, if any.�linkZrelr6)rNr�r)r;r��l�linksr��keyr=r=r>r�~s
zResponse.linkscCs�d}t|jt�rDy|jjd�}WqJtk
r@|jjd�}YqJXn|j}d|jko^dknrxd|j||jf}n,d|jko�dknr�d|j||jf}|r�t||d	��d
S)z2Raises stored :class:`HTTPError`, if one occurred.r5zutf-8z
iso-8859-1i�i�z%s Client Error: %s for url: %siXz%s Server Error: %s for url: %s)ZresponseN)rBr�r)rQr�r�r6r)r;Zhttp_error_msgr�r=r=r>r��szResponse.raise_for_statuscCs0|js|jj�t|jdd�}|dk	r,|�dS)z�Releases the connection back to the pool. Once this method has been
        called the underlying ``raw`` object must not be accessed again.

        *Note: Should not normally need to be called explicitly.*
        �release_connN)r�r�r�r�)r;r�r=r=r>r��s

zResponse.close)r
F)r\r]r^rxr�rtr�r�r�r�rur�r�rAr_r�r�r�r�r�r��ITER_CHUNK_SIZEr�r�r�r.r�r�r�r=r=r=r>r�Bs2
/


7&r�i()Qrxrbr��sysZencodings.idnaZ	encodingsZpip._vendor.urllib3.fieldsrZpip._vendor.urllib3.filepostrZpip._vendor.urllib3.utilrZpip._vendor.urllib3.exceptionsrrrr�ior	rgrZ
structuresrrrr
rsrrr�
exceptionsrrrrrrrZ_internal_utilsrrZutilsrrrrrrr r!r"r#�compatr$r%r&r'r(r)r*r+r,r-r.r�Zstatus_codesr/Zmoved�found�otherZtemporary_redirectr�r�ZDEFAULT_REDIRECT_LIMITr�r�r�r2rarnrvr�r=r=r=r>�<module>sD$00nF<_vendor/requests/__pycache__/__version__.cpython-36.opt-1.pyc000064400000000727151733136400020142 0ustar003

�Pf��@s,dZdZdZdZdZdZdZdZdZd	Z	d
S)ZrequestszPython HTTP for Humans.zhttp://python-requests.orgz2.18.4iz
Kenneth Reitzzme@kennethreitz.orgz
Apache 2.0zCopyright 2017 Kenneth Reitzu✨ 🍰 ✨N)
Z	__title__Z__description__Z__url__�__version__Z	__build__�
__author__Z__author_email__Z__license__Z
__copyright__Z__cake__�rr�!/usr/lib/python3.6/__version__.py�<module>s_vendor/requests/__pycache__/utils.cpython-36.opt-1.pyc000064400000050320151733136400017013 0ustar003

�Pf/l�@s�dZddlZddlZddlZddlZddlZddlZddlZddlZddl	Z	ddl
Z
ddlZddlmZddl
mZddlmZddlmZddlmZmZmZmZmZmZmZmZmZmZmZmZm Z m!Z!dd	l"m#Z#dd
l$m%Z%ddl&m'Z'm(Z(m)Z)m*Z*dfZ+ej,�Z-ddd�Z.ej/�dk�r0dd�Z0dd�Zdd�Z1dd�Z2dgdd�Z3dd�Z4dd �Z5d!d"�Z6d#d$�Z7d%d&�Z8dhd'd(�Z9d)d*�Z:d+d,�Z;d-d.�Z<d/d0�Z=d1d2�Z>d3d4�Z?d5d6�Z@eAdi�ZBd9d:�ZCd;d<�ZDd=d>�ZEd?d@�ZFdAdB�ZGdCdD�ZHejIdEdF��ZJdGdH�ZKdjdIdJ�ZLdKdL�ZMdkdNdO�ZNdPdQ�ZOdRdS�ZPdTjQdU�ZReRdVZSeRdWZTdXdY�ZUdZd[�ZVd\d]�ZWejXd^�ZYejXd_�ZZd`da�Z[dbdc�Z\ddde�Z]dS)lz�
requests.utils
~~~~~~~~~~~~~~

This module provides utility functions that are used within Requests
that are also useful for external consumption.
�N�)�__version__)�certs)�to_native_string)�parse_http_list)�quote�urlparse�bytes�str�OrderedDict�unquote�
getproxies�proxy_bypass�
urlunparse�
basestring�
integer_types�is_py3�proxy_bypass_environment�getproxies_environment)�cookiejar_from_dict)�CaseInsensitiveDict)�
InvalidURL�
InvalidHeader�FileModeWarning�UnrewindableBodyError�.netrc�_netrc�Pi�)ZhttpZhttpsZWindowsc
Cs�trddl}nddl}y2|j|jd�}|j|d�d}|j|d�d}Wntk
r\dSX|sj|rndS|jd�}xX|D]P}|dkr�d|kr�d	S|jdd
�}|jdd�}|jd
d�}t	j
||t	j�r~d	Sq~WdS)Nrz;Software\Microsoft\Windows\CurrentVersion\Internet SettingsZProxyEnableZ
ProxyOverrideF�;z<local>�.Tz\.�*z.*�?)r�winreg�_winreg�OpenKey�HKEY_CURRENT_USERZQueryValueEx�OSError�split�replace�re�match�I)�hostr"ZinternetSettingsZproxyEnableZ
proxyOverrideZtest�r-�/usr/lib/python3.6/utils.py�proxy_bypass_registry.s2



r/cCst�rt|�St|�SdS)z�Return True, if the host should be bypassed.

        Checks proxy settings gathered from the environment, if specified,
        or the registry.
        N)rrr/)r,r-r-r.rOsrcCst|d�r|j�}|S)z/Returns an internal sequence dictionary update.�items)�hasattrr0)�dr-r-r.�dict_to_sequence[s
r3cCs2d}d}t|d�rt|�}nbt|d�r.|j}nPt|d�r~y|j�}Wntjk
rZYn$Xtj|�j}d|jkr~t	j
dt�t|d��ry|j�}Wn$t
tfk
r�|dk	r�|}Yn\Xt|d�o�|dk�ry&|jdd	�|j�}|j|p�d�Wnt
tfk
�rd}YnX|dk�r$d}td||�S)
Nr�__len__�len�fileno�ba%Requests has determined the content-length for this request using the binary size of the file: however, the file has been opened in text mode (i.e. without the 'b' flag in the mode). This may lead to an incorrect content-length. In Requests 3.0, support will be removed for files in text mode.�tell�seek�)r1r5r6�io�UnsupportedOperation�os�fstat�st_size�mode�warnings�warnrr8r&�IOErrorr9�max)�oZtotal_lengthZcurrent_positionr6r-r-r.�	super_lends@







rFFcCsy�ddlm}m}d}xJtD]B}ytjjdj|��}Wntk
rJdSXtjj|�r|}PqW|dkrndSt	|�}d}t
|t�r�|jd�}|j
j|�d}	y6||�j|	�}
|
r�|
dr�dnd}|
||
dfSWn|tfk
r�|r�YnXWnttfk
�rYnXdS)	z;Returns the Requests tuple auth for a given url from netrc.r)�netrc�NetrcParseErrorNz~/{0}�:�asciirr:)rGrH�NETRC_FILESr=�path�
expanduser�format�KeyError�existsr�
isinstancer
�decode�netlocr'ZauthenticatorsrC�ImportError�AttributeError)�urlZraise_errorsrGrHZ
netrc_path�f�locZriZsplitstrr,rZlogin_ir-r-r.�get_netrc_auth�s8


rYcCsBt|dd�}|r>t|t�r>|ddkr>|ddkr>tjj|�SdS)z0Tries to guess the filename of the given object.�nameNr�<r�>���)�getattrrQrr=rL�basename)�objrZr-r-r.�guess_filename�sracCs.|dkrdSt|ttttf�r&td��t|�S)a�Take an object and test to see if it can be represented as a
    dictionary. Unless it can not be represented as such, return an
    OrderedDict, e.g.,

    ::

        >>> from_key_val_list([('key', 'val')])
        OrderedDict([('key', 'val')])
        >>> from_key_val_list('string')
        ValueError: need more than 1 value to unpack
        >>> from_key_val_list({'key': 'val'})
        OrderedDict([('key', 'val')])

    :rtype: OrderedDict
    Nz+cannot encode objects that are not 2-tuples)rQr
r	�bool�int�
ValueErrorr)�valuer-r-r.�from_key_val_list�s
rfcCsB|dkrdSt|ttttf�r&td��t|tj�r:|j�}t	|�S)a�Take an object and test to see if it can be represented as a
    dictionary. If it can be, return a list of tuples, e.g.,

    ::

        >>> to_key_val_list([('key', 'val')])
        [('key', 'val')]
        >>> to_key_val_list({'key': 'val'})
        [('key', 'val')]
        >>> to_key_val_list('string')
        ValueError: cannot encode objects that are not 2-tuples.

    :rtype: list
    Nz+cannot encode objects that are not 2-tuples)
rQr
r	rbrcrd�collections�Mappingr0�list)rer-r-r.�to_key_val_list�srjcCs\g}xRt|�D]F}|dd�|dd�ko4dknrJt|dd��}|j|�qW|S)aParse lists as described by RFC 2068 Section 2.

    In particular, parse comma-separated lists where the elements of
    the list may include quoted-strings.  A quoted-string could
    contain a comma.  A non-quoted string could have quotes in the
    middle.  Quotes are removed automatically after parsing.

    It basically works like :func:`parse_set_header` just that items
    may appear multiple times and case sensitivity is preserved.

    The return value is a standard :class:`list`:

    >>> parse_list_header('token, "quoted value"')
    ['token', 'quoted value']

    To create a header from the :class:`list` again, use the
    :func:`dump_header` function.

    :param value: a string with a list header.
    :return: :class:`list`
    :rtype: list
    Nr�"r]r])�_parse_list_header�unquote_header_value�append)re�result�itemr-r-r.�parse_list_headers(rqcCs|i}xrt|�D]f}d|kr$d||<q|jdd�\}}|dd�|dd�koVdknrlt|dd��}|||<qW|S)a^Parse lists of key, value pairs as described by RFC 2068 Section 2 and
    convert them into a python dict:

    >>> d = parse_dict_header('foo="is a fish", bar="as well"')
    >>> type(d) is dict
    True
    >>> sorted(d.items())
    [('bar', 'as well'), ('foo', 'is a fish')]

    If there is no value for a key it will be `None`:

    >>> parse_dict_header('key_without_value')
    {'key_without_value': None}

    To create a header from the :class:`dict` again, use the
    :func:`dump_header` function.

    :param value: a string with a dict header.
    :return: :class:`dict`
    :rtype: dict
    �=Nrrkr]r])rlr'rm)rerorprZr-r-r.�parse_dict_header1s(rscCs^|rZ|d|d	kodknrZ|dd
�}|sF|dd�dkrZ|jdd�jdd�S|S)z�Unquotes a header value.  (Reversal of :func:`quote_header_value`).
    This does not use the real unquoting but what browsers are actually
    using for quoting.

    :param value: the header value to unquote.
    :rtype: str
    rrrkNr:z\\�\z\"r]r])r()reZis_filenamer-r-r.rmTs
$rmcCs"i}x|D]}|j||j<q
W|S)z�Returns a key/value dictionary from a CookieJar.

    :param cj: CookieJar object to extract cookies from.
    :rtype: dict
    )rerZ)�cj�cookie_dictZcookier-r-r.�dict_from_cookiejarms
rwcCs
t||�S)z�Returns a CookieJar from a key/value dictionary.

    :param cj: CookieJar to insert cookies into.
    :param cookie_dict: Dict of key/values to insert into CookieJar.
    :rtype: CookieJar
    )r)rurvr-r-r.�add_dict_to_cookiejar|srxcCsTtjdt�tjdtjd�}tjdtjd�}tjd�}|j|�|j|�|j|�S)zlReturns encodings from given content string.

    :param content: bytestring to extract encodings from.
    z�In requests 3.0, get_encodings_from_content will be removed. For more information, please see the discussion on issue #2266. (This warning should only appear once.)z!<meta.*?charset=["\']*(.+?)["\'>])�flagsz+<meta.*?content=["\']*;?charset=(.+?)["\'>]z$^<\?xml.*?encoding=["\']*(.+?)["\'>])rArB�DeprecationWarningr)�compiler+�findall)�contentZ
charset_reZ	pragma_reZxml_rer-r-r.�get_encodings_from_content�s
r~cCsF|jd�}|sdStj|�\}}d|kr6|djd�Sd|krBdSdS)z}Returns encodings from given HTTP Header Dict.

    :param headers: dictionary to extract encoding from.
    :rtype: str
    zcontent-typeN�charsetz'"�textz
ISO-8859-1)�get�cgiZparse_header�strip)�headersZcontent_type�paramsr-r-r.�get_encoding_from_headers�s
r�ccsr|jdkr"x|D]
}|VqWdStj|j�dd�}x |D]}|j|�}|r:|Vq:W|jddd�}|rn|VdS)zStream decodes a iterator.Nr()�errors�T)�final)�encoding�codecs�getincrementaldecoderrR)�iterator�rrp�decoder�chunk�rvr-r-r.�stream_decode_response_unicode�s





r�ccsLd}|dks|dkrt|�}x*|t|�krF||||�V||7}qWdS)z Iterate over slices of a string.rN)r5)�stringZslice_length�posr-r-r.�iter_slices�sr�cCsvtjdt�g}t|j�}|rJyt|j|�Stk
rH|j|�YnXyt|j|dd�St	k
rp|jSXdS)z�Returns the requested content back in unicode.

    :param r: Response object to get unicode content from.

    Tried:

    1. charset from content-type
    2. fall back and replace all unicode characters

    :rtype: str
    z�In requests 3.0, get_unicode_from_response will be removed. For more information, please see the discussion on issue #2266. (This warning should only appear once.)r()r�N)
rArBrzr�r�r
r}�UnicodeErrorrn�	TypeError)r�Ztried_encodingsr�r-r-r.�get_unicode_from_response�s
r�Z4ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyzz0123456789-._~c
Cs�|jd�}x�tdt|��D]�}||dd�}t|�dkr�|j�r�ytt|d��}Wn tk
rttd|��YnX|tkr�|||dd�||<q�d||||<qd||||<qWdj	|�S)	z�Un-escape any percent-escape sequences in a URI that are unreserved
    characters. This leaves all reserved, illegal and non-ASCII bytes encoded.

    :rtype: str
    �%rrr:�z%Invalid percent-escape sequence: '%s'N�)
r'�ranger5�isalnum�chrrcrdr�UNRESERVED_SET�join)�uri�parts�i�h�cr-r-r.�unquote_unreserved�s
r�cCs:d}d}ytt|�|d�Stk
r4t||d�SXdS)z�Re-quote the given URI.

    This function passes the given URI through an unquote/quote cycle to
    ensure that it is fully and consistently quoted.

    :rtype: str
    z!#$%&'()*+,/:;=?@[]~z!#$&'()*+,/:;=?@[]~)ZsafeN)rr�r)r�Zsafe_with_percentZsafe_without_percentr-r-r.�requote_uri
sr�cCsltjdtj|��d}|jd�\}}tjdtjtt|����d}tjdtj|��d|@}||@||@kS)z�This function allows you to check if an IP belongs to a network subnet

    Example: returns True if ip = 192.168.1.1 and net = 192.168.1.0/24
             returns False if ip = 192.168.1.1 and net = 192.168.100.0/24

    :rtype: bool
    z=Lr�/)�struct�unpack�socket�	inet_atonr'�dotted_netmaskrc)�ipZnetZipaddrZnetaddr�bitsZnetmaskZnetworkr-r-r.�address_in_network#s
r�cCs&ddd|>dA}tjtjd|��S)z�Converts mask from /xx format to xxx.xxx.xxx.xxx

    Example: if mask is 24 function returns 255.255.255.0

    :rtype: str
    l��r� z>I)r�Z	inet_ntoar��pack)�maskr�r-r-r.r�2sr�cCs*ytj|�Wntjk
r$dSXdS)z
    :rtype: bool
    FT)r�r��error)Z	string_ipr-r-r.�is_ipv4_address=s
r�cCs�|jd�dkr�yt|jd�d�}Wntk
r8dSX|dksJ|dkrNdSytj|jd�d�Wq�tjk
r|dSXndSdS)zV
    Very simple check of the cidr format in no_proxy variable.

    :rtype: bool
    r�rFr�rT)�countrcr'rdr�r�r�)Zstring_networkr�r-r-r.�
is_valid_cidrHsr�ccsT|dk	}|r"tjj|�}|tj|<z
dVWd|rN|dkrDtj|=n
|tj|<XdS)z�Set the environment variable 'env_name' to 'value'

    Save previous value, yield, and then restore the previous value stored in
    the environment variable 'env_name'.

    If 'value' is None, do nothingN)r=�environr�)Zenv_namereZ
value_changedZ	old_valuer-r-r.�set_environ`s


r�c	Csdd�}|}|dkr|d�}t|�j}|r�dd�|jdd�jd	�D�}|jd
�d}t|�r�xb|D](}t|�r~t||�r�dSqb||krbdSqbWn0x.|D]&}|j|�s�|jd
�dj|�r�dSq�Wtd|��2yt	|�}Wnt
tjfk
r�d
}YnXWdQRX|�rdSd
S)zL
    Returns whether we should bypass proxies or not.

    :rtype: bool
    cSstjj|�ptjj|j��S)N)r=r�r��upper)�kr-r-r.�<lambda>|sz'should_bypass_proxies.<locals>.<lambda>N�no_proxycss|]}|r|VqdS)Nr-)�.0r,r-r-r.�	<genexpr>�sz(should_bypass_proxies.<locals>.<genexpr>� r��,�:rTF)
rrSr(r'r�r�r��endswithr�rr�r�Zgaierror)	rVr�Z	get_proxyZno_proxy_argrSr�Zproxy_ipr,Zbypassr-r-r.�should_bypass_proxiesvs4




r�cCst||d�riSt�SdS)zA
    Return a dict of environment proxies.

    :rtype: dict
    )r�N)r�r
)rVr�r-r-r.�get_environ_proxies�sr�cCsv|pi}t|�}|jdkr.|j|j|jd��S|jd|j|jd|jdg}d}x|D]}||krX||}PqXW|S)z�Select a proxy for the url, if applicable.

    :param url: The url being for the request
    :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs
    N�allz://zall://)rZhostnamer��scheme)rVZproxiesZurlpartsZ
proxy_keys�proxyZ	proxy_keyr-r-r.�select_proxy�s

r��python-requestscCsd|tfS)zO
    Return a string representing the default user agent.

    :rtype: str
    z%s/%s)r)rZr-r-r.�default_user_agent�sr�cCstt�djd�ddd��S)z9
    :rtype: requests.structures.CaseInsensitiveDict
    z, �gzip�deflatez*/*z
keep-alive)z
User-AgentzAccept-EncodingZAcceptZ
Connection)r�r�)rr�r�r-r-r-r.�default_headers�s
r�c	Cs�g}d}x�tjd|�D]�}y|jdd�\}}Wntk
rL|d}}YnXd|jd�i}xP|jd�D]B}y|jd�\}}Wntk
r�PYnX|j|�||j|�<qhW|j|�qW|S)	z�Return a dict of parsed link headers proxies.

    i.e. Link: <http:/.../front.jpeg>; rel=front; type="image/jpeg",<http://.../back.jpeg>; rel=back;type="image/jpeg"

    :rtype: list
    z '"z, *<rrr�rVz<> '"rr)r)r'rdr�rn)	reZlinksZ
replace_chars�valrVr��linkZparam�keyr-r-r.�parse_header_links�s r��rJr:�cCs�|dd�}|tjtjfkr dS|dd�tjkr6dS|dd�tjtjfkrRdS|jt�}|dkrhd	S|dkr�|ddd�tkr�d
S|ddd�tkr�dS|dkr�|dd�t	kr�d
S|dd�t	kr�dSdS)z
    :rtype: str
    N�zutf-32r�z	utf-8-sigr:zutf-16rzutf-8z	utf-16-berz	utf-16-lez	utf-32-bez	utf-32-le)
r��BOM_UTF32_LE�BOM_UTF32_BE�BOM_UTF8�BOM_UTF16_LE�BOM_UTF16_BEr��_null�_null2�_null3)�dataZsampleZ	nullcountr-r-r.�guess_json_utfs*
r�cCs8t||�\}}}}}}|s$||}}t||||||f�S)z�Given a URL that may or may not have a scheme, prepend the given scheme.
    Does not replace a present scheme with the one provided as an argument.

    :rtype: str
    )rr)rVZ
new_schemer�rSrLr��query�fragmentr-r-r.�prepend_scheme_if_needed1s
r�cCsBt|�}yt|j�t|j�f}Wnttfk
r<d}YnX|S)z{Given a url with authentication components, extract them into a tuple of
    username,password.

    :rtype: (str,str)
    r�)r�r�)rrZusernameZpasswordrUr�)rVZparsedZauthr-r-r.�get_auth_from_urlBs
r�s^\S[^\r\n]*$|^$z^\S[^\r\n]*$|^$cCsf|\}}t|t�rt}nt}y|j|�s4td|��Wn*tk
r`td||t|�f��YnXdS)z�Verifies that header value is a string which doesn't contain
    leading whitespace or return characters. This prevents unintended
    header injection.

    :param header: tuple, in the format (name, value).
    z7Invalid return character or leading space in header: %sz>Value for header {%s: %s} must be of type str or bytes, not %sN)rQr	�_CLEAN_HEADER_REGEX_BYTE�_CLEAN_HEADER_REGEX_STRr*rr��type)�headerrZreZpatr-r-r.�check_header_validityWs

r�cCsFt|�\}}}}}}|s"||}}|jdd�d}t|||||df�S)zW
    Given a url remove the fragment and the authentication part.

    :rtype: str
    �@rr�r])r�rsplitr)rVr�rSrLr�r�r�r-r-r.�
urldefragauthls

r�cCs`t|jdd�}|dk	rTt|jt�rTy||j�Wq\ttfk
rPtd��Yq\Xntd��dS)zfMove file pointer back to its recorded starting position
    so it can be read again on redirect.
    r9Nz;An error occurred when rewinding request body for redirect.z+Unable to rewind request body for redirect.)r^ZbodyrQZ_body_positionrrCr&r)Zprepared_requestZ	body_seekr-r-r.�rewind_body}sr�)rr)F)FzBABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-._~)N)r�)^�__doc__r�r�rg�
contextlibr;r=�platformr)r�r�rArr�rZ_internal_utilsr�compatrrlrrr	r
rrr
rrrrrrrZcookiesrZ
structuresr�
exceptionsrrrrrK�whereZDEFAULT_CA_BUNDLE_PATHZ
DEFAULT_PORTS�systemr/r3rFrYrarfrjrqrsrmrwrxr~r�r�r�r��	frozensetr�r�r�r�r�r�r��contextmanagerr�r�r�r�r�r�r��encoder�r�r�r�r�r�r{r�r�r�r�r�r-r-r-r.�<module>	s�@
!	=
3 #

%9

	"
 

_vendor/requests/__pycache__/__init__.cpython-36.pyc000064400000006060151733136400016455 0ustar003

�Pf�
�@s�dZddlmZddlmZddlZddlmZdd�Zyeejej�Wn0e	e
fk
rzejd	jejej�e�YnXdd
l
mZejde�ddlmZmZmZmZdd
lmZmZmZmZddlmZmZddlmZddlmZddlmZmZmZddl m!Z!m"Z"m#Z#m$Z$m%Z%m&Z&m'Z'm(Z(ddl)m*Z*m+Z+ddl,m-Z-ddlm.Z.m/Z/m0Z0m1Z1m2Z2m3Z3m4Z4m5Z5m6Z6ddl7Z7yddl7m8Z8Wn(e9k
�r�Gdd�de7j:�Z8YnXe7j;e<�j=e8��ejde4dd�dS)a�
Requests HTTP Library
~~~~~~~~~~~~~~~~~~~~~

Requests is an HTTP library, written in Python, for human beings. Basic GET
usage:

   >>> import requests
   >>> r = requests.get('https://www.python.org')
   >>> r.status_code
   200
   >>> 'Python is a programming language' in r.content
   True

... or POST:

   >>> payload = dict(key1='value1', key2='value2')
   >>> r = requests.post('http://httpbin.org/post', data=payload)
   >>> print(r.text)
   {
     ...
     "form": {
       "key2": "value2",
       "key1": "value1"
     },
     ...
   }

The other HTTP methods are supported - see `requests.api`. Full documentation
is at <http://python-requests.org>.

:copyright: (c) 2017 by Kenneth Reitz.
:license: Apache 2.0, see LICENSE for more details.
�)�urllib3)�chardetN�)�RequestsDependencyWarningcCs�|jd�}|dgkst�t|�dkr.|jd�|\}}}t|�t|�t|�}}}|dks`t�|dkslt�|dksxt�|jd�dd�\}}}t|�t|�t|�}}}|dks�t�|dks�t�|dks�t�dS)	N�.Zdev��0r���)�split�AssertionError�len�append�int)Zurllib3_versionZchardet_version�major�minor�patch�r�/usr/lib/python3.6/__init__.py�check_compatibility1s


rzAurllib3 ({0}) or chardet ({1}) doesn't match a supported version!)�DependencyWarning�ignore)�	__title__�__description__�__url__�__version__)�	__build__�
__author__�__author_email__�__license__)�
__copyright__�__cake__)�utils)�packages)�Request�Response�PreparedRequest)�request�get�head�postr�put�delete�options)�session�Session)�codes)	�RequestException�Timeout�URLRequired�TooManyRedirects�	HTTPError�ConnectionError�FileModeWarning�ConnectTimeout�ReadTimeout)�NullHandlerc@seZdZdd�ZdS)r;cCsdS)Nr)�self�recordrrr�emitsszNullHandler.emitN)�__name__�
__module__�__qualname__r>rrrrr;rsr;�defaultT)r)>�__doc__Zpip._vendorrr�warnings�
exceptionsrrrr
�
ValueError�warn�formatZpip._vendor.urllib3.exceptionsr�simplefilterrrrrrrr r!r"�r#r$Zmodelsr%r&r'Zapir(r)r*r+rr,r-r.Zsessionsr/r0Zstatus_codesr1r2r3r4r5r6r7r8r9r:Zloggingr;�ImportErrorZHandlerZ	getLoggerr?Z
addHandlerrrrr�<module>)s<

(,_vendor/requests/__pycache__/compat.cpython-36.opt-1.pyc000064400000002653151733136400017144 0ustar003

�PfZ�@s�dZddlmZddlZejZeddkZeddkZddlZer�ddl	m
Z
mZmZm
Z
mZmZmZmZmZddlmZmZmZmZmZddlmZddlZdd	lmZdd
lmZddlmZe Z!e Z"e#Z e$Z$e%e&e'fZ(e%e&fZ)n�e�r�ddl*mZmZmZmZmZm
Z
mZmZm
Z
mZdd
l+mZmZmZmZmZddl,m-Zdd	l.mZdd
l/mZddl0mZe Z!e Z e"Z"e e"fZ$e%e'fZ(e%fZ)dS)zq
requests.compat
~~~~~~~~~~~~~~~

This module handles import compatibility issues between Python 2 and
Python 3.
�)�chardetN��)	�quote�unquote�
quote_plus�unquote_plus�	urlencode�
getproxies�proxy_bypass�proxy_bypass_environment�getproxies_environment)�urlparse�
urlunparse�urljoin�urlsplit�	urldefrag)�parse_http_list)�Morsel)�StringIO)�OrderedDict)
rrrrr	rrrrr)rr
rrr
)�	cookiejar)1�__doc__Zpip._vendorr�sys�version_infoZ_verZis_py2Zis_py3ZjsonZurllibrrrrr	r
rrr
rrrrrZurllib2rZ	cookielibZCookierrZ)pip._vendor.urllib3.packages.ordered_dictr�strZbuiltin_str�bytesZunicodeZ
basestring�intZlong�floatZ
numeric_typesZ
integer_typesZurllib.parseZurllib.requestZhttprZhttp.cookies�io�collections�r!r!�/usr/lib/python3.6/compat.py�<module>	sB,

0_vendor/requests/__pycache__/status_codes.cpython-36.pyc000064400000007012151733136400017414 0ustar003

�Pf��F@s�ddlmZd�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�dz�DZed{d|�ZxNej�D]B\ZZx6eD].Zeeee�ej	dă�s�eeej
�e��q�W�q�WdS)��)�
LookupDict�continue�switching_protocols�
processing�
checkpoint�uri_too_long�request_uri_too_long�ok�okay�all_ok�all_okay�all_good�\o/�✓�created�accepted�non_authoritative_info�non_authoritative_information�
no_content�
reset_content�reset�partial_content�partial�multi_status�multiple_status�multi_stati�multiple_stati�already_reported�im_used�multiple_choices�moved_permanently�moved�\o-�found�	see_other�other�not_modified�	use_proxy�switch_proxy�temporary_redirect�temporary_moved�	temporary�permanent_redirect�resume_incomplete�resume�bad_request�bad�unauthorized�payment_required�payment�	forbidden�	not_found�-o-�method_not_allowed�not_allowed�not_acceptable�proxy_authentication_required�
proxy_auth�proxy_authentication�request_timeout�timeout�conflict�gone�length_required�precondition_failed�precondition�request_entity_too_large�request_uri_too_large�unsupported_media_type�unsupported_media�
media_type�requested_range_not_satisfiable�requested_range�range_not_satisfiable�expectation_failed�im_a_teapot�teapot�
i_am_a_teapot�misdirected_request�unprocessable_entity�
unprocessable�locked�failed_dependency�
dependency�unordered_collection�	unordered�upgrade_required�upgrade�precondition_required�too_many_requests�too_many�header_fields_too_large�fields_too_large�no_response�none�
retry_with�retry�$blocked_by_windows_parental_controls�parental_controls�unavailable_for_legal_reasons�
legal_reasons�client_closed_request�internal_server_error�server_error�/o\�✗�not_implemented�bad_gateway�service_unavailable�unavailable�gateway_timeout�http_version_not_supported�http_version�variant_also_negotiates�insufficient_storage�bandwidth_limit_exceeded�	bandwidth�not_extended�network_authentication_required�network_auth�network_authentication)D�d�e�f�g�z��������������������i,i-i.i/i0i1i2i3i4i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�Zstatus_codes)�name�\�/N)r)r)r)r)rr)r	r
rrr
rr)r)r)rr)r)rr)rr)rrrr)r)r)r)r r!r")r#)r$r%)r&)r')r()r)r*r+)r,r-r.)r/r0)r1)r2r3)r4)r5r6)r7r8)r9)r:r;r<)r=r>)r?)r@)rA)rBrC)rD)rE)rFrGrH)rIrJrK)rL)rMrNrO)rP)rQrR)rS)rTrU)rVrW)rXrY)rZrC)r[r\)r]r^)r_r`)rarb)rcrd)rerf)rg)rhrirjrk)rl)rm)rnro)rp)rqrr)rs)rt)rurv)rw)rxryrz)r�r�)Z
structuresrZ_codesZcodes�items�codeZtitles�title�setattr�
startswith�upper�r�r��"/usr/lib/python3.6/status_codes.py�<module>s�

_vendor/requests/__pycache__/compat.cpython-36.pyc000064400000002653151733136400016205 0ustar003

�PfZ�@s�dZddlmZddlZejZeddkZeddkZddlZer�ddl	m
Z
mZmZm
Z
mZmZmZmZmZddlmZmZmZmZmZddlmZddlZdd	lmZdd
lmZddlmZe Z!e Z"e#Z e$Z$e%e&e'fZ(e%e&fZ)n�e�r�ddl*mZmZmZmZmZm
Z
mZmZm
Z
mZdd
l+mZmZmZmZmZddl,m-Zdd	l.mZdd
l/mZddl0mZe Z!e Z e"Z"e e"fZ$e%e'fZ(e%fZ)dS)zq
requests.compat
~~~~~~~~~~~~~~~

This module handles import compatibility issues between Python 2 and
Python 3.
�)�chardetN��)	�quote�unquote�
quote_plus�unquote_plus�	urlencode�
getproxies�proxy_bypass�proxy_bypass_environment�getproxies_environment)�urlparse�
urlunparse�urljoin�urlsplit�	urldefrag)�parse_http_list)�Morsel)�StringIO)�OrderedDict)
rrrrr	rrrrr)rr
rrr
)�	cookiejar)1�__doc__Zpip._vendorr�sys�version_infoZ_verZis_py2Zis_py3ZjsonZurllibrrrrr	r
rrr
rrrrrZurllib2rZ	cookielibZCookierrZ)pip._vendor.urllib3.packages.ordered_dictr�strZbuiltin_str�bytesZunicodeZ
basestring�intZlong�floatZ
numeric_typesZ
integer_typesZurllib.parseZurllib.requestZhttprZhttp.cookies�io�collections�r!r!�/usr/lib/python3.6/compat.py�<module>	sB,

0_vendor/requests/__pycache__/auth.cpython-36.pyc000064400000017115151733136400015662 0ustar003

�Pf&�@s�dZddlZddlZddlZddlZddlZddlZddlmZddl	m
Z
mZmZddl
mZddlmZddlmZd	Zd
Zdd�ZGd
d�de�ZGdd�de�ZGdd�de�ZGdd�de�ZdS)z]
requests.auth
~~~~~~~~~~~~~

This module contains the authentication handlers for Requests.
�N)�	b64encode�)�urlparse�str�
basestring)�extract_cookies_to_jar)�to_native_string)�parse_dict_headerz!application/x-www-form-urlencodedzmultipart/form-datacCs�t|t�s&tjdj|�td�t|�}t|t�sLtjdj|�td�t|�}t|t�r`|jd�}t|t�rt|jd�}dtt	dj
||f��j��}|S)zReturns a Basic Auth string.z�Non-string usernames will no longer be supported in Requests 3.0.0. Please convert the object you've passed in ({0!r}) to a string or bytes object in the near future to avoid problems.)�categoryz�Non-string passwords will no longer be supported in Requests 3.0.0. Please convert the object you've passed in ({0!r}) to a string or bytes object in the near future to avoid problems.�latin1zBasic �:)�
isinstancer�warnings�warn�format�DeprecationWarningr�encoderr�join�strip)�username�passwordZauthstr�r�/usr/lib/python3.6/auth.py�_basic_auth_strs&






rc@seZdZdZdd�ZdS)�AuthBasez4Base class that all auth implementations derive fromcCstd��dS)NzAuth hooks must be callable.)�NotImplementedError)�self�rrrr�__call__KszAuthBase.__call__N)�__name__�
__module__�__qualname__�__doc__rrrrrrHsrc@s0eZdZdZdd�Zdd�Zdd�Zdd	�Zd
S)�
HTTPBasicAuthz?Attaches HTTP Basic Authentication to the given Request object.cCs||_||_dS)N)rr)rrrrrr�__init__RszHTTPBasicAuth.__init__cCs(t|jt|dd�k|jt|dd�kg�S)Nrr)�allr�getattrr)r�otherrrr�__eq__VszHTTPBasicAuth.__eq__cCs
||kS)Nr)rr'rrr�__ne__\szHTTPBasicAuth.__ne__cCst|j|j�|jd<|S)N�
Authorization)rrr�headers)rrrrrr_szHTTPBasicAuth.__call__N)rr r!r"r$r(r)rrrrrr#Os
r#c@seZdZdZdd�ZdS)�
HTTPProxyAuthz=Attaches HTTP Proxy Authentication to a given Request object.cCst|j|j�|jd<|S)NzProxy-Authorization)rrrr+)rrrrrrgszHTTPProxyAuth.__call__N)rr r!r"rrrrrr,dsr,c@sPeZdZdZdd�Zdd�Zdd�Zdd	�Zd
d�Zdd
�Z	dd�Z
dd�ZdS)�HTTPDigestAuthz@Attaches HTTP Digest Authentication to the given Request object.cCs||_||_tj�|_dS)N)rr�	threadingZlocal�
_thread_local)rrrrrrr$oszHTTPDigestAuth.__init__cCs@t|jd�s<d|j_d|j_d|j_i|j_d|j_d|j_dS)N�initT�r)�hasattrr/r0�
last_nonce�nonce_count�chal�pos�
num_401_calls)rrrr�init_per_thread_stateusz$HTTPDigestAuth.init_per_thread_statecsj|jjd}|jjd}|jjjd�}|jjjd�}|jjjd�}d�|dkrTd}n|j�}|dksl|dkrzd	d
�}	|	�n|dkr�dd
�}
|
��fdd�}�dkr�dSd}t|�}
|
jp�d}|
jr�|d|
j7}d|j||jf}d||f}�|�}�|�}||jj	k�r|jj
d7_
nd|j_
d|jj
}t|jj
�jd�}||jd�7}|t
j�jd�7}|tjd�7}tj|�j�dd�}|dk�r��d|||f�}|�s�||d||f�}n<|dk�s�d|jd�k�r�d|||d|f}|||�}ndS||j_	d|j||||f}|�r(|d|7}|�r:|d|7}|�rL|d|7}|�rb|d ||f7}d!|S)"z
        :rtype: str
        �realm�nonce�qop�	algorithm�opaqueNZMD5zMD5-SESScSs"t|t�r|jd�}tj|�j�S)Nzutf-8)r
rr�hashlibZmd5�	hexdigest)�xrrr�md5_utf8�s

z4HTTPDigestAuth.build_digest_header.<locals>.md5_utf8ZSHAcSs"t|t�r|jd�}tj|�j�S)Nzutf-8)r
rrr>�sha1r?)r@rrr�sha_utf8�s

z4HTTPDigestAuth.build_digest_header.<locals>.sha_utf8cs�d||f�S)Nz%s:%sr)�s�d)�	hash_utf8rr�<lambda>�sz4HTTPDigestAuth.build_digest_header.<locals>.<lambda>�/�?z%s:%s:%sz%s:%srz%08xzutf-8��Zauth�,z%s:%s:%s:%s:%sz>username="%s", realm="%s", nonce="%s", uri="%s", response="%s"z
, opaque="%s"z, algorithm="%s"z
, digest="%s"z , qop="auth", nc=%s, cnonce="%s"z	Digest %s)r/r5�get�upperr�pathZqueryrrr3r4rr�timeZctime�os�urandomr>rBr?�split)r�method�urlr9r:r;r<r=Z
_algorithmrArCZKDZentdigZp_parsedrOZA1ZA2ZHA1ZHA2ZncvaluerDZcnonceZrespdigZnoncebit�baser)rFr�build_digest_headersr

z"HTTPDigestAuth.build_digest_headercKs|jrd|j_dS)z)Reset num_401_calls counter on redirects.rN)Zis_redirectr/r7)rr�kwargsrrr�handle_redirect�szHTTPDigestAuth.handle_redirectcKs"d|jkodkns&d|j_|S|jjdk	rD|jjj|jj�|jjdd�}d|j	�koh|jjdk�r|jjd7_t
jd	t
jd
�}t
|jd|dd��|j_|j|j�|jj�}t|j|j|j�|j|j�|j|j|j�|jd<|jj|f|�}|jj|�||_|Sd|j_|S)
zo
        Takes the given response and tries digest-auth, if needed.

        :rtype: requests.Response
        i�i�rNzwww-authenticater1Zdigest�zdigest )�flags)�countr*)Zstatus_coder/r7r6Zrequest�body�seekr+rM�lower�re�compile�
IGNORECASEr	�subr5Zcontent�close�copyrZ_cookies�rawZprepare_cookiesrWrTrUZ
connection�send�history�append)rrrXZs_authZpatZprepZ_rrrr�
handle_401�s.	
zHTTPDigestAuth.handle_401cCs~|j�|jjr&|j|j|j�|jd<y|jj�|j_	Wnt
k
rTd|j_	YnX|jd|j�|jd|j
�d|j_|S)Nr*Zresponser)r8r/r3rWrTrUr+r]�tellr6�AttributeErrorZ
register_hookrjrYr7)rrrrrr
szHTTPDigestAuth.__call__cCs(t|jt|dd�k|jt|dd�kg�S)Nrr)r%rr&r)rr'rrrr(szHTTPDigestAuth.__eq__cCs
||kS)Nr)rr'rrrr)$szHTTPDigestAuth.__ne__N)rr r!r"r$r8rWrYrjrr(r)rrrrr-ls
Z,r-)r"rQr`rPr>r.r�base64r�compatrrrZcookiesrZ_internal_utilsrZutilsr	ZCONTENT_TYPE_FORM_URLENCODEDZCONTENT_TYPE_MULTI_PARTr�objectrr#r,r-rrrr�<module>s$,_vendor/requests/__pycache__/sessions.cpython-36.opt-1.pyc000064400000044731151733136400017532 0ustar003

�Pfp�@s�dZddlZddlZddlZddlmZddlmZddlm	Z	ddl
mZmZm
Z
mZmZddlmZmZmZmZdd	lmZmZmZdd
lmZmZddlmZddlmZm Z dd
l!m"Z"m#Z#m$Z$m%Z%ddl&m'Z'ddl(m)Z)ddlm*Z*m+Z+m,Z,m-Z-m.Z.m/Z/m0Z0ddl1m2Z2ddlm3Z3ej4�dk�rXy
ej5Z6Wne7k
�rTej8Z6YnXnejZ6e
fdd�Z9e
fdd�Z:Gdd�de;�Z<Gdd�de<�Z=dd�Z>dS)z�
requests.session
~~~~~~~~~~~~~~~~

This module provides a Session object to manage and persist settings across
requests (cookies, auth, proxies).
�N)�Mapping)�	timedelta�)�_basic_auth_str)�	cookielib�is_py3�OrderedDict�urljoin�urlparse)�cookiejar_from_dict�extract_cookies_to_jar�RequestsCookieJar�
merge_cookies)�Request�PreparedRequest�DEFAULT_REDIRECT_LIMIT)�
default_hooks�
dispatch_hook)�to_native_string)�to_key_val_list�default_headers)�TooManyRedirects�
InvalidSchema�ChunkedEncodingError�ContentDecodingError)�CaseInsensitiveDict)�HTTPAdapter)�requote_uri�get_environ_proxies�get_netrc_auth�should_bypass_proxies�get_auth_from_url�rewind_body�
DEFAULT_PORTS)�codes)�REDIRECT_STATIZWindowscCst|dkr|S|dkr|St|t�o*t|t�s0|S|t|��}|jt|��dd�|j�D�}x|D]
}||=qbW|S)z�Determines appropriate setting for a given request, taking into account
    the explicit setting on that request, and the setting in the session. If a
    setting is a dictionary, they will be merged together using `dict_class`
    NcSsg|]\}}|dkr|�qS)N�)�.0�k�vr&r&�/usr/lib/python3.6/sessions.py�
<listcomp>Jsz!merge_setting.<locals>.<listcomp>)�
isinstancerr�update�items)Zrequest_settingZsession_setting�
dict_classZmerged_settingZ	none_keys�keyr&r&r*�
merge_setting2s



r1cCs@|dks|jd�gkr|S|dks0|jd�gkr4|St|||�S)z�Properly merges both requests and session hooks.

    This is necessary because when request_hooks == {'response': []}, the
    merge breaks Session hooks entirely.
    N�response)�getr1)Z
request_hooksZ
session_hooksr/r&r&r*�merge_hooksQs
r4c@s>eZdZdd�Zdd�Zddd	�Zd
d�Zdd
�Zdd�ZdS)�SessionRedirectMixincCs,|jr(|jd}tr|jd�}t|d�SdS)z7Receives a Response. Returns a redirect URI or ``None``�location�latin1�utf8N)Zis_redirect�headersr�encoder)�self�respr6r&r&r*�get_redirect_targetbs


z(SessionRedirectMixin.get_redirect_targetcCs�t|�}t|�}|j|jkr dS|jdkrL|jdkrL|jdkrL|jd	krLdS|j|jk}|j|jk}tj|jd�df}|r�|j|kr�|j|kr�dS|p�|S)
zFDecide whether Authorization header should be removed when redirectingTZhttp�PNZhttps�F)r>N)r?N)r
Zhostname�schemeZportr#r3)r;Zold_urlZnew_urlZ
old_parsedZ
new_parsedZchanged_portZchanged_schemeZdefault_portr&r&r*�should_strip_authxs
z&SessionRedirectMixin.should_strip_authFNTc	ks.g}
|j|�}�x|�r(|j�}|
j|�|
dd�|_y
|jWn(tttfk
rj|jj	dd�YnXt
|j�|jkr�td|j|d��|j
�|jd�r�t|j�}
dt|
j�|f}t|�}|j�}|js�t|jt|��}nt|�}t|�|_|j||�|jtjtjfk�r>d}x|D]}|jj|d��q Wd|_|j}y
|d=Wntk
�rdYnXt |j!||j�t"|j!|j#�|j$|j!�|j%||�}|j&||�|j'dk	�o�d	|k�p�d|k}|�r�t(|�|}|�r�|Vq|j)|f|||||dd
�|	��}t |j#||j�|j|�}|VqWdS)zBReceives a Response. Returns a generator of Responses or Requests.rNF)Zdecode_contentzExceeded %s redirects.)r2z//z%s:%s�Content-Length�Content-Type�Transfer-EncodingZCookie)�stream�timeout�verify�cert�proxies�allow_redirects)rBrCrD)*r=�copy�append�history�contentrr�RuntimeError�raw�read�len�
max_redirectsr�close�
startswithr
�urlrr@ZgeturlZnetlocr	r�rebuild_method�status_coder$Ztemporary_redirectZpermanent_redirectr9�popZbody�KeyErrorrZ_cookiesr�cookiesZprepare_cookies�rebuild_proxies�rebuild_authZ_body_positionr"�send)r;r<�reqrErFrGrHrI�yield_requestsZadapter_kwargsZhistrV�prepared_requestZparsed_rurlZparsedZpurged_headers�headerr9Z
rewindabler&r&r*�resolve_redirects�sr









z&SessionRedirectMixin.resolve_redirectscCsR|j}|j}d|kr*|j|jj|�r*|d=|jr8t|�nd}|dk	rN|j|�dS)z�When being redirected we may want to strip authentication from the
        request to avoid leaking credentials. This method intelligently removes
        and reapplies authentication where possible to avoid credential loss.
        Z
AuthorizationN)r9rVrA�request�	trust_envrZprepare_auth)r;rar2r9rVZnew_authr&r&r*r]�s
z!SessionRedirectMixin.rebuild_authc
Cs�|dk	r|ni}|j}|j}t|�j}|j�}|jd�}t||d�}|jr~|r~t||d�}	|	j||	jd��}
|
r~|j	||
�d|kr�|d=yt
||�\}}Wntk
r�d\}}YnX|r�|r�t||�|d<|S)a�This method re-evaluates the proxy configuration by considering the
        environment variables. If we are redirected to a URL covered by
        NO_PROXY, we strip the proxy configuration. Otherwise, we set missing
        proxy keys for this URL (in case they were stripped by a previous
        redirect).

        This method also replaces the Proxy-Authorization header where
        necessary.

        :rtype: dict
        N�no_proxy)rf�allzProxy-Authorization)NN)
r9rVr
r@rKr3r rer�
setdefaultr!rZr)
r;rarIr9rVr@Znew_proxiesrfZbypass_proxyZenviron_proxies�proxyZusernameZpasswordr&r&r*r\s*

z$SessionRedirectMixin.rebuild_proxiescCsX|j}|jtjkr|dkrd}|jtjkr6|dkr6d}|jtjkrN|dkrNd}||_dS)z�When being redirected we may want to change the method of the request
        based on certain specs or browser behavior.
        �HEAD�GET�POSTN)�methodrXr$Z	see_other�foundZmoved)r;rar2rmr&r&r*rW:sz#SessionRedirectMixin.rebuild_method)FNTNNF)	�__name__�
__module__�__qualname__r=rArcr]r\rWr&r&r&r*r5`s
k)r5c@s�eZdZdZdddddddd	d
ddd
dg
Zdd�Zdd�Zdd�Zdd�Zd7dd�Z	dd�Z
dd�Zdd �Zd8d!d"�Z
d9d#d$�Zd:d%d&�Zd'd(�Zd)d*�Zd+d,�Zd-d.�Zd/d0�Zd1d2�Zd3d4�Zd5d6�ZdS);�Sessiona~A Requests session.

    Provides cookie persistence, connection-pooling, and configuration.

    Basic Usage::

      >>> import requests
      >>> s = requests.Session()
      >>> s.get('http://httpbin.org/get')
      <Response [200]>

    Or as a context manager::

      >>> with requests.Session() as s:
      >>>     s.get('http://httpbin.org/get')
      <Response [200]>
    r9r[�authrI�hooks�paramsrGrHZprefetch�adaptersrErerScCsrt�|_d|_i|_t�|_i|_d|_d|_d|_	t
|_d|_t
i�|_t�|_|jdt��|jdt��dS)NFTzhttps://zhttp://)rr9rsrIrrtrurErGrHrrSrerr[rrv�mountr)r;r&r&r*�__init__js
zSession.__init__cCs|S)Nr&)r;r&r&r*�	__enter__�szSession.__enter__cGs|j�dS)N)rT)r;�argsr&r&r*�__exit__�szSession.__exit__c
Cs�|jpi}t|tj�st|�}ttt�|j�|�}|j}|jrV|rV|jrVt	|j
�}t�}|j|j
j�|j
|j|j|jt|j|jtd�t|j|j�t||j�|t|j|j�d�
|S)a�Constructs a :class:`PreparedRequest <PreparedRequest>` for
        transmission and returns it. The :class:`PreparedRequest` has settings
        merged from the :class:`Request <Request>` instance and those of the
        :class:`Session`.

        :param request: :class:`Request` instance to prepare with this
            session's settings.
        :rtype: requests.PreparedRequest
        )r/)
rmrV�files�data�jsonr9rursr[rt)r[r,rZ	CookieJarrrr
rsrerrVrZpreparerm�upperr|r}r~r1r9rrur4rt)r;rdr[Zmerged_cookiesrs�pr&r&r*�prepare_request�s*



zSession.prepare_requestNTcCstt|j�||||pi||pi|||d�
}|j|�}|p8i}|j|j||
||�}|	|
d�}|j|�|j|f|�}|S)a�Constructs a :class:`Request <Request>`, prepares it and sends it.
        Returns :class:`Response <Response>` object.

        :param method: method for the new :class:`Request` object.
        :param url: URL for the new :class:`Request` object.
        :param params: (optional) Dictionary or bytes to be sent in the query
            string for the :class:`Request`.
        :param data: (optional) Dictionary, bytes, or file-like object to send
            in the body of the :class:`Request`.
        :param json: (optional) json to send in the body of the
            :class:`Request`.
        :param headers: (optional) Dictionary of HTTP Headers to send with the
            :class:`Request`.
        :param cookies: (optional) Dict or CookieJar object to send with the
            :class:`Request`.
        :param files: (optional) Dictionary of ``'filename': file-like-objects``
            for multipart encoding upload.
        :param auth: (optional) Auth tuple or callable to enable
            Basic/Digest/Custom HTTP Auth.
        :param timeout: (optional) How long to wait for the server to send
            data before giving up, as a float, or a :ref:`(connect timeout,
            read timeout) <timeouts>` tuple.
        :type timeout: float or tuple
        :param allow_redirects: (optional) Set to True by default.
        :type allow_redirects: bool
        :param proxies: (optional) Dictionary mapping protocol or protocol and
            hostname to the URL of the proxy.
        :param stream: (optional) whether to immediately download the response
            content. Defaults to ``False``.
        :param verify: (optional) Either a boolean, in which case it controls whether we verify
            the server's TLS certificate, or a string, in which case it must be a path
            to a CA bundle to use. Defaults to ``True``.
        :param cert: (optional) if String, path to ssl client cert file (.pem).
            If Tuple, ('cert', 'key') pair.
        :rtype: requests.Response
        )
rmrVr9r|r}r~rursr[rt)rFrJ)rrr��merge_environment_settingsrVr-r^)r;rmrVrur}r9r[r|rsrFrJrIrtrErGrHr~r_ZprepZsettingsZsend_kwargsr<r&r&r*rd�s()

zSession.requestcKs|jdd�|jd|f|�S)z�Sends a GET request. Returns :class:`Response` object.

        :param url: URL for the new :class:`Request` object.
        :param \*\*kwargs: Optional arguments that ``request`` takes.
        :rtype: requests.Response
        rJTrk)rhrd)r;rV�kwargsr&r&r*r3szSession.getcKs|jdd�|jd|f|�S)z�Sends a OPTIONS request. Returns :class:`Response` object.

        :param url: URL for the new :class:`Request` object.
        :param \*\*kwargs: Optional arguments that ``request`` takes.
        :rtype: requests.Response
        rJTZOPTIONS)rhrd)r;rVr�r&r&r*�options!szSession.optionscKs|jdd�|jd|f|�S)z�Sends a HEAD request. Returns :class:`Response` object.

        :param url: URL for the new :class:`Request` object.
        :param \*\*kwargs: Optional arguments that ``request`` takes.
        :rtype: requests.Response
        rJFrj)rhrd)r;rVr�r&r&r*�head,szSession.headcKs|jd|f||d�|��S)a�Sends a POST request. Returns :class:`Response` object.

        :param url: URL for the new :class:`Request` object.
        :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
        :param json: (optional) json to send in the body of the :class:`Request`.
        :param \*\*kwargs: Optional arguments that ``request`` takes.
        :rtype: requests.Response
        rl)r}r~)rd)r;rVr}r~r�r&r&r*�post7s
zSession.postcKs|jd|fd|i|��S)aYSends a PUT request. Returns :class:`Response` object.

        :param url: URL for the new :class:`Request` object.
        :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
        :param \*\*kwargs: Optional arguments that ``request`` takes.
        :rtype: requests.Response
        ZPUTr})rd)r;rVr}r�r&r&r*�putCs	zSession.putcKs|jd|fd|i|��S)a[Sends a PATCH request. Returns :class:`Response` object.

        :param url: URL for the new :class:`Request` object.
        :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
        :param \*\*kwargs: Optional arguments that ``request`` takes.
        :rtype: requests.Response
        ZPATCHr})rd)r;rVr}r�r&r&r*�patchNs	z
Session.patchcKs|jd|f|�S)z�Sends a DELETE request. Returns :class:`Response` object.

        :param url: URL for the new :class:`Request` object.
        :param \*\*kwargs: Optional arguments that ``request`` takes.
        :rtype: requests.Response
        ZDELETE)rd)r;rVr�r&r&r*�deleteYszSession.deletec
Ks~|jd|j�|jd|j�|jd|j�|jd|j�t|t�rJtd��|jdd�}|j	d�}|j
}|j|jd�}t
�}|j|f|�}t
�|}	t|	d	�|_td
||f|�}|jr�x |jD]}
t|j|
j|
j�q�Wt|j||j�|j||f|�}|�r
dd�|D�ng}|�r.|jd
|�|j�}||_|�sny"t|j||fddi|���|_Wntk
�rlYnX|�sz|j|S)zISend a given PreparedRequest.

        :rtype: requests.Response
        rErGrHrIz#You can only send PreparedRequests.rJT)rV)Zsecondsr2cSsg|]}|�qSr&r&)r'r<r&r&r*r+�sz Session.send.<locals>.<listcomp>rr`)rhrErGrHrIr,r�
ValueErrorrYr3rt�get_adapterrV�preferred_clockr^r�elapsedrrMrr[rdrPrc�insert�nextZ_next�
StopIterationrN)
r;rdr�rJrErt�adapter�start�rr�r<�genrMr&r&r*r^csB


"zSession.sendc
Cs�|jrr|dk	r|jd�nd}t||d�}x |j�D]\}}	|j||	�q2W|dksZ|dkrrtjjd�pptjjd�}t||j�}t||j	�}t||j
�}t||j�}||||d�S)z^
        Check the environment and merge it with some settings.

        :rtype: dict
        Nrf)rfTZREQUESTS_CA_BUNDLEZCURL_CA_BUNDLE)rGrIrErH)rer3rr.rh�os�environr1rIrErGrH)
r;rVrIrErGrHrfZenv_proxiesr(r)r&r&r*r��sz"Session.merge_environment_settingscCs:x(|jj�D]\}}|j�j|�r|SqWtd|��dS)z~
        Returns the appropriate connection adapter for the given URL.

        :rtype: requests.adapters.BaseAdapter
        z*No connection adapters were found for '%s'N)rvr.�lowerrUr)r;rV�prefixr�r&r&r*r��szSession.get_adaptercCs x|jj�D]}|j�qWdS)z+Closes all adapters and as such the sessionN)rv�valuesrT)r;r)r&r&r*rT�sz
Session.closecsB||j�<�fdd�|jD�}x|D]}|jj|�|j|<q$WdS)zwRegisters a connection adapter to a prefix.

        Adapters are sorted in descending order by prefix length.
        cs g|]}t|�t��kr|�qSr&)rR)r'r()r�r&r*r+�sz!Session.mount.<locals>.<listcomp>N)rvrY)r;r�r�Zkeys_to_mover0r&)r�r*rw�s

z
Session.mountcst�fdd��jD��}|S)Nc3s|]}|t�|d�fVqdS)N)�getattr)r'�attr)r;r&r*�	<genexpr>�sz'Session.__getstate__.<locals>.<genexpr>)�dict�	__attrs__)r;�stater&)r;r*�__getstate__�szSession.__getstate__cCs&x |j�D]\}}t|||�q
WdS)N)r.�setattr)r;r�r��valuer&r&r*�__setstate__�szSession.__setstate__)NNNNNNNTNNNNNN)NN)N)N)rorprq�__doc__r�rxryr{r�rdr3r�r�r�r�r�r�r^r�r�rTrwr�r�r&r&r&r*rrQs2
7)
D



IrrcCst�S)zQ
    Returns a :class:`Session` for context-management.

    :rtype: Session
    )rrr&r&r&r*�session�sr�)?r�r��platformZtime�collectionsrZdatetimerrsr�compatrrrr	r
r[rrr
rZmodelsrrrrtrrZ_internal_utilsrZutilsrr�
exceptionsrrrrZ
structuresrrvrrrrr r!r"r#Zstatus_codesr$r%�systemZperf_counterr��AttributeErrorZclockr1r4�objectr5rrr�r&r&r&r*�<module>	sB$
r"_vendor/requests/__pycache__/auth.cpython-36.opt-1.pyc000064400000017115151733136400016621 0ustar003

�Pf&�@s�dZddlZddlZddlZddlZddlZddlZddlmZddl	m
Z
mZmZddl
mZddlmZddlmZd	Zd
Zdd�ZGd
d�de�ZGdd�de�ZGdd�de�ZGdd�de�ZdS)z]
requests.auth
~~~~~~~~~~~~~

This module contains the authentication handlers for Requests.
�N)�	b64encode�)�urlparse�str�
basestring)�extract_cookies_to_jar)�to_native_string)�parse_dict_headerz!application/x-www-form-urlencodedzmultipart/form-datacCs�t|t�s&tjdj|�td�t|�}t|t�sLtjdj|�td�t|�}t|t�r`|jd�}t|t�rt|jd�}dtt	dj
||f��j��}|S)zReturns a Basic Auth string.z�Non-string usernames will no longer be supported in Requests 3.0.0. Please convert the object you've passed in ({0!r}) to a string or bytes object in the near future to avoid problems.)�categoryz�Non-string passwords will no longer be supported in Requests 3.0.0. Please convert the object you've passed in ({0!r}) to a string or bytes object in the near future to avoid problems.�latin1zBasic �:)�
isinstancer�warnings�warn�format�DeprecationWarningr�encoderr�join�strip)�username�passwordZauthstr�r�/usr/lib/python3.6/auth.py�_basic_auth_strs&






rc@seZdZdZdd�ZdS)�AuthBasez4Base class that all auth implementations derive fromcCstd��dS)NzAuth hooks must be callable.)�NotImplementedError)�self�rrrr�__call__KszAuthBase.__call__N)�__name__�
__module__�__qualname__�__doc__rrrrrrHsrc@s0eZdZdZdd�Zdd�Zdd�Zdd	�Zd
S)�
HTTPBasicAuthz?Attaches HTTP Basic Authentication to the given Request object.cCs||_||_dS)N)rr)rrrrrr�__init__RszHTTPBasicAuth.__init__cCs(t|jt|dd�k|jt|dd�kg�S)Nrr)�allr�getattrr)r�otherrrr�__eq__VszHTTPBasicAuth.__eq__cCs
||kS)Nr)rr'rrr�__ne__\szHTTPBasicAuth.__ne__cCst|j|j�|jd<|S)N�
Authorization)rrr�headers)rrrrrr_szHTTPBasicAuth.__call__N)rr r!r"r$r(r)rrrrrr#Os
r#c@seZdZdZdd�ZdS)�
HTTPProxyAuthz=Attaches HTTP Proxy Authentication to a given Request object.cCst|j|j�|jd<|S)NzProxy-Authorization)rrrr+)rrrrrrgszHTTPProxyAuth.__call__N)rr r!r"rrrrrr,dsr,c@sPeZdZdZdd�Zdd�Zdd�Zdd	�Zd
d�Zdd
�Z	dd�Z
dd�ZdS)�HTTPDigestAuthz@Attaches HTTP Digest Authentication to the given Request object.cCs||_||_tj�|_dS)N)rr�	threadingZlocal�
_thread_local)rrrrrrr$oszHTTPDigestAuth.__init__cCs@t|jd�s<d|j_d|j_d|j_i|j_d|j_d|j_dS)N�initT�r)�hasattrr/r0�
last_nonce�nonce_count�chal�pos�
num_401_calls)rrrr�init_per_thread_stateusz$HTTPDigestAuth.init_per_thread_statecsj|jjd}|jjd}|jjjd�}|jjjd�}|jjjd�}d�|dkrTd}n|j�}|dksl|dkrzd	d
�}	|	�n|dkr�dd
�}
|
��fdd�}�dkr�dSd}t|�}
|
jp�d}|
jr�|d|
j7}d|j||jf}d||f}�|�}�|�}||jj	k�r|jj
d7_
nd|j_
d|jj
}t|jj
�jd�}||jd�7}|t
j�jd�7}|tjd�7}tj|�j�dd�}|dk�r��d|||f�}|�s�||d||f�}n<|dk�s�d|jd�k�r�d|||d|f}|||�}ndS||j_	d|j||||f}|�r(|d|7}|�r:|d|7}|�rL|d|7}|�rb|d ||f7}d!|S)"z
        :rtype: str
        �realm�nonce�qop�	algorithm�opaqueNZMD5zMD5-SESScSs"t|t�r|jd�}tj|�j�S)Nzutf-8)r
rr�hashlibZmd5�	hexdigest)�xrrr�md5_utf8�s

z4HTTPDigestAuth.build_digest_header.<locals>.md5_utf8ZSHAcSs"t|t�r|jd�}tj|�j�S)Nzutf-8)r
rrr>�sha1r?)r@rrr�sha_utf8�s

z4HTTPDigestAuth.build_digest_header.<locals>.sha_utf8cs�d||f�S)Nz%s:%sr)�s�d)�	hash_utf8rr�<lambda>�sz4HTTPDigestAuth.build_digest_header.<locals>.<lambda>�/�?z%s:%s:%sz%s:%srz%08xzutf-8��Zauth�,z%s:%s:%s:%s:%sz>username="%s", realm="%s", nonce="%s", uri="%s", response="%s"z
, opaque="%s"z, algorithm="%s"z
, digest="%s"z , qop="auth", nc=%s, cnonce="%s"z	Digest %s)r/r5�get�upperr�pathZqueryrrr3r4rr�timeZctime�os�urandomr>rBr?�split)r�method�urlr9r:r;r<r=Z
_algorithmrArCZKDZentdigZp_parsedrOZA1ZA2ZHA1ZHA2ZncvaluerDZcnonceZrespdigZnoncebit�baser)rFr�build_digest_headersr

z"HTTPDigestAuth.build_digest_headercKs|jrd|j_dS)z)Reset num_401_calls counter on redirects.rN)Zis_redirectr/r7)rr�kwargsrrr�handle_redirect�szHTTPDigestAuth.handle_redirectcKs"d|jkodkns&d|j_|S|jjdk	rD|jjj|jj�|jjdd�}d|j	�koh|jjdk�r|jjd7_t
jd	t
jd
�}t
|jd|dd��|j_|j|j�|jj�}t|j|j|j�|j|j�|j|j|j�|jd<|jj|f|�}|jj|�||_|Sd|j_|S)
zo
        Takes the given response and tries digest-auth, if needed.

        :rtype: requests.Response
        i�i�rNzwww-authenticater1Zdigest�zdigest )�flags)�countr*)Zstatus_coder/r7r6Zrequest�body�seekr+rM�lower�re�compile�
IGNORECASEr	�subr5Zcontent�close�copyrZ_cookies�rawZprepare_cookiesrWrTrUZ
connection�send�history�append)rrrXZs_authZpatZprepZ_rrrr�
handle_401�s.	
zHTTPDigestAuth.handle_401cCs~|j�|jjr&|j|j|j�|jd<y|jj�|j_	Wnt
k
rTd|j_	YnX|jd|j�|jd|j
�d|j_|S)Nr*Zresponser)r8r/r3rWrTrUr+r]�tellr6�AttributeErrorZ
register_hookrjrYr7)rrrrrr
szHTTPDigestAuth.__call__cCs(t|jt|dd�k|jt|dd�kg�S)Nrr)r%rr&r)rr'rrrr(szHTTPDigestAuth.__eq__cCs
||kS)Nr)rr'rrrr)$szHTTPDigestAuth.__ne__N)rr r!r"r$r8rWrYrjrr(r)rrrrr-ls
Z,r-)r"rQr`rPr>r.r�base64r�compatrrrZcookiesrZ_internal_utilsrZutilsr	ZCONTENT_TYPE_FORM_URLENCODEDZCONTENT_TYPE_MULTI_PARTr�objectrr#r,r-rrrr�<module>s$,_vendor/requests/__pycache__/certs.cpython-36.opt-1.pyc000064400000001052151733136400016771 0ustar003

�Pf��@s&dZddlmZedkr"ee��dS)uF
requests.certs
~~~~~~~~~~~~~~

This module returns the preferred default CA certificate bundle. There is
only one — the one from the certifi package.

If you are packaging Requests, e.g., for a Linux distribution or a managed
environment, you can change the definition of where() to return a separately
packaged CA bundle.
�)�where�__main__N)�__doc__Zpip._vendor.certifir�__name__�print�rr�/usr/lib/python3.6/certs.py�<module>s_vendor/requests/__pycache__/exceptions.cpython-36.opt-1.pyc000064400000012174151733136400020041 0ustar003

�Pf+�@s�dZddlmZGdd�de�ZGdd�de�ZGdd�de�ZGd	d
�d
e�ZGdd�de�ZGd
d�de�Z	Gdd�dee	�Z
Gdd�de	�ZGdd�de�ZGdd�de�Z
Gdd�dee�ZGdd�dee�ZGdd�dee�ZGdd�dee�ZGdd �d e�ZGd!d"�d"ee�ZGd#d$�d$ee�ZGd%d&�d&e�ZGd'd(�d(e�ZGd)d*�d*e�ZGd+d,�d,ee�ZGd-d.�d.e�Zd/S)0z`
requests.exceptions
~~~~~~~~~~~~~~~~~~~

This module contains the set of Requests' exceptions.
�)�	HTTPErrorcs eZdZdZ�fdd�Z�ZS)�RequestExceptionzTThere was an ambiguous exception that occurred while handling your
    request.
    csZ|jdd�}||_|jdd�|_|dk	rD|jrDt|d�rD|jj|_tt|�j||�dS)zBInitialize RequestException with `request` and `response` objects.�responseN�request)�poprr�hasattr�superr�__init__)�self�args�kwargsr)�	__class__�� /usr/lib/python3.6/exceptions.pyr	s

zRequestException.__init__)�__name__�
__module__�__qualname__�__doc__r	�
__classcell__rr)r
rrsrc@seZdZdZdS)rzAn HTTP error occurred.N)rrrrrrrrrsrc@seZdZdZdS)�ConnectionErrorzA Connection error occurred.N)rrrrrrrrr src@seZdZdZdS)�
ProxyErrorzA proxy error occurred.N)rrrrrrrrr$src@seZdZdZdS)�SSLErrorzAn SSL error occurred.N)rrrrrrrrr(src@seZdZdZdS)�Timeoutz�The request timed out.

    Catching this error will catch both
    :exc:`~requests.exceptions.ConnectTimeout` and
    :exc:`~requests.exceptions.ReadTimeout` errors.
    N)rrrrrrrrr,src@seZdZdZdS)�ConnectTimeoutz�The request timed out while trying to connect to the remote server.

    Requests that produced this error are safe to retry.
    N)rrrrrrrrr5src@seZdZdZdS)�ReadTimeoutz@The server did not send any data in the allotted amount of time.N)rrrrrrrrr<src@seZdZdZdS)�URLRequiredz*A valid URL is required to make a request.N)rrrrrrrrr@src@seZdZdZdS)�TooManyRedirectszToo many redirects.N)rrrrrrrrrDsrc@seZdZdZdS)�
MissingSchemaz/The URL schema (e.g. http or https) is missing.N)rrrrrrrrrHsrc@seZdZdZdS)�
InvalidSchemaz"See defaults.py for valid schemas.N)rrrrrrrrrLsrc@seZdZdZdS)�
InvalidURLz%The URL provided was somehow invalid.N)rrrrrrrrrPsrc@seZdZdZdS)�
InvalidHeaderz.The header value provided was somehow invalid.N)rrrrrrrrr Tsr c@seZdZdZdS)�ChunkedEncodingErrorz?The server declared chunked encoding but sent an invalid chunk.N)rrrrrrrrr!Xsr!c@seZdZdZdS)�ContentDecodingErrorz!Failed to decode response contentN)rrrrrrrrr"\sr"c@seZdZdZdS)�StreamConsumedErrorz2The content for this response was already consumedN)rrrrrrrrr#`sr#c@seZdZdZdS)�
RetryErrorzCustom retries logic failedN)rrrrrrrrr$dsr$c@seZdZdZdS)�UnrewindableBodyErrorz:Requests encountered an error when trying to rewind a bodyN)rrrrrrrrr%hsr%c@seZdZdZdS)�RequestsWarningzBase warning for Requests.N)rrrrrrrrr&nsr&c@seZdZdZdS)�FileModeWarningzJA file was opened in text mode, but Requests determined its binary length.N)rrrrrrrrr'ssr'c@seZdZdZdS)�RequestsDependencyWarningz@An imported dependency doesn't match the expected version range.N)rrrrrrrrr(xsr(N)rZpip._vendor.urllib3.exceptionsrZ
BaseHTTPError�IOErrorrrrrrrrrr�
ValueErrorrrrr r!r"�	TypeErrorr#r$r%�Warningr&�DeprecationWarningr'r(rrrr�<module>s.	_vendor/requests/__pycache__/__init__.cpython-36.opt-1.pyc000064400000005665151733136400017426 0ustar003

�Pf�
�@s�dZddlmZddlmZddlZddlmZdd�Zyeejej�Wn0e	e
fk
rzejd	jejej�e�YnXdd
l
mZejde�ddlmZmZmZmZdd
lmZmZmZmZddlmZmZddlmZddlmZddlmZmZmZddl m!Z!m"Z"m#Z#m$Z$m%Z%m&Z&m'Z'm(Z(ddl)m*Z*m+Z+ddl,m-Z-ddlm.Z.m/Z/m0Z0m1Z1m2Z2m3Z3m4Z4m5Z5m6Z6ddl7Z7yddl7m8Z8Wn(e9k
�r�Gdd�de7j:�Z8YnXe7j;e<�j=e8��ejde4dd�dS)a�
Requests HTTP Library
~~~~~~~~~~~~~~~~~~~~~

Requests is an HTTP library, written in Python, for human beings. Basic GET
usage:

   >>> import requests
   >>> r = requests.get('https://www.python.org')
   >>> r.status_code
   200
   >>> 'Python is a programming language' in r.content
   True

... or POST:

   >>> payload = dict(key1='value1', key2='value2')
   >>> r = requests.post('http://httpbin.org/post', data=payload)
   >>> print(r.text)
   {
     ...
     "form": {
       "key2": "value2",
       "key1": "value1"
     },
     ...
   }

The other HTTP methods are supported - see `requests.api`. Full documentation
is at <http://python-requests.org>.

:copyright: (c) 2017 by Kenneth Reitz.
:license: Apache 2.0, see LICENSE for more details.
�)�urllib3)�chardetN�)�RequestsDependencyWarningcCs~|jd�}t|�dkr |jd�|\}}}t|�t|�t|�}}}|jd�dd�\}}}t|�t|�t|�}}}dS)N�.��0�)�split�len�append�int)Zurllib3_versionZchardet_version�major�minor�patch�r�/usr/lib/python3.6/__init__.py�check_compatibility1s


rzAurllib3 ({0}) or chardet ({1}) doesn't match a supported version!)�DependencyWarning�ignore)�	__title__�__description__�__url__�__version__)�	__build__�
__author__�__author_email__�__license__)�
__copyright__�__cake__)�utils)�packages)�Request�Response�PreparedRequest)�request�get�head�postr�put�delete�options)�session�Session)�codes)	�RequestException�Timeout�URLRequired�TooManyRedirects�	HTTPError�ConnectionError�FileModeWarning�ConnectTimeout�ReadTimeout)�NullHandlerc@seZdZdd�ZdS)r8cCsdS)Nr)�self�recordrrr�emitsszNullHandler.emitN)�__name__�
__module__�__qualname__r;rrrrr8rsr8�defaultT)r)>�__doc__Zpip._vendorrr�warnings�
exceptionsrrr�AssertionError�
ValueError�warn�formatZpip._vendor.urllib3.exceptionsr�simplefilterrrrrrrrrr�r r!Zmodelsr"r#r$Zapir%r&r'r(rr)r*r+Zsessionsr,r-Zstatus_codesr.r/r0r1r2r3r4r5r6r7Zloggingr8�ImportErrorZHandlerZ	getLoggerr<Z
addHandlerrrrr�<module>)s<

(,_vendor/requests/__pycache__/certs.cpython-36.pyc000064400000001052151733136400016032 0ustar003

�Pf��@s&dZddlmZedkr"ee��dS)uF
requests.certs
~~~~~~~~~~~~~~

This module returns the preferred default CA certificate bundle. There is
only one — the one from the certifi package.

If you are packaging Requests, e.g., for a Linux distribution or a managed
environment, you can change the definition of where() to return a separately
packaged CA bundle.
�)�where�__main__N)�__doc__Zpip._vendor.certifir�__name__�print�rr�/usr/lib/python3.6/certs.py�<module>s_vendor/requests/__pycache__/sessions.cpython-36.pyc000064400000044731151733136400016573 0ustar003

�Pfp�@s�dZddlZddlZddlZddlmZddlmZddlm	Z	ddl
mZmZm
Z
mZmZddlmZmZmZmZdd	lmZmZmZdd
lmZmZddlmZddlmZm Z dd
l!m"Z"m#Z#m$Z$m%Z%ddl&m'Z'ddl(m)Z)ddlm*Z*m+Z+m,Z,m-Z-m.Z.m/Z/m0Z0ddl1m2Z2ddlm3Z3ej4�dk�rXy
ej5Z6Wne7k
�rTej8Z6YnXnejZ6e
fdd�Z9e
fdd�Z:Gdd�de;�Z<Gdd�de<�Z=dd�Z>dS)z�
requests.session
~~~~~~~~~~~~~~~~

This module provides a Session object to manage and persist settings across
requests (cookies, auth, proxies).
�N)�Mapping)�	timedelta�)�_basic_auth_str)�	cookielib�is_py3�OrderedDict�urljoin�urlparse)�cookiejar_from_dict�extract_cookies_to_jar�RequestsCookieJar�
merge_cookies)�Request�PreparedRequest�DEFAULT_REDIRECT_LIMIT)�
default_hooks�
dispatch_hook)�to_native_string)�to_key_val_list�default_headers)�TooManyRedirects�
InvalidSchema�ChunkedEncodingError�ContentDecodingError)�CaseInsensitiveDict)�HTTPAdapter)�requote_uri�get_environ_proxies�get_netrc_auth�should_bypass_proxies�get_auth_from_url�rewind_body�
DEFAULT_PORTS)�codes)�REDIRECT_STATIZWindowscCst|dkr|S|dkr|St|t�o*t|t�s0|S|t|��}|jt|��dd�|j�D�}x|D]
}||=qbW|S)z�Determines appropriate setting for a given request, taking into account
    the explicit setting on that request, and the setting in the session. If a
    setting is a dictionary, they will be merged together using `dict_class`
    NcSsg|]\}}|dkr|�qS)N�)�.0�k�vr&r&�/usr/lib/python3.6/sessions.py�
<listcomp>Jsz!merge_setting.<locals>.<listcomp>)�
isinstancerr�update�items)Zrequest_settingZsession_setting�
dict_classZmerged_settingZ	none_keys�keyr&r&r*�
merge_setting2s



r1cCs@|dks|jd�gkr|S|dks0|jd�gkr4|St|||�S)z�Properly merges both requests and session hooks.

    This is necessary because when request_hooks == {'response': []}, the
    merge breaks Session hooks entirely.
    N�response)�getr1)Z
request_hooksZ
session_hooksr/r&r&r*�merge_hooksQs
r4c@s>eZdZdd�Zdd�Zddd	�Zd
d�Zdd
�Zdd�ZdS)�SessionRedirectMixincCs,|jr(|jd}tr|jd�}t|d�SdS)z7Receives a Response. Returns a redirect URI or ``None``�location�latin1�utf8N)Zis_redirect�headersr�encoder)�self�respr6r&r&r*�get_redirect_targetbs


z(SessionRedirectMixin.get_redirect_targetcCs�t|�}t|�}|j|jkr dS|jdkrL|jdkrL|jdkrL|jd	krLdS|j|jk}|j|jk}tj|jd�df}|r�|j|kr�|j|kr�dS|p�|S)
zFDecide whether Authorization header should be removed when redirectingTZhttp�PNZhttps�F)r>N)r?N)r
Zhostname�schemeZportr#r3)r;Zold_urlZnew_urlZ
old_parsedZ
new_parsedZchanged_portZchanged_schemeZdefault_portr&r&r*�should_strip_authxs
z&SessionRedirectMixin.should_strip_authFNTc	ks.g}
|j|�}�x|�r(|j�}|
j|�|
dd�|_y
|jWn(tttfk
rj|jj	dd�YnXt
|j�|jkr�td|j|d��|j
�|jd�r�t|j�}
dt|
j�|f}t|�}|j�}|js�t|jt|��}nt|�}t|�|_|j||�|jtjtjfk�r>d}x|D]}|jj|d��q Wd|_|j}y
|d=Wntk
�rdYnXt |j!||j�t"|j!|j#�|j$|j!�|j%||�}|j&||�|j'dk	�o�d	|k�p�d|k}|�r�t(|�|}|�r�|Vq|j)|f|||||dd
�|	��}t |j#||j�|j|�}|VqWdS)zBReceives a Response. Returns a generator of Responses or Requests.rNF)Zdecode_contentzExceeded %s redirects.)r2z//z%s:%s�Content-Length�Content-Type�Transfer-EncodingZCookie)�stream�timeout�verify�cert�proxies�allow_redirects)rBrCrD)*r=�copy�append�history�contentrr�RuntimeError�raw�read�len�
max_redirectsr�close�
startswithr
�urlrr@ZgeturlZnetlocr	r�rebuild_method�status_coder$Ztemporary_redirectZpermanent_redirectr9�popZbody�KeyErrorrZ_cookiesr�cookiesZprepare_cookies�rebuild_proxies�rebuild_authZ_body_positionr"�send)r;r<�reqrErFrGrHrI�yield_requestsZadapter_kwargsZhistrV�prepared_requestZparsed_rurlZparsedZpurged_headers�headerr9Z
rewindabler&r&r*�resolve_redirects�sr









z&SessionRedirectMixin.resolve_redirectscCsR|j}|j}d|kr*|j|jj|�r*|d=|jr8t|�nd}|dk	rN|j|�dS)z�When being redirected we may want to strip authentication from the
        request to avoid leaking credentials. This method intelligently removes
        and reapplies authentication where possible to avoid credential loss.
        Z
AuthorizationN)r9rVrA�request�	trust_envrZprepare_auth)r;rar2r9rVZnew_authr&r&r*r]�s
z!SessionRedirectMixin.rebuild_authc
Cs�|dk	r|ni}|j}|j}t|�j}|j�}|jd�}t||d�}|jr~|r~t||d�}	|	j||	jd��}
|
r~|j	||
�d|kr�|d=yt
||�\}}Wntk
r�d\}}YnX|r�|r�t||�|d<|S)a�This method re-evaluates the proxy configuration by considering the
        environment variables. If we are redirected to a URL covered by
        NO_PROXY, we strip the proxy configuration. Otherwise, we set missing
        proxy keys for this URL (in case they were stripped by a previous
        redirect).

        This method also replaces the Proxy-Authorization header where
        necessary.

        :rtype: dict
        N�no_proxy)rf�allzProxy-Authorization)NN)
r9rVr
r@rKr3r rer�
setdefaultr!rZr)
r;rarIr9rVr@Znew_proxiesrfZbypass_proxyZenviron_proxies�proxyZusernameZpasswordr&r&r*r\s*

z$SessionRedirectMixin.rebuild_proxiescCsX|j}|jtjkr|dkrd}|jtjkr6|dkr6d}|jtjkrN|dkrNd}||_dS)z�When being redirected we may want to change the method of the request
        based on certain specs or browser behavior.
        �HEAD�GET�POSTN)�methodrXr$Z	see_other�foundZmoved)r;rar2rmr&r&r*rW:sz#SessionRedirectMixin.rebuild_method)FNTNNF)	�__name__�
__module__�__qualname__r=rArcr]r\rWr&r&r&r*r5`s
k)r5c@s�eZdZdZdddddddd	d
ddd
dg
Zdd�Zdd�Zdd�Zdd�Zd7dd�Z	dd�Z
dd�Zdd �Zd8d!d"�Z
d9d#d$�Zd:d%d&�Zd'd(�Zd)d*�Zd+d,�Zd-d.�Zd/d0�Zd1d2�Zd3d4�Zd5d6�ZdS);�Sessiona~A Requests session.

    Provides cookie persistence, connection-pooling, and configuration.

    Basic Usage::

      >>> import requests
      >>> s = requests.Session()
      >>> s.get('http://httpbin.org/get')
      <Response [200]>

    Or as a context manager::

      >>> with requests.Session() as s:
      >>>     s.get('http://httpbin.org/get')
      <Response [200]>
    r9r[�authrI�hooks�paramsrGrHZprefetch�adaptersrErerScCsrt�|_d|_i|_t�|_i|_d|_d|_d|_	t
|_d|_t
i�|_t�|_|jdt��|jdt��dS)NFTzhttps://zhttp://)rr9rsrIrrtrurErGrHrrSrerr[rrv�mountr)r;r&r&r*�__init__js
zSession.__init__cCs|S)Nr&)r;r&r&r*�	__enter__�szSession.__enter__cGs|j�dS)N)rT)r;�argsr&r&r*�__exit__�szSession.__exit__c
Cs�|jpi}t|tj�st|�}ttt�|j�|�}|j}|jrV|rV|jrVt	|j
�}t�}|j|j
j�|j
|j|j|jt|j|jtd�t|j|j�t||j�|t|j|j�d�
|S)a�Constructs a :class:`PreparedRequest <PreparedRequest>` for
        transmission and returns it. The :class:`PreparedRequest` has settings
        merged from the :class:`Request <Request>` instance and those of the
        :class:`Session`.

        :param request: :class:`Request` instance to prepare with this
            session's settings.
        :rtype: requests.PreparedRequest
        )r/)
rmrV�files�data�jsonr9rursr[rt)r[r,rZ	CookieJarrrr
rsrerrVrZpreparerm�upperr|r}r~r1r9rrur4rt)r;rdr[Zmerged_cookiesrs�pr&r&r*�prepare_request�s*



zSession.prepare_requestNTcCstt|j�||||pi||pi|||d�
}|j|�}|p8i}|j|j||
||�}|	|
d�}|j|�|j|f|�}|S)a�Constructs a :class:`Request <Request>`, prepares it and sends it.
        Returns :class:`Response <Response>` object.

        :param method: method for the new :class:`Request` object.
        :param url: URL for the new :class:`Request` object.
        :param params: (optional) Dictionary or bytes to be sent in the query
            string for the :class:`Request`.
        :param data: (optional) Dictionary, bytes, or file-like object to send
            in the body of the :class:`Request`.
        :param json: (optional) json to send in the body of the
            :class:`Request`.
        :param headers: (optional) Dictionary of HTTP Headers to send with the
            :class:`Request`.
        :param cookies: (optional) Dict or CookieJar object to send with the
            :class:`Request`.
        :param files: (optional) Dictionary of ``'filename': file-like-objects``
            for multipart encoding upload.
        :param auth: (optional) Auth tuple or callable to enable
            Basic/Digest/Custom HTTP Auth.
        :param timeout: (optional) How long to wait for the server to send
            data before giving up, as a float, or a :ref:`(connect timeout,
            read timeout) <timeouts>` tuple.
        :type timeout: float or tuple
        :param allow_redirects: (optional) Set to True by default.
        :type allow_redirects: bool
        :param proxies: (optional) Dictionary mapping protocol or protocol and
            hostname to the URL of the proxy.
        :param stream: (optional) whether to immediately download the response
            content. Defaults to ``False``.
        :param verify: (optional) Either a boolean, in which case it controls whether we verify
            the server's TLS certificate, or a string, in which case it must be a path
            to a CA bundle to use. Defaults to ``True``.
        :param cert: (optional) if String, path to ssl client cert file (.pem).
            If Tuple, ('cert', 'key') pair.
        :rtype: requests.Response
        )
rmrVr9r|r}r~rursr[rt)rFrJ)rrr��merge_environment_settingsrVr-r^)r;rmrVrur}r9r[r|rsrFrJrIrtrErGrHr~r_ZprepZsettingsZsend_kwargsr<r&r&r*rd�s()

zSession.requestcKs|jdd�|jd|f|�S)z�Sends a GET request. Returns :class:`Response` object.

        :param url: URL for the new :class:`Request` object.
        :param \*\*kwargs: Optional arguments that ``request`` takes.
        :rtype: requests.Response
        rJTrk)rhrd)r;rV�kwargsr&r&r*r3szSession.getcKs|jdd�|jd|f|�S)z�Sends a OPTIONS request. Returns :class:`Response` object.

        :param url: URL for the new :class:`Request` object.
        :param \*\*kwargs: Optional arguments that ``request`` takes.
        :rtype: requests.Response
        rJTZOPTIONS)rhrd)r;rVr�r&r&r*�options!szSession.optionscKs|jdd�|jd|f|�S)z�Sends a HEAD request. Returns :class:`Response` object.

        :param url: URL for the new :class:`Request` object.
        :param \*\*kwargs: Optional arguments that ``request`` takes.
        :rtype: requests.Response
        rJFrj)rhrd)r;rVr�r&r&r*�head,szSession.headcKs|jd|f||d�|��S)a�Sends a POST request. Returns :class:`Response` object.

        :param url: URL for the new :class:`Request` object.
        :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
        :param json: (optional) json to send in the body of the :class:`Request`.
        :param \*\*kwargs: Optional arguments that ``request`` takes.
        :rtype: requests.Response
        rl)r}r~)rd)r;rVr}r~r�r&r&r*�post7s
zSession.postcKs|jd|fd|i|��S)aYSends a PUT request. Returns :class:`Response` object.

        :param url: URL for the new :class:`Request` object.
        :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
        :param \*\*kwargs: Optional arguments that ``request`` takes.
        :rtype: requests.Response
        ZPUTr})rd)r;rVr}r�r&r&r*�putCs	zSession.putcKs|jd|fd|i|��S)a[Sends a PATCH request. Returns :class:`Response` object.

        :param url: URL for the new :class:`Request` object.
        :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
        :param \*\*kwargs: Optional arguments that ``request`` takes.
        :rtype: requests.Response
        ZPATCHr})rd)r;rVr}r�r&r&r*�patchNs	z
Session.patchcKs|jd|f|�S)z�Sends a DELETE request. Returns :class:`Response` object.

        :param url: URL for the new :class:`Request` object.
        :param \*\*kwargs: Optional arguments that ``request`` takes.
        :rtype: requests.Response
        ZDELETE)rd)r;rVr�r&r&r*�deleteYszSession.deletec
Ks~|jd|j�|jd|j�|jd|j�|jd|j�t|t�rJtd��|jdd�}|j	d�}|j
}|j|jd�}t
�}|j|f|�}t
�|}	t|	d	�|_td
||f|�}|jr�x |jD]}
t|j|
j|
j�q�Wt|j||j�|j||f|�}|�r
dd�|D�ng}|�r.|jd
|�|j�}||_|�sny"t|j||fddi|���|_Wntk
�rlYnX|�sz|j|S)zISend a given PreparedRequest.

        :rtype: requests.Response
        rErGrHrIz#You can only send PreparedRequests.rJT)rV)Zsecondsr2cSsg|]}|�qSr&r&)r'r<r&r&r*r+�sz Session.send.<locals>.<listcomp>rr`)rhrErGrHrIr,r�
ValueErrorrYr3rt�get_adapterrV�preferred_clockr^r�elapsedrrMrr[rdrPrc�insert�nextZ_next�
StopIterationrN)
r;rdr�rJrErt�adapter�start�rr�r<�genrMr&r&r*r^csB


"zSession.sendc
Cs�|jrr|dk	r|jd�nd}t||d�}x |j�D]\}}	|j||	�q2W|dksZ|dkrrtjjd�pptjjd�}t||j�}t||j	�}t||j
�}t||j�}||||d�S)z^
        Check the environment and merge it with some settings.

        :rtype: dict
        Nrf)rfTZREQUESTS_CA_BUNDLEZCURL_CA_BUNDLE)rGrIrErH)rer3rr.rh�os�environr1rIrErGrH)
r;rVrIrErGrHrfZenv_proxiesr(r)r&r&r*r��sz"Session.merge_environment_settingscCs:x(|jj�D]\}}|j�j|�r|SqWtd|��dS)z~
        Returns the appropriate connection adapter for the given URL.

        :rtype: requests.adapters.BaseAdapter
        z*No connection adapters were found for '%s'N)rvr.�lowerrUr)r;rV�prefixr�r&r&r*r��szSession.get_adaptercCs x|jj�D]}|j�qWdS)z+Closes all adapters and as such the sessionN)rv�valuesrT)r;r)r&r&r*rT�sz
Session.closecsB||j�<�fdd�|jD�}x|D]}|jj|�|j|<q$WdS)zwRegisters a connection adapter to a prefix.

        Adapters are sorted in descending order by prefix length.
        cs g|]}t|�t��kr|�qSr&)rR)r'r()r�r&r*r+�sz!Session.mount.<locals>.<listcomp>N)rvrY)r;r�r�Zkeys_to_mover0r&)r�r*rw�s

z
Session.mountcst�fdd��jD��}|S)Nc3s|]}|t�|d�fVqdS)N)�getattr)r'�attr)r;r&r*�	<genexpr>�sz'Session.__getstate__.<locals>.<genexpr>)�dict�	__attrs__)r;�stater&)r;r*�__getstate__�szSession.__getstate__cCs&x |j�D]\}}t|||�q
WdS)N)r.�setattr)r;r�r��valuer&r&r*�__setstate__�szSession.__setstate__)NNNNNNNTNNNNNN)NN)N)N)rorprq�__doc__r�rxryr{r�rdr3r�r�r�r�r�r�r^r�r�rTrwr�r�r&r&r&r*rrQs2
7)
D



IrrcCst�S)zQ
    Returns a :class:`Session` for context-management.

    :rtype: Session
    )rrr&r&r&r*�session�sr�)?r�r��platformZtime�collectionsrZdatetimerrsr�compatrrrr	r
r[rrr
rZmodelsrrrrtrrZ_internal_utilsrZutilsrr�
exceptionsrrrrZ
structuresrrvrrrrr r!r"r#Zstatus_codesr$r%�systemZperf_counterr��AttributeErrorZclockr1r4�objectr5rrr�r&r&r&r*�<module>	sB$
r"_vendor/requests/__pycache__/structures.cpython-36.pyc000064400000010346151733136400017143 0ustar003

�Pf��@s>dZddlZddlmZGdd�dej�ZGdd�de�ZdS)	zO
requests.structures
~~~~~~~~~~~~~~~~~~~

Data structures that power Requests.
�N�)�OrderedDictc@sbeZdZdZddd�Zdd�Zdd�Zd	d
�Zdd�Zd
d�Z	dd�Z
dd�Zdd�Zdd�Z
dS)�CaseInsensitiveDicta�A case-insensitive ``dict``-like object.

    Implements all methods and operations of
    ``collections.MutableMapping`` as well as dict's ``copy``. Also
    provides ``lower_items``.

    All keys are expected to be strings. The structure remembers the
    case of the last key to be set, and ``iter(instance)``,
    ``keys()``, ``items()``, ``iterkeys()``, and ``iteritems()``
    will contain case-sensitive keys. However, querying and contains
    testing is case insensitive::

        cid = CaseInsensitiveDict()
        cid['Accept'] = 'application/json'
        cid['aCCEPT'] == 'application/json'  # True
        list(cid) == ['Accept']  # True

    For example, ``headers['content-encoding']`` will return the
    value of a ``'Content-Encoding'`` response header, regardless
    of how the header name was originally stored.

    If the constructor, ``.update``, or equality comparison
    operations are given keys that have equal ``.lower()``s, the
    behavior is undefined.
    NcKs&t�|_|dkri}|j|f|�dS)N)r�_store�update)�self�data�kwargs�r
� /usr/lib/python3.6/structures.py�__init__*szCaseInsensitiveDict.__init__cCs||f|j|j�<dS)N)r�lower)r�key�valuer
r
r�__setitem__0szCaseInsensitiveDict.__setitem__cCs|j|j�dS)Nr)rr
)rrr
r
r�__getitem__5szCaseInsensitiveDict.__getitem__cCs|j|j�=dS)N)rr
)rrr
r
r�__delitem__8szCaseInsensitiveDict.__delitem__cCsdd�|jj�D�S)Ncss|]\}}|VqdS)Nr
)�.0ZcasedkeyZmappedvaluer
r
r�	<genexpr><sz/CaseInsensitiveDict.__iter__.<locals>.<genexpr>)r�values)rr
r
r�__iter__;szCaseInsensitiveDict.__iter__cCs
t|j�S)N)�lenr)rr
r
r�__len__>szCaseInsensitiveDict.__len__cCsdd�|jj�D�S)z.Like iteritems(), but with all lowercase keys.css|]\}}||dfVqdS)rNr
)rZlowerkeyZkeyvalr
r
rrDsz2CaseInsensitiveDict.lower_items.<locals>.<genexpr>)r�items)rr
r
r�lower_itemsAszCaseInsensitiveDict.lower_itemscCs2t|tj�rt|�}ntSt|j��t|j��kS)N)�
isinstance�collections�Mappingr�NotImplemented�dictr)r�otherr
r
r�__eq__Is
zCaseInsensitiveDict.__eq__cCst|jj��S)N)rrr)rr
r
r�copyRszCaseInsensitiveDict.copycCstt|j���S)N)�strrr)rr
r
r�__repr__UszCaseInsensitiveDict.__repr__)N)�__name__�
__module__�__qualname__�__doc__rrrrrrrr!r"r$r
r
r
rrs
	rcs<eZdZdZd�fdd�	Zdd�Zdd�Zdd	d
�Z�ZS)
�
LookupDictzDictionary lookup object.Ncs||_tt|�j�dS)N)�name�superr)r)rr*)�	__class__r
rr\szLookupDict.__init__cCs
d|jS)Nz
<lookup '%s'>)r*)rr
r
rr$`szLookupDict.__repr__cCs|jj|d�S)N)�__dict__�get)rrr
r
rrcszLookupDict.__getitem__cCs|jj||�S)N)r-r.)rr�defaultr
r
rr.hszLookupDict.get)N)N)	r%r&r'r(rr$rr.�
__classcell__r
r
)r,rr)Ys
r))r(r�compatr�MutableMappingrrr)r
r
r
r�<module>sJ_vendor/requests/__pycache__/__version__.cpython-36.pyc000064400000000727151733136400017203 0ustar003

�Pf��@s,dZdZdZdZdZdZdZdZdZd	Z	d
S)ZrequestszPython HTTP for Humans.zhttp://python-requests.orgz2.18.4iz
Kenneth Reitzzme@kennethreitz.orgz
Apache 2.0zCopyright 2017 Kenneth Reitzu✨ 🍰 ✨N)
Z	__title__Z__description__Z__url__�__version__Z	__build__�
__author__Z__author_email__Z__license__Z
__copyright__Z__cake__�rr�!/usr/lib/python3.6/__version__.py�<module>s_vendor/requests/__pycache__/utils.cpython-36.pyc000064400000050320151733136400016054 0ustar003

�Pf/l�@s�dZddlZddlZddlZddlZddlZddlZddlZddlZddl	Z	ddl
Z
ddlZddlmZddl
mZddlmZddlmZddlmZmZmZmZmZmZmZmZmZmZmZmZm Z m!Z!dd	l"m#Z#dd
l$m%Z%ddl&m'Z'm(Z(m)Z)m*Z*dfZ+ej,�Z-ddd�Z.ej/�dk�r0dd�Z0dd�Zdd�Z1dd�Z2dgdd�Z3dd�Z4dd �Z5d!d"�Z6d#d$�Z7d%d&�Z8dhd'd(�Z9d)d*�Z:d+d,�Z;d-d.�Z<d/d0�Z=d1d2�Z>d3d4�Z?d5d6�Z@eAdi�ZBd9d:�ZCd;d<�ZDd=d>�ZEd?d@�ZFdAdB�ZGdCdD�ZHejIdEdF��ZJdGdH�ZKdjdIdJ�ZLdKdL�ZMdkdNdO�ZNdPdQ�ZOdRdS�ZPdTjQdU�ZReRdVZSeRdWZTdXdY�ZUdZd[�ZVd\d]�ZWejXd^�ZYejXd_�ZZd`da�Z[dbdc�Z\ddde�Z]dS)lz�
requests.utils
~~~~~~~~~~~~~~

This module provides utility functions that are used within Requests
that are also useful for external consumption.
�N�)�__version__)�certs)�to_native_string)�parse_http_list)�quote�urlparse�bytes�str�OrderedDict�unquote�
getproxies�proxy_bypass�
urlunparse�
basestring�
integer_types�is_py3�proxy_bypass_environment�getproxies_environment)�cookiejar_from_dict)�CaseInsensitiveDict)�
InvalidURL�
InvalidHeader�FileModeWarning�UnrewindableBodyError�.netrc�_netrc�Pi�)ZhttpZhttpsZWindowsc
Cs�trddl}nddl}y2|j|jd�}|j|d�d}|j|d�d}Wntk
r\dSX|sj|rndS|jd�}xX|D]P}|dkr�d|kr�d	S|jdd
�}|jdd�}|jd
d�}t	j
||t	j�r~d	Sq~WdS)Nrz;Software\Microsoft\Windows\CurrentVersion\Internet SettingsZProxyEnableZ
ProxyOverrideF�;z<local>�.Tz\.�*z.*�?)r�winreg�_winreg�OpenKey�HKEY_CURRENT_USERZQueryValueEx�OSError�split�replace�re�match�I)�hostr"ZinternetSettingsZproxyEnableZ
proxyOverrideZtest�r-�/usr/lib/python3.6/utils.py�proxy_bypass_registry.s2



r/cCst�rt|�St|�SdS)z�Return True, if the host should be bypassed.

        Checks proxy settings gathered from the environment, if specified,
        or the registry.
        N)rrr/)r,r-r-r.rOsrcCst|d�r|j�}|S)z/Returns an internal sequence dictionary update.�items)�hasattrr0)�dr-r-r.�dict_to_sequence[s
r3cCs2d}d}t|d�rt|�}nbt|d�r.|j}nPt|d�r~y|j�}Wntjk
rZYn$Xtj|�j}d|jkr~t	j
dt�t|d��ry|j�}Wn$t
tfk
r�|dk	r�|}Yn\Xt|d�o�|dk�ry&|jdd	�|j�}|j|p�d�Wnt
tfk
�rd}YnX|dk�r$d}td||�S)
Nr�__len__�len�fileno�ba%Requests has determined the content-length for this request using the binary size of the file: however, the file has been opened in text mode (i.e. without the 'b' flag in the mode). This may lead to an incorrect content-length. In Requests 3.0, support will be removed for files in text mode.�tell�seek�)r1r5r6�io�UnsupportedOperation�os�fstat�st_size�mode�warnings�warnrr8r&�IOErrorr9�max)�oZtotal_lengthZcurrent_positionr6r-r-r.�	super_lends@







rFFcCsy�ddlm}m}d}xJtD]B}ytjjdj|��}Wntk
rJdSXtjj|�r|}PqW|dkrndSt	|�}d}t
|t�r�|jd�}|j
j|�d}	y6||�j|	�}
|
r�|
dr�dnd}|
||
dfSWn|tfk
r�|r�YnXWnttfk
�rYnXdS)	z;Returns the Requests tuple auth for a given url from netrc.r)�netrc�NetrcParseErrorNz~/{0}�:�asciirr:)rGrH�NETRC_FILESr=�path�
expanduser�format�KeyError�existsr�
isinstancer
�decode�netlocr'ZauthenticatorsrC�ImportError�AttributeError)�urlZraise_errorsrGrHZ
netrc_path�f�locZriZsplitstrr,rZlogin_ir-r-r.�get_netrc_auth�s8


rYcCsBt|dd�}|r>t|t�r>|ddkr>|ddkr>tjj|�SdS)z0Tries to guess the filename of the given object.�nameNr�<r�>���)�getattrrQrr=rL�basename)�objrZr-r-r.�guess_filename�sracCs.|dkrdSt|ttttf�r&td��t|�S)a�Take an object and test to see if it can be represented as a
    dictionary. Unless it can not be represented as such, return an
    OrderedDict, e.g.,

    ::

        >>> from_key_val_list([('key', 'val')])
        OrderedDict([('key', 'val')])
        >>> from_key_val_list('string')
        ValueError: need more than 1 value to unpack
        >>> from_key_val_list({'key': 'val'})
        OrderedDict([('key', 'val')])

    :rtype: OrderedDict
    Nz+cannot encode objects that are not 2-tuples)rQr
r	�bool�int�
ValueErrorr)�valuer-r-r.�from_key_val_list�s
rfcCsB|dkrdSt|ttttf�r&td��t|tj�r:|j�}t	|�S)a�Take an object and test to see if it can be represented as a
    dictionary. If it can be, return a list of tuples, e.g.,

    ::

        >>> to_key_val_list([('key', 'val')])
        [('key', 'val')]
        >>> to_key_val_list({'key': 'val'})
        [('key', 'val')]
        >>> to_key_val_list('string')
        ValueError: cannot encode objects that are not 2-tuples.

    :rtype: list
    Nz+cannot encode objects that are not 2-tuples)
rQr
r	rbrcrd�collections�Mappingr0�list)rer-r-r.�to_key_val_list�srjcCs\g}xRt|�D]F}|dd�|dd�ko4dknrJt|dd��}|j|�qW|S)aParse lists as described by RFC 2068 Section 2.

    In particular, parse comma-separated lists where the elements of
    the list may include quoted-strings.  A quoted-string could
    contain a comma.  A non-quoted string could have quotes in the
    middle.  Quotes are removed automatically after parsing.

    It basically works like :func:`parse_set_header` just that items
    may appear multiple times and case sensitivity is preserved.

    The return value is a standard :class:`list`:

    >>> parse_list_header('token, "quoted value"')
    ['token', 'quoted value']

    To create a header from the :class:`list` again, use the
    :func:`dump_header` function.

    :param value: a string with a list header.
    :return: :class:`list`
    :rtype: list
    Nr�"r]r])�_parse_list_header�unquote_header_value�append)re�result�itemr-r-r.�parse_list_headers(rqcCs|i}xrt|�D]f}d|kr$d||<q|jdd�\}}|dd�|dd�koVdknrlt|dd��}|||<qW|S)a^Parse lists of key, value pairs as described by RFC 2068 Section 2 and
    convert them into a python dict:

    >>> d = parse_dict_header('foo="is a fish", bar="as well"')
    >>> type(d) is dict
    True
    >>> sorted(d.items())
    [('bar', 'as well'), ('foo', 'is a fish')]

    If there is no value for a key it will be `None`:

    >>> parse_dict_header('key_without_value')
    {'key_without_value': None}

    To create a header from the :class:`dict` again, use the
    :func:`dump_header` function.

    :param value: a string with a dict header.
    :return: :class:`dict`
    :rtype: dict
    �=Nrrkr]r])rlr'rm)rerorprZr-r-r.�parse_dict_header1s(rscCs^|rZ|d|d	kodknrZ|dd
�}|sF|dd�dkrZ|jdd�jdd�S|S)z�Unquotes a header value.  (Reversal of :func:`quote_header_value`).
    This does not use the real unquoting but what browsers are actually
    using for quoting.

    :param value: the header value to unquote.
    :rtype: str
    rrrkNr:z\\�\z\"r]r])r()reZis_filenamer-r-r.rmTs
$rmcCs"i}x|D]}|j||j<q
W|S)z�Returns a key/value dictionary from a CookieJar.

    :param cj: CookieJar object to extract cookies from.
    :rtype: dict
    )rerZ)�cj�cookie_dictZcookier-r-r.�dict_from_cookiejarms
rwcCs
t||�S)z�Returns a CookieJar from a key/value dictionary.

    :param cj: CookieJar to insert cookies into.
    :param cookie_dict: Dict of key/values to insert into CookieJar.
    :rtype: CookieJar
    )r)rurvr-r-r.�add_dict_to_cookiejar|srxcCsTtjdt�tjdtjd�}tjdtjd�}tjd�}|j|�|j|�|j|�S)zlReturns encodings from given content string.

    :param content: bytestring to extract encodings from.
    z�In requests 3.0, get_encodings_from_content will be removed. For more information, please see the discussion on issue #2266. (This warning should only appear once.)z!<meta.*?charset=["\']*(.+?)["\'>])�flagsz+<meta.*?content=["\']*;?charset=(.+?)["\'>]z$^<\?xml.*?encoding=["\']*(.+?)["\'>])rArB�DeprecationWarningr)�compiler+�findall)�contentZ
charset_reZ	pragma_reZxml_rer-r-r.�get_encodings_from_content�s
r~cCsF|jd�}|sdStj|�\}}d|kr6|djd�Sd|krBdSdS)z}Returns encodings from given HTTP Header Dict.

    :param headers: dictionary to extract encoding from.
    :rtype: str
    zcontent-typeN�charsetz'"�textz
ISO-8859-1)�get�cgiZparse_header�strip)�headersZcontent_type�paramsr-r-r.�get_encoding_from_headers�s
r�ccsr|jdkr"x|D]
}|VqWdStj|j�dd�}x |D]}|j|�}|r:|Vq:W|jddd�}|rn|VdS)zStream decodes a iterator.Nr()�errors�T)�final)�encoding�codecs�getincrementaldecoderrR)�iterator�rrp�decoder�chunk�rvr-r-r.�stream_decode_response_unicode�s





r�ccsLd}|dks|dkrt|�}x*|t|�krF||||�V||7}qWdS)z Iterate over slices of a string.rN)r5)�stringZslice_length�posr-r-r.�iter_slices�sr�cCsvtjdt�g}t|j�}|rJyt|j|�Stk
rH|j|�YnXyt|j|dd�St	k
rp|jSXdS)z�Returns the requested content back in unicode.

    :param r: Response object to get unicode content from.

    Tried:

    1. charset from content-type
    2. fall back and replace all unicode characters

    :rtype: str
    z�In requests 3.0, get_unicode_from_response will be removed. For more information, please see the discussion on issue #2266. (This warning should only appear once.)r()r�N)
rArBrzr�r�r
r}�UnicodeErrorrn�	TypeError)r�Ztried_encodingsr�r-r-r.�get_unicode_from_response�s
r�Z4ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyzz0123456789-._~c
Cs�|jd�}x�tdt|��D]�}||dd�}t|�dkr�|j�r�ytt|d��}Wn tk
rttd|��YnX|tkr�|||dd�||<q�d||||<qd||||<qWdj	|�S)	z�Un-escape any percent-escape sequences in a URI that are unreserved
    characters. This leaves all reserved, illegal and non-ASCII bytes encoded.

    :rtype: str
    �%rrr:�z%Invalid percent-escape sequence: '%s'N�)
r'�ranger5�isalnum�chrrcrdr�UNRESERVED_SET�join)�uri�parts�i�h�cr-r-r.�unquote_unreserved�s
r�cCs:d}d}ytt|�|d�Stk
r4t||d�SXdS)z�Re-quote the given URI.

    This function passes the given URI through an unquote/quote cycle to
    ensure that it is fully and consistently quoted.

    :rtype: str
    z!#$%&'()*+,/:;=?@[]~z!#$&'()*+,/:;=?@[]~)ZsafeN)rr�r)r�Zsafe_with_percentZsafe_without_percentr-r-r.�requote_uri
sr�cCsltjdtj|��d}|jd�\}}tjdtjtt|����d}tjdtj|��d|@}||@||@kS)z�This function allows you to check if an IP belongs to a network subnet

    Example: returns True if ip = 192.168.1.1 and net = 192.168.1.0/24
             returns False if ip = 192.168.1.1 and net = 192.168.100.0/24

    :rtype: bool
    z=Lr�/)�struct�unpack�socket�	inet_atonr'�dotted_netmaskrc)�ipZnetZipaddrZnetaddr�bitsZnetmaskZnetworkr-r-r.�address_in_network#s
r�cCs&ddd|>dA}tjtjd|��S)z�Converts mask from /xx format to xxx.xxx.xxx.xxx

    Example: if mask is 24 function returns 255.255.255.0

    :rtype: str
    l��r� z>I)r�Z	inet_ntoar��pack)�maskr�r-r-r.r�2sr�cCs*ytj|�Wntjk
r$dSXdS)z
    :rtype: bool
    FT)r�r��error)Z	string_ipr-r-r.�is_ipv4_address=s
r�cCs�|jd�dkr�yt|jd�d�}Wntk
r8dSX|dksJ|dkrNdSytj|jd�d�Wq�tjk
r|dSXndSdS)zV
    Very simple check of the cidr format in no_proxy variable.

    :rtype: bool
    r�rFr�rT)�countrcr'rdr�r�r�)Zstring_networkr�r-r-r.�
is_valid_cidrHsr�ccsT|dk	}|r"tjj|�}|tj|<z
dVWd|rN|dkrDtj|=n
|tj|<XdS)z�Set the environment variable 'env_name' to 'value'

    Save previous value, yield, and then restore the previous value stored in
    the environment variable 'env_name'.

    If 'value' is None, do nothingN)r=�environr�)Zenv_namereZ
value_changedZ	old_valuer-r-r.�set_environ`s


r�c	Csdd�}|}|dkr|d�}t|�j}|r�dd�|jdd�jd	�D�}|jd
�d}t|�r�xb|D](}t|�r~t||�r�dSqb||krbdSqbWn0x.|D]&}|j|�s�|jd
�dj|�r�dSq�Wtd|��2yt	|�}Wnt
tjfk
r�d
}YnXWdQRX|�rdSd
S)zL
    Returns whether we should bypass proxies or not.

    :rtype: bool
    cSstjj|�ptjj|j��S)N)r=r�r��upper)�kr-r-r.�<lambda>|sz'should_bypass_proxies.<locals>.<lambda>N�no_proxycss|]}|r|VqdS)Nr-)�.0r,r-r-r.�	<genexpr>�sz(should_bypass_proxies.<locals>.<genexpr>� r��,�:rTF)
rrSr(r'r�r�r��endswithr�rr�r�Zgaierror)	rVr�Z	get_proxyZno_proxy_argrSr�Zproxy_ipr,Zbypassr-r-r.�should_bypass_proxiesvs4




r�cCst||d�riSt�SdS)zA
    Return a dict of environment proxies.

    :rtype: dict
    )r�N)r�r
)rVr�r-r-r.�get_environ_proxies�sr�cCsv|pi}t|�}|jdkr.|j|j|jd��S|jd|j|jd|jdg}d}x|D]}||krX||}PqXW|S)z�Select a proxy for the url, if applicable.

    :param url: The url being for the request
    :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs
    N�allz://zall://)rZhostnamer��scheme)rVZproxiesZurlpartsZ
proxy_keys�proxyZ	proxy_keyr-r-r.�select_proxy�s

r��python-requestscCsd|tfS)zO
    Return a string representing the default user agent.

    :rtype: str
    z%s/%s)r)rZr-r-r.�default_user_agent�sr�cCstt�djd�ddd��S)z9
    :rtype: requests.structures.CaseInsensitiveDict
    z, �gzip�deflatez*/*z
keep-alive)z
User-AgentzAccept-EncodingZAcceptZ
Connection)r�r�)rr�r�r-r-r-r.�default_headers�s
r�c	Cs�g}d}x�tjd|�D]�}y|jdd�\}}Wntk
rL|d}}YnXd|jd�i}xP|jd�D]B}y|jd�\}}Wntk
r�PYnX|j|�||j|�<qhW|j|�qW|S)	z�Return a dict of parsed link headers proxies.

    i.e. Link: <http:/.../front.jpeg>; rel=front; type="image/jpeg",<http://.../back.jpeg>; rel=back;type="image/jpeg"

    :rtype: list
    z '"z, *<rrr�rVz<> '"rr)r)r'rdr�rn)	reZlinksZ
replace_chars�valrVr��linkZparam�keyr-r-r.�parse_header_links�s r��rJr:�cCs�|dd�}|tjtjfkr dS|dd�tjkr6dS|dd�tjtjfkrRdS|jt�}|dkrhd	S|dkr�|ddd�tkr�d
S|ddd�tkr�dS|dkr�|dd�t	kr�d
S|dd�t	kr�dSdS)z
    :rtype: str
    N�zutf-32r�z	utf-8-sigr:zutf-16rzutf-8z	utf-16-berz	utf-16-lez	utf-32-bez	utf-32-le)
r��BOM_UTF32_LE�BOM_UTF32_BE�BOM_UTF8�BOM_UTF16_LE�BOM_UTF16_BEr��_null�_null2�_null3)�dataZsampleZ	nullcountr-r-r.�guess_json_utfs*
r�cCs8t||�\}}}}}}|s$||}}t||||||f�S)z�Given a URL that may or may not have a scheme, prepend the given scheme.
    Does not replace a present scheme with the one provided as an argument.

    :rtype: str
    )rr)rVZ
new_schemer�rSrLr��query�fragmentr-r-r.�prepend_scheme_if_needed1s
r�cCsBt|�}yt|j�t|j�f}Wnttfk
r<d}YnX|S)z{Given a url with authentication components, extract them into a tuple of
    username,password.

    :rtype: (str,str)
    r�)r�r�)rrZusernameZpasswordrUr�)rVZparsedZauthr-r-r.�get_auth_from_urlBs
r�s^\S[^\r\n]*$|^$z^\S[^\r\n]*$|^$cCsf|\}}t|t�rt}nt}y|j|�s4td|��Wn*tk
r`td||t|�f��YnXdS)z�Verifies that header value is a string which doesn't contain
    leading whitespace or return characters. This prevents unintended
    header injection.

    :param header: tuple, in the format (name, value).
    z7Invalid return character or leading space in header: %sz>Value for header {%s: %s} must be of type str or bytes, not %sN)rQr	�_CLEAN_HEADER_REGEX_BYTE�_CLEAN_HEADER_REGEX_STRr*rr��type)�headerrZreZpatr-r-r.�check_header_validityWs

r�cCsFt|�\}}}}}}|s"||}}|jdd�d}t|||||df�S)zW
    Given a url remove the fragment and the authentication part.

    :rtype: str
    �@rr�r])r�rsplitr)rVr�rSrLr�r�r�r-r-r.�
urldefragauthls

r�cCs`t|jdd�}|dk	rTt|jt�rTy||j�Wq\ttfk
rPtd��Yq\Xntd��dS)zfMove file pointer back to its recorded starting position
    so it can be read again on redirect.
    r9Nz;An error occurred when rewinding request body for redirect.z+Unable to rewind request body for redirect.)r^ZbodyrQZ_body_positionrrCr&r)Zprepared_requestZ	body_seekr-r-r.�rewind_body}sr�)rr)F)FzBABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-._~)N)r�)^�__doc__r�r�rg�
contextlibr;r=�platformr)r�r�rArr�rZ_internal_utilsr�compatrrlrrr	r
rrr
rrrrrrrZcookiesrZ
structuresr�
exceptionsrrrrrK�whereZDEFAULT_CA_BUNDLE_PATHZ
DEFAULT_PORTS�systemr/r3rFrYrarfrjrqrsrmrwrxr~r�r�r�r��	frozensetr�r�r�r�r�r�r��contextmanagerr�r�r�r�r�r�r��encoder�r�r�r�r�r�r{r�r�r�r�r�r-r-r-r.�<module>	s�@
!	=
3 #

%9

	"
 

_vendor/requests/__pycache__/adapters.cpython-36.opt-1.pyc000064400000040074151733136400017463 0ustar003

�PfR�@s�dZddlZddlZddlmZmZddlmZddl	m
Zddlm
Z
ddlmZddlmZdd	lmZdd
lmZddlmZddlmZdd
lmZddlmZddlmZddlmZddlmZddlmZm Z ddl!m"Z"m#Z#m$Z$m%Z%m&Z&m'Z'ddl(m)Z)ddl*m+Z+ddl,m-Z-m.Z.m/Z/mZmZm0Z0m1Z1ddl2m3Z3yddl4m5Z5Wne6k
�rrdd�Z5YnXdZ7dZ8dZ9dZ:Gdd�de;�Z<Gd d!�d!e<�Z=dS)"z�
requests.adapters
~~~~~~~~~~~~~~~~~

This module contains the transport adapters that Requests uses to define
and maintain connections.
�N)�PoolManager�proxy_from_url)�HTTPResponse)�Timeout)�Retry)�ClosedPoolError)�ConnectTimeoutError)�	HTTPError)�
MaxRetryError)�NewConnectionError)�
ProxyError)�
ProtocolError)�ReadTimeoutError)�SSLError)�
ResponseError�)�Response)�urlparse�
basestring)�DEFAULT_CA_BUNDLE_PATH�get_encoding_from_headers�prepend_scheme_if_needed�get_auth_from_url�
urldefragauth�select_proxy)�CaseInsensitiveDict)�extract_cookies_to_jar)�ConnectionError�ConnectTimeout�ReadTimeoutrr�
RetryError�
InvalidSchema)�_basic_auth_str)�SOCKSProxyManagercOstd��dS)Nz'Missing dependencies for SOCKS support.)r!)�args�kwargs�r&�/usr/lib/python3.6/adapters.pyr#+sr#F�
cs2eZdZdZ�fdd�Zddd�Zd	d
�Z�ZS)�BaseAdapterzThe Base Transport Adaptercstt|�j�dS)N)�superr)�__init__)�self)�	__class__r&r'r+7szBaseAdapter.__init__FNTcCst�dS)aCSends PreparedRequest object. Returns Response object.

        :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
        :param stream: (optional) Whether to stream the request content.
        :param timeout: (optional) How long to wait for the server to send
            data before giving up, as a float, or a :ref:`(connect timeout,
            read timeout) <timeouts>` tuple.
        :type timeout: float or tuple
        :param verify: (optional) Either a boolean, in which case it controls whether we verify
            the server's TLS certificate, or a string, in which case it must be a path
            to a CA bundle to use
        :param cert: (optional) Any user-provided SSL certificate to be trusted.
        :param proxies: (optional) The proxies dictionary to apply to the request.
        N)�NotImplementedError)r,�request�stream�timeout�verify�cert�proxiesr&r&r'�send:szBaseAdapter.sendcCst�dS)z!Cleans up adapter specific items.N)r.)r,r&r&r'�closeLszBaseAdapter.close)FNTNN)�__name__�
__module__�__qualname__�__doc__r+r5r6�
__classcell__r&r&)r-r'r)4s

r)cs�eZdZdZdddddgZeeeef�fdd�	Zd	d
�Z	dd�Z
efd
d�Zdd�Zdd�Z
dd�Zd$dd�Zdd�Zdd�Zdd�Zdd�Zd%d"d#�Z�ZS)&�HTTPAdaptera�The built-in HTTP Adapter for urllib3.

    Provides a general-case interface for Requests sessions to contact HTTP and
    HTTPS urls by implementing the Transport Adapter interface. This class will
    usually be created by the :class:`Session <Session>` class under the
    covers.

    :param pool_connections: The number of urllib3 connection pools to cache.
    :param pool_maxsize: The maximum number of connections to save in the pool.
    :param max_retries: The maximum number of retries each connection
        should attempt. Note, this applies only to failed DNS lookups, socket
        connections and connection timeouts, never to requests where data has
        made it to the server. By default, Requests does not retry failed
        connections. If you need granular control over the conditions under
        which we retry a request, import urllib3's ``Retry`` class and pass
        that instead.
    :param pool_block: Whether the connection pool should block for connections.

    Usage::

      >>> import requests
      >>> s = requests.Session()
      >>> a = requests.adapters.HTTPAdapter(max_retries=3)
      >>> s.mount('http://', a)
    �max_retries�config�_pool_connections�
_pool_maxsize�_pool_blockcsd|tkrtddd�|_ntj|�|_i|_i|_tt|�j�||_	||_
||_|j|||d�dS)NrF)�read)�block)
�DEFAULT_RETRIESrr=Zfrom_intr>�
proxy_managerr*r<r+r?r@rA�init_poolmanager)r,Zpool_connectionsZpool_maxsizer=Z
pool_block)r-r&r'r+nszHTTPAdapter.__init__cst�fdd��jD��S)Nc3s|]}|t�|d�fVqdS)N)�getattr)�.0�attr)r,r&r'�	<genexpr>�sz+HTTPAdapter.__getstate__.<locals>.<genexpr>)�dict�	__attrs__)r,r&)r,r'�__getstate__�szHTTPAdapter.__getstate__cCsHi|_i|_x |j�D]\}}t|||�qW|j|j|j|jd�dS)N)rC)rEr>�items�setattrrFr?r@rA)r,�staterI�valuer&r&r'�__setstate__�szHTTPAdapter.__setstate__cKs0||_||_||_tf|||dd�|��|_dS)aInitializes a urllib3 PoolManager.

        This method should not be called from user code, and is only
        exposed for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param connections: The number of urllib3 connection pools to cache.
        :param maxsize: The maximum number of connections to save in the pool.
        :param block: Block when no free connections are available.
        :param pool_kwargs: Extra keyword arguments used to initialize the Pool Manager.
        T)�	num_pools�maxsizerC�strictN)r?r@rAr�poolmanager)r,ZconnectionsrTrCZpool_kwargsr&r&r'rF�s

zHTTPAdapter.init_poolmanagercKs�||jkr|j|}n||j�jd�r^t|�\}}t|f|||j|j|jd�|��}|j|<n4|j|�}t	|f||j|j|jd�|��}|j|<|S)a�Return urllib3 ProxyManager for the given proxy.

        This method should not be called from user code, and is only
        exposed for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param proxy: The proxy to return a urllib3 ProxyManager for.
        :param proxy_kwargs: Extra keyword arguments used to configure the Proxy Manager.
        :returns: ProxyManager
        :rtype: urllib3.ProxyManager
        �socks)�username�passwordrSrTrC)�
proxy_headersrSrTrC)
rE�lower�
startswithrr#r?r@rArZr)r,�proxyZproxy_kwargsZmanagerrXrYrZr&r&r'�proxy_manager_for�s*

zHTTPAdapter.proxy_manager_forcCs|j�jd�rn|rnd}|dk	r"|}|s*t}|s>tjj|�rLtdj|���d|_tjj	|�sf||_
q�||_nd|_d|_
d|_|�rt|t
�s�|d|_|d|_n||_d|_|jr�tjj|j�r�td	j|j���|jo�tjj|j��rtd
j|j���dS)aAVerify a SSL certificate. This method should not be called from user
        code, and is only exposed for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param conn: The urllib3 connection object associated with the cert.
        :param url: The requested URL.
        :param verify: Either a boolean, in which case it controls whether we verify
            the server's TLS certificate, or a string, in which case it must be a path
            to a CA bundle to use
        :param cert: The SSL certificate to verify.
        �httpsNTzFCould not find a suitable TLS CA certificate bundle, invalid path: {0}Z
CERT_REQUIREDZ	CERT_NONErrz:Could not find the TLS certificate file, invalid path: {0}z2Could not find the TLS key file, invalid path: {0})r[r\r�os�path�exists�IOError�formatZ	cert_reqs�isdirZca_certsZca_cert_dir�
isinstancerZ	cert_fileZkey_file)r,�conn�urlr2r3Zcert_locr&r&r'�cert_verify�s8


zHTTPAdapter.cert_verifycCs�t�}t|dd�|_tt|di��|_t|j�|_||_|jj|_t	|j
t�r^|j
jd�|_
n|j
|_
t
|j||�||_||_|S)a�Builds a :class:`Response <requests.Response>` object from a urllib3
        response. This should not be called from user code, and is only exposed
        for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`

        :param req: The :class:`PreparedRequest <PreparedRequest>` used to generate the response.
        :param resp: The urllib3 response object.
        :rtype: requests.Response
        ZstatusN�headerszutf-8)rrGZstatus_coderrjr�encoding�raw�reasonrfrh�bytes�decoder�cookiesr/�
connection)r,Zreq�respZresponser&r&r'�build_response�s

zHTTPAdapter.build_responseNcCsNt||�}|r.t|d�}|j|�}|j|�}nt|�}|j�}|jj|�}|S)a�Returns a urllib3 connection for the given URL. This should not be
        called from user code, and is only exposed for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param url: The URL to connect to.
        :param proxies: (optional) A Requests-style dictionary of proxies used on this request.
        :rtype: urllib3.ConnectionPool
        Zhttp)rrr^Zconnection_from_urlrZgeturlrV)r,rhr4r]rErgZparsedr&r&r'�get_connection"s	


zHTTPAdapter.get_connectioncCs*|jj�x|jj�D]}|j�qWdS)z�Disposes of any internal state.

        Currently, this closes the PoolManager and any active ProxyManager,
        which closes any pooled connections.
        N)rV�clearrE�values)r,r]r&r&r'r69s
zHTTPAdapter.closec	Csbt|j|�}t|j�j}|o"|dk}d}|rDt|�jj�}|jd�}|j}|r^|r^t|j�}|S)a?Obtain the url to use when making the final request.

        If the message is being sent through a HTTP proxy, the full URL has to
        be used. Otherwise, we should only use the path portion of the URL.

        This should not be called from user code, and is only exposed for use
        when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
        :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs.
        :rtype: str
        r_FrW)rrhr�schemer[r\Zpath_urlr)	r,r/r4r]rwZis_proxied_http_requestZusing_socks_proxyZproxy_schemerhr&r&r'�request_urlCs


zHTTPAdapter.request_urlcKsdS)a"Add any headers needed by the connection. As of v2.0 this does
        nothing by default, but is left for overriding by users that subclass
        the :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        This should not be called from user code, and is only exposed for use
        when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param request: The :class:`PreparedRequest <PreparedRequest>` to add headers to.
        :param kwargs: The keyword arguments from the call to send().
        Nr&)r,r/r%r&r&r'�add_headers`szHTTPAdapter.add_headerscCs&i}t|�\}}|r"t||�|d<|S)a
Returns a dictionary of the headers to add to any request sent
        through a proxy. This works with urllib3 magic to ensure that they are
        correctly sent to the proxy, rather than in a tunnelled request if
        CONNECT is being used.

        This should not be called from user code, and is only exposed for use
        when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param proxies: The url of the proxy being used for this request.
        :rtype: dict
        zProxy-Authorization)rr")r,r]rjrXrYr&r&r'rZns

zHTTPAdapter.proxy_headersFTc)Cs�|j|j|�}|j||j||�|j||�}|j|�|jdkpHd|jk}	t|t�r�y|\}
}t	|
|d�}Wq�t
k
r�}zdj|�}
t
|
��WYdd}~Xq�Xnt|t	�r�nt	||d�}�yL|	s�|j|j
||j|jdddd|j|d�
}�nt|d��r|j}|jtd�}y�|j|j
|d	d
�x$|jj�D]\}}|j||��q.W|j�xN|jD]D}|jtt|��dd�jd��|jd
�|j|�|jd
��qXW|jd�y|jd	d�}Wntk
�r�|j�}YnXtj|||ddd�}Wn|j��YnXW�n�t t!j"fk
�rD}
zt#|
|d��WYdd}
~
X�nZt$k
�r�}z�t|j%t&��r~t|j%t'��s~t(||d��t|j%t)��r�t*||d��t|j%t+��r�t,||d��t|j%t-��r�t.||d��t#||d��WYdd}~Xn�t/k
�r}zt#||d��WYdd}~Xn�t+k
�r@}zt,|��WYdd}~Xn^t-t0fk
�r�}z<t|t-��rpt.||d��nt|t1��r�t2||d��n�WYdd}~XnX|j3||�S)aSends PreparedRequest object. Returns Response object.

        :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
        :param stream: (optional) Whether to stream the request content.
        :param timeout: (optional) How long to wait for the server to send
            data before giving up, as a float, or a :ref:`(connect timeout,
            read timeout) <timeouts>` tuple.
        :type timeout: float or tuple or urllib3 Timeout object
        :param verify: (optional) Either a boolean, in which case it controls whether
            we verify the server's TLS certificate, or a string, in which case it
            must be a path to a CA bundle to use
        :param cert: (optional) Any user-provided SSL certificate to be trusted.
        :param proxies: (optional) The proxies dictionary to apply to the request.
        :rtype: requests.Response
        NzContent-Length)�connectrBzsInvalid timeout {0}. Pass a (connect, read) timeout tuple, or a single float to set both timeouts to the same valueF)
�methodrh�bodyrjZredirectZassert_same_host�preload_content�decode_contentZretriesr1�
proxy_pool)r1T)Zskip_accept_encoding�zutf-8s
s0

)�	buffering)Zpoolrqr}r~)r/)4rtrhrirxryr|rjrf�tuple�TimeoutSauce�
ValueErrorrdZurlopenr{r=�hasattrrZ	_get_conn�DEFAULT_POOL_TIMEOUTZ
putrequestrNZ	putheaderZ
endheadersr5�hex�len�encodeZgetresponse�	TypeErrorrZfrom_httplibr6r
�socket�errorrr
rmrrrrr �_ProxyErrorr�	_SSLErrorrr�
_HTTPErrorrrrs)r,r/r0r1r2r3r4rgrhZchunkedrzrB�e�errrrZlow_conn�headerrQ�i�rr&r&r'r5�s�


 


zHTTPAdapter.send)N)FNTNN)r7r8r9r:rL�DEFAULT_POOLSIZErD�DEFAULT_POOLBLOCKr+rMrRrFr^rirsrtr6rxryrZr5r;r&r&)r-r'r<Qs$%4%

r<)>r:Zos.pathr`r�Zpip._vendor.urllib3.poolmanagerrrZpip._vendor.urllib3.responserZpip._vendor.urllib3.utilrr�Zpip._vendor.urllib3.util.retryrZpip._vendor.urllib3.exceptionsrrr	r�r
rrr�r
rrr�rZmodelsr�compatrrZutilsrrrrrrZ
structuresrrpr�
exceptionsrrrr r!Zauthr"Z!pip._vendor.urllib3.contrib.socksr#�ImportErrorr�r�rDr��objectr)r<r&r&r&r'�<module>	sB $_vendor/requests/__pycache__/packages.cpython-36.opt-1.pyc000064400000000676151733136400017442 0ustar003

�Pf��@s~ddlZxpdD]hZdeZee�e�e<xLeej�D]>ZeeksNejed�r4ee	d�d�Z
ejeejde
<q4WqWdS)	�N�urllib3�idna�chardetzpip._vendor.�.zpip._vendor.requests.packages.)rrr)�sys�packageZvendored_package�
__import__�locals�list�modules�mod�
startswith�lenZunprefixed_mod�rr�/usr/lib/python3.6/packages.py�<module>s
_vendor/requests/utils.py000064400000066057151733136400011606 0ustar00# -*- coding: utf-8 -*-

"""
requests.utils
~~~~~~~~~~~~~~

This module provides utility functions that are used within Requests
that are also useful for external consumption.
"""

import cgi
import codecs
import collections
import contextlib
import io
import os
import platform
import re
import socket
import struct
import warnings

from .__version__ import __version__
from . import certs
# to_native_string is unused here, but imported here for backwards compatibility
from ._internal_utils import to_native_string
from .compat import parse_http_list as _parse_list_header
from .compat import (
    quote, urlparse, bytes, str, OrderedDict, unquote, getproxies,
    proxy_bypass, urlunparse, basestring, integer_types, is_py3,
    proxy_bypass_environment, getproxies_environment)
from .cookies import cookiejar_from_dict
from .structures import CaseInsensitiveDict
from .exceptions import (
    InvalidURL, InvalidHeader, FileModeWarning, UnrewindableBodyError)

# Candidate netrc file names probed in the user's home directory
# (POSIX and Windows spellings).
NETRC_FILES = ('.netrc', '_netrc')

# Path to the CA bundle shipped with the vendored certs module.
DEFAULT_CA_BUNDLE_PATH = certs.where()

# Default TCP port per URL scheme.
DEFAULT_PORTS = {'http': 80, 'https': 443}

if platform.system() == 'Windows':
    # provide a proxy_bypass version on Windows without DNS lookups

    def proxy_bypass_registry(host):
        """Check the Windows registry's Internet Settings to decide
        whether *host* should bypass the configured proxy."""
        if is_py3:
            import winreg
        else:
            import _winreg as winreg
        try:
            internetSettings = winreg.OpenKey(winreg.HKEY_CURRENT_USER,
                r'Software\Microsoft\Windows\CurrentVersion\Internet Settings')
            proxyEnable = winreg.QueryValueEx(internetSettings,
                                              'ProxyEnable')[0]
            proxyOverride = winreg.QueryValueEx(internetSettings,
                                                'ProxyOverride')[0]
        except OSError:
            # Registry key or values missing/unreadable: assume no bypass.
            return False
        if not proxyEnable or not proxyOverride:
            return False

        # make a check value list from the registry entry: replace the
        # '<local>' string by the localhost entry and the corresponding
        # canonical entry.
        proxyOverride = proxyOverride.split(';')
        # now check if we match one of the registry values.
        for test in proxyOverride:
            if test == '<local>':
                # '<local>' means: bypass for plain hostnames (no dots).
                if '.' not in host:
                    return True
            test = test.replace(".", r"\.")     # mask dots
            test = test.replace("*", r".*")     # change glob sequence
            test = test.replace("?", r".")      # change glob char
            if re.match(test, host, re.I):
                return True
        return False

    def proxy_bypass(host):  # noqa
        """Return True, if the host should be bypassed.

        Checks proxy settings gathered from the environment, if specified,
        or the registry.
        """
        if getproxies_environment():
            return proxy_bypass_environment(host)
        else:
            return proxy_bypass_registry(host)


def dict_to_sequence(d):
    """Return *d* as a sequence of pairs, unpacking mappings via ``items()``.

    Objects without an ``items`` attribute are returned unchanged.
    """
    items_method = getattr(d, 'items', None)
    return items_method() if items_method is not None else d


def super_len(o):
    """Best-effort number of bytes remaining to be read from *o*.

    Tries ``len()``, a ``len`` attribute, ``fstat`` on a real file
    descriptor, then ``seek``/``tell``; subtracts the current read
    position and returns 0 when no length can be determined.
    """
    total_length = None
    current_position = 0

    if hasattr(o, '__len__'):
        total_length = len(o)

    elif hasattr(o, 'len'):
        total_length = o.len

    elif hasattr(o, 'fileno'):
        try:
            fileno = o.fileno()
        except io.UnsupportedOperation:
            # Some file-likes (e.g. BytesIO) expose fileno() but raise.
            pass
        else:
            total_length = os.fstat(fileno).st_size

            # Having used fstat to determine the file length, we need to
            # confirm that this file was opened up in binary mode.
            if 'b' not in o.mode:
                warnings.warn((
                    "Requests has determined the content-length for this "
                    "request using the binary size of the file: however, the "
                    "file has been opened in text mode (i.e. without the 'b' "
                    "flag in the mode). This may lead to an incorrect "
                    "content-length. In Requests 3.0, support will be removed "
                    "for files in text mode."),
                    FileModeWarning
                )

    if hasattr(o, 'tell'):
        try:
            current_position = o.tell()
        except (OSError, IOError):
            # This can happen in some weird situations, such as when the file
            # is actually a special file descriptor like stdin. In this
            # instance, we don't know what the length is, so set it to zero and
            # let requests chunk it instead.
            if total_length is not None:
                current_position = total_length
        else:
            if hasattr(o, 'seek') and total_length is None:
                # StringIO and BytesIO have seek but no useable fileno
                try:
                    # seek to end of file
                    o.seek(0, 2)
                    total_length = o.tell()

                    # seek back to current position to support
                    # partially read file-like objects
                    o.seek(current_position or 0)
                except (OSError, IOError):
                    total_length = 0

    if total_length is None:
        total_length = 0

    return max(0, total_length - current_position)


def get_netrc_auth(url, raise_errors=False):
    """Returns the Requests tuple auth for a given url from netrc.

    :param url: the URL whose host is looked up in the user's netrc file.
    :param raise_errors: when True, re-raise netrc parse or permission
        errors instead of silently skipping netrc authentication.
    """

    try:
        from netrc import netrc, NetrcParseError

        netrc_path = None

        for f in NETRC_FILES:
            try:
                loc = os.path.expanduser('~/{0}'.format(f))
            except KeyError:
                # os.path.expanduser can fail when $HOME is undefined and
                # getpwuid fails. See http://bugs.python.org/issue20164 &
                # https://github.com/requests/requests/issues/1846
                return

            if os.path.exists(loc):
                netrc_path = loc
                break

        # Abort early if there isn't one.
        if netrc_path is None:
            return

        ri = urlparse(url)

        # Strip port numbers from netloc. This weird `if...encode`` dance is
        # used for Python 3.2, which doesn't support unicode literals.
        splitstr = b':'
        if isinstance(url, str):
            splitstr = splitstr.decode('ascii')
        host = ri.netloc.split(splitstr)[0]

        try:
            _netrc = netrc(netrc_path).authenticators(host)
            if _netrc:
                # Return with login / password
                login_i = (0 if _netrc[0] else 1)
                return (_netrc[login_i], _netrc[2])
        except (NetrcParseError, IOError):
            # If there was a parsing error or a permissions issue reading the file,
            # we'll just skip netrc auth unless explicitly asked to raise errors.
            if raise_errors:
                raise

    # AppEngine hackiness.
    except (ImportError, AttributeError):
        pass


def guess_filename(obj):
    """Return the basename of an object's ``name`` attribute, if it looks
    like a real filename; otherwise None."""
    name = getattr(obj, 'name', None)
    if not name or not isinstance(name, basestring):
        return None
    # Skip pseudo-names such as '<stdin>' used by special file objects.
    if name[0] == '<' or name[-1] == '>':
        return None
    return os.path.basename(name)


def from_key_val_list(value):
    """Take an object and test to see if it can be represented as a
    dictionary. Unless it can not be represented as such, return an
    OrderedDict, e.g.,

    ::

        >>> from_key_val_list([('key', 'val')])
        OrderedDict([('key', 'val')])
        >>> from_key_val_list('string')
        ValueError: need more than 1 value to unpack
        >>> from_key_val_list({'key': 'val'})
        OrderedDict([('key', 'val')])

    :rtype: OrderedDict
    """
    if value is None:
        return None

    # Scalars cannot be interpreted as a sequence of 2-tuples.
    scalar_types = (str, bytes, bool, int)
    if isinstance(value, scalar_types):
        raise ValueError('cannot encode objects that are not 2-tuples')

    return OrderedDict(value)


def to_key_val_list(value):
    """Take an object and test to see if it can be represented as a
    dictionary. If it can be, return a list of tuples, e.g.,

    ::

        >>> to_key_val_list([('key', 'val')])
        [('key', 'val')]
        >>> to_key_val_list({'key': 'val'})
        [('key', 'val')]
        >>> to_key_val_list('string')
        ValueError: cannot encode objects that are not 2-tuples.

    :rtype: list
    """
    if value is None:
        return None

    if isinstance(value, (str, bytes, bool, int)):
        raise ValueError('cannot encode objects that are not 2-tuples')

    # ``collections.Mapping`` moved to ``collections.abc`` in Python 3.3 and
    # the old alias was removed in Python 3.10; resolve it compatibly so this
    # keeps working on both Python 2 and modern Python 3.
    mapping_type = getattr(collections, 'abc', collections).Mapping
    if isinstance(value, mapping_type):
        value = value.items()

    return list(value)


# From mitsuhiko/werkzeug (used with permission).
def parse_list_header(value):
    """Parse lists as described by RFC 2068 Section 2.

    In particular, parse comma-separated lists where the elements of
    the list may include quoted-strings.  A quoted-string could
    contain a comma.  A non-quoted string could have quotes in the
    middle.  Quotes are removed automatically after parsing.

    It basically works like :func:`parse_set_header` just that items
    may appear multiple times and case sensitivity is preserved.

    The return value is a standard :class:`list`:

    >>> parse_list_header('token, "quoted value"')
    ['token', 'quoted value']

    To create a header from the :class:`list` again, use the
    :func:`dump_header` function.

    :param value: a string with a list header.
    :return: :class:`list`
    :rtype: list
    """
    return [
        unquote_header_value(item[1:-1]) if item[:1] == item[-1:] == '"'
        else item
        for item in _parse_list_header(value)
    ]


# From mitsuhiko/werkzeug (used with permission).
def parse_dict_header(value):
    """Parse lists of key, value pairs as described by RFC 2068 Section 2 and
    convert them into a python dict:

    >>> d = parse_dict_header('foo="is a fish", bar="as well"')
    >>> type(d) is dict
    True
    >>> sorted(d.items())
    [('bar', 'as well'), ('foo', 'is a fish')]

    If there is no value for a key it will be `None`:

    >>> parse_dict_header('key_without_value')
    {'key_without_value': None}

    To create a header from the :class:`dict` again, use the
    :func:`dump_header` function.

    :param value: a string with a dict header.
    :return: :class:`dict`
    :rtype: dict
    """
    parsed = {}
    for element in _parse_list_header(value):
        # A bare token has no value at all.
        if '=' not in element:
            parsed[element] = None
            continue
        key, _, val = element.partition('=')
        if val[:1] == val[-1:] == '"':
            val = unquote_header_value(val[1:-1])
        parsed[key] = val
    return parsed


# From mitsuhiko/werkzeug (used with permission).
def unquote_header_value(value, is_filename=False):
    r"""Unquotes a header value.  (Reversal of :func:`quote_header_value`).
    This does not use the real unquoting but what browsers are actually
    using for quoting.

    :param value: the header value to unquote.
    :rtype: str
    """
    if not (value and value[0] == value[-1] == '"'):
        return value

    # Strip the surrounding quotes.  Real RFC unquoting would break IE,
    # which uploads files with "C:\foo\bar.txt" as the filename.
    inner = value[1:-1]

    # Leave UNC filename paths (\\server\share) untouched: collapsing the
    # doubled backslashes would corrupt the leading slashes.  See #458.
    if is_filename and inner[:2] == '\\\\':
        return inner
    return inner.replace('\\\\', '\\').replace('\\"', '"')


def dict_from_cookiejar(cj):
    """Returns a key/value dictionary from a CookieJar.

    :param cj: CookieJar object to extract cookies from.
    :rtype: dict
    """
    # Later cookies with the same name overwrite earlier ones, matching
    # plain dict-assignment semantics.
    return dict((cookie.name, cookie.value) for cookie in cj)


def add_dict_to_cookiejar(cj, cookie_dict):
    """Returns a CookieJar from a key/value dictionary.

    Existing cookies in *cj* are kept; entries from *cookie_dict* are added.

    :param cj: CookieJar to insert cookies into.
    :param cookie_dict: Dict of key/values to insert into CookieJar.
    :rtype: CookieJar
    """

    return cookiejar_from_dict(cookie_dict, cj)


def get_encodings_from_content(content):
    """Returns encodings from given content string.

    Scans for ``<meta charset=...>``, ``<meta content=...charset=...>``
    pragmas and an XML declaration, in that order.

    :param content: bytestring to extract encodings from.
    """
    warnings.warn((
        'In requests 3.0, get_encodings_from_content will be removed. For '
        'more information, please see the discussion on issue #2266. (This'
        ' warning should only appear once.)'),
        DeprecationWarning)

    patterns = (
        re.compile(r'<meta.*?charset=["\']*(.+?)["\'>]', flags=re.I),
        re.compile(r'<meta.*?content=["\']*;?charset=(.+?)["\'>]', flags=re.I),
        re.compile(r'^<\?xml.*?encoding=["\']*(.+?)["\'>]'),
    )

    found = []
    for pattern in patterns:
        found.extend(pattern.findall(content))
    return found


def _parse_content_type_header(header):
    """Split a Content-Type style header into its type and a params dict.

    Minimal replacement for ``cgi.parse_header``: the ``cgi`` module was
    deprecated by PEP 594 (Python 3.11) and removed in Python 3.13.

    :param header: raw header value, e.g. ``'text/html; charset=UTF-8'``.
    :return: ``(content_type, params_dict)`` with lower-cased param keys.
    """
    tokens = header.split(';')
    content_type, params = tokens[0].strip(), tokens[1:]
    params_dict = {}
    items_to_strip = "\"' "

    for param in params:
        param = param.strip()
        if param:
            key, value = param, True
            index_of_equals = param.find("=")
            if index_of_equals != -1:
                key = param[:index_of_equals].strip(items_to_strip)
                value = param[index_of_equals + 1:].strip(items_to_strip)
            params_dict[key.lower()] = value
    return content_type, params_dict


def get_encoding_from_headers(headers):
    """Returns encodings from given HTTP Header Dict.

    :param headers: dictionary to extract encoding from.
    :rtype: str
    """

    content_type = headers.get('content-type')

    if not content_type:
        return None

    content_type, params = _parse_content_type_header(content_type)

    if 'charset' in params:
        return params['charset'].strip("'\"")

    # Per RFC 2616, text/* without an explicit charset defaults to latin-1.
    if 'text' in content_type:
        return 'ISO-8859-1'


def stream_decode_response_unicode(iterator, r):
    """Stream decodes a iterator.

    Byte chunks are decoded incrementally using ``r.encoding``; when the
    response declares no encoding, chunks pass through untouched.
    """
    encoding = r.encoding
    if encoding is None:
        for chunk in iterator:
            yield chunk
        return

    decoder = codecs.getincrementaldecoder(encoding)(errors='replace')
    for chunk in iterator:
        decoded = decoder.decode(chunk)
        if decoded:
            yield decoded
    # Flush any bytes still buffered from a partial multi-byte sequence.
    tail = decoder.decode(b'', final=True)
    if tail:
        yield tail


def iter_slices(string, slice_length):
    """Yield successive ``slice_length``-sized pieces of *string*.

    A ``None`` or non-positive length yields the whole string at once.
    """
    if slice_length is None or slice_length <= 0:
        slice_length = len(string)
    total = len(string)
    start = 0
    while start < total:
        yield string[start:start + slice_length]
        start += slice_length


def get_unicode_from_response(r):
    """Returns the requested content back in unicode.

    :param r: Response object to get unicode content from.

    Tried:

    1. charset from content-type
    2. fall back and replace all unicode characters

    :rtype: str
    """
    warnings.warn((
        'In requests 3.0, get_unicode_from_response will be removed. For '
        'more information, please see the discussion on issue #2266. (This'
        ' warning should only appear once.)'),
        DeprecationWarning)

    # Try the charset declared in the Content-Type header first.
    # (The original kept a `tried_encodings` list here that was appended to
    # but never read; that dead accumulator has been removed.)
    encoding = get_encoding_from_headers(r.headers)

    if encoding:
        try:
            return str(r.content, encoding)
        except UnicodeError:
            pass

    # Fall back: decode permissively.  ``encoding`` may be None here, in
    # which case str() raises TypeError and the raw bytes are returned.
    try:
        return str(r.content, encoding, errors='replace')
    except TypeError:
        return r.content


# The unreserved URI characters (RFC 3986): these are never required to be
# percent-escaped, so escaped forms can safely be unquoted back to literals.
UNRESERVED_SET = frozenset(
    "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz" + "0123456789-._~")


def unquote_unreserved(uri):
    """Un-escape any percent-escape sequences in a URI that are unreserved
    characters. This leaves all reserved, illegal and non-ASCII bytes encoded.

    :rtype: str
    """
    parts = uri.split('%')
    # Each element after the first originally followed a '%' sign.
    for idx, part in enumerate(parts[1:], 1):
        hex_pair = part[0:2]
        if len(hex_pair) != 2 or not hex_pair.isalnum():
            parts[idx] = '%' + part
            continue
        try:
            char = chr(int(hex_pair, 16))
        except ValueError:
            raise InvalidURL("Invalid percent-escape sequence: '%s'" % hex_pair)
        if char in UNRESERVED_SET:
            parts[idx] = char + part[2:]
        else:
            parts[idx] = '%' + part
    return ''.join(parts)


def requote_uri(uri):
    """Re-quote the given URI.

    This function passes the given URI through an unquote/quote cycle to
    ensure that it is fully and consistently quoted.

    :rtype: str
    """
    try:
        # Unquote only the unreserved characters, then quote only illegal
        # characters (reserved characters, unreserved ones and '%' stay put).
        return quote(unquote_unreserved(uri), safe="!#$%&'()*+,/:;=?@[]~")
    except InvalidURL:
        # The URI held an invalid percent-escape, so quote it as-is while
        # making sure stray unquoted '%' characters get escaped properly.
        return quote(uri, safe="!#$&'()*+,/:;=?@[]~")


def address_in_network(ip, net):
    """This function allows you to check if an IP belongs to a network subnet

    Example: returns True if ip = 192.168.1.1 and net = 192.168.1.0/24
             returns False if ip = 192.168.1.1 and net = 192.168.100.0/24

    :rtype: bool
    """
    host_addr = struct.unpack('=L', socket.inet_aton(ip))[0]
    net_addr, prefix_len = net.split('/')
    mask = struct.unpack(
        '=L', socket.inet_aton(dotted_netmask(int(prefix_len))))[0]
    subnet = struct.unpack('=L', socket.inet_aton(net_addr))[0] & mask
    return (host_addr & mask) == (subnet & mask)


def dotted_netmask(mask):
    """Converts mask from /xx format to xxx.xxx.xxx.xxx

    Example: if mask is 24 function returns 255.255.255.0

    :rtype: str
    """
    # Set the low (32 - mask) host bits, then flip so the high `mask`
    # network bits become 1.
    host_bits = (1 << (32 - mask)) - 1
    return socket.inet_ntoa(struct.pack('>I', 0xffffffff ^ host_bits))


def is_ipv4_address(string_ip):
    """
    :rtype: bool
    """
    # inet_aton raises socket.error on anything it cannot parse.
    try:
        socket.inet_aton(string_ip)
        return True
    except socket.error:
        return False


def is_valid_cidr(string_network):
    """
    Very simple check of the cidr format in no_proxy variable.

    :rtype: bool
    """
    # Require exactly one '/' separating address and prefix length.
    if string_network.count('/') != 1:
        return False

    address, _, prefix = string_network.partition('/')
    try:
        mask = int(prefix)
    except ValueError:
        return False
    if not 1 <= mask <= 32:
        return False
    try:
        socket.inet_aton(address)
    except socket.error:
        return False
    return True


@contextlib.contextmanager
def set_environ(env_name, value):
    """Set the environment variable 'env_name' to 'value'

    Save previous value, yield, and then restore the previous value stored in
    the environment variable 'env_name'.

    If 'value' is None, do nothing"""
    if value is None:
        # Nothing to change: yield with the environment untouched.
        yield
        return

    previous = os.environ.get(env_name)
    os.environ[env_name] = value
    try:
        yield
    finally:
        if previous is None:
            del os.environ[env_name]
        else:
            os.environ[env_name] = previous


def should_bypass_proxies(url, no_proxy):
    """
    Returns whether we should bypass proxies or not.

    :param url: the URL being requested.
    :param no_proxy: comma-separated host list overriding the NO_PROXY
        environment variable; may be None.
    :rtype: bool
    """
    # Environment variables may be spelled in lower or upper case.
    get_proxy = lambda k: os.environ.get(k) or os.environ.get(k.upper())

    # First check whether no_proxy is defined. If it is, check that the URL
    # we're getting isn't in the no_proxy list.
    no_proxy_arg = no_proxy
    if no_proxy is None:
        no_proxy = get_proxy('no_proxy')
    netloc = urlparse(url).netloc

    if no_proxy:
        # We need to check whether we match here. We need to see if we match
        # the end of the netloc, both with and without the port.
        no_proxy = (
            host for host in no_proxy.replace(' ', '').split(',') if host
        )

        ip = netloc.split(':')[0]
        if is_ipv4_address(ip):
            for proxy_ip in no_proxy:
                if is_valid_cidr(proxy_ip):
                    if address_in_network(ip, proxy_ip):
                        return True
                elif ip == proxy_ip:
                    # If no_proxy ip was defined in plain IP notation instead of cidr notation &
                    # matches the IP of the index
                    return True
        else:
            for host in no_proxy:
                if netloc.endswith(host) or netloc.split(':')[0].endswith(host):
                    # The URL does match something in no_proxy, so we don't want
                    # to apply the proxies on this URL.
                    return True

    # If the system proxy settings indicate that this URL should be bypassed,
    # don't proxy.
    # The proxy_bypass function is incredibly buggy on OS X in early versions
    # of Python 2.6, so allow this call to fail. Only catch the specific
    # exceptions we've seen, though: this call failing in other ways can reveal
    # legitimate problems.
    # Temporarily expose the caller-supplied no_proxy to the stdlib bypass
    # helpers via the environment.
    with set_environ('no_proxy', no_proxy_arg):
        try:
            bypass = proxy_bypass(netloc)
        except (TypeError, socket.gaierror):
            bypass = False

    if bypass:
        return True

    return False


def get_environ_proxies(url, no_proxy=None):
    """
    Return a dict of environment proxies.

    :rtype: dict
    """
    bypass = should_bypass_proxies(url, no_proxy=no_proxy)
    return {} if bypass else getproxies()


def select_proxy(url, proxies):
    """Select a proxy for the url, if applicable.

    :param url: The url being for the request
    :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs
    """
    proxies = proxies or {}
    parts = urlparse(url)
    if parts.hostname is None:
        return proxies.get(parts.scheme, proxies.get('all'))

    # Most-specific key first: scheme+host beats scheme beats 'all'.
    candidate_keys = (
        parts.scheme + '://' + parts.hostname,
        parts.scheme,
        'all://' + parts.hostname,
        'all',
    )
    for key in candidate_keys:
        if key in proxies:
            return proxies[key]
    return None


def default_user_agent(name="python-requests"):
    """
    Return a string representing the default user agent.

    :rtype: str
    """
    return '{0}/{1}'.format(name, __version__)


def default_headers():
    """
    :rtype: requests.structures.CaseInsensitiveDict
    """
    headers = CaseInsensitiveDict()
    headers['User-Agent'] = default_user_agent()
    headers['Accept-Encoding'] = ', '.join(('gzip', 'deflate'))
    headers['Accept'] = '*/*'
    headers['Connection'] = 'keep-alive'
    return headers


def parse_header_links(value):
    """Return a list of parsed link headers.

    i.e. Link: <http:/.../front.jpeg>; rel=front; type="image/jpeg",<http://.../back.jpeg>; rel=back;type="image/jpeg"

    :rtype: list
    """

    links = []

    replace_chars = ' \'"'

    # An empty (or whitespace/quote-only) header value contains no links;
    # previously this fell through and produced a bogus [{'url': ''}].
    value = value.strip(replace_chars)
    if not value:
        return links

    for val in re.split(', *<', value):
        try:
            url, params = val.split(';', 1)
        except ValueError:
            # No parameters: the whole segment is the URL.
            url, params = val, ''

        link = {'url': url.strip('<> \'"')}

        for param in params.split(';'):
            try:
                key, value = param.split('=')
            except ValueError:
                # Malformed parameter (no '='): stop parsing this link.
                break

            link[key.strip(replace_chars)] = value.strip(replace_chars)

        links.append(link)

    return links


# Null bytes; no need to recreate these on each call to guess_json_utf
_null = '\x00'.encode('ascii')  # encoding to ASCII for Python 3
_null2 = _null * 2  # two consecutive null bytes
_null3 = _null * 3  # three consecutive null bytes


def guess_json_utf(data):
    """Guess the Unicode encoding of JSON bytes from BOMs and the
    placement of null bytes in the first four bytes.

    :rtype: str
    """
    null = '\x00'.encode('ascii')
    sample = data[:4]

    # An explicit byte-order mark settles the question immediately.
    if sample in (codecs.BOM_UTF32_LE, codecs.BOM_UTF32_BE):
        return 'utf-32'     # BOM included
    if sample[:3] == codecs.BOM_UTF8:
        return 'utf-8-sig'  # BOM included, MS style (discouraged)
    if sample[:2] in (codecs.BOM_UTF16_LE, codecs.BOM_UTF16_BE):
        return 'utf-16'     # BOM included

    # JSON always starts with two ASCII characters, so the number and
    # position of null bytes pins down the encoding.
    nullcount = sample.count(null)
    if nullcount == 0:
        return 'utf-8'
    if nullcount == 2:
        if sample[::2] == null * 2:   # 1st and 3rd are null
            return 'utf-16-be'
        if sample[1::2] == null * 2:  # 2nd and 4th are null
            return 'utf-16-le'
        # Two nulls, but not in a valid UTF-16 ASCII-range layout.
    if nullcount == 3:
        if sample[:3] == null * 3:
            return 'utf-32-be'
        if sample[1:] == null * 3:
            return 'utf-32-le'
        # Three nulls, but not a valid UTF-32 ASCII-range character.
    return None


def prepend_scheme_if_needed(url, new_scheme):
    """Given a URL that may or may not have a scheme, prepend the given scheme.
    Does not replace a present scheme with the one provided as an argument.

    :rtype: str
    """
    parts = urlparse(url, new_scheme)
    scheme, netloc, path, params, query, fragment = parts

    # urlparse can be over-cautious and decide there is no netloc,
    # leaving the host in `path`; swap them back when that happens.
    if not netloc:
        netloc, path = path, netloc

    return urlunparse((scheme, netloc, path, params, query, fragment))


def get_auth_from_url(url):
    """Given a url with authentication components, extract them into a tuple of
    username,password.

    :rtype: (str,str)
    """
    parts = urlparse(url)

    # `username`/`password` are None when absent and unquote(None)
    # raises TypeError; in either failure mode fall back to empties.
    try:
        return (unquote(parts.username), unquote(parts.password))
    except (AttributeError, TypeError):
        return ('', '')


# Moved outside of function to avoid recompile every call
# Both patterns accept a value that is either empty, or starts with a
# non-whitespace character and contains no CR/LF (header-injection guard).
_CLEAN_HEADER_REGEX_BYTE = re.compile(b'^\\S[^\\r\\n]*$|^$')
_CLEAN_HEADER_REGEX_STR = re.compile(r'^\S[^\r\n]*$|^$')


def check_header_validity(header):
    """Verifies that header value is a string which doesn't contain
    leading whitespace or return characters. This prevents unintended
    header injection.

    :param header: tuple, in the format (name, value).
    """
    name, value = header

    # Pick the pattern matching the value's type (bytes vs text).
    if isinstance(value, bytes):
        pattern = _CLEAN_HEADER_REGEX_BYTE
    else:
        pattern = _CLEAN_HEADER_REGEX_STR

    try:
        matched = pattern.match(value)
    except TypeError:
        raise InvalidHeader("Value for header {%s: %s} must be of type str or "
                            "bytes, not %s" % (name, value, type(value)))
    if not matched:
        raise InvalidHeader("Invalid return character or leading space in header: %s" % name)


def urldefragauth(url):
    """
    Given a url remove the fragment and the authentication part.

    :rtype: str
    """
    scheme, netloc, path, params, query, fragment = urlparse(url)

    # see func:`prepend_scheme_if_needed`
    if not netloc:
        netloc, path = path, netloc

    # Keep only what follows the last '@' (drops the userinfo part) and
    # rebuild the URL with an empty fragment.
    host = netloc.rsplit('@', 1)[-1]
    return urlunparse((scheme, host, path, params, query, ''))


def rewind_body(prepared_request):
    """Move file pointer back to its recorded starting position
    so it can be read again on redirect.
    """
    body_seek = getattr(prepared_request.body, 'seek', None)

    # A rewind needs both a seekable body and a recorded integer offset.
    if body_seek is None or not isinstance(
            prepared_request._body_position, integer_types):
        raise UnrewindableBodyError("Unable to rewind request body for redirect.")

    try:
        body_seek(prepared_request._body_position)
    except (IOError, OSError):
        raise UnrewindableBodyError("An error occurred when rewinding request "
                                    "body for redirect.")
_vendor/requests/hooks.py000064400000001377151733136400011563 0ustar00# -*- coding: utf-8 -*-

"""
requests.hooks
~~~~~~~~~~~~~~

This module provides the capabilities for the Requests hooks system.

Available hooks:

``response``:
    The response generated from a Request.
"""
# Hook events recognized by default_hooks() and dispatch_hook().
HOOKS = ['response']


def default_hooks():
    """Return a fresh hook registry: one empty list per known event."""
    return {event: [] for event in HOOKS}

# TODO: 'response' is currently the only supported hook event.


def dispatch_hook(key, hooks, hook_data, **kwargs):
    """Dispatches a hook dictionary on a given piece of data."""
    registered = (hooks or dict()).get(key)
    if not registered:
        return hook_data

    # A single callable is treated as a one-element hook list.
    if hasattr(registered, '__call__'):
        registered = [registered]

    for hook in registered:
        result = hook(hook_data, **kwargs)
        # A hook may transform the data; None means "leave it unchanged".
        if result is not None:
            hook_data = result
    return hook_data
_vendor/requests/status_codes.py000064400000006373151733136400013141 0ustar00# -*- coding: utf-8 -*-

from .structures import LookupDict

# Mapping of HTTP status code -> tuple of attribute aliases exposed on
# the ``codes`` lookup object below (e.g. codes.ok == codes.OK == 200).
_codes = {

    # Informational.
    100: ('continue',),
    101: ('switching_protocols',),
    102: ('processing',),
    103: ('checkpoint',),
    122: ('uri_too_long', 'request_uri_too_long'),
    200: ('ok', 'okay', 'all_ok', 'all_okay', 'all_good', '\\o/', '✓'),
    201: ('created',),
    202: ('accepted',),
    203: ('non_authoritative_info', 'non_authoritative_information'),
    204: ('no_content',),
    205: ('reset_content', 'reset'),
    206: ('partial_content', 'partial'),
    207: ('multi_status', 'multiple_status', 'multi_stati', 'multiple_stati'),
    208: ('already_reported',),
    226: ('im_used',),

    # Redirection.
    300: ('multiple_choices',),
    301: ('moved_permanently', 'moved', '\\o-'),
    302: ('found',),
    303: ('see_other', 'other'),
    304: ('not_modified',),
    305: ('use_proxy',),
    306: ('switch_proxy',),
    307: ('temporary_redirect', 'temporary_moved', 'temporary'),
    308: ('permanent_redirect',
          'resume_incomplete', 'resume',),  # These 2 to be removed in 3.0

    # Client Error.
    400: ('bad_request', 'bad'),
    401: ('unauthorized',),
    402: ('payment_required', 'payment'),
    403: ('forbidden',),
    404: ('not_found', '-o-'),
    405: ('method_not_allowed', 'not_allowed'),
    406: ('not_acceptable',),
    407: ('proxy_authentication_required', 'proxy_auth', 'proxy_authentication'),
    408: ('request_timeout', 'timeout'),
    409: ('conflict',),
    410: ('gone',),
    411: ('length_required',),
    412: ('precondition_failed', 'precondition'),
    413: ('request_entity_too_large',),
    414: ('request_uri_too_large',),
    415: ('unsupported_media_type', 'unsupported_media', 'media_type'),
    416: ('requested_range_not_satisfiable', 'requested_range', 'range_not_satisfiable'),
    417: ('expectation_failed',),
    418: ('im_a_teapot', 'teapot', 'i_am_a_teapot'),
    421: ('misdirected_request',),
    422: ('unprocessable_entity', 'unprocessable'),
    423: ('locked',),
    424: ('failed_dependency', 'dependency'),
    425: ('unordered_collection', 'unordered'),
    426: ('upgrade_required', 'upgrade'),
    428: ('precondition_required', 'precondition'),
    429: ('too_many_requests', 'too_many'),
    431: ('header_fields_too_large', 'fields_too_large'),
    444: ('no_response', 'none'),
    449: ('retry_with', 'retry'),
    450: ('blocked_by_windows_parental_controls', 'parental_controls'),
    451: ('unavailable_for_legal_reasons', 'legal_reasons'),
    499: ('client_closed_request',),

    # Server Error.
    500: ('internal_server_error', 'server_error', '/o\\', '✗'),
    501: ('not_implemented',),
    502: ('bad_gateway',),
    503: ('service_unavailable', 'unavailable'),
    504: ('gateway_timeout',),
    505: ('http_version_not_supported', 'http_version'),
    506: ('variant_also_negotiates',),
    507: ('insufficient_storage',),
    509: ('bandwidth_limit_exceeded', 'bandwidth'),
    510: ('not_extended',),
    511: ('network_authentication_required', 'network_auth', 'network_authentication'),
}

codes = LookupDict(name='status_codes')

# Expose every alias as an attribute of ``codes``, plus an upper-case
# variant — except for the ASCII-art aliases ('\\o/', '/o\\', ...),
# which have no meaningful upper-case form.
for code, titles in _codes.items():
    for title in titles:
        setattr(codes, title, code)
        if not title.startswith(('\\', '/')):
            setattr(codes, title.upper(), code)
_vendor/requests/adapters.py000064400000051030151733136410012233 0ustar00# -*- coding: utf-8 -*-

"""
requests.adapters
~~~~~~~~~~~~~~~~~

This module contains the transport adapters that Requests uses to define
and maintain connections.
"""

import os.path
import socket

from pip._vendor.urllib3.poolmanager import PoolManager, proxy_from_url
from pip._vendor.urllib3.response import HTTPResponse
from pip._vendor.urllib3.util import Timeout as TimeoutSauce
from pip._vendor.urllib3.util.retry import Retry
from pip._vendor.urllib3.exceptions import ClosedPoolError
from pip._vendor.urllib3.exceptions import ConnectTimeoutError
from pip._vendor.urllib3.exceptions import HTTPError as _HTTPError
from pip._vendor.urllib3.exceptions import MaxRetryError
from pip._vendor.urllib3.exceptions import NewConnectionError
from pip._vendor.urllib3.exceptions import ProxyError as _ProxyError
from pip._vendor.urllib3.exceptions import ProtocolError
from pip._vendor.urllib3.exceptions import ReadTimeoutError
from pip._vendor.urllib3.exceptions import SSLError as _SSLError
from pip._vendor.urllib3.exceptions import ResponseError

from .models import Response
from .compat import urlparse, basestring
from .utils import (DEFAULT_CA_BUNDLE_PATH, get_encoding_from_headers,
                    prepend_scheme_if_needed, get_auth_from_url, urldefragauth,
                    select_proxy)
from .structures import CaseInsensitiveDict
from .cookies import extract_cookies_to_jar
from .exceptions import (ConnectionError, ConnectTimeout, ReadTimeout, SSLError,
                         ProxyError, RetryError, InvalidSchema)
from .auth import _basic_auth_str

try:
    from pip._vendor.urllib3.contrib.socks import SOCKSProxyManager
except ImportError:
    # SOCKS support is optional; this stub raises a clear error only
    # when a SOCKS proxy is actually used without the dependency.
    def SOCKSProxyManager(*args, **kwargs):
        raise InvalidSchema("Missing dependencies for SOCKS support.")

DEFAULT_POOLBLOCK = False  # don't block waiting for a free pooled connection
DEFAULT_POOLSIZE = 10      # default pool count and per-pool connection cap
DEFAULT_RETRIES = 0        # no retries unless the caller asks for them
DEFAULT_POOL_TIMEOUT = None


class BaseAdapter(object):
    """Abstract interface that all transport adapters implement."""

    def __init__(self):
        super(BaseAdapter, self).__init__()

    def send(self, request, stream=False, timeout=None, verify=True,
             cert=None, proxies=None):
        """Send a PreparedRequest and return a Response object.

        :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
        :param stream: (optional) Whether to stream the request content.
        :param timeout: (optional) How long to wait for the server to send
            data before giving up, as a float, or a :ref:`(connect timeout,
            read timeout) <timeouts>` tuple.
        :type timeout: float or tuple
        :param verify: (optional) Either a boolean, in which case it controls whether we verify
            the server's TLS certificate, or a string, in which case it must be a path
            to a CA bundle to use
        :param cert: (optional) Any user-provided SSL certificate to be trusted.
        :param proxies: (optional) The proxies dictionary to apply to the request.
        """
        raise NotImplementedError

    def close(self):
        """Release any adapter-specific resources."""
        raise NotImplementedError


class HTTPAdapter(BaseAdapter):
    """The built-in HTTP Adapter for urllib3.

    Provides a general-case interface for Requests sessions to contact HTTP and
    HTTPS urls by implementing the Transport Adapter interface. This class will
    usually be created by the :class:`Session <Session>` class under the
    covers.

    :param pool_connections: The number of urllib3 connection pools to cache.
    :param pool_maxsize: The maximum number of connections to save in the pool.
    :param max_retries: The maximum number of retries each connection
        should attempt. Note, this applies only to failed DNS lookups, socket
        connections and connection timeouts, never to requests where data has
        made it to the server. By default, Requests does not retry failed
        connections. If you need granular control over the conditions under
        which we retry a request, import urllib3's ``Retry`` class and pass
        that instead.
    :param pool_block: Whether the connection pool should block for connections.

    Usage::

      >>> import requests
      >>> s = requests.Session()
      >>> a = requests.adapters.HTTPAdapter(max_retries=3)
      >>> s.mount('http://', a)
    """
    # Attributes preserved across pickling (see __getstate__/__setstate__).
    __attrs__ = ['max_retries', 'config', '_pool_connections', '_pool_maxsize',
                 '_pool_block']

    def __init__(self, pool_connections=DEFAULT_POOLSIZE,
                 pool_maxsize=DEFAULT_POOLSIZE, max_retries=DEFAULT_RETRIES,
                 pool_block=DEFAULT_POOLBLOCK):
        if max_retries == DEFAULT_RETRIES:
            # Retries disabled; read=False avoids retrying requests whose
            # data may already have reached the server (see class docstring).
            self.max_retries = Retry(0, read=False)
        else:
            self.max_retries = Retry.from_int(max_retries)
        self.config = {}
        # Cache of proxy URL -> urllib3 ProxyManager/SOCKSProxyManager.
        self.proxy_manager = {}

        super(HTTPAdapter, self).__init__()

        self._pool_connections = pool_connections
        self._pool_maxsize = pool_maxsize
        self._pool_block = pool_block

        self.init_poolmanager(pool_connections, pool_maxsize, block=pool_block)

    def __getstate__(self):
        # Pickle only the attributes listed in __attrs__.
        return dict((attr, getattr(self, attr, None)) for attr in
                    self.__attrs__)

    def __setstate__(self, state):
        # Can't handle by adding 'proxy_manager' to self.__attrs__ because
        # self.poolmanager uses a lambda function, which isn't pickleable.
        self.proxy_manager = {}
        self.config = {}

        for attr, value in state.items():
            setattr(self, attr, value)

        # Rebuild the (unpicklable) pool manager from the restored settings.
        self.init_poolmanager(self._pool_connections, self._pool_maxsize,
                              block=self._pool_block)

    def init_poolmanager(self, connections, maxsize, block=DEFAULT_POOLBLOCK, **pool_kwargs):
        """Initializes a urllib3 PoolManager.

        This method should not be called from user code, and is only
        exposed for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param connections: The number of urllib3 connection pools to cache.
        :param maxsize: The maximum number of connections to save in the pool.
        :param block: Block when no free connections are available.
        :param pool_kwargs: Extra keyword arguments used to initialize the Pool Manager.
        """
        # save these values for pickling
        self._pool_connections = connections
        self._pool_maxsize = maxsize
        self._pool_block = block

        self.poolmanager = PoolManager(num_pools=connections, maxsize=maxsize,
                                       block=block, strict=True, **pool_kwargs)

    def proxy_manager_for(self, proxy, **proxy_kwargs):
        """Return urllib3 ProxyManager for the given proxy.

        This method should not be called from user code, and is only
        exposed for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param proxy: The proxy to return a urllib3 ProxyManager for.
        :param proxy_kwargs: Extra keyword arguments used to configure the Proxy Manager.
        :returns: ProxyManager
        :rtype: urllib3.ProxyManager
        """
        # Managers are cached per proxy URL; SOCKS proxies get a
        # dedicated manager type.
        if proxy in self.proxy_manager:
            manager = self.proxy_manager[proxy]
        elif proxy.lower().startswith('socks'):
            username, password = get_auth_from_url(proxy)
            manager = self.proxy_manager[proxy] = SOCKSProxyManager(
                proxy,
                username=username,
                password=password,
                num_pools=self._pool_connections,
                maxsize=self._pool_maxsize,
                block=self._pool_block,
                **proxy_kwargs
            )
        else:
            proxy_headers = self.proxy_headers(proxy)
            manager = self.proxy_manager[proxy] = proxy_from_url(
                proxy,
                proxy_headers=proxy_headers,
                num_pools=self._pool_connections,
                maxsize=self._pool_maxsize,
                block=self._pool_block,
                **proxy_kwargs)

        return manager

    def cert_verify(self, conn, url, verify, cert):
        """Verify a SSL certificate. This method should not be called from user
        code, and is only exposed for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param conn: The urllib3 connection object associated with the cert.
        :param url: The requested URL.
        :param verify: Either a boolean, in which case it controls whether we verify
            the server's TLS certificate, or a string, in which case it must be a path
            to a CA bundle to use
        :param cert: The SSL certificate to verify.
        """
        if url.lower().startswith('https') and verify:

            cert_loc = None

            # Allow self-specified cert location.
            if verify is not True:
                cert_loc = verify

            if not cert_loc:
                cert_loc = DEFAULT_CA_BUNDLE_PATH

            if not cert_loc or not os.path.exists(cert_loc):
                raise IOError("Could not find a suitable TLS CA certificate bundle, "
                              "invalid path: {0}".format(cert_loc))

            conn.cert_reqs = 'CERT_REQUIRED'

            # A directory of CA certs is configured differently from a
            # single bundle file.
            if not os.path.isdir(cert_loc):
                conn.ca_certs = cert_loc
            else:
                conn.ca_cert_dir = cert_loc
        else:
            # Plain HTTP, or verification explicitly disabled.
            conn.cert_reqs = 'CERT_NONE'
            conn.ca_certs = None
            conn.ca_cert_dir = None

        if cert:
            # `cert` may be a (cert, key) pair or a single combined file path.
            if not isinstance(cert, basestring):
                conn.cert_file = cert[0]
                conn.key_file = cert[1]
            else:
                conn.cert_file = cert
                conn.key_file = None
            if conn.cert_file and not os.path.exists(conn.cert_file):
                raise IOError("Could not find the TLS certificate file, "
                              "invalid path: {0}".format(conn.cert_file))
            if conn.key_file and not os.path.exists(conn.key_file):
                raise IOError("Could not find the TLS key file, "
                              "invalid path: {0}".format(conn.key_file))

    def build_response(self, req, resp):
        """Builds a :class:`Response <requests.Response>` object from a urllib3
        response. This should not be called from user code, and is only exposed
        for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`

        :param req: The :class:`PreparedRequest <PreparedRequest>` used to generate the response.
        :param resp: The urllib3 response object.
        :rtype: requests.Response
        """
        response = Response()

        # Fallback to None if there's no status_code, for whatever reason.
        response.status_code = getattr(resp, 'status', None)

        # Make headers case-insensitive.
        response.headers = CaseInsensitiveDict(getattr(resp, 'headers', {}))

        # Set encoding.
        response.encoding = get_encoding_from_headers(response.headers)
        response.raw = resp
        response.reason = response.raw.reason

        if isinstance(req.url, bytes):
            response.url = req.url.decode('utf-8')
        else:
            response.url = req.url

        # Add new cookies from the server.
        extract_cookies_to_jar(response.cookies, req, resp)

        # Give the Response some context.
        response.request = req
        response.connection = self

        return response

    def get_connection(self, url, proxies=None):
        """Returns a urllib3 connection for the given URL. This should not be
        called from user code, and is only exposed for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param url: The URL to connect to.
        :param proxies: (optional) A Requests-style dictionary of proxies used on this request.
        :rtype: urllib3.ConnectionPool
        """
        proxy = select_proxy(url, proxies)

        if proxy:
            # Proxy URLs without a scheme are assumed to be HTTP proxies.
            proxy = prepend_scheme_if_needed(proxy, 'http')
            proxy_manager = self.proxy_manager_for(proxy)
            conn = proxy_manager.connection_from_url(url)
        else:
            # Only scheme should be lower case
            parsed = urlparse(url)
            url = parsed.geturl()
            conn = self.poolmanager.connection_from_url(url)

        return conn

    def close(self):
        """Disposes of any internal state.

        Currently, this closes the PoolManager and any active ProxyManager,
        which closes any pooled connections.
        """
        self.poolmanager.clear()
        for proxy in self.proxy_manager.values():
            proxy.clear()

    def request_url(self, request, proxies):
        """Obtain the url to use when making the final request.

        If the message is being sent through a HTTP proxy, the full URL has to
        be used. Otherwise, we should only use the path portion of the URL.

        This should not be called from user code, and is only exposed for use
        when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
        :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs.
        :rtype: str
        """
        proxy = select_proxy(request.url, proxies)
        scheme = urlparse(request.url).scheme

        is_proxied_http_request = (proxy and scheme != 'https')
        using_socks_proxy = False
        if proxy:
            proxy_scheme = urlparse(proxy).scheme.lower()
            using_socks_proxy = proxy_scheme.startswith('socks')

        # SOCKS proxies (and HTTPS-tunnelled requests) take the path
        # form; plain HTTP proxies need the absolute URL.
        url = request.path_url
        if is_proxied_http_request and not using_socks_proxy:
            url = urldefragauth(request.url)

        return url

    def add_headers(self, request, **kwargs):
        """Add any headers needed by the connection. As of v2.0 this does
        nothing by default, but is left for overriding by users that subclass
        the :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        This should not be called from user code, and is only exposed for use
        when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param request: The :class:`PreparedRequest <PreparedRequest>` to add headers to.
        :param kwargs: The keyword arguments from the call to send().
        """
        pass

    def proxy_headers(self, proxy):
        """Returns a dictionary of the headers to add to any request sent
        through a proxy. This works with urllib3 magic to ensure that they are
        correctly sent to the proxy, rather than in a tunnelled request if
        CONNECT is being used.

        This should not be called from user code, and is only exposed for use
        when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param proxy: The url of the proxy being used for this request.
        :rtype: dict
        """
        headers = {}
        username, password = get_auth_from_url(proxy)

        if username:
            headers['Proxy-Authorization'] = _basic_auth_str(username,
                                                             password)

        return headers

    def send(self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None):
        """Sends PreparedRequest object. Returns Response object.

        :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
        :param stream: (optional) Whether to stream the request content.
        :param timeout: (optional) How long to wait for the server to send
            data before giving up, as a float, or a :ref:`(connect timeout,
            read timeout) <timeouts>` tuple.
        :type timeout: float or tuple or urllib3 Timeout object
        :param verify: (optional) Either a boolean, in which case it controls whether
            we verify the server's TLS certificate, or a string, in which case it
            must be a path to a CA bundle to use
        :param cert: (optional) Any user-provided SSL certificate to be trusted.
        :param proxies: (optional) The proxies dictionary to apply to the request.
        :rtype: requests.Response
        """

        conn = self.get_connection(request.url, proxies)

        self.cert_verify(conn, request.url, verify, cert)
        url = self.request_url(request, proxies)
        self.add_headers(request)

        # Use chunked transfer encoding when there is a body but no
        # Content-Length header to describe it.
        chunked = not (request.body is None or 'Content-Length' in request.headers)

        if isinstance(timeout, tuple):
            try:
                connect, read = timeout
                timeout = TimeoutSauce(connect=connect, read=read)
            except ValueError as e:
                # this may raise a string formatting error.
                err = ("Invalid timeout {0}. Pass a (connect, read) "
                       "timeout tuple, or a single float to set "
                       "both timeouts to the same value".format(timeout))
                raise ValueError(err)
        elif isinstance(timeout, TimeoutSauce):
            pass
        else:
            # A single value applies to both connect and read timeouts.
            timeout = TimeoutSauce(connect=timeout, read=timeout)

        try:
            if not chunked:
                resp = conn.urlopen(
                    method=request.method,
                    url=url,
                    body=request.body,
                    headers=request.headers,
                    redirect=False,
                    assert_same_host=False,
                    preload_content=False,
                    decode_content=False,
                    retries=self.max_retries,
                    timeout=timeout
                )

            # Send the request.
            else:
                if hasattr(conn, 'proxy_pool'):
                    conn = conn.proxy_pool

                low_conn = conn._get_conn(timeout=DEFAULT_POOL_TIMEOUT)

                try:
                    low_conn.putrequest(request.method,
                                        url,
                                        skip_accept_encoding=True)

                    for header, value in request.headers.items():
                        low_conn.putheader(header, value)

                    low_conn.endheaders()

                    # Emit each chunk with HTTP/1.1 chunked framing:
                    # hex length, CRLF, data, CRLF, then a zero-length
                    # terminating chunk.
                    for i in request.body:
                        low_conn.send(hex(len(i))[2:].encode('utf-8'))
                        low_conn.send(b'\r\n')
                        low_conn.send(i)
                        low_conn.send(b'\r\n')
                    low_conn.send(b'0\r\n\r\n')

                    # Receive the response from the server
                    try:
                        # For Python 2.7+ versions, use buffering of HTTP
                        # responses
                        r = low_conn.getresponse(buffering=True)
                    except TypeError:
                        # For compatibility with Python 2.6 versions and back
                        r = low_conn.getresponse()

                    resp = HTTPResponse.from_httplib(
                        r,
                        pool=conn,
                        connection=low_conn,
                        preload_content=False,
                        decode_content=False
                    )
                except:
                    # If we hit any problems here, clean up the connection.
                    # Then, reraise so that we can handle the actual exception.
                    low_conn.close()
                    raise

        except (ProtocolError, socket.error) as err:
            raise ConnectionError(err, request=request)

        except MaxRetryError as e:
            if isinstance(e.reason, ConnectTimeoutError):
                # TODO: Remove this in 3.0.0: see #2811
                if not isinstance(e.reason, NewConnectionError):
                    raise ConnectTimeout(e, request=request)

            if isinstance(e.reason, ResponseError):
                raise RetryError(e, request=request)

            if isinstance(e.reason, _ProxyError):
                raise ProxyError(e, request=request)

            if isinstance(e.reason, _SSLError):
                # This branch is for urllib3 v1.22 and later.
                raise SSLError(e, request=request)

            raise ConnectionError(e, request=request)

        except ClosedPoolError as e:
            raise ConnectionError(e, request=request)

        except _ProxyError as e:
            raise ProxyError(e)

        except (_SSLError, _HTTPError) as e:
            if isinstance(e, _SSLError):
                # This branch is for urllib3 versions earlier than v1.22
                raise SSLError(e, request=request)
            elif isinstance(e, ReadTimeoutError):
                raise ReadTimeout(e, request=request)
            else:
                raise

        return self.build_response(request, resp)
_vendor/requests/__init__.py000064400000006767151733136410012210 0ustar00# -*- coding: utf-8 -*-

#   __
#  /__)  _  _     _   _ _/   _
# / (   (- (/ (/ (- _)  /  _)
#          /

"""
Requests HTTP Library
~~~~~~~~~~~~~~~~~~~~~

Requests is an HTTP library, written in Python, for human beings. Basic GET
usage:

   >>> import requests
   >>> r = requests.get('https://www.python.org')
   >>> r.status_code
   200
   >>> 'Python is a programming language' in r.content
   True

... or POST:

   >>> payload = dict(key1='value1', key2='value2')
   >>> r = requests.post('http://httpbin.org/post', data=payload)
   >>> print(r.text)
   {
     ...
     "form": {
       "key2": "value2",
       "key1": "value1"
     },
     ...
   }

The other HTTP methods are supported - see `requests.api`. Full documentation
is at <http://python-requests.org>.

:copyright: (c) 2017 by Kenneth Reitz.
:license: Apache 2.0, see LICENSE for more details.
"""

from pip._vendor import urllib3
from pip._vendor import chardet
import warnings
from .exceptions import RequestsDependencyWarning


def check_compatibility(urllib3_version, chardet_version):
    """Assert that the vendored urllib3 and chardet versions are supported.

    Raises AssertionError (or ValueError for unparsable version strings)
    when the versions fall outside urllib3 >= 1.21.1, <= 1.22 or
    chardet >= 3.0.2, < 3.1.0.
    """
    parts = urllib3_version.split('.')
    assert parts != ['dev']  # Verify urllib3 isn't installed from git.

    # Sometimes, urllib3 only reports its version as 16.1; pad to x.y.z.
    if len(parts) == 2:
        parts.append('0')

    # Check urllib3 for compatibility: >= 1.21.1, <= 1.22.
    major, minor, patch = (int(p) for p in parts)  # noqa: F811
    assert major == 1
    assert 21 <= minor <= 22

    # Check chardet for compatibility: >= 3.0.2, < 3.1.0.
    major, minor, patch = (int(p) for p in chardet_version.split('.')[:3])
    assert major == 3
    assert minor < 1
    assert patch >= 2


# Check imported dependencies for compatibility.
# A failed check must never break `import requests`, so both the version
# assertions and unparsable version strings downgrade to a warning.
try:
    check_compatibility(urllib3.__version__, chardet.__version__)
except (AssertionError, ValueError):
    warnings.warn("urllib3 ({0}) or chardet ({1}) doesn't match a supported "
                  "version!".format(urllib3.__version__, chardet.__version__),
                  RequestsDependencyWarning)

# Attempt to enable urllib3's SNI support, if possible
# try:
#     from pip._vendor.urllib3.contrib import pyopenssl
#     pyopenssl.inject_into_urllib3()
# except ImportError:
#     pass

# urllib3's DependencyWarnings should be silenced.
from pip._vendor.urllib3.exceptions import DependencyWarning
warnings.simplefilter('ignore', DependencyWarning)

from .__version__ import __title__, __description__, __url__, __version__
from .__version__ import __build__, __author__, __author_email__, __license__
from .__version__ import __copyright__, __cake__

from . import utils
from . import packages
from .models import Request, Response, PreparedRequest
from .api import request, get, head, post, patch, put, delete, options
from .sessions import session, Session
from .status_codes import codes
from .exceptions import (
    RequestException, Timeout, URLRequired,
    TooManyRedirects, HTTPError, ConnectionError,
    FileModeWarning, ConnectTimeout, ReadTimeout
)

# Set default logging handler to avoid "No handler found" warnings.
import logging
try:  # Python 2.7+
    from logging import NullHandler
except ImportError:
    # Python 2.6 fallback: a no-op handler equivalent to logging.NullHandler.
    class NullHandler(logging.Handler):
        def emit(self, record):
            pass

# Attach the no-op handler to the package logger so library logging is
# silent unless the application configures handlers itself.
logging.getLogger(__name__).addHandler(NullHandler())

# FileModeWarnings go off per the default.
warnings.simplefilter('default', FileModeWarning, append=True)
_vendor/requests/packages.py000064400000001267151733136410012215 0ustar00import sys

# This code exists for backwards compatibility reasons.
# I don't like it either. Just look the other way. :)

# Alias each vendored dependency so that historical import paths like
# `requests.packages.urllib3` keep resolving to the vendored modules.
for package in ('urllib3', 'idna', 'chardet'):
    vendored_package = "pip._vendor." + package
    locals()[package] = __import__(vendored_package)
    # This traversal is apparently necessary such that the identities are
    # preserved (requests.packages.urllib3.* is urllib3.*)
    for mod in list(sys.modules):
        if mod == vendored_package or mod.startswith(vendored_package + '.'):
            unprefixed_mod = mod[len("pip._vendor."):]
            sys.modules['pip._vendor.requests.packages.' + unprefixed_mod] = sys.modules[mod]

# Kinda cool, though, right?
_vendor/requests/auth.py000064400000023000151733136410011365 0ustar00# -*- coding: utf-8 -*-

"""
requests.auth
~~~~~~~~~~~~~

This module contains the authentication handlers for Requests.
"""

import os
import re
import time
import hashlib
import threading
import warnings

from base64 import b64encode

from .compat import urlparse, str, basestring
from .cookies import extract_cookies_to_jar
from ._internal_utils import to_native_string
from .utils import parse_dict_header

CONTENT_TYPE_FORM_URLENCODED = 'application/x-www-form-urlencoded'
CONTENT_TYPE_MULTI_PART = 'multipart/form-data'


def _basic_auth_str(username, password):
    """Returns a Basic Auth string."""

    # Backwards-compatibility shim: non-string credentials (e.g. ints) are
    # coerced to text with a DeprecationWarning. Per the original authors,
    # this behaviour is dumb but must be preserved until 3.0.0 because
    # people rely on it.
    if not isinstance(username, basestring):
        warnings.warn(
            "Non-string usernames will no longer be supported in Requests "
            "3.0.0. Please convert the object you've passed in ({0!r}) to "
            "a string or bytes object in the near future to avoid "
            "problems.".format(username),
            category=DeprecationWarning,
        )
        username = str(username)

    if not isinstance(password, basestring):
        warnings.warn(
            "Non-string passwords will no longer be supported in Requests "
            "3.0.0. Please convert the object you've passed in ({0!r}) to "
            "a string or bytes object in the near future to avoid "
            "problems.".format(password),
            category=DeprecationWarning,
        )
        password = str(password)
    # -- End Removal --

    # The userid:password production is encoded as latin-1 before base64.
    if isinstance(username, str):
        username = username.encode('latin1')

    if isinstance(password, str):
        password = password.encode('latin1')

    credentials = b64encode(b':'.join((username, password))).strip()
    return 'Basic ' + to_native_string(credentials)


class AuthBase(object):
    """Abstract base class for all authentication handlers.

    Subclasses implement ``__call__`` to mutate the outgoing request,
    typically by setting authentication headers on it.
    """

    def __call__(self, r):
        # Subclasses must override this hook.
        raise NotImplementedError('Auth hooks must be callable.')


class HTTPBasicAuth(AuthBase):
    """Attaches HTTP Basic Authentication to the given Request object."""

    def __init__(self, username, password):
        self.username = username
        self.password = password

    def __eq__(self, other):
        # Compare by credentials; objects without the attributes never match.
        same_user = self.username == getattr(other, 'username', None)
        same_pass = self.password == getattr(other, 'password', None)
        return all([same_user, same_pass])

    def __ne__(self, other):
        return not self == other

    def __call__(self, r):
        # Inject the Basic credentials into the outgoing request.
        r.headers['Authorization'] = _basic_auth_str(self.username, self.password)
        return r


class HTTPProxyAuth(HTTPBasicAuth):
    """Attaches HTTP Proxy Authentication to a given Request object."""

    def __call__(self, r):
        # Same credential scheme as Basic auth, but aimed at the proxy.
        token = _basic_auth_str(self.username, self.password)
        r.headers['Proxy-Authorization'] = token
        return r


class HTTPDigestAuth(AuthBase):
    """Attaches HTTP Digest Authentication to the given Request object.

    Challenge state, nonce counters and retry bookkeeping are kept in
    thread-local storage so one instance can be shared across threads.
    """

    def __init__(self, username, password):
        self.username = username
        self.password = password
        # Keep state in per-thread local storage
        self._thread_local = threading.local()

    def init_per_thread_state(self):
        """Lazily initialize this thread's digest state."""
        # Ensure state is initialized just once per-thread
        if not hasattr(self._thread_local, 'init'):
            self._thread_local.init = True
            # Last server nonce we answered; drives nonce_count reuse.
            self._thread_local.last_nonce = ''
            self._thread_local.nonce_count = 0
            # Parsed WWW-Authenticate challenge parameters.
            self._thread_local.chal = {}
            # Saved body stream position, for rewinding on retry.
            self._thread_local.pos = None
            self._thread_local.num_401_calls = None

    def build_digest_header(self, method, url):
        """Build the value for the Authorization header from the stored
        challenge, per RFC 2617.

        Returns None when the challenge uses an unsupported algorithm or
        an unsupported qop (anything other than 'auth').

        :rtype: str
        """

        realm = self._thread_local.chal['realm']
        nonce = self._thread_local.chal['nonce']
        qop = self._thread_local.chal.get('qop')
        algorithm = self._thread_local.chal.get('algorithm')
        opaque = self._thread_local.chal.get('opaque')
        hash_utf8 = None

        # Algorithm defaults to MD5 when the server does not name one.
        if algorithm is None:
            _algorithm = 'MD5'
        else:
            _algorithm = algorithm.upper()
        # lambdas assume digest modules are imported at the top level
        if _algorithm == 'MD5' or _algorithm == 'MD5-SESS':
            def md5_utf8(x):
                if isinstance(x, str):
                    x = x.encode('utf-8')
                return hashlib.md5(x).hexdigest()
            hash_utf8 = md5_utf8
        elif _algorithm == 'SHA':
            def sha_utf8(x):
                if isinstance(x, str):
                    x = x.encode('utf-8')
                return hashlib.sha1(x).hexdigest()
            hash_utf8 = sha_utf8

        # KD(secret, data) digest combinator from RFC 2617.
        KD = lambda s, d: hash_utf8("%s:%s" % (s, d))

        # Unknown algorithm: we cannot answer the challenge.
        if hash_utf8 is None:
            return None

        # XXX not implemented yet
        entdig = None
        p_parsed = urlparse(url)
        #: path is request-uri defined in RFC 2616 which should not be empty
        path = p_parsed.path or "/"
        if p_parsed.query:
            path += '?' + p_parsed.query

        A1 = '%s:%s:%s' % (self.username, realm, self.password)
        A2 = '%s:%s' % (method, path)

        HA1 = hash_utf8(A1)
        HA2 = hash_utf8(A2)

        # nonce_count (nc) increments while the server nonce is reused.
        if nonce == self._thread_local.last_nonce:
            self._thread_local.nonce_count += 1
        else:
            self._thread_local.nonce_count = 1
        ncvalue = '%08x' % self._thread_local.nonce_count
        # Client nonce: hash of counter, server nonce, time and random bytes.
        s = str(self._thread_local.nonce_count).encode('utf-8')
        s += nonce.encode('utf-8')
        s += time.ctime().encode('utf-8')
        s += os.urandom(8)

        cnonce = (hashlib.sha1(s).hexdigest()[:16])
        if _algorithm == 'MD5-SESS':
            HA1 = hash_utf8('%s:%s:%s' % (HA1, nonce, cnonce))

        if not qop:
            # Legacy (RFC 2069 style) response without qop.
            respdig = KD(HA1, "%s:%s" % (nonce, HA2))
        elif qop == 'auth' or 'auth' in qop.split(','):
            noncebit = "%s:%s:%s:%s:%s" % (
                nonce, ncvalue, cnonce, 'auth', HA2
            )
            respdig = KD(HA1, noncebit)
        else:
            # XXX handle auth-int.
            return None

        self._thread_local.last_nonce = nonce

        # XXX should the partial digests be encoded too?
        base = 'username="%s", realm="%s", nonce="%s", uri="%s", ' \
               'response="%s"' % (self.username, realm, nonce, path, respdig)
        if opaque:
            base += ', opaque="%s"' % opaque
        if algorithm:
            base += ', algorithm="%s"' % algorithm
        if entdig:
            base += ', digest="%s"' % entdig
        if qop:
            base += ', qop="auth", nc=%s, cnonce="%s"' % (ncvalue, cnonce)

        return 'Digest %s' % (base)

    def handle_redirect(self, r, **kwargs):
        """Reset num_401_calls counter on redirects."""
        if r.is_redirect:
            self._thread_local.num_401_calls = 1

    def handle_401(self, r, **kwargs):
        """
        Takes the given response and tries digest-auth, if needed.

        :rtype: requests.Response
        """

        # If response is not 4xx, do not auth
        # See https://github.com/requests/requests/issues/3772
        if not 400 <= r.status_code < 500:
            self._thread_local.num_401_calls = 1
            return r

        if self._thread_local.pos is not None:
            # Rewind the file position indicator of the body to where
            # it was to resend the request.
            r.request.body.seek(self._thread_local.pos)
        s_auth = r.headers.get('www-authenticate', '')

        # Retry at most once per request to avoid an auth loop.
        if 'digest' in s_auth.lower() and self._thread_local.num_401_calls < 2:

            self._thread_local.num_401_calls += 1
            pat = re.compile(r'digest ', flags=re.IGNORECASE)
            self._thread_local.chal = parse_dict_header(pat.sub('', s_auth, count=1))

            # Consume content and release the original connection
            # to allow our new request to reuse the same one.
            r.content
            r.close()
            prep = r.request.copy()
            extract_cookies_to_jar(prep._cookies, r.request, r.raw)
            prep.prepare_cookies(prep._cookies)

            prep.headers['Authorization'] = self.build_digest_header(
                prep.method, prep.url)
            _r = r.connection.send(prep, **kwargs)
            _r.history.append(r)
            _r.request = prep

            return _r

        self._thread_local.num_401_calls = 1
        return r

    def __call__(self, r):
        """Attach digest auth hooks (and a preemptive header when a nonce
        from a previous exchange is available) to the outgoing request."""
        # Initialize per-thread state, if needed
        self.init_per_thread_state()
        # If we have a saved nonce, skip the 401
        if self._thread_local.last_nonce:
            r.headers['Authorization'] = self.build_digest_header(r.method, r.url)
        try:
            # Remember the body position so handle_401 can rewind and resend.
            self._thread_local.pos = r.body.tell()
        except AttributeError:
            # In the case of HTTPDigestAuth being reused and the body of
            # the previous request was a file-like object, pos has the
            # file position of the previous body. Ensure it's set to
            # None.
            self._thread_local.pos = None
        r.register_hook('response', self.handle_401)
        r.register_hook('response', self.handle_redirect)
        self._thread_local.num_401_calls = 1

        return r

    def __eq__(self, other):
        # Equal iff the credentials match; see HTTPBasicAuth.__eq__.
        return all([
            self.username == getattr(other, 'username', None),
            self.password == getattr(other, 'password', None)
        ])

    def __ne__(self, other):
        return not self == other
_vendor/requests/help.py000064400000007123151733136410011364 0ustar00"""Module containing bug report helper(s)."""
from __future__ import print_function

import json
import platform
import sys
import ssl

from pip._vendor import idna
from pip._vendor import urllib3
from pip._vendor import chardet

from . import __version__ as requests_version

# Probe for the optional pyOpenSSL stack; all three names stay None when it
# is unavailable so info() can report its absence without failing.
try:
    from .packages.urllib3.contrib import pyopenssl
except ImportError:
    pyopenssl = None
    OpenSSL = None
    cryptography = None
else:
    import OpenSSL
    import cryptography


def _implementation():
    """Return a dict with the Python implementation and version.

    Provide both the name and the version of the Python implementation
    currently running. For example, on CPython 2.7.5 it will return
    {'name': 'CPython', 'version': '2.7.5'}.

    This function works best on CPython and PyPy: in particular, it probably
    doesn't work for Jython or IronPython. Future investigation should be done
    to work out the correct shape of the code for those platforms.
    """
    implementation = platform.python_implementation()

    if implementation == 'CPython':
        implementation_version = platform.python_version()
    elif implementation == 'PyPy':
        implementation_version = '%s.%s.%s' % (sys.pypy_version_info.major,
                                               sys.pypy_version_info.minor,
                                               sys.pypy_version_info.micro)
        if sys.pypy_version_info.releaselevel != 'final':
            implementation_version = ''.join([
                implementation_version, sys.pypy_version_info.releaselevel
            ])
    elif implementation == 'Jython':
        implementation_version = platform.python_version()  # Complete Guess
    elif implementation == 'IronPython':
        implementation_version = platform.python_version()  # Complete Guess
    else:
        implementation_version = 'Unknown'

    return {'name': implementation, 'version': implementation_version}


def info():
    """Generate information for a bug report.

    Returns a dict describing the running platform, interpreter, SSL
    stack and the versions of requests' dependencies.
    """
    try:
        platform_info = {
            'system': platform.system(),
            'release': platform.release(),
        }
    except IOError:
        # platform queries can touch the OS and fail in odd environments.
        platform_info = {
            'system': 'Unknown',
            'release': 'Unknown',
        }

    # pyOpenSSL details are only available when the optional stack imported.
    pyopenssl_info = {
        'version': None,
        'openssl_version': '',
    }
    if OpenSSL:
        pyopenssl_info = {
            'version': OpenSSL.__version__,
            'openssl_version': '%x' % OpenSSL.SSL.OPENSSL_VERSION_NUMBER,
        }

    # OPENSSL_VERSION_NUMBER doesn't exist in the Python 2.6 ssl module.
    system_ssl = getattr(ssl, 'OPENSSL_VERSION_NUMBER', None)

    return {
        'platform': platform_info,
        'implementation': _implementation(),
        'system_ssl': {
            'version': '%x' % system_ssl if system_ssl is not None else ''
        },
        'using_pyopenssl': pyopenssl is not None,
        'pyOpenSSL': pyopenssl_info,
        'urllib3': {'version': urllib3.__version__},
        'chardet': {'version': chardet.__version__},
        'cryptography': {
            'version': getattr(cryptography, '__version__', ''),
        },
        'idna': {
            'version': getattr(idna, '__version__', ''),
        },
        'requests': {
            'version': requests_version,
        },
    }


def main():
    """Pretty-print the bug information as JSON."""
    report = json.dumps(info(), sort_keys=True, indent=2)
    print(report)

# Allow running this module directly to dump a bug-report JSON blob.
if __name__ == '__main__':
    main()
_vendor/requests/_internal_utils.py000064400000002110151733136410013616 0ustar00# -*- coding: utf-8 -*-

"""
requests._internal_utils
~~~~~~~~~~~~~~

Provides utility functions that are consumed internally by Requests
which depend on extremely few external helpers (such as compat)
"""

from .compat import is_py2, builtin_str, str


def to_native_string(string, encoding='ascii'):
    """Given a string object, regardless of type, returns a representation of
    that string in the native string type, encoding and decoding where
    necessary. This assumes ASCII unless told otherwise.
    """
    # Already the native type: nothing to do.
    if isinstance(string, builtin_str):
        return string

    # Native str is bytes on Python 2 and text on Python 3, so the
    # conversion direction depends on the interpreter.
    if is_py2:
        return string.encode(encoding)
    return string.decode(encoding)


def unicode_is_ascii(u_string):
    """Determine if unicode string only contains ASCII characters.

    :param str u_string: unicode string to check. Must be unicode
        and not Python 2 `str`.
    :rtype: bool
    """
    assert isinstance(u_string, str)
    # A string is ASCII exactly when it round-trips through the
    # 'ascii' codec without error.
    try:
        u_string.encode('ascii')
    except UnicodeEncodeError:
        return False
    return True
_vendor/requests/compat.py000064400000003132151733136410011713 0ustar00# -*- coding: utf-8 -*-

"""
requests.compat
~~~~~~~~~~~~~~~

This module handles import compatibility issues between Python 2 and
Python 3.
"""

from pip._vendor import chardet

import sys

# -------
# Pythons
# -------

# Syntax sugar.
_ver = sys.version_info

#: Python 2.x?
is_py2 = (_ver[0] == 2)

#: Python 3.x?
is_py3 = (_ver[0] == 3)

# try:
#     import simplejson as json
# except ImportError:
import json

# ---------
# Specifics
# ---------

if is_py2:
    # Python 2: import names from their 2.x locations and normalize the
    # text/bytes aliases so the rest of requests can ignore the split.
    from urllib import (
        quote, unquote, quote_plus, unquote_plus, urlencode, getproxies,
        proxy_bypass, proxy_bypass_environment, getproxies_environment)
    from urlparse import urlparse, urlunparse, urljoin, urlsplit, urldefrag
    from urllib2 import parse_http_list
    import cookielib
    from Cookie import Morsel
    from StringIO import StringIO

    from pip._vendor.urllib3.packages.ordered_dict import OrderedDict

    # On py2 the "native" str is bytes and `str` is rebound to unicode.
    builtin_str = str
    bytes = str
    str = unicode
    basestring = basestring
    numeric_types = (int, long, float)
    integer_types = (int, long)

elif is_py3:
    # Python 3: the same public names, sourced from the reorganized stdlib.
    from urllib.parse import urlparse, urlunparse, urljoin, urlsplit, urlencode, quote, unquote, quote_plus, unquote_plus, urldefrag
    from urllib.request import parse_http_list, getproxies, proxy_bypass, proxy_bypass_environment, getproxies_environment
    from http import cookiejar as cookielib
    from http.cookies import Morsel
    from io import StringIO
    from collections import OrderedDict

    # On py3 native str is already text, and bytes is a distinct type.
    builtin_str = str
    str = str
    bytes = bytes
    basestring = (str, bytes)
    numeric_types = (int, float)
    integer_types = (int,)
_vendor/requests/cookies.py000064400000043440151733136410012072 0ustar00# -*- coding: utf-8 -*-

"""
requests.cookies
~~~~~~~~~~~~~~~~

Compatibility code to be able to use `cookielib.CookieJar` with requests.

requests.utils imports from here, so be careful with imports.
"""

import copy
import time
import calendar
import collections

from ._internal_utils import to_native_string
from .compat import cookielib, urlparse, urlunparse, Morsel

try:
    import threading
except ImportError:
    import dummy_threading as threading


class MockRequest(object):
    """Wraps a `requests.Request` to mimic a `urllib2.Request`.

    The code in `cookielib.CookieJar` expects this interface in order to correctly
    manage cookie policies, i.e., determine whether a cookie can be set, given the
    domains of the request and the cookie.

    The original request object is read-only. The client is responsible for collecting
    the new headers via `get_new_headers()` and interpreting them appropriately. You
    probably want `get_cookie_header`, defined below.
    """

    def __init__(self, request):
        self._r = request
        self._new_headers = {}
        # cookielib reads `.type` (the URL scheme) directly.
        self.type = urlparse(self._r.url).scheme

    def get_type(self):
        return self.type

    def get_host(self):
        return urlparse(self._r.url).netloc

    def get_origin_req_host(self):
        return self.get_host()

    def get_full_url(self):
        # Without a user-supplied Host header the request URL is
        # authoritative as-is.
        host_header = self._r.headers.get('Host')
        if not host_header:
            return self._r.url
        # A Host header overrides the URL's netloc, so rebuild the URL
        # around it for correct cookie-domain matching.
        host = to_native_string(host_header, encoding='utf-8')
        parts = urlparse(self._r.url)
        return urlunparse([
            parts.scheme, host, parts.path, parts.params, parts.query,
            parts.fragment
        ])

    def is_unverifiable(self):
        # See cookielib's docs: third-party requests are "unverifiable".
        return True

    def has_header(self, name):
        return name in self._r.headers or name in self._new_headers

    def get_header(self, name, default=None):
        # Original headers win over ones cookielib added.
        return self._r.headers.get(name, self._new_headers.get(name, default))

    def add_header(self, key, val):
        """cookielib has no legitimate use for this method; add it back if you find one."""
        raise NotImplementedError("Cookie headers should be added with add_unredirected_header()")

    def add_unredirected_header(self, name, value):
        self._new_headers[name] = value

    def get_new_headers(self):
        return self._new_headers

    @property
    def unverifiable(self):
        return self.is_unverifiable()

    @property
    def origin_req_host(self):
        return self.get_origin_req_host()

    @property
    def host(self):
        return self.get_host()


class MockResponse(object):
    """Wraps a `httplib.HTTPMessage` to mimic a `urllib.addinfourl`.

    ...what? Basically, expose the parsed HTTP headers from the server response
    the way `cookielib` expects to see them.
    """

    def __init__(self, headers):
        """Make a MockResponse for `cookielib` to read.

        :param headers: a httplib.HTTPMessage or analogous carrying the headers
        """
        self._headers = headers

    def info(self):
        # cookielib calls response.info() to reach the header message.
        return self._headers

    def getheaders(self, name):
        # Bug fix: the original discarded the lookup result, so this
        # method always returned None. Propagate the underlying value.
        return self._headers.getheaders(name)


def extract_cookies_to_jar(jar, request, response):
    """Extract the cookies from the response into a CookieJar.

    :param jar: cookielib.CookieJar (not necessarily a RequestsCookieJar)
    :param request: our own requests.Request object
    :param response: urllib3.HTTPResponse object
    """
    # Only responses wrapping a real httplib response carry cookie headers.
    if not getattr(response, '_original_response', None):
        return
    req = MockRequest(request)
    # The headers live on the wrapped httplib.HTTPResponse's HTTPMessage.
    res = MockResponse(response._original_response.msg)
    jar.extract_cookies(res, req)


def get_cookie_header(jar, request):
    """
    Produce an appropriate Cookie header string to be sent with `request`, or None.

    :rtype: str
    """
    # Let the jar write its Cookie header onto a urllib2-shaped shim, then
    # read the header back out.
    mock_request = MockRequest(request)
    jar.add_cookie_header(mock_request)
    return mock_request.get_new_headers().get('Cookie')


def remove_cookie_by_name(cookiejar, name, domain=None, path=None):
    """Unsets a cookie by name, by default over all domains and paths.

    Wraps CookieJar.clear(), is O(n).
    """
    # Collect matches first: the jar cannot be mutated while iterating it.
    clearables = [
        (cookie.domain, cookie.path, cookie.name)
        for cookie in cookiejar
        if cookie.name == name
        and (domain is None or domain == cookie.domain)
        and (path is None or path == cookie.path)
    ]

    for domain, path, name in clearables:
        cookiejar.clear(domain, path, name)


class CookieConflictError(RuntimeError):
    """Raised when more than one cookie in the jar matches the criteria.

    Use ``.get`` and ``.set`` with explicit ``domain`` and ``path``
    arguments to disambiguate.
    """


# NOTE(review): `collections.MutableMapping` was moved to `collections.abc`
# and the old alias is removed in Python 3.10+ — confirm target interpreter
# range before upgrading.
class RequestsCookieJar(cookielib.CookieJar, collections.MutableMapping):
    """Compatibility class; is a cookielib.CookieJar, but exposes a dict
    interface.

    This is the CookieJar we create by default for requests and sessions that
    don't specify one, since some clients may expect response.cookies and
    session.cookies to support dict operations.

    Requests does not use the dict interface internally; it's just for
    compatibility with external client code. All requests code should work
    out of the box with externally provided instances of ``CookieJar``, e.g.
    ``LWPCookieJar`` and ``FileCookieJar``.

    Unlike a regular CookieJar, this class is pickleable.

    .. warning:: dictionary operations that are normally O(1) may be O(n).
    """

    def get(self, name, default=None, domain=None, path=None):
        """Dict-like get() that also supports optional domain and path args in
        order to resolve naming collisions from using one cookie jar over
        multiple domains.

        .. warning:: operation is O(n), not O(1).
        """
        try:
            return self._find_no_duplicates(name, domain, path)
        except KeyError:
            return default

    def set(self, name, value, **kwargs):
        """Dict-like set() that also supports optional domain and path args in
        order to resolve naming collisions from using one cookie jar over
        multiple domains.
        """
        # support client code that unsets cookies by assignment of a None value:
        if value is None:
            remove_cookie_by_name(self, name, domain=kwargs.get('domain'), path=kwargs.get('path'))
            return

        if isinstance(value, Morsel):
            c = morsel_to_cookie(value)
        else:
            c = create_cookie(name, value, **kwargs)
        self.set_cookie(c)
        return c

    def iterkeys(self):
        """Dict-like iterkeys() that returns an iterator of names of cookies
        from the jar.

        .. seealso:: itervalues() and iteritems().
        """
        for cookie in iter(self):
            yield cookie.name

    def keys(self):
        """Dict-like keys() that returns a list of names of cookies from the
        jar.

        .. seealso:: values() and items().
        """
        return list(self.iterkeys())

    def itervalues(self):
        """Dict-like itervalues() that returns an iterator of values of cookies
        from the jar.

        .. seealso:: iterkeys() and iteritems().
        """
        for cookie in iter(self):
            yield cookie.value

    def values(self):
        """Dict-like values() that returns a list of values of cookies from the
        jar.

        .. seealso:: keys() and items().
        """
        return list(self.itervalues())

    def iteritems(self):
        """Dict-like iteritems() that returns an iterator of name-value tuples
        from the jar.

        .. seealso:: iterkeys() and itervalues().
        """
        for cookie in iter(self):
            yield cookie.name, cookie.value

    def items(self):
        """Dict-like items() that returns a list of name-value tuples from the
        jar. Allows client-code to call ``dict(RequestsCookieJar)`` and get a
        vanilla python dict of key value pairs.

        .. seealso:: keys() and values().
        """
        return list(self.iteritems())

    def list_domains(self):
        """Utility method to list all the domains in the jar."""
        domains = []
        for cookie in iter(self):
            if cookie.domain not in domains:
                domains.append(cookie.domain)
        return domains

    def list_paths(self):
        """Utility method to list all the paths in the jar."""
        paths = []
        for cookie in iter(self):
            if cookie.path not in paths:
                paths.append(cookie.path)
        return paths

    def multiple_domains(self):
        """Returns True if there are multiple domains in the jar.
        Returns False otherwise.

        :rtype: bool
        """
        domains = []
        for cookie in iter(self):
            if cookie.domain is not None and cookie.domain in domains:
                return True
            domains.append(cookie.domain)
        return False  # there is only one domain in jar

    def get_dict(self, domain=None, path=None):
        """Takes as an argument an optional domain and path and returns a plain
        old Python dict of name-value pairs of cookies that meet the
        requirements.

        :rtype: dict
        """
        dictionary = {}
        for cookie in iter(self):
            if (
                (domain is None or cookie.domain == domain) and
                (path is None or cookie.path == path)
            ):
                dictionary[cookie.name] = cookie.value
        return dictionary

    def __contains__(self, name):
        # A name that appears multiple times still "is in" the jar.
        try:
            return super(RequestsCookieJar, self).__contains__(name)
        except CookieConflictError:
            return True

    def __getitem__(self, name):
        """Dict-like __getitem__() for compatibility with client code. Throws
        exception if there are more than one cookie with name. In that case,
        use the more explicit get() method instead.

        .. warning:: operation is O(n), not O(1).
        """
        return self._find_no_duplicates(name)

    def __setitem__(self, name, value):
        """Dict-like __setitem__ for compatibility with client code. Throws
        exception if there is already a cookie of that name in the jar. In that
        case, use the more explicit set() method instead.
        """
        self.set(name, value)

    def __delitem__(self, name):
        """Deletes a cookie given a name. Wraps ``cookielib.CookieJar``'s
        ``remove_cookie_by_name()``.
        """
        remove_cookie_by_name(self, name)

    def set_cookie(self, cookie, *args, **kwargs):
        # Strip escaped quotes from quoted cookie values before storing.
        if hasattr(cookie.value, 'startswith') and cookie.value.startswith('"') and cookie.value.endswith('"'):
            cookie.value = cookie.value.replace('\\"', '')
        return super(RequestsCookieJar, self).set_cookie(cookie, *args, **kwargs)

    def update(self, other):
        """Updates this jar with cookies from another CookieJar or dict-like"""
        if isinstance(other, cookielib.CookieJar):
            for cookie in other:
                self.set_cookie(copy.copy(cookie))
        else:
            super(RequestsCookieJar, self).update(other)

    def _find(self, name, domain=None, path=None):
        """Requests uses this method internally to get cookie values.

        If there are conflicting cookies, _find arbitrarily chooses one.
        See _find_no_duplicates if you want an exception thrown if there are
        conflicting cookies.

        :param name: a string containing name of cookie
        :param domain: (optional) string containing domain of cookie
        :param path: (optional) string containing path of cookie
        :return: cookie.value
        """
        for cookie in iter(self):
            if cookie.name == name:
                if domain is None or cookie.domain == domain:
                    if path is None or cookie.path == path:
                        return cookie.value

        raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path))

    def _find_no_duplicates(self, name, domain=None, path=None):
        """Both ``__get_item__`` and ``get`` call this function: it's never
        used elsewhere in Requests.

        :param name: a string containing name of cookie
        :param domain: (optional) string containing domain of cookie
        :param path: (optional) string containing path of cookie
        :raises KeyError: if cookie is not found
        :raises CookieConflictError: if there are multiple cookies
            that match name and optionally domain and path
        :return: cookie.value
        """
        toReturn = None
        for cookie in iter(self):
            if cookie.name == name:
                if domain is None or cookie.domain == domain:
                    if path is None or cookie.path == path:
                        if toReturn is not None:  # if there are multiple cookies that meet passed in criteria
                            raise CookieConflictError('There are multiple cookies with name, %r' % (name))
                        toReturn = cookie.value  # we will eventually return this as long as no cookie conflict

        # NOTE(review): this truthiness test treats a matched cookie whose
        # value is falsy (e.g. '') as "not found" and raises KeyError —
        # confirm whether that is intended before changing.
        if toReturn:
            return toReturn
        raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path))

    def __getstate__(self):
        """Unlike a normal CookieJar, this class is pickleable."""
        state = self.__dict__.copy()
        # remove the unpickleable RLock object
        state.pop('_cookies_lock')
        return state

    def __setstate__(self, state):
        """Unlike a normal CookieJar, this class is pickleable."""
        self.__dict__.update(state)
        if '_cookies_lock' not in self.__dict__:
            self._cookies_lock = threading.RLock()

    def copy(self):
        """Return a copy of this RequestsCookieJar."""
        new_cj = RequestsCookieJar()
        new_cj.update(self)
        return new_cj


def _copy_cookie_jar(jar):
    """Return a duplicate of ``jar`` (or ``None`` when no jar was given)."""
    if jar is None:
        return None

    if hasattr(jar, 'copy'):
        # RequestsCookieJar knows how to duplicate itself.
        return jar.copy()

    # Fall back for plain cookielib.CookieJar objects: shallow-copy the jar,
    # empty it, then re-add a copy of every cookie from the original.
    duplicate = copy.copy(jar)
    duplicate.clear()
    for item in jar:
        duplicate.set_cookie(copy.copy(item))
    return duplicate


def create_cookie(name, value, **kwargs):
    """Make a cookie from underspecified parameters.

    By default, the pair of `name` and `value` will be set for the domain ''
    and sent on every request (this is sometimes called a "supercookie").
    """
    # Defaults for every attribute cookielib.Cookie requires.
    spec = {
        'version': 0,
        'name': name,
        'value': value,
        'port': None,
        'domain': '',
        'path': '/',
        'secure': False,
        'expires': None,
        'discard': True,
        'comment': None,
        'comment_url': None,
        'rest': {'HttpOnly': None},
        'rfc2109': False,
    }

    unknown = set(kwargs) - set(spec)
    if unknown:
        err = 'create_cookie() got unexpected keyword arguments: %s'
        raise TypeError(err % list(unknown))

    spec.update(kwargs)
    # Derive the "*_specified" flags cookielib expects from the final values.
    spec['port_specified'] = bool(spec['port'])
    spec['domain_specified'] = bool(spec['domain'])
    spec['domain_initial_dot'] = spec['domain'].startswith('.')
    spec['path_specified'] = bool(spec['path'])

    return cookielib.Cookie(**spec)


def morsel_to_cookie(morsel):
    """Convert a Morsel object into a Cookie containing the one k/v pair."""

    expires = None
    max_age = morsel['max-age']
    if max_age:
        # Max-Age is relative: turn it into an absolute Unix timestamp.
        try:
            expires = int(time.time() + int(max_age))
        except ValueError:
            raise TypeError('max-age: %s must be integer' % max_age)
    elif morsel['expires']:
        # Expires is an HTTP date string; parse it as UTC.
        time_template = '%a, %d-%b-%Y %H:%M:%S GMT'
        expires = calendar.timegm(time.strptime(morsel['expires'], time_template))
    return create_cookie(
        comment=morsel['comment'],
        comment_url=bool(morsel['comment']),
        discard=False,
        domain=morsel['domain'],
        expires=expires,
        name=morsel.key,
        path=morsel['path'],
        port=None,
        rest={'HttpOnly': morsel['httponly']},
        rfc2109=False,
        secure=bool(morsel['secure']),
        value=morsel.value,
        version=morsel['version'] or 0,
    )


def cookiejar_from_dict(cookie_dict, cookiejar=None, overwrite=True):
    """Returns a CookieJar from a key/value dictionary.

    :param cookie_dict: Dict of key/values to insert into CookieJar.
    :param cookiejar: (optional) A cookiejar to add the cookies to.
    :param overwrite: (optional) If False, will not replace cookies
        already in the jar with new ones.
    """
    if cookiejar is None:
        cookiejar = RequestsCookieJar()

    if cookie_dict is not None:
        # Snapshot existing names once so overwrite=False skips duplicates.
        existing = {cookie.name for cookie in cookiejar}
        for name, value in cookie_dict.items():
            if overwrite or name not in existing:
                cookiejar.set_cookie(create_cookie(name, value))

    return cookiejar


def merge_cookies(cookiejar, cookies):
    """Add cookies to cookiejar and returns a merged CookieJar.

    :param cookiejar: CookieJar object to add the cookies to.
    :param cookies: Dictionary or CookieJar object to be added.
    """
    if not isinstance(cookiejar, cookielib.CookieJar):
        raise ValueError('You can only merge into CookieJar')

    if isinstance(cookies, dict):
        return cookiejar_from_dict(
            cookies, cookiejar=cookiejar, overwrite=False)

    if isinstance(cookies, cookielib.CookieJar):
        try:
            cookiejar.update(cookies)
        except AttributeError:
            # Plain cookielib.CookieJar has no update(); copy cookie by cookie.
            for incoming in cookies:
                cookiejar.set_cookie(incoming)

    return cookiejar
_vendor/requests/sessions.py000064400000070021151733136410012277 0ustar00# -*- coding: utf-8 -*-

"""
requests.session
~~~~~~~~~~~~~~~~

This module provides a Session object to manage and persist settings across
requests (cookies, auth, proxies).
"""
import os
import platform
import time
from collections import Mapping
from datetime import timedelta

from .auth import _basic_auth_str
from .compat import cookielib, is_py3, OrderedDict, urljoin, urlparse
from .cookies import (
    cookiejar_from_dict, extract_cookies_to_jar, RequestsCookieJar, merge_cookies)
from .models import Request, PreparedRequest, DEFAULT_REDIRECT_LIMIT
from .hooks import default_hooks, dispatch_hook
from ._internal_utils import to_native_string
from .utils import to_key_val_list, default_headers
from .exceptions import (
    TooManyRedirects, InvalidSchema, ChunkedEncodingError, ContentDecodingError)

from .structures import CaseInsensitiveDict
from .adapters import HTTPAdapter

from .utils import (
    requote_uri, get_environ_proxies, get_netrc_auth, should_bypass_proxies,
    get_auth_from_url, rewind_body, DEFAULT_PORTS
)

from .status_codes import codes

# formerly defined here, reexposed here for backward compatibility
from .models import REDIRECT_STATI

# Preferred clock, based on which one is more accurate on a given system.
if platform.system() == 'Windows':
    try:  # Python 3.3+
        preferred_clock = time.perf_counter
    except AttributeError:  # Earlier than Python 3.
        preferred_clock = time.clock
else:
    # All non-Windows platforms use time.time.
    preferred_clock = time.time


def merge_setting(request_setting, session_setting, dict_class=OrderedDict):
    """Determines appropriate setting for a given request, taking into account
    the explicit setting on that request, and the setting in the session. If a
    setting is a dictionary, they will be merged together using `dict_class`
    """
    if session_setting is None:
        return request_setting
    if request_setting is None:
        return session_setting

    # Non-dictionary settings (e.g. verify) can't be merged: request wins.
    both_mappings = (
        isinstance(session_setting, Mapping) and
        isinstance(request_setting, Mapping)
    )
    if not both_mappings:
        return request_setting

    merged = dict_class(to_key_val_list(session_setting))
    merged.update(to_key_val_list(request_setting))

    # A None value on the request explicitly removes the session's entry.
    # Collect keys first so the dict isn't altered while iterating.
    for key in [k for (k, v) in merged.items() if v is None]:
        del merged[key]

    return merged


def merge_hooks(request_hooks, session_hooks, dict_class=OrderedDict):
    """Properly merges both requests and session hooks.

    This is necessary because when request_hooks == {'response': []}, the
    merge breaks Session hooks entirely.
    """
    def _is_effectively_empty(hooks):
        # Treat a missing dict or an explicit empty 'response' list as empty.
        return hooks is None or hooks.get('response') == []

    if _is_effectively_empty(session_hooks):
        return request_hooks
    if _is_effectively_empty(request_hooks):
        return session_hooks

    return merge_setting(request_hooks, session_hooks, dict_class)


class SessionRedirectMixin(object):
    """Redirect-resolution behaviour mixed into :class:`Session`."""

    def get_redirect_target(self, resp):
        """Receives a Response. Returns a redirect URI or ``None``"""
        # Due to the nature of how requests processes redirects this method will
        # be called at least once upon the original response and at least twice
        # on each subsequent redirect response (if any).
        # If a custom mixin is used to handle this logic, it may be advantageous
        # to cache the redirect location onto the response object as a private
        # attribute.
        if resp.is_redirect:
            location = resp.headers['location']
            # Currently the underlying http module on py3 decode headers
            # in latin1, but empirical evidence suggests that latin1 is very
            # rarely used with non-ASCII characters in HTTP headers.
            # It is more likely to get UTF8 header rather than latin1.
            # This causes incorrect handling of UTF8 encoded location headers.
            # To solve this, we re-encode the location in latin1.
            if is_py3:
                location = location.encode('latin1')
            return to_native_string(location, 'utf8')
        return None


    def should_strip_auth(self, old_url, new_url):
        """Decide whether Authorization header should be removed when redirecting

        :rtype: bool
        """
        old_parsed = urlparse(old_url)
        new_parsed = urlparse(new_url)
        if old_parsed.hostname != new_parsed.hostname:
            return True
        # Special case: allow http -> https redirect when using the standard
        # ports. This isn't specified by RFC 7235, but is kept to avoid
        # breaking backwards compatibility with older versions of requests
        # that allowed any redirects on the same host.
        if (old_parsed.scheme == 'http' and old_parsed.port in (80, None)
                and new_parsed.scheme == 'https' and new_parsed.port in (443, None)):
            return False

        # Handle default port usage corresponding to scheme.
        changed_port = old_parsed.port != new_parsed.port
        changed_scheme = old_parsed.scheme != new_parsed.scheme
        default_port = (DEFAULT_PORTS.get(old_parsed.scheme, None), None)
        if (not changed_scheme and old_parsed.port in default_port
                and new_parsed.port in default_port):
            return False

        # Standard case: root URI must match
        return changed_port or changed_scheme

    def resolve_redirects(self, resp, req, stream=False, timeout=None,
                          verify=True, cert=None, proxies=None, yield_requests=False, **adapter_kwargs):
        """Receives a Response. Returns a generator of Responses or Requests.

        When ``yield_requests`` is true, each prepared redirect request is
        yielded instead of being sent, and no further responses are produced.
        """

        hist = []  # keep track of history

        url = self.get_redirect_target(resp)
        while url:
            prepared_request = req.copy()

            # Update history and keep track of redirects.
            # resp.history must ignore the original request in this loop
            hist.append(resp)
            resp.history = hist[1:]

            try:
                resp.content  # Consume socket so it can be released
            except (ChunkedEncodingError, ContentDecodingError, RuntimeError):
                resp.raw.read(decode_content=False)

            if len(resp.history) >= self.max_redirects:
                raise TooManyRedirects('Exceeded %s redirects.' % self.max_redirects, response=resp)

            # Release the connection back into the pool.
            resp.close()

            # Handle redirection without scheme (see: RFC 1808 Section 4)
            if url.startswith('//'):
                parsed_rurl = urlparse(resp.url)
                url = '%s:%s' % (to_native_string(parsed_rurl.scheme), url)

            # The scheme should be lower case...
            parsed = urlparse(url)
            url = parsed.geturl()

            # Facilitate relative 'location' headers, as allowed by RFC 7231.
            # (e.g. '/path/to/resource' instead of 'http://domain.tld/path/to/resource')
            # Compliant with RFC3986, we percent encode the url.
            if not parsed.netloc:
                url = urljoin(resp.url, requote_uri(url))
            else:
                url = requote_uri(url)

            prepared_request.url = to_native_string(url)

            self.rebuild_method(prepared_request, resp)

            # https://github.com/requests/requests/issues/1084
            if resp.status_code not in (codes.temporary_redirect, codes.permanent_redirect):
                # https://github.com/requests/requests/issues/3490
                purged_headers = ('Content-Length', 'Content-Type', 'Transfer-Encoding')
                for header in purged_headers:
                    prepared_request.headers.pop(header, None)
                prepared_request.body = None

            headers = prepared_request.headers
            try:
                del headers['Cookie']
            except KeyError:
                pass

            # Extract any cookies sent on the response to the cookiejar
            # in the new request. Because we've mutated our copied prepared
            # request, use the old one that we haven't yet touched.
            extract_cookies_to_jar(prepared_request._cookies, req, resp.raw)
            merge_cookies(prepared_request._cookies, self.cookies)
            prepared_request.prepare_cookies(prepared_request._cookies)

            # Rebuild auth and proxy information.
            proxies = self.rebuild_proxies(prepared_request, proxies)
            self.rebuild_auth(prepared_request, resp)

            # A failed tell() sets `_body_position` to `object()`. This non-None
            # value ensures `rewindable` will be True, allowing us to raise an
            # UnrewindableBodyError, instead of hanging the connection.
            rewindable = (
                prepared_request._body_position is not None and
                ('Content-Length' in headers or 'Transfer-Encoding' in headers)
            )

            # Attempt to rewind consumed file-like object.
            if rewindable:
                rewind_body(prepared_request)

            # Override the original request.
            req = prepared_request

            if yield_requests:
                yield req
            else:

                resp = self.send(
                    req,
                    stream=stream,
                    timeout=timeout,
                    verify=verify,
                    cert=cert,
                    proxies=proxies,
                    allow_redirects=False,
                    **adapter_kwargs
                )

                extract_cookies_to_jar(self.cookies, prepared_request, resp.raw)

                # extract redirect url, if any, for the next loop
                url = self.get_redirect_target(resp)
                yield resp

    def rebuild_auth(self, prepared_request, response):
        """When being redirected we may want to strip authentication from the
        request to avoid leaking credentials. This method intelligently removes
        and reapplies authentication where possible to avoid credential loss.
        """
        headers = prepared_request.headers
        url = prepared_request.url

        if 'Authorization' in headers and self.should_strip_auth(response.request.url, url):
            # If we get redirected to a new host, we should strip out any
            # authentication headers.
            del headers['Authorization']

        # .netrc might have more auth for us on our new host.
        new_auth = get_netrc_auth(url) if self.trust_env else None
        if new_auth is not None:
            prepared_request.prepare_auth(new_auth)

        return

    def rebuild_proxies(self, prepared_request, proxies):
        """This method re-evaluates the proxy configuration by considering the
        environment variables. If we are redirected to a URL covered by
        NO_PROXY, we strip the proxy configuration. Otherwise, we set missing
        proxy keys for this URL (in case they were stripped by a previous
        redirect).

        This method also replaces the Proxy-Authorization header where
        necessary.

        :rtype: dict
        """
        proxies = proxies if proxies is not None else {}
        headers = prepared_request.headers
        url = prepared_request.url
        scheme = urlparse(url).scheme
        new_proxies = proxies.copy()
        no_proxy = proxies.get('no_proxy')

        bypass_proxy = should_bypass_proxies(url, no_proxy=no_proxy)
        if self.trust_env and not bypass_proxy:
            environ_proxies = get_environ_proxies(url, no_proxy=no_proxy)

            # Fall back to a scheme-specific proxy, then an 'all' proxy.
            proxy = environ_proxies.get(scheme, environ_proxies.get('all'))

            if proxy:
                new_proxies.setdefault(scheme, proxy)

        # Never carry Proxy-Authorization across redirects; re-derive it
        # below from the (possibly new) proxy URL's credentials.
        if 'Proxy-Authorization' in headers:
            del headers['Proxy-Authorization']

        try:
            username, password = get_auth_from_url(new_proxies[scheme])
        except KeyError:
            username, password = None, None

        if username and password:
            headers['Proxy-Authorization'] = _basic_auth_str(username, password)

        return new_proxies

    def rebuild_method(self, prepared_request, response):
        """When being redirected we may want to change the method of the request
        based on certain specs or browser behavior.
        """
        method = prepared_request.method

        # http://tools.ietf.org/html/rfc7231#section-6.4.4
        if response.status_code == codes.see_other and method != 'HEAD':
            method = 'GET'

        # Do what the browsers do, despite standards...
        # First, turn 302s into GETs.
        if response.status_code == codes.found and method != 'HEAD':
            method = 'GET'

        # Second, if a POST is responded to with a 301, turn it into a GET.
        # This bizarre behaviour is explained in Issue 1704.
        if response.status_code == codes.moved and method == 'POST':
            method = 'GET'

        prepared_request.method = method


class Session(SessionRedirectMixin):
    """A Requests session.

    Provides cookie persistence, connection-pooling, and configuration.

    Basic Usage::

      >>> import requests
      >>> s = requests.Session()
      >>> s.get('http://httpbin.org/get')
      <Response [200]>

    Or as a context manager::

      >>> with requests.Session() as s:
      >>>     s.get('http://httpbin.org/get')
      <Response [200]>
    """

    # Attributes captured by __getstate__/__setstate__ for pickling.
    __attrs__ = [
        'headers', 'cookies', 'auth', 'proxies', 'hooks', 'params', 'verify',
        'cert', 'prefetch', 'adapters', 'stream', 'trust_env',
        'max_redirects',
    ]

    def __init__(self):

        #: A case-insensitive dictionary of headers to be sent on each
        #: :class:`Request <Request>` sent from this
        #: :class:`Session <Session>`.
        self.headers = default_headers()

        #: Default Authentication tuple or object to attach to
        #: :class:`Request <Request>`.
        self.auth = None

        #: Dictionary mapping protocol or protocol and host to the URL of the proxy
        #: (e.g. {'http': 'foo.bar:3128', 'http://host.name': 'foo.bar:4012'}) to
        #: be used on each :class:`Request <Request>`.
        self.proxies = {}

        #: Event-handling hooks.
        self.hooks = default_hooks()

        #: Dictionary of querystring data to attach to each
        #: :class:`Request <Request>`. The dictionary values may be lists for
        #: representing multivalued query parameters.
        self.params = {}

        #: Stream response content default.
        self.stream = False

        #: SSL Verification default.
        self.verify = True

        #: SSL client certificate default, if String, path to ssl client
        #: cert file (.pem). If Tuple, ('cert', 'key') pair.
        self.cert = None

        #: Maximum number of redirects allowed. If the request exceeds this
        #: limit, a :class:`TooManyRedirects` exception is raised.
        #: This defaults to requests.models.DEFAULT_REDIRECT_LIMIT, which is
        #: 30.
        self.max_redirects = DEFAULT_REDIRECT_LIMIT

        #: Trust environment settings for proxy configuration, default
        #: authentication and similar.
        self.trust_env = True

        #: A CookieJar containing all currently outstanding cookies set on this
        #: session. By default it is a
        #: :class:`RequestsCookieJar <requests.cookies.RequestsCookieJar>`, but
        #: may be any other ``cookielib.CookieJar`` compatible object.
        self.cookies = cookiejar_from_dict({})

        # Default connection adapters.
        self.adapters = OrderedDict()
        self.mount('https://', HTTPAdapter())
        self.mount('http://', HTTPAdapter())

    def __enter__(self):
        """Support use as a context manager; returns the session itself."""
        return self

    def __exit__(self, *args):
        """Close all adapters when leaving the ``with`` block."""
        self.close()

    def prepare_request(self, request):
        """Constructs a :class:`PreparedRequest <PreparedRequest>` for
        transmission and returns it. The :class:`PreparedRequest` has settings
        merged from the :class:`Request <Request>` instance and those of the
        :class:`Session`.

        :param request: :class:`Request` instance to prepare with this
            session's settings.
        :rtype: requests.PreparedRequest
        """
        cookies = request.cookies or {}

        # Bootstrap CookieJar.
        if not isinstance(cookies, cookielib.CookieJar):
            cookies = cookiejar_from_dict(cookies)

        # Merge with session cookies
        merged_cookies = merge_cookies(
            merge_cookies(RequestsCookieJar(), self.cookies), cookies)

        # Set environment's basic authentication if not explicitly set.
        auth = request.auth
        if self.trust_env and not auth and not self.auth:
            auth = get_netrc_auth(request.url)

        p = PreparedRequest()
        p.prepare(
            method=request.method.upper(),
            url=request.url,
            files=request.files,
            data=request.data,
            json=request.json,
            headers=merge_setting(request.headers, self.headers, dict_class=CaseInsensitiveDict),
            params=merge_setting(request.params, self.params),
            auth=merge_setting(auth, self.auth),
            cookies=merged_cookies,
            hooks=merge_hooks(request.hooks, self.hooks),
        )
        return p

    def request(self, method, url,
            params=None, data=None, headers=None, cookies=None, files=None,
            auth=None, timeout=None, allow_redirects=True, proxies=None,
            hooks=None, stream=None, verify=None, cert=None, json=None):
        """Constructs a :class:`Request <Request>`, prepares it and sends it.
        Returns :class:`Response <Response>` object.

        :param method: method for the new :class:`Request` object.
        :param url: URL for the new :class:`Request` object.
        :param params: (optional) Dictionary or bytes to be sent in the query
            string for the :class:`Request`.
        :param data: (optional) Dictionary, bytes, or file-like object to send
            in the body of the :class:`Request`.
        :param json: (optional) json to send in the body of the
            :class:`Request`.
        :param headers: (optional) Dictionary of HTTP Headers to send with the
            :class:`Request`.
        :param cookies: (optional) Dict or CookieJar object to send with the
            :class:`Request`.
        :param files: (optional) Dictionary of ``'filename': file-like-objects``
            for multipart encoding upload.
        :param auth: (optional) Auth tuple or callable to enable
            Basic/Digest/Custom HTTP Auth.
        :param timeout: (optional) How long to wait for the server to send
            data before giving up, as a float, or a :ref:`(connect timeout,
            read timeout) <timeouts>` tuple.
        :type timeout: float or tuple
        :param allow_redirects: (optional) Set to True by default.
        :type allow_redirects: bool
        :param proxies: (optional) Dictionary mapping protocol or protocol and
            hostname to the URL of the proxy.
        :param stream: (optional) whether to immediately download the response
            content. Defaults to ``False``.
        :param verify: (optional) Either a boolean, in which case it controls whether we verify
            the server's TLS certificate, or a string, in which case it must be a path
            to a CA bundle to use. Defaults to ``True``.
        :param cert: (optional) if String, path to ssl client cert file (.pem).
            If Tuple, ('cert', 'key') pair.
        :rtype: requests.Response
        """
        # Create the Request.
        req = Request(
            method=method.upper(),
            url=url,
            headers=headers,
            files=files,
            data=data or {},
            json=json,
            params=params or {},
            auth=auth,
            cookies=cookies,
            hooks=hooks,
        )
        prep = self.prepare_request(req)

        proxies = proxies or {}

        settings = self.merge_environment_settings(
            prep.url, proxies, stream, verify, cert
        )

        # Send the request.
        send_kwargs = {
            'timeout': timeout,
            'allow_redirects': allow_redirects,
        }
        send_kwargs.update(settings)
        resp = self.send(prep, **send_kwargs)

        return resp

    def get(self, url, **kwargs):
        r"""Sends a GET request. Returns :class:`Response` object.

        :param url: URL for the new :class:`Request` object.
        :param \*\*kwargs: Optional arguments that ``request`` takes.
        :rtype: requests.Response
        """

        kwargs.setdefault('allow_redirects', True)
        return self.request('GET', url, **kwargs)

    def options(self, url, **kwargs):
        r"""Sends a OPTIONS request. Returns :class:`Response` object.

        :param url: URL for the new :class:`Request` object.
        :param \*\*kwargs: Optional arguments that ``request`` takes.
        :rtype: requests.Response
        """

        kwargs.setdefault('allow_redirects', True)
        return self.request('OPTIONS', url, **kwargs)

    def head(self, url, **kwargs):
        r"""Sends a HEAD request. Returns :class:`Response` object.

        :param url: URL for the new :class:`Request` object.
        :param \*\*kwargs: Optional arguments that ``request`` takes.
        :rtype: requests.Response
        """

        # HEAD does not follow redirects by default, unlike the other verbs.
        kwargs.setdefault('allow_redirects', False)
        return self.request('HEAD', url, **kwargs)

    def post(self, url, data=None, json=None, **kwargs):
        r"""Sends a POST request. Returns :class:`Response` object.

        :param url: URL for the new :class:`Request` object.
        :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
        :param json: (optional) json to send in the body of the :class:`Request`.
        :param \*\*kwargs: Optional arguments that ``request`` takes.
        :rtype: requests.Response
        """

        return self.request('POST', url, data=data, json=json, **kwargs)

    def put(self, url, data=None, **kwargs):
        r"""Sends a PUT request. Returns :class:`Response` object.

        :param url: URL for the new :class:`Request` object.
        :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
        :param \*\*kwargs: Optional arguments that ``request`` takes.
        :rtype: requests.Response
        """

        return self.request('PUT', url, data=data, **kwargs)

    def patch(self, url, data=None, **kwargs):
        r"""Sends a PATCH request. Returns :class:`Response` object.

        :param url: URL for the new :class:`Request` object.
        :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
        :param \*\*kwargs: Optional arguments that ``request`` takes.
        :rtype: requests.Response
        """

        return self.request('PATCH', url, data=data, **kwargs)

    def delete(self, url, **kwargs):
        r"""Sends a DELETE request. Returns :class:`Response` object.

        :param url: URL for the new :class:`Request` object.
        :param \*\*kwargs: Optional arguments that ``request`` takes.
        :rtype: requests.Response
        """

        return self.request('DELETE', url, **kwargs)

    def send(self, request, **kwargs):
        """Send a given PreparedRequest.

        :rtype: requests.Response
        """
        # Set defaults that the hooks can utilize to ensure they always have
        # the correct parameters to reproduce the previous request.
        kwargs.setdefault('stream', self.stream)
        kwargs.setdefault('verify', self.verify)
        kwargs.setdefault('cert', self.cert)
        kwargs.setdefault('proxies', self.proxies)

        # It's possible that users might accidentally send a Request object.
        # Guard against that specific failure case.
        if isinstance(request, Request):
            raise ValueError('You can only send PreparedRequests.')

        # Set up variables needed for resolve_redirects and dispatching of hooks
        allow_redirects = kwargs.pop('allow_redirects', True)
        stream = kwargs.get('stream')
        hooks = request.hooks

        # Get the appropriate adapter to use
        adapter = self.get_adapter(url=request.url)

        # Start time (approximately) of the request
        start = preferred_clock()

        # Send the request
        r = adapter.send(request, **kwargs)

        # Total elapsed time of the request (approximately)
        elapsed = preferred_clock() - start
        r.elapsed = timedelta(seconds=elapsed)

        # Response manipulation hooks
        r = dispatch_hook('response', hooks, r, **kwargs)

        # Persist cookies
        if r.history:

            # If the hooks create history then we want those cookies too
            for resp in r.history:
                extract_cookies_to_jar(self.cookies, resp.request, resp.raw)

        extract_cookies_to_jar(self.cookies, request, r.raw)

        # Redirect resolving generator.
        gen = self.resolve_redirects(r, request, **kwargs)

        # Resolve redirects if allowed.
        history = [resp for resp in gen] if allow_redirects else []

        # Shuffle things around if there's history.
        if history:
            # Insert the first (original) request at the start
            history.insert(0, r)
            # Get the last request made
            r = history.pop()
            r.history = history

        # If redirects aren't being followed, store the response on the Request for Response.next().
        if not allow_redirects:
            try:
                r._next = next(self.resolve_redirects(r, request, yield_requests=True, **kwargs))
            except StopIteration:
                pass

        # Eagerly read the body unless the caller asked for a streamed response.
        if not stream:
            r.content

        return r

    def merge_environment_settings(self, url, proxies, stream, verify, cert):
        """
        Check the environment and merge it with some settings.

        :rtype: dict
        """
        # Gather clues from the surrounding environment.
        if self.trust_env:
            # Set environment's proxies.
            no_proxy = proxies.get('no_proxy') if proxies is not None else None
            env_proxies = get_environ_proxies(url, no_proxy=no_proxy)
            for (k, v) in env_proxies.items():
                proxies.setdefault(k, v)

            # Look for requests environment configuration and be compatible
            # with cURL.
            if verify is True or verify is None:
                verify = (os.environ.get('REQUESTS_CA_BUNDLE') or
                          os.environ.get('CURL_CA_BUNDLE'))

        # Merge all the kwargs.
        proxies = merge_setting(proxies, self.proxies)
        stream = merge_setting(stream, self.stream)
        verify = merge_setting(verify, self.verify)
        cert = merge_setting(cert, self.cert)

        return {'verify': verify, 'proxies': proxies, 'stream': stream,
                'cert': cert}

    def get_adapter(self, url):
        """
        Returns the appropriate connection adapter for the given URL.

        :rtype: requests.adapters.BaseAdapter
        """
        # Adapters are kept ordered longest-prefix first (see mount()), so the
        # first prefix match is the most specific one.
        for (prefix, adapter) in self.adapters.items():

            if url.lower().startswith(prefix):
                return adapter

        # Nothing matches :-/
        raise InvalidSchema("No connection adapters were found for '%s'" % url)

    def close(self):
        """Closes all adapters and as such the session"""
        for v in self.adapters.values():
            v.close()

    def mount(self, prefix, adapter):
        """Registers a connection adapter to a prefix.

        Adapters are sorted in descending order by prefix length.
        """
        self.adapters[prefix] = adapter
        keys_to_move = [k for k in self.adapters if len(k) < len(prefix)]

        # Re-insert shorter prefixes after the new entry so that get_adapter,
        # which returns the first match in iteration order, prefers the
        # longest (most specific) prefix.
        for key in keys_to_move:
            self.adapters[key] = self.adapters.pop(key)

    def __getstate__(self):
        """Capture the attributes listed in ``__attrs__`` for pickling."""
        state = dict((attr, getattr(self, attr, None)) for attr in self.__attrs__)
        return state

    def __setstate__(self, state):
        """Restore the attributes captured by ``__getstate__``."""
        for attr, value in state.items():
            setattr(self, attr, value)


def session():
    """
    Returns a :class:`Session` for context-management.

    :rtype: Session
    """
    # Thin convenience wrapper kept for backwards compatibility; identical
    # to constructing Session() directly.
    return Session()
_vendor/requests/certs.py000064400000000721151733136410011551 0ustar00#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""
requests.certs
~~~~~~~~~~~~~~

This module returns the preferred default CA certificate bundle. There is
only one — the one from the certifi package.

If you are packaging Requests, e.g., for a Linux distribution or a managed
environment, you can change the definition of where() to return a separately
packaged CA bundle.
"""
from pip._vendor.certifi import where

# Running this module directly prints the path of the CA bundle in use,
# which is handy for debugging certificate problems.
if __name__ == '__main__':
    print(where())
_vendor/requests/__version__.py000064400000000664151733136410012720 0ustar00# .-. .-. .-. . . .-. .-. .-. .-.
# |(  |-  |.| | | |-  `-.  |  `-.
# ' ' `-' `-`.`-' `-' `-'  '  `-'

__title__ = 'requests'
__description__ = 'Python HTTP for Humans.'
__url__ = 'http://python-requests.org'
# Version of the vendored requests release.
__version__ = '2.18.4'
# Hex-encoded build number: 0xMMmmpp (major, minor, patch).
__build__ = 0x021804
__author__ = 'Kenneth Reitz'
__author_email__ = 'me@kennethreitz.org'
__license__ = 'Apache 2.0'
__copyright__ = 'Copyright 2017 Kenneth Reitz'
# Easter egg: sparkles and cake.
__cake__ = u'\u2728 \U0001f370 \u2728'
_vendor/requests/exceptions.py000064400000006053151733136410012616 0ustar00# -*- coding: utf-8 -*-

"""
requests.exceptions
~~~~~~~~~~~~~~~~~~~

This module contains the set of Requests' exceptions.
"""
from pip._vendor.urllib3.exceptions import HTTPError as BaseHTTPError


class RequestException(IOError):
    """There was an ambiguous exception that occurred while handling your
    request.
    """

    def __init__(self, *args, **kwargs):
        """Initialize RequestException with `request` and `response` objects."""
        self.response = kwargs.pop('response', None)
        self.request = kwargs.pop('request', None)
        # When no request was supplied explicitly, borrow the one recorded
        # on the response (if the response carries one).
        if (self.response is not None and not self.request and
                hasattr(self.response, 'request')):
            self.request = self.response.request
        super(RequestException, self).__init__(*args, **kwargs)


class HTTPError(RequestException):
    """An HTTP error occurred."""


class ConnectionError(RequestException):
    """A Connection error occurred."""


class ProxyError(ConnectionError):
    """A proxy error occurred."""


class SSLError(ConnectionError):
    """An SSL error occurred."""


class Timeout(RequestException):
    """The request timed out.

    Catching this error will catch both
    :exc:`~requests.exceptions.ConnectTimeout` and
    :exc:`~requests.exceptions.ReadTimeout` errors.
    """


class ConnectTimeout(ConnectionError, Timeout):
    """The request timed out while trying to connect to the remote server.

    Requests that produced this error are safe to retry.
    """


class ReadTimeout(Timeout):
    """The server did not send any data in the allotted amount of time."""


class URLRequired(RequestException):
    """A valid URL is required to make a request."""


class TooManyRedirects(RequestException):
    """Too many redirects."""


class MissingSchema(RequestException, ValueError):
    """The URL schema (e.g. http or https) is missing."""


class InvalidSchema(RequestException, ValueError):
    """The URL scheme is not supported (see defaults.py for valid schemas)."""


class InvalidURL(RequestException, ValueError):
    """The URL provided was somehow invalid."""


class InvalidHeader(RequestException, ValueError):
    """The header value provided was somehow invalid."""


class ChunkedEncodingError(RequestException):
    """The server declared chunked encoding but sent an invalid chunk."""


class ContentDecodingError(RequestException, BaseHTTPError):
    """Failed to decode response content"""


class StreamConsumedError(RequestException, TypeError):
    """The content for this response was already consumed"""


class RetryError(RequestException):
    """Custom retries logic failed"""


class UnrewindableBodyError(RequestException):
    """Requests encountered an error when trying to rewind a body"""

# Warnings


class RequestsWarning(Warning):
    """Base warning for Requests."""
    pass


class FileModeWarning(RequestsWarning, DeprecationWarning):
    """A file was opened in text mode, but Requests determined its binary length."""
    pass


class RequestsDependencyWarning(RequestsWarning):
    """An imported dependency doesn't match the expected version range."""
    pass
_vendor/requests/models.py000064400000102403151733136410011714 0ustar00# -*- coding: utf-8 -*-

"""
requests.models
~~~~~~~~~~~~~~~

This module contains the primary objects that power Requests.
"""

import collections
import datetime
import sys

# Import encoding now, to avoid implicit import later.
# Implicit import within threads may cause LookupError when standard library is in a ZIP,
# such as in Embedded Python. See https://github.com/requests/requests/issues/3578.
import encodings.idna

from pip._vendor.urllib3.fields import RequestField
from pip._vendor.urllib3.filepost import encode_multipart_formdata
from pip._vendor.urllib3.util import parse_url
from pip._vendor.urllib3.exceptions import (
    DecodeError, ReadTimeoutError, ProtocolError, LocationParseError)

from io import UnsupportedOperation
from .hooks import default_hooks
from .structures import CaseInsensitiveDict

from .auth import HTTPBasicAuth
from .cookies import cookiejar_from_dict, get_cookie_header, _copy_cookie_jar
from .exceptions import (
    HTTPError, MissingSchema, InvalidURL, ChunkedEncodingError,
    ContentDecodingError, ConnectionError, StreamConsumedError)
from ._internal_utils import to_native_string, unicode_is_ascii
from .utils import (
    guess_filename, get_auth_from_url, requote_uri,
    stream_decode_response_unicode, to_key_val_list, parse_header_links,
    iter_slices, guess_json_utf, super_len, check_header_validity)
from .compat import (
    cookielib, urlunparse, urlsplit, urlencode, str, bytes,
    is_py2, chardet, builtin_str, basestring)
from .compat import json as complexjson
from .status_codes import codes

#: The set of HTTP status codes that indicate an automatically
#: processable redirect.
REDIRECT_STATI = (
    codes.moved,               # 301
    codes.found,               # 302
    codes.other,               # 303
    codes.temporary_redirect,  # 307
    codes.permanent_redirect,  # 308
)

#: Maximum number of redirects followed before giving up.
DEFAULT_REDIRECT_LIMIT = 30
#: Default chunk size (bytes) used when reading Response.content.
CONTENT_CHUNK_SIZE = 10 * 1024
#: Default chunk size (bytes) used by Response.iter_lines.
ITER_CHUNK_SIZE = 512


class RequestEncodingMixin(object):
    # Mixin providing body/URL-path encoding helpers for PreparedRequest.

    @property
    def path_url(self):
        """Build the path URL to use."""

        url = []

        p = urlsplit(self.url)

        # Only the path and query are kept; scheme, netloc and fragment
        # are intentionally dropped (this is the request-line form).
        path = p.path
        if not path:
            path = '/'

        url.append(path)

        query = p.query
        if query:
            url.append('?')
            url.append(query)

        return ''.join(url)

    @staticmethod
    def _encode_params(data):
        """Encode parameters in a piece of data.

        Will successfully encode parameters when passed as a dict or a list of
        2-tuples. Order is retained if data is a list of 2-tuples but arbitrary
        if parameters are supplied as a dict.
        """

        # Strings/bytes and file-like objects pass through untouched; only
        # iterable key/value data is form-encoded.
        if isinstance(data, (str, bytes)):
            return data
        elif hasattr(data, 'read'):
            return data
        elif hasattr(data, '__iter__'):
            result = []
            for k, vs in to_key_val_list(data):
                # A scalar value is treated as a one-element list so that
                # list values produce repeated keys (doseq semantics).
                if isinstance(vs, basestring) or not hasattr(vs, '__iter__'):
                    vs = [vs]
                for v in vs:
                    if v is not None:
                        result.append(
                            (k.encode('utf-8') if isinstance(k, str) else k,
                             v.encode('utf-8') if isinstance(v, str) else v))
            return urlencode(result, doseq=True)
        else:
            return data

    @staticmethod
    def _encode_files(files, data):
        """Build the body for a multipart/form-data request.

        Will successfully encode files when passed as a dict or a list of
        tuples. Order is retained if data is a list of tuples but arbitrary
        if parameters are supplied as a dict.
        The tuples may be 2-tuples (filename, fileobj), 3-tuples (filename, fileobj, contentype)
        or 4-tuples (filename, fileobj, contentype, custom_headers).
        """
        if (not files):
            raise ValueError("Files must be provided.")
        elif isinstance(data, basestring):
            raise ValueError("Data must not be a string.")

        new_fields = []
        fields = to_key_val_list(data or {})
        files = to_key_val_list(files or {})

        # First, add the plain form fields alongside the files.
        for field, val in fields:
            if isinstance(val, basestring) or not hasattr(val, '__iter__'):
                val = [val]
            for v in val:
                if v is not None:
                    # Don't call str() on bytestrings: in Py3 it all goes wrong.
                    if not isinstance(v, bytes):
                        v = str(v)

                    new_fields.append(
                        (field.decode('utf-8') if isinstance(field, bytes) else field,
                         v.encode('utf-8') if isinstance(v, str) else v))

        # Then add the file parts, accepting the 2/3/4-tuple shapes
        # documented above, or a bare file object / string.
        for (k, v) in files:
            # support for explicit filename
            ft = None
            fh = None
            if isinstance(v, (tuple, list)):
                if len(v) == 2:
                    fn, fp = v
                elif len(v) == 3:
                    fn, fp, ft = v
                else:
                    fn, fp, ft, fh = v
            else:
                fn = guess_filename(v) or k
                fp = v

            # NOTE(review): a non-string file object is read fully into
            # memory here — large uploads are not streamed by this path.
            if isinstance(fp, (str, bytes, bytearray)):
                fdata = fp
            else:
                fdata = fp.read()

            rf = RequestField(name=k, data=fdata, filename=fn, headers=fh)
            rf.make_multipart(content_type=ft)
            new_fields.append(rf)

        body, content_type = encode_multipart_formdata(new_fields)

        return body, content_type


class RequestHooksMixin(object):
    def register_hook(self, event, hook):
        """Properly register a hook.

        :param event: name of an event already present in ``self.hooks``.
        :param hook: a callable, or an iterable of callables, to run for the
            event. Non-callable items in an iterable are silently skipped.
        :raises ValueError: if ``event`` is not a known hook name.
        """

        if event not in self.hooks:
            raise ValueError('Unsupported event specified, with event name "%s"' % (event))

        # Use the ``callable()`` builtin instead of
        # ``isinstance(hook, collections.Callable)``: the ABC alias was
        # removed from the ``collections`` namespace in Python 3.10.
        if callable(hook):
            self.hooks[event].append(hook)
        elif hasattr(hook, '__iter__'):
            self.hooks[event].extend(h for h in hook if callable(h))

    def deregister_hook(self, event, hook):
        """Deregister a previously registered hook.
        Returns True if the hook existed, False if not.
        """

        try:
            self.hooks[event].remove(hook)
            return True
        except ValueError:
            return False


class Request(RequestHooksMixin):
    """A user-created :class:`Request <Request>` object.

    Used to prepare a :class:`PreparedRequest <PreparedRequest>`, which is sent to the server.

    :param method: HTTP method to use.
    :param url: URL to send.
    :param headers: dictionary of headers to send.
    :param files: dictionary of {filename: fileobject} files to multipart upload.
    :param data: the body to attach to the request. If a dictionary is provided, form-encoding will take place.
    :param json: json for the body to attach to the request (if files or data is not specified).
    :param params: dictionary of URL parameters to append to the URL.
    :param auth: Auth handler or (user, pass) tuple.
    :param cookies: dictionary or CookieJar of cookies to attach to this request.
    :param hooks: dictionary of callback hooks, for internal usage.

    Usage::

      >>> import requests
      >>> req = requests.Request('GET', 'http://httpbin.org/get')
      >>> req.prepare()
      <PreparedRequest [GET]>
    """

    def __init__(self,
            method=None, url=None, headers=None, files=None, data=None,
            params=None, auth=None, cookies=None, hooks=None, json=None):

        # Normalise the optional collection arguments so attribute access
        # never has to special-case None. Note that data/files historically
        # default to a list, while the others default to a dict.
        hooks = {} if hooks is None else hooks

        self.hooks = default_hooks()
        for event, hook in list(hooks.items()):
            self.register_hook(event=event, hook=hook)

        self.method = method
        self.url = url
        self.headers = {} if headers is None else headers
        self.files = [] if files is None else files
        self.data = [] if data is None else data
        self.json = json
        self.params = {} if params is None else params
        self.auth = auth
        self.cookies = cookies

    def __repr__(self):
        return '<Request [%s]>' % (self.method)

    def prepare(self):
        """Constructs a :class:`PreparedRequest <PreparedRequest>` for transmission and returns it."""
        prepared = PreparedRequest()
        prepared.prepare(
            method=self.method,
            url=self.url,
            headers=self.headers,
            files=self.files,
            data=self.data,
            json=self.json,
            params=self.params,
            auth=self.auth,
            cookies=self.cookies,
            hooks=self.hooks,
        )
        return prepared


class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
    """The fully mutable :class:`PreparedRequest <PreparedRequest>` object,
    containing the exact bytes that will be sent to the server.

    Generated from either a :class:`Request <Request>` object or manually.

    Usage::

      >>> import requests
      >>> req = requests.Request('GET', 'http://httpbin.org/get')
      >>> r = req.prepare()
      <PreparedRequest [GET]>

      >>> s = requests.Session()
      >>> s.send(r)
      <Response [200]>
    """

    def __init__(self):
        #: HTTP verb to send to the server.
        self.method = None
        #: HTTP URL to send the request to.
        self.url = None
        #: dictionary of HTTP headers.
        self.headers = None
        # The `CookieJar` used to create the Cookie header will be stored here
        # after prepare_cookies is called
        self._cookies = None
        #: request body to send to the server.
        self.body = None
        #: dictionary of callback hooks, for internal usage.
        self.hooks = default_hooks()
        #: integer denoting starting position of a readable file-like body.
        self._body_position = None

    def prepare(self,
            method=None, url=None, headers=None, files=None, data=None,
            params=None, auth=None, cookies=None, hooks=None, json=None):
        """Prepares the entire request with the given parameters."""

        # The call order below is significant — see the notes that follow.
        self.prepare_method(method)
        self.prepare_url(url, params)
        self.prepare_headers(headers)
        self.prepare_cookies(cookies)
        self.prepare_body(data, files, json)
        self.prepare_auth(auth, url)

        # Note that prepare_auth must be last to enable authentication schemes
        # such as OAuth to work on a fully prepared request.

        # This MUST go after prepare_auth. Authenticators could add a hook
        self.prepare_hooks(hooks)

    def __repr__(self):
        return '<PreparedRequest [%s]>' % (self.method)

    def copy(self):
        # Shallow copy, except for headers and cookies which are duplicated
        # so mutating the copy does not affect the original.
        p = PreparedRequest()
        p.method = self.method
        p.url = self.url
        p.headers = self.headers.copy() if self.headers is not None else None
        p._cookies = _copy_cookie_jar(self._cookies)
        p.body = self.body
        p.hooks = self.hooks
        p._body_position = self._body_position
        return p

    def prepare_method(self, method):
        """Prepares the given HTTP method."""
        self.method = method
        if self.method is not None:
            self.method = to_native_string(self.method.upper())

    @staticmethod
    def _get_idna_encoded_host(host):
        # Imported lazily: idna is only needed for non-ASCII hostnames.
        import idna

        try:
            host = idna.encode(host, uts46=True).decode('utf-8')
        except idna.IDNAError:
            raise UnicodeError
        return host

    def prepare_url(self, url, params):
        """Prepares the given HTTP URL."""
        #: Accept objects that have string representations.
        #: We're unable to blindly call unicode/str functions
        #: as this will include the bytestring indicator (b'')
        #: on python 3.x.
        #: https://github.com/requests/requests/pull/2238
        if isinstance(url, bytes):
            url = url.decode('utf8')
        else:
            url = unicode(url) if is_py2 else str(url)

        # Remove leading whitespaces from url
        url = url.lstrip()

        # Don't do any URL preparation for non-HTTP schemes like `mailto`,
        # `data` etc to work around exceptions from `url_parse`, which
        # handles RFC 3986 only.
        if ':' in url and not url.lower().startswith('http'):
            self.url = url
            return

        # Support for unicode domain names and paths.
        try:
            scheme, auth, host, port, path, query, fragment = parse_url(url)
        except LocationParseError as e:
            raise InvalidURL(*e.args)

        if not scheme:
            error = ("Invalid URL {0!r}: No schema supplied. Perhaps you meant http://{0}?")
            error = error.format(to_native_string(url, 'utf8'))

            raise MissingSchema(error)

        if not host:
            raise InvalidURL("Invalid URL %r: No host supplied" % url)

        # In general, we want to try IDNA encoding the hostname if the string contains
        # non-ASCII characters. This allows users to automatically get the correct IDNA
        # behaviour. For strings containing only ASCII characters, we need to also verify
        # it doesn't start with a wildcard (*), before allowing the unencoded hostname.
        if not unicode_is_ascii(host):
            try:
                host = self._get_idna_encoded_host(host)
            except UnicodeError:
                raise InvalidURL('URL has an invalid label.')
        elif host.startswith(u'*'):
            raise InvalidURL('URL has an invalid label.')

        # Carefully reconstruct the network location
        netloc = auth or ''
        if netloc:
            netloc += '@'
        netloc += host
        if port:
            netloc += ':' + str(port)

        # Bare domains aren't valid URLs.
        if not path:
            path = '/'

        # On Python 2, every URL component must be a byte string before
        # urlunparse/requote_uri see it.
        if is_py2:
            if isinstance(scheme, str):
                scheme = scheme.encode('utf-8')
            if isinstance(netloc, str):
                netloc = netloc.encode('utf-8')
            if isinstance(path, str):
                path = path.encode('utf-8')
            if isinstance(query, str):
                query = query.encode('utf-8')
            if isinstance(fragment, str):
                fragment = fragment.encode('utf-8')

        if isinstance(params, (str, bytes)):
            params = to_native_string(params)

        # Extra params are appended to any query string already in the URL.
        enc_params = self._encode_params(params)
        if enc_params:
            if query:
                query = '%s&%s' % (query, enc_params)
            else:
                query = enc_params

        url = requote_uri(urlunparse([scheme, netloc, path, None, query, fragment]))
        self.url = url

    def prepare_headers(self, headers):
        """Prepares the given HTTP headers."""

        self.headers = CaseInsensitiveDict()
        if headers:
            for header in headers.items():
                # Raise exception on invalid header value.
                check_header_validity(header)
                name, value = header
                self.headers[to_native_string(name)] = value

    def prepare_body(self, data, files, json=None):
        """Prepares the given HTTP body data."""

        # Check if file, fo, generator, iterator.
        # If not, run through normal process.

        # Nottin' on you.
        body = None
        content_type = None

        # ``json`` is only used when no ``data`` was supplied.
        if not data and json is not None:
            # urllib3 requires a bytes-like body. Python 2's json.dumps
            # provides this natively, but Python 3 gives a Unicode string.
            content_type = 'application/json'
            body = complexjson.dumps(json)
            if not isinstance(body, bytes):
                body = body.encode('utf-8')

        is_stream = all([
            hasattr(data, '__iter__'),
            # NOTE(review): ``collections.Mapping`` was removed from the
            # ``collections`` namespace in Python 3.10; upstream requests
            # now uses ``collections.abc.Mapping``. Kept as-is here.
            not isinstance(data, (basestring, list, tuple, collections.Mapping))
        ])

        try:
            length = super_len(data)
        except (TypeError, AttributeError, UnsupportedOperation):
            length = None

        if is_stream:
            body = data

            if getattr(body, 'tell', None) is not None:
                # Record the current file position before reading.
                # This will allow us to rewind a file in the event
                # of a redirect.
                try:
                    self._body_position = body.tell()
                except (IOError, OSError):
                    # This differentiates from None, allowing us to catch
                    # a failed `tell()` later when trying to rewind the body
                    self._body_position = object()

            if files:
                raise NotImplementedError('Streamed bodies and files are mutually exclusive.')

            if length:
                self.headers['Content-Length'] = builtin_str(length)
            else:
                self.headers['Transfer-Encoding'] = 'chunked'
        else:
            # Multi-part file uploads.
            if files:
                (body, content_type) = self._encode_files(files, data)
            else:
                if data:
                    body = self._encode_params(data)
                    if isinstance(data, basestring) or hasattr(data, 'read'):
                        content_type = None
                    else:
                        content_type = 'application/x-www-form-urlencoded'

            self.prepare_content_length(body)

            # Add content-type if it wasn't explicitly provided.
            if content_type and ('content-type' not in self.headers):
                self.headers['Content-Type'] = content_type

        self.body = body

    def prepare_content_length(self, body):
        """Prepare Content-Length header based on request method and body"""
        if body is not None:
            length = super_len(body)
            if length:
                # If length exists, set it. Otherwise, we fallback
                # to Transfer-Encoding: chunked.
                self.headers['Content-Length'] = builtin_str(length)
        elif self.method not in ('GET', 'HEAD') and self.headers.get('Content-Length') is None:
            # Set Content-Length to 0 for methods that can have a body
            # but don't provide one. (i.e. not GET or HEAD)
            self.headers['Content-Length'] = '0'

    def prepare_auth(self, auth, url=''):
        """Prepares the given HTTP auth data."""

        # If no Auth is explicitly provided, extract it from the URL first.
        if auth is None:
            url_auth = get_auth_from_url(self.url)
            auth = url_auth if any(url_auth) else None

        if auth:
            if isinstance(auth, tuple) and len(auth) == 2:
                # special-case basic HTTP auth
                auth = HTTPBasicAuth(*auth)

            # Allow auth to make its changes.
            r = auth(self)

            # Update self to reflect the auth changes.
            self.__dict__.update(r.__dict__)

            # Recompute Content-Length
            self.prepare_content_length(self.body)

    def prepare_cookies(self, cookies):
        """Prepares the given HTTP cookie data.

        This function eventually generates a ``Cookie`` header from the
        given cookies using cookielib. Due to cookielib's design, the header
        will not be regenerated if it already exists, meaning this function
        can only be called once for the life of the
        :class:`PreparedRequest <PreparedRequest>` object. Any subsequent calls
        to ``prepare_cookies`` will have no actual effect, unless the "Cookie"
        header is removed beforehand.
        """
        if isinstance(cookies, cookielib.CookieJar):
            self._cookies = cookies
        else:
            self._cookies = cookiejar_from_dict(cookies)

        cookie_header = get_cookie_header(self._cookies, self)
        if cookie_header is not None:
            self.headers['Cookie'] = cookie_header

    def prepare_hooks(self, hooks):
        """Prepares the given hooks."""
        # hooks can be passed as None to the prepare method and to this
        # method. To prevent iterating over None, simply use an empty list
        # if hooks is False-y
        hooks = hooks or []
        for event in hooks:
            self.register_hook(event, hooks[event])


class Response(object):
    """The :class:`Response <Response>` object, which contains a
    server's response to an HTTP request.
    """

    __attrs__ = [
        '_content', 'status_code', 'headers', 'url', 'history',
        'encoding', 'reason', 'cookies', 'elapsed', 'request'
    ]

    def __init__(self):
        # ``False`` marks "body not read yet"; set to bytes (or None) once
        # the body has been consumed — see the ``content`` property.
        self._content = False
        self._content_consumed = False
        # Next PreparedRequest in a redirect chain, if any (exposed via the
        # ``next`` property).
        self._next = None

        #: Integer Code of responded HTTP Status, e.g. 404 or 200.
        self.status_code = None

        #: Case-insensitive Dictionary of Response Headers.
        #: For example, ``headers['content-encoding']`` will return the
        #: value of a ``'Content-Encoding'`` response header.
        self.headers = CaseInsensitiveDict()

        #: File-like object representation of response (for advanced usage).
        #: Use of ``raw`` requires that ``stream=True`` be set on the request.
        # This requirement does not apply for use internally to Requests.
        self.raw = None

        #: Final URL location of Response.
        self.url = None

        #: Encoding to decode with when accessing r.text.
        self.encoding = None

        #: A list of :class:`Response <Response>` objects from
        #: the history of the Request. Any redirect responses will end
        #: up here. The list is sorted from the oldest to the most recent request.
        self.history = []

        #: Textual reason of responded HTTP Status, e.g. "Not Found" or "OK".
        self.reason = None

        #: A CookieJar of Cookies the server sent back.
        self.cookies = cookiejar_from_dict({})

        #: The amount of time elapsed between sending the request
        #: and the arrival of the response (as a timedelta).
        #: This property specifically measures the time taken between sending
        #: the first byte of the request and finishing parsing the headers. It
        #: is therefore unaffected by consuming the response content or the
        #: value of the ``stream`` keyword argument.
        self.elapsed = datetime.timedelta(0)

        #: The :class:`PreparedRequest <PreparedRequest>` object to which this
        #: is a response.
        self.request = None

    # Context-manager support: ``with session.get(...) as r:`` closes the
    # underlying connection when the block exits.
    def __enter__(self):
        return self

    def __exit__(self, *args):
        self.close()

    def __getstate__(self):
        # Fully read the body first: accessing ``content`` guarantees the
        # underlying stream has been drained before we snapshot state.
        if not self._content_consumed:
            self.content

        return {attr: getattr(self, attr, None) for attr in self.__attrs__}

    def __setstate__(self, state):
        for name, value in state.items():
            setattr(self, name, value)

        # Unpickled responses carry no live connection behind them, so mark
        # the body as consumed and drop the raw stream.
        self._content_consumed = True
        self.raw = None

    def __repr__(self):
        return '<Response [%s]>' % (self.status_code)

    # __bool__ (Py3) and __nonzero__ (Py2) are the same truthiness hook.
    def __bool__(self):
        """Returns True if :attr:`status_code` is less than 400.

        This attribute checks if the status code of the response is between
        400 and 600 to see if there was a client error or a server error. If
        the status code, is between 200 and 400, this will return True. This
        is **not** a check to see if the response code is ``200 OK``.
        """
        return self.ok

    def __nonzero__(self):
        """Returns True if :attr:`status_code` is less than 400.

        This attribute checks if the status code of the response is between
        400 and 600 to see if there was a client error or a server error. If
        the status code, is between 200 and 400, this will return True. This
        is **not** a check to see if the response code is ``200 OK``.
        """
        return self.ok

    def __iter__(self):
        """Allows you to use a response as an iterator."""
        return self.iter_content(128)

    @property
    def ok(self):
        """Returns True if :attr:`status_code` is less than 400.

        This attribute checks if the status code of the response is between
        400 and 600 to see if there was a client error or a server error. If
        the status code, is between 200 and 400, this will return True. This
        is **not** a check to see if the response code is ``200 OK``.
        """
        # Implemented in terms of raise_for_status so both stay consistent.
        try:
            self.raise_for_status()
        except HTTPError:
            return False
        return True

    @property
    def is_redirect(self):
        """True if this Response is a well-formed HTTP redirect that could have
        been processed automatically (by :meth:`Session.resolve_redirects`).
        """
        return ('location' in self.headers and self.status_code in REDIRECT_STATI)

    @property
    def is_permanent_redirect(self):
        """True if this Response one of the permanent versions of redirect."""
        return ('location' in self.headers and self.status_code in (codes.moved_permanently, codes.permanent_redirect))

    @property
    def next(self):
        """Returns a PreparedRequest for the next request in a redirect chain, if there is one."""
        return self._next

    @property
    def apparent_encoding(self):
        """The apparent encoding, provided by the chardet library."""
        # Note: this reads the full body via ``content``.
        return chardet.detect(self.content)['encoding']

    def iter_content(self, chunk_size=1, decode_unicode=False):
        """Iterates over the response data.  When stream=True is set on the
        request, this avoids reading the content at once into memory for
        large responses.  The chunk size is the number of bytes it should
        read into memory.  This is not necessarily the length of each item
        returned as decoding can take place.

        chunk_size must be of type int or None. A value of None will
        function differently depending on the value of `stream`.
        stream=True will read data as it arrives in whatever size the
        chunks are received. If stream=False, data is returned as
        a single chunk.

        If decode_unicode is True, content will be decoded using the best
        available encoding based on the response.
        """

        def generate():
            # Special case for urllib3.
            if hasattr(self.raw, 'stream'):
                # Map urllib3's exceptions onto the requests equivalents.
                try:
                    for chunk in self.raw.stream(chunk_size, decode_content=True):
                        yield chunk
                except ProtocolError as e:
                    raise ChunkedEncodingError(e)
                except DecodeError as e:
                    raise ContentDecodingError(e)
                except ReadTimeoutError as e:
                    raise ConnectionError(e)
            else:
                # Standard file-like object.
                while True:
                    chunk = self.raw.read(chunk_size)
                    if not chunk:
                        break
                    yield chunk

            self._content_consumed = True

        # ``_content`` still being a bool here means the stream was consumed
        # without caching a body, so there is nothing left to iterate.
        if self._content_consumed and isinstance(self._content, bool):
            raise StreamConsumedError()
        elif chunk_size is not None and not isinstance(chunk_size, int):
            raise TypeError("chunk_size must be an int, it is instead a %s." % type(chunk_size))
        # simulate reading small chunks of the content
        reused_chunks = iter_slices(self._content, chunk_size)

        stream_chunks = generate()

        chunks = reused_chunks if self._content_consumed else stream_chunks

        if decode_unicode:
            chunks = stream_decode_response_unicode(chunks, self)

        return chunks

    def iter_lines(self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=None, delimiter=None):
        """Iterate over the response body one line at a time.

        With stream=True set on the request this avoids reading the whole
        content into memory for large responses.

        .. note:: This method is not reentrant safe.
        """

        carry = None  # partial line held over from the previous chunk

        for piece in self.iter_content(chunk_size=chunk_size, decode_unicode=decode_unicode):

            if carry is not None:
                piece = carry + piece

            lines = piece.split(delimiter) if delimiter else piece.splitlines()

            # If the chunk did not end exactly on a line boundary, the last
            # element is an incomplete line: hold it back for the next chunk.
            if lines and lines[-1] and piece and lines[-1][-1] == piece[-1]:
                carry = lines.pop()
            else:
                carry = None

            for line in lines:
                yield line

        if carry is not None:
            yield carry

    @property
    def content(self):
        """Content of the response, in bytes."""

        if self._content is False:
            # Body not cached yet: pull it from the stream now.
            if self._content_consumed:
                raise RuntimeError(
                    'The content for this response was already consumed')

            if self.raw is None or self.status_code == 0:
                self._content = None
            else:
                self._content = b''.join(self.iter_content(CONTENT_CHUNK_SIZE)) or b''

        self._content_consumed = True
        # No need to release the connection here; urllib3 already handled
        # that once the data was exhausted.
        return self._content

    @property
    def text(self):
        """Content of the response, in unicode.

        If Response.encoding is None, encoding will be guessed using
        ``chardet``.

        The encoding of the response content is determined based solely on
        the HTTP headers, following RFC 2616 to the letter.  If you can use
        non-HTTP knowledge to make a better guess at the encoding, set
        ``r.encoding`` appropriately before accessing this property.
        """

        if not self.content:
            return str('')

        # Use the header-derived charset when present; otherwise fall back
        # to chardet's auto-detected guess.
        encoding = self.encoding
        if encoding is None:
            encoding = self.apparent_encoding

        try:
            return str(self.content, encoding, errors='replace')
        except (LookupError, TypeError):
            # LookupError: the codec name is unknown (misspelled or
            # similar).  TypeError: encoding still ended up being None.
            # Either way, decode blindly and replace undecodable bytes.
            return str(self.content, errors='replace')

    def json(self, **kwargs):
        r"""Returns the json-encoded content of a response, if any.

        :param \*\*kwargs: Optional arguments that ``json.loads`` takes.
        :raises ValueError: If the response body does not contain valid json.
        """

        if not self.encoding and self.content and len(self.content) > 3:
            # No encoding set.  Per RFC 4627 section 3, JSON must be UTF-8,
            # -16 or -32; sniff which one.  If the sniff or the decode
            # fails, fall through to ``self.text`` (chardet best guess).
            encoding = guess_json_utf(self.content)
            if encoding is not None:
                try:
                    return complexjson.loads(self.content.decode(encoding), **kwargs)
                except UnicodeDecodeError:
                    # The sniffed UTF codec was wrong -- usually some other
                    # 8-bit codec the server never declared (an RFC
                    # violation).  Fall back below.
                    pass
        return complexjson.loads(self.text, **kwargs)

    @property
    def links(self):
        """Returns the parsed header links of the response, if any."""

        header = self.headers.get('link')
        resolved = {}

        if header:
            for link in parse_header_links(header):
                # Prefer the relation type as the key; fall back to the URL.
                resolved[link.get('rel') or link.get('url')] = link

        return resolved

    def raise_for_status(self):
        """Raises stored :class:`HTTPError`, if one occurred."""

        reason = self.reason
        if isinstance(reason, bytes):
            # Some servers localize their reason strings; try utf-8 first
            # and fall back to iso-8859-1 for all other encodings.
            # (See PR #3538)
            try:
                reason = reason.decode('utf-8')
            except UnicodeDecodeError:
                reason = reason.decode('iso-8859-1')

        http_error_msg = ''
        if 400 <= self.status_code < 500:
            http_error_msg = u'%s Client Error: %s for url: %s' % (self.status_code, reason, self.url)
        elif 500 <= self.status_code < 600:
            http_error_msg = u'%s Server Error: %s for url: %s' % (self.status_code, reason, self.url)

        if http_error_msg:
            raise HTTPError(http_error_msg, response=self)

    def close(self):
        """Releases the connection back to the pool. Once this method has
        been called the underlying ``raw`` object must not be accessed
        again.

        *Note: Should not normally need to be called explicitly.*
        """
        if not self._content_consumed:
            # The body was never fully read; close the raw stream outright.
            self.raw.close()

        # urllib3 connections expose release_conn(); call it if available.
        releaser = getattr(self.raw, 'release_conn', None)
        if releaser is not None:
            releaser()
_vendor/html5lib/__pycache__/_utils.cpython-36.pyc000064400000006332151733136410016065 0ustar003

�Pf�@s
ddlmZmZmZddlZddlmZddlmZyddl	j
jZWn e
k
rdddlj
jZYnXddddd	d
dgZejddko�ejd
dkZy,ed�Zeee�s�ed�Zeee�s�t�WndZYnXdZGdd�de�Zdd�Zdd�Zdd	�Zdd�ZdS)�)�absolute_import�division�unicode_literalsN)�
ModuleType)�	text_type�
default_etree�MethodDispatcher�isSurrogatePair�surrogatePairToCodepoint�moduleFactoryFactory�supports_lone_surrogates�PY27���z"\uD800"z	u"\uD800"FTc@s$eZdZdZffdd�Zdd�ZdS)rapDict with 2 special properties:

    On initiation, keys that are lists, sets or tuples are converted to
    multiple keys so accessing any one of the items in the original
    list-like object returns the matching value

    md = MethodDispatcher({("foo", "bar"):"baz"})
    md["foo"] == "baz"

    A default value which can be set through the default attribute.
    cCs~g}xN|D]F\}}t|ttttf�rBx*|D]}|j||f�q*Wq
|j||f�q
Wtj||�t|�t|�kstt	�d|_
dS)N)�
isinstance�list�tuple�	frozenset�set�append�dict�__init__�len�AssertionError�default)�self�itemsZ_dictEntries�name�value�item�r!�/usr/lib/python3.6/_utils.pyr4s
zMethodDispatcher.__init__cCstj|||j�S)N)r�getr)r�keyr!r!r"�__getitem__CszMethodDispatcher.__getitem__N)�__name__�
__module__�__qualname__�__doc__rr%r!r!r!r"r'scCsLt|�dkoJt|d�dkoJt|d�dkoJt|d�dkoJt|d�dkS)Nrri�i��ri�i��)r�ord)�datar!r!r"r	Js cCs,dt|d�ddt|d�d}|S)Niri�iri�)r*)r+Zchar_valr!r!r"r
Pscsi���fdd�}|S)Ncs�ttjtd��rd|j}n
d|j}t|j��}y�|||Stk
r�t|�}�|f|�|�}|jj|�d�kr�i�|<d�|kr�i�||<d�||kr�i�|||<|�|||<|SXdS)N�z_%s_factorys_%s_factoryr�args�kwargs)	rrr&�typerr�KeyError�__dict__�update)Z
baseModuler-r.rZkwargs_tuple�modZobjs)�factory�moduleCacher!r"�
moduleFactory\s$
z+moduleFactoryFactory.<locals>.moduleFactoryr!)r4r6r!)r4r5r"rYscsi���fdd�}|S)Ncs2t|�t|j��f}|�kr*�||��|<�|S)N)rr)r-r.r$)�cache�funcr!r"�wrappedyszmemoize.<locals>.wrappedr!)r8r9r!)r7r8r"�memoizevsr:)Z
__future__rrr�sys�typesrZpip._vendor.sixrZxml.etree.cElementTreeZetreeZcElementTreer�ImportErrorZxml.etree.ElementTreeZElementTree�__all__�version_infor
�evalZ_xrrrrrr	r
rr:r!r!r!r"�<module>s0

#	_vendor/html5lib/__pycache__/_inputstream.cpython-36.opt-1.pyc000064400000053240151733136420020240 0ustar003

�Pf�)@s�ddlmZmZmZddlmZmZddlmZm	Z	ddl
Z
ddlZddlm
Z
ddlmZmZmZmZddlmZdd	lmZdd
lmZyddlmZWnek
r�eZYnXedd
�eD��Zedd
�eD��Zedd
�eD��Zeeddg�BZdZej �r(ej!eddF�e"d�d�Z#n
ej!e�Z#e$dddddddddddd d!d"d#d$d%d&d'd(d)d*d+d,d-d.d/d0d1d2d3d4g �Z%ej!d5�Z&iZ'Gd6d7�d7e(�Z)d8d9�Z*Gd:d;�d;e(�Z+Gd<d=�d=e+�Z,Gd>d?�d?e-�Z.Gd@dA�dAe(�Z/GdBdC�dCe(�Z0dDdE�Z1dS)G�)�absolute_import�division�unicode_literals)�	text_type�binary_type)�http_client�urllibN)�webencodings�)�EOF�spaceCharacters�asciiLetters�asciiUppercase)�ReparseException)�_utils)�StringIO)�BytesIOcCsg|]}|jd��qS)�ascii)�encode)�.0�item�r�"/usr/lib/python3.6/_inputstream.py�
<listcomp>srcCsg|]}|jd��qS)r)r)rrrrrrscCsg|]}|jd��qS)r)r)rrrrrrs�>�<u�[---Ÿ﷐-﷯￾￿🿾🿿𯿾𯿿𿿾𿿿񏿾񏿿񟿾񟿿񯿾񯿿񿿾񿿿򏿾򏿿򟿾򟿿򯿾򯿿򿿾򿿿󏿾󏿿󟿾󟿿󯿾󯿿󿿾󿿿􏿾􏿿]z"\uD800-\uDFFF"�]i��i��i��i��i��i��i��i��i��i��i��i��i��i��i��i��i��	i��	i��
i��
i��i��i��i��i��
i��
i��i��i��i��i��i��z[	-
 -/:-@[-`{-~]c@sHeZdZdZdd�Zdd�Zdd�Zdd	�Zd
d�Zdd
�Z	dd�Z
dS)�BufferedStreamz�Buffering for streams that do not have buffering of their own

    The buffer is implemented as a list of chunks on the assumption that
    joining many strings will be slow since it is O(n**2)
    cCs||_g|_ddg|_dS)Nr
r���)�stream�buffer�position)�selfrrrr�__init__@szBufferedStream.__init__cCs@d}x(|jd|jd�D]}|t|�7}qW||jd7}|S)Nrr
)r r!�len)r"�pos�chunkrrr�tellEs
zBufferedStream.tellcCsH|}d}x0t|j|�|kr8|t|j|�8}|d7}q
W||g|_dS)Nrr
)r$r r!)r"r%�offset�irrr�seekLszBufferedStream.seekcCsT|js|j|�S|jdt|j�krF|jdt|jd�krF|j|�S|j|�SdS)Nrr
r)r �_readStreamr!r$�_readFromBuffer)r"�bytesrrr�readUs

zBufferedStream.readcCstdd�|jD��S)NcSsg|]}t|��qSr)r$)rrrrrr_sz1BufferedStream._bufferedBytes.<locals>.<listcomp>)�sumr )r"rrr�_bufferedBytes^szBufferedStream._bufferedBytescCs<|jj|�}|jj|�|jdd7<t|�|jd<|S)Nrr
)rr.r �appendr!r$)r"r-�datarrrr+as
zBufferedStream._readStreamcCs�|}g}|jd}|jd}x�|t|j�kr�|dkr�|j|}|t|�|krb|}|||g|_n"t|�|}|t|�g|_|d7}|j||||��||8}d}qW|r�|j|j|��dj|�S)Nrr
�)r!r$r r1r+�join)r"r-ZremainingBytes�rvZbufferIndexZbufferOffsetZbufferedDataZbytesToReadrrrr,hs$


zBufferedStream._readFromBufferN)�__name__�
__module__�__qualname__�__doc__r#r'r*r.r0r+r,rrrrr9s		rcKs�t|tj�s(t|tjj�r.t|jtj�r.d}n&t|d�rJt|jd�t	�}n
t|t	�}|r�dd�|D�}|rvt
d|��t|f|�St|f|�SdS)NFr.rcSsg|]}|jd�r|�qS)Z	_encoding)�endswith)r�xrrrr�sz#HTMLInputStream.<locals>.<listcomp>z3Cannot set an encoding with a unicode input, set %r)
�
isinstancerZHTTPResponserZresponseZaddbase�fp�hasattrr.r�	TypeError�HTMLUnicodeInputStream�HTMLBinaryInputStream)�source�kwargsZ	isUnicodeZ	encodingsrrr�HTMLInputStream�s

rDc@speZdZdZdZdd�Zdd�Zdd�Zd	d
�Zdd�Z	d
d�Z
ddd�Zdd�Zdd�Z
ddd�Zdd�ZdS)r@z�Provides a unicode stream of characters to the HTMLTokenizer.

    This class takes care of character encoding and removing or replacing
    incorrect byte-sequences and also provides column and line tracking.

    i(cCsZtjsd|_ntd�dkr$|j|_n|j|_dg|_td�df|_|j	|�|_
|j�dS)a�Initialises the HTMLInputStream.

        HTMLInputStream(source, [encoding]) -> Normalized stream from source
        for use by html5lib.

        source can be either a file-object, local filename or a string.

        The optional encoding parameter must be a string that indicates
        the encoding.  If specified, that encoding will be used,
        regardless of any BOM or later declaration (such as in a meta
        element)

        Nu􏿿r
rzutf-8�certain)r�supports_lone_surrogates�reportCharacterErrorsr$�characterErrorsUCS4�characterErrorsUCS2ZnewLines�lookupEncoding�charEncoding�
openStream�
dataStream�reset)r"rBrrrr#�s
zHTMLUnicodeInputStream.__init__cCs.d|_d|_d|_g|_d|_d|_d|_dS)N�r)r&�	chunkSize�chunkOffset�errors�prevNumLines�prevNumCols�_bufferedCharacter)r"rrrrN�szHTMLUnicodeInputStream.resetcCst|d�r|}nt|�}|S)zvProduces a file object from source.

        source can be either a file object, local filename or a string.

        r.)r>r)r"rBrrrrrL�s
z!HTMLUnicodeInputStream.openStreamcCsT|j}|jdd|�}|j|}|jdd|�}|dkr@|j|}n||d}||fS)N�
rr
r)r&�countrS�rfindrT)r"r(r&ZnLinesZpositionLineZlastLinePosZpositionColumnrrr�	_position�s
z HTMLUnicodeInputStream._positioncCs|j|j�\}}|d|fS)z:Returns (line, col) of the current position in the stream.r
)rYrQ)r"�line�colrrrr!�szHTMLUnicodeInputStream.positioncCs6|j|jkr|j�stS|j}|j|}|d|_|S)zo Read one character from the stream or queue if available. Return
            EOF when EOF is reached.
        r
)rQrP�	readChunkrr&)r"rQ�charrrrr]�s

zHTMLUnicodeInputStream.charNcCs�|dkr|j}|j|j�\|_|_d|_d|_d|_|jj|�}|j	rX|j	|}d|_	n|s`dSt
|�dkr�t|d�}|dks�d|ko�dknr�|d
|_	|dd�}|jr�|j|�|j
dd	�}|j
d
d	�}||_t
|�|_dS)NrOrFr
�
i�i��z
rV�
Trrr)�_defaultChunkSizerYrPrSrTr&rQrMr.rUr$�ordrG�replace)r"rPr2Zlastvrrrr\�s0
 


z HTMLUnicodeInputStream.readChunkcCs,x&tttj|���D]}|jjd�qWdS)Nzinvalid-codepoint)�ranger$�invalid_unicode_re�findallrRr1)r"r2�_rrrrH%sz*HTMLUnicodeInputStream.characterErrorsUCS4cCs�d}x�tj|�D]�}|rqt|j��}|j�}tj|||d��rttj|||d��}|tkrn|j	j
d�d}q|dkr�|dkr�|t|�dkr�|j	j
d�qd}|j	j
d�qWdS)NF�zinvalid-codepointTi�i��r
)rd�finditerra�group�startrZisSurrogatePairZsurrogatePairToCodepoint�non_bmp_invalid_codepointsrRr1r$)r"r2�skip�matchZ	codepointr%Zchar_valrrrrI)s z*HTMLUnicodeInputStream.characterErrorsUCS2Fc	Cs�yt||f}WnNtk
r^djdd�|D��}|s@d|}tjd|�}t||f<YnXg}x||j|j|j�}|dkr�|j|jkr�Pn0|j	�}||jkr�|j
|j|j|��||_P|j
|j|jd��|j�sfPqfWdj|�}|S)z� Returns a string of characters from the stream up to but not
        including any character in 'characters' or EOF. 'characters' must be
        a container that supports the 'in' method and iteration over its
        characters.
        rOcSsg|]}dt|��qS)z\x%02x)ra)r�crrrrNsz5HTMLUnicodeInputStream.charsUntil.<locals>.<listcomp>z^%sz[%s]+N)�charsUntilRegEx�KeyErrorr4�re�compilermr&rQrP�endr1r\)	r"Z
charactersZopposite�charsZregexr5�mrs�rrrr�
charsUntil@s. 

z!HTMLUnicodeInputStream.charsUntilcCs@|dk	r<|jdkr.||j|_|jd7_n|jd8_dS)Nrr
)rQr&rP)r"r]rrr�ungetos
zHTMLUnicodeInputStream.unget)N)F)r6r7r8r9r`r#rNrLrYr!r]r\rHrIrwrxrrrrr@�s 
&
/r@c@sLeZdZdZddd�Zdd�Zd	d
�Zddd�Zd
d�Zdd�Z	dd�Z
dS)rAz�Provides a unicode stream of characters to the HTMLTokenizer.

    This class takes care of character encoding and removing or replacing
    incorrect byte-sequences and also provides column and line tracking.

    N�windows-1252TcCs\|j|�|_tj||j�d|_d|_||_||_||_||_	||_
|j|�|_|j
�dS)a�Initialises the HTMLInputStream.

        HTMLInputStream(source, [encoding]) -> Normalized stream from source
        for use by html5lib.

        source can be either a file-object, local filename or a string.

        The optional encoding parameter must be a string that indicates
        the encoding.  If specified, that encoding will be used,
        regardless of any BOM or later declaration (such as in a meta
        element)

        i�dN)rL�	rawStreamr@r#�numBytesMeta�numBytesChardet�override_encoding�transport_encoding�same_origin_parent_encoding�likely_encoding�default_encoding�determineEncodingrKrN)r"rBr~rr�r�r�Z
useChardetrrrr#�szHTMLBinaryInputStream.__init__cCs&|jdjj|jd�|_tj|�dS)Nrrb)rKZ
codec_info�streamreaderr{rMr@rN)r"rrrrN�szHTMLBinaryInputStream.resetc	CsDt|d�r|}nt|�}y|j|j��Wnt|�}YnX|S)zvProduces a file object from source.

        source can be either a file object, local filename or a string.

        r.)r>rr*r'r)r"rBrrrrrL�s
z HTMLBinaryInputStream.openStreamcCs�|j�df}|ddk	r|St|j�df}|ddk	r:|St|j�df}|ddk	rX|S|j�df}|ddk	rt|St|j�df}|ddk	r�|djjd�r�|St|j�df}|ddk	r�|S|�rdyddl	m
}Wntk
r�YnxXg}|�}x6|j�s.|j
j|j�}|�sP|j|�|j|�q�W|j�t|jd�}|j
jd�|dk	�rd|dfSt|j�df}|ddk	�r�|Std�dfS)NrErZ	tentativezutf-16)�UniversalDetector�encodingzwindows-1252)�	detectBOMrJr~r�detectEncodingMetar��name�
startswithr�Zchardet.universaldetectorr��ImportError�doner{r.r}r1Zfeed�close�resultr*r�)r"ZchardetrKr�ZbuffersZdetectorr r�rrrr��sP


z'HTMLBinaryInputStream.determineEncodingcCs�t|�}|dkrdS|jdkr(td�}nT||jdkrH|jddf|_n4|jjd�|df|_|j�td|jd|f��dS)N�utf-16be�utf-16lezutf-8rrEzEncoding changed from %s to %s)r�r�)rJr�rKr{r*rNr)r"ZnewEncodingrrr�changeEncodings

z$HTMLBinaryInputStream.changeEncodingc
Cs�tjdtjdtjdtjdtjdi}|jjd�}|j|dd��}d}|sp|j|�}d}|sp|j|dd	��}d	}|r�|jj	|�t
|�S|jj	d
�dSdS)z�Attempts to detect at BOM at the start of the stream. If
        an encoding can be determined from the BOM return the name of the
        encoding otherwise return Nonezutf-8zutf-16lezutf-16bezutf-32lezutf-32be�N�rgr)�codecs�BOM_UTF8�BOM_UTF16_LE�BOM_UTF16_BE�BOM_UTF32_LE�BOM_UTF32_BEr{r.�getr*rJ)r"ZbomDict�stringr�r*rrrr�s"
zHTMLBinaryInputStream.detectBOMcCsH|jj|j�}t|�}|jjd�|j�}|dk	rD|jdkrDtd�}|S)z9Report the encoding declared by the meta element
        rN�utf-16be�utf-16lezutf-8)r�r�)r{r.r|�EncodingParserr*�getEncodingr�rJ)r"r �parserr�rrrr�9sz(HTMLBinaryInputStream.detectEncodingMeta)NNNNryT)T)r6r7r8r9r#rNrLr�r�r�r�rrrrrA�s
(
>"rAc@s�eZdZdZdd�Zdd�Zdd�Zdd	�Zd
d�Zdd
�Z	dd�Z
dd�Zeee
�Z
dd�Zee�Zefdd�Zdd�Zdd�Zdd�ZdS)�
EncodingBytesz�String-like object with an associated position and various extra methods
    If the position is ever greater than the string length then an exception is
    raisedcCstj||j��S)N)r-�__new__�lower)r"�valuerrrr�LszEncodingBytes.__new__cCs
d|_dS)Nr
r)rY)r"r�rrrr#PszEncodingBytes.__init__cCs|S)Nr)r"rrr�__iter__TszEncodingBytes.__iter__cCs>|jd}|_|t|�kr"t�n|dkr.t�|||d�S)Nr
r)rYr$�
StopIterationr?)r"�prrr�__next__WszEncodingBytes.__next__cCs|j�S)N)r�)r"rrr�next_szEncodingBytes.nextcCsB|j}|t|�krt�n|dkr$t�|d|_}|||d�S)Nrr
)rYr$r�r?)r"r�rrr�previouscszEncodingBytes.previouscCs|jt|�krt�||_dS)N)rYr$r�)r"r!rrr�setPositionlszEncodingBytes.setPositioncCs*|jt|�krt�|jdkr"|jSdSdS)Nr)rYr$r�)r"rrr�getPositionqs

zEncodingBytes.getPositioncCs||j|jd�S)Nr
)r!)r"rrr�getCurrentByte{szEncodingBytes.getCurrentBytecCsL|j}x:|t|�kr@|||d�}||kr6||_|S|d7}qW||_dS)zSkip past a list of charactersr
N)r!r$rY)r"rtr�rnrrrrl�szEncodingBytes.skipcCsL|j}x:|t|�kr@|||d�}||kr6||_|S|d7}qW||_dS)Nr
)r!r$rY)r"rtr�rnrrr�	skipUntil�szEncodingBytes.skipUntilcCs>|j}|||t|��}|j|�}|r:|jt|�7_|S)z�Look for a sequence of bytes at the start of a string. If the bytes
        are found return True and advance the position to the byte after the
        match. Otherwise return False and leave the position alone)r!r$r�)r"r-r�r2r5rrr�
matchBytes�s
zEncodingBytes.matchBytescCsR||jd�j|�}|dkrJ|jdkr,d|_|j|t|�d7_dSt�dS)z�Look for the next sequence of bytes matching a given sequence. If
        a match is found advance the position to the last byte of the matchNr
rTrr)r!�findrYr$r�)r"r-ZnewPositionrrr�jumpTo�s
zEncodingBytes.jumpToN)r6r7r8r9r�r#r�r�r�r�r�r��propertyr!r��currentByte�spaceCharactersBytesrlr�r�r�rrrrr�Hs 	
r�c@sXeZdZdZdd�Zdd�Zdd�Zdd	�Zd
d�Zdd
�Z	dd�Z
dd�Zdd�ZdS)r�z?Mini parser for detecting character encoding from meta elementscCst|�|_d|_dS)z3string - the data to work on for encoding detectionN)r�r2r�)r"r2rrrr#�s
zEncodingParser.__init__c
Cs�d|jfd|jfd|jfd|jfd|jfd|jff}x^|jD]T}d}xD|D]<\}}|jj|�rJy|�}PWqJtk
r�d}PYqJXqJW|s<Pq<W|jS)	Ns<!--s<metas</s<!s<?rTF)	�
handleComment�
handleMeta�handlePossibleEndTag�handleOther�handlePossibleStartTagr2r�r�r�)r"ZmethodDispatchrfZkeepParsing�key�methodrrrr��s&zEncodingParser.getEncodingcCs|jjd�S)zSkip over commentss-->)r2r�)r"rrrr��szEncodingParser.handleCommentcCs�|jjtkrdSd}d}x�|j�}|dkr.dS|ddkr^|ddk}|r�|dk	r�||_dSq|ddkr�|d}t|�}|dk	r�||_dSq|ddkrtt|d��}|j�}|dk	rt|�}|dk	r|r�||_dS|}qWdS)	NTFrs
http-equivr
scontent-typescharsetscontent)	r2r�r��getAttributer�rJ�ContentAttrParserr��parse)r"Z	hasPragmaZpendingEncoding�attrZtentativeEncoding�codecZ
contentParserrrrr��s:zEncodingParser.handleMetacCs
|jd�S)NF)�handlePossibleTag)r"rrrr��sz%EncodingParser.handlePossibleStartTagcCst|j�|jd�S)NT)r�r2r�)r"rrrr��s
z#EncodingParser.handlePossibleEndTagcCsf|j}|jtkr(|r$|j�|j�dS|jt�}|dkrD|j�n|j�}x|dk	r`|j�}qNWdS)NTr)r2r��asciiLettersBytesr�r�r��spacesAngleBracketsr�)r"ZendTagr2rnr�rrrr��s



z EncodingParser.handlePossibleTagcCs|jjd�S)Nr)r2r�)r"rrrr�szEncodingParser.handleOthercCs�|j}|jttdg�B�}|dkr&dSg}g}xt|dkr@|r@PnX|tkrT|j�}PnD|d	krjdj|�dfS|tkr�|j|j��n|dkr�dS|j|�t|�}q0W|dkr�|j	�dj|�dfSt|�|j�}|d
k�r:|}x�t|�}||k�rt|�dj|�dj|�fS|tk�r*|j|j��q�|j|�q�WnJ|dk�rRdj|�dfS|tk�rl|j|j��n|dk�rzdS|j|�x^t|�}|t
k�r�dj|�dj|�fS|tk�r�|j|j��n|dk�r�dS|j|��q�WdS)z_Return a name,value pair for the next attribute in the stream,
        if one is found, or None�/rN�=r3�'�")rN)r�r)r�r�)r2rlr��	frozensetr4�asciiUppercaseBytesr1r�r�r�r�)r"r2rnZattrNameZ	attrValueZ	quoteCharrrrr�sf










zEncodingParser.getAttributeN)
r6r7r8r9r#r�r�r�r�r�r�r�r�rrrrr��s$r�c@seZdZdd�Zdd�ZdS)r�cCs
||_dS)N)r2)r"r2rrrr#fszContentAttrParser.__init__cCsy�|jjd�|jjd7_|jj�|jjdks8dS|jjd7_|jj�|jjdkr�|jj}|jjd7_|jj}|jj|�r�|j||jj�SdSnF|jj}y|jjt�|j||jj�Stk
r�|j|d�SXWntk
�rdSXdS)Nscharsetr
r�r�r�)r�r�)r2r�r!rlr�r�r�r�)r"Z	quoteMarkZoldPositionrrrr�js.

zContentAttrParser.parseN)r6r7r8r#r�rrrrr�esr�cCs`t|t�r.y|jd�}Wntk
r,dSX|dk	rXy
tj|�Stk
rTdSXndSdS)z{Return the python codec name corresponding to an encoding or None if the
    string doesn't correspond to a valid encoding.rN)r<r�decode�UnicodeDecodeErrorr	�lookup�AttributeError)r�rrrrJ�s

rJr)2Z
__future__rrrZpip._vendor.sixrrZpip._vendor.six.movesrrr�rqZpip._vendorr	Z	constantsrrr
rrrOr�iorrr�r�r�r�r�r�Zinvalid_unicode_no_surrogaterFrr�evalrd�setrkZascii_punctuation_rero�objectrrDr@rAr-r�r�r�rJrrrr�<module>sV









JgIh6'_vendor/html5lib/__pycache__/serializer.cpython-36.pyc000064400000022106151733136420016735 0ustar003

�Pfa7�@s�ddlmZmZmZddlmZddlZddlmZm	Z	ddl
mZmZm
Z
ddl
mZmZmZddlmZmZdd	lmZd
je
�dZejded
�Zejded�ZiZed�dkZx�eej��D]p\Z Z!er�ee!�dks�er�ee!�dkr�q�e!dkr�ee!�dk�rej"e!�Z!ne#e!�Z!e!ek�s4e j$�r�e ee!<q�Wdd�Z%ede%�ddd�Z&Gdd�de'�Z(Gdd�de)�Z*dS)�)�absolute_import�division�unicode_literals)�	text_typeN)�register_error�xmlcharrefreplace_errors�)�voidElements�booleanAttributes�spaceCharacters)�rcdataElements�entities�xmlEntities)�treewalkers�_utils)�escape�z"'=<>`�[�]u_	

 /`  ᠎᠏           

   ]u􏿿��&c
Cs"t|ttf��rg}g}d}x�t|j|j|j��D]n\}}|rFd}q4||j}tj|j|t	|j|dg���r�tj
|j||d��}d}nt|�}|j|�q4Wx^|D]V}t
j|�}	|	r�|jd�|j|	�|	jd�s�|jd�q�|jdt|�dd��q�Wdj|�|jfSt|�SdS)NFrTr�;z&#x%s;r)�
isinstance�UnicodeEncodeError�UnicodeTranslateError�	enumerate�object�start�endrZisSurrogatePair�min�surrogatePairToCodepoint�ord�append�_encode_entity_map�get�endswith�hex�joinr)
�exc�resZ
codepoints�skip�i�c�indexZ	codepointZcp�e�r/� /usr/lib/python3.6/serializer.py�htmlentityreplace_errors*s0 
"




r1�htmlentityreplace�etreecKs$tj|�}tf|�}|j||�|�S)N)rZ
getTreeWalker�HTMLSerializer�render)�inputZtree�encodingZserializer_optsZwalker�sr/r/r0�	serializeJs

r9c@s~eZdZdZdZdZdZdZdZdZ	dZ
dZdZdZ
dZdZdZd!Zdd�Zdd�Zdd�Zd"dd�Zd#dd�Zd$dd �ZdS)%r4�legacy�"TF�quote_attr_values�
quote_char�use_best_quote_char�omit_optional_tags�minimize_boolean_attributes�use_trailing_solidus�space_before_trailing_solidus�escape_lt_in_attrs�
escape_rcdata�resolve_entities�alphabetical_attributes�inject_meta_charset�strip_whitespace�sanitizec	Kszt|�t|j�}t|�dkr2tdtt|����d|kr@d|_x(|jD]}t|||j|t	||���qHWg|_
d|_dS)a6	Initialize HTMLSerializer.

        Keyword options (default given first unless specified) include:

        inject_meta_charset=True|False
          Whether it insert a meta element to define the character set of the
          document.
        quote_attr_values="legacy"|"spec"|"always"
          Whether to quote attribute values that don't require quoting
          per legacy browser behaviour, when required by the standard, or always.
        quote_char=u'"'|u"'"
          Use given quote character for attribute quoting. Default is to
          use double quote unless attribute value contains a double quote,
          in which case single quotes are used instead.
        escape_lt_in_attrs=False|True
          Whether to escape < in attribute values.
        escape_rcdata=False|True
          Whether to escape characters that need to be escaped within normal
          elements within rcdata elements such as style.
        resolve_entities=True|False
          Whether to resolve named character entities that appear in the
          source tree. The XML predefined entities &lt; &gt; &amp; &quot; &apos;
          are unaffected by this setting.
        strip_whitespace=False|True
          Whether to remove semantically meaningless whitespace. (This
          compresses all whitespace to a single space except within pre.)
        minimize_boolean_attributes=True|False
          Shortens boolean attributes to give just the attribute value,
          for example <input disabled="disabled"> becomes <input disabled>.
        use_trailing_solidus=False|True
          Includes a close-tag slash at the end of the start tag of void
          elements (empty elements whose end tag is forbidden). E.g. <hr/>.
        space_before_trailing_solidus=True|False
          Places a space immediately before the closing slash in a tag
          using a trailing solidus. E.g. <hr />. Requires use_trailing_solidus.
        sanitize=False|True
          Strip all unsafe or unknown constructs from output.
          See `html5lib user documentation`_
        omit_optional_tags=True|False
          Omit start/end tags that are optional.
        alphabetical_attributes=False|True
          Reorder attributes to be in alphabetical order.

        .. _html5lib user documentation: http://code.google.com/p/html5lib/wiki/UserDocumentation
        rz2__init__() got an unexpected keyword argument '%s'r=FN)�	frozenset�options�len�	TypeError�next�iterr>�setattrr$�getattr�errors�strict)�self�kwargsZunexpected_args�attrr/r/r0�__init__ps.zHTMLSerializer.__init__cCs*t|t�st�|jr"|j|jd�S|SdS)Nr2)rr�AssertionErrorr7�encode)rT�stringr/r/r0rY�szHTMLSerializer.encodecCs*t|t�st�|jr"|j|jd�S|SdS)NrS)rrrXr7rY)rTrZr/r/r0�encodeStrict�szHTMLSerializer.encodeStrictNccs�||_d}g|_|r0|jr0ddlm}|||�}|jrJddlm}||�}|jrdddlm}||�}|j	r~ddl
m}||�}|jr�ddlm}||�}�xR|D�]H}|d}|dk�r`d|d}|dr�|d	|d7}n|d
r�|d7}|d
�rJ|d
j
d�d
k�r0|d
j
d�d
k�r*|jd�d}nd}|d||d
|f7}|d7}|j|�Vq�|d5k�r�|dk�sz|�r�|�r�|dj
d�d
k�r�|jd�|j|d�Vn|jt|d��Vq�|d6k�r�|d}	|jd|	�V|	tk�r|j�rd}n|�r|jd��x�|dj�D�]�\\}
}}|}
|}|jd�V|j|
�V|j�s�|
tj|	t��k�r"|
tjdt��k�r"|jd�V|jdk�s�t|�d
k�r�d}n@|jd k�r�tj|�dk	}n$|jd!k�r�tj|�dk	}ntd"��|jd#d$�}|j �r|jd%d&�}|�r�|j!}|j"�rTd|k�r<d|k�r<d}nd|k�rTd|k�rTd}|dk�rl|jdd'�}n|jdd(�}|j|�V|j|�V|j|�Vn|j|�V�q"W|	t#k�r�|j$�r�|j%�r�|jd)�Vn|jd*�V|jd�Vq�|d+k�r6|d}	|	tk�rd}n|�r$|jd�|jd,|	�Vq�|d-k�rx|d}|j
d.�d
k�rb|jd/�|jd0|d�Vq�|d1k�r�|d}	|	d2}|t&k�r�|jd3|	�|j'�r�|t(k�r�t&|}nd4|	}|j|�Vq�|j|d�q�WdS)7NFr)�Filter�typeZDoctypez<!DOCTYPE %s�nameZpublicIdz PUBLIC "%s"ZsystemIdz SYSTEMr;r�'zASystem identifer contains both single and double quote charactersz %s%s%s�>�
Characters�SpaceCharacters�dataz</zUnexpected </ in CDATA�StartTag�EmptyTagz<%sTz+Unexpected child element of a CDATA element� r�=�always�specr:z?quote_attr_values must be one of: 'always', 'spec', or 'legacy'rz&amp;�<z&lt;z&#39;z&quot;z /�/ZEndTagz</%s>�Commentz--zComment contains --z	<!--%s-->ZEntityrzEntity %s not recognizedz&%s;)rarb)rdre))r7rRrGZfilters.inject_meta_charsetr\rFZfilters.alphabeticalattributesrHZfilters.whitespacerIZfilters.sanitizerr?Zfilters.optionaltags�find�serializeErrorr[rYrrrD�itemsr@r
r$�tupler<rL�_quoteAttributeSpec�search�_quoteAttributeLegacy�
ValueError�replacerCr=r>r	rArBr
rEr)rT�
treewalkerr7Zin_cdatar\�tokenr]Zdoctyper=r^�_Z	attr_nameZ
attr_value�k�vZ
quote_attrrc�keyr/r/r0r9�s�


















zHTMLSerializer.serializecCs2|rdjt|j||���Sdjt|j|���SdS)N�r)r'�listr9)rTrvr7r/r/r0r5?szHTMLSerializer.render�XXX ERROR MESSAGE NEEDEDcCs|jj|�|jrt�dS)N)rRr"rS�SerializeError)rTrcr/r/r0rnEszHTMLSerializer.serializeError)r<r=r>r?r@rArBrCrDrErFrGrHrI)N)N)r~)�__name__�
__module__�__qualname__r<r=r>r?r@rArBrCrDrErFrGrHrIrKrWrYr[r9r5rnr/r/r/r0r4Qs68


r4c@seZdZdZdS)rzError in serialized treeN)r�r�r��__doc__r/r/r/r0rLsr)r3N)+Z
__future__rrrZpip._vendor.sixr�re�codecsrrZ	constantsr	r
rrr
rrrrZxml.sax.saxutilsrr'Z_quoteAttributeSpecChars�compilerqrsr#rLZ_is_ucs4r}roryrzr r!�islowerr1r9rr4�	Exceptionrr/r/r/r0�<module>s:
	

|_vendor/html5lib/__pycache__/html5parser.cpython-36.pyc000064400000277205151733136420017046 0ustar003

�Pf���@sFddlmZmZmZddlmZmZmZddlZyddl	m
Z
Wn ek
r`ddlm
Z
YnXddl
mZddl
mZddl
mZdd	lmZdd
l
mZddlmZmZmZmZmZmZmZmZmZmZmZm Z!m"Z"m#Z#m$Z$m%Z%d!dd�Z&d"dd�Z'dd�Z(Gdd�de)�Z*ej+dd��Z,dd�Z-d#dd�Z.Gdd �d e/�Z0dS)$�)�absolute_import�division�unicode_literals)�with_metaclass�viewkeys�PY3N)�OrderedDict�)�_inputstream)�
_tokenizer)�treebuilders)�Marker)�_utils)�spaceCharacters�asciiUpper2Lower�specialElements�headingElements�
cdataElements�rcdataElements�
tokenTypes�
tagTokenTypes�
namespaces�htmlIntegrationPointElements�"mathmlTextIntegrationPointElements�adjustForeignAttributes�adjustMathMLAttributes�adjustSVGAttributes�E�ReparseException�etreeTcKs$tj|�}t||d�}|j|f|�S)z.Parse a string or file-like object into a tree)�namespaceHTMLElements)r�getTreeBuilder�
HTMLParser�parse)�doc�treebuilderr �kwargs�tb�p�r)�!/usr/lib/python3.6/html5parser.pyr#s
r#�divcKs,tj|�}t||d�}|j|fd|i|��S)N)r �	container)rr!r"�
parseFragment)r$r,r%r r&r'r(r)r)r*r-&s
r-csG�fdd�dt�}|S)NcseZdZ�fdd�ZdS)z-method_decorator_metaclass.<locals>.DecoratedcsBx0|j�D]$\}}t|tj�r&�|�}|||<q
Wtj||||�S)N)�items�
isinstance�types�FunctionType�type�__new__)�metaZ	classname�basesZ	classDictZ
attributeNameZ	attribute)�functionr)r*r3.s
z5method_decorator_metaclass.<locals>.Decorated.__new__N)�__name__�
__module__�__qualname__r3r))r6r)r*�	Decorated-sr:)r2)r6r:r))r6r*�method_decorator_metaclass,sr;c@s�eZdZdZd+dd�Zd,dd	�Zd
d�Zedd
��Zdd�Z	dd�Z
dd�Zdd�Zdd�Z
dd�Zd-dd�Zdd�Zdd �Zd!d"�Zd#d$�Zd%d&�Zd'd(�Zd)d*�ZdS).r"zZHTML parser. Generates a tree structure from a stream of (possibly
        malformed) HTMLNFTcsL|�_|dkrtjd�}||��_g�_t�fdd�t|�j�D���_dS)a
        strict - raise an exception when a parse error is encountered

        tree - a treebuilder class controlling the type of tree that will be
        returned. Built in treebuilders can be accessed through
        html5lib.treebuilders.getTreeBuilder(treeType)
        Nrcs g|]\}}||��j�f�qSr))�tree)�.0�name�cls)�selfr)r*�
<listcomp>Msz'HTMLParser.__init__.<locals>.<listcomp>)	�strictrr!r<�errors�dict�	getPhasesr.�phases)r@r<rBr �debugr))r@r*�__init__<s


zHTMLParser.__init__r+cKsh||_||_||_tj|fd|i|��|_|j�y|j�Wn$tk
rb|j�|j�YnXdS)N�parser)	�
innerHTMLModer,�	scriptingrZ
HTMLTokenizer�	tokenizer�reset�mainLoopr)r@�stream�	innerHTMLr,rKr&r)r)r*�_parsePszHTMLParser._parsecCs�|jj�d|_g|_g|_d|_|jr�|jj�|_	|j	t
krL|jj|j_
n0|j	tkrd|jj|j_
n|j	dkr||jj|j_
n|jd|_|jj�|j�nd|_	|jd|_d|_d|_d|_dS)NFz	no quirks�	plaintext�
beforeHtml�initialT)r<rM�
firstStartTagrC�log�
compatModerJr,�lowerrPrrL�rcdataState�stater�rawtextState�plaintextStaterF�phase�insertHtmlElement�resetInsertionModeZ	lastPhaseZbeforeRCDataPhase�
framesetOK)r@r)r)r*rM^s*





zHTMLParser.resetcCst|d�sdS|jjjdjS)z�The name of the character encoding
        that was used to decode the input stream,
        or :obj:`None` if that is not determined yet.

        rLNr)�hasattrrLrO�charEncodingr>)r@r)r)r*�documentEncoding�s
zHTMLParser.documentEncodingcCsJ|jdkr6|jtdkr6d|jko4|jdjt�dkS|j|jftkSdS)Nzannotation-xml�mathml�encoding�	text/html�application/xhtml+xml)rfrg)r>�	namespacer�
attributes�	translaterr)r@�elementr)r)r*�isHTMLIntegrationPoint�s


z!HTMLParser.isHTMLIntegrationPointcCs|j|jftkS)N)rhr>r)r@rkr)r)r*�isMathMLTextIntegrationPoint�sz'HTMLParser.isMathMLTextIntegrationPointcCsztd}td}td}td}td}td}td}�x�|j�D�]�}d}	|}
�x�|
dk	�r|
}	|jjrx|jjdnd}|r�|jnd}|r�|jnd}
|
d	}||kr�|j|
d
|
jdi��d}
qVt|jj�dk�sl||jj	k�sl|j
|��r ||k�r|d
tddg�k�sl|||fk�sl|tdk�rP|
dk�rP||k�rP|d
dk�sl|j
|��rt||||fk�rt|j}n
|jd}||k�r�|j|
�}
qV||k�r�|j|
�}
qV||k�r�|j|
�}
qV||k�r�|j|
�}
qV||k�r�|j|
�}
qV||krV|j|
�}
qVW||krD|	drD|	drD|jdd
|	d
i�qDWd}g}x8|�rt|j|j�|jj�}|�r>|j|k�s>t��q>WdS)N�
CharactersZSpaceCharacters�StartTag�EndTag�CommentZDoctype�
ParseErrorr	r2�data�datavarsrr>ZmglyphZ
malignmarkrdzannotation-xml�svg�inForeignContent�selfClosing�selfClosingAcknowledgedz&non-void-element-with-trailing-solidusT���)r�normalizedTokensr<�openElementsrhr>�
parseError�get�len�defaultNamespacerm�	frozensetrrlr]rF�processCharacters�processSpaceCharacters�processStartTag�
processEndTag�processComment�processDoctype�append�
processEOF�AssertionError)r@ZCharactersTokenZSpaceCharactersTokenZ
StartTagTokenZEndTagTokenZCommentTokenZDoctypeTokenZParseErrorToken�tokenZ
prev_token�	new_token�currentNodeZcurrentNodeNamespaceZcurrentNodeNamer2r]Z	reprocessrFr)r)r*rN�sp










zHTMLParser.mainLoopccs x|jD]}|j|�VqWdS)N)rL�normalizeToken)r@r�r)r)r*rz�szHTMLParser.normalizedTokenscOs |j|ddf|�|�|jj�S)a�Parse a HTML document into a well-formed tree

        stream - a filelike object or string containing the HTML to be parsed

        The optional encoding parameter must be a string that indicates
        the encoding.  If specified, that encoding will be used,
        regardless of any BOM or later declaration (such as in a meta
        element)

        scripting - treat noscript elements as if javascript was turned on
        FN)rQr<ZgetDocument)r@rO�argsr&r)r)r*r#�szHTMLParser.parsecOs|j|df|�|�|jj�S)a2Parse a HTML fragment into a well-formed tree fragment

        container - name of the element we're setting the innerHTML property
        if set to None, default to 'div'

        stream - a filelike object or string containing the HTML to be parsed

        The optional encoding parameter must be a string that indicates
        the encoding.  If specified, that encoding will be used,
        regardless of any BOM or later declaration (such as in a meta
        element)

        scripting - treat noscript elements as if javascript was turned on
        T)rQr<ZgetFragment)r@rOr�r&r)r)r*r-�szHTMLParser.parseFragment�XXX-undefined-errorcCs@|dkri}|jj|jjj�||f�|jr<tt||��dS)N)rCr�rLrOZpositionrBrrr)r@�	errorcodertr)r)r*r|s
zHTMLParser.parseErrorcCsT|dtdkrP|d}t|�|d<t|�t|d�krP|dj|ddd��|S)z3 HTML5 specific normalizations to the token stream r2rorsNr	ry)rrr~�update)r@r��rawr)r)r*r�szHTMLParser.normalizeTokencCst|t�dS)N)�adjust_attributesr)r@r�r)r)r*rsz!HTMLParser.adjustMathMLAttributescCst|t�dS)N)r�r)r@r�r)r)r*rszHTMLParser.adjustSVGAttributescCst|t�dS)N)r��adjustForeignAttributesMap)r@r�r)r)r*rsz"HTMLParser.adjustForeignAttributescCs|jj�dS)N)rIr])r@r�r)r)r*�reparseTokenNormalszHTMLParser.reparseTokenNormalcCs�d}ddddddddddd	d	d
dd�}x�|jjddd�D]�}|j}d}||jjdkrl|jsbt�d}|j}|dkr~|js~t�|r�|j|jjkr�q:||kr�|j||}Pq:|r:|jd	}Pq:W||_dS)NF�inSelect�inCell�inRow�inTableBody�	inCaption�
inColumnGroup�inTable�inBody�
inFrameset�
beforeHead)�select�td�th�tr�tbody�thead�tfoot�caption�colgroup�table�head�body�frameset�htmlr	rTr�r�r�r�ry)r�r�r�r�)	r<r{r>rPr�rhrrFr])r@ZlastZnewModes�nodeZnodeNameZ	new_phaser)r)r*r_!sB


zHTMLParser.resetInsertionModecCsR|dkst�|jj|�|dkr.|jj|j_n|jj|j_|j|_|j	d|_dS)zYGeneric RCDATA/RAWTEXT Parsing algorithm
        contentType - RCDATA or RAWTEXT
        �RAWTEXT�RCDATA�textN)r�r�)
r�r<�
insertElementrLr[rZrYr]�
originalPhaserF)r@r�ZcontentTyper)r)r*�parseRCDataRawtextMszHTMLParser.parseRCDataRawtext)NFTF)Fr+F)r�N)r7r8r9�__doc__rHrQrM�propertyrcrlrmrNrzr#r-r|r�rrrr�r_r�r)r)r)r*r"8s&

"
C
,r"cs"dd�}dd�}Gdd�dt|||����Gdd�d��}Gd	d
�d
��}G�fdd�d��}G�fd
d�d��}G�fdd�d��}G�fdd�d��}G�fdd�d��}	G�fdd�d��}
G�fdd�d��}G�fdd�d��}G�fdd�d��}
G�fdd�d��}G�fdd �d ��}G�fd!d"�d"��}G�fd#d$�d$��}G�fd%d&�d&��}G�fd'd(�d(��}G�fd)d*�d*��}G�fd+d,�d,��}G�fd-d.�d.��}G�fd/d0�d0��}G�fd1d2�d2��}G�fd3d4�d4��}|||||||	|
|||
||||||||||||d5�S)6Ncs(tdd�tj�D�����fdd�}|S)z4Logger that records which phase processes each tokencss|]\}}||fVqdS)Nr))r=�key�valuer)r)r*�	<genexpr>csz)getPhases.<locals>.log.<locals>.<genexpr>cs��jjd�r�t|�dkr�|d}yd�|di}Wn�YnX|dtkr\|d|d<|jjj|jjjj|jj	j
j|j
j�j|f��|f|�|�S�|f|�|�SdS)NZprocessrr2r>)r7�
startswithr~rrIrVr�rLrZr]�	__class__)r@r�r&r��info)r6�
type_namesr)r*�wrappedfs
z'getPhases.<locals>.log.<locals>.wrapped)rDrr.)r6r�r))r6r�r*rVaszgetPhases.<locals>.logcSs|rt|�StSdS)N)r;r2)Z
use_metaclassZmetaclass_funcr)r)r*�getMetaclasszszgetPhases.<locals>.getMetaclassc@sXeZdZdZdd�Zdd�Zdd�Zdd	�Zd
d�Zdd
�Z	dd�Z
dd�Zdd�ZdS)zgetPhases.<locals>.PhasezNBase class for helper object that implements each phase of processing
        cSs||_||_dS)N)rIr<)r@rIr<r)r)r*rH�sz!getPhases.<locals>.Phase.__init__cSst�dS)N)�NotImplementedError)r@r)r)r*r��sz#getPhases.<locals>.Phase.processEOFcSs|jj||jjd�dS)Nr	ry)r<�
insertCommentr{)r@r�r)r)r*r��sz'getPhases.<locals>.Phase.processCommentcSs|jjd�dS)Nzunexpected-doctype)rIr|)r@r�r)r)r*r��sz'getPhases.<locals>.Phase.processDoctypecSs|jj|d�dS)Nrs)r<�
insertText)r@r�r)r)r*r��sz*getPhases.<locals>.Phase.processCharacterscSs|jj|d�dS)Nrs)r<r�)r@r�r)r)r*r��sz/getPhases.<locals>.Phase.processSpaceCharacterscSs|j|d|�S)Nr>)�startTagHandler)r@r�r)r)r*r��sz(getPhases.<locals>.Phase.processStartTagcSsl|jjr"|ddkr"|jjd�x<|dj�D],\}}||jjdjkr0||jjdj|<q0Wd|j_dS)Nr>r�z
non-html-rootrsrF)rIrUr|r.r<r{ri)r@r��attrr�r)r)r*�startTagHtml�sz%getPhases.<locals>.Phase.startTagHtmlcSs|j|d|�S)Nr>)�
endTagHandler)r@r�r)r)r*r��sz&getPhases.<locals>.Phase.processEndTagN)
r7r8r9r�rHr�r�r�r�r�r�r�r�r)r)r)r*�Phase�s
r�c@sLeZdZdd�Zdd�Zdd�Zdd�Zd	d
�Zdd�Zd
d�Z	dd�Z
dS)zgetPhases.<locals>.InitialPhasecSsdS)Nr))r@r�r)r)r*r��sz6getPhases.<locals>.InitialPhase.processSpaceCharacterscSs|jj||jj�dS)N)r<r��document)r@r�r)r)r*r��sz.getPhases.<locals>.InitialPhase.processCommentc8Ss|d}|d}|d}|d}|dks@|dk	s@|dk	rL|dkrL|jjd�|dkrXd}|jj|�|dkrv|jt�}|�s�|ddk�s�|jdJ��s�|dKk�s�|jdL��r�|dk�s�|�r�|j�dDk�r�dE|j_n*|jdM��s�|jdN��r|dk	�rdH|j_|jj	dI|j_
dS)ONr>�publicId�systemId�correctr�zabout:legacy-compatzunknown-doctype��*+//silmaril//dtd html pro v0r11 19970101//�4-//advasoft ltd//dtd html 3.0 aswedit + extensions//�*-//as//dtd html 3.0 aswedit + extensions//�-//ietf//dtd html 2.0 level 1//�-//ietf//dtd html 2.0 level 2//�&-//ietf//dtd html 2.0 strict level 1//�&-//ietf//dtd html 2.0 strict level 2//�-//ietf//dtd html 2.0 strict//�-//ietf//dtd html 2.0//�-//ietf//dtd html 2.1e//�-//ietf//dtd html 3.0//�-//ietf//dtd html 3.2 final//�-//ietf//dtd html 3.2//�-//ietf//dtd html 3//�-//ietf//dtd html level 0//�-//ietf//dtd html level 1//�-//ietf//dtd html level 2//�-//ietf//dtd html level 3//�"-//ietf//dtd html strict level 0//�"-//ietf//dtd html strict level 1//�"-//ietf//dtd html strict level 2//�"-//ietf//dtd html strict level 3//�-//ietf//dtd html strict//�-//ietf//dtd html//�(-//metrius//dtd metrius presentational//�5-//microsoft//dtd internet explorer 2.0 html strict//�.-//microsoft//dtd internet explorer 2.0 html//�0-//microsoft//dtd internet explorer 2.0 tables//�5-//microsoft//dtd internet explorer 3.0 html strict//�.-//microsoft//dtd internet explorer 3.0 html//�0-//microsoft//dtd internet explorer 3.0 tables//�#-//netscape comm. corp.//dtd html//�*-//netscape comm. 
corp.//dtd strict html//�*-//o'reilly and associates//dtd html 2.0//�3-//o'reilly and associates//dtd html extended 1.0//�;-//o'reilly and associates//dtd html extended relaxed 1.0//�N-//softquad software//dtd hotmetal pro 6.0::19990601::extensions to html 4.0//�E-//softquad//dtd hotmetal pro 4.0::19971010::extensions to html 4.0//�$-//spyglass//dtd html 2.0 extended//�+-//sq//dtd html 2.0 hotmetal + extensions//�--//sun microsystems corp.//dtd hotjava html//�4-//sun microsystems corp.//dtd hotjava strict html//�-//w3c//dtd html 3 1995-03-24//�-//w3c//dtd html 3.2 draft//�-//w3c//dtd html 3.2 final//�-//w3c//dtd html 3.2//�-//w3c//dtd html 3.2s draft//�-//w3c//dtd html 4.0 frameset//�#-//w3c//dtd html 4.0 transitional//�(-//w3c//dtd html experimental 19960712//�&-//w3c//dtd html experimental 970421//�-//w3c//dtd w3 html//�-//w3o//dtd w3 html 3.0//�#-//webtechs//dtd mozilla html 2.0//�-//webtechs//dtd mozilla html//�$-//w3o//dtd w3 html strict 3.0//en//�"-/w3c/dtd html 4.0 transitional/en� -//w3c//dtd html 4.01 frameset//�$-//w3c//dtd html 4.01 transitional//z:http://www.ibm.com/data/dtd/v11/ibmxhtml1-transitional.dtd�quirks� -//w3c//dtd xhtml 1.0 frameset//�$-//w3c//dtd xhtml 1.0 transitional//zlimited quirksrS)7r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrr)rrr�)rr)r	r
)rr)rIr|r<Z
insertDoctyperjrr�rXrWrFr])r@r�r>r�r�r�r)r)r*r��s�



z.getPhases.<locals>.InitialPhase.processDoctypecSsd|j_|jjd|j_dS)NrrS)rIrWrFr])r@r)r)r*�anythingElsesz,getPhases.<locals>.InitialPhase.anythingElsecSs|jjd�|j�|S)Nzexpected-doctype-but-got-chars)rIr|r)r@r�r)r)r*r�sz1getPhases.<locals>.InitialPhase.processCharacterscSs"|jjdd|di�|j�|S)Nz"expected-doctype-but-got-start-tagr>)rIr|r)r@r�r)r)r*r�sz/getPhases.<locals>.InitialPhase.processStartTagcSs"|jjdd|di�|j�|S)Nz expected-doctype-but-got-end-tagr>)rIr|r)r@r�r)r)r*r�sz-getPhases.<locals>.InitialPhase.processEndTagcSs|jjd�|j�dS)Nzexpected-doctype-but-got-eofT)rIr|r)r@r)r)r*r�%sz*getPhases.<locals>.InitialPhase.processEOFN)r7r8r9r�r�r�rr�r�r�r�r)r)r)r*�InitialPhase�s_rc@sDeZdZdd�Zdd�Zdd�Zdd�Zd	d
�Zdd�Zd
d�Z	dS)z"getPhases.<locals>.BeforeHtmlPhasecSs&|jjtdd��|jjd|j_dS)Nr�ror�)r<Z
insertRoot�impliedTagTokenrIrFr])r@r)r)r*r^,sz4getPhases.<locals>.BeforeHtmlPhase.insertHtmlElementcSs|j�dS)NT)r^)r@r)r)r*r�1sz-getPhases.<locals>.BeforeHtmlPhase.processEOFcSs|jj||jj�dS)N)r<r�r�)r@r�r)r)r*r�5sz1getPhases.<locals>.BeforeHtmlPhase.processCommentcSsdS)Nr))r@r�r)r)r*r�8sz9getPhases.<locals>.BeforeHtmlPhase.processSpaceCharacterscSs|j�|S)N)r^)r@r�r)r)r*r�;sz4getPhases.<locals>.BeforeHtmlPhase.processCharacterscSs |ddkrd|j_|j�|S)Nr>r�T)rIrUr^)r@r�r)r)r*r�?sz2getPhases.<locals>.BeforeHtmlPhase.processStartTagcSs4|ddkr$|jjdd|di�n|j�|SdS)Nr>r�r�r��brzunexpected-end-tag-before-html)r�r�r�r)rIr|r^)r@r�r)r)r*r�Es
z0getPhases.<locals>.BeforeHtmlPhase.processEndTagN)
r7r8r9r^r�r�r�r�r�r�r)r)r)r*�BeforeHtmlPhase*srcsXeZdZ�fdd�Zdd�Zdd�Zdd�Zd	d
�Zdd�Zd
d�Z	dd�Z
dd�ZdS)z"getPhases.<locals>.BeforeHeadPhasecsV�j|||�tjd|jfd|jfg�|_|j|j_tjd|jfg�|_	|j
|j	_dS)Nr�r�r�r)r�r�r�r)rHr�MethodDispatcherr��startTagHeadr��
startTagOther�default�endTagImplyHeadr��endTagOther)r@rIr<)r�r)r*rHNs
z+getPhases.<locals>.BeforeHeadPhase.__init__cSs|jtdd��dS)Nr�roT)rr
)r@r)r)r*r�\sz-getPhases.<locals>.BeforeHeadPhase.processEOFcSsdS)Nr))r@r�r)r)r*r�`sz9getPhases.<locals>.BeforeHeadPhase.processSpaceCharacterscSs|jtdd��|S)Nr�ro)rr
)r@r�r)r)r*r�csz4getPhases.<locals>.BeforeHeadPhase.processCharacterscSs|jjdj|�S)Nr�)rIrFr�)r@r�r)r)r*r�gsz/getPhases.<locals>.BeforeHeadPhase.startTagHtmlcSs0|jj|�|jjd|j_|jjd|j_dS)Nr	�inHeadry)r<r�r{�headPointerrIrFr])r@r�r)r)r*rjsz/getPhases.<locals>.BeforeHeadPhase.startTagHeadcSs|jtdd��|S)Nr�ro)rr
)r@r�r)r)r*rosz0getPhases.<locals>.BeforeHeadPhase.startTagOthercSs|jtdd��|S)Nr�ro)rr
)r@r�r)r)r*rssz2getPhases.<locals>.BeforeHeadPhase.endTagImplyHeadcSs|jjdd|di�dS)Nzend-tag-after-implied-rootr>)rIr|)r@r�r)r)r*rwsz.getPhases.<locals>.BeforeHeadPhase.endTagOtherN)r7r8r9rHr�r�r�r�rrrrr))r�r)r*�BeforeHeadPhaseMsrcs�eZdZ�fdd�Zdd�Zdd�Zdd�Zd	d
�Zdd�Zd
d�Z	dd�Z
dd�Zdd�Zdd�Z
dd�Zdd�Zdd�Zdd�Zdd �Zd!S)"zgetPhases.<locals>.InHeadPhasecs��j|||�tjd|jfd|jfd|jfd|jfd|jfd|jfd|j	fd
|j
fg�|_|j|j_
tjd
|jfd|jfg�|_|j|j_
dS)Nr��title�noframes�style�noscript�script�base�basefont�bgsound�command�linkr4r�rr�)rr)rrr r!r")rr�r�)rHrrr��
startTagTitle�startTagNoFramesStyle�startTagNoscript�startTagScript�startTagBaseLinkCommand�startTagMetarr�rr�
endTagHead�endTagHtmlBodyBrr�r)r@rIr<)r�r)r*rH|s 
z'getPhases.<locals>.InHeadPhase.__init__cSs|j�dS)NT)r)r@r)r)r*r��sz)getPhases.<locals>.InHeadPhase.processEOFcSs|j�|S)N)r)r@r�r)r)r*r��sz0getPhases.<locals>.InHeadPhase.processCharacterscSs|jjdj|�S)Nr�)rIrFr�)r@r�r)r)r*r��sz+getPhases.<locals>.InHeadPhase.startTagHtmlcSs|jjd�dS)Nz!two-heads-are-not-better-than-one)rIr|)r@r�r)r)r*r�sz+getPhases.<locals>.InHeadPhase.startTagHeadcSs$|jj|�|jjj�d|d<dS)NTrx)r<r�r{�pop)r@r�r)r)r*r'�sz6getPhases.<locals>.InHeadPhase.startTagBaseLinkCommandcSs�|jj|�|jjj�d|d<|d}|jjjjddkr�d|krZ|jjjj|d�nVd|kr�d|kr�|dj	�d	kr�t
j|djd
��}t
j
|�}|j�}|jjjj|�dS)NTrxrsr	Z	tentative�charsetZcontentz
http-equivzcontent-typezutf-8)r<r�r{r+rIrLrOrbZchangeEncodingrXr
Z
EncodingBytes�encodeZContentAttrParserr#)r@r�rirsrI�codecr)r)r*r(�s
z+getPhases.<locals>.InHeadPhase.startTagMetacSs|jj|d�dS)Nr�)rIr�)r@r�r)r)r*r#�sz,getPhases.<locals>.InHeadPhase.startTagTitlecSs|jj|d�dS)Nr�)rIr�)r@r�r)r)r*r$�sz4getPhases.<locals>.InHeadPhase.startTagNoFramesStylecSs8|jjr|jj|d�n|jj|�|jjd|j_dS)Nr��inHeadNoscript)rIrKr�r<r�rFr])r@r�r)r)r*r%�sz/getPhases.<locals>.InHeadPhase.startTagNoscriptcSs<|jj|�|jjj|jj_|jj|j_|jjd|j_dS)Nr�)	r<r�rIrLZscriptDataStaterZr]r�rF)r@r�r)r)r*r&�sz-getPhases.<locals>.InHeadPhase.startTagScriptcSs|j�|S)N)r)r@r�r)r)r*r�sz,getPhases.<locals>.InHeadPhase.startTagOthercSs:|jjjj�}|jdks&td|j��|jjd|j_dS)Nr�zExpected head got %s�	afterHead)rIr<r{r+r>r�rFr])r@r�r�r)r)r*r)�sz)getPhases.<locals>.InHeadPhase.endTagHeadcSs|j�|S)N)r)r@r�r)r)r*r*�sz/getPhases.<locals>.InHeadPhase.endTagHtmlBodyBrcSs|jjdd|di�dS)Nzunexpected-end-tagr>)rIr|)r@r�r)r)r*r�sz*getPhases.<locals>.InHeadPhase.endTagOthercSs|jtd��dS)Nr�)r)r
)r@r)r)r*r�sz+getPhases.<locals>.InHeadPhase.anythingElseN)r7r8r9rHr�r�r�rr'r(r#r$r%r&rr)r*rrr))r�r)r*�InHeadPhase{s r1csxeZdZ�fdd�Zdd�Zdd�Zdd�Zd	d
�Zdd�Zd
d�Z	dd�Z
dd�Zdd�Zdd�Z
dd�Zdd�ZdS)z&getPhases.<locals>.InHeadNoscriptPhasecsf�j|||�tjd|jfd|jfd|jfg�|_|j|j_tjd	|j	fd
|j
fg�|_|j|j_dS)
Nr�rr r"r4rrr�rr)rr r"r4rr)r�r)
rHrrr�r'�startTagHeadNoscriptr�rr�endTagNoscript�endTagBrr�r)r@rIr<)r�r)r*rH�s
z/getPhases.<locals>.InHeadNoscriptPhase.__init__cSs|jjd�|j�dS)Nzeof-in-head-noscriptT)rIr|r)r@r)r)r*r��sz1getPhases.<locals>.InHeadNoscriptPhase.processEOFcSs|jjdj|�S)Nr)rIrFr�)r@r�r)r)r*r��sz5getPhases.<locals>.InHeadNoscriptPhase.processCommentcSs|jjd�|j�|S)Nzchar-in-head-noscript)rIr|r)r@r�r)r)r*r��sz8getPhases.<locals>.InHeadNoscriptPhase.processCharacterscSs|jjdj|�S)Nr)rIrFr�)r@r�r)r)r*r�sz=getPhases.<locals>.InHeadNoscriptPhase.processSpaceCharacterscSs|jjdj|�S)Nr�)rIrFr�)r@r�r)r)r*r�sz3getPhases.<locals>.InHeadNoscriptPhase.startTagHtmlcSs|jjdj|�S)Nr)rIrFr�)r@r�r)r)r*r'sz>getPhases.<locals>.InHeadNoscriptPhase.startTagBaseLinkCommandcSs|jjdd|di�dS)Nzunexpected-start-tagr>)rIr|)r@r�r)r)r*r2	sz;getPhases.<locals>.InHeadNoscriptPhase.startTagHeadNoscriptcSs"|jjdd|di�|j�|S)Nzunexpected-inhead-noscript-tagr>)rIr|r)r@r�r)r)r*rsz4getPhases.<locals>.InHeadNoscriptPhase.startTagOthercSs:|jjjj�}|jdks&td|j��|jjd|j_dS)NrzExpected noscript got %sr)rIr<r{r+r>r�rFr])r@r�r�r)r)r*r3sz5getPhases.<locals>.InHeadNoscriptPhase.endTagNoscriptcSs"|jjdd|di�|j�|S)Nzunexpected-inhead-noscript-tagr>)rIr|r)r@r�r)r)r*r4sz/getPhases.<locals>.InHeadNoscriptPhase.endTagBrcSs|jjdd|di�dS)Nzunexpected-end-tagr>)rIr|)r@r�r)r)r*rsz2getPhases.<locals>.InHeadNoscriptPhase.endTagOthercSs|jtd��dS)Nr)r3r
)r@r)r)r*rsz3getPhases.<locals>.InHeadNoscriptPhase.anythingElseN)r7r8r9rHr�r�r�r�r�r'r2rr3r4rrr))r�r)r*�InHeadNoscriptPhase�sr5cspeZdZ�fdd�Zdd�Zdd�Zdd�Zd	d
�Zdd�Zd
d�Z	dd�Z
dd�Zdd�Zdd�Z
dd�ZdS)z!getPhases.<locals>.AfterHeadPhasec
sn�j|||�tjd|jfd|jfd|jfd|jfd
|jfg�|_|j	|j_
tjd|jfg�|_|j
|j_
dS)Nr�r�r�rrr r"r4rrrrr�r)	rrr r"r4rrrr)r�r�r)rHrrr��startTagBody�startTagFrameset�startTagFromHeadrr�rrr*r�r)r@rIr<)r�r)r*rH#s
z*getPhases.<locals>.AfterHeadPhase.__init__cSs|j�dS)NT)r)r@r)r)r*r�4sz,getPhases.<locals>.AfterHeadPhase.processEOFcSs|j�|S)N)r)r@r�r)r)r*r�8sz3getPhases.<locals>.AfterHeadPhase.processCharacterscSs|jjdj|�S)Nr�)rIrFr�)r@r�r)r)r*r�<sz.getPhases.<locals>.AfterHeadPhase.startTagHtmlcSs(d|j_|jj|�|jjd|j_dS)NFr�)rIr`r<r�rFr])r@r�r)r)r*r6?sz.getPhases.<locals>.AfterHeadPhase.startTagBodycSs |jj|�|jjd|j_dS)Nr�)r<r�rIrFr])r@r�r)r)r*r7Dsz2getPhases.<locals>.AfterHeadPhase.startTagFramesetcSst|jjdd|di�|jjj|jj�|jjdj|�x4|jjddd�D]}|jdkrN|jjj	|�PqNWdS)Nz#unexpected-start-tag-out-of-my-headr>rr	r�ry)
rIr|r<r{r�rrFr�r>�remove)r@r�r�r)r)r*r8Hs
z2getPhases.<locals>.AfterHeadPhase.startTagFromHeadcSs|jjdd|di�dS)Nzunexpected-start-tagr>)rIr|)r@r�r)r)r*rRsz.getPhases.<locals>.AfterHeadPhase.startTagHeadcSs|j�|S)N)r)r@r�r)r)r*rUsz/getPhases.<locals>.AfterHeadPhase.startTagOthercSs|j�|S)N)r)r@r�r)r)r*r*Ysz2getPhases.<locals>.AfterHeadPhase.endTagHtmlBodyBrcSs|jjdd|di�dS)Nzunexpected-end-tagr>)rIr|)r@r�r)r)r*r]sz-getPhases.<locals>.AfterHeadPhase.endTagOthercSs.|jjtdd��|jjd|j_d|j_dS)Nr�ror�T)r<r�r
rIrFr]r`)r@r)r)r*r`sz.getPhases.<locals>.AfterHeadPhase.anythingElseN)r7r8r9rHr�r�r�r6r7r8rrr*rrr))r�r)r*�AfterHeadPhase"s
r:cs�eZdZ�fdd�Zdd�Zdd�Zdd�Zd	d
�Zdd�Zd
d�Z	dd�Z
dd�Zdd�Zdd�Z
dd�Zdd�Zdd�Zdd�Zdd �Zd!d"�Zd#d$�Zd%d&�Zd'd(�Zd)d*�Zd+d,�Zd-d.�Zd/d0�Zd1d2�Zd3d4�Zd5d6�Zd7d8�Zd9d:�Zd;d<�Z d=d>�Z!d?d@�Z"dAdB�Z#dCdD�Z$dEdF�Z%dGdH�Z&dIdJ�Z'dKdL�Z(dMdN�Z)dOdP�Z*dQdR�Z+dSdT�Z,dUdV�Z-dWdX�Z.dYdZ�Z/d[d\�Z0d]d^�Z1d_d`�Z2dadb�Z3dcdd�Z4dedf�Z5dgS)hzgetPhases.<locals>.InBodyPhasec,s��j|||�|j|_tjd|jfdd|jfd|jfd|jfde|j	ft
|jfdf|jfd&|j
fdg|jfd*|jfd+|jfdh|jfd8|jfd9|jfdi|jfd=|jfd>|jfdj|jfdk|jfdH|jfdI|jfdJ|jfdK|jfdL|jfdM|jfdN|jfdl|j fdQ|j!fdm|j"fdn|j#fdV|j$fdW|j%fdo|j&fg!�|_'|j(|j'_)tjd|j*fd|j+fdp|j,fd&|j-fd |j.fdq|j/ft
|j0fdr|j1fds|j2fd@|j3fg
�|_4|j5|j4_)dS)tNr�rrr r!r"r4rrrr�r��address�article�aside�
blockquote�center�details�dirr+�dl�fieldset�
figcaption�figure�footer�header�hgroup�main�menu�nav�olr(�section�summary�ul�pre�listing�form�li�dd�dtrR�a�b�big�code�em�font�i�s�small�strike�strong�tt�u�nobr�button�applet�marquee�objectZxmpr��arear�embed�img�keygen�wbr�param�source�track�input�hr�image�isindex�textareaZiframer�noembedrr��rp�rt�option�optgroupZmathrur��colr��framer�r�r�r�r�r�r��dialog)	rrr r!r"r4rrr)r;r<r=r>r?r@rAr+rBrCrDrErFrGrHrIrJrKrLr(rMrNrO)rPrQ)rSrTrU)rWrXrYrZr[r\r]r^r_r`rarb)rerfrg)rhrrirjrkrl)rmrnro)rur)rvrw)rxry)r�rzr�r{r�r�r�r�r�r�r�)r;r<r=r>rdr?r@r|rAr+rBrCrDrErFrGrHrQrIrJrKrLrPrMrNrO)rTrUrS)rVrWrXrYrZr[r\rcr]r^r_r`rarb)rerfrg)6rH�processSpaceCharactersNonPrer�rrr��startTagProcessInHeadr6r7�startTagClosePr�startTagHeading�startTagPreListing�startTagForm�startTagListItem�startTagPlaintext�	startTagA�startTagFormatting�startTagNobr�startTagButton�startTagAppletMarqueeObject�startTagXmp�
startTagTable�startTagVoidFormatting�startTagParamSource�
startTagInput�
startTagHr�
startTagImage�startTagIsIndex�startTagTextarea�startTagIFramer%�startTagRawtext�startTagSelect�startTagRpRt�startTagOpt�startTagMath�startTagSvg�startTagMisplacedr�rr�
endTagBody�
endTagHtml�endTagBlock�
endTagForm�endTagP�endTagListItem�
endTagHeading�endTagFormatting�endTagAppletMarqueeObjectr4r�r)r@rIr<)r�r)r*rHhs~
z'getPhases.<locals>.InBodyPhase.__init__cSs$|j|jko"|j|jko"|j|jkS)N)r>rhri)r@Znode1Znode2r)r)r*�isMatchingFormattingElement�sz:getPhases.<locals>.InBodyPhase.isMatchingFormattingElementcSs�|jj|�|jjd}g}x<|jjddd�D]&}|tkr@Pq0|j||�r0|j|�q0Wt|�dksjt�t|�dkr�|jjj	|d�|jjj|�dS)Nr	�ryryry)
r<r�r{�activeFormattingElementsr
r�r�r~r�r9)r@r�rkZmatchingElementsr�r)r)r*�addFormattingElement�sz3getPhases.<locals>.InBodyPhase.addFormattingElementc
Ss@td�}x2|jjddd�D]}|j|kr|jjd�PqWdS)NrTrUrSr(r�r�r�r�r�r�r�r�r	z expected-closing-tag-but-got-eof)rTrUrSr(r�r�r�r�r�r�r�r�ry)r�r<r{r>rIr|)r@Zallowed_elementsr�r)r)r*r��s
z)getPhases.<locals>.InBodyPhase.processEOFcSsh|d}|j|_|jd�rJ|jjdjdkrJ|jjd	j�rJ|dd�}|rd|jj�|jj|�dS)
Nrs�
r	rPrQrtry)rPrQrtry)	r}r�r�r<r{r>Z
hasContent�#reconstructActiveFormattingElementsr�)r@r�rsr)r)r*�!processSpaceCharactersDropNewline�s

z@getPhases.<locals>.InBodyPhase.processSpaceCharactersDropNewlinecSsT|ddkrdS|jj�|jj|d�|jjrPtdd�|dD��rPd|j_dS)Nrs�cSsg|]}|tk�qSr))r)r=�charr)r)r*rA�szDgetPhases.<locals>.InBodyPhase.processCharacters.<locals>.<listcomp>F)r<r�r�rIr`�any)r@r�r)r)r*r��s
z0getPhases.<locals>.InBodyPhase.processCharacterscSs|jj�|jj|d�dS)Nrs)r<r�r�)r@r�r)r)r*r}�s
z;getPhases.<locals>.InBodyPhase.processSpaceCharactersNonPrecSs|jjdj|�S)Nr)rIrFr�)r@r�r)r)r*r~�sz4getPhases.<locals>.InBodyPhase.startTagProcessInHeadcSs�|jjdddi�t|jj�dks4|jjdjdkrB|jjs�t�nFd|j_x<|dj	�D],\}}||jjdj
krX||jjdj
|<qXWdS)Nzunexpected-start-tagr>r�r	Frs)rIr|r~r<r{r>rPr�r`r.ri)r@r�r�r�r)r)r*r6�sz+getPhases.<locals>.InBodyPhase.startTagBodycSs�|jjdddi�t|jj�dks4|jjdjdkrB|jjs�t�nt|jjsLnj|jjdj	rv|jjdj	j
|jjd�x"|jjdjdkr�|jjj�qxW|jj|�|jj
d|j_dS)	Nzunexpected-start-tagr>r�r	r�r�r�ry)rIr|r~r<r{r>rPr�r`�parent�removeChildr+r�rFr])r@r�r)r)r*r7�s"z/getPhases.<locals>.InBodyPhase.startTagFramesetcSs.|jjddd�r|jtd��|jj|�dS)Nr(rd)�variant)r<�elementInScoper�r
r�)r@r�r)r)r*r	sz-getPhases.<locals>.InBodyPhase.startTagClosePcSs>|jjddd�r|jtd��|jj|�d|j_|j|_dS)Nr(rd)r�F)	r<r�r�r
r�rIr`r�r�)r@r�r)r)r*r�s
z1getPhases.<locals>.InBodyPhase.startTagPreListingcSsZ|jjr|jjdddi�n:|jjddd�r:|jtd��|jj|�|jjd|j_dS)	Nzunexpected-start-tagr>rRr(rd)r�r	ry)	r<�formPointerrIr|r�r�r
r�r{)r@r�r)r)r*r�sz+getPhases.<locals>.InBodyPhase.startTagFormcSs�d|j_dgddgddgd�}||d}xLt|jj�D]<}|j|kr^|jjjt|jd��P|j	t
kr8|jd
kr8Pq8W|jjd
dd�r�|jjjtd
d��|jj|�dS)NFrSrUrT)rSrUrTr>rpr;r+r(rd)r�)r;r+r()
rIr`�reversedr<r{r>r]r�r
�	nameTuplerr�r�)r@r�ZstopNamesMapZ	stopNamesr�r)r)r*r�s"


z/getPhases.<locals>.InBodyPhase.startTagListItemcSs>|jjddd�r|jtd��|jj|�|jjj|jj_dS)Nr(rd)r�)	r<r�r�r
r�rIrLr\rZ)r@r�r)r)r*r�4sz0getPhases.<locals>.InBodyPhase.startTagPlaintextcSsb|jjddd�r|jtd��|jjdjtkrR|jjdd|di�|jjj	�|jj
|�dS)Nr(rd)r�r	zunexpected-start-tagr>ry)r<r�r�r
r{r>rrIr|r+r�)r@r�r)r)r*r�:sz.getPhases.<locals>.InBodyPhase.startTagHeadingcSs~|jjd�}|rf|jjdddd��|jtd��||jjkrL|jjj|�||jjkrf|jjj|�|jj	�|j
|�dS)NrVz$unexpected-start-tag-implies-end-tag)�	startName�endName)r<�!elementInActiveFormattingElementsrIr|r�r
r{r9r�r�r�)r@r�ZafeAElementr)r)r*r�Bs
z(getPhases.<locals>.InBodyPhase.startTagAcSs|jj�|j|�dS)N)r<r�r�)r@r�r)r)r*r�Os
z1getPhases.<locals>.InBodyPhase.startTagFormattingcSsP|jj�|jjd�rB|jjdddd��|jtd��|jj�|j|�dS)Nrcz$unexpected-start-tag-implies-end-tag)r�r�)r<r�r�rIr|r�r
r�)r@r�r)r)r*r�Ss

z+getPhases.<locals>.InBodyPhase.startTagNobrcSsT|jjd�r2|jjdddd��|jtd��|S|jj�|jj|�d|j_dS)Nrdz$unexpected-start-tag-implies-end-tag)r�r�F)	r<r�rIr|r�r
r�r�r`)r@r�r)r)r*r�]s
z-getPhases.<locals>.InBodyPhase.startTagButtoncSs0|jj�|jj|�|jjjt�d|j_dS)NF)r<r�r�r�r�r
rIr`)r@r�r)r)r*r�hs
z:getPhases.<locals>.InBodyPhase.startTagAppletMarqueeObjectcSsB|jjddd�r|jtd��|jj�d|j_|jj|d�dS)Nr(rd)r�Fr�)r<r�r�r
r�rIr`r�)r@r�r)r)r*r�ns

z*getPhases.<locals>.InBodyPhase.startTagXmpcSsR|jjdkr*|jjddd�r*|jtd��|jj|�d|j_|jjd|j_	dS)Nrr(rd)r�Fr�)
rIrWr<r�r�r
r�r`rFr])r@r�r)r)r*r�usz,getPhases.<locals>.InBodyPhase.startTagTablecSs6|jj�|jj|�|jjj�d|d<d|j_dS)NTrxF)r<r�r�r{r+rIr`)r@r�r)r)r*r�}s

z5getPhases.<locals>.InBodyPhase.startTagVoidFormattingcSs@|jj}|j|�d|dkr<|ddjt�dkr<||j_dS)Nr2rs�hidden)rIr`r�rjr)r@r�r`r)r)r*r��s

z,getPhases.<locals>.InBodyPhase.startTagInputcSs$|jj|�|jjj�d|d<dS)NTrx)r<r�r{r+)r@r�r)r)r*r��sz2getPhases.<locals>.InBodyPhase.startTagParamSourcecSsJ|jjddd�r|jtd��|jj|�|jjj�d|d<d|j_dS)Nr(rd)r�TrxF)	r<r�r�r
r�r{r+rIr`)r@r�r)r)r*r��sz)getPhases.<locals>.InBodyPhase.startTagHrcSs6|jjdddd��|jtdd|d|dd��dS)	Nzunexpected-start-tag-treated-asrrrj)�originalName�newNamerorsrw)rirw)rIr|r�r
)r@r�r)r)r*r��s

z,getPhases.<locals>.InBodyPhase.startTagImagecSs|jjdddi�|jjrdSi}d|dkr>|dd|d<|jtdd|d��|jtd	d��|jtd
d��d|dkr�|dd}nd}|jtd
|d��|dj�}d|kr�|d=d|kr�|d=d|d<|jtdd||dd��|j	td
��|jtd	d��|j	td��dS)Nzdeprecated-tagr>rs�actionrsrRro)rirqZlabel�promptz3This is a searchable index. Enter search keywords: rn)r2rsrprw)rirw)
rIr|r<r�r�r
r�r�copyr�)r@r�Z
form_attrsr�rir)r)r*r��s6


z.getPhases.<locals>.InBodyPhase.startTagIsIndexcSs0|jj|�|jjj|jj_|j|_d|j_dS)NF)	r<r�rIrLrYrZr�r�r`)r@r�r)r)r*r��sz/getPhases.<locals>.InBodyPhase.startTagTextareacSsd|j_|j|�dS)NF)rIr`r�)r@r�r)r)r*r��sz-getPhases.<locals>.InBodyPhase.startTagIFramecSs"|jjr|j|�n
|j|�dS)N)rIrKr�r)r@r�r)r)r*r%�sz/getPhases.<locals>.InBodyPhase.startTagNoscriptcSs|jj|d�dS)z8iframe, noembed noframes, noscript(if scripting enabled)r�N)rIr�)r@r�r)r)r*r��sz.getPhases.<locals>.InBodyPhase.startTagRawtextcSs@|jjdjdkr$|jjjtd��|jj�|jjj|�dS)Nr	rxry)	r<r{r>rIr]r�r
r�r�)r@r�r)r)r*r��s
z*getPhases.<locals>.InBodyPhase.startTagOptcSs�|jj�|jj|�d|j_|jj|jjd|jjd|jjd|jjd|jjd|jjdfkrx|jjd|j_n|jjd	|j_dS)
NFr�r�r�r�r�r��inSelectInTabler�)r<r�r�rIr`r]rF)r@r�r)r)r*r��s




z-getPhases.<locals>.InBodyPhase.startTagSelectcSsB|jjd�r2|jj�|jjdjdkr2|jj�|jj|�dS)N�rubyr	ry)r<r��generateImpliedEndTagsr{r>rIr|r�)r@r�r)r)r*r��s


z+getPhases.<locals>.InBodyPhase.startTagRpRtcSsZ|jj�|jj|�|jj|�td|d<|jj|�|drV|jjj�d|d<dS)NrdrhrwTrx)	r<r�rIrrrr�r{r+)r@r�r)r)r*r��s
z+getPhases.<locals>.InBodyPhase.startTagMathcSsZ|jj�|jj|�|jj|�td|d<|jj|�|drV|jjj�d|d<dS)NrurhrwTrx)	r<r�rIrrrr�r{r+)r@r�r)r)r*r��s
z*getPhases.<locals>.InBodyPhase.startTagSvgcSs|jjdd|di�dS)a5 Elements that should be children of other elements that have a
            different insertion mode; here they are ignored
            "caption", "col", "colgroup", "frame", "frameset", "head",
            "option", "optgroup", "tbody", "td", "tfoot", "th", "thead",
            "tr", "noscript"
            zunexpected-start-tag-ignoredr>N)rIr|)r@r�r)r)r*r�sz0getPhases.<locals>.InBodyPhase.startTagMisplacedcSs|jj�|jj|�dS)N)r<r�r�)r@r�r)r)r*rs
z,getPhases.<locals>.InBodyPhase.startTagOthercSs�|jjddd�sD|jtdd��|jjdddi�|jtdd��nX|jjd�|jjd	j	dkrt|jjdddi�|jjj
�}x|j	dkr�|jjj
�}q�WdS)
Nr(rd)r�rozunexpected-end-tagr>rpr	ry)r<r�rr
rIr|r�r�r{r>r+)r@r�r�r)r)r*r�sz&getPhases.<locals>.InBodyPhase.endTagPcSs�|jjd�s|jj�dS|jjdjdkrlx>|jjdd�D]*}|jtd�kr>|jjdd|jd��Pq>W|jjd|j_dS)Nr�r	�rTrUrSryrxr(rvrwr�r�r�r�r�r�r�z$expected-one-end-tag-but-got-another)�gotName�expectedName�	afterBodyry)rTrUrSryrxr(rvrwr�r�r�r�r�r�r�r�)	r<r�rIr|r{r>r�rFr])r@r�r�r)r)r*r�!s
z)getPhases.<locals>.InBodyPhase.endTagBodycSs"|jjd�r|jtd��|SdS)Nr�)r<r�r�r
)r@r�r)r)r*r�3sz)getPhases.<locals>.InBodyPhase.endTagHtmlcSs�|ddkr|j|_|jj|d�}|r2|jj�|jjdj|dkr^|jjdd|di�|r�|jjj	�}x|j|dkr�|jjj	�}qpWdS)Nr>rPr	zend-tag-too-earlyry)
r}r�r<r�r�r{r>rIr|r+)r@r�ZinScoper�r)r)r*r�9s
z*getPhases.<locals>.InBodyPhase.endTagBlockcSsx|jj}d|j_|dks&|jj|�r:|jjdddi�n:|jj�|jjd|krf|jjdddi�|jjj|�dS)Nzunexpected-end-tagr>rRr	zend-tag-too-early-ignoredry)r<r�r�rIr|r�r{r9)r@r�r�r)r)r*r�Gs

z)getPhases.<locals>.InBodyPhase.endTagFormcSs�|ddkrd}nd}|jj|d|d�sB|jjdd|di�nj|jj|dd�|jjd	j|dkr�|jjdd|di�|jjj�}x|j|dkr�|jjj�}q�WdS)
Nr>rS�list)r�zunexpected-end-tag)�excluder	zend-tag-too-earlyry)r<r�rIr|r�r{r>r+)r@r�r�r�r)r)r*r�Tsz-getPhases.<locals>.InBodyPhase.endTagListItemcSs�x$tD]}|jj|�r|jj�PqW|jjdj|dkrR|jjdd|di�xBtD]:}|jj|�rX|jjj�}x|jtkr�|jjj�}qvWPqXWdS)Nr	r>zend-tag-too-earlyry)	rr<r�r�r{r>rIr|r+)r@r��itemr)r)r*r�es


z,getPhases.<locals>.InBodyPhase.endTagHeadingcSs"d}�x|dk�r|d7}|jj|d�}|sL||jjkrZ|jj|j�rZ|j|�dS||jjkr�|jjdd|di�|jjj	|�dS|jj|j�s�|jjdd|di�dS||jjdkr�|jjdd|di�|jjj
|�}d}x,|jj|d�D]}|jtk�r|}P�qW|dk�rb|jjj
�}x||k�rN|jjj
�}�q4W|jjj	|�dS|jj|d}|jjj
|�}|}	}
d}|jjj
|
�}x�|d	k�rh|d7}|d8}|jj|}
|
|jjk�r�|jjj	|
��q�|
|k�r�P|	|k�r
|jjj
|
�d}|
j�}
|
|jj|jjj
|
�<|
|jj|jjj
|
�<|
}
|	j�rV|	jj|	�|
j|	�|
}	�q�W|	j�r~|	jj|	�|jtd�k�r�|jj�\}}|j|	|�n
|j|	�|j�}
|j|
�|j|
�|jjj	|�|jjj||
�|jjj	|�|jjj|jjj
|�d|
�qWdS)z)The much-feared adoption agency algorithmr�r	r>Nzadoption-agency-1.2zadoption-agency-4.4zadoption-agency-1.3r�r�r�r�r�r�ry)r�r�r�r�r�)r<r�r{r�r>rrIr|r�r9�indexr�rr+Z	cloneNoder�r�ZappendChildr�ZgetTableMisnestedNodePosition�insertBeforeZreparentChildren�insert)r@r�ZouterLoopCounterZformattingElementZafeIndexZ
furthestBlockrkZcommonAncestorZbookmarkZlastNoder�ZinnerLoopCounterr�Zcloner�r�r)r)r*r�ts�











z/getPhases.<locals>.InBodyPhase.endTagFormattingcSs�|jj|d�r|jj�|jjdj|dkrF|jjdd|di�|jj|d�r�|jjj�}x|j|dkr�|jjj�}qdW|jj�dS)Nr>r	zend-tag-too-earlyry)	r<r�r�r{r>rIr|r+�clearActiveFormattingElements)r@r�rkr)r)r*r�s
z8getPhases.<locals>.InBodyPhase.endTagAppletMarqueeObjectcSs@|jjdddd��|jj�|jjtdd��|jjj�dS)Nzunexpected-end-tag-treated-asrz
br element)r�r�ro)rIr|r<r�r�r
r{r+)r@r�r)r)r*r4#s

z'getPhases.<locals>.InBodyPhase.endTagBrcSs�x�|jjddd�D]�}|j|dkr~|jj|dd�|jjdj|dkrd|jjdd|di�x|jjj�|krxqfWPq|jtkr|jjdd|di�PqWdS)Nr	r>)r�zunexpected-end-tagryry)	r<r{r>r�rIr|r+r�r)r@r�r�r)r)r*r*s
z*getPhases.<locals>.InBodyPhase.endTagOtherN)6r7r8r9rHr�r�r�r�r�r}r~r6r7rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r%r�r�r�r�r�r�r�rr�r�r�r�r�r�r�r�r�r4rr))r�r)r*�InBodyPhaseeshG

	

	

$r�cs@eZdZ�fdd�Zdd�Zdd�Zdd�Zd	d
�Zdd�Zd
S)zgetPhases.<locals>.TextPhasecsF�j|||�tjg�|_|j|j_tjd|jfg�|_|j|j_dS)Nr)	rHrrr�rr�endTagScriptr�r)r@rIr<)r�r)r*rH9s
z%getPhases.<locals>.TextPhase.__init__cSs|jj|d�dS)Nrs)r<r�)r@r�r)r)r*r�Asz.getPhases.<locals>.TextPhase.processCharacterscSs8|jjdd|jjdji�|jjj�|jj|j_dS)Nz&expected-named-closing-tag-but-got-eofr>r	Try)rIr|r<r{r>r+r�r])r@r)r)r*r�Ds
z'getPhases.<locals>.TextPhase.processEOFcSsdstd|d��dS)NFz4Tried to process start tag %s in RCDATA/RAWTEXT moder>)r�)r@r�r)r)r*rKsz*getPhases.<locals>.TextPhase.startTagOthercSs*|jjj�}|jdkst�|jj|j_dS)Nr)r<r{r+r>r�rIr�r])r@r�r�r)r)r*r�Nsz)getPhases.<locals>.TextPhase.endTagScriptcSs|jjj�|jj|j_dS)N)r<r{r+rIr�r])r@r�r)r)r*rUsz(getPhases.<locals>.TextPhase.endTagOtherN)	r7r8r9rHr�r�rr�rr))r�r)r*�	TextPhase8sr�cs�eZdZ�fdd�Zdd�Zdd�Zdd�Zd	d
�Zdd�Zd
d�Z	dd�Z
dd�Zdd�Zdd�Z
dd�Zdd�Zdd�Zdd�Zdd �Zd!d"�Zd#d$�Zd%d&�Zd'S)(zgetPhases.<locals>.InTablePhasec
s��j|||�tjd|jfd|jfd|jfd|jfd|jfd|jfd|j	fd|j
fd|jfd|jfg
�|_
|j|j
_tjd|jfd|jfg�|_|j|j_dS)Nr�r�r�rzr�r�r�r�r�r�r�rrrprRr�)r�r�r�)r�r�r�)rr)r�r�rzr�r�r�r�r�r�r�r�)rHrrr��startTagCaption�startTagColgroup�startTagCol�startTagRowGroup�startTagImplyTbodyr��startTagStyleScriptr�r�r�rr�endTagTable�endTagIgnorer�r)r@rIr<)r�r)r*rH[s$
z(getPhases.<locals>.InTablePhase.__init__cSs(x"|jjdjdkr"|jjj�qWdS)Nr	r�r�ry)r�r�)r<r{r>r+)r@r)r)r*�clearStackToTableContextssz8getPhases.<locals>.InTablePhase.clearStackToTableContextcSs0|jjdjdkr |jjd�n|jjs,t�dS)Nr	r�zeof-in-tablery)r<r{r>rIr|rPr�)r@r)r)r*r�|sz*getPhases.<locals>.InTablePhase.processEOFcSs4|jj}|jjd|j_||jj_|jjj|�dS)N�inTableText)rIr]rFr�r�)r@r�r�r)r)r*r��s
z6getPhases.<locals>.InTablePhase.processSpaceCharacterscSs4|jj}|jjd|j_||jj_|jjj|�dS)Nr�)rIr]rFr�r�)r@r�r�r)r)r*r��s
z1getPhases.<locals>.InTablePhase.processCharacterscSs&d|j_|jjdj|�d|j_dS)NTr�F)r<�insertFromTablerIrFr�)r@r�r)r)r*r��sz*getPhases.<locals>.InTablePhase.insertTextcSs6|j�|jjjt�|jj|�|jjd|j_dS)Nr�)	r�r<r�r�r
r�rIrFr])r@r�r)r)r*r��sz/getPhases.<locals>.InTablePhase.startTagCaptioncSs(|j�|jj|�|jjd|j_dS)Nr�)r�r<r�rIrFr])r@r�r)r)r*r��sz0getPhases.<locals>.InTablePhase.startTagColgroupcSs|jtdd��|S)Nr�ro)r�r
)r@r�r)r)r*r��sz+getPhases.<locals>.InTablePhase.startTagColcSs(|j�|jj|�|jjd|j_dS)Nr�)r�r<r�rIrFr])r@r�r)r)r*r��sz0getPhases.<locals>.InTablePhase.startTagRowGroupcSs|jtdd��|S)Nr�ro)r�r
)r@r�r)r)r*r��sz2getPhases.<locals>.InTablePhase.startTagImplyTbodycSs6|jjdddd��|jjjtd��|jjs2|SdS)Nz$unexpected-start-tag-implies-end-tagr�)r�r�)rIr|r]r�r
rP)r@r�r)r)r*r��s
z-getPhases.<locals>.InTablePhase.startTagTablecSs|jjdj|�S)Nr)rIrFr�)r@r�r)r)r*r��sz3getPhases.<locals>.InTablePhase.startTagStyleScriptcSsVd|dkrH|ddjt�dkrH|jjd�|jj|�|jjj�n
|j|�dS)Nr2rsr�z unexpected-hidden-input-in-table)	rjrrIr|r<r�r{r+r)r@r�r)r)r*r��sz-getPhases.<locals>.InTablePhase.startTagInputcSsD|jjd�|jjdkr@|jj|�|jjd|j_|jjj�dS)Nzunexpected-form-in-tabler	ry)rIr|r<r�r�r{r+)r@r�r)r)r*r��s
z,getPhases.<locals>.InTablePhase.startTagFormcSs<|jjdd|di�d|j_|jjdj|�d|j_dS)Nz)unexpected-start-tag-implies-table-voodoor>Tr�F)rIr|r<r�rFr�)r@r�r)r)r*r�sz-getPhases.<locals>.InTablePhase.startTagOthercSs�|jjddd�r�|jj�|jjdjdkrJ|jjdd|jjdjd��x"|jjdjdkrl|jjj�qLW|jjj�|jj�n|jj	s�t
�|jj�dS)	Nr�)r�r	zend-tag-too-early-named)r�r�ryryry)r<r�r�r{r>rIr|r+r_rPr�)r@r�r)r)r*r��s
z+getPhases.<locals>.InTablePhase.endTagTablecSs|jjdd|di�dS)Nzunexpected-end-tagr>)rIr|)r@r�r)r)r*r��sz,getPhases.<locals>.InTablePhase.endTagIgnorecSs<|jjdd|di�d|j_|jjdj|�d|j_dS)Nz'unexpected-end-tag-implies-table-voodoor>Tr�F)rIr|r<r�rFr�)r@r�r)r)r*r�sz+getPhases.<locals>.InTablePhase.endTagOtherN)r7r8r9rHr�r�r�r�r�r�r�r�r�r�r�r�r�r�rr�r�rr))r�r)r*�InTablePhaseYs&	
r�csPeZdZ�fdd�Zdd�Zdd�Zdd�Zd	d
�Zdd�Zd
d�Z	dd�Z
dS)z#getPhases.<locals>.InTableTextPhasecs�j|||�d|_g|_dS)N)rHr��characterTokens)r@rIr<)r�r)r*rH�sz,getPhases.<locals>.InTableTextPhase.__init__cSsddjdd�|jD��}tdd�|D��rJtd|d�}|jjdj|�n|rZ|jj|�g|_dS)Nr�cSsg|]}|d�qS)rsr))r=r�r)r)r*rA�szGgetPhases.<locals>.InTableTextPhase.flushCharacters.<locals>.<listcomp>cSsg|]}|tk�qSr))r)r=r�r)r)r*rA�srn)r2rsr�)�joinr�r�rrIrFr�r<)r@rsr�r)r)r*�flushCharacters�sz3getPhases.<locals>.InTableTextPhase.flushCharacterscSs|j�|j|j_|S)N)r�r�rIr])r@r�r)r)r*r��s
z2getPhases.<locals>.InTableTextPhase.processCommentcSs|j�|j|j_dS)NT)r�r�rIr])r@r)r)r*r��s
z.getPhases.<locals>.InTableTextPhase.processEOFcSs |ddkrdS|jj|�dS)Nrsr�)r�r�)r@r�r)r)r*r�sz5getPhases.<locals>.InTableTextPhase.processCharacterscSs|jj|�dS)N)r�r�)r@r�r)r)r*r�sz:getPhases.<locals>.InTableTextPhase.processSpaceCharacterscSs|j�|j|j_|S)N)r�r�rIr])r@r�r)r)r*r�
s
z3getPhases.<locals>.InTableTextPhase.processStartTagcSs|j�|j|j_|S)N)r�r�rIr])r@r�r)r)r*r�s
z1getPhases.<locals>.InTableTextPhase.processEndTagN)r7r8r9rHr�r�r�r�r�r�r�r))r�r)r*�InTableTextPhase�s	r�cs`eZdZ�fdd�Zdd�Zdd�Zdd�Zd	d
�Zdd�Zd
d�Z	dd�Z
dd�Zdd�ZdS)z!getPhases.<locals>.InCaptionPhasec
sf�j|||�tjd|jfd
|jfg�|_|j|j_tjd|jfd|j	fd|j
fg�|_|j|j_dS)Nr�r�rzr�r�r�r�r�r�r�r�r�)	r�rzr�r�r�r�r�r�r�)
r�rzr�r�r�r�r�r�r�r�)
rHrrr��startTagTableElementr�rr�
endTagCaptionr�r�r�r)r@rIr<)r�r)r*rHs
z*getPhases.<locals>.InCaptionPhase.__init__cSs|jjddd�S)Nr�r�)r�)r<r�)r@r)r)r*�ignoreEndTagCaption+sz5getPhases.<locals>.InCaptionPhase.ignoreEndTagCaptioncSs|jjdj�dS)Nr�)rIrFr�)r@r)r)r*r�.sz,getPhases.<locals>.InCaptionPhase.processEOFcSs|jjdj|�S)Nr�)rIrFr�)r@r�r)r)r*r�1sz3getPhases.<locals>.InCaptionPhase.processCharacterscSs0|jj�|j�}|jjjtd��|s,|SdS)Nr�)rIr|r�r]r�r
)r@r��ignoreEndTagr)r)r*r�4s

z6getPhases.<locals>.InCaptionPhase.startTagTableElementcSs|jjdj|�S)Nr�)rIrFr�)r@r�r)r)r*r<sz/getPhases.<locals>.InCaptionPhase.startTagOthercSs�|j�s�|jj�|jjdjdkrB|jjdd|jjdjd��x"|jjdjdkrd|jjj�qDW|jjj�|jj�|jj	d|j_
n|jjs�t�|jj�dS)	Nr	r�z$expected-one-end-tag-but-got-another)r�r�r�ryryry)
r�r<r�r{r>rIr|r+r�rFr]rPr�)r@r�r)r)r*r�?s

z/getPhases.<locals>.InCaptionPhase.endTagCaptioncSs0|jj�|j�}|jjjtd��|s,|SdS)Nr�)rIr|r�r]r�r
)r@r�r�r)r)r*r�Qs

z-getPhases.<locals>.InCaptionPhase.endTagTablecSs|jjdd|di�dS)Nzunexpected-end-tagr>)rIr|)r@r�r)r)r*r�Xsz.getPhases.<locals>.InCaptionPhase.endTagIgnorecSs|jjdj|�S)Nr�)rIrFr�)r@r�r)r)r*r[sz-getPhases.<locals>.InCaptionPhase.endTagOtherN)
r7r8r9rHr�r�r�r�rr�r�r�rr))r�r)r*�InCaptionPhasesr�csXeZdZ�fdd�Zdd�Zdd�Zdd�Zd	d
�Zdd�Zd
d�Z	dd�Z
dd�ZdS)z%getPhases.<locals>.InColumnGroupPhasecs^�j|||�tjd|jfd|jfg�|_|j|j_tjd|jfd|j	fg�|_
|j|j
_dS)Nr�rzr�)rHrrr�r�r�rr�endTagColgroup�	endTagColr�r)r@rIr<)r�r)r*rHas
z.getPhases.<locals>.InColumnGroupPhase.__init__cSs|jjdjdkS)Nr	r�ry)r<r{r>)r@r)r)r*�ignoreEndTagColgrouppsz:getPhases.<locals>.InColumnGroupPhase.ignoreEndTagColgroupcSsD|jjdjdkr"|jjst�dS|j�}|jtd��|s@dSdS)Nr	r�r�Try)	r<r{r>rIrPr�r�r�r
)r@r�r)r)r*r�ssz0getPhases.<locals>.InColumnGroupPhase.processEOFcSs"|j�}|jtd��|s|SdS)Nr�)r�r�r
)r@r�r�r)r)r*r�}sz7getPhases.<locals>.InColumnGroupPhase.processCharacterscSs$|jj|�|jjj�d|d<dS)NTrx)r<r�r{r+)r@r�r)r)r*r��sz1getPhases.<locals>.InColumnGroupPhase.startTagColcSs"|j�}|jtd��|s|SdS)Nr�)r�r�r
)r@r�r�r)r)r*r�sz3getPhases.<locals>.InColumnGroupPhase.startTagOthercSs@|j�r |jjst�|jj�n|jjj�|jjd|j_	dS)Nr�)
r�rIrPr�r|r<r{r+rFr])r@r�r)r)r*r��s
z4getPhases.<locals>.InColumnGroupPhase.endTagColgroupcSs|jjdddi�dS)Nz
no-end-tagr>rz)rIr|)r@r�r)r)r*r��sz/getPhases.<locals>.InColumnGroupPhase.endTagColcSs"|j�}|jtd��|s|SdS)Nr�)r�r�r
)r@r�r�r)r)r*r�sz1getPhases.<locals>.InColumnGroupPhase.endTagOtherN)r7r8r9rHr�r�r�r�rr�r�rr))r�r)r*�InColumnGroupPhase^s
	r�csxeZdZ�fdd�Zdd�Zdd�Zdd�Zd	d
�Zdd�Zd
d�Z	dd�Z
dd�Zdd�Zdd�Z
dd�Zdd�ZdS)z#getPhases.<locals>.InTableBodyPhasecsv�j|||�tjd|jfd|jfd
|jfd|jfg�|_|j|j_	tjd|j
fd|jfd|jfg�|_
|j|j
_	dS)Nr�r�r�r�r�rzr�r�r�r�r�r�)r�r�)r�rzr�r�r�r�)r�r�r�)r�r�rzr�r�r�r�r�)rHrrr��
startTagTr�startTagTableCell�startTagTableOtherr�rr�endTagTableRowGroupr�r�r�r)r@rIr<)r�r)r*rH�s
z,getPhases.<locals>.InTableBodyPhase.__init__cSsFx"|jjdjdkr"|jjj�qW|jjdjdkrB|jjsBt�dS)	Nr	r�r�r�r�ry)r�r�r�r�ry)r<r{r>r+rIrPr�)r@r)r)r*�clearStackToTableBodyContext�s
z@getPhases.<locals>.InTableBodyPhase.clearStackToTableBodyContextcSs|jjdj�dS)Nr�)rIrFr�)r@r)r)r*r��sz.getPhases.<locals>.InTableBodyPhase.processEOFcSs|jjdj|�S)Nr�)rIrFr�)r@r�r)r)r*r��sz:getPhases.<locals>.InTableBodyPhase.processSpaceCharacterscSs|jjdj|�S)Nr�)rIrFr�)r@r�r)r)r*r��sz5getPhases.<locals>.InTableBodyPhase.processCharacterscSs(|j�|jj|�|jjd|j_dS)Nr�)r�r<r�rIrFr])r@r�r)r)r*r��sz.getPhases.<locals>.InTableBodyPhase.startTagTrcSs*|jjdd|di�|jtdd��|S)Nzunexpected-cell-in-table-bodyr>r�ro)rIr|r�r
)r@r�r)r)r*r��sz5getPhases.<locals>.InTableBodyPhase.startTagTableCellcSsn|jjddd�s0|jjddd�s0|jjddd�rT|j�|jt|jjdj��|S|jjs`t	�|jj
�dS)Nr�r�)r�r�r�r	ry)r<r�r�r�r
r{r>rIrPr�r|)r@r�r)r)r*r��sz6getPhases.<locals>.InTableBodyPhase.startTagTableOthercSs|jjdj|�S)Nr�)rIrFr�)r@r�r)r)r*r�sz1getPhases.<locals>.InTableBodyPhase.startTagOthercSsT|jj|ddd�r:|j�|jjj�|jjd|j_n|jjdd|di�dS)Nr>r�)r�r�z unexpected-end-tag-in-table-body)	r<r�r�r{r+rIrFr]r|)r@r�r)r)r*r��sz7getPhases.<locals>.InTableBodyPhase.endTagTableRowGroupcSsn|jjddd�s0|jjddd�s0|jjddd�rT|j�|jt|jjdj��|S|jjs`t	�|jj
�dS)Nr�r�)r�r�r�r	ry)r<r�r�r�r
r{r>rIrPr�r|)r@r�r)r)r*r��sz/getPhases.<locals>.InTableBodyPhase.endTagTablecSs|jjdd|di�dS)Nz unexpected-end-tag-in-table-bodyr>)rIr|)r@r�r)r)r*r��sz0getPhases.<locals>.InTableBodyPhase.endTagIgnorecSs|jjdj|�S)Nr�)rIrFr�)r@r�r)r)r*r�sz/getPhases.<locals>.InTableBodyPhase.endTagOtherN)r7r8r9rHr�r�r�r�r�r�r�rr�r�r�rr))r�r)r*�InTableBodyPhase�s
	
r�cs�eZdZ�fdd�Zdd�Zdd�Zdd�Zd	d
�Zdd�Zd
d�Z	dd�Z
dd�Zdd�Zdd�Z
dd�Zdd�Zdd�ZdS)zgetPhases.<locals>.InRowPhasecsv�j|||�tjd|jfd
|jfd|jfg�|_|j|j_tjd
|j	fd|j
fd|jfd|jfg�|_
|j|j
_dS)Nr�r�r�r�rzr�r�r�r�r�r�r�)r�r�)r�rzr�r�r�r�r�)r�r�r�)r�r�rzr�r�r�r�)rHrrr�r�r�r�rr�endTagTrr�r�r�r�r)r@rIr<)r�r)r*rHs
z&getPhases.<locals>.InRowPhase.__init__cSsDx>|jjdjdkr>|jjdd|jjdji�|jjj�qWdS)	Nr	r�r�z'unexpected-implied-end-tag-in-table-rowr>ry)r�r�ry)r<r{r>rIr|r+)r@r)r)r*�clearStackToTableRowContextsz9getPhases.<locals>.InRowPhase.clearStackToTableRowContextcSs|jjddd�S)Nr�r�)r�)r<r�)r@r)r)r*�ignoreEndTagTrsz,getPhases.<locals>.InRowPhase.ignoreEndTagTrcSs|jjdj�dS)Nr�)rIrFr�)r@r)r)r*r�"sz(getPhases.<locals>.InRowPhase.processEOFcSs|jjdj|�S)Nr�)rIrFr�)r@r�r)r)r*r�%sz4getPhases.<locals>.InRowPhase.processSpaceCharacterscSs|jjdj|�S)Nr�)rIrFr�)r@r�r)r)r*r�(sz/getPhases.<locals>.InRowPhase.processCharacterscSs6|j�|jj|�|jjd|j_|jjjt�dS)Nr�)	r�r<r�rIrFr]r�r�r
)r@r�r)r)r*r�+sz/getPhases.<locals>.InRowPhase.startTagTableCellcSs"|j�}|jtd��|s|SdS)Nr�)r�r�r
)r@r�r�r)r)r*r�1sz0getPhases.<locals>.InRowPhase.startTagTableOthercSs|jjdj|�S)Nr�)rIrFr�)r@r�r)r)r*r8sz+getPhases.<locals>.InRowPhase.startTagOthercSsH|j�s.|j�|jjj�|jjd|j_n|jjs:t	�|jj
�dS)Nr�)r�r�r<r{r+rIrFr]rPr�r|)r@r�r)r)r*r�;sz&getPhases.<locals>.InRowPhase.endTagTrcSs"|j�}|jtd��|s|SdS)Nr�)r�r�r
)r@r�r�r)r)r*r�Esz)getPhases.<locals>.InRowPhase.endTagTablecSs4|jj|ddd�r&|jtd��|S|jj�dS)Nr>r�)r�r�)r<r�r�r
rIr|)r@r�r)r)r*r�Msz1getPhases.<locals>.InRowPhase.endTagTableRowGroupcSs|jjdd|di�dS)Nzunexpected-end-tag-in-table-rowr>)rIr|)r@r�r)r)r*r�Tsz*getPhases.<locals>.InRowPhase.endTagIgnorecSs|jjdj|�S)Nr�)rIrFr�)r@r�r)r)r*rXsz)getPhases.<locals>.InRowPhase.endTagOtherN)r7r8r9rHr�r�r�r�r�r�r�rr�r�r�r�rr))r�r)r*�
InRowPhases
r�cs`eZdZ�fdd�Zdd�Zdd�Zdd�Zd	d
�Zdd�Zd
d�Z	dd�Z
dd�Zdd�ZdS)zgetPhases.<locals>.InCellPhasecsf�j|||�tjd|jfd
|jfg�|_|j|j_tjd|jfd|j	fd|j
fg�|_|j|j_dS)Nr�r�rzr�r�r�r�r�r�r�r�r�)	r�rzr�r�r�r�r�r�r�)r�r�)r�r�rzr�r�)r�r�r�r�r�)
rHrrr�r�r�rr�endTagTableCellr��endTagImplyr�r)r@rIr<)r�r)r*rH]s
z'getPhases.<locals>.InCellPhase.__init__cSsB|jjddd�r |jtd��n|jjddd�r>|jtd��dS)Nr�r�)r�r�)r<r�r�r
)r@r)r)r*�	closeCellnsz(getPhases.<locals>.InCellPhase.closeCellcSs|jjdj�dS)Nr�)rIrFr�)r@r)r)r*r�usz)getPhases.<locals>.InCellPhase.processEOFcSs|jjdj|�S)Nr�)rIrFr�)r@r�r)r)r*r�xsz0getPhases.<locals>.InCellPhase.processCharacterscSsF|jjddd�s |jjddd�r,|j�|S|jjs8t�|jj�dS)Nr�r�)r�r�)r<r�r�rIrPr�r|)r@r�r)r)r*r�{sz1getPhases.<locals>.InCellPhase.startTagTableOthercSs|jjdj|�S)Nr�)rIrFr�)r@r�r)r)r*r�sz,getPhases.<locals>.InCellPhase.startTagOthercSs�|jj|ddd�r�|jj|d�|jjdj|dkrt|jjdd|di�x.|jjj�}|j|dkrRPqRWn|jjj�|jj�|jj	d|j_
n|jjdd|di�dS)	Nr>r�)r�r	zunexpected-cell-end-tagr�zunexpected-end-tagry)r<r�r�r{r>rIr|r+r�rFr])r@r�r�r)r)r*r��s
z.getPhases.<locals>.InCellPhase.endTagTableCellcSs|jjdd|di�dS)Nzunexpected-end-tagr>)rIr|)r@r�r)r)r*r��sz+getPhases.<locals>.InCellPhase.endTagIgnorecSs.|jj|ddd�r |j�|S|jj�dS)Nr>r�)r�)r<r�r�rIr|)r@r�r)r)r*r��sz*getPhases.<locals>.InCellPhase.endTagImplycSs|jjdj|�S)Nr�)rIrFr�)r@r�r)r)r*r�sz*getPhases.<locals>.InCellPhase.endTagOtherN)
r7r8r9rHr�r�r�r�rr�r�r�rr))r�r)r*�InCellPhase[s
r�csxeZdZ�fdd�Zdd�Zdd�Zdd�Zd	d
�Zdd�Zd
d�Z	dd�Z
dd�Zdd�Zdd�Z
dd�Zdd�ZdS)z getPhases.<locals>.InSelectPhasecs��j|||�tjd|jfd|jfd|jfd|jfd	|jfd|jfg�|_	|j
|j	_tjd|jfd|j
fd|jfg�|_|j|j_dS)
Nr�rxryr�rprkrtr)rprkrt)rHrrr��startTagOption�startTagOptgroupr�r�r&r�rr�endTagOption�endTagOptgroup�endTagSelectr�r)r@rIr<)r�r)r*rH�s
z)getPhases.<locals>.InSelectPhase.__init__cSs0|jjdjdkr |jjd�n|jjs,t�dS)Nr	r�z
eof-in-selectry)r<r{r>rIr|rPr�)r@r)r)r*r��sz+getPhases.<locals>.InSelectPhase.processEOFcSs$|ddkrdS|jj|d�dS)Nrsr�)r<r�)r@r�r)r)r*r��sz2getPhases.<locals>.InSelectPhase.processCharacterscSs.|jjdjdkr|jjj�|jj|�dS)Nr	rxry)r<r{r>r+r�)r@r�r)r)r*r��sz/getPhases.<locals>.InSelectPhase.startTagOptioncSsL|jjdjdkr|jjj�|jjdjdkr<|jjj�|jj|�dS)Nr	rxryryry)r<r{r>r+r�)r@r�r)r)r*r��s
z1getPhases.<locals>.InSelectPhase.startTagOptgroupcSs|jjd�|jtd��dS)Nzunexpected-select-in-selectr�)rIr|r�r
)r@r�r)r)r*r��sz/getPhases.<locals>.InSelectPhase.startTagSelectcSs>|jjd�|jjddd�r.|jtd��|S|jjs:t�dS)Nzunexpected-input-in-selectr�)r�)rIr|r<r�r�r
rPr�)r@r�r)r)r*r��s
z.getPhases.<locals>.InSelectPhase.startTagInputcSs|jjdj|�S)Nr)rIrFr�)r@r�r)r)r*r&�sz/getPhases.<locals>.InSelectPhase.startTagScriptcSs|jjdd|di�dS)Nzunexpected-start-tag-in-selectr>)rIr|)r@r�r)r)r*r�sz.getPhases.<locals>.InSelectPhase.startTagOthercSs6|jjdjdkr |jjj�n|jjdddi�dS)Nr	rxzunexpected-end-tag-in-selectr>ry)r<r{r>r+rIr|)r@r�r)r)r*r��sz-getPhases.<locals>.InSelectPhase.endTagOptioncSsf|jjdjdkr0|jjdjdkr0|jjj�|jjd	jdkrP|jjj�n|jjdddi�dS)
Nr	rxr�ryzunexpected-end-tag-in-selectr>ry���ry)r<r{r>r+rIr|)r@r�r)r)r*r��sz/getPhases.<locals>.InSelectPhase.endTagOptgroupcSs^|jjddd�rD|jjj�}x|jdkr6|jjj�}qW|jj�n|jjsPt�|jj	�dS)Nr�)r�)
r<r�r{r+r>rIr_rPr�r|)r@r�r�r)r)r*r��sz-getPhases.<locals>.InSelectPhase.endTagSelectcSs|jjdd|di�dS)Nzunexpected-end-tag-in-selectr>)rIr|)r@r�r)r)r*r	sz,getPhases.<locals>.InSelectPhase.endTagOtherN)r7r8r9rHr�r�r�r�r�r�r&rr�r�r�rr))r�r)r*�
InSelectPhase�s
r�csHeZdZ�fdd�Zdd�Zdd�Zdd�Zd	d
�Zdd�Zd
d�Z	dS)z'getPhases.<locals>.InSelectInTablePhasec	sN�j|||�tjd	|jfg�|_|j|j_tjd
|jfg�|_|j	|j_dS)Nr�r�r�r�r�r�r�r�)r�r�r�r�r�r�r�r�)r�r�r�r�r�r�r�r�)
rHrrr�r�rrr�r�r)r@rIr<)r�r)r*rH	s
z0getPhases.<locals>.InSelectInTablePhase.__init__cSs|jjdj�dS)Nr�)rIrFr�)r@r)r)r*r�	sz2getPhases.<locals>.InSelectInTablePhase.processEOFcSs|jjdj|�S)Nr�)rIrFr�)r@r�r)r)r*r�	sz9getPhases.<locals>.InSelectInTablePhase.processCharacterscSs(|jjdd|di�|jtd��|S)Nz5unexpected-table-element-start-tag-in-select-in-tabler>r�)rIr|rr
)r@r�r)r)r*r�!	sz5getPhases.<locals>.InSelectInTablePhase.startTagTablecSs|jjdj|�S)Nr�)rIrFr�)r@r�r)r)r*r&	sz5getPhases.<locals>.InSelectInTablePhase.startTagOthercSs@|jjdd|di�|jj|ddd�r<|jtd��|SdS)Nz3unexpected-table-element-end-tag-in-select-in-tabler>r�)r�r�)rIr|r<r�rr
)r@r�r)r)r*r�)	sz3getPhases.<locals>.InSelectInTablePhase.endTagTablecSs|jjdj|�S)Nr�)rIrFr�)r@r�r)r)r*r/	sz3getPhases.<locals>.InSelectInTablePhase.endTagOtherN)
r7r8r9rHr�r�r�rr�rr))r�r)r*�InSelectInTablePhase	sr�c-s�eZdZeddddddddd	d
ddd
ddddddddddddddddddd d!d"d#d$d%d&d'd(d)d*d+d,g,�Z�fd-d.�Zd/d0�Z�fd1d2�Zd3d4�Zd5d6�Z	d7S)8z(getPhases.<locals>.InForeignContentPhaserWrXr>r�rr?rYrTr+rBrUrZriZh1Zh2Zh3Zh4Zh5Zh6r�rqr\rjrSrQrJr4rcrLr(rPr�r]r^�spanr`r_�subZsupr�rarbrO�varcs�j|||�dS)N)rH)r@rIr<)r�r)r*rH<	sz1getPhases.<locals>.InForeignContentPhase.__init__c%Ssnddddddddd	d
ddd
ddddddddddddddddddd d!d"d#d$d%�$}|d&|krj||d&|d&<dS)'NZaltGlyphZaltGlyphDefZaltGlyphItemZanimateColorZ
animateMotionZanimateTransformZclipPathZfeBlendZ
feColorMatrixZfeComponentTransferZfeCompositeZfeConvolveMatrixZfeDiffuseLightingZfeDisplacementMapZfeDistantLightZfeFloodZfeFuncAZfeFuncBZfeFuncGZfeFuncRZfeGaussianBlurZfeImageZfeMergeZfeMergeNodeZfeMorphologyZfeOffsetZfePointLightZfeSpecularLightingZfeSpotLightZfeTileZfeTurbulenceZ
foreignObjectZglyphRefZlinearGradientZradialGradientZtextPath)$ZaltglyphZaltglyphdefZaltglyphitemZanimatecolorZ
animatemotionZanimatetransformZclippathZfeblendZ
fecolormatrixZfecomponenttransferZfecompositeZfeconvolvematrixZfediffuselightingZfedisplacementmapZfedistantlightZfefloodZfefuncaZfefuncbZfefuncgZfefuncrZfegaussianblurZfeimageZfemergeZfemergenodeZfemorphologyZfeoffsetZfepointlightZfespecularlightingZfespotlightZfetileZfeturbulenceZ
foreignobjectZglyphrefZlineargradientZradialgradientZtextpathr>r))r@r��replacementsr)r)r*�adjustSVGTagNames?	sLz:getPhases.<locals>.InForeignContentPhase.adjustSVGTagNamescsL|ddkrd|d<n&|jjr<tdd�|dD��r<d|j_�j||�dS)Nrsr�u�css|]}|tkVqdS)N)r)r=r�r)r)r*r�l	szMgetPhases.<locals>.InForeignContentPhase.processCharacters.<locals>.<genexpr>F)rIr`r�r�)r@r�)r�r)r*r�h	s
z:getPhases.<locals>.InForeignContentPhase.processCharacterscSs6|jjd}|d|jksD|ddkr�t|dj��tdddg�@r�|jjdd|di�xR|jjdj|jjkr�|jj	|jjd�r�|jj
|jjd�r�|jjj�q\W|S|jtd	kr�|jj
|�n$|jtd
kr�|j|�|jj|�|jj|�|j|d<|jj|�|d�r2|jjj�d
|d<dS)Nr	r>r[rsZcolorZface�sizez*unexpected-html-element-in-foreign-contentrdrurhrwTrxryryryry)r<r{�breakoutElements�set�keysrIr|rhrrlrmr+rrrrrr�)r@r�r�r)r)r*r�p	s.



z8getPhases.<locals>.InForeignContentPhase.processStartTagcSs�t|jj�d}|jjd}|jjt�|dkrF|jjdd|di�x�|jjt�|dkr�|jj|jj	dkr�|jjj
�|jjj|j_x |jjj�|kr�|jjs�t
�q�Wd}P|d8}|jj|}|j|jjkr�qHqH|jjj|�}PqHW|S)Nr	r>zunexpected-end-tagr�ry)r~r<r{r>rjrrIr|r]rFr�r�r+r�rhrr�)r@r�Z	nodeIndexr�r�r)r)r*r��	s(z6getPhases.<locals>.InForeignContentPhase.processEndTagN)
r7r8r9r�rrHrr�r�r�r))r�r)r*�InForeignContentPhase2	s


)rcsPeZdZ�fdd�Zdd�Zdd�Zdd�Zd	d
�Zdd�Zd
d�Z	dd�Z
dS)z!getPhases.<locals>.AfterBodyPhasecsN�j|||�tjd|jfg�|_|j|j_tjd|jfg�|_|j	|j_dS)Nr�)
rHrrr�r�rrr�r�r)r@rIr<)r�r)r*rH�	s
z*getPhases.<locals>.AfterBodyPhase.__init__cSsdS)Nr))r@r)r)r*r��	sz,getPhases.<locals>.AfterBodyPhase.processEOFcSs|jj||jjd�dS)Nr)r<r�r{)r@r�r)r)r*r��	sz0getPhases.<locals>.AfterBodyPhase.processCommentcSs |jjd�|jjd|j_|S)Nzunexpected-char-after-bodyr�)rIr|rFr])r@r�r)r)r*r��	sz3getPhases.<locals>.AfterBodyPhase.processCharacterscSs|jjdj|�S)Nr�)rIrFr�)r@r�r)r)r*r��	sz.getPhases.<locals>.AfterBodyPhase.startTagHtmlcSs*|jjdd|di�|jjd|j_|S)Nzunexpected-start-tag-after-bodyr>r�)rIr|rFr])r@r�r)r)r*r�	sz/getPhases.<locals>.AfterBodyPhase.startTagOthercSs*|jjr|jjd�n|jjd|j_dS)Nz'unexpected-end-tag-after-body-innerhtml�afterAfterBody)rIrPr|rFr])r@r>r)r)r*r��	sz,getPhases.<locals>.AfterBodyPhase.endTagHtmlcSs*|jjdd|di�|jjd|j_|S)Nzunexpected-end-tag-after-bodyr>r�)rIr|rFr])r@r�r)r)r*r�	sz-getPhases.<locals>.AfterBodyPhase.endTagOtherN)r7r8r9rHr�r�r�r�rr�rr))r�r)r*�AfterBodyPhase�	sr
csXeZdZ�fdd�Zdd�Zdd�Zdd�Zd	d
�Zdd�Zd
d�Z	dd�Z
dd�ZdS)z"getPhases.<locals>.InFramesetPhasecsf�j|||�tjd|jfd|jfd|jfd|jfg�|_|j|j_	tjd|j
fg�|_|j|j_	dS)Nr�r�r{r)
rHrrr�r7�
startTagFrame�startTagNoframesr�rr�endTagFramesetr�r)r@rIr<)r�r)r*rH�	s
z+getPhases.<locals>.InFramesetPhase.__init__cSs0|jjdjdkr |jjd�n|jjs,t�dS)Nr	r�zeof-in-framesetry)r<r{r>rIr|rPr�)r@r)r)r*r��	sz-getPhases.<locals>.InFramesetPhase.processEOFcSs|jjd�dS)Nzunexpected-char-in-frameset)rIr|)r@r�r)r)r*r��	sz4getPhases.<locals>.InFramesetPhase.processCharacterscSs|jj|�dS)N)r<r�)r@r�r)r)r*r7�	sz3getPhases.<locals>.InFramesetPhase.startTagFramesetcSs|jj|�|jjj�dS)N)r<r�r{r+)r@r�r)r)r*r�	sz0getPhases.<locals>.InFramesetPhase.startTagFramecSs|jjdj|�S)Nr�)rIrFr�)r@r�r)r)r*r�	sz3getPhases.<locals>.InFramesetPhase.startTagNoframescSs|jjdd|di�dS)Nz unexpected-start-tag-in-framesetr>)rIr|)r@r�r)r)r*r�	sz0getPhases.<locals>.InFramesetPhase.startTagOthercSs\|jjdjdkr |jjd�n|jjj�|jjrX|jjdjdkrX|jjd|j_dS)Nr	r�z)unexpected-frameset-in-frameset-innerhtmlr��
afterFramesetryry)	r<r{r>rIr|r+rPrFr])r@r�r)r)r*r
�	s
z1getPhases.<locals>.InFramesetPhase.endTagFramesetcSs|jjdd|di�dS)Nzunexpected-end-tag-in-framesetr>)rIr|)r@r�r)r)r*r	
sz.getPhases.<locals>.InFramesetPhase.endTagOtherN)r7r8r9rHr�r�r7rrrr
rr))r�r)r*�InFramesetPhase�	srcsHeZdZ�fdd�Zdd�Zdd�Zdd�Zd	d
�Zdd�Zd
d�Z	dS)z%getPhases.<locals>.AfterFramesetPhasecsV�j|||�tjd|jfd|jfg�|_|j|j_tjd|jfg�|_	|j
|j	_dS)Nr�r)rHrrr�rr�rrr�r�r)r@rIr<)r�r)r*rH
s
z.getPhases.<locals>.AfterFramesetPhase.__init__cSsdS)Nr))r@r)r)r*r�
sz0getPhases.<locals>.AfterFramesetPhase.processEOFcSs|jjd�dS)Nzunexpected-char-after-frameset)rIr|)r@r�r)r)r*r�!
sz7getPhases.<locals>.AfterFramesetPhase.processCharacterscSs|jjdj|�S)Nr)rIrFr�)r@r�r)r)r*r$
sz6getPhases.<locals>.AfterFramesetPhase.startTagNoframescSs|jjdd|di�dS)Nz#unexpected-start-tag-after-framesetr>)rIr|)r@r�r)r)r*r'
sz3getPhases.<locals>.AfterFramesetPhase.startTagOthercSs|jjd|j_dS)N�afterAfterFrameset)rIrFr])r@r�r)r)r*r�+
sz0getPhases.<locals>.AfterFramesetPhase.endTagHtmlcSs|jjdd|di�dS)Nz!unexpected-end-tag-after-framesetr>)rIr|)r@r�r)r)r*r.
sz1getPhases.<locals>.AfterFramesetPhase.endTagOtherN)
r7r8r9rHr�r�rrr�rr))r�r)r*�AfterFramesetPhase
srcsPeZdZ�fdd�Zdd�Zdd�Zdd�Zd	d
�Zdd�Zd
d�Z	dd�Z
dS)z&getPhases.<locals>.AfterAfterBodyPhasecs0�j|||�tjd|jfg�|_|j|j_dS)Nr�)rHrrr�r�rr)r@rIr<)r�r)r*rH3
sz/getPhases.<locals>.AfterAfterBodyPhase.__init__cSsdS)Nr))r@r)r)r*r�;
sz1getPhases.<locals>.AfterAfterBodyPhase.processEOFcSs|jj||jj�dS)N)r<r�r�)r@r�r)r)r*r�>
sz5getPhases.<locals>.AfterAfterBodyPhase.processCommentcSs|jjdj|�S)Nr�)rIrFr�)r@r�r)r)r*r�A
sz=getPhases.<locals>.AfterAfterBodyPhase.processSpaceCharacterscSs |jjd�|jjd|j_|S)Nzexpected-eof-but-got-charr�)rIr|rFr])r@r�r)r)r*r�D
sz8getPhases.<locals>.AfterAfterBodyPhase.processCharacterscSs|jjdj|�S)Nr�)rIrFr�)r@r�r)r)r*r�I
sz3getPhases.<locals>.AfterAfterBodyPhase.startTagHtmlcSs*|jjdd|di�|jjd|j_|S)Nzexpected-eof-but-got-start-tagr>r�)rIr|rFr])r@r�r)r)r*rL
sz4getPhases.<locals>.AfterAfterBodyPhase.startTagOthercSs*|jjdd|di�|jjd|j_|S)Nzexpected-eof-but-got-end-tagr>r�)rIr|rFr])r@r�r)r)r*r�R
sz4getPhases.<locals>.AfterAfterBodyPhase.processEndTagN)r7r8r9rHr�r�r�r�r�rr�r))r�r)r*�AfterAfterBodyPhase2
srcsXeZdZ�fdd�Zdd�Zdd�Zdd�Zd	d
�Zdd�Zd
d�Z	dd�Z
dd�ZdS)z*getPhases.<locals>.AfterAfterFramesetPhasecs8�j|||�tjd|jfd|jfg�|_|j|j_dS)Nr�r)rHrrr��startTagNoFramesr�rr)r@rIr<)r�r)r*rHY
s
z3getPhases.<locals>.AfterAfterFramesetPhase.__init__cSsdS)Nr))r@r)r)r*r�b
sz5getPhases.<locals>.AfterAfterFramesetPhase.processEOFcSs|jj||jj�dS)N)r<r�r�)r@r�r)r)r*r�e
sz9getPhases.<locals>.AfterAfterFramesetPhase.processCommentcSs|jjdj|�S)Nr�)rIrFr�)r@r�r)r)r*r�h
szAgetPhases.<locals>.AfterAfterFramesetPhase.processSpaceCharacterscSs|jjd�dS)Nzexpected-eof-but-got-char)rIr|)r@r�r)r)r*r�k
sz<getPhases.<locals>.AfterAfterFramesetPhase.processCharacterscSs|jjdj|�S)Nr�)rIrFr�)r@r�r)r)r*r�n
sz7getPhases.<locals>.AfterAfterFramesetPhase.startTagHtmlcSs|jjdj|�S)Nr)rIrFr�)r@r�r)r)r*rq
sz;getPhases.<locals>.AfterAfterFramesetPhase.startTagNoFramescSs|jjdd|di�dS)Nzexpected-eof-but-got-start-tagr>)rIr|)r@r�r)r)r*rt
sz8getPhases.<locals>.AfterAfterFramesetPhase.startTagOthercSs|jjdd|di�dS)Nzexpected-eof-but-got-end-tagr>)rIr|)r@r�r)r)r*r�x
sz8getPhases.<locals>.AfterAfterFramesetPhase.processEndTagN)r7r8r9rHr�r�r�r�r�rrr�r))r�r)r*�AfterAfterFramesetPhaseX
s	r)rTrSr�rr/r0r�r�r�r�r�r�r�r�r�r�r�rvr�r�rr	r)r)rGrVr�rrrr1r5r:r�r�r�r�r�r�r�r�r�r�r�rr
rrrrr))r�r*rE_sp)#.g@CX!-GBbYLd's/9%&&rEcs^ts
tjr t|d�t��@}nt|d�t��@}|rZt�fdd�|dj�D��|d<dS)Nrsc3s"|]\}}�j||�|fVqdS)N)r})r=�k�v)rr)r*r��
sz$adjust_attributes.<locals>.<genexpr>)rrZPY27rr�rr.)r�rZneeds_adjustmentr))rr*r��
s
r�rpFcCs|dkri}t||||d�S)N)r2r>rsrw)r)r>r2rirwr)r)r*r
�
s
r
c@seZdZdZdS)rrzError in parsed documentN)r7r8r9r�r)r)r)r*rr�
srr)rT)r+rT)rpNF)1Z
__future__rrrZpip._vendor.sixrrrr0�collectionsr�ImportErrorZpip._vendor.ordereddictr�r
rrZtreebuilders.baser
rZ	constantsrrrrrrrrrrrrr�rrrrr#r-r;rgr"ZmemoizerEr�r
�	Exceptionrrr)r)r)r*�<module>sRH

)L

_vendor/html5lib/__pycache__/constants.cpython-36.opt-1.pyc000064400000201300151733136420017532 0ustar003

�Pf�E��@s�-ddlmZmZmZddlZdZddddddd	d
ddd
ddddddddddddddddddd d!d"d#d$d%d&d'd(d)d*d+d,d-d.d/d,d,d0d1d2d3d4d5d6d7d8d9d:d;d<d=d>d?d@dAdBdCdDdEdFdGdHdIdJdKdLdMdNdOdPdQdRdSdTdUdVdWdXdYdZd[d\d]d^d_d`dadbdcdddedfdgdhdidjdkdldmdndodpdqdrdsdtdudvdwdxdydzd{d|d}d~dd�d�d�d���Zd�d�d�d�d�d�d��Zeed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fg�Z	eed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fg�Z
eed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fgN�Zeed�d�fed�d�fed�d�fed�d�fg�Zeed�d�fed�d�fed�d�fed�d�fed�d�fg�Z
d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�d�d�d�d�d	�d
�d�d�d
�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d �d!�d"�d#�d$�d%�d&�d'�d(�d)�d*�d+�d,�d-�d.�d/�>Z�d0�d1iZ�d2�d3e�d2f�d2�d4e�d2f�d2�d5e�d2f�d2�d6e�d2f�d2�d7e�d2f�d2d�e�d2f�d2�d8e�d2f�d9d�e�d9f�d9�d:e�d9f�d9�d;e�d9fd�d<e�d<f�d<�d2e�d<f�d=�Ze�d>�d?�ej�D��Ze�d@�dA�dB�dC�dDg�Zed�d�d�d�d�g�Zeej�Zeej�Zeej�Zeej�Zeej�Ze�dE�d?�ejD��Z�d|Z ed�d��dFd�d�d�d�d�d�d�d�d�dԐdG�dHg�Z!ed�d�g�Z"ed�d�d�d�d�d�d�g�Z#e�dIg�e�dJg�e�dKg�e�dL�dMg�e�dL�dMg�e�dN�dOg�e�dPg�e�dQ�dRg�e�dS�dR�dT�dUg�e�dVg�e�dWg�e�dR�dXg�e�dR�dX�dYg�e�dR�dXg�e�dR�dZg�e�dR�dX�d[�dZ�dT�dKg�e�dR�dX�dZ�dQg�e�dR�dXg��d\�Z$�d}Z%e�dy�dz�d{�d|�d}g�Z&�d~�d~�d�d�d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��ddÐdĐdŐdƐdǐdȐdɐdʐdːd̐d͐dΐdϐdАdѐdҐdӐd��dѐdԐdՐd֐dÐdאdؐdِdڐdېdܐdݐdސdߐd�d�d�d�d�d�d�d�d�d�d�d�dԐd�d�d�d�d�d�d�d�d�d�d�d�d��d��d��d��d��d��d��d��d��d��d��d��d�d�d�d�d�d�d�d�d�d�d	�d
�d�d�d
�d
�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d �d!�d"�d#�d$�d%�d&�d'�d(�d)�d*�d(�d+�d��d,�d-�d.�d/�d0�d0�d1�d1�d2�d3�d4�d5�d5�d4�d6�d7�dڐd8�d9�d:�d;�d<�d=�d>�d?�d@�dA�dB�dC�dC�dD�dE�dF�dG�dH�dI�dJ�dK�dL�dM�dN�dO�dP�dQ�dR�dS�dT�dT�dU�dV�dW�dX�dY�dZ�d[�d\�d]�d^�d_�d`�da�db�dc�dd�de�df�dg�dh�di�dj�dk�dl�dm�dn�do�dp�dq�dr�ds�dt�dՐd֐du�dv�dw�dx�dy�dz�d{�d|�d}�d~�d�d��d��dאdؐdِd��d��d��dX�d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d"�d��dA�d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��ddÐdĐdŐdƐdǐdȐdɐdʐdːd̐d͐dΐdϐdϐdАdѐdҐdҐdӐdӐdԐdՐd֐dאdאdؐdِdڐdېdܐdݐdސdߐd�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d'�d�d�d�d�d�d�d�d��d��d��d��d��d��d��d��d��d��d��d��d��d��d�d�d�d�d�d�d�d�d�d	�d�d
�d�d�d
�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d �dڐd!�d"�d#�d$�d%�d&�d'�d(�d)�d*�d+�d,�d-�d.�d/�d0�dߐd^�d�d1�d2�d3�d4�d5�d6�d7�d8�d9�d:�d;�d<�d=�d>�d?�d?�d@�dA�dB�dC�dD�d�dE�dF�dG�dH�dF�dI�dI�dJ�dK�dL�d@�dM�dN�dO�dP�dQ�dR�dS�dT�dU�dV�dW�dX�dY�dZ�d[�d\�d]�d^�d^�d_�d`�da�db�dc�dc�dd�de�df�dg�dg�dh�di�dj�dk�dl�dm�dn�do�dp�d1�dq�dr�ds�dt�du�dv�dܐdݐdw�dx�dy�dz�d{�d|�d}�d~�d~�d�d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��dɐdɐd��d��d��d��d��d��d��d��d��d��d��d��d�d�d��d��d��d��d��d��d��d��d��d��ddÐdĐdŐdƐdǐdȐdɐdʐdːd̐d��d͐dΐdϐdY�dАdѐdҐdӐdԐdY�dҐdՐdՐd֐dאdY�d��dؐdؐdِdِd��dڐdېdܐdݐdސdߐd�d�d�d�dk�d�dܐd�d�d��d��d�dݐd��d�d�d�d�d:�d�dm�d�d�d�d�d�d�d�d�d��d��d�d��d
�d��d��d��d��d��d��d��d��d��d��d�d�d�du�du�d�d�d�d�d�d�d	�d
�d�d�d
�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d*�d�d�d�d �d!�d"�d#�d$�d%�d&�d'�d(�d)�d*�d+�d,�d-�d.�dސd��d/�d/�d0�d1�dߐd�d2�d3�d4�d5�d5�d��d6�d,�d,�d7�d8�d9�d:�d;�d<�d=�d>�d?�d$�d@�dA�dB�dB�dC�dD�dE�dF�d��d��dG�dH�dH�d��dI�dJ�dK�dK�dL�dM�dN�dO�dP�dQ�dR�d��dS�dT�dU�dV�dP�dW�dX�dY�dZ�dZ�d[�d��d��d\�d]�d^�d3�d^�d��dX�d_�d��d`�d��d��d��da�db�dc�dd�de�df�dg�dh�di�dj�dk�dl�dm�dn�do�dp�dq�dr�ds�dt�du�dv�dw�dx�dl�dm�dy�dz�d{�d{�dn�dw�dy�dz�d��d|�d}�dԐd~�d�d��dߐd��di�d��dːd��d��dϐd��d��d��d��d��d��d��d��d��dd�d�dΐdΐd��d��dѐd��d��d��d��d��d��d��d��d��d��d��d��dʐdӐd��d��d��d��d��dߐd��dd�d�d��d��d��d��d��d��d��d��d��d��d�d	�d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d�d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��ddÐdĐdĐdŐd��d��d��d��d��dƐdǐd��dȐdɐdʐdːd̐dӐd��d͐dΐdΐdϐdϐdАdѐd�d�d�d��dҐdӐdԐdՐd֐dאdؐdِdڐdېdܐdݐdސd�dߐd�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d��d��d��d��d��d��d��d�d�d�d�d�d�d��d��d��d��d�d�d�d"�d�d�d�d�d�d�d�d�d	�d	�d
�d
�d�d�d�d̐d
�d �d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�d �d�d�d֐d��d�d(�d�dg�d�d�d�d�d�d�d�d�d�d �d!�d"�d#�d$�d%�d&�d'�d(�d�d)�d*�d��d+�d+�d;�d,�d,�d-�d.�d/�d/�d֐d0�d1�d1�d7�d2�d3�d4�d5�d6�d7�d4�d@�d4�d8�d9�d:�d��d;�d<�d=�d8�d9�d>�d��d>�d?�d@�dA�dB�dC�dD�d@�dE�dE�dF�d��dG�dH�dI�dJ�d��d<�dK�dL�dM�dM�dN�dO�dP�dQ�dR�dS�dT�dU�dV�dW�dX�dY�dZ�d[�d\�d]�d^�d_�d}�dՐd`�da�dv�db�dc�dd�de�dX�df�d]�dg�d]�dh�di�di�d^�d_�dj�dk�d$�dl�dm�dn�do�dp�dq�dr�ds�dt�du�dv�dw�dx�dy�dz�d{�d|�da�dv�d}�d~�dܐd�d�d��d��d��d^�do�d�ds�d��dg�d`�d�d�d��du�d��dv�dy�dy�d��d��d��d��d��d��dh�d��du�db�dw�dz�d��df�d��dw�d��d�ds�d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��db�d�d��d��d��dl�d��d��d��d��d��d��di�d��d��d��d��d��d`�d��d�d��d��d��d��d��d��dz�d��d��dw�dݐd��d��dT�dT�d��d��d��d��d��d��d��d��d��dn�d��d��d��d��d��d��d��d��d��d��d��d�d�d�dj�dv�d��d��d��d��d��d��ddÐdÐd��dאdĐd��d��dŐd!�d��dƐdǐd�d��dȐdɐd��dʐd��dːd̐d̐d͐dΐd��dϐdАdѐdҐd��dӐdԐdՐdƐd֐dאd̐dؐdِdڐd̐dېdېd��d��d��d��d��dܐdݐdސdːdߐd�d�d�d��d�d�dx�dx�d�d��d�d��d��d��d�d��d��d��d��d��d��d��d��d��dАd�d�d�d�d�d�d�dϐd�d�d�d��d�d�d��d�d��d��d��d��d��d��d��d͐d�d�d�d��d�d�d��d�d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��dӐd��d��d��d��d��d��dÐdŐdĐd��d͐d��dɐdʐdʐd͐d��d��d��d��d�dd��dd�dÐdĐd�d�dȐdǐdȐd�d��d�d�d��d��d��d��d��d�d�d�d�d�d	�d
�d�d�d
�d�d�d�d�d�d�d�d�d�d�dw�dw�d�dS�d�d�dT�dU�d�d�d�dV�d�d�d��d�d�d �d!�d"�d#�d#�d$�d%�dِd��dQ�d&�d'�d�d(�d)�d*�d+�d,�d��d-�d.�d/�d��d0�dR�d1�d2�d2�d3�d3�d4�d4�d5�d6�d7�d8�d2�d9�d9�d:�d;�d;�d��d<�d=�d=�d>�dސd?�d?�dސd@�dA�d�dB�dC�dD�dE�du�dF�dG�dH�dI�d��dJ�dK�dߐdL�d�dM�d�dN�dO�d"�dP�d��dQ�dR�d�d�dS�dT�d�dU�dV�dW�dW�d�dX�dY�d�d�d�dY�d�d�dZ�d[�d\�d�d]�d�d[�dZ�d\�d��d^�d_�d`�d��d��d�da�db�dc�dd�de�d2�df�dg�dh�d)�di�dj�dǐd��d��d#�dڐdk�d��dl�dm�dn�d5�do�d�dp�dq�d�dr�dr�d�ds�d
�dt�du�dv�d%�d��dw�dx�dy�dz�d{�d|�d��d�d}�d~�d�d��d��d��d��d��d�d~�d��d��d��d��d��d��d�d$�d�d!�d��d��d��d��d�d��d�d�d��d��d��d�dy�d�d�d�d�d��dz�d��d��dʐd�d�d��d��d��d��d��d��d�d��d��d��d��d��d��d��d��d��d��d%�d�d��d��d��d��d��d�d��d��d��d��d��d��dA�d��d��d��dC�dB�d��d��d��d��d��d��dD�d��d��d��d��d��d �d��d��d��d��d��d��d��d��d��d��d�d��d��d��d��dސd��d��d��d��d��dV�d��dW�dW�d��d��d��ddÐdĐdŐd^�d��dƐdǐd��dȐdɐdʐdːd̐d͐dΐdϐdАdѐdѐdސd7�dҐd<�dӐd8�d9�d8�d9�d:�d;�d:�d;�d6�d6�d
�d
�d�dԐd��dȐd>�dՐd�dŐdI�d��d֐dאdؐd@�dِdڐdېdܐdݐdސd֐d@�dאdܐdېdߐd�d�dA�d��dC�dB�d��d��d��dD�dE�d�d�d�d�d�d�d�dG�d�d�d�dH�d�d�d�d�d�d�d�d�dG�dH�d�d�d�d�d�d�d�d!�d��d��d��d��d��d��d��d�d��d��d��d[�d��d��dR�dR�d��d��d��dY�dV�dU�dY�dV�d�d�d͐d�d�d#�d�d�d3�d�d�d�d�d�d�d��d�dJ�d	�d��d��dn�d
�d��d�d�d
�d�d
�d�d�d�d�d�d�d�d�dY�d�dܐd�d�d�d1�d�d�d�d�d�dr�d�dt�d�d�d�d�dq�d�d�d �d �d!�d"�d#�dѐdѐd$�d%�d1�ds�dq�d�dn�d&�dy�d&�d'�d(�d(�d)�d*�d+�d,�d-�d.�d	�d��d'�d/�d/�d0�dݐd1�d2�dېd3�dŐdW�d��dI�dL�d��ds�d��d��d4�d5�d6�d7�d��dl�d�d8�d�d0�d9�d:�d;�d��d��d<�dl�d��dǐd=�d��d�d>�d5�d4�d7�d6�d?�d@�dA�d��dB�dC�dD�dE�dC�d��d��dF�d:�d�dm�d�dG�dؐd��dH�dאd�d��dI�d�dJ�d�d�dِd��dK�d�d�d�d��d��dL�dL�dM�dN�dO�dP�dP�dQ�dR�dS�dT�dU�dV�dV�dW�dX�dY�dZ�d��d[�d\�d]�d^�d_�d`�da�db�dc���Z'�dd�dD�dАde�d��dݐd�d�d�d��dO�dE�d,�d��dѐdf�d��dg�dh�dݐd��dܐd��d5�d�d��d͐dJ�d��d��d�di�dX�d��dj�"Z(d�dk�dl�dm�dn�do�dp�dq�dr�Z)ee)�dse)�dte)�dug�Z*e�dv�d?�ej�D��Z+�dwe+d�<G�dx�dy��dye,�Z-G�dz�d{��d{e.�Z/dS(~�)�absolute_import�division�unicode_literalsNz5Null character in input stream, replaced with U+FFFD.zInvalid codepoint in stream.z&Solidus (/) incorrectly placed in tag.z.Incorrect CR newline entity, replaced with LF.z9Entity used with illegal number (windows-1252 reference).zPNumeric entity couldn't be converted to character (codepoint U+%(charAsInt)08x).zBNumeric entity represents an illegal codepoint: U+%(charAsInt)08x.z#Numeric entity didn't end with ';'.z1Numeric entity expected. Got end of file instead.z'Numeric entity expected but none found.z!Named entity didn't end with ';'.z Named entity expected. Got none.z'End tag contains unexpected attributes.z.End tag contains unexpected self-closing flag.z#Expected tag name. Got '>' instead.zSExpected tag name. Got '?' instead. (HTML doesn't support processing instructions.)z-Expected tag name. Got something else insteadz6Expected closing tag. Got '>' instead. Ignoring '</>'.z-Expected closing tag. Unexpected end of file.z<Expected closing tag. Unexpected character '%(data)s' found.z'Unexpected end of file in the tag name.z8Unexpected end of file. Expected attribute name instead.z)Unexpected end of file in attribute name.z#Invalid character in attribute namez#Dropped duplicate attribute on tag.z1Unexpected end of file. 
Expected = or end of tag.z1Unexpected end of file. Expected attribute value.z*Expected attribute value. Got '>' instead.z"Unexpected = in unquoted attributez*Unexpected character in unquoted attributez*Unexpected character after attribute name.z+Unexpected character after attribute value.z.Unexpected end of file in attribute value (").z.Unexpected end of file in attribute value (').z*Unexpected end of file in attribute value.z)Unexpected end of file in tag. Expected >z/Unexpected character after / in tag. Expected >z&Expected '--' or 'DOCTYPE'. Not found.z Unexpected ! after -- in commentz$Unexpected space after -- in commentzIncorrect comment.z"Unexpected end of file in comment.z%Unexpected end of file in comment (-)z+Unexpected '-' after '--' found in comment.z'Unexpected end of file in comment (--).z&Unexpected character in comment found.z(No space after literal string 'DOCTYPE'.z.Unexpected > character. Expected DOCTYPE name.z.Unexpected end of file. Expected DOCTYPE name.z'Unexpected end of file in DOCTYPE name.z"Unexpected end of file in DOCTYPE.z%Expected space or '>'. Got '%(data)s'zUnexpected end of DOCTYPE.z Unexpected character in DOCTYPE.zXXX innerHTML EOFzUnexpected DOCTYPE. Ignored.z%html needs to be the first start tag.z)Unexpected End of file. Expected DOCTYPE.zErroneous DOCTYPE.z2Unexpected non-space characters. Expected DOCTYPE.z2Unexpected start tag (%(name)s). Expected DOCTYPE.z0Unexpected end tag (%(name)s). Expected DOCTYPE.z?Unexpected end tag (%(name)s) after the (implied) root element.z4Unexpected end of file. Expected end tag (%(name)s).z4Unexpected start tag head in existing head. Ignored.z'Unexpected end tag (%(name)s). Ignored.z;Unexpected start tag (%(name)s) that can be in head. Moved.z Unexpected start tag (%(name)s).zMissing end tag (%(name)s).zMissing end tags (%(name)s).zCUnexpected start tag (%(startName)s) implies end tag (%(endName)s).z@Unexpected start tag (%(originalName)s). 
Treated as %(newName)s.z,Unexpected start tag %(name)s. Don't use it!z'Unexpected start tag %(name)s. Ignored.zEUnexpected end tag (%(gotName)s). Missing end tag (%(expectedName)s).z:End tag (%(name)s) seen too early. Expected other end tag.zFUnexpected end tag (%(gotName)s). Expected end tag (%(expectedName)s).z+End tag (%(name)s) seen too early. Ignored.zQEnd tag (%(name)s) violates step 1, paragraph 1 of the adoption agency algorithm.zQEnd tag (%(name)s) violates step 1, paragraph 2 of the adoption agency algorithm.zQEnd tag (%(name)s) violates step 1, paragraph 3 of the adoption agency algorithm.zQEnd tag (%(name)s) violates step 4, paragraph 4 of the adoption agency algorithm.z>Unexpected end tag (%(originalName)s). Treated as %(newName)s.z'This element (%(name)s) has no end tag.z9Unexpected implied end tag (%(name)s) in the table phase.z>Unexpected implied end tag (%(name)s) in the table body phase.zDUnexpected non-space characters in table context caused voodoo mode.z3Unexpected input with type hidden in table context.z!Unexpected form in table context.zDUnexpected start tag (%(name)s) in table context caused voodoo mode.zBUnexpected end tag (%(name)s) in table context caused voodoo mode.zCUnexpected table cell start tag (%(name)s) in the table body phase.zFGot table cell end tag (%(name)s) while required end tags are missing.z?Unexpected end tag (%(name)s) in the table body phase. Ignored.z=Unexpected implied end tag (%(name)s) in the table row phase.z>Unexpected end tag (%(name)s) in the table row phase. Ignored.zJUnexpected select start tag in the select phase treated as select end tag.z/Unexpected input start tag in the select phase.zBUnexpected start tag token (%(name)s in the select phase. Ignored.z;Unexpected end tag (%(name)s) in the select phase. 
Ignored.zKUnexpected table element start tag (%(name)s) in the select in table phase.zIUnexpected table element end tag (%(name)s) in the select in table phase.z8Unexpected non-space characters in the after body phase.z>Unexpected start tag token (%(name)s) in the after body phase.z<Unexpected end tag token (%(name)s) in the after body phase.z@Unexpected characters in the frameset phase. Characters ignored.zEUnexpected start tag token (%(name)s) in the frameset phase. Ignored.zFUnexpected end tag token (frameset) in the frameset phase (innerHTML).zCUnexpected end tag token (%(name)s) in the frameset phase. Ignored.zEUnexpected non-space characters in the after frameset phase. Ignored.zEUnexpected start tag (%(name)s) in the after frameset phase. Ignored.zCUnexpected end tag (%(name)s) in the after frameset phase. Ignored.z(Unexpected end tag after body(innerHtml)z6Unexpected non-space characters. Expected end of file.z6Unexpected start tag (%(name)s). Expected end of file.z4Unexpected end tag (%(name)s). Expected end of file.z/Unexpected end of file. Expected table content.z0Unexpected end of file. Expected select content.z2Unexpected end of file. Expected frameset content.z0Unexpected end of file. Expected script content.z0Unexpected end of file. Expected foreign contentz0Trailing solidus not allowed on element %(name)sz2Element %(name)s not allowed in a non-html contextz*Unexpected end tag (%(name)s) before html.z9Element %(name)s not allowed in a inhead-noscript contextz8Unexpected end of file. Expected inhead-noscript contentz@Unexpected non-space character. 
Expected inhead-noscript contentz0Undefined error (this sucks and should be fixed))�znull-characterzinvalid-codepointzincorrectly-placed-soliduszincorrect-cr-newline-entityzillegal-windows-1252-entityzcant-convert-numeric-entityz$illegal-codepoint-for-numeric-entityz numeric-entity-without-semicolonz#expected-numeric-entity-but-got-eofzexpected-numeric-entityznamed-entity-without-semicolonzexpected-named-entityzattributes-in-end-tagzself-closing-flag-on-end-tagz'expected-tag-name-but-got-right-bracketz'expected-tag-name-but-got-question-markzexpected-tag-namez*expected-closing-tag-but-got-right-bracketz expected-closing-tag-but-got-eofz!expected-closing-tag-but-got-charzeof-in-tag-namez#expected-attribute-name-but-got-eofzeof-in-attribute-namez#invalid-character-in-attribute-namezduplicate-attributez$expected-end-of-tag-name-but-got-eofz$expected-attribute-value-but-got-eofz.expected-attribute-value-but-got-right-bracketz"equals-in-unquoted-attribute-valuez0unexpected-character-in-unquoted-attribute-valuez&invalid-character-after-attribute-namez*unexpected-character-after-attribute-valuez#eof-in-attribute-value-double-quotez#eof-in-attribute-value-single-quotez eof-in-attribute-value-no-quotesz#unexpected-EOF-after-solidus-in-tagz)unexpected-character-after-solidus-in-tagzexpected-dashes-or-doctypez,unexpected-bang-after-double-dash-in-commentz-unexpected-space-after-double-dash-in-commentzincorrect-commentzeof-in-commentzeof-in-comment-end-dashz,unexpected-dash-after-double-dash-in-commentzeof-in-comment-double-dashzeof-in-comment-end-space-statezeof-in-comment-end-bang-statezunexpected-char-in-commentzneed-space-after-doctypez+expected-doctype-name-but-got-right-bracketz!expected-doctype-name-but-got-eofzeof-in-doctype-namezeof-in-doctypez*expected-space-or-right-bracket-in-doctypezunexpected-end-of-doctypezunexpected-char-in-doctypezeof-in-innerhtmlzunexpected-doctypez
non-html-rootzexpected-doctype-but-got-eofzunknown-doctypezexpected-doctype-but-got-charsz"expected-doctype-but-got-start-tagz expected-doctype-but-got-end-tagzend-tag-after-implied-rootz&expected-named-closing-tag-but-got-eofz!two-heads-are-not-better-than-onezunexpected-end-tagz#unexpected-start-tag-out-of-my-headzunexpected-start-tagzmissing-end-tagzmissing-end-tagsz$unexpected-start-tag-implies-end-tagzunexpected-start-tag-treated-aszdeprecated-tagzunexpected-start-tag-ignoredz$expected-one-end-tag-but-got-anotherzend-tag-too-earlyzend-tag-too-early-namedzend-tag-too-early-ignoredzadoption-agency-1.1zadoption-agency-1.2zadoption-agency-1.3zadoption-agency-4.4zunexpected-end-tag-treated-asz
no-end-tagz#unexpected-implied-end-tag-in-tablez(unexpected-implied-end-tag-in-table-bodyz$unexpected-char-implies-table-voodooz unexpected-hidden-input-in-tablezunexpected-form-in-tablez)unexpected-start-tag-implies-table-voodooz'unexpected-end-tag-implies-table-voodoozunexpected-cell-in-table-bodyzunexpected-cell-end-tagz unexpected-end-tag-in-table-bodyz'unexpected-implied-end-tag-in-table-rowzunexpected-end-tag-in-table-rowzunexpected-select-in-selectzunexpected-input-in-selectzunexpected-start-tag-in-selectzunexpected-end-tag-in-selectz5unexpected-table-element-start-tag-in-select-in-tablez3unexpected-table-element-end-tag-in-select-in-tablezunexpected-char-after-bodyzunexpected-start-tag-after-bodyzunexpected-end-tag-after-bodyzunexpected-char-in-framesetz unexpected-start-tag-in-framesetz)unexpected-frameset-in-frameset-innerhtmlzunexpected-end-tag-in-framesetzunexpected-char-after-framesetz#unexpected-start-tag-after-framesetz!unexpected-end-tag-after-framesetz'unexpected-end-tag-after-body-innerhtmlzexpected-eof-but-got-charzexpected-eof-but-got-start-tagzexpected-eof-but-got-end-tagzeof-in-tablez
eof-in-selectzeof-in-framesetzeof-in-script-in-scriptzeof-in-foreign-landsz&non-void-element-with-trailing-solidusz*unexpected-html-element-in-foreign-contentzunexpected-end-tag-before-htmlzunexpected-inhead-noscript-tagzeof-in-head-noscriptzchar-in-head-noscriptzXXX-undefined-errorzhttp://www.w3.org/1999/xhtmlz"http://www.w3.org/1998/Math/MathMLzhttp://www.w3.org/2000/svgzhttp://www.w3.org/1999/xlinkz$http://www.w3.org/XML/1998/namespacezhttp://www.w3.org/2000/xmlns/)�html�mathml�svg�xlink�xml�xmlnsrZappletZcaptionZmarquee�object�tableZtdZthrZmi�moZmnZmsZmtextzannotation-xmlrZ
foreignObjectZdesc�title�a�bZbig�codeZemZfont�iZnobr�sZsmallZstrikeZstrongZtt�uZaddressZareaZarticleZaside�baseZbasefontZbgsoundZ
blockquoteZbody�br�button�center�colZcolgroup�commandZdd�details�dirZdivZdlZdtZembed�fieldsetZfigureZfooterZform�frameZframeset�h1�h2�h3�h4�h5�h6�head�header�hrZiframeZimage�img�inputZisindexZli�linkZlisting�menu�metaZnavZnoembedZnoframesZnoscriptZol�pZparamZ	plaintextZpre�scriptZsection�select�styleZtbodyZtextareaZtfootZtheadZtrZulZwbrZxmpz
annotaion-xmlZ
attributeNameZ
attributeTypeZ
baseFrequencyZbaseProfileZcalcModeZ
clipPathUnitsZcontentScriptTypeZcontentStyleTypeZdiffuseConstantZedgeModeZexternalResourcesRequiredZ	filterResZfilterUnitsZglyphRefZgradientTransformZ
gradientUnitsZkernelMatrixZkernelUnitLengthZ	keyPointsZ
keySplinesZkeyTimesZlengthAdjustZlimitingConeAngleZmarkerHeightZmarkerUnitsZmarkerWidthZmaskContentUnitsZ	maskUnitsZ
numOctavesZ
pathLengthZpatternContentUnitsZpatternTransformZpatternUnitsZ	pointsAtXZ	pointsAtYZ	pointsAtZZ
preserveAlphaZpreserveAspectRatioZprimitiveUnitsZrefXZrefYZrepeatCountZ	repeatDurZrequiredExtensionsZrequiredFeaturesZspecularConstantZspecularExponentZspreadMethodZstartOffsetZstdDeviationZstitchTilesZsurfaceScaleZsystemLanguageZtableValuesZtargetXZtargetYZ
textLengthZviewBoxZ
viewTargetZxChannelSelectorZyChannelSelectorZ
zoomAndPan)>Z
attributenameZ
attributetypeZ
basefrequencyZbaseprofileZcalcmodeZ
clippathunitsZcontentscripttypeZcontentstyletypeZdiffuseconstantZedgemodeZexternalresourcesrequiredZ	filterresZfilterunitsZglyphrefZgradienttransformZ
gradientunitsZkernelmatrixZkernelunitlengthZ	keypointsZ
keysplinesZkeytimesZlengthadjustZlimitingconeangleZmarkerheightZmarkerunitsZmarkerwidthZmaskcontentunitsZ	maskunitsZ
numoctavesZ
pathlengthZpatterncontentunitsZpatterntransformZpatternunitsZ	pointsatxZ	pointsatyZ	pointsatzZ
preservealphaZpreserveaspectratioZprimitiveunitsZrefxZrefyZrepeatcountZ	repeatdurZrequiredextensionsZrequiredfeaturesZspecularconstantZspecularexponentZspreadmethodZstartoffsetZstddeviationZstitchtilesZsurfacescaleZsystemlanguageZtablevaluesZtargetxZtargetyZ
textlengthZviewboxZ
viewtargetZxchannelselectorZychannelselectorZ
zoomandpanZ
definitionurlZ
definitionURLrZactuateZarcroleZhrefZroleZshow�typer	ZlangZspacer
)z
xlink:actuatez
xlink:arcrolez
xlink:hrefz
xlink:rolez
xlink:showzxlink:titlez
xlink:typezxml:basezxml:langz	xml:spacer
zxmlns:xlinkcCs"g|]\}\}}}||f|f�qS�r2)�.0Zqname�prefixZlocal�nsr2r2�/usr/lib/python3.6/constants.py�
<listcomp>
sr7�	�
�� �
cCs g|]}t|�t|j��f�qSr2)�ord�lower)r3�cr2r2r6r7#szevent-source�sourceZtrackZ
irrelevantZscopedZismapZautoplayZcontrolsZdefer�async�openZmultipleZdisabledZhiddenZchecked�defaultZnoshadeZ
autosubmit�readonlyZselectedZ	autofocusZrequired)�r0r(ZaudioZvideor.rZdatagridrr'r+rZoptionZoptgrouprr)r/�output� �� �� �& �  �! ���0 �`�9 �R�}� � � � �" � � ���"!�a�: �S�~�xzlt;zgt;zamp;zapos;zquot;�Æ�&�ÁuĂ�ÂuАu𝔄�ÀuΑuĀu⩓uĄu𝔸u⁡�Åu𝒜u≔�Ã�Äu∖u⫧u⌆uБu∵uℬuΒu𝔅u𝔹u˘u≎uЧ�©uĆu⋒uⅅuℭuČ�ÇuĈu∰uĊ�¸�·uΧu⊙u⊖u⊕u⊗u∲u”u’u∷u⩴u≡u∯u∮uℂu∐u∳u⨯u𝒞u⋓u≍u⤑uЂuЅuЏu‡u↡u⫤uĎuДu∇uΔu𝔇�´u˙u˝�`u˜u⋄uⅆu𝔻�¨u⃜u≐u⇓u⇐u⇔u⟸u⟺u⟹u⇒u⊨u⇑u⇕u∥u↓u⤓u⇵ȗu⥐u⥞u↽u⥖u⥟u⇁u⥗u⊤u↧u𝒟uĐuŊ�Ð�ÉuĚ�ÊuЭuĖu𝔈�Èu∈uĒu◻u▫uĘu𝔼uΕu⩵u≂u⇌uℰu⩳uΗ�Ëu∃uⅇuФu𝔉u◼u▪u𝔽u∀uℱuЃ�>uΓuϜuĞuĢuĜuГuĠu𝔊u⋙u𝔾u≥u⋛u≧u⪢u≷u⩾u≳u𝒢u≫uЪuˇ�^uĤuℌuℋuℍu─uĦu≏uЕuIJuЁ�Í�ÎuИuİuℑ�ÌuĪuⅈu∬u∫u⋂u⁣u⁢uĮu𝕀uΙuℐuĨuІ�ÏuĴuЙu𝔍u𝕁u𝒥uЈuЄuХuЌuΚuĶuКu𝔎u𝕂u𝒦uЉ�<uĹuΛu⟪uℒu↞uĽuĻuЛu⟨u←u⇤u⇆u⌈u⟦u⥡u⇃u⥙u⌊u↔u⥎u⊣u↤u⥚u⊲u⧏u⊴u⥑u⥠u↿u⥘u↼u⥒u⋚u≦u≶u⪡u⩽u≲u𝔏u⋘u⇚uĿu⟵u⟷u⟶u𝕃u↙u↘u↰uŁu≪u⤅uМu uℳu𝔐u∓u𝕄uΜuЊuŃuŇuŅuНu​u𝔑u⁠� uℕu⫬u≢u≭u∦u∉u≠u≂̸u∄u≯u≱u≧̸u≫̸u≹u⩾̸u≵u≎̸u≏̸u⋪u⧏̸u⋬u≮u≰u≸u≪̸u⩽̸u≴u⪢̸u⪡̸u⊀u⪯̸u⋠u∌u⋫u⧐̸u⋭u⊏̸u⋢u⊐̸u⋣u⊂⃒u⊈u⊁u⪰̸u⋡u≿̸u⊃⃒u⊉u≁u≄u≇u≉u∤u𝒩�ÑuΝuŒ�Ó�ÔuОuŐu𝔒�ÒuŌuΩuΟu𝕆u“u‘u⩔u𝒪�Ø�Õu⨷�Öu‾u⏞u⎴u⏜u∂uПu𝔓uΦuΠ�±uℙu⪻u≺u⪯u≼u≾u″u∏u∝u𝒫uΨ�"u𝔔uℚu𝒬u⤐�®uŔu⟫u↠u⤖uŘuŖuРuℜu∋u⇋u⥯uΡu⟩u→u⇥u⇄u⌉u⟧u⥝u⇂u⥕u⌋u⊢u↦u⥛u⊳u⧐u⊵u⥏u⥜u↾u⥔u⇀u⥓uℝu⥰u⇛uℛu↱u⧴uЩuШuЬuŚu⪼uŠuŞuŜuСu𝔖u↑uΣu∘u𝕊u√u□u⊓u⊏u⊑u⊐u⊒u⊔u𝒮u⋆u⋐u⊆u≻u⪰u≽u≿u∑u⋑u⊃u⊇�Þu™uЋuЦuΤuŤuŢuТu𝔗u∴uΘu  u u∼u≃u≅u≈u𝕋u⃛u𝒯uŦ�Úu↟u⥉uЎuŬ�ÛuУuŰu𝔘�ÙuŪ�_u⏟u⎵u⏝u⋃u⊎uŲu𝕌u⤒u⇅u↕u⥮u⊥u↥u↖u↗uϒuΥuŮu𝒰uŨ�Üu⊫u⫫uВu⊩u⫦u⋁u‖u∣�|u❘u≀u u𝔙u𝕍u𝒱u⊪uŴu⋀u𝔚u𝕎u𝒲u𝔛uΞu𝕏u𝒳uЯuЇuЮ�ÝuŶuЫu𝔜u𝕐u𝒴uŸuЖuŹuŽuЗuŻuΖuℨuℤu𝒵�áuău∾u∾̳u∿�âuа�æu𝔞�àuℵuαuāu⨿u∧u⩕u⩜u⩘u⩚u∠u⦤u∡u⦨u⦩u⦪u⦫u⦬u⦭u⦮u⦯u∟u⊾u⦝u∢u⍼uąu𝕒u⩰u⩯u≊u≋�'�åu𝒶�*�ã�äu⨑u⫭u≌u϶u‵u∽u⋍u⊽u⌅u⎶uбu„u⦰uβuℶu≬u𝔟u◯u⨀u⨁u⨂u⨆u★u▽u△u⨄u⤍u⧫u▴u▾u◂u▸u␣u▒u░u▓u█u=⃥u≡⃥u⌐u𝕓u⋈u╗u╔u╖u╓u═u╦u╩u╤u╧u╝u╚u╜u╙u║u╬u╣u╠u╫u╢u╟u⧉u╕u╒u┐u┌u╥u╨u┬u┴u⊟u⊞u⊠u╛u╘u┘u└u│u╪u╡u╞u┼u┤u├�¦u𝒷u⁏�\u⧅u⟈u•u⪮uću∩u⩄u⩉u⩋u⩇u⩀u∩︀u⁁u⩍uč�çuĉu⩌u⩐uċu⦲�¢u𝔠uчu✓uχu○u⧃uˆu≗u↺u↻uⓈu⊛u⊚u⊝u⨐u⫯u⧂u♣�:�,�@u∁u⩭u𝕔u℗u↵u✗u𝒸u⫏u⫑u⫐u⫒u⋯u⤸u⤵u⋞u⋟u↶u⤽u∪u⩈u⩆u⩊u⊍u⩅u∪︀u↷u⤼u⋎u⋏�¤u∱u⌭u⥥u†uℸu‐u⤏uďuдu⇊u⩷�°uδu⦱u⥿u𝔡u♦uϝu⋲�÷u⋇uђu⌞u⌍�$u𝕕u≑u∸u∔u⊡u⌟u⌌u𝒹uѕu⧶uđu⋱u▿u⦦uџu⟿�éu⩮uěu≖�êu≕uэuėu≒u𝔢u⪚�èu⪖u⪘u⪙u⏧uℓu⪕u⪗uēu∅u u u uŋu 
uęu𝕖u⋕u⧣u⩱uεuϵ�=u≟u⩸u⧥u≓u⥱uℯuη�ð�ëu€�!uфu♀uffiuffufflu𝔣ufiZfju♭uflu▱uƒu𝕗u⋔u⫙u⨍�½u⅓�¼u⅕u⅙u⅛u⅔u⅖�¾u⅗u⅜u⅘u⅚u⅝u⅞u⁄u⌢u𝒻u⪌uǵuγu⪆uğuĝuгuġu⪩u⪀u⪂u⪄u⋛︀u⪔u𝔤uℷuѓu⪒u⪥u⪤u≩u⪊u⪈u⋧u𝕘uℊu⪎u⪐u⪧u⩺u⋗u⦕u⩼u⥸u≩︀uъu⥈u↭uℏuĥu♥u…u⊹u𝔥u⤥u⤦u⇿u∻u↩u↪u𝕙u―u𝒽uħu⁃�í�îuиuе�¡u𝔦�ìu⨌u∭u⧜u℩uijuīuıu⊷uƵu℅u∞u⧝u⊺u⨗u⨼uёuįu𝕚uι�¿u𝒾u⋹u⋵u⋴u⋳uĩuі�ïuĵuйu𝔧uȷu𝕛u𝒿uјuєuκuϰuķuкu𝔨uĸuхuќu𝕜u𝓀u⤛u⤎u⪋u⥢uĺu⦴uλu⦑u⪅�«u⤟u⤝u↫u⤹u⥳u↢u⪫u⤙u⪭u⪭︀u⤌u❲�{�[u⦋u⦏u⦍uľuļuлu⤶u⥧u⥋u↲u≤u⇇u⋋u⪨u⩿u⪁u⪃u⋚︀u⪓u⋖u⥼u𝔩u⪑u⥪u▄uљu⥫u◺uŀu⎰u≨u⪉u⪇u⋦u⟬u⇽u⟼u↬u⦅u𝕝u⨭u⨴u∗u◊�(u⦓u⥭u‎u⊿u‹u𝓁u⪍u⪏u‚ułu⪦u⩹u⋉u⥶u⩻u⦖u◃u⥊u⥦u≨︀u∺�¯u♂u✠u▮u⨩uмu—u𝔪u℧�µu⫰u−u⨪u⫛u⊧u𝕞u𝓂uμu⊸u⋙̸u≫⃒u⇍u⇎u⋘̸u≪⃒u⇏u⊯u⊮uńu∠⃒u⩰̸u≋̸uʼnu♮u⩃uňuņu⩭̸u⩂uнu–u⇗u⤤u≐̸u⤨u𝔫u↮u⫲u⋼u⋺uњu≦̸u↚u‥u𝕟�¬u⋹̸u⋵̸u⋷u⋶u⋾u⋽u⫽⃥u∂̸u⨔u↛u⤳̸u↝̸u𝓃u⊄u⫅̸u⊅u⫆̸�ñuν�#u№u u⊭u⤄u≍⃒u⊬u≥⃒u>⃒u⧞u⤂u≤⃒u<⃒u⊴⃒u⤃u⊵⃒u∼⃒u⇖u⤣u⤧�ó�ôuоuőu⨸u⦼uœu⦿u𝔬u˛�òu⧁u⦵u⦾u⦻u⧀uōuωuοu⦶u𝕠u⦷u⦹u∨u⩝uℴ�ª�ºu⊶u⩖u⩗u⩛�øu⊘�õu⨶�öu⌽�¶u⫳u⫽uп�%�.u‰u‱u𝔭uφuϕu☎uπuϖuℎ�+u⨣u⨢u⨥u⩲u⨦u⨧u⨕u𝕡�£u⪳u⪷u⪹u⪵u⋨u′u⌮u⌒u⌓u⊰u𝓅uψu u𝔮u𝕢u⁗u𝓆u⨖�?u⤜u⥤u∽̱uŕu⦳u⦒u⦥�»u⥵u⤠u⤳u⤞u⥅u⥴u↣u↝u⤚u∶u❳�}�]u⦌u⦎u⦐uřuŗuрu⤷u⥩u↳u▭u⥽u𝔯u⥬uρuϱu⇉u⋌u˚u‏u⎱u⫮u⟭u⇾u⦆u𝕣u⨮u⨵�)u⦔u⨒u›u𝓇u⋊u▹u⧎u⥨u℞uśu⪴u⪸ušuşuŝu⪶u⪺u⋩u⨓uсu⋅u⩦u⇘�§�;u⤩u✶u𝔰u♯uщuш�­uσuςu⩪u⪞u⪠u⪝u⪟u≆u⨤u⥲u⨳u⧤u⌣u⪪u⪬u⪬︀uь�/u⧄u⌿u𝕤u♠u⊓︀u⊔︀u𝓈u☆u⊂u⫅u⪽u⫃u⫁u⫋u⊊u⪿u⥹u⫇u⫕u⫓u♪�¹�²�³u⫆u⪾u⫘u⫄u⟉u⫗u⥻u⫂u⫌u⊋u⫀u⫈u⫔u⫖u⇙u⤪�ßu⌖uτuťuţuтu⌕u𝔱uθuϑ�þ�×u⨱u⨰u⌶u⫱u𝕥u⫚u‴u▵u≜u◬u⨺u⨹u⧍u⨻u⏢u𝓉uцuћuŧu⥣�úuўuŭ�ûuуuűu⥾u𝔲�ùu▀u⌜u⌏u◸uūuųu𝕦uυu⇈u⌝u⌎uůu◹u𝓊u⋰uũ�üu⦧u⫨u⫩u⦜u⊊︀u⫋︀u⊋︀u⫌︀uвu⊻u≚u⋮u𝔳u𝕧u𝓋u⦚uŵu⩟u≙u℘u𝔴u𝕨u𝓌u𝔵uξu⋻u𝕩u𝓍�ýuяuŷuы�¥u𝔶uїu𝕪u𝓎uю�ÿuźužuзużuζu𝔷uжu⇝u𝕫u𝓏u‍u‌(�ZAEligzAElig;ZAMPzAMP;ZAacutezAacute;zAbreve;ZAcirczAcirc;zAcy;zAfr;ZAgravezAgrave;zAlpha;zAmacr;zAnd;zAogon;zAopf;zApplyFunction;ZAringzAring;zAscr;zAssign;ZAtildezAtilde;ZAumlzAuml;z
Backslash;zBarv;zBarwed;zBcy;zBecause;zBernoullis;zBeta;zBfr;zBopf;zBreve;zBscr;zBumpeq;zCHcy;ZCOPYzCOPY;zCacute;zCap;zCapitalDifferentialD;zCayleys;zCcaron;ZCcedilzCcedil;zCcirc;zCconint;zCdot;zCedilla;z
CenterDot;zCfr;zChi;z
CircleDot;zCircleMinus;zCirclePlus;zCircleTimes;zClockwiseContourIntegral;zCloseCurlyDoubleQuote;zCloseCurlyQuote;zColon;zColone;z
Congruent;zConint;zContourIntegral;zCopf;z
Coproduct;z CounterClockwiseContourIntegral;zCross;zCscr;zCup;zCupCap;zDD;z	DDotrahd;zDJcy;zDScy;zDZcy;zDagger;zDarr;zDashv;zDcaron;zDcy;zDel;zDelta;zDfr;zDiacriticalAcute;zDiacriticalDot;zDiacriticalDoubleAcute;zDiacriticalGrave;zDiacriticalTilde;zDiamond;zDifferentialD;zDopf;zDot;zDotDot;z	DotEqual;zDoubleContourIntegral;z
DoubleDot;zDoubleDownArrow;zDoubleLeftArrow;zDoubleLeftRightArrow;zDoubleLeftTee;zDoubleLongLeftArrow;zDoubleLongLeftRightArrow;zDoubleLongRightArrow;zDoubleRightArrow;zDoubleRightTee;zDoubleUpArrow;zDoubleUpDownArrow;zDoubleVerticalBar;z
DownArrow;z
DownArrowBar;zDownArrowUpArrow;z
DownBreve;zDownLeftRightVector;zDownLeftTeeVector;zDownLeftVector;zDownLeftVectorBar;zDownRightTeeVector;zDownRightVector;zDownRightVectorBar;zDownTee;z
DownTeeArrow;z
Downarrow;zDscr;zDstrok;zENG;ZETHzETH;ZEacutezEacute;zEcaron;ZEcirczEcirc;zEcy;zEdot;zEfr;ZEgravezEgrave;zElement;zEmacr;zEmptySmallSquare;zEmptyVerySmallSquare;zEogon;zEopf;zEpsilon;zEqual;zEqualTilde;zEquilibrium;zEscr;zEsim;zEta;ZEumlzEuml;zExists;z
ExponentialE;zFcy;zFfr;zFilledSmallSquare;zFilledVerySmallSquare;zFopf;zForAll;zFouriertrf;zFscr;zGJcy;ZGTzGT;zGamma;zGammad;zGbreve;zGcedil;zGcirc;zGcy;zGdot;zGfr;zGg;zGopf;z
GreaterEqual;zGreaterEqualLess;zGreaterFullEqual;zGreaterGreater;zGreaterLess;zGreaterSlantEqual;z
GreaterTilde;zGscr;zGt;zHARDcy;zHacek;zHat;zHcirc;zHfr;z
HilbertSpace;zHopf;zHorizontalLine;zHscr;zHstrok;z
HumpDownHump;z
HumpEqual;zIEcy;zIJlig;zIOcy;ZIacutezIacute;ZIcirczIcirc;zIcy;zIdot;zIfr;ZIgravezIgrave;zIm;zImacr;zImaginaryI;zImplies;zInt;z	Integral;z
Intersection;zInvisibleComma;zInvisibleTimes;zIogon;zIopf;zIota;zIscr;zItilde;zIukcy;ZIumlzIuml;zJcirc;zJcy;zJfr;zJopf;zJscr;zJsercy;zJukcy;zKHcy;zKJcy;zKappa;zKcedil;zKcy;zKfr;zKopf;zKscr;zLJcy;ZLTzLT;zLacute;zLambda;zLang;zLaplacetrf;zLarr;zLcaron;zLcedil;zLcy;zLeftAngleBracket;z
LeftArrow;z
LeftArrowBar;zLeftArrowRightArrow;zLeftCeiling;zLeftDoubleBracket;zLeftDownTeeVector;zLeftDownVector;zLeftDownVectorBar;z
LeftFloor;zLeftRightArrow;zLeftRightVector;zLeftTee;z
LeftTeeArrow;zLeftTeeVector;z
LeftTriangle;zLeftTriangleBar;zLeftTriangleEqual;zLeftUpDownVector;zLeftUpTeeVector;z
LeftUpVector;zLeftUpVectorBar;zLeftVector;zLeftVectorBar;z
Leftarrow;zLeftrightarrow;zLessEqualGreater;zLessFullEqual;zLessGreater;z	LessLess;zLessSlantEqual;z
LessTilde;zLfr;zLl;zLleftarrow;zLmidot;zLongLeftArrow;zLongLeftRightArrow;zLongRightArrow;zLongleftarrow;zLongleftrightarrow;zLongrightarrow;zLopf;zLowerLeftArrow;zLowerRightArrow;zLscr;zLsh;zLstrok;zLt;zMap;zMcy;zMediumSpace;z
Mellintrf;zMfr;z
MinusPlus;zMopf;zMscr;zMu;zNJcy;zNacute;zNcaron;zNcedil;zNcy;zNegativeMediumSpace;zNegativeThickSpace;zNegativeThinSpace;zNegativeVeryThinSpace;zNestedGreaterGreater;zNestedLessLess;zNewLine;zNfr;zNoBreak;zNonBreakingSpace;zNopf;zNot;z
NotCongruent;z
NotCupCap;zNotDoubleVerticalBar;zNotElement;z	NotEqual;zNotEqualTilde;z
NotExists;zNotGreater;zNotGreaterEqual;zNotGreaterFullEqual;zNotGreaterGreater;zNotGreaterLess;zNotGreaterSlantEqual;zNotGreaterTilde;zNotHumpDownHump;z
NotHumpEqual;zNotLeftTriangle;zNotLeftTriangleBar;zNotLeftTriangleEqual;zNotLess;z
NotLessEqual;zNotLessGreater;zNotLessLess;zNotLessSlantEqual;z
NotLessTilde;zNotNestedGreaterGreater;zNotNestedLessLess;zNotPrecedes;zNotPrecedesEqual;zNotPrecedesSlantEqual;zNotReverseElement;zNotRightTriangle;zNotRightTriangleBar;zNotRightTriangleEqual;zNotSquareSubset;zNotSquareSubsetEqual;zNotSquareSuperset;zNotSquareSupersetEqual;z
NotSubset;zNotSubsetEqual;zNotSucceeds;zNotSucceedsEqual;zNotSucceedsSlantEqual;zNotSucceedsTilde;zNotSuperset;zNotSupersetEqual;z	NotTilde;zNotTildeEqual;zNotTildeFullEqual;zNotTildeTilde;zNotVerticalBar;zNscr;ZNtildezNtilde;zNu;zOElig;ZOacutezOacute;ZOcirczOcirc;zOcy;zOdblac;zOfr;ZOgravezOgrave;zOmacr;zOmega;zOmicron;zOopf;zOpenCurlyDoubleQuote;zOpenCurlyQuote;zOr;zOscr;ZOslashzOslash;ZOtildezOtilde;zOtimes;ZOumlzOuml;zOverBar;z
OverBrace;zOverBracket;zOverParenthesis;z	PartialD;zPcy;zPfr;zPhi;zPi;z
PlusMinus;zPoincareplane;zPopf;zPr;z	Precedes;zPrecedesEqual;zPrecedesSlantEqual;zPrecedesTilde;zPrime;zProduct;zProportion;z
Proportional;zPscr;zPsi;ZQUOTzQUOT;zQfr;zQopf;zQscr;zRBarr;ZREGzREG;zRacute;zRang;zRarr;zRarrtl;zRcaron;zRcedil;zRcy;zRe;zReverseElement;zReverseEquilibrium;zReverseUpEquilibrium;zRfr;zRho;zRightAngleBracket;zRightArrow;zRightArrowBar;zRightArrowLeftArrow;z
RightCeiling;zRightDoubleBracket;zRightDownTeeVector;zRightDownVector;zRightDownVectorBar;zRightFloor;z	RightTee;zRightTeeArrow;zRightTeeVector;zRightTriangle;zRightTriangleBar;zRightTriangleEqual;zRightUpDownVector;zRightUpTeeVector;zRightUpVector;zRightUpVectorBar;zRightVector;zRightVectorBar;zRightarrow;zRopf;z
RoundImplies;zRrightarrow;zRscr;zRsh;zRuleDelayed;zSHCHcy;zSHcy;zSOFTcy;zSacute;zSc;zScaron;zScedil;zScirc;zScy;zSfr;zShortDownArrow;zShortLeftArrow;zShortRightArrow;z
ShortUpArrow;zSigma;zSmallCircle;zSopf;zSqrt;zSquare;zSquareIntersection;z
SquareSubset;zSquareSubsetEqual;zSquareSuperset;zSquareSupersetEqual;zSquareUnion;zSscr;zStar;zSub;zSubset;zSubsetEqual;z	Succeeds;zSucceedsEqual;zSucceedsSlantEqual;zSucceedsTilde;z	SuchThat;zSum;zSup;z	Superset;zSupersetEqual;zSupset;ZTHORNzTHORN;zTRADE;zTSHcy;zTScy;zTab;zTau;zTcaron;zTcedil;zTcy;zTfr;z
Therefore;zTheta;zThickSpace;z
ThinSpace;zTilde;zTildeEqual;zTildeFullEqual;zTildeTilde;zTopf;z
TripleDot;zTscr;zTstrok;ZUacutezUacute;zUarr;z	Uarrocir;zUbrcy;zUbreve;ZUcirczUcirc;zUcy;zUdblac;zUfr;ZUgravezUgrave;zUmacr;z	UnderBar;zUnderBrace;z
UnderBracket;zUnderParenthesis;zUnion;z
UnionPlus;zUogon;zUopf;zUpArrow;zUpArrowBar;zUpArrowDownArrow;zUpDownArrow;zUpEquilibrium;zUpTee;zUpTeeArrow;zUparrow;zUpdownarrow;zUpperLeftArrow;zUpperRightArrow;zUpsi;zUpsilon;zUring;zUscr;zUtilde;ZUumlzUuml;zVDash;zVbar;zVcy;zVdash;zVdashl;zVee;zVerbar;zVert;zVerticalBar;z
VerticalLine;zVerticalSeparator;zVerticalTilde;zVeryThinSpace;zVfr;zVopf;zVscr;zVvdash;zWcirc;zWedge;zWfr;zWopf;zWscr;zXfr;zXi;zXopf;zXscr;zYAcy;zYIcy;zYUcy;ZYacutezYacute;zYcirc;zYcy;zYfr;zYopf;zYscr;zYuml;zZHcy;zZacute;zZcaron;zZcy;zZdot;zZeroWidthSpace;zZeta;zZfr;zZopf;zZscr;Zaacutezaacute;zabreve;zac;zacE;zacd;Zacirczacirc;Zacutezacute;zacy;Zaeligzaelig;zaf;zafr;Zagravezagrave;zalefsym;zaleph;zalpha;zamacr;zamalg;Zampzamp;zand;zandand;zandd;z	andslope;zandv;zang;zange;zangle;zangmsd;z	angmsdaa;z	angmsdab;z	angmsdac;z	angmsdad;z	angmsdae;z	angmsdaf;z	angmsdag;z	angmsdah;zangrt;zangrtvb;z	angrtvbd;zangsph;zangst;zangzarr;zaogon;zaopf;zap;zapE;zapacir;zape;zapid;zapos;zapprox;z	approxeq;Zaringzaring;zascr;zast;zasymp;zasympeq;Zatildezatilde;Zaumlzauml;z	awconint;zawint;zbNot;z	backcong;zbackepsilon;z
backprime;zbacksim;z
backsimeq;zbarvee;zbarwed;z	barwedge;zbbrk;z	bbrktbrk;zbcong;zbcy;zbdquo;zbecaus;zbecause;zbemptyv;zbepsi;zbernou;zbeta;zbeth;zbetween;zbfr;zbigcap;zbigcirc;zbigcup;zbigodot;z	bigoplus;z
bigotimes;z	bigsqcup;zbigstar;zbigtriangledown;zbigtriangleup;z	biguplus;zbigvee;z	bigwedge;zbkarow;z
blacklozenge;zblacksquare;zblacktriangle;zblacktriangledown;zblacktriangleleft;zblacktriangleright;zblank;zblk12;zblk14;zblk34;zblock;zbne;zbnequiv;zbnot;zbopf;zbot;zbottom;zbowtie;zboxDL;zboxDR;zboxDl;zboxDr;zboxH;zboxHD;zboxHU;zboxHd;zboxHu;zboxUL;zboxUR;zboxUl;zboxUr;zboxV;zboxVH;zboxVL;zboxVR;zboxVh;zboxVl;zboxVr;zboxbox;zboxdL;zboxdR;zboxdl;zboxdr;zboxh;zboxhD;zboxhU;zboxhd;zboxhu;z	boxminus;zboxplus;z	boxtimes;zboxuL;zboxuR;zboxul;zboxur;zboxv;zboxvH;zboxvL;zboxvR;zboxvh;zboxvl;zboxvr;zbprime;zbreve;Zbrvbarzbrvbar;zbscr;zbsemi;zbsim;zbsime;zbsol;zbsolb;z	bsolhsub;zbull;zbullet;zbump;zbumpE;zbumpe;zbumpeq;zcacute;zcap;zcapand;z	capbrcup;zcapcap;zcapcup;zcapdot;zcaps;zcaret;zcaron;zccaps;zccaron;Zccedilzccedil;zccirc;zccups;zccupssm;zcdot;Zcedilzcedil;zcemptyv;Zcentzcent;z
centerdot;zcfr;zchcy;zcheck;z
checkmark;zchi;zcir;zcirE;zcirc;zcirceq;zcirclearrowleft;zcirclearrowright;z	circledR;z	circledS;zcircledast;zcircledcirc;zcircleddash;zcire;z	cirfnint;zcirmid;zcirscir;zclubs;z	clubsuit;zcolon;zcolone;zcoloneq;zcomma;zcommat;zcomp;zcompfn;zcomplement;z
complexes;zcong;zcongdot;zconint;zcopf;zcoprod;�copyzcopy;zcopysr;zcrarr;zcross;zcscr;zcsub;zcsube;zcsup;zcsupe;zctdot;zcudarrl;zcudarrr;zcuepr;zcuesc;zcularr;zcularrp;zcup;z	cupbrcap;zcupcap;zcupcup;zcupdot;zcupor;zcups;zcurarr;zcurarrm;zcurlyeqprec;zcurlyeqsucc;z	curlyvee;zcurlywedge;Zcurrenzcurren;zcurvearrowleft;zcurvearrowright;zcuvee;zcuwed;z	cwconint;zcwint;zcylcty;zdArr;zdHar;zdagger;zdaleth;zdarr;zdash;zdashv;zdbkarow;zdblac;zdcaron;zdcy;zdd;zddagger;zddarr;zddotseq;Zdegzdeg;zdelta;zdemptyv;zdfisht;zdfr;zdharl;zdharr;zdiam;zdiamond;zdiamondsuit;zdiams;zdie;zdigamma;zdisin;zdiv;Zdividezdivide;zdivideontimes;zdivonx;zdjcy;zdlcorn;zdlcrop;zdollar;zdopf;zdot;zdoteq;z	doteqdot;z	dotminus;zdotplus;z
dotsquare;zdoublebarwedge;z
downarrow;zdowndownarrows;zdownharpoonleft;zdownharpoonright;z	drbkarow;zdrcorn;zdrcrop;zdscr;zdscy;zdsol;zdstrok;zdtdot;zdtri;zdtrif;zduarr;zduhar;zdwangle;zdzcy;z	dzigrarr;zeDDot;zeDot;Zeacutezeacute;zeaster;zecaron;zecir;Zecirczecirc;zecolon;zecy;zedot;zee;zefDot;zefr;zeg;Zegravezegrave;zegs;zegsdot;zel;z	elinters;zell;zels;zelsdot;zemacr;zempty;z	emptyset;zemptyv;zemsp13;zemsp14;zemsp;zeng;zensp;zeogon;zeopf;zepar;zeparsl;zeplus;zepsi;zepsilon;zepsiv;zeqcirc;zeqcolon;zeqsim;zeqslantgtr;zeqslantless;zequals;zequest;zequiv;zequivDD;z	eqvparsl;zerDot;zerarr;zescr;zesdot;zesim;zeta;Zethzeth;Zeumlzeuml;zeuro;zexcl;zexist;zexpectation;z
exponentiale;zfallingdotseq;zfcy;zfemale;zffilig;zfflig;zffllig;zffr;zfilig;zfjlig;zflat;zfllig;zfltns;zfnof;zfopf;zforall;zfork;zforkv;z	fpartint;Zfrac12zfrac12;zfrac13;Zfrac14zfrac14;zfrac15;zfrac16;zfrac18;zfrac23;zfrac25;Zfrac34zfrac34;zfrac35;zfrac38;zfrac45;zfrac56;zfrac58;zfrac78;zfrasl;zfrown;zfscr;zgE;zgEl;zgacute;zgamma;zgammad;zgap;zgbreve;zgcirc;zgcy;zgdot;zge;zgel;zgeq;zgeqq;z	geqslant;zges;zgescc;zgesdot;zgesdoto;z	gesdotol;zgesl;zgesles;zgfr;zgg;zggg;zgimel;zgjcy;zgl;zglE;zgla;zglj;zgnE;zgnap;z	gnapprox;zgne;zgneq;zgneqq;zgnsim;zgopf;zgrave;zgscr;zgsim;zgsime;zgsiml;�gtzgt;zgtcc;zgtcir;zgtdot;zgtlPar;zgtquest;z
gtrapprox;zgtrarr;zgtrdot;z
gtreqless;zgtreqqless;zgtrless;zgtrsim;z
gvertneqq;zgvnE;zhArr;zhairsp;zhalf;zhamilt;zhardcy;zharr;zharrcir;zharrw;zhbar;zhcirc;zhearts;z
heartsuit;zhellip;zhercon;zhfr;z	hksearow;z	hkswarow;zhoarr;zhomtht;zhookleftarrow;zhookrightarrow;zhopf;zhorbar;zhscr;zhslash;zhstrok;zhybull;zhyphen;Ziacuteziacute;zic;Zicirczicirc;zicy;ziecy;Ziexclziexcl;ziff;zifr;Zigravezigrave;zii;ziiiint;ziiint;ziinfin;ziiota;zijlig;zimacr;zimage;z	imagline;z	imagpart;zimath;zimof;zimped;zin;zincare;zinfin;z	infintie;zinodot;zint;zintcal;z	integers;z	intercal;z	intlarhk;zintprod;ziocy;ziogon;ziopf;ziota;ziprod;Ziquestziquest;ziscr;zisin;zisinE;zisindot;zisins;zisinsv;zisinv;zit;zitilde;ziukcy;Ziumlziuml;zjcirc;zjcy;zjfr;zjmath;zjopf;zjscr;zjsercy;zjukcy;zkappa;zkappav;zkcedil;zkcy;zkfr;zkgreen;zkhcy;zkjcy;zkopf;zkscr;zlAarr;zlArr;zlAtail;zlBarr;zlE;zlEg;zlHar;zlacute;z	laemptyv;zlagran;zlambda;zlang;zlangd;zlangle;zlap;Zlaquozlaquo;zlarr;zlarrb;zlarrbfs;zlarrfs;zlarrhk;zlarrlp;zlarrpl;zlarrsim;zlarrtl;zlat;zlatail;zlate;zlates;zlbarr;zlbbrk;zlbrace;zlbrack;zlbrke;zlbrksld;zlbrkslu;zlcaron;zlcedil;zlceil;zlcub;zlcy;zldca;zldquo;zldquor;zldrdhar;z	ldrushar;zldsh;zle;z
leftarrow;zleftarrowtail;zleftharpoondown;zleftharpoonup;zleftleftarrows;zleftrightarrow;zleftrightarrows;zleftrightharpoons;zleftrightsquigarrow;zleftthreetimes;zleg;zleq;zleqq;z	leqslant;zles;zlescc;zlesdot;zlesdoto;z	lesdotor;zlesg;zlesges;zlessapprox;zlessdot;z
lesseqgtr;zlesseqqgtr;zlessgtr;zlesssim;zlfisht;zlfloor;zlfr;zlg;zlgE;zlhard;zlharu;zlharul;zlhblk;zljcy;zll;zllarr;z	llcorner;zllhard;zlltri;zlmidot;zlmoust;zlmoustache;zlnE;zlnap;z	lnapprox;zlne;zlneq;zlneqq;zlnsim;zloang;zloarr;zlobrk;zlongleftarrow;zlongleftrightarrow;zlongmapsto;zlongrightarrow;zlooparrowleft;zlooparrowright;zlopar;zlopf;zloplus;zlotimes;zlowast;zlowbar;zloz;zlozenge;zlozf;zlpar;zlparlt;zlrarr;z	lrcorner;zlrhar;zlrhard;zlrm;zlrtri;zlsaquo;zlscr;zlsh;zlsim;zlsime;zlsimg;zlsqb;zlsquo;zlsquor;zlstrok;�ltzlt;zltcc;zltcir;zltdot;zlthree;zltimes;zltlarr;zltquest;zltrPar;zltri;zltrie;zltrif;z	lurdshar;zluruhar;z
lvertneqq;zlvnE;zmDDot;Zmacrzmacr;zmale;zmalt;zmaltese;zmap;zmapsto;zmapstodown;zmapstoleft;z	mapstoup;zmarker;zmcomma;zmcy;zmdash;zmeasuredangle;zmfr;zmho;�microzmicro;zmid;zmidast;zmidcir;Zmiddotzmiddot;zminus;zminusb;zminusd;zminusdu;zmlcp;zmldr;zmnplus;zmodels;zmopf;zmp;zmscr;zmstpos;zmu;z	multimap;zmumap;znGg;znGt;znGtv;znLeftarrow;znLeftrightarrow;znLl;znLt;znLtv;znRightarrow;znVDash;znVdash;znabla;znacute;znang;znap;znapE;znapid;znapos;znapprox;znatur;znatural;z	naturals;Znbspznbsp;znbump;znbumpe;zncap;zncaron;zncedil;zncong;z	ncongdot;zncup;zncy;zndash;zne;zneArr;znearhk;znearr;znearrow;znedot;znequiv;znesear;znesim;znexist;znexists;znfr;zngE;znge;zngeq;zngeqq;z
ngeqslant;znges;zngsim;zngt;zngtr;znhArr;znharr;znhpar;zni;znis;znisd;zniv;znjcy;znlArr;znlE;znlarr;znldr;znle;znleftarrow;znleftrightarrow;znleq;znleqq;z
nleqslant;znles;znless;znlsim;znlt;znltri;znltrie;znmid;znopf;�notznot;znotin;znotinE;z	notindot;znotinva;znotinvb;znotinvc;znotni;znotniva;znotnivb;znotnivc;znpar;z
nparallel;znparsl;znpart;znpolint;znpr;znprcue;znpre;znprec;znpreceq;znrArr;znrarr;znrarrc;znrarrw;znrightarrow;znrtri;znrtrie;znsc;znsccue;znsce;znscr;z
nshortmid;znshortparallel;znsim;znsime;znsimeq;znsmid;znspar;znsqsube;znsqsupe;znsub;znsubE;znsube;znsubset;z
nsubseteq;znsubseteqq;znsucc;znsucceq;znsup;znsupE;znsupe;znsupset;z
nsupseteq;znsupseteqq;zntgl;Zntildezntilde;zntlg;zntriangleleft;zntrianglelefteq;zntriangleright;zntrianglerighteq;znu;znum;znumero;znumsp;znvDash;znvHarr;znvap;znvdash;znvge;znvgt;znvinfin;znvlArr;znvle;znvlt;znvltrie;znvrArr;znvrtrie;znvsim;znwArr;znwarhk;znwarr;znwarrow;znwnear;zoS;Zoacutezoacute;zoast;zocir;Zocirczocirc;zocy;zodash;zodblac;zodiv;zodot;zodsold;zoelig;zofcir;zofr;zogon;Zogravezograve;zogt;zohbar;zohm;zoint;zolarr;zolcir;zolcross;zoline;zolt;zomacr;zomega;zomicron;zomid;zominus;zoopf;zopar;zoperp;zoplus;zor;zorarr;zord;zorder;zorderof;Zordfzordf;Zordmzordm;zorigof;zoror;zorslope;zorv;zoscr;Zoslashzoslash;zosol;Zotildezotilde;zotimes;z	otimesas;Zoumlzouml;zovbar;zpar;Zparazpara;z	parallel;zparsim;zparsl;zpart;zpcy;zpercnt;zperiod;zpermil;zperp;zpertenk;zpfr;zphi;zphiv;zphmmat;zphone;zpi;z
pitchfork;zpiv;zplanck;zplanckh;zplankv;zplus;z	plusacir;zplusb;zpluscir;zplusdo;zplusdu;zpluse;Zplusmnzplusmn;zplussim;zplustwo;zpm;z	pointint;zpopf;Zpoundzpound;zpr;zprE;zprap;zprcue;zpre;zprec;zprecapprox;zpreccurlyeq;zpreceq;zprecnapprox;z	precneqq;z	precnsim;zprecsim;zprime;zprimes;zprnE;zprnap;zprnsim;zprod;z	profalar;z	profline;z	profsurf;zprop;zpropto;zprsim;zprurel;zpscr;zpsi;zpuncsp;zqfr;zqint;zqopf;zqprime;zqscr;zquaternions;zquatint;zquest;zquesteq;Zquotzquot;zrAarr;zrArr;zrAtail;zrBarr;zrHar;zrace;zracute;zradic;z	raemptyv;zrang;zrangd;zrange;zrangle;Zraquozraquo;zrarr;zrarrap;zrarrb;zrarrbfs;zrarrc;zrarrfs;zrarrhk;zrarrlp;zrarrpl;zrarrsim;zrarrtl;zrarrw;zratail;zratio;z
rationals;zrbarr;zrbbrk;zrbrace;zrbrack;zrbrke;zrbrksld;zrbrkslu;zrcaron;zrcedil;zrceil;zrcub;zrcy;zrdca;zrdldhar;zrdquo;zrdquor;zrdsh;zreal;zrealine;z	realpart;zreals;zrect;Zregzreg;zrfisht;zrfloor;zrfr;zrhard;zrharu;zrharul;zrho;zrhov;zrightarrow;zrightarrowtail;zrightharpoondown;zrightharpoonup;zrightleftarrows;zrightleftharpoons;zrightrightarrows;zrightsquigarrow;zrightthreetimes;zring;z
risingdotseq;zrlarr;zrlhar;zrlm;zrmoust;zrmoustache;zrnmid;zroang;zroarr;zrobrk;zropar;zropf;zroplus;zrotimes;zrpar;zrpargt;z	rppolint;zrrarr;zrsaquo;zrscr;zrsh;zrsqb;zrsquo;zrsquor;zrthree;zrtimes;zrtri;zrtrie;zrtrif;z	rtriltri;zruluhar;zrx;zsacute;zsbquo;zsc;zscE;zscap;zscaron;zsccue;zsce;zscedil;zscirc;zscnE;zscnap;zscnsim;z	scpolint;zscsim;zscy;zsdot;zsdotb;zsdote;zseArr;zsearhk;zsearr;zsearrow;Zsectzsect;zsemi;zseswar;z	setminus;zsetmn;zsext;zsfr;zsfrown;zsharp;zshchcy;zshcy;z	shortmid;zshortparallel;Zshyzshy;zsigma;zsigmaf;zsigmav;zsim;zsimdot;zsime;zsimeq;zsimg;zsimgE;zsiml;zsimlE;zsimne;zsimplus;zsimrarr;zslarr;zsmallsetminus;zsmashp;z	smeparsl;zsmid;zsmile;zsmt;zsmte;zsmtes;zsoftcy;zsol;zsolb;zsolbar;zsopf;zspades;z
spadesuit;zspar;zsqcap;zsqcaps;zsqcup;zsqcups;zsqsub;zsqsube;z	sqsubset;zsqsubseteq;zsqsup;zsqsupe;z	sqsupset;zsqsupseteq;zsqu;zsquare;zsquarf;zsquf;zsrarr;zsscr;zssetmn;zssmile;zsstarf;zstar;zstarf;zstraightepsilon;zstraightphi;zstrns;zsub;zsubE;zsubdot;zsube;zsubedot;zsubmult;zsubnE;zsubne;zsubplus;zsubrarr;zsubset;z	subseteq;z
subseteqq;z
subsetneq;zsubsetneqq;zsubsim;zsubsub;zsubsup;zsucc;zsuccapprox;zsucccurlyeq;zsucceq;zsuccnapprox;z	succneqq;z	succnsim;zsuccsim;zsum;zsung;Zsup1zsup1;Zsup2zsup2;Zsup3zsup3;zsup;zsupE;zsupdot;zsupdsub;zsupe;zsupedot;zsuphsol;zsuphsub;zsuplarr;zsupmult;zsupnE;zsupne;zsupplus;zsupset;z	supseteq;z
supseteqq;z
supsetneq;zsupsetneqq;zsupsim;zsupsub;zsupsup;zswArr;zswarhk;zswarr;zswarrow;zswnwar;Zszligzszlig;ztarget;ztau;ztbrk;ztcaron;ztcedil;ztcy;ztdot;ztelrec;ztfr;zthere4;z
therefore;ztheta;z	thetasym;zthetav;zthickapprox;z	thicksim;zthinsp;zthkap;zthksim;Zthornzthorn;ztilde;�timesztimes;ztimesb;z	timesbar;ztimesd;ztint;ztoea;ztop;ztopbot;ztopcir;ztopf;ztopfork;ztosa;ztprime;ztrade;z	triangle;z
triangledown;z
triangleleft;ztrianglelefteq;z
triangleq;ztriangleright;ztrianglerighteq;ztridot;ztrie;z	triminus;ztriplus;ztrisb;ztritime;z	trpezium;ztscr;ztscy;ztshcy;ztstrok;ztwixt;ztwoheadleftarrow;ztwoheadrightarrow;zuArr;zuHar;Zuacutezuacute;zuarr;zubrcy;zubreve;Zucirczucirc;zucy;zudarr;zudblac;zudhar;zufisht;zufr;Zugravezugrave;zuharl;zuharr;zuhblk;zulcorn;z	ulcorner;zulcrop;zultri;zumacr;Zumlzuml;zuogon;zuopf;zuparrow;zupdownarrow;zupharpoonleft;zupharpoonright;zuplus;zupsi;zupsih;zupsilon;zupuparrows;zurcorn;z	urcorner;zurcrop;zuring;zurtri;zuscr;zutdot;zutilde;zutri;zutrif;zuuarr;Zuumlzuuml;zuwangle;zvArr;zvBar;zvBarv;zvDash;zvangrt;zvarepsilon;z	varkappa;zvarnothing;zvarphi;zvarpi;z
varpropto;zvarr;zvarrho;z	varsigma;z
varsubsetneq;zvarsubsetneqq;z
varsupsetneq;zvarsupsetneqq;z	vartheta;zvartriangleleft;zvartriangleright;zvcy;zvdash;zvee;zveebar;zveeeq;zvellip;zverbar;zvert;zvfr;zvltri;zvnsub;zvnsup;zvopf;zvprop;zvrtri;zvscr;zvsubnE;zvsubne;zvsupnE;zvsupne;zvzigzag;zwcirc;zwedbar;zwedge;zwedgeq;zweierp;zwfr;zwopf;zwp;zwr;zwreath;zwscr;zxcap;zxcirc;zxcup;zxdtri;zxfr;zxhArr;zxharr;zxi;zxlArr;zxlarr;zxmap;zxnis;zxodot;zxopf;zxoplus;zxotime;zxrArr;zxrarr;zxscr;zxsqcup;zxuplus;zxutri;zxvee;zxwedge;Zyacutezyacute;zyacy;zycirc;zycy;Zyenzyen;zyfr;zyicy;zyopf;zyscr;zyucy;Zyumlzyuml;zzacute;zzcaron;zzcy;zzdot;zzeetrf;zzeta;zzfr;zzhcy;zzigrarr;zzopf;zzscr;zzwj;zzwnj;u������)"r�
���������������������������������������)ZDoctypeZ
CharactersZSpaceCharacters�StartTag�EndTag�EmptyTag�CommentZ
ParseErrorrrrcCsg|]\}}||f�qSr2r2)r3�k�vr2r2r6r7xsZmathc@seZdZdS)�DataLossWarningN)�__name__�
__module__�__qualname__r2r2r2r6r|src@seZdZdS)�ReparseExceptionN)rrrr2r2r2r6r�sr)rr r!r"r#r$) rGrHrIrJrKrLrMrNrOrPrQrRrSrHrTrHrHrUrVrWrXrYrZr[r\r]r^r_r`rHrarb)0Z
__future__rrr�stringZEOF�EZ
namespaces�	frozensetZscopingElementsZformattingElementsZspecialElementsZhtmlIntegrationPointElementsZ"mathmlTextIntegrationPointElementsZadjustSVGAttributesZadjustMathMLAttributesZadjustForeignAttributes�dict�itemsZunadjustForeignAttributesZspaceCharactersZtableInsertModeElementsZascii_lowercaseZasciiLowercaseZascii_uppercaseZasciiUppercaseZ
ascii_lettersZasciiLettersZdigitsZ	hexdigitsZ	hexDigitsZasciiUpper2LowerZheadingElementsZvoidElementsZ
cdataElementsZrcdataElementsZbooleanAttributesZentitiesWindows1252ZxmlEntitiesZentitiesZreplacementCharactersZ
tokenTypesZ
tagTokenTypes�prefixes�UserWarningr�	Exceptionrr2r2r2r6�<module>s<






























































































































_vendor/html5lib/__pycache__/__init__.cpython-36.pyc000064400000001563151733136420016327 0ustar003

�Pf�@shdZddlmZmZmZddlmZmZmZddl	m
Z
ddlmZddl
mZdd	d
ddd
gZdZdS)aM
HTML parsing library based on the WHATWG "HTML5"
specification. The parser is designed to be compatible with existing
HTML found in the wild and implements well-defined error recovery that
is largely compatible with modern desktop web browsers.

Example usage:

import html5lib
f = open("my_document.html")
tree = html5lib.parse(f)
�)�absolute_import�division�unicode_literals�)�
HTMLParser�parse�
parseFragment)�getTreeBuilder)�
getTreeWalker)�	serializerrrr	r
rz1.0b10N)�__doc__Z
__future__rrrZhtml5parserrrrZtreebuildersr	Ztreewalkersr
Z
serializerr�__all__�__version__�rr�/usr/lib/python3.6/__init__.py�<module>s_vendor/html5lib/__pycache__/serializer.cpython-36.opt-1.pyc000064400000021775151733136420017707 0ustar003

�Pfa7�@s�ddlmZmZmZddlmZddlZddlmZm	Z	ddl
mZmZm
Z
ddl
mZmZmZddlmZmZdd	lmZd
je
�dZejded
�Zejded�ZiZed�dkZx�eej��D]p\Z Z!er�ee!�dks�er�ee!�dkr�q�e!dkr�ee!�dk�rej"e!�Z!ne#e!�Z!e!ek�s4e j$�r�e ee!<q�Wdd�Z%ede%�ddd�Z&Gdd�de'�Z(Gdd�de)�Z*dS)�)�absolute_import�division�unicode_literals)�	text_typeN)�register_error�xmlcharrefreplace_errors�)�voidElements�booleanAttributes�spaceCharacters)�rcdataElements�entities�xmlEntities)�treewalkers�_utils)�escape�z"'=<>`�[�]u_	

 /`  ᠎᠏           

   ]u􏿿��&c
Cs"t|ttf��rg}g}d}x�t|j|j|j��D]n\}}|rFd}q4||j}tj|j|t	|j|dg���r�tj
|j||d��}d}nt|�}|j|�q4Wx^|D]V}t
j|�}	|	r�|jd�|j|	�|	jd�s�|jd�q�|jdt|�dd��q�Wdj|�|jfSt|�SdS)NFrTr�;z&#x%s;r)�
isinstance�UnicodeEncodeError�UnicodeTranslateError�	enumerate�object�start�endrZisSurrogatePair�min�surrogatePairToCodepoint�ord�append�_encode_entity_map�get�endswith�hex�joinr)
�exc�resZ
codepoints�skip�i�c�indexZ	codepointZcp�e�r/� /usr/lib/python3.6/serializer.py�htmlentityreplace_errors*s0 
"




r1�htmlentityreplace�etreecKs$tj|�}tf|�}|j||�|�S)N)rZ
getTreeWalker�HTMLSerializer�render)�inputZtree�encodingZserializer_optsZwalker�sr/r/r0�	serializeJs

r9c@s~eZdZdZdZdZdZdZdZdZ	dZ
dZdZdZ
dZdZdZd!Zdd�Zdd�Zdd�Zd"dd�Zd#dd�Zd$dd �ZdS)%r4�legacy�"TF�quote_attr_values�
quote_char�use_best_quote_char�omit_optional_tags�minimize_boolean_attributes�use_trailing_solidus�space_before_trailing_solidus�escape_lt_in_attrs�
escape_rcdata�resolve_entities�alphabetical_attributes�inject_meta_charset�strip_whitespace�sanitizec	Kszt|�t|j�}t|�dkr2tdtt|����d|kr@d|_x(|jD]}t|||j|t	||���qHWg|_
d|_dS)a6	Initialize HTMLSerializer.

        Keyword options (default given first unless specified) include:

        inject_meta_charset=True|False
          Whether it insert a meta element to define the character set of the
          document.
        quote_attr_values="legacy"|"spec"|"always"
          Whether to quote attribute values that don't require quoting
          per legacy browser behaviour, when required by the standard, or always.
        quote_char=u'"'|u"'"
          Use given quote character for attribute quoting. Default is to
          use double quote unless attribute value contains a double quote,
          in which case single quotes are used instead.
        escape_lt_in_attrs=False|True
          Whether to escape < in attribute values.
        escape_rcdata=False|True
          Whether to escape characters that need to be escaped within normal
          elements within rcdata elements such as style.
        resolve_entities=True|False
          Whether to resolve named character entities that appear in the
          source tree. The XML predefined entities &lt; &gt; &amp; &quot; &apos;
          are unaffected by this setting.
        strip_whitespace=False|True
          Whether to remove semantically meaningless whitespace. (This
          compresses all whitespace to a single space except within pre.)
        minimize_boolean_attributes=True|False
          Shortens boolean attributes to give just the attribute value,
          for example <input disabled="disabled"> becomes <input disabled>.
        use_trailing_solidus=False|True
          Includes a close-tag slash at the end of the start tag of void
          elements (empty elements whose end tag is forbidden). E.g. <hr/>.
        space_before_trailing_solidus=True|False
          Places a space immediately before the closing slash in a tag
          using a trailing solidus. E.g. <hr />. Requires use_trailing_solidus.
        sanitize=False|True
          Strip all unsafe or unknown constructs from output.
          See `html5lib user documentation`_
        omit_optional_tags=True|False
          Omit start/end tags that are optional.
        alphabetical_attributes=False|True
          Reorder attributes to be in alphabetical order.

        .. _html5lib user documentation: http://code.google.com/p/html5lib/wiki/UserDocumentation
        rz2__init__() got an unexpected keyword argument '%s'r=FN)�	frozenset�options�len�	TypeError�next�iterr>�setattrr$�getattr�errors�strict)�self�kwargsZunexpected_args�attrr/r/r0�__init__ps.zHTMLSerializer.__init__cCs|jr|j|jd�S|SdS)Nr2)r7�encode)rT�stringr/r/r0rX�szHTMLSerializer.encodecCs|jr|j|jd�S|SdS)NrS)r7rX)rTrYr/r/r0�encodeStrict�szHTMLSerializer.encodeStrictNccs�||_d}g|_|r0|jr0ddlm}|||�}|jrJddlm}||�}|jrdddlm}||�}|j	r~ddl
m}||�}|jr�ddlm}||�}�xR|D�]H}|d}|dk�r`d|d}|dr�|d	|d7}n|d
r�|d7}|d
�rJ|d
j
d�d
k�r0|d
j
d�d
k�r*|jd�d}nd}|d||d
|f7}|d7}|j|�Vq�|d5k�r�|dk�sz|�r�|�r�|dj
d�d
k�r�|jd�|j|d�Vn|jt|d��Vq�|d6k�r�|d}	|jd|	�V|	tk�r|j�rd}n|�r|jd��x�|dj�D�]�\\}
}}|}
|}|jd�V|j|
�V|j�s�|
tj|	t��k�r"|
tjdt��k�r"|jd�V|jdk�s�t|�d
k�r�d}n@|jd k�r�tj|�dk	}n$|jd!k�r�tj|�dk	}ntd"��|jd#d$�}|j �r|jd%d&�}|�r�|j!}|j"�rTd|k�r<d|k�r<d}nd|k�rTd|k�rTd}|dk�rl|jdd'�}n|jdd(�}|j|�V|j|�V|j|�Vn|j|�V�q"W|	t#k�r�|j$�r�|j%�r�|jd)�Vn|jd*�V|jd�Vq�|d+k�r6|d}	|	tk�rd}n|�r$|jd�|jd,|	�Vq�|d-k�rx|d}|j
d.�d
k�rb|jd/�|jd0|d�Vq�|d1k�r�|d}	|	d2}|t&k�r�|jd3|	�|j'�r�|t(k�r�t&|}nd4|	}|j|�Vq�|j|d�q�WdS)7NFr)�Filter�typeZDoctypez<!DOCTYPE %s�nameZpublicIdz PUBLIC "%s"ZsystemIdz SYSTEMr;r�'zASystem identifer contains both single and double quote charactersz %s%s%s�>�
Characters�SpaceCharacters�dataz</zUnexpected </ in CDATA�StartTag�EmptyTagz<%sTz+Unexpected child element of a CDATA element� r�=�always�specr:z?quote_attr_values must be one of: 'always', 'spec', or 'legacy'rz&amp;�<z&lt;z&#39;z&quot;z /�/ZEndTagz</%s>�Commentz--zComment contains --z	<!--%s-->ZEntityrzEntity %s not recognizedz&%s;)r`ra)rcrd))r7rRrGZfilters.inject_meta_charsetr[rFZfilters.alphabeticalattributesrHZfilters.whitespacerIZfilters.sanitizerr?Zfilters.optionaltags�find�serializeErrorrZrXrrrD�itemsr@r
r$�tupler<rL�_quoteAttributeSpec�search�_quoteAttributeLegacy�
ValueError�replacerCr=r>r	rArBr
rEr)rT�
treewalkerr7Zin_cdatar[�tokenr\Zdoctyper=r]�_Z	attr_nameZ
attr_value�k�vZ
quote_attrrb�keyr/r/r0r9�s�


















zHTMLSerializer.serializecCs2|rdjt|j||���Sdjt|j|���SdS)N�r)r'�listr9)rTrur7r/r/r0r5?szHTMLSerializer.render�XXX ERROR MESSAGE NEEDEDcCs|jj|�|jrt�dS)N)rRr"rS�SerializeError)rTrbr/r/r0rmEszHTMLSerializer.serializeError)r<r=r>r?r@rArBrCrDrErFrGrHrI)N)N)r})�__name__�
__module__�__qualname__r<r=r>r?r@rArBrCrDrErFrGrHrIrKrWrXrZr9r5rmr/r/r/r0r4Qs68


r4c@seZdZdZdS)r~zError in serialized treeN)rr�r��__doc__r/r/r/r0r~Lsr~)r3N)+Z
__future__rrrZpip._vendor.sixr�re�codecsrrZ	constantsr	r
rrr
rrrrZxml.sax.saxutilsrr'Z_quoteAttributeSpecChars�compilerprrr#rLZ_is_ucs4r|rnrxryr r!�islowerr1r9rr4�	Exceptionr~r/r/r/r0�<module>s:
	

|_vendor/html5lib/__pycache__/_tokenizer.cpython-36.pyc000064400000122141151733136420016735 0ustar003

�Pf$+�@s�ddlmZmZmZddlmZddlmZddl	m
Z
ddl	mZddl	mZm
Z
ddl	mZmZmZdd	l	mZmZdd
l	mZddlmZddlmZee�ZGd
d�de�ZdS)�)�absolute_import�division�unicode_literals)�unichr)�deque�)�spaceCharacters)�entities)�asciiLetters�asciiUpper2Lower)�digits�	hexDigits�EOF)�
tokenTypes�
tagTokenTypes)�replacementCharacters)�HTMLInputStream)�TriecsdeZdZdZd��fdd�	Zdd�Zdd�Zd�d
d�Zdd
�Zdd�Z	dd�Z
dd�Zdd�Zdd�Z
dd�Zdd�Zdd�Zdd�Zd d!�Zd"d#�Zd$d%�Zd&d'�Zd(d)�Zd*d+�Zd,d-�Zd.d/�Zd0d1�Zd2d3�Zd4d5�Zd6d7�Zd8d9�Zd:d;�Zd<d=�Z d>d?�Z!d@dA�Z"dBdC�Z#dDdE�Z$dFdG�Z%dHdI�Z&dJdK�Z'dLdM�Z(dNdO�Z)dPdQ�Z*dRdS�Z+dTdU�Z,dVdW�Z-dXdY�Z.dZd[�Z/d\d]�Z0d^d_�Z1d`da�Z2dbdc�Z3ddde�Z4dfdg�Z5dhdi�Z6djdk�Z7dldm�Z8dndo�Z9dpdq�Z:drds�Z;dtdu�Z<dvdw�Z=dxdy�Z>dzd{�Z?d|d}�Z@d~d�ZAd�d��ZBd�d��ZCd�d��ZDd�d��ZEd�d��ZFd�d��ZGd�d��ZHd�d��ZId�d��ZJd�d��ZKd�d��ZL�ZMS)��
HTMLTokenizera	 This class takes care of tokenizing HTML.

    * self.currentToken
      Holds the token that is currently being processed.

    * self.state
      Holds a reference to the method to be invoked... XXX

    * self.stream
      Points to HTMLInputStream object.
    NcsFt|f|�|_||_d|_g|_|j|_d|_d|_t	t
|�j�dS)NF)r�stream�parserZ
escapeFlagZ
lastFourChars�	dataState�state�escape�currentToken�superr�__init__)�selfrr�kwargs)�	__class__�� /usr/lib/python3.6/_tokenizer.pyr"szHTMLTokenizer.__init__ccs\tg�|_xL|j�rVx&|jjr:td|jjjd�d�VqWx|jrR|jj�Vq>WqWdS)z� This is where the magic happens.

        We do our usually processing through the states and when we have a token
        to return we yield the token which pauses processing until the next token
        is requested.
        �
ParseErrorr)�type�dataN)r�
tokenQueuerr�errorsr�pop�popleft)rr r r!�__iter__1s


zHTMLTokenizer.__iter__c	%Cs(t}d}|rt}d}g}|jj�}x(||krJ|tk	rJ|j|�|jj�}q$Wtdj|�|�}|tkr�t|}|j	jt
ddd|id���nld|ko�d	kns�|d
kr�d}|j	jt
ddd|id���n(d|ko�d
kn�s�d|ko�dkn�s�d|k�odkn�s�d|k�o4dkn�s�|tddddddddddddd d!d"d#d$d%d&d'd(d)d*d+d,d-d.d/d0d1d2d3d4d5d
g#�k�r�|j	jt
ddd|id��yt|�}Wn>t
k
�r�|d6}td|d?B�td7|d8@B�}YnX|d9k�r$|j	jt
dd:d;��|jj|�|S)<z�This function returns either U+FFFD or the character based on the
        decimal or hexadecimal representation. It also discards ";" if present.
        If not present self.tokenQueue.append({"type": tokenTypes["ParseError"]}) is invoked.
        �
��r"z$illegal-codepoint-for-numeric-entity�	charAsInt)r#r$�datavarsi�i��i��u�r�����i�i��i��i��i��i��i��i��i��i��i��i��i��i��i��i��i��i��i��i��i��	i��	i��
i��
i��i��i��i��i��
i��
i��i��i��i��i��ii�i��;z numeric-entity-without-semicolon)r#r$)rr
r�charr�append�int�joinrr%r�	frozenset�chr�
ValueError�unget)	rZisHexZallowed�radix�	charStack�cr-r6�vr r r!�consumeNumberEntityAs`

&

z!HTMLTokenizer.consumeNumberEntityFc	
Cs�d}|jj�g}|dtksB|dtddfksB|dk	rV||dkrV|jj|d��n"|ddk�rd}|j|jj��|ddkr�d	}|j|jj��|r�|dtks�|r�|dtkr�|jj|d�|j|�}n4|j	jt
d
dd��|jj|j��dd
j|�}�njx8|dtk	�rFt
jd
j|���s2P|j|jj���qWy$t
jd
j|dd���}t|�}Wntk
�r�d}YnX|dk	�rD|ddk�r�|j	jt
d
dd��|ddk�r|�r||tk�s�||tk�s�||dk�r|jj|j��dd
j|�}n.t|}|jj|j��|d
j||d��7}n4|j	jt
d
dd��|jj|j��dd
j|�}|�r�|jddd|7<n*|tk�r�d}nd}|j	jt
||d��dS)N�&r�<�#Fr�x�XTr"zexpected-numeric-entity)r#r$r,r5znamed-entity-without-semicolon�=zexpected-named-entityr$�SpaceCharacters�
Characters���)rFrGrKrKrKrKrKrKrKrK)rr6rrr=r7r
rrBr%rr'r9�entitiesTrieZhas_keys_with_prefixZlongest_prefix�len�KeyErrorr
r	r)	r�allowedChar�
fromAttribute�outputr?�hexZ
entityNameZentityLengthZ	tokenTyper r r!�
consumeEntity�sf





zHTMLTokenizer.consumeEntitycCs|j|dd�dS)zIThis method replaces the need for "entityInAttributeValueState".
        T)rOrPN)rS)rrOr r r!�processEntityInAttribute�sz&HTMLTokenizer.processEntityInAttributecCs�|j}|dtkrp|djt�|d<|dtdkrp|drR|jjtddd��|drp|jjtdd	d��|jj|�|j|_d
S)z�This method is a generic handler for emitting the tags. It also sets
        the state to "data" because that's what's needed after a token has been
        emitted.
        r#�name�EndTagr$r"zattributes-in-end-tag)r#r$�selfClosingzself-closing-flag-on-end-tagN)	rr�	translaterrr%r7rr)r�tokenr r r!�emitCurrentToken�s

zHTMLTokenizer.emitCurrentTokencCs�|jj�}|dkr|j|_n�|dkr.|j|_n�|dkrd|jjtddd��|jjtddd��n`|tkrpdS|t	kr�|jjtd	||jj
t	d
�d��n&|jj
d�}|jjtd||d��d
S)NrCrD�r"zinvalid-codepoint)r#r$rJFrIT)rCrDr[)rr6�entityDataStater�tagOpenStater%r7rrr�
charsUntil)rr$�charsr r r!r�s&



zHTMLTokenizer.dataStatecCs|j�|j|_dS)NT)rSrr)rr r r!r\szHTMLTokenizer.entityDataStatecCs�|jj�}|dkr|j|_n�|dkr.|j|_n�|tkr:dS|dkrp|jjtddd��|jjtdd	d��nT|t	kr�|jjtd
||jj
t	d�d��n&|jj
d�}|jjtd||d��dS)
NrCrDFr[r"zinvalid-codepoint)r#r$rJu�rIT)rCrDr[)rr6�characterReferenceInRcdatar�rcdataLessThanSignStaterr%r7rrr^)rr$r_r r r!�rcdataStates&



zHTMLTokenizer.rcdataStatecCs|j�|j|_dS)NT)rSrbr)rr r r!r`1sz(HTMLTokenizer.characterReferenceInRcdatacCs�|jj�}|dkr|j|_nh|dkrR|jjtddd��|jjtddd��n2|tkr^dS|jjd
�}|jjtd||d��d	S)NrDr[r"zinvalid-codepoint)r#r$rJu�FT)rDr[)	rr6�rawtextLessThanSignStaterr%r7rrr^)rr$r_r r r!�rawtextState6s


zHTMLTokenizer.rawtextStatecCs�|jj�}|dkr|j|_nh|dkrR|jjtddd��|jjtddd��n2|tkr^dS|jjd
�}|jjtd||d��d	S)NrDr[r"zinvalid-codepoint)r#r$rJu�FT)rDr[)	rr6�scriptDataLessThanSignStaterr%r7rrr^)rr$r_r r r!�scriptDataStateHs


zHTMLTokenizer.scriptDataStatecCsr|jj�}|tkrdS|dkrL|jjtddd��|jjtddd��n"|jjtd||jjd�d��dS)	NFr[r"zinvalid-codepoint)r#r$rJu�T)rr6rr%r7rr^)rr$r r r!�plaintextStateZs

zHTMLTokenizer.plaintextStatecCs|jj�}|dkr|j|_n�|dkr.|j|_n�|tkrVtd|gddd�|_|j|_n�|dkr�|j	j
tddd	��|j	j
td
dd	��|j|_nt|dkr�|j	j
tdd
d	��|jj|�|j
|_n@|j	j
tddd	��|j	j
td
dd	��|jj|�|j|_dS)N�!�/ZStartTagF)r#rUr$rWZselfClosingAcknowledged�>r"z'expected-tag-name-but-got-right-bracket)r#r$rJz<>�?z'expected-tag-name-but-got-question-markzexpected-tag-namerDT)rr6�markupDeclarationOpenStater�closeTagOpenStater
rr�tagNameStater%r7rr=�bogusCommentState)rr$r r r!r]is6









zHTMLTokenizer.tagOpenStatecCs�|jj�}|tkr0td|gdd�|_|j|_n�|dkrX|jjtddd��|j	|_nn|t
kr�|jjtddd��|jjtd	d
d��|j	|_n0|jjtddd|id
��|jj|�|j|_dS)NrVF)r#rUr$rWrjr"z*expected-closing-tag-but-got-right-bracket)r#r$z expected-closing-tag-but-got-eofrJz</z!expected-closing-tag-but-got-charr$)r#r$r.T)
rr6r
rrrnrr%r7rrr=ro)rr$r r r!rm�s(





zHTMLTokenizer.closeTagOpenStatecCs�|jj�}|tkr|j|_n�|dkr.|j�n~|tkrV|jjt	ddd��|j
|_nV|dkrh|j|_nD|dkr�|jjt	ddd��|jdd	7<n|jd|7<d
S)Nrjr"zeof-in-tag-name)r#r$rir[zinvalid-codepointrUu�T)
rr6r�beforeAttributeNameStaterrZrr%r7rr�selfClosingStartTagStater)rr$r r r!rn�s"






zHTMLTokenizer.tagNameStatecCsP|jj�}|dkr"d|_|j|_n*|jjtddd��|jj|�|j	|_dS)Nrir,rJrD)r#r$T)
rr6�temporaryBuffer�rcdataEndTagOpenStaterr%r7rr=rb)rr$r r r!ra�s

z%HTMLTokenizer.rcdataLessThanSignStatecCsX|jj�}|tkr*|j|7_|j|_n*|jjtddd��|jj	|�|j
|_dS)NrJz</)r#r$T)rr6r
rr�rcdataEndTagNameStaterr%r7rr=rb)rr$r r r!rs�s

z#HTMLTokenizer.rcdataEndTagOpenStatecCs|jo|jdj�|jj�k}|jj�}|tkrT|rTtd|jgdd�|_|j|_n�|dkr�|r�td|jgdd�|_|j	|_n||dkr�|r�td|jgdd�|_|j
�|j|_nH|tkr�|j|7_n0|j
jtdd|jd	��|jj|�|j|_d
S)NrUrVF)r#rUr$rWrirjrJz</)r#r$T)r�lowerrrrr6rrrprrqrZrr
r%r7r=rb)r�appropriater$r r r!rt�s2



z#HTMLTokenizer.rcdataEndTagNameStatecCsP|jj�}|dkr"d|_|j|_n*|jjtddd��|jj|�|j	|_dS)Nrir,rJrD)r#r$T)
rr6rr�rawtextEndTagOpenStaterr%r7rr=rd)rr$r r r!rc�s

z&HTMLTokenizer.rawtextLessThanSignStatecCsX|jj�}|tkr*|j|7_|j|_n*|jjtddd��|jj	|�|j
|_dS)NrJz</)r#r$T)rr6r
rr�rawtextEndTagNameStaterr%r7rr=rd)rr$r r r!rw�s

z$HTMLTokenizer.rawtextEndTagOpenStatecCs|jo|jdj�|jj�k}|jj�}|tkrT|rTtd|jgdd�|_|j|_n�|dkr�|r�td|jgdd�|_|j	|_n||dkr�|r�td|jgdd�|_|j
�|j|_nH|tkr�|j|7_n0|j
jtdd|jd	��|jj|�|j|_d
S)NrUrVF)r#rUr$rWrirjrJz</)r#r$T)rrurrrr6rrrprrqrZrr
r%r7r=rd)rrvr$r r r!rxs2



z$HTMLTokenizer.rawtextEndTagNameStatecCsx|jj�}|dkr"d|_|j|_nR|dkrJ|jjtddd��|j|_n*|jjtddd��|jj	|�|j
|_dS)	Nrir,rhrJz<!)r#r$rDT)rr6rr�scriptDataEndTagOpenStaterr%r7r�scriptDataEscapeStartStater=rf)rr$r r r!res


z)HTMLTokenizer.scriptDataLessThanSignStatecCsX|jj�}|tkr*|j|7_|j|_n*|jjtddd��|jj	|�|j
|_dS)NrJz</)r#r$T)rr6r
rr�scriptDataEndTagNameStaterr%r7rr=rf)rr$r r r!ry,s

z'HTMLTokenizer.scriptDataEndTagOpenStatecCs|jo|jdj�|jj�k}|jj�}|tkrT|rTtd|jgdd�|_|j|_n�|dkr�|r�td|jgdd�|_|j	|_n||dkr�|r�td|jgdd�|_|j
�|j|_nH|tkr�|j|7_n0|j
jtdd|jd	��|jj|�|j|_d
S)NrUrVF)r#rUr$rWrirjrJz</)r#r$T)rrurrrr6rrrprrqrZrr
r%r7r=rf)rrvr$r r r!r{7s2



z'HTMLTokenizer.scriptDataEndTagNameStatecCsJ|jj�}|dkr2|jjtddd��|j|_n|jj|�|j|_dS)N�-rJ)r#r$T)	rr6r%r7r�scriptDataEscapeStartDashStaterr=rf)rr$r r r!rzSs

z(HTMLTokenizer.scriptDataEscapeStartStatecCsJ|jj�}|dkr2|jjtddd��|j|_n|jj|�|j|_dS)Nr|rJ)r#r$T)	rr6r%r7r�scriptDataEscapedDashDashStaterr=rf)rr$r r r!r}]s

z,HTMLTokenizer.scriptDataEscapeStartDashStatecCs�|jj�}|dkr2|jjtddd��|j|_n�|dkrD|j|_nn|dkrz|jjtddd��|jjtddd��n8|tkr�|j	|_n&|jj
d
�}|jjtd||d��d	S)Nr|rJ)r#r$rDr[r"zinvalid-codepointu�T)rDr|r[)rr6r%r7r�scriptDataEscapedDashStater�"scriptDataEscapedLessThanSignStaterrr^)rr$r_r r r!�scriptDataEscapedStategs"




z$HTMLTokenizer.scriptDataEscapedStatecCs�|jj�}|dkr2|jjtddd��|j|_n�|dkrD|j|_nn|dkr�|jjtddd��|jjtddd��|j|_n0|t	kr�|j
|_n|jjtd|d��|j|_d	S)
Nr|rJ)r#r$rDr[r"zinvalid-codepointu�T)rr6r%r7rr~rr�r�rr)rr$r r r!r{s"






z(HTMLTokenizer.scriptDataEscapedDashStatecCs�|jj�}|dkr*|jjtddd��n�|dkr<|j|_n�|dkrd|jjtddd��|j|_nn|dkr�|jjtddd��|jjtdd	d��|j|_n0|t	kr�|j
|_n|jjtd|d��|j|_d
S)Nr|rJ)r#r$rDrjr[r"zinvalid-codepointu�T)rr6r%r7rr�rrfr�rr)rr$r r r!r~�s&






z,HTMLTokenizer.scriptDataEscapedDashDashStatecCs�|jj�}|dkr"d|_|j|_n\|tkrT|jjtdd|d��||_|j	|_n*|jjtddd��|jj
|�|j|_dS)Nrir,rJrD)r#r$T)rr6rr� scriptDataEscapedEndTagOpenStaterr
r%r7r� scriptDataDoubleEscapeStartStater=r�)rr$r r r!r��s


z0HTMLTokenizer.scriptDataEscapedLessThanSignStatecCsP|jj�}|tkr"||_|j|_n*|jjtddd��|jj	|�|j
|_dS)NrJz</)r#r$T)rr6r
rr� scriptDataEscapedEndTagNameStaterr%r7rr=r�)rr$r r r!r��s

z.HTMLTokenizer.scriptDataEscapedEndTagOpenStatecCs|jo|jdj�|jj�k}|jj�}|tkrT|rTtd|jgdd�|_|j|_n�|dkr�|r�td|jgdd�|_|j	|_n||dkr�|r�td|jgdd�|_|j
�|j|_nH|tkr�|j|7_n0|j
jtdd|jd	��|jj|�|j|_d
S)NrUrVF)r#rUr$rWrirjrJz</)r#r$T)rrurrrr6rrrprrqrZrr
r%r7r=r�)rrvr$r r r!r��s2



z.HTMLTokenizer.scriptDataEscapedEndTagNameStatecCs�|jj�}|ttd�BkrR|jjtd|d��|jj�dkrH|j	|_
q�|j|_
nB|tkr�|jjtd|d��|j|7_n|jj
|�|j|_
dS)NrirjrJ)r#r$�scriptT)rirj)rr6rr:r%r7rrrru�scriptDataDoubleEscapedStaterr�r
r=)rr$r r r!r��s


z.HTMLTokenizer.scriptDataDoubleEscapeStartStatecCs�|jj�}|dkr2|jjtddd��|j|_n�|dkrZ|jjtddd��|j|_nt|dkr�|jjtddd��|jjtddd��n>|tkr�|jjtdd	d��|j	|_n|jjtd|d��d
S)Nr|rJ)r#r$rDr[r"zinvalid-codepointu�zeof-in-script-in-scriptT)
rr6r%r7r� scriptDataDoubleEscapedDashStater�(scriptDataDoubleEscapedLessThanSignStaterr)rr$r r r!r��s$





z*HTMLTokenizer.scriptDataDoubleEscapedStatecCs�|jj�}|dkr2|jjtddd��|j|_n�|dkrZ|jjtddd��|j|_n�|dkr�|jjtddd��|jjtddd��|j|_nF|t	kr�|jjtdd	d��|j
|_n|jjtd|d��|j|_d
S)Nr|rJ)r#r$rDr[r"zinvalid-codepointu�zeof-in-script-in-scriptT)rr6r%r7r�$scriptDataDoubleEscapedDashDashStaterr�r�rr)rr$r r r!r�s(







z.HTMLTokenizer.scriptDataDoubleEscapedDashStatecCs|jj�}|dkr*|jjtddd��n�|dkrR|jjtddd��|j|_n�|dkrz|jjtddd��|j|_n�|dkr�|jjtddd��|jjtdd	d��|j|_nF|t	kr�|jjtdd
d��|j
|_n|jjtd|d��|j|_dS)Nr|rJ)r#r$rDrjr[r"zinvalid-codepointu�zeof-in-script-in-scriptT)rr6r%r7rr�rrfr�rr)rr$r r r!r�s,







z2HTMLTokenizer.scriptDataDoubleEscapedDashDashStatecCsP|jj�}|dkr8|jjtddd��d|_|j|_n|jj|�|j	|_dS)NrirJ)r#r$r,T)
rr6r%r7rrr�scriptDataDoubleEscapeEndStaterr=r�)rr$r r r!r�0s

z6HTMLTokenizer.scriptDataDoubleEscapedLessThanSignStatecCs�|jj�}|ttd�BkrR|jjtd|d��|jj�dkrH|j	|_
q�|j|_
nB|tkr�|jjtd|d��|j|7_n|jj
|�|j|_
dS)NrirjrJ)r#r$r�T)rirj)rr6rr:r%r7rrrrur�rr�r
r=)rr$r r r!r�;s


z,HTMLTokenizer.scriptDataDoubleEscapeEndStatecCs0|jj�}|tkr$|jjtd��n|tkrJ|jdj|dg�|j|_n�|dkr\|j	�n�|dkrn|j
|_n�|dkr�|jjtd
dd��|jdj|dg�|j|_n�|d
kr�|jjtd
dd��|jdjddg�|j|_nF|t
k�r|jjtd
dd��|j|_n|jdj|dg�|j|_dS)NTr$r,rjri�'�"rHrDr"z#invalid-character-in-attribute-name)r#r$r[zinvalid-codepointu�z#expected-attribute-name-but-got-eof)r�r�rHrD)rr6rr^r
rr7�attributeNameStaterrZrqr%rrr)rr$r r r!rpKs6










z&HTMLTokenizer.beforeAttributeNameStatecCs�|jj�}d}d}|dkr&|j|_�n0|tkr^|jddd||jjtd�7<d}�n�|dkrld}n�|tkr~|j|_n�|dkr�|j	|_n�|d	kr�|j
jtd
dd��|jdddd
7<d}n�|dk�r|j
jtd
dd��|jddd|7<d}nH|t
k�r8|j
jtd
dd��|j|_n|jddd|7<d}|�r�|jdddjt�|jddd<xP|jddd�D]:\}}|jddd|k�r�|j
jtd
dd��P�q�W|�r�|j�dS)NTFrHr$rrrjrir[r"zinvalid-codepoint)r#r$u�r�r�rDz#invalid-character-in-attribute-namezeof-in-attribute-namezduplicate-attributerKrK)r�r�rDrKrKrKrKrKrK)rr6�beforeAttributeValueStaterr
rr^r�afterAttributeNameStaterqr%r7rrrrXrrZ)rr$ZleavingThisStateZ	emitTokenrU�_r r r!r�isR








&
z HTMLTokenizer.attributeNameStatecCsF|jj�}|tkr$|jjtd��n|dkr8|j|_�n
|dkrJ|j�n�|tkrp|jdj	|dg�|j
|_n�|dkr�|j|_n�|dkr�|jj	t
dd	d
��|jdj	ddg�|j
|_n�|dk�r�|jj	t
ddd
��|jdj	|dg�|j
|_nF|tk�r&|jj	t
ddd
��|j|_n|jdj	|dg�|j
|_dS)NTrHrjr$r,rir[r"zinvalid-codepoint)r#r$u�r�r�rDz&invalid-character-after-attribute-namezexpected-end-of-tag-but-got-eof)r�r�rD)rr6rr^r�rrZr
rr7r�rqr%rrr)rr$r r r!r��s:











z%HTMLTokenizer.afterAttributeNameStatecCsj|jj�}|tkr$|jjtd��nB|dkr8|j|_�n.|dkrX|j|_|jj|��n|dkrl|j|_�n�|dkr�|j	j
tddd��|j�n�|d	kr�|j	j
tdd
d��|j
dddd
7<|j|_n�|dk�r|j	j
tddd��|j
ddd|7<|j|_nL|tk�rD|j	j
tddd��|j|_n"|j
ddd|7<|j|_dS)NTr�rCr�rjr"z.expected-attribute-value-but-got-right-bracket)r#r$r[zinvalid-codepointr$ru�rHrD�`z"equals-in-unquoted-attribute-valuez$expected-attribute-value-but-got-eofrK)rHrDr�rKrK)rr6rr^�attributeValueDoubleQuotedStater�attributeValueUnQuotedStater=�attributeValueSingleQuotedStater%r7rrZrrr)rr$r r r!r��s>










z'HTMLTokenizer.beforeAttributeValueStatecCs�|jj�}|dkr|j|_n�|dkr0|jd�n�|dkrj|jjtddd��|jdddd	7<nN|t	kr�|jjtdd
d��|j
|_n&|jdd
d||jjd�7<dS)Nr�rCr[r"zinvalid-codepoint)r#r$r$ru�z#eof-in-attribute-value-double-quoteTrKrK)r�rCr[)rr6�afterAttributeValueStaterrTr%r7rrrrr^)rr$r r r!r��s 




z-HTMLTokenizer.attributeValueDoubleQuotedStatecCs�|jj�}|dkr|j|_n�|dkr0|jd�n�|dkrj|jjtddd��|jdddd	7<nN|t	kr�|jjtdd
d��|j
|_n&|jdd
d||jjd�7<dS)Nr�rCr[r"zinvalid-codepoint)r#r$r$ru�z#eof-in-attribute-value-single-quoteTrKrK)r�rCr[)rr6r�rrTr%r7rrrrr^)rr$r r r!r��s 




z-HTMLTokenizer.attributeValueSingleQuotedStatecCs|jj�}|tkr|j|_�n�|dkr2|jd�n�|dkrD|j�n�|dkr~|jjt	dd	d
��|j
ddd|7<n�|d
kr�|jjt	ddd
��|j
dddd7<nV|tkr�|jjt	ddd
��|j|_n.|j
ddd||jj
td�tB�7<dS)NrCrjr�r�rHrDr�r"z0unexpected-character-in-unquoted-attribute-value)r#r$r$rr[zinvalid-codepointu�z eof-in-attribute-value-no-quotesT)r�r�rHrDr�rKrKrK)rCrjr�r�rHrDr�r[)rr6rrprrTrZr%r7rrrrr^r:)rr$r r r!r�s,





z)HTMLTokenizer.attributeValueUnQuotedStatecCs�|jj�}|tkr|j|_n�|dkr.|j�np|dkr@|j|_n^|tkrt|jj	t
ddd��|jj|�|j|_n*|jj	t
ddd��|jj|�|j|_dS)Nrjrir"z$unexpected-EOF-after-attribute-value)r#r$z*unexpected-character-after-attribute-valueT)
rr6rrprrZrqrr%r7rr=r)rr$r r r!r� s"






z&HTMLTokenizer.afterAttributeValueStatecCs�|jj�}|dkr&d|jd<|j�n^|tkrZ|jjtddd��|jj|�|j	|_
n*|jjtddd��|jj|�|j|_
dS)NrjTrWr"z#unexpected-EOF-after-solidus-in-tag)r#r$z)unexpected-character-after-solidus-in-tag)rr6rrZrr%r7rr=rrrp)rr$r r r!rq4s





z&HTMLTokenizer.selfClosingStartTagStatecCsD|jjd�}|jdd�}|jjtd|d��|jj�|j|_dS)Nrjr[u��Comment)r#r$T)	rr^�replacer%r7rr6rr)rr$r r r!roFs
zHTMLTokenizer.bogusCommentStatecCs�|jj�g}|ddkrT|j|jj��|ddkrPtddd�|_|j|_dS�n�|ddkr�d}x.d&D]&}|j|jj��|d'|krjd}PqjW|r�tdddddd�|_|j|_dSn�|d(dk�rH|jdk	�rH|jj	j
�rH|jj	j
d)j|jj	jk�rHd}x2d*D]*}|j|jj��|d+|k�rd}P�qW|�rH|j
|_dS|jjtddd��x|�rz|jj|j���q`W|j|_dS),Nrr|r�r,)r#r$T�d�D�o�Or@�C�t�T�y�Y�p�P�e�EFZDoctype)r#rU�publicId�systemId�correct�[�Ar"zexpected-dashes-or-doctyperKrKrK)r�r��r�r��r@r��r�r��r�r��r�r��r�r�)r�r�r�r�r�r�rKrKrK)r�r�r�r�r�r�rK)rr6r7rr�commentStartStater�doctypeStaterZtreeZopenElements�	namespaceZdefaultNamespace�cdataSectionStater%r=r'ro)rr?�matched�expectedr r r!rlUsR


z(HTMLTokenizer.markupDeclarationOpenStatecCs�|jj�}|dkr|j|_n�|dkrN|jjtddd��|jdd7<n�|dkr�|jjtdd	d��|jj|j�|j|_nP|t	kr�|jjtdd
d��|jj|j�|j|_n|jd|7<|j
|_dS)Nr|r[r"zinvalid-codepoint)r#r$r$u�rjzincorrect-commentzeof-in-commentT)rr6�commentStartDashStaterr%r7rrrr�commentState)rr$r r r!r��s(






zHTMLTokenizer.commentStartStatecCs�|jj�}|dkr|j|_n�|dkrN|jjtddd��|jdd7<n�|dkr�|jjtdd	d��|jj|j�|j|_nT|t	kr�|jjtdd
d��|jj|j�|j|_n|jdd|7<|j
|_dS)Nr|r[r"zinvalid-codepoint)r#r$r$u-�rjzincorrect-commentzeof-in-commentT)rr6�commentEndStaterr%r7rrrrr�)rr$r r r!r��s(






z#HTMLTokenizer.commentStartDashStatecCs�|jj�}|dkr|j|_n�|dkrN|jjtddd��|jdd7<nT|tkr�|jjtddd��|jj|j�|j	|_n|jd||jj
d
�7<d	S)Nr|r[r"zinvalid-codepoint)r#r$r$u�zeof-in-commentT)r|r[)rr6�commentEndDashStaterr%r7rrrrr^)rr$r r r!r��s




zHTMLTokenizer.commentStatecCs�|jj�}|dkr|j|_n�|dkrV|jjtddd��|jdd7<|j|_nT|t	kr�|jjtddd��|jj|j�|j
|_n|jdd|7<|j|_d	S)
Nr|r[r"zinvalid-codepoint)r#r$r$u-�zeof-in-comment-end-dashT)rr6r�rr%r7rrr�rr)rr$r r r!r��s 





z!HTMLTokenizer.commentEndDashStatecCs,|jj�}|dkr*|jj|j�|j|_n�|dkrd|jjtddd��|jdd7<|j|_n�|dkr�|jjtdd	d��|j	|_n�|d
kr�|jjtddd��|jd|7<nj|t
kr�|jjtddd��|jj|j�|j|_n4|jjtdd
d��|jdd|7<|j|_dS)Nrjr[r"zinvalid-codepoint)r#r$r$u--�rhz,unexpected-bang-after-double-dash-in-commentr|z,unexpected-dash-after-double-dash-in-commentzeof-in-comment-double-dashzunexpected-char-in-commentz--T)rr6r%r7rrrrr��commentEndBangStater)rr$r r r!r��s6









zHTMLTokenizer.commentEndStatecCs�|jj�}|dkr*|jj|j�|j|_n�|dkrN|jdd7<|j|_n�|dkr�|jjtddd��|jdd	7<|j	|_nT|t
kr�|jjtdd
d��|jj|j�|j|_n|jdd|7<|j	|_dS)Nrjr|r$z--!r[r"zinvalid-codepoint)r#r$u--!�zeof-in-comment-end-bang-stateT)rr6r%r7rrrr�rr�r)rr$r r r!r��s(






z!HTMLTokenizer.commentEndBangStatecCs�|jj�}|tkr|j|_nj|tkr\|jjtddd��d|j	d<|jj|j	�|j
|_n*|jjtddd��|jj|�|j|_dS)Nr"z!expected-doctype-name-but-got-eof)r#r$Fr�zneed-space-after-doctypeT)rr6r�beforeDoctypeNameStaterrr%r7rrrr=)rr$r r r!r�s





zHTMLTokenizer.doctypeStatecCs�|jj�}|tkrn�|dkrT|jjtddd��d|jd<|jj|j�|j|_n�|dkr�|jjtddd��d	|jd
<|j	|_nR|t
kr�|jjtddd��d|jd<|jj|j�|j|_n||jd
<|j	|_dS)
Nrjr"z+expected-doctype-name-but-got-right-bracket)r#r$Fr�r[zinvalid-codepointu�rUz!expected-doctype-name-but-got-eofT)rr6rr%r7rrrr�doctypeNameStater)rr$r r r!r�s.










z$HTMLTokenizer.beforeDoctypeNameStatecCs|jj�}|tkr2|jdjt�|jd<|j|_n�|dkrh|jdjt�|jd<|jj	|j�|j
|_n�|dkr�|jj	tddd��|jdd7<|j|_nh|t
kr�|jj	tddd��d	|jd
<|jdjt�|jd<|jj	|j�|j
|_n|jd|7<dS)NrUrjr[r"zinvalid-codepoint)r#r$u�zeof-in-doctype-nameFr�T)rr6rrrXr�afterDoctypeNameStaterr%r7rrr�r)rr$r r r!r�6s,







zHTMLTokenizer.doctypeNameStatecCsR|jj�}|tkr�n8|dkr8|jj|j�|j|_�n|tkr�d|jd<|jj	|�|jjt
ddd��|jj|j�|j|_�n�|d!kr�d	}x$d'D]}|jj�}||kr�d}Pq�W|r�|j|_d	SnJ|d(k�rd	}x(d.D] }|jj�}||k�r�d}P�q�W|�r|j|_d	S|jj	|�|jjt
ddd|id ��d|jd<|j
|_d	S)/NrjFr�r"zeof-in-doctype)r#r$r�r�T�u�U�b�B�l�L�i�Ir@r��s�Sr�r�r�r�r�r��m�Mz*expected-space-or-right-bracket-in-doctyper$)r#r$r.)r�r��r�r��r�r��r�r��r�r��r@r�)r�r�r�r�r�)r�r��r�r��r�r��r�r��r�r��r�r�)r�r�r�r�r�)rr6rr%r7rrrrr=r�afterDoctypePublicKeywordState�afterDoctypeSystemKeywordState�bogusDoctypeState)rr$r�r�r r r!r�OsT







z#HTMLTokenizer.afterDoctypeNameStatecCs�|jj�}|tkr|j|_n�|d
krP|jjtddd��|jj|�|j|_nT|t	kr�|jjtddd��d|j
d<|jj|j
�|j|_n|jj|�|j|_d	S)Nr�r�r"zunexpected-char-in-doctype)r#r$zeof-in-doctypeFr�T)r�r�)rr6r�"beforeDoctypePublicIdentifierStaterr%r7rr=rrr)rr$r r r!r��s"






z,HTMLTokenizer.afterDoctypePublicKeywordStatecCs�|jj�}|tkrn�|dkr0d|jd<|j|_n�|dkrLd|jd<|j|_n�|dkr�|jjt	ddd��d	|jd
<|jj|j�|j
|_nh|tkr�|jjt	ddd��d	|jd
<|jj|j�|j
|_n(|jjt	ddd��d	|jd
<|j|_d
S)Nr�r,r�r�rjr"zunexpected-end-of-doctype)r#r$Fr�zeof-in-doctypezunexpected-char-in-doctypeT)
rr6rr�(doctypePublicIdentifierDoubleQuotedStater�(doctypePublicIdentifierSingleQuotedStater%r7rrrr�)rr$r r r!r��s4












z0HTMLTokenizer.beforeDoctypePublicIdentifierStatecCs�|jj�}|dkr|j|_n�|dkrN|jjtddd��|jdd7<n�|dkr�|jjtdd	d��d
|jd<|jj|j�|j|_nR|t	kr�|jjtddd��d
|jd<|jj|j�|j|_n|jd|7<d
S)Nr�r[r"zinvalid-codepoint)r#r$r�u�rjzunexpected-end-of-doctypeFr�zeof-in-doctypeT)
rr6�!afterDoctypePublicIdentifierStaterr%r7rrrr)rr$r r r!r��s*








z6HTMLTokenizer.doctypePublicIdentifierDoubleQuotedStatecCs�|jj�}|dkr|j|_n�|dkrN|jjtddd��|jdd7<n�|dkr�|jjtdd	d��d
|jd<|jj|j�|j|_nR|t	kr�|jjtddd��d
|jd<|jj|j�|j|_n|jd|7<d
S)Nr�r[r"zinvalid-codepoint)r#r$r�u�rjzunexpected-end-of-doctypeFr�zeof-in-doctypeT)
rr6r�rr%r7rrrr)rr$r r r!r��s*








z6HTMLTokenizer.doctypePublicIdentifierSingleQuotedStatecCs|jj�}|tkr|j|_n�|dkr<|jj|j�|j|_n�|dkrn|jjt	ddd��d|jd<|j
|_n�|dkr�|jjt	ddd��d|jd<|j|_nh|tkr�|jjt	dd	d��d
|jd<|jj|j�|j|_n(|jjt	ddd��d
|jd<|j
|_dS)
Nrjr�r"zunexpected-char-in-doctype)r#r$r,r�r�zeof-in-doctypeFr�T)rr6r�-betweenDoctypePublicAndSystemIdentifiersStaterr%r7rrr�(doctypeSystemIdentifierDoubleQuotedState�(doctypeSystemIdentifierSingleQuotedStaterr�)rr$r r r!r��s6













z/HTMLTokenizer.afterDoctypePublicIdentifierStatecCs�|jj�}|tkrn�|dkr4|jj|j�|j|_n�|dkrPd|jd<|j|_n�|dkrld|jd<|j	|_nh|t
kr�|jjtddd��d	|jd
<|jj|j�|j|_n(|jjtddd��d	|jd
<|j|_dS)
Nrjr�r,r�r�r"zeof-in-doctype)r#r$Fr�zunexpected-char-in-doctypeT)
rr6rr%r7rrrr�r�rrr�)rr$r r r!r�s.










z;HTMLTokenizer.betweenDoctypePublicAndSystemIdentifiersStatecCs�|jj�}|tkr|j|_n�|d
krP|jjtddd��|jj|�|j|_nT|t	kr�|jjtddd��d|j
d<|jj|j
�|j|_n|jj|�|j|_d	S)Nr�r�r"zunexpected-char-in-doctype)r#r$zeof-in-doctypeFr�T)r�r�)rr6r�"beforeDoctypeSystemIdentifierStaterr%r7rr=rrr)rr$r r r!r�s"






z,HTMLTokenizer.afterDoctypeSystemKeywordStatecCs�|jj�}|tkrn�|dkr0d|jd<|j|_n�|dkrLd|jd<|j|_n�|dkr�|jjt	ddd��d	|jd
<|jj|j�|j
|_nh|tkr�|jjt	ddd��d	|jd
<|jj|j�|j
|_n(|jjt	ddd��d	|jd
<|j|_dS)
Nr�r,r�r�rjr"zunexpected-char-in-doctype)r#r$Fr�zeof-in-doctypeT)
rr6rrr�rr�r%r7rrrr�)rr$r r r!r�/s4












z0HTMLTokenizer.beforeDoctypeSystemIdentifierStatecCs�|jj�}|dkr|j|_n�|dkrN|jjtddd��|jdd7<n�|dkr�|jjtdd	d��d
|jd<|jj|j�|j|_nR|t	kr�|jjtddd��d
|jd<|jj|j�|j|_n|jd|7<d
S)Nr�r[r"zinvalid-codepoint)r#r$r�u�rjzunexpected-end-of-doctypeFr�zeof-in-doctypeT)
rr6�!afterDoctypeSystemIdentifierStaterr%r7rrrr)rr$r r r!r�Ls*








z6HTMLTokenizer.doctypeSystemIdentifierDoubleQuotedStatecCs�|jj�}|dkr|j|_n�|dkrN|jjtddd��|jdd7<n�|dkr�|jjtdd	d��d
|jd<|jj|j�|j|_nR|t	kr�|jjtddd��d
|jd<|jj|j�|j|_n|jd|7<d
S)Nr�r[r"zinvalid-codepoint)r#r$r�u�rjzunexpected-end-of-doctypeFr�zeof-in-doctypeT)
rr6r�rr%r7rrrr)rr$r r r!r�ds*








z6HTMLTokenizer.doctypeSystemIdentifierSingleQuotedStatecCs�|jj�}|tkrn~|dkr4|jj|j�|j|_n^|tkrt|jjt	ddd��d|jd<|jj|j�|j|_n|jjt	ddd��|j
|_dS)	Nrjr"zeof-in-doctype)r#r$Fr�zunexpected-char-in-doctypeT)rr6rr%r7rrrrrr�)rr$r r r!r�|s 





z/HTMLTokenizer.afterDoctypeSystemIdentifierStatecCsZ|jj�}|dkr*|jj|j�|j|_n,|tkrV|jj|�|jj|j�|j|_ndS)NrjT)	rr6r%r7rrrrr=)rr$r r r!r��s


zHTMLTokenizer.bogusDoctypeStatecCs�g}x�|j|jjd��|j|jjd��|jj�}|tkr@Pq|dksLt�|ddd�dkrx|ddd�|d<Pq|j|�qWdj|�}|jd�}|dkr�x&t|�D]}|j	jt
d	d
d��q�W|jdd�}|r�|j	jt
d
|d��|j|_
dS)N�]rjr�z]]r,r[rr"zinvalid-codepoint)r#r$u�rJTrK���rKr�rK)r7rr^r6r�AssertionErrorr9�count�ranger%rr�rr)rr$r6Z	nullCountr�r r r!r��s0



zHTMLTokenizer.cdataSectionState)N)NF)N�__name__�
__module__�__qualname__�__doc__rr)rBrSrTrZrr\rbr`rdrfrgr]rmrnrarsrtrcrwrxreryr{rzr}r�rr~r�r�r�r�r�r�r�r�r�rpr�r�r�r�r�r�r�rqrorlr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r��
__classcell__r r )rr!rs�H
P#

6 "-3rN)Z
__future__rrrZpip._vendor.sixrr;�collectionsrZ	constantsrr	r
rrr
rrrrZ_inputstreamrZ_trierrL�objectrr r r r!�<module>s_vendor/html5lib/__pycache__/_tokenizer.cpython-36.opt-1.pyc000064400000122103151733136420017672 0ustar003

�Pf$+�@s�ddlmZmZmZddlmZddlmZddl	m
Z
ddl	mZddl	mZm
Z
ddl	mZmZmZdd	l	mZmZdd
l	mZddlmZddlmZee�ZGd
d�de�ZdS)�)�absolute_import�division�unicode_literals)�unichr)�deque�)�spaceCharacters)�entities)�asciiLetters�asciiUpper2Lower)�digits�	hexDigits�EOF)�
tokenTypes�
tagTokenTypes)�replacementCharacters)�HTMLInputStream)�TriecsdeZdZdZd��fdd�	Zdd�Zdd�Zd�d
d�Zdd
�Zdd�Z	dd�Z
dd�Zdd�Zdd�Z
dd�Zdd�Zdd�Zdd�Zd d!�Zd"d#�Zd$d%�Zd&d'�Zd(d)�Zd*d+�Zd,d-�Zd.d/�Zd0d1�Zd2d3�Zd4d5�Zd6d7�Zd8d9�Zd:d;�Zd<d=�Z d>d?�Z!d@dA�Z"dBdC�Z#dDdE�Z$dFdG�Z%dHdI�Z&dJdK�Z'dLdM�Z(dNdO�Z)dPdQ�Z*dRdS�Z+dTdU�Z,dVdW�Z-dXdY�Z.dZd[�Z/d\d]�Z0d^d_�Z1d`da�Z2dbdc�Z3ddde�Z4dfdg�Z5dhdi�Z6djdk�Z7dldm�Z8dndo�Z9dpdq�Z:drds�Z;dtdu�Z<dvdw�Z=dxdy�Z>dzd{�Z?d|d}�Z@d~d�ZAd�d��ZBd�d��ZCd�d��ZDd�d��ZEd�d��ZFd�d��ZGd�d��ZHd�d��ZId�d��ZJd�d��ZKd�d��ZL�ZMS)��
HTMLTokenizera	 This class takes care of tokenizing HTML.

    * self.currentToken
      Holds the token that is currently being processed.

    * self.state
      Holds a reference to the method to be invoked... XXX

    * self.stream
      Points to HTMLInputStream object.
    NcsFt|f|�|_||_d|_g|_|j|_d|_d|_t	t
|�j�dS)NF)r�stream�parserZ
escapeFlagZ
lastFourChars�	dataState�state�escape�currentToken�superr�__init__)�selfrr�kwargs)�	__class__�� /usr/lib/python3.6/_tokenizer.pyr"szHTMLTokenizer.__init__ccs\tg�|_xL|j�rVx&|jjr:td|jjjd�d�VqWx|jrR|jj�Vq>WqWdS)z� This is where the magic happens.

        We do our usually processing through the states and when we have a token
        to return we yield the token which pauses processing until the next token
        is requested.
        �
ParseErrorr)�type�dataN)r�
tokenQueuerr�errorsr�pop�popleft)rr r r!�__iter__1s


zHTMLTokenizer.__iter__c	%Cs(t}d}|rt}d}g}|jj�}x(||krJ|tk	rJ|j|�|jj�}q$Wtdj|�|�}|tkr�t|}|j	jt
ddd|id���nld|ko�d	kns�|d
kr�d}|j	jt
ddd|id���n(d|ko�d
kn�s�d|ko�dkn�s�d|k�odkn�s�d|k�o4dkn�s�|tddddddddddddd d!d"d#d$d%d&d'd(d)d*d+d,d-d.d/d0d1d2d3d4d5d
g#�k�r�|j	jt
ddd|id��yt|�}Wn>t
k
�r�|d6}td|d?B�td7|d8@B�}YnX|d9k�r$|j	jt
dd:d;��|jj|�|S)<z�This function returns either U+FFFD or the character based on the
        decimal or hexadecimal representation. It also discards ";" if present.
        If not present self.tokenQueue.append({"type": tokenTypes["ParseError"]}) is invoked.
        �
��r"z$illegal-codepoint-for-numeric-entity�	charAsInt)r#r$�datavarsi�i��i��u�r�����i�i��i��i��i��i��i��i��i��i��i��i��i��i��i��i��i��i��i��i��i��	i��	i��
i��
i��i��i��i��i��
i��
i��i��i��i��i��ii�i��;z numeric-entity-without-semicolon)r#r$)rr
r�charr�append�int�joinrr%r�	frozenset�chr�
ValueError�unget)	rZisHexZallowed�radix�	charStack�cr-r6�vr r r!�consumeNumberEntityAs`

&

z!HTMLTokenizer.consumeNumberEntityFc	
Cs�d}|jj�g}|dtksB|dtddfksB|dk	rV||dkrV|jj|d��n"|ddk�rd}|j|jj��|ddkr�d	}|j|jj��|r�|dtks�|r�|dtkr�|jj|d�|j|�}n4|j	jt
d
dd��|jj|j��dd
j|�}�njx8|dtk	�rFt
jd
j|���s2P|j|jj���qWy$t
jd
j|dd���}t|�}Wntk
�r�d}YnX|dk	�rD|ddk�r�|j	jt
d
dd��|ddk�r|�r||tk�s�||tk�s�||dk�r|jj|j��dd
j|�}n.t|}|jj|j��|d
j||d��7}n4|j	jt
d
dd��|jj|j��dd
j|�}|�r�|jddd|7<n*|tk�r�d}nd}|j	jt
||d��dS)N�&r�<�#Fr�x�XTr"zexpected-numeric-entity)r#r$r,r5znamed-entity-without-semicolon�=zexpected-named-entityr$�SpaceCharacters�
Characters���)rFrGrKrKrKrKrKrKrKrK)rr6rrr=r7r
rrBr%rr'r9�entitiesTrieZhas_keys_with_prefixZlongest_prefix�len�KeyErrorr
r	r)	r�allowedChar�
fromAttribute�outputr?�hexZ
entityNameZentityLengthZ	tokenTyper r r!�
consumeEntity�sf





zHTMLTokenizer.consumeEntitycCs|j|dd�dS)zIThis method replaces the need for "entityInAttributeValueState".
        T)rOrPN)rS)rrOr r r!�processEntityInAttribute�sz&HTMLTokenizer.processEntityInAttributecCs�|j}|dtkrp|djt�|d<|dtdkrp|drR|jjtddd��|drp|jjtdd	d��|jj|�|j|_d
S)z�This method is a generic handler for emitting the tags. It also sets
        the state to "data" because that's what's needed after a token has been
        emitted.
        r#�name�EndTagr$r"zattributes-in-end-tag)r#r$�selfClosingzself-closing-flag-on-end-tagN)	rr�	translaterrr%r7rr)r�tokenr r r!�emitCurrentToken�s

zHTMLTokenizer.emitCurrentTokencCs�|jj�}|dkr|j|_n�|dkr.|j|_n�|dkrd|jjtddd��|jjtddd��n`|tkrpdS|t	kr�|jjtd	||jj
t	d
�d��n&|jj
d�}|jjtd||d��d
S)NrCrD�r"zinvalid-codepoint)r#r$rJFrIT)rCrDr[)rr6�entityDataStater�tagOpenStater%r7rrr�
charsUntil)rr$�charsr r r!r�s&



zHTMLTokenizer.dataStatecCs|j�|j|_dS)NT)rSrr)rr r r!r\szHTMLTokenizer.entityDataStatecCs�|jj�}|dkr|j|_n�|dkr.|j|_n�|tkr:dS|dkrp|jjtddd��|jjtdd	d��nT|t	kr�|jjtd
||jj
t	d�d��n&|jj
d�}|jjtd||d��dS)
NrCrDFr[r"zinvalid-codepoint)r#r$rJu�rIT)rCrDr[)rr6�characterReferenceInRcdatar�rcdataLessThanSignStaterr%r7rrr^)rr$r_r r r!�rcdataStates&



zHTMLTokenizer.rcdataStatecCs|j�|j|_dS)NT)rSrbr)rr r r!r`1sz(HTMLTokenizer.characterReferenceInRcdatacCs�|jj�}|dkr|j|_nh|dkrR|jjtddd��|jjtddd��n2|tkr^dS|jjd
�}|jjtd||d��d	S)NrDr[r"zinvalid-codepoint)r#r$rJu�FT)rDr[)	rr6�rawtextLessThanSignStaterr%r7rrr^)rr$r_r r r!�rawtextState6s


zHTMLTokenizer.rawtextStatecCs�|jj�}|dkr|j|_nh|dkrR|jjtddd��|jjtddd��n2|tkr^dS|jjd
�}|jjtd||d��d	S)NrDr[r"zinvalid-codepoint)r#r$rJu�FT)rDr[)	rr6�scriptDataLessThanSignStaterr%r7rrr^)rr$r_r r r!�scriptDataStateHs


zHTMLTokenizer.scriptDataStatecCsr|jj�}|tkrdS|dkrL|jjtddd��|jjtddd��n"|jjtd||jjd�d��dS)	NFr[r"zinvalid-codepoint)r#r$rJu�T)rr6rr%r7rr^)rr$r r r!�plaintextStateZs

zHTMLTokenizer.plaintextStatecCs|jj�}|dkr|j|_n�|dkr.|j|_n�|tkrVtd|gddd�|_|j|_n�|dkr�|j	j
tddd	��|j	j
td
dd	��|j|_nt|dkr�|j	j
tdd
d	��|jj|�|j
|_n@|j	j
tddd	��|j	j
td
dd	��|jj|�|j|_dS)N�!�/ZStartTagF)r#rUr$rWZselfClosingAcknowledged�>r"z'expected-tag-name-but-got-right-bracket)r#r$rJz<>�?z'expected-tag-name-but-got-question-markzexpected-tag-namerDT)rr6�markupDeclarationOpenStater�closeTagOpenStater
rr�tagNameStater%r7rr=�bogusCommentState)rr$r r r!r]is6









zHTMLTokenizer.tagOpenStatecCs�|jj�}|tkr0td|gdd�|_|j|_n�|dkrX|jjtddd��|j	|_nn|t
kr�|jjtddd��|jjtd	d
d��|j	|_n0|jjtddd|id
��|jj|�|j|_dS)NrVF)r#rUr$rWrjr"z*expected-closing-tag-but-got-right-bracket)r#r$z expected-closing-tag-but-got-eofrJz</z!expected-closing-tag-but-got-charr$)r#r$r.T)
rr6r
rrrnrr%r7rrr=ro)rr$r r r!rm�s(





zHTMLTokenizer.closeTagOpenStatecCs�|jj�}|tkr|j|_n�|dkr.|j�n~|tkrV|jjt	ddd��|j
|_nV|dkrh|j|_nD|dkr�|jjt	ddd��|jdd	7<n|jd|7<d
S)Nrjr"zeof-in-tag-name)r#r$rir[zinvalid-codepointrUu�T)
rr6r�beforeAttributeNameStaterrZrr%r7rr�selfClosingStartTagStater)rr$r r r!rn�s"






zHTMLTokenizer.tagNameStatecCsP|jj�}|dkr"d|_|j|_n*|jjtddd��|jj|�|j	|_dS)Nrir,rJrD)r#r$T)
rr6�temporaryBuffer�rcdataEndTagOpenStaterr%r7rr=rb)rr$r r r!ra�s

z%HTMLTokenizer.rcdataLessThanSignStatecCsX|jj�}|tkr*|j|7_|j|_n*|jjtddd��|jj	|�|j
|_dS)NrJz</)r#r$T)rr6r
rr�rcdataEndTagNameStaterr%r7rr=rb)rr$r r r!rs�s

z#HTMLTokenizer.rcdataEndTagOpenStatecCs|jo|jdj�|jj�k}|jj�}|tkrT|rTtd|jgdd�|_|j|_n�|dkr�|r�td|jgdd�|_|j	|_n||dkr�|r�td|jgdd�|_|j
�|j|_nH|tkr�|j|7_n0|j
jtdd|jd	��|jj|�|j|_d
S)NrUrVF)r#rUr$rWrirjrJz</)r#r$T)r�lowerrrrr6rrrprrqrZrr
r%r7r=rb)r�appropriater$r r r!rt�s2



z#HTMLTokenizer.rcdataEndTagNameStatecCsP|jj�}|dkr"d|_|j|_n*|jjtddd��|jj|�|j	|_dS)Nrir,rJrD)r#r$T)
rr6rr�rawtextEndTagOpenStaterr%r7rr=rd)rr$r r r!rc�s

z&HTMLTokenizer.rawtextLessThanSignStatecCsX|jj�}|tkr*|j|7_|j|_n*|jjtddd��|jj	|�|j
|_dS)NrJz</)r#r$T)rr6r
rr�rawtextEndTagNameStaterr%r7rr=rd)rr$r r r!rw�s

z$HTMLTokenizer.rawtextEndTagOpenStatecCs|jo|jdj�|jj�k}|jj�}|tkrT|rTtd|jgdd�|_|j|_n�|dkr�|r�td|jgdd�|_|j	|_n||dkr�|r�td|jgdd�|_|j
�|j|_nH|tkr�|j|7_n0|j
jtdd|jd	��|jj|�|j|_d
S)NrUrVF)r#rUr$rWrirjrJz</)r#r$T)rrurrrr6rrrprrqrZrr
r%r7r=rd)rrvr$r r r!rxs2



z$HTMLTokenizer.rawtextEndTagNameStatecCsx|jj�}|dkr"d|_|j|_nR|dkrJ|jjtddd��|j|_n*|jjtddd��|jj	|�|j
|_dS)	Nrir,rhrJz<!)r#r$rDT)rr6rr�scriptDataEndTagOpenStaterr%r7r�scriptDataEscapeStartStater=rf)rr$r r r!res


z)HTMLTokenizer.scriptDataLessThanSignStatecCsX|jj�}|tkr*|j|7_|j|_n*|jjtddd��|jj	|�|j
|_dS)NrJz</)r#r$T)rr6r
rr�scriptDataEndTagNameStaterr%r7rr=rf)rr$r r r!ry,s

z'HTMLTokenizer.scriptDataEndTagOpenStatecCs|jo|jdj�|jj�k}|jj�}|tkrT|rTtd|jgdd�|_|j|_n�|dkr�|r�td|jgdd�|_|j	|_n||dkr�|r�td|jgdd�|_|j
�|j|_nH|tkr�|j|7_n0|j
jtdd|jd	��|jj|�|j|_d
S)NrUrVF)r#rUr$rWrirjrJz</)r#r$T)rrurrrr6rrrprrqrZrr
r%r7r=rf)rrvr$r r r!r{7s2



z'HTMLTokenizer.scriptDataEndTagNameStatecCsJ|jj�}|dkr2|jjtddd��|j|_n|jj|�|j|_dS)N�-rJ)r#r$T)	rr6r%r7r�scriptDataEscapeStartDashStaterr=rf)rr$r r r!rzSs

z(HTMLTokenizer.scriptDataEscapeStartStatecCsJ|jj�}|dkr2|jjtddd��|j|_n|jj|�|j|_dS)Nr|rJ)r#r$T)	rr6r%r7r�scriptDataEscapedDashDashStaterr=rf)rr$r r r!r}]s

z,HTMLTokenizer.scriptDataEscapeStartDashStatecCs�|jj�}|dkr2|jjtddd��|j|_n�|dkrD|j|_nn|dkrz|jjtddd��|jjtddd��n8|tkr�|j	|_n&|jj
d
�}|jjtd||d��d	S)Nr|rJ)r#r$rDr[r"zinvalid-codepointu�T)rDr|r[)rr6r%r7r�scriptDataEscapedDashStater�"scriptDataEscapedLessThanSignStaterrr^)rr$r_r r r!�scriptDataEscapedStategs"




z$HTMLTokenizer.scriptDataEscapedStatecCs�|jj�}|dkr2|jjtddd��|j|_n�|dkrD|j|_nn|dkr�|jjtddd��|jjtddd��|j|_n0|t	kr�|j
|_n|jjtd|d��|j|_d	S)
Nr|rJ)r#r$rDr[r"zinvalid-codepointu�T)rr6r%r7rr~rr�r�rr)rr$r r r!r{s"






z(HTMLTokenizer.scriptDataEscapedDashStatecCs�|jj�}|dkr*|jjtddd��n�|dkr<|j|_n�|dkrd|jjtddd��|j|_nn|dkr�|jjtddd��|jjtdd	d��|j|_n0|t	kr�|j
|_n|jjtd|d��|j|_d
S)Nr|rJ)r#r$rDrjr[r"zinvalid-codepointu�T)rr6r%r7rr�rrfr�rr)rr$r r r!r~�s&






z,HTMLTokenizer.scriptDataEscapedDashDashStatecCs�|jj�}|dkr"d|_|j|_n\|tkrT|jjtdd|d��||_|j	|_n*|jjtddd��|jj
|�|j|_dS)Nrir,rJrD)r#r$T)rr6rr� scriptDataEscapedEndTagOpenStaterr
r%r7r� scriptDataDoubleEscapeStartStater=r�)rr$r r r!r��s


z0HTMLTokenizer.scriptDataEscapedLessThanSignStatecCsP|jj�}|tkr"||_|j|_n*|jjtddd��|jj	|�|j
|_dS)NrJz</)r#r$T)rr6r
rr� scriptDataEscapedEndTagNameStaterr%r7rr=r�)rr$r r r!r��s

z.HTMLTokenizer.scriptDataEscapedEndTagOpenStatecCs|jo|jdj�|jj�k}|jj�}|tkrT|rTtd|jgdd�|_|j|_n�|dkr�|r�td|jgdd�|_|j	|_n||dkr�|r�td|jgdd�|_|j
�|j|_nH|tkr�|j|7_n0|j
jtdd|jd	��|jj|�|j|_d
S)NrUrVF)r#rUr$rWrirjrJz</)r#r$T)rrurrrr6rrrprrqrZrr
r%r7r=r�)rrvr$r r r!r��s2



z.HTMLTokenizer.scriptDataEscapedEndTagNameStatecCs�|jj�}|ttd�BkrR|jjtd|d��|jj�dkrH|j	|_
q�|j|_
nB|tkr�|jjtd|d��|j|7_n|jj
|�|j|_
dS)NrirjrJ)r#r$�scriptT)rirj)rr6rr:r%r7rrrru�scriptDataDoubleEscapedStaterr�r
r=)rr$r r r!r��s


z.HTMLTokenizer.scriptDataDoubleEscapeStartStatecCs�|jj�}|dkr2|jjtddd��|j|_n�|dkrZ|jjtddd��|j|_nt|dkr�|jjtddd��|jjtddd��n>|tkr�|jjtdd	d��|j	|_n|jjtd|d��d
S)Nr|rJ)r#r$rDr[r"zinvalid-codepointu�zeof-in-script-in-scriptT)
rr6r%r7r� scriptDataDoubleEscapedDashStater�(scriptDataDoubleEscapedLessThanSignStaterr)rr$r r r!r��s$





z*HTMLTokenizer.scriptDataDoubleEscapedStatecCs�|jj�}|dkr2|jjtddd��|j|_n�|dkrZ|jjtddd��|j|_n�|dkr�|jjtddd��|jjtddd��|j|_nF|t	kr�|jjtdd	d��|j
|_n|jjtd|d��|j|_d
S)Nr|rJ)r#r$rDr[r"zinvalid-codepointu�zeof-in-script-in-scriptT)rr6r%r7r�$scriptDataDoubleEscapedDashDashStaterr�r�rr)rr$r r r!r�s(







z.HTMLTokenizer.scriptDataDoubleEscapedDashStatecCs|jj�}|dkr*|jjtddd��n�|dkrR|jjtddd��|j|_n�|dkrz|jjtddd��|j|_n�|dkr�|jjtddd��|jjtdd	d��|j|_nF|t	kr�|jjtdd
d��|j
|_n|jjtd|d��|j|_dS)Nr|rJ)r#r$rDrjr[r"zinvalid-codepointu�zeof-in-script-in-scriptT)rr6r%r7rr�rrfr�rr)rr$r r r!r�s,







z2HTMLTokenizer.scriptDataDoubleEscapedDashDashStatecCsP|jj�}|dkr8|jjtddd��d|_|j|_n|jj|�|j	|_dS)NrirJ)r#r$r,T)
rr6r%r7rrr�scriptDataDoubleEscapeEndStaterr=r�)rr$r r r!r�0s

z6HTMLTokenizer.scriptDataDoubleEscapedLessThanSignStatecCs�|jj�}|ttd�BkrR|jjtd|d��|jj�dkrH|j	|_
q�|j|_
nB|tkr�|jjtd|d��|j|7_n|jj
|�|j|_
dS)NrirjrJ)r#r$r�T)rirj)rr6rr:r%r7rrrrur�rr�r
r=)rr$r r r!r�;s


z,HTMLTokenizer.scriptDataDoubleEscapeEndStatecCs0|jj�}|tkr$|jjtd��n|tkrJ|jdj|dg�|j|_n�|dkr\|j	�n�|dkrn|j
|_n�|dkr�|jjtd
dd��|jdj|dg�|j|_n�|d
kr�|jjtd
dd��|jdjddg�|j|_nF|t
k�r|jjtd
dd��|j|_n|jdj|dg�|j|_dS)NTr$r,rjri�'�"rHrDr"z#invalid-character-in-attribute-name)r#r$r[zinvalid-codepointu�z#expected-attribute-name-but-got-eof)r�r�rHrD)rr6rr^r
rr7�attributeNameStaterrZrqr%rrr)rr$r r r!rpKs6










z&HTMLTokenizer.beforeAttributeNameStatecCs�|jj�}d}d}|dkr&|j|_�n0|tkr^|jddd||jjtd�7<d}�n�|dkrld}n�|tkr~|j|_n�|dkr�|j	|_n�|d	kr�|j
jtd
dd��|jdddd
7<d}n�|dk�r|j
jtd
dd��|jddd|7<d}nH|t
k�r8|j
jtd
dd��|j|_n|jddd|7<d}|�r�|jdddjt�|jddd<xP|jddd�D]:\}}|jddd|k�r�|j
jtd
dd��P�q�W|�r�|j�dS)NTFrHr$rrrjrir[r"zinvalid-codepoint)r#r$u�r�r�rDz#invalid-character-in-attribute-namezeof-in-attribute-namezduplicate-attributerKrK)r�r�rDrKrKrKrKrKrK)rr6�beforeAttributeValueStaterr
rr^r�afterAttributeNameStaterqr%r7rrrrXrrZ)rr$ZleavingThisStateZ	emitTokenrU�_r r r!r�isR








&
z HTMLTokenizer.attributeNameStatecCsF|jj�}|tkr$|jjtd��n|dkr8|j|_�n
|dkrJ|j�n�|tkrp|jdj	|dg�|j
|_n�|dkr�|j|_n�|dkr�|jj	t
dd	d
��|jdj	ddg�|j
|_n�|dk�r�|jj	t
ddd
��|jdj	|dg�|j
|_nF|tk�r&|jj	t
ddd
��|j|_n|jdj	|dg�|j
|_dS)NTrHrjr$r,rir[r"zinvalid-codepoint)r#r$u�r�r�rDz&invalid-character-after-attribute-namezexpected-end-of-tag-but-got-eof)r�r�rD)rr6rr^r�rrZr
rr7r�rqr%rrr)rr$r r r!r��s:











z%HTMLTokenizer.afterAttributeNameStatecCsj|jj�}|tkr$|jjtd��nB|dkr8|j|_�n.|dkrX|j|_|jj|��n|dkrl|j|_�n�|dkr�|j	j
tddd��|j�n�|d	kr�|j	j
tdd
d��|j
dddd
7<|j|_n�|dk�r|j	j
tddd��|j
ddd|7<|j|_nL|tk�rD|j	j
tddd��|j|_n"|j
ddd|7<|j|_dS)NTr�rCr�rjr"z.expected-attribute-value-but-got-right-bracket)r#r$r[zinvalid-codepointr$ru�rHrD�`z"equals-in-unquoted-attribute-valuez$expected-attribute-value-but-got-eofrK)rHrDr�rKrK)rr6rr^�attributeValueDoubleQuotedStater�attributeValueUnQuotedStater=�attributeValueSingleQuotedStater%r7rrZrrr)rr$r r r!r��s>










z'HTMLTokenizer.beforeAttributeValueStatecCs�|jj�}|dkr|j|_n�|dkr0|jd�n�|dkrj|jjtddd��|jdddd	7<nN|t	kr�|jjtdd
d��|j
|_n&|jdd
d||jjd�7<dS)Nr�rCr[r"zinvalid-codepoint)r#r$r$ru�z#eof-in-attribute-value-double-quoteTrKrK)r�rCr[)rr6�afterAttributeValueStaterrTr%r7rrrrr^)rr$r r r!r��s 




z-HTMLTokenizer.attributeValueDoubleQuotedStatecCs�|jj�}|dkr|j|_n�|dkr0|jd�n�|dkrj|jjtddd��|jdddd	7<nN|t	kr�|jjtdd
d��|j
|_n&|jdd
d||jjd�7<dS)Nr�rCr[r"zinvalid-codepoint)r#r$r$ru�z#eof-in-attribute-value-single-quoteTrKrK)r�rCr[)rr6r�rrTr%r7rrrrr^)rr$r r r!r��s 




z-HTMLTokenizer.attributeValueSingleQuotedStatecCs|jj�}|tkr|j|_�n�|dkr2|jd�n�|dkrD|j�n�|dkr~|jjt	dd	d
��|j
ddd|7<n�|d
kr�|jjt	ddd
��|j
dddd7<nV|tkr�|jjt	ddd
��|j|_n.|j
ddd||jj
td�tB�7<dS)NrCrjr�r�rHrDr�r"z0unexpected-character-in-unquoted-attribute-value)r#r$r$rr[zinvalid-codepointu�z eof-in-attribute-value-no-quotesT)r�r�rHrDr�rKrKrK)rCrjr�r�rHrDr�r[)rr6rrprrTrZr%r7rrrrr^r:)rr$r r r!r�s,





z)HTMLTokenizer.attributeValueUnQuotedStatecCs�|jj�}|tkr|j|_n�|dkr.|j�np|dkr@|j|_n^|tkrt|jj	t
ddd��|jj|�|j|_n*|jj	t
ddd��|jj|�|j|_dS)Nrjrir"z$unexpected-EOF-after-attribute-value)r#r$z*unexpected-character-after-attribute-valueT)
rr6rrprrZrqrr%r7rr=r)rr$r r r!r� s"






z&HTMLTokenizer.afterAttributeValueStatecCs�|jj�}|dkr&d|jd<|j�n^|tkrZ|jjtddd��|jj|�|j	|_
n*|jjtddd��|jj|�|j|_
dS)NrjTrWr"z#unexpected-EOF-after-solidus-in-tag)r#r$z)unexpected-character-after-solidus-in-tag)rr6rrZrr%r7rr=rrrp)rr$r r r!rq4s





z&HTMLTokenizer.selfClosingStartTagStatecCsD|jjd�}|jdd�}|jjtd|d��|jj�|j|_dS)Nrjr[u��Comment)r#r$T)	rr^�replacer%r7rr6rr)rr$r r r!roFs
zHTMLTokenizer.bogusCommentStatecCs�|jj�g}|ddkrT|j|jj��|ddkrPtddd�|_|j|_dS�n�|ddkr�d}x.d&D]&}|j|jj��|d'|krjd}PqjW|r�tdddddd�|_|j|_dSn�|d(dk�rH|jdk	�rH|jj	j
�rH|jj	j
d)j|jj	jk�rHd}x2d*D]*}|j|jj��|d+|k�rd}P�qW|�rH|j
|_dS|jjtddd��x|�rz|jj|j���q`W|j|_dS),Nrr|r�r,)r#r$T�d�D�o�Or@�C�t�T�y�Y�p�P�e�EFZDoctype)r#rU�publicId�systemId�correct�[�Ar"zexpected-dashes-or-doctyperKrKrK)r�r��r�r��r@r��r�r��r�r��r�r��r�r�)r�r�r�r�r�r�rKrKrK)r�r�r�r�r�r�rK)rr6r7rr�commentStartStater�doctypeStaterZtreeZopenElements�	namespaceZdefaultNamespace�cdataSectionStater%r=r'ro)rr?�matched�expectedr r r!rlUsR


z(HTMLTokenizer.markupDeclarationOpenStatecCs�|jj�}|dkr|j|_n�|dkrN|jjtddd��|jdd7<n�|dkr�|jjtdd	d��|jj|j�|j|_nP|t	kr�|jjtdd
d��|jj|j�|j|_n|jd|7<|j
|_dS)Nr|r[r"zinvalid-codepoint)r#r$r$u�rjzincorrect-commentzeof-in-commentT)rr6�commentStartDashStaterr%r7rrrr�commentState)rr$r r r!r��s(






zHTMLTokenizer.commentStartStatecCs�|jj�}|dkr|j|_n�|dkrN|jjtddd��|jdd7<n�|dkr�|jjtdd	d��|jj|j�|j|_nT|t	kr�|jjtdd
d��|jj|j�|j|_n|jdd|7<|j
|_dS)Nr|r[r"zinvalid-codepoint)r#r$r$u-�rjzincorrect-commentzeof-in-commentT)rr6�commentEndStaterr%r7rrrrr�)rr$r r r!r��s(






z#HTMLTokenizer.commentStartDashStatecCs�|jj�}|dkr|j|_n�|dkrN|jjtddd��|jdd7<nT|tkr�|jjtddd��|jj|j�|j	|_n|jd||jj
d
�7<d	S)Nr|r[r"zinvalid-codepoint)r#r$r$u�zeof-in-commentT)r|r[)rr6�commentEndDashStaterr%r7rrrrr^)rr$r r r!r��s




zHTMLTokenizer.commentStatecCs�|jj�}|dkr|j|_n�|dkrV|jjtddd��|jdd7<|j|_nT|t	kr�|jjtddd��|jj|j�|j
|_n|jdd|7<|j|_d	S)
Nr|r[r"zinvalid-codepoint)r#r$r$u-�zeof-in-comment-end-dashT)rr6r�rr%r7rrr�rr)rr$r r r!r��s 





z!HTMLTokenizer.commentEndDashStatecCs,|jj�}|dkr*|jj|j�|j|_n�|dkrd|jjtddd��|jdd7<|j|_n�|dkr�|jjtdd	d��|j	|_n�|d
kr�|jjtddd��|jd|7<nj|t
kr�|jjtddd��|jj|j�|j|_n4|jjtdd
d��|jdd|7<|j|_dS)Nrjr[r"zinvalid-codepoint)r#r$r$u--�rhz,unexpected-bang-after-double-dash-in-commentr|z,unexpected-dash-after-double-dash-in-commentzeof-in-comment-double-dashzunexpected-char-in-commentz--T)rr6r%r7rrrrr��commentEndBangStater)rr$r r r!r��s6









zHTMLTokenizer.commentEndStatecCs�|jj�}|dkr*|jj|j�|j|_n�|dkrN|jdd7<|j|_n�|dkr�|jjtddd��|jdd	7<|j	|_nT|t
kr�|jjtdd
d��|jj|j�|j|_n|jdd|7<|j	|_dS)Nrjr|r$z--!r[r"zinvalid-codepoint)r#r$u--!�zeof-in-comment-end-bang-stateT)rr6r%r7rrrr�rr�r)rr$r r r!r��s(






z!HTMLTokenizer.commentEndBangStatecCs�|jj�}|tkr|j|_nj|tkr\|jjtddd��d|j	d<|jj|j	�|j
|_n*|jjtddd��|jj|�|j|_dS)Nr"z!expected-doctype-name-but-got-eof)r#r$Fr�zneed-space-after-doctypeT)rr6r�beforeDoctypeNameStaterrr%r7rrrr=)rr$r r r!r�s





zHTMLTokenizer.doctypeStatecCs�|jj�}|tkrn�|dkrT|jjtddd��d|jd<|jj|j�|j|_n�|dkr�|jjtddd��d	|jd
<|j	|_nR|t
kr�|jjtddd��d|jd<|jj|j�|j|_n||jd
<|j	|_dS)
Nrjr"z+expected-doctype-name-but-got-right-bracket)r#r$Fr�r[zinvalid-codepointu�rUz!expected-doctype-name-but-got-eofT)rr6rr%r7rrrr�doctypeNameStater)rr$r r r!r�s.










z$HTMLTokenizer.beforeDoctypeNameStatecCs|jj�}|tkr2|jdjt�|jd<|j|_n�|dkrh|jdjt�|jd<|jj	|j�|j
|_n�|dkr�|jj	tddd��|jdd7<|j|_nh|t
kr�|jj	tddd��d	|jd
<|jdjt�|jd<|jj	|j�|j
|_n|jd|7<dS)NrUrjr[r"zinvalid-codepoint)r#r$u�zeof-in-doctype-nameFr�T)rr6rrrXr�afterDoctypeNameStaterr%r7rrr�r)rr$r r r!r�6s,







zHTMLTokenizer.doctypeNameStatecCsR|jj�}|tkr�n8|dkr8|jj|j�|j|_�n|tkr�d|jd<|jj	|�|jjt
ddd��|jj|j�|j|_�n�|d!kr�d	}x$d'D]}|jj�}||kr�d}Pq�W|r�|j|_d	SnJ|d(k�rd	}x(d.D] }|jj�}||k�r�d}P�q�W|�r|j|_d	S|jj	|�|jjt
ddd|id ��d|jd<|j
|_d	S)/NrjFr�r"zeof-in-doctype)r#r$r�r�T�u�U�b�B�l�L�i�Ir@r��s�Sr�r�r�r�r�r��m�Mz*expected-space-or-right-bracket-in-doctyper$)r#r$r.)r�r��r�r��r�r��r�r��r�r��r@r�)r�r�r�r�r�)r�r��r�r��r�r��r�r��r�r��r�r�)r�r�r�r�r�)rr6rr%r7rrrrr=r�afterDoctypePublicKeywordState�afterDoctypeSystemKeywordState�bogusDoctypeState)rr$r�r�r r r!r�OsT







z#HTMLTokenizer.afterDoctypeNameStatecCs�|jj�}|tkr|j|_n�|d
krP|jjtddd��|jj|�|j|_nT|t	kr�|jjtddd��d|j
d<|jj|j
�|j|_n|jj|�|j|_d	S)Nr�r�r"zunexpected-char-in-doctype)r#r$zeof-in-doctypeFr�T)r�r�)rr6r�"beforeDoctypePublicIdentifierStaterr%r7rr=rrr)rr$r r r!r��s"






z,HTMLTokenizer.afterDoctypePublicKeywordStatecCs�|jj�}|tkrn�|dkr0d|jd<|j|_n�|dkrLd|jd<|j|_n�|dkr�|jjt	ddd��d	|jd
<|jj|j�|j
|_nh|tkr�|jjt	ddd��d	|jd
<|jj|j�|j
|_n(|jjt	ddd��d	|jd
<|j|_d
S)Nr�r,r�r�rjr"zunexpected-end-of-doctype)r#r$Fr�zeof-in-doctypezunexpected-char-in-doctypeT)
rr6rr�(doctypePublicIdentifierDoubleQuotedStater�(doctypePublicIdentifierSingleQuotedStater%r7rrrr�)rr$r r r!r��s4












z0HTMLTokenizer.beforeDoctypePublicIdentifierStatecCs�|jj�}|dkr|j|_n�|dkrN|jjtddd��|jdd7<n�|dkr�|jjtdd	d��d
|jd<|jj|j�|j|_nR|t	kr�|jjtddd��d
|jd<|jj|j�|j|_n|jd|7<d
S)Nr�r[r"zinvalid-codepoint)r#r$r�u�rjzunexpected-end-of-doctypeFr�zeof-in-doctypeT)
rr6�!afterDoctypePublicIdentifierStaterr%r7rrrr)rr$r r r!r��s*








z6HTMLTokenizer.doctypePublicIdentifierDoubleQuotedStatecCs�|jj�}|dkr|j|_n�|dkrN|jjtddd��|jdd7<n�|dkr�|jjtdd	d��d
|jd<|jj|j�|j|_nR|t	kr�|jjtddd��d
|jd<|jj|j�|j|_n|jd|7<d
S)Nr�r[r"zinvalid-codepoint)r#r$r�u�rjzunexpected-end-of-doctypeFr�zeof-in-doctypeT)
rr6r�rr%r7rrrr)rr$r r r!r��s*








z6HTMLTokenizer.doctypePublicIdentifierSingleQuotedStatecCs|jj�}|tkr|j|_n�|dkr<|jj|j�|j|_n�|dkrn|jjt	ddd��d|jd<|j
|_n�|dkr�|jjt	ddd��d|jd<|j|_nh|tkr�|jjt	dd	d��d
|jd<|jj|j�|j|_n(|jjt	ddd��d
|jd<|j
|_dS)
Nrjr�r"zunexpected-char-in-doctype)r#r$r,r�r�zeof-in-doctypeFr�T)rr6r�-betweenDoctypePublicAndSystemIdentifiersStaterr%r7rrr�(doctypeSystemIdentifierDoubleQuotedState�(doctypeSystemIdentifierSingleQuotedStaterr�)rr$r r r!r��s6













z/HTMLTokenizer.afterDoctypePublicIdentifierStatecCs�|jj�}|tkrn�|dkr4|jj|j�|j|_n�|dkrPd|jd<|j|_n�|dkrld|jd<|j	|_nh|t
kr�|jjtddd��d	|jd
<|jj|j�|j|_n(|jjtddd��d	|jd
<|j|_dS)
Nrjr�r,r�r�r"zeof-in-doctype)r#r$Fr�zunexpected-char-in-doctypeT)
rr6rr%r7rrrr�r�rrr�)rr$r r r!r�s.










z;HTMLTokenizer.betweenDoctypePublicAndSystemIdentifiersStatecCs�|jj�}|tkr|j|_n�|d
krP|jjtddd��|jj|�|j|_nT|t	kr�|jjtddd��d|j
d<|jj|j
�|j|_n|jj|�|j|_d	S)Nr�r�r"zunexpected-char-in-doctype)r#r$zeof-in-doctypeFr�T)r�r�)rr6r�"beforeDoctypeSystemIdentifierStaterr%r7rr=rrr)rr$r r r!r�s"






z,HTMLTokenizer.afterDoctypeSystemKeywordStatecCs�|jj�}|tkrn�|dkr0d|jd<|j|_n�|dkrLd|jd<|j|_n�|dkr�|jjt	ddd��d	|jd
<|jj|j�|j
|_nh|tkr�|jjt	ddd��d	|jd
<|jj|j�|j
|_n(|jjt	ddd��d	|jd
<|j|_dS)
Nr�r,r�r�rjr"zunexpected-char-in-doctype)r#r$Fr�zeof-in-doctypeT)
rr6rrr�rr�r%r7rrrr�)rr$r r r!r�/s4












z0HTMLTokenizer.beforeDoctypeSystemIdentifierStatecCs�|jj�}|dkr|j|_n�|dkrN|jjtddd��|jdd7<n�|dkr�|jjtdd	d��d
|jd<|jj|j�|j|_nR|t	kr�|jjtddd��d
|jd<|jj|j�|j|_n|jd|7<d
S)Nr�r[r"zinvalid-codepoint)r#r$r�u�rjzunexpected-end-of-doctypeFr�zeof-in-doctypeT)
rr6�!afterDoctypeSystemIdentifierStaterr%r7rrrr)rr$r r r!r�Ls*








z6HTMLTokenizer.doctypeSystemIdentifierDoubleQuotedStatecCs�|jj�}|dkr|j|_n�|dkrN|jjtddd��|jdd7<n�|dkr�|jjtdd	d��d
|jd<|jj|j�|j|_nR|t	kr�|jjtddd��d
|jd<|jj|j�|j|_n|jd|7<d
S)Nr�r[r"zinvalid-codepoint)r#r$r�u�rjzunexpected-end-of-doctypeFr�zeof-in-doctypeT)
rr6r�rr%r7rrrr)rr$r r r!r�ds*








z6HTMLTokenizer.doctypeSystemIdentifierSingleQuotedStatecCs�|jj�}|tkrn~|dkr4|jj|j�|j|_n^|tkrt|jjt	ddd��d|jd<|jj|j�|j|_n|jjt	ddd��|j
|_dS)	Nrjr"zeof-in-doctype)r#r$Fr�zunexpected-char-in-doctypeT)rr6rr%r7rrrrrr�)rr$r r r!r�|s 





z/HTMLTokenizer.afterDoctypeSystemIdentifierStatecCsZ|jj�}|dkr*|jj|j�|j|_n,|tkrV|jj|�|jj|j�|j|_ndS)NrjT)	rr6r%r7rrrrr=)rr$r r r!r��s


zHTMLTokenizer.bogusDoctypeStatecCs�g}xt|j|jjd��|j|jjd��|jj�}|tkr@Pq|ddd�dkrl|ddd�|d<Pq|j|�qWdj|�}|jd�}|dkr�x&t|�D]}|jjt	d	d
d��q�W|j
dd�}|r�|jjt	d
|d��|j|_dS)N�]rjr�z]]r,r[rr"zinvalid-codepoint)r#r$u�rJTrK���rKr�rK)
r7rr^r6rr9�count�ranger%rr�rr)rr$r6Z	nullCountr�r r r!r��s.



zHTMLTokenizer.cdataSectionState)N)NF)N�__name__�
__module__�__qualname__�__doc__rr)rBrSrTrZrr\rbr`rdrfrgr]rmrnrarsrtrcrwrxreryr{rzr}r�rr~r�r�r�r�r�r�r�r�r�rpr�r�r�r�r�r�r�rqrorlr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r��
__classcell__r r )rr!rs�H
P#

6 "-3rN)Z
__future__rrrZpip._vendor.sixrr;�collectionsrZ	constantsrr	r
rrr
rrrrZ_inputstreamrZ_trierrL�objectrr r r r!�<module>s_vendor/html5lib/__pycache__/__init__.cpython-36.opt-1.pyc000064400000001563151733136420017266 0ustar003

�Pf�@shdZddlmZmZmZddlmZmZmZddl	m
Z
ddlmZddl
mZdd	d
ddd
gZdZdS)aM
HTML parsing library based on the WHATWG "HTML5"
specification. The parser is designed to be compatible with existing
HTML found in the wild and implements well-defined error recovery that
is largely compatible with modern desktop web browsers.

Example usage:

import html5lib
f = open("my_document.html")
tree = html5lib.parse(f)
�)�absolute_import�division�unicode_literals�)�
HTMLParser�parse�
parseFragment)�getTreeBuilder)�
getTreeWalker)�	serializerrrr	r
rz1.0b10N)�__doc__Z
__future__rrrZhtml5parserrrrZtreebuildersr	Ztreewalkersr
Z
serializerr�__all__�__version__�rr�/usr/lib/python3.6/__init__.py�<module>s_vendor/html5lib/__pycache__/_ihatexml.cpython-36.pyc000064400000032721151733136420016542 0ustar003

�PfAA�@s�ddlmZmZmZddlZddlZddlmZdZdZ	dZ
dZd	Zd
j
ee	g�Zd
j
eeddd
e
eg�Zd
j
ed
g�Zejd�Zejd�Zdd�Zdd�Zedd�Zdd�Zdd�Zdd�Zdd�Zejd�Zejd�Zejd �ZGd!d"�d"e�Z dS)#�)�absolute_import�division�unicode_literalsN�)�DataLossWarninga^
[#x0041-#x005A] | [#x0061-#x007A] | [#x00C0-#x00D6] | [#x00D8-#x00F6] |
[#x00F8-#x00FF] | [#x0100-#x0131] | [#x0134-#x013E] | [#x0141-#x0148] |
[#x014A-#x017E] | [#x0180-#x01C3] | [#x01CD-#x01F0] | [#x01F4-#x01F5] |
[#x01FA-#x0217] | [#x0250-#x02A8] | [#x02BB-#x02C1] | #x0386 |
[#x0388-#x038A] | #x038C | [#x038E-#x03A1] | [#x03A3-#x03CE] |
[#x03D0-#x03D6] | #x03DA | #x03DC | #x03DE | #x03E0 | [#x03E2-#x03F3] |
[#x0401-#x040C] | [#x040E-#x044F] | [#x0451-#x045C] | [#x045E-#x0481] |
[#x0490-#x04C4] | [#x04C7-#x04C8] | [#x04CB-#x04CC] | [#x04D0-#x04EB] |
[#x04EE-#x04F5] | [#x04F8-#x04F9] | [#x0531-#x0556] | #x0559 |
[#x0561-#x0586] | [#x05D0-#x05EA] | [#x05F0-#x05F2] | [#x0621-#x063A] |
[#x0641-#x064A] | [#x0671-#x06B7] | [#x06BA-#x06BE] | [#x06C0-#x06CE] |
[#x06D0-#x06D3] | #x06D5 | [#x06E5-#x06E6] | [#x0905-#x0939] | #x093D |
[#x0958-#x0961] | [#x0985-#x098C] | [#x098F-#x0990] | [#x0993-#x09A8] |
[#x09AA-#x09B0] | #x09B2 | [#x09B6-#x09B9] | [#x09DC-#x09DD] |
[#x09DF-#x09E1] | [#x09F0-#x09F1] | [#x0A05-#x0A0A] | [#x0A0F-#x0A10] |
[#x0A13-#x0A28] | [#x0A2A-#x0A30] | [#x0A32-#x0A33] | [#x0A35-#x0A36] |
[#x0A38-#x0A39] | [#x0A59-#x0A5C] | #x0A5E | [#x0A72-#x0A74] |
[#x0A85-#x0A8B] | #x0A8D | [#x0A8F-#x0A91] | [#x0A93-#x0AA8] |
[#x0AAA-#x0AB0] | [#x0AB2-#x0AB3] | [#x0AB5-#x0AB9] | #x0ABD | #x0AE0 |
[#x0B05-#x0B0C] | [#x0B0F-#x0B10] | [#x0B13-#x0B28] | [#x0B2A-#x0B30] |
[#x0B32-#x0B33] | [#x0B36-#x0B39] | #x0B3D | [#x0B5C-#x0B5D] |
[#x0B5F-#x0B61] | [#x0B85-#x0B8A] | [#x0B8E-#x0B90] | [#x0B92-#x0B95] |
[#x0B99-#x0B9A] | #x0B9C | [#x0B9E-#x0B9F] | [#x0BA3-#x0BA4] |
[#x0BA8-#x0BAA] | [#x0BAE-#x0BB5] | [#x0BB7-#x0BB9] | [#x0C05-#x0C0C] |
[#x0C0E-#x0C10] | [#x0C12-#x0C28] | [#x0C2A-#x0C33] | [#x0C35-#x0C39] |
[#x0C60-#x0C61] | [#x0C85-#x0C8C] | [#x0C8E-#x0C90] | [#x0C92-#x0CA8] |
[#x0CAA-#x0CB3] | [#x0CB5-#x0CB9] | #x0CDE | [#x0CE0-#x0CE1] |
[#x0D05-#x0D0C] | [#x0D0E-#x0D10] | [#x0D12-#x0D28] | [#x0D2A-#x0D39] |
[#x0D60-#x0D61] | [#x0E01-#x0E2E] | #x0E30 | [#x0E32-#x0E33] |
[#x0E40-#x0E45] | [#x0E81-#x0E82] | #x0E84 | [#x0E87-#x0E88] | #x0E8A |
#x0E8D | [#x0E94-#x0E97] | [#x0E99-#x0E9F] | [#x0EA1-#x0EA3] | #x0EA5 |
#x0EA7 | [#x0EAA-#x0EAB] | [#x0EAD-#x0EAE] | #x0EB0 | [#x0EB2-#x0EB3] |
#x0EBD | [#x0EC0-#x0EC4] | [#x0F40-#x0F47] | [#x0F49-#x0F69] |
[#x10A0-#x10C5] | [#x10D0-#x10F6] | #x1100 | [#x1102-#x1103] |
[#x1105-#x1107] | #x1109 | [#x110B-#x110C] | [#x110E-#x1112] | #x113C |
#x113E | #x1140 | #x114C | #x114E | #x1150 | [#x1154-#x1155] | #x1159 |
[#x115F-#x1161] | #x1163 | #x1165 | #x1167 | #x1169 | [#x116D-#x116E] |
[#x1172-#x1173] | #x1175 | #x119E | #x11A8 | #x11AB | [#x11AE-#x11AF] |
[#x11B7-#x11B8] | #x11BA | [#x11BC-#x11C2] | #x11EB | #x11F0 | #x11F9 |
[#x1E00-#x1E9B] | [#x1EA0-#x1EF9] | [#x1F00-#x1F15] | [#x1F18-#x1F1D] |
[#x1F20-#x1F45] | [#x1F48-#x1F4D] | [#x1F50-#x1F57] | #x1F59 | #x1F5B |
#x1F5D | [#x1F5F-#x1F7D] | [#x1F80-#x1FB4] | [#x1FB6-#x1FBC] | #x1FBE |
[#x1FC2-#x1FC4] | [#x1FC6-#x1FCC] | [#x1FD0-#x1FD3] | [#x1FD6-#x1FDB] |
[#x1FE0-#x1FEC] | [#x1FF2-#x1FF4] | [#x1FF6-#x1FFC] | #x2126 |
[#x212A-#x212B] | #x212E | [#x2180-#x2182] | [#x3041-#x3094] |
[#x30A1-#x30FA] | [#x3105-#x312C] | [#xAC00-#xD7A3]z*[#x4E00-#x9FA5] | #x3007 | [#x3021-#x3029]a�
[#x0300-#x0345] | [#x0360-#x0361] | [#x0483-#x0486] | [#x0591-#x05A1] |
[#x05A3-#x05B9] | [#x05BB-#x05BD] | #x05BF | [#x05C1-#x05C2] | #x05C4 |
[#x064B-#x0652] | #x0670 | [#x06D6-#x06DC] | [#x06DD-#x06DF] |
[#x06E0-#x06E4] | [#x06E7-#x06E8] | [#x06EA-#x06ED] | [#x0901-#x0903] |
#x093C | [#x093E-#x094C] | #x094D | [#x0951-#x0954] | [#x0962-#x0963] |
[#x0981-#x0983] | #x09BC | #x09BE | #x09BF | [#x09C0-#x09C4] |
[#x09C7-#x09C8] | [#x09CB-#x09CD] | #x09D7 | [#x09E2-#x09E3] | #x0A02 |
#x0A3C | #x0A3E | #x0A3F | [#x0A40-#x0A42] | [#x0A47-#x0A48] |
[#x0A4B-#x0A4D] | [#x0A70-#x0A71] | [#x0A81-#x0A83] | #x0ABC |
[#x0ABE-#x0AC5] | [#x0AC7-#x0AC9] | [#x0ACB-#x0ACD] | [#x0B01-#x0B03] |
#x0B3C | [#x0B3E-#x0B43] | [#x0B47-#x0B48] | [#x0B4B-#x0B4D] |
[#x0B56-#x0B57] | [#x0B82-#x0B83] | [#x0BBE-#x0BC2] | [#x0BC6-#x0BC8] |
[#x0BCA-#x0BCD] | #x0BD7 | [#x0C01-#x0C03] | [#x0C3E-#x0C44] |
[#x0C46-#x0C48] | [#x0C4A-#x0C4D] | [#x0C55-#x0C56] | [#x0C82-#x0C83] |
[#x0CBE-#x0CC4] | [#x0CC6-#x0CC8] | [#x0CCA-#x0CCD] | [#x0CD5-#x0CD6] |
[#x0D02-#x0D03] | [#x0D3E-#x0D43] | [#x0D46-#x0D48] | [#x0D4A-#x0D4D] |
#x0D57 | #x0E31 | [#x0E34-#x0E3A] | [#x0E47-#x0E4E] | #x0EB1 |
[#x0EB4-#x0EB9] | [#x0EBB-#x0EBC] | [#x0EC8-#x0ECD] | [#x0F18-#x0F19] |
#x0F35 | #x0F37 | #x0F39 | #x0F3E | #x0F3F | [#x0F71-#x0F84] |
[#x0F86-#x0F8B] | [#x0F90-#x0F95] | #x0F97 | [#x0F99-#x0FAD] |
[#x0FB1-#x0FB7] | #x0FB9 | [#x20D0-#x20DC] | #x20E1 | [#x302A-#x302F] |
#x3099 | #x309Aa
[#x0030-#x0039] | [#x0660-#x0669] | [#x06F0-#x06F9] | [#x0966-#x096F] |
[#x09E6-#x09EF] | [#x0A66-#x0A6F] | [#x0AE6-#x0AEF] | [#x0B66-#x0B6F] |
[#x0BE7-#x0BEF] | [#x0C66-#x0C6F] | [#x0CE6-#x0CEF] | [#x0D66-#x0D6F] |
[#x0E50-#x0E59] | [#x0ED0-#x0ED9] | [#x0F20-#x0F29]z}
#x00B7 | #x02D0 | #x02D1 | #x0387 | #x0640 | #x0E46 | #x0EC6 | #x3005 |
#[#x3031-#x3035] | [#x309D-#x309E] | [#x30FC-#x30FE]z | �.�-�_z#x([\d|A-F]{4,4})z'\[#x([\d|A-F]{4,4})-#x([\d|A-F]{4,4})\]cCs�dd�|jd�D�}g}x�|D]�}d}x`ttfD]T}|j|�}|dk	r0|jdd�|j�D��t|d	�dkr~|d
d|d<d}Pq0W|st|�dks�t�|jt|�gd�qWt	|�}|S)NcSsg|]}|j��qS�)�strip)�.0�itemr
r
�/usr/lib/python3.6/_ihatexml.py�
<listcomp>hsz$charStringToList.<locals>.<listcomp>z | FcSsg|]}t|��qSr
)�hexToInt)rr
r
r
rrosr�T���rr)
�split�reChar�reCharRange�match�append�groups�len�AssertionError�ord�normaliseCharList)�charsZ
charRanges�rvr
Z
foundMatchZregexprr
r
r�charStringToListgs"

rcCs�t|�}x |D]}|d|dkst�qWg}d}x�|t|�kr�d}|j||�xT||t|�kr�|||d|dddkr�|||d|dd<|d7}qTW||7}q4W|S)Nrrrr)�sortedrrr)�charListr
r�i�jr
r
rr|s
2rZFFFF�cCs�g}|ddkr*|jd|dddg�xBt|dd��D].\}}|j|dd||dddg�q<W|ddtkr�|j|dddtg�|S)Nrrrrr)r�	enumerate�max_unicode)r!rr"r
r
r
r�
missingRanges�s*r'cCsrg}x^|D]V}|d|dkr6|jtt|d���q
|jtt|d��dtt|d���q
Wddj|�S)Nrrrz[%s]�)r�escapeRegexp�chr�join)r!rr
r
r
r�listToRegexpStr�s
r,cCs
t|d�S)Nr$)�int)Zhex_strr
r
rr�srcCs&d}x|D]}|j|d|�}q
W|S)Nr�^�$�*�+�?�{�}�[�]�|�(�)r�\)rr.r/r0r1r2r3r4r5r6r7r8r9r)�replace)�stringZspecialCharacters�charr
r
rr)�s

r)u�[-,/:-@\[-\^`\{-¶¸-¿×÷IJ-ijĿ-ŀʼnſDŽ-njDZ-dzǶ-ǹȘ-ɏʩ-ʺ˂-ˏ˒-˿͆-͟͢-΅΋΍΢Ϗϗ-ϙϛϝϟϡϴ-ЀЍѐѝ҂҇-ҏӅ-ӆӉ-ӊӍ-ӏӬ-ӭӶ-ӷӺ-԰՗-՘՚-ՠև-֐ֺ֢־׀׃ׅ-׏׫-ׯ׳-ؠػ-ؿٓ-ٟ٪-ٯڸ-ڹڿۏ۔۩ۮ-ۯۺ-ऀऄऺ-ऻॎ-ॐॕ-ॗ।-॥॰-ঀ঄঍-঎঑-঒঩঱঳-঵঺-঻ঽ৅-৆৉-৊ৎ-৖৘-৛৞৤-৥৲-ਁਃ-਄਋-਎਑-਒਩਱਴਷਺-਻਽੃-੆੉-੊੎-੘੝੟-੥ੵ-઀઄ઌ઎઒઩઱઴઺-઻૆૊૎-૟ૡ-૥૰-଀଄଍-଎଑-଒଩଱଴-ଵ଺-଻ୄ-୆୉-୊୎-୕୘-୛୞ୢ-୥୰-஁஄஋-஍஑஖-஘஛஝஠-஢஥-஧஫-஭ஶ஺-஽௃-௅௉௎-௖௘-௦௰-ఀఄ఍఑఩ఴ఺-ఽ౅౉౎-౔౗-౟ౢ-౥౰-ಁ಄಍಑಩಴಺-ಽ೅೉೎-೔೗-ೝ೟ೢ-೥೰-ഁഄ഍഑ഩഺ-ഽൄ-൅൉ൎ-ൖ൘-ൟൢ-൥൰-฀ฯ฻-฿๏๚-຀຃຅-ຆຉ຋-ຌຎ-ຓຘຠ຤຦ຨ-ຩຬຯ຺຾-຿໅໇໎-໏໚-༗༚-༟༪-༴༶༸༺-༽཈ཪ-཰྅ྌ-ྏྖ྘ྮ-ྰྸྺ-႟჆-჏ჷ-ჿᄁᄄᄈᄊᄍᄓ-ᄻᄽᄿᅁ-ᅋᅍᅏᅑ-ᅓᅖ-ᅘᅚ-ᅞᅢᅤᅦᅨᅪ-ᅬᅯ-ᅱᅴᅶ-ᆝᆟ-ᆧᆩ-ᆪᆬ-ᆭᆰ-ᆶᆹᆻᇃ-ᇪᇬ-ᇯᇱ-ᇸᇺ-᷿ẜ-ẟỺ-ỿ἖-἗἞-἟὆-὇὎-὏὘὚὜὞὾-὿᾵᾽᾿-῁῅῍-῏῔-῕῜-῟῭-῱῵´-⃏⃝-⃠⃢-℥℧-℩ℬ-ℭℯ-ⅿↃ-〄〆〈-〠〰〶-぀ゕ-゘゛-゜ゟ-゠・ヿ-㄄ㄭ-䷿龦-꯿힤-￿]u�[-@\[-\^`\{-¿×÷IJ-ijĿ-ŀʼnſDŽ-njDZ-dzǶ-ǹȘ-ɏʩ-ʺ˂-΅·΋΍΢Ϗϗ-ϙϛϝϟϡϴ-ЀЍѐѝ҂-ҏӅ-ӆӉ-ӊӍ-ӏӬ-ӭӶ-ӷӺ-԰՗-՘՚-ՠև-׏׫-ׯ׳-ؠػ-ـً-ٰڸ-ڹڿۏ۔ۖ-ۤۧ-ऄऺ-़ा-ॗॢ-঄঍-঎঑-঒঩঱঳-঵঺-৛৞ৢ-৯৲-਄਋-਎਑-਒਩਱਴਷਺-੘੝੟-ੱੵ-઄ઌ઎઒઩઱઴઺-઼ા-૟ૡ-଄଍-଎଑-଒଩଱଴-ଵ଺-଼ା-୛୞ୢ-஄஋-஍஑஖-஘஛஝஠-஢஥-஧஫-஭ஶ஺-ఄ఍఑఩ఴ఺-౟ౢ-಄಍಑಩಴಺-ೝ೟ೢ-ഄ഍഑ഩഺ-ൟൢ-฀ฯัิ-฿ๆ-຀຃຅-ຆຉ຋-ຌຎ-ຓຘຠ຤຦ຨ-ຩຬຯັິ-ຼ຾-຿໅-༿཈ཪ-႟჆-჏ჷ-ჿᄁᄄᄈᄊᄍᄓ-ᄻᄽᄿᅁ-ᅋᅍᅏᅑ-ᅓᅖ-ᅘᅚ-ᅞᅢᅤᅦᅨᅪ-ᅬᅯ-ᅱᅴᅶ-ᆝᆟ-ᆧᆩ-ᆪᆬ-ᆭᆰ-ᆶᆹᆻᇃ-ᇪᇬ-ᇯᇱ-ᇸᇺ-᷿ẜ-ẟỺ-ỿ἖-἗἞-἟὆-὇὎-὏὘὚὜὞὾-὿᾵᾽᾿-῁῅῍-῏῔-῕῜-῟῭-῱῵´-℥℧-℩ℬ-ℭℯ-ⅿↃ-〆〈-〠〪-぀ゕ-゠・-㄄ㄭ-䷿龦-꯿힤-￿]z#[^ 
a-zA-Z0-9\-'()+,./:=?;!*#@$_%]c@sreZdZejd�Zddd�Zddd�Zd	d
�Zdd�Z	d
d�Z
dd�Zdd�Zdd�Z
dd�Zdd�Zdd�ZdS)�
InfosetFilterz
U[\dA-F]{5,5}FTcCs.||_||_||_||_||_||_i|_dS)N)�dropXmlnsLocalName�dropXmlnsAttrNs�preventDoubleDashComments�preventDashAtCommentEnd�replaceFormFeedCharacters�preventSingleQuotePubid�replaceCache)�selfr?r@rArBrCrDr
r
r�__init__�szInfosetFilter.__init__NcCsL|jr |jd�r tjdt�dS|jr>|dkr>tjdt�dS|j|�SdS)Nzxmlns:z"Attributes cannot begin with xmlnszhttp://www.w3.org/2000/xmlns/z)Attributes cannot be in the xml namespace)r?�
startswith�warnings�warnrr@�	toXmlName)rF�name�	namespacer
r
r�coerceAttribute�szInfosetFilter.coerceAttributecCs
|j|�S)N)rK)rFrLr
r
r�
coerceElement�szInfosetFilter.coerceElementcCsN|jrJx$d|kr*tjdt�|jdd�}qW|jd�rJtjdt�|d7}|S)Nz--z'Comments cannot contain adjacent dashesz- -rzComments cannot end in a dash� )rArIrJrr;�endswith)rF�datar
r
r�
coerceComment�s

zInfosetFilter.coerceCommentcCs:|jr6x"t|jd��D]}tjdt�qW|jdd�}|S)N�zText cannot contain U+000CrP)rC�range�countrIrJrr;)rFrRr	r
r
r�coerceCharacters�s
zInfosetFilter.coerceCharacterscCsp|}x4tj|�D]&}tjdt�|j|�}|j||�}qW|jrl|jd�dkrltjdt�|jd|jd��}|S)NzCoercing non-XML pubid�'rz!Pubid cannot contain single quote)	�nonPubidCharRegexp�findallrIrJr�getReplacementCharacterr;rD�find)rFrRZ
dataOutputr=�replacementr
r
r�coercePubid�s
zInfosetFilter.coercePubidc
Cs�|d}|dd�}tj|�}|r:tjdt�|j|�}n|}|}ttj|��}x.|D]&}tjdt�|j|�}	|j	||	�}qVW||S)NrrzCoercing non-XML name)
�nonXmlNameFirstBMPRegexprrIrJrr[�set�nonXmlNameBMPRegexprZr;)
rFrL�	nameFirstZnameRest�mZnameFirstOutputZnameRestOutputZreplaceCharsr=r]r
r
rrK�s


zInfosetFilter.toXmlNamecCs$||jkr|j|}n
|j|�}|S)N)rE�
escapeChar)rFr=r]r
r
rr[s

z%InfosetFilter.getReplacementCharactercCs0x*t|jj|��D]}|j||j|��}qW|S)N)r`�replacementRegexprZr;�unescapeChar)rFrLr
r
r
r�fromXmlNameszInfosetFilter.fromXmlNamecCsdt|�}||j|<|S)NzU%05X)rrE)rFr=r]r
r
rrds
zInfosetFilter.escapeCharcCstt|dd�d��S)Nrr$)r*r-)rFZcharcoder
r
rrfszInfosetFilter.unescapeChar)FFFFTF)N)�__name__�
__module__�__qualname__�re�compilererGrNrOrSrWr^rKr[rgrdrfr
r
r
rr>�s"



r>)!Z
__future__rrrrkrIZ	constantsrZbaseCharZideographicZcombiningCharacterZdigitZextenderr+ZletterrLrbrlrrrrr-r&r'r,rr)rar_rY�objectr>r
r
r
r�<module>s20


	


_vendor/html5lib/__pycache__/constants.cpython-36.pyc000064400000201300151733136420016573 0ustar003

�Pf�E��@s�-ddlmZmZmZddlZdZddddddd	d
ddd
ddddddddddddddddddd d!d"d#d$d%d&d'd(d)d*d+d,d-d.d/d,d,d0d1d2d3d4d5d6d7d8d9d:d;d<d=d>d?d@dAdBdCdDdEdFdGdHdIdJdKdLdMdNdOdPdQdRdSdTdUdVdWdXdYdZd[d\d]d^d_d`dadbdcdddedfdgdhdidjdkdldmdndodpdqdrdsdtdudvdwdxdydzd{d|d}d~dd�d�d�d���Zd�d�d�d�d�d�d��Zeed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fg�Z	eed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fg�Z
eed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fgN�Zeed�d�fed�d�fed�d�fed�d�fg�Zeed�d�fed�d�fed�d�fed�d�fed�d�fg�Z
d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�d�d�d�d�d	�d
�d�d�d
�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d �d!�d"�d#�d$�d%�d&�d'�d(�d)�d*�d+�d,�d-�d.�d/�>Z�d0�d1iZ�d2�d3e�d2f�d2�d4e�d2f�d2�d5e�d2f�d2�d6e�d2f�d2�d7e�d2f�d2d�e�d2f�d2�d8e�d2f�d9d�e�d9f�d9�d:e�d9f�d9�d;e�d9fd�d<e�d<f�d<�d2e�d<f�d=�Ze�d>�d?�ej�D��Ze�d@�dA�dB�dC�dDg�Zed�d�d�d�d�g�Zeej�Zeej�Zeej�Zeej�Zeej�Ze�dE�d?�ejD��Z�d|Z ed�d��dFd�d�d�d�d�d�d�d�d�dԐdG�dHg�Z!ed�d�g�Z"ed�d�d�d�d�d�d�g�Z#e�dIg�e�dJg�e�dKg�e�dL�dMg�e�dL�dMg�e�dN�dOg�e�dPg�e�dQ�dRg�e�dS�dR�dT�dUg�e�dVg�e�dWg�e�dR�dXg�e�dR�dX�dYg�e�dR�dXg�e�dR�dZg�e�dR�dX�d[�dZ�dT�dKg�e�dR�dX�dZ�dQg�e�dR�dXg��d\�Z$�d}Z%e�dy�dz�d{�d|�d}g�Z&�d~�d~�d�d�d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��ddÐdĐdŐdƐdǐdȐdɐdʐdːd̐d͐dΐdϐdАdѐdҐdӐd��dѐdԐdՐd֐dÐdאdؐdِdڐdېdܐdݐdސdߐd�d�d�d�d�d�d�d�d�d�d�d�dԐd�d�d�d�d�d�d�d�d�d�d�d�d��d��d��d��d��d��d��d��d��d��d��d��d�d�d�d�d�d�d�d�d�d�d	�d
�d�d�d
�d
�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d �d!�d"�d#�d$�d%�d&�d'�d(�d)�d*�d(�d+�d��d,�d-�d.�d/�d0�d0�d1�d1�d2�d3�d4�d5�d5�d4�d6�d7�dڐd8�d9�d:�d;�d<�d=�d>�d?�d@�dA�dB�dC�dC�dD�dE�dF�dG�dH�dI�dJ�dK�dL�dM�dN�dO�dP�dQ�dR�dS�dT�dT�dU�dV�dW�dX�dY�dZ�d[�d\�d]�d^�d_�d`�da�db�dc�dd�de�df�dg�dh�di�dj�dk�dl�dm�dn�do�dp�dq�dr�ds�dt�dՐd֐du�dv�dw�dx�dy�dz�d{�d|�d}�d~�d�d��d��dאdؐdِd��d��d��dX�d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d"�d��dA�d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��ddÐdĐdŐdƐdǐdȐdɐdʐdːd̐d͐dΐdϐdϐdАdѐdҐdҐdӐdӐdԐdՐd֐dאdאdؐdِdڐdېdܐdݐdސdߐd�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d'�d�d�d�d�d�d�d�d��d��d��d��d��d��d��d��d��d��d��d��d��d��d�d�d�d�d�d�d�d�d�d	�d�d
�d�d�d
�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d �dڐd!�d"�d#�d$�d%�d&�d'�d(�d)�d*�d+�d,�d-�d.�d/�d0�dߐd^�d�d1�d2�d3�d4�d5�d6�d7�d8�d9�d:�d;�d<�d=�d>�d?�d?�d@�dA�dB�dC�dD�d�dE�dF�dG�dH�dF�dI�dI�dJ�dK�dL�d@�dM�dN�dO�dP�dQ�dR�dS�dT�dU�dV�dW�dX�dY�dZ�d[�d\�d]�d^�d^�d_�d`�da�db�dc�dc�dd�de�df�dg�dg�dh�di�dj�dk�dl�dm�dn�do�dp�d1�dq�dr�ds�dt�du�dv�dܐdݐdw�dx�dy�dz�d{�d|�d}�d~�d~�d�d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��dɐdɐd��d��d��d��d��d��d��d��d��d��d��d��d�d�d��d��d��d��d��d��d��d��d��d��ddÐdĐdŐdƐdǐdȐdɐdʐdːd̐d��d͐dΐdϐdY�dАdѐdҐdӐdԐdY�dҐdՐdՐd֐dאdY�d��dؐdؐdِdِd��dڐdېdܐdݐdސdߐd�d�d�d�dk�d�dܐd�d�d��d��d�dݐd��d�d�d�d�d:�d�dm�d�d�d�d�d�d�d�d�d��d��d�d��d
�d��d��d��d��d��d��d��d��d��d��d�d�d�du�du�d�d�d�d�d�d�d	�d
�d�d�d
�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d*�d�d�d�d �d!�d"�d#�d$�d%�d&�d'�d(�d)�d*�d+�d,�d-�d.�dސd��d/�d/�d0�d1�dߐd�d2�d3�d4�d5�d5�d��d6�d,�d,�d7�d8�d9�d:�d;�d<�d=�d>�d?�d$�d@�dA�dB�dB�dC�dD�dE�dF�d��d��dG�dH�dH�d��dI�dJ�dK�dK�dL�dM�dN�dO�dP�dQ�dR�d��dS�dT�dU�dV�dP�dW�dX�dY�dZ�dZ�d[�d��d��d\�d]�d^�d3�d^�d��dX�d_�d��d`�d��d��d��da�db�dc�dd�de�df�dg�dh�di�dj�dk�dl�dm�dn�do�dp�dq�dr�ds�dt�du�dv�dw�dx�dl�dm�dy�dz�d{�d{�dn�dw�dy�dz�d��d|�d}�dԐd~�d�d��dߐd��di�d��dːd��d��dϐd��d��d��d��d��d��d��d��d��dd�d�dΐdΐd��d��dѐd��d��d��d��d��d��d��d��d��d��d��d��dʐdӐd��d��d��d��d��dߐd��dd�d�d��d��d��d��d��d��d��d��d��d��d�d	�d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d�d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��ddÐdĐdĐdŐd��d��d��d��d��dƐdǐd��dȐdɐdʐdːd̐dӐd��d͐dΐdΐdϐdϐdАdѐd�d�d�d��dҐdӐdԐdՐd֐dאdؐdِdڐdېdܐdݐdސd�dߐd�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d��d��d��d��d��d��d��d�d�d�d�d�d�d��d��d��d��d�d�d�d"�d�d�d�d�d�d�d�d�d	�d	�d
�d
�d�d�d�d̐d
�d �d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�d �d�d�d֐d��d�d(�d�dg�d�d�d�d�d�d�d�d�d�d �d!�d"�d#�d$�d%�d&�d'�d(�d�d)�d*�d��d+�d+�d;�d,�d,�d-�d.�d/�d/�d֐d0�d1�d1�d7�d2�d3�d4�d5�d6�d7�d4�d@�d4�d8�d9�d:�d��d;�d<�d=�d8�d9�d>�d��d>�d?�d@�dA�dB�dC�dD�d@�dE�dE�dF�d��dG�dH�dI�dJ�d��d<�dK�dL�dM�dM�dN�dO�dP�dQ�dR�dS�dT�dU�dV�dW�dX�dY�dZ�d[�d\�d]�d^�d_�d}�dՐd`�da�dv�db�dc�dd�de�dX�df�d]�dg�d]�dh�di�di�d^�d_�dj�dk�d$�dl�dm�dn�do�dp�dq�dr�ds�dt�du�dv�dw�dx�dy�dz�d{�d|�da�dv�d}�d~�dܐd�d�d��d��d��d^�do�d�ds�d��dg�d`�d�d�d��du�d��dv�dy�dy�d��d��d��d��d��d��dh�d��du�db�dw�dz�d��df�d��dw�d��d�ds�d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��db�d�d��d��d��dl�d��d��d��d��d��d��di�d��d��d��d��d��d`�d��d�d��d��d��d��d��d��dz�d��d��dw�dݐd��d��dT�dT�d��d��d��d��d��d��d��d��d��dn�d��d��d��d��d��d��d��d��d��d��d��d�d�d�dj�dv�d��d��d��d��d��d��ddÐdÐd��dאdĐd��d��dŐd!�d��dƐdǐd�d��dȐdɐd��dʐd��dːd̐d̐d͐dΐd��dϐdАdѐdҐd��dӐdԐdՐdƐd֐dאd̐dؐdِdڐd̐dېdېd��d��d��d��d��dܐdݐdސdːdߐd�d�d�d��d�d�dx�dx�d�d��d�d��d��d��d�d��d��d��d��d��d��d��d��d��dАd�d�d�d�d�d�d�dϐd�d�d�d��d�d�d��d�d��d��d��d��d��d��d��d͐d�d�d�d��d�d�d��d�d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��dӐd��d��d��d��d��d��dÐdŐdĐd��d͐d��dɐdʐdʐd͐d��d��d��d��d�dd��dd�dÐdĐd�d�dȐdǐdȐd�d��d�d�d��d��d��d��d��d�d�d�d�d�d	�d
�d�d�d
�d�d�d�d�d�d�d�d�d�d�dw�dw�d�dS�d�d�dT�dU�d�d�d�dV�d�d�d��d�d�d �d!�d"�d#�d#�d$�d%�dِd��dQ�d&�d'�d�d(�d)�d*�d+�d,�d��d-�d.�d/�d��d0�dR�d1�d2�d2�d3�d3�d4�d4�d5�d6�d7�d8�d2�d9�d9�d:�d;�d;�d��d<�d=�d=�d>�dސd?�d?�dސd@�dA�d�dB�dC�dD�dE�du�dF�dG�dH�dI�d��dJ�dK�dߐdL�d�dM�d�dN�dO�d"�dP�d��dQ�dR�d�d�dS�dT�d�dU�dV�dW�dW�d�dX�dY�d�d�d�dY�d�d�dZ�d[�d\�d�d]�d�d[�dZ�d\�d��d^�d_�d`�d��d��d�da�db�dc�dd�de�d2�df�dg�dh�d)�di�dj�dǐd��d��d#�dڐdk�d��dl�dm�dn�d5�do�d�dp�dq�d�dr�dr�d�ds�d
�dt�du�dv�d%�d��dw�dx�dy�dz�d{�d|�d��d�d}�d~�d�d��d��d��d��d��d�d~�d��d��d��d��d��d��d�d$�d�d!�d��d��d��d��d�d��d�d�d��d��d��d�dy�d�d�d�d�d��dz�d��d��dʐd�d�d��d��d��d��d��d��d�d��d��d��d��d��d��d��d��d��d��d%�d�d��d��d��d��d��d�d��d��d��d��d��d��dA�d��d��d��dC�dB�d��d��d��d��d��d��dD�d��d��d��d��d��d �d��d��d��d��d��d��d��d��d��d��d�d��d��d��d��dސd��d��d��d��d��dV�d��dW�dW�d��d��d��ddÐdĐdŐd^�d��dƐdǐd��dȐdɐdʐdːd̐d͐dΐdϐdАdѐdѐdސd7�dҐd<�dӐd8�d9�d8�d9�d:�d;�d:�d;�d6�d6�d
�d
�d�dԐd��dȐd>�dՐd�dŐdI�d��d֐dאdؐd@�dِdڐdېdܐdݐdސd֐d@�dאdܐdېdߐd�d�dA�d��dC�dB�d��d��d��dD�dE�d�d�d�d�d�d�d�dG�d�d�d�dH�d�d�d�d�d�d�d�d�dG�dH�d�d�d�d�d�d�d�d!�d��d��d��d��d��d��d��d�d��d��d��d[�d��d��dR�dR�d��d��d��dY�dV�dU�dY�dV�d�d�d͐d�d�d#�d�d�d3�d�d�d�d�d�d�d��d�dJ�d	�d��d��dn�d
�d��d�d�d
�d�d
�d�d�d�d�d�d�d�d�dY�d�dܐd�d�d�d1�d�d�d�d�d�dr�d�dt�d�d�d�d�dq�d�d�d �d �d!�d"�d#�dѐdѐd$�d%�d1�ds�dq�d�dn�d&�dy�d&�d'�d(�d(�d)�d*�d+�d,�d-�d.�d	�d��d'�d/�d/�d0�dݐd1�d2�dېd3�dŐdW�d��dI�dL�d��ds�d��d��d4�d5�d6�d7�d��dl�d�d8�d�d0�d9�d:�d;�d��d��d<�dl�d��dǐd=�d��d�d>�d5�d4�d7�d6�d?�d@�dA�d��dB�dC�dD�dE�dC�d��d��dF�d:�d�dm�d�dG�dؐd��dH�dאd�d��dI�d�dJ�d�d�dِd��dK�d�d�d�d��d��dL�dL�dM�dN�dO�dP�dP�dQ�dR�dS�dT�dU�dV�dV�dW�dX�dY�dZ�d��d[�d\�d]�d^�d_�d`�da�db�dc���Z'�dd�dD�dАde�d��dݐd�d�d�d��dO�dE�d,�d��dѐdf�d��dg�dh�dݐd��dܐd��d5�d�d��d͐dJ�d��d��d�di�dX�d��dj�"Z(d�dk�dl�dm�dn�do�dp�dq�dr�Z)ee)�dse)�dte)�dug�Z*e�dv�d?�ej�D��Z+�dwe+d�<G�dx�dy��dye,�Z-G�dz�d{��d{e.�Z/dS(~�)�absolute_import�division�unicode_literalsNz5Null character in input stream, replaced with U+FFFD.zInvalid codepoint in stream.z&Solidus (/) incorrectly placed in tag.z.Incorrect CR newline entity, replaced with LF.z9Entity used with illegal number (windows-1252 reference).zPNumeric entity couldn't be converted to character (codepoint U+%(charAsInt)08x).zBNumeric entity represents an illegal codepoint: U+%(charAsInt)08x.z#Numeric entity didn't end with ';'.z1Numeric entity expected. Got end of file instead.z'Numeric entity expected but none found.z!Named entity didn't end with ';'.z Named entity expected. Got none.z'End tag contains unexpected attributes.z.End tag contains unexpected self-closing flag.z#Expected tag name. Got '>' instead.zSExpected tag name. Got '?' instead. (HTML doesn't support processing instructions.)z-Expected tag name. Got something else insteadz6Expected closing tag. Got '>' instead. Ignoring '</>'.z-Expected closing tag. Unexpected end of file.z<Expected closing tag. Unexpected character '%(data)s' found.z'Unexpected end of file in the tag name.z8Unexpected end of file. Expected attribute name instead.z)Unexpected end of file in attribute name.z#Invalid character in attribute namez#Dropped duplicate attribute on tag.z1Unexpected end of file. 
Expected = or end of tag.z1Unexpected end of file. Expected attribute value.z*Expected attribute value. Got '>' instead.z"Unexpected = in unquoted attributez*Unexpected character in unquoted attributez*Unexpected character after attribute name.z+Unexpected character after attribute value.z.Unexpected end of file in attribute value (").z.Unexpected end of file in attribute value (').z*Unexpected end of file in attribute value.z)Unexpected end of file in tag. Expected >z/Unexpected character after / in tag. Expected >z&Expected '--' or 'DOCTYPE'. Not found.z Unexpected ! after -- in commentz$Unexpected space after -- in commentzIncorrect comment.z"Unexpected end of file in comment.z%Unexpected end of file in comment (-)z+Unexpected '-' after '--' found in comment.z'Unexpected end of file in comment (--).z&Unexpected character in comment found.z(No space after literal string 'DOCTYPE'.z.Unexpected > character. Expected DOCTYPE name.z.Unexpected end of file. Expected DOCTYPE name.z'Unexpected end of file in DOCTYPE name.z"Unexpected end of file in DOCTYPE.z%Expected space or '>'. Got '%(data)s'zUnexpected end of DOCTYPE.z Unexpected character in DOCTYPE.zXXX innerHTML EOFzUnexpected DOCTYPE. Ignored.z%html needs to be the first start tag.z)Unexpected End of file. Expected DOCTYPE.zErroneous DOCTYPE.z2Unexpected non-space characters. Expected DOCTYPE.z2Unexpected start tag (%(name)s). Expected DOCTYPE.z0Unexpected end tag (%(name)s). Expected DOCTYPE.z?Unexpected end tag (%(name)s) after the (implied) root element.z4Unexpected end of file. Expected end tag (%(name)s).z4Unexpected start tag head in existing head. Ignored.z'Unexpected end tag (%(name)s). Ignored.z;Unexpected start tag (%(name)s) that can be in head. Moved.z Unexpected start tag (%(name)s).zMissing end tag (%(name)s).zMissing end tags (%(name)s).zCUnexpected start tag (%(startName)s) implies end tag (%(endName)s).z@Unexpected start tag (%(originalName)s). 
Treated as %(newName)s.z,Unexpected start tag %(name)s. Don't use it!z'Unexpected start tag %(name)s. Ignored.zEUnexpected end tag (%(gotName)s). Missing end tag (%(expectedName)s).z:End tag (%(name)s) seen too early. Expected other end tag.zFUnexpected end tag (%(gotName)s). Expected end tag (%(expectedName)s).z+End tag (%(name)s) seen too early. Ignored.zQEnd tag (%(name)s) violates step 1, paragraph 1 of the adoption agency algorithm.zQEnd tag (%(name)s) violates step 1, paragraph 2 of the adoption agency algorithm.zQEnd tag (%(name)s) violates step 1, paragraph 3 of the adoption agency algorithm.zQEnd tag (%(name)s) violates step 4, paragraph 4 of the adoption agency algorithm.z>Unexpected end tag (%(originalName)s). Treated as %(newName)s.z'This element (%(name)s) has no end tag.z9Unexpected implied end tag (%(name)s) in the table phase.z>Unexpected implied end tag (%(name)s) in the table body phase.zDUnexpected non-space characters in table context caused voodoo mode.z3Unexpected input with type hidden in table context.z!Unexpected form in table context.zDUnexpected start tag (%(name)s) in table context caused voodoo mode.zBUnexpected end tag (%(name)s) in table context caused voodoo mode.zCUnexpected table cell start tag (%(name)s) in the table body phase.zFGot table cell end tag (%(name)s) while required end tags are missing.z?Unexpected end tag (%(name)s) in the table body phase. Ignored.z=Unexpected implied end tag (%(name)s) in the table row phase.z>Unexpected end tag (%(name)s) in the table row phase. Ignored.zJUnexpected select start tag in the select phase treated as select end tag.z/Unexpected input start tag in the select phase.zBUnexpected start tag token (%(name)s in the select phase. Ignored.z;Unexpected end tag (%(name)s) in the select phase. 
Ignored.zKUnexpected table element start tag (%(name)s) in the select in table phase.zIUnexpected table element end tag (%(name)s) in the select in table phase.z8Unexpected non-space characters in the after body phase.z>Unexpected start tag token (%(name)s) in the after body phase.z<Unexpected end tag token (%(name)s) in the after body phase.z@Unexpected characters in the frameset phase. Characters ignored.zEUnexpected start tag token (%(name)s) in the frameset phase. Ignored.zFUnexpected end tag token (frameset) in the frameset phase (innerHTML).zCUnexpected end tag token (%(name)s) in the frameset phase. Ignored.zEUnexpected non-space characters in the after frameset phase. Ignored.zEUnexpected start tag (%(name)s) in the after frameset phase. Ignored.zCUnexpected end tag (%(name)s) in the after frameset phase. Ignored.z(Unexpected end tag after body(innerHtml)z6Unexpected non-space characters. Expected end of file.z6Unexpected start tag (%(name)s). Expected end of file.z4Unexpected end tag (%(name)s). Expected end of file.z/Unexpected end of file. Expected table content.z0Unexpected end of file. Expected select content.z2Unexpected end of file. Expected frameset content.z0Unexpected end of file. Expected script content.z0Unexpected end of file. Expected foreign contentz0Trailing solidus not allowed on element %(name)sz2Element %(name)s not allowed in a non-html contextz*Unexpected end tag (%(name)s) before html.z9Element %(name)s not allowed in a inhead-noscript contextz8Unexpected end of file. Expected inhead-noscript contentz@Unexpected non-space character. 
Expected inhead-noscript contentz0Undefined error (this sucks and should be fixed))�znull-characterzinvalid-codepointzincorrectly-placed-soliduszincorrect-cr-newline-entityzillegal-windows-1252-entityzcant-convert-numeric-entityz$illegal-codepoint-for-numeric-entityz numeric-entity-without-semicolonz#expected-numeric-entity-but-got-eofzexpected-numeric-entityznamed-entity-without-semicolonzexpected-named-entityzattributes-in-end-tagzself-closing-flag-on-end-tagz'expected-tag-name-but-got-right-bracketz'expected-tag-name-but-got-question-markzexpected-tag-namez*expected-closing-tag-but-got-right-bracketz expected-closing-tag-but-got-eofz!expected-closing-tag-but-got-charzeof-in-tag-namez#expected-attribute-name-but-got-eofzeof-in-attribute-namez#invalid-character-in-attribute-namezduplicate-attributez$expected-end-of-tag-name-but-got-eofz$expected-attribute-value-but-got-eofz.expected-attribute-value-but-got-right-bracketz"equals-in-unquoted-attribute-valuez0unexpected-character-in-unquoted-attribute-valuez&invalid-character-after-attribute-namez*unexpected-character-after-attribute-valuez#eof-in-attribute-value-double-quotez#eof-in-attribute-value-single-quotez eof-in-attribute-value-no-quotesz#unexpected-EOF-after-solidus-in-tagz)unexpected-character-after-solidus-in-tagzexpected-dashes-or-doctypez,unexpected-bang-after-double-dash-in-commentz-unexpected-space-after-double-dash-in-commentzincorrect-commentzeof-in-commentzeof-in-comment-end-dashz,unexpected-dash-after-double-dash-in-commentzeof-in-comment-double-dashzeof-in-comment-end-space-statezeof-in-comment-end-bang-statezunexpected-char-in-commentzneed-space-after-doctypez+expected-doctype-name-but-got-right-bracketz!expected-doctype-name-but-got-eofzeof-in-doctype-namezeof-in-doctypez*expected-space-or-right-bracket-in-doctypezunexpected-end-of-doctypezunexpected-char-in-doctypezeof-in-innerhtmlzunexpected-doctypez
non-html-rootzexpected-doctype-but-got-eofzunknown-doctypezexpected-doctype-but-got-charsz"expected-doctype-but-got-start-tagz expected-doctype-but-got-end-tagzend-tag-after-implied-rootz&expected-named-closing-tag-but-got-eofz!two-heads-are-not-better-than-onezunexpected-end-tagz#unexpected-start-tag-out-of-my-headzunexpected-start-tagzmissing-end-tagzmissing-end-tagsz$unexpected-start-tag-implies-end-tagzunexpected-start-tag-treated-aszdeprecated-tagzunexpected-start-tag-ignoredz$expected-one-end-tag-but-got-anotherzend-tag-too-earlyzend-tag-too-early-namedzend-tag-too-early-ignoredzadoption-agency-1.1zadoption-agency-1.2zadoption-agency-1.3zadoption-agency-4.4zunexpected-end-tag-treated-asz
no-end-tagz#unexpected-implied-end-tag-in-tablez(unexpected-implied-end-tag-in-table-bodyz$unexpected-char-implies-table-voodooz unexpected-hidden-input-in-tablezunexpected-form-in-tablez)unexpected-start-tag-implies-table-voodooz'unexpected-end-tag-implies-table-voodoozunexpected-cell-in-table-bodyzunexpected-cell-end-tagz unexpected-end-tag-in-table-bodyz'unexpected-implied-end-tag-in-table-rowzunexpected-end-tag-in-table-rowzunexpected-select-in-selectzunexpected-input-in-selectzunexpected-start-tag-in-selectzunexpected-end-tag-in-selectz5unexpected-table-element-start-tag-in-select-in-tablez3unexpected-table-element-end-tag-in-select-in-tablezunexpected-char-after-bodyzunexpected-start-tag-after-bodyzunexpected-end-tag-after-bodyzunexpected-char-in-framesetz unexpected-start-tag-in-framesetz)unexpected-frameset-in-frameset-innerhtmlzunexpected-end-tag-in-framesetzunexpected-char-after-framesetz#unexpected-start-tag-after-framesetz!unexpected-end-tag-after-framesetz'unexpected-end-tag-after-body-innerhtmlzexpected-eof-but-got-charzexpected-eof-but-got-start-tagzexpected-eof-but-got-end-tagzeof-in-tablez
eof-in-selectzeof-in-framesetzeof-in-script-in-scriptzeof-in-foreign-landsz&non-void-element-with-trailing-solidusz*unexpected-html-element-in-foreign-contentzunexpected-end-tag-before-htmlzunexpected-inhead-noscript-tagzeof-in-head-noscriptzchar-in-head-noscriptzXXX-undefined-errorzhttp://www.w3.org/1999/xhtmlz"http://www.w3.org/1998/Math/MathMLzhttp://www.w3.org/2000/svgzhttp://www.w3.org/1999/xlinkz$http://www.w3.org/XML/1998/namespacezhttp://www.w3.org/2000/xmlns/)�html�mathml�svg�xlink�xml�xmlnsrZappletZcaptionZmarquee�object�tableZtdZthrZmi�moZmnZmsZmtextzannotation-xmlrZ
foreignObjectZdesc�title�a�bZbig�codeZemZfont�iZnobr�sZsmallZstrikeZstrongZtt�uZaddressZareaZarticleZaside�baseZbasefontZbgsoundZ
blockquoteZbody�br�button�center�colZcolgroup�commandZdd�details�dirZdivZdlZdtZembed�fieldsetZfigureZfooterZform�frameZframeset�h1�h2�h3�h4�h5�h6�head�header�hrZiframeZimage�img�inputZisindexZli�linkZlisting�menu�metaZnavZnoembedZnoframesZnoscriptZol�pZparamZ	plaintextZpre�scriptZsection�select�styleZtbodyZtextareaZtfootZtheadZtrZulZwbrZxmpz
annotaion-xmlZ
attributeNameZ
attributeTypeZ
baseFrequencyZbaseProfileZcalcModeZ
clipPathUnitsZcontentScriptTypeZcontentStyleTypeZdiffuseConstantZedgeModeZexternalResourcesRequiredZ	filterResZfilterUnitsZglyphRefZgradientTransformZ
gradientUnitsZkernelMatrixZkernelUnitLengthZ	keyPointsZ
keySplinesZkeyTimesZlengthAdjustZlimitingConeAngleZmarkerHeightZmarkerUnitsZmarkerWidthZmaskContentUnitsZ	maskUnitsZ
numOctavesZ
pathLengthZpatternContentUnitsZpatternTransformZpatternUnitsZ	pointsAtXZ	pointsAtYZ	pointsAtZZ
preserveAlphaZpreserveAspectRatioZprimitiveUnitsZrefXZrefYZrepeatCountZ	repeatDurZrequiredExtensionsZrequiredFeaturesZspecularConstantZspecularExponentZspreadMethodZstartOffsetZstdDeviationZstitchTilesZsurfaceScaleZsystemLanguageZtableValuesZtargetXZtargetYZ
textLengthZviewBoxZ
viewTargetZxChannelSelectorZyChannelSelectorZ
zoomAndPan)>Z
attributenameZ
attributetypeZ
basefrequencyZbaseprofileZcalcmodeZ
clippathunitsZcontentscripttypeZcontentstyletypeZdiffuseconstantZedgemodeZexternalresourcesrequiredZ	filterresZfilterunitsZglyphrefZgradienttransformZ
gradientunitsZkernelmatrixZkernelunitlengthZ	keypointsZ
keysplinesZkeytimesZlengthadjustZlimitingconeangleZmarkerheightZmarkerunitsZmarkerwidthZmaskcontentunitsZ	maskunitsZ
numoctavesZ
pathlengthZpatterncontentunitsZpatterntransformZpatternunitsZ	pointsatxZ	pointsatyZ	pointsatzZ
preservealphaZpreserveaspectratioZprimitiveunitsZrefxZrefyZrepeatcountZ	repeatdurZrequiredextensionsZrequiredfeaturesZspecularconstantZspecularexponentZspreadmethodZstartoffsetZstddeviationZstitchtilesZsurfacescaleZsystemlanguageZtablevaluesZtargetxZtargetyZ
textlengthZviewboxZ
viewtargetZxchannelselectorZychannelselectorZ
zoomandpanZ
definitionurlZ
definitionURLrZactuateZarcroleZhrefZroleZshow�typer	ZlangZspacer
)z
xlink:actuatez
xlink:arcrolez
xlink:hrefz
xlink:rolez
xlink:showzxlink:titlez
xlink:typezxml:basezxml:langz	xml:spacer
zxmlns:xlinkcCs"g|]\}\}}}||f|f�qS�r2)�.0Zqname�prefixZlocal�nsr2r2�/usr/lib/python3.6/constants.py�
<listcomp>
sr7�	�
�� �
cCs g|]}t|�t|j��f�qSr2)�ord�lower)r3�cr2r2r6r7#szevent-source�sourceZtrackZ
irrelevantZscopedZismapZautoplayZcontrolsZdefer�async�openZmultipleZdisabledZhiddenZchecked�defaultZnoshadeZ
autosubmit�readonlyZselectedZ	autofocusZrequired)�r0r(ZaudioZvideor.rZdatagridrr'r+rZoptionZoptgrouprr)r/�output� �� �� �& �  �! ���0 �`�9 �R�}� � � � �" � � ���"!�a�: �S�~�xzlt;zgt;zamp;zapos;zquot;�Æ�&�ÁuĂ�ÂuАu𝔄�ÀuΑuĀu⩓uĄu𝔸u⁡�Åu𝒜u≔�Ã�Äu∖u⫧u⌆uБu∵uℬuΒu𝔅u𝔹u˘u≎uЧ�©uĆu⋒uⅅuℭuČ�ÇuĈu∰uĊ�¸�·uΧu⊙u⊖u⊕u⊗u∲u”u’u∷u⩴u≡u∯u∮uℂu∐u∳u⨯u𝒞u⋓u≍u⤑uЂuЅuЏu‡u↡u⫤uĎuДu∇uΔu𝔇�´u˙u˝�`u˜u⋄uⅆu𝔻�¨u⃜u≐u⇓u⇐u⇔u⟸u⟺u⟹u⇒u⊨u⇑u⇕u∥u↓u⤓u⇵ȗu⥐u⥞u↽u⥖u⥟u⇁u⥗u⊤u↧u𝒟uĐuŊ�Ð�ÉuĚ�ÊuЭuĖu𝔈�Èu∈uĒu◻u▫uĘu𝔼uΕu⩵u≂u⇌uℰu⩳uΗ�Ëu∃uⅇuФu𝔉u◼u▪u𝔽u∀uℱuЃ�>uΓuϜuĞuĢuĜuГuĠu𝔊u⋙u𝔾u≥u⋛u≧u⪢u≷u⩾u≳u𝒢u≫uЪuˇ�^uĤuℌuℋuℍu─uĦu≏uЕuIJuЁ�Í�ÎuИuİuℑ�ÌuĪuⅈu∬u∫u⋂u⁣u⁢uĮu𝕀uΙuℐuĨuІ�ÏuĴuЙu𝔍u𝕁u𝒥uЈuЄuХuЌuΚuĶuКu𝔎u𝕂u𝒦uЉ�<uĹuΛu⟪uℒu↞uĽuĻuЛu⟨u←u⇤u⇆u⌈u⟦u⥡u⇃u⥙u⌊u↔u⥎u⊣u↤u⥚u⊲u⧏u⊴u⥑u⥠u↿u⥘u↼u⥒u⋚u≦u≶u⪡u⩽u≲u𝔏u⋘u⇚uĿu⟵u⟷u⟶u𝕃u↙u↘u↰uŁu≪u⤅uМu uℳu𝔐u∓u𝕄uΜuЊuŃuŇuŅuНu​u𝔑u⁠� uℕu⫬u≢u≭u∦u∉u≠u≂̸u∄u≯u≱u≧̸u≫̸u≹u⩾̸u≵u≎̸u≏̸u⋪u⧏̸u⋬u≮u≰u≸u≪̸u⩽̸u≴u⪢̸u⪡̸u⊀u⪯̸u⋠u∌u⋫u⧐̸u⋭u⊏̸u⋢u⊐̸u⋣u⊂⃒u⊈u⊁u⪰̸u⋡u≿̸u⊃⃒u⊉u≁u≄u≇u≉u∤u𝒩�ÑuΝuŒ�Ó�ÔuОuŐu𝔒�ÒuŌuΩuΟu𝕆u“u‘u⩔u𝒪�Ø�Õu⨷�Öu‾u⏞u⎴u⏜u∂uПu𝔓uΦuΠ�±uℙu⪻u≺u⪯u≼u≾u″u∏u∝u𝒫uΨ�"u𝔔uℚu𝒬u⤐�®uŔu⟫u↠u⤖uŘuŖuРuℜu∋u⇋u⥯uΡu⟩u→u⇥u⇄u⌉u⟧u⥝u⇂u⥕u⌋u⊢u↦u⥛u⊳u⧐u⊵u⥏u⥜u↾u⥔u⇀u⥓uℝu⥰u⇛uℛu↱u⧴uЩuШuЬuŚu⪼uŠuŞuŜuСu𝔖u↑uΣu∘u𝕊u√u□u⊓u⊏u⊑u⊐u⊒u⊔u𝒮u⋆u⋐u⊆u≻u⪰u≽u≿u∑u⋑u⊃u⊇�Þu™uЋuЦuΤuŤuŢuТu𝔗u∴uΘu  u u∼u≃u≅u≈u𝕋u⃛u𝒯uŦ�Úu↟u⥉uЎuŬ�ÛuУuŰu𝔘�ÙuŪ�_u⏟u⎵u⏝u⋃u⊎uŲu𝕌u⤒u⇅u↕u⥮u⊥u↥u↖u↗uϒuΥuŮu𝒰uŨ�Üu⊫u⫫uВu⊩u⫦u⋁u‖u∣�|u❘u≀u u𝔙u𝕍u𝒱u⊪uŴu⋀u𝔚u𝕎u𝒲u𝔛uΞu𝕏u𝒳uЯuЇuЮ�ÝuŶuЫu𝔜u𝕐u𝒴uŸuЖuŹuŽuЗuŻuΖuℨuℤu𝒵�áuău∾u∾̳u∿�âuа�æu𝔞�àuℵuαuāu⨿u∧u⩕u⩜u⩘u⩚u∠u⦤u∡u⦨u⦩u⦪u⦫u⦬u⦭u⦮u⦯u∟u⊾u⦝u∢u⍼uąu𝕒u⩰u⩯u≊u≋�'�åu𝒶�*�ã�äu⨑u⫭u≌u϶u‵u∽u⋍u⊽u⌅u⎶uбu„u⦰uβuℶu≬u𝔟u◯u⨀u⨁u⨂u⨆u★u▽u△u⨄u⤍u⧫u▴u▾u◂u▸u␣u▒u░u▓u█u=⃥u≡⃥u⌐u𝕓u⋈u╗u╔u╖u╓u═u╦u╩u╤u╧u╝u╚u╜u╙u║u╬u╣u╠u╫u╢u╟u⧉u╕u╒u┐u┌u╥u╨u┬u┴u⊟u⊞u⊠u╛u╘u┘u└u│u╪u╡u╞u┼u┤u├�¦u𝒷u⁏�\u⧅u⟈u•u⪮uću∩u⩄u⩉u⩋u⩇u⩀u∩︀u⁁u⩍uč�çuĉu⩌u⩐uċu⦲�¢u𝔠uчu✓uχu○u⧃uˆu≗u↺u↻uⓈu⊛u⊚u⊝u⨐u⫯u⧂u♣�:�,�@u∁u⩭u𝕔u℗u↵u✗u𝒸u⫏u⫑u⫐u⫒u⋯u⤸u⤵u⋞u⋟u↶u⤽u∪u⩈u⩆u⩊u⊍u⩅u∪︀u↷u⤼u⋎u⋏�¤u∱u⌭u⥥u†uℸu‐u⤏uďuдu⇊u⩷�°uδu⦱u⥿u𝔡u♦uϝu⋲�÷u⋇uђu⌞u⌍�$u𝕕u≑u∸u∔u⊡u⌟u⌌u𝒹uѕu⧶uđu⋱u▿u⦦uџu⟿�éu⩮uěu≖�êu≕uэuėu≒u𝔢u⪚�èu⪖u⪘u⪙u⏧uℓu⪕u⪗uēu∅u u u uŋu 
uęu𝕖u⋕u⧣u⩱uεuϵ�=u≟u⩸u⧥u≓u⥱uℯuη�ð�ëu€�!uфu♀uffiuffufflu𝔣ufiZfju♭uflu▱uƒu𝕗u⋔u⫙u⨍�½u⅓�¼u⅕u⅙u⅛u⅔u⅖�¾u⅗u⅜u⅘u⅚u⅝u⅞u⁄u⌢u𝒻u⪌uǵuγu⪆uğuĝuгuġu⪩u⪀u⪂u⪄u⋛︀u⪔u𝔤uℷuѓu⪒u⪥u⪤u≩u⪊u⪈u⋧u𝕘uℊu⪎u⪐u⪧u⩺u⋗u⦕u⩼u⥸u≩︀uъu⥈u↭uℏuĥu♥u…u⊹u𝔥u⤥u⤦u⇿u∻u↩u↪u𝕙u―u𝒽uħu⁃�í�îuиuе�¡u𝔦�ìu⨌u∭u⧜u℩uijuīuıu⊷uƵu℅u∞u⧝u⊺u⨗u⨼uёuįu𝕚uι�¿u𝒾u⋹u⋵u⋴u⋳uĩuі�ïuĵuйu𝔧uȷu𝕛u𝒿uјuєuκuϰuķuкu𝔨uĸuхuќu𝕜u𝓀u⤛u⤎u⪋u⥢uĺu⦴uλu⦑u⪅�«u⤟u⤝u↫u⤹u⥳u↢u⪫u⤙u⪭u⪭︀u⤌u❲�{�[u⦋u⦏u⦍uľuļuлu⤶u⥧u⥋u↲u≤u⇇u⋋u⪨u⩿u⪁u⪃u⋚︀u⪓u⋖u⥼u𝔩u⪑u⥪u▄uљu⥫u◺uŀu⎰u≨u⪉u⪇u⋦u⟬u⇽u⟼u↬u⦅u𝕝u⨭u⨴u∗u◊�(u⦓u⥭u‎u⊿u‹u𝓁u⪍u⪏u‚ułu⪦u⩹u⋉u⥶u⩻u⦖u◃u⥊u⥦u≨︀u∺�¯u♂u✠u▮u⨩uмu—u𝔪u℧�µu⫰u−u⨪u⫛u⊧u𝕞u𝓂uμu⊸u⋙̸u≫⃒u⇍u⇎u⋘̸u≪⃒u⇏u⊯u⊮uńu∠⃒u⩰̸u≋̸uʼnu♮u⩃uňuņu⩭̸u⩂uнu–u⇗u⤤u≐̸u⤨u𝔫u↮u⫲u⋼u⋺uњu≦̸u↚u‥u𝕟�¬u⋹̸u⋵̸u⋷u⋶u⋾u⋽u⫽⃥u∂̸u⨔u↛u⤳̸u↝̸u𝓃u⊄u⫅̸u⊅u⫆̸�ñuν�#u№u u⊭u⤄u≍⃒u⊬u≥⃒u>⃒u⧞u⤂u≤⃒u<⃒u⊴⃒u⤃u⊵⃒u∼⃒u⇖u⤣u⤧�ó�ôuоuőu⨸u⦼uœu⦿u𝔬u˛�òu⧁u⦵u⦾u⦻u⧀uōuωuοu⦶u𝕠u⦷u⦹u∨u⩝uℴ�ª�ºu⊶u⩖u⩗u⩛�øu⊘�õu⨶�öu⌽�¶u⫳u⫽uп�%�.u‰u‱u𝔭uφuϕu☎uπuϖuℎ�+u⨣u⨢u⨥u⩲u⨦u⨧u⨕u𝕡�£u⪳u⪷u⪹u⪵u⋨u′u⌮u⌒u⌓u⊰u𝓅uψu u𝔮u𝕢u⁗u𝓆u⨖�?u⤜u⥤u∽̱uŕu⦳u⦒u⦥�»u⥵u⤠u⤳u⤞u⥅u⥴u↣u↝u⤚u∶u❳�}�]u⦌u⦎u⦐uřuŗuрu⤷u⥩u↳u▭u⥽u𝔯u⥬uρuϱu⇉u⋌u˚u‏u⎱u⫮u⟭u⇾u⦆u𝕣u⨮u⨵�)u⦔u⨒u›u𝓇u⋊u▹u⧎u⥨u℞uśu⪴u⪸ušuşuŝu⪶u⪺u⋩u⨓uсu⋅u⩦u⇘�§�;u⤩u✶u𝔰u♯uщuш�­uσuςu⩪u⪞u⪠u⪝u⪟u≆u⨤u⥲u⨳u⧤u⌣u⪪u⪬u⪬︀uь�/u⧄u⌿u𝕤u♠u⊓︀u⊔︀u𝓈u☆u⊂u⫅u⪽u⫃u⫁u⫋u⊊u⪿u⥹u⫇u⫕u⫓u♪�¹�²�³u⫆u⪾u⫘u⫄u⟉u⫗u⥻u⫂u⫌u⊋u⫀u⫈u⫔u⫖u⇙u⤪�ßu⌖uτuťuţuтu⌕u𝔱uθuϑ�þ�×u⨱u⨰u⌶u⫱u𝕥u⫚u‴u▵u≜u◬u⨺u⨹u⧍u⨻u⏢u𝓉uцuћuŧu⥣�úuўuŭ�ûuуuűu⥾u𝔲�ùu▀u⌜u⌏u◸uūuųu𝕦uυu⇈u⌝u⌎uůu◹u𝓊u⋰uũ�üu⦧u⫨u⫩u⦜u⊊︀u⫋︀u⊋︀u⫌︀uвu⊻u≚u⋮u𝔳u𝕧u𝓋u⦚uŵu⩟u≙u℘u𝔴u𝕨u𝓌u𝔵uξu⋻u𝕩u𝓍�ýuяuŷuы�¥u𝔶uїu𝕪u𝓎uю�ÿuźužuзużuζu𝔷uжu⇝u𝕫u𝓏u‍u‌(�ZAEligzAElig;ZAMPzAMP;ZAacutezAacute;zAbreve;ZAcirczAcirc;zAcy;zAfr;ZAgravezAgrave;zAlpha;zAmacr;zAnd;zAogon;zAopf;zApplyFunction;ZAringzAring;zAscr;zAssign;ZAtildezAtilde;ZAumlzAuml;z
Backslash;zBarv;zBarwed;zBcy;zBecause;zBernoullis;zBeta;zBfr;zBopf;zBreve;zBscr;zBumpeq;zCHcy;ZCOPYzCOPY;zCacute;zCap;zCapitalDifferentialD;zCayleys;zCcaron;ZCcedilzCcedil;zCcirc;zCconint;zCdot;zCedilla;z
CenterDot;zCfr;zChi;z
CircleDot;zCircleMinus;zCirclePlus;zCircleTimes;zClockwiseContourIntegral;zCloseCurlyDoubleQuote;zCloseCurlyQuote;zColon;zColone;z
Congruent;zConint;zContourIntegral;zCopf;z
Coproduct;z CounterClockwiseContourIntegral;zCross;zCscr;zCup;zCupCap;zDD;z	DDotrahd;zDJcy;zDScy;zDZcy;zDagger;zDarr;zDashv;zDcaron;zDcy;zDel;zDelta;zDfr;zDiacriticalAcute;zDiacriticalDot;zDiacriticalDoubleAcute;zDiacriticalGrave;zDiacriticalTilde;zDiamond;zDifferentialD;zDopf;zDot;zDotDot;z	DotEqual;zDoubleContourIntegral;z
DoubleDot;zDoubleDownArrow;zDoubleLeftArrow;zDoubleLeftRightArrow;zDoubleLeftTee;zDoubleLongLeftArrow;zDoubleLongLeftRightArrow;zDoubleLongRightArrow;zDoubleRightArrow;zDoubleRightTee;zDoubleUpArrow;zDoubleUpDownArrow;zDoubleVerticalBar;z
DownArrow;z
DownArrowBar;zDownArrowUpArrow;z
DownBreve;zDownLeftRightVector;zDownLeftTeeVector;zDownLeftVector;zDownLeftVectorBar;zDownRightTeeVector;zDownRightVector;zDownRightVectorBar;zDownTee;z
DownTeeArrow;z
Downarrow;zDscr;zDstrok;zENG;ZETHzETH;ZEacutezEacute;zEcaron;ZEcirczEcirc;zEcy;zEdot;zEfr;ZEgravezEgrave;zElement;zEmacr;zEmptySmallSquare;zEmptyVerySmallSquare;zEogon;zEopf;zEpsilon;zEqual;zEqualTilde;zEquilibrium;zEscr;zEsim;zEta;ZEumlzEuml;zExists;z
ExponentialE;zFcy;zFfr;zFilledSmallSquare;zFilledVerySmallSquare;zFopf;zForAll;zFouriertrf;zFscr;zGJcy;ZGTzGT;zGamma;zGammad;zGbreve;zGcedil;zGcirc;zGcy;zGdot;zGfr;zGg;zGopf;z
GreaterEqual;zGreaterEqualLess;zGreaterFullEqual;zGreaterGreater;zGreaterLess;zGreaterSlantEqual;z
GreaterTilde;zGscr;zGt;zHARDcy;zHacek;zHat;zHcirc;zHfr;z
HilbertSpace;zHopf;zHorizontalLine;zHscr;zHstrok;z
HumpDownHump;z
HumpEqual;zIEcy;zIJlig;zIOcy;ZIacutezIacute;ZIcirczIcirc;zIcy;zIdot;zIfr;ZIgravezIgrave;zIm;zImacr;zImaginaryI;zImplies;zInt;z	Integral;z
Intersection;zInvisibleComma;zInvisibleTimes;zIogon;zIopf;zIota;zIscr;zItilde;zIukcy;ZIumlzIuml;zJcirc;zJcy;zJfr;zJopf;zJscr;zJsercy;zJukcy;zKHcy;zKJcy;zKappa;zKcedil;zKcy;zKfr;zKopf;zKscr;zLJcy;ZLTzLT;zLacute;zLambda;zLang;zLaplacetrf;zLarr;zLcaron;zLcedil;zLcy;zLeftAngleBracket;z
LeftArrow;z
LeftArrowBar;zLeftArrowRightArrow;zLeftCeiling;zLeftDoubleBracket;zLeftDownTeeVector;zLeftDownVector;zLeftDownVectorBar;z
LeftFloor;zLeftRightArrow;zLeftRightVector;zLeftTee;z
LeftTeeArrow;zLeftTeeVector;z
LeftTriangle;zLeftTriangleBar;zLeftTriangleEqual;zLeftUpDownVector;zLeftUpTeeVector;z
LeftUpVector;zLeftUpVectorBar;zLeftVector;zLeftVectorBar;z
Leftarrow;zLeftrightarrow;zLessEqualGreater;zLessFullEqual;zLessGreater;z	LessLess;zLessSlantEqual;z
LessTilde;zLfr;zLl;zLleftarrow;zLmidot;zLongLeftArrow;zLongLeftRightArrow;zLongRightArrow;zLongleftarrow;zLongleftrightarrow;zLongrightarrow;zLopf;zLowerLeftArrow;zLowerRightArrow;zLscr;zLsh;zLstrok;zLt;zMap;zMcy;zMediumSpace;z
Mellintrf;zMfr;z
MinusPlus;zMopf;zMscr;zMu;zNJcy;zNacute;zNcaron;zNcedil;zNcy;zNegativeMediumSpace;zNegativeThickSpace;zNegativeThinSpace;zNegativeVeryThinSpace;zNestedGreaterGreater;zNestedLessLess;zNewLine;zNfr;zNoBreak;zNonBreakingSpace;zNopf;zNot;z
NotCongruent;z
NotCupCap;zNotDoubleVerticalBar;zNotElement;z	NotEqual;zNotEqualTilde;z
NotExists;zNotGreater;zNotGreaterEqual;zNotGreaterFullEqual;zNotGreaterGreater;zNotGreaterLess;zNotGreaterSlantEqual;zNotGreaterTilde;zNotHumpDownHump;z
NotHumpEqual;zNotLeftTriangle;zNotLeftTriangleBar;zNotLeftTriangleEqual;zNotLess;z
NotLessEqual;zNotLessGreater;zNotLessLess;zNotLessSlantEqual;z
NotLessTilde;zNotNestedGreaterGreater;zNotNestedLessLess;zNotPrecedes;zNotPrecedesEqual;zNotPrecedesSlantEqual;zNotReverseElement;zNotRightTriangle;zNotRightTriangleBar;zNotRightTriangleEqual;zNotSquareSubset;zNotSquareSubsetEqual;zNotSquareSuperset;zNotSquareSupersetEqual;z
NotSubset;zNotSubsetEqual;zNotSucceeds;zNotSucceedsEqual;zNotSucceedsSlantEqual;zNotSucceedsTilde;zNotSuperset;zNotSupersetEqual;z	NotTilde;zNotTildeEqual;zNotTildeFullEqual;zNotTildeTilde;zNotVerticalBar;zNscr;ZNtildezNtilde;zNu;zOElig;ZOacutezOacute;ZOcirczOcirc;zOcy;zOdblac;zOfr;ZOgravezOgrave;zOmacr;zOmega;zOmicron;zOopf;zOpenCurlyDoubleQuote;zOpenCurlyQuote;zOr;zOscr;ZOslashzOslash;ZOtildezOtilde;zOtimes;ZOumlzOuml;zOverBar;z
OverBrace;zOverBracket;zOverParenthesis;z	PartialD;zPcy;zPfr;zPhi;zPi;z
PlusMinus;zPoincareplane;zPopf;zPr;z	Precedes;zPrecedesEqual;zPrecedesSlantEqual;zPrecedesTilde;zPrime;zProduct;zProportion;z
Proportional;zPscr;zPsi;ZQUOTzQUOT;zQfr;zQopf;zQscr;zRBarr;ZREGzREG;zRacute;zRang;zRarr;zRarrtl;zRcaron;zRcedil;zRcy;zRe;zReverseElement;zReverseEquilibrium;zReverseUpEquilibrium;zRfr;zRho;zRightAngleBracket;zRightArrow;zRightArrowBar;zRightArrowLeftArrow;z
RightCeiling;zRightDoubleBracket;zRightDownTeeVector;zRightDownVector;zRightDownVectorBar;zRightFloor;z	RightTee;zRightTeeArrow;zRightTeeVector;zRightTriangle;zRightTriangleBar;zRightTriangleEqual;zRightUpDownVector;zRightUpTeeVector;zRightUpVector;zRightUpVectorBar;zRightVector;zRightVectorBar;zRightarrow;zRopf;z
RoundImplies;zRrightarrow;zRscr;zRsh;zRuleDelayed;zSHCHcy;zSHcy;zSOFTcy;zSacute;zSc;zScaron;zScedil;zScirc;zScy;zSfr;zShortDownArrow;zShortLeftArrow;zShortRightArrow;z
ShortUpArrow;zSigma;zSmallCircle;zSopf;zSqrt;zSquare;zSquareIntersection;z
SquareSubset;zSquareSubsetEqual;zSquareSuperset;zSquareSupersetEqual;zSquareUnion;zSscr;zStar;zSub;zSubset;zSubsetEqual;z	Succeeds;zSucceedsEqual;zSucceedsSlantEqual;zSucceedsTilde;z	SuchThat;zSum;zSup;z	Superset;zSupersetEqual;zSupset;ZTHORNzTHORN;zTRADE;zTSHcy;zTScy;zTab;zTau;zTcaron;zTcedil;zTcy;zTfr;z
Therefore;zTheta;zThickSpace;z
ThinSpace;zTilde;zTildeEqual;zTildeFullEqual;zTildeTilde;zTopf;z
TripleDot;zTscr;zTstrok;ZUacutezUacute;zUarr;z	Uarrocir;zUbrcy;zUbreve;ZUcirczUcirc;zUcy;zUdblac;zUfr;ZUgravezUgrave;zUmacr;z	UnderBar;zUnderBrace;z
UnderBracket;zUnderParenthesis;zUnion;z
UnionPlus;zUogon;zUopf;zUpArrow;zUpArrowBar;zUpArrowDownArrow;zUpDownArrow;zUpEquilibrium;zUpTee;zUpTeeArrow;zUparrow;zUpdownarrow;zUpperLeftArrow;zUpperRightArrow;zUpsi;zUpsilon;zUring;zUscr;zUtilde;ZUumlzUuml;zVDash;zVbar;zVcy;zVdash;zVdashl;zVee;zVerbar;zVert;zVerticalBar;z
VerticalLine;zVerticalSeparator;zVerticalTilde;zVeryThinSpace;zVfr;zVopf;zVscr;zVvdash;zWcirc;zWedge;zWfr;zWopf;zWscr;zXfr;zXi;zXopf;zXscr;zYAcy;zYIcy;zYUcy;ZYacutezYacute;zYcirc;zYcy;zYfr;zYopf;zYscr;zYuml;zZHcy;zZacute;zZcaron;zZcy;zZdot;zZeroWidthSpace;zZeta;zZfr;zZopf;zZscr;Zaacutezaacute;zabreve;zac;zacE;zacd;Zacirczacirc;Zacutezacute;zacy;Zaeligzaelig;zaf;zafr;Zagravezagrave;zalefsym;zaleph;zalpha;zamacr;zamalg;Zampzamp;zand;zandand;zandd;z	andslope;zandv;zang;zange;zangle;zangmsd;z	angmsdaa;z	angmsdab;z	angmsdac;z	angmsdad;z	angmsdae;z	angmsdaf;z	angmsdag;z	angmsdah;zangrt;zangrtvb;z	angrtvbd;zangsph;zangst;zangzarr;zaogon;zaopf;zap;zapE;zapacir;zape;zapid;zapos;zapprox;z	approxeq;Zaringzaring;zascr;zast;zasymp;zasympeq;Zatildezatilde;Zaumlzauml;z	awconint;zawint;zbNot;z	backcong;zbackepsilon;z
backprime;zbacksim;z
backsimeq;zbarvee;zbarwed;z	barwedge;zbbrk;z	bbrktbrk;zbcong;zbcy;zbdquo;zbecaus;zbecause;zbemptyv;zbepsi;zbernou;zbeta;zbeth;zbetween;zbfr;zbigcap;zbigcirc;zbigcup;zbigodot;z	bigoplus;z
bigotimes;z	bigsqcup;zbigstar;zbigtriangledown;zbigtriangleup;z	biguplus;zbigvee;z	bigwedge;zbkarow;z
blacklozenge;zblacksquare;zblacktriangle;zblacktriangledown;zblacktriangleleft;zblacktriangleright;zblank;zblk12;zblk14;zblk34;zblock;zbne;zbnequiv;zbnot;zbopf;zbot;zbottom;zbowtie;zboxDL;zboxDR;zboxDl;zboxDr;zboxH;zboxHD;zboxHU;zboxHd;zboxHu;zboxUL;zboxUR;zboxUl;zboxUr;zboxV;zboxVH;zboxVL;zboxVR;zboxVh;zboxVl;zboxVr;zboxbox;zboxdL;zboxdR;zboxdl;zboxdr;zboxh;zboxhD;zboxhU;zboxhd;zboxhu;z	boxminus;zboxplus;z	boxtimes;zboxuL;zboxuR;zboxul;zboxur;zboxv;zboxvH;zboxvL;zboxvR;zboxvh;zboxvl;zboxvr;zbprime;zbreve;Zbrvbarzbrvbar;zbscr;zbsemi;zbsim;zbsime;zbsol;zbsolb;z	bsolhsub;zbull;zbullet;zbump;zbumpE;zbumpe;zbumpeq;zcacute;zcap;zcapand;z	capbrcup;zcapcap;zcapcup;zcapdot;zcaps;zcaret;zcaron;zccaps;zccaron;Zccedilzccedil;zccirc;zccups;zccupssm;zcdot;Zcedilzcedil;zcemptyv;Zcentzcent;z
centerdot;zcfr;zchcy;zcheck;z
checkmark;zchi;zcir;zcirE;zcirc;zcirceq;zcirclearrowleft;zcirclearrowright;z	circledR;z	circledS;zcircledast;zcircledcirc;zcircleddash;zcire;z	cirfnint;zcirmid;zcirscir;zclubs;z	clubsuit;zcolon;zcolone;zcoloneq;zcomma;zcommat;zcomp;zcompfn;zcomplement;z
complexes;zcong;zcongdot;zconint;zcopf;zcoprod;�copyzcopy;zcopysr;zcrarr;zcross;zcscr;zcsub;zcsube;zcsup;zcsupe;zctdot;zcudarrl;zcudarrr;zcuepr;zcuesc;zcularr;zcularrp;zcup;z	cupbrcap;zcupcap;zcupcup;zcupdot;zcupor;zcups;zcurarr;zcurarrm;zcurlyeqprec;zcurlyeqsucc;z	curlyvee;zcurlywedge;Zcurrenzcurren;zcurvearrowleft;zcurvearrowright;zcuvee;zcuwed;z	cwconint;zcwint;zcylcty;zdArr;zdHar;zdagger;zdaleth;zdarr;zdash;zdashv;zdbkarow;zdblac;zdcaron;zdcy;zdd;zddagger;zddarr;zddotseq;Zdegzdeg;zdelta;zdemptyv;zdfisht;zdfr;zdharl;zdharr;zdiam;zdiamond;zdiamondsuit;zdiams;zdie;zdigamma;zdisin;zdiv;Zdividezdivide;zdivideontimes;zdivonx;zdjcy;zdlcorn;zdlcrop;zdollar;zdopf;zdot;zdoteq;z	doteqdot;z	dotminus;zdotplus;z
dotsquare;zdoublebarwedge;z
downarrow;zdowndownarrows;zdownharpoonleft;zdownharpoonright;z	drbkarow;zdrcorn;zdrcrop;zdscr;zdscy;zdsol;zdstrok;zdtdot;zdtri;zdtrif;zduarr;zduhar;zdwangle;zdzcy;z	dzigrarr;zeDDot;zeDot;Zeacutezeacute;zeaster;zecaron;zecir;Zecirczecirc;zecolon;zecy;zedot;zee;zefDot;zefr;zeg;Zegravezegrave;zegs;zegsdot;zel;z	elinters;zell;zels;zelsdot;zemacr;zempty;z	emptyset;zemptyv;zemsp13;zemsp14;zemsp;zeng;zensp;zeogon;zeopf;zepar;zeparsl;zeplus;zepsi;zepsilon;zepsiv;zeqcirc;zeqcolon;zeqsim;zeqslantgtr;zeqslantless;zequals;zequest;zequiv;zequivDD;z	eqvparsl;zerDot;zerarr;zescr;zesdot;zesim;zeta;Zethzeth;Zeumlzeuml;zeuro;zexcl;zexist;zexpectation;z
exponentiale;zfallingdotseq;zfcy;zfemale;zffilig;zfflig;zffllig;zffr;zfilig;zfjlig;zflat;zfllig;zfltns;zfnof;zfopf;zforall;zfork;zforkv;z	fpartint;Zfrac12zfrac12;zfrac13;Zfrac14zfrac14;zfrac15;zfrac16;zfrac18;zfrac23;zfrac25;Zfrac34zfrac34;zfrac35;zfrac38;zfrac45;zfrac56;zfrac58;zfrac78;zfrasl;zfrown;zfscr;zgE;zgEl;zgacute;zgamma;zgammad;zgap;zgbreve;zgcirc;zgcy;zgdot;zge;zgel;zgeq;zgeqq;z	geqslant;zges;zgescc;zgesdot;zgesdoto;z	gesdotol;zgesl;zgesles;zgfr;zgg;zggg;zgimel;zgjcy;zgl;zglE;zgla;zglj;zgnE;zgnap;z	gnapprox;zgne;zgneq;zgneqq;zgnsim;zgopf;zgrave;zgscr;zgsim;zgsime;zgsiml;�gtzgt;zgtcc;zgtcir;zgtdot;zgtlPar;zgtquest;z
gtrapprox;zgtrarr;zgtrdot;z
gtreqless;zgtreqqless;zgtrless;zgtrsim;z
gvertneqq;zgvnE;zhArr;zhairsp;zhalf;zhamilt;zhardcy;zharr;zharrcir;zharrw;zhbar;zhcirc;zhearts;z
heartsuit;zhellip;zhercon;zhfr;z	hksearow;z	hkswarow;zhoarr;zhomtht;zhookleftarrow;zhookrightarrow;zhopf;zhorbar;zhscr;zhslash;zhstrok;zhybull;zhyphen;Ziacuteziacute;zic;Zicirczicirc;zicy;ziecy;Ziexclziexcl;ziff;zifr;Zigravezigrave;zii;ziiiint;ziiint;ziinfin;ziiota;zijlig;zimacr;zimage;z	imagline;z	imagpart;zimath;zimof;zimped;zin;zincare;zinfin;z	infintie;zinodot;zint;zintcal;z	integers;z	intercal;z	intlarhk;zintprod;ziocy;ziogon;ziopf;ziota;ziprod;Ziquestziquest;ziscr;zisin;zisinE;zisindot;zisins;zisinsv;zisinv;zit;zitilde;ziukcy;Ziumlziuml;zjcirc;zjcy;zjfr;zjmath;zjopf;zjscr;zjsercy;zjukcy;zkappa;zkappav;zkcedil;zkcy;zkfr;zkgreen;zkhcy;zkjcy;zkopf;zkscr;zlAarr;zlArr;zlAtail;zlBarr;zlE;zlEg;zlHar;zlacute;z	laemptyv;zlagran;zlambda;zlang;zlangd;zlangle;zlap;Zlaquozlaquo;zlarr;zlarrb;zlarrbfs;zlarrfs;zlarrhk;zlarrlp;zlarrpl;zlarrsim;zlarrtl;zlat;zlatail;zlate;zlates;zlbarr;zlbbrk;zlbrace;zlbrack;zlbrke;zlbrksld;zlbrkslu;zlcaron;zlcedil;zlceil;zlcub;zlcy;zldca;zldquo;zldquor;zldrdhar;z	ldrushar;zldsh;zle;z
leftarrow;zleftarrowtail;zleftharpoondown;zleftharpoonup;zleftleftarrows;zleftrightarrow;zleftrightarrows;zleftrightharpoons;zleftrightsquigarrow;zleftthreetimes;zleg;zleq;zleqq;z	leqslant;zles;zlescc;zlesdot;zlesdoto;z	lesdotor;zlesg;zlesges;zlessapprox;zlessdot;z
lesseqgtr;zlesseqqgtr;zlessgtr;zlesssim;zlfisht;zlfloor;zlfr;zlg;zlgE;zlhard;zlharu;zlharul;zlhblk;zljcy;zll;zllarr;z	llcorner;zllhard;zlltri;zlmidot;zlmoust;zlmoustache;zlnE;zlnap;z	lnapprox;zlne;zlneq;zlneqq;zlnsim;zloang;zloarr;zlobrk;zlongleftarrow;zlongleftrightarrow;zlongmapsto;zlongrightarrow;zlooparrowleft;zlooparrowright;zlopar;zlopf;zloplus;zlotimes;zlowast;zlowbar;zloz;zlozenge;zlozf;zlpar;zlparlt;zlrarr;z	lrcorner;zlrhar;zlrhard;zlrm;zlrtri;zlsaquo;zlscr;zlsh;zlsim;zlsime;zlsimg;zlsqb;zlsquo;zlsquor;zlstrok;�ltzlt;zltcc;zltcir;zltdot;zlthree;zltimes;zltlarr;zltquest;zltrPar;zltri;zltrie;zltrif;z	lurdshar;zluruhar;z
lvertneqq;zlvnE;zmDDot;Zmacrzmacr;zmale;zmalt;zmaltese;zmap;zmapsto;zmapstodown;zmapstoleft;z	mapstoup;zmarker;zmcomma;zmcy;zmdash;zmeasuredangle;zmfr;zmho;�microzmicro;zmid;zmidast;zmidcir;Zmiddotzmiddot;zminus;zminusb;zminusd;zminusdu;zmlcp;zmldr;zmnplus;zmodels;zmopf;zmp;zmscr;zmstpos;zmu;z	multimap;zmumap;znGg;znGt;znGtv;znLeftarrow;znLeftrightarrow;znLl;znLt;znLtv;znRightarrow;znVDash;znVdash;znabla;znacute;znang;znap;znapE;znapid;znapos;znapprox;znatur;znatural;z	naturals;Znbspznbsp;znbump;znbumpe;zncap;zncaron;zncedil;zncong;z	ncongdot;zncup;zncy;zndash;zne;zneArr;znearhk;znearr;znearrow;znedot;znequiv;znesear;znesim;znexist;znexists;znfr;zngE;znge;zngeq;zngeqq;z
ngeqslant;znges;zngsim;zngt;zngtr;znhArr;znharr;znhpar;zni;znis;znisd;zniv;znjcy;znlArr;znlE;znlarr;znldr;znle;znleftarrow;znleftrightarrow;znleq;znleqq;z
nleqslant;znles;znless;znlsim;znlt;znltri;znltrie;znmid;znopf;�notznot;znotin;znotinE;z	notindot;znotinva;znotinvb;znotinvc;znotni;znotniva;znotnivb;znotnivc;znpar;z
nparallel;znparsl;znpart;znpolint;znpr;znprcue;znpre;znprec;znpreceq;znrArr;znrarr;znrarrc;znrarrw;znrightarrow;znrtri;znrtrie;znsc;znsccue;znsce;znscr;z
nshortmid;znshortparallel;znsim;znsime;znsimeq;znsmid;znspar;znsqsube;znsqsupe;znsub;znsubE;znsube;znsubset;z
nsubseteq;znsubseteqq;znsucc;znsucceq;znsup;znsupE;znsupe;znsupset;z
nsupseteq;znsupseteqq;zntgl;Zntildezntilde;zntlg;zntriangleleft;zntrianglelefteq;zntriangleright;zntrianglerighteq;znu;znum;znumero;znumsp;znvDash;znvHarr;znvap;znvdash;znvge;znvgt;znvinfin;znvlArr;znvle;znvlt;znvltrie;znvrArr;znvrtrie;znvsim;znwArr;znwarhk;znwarr;znwarrow;znwnear;zoS;Zoacutezoacute;zoast;zocir;Zocirczocirc;zocy;zodash;zodblac;zodiv;zodot;zodsold;zoelig;zofcir;zofr;zogon;Zogravezograve;zogt;zohbar;zohm;zoint;zolarr;zolcir;zolcross;zoline;zolt;zomacr;zomega;zomicron;zomid;zominus;zoopf;zopar;zoperp;zoplus;zor;zorarr;zord;zorder;zorderof;Zordfzordf;Zordmzordm;zorigof;zoror;zorslope;zorv;zoscr;Zoslashzoslash;zosol;Zotildezotilde;zotimes;z	otimesas;Zoumlzouml;zovbar;zpar;Zparazpara;z	parallel;zparsim;zparsl;zpart;zpcy;zpercnt;zperiod;zpermil;zperp;zpertenk;zpfr;zphi;zphiv;zphmmat;zphone;zpi;z
pitchfork;zpiv;zplanck;zplanckh;zplankv;zplus;z	plusacir;zplusb;zpluscir;zplusdo;zplusdu;zpluse;Zplusmnzplusmn;zplussim;zplustwo;zpm;z	pointint;zpopf;Zpoundzpound;zpr;zprE;zprap;zprcue;zpre;zprec;zprecapprox;zpreccurlyeq;zpreceq;zprecnapprox;z	precneqq;z	precnsim;zprecsim;zprime;zprimes;zprnE;zprnap;zprnsim;zprod;z	profalar;z	profline;z	profsurf;zprop;zpropto;zprsim;zprurel;zpscr;zpsi;zpuncsp;zqfr;zqint;zqopf;zqprime;zqscr;zquaternions;zquatint;zquest;zquesteq;Zquotzquot;zrAarr;zrArr;zrAtail;zrBarr;zrHar;zrace;zracute;zradic;z	raemptyv;zrang;zrangd;zrange;zrangle;Zraquozraquo;zrarr;zrarrap;zrarrb;zrarrbfs;zrarrc;zrarrfs;zrarrhk;zrarrlp;zrarrpl;zrarrsim;zrarrtl;zrarrw;zratail;zratio;z
rationals;zrbarr;zrbbrk;zrbrace;zrbrack;zrbrke;zrbrksld;zrbrkslu;zrcaron;zrcedil;zrceil;zrcub;zrcy;zrdca;zrdldhar;zrdquo;zrdquor;zrdsh;zreal;zrealine;z	realpart;zreals;zrect;Zregzreg;zrfisht;zrfloor;zrfr;zrhard;zrharu;zrharul;zrho;zrhov;zrightarrow;zrightarrowtail;zrightharpoondown;zrightharpoonup;zrightleftarrows;zrightleftharpoons;zrightrightarrows;zrightsquigarrow;zrightthreetimes;zring;z
risingdotseq;zrlarr;zrlhar;zrlm;zrmoust;zrmoustache;zrnmid;zroang;zroarr;zrobrk;zropar;zropf;zroplus;zrotimes;zrpar;zrpargt;z	rppolint;zrrarr;zrsaquo;zrscr;zrsh;zrsqb;zrsquo;zrsquor;zrthree;zrtimes;zrtri;zrtrie;zrtrif;z	rtriltri;zruluhar;zrx;zsacute;zsbquo;zsc;zscE;zscap;zscaron;zsccue;zsce;zscedil;zscirc;zscnE;zscnap;zscnsim;z	scpolint;zscsim;zscy;zsdot;zsdotb;zsdote;zseArr;zsearhk;zsearr;zsearrow;Zsectzsect;zsemi;zseswar;z	setminus;zsetmn;zsext;zsfr;zsfrown;zsharp;zshchcy;zshcy;z	shortmid;zshortparallel;Zshyzshy;zsigma;zsigmaf;zsigmav;zsim;zsimdot;zsime;zsimeq;zsimg;zsimgE;zsiml;zsimlE;zsimne;zsimplus;zsimrarr;zslarr;zsmallsetminus;zsmashp;z	smeparsl;zsmid;zsmile;zsmt;zsmte;zsmtes;zsoftcy;zsol;zsolb;zsolbar;zsopf;zspades;z
spadesuit;zspar;zsqcap;zsqcaps;zsqcup;zsqcups;zsqsub;zsqsube;z	sqsubset;zsqsubseteq;zsqsup;zsqsupe;z	sqsupset;zsqsupseteq;zsqu;zsquare;zsquarf;zsquf;zsrarr;zsscr;zssetmn;zssmile;zsstarf;zstar;zstarf;zstraightepsilon;zstraightphi;zstrns;zsub;zsubE;zsubdot;zsube;zsubedot;zsubmult;zsubnE;zsubne;zsubplus;zsubrarr;zsubset;z	subseteq;z
subseteqq;z
subsetneq;zsubsetneqq;zsubsim;zsubsub;zsubsup;zsucc;zsuccapprox;zsucccurlyeq;zsucceq;zsuccnapprox;z	succneqq;z	succnsim;zsuccsim;zsum;zsung;Zsup1zsup1;Zsup2zsup2;Zsup3zsup3;zsup;zsupE;zsupdot;zsupdsub;zsupe;zsupedot;zsuphsol;zsuphsub;zsuplarr;zsupmult;zsupnE;zsupne;zsupplus;zsupset;z	supseteq;z
supseteqq;z
supsetneq;zsupsetneqq;zsupsim;zsupsub;zsupsup;zswArr;zswarhk;zswarr;zswarrow;zswnwar;Zszligzszlig;ztarget;ztau;ztbrk;ztcaron;ztcedil;ztcy;ztdot;ztelrec;ztfr;zthere4;z
therefore;ztheta;z	thetasym;zthetav;zthickapprox;z	thicksim;zthinsp;zthkap;zthksim;Zthornzthorn;ztilde;�timesztimes;ztimesb;z	timesbar;ztimesd;ztint;ztoea;ztop;ztopbot;ztopcir;ztopf;ztopfork;ztosa;ztprime;ztrade;z	triangle;z
triangledown;z
triangleleft;ztrianglelefteq;z
triangleq;ztriangleright;ztrianglerighteq;ztridot;ztrie;z	triminus;ztriplus;ztrisb;ztritime;z	trpezium;ztscr;ztscy;ztshcy;ztstrok;ztwixt;ztwoheadleftarrow;ztwoheadrightarrow;zuArr;zuHar;Zuacutezuacute;zuarr;zubrcy;zubreve;Zucirczucirc;zucy;zudarr;zudblac;zudhar;zufisht;zufr;Zugravezugrave;zuharl;zuharr;zuhblk;zulcorn;z	ulcorner;zulcrop;zultri;zumacr;Zumlzuml;zuogon;zuopf;zuparrow;zupdownarrow;zupharpoonleft;zupharpoonright;zuplus;zupsi;zupsih;zupsilon;zupuparrows;zurcorn;z	urcorner;zurcrop;zuring;zurtri;zuscr;zutdot;zutilde;zutri;zutrif;zuuarr;Zuumlzuuml;zuwangle;zvArr;zvBar;zvBarv;zvDash;zvangrt;zvarepsilon;z	varkappa;zvarnothing;zvarphi;zvarpi;z
varpropto;zvarr;zvarrho;z	varsigma;z
varsubsetneq;zvarsubsetneqq;z
varsupsetneq;zvarsupsetneqq;z	vartheta;zvartriangleleft;zvartriangleright;zvcy;zvdash;zvee;zveebar;zveeeq;zvellip;zverbar;zvert;zvfr;zvltri;zvnsub;zvnsup;zvopf;zvprop;zvrtri;zvscr;zvsubnE;zvsubne;zvsupnE;zvsupne;zvzigzag;zwcirc;zwedbar;zwedge;zwedgeq;zweierp;zwfr;zwopf;zwp;zwr;zwreath;zwscr;zxcap;zxcirc;zxcup;zxdtri;zxfr;zxhArr;zxharr;zxi;zxlArr;zxlarr;zxmap;zxnis;zxodot;zxopf;zxoplus;zxotime;zxrArr;zxrarr;zxscr;zxsqcup;zxuplus;zxutri;zxvee;zxwedge;Zyacutezyacute;zyacy;zycirc;zycy;Zyenzyen;zyfr;zyicy;zyopf;zyscr;zyucy;Zyumlzyuml;zzacute;zzcaron;zzcy;zzdot;zzeetrf;zzeta;zzfr;zzhcy;zzigrarr;zzopf;zzscr;zzwj;zzwnj;u������)"r�
���������������������������������������)ZDoctypeZ
CharactersZSpaceCharacters�StartTag�EndTag�EmptyTag�CommentZ
ParseErrorrrrcCsg|]\}}||f�qSr2r2)r3�k�vr2r2r6r7xsZmathc@seZdZdS)�DataLossWarningN)�__name__�
__module__�__qualname__r2r2r2r6r|src@seZdZdS)�ReparseExceptionN)rrrr2r2r2r6r�sr)rr r!r"r#r$) rGrHrIrJrKrLrMrNrOrPrQrRrSrHrTrHrHrUrVrWrXrYrZr[r\r]r^r_r`rHrarb)0Z
__future__rrr�stringZEOF�EZ
namespaces�	frozensetZscopingElementsZformattingElementsZspecialElementsZhtmlIntegrationPointElementsZ"mathmlTextIntegrationPointElementsZadjustSVGAttributesZadjustMathMLAttributesZadjustForeignAttributes�dict�itemsZunadjustForeignAttributesZspaceCharactersZtableInsertModeElementsZascii_lowercaseZasciiLowercaseZascii_uppercaseZasciiUppercaseZ
ascii_lettersZasciiLettersZdigitsZ	hexdigitsZ	hexDigitsZasciiUpper2LowerZheadingElementsZvoidElementsZ
cdataElementsZrcdataElementsZbooleanAttributesZentitiesWindows1252ZxmlEntitiesZentitiesZreplacementCharactersZ
tokenTypesZ
tagTokenTypes�prefixes�UserWarningr�	Exceptionrr2r2r2r6�<module>s<






























































































































_vendor/html5lib/__pycache__/_inputstream.cpython-36.pyc000064400000054147151733136420017310 0ustar003

�Pf�)@sddlmZmZmZddlmZmZddlmZm	Z	ddl
Z
ddlZddlm
Z
ddlmZmZmZmZddlmZdd	lmZdd
lmZyddlmZWnek
r�eZYnXedd
�eD��Zedd
�eD��Zedd
�eD��Zeeddg�BZdZej �rJedFdk�r&ej!d�dk�s*t"�ej#eddG�e$d�d�Z%n
ej#e�Z%e&dddddddddddd d!d"d#d$d%d&d'd(d)d*d+d,d-d.d/d0d1d2d3d4g �Z'ej#d5�Z(iZ)Gd6d7�d7e*�Z+d8d9�Z,Gd:d;�d;e*�Z-Gd<d=�d=e-�Z.Gd>d?�d?e/�Z0Gd@dA�dAe*�Z1GdBdC�dCe*�Z2dDdE�Z3dS)H�)�absolute_import�division�unicode_literals)�	text_type�binary_type)�http_client�urllibN)�webencodings�)�EOF�spaceCharacters�asciiLetters�asciiUppercase)�ReparseException)�_utils)�StringIO)�BytesIOcCsg|]}|jd��qS)�ascii)�encode)�.0�item�r�"/usr/lib/python3.6/_inputstream.py�
<listcomp>srcCsg|]}|jd��qS)r)r)rrrrrrscCsg|]}|jd��qS)r)r)rrrrrrs�>�<u�[---Ÿ﷐-﷯￾￿🿾🿿𯿾𯿿𿿾𿿿񏿾񏿿񟿾񟿿񯿾񯿿񿿾񿿿򏿾򏿿򟿾򟿿򯿾򯿿򿿾򿿿󏿾󏿿󟿾󟿿󯿾󯿿󿿾󿿿􏿾􏿿]�]z"\uD800-\uDFFF"i��i��i��i��i��i��i��i��i��i��i��i��i��i��i��i��i��	i��	i��
i��
i��i��i��i��i��
i��
i��i��i��i��i��i��z[	-
 -/:-@[-`{-~]c@sHeZdZdZdd�Zdd�Zdd�Zdd	�Zd
d�Zdd
�Z	dd�Z
dS)�BufferedStreamz�Buffering for streams that do not have buffering of their own

    The buffer is implemented as a list of chunks on the assumption that
    joining many strings will be slow since it is O(n**2)
    cCs||_g|_ddg|_dS)Nr
r���)�stream�buffer�position)�selfrrrr�__init__@szBufferedStream.__init__cCs@d}x(|jd|jd�D]}|t|�7}qW||jd7}|S)Nrr
)r r!�len)r"�pos�chunkrrr�tellEs
zBufferedStream.tellcCsX||j�kst�|}d}x0t|j|�|krH|t|j|�8}|d7}qW||g|_dS)Nrr
)�_bufferedBytes�AssertionErrorr$r r!)r"r%�offset�irrr�seekLszBufferedStream.seekcCsT|js|j|�S|jdt|j�krF|jdt|jd�krF|j|�S|j|�SdS)Nrr
r)r �_readStreamr!r$�_readFromBuffer)r"�bytesrrr�readUs

zBufferedStream.readcCstdd�|jD��S)NcSsg|]}t|��qSr)r$)rrrrrr_sz1BufferedStream._bufferedBytes.<locals>.<listcomp>)�sumr )r"rrrr(^szBufferedStream._bufferedBytescCs<|jj|�}|jj|�|jdd7<t|�|jd<|S)Nrr
)rr0r �appendr!r$)r"r/�datarrrr-as
zBufferedStream._readStreamcCs�|}g}|jd}|jd}x�|t|j�kr�|dkr�|dks@t�|j|}|t|�|krn|}|||g|_n"t|�|}|t|�g|_|d7}|j||||��||8}d}qW|r�|j|j|��dj|�S)Nrr
�)r!r$r r)r2r-�join)r"r/ZremainingBytes�rvZbufferIndexZbufferOffsetZbufferedDataZbytesToReadrrrr.hs&


zBufferedStream._readFromBufferN)�__name__�
__module__�__qualname__�__doc__r#r'r,r0r(r-r.rrrrr9s		rcKs�t|tj�s(t|tjj�r.t|jtj�r.d}n&t|d�rJt|jd�t	�}n
t|t	�}|r�dd�|D�}|rvt
d|��t|f|�St|f|�SdS)NFr0rcSsg|]}|jd�r|�qS)Z	_encoding)�endswith)r�xrrrr�sz#HTMLInputStream.<locals>.<listcomp>z3Cannot set an encoding with a unicode input, set %r)
�
isinstancerZHTTPResponserZresponseZaddbase�fp�hasattrr0r�	TypeError�HTMLUnicodeInputStream�HTMLBinaryInputStream)�source�kwargsZ	isUnicodeZ	encodingsrrr�HTMLInputStream�s

rEc@speZdZdZdZdd�Zdd�Zdd�Zd	d
�Zdd�Z	d
d�Z
ddd�Zdd�Zdd�Z
ddd�Zdd�ZdS)rAz�Provides a unicode stream of characters to the HTMLTokenizer.

    This class takes care of character encoding and removing or replacing
    incorrect byte-sequences and also provides column and line tracking.

    i(cCsZtjsd|_ntd�dkr$|j|_n|j|_dg|_td�df|_|j	|�|_
|j�dS)a�Initialises the HTMLInputStream.

        HTMLInputStream(source, [encoding]) -> Normalized stream from source
        for use by html5lib.

        source can be either a file-object, local filename or a string.

        The optional encoding parameter must be a string that indicates
        the encoding.  If specified, that encoding will be used,
        regardless of any BOM or later declaration (such as in a meta
        element)

        Nu􏿿r
rzutf-8�certain)r�supports_lone_surrogates�reportCharacterErrorsr$�characterErrorsUCS4�characterErrorsUCS2ZnewLines�lookupEncoding�charEncoding�
openStream�
dataStream�reset)r"rCrrrr#�s
zHTMLUnicodeInputStream.__init__cCs.d|_d|_d|_g|_d|_d|_d|_dS)N�r)r&�	chunkSize�chunkOffset�errors�prevNumLines�prevNumCols�_bufferedCharacter)r"rrrrO�szHTMLUnicodeInputStream.resetcCst|d�r|}nt|�}|S)zvProduces a file object from source.

        source can be either a file object, local filename or a string.

        r0)r?r)r"rCrrrrrM�s
z!HTMLUnicodeInputStream.openStreamcCsT|j}|jdd|�}|j|}|jdd|�}|dkr@|j|}n||d}||fS)N�
rr
r)r&�countrT�rfindrU)r"r*r&ZnLinesZpositionLineZlastLinePosZpositionColumnrrr�	_position�s
z HTMLUnicodeInputStream._positioncCs|j|j�\}}|d|fS)z:Returns (line, col) of the current position in the stream.r
)rZrR)r"�line�colrrrr!�szHTMLUnicodeInputStream.positioncCs6|j|jkr|j�stS|j}|j|}|d|_|S)zo Read one character from the stream or queue if available. Return
            EOF when EOF is reached.
        r
)rRrQ�	readChunkrr&)r"rR�charrrrr^�s

zHTMLUnicodeInputStream.charNcCs�|dkr|j}|j|j�\|_|_d|_d|_d|_|jj|�}|j	rX|j	|}d|_	n|s`dSt
|�dkr�t|d�}|dks�d|ko�dknr�|d
|_	|dd�}|jr�|j|�|j
dd	�}|j
d
d	�}||_t
|�|_dS)NrPrFr
�
i�i��z
rW�
Trrr)�_defaultChunkSizerZrQrTrUr&rRrNr0rVr$�ordrH�replace)r"rQr3Zlastvrrrr]�s0
 


z HTMLUnicodeInputStream.readChunkcCs,x&tttj|���D]}|jjd�qWdS)Nzinvalid-codepoint)�ranger$�invalid_unicode_re�findallrSr2)r"r3�_rrrrI%sz*HTMLUnicodeInputStream.characterErrorsUCS4cCs�d}x�tj|�D]�}|rqt|j��}|j�}tj|||d��rttj|||d��}|tkrn|j	j
d�d}q|dkr�|dkr�|t|�dkr�|j	j
d�qd}|j	j
d�qWdS)NF�zinvalid-codepointTi�i��r
)re�finditerrb�group�startrZisSurrogatePairZsurrogatePairToCodepoint�non_bmp_invalid_codepointsrSr2r$)r"r3�skip�matchZ	codepointr%Zchar_valrrrrJ)s z*HTMLUnicodeInputStream.characterErrorsUCS2Fc
Csyt||f}Wnltk
r|x|D]}t|�dks&t�q&Wdjdd�|D��}|s^d|}tjd|�}t||f<YnXg}x||j|j|j	�}|dkr�|j	|j
kr�Pn0|j�}||j
kr�|j|j|j	|��||_	P|j|j|j	d��|j
�s�Pq�Wdj|�}	|	S)z� Returns a string of characters from the stream up to but not
        including any character in 'characters' or EOF. 'characters' must be
        a container that supports the 'in' method and iteration over its
        characters.
        �rPcSsg|]}dt|��qS)z\x%02x)rb)r�crrrrNsz5HTMLUnicodeInputStream.charsUntil.<locals>.<listcomp>z^%sz[%s]+N)�charsUntilRegEx�KeyErrorrbr)r5�re�compilernr&rRrQ�endr2r])
r"Z
charactersZopposite�charsrpZregexr6�mru�rrrr�
charsUntil@s2
 

z!HTMLUnicodeInputStream.charsUntilcCsT|dk	rP|jdkr.||j|_|jd7_n"|jd8_|j|j|ksPt�dS)Nrr
)rRr&rQr))r"r^rrr�ungetos
zHTMLUnicodeInputStream.unget)N)F)r7r8r9r:rar#rOrMrZr!r^r]rIrJryrzrrrrrA�s 
&
/rAc@sLeZdZdZddd�Zdd�Zd	d
�Zddd�Zd
d�Zdd�Z	dd�Z
dS)rBz�Provides a unicode stream of characters to the HTMLTokenizer.

    This class takes care of character encoding and removing or replacing
    incorrect byte-sequences and also provides column and line tracking.

    N�windows-1252TcCsn|j|�|_tj||j�d|_d|_||_||_||_||_	||_
|j|�|_|jddk	sbt
�|j�dS)a�Initialises the HTMLInputStream.

        HTMLInputStream(source, [encoding]) -> Normalized stream from source
        for use by html5lib.

        source can be either a file-object, local filename or a string.

        The optional encoding parameter must be a string that indicates
        the encoding.  If specified, that encoding will be used,
        regardless of any BOM or later declaration (such as in a meta
        element)

        i�drN)rM�	rawStreamrAr#�numBytesMeta�numBytesChardet�override_encoding�transport_encoding�same_origin_parent_encoding�likely_encoding�default_encoding�determineEncodingrLr)rO)r"rCr�r�r�r�r�Z
useChardetrrrr#�szHTMLBinaryInputStream.__init__cCs&|jdjj|jd�|_tj|�dS)Nrrc)rLZ
codec_info�streamreaderr}rNrArO)r"rrrrO�szHTMLBinaryInputStream.resetc	CsDt|d�r|}nt|�}y|j|j��Wnt|�}YnX|S)zvProduces a file object from source.

        source can be either a file object, local filename or a string.

        r0)r?rr,r'r)r"rCrrrrrM�s
z HTMLBinaryInputStream.openStreamcCs�|j�df}|ddk	r|St|j�df}|ddk	r:|St|j�df}|ddk	rX|S|j�df}|ddk	rt|St|j�df}|ddk	r�|djjd�r�|St|j�df}|ddk	r�|S|�rtyddl	m
}Wntk
r�Yn�Xg}|�}xF|j�s>|j
j|j�}t|t��s t�|�s(P|j|�|j|�q�W|j�t|jd�}|j
jd�|dk	�rt|dfSt|j�df}|ddk	�r�|Std�dfS)NrFrZ	tentativezutf-16)�UniversalDetector�encodingzwindows-1252)�	detectBOMrKr�r��detectEncodingMetar��name�
startswithr�Zchardet.universaldetectorr��ImportError�doner}r0rr=r/r)r2Zfeed�close�resultr,r�)r"ZchardetrLr�ZbuffersZdetectorr r�rrrr��sR


z'HTMLBinaryInputStream.determineEncodingcCs�|jddkst�t|�}|dkr&dS|jdkrFtd�}|dk	s�t�nT||jdkrf|jddf|_n4|jjd�|df|_|j�td|jd|f��dS)	Nr
rF�utf-16be�utf-16lezutf-8rzEncoding changed from %s to %s)r�r�)rLr)rKr�r}r,rOr)r"ZnewEncodingrrr�changeEncodings

z$HTMLBinaryInputStream.changeEncodingc
Cs�tjdtjdtjdtjdtjdi}|jjd�}t|t	�s<t
�|j|dd��}d}|s~|j|�}d}|s~|j|dd	��}d	}|r�|jj|�t
|�S|jjd
�dSdS)z�Attempts to detect at BOM at the start of the stream. If
        an encoding can be determined from the BOM return the name of the
        encoding otherwise return Nonezutf-8zutf-16lezutf-16bezutf-32lezutf-32be�N�rhr)�codecs�BOM_UTF8�BOM_UTF16_LE�BOM_UTF16_BE�BOM_UTF32_LE�BOM_UTF32_BEr}r0r=r/r)�getr,rK)r"ZbomDict�stringr�r,rrrr�s$
zHTMLBinaryInputStream.detectBOMcCsV|jj|j�}t|t�st�t|�}|jjd�|j�}|dk	rR|j	dkrRt
d�}|S)z9Report the encoding declared by the meta element
        rN�utf-16be�utf-16lezutf-8)r�r�)r}r0r~r=r/r)�EncodingParserr,�getEncodingr�rK)r"r �parserr�rrrr�9sz(HTMLBinaryInputStream.detectEncodingMeta)NNNNr{T)T)r7r8r9r:r#rOrMr�r�r�r�rrrrrB�s
(
>"rBc@s�eZdZdZdd�Zdd�Zdd�Zdd	�Zd
d�Zdd
�Z	dd�Z
dd�Zeee
�Z
dd�Zee�Zefdd�Zdd�Zdd�Zdd�ZdS)�
EncodingBytesz�String-like object with an associated position and various extra methods
    If the position is ever greater than the string length then an exception is
    raisedcCst|t�st�tj||j��S)N)r=r/r)�__new__�lower)r"�valuerrrr�LszEncodingBytes.__new__cCs
d|_dS)Nr
r)rZ)r"r�rrrr#PszEncodingBytes.__init__cCs|S)Nr)r"rrr�__iter__TszEncodingBytes.__iter__cCs>|jd}|_|t|�kr"t�n|dkr.t�|||d�S)Nr
r)rZr$�
StopIterationr@)r"�prrr�__next__WszEncodingBytes.__next__cCs|j�S)N)r�)r"rrr�next_szEncodingBytes.nextcCsB|j}|t|�krt�n|dkr$t�|d|_}|||d�S)Nrr
)rZr$r�r@)r"r�rrr�previouscszEncodingBytes.previouscCs|jt|�krt�||_dS)N)rZr$r�)r"r!rrr�setPositionlszEncodingBytes.setPositioncCs*|jt|�krt�|jdkr"|jSdSdS)Nr)rZr$r�)r"rrr�getPositionqs

zEncodingBytes.getPositioncCs||j|jd�S)Nr
)r!)r"rrr�getCurrentByte{szEncodingBytes.getCurrentBytecCsL|j}x:|t|�kr@|||d�}||kr6||_|S|d7}qW||_dS)zSkip past a list of charactersr
N)r!r$rZ)r"rvr�rprrrrm�szEncodingBytes.skipcCsL|j}x:|t|�kr@|||d�}||kr6||_|S|d7}qW||_dS)Nr
)r!r$rZ)r"rvr�rprrr�	skipUntil�szEncodingBytes.skipUntilcCs>|j}|||t|��}|j|�}|r:|jt|�7_|S)z�Look for a sequence of bytes at the start of a string. If the bytes
        are found return True and advance the position to the byte after the
        match. Otherwise return False and leave the position alone)r!r$r�)r"r/r�r3r6rrr�
matchBytes�s
zEncodingBytes.matchBytescCsR||jd�j|�}|dkrJ|jdkr,d|_|j|t|�d7_dSt�dS)z�Look for the next sequence of bytes matching a given sequence. If
        a match is found advance the position to the last byte of the matchNr
rTrr)r!�findrZr$r�)r"r/ZnewPositionrrr�jumpTo�s
zEncodingBytes.jumpToN)r7r8r9r:r�r#r�r�r�r�r�r��propertyr!r��currentByte�spaceCharactersBytesrmr�r�r�rrrrr�Hs 	
r�c@sXeZdZdZdd�Zdd�Zdd�Zdd	�Zd
d�Zdd
�Z	dd�Z
dd�Zdd�ZdS)r�z?Mini parser for detecting character encoding from meta elementscCst|�|_d|_dS)z3string - the data to work on for encoding detectionN)r�r3r�)r"r3rrrr#�s
zEncodingParser.__init__c
Cs�d|jfd|jfd|jfd|jfd|jfd|jff}x^|jD]T}d}xD|D]<\}}|jj|�rJy|�}PWqJtk
r�d}PYqJXqJW|s<Pq<W|jS)	Ns<!--s<metas</s<!s<?rTF)	�
handleComment�
handleMeta�handlePossibleEndTag�handleOther�handlePossibleStartTagr3r�r�r�)r"ZmethodDispatchrgZkeepParsing�key�methodrrrr��s&zEncodingParser.getEncodingcCs|jjd�S)zSkip over commentss-->)r3r�)r"rrrr��szEncodingParser.handleCommentcCs�|jjtkrdSd}d}x�|j�}|dkr.dS|ddkr^|ddk}|r�|dk	r�||_dSq|ddkr�|d}t|�}|dk	r�||_dSq|ddkrtt|d��}|j�}|dk	rt|�}|dk	r|r�||_dS|}qWdS)	NTFrs
http-equivr
scontent-typescharsetscontent)	r3r�r��getAttributer�rK�ContentAttrParserr��parse)r"Z	hasPragmaZpendingEncoding�attrZtentativeEncoding�codecZ
contentParserrrrr��s:zEncodingParser.handleMetacCs
|jd�S)NF)�handlePossibleTag)r"rrrr��sz%EncodingParser.handlePossibleStartTagcCst|j�|jd�S)NT)r�r3r�)r"rrrr��s
z#EncodingParser.handlePossibleEndTagcCsf|j}|jtkr(|r$|j�|j�dS|jt�}|dkrD|j�n|j�}x|dk	r`|j�}qNWdS)NTr)r3r��asciiLettersBytesr�r�r��spacesAngleBracketsr�)r"ZendTagr3rpr�rrrr��s



z EncodingParser.handlePossibleTagcCs|jjd�S)Nr)r3r�)r"rrrr�szEncodingParser.handleOthercCs|j}|jttdg�B�}|dks2t|�dks2t�|d	kr>dSg}g}xt|dkrX|rXPnX|tkrl|j�}PnD|d
kr�dj|�dfS|tkr�|j|j	��n|dkr�dS|j|�t
|�}qHW|dkr�|j�dj|�dfSt
|�|j�}|dk�rT|}x�t
|�}||k�r(t
|�dj|�dj|�fS|tk�rB|j|j	��n
|j|��q�WnJ|dk�rldj|�dfS|tk�r�|j|j	��n|dk�r�dS|j|�x^t
|�}|tk�r�dj|�dj|�fS|tk�r�|j|j	��n|dk�r�dS|j|��q�WdS)z_Return a name,value pair for the next attribute in the stream,
        if one is found, or None�/Nr
r�=r4�'�")rN)r�r)r�r�)
r3rmr��	frozensetr$r)r5�asciiUppercaseBytesr2r�r�r�r�)r"r3rpZattrNameZ	attrValueZ	quoteCharrrrr�sh










zEncodingParser.getAttributeN)
r7r8r9r:r#r�r�r�r�r�r�r�r�rrrrr��s$r�c@seZdZdd�Zdd�ZdS)r�cCst|t�st�||_dS)N)r=r/r)r3)r"r3rrrr#fszContentAttrParser.__init__cCsy�|jjd�|jjd7_|jj�|jjdks8dS|jjd7_|jj�|jjdkr�|jj}|jjd7_|jj}|jj|�r�|j||jj�SdSnF|jj}y|jjt�|j||jj�Stk
r�|j|d�SXWntk
�rdSXdS)Nscharsetr
r�r�r�)r�r�)r3r�r!rmr�r�r�r�)r"Z	quoteMarkZoldPositionrrrr�js.

zContentAttrParser.parseN)r7r8r9r#r�rrrrr�esr�cCs`t|t�r.y|jd�}Wntk
r,dSX|dk	rXy
tj|�Stk
rTdSXndSdS)z{Return the python codec name corresponding to an encoding or None if the
    string doesn't correspond to a valid encoding.rN)r=r�decode�UnicodeDecodeErrorr	�lookup�AttributeError)r�rrrrK�s

rKrr)4Z
__future__rrrZpip._vendor.sixrrZpip._vendor.six.movesrrr�rsZpip._vendorr	Z	constantsrrr
rrrPr�iorrr�r�r�r�r�r�Zinvalid_unicode_no_surrogaterGrXr)rt�evalre�setrlZascii_punctuation_rerq�objectrrErArBr/r�r�r�rKrrrr�<module>sX
"








JgIh6'_vendor/html5lib/__pycache__/html5parser.cpython-36.opt-1.pyc000064400000275615151733136420020010 0ustar003

�Pf���@sFddlmZmZmZddlmZmZmZddlZyddl	m
Z
Wn ek
r`ddlm
Z
YnXddl
mZddl
mZddl
mZdd	lmZdd
l
mZddlmZmZmZmZmZmZmZmZmZmZmZm Z!m"Z"m#Z#m$Z$m%Z%d!dd�Z&d"dd�Z'dd�Z(Gdd�de)�Z*ej+dd��Z,dd�Z-d#dd�Z.Gdd �d e/�Z0dS)$�)�absolute_import�division�unicode_literals)�with_metaclass�viewkeys�PY3N)�OrderedDict�)�_inputstream)�
_tokenizer)�treebuilders)�Marker)�_utils)�spaceCharacters�asciiUpper2Lower�specialElements�headingElements�
cdataElements�rcdataElements�
tokenTypes�
tagTokenTypes�
namespaces�htmlIntegrationPointElements�"mathmlTextIntegrationPointElements�adjustForeignAttributes�adjustMathMLAttributes�adjustSVGAttributes�E�ReparseException�etreeTcKs$tj|�}t||d�}|j|f|�S)z.Parse a string or file-like object into a tree)�namespaceHTMLElements)r�getTreeBuilder�
HTMLParser�parse)�doc�treebuilderr �kwargs�tb�p�r)�!/usr/lib/python3.6/html5parser.pyr#s
r#�divcKs,tj|�}t||d�}|j|fd|i|��S)N)r �	container)rr!r"�
parseFragment)r$r,r%r r&r'r(r)r)r*r-&s
r-csG�fdd�dt�}|S)NcseZdZ�fdd�ZdS)z-method_decorator_metaclass.<locals>.DecoratedcsBx0|j�D]$\}}t|tj�r&�|�}|||<q
Wtj||||�S)N)�items�
isinstance�types�FunctionType�type�__new__)�metaZ	classname�basesZ	classDictZ
attributeNameZ	attribute)�functionr)r*r3.s
z5method_decorator_metaclass.<locals>.Decorated.__new__N)�__name__�
__module__�__qualname__r3r))r6r)r*�	Decorated-sr:)r2)r6r:r))r6r*�method_decorator_metaclass,sr;c@s�eZdZdZd+dd�Zd,dd	�Zd
d�Zedd
��Zdd�Z	dd�Z
dd�Zdd�Zdd�Z
dd�Zd-dd�Zdd�Zdd �Zd!d"�Zd#d$�Zd%d&�Zd'd(�Zd)d*�ZdS).r"zZHTML parser. Generates a tree structure from a stream of (possibly
        malformed) HTMLNFTcsL|�_|dkrtjd�}||��_g�_t�fdd�t|�j�D���_dS)a
        strict - raise an exception when a parse error is encountered

        tree - a treebuilder class controlling the type of tree that will be
        returned. Built in treebuilders can be accessed through
        html5lib.treebuilders.getTreeBuilder(treeType)
        Nrcs g|]\}}||��j�f�qSr))�tree)�.0�name�cls)�selfr)r*�
<listcomp>Msz'HTMLParser.__init__.<locals>.<listcomp>)	�strictrr!r<�errors�dict�	getPhasesr.�phases)r@r<rBr �debugr))r@r*�__init__<s


zHTMLParser.__init__r+cKsh||_||_||_tj|fd|i|��|_|j�y|j�Wn$tk
rb|j�|j�YnXdS)N�parser)	�
innerHTMLModer,�	scriptingrZ
HTMLTokenizer�	tokenizer�reset�mainLoopr)r@�stream�	innerHTMLr,rKr&r)r)r*�_parsePszHTMLParser._parsecCs�|jj�d|_g|_g|_d|_|jr�|jj�|_	|j	t
krL|jj|j_
n0|j	tkrd|jj|j_
n|j	dkr||jj|j_
n|jd|_|jj�|j�nd|_	|jd|_d|_d|_d|_dS)NFz	no quirks�	plaintext�
beforeHtml�initialT)r<rM�
firstStartTagrC�log�
compatModerJr,�lowerrPrrL�rcdataState�stater�rawtextState�plaintextStaterF�phase�insertHtmlElement�resetInsertionModeZ	lastPhaseZbeforeRCDataPhase�
framesetOK)r@r)r)r*rM^s*





zHTMLParser.resetcCst|d�sdS|jjjdjS)z�The name of the character encoding
        that was used to decode the input stream,
        or :obj:`None` if that is not determined yet.

        rLNr)�hasattrrLrO�charEncodingr>)r@r)r)r*�documentEncoding�s
zHTMLParser.documentEncodingcCsJ|jdkr6|jtdkr6d|jko4|jdjt�dkS|j|jftkSdS)Nzannotation-xml�mathml�encoding�	text/html�application/xhtml+xml)rfrg)r>�	namespacer�
attributes�	translaterr)r@�elementr)r)r*�isHTMLIntegrationPoint�s


z!HTMLParser.isHTMLIntegrationPointcCs|j|jftkS)N)rhr>r)r@rkr)r)r*�isMathMLTextIntegrationPoint�sz'HTMLParser.isMathMLTextIntegrationPointcCsjtd}td}td}td}td}td}td}�x�|j�D�]�}d}	|}
�x�|
dk	�r|
}	|jjrx|jjdnd}|r�|jnd}|r�|jnd}
|
d	}||kr�|j|
d
|
jdi��d}
qVt|jj�dk�sl||jj	k�sl|j
|��r ||k�r|d
tddg�k�sl|||fk�sl|tdk�rP|
dk�rP||k�rP|d
dk�sl|j
|��rt||||fk�rt|j}n
|jd}||k�r�|j|
�}
qV||k�r�|j|
�}
qV||k�r�|j|
�}
qV||k�r�|j|
�}
qV||k�r�|j|
�}
qV||krV|j|
�}
qVW||krD|	drD|	drD|jdd
|	d
i�qDWd}g}x(|�rd|j|j�|jj�}|�r>�q>WdS)N�
CharactersZSpaceCharacters�StartTag�EndTag�CommentZDoctype�
ParseErrorr	r2�data�datavarsrr>ZmglyphZ
malignmarkrdzannotation-xml�svg�inForeignContent�selfClosing�selfClosingAcknowledgedz&non-void-element-with-trailing-solidusT���)r�normalizedTokensr<�openElementsrhr>�
parseError�get�len�defaultNamespacerm�	frozensetrrlr]rF�processCharacters�processSpaceCharacters�processStartTag�
processEndTag�processComment�processDoctype�append�
processEOF)r@ZCharactersTokenZSpaceCharactersTokenZ
StartTagTokenZEndTagTokenZCommentTokenZDoctypeTokenZParseErrorToken�tokenZ
prev_token�	new_token�currentNodeZcurrentNodeNamespaceZcurrentNodeNamer2r]Z	reprocessrFr)r)r*rN�sp










zHTMLParser.mainLoopccs x|jD]}|j|�VqWdS)N)rL�normalizeToken)r@r�r)r)r*rz�szHTMLParser.normalizedTokenscOs |j|ddf|�|�|jj�S)a�Parse a HTML document into a well-formed tree

        stream - a filelike object or string containing the HTML to be parsed

        The optional encoding parameter must be a string that indicates
        the encoding.  If specified, that encoding will be used,
        regardless of any BOM or later declaration (such as in a meta
        element)

        scripting - treat noscript elements as if javascript was turned on
        FN)rQr<ZgetDocument)r@rO�argsr&r)r)r*r#�szHTMLParser.parsecOs|j|df|�|�|jj�S)a2Parse a HTML fragment into a well-formed tree fragment

        container - name of the element we're setting the innerHTML property
        if set to None, default to 'div'

        stream - a filelike object or string containing the HTML to be parsed

        The optional encoding parameter must be a string that indicates
        the encoding.  If specified, that encoding will be used,
        regardless of any BOM or later declaration (such as in a meta
        element)

        scripting - treat noscript elements as if javascript was turned on
        T)rQr<ZgetFragment)r@rOr�r&r)r)r*r-�szHTMLParser.parseFragment�XXX-undefined-errorcCs@|dkri}|jj|jjj�||f�|jr<tt||��dS)N)rCr�rLrOZpositionrBrrr)r@�	errorcodertr)r)r*r|s
zHTMLParser.parseErrorcCsT|dtdkrP|d}t|�|d<t|�t|d�krP|dj|ddd��|S)z3 HTML5 specific normalizations to the token stream r2rorsNr	ry)rrr~�update)r@r��rawr)r)r*r�szHTMLParser.normalizeTokencCst|t�dS)N)�adjust_attributesr)r@r�r)r)r*rsz!HTMLParser.adjustMathMLAttributescCst|t�dS)N)r�r)r@r�r)r)r*rszHTMLParser.adjustSVGAttributescCst|t�dS)N)r��adjustForeignAttributesMap)r@r�r)r)r*rsz"HTMLParser.adjustForeignAttributescCs|jj�dS)N)rIr])r@r�r)r)r*�reparseTokenNormalszHTMLParser.reparseTokenNormalcCs�d}ddddddddddd	d	d
dd�}x�|jjddd�D]p}|j}d}||jjdkrbd}|j}|dkrj|r�|j|jjkr�q:||kr�|j||}Pq:|r:|jd	}Pq:W||_dS)NF�inSelect�inCell�inRow�inTableBody�	inCaption�
inColumnGroup�inTable�inBody�
inFrameset�
beforeHead)�select�td�th�tr�tbody�thead�tfoot�caption�colgroup�table�head�body�frameset�htmlr	rTr�r�r�r�ry)r�r�r�r�)r<r{r>rPrhrrFr])r@ZlastZnewModes�nodeZnodeNameZ	new_phaser)r)r*r_!s>
zHTMLParser.resetInsertionModecCsF|jj|�|dkr"|jj|j_n|jj|j_|j|_|jd|_dS)zYGeneric RCDATA/RAWTEXT Parsing algorithm
        contentType - RCDATA or RAWTEXT
        �RAWTEXT�textN)	r<�
insertElementrLr[rZrYr]�
originalPhaserF)r@r�ZcontentTyper)r)r*�parseRCDataRawtextMszHTMLParser.parseRCDataRawtext)NFTF)Fr+F)r�N)r7r8r9�__doc__rHrQrM�propertyrcrlrmrNrzr#r-r|r�rrrr�r_r�r)r)r)r*r"8s&

"
C
,r"cs"dd�}dd�}Gdd�dt|||����Gdd�d��}Gd	d
�d
��}G�fdd�d��}G�fd
d�d��}G�fdd�d��}G�fdd�d��}G�fdd�d��}	G�fdd�d��}
G�fdd�d��}G�fdd�d��}G�fdd�d��}
G�fdd�d��}G�fdd �d ��}G�fd!d"�d"��}G�fd#d$�d$��}G�fd%d&�d&��}G�fd'd(�d(��}G�fd)d*�d*��}G�fd+d,�d,��}G�fd-d.�d.��}G�fd/d0�d0��}G�fd1d2�d2��}G�fd3d4�d4��}|||||||	|
|||
||||||||||||d5�S)6Ncs(tdd�tj�D�����fdd�}|S)z4Logger that records which phase processes each tokencss|]\}}||fVqdS)Nr))r=�key�valuer)r)r*�	<genexpr>csz)getPhases.<locals>.log.<locals>.<genexpr>cs��jjd�r�t|�dkr�|d}yd�|di}Wn�YnX|dtkr\|d|d<|jjj|jjjj|jj	j
j|j
j�j|f��|f|�|�S�|f|�|�SdS)NZprocessrr2r>)r7�
startswithr~rrIrVr�rLrZr]�	__class__)r@r�r&r��info)r6�
type_namesr)r*�wrappedfs
z'getPhases.<locals>.log.<locals>.wrapped)rDrr.)r6r�r))r6r�r*rVaszgetPhases.<locals>.logcSs|rt|�StSdS)N)r;r2)Z
use_metaclassZmetaclass_funcr)r)r*�getMetaclasszszgetPhases.<locals>.getMetaclassc@sXeZdZdZdd�Zdd�Zdd�Zdd	�Zd
d�Zdd
�Z	dd�Z
dd�Zdd�ZdS)zgetPhases.<locals>.PhasezNBase class for helper object that implements each phase of processing
        cSs||_||_dS)N)rIr<)r@rIr<r)r)r*rH�sz!getPhases.<locals>.Phase.__init__cSst�dS)N)�NotImplementedError)r@r)r)r*r��sz#getPhases.<locals>.Phase.processEOFcSs|jj||jjd�dS)Nr	ry)r<�
insertCommentr{)r@r�r)r)r*r��sz'getPhases.<locals>.Phase.processCommentcSs|jjd�dS)Nzunexpected-doctype)rIr|)r@r�r)r)r*r��sz'getPhases.<locals>.Phase.processDoctypecSs|jj|d�dS)Nrs)r<�
insertText)r@r�r)r)r*r��sz*getPhases.<locals>.Phase.processCharacterscSs|jj|d�dS)Nrs)r<r�)r@r�r)r)r*r��sz/getPhases.<locals>.Phase.processSpaceCharacterscSs|j|d|�S)Nr>)�startTagHandler)r@r�r)r)r*r��sz(getPhases.<locals>.Phase.processStartTagcSsl|jjr"|ddkr"|jjd�x<|dj�D],\}}||jjdjkr0||jjdj|<q0Wd|j_dS)Nr>r�z
non-html-rootrsrF)rIrUr|r.r<r{ri)r@r��attrr�r)r)r*�startTagHtml�sz%getPhases.<locals>.Phase.startTagHtmlcSs|j|d|�S)Nr>)�
endTagHandler)r@r�r)r)r*r��sz&getPhases.<locals>.Phase.processEndTagN)
r7r8r9r�rHr�r�r�r�r�r�r�r�r)r)r)r*�Phase�s
r�c@sLeZdZdd�Zdd�Zdd�Zdd�Zd	d
�Zdd�Zd
d�Z	dd�Z
dS)zgetPhases.<locals>.InitialPhasecSsdS)Nr))r@r�r)r)r*r��sz6getPhases.<locals>.InitialPhase.processSpaceCharacterscSs|jj||jj�dS)N)r<r��document)r@r�r)r)r*r��sz.getPhases.<locals>.InitialPhase.processCommentc8Ss|d}|d}|d}|d}|dks@|dk	s@|dk	rL|dkrL|jjd�|dkrXd}|jj|�|dkrv|jt�}|�s�|ddk�s�|jdJ��s�|dKk�s�|jdL��r�|dk�s�|�r�|j�dDk�r�dE|j_n*|jdM��s�|jdN��r|dk	�rdH|j_|jj	dI|j_
dS)ONr>�publicId�systemId�correctr�zabout:legacy-compatzunknown-doctype��*+//silmaril//dtd html pro v0r11 19970101//�4-//advasoft ltd//dtd html 3.0 aswedit + extensions//�*-//as//dtd html 3.0 aswedit + extensions//�-//ietf//dtd html 2.0 level 1//�-//ietf//dtd html 2.0 level 2//�&-//ietf//dtd html 2.0 strict level 1//�&-//ietf//dtd html 2.0 strict level 2//�-//ietf//dtd html 2.0 strict//�-//ietf//dtd html 2.0//�-//ietf//dtd html 2.1e//�-//ietf//dtd html 3.0//�-//ietf//dtd html 3.2 final//�-//ietf//dtd html 3.2//�-//ietf//dtd html 3//�-//ietf//dtd html level 0//�-//ietf//dtd html level 1//�-//ietf//dtd html level 2//�-//ietf//dtd html level 3//�"-//ietf//dtd html strict level 0//�"-//ietf//dtd html strict level 1//�"-//ietf//dtd html strict level 2//�"-//ietf//dtd html strict level 3//�-//ietf//dtd html strict//�-//ietf//dtd html//�(-//metrius//dtd metrius presentational//�5-//microsoft//dtd internet explorer 2.0 html strict//�.-//microsoft//dtd internet explorer 2.0 html//�0-//microsoft//dtd internet explorer 2.0 tables//�5-//microsoft//dtd internet explorer 3.0 html strict//�.-//microsoft//dtd internet explorer 3.0 html//�0-//microsoft//dtd internet explorer 3.0 tables//�#-//netscape comm. corp.//dtd html//�*-//netscape comm. 
corp.//dtd strict html//�*-//o'reilly and associates//dtd html 2.0//�3-//o'reilly and associates//dtd html extended 1.0//�;-//o'reilly and associates//dtd html extended relaxed 1.0//�N-//softquad software//dtd hotmetal pro 6.0::19990601::extensions to html 4.0//�E-//softquad//dtd hotmetal pro 4.0::19971010::extensions to html 4.0//�$-//spyglass//dtd html 2.0 extended//�+-//sq//dtd html 2.0 hotmetal + extensions//�--//sun microsystems corp.//dtd hotjava html//�4-//sun microsystems corp.//dtd hotjava strict html//�-//w3c//dtd html 3 1995-03-24//�-//w3c//dtd html 3.2 draft//�-//w3c//dtd html 3.2 final//�-//w3c//dtd html 3.2//�-//w3c//dtd html 3.2s draft//�-//w3c//dtd html 4.0 frameset//�#-//w3c//dtd html 4.0 transitional//�(-//w3c//dtd html experimental 19960712//�&-//w3c//dtd html experimental 970421//�-//w3c//dtd w3 html//�-//w3o//dtd w3 html 3.0//�#-//webtechs//dtd mozilla html 2.0//�-//webtechs//dtd mozilla html//�$-//w3o//dtd w3 html strict 3.0//en//�"-/w3c/dtd html 4.0 transitional/en� -//w3c//dtd html 4.01 frameset//�$-//w3c//dtd html 4.01 transitional//z:http://www.ibm.com/data/dtd/v11/ibmxhtml1-transitional.dtd�quirks� -//w3c//dtd xhtml 1.0 frameset//�$-//w3c//dtd xhtml 1.0 transitional//zlimited quirksrS)7r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rr)rrr�)rr)rr)rr)rIr|r<Z
insertDoctyperjrr�rXrWrFr])r@r�r>r�r�r�r)r)r*r��s�



z.getPhases.<locals>.InitialPhase.processDoctypecSsd|j_|jjd|j_dS)NrrS)rIrWrFr])r@r)r)r*�anythingElsesz,getPhases.<locals>.InitialPhase.anythingElsecSs|jjd�|j�|S)Nzexpected-doctype-but-got-chars)rIr|r	)r@r�r)r)r*r�sz1getPhases.<locals>.InitialPhase.processCharacterscSs"|jjdd|di�|j�|S)Nz"expected-doctype-but-got-start-tagr>)rIr|r	)r@r�r)r)r*r�sz/getPhases.<locals>.InitialPhase.processStartTagcSs"|jjdd|di�|j�|S)Nz expected-doctype-but-got-end-tagr>)rIr|r	)r@r�r)r)r*r�sz-getPhases.<locals>.InitialPhase.processEndTagcSs|jjd�|j�dS)Nzexpected-doctype-but-got-eofT)rIr|r	)r@r)r)r*r�%sz*getPhases.<locals>.InitialPhase.processEOFN)r7r8r9r�r�r�r	r�r�r�r�r)r)r)r*�InitialPhase�s_r
c@sDeZdZdd�Zdd�Zdd�Zdd�Zd	d
�Zdd�Zd
d�Z	dS)z"getPhases.<locals>.BeforeHtmlPhasecSs&|jjtdd��|jjd|j_dS)Nr�ror�)r<Z
insertRoot�impliedTagTokenrIrFr])r@r)r)r*r^,sz4getPhases.<locals>.BeforeHtmlPhase.insertHtmlElementcSs|j�dS)NT)r^)r@r)r)r*r�1sz-getPhases.<locals>.BeforeHtmlPhase.processEOFcSs|jj||jj�dS)N)r<r�r�)r@r�r)r)r*r�5sz1getPhases.<locals>.BeforeHtmlPhase.processCommentcSsdS)Nr))r@r�r)r)r*r�8sz9getPhases.<locals>.BeforeHtmlPhase.processSpaceCharacterscSs|j�|S)N)r^)r@r�r)r)r*r�;sz4getPhases.<locals>.BeforeHtmlPhase.processCharacterscSs |ddkrd|j_|j�|S)Nr>r�T)rIrUr^)r@r�r)r)r*r�?sz2getPhases.<locals>.BeforeHtmlPhase.processStartTagcSs4|ddkr$|jjdd|di�n|j�|SdS)Nr>r�r�r��brzunexpected-end-tag-before-html)r�r�r�r)rIr|r^)r@r�r)r)r*r�Es
z0getPhases.<locals>.BeforeHtmlPhase.processEndTagN)
r7r8r9r^r�r�r�r�r�r�r)r)r)r*�BeforeHtmlPhase*sr
csXeZdZ�fdd�Zdd�Zdd�Zdd�Zd	d
�Zdd�Zd
d�Z	dd�Z
dd�ZdS)z"getPhases.<locals>.BeforeHeadPhasecsV�j|||�tjd|jfd|jfg�|_|j|j_tjd|jfg�|_	|j
|j	_dS)Nr�r�r�r)r�r�r�r)rHr�MethodDispatcherr��startTagHeadr��
startTagOther�default�endTagImplyHeadr��endTagOther)r@rIr<)r�r)r*rHNs
z+getPhases.<locals>.BeforeHeadPhase.__init__cSs|jtdd��dS)Nr�roT)rr)r@r)r)r*r�\sz-getPhases.<locals>.BeforeHeadPhase.processEOFcSsdS)Nr))r@r�r)r)r*r�`sz9getPhases.<locals>.BeforeHeadPhase.processSpaceCharacterscSs|jtdd��|S)Nr�ro)rr)r@r�r)r)r*r�csz4getPhases.<locals>.BeforeHeadPhase.processCharacterscSs|jjdj|�S)Nr�)rIrFr�)r@r�r)r)r*r�gsz/getPhases.<locals>.BeforeHeadPhase.startTagHtmlcSs0|jj|�|jjd|j_|jjd|j_dS)Nr	�inHeadry)r<r�r{�headPointerrIrFr])r@r�r)r)r*rjsz/getPhases.<locals>.BeforeHeadPhase.startTagHeadcSs|jtdd��|S)Nr�ro)rr)r@r�r)r)r*rosz0getPhases.<locals>.BeforeHeadPhase.startTagOthercSs|jtdd��|S)Nr�ro)rr)r@r�r)r)r*rssz2getPhases.<locals>.BeforeHeadPhase.endTagImplyHeadcSs|jjdd|di�dS)Nzend-tag-after-implied-rootr>)rIr|)r@r�r)r)r*rwsz.getPhases.<locals>.BeforeHeadPhase.endTagOtherN)r7r8r9rHr�r�r�r�rrrrr))r�r)r*�BeforeHeadPhaseMsrcs�eZdZ�fdd�Zdd�Zdd�Zdd�Zd	d
�Zdd�Zd
d�Z	dd�Z
dd�Zdd�Zdd�Z
dd�Zdd�Zdd�Zdd�Zdd �Zd!S)"zgetPhases.<locals>.InHeadPhasecs��j|||�tjd|jfd|jfd|jfd|jfd|jfd|jfd|j	fd
|j
fg�|_|j|j_
tjd
|jfd|jfg�|_|j|j_
dS)Nr��title�noframes�style�noscript�script�base�basefont�bgsound�command�linkr4r�rr�)rr)rrrrr )rr�r�)rHrrr��
startTagTitle�startTagNoFramesStyle�startTagNoscript�startTagScript�startTagBaseLinkCommand�startTagMetarr�rr�
endTagHead�endTagHtmlBodyBrr�r)r@rIr<)r�r)r*rH|s 
z'getPhases.<locals>.InHeadPhase.__init__cSs|j�dS)NT)r	)r@r)r)r*r��sz)getPhases.<locals>.InHeadPhase.processEOFcSs|j�|S)N)r	)r@r�r)r)r*r��sz0getPhases.<locals>.InHeadPhase.processCharacterscSs|jjdj|�S)Nr�)rIrFr�)r@r�r)r)r*r��sz+getPhases.<locals>.InHeadPhase.startTagHtmlcSs|jjd�dS)Nz!two-heads-are-not-better-than-one)rIr|)r@r�r)r)r*r�sz+getPhases.<locals>.InHeadPhase.startTagHeadcSs$|jj|�|jjj�d|d<dS)NTrx)r<r�r{�pop)r@r�r)r)r*r%�sz6getPhases.<locals>.InHeadPhase.startTagBaseLinkCommandcSs�|jj|�|jjj�d|d<|d}|jjjjddkr�d|krZ|jjjj|d�nVd|kr�d|kr�|dj	�d	kr�t
j|djd
��}t
j
|�}|j�}|jjjj|�dS)NTrxrsr	Z	tentative�charsetZcontentz
http-equivzcontent-typezutf-8)r<r�r{r)rIrLrOrbZchangeEncodingrXr
Z
EncodingBytes�encodeZContentAttrParserr#)r@r�rirsrI�codecr)r)r*r&�s
z+getPhases.<locals>.InHeadPhase.startTagMetacSs|jj|d�dS)NZRCDATA)rIr�)r@r�r)r)r*r!�sz,getPhases.<locals>.InHeadPhase.startTagTitlecSs|jj|d�dS)Nr�)rIr�)r@r�r)r)r*r"�sz4getPhases.<locals>.InHeadPhase.startTagNoFramesStylecSs8|jjr|jj|d�n|jj|�|jjd|j_dS)Nr��inHeadNoscript)rIrKr�r<r�rFr])r@r�r)r)r*r#�sz/getPhases.<locals>.InHeadPhase.startTagNoscriptcSs<|jj|�|jjj|jj_|jj|j_|jjd|j_dS)Nr�)	r<r�rIrLZscriptDataStaterZr]r�rF)r@r�r)r)r*r$�sz-getPhases.<locals>.InHeadPhase.startTagScriptcSs|j�|S)N)r	)r@r�r)r)r*r�sz,getPhases.<locals>.InHeadPhase.startTagOthercSs"|jjjj�}|jjd|j_dS)N�	afterHead)rIr<r{r)rFr])r@r�r�r)r)r*r'�sz)getPhases.<locals>.InHeadPhase.endTagHeadcSs|j�|S)N)r	)r@r�r)r)r*r(�sz/getPhases.<locals>.InHeadPhase.endTagHtmlBodyBrcSs|jjdd|di�dS)Nzunexpected-end-tagr>)rIr|)r@r�r)r)r*r�sz*getPhases.<locals>.InHeadPhase.endTagOthercSs|jtd��dS)Nr�)r'r)r@r)r)r*r	�sz+getPhases.<locals>.InHeadPhase.anythingElseN)r7r8r9rHr�r�r�rr%r&r!r"r#r$rr'r(rr	r))r�r)r*�InHeadPhase{s r/csxeZdZ�fdd�Zdd�Zdd�Zdd�Zd	d
�Zdd�Zd
d�Z	dd�Z
dd�Zdd�Zdd�Z
dd�Zdd�ZdS)z&getPhases.<locals>.InHeadNoscriptPhasecsf�j|||�tjd|jfd|jfd|jfg�|_|j|j_tjd	|j	fd
|j
fg�|_|j|j_dS)
Nr�rrr r4rrr�rr)rrr r4rr)r�r)
rHrrr�r%�startTagHeadNoscriptr�rr�endTagNoscript�endTagBrr�r)r@rIr<)r�r)r*rH�s
z/getPhases.<locals>.InHeadNoscriptPhase.__init__cSs|jjd�|j�dS)Nzeof-in-head-noscriptT)rIr|r	)r@r)r)r*r��sz1getPhases.<locals>.InHeadNoscriptPhase.processEOFcSs|jjdj|�S)Nr)rIrFr�)r@r�r)r)r*r��sz5getPhases.<locals>.InHeadNoscriptPhase.processCommentcSs|jjd�|j�|S)Nzchar-in-head-noscript)rIr|r	)r@r�r)r)r*r��sz8getPhases.<locals>.InHeadNoscriptPhase.processCharacterscSs|jjdj|�S)Nr)rIrFr�)r@r�r)r)r*r�sz=getPhases.<locals>.InHeadNoscriptPhase.processSpaceCharacterscSs|jjdj|�S)Nr�)rIrFr�)r@r�r)r)r*r�sz3getPhases.<locals>.InHeadNoscriptPhase.startTagHtmlcSs|jjdj|�S)Nr)rIrFr�)r@r�r)r)r*r%sz>getPhases.<locals>.InHeadNoscriptPhase.startTagBaseLinkCommandcSs|jjdd|di�dS)Nzunexpected-start-tagr>)rIr|)r@r�r)r)r*r0	sz;getPhases.<locals>.InHeadNoscriptPhase.startTagHeadNoscriptcSs"|jjdd|di�|j�|S)Nzunexpected-inhead-noscript-tagr>)rIr|r	)r@r�r)r)r*rsz4getPhases.<locals>.InHeadNoscriptPhase.startTagOthercSs"|jjjj�}|jjd|j_dS)Nr)rIr<r{r)rFr])r@r�r�r)r)r*r1sz5getPhases.<locals>.InHeadNoscriptPhase.endTagNoscriptcSs"|jjdd|di�|j�|S)Nzunexpected-inhead-noscript-tagr>)rIr|r	)r@r�r)r)r*r2sz/getPhases.<locals>.InHeadNoscriptPhase.endTagBrcSs|jjdd|di�dS)Nzunexpected-end-tagr>)rIr|)r@r�r)r)r*rsz2getPhases.<locals>.InHeadNoscriptPhase.endTagOthercSs|jtd��dS)Nr)r1r)r@r)r)r*r	sz3getPhases.<locals>.InHeadNoscriptPhase.anythingElseN)r7r8r9rHr�r�r�r�r�r%r0rr1r2rr	r))r�r)r*�InHeadNoscriptPhase�sr3cspeZdZ�fdd�Zdd�Zdd�Zdd�Zd	d
�Zdd�Zd
d�Z	dd�Z
dd�Zdd�Zdd�Z
dd�ZdS)z!getPhases.<locals>.AfterHeadPhasec
sn�j|||�tjd|jfd|jfd|jfd|jfd
|jfg�|_|j	|j_
tjd|jfg�|_|j
|j_
dS)Nr�r�r�rrrr r4rrrrr�r)	rrrr r4rrrr)r�r�r)rHrrr��startTagBody�startTagFrameset�startTagFromHeadrr�rrr(r�r)r@rIr<)r�r)r*rH#s
z*getPhases.<locals>.AfterHeadPhase.__init__cSs|j�dS)NT)r	)r@r)r)r*r�4sz,getPhases.<locals>.AfterHeadPhase.processEOFcSs|j�|S)N)r	)r@r�r)r)r*r�8sz3getPhases.<locals>.AfterHeadPhase.processCharacterscSs|jjdj|�S)Nr�)rIrFr�)r@r�r)r)r*r�<sz.getPhases.<locals>.AfterHeadPhase.startTagHtmlcSs(d|j_|jj|�|jjd|j_dS)NFr�)rIr`r<r�rFr])r@r�r)r)r*r4?sz.getPhases.<locals>.AfterHeadPhase.startTagBodycSs |jj|�|jjd|j_dS)Nr�)r<r�rIrFr])r@r�r)r)r*r5Dsz2getPhases.<locals>.AfterHeadPhase.startTagFramesetcSst|jjdd|di�|jjj|jj�|jjdj|�x4|jjddd�D]}|jdkrN|jjj	|�PqNWdS)Nz#unexpected-start-tag-out-of-my-headr>rr	r�ry)
rIr|r<r{r�rrFr�r>�remove)r@r�r�r)r)r*r6Hs
z2getPhases.<locals>.AfterHeadPhase.startTagFromHeadcSs|jjdd|di�dS)Nzunexpected-start-tagr>)rIr|)r@r�r)r)r*rRsz.getPhases.<locals>.AfterHeadPhase.startTagHeadcSs|j�|S)N)r	)r@r�r)r)r*rUsz/getPhases.<locals>.AfterHeadPhase.startTagOthercSs|j�|S)N)r	)r@r�r)r)r*r(Ysz2getPhases.<locals>.AfterHeadPhase.endTagHtmlBodyBrcSs|jjdd|di�dS)Nzunexpected-end-tagr>)rIr|)r@r�r)r)r*r]sz-getPhases.<locals>.AfterHeadPhase.endTagOthercSs.|jjtdd��|jjd|j_d|j_dS)Nr�ror�T)r<r�rrIrFr]r`)r@r)r)r*r	`sz.getPhases.<locals>.AfterHeadPhase.anythingElseN)r7r8r9rHr�r�r�r4r5r6rrr(rr	r))r�r)r*�AfterHeadPhase"s
r8cs�eZdZ�fdd�Zdd�Zdd�Zdd�Zd	d
�Zdd�Zd
d�Z	dd�Z
dd�Zdd�Zdd�Z
dd�Zdd�Zdd�Zdd�Zdd �Zd!d"�Zd#d$�Zd%d&�Zd'd(�Zd)d*�Zd+d,�Zd-d.�Zd/d0�Zd1d2�Zd3d4�Zd5d6�Zd7d8�Zd9d:�Zd;d<�Z d=d>�Z!d?d@�Z"dAdB�Z#dCdD�Z$dEdF�Z%dGdH�Z&dIdJ�Z'dKdL�Z(dMdN�Z)dOdP�Z*dQdR�Z+dSdT�Z,dUdV�Z-dWdX�Z.dYdZ�Z/d[d\�Z0d]d^�Z1d_d`�Z2dadb�Z3dcdd�Z4dedf�Z5dgS)hzgetPhases.<locals>.InBodyPhasec,s��j|||�|j|_tjd|jfdd|jfd|jfd|jfde|j	ft
|jfdf|jfd&|j
fdg|jfd*|jfd+|jfdh|jfd8|jfd9|jfdi|jfd=|jfd>|jfdj|jfdk|jfdH|jfdI|jfdJ|jfdK|jfdL|jfdM|jfdN|jfdl|j fdQ|j!fdm|j"fdn|j#fdV|j$fdW|j%fdo|j&fg!�|_'|j(|j'_)tjd|j*fd|j+fdp|j,fd&|j-fd |j.fdq|j/ft
|j0fdr|j1fds|j2fd@|j3fg
�|_4|j5|j4_)dS)tNr�rrrrr r4rrrr�r��address�article�aside�
blockquote�center�details�dirr+�dl�fieldset�
figcaption�figure�footer�header�hgroup�main�menu�nav�olr(�section�summary�ul�pre�listing�form�li�dd�dtrR�a�b�big�code�em�font�i�s�small�strike�strong�tt�u�nobr�button�applet�marquee�objectZxmpr��arear�embed�img�keygen�wbr�param�source�track�input�hr�image�isindex�textareaZiframer�noembedrr��rp�rt�option�optgroupZmathrur��colr��framer�r�r�r�r�r�r��dialog)	rrrrr r4rrr)r9r:r;r<r=r>r?r+r@rArBrCrDrErFrGrHrIrJr(rKrLrM)rNrO)rQrRrS)rUrVrWrXrYrZr[r\r]r^r_r`)rcrdre)rfrrgrhrirj)rkrlrm)rsr)rtru)rvrw)r�rxr�ryr�r�r�r�r�r�r�)r9r:r;r<rbr=r>rzr?r+r@rArBrCrDrErFrOrGrHrIrJrNrKrLrM)rRrSrQ)rTrUrVrWrXrYrZrar[r\r]r^r_r`)rcrdre)6rH�processSpaceCharactersNonPrer�rrr��startTagProcessInHeadr4r5�startTagClosePr�startTagHeading�startTagPreListing�startTagForm�startTagListItem�startTagPlaintext�	startTagA�startTagFormatting�startTagNobr�startTagButton�startTagAppletMarqueeObject�startTagXmp�
startTagTable�startTagVoidFormatting�startTagParamSource�
startTagInput�
startTagHr�
startTagImage�startTagIsIndex�startTagTextarea�startTagIFramer#�startTagRawtext�startTagSelect�startTagRpRt�startTagOpt�startTagMath�startTagSvg�startTagMisplacedr�rr�
endTagBody�
endTagHtml�endTagBlock�
endTagForm�endTagP�endTagListItem�
endTagHeading�endTagFormatting�endTagAppletMarqueeObjectr2r�r)r@rIr<)r�r)r*rHhs~
z'getPhases.<locals>.InBodyPhase.__init__cSs$|j|jko"|j|jko"|j|jkS)N)r>rhri)r@Znode1Znode2r)r)r*�isMatchingFormattingElement�sz:getPhases.<locals>.InBodyPhase.isMatchingFormattingElementcSs�|jj|�|jjd}g}x<|jjddd�D]&}|tkr@Pq0|j||�r0|j|�q0Wt|�dkrx|jjj|d�|jjj|�dS)Nr	�ryryry)	r<r�r{�activeFormattingElementsr
r�r�r~r7)r@r�rkZmatchingElementsr�r)r)r*�addFormattingElement�sz3getPhases.<locals>.InBodyPhase.addFormattingElementc
Ss@td�}x2|jjddd�D]}|j|kr|jjd�PqWdS)NrRrSrQr(r�r�r�r�r�r�r�r�r	z expected-closing-tag-but-got-eof)rRrSrQr(r�r�r�r�r�r�r�r�ry)r�r<r{r>rIr|)r@Zallowed_elementsr�r)r)r*r��s
z)getPhases.<locals>.InBodyPhase.processEOFcSsh|d}|j|_|jd�rJ|jjdjdkrJ|jjd	j�rJ|dd�}|rd|jj�|jj|�dS)
Nrs�
r	rNrOrrry)rNrOrrry)	r{r�r�r<r{r>Z
hasContent�#reconstructActiveFormattingElementsr�)r@r�rsr)r)r*�!processSpaceCharactersDropNewline�s

z@getPhases.<locals>.InBodyPhase.processSpaceCharactersDropNewlinecSsT|ddkrdS|jj�|jj|d�|jjrPtdd�|dD��rPd|j_dS)Nrs�cSsg|]}|tk�qSr))r)r=�charr)r)r*rA�szDgetPhases.<locals>.InBodyPhase.processCharacters.<locals>.<listcomp>F)r<r�r�rIr`�any)r@r�r)r)r*r��s
z0getPhases.<locals>.InBodyPhase.processCharacterscSs|jj�|jj|d�dS)Nrs)r<r�r�)r@r�r)r)r*r{�s
z;getPhases.<locals>.InBodyPhase.processSpaceCharactersNonPrecSs|jjdj|�S)Nr)rIrFr�)r@r�r)r)r*r|�sz4getPhases.<locals>.InBodyPhase.startTagProcessInHeadcSs�|jjdddi�t|jj�dks||jjdjdkr6nFd|j_x<|dj�D],\}}||jjdjkrL||jjdj|<qLWdS)Nzunexpected-start-tagr>r�r	Frs)	rIr|r~r<r{r>r`r.ri)r@r�r�r�r)r)r*r4�sz+getPhases.<locals>.InBodyPhase.startTagBodycSs�|jjdddi�t|jj�dks�|jjdjdkr6nt|jjs@nj|jjdjrj|jjdjj|jjd�x"|jjdjdkr�|jjj	�qlW|jj
|�|jjd|j_dS)	Nzunexpected-start-tagr>r�r	r�r�r�ry)
rIr|r~r<r{r>r`�parent�removeChildr)r�rFr])r@r�r)r)r*r5�s"z/getPhases.<locals>.InBodyPhase.startTagFramesetcSs.|jjddd�r|jtd��|jj|�dS)Nr(rb)�variant)r<�elementInScoper�rr�)r@r�r)r)r*r}	sz-getPhases.<locals>.InBodyPhase.startTagClosePcSs>|jjddd�r|jtd��|jj|�d|j_|j|_dS)Nr(rb)r�F)	r<r�r�rr�rIr`r�r�)r@r�r)r)r*rs
z1getPhases.<locals>.InBodyPhase.startTagPreListingcSsZ|jjr|jjdddi�n:|jjddd�r:|jtd��|jj|�|jjd|j_dS)	Nzunexpected-start-tagr>rPr(rb)r�r	ry)	r<�formPointerrIr|r�r�rr�r{)r@r�r)r)r*r�sz+getPhases.<locals>.InBodyPhase.startTagFormcSs�d|j_dgddgddgd�}||d}xLt|jj�D]<}|j|kr^|jjjt|jd��P|j	t
kr8|jd
kr8Pq8W|jjd
dd�r�|jjjtd
d��|jj|�dS)NFrQrSrR)rQrSrRr>rpr9r+r(rb)r�)r9r+r()
rIr`�reversedr<r{r>r]r�r�	nameTuplerr�r�)r@r�ZstopNamesMapZ	stopNamesr�r)r)r*r�s"


z/getPhases.<locals>.InBodyPhase.startTagListItemcSs>|jjddd�r|jtd��|jj|�|jjj|jj_dS)Nr(rb)r�)	r<r�r�rr�rIrLr\rZ)r@r�r)r)r*r�4sz0getPhases.<locals>.InBodyPhase.startTagPlaintextcSsb|jjddd�r|jtd��|jjdjtkrR|jjdd|di�|jjj	�|jj
|�dS)Nr(rb)r�r	zunexpected-start-tagr>ry)r<r�r�rr{r>rrIr|r)r�)r@r�r)r)r*r~:sz.getPhases.<locals>.InBodyPhase.startTagHeadingcSs~|jjd�}|rf|jjdddd��|jtd��||jjkrL|jjj|�||jjkrf|jjj|�|jj	�|j
|�dS)NrTz$unexpected-start-tag-implies-end-tag)�	startName�endName)r<�!elementInActiveFormattingElementsrIr|r�rr{r7r�r�r�)r@r�ZafeAElementr)r)r*r�Bs
z(getPhases.<locals>.InBodyPhase.startTagAcSs|jj�|j|�dS)N)r<r�r�)r@r�r)r)r*r�Os
z1getPhases.<locals>.InBodyPhase.startTagFormattingcSsP|jj�|jjd�rB|jjdddd��|jtd��|jj�|j|�dS)Nraz$unexpected-start-tag-implies-end-tag)r�r�)r<r�r�rIr|r�rr�)r@r�r)r)r*r�Ss

z+getPhases.<locals>.InBodyPhase.startTagNobrcSsT|jjd�r2|jjdddd��|jtd��|S|jj�|jj|�d|j_dS)Nrbz$unexpected-start-tag-implies-end-tag)r�r�F)	r<r�rIr|r�rr�r�r`)r@r�r)r)r*r�]s
z-getPhases.<locals>.InBodyPhase.startTagButtoncSs0|jj�|jj|�|jjjt�d|j_dS)NF)r<r�r�r�r�r
rIr`)r@r�r)r)r*r�hs
z:getPhases.<locals>.InBodyPhase.startTagAppletMarqueeObjectcSsB|jjddd�r|jtd��|jj�d|j_|jj|d�dS)Nr(rb)r�Fr�)r<r�r�rr�rIr`r�)r@r�r)r)r*r�ns

z*getPhases.<locals>.InBodyPhase.startTagXmpcSsR|jjdkr*|jjddd�r*|jtd��|jj|�d|j_|jjd|j_	dS)Nrr(rb)r�Fr�)
rIrWr<r�r�rr�r`rFr])r@r�r)r)r*r�usz,getPhases.<locals>.InBodyPhase.startTagTablecSs6|jj�|jj|�|jjj�d|d<d|j_dS)NTrxF)r<r�r�r{r)rIr`)r@r�r)r)r*r�}s

z5getPhases.<locals>.InBodyPhase.startTagVoidFormattingcSs@|jj}|j|�d|dkr<|ddjt�dkr<||j_dS)Nr2rs�hidden)rIr`r�rjr)r@r�r`r)r)r*r��s

z,getPhases.<locals>.InBodyPhase.startTagInputcSs$|jj|�|jjj�d|d<dS)NTrx)r<r�r{r))r@r�r)r)r*r��sz2getPhases.<locals>.InBodyPhase.startTagParamSourcecSsJ|jjddd�r|jtd��|jj|�|jjj�d|d<d|j_dS)Nr(rb)r�TrxF)	r<r�r�rr�r{r)rIr`)r@r�r)r)r*r��sz)getPhases.<locals>.InBodyPhase.startTagHrcSs6|jjdddd��|jtdd|d|dd��dS)	Nzunexpected-start-tag-treated-asrprh)�originalName�newNamerorsrw)rirw)rIr|r�r)r@r�r)r)r*r��s

z,getPhases.<locals>.InBodyPhase.startTagImagecSs|jjdddi�|jjrdSi}d|dkr>|dd|d<|jtdd|d��|jtd	d��|jtd
d��d|dkr�|dd}nd}|jtd
|d��|dj�}d|kr�|d=d|kr�|d=d|d<|jtdd||dd��|j	td
��|jtd	d��|j	td��dS)Nzdeprecated-tagr>rq�actionrsrPro)riroZlabel�promptz3This is a searchable index. Enter search keywords: rn)r2rsrnrw)rirw)
rIr|r<r�r�rr�r�copyr�)r@r�Z
form_attrsr�rir)r)r*r��s6


z.getPhases.<locals>.InBodyPhase.startTagIsIndexcSs0|jj|�|jjj|jj_|j|_d|j_dS)NF)	r<r�rIrLrYrZr�r�r`)r@r�r)r)r*r��sz/getPhases.<locals>.InBodyPhase.startTagTextareacSsd|j_|j|�dS)NF)rIr`r�)r@r�r)r)r*r��sz-getPhases.<locals>.InBodyPhase.startTagIFramecSs"|jjr|j|�n
|j|�dS)N)rIrKr�r)r@r�r)r)r*r#�sz/getPhases.<locals>.InBodyPhase.startTagNoscriptcSs|jj|d�dS)z8iframe, noembed noframes, noscript(if scripting enabled)r�N)rIr�)r@r�r)r)r*r��sz.getPhases.<locals>.InBodyPhase.startTagRawtextcSs@|jjdjdkr$|jjjtd��|jj�|jjj|�dS)Nr	rvry)	r<r{r>rIr]r�rr�r�)r@r�r)r)r*r��s
z*getPhases.<locals>.InBodyPhase.startTagOptcSs�|jj�|jj|�d|j_|jj|jjd|jjd|jjd|jjd|jjd|jjdfkrx|jjd|j_n|jjd	|j_dS)
NFr�r�r�r�r�r��inSelectInTabler�)r<r�r�rIr`r]rF)r@r�r)r)r*r��s




z-getPhases.<locals>.InBodyPhase.startTagSelectcSsB|jjd�r2|jj�|jjdjdkr2|jj�|jj|�dS)N�rubyr	ry)r<r��generateImpliedEndTagsr{r>rIr|r�)r@r�r)r)r*r��s


z+getPhases.<locals>.InBodyPhase.startTagRpRtcSsZ|jj�|jj|�|jj|�td|d<|jj|�|drV|jjj�d|d<dS)NrdrhrwTrx)	r<r�rIrrrr�r{r))r@r�r)r)r*r��s
z+getPhases.<locals>.InBodyPhase.startTagMathcSsZ|jj�|jj|�|jj|�td|d<|jj|�|drV|jjj�d|d<dS)NrurhrwTrx)	r<r�rIrrrr�r{r))r@r�r)r)r*r��s
z*getPhases.<locals>.InBodyPhase.startTagSvgcSs|jjdd|di�dS)a5 Elements that should be children of other elements that have a
            different insertion mode; here they are ignored
            "caption", "col", "colgroup", "frame", "frameset", "head",
            "option", "optgroup", "tbody", "td", "tfoot", "th", "thead",
            "tr", "noscript"
            zunexpected-start-tag-ignoredr>N)rIr|)r@r�r)r)r*r�sz0getPhases.<locals>.InBodyPhase.startTagMisplacedcSs|jj�|jj|�dS)N)r<r�r�)r@r�r)r)r*rs
z,getPhases.<locals>.InBodyPhase.startTagOthercSs�|jjddd�sD|jtdd��|jjdddi�|jtdd��nX|jjd�|jjd	j	dkrt|jjdddi�|jjj
�}x|j	dkr�|jjj
�}q�WdS)
Nr(rb)r�rozunexpected-end-tagr>rpr	ry)r<r�r}rrIr|r�r�r{r>r))r@r�r�r)r)r*r�sz&getPhases.<locals>.InBodyPhase.endTagPcSs�|jjd�s|jj�dS|jjdjdkrlx>|jjdd�D]*}|jtd�kr>|jjdd|jd��Pq>W|jjd|j_dS)Nr�r	�rRrSrQrwrvr(rtrur�r�r�r�r�r�r�z$expected-one-end-tag-but-got-another)�gotName�expectedName�	afterBodyry)rRrSrQrwrvr(rtrur�r�r�r�r�r�r�r�)	r<r�rIr|r{r>r�rFr])r@r�r�r)r)r*r�!s
z)getPhases.<locals>.InBodyPhase.endTagBodycSs"|jjd�r|jtd��|SdS)Nr�)r<r�r�r)r@r�r)r)r*r�3sz)getPhases.<locals>.InBodyPhase.endTagHtmlcSs�|ddkr|j|_|jj|d�}|r2|jj�|jjdj|dkr^|jjdd|di�|r�|jjj	�}x|j|dkr�|jjj	�}qpWdS)Nr>rNr	zend-tag-too-earlyry)
r{r�r<r�r�r{r>rIr|r))r@r�ZinScoper�r)r)r*r�9s
z*getPhases.<locals>.InBodyPhase.endTagBlockcSsx|jj}d|j_|dks&|jj|�r:|jjdddi�n:|jj�|jjd|krf|jjdddi�|jjj|�dS)Nzunexpected-end-tagr>rPr	zend-tag-too-early-ignoredry)r<r�r�rIr|r�r{r7)r@r�r�r)r)r*r�Gs

z)getPhases.<locals>.InBodyPhase.endTagFormcSs�|ddkrd}nd}|jj|d|d�sB|jjdd|di�nj|jj|dd�|jjd	j|dkr�|jjdd|di�|jjj�}x|j|dkr�|jjj�}q�WdS)
Nr>rQ�list)r�zunexpected-end-tag)�excluder	zend-tag-too-earlyry)r<r�rIr|r�r{r>r))r@r�r�r�r)r)r*r�Tsz-getPhases.<locals>.InBodyPhase.endTagListItemcSs�x$tD]}|jj|�r|jj�PqW|jjdj|dkrR|jjdd|di�xBtD]:}|jj|�rX|jjj�}x|jtkr�|jjj�}qvWPqXWdS)Nr	r>zend-tag-too-earlyry)	rr<r�r�r{r>rIr|r))r@r��itemr)r)r*r�es


z,getPhases.<locals>.InBodyPhase.endTagHeadingcSs"d}�x|dk�r|d7}|jj|d�}|sL||jjkrZ|jj|j�rZ|j|�dS||jjkr�|jjdd|di�|jjj	|�dS|jj|j�s�|jjdd|di�dS||jjdkr�|jjdd|di�|jjj
|�}d}x,|jj|d�D]}|jtk�r|}P�qW|dk�rb|jjj
�}x||k�rN|jjj
�}�q4W|jjj	|�dS|jj|d}|jjj
|�}|}	}
d}|jjj
|
�}x�|d	k�rh|d7}|d8}|jj|}
|
|jjk�r�|jjj	|
��q�|
|k�r�P|	|k�r
|jjj
|
�d}|
j�}
|
|jj|jjj
|
�<|
|jj|jjj
|
�<|
}
|	j�rV|	jj|	�|
j|	�|
}	�q�W|	j�r~|	jj|	�|jtd�k�r�|jj�\}}|j|	|�n
|j|	�|j�}
|j|
�|j|
�|jjj	|�|jjj||
�|jjj	|�|jjj|jjj
|�d|
�qWdS)z)The much-feared adoption agency algorithmr�r	r>Nzadoption-agency-1.2zadoption-agency-4.4zadoption-agency-1.3r�r�r�r�r�r�ry)r�r�r�r�r�)r<r�r{r�r>rrIr|r�r7�indexr�rr)Z	cloneNoder�r�ZappendChildr�ZgetTableMisnestedNodePosition�insertBeforeZreparentChildren�insert)r@r�ZouterLoopCounterZformattingElementZafeIndexZ
furthestBlockrkZcommonAncestorZbookmarkZlastNoder�ZinnerLoopCounterr�Zcloner�r�r)r)r*r�ts�











z/getPhases.<locals>.InBodyPhase.endTagFormattingcSs�|jj|d�r|jj�|jjdj|dkrF|jjdd|di�|jj|d�r�|jjj�}x|j|dkr�|jjj�}qdW|jj�dS)Nr>r	zend-tag-too-earlyry)	r<r�r�r{r>rIr|r)�clearActiveFormattingElements)r@r�rkr)r)r*r�s
z8getPhases.<locals>.InBodyPhase.endTagAppletMarqueeObjectcSs@|jjdddd��|jj�|jjtdd��|jjj�dS)Nzunexpected-end-tag-treated-asrz
br element)r�r�ro)rIr|r<r�r�rr{r))r@r�r)r)r*r2#s

z'getPhases.<locals>.InBodyPhase.endTagBrcSs�x�|jjddd�D]�}|j|dkr~|jj|dd�|jjdj|dkrd|jjdd|di�x|jjj�|krxqfWPq|jtkr|jjdd|di�PqWdS)Nr	r>)r�zunexpected-end-tagryry)	r<r{r>r�rIr|r)r�r)r@r�r�r)r)r*r*s
z*getPhases.<locals>.InBodyPhase.endTagOtherN)6r7r8r9rHr�r�r�r�r�r{r|r4r5r}rr�r�r�r~r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r#r�r�r�r�r�r�r�rr�r�r�r�r�r�r�r�r�r2rr))r�r)r*�InBodyPhaseeshG

	

	

$r�cs@eZdZ�fdd�Zdd�Zdd�Zdd�Zd	d
�Zdd�Zd
S)zgetPhases.<locals>.TextPhasecsF�j|||�tjg�|_|j|j_tjd|jfg�|_|j|j_dS)Nr)	rHrrr�rr�endTagScriptr�r)r@rIr<)r�r)r*rH9s
z%getPhases.<locals>.TextPhase.__init__cSs|jj|d�dS)Nrs)r<r�)r@r�r)r)r*r�Asz.getPhases.<locals>.TextPhase.processCharacterscSs8|jjdd|jjdji�|jjj�|jj|j_dS)Nz&expected-named-closing-tag-but-got-eofr>r	Try)rIr|r<r{r>r)r�r])r@r)r)r*r�Ds
z'getPhases.<locals>.TextPhase.processEOFcSsdS)Nr))r@r�r)r)r*rKsz*getPhases.<locals>.TextPhase.startTagOthercSs|jjj�}|jj|j_dS)N)r<r{r)rIr�r])r@r�r�r)r)r*r�Nsz)getPhases.<locals>.TextPhase.endTagScriptcSs|jjj�|jj|j_dS)N)r<r{r)rIr�r])r@r�r)r)r*rUsz(getPhases.<locals>.TextPhase.endTagOtherN)	r7r8r9rHr�r�rr�rr))r�r)r*�	TextPhase8sr�cs�eZdZ�fdd�Zdd�Zdd�Zdd�Zd	d
�Zdd�Zd
d�Z	dd�Z
dd�Zdd�Zdd�Z
dd�Zdd�Zdd�Zdd�Zdd �Zd!d"�Zd#d$�Zd%d&�Zd'S)(zgetPhases.<locals>.InTablePhasec
s��j|||�tjd|jfd|jfd|jfd|jfd|jfd|jfd|j	fd|j
fd|jfd|jfg
�|_
|j|j
_tjd|jfd|jfg�|_|j|j_dS)Nr�r�r�rxr�r�r�r�r�r�r�rrrnrPr�)r�r�r�)r�r�r�)rr)r�r�rxr�r�r�r�r�r�r�r�)rHrrr��startTagCaption�startTagColgroup�startTagCol�startTagRowGroup�startTagImplyTbodyr��startTagStyleScriptr�r�r�rr�endTagTable�endTagIgnorer�r)r@rIr<)r�r)r*rH[s$
z(getPhases.<locals>.InTablePhase.__init__cSs(x"|jjdjdkr"|jjj�qWdS)Nr	r�r�ry)r�r�)r<r{r>r))r@r)r)r*�clearStackToTableContextssz8getPhases.<locals>.InTablePhase.clearStackToTableContextcSs$|jjdjdkr |jjd�ndS)Nr	r�zeof-in-tablery)r<r{r>rIr|)r@r)r)r*r�|sz*getPhases.<locals>.InTablePhase.processEOFcSs4|jj}|jjd|j_||jj_|jjj|�dS)N�inTableText)rIr]rFr�r�)r@r�r�r)r)r*r��s
z6getPhases.<locals>.InTablePhase.processSpaceCharacterscSs4|jj}|jjd|j_||jj_|jjj|�dS)Nr�)rIr]rFr�r�)r@r�r�r)r)r*r��s
z1getPhases.<locals>.InTablePhase.processCharacterscSs&d|j_|jjdj|�d|j_dS)NTr�F)r<�insertFromTablerIrFr�)r@r�r)r)r*r��sz*getPhases.<locals>.InTablePhase.insertTextcSs6|j�|jjjt�|jj|�|jjd|j_dS)Nr�)	r�r<r�r�r
r�rIrFr])r@r�r)r)r*r��sz/getPhases.<locals>.InTablePhase.startTagCaptioncSs(|j�|jj|�|jjd|j_dS)Nr�)r�r<r�rIrFr])r@r�r)r)r*r��sz0getPhases.<locals>.InTablePhase.startTagColgroupcSs|jtdd��|S)Nr�ro)r�r)r@r�r)r)r*r��sz+getPhases.<locals>.InTablePhase.startTagColcSs(|j�|jj|�|jjd|j_dS)Nr�)r�r<r�rIrFr])r@r�r)r)r*r��sz0getPhases.<locals>.InTablePhase.startTagRowGroupcSs|jtdd��|S)Nr�ro)r�r)r@r�r)r)r*r��sz2getPhases.<locals>.InTablePhase.startTagImplyTbodycSs6|jjdddd��|jjjtd��|jjs2|SdS)Nz$unexpected-start-tag-implies-end-tagr�)r�r�)rIr|r]r�rrP)r@r�r)r)r*r��s
z-getPhases.<locals>.InTablePhase.startTagTablecSs|jjdj|�S)Nr)rIrFr�)r@r�r)r)r*r��sz3getPhases.<locals>.InTablePhase.startTagStyleScriptcSsVd|dkrH|ddjt�dkrH|jjd�|jj|�|jjj�n
|j|�dS)Nr2rsr�z unexpected-hidden-input-in-table)	rjrrIr|r<r�r{r)r)r@r�r)r)r*r��sz-getPhases.<locals>.InTablePhase.startTagInputcSsD|jjd�|jjdkr@|jj|�|jjd|j_|jjj�dS)Nzunexpected-form-in-tabler	ry)rIr|r<r�r�r{r))r@r�r)r)r*r��s
z,getPhases.<locals>.InTablePhase.startTagFormcSs<|jjdd|di�d|j_|jjdj|�d|j_dS)Nz)unexpected-start-tag-implies-table-voodoor>Tr�F)rIr|r<r�rFr�)r@r�r)r)r*r�sz-getPhases.<locals>.InTablePhase.startTagOthercSs�|jjddd�r�|jj�|jjdjdkrJ|jjdd|jjdjd��x"|jjdjdkrl|jjj�qLW|jjj�|jj�n
|jj�dS)	Nr�)r�r	zend-tag-too-early-named)r�r�ryryry)	r<r�r�r{r>rIr|r)r_)r@r�r)r)r*r��s
z+getPhases.<locals>.InTablePhase.endTagTablecSs|jjdd|di�dS)Nzunexpected-end-tagr>)rIr|)r@r�r)r)r*r��sz,getPhases.<locals>.InTablePhase.endTagIgnorecSs<|jjdd|di�d|j_|jjdj|�d|j_dS)Nz'unexpected-end-tag-implies-table-voodoor>Tr�F)rIr|r<r�rFr�)r@r�r)r)r*r�sz+getPhases.<locals>.InTablePhase.endTagOtherN)r7r8r9rHr�r�r�r�r�r�r�r�r�r�r�r�r�r�rr�r�rr))r�r)r*�InTablePhaseYs&	
r�csPeZdZ�fdd�Zdd�Zdd�Zdd�Zd	d
�Zdd�Zd
d�Z	dd�Z
dS)z#getPhases.<locals>.InTableTextPhasecs�j|||�d|_g|_dS)N)rHr��characterTokens)r@rIr<)r�r)r*rH�sz,getPhases.<locals>.InTableTextPhase.__init__cSsddjdd�|jD��}tdd�|D��rJtd|d�}|jjdj|�n|rZ|jj|�g|_dS)Nr�cSsg|]}|d�qS)rsr))r=r�r)r)r*rA�szGgetPhases.<locals>.InTableTextPhase.flushCharacters.<locals>.<listcomp>cSsg|]}|tk�qSr))r)r=r�r)r)r*rA�srn)r2rsr�)�joinr�r�rrIrFr�r<)r@rsr�r)r)r*�flushCharacters�sz3getPhases.<locals>.InTableTextPhase.flushCharacterscSs|j�|j|j_|S)N)r�r�rIr])r@r�r)r)r*r��s
z2getPhases.<locals>.InTableTextPhase.processCommentcSs|j�|j|j_dS)NT)r�r�rIr])r@r)r)r*r��s
z.getPhases.<locals>.InTableTextPhase.processEOFcSs |ddkrdS|jj|�dS)Nrsr�)r�r�)r@r�r)r)r*r�sz5getPhases.<locals>.InTableTextPhase.processCharacterscSs|jj|�dS)N)r�r�)r@r�r)r)r*r�sz:getPhases.<locals>.InTableTextPhase.processSpaceCharacterscSs|j�|j|j_|S)N)r�r�rIr])r@r�r)r)r*r�
s
z3getPhases.<locals>.InTableTextPhase.processStartTagcSs|j�|j|j_|S)N)r�r�rIr])r@r�r)r)r*r�s
z1getPhases.<locals>.InTableTextPhase.processEndTagN)r7r8r9rHr�r�r�r�r�r�r�r))r�r)r*�InTableTextPhase�s	r�cs`eZdZ�fdd�Zdd�Zdd�Zdd�Zd	d
�Zdd�Zd
d�Z	dd�Z
dd�Zdd�ZdS)z!getPhases.<locals>.InCaptionPhasec
sf�j|||�tjd|jfd
|jfg�|_|j|j_tjd|jfd|j	fd|j
fg�|_|j|j_dS)Nr�r�rxr�r�r�r�r�r�r�r�r�)	r�rxr�r�r�r�r�r�r�)
r�rxr�r�r�r�r�r�r�r�)
rHrrr��startTagTableElementr�rr�
endTagCaptionr�r�r�r)r@rIr<)r�r)r*rHs
z*getPhases.<locals>.InCaptionPhase.__init__cSs|jjddd�S)Nr�r�)r�)r<r�)r@r)r)r*�ignoreEndTagCaption+sz5getPhases.<locals>.InCaptionPhase.ignoreEndTagCaptioncSs|jjdj�dS)Nr�)rIrFr�)r@r)r)r*r�.sz,getPhases.<locals>.InCaptionPhase.processEOFcSs|jjdj|�S)Nr�)rIrFr�)r@r�r)r)r*r�1sz3getPhases.<locals>.InCaptionPhase.processCharacterscSs0|jj�|j�}|jjjtd��|s,|SdS)Nr�)rIr|r�r]r�r)r@r��ignoreEndTagr)r)r*r�4s

z6getPhases.<locals>.InCaptionPhase.startTagTableElementcSs|jjdj|�S)Nr�)rIrFr�)r@r�r)r)r*r<sz/getPhases.<locals>.InCaptionPhase.startTagOthercSs�|j�s�|jj�|jjdjdkrB|jjdd|jjdjd��x"|jjdjdkrd|jjj�qDW|jjj�|jj�|jj	d|j_
n
|jj�dS)	Nr	r�z$expected-one-end-tag-but-got-another)r�r�r�ryryry)r�r<r�r{r>rIr|r)r�rFr])r@r�r)r)r*r�?s

z/getPhases.<locals>.InCaptionPhase.endTagCaptioncSs0|jj�|j�}|jjjtd��|s,|SdS)Nr�)rIr|r�r]r�r)r@r�r�r)r)r*r�Qs

z-getPhases.<locals>.InCaptionPhase.endTagTablecSs|jjdd|di�dS)Nzunexpected-end-tagr>)rIr|)r@r�r)r)r*r�Xsz.getPhases.<locals>.InCaptionPhase.endTagIgnorecSs|jjdj|�S)Nr�)rIrFr�)r@r�r)r)r*r[sz-getPhases.<locals>.InCaptionPhase.endTagOtherN)
r7r8r9rHr�r�r�r�rr�r�r�rr))r�r)r*�InCaptionPhasesr�csXeZdZ�fdd�Zdd�Zdd�Zdd�Zd	d
�Zdd�Zd
d�Z	dd�Z
dd�ZdS)z%getPhases.<locals>.InColumnGroupPhasecs^�j|||�tjd|jfd|jfg�|_|j|j_tjd|jfd|j	fg�|_
|j|j
_dS)Nr�rxr�)rHrrr�r�r�rr�endTagColgroup�	endTagColr�r)r@rIr<)r�r)r*rHas
z.getPhases.<locals>.InColumnGroupPhase.__init__cSs|jjdjdkS)Nr	r�ry)r<r{r>)r@r)r)r*�ignoreEndTagColgrouppsz:getPhases.<locals>.InColumnGroupPhase.ignoreEndTagColgroupcSs8|jjdjdkrdS|j�}|jtd��|s4dSdS)Nr	r�r�Try)r<r{r>r�r�r)r@r�r)r)r*r�ssz0getPhases.<locals>.InColumnGroupPhase.processEOFcSs"|j�}|jtd��|s|SdS)Nr�)r�r�r)r@r�r�r)r)r*r�}sz7getPhases.<locals>.InColumnGroupPhase.processCharacterscSs$|jj|�|jjj�d|d<dS)NTrx)r<r�r{r))r@r�r)r)r*r��sz1getPhases.<locals>.InColumnGroupPhase.startTagColcSs"|j�}|jtd��|s|SdS)Nr�)r�r�r)r@r�r�r)r)r*r�sz3getPhases.<locals>.InColumnGroupPhase.startTagOthercSs4|j�r|jj�n|jjj�|jjd|j_dS)Nr�)r�rIr|r<r{r)rFr])r@r�r)r)r*r��sz4getPhases.<locals>.InColumnGroupPhase.endTagColgroupcSs|jjdddi�dS)Nz
no-end-tagr>rx)rIr|)r@r�r)r)r*r��sz/getPhases.<locals>.InColumnGroupPhase.endTagColcSs"|j�}|jtd��|s|SdS)Nr�)r�r�r)r@r�r�r)r)r*r�sz1getPhases.<locals>.InColumnGroupPhase.endTagOtherN)r7r8r9rHr�r�r�r�rr�r�rr))r�r)r*�InColumnGroupPhase^s
	r�csxeZdZ�fdd�Zdd�Zdd�Zdd�Zd	d
�Zdd�Zd
d�Z	dd�Z
dd�Zdd�Zdd�Z
dd�Zdd�ZdS)z#getPhases.<locals>.InTableBodyPhasecsv�j|||�tjd|jfd|jfd
|jfd|jfg�|_|j|j_	tjd|j
fd|jfd|jfg�|_
|j|j
_	dS)Nr�r�r�r�r�rxr�r�r�r�r�r�)r�r�)r�rxr�r�r�r�)r�r�r�)r�r�rxr�r�r�r�r�)rHrrr��
startTagTr�startTagTableCell�startTagTableOtherr�rr�endTagTableRowGroupr�r�r�r)r@rIr<)r�r)r*rH�s
z,getPhases.<locals>.InTableBodyPhase.__init__cSs:x"|jjdjdkr"|jjj�qW|jjdjdkr6dS)	Nr	r�r�r�r�ry)r�r�r�r�ry)r<r{r>r))r@r)r)r*�clearStackToTableBodyContext�s
z@getPhases.<locals>.InTableBodyPhase.clearStackToTableBodyContextcSs|jjdj�dS)Nr�)rIrFr�)r@r)r)r*r��sz.getPhases.<locals>.InTableBodyPhase.processEOFcSs|jjdj|�S)Nr�)rIrFr�)r@r�r)r)r*r��sz:getPhases.<locals>.InTableBodyPhase.processSpaceCharacterscSs|jjdj|�S)Nr�)rIrFr�)r@r�r)r)r*r��sz5getPhases.<locals>.InTableBodyPhase.processCharacterscSs(|j�|jj|�|jjd|j_dS)Nr�)r�r<r�rIrFr])r@r�r)r)r*r��sz.getPhases.<locals>.InTableBodyPhase.startTagTrcSs*|jjdd|di�|jtdd��|S)Nzunexpected-cell-in-table-bodyr>r�ro)rIr|r�r)r@r�r)r)r*r��sz5getPhases.<locals>.InTableBodyPhase.startTagTableCellcSsb|jjddd�s0|jjddd�s0|jjddd�rT|j�|jt|jjdj��|S|jj�dS)Nr�r�)r�r�r�r	ry)	r<r�r�r�rr{r>rIr|)r@r�r)r)r*r��sz6getPhases.<locals>.InTableBodyPhase.startTagTableOthercSs|jjdj|�S)Nr�)rIrFr�)r@r�r)r)r*r�sz1getPhases.<locals>.InTableBodyPhase.startTagOthercSsT|jj|ddd�r:|j�|jjj�|jjd|j_n|jjdd|di�dS)Nr>r�)r�r�z unexpected-end-tag-in-table-body)	r<r�r�r{r)rIrFr]r|)r@r�r)r)r*r��sz7getPhases.<locals>.InTableBodyPhase.endTagTableRowGroupcSsb|jjddd�s0|jjddd�s0|jjddd�rT|j�|jt|jjdj��|S|jj�dS)Nr�r�)r�r�r�r	ry)	r<r�r�r�rr{r>rIr|)r@r�r)r)r*r��sz/getPhases.<locals>.InTableBodyPhase.endTagTablecSs|jjdd|di�dS)Nz unexpected-end-tag-in-table-bodyr>)rIr|)r@r�r)r)r*r��sz0getPhases.<locals>.InTableBodyPhase.endTagIgnorecSs|jjdj|�S)Nr�)rIrFr�)r@r�r)r)r*r�sz/getPhases.<locals>.InTableBodyPhase.endTagOtherN)r7r8r9rHr�r�r�r�r�r�r�rr�r�r�rr))r�r)r*�InTableBodyPhase�s
	
r�cs�eZdZ�fdd�Zdd�Zdd�Zdd�Zd	d
�Zdd�Zd
d�Z	dd�Z
dd�Zdd�Zdd�Z
dd�Zdd�Zdd�ZdS)zgetPhases.<locals>.InRowPhasecsv�j|||�tjd|jfd
|jfd|jfg�|_|j|j_tjd
|j	fd|j
fd|jfd|jfg�|_
|j|j
_dS)Nr�r�r�r�rxr�r�r�r�r�r�r�)r�r�)r�rxr�r�r�r�r�)r�r�r�)r�r�rxr�r�r�r�)rHrrr�r�r�r�rr�endTagTrr�r�r�r�r)r@rIr<)r�r)r*rHs
z&getPhases.<locals>.InRowPhase.__init__cSsDx>|jjdjdkr>|jjdd|jjdji�|jjj�qWdS)	Nr	r�r�z'unexpected-implied-end-tag-in-table-rowr>ry)r�r�ry)r<r{r>rIr|r))r@r)r)r*�clearStackToTableRowContextsz9getPhases.<locals>.InRowPhase.clearStackToTableRowContextcSs|jjddd�S)Nr�r�)r�)r<r�)r@r)r)r*�ignoreEndTagTrsz,getPhases.<locals>.InRowPhase.ignoreEndTagTrcSs|jjdj�dS)Nr�)rIrFr�)r@r)r)r*r�"sz(getPhases.<locals>.InRowPhase.processEOFcSs|jjdj|�S)Nr�)rIrFr�)r@r�r)r)r*r�%sz4getPhases.<locals>.InRowPhase.processSpaceCharacterscSs|jjdj|�S)Nr�)rIrFr�)r@r�r)r)r*r�(sz/getPhases.<locals>.InRowPhase.processCharacterscSs6|j�|jj|�|jjd|j_|jjjt�dS)Nr�)	r�r<r�rIrFr]r�r�r
)r@r�r)r)r*r�+sz/getPhases.<locals>.InRowPhase.startTagTableCellcSs"|j�}|jtd��|s|SdS)Nr�)r�r�r)r@r�r�r)r)r*r�1sz0getPhases.<locals>.InRowPhase.startTagTableOthercSs|jjdj|�S)Nr�)rIrFr�)r@r�r)r)r*r8sz+getPhases.<locals>.InRowPhase.startTagOthercSs<|j�s.|j�|jjj�|jjd|j_n
|jj�dS)Nr�)	r�r�r<r{r)rIrFr]r|)r@r�r)r)r*r�;s
z&getPhases.<locals>.InRowPhase.endTagTrcSs"|j�}|jtd��|s|SdS)Nr�)r�r�r)r@r�r�r)r)r*r�Esz)getPhases.<locals>.InRowPhase.endTagTablecSs4|jj|ddd�r&|jtd��|S|jj�dS)Nr>r�)r�r�)r<r�r�rrIr|)r@r�r)r)r*r�Msz1getPhases.<locals>.InRowPhase.endTagTableRowGroupcSs|jjdd|di�dS)Nzunexpected-end-tag-in-table-rowr>)rIr|)r@r�r)r)r*r�Tsz*getPhases.<locals>.InRowPhase.endTagIgnorecSs|jjdj|�S)Nr�)rIrFr�)r@r�r)r)r*rXsz)getPhases.<locals>.InRowPhase.endTagOtherN)r7r8r9rHr�r�r�r�r�r�r�rr�r�r�r�rr))r�r)r*�
InRowPhases
r�cs`eZdZ�fdd�Zdd�Zdd�Zdd�Zd	d
�Zdd�Zd
d�Z	dd�Z
dd�Zdd�ZdS)zgetPhases.<locals>.InCellPhasecsf�j|||�tjd|jfd
|jfg�|_|j|j_tjd|jfd|j	fd|j
fg�|_|j|j_dS)Nr�r�rxr�r�r�r�r�r�r�r�r�)	r�rxr�r�r�r�r�r�r�)r�r�)r�r�rxr�r�)r�r�r�r�r�)
rHrrr�r�r�rr�endTagTableCellr��endTagImplyr�r)r@rIr<)r�r)r*rH]s
z'getPhases.<locals>.InCellPhase.__init__cSsB|jjddd�r |jtd��n|jjddd�r>|jtd��dS)Nr�r�)r�r�)r<r�r�r)r@r)r)r*�	closeCellnsz(getPhases.<locals>.InCellPhase.closeCellcSs|jjdj�dS)Nr�)rIrFr�)r@r)r)r*r�usz)getPhases.<locals>.InCellPhase.processEOFcSs|jjdj|�S)Nr�)rIrFr�)r@r�r)r)r*r�xsz0getPhases.<locals>.InCellPhase.processCharacterscSs:|jjddd�s |jjddd�r,|j�|S|jj�dS)Nr�r�)r�r�)r<r�r�rIr|)r@r�r)r)r*r�{s
z1getPhases.<locals>.InCellPhase.startTagTableOthercSs|jjdj|�S)Nr�)rIrFr�)r@r�r)r)r*r�sz,getPhases.<locals>.InCellPhase.startTagOthercSs�|jj|ddd�r�|jj|d�|jjdj|dkrt|jjdd|di�x.|jjj�}|j|dkrRPqRWn|jjj�|jj�|jj	d|j_
n|jjdd|di�dS)	Nr>r�)r�r	zunexpected-cell-end-tagr�zunexpected-end-tagry)r<r�r�r{r>rIr|r)r�rFr])r@r�r�r)r)r*r��s
z.getPhases.<locals>.InCellPhase.endTagTableCellcSs|jjdd|di�dS)Nzunexpected-end-tagr>)rIr|)r@r�r)r)r*r��sz+getPhases.<locals>.InCellPhase.endTagIgnorecSs.|jj|ddd�r |j�|S|jj�dS)Nr>r�)r�)r<r�r�rIr|)r@r�r)r)r*r��sz*getPhases.<locals>.InCellPhase.endTagImplycSs|jjdj|�S)Nr�)rIrFr�)r@r�r)r)r*r�sz*getPhases.<locals>.InCellPhase.endTagOtherN)
r7r8r9rHr�r�r�r�rr�r�r�rr))r�r)r*�InCellPhase[s
r�csxeZdZ�fdd�Zdd�Zdd�Zdd�Zd	d
�Zdd�Zd
d�Z	dd�Z
dd�Zdd�Zdd�Z
dd�Zdd�ZdS)z getPhases.<locals>.InSelectPhasecs��j|||�tjd|jfd|jfd|jfd|jfd	|jfd|jfg�|_	|j
|j	_tjd|jfd|j
fd|jfg�|_|j|j_dS)
Nr�rvrwr�rnrirrr)rnrirr)rHrrr��startTagOption�startTagOptgroupr�r�r$r�rr�endTagOption�endTagOptgroup�endTagSelectr�r)r@rIr<)r�r)r*rH�s
z)getPhases.<locals>.InSelectPhase.__init__cSs$|jjdjdkr |jjd�ndS)Nr	r�z
eof-in-selectry)r<r{r>rIr|)r@r)r)r*r��sz+getPhases.<locals>.InSelectPhase.processEOFcSs$|ddkrdS|jj|d�dS)Nrsr�)r<r�)r@r�r)r)r*r��sz2getPhases.<locals>.InSelectPhase.processCharacterscSs.|jjdjdkr|jjj�|jj|�dS)Nr	rvry)r<r{r>r)r�)r@r�r)r)r*r��sz/getPhases.<locals>.InSelectPhase.startTagOptioncSsL|jjdjdkr|jjj�|jjdjdkr<|jjj�|jj|�dS)Nr	rvrwryry)r<r{r>r)r�)r@r�r)r)r*r��s
z1getPhases.<locals>.InSelectPhase.startTagOptgroupcSs|jjd�|jtd��dS)Nzunexpected-select-in-selectr�)rIr|r�r)r@r�r)r)r*r��sz/getPhases.<locals>.InSelectPhase.startTagSelectcSs2|jjd�|jjddd�r.|jtd��|SdS)Nzunexpected-input-in-selectr�)r�)rIr|r<r�r�r)r@r�r)r)r*r��s
z.getPhases.<locals>.InSelectPhase.startTagInputcSs|jjdj|�S)Nr)rIrFr�)r@r�r)r)r*r$�sz/getPhases.<locals>.InSelectPhase.startTagScriptcSs|jjdd|di�dS)Nzunexpected-start-tag-in-selectr>)rIr|)r@r�r)r)r*r�sz.getPhases.<locals>.InSelectPhase.startTagOthercSs6|jjdjdkr |jjj�n|jjdddi�dS)Nr	rvzunexpected-end-tag-in-selectr>ry)r<r{r>r)rIr|)r@r�r)r)r*r��sz-getPhases.<locals>.InSelectPhase.endTagOptioncSsf|jjdjdkr0|jjdjdkr0|jjj�|jjd	jdkrP|jjj�n|jjdddi�dS)
Nr	rvr�rwzunexpected-end-tag-in-selectr>ry���ry)r<r{r>r)rIr|)r@r�r)r)r*r��sz/getPhases.<locals>.InSelectPhase.endTagOptgroupcSsR|jjddd�rD|jjj�}x|jdkr6|jjj�}qW|jj�n
|jj�dS)Nr�)r�)r<r�r{r)r>rIr_r|)r@r�r�r)r)r*r��sz-getPhases.<locals>.InSelectPhase.endTagSelectcSs|jjdd|di�dS)Nzunexpected-end-tag-in-selectr>)rIr|)r@r�r)r)r*r	sz,getPhases.<locals>.InSelectPhase.endTagOtherN)r7r8r9rHr�r�r�r�r�r�r$rr�r�r�rr))r�r)r*�
InSelectPhase�s
r�csHeZdZ�fdd�Zdd�Zdd�Zdd�Zd	d
�Zdd�Zd
d�Z	dS)z'getPhases.<locals>.InSelectInTablePhasec	sN�j|||�tjd	|jfg�|_|j|j_tjd
|jfg�|_|j	|j_dS)Nr�r�r�r�r�r�r�r�)r�r�r�r�r�r�r�r�)r�r�r�r�r�r�r�r�)
rHrrr�r�rrr�r�r)r@rIr<)r�r)r*rH	s
z0getPhases.<locals>.InSelectInTablePhase.__init__cSs|jjdj�dS)Nr�)rIrFr�)r@r)r)r*r�	sz2getPhases.<locals>.InSelectInTablePhase.processEOFcSs|jjdj|�S)Nr�)rIrFr�)r@r�r)r)r*r�	sz9getPhases.<locals>.InSelectInTablePhase.processCharacterscSs(|jjdd|di�|jtd��|S)Nz5unexpected-table-element-start-tag-in-select-in-tabler>r�)rIr|rr)r@r�r)r)r*r�!	sz5getPhases.<locals>.InSelectInTablePhase.startTagTablecSs|jjdj|�S)Nr�)rIrFr�)r@r�r)r)r*r&	sz5getPhases.<locals>.InSelectInTablePhase.startTagOthercSs@|jjdd|di�|jj|ddd�r<|jtd��|SdS)Nz3unexpected-table-element-end-tag-in-select-in-tabler>r�)r�r�)rIr|r<r�rr)r@r�r)r)r*r�)	sz3getPhases.<locals>.InSelectInTablePhase.endTagTablecSs|jjdj|�S)Nr�)rIrFr�)r@r�r)r)r*r/	sz3getPhases.<locals>.InSelectInTablePhase.endTagOtherN)
r7r8r9rHr�r�r�rr�rr))r�r)r*�InSelectInTablePhase	sr�c-s�eZdZeddddddddd	d
ddd
ddddddddddddddddddd d!d"d#d$d%d&d'd(d)d*d+d,g,�Z�fd-d.�Zd/d0�Z�fd1d2�Zd3d4�Zd5d6�Z	d7S)8z(getPhases.<locals>.InForeignContentPhaserUrVr<r�rr=rWrRr+r@rSrXrgZh1Zh2Zh3Zh4Zh5Zh6r�rorZrhrQrOrHr4rarJr(rNr�r[r\�spanr^r]�subZsupr�r_r`rM�varcs�j|||�dS)N)rH)r@rIr<)r�r)r*rH<	sz1getPhases.<locals>.InForeignContentPhase.__init__c%Ssnddddddddd	d
ddd
ddddddddddddddddddd d!d"d#d$d%�$}|d&|krj||d&|d&<dS)'NZaltGlyphZaltGlyphDefZaltGlyphItemZanimateColorZ
animateMotionZanimateTransformZclipPathZfeBlendZ
feColorMatrixZfeComponentTransferZfeCompositeZfeConvolveMatrixZfeDiffuseLightingZfeDisplacementMapZfeDistantLightZfeFloodZfeFuncAZfeFuncBZfeFuncGZfeFuncRZfeGaussianBlurZfeImageZfeMergeZfeMergeNodeZfeMorphologyZfeOffsetZfePointLightZfeSpecularLightingZfeSpotLightZfeTileZfeTurbulenceZ
foreignObjectZglyphRefZlinearGradientZradialGradientZtextPath)$ZaltglyphZaltglyphdefZaltglyphitemZanimatecolorZ
animatemotionZanimatetransformZclippathZfeblendZ
fecolormatrixZfecomponenttransferZfecompositeZfeconvolvematrixZfediffuselightingZfedisplacementmapZfedistantlightZfefloodZfefuncaZfefuncbZfefuncgZfefuncrZfegaussianblurZfeimageZfemergeZfemergenodeZfemorphologyZfeoffsetZfepointlightZfespecularlightingZfespotlightZfetileZfeturbulenceZ
foreignobjectZglyphrefZlineargradientZradialgradientZtextpathr>r))r@r��replacementsr)r)r*�adjustSVGTagNames?	sLz:getPhases.<locals>.InForeignContentPhase.adjustSVGTagNamescsL|ddkrd|d<n&|jjr<tdd�|dD��r<d|j_�j||�dS)Nrsr�u�css|]}|tkVqdS)N)r)r=r�r)r)r*r�l	szMgetPhases.<locals>.InForeignContentPhase.processCharacters.<locals>.<genexpr>F)rIr`r�r�)r@r�)r�r)r*r�h	s
z:getPhases.<locals>.InForeignContentPhase.processCharacterscSs6|jjd}|d|jksD|ddkr�t|dj��tdddg�@r�|jjdd|di�xR|jjdj|jjkr�|jj	|jjd�r�|jj
|jjd�r�|jjj�q\W|S|jtd	kr�|jj
|�n$|jtd
kr�|j|�|jj|�|jj|�|j|d<|jj|�|d�r2|jjj�d
|d<dS)Nr	r>rYrsZcolorZface�sizez*unexpected-html-element-in-foreign-contentrdrurhrwTrxryryryry)r<r{�breakoutElements�set�keysrIr|rhrrlrmr)rrrrrr�)r@r�r�r)r)r*r�p	s.



z8getPhases.<locals>.InForeignContentPhase.processStartTagcSs�t|jj�d}|jjd}|jjt�|dkrF|jjdd|di�x�|jjt�|dkr�|jj|jj	dkr�|jjj
�|jjj|j_x|jjj�|kr�q�Wd}P|d8}|jj|}|j
|jjkr�qHqH|jjj|�}PqHW|S)Nr	r>zunexpected-end-tagr�ry)r~r<r{r>rjrrIr|r]rFr�r�r)rhrr�)r@r�Z	nodeIndexr�r�r)r)r*r��	s(z6getPhases.<locals>.InForeignContentPhase.processEndTagN)
r7r8r9r�rrHrr�r�r�r))r�r)r*�InForeignContentPhase2	s


)rcsPeZdZ�fdd�Zdd�Zdd�Zdd�Zd	d
�Zdd�Zd
d�Z	dd�Z
dS)z!getPhases.<locals>.AfterBodyPhasecsN�j|||�tjd|jfg�|_|j|j_tjd|jfg�|_|j	|j_dS)Nr�)
rHrrr�r�rrr�r�r)r@rIr<)r�r)r*rH�	s
z*getPhases.<locals>.AfterBodyPhase.__init__cSsdS)Nr))r@r)r)r*r��	sz,getPhases.<locals>.AfterBodyPhase.processEOFcSs|jj||jjd�dS)Nr)r<r�r{)r@r�r)r)r*r��	sz0getPhases.<locals>.AfterBodyPhase.processCommentcSs |jjd�|jjd|j_|S)Nzunexpected-char-after-bodyr�)rIr|rFr])r@r�r)r)r*r��	sz3getPhases.<locals>.AfterBodyPhase.processCharacterscSs|jjdj|�S)Nr�)rIrFr�)r@r�r)r)r*r��	sz.getPhases.<locals>.AfterBodyPhase.startTagHtmlcSs*|jjdd|di�|jjd|j_|S)Nzunexpected-start-tag-after-bodyr>r�)rIr|rFr])r@r�r)r)r*r�	sz/getPhases.<locals>.AfterBodyPhase.startTagOthercSs*|jjr|jjd�n|jjd|j_dS)Nz'unexpected-end-tag-after-body-innerhtml�afterAfterBody)rIrPr|rFr])r@r>r)r)r*r��	sz,getPhases.<locals>.AfterBodyPhase.endTagHtmlcSs*|jjdd|di�|jjd|j_|S)Nzunexpected-end-tag-after-bodyr>r�)rIr|rFr])r@r�r)r)r*r�	sz-getPhases.<locals>.AfterBodyPhase.endTagOtherN)r7r8r9rHr�r�r�r�rr�rr))r�r)r*�AfterBodyPhase�	srcsXeZdZ�fdd�Zdd�Zdd�Zdd�Zd	d
�Zdd�Zd
d�Z	dd�Z
dd�ZdS)z"getPhases.<locals>.InFramesetPhasecsf�j|||�tjd|jfd|jfd|jfd|jfg�|_|j|j_	tjd|j
fg�|_|j|j_	dS)Nr�r�ryr)
rHrrr�r5�
startTagFrame�startTagNoframesr�rr�endTagFramesetr�r)r@rIr<)r�r)r*rH�	s
z+getPhases.<locals>.InFramesetPhase.__init__cSs$|jjdjdkr |jjd�ndS)Nr	r�zeof-in-framesetry)r<r{r>rIr|)r@r)r)r*r��	sz-getPhases.<locals>.InFramesetPhase.processEOFcSs|jjd�dS)Nzunexpected-char-in-frameset)rIr|)r@r�r)r)r*r��	sz4getPhases.<locals>.InFramesetPhase.processCharacterscSs|jj|�dS)N)r<r�)r@r�r)r)r*r5�	sz3getPhases.<locals>.InFramesetPhase.startTagFramesetcSs|jj|�|jjj�dS)N)r<r�r{r))r@r�r)r)r*r	�	sz0getPhases.<locals>.InFramesetPhase.startTagFramecSs|jjdj|�S)Nr�)rIrFr�)r@r�r)r)r*r
�	sz3getPhases.<locals>.InFramesetPhase.startTagNoframescSs|jjdd|di�dS)Nz unexpected-start-tag-in-framesetr>)rIr|)r@r�r)r)r*r�	sz0getPhases.<locals>.InFramesetPhase.startTagOthercSs\|jjdjdkr |jjd�n|jjj�|jjrX|jjdjdkrX|jjd|j_dS)Nr	r�z)unexpected-frameset-in-frameset-innerhtmlr��
afterFramesetryry)	r<r{r>rIr|r)rPrFr])r@r�r)r)r*r�	s
z1getPhases.<locals>.InFramesetPhase.endTagFramesetcSs|jjdd|di�dS)Nzunexpected-end-tag-in-framesetr>)rIr|)r@r�r)r)r*r	
sz.getPhases.<locals>.InFramesetPhase.endTagOtherN)r7r8r9rHr�r�r5r	r
rrrr))r�r)r*�InFramesetPhase�	sr
csHeZdZ�fdd�Zdd�Zdd�Zdd�Zd	d
�Zdd�Zd
d�Z	dS)z%getPhases.<locals>.AfterFramesetPhasecsV�j|||�tjd|jfd|jfg�|_|j|j_tjd|jfg�|_	|j
|j	_dS)Nr�r)rHrrr�r
r�rrr�r�r)r@rIr<)r�r)r*rH
s
z.getPhases.<locals>.AfterFramesetPhase.__init__cSsdS)Nr))r@r)r)r*r�
sz0getPhases.<locals>.AfterFramesetPhase.processEOFcSs|jjd�dS)Nzunexpected-char-after-frameset)rIr|)r@r�r)r)r*r�!
sz7getPhases.<locals>.AfterFramesetPhase.processCharacterscSs|jjdj|�S)Nr)rIrFr�)r@r�r)r)r*r
$
sz6getPhases.<locals>.AfterFramesetPhase.startTagNoframescSs|jjdd|di�dS)Nz#unexpected-start-tag-after-framesetr>)rIr|)r@r�r)r)r*r'
sz3getPhases.<locals>.AfterFramesetPhase.startTagOthercSs|jjd|j_dS)N�afterAfterFrameset)rIrFr])r@r�r)r)r*r�+
sz0getPhases.<locals>.AfterFramesetPhase.endTagHtmlcSs|jjdd|di�dS)Nz!unexpected-end-tag-after-framesetr>)rIr|)r@r�r)r)r*r.
sz1getPhases.<locals>.AfterFramesetPhase.endTagOtherN)
r7r8r9rHr�r�r
rr�rr))r�r)r*�AfterFramesetPhase
srcsPeZdZ�fdd�Zdd�Zdd�Zdd�Zd	d
�Zdd�Zd
d�Z	dd�Z
dS)z&getPhases.<locals>.AfterAfterBodyPhasecs0�j|||�tjd|jfg�|_|j|j_dS)Nr�)rHrrr�r�rr)r@rIr<)r�r)r*rH3
sz/getPhases.<locals>.AfterAfterBodyPhase.__init__cSsdS)Nr))r@r)r)r*r�;
sz1getPhases.<locals>.AfterAfterBodyPhase.processEOFcSs|jj||jj�dS)N)r<r�r�)r@r�r)r)r*r�>
sz5getPhases.<locals>.AfterAfterBodyPhase.processCommentcSs|jjdj|�S)Nr�)rIrFr�)r@r�r)r)r*r�A
sz=getPhases.<locals>.AfterAfterBodyPhase.processSpaceCharacterscSs |jjd�|jjd|j_|S)Nzexpected-eof-but-got-charr�)rIr|rFr])r@r�r)r)r*r�D
sz8getPhases.<locals>.AfterAfterBodyPhase.processCharacterscSs|jjdj|�S)Nr�)rIrFr�)r@r�r)r)r*r�I
sz3getPhases.<locals>.AfterAfterBodyPhase.startTagHtmlcSs*|jjdd|di�|jjd|j_|S)Nzexpected-eof-but-got-start-tagr>r�)rIr|rFr])r@r�r)r)r*rL
sz4getPhases.<locals>.AfterAfterBodyPhase.startTagOthercSs*|jjdd|di�|jjd|j_|S)Nzexpected-eof-but-got-end-tagr>r�)rIr|rFr])r@r�r)r)r*r�R
sz4getPhases.<locals>.AfterAfterBodyPhase.processEndTagN)r7r8r9rHr�r�r�r�r�rr�r))r�r)r*�AfterAfterBodyPhase2
srcsXeZdZ�fdd�Zdd�Zdd�Zdd�Zd	d
�Zdd�Zd
d�Z	dd�Z
dd�ZdS)z*getPhases.<locals>.AfterAfterFramesetPhasecs8�j|||�tjd|jfd|jfg�|_|j|j_dS)Nr�r)rHrrr��startTagNoFramesr�rr)r@rIr<)r�r)r*rHY
s
z3getPhases.<locals>.AfterAfterFramesetPhase.__init__cSsdS)Nr))r@r)r)r*r�b
sz5getPhases.<locals>.AfterAfterFramesetPhase.processEOFcSs|jj||jj�dS)N)r<r�r�)r@r�r)r)r*r�e
sz9getPhases.<locals>.AfterAfterFramesetPhase.processCommentcSs|jjdj|�S)Nr�)rIrFr�)r@r�r)r)r*r�h
szAgetPhases.<locals>.AfterAfterFramesetPhase.processSpaceCharacterscSs|jjd�dS)Nzexpected-eof-but-got-char)rIr|)r@r�r)r)r*r�k
sz<getPhases.<locals>.AfterAfterFramesetPhase.processCharacterscSs|jjdj|�S)Nr�)rIrFr�)r@r�r)r)r*r�n
sz7getPhases.<locals>.AfterAfterFramesetPhase.startTagHtmlcSs|jjdj|�S)Nr)rIrFr�)r@r�r)r)r*rq
sz;getPhases.<locals>.AfterAfterFramesetPhase.startTagNoFramescSs|jjdd|di�dS)Nzexpected-eof-but-got-start-tagr>)rIr|)r@r�r)r)r*rt
sz8getPhases.<locals>.AfterAfterFramesetPhase.startTagOthercSs|jjdd|di�dS)Nzexpected-eof-but-got-end-tagr>)rIr|)r@r�r)r)r*r�x
sz8getPhases.<locals>.AfterAfterFramesetPhase.processEndTagN)r7r8r9rHr�r�r�r�r�rrr�r))r�r)r*�AfterAfterFramesetPhaseX
s	r)rTrSr�rr-r.r�r�r�r�r�r�r�r�r�r�r�rvr�r�rrr)r)rGrVr�r
r
rr/r3r8r�r�r�r�r�r�r�r�r�r�r�rrr
rrrr))r�r*rE_sp)#.g@CX!-GBbYLd's/9%&&rEcs^ts
tjr t|d�t��@}nt|d�t��@}|rZt�fdd�|dj�D��|d<dS)Nrsc3s"|]\}}�j||�|fVqdS)N)r})r=�k�v)rr)r*r��
sz$adjust_attributes.<locals>.<genexpr>)rrZPY27rr�rr.)r�rZneeds_adjustmentr))rr*r��
s
r�rpFcCs|dkri}t||||d�S)N)r2r>rsrw)r)r>r2rirwr)r)r*r�
s
rc@seZdZdZdS)rrzError in parsed documentN)r7r8r9r�r)r)r)r*rr�
srr)rT)r+rT)rpNF)1Z
__future__rrrZpip._vendor.sixrrrr0�collectionsr�ImportErrorZpip._vendor.ordereddictr�r
rrZtreebuilders.baser
rZ	constantsrrrrrrrrrrrrr�rrrrr#r-r;rer"ZmemoizerEr�r�	Exceptionrrr)r)r)r*�<module>sRH

)L

_vendor/html5lib/__pycache__/_utils.cpython-36.opt-1.pyc000064400000006234151733136420017026 0ustar003

�Pf�@s�ddlmZmZmZddlZddlmZddlmZyddl	j
jZWn e
k
rdddlj
jZYnXddddd	d
dgZejddko�ejd
dkZyed�Zeee�s�ed�ZWndZYnXdZGdd�de�Zdd�Zdd�Zdd	�Zdd�ZdS)�)�absolute_import�division�unicode_literalsN)�
ModuleType)�	text_type�
default_etree�MethodDispatcher�isSurrogatePair�surrogatePairToCodepoint�moduleFactoryFactory�supports_lone_surrogates�PY27���z"\uD800"z	u"\uD800"FTc@s$eZdZdZffdd�Zdd�ZdS)rapDict with 2 special properties:

    On initiation, keys that are lists, sets or tuples are converted to
    multiple keys so accessing any one of the items in the original
    list-like object returns the matching value

    md = MethodDispatcher({("foo", "bar"):"baz"})
    md["foo"] == "baz"

    A default value which can be set through the default attribute.
    cCsjg}xN|D]F\}}t|ttttf�rBx*|D]}|j||f�q*Wq
|j||f�q
Wtj||�d|_dS)N)	�
isinstance�list�tuple�	frozenset�set�append�dict�__init__�default)�self�itemsZ_dictEntries�name�value�item�r�/usr/lib/python3.6/_utils.pyr4s
zMethodDispatcher.__init__cCstj|||j�S)N)r�getr)r�keyrrr �__getitem__CszMethodDispatcher.__getitem__N)�__name__�
__module__�__qualname__�__doc__rr#rrrr r'scCsLt|�dkoJt|d�dkoJt|d�dkoJt|d�dkoJt|d�dkS)Nrri�i��ri�i��)�len�ord)�datarrr r	Js cCs,dt|d�ddt|d�d}|S)Niri�iri�)r))r*Zchar_valrrr r
Pscsi���fdd�}|S)Ncs�ttjtd��rd|j}n
d|j}t|j��}y�|||Stk
r�t|�}�|f|�|�}|jj|�d�kr�i�|<d�|kr�i�||<d�||kr�i�|||<|�|||<|SXdS)N�z_%s_factorys_%s_factoryr�args�kwargs)	rrr$�typerr�KeyError�__dict__�update)Z
baseModuler,r-rZkwargs_tuple�modZobjs)�factory�moduleCacherr �
moduleFactory\s$
z+moduleFactoryFactory.<locals>.moduleFactoryr)r3r5r)r3r4r rYscsi���fdd�}|S)Ncs2t|�t|j��f}|�kr*�||��|<�|S)N)rr)r,r-r")�cache�funcrr �wrappedyszmemoize.<locals>.wrappedr)r7r8r)r6r7r �memoizevsr9)Z
__future__rrr�sys�typesrZpip._vendor.sixrZxml.etree.cElementTreeZetreeZcElementTreer�ImportErrorZxml.etree.ElementTreeZElementTree�__all__�version_infor
�evalZ_xrrrrr	r
rr9rrrr �<module>s0

#	_vendor/html5lib/__pycache__/_ihatexml.cpython-36.opt-1.pyc000064400000032626151733136420017505 0ustar003

�PfAA�@s�ddlmZmZmZddlZddlZddlmZdZdZ	dZ
dZd	Zd
j
ee	g�Zd
j
eeddd
e
eg�Zd
j
ed
g�Zejd�Zejd�Zdd�Zdd�Zedd�Zdd�Zdd�Zdd�Zdd�Zejd�Zejd�Zejd �ZGd!d"�d"e�Z dS)#�)�absolute_import�division�unicode_literalsN�)�DataLossWarninga^
[#x0041-#x005A] | [#x0061-#x007A] | [#x00C0-#x00D6] | [#x00D8-#x00F6] |
[#x00F8-#x00FF] | [#x0100-#x0131] | [#x0134-#x013E] | [#x0141-#x0148] |
[#x014A-#x017E] | [#x0180-#x01C3] | [#x01CD-#x01F0] | [#x01F4-#x01F5] |
[#x01FA-#x0217] | [#x0250-#x02A8] | [#x02BB-#x02C1] | #x0386 |
[#x0388-#x038A] | #x038C | [#x038E-#x03A1] | [#x03A3-#x03CE] |
[#x03D0-#x03D6] | #x03DA | #x03DC | #x03DE | #x03E0 | [#x03E2-#x03F3] |
[#x0401-#x040C] | [#x040E-#x044F] | [#x0451-#x045C] | [#x045E-#x0481] |
[#x0490-#x04C4] | [#x04C7-#x04C8] | [#x04CB-#x04CC] | [#x04D0-#x04EB] |
[#x04EE-#x04F5] | [#x04F8-#x04F9] | [#x0531-#x0556] | #x0559 |
[#x0561-#x0586] | [#x05D0-#x05EA] | [#x05F0-#x05F2] | [#x0621-#x063A] |
[#x0641-#x064A] | [#x0671-#x06B7] | [#x06BA-#x06BE] | [#x06C0-#x06CE] |
[#x06D0-#x06D3] | #x06D5 | [#x06E5-#x06E6] | [#x0905-#x0939] | #x093D |
[#x0958-#x0961] | [#x0985-#x098C] | [#x098F-#x0990] | [#x0993-#x09A8] |
[#x09AA-#x09B0] | #x09B2 | [#x09B6-#x09B9] | [#x09DC-#x09DD] |
[#x09DF-#x09E1] | [#x09F0-#x09F1] | [#x0A05-#x0A0A] | [#x0A0F-#x0A10] |
[#x0A13-#x0A28] | [#x0A2A-#x0A30] | [#x0A32-#x0A33] | [#x0A35-#x0A36] |
[#x0A38-#x0A39] | [#x0A59-#x0A5C] | #x0A5E | [#x0A72-#x0A74] |
[#x0A85-#x0A8B] | #x0A8D | [#x0A8F-#x0A91] | [#x0A93-#x0AA8] |
[#x0AAA-#x0AB0] | [#x0AB2-#x0AB3] | [#x0AB5-#x0AB9] | #x0ABD | #x0AE0 |
[#x0B05-#x0B0C] | [#x0B0F-#x0B10] | [#x0B13-#x0B28] | [#x0B2A-#x0B30] |
[#x0B32-#x0B33] | [#x0B36-#x0B39] | #x0B3D | [#x0B5C-#x0B5D] |
[#x0B5F-#x0B61] | [#x0B85-#x0B8A] | [#x0B8E-#x0B90] | [#x0B92-#x0B95] |
[#x0B99-#x0B9A] | #x0B9C | [#x0B9E-#x0B9F] | [#x0BA3-#x0BA4] |
[#x0BA8-#x0BAA] | [#x0BAE-#x0BB5] | [#x0BB7-#x0BB9] | [#x0C05-#x0C0C] |
[#x0C0E-#x0C10] | [#x0C12-#x0C28] | [#x0C2A-#x0C33] | [#x0C35-#x0C39] |
[#x0C60-#x0C61] | [#x0C85-#x0C8C] | [#x0C8E-#x0C90] | [#x0C92-#x0CA8] |
[#x0CAA-#x0CB3] | [#x0CB5-#x0CB9] | #x0CDE | [#x0CE0-#x0CE1] |
[#x0D05-#x0D0C] | [#x0D0E-#x0D10] | [#x0D12-#x0D28] | [#x0D2A-#x0D39] |
[#x0D60-#x0D61] | [#x0E01-#x0E2E] | #x0E30 | [#x0E32-#x0E33] |
[#x0E40-#x0E45] | [#x0E81-#x0E82] | #x0E84 | [#x0E87-#x0E88] | #x0E8A |
#x0E8D | [#x0E94-#x0E97] | [#x0E99-#x0E9F] | [#x0EA1-#x0EA3] | #x0EA5 |
#x0EA7 | [#x0EAA-#x0EAB] | [#x0EAD-#x0EAE] | #x0EB0 | [#x0EB2-#x0EB3] |
#x0EBD | [#x0EC0-#x0EC4] | [#x0F40-#x0F47] | [#x0F49-#x0F69] |
[#x10A0-#x10C5] | [#x10D0-#x10F6] | #x1100 | [#x1102-#x1103] |
[#x1105-#x1107] | #x1109 | [#x110B-#x110C] | [#x110E-#x1112] | #x113C |
#x113E | #x1140 | #x114C | #x114E | #x1150 | [#x1154-#x1155] | #x1159 |
[#x115F-#x1161] | #x1163 | #x1165 | #x1167 | #x1169 | [#x116D-#x116E] |
[#x1172-#x1173] | #x1175 | #x119E | #x11A8 | #x11AB | [#x11AE-#x11AF] |
[#x11B7-#x11B8] | #x11BA | [#x11BC-#x11C2] | #x11EB | #x11F0 | #x11F9 |
[#x1E00-#x1E9B] | [#x1EA0-#x1EF9] | [#x1F00-#x1F15] | [#x1F18-#x1F1D] |
[#x1F20-#x1F45] | [#x1F48-#x1F4D] | [#x1F50-#x1F57] | #x1F59 | #x1F5B |
#x1F5D | [#x1F5F-#x1F7D] | [#x1F80-#x1FB4] | [#x1FB6-#x1FBC] | #x1FBE |
[#x1FC2-#x1FC4] | [#x1FC6-#x1FCC] | [#x1FD0-#x1FD3] | [#x1FD6-#x1FDB] |
[#x1FE0-#x1FEC] | [#x1FF2-#x1FF4] | [#x1FF6-#x1FFC] | #x2126 |
[#x212A-#x212B] | #x212E | [#x2180-#x2182] | [#x3041-#x3094] |
[#x30A1-#x30FA] | [#x3105-#x312C] | [#xAC00-#xD7A3]z*[#x4E00-#x9FA5] | #x3007 | [#x3021-#x3029]a�
[#x0300-#x0345] | [#x0360-#x0361] | [#x0483-#x0486] | [#x0591-#x05A1] |
[#x05A3-#x05B9] | [#x05BB-#x05BD] | #x05BF | [#x05C1-#x05C2] | #x05C4 |
[#x064B-#x0652] | #x0670 | [#x06D6-#x06DC] | [#x06DD-#x06DF] |
[#x06E0-#x06E4] | [#x06E7-#x06E8] | [#x06EA-#x06ED] | [#x0901-#x0903] |
#x093C | [#x093E-#x094C] | #x094D | [#x0951-#x0954] | [#x0962-#x0963] |
[#x0981-#x0983] | #x09BC | #x09BE | #x09BF | [#x09C0-#x09C4] |
[#x09C7-#x09C8] | [#x09CB-#x09CD] | #x09D7 | [#x09E2-#x09E3] | #x0A02 |
#x0A3C | #x0A3E | #x0A3F | [#x0A40-#x0A42] | [#x0A47-#x0A48] |
[#x0A4B-#x0A4D] | [#x0A70-#x0A71] | [#x0A81-#x0A83] | #x0ABC |
[#x0ABE-#x0AC5] | [#x0AC7-#x0AC9] | [#x0ACB-#x0ACD] | [#x0B01-#x0B03] |
#x0B3C | [#x0B3E-#x0B43] | [#x0B47-#x0B48] | [#x0B4B-#x0B4D] |
[#x0B56-#x0B57] | [#x0B82-#x0B83] | [#x0BBE-#x0BC2] | [#x0BC6-#x0BC8] |
[#x0BCA-#x0BCD] | #x0BD7 | [#x0C01-#x0C03] | [#x0C3E-#x0C44] |
[#x0C46-#x0C48] | [#x0C4A-#x0C4D] | [#x0C55-#x0C56] | [#x0C82-#x0C83] |
[#x0CBE-#x0CC4] | [#x0CC6-#x0CC8] | [#x0CCA-#x0CCD] | [#x0CD5-#x0CD6] |
[#x0D02-#x0D03] | [#x0D3E-#x0D43] | [#x0D46-#x0D48] | [#x0D4A-#x0D4D] |
#x0D57 | #x0E31 | [#x0E34-#x0E3A] | [#x0E47-#x0E4E] | #x0EB1 |
[#x0EB4-#x0EB9] | [#x0EBB-#x0EBC] | [#x0EC8-#x0ECD] | [#x0F18-#x0F19] |
#x0F35 | #x0F37 | #x0F39 | #x0F3E | #x0F3F | [#x0F71-#x0F84] |
[#x0F86-#x0F8B] | [#x0F90-#x0F95] | #x0F97 | [#x0F99-#x0FAD] |
[#x0FB1-#x0FB7] | #x0FB9 | [#x20D0-#x20DC] | #x20E1 | [#x302A-#x302F] |
#x3099 | #x309Aa
[#x0030-#x0039] | [#x0660-#x0669] | [#x06F0-#x06F9] | [#x0966-#x096F] |
[#x09E6-#x09EF] | [#x0A66-#x0A6F] | [#x0AE6-#x0AEF] | [#x0B66-#x0B6F] |
[#x0BE7-#x0BEF] | [#x0C66-#x0C6F] | [#x0CE6-#x0CEF] | [#x0D66-#x0D6F] |
[#x0E50-#x0E59] | [#x0ED0-#x0ED9] | [#x0F20-#x0F29]z}
#x00B7 | #x02D0 | #x02D1 | #x0387 | #x0640 | #x0E46 | #x0EC6 | #x3005 |
#[#x3031-#x3035] | [#x309D-#x309E] | [#x30FC-#x30FE]z | �.�-�_z#x([\d|A-F]{4,4})z'\[#x([\d|A-F]{4,4})-#x([\d|A-F]{4,4})\]cCs�dd�|jd�D�}g}x�|D]�}d}x`ttfD]T}|j|�}|dk	r0|jdd�|j�D��t|d	�dkr~|d
d|d<d}Pq0W|s|jt|�gd�qWt|�}|S)NcSsg|]}|j��qS�)�strip)�.0�itemr
r
�/usr/lib/python3.6/_ihatexml.py�
<listcomp>hsz$charStringToList.<locals>.<listcomp>z | FcSsg|]}t|��qSr
)�hexToInt)rr
r
r
rrosr�T���rr)	�split�reChar�reCharRange�match�append�groups�len�ord�normaliseCharList)�charsZ
charRanges�rvr
Z
foundMatchZregexprr
r
r�charStringToListgs 

rcCs�t|�}x|D]}qWg}d}x�|t|�kr�d}|j||�xT||t|�kr�|||d|dddkr�|||d|dd<|d7}q@W||7}q W|S)Nrrrr)�sortedrr)�charListr
r�i�jr
r
rr|s
2rZFFFF�cCs�g}|ddkr*|jd|dddg�xBt|dd��D].\}}|j|dd||dddg�q<W|ddtkr�|j|dddtg�|S)Nrrrrr)r�	enumerate�max_unicode)r rr!r
r
r
r�
missingRanges�s*r&cCsrg}x^|D]V}|d|dkr6|jtt|d���q
|jtt|d��dtt|d���q
Wddj|�S)Nrrrz[%s]�)r�escapeRegexp�chr�join)r rr
r
r
r�listToRegexpStr�s
r+cCs
t|d�S)Nr#)�int)Zhex_strr
r
rr�srcCs&d}x|D]}|j|d|�}q
W|S)Nr�^�$�*�+�?�{�}�[�]�|�(�)r�\)rr-r.r/r0r1r2r3r4r5r6r7r8r)�replace)�stringZspecialCharacters�charr
r
rr(�s

r(u�[-,/:-@\[-\^`\{-¶¸-¿×÷IJ-ijĿ-ŀʼnſDŽ-njDZ-dzǶ-ǹȘ-ɏʩ-ʺ˂-ˏ˒-˿͆-͟͢-΅΋΍΢Ϗϗ-ϙϛϝϟϡϴ-ЀЍѐѝ҂҇-ҏӅ-ӆӉ-ӊӍ-ӏӬ-ӭӶ-ӷӺ-԰՗-՘՚-ՠև-֐ֺ֢־׀׃ׅ-׏׫-ׯ׳-ؠػ-ؿٓ-ٟ٪-ٯڸ-ڹڿۏ۔۩ۮ-ۯۺ-ऀऄऺ-ऻॎ-ॐॕ-ॗ।-॥॰-ঀ঄঍-঎঑-঒঩঱঳-঵঺-঻ঽ৅-৆৉-৊ৎ-৖৘-৛৞৤-৥৲-ਁਃ-਄਋-਎਑-਒਩਱਴਷਺-਻਽੃-੆੉-੊੎-੘੝੟-੥ੵ-઀઄ઌ઎઒઩઱઴઺-઻૆૊૎-૟ૡ-૥૰-଀଄଍-଎଑-଒଩଱଴-ଵ଺-଻ୄ-୆୉-୊୎-୕୘-୛୞ୢ-୥୰-஁஄஋-஍஑஖-஘஛஝஠-஢஥-஧஫-஭ஶ஺-஽௃-௅௉௎-௖௘-௦௰-ఀఄ఍఑఩ఴ఺-ఽ౅౉౎-౔౗-౟ౢ-౥౰-ಁ಄಍಑಩಴಺-ಽ೅೉೎-೔೗-ೝ೟ೢ-೥೰-ഁഄ഍഑ഩഺ-ഽൄ-൅൉ൎ-ൖ൘-ൟൢ-൥൰-฀ฯ฻-฿๏๚-຀຃຅-ຆຉ຋-ຌຎ-ຓຘຠ຤຦ຨ-ຩຬຯ຺຾-຿໅໇໎-໏໚-༗༚-༟༪-༴༶༸༺-༽཈ཪ-཰྅ྌ-ྏྖ྘ྮ-ྰྸྺ-႟჆-჏ჷ-ჿᄁᄄᄈᄊᄍᄓ-ᄻᄽᄿᅁ-ᅋᅍᅏᅑ-ᅓᅖ-ᅘᅚ-ᅞᅢᅤᅦᅨᅪ-ᅬᅯ-ᅱᅴᅶ-ᆝᆟ-ᆧᆩ-ᆪᆬ-ᆭᆰ-ᆶᆹᆻᇃ-ᇪᇬ-ᇯᇱ-ᇸᇺ-᷿ẜ-ẟỺ-ỿ἖-἗἞-἟὆-὇὎-὏὘὚὜὞὾-὿᾵᾽᾿-῁῅῍-῏῔-῕῜-῟῭-῱῵´-⃏⃝-⃠⃢-℥℧-℩ℬ-ℭℯ-ⅿↃ-〄〆〈-〠〰〶-぀ゕ-゘゛-゜ゟ-゠・ヿ-㄄ㄭ-䷿龦-꯿힤-￿]u�[-@\[-\^`\{-¿×÷IJ-ijĿ-ŀʼnſDŽ-njDZ-dzǶ-ǹȘ-ɏʩ-ʺ˂-΅·΋΍΢Ϗϗ-ϙϛϝϟϡϴ-ЀЍѐѝ҂-ҏӅ-ӆӉ-ӊӍ-ӏӬ-ӭӶ-ӷӺ-԰՗-՘՚-ՠև-׏׫-ׯ׳-ؠػ-ـً-ٰڸ-ڹڿۏ۔ۖ-ۤۧ-ऄऺ-़ा-ॗॢ-঄঍-঎঑-঒঩঱঳-঵঺-৛৞ৢ-৯৲-਄਋-਎਑-਒਩਱਴਷਺-੘੝੟-ੱੵ-઄ઌ઎઒઩઱઴઺-઼ા-૟ૡ-଄଍-଎଑-଒଩଱଴-ଵ଺-଼ା-୛୞ୢ-஄஋-஍஑஖-஘஛஝஠-஢஥-஧஫-஭ஶ஺-ఄ఍఑఩ఴ఺-౟ౢ-಄಍಑಩಴಺-ೝ೟ೢ-ഄ഍഑ഩഺ-ൟൢ-฀ฯัิ-฿ๆ-຀຃຅-ຆຉ຋-ຌຎ-ຓຘຠ຤຦ຨ-ຩຬຯັິ-ຼ຾-຿໅-༿཈ཪ-႟჆-჏ჷ-ჿᄁᄄᄈᄊᄍᄓ-ᄻᄽᄿᅁ-ᅋᅍᅏᅑ-ᅓᅖ-ᅘᅚ-ᅞᅢᅤᅦᅨᅪ-ᅬᅯ-ᅱᅴᅶ-ᆝᆟ-ᆧᆩ-ᆪᆬ-ᆭᆰ-ᆶᆹᆻᇃ-ᇪᇬ-ᇯᇱ-ᇸᇺ-᷿ẜ-ẟỺ-ỿ἖-἗἞-἟὆-὇὎-὏὘὚὜὞὾-὿᾵᾽᾿-῁῅῍-῏῔-῕῜-῟῭-῱῵´-℥℧-℩ℬ-ℭℯ-ⅿↃ-〆〈-〠〪-぀ゕ-゠・-㄄ㄭ-䷿龦-꯿힤-￿]z#[^ 
a-zA-Z0-9\-'()+,./:=?;!*#@$_%]c@sreZdZejd�Zddd�Zddd�Zd	d
�Zdd�Z	d
d�Z
dd�Zdd�Zdd�Z
dd�Zdd�Zdd�ZdS)�
InfosetFilterz
U[\dA-F]{5,5}FTcCs.||_||_||_||_||_||_i|_dS)N)�dropXmlnsLocalName�dropXmlnsAttrNs�preventDoubleDashComments�preventDashAtCommentEnd�replaceFormFeedCharacters�preventSingleQuotePubid�replaceCache)�selfr>r?r@rArBrCr
r
r�__init__�szInfosetFilter.__init__NcCsL|jr |jd�r tjdt�dS|jr>|dkr>tjdt�dS|j|�SdS)Nzxmlns:z"Attributes cannot begin with xmlnszhttp://www.w3.org/2000/xmlns/z)Attributes cannot be in the xml namespace)r>�
startswith�warnings�warnrr?�	toXmlName)rE�name�	namespacer
r
r�coerceAttribute�szInfosetFilter.coerceAttributecCs
|j|�S)N)rJ)rErKr
r
r�
coerceElement�szInfosetFilter.coerceElementcCsN|jrJx$d|kr*tjdt�|jdd�}qW|jd�rJtjdt�|d7}|S)Nz--z'Comments cannot contain adjacent dashesz- -rzComments cannot end in a dash� )r@rHrIrr:�endswith)rE�datar
r
r�
coerceComment�s

zInfosetFilter.coerceCommentcCs:|jr6x"t|jd��D]}tjdt�qW|jdd�}|S)N�zText cannot contain U+000CrO)rB�range�countrHrIrr:)rErQr	r
r
r�coerceCharacters�s
zInfosetFilter.coerceCharacterscCsp|}x4tj|�D]&}tjdt�|j|�}|j||�}qW|jrl|jd�dkrltjdt�|jd|jd��}|S)NzCoercing non-XML pubid�'rz!Pubid cannot contain single quote)	�nonPubidCharRegexp�findallrHrIr�getReplacementCharacterr:rC�find)rErQZ
dataOutputr<�replacementr
r
r�coercePubid�s
zInfosetFilter.coercePubidc
Cs�|d}|dd�}tj|�}|r:tjdt�|j|�}n|}|}ttj|��}x.|D]&}tjdt�|j|�}	|j	||	�}qVW||S)NrrzCoercing non-XML name)
�nonXmlNameFirstBMPRegexprrHrIrrZ�set�nonXmlNameBMPRegexprYr:)
rErK�	nameFirstZnameRest�mZnameFirstOutputZnameRestOutputZreplaceCharsr<r\r
r
rrJ�s


zInfosetFilter.toXmlNamecCs$||jkr|j|}n
|j|�}|S)N)rD�
escapeChar)rEr<r\r
r
rrZs

z%InfosetFilter.getReplacementCharactercCs0x*t|jj|��D]}|j||j|��}qW|S)N)r_�replacementRegexprYr:�unescapeChar)rErKr
r
r
r�fromXmlNameszInfosetFilter.fromXmlNamecCsdt|�}||j|<|S)NzU%05X)rrD)rEr<r\r
r
rrcs
zInfosetFilter.escapeCharcCstt|dd�d��S)Nrr#)r)r,)rEZcharcoder
r
rreszInfosetFilter.unescapeChar)FFFFTF)N)�__name__�
__module__�__qualname__�re�compilerdrFrMrNrRrVr]rJrZrfrcrer
r
r
rr=�s"



r=)!Z
__future__rrrrjrHZ	constantsrZbaseCharZideographicZcombiningCharacterZdigitZextenderr*ZletterrKrarkrrrrr,r%r&r+rr(r`r^rX�objectr=r
r
r
r�<module>s20


	


_vendor/html5lib/constants.py000064400000242673151733136420012331 0ustar00from __future__ import absolute_import, division, unicode_literals

import string

# Sentinel yielded by the input stream / tokenizer when input is exhausted.
EOF = None

# Parse-error codes mapped to human-readable message templates.  Messages
# may contain %(...)s placeholders that are filled from the error's
# datavars dict when reported.
# Fix: "unexpected-start-tag-in-select" was missing the closing paren
# after the %(name)s placeholder.
E = {
    "null-character":
        "Null character in input stream, replaced with U+FFFD.",
    "invalid-codepoint":
        "Invalid codepoint in stream.",
    "incorrectly-placed-solidus":
        "Solidus (/) incorrectly placed in tag.",
    "incorrect-cr-newline-entity":
        "Incorrect CR newline entity, replaced with LF.",
    "illegal-windows-1252-entity":
        "Entity used with illegal number (windows-1252 reference).",
    "cant-convert-numeric-entity":
        "Numeric entity couldn't be converted to character "
        "(codepoint U+%(charAsInt)08x).",
    "illegal-codepoint-for-numeric-entity":
        "Numeric entity represents an illegal codepoint: "
        "U+%(charAsInt)08x.",
    "numeric-entity-without-semicolon":
        "Numeric entity didn't end with ';'.",
    "expected-numeric-entity-but-got-eof":
        "Numeric entity expected. Got end of file instead.",
    "expected-numeric-entity":
        "Numeric entity expected but none found.",
    "named-entity-without-semicolon":
        "Named entity didn't end with ';'.",
    "expected-named-entity":
        "Named entity expected. Got none.",
    "attributes-in-end-tag":
        "End tag contains unexpected attributes.",
    'self-closing-flag-on-end-tag':
        "End tag contains unexpected self-closing flag.",
    "expected-tag-name-but-got-right-bracket":
        "Expected tag name. Got '>' instead.",
    "expected-tag-name-but-got-question-mark":
        "Expected tag name. Got '?' instead. (HTML doesn't "
        "support processing instructions.)",
    "expected-tag-name":
        "Expected tag name. Got something else instead",
    "expected-closing-tag-but-got-right-bracket":
        "Expected closing tag. Got '>' instead. Ignoring '</>'.",
    "expected-closing-tag-but-got-eof":
        "Expected closing tag. Unexpected end of file.",
    "expected-closing-tag-but-got-char":
        "Expected closing tag. Unexpected character '%(data)s' found.",
    "eof-in-tag-name":
        "Unexpected end of file in the tag name.",
    "expected-attribute-name-but-got-eof":
        "Unexpected end of file. Expected attribute name instead.",
    "eof-in-attribute-name":
        "Unexpected end of file in attribute name.",
    "invalid-character-in-attribute-name":
        "Invalid character in attribute name",
    "duplicate-attribute":
        "Dropped duplicate attribute on tag.",
    "expected-end-of-tag-name-but-got-eof":
        "Unexpected end of file. Expected = or end of tag.",
    "expected-attribute-value-but-got-eof":
        "Unexpected end of file. Expected attribute value.",
    "expected-attribute-value-but-got-right-bracket":
        "Expected attribute value. Got '>' instead.",
    'equals-in-unquoted-attribute-value':
        "Unexpected = in unquoted attribute",
    'unexpected-character-in-unquoted-attribute-value':
        "Unexpected character in unquoted attribute",
    "invalid-character-after-attribute-name":
        "Unexpected character after attribute name.",
    "unexpected-character-after-attribute-value":
        "Unexpected character after attribute value.",
    "eof-in-attribute-value-double-quote":
        "Unexpected end of file in attribute value (\").",
    "eof-in-attribute-value-single-quote":
        "Unexpected end of file in attribute value (').",
    "eof-in-attribute-value-no-quotes":
        "Unexpected end of file in attribute value.",
    "unexpected-EOF-after-solidus-in-tag":
        "Unexpected end of file in tag. Expected >",
    "unexpected-character-after-solidus-in-tag":
        "Unexpected character after / in tag. Expected >",
    "expected-dashes-or-doctype":
        "Expected '--' or 'DOCTYPE'. Not found.",
    "unexpected-bang-after-double-dash-in-comment":
        "Unexpected ! after -- in comment",
    "unexpected-space-after-double-dash-in-comment":
        "Unexpected space after -- in comment",
    "incorrect-comment":
        "Incorrect comment.",
    "eof-in-comment":
        "Unexpected end of file in comment.",
    "eof-in-comment-end-dash":
        "Unexpected end of file in comment (-)",
    "unexpected-dash-after-double-dash-in-comment":
        "Unexpected '-' after '--' found in comment.",
    "eof-in-comment-double-dash":
        "Unexpected end of file in comment (--).",
    "eof-in-comment-end-space-state":
        "Unexpected end of file in comment.",
    "eof-in-comment-end-bang-state":
        "Unexpected end of file in comment.",
    "unexpected-char-in-comment":
        "Unexpected character in comment found.",
    "need-space-after-doctype":
        "No space after literal string 'DOCTYPE'.",
    "expected-doctype-name-but-got-right-bracket":
        "Unexpected > character. Expected DOCTYPE name.",
    "expected-doctype-name-but-got-eof":
        "Unexpected end of file. Expected DOCTYPE name.",
    "eof-in-doctype-name":
        "Unexpected end of file in DOCTYPE name.",
    "eof-in-doctype":
        "Unexpected end of file in DOCTYPE.",
    "expected-space-or-right-bracket-in-doctype":
        "Expected space or '>'. Got '%(data)s'",
    "unexpected-end-of-doctype":
        "Unexpected end of DOCTYPE.",
    "unexpected-char-in-doctype":
        "Unexpected character in DOCTYPE.",
    "eof-in-innerhtml":
        "XXX innerHTML EOF",
    "unexpected-doctype":
        "Unexpected DOCTYPE. Ignored.",
    "non-html-root":
        "html needs to be the first start tag.",
    "expected-doctype-but-got-eof":
        "Unexpected End of file. Expected DOCTYPE.",
    "unknown-doctype":
        "Erroneous DOCTYPE.",
    "expected-doctype-but-got-chars":
        "Unexpected non-space characters. Expected DOCTYPE.",
    "expected-doctype-but-got-start-tag":
        "Unexpected start tag (%(name)s). Expected DOCTYPE.",
    "expected-doctype-but-got-end-tag":
        "Unexpected end tag (%(name)s). Expected DOCTYPE.",
    "end-tag-after-implied-root":
        "Unexpected end tag (%(name)s) after the (implied) root element.",
    "expected-named-closing-tag-but-got-eof":
        "Unexpected end of file. Expected end tag (%(name)s).",
    "two-heads-are-not-better-than-one":
        "Unexpected start tag head in existing head. Ignored.",
    "unexpected-end-tag":
        "Unexpected end tag (%(name)s). Ignored.",
    "unexpected-start-tag-out-of-my-head":
        "Unexpected start tag (%(name)s) that can be in head. Moved.",
    "unexpected-start-tag":
        "Unexpected start tag (%(name)s).",
    "missing-end-tag":
        "Missing end tag (%(name)s).",
    "missing-end-tags":
        "Missing end tags (%(name)s).",
    "unexpected-start-tag-implies-end-tag":
        "Unexpected start tag (%(startName)s) "
        "implies end tag (%(endName)s).",
    "unexpected-start-tag-treated-as":
        "Unexpected start tag (%(originalName)s). Treated as %(newName)s.",
    "deprecated-tag":
        "Unexpected start tag %(name)s. Don't use it!",
    "unexpected-start-tag-ignored":
        "Unexpected start tag %(name)s. Ignored.",
    "expected-one-end-tag-but-got-another":
        "Unexpected end tag (%(gotName)s). "
        "Missing end tag (%(expectedName)s).",
    "end-tag-too-early":
        "End tag (%(name)s) seen too early. Expected other end tag.",
    "end-tag-too-early-named":
        "Unexpected end tag (%(gotName)s). Expected end tag (%(expectedName)s).",
    "end-tag-too-early-ignored":
        "End tag (%(name)s) seen too early. Ignored.",
    "adoption-agency-1.1":
        "End tag (%(name)s) violates step 1, "
        "paragraph 1 of the adoption agency algorithm.",
    "adoption-agency-1.2":
        "End tag (%(name)s) violates step 1, "
        "paragraph 2 of the adoption agency algorithm.",
    "adoption-agency-1.3":
        "End tag (%(name)s) violates step 1, "
        "paragraph 3 of the adoption agency algorithm.",
    "adoption-agency-4.4":
        "End tag (%(name)s) violates step 4, "
        "paragraph 4 of the adoption agency algorithm.",
    "unexpected-end-tag-treated-as":
        "Unexpected end tag (%(originalName)s). Treated as %(newName)s.",
    "no-end-tag":
        "This element (%(name)s) has no end tag.",
    "unexpected-implied-end-tag-in-table":
        "Unexpected implied end tag (%(name)s) in the table phase.",
    "unexpected-implied-end-tag-in-table-body":
        "Unexpected implied end tag (%(name)s) in the table body phase.",
    "unexpected-char-implies-table-voodoo":
        "Unexpected non-space characters in "
        "table context caused voodoo mode.",
    "unexpected-hidden-input-in-table":
        "Unexpected input with type hidden in table context.",
    "unexpected-form-in-table":
        "Unexpected form in table context.",
    "unexpected-start-tag-implies-table-voodoo":
        "Unexpected start tag (%(name)s) in "
        "table context caused voodoo mode.",
    "unexpected-end-tag-implies-table-voodoo":
        "Unexpected end tag (%(name)s) in "
        "table context caused voodoo mode.",
    "unexpected-cell-in-table-body":
        "Unexpected table cell start tag (%(name)s) "
        "in the table body phase.",
    "unexpected-cell-end-tag":
        "Got table cell end tag (%(name)s) "
        "while required end tags are missing.",
    "unexpected-end-tag-in-table-body":
        "Unexpected end tag (%(name)s) in the table body phase. Ignored.",
    "unexpected-implied-end-tag-in-table-row":
        "Unexpected implied end tag (%(name)s) in the table row phase.",
    "unexpected-end-tag-in-table-row":
        "Unexpected end tag (%(name)s) in the table row phase. Ignored.",
    "unexpected-select-in-select":
        "Unexpected select start tag in the select phase "
        "treated as select end tag.",
    "unexpected-input-in-select":
        "Unexpected input start tag in the select phase.",
    "unexpected-start-tag-in-select":
        "Unexpected start tag token (%(name)s) in the select phase. "
        "Ignored.",
    "unexpected-end-tag-in-select":
        "Unexpected end tag (%(name)s) in the select phase. Ignored.",
    "unexpected-table-element-start-tag-in-select-in-table":
        "Unexpected table element start tag (%(name)s) in the select in table phase.",
    "unexpected-table-element-end-tag-in-select-in-table":
        "Unexpected table element end tag (%(name)s) in the select in table phase.",
    "unexpected-char-after-body":
        "Unexpected non-space characters in the after body phase.",
    "unexpected-start-tag-after-body":
        "Unexpected start tag token (%(name)s)"
        " in the after body phase.",
    "unexpected-end-tag-after-body":
        "Unexpected end tag token (%(name)s)"
        " in the after body phase.",
    "unexpected-char-in-frameset":
        "Unexpected characters in the frameset phase. Characters ignored.",
    "unexpected-start-tag-in-frameset":
        "Unexpected start tag token (%(name)s)"
        " in the frameset phase. Ignored.",
    "unexpected-frameset-in-frameset-innerhtml":
        "Unexpected end tag token (frameset) "
        "in the frameset phase (innerHTML).",
    "unexpected-end-tag-in-frameset":
        "Unexpected end tag token (%(name)s)"
        " in the frameset phase. Ignored.",
    "unexpected-char-after-frameset":
        "Unexpected non-space characters in the "
        "after frameset phase. Ignored.",
    "unexpected-start-tag-after-frameset":
        "Unexpected start tag (%(name)s)"
        " in the after frameset phase. Ignored.",
    "unexpected-end-tag-after-frameset":
        "Unexpected end tag (%(name)s)"
        " in the after frameset phase. Ignored.",
    "unexpected-end-tag-after-body-innerhtml":
        "Unexpected end tag after body(innerHtml)",
    "expected-eof-but-got-char":
        "Unexpected non-space characters. Expected end of file.",
    "expected-eof-but-got-start-tag":
        "Unexpected start tag (%(name)s)"
        ". Expected end of file.",
    "expected-eof-but-got-end-tag":
        "Unexpected end tag (%(name)s)"
        ". Expected end of file.",
    "eof-in-table":
        "Unexpected end of file. Expected table content.",
    "eof-in-select":
        "Unexpected end of file. Expected select content.",
    "eof-in-frameset":
        "Unexpected end of file. Expected frameset content.",
    "eof-in-script-in-script":
        "Unexpected end of file. Expected script content.",
    "eof-in-foreign-lands":
        "Unexpected end of file. Expected foreign content",
    "non-void-element-with-trailing-solidus":
        "Trailing solidus not allowed on element %(name)s",
    "unexpected-html-element-in-foreign-content":
        "Element %(name)s not allowed in a non-html context",
    "unexpected-end-tag-before-html":
        "Unexpected end tag (%(name)s) before html.",
    "unexpected-inhead-noscript-tag":
        "Element %(name)s not allowed in a inhead-noscript context",
    "eof-in-head-noscript":
        "Unexpected end of file. Expected inhead-noscript content",
    "char-in-head-noscript":
        "Unexpected non-space character. Expected inhead-noscript content",
    "XXX-undefined-error":
        "Undefined error (this sucks and should be fixed)",
}

# Prefix -> namespace URI table for the namespaces the parser knows about.
namespaces = dict([
    ("html", "http://www.w3.org/1999/xhtml"),
    ("mathml", "http://www.w3.org/1998/Math/MathML"),
    ("svg", "http://www.w3.org/2000/svg"),
    ("xlink", "http://www.w3.org/1999/xlink"),
    ("xml", "http://www.w3.org/XML/1998/namespace"),
    ("xmlns", "http://www.w3.org/2000/xmlns/"),
])

# (namespace, tag) pairs that establish a "scope" during tree construction.
scopingElements = frozenset(
    (namespaces[ns], tag)
    for ns, tags in (
        ("html", ("applet", "caption", "html", "marquee", "object",
                  "table", "td", "th")),
        ("mathml", ("mi", "mo", "mn", "ms", "mtext", "annotation-xml")),
        ("svg", ("foreignObject", "desc", "title")),
    )
    for tag in tags)

# (namespace, tag) pairs tracked by the list of active formatting elements.
formattingElements = frozenset(
    (namespaces["html"], tag)
    for tag in ("a", "b", "big", "code", "em", "font", "i", "nobr",
                "s", "small", "strike", "strong", "tt", "u"))

# (namespace, tag) pairs in the HTML spec's "special" category.
specialElements = frozenset(
    (namespaces[ns], tag)
    for ns, tags in (
        ("html", (
            "address", "applet", "area", "article", "aside", "base",
            "basefont", "bgsound", "blockquote", "body", "br", "button",
            "caption", "center", "col", "colgroup", "command", "dd",
            "details", "dir", "div", "dl", "dt", "embed", "fieldset",
            "figure", "footer", "form", "frame", "frameset", "h1", "h2",
            "h3", "h4", "h5", "h6", "head", "header", "hr", "html",
            "iframe",
            # Note that image is commented out in the spec as "this isn't an
            # element that can end up on the stack, so it doesn't matter,"
            "image", "img", "input", "isindex", "li", "link", "listing",
            "marquee", "menu", "meta", "nav", "noembed", "noframes",
            "noscript", "object", "ol", "p", "param", "plaintext", "pre",
            "script", "section", "select", "style", "table", "tbody",
            "td", "textarea", "tfoot", "th", "thead", "title", "tr",
            "ul", "wbr", "xmp")),
        ("svg", ("foreignObject",)),
    )
    for tag in tags)

# Elements that are HTML integration points in foreign content.
# Fix: "annotaion-xml" was a typo for "annotation-xml" (the MathML
# annotation-xml element is an HTML integration point per the HTML spec;
# the misspelled entry could never match).  Matches the upstream html5lib
# correction.
htmlIntegrationPointElements = frozenset([
    (namespaces["mathml"], "annotation-xml"),
    (namespaces["svg"], "foreignObject"),
    (namespaces["svg"], "desc"),
    (namespaces["svg"], "title")
])

# MathML elements that are text integration points in foreign content.
mathmlTextIntegrationPointElements = frozenset(
    (namespaces["mathml"], tag)
    for tag in ("mi", "mo", "mn", "ms", "mtext"))

# SVG attributes whose lowercased form (as produced by the tokenizer)
# must be mapped back to the spec's camelCase spelling.  Every key is
# exactly the lowercase of its value, so derive the table from the
# canonical names.
adjustSVGAttributes = dict(
    (name.lower(), name)
    for name in (
        "attributeName", "attributeType", "baseFrequency", "baseProfile",
        "calcMode", "clipPathUnits", "contentScriptType",
        "contentStyleType", "diffuseConstant", "edgeMode",
        "externalResourcesRequired", "filterRes", "filterUnits",
        "glyphRef", "gradientTransform", "gradientUnits", "kernelMatrix",
        "kernelUnitLength", "keyPoints", "keySplines", "keyTimes",
        "lengthAdjust", "limitingConeAngle", "markerHeight",
        "markerUnits", "markerWidth", "maskContentUnits", "maskUnits",
        "numOctaves", "pathLength", "patternContentUnits",
        "patternTransform", "patternUnits", "pointsAtX", "pointsAtY",
        "pointsAtZ", "preserveAlpha", "preserveAspectRatio",
        "primitiveUnits", "refX", "refY", "repeatCount", "repeatDur",
        "requiredExtensions", "requiredFeatures", "specularConstant",
        "specularExponent", "spreadMethod", "startOffset", "stdDeviation",
        "stitchTiles", "surfaceScale", "systemLanguage", "tableValues",
        "targetX", "targetY", "textLength", "viewBox", "viewTarget",
        "xChannelSelector", "yChannelSelector", "zoomAndPan"))

# The single MathML attribute whose camelCase spelling must be restored.
adjustMathMLAttributes = dict([("definitionurl", "definitionURL")])

# Maps qualified attribute names seen on foreign (SVG/MathML) elements to
# (prefix, local name, namespace URI) triples; a None prefix means the
# attribute is written without one.
adjustForeignAttributes = {
    "xlink:actuate": ("xlink", "actuate", namespaces["xlink"]),
    "xlink:arcrole": ("xlink", "arcrole", namespaces["xlink"]),
    "xlink:href": ("xlink", "href", namespaces["xlink"]),
    "xlink:role": ("xlink", "role", namespaces["xlink"]),
    "xlink:show": ("xlink", "show", namespaces["xlink"]),
    "xlink:title": ("xlink", "title", namespaces["xlink"]),
    "xlink:type": ("xlink", "type", namespaces["xlink"]),
    "xml:base": ("xml", "base", namespaces["xml"]),
    "xml:lang": ("xml", "lang", namespaces["xml"]),
    "xml:space": ("xml", "space", namespaces["xml"]),
    "xmlns": (None, "xmlns", namespaces["xmlns"]),
    "xmlns:xlink": ("xmlns", "xlink", namespaces["xmlns"])
}

# Inverse of adjustForeignAttributes: (namespace, local name) back to the
# qualified attribute name.
unadjustForeignAttributes = dict(
    ((attr_ns, attr_local), qname)
    for qname, (attr_prefix, attr_local, attr_ns)
    in adjustForeignAttributes.items())

# The five ASCII whitespace characters recognised by the HTML spec.
spaceCharacters = frozenset("\t\n\u000C \r")

# Elements that trigger the special "in table" insertion-mode handling.
tableInsertModeElements = frozenset(
    "table tbody tfoot thead tr".split())

# Character-class sets (built from the string module) used for fast
# membership tests while tokenizing.
asciiLowercase = frozenset(string.ascii_lowercase)
asciiUppercase = frozenset(string.ascii_uppercase)
asciiLetters = frozenset(string.ascii_letters)
digits = frozenset(string.digits)
hexDigits = frozenset(string.hexdigits)

# str.translate table mapping ASCII uppercase codepoints to lowercase.
asciiUpper2Lower = dict(
    (ord(upper), ord(upper.lower())) for upper in string.ascii_uppercase)

# Heading elements need to be ordered, hence a tuple rather than a set.
headingElements = tuple("h%d" % level for level in range(1, 7))

# Void elements: never have content and take no end tag.
voidElements = frozenset(
    "base command event-source link meta hr br img embed param "
    "area col input source track".split())

# Elements whose content is tokenized as RCDATA (entity references allowed).
cdataElements = frozenset(("title", "textarea"))

# Elements whose content is tokenized as raw text (no entity references).
rcdataElements = frozenset(
    "style script xmp iframe noembed noframes noscript".split())

# Attributes that may be serialized in minimized (value-less) form, keyed
# by tag name.  NOTE(review): the "" key appears to hold attributes that
# are boolean on any element — confirm against the serializer's usage.
booleanAttributes = {
    "": frozenset(["irrelevant"]),
    "style": frozenset(["scoped"]),
    "img": frozenset(["ismap"]),
    "audio": frozenset(["autoplay", "controls"]),
    "video": frozenset(["autoplay", "controls"]),
    "script": frozenset(["defer", "async"]),
    "details": frozenset(["open"]),
    "datagrid": frozenset(["multiple", "disabled"]),
    "command": frozenset(["hidden", "disabled", "checked", "default"]),
    "hr": frozenset(["noshade"]),
    "menu": frozenset(["autosubmit"]),
    "fieldset": frozenset(["disabled", "readonly"]),
    "option": frozenset(["disabled", "readonly", "selected"]),
    "optgroup": frozenset(["disabled", "readonly"]),
    "button": frozenset(["disabled", "autofocus"]),
    "input": frozenset(["disabled", "readonly", "required", "autofocus", "checked", "ismap"]),
    "select": frozenset(["disabled", "readonly", "autofocus", "multiple"]),
    "output": frozenset(["disabled", "readonly"]),
}

# entitiesWindows1252 has to be _ordered_ and needs to have an index. It
# therefore can't be a frozenset.
# Index (byte value - 0x80) yields the Unicode codepoint to substitute;
# 65533 (U+FFFD REPLACEMENT CHARACTER) marks bytes with no mapping.
entitiesWindows1252 = (
    8364,   # 0x80  0x20AC  EURO SIGN
    65533,  # 0x81          UNDEFINED
    8218,   # 0x82  0x201A  SINGLE LOW-9 QUOTATION MARK
    402,    # 0x83  0x0192  LATIN SMALL LETTER F WITH HOOK
    8222,   # 0x84  0x201E  DOUBLE LOW-9 QUOTATION MARK
    8230,   # 0x85  0x2026  HORIZONTAL ELLIPSIS
    8224,   # 0x86  0x2020  DAGGER
    8225,   # 0x87  0x2021  DOUBLE DAGGER
    710,    # 0x88  0x02C6  MODIFIER LETTER CIRCUMFLEX ACCENT
    8240,   # 0x89  0x2030  PER MILLE SIGN
    352,    # 0x8A  0x0160  LATIN CAPITAL LETTER S WITH CARON
    8249,   # 0x8B  0x2039  SINGLE LEFT-POINTING ANGLE QUOTATION MARK
    338,    # 0x8C  0x0152  LATIN CAPITAL LIGATURE OE
    65533,  # 0x8D          UNDEFINED
    381,    # 0x8E  0x017D  LATIN CAPITAL LETTER Z WITH CARON
    65533,  # 0x8F          UNDEFINED
    65533,  # 0x90          UNDEFINED
    8216,   # 0x91  0x2018  LEFT SINGLE QUOTATION MARK
    8217,   # 0x92  0x2019  RIGHT SINGLE QUOTATION MARK
    8220,   # 0x93  0x201C  LEFT DOUBLE QUOTATION MARK
    8221,   # 0x94  0x201D  RIGHT DOUBLE QUOTATION MARK
    8226,   # 0x95  0x2022  BULLET
    8211,   # 0x96  0x2013  EN DASH
    8212,   # 0x97  0x2014  EM DASH
    732,    # 0x98  0x02DC  SMALL TILDE
    8482,   # 0x99  0x2122  TRADE MARK SIGN
    353,    # 0x9A  0x0161  LATIN SMALL LETTER S WITH CARON
    8250,   # 0x9B  0x203A  SINGLE RIGHT-POINTING ANGLE QUOTATION MARK
    339,    # 0x9C  0x0153  LATIN SMALL LIGATURE OE
    65533,  # 0x9D          UNDEFINED
    382,    # 0x9E  0x017E  LATIN SMALL LETTER Z WITH CARON
    376     # 0x9F  0x0178  LATIN CAPITAL LETTER Y WITH DIAERESIS
)

# The five entity references that are predefined in XML.
xmlEntities = frozenset("lt; gt; amp; apos; quot;".split())

entities = {
    "AElig": "\xc6",
    "AElig;": "\xc6",
    "AMP": "&",
    "AMP;": "&",
    "Aacute": "\xc1",
    "Aacute;": "\xc1",
    "Abreve;": "\u0102",
    "Acirc": "\xc2",
    "Acirc;": "\xc2",
    "Acy;": "\u0410",
    "Afr;": "\U0001d504",
    "Agrave": "\xc0",
    "Agrave;": "\xc0",
    "Alpha;": "\u0391",
    "Amacr;": "\u0100",
    "And;": "\u2a53",
    "Aogon;": "\u0104",
    "Aopf;": "\U0001d538",
    "ApplyFunction;": "\u2061",
    "Aring": "\xc5",
    "Aring;": "\xc5",
    "Ascr;": "\U0001d49c",
    "Assign;": "\u2254",
    "Atilde": "\xc3",
    "Atilde;": "\xc3",
    "Auml": "\xc4",
    "Auml;": "\xc4",
    "Backslash;": "\u2216",
    "Barv;": "\u2ae7",
    "Barwed;": "\u2306",
    "Bcy;": "\u0411",
    "Because;": "\u2235",
    "Bernoullis;": "\u212c",
    "Beta;": "\u0392",
    "Bfr;": "\U0001d505",
    "Bopf;": "\U0001d539",
    "Breve;": "\u02d8",
    "Bscr;": "\u212c",
    "Bumpeq;": "\u224e",
    "CHcy;": "\u0427",
    "COPY": "\xa9",
    "COPY;": "\xa9",
    "Cacute;": "\u0106",
    "Cap;": "\u22d2",
    "CapitalDifferentialD;": "\u2145",
    "Cayleys;": "\u212d",
    "Ccaron;": "\u010c",
    "Ccedil": "\xc7",
    "Ccedil;": "\xc7",
    "Ccirc;": "\u0108",
    "Cconint;": "\u2230",
    "Cdot;": "\u010a",
    "Cedilla;": "\xb8",
    "CenterDot;": "\xb7",
    "Cfr;": "\u212d",
    "Chi;": "\u03a7",
    "CircleDot;": "\u2299",
    "CircleMinus;": "\u2296",
    "CirclePlus;": "\u2295",
    "CircleTimes;": "\u2297",
    "ClockwiseContourIntegral;": "\u2232",
    "CloseCurlyDoubleQuote;": "\u201d",
    "CloseCurlyQuote;": "\u2019",
    "Colon;": "\u2237",
    "Colone;": "\u2a74",
    "Congruent;": "\u2261",
    "Conint;": "\u222f",
    "ContourIntegral;": "\u222e",
    "Copf;": "\u2102",
    "Coproduct;": "\u2210",
    "CounterClockwiseContourIntegral;": "\u2233",
    "Cross;": "\u2a2f",
    "Cscr;": "\U0001d49e",
    "Cup;": "\u22d3",
    "CupCap;": "\u224d",
    "DD;": "\u2145",
    "DDotrahd;": "\u2911",
    "DJcy;": "\u0402",
    "DScy;": "\u0405",
    "DZcy;": "\u040f",
    "Dagger;": "\u2021",
    "Darr;": "\u21a1",
    "Dashv;": "\u2ae4",
    "Dcaron;": "\u010e",
    "Dcy;": "\u0414",
    "Del;": "\u2207",
    "Delta;": "\u0394",
    "Dfr;": "\U0001d507",
    "DiacriticalAcute;": "\xb4",
    "DiacriticalDot;": "\u02d9",
    "DiacriticalDoubleAcute;": "\u02dd",
    "DiacriticalGrave;": "`",
    "DiacriticalTilde;": "\u02dc",
    "Diamond;": "\u22c4",
    "DifferentialD;": "\u2146",
    "Dopf;": "\U0001d53b",
    "Dot;": "\xa8",
    "DotDot;": "\u20dc",
    "DotEqual;": "\u2250",
    "DoubleContourIntegral;": "\u222f",
    "DoubleDot;": "\xa8",
    "DoubleDownArrow;": "\u21d3",
    "DoubleLeftArrow;": "\u21d0",
    "DoubleLeftRightArrow;": "\u21d4",
    "DoubleLeftTee;": "\u2ae4",
    "DoubleLongLeftArrow;": "\u27f8",
    "DoubleLongLeftRightArrow;": "\u27fa",
    "DoubleLongRightArrow;": "\u27f9",
    "DoubleRightArrow;": "\u21d2",
    "DoubleRightTee;": "\u22a8",
    "DoubleUpArrow;": "\u21d1",
    "DoubleUpDownArrow;": "\u21d5",
    "DoubleVerticalBar;": "\u2225",
    "DownArrow;": "\u2193",
    "DownArrowBar;": "\u2913",
    "DownArrowUpArrow;": "\u21f5",
    "DownBreve;": "\u0311",
    "DownLeftRightVector;": "\u2950",
    "DownLeftTeeVector;": "\u295e",
    "DownLeftVector;": "\u21bd",
    "DownLeftVectorBar;": "\u2956",
    "DownRightTeeVector;": "\u295f",
    "DownRightVector;": "\u21c1",
    "DownRightVectorBar;": "\u2957",
    "DownTee;": "\u22a4",
    "DownTeeArrow;": "\u21a7",
    "Downarrow;": "\u21d3",
    "Dscr;": "\U0001d49f",
    "Dstrok;": "\u0110",
    "ENG;": "\u014a",
    "ETH": "\xd0",
    "ETH;": "\xd0",
    "Eacute": "\xc9",
    "Eacute;": "\xc9",
    "Ecaron;": "\u011a",
    "Ecirc": "\xca",
    "Ecirc;": "\xca",
    "Ecy;": "\u042d",
    "Edot;": "\u0116",
    "Efr;": "\U0001d508",
    "Egrave": "\xc8",
    "Egrave;": "\xc8",
    "Element;": "\u2208",
    "Emacr;": "\u0112",
    "EmptySmallSquare;": "\u25fb",
    "EmptyVerySmallSquare;": "\u25ab",
    "Eogon;": "\u0118",
    "Eopf;": "\U0001d53c",
    "Epsilon;": "\u0395",
    "Equal;": "\u2a75",
    "EqualTilde;": "\u2242",
    "Equilibrium;": "\u21cc",
    "Escr;": "\u2130",
    "Esim;": "\u2a73",
    "Eta;": "\u0397",
    "Euml": "\xcb",
    "Euml;": "\xcb",
    "Exists;": "\u2203",
    "ExponentialE;": "\u2147",
    "Fcy;": "\u0424",
    "Ffr;": "\U0001d509",
    "FilledSmallSquare;": "\u25fc",
    "FilledVerySmallSquare;": "\u25aa",
    "Fopf;": "\U0001d53d",
    "ForAll;": "\u2200",
    "Fouriertrf;": "\u2131",
    "Fscr;": "\u2131",
    "GJcy;": "\u0403",
    "GT": ">",
    "GT;": ">",
    "Gamma;": "\u0393",
    "Gammad;": "\u03dc",
    "Gbreve;": "\u011e",
    "Gcedil;": "\u0122",
    "Gcirc;": "\u011c",
    "Gcy;": "\u0413",
    "Gdot;": "\u0120",
    "Gfr;": "\U0001d50a",
    "Gg;": "\u22d9",
    "Gopf;": "\U0001d53e",
    "GreaterEqual;": "\u2265",
    "GreaterEqualLess;": "\u22db",
    "GreaterFullEqual;": "\u2267",
    "GreaterGreater;": "\u2aa2",
    "GreaterLess;": "\u2277",
    "GreaterSlantEqual;": "\u2a7e",
    "GreaterTilde;": "\u2273",
    "Gscr;": "\U0001d4a2",
    "Gt;": "\u226b",
    "HARDcy;": "\u042a",
    "Hacek;": "\u02c7",
    "Hat;": "^",
    "Hcirc;": "\u0124",
    "Hfr;": "\u210c",
    "HilbertSpace;": "\u210b",
    "Hopf;": "\u210d",
    "HorizontalLine;": "\u2500",
    "Hscr;": "\u210b",
    "Hstrok;": "\u0126",
    "HumpDownHump;": "\u224e",
    "HumpEqual;": "\u224f",
    "IEcy;": "\u0415",
    "IJlig;": "\u0132",
    "IOcy;": "\u0401",
    "Iacute": "\xcd",
    "Iacute;": "\xcd",
    "Icirc": "\xce",
    "Icirc;": "\xce",
    "Icy;": "\u0418",
    "Idot;": "\u0130",
    "Ifr;": "\u2111",
    "Igrave": "\xcc",
    "Igrave;": "\xcc",
    "Im;": "\u2111",
    "Imacr;": "\u012a",
    "ImaginaryI;": "\u2148",
    "Implies;": "\u21d2",
    "Int;": "\u222c",
    "Integral;": "\u222b",
    "Intersection;": "\u22c2",
    "InvisibleComma;": "\u2063",
    "InvisibleTimes;": "\u2062",
    "Iogon;": "\u012e",
    "Iopf;": "\U0001d540",
    "Iota;": "\u0399",
    "Iscr;": "\u2110",
    "Itilde;": "\u0128",
    "Iukcy;": "\u0406",
    "Iuml": "\xcf",
    "Iuml;": "\xcf",
    "Jcirc;": "\u0134",
    "Jcy;": "\u0419",
    "Jfr;": "\U0001d50d",
    "Jopf;": "\U0001d541",
    "Jscr;": "\U0001d4a5",
    "Jsercy;": "\u0408",
    "Jukcy;": "\u0404",
    "KHcy;": "\u0425",
    "KJcy;": "\u040c",
    "Kappa;": "\u039a",
    "Kcedil;": "\u0136",
    "Kcy;": "\u041a",
    "Kfr;": "\U0001d50e",
    "Kopf;": "\U0001d542",
    "Kscr;": "\U0001d4a6",
    "LJcy;": "\u0409",
    "LT": "<",
    "LT;": "<",
    "Lacute;": "\u0139",
    "Lambda;": "\u039b",
    "Lang;": "\u27ea",
    "Laplacetrf;": "\u2112",
    "Larr;": "\u219e",
    "Lcaron;": "\u013d",
    "Lcedil;": "\u013b",
    "Lcy;": "\u041b",
    "LeftAngleBracket;": "\u27e8",
    "LeftArrow;": "\u2190",
    "LeftArrowBar;": "\u21e4",
    "LeftArrowRightArrow;": "\u21c6",
    "LeftCeiling;": "\u2308",
    "LeftDoubleBracket;": "\u27e6",
    "LeftDownTeeVector;": "\u2961",
    "LeftDownVector;": "\u21c3",
    "LeftDownVectorBar;": "\u2959",
    "LeftFloor;": "\u230a",
    "LeftRightArrow;": "\u2194",
    "LeftRightVector;": "\u294e",
    "LeftTee;": "\u22a3",
    "LeftTeeArrow;": "\u21a4",
    "LeftTeeVector;": "\u295a",
    "LeftTriangle;": "\u22b2",
    "LeftTriangleBar;": "\u29cf",
    "LeftTriangleEqual;": "\u22b4",
    "LeftUpDownVector;": "\u2951",
    "LeftUpTeeVector;": "\u2960",
    "LeftUpVector;": "\u21bf",
    "LeftUpVectorBar;": "\u2958",
    "LeftVector;": "\u21bc",
    "LeftVectorBar;": "\u2952",
    "Leftarrow;": "\u21d0",
    "Leftrightarrow;": "\u21d4",
    "LessEqualGreater;": "\u22da",
    "LessFullEqual;": "\u2266",
    "LessGreater;": "\u2276",
    "LessLess;": "\u2aa1",
    "LessSlantEqual;": "\u2a7d",
    "LessTilde;": "\u2272",
    "Lfr;": "\U0001d50f",
    "Ll;": "\u22d8",
    "Lleftarrow;": "\u21da",
    "Lmidot;": "\u013f",
    "LongLeftArrow;": "\u27f5",
    "LongLeftRightArrow;": "\u27f7",
    "LongRightArrow;": "\u27f6",
    "Longleftarrow;": "\u27f8",
    "Longleftrightarrow;": "\u27fa",
    "Longrightarrow;": "\u27f9",
    "Lopf;": "\U0001d543",
    "LowerLeftArrow;": "\u2199",
    "LowerRightArrow;": "\u2198",
    "Lscr;": "\u2112",
    "Lsh;": "\u21b0",
    "Lstrok;": "\u0141",
    "Lt;": "\u226a",
    "Map;": "\u2905",
    "Mcy;": "\u041c",
    "MediumSpace;": "\u205f",
    "Mellintrf;": "\u2133",
    "Mfr;": "\U0001d510",
    "MinusPlus;": "\u2213",
    "Mopf;": "\U0001d544",
    "Mscr;": "\u2133",
    "Mu;": "\u039c",
    "NJcy;": "\u040a",
    "Nacute;": "\u0143",
    "Ncaron;": "\u0147",
    "Ncedil;": "\u0145",
    "Ncy;": "\u041d",
    "NegativeMediumSpace;": "\u200b",
    "NegativeThickSpace;": "\u200b",
    "NegativeThinSpace;": "\u200b",
    "NegativeVeryThinSpace;": "\u200b",
    "NestedGreaterGreater;": "\u226b",
    "NestedLessLess;": "\u226a",
    "NewLine;": "\n",
    "Nfr;": "\U0001d511",
    "NoBreak;": "\u2060",
    "NonBreakingSpace;": "\xa0",
    "Nopf;": "\u2115",
    "Not;": "\u2aec",
    "NotCongruent;": "\u2262",
    "NotCupCap;": "\u226d",
    "NotDoubleVerticalBar;": "\u2226",
    "NotElement;": "\u2209",
    "NotEqual;": "\u2260",
    "NotEqualTilde;": "\u2242\u0338",
    "NotExists;": "\u2204",
    "NotGreater;": "\u226f",
    "NotGreaterEqual;": "\u2271",
    "NotGreaterFullEqual;": "\u2267\u0338",
    "NotGreaterGreater;": "\u226b\u0338",
    "NotGreaterLess;": "\u2279",
    "NotGreaterSlantEqual;": "\u2a7e\u0338",
    "NotGreaterTilde;": "\u2275",
    "NotHumpDownHump;": "\u224e\u0338",
    "NotHumpEqual;": "\u224f\u0338",
    "NotLeftTriangle;": "\u22ea",
    "NotLeftTriangleBar;": "\u29cf\u0338",
    "NotLeftTriangleEqual;": "\u22ec",
    "NotLess;": "\u226e",
    "NotLessEqual;": "\u2270",
    "NotLessGreater;": "\u2278",
    "NotLessLess;": "\u226a\u0338",
    "NotLessSlantEqual;": "\u2a7d\u0338",
    "NotLessTilde;": "\u2274",
    "NotNestedGreaterGreater;": "\u2aa2\u0338",
    "NotNestedLessLess;": "\u2aa1\u0338",
    "NotPrecedes;": "\u2280",
    "NotPrecedesEqual;": "\u2aaf\u0338",
    "NotPrecedesSlantEqual;": "\u22e0",
    "NotReverseElement;": "\u220c",
    "NotRightTriangle;": "\u22eb",
    "NotRightTriangleBar;": "\u29d0\u0338",
    "NotRightTriangleEqual;": "\u22ed",
    "NotSquareSubset;": "\u228f\u0338",
    "NotSquareSubsetEqual;": "\u22e2",
    "NotSquareSuperset;": "\u2290\u0338",
    "NotSquareSupersetEqual;": "\u22e3",
    "NotSubset;": "\u2282\u20d2",
    "NotSubsetEqual;": "\u2288",
    "NotSucceeds;": "\u2281",
    "NotSucceedsEqual;": "\u2ab0\u0338",
    "NotSucceedsSlantEqual;": "\u22e1",
    "NotSucceedsTilde;": "\u227f\u0338",
    "NotSuperset;": "\u2283\u20d2",
    "NotSupersetEqual;": "\u2289",
    "NotTilde;": "\u2241",
    "NotTildeEqual;": "\u2244",
    "NotTildeFullEqual;": "\u2247",
    "NotTildeTilde;": "\u2249",
    "NotVerticalBar;": "\u2224",
    "Nscr;": "\U0001d4a9",
    "Ntilde": "\xd1",
    "Ntilde;": "\xd1",
    "Nu;": "\u039d",
    "OElig;": "\u0152",
    "Oacute": "\xd3",
    "Oacute;": "\xd3",
    "Ocirc": "\xd4",
    "Ocirc;": "\xd4",
    "Ocy;": "\u041e",
    "Odblac;": "\u0150",
    "Ofr;": "\U0001d512",
    "Ograve": "\xd2",
    "Ograve;": "\xd2",
    "Omacr;": "\u014c",
    "Omega;": "\u03a9",
    "Omicron;": "\u039f",
    "Oopf;": "\U0001d546",
    "OpenCurlyDoubleQuote;": "\u201c",
    "OpenCurlyQuote;": "\u2018",
    "Or;": "\u2a54",
    "Oscr;": "\U0001d4aa",
    "Oslash": "\xd8",
    "Oslash;": "\xd8",
    "Otilde": "\xd5",
    "Otilde;": "\xd5",
    "Otimes;": "\u2a37",
    "Ouml": "\xd6",
    "Ouml;": "\xd6",
    "OverBar;": "\u203e",
    "OverBrace;": "\u23de",
    "OverBracket;": "\u23b4",
    "OverParenthesis;": "\u23dc",
    "PartialD;": "\u2202",
    "Pcy;": "\u041f",
    "Pfr;": "\U0001d513",
    "Phi;": "\u03a6",
    "Pi;": "\u03a0",
    "PlusMinus;": "\xb1",
    "Poincareplane;": "\u210c",
    "Popf;": "\u2119",
    "Pr;": "\u2abb",
    "Precedes;": "\u227a",
    "PrecedesEqual;": "\u2aaf",
    "PrecedesSlantEqual;": "\u227c",
    "PrecedesTilde;": "\u227e",
    "Prime;": "\u2033",
    "Product;": "\u220f",
    "Proportion;": "\u2237",
    "Proportional;": "\u221d",
    "Pscr;": "\U0001d4ab",
    "Psi;": "\u03a8",
    "QUOT": "\"",
    "QUOT;": "\"",
    "Qfr;": "\U0001d514",
    "Qopf;": "\u211a",
    "Qscr;": "\U0001d4ac",
    "RBarr;": "\u2910",
    "REG": "\xae",
    "REG;": "\xae",
    "Racute;": "\u0154",
    "Rang;": "\u27eb",
    "Rarr;": "\u21a0",
    "Rarrtl;": "\u2916",
    "Rcaron;": "\u0158",
    "Rcedil;": "\u0156",
    "Rcy;": "\u0420",
    "Re;": "\u211c",
    "ReverseElement;": "\u220b",
    "ReverseEquilibrium;": "\u21cb",
    "ReverseUpEquilibrium;": "\u296f",
    "Rfr;": "\u211c",
    "Rho;": "\u03a1",
    "RightAngleBracket;": "\u27e9",
    "RightArrow;": "\u2192",
    "RightArrowBar;": "\u21e5",
    "RightArrowLeftArrow;": "\u21c4",
    "RightCeiling;": "\u2309",
    "RightDoubleBracket;": "\u27e7",
    "RightDownTeeVector;": "\u295d",
    "RightDownVector;": "\u21c2",
    "RightDownVectorBar;": "\u2955",
    "RightFloor;": "\u230b",
    "RightTee;": "\u22a2",
    "RightTeeArrow;": "\u21a6",
    "RightTeeVector;": "\u295b",
    "RightTriangle;": "\u22b3",
    "RightTriangleBar;": "\u29d0",
    "RightTriangleEqual;": "\u22b5",
    "RightUpDownVector;": "\u294f",
    "RightUpTeeVector;": "\u295c",
    "RightUpVector;": "\u21be",
    "RightUpVectorBar;": "\u2954",
    "RightVector;": "\u21c0",
    "RightVectorBar;": "\u2953",
    "Rightarrow;": "\u21d2",
    "Ropf;": "\u211d",
    "RoundImplies;": "\u2970",
    "Rrightarrow;": "\u21db",
    "Rscr;": "\u211b",
    "Rsh;": "\u21b1",
    "RuleDelayed;": "\u29f4",
    "SHCHcy;": "\u0429",
    "SHcy;": "\u0428",
    "SOFTcy;": "\u042c",
    "Sacute;": "\u015a",
    "Sc;": "\u2abc",
    "Scaron;": "\u0160",
    "Scedil;": "\u015e",
    "Scirc;": "\u015c",
    "Scy;": "\u0421",
    "Sfr;": "\U0001d516",
    "ShortDownArrow;": "\u2193",
    "ShortLeftArrow;": "\u2190",
    "ShortRightArrow;": "\u2192",
    "ShortUpArrow;": "\u2191",
    "Sigma;": "\u03a3",
    "SmallCircle;": "\u2218",
    "Sopf;": "\U0001d54a",
    "Sqrt;": "\u221a",
    "Square;": "\u25a1",
    "SquareIntersection;": "\u2293",
    "SquareSubset;": "\u228f",
    "SquareSubsetEqual;": "\u2291",
    "SquareSuperset;": "\u2290",
    "SquareSupersetEqual;": "\u2292",
    "SquareUnion;": "\u2294",
    "Sscr;": "\U0001d4ae",
    "Star;": "\u22c6",
    "Sub;": "\u22d0",
    "Subset;": "\u22d0",
    "SubsetEqual;": "\u2286",
    "Succeeds;": "\u227b",
    "SucceedsEqual;": "\u2ab0",
    "SucceedsSlantEqual;": "\u227d",
    "SucceedsTilde;": "\u227f",
    "SuchThat;": "\u220b",
    "Sum;": "\u2211",
    "Sup;": "\u22d1",
    "Superset;": "\u2283",
    "SupersetEqual;": "\u2287",
    "Supset;": "\u22d1",
    "THORN": "\xde",
    "THORN;": "\xde",
    "TRADE;": "\u2122",
    "TSHcy;": "\u040b",
    "TScy;": "\u0426",
    "Tab;": "\t",
    "Tau;": "\u03a4",
    "Tcaron;": "\u0164",
    "Tcedil;": "\u0162",
    "Tcy;": "\u0422",
    "Tfr;": "\U0001d517",
    "Therefore;": "\u2234",
    "Theta;": "\u0398",
    "ThickSpace;": "\u205f\u200a",
    "ThinSpace;": "\u2009",
    "Tilde;": "\u223c",
    "TildeEqual;": "\u2243",
    "TildeFullEqual;": "\u2245",
    "TildeTilde;": "\u2248",
    "Topf;": "\U0001d54b",
    "TripleDot;": "\u20db",
    "Tscr;": "\U0001d4af",
    "Tstrok;": "\u0166",
    "Uacute": "\xda",
    "Uacute;": "\xda",
    "Uarr;": "\u219f",
    "Uarrocir;": "\u2949",
    "Ubrcy;": "\u040e",
    "Ubreve;": "\u016c",
    "Ucirc": "\xdb",
    "Ucirc;": "\xdb",
    "Ucy;": "\u0423",
    "Udblac;": "\u0170",
    "Ufr;": "\U0001d518",
    "Ugrave": "\xd9",
    "Ugrave;": "\xd9",
    "Umacr;": "\u016a",
    "UnderBar;": "_",
    "UnderBrace;": "\u23df",
    "UnderBracket;": "\u23b5",
    "UnderParenthesis;": "\u23dd",
    "Union;": "\u22c3",
    "UnionPlus;": "\u228e",
    "Uogon;": "\u0172",
    "Uopf;": "\U0001d54c",
    "UpArrow;": "\u2191",
    "UpArrowBar;": "\u2912",
    "UpArrowDownArrow;": "\u21c5",
    "UpDownArrow;": "\u2195",
    "UpEquilibrium;": "\u296e",
    "UpTee;": "\u22a5",
    "UpTeeArrow;": "\u21a5",
    "Uparrow;": "\u21d1",
    "Updownarrow;": "\u21d5",
    "UpperLeftArrow;": "\u2196",
    "UpperRightArrow;": "\u2197",
    "Upsi;": "\u03d2",
    "Upsilon;": "\u03a5",
    "Uring;": "\u016e",
    "Uscr;": "\U0001d4b0",
    "Utilde;": "\u0168",
    "Uuml": "\xdc",
    "Uuml;": "\xdc",
    "VDash;": "\u22ab",
    "Vbar;": "\u2aeb",
    "Vcy;": "\u0412",
    "Vdash;": "\u22a9",
    "Vdashl;": "\u2ae6",
    "Vee;": "\u22c1",
    "Verbar;": "\u2016",
    "Vert;": "\u2016",
    "VerticalBar;": "\u2223",
    "VerticalLine;": "|",
    "VerticalSeparator;": "\u2758",
    "VerticalTilde;": "\u2240",
    "VeryThinSpace;": "\u200a",
    "Vfr;": "\U0001d519",
    "Vopf;": "\U0001d54d",
    "Vscr;": "\U0001d4b1",
    "Vvdash;": "\u22aa",
    "Wcirc;": "\u0174",
    "Wedge;": "\u22c0",
    "Wfr;": "\U0001d51a",
    "Wopf;": "\U0001d54e",
    "Wscr;": "\U0001d4b2",
    "Xfr;": "\U0001d51b",
    "Xi;": "\u039e",
    "Xopf;": "\U0001d54f",
    "Xscr;": "\U0001d4b3",
    "YAcy;": "\u042f",
    "YIcy;": "\u0407",
    "YUcy;": "\u042e",
    "Yacute": "\xdd",
    "Yacute;": "\xdd",
    "Ycirc;": "\u0176",
    "Ycy;": "\u042b",
    "Yfr;": "\U0001d51c",
    "Yopf;": "\U0001d550",
    "Yscr;": "\U0001d4b4",
    "Yuml;": "\u0178",
    "ZHcy;": "\u0416",
    "Zacute;": "\u0179",
    "Zcaron;": "\u017d",
    "Zcy;": "\u0417",
    "Zdot;": "\u017b",
    "ZeroWidthSpace;": "\u200b",
    "Zeta;": "\u0396",
    "Zfr;": "\u2128",
    "Zopf;": "\u2124",
    "Zscr;": "\U0001d4b5",
    "aacute": "\xe1",
    "aacute;": "\xe1",
    "abreve;": "\u0103",
    "ac;": "\u223e",
    "acE;": "\u223e\u0333",
    "acd;": "\u223f",
    "acirc": "\xe2",
    "acirc;": "\xe2",
    "acute": "\xb4",
    "acute;": "\xb4",
    "acy;": "\u0430",
    "aelig": "\xe6",
    "aelig;": "\xe6",
    "af;": "\u2061",
    "afr;": "\U0001d51e",
    "agrave": "\xe0",
    "agrave;": "\xe0",
    "alefsym;": "\u2135",
    "aleph;": "\u2135",
    "alpha;": "\u03b1",
    "amacr;": "\u0101",
    "amalg;": "\u2a3f",
    "amp": "&",
    "amp;": "&",
    "and;": "\u2227",
    "andand;": "\u2a55",
    "andd;": "\u2a5c",
    "andslope;": "\u2a58",
    "andv;": "\u2a5a",
    "ang;": "\u2220",
    "ange;": "\u29a4",
    "angle;": "\u2220",
    "angmsd;": "\u2221",
    "angmsdaa;": "\u29a8",
    "angmsdab;": "\u29a9",
    "angmsdac;": "\u29aa",
    "angmsdad;": "\u29ab",
    "angmsdae;": "\u29ac",
    "angmsdaf;": "\u29ad",
    "angmsdag;": "\u29ae",
    "angmsdah;": "\u29af",
    "angrt;": "\u221f",
    "angrtvb;": "\u22be",
    "angrtvbd;": "\u299d",
    "angsph;": "\u2222",
    "angst;": "\xc5",
    "angzarr;": "\u237c",
    "aogon;": "\u0105",
    "aopf;": "\U0001d552",
    "ap;": "\u2248",
    "apE;": "\u2a70",
    "apacir;": "\u2a6f",
    "ape;": "\u224a",
    "apid;": "\u224b",
    "apos;": "'",
    "approx;": "\u2248",
    "approxeq;": "\u224a",
    "aring": "\xe5",
    "aring;": "\xe5",
    "ascr;": "\U0001d4b6",
    "ast;": "*",
    "asymp;": "\u2248",
    "asympeq;": "\u224d",
    "atilde": "\xe3",
    "atilde;": "\xe3",
    "auml": "\xe4",
    "auml;": "\xe4",
    "awconint;": "\u2233",
    "awint;": "\u2a11",
    "bNot;": "\u2aed",
    "backcong;": "\u224c",
    "backepsilon;": "\u03f6",
    "backprime;": "\u2035",
    "backsim;": "\u223d",
    "backsimeq;": "\u22cd",
    "barvee;": "\u22bd",
    "barwed;": "\u2305",
    "barwedge;": "\u2305",
    "bbrk;": "\u23b5",
    "bbrktbrk;": "\u23b6",
    "bcong;": "\u224c",
    "bcy;": "\u0431",
    "bdquo;": "\u201e",
    "becaus;": "\u2235",
    "because;": "\u2235",
    "bemptyv;": "\u29b0",
    "bepsi;": "\u03f6",
    "bernou;": "\u212c",
    "beta;": "\u03b2",
    "beth;": "\u2136",
    "between;": "\u226c",
    "bfr;": "\U0001d51f",
    "bigcap;": "\u22c2",
    "bigcirc;": "\u25ef",
    "bigcup;": "\u22c3",
    "bigodot;": "\u2a00",
    "bigoplus;": "\u2a01",
    "bigotimes;": "\u2a02",
    "bigsqcup;": "\u2a06",
    "bigstar;": "\u2605",
    "bigtriangledown;": "\u25bd",
    "bigtriangleup;": "\u25b3",
    "biguplus;": "\u2a04",
    "bigvee;": "\u22c1",
    "bigwedge;": "\u22c0",
    "bkarow;": "\u290d",
    "blacklozenge;": "\u29eb",
    "blacksquare;": "\u25aa",
    "blacktriangle;": "\u25b4",
    "blacktriangledown;": "\u25be",
    "blacktriangleleft;": "\u25c2",
    "blacktriangleright;": "\u25b8",
    "blank;": "\u2423",
    "blk12;": "\u2592",
    "blk14;": "\u2591",
    "blk34;": "\u2593",
    "block;": "\u2588",
    "bne;": "=\u20e5",
    "bnequiv;": "\u2261\u20e5",
    "bnot;": "\u2310",
    "bopf;": "\U0001d553",
    "bot;": "\u22a5",
    "bottom;": "\u22a5",
    "bowtie;": "\u22c8",
    "boxDL;": "\u2557",
    "boxDR;": "\u2554",
    "boxDl;": "\u2556",
    "boxDr;": "\u2553",
    "boxH;": "\u2550",
    "boxHD;": "\u2566",
    "boxHU;": "\u2569",
    "boxHd;": "\u2564",
    "boxHu;": "\u2567",
    "boxUL;": "\u255d",
    "boxUR;": "\u255a",
    "boxUl;": "\u255c",
    "boxUr;": "\u2559",
    "boxV;": "\u2551",
    "boxVH;": "\u256c",
    "boxVL;": "\u2563",
    "boxVR;": "\u2560",
    "boxVh;": "\u256b",
    "boxVl;": "\u2562",
    "boxVr;": "\u255f",
    "boxbox;": "\u29c9",
    "boxdL;": "\u2555",
    "boxdR;": "\u2552",
    "boxdl;": "\u2510",
    "boxdr;": "\u250c",
    "boxh;": "\u2500",
    "boxhD;": "\u2565",
    "boxhU;": "\u2568",
    "boxhd;": "\u252c",
    "boxhu;": "\u2534",
    "boxminus;": "\u229f",
    "boxplus;": "\u229e",
    "boxtimes;": "\u22a0",
    "boxuL;": "\u255b",
    "boxuR;": "\u2558",
    "boxul;": "\u2518",
    "boxur;": "\u2514",
    "boxv;": "\u2502",
    "boxvH;": "\u256a",
    "boxvL;": "\u2561",
    "boxvR;": "\u255e",
    "boxvh;": "\u253c",
    "boxvl;": "\u2524",
    "boxvr;": "\u251c",
    "bprime;": "\u2035",
    "breve;": "\u02d8",
    "brvbar": "\xa6",
    "brvbar;": "\xa6",
    "bscr;": "\U0001d4b7",
    "bsemi;": "\u204f",
    "bsim;": "\u223d",
    "bsime;": "\u22cd",
    "bsol;": "\\",
    "bsolb;": "\u29c5",
    "bsolhsub;": "\u27c8",
    "bull;": "\u2022",
    "bullet;": "\u2022",
    "bump;": "\u224e",
    "bumpE;": "\u2aae",
    "bumpe;": "\u224f",
    "bumpeq;": "\u224f",
    "cacute;": "\u0107",
    "cap;": "\u2229",
    "capand;": "\u2a44",
    "capbrcup;": "\u2a49",
    "capcap;": "\u2a4b",
    "capcup;": "\u2a47",
    "capdot;": "\u2a40",
    "caps;": "\u2229\ufe00",
    "caret;": "\u2041",
    "caron;": "\u02c7",
    "ccaps;": "\u2a4d",
    "ccaron;": "\u010d",
    "ccedil": "\xe7",
    "ccedil;": "\xe7",
    "ccirc;": "\u0109",
    "ccups;": "\u2a4c",
    "ccupssm;": "\u2a50",
    "cdot;": "\u010b",
    "cedil": "\xb8",
    "cedil;": "\xb8",
    "cemptyv;": "\u29b2",
    "cent": "\xa2",
    "cent;": "\xa2",
    "centerdot;": "\xb7",
    "cfr;": "\U0001d520",
    "chcy;": "\u0447",
    "check;": "\u2713",
    "checkmark;": "\u2713",
    "chi;": "\u03c7",
    "cir;": "\u25cb",
    "cirE;": "\u29c3",
    "circ;": "\u02c6",
    "circeq;": "\u2257",
    "circlearrowleft;": "\u21ba",
    "circlearrowright;": "\u21bb",
    "circledR;": "\xae",
    "circledS;": "\u24c8",
    "circledast;": "\u229b",
    "circledcirc;": "\u229a",
    "circleddash;": "\u229d",
    "cire;": "\u2257",
    "cirfnint;": "\u2a10",
    "cirmid;": "\u2aef",
    "cirscir;": "\u29c2",
    "clubs;": "\u2663",
    "clubsuit;": "\u2663",
    "colon;": ":",
    "colone;": "\u2254",
    "coloneq;": "\u2254",
    "comma;": ",",
    "commat;": "@",
    "comp;": "\u2201",
    "compfn;": "\u2218",
    "complement;": "\u2201",
    "complexes;": "\u2102",
    "cong;": "\u2245",
    "congdot;": "\u2a6d",
    "conint;": "\u222e",
    "copf;": "\U0001d554",
    "coprod;": "\u2210",
    "copy": "\xa9",
    "copy;": "\xa9",
    "copysr;": "\u2117",
    "crarr;": "\u21b5",
    "cross;": "\u2717",
    "cscr;": "\U0001d4b8",
    "csub;": "\u2acf",
    "csube;": "\u2ad1",
    "csup;": "\u2ad0",
    "csupe;": "\u2ad2",
    "ctdot;": "\u22ef",
    "cudarrl;": "\u2938",
    "cudarrr;": "\u2935",
    "cuepr;": "\u22de",
    "cuesc;": "\u22df",
    "cularr;": "\u21b6",
    "cularrp;": "\u293d",
    "cup;": "\u222a",
    "cupbrcap;": "\u2a48",
    "cupcap;": "\u2a46",
    "cupcup;": "\u2a4a",
    "cupdot;": "\u228d",
    "cupor;": "\u2a45",
    "cups;": "\u222a\ufe00",
    "curarr;": "\u21b7",
    "curarrm;": "\u293c",
    "curlyeqprec;": "\u22de",
    "curlyeqsucc;": "\u22df",
    "curlyvee;": "\u22ce",
    "curlywedge;": "\u22cf",
    "curren": "\xa4",
    "curren;": "\xa4",
    "curvearrowleft;": "\u21b6",
    "curvearrowright;": "\u21b7",
    "cuvee;": "\u22ce",
    "cuwed;": "\u22cf",
    "cwconint;": "\u2232",
    "cwint;": "\u2231",
    "cylcty;": "\u232d",
    "dArr;": "\u21d3",
    "dHar;": "\u2965",
    "dagger;": "\u2020",
    "daleth;": "\u2138",
    "darr;": "\u2193",
    "dash;": "\u2010",
    "dashv;": "\u22a3",
    "dbkarow;": "\u290f",
    "dblac;": "\u02dd",
    "dcaron;": "\u010f",
    "dcy;": "\u0434",
    "dd;": "\u2146",
    "ddagger;": "\u2021",
    "ddarr;": "\u21ca",
    "ddotseq;": "\u2a77",
    "deg": "\xb0",
    "deg;": "\xb0",
    "delta;": "\u03b4",
    "demptyv;": "\u29b1",
    "dfisht;": "\u297f",
    "dfr;": "\U0001d521",
    "dharl;": "\u21c3",
    "dharr;": "\u21c2",
    "diam;": "\u22c4",
    "diamond;": "\u22c4",
    "diamondsuit;": "\u2666",
    "diams;": "\u2666",
    "die;": "\xa8",
    "digamma;": "\u03dd",
    "disin;": "\u22f2",
    "div;": "\xf7",
    "divide": "\xf7",
    "divide;": "\xf7",
    "divideontimes;": "\u22c7",
    "divonx;": "\u22c7",
    "djcy;": "\u0452",
    "dlcorn;": "\u231e",
    "dlcrop;": "\u230d",
    "dollar;": "$",
    "dopf;": "\U0001d555",
    "dot;": "\u02d9",
    "doteq;": "\u2250",
    "doteqdot;": "\u2251",
    "dotminus;": "\u2238",
    "dotplus;": "\u2214",
    "dotsquare;": "\u22a1",
    "doublebarwedge;": "\u2306",
    "downarrow;": "\u2193",
    "downdownarrows;": "\u21ca",
    "downharpoonleft;": "\u21c3",
    "downharpoonright;": "\u21c2",
    "drbkarow;": "\u2910",
    "drcorn;": "\u231f",
    "drcrop;": "\u230c",
    "dscr;": "\U0001d4b9",
    "dscy;": "\u0455",
    "dsol;": "\u29f6",
    "dstrok;": "\u0111",
    "dtdot;": "\u22f1",
    "dtri;": "\u25bf",
    "dtrif;": "\u25be",
    "duarr;": "\u21f5",
    "duhar;": "\u296f",
    "dwangle;": "\u29a6",
    "dzcy;": "\u045f",
    "dzigrarr;": "\u27ff",
    "eDDot;": "\u2a77",
    "eDot;": "\u2251",
    "eacute": "\xe9",
    "eacute;": "\xe9",
    "easter;": "\u2a6e",
    "ecaron;": "\u011b",
    "ecir;": "\u2256",
    "ecirc": "\xea",
    "ecirc;": "\xea",
    "ecolon;": "\u2255",
    "ecy;": "\u044d",
    "edot;": "\u0117",
    "ee;": "\u2147",
    "efDot;": "\u2252",
    "efr;": "\U0001d522",
    "eg;": "\u2a9a",
    "egrave": "\xe8",
    "egrave;": "\xe8",
    "egs;": "\u2a96",
    "egsdot;": "\u2a98",
    "el;": "\u2a99",
    "elinters;": "\u23e7",
    "ell;": "\u2113",
    "els;": "\u2a95",
    "elsdot;": "\u2a97",
    "emacr;": "\u0113",
    "empty;": "\u2205",
    "emptyset;": "\u2205",
    "emptyv;": "\u2205",
    "emsp13;": "\u2004",
    "emsp14;": "\u2005",
    "emsp;": "\u2003",
    "eng;": "\u014b",
    "ensp;": "\u2002",
    "eogon;": "\u0119",
    "eopf;": "\U0001d556",
    "epar;": "\u22d5",
    "eparsl;": "\u29e3",
    "eplus;": "\u2a71",
    "epsi;": "\u03b5",
    "epsilon;": "\u03b5",
    "epsiv;": "\u03f5",
    "eqcirc;": "\u2256",
    "eqcolon;": "\u2255",
    "eqsim;": "\u2242",
    "eqslantgtr;": "\u2a96",
    "eqslantless;": "\u2a95",
    "equals;": "=",
    "equest;": "\u225f",
    "equiv;": "\u2261",
    "equivDD;": "\u2a78",
    "eqvparsl;": "\u29e5",
    "erDot;": "\u2253",
    "erarr;": "\u2971",
    "escr;": "\u212f",
    "esdot;": "\u2250",
    "esim;": "\u2242",
    "eta;": "\u03b7",
    "eth": "\xf0",
    "eth;": "\xf0",
    "euml": "\xeb",
    "euml;": "\xeb",
    "euro;": "\u20ac",
    "excl;": "!",
    "exist;": "\u2203",
    "expectation;": "\u2130",
    "exponentiale;": "\u2147",
    "fallingdotseq;": "\u2252",
    "fcy;": "\u0444",
    "female;": "\u2640",
    "ffilig;": "\ufb03",
    "fflig;": "\ufb00",
    "ffllig;": "\ufb04",
    "ffr;": "\U0001d523",
    "filig;": "\ufb01",
    "fjlig;": "fj",
    "flat;": "\u266d",
    "fllig;": "\ufb02",
    "fltns;": "\u25b1",
    "fnof;": "\u0192",
    "fopf;": "\U0001d557",
    "forall;": "\u2200",
    "fork;": "\u22d4",
    "forkv;": "\u2ad9",
    "fpartint;": "\u2a0d",
    "frac12": "\xbd",
    "frac12;": "\xbd",
    "frac13;": "\u2153",
    "frac14": "\xbc",
    "frac14;": "\xbc",
    "frac15;": "\u2155",
    "frac16;": "\u2159",
    "frac18;": "\u215b",
    "frac23;": "\u2154",
    "frac25;": "\u2156",
    "frac34": "\xbe",
    "frac34;": "\xbe",
    "frac35;": "\u2157",
    "frac38;": "\u215c",
    "frac45;": "\u2158",
    "frac56;": "\u215a",
    "frac58;": "\u215d",
    "frac78;": "\u215e",
    "frasl;": "\u2044",
    "frown;": "\u2322",
    "fscr;": "\U0001d4bb",
    "gE;": "\u2267",
    "gEl;": "\u2a8c",
    "gacute;": "\u01f5",
    "gamma;": "\u03b3",
    "gammad;": "\u03dd",
    "gap;": "\u2a86",
    "gbreve;": "\u011f",
    "gcirc;": "\u011d",
    "gcy;": "\u0433",
    "gdot;": "\u0121",
    "ge;": "\u2265",
    "gel;": "\u22db",
    "geq;": "\u2265",
    "geqq;": "\u2267",
    "geqslant;": "\u2a7e",
    "ges;": "\u2a7e",
    "gescc;": "\u2aa9",
    "gesdot;": "\u2a80",
    "gesdoto;": "\u2a82",
    "gesdotol;": "\u2a84",
    "gesl;": "\u22db\ufe00",
    "gesles;": "\u2a94",
    "gfr;": "\U0001d524",
    "gg;": "\u226b",
    "ggg;": "\u22d9",
    "gimel;": "\u2137",
    "gjcy;": "\u0453",
    "gl;": "\u2277",
    "glE;": "\u2a92",
    "gla;": "\u2aa5",
    "glj;": "\u2aa4",
    "gnE;": "\u2269",
    "gnap;": "\u2a8a",
    "gnapprox;": "\u2a8a",
    "gne;": "\u2a88",
    "gneq;": "\u2a88",
    "gneqq;": "\u2269",
    "gnsim;": "\u22e7",
    "gopf;": "\U0001d558",
    "grave;": "`",
    "gscr;": "\u210a",
    "gsim;": "\u2273",
    "gsime;": "\u2a8e",
    "gsiml;": "\u2a90",
    "gt": ">",
    "gt;": ">",
    "gtcc;": "\u2aa7",
    "gtcir;": "\u2a7a",
    "gtdot;": "\u22d7",
    "gtlPar;": "\u2995",
    "gtquest;": "\u2a7c",
    "gtrapprox;": "\u2a86",
    "gtrarr;": "\u2978",
    "gtrdot;": "\u22d7",
    "gtreqless;": "\u22db",
    "gtreqqless;": "\u2a8c",
    "gtrless;": "\u2277",
    "gtrsim;": "\u2273",
    "gvertneqq;": "\u2269\ufe00",
    "gvnE;": "\u2269\ufe00",
    "hArr;": "\u21d4",
    "hairsp;": "\u200a",
    "half;": "\xbd",
    "hamilt;": "\u210b",
    "hardcy;": "\u044a",
    "harr;": "\u2194",
    "harrcir;": "\u2948",
    "harrw;": "\u21ad",
    "hbar;": "\u210f",
    "hcirc;": "\u0125",
    "hearts;": "\u2665",
    "heartsuit;": "\u2665",
    "hellip;": "\u2026",
    "hercon;": "\u22b9",
    "hfr;": "\U0001d525",
    "hksearow;": "\u2925",
    "hkswarow;": "\u2926",
    "hoarr;": "\u21ff",
    "homtht;": "\u223b",
    "hookleftarrow;": "\u21a9",
    "hookrightarrow;": "\u21aa",
    "hopf;": "\U0001d559",
    "horbar;": "\u2015",
    "hscr;": "\U0001d4bd",
    "hslash;": "\u210f",
    "hstrok;": "\u0127",
    "hybull;": "\u2043",
    "hyphen;": "\u2010",
    "iacute": "\xed",
    "iacute;": "\xed",
    "ic;": "\u2063",
    "icirc": "\xee",
    "icirc;": "\xee",
    "icy;": "\u0438",
    "iecy;": "\u0435",
    "iexcl": "\xa1",
    "iexcl;": "\xa1",
    "iff;": "\u21d4",
    "ifr;": "\U0001d526",
    "igrave": "\xec",
    "igrave;": "\xec",
    "ii;": "\u2148",
    "iiiint;": "\u2a0c",
    "iiint;": "\u222d",
    "iinfin;": "\u29dc",
    "iiota;": "\u2129",
    "ijlig;": "\u0133",
    "imacr;": "\u012b",
    "image;": "\u2111",
    "imagline;": "\u2110",
    "imagpart;": "\u2111",
    "imath;": "\u0131",
    "imof;": "\u22b7",
    "imped;": "\u01b5",
    "in;": "\u2208",
    "incare;": "\u2105",
    "infin;": "\u221e",
    "infintie;": "\u29dd",
    "inodot;": "\u0131",
    "int;": "\u222b",
    "intcal;": "\u22ba",
    "integers;": "\u2124",
    "intercal;": "\u22ba",
    "intlarhk;": "\u2a17",
    "intprod;": "\u2a3c",
    "iocy;": "\u0451",
    "iogon;": "\u012f",
    "iopf;": "\U0001d55a",
    "iota;": "\u03b9",
    "iprod;": "\u2a3c",
    "iquest": "\xbf",
    "iquest;": "\xbf",
    "iscr;": "\U0001d4be",
    "isin;": "\u2208",
    "isinE;": "\u22f9",
    "isindot;": "\u22f5",
    "isins;": "\u22f4",
    "isinsv;": "\u22f3",
    "isinv;": "\u2208",
    "it;": "\u2062",
    "itilde;": "\u0129",
    "iukcy;": "\u0456",
    "iuml": "\xef",
    "iuml;": "\xef",
    "jcirc;": "\u0135",
    "jcy;": "\u0439",
    "jfr;": "\U0001d527",
    "jmath;": "\u0237",
    "jopf;": "\U0001d55b",
    "jscr;": "\U0001d4bf",
    "jsercy;": "\u0458",
    "jukcy;": "\u0454",
    "kappa;": "\u03ba",
    "kappav;": "\u03f0",
    "kcedil;": "\u0137",
    "kcy;": "\u043a",
    "kfr;": "\U0001d528",
    "kgreen;": "\u0138",
    "khcy;": "\u0445",
    "kjcy;": "\u045c",
    "kopf;": "\U0001d55c",
    "kscr;": "\U0001d4c0",
    "lAarr;": "\u21da",
    "lArr;": "\u21d0",
    "lAtail;": "\u291b",
    "lBarr;": "\u290e",
    "lE;": "\u2266",
    "lEg;": "\u2a8b",
    "lHar;": "\u2962",
    "lacute;": "\u013a",
    "laemptyv;": "\u29b4",
    "lagran;": "\u2112",
    "lambda;": "\u03bb",
    "lang;": "\u27e8",
    "langd;": "\u2991",
    "langle;": "\u27e8",
    "lap;": "\u2a85",
    "laquo": "\xab",
    "laquo;": "\xab",
    "larr;": "\u2190",
    "larrb;": "\u21e4",
    "larrbfs;": "\u291f",
    "larrfs;": "\u291d",
    "larrhk;": "\u21a9",
    "larrlp;": "\u21ab",
    "larrpl;": "\u2939",
    "larrsim;": "\u2973",
    "larrtl;": "\u21a2",
    "lat;": "\u2aab",
    "latail;": "\u2919",
    "late;": "\u2aad",
    "lates;": "\u2aad\ufe00",
    "lbarr;": "\u290c",
    "lbbrk;": "\u2772",
    "lbrace;": "{",
    "lbrack;": "[",
    "lbrke;": "\u298b",
    "lbrksld;": "\u298f",
    "lbrkslu;": "\u298d",
    "lcaron;": "\u013e",
    "lcedil;": "\u013c",
    "lceil;": "\u2308",
    "lcub;": "{",
    "lcy;": "\u043b",
    "ldca;": "\u2936",
    "ldquo;": "\u201c",
    "ldquor;": "\u201e",
    "ldrdhar;": "\u2967",
    "ldrushar;": "\u294b",
    "ldsh;": "\u21b2",
    "le;": "\u2264",
    "leftarrow;": "\u2190",
    "leftarrowtail;": "\u21a2",
    "leftharpoondown;": "\u21bd",
    "leftharpoonup;": "\u21bc",
    "leftleftarrows;": "\u21c7",
    "leftrightarrow;": "\u2194",
    "leftrightarrows;": "\u21c6",
    "leftrightharpoons;": "\u21cb",
    "leftrightsquigarrow;": "\u21ad",
    "leftthreetimes;": "\u22cb",
    "leg;": "\u22da",
    "leq;": "\u2264",
    "leqq;": "\u2266",
    "leqslant;": "\u2a7d",
    "les;": "\u2a7d",
    "lescc;": "\u2aa8",
    "lesdot;": "\u2a7f",
    "lesdoto;": "\u2a81",
    "lesdotor;": "\u2a83",
    "lesg;": "\u22da\ufe00",
    "lesges;": "\u2a93",
    "lessapprox;": "\u2a85",
    "lessdot;": "\u22d6",
    "lesseqgtr;": "\u22da",
    "lesseqqgtr;": "\u2a8b",
    "lessgtr;": "\u2276",
    "lesssim;": "\u2272",
    "lfisht;": "\u297c",
    "lfloor;": "\u230a",
    "lfr;": "\U0001d529",
    "lg;": "\u2276",
    "lgE;": "\u2a91",
    "lhard;": "\u21bd",
    "lharu;": "\u21bc",
    "lharul;": "\u296a",
    "lhblk;": "\u2584",
    "ljcy;": "\u0459",
    "ll;": "\u226a",
    "llarr;": "\u21c7",
    "llcorner;": "\u231e",
    "llhard;": "\u296b",
    "lltri;": "\u25fa",
    "lmidot;": "\u0140",
    "lmoust;": "\u23b0",
    "lmoustache;": "\u23b0",
    "lnE;": "\u2268",
    "lnap;": "\u2a89",
    "lnapprox;": "\u2a89",
    "lne;": "\u2a87",
    "lneq;": "\u2a87",
    "lneqq;": "\u2268",
    "lnsim;": "\u22e6",
    "loang;": "\u27ec",
    "loarr;": "\u21fd",
    "lobrk;": "\u27e6",
    "longleftarrow;": "\u27f5",
    "longleftrightarrow;": "\u27f7",
    "longmapsto;": "\u27fc",
    "longrightarrow;": "\u27f6",
    "looparrowleft;": "\u21ab",
    "looparrowright;": "\u21ac",
    "lopar;": "\u2985",
    "lopf;": "\U0001d55d",
    "loplus;": "\u2a2d",
    "lotimes;": "\u2a34",
    "lowast;": "\u2217",
    "lowbar;": "_",
    "loz;": "\u25ca",
    "lozenge;": "\u25ca",
    "lozf;": "\u29eb",
    "lpar;": "(",
    "lparlt;": "\u2993",
    "lrarr;": "\u21c6",
    "lrcorner;": "\u231f",
    "lrhar;": "\u21cb",
    "lrhard;": "\u296d",
    "lrm;": "\u200e",
    "lrtri;": "\u22bf",
    "lsaquo;": "\u2039",
    "lscr;": "\U0001d4c1",
    "lsh;": "\u21b0",
    "lsim;": "\u2272",
    "lsime;": "\u2a8d",
    "lsimg;": "\u2a8f",
    "lsqb;": "[",
    "lsquo;": "\u2018",
    "lsquor;": "\u201a",
    "lstrok;": "\u0142",
    "lt": "<",
    "lt;": "<",
    "ltcc;": "\u2aa6",
    "ltcir;": "\u2a79",
    "ltdot;": "\u22d6",
    "lthree;": "\u22cb",
    "ltimes;": "\u22c9",
    "ltlarr;": "\u2976",
    "ltquest;": "\u2a7b",
    "ltrPar;": "\u2996",
    "ltri;": "\u25c3",
    "ltrie;": "\u22b4",
    "ltrif;": "\u25c2",
    "lurdshar;": "\u294a",
    "luruhar;": "\u2966",
    "lvertneqq;": "\u2268\ufe00",
    "lvnE;": "\u2268\ufe00",
    "mDDot;": "\u223a",
    "macr": "\xaf",
    "macr;": "\xaf",
    "male;": "\u2642",
    "malt;": "\u2720",
    "maltese;": "\u2720",
    "map;": "\u21a6",
    "mapsto;": "\u21a6",
    "mapstodown;": "\u21a7",
    "mapstoleft;": "\u21a4",
    "mapstoup;": "\u21a5",
    "marker;": "\u25ae",
    "mcomma;": "\u2a29",
    "mcy;": "\u043c",
    "mdash;": "\u2014",
    "measuredangle;": "\u2221",
    "mfr;": "\U0001d52a",
    "mho;": "\u2127",
    "micro": "\xb5",
    "micro;": "\xb5",
    "mid;": "\u2223",
    "midast;": "*",
    "midcir;": "\u2af0",
    "middot": "\xb7",
    "middot;": "\xb7",
    "minus;": "\u2212",
    "minusb;": "\u229f",
    "minusd;": "\u2238",
    "minusdu;": "\u2a2a",
    "mlcp;": "\u2adb",
    "mldr;": "\u2026",
    "mnplus;": "\u2213",
    "models;": "\u22a7",
    "mopf;": "\U0001d55e",
    "mp;": "\u2213",
    "mscr;": "\U0001d4c2",
    "mstpos;": "\u223e",
    "mu;": "\u03bc",
    "multimap;": "\u22b8",
    "mumap;": "\u22b8",
    "nGg;": "\u22d9\u0338",
    "nGt;": "\u226b\u20d2",
    "nGtv;": "\u226b\u0338",
    "nLeftarrow;": "\u21cd",
    "nLeftrightarrow;": "\u21ce",
    "nLl;": "\u22d8\u0338",
    "nLt;": "\u226a\u20d2",
    "nLtv;": "\u226a\u0338",
    "nRightarrow;": "\u21cf",
    "nVDash;": "\u22af",
    "nVdash;": "\u22ae",
    "nabla;": "\u2207",
    "nacute;": "\u0144",
    "nang;": "\u2220\u20d2",
    "nap;": "\u2249",
    "napE;": "\u2a70\u0338",
    "napid;": "\u224b\u0338",
    "napos;": "\u0149",
    "napprox;": "\u2249",
    "natur;": "\u266e",
    "natural;": "\u266e",
    "naturals;": "\u2115",
    "nbsp": "\xa0",
    "nbsp;": "\xa0",
    "nbump;": "\u224e\u0338",
    "nbumpe;": "\u224f\u0338",
    "ncap;": "\u2a43",
    "ncaron;": "\u0148",
    "ncedil;": "\u0146",
    "ncong;": "\u2247",
    "ncongdot;": "\u2a6d\u0338",
    "ncup;": "\u2a42",
    "ncy;": "\u043d",
    "ndash;": "\u2013",
    "ne;": "\u2260",
    "neArr;": "\u21d7",
    "nearhk;": "\u2924",
    "nearr;": "\u2197",
    "nearrow;": "\u2197",
    "nedot;": "\u2250\u0338",
    "nequiv;": "\u2262",
    "nesear;": "\u2928",
    "nesim;": "\u2242\u0338",
    "nexist;": "\u2204",
    "nexists;": "\u2204",
    "nfr;": "\U0001d52b",
    "ngE;": "\u2267\u0338",
    "nge;": "\u2271",
    "ngeq;": "\u2271",
    "ngeqq;": "\u2267\u0338",
    "ngeqslant;": "\u2a7e\u0338",
    "nges;": "\u2a7e\u0338",
    "ngsim;": "\u2275",
    "ngt;": "\u226f",
    "ngtr;": "\u226f",
    "nhArr;": "\u21ce",
    "nharr;": "\u21ae",
    "nhpar;": "\u2af2",
    "ni;": "\u220b",
    "nis;": "\u22fc",
    "nisd;": "\u22fa",
    "niv;": "\u220b",
    "njcy;": "\u045a",
    "nlArr;": "\u21cd",
    "nlE;": "\u2266\u0338",
    "nlarr;": "\u219a",
    "nldr;": "\u2025",
    "nle;": "\u2270",
    "nleftarrow;": "\u219a",
    "nleftrightarrow;": "\u21ae",
    "nleq;": "\u2270",
    "nleqq;": "\u2266\u0338",
    "nleqslant;": "\u2a7d\u0338",
    "nles;": "\u2a7d\u0338",
    "nless;": "\u226e",
    "nlsim;": "\u2274",
    "nlt;": "\u226e",
    "nltri;": "\u22ea",
    "nltrie;": "\u22ec",
    "nmid;": "\u2224",
    "nopf;": "\U0001d55f",
    "not": "\xac",
    "not;": "\xac",
    "notin;": "\u2209",
    "notinE;": "\u22f9\u0338",
    "notindot;": "\u22f5\u0338",
    "notinva;": "\u2209",
    "notinvb;": "\u22f7",
    "notinvc;": "\u22f6",
    "notni;": "\u220c",
    "notniva;": "\u220c",
    "notnivb;": "\u22fe",
    "notnivc;": "\u22fd",
    "npar;": "\u2226",
    "nparallel;": "\u2226",
    "nparsl;": "\u2afd\u20e5",
    "npart;": "\u2202\u0338",
    "npolint;": "\u2a14",
    "npr;": "\u2280",
    "nprcue;": "\u22e0",
    "npre;": "\u2aaf\u0338",
    "nprec;": "\u2280",
    "npreceq;": "\u2aaf\u0338",
    "nrArr;": "\u21cf",
    "nrarr;": "\u219b",
    "nrarrc;": "\u2933\u0338",
    "nrarrw;": "\u219d\u0338",
    "nrightarrow;": "\u219b",
    "nrtri;": "\u22eb",
    "nrtrie;": "\u22ed",
    "nsc;": "\u2281",
    "nsccue;": "\u22e1",
    "nsce;": "\u2ab0\u0338",
    "nscr;": "\U0001d4c3",
    "nshortmid;": "\u2224",
    "nshortparallel;": "\u2226",
    "nsim;": "\u2241",
    "nsime;": "\u2244",
    "nsimeq;": "\u2244",
    "nsmid;": "\u2224",
    "nspar;": "\u2226",
    "nsqsube;": "\u22e2",
    "nsqsupe;": "\u22e3",
    "nsub;": "\u2284",
    "nsubE;": "\u2ac5\u0338",
    "nsube;": "\u2288",
    "nsubset;": "\u2282\u20d2",
    "nsubseteq;": "\u2288",
    "nsubseteqq;": "\u2ac5\u0338",
    "nsucc;": "\u2281",
    "nsucceq;": "\u2ab0\u0338",
    "nsup;": "\u2285",
    "nsupE;": "\u2ac6\u0338",
    "nsupe;": "\u2289",
    "nsupset;": "\u2283\u20d2",
    "nsupseteq;": "\u2289",
    "nsupseteqq;": "\u2ac6\u0338",
    "ntgl;": "\u2279",
    "ntilde": "\xf1",
    "ntilde;": "\xf1",
    "ntlg;": "\u2278",
    "ntriangleleft;": "\u22ea",
    "ntrianglelefteq;": "\u22ec",
    "ntriangleright;": "\u22eb",
    "ntrianglerighteq;": "\u22ed",
    "nu;": "\u03bd",
    "num;": "#",
    "numero;": "\u2116",
    "numsp;": "\u2007",
    "nvDash;": "\u22ad",
    "nvHarr;": "\u2904",
    "nvap;": "\u224d\u20d2",
    "nvdash;": "\u22ac",
    "nvge;": "\u2265\u20d2",
    "nvgt;": ">\u20d2",
    "nvinfin;": "\u29de",
    "nvlArr;": "\u2902",
    "nvle;": "\u2264\u20d2",
    "nvlt;": "<\u20d2",
    "nvltrie;": "\u22b4\u20d2",
    "nvrArr;": "\u2903",
    "nvrtrie;": "\u22b5\u20d2",
    "nvsim;": "\u223c\u20d2",
    "nwArr;": "\u21d6",
    "nwarhk;": "\u2923",
    "nwarr;": "\u2196",
    "nwarrow;": "\u2196",
    "nwnear;": "\u2927",
    "oS;": "\u24c8",
    "oacute": "\xf3",
    "oacute;": "\xf3",
    "oast;": "\u229b",
    "ocir;": "\u229a",
    "ocirc": "\xf4",
    "ocirc;": "\xf4",
    "ocy;": "\u043e",
    "odash;": "\u229d",
    "odblac;": "\u0151",
    "odiv;": "\u2a38",
    "odot;": "\u2299",
    "odsold;": "\u29bc",
    "oelig;": "\u0153",
    "ofcir;": "\u29bf",
    "ofr;": "\U0001d52c",
    "ogon;": "\u02db",
    "ograve": "\xf2",
    "ograve;": "\xf2",
    "ogt;": "\u29c1",
    "ohbar;": "\u29b5",
    "ohm;": "\u03a9",
    "oint;": "\u222e",
    "olarr;": "\u21ba",
    "olcir;": "\u29be",
    "olcross;": "\u29bb",
    "oline;": "\u203e",
    "olt;": "\u29c0",
    "omacr;": "\u014d",
    "omega;": "\u03c9",
    "omicron;": "\u03bf",
    "omid;": "\u29b6",
    "ominus;": "\u2296",
    "oopf;": "\U0001d560",
    "opar;": "\u29b7",
    "operp;": "\u29b9",
    "oplus;": "\u2295",
    "or;": "\u2228",
    "orarr;": "\u21bb",
    "ord;": "\u2a5d",
    "order;": "\u2134",
    "orderof;": "\u2134",
    "ordf": "\xaa",
    "ordf;": "\xaa",
    "ordm": "\xba",
    "ordm;": "\xba",
    "origof;": "\u22b6",
    "oror;": "\u2a56",
    "orslope;": "\u2a57",
    "orv;": "\u2a5b",
    "oscr;": "\u2134",
    "oslash": "\xf8",
    "oslash;": "\xf8",
    "osol;": "\u2298",
    "otilde": "\xf5",
    "otilde;": "\xf5",
    "otimes;": "\u2297",
    "otimesas;": "\u2a36",
    "ouml": "\xf6",
    "ouml;": "\xf6",
    "ovbar;": "\u233d",
    "par;": "\u2225",
    "para": "\xb6",
    "para;": "\xb6",
    "parallel;": "\u2225",
    "parsim;": "\u2af3",
    "parsl;": "\u2afd",
    "part;": "\u2202",
    "pcy;": "\u043f",
    "percnt;": "%",
    "period;": ".",
    "permil;": "\u2030",
    "perp;": "\u22a5",
    "pertenk;": "\u2031",
    "pfr;": "\U0001d52d",
    "phi;": "\u03c6",
    "phiv;": "\u03d5",
    "phmmat;": "\u2133",
    "phone;": "\u260e",
    "pi;": "\u03c0",
    "pitchfork;": "\u22d4",
    "piv;": "\u03d6",
    "planck;": "\u210f",
    "planckh;": "\u210e",
    "plankv;": "\u210f",
    "plus;": "+",
    "plusacir;": "\u2a23",
    "plusb;": "\u229e",
    "pluscir;": "\u2a22",
    "plusdo;": "\u2214",
    "plusdu;": "\u2a25",
    "pluse;": "\u2a72",
    "plusmn": "\xb1",
    "plusmn;": "\xb1",
    "plussim;": "\u2a26",
    "plustwo;": "\u2a27",
    "pm;": "\xb1",
    "pointint;": "\u2a15",
    "popf;": "\U0001d561",
    "pound": "\xa3",
    "pound;": "\xa3",
    "pr;": "\u227a",
    "prE;": "\u2ab3",
    "prap;": "\u2ab7",
    "prcue;": "\u227c",
    "pre;": "\u2aaf",
    "prec;": "\u227a",
    "precapprox;": "\u2ab7",
    "preccurlyeq;": "\u227c",
    "preceq;": "\u2aaf",
    "precnapprox;": "\u2ab9",
    "precneqq;": "\u2ab5",
    "precnsim;": "\u22e8",
    "precsim;": "\u227e",
    "prime;": "\u2032",
    "primes;": "\u2119",
    "prnE;": "\u2ab5",
    "prnap;": "\u2ab9",
    "prnsim;": "\u22e8",
    "prod;": "\u220f",
    "profalar;": "\u232e",
    "profline;": "\u2312",
    "profsurf;": "\u2313",
    "prop;": "\u221d",
    "propto;": "\u221d",
    "prsim;": "\u227e",
    "prurel;": "\u22b0",
    "pscr;": "\U0001d4c5",
    "psi;": "\u03c8",
    "puncsp;": "\u2008",
    "qfr;": "\U0001d52e",
    "qint;": "\u2a0c",
    "qopf;": "\U0001d562",
    "qprime;": "\u2057",
    "qscr;": "\U0001d4c6",
    "quaternions;": "\u210d",
    "quatint;": "\u2a16",
    "quest;": "?",
    "questeq;": "\u225f",
    "quot": "\"",
    "quot;": "\"",
    "rAarr;": "\u21db",
    "rArr;": "\u21d2",
    "rAtail;": "\u291c",
    "rBarr;": "\u290f",
    "rHar;": "\u2964",
    "race;": "\u223d\u0331",
    "racute;": "\u0155",
    "radic;": "\u221a",
    "raemptyv;": "\u29b3",
    "rang;": "\u27e9",
    "rangd;": "\u2992",
    "range;": "\u29a5",
    "rangle;": "\u27e9",
    "raquo": "\xbb",
    "raquo;": "\xbb",
    "rarr;": "\u2192",
    "rarrap;": "\u2975",
    "rarrb;": "\u21e5",
    "rarrbfs;": "\u2920",
    "rarrc;": "\u2933",
    "rarrfs;": "\u291e",
    "rarrhk;": "\u21aa",
    "rarrlp;": "\u21ac",
    "rarrpl;": "\u2945",
    "rarrsim;": "\u2974",
    "rarrtl;": "\u21a3",
    "rarrw;": "\u219d",
    "ratail;": "\u291a",
    "ratio;": "\u2236",
    "rationals;": "\u211a",
    "rbarr;": "\u290d",
    "rbbrk;": "\u2773",
    "rbrace;": "}",
    "rbrack;": "]",
    "rbrke;": "\u298c",
    "rbrksld;": "\u298e",
    "rbrkslu;": "\u2990",
    "rcaron;": "\u0159",
    "rcedil;": "\u0157",
    "rceil;": "\u2309",
    "rcub;": "}",
    "rcy;": "\u0440",
    "rdca;": "\u2937",
    "rdldhar;": "\u2969",
    "rdquo;": "\u201d",
    "rdquor;": "\u201d",
    "rdsh;": "\u21b3",
    "real;": "\u211c",
    "realine;": "\u211b",
    "realpart;": "\u211c",
    "reals;": "\u211d",
    "rect;": "\u25ad",
    "reg": "\xae",
    "reg;": "\xae",
    "rfisht;": "\u297d",
    "rfloor;": "\u230b",
    "rfr;": "\U0001d52f",
    "rhard;": "\u21c1",
    "rharu;": "\u21c0",
    "rharul;": "\u296c",
    "rho;": "\u03c1",
    "rhov;": "\u03f1",
    "rightarrow;": "\u2192",
    "rightarrowtail;": "\u21a3",
    "rightharpoondown;": "\u21c1",
    "rightharpoonup;": "\u21c0",
    "rightleftarrows;": "\u21c4",
    "rightleftharpoons;": "\u21cc",
    "rightrightarrows;": "\u21c9",
    "rightsquigarrow;": "\u219d",
    "rightthreetimes;": "\u22cc",
    "ring;": "\u02da",
    "risingdotseq;": "\u2253",
    "rlarr;": "\u21c4",
    "rlhar;": "\u21cc",
    "rlm;": "\u200f",
    "rmoust;": "\u23b1",
    "rmoustache;": "\u23b1",
    "rnmid;": "\u2aee",
    "roang;": "\u27ed",
    "roarr;": "\u21fe",
    "robrk;": "\u27e7",
    "ropar;": "\u2986",
    "ropf;": "\U0001d563",
    "roplus;": "\u2a2e",
    "rotimes;": "\u2a35",
    "rpar;": ")",
    "rpargt;": "\u2994",
    "rppolint;": "\u2a12",
    "rrarr;": "\u21c9",
    "rsaquo;": "\u203a",
    "rscr;": "\U0001d4c7",
    "rsh;": "\u21b1",
    "rsqb;": "]",
    "rsquo;": "\u2019",
    "rsquor;": "\u2019",
    "rthree;": "\u22cc",
    "rtimes;": "\u22ca",
    "rtri;": "\u25b9",
    "rtrie;": "\u22b5",
    "rtrif;": "\u25b8",
    "rtriltri;": "\u29ce",
    "ruluhar;": "\u2968",
    "rx;": "\u211e",
    "sacute;": "\u015b",
    "sbquo;": "\u201a",
    "sc;": "\u227b",
    "scE;": "\u2ab4",
    "scap;": "\u2ab8",
    "scaron;": "\u0161",
    "sccue;": "\u227d",
    "sce;": "\u2ab0",
    "scedil;": "\u015f",
    "scirc;": "\u015d",
    "scnE;": "\u2ab6",
    "scnap;": "\u2aba",
    "scnsim;": "\u22e9",
    "scpolint;": "\u2a13",
    "scsim;": "\u227f",
    "scy;": "\u0441",
    "sdot;": "\u22c5",
    "sdotb;": "\u22a1",
    "sdote;": "\u2a66",
    "seArr;": "\u21d8",
    "searhk;": "\u2925",
    "searr;": "\u2198",
    "searrow;": "\u2198",
    "sect": "\xa7",
    "sect;": "\xa7",
    "semi;": ";",
    "seswar;": "\u2929",
    "setminus;": "\u2216",
    "setmn;": "\u2216",
    "sext;": "\u2736",
    "sfr;": "\U0001d530",
    "sfrown;": "\u2322",
    "sharp;": "\u266f",
    "shchcy;": "\u0449",
    "shcy;": "\u0448",
    "shortmid;": "\u2223",
    "shortparallel;": "\u2225",
    "shy": "\xad",
    "shy;": "\xad",
    "sigma;": "\u03c3",
    "sigmaf;": "\u03c2",
    "sigmav;": "\u03c2",
    "sim;": "\u223c",
    "simdot;": "\u2a6a",
    "sime;": "\u2243",
    "simeq;": "\u2243",
    "simg;": "\u2a9e",
    "simgE;": "\u2aa0",
    "siml;": "\u2a9d",
    "simlE;": "\u2a9f",
    "simne;": "\u2246",
    "simplus;": "\u2a24",
    "simrarr;": "\u2972",
    "slarr;": "\u2190",
    "smallsetminus;": "\u2216",
    "smashp;": "\u2a33",
    "smeparsl;": "\u29e4",
    "smid;": "\u2223",
    "smile;": "\u2323",
    "smt;": "\u2aaa",
    "smte;": "\u2aac",
    "smtes;": "\u2aac\ufe00",
    "softcy;": "\u044c",
    "sol;": "/",
    "solb;": "\u29c4",
    "solbar;": "\u233f",
    "sopf;": "\U0001d564",
    "spades;": "\u2660",
    "spadesuit;": "\u2660",
    "spar;": "\u2225",
    "sqcap;": "\u2293",
    "sqcaps;": "\u2293\ufe00",
    "sqcup;": "\u2294",
    "sqcups;": "\u2294\ufe00",
    "sqsub;": "\u228f",
    "sqsube;": "\u2291",
    "sqsubset;": "\u228f",
    "sqsubseteq;": "\u2291",
    "sqsup;": "\u2290",
    "sqsupe;": "\u2292",
    "sqsupset;": "\u2290",
    "sqsupseteq;": "\u2292",
    "squ;": "\u25a1",
    "square;": "\u25a1",
    "squarf;": "\u25aa",
    "squf;": "\u25aa",
    "srarr;": "\u2192",
    "sscr;": "\U0001d4c8",
    "ssetmn;": "\u2216",
    "ssmile;": "\u2323",
    "sstarf;": "\u22c6",
    "star;": "\u2606",
    "starf;": "\u2605",
    "straightepsilon;": "\u03f5",
    "straightphi;": "\u03d5",
    "strns;": "\xaf",
    "sub;": "\u2282",
    "subE;": "\u2ac5",
    "subdot;": "\u2abd",
    "sube;": "\u2286",
    "subedot;": "\u2ac3",
    "submult;": "\u2ac1",
    "subnE;": "\u2acb",
    "subne;": "\u228a",
    "subplus;": "\u2abf",
    "subrarr;": "\u2979",
    "subset;": "\u2282",
    "subseteq;": "\u2286",
    "subseteqq;": "\u2ac5",
    "subsetneq;": "\u228a",
    "subsetneqq;": "\u2acb",
    "subsim;": "\u2ac7",
    "subsub;": "\u2ad5",
    "subsup;": "\u2ad3",
    "succ;": "\u227b",
    "succapprox;": "\u2ab8",
    "succcurlyeq;": "\u227d",
    "succeq;": "\u2ab0",
    "succnapprox;": "\u2aba",
    "succneqq;": "\u2ab6",
    "succnsim;": "\u22e9",
    "succsim;": "\u227f",
    "sum;": "\u2211",
    "sung;": "\u266a",
    "sup1": "\xb9",
    "sup1;": "\xb9",
    "sup2": "\xb2",
    "sup2;": "\xb2",
    "sup3": "\xb3",
    "sup3;": "\xb3",
    "sup;": "\u2283",
    "supE;": "\u2ac6",
    "supdot;": "\u2abe",
    "supdsub;": "\u2ad8",
    "supe;": "\u2287",
    "supedot;": "\u2ac4",
    "suphsol;": "\u27c9",
    "suphsub;": "\u2ad7",
    "suplarr;": "\u297b",
    "supmult;": "\u2ac2",
    "supnE;": "\u2acc",
    "supne;": "\u228b",
    "supplus;": "\u2ac0",
    "supset;": "\u2283",
    "supseteq;": "\u2287",
    "supseteqq;": "\u2ac6",
    "supsetneq;": "\u228b",
    "supsetneqq;": "\u2acc",
    "supsim;": "\u2ac8",
    "supsub;": "\u2ad4",
    "supsup;": "\u2ad6",
    "swArr;": "\u21d9",
    "swarhk;": "\u2926",
    "swarr;": "\u2199",
    "swarrow;": "\u2199",
    "swnwar;": "\u292a",
    "szlig": "\xdf",
    "szlig;": "\xdf",
    "target;": "\u2316",
    "tau;": "\u03c4",
    "tbrk;": "\u23b4",
    "tcaron;": "\u0165",
    "tcedil;": "\u0163",
    "tcy;": "\u0442",
    "tdot;": "\u20db",
    "telrec;": "\u2315",
    "tfr;": "\U0001d531",
    "there4;": "\u2234",
    "therefore;": "\u2234",
    "theta;": "\u03b8",
    "thetasym;": "\u03d1",
    "thetav;": "\u03d1",
    "thickapprox;": "\u2248",
    "thicksim;": "\u223c",
    "thinsp;": "\u2009",
    "thkap;": "\u2248",
    "thksim;": "\u223c",
    "thorn": "\xfe",
    "thorn;": "\xfe",
    "tilde;": "\u02dc",
    "times": "\xd7",
    "times;": "\xd7",
    "timesb;": "\u22a0",
    "timesbar;": "\u2a31",
    "timesd;": "\u2a30",
    "tint;": "\u222d",
    "toea;": "\u2928",
    "top;": "\u22a4",
    "topbot;": "\u2336",
    "topcir;": "\u2af1",
    "topf;": "\U0001d565",
    "topfork;": "\u2ada",
    "tosa;": "\u2929",
    "tprime;": "\u2034",
    "trade;": "\u2122",
    "triangle;": "\u25b5",
    "triangledown;": "\u25bf",
    "triangleleft;": "\u25c3",
    "trianglelefteq;": "\u22b4",
    "triangleq;": "\u225c",
    "triangleright;": "\u25b9",
    "trianglerighteq;": "\u22b5",
    "tridot;": "\u25ec",
    "trie;": "\u225c",
    "triminus;": "\u2a3a",
    "triplus;": "\u2a39",
    "trisb;": "\u29cd",
    "tritime;": "\u2a3b",
    "trpezium;": "\u23e2",
    "tscr;": "\U0001d4c9",
    "tscy;": "\u0446",
    "tshcy;": "\u045b",
    "tstrok;": "\u0167",
    "twixt;": "\u226c",
    "twoheadleftarrow;": "\u219e",
    "twoheadrightarrow;": "\u21a0",
    "uArr;": "\u21d1",
    "uHar;": "\u2963",
    "uacute": "\xfa",
    "uacute;": "\xfa",
    "uarr;": "\u2191",
    "ubrcy;": "\u045e",
    "ubreve;": "\u016d",
    "ucirc": "\xfb",
    "ucirc;": "\xfb",
    "ucy;": "\u0443",
    "udarr;": "\u21c5",
    "udblac;": "\u0171",
    "udhar;": "\u296e",
    "ufisht;": "\u297e",
    "ufr;": "\U0001d532",
    "ugrave": "\xf9",
    "ugrave;": "\xf9",
    "uharl;": "\u21bf",
    "uharr;": "\u21be",
    "uhblk;": "\u2580",
    "ulcorn;": "\u231c",
    "ulcorner;": "\u231c",
    "ulcrop;": "\u230f",
    "ultri;": "\u25f8",
    "umacr;": "\u016b",
    "uml": "\xa8",
    "uml;": "\xa8",
    "uogon;": "\u0173",
    "uopf;": "\U0001d566",
    "uparrow;": "\u2191",
    "updownarrow;": "\u2195",
    "upharpoonleft;": "\u21bf",
    "upharpoonright;": "\u21be",
    "uplus;": "\u228e",
    "upsi;": "\u03c5",
    "upsih;": "\u03d2",
    "upsilon;": "\u03c5",
    "upuparrows;": "\u21c8",
    "urcorn;": "\u231d",
    "urcorner;": "\u231d",
    "urcrop;": "\u230e",
    "uring;": "\u016f",
    "urtri;": "\u25f9",
    "uscr;": "\U0001d4ca",
    "utdot;": "\u22f0",
    "utilde;": "\u0169",
    "utri;": "\u25b5",
    "utrif;": "\u25b4",
    "uuarr;": "\u21c8",
    "uuml": "\xfc",
    "uuml;": "\xfc",
    "uwangle;": "\u29a7",
    "vArr;": "\u21d5",
    "vBar;": "\u2ae8",
    "vBarv;": "\u2ae9",
    "vDash;": "\u22a8",
    "vangrt;": "\u299c",
    "varepsilon;": "\u03f5",
    "varkappa;": "\u03f0",
    "varnothing;": "\u2205",
    "varphi;": "\u03d5",
    "varpi;": "\u03d6",
    "varpropto;": "\u221d",
    "varr;": "\u2195",
    "varrho;": "\u03f1",
    "varsigma;": "\u03c2",
    "varsubsetneq;": "\u228a\ufe00",
    "varsubsetneqq;": "\u2acb\ufe00",
    "varsupsetneq;": "\u228b\ufe00",
    "varsupsetneqq;": "\u2acc\ufe00",
    "vartheta;": "\u03d1",
    "vartriangleleft;": "\u22b2",
    "vartriangleright;": "\u22b3",
    "vcy;": "\u0432",
    "vdash;": "\u22a2",
    "vee;": "\u2228",
    "veebar;": "\u22bb",
    "veeeq;": "\u225a",
    "vellip;": "\u22ee",
    "verbar;": "|",
    "vert;": "|",
    "vfr;": "\U0001d533",
    "vltri;": "\u22b2",
    "vnsub;": "\u2282\u20d2",
    "vnsup;": "\u2283\u20d2",
    "vopf;": "\U0001d567",
    "vprop;": "\u221d",
    "vrtri;": "\u22b3",
    "vscr;": "\U0001d4cb",
    "vsubnE;": "\u2acb\ufe00",
    "vsubne;": "\u228a\ufe00",
    "vsupnE;": "\u2acc\ufe00",
    "vsupne;": "\u228b\ufe00",
    "vzigzag;": "\u299a",
    "wcirc;": "\u0175",
    "wedbar;": "\u2a5f",
    "wedge;": "\u2227",
    "wedgeq;": "\u2259",
    "weierp;": "\u2118",
    "wfr;": "\U0001d534",
    "wopf;": "\U0001d568",
    "wp;": "\u2118",
    "wr;": "\u2240",
    "wreath;": "\u2240",
    "wscr;": "\U0001d4cc",
    "xcap;": "\u22c2",
    "xcirc;": "\u25ef",
    "xcup;": "\u22c3",
    "xdtri;": "\u25bd",
    "xfr;": "\U0001d535",
    "xhArr;": "\u27fa",
    "xharr;": "\u27f7",
    "xi;": "\u03be",
    "xlArr;": "\u27f8",
    "xlarr;": "\u27f5",
    "xmap;": "\u27fc",
    "xnis;": "\u22fb",
    "xodot;": "\u2a00",
    "xopf;": "\U0001d569",
    "xoplus;": "\u2a01",
    "xotime;": "\u2a02",
    "xrArr;": "\u27f9",
    "xrarr;": "\u27f6",
    "xscr;": "\U0001d4cd",
    "xsqcup;": "\u2a06",
    "xuplus;": "\u2a04",
    "xutri;": "\u25b3",
    "xvee;": "\u22c1",
    "xwedge;": "\u22c0",
    "yacute": "\xfd",
    "yacute;": "\xfd",
    "yacy;": "\u044f",
    "ycirc;": "\u0177",
    "ycy;": "\u044b",
    "yen": "\xa5",
    "yen;": "\xa5",
    "yfr;": "\U0001d536",
    "yicy;": "\u0457",
    "yopf;": "\U0001d56a",
    "yscr;": "\U0001d4ce",
    "yucy;": "\u044e",
    "yuml": "\xff",
    "yuml;": "\xff",
    "zacute;": "\u017a",
    "zcaron;": "\u017e",
    "zcy;": "\u0437",
    "zdot;": "\u017c",
    "zeetrf;": "\u2128",
    "zeta;": "\u03b6",
    "zfr;": "\U0001d537",
    "zhcy;": "\u0436",
    "zigrarr;": "\u21dd",
    "zopf;": "\U0001d56b",
    "zscr;": "\U0001d4cf",
    "zwj;": "\u200d",
    "zwnj;": "\u200c",
}

# Substitutions applied by HTMLTokenizer.consumeNumberEntity to numeric
# character references that name disallowed codepoints (each use is also
# flagged as an "illegal-codepoint-for-numeric-entity" parse error).
# NUL becomes U+FFFD; 0x80-0x9F (C1 controls) map to the characters those
# byte values denote in windows-1252, which is where such references
# typically originate.  A few entries (e.g. 0x0D, 0x81, 0x8D) map to
# themselves: they are kept verbatim but still reported as errors.
replacementCharacters = {
    0x0: "\uFFFD",
    0x0d: "\u000D",
    0x80: "\u20AC",
    0x81: "\u0081",
    0x82: "\u201A",
    0x83: "\u0192",
    0x84: "\u201E",
    0x85: "\u2026",
    0x86: "\u2020",
    0x87: "\u2021",
    0x88: "\u02C6",
    0x89: "\u2030",
    0x8A: "\u0160",
    0x8B: "\u2039",
    0x8C: "\u0152",
    0x8D: "\u008D",
    0x8E: "\u017D",
    0x8F: "\u008F",
    0x90: "\u0090",
    0x91: "\u2018",
    0x92: "\u2019",
    0x93: "\u201C",
    0x94: "\u201D",
    0x95: "\u2022",
    0x96: "\u2013",
    0x97: "\u2014",
    0x98: "\u02DC",
    0x99: "\u2122",
    0x9A: "\u0161",
    0x9B: "\u203A",
    0x9C: "\u0153",
    0x9D: "\u009D",
    0x9E: "\u017E",
    0x9F: "\u0178",
}

# Integer codes for the token types emitted by the tokenizer; each name
# receives its position in this canonical ordering.
tokenTypes = {
    kind: code
    for code, kind in enumerate((
        "Doctype",
        "Characters",
        "SpaceCharacters",
        "StartTag",
        "EndTag",
        "EmptyTag",
        "Comment",
        "ParseError",
    ))
}

# Codes of the token types that carry tag data (name, attributes,
# self-closing flag).
tagTokenTypes = frozenset(
    tokenTypes[kind] for kind in ("StartTag", "EndTag", "EmptyTag"))


# Reverse mapping of `namespaces`: namespace URI -> prefix.
# (Idiom fix: a dict comprehension instead of dict() over a list of pairs.)
# The MathML URI is then pinned to "math" explicitly, overriding whichever
# alias the reversal happened to keep -- presumably `namespaces` holds more
# than one name for that URI; verify against its definition.
prefixes = {url: prefix for prefix, url in namespaces.items()}
prefixes["http://www.w3.org/1998/Math/MathML"] = "math"


class DataLossWarning(UserWarning):
    """Warning category for operations that may discard information.

    Trigger sites live elsewhere in the package; this module only
    defines the category so warnings can be filtered by type.
    """
    pass


class ReparseException(Exception):
    """Signals that parsing must restart from the beginning of the input.

    Raised and handled elsewhere in the package -- presumably when
    mid-parse discoveries (such as a character-encoding change)
    invalidate work done so far; confirm against the parser code.
    """
    pass
_vendor/html5lib/_tokenizer.py000064400000225444151733136420012463 0ustar00from __future__ import absolute_import, division, unicode_literals

from pip._vendor.six import unichr as chr

from collections import deque

from .constants import spaceCharacters
from .constants import entities
from .constants import asciiLetters, asciiUpper2Lower
from .constants import digits, hexDigits, EOF
from .constants import tokenTypes, tagTokenTypes
from .constants import replacementCharacters

from ._inputstream import HTMLInputStream

from ._trie import Trie

# Prefix trie over every named entity reference; HTMLTokenizer.consumeEntity
# uses it for has_keys_with_prefix() while consuming characters and for
# longest_prefix() matching (so e.g. "&noti" resolves to "not").
entitiesTrie = Trie(entities)


class HTMLTokenizer(object):
    """ This class takes care of tokenizing HTML.

    * self.currentToken
      Holds the token that is currently being processed.

    * self.state
      Holds a reference to the method to be invoked... XXX

    * self.stream
      Points to HTMLInputStream object.
    """

    def __init__(self, stream, parser=None, **kwargs):
        """Wrap *stream* in an HTMLInputStream and reset tokenizer state.

        Any extra keyword arguments are forwarded to HTMLInputStream.
        *parser* is kept only as a back-reference.
        """
        super(HTMLTokenizer, self).__init__()

        self.parser = parser
        self.stream = HTMLInputStream(stream, **kwargs)

        # Start in the plain character-data state with no escaping active.
        self.state = self.dataState
        self.escape = False
        self.escapeFlag = False
        self.lastFourChars = []

        # No token is under construction until the state machine starts one.
        self.currentToken = None

    def __iter__(self):
        """Drive the state machine and yield tokens as they are produced.

        After each state-method step, stream-level errors are surfaced
        first (as ParseError tokens), then the queued tokens are drained.
        Iteration ends when a state method returns False (EOF).
        """
        self.tokenQueue = deque([])
        parse_error = tokenTypes["ParseError"]
        # Each state method returns True while there is more input to
        # process; a False return signals EOF and terminates the loop.
        while self.state():
            errors = self.stream.errors
            while errors:
                yield {"type": parse_error, "data": errors.pop(0)}
            queue = self.tokenQueue
            while queue:
                yield queue.popleft()

    def consumeNumberEntity(self, isHex, _nonCharacters=frozenset(
            [0x000B, 0xFFFE, 0xFFFF, 0x1FFFE, 0x1FFFF, 0x2FFFE, 0x2FFFF,
             0x3FFFE, 0x3FFFF, 0x4FFFE, 0x4FFFF, 0x5FFFE, 0x5FFFF, 0x6FFFE,
             0x6FFFF, 0x7FFFE, 0x7FFFF, 0x8FFFE, 0x8FFFF, 0x9FFFE, 0x9FFFF,
             0xAFFFE, 0xAFFFF, 0xBFFFE, 0xBFFFF, 0xCFFFE, 0xCFFFF, 0xDFFFE,
             0xDFFFF, 0xEFFFE, 0xEFFFF, 0xFFFFE, 0xFFFFF, 0x10FFFE,
             0x10FFFF])):
        """Consume a numeric character reference and return its text.

        Reads decimal digits (hexadecimal when *isHex* is true) from the
        stream and decodes them to a character, substituting from
        ``replacementCharacters`` (or U+FFFD for surrogates and
        out-of-range values) and queueing an
        "illegal-codepoint-for-numeric-entity" ParseError token for
        disallowed codepoints.  A trailing ";" is discarded; if it is
        missing, the offending character is put back on the stream and a
        "numeric-entity-without-semicolon" parse error is queued.

        ``_nonCharacters`` is an implementation detail (do not pass it):
        the set of permanently-reserved noncharacter codepoints, built
        once at function-definition time instead of on every call, which
        is the hoist the original in-body comment asked for.
        """
        allowed = digits
        radix = 10
        if isHex:
            allowed = hexDigits
            radix = 16

        charStack = []

        # Consume all the characters that are in range while making sure we
        # don't hit an EOF.
        c = self.stream.char()
        while c in allowed and c is not EOF:
            charStack.append(c)
            c = self.stream.char()

        # Convert the set of characters consumed to an int.
        charAsInt = int("".join(charStack), radix)

        # Certain characters get replaced with others.
        if charAsInt in replacementCharacters:
            char = replacementCharacters[charAsInt]
            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
                                    "illegal-codepoint-for-numeric-entity",
                                    "datavars": {"charAsInt": charAsInt}})
        elif ((0xD800 <= charAsInt <= 0xDFFF) or
              (charAsInt > 0x10FFFF)):
            # Surrogates and out-of-range codepoints become U+FFFD.
            char = "\uFFFD"
            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
                                    "illegal-codepoint-for-numeric-entity",
                                    "datavars": {"charAsInt": charAsInt}})
        else:
            # Control characters and noncharacters are kept as-is but are
            # still flagged as parse errors.
            if ((0x0001 <= charAsInt <= 0x0008) or
                    (0x000E <= charAsInt <= 0x001F) or
                    (0x007F <= charAsInt <= 0x009F) or
                    (0xFDD0 <= charAsInt <= 0xFDEF) or
                    charAsInt in _nonCharacters):
                self.tokenQueue.append({"type": tokenTypes["ParseError"],
                                        "data":
                                        "illegal-codepoint-for-numeric-entity",
                                        "datavars": {"charAsInt": charAsInt}})
            try:
                # Try/except needed as UCS-2 Python builds' unichar only works
                # within the BMP.
                char = chr(charAsInt)
            except ValueError:
                # Narrow build: emit the codepoint as a UTF-16 surrogate pair.
                v = charAsInt - 0x10000
                char = chr(0xD800 | (v >> 10)) + chr(0xDC00 | (v & 0x3FF))

        # Discard the ; if present. Otherwise, put it back on the queue and
        # invoke parseError on parser.
        if c != ";":
            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
                                    "numeric-entity-without-semicolon"})
            self.stream.unget(c)

        return char

    def consumeEntity(self, allowedChar=None, fromAttribute=False):
        """Consume a character reference; the leading "&" is already read.

        The decoded text is appended to the value of the attribute being
        built when *fromAttribute* is true, otherwise it is queued as a
        Characters/SpaceCharacters token.  If *allowedChar* immediately
        follows the "&", the ampersand is treated as literal text
        (callers pass the attribute-value quote character here).
        """
        # Initialise to the default output for when no entity is matched
        output = "&"

        charStack = [self.stream.char()]
        # "&" followed by whitespace, EOF, "<", "&" or the allowed character
        # is not a reference: put the character back and emit "&" literally.
        if (charStack[0] in spaceCharacters or charStack[0] in (EOF, "<", "&") or
                (allowedChar is not None and allowedChar == charStack[0])):
            self.stream.unget(charStack[0])

        elif charStack[0] == "#":
            # Read the next character to see if it's hex or decimal
            hex = False
            charStack.append(self.stream.char())
            if charStack[-1] in ("x", "X"):
                hex = True
                charStack.append(self.stream.char())

            # charStack[-1] should be the first digit
            if (hex and charStack[-1] in hexDigits) \
                    or (not hex and charStack[-1] in digits):
                # At least one digit found, so consume the whole number
                self.stream.unget(charStack[-1])
                output = self.consumeNumberEntity(hex)
            else:
                # No digits found
                self.tokenQueue.append({"type": tokenTypes["ParseError"],
                                        "data": "expected-numeric-entity"})
                self.stream.unget(charStack.pop())
                output = "&" + "".join(charStack)

        else:
            # At this point in the process might have named entity. Entities
            # are stored in the global variable "entities".
            #
            # Consume characters and compare to these to a substring of the
            # entity names in the list until the substring no longer matches.
            while (charStack[-1] is not EOF):
                if not entitiesTrie.has_keys_with_prefix("".join(charStack)):
                    break
                charStack.append(self.stream.char())

            # At this point we have a string that starts with some characters
            # that may match an entity
            # Try to find the longest entity the string will match to take care
            # of &noti for instance.
            try:
                entityName = entitiesTrie.longest_prefix("".join(charStack[:-1]))
                entityLength = len(entityName)
            except KeyError:
                entityName = None

            if entityName is not None:
                if entityName[-1] != ";":
                    # Legacy reference without a trailing semicolon.
                    self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
                                            "named-entity-without-semicolon"})
                # In attributes, a semicolonless match followed by an
                # alphanumeric or "=" stays literal text.
                if (entityName[-1] != ";" and fromAttribute and
                    (charStack[entityLength] in asciiLetters or
                     charStack[entityLength] in digits or
                     charStack[entityLength] == "=")):
                    self.stream.unget(charStack.pop())
                    output = "&" + "".join(charStack)
                else:
                    output = entities[entityName]
                    self.stream.unget(charStack.pop())
                    # Re-emit any consumed characters past the entity name.
                    output += "".join(charStack[entityLength:])
            else:
                self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
                                        "expected-named-entity"})
                self.stream.unget(charStack.pop())
                output = "&" + "".join(charStack)

        if fromAttribute:
            # Grow the value of the attribute currently being built.
            self.currentToken["data"][-1][1] += output
        else:
            if output in spaceCharacters:
                tokenType = "SpaceCharacters"
            else:
                tokenType = "Characters"
            self.tokenQueue.append({"type": tokenTypes[tokenType], "data": output})

    def processEntityInAttribute(self, allowedChar):
        """Handle a character reference encountered in an attribute value.

        Replaces a dedicated "entityInAttributeValueState" by delegating
        to consumeEntity with the attribute flag set.
        """
        self.consumeEntity(fromAttribute=True, allowedChar=allowedChar)

    def emitCurrentToken(self):
        """This method is a generic handler for emitting the tags. It also sets
        the state to "data" because that's what's needed after a token has been
        emitted.
        """
        token = self.currentToken
        # Add token to the queue to be yielded
        if (token["type"] in tagTokenTypes):
            # Tag names match case-insensitively: fold to lowercase before
            # the token is handed to consumers.
            token["name"] = token["name"].translate(asciiUpper2Lower)
            if token["type"] == tokenTypes["EndTag"]:
                # End tags may carry neither attributes nor a self-closing
                # flag; report each violation but still emit the token.
                if token["data"]:
                    self.tokenQueue.append({"type": tokenTypes["ParseError"],
                                            "data": "attributes-in-end-tag"})
                if token["selfClosing"]:
                    self.tokenQueue.append({"type": tokenTypes["ParseError"],
                                            "data": "self-closing-flag-on-end-tag"})
        self.tokenQueue.append(token)
        self.state = self.dataState

    # Below are the various tokenizer states worked out.
    # Below are the various tokenizer states worked out.
    def dataState(self):
        """The default "data" state: plain character data between markup.

        "&" and "<" switch to the entity / tag-open states, U+0000 is a
        parse error (emitted verbatim in this state), EOF stops the
        tokenizer, and runs of ordinary characters are batched into a
        single Characters token.
        """
        data = self.stream.char()
        if data == "&":
            self.state = self.entityDataState
        elif data == "<":
            self.state = self.tagOpenState
        elif data == "\u0000":
            # Note: the data state emits the U+0000 itself rather than
            # replacing it with U+FFFD as most other states do.
            self.tokenQueue.append({"type": tokenTypes["ParseError"],
                                    "data": "invalid-codepoint"})
            self.tokenQueue.append({"type": tokenTypes["Characters"],
                                    "data": "\u0000"})
        elif data is EOF:
            # Tokenization ends.
            return False
        elif data in spaceCharacters:
            # Directly after emitting a token you switch back to the "data
            # state". At that point spaceCharacters are important so they are
            # emitted separately.
            self.tokenQueue.append({"type": tokenTypes["SpaceCharacters"], "data":
                                    data + self.stream.charsUntil(spaceCharacters, True)})
            # No need to update lastFourChars here, since the first space will
            # have already been appended to lastFourChars and will have broken
            # any <!-- or --> sequences
        else:
            chars = self.stream.charsUntil(("&", "<", "\u0000"))
            self.tokenQueue.append({"type": tokenTypes["Characters"], "data":
                                    data + chars})
        return True

    def entityDataState(self):
        """A "&" was seen in the data state: emit the character reference
        (or its literal fallback), then resume the plain data state."""
        self.consumeEntity()
        self.state = self.dataState
        return True

    def rcdataState(self):
        """RCDATA state (content of elements such as <title>/<textarea>).

        Character references are honoured, "<" may open an end tag,
        U+0000 is a parse error emitted as U+FFFD, and EOF ends
        tokenization.
        """
        data = self.stream.char()
        if data == "&":
            self.state = self.characterReferenceInRcdata
        elif data == "<":
            self.state = self.rcdataLessThanSignState
        elif data is EOF:
            # Identity comparison: EOF is a sentinel and is tested with
            # `is EOF` elsewhere in this tokenizer (e.g. the data state).
            # Tokenization ends.
            return False
        elif data == "\u0000":
            self.tokenQueue.append({"type": tokenTypes["ParseError"],
                                    "data": "invalid-codepoint"})
            self.tokenQueue.append({"type": tokenTypes["Characters"],
                                    "data": "\uFFFD"})
        elif data in spaceCharacters:
            # Directly after emitting a token you switch back to the "data
            # state". At that point spaceCharacters are important so they are
            # emitted separately.
            self.tokenQueue.append({"type": tokenTypes["SpaceCharacters"], "data":
                                    data + self.stream.charsUntil(spaceCharacters, True)})
            # No need to update lastFourChars here, since the first space will
            # have already been appended to lastFourChars and will have broken
            # any <!-- or --> sequences
        else:
            chars = self.stream.charsUntil(("&", "<", "\u0000"))
            self.tokenQueue.append({"type": tokenTypes["Characters"], "data":
                                    data + chars})
        return True

    def characterReferenceInRcdata(self):
        """A "&" was seen in RCDATA: consume the character reference,
        then drop back into the RCDATA state."""
        self.consumeEntity()
        self.state = self.rcdataState
        return True

    def rawtextState(self):
        """RAWTEXT state (e.g. <style>): no entities are recognised.

        "<" may open an end tag, U+0000 is a parse error emitted as
        U+FFFD, EOF ends tokenization, and everything else is batched
        into a Characters token.
        """
        data = self.stream.char()
        if data == "<":
            self.state = self.rawtextLessThanSignState
        elif data == "\u0000":
            self.tokenQueue.append({"type": tokenTypes["ParseError"],
                                    "data": "invalid-codepoint"})
            self.tokenQueue.append({"type": tokenTypes["Characters"],
                                    "data": "\uFFFD"})
        elif data is EOF:
            # Identity comparison for the EOF sentinel, consistent with
            # `is EOF` checks elsewhere in this tokenizer.
            # Tokenization ends.
            return False
        else:
            chars = self.stream.charsUntil(("<", "\u0000"))
            self.tokenQueue.append({"type": tokenTypes["Characters"], "data":
                                    data + chars})
        return True

    def scriptDataState(self):
        """Script data state (content of <script>): no entities.

        "<" may begin an end tag or escape sequence, U+0000 is a parse
        error emitted as U+FFFD, EOF ends tokenization.
        """
        data = self.stream.char()
        if data == "<":
            self.state = self.scriptDataLessThanSignState
        elif data == "\u0000":
            self.tokenQueue.append({"type": tokenTypes["ParseError"],
                                    "data": "invalid-codepoint"})
            self.tokenQueue.append({"type": tokenTypes["Characters"],
                                    "data": "\uFFFD"})
        elif data is EOF:
            # Identity comparison for the EOF sentinel, consistent with
            # `is EOF` checks elsewhere in this tokenizer.
            # Tokenization ends.
            return False
        else:
            chars = self.stream.charsUntil(("<", "\u0000"))
            self.tokenQueue.append({"type": tokenTypes["Characters"], "data":
                                    data + chars})
        return True

    def plaintextState(self):
        """PLAINTEXT state: after <plaintext>, everything up to EOF is
        emitted verbatim as character data; only U+0000 needs special
        handling (parse error, replaced with U+FFFD)."""
        data = self.stream.char()
        if data is EOF:
            # Identity comparison for the EOF sentinel, consistent with
            # `is EOF` checks elsewhere in this tokenizer.
            # Tokenization ends.
            return False
        elif data == "\u0000":
            self.tokenQueue.append({"type": tokenTypes["ParseError"],
                                    "data": "invalid-codepoint"})
            self.tokenQueue.append({"type": tokenTypes["Characters"],
                                    "data": "\uFFFD"})
        else:
            # Batch a run of ordinary characters in one Characters token.
            self.tokenQueue.append({"type": tokenTypes["Characters"], "data":
                                    data + self.stream.charsUntil("\u0000")})
        return True

    def tagOpenState(self):
        """A "<" was seen in the data state: decide what kind of markup
        follows ("!" declaration, "/" end tag, letter start tag) or fall
        back to emitting the "<" as literal text with a parse error.
        """
        data = self.stream.char()
        if data == "!":
            self.state = self.markupDeclarationOpenState
        elif data == "/":
            self.state = self.closeTagOpenState
        elif data in asciiLetters:
            # Begin a fresh start-tag token; attributes accumulate in
            # "data" as [name, value] pairs until the tag is emitted.
            self.currentToken = {"type": tokenTypes["StartTag"],
                                 "name": data, "data": [],
                                 "selfClosing": False,
                                 "selfClosingAcknowledged": False}
            self.state = self.tagNameState
        elif data == ">":
            # XXX In theory it could be something besides a tag name. But
            # do we really care?
            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
                                    "expected-tag-name-but-got-right-bracket"})
            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<>"})
            self.state = self.dataState
        elif data == "?":
            # XXX In theory it could be something besides a tag name. But
            # do we really care?
            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
                                    "expected-tag-name-but-got-question-mark"})
            self.stream.unget(data)
            self.state = self.bogusCommentState
        else:
            # XXX
            # Not markup after all: emit the "<" literally and reprocess
            # the current character in the data state.
            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
                                    "expected-tag-name"})
            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"})
            self.stream.unget(data)
            self.state = self.dataState
        return True

    def closeTagOpenState(self):
        """"</" was seen: a letter begins an end-tag token; ">" and EOF
        are parse errors; anything else becomes a bogus comment.
        """
        data = self.stream.char()
        if data in asciiLetters:
            self.currentToken = {"type": tokenTypes["EndTag"], "name": data,
                                 "data": [], "selfClosing": False}
            self.state = self.tagNameState
        elif data == ">":
            # "</>" - report the error and silently drop the empty end tag.
            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
                                    "expected-closing-tag-but-got-right-bracket"})
            self.state = self.dataState
        elif data is EOF:
            # Emit the incomplete "</" as literal text before stopping.
            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
                                    "expected-closing-tag-but-got-eof"})
            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "</"})
            self.state = self.dataState
        else:
            # XXX data can be _'_...
            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
                                    "expected-closing-tag-but-got-char",
                                    "datavars": {"data": data}})
            self.stream.unget(data)
            self.state = self.bogusCommentState
        return True

    def tagNameState(self):
        """Accumulate the tag name character by character.

        Whitespace moves on to attribute parsing, ">" emits the token,
        "/" starts the self-closing check, U+0000 is replaced by U+FFFD,
        and EOF is a parse error that abandons the tag.
        """
        data = self.stream.char()
        if data in spaceCharacters:
            self.state = self.beforeAttributeNameState
        elif data == ">":
            self.emitCurrentToken()
        elif data is EOF:
            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
                                    "eof-in-tag-name"})
            self.state = self.dataState
        elif data == "/":
            self.state = self.selfClosingStartTagState
        elif data == "\u0000":
            self.tokenQueue.append({"type": tokenTypes["ParseError"],
                                    "data": "invalid-codepoint"})
            self.currentToken["name"] += "\uFFFD"
        else:
            self.currentToken["name"] += data
            # (Don't use charsUntil here, because tag names are
            # very short and it's faster to not do anything fancy)
        return True

    def rcdataLessThanSignState(self):
        """"<" was seen in RCDATA: "/" may start an end tag, anything
        else means the "<" was literal text."""
        char = self.stream.char()
        if char != "/":
            # Not an end tag after all; emit the "<" and reprocess.
            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"})
            self.stream.unget(char)
            self.state = self.rcdataState
        else:
            self.temporaryBuffer = ""
            self.state = self.rcdataEndTagOpenState
        return True

    def rcdataEndTagOpenState(self):
        """"</" was seen in RCDATA: a letter starts buffering a candidate
        end-tag name; otherwise "</" was literal text."""
        char = self.stream.char()
        if char not in asciiLetters:
            # No tag name follows; emit "</" verbatim and reprocess.
            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "</"})
            self.stream.unget(char)
            self.state = self.rcdataState
        else:
            self.temporaryBuffer += char
            self.state = self.rcdataEndTagNameState
        return True

    def rcdataEndTagNameState(self):
        """Buffer a candidate end-tag name in RCDATA.

        The buffered name only closes the element if it matches the most
        recent start tag ("appropriate"); otherwise the whole "</name"
        run is emitted as literal text.
        """
        # Must be computed before consuming the next character, since the
        # branches below depend on it.
        appropriate = self.currentToken and self.currentToken["name"].lower() == self.temporaryBuffer.lower()
        data = self.stream.char()
        if data in spaceCharacters and appropriate:
            self.currentToken = {"type": tokenTypes["EndTag"],
                                 "name": self.temporaryBuffer,
                                 "data": [], "selfClosing": False}
            self.state = self.beforeAttributeNameState
        elif data == "/" and appropriate:
            self.currentToken = {"type": tokenTypes["EndTag"],
                                 "name": self.temporaryBuffer,
                                 "data": [], "selfClosing": False}
            self.state = self.selfClosingStartTagState
        elif data == ">" and appropriate:
            self.currentToken = {"type": tokenTypes["EndTag"],
                                 "name": self.temporaryBuffer,
                                 "data": [], "selfClosing": False}
            self.emitCurrentToken()
            self.state = self.dataState
        elif data in asciiLetters:
            self.temporaryBuffer += data
        else:
            # Not an appropriate end tag: flush the buffered text and
            # reprocess the current character as RCDATA.
            self.tokenQueue.append({"type": tokenTypes["Characters"],
                                    "data": "</" + self.temporaryBuffer})
            self.stream.unget(data)
            self.state = self.rcdataState
        return True

    def rawtextLessThanSignState(self):
        """"<" was seen in RAWTEXT: "/" may start an end tag, anything
        else means the "<" was literal text."""
        char = self.stream.char()
        if char != "/":
            # The "<" was ordinary content; emit it and reprocess.
            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"})
            self.stream.unget(char)
            self.state = self.rawtextState
        else:
            self.temporaryBuffer = ""
            self.state = self.rawtextEndTagOpenState
        return True

    def rawtextEndTagOpenState(self):
        """"</" was seen in RAWTEXT: a letter starts buffering a candidate
        end-tag name; otherwise "</" was literal text."""
        char = self.stream.char()
        if char not in asciiLetters:
            # No tag name follows; emit "</" verbatim and reprocess.
            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "</"})
            self.stream.unget(char)
            self.state = self.rawtextState
        else:
            self.temporaryBuffer += char
            self.state = self.rawtextEndTagNameState
        return True

    def rawtextEndTagNameState(self):
        """Buffer a candidate end-tag name in RAWTEXT.

        Only an "appropriate" name (matching the open element) closes it;
        otherwise the buffered "</name" run is emitted as literal text.
        """
        # Must be computed before consuming the next character, since the
        # branches below depend on it.
        appropriate = self.currentToken and self.currentToken["name"].lower() == self.temporaryBuffer.lower()
        data = self.stream.char()
        if data in spaceCharacters and appropriate:
            self.currentToken = {"type": tokenTypes["EndTag"],
                                 "name": self.temporaryBuffer,
                                 "data": [], "selfClosing": False}
            self.state = self.beforeAttributeNameState
        elif data == "/" and appropriate:
            self.currentToken = {"type": tokenTypes["EndTag"],
                                 "name": self.temporaryBuffer,
                                 "data": [], "selfClosing": False}
            self.state = self.selfClosingStartTagState
        elif data == ">" and appropriate:
            self.currentToken = {"type": tokenTypes["EndTag"],
                                 "name": self.temporaryBuffer,
                                 "data": [], "selfClosing": False}
            self.emitCurrentToken()
            self.state = self.dataState
        elif data in asciiLetters:
            self.temporaryBuffer += data
        else:
            # Not an appropriate end tag: flush the buffered text and
            # reprocess the current character as RAWTEXT.
            self.tokenQueue.append({"type": tokenTypes["Characters"],
                                    "data": "</" + self.temporaryBuffer})
            self.stream.unget(data)
            self.state = self.rawtextState
        return True

    def scriptDataLessThanSignState(self):
        """"<" was seen in script data: "/" may start an end tag, "!"
        begins the escape sequence, anything else was literal text."""
        char = self.stream.char()
        if char == "!":
            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<!"})
            self.state = self.scriptDataEscapeStartState
        elif char == "/":
            self.temporaryBuffer = ""
            self.state = self.scriptDataEndTagOpenState
        else:
            # Literal "<": emit it and reprocess the current character.
            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"})
            self.stream.unget(char)
            self.state = self.scriptDataState
        return True

    def scriptDataEndTagOpenState(self):
        """"</" was seen in script data: a letter starts buffering a
        candidate end-tag name; otherwise "</" was literal text."""
        char = self.stream.char()
        if char not in asciiLetters:
            # No tag name follows; emit "</" verbatim and reprocess.
            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "</"})
            self.stream.unget(char)
            self.state = self.scriptDataState
        else:
            self.temporaryBuffer += char
            self.state = self.scriptDataEndTagNameState
        return True

    def scriptDataEndTagNameState(self):
        """Buffer a candidate end-tag name in script data.

        Only an "appropriate" name (matching the open element) closes it;
        otherwise the buffered "</name" run is emitted as literal text.
        """
        # Must be computed before consuming the next character, since the
        # branches below depend on it.
        appropriate = self.currentToken and self.currentToken["name"].lower() == self.temporaryBuffer.lower()
        data = self.stream.char()
        if data in spaceCharacters and appropriate:
            self.currentToken = {"type": tokenTypes["EndTag"],
                                 "name": self.temporaryBuffer,
                                 "data": [], "selfClosing": False}
            self.state = self.beforeAttributeNameState
        elif data == "/" and appropriate:
            self.currentToken = {"type": tokenTypes["EndTag"],
                                 "name": self.temporaryBuffer,
                                 "data": [], "selfClosing": False}
            self.state = self.selfClosingStartTagState
        elif data == ">" and appropriate:
            self.currentToken = {"type": tokenTypes["EndTag"],
                                 "name": self.temporaryBuffer,
                                 "data": [], "selfClosing": False}
            self.emitCurrentToken()
            self.state = self.dataState
        elif data in asciiLetters:
            self.temporaryBuffer += data
        else:
            # Not an appropriate end tag: flush the buffered text and
            # reprocess the current character as script data.
            self.tokenQueue.append({"type": tokenTypes["Characters"],
                                    "data": "</" + self.temporaryBuffer})
            self.stream.unget(data)
            self.state = self.scriptDataState
        return True

    def scriptDataEscapeStartState(self):
        """"<!" was seen in script data: a "-" continues toward the
        escaped mode, anything else returns to plain script data."""
        char = self.stream.char()
        if char != "-":
            # Not an escape opener; reprocess in the script data state.
            self.stream.unget(char)
            self.state = self.scriptDataState
        else:
            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "-"})
            self.state = self.scriptDataEscapeStartDashState
        return True

    def scriptDataEscapeStartDashState(self):
        """"<!-" was seen: a second "-" completes "<!--" and enters the
        escaped dash-dash mode, anything else returns to script data."""
        char = self.stream.char()
        if char != "-":
            # Incomplete "<!--"; reprocess in the script data state.
            self.stream.unget(char)
            self.state = self.scriptDataState
        else:
            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "-"})
            self.state = self.scriptDataEscapedDashDashState
        return True

    def scriptDataEscapedState(self):
        """Escaped script data state (inside "<!--" in a <script>).

        "-" and "<" switch to their lookahead states, U+0000 is a parse
        error emitted as U+FFFD, EOF silently ends the escape, and other
        characters are batched into a Characters token.
        """
        data = self.stream.char()
        if data == "-":
            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "-"})
            self.state = self.scriptDataEscapedDashState
        elif data == "<":
            self.state = self.scriptDataEscapedLessThanSignState
        elif data == "\u0000":
            self.tokenQueue.append({"type": tokenTypes["ParseError"],
                                    "data": "invalid-codepoint"})
            self.tokenQueue.append({"type": tokenTypes["Characters"],
                                    "data": "\uFFFD"})
        elif data is EOF:
            # Identity comparison for the EOF sentinel, consistent with
            # `is EOF` checks elsewhere in this tokenizer.
            self.state = self.dataState
        else:
            chars = self.stream.charsUntil(("<", "-", "\u0000"))
            self.tokenQueue.append({"type": tokenTypes["Characters"], "data":
                                    data + chars})
        return True

    def scriptDataEscapedDashState(self):
        """One "-" seen in escaped script data.

        A second "-" moves to the dash-dash state; "<", U+0000 and EOF
        behave as in the escaped state; anything else re-enters it.
        """
        data = self.stream.char()
        if data == "-":
            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "-"})
            self.state = self.scriptDataEscapedDashDashState
        elif data == "<":
            self.state = self.scriptDataEscapedLessThanSignState
        elif data == "\u0000":
            self.tokenQueue.append({"type": tokenTypes["ParseError"],
                                    "data": "invalid-codepoint"})
            self.tokenQueue.append({"type": tokenTypes["Characters"],
                                    "data": "\uFFFD"})
            self.state = self.scriptDataEscapedState
        elif data is EOF:
            # Identity comparison for the EOF sentinel, consistent with
            # `is EOF` checks elsewhere in this tokenizer.
            self.state = self.dataState
        else:
            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data})
            self.state = self.scriptDataEscapedState
        return True

    def scriptDataEscapedDashDashState(self):
        """"--" seen in escaped script data.

        Further "-" stay here, ">" closes the "-->" and returns to plain
        script data, "<"/U+0000/EOF behave as in the escaped state, and
        anything else re-enters it.
        """
        data = self.stream.char()
        if data == "-":
            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "-"})
        elif data == "<":
            self.state = self.scriptDataEscapedLessThanSignState
        elif data == ">":
            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": ">"})
            self.state = self.scriptDataState
        elif data == "\u0000":
            self.tokenQueue.append({"type": tokenTypes["ParseError"],
                                    "data": "invalid-codepoint"})
            self.tokenQueue.append({"type": tokenTypes["Characters"],
                                    "data": "\uFFFD"})
            self.state = self.scriptDataEscapedState
        elif data is EOF:
            # Identity comparison for the EOF sentinel, consistent with
            # `is EOF` checks elsewhere in this tokenizer.
            self.state = self.dataState
        else:
            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data})
            self.state = self.scriptDataEscapedState
        return True

    def scriptDataEscapedLessThanSignState(self):
        """"<" was seen in escaped script data: "/" may start an end tag,
        a letter may begin a nested "<script" (double escape), anything
        else means the "<" was literal text.
        """
        data = self.stream.char()
        if data == "/":
            self.temporaryBuffer = ""
            self.state = self.scriptDataEscapedEndTagOpenState
        elif data in asciiLetters:
            # Possible inner "<script": start buffering the name to check.
            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<" + data})
            self.temporaryBuffer = data
            self.state = self.scriptDataDoubleEscapeStartState
        else:
            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"})
            self.stream.unget(data)
            self.state = self.scriptDataEscapedState
        return True

    def scriptDataEscapedEndTagOpenState(self):
        """"</" was seen in escaped script data: a letter starts a
        candidate end-tag name; otherwise "</" was literal text."""
        char = self.stream.char()
        if char not in asciiLetters:
            # No tag name follows; emit "</" verbatim and reprocess.
            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "</"})
            self.stream.unget(char)
            self.state = self.scriptDataEscapedState
        else:
            self.temporaryBuffer = char
            self.state = self.scriptDataEscapedEndTagNameState
        return True

    def scriptDataEscapedEndTagNameState(self):
        """Buffer a candidate end-tag name in escaped script data.

        Only an "appropriate" name (matching the open element) closes it;
        otherwise the buffered "</name" run is emitted as literal text.
        """
        # Must be computed before consuming the next character, since the
        # branches below depend on it.
        appropriate = self.currentToken and self.currentToken["name"].lower() == self.temporaryBuffer.lower()
        data = self.stream.char()
        if data in spaceCharacters and appropriate:
            self.currentToken = {"type": tokenTypes["EndTag"],
                                 "name": self.temporaryBuffer,
                                 "data": [], "selfClosing": False}
            self.state = self.beforeAttributeNameState
        elif data == "/" and appropriate:
            self.currentToken = {"type": tokenTypes["EndTag"],
                                 "name": self.temporaryBuffer,
                                 "data": [], "selfClosing": False}
            self.state = self.selfClosingStartTagState
        elif data == ">" and appropriate:
            self.currentToken = {"type": tokenTypes["EndTag"],
                                 "name": self.temporaryBuffer,
                                 "data": [], "selfClosing": False}
            self.emitCurrentToken()
            self.state = self.dataState
        elif data in asciiLetters:
            self.temporaryBuffer += data
        else:
            # Not an appropriate end tag: flush the buffered text and
            # reprocess the current character as escaped script data.
            self.tokenQueue.append({"type": tokenTypes["Characters"],
                                    "data": "</" + self.temporaryBuffer})
            self.stream.unget(data)
            self.state = self.scriptDataEscapedState
        return True

    def scriptDataDoubleEscapeStartState(self):
        """Decide whether a nested "<script" inside escaped script data
        switches into the double-escaped mode; letters are buffered until
        a delimiter is reached."""
        char = self.stream.char()
        if char in (spaceCharacters | frozenset(("/", ">"))):
            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": char})
            # Only the word "script" (case-insensitively) flips modes.
            self.state = (self.scriptDataDoubleEscapedState
                          if self.temporaryBuffer.lower() == "script"
                          else self.scriptDataEscapedState)
        elif char in asciiLetters:
            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": char})
            self.temporaryBuffer += char
        else:
            self.stream.unget(char)
            self.state = self.scriptDataEscapedState
        return True

    def scriptDataDoubleEscapedState(self):
        """Double-escaped script data ("<script" nested in "<!--").

        Everything, including "<" and "-", is still emitted as character
        data; EOF here is a parse error (unterminated inner script).
        """
        data = self.stream.char()
        if data == "-":
            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "-"})
            self.state = self.scriptDataDoubleEscapedDashState
        elif data == "<":
            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"})
            self.state = self.scriptDataDoubleEscapedLessThanSignState
        elif data == "\u0000":
            self.tokenQueue.append({"type": tokenTypes["ParseError"],
                                    "data": "invalid-codepoint"})
            self.tokenQueue.append({"type": tokenTypes["Characters"],
                                    "data": "\uFFFD"})
        elif data is EOF:
            # Identity comparison for the EOF sentinel, consistent with
            # `is EOF` checks elsewhere in this tokenizer.
            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
                                    "eof-in-script-in-script"})
            self.state = self.dataState
        else:
            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data})
        return True

    def scriptDataDoubleEscapedDashState(self):
        """One "-" seen in double-escaped script data.

        A second "-" moves to the dash-dash state; other characters fall
        back into the double-escaped state; EOF is a parse error.
        """
        data = self.stream.char()
        if data == "-":
            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "-"})
            self.state = self.scriptDataDoubleEscapedDashDashState
        elif data == "<":
            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"})
            self.state = self.scriptDataDoubleEscapedLessThanSignState
        elif data == "\u0000":
            self.tokenQueue.append({"type": tokenTypes["ParseError"],
                                    "data": "invalid-codepoint"})
            self.tokenQueue.append({"type": tokenTypes["Characters"],
                                    "data": "\uFFFD"})
            self.state = self.scriptDataDoubleEscapedState
        elif data is EOF:
            # Identity comparison for the EOF sentinel, consistent with
            # `is EOF` checks elsewhere in this tokenizer.
            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
                                    "eof-in-script-in-script"})
            self.state = self.dataState
        else:
            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data})
            self.state = self.scriptDataDoubleEscapedState
        return True

    def scriptDataDoubleEscapedDashDashState(self):
        """"--" seen in double-escaped script data.

        Further "-" stay here, ">" closes the "-->" and returns to plain
        script data, other characters fall back into the double-escaped
        state; EOF is a parse error.
        """
        data = self.stream.char()
        if data == "-":
            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "-"})
        elif data == "<":
            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"})
            self.state = self.scriptDataDoubleEscapedLessThanSignState
        elif data == ">":
            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": ">"})
            self.state = self.scriptDataState
        elif data == "\u0000":
            self.tokenQueue.append({"type": tokenTypes["ParseError"],
                                    "data": "invalid-codepoint"})
            self.tokenQueue.append({"type": tokenTypes["Characters"],
                                    "data": "\uFFFD"})
            self.state = self.scriptDataDoubleEscapedState
        elif data is EOF:
            # Identity comparison for the EOF sentinel, consistent with
            # `is EOF` checks elsewhere in this tokenizer.
            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
                                    "eof-in-script-in-script"})
            self.state = self.dataState
        else:
            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data})
            self.state = self.scriptDataDoubleEscapedState
        return True

    def scriptDataDoubleEscapedLessThanSignState(self):
        """"<" emitted in double-escaped data: a "/" may begin the
        "</script" that ends double escaping; anything else reprocesses."""
        char = self.stream.char()
        if char != "/":
            self.stream.unget(char)
            self.state = self.scriptDataDoubleEscapedState
        else:
            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "/"})
            self.temporaryBuffer = ""
            self.state = self.scriptDataDoubleEscapeEndState
        return True

    def scriptDataDoubleEscapeEndState(self):
        """Decide whether a "</script" leaves double-escaped mode;
        letters are buffered until a delimiter is reached."""
        char = self.stream.char()
        if char in (spaceCharacters | frozenset(("/", ">"))):
            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": char})
            # Only the word "script" (case-insensitively) leaves the mode.
            self.state = (self.scriptDataEscapedState
                          if self.temporaryBuffer.lower() == "script"
                          else self.scriptDataDoubleEscapedState)
        elif char in asciiLetters:
            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": char})
            self.temporaryBuffer += char
        else:
            self.stream.unget(char)
            self.state = self.scriptDataDoubleEscapedState
        return True

    def beforeAttributeNameState(self):
        """Skip whitespace before an attribute name, then start one.

        ">" emits the tag, "/" checks for self-closing, certain
        punctuation and U+0000 are parse errors that still begin an
        attribute, and EOF abandons the tag.
        """
        data = self.stream.char()
        if data in spaceCharacters:
            # Consume the whole whitespace run in one call.
            self.stream.charsUntil(spaceCharacters, True)
        elif data in asciiLetters:
            # New attribute: [name, value] pair appended to the tag data.
            self.currentToken["data"].append([data, ""])
            self.state = self.attributeNameState
        elif data == ">":
            self.emitCurrentToken()
        elif data == "/":
            self.state = self.selfClosingStartTagState
        elif data in ("'", '"', "=", "<"):
            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
                                    "invalid-character-in-attribute-name"})
            self.currentToken["data"].append([data, ""])
            self.state = self.attributeNameState
        elif data == "\u0000":
            self.tokenQueue.append({"type": tokenTypes["ParseError"],
                                    "data": "invalid-codepoint"})
            self.currentToken["data"].append(["\uFFFD", ""])
            self.state = self.attributeNameState
        elif data is EOF:
            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
                                    "expected-attribute-name-but-got-eof"})
            self.state = self.dataState
        else:
            self.currentToken["data"].append([data, ""])
            self.state = self.attributeNameState
        return True

    def attributeNameState(self):
        """Accumulate the current attribute's name.

        While characters keep extending the name, leavingThisState stays
        False; once the name ends, it is lowercased and checked against
        earlier attributes for duplicates. ">" defers emission (via
        emitToken) until after that normalisation, because emitting first
        would convert the attribute list to a dict prematurely.
        """
        data = self.stream.char()
        leavingThisState = True
        emitToken = False
        if data == "=":
            self.state = self.beforeAttributeValueState
        elif data in asciiLetters:
            # Batch a run of letters into the name in one call.
            self.currentToken["data"][-1][0] += data +\
                self.stream.charsUntil(asciiLetters, True)
            leavingThisState = False
        elif data == ">":
            # XXX If we emit here the attributes are converted to a dict
            # without being checked and when the code below runs we error
            # because data is a dict not a list
            emitToken = True
        elif data in spaceCharacters:
            self.state = self.afterAttributeNameState
        elif data == "/":
            self.state = self.selfClosingStartTagState
        elif data == "\u0000":
            self.tokenQueue.append({"type": tokenTypes["ParseError"],
                                    "data": "invalid-codepoint"})
            self.currentToken["data"][-1][0] += "\uFFFD"
            leavingThisState = False
        elif data in ("'", '"', "<"):
            self.tokenQueue.append({"type": tokenTypes["ParseError"],
                                    "data":
                                    "invalid-character-in-attribute-name"})
            self.currentToken["data"][-1][0] += data
            leavingThisState = False
        elif data is EOF:
            self.tokenQueue.append({"type": tokenTypes["ParseError"],
                                    "data": "eof-in-attribute-name"})
            self.state = self.dataState
        else:
            self.currentToken["data"][-1][0] += data
            leavingThisState = False

        if leavingThisState:
            # Attributes are not dropped at this stage. That happens when the
            # start tag token is emitted so values can still be safely appended
            # to attributes, but we do want to report the parse error in time.
            self.currentToken["data"][-1][0] = (
                self.currentToken["data"][-1][0].translate(asciiUpper2Lower))
            for name, _ in self.currentToken["data"][:-1]:
                if self.currentToken["data"][-1][0] == name:
                    self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
                                            "duplicate-attribute"})
                    break
            # XXX Fix for above XXX
            if emitToken:
                self.emitCurrentToken()
        return True

    def afterAttributeNameState(self):
        """Handle the character following a completed attribute name."""
        char = self.stream.char()

        def _error(code):
            # Queue a parse-error token; tokenization continues regardless.
            self.tokenQueue.append(
                {"type": tokenTypes["ParseError"], "data": code})

        if char in spaceCharacters:
            # Collapse the whole whitespace run in a single call.
            self.stream.charsUntil(spaceCharacters, True)
        elif char == "=":
            self.state = self.beforeAttributeValueState
        elif char == ">":
            self.emitCurrentToken()
        elif char in asciiLetters:
            self.currentToken["data"].append([char, ""])
            self.state = self.attributeNameState
        elif char == "/":
            self.state = self.selfClosingStartTagState
        elif char == "\u0000":
            _error("invalid-codepoint")
            self.currentToken["data"].append(["\uFFFD", ""])
            self.state = self.attributeNameState
        elif char in ("'", '"', "<"):
            _error("invalid-character-after-attribute-name")
            self.currentToken["data"].append([char, ""])
            self.state = self.attributeNameState
        elif char is EOF:
            _error("expected-end-of-tag-but-got-eof")
            self.state = self.dataState
        else:
            # Any other character starts a fresh attribute name.
            self.currentToken["data"].append([char, ""])
            self.state = self.attributeNameState
        return True

    def beforeAttributeValueState(self):
        """Dispatch on the first character of an attribute value."""
        char = self.stream.char()

        def _error(code):
            self.tokenQueue.append(
                {"type": tokenTypes["ParseError"], "data": code})

        if char in spaceCharacters:
            # Skip the whitespace run in one call.
            self.stream.charsUntil(spaceCharacters, True)
        elif char == "\"":
            self.state = self.attributeValueDoubleQuotedState
        elif char == "&":
            # Reprocess the ampersand inside the unquoted-value state.
            self.state = self.attributeValueUnQuotedState
            self.stream.unget(char)
        elif char == "'":
            self.state = self.attributeValueSingleQuotedState
        elif char == ">":
            _error("expected-attribute-value-but-got-right-bracket")
            self.emitCurrentToken()
        elif char == "\u0000":
            _error("invalid-codepoint")
            self.currentToken["data"][-1][1] += "\uFFFD"
            self.state = self.attributeValueUnQuotedState
        elif char in ("=", "<", "`"):
            _error("equals-in-unquoted-attribute-value")
            self.currentToken["data"][-1][1] += char
            self.state = self.attributeValueUnQuotedState
        elif char is EOF:
            _error("expected-attribute-value-but-got-eof")
            self.state = self.dataState
        else:
            self.currentToken["data"][-1][1] += char
            self.state = self.attributeValueUnQuotedState
        return True

    def attributeValueDoubleQuotedState(self):
        """Tokenize an attribute value enclosed in double quotes."""
        char = self.stream.char()
        if char == "\"":
            self.state = self.afterAttributeValueState
        elif char == "&":
            self.processEntityInAttribute('"')
        elif char == "\u0000":
            self.tokenQueue.append(
                {"type": tokenTypes["ParseError"], "data": "invalid-codepoint"})
            self.currentToken["data"][-1][1] += "\uFFFD"
        elif char is EOF:
            self.tokenQueue.append(
                {"type": tokenTypes["ParseError"],
                 "data": "eof-in-attribute-value-double-quote"})
            self.state = self.dataState
        else:
            # Consume the run of ordinary value characters in one call.
            rest = self.stream.charsUntil(("\"", "&", "\u0000"))
            self.currentToken["data"][-1][1] += char + rest
        return True

    def attributeValueSingleQuotedState(self):
        """Tokenize an attribute value enclosed in single quotes."""
        char = self.stream.char()
        if char == "'":
            self.state = self.afterAttributeValueState
        elif char == "&":
            self.processEntityInAttribute("'")
        elif char == "\u0000":
            self.tokenQueue.append(
                {"type": tokenTypes["ParseError"], "data": "invalid-codepoint"})
            self.currentToken["data"][-1][1] += "\uFFFD"
        elif char is EOF:
            self.tokenQueue.append(
                {"type": tokenTypes["ParseError"],
                 "data": "eof-in-attribute-value-single-quote"})
            self.state = self.dataState
        else:
            # Consume the run of ordinary value characters in one call.
            rest = self.stream.charsUntil(("'", "&", "\u0000"))
            self.currentToken["data"][-1][1] += char + rest
        return True

    def attributeValueUnQuotedState(self):
        """Tokenize an attribute value that has no surrounding quotes."""
        char = self.stream.char()
        if char in spaceCharacters:
            self.state = self.beforeAttributeNameState
        elif char == "&":
            self.processEntityInAttribute(">")
        elif char == ">":
            self.emitCurrentToken()
        elif char in ('"', "'", "=", "<", "`"):
            self.tokenQueue.append(
                {"type": tokenTypes["ParseError"],
                 "data": "unexpected-character-in-unquoted-attribute-value"})
            self.currentToken["data"][-1][1] += char
        elif char == "\u0000":
            self.tokenQueue.append(
                {"type": tokenTypes["ParseError"], "data": "invalid-codepoint"})
            self.currentToken["data"][-1][1] += "\uFFFD"
        elif char is EOF:
            self.tokenQueue.append(
                {"type": tokenTypes["ParseError"],
                 "data": "eof-in-attribute-value-no-quotes"})
            self.state = self.dataState
        else:
            # Grab everything up to the next delimiter in a single call.
            stop = frozenset(("&", ">", '"', "'", "=", "<", "`", "\u0000"))
            self.currentToken["data"][-1][1] += char + self.stream.charsUntil(
                stop | spaceCharacters)
        return True

    def afterAttributeValueState(self):
        """Handle the character right after a quoted attribute value."""
        char = self.stream.char()
        if char in spaceCharacters:
            self.state = self.beforeAttributeNameState
        elif char == ">":
            self.emitCurrentToken()
        elif char == "/":
            self.state = self.selfClosingStartTagState
        elif char is EOF:
            self.tokenQueue.append(
                {"type": tokenTypes["ParseError"],
                 "data": "unexpected-EOF-after-attribute-value"})
            self.stream.unget(char)
            self.state = self.dataState
        else:
            # Anything else is an error; reprocess it as a new attribute.
            self.tokenQueue.append(
                {"type": tokenTypes["ParseError"],
                 "data": "unexpected-character-after-attribute-value"})
            self.stream.unget(char)
            self.state = self.beforeAttributeNameState
        return True

    def selfClosingStartTagState(self):
        """Handle the character following "/" inside a start tag."""
        char = self.stream.char()
        if char == ">":
            self.currentToken["selfClosing"] = True
            self.emitCurrentToken()
        else:
            # Both failure cases report an error, unget, and change state;
            # only the error code and target state differ.
            if char is EOF:
                code = "unexpected-EOF-after-solidus-in-tag"
                next_state = self.dataState
            else:
                code = "unexpected-character-after-solidus-in-tag"
                next_state = self.beforeAttributeNameState
            self.tokenQueue.append(
                {"type": tokenTypes["ParseError"], "data": code})
            self.stream.unget(char)
            self.state = next_state
        return True

    def bogusCommentState(self):
        """Swallow everything up to the next ">" (or EOF) as a comment.

        The consumed text becomes the value of a Comment token, with NULs
        replaced by U+FFFD.
        """
        text = self.stream.charsUntil(">").replace("\u0000", "\uFFFD")
        self.tokenQueue.append({"type": tokenTypes["Comment"], "data": text})
        # Discard the terminating ">" (charsUntil stopped before it); this
        # read is a harmless no-op when the stream hit EOF instead.
        self.stream.char()
        self.state = self.dataState
        return True

    def markupDeclarationOpenState(self):
        """Dispatch a "<!" sequence: comment, DOCTYPE, CDATA or bogus comment.

        Characters are read ahead one at a time onto charStack; if none of
        the three recognised openings matches, everything consumed is ungot
        (in reverse order) and reprocessed as a bogus comment.
        """
        charStack = [self.stream.char()]
        if charStack[-1] == "-":
            # "<!--" opens a comment.
            charStack.append(self.stream.char())
            if charStack[-1] == "-":
                self.currentToken = {"type": tokenTypes["Comment"], "data": ""}
                self.state = self.commentStartState
                return True
        elif charStack[-1] in ('d', 'D'):
            # Case-insensitive match of the remaining "OCTYPE" letters.
            matched = True
            for expected in (('o', 'O'), ('c', 'C'), ('t', 'T'),
                             ('y', 'Y'), ('p', 'P'), ('e', 'E')):
                charStack.append(self.stream.char())
                if charStack[-1] not in expected:
                    matched = False
                    break
            if matched:
                self.currentToken = {"type": tokenTypes["Doctype"],
                                     "name": "",
                                     "publicId": None, "systemId": None,
                                     "correct": True}
                self.state = self.doctypeState
                return True
        elif (charStack[-1] == "[" and
              self.parser is not None and
              self.parser.tree.openElements and
              self.parser.tree.openElements[-1].namespace != self.parser.tree.defaultNamespace):
            # "<![CDATA[" is only recognised in foreign (non-HTML) content,
            # i.e. when the current element is not in the default namespace.
            matched = True
            for expected in ["C", "D", "A", "T", "A", "["]:
                charStack.append(self.stream.char())
                if charStack[-1] != expected:
                    matched = False
                    break
            if matched:
                self.state = self.cdataSectionState
                return True

        self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
                                "expected-dashes-or-doctype"})

        # Push every consumed character back (last-read first) so the
        # bogus-comment state sees the input stream unchanged.
        while charStack:
            self.stream.unget(charStack.pop())
        self.state = self.bogusCommentState
        return True

    def commentStartState(self):
        """Handle the first character after the "<!--" opener."""
        char = self.stream.char()
        if char == "-":
            self.state = self.commentStartDashState
        elif char == "\u0000":
            self.tokenQueue.append(
                {"type": tokenTypes["ParseError"], "data": "invalid-codepoint"})
            self.currentToken["data"] += "\uFFFD"
        elif char == ">":
            # "<!-->": an (incorrect) empty comment.
            self.tokenQueue.append(
                {"type": tokenTypes["ParseError"], "data": "incorrect-comment"})
            self.tokenQueue.append(self.currentToken)
            self.state = self.dataState
        elif char is EOF:
            self.tokenQueue.append(
                {"type": tokenTypes["ParseError"], "data": "eof-in-comment"})
            self.tokenQueue.append(self.currentToken)
            self.state = self.dataState
        else:
            self.currentToken["data"] += char
            self.state = self.commentState
        return True

    def commentStartDashState(self):
        """Handle the character after a single "-" at comment start."""
        char = self.stream.char()
        if char == "-":
            self.state = self.commentEndState
        elif char == "\u0000":
            self.tokenQueue.append(
                {"type": tokenTypes["ParseError"], "data": "invalid-codepoint"})
            # The pending dash becomes ordinary comment text.
            self.currentToken["data"] += "-\uFFFD"
        elif char == ">":
            self.tokenQueue.append(
                {"type": tokenTypes["ParseError"], "data": "incorrect-comment"})
            self.tokenQueue.append(self.currentToken)
            self.state = self.dataState
        elif char is EOF:
            self.tokenQueue.append(
                {"type": tokenTypes["ParseError"], "data": "eof-in-comment"})
            self.tokenQueue.append(self.currentToken)
            self.state = self.dataState
        else:
            self.currentToken["data"] += "-" + char
            self.state = self.commentState
        return True

    def commentState(self):
        """Accumulate comment text until a "-" or NUL shows up."""
        char = self.stream.char()
        if char == "-":
            self.state = self.commentEndDashState
        elif char == "\u0000":
            self.tokenQueue.append(
                {"type": tokenTypes["ParseError"], "data": "invalid-codepoint"})
            self.currentToken["data"] += "\uFFFD"
        elif char is EOF:
            self.tokenQueue.append(
                {"type": tokenTypes["ParseError"], "data": "eof-in-comment"})
            self.tokenQueue.append(self.currentToken)
            self.state = self.dataState
        else:
            # Bulk-consume ordinary comment characters in one call.
            bulk = self.stream.charsUntil(("-", "\u0000"))
            self.currentToken["data"] += char + bulk
        return True

    def commentEndDashState(self):
        """Handle the character after a "-" seen inside a comment body."""
        char = self.stream.char()
        if char == "-":
            self.state = self.commentEndState
        elif char == "\u0000":
            self.tokenQueue.append(
                {"type": tokenTypes["ParseError"], "data": "invalid-codepoint"})
            # The pending dash rejoins the comment text.
            self.currentToken["data"] += "-\uFFFD"
            self.state = self.commentState
        elif char is EOF:
            self.tokenQueue.append(
                {"type": tokenTypes["ParseError"],
                 "data": "eof-in-comment-end-dash"})
            self.tokenQueue.append(self.currentToken)
            self.state = self.dataState
        else:
            self.currentToken["data"] += "-" + char
            self.state = self.commentState
        return True

    def commentEndState(self):
        """Handle the character following "--" inside a comment.

        ">" closes the comment; every other character is a recoverable
        parse error that folds the pending dashes back into the comment
        text before continuing.
        """
        data = self.stream.char()
        if data == ">":
            self.tokenQueue.append(self.currentToken)
            self.state = self.dataState
        elif data == "\u0000":
            self.tokenQueue.append({"type": tokenTypes["ParseError"],
                                    "data": "invalid-codepoint"})
            self.currentToken["data"] += "--\uFFFD"
            self.state = self.commentState
        elif data == "!":
            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
                                    "unexpected-bang-after-double-dash-in-comment"})
            self.state = self.commentEndBangState
        elif data == "-":
            # A third dash: report it, keep it, and stay here waiting for ">".
            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
                                    "unexpected-dash-after-double-dash-in-comment"})
            self.currentToken["data"] += data
        elif data is EOF:
            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
                                    "eof-in-comment-double-dash"})
            self.tokenQueue.append(self.currentToken)
            self.state = self.dataState
        else:
            # Any other character reopens the comment body with the dashes
            # restored as literal text.
            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
                                    "unexpected-char-in-comment"})
            self.currentToken["data"] += "--" + data
            self.state = self.commentState
        return True

    def commentEndBangState(self):
        """Handle the character after "--!" inside a comment."""
        char = self.stream.char()
        if char == ">":
            self.tokenQueue.append(self.currentToken)
            self.state = self.dataState
        elif char == "-":
            # "--!" turns back into literal text; the new dash may still
            # begin a real "-->" terminator.
            self.currentToken["data"] += "--!"
            self.state = self.commentEndDashState
        elif char == "\u0000":
            self.tokenQueue.append(
                {"type": tokenTypes["ParseError"], "data": "invalid-codepoint"})
            self.currentToken["data"] += "--!\uFFFD"
            self.state = self.commentState
        elif char is EOF:
            self.tokenQueue.append(
                {"type": tokenTypes["ParseError"],
                 "data": "eof-in-comment-end-bang-state"})
            self.tokenQueue.append(self.currentToken)
            self.state = self.dataState
        else:
            self.currentToken["data"] += "--!" + char
            self.state = self.commentState
        return True

    def doctypeState(self):
        """Expect whitespace immediately after the DOCTYPE keyword."""
        char = self.stream.char()
        if char in spaceCharacters:
            self.state = self.beforeDoctypeNameState
        elif char is EOF:
            self.tokenQueue.append(
                {"type": tokenTypes["ParseError"],
                 "data": "expected-doctype-name-but-got-eof"})
            self.currentToken["correct"] = False
            self.tokenQueue.append(self.currentToken)
            self.state = self.dataState
        else:
            # Missing space: flag the error and reprocess the character.
            self.tokenQueue.append(
                {"type": tokenTypes["ParseError"],
                 "data": "need-space-after-doctype"})
            self.stream.unget(char)
            self.state = self.beforeDoctypeNameState
        return True

    def beforeDoctypeNameState(self):
        """Find the first character of the DOCTYPE name."""
        char = self.stream.char()

        def _error(code):
            self.tokenQueue.append(
                {"type": tokenTypes["ParseError"], "data": code})

        if char in spaceCharacters:
            pass  # keep skipping leading whitespace
        elif char == ">":
            _error("expected-doctype-name-but-got-right-bracket")
            self.currentToken["correct"] = False
            self.tokenQueue.append(self.currentToken)
            self.state = self.dataState
        elif char == "\u0000":
            _error("invalid-codepoint")
            self.currentToken["name"] = "\uFFFD"
            self.state = self.doctypeNameState
        elif char is EOF:
            _error("expected-doctype-name-but-got-eof")
            self.currentToken["correct"] = False
            self.tokenQueue.append(self.currentToken)
            self.state = self.dataState
        else:
            self.currentToken["name"] = char
            self.state = self.doctypeNameState
        return True

    def doctypeNameState(self):
        """Accumulate the DOCTYPE name, lowercasing it once it ends."""
        char = self.stream.char()
        token = self.currentToken
        if char in spaceCharacters:
            token["name"] = token["name"].translate(asciiUpper2Lower)
            self.state = self.afterDoctypeNameState
        elif char == ">":
            token["name"] = token["name"].translate(asciiUpper2Lower)
            self.tokenQueue.append(token)
            self.state = self.dataState
        elif char == "\u0000":
            self.tokenQueue.append(
                {"type": tokenTypes["ParseError"], "data": "invalid-codepoint"})
            token["name"] += "\uFFFD"
            self.state = self.doctypeNameState
        elif char is EOF:
            self.tokenQueue.append(
                {"type": tokenTypes["ParseError"],
                 "data": "eof-in-doctype-name"})
            token["correct"] = False
            token["name"] = token["name"].translate(asciiUpper2Lower)
            self.tokenQueue.append(token)
            self.state = self.dataState
        else:
            token["name"] += char
        return True

    def afterDoctypeNameState(self):
        """Handle input after the DOCTYPE name: PUBLIC, SYSTEM, or bogus.

        Skips whitespace, emits the doctype on ">", and otherwise attempts
        a case-insensitive match of the PUBLIC or SYSTEM keywords; a failed
        match degrades into the bogus-doctype state.
        """
        data = self.stream.char()
        if data in spaceCharacters:
            pass
        elif data == ">":
            self.tokenQueue.append(self.currentToken)
            self.state = self.dataState
        elif data is EOF:
            self.currentToken["correct"] = False
            self.stream.unget(data)
            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
                                    "eof-in-doctype"})
            self.tokenQueue.append(self.currentToken)
            self.state = self.dataState
        else:
            if data in ("p", "P"):
                # Try to complete "PUBLIC", case-insensitively.  Note that
                # `data` is rebound to each successive character here.
                matched = True
                for expected in (("u", "U"), ("b", "B"), ("l", "L"),
                                 ("i", "I"), ("c", "C")):
                    data = self.stream.char()
                    if data not in expected:
                        matched = False
                        break
                if matched:
                    self.state = self.afterDoctypePublicKeywordState
                    return True
            elif data in ("s", "S"):
                # Try to complete "SYSTEM", case-insensitively.
                matched = True
                for expected in (("y", "Y"), ("s", "S"), ("t", "T"),
                                 ("e", "E"), ("m", "M")):
                    data = self.stream.char()
                    if data not in expected:
                        matched = False
                        break
                if matched:
                    self.state = self.afterDoctypeSystemKeywordState
                    return True

            # All the characters read before the current 'data' will be
            # [a-zA-Z], so they're garbage in the bogus doctype and can be
            # discarded; only the latest character might be '>' or EOF
            # and needs to be ungetted
            self.stream.unget(data)
            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
                                    "expected-space-or-right-bracket-in-doctype", "datavars":
                                    {"data": data}})
            self.currentToken["correct"] = False
            self.state = self.bogusDoctypeState

        return True

    def afterDoctypePublicKeywordState(self):
        """Handle the character following the PUBLIC keyword."""
        char = self.stream.char()
        if char in spaceCharacters:
            self.state = self.beforeDoctypePublicIdentifierState
        elif char in ("'", '"'):
            # Quote with no intervening space: error, then reprocess it.
            self.tokenQueue.append(
                {"type": tokenTypes["ParseError"],
                 "data": "unexpected-char-in-doctype"})
            self.stream.unget(char)
            self.state = self.beforeDoctypePublicIdentifierState
        elif char is EOF:
            self.tokenQueue.append(
                {"type": tokenTypes["ParseError"], "data": "eof-in-doctype"})
            self.currentToken["correct"] = False
            self.tokenQueue.append(self.currentToken)
            self.state = self.dataState
        else:
            # Reprocess whatever it was in the next state.
            self.stream.unget(char)
            self.state = self.beforeDoctypePublicIdentifierState
        return True

    def beforeDoctypePublicIdentifierState(self):
        """Look for the opening quote of the DOCTYPE public identifier."""
        char = self.stream.char()

        def _error(code):
            self.tokenQueue.append(
                {"type": tokenTypes["ParseError"], "data": code})

        if char in spaceCharacters:
            pass  # skip whitespace
        elif char == "\"":
            self.currentToken["publicId"] = ""
            self.state = self.doctypePublicIdentifierDoubleQuotedState
        elif char == "'":
            self.currentToken["publicId"] = ""
            self.state = self.doctypePublicIdentifierSingleQuotedState
        elif char == ">":
            _error("unexpected-end-of-doctype")
            self.currentToken["correct"] = False
            self.tokenQueue.append(self.currentToken)
            self.state = self.dataState
        elif char is EOF:
            _error("eof-in-doctype")
            self.currentToken["correct"] = False
            self.tokenQueue.append(self.currentToken)
            self.state = self.dataState
        else:
            _error("unexpected-char-in-doctype")
            self.currentToken["correct"] = False
            self.state = self.bogusDoctypeState
        return True

    def doctypePublicIdentifierDoubleQuotedState(self):
        """Collect a double-quoted DOCTYPE public identifier."""
        char = self.stream.char()
        if char == "\"":
            self.state = self.afterDoctypePublicIdentifierState
        elif char == "\u0000":
            self.tokenQueue.append(
                {"type": tokenTypes["ParseError"], "data": "invalid-codepoint"})
            self.currentToken["publicId"] += "\uFFFD"
        elif char == ">":
            self.tokenQueue.append(
                {"type": tokenTypes["ParseError"],
                 "data": "unexpected-end-of-doctype"})
            self.currentToken["correct"] = False
            self.tokenQueue.append(self.currentToken)
            self.state = self.dataState
        elif char is EOF:
            self.tokenQueue.append(
                {"type": tokenTypes["ParseError"], "data": "eof-in-doctype"})
            self.currentToken["correct"] = False
            self.tokenQueue.append(self.currentToken)
            self.state = self.dataState
        else:
            self.currentToken["publicId"] += char
        return True

    def doctypePublicIdentifierSingleQuotedState(self):
        """Collect a single-quoted DOCTYPE public identifier."""
        char = self.stream.char()
        if char == "'":
            self.state = self.afterDoctypePublicIdentifierState
        elif char == "\u0000":
            self.tokenQueue.append(
                {"type": tokenTypes["ParseError"], "data": "invalid-codepoint"})
            self.currentToken["publicId"] += "\uFFFD"
        elif char == ">":
            self.tokenQueue.append(
                {"type": tokenTypes["ParseError"],
                 "data": "unexpected-end-of-doctype"})
            self.currentToken["correct"] = False
            self.tokenQueue.append(self.currentToken)
            self.state = self.dataState
        elif char is EOF:
            self.tokenQueue.append(
                {"type": tokenTypes["ParseError"], "data": "eof-in-doctype"})
            self.currentToken["correct"] = False
            self.tokenQueue.append(self.currentToken)
            self.state = self.dataState
        else:
            self.currentToken["publicId"] += char
        return True

    def afterDoctypePublicIdentifierState(self):
        """Handle the character after the public id's closing quote."""
        char = self.stream.char()

        def _error(code):
            self.tokenQueue.append(
                {"type": tokenTypes["ParseError"], "data": code})

        if char in spaceCharacters:
            self.state = self.betweenDoctypePublicAndSystemIdentifiersState
        elif char == ">":
            self.tokenQueue.append(self.currentToken)
            self.state = self.dataState
        elif char == '"':
            # System id follows with no separating space: error but accept.
            _error("unexpected-char-in-doctype")
            self.currentToken["systemId"] = ""
            self.state = self.doctypeSystemIdentifierDoubleQuotedState
        elif char == "'":
            _error("unexpected-char-in-doctype")
            self.currentToken["systemId"] = ""
            self.state = self.doctypeSystemIdentifierSingleQuotedState
        elif char is EOF:
            _error("eof-in-doctype")
            self.currentToken["correct"] = False
            self.tokenQueue.append(self.currentToken)
            self.state = self.dataState
        else:
            _error("unexpected-char-in-doctype")
            self.currentToken["correct"] = False
            self.state = self.bogusDoctypeState
        return True

    def betweenDoctypePublicAndSystemIdentifiersState(self):
        """Tokenizer state between the DOCTYPE public and system identifiers.

        Skips whitespace, accepts a quoted system identifier, finishes the
        doctype on ">", and treats anything else as a parse error that sends
        the tokenizer into the bogus-doctype state.
        """
        data = self.stream.char()
        if data in spaceCharacters:
            pass
        elif data == ">":
            self.tokenQueue.append(self.currentToken)
            self.state = self.dataState
        elif data == '"':
            self.currentToken["systemId"] = ""
            self.state = self.doctypeSystemIdentifierDoubleQuotedState
        elif data == "'":
            self.currentToken["systemId"] = ""
            self.state = self.doctypeSystemIdentifierSingleQuotedState
        elif data is EOF:
            # Identity check: EOF is a sentinel, and every sibling state in
            # this tokenizer tests it with "is"; the previous "== EOF" here
            # was inconsistent with that convention.
            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
                                    "eof-in-doctype"})
            self.currentToken["correct"] = False
            self.tokenQueue.append(self.currentToken)
            self.state = self.dataState
        else:
            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
                                    "unexpected-char-in-doctype"})
            self.currentToken["correct"] = False
            self.state = self.bogusDoctypeState
        return True

    def afterDoctypeSystemKeywordState(self):
        """Handle the character following the SYSTEM keyword."""
        char = self.stream.char()
        if char in spaceCharacters:
            self.state = self.beforeDoctypeSystemIdentifierState
        elif char in ("'", '"'):
            # Quote with no intervening space: error, then reprocess it.
            self.tokenQueue.append(
                {"type": tokenTypes["ParseError"],
                 "data": "unexpected-char-in-doctype"})
            self.stream.unget(char)
            self.state = self.beforeDoctypeSystemIdentifierState
        elif char is EOF:
            self.tokenQueue.append(
                {"type": tokenTypes["ParseError"], "data": "eof-in-doctype"})
            self.currentToken["correct"] = False
            self.tokenQueue.append(self.currentToken)
            self.state = self.dataState
        else:
            # Reprocess whatever it was in the next state.
            self.stream.unget(char)
            self.state = self.beforeDoctypeSystemIdentifierState
        return True

    def beforeDoctypeSystemIdentifierState(self):
        """Look for the opening quote of the DOCTYPE system identifier."""
        char = self.stream.char()

        def _error(code):
            self.tokenQueue.append(
                {"type": tokenTypes["ParseError"], "data": code})

        if char in spaceCharacters:
            pass  # skip whitespace
        elif char == "\"":
            self.currentToken["systemId"] = ""
            self.state = self.doctypeSystemIdentifierDoubleQuotedState
        elif char == "'":
            self.currentToken["systemId"] = ""
            self.state = self.doctypeSystemIdentifierSingleQuotedState
        elif char == ">":
            _error("unexpected-char-in-doctype")
            self.currentToken["correct"] = False
            self.tokenQueue.append(self.currentToken)
            self.state = self.dataState
        elif char is EOF:
            _error("eof-in-doctype")
            self.currentToken["correct"] = False
            self.tokenQueue.append(self.currentToken)
            self.state = self.dataState
        else:
            _error("unexpected-char-in-doctype")
            self.currentToken["correct"] = False
            self.state = self.bogusDoctypeState
        return True

    def doctypeSystemIdentifierDoubleQuotedState(self):
        """Collect a double-quoted DOCTYPE system identifier."""
        char = self.stream.char()
        if char == "\"":
            self.state = self.afterDoctypeSystemIdentifierState
        elif char == "\u0000":
            self.tokenQueue.append(
                {"type": tokenTypes["ParseError"], "data": "invalid-codepoint"})
            self.currentToken["systemId"] += "\uFFFD"
        elif char == ">":
            self.tokenQueue.append(
                {"type": tokenTypes["ParseError"],
                 "data": "unexpected-end-of-doctype"})
            self.currentToken["correct"] = False
            self.tokenQueue.append(self.currentToken)
            self.state = self.dataState
        elif char is EOF:
            self.tokenQueue.append(
                {"type": tokenTypes["ParseError"], "data": "eof-in-doctype"})
            self.currentToken["correct"] = False
            self.tokenQueue.append(self.currentToken)
            self.state = self.dataState
        else:
            self.currentToken["systemId"] += char
        return True

    def doctypeSystemIdentifierSingleQuotedState(self):
        """Accumulate a single-quoted DOCTYPE system identifier."""
        char = self.stream.char()
        if char == "'":
            # Closing quote: the identifier is complete.
            self.state = self.afterDoctypeSystemIdentifierState
        elif char == "\u0000":
            # U+0000 is a parse error and is replaced with U+FFFD.
            self.tokenQueue.append({"type": tokenTypes["ParseError"],
                                    "data": "invalid-codepoint"})
            self.currentToken["systemId"] += "\uFFFD"
        elif char == ">" or char is EOF:
            # Doctype terminated inside the quoted identifier: emit the
            # (incorrect) token and fall back to the data state.
            message = ("eof-in-doctype" if char is EOF
                       else "unexpected-end-of-doctype")
            self.tokenQueue.append({"type": tokenTypes["ParseError"],
                                    "data": message})
            self.currentToken["correct"] = False
            self.tokenQueue.append(self.currentToken)
            self.state = self.dataState
        else:
            # Ordinary character: append it to the identifier.
            self.currentToken["systemId"] += char
        return True

    def afterDoctypeSystemIdentifierState(self):
        """Handle the "after DOCTYPE system identifier" tokenizer state."""
        char = self.stream.char()
        if char in spaceCharacters:
            # Trailing whitespace is ignored.
            return True
        if char == ">":
            # Well-formed end of the doctype: emit the token as-is.
            self.tokenQueue.append(self.currentToken)
            self.state = self.dataState
        elif char is EOF:
            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
                                    "eof-in-doctype"})
            self.currentToken["correct"] = False
            self.tokenQueue.append(self.currentToken)
            self.state = self.dataState
        else:
            # Extra junk after the identifier; note the "correct" flag is
            # deliberately left untouched here, matching the spec.
            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
                                    "unexpected-char-in-doctype"})
            self.state = self.bogusDoctypeState
        return True

    def bogusDoctypeState(self):
        """Discard characters until '>' or EOF, then emit the doctype token."""
        char = self.stream.char()
        if char == ">" or char is EOF:
            if char is EOF:
                # XXX EMIT
                self.stream.unget(char)
            self.tokenQueue.append(self.currentToken)
            self.state = self.dataState
        # Every other character is silently dropped.
        return True

    def cdataSectionState(self):
        """Consume a CDATA section up to its "]]>" terminator (or EOF) and
        emit the contents as a single Characters token."""
        data = []
        while True:
            # Collect runs of content; the "]]>" terminator straddles these
            # two charsUntil calls (the "]]" lands at the end of the second
            # chunk, the ">" is read separately below).
            data.append(self.stream.charsUntil("]"))
            data.append(self.stream.charsUntil(">"))
            char = self.stream.char()
            if char == EOF:
                # Unterminated section: emit whatever was gathered.
                break
            else:
                assert char == ">"
                if data[-1][-2:] == "]]":
                    # Real terminator: strip the trailing "]]" and stop.
                    data[-1] = data[-1][:-2]
                    break
                else:
                    # A ">" not preceded by "]]" is ordinary content.
                    data.append(char)

        data = "".join(data)  # pylint:disable=redefined-variable-type
        # Deal with null here rather than in the parser
        nullCount = data.count("\u0000")
        if nullCount > 0:
            # One parse error per U+0000, then substitute U+FFFD for each.
            for _ in range(nullCount):
                self.tokenQueue.append({"type": tokenTypes["ParseError"],
                                        "data": "invalid-codepoint"})
            data = data.replace("\u0000", "\uFFFD")
        if data:
            self.tokenQueue.append({"type": tokenTypes["Characters"],
                                    "data": data})
        self.state = self.dataState
        return True
_vendor/html5lib/treewalkers/__pycache__/dom.cpython-36.opt-1.pyc000064400000003137151733136420020635 0ustar003

�Pf��@sBddlmZmZmZddlmZddlmZGdd�dej�Z	dS)�)�absolute_import�division�unicode_literals)�Node�)�basec@s,eZdZdd�Zdd�Zdd�Zdd�Zd	S)
�
TreeWalkercCs�|jtjkr tj|j|j|jfS|jtjtj	fkr>tj
|jfS|jtjkr�i}xJt
|jj��D]8}|j|�}|jr�|j||j|jf<q^|j|d|jf<q^Wtj|j|j||j�fS|jtjkr�tj|jfS|jtjtjfkr�tjfStj|jfSdS)N)ZnodeTyperZDOCUMENT_TYPE_NODErZDOCTYPE�nameZpublicIdZsystemIdZ	TEXT_NODEZCDATA_SECTION_NODEZTEXTZ	nodeValueZELEMENT_NODE�listZ
attributes�keysZgetAttributeNodeZnamespaceURI�valueZ	localNameZELEMENTZnodeNameZ
hasChildNodesZCOMMENT_NODE�COMMENTZ
DOCUMENT_NODEZDOCUMENT_FRAGMENT_NODEZDOCUMENTZUNKNOWN)�self�nodeZattrs�attr�r�/usr/lib/python3.6/dom.py�getNodeDetails	s$
zTreeWalker.getNodeDetailscCs|jS)N)Z
firstChild)rrrrr�
getFirstChild$szTreeWalker.getFirstChildcCs|jS)N)ZnextSibling)rrrrr�getNextSibling'szTreeWalker.getNextSiblingcCs|jS)N)Z
parentNode)rrrrr�
getParentNode*szTreeWalker.getParentNodeN)�__name__�
__module__�__qualname__rrrrrrrrrsrN)
Z
__future__rrrZxml.domr�rZNonRecursiveTreeWalkerrrrrr�<module>s_vendor/html5lib/treewalkers/__pycache__/etree_lxml.cpython-36.opt-1.pyc000064400000014053151733136420022215 0ustar003

�Pf��@s�ddlmZmZmZddlmZddlmZddlm	Z	ddl
mZddl
mZd	d
�Z
Gdd�de�ZGd
d�de�ZGdd�de�ZGdd�de�ZGdd�dej�ZdS)�)�absolute_import�division�unicode_literals)�	text_type)�etree�)�
tag_regexp�)�base)�	_ihatexmlcCs*|dkrdSt|t�r|S|jdd�SdS)N�ascii�strict)�
isinstancer�decode)�s�r� /usr/lib/python3.6/etree_lxml.py�
ensure_strs

rc@s,eZdZdd�Zdd�Zdd�Zdd�Zd	S)
�RootcCs�||_g|_y:|jjrD|jjt|t|jj�t|jj�t|jj	���Wnt
k
rZYnXy|j�}Wnt
k
r�|}YnXx|j�dk	r�|j�}q�Wx |dk	r�|jj|�|j
�}q�Wd|_d|_dS)N)Zelementtree�childrenZdocinfoZinternalDTD�append�DoctyperZ	root_name�	public_idZ
system_url�AttributeErrorZgetrootZgetprevious�getnext�text�tail)�selfZet�noderrr�__init__s*




z
Root.__init__cCs
|j|S)N)r)r�keyrrr�__getitem__1szRoot.__getitem__cCsdS)Nr)rrrrr4szRoot.getnextcCsdS)Nr	r)rrrr�__len__7szRoot.__len__N)�__name__�
__module__�__qualname__rr!rr"rrrrrsrc@seZdZdd�Zdd�ZdS)rcCs(||_||_||_||_d|_d|_dS)N)�	root_node�namer�	system_idrr)rr&r'rr(rrrr<szDoctype.__init__cCs|jjdS)Nr	)r&r)rrrrrEszDoctype.getnextN)r#r$r%rrrrrrr;s	rc@seZdZdd�Zdd�ZdS)�FragmentRootcs$�fdd�|D��_d�_�_dS)Ncsg|]}t�|��qSr)�FragmentWrapper)�.0Zchild)rrr�
<listcomp>Ksz)FragmentRoot.__init__.<locals>.<listcomp>)rrr)rrr)rrrJszFragmentRoot.__init__cCsdS)Nr)rrrrrNszFragmentRoot.getnextN)r#r$r%rrrrrrr)Isr)c@sTeZdZdd�Zdd�Zdd�Zdd�Zd	d
�Zdd�Zd
d�Z	dd�Z
dd�ZdS)r*cCsT||_||_t|jd�r(t|jj�|_nd|_t|jd�rJt|jj�|_nd|_dS)Nrr)r&�obj�hasattrrrr)rZ
fragment_rootr-rrrrSszFragmentWrapper.__init__cCst|j|�S)N)�getattrr-)rr'rrr�__getattr___szFragmentWrapper.__getattr__cCs6|jj}|j|�}|t|�dkr.||dSdSdS)Nr	)r&r�index�len)rZsiblings�idxrrrrbs

zFragmentWrapper.getnextcCs
|j|S)N)r-)rr rrrr!jszFragmentWrapper.__getitem__cCs
t|j�S)N)�boolr-)rrrr�__bool__mszFragmentWrapper.__bool__cCsdS)Nr)rrrr�	getparentpszFragmentWrapper.getparentcCs
t|j�S)N)�strr-)rrrr�__str__sszFragmentWrapper.__str__cCs
t|j�S)N)r7r-)rrrr�__unicode__vszFragmentWrapper.__unicode__cCs
t|j�S)N)r2r-)rrrrr"yszFragmentWrapper.__len__N)r#r$r%rr0rr!r5r6r8r9r"rrrrr*Rsr*c@s4eZdZdd�Zdd�Zdd�Zdd�Zd	d
�ZdS)�
TreeWalkercCsJt|t�rt|�|_t|�}nt�|_t|�}tjj||�t	j
�|_dS)N)r�list�set�fragmentChildrenr)rr
�NonRecursiveTreeWalkerrrZ
InfosetFilter�filter)rZtreerrrr~s


zTreeWalker.__init__c	Cs�t|t�r&|\}}tjtt||��fSt|t�r8tjfSt|t�rVtj	|j
|j|jfSt|t
�r|t|d�r|tjt|j�fS|jtjkr�tjt|j�fS|jtjkr�tjt|j�dd�fStjt|j��}|r�|j�\}}nd}t|j�}i}xbt|jj��D]P\}}t|�}t|�}tj|�}|�rB|||jd�|jd�f<n||d|f<�qWtj||jj |�|t!|�dk�px|jfSdS)N�tagr	rr���)"r�tupler
ZTEXTrr/rZDOCUMENTrZDOCTYPEr'rr(r*r.r-r@r�Comment�COMMENTrZEntityZENTITYr�match�groupsr;Zattrib�items�groupZELEMENTr?ZfromXmlNamer2)	rrr rE�	namespacer@Zattrsr'�valuerrr�getNodeDetails�s8




zTreeWalker.getNodeDetailscCs|jr|dfS|dSdS)Nrr)r)rrrrr�
getFirstChild�szTreeWalker.getFirstChildcCsNt|t�r8|\}}|dkr0t|�r*|dSdSn|j�S|jrF|dfS|j�S)Nrrr)rrBr2rr)rrr rrr�getNextSibling�s
zTreeWalker.getNextSiblingcCs6t|t�r |\}}|dkr.|Sn||jkr.dS|j�S)Nr)rrBr=r6)rrr rrr�
getParentNode�s

zTreeWalker.getParentNodeN)r#r$r%rrKrLrMrNrrrrr:}s
)	r:N)Z
__future__rrrZpip._vendor.sixrZlxmlrZtreebuilders.etreer�r
rr�objectrrr)r*r>r:rrrr�<module>s	&	+_vendor/html5lib/treewalkers/__pycache__/etree.cpython-36.pyc000064400000007001151733136420020215 0ustar003

�PfL�@s�ddlmZmZmZyddlmZWn>ek
rbyddlmZWnek
r\eZYnXYnXddl	Z	ddl
mZddlm
Z
ddlmZe	jd	�Zd
d�Zee�ZdS)�)�absolute_import�division�unicode_literals)�OrderedDictN)�string_types�)�base�)�moduleFactoryFactoryz
{([^}]*)}(.*)cs,|}|jd�j�G�fdd�dtj�}t�S)NZasdcs4eZdZdZ�fdd�Zdd�Zdd�Zdd	�Zd
S)z#getETreeBuilder.<locals>.TreeWalkera�Given the particular ElementTree representation, this implementation,
        to avoid using recursion, returns "nodes" as tuples with the following
        content:

        1. The current element

        2. The index of the element relative to its parent

        3. A stack of ancestor elements

        4. A flag "text", "tail" or None to indicate if the current node is a
           text node; either the text or tail of the current element (1)
        csLt|t�r2|\}}}}|dkr.tjt||�fS|}t|d�sD|j�}|jdkrVtjfS|jdkr|tj	|j
|jd�|jd�fS|j�kr�tj|j
fSt|jt
�s�tt|j���tj|j�}|r�|j�\}}n
d}|j}t�}xPt|jj��D]>\}	}
tj|	�}|�r|
||jd	�|jd
�f<q�|
|d|	f<q�Wtj|||t|��pD|j
fSdS)
N�text�tail�tag�
DOCUMENT_ROOT�DOCUMENT_FRAGMENTz
<!DOCTYPE>ZpublicIdZsystemIdrr	)rr)rr)�
isinstance�tuplerZTEXT�getattr�hasattrZgetrootr
ZDOCUMENTZDOCTYPEr�get�COMMENTr�AssertionError�type�
tag_regexp�match�groupsr�listZattrib�items�groupZELEMENT�len)�self�nodeZelt�_�flagr�	namespacer
Zattrs�name�value)�ElementTreeCommentType��/usr/lib/python3.6/etree.py�getNodeDetails's8





z2getETreeBuilder.<locals>.TreeWalker.getNodeDetailscSstt|t�r|\}}}}n|dgdf\}}}}|dkr8dS|jrJ|||dfSt|�rl|j|�|dd|dfSdSdS)Nrrr)rr)rrrr�append)rr �element�key�parentsr"r'r'r(�
getFirstChildOs

z1getETreeBuilder.<locals>.TreeWalker.getFirstChildcSs�t|t�r|\}}}}ndS|dkrLt|�rF|j|�|dd|dfSdSnN|jrf|dkrf|||dfS|t|d�dkr�|d|d|d|dfSdSdS)Nrrrr���r/)rrrr*r)rr r+r,r-r"r'r'r(�getNextSibling`s

z2getETreeBuilder.<locals>.TreeWalker.getNextSiblingcSs�t|t�r|\}}}}ndS|dkr:|s,|S|||dfSnD|j�}|sJ|St|d�j|�dksdt�|t|d�j|�|dfSdS)Nrrr/r/)rr�popr�countr�index)rr r+r,r-r"�parentr'r'r(�
getParentNodets
z1getETreeBuilder.<locals>.TreeWalker.getParentNodeN)�__name__�
__module__�__qualname__�__doc__r)r.r0r5r')r&r'r(�
TreeWalkers

(r:)�Commentr
rZNonRecursiveTreeWalker�locals)ZElementTreeImplementationZElementTreer:r')r&r(�getETreeBuildersnr=)Z
__future__rrr�collectionsr�ImportErrorZordereddict�dict�reZpip._vendor.sixr�rZ_utilsr
�compilerr=ZgetETreeModuler'r'r'r(�<module>s
t_vendor/html5lib/treewalkers/__pycache__/__init__.cpython-36.pyc000064400000007226151733136430020662 0ustar003

�Pf��@sbdZddlmZmZmZddlmZddlmZdddd	d
dgZ	iZ
dd
d�Zdd�Zdd�Z
dS)a�A collection of modules for iterating through different kinds of
tree, generating tokens identical to those produced by the tokenizer
module.

To create a tree walker for a new type of tree, you need to do
implement a tree walker object (called TreeWalker by convention) that
implements a 'serialize' method taking a tree as sole argument and
returning an iterator generating tokens.
�)�absolute_import�division�unicode_literals�)�	constants)�
default_etree�
getTreeWalker�pprint�dom�etree�genshi�
etree_lxmlNcKs�|j�}|tkr�|dkr0ddlm}|jt|<np|dkrPddlm}|jt|<nP|dkrpddlm}|jt|<n0|dkr�dd	lm}|d
kr�t}|j	|f|�jStj
|�S)a�Get a TreeWalker class for various types of tree with built-in support

    Args:
        treeType (str): the name of the tree type required (case-insensitive).
            Supported values are:

            - "dom": The xml.dom.minidom DOM implementation
            - "etree": A generic walker for tree implementations exposing an
                       elementtree-like interface (known to work with
                       ElementTree, cElementTree and lxml.etree).
            - "lxml": Optimized walker for lxml.etree
            - "genshi": a Genshi stream

        Implementation: A module implementing the tree type e.g.
            xml.etree.ElementTree or cElementTree (Currently applies to the
            "etree" tree type only).
    r
�)r
r)rZlxml)r
r)rN)�lower�treeWalkerCache�r
Z
TreeWalkerrr
rrZgetETreeModule�get)ZtreeType�implementation�kwargsr
rr
r�r�/usr/lib/python3.6/__init__.pyrs"ccslg}xL|D]D}|d}|dkr.|j|d�q
|rHddj|�d�Vg}|Vq
W|rhddj|�d�VdS)N�type�
Characters�SpaceCharacters�datar)rr)rr)�append�join)�tokensZpendingCharacters�tokenrrrr�concatenateCharacterTokens<s

rcCslg}d}�xVt|�D�]H}|d}|d k�r&|dr~|dtjdkr~|dtjkrdtj|d}n|d}d||df}n|d}|jd	d
||f�|d7}|d}xdt|j��D]T\\}}	}
|r�|tjkr�tj|}n|}d||	f}n|	}|jd
d
|||
f�q�W|dk�r^|d8}q|dk�r:|d8}q|dk�r`|jdd
||df�q|dk�r|d�r�|d�r�|jdd
||d|d|d�r�|dndf�nF|d�r�|jdd
||d|df�n|jdd
||df�n|jdd
|f�q|dk�r8|jdd
||df�q|dk�rRd�s^td��qtd|��qWdj	|�S)!zPretty printer for tree walkersrr�StartTag�EmptyTag�	namespaceZhtmlz%s %s�namez%s<%s>� rrz	%s%s="%s"ZEndTag�Commentz
%s<!-- %s -->ZDoctypeZpublicIdz%s<!DOCTYPE %s "%s" "%s">ZsystemIdrz%s<!DOCTYPE %s "" "%s">z%s<!DOCTYPE %s>z
%s<!DOCTYPE >rz%s"%s"rFzBconcatenateCharacterTokens should have got rid of all Space tokenszUnknown token type, %s�
)r r!)
rrZ
namespaces�prefixesr�sorted�items�AssertionError�
ValueErrorr)Zwalker�output�indentrr�nsr#Zattrsr"Z	localname�valuerrrr	Ksd












)N)�__doc__Z
__future__rrrrrZ_utilsr�__all__rrrr	rrrr�<module>	s
'_vendor/html5lib/treewalkers/__pycache__/dom.cpython-36.pyc000064400000003137151733136430017677 0ustar003

�Pf��@sBddlmZmZmZddlmZddlmZGdd�dej�Z	dS)�)�absolute_import�division�unicode_literals)�Node�)�basec@s,eZdZdd�Zdd�Zdd�Zdd�Zd	S)
�
TreeWalkercCs�|jtjkr tj|j|j|jfS|jtjtj	fkr>tj
|jfS|jtjkr�i}xJt
|jj��D]8}|j|�}|jr�|j||j|jf<q^|j|d|jf<q^Wtj|j|j||j�fS|jtjkr�tj|jfS|jtjtjfkr�tjfStj|jfSdS)N)ZnodeTyperZDOCUMENT_TYPE_NODErZDOCTYPE�nameZpublicIdZsystemIdZ	TEXT_NODEZCDATA_SECTION_NODEZTEXTZ	nodeValueZELEMENT_NODE�listZ
attributes�keysZgetAttributeNodeZnamespaceURI�valueZ	localNameZELEMENTZnodeNameZ
hasChildNodesZCOMMENT_NODE�COMMENTZ
DOCUMENT_NODEZDOCUMENT_FRAGMENT_NODEZDOCUMENTZUNKNOWN)�self�nodeZattrs�attr�r�/usr/lib/python3.6/dom.py�getNodeDetails	s$
zTreeWalker.getNodeDetailscCs|jS)N)Z
firstChild)rrrrr�
getFirstChild$szTreeWalker.getFirstChildcCs|jS)N)ZnextSibling)rrrrr�getNextSibling'szTreeWalker.getNextSiblingcCs|jS)N)Z
parentNode)rrrrr�
getParentNode*szTreeWalker.getParentNodeN)�__name__�
__module__�__qualname__rrrrrrrrrsrN)
Z
__future__rrrZxml.domr�rZNonRecursiveTreeWalkerrrrrr�<module>s_vendor/html5lib/treewalkers/__pycache__/etree_lxml.cpython-36.pyc000064400000014646151733136430021267 0ustar003

�Pf��@s�ddlmZmZmZddlmZddlmZddlm	Z	ddl
mZddl
mZd	d
�Z
Gdd�de�ZGd
d�de�ZGdd�de�ZGdd�de�ZGdd�dej�ZdS)�)�absolute_import�division�unicode_literals)�	text_type)�etree�)�
tag_regexp�)�base)�	_ihatexmlcCs*|dkrdSt|t�r|S|jdd�SdS)N�ascii�strict)�
isinstancer�decode)�s�r� /usr/lib/python3.6/etree_lxml.py�
ensure_strs

rc@s,eZdZdd�Zdd�Zdd�Zdd�Zd	S)
�RootcCs�||_g|_y:|jjrD|jjt|t|jj�t|jj�t|jj	���Wnt
k
rZYnXy|j�}Wnt
k
r�|}YnXx|j�dk	r�|j�}q�Wx |dk	r�|jj|�|j
�}q�Wd|_d|_dS)N)Zelementtree�childrenZdocinfoZinternalDTD�append�DoctyperZ	root_name�	public_idZ
system_url�AttributeErrorZgetrootZgetprevious�getnext�text�tail)�selfZet�noderrr�__init__s*




z
Root.__init__cCs
|j|S)N)r)r�keyrrr�__getitem__1szRoot.__getitem__cCsdS)Nr)rrrrr4szRoot.getnextcCsdS)Nr	r)rrrr�__len__7szRoot.__len__N)�__name__�
__module__�__qualname__rr!rr"rrrrrsrc@seZdZdd�Zdd�ZdS)rcCs(||_||_||_||_d|_d|_dS)N)�	root_node�namer�	system_idrr)rr&r'rr(rrrr<szDoctype.__init__cCs|jjdS)Nr	)r&r)rrrrrEszDoctype.getnextN)r#r$r%rrrrrrr;s	rc@seZdZdd�Zdd�ZdS)�FragmentRootcs$�fdd�|D��_d�_�_dS)Ncsg|]}t�|��qSr)�FragmentWrapper)�.0Zchild)rrr�
<listcomp>Ksz)FragmentRoot.__init__.<locals>.<listcomp>)rrr)rrr)rrrJszFragmentRoot.__init__cCsdS)Nr)rrrrrNszFragmentRoot.getnextN)r#r$r%rrrrrrr)Isr)c@sTeZdZdd�Zdd�Zdd�Zdd�Zd	d
�Zdd�Zd
d�Z	dd�Z
dd�ZdS)r*cCsT||_||_t|jd�r(t|jj�|_nd|_t|jd�rJt|jj�|_nd|_dS)Nrr)r&�obj�hasattrrrr)rZ
fragment_rootr-rrrrSszFragmentWrapper.__init__cCst|j|�S)N)�getattrr-)rr'rrr�__getattr___szFragmentWrapper.__getattr__cCs6|jj}|j|�}|t|�dkr.||dSdSdS)Nr	)r&r�index�len)rZsiblings�idxrrrrbs

zFragmentWrapper.getnextcCs
|j|S)N)r-)rr rrrr!jszFragmentWrapper.__getitem__cCs
t|j�S)N)�boolr-)rrrr�__bool__mszFragmentWrapper.__bool__cCsdS)Nr)rrrr�	getparentpszFragmentWrapper.getparentcCs
t|j�S)N)�strr-)rrrr�__str__sszFragmentWrapper.__str__cCs
t|j�S)N)r7r-)rrrr�__unicode__vszFragmentWrapper.__unicode__cCs
t|j�S)N)r2r-)rrrrr"yszFragmentWrapper.__len__N)r#r$r%rr0rr!r5r6r8r9r"rrrrr*Rsr*c@s4eZdZdd�Zdd�Zdd�Zdd�Zd	d
�ZdS)�
TreeWalkercCsJt|t�rt|�|_t|�}nt�|_t|�}tjj||�t	j
�|_dS)N)r�list�set�fragmentChildrenr)rr
�NonRecursiveTreeWalkerrrZ
InfosetFilter�filter)rZtreerrrr~s


zTreeWalker.__init__c	Cs�t|t�r:|\}}|dks&td|��tjtt||��fSt|t�rLtjfSt|t	�rjtj
|j|j|j
fSt|t�r�t|d�r�tjt|j�fS|jtjkr�tjt|j�fS|jtjkr�tjt|j�dd	�fStjt|j��}|�r�|j�\}}nd}t|j�}i}xbt|jj��D]P\}}t|�}t|�}tj|�}|�rX|||jd�|jd�f<n||d|f<�qWtj||j j!|�|t"|�dk�p�|jfSdS)
Nrrz%Text nodes are text or tail, found %s�tagr	rr)rr���)#r�tuple�AssertionErrorr
ZTEXTrr/rZDOCUMENTrZDOCTYPEr'rr(r*r.r-r@r�Comment�COMMENTrZEntityZENTITYr�match�groupsr;Zattrib�items�groupZELEMENTr?ZfromXmlNamer2)	rrr rF�	namespacer@Zattrsr'�valuerrr�getNodeDetails�s:




zTreeWalker.getNodeDetailscCsDt|t�std��t|�s*|js*td��|jr8|dfS|dSdS)NzText nodes have no childrenzNode has no childrenrr)rrBrCr2r)rrrrr�
getFirstChild�s
zTreeWalker.getFirstChildcCsbt|t�rL|\}}|dks&td|��|dkrDt|�r>|dSdSn|j�S|jrZ|dfS|j�S)Nrrz%Text nodes are text or tail, found %sr)rr)rrBrCr2rr)rrr rrr�getNextSibling�s
zTreeWalker.getNextSiblingcCsJt|t�r4|\}}|dks&td|��|dkrB|Sn||jkrBdS|j�S)Nrrz%Text nodes are text or tail, found %s)rr)rrBrCr=r6)rrr rrr�
getParentNode�s

zTreeWalker.getParentNodeN)r#r$r%rrLrMrNrOrrrrr:}s
)	r:N)Z
__future__rrrZpip._vendor.sixrZlxmlrZtreebuilders.etreer�r
rr�objectrrr)r*r>r:rrrr�<module>s	&	+_vendor/html5lib/treewalkers/__pycache__/__init__.cpython-36.opt-1.pyc000064400000007063151733136430021620 0ustar003

�Pf��@sbdZddlmZmZmZddlmZddlmZdddd	d
dgZ	iZ
dd
d�Zdd�Zdd�Z
dS)a�A collection of modules for iterating through different kinds of
tree, generating tokens identical to those produced by the tokenizer
module.

To create a tree walker for a new type of tree, you need to do
implement a tree walker object (called TreeWalker by convention) that
implements a 'serialize' method taking a tree as sole argument and
returning an iterator generating tokens.
�)�absolute_import�division�unicode_literals�)�	constants)�
default_etree�
getTreeWalker�pprint�dom�etree�genshi�
etree_lxmlNcKs�|j�}|tkr�|dkr0ddlm}|jt|<np|dkrPddlm}|jt|<nP|dkrpddlm}|jt|<n0|dkr�dd	lm}|d
kr�t}|j	|f|�jStj
|�S)a�Get a TreeWalker class for various types of tree with built-in support

    Args:
        treeType (str): the name of the tree type required (case-insensitive).
            Supported values are:

            - "dom": The xml.dom.minidom DOM implementation
            - "etree": A generic walker for tree implementations exposing an
                       elementtree-like interface (known to work with
                       ElementTree, cElementTree and lxml.etree).
            - "lxml": Optimized walker for lxml.etree
            - "genshi": a Genshi stream

        Implementation: A module implementing the tree type e.g.
            xml.etree.ElementTree or cElementTree (Currently applies to the
            "etree" tree type only).
    r
�)r
r)rZlxml)r
r)rN)�lower�treeWalkerCache�r
Z
TreeWalkerrr
rrZgetETreeModule�get)ZtreeType�implementation�kwargsr
rr
r�r�/usr/lib/python3.6/__init__.pyrs"ccslg}xL|D]D}|d}|dkr.|j|d�q
|rHddj|�d�Vg}|Vq
W|rhddj|�d�VdS)N�type�
Characters�SpaceCharacters�datar)rr)rr)�append�join)�tokensZpendingCharacters�tokenrrrr�concatenateCharacterTokens<s

rcCs^g}d}�xHt|�D�]:}|d}|dk�r&|dr~|dtjdkr~|dtjkrdtj|d}n|d}d||df}n|d}|jd	d
||f�|d7}|d}xdt|j��D]T\\}}	}
|r�|tjkr�tj|}n|}d||	f}n|	}|jd
d
|||
f�q�W|dk�rP|d8}q|dk�r:|d8}q|dk�r`|jdd
||df�q|dk�r|d�r�|d�r�|jdd
||d|d|d�r�|dndf�nF|d�r�|jdd
||d|df�n|jdd
||df�n|jdd
|f�q|dk�r8|jdd
||df�q|dk�rDqtd|��qWdj|�S)zPretty printer for tree walkersrr�StartTag�EmptyTag�	namespaceZhtmlz%s %s�namez%s<%s>� rrz	%s%s="%s"ZEndTag�Commentz
%s<!-- %s -->ZDoctypeZpublicIdz%s<!DOCTYPE %s "%s" "%s">ZsystemIdrz%s<!DOCTYPE %s "" "%s">z%s<!DOCTYPE %s>z
%s<!DOCTYPE >rz%s"%s"rzUnknown token type, %s�
)r r!)	rrZ
namespaces�prefixesr�sorted�items�
ValueErrorr)Zwalker�output�indentrr�nsr#Zattrsr"Z	localname�valuerrrr	Ksd












)N)�__doc__Z
__future__rrrrrZ_utilsr�__all__rrrr	rrrr�<module>	s
'_vendor/html5lib/treewalkers/__pycache__/base.cpython-36.pyc000064400000010612151733136430020026 0ustar003

�PfK�	@s�ddlmZmZmZddlmZddlmZmZm	Z	ddddd	d
ddd
g	Z
ejZej
ZejZejZejZejZdZdje	�Z	Gdd�de�ZGdd
�d
e�ZdS)�)�absolute_import�division�unicode_literals)�Node�)�
namespaces�voidElements�spaceCharacters�DOCUMENT�DOCTYPE�TEXT�ELEMENT�COMMENT�ENTITY�UNKNOWN�
TreeWalker�NonRecursiveTreeWalkerz<#UNKNOWN#>�c@sheZdZdd�Zdd�Zdd�Zddd	�Zd
d�Zdd
�Zdd�Z	dd�Z
ddd�Zdd�Zdd�Z
dS)rcCs
||_dS)N)�tree)�selfr�r�/usr/lib/python3.6/base.py�__init__szTreeWalker.__init__cCst�dS)N)�NotImplementedError)rrrr�__iter__szTreeWalker.__iter__cCs
d|d�S)NZSerializeError)�type�datar)r�msgrrr�errorszTreeWalker.errorFccs$d|||d�V|r |jd�VdS)NZEmptyTag)r�name�	namespacerzVoid element has children)r)rr r�attrs�hasChildrenrrr�emptyTags

zTreeWalker.emptyTagcCsd|||d�S)NZStartTag)rrr rr)rr rr!rrr�startTag%szTreeWalker.startTagcCsd||d�S)NZEndTag)rrr r)rr rrrr�endTag+szTreeWalker.endTagccsx|}|jt�}|dt|�t|��}|r6d|d�V|}|jt�}|t|�d�}|rdd|d�V|rtd|d�VdS)NZSpaceCharacters)rrZ
Characters)�lstripr	�len�rstrip)rrZmiddle�left�rightrrr�text0s

zTreeWalker.textcCs
d|d�S)N�Comment)rrr)rrrrr�comment>szTreeWalker.commentNcCsd|||d�S)NZDoctype)rr�publicId�systemIdr)rrr.r/rrr�doctypeAszTreeWalker.doctypecCs
d|d�S)NZEntity)rrr)rrrrr�entityGszTreeWalker.entitycCs|jd|�S)NzUnknown node type: )r)rZnodeTyperrr�unknownJszTreeWalker.unknown)F)NN)�__name__�
__module__�__qualname__rrrr#r$r%r+r-r0r1r2rrrrrs

c@s4eZdZdd�Zdd�Zdd�Zdd�Zd	d
�ZdS)rcCst�dS)N)r)r�noderrr�getNodeDetailsOsz%NonRecursiveTreeWalker.getNodeDetailscCst�dS)N)r)rr6rrr�
getFirstChildRsz$NonRecursiveTreeWalker.getFirstChildcCst�dS)N)r)rr6rrr�getNextSiblingUsz%NonRecursiveTreeWalker.getNextSiblingcCst�dS)N)r)rr6rrr�
getParentNodeXsz$NonRecursiveTreeWalker.getParentNodeccs|j}�x�|dk	�r|j|�}|d|dd�}}d}|tkrN|j|�Vn�|tkrrx�|j|�D]
}|VqbWn�|tkr�|\}}}}|s�|tdkr�|tkr�x|j	||||�D]
}|Vq�Wd}n|j
|||�VnV|tkr�|j|d�Vn<|t
k�r|j|d�Vn |tk�rd}n|j|d�V|�r@|j|�}	nd}	|	dk	�rT|	}q
x�|dk	�r�|j|�}|d|dd�}}|tk�r�|\}}}}|�r�|tdk�s�|tk�r�|j||�V|j|k�r�d}P|j|�}
|
dk	�r�|
}Pn
|j|�}�qVWq
WdS)Nr�FZhtmlT)rr7rr0rr+r
rrr#r$rr-rr1r
r2r8r%r9r:)rZcurrentNodeZdetailsrr"�tokenr rZ
attributesZ
firstChildZnextSiblingrrrr[sZ









zNonRecursiveTreeWalker.__iter__N)r3r4r5r7r8r9r:rrrrrrNs
N)Z
__future__rrrZxml.domrZ	constantsrrr	�__all__Z
DOCUMENT_NODEr
ZDOCUMENT_TYPE_NODErZ	TEXT_NODErZELEMENT_NODEr
ZCOMMENT_NODErZENTITY_NODErr�join�objectrrrrrr�<module>s
:_vendor/html5lib/treewalkers/__pycache__/genshi.cpython-36.pyc000064400000003415151733136430020374 0ustar003

�Pf	�@s�ddlmZmZmZddlmZddlmZmZmZm	Z	m
Z
ddlmZmZm
Z
mZmZmZddlmZddlmZmZGd	d
�d
ej�ZdS)�)�absolute_import�division�unicode_literals)�QName)�START�END�
XML_NAMESPACE�DOCTYPE�TEXT)�START_NS�END_NS�START_CDATA�	END_CDATA�PI�COMMENT�)�base�)�voidElements�
namespacesc@seZdZdd�Zdd�ZdS)�
TreeWalkerccsdd}x6|jD],}|dk	r4x|j||�D]
}|Vq&W|}qW|dk	r`x|j|d�D]
}|VqRWdS)N)Ztree�tokens)�selfZprevious�event�token�r�/usr/lib/python3.6/genshi.py�__iter__
s
zTreeWalker.__iter__ccs�|\}}}|tkr�|\}}|j}|j}	i}
x8|D]0\}}t|t�rT||
|j|jf<q0||
d|f<q0W|	tdkr�|tkr�xJ|j|	||
|p�|dtkp�|d|k�D]
}
|
Vq�Wn|j	|	||
�Vn�|tkr�|j}|j}	|	tdks�|tkr�|j
|	|�Vn~|tk�r|j|�Vnf|t
k�r>xZ|j|�D]}
|
V�q,Wn>|tk�rV|j|�Vn&|tttttttfk�rpn|j|�VdS)NZhtmlrr)rZ	localname�	namespace�
isinstancerrrZemptyTagrZstartTagZendTagrZcommentr
�textr	Zdoctyperrrr
rr�unknown)rr�nextZkind�data�_�tagZattribs�namerZconverted_attribs�k�vrrrrrs@





zTreeWalker.tokensN)�__name__�
__module__�__qualname__rrrrrrrsrN)Z
__future__rrrZgenshi.corerrrrr	r
rrr
rrr�rZ	constantsrrrrrrr�<module>s _vendor/html5lib/treewalkers/__pycache__/genshi.cpython-36.opt-1.pyc000064400000003415151733136430021333 0ustar003

�Pf	�@s�ddlmZmZmZddlmZddlmZmZmZm	Z	m
Z
ddlmZmZm
Z
mZmZmZddlmZddlmZmZGd	d
�d
ej�ZdS)�)�absolute_import�division�unicode_literals)�QName)�START�END�
XML_NAMESPACE�DOCTYPE�TEXT)�START_NS�END_NS�START_CDATA�	END_CDATA�PI�COMMENT�)�base�)�voidElements�
namespacesc@seZdZdd�Zdd�ZdS)�
TreeWalkerccsdd}x6|jD],}|dk	r4x|j||�D]
}|Vq&W|}qW|dk	r`x|j|d�D]
}|VqRWdS)N)Ztree�tokens)�selfZprevious�event�token�r�/usr/lib/python3.6/genshi.py�__iter__
s
zTreeWalker.__iter__ccs�|\}}}|tkr�|\}}|j}|j}	i}
x8|D]0\}}t|t�rT||
|j|jf<q0||
d|f<q0W|	tdkr�|tkr�xJ|j|	||
|p�|dtkp�|d|k�D]
}
|
Vq�Wn|j	|	||
�Vn�|tkr�|j}|j}	|	tdks�|tkr�|j
|	|�Vn~|tk�r|j|�Vnf|t
k�r>xZ|j|�D]}
|
V�q,Wn>|tk�rV|j|�Vn&|tttttttfk�rpn|j|�VdS)NZhtmlrr)rZ	localname�	namespace�
isinstancerrrZemptyTagrZstartTagZendTagrZcommentr
�textr	Zdoctyperrrr
rr�unknown)rr�nextZkind�data�_�tagZattribs�namerZconverted_attribs�k�vrrrrrs@





zTreeWalker.tokensN)�__name__�
__module__�__qualname__rrrrrrrsrN)Z
__future__rrrZgenshi.corerrrrr	r
rrr
rrr�rZ	constantsrrrrrrr�<module>s _vendor/html5lib/treewalkers/__pycache__/base.cpython-36.opt-1.pyc000064400000010612151733136430020765 0ustar003

�PfK�	@s�ddlmZmZmZddlmZddlmZmZm	Z	ddddd	d
ddd
g	Z
ejZej
ZejZejZejZejZdZdje	�Z	Gdd�de�ZGdd
�d
e�ZdS)�)�absolute_import�division�unicode_literals)�Node�)�
namespaces�voidElements�spaceCharacters�DOCUMENT�DOCTYPE�TEXT�ELEMENT�COMMENT�ENTITY�UNKNOWN�
TreeWalker�NonRecursiveTreeWalkerz<#UNKNOWN#>�c@sheZdZdd�Zdd�Zdd�Zddd	�Zd
d�Zdd
�Zdd�Z	dd�Z
ddd�Zdd�Zdd�Z
dS)rcCs
||_dS)N)�tree)�selfr�r�/usr/lib/python3.6/base.py�__init__szTreeWalker.__init__cCst�dS)N)�NotImplementedError)rrrr�__iter__szTreeWalker.__iter__cCs
d|d�S)NZSerializeError)�type�datar)r�msgrrr�errorszTreeWalker.errorFccs$d|||d�V|r |jd�VdS)NZEmptyTag)r�name�	namespacerzVoid element has children)r)rr r�attrs�hasChildrenrrr�emptyTags

zTreeWalker.emptyTagcCsd|||d�S)NZStartTag)rrr rr)rr rr!rrr�startTag%szTreeWalker.startTagcCsd||d�S)NZEndTag)rrr r)rr rrrr�endTag+szTreeWalker.endTagccsx|}|jt�}|dt|�t|��}|r6d|d�V|}|jt�}|t|�d�}|rdd|d�V|rtd|d�VdS)NZSpaceCharacters)rrZ
Characters)�lstripr	�len�rstrip)rrZmiddle�left�rightrrr�text0s

zTreeWalker.textcCs
d|d�S)N�Comment)rrr)rrrrr�comment>szTreeWalker.commentNcCsd|||d�S)NZDoctype)rr�publicId�systemIdr)rrr.r/rrr�doctypeAszTreeWalker.doctypecCs
d|d�S)NZEntity)rrr)rrrrr�entityGszTreeWalker.entitycCs|jd|�S)NzUnknown node type: )r)rZnodeTyperrr�unknownJszTreeWalker.unknown)F)NN)�__name__�
__module__�__qualname__rrrr#r$r%r+r-r0r1r2rrrrrs

c@s4eZdZdd�Zdd�Zdd�Zdd�Zd	d
�ZdS)rcCst�dS)N)r)r�noderrr�getNodeDetailsOsz%NonRecursiveTreeWalker.getNodeDetailscCst�dS)N)r)rr6rrr�
getFirstChildRsz$NonRecursiveTreeWalker.getFirstChildcCst�dS)N)r)rr6rrr�getNextSiblingUsz%NonRecursiveTreeWalker.getNextSiblingcCst�dS)N)r)rr6rrr�
getParentNodeXsz$NonRecursiveTreeWalker.getParentNodeccs|j}�x�|dk	�r|j|�}|d|dd�}}d}|tkrN|j|�Vn�|tkrrx�|j|�D]
}|VqbWn�|tkr�|\}}}}|s�|tdkr�|tkr�x|j	||||�D]
}|Vq�Wd}n|j
|||�VnV|tkr�|j|d�Vn<|t
k�r|j|d�Vn |tk�rd}n|j|d�V|�r@|j|�}	nd}	|	dk	�rT|	}q
x�|dk	�r�|j|�}|d|dd�}}|tk�r�|\}}}}|�r�|tdk�s�|tk�r�|j||�V|j|k�r�d}P|j|�}
|
dk	�r�|
}Pn
|j|�}�qVWq
WdS)Nr�FZhtmlT)rr7rr0rr+r
rrr#r$rr-rr1r
r2r8r%r9r:)rZcurrentNodeZdetailsrr"�tokenr rZ
attributesZ
firstChildZnextSiblingrrrr[sZ









zNonRecursiveTreeWalker.__iter__N)r3r4r5r7r8r9r:rrrrrrNs
N)Z
__future__rrrZxml.domrZ	constantsrrr	�__all__Z
DOCUMENT_NODEr
ZDOCUMENT_TYPE_NODErZ	TEXT_NODErZELEMENT_NODEr
ZCOMMENT_NODErZENTITY_NODErr�join�objectrrrrrr�<module>s
:_vendor/html5lib/treewalkers/__pycache__/etree.cpython-36.opt-1.pyc000064400000006635151733136430021171 0ustar003

�PfL�@s�ddlmZmZmZyddlmZWn>ek
rbyddlmZWnek
r\eZYnXYnXddl	Z	ddl
mZddlm
Z
ddlmZe	jd	�Zd
d�Zee�ZdS)�)�absolute_import�division�unicode_literals)�OrderedDictN)�string_types�)�base�)�moduleFactoryFactoryz
{([^}]*)}(.*)cs,|}|jd�j�G�fdd�dtj�}t�S)NZasdcs4eZdZdZ�fdd�Zdd�Zdd�Zdd	�Zd
S)z#getETreeBuilder.<locals>.TreeWalkera�Given the particular ElementTree representation, this implementation,
        to avoid using recursion, returns "nodes" as tuples with the following
        content:

        1. The current element

        2. The index of the element relative to its parent

        3. A stack of ancestor elements

        4. A flag "text", "tail" or None to indicate if the current node is a
           text node; either the text or tail of the current element (1)
        cs2t|t�r2|\}}}}|dkr.tjt||�fS|}t|d�sD|j�}|jdkrVtjfS|jdkr|tj	|j
|jd�|jd�fS|j�kr�tj|j
fSt
j|j�}|r�|j�\}}n
d}|j}t�}xPt|jj��D]>\}	}
t
j|	�}|�r|
||jd	�|jd
�f<q�|
|d|	f<q�Wtj|||t|��p*|j
fSdS)
N�text�tail�tag�
DOCUMENT_ROOT�DOCUMENT_FRAGMENTz
<!DOCTYPE>ZpublicIdZsystemIdrr	)rr)rr)�
isinstance�tuplerZTEXT�getattr�hasattrZgetrootr
ZDOCUMENTZDOCTYPEr�get�COMMENT�
tag_regexp�match�groupsr�listZattrib�items�groupZELEMENT�len)�self�nodeZelt�_�flagr�	namespacer
Zattrs�name�value)�ElementTreeCommentType��/usr/lib/python3.6/etree.py�getNodeDetails's6





z2getETreeBuilder.<locals>.TreeWalker.getNodeDetailscSstt|t�r|\}}}}n|dgdf\}}}}|dkr8dS|jrJ|||dfSt|�rl|j|�|dd|dfSdSdS)Nrrr)rr)rrrr�append)rr�element�key�parentsr r%r%r&�
getFirstChildOs

z1getETreeBuilder.<locals>.TreeWalker.getFirstChildcSs�t|t�r|\}}}}ndS|dkrLt|�rF|j|�|dd|dfSdSnN|jrf|dkrf|||dfS|t|d�dkr�|d|d|d|dfSdSdS)Nrrrr���r-)rrrr(r)rrr)r*r+r r%r%r&�getNextSibling`s

z2getETreeBuilder.<locals>.TreeWalker.getNextSiblingcSsht|t�r|\}}}}ndS|dkr:|s,|S|||dfSn*|j�}|sJ|S|t|d�j|�|dfSdS)Nrrr-)rr�popr�index)rrr)r*r+r �parentr%r%r&�
getParentNodets
z1getETreeBuilder.<locals>.TreeWalker.getParentNodeN)�__name__�
__module__�__qualname__�__doc__r'r,r.r2r%)r$r%r&�
TreeWalkers

(r7)�Commentr
rZNonRecursiveTreeWalker�locals)ZElementTreeImplementationZElementTreer7r%)r$r&�getETreeBuildersnr:)Z
__future__rrr�collectionsr�ImportErrorZordereddict�dict�reZpip._vendor.sixr�rZ_utilsr
�compilerr:ZgetETreeModuler%r%r%r&�<module>s
t_vendor/html5lib/treewalkers/__init__.py000064400000012650151733136430014373 0ustar00"""A collection of modules for iterating through different kinds of
tree, generating tokens identical to those produced by the tokenizer
module.

To create a tree walker for a new type of tree, you need to
implement a tree walker object (called TreeWalker by convention) that
implements a 'serialize' method taking a tree as sole argument and
returning an iterator generating tokens.
"""

from __future__ import absolute_import, division, unicode_literals

from .. import constants
from .._utils import default_etree

__all__ = ["getTreeWalker", "pprint", "dom", "etree", "genshi", "etree_lxml"]

treeWalkerCache = {}


def getTreeWalker(treeType, implementation=None, **kwargs):
    """Return a TreeWalker class for the requested kind of tree.

    :arg treeType: the name of the tree type required (case-insensitive).
        Supported values are:

        - "dom": The xml.dom.minidom DOM implementation
        - "etree": A generic walker for tree implementations exposing an
          elementtree-like interface (known to work with ElementTree,
          cElementTree and lxml.etree).
        - "lxml": Optimized walker for lxml.etree
        - "genshi": a Genshi stream

    :arg implementation: a module implementing the tree type, e.g.
        xml.etree.ElementTree or cElementTree (currently applies to the
        "etree" tree type only).

    :returns: the TreeWalker class, or None for an unknown tree type.
    """
    treeType = treeType.lower()

    if treeType in treeWalkerCache:
        return treeWalkerCache[treeType]

    if treeType == "dom":
        from . import dom
        treeWalkerCache[treeType] = dom.TreeWalker
    elif treeType == "genshi":
        from . import genshi
        treeWalkerCache[treeType] = genshi.TreeWalker
    elif treeType == "lxml":
        from . import etree_lxml
        treeWalkerCache[treeType] = etree_lxml.TreeWalker
    elif treeType == "etree":
        from . import etree
        impl = implementation if implementation is not None else default_etree
        # Deliberately never cached here: per-implementation caching is
        # done inside the etree submodule.
        return etree.getETreeModule(impl, **kwargs).TreeWalker

    # Unknown tree types fall through and yield None.
    return treeWalkerCache.get(treeType)


def concatenateCharacterTokens(tokens):
    """Merge runs of adjacent character tokens into single tokens.

    Consecutive "Characters"/"SpaceCharacters" tokens are buffered and
    flushed as a single combined "Characters" token; every other token
    passes through unchanged.
    """
    buffered = []
    for token in tokens:
        if token["type"] in ("Characters", "SpaceCharacters"):
            buffered.append(token["data"])
            continue
        if buffered:
            yield {"type": "Characters", "data": "".join(buffered)}
            buffered = []
        yield token
    # Flush any trailing character run.
    if buffered:
        yield {"type": "Characters", "data": "".join(buffered)}


def pprint(walker):
    """Pretty printer for tree walkers.

    Consumes the token stream produced by *walker* and returns an
    indented, human-readable text rendering (one node per line), intended
    for debugging and test output.
    """
    output = []
    indent = 0
    for token in concatenateCharacterTokens(walker):
        type = token["type"]
        if type in ("StartTag", "EmptyTag"):
            # tag name, prefixed with its namespace when non-HTML
            if token["namespace"] and token["namespace"] != constants.namespaces["html"]:
                if token["namespace"] in constants.prefixes:
                    ns = constants.prefixes[token["namespace"]]
                else:
                    ns = token["namespace"]
                name = "%s %s" % (ns, token["name"])
            else:
                name = token["name"]
            output.append("%s<%s>" % (" " * indent, name))
            indent += 2
            # attributes (sorted for consistent ordering)
            attrs = token["data"]
            for (namespace, localname), value in sorted(attrs.items()):
                if namespace:
                    if namespace in constants.prefixes:
                        ns = constants.prefixes[namespace]
                    else:
                        ns = namespace
                    name = "%s %s" % (ns, localname)
                else:
                    name = localname
                output.append("%s%s=\"%s\"" % (" " * indent, name, value))
            # self-closing: an EmptyTag opens and closes on the same line
            if type == "EmptyTag":
                indent -= 2

        elif type == "EndTag":
            indent -= 2

        elif type == "Comment":
            output.append("%s<!-- %s -->" % (" " * indent, token["data"]))

        elif type == "Doctype":
            if token["name"]:
                if token["publicId"]:
                    output.append("""%s<!DOCTYPE %s "%s" "%s">""" %
                                  (" " * indent,
                                   token["name"],
                                   token["publicId"],
                                   token["systemId"] if token["systemId"] else ""))
                elif token["systemId"]:
                    output.append("""%s<!DOCTYPE %s "" "%s">""" %
                                  (" " * indent,
                                   token["name"],
                                   token["systemId"]))
                else:
                    output.append("%s<!DOCTYPE %s>" % (" " * indent,
                                                       token["name"]))
            else:
                output.append("%s<!DOCTYPE >" % (" " * indent,))

        elif type == "Characters":
            output.append("%s\"%s\"" % (" " * indent, token["data"]))

        elif type == "SpaceCharacters":
            # concatenateCharacterTokens merges these into Characters
            assert False, "concatenateCharacterTokens should have got rid of all Space tokens"

        else:
            raise ValueError("Unknown token type, %s" % type)

    return "\n".join(output)
_vendor/html5lib/treewalkers/dom.py000064400000002605151733136430013412 0ustar00from __future__ import absolute_import, division, unicode_literals

from xml.dom import Node

from . import base


class TreeWalker(base.NonRecursiveTreeWalker):
    """Non-recursive tree walker over xml.dom.minidom trees."""

    def getNodeDetails(self, node):
        """Classify *node* and return the token tuple for its type."""
        nodeType = node.nodeType
        if nodeType == Node.DOCUMENT_TYPE_NODE:
            return base.DOCTYPE, node.name, node.publicId, node.systemId

        if nodeType in (Node.TEXT_NODE, Node.CDATA_SECTION_NODE):
            return base.TEXT, node.nodeValue

        if nodeType == Node.ELEMENT_NODE:
            attrs = {}
            for attrName in list(node.attributes.keys()):
                attrNode = node.getAttributeNode(attrName)
                # Namespaced attributes are keyed by (uri, localName),
                # plain ones by (None, name).
                if attrNode.namespaceURI:
                    key = (attrNode.namespaceURI, attrNode.localName)
                else:
                    key = (None, attrNode.name)
                attrs[key] = attrNode.value
            return (base.ELEMENT, node.namespaceURI, node.nodeName,
                    attrs, node.hasChildNodes())

        if nodeType == Node.COMMENT_NODE:
            return base.COMMENT, node.nodeValue

        if nodeType in (Node.DOCUMENT_NODE, Node.DOCUMENT_FRAGMENT_NODE):
            return (base.DOCUMENT,)

        return base.UNKNOWN, node.nodeType

    def getFirstChild(self, node):
        return node.firstChild

    def getNextSibling(self, node):
        return node.nextSibling

    def getParentNode(self, node):
        return node.parentNode
_vendor/html5lib/treewalkers/etree.py000064400000011114151733136430013732 0ustar00from __future__ import absolute_import, division, unicode_literals

try:
    from collections import OrderedDict
except ImportError:
    try:
        from ordereddict import OrderedDict
    except ImportError:
        OrderedDict = dict

import re

from pip._vendor.six import string_types

from . import base
from .._utils import moduleFactoryFactory

tag_regexp = re.compile("{([^}]*)}(.*)")


def getETreeBuilder(ElementTreeImplementation):
    """Build a TreeWalker bound to one specific ElementTree module.

    :arg ElementTreeImplementation: a module exposing an ElementTree-like
        API (e.g. xml.etree.ElementTree, cElementTree or lxml.etree)

    :returns: locals() containing the generated TreeWalker class; this is
        turned into a cached module object by getETreeModule below.
    """
    ElementTree = ElementTreeImplementation
    # Comment elements are identified by a sentinel tag object; obtain it
    # for this implementation by creating a throwaway comment.
    ElementTreeCommentType = ElementTree.Comment("asd").tag

    class TreeWalker(base.NonRecursiveTreeWalker):  # pylint:disable=unused-variable
        """Given the particular ElementTree representation, this implementation,
        to avoid using recursion, returns "nodes" as tuples with the following
        content:

        1. The current element

        2. The index of the element relative to its parent

        3. A stack of ancestor elements

        4. A flag "text", "tail" or None to indicate if the current node is a
           text node; either the text or tail of the current element (1)
        """
        def getNodeDetails(self, node):
            # Tuple nodes may represent text/tail pseudo-nodes; unwrap to
            # the owning element when the flag is not set.
            if isinstance(node, tuple):  # It might be the root Element
                elt, _, _, flag = node
                if flag in ("text", "tail"):
                    return base.TEXT, getattr(elt, flag)
                else:
                    node = elt

            if not(hasattr(node, "tag")):
                node = node.getroot()

            if node.tag in ("DOCUMENT_ROOT", "DOCUMENT_FRAGMENT"):
                return (base.DOCUMENT,)

            elif node.tag == "<!DOCTYPE>":
                return (base.DOCTYPE, node.text,
                        node.get("publicId"), node.get("systemId"))

            elif node.tag == ElementTreeCommentType:
                return base.COMMENT, node.text

            else:
                assert isinstance(node.tag, string_types), type(node.tag)
                # This is assumed to be an ordinary element
                match = tag_regexp.match(node.tag)
                if match:
                    namespace, tag = match.groups()
                else:
                    namespace = None
                    tag = node.tag
                attrs = OrderedDict()
                for name, value in list(node.attrib.items()):
                    match = tag_regexp.match(name)
                    if match:
                        attrs[(match.group(1), match.group(2))] = value
                    else:
                        attrs[(None, name)] = value
                # hasChildren is truthy when there are child elements or text
                return (base.ELEMENT, namespace, tag,
                        attrs, len(node) or node.text)

        def getFirstChild(self, node):
            if isinstance(node, tuple):
                element, key, parents, flag = node
            else:
                element, key, parents, flag = node, None, [], None

            # Text/tail pseudo-nodes never have children.
            if flag in ("text", "tail"):
                return None
            else:
                if element.text:
                    # The element's leading text precedes its child elements.
                    return element, key, parents, "text"
                elif len(element):
                    parents.append(element)
                    return element[0], 0, parents, None
                else:
                    return None

        def getNextSibling(self, node):
            if isinstance(node, tuple):
                element, key, parents, flag = node
            else:
                return None

            if flag == "text":
                # After the text pseudo-node come the child elements.
                if len(element):
                    parents.append(element)
                    return element[0], 0, parents, None
                else:
                    return None
            else:
                # After an element comes its tail text, then the next
                # sibling element within the parent (if any).
                if element.tail and flag != "tail":
                    return element, key, parents, "tail"
                elif key < len(parents[-1]) - 1:
                    return parents[-1][key + 1], key + 1, parents, None
                else:
                    return None

        def getParentNode(self, node):
            if isinstance(node, tuple):
                element, key, parents, flag = node
            else:
                return None

            if flag == "text":
                # A text pseudo-node's parent is its owning element.
                if not parents:
                    return element
                else:
                    return element, key, parents, None
            else:
                parent = parents.pop()
                if not parents:
                    return parent
                else:
                    assert list(parents[-1]).count(parent) == 1
                    return parent, list(parents[-1]).index(parent), parents, None

    return locals()

getETreeModule = moduleFactoryFactory(getETreeBuilder)
_vendor/html5lib/treewalkers/base.py000064400000011513151733136430013543 0ustar00from __future__ import absolute_import, division, unicode_literals

from xml.dom import Node
from ..constants import namespaces, voidElements, spaceCharacters

__all__ = ["DOCUMENT", "DOCTYPE", "TEXT", "ELEMENT", "COMMENT", "ENTITY", "UNKNOWN",
           "TreeWalker", "NonRecursiveTreeWalker"]

DOCUMENT = Node.DOCUMENT_NODE
DOCTYPE = Node.DOCUMENT_TYPE_NODE
TEXT = Node.TEXT_NODE
ELEMENT = Node.ELEMENT_NODE
COMMENT = Node.COMMENT_NODE
ENTITY = Node.ENTITY_NODE
UNKNOWN = "<#UNKNOWN#>"

spaceCharacters = "".join(spaceCharacters)


class TreeWalker(object):
    """Base class for tree walkers: builds the serializer token dicts.

    Subclasses implement __iter__ to traverse their particular tree type
    and use the helper methods below to construct tokens.
    """

    def __init__(self, tree):
        # The tree to walk; interpretation is subclass-specific.
        self.tree = tree

    def __iter__(self):
        raise NotImplementedError

    def error(self, msg):
        """Return a SerializeError token carrying *msg*."""
        return {"type": "SerializeError", "data": msg}

    def emptyTag(self, namespace, name, attrs, hasChildren=False):
        """Yield an EmptyTag token, plus an error token when the void
        element unexpectedly has children."""
        yield {"type": "EmptyTag",
               "name": name,
               "namespace": namespace,
               "data": attrs}
        if hasChildren:
            yield self.error("Void element has children")

    def startTag(self, namespace, name, attrs):
        """Return a StartTag token."""
        return {"type": "StartTag",
                "name": name,
                "namespace": namespace,
                "data": attrs}

    def endTag(self, namespace, name):
        """Return an EndTag token."""
        return {"type": "EndTag",
                "name": name,
                "namespace": namespace}

    def text(self, data):
        """Split a text node into SpaceCharacters / Characters tokens,
        separating leading and trailing whitespace from the content."""
        stripped = data.lstrip(spaceCharacters)
        leading = data[:len(data) - len(stripped)]
        if leading:
            yield {"type": "SpaceCharacters", "data": leading}
        content = stripped.rstrip(spaceCharacters)
        trailing = stripped[len(content):]
        if content:
            yield {"type": "Characters", "data": content}
        if trailing:
            yield {"type": "SpaceCharacters", "data": trailing}

    def comment(self, data):
        """Return a Comment token."""
        return {"type": "Comment", "data": data}

    def doctype(self, name, publicId=None, systemId=None):
        """Return a Doctype token."""
        return {"type": "Doctype",
                "name": name,
                "publicId": publicId,
                "systemId": systemId}

    def entity(self, name):
        """Return an Entity token."""
        return {"type": "Entity", "name": name}

    def unknown(self, nodeType):
        """Return a SerializeError token for an unrecognised node type."""
        return self.error("Unknown node type: " + nodeType)


class NonRecursiveTreeWalker(TreeWalker):
    """TreeWalker driven by first-child / next-sibling / parent hooks.

    Subclasses implement the four navigation methods below; __iter__ then
    performs an iterative (recursion-free) depth-first walk of the tree
    and emits serializer tokens.
    """

    def getNodeDetails(self, node):
        raise NotImplementedError

    def getFirstChild(self, node):
        raise NotImplementedError

    def getNextSibling(self, node):
        raise NotImplementedError

    def getParentNode(self, node):
        raise NotImplementedError

    def __iter__(self):
        currentNode = self.tree
        while currentNode is not None:
            details = self.getNodeDetails(currentNode)
            type, details = details[0], details[1:]
            hasChildren = False

            if type == DOCTYPE:
                yield self.doctype(*details)

            elif type == TEXT:
                for token in self.text(*details):
                    yield token

            elif type == ELEMENT:
                namespace, name, attributes, hasChildren = details
                # HTML void elements are emitted as EmptyTag and are never
                # descended into.
                if (not namespace or namespace == namespaces["html"]) and name in voidElements:
                    for token in self.emptyTag(namespace, name, attributes,
                                               hasChildren):
                        yield token
                    hasChildren = False
                else:
                    yield self.startTag(namespace, name, attributes)

            elif type == COMMENT:
                yield self.comment(details[0])

            elif type == ENTITY:
                yield self.entity(details[0])

            elif type == DOCUMENT:
                hasChildren = True

            else:
                yield self.unknown(details[0])

            if hasChildren:
                firstChild = self.getFirstChild(currentNode)
            else:
                firstChild = None

            if firstChild is not None:
                currentNode = firstChild
            else:
                # No child to descend into: emit end tags while climbing
                # until a next sibling exists or we are back at the root.
                while currentNode is not None:
                    details = self.getNodeDetails(currentNode)
                    type, details = details[0], details[1:]
                    if type == ELEMENT:
                        namespace, name, attributes, hasChildren = details
                        # Void elements were emitted as EmptyTag above and
                        # get no closing tag.
                        if (namespace and namespace != namespaces["html"]) or name not in voidElements:
                            yield self.endTag(namespace, name)
                    if self.tree is currentNode:
                        currentNode = None
                        break
                    nextSibling = self.getNextSibling(currentNode)
                    if nextSibling is not None:
                        currentNode = nextSibling
                        break
                    else:
                        currentNode = self.getParentNode(currentNode)
_vendor/html5lib/treewalkers/genshi.py000064400000004405151733136430014110 0ustar00from __future__ import absolute_import, division, unicode_literals

from genshi.core import QName
from genshi.core import START, END, XML_NAMESPACE, DOCTYPE, TEXT
from genshi.core import START_NS, END_NS, START_CDATA, END_CDATA, PI, COMMENT

from . import base

from ..constants import voidElements, namespaces


class TreeWalker(base.TreeWalker):
    """Walk a Genshi event stream, emitting html5lib serializer tokens."""

    def __iter__(self):
        # Events are buffered one step so tokens() can peek at the event
        # that follows (needed to detect immediately-closed void elements).
        pending = None
        for event in self.tree:
            if pending is not None:
                for token in self.tokens(pending, event):
                    yield token
            pending = event

        # Don't forget the final buffered event.
        if pending is not None:
            for token in self.tokens(pending, None):
                yield token

    def tokens(self, event, next):
        """Convert one Genshi *event* into html5lib tokens.

        :arg next: the following event, or None at end of stream; used to
            decide whether a void element has children.
        """
        kind, data, _ = event
        if kind == START:
            tag, attribs = data
            name = tag.localname
            namespace = tag.namespace
            converted = {}
            for key, value in attribs:
                if isinstance(key, QName):
                    converted[(key.namespace, key.localname)] = value
                else:
                    converted[(None, key)] = value

            if namespace == namespaces["html"] and name in voidElements:
                hasChildren = (not next or next[0] != END or
                               next[1] != tag)
                for token in self.emptyTag(namespace, name, converted,
                                           hasChildren):
                    yield token
            else:
                yield self.startTag(namespace, name, converted)

        elif kind == END:
            name = data.localname
            namespace = data.namespace
            if namespace != namespaces["html"] or name not in voidElements:
                yield self.endTag(namespace, name)

        elif kind == COMMENT:
            yield self.comment(data)

        elif kind == TEXT:
            for token in self.text(data):
                yield token

        elif kind == DOCTYPE:
            yield self.doctype(*data)

        elif kind in (XML_NAMESPACE, DOCTYPE, START_NS, END_NS,
                      START_CDATA, END_CDATA, PI):
            # Structural events with no serialized representation.
            pass

        else:
            yield self.unknown(kind)
_vendor/html5lib/treewalkers/etree_lxml.py000064400000014245151733136430014776 0ustar00from __future__ import absolute_import, division, unicode_literals
from pip._vendor.six import text_type

from lxml import etree
from ..treebuilders.etree import tag_regexp

from . import base

from .. import _ihatexml


def ensure_str(s):
    """Return *s* as text: None passes through, bytes are ASCII-decoded."""
    if s is None:
        return None
    if isinstance(s, text_type):
        return s
    return s.decode("ascii", "strict")


class Root(object):
    """Wraps an lxml document (or bare element) as a walkable root node.

    The children are the document-level siblings: an optional Doctype
    followed by any comments/PIs and the root element, in document order.
    """

    def __init__(self, et):
        # et may be an lxml ElementTree or a bare element.
        self.elementtree = et
        self.children = []

        # lxml documents expose their internal DTD through docinfo; bare
        # elements don't have it, hence the AttributeError guard.
        try:
            if et.docinfo.internalDTD:
                self.children.append(Doctype(self,
                                             ensure_str(et.docinfo.root_name),
                                             ensure_str(et.docinfo.public_id),
                                             ensure_str(et.docinfo.system_url)))
        except AttributeError:
            pass

        # Accept either an ElementTree (getroot) or a bare element.
        try:
            node = et.getroot()
        except AttributeError:
            node = et

        # Rewind past any comments/PIs that precede the root element, then
        # collect every top-level sibling in document order.
        while node.getprevious() is not None:
            node = node.getprevious()
        while node is not None:
            self.children.append(node)
            node = node.getnext()

        # The root itself carries no character data.
        self.text = None
        self.tail = None

    def __getitem__(self, key):
        return self.children[key]

    def getnext(self):
        # The root has no siblings.
        return None

    def __len__(self):
        # The walker treats the document as occupying a single slot.
        return 1


class Doctype(object):
    """Synthetic doctype node, exposed as the first child of a Root."""

    def __init__(self, root_node, name, public_id, system_id):
        self.root_node = root_node
        self.name = name
        self.public_id = public_id
        self.system_id = system_id
        # Doctype nodes never carry character data.
        self.text = None
        self.tail = None

    def getnext(self):
        # The doctype is always children[0]; its successor is children[1].
        return self.root_node.children[1]


class FragmentRoot(Root):
    """Root for a parsed fragment: wraps each top-level child directly.

    Deliberately does NOT call Root.__init__ — fragments have no docinfo
    or document-level siblings to collect.
    """

    def __init__(self, children):
        self.children = [FragmentWrapper(self, child) for child in children]
        self.text = self.tail = None

    def getnext(self):
        return None


class FragmentWrapper(object):
    """Proxy around one fragment child, delegating to the wrapped object."""

    def __init__(self, fragment_root, obj):
        self.root_node = fragment_root
        self.obj = obj
        # Plain-string children lack text/tail attributes.
        self.text = ensure_str(self.obj.text) if hasattr(self.obj, 'text') else None
        self.tail = ensure_str(self.obj.tail) if hasattr(self.obj, 'tail') else None

    def __getattr__(self, name):
        # Fall through to the wrapped object for anything not defined here.
        return getattr(self.obj, name)

    def getnext(self):
        """Return the next sibling wrapper, or None at the end."""
        siblings = self.root_node.children
        idx = siblings.index(self)
        return siblings[idx + 1] if idx < len(siblings) - 1 else None

    def __getitem__(self, key):
        return self.obj[key]

    def __bool__(self):
        return bool(self.obj)

    def getparent(self):
        return None

    def __str__(self):
        return str(self.obj)

    def __unicode__(self):
        return str(self.obj)

    def __len__(self):
        return len(self.obj)


class TreeWalker(base.NonRecursiveTreeWalker):
    """Non-recursive walker over lxml trees (documents or fragments).

    Text is represented by (element, "text"/"tail") tuples, since lxml
    stores character data on elements rather than as separate nodes.
    """

    def __init__(self, tree):
        # pylint:disable=redefined-variable-type
        # A list means a fragment (several top-level nodes); anything else
        # is treated as a full document/element tree.
        if isinstance(tree, list):
            self.fragmentChildren = set(tree)
            tree = FragmentRoot(tree)
        else:
            self.fragmentChildren = set()
            tree = Root(tree)
        base.NonRecursiveTreeWalker.__init__(self, tree)
        # Used to undo the infoset name mangling applied at build time.
        self.filter = _ihatexml.InfosetFilter()

    def getNodeDetails(self, node):
        if isinstance(node, tuple):  # Text node
            node, key = node
            assert key in ("text", "tail"), "Text nodes are text or tail, found %s" % key
            return base.TEXT, ensure_str(getattr(node, key))

        elif isinstance(node, Root):
            return (base.DOCUMENT,)

        elif isinstance(node, Doctype):
            return base.DOCTYPE, node.name, node.public_id, node.system_id

        elif isinstance(node, FragmentWrapper) and not hasattr(node, "tag"):
            # A wrapped plain string at fragment top level.
            return base.TEXT, ensure_str(node.obj)

        elif node.tag == etree.Comment:
            return base.COMMENT, ensure_str(node.text)

        elif node.tag == etree.Entity:
            return base.ENTITY, ensure_str(node.text)[1:-1]  # strip &;

        else:
            # This is assumed to be an ordinary element
            match = tag_regexp.match(ensure_str(node.tag))
            if match:
                namespace, tag = match.groups()
            else:
                namespace = None
                tag = ensure_str(node.tag)
            attrs = {}
            for name, value in list(node.attrib.items()):
                name = ensure_str(name)
                value = ensure_str(value)
                match = tag_regexp.match(name)
                if match:
                    attrs[(match.group(1), match.group(2))] = value
                else:
                    attrs[(None, name)] = value
            # fromXmlName undoes name mangling; hasChildren is truthy when
            # there are child elements or leading text.
            return (base.ELEMENT, namespace, self.filter.fromXmlName(tag),
                    attrs, len(node) > 0 or node.text)

    def getFirstChild(self, node):
        assert not isinstance(node, tuple), "Text nodes have no children"

        assert len(node) or node.text, "Node has no children"
        if node.text:
            # Leading text comes before any child elements.
            return (node, "text")
        else:
            return node[0]

    def getNextSibling(self, node):
        if isinstance(node, tuple):  # Text node
            node, key = node
            assert key in ("text", "tail"), "Text nodes are text or tail, found %s" % key
            if key == "text":
                # XXX: we cannot use a "bool(node) and node[0] or None" construct here
                # because node[0] might evaluate to False if it has no child element
                if len(node):
                    return node[0]
                else:
                    return None
            else:  # tail
                return node.getnext()

        return (node, "tail") if node.tail else node.getnext()

    def getParentNode(self, node):
        if isinstance(node, tuple):  # Text node
            node, key = node
            assert key in ("text", "tail"), "Text nodes are text or tail, found %s" % key
            if key == "text":
                return node
            # else: fallback to "normal" processing
        elif node in self.fragmentChildren:
            # Top-level fragment children have no parent.
            return None

        return node.getparent()
_vendor/html5lib/__init__.py000064400000001414151733136430012037 0ustar00"""
HTML parsing library based on the WHATWG "HTML5"
specification. The parser is designed to be compatible with existing
HTML found in the wild and implements well-defined error recovery that
is largely compatible with modern desktop web browsers.

Example usage:

import html5lib
f = open("my_document.html")
tree = html5lib.parse(f)
"""

from __future__ import absolute_import, division, unicode_literals

from .html5parser import HTMLParser, parse, parseFragment
from .treebuilders import getTreeBuilder
from .treewalkers import getTreeWalker
from .serializer import serialize

__all__ = ["HTMLParser", "parse", "parseFragment", "getTreeBuilder",
           "getTreeWalker", "serialize"]

# this has to be at the top level, see how setup.py parses this
__version__ = "1.0b10"
_vendor/html5lib/_utils.py000064400000010000151733136430011566 0ustar00from __future__ import absolute_import, division, unicode_literals

import sys
from types import ModuleType

from pip._vendor.six import text_type

try:
    import xml.etree.cElementTree as default_etree
except ImportError:
    import xml.etree.ElementTree as default_etree


__all__ = ["default_etree", "MethodDispatcher", "isSurrogatePair",
           "surrogatePairToCodepoint", "moduleFactoryFactory",
           "supports_lone_surrogates", "PY27"]


PY27 = sys.version_info[0] == 2 and sys.version_info[1] >= 7

# Platforms not supporting lone surrogates (\uD800-\uDFFF) should be
# caught by the below test. In general this would be any platform
# using UTF-16 as its encoding of unicode strings, such as
# Jython. This is because UTF-16 itself is based on the use of such
# surrogates, and there is no mechanism to further escape such
# escapes.
try:
    _x = eval('"\\uD800"')  # pylint:disable=eval-used
    if not isinstance(_x, text_type):
        # We need this with u"" because of http://bugs.jython.org/issue2039
        _x = eval('u"\\uD800"')  # pylint:disable=eval-used
        assert isinstance(_x, text_type)
except:  # pylint:disable=bare-except
    supports_lone_surrogates = False
else:
    supports_lone_surrogates = True


class MethodDispatcher(dict):
    """Dict with two special properties:

    On initiation, keys that are lists, sets or tuples are expanded into
    one entry per contained item, so that looking up any item of the
    original list-like key returns the matching value:

        md = MethodDispatcher({("foo", "bar"): "baz"})
        md["foo"] == "baz"

    A fallback for missing keys can be set through the ``default``
    attribute.
    """

    def __init__(self, items=()):
        # Building the flat entry list first and handing it to
        # dict.__init__ in one go is about twice as fast as assigning into
        # self directly; keep it that way (measured upstream).
        entries = []
        for key, value in items:
            if isinstance(key, (list, tuple, frozenset, set)):
                entries.extend((item, value) for item in key)
            else:
                entries.append((key, value))
        dict.__init__(self, entries)
        # Duplicate expanded keys would silently collapse; guard against it.
        assert len(self) == len(entries)
        self.default = None

    def __getitem__(self, key):
        # Missing keys fall back to self.default instead of raising.
        return dict.get(self, key, self.default)


# Some utility functions to deal with weirdness around UCS2 vs UCS4
# python builds

def isSurrogatePair(data):
    """Return True if *data* is exactly a UTF-16 high+low surrogate pair."""
    if len(data) != 2:
        return False
    high, low = ord(data[0]), ord(data[1])
    return 0xD800 <= high <= 0xDBFF and 0xDC00 <= low <= 0xDFFF


def surrogatePairToCodepoint(data):
    """Combine a UTF-16 surrogate pair into its Unicode code point."""
    high = ord(data[0]) - 0xD800
    low = ord(data[1]) - 0xDC00
    return 0x10000 + high * 0x400 + low

# Module Factory Factory (no, this isn't Java, I know)
# Here to stop this being duplicated all over the place.


def moduleFactoryFactory(factory):
    """Return a memoizing module factory around *factory*.

    The returned ``moduleFactory(baseModule, *args, **kwargs)`` builds a
    module object whose namespace is the dict returned by
    ``factory(baseModule, *args, **kwargs)``, cached per
    (derived module name, args, kwargs) so each combination is built once.
    """
    moduleCache = {}

    def moduleFactory(baseModule, *args, **kwargs):
        # On Python 3 module names are str; on Python 2 they are bytes.
        # Probe ModuleType.__name__ to pick the matching literal type.
        if isinstance(ModuleType.__name__, type("")):
            name = "_%s_factory" % baseModule.__name__
        else:
            name = b"_%s_factory" % baseModule.__name__

        # kwargs must be reduced to a hashable cache key.
        kwargs_tuple = tuple(kwargs.items())

        try:
            return moduleCache[name][args][kwargs_tuple]
        except KeyError:
            mod = ModuleType(name)
            objs = factory(baseModule, *args, **kwargs)
            mod.__dict__.update(objs)
            # Bug fix: the original tested the string literals "name",
            # "args" and "kwargs" against the cache instead of the
            # variables, so every cache miss wiped previously cached
            # modules for the same name. setdefault inserts without
            # clobbering existing entries.
            moduleCache.setdefault(name, {}).setdefault(args, {})[kwargs_tuple] = mod
            return mod

    return moduleFactory


def memoize(func):
    """Unbounded memoization decorator keyed on call arguments.

    All arguments must be hashable. Keyword arguments are sorted by name
    so that ``f(a=1, b=2)`` and ``f(b=2, a=1)`` hit the same cache entry
    (the original keyed on raw item order, causing spurious misses).
    The wrapper preserves the wrapped function's metadata via
    functools.wraps, which the original dropped.
    """
    from functools import wraps

    cache = {}

    @wraps(func)
    def wrapped(*args, **kwargs):
        # Keyword names are unique strings, so sorting never compares values.
        key = (tuple(args), tuple(sorted(kwargs.items())))
        if key not in cache:
            cache[key] = func(*args, **kwargs)
        return cache[key]

    return wrapped
_vendor/html5lib/filters/optionaltags.py000064400000024446151733136430014466 0ustar00from __future__ import absolute_import, division, unicode_literals

from . import base


class Filter(base.Filter):
    """Removes optional start and end tags from a token stream.

    Implements the HTML5 tag-omission rules: a start or end tag is
    dropped from the output whenever the spec says it may be omitted,
    which is decided from the immediately preceding and following
    tokens.
    """

    def slider(self):
        """Yield (previous, current, next) triples over the source tokens.

        ``previous`` is None for the first token and ``next`` is None
        for the last one.
        """
        previous1 = previous2 = None
        for token in self.source:
            if previous1 is not None:
                yield previous2, previous1, token
            previous2 = previous1
            previous1 = token
        if previous1 is not None:
            yield previous2, previous1, None

    def __iter__(self):
        """Yield source tokens, skipping omissible start/end tags.

        Start tags carrying attributes are never omitted.
        """
        for previous, token, next in self.slider():
            type = token["type"]
            if type == "StartTag":
                if (token["data"] or
                        not self.is_optional_start(token["name"], previous, next)):
                    yield token
            elif type == "EndTag":
                if not self.is_optional_end(token["name"], next):
                    yield token
            else:
                yield token

    def is_optional_start(self, tagname, previous, next):
        """Return True if *tagname*'s start tag may be omitted here."""
        type = next and next["type"] or None
        # Fix: this used to read ``tagname in 'html'`` — a substring
        # test that also matched 'h', 'ht', 'tm', 'ml', ..., wrongly
        # treating such (unknown) elements' start tags as omissible.
        if tagname == 'html':
            # An html element's start tag may be omitted if the first thing
            # inside the html element is not a space character or a comment.
            return type not in ("Comment", "SpaceCharacters")
        elif tagname == 'head':
            # A head element's start tag may be omitted if the first thing
            # inside the head element is an element.
            # XXX: we also omit the start tag if the head element is empty
            if type in ("StartTag", "EmptyTag"):
                return True
            elif type == "EndTag":
                return next["name"] == "head"
        elif tagname == 'body':
            # A body element's start tag may be omitted if the first thing
            # inside the body element is not a space character or a comment,
            # except if the first thing inside the body element is a script
            # or style element and the node immediately preceding the body
            # element is a head element whose end tag has been omitted.
            if type in ("Comment", "SpaceCharacters"):
                return False
            elif type == "StartTag":
                # XXX: we do not look at the preceding event, so we never omit
                # the body element's start tag if it's followed by a script or
                # a style element.
                return next["name"] not in ('script', 'style')
            else:
                return True
        elif tagname == 'colgroup':
            # A colgroup element's start tag may be omitted if the first thing
            # inside the colgroup element is a col element, and if the element
            # is not immediately preceded by another colgroup element whose
            # end tag has been omitted.
            if type in ("StartTag", "EmptyTag"):
                # XXX: we do not look at the preceding event, so instead we never
                # omit the colgroup element's end tag when it is immediately
                # followed by another colgroup element. See is_optional_end.
                return next["name"] == "col"
            else:
                return False
        elif tagname == 'tbody':
            # A tbody element's start tag may be omitted if the first thing
            # inside the tbody element is a tr element, and if the element is
            # not immediately preceded by a tbody, thead, or tfoot element
            # whose end tag has been omitted.
            if type == "StartTag":
                # omit the thead and tfoot elements' end tag when they are
                # immediately followed by a tbody element. See is_optional_end.
                if previous and previous['type'] == 'EndTag' and \
                        previous['name'] in ('tbody', 'thead', 'tfoot'):
                    return False
                return next["name"] == 'tr'
            else:
                return False
        return False

    def is_optional_end(self, tagname, next):
        """Return True if *tagname*'s end tag may be omitted here."""
        type = next and next["type"] or None
        if tagname in ('html', 'head', 'body'):
            # An html element's end tag may be omitted if the html element
            # is not immediately followed by a space character or a comment.
            return type not in ("Comment", "SpaceCharacters")
        elif tagname in ('li', 'optgroup', 'tr'):
            # A li element's end tag may be omitted if the li element is
            # immediately followed by another li element or if there is
            # no more content in the parent element.
            # An optgroup element's end tag may be omitted if the optgroup
            # element is immediately followed by another optgroup element,
            # or if there is no more content in the parent element.
            # A tr element's end tag may be omitted if the tr element is
            # immediately followed by another tr element, or if there is
            # no more content in the parent element.
            if type == "StartTag":
                return next["name"] == tagname
            else:
                return type == "EndTag" or type is None
        elif tagname in ('dt', 'dd'):
            # A dt element's end tag may be omitted if the dt element is
            # immediately followed by another dt element or a dd element.
            # A dd element's end tag may be omitted if the dd element is
            # immediately followed by another dd element or a dt element,
            # or if there is no more content in the parent element.
            if type == "StartTag":
                return next["name"] in ('dt', 'dd')
            elif tagname == 'dd':
                return type == "EndTag" or type is None
            else:
                return False
        elif tagname == 'p':
            # A p element's end tag may be omitted if the p element is
            # immediately followed by an address, article, aside,
            # blockquote, datagrid, dialog, dir, div, dl, fieldset,
            # footer, form, h1, h2, h3, h4, h5, h6, header, hr, menu,
            # nav, ol, p, pre, section, table, or ul, element, or if
            # there is no more content in the parent element.
            if type in ("StartTag", "EmptyTag"):
                return next["name"] in ('address', 'article', 'aside',
                                        'blockquote', 'datagrid', 'dialog',
                                        'dir', 'div', 'dl', 'fieldset', 'footer',
                                        'form', 'h1', 'h2', 'h3', 'h4', 'h5', 'h6',
                                        'header', 'hr', 'menu', 'nav', 'ol',
                                        'p', 'pre', 'section', 'table', 'ul')
            else:
                return type == "EndTag" or type is None
        elif tagname == 'option':
            # An option element's end tag may be omitted if the option
            # element is immediately followed by another option element,
            # or if it is immediately followed by an optgroup
            # element, or if there is no more content in the parent
            # element.
            if type == "StartTag":
                return next["name"] in ('option', 'optgroup')
            else:
                return type == "EndTag" or type is None
        elif tagname in ('rt', 'rp'):
            # An rt element's end tag may be omitted if the rt element is
            # immediately followed by an rt or rp element, or if there is
            # no more content in the parent element.
            # An rp element's end tag may be omitted if the rp element is
            # immediately followed by an rt or rp element, or if there is
            # no more content in the parent element.
            if type == "StartTag":
                return next["name"] in ('rt', 'rp')
            else:
                return type == "EndTag" or type is None
        elif tagname == 'colgroup':
            # A colgroup element's end tag may be omitted if the colgroup
            # element is not immediately followed by a space character or
            # a comment.
            if type in ("Comment", "SpaceCharacters"):
                return False
            elif type == "StartTag":
                # XXX: we also look for an immediately following colgroup
                # element. See is_optional_start.
                return next["name"] != 'colgroup'
            else:
                return True
        elif tagname in ('thead', 'tbody'):
            # A thead element's end tag may be omitted if the thead element
            # is immediately followed by a tbody or tfoot element.
            # A tbody element's end tag may be omitted if the tbody element
            # is immediately followed by a tbody or tfoot element, or if
            # there is no more content in the parent element.
            # A tfoot element's end tag may be omitted if the tfoot element
            # is immediately followed by a tbody element, or if there is no
            # more content in the parent element.
            # XXX: we never omit the end tag when the following element is
            # a tbody. See is_optional_start.
            if type == "StartTag":
                return next["name"] in ['tbody', 'tfoot']
            elif tagname == 'tbody':
                return type == "EndTag" or type is None
            else:
                return False
        elif tagname == 'tfoot':
            # A tfoot element's end tag may be omitted if the tfoot element
            # is immediately followed by a tbody element, or if there is no
            # more content in the parent element.
            # XXX: we never omit the end tag when the following element is
            # a tbody. See is_optional_start.
            if type == "StartTag":
                return next["name"] == 'tbody'
            else:
                return type == "EndTag" or type is None
        elif tagname in ('td', 'th'):
            # A td element's end tag may be omitted if the td element is
            # immediately followed by a td or th element, or if there is
            # no more content in the parent element.
            # A th element's end tag may be omitted if the th element is
            # immediately followed by a td or th element, or if there is
            # no more content in the parent element.
            if type == "StartTag":
                return next["name"] in ('td', 'th')
            else:
                return type == "EndTag" or type is None
        return False
_vendor/html5lib/filters/__pycache__/whitespace.cpython-36.opt-1.pyc000064400000002245151733136430021332 0ustar003

�Pfs�@snddlmZmZmZddlZddlmZddlmZm	Z	dj
e	�Z	ejde	�ZGd	d
�d
ej
�Z
dd�ZdS)
�)�absolute_import�division�unicode_literalsN�)�base�)�rcdataElements�spaceCharacters�z[%s]+c@s(eZdZeddgee��Zdd�ZdS)�FilterZpreZtextareaccs�d}x�tjj|�D]�}|d}|dkrB|s8|d|jkrB|d7}nT|dkrX|rX|d8}n>|rx|dkrx|drxd	|d<n|r�|d
kr�t|d�|d<|VqWdS)Nr�typeZStartTag�namerZEndTagZSpaceCharacters�data� Z
Characters)rr�__iter__�spacePreserveElements�collapse_spaces)�selfZpreserve�tokenr�r� /usr/lib/python3.6/whitespace.pyrs


zFilter.__iter__N)�__name__�
__module__�__qualname__�	frozenset�listrrrrrrrrsrcCstjd|�S)Nr)�SPACES_REGEX�sub)�textrrrr%sr)Z
__future__rrr�rer
rZ	constantsrr	�join�compilerrrrrrr�<module>s
_vendor/html5lib/filters/__pycache__/inject_meta_charset.cpython-36.pyc000064400000003116151733136430022230 0ustar003

�Pf�
�@s6ddlmZmZmZddlmZGdd�dej�ZdS)�)�absolute_import�division�unicode_literals�)�basec@seZdZdd�Zdd�ZdS)�FiltercCstjj||�||_dS)N)rr�__init__�encoding)�self�sourcer	�r�)/usr/lib/python3.6/inject_meta_charset.pyrszFilter.__init__c
cs�d}|jdk}g}�x�tjj|�D�]�}|d}|dkrP|dj�dkrLd}�np|dk�rV|dj�dk�rd	}x�|d
j�D]V\\}}}	|dk	r�q~q~|j�dkr�|j|d
||f<d}Pq~|d
kr~|	j�dkr~d}q~W|o�d|d
k�rTd|j|d
d<d}nR|dj�dk�r�|�r�dd|d
d�Vddd|jid�Vddd�Vd}q"nj|dk�r�|dj�dk�r�|�r�|jd�V|�s�ddd|jid�Vx|�r�|jd�V�q�Wd}d}|dk�r�|j|�q"|Vq"WdS)NZpre_head�typeZStartTag�name�headZin_headZEmptyTag�metaF�data�charsetTz
http-equivzcontent-type�contentztext/html; charset=%s)rrrZEndTag)rrrZ	post_head)Nr)Nr)Nr)Nr)r	rr�__iter__�lower�items�pop�append)
r
�stateZ
meta_found�pending�tokenrZhas_http_equiv_content_type�	namespacer�valuerrr
rsX



zFilter.__iter__N)�__name__�
__module__�__qualname__rrrrrr
rsrN)Z
__future__rrr�rrrrrr
�<module>s_vendor/html5lib/filters/__pycache__/whitespace.cpython-36.pyc000064400000002245151733136430020373 0ustar003

�Pfs�@snddlmZmZmZddlZddlmZddlmZm	Z	dj
e	�Z	ejde	�ZGd	d
�d
ej
�Z
dd�ZdS)
�)�absolute_import�division�unicode_literalsN�)�base�)�rcdataElements�spaceCharacters�z[%s]+c@s(eZdZeddgee��Zdd�ZdS)�FilterZpreZtextareaccs�d}x�tjj|�D]�}|d}|dkrB|s8|d|jkrB|d7}nT|dkrX|rX|d8}n>|rx|dkrx|drxd	|d<n|r�|d
kr�t|d�|d<|VqWdS)Nr�typeZStartTag�namerZEndTagZSpaceCharacters�data� Z
Characters)rr�__iter__�spacePreserveElements�collapse_spaces)�selfZpreserve�tokenr�r� /usr/lib/python3.6/whitespace.pyrs


zFilter.__iter__N)�__name__�
__module__�__qualname__�	frozenset�listrrrrrrrrsrcCstjd|�S)Nr)�SPACES_REGEX�sub)�textrrrr%sr)Z
__future__rrr�rer
rZ	constantsrr	�join�compilerrrrrrr�<module>s
_vendor/html5lib/filters/__pycache__/sanitizer.cpython-36.pyc000064400000043040151733136430020245 0ustar003

�Pfb�E@s
ddlmZmZmZddlZddlmZmZddlm	Z
ddlmZddl
mZmZd	gZeed
dfed
dfed
d
fed
dfed
dfed
dfed
dfed
dfed
dfed
dfed
dfed
dfed
dfed
dfed
dfed
dfed
dfed
dfed
dfed
dfed
dfed
d fed
d!fed
d"fed
d#fed
d$fed
d%fed
d&fed
d'fed
d(fed
d)fed
d*fed
d+fed
d,fed
d-fed
d.fed
d/fed
d0fed
d1fed
d2fed
d3fed
d4fed
d5fed
d6fed
d7fed
d8fed
d9fed
d:fed
d;fed
d<fed
d=fed
d>fed
d?fed
d@fed
dAfed
dBfed
dCfed
dDfed
dEfed
dFfed
dGfed
dHfed
dIfed
dJfed
dKfed
dLfed
dMfed
dNfed
dOfed
dPfed
dQfed
dRfed
dSfed
dTfed
dUfed
dVfed
dWfed
dXfed
dYfed
dZfed
d[fed
d\fed
d]fed
d^fed
d_fed
d`fed
dafed
dbfed
dcfed
ddfed
defed
dffed
dgfed
dhfed
difed
djfed
dkfed
dlfed
dmfedndofedndpfedndqfedndrfedndsfedndtfedndufedndvfedndwfedndxfedndyfedndzfednd{fednd|fednd}fednd~fedndfednd�fednd�fednd�fednd�fednd�fednd�fednd�fednd�fednd�fednd�fed�dfed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�ff��Ze�d3�d4�d5�d6�d7�d8�d9�d:�d;�d<�d=�d>�d?�d@�dA�dB�dC�dD�dE�dF�dG�dH�dI�dJ�dK�dL�dM�dN�dO�dP�dQ�dR�dS�dT�dU�dV�dW�dX�dY�dZ�d[�d\�d]�d^�d_�d`�da�db�dc�dd�de�df�dg�dh�di�dj�dk�dl�dm�dn�do�dp�dq�dr�ds�dt�du�dv�dw�dx�dy�dz�d{�d|�d}�d~�d�d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d�e�d1d�f�d��d��d��ddÐdĐdŐdƐdǐdȐdɐdʐdːd̐d͐dΐdϐdАdѐdҐdӐdԐdՐd֐dאdؐdِdڐdېdܐdݐdސdߐd�d�d�d�d�d�d�e�dOd�fe�dO�dPfe�dO�d%f�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d��d��d��d��d��d��d��d��d��d��d�d�d�d�d�d�d�d�d�d	�d
�d�d�d
�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d �d!�d"�d#�d$�d%�d&�d'�d(�d)�d*�d+�d,�d-�d.�d/�d0�d1�d2�d3�d4�d5�d6�d7�d8�d9�d:�d;�d<�d=�d>�d?�d@�dA�dB�dC�dD�dE�dF�dG�dH�dI�dJ�dK�dL�dM�dN�dO�dP�dQ�dR�dS�dT�dU�dV�dW�dX�dY�dZ�d[�d\�d]�d^�d_�d`�da�db�dce�dO�d�fe�dO�d�fe�dOd�fe�dO�d�fe�dO�dPfe�dOd�fe�dO�d%fe�d1�d�fe�d1d�fe�d1�d�f�dd�de�df�dg�fC�Ze�dh�di�dj�dk�dl�dm�dn�do�dp�dq�dre�dOd�fe�d1�d�ff
�Ze�d~�Ze�d��Ze�d��Ze�d��Ze�d��Ze�d��Ze�d��Zej�d1ej�ZG�d2d	�d	ej�ZdS(��)�absolute_import�division�unicode_literalsN)�escape�unescape)�urllib_parse�)�base�)�
namespaces�prefixes�Filter�html�a�abbrZacronymZaddressZareaZarticleZasideZaudio�bZbigZ
blockquote�brZbuttonZcanvasZcaption�center�cite�code�colZcolgroupZcommandZdatagridZdatalistZdd�delZdetailsZdfnZdialog�dirZdivZdlZdtZemzevent-sourceZfieldsetZ
figcaptionZfigureZfooter�font�form�headerZh1Zh2Zh3Zh4Zh5Zh6Zhr�iZimg�inputZinsZkeygenZkbd�labelZlegendZli�m�mapZmenuZmeterZmulticolZnavZnextidZol�outputZoptgroupZoption�pZpreZprogress�q�sZsampZsectionZselectZsmallZsound�sourceZspacer�spanZstrikeZstrong�subZsup�tableZtbodyZtdZtextareaZtimeZtfootZthZtheadZtrZtt�uZul�varZvideoZmathmlZmactionZmathZmerrorZmfracZmiZ
mmultiscriptsZmn�moZmoverZmpaddedZmphantomZmprescriptsZmrootZmrowZmspaceZmsqrtZmstyleZmsubZmsubsupZmsupZmtableZmtdZmtextZmtrZmunderZ
munderover�noneZsvg�animate�animateColor�
animateMotion�animateTransformZclipPathZcircleZdefsZdescZellipsez	font-facezfont-face-namez
font-face-src�gZglyphZhkern�linearGradient�line�markerZmetadataz
missing-glyphZmpath�pathZpolygonZpolyline�radialGradientZrect�set�stopZswitch�text�titleZtspan�use�accept�accept-charset�	accesskey�action�align�alt�autocomplete�	autofocus�axis�
background�balance�bgcolor�bgproperties�border�bordercolor�bordercolordark�bordercolorlight�
bottompadding�cellpadding�cellspacing�ch�	challenge�char�charoff�choff�charset�checked�class�clear�color�cols�colspan�compact�contenteditable�controls�coords�data�datafld�datapagesize�datasrc�datetime�default�delay�disabled�	draggable�dynsrc�enctype�end�face�for�frame�
galleryimg�gutter�headers�height�	hidefocus�hidden�high�href�hreflang�hspace�icon�id�	inputmode�ismap�keytype�leftspacing�lang�list�longdesc�loop�	loopcount�loopend�	loopstart�low�lowsrc�max�	maxlength�media�method�min�multiple�name�nohref�noshade�nowrap�open�optimum�pattern�ping�
point-size�poster�pqg�preload�prompt�
radiogroup�readonly�rel�
repeat-max�
repeat-min�replace�required�rev�rightspacing�rows�rowspan�rules�scope�selected�shape�size�src�start�step�style�summary�suppress�tabindex�target�template�
toppadding�type�unselectable�usemap�urn�valign�value�variable�volume�vspace�vrml�width�wrapZxml�
actiontype�columnalign�columnlines�
columnspacing�
columnspan�depth�display�displaystyle�equalcolumns�	equalrows�fence�	fontstyle�
fontweight�
linethickness�lspace�mathbackground�	mathcolor�mathvariant�maxsize�minsize�other�rowalign�rowlines�
rowspacing�rspace�scriptlevel�	selection�	separator�stretchy�xlinkZshow�
accent-height�
accumulate�additive�
alphabetic�arabic-form�ascent�
attributeName�
attributeType�baseProfile�bbox�begin�by�calcMode�
cap-height�	clip-path�color-rendering�content�cx�cy�d�dx�dy�descent�dur�fill�fill-opacity�	fill-rule�font-family�	font-size�font-stretch�
font-style�font-variant�font-weight�from�fx�fy�g1�g2�
glyph-name�
gradientUnits�hanging�horiz-adv-x�horiz-origin-x�ideographic�k�	keyPoints�
keySplines�keyTimes�
marker-end�
marker-mid�marker-start�markerHeight�markerUnits�markerWidth�mathematical�offset�opacity�orient�origin�overline-position�overline-thickness�panose-1�
pathLength�points�preserveAspectRatio�r�refX�refY�repeatCount�	repeatDur�requiredExtensions�requiredFeatures�restart�rotate�rx�ry�slope�stemh�stemv�
stop-color�stop-opacity�strikethrough-position�strikethrough-thickness�stroke�stroke-dasharray�stroke-dashoffset�stroke-linecap�stroke-linejoin�stroke-miterlimit�stroke-opacity�stroke-width�systemLanguage�text-anchor�to�	transform�u1�u2�underline-position�underline-thickness�unicode�
unicode-range�units-per-em�values�version�viewBox�
visibility�widths�x�x-height�x1�x2ZactuateZarcroleZroler	Zspace�y�y1�y2�
zoomAndPan�
color-profile�cursor�filter�mask�altGlyph�feImage�textpath�tref�azimuth�background-color�border-bottom-color�border-collapse�border-color�border-left-color�border-right-color�border-top-color�	direction�	elevation�float�letter-spacing�line-height�overflow�pause�pause-after�pause-before�pitch�pitch-range�richness�speak�speak-header�
speak-numeral�speak-punctuation�speech-rate�stress�
text-align�text-decoration�text-indent�unicode-bidi�vertical-align�voice-family�white-space�auto�aqua�black�block�blue�bold�both�bottom�brown�collapse�dashed�dotted�fuchsia�gray�green�
!important�italic�left�lime�maroon�medium�navy�normal�olive�pointer�purple�red�right�solid�silver�teal�top�transparent�	underline�white�yellow�ed2k�ftp�http�https�irc�mailto�news�gopher�nntp�telnet�webcal�xmpp�callto�feed�aim�rsync�tag�ssh�sftp�rtsp�afs�	image/png�
image/jpeg�	image/gif�
image/webp�	image/bmp�
text/plainaL
                                ^
                                # Match a content type <application>/<type>
                                (?P<content_type>[-a-zA-Z0-9.]+/[-a-zA-Z0-9.]+)
                                # Match any character set and encoding
                                (?:(?:;charset=(?:[-a-zA-Z0-9]+)(?:;(?:base64))?)
                                  |(?:;(?:base64))?(?:;charset=(?:[-a-zA-Z0-9]+))?)
                                # Assume the rest is data
                                ,.*
                                $
                                c
s^eZdZdZeeeeee	e
eee
f
�fdd�	Zdd�Zdd�Zdd	�Zd
d�Zdd
�Z�ZS)r
zA sanitization of XHTML+MathML+SVG and of inline style attributes.csPtt|�j|�||_||_||_||_||_||_||_	|	|_
|
|_||_dS)N)
�superr
�__init__�allowed_elements�allowed_attributes�allowed_css_properties�allowed_css_keywords�allowed_svg_properties�allowed_protocols�allowed_content_types�attr_val_is_uri�svg_attr_val_allows_ref�svg_allow_local_href)�selfr%r�r�r�r�r�r�r�r�r�r�)�	__class__��/usr/lib/python3.6/sanitizer.pyr��szFilter.__init__ccs.x(tjj|�D]}|j|�}|r|VqWdS)N)r	r
�__iter__�sanitize_token)r��tokenr�r�r�r��s
zFilter.__iter__cCsp|d}|d	kr^|d}|d}||f|jksH|dkrRtd|f|jkrR|j|�S|j|�Sn|dkrhn|SdS)
Nr��StartTag�EndTag�EmptyTagr��	namespacer�Comment)r�r�r�)r�r�
allowed_token�disallowed_token)r�r��
token_typer�r�r�r�r�r��s
zFilter.sanitize_tokenc	Cs�d|k�r�|d}t|j��}x&||jD]}|d|=|j|�q*Wx�||j@D]�}||ksbt�tjddt||��j	�}|j
dd�}ytj|�}Wntk
r�d}||=YnX|o�|j
rR|j
|jkr�||=|j
dkrRtj|j�}|s�||=qR|jd�|jkrR||=qRWx4|jD]*}||k�rtjddt||��||<�qW|d|jk�r�td	d
f|k�r�tjd|td	d
f��r�|td	d
f=d
|k�r�|j|d�|d<||d<|S)Nr`u
[`- - \s]+�u�Zcontent_typezurl\s*\(\s*[^#\s][^)]+?\)� r�r�rvz^\s*[^#\s].*r�)Nr�)Nr�)Nr�)r7�keysr��remover��AssertionError�rer'r�lowerr��urlparse�
ValueError�schemer��data_content_type�matchr5�groupr�r�r�r�search�sanitize_css)	r�r��attrsZ
attr_namesZ	to_remove�attrZ
val_unescapedZurirr�r�r�r�sL






zFilter.allowed_tokencCs�|d}|dkr"d|d|d<n�|dr�|dks6t�g}xJ|dj�D]:\\}}}|jd|dkrf|nd	t||ft|�f�qHWd
|ddj|�f|d<nd|d|d<|jd
�r�|ddd�d|d<d|d<|d=|S)Nr�r�z</%s>r�r`r�r�z %s="%s"z%s:%sz<%s%s>r�z<%s>ZselfClosingrz/>Z
Characters)r�r����)r��items�appendrr�join�get)r�r�r�r��nsr��vr�r�r�r�2s2
zFilter.disallowed_tokencCstjd�jd|�}tjd|�s"dStjd|�s2dSg}x�tjd|�D]�\}}|sRqD|j�|jkrx|j|d|d�qD|jd	�d
j�dkr�xf|j�D]}||j	kr�tjd|�r�Pq�W|j|d|d�qD|j�|j
krD|j|d|d�qDWdj|�S)Nzurl\s*\(\s*[^\s)]+?\s*\)\s*r�z@^([:,;#%.\sa-zA-Z0-9!]|\w-\w|'[\s\w]+'|"[\s\w]+"|\([\d,\s]+\))*$r�z ^\s*([-\w]+\s*:[^:;]*(;\s*|$))*$z([-\w]+)\s*:\s*([^:;]*)z: �;�-rrErI�margin�paddingz\^(#[0-9a-f]+|rgb\(\d+%?,\d*%?,?\d*%?\)?|\d{0,2}\.?\d{0,2}(cm|em|ex|in|mm|pc|pt|px|%|,|\))?)$)rErIr�r�)r��compiler'r��findallr�r�r��splitr�r�r�)r�r�ZcleanZpropr��keywordr�r�r�r�Fs*
zFilter.sanitize_css)�__name__�
__module__�__qualname__�__doc__r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r��
__classcell__r�r�)r�r�r
�s 
2)Nr)Nr<)Nr=)Nr>)Nr?)Nr@)NrA)NrB)NrC)NrD)NrE)NrF)NrG)NrH)NrI)NrJ)NrK)NrL)NrM)NrN)NrO)NrP)NrQ)NrR)NrS)NrT)NrU)NrV)Nr)NrW)NrX)NrY)NrZ)Nr[)Nr\)Nr])Nr^)Nr_)Nr`)Nra)Nrb)Nrc)Nrd)Nre)Nrf)Nr)Nrg)Nrh)Nri)Nrj)Nrk)Nrl)Nrm)Nr)Nrn)Nro)Nrp)Nrq)Nrr)Nrs)Nrt)Nru)Nrv)Nrw)Nrx)Nry)Nrz)Nr{)Nr|)Nr})Nr)Nr~)Nr)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr&)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr:)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr@)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nrn)Nrr)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)NrW)Nr�)NrY)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nrk)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr)Nr)Nr)Nr)Nr)Nr)Nr)Nr)Nrr)Nr)Nr	)Nrz)Nr
)Nr)Nr)Nr
)Nr)Nr)Nr)Nr)Nr)Nr)Nr)Nr)Nr)Nr�)Nr�)Nr�)Nr)Nr)Nr)Nr)Nr)Nr)Nr)Nr5)Nr)Nr)Nr)Nr )Nr!)Nr")Nr#)Nr$)Nr%)Nr&)Nr')Nr()Nr))Nr*)Nr+)Nr,)Nr-)Nr.)Nr/)Nr0)Nr1)Nr2)Nr3)Nr4)Nr5)Nr6)Nr7)Nr8)Nr9)Nr:)Nr�)Nr;)Nr<)Nr=)Nr�)Nr>)Nr?)Nr@)NrA)NrB)NrC)NrD)NrE)NrF)NrG)NrH)Nr�)NrI)NrJ)NrK)NrL)NrM)NrN)NrO)NrP)NrQ)Nrv)Nr�)Nr)Nr?)Nr�)Nr�)NrE)Nrc)Nri)Nr�)Nr��Nr��NrR�NrS�Nr��NrT�Nr4�Nr�Nr�Nr�NrU�Nr2)r�r�r�r�r�r�rrrrr�NrV�Nr-�Nr.�Nr/�Nr0�NrS�NrW�NrT�Nr2�Nr��Nr6�NrX�NrY�Nr7�Nr;)rrrrr	r
rrr
rrrrrr).rZr[r\r]r^r_r`rarXrYrSrbr�rcrdrr�r�r�r�r�rrrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryr�rzr�)'r{r|r}r~rr�r�r�r�rr�r�r�r�r�r�r�r�r�r�r�r�r,r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�)r�r�r�r2r9r5r6r8)r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r`)r�r�r�r�r�r�) Z
__future__rrrr�Zxml.sax.saxutilsrrZpip._vendor.six.movesrr�r�r	Z	constantsrr�__all__�	frozensetr�r�r�r�r�r�r�r�r�r�r��VERBOSEr�r
r�r�r�r��<module>s2
































































































































































_vendor/html5lib/filters/__pycache__/optionaltags.cpython-36.pyc000064400000005615151733136430020747 0ustar003

�Pf&)�@s6ddlmZmZmZddlmZGdd�dej�ZdS)�)�absolute_import�division�unicode_literals�)�basec@s,eZdZdd�Zdd�Zdd�Zdd�Zd	S)
�FilterccsLd}}x*|jD] }|dk	r(|||fV|}|}qW|dk	rH||dfVdS)N)�source)�selfZ	previous1Z	previous2�token�r�"/usr/lib/python3.6/optionaltags.py�slidersz
Filter.sliderccsvxp|j�D]d\}}}|d}|dkrH|ds@|j|d||�rn|Vq
|dkrh|j|d|�sn|Vq
|Vq
WdS)N�type�StartTag�data�name�EndTag)r
�is_optional_start�is_optional_end)r	�previousr
�nextrrrr�__iter__szFilter.__iter__cCs�|r|dpd}|dkr |dkS|dkrJ|dkr4dS|d	krH|d
dkSn�|dkrx|dkr^dS|dkrr|d
dkSdSnd|dkr�|dkr�|d
dkSdSnB|dk�r�|dkr�|r�|dd	kr�|d
dkr�dS|d
dkSdSdS)Nr�html�Comment�SpaceCharacters�headr�EmptyTagTrr�bodyF�script�style�colgroup�col�tbody�thead�tfoot�tr)rr)rr)rr)rr)rr)r"r#r$r)r	�tagnamerrrrrrrs4
zFilter.is_optional_startcCs|r|dpd}|d7kr |d8kS|d9krP|d
kr<|d|kS|dkpJ|dkS�n�|d:kr�|d
krl|dd;kS|dkr�|dkp�|dkSdS�n||dk�r�|d<k�r�|dd=kS|dk�p�|dkS�nF|d-k�r�|d
k�r�|dd>kS|dk�p�|dkS�n|d?k�r,|d
k�r|dd@kS|dk�p(|dkSn�|d0k�r`|dAk�rDdS|d
k�rZ|dd0kSd1Sn�|dBk�r�|d
k�r�|ddCkS|d3k�r�|dk�p�|dkSdSnf|d4k�r�|d
k�r�|dd3kS|dk�p�|dkSn2|dDk�r|d
k�r�|ddEkS|dk�p|dkSdS)FNrrrrrr�li�optgroupr%rrr�dt�ddF�pr�address�article�aside�
blockquote�datagrid�dialog�dir�div�dl�fieldset�footer�form�h1�h2�h3�h4�h5�h6�header�hr�menu�nav�ol�pre�section�table�ul�option�rt�rpr Tr#r"r$�td�th)rrr)rr)r'r(r%)r)r*)r)r*)rr)r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBr+rCrDrErF)rGr()rHrI)rHrI)rr)r#r")r"r$)rJrK)rJrKr)r	r&rrrrrrWsf
















zFilter.is_optional_endN)�__name__�
__module__�__qualname__r
rrrrrrrrs

9rN)Z
__future__rrr�rrrrrr�<module>s_vendor/html5lib/filters/__pycache__/__init__.cpython-36.pyc000064400000000161151733136430017771 0ustar003

�Pf�@sdS)N�rrr�/usr/lib/python3.6/__init__.py�<module>s_vendor/html5lib/filters/__pycache__/lint.cpython-36.pyc000064400000004355151733136430017211 0ustar003

�Pf%
�@shddlmZmZmZddlmZddlmZddlm	Z	m
Z
ddlmZdje�ZGd	d
�d
ej
�Z
dS)�)�absolute_import�division�unicode_literals)�	text_type�)�base�)�
namespaces�voidElements)�spaceCharacters�cs&eZdZd�fdd�	Zdd�Z�ZS)�FilterTcstt|�j|�||_dS)N)�superr
�__init__�require_matching_tags)�self�sourcer)�	__class__��/usr/lib/python3.6/lint.pyr
szFilter.__init__c	csRg}�xFtjj|�D�]4}|d}|dk�rP|d}|d}|dksRt|t�sRt�|dks^t�t|t�slt�|dksxt�t|dt�s�t�|s�|tdkr�|tkr�|dks�t�n|dks�t�|dkr�|j	r�|j
||f�xp|dj�D]`\\}}}|dk�st|t��st�|dk�st�t|t��s,t�|dk�s:t�t|t�s�t�q�W�n�|d	k�r|d}|d}|dk�s�t|t��s�t�|dk�s�t�t|t��s�t�|dk�s�t�|�s�|tdk�r�|tk�r�d
�s
tdd|i��n"|j	�rD|j�}|||fk�sDt��n6|d
k�r4|d}t|t��sDt��n|dk�r�|d}t|t��sVt�|dk�sdt�|dk�rD|j
t�dk�sDt�n�|dk�r�|d}|dk�s�t|t��s�t�|ddk�s�t|t��s�t�|ddk�sDt|t��sDt�nV|dk�rt|dt��sDt�n6|dk�r.t|dt��sDt�nd
�sDtdd|i��|VqWdS)N�type�StartTag�EmptyTag�	namespace�namer�dataZhtmlZEndTagFz.Void element reported as EndTag token: %(tag)s�tag�Comment�
Characters�SpaceCharactersZDoctypeZpublicIdZsystemIdZEntityZSerializerErrorzUnknown token type: %(type)s)rr)rr)rr
�__iter__�
isinstancer�AssertionError�dictr	r
r�append�items�pop�stripr)	rZ
open_elements�tokenrrr�value�startrrrrr sl

 



 

zFilter.__iter__)T)�__name__�
__module__�__qualname__rr �
__classcell__rr)rrr
sr
N)Z
__future__rrrZpip._vendor.sixrrrZ	constantsr	r
r�joinr
rrrr�<module>s
_vendor/html5lib/filters/__pycache__/sanitizer.cpython-36.opt-1.pyc000064400000042731151733136430021212 0ustar003

�Pfb�E@s
ddlmZmZmZddlZddlmZmZddlm	Z
ddlmZddl
mZmZd	gZeed
dfed
dfed
d
fed
dfed
dfed
dfed
dfed
dfed
dfed
dfed
dfed
dfed
dfed
dfed
dfed
dfed
dfed
dfed
dfed
dfed
dfed
d fed
d!fed
d"fed
d#fed
d$fed
d%fed
d&fed
d'fed
d(fed
d)fed
d*fed
d+fed
d,fed
d-fed
d.fed
d/fed
d0fed
d1fed
d2fed
d3fed
d4fed
d5fed
d6fed
d7fed
d8fed
d9fed
d:fed
d;fed
d<fed
d=fed
d>fed
d?fed
d@fed
dAfed
dBfed
dCfed
dDfed
dEfed
dFfed
dGfed
dHfed
dIfed
dJfed
dKfed
dLfed
dMfed
dNfed
dOfed
dPfed
dQfed
dRfed
dSfed
dTfed
dUfed
dVfed
dWfed
dXfed
dYfed
dZfed
d[fed
d\fed
d]fed
d^fed
d_fed
d`fed
dafed
dbfed
dcfed
ddfed
defed
dffed
dgfed
dhfed
difed
djfed
dkfed
dlfed
dmfedndofedndpfedndqfedndrfedndsfedndtfedndufedndvfedndwfedndxfedndyfedndzfednd{fednd|fednd}fednd~fedndfednd�fednd�fednd�fednd�fednd�fednd�fednd�fednd�fednd�fednd�fed�dfed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�fed�d�ff��Ze�d3�d4�d5�d6�d7�d8�d9�d:�d;�d<�d=�d>�d?�d@�dA�dB�dC�dD�dE�dF�dG�dH�dI�dJ�dK�dL�dM�dN�dO�dP�dQ�dR�dS�dT�dU�dV�dW�dX�dY�dZ�d[�d\�d]�d^�d_�d`�da�db�dc�dd�de�df�dg�dh�di�dj�dk�dl�dm�dn�do�dp�dq�dr�ds�dt�du�dv�dw�dx�dy�dz�d{�d|�d}�d~�d�d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d��d�e�d1d�f�d��d��d��ddÐdĐdŐdƐdǐdȐdɐdʐdːd̐d͐dΐdϐdАdѐdҐdӐdԐdՐd֐dאdؐdِdڐdېdܐdݐdސdߐd�d�d�d�d�d�d�e�dOd�fe�dO�dPfe�dO�d%f�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d��d��d��d��d��d��d��d��d��d��d�d�d�d�d�d�d�d�d�d	�d
�d�d�d
�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d �d!�d"�d#�d$�d%�d&�d'�d(�d)�d*�d+�d,�d-�d.�d/�d0�d1�d2�d3�d4�d5�d6�d7�d8�d9�d:�d;�d<�d=�d>�d?�d@�dA�dB�dC�dD�dE�dF�dG�dH�dI�dJ�dK�dL�dM�dN�dO�dP�dQ�dR�dS�dT�dU�dV�dW�dX�dY�dZ�d[�d\�d]�d^�d_�d`�da�db�dce�dO�d�fe�dO�d�fe�dOd�fe�dO�d�fe�dO�dPfe�dOd�fe�dO�d%fe�d1�d�fe�d1d�fe�d1�d�f�dd�de�df�dg�fC�Ze�dh�di�dj�dk�dl�dm�dn�do�dp�dq�dre�dOd�fe�d1�d�ff
�Ze�d~�Ze�d��Ze�d��Ze�d��Ze�d��Ze�d��Ze�d��Zej�d1ej�ZG�d2d	�d	ej�ZdS(��)�absolute_import�division�unicode_literalsN)�escape�unescape)�urllib_parse�)�base�)�
namespaces�prefixes�Filter�html�a�abbrZacronymZaddressZareaZarticleZasideZaudio�bZbigZ
blockquote�brZbuttonZcanvasZcaption�center�cite�code�colZcolgroupZcommandZdatagridZdatalistZdd�delZdetailsZdfnZdialog�dirZdivZdlZdtZemzevent-sourceZfieldsetZ
figcaptionZfigureZfooter�font�form�headerZh1Zh2Zh3Zh4Zh5Zh6Zhr�iZimg�inputZinsZkeygenZkbd�labelZlegendZli�m�mapZmenuZmeterZmulticolZnavZnextidZol�outputZoptgroupZoption�pZpreZprogress�q�sZsampZsectionZselectZsmallZsound�sourceZspacer�spanZstrikeZstrong�subZsup�tableZtbodyZtdZtextareaZtimeZtfootZthZtheadZtrZtt�uZul�varZvideoZmathmlZmactionZmathZmerrorZmfracZmiZ
mmultiscriptsZmn�moZmoverZmpaddedZmphantomZmprescriptsZmrootZmrowZmspaceZmsqrtZmstyleZmsubZmsubsupZmsupZmtableZmtdZmtextZmtrZmunderZ
munderover�noneZsvg�animate�animateColor�
animateMotion�animateTransformZclipPathZcircleZdefsZdescZellipsez	font-facezfont-face-namez
font-face-src�gZglyphZhkern�linearGradient�line�markerZmetadataz
missing-glyphZmpath�pathZpolygonZpolyline�radialGradientZrect�set�stopZswitch�text�titleZtspan�use�accept�accept-charset�	accesskey�action�align�alt�autocomplete�	autofocus�axis�
background�balance�bgcolor�bgproperties�border�bordercolor�bordercolordark�bordercolorlight�
bottompadding�cellpadding�cellspacing�ch�	challenge�char�charoff�choff�charset�checked�class�clear�color�cols�colspan�compact�contenteditable�controls�coords�data�datafld�datapagesize�datasrc�datetime�default�delay�disabled�	draggable�dynsrc�enctype�end�face�for�frame�
galleryimg�gutter�headers�height�	hidefocus�hidden�high�href�hreflang�hspace�icon�id�	inputmode�ismap�keytype�leftspacing�lang�list�longdesc�loop�	loopcount�loopend�	loopstart�low�lowsrc�max�	maxlength�media�method�min�multiple�name�nohref�noshade�nowrap�open�optimum�pattern�ping�
point-size�poster�pqg�preload�prompt�
radiogroup�readonly�rel�
repeat-max�
repeat-min�replace�required�rev�rightspacing�rows�rowspan�rules�scope�selected�shape�size�src�start�step�style�summary�suppress�tabindex�target�template�
toppadding�type�unselectable�usemap�urn�valign�value�variable�volume�vspace�vrml�width�wrapZxml�
actiontype�columnalign�columnlines�
columnspacing�
columnspan�depth�display�displaystyle�equalcolumns�	equalrows�fence�	fontstyle�
fontweight�
linethickness�lspace�mathbackground�	mathcolor�mathvariant�maxsize�minsize�other�rowalign�rowlines�
rowspacing�rspace�scriptlevel�	selection�	separator�stretchy�xlinkZshow�
accent-height�
accumulate�additive�
alphabetic�arabic-form�ascent�
attributeName�
attributeType�baseProfile�bbox�begin�by�calcMode�
cap-height�	clip-path�color-rendering�content�cx�cy�d�dx�dy�descent�dur�fill�fill-opacity�	fill-rule�font-family�	font-size�font-stretch�
font-style�font-variant�font-weight�from�fx�fy�g1�g2�
glyph-name�
gradientUnits�hanging�horiz-adv-x�horiz-origin-x�ideographic�k�	keyPoints�
keySplines�keyTimes�
marker-end�
marker-mid�marker-start�markerHeight�markerUnits�markerWidth�mathematical�offset�opacity�orient�origin�overline-position�overline-thickness�panose-1�
pathLength�points�preserveAspectRatio�r�refX�refY�repeatCount�	repeatDur�requiredExtensions�requiredFeatures�restart�rotate�rx�ry�slope�stemh�stemv�
stop-color�stop-opacity�strikethrough-position�strikethrough-thickness�stroke�stroke-dasharray�stroke-dashoffset�stroke-linecap�stroke-linejoin�stroke-miterlimit�stroke-opacity�stroke-width�systemLanguage�text-anchor�to�	transform�u1�u2�underline-position�underline-thickness�unicode�
unicode-range�units-per-em�values�version�viewBox�
visibility�widths�x�x-height�x1�x2ZactuateZarcroleZroler	Zspace�y�y1�y2�
zoomAndPan�
color-profile�cursor�filter�mask�altGlyph�feImage�textpath�tref�azimuth�background-color�border-bottom-color�border-collapse�border-color�border-left-color�border-right-color�border-top-color�	direction�	elevation�float�letter-spacing�line-height�overflow�pause�pause-after�pause-before�pitch�pitch-range�richness�speak�speak-header�
speak-numeral�speak-punctuation�speech-rate�stress�
text-align�text-decoration�text-indent�unicode-bidi�vertical-align�voice-family�white-space�auto�aqua�black�block�blue�bold�both�bottom�brown�collapse�dashed�dotted�fuchsia�gray�green�
!important�italic�left�lime�maroon�medium�navy�normal�olive�pointer�purple�red�right�solid�silver�teal�top�transparent�	underline�white�yellow�ed2k�ftp�http�https�irc�mailto�news�gopher�nntp�telnet�webcal�xmpp�callto�feed�aim�rsync�tag�ssh�sftp�rtsp�afs�	image/png�
image/jpeg�	image/gif�
image/webp�	image/bmp�
text/plainaL
                                ^
                                # Match a content type <application>/<type>
                                (?P<content_type>[-a-zA-Z0-9.]+/[-a-zA-Z0-9.]+)
                                # Match any character set and encoding
                                (?:(?:;charset=(?:[-a-zA-Z0-9]+)(?:;(?:base64))?)
                                  |(?:;(?:base64))?(?:;charset=(?:[-a-zA-Z0-9]+))?)
                                # Assume the rest is data
                                ,.*
                                $
                                c
s^eZdZdZeeeeee	e
eee
f
�fdd�	Zdd�Zdd�Zdd	�Zd
d�Zdd
�Z�ZS)r
zA sanitization of XHTML+MathML+SVG and of inline style attributes.csPtt|�j|�||_||_||_||_||_||_||_	|	|_
|
|_||_dS)N)
�superr
�__init__�allowed_elements�allowed_attributes�allowed_css_properties�allowed_css_keywords�allowed_svg_properties�allowed_protocols�allowed_content_types�attr_val_is_uri�svg_attr_val_allows_ref�svg_allow_local_href)�selfr%r�r�r�r�r�r�r�r�r�r�)�	__class__��/usr/lib/python3.6/sanitizer.pyr��szFilter.__init__ccs.x(tjj|�D]}|j|�}|r|VqWdS)N)r	r
�__iter__�sanitize_token)r��tokenr�r�r�r��s
zFilter.__iter__cCsp|d}|d	kr^|d}|d}||f|jksH|dkrRtd|f|jkrR|j|�S|j|�Sn|dkrhn|SdS)
Nr��StartTag�EndTag�EmptyTagr��	namespacer�Comment)r�r�r�)r�r�
allowed_token�disallowed_token)r�r��
token_typer�r�r�r�r�r��s
zFilter.sanitize_tokenc	Cs�d|k�r�|d}t|j��}x&||jD]}|d|=|j|�q*Wx�||j@D]�}tjddt||��j�}|j	dd�}yt
j
|�}Wntk
r�d}||=YnX|o�|jrR|j|j
kr�||=|jdkrRtj|j�}|s�||=qR|jd�|jkrR||=qRWx4|jD]*}||k�r
tjddt||��||<�q
W|d|jk�r�td	d
f|k�r�tjd|td	d
f��r�|td	d
f=d
|k�r�|j|d�|d<||d<|S)Nr`u
[`- - \s]+�u�Zcontent_typezurl\s*\(\s*[^#\s][^)]+?\)� r�r�rvz^\s*[^#\s].*r�)Nr�)Nr�)Nr�)r7�keysr��remover��rer'r�lowerr��urlparse�
ValueError�schemer��data_content_type�matchr5�groupr�r�r�r�search�sanitize_css)	r�r��attrsZ
attr_namesZ	to_remove�attrZ
val_unescapedZurirr�r�r�r�sJ






zFilter.allowed_tokencCs�|d}|dkr"d|d|d<n�|dr�g}xJ|dj�D]:\\}}}|jd|dkrZ|ndt||ft|�f�q<Wd|dd	j|�f|d<nd
|d|d<|jd�r�|ddd�d
|d<d|d<|d=|S)Nr�r�z</%s>r�r`z %s="%s"z%s:%sz<%s%s>r�z<%s>ZselfClosingrz/>Z
Characters���)�items�appendrr�join�get)r�r�r�r��nsr��vr�r�r�r�2s2
zFilter.disallowed_tokencCstjd�jd|�}tjd|�s"dStjd|�s2dSg}x�tjd|�D]�\}}|sRqD|j�|jkrx|j|d|d�qD|jd	�d
j�dkr�xf|j�D]}||j	kr�tjd|�r�Pq�W|j|d|d�qD|j�|j
krD|j|d|d�qDWdj|�S)Nzurl\s*\(\s*[^\s)]+?\s*\)\s*r�z@^([:,;#%.\sa-zA-Z0-9!]|\w-\w|'[\s\w]+'|"[\s\w]+"|\([\d,\s]+\))*$r�z ^\s*([-\w]+\s*:[^:;]*(;\s*|$))*$z([-\w]+)\s*:\s*([^:;]*)z: �;�-rrErI�margin�paddingz\^(#[0-9a-f]+|rgb\(\d+%?,\d*%?,?\d*%?\)?|\d{0,2}\.?\d{0,2}(cm|em|ex|in|mm|pc|pt|px|%|,|\))?)$)rErIr�r�)r��compiler'r��findallr�r�r��splitr�r�r�)r�r�ZcleanZpropr��keywordr�r�r�r�Fs*
zFilter.sanitize_css)�__name__�
__module__�__qualname__�__doc__r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r��
__classcell__r�r�)r�r�r
�s 
2)Nr)Nr<)Nr=)Nr>)Nr?)Nr@)NrA)NrB)NrC)NrD)NrE)NrF)NrG)NrH)NrI)NrJ)NrK)NrL)NrM)NrN)NrO)NrP)NrQ)NrR)NrS)NrT)NrU)NrV)Nr)NrW)NrX)NrY)NrZ)Nr[)Nr\)Nr])Nr^)Nr_)Nr`)Nra)Nrb)Nrc)Nrd)Nre)Nrf)Nr)Nrg)Nrh)Nri)Nrj)Nrk)Nrl)Nrm)Nr)Nrn)Nro)Nrp)Nrq)Nrr)Nrs)Nrt)Nru)Nrv)Nrw)Nrx)Nry)Nrz)Nr{)Nr|)Nr})Nr)Nr~)Nr)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr&)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr:)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr@)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nrn)Nrr)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)NrW)Nr�)NrY)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nrk)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr�)Nr)Nr)Nr)Nr)Nr)Nr)Nr)Nr)Nrr)Nr)Nr	)Nrz)Nr
)Nr)Nr)Nr
)Nr)Nr)Nr)Nr)Nr)Nr)Nr)Nr)Nr)Nr�)Nr�)Nr�)Nr)Nr)Nr)Nr)Nr)Nr)Nr)Nr5)Nr)Nr)Nr)Nr )Nr!)Nr")Nr#)Nr$)Nr%)Nr&)Nr')Nr()Nr))Nr*)Nr+)Nr,)Nr-)Nr.)Nr/)Nr0)Nr1)Nr2)Nr3)Nr4)Nr5)Nr6)Nr7)Nr8)Nr9)Nr:)Nr�)Nr;)Nr<)Nr=)Nr�)Nr>)Nr?)Nr@)NrA)NrB)NrC)NrD)NrE)NrF)NrG)NrH)Nr�)NrI)NrJ)NrK)NrL)NrM)NrN)NrO)NrP)NrQ)Nrv)Nr�)Nr)Nr?)Nr�)Nr�)NrE)Nrc)Nri)Nr�)Nr��Nr��NrR�NrS�Nr��NrT�Nr4�Nr�Nr�Nr�NrU�Nr2)r�r�r�r�r�r�r�rrrr�NrV�Nr-�Nr.�Nr/�Nr0�NrS�NrW�NrT�Nr2�Nr��Nr6�NrX�NrY�Nr7�Nr;)rrrrrr	r
rrr
rrrrr).rZr[r\r]r^r_r`rarXrYrSrbr�rcrdrr�r�r�r�r�rrrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryr�rzr�)'r{r|r}r~rr�r�r�r�rr�r�r�r�r�r�r�r�r�r�r�r�r,r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�)r�r�r�r2r9r5r6r8)r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r`)r�r�r�r�r�r�) Z
__future__rrrr�Zxml.sax.saxutilsrrZpip._vendor.six.movesrr�r�r	Z	constantsrr�__all__�	frozensetr�r�r�r�r�r�r�r�r�r�r��VERBOSEr�r
r�r�r�r��<module>s2
































































































































































_vendor/html5lib/filters/__pycache__/alphabeticalattributes.cpython-36.opt-1.pyc000064400000001730151733136430023714 0ustar003

�Pfm�@shddlmZmZmZddlmZyddlmZWn ek
rPddl	mZYnXGdd�dej
�Z
dS)�)�absolute_import�division�unicode_literals�)�base)�OrderedDictc@seZdZdd�ZdS)�Filterccshxbtjj|�D]R}|ddkrZt�}x,t|dj�dd�d�D]\}}|||<q>W||d<|VqWdS)	N�type�StartTag�EmptyTag�datacSs|dS)Nr�)�xr
r
�,/usr/lib/python3.6/alphabeticalattributes.py�<lambda>sz!Filter.__iter__.<locals>.<lambda>)�key)r
r)rr�__iter__r�sorted�items)�self�tokenZattrs�name�valuer
r
rrszFilter.__iter__N)�__name__�
__module__�__qualname__rr
r
r
rrsrN)Z
__future__rrr�r�collectionsr�ImportErrorZordereddictrr
r
r
r�<module>s_vendor/html5lib/filters/__pycache__/inject_meta_charset.cpython-36.opt-1.pyc000064400000003116151733136430023167 0ustar003

�Pf�
�@s6ddlmZmZmZddlmZGdd�dej�ZdS)�)�absolute_import�division�unicode_literals�)�basec@seZdZdd�Zdd�ZdS)�FiltercCstjj||�||_dS)N)rr�__init__�encoding)�self�sourcer	�r�)/usr/lib/python3.6/inject_meta_charset.pyrszFilter.__init__c
cs�d}|jdk}g}�x�tjj|�D�]�}|d}|dkrP|dj�dkrLd}�np|dk�rV|dj�dk�rd	}x�|d
j�D]V\\}}}	|dk	r�q~q~|j�dkr�|j|d
||f<d}Pq~|d
kr~|	j�dkr~d}q~W|o�d|d
k�rTd|j|d
d<d}nR|dj�dk�r�|�r�dd|d
d�Vddd|jid�Vddd�Vd}q"nj|dk�r�|dj�dk�r�|�r�|jd�V|�s�ddd|jid�Vx|�r�|jd�V�q�Wd}d}|dk�r�|j|�q"|Vq"WdS)NZpre_head�typeZStartTag�name�headZin_headZEmptyTag�metaF�data�charsetTz
http-equivzcontent-type�contentztext/html; charset=%s)rrrZEndTag)rrrZ	post_head)Nr)Nr)Nr)Nr)r	rr�__iter__�lower�items�pop�append)
r
�stateZ
meta_found�pending�tokenrZhas_http_equiv_content_type�	namespacer�valuerrr
rsX



zFilter.__iter__N)�__name__�
__module__�__qualname__rrrrrr
rsrN)Z
__future__rrr�rrrrrr
�<module>s_vendor/html5lib/filters/__pycache__/__init__.cpython-36.opt-1.pyc000064400000000161151733136430020730 0ustar003

�Pf�@sdS)N�rrr�/usr/lib/python3.6/__init__.py�<module>s_vendor/html5lib/filters/__pycache__/alphabeticalattributes.cpython-36.pyc000064400000001730151733136430022755 0ustar003

�Pfm�@shddlmZmZmZddlmZyddlmZWn ek
rPddl	mZYnXGdd�dej
�Z
dS)�)�absolute_import�division�unicode_literals�)�base)�OrderedDictc@seZdZdd�ZdS)�Filterccshxbtjj|�D]R}|ddkrZt�}x,t|dj�dd�d�D]\}}|||<q>W||d<|VqWdS)	N�type�StartTag�EmptyTag�datacSs|dS)Nr�)�xr
r
�,/usr/lib/python3.6/alphabeticalattributes.py�<lambda>sz!Filter.__iter__.<locals>.<lambda>)�key)r
r)rr�__iter__r�sorted�items)�self�tokenZattrs�name�valuer
r
rrszFilter.__iter__N)�__name__�
__module__�__qualname__rr
r
r
rrsrN)Z
__future__rrr�r�collectionsr�ImportErrorZordereddictrr
r
r
r�<module>s_vendor/html5lib/filters/__pycache__/optionaltags.cpython-36.opt-1.pyc000064400000005615151733136430021706 0ustar003

�Pf&)�@s6ddlmZmZmZddlmZGdd�dej�ZdS)�)�absolute_import�division�unicode_literals�)�basec@s,eZdZdd�Zdd�Zdd�Zdd�Zd	S)
�FilterccsLd}}x*|jD] }|dk	r(|||fV|}|}qW|dk	rH||dfVdS)N)�source)�selfZ	previous1Z	previous2�token�r�"/usr/lib/python3.6/optionaltags.py�slidersz
Filter.sliderccsvxp|j�D]d\}}}|d}|dkrH|ds@|j|d||�rn|Vq
|dkrh|j|d|�sn|Vq
|Vq
WdS)N�type�StartTag�data�name�EndTag)r
�is_optional_start�is_optional_end)r	�previousr
�nextrrrr�__iter__szFilter.__iter__cCs�|r|dpd}|dkr |dkS|dkrJ|dkr4dS|d	krH|d
dkSn�|dkrx|dkr^dS|dkrr|d
dkSdSnd|dkr�|dkr�|d
dkSdSnB|dk�r�|dkr�|r�|dd	kr�|d
dkr�dS|d
dkSdSdS)Nr�html�Comment�SpaceCharacters�headr�EmptyTagTrr�bodyF�script�style�colgroup�col�tbody�thead�tfoot�tr)rr)rr)rr)rr)rr)r"r#r$r)r	�tagnamerrrrrrrs4
zFilter.is_optional_startcCs|r|dpd}|d7kr |d8kS|d9krP|d
kr<|d|kS|dkpJ|dkS�n�|d:kr�|d
krl|dd;kS|dkr�|dkp�|dkSdS�n||dk�r�|d<k�r�|dd=kS|dk�p�|dkS�nF|d-k�r�|d
k�r�|dd>kS|dk�p�|dkS�n|d?k�r,|d
k�r|dd@kS|dk�p(|dkSn�|d0k�r`|dAk�rDdS|d
k�rZ|dd0kSd1Sn�|dBk�r�|d
k�r�|ddCkS|d3k�r�|dk�p�|dkSdSnf|d4k�r�|d
k�r�|dd3kS|dk�p�|dkSn2|dDk�r|d
k�r�|ddEkS|dk�p|dkSdS)FNrrrrrr�li�optgroupr%rrr�dt�ddF�pr�address�article�aside�
blockquote�datagrid�dialog�dir�div�dl�fieldset�footer�form�h1�h2�h3�h4�h5�h6�header�hr�menu�nav�ol�pre�section�table�ul�option�rt�rpr Tr#r"r$�td�th)rrr)rr)r'r(r%)r)r*)r)r*)rr)r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBr+rCrDrErF)rGr()rHrI)rHrI)rr)r#r")r"r$)rJrK)rJrKr)r	r&rrrrrrWsf
















zFilter.is_optional_endN)�__name__�
__module__�__qualname__r
rrrrrrrrs

9rN)Z
__future__rrr�rrrrrr�<module>s_vendor/html5lib/filters/__pycache__/lint.cpython-36.opt-1.pyc000064400000003025151733136430020141 0ustar003

�Pf%
�@shddlmZmZmZddlmZddlmZddlm	Z	m
Z
ddlmZdje�ZGd	d
�d
ej
�Z
dS)�)�absolute_import�division�unicode_literals)�	text_type�)�base�)�
namespaces�voidElements)�spaceCharacters�cs&eZdZd�fdd�	Zdd�Z�ZS)�FilterTcstt|�j|�||_dS)N)�superr
�__init__�require_matching_tags)�self�sourcer)�	__class__��/usr/lib/python3.6/lint.pyr
szFilter.__init__c	cs@g}�x4tjj|�D�]"}|d}|dkr�|d}|d}|sL|tdkrV|tkrVn|dkrr|jrr|j||f�x�|dj�D]\\}}}q�Wn�|dkr�|d}|d}|s�|tdkr�|tkr�n|jr�|j�}n\|d	kr�|d}nJ|dk�r|d}|dk�r2n,|dk�r|d}n|d
k�r&n|dk�r2n|VqWdS)N�type�StartTag�EmptyTag�	namespace�nameZhtml�dataZEndTag�Comment�
Characters�SpaceCharactersZDoctypeZEntityZSerializerError)rr)rr)	rr
�__iter__r	r
r�append�items�pop)	rZ
open_elements�tokenrrr�value�startrrrrrsF




zFilter.__iter__)T)�__name__�
__module__�__qualname__rr�
__classcell__rr)rrr
sr
N)Z
__future__rrrZpip._vendor.sixrrrZ	constantsr	r
r�joinr
rrrr�<module>s
_vendor/html5lib/filters/__pycache__/base.cpython-36.pyc000064400000001373151733136430017152 0ustar003

�Pf�@s(ddlmZmZmZGdd�de�ZdS)�)�absolute_import�division�unicode_literalsc@s$eZdZdd�Zdd�Zdd�ZdS)�FiltercCs
||_dS)N)�source)�selfr�r�/usr/lib/python3.6/base.py�__init__szFilter.__init__cCs
t|j�S)N)�iterr)rrrr	�__iter__szFilter.__iter__cCst|j|�S)N)�getattrr)r�namerrr	�__getattr__szFilter.__getattr__N)�__name__�
__module__�__qualname__r
rrrrrr	rsrN)Z
__future__rrr�objectrrrrr	�<module>s_vendor/html5lib/filters/__pycache__/base.cpython-36.opt-1.pyc000064400000001373151733136430020111 0ustar003

�Pf�@s(ddlmZmZmZGdd�de�ZdS)�)�absolute_import�division�unicode_literalsc@s$eZdZdd�Zdd�Zdd�ZdS)�FiltercCs
||_dS)N)�source)�selfr�r�/usr/lib/python3.6/base.py�__init__szFilter.__init__cCs
t|j�S)N)�iterr)rrrr	�__iter__szFilter.__iter__cCst|j|�S)N)�getattrr)r�namerrr	�__getattr__szFilter.__getattr__N)�__name__�
__module__�__qualname__r
rrrrrr	rsrN)Z
__future__rrr�objectrrrrr	�<module>s_vendor/html5lib/filters/inject_meta_charset.py000064400000005266151733136430015754 0ustar00from __future__ import absolute_import, division, unicode_literals

from . import base


class Filter(base.Filter):
    def __init__(self, source, encoding):
        base.Filter.__init__(self, source)
        self.encoding = encoding

    def __iter__(self):
        state = "pre_head"
        meta_found = (self.encoding is None)
        pending = []

        for token in base.Filter.__iter__(self):
            type = token["type"]
            if type == "StartTag":
                if token["name"].lower() == "head":
                    state = "in_head"

            elif type == "EmptyTag":
                if token["name"].lower() == "meta":
                    # replace charset with actual encoding
                    has_http_equiv_content_type = False
                    for (namespace, name), value in token["data"].items():
                        if namespace is not None:
                            continue
                        elif name.lower() == 'charset':
                            token["data"][(namespace, name)] = self.encoding
                            meta_found = True
                            break
                        elif name == 'http-equiv' and value.lower() == 'content-type':
                            has_http_equiv_content_type = True
                    else:
                        if has_http_equiv_content_type and (None, "content") in token["data"]:
                            token["data"][(None, "content")] = 'text/html; charset=%s' % self.encoding
                            meta_found = True

                elif token["name"].lower() == "head" and not meta_found:
                    # insert meta into empty head
                    yield {"type": "StartTag", "name": "head",
                           "data": token["data"]}
                    yield {"type": "EmptyTag", "name": "meta",
                           "data": {(None, "charset"): self.encoding}}
                    yield {"type": "EndTag", "name": "head"}
                    meta_found = True
                    continue

            elif type == "EndTag":
                if token["name"].lower() == "head" and pending:
                    # insert meta into head (if necessary) and flush pending queue
                    yield pending.pop(0)
                    if not meta_found:
                        yield {"type": "EmptyTag", "name": "meta",
                               "data": {(None, "charset"): self.encoding}}
                    while pending:
                        yield pending.pop(0)
                    meta_found = True
                    state = "post_head"

            if state == "in_head":
                pending.append(token)
            else:
                yield token
_vendor/html5lib/filters/__init__.py000064400000000000151733136430013475 0ustar00_vendor/html5lib/filters/alphabeticalattributes.py000064400000001155151733136430016472 0ustar00from __future__ import absolute_import, division, unicode_literals

from . import base

try:
    from collections import OrderedDict
except ImportError:
    from ordereddict import OrderedDict


class Filter(base.Filter):
    def __iter__(self):
        for token in base.Filter.__iter__(self):
            if token["type"] in ("StartTag", "EmptyTag"):
                attrs = OrderedDict()
                for name, value in sorted(token["data"].items(),
                                          key=lambda x: x[0]):
                    attrs[name] = value
                token["data"] = attrs
            yield token
_vendor/html5lib/filters/base.py000064400000000436151733136430012665 0ustar00from __future__ import absolute_import, division, unicode_literals


class Filter(object):
    def __init__(self, source):
        self.source = source

    def __iter__(self):
        return iter(self.source)

    def __getattr__(self, name):
        return getattr(self.source, name)
_vendor/html5lib/filters/whitespace.py000064400000002163151733136430014106 0ustar00from __future__ import absolute_import, division, unicode_literals

import re

from . import base
from ..constants import rcdataElements, spaceCharacters
spaceCharacters = "".join(spaceCharacters)

SPACES_REGEX = re.compile("[%s]+" % spaceCharacters)


class Filter(base.Filter):

    spacePreserveElements = frozenset(["pre", "textarea"] + list(rcdataElements))

    def __iter__(self):
        preserve = 0
        for token in base.Filter.__iter__(self):
            type = token["type"]
            if type == "StartTag" \
                    and (preserve or token["name"] in self.spacePreserveElements):
                preserve += 1

            elif type == "EndTag" and preserve:
                preserve -= 1

            elif not preserve and type == "SpaceCharacters" and token["data"]:
                # Test on token["data"] above to not introduce spaces where there were not
                token["data"] = " "

            elif not preserve and type == "Characters":
                token["data"] = collapse_spaces(token["data"])

            yield token


def collapse_spaces(text):
    return SPACES_REGEX.sub(' ', text)
_vendor/html5lib/filters/sanitizer.py000064400000061030151733136430013760 0ustar00from __future__ import absolute_import, division, unicode_literals

import re
from xml.sax.saxutils import escape, unescape

from pip._vendor.six.moves import urllib_parse as urlparse

from . import base
from ..constants import namespaces, prefixes

__all__ = ["Filter"]


allowed_elements = frozenset((
    (namespaces['html'], 'a'),
    (namespaces['html'], 'abbr'),
    (namespaces['html'], 'acronym'),
    (namespaces['html'], 'address'),
    (namespaces['html'], 'area'),
    (namespaces['html'], 'article'),
    (namespaces['html'], 'aside'),
    (namespaces['html'], 'audio'),
    (namespaces['html'], 'b'),
    (namespaces['html'], 'big'),
    (namespaces['html'], 'blockquote'),
    (namespaces['html'], 'br'),
    (namespaces['html'], 'button'),
    (namespaces['html'], 'canvas'),
    (namespaces['html'], 'caption'),
    (namespaces['html'], 'center'),
    (namespaces['html'], 'cite'),
    (namespaces['html'], 'code'),
    (namespaces['html'], 'col'),
    (namespaces['html'], 'colgroup'),
    (namespaces['html'], 'command'),
    (namespaces['html'], 'datagrid'),
    (namespaces['html'], 'datalist'),
    (namespaces['html'], 'dd'),
    (namespaces['html'], 'del'),
    (namespaces['html'], 'details'),
    (namespaces['html'], 'dfn'),
    (namespaces['html'], 'dialog'),
    (namespaces['html'], 'dir'),
    (namespaces['html'], 'div'),
    (namespaces['html'], 'dl'),
    (namespaces['html'], 'dt'),
    (namespaces['html'], 'em'),
    (namespaces['html'], 'event-source'),
    (namespaces['html'], 'fieldset'),
    (namespaces['html'], 'figcaption'),
    (namespaces['html'], 'figure'),
    (namespaces['html'], 'footer'),
    (namespaces['html'], 'font'),
    (namespaces['html'], 'form'),
    (namespaces['html'], 'header'),
    (namespaces['html'], 'h1'),
    (namespaces['html'], 'h2'),
    (namespaces['html'], 'h3'),
    (namespaces['html'], 'h4'),
    (namespaces['html'], 'h5'),
    (namespaces['html'], 'h6'),
    (namespaces['html'], 'hr'),
    (namespaces['html'], 'i'),
    (namespaces['html'], 'img'),
    (namespaces['html'], 'input'),
    (namespaces['html'], 'ins'),
    (namespaces['html'], 'keygen'),
    (namespaces['html'], 'kbd'),
    (namespaces['html'], 'label'),
    (namespaces['html'], 'legend'),
    (namespaces['html'], 'li'),
    (namespaces['html'], 'm'),
    (namespaces['html'], 'map'),
    (namespaces['html'], 'menu'),
    (namespaces['html'], 'meter'),
    (namespaces['html'], 'multicol'),
    (namespaces['html'], 'nav'),
    (namespaces['html'], 'nextid'),
    (namespaces['html'], 'ol'),
    (namespaces['html'], 'output'),
    (namespaces['html'], 'optgroup'),
    (namespaces['html'], 'option'),
    (namespaces['html'], 'p'),
    (namespaces['html'], 'pre'),
    (namespaces['html'], 'progress'),
    (namespaces['html'], 'q'),
    (namespaces['html'], 's'),
    (namespaces['html'], 'samp'),
    (namespaces['html'], 'section'),
    (namespaces['html'], 'select'),
    (namespaces['html'], 'small'),
    (namespaces['html'], 'sound'),
    (namespaces['html'], 'source'),
    (namespaces['html'], 'spacer'),
    (namespaces['html'], 'span'),
    (namespaces['html'], 'strike'),
    (namespaces['html'], 'strong'),
    (namespaces['html'], 'sub'),
    (namespaces['html'], 'sup'),
    (namespaces['html'], 'table'),
    (namespaces['html'], 'tbody'),
    (namespaces['html'], 'td'),
    (namespaces['html'], 'textarea'),
    (namespaces['html'], 'time'),
    (namespaces['html'], 'tfoot'),
    (namespaces['html'], 'th'),
    (namespaces['html'], 'thead'),
    (namespaces['html'], 'tr'),
    (namespaces['html'], 'tt'),
    (namespaces['html'], 'u'),
    (namespaces['html'], 'ul'),
    (namespaces['html'], 'var'),
    (namespaces['html'], 'video'),
    (namespaces['mathml'], 'maction'),
    (namespaces['mathml'], 'math'),
    (namespaces['mathml'], 'merror'),
    (namespaces['mathml'], 'mfrac'),
    (namespaces['mathml'], 'mi'),
    (namespaces['mathml'], 'mmultiscripts'),
    (namespaces['mathml'], 'mn'),
    (namespaces['mathml'], 'mo'),
    (namespaces['mathml'], 'mover'),
    (namespaces['mathml'], 'mpadded'),
    (namespaces['mathml'], 'mphantom'),
    (namespaces['mathml'], 'mprescripts'),
    (namespaces['mathml'], 'mroot'),
    (namespaces['mathml'], 'mrow'),
    (namespaces['mathml'], 'mspace'),
    (namespaces['mathml'], 'msqrt'),
    (namespaces['mathml'], 'mstyle'),
    (namespaces['mathml'], 'msub'),
    (namespaces['mathml'], 'msubsup'),
    (namespaces['mathml'], 'msup'),
    (namespaces['mathml'], 'mtable'),
    (namespaces['mathml'], 'mtd'),
    (namespaces['mathml'], 'mtext'),
    (namespaces['mathml'], 'mtr'),
    (namespaces['mathml'], 'munder'),
    (namespaces['mathml'], 'munderover'),
    (namespaces['mathml'], 'none'),
    (namespaces['svg'], 'a'),
    (namespaces['svg'], 'animate'),
    (namespaces['svg'], 'animateColor'),
    (namespaces['svg'], 'animateMotion'),
    (namespaces['svg'], 'animateTransform'),
    (namespaces['svg'], 'clipPath'),
    (namespaces['svg'], 'circle'),
    (namespaces['svg'], 'defs'),
    (namespaces['svg'], 'desc'),
    (namespaces['svg'], 'ellipse'),
    (namespaces['svg'], 'font-face'),
    (namespaces['svg'], 'font-face-name'),
    (namespaces['svg'], 'font-face-src'),
    (namespaces['svg'], 'g'),
    (namespaces['svg'], 'glyph'),
    (namespaces['svg'], 'hkern'),
    (namespaces['svg'], 'linearGradient'),
    (namespaces['svg'], 'line'),
    (namespaces['svg'], 'marker'),
    (namespaces['svg'], 'metadata'),
    (namespaces['svg'], 'missing-glyph'),
    (namespaces['svg'], 'mpath'),
    (namespaces['svg'], 'path'),
    (namespaces['svg'], 'polygon'),
    (namespaces['svg'], 'polyline'),
    (namespaces['svg'], 'radialGradient'),
    (namespaces['svg'], 'rect'),
    (namespaces['svg'], 'set'),
    (namespaces['svg'], 'stop'),
    (namespaces['svg'], 'svg'),
    (namespaces['svg'], 'switch'),
    (namespaces['svg'], 'text'),
    (namespaces['svg'], 'title'),
    (namespaces['svg'], 'tspan'),
    (namespaces['svg'], 'use'),
))

allowed_attributes = frozenset((
    # HTML attributes
    (None, 'abbr'),
    (None, 'accept'),
    (None, 'accept-charset'),
    (None, 'accesskey'),
    (None, 'action'),
    (None, 'align'),
    (None, 'alt'),
    (None, 'autocomplete'),
    (None, 'autofocus'),
    (None, 'axis'),
    (None, 'background'),
    (None, 'balance'),
    (None, 'bgcolor'),
    (None, 'bgproperties'),
    (None, 'border'),
    (None, 'bordercolor'),
    (None, 'bordercolordark'),
    (None, 'bordercolorlight'),
    (None, 'bottompadding'),
    (None, 'cellpadding'),
    (None, 'cellspacing'),
    (None, 'ch'),
    (None, 'challenge'),
    (None, 'char'),
    (None, 'charoff'),
    (None, 'choff'),
    (None, 'charset'),
    (None, 'checked'),
    (None, 'cite'),
    (None, 'class'),
    (None, 'clear'),
    (None, 'color'),
    (None, 'cols'),
    (None, 'colspan'),
    (None, 'compact'),
    (None, 'contenteditable'),
    (None, 'controls'),
    (None, 'coords'),
    (None, 'data'),
    (None, 'datafld'),
    (None, 'datapagesize'),
    (None, 'datasrc'),
    (None, 'datetime'),
    (None, 'default'),
    (None, 'delay'),
    (None, 'dir'),
    (None, 'disabled'),
    (None, 'draggable'),
    (None, 'dynsrc'),
    (None, 'enctype'),
    (None, 'end'),
    (None, 'face'),
    (None, 'for'),
    (None, 'form'),
    (None, 'frame'),
    (None, 'galleryimg'),
    (None, 'gutter'),
    (None, 'headers'),
    (None, 'height'),
    (None, 'hidefocus'),
    (None, 'hidden'),
    (None, 'high'),
    (None, 'href'),
    (None, 'hreflang'),
    (None, 'hspace'),
    (None, 'icon'),
    (None, 'id'),
    (None, 'inputmode'),
    (None, 'ismap'),
    (None, 'keytype'),
    (None, 'label'),
    (None, 'leftspacing'),
    (None, 'lang'),
    (None, 'list'),
    (None, 'longdesc'),
    (None, 'loop'),
    (None, 'loopcount'),
    (None, 'loopend'),
    (None, 'loopstart'),
    (None, 'low'),
    (None, 'lowsrc'),
    (None, 'max'),
    (None, 'maxlength'),
    (None, 'media'),
    (None, 'method'),
    (None, 'min'),
    (None, 'multiple'),
    (None, 'name'),
    (None, 'nohref'),
    (None, 'noshade'),
    (None, 'nowrap'),
    (None, 'open'),
    (None, 'optimum'),
    (None, 'pattern'),
    (None, 'ping'),
    (None, 'point-size'),
    (None, 'poster'),
    (None, 'pqg'),
    (None, 'preload'),
    (None, 'prompt'),
    (None, 'radiogroup'),
    (None, 'readonly'),
    (None, 'rel'),
    (None, 'repeat-max'),
    (None, 'repeat-min'),
    (None, 'replace'),
    (None, 'required'),
    (None, 'rev'),
    (None, 'rightspacing'),
    (None, 'rows'),
    (None, 'rowspan'),
    (None, 'rules'),
    (None, 'scope'),
    (None, 'selected'),
    (None, 'shape'),
    (None, 'size'),
    (None, 'span'),
    (None, 'src'),
    (None, 'start'),
    (None, 'step'),
    (None, 'style'),
    (None, 'summary'),
    (None, 'suppress'),
    (None, 'tabindex'),
    (None, 'target'),
    (None, 'template'),
    (None, 'title'),
    (None, 'toppadding'),
    (None, 'type'),
    (None, 'unselectable'),
    (None, 'usemap'),
    (None, 'urn'),
    (None, 'valign'),
    (None, 'value'),
    (None, 'variable'),
    (None, 'volume'),
    (None, 'vspace'),
    (None, 'vrml'),
    (None, 'width'),
    (None, 'wrap'),
    (namespaces['xml'], 'lang'),
    # MathML attributes
    (None, 'actiontype'),
    (None, 'align'),
    (None, 'columnalign'),
    (None, 'columnalign'),
    (None, 'columnalign'),
    (None, 'columnlines'),
    (None, 'columnspacing'),
    (None, 'columnspan'),
    (None, 'depth'),
    (None, 'display'),
    (None, 'displaystyle'),
    (None, 'equalcolumns'),
    (None, 'equalrows'),
    (None, 'fence'),
    (None, 'fontstyle'),
    (None, 'fontweight'),
    (None, 'frame'),
    (None, 'height'),
    (None, 'linethickness'),
    (None, 'lspace'),
    (None, 'mathbackground'),
    (None, 'mathcolor'),
    (None, 'mathvariant'),
    (None, 'mathvariant'),
    (None, 'maxsize'),
    (None, 'minsize'),
    (None, 'other'),
    (None, 'rowalign'),
    (None, 'rowalign'),
    (None, 'rowalign'),
    (None, 'rowlines'),
    (None, 'rowspacing'),
    (None, 'rowspan'),
    (None, 'rspace'),
    (None, 'scriptlevel'),
    (None, 'selection'),
    (None, 'separator'),
    (None, 'stretchy'),
    (None, 'width'),
    (None, 'width'),
    (namespaces['xlink'], 'href'),
    (namespaces['xlink'], 'show'),
    (namespaces['xlink'], 'type'),
    # SVG attributes
    (None, 'accent-height'),
    (None, 'accumulate'),
    (None, 'additive'),
    (None, 'alphabetic'),
    (None, 'arabic-form'),
    (None, 'ascent'),
    (None, 'attributeName'),
    (None, 'attributeType'),
    (None, 'baseProfile'),
    (None, 'bbox'),
    (None, 'begin'),
    (None, 'by'),
    (None, 'calcMode'),
    (None, 'cap-height'),
    (None, 'class'),
    (None, 'clip-path'),
    (None, 'color'),
    (None, 'color-rendering'),
    (None, 'content'),
    (None, 'cx'),
    (None, 'cy'),
    (None, 'd'),
    (None, 'dx'),
    (None, 'dy'),
    (None, 'descent'),
    (None, 'display'),
    (None, 'dur'),
    (None, 'end'),
    (None, 'fill'),
    (None, 'fill-opacity'),
    (None, 'fill-rule'),
    (None, 'font-family'),
    (None, 'font-size'),
    (None, 'font-stretch'),
    (None, 'font-style'),
    (None, 'font-variant'),
    (None, 'font-weight'),
    (None, 'from'),
    (None, 'fx'),
    (None, 'fy'),
    (None, 'g1'),
    (None, 'g2'),
    (None, 'glyph-name'),
    (None, 'gradientUnits'),
    (None, 'hanging'),
    (None, 'height'),
    (None, 'horiz-adv-x'),
    (None, 'horiz-origin-x'),
    (None, 'id'),
    (None, 'ideographic'),
    (None, 'k'),
    (None, 'keyPoints'),
    (None, 'keySplines'),
    (None, 'keyTimes'),
    (None, 'lang'),
    (None, 'marker-end'),
    (None, 'marker-mid'),
    (None, 'marker-start'),
    (None, 'markerHeight'),
    (None, 'markerUnits'),
    (None, 'markerWidth'),
    (None, 'mathematical'),
    (None, 'max'),
    (None, 'min'),
    (None, 'name'),
    (None, 'offset'),
    (None, 'opacity'),
    (None, 'orient'),
    (None, 'origin'),
    (None, 'overline-position'),
    (None, 'overline-thickness'),
    (None, 'panose-1'),
    (None, 'path'),
    (None, 'pathLength'),
    (None, 'points'),
    (None, 'preserveAspectRatio'),
    (None, 'r'),
    (None, 'refX'),
    (None, 'refY'),
    (None, 'repeatCount'),
    (None, 'repeatDur'),
    (None, 'requiredExtensions'),
    (None, 'requiredFeatures'),
    (None, 'restart'),
    (None, 'rotate'),
    (None, 'rx'),
    (None, 'ry'),
    (None, 'slope'),
    (None, 'stemh'),
    (None, 'stemv'),
    (None, 'stop-color'),
    (None, 'stop-opacity'),
    (None, 'strikethrough-position'),
    (None, 'strikethrough-thickness'),
    (None, 'stroke'),
    (None, 'stroke-dasharray'),
    (None, 'stroke-dashoffset'),
    (None, 'stroke-linecap'),
    (None, 'stroke-linejoin'),
    (None, 'stroke-miterlimit'),
    (None, 'stroke-opacity'),
    (None, 'stroke-width'),
    (None, 'systemLanguage'),
    (None, 'target'),
    (None, 'text-anchor'),
    (None, 'to'),
    (None, 'transform'),
    (None, 'type'),
    (None, 'u1'),
    (None, 'u2'),
    (None, 'underline-position'),
    (None, 'underline-thickness'),
    (None, 'unicode'),
    (None, 'unicode-range'),
    (None, 'units-per-em'),
    (None, 'values'),
    (None, 'version'),
    (None, 'viewBox'),
    (None, 'visibility'),
    (None, 'width'),
    (None, 'widths'),
    (None, 'x'),
    (None, 'x-height'),
    (None, 'x1'),
    (None, 'x2'),
    (namespaces['xlink'], 'actuate'),
    (namespaces['xlink'], 'arcrole'),
    (namespaces['xlink'], 'href'),
    (namespaces['xlink'], 'role'),
    (namespaces['xlink'], 'show'),
    (namespaces['xlink'], 'title'),
    (namespaces['xlink'], 'type'),
    (namespaces['xml'], 'base'),
    (namespaces['xml'], 'lang'),
    (namespaces['xml'], 'space'),
    (None, 'y'),
    (None, 'y1'),
    (None, 'y2'),
    (None, 'zoomAndPan'),
))

# Attributes (namespace, local-name) whose values are URIs.  The sanitizer
# checks these values against ``allowed_protocols`` (and, for data: URIs,
# ``allowed_content_types``) before letting them through.
attr_val_is_uri = frozenset((
    (None, 'href'),
    (None, 'src'),
    (None, 'cite'),
    (None, 'action'),
    (None, 'longdesc'),
    (None, 'poster'),
    (None, 'background'),
    (None, 'datasrc'),
    (None, 'dynsrc'),
    (None, 'lowsrc'),
    (None, 'ping'),
    (namespaces['xlink'], 'href'),
    (namespaces['xml'], 'base'),
))

# SVG attributes whose values may contain ``url(...)`` references; the
# sanitizer blanks out any such reference that is not a local #fragment.
svg_attr_val_allows_ref = frozenset((
    (None, 'clip-path'),
    (None, 'color-profile'),
    (None, 'cursor'),
    (None, 'fill'),
    (None, 'filter'),
    (None, 'marker'),
    (None, 'marker-start'),
    (None, 'marker-mid'),
    (None, 'marker-end'),
    (None, 'mask'),
    (None, 'stroke'),
))

# SVG element names on which a non-local (non-#fragment) xlink:href is
# removed entirely, since it could pull in external content.
svg_allow_local_href = frozenset((
    (None, 'altGlyph'),
    (None, 'animate'),
    (None, 'animateColor'),
    (None, 'animateMotion'),
    (None, 'animateTransform'),
    (None, 'cursor'),
    (None, 'feImage'),
    (None, 'filter'),
    (None, 'linearGradient'),
    (None, 'pattern'),
    (None, 'radialGradient'),
    (None, 'textpath'),
    (None, 'tref'),
    (None, 'set'),
    (None, 'use')
))

# CSS property names preserved by Filter.sanitize_css (compared lowercased).
allowed_css_properties = frozenset((
    'azimuth',
    'background-color',
    'border-bottom-color',
    'border-collapse',
    'border-color',
    'border-left-color',
    'border-right-color',
    'border-top-color',
    'clear',
    'color',
    'cursor',
    'direction',
    'display',
    'elevation',
    'float',
    'font',
    'font-family',
    'font-size',
    'font-style',
    'font-variant',
    'font-weight',
    'height',
    'letter-spacing',
    'line-height',
    'overflow',
    'pause',
    'pause-after',
    'pause-before',
    'pitch',
    'pitch-range',
    'richness',
    'speak',
    'speak-header',
    'speak-numeral',
    'speak-punctuation',
    'speech-rate',
    'stress',
    'text-align',
    'text-decoration',
    'text-indent',
    'unicode-bidi',
    'vertical-align',
    'voice-family',
    'volume',
    'white-space',
    'width',
))

# Keyword values accepted inside shorthand properties (background, border,
# margin, padding) by Filter.sanitize_css, in addition to numeric/color
# values matched by regex.
allowed_css_keywords = frozenset((
    'auto',
    'aqua',
    'black',
    'block',
    'blue',
    'bold',
    'both',
    'bottom',
    'brown',
    'center',
    'collapse',
    'dashed',
    'dotted',
    'fuchsia',
    'gray',
    'green',
    '!important',
    'italic',
    'left',
    'lime',
    'maroon',
    'medium',
    'none',
    'navy',
    'normal',
    'nowrap',
    'olive',
    'pointer',
    'purple',
    'red',
    'right',
    'solid',
    'silver',
    'teal',
    'top',
    'transparent',
    'underline',
    'white',
    'yellow',
))

# SVG presentation properties additionally preserved by sanitize_css.
allowed_svg_properties = frozenset((
    'fill',
    'fill-opacity',
    'fill-rule',
    'stroke',
    'stroke-width',
    'stroke-linecap',
    'stroke-linejoin',
    'stroke-opacity',
))

# URI schemes permitted in URI-valued attributes (see attr_val_is_uri).
allowed_protocols = frozenset((
    'ed2k',
    'ftp',
    'http',
    'https',
    'irc',
    'mailto',
    'news',
    'gopher',
    'nntp',
    'telnet',
    'webcal',
    'xmpp',
    'callto',
    'feed',
    'urn',
    'aim',
    'rsync',
    'tag',
    'ssh',
    'sftp',
    'rtsp',
    'afs',
    'data',
))

# MIME types permitted inside data: URIs.
allowed_content_types = frozenset((
    'image/png',
    'image/jpeg',
    'image/gif',
    'image/webp',
    'image/bmp',
    'text/plain',
))


# Parses the header portion of a data: URI path: captures the MIME type
# into the 'content_type' group and tolerates optional ;charset= and
# ;base64 parameters in either order before the comma-separated payload.
data_content_type = re.compile(r'''
                                ^
                                # Match a content type <application>/<type>
                                (?P<content_type>[-a-zA-Z0-9.]+/[-a-zA-Z0-9.]+)
                                # Match any character set and encoding
                                (?:(?:;charset=(?:[-a-zA-Z0-9]+)(?:;(?:base64))?)
                                  |(?:;(?:base64))?(?:;charset=(?:[-a-zA-Z0-9]+))?)
                                # Assume the rest is data
                                ,.*
                                $
                                ''',
                               re.VERBOSE)


class Filter(base.Filter):
    """Sanitization of XHTML+MathML+SVG and of inline style attributes.

    Tokens for elements not in ``allowed_elements`` are escaped into plain
    character data; attributes not in ``allowed_attributes`` are dropped;
    URI-valued attributes are restricted to ``allowed_protocols`` (and, for
    ``data:`` URIs, to ``allowed_content_types``); ``style`` attribute
    values are reduced to ``allowed_css_properties``/``allowed_css_keywords``
    plus ``allowed_svg_properties``.
    """

    def __init__(self,
                 source,
                 allowed_elements=allowed_elements,
                 allowed_attributes=allowed_attributes,
                 allowed_css_properties=allowed_css_properties,
                 allowed_css_keywords=allowed_css_keywords,
                 allowed_svg_properties=allowed_svg_properties,
                 allowed_protocols=allowed_protocols,
                 allowed_content_types=allowed_content_types,
                 attr_val_is_uri=attr_val_is_uri,
                 svg_attr_val_allows_ref=svg_attr_val_allows_ref,
                 svg_allow_local_href=svg_allow_local_href):
        """Create the filter.

        :param source: the token stream to filter.

        Every other keyword argument defaults to the module-level whitelist
        of the same name and may be overridden to customize what is allowed.
        """
        super(Filter, self).__init__(source)
        self.allowed_elements = allowed_elements
        self.allowed_attributes = allowed_attributes
        self.allowed_css_properties = allowed_css_properties
        self.allowed_css_keywords = allowed_css_keywords
        self.allowed_svg_properties = allowed_svg_properties
        self.allowed_protocols = allowed_protocols
        self.allowed_content_types = allowed_content_types
        self.attr_val_is_uri = attr_val_is_uri
        self.svg_attr_val_allows_ref = svg_attr_val_allows_ref
        self.svg_allow_local_href = svg_allow_local_href

    def __iter__(self):
        # sanitize_token returns None for tokens that must be dropped
        # entirely (comments); everything else passes through (possibly
        # rewritten in place).
        for token in base.Filter.__iter__(self):
            token = self.sanitize_token(token)
            if token:
                yield token

    # Sanitize the +html+, escaping all elements not in ALLOWED_ELEMENTS, and
    # stripping out all # attributes not in ALLOWED_ATTRIBUTES. Style
    # attributes are parsed, and a restricted set, # specified by
    # ALLOWED_CSS_PROPERTIES and ALLOWED_CSS_KEYWORDS, are allowed through.
    # attributes in ATTR_VAL_IS_URI are scanned, and only URI schemes specified
    # in ALLOWED_PROTOCOLS are allowed.
    #
    #   sanitize_html('<script> do_nasty_stuff() </script>')
    #    => &lt;script> do_nasty_stuff() &lt;/script>
    #   sanitize_html('<a href="javascript: sucker();">Click here for $100</a>')
    #    => <a>Click here for $100</a>
    def sanitize_token(self, token):
        """Dispatch a single token: allow, escape, drop, or pass through."""

        # accommodate filters which use token_type differently
        token_type = token["type"]
        if token_type in ("StartTag", "EndTag", "EmptyTag"):
            name = token["name"]
            namespace = token["namespace"]
            # A tag with no namespace is also accepted if the HTML-namespaced
            # version of the same name is whitelisted.
            if ((namespace, name) in self.allowed_elements or
                (namespace is None and
                 (namespaces["html"], name) in self.allowed_elements)):
                return self.allowed_token(token)
            else:
                return self.disallowed_token(token)
        elif token_type == "Comment":
            # Comments are removed entirely (returns None -> dropped).
            pass
        else:
            return token

    def allowed_token(self, token):
        """Strip disallowed and unsafe attributes from an allowed tag."""
        if "data" in token:
            attrs = token["data"]
            attr_names = set(attrs.keys())

            # Remove forbidden attributes
            for to_remove in (attr_names - self.allowed_attributes):
                del token["data"][to_remove]
                attr_names.remove(to_remove)

            # Remove attributes with disallowed URL values
            for attr in (attr_names & self.attr_val_is_uri):
                assert attr in attrs
                # I don't have a clue where this regexp comes from or why it matches those
                # characters, nor why we call unescape. I just know it's always been here.
                # Should you be worried by this comment in a sanitizer? Yes. On the other hand, all
                # this will do is remove *more* than it otherwise would.
                val_unescaped = re.sub(r"[`\x00-\x20\x7f-\xa0\s]+", '',
                                       unescape(attrs[attr])).lower()
                # remove replacement characters from unescaped characters
                val_unescaped = val_unescaped.replace("\ufffd", "")
                try:
                    uri = urlparse.urlparse(val_unescaped)
                except ValueError:
                    uri = None
                    del attrs[attr]
                if uri and uri.scheme:
                    if uri.scheme not in self.allowed_protocols:
                        del attrs[attr]
                    elif uri.scheme == 'data':
                        # BUGFIX: this branch used to be a separate `if`, so a
                        # data: URI whose scheme was excluded from a custom
                        # allowed_protocols could be deleted twice -> KeyError.
                        m = data_content_type.match(uri.path)
                        if not m:
                            del attrs[attr]
                        elif m.group('content_type') not in self.allowed_content_types:
                            del attrs[attr]

            # Blank out non-local url(...) references in SVG attributes.
            for attr in self.svg_attr_val_allows_ref:
                if attr in attrs:
                    attrs[attr] = re.sub(r'url\s*\(\s*[^#\s][^)]+?\)',
                                         ' ',
                                         unescape(attrs[attr]))
            # Drop a non-local xlink:href from SVG elements that would use it
            # to pull in external content.
            if (token["name"] in self.svg_allow_local_href and
                (namespaces['xlink'], 'href') in attrs and re.search(r'^\s*[^#\s].*',
                                                                     attrs[(namespaces['xlink'], 'href')])):
                del attrs[(namespaces['xlink'], 'href')]
            if (None, 'style') in attrs:
                attrs[(None, 'style')] = self.sanitize_css(attrs[(None, 'style')])
            token["data"] = attrs
        return token

    def disallowed_token(self, token):
        """Turn a disallowed tag into escaped Characters text."""
        token_type = token["type"]
        if token_type == "EndTag":
            token["data"] = "</%s>" % token["name"]
        elif token["data"]:
            assert token_type in ("StartTag", "EmptyTag")
            # Re-serialize the attributes, escaping their values.
            attrs = []
            for (ns, name), v in token["data"].items():
                attrs.append(' %s="%s"' % (name if ns is None else "%s:%s" % (prefixes[ns], name), escape(v)))
            token["data"] = "<%s%s>" % (token["name"], ''.join(attrs))
        else:
            token["data"] = "<%s>" % token["name"]
        if token.get("selfClosing"):
            token["data"] = token["data"][:-1] + "/>"

        token["type"] = "Characters"

        del token["name"]
        return token

    def sanitize_css(self, style):
        """Return *style* reduced to whitelisted CSS properties/keywords.

        Returns the empty string if the declaration block fails the
        character/structure "gauntlet" checks.
        """
        # disallow urls
        style = re.compile(r'url\s*\(\s*[^\s)]+?\s*\)\s*').sub(' ', style)

        # gauntlet: reject anything outside a conservative character set
        # or not shaped like `prop: value;` declarations.
        if not re.match(r"""^([:,;#%.\sa-zA-Z0-9!]|\w-\w|'[\s\w]+'|"[\s\w]+"|\([\d,\s]+\))*$""", style):
            return ''
        if not re.match(r"^\s*([-\w]+\s*:[^:;]*(;\s*|$))*$", style):
            return ''

        clean = []
        for prop, value in re.findall(r"([-\w]+)\s*:\s*([^:;]*)", style):
            if not value:
                continue
            if prop.lower() in self.allowed_css_properties:
                clean.append(prop + ': ' + value + ';')
            elif prop.split('-')[0].lower() in ['background', 'border', 'margin',
                                                'padding']:
                # Shorthand properties: every keyword must be whitelisted or
                # look like a color/length value.
                for keyword in value.split():
                    if keyword not in self.allowed_css_keywords and \
                            not re.match(r"^(#[0-9a-f]+|rgb\(\d+%?,\d*%?,?\d*%?\)?|\d{0,2}\.?\d{0,2}(cm|em|ex|in|mm|pc|pt|px|%|,|\))?)$", keyword):  # noqa
                        break
                else:
                    clean.append(prop + ': ' + value + ';')
            elif prop.lower() in self.allowed_svg_properties:
                clean.append(prop + ': ' + value + ';')

        return ' '.join(clean)
_vendor/html5lib/filters/lint.py000064400000006445151733136430012727 0ustar00from __future__ import absolute_import, division, unicode_literals

from pip._vendor.six import text_type

from . import base
from ..constants import namespaces, voidElements

from ..constants import spaceCharacters
# Collapse the frozenset of space characters into a single string so it
# can be passed to str.strip() below.
spaceCharacters = "".join(spaceCharacters)


class Filter(base.Filter):
    """Lints the token stream, asserting that each token is well formed.

    For every token kind it checks the types of name/namespace/data fields,
    that void elements are emitted as EmptyTag (and never as EndTag), and —
    when ``require_matching_tags`` is true — that every EndTag matches the
    most recently opened StartTag.
    """

    def __init__(self, source, require_matching_tags=True):
        """:param require_matching_tags: assert start/end tag pairing."""
        super(Filter, self).__init__(source)
        self.require_matching_tags = require_matching_tags

    def __iter__(self):
        # Stack of (namespace, name) for currently open elements.
        open_elements = []
        for token in base.Filter.__iter__(self):
            type = token["type"]
            if type in ("StartTag", "EmptyTag"):
                namespace = token["namespace"]
                name = token["name"]
                assert namespace is None or isinstance(namespace, text_type)
                assert namespace != ""
                assert isinstance(name, text_type)
                assert name != ""
                assert isinstance(token["data"], dict)
                # Void elements (br, img, ...) must be EmptyTag tokens.
                if (not namespace or namespace == namespaces["html"]) and name in voidElements:
                    assert type == "EmptyTag"
                else:
                    assert type == "StartTag"
                if type == "StartTag" and self.require_matching_tags:
                    open_elements.append((namespace, name))
                for (namespace, name), value in token["data"].items():
                    assert namespace is None or isinstance(namespace, text_type)
                    assert namespace != ""
                    assert isinstance(name, text_type)
                    assert name != ""
                    assert isinstance(value, text_type)

            elif type == "EndTag":
                namespace = token["namespace"]
                name = token["name"]
                assert namespace is None or isinstance(namespace, text_type)
                assert namespace != ""
                assert isinstance(name, text_type)
                assert name != ""
                if (not namespace or namespace == namespaces["html"]) and name in voidElements:
                    assert False, "Void element reported as EndTag token: %(tag)s" % {"tag": name}
                elif self.require_matching_tags:
                    start = open_elements.pop()
                    assert start == (namespace, name)

            elif type == "Comment":
                data = token["data"]
                assert isinstance(data, text_type)

            elif type in ("Characters", "SpaceCharacters"):
                data = token["data"]
                assert isinstance(data, text_type)
                assert data != ""
                if type == "SpaceCharacters":
                    assert data.strip(spaceCharacters) == ""

            elif type == "Doctype":
                name = token["name"]
                assert name is None or isinstance(name, text_type)
                # BUGFIX: these two asserts previously re-checked `name`
                # instead of the publicId/systemId values themselves, so a
                # non-text publicId/systemId was never caught.
                assert token["publicId"] is None or isinstance(token["publicId"], text_type)
                assert token["systemId"] is None or isinstance(token["systemId"], text_type)

            elif type == "Entity":
                assert isinstance(token["name"], text_type)

            elif type == "SerializerError":
                assert isinstance(token["data"], text_type)

            else:
                assert False, "Unknown token type: %(type)s" % {"type": type}

            yield token
_vendor/html5lib/_inputstream.py000064400000077424151733136430013030 0ustar00from __future__ import absolute_import, division, unicode_literals

from pip._vendor.six import text_type, binary_type
from pip._vendor.six.moves import http_client, urllib

import codecs
import re

from pip._vendor import webencodings

from .constants import EOF, spaceCharacters, asciiLetters, asciiUppercase
from .constants import ReparseException
from . import _utils

from io import StringIO

try:
    from io import BytesIO
except ImportError:
    BytesIO = StringIO

# Non-unicode versions of constants for use in the pre-parser
spaceCharactersBytes = frozenset([item.encode("ascii") for item in spaceCharacters])
asciiLettersBytes = frozenset([item.encode("ascii") for item in asciiLetters])
asciiUppercaseBytes = frozenset([item.encode("ascii") for item in asciiUppercase])
spacesAngleBrackets = spaceCharactersBytes | frozenset([b">", b"<"])


# Character class of code points reported as "invalid-codepoint" parse
# errors: C0/C1 controls (minus tab/LF/CR/FF), DEL, and Unicode
# noncharacters.  Lone surrogates are appended separately below.
invalid_unicode_no_surrogate = "[\u0001-\u0008\u000B\u000E-\u001F\u007F-\u009F\uFDD0-\uFDEF\uFFFE\uFFFF\U0001FFFE\U0001FFFF\U0002FFFE\U0002FFFF\U0003FFFE\U0003FFFF\U0004FFFE\U0004FFFF\U0005FFFE\U0005FFFF\U0006FFFE\U0006FFFF\U0007FFFE\U0007FFFF\U0008FFFE\U0008FFFF\U0009FFFE\U0009FFFF\U000AFFFE\U000AFFFF\U000BFFFE\U000BFFFF\U000CFFFE\U000CFFFF\U000DFFFE\U000DFFFF\U000EFFFE\U000EFFFF\U000FFFFE\U000FFFFF\U0010FFFE\U0010FFFF]"  # noqa

if _utils.supports_lone_surrogates:
    # Use one extra step of indirection and create surrogates with
    # eval. Not using this indirection would introduce an illegal
    # unicode literal on platforms not supporting such lone
    # surrogates.
    assert invalid_unicode_no_surrogate[-1] == "]" and invalid_unicode_no_surrogate.count("]") == 1
    invalid_unicode_re = re.compile(invalid_unicode_no_surrogate[:-1] +
                                    eval('"\\uD800-\\uDFFF"') +  # pylint:disable=eval-used
                                    "]")
else:
    invalid_unicode_re = re.compile(invalid_unicode_no_surrogate)

# The noncharacter code points above U+FFFF, used by the UCS-2 error
# checker to test reconstructed surrogate-pair values.
non_bmp_invalid_codepoints = set([0x1FFFE, 0x1FFFF, 0x2FFFE, 0x2FFFF, 0x3FFFE,
                                  0x3FFFF, 0x4FFFE, 0x4FFFF, 0x5FFFE, 0x5FFFF,
                                  0x6FFFE, 0x6FFFF, 0x7FFFE, 0x7FFFF, 0x8FFFE,
                                  0x8FFFF, 0x9FFFE, 0x9FFFF, 0xAFFFE, 0xAFFFF,
                                  0xBFFFE, 0xBFFFF, 0xCFFFE, 0xCFFFF, 0xDFFFE,
                                  0xDFFFF, 0xEFFFE, 0xEFFFF, 0xFFFFE, 0xFFFFF,
                                  0x10FFFE, 0x10FFFF])

# ASCII whitespace and punctuation.
ascii_punctuation_re = re.compile("[\u0009-\u000D\u0020-\u002F\u003A-\u0040\u005B-\u0060\u007B-\u007E]")

# Cache for charsUntil()
charsUntilRegEx = {}


class BufferedStream(object):
    """Buffering for streams that do not have buffering of their own

    The buffer is implemented as a list of chunks on the assumption that
    joining many strings will be slow since it is O(n**2)
    """

    def __init__(self, stream):
        self.stream = stream
        self.buffer = []         # list of chunks read from the stream so far
        self.position = [-1, 0]  # chunk number, offset

    def tell(self):
        """Return the absolute read position within the buffered data."""
        pos = 0
        for chunk in self.buffer[:self.position[0]]:
            pos += len(chunk)
        pos += self.position[1]
        return pos

    def seek(self, pos):
        """Reposition within already-buffered data (never past its end)."""
        assert pos <= self._bufferedBytes()
        # Walk the chunks to translate the absolute position into a
        # (chunk index, offset) pair.
        offset = pos
        i = 0
        while len(self.buffer[i]) < offset:
            offset -= len(self.buffer[i])
            i += 1
        self.position = [i, offset]

    def read(self, bytes):
        """Read up to *bytes* bytes, serving from the buffer when possible."""
        if not self.buffer:
            return self._readStream(bytes)
        elif (self.position[0] == len(self.buffer) - 1 and
              self.position[1] == len(self.buffer[-1])):
            # Fast path: positioned at the very end of the buffered data, so
            # read straight from the underlying stream.  BUGFIX: this used to
            # compare position[0] against len(self.buffer), which can never
            # hold (chunk indices top out at len - 1), so the fast path was
            # unreachable and every read fell through to _readFromBuffer.
            # _readFromBuffer ends up calling _readStream anyway, so the
            # returned data is identical either way.
            return self._readStream(bytes)
        else:
            return self._readFromBuffer(bytes)

    def _bufferedBytes(self):
        # Total number of bytes held across all buffered chunks.
        return sum([len(item) for item in self.buffer])

    def _readStream(self, bytes):
        # Pull a fresh chunk from the stream, buffer it, and position the
        # cursor at its end.
        data = self.stream.read(bytes)
        self.buffer.append(data)
        self.position[0] += 1
        self.position[1] = len(data)
        return data

    def _readFromBuffer(self, bytes):
        # Satisfy the read from buffered chunks, falling back to the stream
        # for whatever remains.
        remainingBytes = bytes
        rv = []
        bufferIndex = self.position[0]
        bufferOffset = self.position[1]
        while bufferIndex < len(self.buffer) and remainingBytes != 0:
            assert remainingBytes > 0
            bufferedData = self.buffer[bufferIndex]

            if remainingBytes <= len(bufferedData) - bufferOffset:
                # The current chunk covers the rest of the request.
                bytesToRead = remainingBytes
                self.position = [bufferIndex, bufferOffset + bytesToRead]
            else:
                # Consume the current chunk and move on to the next one.
                bytesToRead = len(bufferedData) - bufferOffset
                self.position = [bufferIndex, len(bufferedData)]
                bufferIndex += 1
            rv.append(bufferedData[bufferOffset:bufferOffset + bytesToRead])
            remainingBytes -= bytesToRead

            bufferOffset = 0

        if remainingBytes:
            rv.append(self._readStream(remainingBytes))

        return b"".join(rv)


def HTMLInputStream(source, **kwargs):
    """Open *source* as an HTML character stream.

    Text input (or a file-like object yielding text) produces an
    HTMLUnicodeInputStream; byte input produces an HTMLBinaryInputStream.
    """
    # Work around Python bug #20007: read(0) closes the connection.
    # http://bugs.python.org/issue20007
    if isinstance(source, http_client.HTTPResponse):
        isUnicode = False
    elif (isinstance(source, urllib.response.addbase) and
          isinstance(source.fp, http_client.HTTPResponse)):
        # addinfourl wrapping an HTTPResponse: the same workaround applies.
        isUnicode = False
    elif hasattr(source, "read"):
        # Probe the file-like object: a zero-length read tells us whether it
        # yields text or bytes without consuming anything.
        isUnicode = isinstance(source.read(0), text_type)
    else:
        isUnicode = isinstance(source, text_type)

    if not isUnicode:
        return HTMLBinaryInputStream(source, **kwargs)

    # Encoding hints make no sense for already-decoded input.
    encodings = [x for x in kwargs if x.endswith("_encoding")]
    if encodings:
        raise TypeError("Cannot set an encoding with a unicode input, set %r" % encodings)
    return HTMLUnicodeInputStream(source, **kwargs)


class HTMLUnicodeInputStream(object):
    """Provides a unicode stream of characters to the HTMLTokenizer.

    This class takes care of character encoding and removing or replacing
    incorrect byte-sequences and also provides column and line tracking.

    """

    # How many characters to request from the data stream per readChunk().
    _defaultChunkSize = 10240

    def __init__(self, source):
        """Initialises the HTMLInputStream.

        HTMLInputStream(source, [encoding]) -> Normalized stream from source
        for use by html5lib.

        source can be either a file-object, local filename or a string.

        The optional encoding parameter must be a string that indicates
        the encoding.  If specified, that encoding will be used,
        regardless of any BOM or later declaration (such as in a meta
        element)

        """

        if not _utils.supports_lone_surrogates:
            # Such platforms will have already checked for such
            # surrogate errors, so no need to do this checking.
            self.reportCharacterErrors = None
        elif len("\U0010FFFF") == 1:
            # Wide (UCS-4) build: each code point is one character.
            self.reportCharacterErrors = self.characterErrorsUCS4
        else:
            # Narrow (UCS-2) build: astral code points arrive as surrogate
            # pairs and need to be reassembled for checking.
            self.reportCharacterErrors = self.characterErrorsUCS2

        # List of where new lines occur
        self.newLines = [0]

        # Unicode input is already decoded, so the encoding is "certain".
        self.charEncoding = (lookupEncoding("utf-8"), "certain")
        self.dataStream = self.openStream(source)

        self.reset()

    def reset(self):
        # Clear all chunk, position and error state so reading starts from
        # the beginning of dataStream.
        self.chunk = ""
        self.chunkSize = 0
        self.chunkOffset = 0
        self.errors = []

        # number of (complete) lines in previous chunks
        self.prevNumLines = 0
        # number of columns in the last line of the previous chunk
        self.prevNumCols = 0

        # Deal with CR LF and surrogates split over chunk boundaries
        self._bufferedCharacter = None

    def openStream(self, source):
        """Produces a file object from source.

        source can be either a file object, local filename or a string.

        """
        # Already a file object
        if hasattr(source, 'read'):
            stream = source
        else:
            stream = StringIO(source)

        return stream

    def _position(self, offset):
        # Translate an offset within the current chunk into an absolute
        # (line, column) pair, using the totals carried over from previous
        # chunks (prevNumLines / prevNumCols).
        chunk = self.chunk
        nLines = chunk.count('\n', 0, offset)
        positionLine = self.prevNumLines + nLines
        lastLinePos = chunk.rfind('\n', 0, offset)
        if lastLinePos == -1:
            # No newline in this chunk before offset: still on the line that
            # began in a previous chunk.
            positionColumn = self.prevNumCols + offset
        else:
            positionColumn = offset - (lastLinePos + 1)
        return (positionLine, positionColumn)

    def position(self):
        """Returns (line, col) of the current position in the stream."""
        # Lines are reported 1-based; columns 0-based.
        line, col = self._position(self.chunkOffset)
        return (line + 1, col)

    def char(self):
        """ Read one character from the stream or queue if available. Return
            EOF when EOF is reached.
        """
        # Read a new chunk from the input stream if necessary
        if self.chunkOffset >= self.chunkSize:
            if not self.readChunk():
                return EOF

        chunkOffset = self.chunkOffset
        char = self.chunk[chunkOffset]
        self.chunkOffset = chunkOffset + 1

        return char

    def readChunk(self, chunkSize=None):
        # Pull the next chunk from dataStream, normalize line endings, and
        # record character errors.  Returns False at EOF.
        if chunkSize is None:
            chunkSize = self._defaultChunkSize

        # Roll the line/column totals forward past the chunk being replaced.
        self.prevNumLines, self.prevNumCols = self._position(self.chunkSize)

        self.chunk = ""
        self.chunkSize = 0
        self.chunkOffset = 0

        data = self.dataStream.read(chunkSize)

        # Deal with CR LF and surrogates broken across chunks
        if self._bufferedCharacter:
            data = self._bufferedCharacter + data
            self._bufferedCharacter = None
        elif not data:
            # We have no more data, bye-bye stream
            return False

        if len(data) > 1:
            # Hold back a trailing CR or high surrogate: its meaning depends
            # on the first character of the next chunk.
            lastv = ord(data[-1])
            if lastv == 0x0D or 0xD800 <= lastv <= 0xDBFF:
                self._bufferedCharacter = data[-1]
                data = data[:-1]

        if self.reportCharacterErrors:
            self.reportCharacterErrors(data)

        # Replace invalid characters
        data = data.replace("\r\n", "\n")
        data = data.replace("\r", "\n")

        self.chunk = data
        self.chunkSize = len(data)

        return True

    def characterErrorsUCS4(self, data):
        # One "invalid-codepoint" error per match; the values themselves are
        # left in place.
        for _ in range(len(invalid_unicode_re.findall(data))):
            self.errors.append("invalid-codepoint")

    def characterErrorsUCS2(self, data):
        # Someone picked the wrong compile option
        # You lose
        skip = False
        for match in invalid_unicode_re.finditer(data):
            if skip:
                # NOTE(review): `skip` is never reset to False on this path,
                # so after the first surrogate pair every later match in this
                # chunk is skipped.  It looks like the intent was to skip only
                # the pair's low surrogate -- confirm against upstream html5lib.
                continue
            codepoint = ord(match.group())
            pos = match.start()
            # Pretty sure there should be endianness issues here
            if _utils.isSurrogatePair(data[pos:pos + 2]):
                # We have a surrogate pair!
                char_val = _utils.surrogatePairToCodepoint(data[pos:pos + 2])
                if char_val in non_bmp_invalid_codepoints:
                    self.errors.append("invalid-codepoint")
                skip = True
            elif (codepoint >= 0xD800 and codepoint <= 0xDFFF and
                  pos == len(data) - 1):
                # Lone high surrogate at the very end of the chunk.
                self.errors.append("invalid-codepoint")
            else:
                skip = False
                self.errors.append("invalid-codepoint")

    def charsUntil(self, characters, opposite=False):
        """ Returns a string of characters from the stream up to but not
        including any character in 'characters' or EOF. 'characters' must be
        a container that supports the 'in' method and iteration over its
        characters.
        """

        # Use a cache of regexps to find the required characters
        try:
            chars = charsUntilRegEx[(characters, opposite)]
        except KeyError:
            if __debug__:
                # The escaping below only covers ASCII.
                for c in characters:
                    assert(ord(c) < 128)
            regex = "".join(["\\x%02x" % ord(c) for c in characters])
            if not opposite:
                # Negate the class: match runs of characters NOT in the set.
                regex = "^%s" % regex
            chars = charsUntilRegEx[(characters, opposite)] = re.compile("[%s]+" % regex)

        rv = []

        while True:
            # Find the longest matching prefix
            m = chars.match(self.chunk, self.chunkOffset)
            if m is None:
                # If nothing matched, and it wasn't because we ran out of chunk,
                # then stop
                if self.chunkOffset != self.chunkSize:
                    break
            else:
                end = m.end()
                # If not the whole chunk matched, return everything
                # up to the part that didn't match
                if end != self.chunkSize:
                    rv.append(self.chunk[self.chunkOffset:end])
                    self.chunkOffset = end
                    break
            # If the whole remainder of the chunk matched,
            # use it all and read the next chunk
            rv.append(self.chunk[self.chunkOffset:])
            if not self.readChunk():
                # Reached EOF
                break

        r = "".join(rv)
        return r

    def unget(self, char):
        # Only one character is allowed to be ungotten at once - it must
        # be consumed again before any further call to unget
        if char is not None:
            if self.chunkOffset == 0:
                # unget is called quite rarely, so it's a good idea to do
                # more work here if it saves a bit of work in the frequently
                # called char and charsUntil.
                # So, just prepend the ungotten character onto the current
                # chunk:
                self.chunk = char + self.chunk
                self.chunkSize += 1
            else:
                self.chunkOffset -= 1
                assert self.chunk[self.chunkOffset] == char


class HTMLBinaryInputStream(HTMLUnicodeInputStream):
    """Provides a unicode stream of characters to the HTMLTokenizer.

    This class takes care of character encoding and removing or replacing
    incorrect byte-sequences and also provides column and line tracking.

    """

    def __init__(self, source, override_encoding=None, transport_encoding=None,
                 same_origin_parent_encoding=None, likely_encoding=None,
                 default_encoding="windows-1252", useChardet=True):
        """Initialises the HTMLInputStream.

        HTMLInputStream(source, [encoding]) -> Normalized stream from source
        for use by html5lib.

        source can be either a file-object, local filename or a string.

        The optional encoding parameter must be a string that indicates
        the encoding.  If specified, that encoding will be used,
        regardless of any BOM or later declaration (such as in a meta
        element)

        """
        # Raw Stream - for unicode objects this will encode to utf-8 and set
        #              self.charEncoding as appropriate
        self.rawStream = self.openStream(source)

        HTMLUnicodeInputStream.__init__(self, self.rawStream)

        # Encoding Information
        # Number of bytes to use when looking for a meta element with
        # encoding information
        self.numBytesMeta = 1024
        # Number of bytes to use when using detecting encoding using chardet
        self.numBytesChardet = 100
        # Things from args
        self.override_encoding = override_encoding
        self.transport_encoding = transport_encoding
        self.same_origin_parent_encoding = same_origin_parent_encoding
        self.likely_encoding = likely_encoding
        self.default_encoding = default_encoding

        # Determine encoding: a (codec, confidence) tuple, never None
        self.charEncoding = self.determineEncoding(useChardet)
        assert self.charEncoding[0] is not None

        # Call superclass
        self.reset()

    def reset(self):
        # Rebuild the decoding reader over the raw byte stream using the
        # currently-determined encoding; undecodable bytes are replaced.
        self.dataStream = self.charEncoding[0].codec_info.streamreader(self.rawStream, 'replace')
        HTMLUnicodeInputStream.reset(self)

    def openStream(self, source):
        """Produces a file object from source.

        source can be either a file object, local filename or a string.

        """
        # Already a file object
        if hasattr(source, 'read'):
            stream = source
        else:
            stream = BytesIO(source)

        try:
            # Encoding detection needs to rewind the stream; probe for
            # seekability with a no-op seek.
            stream.seek(stream.tell())
        except:  # pylint:disable=bare-except
            # Non-seekable streams are wrapped in a buffer that supports
            # rewinding.
            stream = BufferedStream(stream)

        return stream

    def determineEncoding(self, chardet=True):
        """Try each encoding source in precedence order and return the
        first hit as a (codec, confidence) tuple."""
        # BOMs take precedence over everything
        # This will also read past the BOM if present
        charEncoding = self.detectBOM(), "certain"
        if charEncoding[0] is not None:
            return charEncoding

        # If we've been overriden, we've been overriden
        charEncoding = lookupEncoding(self.override_encoding), "certain"
        if charEncoding[0] is not None:
            return charEncoding

        # Now check the transport layer
        charEncoding = lookupEncoding(self.transport_encoding), "certain"
        if charEncoding[0] is not None:
            return charEncoding

        # Look for meta elements with encoding information
        charEncoding = self.detectEncodingMeta(), "tentative"
        if charEncoding[0] is not None:
            return charEncoding

        # Parent document encoding (ignored if it is a UTF-16 variant)
        charEncoding = lookupEncoding(self.same_origin_parent_encoding), "tentative"
        if charEncoding[0] is not None and not charEncoding[0].name.startswith("utf-16"):
            return charEncoding

        # "likely" encoding
        charEncoding = lookupEncoding(self.likely_encoding), "tentative"
        if charEncoding[0] is not None:
            return charEncoding

        # Guess with chardet, if available
        if chardet:
            try:
                from chardet.universaldetector import UniversalDetector
            except ImportError:
                pass
            else:
                buffers = []
                detector = UniversalDetector()
                while not detector.done:
                    buffer = self.rawStream.read(self.numBytesChardet)
                    assert isinstance(buffer, bytes)
                    if not buffer:
                        break
                    buffers.append(buffer)
                    detector.feed(buffer)
                detector.close()
                encoding = lookupEncoding(detector.result['encoding'])
                # Rewind so the detected encoding decodes from the start.
                self.rawStream.seek(0)
                if encoding is not None:
                    return encoding, "tentative"

        # Try the default encoding
        charEncoding = lookupEncoding(self.default_encoding), "tentative"
        if charEncoding[0] is not None:
            return charEncoding

        # Fallback to html5lib's default if even that hasn't worked
        return lookupEncoding("windows-1252"), "tentative"

    def changeEncoding(self, newEncoding):
        """Switch to *newEncoding* mid-parse.

        Raises ReparseException when the document needs to be reparsed
        from the beginning with the new encoding.
        """
        assert self.charEncoding[1] != "certain"
        newEncoding = lookupEncoding(newEncoding)
        if newEncoding is None:
            return
        if newEncoding.name in ("utf-16be", "utf-16le"):
            # A late switch to UTF-16 is treated as UTF-8 instead.
            newEncoding = lookupEncoding("utf-8")
            assert newEncoding is not None
        elif newEncoding == self.charEncoding[0]:
            self.charEncoding = (self.charEncoding[0], "certain")
        else:
            self.rawStream.seek(0)
            # Capture the previous encoding *before* overwriting
            # self.charEncoding; the old code formatted the message after
            # reassignment, so it always reported the new encoding twice.
            oldEncoding = self.charEncoding[0]
            self.charEncoding = (newEncoding, "certain")
            self.reset()
            raise ReparseException("Encoding changed from %s to %s" % (oldEncoding, newEncoding))

    def detectBOM(self):
        """Attempts to detect at BOM at the start of the stream. If
        an encoding can be determined from the BOM return the name of the
        encoding otherwise return None"""
        bomDict = {
            codecs.BOM_UTF8: 'utf-8',
            codecs.BOM_UTF16_LE: 'utf-16le', codecs.BOM_UTF16_BE: 'utf-16be',
            codecs.BOM_UTF32_LE: 'utf-32le', codecs.BOM_UTF32_BE: 'utf-32be'
        }

        # Go to beginning of file and read in 4 bytes
        string = self.rawStream.read(4)
        assert isinstance(string, bytes)

        # Try detecting the BOM using bytes from the string
        encoding = bomDict.get(string[:3])         # UTF-8
        seek = 3
        if not encoding:
            # Need to detect UTF-32 before UTF-16
            encoding = bomDict.get(string)         # UTF-32
            seek = 4
            if not encoding:
                encoding = bomDict.get(string[:2])  # UTF-16
                seek = 2

        # Set the read position past the BOM if one was found, otherwise
        # set it to the start of the stream
        if encoding:
            self.rawStream.seek(seek)
            return lookupEncoding(encoding)
        else:
            self.rawStream.seek(0)
            return None

    def detectEncodingMeta(self):
        """Report the encoding declared by the meta element
        """
        buffer = self.rawStream.read(self.numBytesMeta)
        assert isinstance(buffer, bytes)
        parser = EncodingParser(buffer)
        self.rawStream.seek(0)
        encoding = parser.getEncoding()

        if encoding is not None and encoding.name in ("utf-16be", "utf-16le"):
            # A meta-declared UTF-16 family encoding is mapped to UTF-8.
            encoding = lookupEncoding("utf-8")

        return encoding


class EncodingBytes(bytes):
    """String-like object with an associated position and various extra methods
    If the position is ever greater than the string length then an exception is
    raised"""
    def __new__(self, value):
        # Lower-case once on construction so all later matching is
        # case-insensitive.
        assert isinstance(value, bytes)
        return bytes.__new__(self, value.lower())

    def __init__(self, value):
        # pylint:disable=unused-argument
        # Position starts *before* the first byte; the first next() call
        # advances to index 0.
        self._position = -1

    def __iter__(self):
        return self

    def __next__(self):
        # Advance first, then return the byte at the new position as a
        # one-byte slice (slicing yields bytes on both Py2 and Py3).
        p = self._position = self._position + 1
        if p >= len(self):
            raise StopIteration
        elif p < 0:
            raise TypeError
        return self[p:p + 1]

    def next(self):
        # Py2 compat
        return self.__next__()

    def previous(self):
        # Step the position back one and return the byte at the new position.
        p = self._position
        if p >= len(self):
            raise StopIteration
        elif p < 0:
            raise TypeError
        self._position = p = p - 1
        return self[p:p + 1]

    def setPosition(self, position):
        # Reading or writing the position once it has run off the end of
        # the data raises StopIteration, which callers use as the
        # "out of input" signal.
        if self._position >= len(self):
            raise StopIteration
        self._position = position

    def getPosition(self):
        if self._position >= len(self):
            raise StopIteration
        if self._position >= 0:
            return self._position
        else:
            return None

    position = property(getPosition, setPosition)

    def getCurrentByte(self):
        # One-byte slice rather than indexing, for Py2/Py3 consistency.
        return self[self.position:self.position + 1]

    currentByte = property(getCurrentByte)

    def skip(self, chars=spaceCharactersBytes):
        """Skip past a list of characters"""
        p = self.position               # use property for the error-checking
        while p < len(self):
            c = self[p:p + 1]
            if c not in chars:
                self._position = p
                return c
            p += 1
        self._position = p
        return None

    def skipUntil(self, chars):
        # Advance to the first byte contained in chars; return it, or None
        # if the end of the data is reached first.
        p = self.position
        while p < len(self):
            c = self[p:p + 1]
            if c in chars:
                self._position = p
                return c
            p += 1
        self._position = p
        return None

    def matchBytes(self, bytes):
        """Look for a sequence of bytes at the start of a string. If the bytes
        are found return True and advance the position to the byte after the
        match. Otherwise return False and leave the position alone"""
        p = self.position
        data = self[p:p + len(bytes)]
        rv = data.startswith(bytes)
        if rv:
            self.position += len(bytes)
        return rv

    def jumpTo(self, bytes):
        """Look for the next sequence of bytes matching a given sequence. If
        a match is found advance the position to the last byte of the match"""
        newPosition = self[self.position:].find(bytes)
        if newPosition > -1:
            # XXX: This is ugly, but I can't see a nicer way to fix this.
            if self._position == -1:
                self._position = 0
            self._position += (newPosition + len(bytes) - 1)
            return True
        else:
            # No match: signal "out of input" to the caller.
            raise StopIteration


class EncodingParser(object):
    """Mini parser for detecting character encoding from meta elements"""

    def __init__(self, data):
        """string - the data to work on for encoding detection"""
        self.data = EncodingBytes(data)
        self.encoding = None

    def getEncoding(self):
        """Scan the byte prefix for markup that can declare an encoding
        and return the detected codec, or None."""
        # Checked in order, so longer/more specific prefixes (b"<!--")
        # must come before their shorter counterparts (b"<!").
        methodDispatch = (
            (b"<!--", self.handleComment),
            (b"<meta", self.handleMeta),
            (b"</", self.handlePossibleEndTag),
            (b"<!", self.handleOther),
            (b"<?", self.handleOther),
            (b"<", self.handlePossibleStartTag))
        for _ in self.data:
            keepParsing = True
            for key, method in methodDispatch:
                if self.data.matchBytes(key):
                    try:
                        keepParsing = method()
                        break
                    except StopIteration:
                        # Ran off the end of the buffered prefix; give up.
                        keepParsing = False
                        break
            if not keepParsing:
                break

        return self.encoding

    def handleComment(self):
        """Skip over comments"""
        return self.data.jumpTo(b"-->")

    def handleMeta(self):
        # Process a <meta ...> element, looking for charset / http-equiv /
        # content attributes that declare an encoding.  Returns False once
        # an encoding has been committed to self.encoding, True to keep
        # scanning the rest of the buffer.
        if self.data.currentByte not in spaceCharactersBytes:
            # if we have <meta not followed by a space so just keep going
            return True
        # We have a valid meta element we want to search for attributes
        hasPragma = False
        pendingEncoding = None
        while True:
            # Try to find the next attribute after the current position
            attr = self.getAttribute()
            if attr is None:
                return True
            else:
                if attr[0] == b"http-equiv":
                    hasPragma = attr[1] == b"content-type"
                    if hasPragma and pendingEncoding is not None:
                        # A content attribute seen earlier now becomes valid.
                        self.encoding = pendingEncoding
                        return False
                elif attr[0] == b"charset":
                    tentativeEncoding = attr[1]
                    codec = lookupEncoding(tentativeEncoding)
                    if codec is not None:
                        self.encoding = codec
                        return False
                elif attr[0] == b"content":
                    contentParser = ContentAttrParser(EncodingBytes(attr[1]))
                    tentativeEncoding = contentParser.parse()
                    if tentativeEncoding is not None:
                        codec = lookupEncoding(tentativeEncoding)
                        if codec is not None:
                            if hasPragma:
                                self.encoding = codec
                                return False
                            else:
                                # Remember it in case the pragma attribute
                                # appears later in the same element.
                                pendingEncoding = codec

    def handlePossibleStartTag(self):
        return self.handlePossibleTag(False)

    def handlePossibleEndTag(self):
        # Skip the "/" that matchBytes left us on, then treat as a tag.
        next(self.data)
        return self.handlePossibleTag(True)

    def handlePossibleTag(self, endTag):
        data = self.data
        if data.currentByte not in asciiLettersBytes:
            # If the next byte is not an ascii letter either ignore this
            # fragment (possible start tag case) or treat it according to
            # handleOther
            if endTag:
                data.previous()
                self.handleOther()
            return True

        c = data.skipUntil(spacesAngleBrackets)
        if c == b"<":
            # return to the first step in the overall "two step" algorithm
            # reprocessing the < byte
            data.previous()
        else:
            # Read all attributes
            attr = self.getAttribute()
            while attr is not None:
                attr = self.getAttribute()
        return True

    def handleOther(self):
        # Skip to the end of the current markup construct.
        return self.data.jumpTo(b">")

    def getAttribute(self):
        """Return a name,value pair for the next attribute in the stream,
        if one is found, or None

        The numbered steps below follow the HTML "get an attribute"
        algorithm used during encoding sniffing.
        """
        data = self.data
        # Step 1 (skip chars)
        c = data.skip(spaceCharactersBytes | frozenset([b"/"]))
        assert c is None or len(c) == 1
        # Step 2
        if c in (b">", None):
            return None
        # Step 3
        attrName = []
        attrValue = []
        # Step 4 attribute name
        while True:
            if c == b"=" and attrName:
                break
            elif c in spaceCharactersBytes:
                # Step 6!
                c = data.skip()
                break
            elif c in (b"/", b">"):
                return b"".join(attrName), b""
            elif c in asciiUppercaseBytes:
                attrName.append(c.lower())
            elif c is None:
                return None
            else:
                attrName.append(c)
            # Step 5
            c = next(data)
        # Step 7
        if c != b"=":
            data.previous()
            return b"".join(attrName), b""
        # Step 8
        next(data)
        # Step 9
        c = data.skip()
        # Step 10
        if c in (b"'", b'"'):
            # 10.1
            quoteChar = c
            while True:
                # 10.2
                c = next(data)
                # 10.3
                if c == quoteChar:
                    next(data)
                    return b"".join(attrName), b"".join(attrValue)
                # 10.4
                elif c in asciiUppercaseBytes:
                    attrValue.append(c.lower())
                # 10.5
                else:
                    attrValue.append(c)
        elif c == b">":
            return b"".join(attrName), b""
        elif c in asciiUppercaseBytes:
            attrValue.append(c.lower())
        elif c is None:
            return None
        else:
            attrValue.append(c)
        # Step 11
        while True:
            c = next(data)
            if c in spacesAngleBrackets:
                return b"".join(attrName), b"".join(attrValue)
            elif c in asciiUppercaseBytes:
                attrValue.append(c.lower())
            elif c is None:
                return None
            else:
                attrValue.append(c)


class ContentAttrParser(object):
    """Parses a meta ``content`` attribute value, extracting the encoding
    label that follows ``charset=`` (if present)."""
    def __init__(self, data):
        # data is expected to be an EncodingBytes instance (a bytes
        # subclass), so the isinstance check below holds.
        assert isinstance(data, bytes)
        self.data = data

    def parse(self):
        """Return the encoding label as bytes, or None if not found."""
        try:
            # Check if the attr name is charset
            # otherwise return
            self.data.jumpTo(b"charset")
            self.data.position += 1
            self.data.skip()
            if not self.data.currentByte == b"=":
                # If there is no = sign keep looking for attrs
                return None
            self.data.position += 1
            self.data.skip()
            # Look for an encoding between matching quote marks
            if self.data.currentByte in (b'"', b"'"):
                quoteMark = self.data.currentByte
                self.data.position += 1
                oldPosition = self.data.position
                if self.data.jumpTo(quoteMark):
                    return self.data[oldPosition:self.data.position]
                else:
                    return None
            else:
                # Unquoted value
                oldPosition = self.data.position
                try:
                    self.data.skipUntil(spaceCharactersBytes)
                    return self.data[oldPosition:self.data.position]
                except StopIteration:
                    # Return the whole remaining value
                    return self.data[oldPosition:]
        except StopIteration:
            # Ran off the end of the attribute value at any point above.
            return None


def lookupEncoding(encoding):
    """Return the python codec name corresponding to an encoding or None if the
    string doesn't correspond to a valid encoding.

    Accepts str or bytes labels; bytes labels that are not ASCII, unknown
    labels, and None all map to None.
    """
    if isinstance(encoding, binary_type):
        try:
            encoding = encoding.decode("ascii")
        except UnicodeDecodeError:
            return None
    if encoding is None:
        return None
    try:
        return webencodings.lookup(encoding)
    except AttributeError:
        # Non-string labels make webencodings blow up; treat as unknown.
        return None
_vendor/html5lib/serializer.py000064400000033541151733136430012457 0ustar00from __future__ import absolute_import, division, unicode_literals
from pip._vendor.six import text_type

import re

from codecs import register_error, xmlcharrefreplace_errors

from .constants import voidElements, booleanAttributes, spaceCharacters
from .constants import rcdataElements, entities, xmlEntities
from . import treewalkers, _utils
from xml.sax.saxutils import escape

# Characters that force attribute-value quoting under the "spec" policy:
# HTML whitespace plus the unquoted-attribute delimiters "'=<>`.
_quoteAttributeSpecChars = "".join(spaceCharacters) + "\"'=<>`"
_quoteAttributeSpec = re.compile("[" + _quoteAttributeSpecChars + "]")
# The "legacy" policy additionally quotes on control characters and the
# assorted Unicode space characters, for safety in older browsers.
_quoteAttributeLegacy = re.compile("[" + _quoteAttributeSpecChars +
                                   "\x00\x01\x02\x03\x04\x05\x06\x07\x08\t\n"
                                   "\x0b\x0c\r\x0e\x0f\x10\x11\x12\x13\x14\x15"
                                   "\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f"
                                   "\x20\x2f\x60\xa0\u1680\u180e\u180f\u2000"
                                   "\u2001\u2002\u2003\u2004\u2005\u2006\u2007"
                                   "\u2008\u2009\u200a\u2028\u2029\u202f\u205f"
                                   "\u3000]")


# Map codepoint -> entity name for single-character entities; consumed by
# the "htmlentityreplace" codec error handler defined below.
_encode_entity_map = {}
_is_ucs4 = len("\U0010FFFF") == 1  # False on narrow (UCS-2) builds
for k, v in list(entities.items()):
    # skip multi-character entities
    if ((_is_ucs4 and len(v) > 1) or
            (not _is_ucs4 and len(v) > 2)):
        continue
    if v != "&":
        if len(v) == 2:
            # On narrow builds a single astral character is a surrogate pair.
            v = _utils.surrogatePairToCodepoint(v)
        else:
            v = ord(v)
        if v not in _encode_entity_map or k.islower():
            # prefer &lt; over &LT; and similarly for &amp;, &gt;, etc.
            _encode_entity_map[v] = k


def htmlentityreplace_errors(exc):
    """Codec error handler replacing unencodable characters with HTML
    entity references — named where one exists in _encode_entity_map,
    numeric (&#x...;) otherwise."""
    if isinstance(exc, (UnicodeEncodeError, UnicodeTranslateError)):
        res = []
        codepoints = []
        skip = False
        for i, c in enumerate(exc.object[exc.start:exc.end]):
            if skip:
                # Second half of a surrogate pair already consumed below.
                skip = False
                continue
            index = i + exc.start
            if _utils.isSurrogatePair(exc.object[index:min([exc.end, index + 2])]):
                # Combine the pair into a single astral codepoint.
                codepoint = _utils.surrogatePairToCodepoint(exc.object[index:index + 2])
                skip = True
            else:
                codepoint = ord(c)
            codepoints.append(codepoint)
        for cp in codepoints:
            e = _encode_entity_map.get(cp)
            if e:
                res.append("&")
                res.append(e)
                if not e.endswith(";"):
                    res.append(";")
            else:
                res.append("&#x%s;" % (hex(cp)[2:]))
        return ("".join(res), exc.end)
    else:
        # Other error types fall back to the stdlib XML handler.
        return xmlcharrefreplace_errors(exc)

# Make the handler available to str.encode via its registered name.
register_error("htmlentityreplace", htmlentityreplace_errors)


def serialize(input, tree="etree", encoding=None, **serializer_opts):
    """Serialize *input* (a parsed tree) to (X)HTML text.

    tree names the treewalker to use for the given tree type;
    serializer_opts are passed through to HTMLSerializer.
    """
    serializer = HTMLSerializer(**serializer_opts)
    # XXX: Should we cache this?
    tree_walker = treewalkers.getTreeWalker(tree)
    return serializer.render(tree_walker(input), encoding)


class HTMLSerializer(object):
    """Serializes a stream of treewalker tokens to (X)HTML text.

    Each configuration option below is a class attribute holding its
    default and may be overridden per instance via keyword arguments to
    __init__ (see its docstring for the meaning of each option).
    """

    # attribute quoting options
    quote_attr_values = "legacy"  # be secure by default
    quote_char = '"'
    use_best_quote_char = True

    # tag syntax options
    omit_optional_tags = True
    minimize_boolean_attributes = True
    use_trailing_solidus = False
    space_before_trailing_solidus = True

    # escaping options
    escape_lt_in_attrs = False
    escape_rcdata = False
    resolve_entities = True

    # miscellaneous options
    alphabetical_attributes = False
    inject_meta_charset = True
    strip_whitespace = False
    sanitize = False

    options = ("quote_attr_values", "quote_char", "use_best_quote_char",
               "omit_optional_tags", "minimize_boolean_attributes",
               "use_trailing_solidus", "space_before_trailing_solidus",
               "escape_lt_in_attrs", "escape_rcdata", "resolve_entities",
               "alphabetical_attributes", "inject_meta_charset",
               "strip_whitespace", "sanitize")

    def __init__(self, **kwargs):
        """Initialize HTMLSerializer.

        Keyword options (default given first unless specified) include:

        inject_meta_charset=True|False
          Whether it insert a meta element to define the character set of the
          document.
        quote_attr_values="legacy"|"spec"|"always"
          Whether to quote attribute values that don't require quoting
          per legacy browser behaviour, when required by the standard, or always.
        quote_char=u'"'|u"'"
          Use given quote character for attribute quoting. Default is to
          use double quote unless attribute value contains a double quote,
          in which case single quotes are used instead.
        escape_lt_in_attrs=False|True
          Whether to escape < in attribute values.
        escape_rcdata=False|True
          Whether to escape characters that need to be escaped within normal
          elements within rcdata elements such as style.
        resolve_entities=True|False
          Whether to resolve named character entities that appear in the
          source tree. The XML predefined entities &lt; &gt; &amp; &quot; &apos;
          are unaffected by this setting.
        strip_whitespace=False|True
          Whether to remove semantically meaningless whitespace. (This
          compresses all whitespace to a single space except within pre.)
        minimize_boolean_attributes=True|False
          Shortens boolean attributes to give just the attribute value,
          for example <input disabled="disabled"> becomes <input disabled>.
        use_trailing_solidus=False|True
          Includes a close-tag slash at the end of the start tag of void
          elements (empty elements whose end tag is forbidden). E.g. <hr/>.
        space_before_trailing_solidus=True|False
          Places a space immediately before the closing slash in a tag
          using a trailing solidus. E.g. <hr />. Requires use_trailing_solidus.
        sanitize=False|True
          Strip all unsafe or unknown constructs from output.
          See `html5lib user documentation`_
        omit_optional_tags=True|False
          Omit start/end tags that are optional.
        alphabetical_attributes=False|True
          Reorder attributes to be in alphabetical order.

        .. _html5lib user documentation: http://code.google.com/p/html5lib/wiki/UserDocumentation
        """
        unexpected_args = frozenset(kwargs) - frozenset(self.options)
        if len(unexpected_args) > 0:
            raise TypeError("__init__() got an unexpected keyword argument '%s'" % next(iter(unexpected_args)))
        if 'quote_char' in kwargs:
            self.use_best_quote_char = False
        for attr in self.options:
            setattr(self, attr, kwargs.get(attr, getattr(self, attr)))
        self.errors = []
        self.strict = False
        # Set in serialize(); initialised here so that encode()/encodeStrict()
        # don't raise AttributeError if called before serialize().
        self.encoding = None

    def encode(self, string):
        """Encode *string* for output, substituting entity references for
        characters the target encoding cannot represent."""
        assert(isinstance(string, text_type))
        if self.encoding:
            return string.encode(self.encoding, "htmlentityreplace")
        else:
            return string

    def encodeStrict(self, string):
        """Encode *string* for output; unencodable characters raise."""
        assert(isinstance(string, text_type))
        if self.encoding:
            return string.encode(self.encoding, "strict")
        else:
            return string

    def serialize(self, treewalker, encoding=None):
        """Yield the serialized output chunk-by-chunk for the tokens
        produced by *treewalker*."""
        # pylint:disable=too-many-nested-blocks
        self.encoding = encoding
        in_cdata = False
        self.errors = []

        if encoding and self.inject_meta_charset:
            from .filters.inject_meta_charset import Filter
            treewalker = Filter(treewalker, encoding)
        # Alphabetical attributes is here under the assumption that none of
        # the later filters add or change order of attributes; it needs to be
        # before the sanitizer so escaped elements come out correctly
        if self.alphabetical_attributes:
            from .filters.alphabeticalattributes import Filter
            treewalker = Filter(treewalker)
        # WhitespaceFilter should be used before OptionalTagFilter
        # for maximum efficiently of this latter filter
        if self.strip_whitespace:
            from .filters.whitespace import Filter
            treewalker = Filter(treewalker)
        if self.sanitize:
            from .filters.sanitizer import Filter
            treewalker = Filter(treewalker)
        if self.omit_optional_tags:
            from .filters.optionaltags import Filter
            treewalker = Filter(treewalker)

        for token in treewalker:
            type = token["type"]
            if type == "Doctype":
                doctype = "<!DOCTYPE %s" % token["name"]

                if token["publicId"]:
                    doctype += ' PUBLIC "%s"' % token["publicId"]
                elif token["systemId"]:
                    doctype += " SYSTEM"
                if token["systemId"]:
                    if token["systemId"].find('"') >= 0:
                        if token["systemId"].find("'") >= 0:
                            # Fixed typo in the error message ("identifer").
                            self.serializeError("System identifier contains both single and double quote characters")
                        quote_char = "'"
                    else:
                        quote_char = '"'
                    doctype += " %s%s%s" % (quote_char, token["systemId"], quote_char)

                doctype += ">"
                yield self.encodeStrict(doctype)

            elif type in ("Characters", "SpaceCharacters"):
                if type == "SpaceCharacters" or in_cdata:
                    if in_cdata and token["data"].find("</") >= 0:
                        self.serializeError("Unexpected </ in CDATA")
                    yield self.encode(token["data"])
                else:
                    yield self.encode(escape(token["data"]))

            elif type in ("StartTag", "EmptyTag"):
                name = token["name"]
                yield self.encodeStrict("<%s" % name)
                if name in rcdataElements and not self.escape_rcdata:
                    in_cdata = True
                elif in_cdata:
                    self.serializeError("Unexpected child element of a CDATA element")
                for (_, attr_name), attr_value in token["data"].items():
                    # TODO: Add namespace support here
                    k = attr_name
                    v = attr_value
                    yield self.encodeStrict(' ')

                    yield self.encodeStrict(k)
                    if not self.minimize_boolean_attributes or \
                        (k not in booleanAttributes.get(name, tuple()) and
                         k not in booleanAttributes.get("", tuple())):
                        yield self.encodeStrict("=")
                        if self.quote_attr_values == "always" or len(v) == 0:
                            quote_attr = True
                        elif self.quote_attr_values == "spec":
                            quote_attr = _quoteAttributeSpec.search(v) is not None
                        elif self.quote_attr_values == "legacy":
                            quote_attr = _quoteAttributeLegacy.search(v) is not None
                        else:
                            raise ValueError("quote_attr_values must be one of: "
                                             "'always', 'spec', or 'legacy'")
                        v = v.replace("&", "&amp;")
                        if self.escape_lt_in_attrs:
                            v = v.replace("<", "&lt;")
                        if quote_attr:
                            quote_char = self.quote_char
                            if self.use_best_quote_char:
                                if "'" in v and '"' not in v:
                                    quote_char = '"'
                                elif '"' in v and "'" not in v:
                                    quote_char = "'"
                            if quote_char == "'":
                                v = v.replace("'", "&#39;")
                            else:
                                v = v.replace('"', "&quot;")
                            yield self.encodeStrict(quote_char)
                            yield self.encode(v)
                            yield self.encodeStrict(quote_char)
                        else:
                            yield self.encode(v)
                if name in voidElements and self.use_trailing_solidus:
                    if self.space_before_trailing_solidus:
                        yield self.encodeStrict(" /")
                    else:
                        yield self.encodeStrict("/")
                yield self.encode(">")

            elif type == "EndTag":
                name = token["name"]
                if name in rcdataElements:
                    in_cdata = False
                elif in_cdata:
                    self.serializeError("Unexpected child element of a CDATA element")
                yield self.encodeStrict("</%s>" % name)

            elif type == "Comment":
                data = token["data"]
                if data.find("--") >= 0:
                    self.serializeError("Comment contains --")
                yield self.encodeStrict("<!--%s-->" % token["data"])

            elif type == "Entity":
                name = token["name"]
                key = name + ";"
                if key not in entities:
                    self.serializeError("Entity %s not recognized" % name)
                if self.resolve_entities and key not in xmlEntities:
                    data = entities[key]
                else:
                    data = "&%s;" % name
                yield self.encodeStrict(data)

            else:
                self.serializeError(token["data"])

    def render(self, treewalker, encoding=None):
        """Serialize the whole stream to a single bytes (when *encoding*
        is given) or text object."""
        if encoding:
            return b"".join(list(self.serialize(treewalker, encoding)))
        else:
            return "".join(list(self.serialize(treewalker)))

    def serializeError(self, data="XXX ERROR MESSAGE NEEDED"):
        # XXX The idea is to make data mandatory.
        self.errors.append(data)
        if self.strict:
            raise SerializeError


class SerializeError(Exception):
    """Error raised while serializing a tree (only in strict mode)."""
_vendor/html5lib/_trie/__pycache__/_base.cpython-36.opt-1.pyc000064400000002625151733136430017703 0ustar003

�Pf��@s4ddlmZmZmZddlmZGdd�de�ZdS)�)�absolute_import�division�unicode_literals)�Mappingcs:eZdZdZd�fdd�	Zdd�Zdd�Zd	d
�Z�ZS)�TriezAbstract base class for triesNcs4tt|�j�}�dkrt|�St�fdd�|D��S)Ncsg|]}|j��r|�qS�)�
startswith)�.0�x)�prefixr�/usr/lib/python3.6/_base.py�
<listcomp>szTrie.keys.<locals>.<listcomp>)�superr�keys�set)�selfrr)�	__class__)rrr	sz	Trie.keyscCs$x|j�D]}|j|�r
dSq
WdS)NTF)rr)rr�keyrrr�has_keys_with_prefixs
zTrie.has_keys_with_prefixcCsT||kr|Sx:tdt|�d�D]$}|d|�|kr |d|�Sq Wt|��dS)N�)�range�len�KeyError)rr�irrr�longest_prefixszTrie.longest_prefixcCs|j|�}|||fS)N)r)rrZlprefixrrr�longest_prefix_item$s
zTrie.longest_prefix_item)N)	�__name__�
__module__�__qualname__�__doc__rrrr�
__classcell__rr)rrrs


rN)Z
__future__rrr�collectionsrrrrrr�<module>s_vendor/html5lib/_trie/__pycache__/py.cpython-36.opt-1.pyc000064400000004143151733136430017257 0ustar003

�Pf��@sLddlmZmZmZddlmZddlmZddlm	Z
Gdd�de
�Z	dS)	�)�absolute_import�division�unicode_literals)�	text_type)�bisect_left�)�Triec@sFeZdZdd�Zdd�Zdd�Zdd�Zd	d
�Zddd
�Zdd�Z	dS)rcCsJtdd�|j�D��std��||_t|j��|_d|_dt|�f|_dS)Ncss|]}t|t�VqdS)N)�
isinstancer)�.0�x�r�/usr/lib/python3.6/py.py�	<genexpr>sz Trie.__init__.<locals>.<genexpr>zAll keys must be strings�r)	�all�keys�	TypeError�_data�sorted�_keys�	_cachestr�len�_cachepoints)�self�datarrr
�__init__
sz
Trie.__init__cCs
||jkS)N)r)r�keyrrr
�__contains__szTrie.__contains__cCs
t|j�S)N)rr)rrrr
�__len__szTrie.__len__cCs
t|j�S)N)�iterr)rrrr
�__iter__sz
Trie.__iter__cCs
|j|S)N)r)rrrrr
�__getitem__szTrie.__getitem__NcCs�|dks|dks|jr"t|j�S|j|j�rN|j\}}t|j|||�}}nt|j|�}}t�}|t|j�krv|Sx,|j|j|�r�|j|j|�|d7}qxW||_||f|_|S)Nrr)r�set�
startswithrrrr�add)r�prefix�lo�hi�start�irrrr
rs


z	Trie.keyscCsd||jkrdS|j|j�r6|j\}}t|j|||�}nt|j|�}|t|j�krTdS|j|j|�S)NTF)rr#rrrrr)rr%r&r'r)rrr
�has_keys_with_prefix6s

zTrie.has_keys_with_prefix)N)
�__name__�
__module__�__qualname__rrrr r!rr*rrrr
r	s	
rN)Z
__future__rrrZpip._vendor.sixrZbisectrZ_baserZABCTrierrrr
�<module>s_vendor/html5lib/_trie/__pycache__/datrie.cpython-36.opt-1.pyc000064400000003626151733136440020105 0ustar003

�Pf��@sLddlmZmZmZddlmZddlmZddl	mZ
Gdd�de
�ZdS)�)�absolute_import�division�unicode_literals)�Trie)�	text_type�c@sVeZdZdd�Zdd�Zdd�Zdd�Zd	d
�Zddd
�Zdd�Z	dd�Z
dd�ZdS)rcCsvt�}x:|j�D].}t|t�s&td��x|D]}|j|�q,WqWtdj|��|_x|j	�D]\}}||j|<q\WdS)NzAll keys must be strings�)
�set�keys�
isinstancer�	TypeError�add�DATrie�join�_data�items)�self�data�chars�key�char�value�r�/usr/lib/python3.6/datrie.py�__init__
s

z
Trie.__init__cCs
||jkS)N)r)rrrrr�__contains__szTrie.__contains__cCs
t|j�S)N)�lenr)rrrr�__len__szTrie.__len__cCs
t��dS)N)�NotImplementedError)rrrr�__iter__sz
Trie.__iter__cCs
|j|S)N)r)rrrrr�__getitem__szTrie.__getitem__NcCs|jj|�S)N)rr
)r�prefixrrrr
"sz	Trie.keyscCs|jj|�S)N)r�has_keys_with_prefix)rr!rrrr"%szTrie.has_keys_with_prefixcCs|jj|�S)N)r�longest_prefix)rr!rrrr#(szTrie.longest_prefixcCs|jj|�S)N)r�longest_prefix_item)rr!rrrr$+szTrie.longest_prefix_item)N)�__name__�
__module__�__qualname__rrrrr r
r"r#r$rrrrr	s
rN)Z
__future__rrrZdatrierrZpip._vendor.sixrZ_baseZABCTrierrrr�<module>s_vendor/html5lib/_trie/__pycache__/__init__.cpython-36.pyc000064400000000524151733136440017427 0ustar003

�Pf!�@sRddlmZmZmZddlmZeZyddlmZWne	k
rHYnXeZdS)�)�absolute_import�division�unicode_literals�)�TrieN)
Z
__future__rrr�pyrZPyTrieZdatrieZDATrie�ImportError�r	r	�/usr/lib/python3.6/__init__.py�<module>s_vendor/html5lib/_trie/__pycache__/datrie.cpython-36.pyc000064400000003626151733136440017146 0ustar003

�Pf��@sLddlmZmZmZddlmZddlmZddl	mZ
Gdd�de
�ZdS)�)�absolute_import�division�unicode_literals)�Trie)�	text_type�c@sVeZdZdd�Zdd�Zdd�Zdd�Zd	d
�Zddd
�Zdd�Z	dd�Z
dd�ZdS)rcCsvt�}x:|j�D].}t|t�s&td��x|D]}|j|�q,WqWtdj|��|_x|j	�D]\}}||j|<q\WdS)NzAll keys must be strings�)
�set�keys�
isinstancer�	TypeError�add�DATrie�join�_data�items)�self�data�chars�key�char�value�r�/usr/lib/python3.6/datrie.py�__init__
s

z
Trie.__init__cCs
||jkS)N)r)rrrrr�__contains__szTrie.__contains__cCs
t|j�S)N)�lenr)rrrr�__len__szTrie.__len__cCs
t��dS)N)�NotImplementedError)rrrr�__iter__sz
Trie.__iter__cCs
|j|S)N)r)rrrrr�__getitem__szTrie.__getitem__NcCs|jj|�S)N)rr
)r�prefixrrrr
"sz	Trie.keyscCs|jj|�S)N)r�has_keys_with_prefix)rr!rrrr"%szTrie.has_keys_with_prefixcCs|jj|�S)N)r�longest_prefix)rr!rrrr#(szTrie.longest_prefixcCs|jj|�S)N)r�longest_prefix_item)rr!rrrr$+szTrie.longest_prefix_item)N)�__name__�
__module__�__qualname__rrrrr r
r"r#r$rrrrr	s
rN)Z
__future__rrrZdatrierrZpip._vendor.sixrZ_baseZABCTrierrrr�<module>s_vendor/html5lib/_trie/__pycache__/py.cpython-36.pyc000064400000004143151733136440016321 0ustar003

�Pf��@sLddlmZmZmZddlmZddlmZddlm	Z
Gdd�de
�Z	dS)	�)�absolute_import�division�unicode_literals)�	text_type)�bisect_left�)�Triec@sFeZdZdd�Zdd�Zdd�Zdd�Zd	d
�Zddd
�Zdd�Z	dS)rcCsJtdd�|j�D��std��||_t|j��|_d|_dt|�f|_dS)Ncss|]}t|t�VqdS)N)�
isinstancer)�.0�x�r�/usr/lib/python3.6/py.py�	<genexpr>sz Trie.__init__.<locals>.<genexpr>zAll keys must be strings�r)	�all�keys�	TypeError�_data�sorted�_keys�	_cachestr�len�_cachepoints)�self�datarrr
�__init__
sz
Trie.__init__cCs
||jkS)N)r)r�keyrrr
�__contains__szTrie.__contains__cCs
t|j�S)N)rr)rrrr
�__len__szTrie.__len__cCs
t|j�S)N)�iterr)rrrr
�__iter__sz
Trie.__iter__cCs
|j|S)N)r)rrrrr
�__getitem__szTrie.__getitem__NcCs�|dks|dks|jr"t|j�S|j|j�rN|j\}}t|j|||�}}nt|j|�}}t�}|t|j�krv|Sx,|j|j|�r�|j|j|�|d7}qxW||_||f|_|S)Nrr)r�set�
startswithrrrr�add)r�prefix�lo�hi�start�irrrr
rs


z	Trie.keyscCsd||jkrdS|j|j�r6|j\}}t|j|||�}nt|j|�}|t|j�krTdS|j|j|�S)NTF)rr#rrrrr)rr%r&r'r)rrr
�has_keys_with_prefix6s

zTrie.has_keys_with_prefix)N)
�__name__�
__module__�__qualname__rrrr r!rr*rrrr
r	s	
rN)Z
__future__rrrZpip._vendor.sixrZbisectrZ_baserZABCTrierrrr
�<module>s_vendor/html5lib/_trie/__pycache__/__init__.cpython-36.opt-1.pyc000064400000000524151733136440020366 0ustar003

�Pf!�@sRddlmZmZmZddlmZeZyddlmZWne	k
rHYnXeZdS)�)�absolute_import�division�unicode_literals�)�TrieN)
Z
__future__rrr�pyrZPyTrieZdatrieZDATrie�ImportError�r	r	�/usr/lib/python3.6/__init__.py�<module>s_vendor/html5lib/_trie/__pycache__/_base.cpython-36.pyc000064400000002625151733136440016745 0ustar003

�Pf��@s4ddlmZmZmZddlmZGdd�de�ZdS)�)�absolute_import�division�unicode_literals)�Mappingcs:eZdZdZd�fdd�	Zdd�Zdd�Zd	d
�Z�ZS)�TriezAbstract base class for triesNcs4tt|�j�}�dkrt|�St�fdd�|D��S)Ncsg|]}|j��r|�qS�)�
startswith)�.0�x)�prefixr�/usr/lib/python3.6/_base.py�
<listcomp>szTrie.keys.<locals>.<listcomp>)�superr�keys�set)�selfrr)�	__class__)rrr	sz	Trie.keyscCs$x|j�D]}|j|�r
dSq
WdS)NTF)rr)rr�keyrrr�has_keys_with_prefixs
zTrie.has_keys_with_prefixcCsT||kr|Sx:tdt|�d�D]$}|d|�|kr |d|�Sq Wt|��dS)N�)�range�len�KeyError)rr�irrr�longest_prefixszTrie.longest_prefixcCs|j|�}|||fS)N)r)rrZlprefixrrr�longest_prefix_item$s
zTrie.longest_prefix_item)N)	�__name__�
__module__�__qualname__�__doc__rrrr�
__classcell__rr)rrrs


rN)Z
__future__rrr�collectionsrrrrrr�<module>s_vendor/html5lib/_trie/datrie.py000064400000002232151733136440012652 0ustar00from __future__ import absolute_import, division, unicode_literals

from datrie import Trie as DATrie
from pip._vendor.six import text_type

from ._base import Trie as ABCTrie


class Trie(ABCTrie):
    """Trie backed by the optional third-party ``datrie`` package.

    ``datrie`` requires the full alphabet up front, so the constructor first
    gathers every character appearing in the keys before building the trie.
    """

    def __init__(self, data):
        alphabet = set()
        for key in data.keys():
            if not isinstance(key, text_type):
                raise TypeError("All keys must be strings")
            alphabet.update(key)

        self._data = DATrie("".join(alphabet))
        for key, value in data.items():
            self._data[key] = value

    def __contains__(self, key):
        return key in self._data

    def __len__(self):
        return len(self._data)

    def __iter__(self):
        # Iteration is not supported by this backend.
        raise NotImplementedError()

    def __getitem__(self, key):
        return self._data[key]

    def keys(self, prefix=None):
        # Prefix filtering is implemented natively by datrie.
        return self._data.keys(prefix)

    def has_keys_with_prefix(self, prefix):
        return self._data.has_keys_with_prefix(prefix)

    def longest_prefix(self, prefix):
        return self._data.longest_prefix(prefix)

    def longest_prefix_item(self, prefix):
        return self._data.longest_prefix_item(prefix)
_vendor/html5lib/_trie/_base.py000064400000001723151733136440012457 0ustar00from __future__ import absolute_import, division, unicode_literals

from collections import Mapping


class Trie(Mapping):
    """Abstract base class for tries"""

    def keys(self, prefix=None):
        # pylint:disable=arguments-differ
        keys = super(Trie, self).keys()

        if prefix is None:
            return set(keys)

        # Python 2.6: no set comprehensions
        return set([x for x in keys if x.startswith(prefix)])

    def has_keys_with_prefix(self, prefix):
        for key in self.keys():
            if key.startswith(prefix):
                return True

        return False

    def longest_prefix(self, prefix):
        if prefix in self:
            return prefix

        for i in range(1, len(prefix) + 1):
            if prefix[:-i] in self:
                return prefix[:-i]

        raise KeyError(prefix)

    def longest_prefix_item(self, prefix):
        lprefix = self.longest_prefix(prefix)
        return (lprefix, self[lprefix])
_vendor/html5lib/_trie/__init__.py000064400000000441151733136440013141 0ustar00from __future__ import absolute_import, division, unicode_literals

# Default to the pure-Python trie implementation.
from .py import Trie as PyTrie

Trie = PyTrie

# If the optional third-party ``datrie`` package is importable, rebind
# ``Trie`` to the datrie-backed implementation; otherwise keep the
# pure-Python fallback bound above.
# pylint:disable=wrong-import-position
try:
    from .datrie import Trie as DATrie
except ImportError:
    pass
else:
    Trie = DATrie
# pylint:enable=wrong-import-position
_vendor/html5lib/_trie/py.py000064400000003357151733136440012043 0ustar00from __future__ import absolute_import, division, unicode_literals
from pip._vendor.six import text_type

from bisect import bisect_left

from ._base import Trie as ABCTrie


class Trie(ABCTrie):
    """Pure-Python trie backed by a sorted key list plus binary search.

    Item access is delegated to the backing dict; prefix queries use
    ``bisect_left`` over the sorted key list.  The most recent prefix query
    is cached (``_cachestr``/``_cachepoints``) so that a follow-up query
    with a longer prefix can restrict its binary search to the cached span.
    """

    def __init__(self, data):
        """Build the trie from *data*, a dict whose keys must all be text.

        :raises TypeError: if any key is not a text string
        """
        if not all(isinstance(x, text_type) for x in data.keys()):
            raise TypeError("All keys must be strings")

        self._data = data
        self._keys = sorted(data.keys())
        # Last prefix searched and the (start, stop) index span of its
        # matches within self._keys.
        self._cachestr = ""
        self._cachepoints = (0, len(data))

    def __contains__(self, key):
        return key in self._data

    def __len__(self):
        return len(self._data)

    def __iter__(self):
        return iter(self._data)

    def __getitem__(self, key):
        return self._data[key]

    def keys(self, prefix=None):
        """Return the set of keys starting with *prefix* (all keys if None)."""
        if prefix is None or prefix == "" or not self._keys:
            return set(self._keys)

        if prefix.startswith(self._cachestr):
            # The new prefix extends the cached one, so every match lies
            # inside the cached span; narrow the binary search to it.
            lo, hi = self._cachepoints
            start = i = bisect_left(self._keys, prefix, lo, hi)
        else:
            start = i = bisect_left(self._keys, prefix)

        keys = set()
        if start == len(self._keys):
            return keys

        # Bug fix: bound i before indexing.  Without the `i < len(...)`
        # check, a prefix matching every key through the end of self._keys
        # raised IndexError (e.g. keys ["ab"], prefix "a").
        while i < len(self._keys) and self._keys[i].startswith(prefix):
            keys.add(self._keys[i])
            i += 1

        self._cachestr = prefix
        self._cachepoints = (start, i)

        return keys

    def has_keys_with_prefix(self, prefix):
        """Return True when at least one key starts with *prefix*."""
        if prefix in self._data:
            return True

        if prefix.startswith(self._cachestr):
            lo, hi = self._cachepoints
            i = bisect_left(self._keys, prefix, lo, hi)
        else:
            i = bisect_left(self._keys, prefix)

        if i == len(self._keys):
            return False

        return self._keys[i].startswith(prefix)
_vendor/html5lib/treebuilders/__pycache__/dom.cpython-36.opt-1.pyc000064400000021735151733136440021004 0ustar003

�Pf�"�@s|ddlmZmZmZddlmZddlmZmZddl	Z	ddl
mZddl
mZdd	lm
Z
dd
lmZdd�Zee�ZdS)
�)�absolute_import�division�unicode_literals)�MutableMapping)�minidom�NodeN�)�base�)�	constants)�
namespaces)�moduleFactoryFactorycsV��Gdd�dt��G��fdd�dtj��G����fdd�dtj�}dd��t�S)	Nc@sLeZdZdd�Zdd�Zdd�Zdd�Zd	d
�Zdd�Zd
d�Z	dd�Z
dS)zgetDomBuilder.<locals>.AttrListcSs
||_dS)N)�element)�selfr�r�/usr/lib/python3.6/dom.py�__init__sz(getDomBuilder.<locals>.AttrList.__init__cSst|jjj��S)N)�iterr�
attributes�keys)rrrr�__iter__sz(getDomBuilder.<locals>.AttrList.__iter__cSs4t|t�rt�n |jjj|�}||_||jj|<dS)N)�
isinstance�tuple�NotImplementedErrorr�
ownerDocumentZcreateAttribute�valuer)r�namer�attrrrr�__setitem__s

z+getDomBuilder.<locals>.AttrList.__setitem__cSst|jj�S)N)�lenrr)rrrr�__len__ sz'getDomBuilder.<locals>.AttrList.__len__cSst|jjj��S)N)�listrr�items)rrrrr"#sz%getDomBuilder.<locals>.AttrList.itemscSst|jjj��S)N)r!rr�values)rrrrr#&sz&getDomBuilder.<locals>.AttrList.valuescSs"t|t�rt�n|jj|jSdS)N)rrrrrr)rrrrr�__getitem__)s
z+getDomBuilder.<locals>.AttrList.__getitem__cSst|t�rt�n
|jj|=dS)N)rrrrr)rrrrr�__delitem__/s
z+getDomBuilder.<locals>.AttrList.__delitem__N)�__name__�
__module__�__qualname__rrrr r"r#r$r%rrrr�AttrListsr)cs�eZdZdd�Zedd��Zdd�Zddd	�Zd
d�Zdd
�Z	dd�Z
�fdd�Zdd�Zeee�Z
�fdd�Zdd�Zdd�Zee�ZdS)z"getDomBuilder.<locals>.NodeBuildercSstjj||j�||_dS)N)r	rr�nodeNamer)rrrrrr6sz+getDomBuilder.<locals>.NodeBuilder.__init__cSst|jd�r|jjpdS)N�namespaceURI)�hasattrrr+)rrrr�<lambda>:sz+getDomBuilder.<locals>.NodeBuilder.<lambda>cSs||_|jj|j�dS)N)�parentr�appendChild)r�noderrrr/=sz.getDomBuilder.<locals>.NodeBuilder.appendChildNcSs4|jjj|�}|r$|jj||j�n|jj|�dS)N)rr�createTextNode�insertBeforer/)r�datar2�textrrr�
insertTextAsz-getDomBuilder.<locals>.NodeBuilder.insertTextcSs|jj|j|j�||_dS)N)rr2r.)rr0ZrefNoderrrr2Hsz/getDomBuilder.<locals>.NodeBuilder.insertBeforecSs&|jj|jkr|jj|j�d|_dS)N)rZ
parentNode�removeChildr.)rr0rrrr6Lsz.getDomBuilder.<locals>.NodeBuilder.removeChildcSs:x.|jj�r.|jj}|jj|�|jj|�qWg|_dS)N)r�
hasChildNodesZ
firstChildr6r/�
childNodes)rZ	newParent�childrrr�reparentChildrenQs
z3getDomBuilder.<locals>.NodeBuilder.reparentChildrencs
�|j�S)N)r)r)r)rr�
getAttributesXsz0getDomBuilder.<locals>.NodeBuilder.getAttributescSsz|rvxpt|j��D]`\}}t|t�rd|ddk	rF|dd|d}n|d}|jj|d||�q|jj||�qWdS)Nr�:rr
)r!r"rrrZsetAttributeNSZsetAttribute)rrrrZ
qualifiedNamerrr�
setAttributes[s
z0getDomBuilder.<locals>.NodeBuilder.setAttributescs�|jjd��S)NF)r�	cloneNode)r)�NodeBuilderrrr>jsz,getDomBuilder.<locals>.NodeBuilder.cloneNodecSs
|jj�S)N)rr7)rrrr�
hasContentmsz-getDomBuilder.<locals>.NodeBuilder.hasContentcSs(|jdkrtd|jfS|j|jfSdS)NZhtml)�	namespacerr)rrrr�getNameTupleps
z/getDomBuilder.<locals>.NodeBuilder.getNameTuple)N)r&r'r(r�propertyrAr/r5r2r6r:r;r=rr>r@rBZ	nameTupler)r)r?rrr?5s

r?cs�eZdZ�fdd�Z��fdd�Zd�fdd�	Z�fdd	�Z�fd
d�Zdd
�Z�fdd�Z	dd�Z
dd�Zddd�Z�Z
dZdS)z"getDomBuilder.<locals>.TreeBuildercs�j�jddd�|_tj|�S)N)�getDOMImplementationZcreateDocument�dom�weakref�proxy)r)�Domrr�
documentClassysz0getDomBuilder.<locals>.TreeBuilder.documentClasscsR|d}|d}|d}�j�}|j|||�}|jj�|���tkrN|j|_dS)Nr�publicId�systemId)rDZcreateDocumentTypeZdocumentr/rrEr)r�tokenrrJrKZdomimplZdoctype)rHr?rr�
insertDoctype}sz0getDomBuilder.<locals>.TreeBuilder.insertDoctypeNcs6|dkr |jdkr |jj|�}n|jj||�}�|�S)N)ZdefaultNamespacerEZ
createElementZcreateElementNS)rrrAr0)r?rr�elementClass�sz/getDomBuilder.<locals>.TreeBuilder.elementClasscs�|jj|��S)N)rEZ
createComment)rr3)r?rr�commentClass�sz/getDomBuilder.<locals>.TreeBuilder.commentClasscs�|jj��S)N)rEZcreateDocumentFragment)r)r?rr�
fragmentClass�sz0getDomBuilder.<locals>.TreeBuilder.fragmentClasscSs|jj|j�dS)N)rEr/r)rr0rrrr/�sz.getDomBuilder.<locals>.TreeBuilder.appendChildcs�|�S)Nr)rr)�testSerializerrrrQ�sz1getDomBuilder.<locals>.TreeBuilder.testSerializercSs|jS)N)rE)rrrr�getDocument�sz.getDomBuilder.<locals>.TreeBuilder.getDocumentcSstjj|�jS)N)r	�TreeBuilder�getFragmentr)rrrrrT�sz.getDomBuilder.<locals>.TreeBuilder.getFragmentcSsp|}||krtjj|||�nNt|jd�rXtj|jjkrXt|jj�|j_|jjj	tj�|jj
|jj|��dS)N�_child_node_types)r	rSr5r,rEr�	TEXT_NODErUr!�appendr/r1)rr3r.rrrr5�sz-getDomBuilder.<locals>.TreeBuilder.insertText)N)N)r&r'r(rIrMrNrOrPr/rQrRrTr5�implementationrr)rH�DomImplementationr?rQrrrSxs

rScs0|j�g�d��fdd�	��|d�dj��S)Nrcs$|jtjkr�|jrj|js|jrP|jp&d}|jp0d}�jdd||j||f�q~�jdd||jf�n�jdd|f��nz|jtjkr��jd��n`|jtjkr��jd��nF|jtj	krވjdd||j
f��n|jtjk�r�jd	d||j
f�n�t|d
��r6|j
dk	�r6dtj|j
|jf}n|j}�jdd||f�|j��r�g}xftt|j��D]T}|jj|�}|j}|j}|j
}	|	�r�dtj|	|jf}n|j}|j||f��qpWx2t|�D]&\}}�jd
d|d||f��q�W|d7}x|jD]}
�|
|��qWdS)N�z|%s<!DOCTYPE %s "%s" "%s">� z|%s<!DOCTYPE %s>z|%s<!DOCTYPE >z	#documentz#document-fragmentz|%s<!-- %s -->z|%s"%s"r+z%s %sz|%s<%s>z
|%s%s="%s"r
)ZnodeTyperZDOCUMENT_TYPE_NODErrJrKrWZ
DOCUMENT_NODEZDOCUMENT_FRAGMENT_NODEZCOMMENT_NODEZ	nodeValuerVr,r+r�prefixesr*Z
hasAttributes�rangerr�itemrZ	localName�sortedr8)r�indentrJrKrr�irr�nsr9)�rv�serializeElementrrrd�sN


"z?getDomBuilder.<locals>.testSerializer.<locals>.serializeElement�
)r)Z	normalize�join)rr)rcrdrrQ�s
.
z%getDomBuilder.<locals>.testSerializer)rr	rrS�locals)rYrSr)r)rHrYr?rQr�
getDomBuilders$C:6rh)Z
__future__rrr�collectionsrZxml.domrrrFrZr	rrZ_utilsr
rhZgetDomModulerrrr�<module>s__vendor/html5lib/treebuilders/__pycache__/etree_lxml.cpython-36.opt-1.pyc000064400000026430151733136440022362 0ustar003

�PfQ7�@s�dZddlmZmZmZddlZddlZddlZddlm	Z	ddl
mZddlm
Z
dd	lmZ
dd
lmZddljZdZejd�Zejd
�jZGdd�de�ZGdd�de�Zdd�Zdd�ZGdd�de	j�ZdS)a�Module for supporting the lxml.etree library. The idea here is to use as much
of the native library as possible, without using fragile hacks like custom element
names that break between releases. The downside of this is that we cannot represent
all possible trees; specifically the following are known to cause problems:

Text or comments as siblings of the root element
Docypes with no name

When any of these things occur, we emit a DataLossWarning
�)�absolute_import�division�unicode_literalsN�)�base�)�DataLossWarning)�	constants)�etree)�	_ihatexmlTz
{([^}]*)}(.*)Zasdc@seZdZdd�ZdS)�DocumentTypecCs||_||_||_dS)N)�name�publicId�systemId)�selfr
rr�r� /usr/lib/python3.6/etree_lxml.py�__init__#szDocumentType.__init__N)�__name__�
__module__�__qualname__rrrrrr"src@s,eZdZdd�Zdd�Zdd�Zee�ZdS)�DocumentcCsd|_g|_dS)N)�_elementTree�_childNodes)rrrrr*szDocument.__init__cCs|jj�j|j�dS)N)r�getrootZaddnext�_element)r�elementrrr�appendChild.szDocument.appendChildcCs|jS)N)r)rrrr�_getChildNodes1szDocument._getChildNodesN)rrrrrr�propertyZ
childNodesrrrrr)srcs6g�tjdd��d���fdd�	��|d�dj��S)NT)�preventDoubleDashCommentsrc
st|d��st|d�rˆjd�|jjrz|jjp6|jjsFd|jj}nd|jj|jj|jjf}�jdd|d|f�|j�}x|j�dk	r�|j�}q�Wxx|dk	r��||d�|j	�}q�WnTt
|t�s�t
|t�r�jd	d||f�n(�jd
�x|D]}�||d��q�W�n�|j
tk�rn�jdd||jf�t|d��r|j�r�jd	d||jf��n�tjj|j
�}|dk	�r�|jd
�}|jd�}tj|}�jdd||�j|�f�n�jdd|�j|j
�f�t|d��r�g}xr|jj�D]d\}	}
tj|	�}|dk	�rH|j�\}}	�j|	�}	tj|}d||	f}n
�j|	�}|j||
f��q�Wx2t|�D]&\}	}
�jdd|d|	|
f��qpW|j�r��jd	d|d|jf�|d7}x|D]}�||��q�Wt|d��r|j�r�jd	d|d|jf�dS)N�tagrz	#documentz
<!DOCTYPE %s>z<!DOCTYPE %s "%s" "%s">z|%s%s� rz|%s"%s"z#document-fragmentz|%s<!-- %s -->�tailrz
|%s<%s %s>z|%s<%s>�attribz%s %sz
|%s%s="%s")�hasattr�append�docinfo�internalDTDZ	public_idZ
system_url�	root_namerZgetpreviousZgetnext�
isinstance�str�bytesr!�comment_type�textr#�etree_builders�
tag_regexp�match�groupr	�prefixes�fromXmlNamer$�items�groups�sorted)
r�indent�dtd_strZnext_elementZnsmatch�nsr!�prefix�
attributesr
�valueZattr_string�child)�
infosetFilter�rv�serializeElementrrrA;sp













"
z(testSerializer.<locals>.serializeElement�
)r)r�
InfosetFilter�join)rr)r?r@rAr�testSerializer7s
F
rEcs$g���fdd���|�dj��S)z4Serialize an element and its child nodes to a stringcs
t|d�sH|jjr:|jjr$|jj}nd|jj}�j|��|j��n�|jtkrf�jd|j	f�n�|j
s��jd|jf�n.djdd�|j
j�D��}�jd|j|f�|j	r��j|j	�x|D]}�|�q�W�jd	|jf�t|d
�o�|j
�r�j|j
�dS)Nr!z
<!DOCTYPE %s>z	<!--%s-->z<%s>r"cSsg|]\}}d||f�qS)z%s="%s"r)�.0r
r=rrr�
<listcomp>�sz6tostring.<locals>.serializeElement.<locals>.<listcomp>z<%s %s>z</%s>r#)r%r'r(�doctyper)r&rr!r-r.r$rDr5r#)rr9�attrr>)r@rArrrA�s*





z"tostring.<locals>.serializeElement�)rD)rr)r@rAr�tostring�s rKcszeZdZeZeZdZdZeZ	e
Zddd�Zdd�Z
dd�Zd	d
�Zdd�Zd
d�Zddd�Zd�fdd�	Zdd�Z�ZS)�TreeBuilderNFcs�tjt|d��tjdd��|_||_G�fdd�dt��G���fdd�d�j�}G��fdd	�d	�j	�}||_
||_tj
j||�dS)
N)�fullTreeT)r cs&eZdZd�fdd�	Z�fdd�ZdS)z(TreeBuilder.__init__.<locals>.AttributesNcsv|dkri}||_tj||�xR|j�D]F\}}t|t�rVd|d�j|d�f}n
�j|�}||jjj|<q(WdS)Nz{%s}%srr)r�dictrr5r*�tuple�coerceAttributer$)rrr=�keyr
)r?rrr�s

z1TreeBuilder.__init__.<locals>.Attributes.__init__csPtj|||�t|t�r4d|d�j|d�f}n
�j|�}||jjj|<dS)Nz{%s}%srr)rN�__setitem__r*rOrPrr$)rrQr=r
)r?rrrR�s


z4TreeBuilder.__init__.<locals>.Attributes.__setitem__)N)rrrrrRr)r?rr�
Attributes�srScsxeZdZ���fdd�Z�fdd�Z�fdd�Zeee�Zdd�Z�fd	d
�Z	eee	�Z
d��fdd
�	Z�fdd�ZdS)z%TreeBuilder.__init__.<locals>.Elementcs*�j|�}�jj|||d��|�|_dS)N)�	namespace)�
coerceElement�Elementr�_attributes)rr
rT)rS�builderr?rrr�s
z.TreeBuilder.__init__.<locals>.Element.__init__cs$�j|�|_|j|j|j�|j_dS)N)rU�_nameZ_getETreeTagZ
_namespacerr!)rr
)r?rr�_setName�sz.TreeBuilder.__init__.<locals>.Element._setNamecs�j|j�S)N)r4rY)r)r?rr�_getName�sz.TreeBuilder.__init__.<locals>.Element._getNamecSs|jS)N)rW)rrrr�_getAttributes�sz4TreeBuilder.__init__.<locals>.Element._getAttributescs�||�|_dS)N)rW)rr<)rSrr�_setAttributes�sz4TreeBuilder.__init__.<locals>.Element._setAttributesNcs�j|�}�jj|||�dS)N)ZcoerceCharactersrV�
insertText)r�dataZinsertBefore)rXr?rrr^�s
z0TreeBuilder.__init__.<locals>.Element.insertTextcs�jj||�dS)N)rVr)rr>)rXrrr�sz1TreeBuilder.__init__.<locals>.Element.appendChild)N)
rrrrrZr[rr
r\r]r<r^rr)rSrXr?rrrV�s

rVcs8eZdZ��fdd�Z�fdd�Zdd�Zeee�ZdS)z%TreeBuilder.__init__.<locals>.Commentcs�j|�}�jj||�dS)N)�
coerceComment�Commentr)rr_)rXr?rrr�s
z.TreeBuilder.__init__.<locals>.Comment.__init__cs�j|�}||j_dS)N)r`rr.)rr_)r?rr�_setData�s
z.TreeBuilder.__init__.<locals>.Comment._setDatacSs|jjS)N)rr.)rrrr�_getData�sz.TreeBuilder.__init__.<locals>.Comment._getDataN)rrrrrbrcrr_r)rXr?rrra�sra)r/ZgetETreeModuler
rrCr?�namespaceHTMLElementsrNrVra�elementClass�commentClassrrLr)rrdrMrVrar)rSrXr?rr�szTreeBuilder.__init__cCs$tjj|�|j|_g|_d|_dS)N)rrL�reset�insertCommentInitial�
insertComment�initial_commentsrH)rrrrrgszTreeBuilder.resetcCst|�S)N)rE)rrrrrrE	szTreeBuilder.testSerializercCstr|jjS|jjj�SdS)N)rM�documentrr)rrrr�getDocumentszTreeBuilder.getDocumentcCsFg}|jdj}|jr"|j|j�|jt|��|jrB|j|j�|S)Nr)�openElementsrr.r&�extend�listr#)rZfragmentrrrr�getFragmentszTreeBuilder.getFragmentcCsh|d}|d}|d}|s0tjdt�d|_n4|jj|�}||krPtjdt�|j|||�}||_dS)Nr
rrz#lxml cannot represent empty doctypez%lxml cannot represent non-xml doctype)�warnings�warnrrHr?rU�doctypeClass)r�tokenr
rrZcoercedNamerHrrr�
insertDoctypeszTreeBuilder.insertDoctypecCs|jj|�dS)N)rjr&)rr_�parentrrrrh,sz TreeBuilder.insertCommentInitialcsB||jkr,|jjj�djtkr,tjdt�tt	|�j
||�dS)Nrz@lxml cannot represent adjacent comments beyond the root elements���)rkrrr!r-rqrrr�superrLri)rr_rv)�	__class__rr�insertCommentMain1s
zTreeBuilder.insertCommentMaincCs�d}|jr�|d|jj7}|jjdk	s2|jjdk	r�|d|jj|jjpFd�7}|jjr�|jj}|jd�dkr�|jd�dkr�tjdt	�|j
dd	�}|jd�dkr�|d
|7}q�|d|7}n|d7}|d
7}|jj|dkr�tjdt	�|d7}tj|�}x*|j
D] }|j|d�}|j|j��qW|j�|_|j�|j_|d}|jd|j�}|dk�rb|}	nd||f}	|	|_|j||�}
||
_|jjj|
�|jj|
�|j|_dS)zCreate the document rootrJz<!DOCTYPE %sNz
 PUBLIC "%s" �'r�"z6DOCTYPE system cannot contain single and double quotesZU00027z"%s"z'%s'z''�>r
zGlxml cannot represent doctype with a different name to the root elementz$<THIS_SHOULD_NEVER_APPEAR_PUBLICLY/>r_rTz{%s}%s)rHr
rrr?ZcoercePubid�findrqrrr�replacer
Z
fromstringrjrfZaddpreviousr�
documentClassrkZgetroottreer�getZdefaultNamespacer!rerr&rmrzri)rrtZdocStrZsysid�root�
comment_tokenZcommentr
rTZ	etree_tagZroot_elementrrr�
insertRoot7sJ


zTreeBuilder.insertRoot)F)N)N)rrrrr�rrsrerfZ
fragmentClassr
�implementationrrgrErlrprurhrzr��
__classcell__rr)ryrrL�s
L

rL)�__doc__Z
__future__rrrrq�re�sysrJrr	rr
r/rZ
lxml.etreerM�compiler0rar!r-�objectrrrErKrLrrrr�<module>
s$

O)_vendor/html5lib/treebuilders/__pycache__/etree.cpython-36.pyc000064400000026777151733136440020405 0ustar003

�Pf�1�@s�ddlmZmZmZddlmZddlZddlmZddlm	Z	ddlm
Z
dd	l
mZdd
lm
Z
ejd�Zdd
d�Ze
e�ZdS)�)�absolute_import�division�unicode_literals)�	text_typeN�)�base�)�	_ihatexml)�	constants)�
namespaces)�moduleFactoryFactoryz
{([^}]*)}(.*)Fc	s����jd�j�G�fdd�dtj��G�fdd�d���G�fdd�d���G�fdd	�d	���G�fd
d�d����fdd
��	��fdd�}G��������	fdd�dtj�}t�S)NZasdcs�eZdZd$�fdd�	Zdd�Zdd�Zdd	�Zeee�Zd
d�Z	dd
�Z
ee
e	�Zdd�Zdd�Z
eee
�Zdd�Zdd�Zeee�Zdd�Zdd�Zdd�Zdd�Zd%dd�Zd d!�Zd"d#�ZdS)&z getETreeBuilder.<locals>.ElementNcs^||_||_�j|j||��|_|dkr:td|jf|_n|j|jf|_d|_g|_g|_	dS)N�html)
�_name�
_namespace�Element�_getETreeTag�_elementrZ	nameTuple�parent�_childNodes�_flags)�self�name�	namespace)�ElementTree��/usr/lib/python3.6/etree.py�__init__s

z)getETreeBuilder.<locals>.Element.__init__cSs|dkr|}nd||f}|S)Nz{%s}%sr)rrrZ	etree_tagrrrr#sz-getETreeBuilder.<locals>.Element._getETreeTagcSs||_|j|j|j�|j_dS)N)rrrr�tag)rrrrr�_setName*sz)getETreeBuilder.<locals>.Element._setNamecSs|jS)N)r)rrrr�_getName.sz)getETreeBuilder.<locals>.Element._getNamecSs||_|j|j|j�|j_dS)N)rrrrr)rrrrr�
_setNamespace3sz.getETreeBuilder.<locals>.Element._setNamespacecSs|jS)N)r)rrrr�
_getNamespace7sz.getETreeBuilder.<locals>.Element._getNamespacecSs|jjS)N)r�attrib)rrrr�_getAttributes<sz/getETreeBuilder.<locals>.Element._getAttributescSspx"t|jjj��D]}|jj|=qWxF|j�D]:\}}t|t�rVd|d|df}n|}|jj||�q.WdS)Nz{%s}%srr)�listrr"�keys�items�
isinstance�tuple�set)r�
attributes�key�valuerrrr�_setAttributes?s
z/getETreeBuilder.<locals>.Element._setAttributescSs|jS)N)r)rrrr�_getChildNodesMsz/getETreeBuilder.<locals>.Element._getChildNodescSs.|jdd�=g|_x|D]}|j|�qWdS)N)rrZinsertChild)rr,�elementrrr�_setChildNodesPs
z/getETreeBuilder.<locals>.Element._setChildNodescSst|jjpt|j��S)z,Return true if the node has children or text)�boolr�text�len)rrrr�
hasContentXsz+getETreeBuilder.<locals>.Element.hasContentcSs$|jj|�|jj|j�||_dS)N)r�appendrr)r�noderrr�appendChild\sz,getETreeBuilder.<locals>.Element.appendChildcSs,t|j�j|j�}|jj||j�||_dS)N)r$r�index�insertr)rr6ZrefNoder8rrr�insertBeforeasz-getETreeBuilder.<locals>.Element.insertBeforecSs$|jj|�|jj|j�d|_dS)N)r�removerr)rr6rrr�removeChildfsz,getETreeBuilder.<locals>.Element.removeChildcSs�t|j�s,|jjsd|j_|jj|7_n�|dkrb|jdjsLd|jd_|jdj|7_nxt|j�}|j|j�}|dkr�|j|djs�d|j|d_|j|dj|7_n |jjs�d|j_|jj|7_dS)N�rr���r>r>)r3rr2�tailr$r8)r�datar:Zchildrenr8rrr�
insertTextks"

z+getETreeBuilder.<locals>.Element.insertTextcSs8t|�|j|j�}x |jj�D]\}}||j|<qW|S)N)�typerrr*r&)rr/rr,rrr�	cloneNode�sz*getETreeBuilder.<locals>.Element.cloneNodecSsl|jr"|jdjj|jj7_n0|jjs2d|j_|jjdk	rR|jj|jj7_d|j_tjj||�dS)Nrr=r>)�
childNodesrr?r2r�Node�reparentChildren)rZ	newParentrrrrF�sz1getETreeBuilder.<locals>.Element.reparentChildren)N)N)�__name__�
__module__�__qualname__rrrr�propertyrr r!rr#r-r*r.r0rDr4r7r:r<rArCrFr)rrrrs*





rcs2eZdZ�fdd�Zdd�Zdd�Zeee�ZdS)z getETreeBuilder.<locals>.Commentcs"�j|�|_d|_g|_g|_dS)N)�Commentrrrr)rr@)rrrr�sz)getETreeBuilder.<locals>.Comment.__init__cSs|jjS)N)rr2)rrrr�_getData�sz)getETreeBuilder.<locals>.Comment._getDatacSs||j_dS)N)rr2)rr,rrr�_setData�sz)getETreeBuilder.<locals>.Comment._setDataN)rGrHrIrrLrMrJr@r)rrrrK�srKcsLeZdZ�fdd�Zdd�Zdd�Zeee�Zdd�Zd	d
�Z	eee	�Z
dS)z%getETreeBuilder.<locals>.DocumentTypecs$�j|d�||j_||_||_dS)Nz
<!DOCTYPE>)rrr2�publicId�systemId)rrrNrO)rrrr�sz.getETreeBuilder.<locals>.DocumentType.__init__cSs|jjdd�S)NrNr=)r�get)rrrr�_getPublicId�sz2getETreeBuilder.<locals>.DocumentType._getPublicIdcSs|dk	r|jjd|�dS)NrN)rr))rr,rrr�_setPublicId�sz2getETreeBuilder.<locals>.DocumentType._setPublicIdcSs|jjdd�S)NrOr=)rrP)rrrr�_getSystemId�sz2getETreeBuilder.<locals>.DocumentType._getSystemIdcSs|dk	r|jjd|�dS)NrO)rr))rr,rrr�_setSystemId�sz2getETreeBuilder.<locals>.DocumentType._setSystemIdN)rGrHrIrrQrRrJrNrSrTrOr)rrr�DocumentType�s
rUcseZdZ�fdd�ZdS)z!getETreeBuilder.<locals>.Documentcs�j|d�dS)N�
DOCUMENT_ROOT)r)r)rrrr�sz*getETreeBuilder.<locals>.Document.__init__N)rGrHrIrr)rrr�Document�srWcseZdZ�fdd�ZdS)z)getETreeBuilder.<locals>.DocumentFragmentcs�j|d�dS)NZDOCUMENT_FRAGMENT)r)r)rrrr�sz2getETreeBuilder.<locals>.DocumentFragment.__init__N)rGrHrIrr)rrr�DocumentFragment�srXcs*g�d���fdd�	��|d�dj��S)Nrcs�t|d�s|j�}|jdkrz|jd�s0|jd�rd|jd�p<d}|jd�pJd}�jd|j||f�n�jd|jf��n�|jdkr�jd	�|jdk	r��jd
d|d|jf�|jdk	r�td
��t|d�r�t|j	�r�td���np|j�k�r�jdd||jf��nHt
|jt��s4tdt
|j�|jf��tj|j�}|dk�rR|j}n"|j�\}}tj|}d||f}�jdd||f�t|d��r2g}xb|j	j�D]T\}}	tj|�}|dk	�r�|j�\}}tj|}d||f}
n|}
|j|
|	f��q�Wx2t|�D]&\}}	�jdd|d||	f��qW|j�rV�jd
d|d|jf�|d7}x|D]}�||��qdW|j�r��jd
d|d|jf�dS)Nrz
<!DOCTYPE>rNrOr=z<!DOCTYPE %s "%s" "%s">z
<!DOCTYPE %s>rVz	#documentz|%s"%s"� rzDocument node cannot have tailr"z$Document node cannot have attributesz|%s<!-- %s -->zExpected unicode, got %s, %sz%s %sz|%s<%s>z
|%s%s="%s")�hasattr�getrootrrPr5r2r?�	TypeErrorr3r"r'r�AssertionErrorrB�
tag_regexp�match�groupsr
�prefixesr&�sorted)r/�indentrNrOZnsmatchr�ns�prefixr*r,Zattr_string�child)�ElementTreeCommentType�rv�serializeElementrrri�s^










"
zAgetETreeBuilder.<locals>.testSerializer.<locals>.serializeElement�
)r)�join)r/)rg)rhrir�testSerializer�s7
z'getETreeBuilder.<locals>.testSerializercs2g�tj�������fdd���|�dj��S)z4Serialize an element and its child nodes to a stringcs�t|�j�r|j�}|jdkr||jd�s2|jd�rf|jd�p>d}|jd�pLd}�jd|j||f�n�jd|jf��n|jdkr�|jdk	r��j|j�|jdk	r�td��t	|d	�r�t
|j�r�td
��x�|D]}�|�q�Wn�|j�k�r�jd|jf�n�|j�s$�jd�j|j�f�n2d
j
�fdd�|jj�D��}�jd|j|f�|j�rj�j|j�x|D]}�|��qpW�jd|jf�|j�r��j|j�dS)Nz
<!DOCTYPE>rNrOr=z<!DOCTYPE %s PUBLIC "%s" "%s">z
<!DOCTYPE %s>rVzDocument node cannot have tailr"z$Document node cannot have attributesz	<!--%s-->z<%s>rYcs"g|]\}}d�j|�|f�qS)z%s="%s")�fromXmlName)�.0rr,)�filterrr�
<listcomp>&szOgetETreeBuilder.<locals>.tostring.<locals>.serializeElement.<locals>.<listcomp>z<%s %s>z</%s>)r'rr[rrPr5r2r?r\rZr3r"rmrkr&)r/rNrOrf�attr)rrgrorhrirrris@





z;getETreeBuilder.<locals>.tostring.<locals>.serializeElementr=)r	Z
InfosetFilterrk)r/)rrg)rorhrir�tostrings
-z!getETreeBuilder.<locals>.tostringcsDeZdZ�Z�Z�Z�Z�Z�Z�fdd�Z	�fdd�Z
dd�ZdS)z$getETreeBuilder.<locals>.TreeBuildercs�|�S)Nr)rr/)rlrrrlAsz3getETreeBuilder.<locals>.TreeBuilder.testSerializercs<�r|jjS|jdk	r*|jjjd|j�S|jjjd�SdS)Nz{%s}htmlr
)ZdocumentrZdefaultNamespace�find)r)�fullTreerr�getDocumentDs
z0getETreeBuilder.<locals>.TreeBuilder.getDocumentcSstjj|�jS)N)r�TreeBuilder�getFragmentr)rrrrrwNsz0getETreeBuilder.<locals>.TreeBuilder.getFragmentN)rGrHrIZ
documentClassZdoctypeClassZelementClassZcommentClassZ
fragmentClass�implementationrlrurwr)rKrWrXrUr�ElementTreeImplementationrtrlrrrv9s
rv)rKrrrErv�locals)ryrtrrrvr)
rKrWrXrUrrrgryrtrlr�getETreeBuilders~>6$r{)F)Z
__future__rrrZpip._vendor.sixr�rer=rr	r
rZ_utilsr�compiler^r{ZgetETreeModulerrrr�<module>s

E_vendor/html5lib/treebuilders/__pycache__/__init__.cpython-36.pyc000064400000005742151733136440021025 0ustar003

�PfN
�@s6dZddlmZmZmZddlmZiZddd�ZdS)	a�A collection of modules for building different kinds of tree from
HTML documents.

To create a treebuilder for a new type of tree, you need to do
implement several things:

1) A set of classes for various types of elements: Document, Doctype,
Comment, Element. These must implement the interface of
_base.treebuilders.Node (although comment nodes have a different
signature for their constructor, see treebuilders.etree.Comment)
Textual content may also be implemented as another node type, or not, as
your tree implementation requires.

2) A treebuilder object (called TreeBuilder by convention) that
inherits from treebuilders._base.TreeBuilder. This has 4 required attributes:
documentClass - the class to use for the bottommost node of a document
elementClass - the class to use for HTML Elements
commentClass - the class to use for comments
doctypeClass - the class to use for doctypes
It also has one required method:
getDocument - Returns the root node of the complete document tree

3) If you wish to run the unit tests, you must also create a
testSerializer method on your treebuilder which accepts a node and
returns a string containing Node and its children serialized according
to the format used in the unittests
�)�absolute_import�division�unicode_literals�)�
default_etreeNcKs�|j�}|tkr�|dkrLddlm}|dkr<ddlm}|}|j|f|�jS|dkrlddlm}|jt|<n<|d	kr�dd
lm	}|dkr�t
}|j|f|�jStd|��tj
|�S)a�Get a TreeBuilder class for various types of tree with built-in support

    treeType - the name of the tree type required (case-insensitive). Supported
               values are:

               "dom" - A generic builder for DOM implementations, defaulting to
                       a xml.dom.minidom based implementation.
               "etree" - A generic builder for tree implementations exposing an
                         ElementTree-like interface, defaulting to
                         xml.etree.cElementTree if available and
                         xml.etree.ElementTree if not.
               "lxml" - A etree-based builder for lxml.etree, handling
                        limitations of lxml's implementation.

    implementation - (Currently applies to the "etree" and "dom" tree types). A
                      module implementing the tree type e.g.
                      xml.etree.ElementTree or xml.etree.cElementTree.�dom�)rNr)�minidomZlxml)�
etree_lxml�etree)rzUnrecognised treebuilder "%s" )�lower�treeBuilderCache�rZxml.domr	ZgetDomModuleZTreeBuilderr
rrZgetETreeModule�
ValueError�get)ZtreeType�implementation�kwargsrr	r
r�r�/usr/lib/python3.6/__init__.py�getTreeBuilder$s$r)N)	�__doc__Z
__future__rrrZ_utilsrr
rrrrr�<module>s_vendor/html5lib/treebuilders/__pycache__/dom.cpython-36.pyc000064400000021735151733136440020045 0ustar003

�Pf�"�@s|ddlmZmZmZddlmZddlmZmZddl	Z	ddl
mZddl
mZdd	lm
Z
dd
lmZdd�Zee�ZdS)
�)�absolute_import�division�unicode_literals)�MutableMapping)�minidom�NodeN�)�base�)�	constants)�
namespaces)�moduleFactoryFactorycsV��Gdd�dt��G��fdd�dtj��G����fdd�dtj�}dd��t�S)	Nc@sLeZdZdd�Zdd�Zdd�Zdd�Zd	d
�Zdd�Zd
d�Z	dd�Z
dS)zgetDomBuilder.<locals>.AttrListcSs
||_dS)N)�element)�selfr�r�/usr/lib/python3.6/dom.py�__init__sz(getDomBuilder.<locals>.AttrList.__init__cSst|jjj��S)N)�iterr�
attributes�keys)rrrr�__iter__sz(getDomBuilder.<locals>.AttrList.__iter__cSs4t|t�rt�n |jjj|�}||_||jj|<dS)N)�
isinstance�tuple�NotImplementedErrorr�
ownerDocumentZcreateAttribute�valuer)r�namer�attrrrr�__setitem__s

z+getDomBuilder.<locals>.AttrList.__setitem__cSst|jj�S)N)�lenrr)rrrr�__len__ sz'getDomBuilder.<locals>.AttrList.__len__cSst|jjj��S)N)�listrr�items)rrrrr"#sz%getDomBuilder.<locals>.AttrList.itemscSst|jjj��S)N)r!rr�values)rrrrr#&sz&getDomBuilder.<locals>.AttrList.valuescSs"t|t�rt�n|jj|jSdS)N)rrrrrr)rrrrr�__getitem__)s
z+getDomBuilder.<locals>.AttrList.__getitem__cSst|t�rt�n
|jj|=dS)N)rrrrr)rrrrr�__delitem__/s
z+getDomBuilder.<locals>.AttrList.__delitem__N)�__name__�
__module__�__qualname__rrrr r"r#r$r%rrrr�AttrListsr)cs�eZdZdd�Zedd��Zdd�Zddd	�Zd
d�Zdd
�Z	dd�Z
�fdd�Zdd�Zeee�Z
�fdd�Zdd�Zdd�Zee�ZdS)z"getDomBuilder.<locals>.NodeBuildercSstjj||j�||_dS)N)r	rr�nodeNamer)rrrrrr6sz+getDomBuilder.<locals>.NodeBuilder.__init__cSst|jd�r|jjpdS)N�namespaceURI)�hasattrrr+)rrrr�<lambda>:sz+getDomBuilder.<locals>.NodeBuilder.<lambda>cSs||_|jj|j�dS)N)�parentr�appendChild)r�noderrrr/=sz.getDomBuilder.<locals>.NodeBuilder.appendChildNcSs4|jjj|�}|r$|jj||j�n|jj|�dS)N)rr�createTextNode�insertBeforer/)r�datar2�textrrr�
insertTextAsz-getDomBuilder.<locals>.NodeBuilder.insertTextcSs|jj|j|j�||_dS)N)rr2r.)rr0ZrefNoderrrr2Hsz/getDomBuilder.<locals>.NodeBuilder.insertBeforecSs&|jj|jkr|jj|j�d|_dS)N)rZ
parentNode�removeChildr.)rr0rrrr6Lsz.getDomBuilder.<locals>.NodeBuilder.removeChildcSs:x.|jj�r.|jj}|jj|�|jj|�qWg|_dS)N)r�
hasChildNodesZ
firstChildr6r/�
childNodes)rZ	newParent�childrrr�reparentChildrenQs
z3getDomBuilder.<locals>.NodeBuilder.reparentChildrencs
�|j�S)N)r)r)r)rr�
getAttributesXsz0getDomBuilder.<locals>.NodeBuilder.getAttributescSsz|rvxpt|j��D]`\}}t|t�rd|ddk	rF|dd|d}n|d}|jj|d||�q|jj||�qWdS)Nr�:rr
)r!r"rrrZsetAttributeNSZsetAttribute)rrrrZ
qualifiedNamerrr�
setAttributes[s
z0getDomBuilder.<locals>.NodeBuilder.setAttributescs�|jjd��S)NF)r�	cloneNode)r)�NodeBuilderrrr>jsz,getDomBuilder.<locals>.NodeBuilder.cloneNodecSs
|jj�S)N)rr7)rrrr�
hasContentmsz-getDomBuilder.<locals>.NodeBuilder.hasContentcSs(|jdkrtd|jfS|j|jfSdS)NZhtml)�	namespacerr)rrrr�getNameTupleps
z/getDomBuilder.<locals>.NodeBuilder.getNameTuple)N)r&r'r(r�propertyrAr/r5r2r6r:r;r=rr>r@rBZ	nameTupler)r)r?rrr?5s

r?cs�eZdZ�fdd�Z��fdd�Zd�fdd�	Z�fdd	�Z�fd
d�Zdd
�Z�fdd�Z	dd�Z
dd�Zddd�Z�Z
dZdS)z"getDomBuilder.<locals>.TreeBuildercs�j�jddd�|_tj|�S)N)�getDOMImplementationZcreateDocument�dom�weakref�proxy)r)�Domrr�
documentClassysz0getDomBuilder.<locals>.TreeBuilder.documentClasscsR|d}|d}|d}�j�}|j|||�}|jj�|���tkrN|j|_dS)Nr�publicId�systemId)rDZcreateDocumentTypeZdocumentr/rrEr)r�tokenrrJrKZdomimplZdoctype)rHr?rr�
insertDoctype}sz0getDomBuilder.<locals>.TreeBuilder.insertDoctypeNcs6|dkr |jdkr |jj|�}n|jj||�}�|�S)N)ZdefaultNamespacerEZ
createElementZcreateElementNS)rrrAr0)r?rr�elementClass�sz/getDomBuilder.<locals>.TreeBuilder.elementClasscs�|jj|��S)N)rEZ
createComment)rr3)r?rr�commentClass�sz/getDomBuilder.<locals>.TreeBuilder.commentClasscs�|jj��S)N)rEZcreateDocumentFragment)r)r?rr�
fragmentClass�sz0getDomBuilder.<locals>.TreeBuilder.fragmentClasscSs|jj|j�dS)N)rEr/r)rr0rrrr/�sz.getDomBuilder.<locals>.TreeBuilder.appendChildcs�|�S)Nr)rr)�testSerializerrrrQ�sz1getDomBuilder.<locals>.TreeBuilder.testSerializercSs|jS)N)rE)rrrr�getDocument�sz.getDomBuilder.<locals>.TreeBuilder.getDocumentcSstjj|�jS)N)r	�TreeBuilder�getFragmentr)rrrrrT�sz.getDomBuilder.<locals>.TreeBuilder.getFragmentcSsp|}||krtjj|||�nNt|jd�rXtj|jjkrXt|jj�|j_|jjj	tj�|jj
|jj|��dS)N�_child_node_types)r	rSr5r,rEr�	TEXT_NODErUr!�appendr/r1)rr3r.rrrr5�sz-getDomBuilder.<locals>.TreeBuilder.insertText)N)N)r&r'r(rIrMrNrOrPr/rQrRrTr5�implementationrr)rH�DomImplementationr?rQrrrSxs

rScs0|j�g�d��fdd�	��|d�dj��S)Nrcs$|jtjkr�|jrj|js|jrP|jp&d}|jp0d}�jdd||j||f�q~�jdd||jf�n�jdd|f��nz|jtjkr��jd��n`|jtjkr��jd��nF|jtj	krވjdd||j
f��n|jtjk�r�jd	d||j
f�n�t|d
��r6|j
dk	�r6dtj|j
|jf}n|j}�jdd||f�|j��r�g}xftt|j��D]T}|jj|�}|j}|j}|j
}	|	�r�dtj|	|jf}n|j}|j||f��qpWx2t|�D]&\}}�jd
d|d||f��q�W|d7}x|jD]}
�|
|��qWdS)N�z|%s<!DOCTYPE %s "%s" "%s">� z|%s<!DOCTYPE %s>z|%s<!DOCTYPE >z	#documentz#document-fragmentz|%s<!-- %s -->z|%s"%s"r+z%s %sz|%s<%s>z
|%s%s="%s"r
)ZnodeTyperZDOCUMENT_TYPE_NODErrJrKrWZ
DOCUMENT_NODEZDOCUMENT_FRAGMENT_NODEZCOMMENT_NODEZ	nodeValuerVr,r+r�prefixesr*Z
hasAttributes�rangerr�itemrZ	localName�sortedr8)r�indentrJrKrr�irr�nsr9)�rv�serializeElementrrrd�sN


"z?getDomBuilder.<locals>.testSerializer.<locals>.serializeElement�
)r)Z	normalize�join)rr)rcrdrrQ�s
.
z%getDomBuilder.<locals>.testSerializer)rr	rrS�locals)rYrSr)r)rHrYr?rQr�
getDomBuilders$C:6rh)Z
__future__rrr�collectionsrZxml.domrrrFrZr	rrZ_utilsr
rhZgetDomModulerrrr�<module>s__vendor/html5lib/treebuilders/__pycache__/etree_lxml.cpython-36.pyc000064400000026717151733136440021433 0ustar003

�PfQ7�@s�dZddlmZmZmZddlZddlZddlZddlm	Z	ddl
mZddlm
Z
dd	lmZ
dd
lmZddljZdZejd�Zejd
�jZGdd�de�ZGdd�de�Zdd�Zdd�ZGdd�de	j�ZdS)a�Module for supporting the lxml.etree library. The idea here is to use as much
of the native library as possible, without using fragile hacks like custom element
names that break between releases. The downside of this is that we cannot represent
all possible trees; specifically the following are known to cause problems:

Text or comments as siblings of the root element
Docypes with no name

When any of these things occur, we emit a DataLossWarning
�)�absolute_import�division�unicode_literalsN�)�base�)�DataLossWarning)�	constants)�etree)�	_ihatexmlTz
{([^}]*)}(.*)Zasdc@seZdZdd�ZdS)�DocumentTypecCs||_||_||_dS)N)�name�publicId�systemId)�selfr
rr�r� /usr/lib/python3.6/etree_lxml.py�__init__#szDocumentType.__init__N)�__name__�
__module__�__qualname__rrrrrr"src@s,eZdZdd�Zdd�Zdd�Zee�ZdS)�DocumentcCsd|_g|_dS)N)�_elementTree�_childNodes)rrrrr*szDocument.__init__cCs|jj�j|j�dS)N)r�getrootZaddnext�_element)r�elementrrr�appendChild.szDocument.appendChildcCs|jS)N)r)rrrr�_getChildNodes1szDocument._getChildNodesN)rrrrrr�propertyZ
childNodesrrrrr)srcs6g�tjdd��d���fdd�	��|d�dj��S)NT)�preventDoubleDashCommentsrc
sDt|d��s8t|d�rˆjd�|jjrz|jjp6|jjsFd|jj}nd|jj|jj|jjf}�jdd|d|f�|j�}x|j�dk	r�|j�}q�Wx�|dk	r��||d�|j	�}q�Wnrt
|t�s�t
|t��rt
|t�s�t
jd	dks�t��jd
d||f�n(�jd�x|D]}�||d��qW�n|jtk�r��jdd||jf�t|d
��r@|j�r@�jd
d||jf��n�t
|tj��s�t�tjj|j�}|dk	�r�|jd�}|jd�}tj|}�jdd||�j|�f�n�jdd|�j|j�f�t|d��r�g}xr|jj�D]d\}	}
tj|	�}|dk	�rx|j�\}}	�j|	�}	tj|}d||	f}n
�j|	�}|j||
f��q.Wx2t |�D]&\}	}
�jdd|d|	|
f��q�W|j�r�jd
d|d|jf�|d7}x|D]}�||��q�Wt|d
��r@|j�r@�jd
d|d|jf�dS)N�tagrz	#documentz
<!DOCTYPE %s>z<!DOCTYPE %s "%s" "%s">z|%s%s� rrz|%s"%s"z#document-fragmentz|%s<!-- %s -->�tailrz
|%s<%s %s>z|%s<%s>�attribz%s %sz
|%s%s="%s")!�hasattr�append�docinfo�internalDTDZ	public_idZ
system_url�	root_namerZgetpreviousZgetnext�
isinstance�str�bytes�sys�version_info�AssertionErrorr!�comment_type�textr#r
Z_Element�etree_builders�
tag_regexp�match�groupr	�prefixes�fromXmlNamer$�items�groups�sorted)
r�indent�dtd_strZnext_elementZnsmatch�nsr!�prefix�
attributesr
�valueZattr_string�child)�
infosetFilter�rv�serializeElementrrrD;st













"
z(testSerializer.<locals>.serializeElement�
)r)r�
InfosetFilter�join)rr)rBrCrDr�testSerializer7s
F
rHcs$g���fdd���|�dj��S)z4Serialize an element and its child nodes to a stringcs
t|d�sH|jjr:|jjr$|jj}nd|jj}�j|��|j��n�|jtkrf�jd|j	f�n�|j
s��jd|jf�n.djdd�|j
j�D��}�jd|j|f�|j	r��j|j	�x|D]}�|�q�W�jd	|jf�t|d
�o�|j
�r�j|j
�dS)Nr!z
<!DOCTYPE %s>z	<!--%s-->z<%s>r"cSsg|]\}}d||f�qS)z%s="%s"r)�.0r
r@rrr�
<listcomp>�sz6tostring.<locals>.serializeElement.<locals>.<listcomp>z<%s %s>z</%s>r#)r%r'r(�doctyper)r&rr!r0r1r$rGr8r#)rr<�attrrA)rCrDrrrD�s*





z"tostring.<locals>.serializeElement�)rG)rr)rCrDr�tostring�s rNcszeZdZeZeZdZdZeZ	e
Zddd�Zdd�Z
dd�Zd	d
�Zdd�Zd
d�Zddd�Zd�fdd�	Zdd�Z�ZS)�TreeBuilderNFcs�tjt|d��tjdd��|_||_G�fdd�dt��G���fdd�d�j�}G��fdd	�d	�j	�}||_
||_tj
j||�dS)
N)�fullTreeT)r cs&eZdZd�fdd�	Z�fdd�ZdS)z(TreeBuilder.__init__.<locals>.AttributesNcsv|dkri}||_tj||�xR|j�D]F\}}t|t�rVd|d�j|d�f}n
�j|�}||jjj|<q(WdS)Nz{%s}%srr)r�dictrr8r*�tuple�coerceAttributer$)rrr@�keyr
)rBrrr�s

z1TreeBuilder.__init__.<locals>.Attributes.__init__csPtj|||�t|t�r4d|d�j|d�f}n
�j|�}||jjj|<dS)Nz{%s}%srr)rQ�__setitem__r*rRrSrr$)rrTr@r
)rBrrrU�s


z4TreeBuilder.__init__.<locals>.Attributes.__setitem__)N)rrrrrUr)rBrr�
Attributes�srVcsxeZdZ���fdd�Z�fdd�Z�fdd�Zeee�Zdd�Z�fd	d
�Z	eee	�Z
d��fdd
�	Z�fdd�ZdS)z%TreeBuilder.__init__.<locals>.Elementcs*�j|�}�jj|||d��|�|_dS)N)�	namespace)�
coerceElement�Elementr�_attributes)rr
rW)rV�builderrBrrr�s
z.TreeBuilder.__init__.<locals>.Element.__init__cs$�j|�|_|j|j|j�|j_dS)N)rX�_nameZ_getETreeTagZ
_namespacerr!)rr
)rBrr�_setName�sz.TreeBuilder.__init__.<locals>.Element._setNamecs�j|j�S)N)r7r\)r)rBrr�_getName�sz.TreeBuilder.__init__.<locals>.Element._getNamecSs|jS)N)rZ)rrrr�_getAttributes�sz4TreeBuilder.__init__.<locals>.Element._getAttributescs�||�|_dS)N)rZ)rr?)rVrr�_setAttributes�sz4TreeBuilder.__init__.<locals>.Element._setAttributesNcs�j|�}�jj|||�dS)N)ZcoerceCharactersrY�
insertText)r�dataZinsertBefore)r[rBrrra�s
z0TreeBuilder.__init__.<locals>.Element.insertTextcs�jj||�dS)N)rYr)rrA)r[rrr�sz1TreeBuilder.__init__.<locals>.Element.appendChild)N)
rrrrr]r^rr
r_r`r?rarr)rVr[rBrrrY�s

rYcs8eZdZ��fdd�Z�fdd�Zdd�Zeee�ZdS)z%TreeBuilder.__init__.<locals>.Commentcs�j|�}�jj||�dS)N)�
coerceComment�Commentr)rrb)r[rBrrr�s
z.TreeBuilder.__init__.<locals>.Comment.__init__cs�j|�}||j_dS)N)rcrr1)rrb)rBrr�_setData�s
z.TreeBuilder.__init__.<locals>.Comment._setDatacSs|jjS)N)rr1)rrrr�_getData�sz.TreeBuilder.__init__.<locals>.Comment._getDataN)rrrrrerfrrbr)r[rBrrrd�srd)r2ZgetETreeModuler
rrFrB�namespaceHTMLElementsrQrYrd�elementClass�commentClassrrOr)rrgrPrYrdr)rVr[rBrr�szTreeBuilder.__init__cCs$tjj|�|j|_g|_d|_dS)N)rrO�reset�insertCommentInitial�
insertComment�initial_commentsrK)rrrrrjszTreeBuilder.resetcCst|�S)N)rH)rrrrrrH	szTreeBuilder.testSerializercCstr|jjS|jjj�SdS)N)rP�documentrr)rrrr�getDocumentszTreeBuilder.getDocumentcCsFg}|jdj}|jr"|j|j�|jt|��|jrB|j|j�|S)Nr)�openElementsrr1r&�extend�listr#)rZfragmentrrrr�getFragmentszTreeBuilder.getFragmentcCsh|d}|d}|d}|s0tjdt�d|_n4|jj|�}||krPtjdt�|j|||�}||_dS)Nr
rrz#lxml cannot represent empty doctypez%lxml cannot represent non-xml doctype)�warnings�warnrrKrBrX�doctypeClass)r�tokenr
rrZcoercedNamerKrrr�
insertDoctypeszTreeBuilder.insertDoctypecCs6|dks||jkst�|jjdks&t�|jj|�dS)N)rnr/rrmr&)rrb�parentrrrrk,sz TreeBuilder.insertCommentInitialcsB||jkr,|jjj�djtkr,tjdt�tt	|�j
||�dS)Nrz@lxml cannot represent adjacent comments beyond the root elements���)rnrrr!r0rtrur�superrOrl)rrbry)�	__class__rr�insertCommentMain1s
zTreeBuilder.insertCommentMaincCs�d}|jr�|jjst�|d|jj7}|jjdk	s>|jjdk	r�|d|jj|jjpRd�7}|jjr�|jj}|jd�dkr�|jd�dkr�tj	dt
�|jdd	�}|jd�dkr�|d
|7}q�|d|7}n|d7}|d
7}|jj|dkr�tj	dt
�|d7}tj
|�}x*|jD] }|j|d�}|j|j��qW|j�|_|j�|j_|d}|jd|j�}|dk�rn|}	nd||f}	|	|_|j||�}
||
_|jjj|
�|jj|
�|j|_dS)zCreate the document rootrMz<!DOCTYPE %sNz
 PUBLIC "%s" �'r�"z6DOCTYPE system cannot contain single and double quotesZU00027z"%s"z'%s'z''�>r
zGlxml cannot represent doctype with a different name to the root elementz$<THIS_SHOULD_NEVER_APPEAR_PUBLICLY/>rbrWz{%s}%s)rKr
r/rrrBZcoercePubid�findrtrur�replacer
Z
fromstringrmriZaddpreviousr�
documentClassrnZgetroottreer�getZdefaultNamespacer!rhrr&rpr}rl)rrwZdocStrZsysid�root�
comment_tokenZcommentr
rWZ	etree_tagZroot_elementrrr�
insertRoot7sL


zTreeBuilder.insertRoot)F)N)N)rrrrr�rrvrhriZ
fragmentClassr
�implementationrrjrHrorsrxrkr}r��
__classcell__rr)r|rrO�s
L

rO)�__doc__Z
__future__rrrrt�rer-rMrr	rr
r2rZ
lxml.etreerP�compiler3rdr!r0�objectrrrHrNrOrrrr�<module>
s$

O)_vendor/html5lib/treebuilders/__pycache__/__init__.cpython-36.opt-1.pyc000064400000005742151733136440021764 0ustar003

�PfN
�@s6dZddlmZmZmZddlmZiZddd�ZdS)	a�A collection of modules for building different kinds of tree from
HTML documents.

To create a treebuilder for a new type of tree, you need to do
implement several things:

1) A set of classes for various types of elements: Document, Doctype,
Comment, Element. These must implement the interface of
_base.treebuilders.Node (although comment nodes have a different
signature for their constructor, see treebuilders.etree.Comment)
Textual content may also be implemented as another node type, or not, as
your tree implementation requires.

2) A treebuilder object (called TreeBuilder by convention) that
inherits from treebuilders._base.TreeBuilder. This has 4 required attributes:
documentClass - the class to use for the bottommost node of a document
elementClass - the class to use for HTML Elements
commentClass - the class to use for comments
doctypeClass - the class to use for doctypes
It also has one required method:
getDocument - Returns the root node of the complete document tree

3) If you wish to run the unit tests, you must also create a
testSerializer method on your treebuilder which accepts a node and
returns a string containing Node and its children serialized according
to the format used in the unittests
�)�absolute_import�division�unicode_literals�)�
default_etreeNcKs�|j�}|tkr�|dkrLddlm}|dkr<ddlm}|}|j|f|�jS|dkrlddlm}|jt|<n<|d	kr�dd
lm	}|dkr�t
}|j|f|�jStd|��tj
|�S)a�Get a TreeBuilder class for various types of tree with built-in support

    treeType - the name of the tree type required (case-insensitive). Supported
               values are:

               "dom" - A generic builder for DOM implementations, defaulting to
                       a xml.dom.minidom based implementation.
               "etree" - A generic builder for tree implementations exposing an
                         ElementTree-like interface, defaulting to
                         xml.etree.cElementTree if available and
                         xml.etree.ElementTree if not.
               "lxml" - A etree-based builder for lxml.etree, handling
                        limitations of lxml's implementation.

    implementation - (Currently applies to the "etree" and "dom" tree types). A
                      module implementing the tree type e.g.
                      xml.etree.ElementTree or xml.etree.cElementTree.�dom�)rNr)�minidomZlxml)�
etree_lxml�etree)rzUnrecognised treebuilder "%s" )�lower�treeBuilderCache�rZxml.domr	ZgetDomModuleZTreeBuilderr
rrZgetETreeModule�
ValueError�get)ZtreeType�implementation�kwargsrr	r
r�r�/usr/lib/python3.6/__init__.py�getTreeBuilder$s$r)N)	�__doc__Z
__future__rrrZ_utilsrr
rrrrr�<module>s_vendor/html5lib/treebuilders/__pycache__/base.cpython-36.pyc000064400000025515151733136440020200 0ustar003

�Pfv6�@s�ddlmZmZmZddlmZddlmZmZm	Z	dZ
ee�dfeeee	ddfg�B�dfeeee	dd	fe	dd
fg�B�dfee	ddfe	ddfg�dfee	ddfe	dd
fg�dfd�Z
Gdd�de�ZGdd�de�ZGdd�de�ZdS)�)�absolute_import�division�unicode_literals)�	text_type�)�scopingElements�tableInsertModeElements�
namespacesNF�html�buttonZolZul�table�optgroup�optionT)Nr�listrZselectc@s^eZdZdd�Zdd�Zdd�Zdd�Zdd
d�Zdd
�Zdd�Z	dd�Z
dd�Zdd�Zd	S)�NodecCs(||_d|_d|_i|_g|_g|_dS)a6Node representing an item in the tree.
        name - The tag name associated with the node
        parent - The parent of the current node (or None for the document node)
        value - The value of the current node (applies to text nodes and
        comments
        attributes - a dict holding name, value pairs for attributes of the node
        childNodes - a list of child nodes of the current node. This must
        include all elements but not necessarily other node types
        _flags - A list of miscellaneous flags that can be set on the node
        N)�name�parent�value�
attributes�
childNodesZ_flags)�selfr�r�/usr/lib/python3.6/base.py�__init__sz
Node.__init__cCs:djdd�|jj�D��}|r,d|j|fSd|jSdS)N� cSsg|]\}}d||f�qS)z%s="%s"r)�.0rrrrr�
<listcomp>+sz Node.__str__.<locals>.<listcomp>z<%s %s>z<%s>)�joinr�itemsr)rZ
attributesStrrrr�__str__*s

zNode.__str__cCs
d|jS)Nz<%s>)r)rrrr�__repr__3sz
Node.__repr__cCst�dS)z3Insert node as a child of the current node
        N)�NotImplementedError)r�noderrr�appendChild6szNode.appendChildNcCst�dS)z�Insert data as text in the current node, positioned before the
        start of node insertBefore or to the end of the node's text.
        N)r!)r�data�insertBeforerrr�
insertText;szNode.insertTextcCst�dS)z�Insert node as a child of the current node, before refNode in the
        list of child nodes. Raises ValueError if refNode is not a child of
        the current nodeN)r!)rr"ZrefNoderrrr%AszNode.insertBeforecCst�dS)z:Remove node from the children of the current node
        N)r!)rr"rrr�removeChildGszNode.removeChildcCs$x|jD]}|j|�qWg|_dS)z�Move all the children of the current node to newParent.
        This is needed so that trees that don't store text as nodes move the
        text in the correct way
        N)rr#)rZ	newParentZchildrrr�reparentChildrenLszNode.reparentChildrencCst�dS)z�Return a shallow copy of the current node i.e. a node with the same
        name and attributes but with no parent or child nodes
        N)r!)rrrr�	cloneNodeVszNode.cloneNodecCst�dS)zFReturn true if the node has children or text, false otherwise
        N)r!)rrrr�
hasContent\szNode.hasContent)N)
�__name__�
__module__�__qualname__rrr r#r&r%r'r(r)r*rrrrrs	

rc@seZdZdd�Zdd�ZdS)�ActiveFormattingElementscCsfd}|tkrVxH|ddd�D]6}|tkr*P|j||�r>|d7}|dkr|j|�PqWtj||�dS)Nr�����)�Marker�
nodesEqual�remover�append)rr"Z
equalCount�elementrrrr5cs
zActiveFormattingElements.appendcCs$|j|jksdS|j|jks dSdS)NFT)�	nameTupler)rZnode1Znode2rrrr3ps
z#ActiveFormattingElements.nodesEqualN)r+r,r-r5r3rrrrr.bs
r.c@s�eZdZdZdZdZdZdZdZdd�Z	dd�Z
d+dd�Zd	d
�Zdd�Z
d
d�Zdd�Zdd�Zd,dd�Zdd�Zdd�Zdd�Zeee�Zdd�Zdd�Zd-dd �Zd!d"�Zd.d#d$�Zd%d&�Zd'd(�Zd)d*�ZdS)/�TreeBuilderaBase treebuilder implementation
    documentClass - the class to use for the bottommost node of a document
    elementClass - the class to use for HTML Elements
    commentClass - the class to use for comments
    doctypeClass - the class to use for doctypes
    NcCs|rd|_nd|_|j�dS)Nzhttp://www.w3.org/1999/xhtml)�defaultNamespace�reset)rZnamespaceHTMLElementsrrrr�szTreeBuilder.__init__cCs.g|_t�|_d|_d|_d|_|j�|_dS)NF)�openElementsr.�activeFormattingElementsZheadPointerZformPointer�insertFromTable�
documentClass�document)rrrrr:�szTreeBuilder.resetcCs�t|d�}|s2t|t�r$td|f}t|t�s2t�t|\}}xHt|j�D]:}|r^||kr^dS|rr|j	|krrdS||j	|kArJdSqJWds�t�dS)Nr7r
TF)
�hasattr�
isinstancerr	�tuple�AssertionError�listElementsMap�reversedr;r7)r�targetZvariantZ	exactNodeZlistElements�invertr"rrr�elementInScope�s

zTreeBuilder.elementInScopecCs�|js
dSt|j�d}|j|}|tks4||jkr8dSx6|tkrn||jkrn|dkrZd}P|d8}|j|}q:WxR|d7}|j|}|j�}|jd|j|j|jd��}||j|<||jdkrrPqrWdS)Nr/rZStartTag)�typer�	namespacer$r1r1)	r<�lenr2r;r)�
insertElementrrJr)r�i�entryZcloner6rrr�#reconstructActiveFormattingElements�s.


z/TreeBuilder.reconstructActiveFormattingElementscCs,|jj�}x|jr&|tkr&|jj�}qWdS)N)r<�popr2)rrNrrr�clearActiveFormattingElements�s
z)TreeBuilder.clearActiveFormattingElementscCs8x2|jddd�D]}|tkr"Pq|j|kr|SqWdS)z�Check if an element exists between the end of the active
        formatting elements and the last marker. If it does, return it, else
        return falseNr/Fr1)r<r2r)rr�itemrrr�!elementInActiveFormattingElements�s
z-TreeBuilder.elementInActiveFormattingElementscCs&|j|�}|jj|�|jj|�dS)N)�
createElementr;r5r?r#)r�tokenr6rrr�
insertRoot�s
zTreeBuilder.insertRootcCs6|d}|d}|d}|j|||�}|jj|�dS)Nr�publicId�systemId)�doctypeClassr?r#)rrUrrWrXZdoctyperrr�
insertDoctypes
zTreeBuilder.insertDoctypecCs*|dkr|jd}|j|j|d��dS)Nr/r$r1)r;r#�commentClass)rrUrrrr�
insertComment	s
zTreeBuilder.insertCommentcCs0|d}|jd|j�}|j||�}|d|_|S)z.Create an element but don't insert it anywhererrJr$)�getr9�elementClassr)rrUrrJr6rrrrTs

zTreeBuilder.createElementcCs|jS)N)�_insertFromTable)rrrr�_getInsertFromTableszTreeBuilder._getInsertFromTablecCs ||_|r|j|_n|j|_dS)zsSwitch the function used to insert an element from the
        normal one to the misnested table one and back againN)r_�insertElementTablerL�insertElementNormal)rrrrr�_setInsertFromTables
zTreeBuilder._setInsertFromTablecCsb|d}t|t�std|��|jd|j�}|j||�}|d|_|jdj|�|jj	|�|S)NrzElement %s not unicoderJr$r/r1)
rArrCr]r9r^rr;r#r5)rrUrrJr6rrrrb$s
zTreeBuilder.insertElementNormalcCs`|j|�}|jdjtkr$|j|�S|j�\}}|dkrD|j|�n|j||�|jj|�|S)z-Create an element and insert it into the treer/Nr1)	rTr;rrrb�getTableMisnestedNodePositionr#r%r5)rrUr6rr%rrrra.s

zTreeBuilder.insertElementTablecCsX|dkr|jd}|js0|jr<|jdjtkr<|j|�n|j�\}}|j||�dS)zInsert text data.Nr/r1r1)r;r=rrr&rd)rr$rr%rrrr&>s

zTreeBuilder.insertTextcCsvd}d}d}x(|jddd�D]}|jdkr|}PqW|rd|jrL|j}|}qn|j|jj|�d}n
|jd}||fS)zsGet the foster parent element, and sibling to insert before
        (or None) when inserting a misnested table nodeNr/rrr1)r;rr�index)rZ	lastTableZfosterParentr%ZelmrrrrdMs

z)TreeBuilder.getTableMisnestedNodePositionc
Cs8|jd
j}|td�kr4||kr4|jj�|j|�dS)Nr/�dd�dt�lirr
�p�rp�rtr1)rfrgrhrr
rirjrk)r;r�	frozensetrP�generateImpliedEndTags)r�excluderrrrrmgs

z"TreeBuilder.generateImpliedEndTagscCs|jS)zReturn the final tree)r?)rrrr�getDocumentqszTreeBuilder.getDocumentcCs|j�}|jdj|�|S)zReturn the final fragmentr)�
fragmentClassr;r()rZfragmentrrr�getFragmentuszTreeBuilder.getFragmentcCst�dS)zzSerialize the subtree of node in the format required by unit tests
        node - the node from which to start serializingN)r!)rr"rrr�testSerializer|szTreeBuilder.testSerializer)N)N)N)N)r+r,r-�__doc__r>r^r[rYrprr:rHrOrQrSrVrZr\rTr`rc�propertyr=rbrar&rdrmrorqrrrrrrr8zs6
.
	




r8)Z
__future__rrrZpip._vendor.sixrZ	constantsrrr	r2rl�setrD�objectrrr.r8rrrr�<module>s
K_vendor/html5lib/treebuilders/__pycache__/base.cpython-36.opt-1.pyc000064400000025337151733136440021141 0ustar003

�Pfv6�@s�ddlmZmZmZddlmZddlmZmZm	Z	dZ
ee�dfeeee	ddfg�B�dfeeee	dd	fe	dd
fg�B�dfee	ddfe	ddfg�dfee	ddfe	dd
fg�dfd�Z
Gdd�de�ZGdd�de�ZGdd�de�ZdS)�)�absolute_import�division�unicode_literals)�	text_type�)�scopingElements�tableInsertModeElements�
namespacesNF�html�buttonZolZul�table�optgroup�optionT)Nr�listrZselectc@s^eZdZdd�Zdd�Zdd�Zdd�Zdd
d�Zdd
�Zdd�Z	dd�Z
dd�Zdd�Zd	S)�NodecCs(||_d|_d|_i|_g|_g|_dS)a6Node representing an item in the tree.
        name - The tag name associated with the node
        parent - The parent of the current node (or None for the document node)
        value - The value of the current node (applies to text nodes and
        comments
        attributes - a dict holding name, value pairs for attributes of the node
        childNodes - a list of child nodes of the current node. This must
        include all elements but not necessarily other node types
        _flags - A list of miscellaneous flags that can be set on the node
        N)�name�parent�value�
attributes�
childNodesZ_flags)�selfr�r�/usr/lib/python3.6/base.py�__init__sz
Node.__init__cCs:djdd�|jj�D��}|r,d|j|fSd|jSdS)N� cSsg|]\}}d||f�qS)z%s="%s"r)�.0rrrrr�
<listcomp>+sz Node.__str__.<locals>.<listcomp>z<%s %s>z<%s>)�joinr�itemsr)rZ
attributesStrrrr�__str__*s

zNode.__str__cCs
d|jS)Nz<%s>)r)rrrr�__repr__3sz
Node.__repr__cCst�dS)z3Insert node as a child of the current node
        N)�NotImplementedError)r�noderrr�appendChild6szNode.appendChildNcCst�dS)z�Insert data as text in the current node, positioned before the
        start of node insertBefore or to the end of the node's text.
        N)r!)r�data�insertBeforerrr�
insertText;szNode.insertTextcCst�dS)z�Insert node as a child of the current node, before refNode in the
        list of child nodes. Raises ValueError if refNode is not a child of
        the current nodeN)r!)rr"ZrefNoderrrr%AszNode.insertBeforecCst�dS)z:Remove node from the children of the current node
        N)r!)rr"rrr�removeChildGszNode.removeChildcCs$x|jD]}|j|�qWg|_dS)z�Move all the children of the current node to newParent.
        This is needed so that trees that don't store text as nodes move the
        text in the correct way
        N)rr#)rZ	newParentZchildrrr�reparentChildrenLszNode.reparentChildrencCst�dS)z�Return a shallow copy of the current node i.e. a node with the same
        name and attributes but with no parent or child nodes
        N)r!)rrrr�	cloneNodeVszNode.cloneNodecCst�dS)zFReturn true if the node has children or text, false otherwise
        N)r!)rrrr�
hasContent\szNode.hasContent)N)
�__name__�
__module__�__qualname__rrr r#r&r%r'r(r)r*rrrrrs	

rc@seZdZdd�Zdd�ZdS)�ActiveFormattingElementscCsfd}|tkrVxH|ddd�D]6}|tkr*P|j||�r>|d7}|dkr|j|�PqWtj||�dS)Nr�����)�Marker�
nodesEqual�remover�append)rr"Z
equalCount�elementrrrr5cs
zActiveFormattingElements.appendcCs$|j|jksdS|j|jks dSdS)NFT)�	nameTupler)rZnode1Znode2rrrr3ps
z#ActiveFormattingElements.nodesEqualN)r+r,r-r5r3rrrrr.bs
r.c@s�eZdZdZdZdZdZdZdZdd�Z	dd�Z
d+dd�Zd	d
�Zdd�Z
d
d�Zdd�Zdd�Zd,dd�Zdd�Zdd�Zdd�Zeee�Zdd�Zdd�Zd-dd �Zd!d"�Zd.d#d$�Zd%d&�Zd'd(�Zd)d*�ZdS)/�TreeBuilderaBase treebuilder implementation
    documentClass - the class to use for the bottommost node of a document
    elementClass - the class to use for HTML Elements
    commentClass - the class to use for comments
    doctypeClass - the class to use for doctypes
    NcCs|rd|_nd|_|j�dS)Nzhttp://www.w3.org/1999/xhtml)�defaultNamespace�reset)rZnamespaceHTMLElementsrrrr�szTreeBuilder.__init__cCs.g|_t�|_d|_d|_d|_|j�|_dS)NF)�openElementsr.�activeFormattingElementsZheadPointerZformPointer�insertFromTable�
documentClass�document)rrrrr:�szTreeBuilder.resetcCs~t|d�}|s$t|t�r$td|f}t|\}}xHt|j�D]:}|rP||krPdS|rd|j|krddS||j|kAr<dSq<WdS)Nr7r
TF)�hasattr�
isinstancerr	�listElementsMap�reversedr;r7)r�targetZvariantZ	exactNodeZlistElements�invertr"rrr�elementInScope�s

zTreeBuilder.elementInScopecCs�|js
dSt|j�d}|j|}|tks4||jkr8dSx6|tkrn||jkrn|dkrZd}P|d8}|j|}q:WxR|d7}|j|}|j�}|jd|j|j|jd��}||j|<||jdkrrPqrWdS)Nr/rZStartTag)�typer�	namespacer$r1r1)	r<�lenr2r;r)�
insertElementrrHr)r�i�entryZcloner6rrr�#reconstructActiveFormattingElements�s.


z/TreeBuilder.reconstructActiveFormattingElementscCs,|jj�}x|jr&|tkr&|jj�}qWdS)N)r<�popr2)rrLrrr�clearActiveFormattingElements�s
z)TreeBuilder.clearActiveFormattingElementscCs8x2|jddd�D]}|tkr"Pq|j|kr|SqWdS)z�Check if an element exists between the end of the active
        formatting elements and the last marker. If it does, return it, else
        return falseNr/Fr1)r<r2r)rr�itemrrr�!elementInActiveFormattingElements�s
z-TreeBuilder.elementInActiveFormattingElementscCs&|j|�}|jj|�|jj|�dS)N)�
createElementr;r5r?r#)r�tokenr6rrr�
insertRoot�s
zTreeBuilder.insertRootcCs6|d}|d}|d}|j|||�}|jj|�dS)Nr�publicId�systemId)�doctypeClassr?r#)rrSrrUrVZdoctyperrr�
insertDoctypes
zTreeBuilder.insertDoctypecCs*|dkr|jd}|j|j|d��dS)Nr/r$r1)r;r#�commentClass)rrSrrrr�
insertComment	s
zTreeBuilder.insertCommentcCs0|d}|jd|j�}|j||�}|d|_|S)z.Create an element but don't insert it anywhererrHr$)�getr9�elementClassr)rrSrrHr6rrrrRs

zTreeBuilder.createElementcCs|jS)N)�_insertFromTable)rrrr�_getInsertFromTableszTreeBuilder._getInsertFromTablecCs ||_|r|j|_n|j|_dS)zsSwitch the function used to insert an element from the
        normal one to the misnested table one and back againN)r]�insertElementTablerJ�insertElementNormal)rrrrr�_setInsertFromTables
zTreeBuilder._setInsertFromTablecCsL|d}|jd|j�}|j||�}|d|_|jdj|�|jj|�|S)NrrHr$r/r1)r[r9r\rr;r#r5)rrSrrHr6rrrr`$s
zTreeBuilder.insertElementNormalcCs`|j|�}|jdjtkr$|j|�S|j�\}}|dkrD|j|�n|j||�|jj|�|S)z-Create an element and insert it into the treer/Nr1)	rRr;rrr`�getTableMisnestedNodePositionr#r%r5)rrSr6rr%rrrr_.s

zTreeBuilder.insertElementTablecCsX|dkr|jd}|js0|jr<|jdjtkr<|j|�n|j�\}}|j||�dS)zInsert text data.Nr/r1r1)r;r=rrr&rb)rr$rr%rrrr&>s

zTreeBuilder.insertTextcCsvd}d}d}x(|jddd�D]}|jdkr|}PqW|rd|jrL|j}|}qn|j|jj|�d}n
|jd}||fS)zsGet the foster parent element, and sibling to insert before
        (or None) when inserting a misnested table nodeNr/rrr1)r;rr�index)rZ	lastTableZfosterParentr%ZelmrrrrbMs

z)TreeBuilder.getTableMisnestedNodePositionc
Cs8|jd
j}|td�kr4||kr4|jj�|j|�dS)Nr/�dd�dt�lirr
�p�rp�rtr1)rdrerfrr
rgrhri)r;r�	frozensetrN�generateImpliedEndTags)r�excluderrrrrkgs

z"TreeBuilder.generateImpliedEndTagscCs|jS)zReturn the final tree)r?)rrrr�getDocumentqszTreeBuilder.getDocumentcCs|j�}|jdj|�|S)zReturn the final fragmentr)�
fragmentClassr;r()rZfragmentrrr�getFragmentuszTreeBuilder.getFragmentcCst�dS)zzSerialize the subtree of node in the format required by unit tests
        node - the node from which to start serializingN)r!)rr"rrr�testSerializer|szTreeBuilder.testSerializer)N)N)N)N)r+r,r-�__doc__r>r\rYrWrnrr:rFrMrOrQrTrXrZrRr^ra�propertyr=r`r_r&rbrkrmrorprrrrr8zs6
.
	




r8)Z
__future__rrrZpip._vendor.sixrZ	constantsrrr	r2rj�setrB�objectrrr.r8rrrr�<module>s
K_vendor/html5lib/treebuilders/__pycache__/etree.cpython-36.opt-1.pyc000064400000026630151733136440021330 0ustar003

�Pf�1�@s�ddlmZmZmZddlmZddlZddlmZddlm	Z	ddlm
Z
dd	l
mZdd
lm
Z
ejd�Zdd
d�Ze
e�ZdS)�)�absolute_import�division�unicode_literals)�	text_typeN�)�base�)�	_ihatexml)�	constants)�
namespaces)�moduleFactoryFactoryz
{([^}]*)}(.*)Fc	s����jd�j�G�fdd�dtj��G�fdd�d���G�fdd�d���G�fdd	�d	���G�fd
d�d����fdd
��	��fdd�}G��������	fdd�dtj�}t�S)NZasdcs�eZdZd$�fdd�	Zdd�Zdd�Zdd	�Zeee�Zd
d�Z	dd
�Z
ee
e	�Zdd�Zdd�Z
eee
�Zdd�Zdd�Zeee�Zdd�Zdd�Zdd�Zdd�Zd%dd�Zd d!�Zd"d#�ZdS)&z getETreeBuilder.<locals>.ElementNcs^||_||_�j|j||��|_|dkr:td|jf|_n|j|jf|_d|_g|_g|_	dS)N�html)
�_name�
_namespace�Element�_getETreeTag�_elementrZ	nameTuple�parent�_childNodes�_flags)�self�name�	namespace)�ElementTree��/usr/lib/python3.6/etree.py�__init__s

z)getETreeBuilder.<locals>.Element.__init__cSs|dkr|}nd||f}|S)Nz{%s}%sr)rrrZ	etree_tagrrrr#sz-getETreeBuilder.<locals>.Element._getETreeTagcSs||_|j|j|j�|j_dS)N)rrrr�tag)rrrrr�_setName*sz)getETreeBuilder.<locals>.Element._setNamecSs|jS)N)r)rrrr�_getName.sz)getETreeBuilder.<locals>.Element._getNamecSs||_|j|j|j�|j_dS)N)rrrrr)rrrrr�
_setNamespace3sz.getETreeBuilder.<locals>.Element._setNamespacecSs|jS)N)r)rrrr�
_getNamespace7sz.getETreeBuilder.<locals>.Element._getNamespacecSs|jjS)N)r�attrib)rrrr�_getAttributes<sz/getETreeBuilder.<locals>.Element._getAttributescSspx"t|jjj��D]}|jj|=qWxF|j�D]:\}}t|t�rVd|d|df}n|}|jj||�q.WdS)Nz{%s}%srr)�listrr"�keys�items�
isinstance�tuple�set)r�
attributes�key�valuerrrr�_setAttributes?s
z/getETreeBuilder.<locals>.Element._setAttributescSs|jS)N)r)rrrr�_getChildNodesMsz/getETreeBuilder.<locals>.Element._getChildNodescSs.|jdd�=g|_x|D]}|j|�qWdS)N)rrZinsertChild)rr,�elementrrr�_setChildNodesPs
z/getETreeBuilder.<locals>.Element._setChildNodescSst|jjpt|j��S)z,Return true if the node has children or text)�boolr�text�len)rrrr�
hasContentXsz+getETreeBuilder.<locals>.Element.hasContentcSs$|jj|�|jj|j�||_dS)N)r�appendrr)r�noderrr�appendChild\sz,getETreeBuilder.<locals>.Element.appendChildcSs,t|j�j|j�}|jj||j�||_dS)N)r$r�index�insertr)rr6ZrefNoder8rrr�insertBeforeasz-getETreeBuilder.<locals>.Element.insertBeforecSs$|jj|�|jj|j�d|_dS)N)r�removerr)rr6rrr�removeChildfsz,getETreeBuilder.<locals>.Element.removeChildcSs�t|j�s,|jjsd|j_|jj|7_n�|dkrb|jdjsLd|jd_|jdj|7_nxt|j�}|j|j�}|dkr�|j|djs�d|j|d_|j|dj|7_n |jjs�d|j_|jj|7_dS)N�rr���r>r>)r3rr2�tailr$r8)r�datar:Zchildrenr8rrr�
insertTextks"

z+getETreeBuilder.<locals>.Element.insertTextcSs8t|�|j|j�}x |jj�D]\}}||j|<qW|S)N)�typerrr*r&)rr/rr,rrr�	cloneNode�sz*getETreeBuilder.<locals>.Element.cloneNodecSsl|jr"|jdjj|jj7_n0|jjs2d|j_|jjdk	rR|jj|jj7_d|j_tjj||�dS)Nrr=r>)�
childNodesrr?r2r�Node�reparentChildren)rZ	newParentrrrrF�sz1getETreeBuilder.<locals>.Element.reparentChildren)N)N)�__name__�
__module__�__qualname__rrrr�propertyrr r!rr#r-r*r.r0rDr4r7r:r<rArCrFr)rrrrs*





rcs2eZdZ�fdd�Zdd�Zdd�Zeee�ZdS)z getETreeBuilder.<locals>.Commentcs"�j|�|_d|_g|_g|_dS)N)�Commentrrrr)rr@)rrrr�sz)getETreeBuilder.<locals>.Comment.__init__cSs|jjS)N)rr2)rrrr�_getData�sz)getETreeBuilder.<locals>.Comment._getDatacSs||j_dS)N)rr2)rr,rrr�_setData�sz)getETreeBuilder.<locals>.Comment._setDataN)rGrHrIrrLrMrJr@r)rrrrK�srKcsLeZdZ�fdd�Zdd�Zdd�Zeee�Zdd�Zd	d
�Z	eee	�Z
dS)z%getETreeBuilder.<locals>.DocumentTypecs$�j|d�||j_||_||_dS)Nz
<!DOCTYPE>)rrr2�publicId�systemId)rrrNrO)rrrr�sz.getETreeBuilder.<locals>.DocumentType.__init__cSs|jjdd�S)NrNr=)r�get)rrrr�_getPublicId�sz2getETreeBuilder.<locals>.DocumentType._getPublicIdcSs|dk	r|jjd|�dS)NrN)rr))rr,rrr�_setPublicId�sz2getETreeBuilder.<locals>.DocumentType._setPublicIdcSs|jjdd�S)NrOr=)rrP)rrrr�_getSystemId�sz2getETreeBuilder.<locals>.DocumentType._getSystemIdcSs|dk	r|jjd|�dS)NrO)rr))rr,rrr�_setSystemId�sz2getETreeBuilder.<locals>.DocumentType._setSystemIdN)rGrHrIrrQrRrJrNrSrTrOr)rrr�DocumentType�s
rUcseZdZ�fdd�ZdS)z!getETreeBuilder.<locals>.Documentcs�j|d�dS)N�
DOCUMENT_ROOT)r)r)rrrr�sz*getETreeBuilder.<locals>.Document.__init__N)rGrHrIrr)rrr�Document�srWcseZdZ�fdd�ZdS)z)getETreeBuilder.<locals>.DocumentFragmentcs�j|d�dS)NZDOCUMENT_FRAGMENT)r)r)rrrr�sz2getETreeBuilder.<locals>.DocumentFragment.__init__N)rGrHrIrr)rrr�DocumentFragment�srXcs*g�d���fdd�	��|d�dj��S)Nrcszt|d�s|j�}|jdkrz|jd�s0|jd�rd|jd�p<d}|jd�pJd}�jd|j||f�n�jd|jf��n�|jdkr�jd	�|jdk	r��jd
d|d|jf�|jdk	r�td
��t|d�r�t|j	�r�td���nJ|j�k�r�jdd||jf��n"t
j|j�}|dk�r,|j}n"|j�\}}t
j|}d||f}�jdd||f�t|d��rg}xb|j	j�D]T\}}	t
j|�}|dk	�r�|j�\}}t
j|}d||f}
n|}
|j|
|	f��q�Wx2t|�D]&\}}	�jdd|d||	f��q�W|j�r0�jd
d|d|jf�|d7}x|D]}�||��q>W|j�rv�jd
d|d|jf�dS)Nrz
<!DOCTYPE>rNrOr=z<!DOCTYPE %s "%s" "%s">z
<!DOCTYPE %s>rVz	#documentz|%s"%s"� rzDocument node cannot have tailr"z$Document node cannot have attributesz|%s<!-- %s -->z%s %sz|%s<%s>z
|%s%s="%s")�hasattr�getrootrrPr5r2r?�	TypeErrorr3r"�
tag_regexp�match�groupsr
�prefixesr&�sorted)r/�indentrNrOZnsmatchr�ns�prefixr*r,Zattr_string�child)�ElementTreeCommentType�rv�serializeElementrrrh�sZ










"
zAgetETreeBuilder.<locals>.testSerializer.<locals>.serializeElement�
)r)�join)r/)rf)rgrhr�testSerializer�s7
z'getETreeBuilder.<locals>.testSerializercs2g�tj�������fdd���|�dj��S)z4Serialize an element and its child nodes to a stringcs�t|�j�r|j�}|jdkr||jd�s2|jd�rf|jd�p>d}|jd�pLd}�jd|j||f�n�jd|jf��n|jdkr�|jdk	r��j|j�|jdk	r�td��t	|d	�r�t
|j�r�td
��x�|D]}�|�q�Wn�|j�k�r�jd|jf�n�|j�s$�jd�j|j�f�n2d
j
�fdd�|jj�D��}�jd|j|f�|j�rj�j|j�x|D]}�|��qpW�jd|jf�|j�r��j|j�dS)Nz
<!DOCTYPE>rNrOr=z<!DOCTYPE %s PUBLIC "%s" "%s">z
<!DOCTYPE %s>rVzDocument node cannot have tailr"z$Document node cannot have attributesz	<!--%s-->z<%s>rYcs"g|]\}}d�j|�|f�qS)z%s="%s")�fromXmlName)�.0rr,)�filterrr�
<listcomp>&szOgetETreeBuilder.<locals>.tostring.<locals>.serializeElement.<locals>.<listcomp>z<%s %s>z</%s>)r'rr[rrPr5r2r?r\rZr3r"rlrjr&)r/rNrOre�attr)rrfrnrgrhrrrhs@





z;getETreeBuilder.<locals>.tostring.<locals>.serializeElementr=)r	Z
InfosetFilterrj)r/)rrf)rnrgrhr�tostrings
-z!getETreeBuilder.<locals>.tostringcsDeZdZ�Z�Z�Z�Z�Z�Z�fdd�Z	�fdd�Z
dd�ZdS)z$getETreeBuilder.<locals>.TreeBuildercs�|�S)Nr)rr/)rkrrrkAsz3getETreeBuilder.<locals>.TreeBuilder.testSerializercs<�r|jjS|jdk	r*|jjjd|j�S|jjjd�SdS)Nz{%s}htmlr
)ZdocumentrZdefaultNamespace�find)r)�fullTreerr�getDocumentDs
z0getETreeBuilder.<locals>.TreeBuilder.getDocumentcSstjj|�jS)N)r�TreeBuilder�getFragmentr)rrrrrvNsz0getETreeBuilder.<locals>.TreeBuilder.getFragmentN)rGrHrIZ
documentClassZdoctypeClassZelementClassZcommentClassZ
fragmentClass�implementationrkrtrvr)rKrWrXrUr�ElementTreeImplementationrsrkrrru9s
ru)rKrrrEru�locals)rxrsrqrur)
rKrWrXrUrrrfrxrsrkr�getETreeBuilders~>6$rz)F)Z
__future__rrrZpip._vendor.sixr�rer=rr	r
rZ_utilsr�compiler]rzZgetETreeModulerrrr�<module>s

E_vendor/html5lib/treebuilders/__init__.py000064400000006516151733136440014541 0ustar00"""A collection of modules for building different kinds of tree from
HTML documents.

To create a treebuilder for a new type of tree, you need to
implement several things:

1) A set of classes for various types of elements: Document, Doctype,
Comment, Element. These must implement the interface of
treebuilders.base.Node (although comment nodes have a different
signature for their constructor, see treebuilders.etree.Comment)
Textual content may also be implemented as another node type, or not, as
your tree implementation requires.

2) A treebuilder object (called TreeBuilder by convention) that
inherits from treebuilders.base.TreeBuilder. This has 4 required attributes:
documentClass - the class to use for the bottommost node of a document
elementClass - the class to use for HTML Elements
commentClass - the class to use for comments
doctypeClass - the class to use for doctypes
It also has one required method:
getDocument - Returns the root node of the complete document tree

3) If you wish to run the unit tests, you must also create a
testSerializer method on your treebuilder which accepts a node and
returns a string containing Node and its children serialized according
to the format used in the unittests
"""

from __future__ import absolute_import, division, unicode_literals

from .._utils import default_etree

treeBuilderCache = {}


def getTreeBuilder(treeType, implementation=None, **kwargs):
    """Return a TreeBuilder class for the requested kind of tree.

    treeType - case-insensitive name of the tree flavour.  Supported values:

               "dom"   - builds trees for DOM implementations; defaults to
                         an xml.dom.minidom based implementation.
               "etree" - builds trees exposing an ElementTree-like
                         interface; defaults to xml.etree.cElementTree when
                         available, xml.etree.ElementTree otherwise.
               "lxml"  - etree-style builder for lxml.etree, working around
                         the limitations of lxml's implementation.

    implementation - (currently applies to the "etree" and "dom" tree
                      types) a module implementing the tree type, e.g.
                      xml.etree.ElementTree or xml.etree.cElementTree."""

    treeType = treeType.lower()
    # Only "lxml" is ever cached at this level; the dom and etree flavours
    # are cached per-implementation inside their own submodules.
    if treeType in treeBuilderCache:
        return treeBuilderCache[treeType]

    if treeType == "dom":
        from . import dom
        if implementation is None:
            # Come up with a sane default (pref. from the stdlib)
            from xml.dom import minidom
            implementation = minidom
        # NEVER cache here, caching is done in the dom submodule
        return dom.getDomModule(implementation, **kwargs).TreeBuilder
    if treeType == "etree":
        from . import etree
        if implementation is None:
            implementation = default_etree
        # NEVER cache here, caching is done in the etree submodule
        return etree.getETreeModule(implementation, **kwargs).TreeBuilder
    if treeType == "lxml":
        from . import etree_lxml
        treeBuilderCache[treeType] = etree_lxml.TreeBuilder
        return treeBuilderCache[treeType]

    raise ValueError("""Unrecognised treebuilder "%s" """ % treeType)
_vendor/html5lib/treebuilders/dom.py000064400000021203151733136440013547 0ustar00from __future__ import absolute_import, division, unicode_literals


from collections import MutableMapping
from xml.dom import minidom, Node
import weakref

from . import base
from .. import constants
from ..constants import namespaces
from .._utils import moduleFactoryFactory


def getDomBuilder(DomImplementation):
    """Build the treebuilder namespace for a given ``xml.dom`` implementation.

    Returns ``locals()``: moduleFactoryFactory exposes that mapping as a
    cached pseudo-module, so every name defined in this function becomes an
    attribute of the generated module (renaming any local would change the
    module's public surface).
    """
    Dom = DomImplementation

    class AttrList(MutableMapping):
        """Mutable-mapping view over a DOM element's attribute map.

        Tuple (namespaced) keys are rejected here; namespaced attributes go
        through NodeBuilder.setAttributes instead.
        """

        def __init__(self, element):
            self.element = element

        def __iter__(self):
            return iter(self.element.attributes.keys())

        def __setitem__(self, name, value):
            if isinstance(name, tuple):
                raise NotImplementedError
            else:
                # Attribute nodes must be created by the owner document.
                attr = self.element.ownerDocument.createAttribute(name)
                attr.value = value
                self.element.attributes[name] = attr

        def __len__(self):
            return len(self.element.attributes)

        def items(self):
            return list(self.element.attributes.items())

        def values(self):
            return list(self.element.attributes.values())

        def __getitem__(self, name):
            if isinstance(name, tuple):
                raise NotImplementedError
            else:
                return self.element.attributes[name].value

        def __delitem__(self, name):
            if isinstance(name, tuple):
                raise NotImplementedError
            else:
                del self.element.attributes[name]

    class NodeBuilder(base.Node):
        """base.Node adapter wrapping a real DOM node in ``self.element``."""

        def __init__(self, element):
            base.Node.__init__(self, element.nodeName)
            self.element = element

        # Some DOM node types have no namespaceURI attribute; report None
        # for those (and for an explicit null namespace).
        namespace = property(lambda self: hasattr(self.element, "namespaceURI") and
                             self.element.namespaceURI or None)

        def appendChild(self, node):
            node.parent = self
            self.element.appendChild(node.element)

        def insertText(self, data, insertBefore=None):
            # DOM has first-class text nodes, so this is a plain insert.
            text = self.element.ownerDocument.createTextNode(data)
            if insertBefore:
                self.element.insertBefore(text, insertBefore.element)
            else:
                self.element.appendChild(text)

        def insertBefore(self, node, refNode):
            self.element.insertBefore(node.element, refNode.element)
            node.parent = self

        def removeChild(self, node):
            # Guard: only detach if the DOM still agrees we are the parent.
            if node.element.parentNode == self.element:
                self.element.removeChild(node.element)
            node.parent = None

        def reparentChildren(self, newParent):
            # Move children one at a time; appendChild implicitly detaches
            # each child from its previous parent.
            while self.element.hasChildNodes():
                child = self.element.firstChild
                self.element.removeChild(child)
                newParent.element.appendChild(child)
            self.childNodes = []

        def getAttributes(self):
            return AttrList(self.element)

        def setAttributes(self, attributes):
            if attributes:
                for name, value in list(attributes.items()):
                    if isinstance(name, tuple):
                        # Tuple keys are (prefix, localName, namespaceURI).
                        if name[0] is not None:
                            qualifiedName = (name[0] + ":" + name[1])
                        else:
                            qualifiedName = name[1]
                        self.element.setAttributeNS(name[2], qualifiedName,
                                                    value)
                    else:
                        self.element.setAttribute(
                            name, value)
        attributes = property(getAttributes, setAttributes)

        def cloneNode(self):
            # Shallow clone: attributes only, no children.
            return NodeBuilder(self.element.cloneNode(False))

        def hasContent(self):
            return self.element.hasChildNodes()

        def getNameTuple(self):
            # Nodes with no namespace are treated as HTML-namespace nodes.
            if self.namespace is None:
                return namespaces["html"], self.name
            else:
                return self.namespace, self.name

        nameTuple = property(getNameTuple)

    class TreeBuilder(base.TreeBuilder):  # pylint:disable=unused-variable
        # Note: documentClass/elementClass/... are factory *methods* here
        # (the DOM document must create its own nodes), unlike the etree
        # builder where they are plain classes.
        def documentClass(self):
            self.dom = Dom.getDOMImplementation().createDocument(None, None, None)
            # Proxy avoids a reference cycle between builder and document.
            return weakref.proxy(self)

        def insertDoctype(self, token):
            name = token["name"]
            publicId = token["publicId"]
            systemId = token["systemId"]

            domimpl = Dom.getDOMImplementation()
            doctype = domimpl.createDocumentType(name, publicId, systemId)
            self.document.appendChild(NodeBuilder(doctype))
            if Dom == minidom:
                # minidom's createDocumentType leaves ownerDocument unset.
                doctype.ownerDocument = self.dom

        def elementClass(self, name, namespace=None):
            if namespace is None and self.defaultNamespace is None:
                node = self.dom.createElement(name)
            else:
                node = self.dom.createElementNS(namespace, name)

            return NodeBuilder(node)

        def commentClass(self, data):
            return NodeBuilder(self.dom.createComment(data))

        def fragmentClass(self):
            return NodeBuilder(self.dom.createDocumentFragment())

        def appendChild(self, node):
            self.dom.appendChild(node.element)

        def testSerializer(self, element):
            # Resolved from the enclosing function scope (defined below).
            return testSerializer(element)

        def getDocument(self):
            return self.dom

        def getFragment(self):
            return base.TreeBuilder.getFragment(self).element

        def insertText(self, data, parent=None):
            data = data  # no-op kept from upstream
            if parent != self:
                base.TreeBuilder.insertText(self, data, parent)
            else:
                # HACK: allow text nodes as children of the document node
                if hasattr(self.dom, '_child_node_types'):
                    # pylint:disable=protected-access
                    if Node.TEXT_NODE not in self.dom._child_node_types:
                        self.dom._child_node_types = list(self.dom._child_node_types)
                        self.dom._child_node_types.append(Node.TEXT_NODE)
                self.dom.appendChild(self.dom.createTextNode(data))

        implementation = DomImplementation
        name = None

    def testSerializer(element):
        """Serialize *element* in the indented format used by the test suite."""
        element.normalize()
        rv = []

        def serializeElement(element, indent=0):
            if element.nodeType == Node.DOCUMENT_TYPE_NODE:
                if element.name:
                    if element.publicId or element.systemId:
                        publicId = element.publicId or ""
                        systemId = element.systemId or ""
                        rv.append("""|%s<!DOCTYPE %s "%s" "%s">""" %
                                  (' ' * indent, element.name, publicId, systemId))
                    else:
                        rv.append("|%s<!DOCTYPE %s>" % (' ' * indent, element.name))
                else:
                    rv.append("|%s<!DOCTYPE >" % (' ' * indent,))
            elif element.nodeType == Node.DOCUMENT_NODE:
                rv.append("#document")
            elif element.nodeType == Node.DOCUMENT_FRAGMENT_NODE:
                rv.append("#document-fragment")
            elif element.nodeType == Node.COMMENT_NODE:
                rv.append("|%s<!-- %s -->" % (' ' * indent, element.nodeValue))
            elif element.nodeType == Node.TEXT_NODE:
                rv.append("|%s\"%s\"" % (' ' * indent, element.nodeValue))
            else:
                # Ordinary element: prefix foreign-namespace names.
                if (hasattr(element, "namespaceURI") and
                        element.namespaceURI is not None):
                    name = "%s %s" % (constants.prefixes[element.namespaceURI],
                                      element.nodeName)
                else:
                    name = element.nodeName
                rv.append("|%s<%s>" % (' ' * indent, name))
                if element.hasAttributes():
                    attributes = []
                    for i in range(len(element.attributes)):
                        attr = element.attributes.item(i)
                        name = attr.nodeName
                        value = attr.value
                        ns = attr.namespaceURI
                        if ns:
                            name = "%s %s" % (constants.prefixes[ns], attr.localName)
                        else:
                            name = attr.nodeName
                        attributes.append((name, value))

                    # Sorted for deterministic output.
                    for name, value in sorted(attributes):
                        rv.append('|%s%s="%s"' % (' ' * (indent + 2), name, value))
            indent += 2
            for child in element.childNodes:
                serializeElement(child, indent)
        serializeElement(element, 0)

        return "\n".join(rv)

    return locals()


# Wrap getDomBuilder's locals() up as a module-like object, one per DOM
# implementation (see _utils.moduleFactoryFactory).
getDomModule = moduleFactoryFactory(getDomBuilder)
_vendor/html5lib/treebuilders/etree.py000064400000030734151733136440014105 0ustar00from __future__ import absolute_import, division, unicode_literals
# pylint:disable=protected-access

from pip._vendor.six import text_type

import re

from . import base
from .. import _ihatexml
from .. import constants
from ..constants import namespaces
from .._utils import moduleFactoryFactory

tag_regexp = re.compile("{([^}]*)}(.*)")


def getETreeBuilder(ElementTreeImplementation, fullTree=False):
    """Build the treebuilder namespace for an ElementTree-compatible module.

    ElementTreeImplementation - a module exposing the ElementTree API
                                (e.g. xml.etree.ElementTree).
    fullTree - when True, getDocument() returns the document root element
               instead of just the <html> element.

    Returns ``locals()``: moduleFactoryFactory exposes that mapping as a
    cached pseudo-module, so every name defined in this function becomes an
    attribute of the generated module (renaming any local would change the
    module's public surface).
    """
    ElementTree = ElementTreeImplementation
    # ElementTree marks comment nodes by using the Comment factory itself
    # as the tag; capture it from a throwaway comment so comment nodes can
    # be recognised in the serializers below.
    ElementTreeCommentType = ElementTree.Comment("asd").tag

    class Element(base.Node):
        """base.Node adapter wrapping a real ElementTree element."""

        def __init__(self, name, namespace=None):
            self._name = name
            self._namespace = namespace
            self._element = ElementTree.Element(self._getETreeTag(name,
                                                                  namespace))
            # Namespace-less nodes are treated as HTML-namespace nodes.
            if namespace is None:
                self.nameTuple = namespaces["html"], self._name
            else:
                self.nameTuple = self._namespace, self._name
            self.parent = None
            self._childNodes = []
            self._flags = []

        def _getETreeTag(self, name, namespace):
            # ElementTree encodes namespaces in Clark notation "{ns}name".
            if namespace is None:
                etree_tag = name
            else:
                etree_tag = "{%s}%s" % (namespace, name)
            return etree_tag

        def _setName(self, name):
            self._name = name
            self._element.tag = self._getETreeTag(self._name, self._namespace)

        def _getName(self):
            return self._name

        name = property(_getName, _setName)

        def _setNamespace(self, namespace):
            self._namespace = namespace
            self._element.tag = self._getETreeTag(self._name, self._namespace)

        def _getNamespace(self):
            return self._namespace

        namespace = property(_getNamespace, _setNamespace)

        def _getAttributes(self):
            return self._element.attrib

        def _setAttributes(self, attributes):
            # Delete existing attributes first
            # XXX - there may be a better way to do this...
            for key in list(self._element.attrib.keys()):
                del self._element.attrib[key]
            for key, value in attributes.items():
                if isinstance(key, tuple):
                    # Tuple keys are (prefix, localName, namespaceURI).
                    name = "{%s}%s" % (key[2], key[1])
                else:
                    name = key
                self._element.set(name, value)

        attributes = property(_getAttributes, _setAttributes)

        def _getChildNodes(self):
            return self._childNodes

        def _setChildNodes(self, value):
            del self._element[:]
            self._childNodes = []
            for element in value:
                self.insertChild(element)

        childNodes = property(_getChildNodes, _setChildNodes)

        def hasContent(self):
            """Return true if the node has children or text"""
            return bool(self._element.text or len(self._element))

        def appendChild(self, node):
            self._childNodes.append(node)
            self._element.append(node._element)
            node.parent = self

        def insertBefore(self, node, refNode):
            index = list(self._element).index(refNode._element)
            self._element.insert(index, node._element)
            node.parent = self

        def removeChild(self, node):
            self._childNodes.remove(node)
            self._element.remove(node._element)
            node.parent = None

        def insertText(self, data, insertBefore=None):
            # ElementTree has no text nodes: leading text lives in the
            # parent's .text and trailing text in the preceding sibling's
            # .tail, hence the three cases below.
            if not(len(self._element)):
                if not self._element.text:
                    self._element.text = ""
                self._element.text += data
            elif insertBefore is None:
                # Insert the text as the tail of the last child element
                if not self._element[-1].tail:
                    self._element[-1].tail = ""
                self._element[-1].tail += data
            else:
                # Insert the text before the specified node
                children = list(self._element)
                index = children.index(insertBefore._element)
                if index > 0:
                    if not self._element[index - 1].tail:
                        self._element[index - 1].tail = ""
                    self._element[index - 1].tail += data
                else:
                    if not self._element.text:
                        self._element.text = ""
                    self._element.text += data

        def cloneNode(self):
            # Shallow clone: attributes only, no children or text.
            element = type(self)(self.name, self.namespace)
            for name, value in self.attributes.items():
                element.attributes[name] = value
            return element

        def reparentChildren(self, newParent):
            # Move our leading text across before the base class moves the
            # child nodes themselves.
            if newParent.childNodes:
                # NOTE(review): if the last child's .tail or our .text is
                # None this += raises TypeError; appears to rely on callers
                # having populated both -- confirm.
                newParent.childNodes[-1]._element.tail += self._element.text
            else:
                if not newParent._element.text:
                    newParent._element.text = ""
                if self._element.text is not None:
                    newParent._element.text += self._element.text
            self._element.text = ""
            base.Node.reparentChildren(self, newParent)

    class Comment(Element):
        def __init__(self, data):
            # Element.__init__ is deliberately NOT called: a comment has no
            # tag, so the wrapped node is built directly from the Comment
            # factory and the remaining Node attributes are set by hand.
            self._element = ElementTree.Comment(data)
            self.parent = None
            self._childNodes = []
            self._flags = []

        def _getData(self):
            return self._element.text

        def _setData(self, value):
            self._element.text = value

        data = property(_getData, _setData)

    class DocumentType(Element):
        # Modelled as a fake element with tag "<!DOCTYPE>"; the doctype
        # name is stored in .text, the ids as attributes.
        def __init__(self, name, publicId, systemId):
            Element.__init__(self, "<!DOCTYPE>")
            self._element.text = name
            self.publicId = publicId
            self.systemId = systemId

        def _getPublicId(self):
            return self._element.get("publicId", "")

        def _setPublicId(self, value):
            if value is not None:
                self._element.set("publicId", value)

        publicId = property(_getPublicId, _setPublicId)

        def _getSystemId(self):
            return self._element.get("systemId", "")

        def _setSystemId(self, value):
            if value is not None:
                self._element.set("systemId", value)

        systemId = property(_getSystemId, _setSystemId)

    class Document(Element):
        # Fake root element; ElementTree has no document node of its own.
        def __init__(self):
            Element.__init__(self, "DOCUMENT_ROOT")

    class DocumentFragment(Element):
        def __init__(self):
            Element.__init__(self, "DOCUMENT_FRAGMENT")

    def testSerializer(element):
        """Serialize *element* in the indented format used by the test suite."""
        rv = []

        def serializeElement(element, indent=0):
            # Accept an ElementTree wrapper as well as a bare element.
            if not(hasattr(element, "tag")):
                element = element.getroot()
            if element.tag == "<!DOCTYPE>":
                if element.get("publicId") or element.get("systemId"):
                    publicId = element.get("publicId") or ""
                    systemId = element.get("systemId") or ""
                    rv.append("""<!DOCTYPE %s "%s" "%s">""" %
                              (element.text, publicId, systemId))
                else:
                    rv.append("<!DOCTYPE %s>" % (element.text,))
            elif element.tag == "DOCUMENT_ROOT":
                rv.append("#document")
                if element.text is not None:
                    rv.append("|%s\"%s\"" % (' ' * (indent + 2), element.text))
                if element.tail is not None:
                    raise TypeError("Document node cannot have tail")
                if hasattr(element, "attrib") and len(element.attrib):
                    raise TypeError("Document node cannot have attributes")
            elif element.tag == ElementTreeCommentType:
                rv.append("|%s<!-- %s -->" % (' ' * indent, element.text))
            else:
                assert isinstance(element.tag, text_type), \
                    "Expected unicode, got %s, %s" % (type(element.tag), element.tag)
                nsmatch = tag_regexp.match(element.tag)

                if nsmatch is None:
                    name = element.tag
                else:
                    # Foreign-namespace name: show the conventional prefix.
                    ns, name = nsmatch.groups()
                    prefix = constants.prefixes[ns]
                    name = "%s %s" % (prefix, name)
                rv.append("|%s<%s>" % (' ' * indent, name))

                if hasattr(element, "attrib"):
                    attributes = []
                    for name, value in element.attrib.items():
                        nsmatch = tag_regexp.match(name)
                        if nsmatch is not None:
                            ns, name = nsmatch.groups()
                            prefix = constants.prefixes[ns]
                            attr_string = "%s %s" % (prefix, name)
                        else:
                            attr_string = name
                        attributes.append((attr_string, value))

                    # Sorted for deterministic output.
                    for name, value in sorted(attributes):
                        rv.append('|%s%s="%s"' % (' ' * (indent + 2), name, value))
                if element.text:
                    rv.append("|%s\"%s\"" % (' ' * (indent + 2), element.text))
            indent += 2
            for child in element:
                serializeElement(child, indent)
            # Tail text belongs to the parent's indentation level.
            if element.tail:
                rv.append("|%s\"%s\"" % (' ' * (indent - 2), element.tail))
        serializeElement(element, 0)

        return "\n".join(rv)

    def tostring(element):  # pylint:disable=unused-variable
        """Serialize an element and its child nodes to a string"""
        rv = []
        filter = _ihatexml.InfosetFilter()

        def serializeElement(element):
            if isinstance(element, ElementTree.ElementTree):
                element = element.getroot()

            if element.tag == "<!DOCTYPE>":
                if element.get("publicId") or element.get("systemId"):
                    publicId = element.get("publicId") or ""
                    systemId = element.get("systemId") or ""
                    rv.append("""<!DOCTYPE %s PUBLIC "%s" "%s">""" %
                              (element.text, publicId, systemId))
                else:
                    rv.append("<!DOCTYPE %s>" % (element.text,))
            elif element.tag == "DOCUMENT_ROOT":
                if element.text is not None:
                    rv.append(element.text)
                if element.tail is not None:
                    raise TypeError("Document node cannot have tail")
                if hasattr(element, "attrib") and len(element.attrib):
                    raise TypeError("Document node cannot have attributes")

                for child in element:
                    serializeElement(child)

            elif element.tag == ElementTreeCommentType:
                rv.append("<!--%s-->" % (element.text,))
            else:
                # This is assumed to be an ordinary element
                if not element.attrib:
                    rv.append("<%s>" % (filter.fromXmlName(element.tag),))
                else:
                    attr = " ".join(["%s=\"%s\"" % (
                        filter.fromXmlName(name), value)
                        for name, value in element.attrib.items()])
                    rv.append("<%s %s>" % (element.tag, attr))
                if element.text:
                    rv.append(element.text)

                for child in element:
                    serializeElement(child)

                rv.append("</%s>" % (element.tag,))

            if element.tail:
                rv.append(element.tail)

        serializeElement(element)

        return "".join(rv)

    class TreeBuilder(base.TreeBuilder):  # pylint:disable=unused-variable
        documentClass = Document
        doctypeClass = DocumentType
        elementClass = Element
        commentClass = Comment
        fragmentClass = DocumentFragment
        implementation = ElementTreeImplementation

        def testSerializer(self, element):
            # Resolved from the enclosing function scope (defined above).
            return testSerializer(element)

        def getDocument(self):
            if fullTree:
                return self.document._element
            else:
                # Skip the fake DOCUMENT_ROOT and return <html> itself.
                if self.defaultNamespace is not None:
                    return self.document._element.find(
                        "{%s}html" % self.defaultNamespace)
                else:
                    return self.document._element.find("html")

        def getFragment(self):
            return base.TreeBuilder.getFragment(self)._element

    return locals()


getETreeModule = moduleFactoryFactory(getETreeBuilder)
_vendor/html5lib/treebuilders/base.py000064400000033166151733136440013715 0ustar00from __future__ import absolute_import, division, unicode_literals
from pip._vendor.six import text_type

from ..constants import scopingElements, tableInsertModeElements, namespaces

# The scope markers are inserted when entering object elements,
# marquees, table cells, and table captions, and are used to prevent formatting
# from "leaking" into tables, object elements, and marquees.
# ``None`` itself doubles as the marker object.
Marker = None

# variant name -> (frozenset of (namespace, tag) name-tuples, invert flag).
# Presumably consulted by element-in-scope checks (the consumer is outside
# this view); the True flag on "select" suggests membership is inverted for
# that variant -- TODO confirm against the scope logic.
listElementsMap = {
    None: (frozenset(scopingElements), False),
    "button": (frozenset(scopingElements | set([(namespaces["html"], "button")])), False),
    "list": (frozenset(scopingElements | set([(namespaces["html"], "ol"),
                                              (namespaces["html"], "ul")])), False),
    "table": (frozenset([(namespaces["html"], "html"),
                         (namespaces["html"], "table")]), False),
    "select": (frozenset([(namespaces["html"], "optgroup"),
                          (namespaces["html"], "option")]), True)
}


class Node(object):
    """Abstract tree node; concrete treebuilders subclass this.

    Attributes:
        name - The tag name associated with the node
        parent - The parent of the current node (or None for the document node)
        value - The value of the current node (applies to text nodes and
            comments)
        attributes - a dict holding name, value pairs for attributes of the node
        childNodes - a list of child nodes of the current node. This must
            include all elements but not necessarily other node types
        _flags - A list of miscellaneous flags that can be set on the node
    """

    def __init__(self, name):
        self.name = name
        self.parent = None
        self.value = None
        self.attributes = {}
        self.childNodes = []
        self._flags = []

    def __str__(self):
        rendered = " ".join("%s=\"%s\"" % pair
                            for pair in self.attributes.items())
        if rendered:
            return "<%s %s>" % (self.name, rendered)
        return "<%s>" % (self.name,)

    def __repr__(self):
        return "<%s>" % (self.name,)

    def appendChild(self, node):
        """Insert node as a child of the current node."""
        raise NotImplementedError

    def insertText(self, data, insertBefore=None):
        """Insert data as text in the current node, positioned before the
        start of node insertBefore or at the end of the node's text."""
        raise NotImplementedError

    def insertBefore(self, node, refNode):
        """Insert node as a child of the current node, before refNode in the
        list of child nodes. Raises ValueError if refNode is not a child of
        the current node."""
        raise NotImplementedError

    def removeChild(self, node):
        """Remove node from the children of the current node."""
        raise NotImplementedError

    def reparentChildren(self, newParent):
        """Move all the children of the current node to newParent.

        This is needed so that trees that don't store text as nodes move the
        text in the correct way.
        """
        # XXX - should this method be made more general?
        # Iterate a snapshot so subclass appendChild implementations that
        # touch child lists cannot disturb the traversal.
        for child in list(self.childNodes):
            newParent.appendChild(child)
        self.childNodes = []

    def cloneNode(self):
        """Return a shallow copy of the current node: a node with the same
        name and attributes but with no parent or child nodes."""
        raise NotImplementedError

    def hasContent(self):
        """Return true if the node has children or text, false otherwise."""
        raise NotImplementedError


class ActiveFormattingElements(list):
    """List of active formatting elements enforcing the HTML5 "Noah's Ark"
    clause: at most three equal entries since the last Marker."""

    def append(self, node):
        # Scan back to the most recent Marker; if three entries equal to
        # node already exist, drop the oldest of them before appending.
        if node != Marker:
            matches = 0
            for candidate in self[::-1]:
                if candidate == Marker:
                    break
                if self.nodesEqual(candidate, node):
                    matches += 1
                if matches == 3:
                    self.remove(candidate)
                    break
        list.append(self, node)

    def nodesEqual(self, node1, node2):
        """Two nodes are equal when both their (namespace, name) tuple and
        their attribute dicts match."""
        return (node1.nameTuple == node2.nameTuple and
                node1.attributes == node2.attributes)


class TreeBuilder(object):
    """Base treebuilder implementation
    documentClass - the class to use for the bottommost node of a document
    elementClass - the class to use for HTML Elements
    commentClass - the class to use for comments
    doctypeClass - the class to use for doctypes
    """
    # pylint:disable=not-callable

    # Document class
    documentClass = None

    # The class to use for creating a node
    elementClass = None

    # The class to use for creating comments
    commentClass = None

    # The class to use for creating doctypes
    doctypeClass = None

    # Fragment class
    fragmentClass = None

    def __init__(self, namespaceHTMLElements):
        """Create the builder; namespaceHTMLElements selects whether HTML
        elements get the XHTML namespace or a None namespace."""
        if namespaceHTMLElements:
            self.defaultNamespace = "http://www.w3.org/1999/xhtml"
        else:
            self.defaultNamespace = None
        self.reset()

    def reset(self):
        """Reset all parse state so the builder can be reused."""
        self.openElements = []
        self.activeFormattingElements = ActiveFormattingElements()

        # XXX - rename these to headElement, formElement
        self.headPointer = None
        self.formPointer = None

        self.insertFromTable = False

        self.document = self.documentClass()

    def elementInScope(self, target, variant=None):
        """Return True if target is "in scope" on the stack of open elements.

        target may be a node (matched by identity) or a tag-name string;
        variant names which scope definition from listElementsMap to apply.
        """

        # If we pass a node in we match that. if we pass a string
        # match any node with that name
        exactNode = hasattr(target, "nameTuple")
        if not exactNode:
            if isinstance(target, text_type):
                target = (namespaces["html"], target)
            assert isinstance(target, tuple)

        listElements, invert = listElementsMap[variant]

        for node in reversed(self.openElements):
            if exactNode and node == target:
                return True
            elif not exactNode and node.nameTuple == target:
                return True
            elif (invert ^ (node.nameTuple in listElements)):
                # Reached a scope boundary before finding the target.
                return False

        assert False  # We should never reach this point

    def reconstructActiveFormattingElements(self):
        """Reopen formatting elements that were implicitly closed (HTML5
        "reconstruct the active formatting elements" algorithm)."""
        # Within this algorithm the order of steps described in the
        # specification is not quite the same as the order of steps in the
        # code. It should still do the same though.

        # Step 1: stop the algorithm when there's nothing to do.
        if not self.activeFormattingElements:
            return

        # Step 2 and step 3: we start with the last element. So i is -1.
        i = len(self.activeFormattingElements) - 1
        entry = self.activeFormattingElements[i]
        if entry == Marker or entry in self.openElements:
            return

        # Step 6
        while entry != Marker and entry not in self.openElements:
            if i == 0:
                # This will be reset to 0 below
                i = -1
                break
            i -= 1
            # Step 5: let entry be one earlier in the list.
            entry = self.activeFormattingElements[i]

        while True:
            # Step 7
            i += 1

            # Step 8
            entry = self.activeFormattingElements[i]
            clone = entry.cloneNode()  # Mainly to get a new copy of the attributes

            # Step 9
            element = self.insertElement({"type": "StartTag",
                                          "name": clone.name,
                                          "namespace": clone.namespace,
                                          "data": clone.attributes})

            # Step 10
            self.activeFormattingElements[i] = element

            # Step 11
            if element == self.activeFormattingElements[-1]:
                break

    def clearActiveFormattingElements(self):
        """Pop entries up to and including the most recent Marker."""
        entry = self.activeFormattingElements.pop()
        while self.activeFormattingElements and entry != Marker:
            entry = self.activeFormattingElements.pop()

    def elementInActiveFormattingElements(self, name):
        """Check if an element exists between the end of the active
        formatting elements and the last marker. If it does, return it, else
        return false"""

        for item in self.activeFormattingElements[::-1]:
            # Check for Marker first because if it's a Marker it doesn't have a
            # name attribute.
            if item == Marker:
                break
            elif item.name == name:
                return item
        return False

    def insertRoot(self, token):
        """Create the root element from token and attach it to the document."""
        element = self.createElement(token)
        self.openElements.append(element)
        self.document.appendChild(element)

    def insertDoctype(self, token):
        """Append a doctype node built from a Doctype token."""
        name = token["name"]
        publicId = token["publicId"]
        systemId = token["systemId"]

        doctype = self.doctypeClass(name, publicId, systemId)
        self.document.appendChild(doctype)

    def insertComment(self, token, parent=None):
        """Append a comment node; defaults to the current open element."""
        if parent is None:
            parent = self.openElements[-1]
        parent.appendChild(self.commentClass(token["data"]))

    def createElement(self, token):
        """Create an element but don't insert it anywhere"""
        name = token["name"]
        namespace = token.get("namespace", self.defaultNamespace)
        element = self.elementClass(name, namespace)
        element.attributes = token["data"]
        return element

    def _getInsertFromTable(self):
        return self._insertFromTable

    def _setInsertFromTable(self, value):
        """Switch the function used to insert an element from the
        normal one to the misnested table one and back again"""
        self._insertFromTable = value
        if value:
            self.insertElement = self.insertElementTable
        else:
            self.insertElement = self.insertElementNormal

    insertFromTable = property(_getInsertFromTable, _setInsertFromTable)

    def insertElementNormal(self, token):
        """Create an element from token, append it to the current node and
        push it on the stack of open elements."""
        name = token["name"]
        assert isinstance(name, text_type), "Element %s not unicode" % name
        namespace = token.get("namespace", self.defaultNamespace)
        element = self.elementClass(name, namespace)
        element.attributes = token["data"]
        self.openElements[-1].appendChild(element)
        self.openElements.append(element)
        return element

    def insertElementTable(self, token):
        """Create an element and insert it into the tree"""
        element = self.createElement(token)
        if self.openElements[-1].name not in tableInsertModeElements:
            return self.insertElementNormal(token)
        else:
            # We should be in the InTable mode. This means we want to do
            # special magic element rearranging
            parent, insertBefore = self.getTableMisnestedNodePosition()
            if insertBefore is None:
                parent.appendChild(element)
            else:
                parent.insertBefore(element, insertBefore)
            self.openElements.append(element)
        return element

    def insertText(self, data, parent=None):
        """Insert text data."""
        if parent is None:
            parent = self.openElements[-1]

        if (not self.insertFromTable or (self.insertFromTable and
                                         self.openElements[-1].name
                                         not in tableInsertModeElements)):
            parent.insertText(data)
        else:
            # We should be in the InTable mode. This means we want to do
            # special magic element rearranging
            parent, insertBefore = self.getTableMisnestedNodePosition()
            parent.insertText(data, insertBefore)

    def getTableMisnestedNodePosition(self):
        """Get the foster parent element, and sibling to insert before
        (or None) when inserting a misnested table node"""
        # The foster parent element is the one which comes before the most
        # recently opened table element
        # XXX - this is really inelegant
        lastTable = None
        fosterParent = None
        insertBefore = None
        for elm in self.openElements[::-1]:
            if elm.name == "table":
                lastTable = elm
                break
        if lastTable:
            # XXX - we should really check that this parent is actually a
            # node here
            if lastTable.parent:
                fosterParent = lastTable.parent
                insertBefore = lastTable
            else:
                # Detached table: foster-parent into the element just below
                # it on the stack of open elements.
                fosterParent = self.openElements[
                    self.openElements.index(lastTable) - 1]
        else:
            fosterParent = self.openElements[0]
        return fosterParent, insertBefore

    def generateImpliedEndTags(self, exclude=None):
        """Recursively pop elements whose end tags can be implied, stopping
        at (and keeping) an element named exclude."""
        name = self.openElements[-1].name
        # XXX td, th and tr are not actually needed
        if (name in frozenset(("dd", "dt", "li", "option", "optgroup", "p", "rp", "rt")) and
                name != exclude):
            self.openElements.pop()
            # XXX This is not entirely what the specification says. We should
            # investigate it more closely.
            self.generateImpliedEndTags(exclude)

    def getDocument(self):
        "Return the final tree"
        return self.document

    def getFragment(self):
        "Return the final fragment"
        # assert self.innerHTML
        fragment = self.fragmentClass()
        self.openElements[0].reparentChildren(fragment)
        return fragment

    def testSerializer(self, node):
        """Serialize the subtree of node in the format required by unit tests
        node - the node from which to start serializing"""
        raise NotImplementedError
_vendor/html5lib/treebuilders/etree_lxml.py000064400000033521151733136440015136 0ustar00"""Module for supporting the lxml.etree library. The idea here is to use as much
of the native library as possible, without using fragile hacks like custom element
names that break between releases. The downside of this is that we cannot represent
all possible trees; specifically the following are known to cause problems:

Text or comments as siblings of the root element
Docypes with no name

When any of these things occur, we emit a DataLossWarning
"""

from __future__ import absolute_import, division, unicode_literals
# pylint:disable=protected-access

import warnings
import re
import sys

from . import base
from ..constants import DataLossWarning
from .. import constants
from . import etree as etree_builders
from .. import _ihatexml

import lxml.etree as etree


# When True, getDocument() returns the whole ElementTree; otherwise the root.
fullTree = True
# Splits a Clark-notation tag "{namespace}local" into its two groups.
tag_regexp = re.compile("{([^}]*)}(.*)")

# lxml marks comment nodes with a special tag; capture that sentinel once so
# nodes can be tested with `element.tag == comment_type`.
comment_type = etree.Comment("asd").tag


class DocumentType(object):
    """Plain record for a doctype: its name plus public/system identifiers."""

    def __init__(self, name, publicId, systemId):
        # Stored verbatim; consumed when the document string is synthesised
        # in TreeBuilder.insertRoot.
        self.name, self.publicId, self.systemId = name, publicId, systemId


class Document(object):
    """Root wrapper holding the lxml ElementTree and the tracked children."""

    def __init__(self):
        self._elementTree = None
        self._childNodes = []

    def appendChild(self, element):
        # Nodes appended to the document after the root (e.g. trailing
        # comments) become siblings of the real root via lxml's addnext().
        self._elementTree.getroot().addnext(element._element)

    @property
    def childNodes(self):
        return self._childNodes


def testSerializer(element):
    """Serialize element (full tree, fragment, element or bare text) in the
    indented, pipe-prefixed format expected by the html5lib test suite."""
    rv = []
    infosetFilter = _ihatexml.InfosetFilter(preventDoubleDashComments=True)

    def serializeElement(element, indent=0):
        if not hasattr(element, "tag"):
            if hasattr(element, "getroot"):
                # Full tree case
                rv.append("#document")
                if element.docinfo.internalDTD:
                    if not (element.docinfo.public_id or
                            element.docinfo.system_url):
                        dtd_str = "<!DOCTYPE %s>" % element.docinfo.root_name
                    else:
                        dtd_str = """<!DOCTYPE %s "%s" "%s">""" % (
                            element.docinfo.root_name,
                            element.docinfo.public_id,
                            element.docinfo.system_url)
                    rv.append("|%s%s" % (' ' * (indent + 2), dtd_str))
                # Rewind to the first root-level sibling (comments may precede
                # the root element) and serialize forwards from there.
                next_element = element.getroot()
                while next_element.getprevious() is not None:
                    next_element = next_element.getprevious()
                while next_element is not None:
                    serializeElement(next_element, indent + 2)
                    next_element = next_element.getnext()
            elif isinstance(element, str) or isinstance(element, bytes):
                # Text in a fragment
                assert isinstance(element, str) or sys.version_info[0] == 2
                rv.append("|%s\"%s\"" % (' ' * indent, element))
            else:
                # Fragment case
                rv.append("#document-fragment")
                for next_element in element:
                    serializeElement(next_element, indent + 2)
        elif element.tag == comment_type:
            rv.append("|%s<!-- %s -->" % (' ' * indent, element.text))
            if hasattr(element, "tail") and element.tail:
                rv.append("|%s\"%s\"" % (' ' * indent, element.tail))
        else:
            assert isinstance(element, etree._Element)
            nsmatch = etree_builders.tag_regexp.match(element.tag)
            if nsmatch is not None:
                # Namespaced tag: rendered as "prefix tagname".
                ns = nsmatch.group(1)
                tag = nsmatch.group(2)
                prefix = constants.prefixes[ns]
                rv.append("|%s<%s %s>" % (' ' * indent, prefix,
                                          infosetFilter.fromXmlName(tag)))
            else:
                rv.append("|%s<%s>" % (' ' * indent,
                                       infosetFilter.fromXmlName(element.tag)))

            if hasattr(element, "attrib"):
                attributes = []
                for name, value in element.attrib.items():
                    nsmatch = tag_regexp.match(name)
                    if nsmatch is not None:
                        ns, name = nsmatch.groups()
                        name = infosetFilter.fromXmlName(name)
                        prefix = constants.prefixes[ns]
                        attr_string = "%s %s" % (prefix, name)
                    else:
                        attr_string = infosetFilter.fromXmlName(name)
                    attributes.append((attr_string, value))

                # Attributes are sorted so the output is deterministic.
                for name, value in sorted(attributes):
                    rv.append('|%s%s="%s"' % (' ' * (indent + 2), name, value))

            if element.text:
                rv.append("|%s\"%s\"" % (' ' * (indent + 2), element.text))
            indent += 2
            for child in element:
                serializeElement(child, indent)
            # Tail text belongs to the parent's indentation level.
            if hasattr(element, "tail") and element.tail:
                rv.append("|%s\"%s\"" % (' ' * (indent - 2), element.tail))
    serializeElement(element, 0)

    return "\n".join(rv)


def tostring(element):
    """Serialize an element and its child nodes to a string"""
    pieces = []

    def walk(node):
        if not hasattr(node, "tag"):
            # ElementTree wrapper: emit the doctype (if any), then recurse
            # into the actual root element.
            if node.docinfo.internalDTD:
                pieces.append(node.docinfo.doctype or
                              "<!DOCTYPE %s>" % node.docinfo.root_name)
            walk(node.getroot())
        elif node.tag == comment_type:
            pieces.append("<!--%s-->" % (node.text,))
        else:
            # This is assumed to be an ordinary element
            if node.attrib:
                attr = " ".join("%s=\"%s\"" % (name, value)
                                for name, value in node.attrib.items())
                pieces.append("<%s %s>" % (node.tag, attr))
            else:
                pieces.append("<%s>" % (node.tag,))
            if node.text:
                pieces.append(node.text)

            for child in node:
                walk(child)

            pieces.append("</%s>" % (node.tag,))

        if hasattr(node, "tail") and node.tail:
            pieces.append(node.tail)

    walk(element)

    return "".join(pieces)


class TreeBuilder(base.TreeBuilder):
    """Tree builder producing lxml trees.

    lxml cannot represent everything html5lib can emit (doctypes with no
    name, extra comments adjacent to the root element); those cases raise a
    DataLossWarning instead of failing.
    """
    documentClass = Document
    doctypeClass = DocumentType
    elementClass = None
    commentClass = None
    fragmentClass = Document
    implementation = etree

    def __init__(self, namespaceHTMLElements, fullTree=False):
        # Reuse the generic etree treebuilder classes, specialised for
        # lxml.etree, then wrap them with infoset-coercion below.
        builder = etree_builders.getETreeModule(etree, fullTree=fullTree)
        infosetFilter = self.infosetFilter = _ihatexml.InfosetFilter(preventDoubleDashComments=True)
        self.namespaceHTMLElements = namespaceHTMLElements

        class Attributes(dict):
            # dict subclass that mirrors every write into the wrapped lxml
            # element's attrib mapping, coercing names to legal XML first.
            def __init__(self, element, value=None):
                if value is None:
                    value = {}
                self._element = element
                dict.__init__(self, value)  # pylint:disable=non-parent-init-called
                for key, value in self.items():
                    if isinstance(key, tuple):
                        name = "{%s}%s" % (key[2], infosetFilter.coerceAttribute(key[1]))
                    else:
                        name = infosetFilter.coerceAttribute(key)
                    self._element._element.attrib[name] = value

            def __setitem__(self, key, value):
                dict.__setitem__(self, key, value)
                if isinstance(key, tuple):
                    name = "{%s}%s" % (key[2], infosetFilter.coerceAttribute(key[1]))
                else:
                    name = infosetFilter.coerceAttribute(key)
                self._element._element.attrib[name] = value

        class Element(builder.Element):
            # Element wrapper that coerces names and text through the
            # infoset filter before delegating to the etree builder.
            def __init__(self, name, namespace):
                name = infosetFilter.coerceElement(name)
                builder.Element.__init__(self, name, namespace=namespace)
                self._attributes = Attributes(self)

            def _setName(self, name):
                self._name = infosetFilter.coerceElement(name)
                self._element.tag = self._getETreeTag(
                    self._name, self._namespace)

            def _getName(self):
                return infosetFilter.fromXmlName(self._name)

            name = property(_getName, _setName)

            def _getAttributes(self):
                return self._attributes

            def _setAttributes(self, attributes):
                self._attributes = Attributes(self, attributes)

            attributes = property(_getAttributes, _setAttributes)

            def insertText(self, data, insertBefore=None):
                data = infosetFilter.coerceCharacters(data)
                builder.Element.insertText(self, data, insertBefore)

            def appendChild(self, child):
                builder.Element.appendChild(self, child)

        class Comment(builder.Comment):
            # Comment wrapper that coerces data (e.g. "--" runs) so lxml
            # accepts it.
            def __init__(self, data):
                data = infosetFilter.coerceComment(data)
                builder.Comment.__init__(self, data)

            def _setData(self, data):
                data = infosetFilter.coerceComment(data)
                self._element.text = data

            def _getData(self):
                return self._element.text

            data = property(_getData, _setData)

        self.elementClass = Element
        self.commentClass = Comment
        # self.fragmentClass = builder.DocumentFragment
        base.TreeBuilder.__init__(self, namespaceHTMLElements)

    def reset(self):
        """Reset parse state; comments seen before the root element are
        buffered until insertRoot can attach them."""
        base.TreeBuilder.reset(self)
        self.insertComment = self.insertCommentInitial
        self.initial_comments = []
        self.doctype = None

    def testSerializer(self, element):
        return testSerializer(element)

    def getDocument(self):
        # Module-level fullTree flag decides tree vs root (see top of file).
        if fullTree:
            return self.document._elementTree
        else:
            return self.document._elementTree.getroot()

    def getFragment(self):
        """Return the fragment as a list: [leading text, elements..., tail]."""
        fragment = []
        element = self.openElements[0]._element
        if element.text:
            fragment.append(element.text)
        fragment.extend(list(element))
        if element.tail:
            fragment.append(element.tail)
        return fragment

    def insertDoctype(self, token):
        name = token["name"]
        publicId = token["publicId"]
        systemId = token["systemId"]

        if not name:
            warnings.warn("lxml cannot represent empty doctype", DataLossWarning)
            self.doctype = None
        else:
            coercedName = self.infosetFilter.coerceElement(name)
            if coercedName != name:
                warnings.warn("lxml cannot represent non-xml doctype", DataLossWarning)

            doctype = self.doctypeClass(coercedName, publicId, systemId)
            self.doctype = doctype

    def insertCommentInitial(self, data, parent=None):
        # Before the root exists, comments can only be buffered for later.
        assert parent is None or parent is self.document
        assert self.document._elementTree is None
        self.initial_comments.append(data)

    def insertCommentMain(self, data, parent=None):
        if (parent == self.document and
                self.document._elementTree.getroot()[-1].tag == comment_type):
            warnings.warn("lxml cannot represent adjacent comments beyond the root elements", DataLossWarning)
        super(TreeBuilder, self).insertComment(data, parent)

    def insertRoot(self, token):
        """Create the document root"""
        # Because of the way libxml2 works, it doesn't seem to be possible to
        # alter information like the doctype after the tree has been parsed.
        # Therefore we need to use the built-in parser to create our initial
        # tree, after which we can add elements like normal
        docStr = ""
        if self.doctype:
            assert self.doctype.name
            docStr += "<!DOCTYPE %s" % self.doctype.name
            if (self.doctype.publicId is not None or
                    self.doctype.systemId is not None):
                docStr += (' PUBLIC "%s" ' %
                           (self.infosetFilter.coercePubid(self.doctype.publicId or "")))
                if self.doctype.systemId:
                    sysid = self.doctype.systemId
                    if sysid.find("'") >= 0 and sysid.find('"') >= 0:
                        warnings.warn("DOCTYPE system cannot contain single and double quotes", DataLossWarning)
                        # NOTE(review): apostrophes are replaced with the
                        # literal text U00027 (not an escape) — upstream quirk.
                        sysid = sysid.replace("'", 'U00027')
                    if sysid.find("'") >= 0:
                        docStr += '"%s"' % sysid
                    else:
                        docStr += "'%s'" % sysid
                else:
                    docStr += "''"
            docStr += ">"
            if self.doctype.name != token["name"]:
                warnings.warn("lxml cannot represent doctype with a different name to the root element", DataLossWarning)
        # Placeholder root, renamed to the real root tag below.
        docStr += "<THIS_SHOULD_NEVER_APPEAR_PUBLICLY/>"
        root = etree.fromstring(docStr)

        # Append the initial comments:
        for comment_token in self.initial_comments:
            comment = self.commentClass(comment_token["data"])
            root.addprevious(comment._element)

        # Create the root document and add the ElementTree to it
        self.document = self.documentClass()
        self.document._elementTree = root.getroottree()

        # Give the root element the right name
        name = token["name"]
        namespace = token.get("namespace", self.defaultNamespace)
        if namespace is None:
            etree_tag = name
        else:
            etree_tag = "{%s}%s" % (namespace, name)
        root.tag = etree_tag

        # Add the root element to the internal child/open data structures
        root_element = self.elementClass(name, namespace)
        root_element._element = root
        self.document._childNodes.append(root_element)
        self.openElements.append(root_element)

        # Reset to the default insert comment function
        self.insertComment = self.insertCommentMain
_vendor/html5lib/treeadapters/__pycache__/sax.cpython-36.opt-1.pyc000064400000002335151733136440021005 0ustar003

�Pf}�@shddlmZmZmZddlmZddlmZmZiZ	x&ej
�D]\ZZZ
edk	r>e
e	e<q>Wdd�ZdS)�)�absolute_import�division�unicode_literals)�AttributesNSImpl�)�adjustForeignAttributes�unadjustForeignAttributesNcCs|j�x tj�D]\}}|j||�qWx�|D]�}|d}|dkrHq0q0|d
kr�t|dt�}|j|d|df|d|�|dkr�|j|d|df|d�q0|dkr�|j|d|df|d�q0|dkr�|j|d�q0|dkr0q0q0Wx tj�D]\}}|j	|��q�W|j
�dS)z8Call SAX-like content handler based on treewalker walker�typeZDoctype�StartTag�EmptyTag�data�	namespace�nameZEndTag�
Characters�SpaceCharacters�CommentN)r
r)rr)Z
startDocument�prefix_mapping�itemsZstartPrefixMappingrrZstartElementNSZendElementNSZ
charactersZendPrefixMappingZendDocument)ZwalkerZhandler�prefixr
�tokenr	Zattrs�r�/usr/lib/python3.6/sax.py�to_sax
s6
r)Z
__future__rrrZxml.sax.xmlreaderrZ	constantsrrr�valuesrZ	localNamer
rrrrr�<module>s_vendor/html5lib/treeadapters/__pycache__/sax.cpython-36.pyc000064400000002416151733136450020047 0ustar003

�Pf}�@shddlmZmZmZddlmZddlmZmZiZ	x&ej
�D]\ZZZ
edk	r>e
e	e<q>Wdd�ZdS)�)�absolute_import�division�unicode_literals)�AttributesNSImpl�)�adjustForeignAttributes�unadjustForeignAttributesNcCs(|j�x tj�D]\}}|j||�qWx�|D]�}|d}|dkrHq0q0|dkr�t|dt�}|j|d|df|d|�|dkr�|j|d|df|d�q0|dkr�|j|d|df|d�q0|dkr�|j|d�q0|dkr�q0ds0t	d
��q0Wx tj�D]\}}|j
|��qW|j�dS)z8Call SAX-like content handler based on treewalker walker�typeZDoctype�StartTag�EmptyTag�data�	namespace�nameZEndTag�
Characters�SpaceCharacters�CommentFzUnknown token typeN)r
r)rr)Z
startDocument�prefix_mapping�itemsZstartPrefixMappingrrZstartElementNSZendElementNSZ
characters�AssertionErrorZendPrefixMappingZendDocument)ZwalkerZhandler�prefixr
�tokenr	Zattrs�r�/usr/lib/python3.6/sax.py�to_sax
s6
r)Z
__future__rrrZxml.sax.xmlreaderrZ	constantsrrr�valuesrZ	localNamer
rrrrr�<module>s_vendor/html5lib/treeadapters/__pycache__/__init__.cpython-36.pyc000064400000000553151733136450021013 0ustar003

�Pf��@sZddlmZmZmZddlmZdgZyddlmZWnek
rJYnXej	d�dS)�)�absolute_import�division�unicode_literals�)�saxr)�genshirN)
Z
__future__rrr�r�__all__r�ImportError�append�rr�/usr/lib/python3.6/__init__.py�<module>s_vendor/html5lib/treeadapters/__pycache__/__init__.cpython-36.opt-1.pyc000064400000000553151733136450021752 0ustar003

�Pf��@sZddlmZmZmZddlmZdgZyddlmZWnek
rJYnXej	d�dS)�)�absolute_import�division�unicode_literals�)�saxr)�genshirN)
Z
__future__rrr�r�__all__r�ImportError�append�rr�/usr/lib/python3.6/__init__.py�<module>s_vendor/html5lib/treeadapters/__pycache__/genshi.cpython-36.pyc000064400000002640151733136450020530 0ustar003

�Pf�@sLddlmZmZmZddlmZmZddlmZmZm	Z	m
Z
mZdd�ZdS)�)�absolute_import�division�unicode_literals)�QName�Attrs)�START�END�TEXT�COMMENT�DOCTYPEccsZg}�x6|D�],}|d}|dkr2|j|d�n|rLtdj|�dfVg}|dkr�|d	rrd
|d	|df}n|d}tdd
�|dj�D��}tt|�|fdfV|dkr�d}|dk�r�|d	r�d
|d	|df}n|d}tt|�dfVq|dk�rt|dd fVq|dkrt	|d|d|dfd#fVqqW|�rVtdj|�d&fVdS)'N�type�
Characters�SpaceCharacters�data���StartTag�EmptyTag�	namespacez{%s}%s�namecSs4g|],\}}t|ddk	r"d|n|d�|f�qS)rNz{%s}%sr)r)�.0�attr�value�r�/usr/lib/python3.6/genshi.py�
<listcomp>szto_genshi.<locals>.<listcomp>ZEndTag�CommentZDoctypeZpublicIdZsystemId)r
r���r)Nrr)rrrr)Nrrrr)Nrrrr)Nrrrr)Nrrrr)Nrr)
�appendr	�joinr�itemsrrrr
r)Zwalker�text�tokenrrZattrsrrr�	to_genshis<

r#N)
Z
__future__rrrZgenshi.corerrrrr	r
rr#rrrr�<module>s_vendor/html5lib/treeadapters/__pycache__/genshi.cpython-36.opt-1.pyc000064400000002640151733136450021467 0ustar003

�Pf�@sLddlmZmZmZddlmZmZddlmZmZm	Z	m
Z
mZdd�ZdS)�)�absolute_import�division�unicode_literals)�QName�Attrs)�START�END�TEXT�COMMENT�DOCTYPEccsZg}�x6|D�],}|d}|dkr2|j|d�n|rLtdj|�dfVg}|dkr�|d	rrd
|d	|df}n|d}tdd
�|dj�D��}tt|�|fdfV|dkr�d}|dk�r�|d	r�d
|d	|df}n|d}tt|�dfVq|dk�rt|dd fVq|dkrt	|d|d|dfd#fVqqW|�rVtdj|�d&fVdS)'N�type�
Characters�SpaceCharacters�data���StartTag�EmptyTag�	namespacez{%s}%s�namecSs4g|],\}}t|ddk	r"d|n|d�|f�qS)rNz{%s}%sr)r)�.0�attr�value�r�/usr/lib/python3.6/genshi.py�
<listcomp>szto_genshi.<locals>.<listcomp>ZEndTag�CommentZDoctypeZpublicIdZsystemId)r
r���r)Nrr)rrrr)Nrrrr)Nrrrr)Nrrrr)Nrrrr)Nrr)
�appendr	�joinr�itemsrrrr
r)Zwalker�text�tokenrrZattrsrrr�	to_genshis<

r#N)
Z
__future__rrrZgenshi.corerrrrr	r
rr#rrrr�<module>s_vendor/html5lib/treeadapters/__init__.py000064400000000320151733136450014517 0ustar00from __future__ import absolute_import, division, unicode_literals

from . import sax

__all__ = ["sax"]

try:
    # Genshi support is optional; advertise it only when the genshi
    # dependency imports cleanly.
    from . import genshi  # noqa
except ImportError:
    pass
else:
    __all__.append("genshi")
_vendor/html5lib/treeadapters/genshi.py000064400000003023151733136450014240 0ustar00from __future__ import absolute_import, division, unicode_literals

from genshi.core import QName, Attrs
from genshi.core import START, END, TEXT, COMMENT, DOCTYPE


def to_genshi(walker):
    """Convert an html5lib treewalker token stream into Genshi events.

    Yields ``(kind, data, pos)`` tuples for a Genshi markup stream; the
    position element is always ``(None, -1, -1)``.
    """
    pending_text = []
    for token in walker:
        kind = token["type"]

        # Buffer consecutive character tokens so they flush as one TEXT event.
        if kind in ("Characters", "SpaceCharacters"):
            pending_text.append(token["data"])
            continue
        if pending_text:
            yield TEXT, "".join(pending_text), (None, -1, -1)
            pending_text = []

        if kind in ("StartTag", "EmptyTag"):
            if token["namespace"]:
                name = "{%s}%s" % (token["namespace"], token["name"])
            else:
                name = token["name"]
            attr_pairs = []
            for (attr_ns, attr_name), attr_value in token["data"].items():
                if attr_ns is not None:
                    qname = QName("{%s}%s" % (attr_ns, attr_name))
                else:
                    qname = QName(attr_name)
                attr_pairs.append((qname, attr_value))
            yield START, (QName(name), Attrs(attr_pairs)), (None, -1, -1)
            # An empty tag is a start immediately followed by an end event.
            if kind == "EmptyTag":
                kind = "EndTag"

        if kind == "EndTag":
            if token["namespace"]:
                name = "{%s}%s" % (token["namespace"], token["name"])
            else:
                name = token["name"]
            yield END, QName(name), (None, -1, -1)

        elif kind == "Comment":
            yield COMMENT, token["data"], (None, -1, -1)

        elif kind == "Doctype":
            yield DOCTYPE, (token["name"], token["publicId"],
                            token["systemId"]), (None, -1, -1)

        else:
            pass  # Unknown token types are silently dropped (FIXME upstream).

    if pending_text:
        yield TEXT, "".join(pending_text), (None, -1, -1)
_vendor/html5lib/treeadapters/sax.py000064400000003175151733136450013566 0ustar00from __future__ import absolute_import, division, unicode_literals

from xml.sax.xmlreader import AttributesNSImpl

from ..constants import adjustForeignAttributes, unadjustForeignAttributes

# Map namespace prefix -> namespace URI for the "adjusted foreign
# attributes" (e.g. xlink, xml, xmlns) defined in html5lib's constants;
# entries with no prefix are skipped.
prefix_mapping = {}
for prefix, localName, namespace in adjustForeignAttributes.values():
    if prefix is not None:
        prefix_mapping[prefix] = namespace


def to_sax(walker, handler):
    """Feed the token stream from treewalker *walker* into a SAX-like
    content handler *handler*."""
    handler.startDocument()
    # Declare the foreign-attribute prefixes (xlink/xml/...) up front.
    for pfx, uri in prefix_mapping.items():
        handler.startPrefixMapping(pfx, uri)

    for token in walker:
        token_type = token["type"]
        if token_type == "Doctype":
            # No SAX event is emitted for the doctype.
            continue
        if token_type in ("StartTag", "EmptyTag"):
            element = (token["namespace"], token["name"])
            sax_attrs = AttributesNSImpl(token["data"],
                                         unadjustForeignAttributes)
            handler.startElementNS(element, token["name"], sax_attrs)
            if token_type == "EmptyTag":
                # An empty tag closes immediately after it opens.
                handler.endElementNS(element, token["name"])
        elif token_type == "EndTag":
            handler.endElementNS((token["namespace"], token["name"]),
                                 token["name"])
        elif token_type in ("Characters", "SpaceCharacters"):
            handler.characters(token["data"])
        elif token_type == "Comment":
            pass
        else:
            assert False, "Unknown token type"

    for pfx in prefix_mapping:
        handler.endPrefixMapping(pfx)
    handler.endDocument()
_vendor/html5lib/_ihatexml.py000064400000040501151733136450012254 0ustar00from __future__ import absolute_import, division, unicode_literals

import re
import warnings

from .constants import DataLossWarning

baseChar = """
[#x0041-#x005A] | [#x0061-#x007A] | [#x00C0-#x00D6] | [#x00D8-#x00F6] |
[#x00F8-#x00FF] | [#x0100-#x0131] | [#x0134-#x013E] | [#x0141-#x0148] |
[#x014A-#x017E] | [#x0180-#x01C3] | [#x01CD-#x01F0] | [#x01F4-#x01F5] |
[#x01FA-#x0217] | [#x0250-#x02A8] | [#x02BB-#x02C1] | #x0386 |
[#x0388-#x038A] | #x038C | [#x038E-#x03A1] | [#x03A3-#x03CE] |
[#x03D0-#x03D6] | #x03DA | #x03DC | #x03DE | #x03E0 | [#x03E2-#x03F3] |
[#x0401-#x040C] | [#x040E-#x044F] | [#x0451-#x045C] | [#x045E-#x0481] |
[#x0490-#x04C4] | [#x04C7-#x04C8] | [#x04CB-#x04CC] | [#x04D0-#x04EB] |
[#x04EE-#x04F5] | [#x04F8-#x04F9] | [#x0531-#x0556] | #x0559 |
[#x0561-#x0586] | [#x05D0-#x05EA] | [#x05F0-#x05F2] | [#x0621-#x063A] |
[#x0641-#x064A] | [#x0671-#x06B7] | [#x06BA-#x06BE] | [#x06C0-#x06CE] |
[#x06D0-#x06D3] | #x06D5 | [#x06E5-#x06E6] | [#x0905-#x0939] | #x093D |
[#x0958-#x0961] | [#x0985-#x098C] | [#x098F-#x0990] | [#x0993-#x09A8] |
[#x09AA-#x09B0] | #x09B2 | [#x09B6-#x09B9] | [#x09DC-#x09DD] |
[#x09DF-#x09E1] | [#x09F0-#x09F1] | [#x0A05-#x0A0A] | [#x0A0F-#x0A10] |
[#x0A13-#x0A28] | [#x0A2A-#x0A30] | [#x0A32-#x0A33] | [#x0A35-#x0A36] |
[#x0A38-#x0A39] | [#x0A59-#x0A5C] | #x0A5E | [#x0A72-#x0A74] |
[#x0A85-#x0A8B] | #x0A8D | [#x0A8F-#x0A91] | [#x0A93-#x0AA8] |
[#x0AAA-#x0AB0] | [#x0AB2-#x0AB3] | [#x0AB5-#x0AB9] | #x0ABD | #x0AE0 |
[#x0B05-#x0B0C] | [#x0B0F-#x0B10] | [#x0B13-#x0B28] | [#x0B2A-#x0B30] |
[#x0B32-#x0B33] | [#x0B36-#x0B39] | #x0B3D | [#x0B5C-#x0B5D] |
[#x0B5F-#x0B61] | [#x0B85-#x0B8A] | [#x0B8E-#x0B90] | [#x0B92-#x0B95] |
[#x0B99-#x0B9A] | #x0B9C | [#x0B9E-#x0B9F] | [#x0BA3-#x0BA4] |
[#x0BA8-#x0BAA] | [#x0BAE-#x0BB5] | [#x0BB7-#x0BB9] | [#x0C05-#x0C0C] |
[#x0C0E-#x0C10] | [#x0C12-#x0C28] | [#x0C2A-#x0C33] | [#x0C35-#x0C39] |
[#x0C60-#x0C61] | [#x0C85-#x0C8C] | [#x0C8E-#x0C90] | [#x0C92-#x0CA8] |
[#x0CAA-#x0CB3] | [#x0CB5-#x0CB9] | #x0CDE | [#x0CE0-#x0CE1] |
[#x0D05-#x0D0C] | [#x0D0E-#x0D10] | [#x0D12-#x0D28] | [#x0D2A-#x0D39] |
[#x0D60-#x0D61] | [#x0E01-#x0E2E] | #x0E30 | [#x0E32-#x0E33] |
[#x0E40-#x0E45] | [#x0E81-#x0E82] | #x0E84 | [#x0E87-#x0E88] | #x0E8A |
#x0E8D | [#x0E94-#x0E97] | [#x0E99-#x0E9F] | [#x0EA1-#x0EA3] | #x0EA5 |
#x0EA7 | [#x0EAA-#x0EAB] | [#x0EAD-#x0EAE] | #x0EB0 | [#x0EB2-#x0EB3] |
#x0EBD | [#x0EC0-#x0EC4] | [#x0F40-#x0F47] | [#x0F49-#x0F69] |
[#x10A0-#x10C5] | [#x10D0-#x10F6] | #x1100 | [#x1102-#x1103] |
[#x1105-#x1107] | #x1109 | [#x110B-#x110C] | [#x110E-#x1112] | #x113C |
#x113E | #x1140 | #x114C | #x114E | #x1150 | [#x1154-#x1155] | #x1159 |
[#x115F-#x1161] | #x1163 | #x1165 | #x1167 | #x1169 | [#x116D-#x116E] |
[#x1172-#x1173] | #x1175 | #x119E | #x11A8 | #x11AB | [#x11AE-#x11AF] |
[#x11B7-#x11B8] | #x11BA | [#x11BC-#x11C2] | #x11EB | #x11F0 | #x11F9 |
[#x1E00-#x1E9B] | [#x1EA0-#x1EF9] | [#x1F00-#x1F15] | [#x1F18-#x1F1D] |
[#x1F20-#x1F45] | [#x1F48-#x1F4D] | [#x1F50-#x1F57] | #x1F59 | #x1F5B |
#x1F5D | [#x1F5F-#x1F7D] | [#x1F80-#x1FB4] | [#x1FB6-#x1FBC] | #x1FBE |
[#x1FC2-#x1FC4] | [#x1FC6-#x1FCC] | [#x1FD0-#x1FD3] | [#x1FD6-#x1FDB] |
[#x1FE0-#x1FEC] | [#x1FF2-#x1FF4] | [#x1FF6-#x1FFC] | #x2126 |
[#x212A-#x212B] | #x212E | [#x2180-#x2182] | [#x3041-#x3094] |
[#x30A1-#x30FA] | [#x3105-#x312C] | [#xAC00-#xD7A3]"""

ideographic = """[#x4E00-#x9FA5] | #x3007 | [#x3021-#x3029]"""

combiningCharacter = """
[#x0300-#x0345] | [#x0360-#x0361] | [#x0483-#x0486] | [#x0591-#x05A1] |
[#x05A3-#x05B9] | [#x05BB-#x05BD] | #x05BF | [#x05C1-#x05C2] | #x05C4 |
[#x064B-#x0652] | #x0670 | [#x06D6-#x06DC] | [#x06DD-#x06DF] |
[#x06E0-#x06E4] | [#x06E7-#x06E8] | [#x06EA-#x06ED] | [#x0901-#x0903] |
#x093C | [#x093E-#x094C] | #x094D | [#x0951-#x0954] | [#x0962-#x0963] |
[#x0981-#x0983] | #x09BC | #x09BE | #x09BF | [#x09C0-#x09C4] |
[#x09C7-#x09C8] | [#x09CB-#x09CD] | #x09D7 | [#x09E2-#x09E3] | #x0A02 |
#x0A3C | #x0A3E | #x0A3F | [#x0A40-#x0A42] | [#x0A47-#x0A48] |
[#x0A4B-#x0A4D] | [#x0A70-#x0A71] | [#x0A81-#x0A83] | #x0ABC |
[#x0ABE-#x0AC5] | [#x0AC7-#x0AC9] | [#x0ACB-#x0ACD] | [#x0B01-#x0B03] |
#x0B3C | [#x0B3E-#x0B43] | [#x0B47-#x0B48] | [#x0B4B-#x0B4D] |
[#x0B56-#x0B57] | [#x0B82-#x0B83] | [#x0BBE-#x0BC2] | [#x0BC6-#x0BC8] |
[#x0BCA-#x0BCD] | #x0BD7 | [#x0C01-#x0C03] | [#x0C3E-#x0C44] |
[#x0C46-#x0C48] | [#x0C4A-#x0C4D] | [#x0C55-#x0C56] | [#x0C82-#x0C83] |
[#x0CBE-#x0CC4] | [#x0CC6-#x0CC8] | [#x0CCA-#x0CCD] | [#x0CD5-#x0CD6] |
[#x0D02-#x0D03] | [#x0D3E-#x0D43] | [#x0D46-#x0D48] | [#x0D4A-#x0D4D] |
#x0D57 | #x0E31 | [#x0E34-#x0E3A] | [#x0E47-#x0E4E] | #x0EB1 |
[#x0EB4-#x0EB9] | [#x0EBB-#x0EBC] | [#x0EC8-#x0ECD] | [#x0F18-#x0F19] |
#x0F35 | #x0F37 | #x0F39 | #x0F3E | #x0F3F | [#x0F71-#x0F84] |
[#x0F86-#x0F8B] | [#x0F90-#x0F95] | #x0F97 | [#x0F99-#x0FAD] |
[#x0FB1-#x0FB7] | #x0FB9 | [#x20D0-#x20DC] | #x20E1 | [#x302A-#x302F] |
#x3099 | #x309A"""

digit = """
[#x0030-#x0039] | [#x0660-#x0669] | [#x06F0-#x06F9] | [#x0966-#x096F] |
[#x09E6-#x09EF] | [#x0A66-#x0A6F] | [#x0AE6-#x0AEF] | [#x0B66-#x0B6F] |
[#x0BE7-#x0BEF] | [#x0C66-#x0C6F] | [#x0CE6-#x0CEF] | [#x0D66-#x0D6F] |
[#x0E50-#x0E59] | [#x0ED0-#x0ED9] | [#x0F20-#x0F29]"""

# Extender production from XML 1.0, Appendix B.  Bug fix: the original had a
# stray "#" before "[#x3031-#x3035]", which neither reChar nor reCharRange
# can parse, so charStringToList(extender) would fail its length-1 assertion.
extender = """
#x00B7 | #x02D0 | #x02D1 | #x0387 | #x0640 | #x0E46 | #x0EC6 | #x3005 |
[#x3031-#x3035] | [#x309D-#x309E] | [#x30FC-#x30FE]"""

# Letter ::= BaseChar | Ideographic (XML 1.0, Appendix B).
letter = " | ".join([baseChar, ideographic])

# NameChar, presumably without the ":" (colons appear to be excluded here
# deliberately -- confirm against the XML 1.0 NameChar production).
name = " | ".join([letter, digit, ".", "-", "_", combiningCharacter,
                   extender])
nameFirst = " | ".join([letter, "_"])

# NOTE(review): the "|" inside [\d|A-F] is a literal pipe in the character
# class, not alternation; harmless for the spec data above, which never has
# "|" in a code-point position.
reChar = re.compile(r"#x([\d|A-F]{4,4})")
reCharRange = re.compile(r"\[#x([\d|A-F]{4,4})-#x([\d|A-F]{4,4})\]")


def charStringToList(chars):
    """Parse a spec-style character-class string ("#xNNNN | [#xNNNN-#xNNNN]
    | c" items joined by " | ") into a normalised list of [lo, hi] ranges."""
    ranges = []
    for piece in (part.strip() for part in chars.split(" | ")):
        # A piece is either a single code point, a code point range, or a
        # literal one-character item; the two regexps cannot both match.
        match = reChar.match(piece) or reCharRange.match(piece)
        if match is not None:
            bounds = [hexToInt(group) for group in match.groups()]
            if len(bounds) == 1:
                # Single code point: represent as a degenerate range.
                bounds = bounds * 2
            ranges.append(bounds)
        else:
            assert len(piece) == 1
            ranges.append([ord(piece)] * 2)
    return normaliseCharList(ranges)


def normaliseCharList(charList):
    """Sort [lo, hi] code point ranges and merge overlapping or adjacent ones.

    Returns a new list of ranges in ascending order with no overlaps and no
    zero-width gaps between consecutive entries.
    """
    charList = sorted(charList)
    for item in charList:
        assert item[1] >= item[0]
    rv = []
    i = 0
    while i < len(charList):
        j = 1
        rv.append(charList[i])
        # Absorb every following range that overlaps or touches the current
        # merged span.
        while i + j < len(charList) and charList[i + j][0] <= rv[-1][1] + 1:
            # Bug fix: take the max of the endpoints; the original assigned
            # charList[i + j][1] unconditionally, so a contained range
            # (e.g. [6, 7] inside [5, 9]) would *shrink* the merged span.
            rv[-1][1] = max(rv[-1][1], charList[i + j][1])
            j += 1
        i += j
    return rv

# We don't really support characters above the BMP :(
# Upper bound (U+FFFF) used when computing the complement of a char list.
max_unicode = int("FFFF", 16)


def missingRanges(charList):
    """Return the [lo, hi] ranges of the BMP *not* covered by charList.

    charList must be a non-empty normalised (sorted, merged) list of
    [lo, hi] pairs, as produced by normaliseCharList.
    """
    rv = []
    # Bug fix: the original tested `charList[0] != 0`, comparing a *list*
    # against 0 -- always true -- so a list starting at code point 0 would
    # produce the invalid range [0, -1].  Compare the lower bound instead.
    if charList[0][0] != 0:
        rv.append([0, charList[0][0] - 1])
    for i, item in enumerate(charList[:-1]):
        # Gap between this range and the next one.
        rv.append([item[1] + 1, charList[i + 1][0] - 1])
    if charList[-1][1] != max_unicode:
        rv.append([charList[-1][1] + 1, max_unicode])
    return rv


def listToRegexpStr(charList):
    """Render a list of [lo, hi] code point ranges as a regexp character
    class string, escaping regexp metacharacters."""
    pieces = []
    for item in charList:
        low, high = item[0], item[1]
        if low == high:
            pieces.append(escapeRegexp(chr(low)))
        else:
            pieces.append("%s-%s" % (escapeRegexp(chr(low)),
                                     escapeRegexp(chr(high))))
    return "[%s]" % "".join(pieces)


def hexToInt(hex_str):
    """Convert a hexadecimal string such as "03DA" to its integer value."""
    return int(hex_str, base=16)


def escapeRegexp(string):
    """Backslash-escape every regexp metacharacter occurring in *string*."""
    special = ".^$*+?{}[]|()-"
    return "".join("\\" + ch if ch in special else ch for ch in string)

# output from the above
nonXmlNameBMPRegexp = re.compile('[\x00-,/:-@\\[-\\^`\\{-\xb6\xb8-\xbf\xd7\xf7\u0132-\u0133\u013f-\u0140\u0149\u017f\u01c4-\u01cc\u01f1-\u01f3\u01f6-\u01f9\u0218-\u024f\u02a9-\u02ba\u02c2-\u02cf\u02d2-\u02ff\u0346-\u035f\u0362-\u0385\u038b\u038d\u03a2\u03cf\u03d7-\u03d9\u03db\u03dd\u03df\u03e1\u03f4-\u0400\u040d\u0450\u045d\u0482\u0487-\u048f\u04c5-\u04c6\u04c9-\u04ca\u04cd-\u04cf\u04ec-\u04ed\u04f6-\u04f7\u04fa-\u0530\u0557-\u0558\u055a-\u0560\u0587-\u0590\u05a2\u05ba\u05be\u05c0\u05c3\u05c5-\u05cf\u05eb-\u05ef\u05f3-\u0620\u063b-\u063f\u0653-\u065f\u066a-\u066f\u06b8-\u06b9\u06bf\u06cf\u06d4\u06e9\u06ee-\u06ef\u06fa-\u0900\u0904\u093a-\u093b\u094e-\u0950\u0955-\u0957\u0964-\u0965\u0970-\u0980\u0984\u098d-\u098e\u0991-\u0992\u09a9\u09b1\u09b3-\u09b5\u09ba-\u09bb\u09bd\u09c5-\u09c6\u09c9-\u09ca\u09ce-\u09d6\u09d8-\u09db\u09de\u09e4-\u09e5\u09f2-\u0a01\u0a03-\u0a04\u0a0b-\u0a0e\u0a11-\u0a12\u0a29\u0a31\u0a34\u0a37\u0a3a-\u0a3b\u0a3d\u0a43-\u0a46\u0a49-\u0a4a\u0a4e-\u0a58\u0a5d\u0a5f-\u0a65\u0a75-\u0a80\u0a84\u0a8c\u0a8e\u0a92\u0aa9\u0ab1\u0ab4\u0aba-\u0abb\u0ac6\u0aca\u0ace-\u0adf\u0ae1-\u0ae5\u0af0-\u0b00\u0b04\u0b0d-\u0b0e\u0b11-\u0b12\u0b29\u0b31\u0b34-\u0b35\u0b3a-\u0b3b\u0b44-\u0b46\u0b49-\u0b4a\u0b4e-\u0b55\u0b58-\u0b5b\u0b5e\u0b62-\u0b65\u0b70-\u0b81\u0b84\u0b8b-\u0b8d\u0b91\u0b96-\u0b98\u0b9b\u0b9d\u0ba0-\u0ba2\u0ba5-\u0ba7\u0bab-\u0bad\u0bb6\u0bba-\u0bbd\u0bc3-\u0bc5\u0bc9\u0bce-\u0bd6\u0bd8-\u0be6\u0bf0-\u0c00\u0c04\u0c0d\u0c11\u0c29\u0c34\u0c3a-\u0c3d\u0c45\u0c49\u0c4e-\u0c54\u0c57-\u0c5f\u0c62-\u0c65\u0c70-\u0c81\u0c84\u0c8d\u0c91\u0ca9\u0cb4\u0cba-\u0cbd\u0cc5\u0cc9\u0cce-\u0cd4\u0cd7-\u0cdd\u0cdf\u0ce2-\u0ce5\u0cf0-\u0d01\u0d04\u0d0d\u0d11\u0d29\u0d3a-\u0d3d\u0d44-\u0d45\u0d49\u0d4e-\u0d56\u0d58-\u0d5f\u0d62-\u0d65\u0d70-\u0e00\u0e2f\u0e3b-\u0e3f\u0e4f\u0e5a-\u0e80\u0e83\u0e85-\u0e86\u0e89\u0e8b-\u0e8c\u0e8e-\u0e93\u0e98\u0ea0\u0ea4\u0ea6\u0ea8-\u0ea9\u0eac\u0eaf\u0eba\u0ebe-\u0ebf\u0ec5\u0ec7\u0ece-\u0ecf\u0eda-\u0f17\u0f1a-\u0f1f\u0f2a-\u0f34\u0f36\u0
f38\u0f3a-\u0f3d\u0f48\u0f6a-\u0f70\u0f85\u0f8c-\u0f8f\u0f96\u0f98\u0fae-\u0fb0\u0fb8\u0fba-\u109f\u10c6-\u10cf\u10f7-\u10ff\u1101\u1104\u1108\u110a\u110d\u1113-\u113b\u113d\u113f\u1141-\u114b\u114d\u114f\u1151-\u1153\u1156-\u1158\u115a-\u115e\u1162\u1164\u1166\u1168\u116a-\u116c\u116f-\u1171\u1174\u1176-\u119d\u119f-\u11a7\u11a9-\u11aa\u11ac-\u11ad\u11b0-\u11b6\u11b9\u11bb\u11c3-\u11ea\u11ec-\u11ef\u11f1-\u11f8\u11fa-\u1dff\u1e9c-\u1e9f\u1efa-\u1eff\u1f16-\u1f17\u1f1e-\u1f1f\u1f46-\u1f47\u1f4e-\u1f4f\u1f58\u1f5a\u1f5c\u1f5e\u1f7e-\u1f7f\u1fb5\u1fbd\u1fbf-\u1fc1\u1fc5\u1fcd-\u1fcf\u1fd4-\u1fd5\u1fdc-\u1fdf\u1fed-\u1ff1\u1ff5\u1ffd-\u20cf\u20dd-\u20e0\u20e2-\u2125\u2127-\u2129\u212c-\u212d\u212f-\u217f\u2183-\u3004\u3006\u3008-\u3020\u3030\u3036-\u3040\u3095-\u3098\u309b-\u309c\u309f-\u30a0\u30fb\u30ff-\u3104\u312d-\u4dff\u9fa6-\uabff\ud7a4-\uffff]')  # noqa

nonXmlNameFirstBMPRegexp = re.compile('[\x00-@\\[-\\^`\\{-\xbf\xd7\xf7\u0132-\u0133\u013f-\u0140\u0149\u017f\u01c4-\u01cc\u01f1-\u01f3\u01f6-\u01f9\u0218-\u024f\u02a9-\u02ba\u02c2-\u0385\u0387\u038b\u038d\u03a2\u03cf\u03d7-\u03d9\u03db\u03dd\u03df\u03e1\u03f4-\u0400\u040d\u0450\u045d\u0482-\u048f\u04c5-\u04c6\u04c9-\u04ca\u04cd-\u04cf\u04ec-\u04ed\u04f6-\u04f7\u04fa-\u0530\u0557-\u0558\u055a-\u0560\u0587-\u05cf\u05eb-\u05ef\u05f3-\u0620\u063b-\u0640\u064b-\u0670\u06b8-\u06b9\u06bf\u06cf\u06d4\u06d6-\u06e4\u06e7-\u0904\u093a-\u093c\u093e-\u0957\u0962-\u0984\u098d-\u098e\u0991-\u0992\u09a9\u09b1\u09b3-\u09b5\u09ba-\u09db\u09de\u09e2-\u09ef\u09f2-\u0a04\u0a0b-\u0a0e\u0a11-\u0a12\u0a29\u0a31\u0a34\u0a37\u0a3a-\u0a58\u0a5d\u0a5f-\u0a71\u0a75-\u0a84\u0a8c\u0a8e\u0a92\u0aa9\u0ab1\u0ab4\u0aba-\u0abc\u0abe-\u0adf\u0ae1-\u0b04\u0b0d-\u0b0e\u0b11-\u0b12\u0b29\u0b31\u0b34-\u0b35\u0b3a-\u0b3c\u0b3e-\u0b5b\u0b5e\u0b62-\u0b84\u0b8b-\u0b8d\u0b91\u0b96-\u0b98\u0b9b\u0b9d\u0ba0-\u0ba2\u0ba5-\u0ba7\u0bab-\u0bad\u0bb6\u0bba-\u0c04\u0c0d\u0c11\u0c29\u0c34\u0c3a-\u0c5f\u0c62-\u0c84\u0c8d\u0c91\u0ca9\u0cb4\u0cba-\u0cdd\u0cdf\u0ce2-\u0d04\u0d0d\u0d11\u0d29\u0d3a-\u0d5f\u0d62-\u0e00\u0e2f\u0e31\u0e34-\u0e3f\u0e46-\u0e80\u0e83\u0e85-\u0e86\u0e89\u0e8b-\u0e8c\u0e8e-\u0e93\u0e98\u0ea0\u0ea4\u0ea6\u0ea8-\u0ea9\u0eac\u0eaf\u0eb1\u0eb4-\u0ebc\u0ebe-\u0ebf\u0ec5-\u0f3f\u0f48\u0f6a-\u109f\u10c6-\u10cf\u10f7-\u10ff\u1101\u1104\u1108\u110a\u110d\u1113-\u113b\u113d\u113f\u1141-\u114b\u114d\u114f\u1151-\u1153\u1156-\u1158\u115a-\u115e\u1162\u1164\u1166\u1168\u116a-\u116c\u116f-\u1171\u1174\u1176-\u119d\u119f-\u11a7\u11a9-\u11aa\u11ac-\u11ad\u11b0-\u11b6\u11b9\u11bb\u11c3-\u11ea\u11ec-\u11ef\u11f1-\u11f8\u11fa-\u1dff\u1e9c-\u1e9f\u1efa-\u1eff\u1f16-\u1f17\u1f1e-\u1f1f\u1f46-\u1f47\u1f4e-\u1f4f\u1f58\u1f5a\u1f5c\u1f5e\u1f7e-\u1f7f\u1fb5\u1fbd\u1fbf-\u1fc1\u1fc5\u1fcd-\u1fcf\u1fd4-\u1fd5\u1fdc-\u1fdf\u1fed-\u1ff1\u1ff5\u1ffd-\u2125\u2127-\u2129\u212c-\u212d\u212f-\u217f\u2183-\u3006\u3008-\u3020\u302a-\u3
040\u3095-\u30a0\u30fb-\u3104\u312d-\u4dff\u9fa6-\uabff\ud7a4-\uffff]')  # noqa

# Simpler things
# Matches any character not allowed in an XML public identifier; note the
# single quote *is* allowed here and is handled separately by
# InfosetFilter.coercePubid when preventSingleQuotePubid is set.
nonPubidCharRegexp = re.compile("[^\x20\x0D\x0Aa-zA-Z0-9\-\'()+,./:=?;!*#@$_%]")


class InfosetFilter(object):
    """Coerces HTML infoset values (names, text, comments, public ids) into
    values legal in XML 1.0, emitting DataLossWarning whenever data has to
    be rewritten.
    """

    # Matches the "U" + five-hex-digit escapes produced by escapeChar below.
    replacementRegexp = re.compile(r"U[\dA-F]{5,5}")

    def __init__(self,
                 dropXmlnsLocalName=False,
                 dropXmlnsAttrNs=False,
                 preventDoubleDashComments=False,
                 preventDashAtCommentEnd=False,
                 replaceFormFeedCharacters=True,
                 preventSingleQuotePubid=False):

        # Drop attributes whose name starts with "xmlns:".
        self.dropXmlnsLocalName = dropXmlnsLocalName
        # Drop attributes that live in the xmlns namespace.
        self.dropXmlnsAttrNs = dropXmlnsAttrNs

        self.preventDoubleDashComments = preventDoubleDashComments
        # NOTE(review): this flag is stored but never read; the trailing-dash
        # fix in coerceComment runs under preventDoubleDashComments instead.
        # Confirm against upstream html5lib before relying on it.
        self.preventDashAtCommentEnd = preventDashAtCommentEnd

        self.replaceFormFeedCharacters = replaceFormFeedCharacters

        self.preventSingleQuotePubid = preventSingleQuotePubid

        # Cache of original character -> "U%05X" escape string.
        self.replaceCache = {}

    def coerceAttribute(self, name, namespace=None):
        # Returns the coerced attribute name, or None when the attribute
        # must be dropped entirely.
        if self.dropXmlnsLocalName and name.startswith("xmlns:"):
            warnings.warn("Attributes cannot begin with xmlns", DataLossWarning)
            return None
        elif (self.dropXmlnsAttrNs and
              namespace == "http://www.w3.org/2000/xmlns/"):
            warnings.warn("Attributes cannot be in the xml namespace", DataLossWarning)
            return None
        else:
            return self.toXmlName(name)

    def coerceElement(self, name):
        return self.toXmlName(name)

    def coerceComment(self, data):
        # XML comments may not contain "--" nor end with "-".
        if self.preventDoubleDashComments:
            while "--" in data:
                warnings.warn("Comments cannot contain adjacent dashes", DataLossWarning)
                data = data.replace("--", "- -")
            if data.endswith("-"):
                warnings.warn("Comments cannot end in a dash", DataLossWarning)
                data += " "
        return data

    def coerceCharacters(self, data):
        if self.replaceFormFeedCharacters:
            # One warning per form feed replaced.
            for _ in range(data.count("\x0C")):
                warnings.warn("Text cannot contain U+000C", DataLossWarning)
            data = data.replace("\x0C", " ")
        # Other non-xml characters
        return data

    def coercePubid(self, data):
        # Replace every character illegal in a public id with its escape.
        dataOutput = data
        for char in nonPubidCharRegexp.findall(data):
            warnings.warn("Coercing non-XML pubid", DataLossWarning)
            replacement = self.getReplacementCharacter(char)
            dataOutput = dataOutput.replace(char, replacement)
        if self.preventSingleQuotePubid and dataOutput.find("'") >= 0:
            warnings.warn("Pubid cannot contain single quote", DataLossWarning)
            dataOutput = dataOutput.replace("'", self.getReplacementCharacter("'"))
        return dataOutput

    def toXmlName(self, name):
        # The first character has stricter rules than the rest, hence the
        # two different regexps.  Assumes name is non-empty.
        nameFirst = name[0]
        nameRest = name[1:]
        m = nonXmlNameFirstBMPRegexp.match(nameFirst)
        if m:
            warnings.warn("Coercing non-XML name", DataLossWarning)
            nameFirstOutput = self.getReplacementCharacter(nameFirst)
        else:
            nameFirstOutput = nameFirst

        nameRestOutput = nameRest
        replaceChars = set(nonXmlNameBMPRegexp.findall(nameRest))
        for char in replaceChars:
            warnings.warn("Coercing non-XML name", DataLossWarning)
            replacement = self.getReplacementCharacter(char)
            nameRestOutput = nameRestOutput.replace(char, replacement)
        return nameFirstOutput + nameRestOutput

    def getReplacementCharacter(self, char):
        if char in self.replaceCache:
            replacement = self.replaceCache[char]
        else:
            # escapeChar also records the new escape in replaceCache.
            replacement = self.escapeChar(char)
        return replacement

    def fromXmlName(self, name):
        # Reverse of toXmlName: expand every UXXXXX escape found in name.
        for item in set(self.replacementRegexp.findall(name)):
            name = name.replace(item, self.unescapeChar(item))
        return name

    def escapeChar(self, char):
        replacement = "U%05X" % ord(char)
        self.replaceCache[char] = replacement
        return replacement

    def unescapeChar(self, charcode):
        # Inverse of escapeChar: "UXXXXX" -> the original character.
        return chr(int(charcode[1:], 16))
_vendor/html5lib/html5parser.py000064400000344662151733136450012567 0ustar00from __future__ import absolute_import, division, unicode_literals
from pip._vendor.six import with_metaclass, viewkeys, PY3

import types

try:
    from collections import OrderedDict
except ImportError:
    from pip._vendor.ordereddict import OrderedDict

from . import _inputstream
from . import _tokenizer

from . import treebuilders
from .treebuilders.base import Marker

from . import _utils
from .constants import (
    spaceCharacters, asciiUpper2Lower,
    specialElements, headingElements, cdataElements, rcdataElements,
    tokenTypes, tagTokenTypes,
    namespaces,
    htmlIntegrationPointElements, mathmlTextIntegrationPointElements,
    adjustForeignAttributes as adjustForeignAttributesMap,
    adjustMathMLAttributes, adjustSVGAttributes,
    E,
    ReparseException
)


def parse(doc, treebuilder="etree", namespaceHTMLElements=True, **kwargs):
    """Parse *doc* (a string or file-like object) into a document tree built
    by the named treebuilder; extra keyword arguments go to HTMLParser.parse."""
    builder_cls = treebuilders.getTreeBuilder(treebuilder)
    parser = HTMLParser(builder_cls, namespaceHTMLElements=namespaceHTMLElements)
    return parser.parse(doc, **kwargs)


def parseFragment(doc, container="div", treebuilder="etree", namespaceHTMLElements=True, **kwargs):
    """Parse *doc* as a fragment (the innerHTML of *container*) into a tree
    fragment built by the named treebuilder."""
    builder_cls = treebuilders.getTreeBuilder(treebuilder)
    parser = HTMLParser(builder_cls, namespaceHTMLElements=namespaceHTMLElements)
    return parser.parseFragment(doc, container=container, **kwargs)


def method_decorator_metaclass(function):
    """Return a metaclass that, at class-creation time, wraps every plain
    function attribute of the class body with *function*."""
    class Decorated(type):
        def __new__(meta, classname, bases, classDict):
            decorated = {}
            for attr_name, attr_value in classDict.items():
                if isinstance(attr_value, types.FunctionType):
                    attr_value = function(attr_value)
                decorated[attr_name] = attr_value
            return type.__new__(meta, classname, bases, decorated)
    return Decorated


class HTMLParser(object):
    """HTML parser. Generates a tree structure from a stream of (possibly
        malformed) HTML"""

    def __init__(self, tree=None, strict=False, namespaceHTMLElements=True, debug=False):
        """
        strict - raise an exception when a parse error is encountered

        tree - a treebuilder class controlling the type of tree that will be
        returned. Built in treebuilders can be accessed through
        html5lib.treebuilders.getTreeBuilder(treeType)
        """

        # Raise an exception on the first error encountered
        self.strict = strict

        if tree is None:
            tree = treebuilders.getTreeBuilder("etree")
        self.tree = tree(namespaceHTMLElements)
        self.errors = []

        # One phase object per insertion mode, each sharing this parser and
        # tree; getPhases is memoized on the debug flag.
        self.phases = dict([(name, cls(self, self.tree)) for name, cls in
                            getPhases(debug).items()])

    def _parse(self, stream, innerHTML=False, container="div", scripting=False, **kwargs):
        # Common driver behind parse()/parseFragment(): configure the
        # tokenizer and run the main loop, restarting once if a
        # ReparseException is raised mid-parse.

        self.innerHTMLMode = innerHTML
        self.container = container
        self.scripting = scripting
        self.tokenizer = _tokenizer.HTMLTokenizer(stream, parser=self, **kwargs)
        self.reset()

        try:
            self.mainLoop()
        except ReparseException:
            self.reset()
            self.mainLoop()

    def reset(self):
        # Restore all per-parse state so the same instance can be reused
        # (and so _parse can restart after a ReparseException).
        self.tree.reset()
        self.firstStartTag = False
        self.errors = []
        self.log = []  # only used with debug mode
        # "quirks" / "limited quirks" / "no quirks"
        self.compatMode = "no quirks"

        if self.innerHTMLMode:
            self.innerHTML = self.container.lower()

            # Choose the tokenizer start state based on the container
            # element's content model.
            if self.innerHTML in cdataElements:
                self.tokenizer.state = self.tokenizer.rcdataState
            elif self.innerHTML in rcdataElements:
                self.tokenizer.state = self.tokenizer.rawtextState
            elif self.innerHTML == 'plaintext':
                self.tokenizer.state = self.tokenizer.plaintextState
            else:
                # state already is data state
                # self.tokenizer.state = self.tokenizer.dataState
                pass
            self.phase = self.phases["beforeHtml"]
            self.phase.insertHtmlElement()
            self.resetInsertionMode()
        else:
            self.innerHTML = False  # pylint:disable=redefined-variable-type
            self.phase = self.phases["initial"]

        self.lastPhase = None

        self.beforeRCDataPhase = None

        self.framesetOK = True

    @property
    def documentEncoding(self):
        """The name of the character encoding
        that was used to decode the input stream,
        or :obj:`None` if that is not determined yet.

        """
        if not hasattr(self, 'tokenizer'):
            return None
        return self.tokenizer.stream.charEncoding[0].name

    def isHTMLIntegrationPoint(self, element):
        # A MathML annotation-xml element is an HTML integration point only
        # when its encoding attribute is an HTML-ish MIME type; everything
        # else is looked up in the static (namespace, name) table.
        if (element.name == "annotation-xml" and
                element.namespace == namespaces["mathml"]):
            return ("encoding" in element.attributes and
                    element.attributes["encoding"].translate(
                        asciiUpper2Lower) in
                    ("text/html", "application/xhtml+xml"))
        else:
            return (element.namespace, element.name) in htmlIntegrationPointElements

    def isMathMLTextIntegrationPoint(self, element):
        return (element.namespace, element.name) in mathmlTextIntegrationPointElements

    def mainLoop(self):
        # Localize the token-type codes used by the hot loop below.
        CharactersToken = tokenTypes["Characters"]
        SpaceCharactersToken = tokenTypes["SpaceCharacters"]
        StartTagToken = tokenTypes["StartTag"]
        EndTagToken = tokenTypes["EndTag"]
        CommentToken = tokenTypes["Comment"]
        DoctypeToken = tokenTypes["Doctype"]
        ParseErrorToken = tokenTypes["ParseError"]

        for token in self.normalizedTokens():
            # A phase handler may return a replacement token to reprocess;
            # keep feeding tokens back in until one is fully consumed.
            prev_token = None
            new_token = token
            while new_token is not None:
                prev_token = new_token
                currentNode = self.tree.openElements[-1] if self.tree.openElements else None
                currentNodeNamespace = currentNode.namespace if currentNode else None
                currentNodeName = currentNode.name if currentNode else None

                type = new_token["type"]

                if type == ParseErrorToken:
                    self.parseError(new_token["data"], new_token.get("datavars", {}))
                    new_token = None
                else:
                    # Dispatch to either the current phase or the foreign
                    # content (SVG/MathML) phase.
                    # NOTE(review): the integration-point tests below read
                    # `token` rather than `new_token` -- confirm intended.
                    if (len(self.tree.openElements) == 0 or
                        currentNodeNamespace == self.tree.defaultNamespace or
                        (self.isMathMLTextIntegrationPoint(currentNode) and
                         ((type == StartTagToken and
                           token["name"] not in frozenset(["mglyph", "malignmark"])) or
                          type in (CharactersToken, SpaceCharactersToken))) or
                        (currentNodeNamespace == namespaces["mathml"] and
                         currentNodeName == "annotation-xml" and
                         type == StartTagToken and
                         token["name"] == "svg") or
                        (self.isHTMLIntegrationPoint(currentNode) and
                         type in (StartTagToken, CharactersToken, SpaceCharactersToken))):
                        phase = self.phase
                    else:
                        phase = self.phases["inForeignContent"]

                    if type == CharactersToken:
                        new_token = phase.processCharacters(new_token)
                    elif type == SpaceCharactersToken:
                        new_token = phase.processSpaceCharacters(new_token)
                    elif type == StartTagToken:
                        new_token = phase.processStartTag(new_token)
                    elif type == EndTagToken:
                        new_token = phase.processEndTag(new_token)
                    elif type == CommentToken:
                        new_token = phase.processComment(new_token)
                    elif type == DoctypeToken:
                        new_token = phase.processDoctype(new_token)

            if (type == StartTagToken and prev_token["selfClosing"] and
                    not prev_token["selfClosingAcknowledged"]):
                self.parseError("non-void-element-with-trailing-solidus",
                                {"name": prev_token["name"]})

        # When the loop finishes it's EOF
        reprocess = True
        phases = []
        while reprocess:
            phases.append(self.phase)
            reprocess = self.phase.processEOF()
            if reprocess:
                # Guard against an infinite EOF-reprocessing cycle.
                assert self.phase not in phases

    def normalizedTokens(self):
        # Generator applying normalizeToken to every tokenizer token.
        for token in self.tokenizer:
            yield self.normalizeToken(token)

    def parse(self, stream, *args, **kwargs):
        """Parse a HTML document into a well-formed tree

        stream - a filelike object or string containing the HTML to be parsed

        The optional encoding parameter must be a string that indicates
        the encoding.  If specified, that encoding will be used,
        regardless of any BOM or later declaration (such as in a meta
        element)

        scripting - treat noscript elements as if javascript was turned on
        """
        self._parse(stream, False, None, *args, **kwargs)
        return self.tree.getDocument()

    def parseFragment(self, stream, *args, **kwargs):
        """Parse a HTML fragment into a well-formed tree fragment

        container - name of the element we're setting the innerHTML property
        if set to None, default to 'div'

        stream - a filelike object or string containing the HTML to be parsed

        The optional encoding parameter must be a string that indicates
        the encoding.  If specified, that encoding will be used,
        regardless of any BOM or later declaration (such as in a meta
        element)

        scripting - treat noscript elements as if javascript was turned on
        """
        self._parse(stream, True, *args, **kwargs)
        return self.tree.getFragment()

    def parseError(self, errorcode="XXX-undefined-error", datavars=None):
        # Record (position, code, vars); in strict mode raise immediately.
        # XXX The idea is to make errorcode mandatory.
        if datavars is None:
            datavars = {}
        self.errors.append((self.tokenizer.stream.position(), errorcode, datavars))
        if self.strict:
            raise ParseError(E[errorcode] % datavars)

    def normalizeToken(self, token):
        """ HTML5 specific normalizations to the token stream """

        if token["type"] == tokenTypes["StartTag"]:
            raw = token["data"]
            token["data"] = OrderedDict(raw)
            if len(raw) > len(token["data"]):
                # we had some duplicated attribute, fix so first wins
                token["data"].update(raw[::-1])

        return token

    def adjustMathMLAttributes(self, token):
        # adjustMathMLAttributes here is the module-level mapping, not this
        # method (method lookup goes through self).
        adjust_attributes(token, adjustMathMLAttributes)

    def adjustSVGAttributes(self, token):
        adjust_attributes(token, adjustSVGAttributes)

    def adjustForeignAttributes(self, token):
        adjust_attributes(token, adjustForeignAttributesMap)

    def reparseTokenNormal(self, token):
        # pylint:disable=unused-argument
        # NOTE(review): HTMLParser has no `self.parser` attribute, so this
        # would raise AttributeError if ever called -- looks like dead code;
        # confirm before relying on it.
        self.parser.phase()

    def resetInsertionMode(self):
        # The name of this method is mostly historical. (It's also used in the
        # specification.)
        last = False
        newModes = {
            "select": "inSelect",
            "td": "inCell",
            "th": "inCell",
            "tr": "inRow",
            "tbody": "inTableBody",
            "thead": "inTableBody",
            "tfoot": "inTableBody",
            "caption": "inCaption",
            "colgroup": "inColumnGroup",
            "table": "inTable",
            "head": "inBody",
            "body": "inBody",
            "frameset": "inFrameset",
            "html": "beforeHead"
        }
        # Walk the open-element stack from the innermost node outwards.
        for node in self.tree.openElements[::-1]:
            nodeName = node.name
            new_phase = None
            if node == self.tree.openElements[0]:
                assert self.innerHTML
                last = True
                nodeName = self.innerHTML
            # Check for conditions that should only happen in the innerHTML
            # case
            if nodeName in ("select", "colgroup", "head", "html"):
                assert self.innerHTML

            if not last and node.namespace != self.tree.defaultNamespace:
                continue

            if nodeName in newModes:
                new_phase = self.phases[newModes[nodeName]]
                break
            elif last:
                new_phase = self.phases["inBody"]
                break

        self.phase = new_phase

    def parseRCDataRawtext(self, token, contentType):
        """Generic RCDATA/RAWTEXT Parsing algorithm
        contentType - RCDATA or RAWTEXT
        """
        assert contentType in ("RAWTEXT", "RCDATA")

        self.tree.insertElement(token)

        if contentType == "RAWTEXT":
            self.tokenizer.state = self.tokenizer.rawtextState
        else:
            self.tokenizer.state = self.tokenizer.rcdataState

        # Remember where to return to once the text phase finishes.
        self.originalPhase = self.phase

        self.phase = self.phases["text"]


@_utils.memoize
def getPhases(debug):
    def log(function):
        """Logger decorator that records which phase processes each token.

        Wraps ``process*`` methods so every call appends a record of
        (tokenizer state, parser phase, handler class, handler name,
        token info) to ``self.parser.log``. Non-``process`` methods are
        forwarded untouched.
        """
        # Reverse map: numeric token type -> human-readable name.
        type_names = dict((value, key) for key, value in
                          tokenTypes.items())

        def wrapped(self, *args, **kwargs):
            if function.__name__.startswith("process") and args:
                token = args[0]
                # An unknown token type is a programming error; let the
                # KeyError propagate (the old bare ``except: raise`` was a
                # no-op and has been removed).
                info = {"type": type_names[token['type']]}
                if token['type'] in tagTokenTypes:
                    info["name"] = token['name']

                self.parser.log.append((self.parser.tokenizer.state.__name__,
                                        self.parser.phase.__class__.__name__,
                                        self.__class__.__name__,
                                        function.__name__,
                                        info))
            return function(self, *args, **kwargs)
        return wrapped

    def getMetaclass(use_metaclass, metaclass_func):
        """Pick the metaclass for the Phase classes.

        In debug mode return a metaclass that wraps methods with
        *metaclass_func*; otherwise just use plain ``type``.
        """
        if not use_metaclass:
            return type
        return method_decorator_metaclass(metaclass_func)

    # pylint:disable=unused-argument
    class Phase(with_metaclass(getMetaclass(debug, log))):
        """Base class for helper object that implements each phase of processing
        """

        def __init__(self, parser, tree):
            self.parser = parser
            self.tree = tree

        def processEOF(self):
            # Every concrete phase must decide what end-of-file means.
            raise NotImplementedError

        def processComment(self, token):
            # For most phases the following is correct. Where it's not it will be
            # overridden.
            self.tree.insertComment(token, self.tree.openElements[-1])

        def processDoctype(self, token):
            self.parser.parseError("unexpected-doctype")

        def processCharacters(self, token):
            self.tree.insertText(token["data"])

        def processSpaceCharacters(self, token):
            self.tree.insertText(token["data"])

        def processStartTag(self, token):
            # Dispatch on the tag name via the phase's handler table.
            handler = self.startTagHandler[token["name"]]
            return handler(token)

        def startTagHtml(self, token):
            if token["name"] == "html" and not self.parser.firstStartTag:
                self.parser.parseError("non-html-root")
            # XXX Need a check here to see if the first start tag token emitted is
            # this token... If it's not, invoke self.parser.parseError().
            # Merge the stray <html> tag's attributes into the root element,
            # never overwriting attributes that are already present.
            root = self.tree.openElements[0]
            for attr, value in token["data"].items():
                if attr not in root.attributes:
                    root.attributes[attr] = value
            self.parser.firstStartTag = False

        def processEndTag(self, token):
            handler = self.endTagHandler[token["name"]]
            return handler(token)

    class InitialPhase(Phase):
        """The "initial" insertion mode: nothing has been seen yet.

        Handles the doctype (including quirks-mode sniffing against the
        spec's list of legacy public identifiers) and reports anything
        else as a missing-doctype error before moving to "before html".
        """

        def processSpaceCharacters(self, token):
            # Whitespace before the doctype is ignored.
            pass

        def processComment(self, token):
            self.tree.insertComment(token, self.tree.document)

        def processDoctype(self, token):
            """Insert the doctype and select the document compat mode."""
            name = token["name"]
            publicId = token["publicId"]
            systemId = token["systemId"]
            correct = token["correct"]

            # Anything but a bare <!DOCTYPE html> (or the about:legacy-compat
            # form) is reported, though still processed below.
            if (name != "html" or publicId is not None or
                    systemId is not None and systemId != "about:legacy-compat"):
                self.parser.parseError("unknown-doctype")

            if publicId is None:
                publicId = ""

            self.tree.insertDoctype(token)

            # Public identifiers are matched case-insensitively.
            if publicId != "":
                publicId = publicId.translate(asciiUpper2Lower)

            # Quirks mode: malformed doctype, non-html root name, or one of
            # the spec's legacy public/system identifiers.
            if (not correct or token["name"] != "html" or
                    publicId.startswith(
                        ("+//silmaril//dtd html pro v0r11 19970101//",
                         "-//advasoft ltd//dtd html 3.0 aswedit + extensions//",
                         "-//as//dtd html 3.0 aswedit + extensions//",
                         "-//ietf//dtd html 2.0 level 1//",
                         "-//ietf//dtd html 2.0 level 2//",
                         "-//ietf//dtd html 2.0 strict level 1//",
                         "-//ietf//dtd html 2.0 strict level 2//",
                         "-//ietf//dtd html 2.0 strict//",
                         "-//ietf//dtd html 2.0//",
                         "-//ietf//dtd html 2.1e//",
                         "-//ietf//dtd html 3.0//",
                         "-//ietf//dtd html 3.2 final//",
                         "-//ietf//dtd html 3.2//",
                         "-//ietf//dtd html 3//",
                         "-//ietf//dtd html level 0//",
                         "-//ietf//dtd html level 1//",
                         "-//ietf//dtd html level 2//",
                         "-//ietf//dtd html level 3//",
                         "-//ietf//dtd html strict level 0//",
                         "-//ietf//dtd html strict level 1//",
                         "-//ietf//dtd html strict level 2//",
                         "-//ietf//dtd html strict level 3//",
                         "-//ietf//dtd html strict//",
                         "-//ietf//dtd html//",
                         "-//metrius//dtd metrius presentational//",
                         "-//microsoft//dtd internet explorer 2.0 html strict//",
                         "-//microsoft//dtd internet explorer 2.0 html//",
                         "-//microsoft//dtd internet explorer 2.0 tables//",
                         "-//microsoft//dtd internet explorer 3.0 html strict//",
                         "-//microsoft//dtd internet explorer 3.0 html//",
                         "-//microsoft//dtd internet explorer 3.0 tables//",
                         "-//netscape comm. corp.//dtd html//",
                         "-//netscape comm. corp.//dtd strict html//",
                         "-//o'reilly and associates//dtd html 2.0//",
                         "-//o'reilly and associates//dtd html extended 1.0//",
                         "-//o'reilly and associates//dtd html extended relaxed 1.0//",
                         "-//softquad software//dtd hotmetal pro 6.0::19990601::extensions to html 4.0//",
                         "-//softquad//dtd hotmetal pro 4.0::19971010::extensions to html 4.0//",
                         "-//spyglass//dtd html 2.0 extended//",
                         "-//sq//dtd html 2.0 hotmetal + extensions//",
                         "-//sun microsystems corp.//dtd hotjava html//",
                         "-//sun microsystems corp.//dtd hotjava strict html//",
                         "-//w3c//dtd html 3 1995-03-24//",
                         "-//w3c//dtd html 3.2 draft//",
                         "-//w3c//dtd html 3.2 final//",
                         "-//w3c//dtd html 3.2//",
                         "-//w3c//dtd html 3.2s draft//",
                         "-//w3c//dtd html 4.0 frameset//",
                         "-//w3c//dtd html 4.0 transitional//",
                         "-//w3c//dtd html experimental 19960712//",
                         "-//w3c//dtd html experimental 970421//",
                         "-//w3c//dtd w3 html//",
                         "-//w3o//dtd w3 html 3.0//",
                         "-//webtechs//dtd mozilla html 2.0//",
                         "-//webtechs//dtd mozilla html//")) or
                    publicId in ("-//w3o//dtd w3 html strict 3.0//en//",
                                 "-/w3c/dtd html 4.0 transitional/en",
                                 "html") or
                    # HTML 4.01 frameset/transitional is quirks only when no
                    # system identifier is given (limited quirks otherwise).
                    publicId.startswith(
                        ("-//w3c//dtd html 4.01 frameset//",
                         "-//w3c//dtd html 4.01 transitional//")) and
                    systemId is None or
                    systemId and systemId.lower() == "http://www.ibm.com/data/dtd/v11/ibmxhtml1-transitional.dtd"):
                self.parser.compatMode = "quirks"
            elif (publicId.startswith(
                    ("-//w3c//dtd xhtml 1.0 frameset//",
                     "-//w3c//dtd xhtml 1.0 transitional//")) or
                  publicId.startswith(
                      ("-//w3c//dtd html 4.01 frameset//",
                       "-//w3c//dtd html 4.01 transitional//")) and
                  systemId is not None):
                self.parser.compatMode = "limited quirks"

            self.parser.phase = self.parser.phases["beforeHtml"]

        def anythingElse(self):
            # No doctype seen: the document is in quirks mode.
            self.parser.compatMode = "quirks"
            self.parser.phase = self.parser.phases["beforeHtml"]

        def processCharacters(self, token):
            self.parser.parseError("expected-doctype-but-got-chars")
            self.anythingElse()
            return token

        def processStartTag(self, token):
            self.parser.parseError("expected-doctype-but-got-start-tag",
                                   {"name": token["name"]})
            self.anythingElse()
            return token

        def processEndTag(self, token):
            self.parser.parseError("expected-doctype-but-got-end-tag",
                                   {"name": token["name"]})
            self.anythingElse()
            return token

        def processEOF(self):
            self.parser.parseError("expected-doctype-but-got-eof")
            self.anythingElse()
            return True

    class BeforeHtmlPhase(Phase):
        """The "before html" insertion mode: create the root <html>
        element, then hand everything else to the "before head" phase."""

        # helper methods
        def insertHtmlElement(self):
            # Synthesise the root element and advance to "before head".
            self.tree.insertRoot(impliedTagToken("html", "StartTag"))
            self.parser.phase = self.parser.phases["beforeHead"]

        # other
        def processEOF(self):
            self.insertHtmlElement()
            return True

        def processComment(self, token):
            self.tree.insertComment(token, self.tree.document)

        def processSpaceCharacters(self, token):
            # Whitespace before the root element is dropped.
            pass

        def processCharacters(self, token):
            self.insertHtmlElement()
            return token

        def processStartTag(self, token):
            if token["name"] == "html":
                self.parser.firstStartTag = True
            self.insertHtmlElement()
            return token

        def processEndTag(self, token):
            # Only these end tags imply the root element; anything else is
            # an error and the token is dropped.
            if token["name"] in ("head", "body", "html", "br"):
                self.insertHtmlElement()
                return token
            self.parser.parseError("unexpected-end-tag-before-html",
                                   {"name": token["name"]})

    class BeforeHeadPhase(Phase):
        """The "before head" insertion mode: force a <head> element into
        existence before other content is processed."""

        def __init__(self, parser, tree):
            Phase.__init__(self, parser, tree)

            self.startTagHandler = _utils.MethodDispatcher([
                ("html", self.startTagHtml),
                ("head", self.startTagHead),
            ])
            self.startTagHandler.default = self.startTagOther

            self.endTagHandler = _utils.MethodDispatcher([
                (("head", "body", "html", "br"), self.endTagImplyHead),
            ])
            self.endTagHandler.default = self.endTagOther

        def _implyHead(self):
            # Act as if a <head> start tag had been seen.
            self.startTagHead(impliedTagToken("head", "StartTag"))

        def processEOF(self):
            self._implyHead()
            return True

        def processSpaceCharacters(self, token):
            # Leading whitespace is ignored here.
            pass

        def processCharacters(self, token):
            self._implyHead()
            return token

        def startTagHtml(self, token):
            return self.parser.phases["inBody"].processStartTag(token)

        def startTagHead(self, token):
            self.tree.insertElement(token)
            self.tree.headPointer = self.tree.openElements[-1]
            self.parser.phase = self.parser.phases["inHead"]

        def startTagOther(self, token):
            self._implyHead()
            return token

        def endTagImplyHead(self, token):
            self._implyHead()
            return token

        def endTagOther(self, token):
            self.parser.parseError("end-tag-after-implied-root",
                                   {"name": token["name"]})

    class InHeadPhase(Phase):
        """The "in head" insertion mode.

        Handles head-only elements (<title>, <style>, <script>, <meta>,
        <link>, ...), including the <meta charset> encoding sniff which may
        trigger a re-parse of the input stream with a new encoding.
        """

        def __init__(self, parser, tree):
            Phase.__init__(self, parser, tree)

            self.startTagHandler = _utils.MethodDispatcher([
                ("html", self.startTagHtml),
                ("title", self.startTagTitle),
                (("noframes", "style"), self.startTagNoFramesStyle),
                ("noscript", self.startTagNoscript),
                ("script", self.startTagScript),
                (("base", "basefont", "bgsound", "command", "link"),
                 self.startTagBaseLinkCommand),
                ("meta", self.startTagMeta),
                ("head", self.startTagHead)
            ])
            self.startTagHandler.default = self.startTagOther

            self.endTagHandler = _utils.MethodDispatcher([
                ("head", self.endTagHead),
                (("br", "html", "body"), self.endTagHtmlBodyBr)
            ])
            self.endTagHandler.default = self.endTagOther

        # the real thing
        def processEOF(self):
            self.anythingElse()
            return True

        def processCharacters(self, token):
            # Non-space characters end the head; reprocess in the new phase.
            self.anythingElse()
            return token

        def startTagHtml(self, token):
            return self.parser.phases["inBody"].processStartTag(token)

        def startTagHead(self, token):
            self.parser.parseError("two-heads-are-not-better-than-one")

        def startTagBaseLinkCommand(self, token):
            # Void elements: insert, then pop immediately.
            self.tree.insertElement(token)
            self.tree.openElements.pop()
            token["selfClosingAcknowledged"] = True

        def startTagMeta(self, token):
            self.tree.insertElement(token)
            self.tree.openElements.pop()
            token["selfClosingAcknowledged"] = True

            # While the stream's encoding is still tentative, a <meta> tag
            # may pin it down, either via charset= or an http-equiv
            # Content-Type declaration.
            attributes = token["data"]
            if self.parser.tokenizer.stream.charEncoding[1] == "tentative":
                if "charset" in attributes:
                    self.parser.tokenizer.stream.changeEncoding(attributes["charset"])
                elif ("content" in attributes and
                      "http-equiv" in attributes and
                      attributes["http-equiv"].lower() == "content-type"):
                    # Encoding it as UTF-8 here is a hack, as really we should pass
                    # the abstract Unicode string, and just use the
                    # ContentAttrParser on that, but using UTF-8 allows all chars
                    # to be encoded and as a ASCII-superset works.
                    data = _inputstream.EncodingBytes(attributes["content"].encode("utf-8"))
                    parser = _inputstream.ContentAttrParser(data)
                    codec = parser.parse()
                    self.parser.tokenizer.stream.changeEncoding(codec)

        def startTagTitle(self, token):
            self.parser.parseRCDataRawtext(token, "RCDATA")

        def startTagNoFramesStyle(self, token):
            # Need to decide whether to implement the scripting-disabled case
            self.parser.parseRCDataRawtext(token, "RAWTEXT")

        def startTagNoscript(self, token):
            if self.parser.scripting:
                self.parser.parseRCDataRawtext(token, "RAWTEXT")
            else:
                self.tree.insertElement(token)
                self.parser.phase = self.parser.phases["inHeadNoscript"]

        def startTagScript(self, token):
            self.tree.insertElement(token)
            self.parser.tokenizer.state = self.parser.tokenizer.scriptDataState
            self.parser.originalPhase = self.parser.phase
            self.parser.phase = self.parser.phases["text"]

        def startTagOther(self, token):
            self.anythingElse()
            return token

        def endTagHead(self, token):
            node = self.parser.tree.openElements.pop()
            assert node.name == "head", "Expected head got %s" % node.name
            self.parser.phase = self.parser.phases["afterHead"]

        def endTagHtmlBodyBr(self, token):
            self.anythingElse()
            return token

        def endTagOther(self, token):
            self.parser.parseError("unexpected-end-tag", {"name": token["name"]})

        def anythingElse(self):
            # Any token not handled above implicitly closes the head.
            self.endTagHead(impliedTagToken("head"))

    class InHeadNoscriptPhase(Phase):
        """The "in head noscript" insertion mode (scripting disabled):
        content inside <noscript> in the head."""

        def __init__(self, parser, tree):
            Phase.__init__(self, parser, tree)

            self.startTagHandler = _utils.MethodDispatcher([
                ("html", self.startTagHtml),
                (("basefont", "bgsound", "link", "meta", "noframes", "style"), self.startTagBaseLinkCommand),
                (("head", "noscript"), self.startTagHeadNoscript),
            ])
            self.startTagHandler.default = self.startTagOther

            self.endTagHandler = _utils.MethodDispatcher([
                ("noscript", self.endTagNoscript),
                ("br", self.endTagBr),
            ])
            self.endTagHandler.default = self.endTagOther

        def processEOF(self):
            # EOF inside <noscript> is an error; close it and reprocess.
            self.parser.parseError("eof-in-head-noscript")
            self.anythingElse()
            return True

        def processComment(self, token):
            inHead = self.parser.phases["inHead"]
            return inHead.processComment(token)

        def processCharacters(self, token):
            self.parser.parseError("char-in-head-noscript")
            self.anythingElse()
            return token

        def processSpaceCharacters(self, token):
            inHead = self.parser.phases["inHead"]
            return inHead.processSpaceCharacters(token)

        def startTagHtml(self, token):
            return self.parser.phases["inBody"].processStartTag(token)

        def startTagBaseLinkCommand(self, token):
            # These are legal in the head; delegate to the "in head" phase.
            return self.parser.phases["inHead"].processStartTag(token)

        def startTagHeadNoscript(self, token):
            self.parser.parseError("unexpected-start-tag", {"name": token["name"]})

        def startTagOther(self, token):
            self.parser.parseError("unexpected-inhead-noscript-tag", {"name": token["name"]})
            self.anythingElse()
            return token

        def endTagNoscript(self, token):
            node = self.parser.tree.openElements.pop()
            assert node.name == "noscript", "Expected noscript got %s" % node.name
            self.parser.phase = self.parser.phases["inHead"]

        def endTagBr(self, token):
            self.parser.parseError("unexpected-inhead-noscript-tag", {"name": token["name"]})
            self.anythingElse()
            return token

        def endTagOther(self, token):
            self.parser.parseError("unexpected-end-tag", {"name": token["name"]})

        def anythingElse(self):
            # Caller must raise parse error first!
            self.endTagNoscript(impliedTagToken("noscript"))

    class AfterHeadPhase(Phase):
        """The "after head" insertion mode: decide between <body>,
        <frameset>, and an implied body element."""

        def __init__(self, parser, tree):
            Phase.__init__(self, parser, tree)

            self.startTagHandler = _utils.MethodDispatcher([
                ("html", self.startTagHtml),
                ("body", self.startTagBody),
                ("frameset", self.startTagFrameset),
                (("base", "basefont", "bgsound", "link", "meta", "noframes", "script",
                  "style", "title"),
                 self.startTagFromHead),
                ("head", self.startTagHead),
            ])
            self.startTagHandler.default = self.startTagOther
            self.endTagHandler = _utils.MethodDispatcher([
                (("body", "html", "br"), self.endTagHtmlBodyBr),
            ])
            self.endTagHandler.default = self.endTagOther

        def processEOF(self):
            self.anythingElse()
            return True

        def processCharacters(self, token):
            self.anythingElse()
            return token

        def startTagHtml(self, token):
            return self.parser.phases["inBody"].processStartTag(token)

        def startTagBody(self, token):
            self.parser.framesetOK = False
            self.tree.insertElement(token)
            self.parser.phase = self.parser.phases["inBody"]

        def startTagFrameset(self, token):
            self.tree.insertElement(token)
            self.parser.phase = self.parser.phases["inFrameset"]

        def startTagFromHead(self, token):
            """Head-only element after </head>: reopen the head, process the
            tag there, then remove the head from the open-element stack."""
            self.parser.parseError("unexpected-start-tag-out-of-my-head",
                                   {"name": token["name"]})
            self.tree.openElements.append(self.tree.headPointer)
            self.parser.phases["inHead"].processStartTag(token)
            for node in reversed(self.tree.openElements):
                if node.name == "head":
                    self.tree.openElements.remove(node)
                    break

        def startTagHead(self, token):
            self.parser.parseError("unexpected-start-tag", {"name": token["name"]})

        def startTagOther(self, token):
            self.anythingElse()
            return token

        def endTagHtmlBodyBr(self, token):
            self.anythingElse()
            return token

        def endTagOther(self, token):
            self.parser.parseError("unexpected-end-tag", {"name": token["name"]})

        def anythingElse(self):
            # No explicit <body>: synthesise one and continue in "in body".
            self.tree.insertElement(impliedTagToken("body", "StartTag"))
            self.parser.phase = self.parser.phases["inBody"]
            self.parser.framesetOK = True

    class InBodyPhase(Phase):
        # http://www.whatwg.org/specs/web-apps/current-work/#parsing-main-inbody
        # the really-really-really-very crazy mode
        def __init__(self, parser, tree):
            """Build the start/end tag dispatch tables for the "in body"
            insertion mode."""
            Phase.__init__(self, parser, tree)

            # Set this to the default handler
            self.processSpaceCharacters = self.processSpaceCharactersNonPre

            # Dispatch table: tag name(s) -> handler method. Entries not
            # listed fall through to startTagOther.
            self.startTagHandler = _utils.MethodDispatcher([
                ("html", self.startTagHtml),
                (("base", "basefont", "bgsound", "command", "link", "meta",
                  "script", "style", "title"),
                 self.startTagProcessInHead),
                ("body", self.startTagBody),
                ("frameset", self.startTagFrameset),
                (("address", "article", "aside", "blockquote", "center", "details",
                  "dir", "div", "dl", "fieldset", "figcaption", "figure",
                  "footer", "header", "hgroup", "main", "menu", "nav", "ol", "p",
                  "section", "summary", "ul"),
                 self.startTagCloseP),
                (headingElements, self.startTagHeading),
                (("pre", "listing"), self.startTagPreListing),
                ("form", self.startTagForm),
                (("li", "dd", "dt"), self.startTagListItem),
                ("plaintext", self.startTagPlaintext),
                ("a", self.startTagA),
                (("b", "big", "code", "em", "font", "i", "s", "small", "strike",
                  "strong", "tt", "u"), self.startTagFormatting),
                ("nobr", self.startTagNobr),
                ("button", self.startTagButton),
                (("applet", "marquee", "object"), self.startTagAppletMarqueeObject),
                ("xmp", self.startTagXmp),
                ("table", self.startTagTable),
                (("area", "br", "embed", "img", "keygen", "wbr"),
                 self.startTagVoidFormatting),
                (("param", "source", "track"), self.startTagParamSource),
                ("input", self.startTagInput),
                ("hr", self.startTagHr),
                ("image", self.startTagImage),
                ("isindex", self.startTagIsIndex),
                ("textarea", self.startTagTextarea),
                ("iframe", self.startTagIFrame),
                ("noscript", self.startTagNoscript),
                (("noembed", "noframes"), self.startTagRawtext),
                ("select", self.startTagSelect),
                (("rp", "rt"), self.startTagRpRt),
                (("option", "optgroup"), self.startTagOpt),
                # NOTE: the parentheses below are redundant -- these keys are
                # plain strings, not tuples.
                (("math"), self.startTagMath),
                (("svg"), self.startTagSvg),
                (("caption", "col", "colgroup", "frame", "head",
                  "tbody", "td", "tfoot", "th", "thead",
                  "tr"), self.startTagMisplaced)
            ])
            self.startTagHandler.default = self.startTagOther

            self.endTagHandler = _utils.MethodDispatcher([
                ("body", self.endTagBody),
                ("html", self.endTagHtml),
                (("address", "article", "aside", "blockquote", "button", "center",
                  "details", "dialog", "dir", "div", "dl", "fieldset", "figcaption", "figure",
                  "footer", "header", "hgroup", "listing", "main", "menu", "nav", "ol", "pre",
                  "section", "summary", "ul"), self.endTagBlock),
                ("form", self.endTagForm),
                ("p", self.endTagP),
                (("dd", "dt", "li"), self.endTagListItem),
                (headingElements, self.endTagHeading),
                (("a", "b", "big", "code", "em", "font", "i", "nobr", "s", "small",
                  "strike", "strong", "tt", "u"), self.endTagFormatting),
                (("applet", "marquee", "object"), self.endTagAppletMarqueeObject),
                ("br", self.endTagBr),
            ])
            self.endTagHandler.default = self.endTagOther

        def isMatchingFormattingElement(self, node1, node2):
            """True when the two nodes agree on name, namespace and
            attributes -- the notion of "identical" used by the active
            formatting elements bookkeeping."""
            if node1.name != node2.name:
                return False
            if node1.namespace != node2.namespace:
                return False
            return node1.attributes == node2.attributes

        # helper
        def addFormattingElement(self, token):
            """Insert the element for *token* and record it in the active
            formatting elements list, enforcing the "Noah's Ark" clause:
            at most three identical entries since the last marker."""
            self.tree.insertElement(token)
            element = self.tree.openElements[-1]

            matching = []
            for candidate in reversed(self.tree.activeFormattingElements):
                if candidate is Marker:
                    break
                if self.isMatchingFormattingElement(candidate, element):
                    matching.append(candidate)

            assert len(matching) <= 3
            if len(matching) == 3:
                # Drop the earliest of the three existing duplicates.
                self.tree.activeFormattingElements.remove(matching[-1])
            self.tree.activeFormattingElements.append(element)

        # the real deal
        def processEOF(self):
            """Report an error if EOF arrives while elements other than the
            optional-end-tag ones are still open, then stop parsing."""
            allowed_elements = frozenset(("dd", "dt", "li", "p", "tbody", "td",
                                          "tfoot", "th", "thead", "tr", "body",
                                          "html"))
            if any(node.name not in allowed_elements
                   for node in self.tree.openElements):
                self.parser.parseError("expected-closing-tag-but-got-eof")
            # Stop parsing

        def processSpaceCharactersDropNewline(self, token):
            """One-shot whitespace handler installed after <pre>, <listing>
            and <textarea>: a single leading newline inside those elements
            is dropped, then the default handler takes over again."""
            data = token["data"]
            # Re-arm the default handler; the special case applies only to
            # the very first character token.
            self.processSpaceCharacters = self.processSpaceCharactersNonPre
            current = self.tree.openElements[-1]
            if (data.startswith("\n") and
                    current.name in ("pre", "listing", "textarea") and
                    not current.hasContent()):
                data = data[1:]
            if data:
                self.tree.reconstructActiveFormattingElements()
                self.tree.insertText(data)

        def processCharacters(self, token):
            """Insert character data after reconstructing the active
            formatting elements; any non-space data disables frameset
            insertion."""
            data = token["data"]
            if data == "\u0000":
                # The tokenizer should always emit null on its own
                return
            self.tree.reconstructActiveFormattingElements()
            self.tree.insertText(data)
            # This must be bad for performance
            if (self.parser.framesetOK and
                    not all(char in spaceCharacters for char in data)):
                self.parser.framesetOK = False

        def processSpaceCharactersNonPre(self, token):
            # Default whitespace handling: treated like ordinary text.
            tree = self.tree
            tree.reconstructActiveFormattingElements()
            tree.insertText(token["data"])

        def startTagProcessInHead(self, token):
            # Head-only elements in the body are handled by the "in head"
            # phase.
            inHead = self.parser.phases["inHead"]
            return inHead.processStartTag(token)

        def startTagBody(self, token):
            """A second <body> is an error; merge its new attributes into
            the existing body element instead."""
            self.parser.parseError("unexpected-start-tag", {"name": "body"})
            stack = self.tree.openElements
            if len(stack) == 1 or stack[1].name != "body":
                # Only reachable in the fragment (innerHTML) case.
                assert self.parser.innerHTML
            else:
                self.parser.framesetOK = False
                body = stack[1]
                for attr, value in token["data"].items():
                    if attr not in body.attributes:
                        body.attributes[attr] = value

        def startTagFrameset(self, token):
            """Late <frameset>: honoured only while framesetOK is still set,
            in which case the body element is removed and replaced."""
            self.parser.parseError("unexpected-start-tag", {"name": "frameset"})
            stack = self.tree.openElements
            if len(stack) == 1 or stack[1].name != "body":
                # Only reachable in the fragment (innerHTML) case.
                assert self.parser.innerHTML
            elif self.parser.framesetOK:
                body = stack[1]
                if body.parent:
                    body.parent.removeChild(body)
                while stack[-1].name != "html":
                    stack.pop()
                self.tree.insertElement(token)
                self.parser.phase = self.parser.phases["inFrameset"]

        def startTagCloseP(self, token):
            """Insert a block-level element, first closing any <p> element
            open in button scope."""
            if self.tree.elementInScope("p", variant="button"):
                self.endTagP(impliedTagToken("p"))
            self.tree.insertElement(token)

        def startTagPreListing(self, token):
            """<pre>/<listing>: close an open <p>, then arm the handler that
            swallows a single leading newline."""
            if self.tree.elementInScope("p", variant="button"):
                self.endTagP(impliedTagToken("p"))
            self.tree.insertElement(token)
            self.parser.framesetOK = False
            self.processSpaceCharacters = self.processSpaceCharactersDropNewline

        def startTagForm(self, token):
            """Open a <form> unless one is already open; nested forms are
            reported and ignored."""
            if self.tree.formPointer:
                self.parser.parseError("unexpected-start-tag", {"name": "form"})
                return
            if self.tree.elementInScope("p", variant="button"):
                self.endTagP(impliedTagToken("p"))
            self.tree.insertElement(token)
            self.tree.formPointer = self.tree.openElements[-1]

        def startTagListItem(self, token):
            """<li>/<dd>/<dt>: implicitly close a still-open item of the
            same family and any open <p> before inserting the new item."""
            self.parser.framesetOK = False

            # Which open elements force an implied end tag for this item.
            stopNames = {"li": ("li",),
                         "dt": ("dt", "dd"),
                         "dd": ("dt", "dd")}[token["name"]]
            for node in reversed(self.tree.openElements):
                if node.name in stopNames:
                    self.parser.phase.processEndTag(
                        impliedTagToken(node.name, "EndTag"))
                    break
                if (node.nameTuple in specialElements and
                        node.name not in ("address", "div", "p")):
                    # A special element other than these blocks the search.
                    break

            if self.tree.elementInScope("p", variant="button"):
                self.parser.phase.processEndTag(
                    impliedTagToken("p", "EndTag"))

            self.tree.insertElement(token)

        def startTagPlaintext(self, token):
            """<plaintext>: everything after this tag is literal text (the
            tokenizer never leaves the plaintext state)."""
            if self.tree.elementInScope("p", variant="button"):
                self.endTagP(impliedTagToken("p"))
            self.tree.insertElement(token)
            self.parser.tokenizer.state = self.parser.tokenizer.plaintextState

        def startTagHeading(self, token):
            """<h1>-<h6>: close an open <p>; a heading directly inside
            another heading implicitly closes it first."""
            if self.tree.elementInScope("p", variant="button"):
                self.endTagP(impliedTagToken("p"))
            if self.tree.openElements[-1].name in headingElements:
                self.parser.parseError("unexpected-start-tag", {"name": token["name"]})
                self.tree.openElements.pop()
            self.tree.insertElement(token)

        def startTagA(self, token):
            # If an <a> is still in the active formatting elements, it must
            # be closed via the adoption agency first, and any leftover
            # references to it removed, before the new <a> is opened.
            afeAElement = self.tree.elementInActiveFormattingElements("a")
            if afeAElement:
                self.parser.parseError("unexpected-start-tag-implies-end-tag",
                                       {"startName": "a", "endName": "a"})
                self.endTagFormatting(impliedTagToken("a"))
                # The adoption agency may or may not have removed the old
                # element from these lists; make sure it is gone from both.
                if afeAElement in self.tree.openElements:
                    self.tree.openElements.remove(afeAElement)
                if afeAElement in self.tree.activeFormattingElements:
                    self.tree.activeFormattingElements.remove(afeAElement)
            self.tree.reconstructActiveFormattingElements()
            self.addFormattingElement(token)

        def startTagFormatting(self, token):
            # Generic formatting start tag: reconstruct the active
            # formatting elements, then insert and register the new one.
            self.tree.reconstructActiveFormattingElements()
            self.addFormattingElement(token)

        def startTagNobr(self, token):
            # <nobr> nested inside an open <nobr>: close the outer one via
            # the adoption agency before inserting the new element.
            self.tree.reconstructActiveFormattingElements()
            if self.tree.elementInScope("nobr"):
                self.parser.parseError("unexpected-start-tag-implies-end-tag",
                                       {"startName": "nobr", "endName": "nobr"})
                self.processEndTag(impliedTagToken("nobr"))
                # XXX Need tests that trigger the following
                self.tree.reconstructActiveFormattingElements()
            self.addFormattingElement(token)

        def startTagButton(self, token):
            """Open a <button>; an already-open button is closed first and
            the token is returned for reprocessing."""
            if not self.tree.elementInScope("button"):
                self.tree.reconstructActiveFormattingElements()
                self.tree.insertElement(token)
                self.parser.framesetOK = False
                return None
            self.parser.parseError("unexpected-start-tag-implies-end-tag",
                                   {"startName": "button", "endName": "button"})
            self.processEndTag(impliedTagToken("button"))
            return token

        def startTagAppletMarqueeObject(self, token):
            # applet/marquee/object: insert the element and push a scope
            # marker onto the active formatting elements so formatting does
            # not leak across the element boundary.
            self.tree.reconstructActiveFormattingElements()
            self.tree.insertElement(token)
            self.tree.activeFormattingElements.append(Marker)
            self.parser.framesetOK = False

        def startTagXmp(self, token):
            # <xmp>: close an open <p>, then parse the content as raw text.
            if self.tree.elementInScope("p", variant="button"):
                self.endTagP(impliedTagToken("p"))
            self.tree.reconstructActiveFormattingElements()
            self.parser.framesetOK = False
            self.parser.parseRCDataRawtext(token, "RAWTEXT")

        def startTagTable(self, token):
            # <table>: in standards mode an open <p> is closed first; in
            # quirks mode the table nests inside the paragraph.
            if self.parser.compatMode != "quirks":
                if self.tree.elementInScope("p", variant="button"):
                    self.processEndTag(impliedTagToken("p"))
            self.tree.insertElement(token)
            self.parser.framesetOK = False
            self.parser.phase = self.parser.phases["inTable"]

        def startTagVoidFormatting(self, token):
            # Void elements in body content: insert, then pop immediately
            # since they can have no children; acknowledge self-closing.
            self.tree.reconstructActiveFormattingElements()
            self.tree.insertElement(token)
            self.tree.openElements.pop()
            token["selfClosingAcknowledged"] = True
            self.parser.framesetOK = False

        def startTagInput(self, token):
            # <input> is handled like other void elements, except that a
            # hidden input must not clear framesetOK, so save and restore it.
            framesetOK = self.parser.framesetOK
            self.startTagVoidFormatting(token)
            if ("type" in token["data"] and
                    token["data"]["type"].translate(asciiUpper2Lower) == "hidden"):
                # input type=hidden doesn't change framesetOK
                self.parser.framesetOK = framesetOK

        def startTagParamSource(self, token):
            # param/source-style void elements: insert and pop immediately;
            # acknowledge self-closing but leave framesetOK untouched.
            self.tree.insertElement(token)
            self.tree.openElements.pop()
            token["selfClosingAcknowledged"] = True

        def startTagHr(self, token):
            # <hr>: closes an open <p>, is void (popped at once) and clears
            # the frameset-ok flag.
            if self.tree.elementInScope("p", variant="button"):
                self.endTagP(impliedTagToken("p"))
            self.tree.insertElement(token)
            self.tree.openElements.pop()
            token["selfClosingAcknowledged"] = True
            self.parser.framesetOK = False

        def startTagImage(self, token):
            # <image> is a historical misspelling: reprocess the token as an
            # <img> start tag carrying the same attributes.
            # No really...
            self.parser.parseError("unexpected-start-tag-treated-as",
                                   {"originalName": "image", "newName": "img"})
            self.processStartTag(impliedTagToken("img", "StartTag",
                                                 attributes=token["data"],
                                                 selfClosing=token["selfClosing"]))

        def startTagIsIndex(self, token):
            # The deprecated <isindex> element is expanded into an
            # equivalent form by synthesizing and reprocessing tokens:
            #   <form><hr><label>prompt<input name=isindex ...></label>
            #   <hr></form>
            self.parser.parseError("deprecated-tag", {"name": "isindex"})
            if self.tree.formPointer:
                # Ignored entirely when a form is already open.
                return
            # Only the "action" attribute is forwarded to the <form>.
            form_attrs = {}
            if "action" in token["data"]:
                form_attrs["action"] = token["data"]["action"]
            self.processStartTag(impliedTagToken("form", "StartTag",
                                                 attributes=form_attrs))
            self.processStartTag(impliedTagToken("hr", "StartTag"))
            self.processStartTag(impliedTagToken("label", "StartTag"))
            # XXX Localization ...
            if "prompt" in token["data"]:
                prompt = token["data"]["prompt"]
            else:
                prompt = "This is a searchable index. Enter search keywords: "
            self.processCharacters(
                {"type": tokenTypes["Characters"], "data": prompt})
            # All remaining attributes (minus action/prompt) go on the input.
            attributes = token["data"].copy()
            if "action" in attributes:
                del attributes["action"]
            if "prompt" in attributes:
                del attributes["prompt"]
            attributes["name"] = "isindex"
            self.processStartTag(impliedTagToken("input", "StartTag",
                                                 attributes=attributes,
                                                 selfClosing=token["selfClosing"]))
            self.processEndTag(impliedTagToken("label"))
            self.processStartTag(impliedTagToken("hr", "StartTag"))
            self.processEndTag(impliedTagToken("form"))

        def startTagTextarea(self, token):
            # <textarea>: switch the tokenizer to RCDATA and drop a leading
            # newline in the element's content.
            self.tree.insertElement(token)
            self.parser.tokenizer.state = self.parser.tokenizer.rcdataState
            self.processSpaceCharacters = self.processSpaceCharactersDropNewline
            self.parser.framesetOK = False

        def startTagIFrame(self, token):
            # <iframe>: clears the frameset-ok flag, then parsed as raw text.
            self.parser.framesetOK = False
            self.startTagRawtext(token)

        def startTagNoscript(self, token):
            """<noscript>: raw-text content when scripting is enabled,
            otherwise treated like any other start tag."""
            handler = (self.startTagRawtext if self.parser.scripting
                       else self.startTagOther)
            handler(token)

        def startTagRawtext(self, token):
            """iframe, noembed, noframes, noscript (if scripting enabled):
            parse the element's content as raw text."""
            self.parser.parseRCDataRawtext(token, "RAWTEXT")

        def startTagOpt(self, token):
            # option/optgroup: an open <option> is implicitly closed first.
            if self.tree.openElements[-1].name == "option":
                self.parser.phase.processEndTag(impliedTagToken("option"))
            self.tree.reconstructActiveFormattingElements()
            # NOTE(review): every sibling handler inserts via self.tree;
            # presumably self.parser.tree is the same object — confirm.
            self.parser.tree.insertElement(token)

        def startTagSelect(self, token):
            # <select>: switch to the inSelect phase, or to inSelectInTable
            # when the select was opened from any table-related phase.
            self.tree.reconstructActiveFormattingElements()
            self.tree.insertElement(token)
            self.parser.framesetOK = False
            if self.parser.phase in (self.parser.phases["inTable"],
                                     self.parser.phases["inCaption"],
                                     self.parser.phases["inColumnGroup"],
                                     self.parser.phases["inTableBody"],
                                     self.parser.phases["inRow"],
                                     self.parser.phases["inCell"]):
                self.parser.phase = self.parser.phases["inSelectInTable"]
            else:
                self.parser.phase = self.parser.phases["inSelect"]

        def startTagRpRt(self, token):
            # rp/rt: inside a <ruby>, generate implied end tags first and
            # report if that did not leave <ruby> as the current node.
            if self.tree.elementInScope("ruby"):
                self.tree.generateImpliedEndTags()
                if self.tree.openElements[-1].name != "ruby":
                    self.parser.parseError()
            self.tree.insertElement(token)

        def startTagMath(self, token):
            # <math>: enter MathML foreign content. Attribute names and
            # foreign (xlink/xml/xmlns) attributes are adjusted, and the
            # token is tagged with the MathML namespace before insertion.
            self.tree.reconstructActiveFormattingElements()
            self.parser.adjustMathMLAttributes(token)
            self.parser.adjustForeignAttributes(token)
            token["namespace"] = namespaces["mathml"]
            self.tree.insertElement(token)
            # Need to get the parse error right for the case where the token
            # has a namespace not equal to the xmlns attribute
            if token["selfClosing"]:
                self.tree.openElements.pop()
                token["selfClosingAcknowledged"] = True

        def startTagSvg(self, token):
            # <svg>: enter SVG foreign content. Mirrors startTagMath but
            # with SVG attribute adjustment and namespace.
            self.tree.reconstructActiveFormattingElements()
            self.parser.adjustSVGAttributes(token)
            self.parser.adjustForeignAttributes(token)
            token["namespace"] = namespaces["svg"]
            self.tree.insertElement(token)
            # Need to get the parse error right for the case where the token
            # has a namespace not equal to the xmlns attribute
            if token["selfClosing"]:
                self.tree.openElements.pop()
                token["selfClosingAcknowledged"] = True

        def startTagMisplaced(self, token):
            """ Elements that should be children of other elements that have a
            different insertion mode; here they are ignored
            "caption", "col", "colgroup", "frame", "frameset", "head",
            "option", "optgroup", "tbody", "td", "tfoot", "th", "thead",
            "tr", "noscript"
            """
            # Report and drop the token; nothing is inserted.
            self.parser.parseError("unexpected-start-tag-ignored", {"name": token["name"]})

        def startTagOther(self, token):
            # Default: any unhandled start tag becomes an ordinary element.
            self.tree.reconstructActiveFormattingElements()
            self.tree.insertElement(token)

        def endTagP(self, token):
            # </p> with no <p> in button scope: synthesize an open <p> so
            # there is something to close, then handle the end tag again
            # (the recursive call takes the else branch).
            if not self.tree.elementInScope("p", variant="button"):
                self.startTagCloseP(impliedTagToken("p", "StartTag"))
                self.parser.parseError("unexpected-end-tag", {"name": "p"})
                self.endTagP(impliedTagToken("p", "EndTag"))
            else:
                self.tree.generateImpliedEndTags("p")
                if self.tree.openElements[-1].name != "p":
                    self.parser.parseError("unexpected-end-tag", {"name": "p"})
                # Pop up to and including the open <p>.
                node = self.tree.openElements.pop()
                while node.name != "p":
                    node = self.tree.openElements.pop()

        def endTagBody(self, token):
            """Handle </body>: switch to the afterBody phase.

            The token is ignored when <body> is not in scope; otherwise any
            unexpectedly still-open element is reported before switching.
            """
            if not self.tree.elementInScope("body"):
                self.parser.parseError()
                return
            elif self.tree.openElements[-1].name != "body":
                # Elements that may legitimately remain open at </body>.
                # Hoisted out of the loop: the original rebuilt this
                # frozenset on every iteration.
                allowed = frozenset(("dd", "dt", "li", "optgroup",
                                     "option", "p", "rp", "rt",
                                     "tbody", "td", "tfoot",
                                     "th", "thead", "tr", "body",
                                     "html"))
                for node in self.tree.openElements[2:]:
                    if node.name not in allowed:
                        # Not sure this is the correct name for the parse error
                        self.parser.parseError(
                            "expected-one-end-tag-but-got-another",
                            {"gotName": "body", "expectedName": node.name})
                        break
            self.parser.phase = self.parser.phases["afterBody"]

        def endTagHtml(self, token):
            """</html> in body: act as an implied </body>, then hand the
            token back for reprocessing in the new phase."""
            # We repeat the test for the body end tag token being ignored here
            if not self.tree.elementInScope("body"):
                return None
            self.endTagBody(impliedTagToken("body"))
            return token

        def endTagBlock(self, token):
            # Generic handler for block-level end tags; </pre> additionally
            # restores normal whitespace handling.
            # Put us back in the right whitespace handling mode
            if token["name"] == "pre":
                self.processSpaceCharacters = self.processSpaceCharactersNonPre
            inScope = self.tree.elementInScope(token["name"])
            if inScope:
                self.tree.generateImpliedEndTags()
            # Report when other elements are still open above the target.
            if self.tree.openElements[-1].name != token["name"]:
                self.parser.parseError("end-tag-too-early", {"name": token["name"]})
            if inScope:
                # Pop up to and including the matching element.
                node = self.tree.openElements.pop()
                while node.name != token["name"]:
                    node = self.tree.openElements.pop()

        def endTagForm(self, token):
            # </form>: the form pointer is cleared unconditionally. The form
            # element is removed from the stack in place (not popped), so
            # elements opened after it stay open.
            node = self.tree.formPointer
            self.tree.formPointer = None
            if node is None or not self.tree.elementInScope(node):
                self.parser.parseError("unexpected-end-tag",
                                       {"name": "form"})
            else:
                self.tree.generateImpliedEndTags()
                if self.tree.openElements[-1] != node:
                    self.parser.parseError("end-tag-too-early-ignored",
                                           {"name": "form"})
                self.tree.openElements.remove(node)

        def endTagListItem(self, token):
            # </li> uses list-item scope; </dd> and </dt> use default scope.
            if token["name"] == "li":
                variant = "list"
            else:
                variant = None
            if not self.tree.elementInScope(token["name"], variant=variant):
                self.parser.parseError("unexpected-end-tag", {"name": token["name"]})
            else:
                # Exclude the element itself from implied end tags so it is
                # still open for the pop loop below.
                self.tree.generateImpliedEndTags(exclude=token["name"])
                if self.tree.openElements[-1].name != token["name"]:
                    self.parser.parseError(
                        "end-tag-too-early",
                        {"name": token["name"]})
                # Pop up to and including the matching element.
                node = self.tree.openElements.pop()
                while node.name != token["name"]:
                    node = self.tree.openElements.pop()

        def endTagHeading(self, token):
            # A heading end tag may close any open h1-h6, not just the one
            # with a matching name.
            for item in headingElements:
                if self.tree.elementInScope(item):
                    self.tree.generateImpliedEndTags()
                    break
            if self.tree.openElements[-1].name != token["name"]:
                self.parser.parseError("end-tag-too-early", {"name": token["name"]})

            for item in headingElements:
                if self.tree.elementInScope(item):
                    # Pop up to and including the nearest open heading.
                    item = self.tree.openElements.pop()
                    while item.name not in headingElements:
                        item = self.tree.openElements.pop()
                    break

        def endTagFormatting(self, token):
            """The much-feared adoption agency algorithm.

            Closes a misnested formatting element (e.g. </b> when other
            elements were opened inside the <b>), cloning formatting
            elements and reparenting content so the resulting tree matches
            what the markup visually implies.
            """
            # http://svn.whatwg.org/webapps/complete.html#adoptionAgency revision 7867
            # XXX Better parseError messages appreciated.

            # Step 1
            outerLoopCounter = 0

            # Step 2: the spec caps the outer loop at eight iterations.
            while outerLoopCounter < 8:

                # Step 3
                outerLoopCounter += 1

                # Step 4:

                # Let the formatting element be the last element in
                # the list of active formatting elements that:
                # - is between the end of the list and the last scope
                # marker in the list, if any, or the start of the list
                # otherwise, and
                # - has the same tag name as the token.
                formattingElement = self.tree.elementInActiveFormattingElements(
                    token["name"])
                if (not formattingElement or
                    (formattingElement in self.tree.openElements and
                     not self.tree.elementInScope(formattingElement.name))):
                    # If there is no such node, then abort these steps
                    # and instead act as described in the "any other
                    # end tag" entry below.
                    self.endTagOther(token)
                    return

                # Otherwise, if there is such a node, but that node is
                # not in the stack of open elements, then this is a
                # parse error; remove the element from the list, and
                # abort these steps.
                elif formattingElement not in self.tree.openElements:
                    self.parser.parseError("adoption-agency-1.2", {"name": token["name"]})
                    self.tree.activeFormattingElements.remove(formattingElement)
                    return

                # Otherwise, if there is such a node, and that node is
                # also in the stack of open elements, but the element
                # is not in scope, then this is a parse error; ignore
                # the token, and abort these steps.
                elif not self.tree.elementInScope(formattingElement.name):
                    self.parser.parseError("adoption-agency-4.4", {"name": token["name"]})
                    return

                # Otherwise, there is a formatting element and that
                # element is in the stack and is in scope. If the
                # element is not the current node, this is a parse
                # error. In any case, proceed with the algorithm as
                # written in the following steps.
                else:
                    if formattingElement != self.tree.openElements[-1]:
                        self.parser.parseError("adoption-agency-1.3", {"name": token["name"]})

                # Step 5:

                # Let the furthest block be the topmost node in the
                # stack of open elements that is lower in the stack
                # than the formatting element, and is an element in
                # the special category. There might not be one.
                afeIndex = self.tree.openElements.index(formattingElement)
                furthestBlock = None
                for element in self.tree.openElements[afeIndex:]:
                    if element.nameTuple in specialElements:
                        furthestBlock = element
                        break

                # Step 6:

                # If there is no furthest block, then the UA must
                # first pop all the nodes from the bottom of the stack
                # of open elements, from the current node up to and
                # including the formatting element, then remove the
                # formatting element from the list of active
                # formatting elements, and finally abort these steps.
                if furthestBlock is None:
                    element = self.tree.openElements.pop()
                    while element != formattingElement:
                        element = self.tree.openElements.pop()
                    self.tree.activeFormattingElements.remove(element)
                    return

                # Step 7
                commonAncestor = self.tree.openElements[afeIndex - 1]

                # Step 8:
                # The bookmark is supposed to help us identify where to reinsert
                # nodes in step 15. We have to ensure that we reinsert nodes after
                # the node before the active formatting element. Note the bookmark
                # can move in step 9.7
                bookmark = self.tree.activeFormattingElements.index(formattingElement)

                # Step 9
                lastNode = node = furthestBlock
                innerLoopCounter = 0

                index = self.tree.openElements.index(node)
                # The spec caps the inner loop at three iterations.
                while innerLoopCounter < 3:
                    innerLoopCounter += 1
                    # Node is element before node in open elements
                    index -= 1
                    node = self.tree.openElements[index]
                    if node not in self.tree.activeFormattingElements:
                        self.tree.openElements.remove(node)
                        continue
                    # Step 9.6
                    if node == formattingElement:
                        break
                    # Step 9.7
                    if lastNode == furthestBlock:
                        bookmark = self.tree.activeFormattingElements.index(node) + 1
                    # Step 9.8
                    clone = node.cloneNode()
                    # Replace node with clone
                    self.tree.activeFormattingElements[
                        self.tree.activeFormattingElements.index(node)] = clone
                    self.tree.openElements[
                        self.tree.openElements.index(node)] = clone
                    node = clone
                    # Step 9.9
                    # Remove lastNode from its parents, if any
                    if lastNode.parent:
                        lastNode.parent.removeChild(lastNode)
                    node.appendChild(lastNode)
                    # Step 9.10
                    lastNode = node

                # Step 10
                # Foster parent lastNode if commonAncestor is a
                # table, tbody, tfoot, thead, or tr we need to foster
                # parent the lastNode
                if lastNode.parent:
                    lastNode.parent.removeChild(lastNode)

                if commonAncestor.name in frozenset(("table", "tbody", "tfoot", "thead", "tr")):
                    parent, insertBefore = self.tree.getTableMisnestedNodePosition()
                    parent.insertBefore(lastNode, insertBefore)
                else:
                    commonAncestor.appendChild(lastNode)

                # Step 11
                clone = formattingElement.cloneNode()

                # Step 12
                furthestBlock.reparentChildren(clone)

                # Step 13
                furthestBlock.appendChild(clone)

                # Step 14
                self.tree.activeFormattingElements.remove(formattingElement)
                self.tree.activeFormattingElements.insert(bookmark, clone)

                # Step 15
                self.tree.openElements.remove(formattingElement)
                self.tree.openElements.insert(
                    self.tree.openElements.index(furthestBlock) + 1, clone)

        def endTagAppletMarqueeObject(self, token):
            # applet/marquee/object end tag: pop up to and including the
            # element, then clear the active formatting elements back to
            # the scope marker pushed when it was opened.
            if self.tree.elementInScope(token["name"]):
                self.tree.generateImpliedEndTags()
            if self.tree.openElements[-1].name != token["name"]:
                self.parser.parseError("end-tag-too-early", {"name": token["name"]})

            if self.tree.elementInScope(token["name"]):
                element = self.tree.openElements.pop()
                while element.name != token["name"]:
                    element = self.tree.openElements.pop()
                self.tree.clearActiveFormattingElements()

        def endTagBr(self, token):
            # </br> is invalid; it is treated as a <br> start tag (insert a
            # br element and immediately pop it).
            self.parser.parseError("unexpected-end-tag-treated-as",
                                   {"originalName": "br", "newName": "br element"})
            self.tree.reconstructActiveFormattingElements()
            self.tree.insertElement(impliedTagToken("br", "StartTag"))
            self.tree.openElements.pop()

        def endTagOther(self, token):
            # "Any other end tag": walk the stack top-down. Close a matching
            # element (popping everything above it), but stop and ignore the
            # token if a special element is reached first.
            for node in self.tree.openElements[::-1]:
                if node.name == token["name"]:
                    self.tree.generateImpliedEndTags(exclude=token["name"])
                    if self.tree.openElements[-1].name != token["name"]:
                        self.parser.parseError("unexpected-end-tag", {"name": token["name"]})
                    while self.tree.openElements.pop() != node:
                        pass
                    break
                else:
                    if node.nameTuple in specialElements:
                        self.parser.parseError("unexpected-end-tag", {"name": token["name"]})
                        break

    class TextPhase(Phase):
        """Phase for RCDATA/RAWTEXT element content (e.g. script/style):
        characters are inserted verbatim and the matching end tag returns
        control to the phase that started this one."""
        def __init__(self, parser, tree):
            Phase.__init__(self, parser, tree)
            # No start tags are expected while in this phase.
            self.startTagHandler = _utils.MethodDispatcher([])
            self.startTagHandler.default = self.startTagOther
            self.endTagHandler = _utils.MethodDispatcher([
                ("script", self.endTagScript)])
            self.endTagHandler.default = self.endTagOther

        def processCharacters(self, token):
            # All character data is inserted as-is.
            self.tree.insertText(token["data"])

        def processEOF(self):
            # EOF before the closing tag: report, pop the open element and
            # let the original phase reprocess the EOF.
            self.parser.parseError("expected-named-closing-tag-but-got-eof",
                                   {"name": self.tree.openElements[-1].name})
            self.tree.openElements.pop()
            self.parser.phase = self.parser.originalPhase
            return True

        def startTagOther(self, token):
            assert False, "Tried to process start tag %s in RCDATA/RAWTEXT mode" % token['name']

        def endTagScript(self, token):
            node = self.tree.openElements.pop()
            assert node.name == "script"
            self.parser.phase = self.parser.originalPhase
            # The rest of this method is all stuff that only happens if
            # document.write works

        def endTagOther(self, token):
            # Close the raw-text element and restore the previous phase.
            self.tree.openElements.pop()
            self.parser.phase = self.parser.originalPhase

    class InTablePhase(Phase):
        # http://www.whatwg.org/specs/web-apps/current-work/#in-table
        def __init__(self, parser, tree):
            Phase.__init__(self, parser, tree)
            # Dispatch tables mapping tag names to handlers; unmatched tags
            # fall through to the *Other handlers (table "voodoo").
            self.startTagHandler = _utils.MethodDispatcher([
                ("html", self.startTagHtml),
                ("caption", self.startTagCaption),
                ("colgroup", self.startTagColgroup),
                ("col", self.startTagCol),
                (("tbody", "tfoot", "thead"), self.startTagRowGroup),
                (("td", "th", "tr"), self.startTagImplyTbody),
                ("table", self.startTagTable),
                (("style", "script"), self.startTagStyleScript),
                ("input", self.startTagInput),
                ("form", self.startTagForm)
            ])
            self.startTagHandler.default = self.startTagOther

            self.endTagHandler = _utils.MethodDispatcher([
                ("table", self.endTagTable),
                (("body", "caption", "col", "colgroup", "html", "tbody", "td",
                  "tfoot", "th", "thead", "tr"), self.endTagIgnore)
            ])
            self.endTagHandler.default = self.endTagOther

        # helper methods
        def clearStackToTableContext(self):
            # "clear the stack back to a table context"
            while self.tree.openElements[-1].name not in ("table", "html"):
                # self.parser.parseError("unexpected-implied-end-tag-in-table",
                #  {"name":  self.tree.openElements[-1].name})
                self.tree.openElements.pop()
            # When the current node is <html> it's an innerHTML case

        # processing methods
        def processEOF(self):
            # EOF with the table still open is an error, except in the
            # innerHTML (fragment) case where <html> is the current node.
            if self.tree.openElements[-1].name != "html":
                self.parser.parseError("eof-in-table")
            else:
                assert self.parser.innerHTML
            # Stop parsing

        def processSpaceCharacters(self, token):
            # Buffer characters via the inTableText phase, remembering the
            # phase to return to after the flush.
            originalPhase = self.parser.phase
            self.parser.phase = self.parser.phases["inTableText"]
            self.parser.phase.originalPhase = originalPhase
            self.parser.phase.processSpaceCharacters(token)

        def processCharacters(self, token):
            # Same buffering as above for non-space characters.
            originalPhase = self.parser.phase
            self.parser.phase = self.parser.phases["inTableText"]
            self.parser.phase.originalPhase = originalPhase
            self.parser.phase.processCharacters(token)

        def insertText(self, token):
            # If we get here there must be at least one non-whitespace character
            # Do the table magic!
            # insertFromTable makes the tree builder foster-parent content
            # that is illegal directly inside a table.
            self.tree.insertFromTable = True
            self.parser.phases["inBody"].processCharacters(token)
            self.tree.insertFromTable = False

        def startTagCaption(self, token):
            # <caption> gets a formatting scope marker of its own.
            self.clearStackToTableContext()
            self.tree.activeFormattingElements.append(Marker)
            self.tree.insertElement(token)
            self.parser.phase = self.parser.phases["inCaption"]

        def startTagColgroup(self, token):
            self.clearStackToTableContext()
            self.tree.insertElement(token)
            self.parser.phase = self.parser.phases["inColumnGroup"]

        def startTagCol(self, token):
            # A bare <col> implies an enclosing <colgroup>; reprocess token.
            self.startTagColgroup(impliedTagToken("colgroup", "StartTag"))
            return token

        def startTagRowGroup(self, token):
            self.clearStackToTableContext()
            self.tree.insertElement(token)
            self.parser.phase = self.parser.phases["inTableBody"]

        def startTagImplyTbody(self, token):
            # Bare td/th/tr implies an enclosing <tbody>; reprocess token.
            self.startTagRowGroup(impliedTagToken("tbody", "StartTag"))
            return token

        def startTagTable(self, token):
            # A nested <table> implicitly closes the current one, then the
            # token is reprocessed (except in the innerHTML case).
            self.parser.parseError("unexpected-start-tag-implies-end-tag",
                                   {"startName": "table", "endName": "table"})
            self.parser.phase.processEndTag(impliedTagToken("table"))
            if not self.parser.innerHTML:
                return token

        def startTagStyleScript(self, token):
            # style/script are handled exactly as in the inHead phase.
            return self.parser.phases["inHead"].processStartTag(token)

        def startTagInput(self, token):
            # Only a hidden input may sit directly in the table; anything
            # else is foster-parented via startTagOther.
            if ("type" in token["data"] and
                    token["data"]["type"].translate(asciiUpper2Lower) == "hidden"):
                self.parser.parseError("unexpected-hidden-input-in-table")
                self.tree.insertElement(token)
                # XXX associate with form
                self.tree.openElements.pop()
            else:
                self.startTagOther(token)

        def startTagForm(self, token):
            # A <form> in a table sets the form pointer but leaves no open
            # element on the stack (inserted then popped immediately).
            self.parser.parseError("unexpected-form-in-table")
            if self.tree.formPointer is None:
                self.tree.insertElement(token)
                self.tree.formPointer = self.tree.openElements[-1]
                self.tree.openElements.pop()

        def startTagOther(self, token):
            self.parser.parseError("unexpected-start-tag-implies-table-voodoo", {"name": token["name"]})
            # Do the table magic!
            self.tree.insertFromTable = True
            self.parser.phases["inBody"].processStartTag(token)
            self.tree.insertFromTable = False

        def endTagTable(self, token):
            if self.tree.elementInScope("table", variant="table"):
                self.tree.generateImpliedEndTags()
                if self.tree.openElements[-1].name != "table":
                    self.parser.parseError("end-tag-too-early-named",
                                           {"gotName": "table",
                                            "expectedName": self.tree.openElements[-1].name})
                # Pop up to and including the <table>, then recompute the
                # insertion mode from the remaining stack.
                while self.tree.openElements[-1].name != "table":
                    self.tree.openElements.pop()
                self.tree.openElements.pop()
                self.parser.resetInsertionMode()
            else:
                # innerHTML case
                assert self.parser.innerHTML
                self.parser.parseError()

        def endTagIgnore(self, token):
            # End tags for table sections etc. are dropped in this phase.
            self.parser.parseError("unexpected-end-tag", {"name": token["name"]})

        def endTagOther(self, token):
            self.parser.parseError("unexpected-end-tag-implies-table-voodoo", {"name": token["name"]})
            # Do the table magic!
            self.tree.insertFromTable = True
            self.parser.phases["inBody"].processEndTag(token)
            self.tree.insertFromTable = False

    class InTableTextPhase(Phase):
        """Buffer character tokens seen inside a table until a non-character
        token arrives, then flush them (the "in table text" insertion mode).
        """
        def __init__(self, parser, tree):
            Phase.__init__(self, parser, tree)
            # Phase to restore once the buffered characters are flushed.
            self.originalPhase = None
            # Pending character tokens.
            self.characterTokens = []

        def flushCharacters(self):
            """Insert buffered characters: directly when all whitespace,
            otherwise via the inTable phase's foster-parenting path."""
            data = "".join([item["data"] for item in self.characterTokens])
            # Generator expression (the original built a throwaway list):
            # short-circuits on the first non-space character.
            if any(item not in spaceCharacters for item in data):
                token = {"type": tokenTypes["Characters"], "data": data}
                self.parser.phases["inTable"].insertText(token)
            elif data:
                self.tree.insertText(data)
            self.characterTokens = []

        def processComment(self, token):
            # Any non-character token flushes the buffer and is reprocessed
            # in the original phase.
            self.flushCharacters()
            self.parser.phase = self.originalPhase
            return token

        def processEOF(self):
            self.flushCharacters()
            self.parser.phase = self.originalPhase
            return True

        def processCharacters(self, token):
            # U+0000 characters are dropped outright.
            if token["data"] == "\u0000":
                return
            self.characterTokens.append(token)

        def processSpaceCharacters(self, token):
            # pretty sure we should never reach here
            self.characterTokens.append(token)
    #        assert False

        def processStartTag(self, token):
            self.flushCharacters()
            self.parser.phase = self.originalPhase
            return token

        def processEndTag(self, token):
            self.flushCharacters()
            self.parser.phase = self.originalPhase
            return token

    class InCaptionPhase(Phase):
        """Insertion mode while inside a <caption> element."""
        # http://www.whatwg.org/specs/web-apps/current-work/#in-caption
        def __init__(self, parser, tree):
            Phase.__init__(self, parser, tree)

            self.startTagHandler = _utils.MethodDispatcher([
                ("html", self.startTagHtml),
                (("caption", "col", "colgroup", "tbody", "td", "tfoot", "th",
                  "thead", "tr"), self.startTagTableElement)
            ])
            self.startTagHandler.default = self.startTagOther

            self.endTagHandler = _utils.MethodDispatcher([
                ("caption", self.endTagCaption),
                ("table", self.endTagTable),
                (("body", "col", "colgroup", "html", "tbody", "td", "tfoot", "th",
                  "thead", "tr"), self.endTagIgnore)
            ])
            self.endTagHandler.default = self.endTagOther

        def ignoreEndTagCaption(self):
            # </caption> must be ignored when no caption is in table scope.
            return not self.tree.elementInScope("caption", variant="table")

        def processEOF(self):
            self.parser.phases["inBody"].processEOF()

        def processCharacters(self, token):
            return self.parser.phases["inBody"].processCharacters(token)

        def startTagTableElement(self, token):
            """A table-structure start tag implies </caption>; reprocess the
            token unless the implied end tag would have been ignored."""
            self.parser.parseError()
            # XXX Have to duplicate logic here to find out if the tag is ignored
            ignoreEndTag = self.ignoreEndTagCaption()
            self.parser.phase.processEndTag(impliedTagToken("caption"))
            if not ignoreEndTag:
                return token

        def startTagOther(self, token):
            return self.parser.phases["inBody"].processStartTag(token)

        def endTagCaption(self, token):
            """Close the caption: pop up to and including <caption>, clear
            active formatting elements, return to the "inTable" phase."""
            if not self.ignoreEndTagCaption():
                # AT this code is quite similar to endTagTable in "InTable"
                self.tree.generateImpliedEndTags()
                if self.tree.openElements[-1].name != "caption":
                    self.parser.parseError("expected-one-end-tag-but-got-another",
                                           {"gotName": "caption",
                                            "expectedName": self.tree.openElements[-1].name})
                while self.tree.openElements[-1].name != "caption":
                    self.tree.openElements.pop()
                self.tree.openElements.pop()
                self.tree.clearActiveFormattingElements()
                self.parser.phase = self.parser.phases["inTable"]
            else:
                # innerHTML case
                assert self.parser.innerHTML
                self.parser.parseError()

        def endTagTable(self, token):
            # </table> implies </caption>; reprocess unless ignored.
            self.parser.parseError()
            ignoreEndTag = self.ignoreEndTagCaption()
            self.parser.phase.processEndTag(impliedTagToken("caption"))
            if not ignoreEndTag:
                return token

        def endTagIgnore(self, token):
            """Ignore end tags that may not close anything here."""
            self.parser.parseError("unexpected-end-tag", {"name": token["name"]})

        def endTagOther(self, token):
            return self.parser.phases["inBody"].processEndTag(token)

    class InColumnGroupPhase(Phase):
        """Insertion mode while inside a <colgroup> element."""
        # http://www.whatwg.org/specs/web-apps/current-work/#in-column

        def __init__(self, parser, tree):
            Phase.__init__(self, parser, tree)

            self.startTagHandler = _utils.MethodDispatcher([
                ("html", self.startTagHtml),
                ("col", self.startTagCol)
            ])
            self.startTagHandler.default = self.startTagOther

            self.endTagHandler = _utils.MethodDispatcher([
                ("colgroup", self.endTagColgroup),
                ("col", self.endTagCol)
            ])
            self.endTagHandler.default = self.endTagOther

        def ignoreEndTagColgroup(self):
            # Only <html> left on the stack means we are in the fragment
            # (innerHTML) case, where </colgroup> is ignored.
            return self.tree.openElements[-1].name == "html"

        def processEOF(self):
            if self.tree.openElements[-1].name == "html":
                assert self.parser.innerHTML
                return
            else:
                # Close the colgroup, then (unless ignored) reprocess EOF.
                ignoreEndTag = self.ignoreEndTagColgroup()
                self.endTagColgroup(impliedTagToken("colgroup"))
                if not ignoreEndTag:
                    return True

        def processCharacters(self, token):
            # Characters imply </colgroup>; reprocess unless ignored.
            ignoreEndTag = self.ignoreEndTagColgroup()
            self.endTagColgroup(impliedTagToken("colgroup"))
            if not ignoreEndTag:
                return token

        def startTagCol(self, token):
            # <col> is a void element: insert and pop immediately.
            self.tree.insertElement(token)
            self.tree.openElements.pop()
            token["selfClosingAcknowledged"] = True

        def startTagOther(self, token):
            # Any other start tag implies </colgroup>; reprocess unless ignored.
            ignoreEndTag = self.ignoreEndTagColgroup()
            self.endTagColgroup(impliedTagToken("colgroup"))
            if not ignoreEndTag:
                return token

        def endTagColgroup(self, token):
            """Pop the <colgroup> and return to the "inTable" phase."""
            if self.ignoreEndTagColgroup():
                # innerHTML case
                assert self.parser.innerHTML
                self.parser.parseError()
            else:
                self.tree.openElements.pop()
                self.parser.phase = self.parser.phases["inTable"]

        def endTagCol(self, token):
            # </col> is never valid; <col> has no end tag.
            self.parser.parseError("no-end-tag", {"name": "col"})

        def endTagOther(self, token):
            # Any other end tag implies </colgroup>; reprocess unless ignored.
            ignoreEndTag = self.ignoreEndTagColgroup()
            self.endTagColgroup(impliedTagToken("colgroup"))
            if not ignoreEndTag:
                return token

    class InTableBodyPhase(Phase):
        """Insertion mode while inside <tbody>, <tfoot>, or <thead>."""
        # http://www.whatwg.org/specs/web-apps/current-work/#in-table0
        def __init__(self, parser, tree):
            Phase.__init__(self, parser, tree)
            self.startTagHandler = _utils.MethodDispatcher([
                ("html", self.startTagHtml),
                ("tr", self.startTagTr),
                (("td", "th"), self.startTagTableCell),
                (("caption", "col", "colgroup", "tbody", "tfoot", "thead"),
                 self.startTagTableOther)
            ])
            self.startTagHandler.default = self.startTagOther

            self.endTagHandler = _utils.MethodDispatcher([
                (("tbody", "tfoot", "thead"), self.endTagTableRowGroup),
                ("table", self.endTagTable),
                (("body", "caption", "col", "colgroup", "html", "td", "th",
                  "tr"), self.endTagIgnore)
            ])
            self.endTagHandler.default = self.endTagOther

        # helper methods
        def clearStackToTableBodyContext(self):
            """Pop the stack until a row-group element (or <html>) is current."""
            while self.tree.openElements[-1].name not in ("tbody", "tfoot",
                                                          "thead", "html"):
                # self.parser.parseError("unexpected-implied-end-tag-in-table",
                #  {"name": self.tree.openElements[-1].name})
                self.tree.openElements.pop()
            if self.tree.openElements[-1].name == "html":
                # Reaching <html> here only happens in the fragment case.
                assert self.parser.innerHTML

        # the rest
        def processEOF(self):
            self.parser.phases["inTable"].processEOF()

        def processSpaceCharacters(self, token):
            return self.parser.phases["inTable"].processSpaceCharacters(token)

        def processCharacters(self, token):
            return self.parser.phases["inTable"].processCharacters(token)

        def startTagTr(self, token):
            """Open a row: clear to row-group context, insert the <tr>, and
            switch to the "inRow" phase."""
            self.clearStackToTableBodyContext()
            self.tree.insertElement(token)
            self.parser.phase = self.parser.phases["inRow"]

        def startTagTableCell(self, token):
            # A cell outside a row implies <tr>; reprocess the cell token.
            self.parser.parseError("unexpected-cell-in-table-body",
                                   {"name": token["name"]})
            self.startTagTr(impliedTagToken("tr", "StartTag"))
            return token

        def startTagTableOther(self, token):
            # XXX AT Any ideas on how to share this with endTagTable?
            if (self.tree.elementInScope("tbody", variant="table") or
                self.tree.elementInScope("thead", variant="table") or
                    self.tree.elementInScope("tfoot", variant="table")):
                # Close the current row group, then reprocess the token.
                self.clearStackToTableBodyContext()
                self.endTagTableRowGroup(
                    impliedTagToken(self.tree.openElements[-1].name))
                return token
            else:
                # innerHTML case
                assert self.parser.innerHTML
                self.parser.parseError()

        def startTagOther(self, token):
            return self.parser.phases["inTable"].processStartTag(token)

        def endTagTableRowGroup(self, token):
            """Close a <tbody>/<tfoot>/<thead> and return to "inTable"."""
            if self.tree.elementInScope(token["name"], variant="table"):
                self.clearStackToTableBodyContext()
                self.tree.openElements.pop()
                self.parser.phase = self.parser.phases["inTable"]
            else:
                self.parser.parseError("unexpected-end-tag-in-table-body",
                                       {"name": token["name"]})

        def endTagTable(self, token):
            # </table> first closes the open row group, then is reprocessed.
            if (self.tree.elementInScope("tbody", variant="table") or
                self.tree.elementInScope("thead", variant="table") or
                    self.tree.elementInScope("tfoot", variant="table")):
                self.clearStackToTableBodyContext()
                self.endTagTableRowGroup(
                    impliedTagToken(self.tree.openElements[-1].name))
                return token
            else:
                # innerHTML case
                assert self.parser.innerHTML
                self.parser.parseError()

        def endTagIgnore(self, token):
            """Ignore end tags that may not close anything here."""
            self.parser.parseError("unexpected-end-tag-in-table-body",
                                   {"name": token["name"]})

        def endTagOther(self, token):
            return self.parser.phases["inTable"].processEndTag(token)

    class InRowPhase(Phase):
        """Insertion mode while inside a <tr> element."""
        # http://www.whatwg.org/specs/web-apps/current-work/#in-row
        def __init__(self, parser, tree):
            Phase.__init__(self, parser, tree)
            self.startTagHandler = _utils.MethodDispatcher([
                ("html", self.startTagHtml),
                (("td", "th"), self.startTagTableCell),
                (("caption", "col", "colgroup", "tbody", "tfoot", "thead",
                  "tr"), self.startTagTableOther)
            ])
            self.startTagHandler.default = self.startTagOther

            self.endTagHandler = _utils.MethodDispatcher([
                ("tr", self.endTagTr),
                ("table", self.endTagTable),
                (("tbody", "tfoot", "thead"), self.endTagTableRowGroup),
                (("body", "caption", "col", "colgroup", "html", "td", "th"),
                 self.endTagIgnore)
            ])
            self.endTagHandler.default = self.endTagOther

        # helper methods (XXX unify this with other table helper methods)
        def clearStackToTableRowContext(self):
            """Pop the stack until a <tr> (or <html>) is the current node."""
            while self.tree.openElements[-1].name not in ("tr", "html"):
                self.parser.parseError("unexpected-implied-end-tag-in-table-row",
                                       {"name": self.tree.openElements[-1].name})
                self.tree.openElements.pop()

        def ignoreEndTagTr(self):
            # </tr> is ignored when no <tr> is in table scope.
            return not self.tree.elementInScope("tr", variant="table")

        # the rest
        def processEOF(self):
            self.parser.phases["inTable"].processEOF()

        def processSpaceCharacters(self, token):
            return self.parser.phases["inTable"].processSpaceCharacters(token)

        def processCharacters(self, token):
            return self.parser.phases["inTable"].processCharacters(token)

        def startTagTableCell(self, token):
            """Open a cell: clear to row context, insert, switch to "inCell"
            and push a formatting-elements marker."""
            self.clearStackToTableRowContext()
            self.tree.insertElement(token)
            self.parser.phase = self.parser.phases["inCell"]
            self.tree.activeFormattingElements.append(Marker)

        def startTagTableOther(self, token):
            # A table-structure tag implies </tr>; reprocess unless ignored.
            ignoreEndTag = self.ignoreEndTagTr()
            self.endTagTr(impliedTagToken("tr"))
            # XXX how are we sure it's always ignored in the innerHTML case?
            if not ignoreEndTag:
                return token

        def startTagOther(self, token):
            return self.parser.phases["inTable"].processStartTag(token)

        def endTagTr(self, token):
            """Close the row and return to the "inTableBody" phase."""
            if not self.ignoreEndTagTr():
                self.clearStackToTableRowContext()
                self.tree.openElements.pop()
                self.parser.phase = self.parser.phases["inTableBody"]
            else:
                # innerHTML case
                assert self.parser.innerHTML
                self.parser.parseError()

        def endTagTable(self, token):
            # </table> implies </tr> first.
            ignoreEndTag = self.ignoreEndTagTr()
            self.endTagTr(impliedTagToken("tr"))
            # Reprocess the current tag if the tr end tag was not ignored
            # XXX how are we sure it's always ignored in the innerHTML case?
            if not ignoreEndTag:
                return token

        def endTagTableRowGroup(self, token):
            # </tbody>/</tfoot>/</thead> implies </tr> then is reprocessed.
            if self.tree.elementInScope(token["name"], variant="table"):
                self.endTagTr(impliedTagToken("tr"))
                return token
            else:
                self.parser.parseError()

        def endTagIgnore(self, token):
            """Ignore end tags that may not close anything here."""
            self.parser.parseError("unexpected-end-tag-in-table-row",
                                   {"name": token["name"]})

        def endTagOther(self, token):
            return self.parser.phases["inTable"].processEndTag(token)

    class InCellPhase(Phase):
        """Insertion mode while inside a <td> or <th> element."""
        # http://www.whatwg.org/specs/web-apps/current-work/#in-cell
        def __init__(self, parser, tree):
            Phase.__init__(self, parser, tree)
            self.startTagHandler = _utils.MethodDispatcher([
                ("html", self.startTagHtml),
                (("caption", "col", "colgroup", "tbody", "td", "tfoot", "th",
                  "thead", "tr"), self.startTagTableOther)
            ])
            self.startTagHandler.default = self.startTagOther

            self.endTagHandler = _utils.MethodDispatcher([
                (("td", "th"), self.endTagTableCell),
                (("body", "caption", "col", "colgroup", "html"), self.endTagIgnore),
                (("table", "tbody", "tfoot", "thead", "tr"), self.endTagImply)
            ])
            self.endTagHandler.default = self.endTagOther

        # helper
        def closeCell(self):
            """Close whichever cell (td or th) is currently in table scope."""
            if self.tree.elementInScope("td", variant="table"):
                self.endTagTableCell(impliedTagToken("td"))
            elif self.tree.elementInScope("th", variant="table"):
                self.endTagTableCell(impliedTagToken("th"))

        # the rest
        def processEOF(self):
            self.parser.phases["inBody"].processEOF()

        def processCharacters(self, token):
            return self.parser.phases["inBody"].processCharacters(token)

        def startTagTableOther(self, token):
            # A table-structure start tag closes the cell first, then the
            # token is reprocessed in the resulting phase.
            if (self.tree.elementInScope("td", variant="table") or
                    self.tree.elementInScope("th", variant="table")):
                self.closeCell()
                return token
            else:
                # innerHTML case
                assert self.parser.innerHTML
                self.parser.parseError()

        def startTagOther(self, token):
            return self.parser.phases["inBody"].processStartTag(token)

        def endTagTableCell(self, token):
            """Close the named cell (td/th), clear formatting elements, and
            return to the "inRow" phase."""
            if self.tree.elementInScope(token["name"], variant="table"):
                self.tree.generateImpliedEndTags(token["name"])
                if self.tree.openElements[-1].name != token["name"]:
                    # Unwind past unexpected open elements to the cell.
                    self.parser.parseError("unexpected-cell-end-tag",
                                           {"name": token["name"]})
                    while True:
                        node = self.tree.openElements.pop()
                        if node.name == token["name"]:
                            break
                else:
                    self.tree.openElements.pop()
                self.tree.clearActiveFormattingElements()
                self.parser.phase = self.parser.phases["inRow"]
            else:
                self.parser.parseError("unexpected-end-tag", {"name": token["name"]})

        def endTagIgnore(self, token):
            """Ignore end tags that may not close anything here."""
            self.parser.parseError("unexpected-end-tag", {"name": token["name"]})

        def endTagImply(self, token):
            # A table-structure end tag implies closing the cell first,
            # then the token is reprocessed.
            if self.tree.elementInScope(token["name"], variant="table"):
                self.closeCell()
                return token
            else:
                # sometimes innerHTML case
                self.parser.parseError()

        def endTagOther(self, token):
            return self.parser.phases["inBody"].processEndTag(token)

    class InSelectPhase(Phase):
        """Insertion mode while inside a <select> element."""
        def __init__(self, parser, tree):
            Phase.__init__(self, parser, tree)

            self.startTagHandler = _utils.MethodDispatcher([
                ("html", self.startTagHtml),
                ("option", self.startTagOption),
                ("optgroup", self.startTagOptgroup),
                ("select", self.startTagSelect),
                (("input", "keygen", "textarea"), self.startTagInput),
                ("script", self.startTagScript)
            ])
            self.startTagHandler.default = self.startTagOther

            self.endTagHandler = _utils.MethodDispatcher([
                ("option", self.endTagOption),
                ("optgroup", self.endTagOptgroup),
                ("select", self.endTagSelect)
            ])
            self.endTagHandler.default = self.endTagOther

        # http://www.whatwg.org/specs/web-apps/current-work/#in-select
        def processEOF(self):
            if self.tree.openElements[-1].name != "html":
                self.parser.parseError("eof-in-select")
            else:
                # Only <html> left implies the fragment (innerHTML) case.
                assert self.parser.innerHTML

        def processCharacters(self, token):
            # U+0000 characters are dropped inside <select>.
            if token["data"] == "\u0000":
                return
            self.tree.insertText(token["data"])

        def startTagOption(self, token):
            # We need to imply </option> if <option> is the current node.
            if self.tree.openElements[-1].name == "option":
                self.tree.openElements.pop()
            self.tree.insertElement(token)

        def startTagOptgroup(self, token):
            # <optgroup> implicitly closes an open <option> and an open
            # <optgroup> before being inserted.
            if self.tree.openElements[-1].name == "option":
                self.tree.openElements.pop()
            if self.tree.openElements[-1].name == "optgroup":
                self.tree.openElements.pop()
            self.tree.insertElement(token)

        def startTagSelect(self, token):
            # A nested <select> start tag acts like </select>.
            self.parser.parseError("unexpected-select-in-select")
            self.endTagSelect(impliedTagToken("select"))

        def startTagInput(self, token):
            # <input>/<keygen>/<textarea> force the select closed, then the
            # token is reprocessed in the resulting phase.
            self.parser.parseError("unexpected-input-in-select")
            if self.tree.elementInScope("select", variant="select"):
                self.endTagSelect(impliedTagToken("select"))
                return token
            else:
                assert self.parser.innerHTML

        def startTagScript(self, token):
            return self.parser.phases["inHead"].processStartTag(token)

        def startTagOther(self, token):
            """Any other start tag is a parse error and is dropped."""
            self.parser.parseError("unexpected-start-tag-in-select",
                                   {"name": token["name"]})

        def endTagOption(self, token):
            if self.tree.openElements[-1].name == "option":
                self.tree.openElements.pop()
            else:
                self.parser.parseError("unexpected-end-tag-in-select",
                                       {"name": "option"})

        def endTagOptgroup(self, token):
            # </optgroup> implicitly closes <option>
            if (self.tree.openElements[-1].name == "option" and
                    self.tree.openElements[-2].name == "optgroup"):
                self.tree.openElements.pop()
            # It also closes </optgroup>
            if self.tree.openElements[-1].name == "optgroup":
                self.tree.openElements.pop()
            # But nothing else
            else:
                self.parser.parseError("unexpected-end-tag-in-select",
                                       {"name": "optgroup"})

        def endTagSelect(self, token):
            """Pop elements up to and including the <select>, then recompute
            the insertion mode."""
            if self.tree.elementInScope("select", variant="select"):
                node = self.tree.openElements.pop()
                while node.name != "select":
                    node = self.tree.openElements.pop()
                self.parser.resetInsertionMode()
            else:
                # innerHTML case
                assert self.parser.innerHTML
                self.parser.parseError()

        def endTagOther(self, token):
            """Any other end tag is a parse error and is dropped."""
            self.parser.parseError("unexpected-end-tag-in-select",
                                   {"name": token["name"]})

    class InSelectInTablePhase(Phase):
        """Insertion mode for a <select> that sits inside a table.

        Everything is delegated to the plain "inSelect" phase except
        table-structure tags, which force the open <select> closed.
        """

        def __init__(self, parser, tree):
            Phase.__init__(self, parser, tree)

            # Table-structure tags that terminate the open <select>.
            tableTags = ("caption", "table", "tbody", "tfoot", "thead",
                         "tr", "td", "th")

            self.startTagHandler = _utils.MethodDispatcher(
                [(tableTags, self.startTagTable)])
            self.startTagHandler.default = self.startTagOther

            self.endTagHandler = _utils.MethodDispatcher(
                [(tableTags, self.endTagTable)])
            self.endTagHandler.default = self.endTagOther

        def processEOF(self):
            # EOF handling is identical to the plain in-select phase.
            self.parser.phases["inSelect"].processEOF()

        def processCharacters(self, token):
            return self.parser.phases["inSelect"].processCharacters(token)

        def startTagTable(self, token):
            # A table-structure start tag closes the select; the token is
            # then reprocessed in whatever phase results.
            self.parser.parseError("unexpected-table-element-start-tag-in-select-in-table", {"name": token["name"]})
            self.endTagOther(impliedTagToken("select"))
            return token

        def startTagOther(self, token):
            return self.parser.phases["inSelect"].processStartTag(token)

        def endTagTable(self, token):
            # A table-structure end tag closes the select only when the
            # named element is actually in table scope.
            self.parser.parseError("unexpected-table-element-end-tag-in-select-in-table", {"name": token["name"]})
            if self.tree.elementInScope(token["name"], variant="table"):
                self.endTagOther(impliedTagToken("select"))
                return token

        def endTagOther(self, token):
            return self.parser.phases["inSelect"].processEndTag(token)

    class InForeignContentPhase(Phase):
        """Insertion mode for content in a foreign (SVG/MathML) namespace."""
        # HTML elements whose start tag "breaks out" of foreign content
        # back into the HTML namespace.
        breakoutElements = frozenset(["b", "big", "blockquote", "body", "br",
                                      "center", "code", "dd", "div", "dl", "dt",
                                      "em", "embed", "h1", "h2", "h3",
                                      "h4", "h5", "h6", "head", "hr", "i", "img",
                                      "li", "listing", "menu", "meta", "nobr",
                                      "ol", "p", "pre", "ruby", "s", "small",
                                      "span", "strong", "strike", "sub", "sup",
                                      "table", "tt", "u", "ul", "var"])

        def __init__(self, parser, tree):
            Phase.__init__(self, parser, tree)

        def adjustSVGTagNames(self, token):
            """Restore the mixed-case SVG tag name for a lowercased token
            name, in place (no-op for names not in the table)."""
            replacements = {"altglyph": "altGlyph",
                            "altglyphdef": "altGlyphDef",
                            "altglyphitem": "altGlyphItem",
                            "animatecolor": "animateColor",
                            "animatemotion": "animateMotion",
                            "animatetransform": "animateTransform",
                            "clippath": "clipPath",
                            "feblend": "feBlend",
                            "fecolormatrix": "feColorMatrix",
                            "fecomponenttransfer": "feComponentTransfer",
                            "fecomposite": "feComposite",
                            "feconvolvematrix": "feConvolveMatrix",
                            "fediffuselighting": "feDiffuseLighting",
                            "fedisplacementmap": "feDisplacementMap",
                            "fedistantlight": "feDistantLight",
                            "feflood": "feFlood",
                            "fefunca": "feFuncA",
                            "fefuncb": "feFuncB",
                            "fefuncg": "feFuncG",
                            "fefuncr": "feFuncR",
                            "fegaussianblur": "feGaussianBlur",
                            "feimage": "feImage",
                            "femerge": "feMerge",
                            "femergenode": "feMergeNode",
                            "femorphology": "feMorphology",
                            "feoffset": "feOffset",
                            "fepointlight": "fePointLight",
                            "fespecularlighting": "feSpecularLighting",
                            "fespotlight": "feSpotLight",
                            "fetile": "feTile",
                            "feturbulence": "feTurbulence",
                            "foreignobject": "foreignObject",
                            "glyphref": "glyphRef",
                            "lineargradient": "linearGradient",
                            "radialgradient": "radialGradient",
                            "textpath": "textPath"}

            if token["name"] in replacements:
                token["name"] = replacements[token["name"]]

        def processCharacters(self, token):
            """Insert character data; U+0000 becomes U+FFFD, and any
            non-whitespace character clears the frameset-ok flag."""
            if token["data"] == "\u0000":
                token["data"] = "\uFFFD"
            elif (self.parser.framesetOK and
                  any(char not in spaceCharacters for char in token["data"])):
                self.parser.framesetOK = False
            Phase.processCharacters(self, token)

        def processStartTag(self, token):
            """Either break out of foreign content (for HTML breakout
            elements) or insert the token in the current foreign namespace
            after the appropriate name/attribute adjustments."""
            currentNode = self.tree.openElements[-1]
            if (token["name"] in self.breakoutElements or
                (token["name"] == "font" and
                 set(token["data"].keys()) & set(["color", "face", "size"]))):
                self.parser.parseError("unexpected-html-element-in-foreign-content",
                                       {"name": token["name"]})
                # Pop foreign elements until we are back at an HTML element
                # or an integration point, then reprocess the token.
                while (self.tree.openElements[-1].namespace !=
                       self.tree.defaultNamespace and
                       not self.parser.isHTMLIntegrationPoint(self.tree.openElements[-1]) and
                       not self.parser.isMathMLTextIntegrationPoint(self.tree.openElements[-1])):
                    self.tree.openElements.pop()
                return token

            else:
                if currentNode.namespace == namespaces["mathml"]:
                    self.parser.adjustMathMLAttributes(token)
                elif currentNode.namespace == namespaces["svg"]:
                    self.adjustSVGTagNames(token)
                    self.parser.adjustSVGAttributes(token)
                self.parser.adjustForeignAttributes(token)
                # The new element inherits the current foreign namespace.
                token["namespace"] = currentNode.namespace
                self.tree.insertElement(token)
                if token["selfClosing"]:
                    self.tree.openElements.pop()
                    token["selfClosingAcknowledged"] = True

        def processEndTag(self, token):
            """Walk up the open-element stack looking for a case-insensitive
            name match; on reaching an HTML-namespace ancestor, delegate to
            the current phase instead."""
            nodeIndex = len(self.tree.openElements) - 1
            node = self.tree.openElements[-1]
            if node.name.translate(asciiUpper2Lower) != token["name"]:
                self.parser.parseError("unexpected-end-tag", {"name": token["name"]})

            while True:
                if node.name.translate(asciiUpper2Lower) == token["name"]:
                    # XXX this isn't in the spec but it seems necessary
                    if self.parser.phase == self.parser.phases["inTableText"]:
                        self.parser.phase.flushCharacters()
                        self.parser.phase = self.parser.phase.originalPhase
                    while self.tree.openElements.pop() != node:
                        assert self.tree.openElements
                    new_token = None
                    break
                nodeIndex -= 1

                node = self.tree.openElements[nodeIndex]
                if node.namespace != self.tree.defaultNamespace:
                    # Still in foreign content; keep walking up.
                    continue
                else:
                    # Hit an HTML-namespace element: let the current phase
                    # handle the end tag.
                    new_token = self.parser.phase.processEndTag(token)
                    break
            return new_token

    class AfterBodyPhase(Phase):
        """Phase entered after </body>: only comments and <html> tags are
        expected; anything else switches back to "inBody" and reprocesses."""

        def __init__(self, parser, tree):
            Phase.__init__(self, parser, tree)

            self.startTagHandler = _utils.MethodDispatcher([
                ("html", self.startTagHtml)
            ])
            self.startTagHandler.default = self.startTagOther

            self.endTagHandler = _utils.MethodDispatcher([("html", self.endTagHtml)])
            self.endTagHandler.default = self.endTagOther

        def processEOF(self):
            # Stop parsing
            pass

        def processComment(self, token):
            # This is needed because data is to be appended to the <html> element
            # here and not to whatever is currently open.
            self.tree.insertComment(token, self.tree.openElements[0])

        def processCharacters(self, token):
            # Unexpected text: fall back to "inBody" and reprocess there.
            self.parser.parseError("unexpected-char-after-body")
            self.parser.phase = self.parser.phases["inBody"]
            return token

        def startTagHtml(self, token):
            return self.parser.phases["inBody"].processStartTag(token)

        def startTagOther(self, token):
            self.parser.parseError("unexpected-start-tag-after-body",
                                   {"name": token["name"]})
            self.parser.phase = self.parser.phases["inBody"]
            return token

        def endTagHtml(self, token):
            # Parameter renamed from `name` to `token` for consistency: the
            # dispatcher passes the full token dict, as in every sibling
            # handler (positional call, so callers are unaffected).
            if self.parser.innerHTML:
                self.parser.parseError("unexpected-end-tag-after-body-innerhtml")
            else:
                self.parser.phase = self.parser.phases["afterAfterBody"]

        def endTagOther(self, token):
            self.parser.parseError("unexpected-end-tag-after-body",
                                   {"name": token["name"]})
            self.parser.phase = self.parser.phases["inBody"]
            return token

    class InFramesetPhase(Phase):
        # http://www.whatwg.org/specs/web-apps/current-work/#in-frameset
        def __init__(self, parser, tree):
            """Set up start/end tag dispatch tables for frameset content."""
            Phase.__init__(self, parser, tree)

            self.startTagHandler = _utils.MethodDispatcher([
                ("html", self.startTagHtml),
                ("frameset", self.startTagFrameset),
                ("frame", self.startTagFrame),
                ("noframes", self.startTagNoframes)
            ])
            self.startTagHandler.default = self.startTagOther

            self.endTagHandler = _utils.MethodDispatcher([
                ("frameset", self.endTagFrameset)
            ])
            self.endTagHandler.default = self.endTagOther

        def processEOF(self):
            if self.tree.openElements[-1].name != "html":
                self.parser.parseError("eof-in-frameset")
            else:
                assert self.parser.innerHTML

        def processCharacters(self, token):
            self.parser.parseError("unexpected-char-in-frameset")

        def startTagFrameset(self, token):
            self.tree.insertElement(token)

        def startTagFrame(self, token):
            self.tree.insertElement(token)
            self.tree.openElements.pop()

        def startTagNoframes(self, token):
            return self.parser.phases["inBody"].processStartTag(token)

        def startTagOther(self, token):
            self.parser.parseError("unexpected-start-tag-in-frameset",
                                   {"name": token["name"]})

        def endTagFrameset(self, token):
            if self.tree.openElements[-1].name == "html":
                # innerHTML case
                self.parser.parseError("unexpected-frameset-in-frameset-innerhtml")
            else:
                self.tree.openElements.pop()
            if (not self.parser.innerHTML and
                    self.tree.openElements[-1].name != "frameset"):
                # If we're not in innerHTML mode and the current node is not a
                # "frameset" element (anymore) then switch.
                self.parser.phase = self.parser.phases["afterFrameset"]

        def endTagOther(self, token):
            self.parser.parseError("unexpected-end-tag-in-frameset",
                                   {"name": token["name"]})

    class AfterFramesetPhase(Phase):
        # http://www.whatwg.org/specs/web-apps/current-work/#after3
        """The "after frameset" insertion mode: tokens seen once the outermost
        <frameset> has been closed.
        """

        def __init__(self, parser, tree):
            Phase.__init__(self, parser, tree)

            self.startTagHandler = _utils.MethodDispatcher([
                ("html", self.startTagHtml),
                ("noframes", self.startTagNoframes)
            ])
            self.startTagHandler.default = self.startTagOther

            self.endTagHandler = _utils.MethodDispatcher([
                ("html", self.endTagHtml)
            ])
            self.endTagHandler.default = self.endTagOther

        def processEOF(self):
            # Stop parsing: EOF is the expected end of a frameset document.
            pass

        def processCharacters(self, token):
            # Stray character data after the frameset is an error; drop it.
            self.parser.parseError("unexpected-char-after-frameset")

        def startTagNoframes(self, token):
            # <noframes> is handled with the "in head" rules here.
            return self.parser.phases["inHead"].processStartTag(token)

        def startTagOther(self, token):
            self.parser.parseError("unexpected-start-tag-after-frameset",
                                   {"name": token["name"]})

        def endTagHtml(self, token):
            # </html> moves to the terminal "after after frameset" mode.
            self.parser.phase = self.parser.phases["afterAfterFrameset"]

        def endTagOther(self, token):
            self.parser.parseError("unexpected-end-tag-after-frameset",
                                   {"name": token["name"]})

    class AfterAfterBodyPhase(Phase):
        """The "after after body" insertion mode: tokens seen after </html>
        in a normal (non-frameset) document.
        """

        def __init__(self, parser, tree):
            Phase.__init__(self, parser, tree)

            self.startTagHandler = _utils.MethodDispatcher([
                ("html", self.startTagHtml)
            ])
            self.startTagHandler.default = self.startTagOther

        def processEOF(self):
            # EOF here is the normal end of the document; nothing to do.
            pass

        def processComment(self, token):
            # Comments at this point attach to the Document node itself.
            self.tree.insertComment(token, self.tree.document)

        def processSpaceCharacters(self, token):
            # Whitespace is still processed with the "in body" rules.
            return self.parser.phases["inBody"].processSpaceCharacters(token)

        def processCharacters(self, token):
            # Non-space characters are an error; reprocess them "in body".
            self.parser.parseError("expected-eof-but-got-char")
            self.parser.phase = self.parser.phases["inBody"]
            return token

        def startTagHtml(self, token):
            return self.parser.phases["inBody"].processStartTag(token)

        def startTagOther(self, token):
            # Unexpected start tag: report, then reprocess "in body".
            self.parser.parseError("expected-eof-but-got-start-tag",
                                   {"name": token["name"]})
            self.parser.phase = self.parser.phases["inBody"]
            return token

        def processEndTag(self, token):
            # Unexpected end tag: report, then reprocess "in body".
            self.parser.parseError("expected-eof-but-got-end-tag",
                                   {"name": token["name"]})
            self.parser.phase = self.parser.phases["inBody"]
            return token

    class AfterAfterFramesetPhase(Phase):
        """The "after after frameset" insertion mode: tokens seen after
        </html> in a frameset document.
        """

        def __init__(self, parser, tree):
            Phase.__init__(self, parser, tree)

            self.startTagHandler = _utils.MethodDispatcher([
                ("html", self.startTagHtml),
                ("noframes", self.startTagNoFrames)
            ])
            self.startTagHandler.default = self.startTagOther

        def processEOF(self):
            # EOF here is the normal end of the document; nothing to do.
            pass

        def processComment(self, token):
            # Comments attach directly to the Document node.
            self.tree.insertComment(token, self.tree.document)

        def processSpaceCharacters(self, token):
            # Whitespace is still handled by the "in body" rules.
            return self.parser.phases["inBody"].processSpaceCharacters(token)

        def processCharacters(self, token):
            # Non-space character data is an error and is dropped.
            self.parser.parseError("expected-eof-but-got-char")

        def startTagHtml(self, token):
            return self.parser.phases["inBody"].processStartTag(token)

        def startTagNoFrames(self, token):
            # <noframes> is processed with the "in head" rules.
            return self.parser.phases["inHead"].processStartTag(token)

        def startTagOther(self, token):
            # Any other start tag is an error and is dropped.
            self.parser.parseError("expected-eof-but-got-start-tag",
                                   {"name": token["name"]})

        def processEndTag(self, token):
            # Any end tag is an error and is dropped.
            self.parser.parseError("expected-eof-but-got-end-tag",
                                   {"name": token["name"]})
    # pylint:enable=unused-argument

    # Map each insertion-mode name to its Phase class; the parser switches
    # modes by looking classes up in this dict.
    return {
        "initial": InitialPhase,
        "beforeHtml": BeforeHtmlPhase,
        "beforeHead": BeforeHeadPhase,
        "inHead": InHeadPhase,
        "inHeadNoscript": InHeadNoscriptPhase,
        "afterHead": AfterHeadPhase,
        "inBody": InBodyPhase,
        "text": TextPhase,
        "inTable": InTablePhase,
        "inTableText": InTableTextPhase,
        "inCaption": InCaptionPhase,
        "inColumnGroup": InColumnGroupPhase,
        "inTableBody": InTableBodyPhase,
        "inRow": InRowPhase,
        "inCell": InCellPhase,
        "inSelect": InSelectPhase,
        "inSelectInTable": InSelectInTablePhase,
        "inForeignContent": InForeignContentPhase,
        "afterBody": AfterBodyPhase,
        "inFrameset": InFramesetPhase,
        "afterFrameset": AfterFramesetPhase,
        "afterAfterBody": AfterAfterBodyPhase,
        "afterAfterFrameset": AfterAfterFramesetPhase,
        # The "after after frameset" mode is covered by
        # AfterAfterFramesetPhase above.
    }


def adjust_attributes(token, replacements):
    """Rename attributes of *token* according to the *replacements* mapping.

    If any attribute key of ``token['data']`` appears in *replacements*, the
    attribute dict is rebuilt as an OrderedDict with those keys substituted
    (order preserved); otherwise the token is left untouched.
    """
    data = token['data']
    if PY3 or _utils.PY27:
        overlap = viewkeys(data) & viewkeys(replacements)
    else:
        # Pre-2.7 Python: dict views are unavailable, use frozensets instead.
        overlap = frozenset(data) & frozenset(replacements)
    if overlap:
        token['data'] = OrderedDict(
            (replacements.get(key, key), value)
            for key, value in data.items())


def impliedTagToken(name, type="EndTag", attributes=None,
                    selfClosing=False):
    """Create a synthetic (parser-implied) tag token.

    :arg name: the tag name
    :arg type: key into ``tokenTypes`` (e.g. "StartTag" or "EndTag")
    :arg attributes: optional attribute dict; defaults to a fresh empty dict
    :arg selfClosing: whether the tag is marked self-closing
    """
    data = {} if attributes is None else attributes
    return {
        "type": tokenTypes[type],
        "name": name,
        "data": data,
        "selfClosing": selfClosing,
    }


class ParseError(Exception):
    """Raised when a fatal error is encountered in the parsed document."""
    pass
_vendor/certifi/__pycache__/core.cpython-36.pyc000064400000002112151733136450015417 0ustar003

�Pf&�@sJdZddlZddlZGdd�de�Zdd�Zdd�Zed	krFee��dS)
zU
certifi.py
~~~~~~~~~~

This module returns the installation location of cacert.pem.
�Nc@seZdZdZdS)�DeprecatedBundleWarningz�
    The weak security bundle is being deprecated. Please bother your service
    provider to get them to stop using cross-signed roots.
    N)�__name__�
__module__�__qualname__�__doc__�rr�/usr/lib/python3.6/core.pyrsrcCsdS)Nz /etc/pki/tls/certs/ca-bundle.crtrrrrr�wheresr	cCstjdt�t�S)Nz�The weak security bundle has been removed. certifi.old_where() is now an alias of certifi.where(). Please update your code to use certifi.where() instead. certifi.old_where() will be removed in 2018.)�warnings�warnrr	rrrr�	old_wheresr�__main__)	r�osr
�DeprecationWarningrr	rr�printrrrr�<module>	s	_vendor/certifi/__pycache__/__init__.cpython-36.pyc000064400000000306151733136450016231 0ustar003

�Pf?�@sddlmZmZdZdS)�)�where�	old_wherez
2018.01.18N)Zcorerr�__version__�rr�/usr/lib/python3.6/__init__.py�<module>s_vendor/certifi/__pycache__/__main__.cpython-36.pyc000064400000000251151733136450016211 0ustar003

�Pf)�@sddlmZee��dS)�)�whereN)Zcertifir�print�rr�/usr/lib/python3.6/__main__.py�<module>s_vendor/certifi/__pycache__/__init__.cpython-36.opt-1.pyc000064400000000306151733136450017170 0ustar003

�Pf?�@sddlmZmZdZdS)�)�where�	old_wherez
2018.01.18N)Zcorerr�__version__�rr�/usr/lib/python3.6/__init__.py�<module>s_vendor/certifi/__pycache__/__main__.cpython-36.opt-1.pyc000064400000000251151733136450017150 0ustar003

�Pf)�@sddlmZee��dS)�)�whereN)Zcertifir�print�rr�/usr/lib/python3.6/__main__.py�<module>s_vendor/certifi/__pycache__/core.cpython-36.opt-1.pyc000064400000002112151733136450016356 0ustar003

�Pf&�@sJdZddlZddlZGdd�de�Zdd�Zdd�Zed	krFee��dS)
zU
certifi.py
~~~~~~~~~~

This module returns the installation location of cacert.pem.
�Nc@seZdZdZdS)�DeprecatedBundleWarningz�
    The weak security bundle is being deprecated. Please bother your service
    provider to get them to stop using cross-signed roots.
    N)�__name__�
__module__�__qualname__�__doc__�rr�/usr/lib/python3.6/core.pyrsrcCsdS)Nz /etc/pki/tls/certs/ca-bundle.crtrrrrr�wheresr	cCstjdt�t�S)Nz�The weak security bundle has been removed. certifi.old_where() is now an alias of certifi.where(). Please update your code to use certifi.where() instead. certifi.old_where() will be removed in 2018.)�warnings�warnrr	rrrr�	old_wheresr�__main__)	r�osr
�DeprecationWarningrr	rr�printrrrr�<module>	s	_vendor/certifi/__init__.py000064400000000077151733136450011752 0ustar00from .core import where, old_where

__version__ = "2018.01.18"
_vendor/certifi/core.py000064400000001446151733136450011144 0ustar00#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""
certifi.py
~~~~~~~~~~

This module returns the installation location of cacert.pem.
"""
import os
import warnings


class DeprecatedBundleWarning(DeprecationWarning):
    """Warning category for uses of the deprecated weak security bundle.

    Ask your service provider to stop relying on cross-signed roots so the
    weak bundle can be retired entirely.
    """


def where():
    """Return the filesystem path of the system CA certificate bundle."""
    ca_bundle = '/etc/pki/tls/certs/ca-bundle.crt'
    return ca_bundle


def old_where():
    """Deprecated alias of :func:`where`.

    Emits a :class:`DeprecatedBundleWarning` and returns the same path as
    ``where()``.
    """
    message = (
        "The weak security bundle has been removed. certifi.old_where() is now an alias "
        "of certifi.where(). Please update your code to use certifi.where() instead. "
        "certifi.old_where() will be removed in 2018."
    )
    warnings.warn(message, DeprecatedBundleWarning)
    return where()

if __name__ == '__main__':
    # Allow running this module directly to print the bundle location.
    print(where())
_vendor/certifi/__main__.py000064400000000051151733136450011723 0ustar00from certifi import where
print(where())
_vendor/packaging/__pycache__/_compat.cpython-36.opt-1.pyc000064400000001634151733136450017357 0ustar003

�Pf\�@sVddlmZmZmZddlZejddkZejddkZerDefZ	ne
fZ	dd�ZdS)�)�absolute_import�division�print_functionN��cs&G��fdd�d��}tj|dfi�S)z/
    Create a base class with a metaclass.
    cseZdZ��fdd�ZdS)z!with_metaclass.<locals>.metaclasscs�|�|�S)N�)�cls�nameZ
this_bases�d)�bases�metar�/usr/lib/python3.6/_compat.py�__new__sz)with_metaclass.<locals>.metaclass.__new__N)�__name__�
__module__�__qualname__rr)rrrr
�	metaclasssrZtemporary_class)�typer)rrrr)rrr
�with_metaclasssr)Z
__future__rrr�sys�version_infoZPY2ZPY3�strZstring_typesZ
basestringrrrrr
�<module>s_vendor/packaging/__pycache__/version.cpython-36.pyc000064400000024426151733136450016467 0ustar003

�Pf$-�@s�ddlmZmZmZddlZddlZddlZddlmZddddd	gZ	ej
d
ddd
dddg�Zdd�ZGdd�de
�ZGdd�de�ZGdd�de�Zejdej�Zdddddd�Zdd�Zdd�ZdZGd d�de�Zd!d"�Zejd#�Zd$d%�Zd&d'�ZdS)(�)�absolute_import�division�print_functionN�)�Infinity�parse�Version�
LegacyVersion�InvalidVersion�VERSION_PATTERN�_Version�epoch�release�dev�pre�post�localcCs&yt|�Stk
r t|�SXdS)z�
    Parse the given version string and return either a :class:`Version` object
    or a :class:`LegacyVersion` object depending on if the given version is
    a valid PEP 440 version or a legacy version.
    N)rr
r	)�version�r�/usr/lib/python3.6/version.pyrsc@seZdZdZdS)r
zF
    An invalid version was found, users should refer to PEP 440.
    N)�__name__�
__module__�__qualname__�__doc__rrrrr
$sc@sLeZdZdd�Zdd�Zdd�Zdd�Zd	d
�Zdd�Zd
d�Z	dd�Z
dS)�_BaseVersioncCs
t|j�S)N)�hash�_key)�selfrrr�__hash__,sz_BaseVersion.__hash__cCs|j|dd��S)NcSs||kS)Nr)�s�orrr�<lambda>0sz%_BaseVersion.__lt__.<locals>.<lambda>)�_compare)r�otherrrr�__lt__/sz_BaseVersion.__lt__cCs|j|dd��S)NcSs||kS)Nr)rr rrrr!3sz%_BaseVersion.__le__.<locals>.<lambda>)r")rr#rrr�__le__2sz_BaseVersion.__le__cCs|j|dd��S)NcSs||kS)Nr)rr rrrr!6sz%_BaseVersion.__eq__.<locals>.<lambda>)r")rr#rrr�__eq__5sz_BaseVersion.__eq__cCs|j|dd��S)NcSs||kS)Nr)rr rrrr!9sz%_BaseVersion.__ge__.<locals>.<lambda>)r")rr#rrr�__ge__8sz_BaseVersion.__ge__cCs|j|dd��S)NcSs||kS)Nr)rr rrrr!<sz%_BaseVersion.__gt__.<locals>.<lambda>)r")rr#rrr�__gt__;sz_BaseVersion.__gt__cCs|j|dd��S)NcSs||kS)Nr)rr rrrr!?sz%_BaseVersion.__ne__.<locals>.<lambda>)r")rr#rrr�__ne__>sz_BaseVersion.__ne__cCst|t�stS||j|j�S)N)�
isinstancer�NotImplementedr)rr#�methodrrrr"As
z_BaseVersion._compareN)rrrrr$r%r&r'r(r)r"rrrrr*src@s`eZdZdd�Zdd�Zdd�Zedd��Zed	d
��Zedd��Z	ed
d��Z
edd��ZdS)r	cCst|�|_t|j�|_dS)N)�str�_version�_legacy_cmpkeyr)rrrrr�__init__Js
zLegacyVersion.__init__cCs|jS)N)r.)rrrr�__str__NszLegacyVersion.__str__cCsdjtt|���S)Nz<LegacyVersion({0})>)�format�reprr-)rrrr�__repr__QszLegacyVersion.__repr__cCs|jS)N)r.)rrrr�publicTszLegacyVersion.publiccCs|jS)N)r.)rrrr�base_versionXszLegacyVersion.base_versioncCsdS)Nr)rrrrr\szLegacyVersion.localcCsdS)NFr)rrrr�
is_prerelease`szLegacyVersion.is_prereleasecCsdS)NFr)rrrr�is_postreleasedszLegacyVersion.is_postreleaseN)rrrr0r1r4�propertyr5r6rr7r8rrrrr	Hsz(\d+ | [a-z]+ | \.| -)�czfinal-�@)r�preview�-�rcrccsbxVtj|�D]H}tj||�}|s|dkr,q|dd�dkrJ|jd�Vqd|VqWdVdS)N�.r�
0123456789��*z*final)�_legacy_version_component_re�split�_legacy_version_replacement_map�get�zfill)r�partrrr�_parse_version_partsrsrIcCs�d}g}xlt|j��D]\}|jd�rh|dkrJx|rH|ddkrH|j�q.Wx|rf|ddkrf|j�qLW|j|�qWt|�}||fS)	NrrBz*finalz*final-Z00000000���rJrJ)rI�lower�
startswith�pop�append�tuple)rr
�partsrHrrrr/�s
r/a�
    v?
    (?:
        (?:(?P<epoch>[0-9]+)!)?                           # epoch
        (?P<release>[0-9]+(?:\.[0-9]+)*)                  # release segment
        (?P<pre>                                          # pre-release
            [-_\.]?
            (?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview))
            [-_\.]?
            (?P<pre_n>[0-9]+)?
        )?
        (?P<post>                                         # post release
            (?:-(?P<post_n1>[0-9]+))
            |
            (?:
                [-_\.]?
                (?P<post_l>post|rev|r)
                [-_\.]?
                (?P<post_n2>[0-9]+)?
            )
        )?
        (?P<dev>                                          # dev release
            [-_\.]?
            (?P<dev_l>dev)
            [-_\.]?
            (?P<dev_n>[0-9]+)?
        )?
    )
    (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?       # local version
c@s|eZdZejdedejejB�Zdd�Z	dd�Z
dd�Zed	d
��Z
edd��Zed
d��Zedd��Zedd��ZdS)rz^\s*z\s*$c	Cs�|jj|�}|stdj|���t|jd�r8t|jd��ndtdd�|jd�jd�D��t	|jd�|jd	��t	|jd
�|jd�p�|jd��t	|jd
�|jd��t
|jd��d�|_t|jj
|jj|jj|jj|jj|jj�|_dS)NzInvalid version: '{0}'r
rcss|]}t|�VqdS)N)�int)�.0�irrr�	<genexpr>�sz#Version.__init__.<locals>.<genexpr>rr?Zpre_lZpre_nZpost_lZpost_n1Zpost_n2Zdev_lZdev_nr)r
rrrrr)�_regex�searchr
r2r�grouprQrOrD�_parse_letter_version�_parse_local_versionr.�_cmpkeyr
rrrrrr)rr�matchrrrr0�s.

zVersion.__init__cCsdjtt|���S)Nz<Version({0})>)r2r3r-)rrrrr4�szVersion.__repr__cCs�g}|jjdkr$|jdj|jj��|jdjdd�|jjD���|jjdk	rl|jdjdd�|jjD���|jjdk	r�|jdj|jjd	��|jjdk	r�|jd
j|jjd	��|jj	dk	r�|jdjdjdd�|jj	D����dj|�S)
Nrz{0}!r?css|]}t|�VqdS)N)r-)rR�xrrrrT�sz"Version.__str__.<locals>.<genexpr>�css|]}t|�VqdS)N)r-)rRr\rrrrT�sz.post{0}rz.dev{0}z+{0}css|]}t|�VqdS)N)r-)rRr\rrrrTs)
r.r
rNr2�joinrrrrr)rrPrrrr1�s zVersion.__str__cCst|�jdd�dS)N�+rr)r-rD)rrrrr5
szVersion.publiccCsLg}|jjdkr$|jdj|jj��|jdjdd�|jjD���dj|�S)Nrz{0}!r?css|]}t|�VqdS)N)r-)rRr\rrrrTsz'Version.base_version.<locals>.<genexpr>r])r.r
rNr2r^r)rrPrrrr6s
zVersion.base_versioncCs$t|�}d|kr |jdd�dSdS)Nr_r)r-rD)rZversion_stringrrrrsz
Version.localcCst|jjp|jj�S)N)�boolr.rr)rrrrr7!szVersion.is_prereleasecCst|jj�S)N)r`r.r)rrrrr8%szVersion.is_postreleaseN)rrr�re�compiler�VERBOSE�
IGNORECASErUr0r4r1r9r5r6rr7r8rrrrr�s
#
cCsx|rZ|dkrd}|j�}|dkr&d}n(|dkr4d}n|d
krBd	}n|dkrNd}|t|�fS|rt|rtd}|t|�fSdS)NrZalpha�aZbeta�br:rr<r>�rev�rr)r:rr<)rgrh)rKrQ)ZletterZnumberrrrrX*s 
rXz[\._-]cCs$|dk	r tdd�tj|�D��SdS)zR
    Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
    Ncss&|]}|j�s|j�nt|�VqdS)N)�isdigitrKrQ)rRrHrrrrTRsz'_parse_local_version.<locals>.<genexpr>)rO�_local_version_seperatorsrD)rrrrrYLsrYcCs�ttttjdd�t|�����}|dkr@|dkr@|dk	r@t}n|dkrLt}|dkrZt}|dkrft}|dkrvt}ntdd�|D��}||||||fS)NcSs|dkS)Nrr)r\rrrr!`sz_cmpkey.<locals>.<lambda>css*|]"}t|t�r|dfnt|fVqdS)r]N)r*rQr)rRrSrrrrT�sz_cmpkey.<locals>.<genexpr>)rO�reversed�list�	itertools�	dropwhiler)r
rrrrrrrrrZWs&		
rZ)Z
__future__rrr�collectionsrmraZ_structuresr�__all__�
namedtuplerr�
ValueErrorr
�objectrr	rbrcrCrErIr/rrrXrjrYrZrrrr�<module>s.!
9k
_vendor/packaging/__pycache__/specifiers.cpython-36.opt-1.pyc000064400000046437151733136450020103 0ustar003

�Pfym�@s�ddlmZmZmZddlZddlZddlZddlZddlm	Z	m
Z
ddlmZm
Z
mZGdd�de�ZGdd	�d	e
eje��ZGd
d�de�ZGdd
�d
e�Zdd�ZGdd�de�Zejd�Zdd�Zdd�ZGdd�de�ZdS)�)�absolute_import�division�print_functionN�)�string_types�with_metaclass)�Version�
LegacyVersion�parsec@seZdZdZdS)�InvalidSpecifierzH
    An invalid specifier was found, users should refer to PEP 440.
    N)�__name__�
__module__�__qualname__�__doc__�rr� /usr/lib/python3.6/specifiers.pyrsrc@s�eZdZejdd��Zejdd��Zejdd��Zejdd��Zej	d	d
��Z
e
jdd
��Z
ejdd
d��Zejddd��Z
dS)�
BaseSpecifiercCsdS)z�
        Returns the str representation of this Specifier like object. This
        should be representative of the Specifier itself.
        Nr)�selfrrr�__str__szBaseSpecifier.__str__cCsdS)zF
        Returns a hash value for this Specifier like object.
        Nr)rrrr�__hash__szBaseSpecifier.__hash__cCsdS)zq
        Returns a boolean representing whether or not the two Specifier like
        objects are equal.
        Nr)r�otherrrr�__eq__$szBaseSpecifier.__eq__cCsdS)zu
        Returns a boolean representing whether or not the two Specifier like
        objects are not equal.
        Nr)rrrrr�__ne__+szBaseSpecifier.__ne__cCsdS)zg
        Returns whether or not pre-releases as a whole are allowed by this
        specifier.
        Nr)rrrr�prereleases2szBaseSpecifier.prereleasescCsdS)zd
        Sets whether or not pre-releases as a whole are allowed by this
        specifier.
        Nr)r�valuerrrr9sNcCsdS)zR
        Determines if the given item is contained within this specifier.
        Nr)r�itemrrrr�contains@szBaseSpecifier.containscCsdS)z�
        Takes an iterable of items and filters them so that only items which
        are contained within this specifier are allowed in it.
        Nr)r�iterablerrrr�filterFszBaseSpecifier.filter)N)N)rr
r�abc�abstractmethodrrrr�abstractpropertyr�setterrrrrrrrsrc@s�eZdZiZd dd�Zdd�Zdd�Zd	d
�Zdd�Zd
d�Z	dd�Z
dd�Zedd��Z
edd��Zedd��Zejdd��Zdd�Zd!dd�Zd"dd�ZdS)#�_IndividualSpecifier�NcCsF|jj|�}|stdj|���|jd�j�|jd�j�f|_||_dS)NzInvalid specifier: '{0}'�operator�version)�_regex�searchr�format�group�strip�_spec�_prereleases)r�specr�matchrrr�__init__Rsz_IndividualSpecifier.__init__cCs0|jdk	rdj|j�nd}dj|jjt|�|�S)Nz, prereleases={0!r}r$z<{0}({1!r}{2})>)r-r)r�	__class__r�str)r�prerrr�__repr___sz_IndividualSpecifier.__repr__cCsdj|j�S)Nz{0}{1})r)r,)rrrrrlsz_IndividualSpecifier.__str__cCs
t|j�S)N)�hashr,)rrrrrosz_IndividualSpecifier.__hash__cCsLt|t�r0y|j|�}Wq@tk
r,tSXnt||j�s@tS|j|jkS)N)�
isinstancerr1r�NotImplementedr,)rrrrrrrs
z_IndividualSpecifier.__eq__cCsLt|t�r0y|j|�}Wq@tk
r,tSXnt||j�s@tS|j|jkS)N)r6rr1rr7r,)rrrrrr}s
z_IndividualSpecifier.__ne__cCst|dj|j|��S)Nz_compare_{0})�getattrr)�
_operators)r�oprrr�
_get_operator�sz"_IndividualSpecifier._get_operatorcCst|ttf�st|�}|S)N)r6r	rr
)rr&rrr�_coerce_version�sz$_IndividualSpecifier._coerce_versioncCs
|jdS)Nr)r,)rrrrr%�sz_IndividualSpecifier.operatorcCs
|jdS)Nr)r,)rrrrr&�sz_IndividualSpecifier.versioncCs|jS)N)r-)rrrrr�sz _IndividualSpecifier.prereleasescCs
||_dS)N)r-)rrrrrr�scCs
|j|�S)N)r)rrrrr�__contains__�sz!_IndividualSpecifier.__contains__cCs<|dkr|j}|j|�}|jr(|r(dS|j|j�||j�S)NF)rr<�
is_prereleaser;r%r&)rrrrrrr�s
z_IndividualSpecifier.containsccs�d}g}d|dk	r|ndi}xL|D]D}|j|�}|j|f|�r"|jr\|pL|jr\|j|�q"d}|Vq"W|r�|r�x|D]
}|VqzWdS)NFrT)r<rr>r�append)rrrZyielded�found_prereleases�kwr&�parsed_versionrrrr�s




z_IndividualSpecifier.filter)r$N)N)N)rr
rr9r0r4rrrrr;r<�propertyr%r&rr"r=rrrrrrr#Ns 



r#c@sveZdZdZejdedejejB�Zdddddd	d
�Z	dd�Z
d
d�Zdd�Zdd�Z
dd�Zdd�Zdd�ZdS)�LegacySpecifiera�
        (?P<operator>(==|!=|<=|>=|<|>))
        \s*
        (?P<version>
            [^,;\s)]* # Since this is a "legacy" specifier, and the version
                      # string can be just about anything, we match everything
                      # except for whitespace, a semi-colon for marker support,
                      # a closing paren since versions can be enclosed in
                      # them, and a comma since it's a version separator.
        )
        z^\s*z\s*$�equal�	not_equal�less_than_equal�greater_than_equal�	less_than�greater_than)z==z!=z<=z>=�<�>cCst|t�stt|��}|S)N)r6r	r2)rr&rrrr<�s
zLegacySpecifier._coerce_versioncCs||j|�kS)N)r<)r�prospectiver.rrr�_compare_equal�szLegacySpecifier._compare_equalcCs||j|�kS)N)r<)rrMr.rrr�_compare_not_equal�sz"LegacySpecifier._compare_not_equalcCs||j|�kS)N)r<)rrMr.rrr�_compare_less_than_equal�sz(LegacySpecifier._compare_less_than_equalcCs||j|�kS)N)r<)rrMr.rrr�_compare_greater_than_equalsz+LegacySpecifier._compare_greater_than_equalcCs||j|�kS)N)r<)rrMr.rrr�_compare_less_thansz"LegacySpecifier._compare_less_thancCs||j|�kS)N)r<)rrMr.rrr�_compare_greater_thansz%LegacySpecifier._compare_greater_thanN)rr
r�
_regex_str�re�compile�VERBOSE�
IGNORECASEr'r9r<rNrOrPrQrRrSrrrrrD�s 
rDcstj���fdd��}|S)Ncst|t�sdS�|||�S)NF)r6r)rrMr.)�fnrr�wrappeds
z)_require_version_compare.<locals>.wrapped)�	functools�wraps)rYrZr)rYr�_require_version_compare
sr]c	@s�eZdZdZejdedejejB�Zdddddd	d
dd�Z	e
d
d��Ze
dd��Ze
dd��Z
e
dd��Ze
dd��Ze
dd��Ze
dd��Zdd�Zedd��Zejdd��Zd S)!�	Specifiera
        (?P<operator>(~=|==|!=|<=|>=|<|>|===))
        (?P<version>
            (?:
                # The identity operators allow for an escape hatch that will
                # do an exact string match of the version you wish to install.
                # This will not be parsed by PEP 440 and we cannot determine
                # any semantic meaning from it. This operator is discouraged
                # but included entirely as an escape hatch.
                (?<====)  # Only match for the identity operator
                \s*
                [^\s]*    # We just match everything, except for whitespace
                          # since we are only testing for strict identity.
            )
            |
            (?:
                # The (non)equality operators allow for wild card and local
                # versions to be specified so we have to define these two
                # operators separately to enable that.
                (?<===|!=)            # Only match for equals and not equals

                \s*
                v?
                (?:[0-9]+!)?          # epoch
                [0-9]+(?:\.[0-9]+)*   # release
                (?:                   # pre release
                    [-_\.]?
                    (a|b|c|rc|alpha|beta|pre|preview)
                    [-_\.]?
                    [0-9]*
                )?
                (?:                   # post release
                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
                )?

                # You cannot use a wild card and a dev or local version
                # together so group them with a | and make them optional.
                (?:
                    (?:[-_\.]?dev[-_\.]?[0-9]*)?         # dev release
                    (?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local
                    |
                    \.\*  # Wild card syntax of .*
                )?
            )
            |
            (?:
                # The compatible operator requires at least two digits in the
                # release segment.
                (?<=~=)               # Only match for the compatible operator

                \s*
                v?
                (?:[0-9]+!)?          # epoch
                [0-9]+(?:\.[0-9]+)+   # release  (We have a + instead of a *)
                (?:                   # pre release
                    [-_\.]?
                    (a|b|c|rc|alpha|beta|pre|preview)
                    [-_\.]?
                    [0-9]*
                )?
                (?:                                   # post release
                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
                )?
                (?:[-_\.]?dev[-_\.]?[0-9]*)?          # dev release
            )
            |
            (?:
                # All other operators only allow a sub set of what the
                # (non)equality operators do. Specifically they do not allow
                # local versions to be specified nor do they allow the prefix
                # matching wild cards.
                (?<!==|!=|~=)         # We have special cases for these
                                      # operators so we want to make sure they
                                      # don't match here.

                \s*
                v?
                (?:[0-9]+!)?          # epoch
                [0-9]+(?:\.[0-9]+)*   # release
                (?:                   # pre release
                    [-_\.]?
                    (a|b|c|rc|alpha|beta|pre|preview)
                    [-_\.]?
                    [0-9]*
                )?
                (?:                                   # post release
                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
                )?
                (?:[-_\.]?dev[-_\.]?[0-9]*)?          # dev release
            )
        )
        z^\s*z\s*$Z
compatiblerErFrGrHrIrJZ	arbitrary)z~=z==z!=z<=z>=rKrLz===cCsNdjttjdd�t|���dd��}|d7}|jd�||�oL|jd�||�S)	N�.cSs|jd�o|jd�S)NZpostZdev)�
startswith)�xrrr�<lambda>�sz/Specifier._compare_compatible.<locals>.<lambda>rz.*z>=z==���)�join�list�	itertools�	takewhile�_version_splitr;)rrMr.�prefixrrr�_compare_compatible�s
zSpecifier._compare_compatiblecCsp|jd�rPt|j�}t|dd��}tt|��}|dt|��}t||�\}}nt|�}|jsht|j�}||kS)Nz.*����)�endswithrZpublicrhr2�len�_pad_version�local)rrMr.rrrrN�s


zSpecifier._compare_equalcCs|j||�S)N)rN)rrMr.rrrrO�szSpecifier._compare_not_equalcCs|t|�kS)N)r)rrMr.rrrrP�sz"Specifier._compare_less_than_equalcCs|t|�kS)N)r)rrMr.rrrrQ�sz%Specifier._compare_greater_than_equalcCs>t|�}||ksdS|jr:|jr:t|j�t|j�kr:dSdS)NFT)rr>�base_version)rrMr.rrrrR�szSpecifier._compare_less_thancCs`t|�}||ksdS|jr:|jr:t|j�t|j�kr:dS|jdk	r\t|j�t|j�kr\dSdS)NFT)rZis_postreleaserqrp)rrMr.rrrrS�s
zSpecifier._compare_greater_thancCst|�j�t|�j�kS)N)r2�lower)rrMr.rrr�_compare_arbitraryszSpecifier._compare_arbitrarycCsR|jdk	r|jS|j\}}|d
krN|dkr@|jd�r@|dd�}t|�jrNdSd	S)N�==�>=�<=�~=�===z.*rkTF)rtrurvrwrxrl)r-r,rmr
r>)rr%r&rrrrs


zSpecifier.prereleasescCs
||_dS)N)r-)rrrrrrsN)rr
rrTrUrVrWrXr'r9r]rjrNrOrPrQrRrSrsrCrr"rrrrr^s*^#r^z^([0-9]+)((?:a|b|c|rc)[0-9]+)$cCsDg}x:|jd�D],}tj|�}|r2|j|j��q|j|�qW|S)Nr_)�split�
_prefix_regexr(�extend�groupsr?)r&�resultrr/rrrrh's
rhc	Cs�gg}}|jttjdd�|���|jttjdd�|���|j|t|d�d��|j|t|d�d��|jddgtdt|d�t|d���|jddgtdt|d�t|d���ttj|��ttj|��fS)NcSs|j�S)N)�isdigit)rarrrrb6sz_pad_version.<locals>.<lambda>cSs|j�S)N)r~)rarrrrb7srr�0)r?rerfrgrn�insert�max�chain)�left�rightZ
left_splitZright_splitrrrro2s
&&roc@s�eZdZddd�Zdd�Zdd�Zd	d
�Zdd�Zd
d�Zdd�Z	dd�Z
dd�Zedd��Z
e
jdd��Z
dd�Zddd�Zd dd�ZdS)!�SpecifierSetr$NcCsrdd�|jd�D�}t�}xB|D]:}y|jt|��Wq tk
rX|jt|��Yq Xq Wt|�|_||_dS)NcSsg|]}|j�r|j��qSr)r+)�.0�srrr�
<listcomp>Rsz)SpecifierSet.__init__.<locals>.<listcomp>�,)	ry�set�addr^rrD�	frozenset�_specsr-)rZ
specifiersrZparsed�	specifierrrrr0Os

zSpecifierSet.__init__cCs*|jdk	rdj|j�nd}djt|�|�S)Nz, prereleases={0!r}r$z<SpecifierSet({0!r}{1})>)r-r)rr2)rr3rrrr4dszSpecifierSet.__repr__cCsdjtdd�|jD���S)Nr�css|]}t|�VqdS)N)r2)r�r�rrr�	<genexpr>nsz'SpecifierSet.__str__.<locals>.<genexpr>)rd�sortedr�)rrrrrmszSpecifierSet.__str__cCs
t|j�S)N)r5r�)rrrrrpszSpecifierSet.__hash__cCs�t|t�rt|�}nt|t�s"tSt�}t|j|jB�|_|jdkrX|jdk	rX|j|_n<|jdk	rv|jdkrv|j|_n|j|jkr�|j|_ntd��|S)NzFCannot combine SpecifierSets with True and False prerelease overrides.)r6rr�r7r�r�r-�
ValueError)rrr�rrr�__and__ss





zSpecifierSet.__and__cCsFt|t�rt|�}n&t|t�r,tt|��}nt|t�s:tS|j|jkS)N)r6rr�r#r2r7r�)rrrrrr�s



zSpecifierSet.__eq__cCsFt|t�rt|�}n&t|t�r,tt|��}nt|t�s:tS|j|jkS)N)r6rr�r#r2r7r�)rrrrrr�s



zSpecifierSet.__ne__cCs
t|j�S)N)rnr�)rrrr�__len__�szSpecifierSet.__len__cCs
t|j�S)N)�iterr�)rrrr�__iter__�szSpecifierSet.__iter__cCs.|jdk	r|jS|jsdStdd�|jD��S)Ncss|]}|jVqdS)N)r)r�r�rrrr��sz+SpecifierSet.prereleases.<locals>.<genexpr>)r-r��any)rrrrr�s

zSpecifierSet.prereleasescCs
||_dS)N)r-)rrrrrr�scCs
|j|�S)N)r)rrrrrr=�szSpecifierSet.__contains__csNt�ttf�st����dkr$|j��r4�jr4dSt��fdd�|jD��S)NFc3s|]}|j��d�VqdS))rN)r)r�r�)rrrrr��sz(SpecifierSet.contains.<locals>.<genexpr>)r6r	rr
rr>�allr�)rrrr)rrrr�szSpecifierSet.containscCs�|dkr|j}|jr:x |jD]}|j|t|�d�}qW|Sg}g}xZ|D]R}t|ttf�sdt|�}n|}t|t�rtqH|jr�|r�|s�|j	|�qH|j	|�qHW|r�|r�|dkr�|S|SdS)N)r)
rr�r�boolr6r	rr
r>r?)rrrr.Zfilteredr@rrBrrrr�s*


zSpecifierSet.filter)r$N)N)N)rr
rr0r4rrr�rrr�r�rCrr"r=rrrrrrr�Ms
	


r�)Z
__future__rrrrr[rfrUZ_compatrrr&rr	r
r�r�ABCMeta�objectrr#rDr]r^rVrzrhror�rrrr�<module>s&9	4	
_vendor/packaging/__pycache__/__about__.cpython-36.opt-1.pyc000064400000001177151733136460017646 0ustar003

�Pf��@sPddlmZmZmZdddddddd	gZd
ZdZdZd
ZdZ	dZ
dZde	ZdS)�)�absolute_import�division�print_function�	__title__�__summary__�__uri__�__version__�
__author__�	__email__�__license__�
__copyright__Z	packagingz"Core utilities for Python packagesz!https://github.com/pypa/packagingz16.8z)Donald Stufft and individual contributorszdonald@stufft.ioz"BSD or Apache License, Version 2.0zCopyright 2014-2016 %sN)
Z
__future__rrr�__all__rrrrr	r
rr�rr�/usr/lib/python3.6/__about__.py�<module>s

_vendor/packaging/__pycache__/_structures.cpython-36.opt-1.pyc000064400000005335151733136460020322 0ustar003

�Pf��@sDddlmZmZmZGdd�de�Ze�ZGdd�de�Ze�ZdS)�)�absolute_import�division�print_functionc@sTeZdZdd�Zdd�Zdd�Zdd�Zd	d
�Zdd�Zd
d�Z	dd�Z
dd�ZdS)�InfinitycCsdS)Nr�)�selfrr�!/usr/lib/python3.6/_structures.py�__repr__	szInfinity.__repr__cCstt|��S)N)�hash�repr)rrrr�__hash__szInfinity.__hash__cCsdS)NFr)r�otherrrr�__lt__szInfinity.__lt__cCsdS)NFr)rr
rrr�__le__szInfinity.__le__cCst||j�S)N)�
isinstance�	__class__)rr
rrr�__eq__szInfinity.__eq__cCst||j�S)N)rr)rr
rrr�__ne__szInfinity.__ne__cCsdS)NTr)rr
rrr�__gt__szInfinity.__gt__cCsdS)NTr)rr
rrr�__ge__szInfinity.__ge__cCstS)N)�NegativeInfinity)rrrr�__neg__!szInfinity.__neg__N)�__name__�
__module__�__qualname__r	rrrrrrrrrrrrrsrc@sTeZdZdd�Zdd�Zdd�Zdd�Zd	d
�Zdd�Zd
d�Z	dd�Z
dd�ZdS)rcCsdS)Nz	-Infinityr)rrrrr	)szNegativeInfinity.__repr__cCstt|��S)N)r
r)rrrrr,szNegativeInfinity.__hash__cCsdS)NTr)rr
rrrr/szNegativeInfinity.__lt__cCsdS)NTr)rr
rrrr2szNegativeInfinity.__le__cCst||j�S)N)rr)rr
rrrr5szNegativeInfinity.__eq__cCst||j�S)N)rr)rr
rrrr8szNegativeInfinity.__ne__cCsdS)NFr)rr
rrrr;szNegativeInfinity.__gt__cCsdS)NFr)rr
rrrr>szNegativeInfinity.__ge__cCstS)N)r)rrrrrAszNegativeInfinity.__neg__N)rrrr	rrrrrrrrrrrrr'srN)Z
__future__rrr�objectrrrrrr�<module>s_vendor/packaging/__pycache__/utils.cpython-36.opt-1.pyc000064400000000630151733136460017071 0ustar003

�Pf��@s2ddlmZmZmZddlZejd�Zdd�ZdS)�)�absolute_import�division�print_functionNz[-_.]+cCstjd|�j�S)N�-)�_canonicalize_regex�sub�lower)�name�r
�/usr/lib/python3.6/utils.py�canonicalize_namesr)Z
__future__rrr�re�compilerrr
r
r
r�<module>s
_vendor/packaging/__pycache__/__init__.cpython-36.pyc000064400000000735151733136460016537 0ustar003

�Pf�@sTddlmZmZmZddlmZmZmZmZm	Z	m
Z
mZmZdddddd	d
dgZ
dS)
�)�absolute_import�division�print_function�)�
__author__�
__copyright__�	__email__�__license__�__summary__�	__title__�__uri__�__version__rr
rr
rrr	rN)Z
__future__rrr�	__about__rrrr	r
rrr
�__all__�rr�/usr/lib/python3.6/__init__.py�<module>s(
_vendor/packaging/__pycache__/__about__.cpython-36.pyc000064400000001177151733136460016707 0ustar003

�Pf��@sPddlmZmZmZdddddddd	gZd
ZdZdZd
ZdZ	dZ
dZde	ZdS)�)�absolute_import�division�print_function�	__title__�__summary__�__uri__�__version__�
__author__�	__email__�__license__�
__copyright__Z	packagingz"Core utilities for Python packagesz!https://github.com/pypa/packagingz16.8z)Donald Stufft and individual contributorszdonald@stufft.ioz"BSD or Apache License, Version 2.0zCopyright 2014-2016 %sN)
Z
__future__rrr�__all__rrrrr	r
rr�rr�/usr/lib/python3.6/__about__.py�<module>s

_vendor/packaging/__pycache__/_structures.cpython-36.pyc000064400000005335151733136460017363 0ustar003

�Pf��@sDddlmZmZmZGdd�de�Ze�ZGdd�de�Ze�ZdS)�)�absolute_import�division�print_functionc@sTeZdZdd�Zdd�Zdd�Zdd�Zd	d
�Zdd�Zd
d�Z	dd�Z
dd�ZdS)�InfinitycCsdS)Nr�)�selfrr�!/usr/lib/python3.6/_structures.py�__repr__	szInfinity.__repr__cCstt|��S)N)�hash�repr)rrrr�__hash__szInfinity.__hash__cCsdS)NFr)r�otherrrr�__lt__szInfinity.__lt__cCsdS)NFr)rr
rrr�__le__szInfinity.__le__cCst||j�S)N)�
isinstance�	__class__)rr
rrr�__eq__szInfinity.__eq__cCst||j�S)N)rr)rr
rrr�__ne__szInfinity.__ne__cCsdS)NTr)rr
rrr�__gt__szInfinity.__gt__cCsdS)NTr)rr
rrr�__ge__szInfinity.__ge__cCstS)N)�NegativeInfinity)rrrr�__neg__!szInfinity.__neg__N)�__name__�
__module__�__qualname__r	rrrrrrrrrrrrrsrc@sTeZdZdd�Zdd�Zdd�Zdd�Zd	d
�Zdd�Zd
d�Z	dd�Z
dd�ZdS)rcCsdS)Nz	-Infinityr)rrrrr	)szNegativeInfinity.__repr__cCstt|��S)N)r
r)rrrrr,szNegativeInfinity.__hash__cCsdS)NTr)rr
rrrr/szNegativeInfinity.__lt__cCsdS)NTr)rr
rrrr2szNegativeInfinity.__le__cCst||j�S)N)rr)rr
rrrr5szNegativeInfinity.__eq__cCst||j�S)N)rr)rr
rrrr8szNegativeInfinity.__ne__cCsdS)NFr)rr
rrrr;szNegativeInfinity.__gt__cCsdS)NFr)rr
rrrr>szNegativeInfinity.__ge__cCstS)N)r)rrrrrAszNegativeInfinity.__neg__N)rrrr	rrrrrrrrrrrrr'srN)Z
__future__rrr�objectrrrrrr�<module>s_vendor/packaging/__pycache__/_compat.cpython-36.pyc000064400000001634151733136460016421 0ustar003

�Pf\�@sVddlmZmZmZddlZejddkZejddkZerDefZ	ne
fZ	dd�ZdS)�)�absolute_import�division�print_functionN��cs&G��fdd�d��}tj|dfi�S)z/
    Create a base class with a metaclass.
    cseZdZ��fdd�ZdS)z!with_metaclass.<locals>.metaclasscs�|�|�S)N�)�cls�nameZ
this_bases�d)�bases�metar�/usr/lib/python3.6/_compat.py�__new__sz)with_metaclass.<locals>.metaclass.__new__N)�__name__�
__module__�__qualname__rr)rrrr
�	metaclasssrZtemporary_class)�typer)rrrr)rrr
�with_metaclasssr)Z
__future__rrr�sys�version_infoZPY2ZPY3�strZstring_typesZ
basestringrrrrr
�<module>s_vendor/packaging/__pycache__/__init__.cpython-36.opt-1.pyc000064400000000735151733136460017476 0ustar003

�Pf�@sTddlmZmZmZddlmZmZmZmZm	Z	m
Z
mZmZdddddd	d
dgZ
dS)
�)�absolute_import�division�print_function�)�
__author__�
__copyright__�	__email__�__license__�__summary__�	__title__�__uri__�__version__rr
rr
rrr	rN)Z
__future__rrr�	__about__rrrr	r
rrr
�__all__�rr�/usr/lib/python3.6/__init__.py�<module>s(
_vendor/packaging/__pycache__/specifiers.cpython-36.pyc000064400000046437151733136460017145 0ustar003

�Pfym�@s�ddlmZmZmZddlZddlZddlZddlZddlm	Z	m
Z
ddlmZm
Z
mZGdd�de�ZGdd	�d	e
eje��ZGd
d�de�ZGdd
�d
e�Zdd�ZGdd�de�Zejd�Zdd�Zdd�ZGdd�de�ZdS)�)�absolute_import�division�print_functionN�)�string_types�with_metaclass)�Version�
LegacyVersion�parsec@seZdZdZdS)�InvalidSpecifierzH
    An invalid specifier was found, users should refer to PEP 440.
    N)�__name__�
__module__�__qualname__�__doc__�rr� /usr/lib/python3.6/specifiers.pyrsrc@s�eZdZejdd��Zejdd��Zejdd��Zejdd��Zej	d	d
��Z
e
jdd
��Z
ejdd
d��Zejddd��Z
dS)�
BaseSpecifiercCsdS)z�
        Returns the str representation of this Specifier like object. This
        should be representative of the Specifier itself.
        Nr)�selfrrr�__str__szBaseSpecifier.__str__cCsdS)zF
        Returns a hash value for this Specifier like object.
        Nr)rrrr�__hash__szBaseSpecifier.__hash__cCsdS)zq
        Returns a boolean representing whether or not the two Specifier like
        objects are equal.
        Nr)r�otherrrr�__eq__$szBaseSpecifier.__eq__cCsdS)zu
        Returns a boolean representing whether or not the two Specifier like
        objects are not equal.
        Nr)rrrrr�__ne__+szBaseSpecifier.__ne__cCsdS)zg
        Returns whether or not pre-releases as a whole are allowed by this
        specifier.
        Nr)rrrr�prereleases2szBaseSpecifier.prereleasescCsdS)zd
        Sets whether or not pre-releases as a whole are allowed by this
        specifier.
        Nr)r�valuerrrr9sNcCsdS)zR
        Determines if the given item is contained within this specifier.
        Nr)r�itemrrrr�contains@szBaseSpecifier.containscCsdS)z�
        Takes an iterable of items and filters them so that only items which
        are contained within this specifier are allowed in it.
        Nr)r�iterablerrrr�filterFszBaseSpecifier.filter)N)N)rr
r�abc�abstractmethodrrrr�abstractpropertyr�setterrrrrrrrsrc@s�eZdZiZd dd�Zdd�Zdd�Zd	d
�Zdd�Zd
d�Z	dd�Z
dd�Zedd��Z
edd��Zedd��Zejdd��Zdd�Zd!dd�Zd"dd�ZdS)#�_IndividualSpecifier�NcCsF|jj|�}|stdj|���|jd�j�|jd�j�f|_||_dS)NzInvalid specifier: '{0}'�operator�version)�_regex�searchr�format�group�strip�_spec�_prereleases)r�specr�matchrrr�__init__Rsz_IndividualSpecifier.__init__cCs0|jdk	rdj|j�nd}dj|jjt|�|�S)Nz, prereleases={0!r}r$z<{0}({1!r}{2})>)r-r)r�	__class__r�str)r�prerrr�__repr___sz_IndividualSpecifier.__repr__cCsdj|j�S)Nz{0}{1})r)r,)rrrrrlsz_IndividualSpecifier.__str__cCs
t|j�S)N)�hashr,)rrrrrosz_IndividualSpecifier.__hash__cCsLt|t�r0y|j|�}Wq@tk
r,tSXnt||j�s@tS|j|jkS)N)�
isinstancerr1r�NotImplementedr,)rrrrrrrs
z_IndividualSpecifier.__eq__cCsLt|t�r0y|j|�}Wq@tk
r,tSXnt||j�s@tS|j|jkS)N)r6rr1rr7r,)rrrrrr}s
z_IndividualSpecifier.__ne__cCst|dj|j|��S)Nz_compare_{0})�getattrr)�
_operators)r�oprrr�
_get_operator�sz"_IndividualSpecifier._get_operatorcCst|ttf�st|�}|S)N)r6r	rr
)rr&rrr�_coerce_version�sz$_IndividualSpecifier._coerce_versioncCs
|jdS)Nr)r,)rrrrr%�sz_IndividualSpecifier.operatorcCs
|jdS)Nr)r,)rrrrr&�sz_IndividualSpecifier.versioncCs|jS)N)r-)rrrrr�sz _IndividualSpecifier.prereleasescCs
||_dS)N)r-)rrrrrr�scCs
|j|�S)N)r)rrrrr�__contains__�sz!_IndividualSpecifier.__contains__cCs<|dkr|j}|j|�}|jr(|r(dS|j|j�||j�S)NF)rr<�
is_prereleaser;r%r&)rrrrrrr�s
z_IndividualSpecifier.containsccs�d}g}d|dk	r|ndi}xL|D]D}|j|�}|j|f|�r"|jr\|pL|jr\|j|�q"d}|Vq"W|r�|r�x|D]
}|VqzWdS)NFrT)r<rr>r�append)rrrZyielded�found_prereleases�kwr&�parsed_versionrrrr�s




z_IndividualSpecifier.filter)r$N)N)N)rr
rr9r0r4rrrrr;r<�propertyr%r&rr"r=rrrrrrr#Ns 



r#c@sveZdZdZejdedejejB�Zdddddd	d
�Z	dd�Z
d
d�Zdd�Zdd�Z
dd�Zdd�Zdd�ZdS)�LegacySpecifiera�
        (?P<operator>(==|!=|<=|>=|<|>))
        \s*
        (?P<version>
            [^,;\s)]* # Since this is a "legacy" specifier, and the version
                      # string can be just about anything, we match everything
                      # except for whitespace, a semi-colon for marker support,
                      # a closing paren since versions can be enclosed in
                      # them, and a comma since it's a version separator.
        )
        z^\s*z\s*$�equal�	not_equal�less_than_equal�greater_than_equal�	less_than�greater_than)z==z!=z<=z>=�<�>cCst|t�stt|��}|S)N)r6r	r2)rr&rrrr<�s
zLegacySpecifier._coerce_versioncCs||j|�kS)N)r<)r�prospectiver.rrr�_compare_equal�szLegacySpecifier._compare_equalcCs||j|�kS)N)r<)rrMr.rrr�_compare_not_equal�sz"LegacySpecifier._compare_not_equalcCs||j|�kS)N)r<)rrMr.rrr�_compare_less_than_equal�sz(LegacySpecifier._compare_less_than_equalcCs||j|�kS)N)r<)rrMr.rrr�_compare_greater_than_equalsz+LegacySpecifier._compare_greater_than_equalcCs||j|�kS)N)r<)rrMr.rrr�_compare_less_thansz"LegacySpecifier._compare_less_thancCs||j|�kS)N)r<)rrMr.rrr�_compare_greater_thansz%LegacySpecifier._compare_greater_thanN)rr
r�
_regex_str�re�compile�VERBOSE�
IGNORECASEr'r9r<rNrOrPrQrRrSrrrrrD�s 
rDcstj���fdd��}|S)Ncst|t�sdS�|||�S)NF)r6r)rrMr.)�fnrr�wrappeds
z)_require_version_compare.<locals>.wrapped)�	functools�wraps)rYrZr)rYr�_require_version_compare
sr]c	@s�eZdZdZejdedejejB�Zdddddd	d
dd�Z	e
d
d��Ze
dd��Ze
dd��Z
e
dd��Ze
dd��Ze
dd��Ze
dd��Zdd�Zedd��Zejdd��Zd S)!�	Specifiera
        (?P<operator>(~=|==|!=|<=|>=|<|>|===))
        (?P<version>
            (?:
                # The identity operators allow for an escape hatch that will
                # do an exact string match of the version you wish to install.
                # This will not be parsed by PEP 440 and we cannot determine
                # any semantic meaning from it. This operator is discouraged
                # but included entirely as an escape hatch.
                (?<====)  # Only match for the identity operator
                \s*
                [^\s]*    # We just match everything, except for whitespace
                          # since we are only testing for strict identity.
            )
            |
            (?:
                # The (non)equality operators allow for wild card and local
                # versions to be specified so we have to define these two
                # operators separately to enable that.
                (?<===|!=)            # Only match for equals and not equals

                \s*
                v?
                (?:[0-9]+!)?          # epoch
                [0-9]+(?:\.[0-9]+)*   # release
                (?:                   # pre release
                    [-_\.]?
                    (a|b|c|rc|alpha|beta|pre|preview)
                    [-_\.]?
                    [0-9]*
                )?
                (?:                   # post release
                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
                )?

                # You cannot use a wild card and a dev or local version
                # together so group them with a | and make them optional.
                (?:
                    (?:[-_\.]?dev[-_\.]?[0-9]*)?         # dev release
                    (?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local
                    |
                    \.\*  # Wild card syntax of .*
                )?
            )
            |
            (?:
                # The compatible operator requires at least two digits in the
                # release segment.
                (?<=~=)               # Only match for the compatible operator

                \s*
                v?
                (?:[0-9]+!)?          # epoch
                [0-9]+(?:\.[0-9]+)+   # release  (We have a + instead of a *)
                (?:                   # pre release
                    [-_\.]?
                    (a|b|c|rc|alpha|beta|pre|preview)
                    [-_\.]?
                    [0-9]*
                )?
                (?:                                   # post release
                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
                )?
                (?:[-_\.]?dev[-_\.]?[0-9]*)?          # dev release
            )
            |
            (?:
                # All other operators only allow a sub set of what the
                # (non)equality operators do. Specifically they do not allow
                # local versions to be specified nor do they allow the prefix
                # matching wild cards.
                (?<!==|!=|~=)         # We have special cases for these
                                      # operators so we want to make sure they
                                      # don't match here.

                \s*
                v?
                (?:[0-9]+!)?          # epoch
                [0-9]+(?:\.[0-9]+)*   # release
                (?:                   # pre release
                    [-_\.]?
                    (a|b|c|rc|alpha|beta|pre|preview)
                    [-_\.]?
                    [0-9]*
                )?
                (?:                                   # post release
                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
                )?
                (?:[-_\.]?dev[-_\.]?[0-9]*)?          # dev release
            )
        )
        z^\s*z\s*$Z
compatiblerErFrGrHrIrJZ	arbitrary)z~=z==z!=z<=z>=rKrLz===cCsNdjttjdd�t|���dd��}|d7}|jd�||�oL|jd�||�S)	N�.cSs|jd�o|jd�S)NZpostZdev)�
startswith)�xrrr�<lambda>�sz/Specifier._compare_compatible.<locals>.<lambda>rz.*z>=z==���)�join�list�	itertools�	takewhile�_version_splitr;)rrMr.�prefixrrr�_compare_compatible�s
zSpecifier._compare_compatiblecCsp|jd�rPt|j�}t|dd��}tt|��}|dt|��}t||�\}}nt|�}|jsht|j�}||kS)Nz.*����)�endswithrZpublicrhr2�len�_pad_version�local)rrMr.rrrrN�s


zSpecifier._compare_equalcCs|j||�S)N)rN)rrMr.rrrrO�szSpecifier._compare_not_equalcCs|t|�kS)N)r)rrMr.rrrrP�sz"Specifier._compare_less_than_equalcCs|t|�kS)N)r)rrMr.rrrrQ�sz%Specifier._compare_greater_than_equalcCs>t|�}||ksdS|jr:|jr:t|j�t|j�kr:dSdS)NFT)rr>�base_version)rrMr.rrrrR�szSpecifier._compare_less_thancCs`t|�}||ksdS|jr:|jr:t|j�t|j�kr:dS|jdk	r\t|j�t|j�kr\dSdS)NFT)rZis_postreleaserqrp)rrMr.rrrrS�s
zSpecifier._compare_greater_thancCst|�j�t|�j�kS)N)r2�lower)rrMr.rrr�_compare_arbitraryszSpecifier._compare_arbitrarycCsR|jdk	r|jS|j\}}|d
krN|dkr@|jd�r@|dd�}t|�jrNdSd	S)N�==�>=�<=�~=�===z.*rkTF)rtrurvrwrxrl)r-r,rmr
r>)rr%r&rrrrs


zSpecifier.prereleasescCs
||_dS)N)r-)rrrrrrsN)rr
rrTrUrVrWrXr'r9r]rjrNrOrPrQrRrSrsrCrr"rrrrr^s*^#r^z^([0-9]+)((?:a|b|c|rc)[0-9]+)$cCsDg}x:|jd�D],}tj|�}|r2|j|j��q|j|�qW|S)Nr_)�split�
_prefix_regexr(�extend�groupsr?)r&�resultrr/rrrrh's
rhc	Cs�gg}}|jttjdd�|���|jttjdd�|���|j|t|d�d��|j|t|d�d��|jddgtdt|d�t|d���|jddgtdt|d�t|d���ttj|��ttj|��fS)NcSs|j�S)N)�isdigit)rarrrrb6sz_pad_version.<locals>.<lambda>cSs|j�S)N)r~)rarrrrb7srr�0)r?rerfrgrn�insert�max�chain)�left�rightZ
left_splitZright_splitrrrro2s
&&roc@s�eZdZddd�Zdd�Zdd�Zd	d
�Zdd�Zd
d�Zdd�Z	dd�Z
dd�Zedd��Z
e
jdd��Z
dd�Zddd�Zd dd�ZdS)!�SpecifierSetr$NcCsrdd�|jd�D�}t�}xB|D]:}y|jt|��Wq tk
rX|jt|��Yq Xq Wt|�|_||_dS)NcSsg|]}|j�r|j��qSr)r+)�.0�srrr�
<listcomp>Rsz)SpecifierSet.__init__.<locals>.<listcomp>�,)	ry�set�addr^rrD�	frozenset�_specsr-)rZ
specifiersrZparsed�	specifierrrrr0Os

zSpecifierSet.__init__cCs*|jdk	rdj|j�nd}djt|�|�S)Nz, prereleases={0!r}r$z<SpecifierSet({0!r}{1})>)r-r)rr2)rr3rrrr4dszSpecifierSet.__repr__cCsdjtdd�|jD���S)Nr�css|]}t|�VqdS)N)r2)r�r�rrr�	<genexpr>nsz'SpecifierSet.__str__.<locals>.<genexpr>)rd�sortedr�)rrrrrmszSpecifierSet.__str__cCs
t|j�S)N)r5r�)rrrrrpszSpecifierSet.__hash__cCs�t|t�rt|�}nt|t�s"tSt�}t|j|jB�|_|jdkrX|jdk	rX|j|_n<|jdk	rv|jdkrv|j|_n|j|jkr�|j|_ntd��|S)NzFCannot combine SpecifierSets with True and False prerelease overrides.)r6rr�r7r�r�r-�
ValueError)rrr�rrr�__and__ss





zSpecifierSet.__and__cCsFt|t�rt|�}n&t|t�r,tt|��}nt|t�s:tS|j|jkS)N)r6rr�r#r2r7r�)rrrrrr�s



zSpecifierSet.__eq__cCsFt|t�rt|�}n&t|t�r,tt|��}nt|t�s:tS|j|jkS)N)r6rr�r#r2r7r�)rrrrrr�s



zSpecifierSet.__ne__cCs
t|j�S)N)rnr�)rrrr�__len__�szSpecifierSet.__len__cCs
t|j�S)N)�iterr�)rrrr�__iter__�szSpecifierSet.__iter__cCs.|jdk	r|jS|jsdStdd�|jD��S)Ncss|]}|jVqdS)N)r)r�r�rrrr��sz+SpecifierSet.prereleases.<locals>.<genexpr>)r-r��any)rrrrr�s

zSpecifierSet.prereleasescCs
||_dS)N)r-)rrrrrr�scCs
|j|�S)N)r)rrrrrr=�szSpecifierSet.__contains__csNt�ttf�st����dkr$|j��r4�jr4dSt��fdd�|jD��S)NFc3s|]}|j��d�VqdS))rN)r)r�r�)rrrrr��sz(SpecifierSet.contains.<locals>.<genexpr>)r6r	rr
rr>�allr�)rrrr)rrrr�szSpecifierSet.containscCs�|dkr|j}|jr:x |jD]}|j|t|�d�}qW|Sg}g}xZ|D]R}t|ttf�sdt|�}n|}t|t�rtqH|jr�|r�|s�|j	|�qH|j	|�qHW|r�|r�|dkr�|S|SdS)N)r)
rr�r�boolr6r	rr
r>r?)rrrr.Zfilteredr@rrBrrrr�s*


zSpecifierSet.filter)r$N)N)N)rr
rr0r4rrr�rrr�r�rCrr"r=rrrrrrr�Ms
	


r�)Z
__future__rrrrr[rfrUZ_compatrrr&rr	r
r�r�ABCMeta�objectrr#rDr]r^rVrzrhror�rrrr�<module>s&9	4	
_vendor/packaging/__pycache__/markers.cpython-36.opt-1.pyc000064400000020761151733136460017404 0ustar003

�Pf& �	@s@ddlmZmZmZddlZddlZddlZddlZddlm	Z	m
Z
mZmZddlm
Z
mZmZmZddlmZddlmZddlmZmZd	d
ddd
gZGdd	�d	e�ZGdd
�d
e�ZGdd�de�ZGdd�de�ZGdd�de�ZGdd�de�Z Gdd�de�Z!ed�ed�Bed�Bed�Bed�Bed�Bed�Bed �Bed!�Bed"�Bed#�Bed$�Bed%�Bed&�Bed'�Bed(�Bed)�Bed*�BZ"d#d"ddddd+�Z#e"j$d,d-��ed.�ed/�Bed0�Bed1�Bed2�Bed3�Bed4�Bed5�BZ%e%ed6�Bed7�BZ&e&j$d8d-��ed9�ed:�BZ'e'j$d;d-��ed<�ed=�BZ(e"e'BZ)ee)e&e)�Z*e*j$d>d-��ed?�j+�Z,ed@�j+�Z-e�Z.e*ee,e.e-�BZ/e.e/e
e(e.�>ee.eZ0dAdB�Z1dSdDdE�Z2dFd-�dGd-�ej3ej4ej5ej6ej7ej8dH�Z9dIdJ�Z:e�Z;dKdL�Z<dMdN�Z=dOdP�Z>dQd
�Z?GdRd�de�Z@dS)T�)�absolute_import�division�print_functionN)�ParseException�ParseResults�stringStart�	stringEnd)�
ZeroOrMore�Group�Forward�QuotedString)�Literal�)�string_types)�	Specifier�InvalidSpecifier�
InvalidMarker�UndefinedComparison�UndefinedEnvironmentName�Marker�default_environmentc@seZdZdZdS)rzE
    An invalid marker was found, users should refer to PEP 508.
    N)�__name__�
__module__�__qualname__�__doc__�rr�/usr/lib/python3.6/markers.pyrsc@seZdZdZdS)rzP
    An invalid operation was attempted on a value that doesn't support it.
    N)rrrrrrrrr!sc@seZdZdZdS)rz\
    A name was attempted to be used that does not exist inside of the
    environment.
    N)rrrrrrrrr'sc@s,eZdZdd�Zdd�Zdd�Zdd�Zd	S)
�NodecCs
||_dS)N)�value)�selfrrrr�__init__0sz
Node.__init__cCs
t|j�S)N)�strr)rrrr�__str__3szNode.__str__cCsdj|jjt|��S)Nz<{0}({1!r})>)�format�	__class__rr!)rrrr�__repr__6sz
Node.__repr__cCst�dS)N)�NotImplementedError)rrrr�	serialize9szNode.serializeN)rrrr r"r%r'rrrrr.src@seZdZdd�ZdS)�VariablecCst|�S)N)r!)rrrrr'?szVariable.serializeN)rrrr'rrrrr(=sr(c@seZdZdd�ZdS)�ValuecCs
dj|�S)Nz"{0}")r#)rrrrr'EszValue.serializeN)rrrr'rrrrr)Csr)c@seZdZdd�ZdS)�OpcCst|�S)N)r!)rrrrr'KszOp.serializeN)rrrr'rrrrr*Isr*�implementation_version�platform_python_implementation�implementation_name�python_full_version�platform_release�platform_version�platform_machine�platform_system�python_version�sys_platform�os_namezos.namezsys.platformzplatform.versionzplatform.machinezplatform.python_implementation�python_implementationZextra)zos.namezsys.platformzplatform.versionzplatform.machinezplatform.python_implementationr6cCsttj|d|d��S)Nr)r(�ALIASES�get)�s�l�trrr�<lambda>ksr<z===z==z>=z<=z!=z~=�>�<znot in�incCst|d�S)Nr)r*)r9r:r;rrrr<ys�'�"cCst|d�S)Nr)r))r9r:r;rrrr<|s�and�orcCst|d�S)Nr)�tuple)r9r:r;rrrr<�s�(�)cCs t|t�rdd�|D�S|SdS)NcSsg|]}t|��qSr)�_coerce_parse_result)�.0�irrr�
<listcomp>�sz(_coerce_parse_result.<locals>.<listcomp>)�
isinstancer)�resultsrrrrG�s
rGTcCs�t|t�r4t|�dkr4t|dttf�r4t|d�St|t�rndd�|D�}|rZdj|�Sddj|�dSn"t|t�r�djdd	�|D��S|SdS)
Nrrcss|]}t|dd�VqdS)F)�firstN)�_format_marker)rH�mrrr�	<genexpr>�sz!_format_marker.<locals>.<genexpr>� rErFcSsg|]}|j��qSr)r')rHrOrrrrJ�sz"_format_marker.<locals>.<listcomp>)rK�list�lenrDrN�join)�markerrM�innerrrrrN�s


rNcCs||kS)Nr)�lhs�rhsrrrr<�scCs||kS)Nr)rWrXrrrr<�s)r?znot inr>z<=z==z!=z>=r=c
Cslytdj|j�|g��}Wntk
r.YnX|j|�Stj|j��}|dkrbtdj|||���|||�S)N�z#Undefined {0!r} on {1!r} and {2!r}.)	rrTr'r�contains�
_operatorsr8rr#)rW�oprX�specZoperrrr�_eval_op�s
r^cCs&|j|t�}|tkr"tdj|���|S)Nz/{0!r} does not exist in evaluation environment.)r8�
_undefinedrr#)�environment�namerrrr�_get_env�s
rbc	Cs�gg}x�|D]�}t|t�r0|djt||��qt|t�r�|\}}}t|t�rbt||j�}|j}n|j}t||j�}|djt|||��q|dkr|jg�qWt	dd�|D��S)NrrCcss|]}t|�VqdS)N)�all)rH�itemrrrrP�sz$_evaluate_markers.<locals>.<genexpr>���re)
rKrR�append�_evaluate_markersrDr(rbrr^�any)	Zmarkersr`�groupsrUrWr\rXZ	lhs_valueZ	rhs_valuerrrrg�s




rgcCs2dj|�}|j}|dkr.||dt|j�7}|S)Nz{0.major}.{0.minor}.{0.micro}�finalr)r#�releaselevelr!�serial)�info�versionZkindrrr�format_full_version�s

rocCslttd�r ttjj�}tjj}nd}d}||tjtj�tj	�tj
�tj�tj�tj�tj�dd�tjd�S)N�implementation�0rY�)r-r+r5r1r/r2r0r.r,r3r4)
�hasattr�sysrorprnra�os�platform�machine�release�systemr3r6)Ziverr-rrrr�s 

c@s.eZdZdd�Zdd�Zdd�Zd
dd	�ZdS)rcCs`yttj|��|_WnFtk
rZ}z*dj|||j|jd��}t|��WYdd}~XnXdS)Nz+Invalid marker: {0!r}, parse error at {1!r}�)rG�MARKERZparseString�_markersrr#�locr)rrU�eZerr_strrrrr szMarker.__init__cCs
t|j�S)N)rNr|)rrrrr"szMarker.__str__cCsdjt|��S)Nz<Marker({0!r})>)r#r!)rrrrr%szMarker.__repr__NcCs$t�}|dk	r|j|�t|j|�S)a$Evaluate a marker.

        Return the boolean from evaluating the given marker against the
        environment. environment is an optional argument to override all or
        part of the determined environment.

        The environment is determined from the current Python process.
        N)r�updatergr|)rr`Zcurrent_environmentrrr�evaluate"s	
zMarker.evaluate)N)rrrr r"r%r�rrrrrs)T)AZ
__future__rrr�operatorrurvrtZpip._vendor.pyparsingrrrrr	r
rrr
�LZ_compatrZ
specifiersrr�__all__�
ValueErrorrrr�objectrr(r)r*ZVARIABLEr7ZsetParseActionZVERSION_CMPZ	MARKER_OPZMARKER_VALUEZBOOLOPZ
MARKER_VARZMARKER_ITEM�suppressZLPARENZRPARENZMARKER_EXPRZMARKER_ATOMr{rGrN�lt�le�eq�ne�ge�gtr[r^r_rbrgrorrrrrr�<module>sx�
	6


_vendor/packaging/__pycache__/markers.cpython-36.pyc000064400000021133151733136460016437 0ustar003

�Pf& �	@s@ddlmZmZmZddlZddlZddlZddlZddlm	Z	m
Z
mZmZddlm
Z
mZmZmZddlmZddlmZddlmZmZd	d
ddd
gZGdd	�d	e�ZGdd
�d
e�ZGdd�de�ZGdd�de�ZGdd�de�ZGdd�de�Z Gdd�de�Z!ed�ed�Bed�Bed�Bed�Bed�Bed�Bed �Bed!�Bed"�Bed#�Bed$�Bed%�Bed&�Bed'�Bed(�Bed)�Bed*�BZ"d#d"ddddd+�Z#e"j$d,d-��ed.�ed/�Bed0�Bed1�Bed2�Bed3�Bed4�Bed5�BZ%e%ed6�Bed7�BZ&e&j$d8d-��ed9�ed:�BZ'e'j$d;d-��ed<�ed=�BZ(e"e'BZ)ee)e&e)�Z*e*j$d>d-��ed?�j+�Z,ed@�j+�Z-e�Z.e*ee,e.e-�BZ/e.e/e
e(e.�>ee.eZ0dAdB�Z1dSdDdE�Z2dFd-�dGd-�ej3ej4ej5ej6ej7ej8dH�Z9dIdJ�Z:e�Z;dKdL�Z<dMdN�Z=dOdP�Z>dQd
�Z?GdRd�de�Z@dS)T�)�absolute_import�division�print_functionN)�ParseException�ParseResults�stringStart�	stringEnd)�
ZeroOrMore�Group�Forward�QuotedString)�Literal�)�string_types)�	Specifier�InvalidSpecifier�
InvalidMarker�UndefinedComparison�UndefinedEnvironmentName�Marker�default_environmentc@seZdZdZdS)rzE
    An invalid marker was found, users should refer to PEP 508.
    N)�__name__�
__module__�__qualname__�__doc__�rr�/usr/lib/python3.6/markers.pyrsc@seZdZdZdS)rzP
    An invalid operation was attempted on a value that doesn't support it.
    N)rrrrrrrrr!sc@seZdZdZdS)rz\
    A name was attempted to be used that does not exist inside of the
    environment.
    N)rrrrrrrrr'sc@s,eZdZdd�Zdd�Zdd�Zdd�Zd	S)
�NodecCs
||_dS)N)�value)�selfrrrr�__init__0sz
Node.__init__cCs
t|j�S)N)�strr)rrrr�__str__3szNode.__str__cCsdj|jjt|��S)Nz<{0}({1!r})>)�format�	__class__rr!)rrrr�__repr__6sz
Node.__repr__cCst�dS)N)�NotImplementedError)rrrr�	serialize9szNode.serializeN)rrrr r"r%r'rrrrr.src@seZdZdd�ZdS)�VariablecCst|�S)N)r!)rrrrr'?szVariable.serializeN)rrrr'rrrrr(=sr(c@seZdZdd�ZdS)�ValuecCs
dj|�S)Nz"{0}")r#)rrrrr'EszValue.serializeN)rrrr'rrrrr)Csr)c@seZdZdd�ZdS)�OpcCst|�S)N)r!)rrrrr'KszOp.serializeN)rrrr'rrrrr*Isr*�implementation_version�platform_python_implementation�implementation_name�python_full_version�platform_release�platform_version�platform_machine�platform_system�python_version�sys_platform�os_namezos.namezsys.platformzplatform.versionzplatform.machinezplatform.python_implementation�python_implementationZextra)zos.namezsys.platformzplatform.versionzplatform.machinezplatform.python_implementationr6cCsttj|d|d��S)Nr)r(�ALIASES�get)�s�l�trrr�<lambda>ksr<z===z==z>=z<=z!=z~=�>�<znot in�incCst|d�S)Nr)r*)r9r:r;rrrr<ys�'�"cCst|d�S)Nr)r))r9r:r;rrrr<|s�and�orcCst|d�S)Nr)�tuple)r9r:r;rrrr<�s�(�)cCs t|t�rdd�|D�S|SdS)NcSsg|]}t|��qSr)�_coerce_parse_result)�.0�irrr�
<listcomp>�sz(_coerce_parse_result.<locals>.<listcomp>)�
isinstancer)�resultsrrrrG�s
rGTcCs�t|tttf�st�t|t�rHt|�dkrHt|dttf�rHt|d�St|t�r�dd�|D�}|rndj|�Sddj|�dSn"t|t�r�djdd	�|D��S|SdS)
Nrrcss|]}t|dd�VqdS)F)�firstN)�_format_marker)rH�mrrr�	<genexpr>�sz!_format_marker.<locals>.<genexpr>� rErFcSsg|]}|j��qSr)r')rHrOrrrrJ�sz"_format_marker.<locals>.<listcomp>)rK�listrDr�AssertionError�lenrN�join)�markerrM�innerrrrrN�s


rNcCs||kS)Nr)�lhs�rhsrrrr<�scCs||kS)Nr)rXrYrrrr<�s)r?znot inr>z<=z==z!=z>=r=c
Cslytdj|j�|g��}Wntk
r.YnX|j|�Stj|j��}|dkrbtdj|||���|||�S)N�z#Undefined {0!r} on {1!r} and {2!r}.)	rrUr'r�contains�
_operatorsr8rr#)rX�oprY�specZoperrrr�_eval_op�s
r_cCs&|j|t�}|tkr"tdj|���|S)Nz/{0!r} does not exist in evaluation environment.)r8�
_undefinedrr#)�environment�namerrrr�_get_env�s
rcc	Cs�gg}x�|D]�}t|tttf�s$t�t|t�rD|djt||��qt|t�r�|\}}}t|t�rvt||j	�}|j	}n|j	}t||j	�}|djt
|||��q|dks�t�|dkr|jg�qWtdd�|D��S)	NrrBrCcss|]}t|�VqdS)N)�all)rH�itemrrrrP�sz$_evaluate_markers.<locals>.<genexpr>���rf)rBrC)rKrRrDrrS�append�_evaluate_markersr(rcrr_�any)	Zmarkersra�groupsrVrXr]rYZ	lhs_valueZ	rhs_valuerrrrh�s"




rhcCs2dj|�}|j}|dkr.||dt|j�7}|S)Nz{0.major}.{0.minor}.{0.micro}�finalr)r#�releaselevelr!�serial)�info�versionZkindrrr�format_full_version�s

rpcCslttd�r ttjj�}tjj}nd}d}||tjtj�tj	�tj
�tj�tj�tj�tj�dd�tjd�S)N�implementation�0rZ�)r-r+r5r1r/r2r0r.r,r3r4)
�hasattr�sysrprqrorb�os�platform�machine�release�systemr3r6)Ziverr-rrrr�s 

c@s.eZdZdd�Zdd�Zdd�Zd
dd	�ZdS)rcCs`yttj|��|_WnFtk
rZ}z*dj|||j|jd��}t|��WYdd}~XnXdS)Nz+Invalid marker: {0!r}, parse error at {1!r}�)rG�MARKERZparseString�_markersrr#�locr)rrV�eZerr_strrrrr szMarker.__init__cCs
t|j�S)N)rNr})rrrrr"szMarker.__str__cCsdjt|��S)Nz<Marker({0!r})>)r#r!)rrrrr%szMarker.__repr__NcCs$t�}|dk	r|j|�t|j|�S)a$Evaluate a marker.

        Return the boolean from evaluating the given marker against the
        environment. environment is an optional argument to override all or
        part of the determined environment.

        The environment is determined from the current Python process.
        N)r�updaterhr})rraZcurrent_environmentrrr�evaluate"s	
zMarker.evaluate)N)rrrr r"r%r�rrrrrs)T)AZ
__future__rrr�operatorrvrwruZpip._vendor.pyparsingrrrrr	r
rrr
�LZ_compatrZ
specifiersrr�__all__�
ValueErrorrrr�objectrr(r)r*ZVARIABLEr7ZsetParseActionZVERSION_CMPZ	MARKER_OPZMARKER_VALUEZBOOLOPZ
MARKER_VARZMARKER_ITEM�suppressZLPARENZRPARENZMARKER_EXPRZMARKER_ATOMr|rGrN�lt�le�eq�ne�ge�gtr\r_r`rcrhrprrrrrr�<module>sx�
	6


_vendor/packaging/__pycache__/utils.cpython-36.pyc000064400000000630151733136460016132 0ustar003

�Pf��@s2ddlmZmZmZddlZejd�Zdd�ZdS)�)�absolute_import�division�print_functionNz[-_.]+cCstjd|�j�S)N�-)�_canonicalize_regex�sub�lower)�name�r
�/usr/lib/python3.6/utils.py�canonicalize_namesr)Z
__future__rrr�re�compilerrr
r
r
r�<module>s
_vendor/packaging/__pycache__/requirements.cpython-36.opt-1.pyc000064400000007306151733136460020463 0ustar003

�Pf��@srddlmZmZmZddlZddlZddlmZmZm	Z	m
Z
ddlmZmZm
Z
mZmZddlmZddlmZddlmZmZdd	lmZmZmZGd
d�de�Zeejej�Z ed�j!�Z"ed
�j!�Z#ed�j!�Z$ed�j!�Z%ed�j!�Z&ed�j!�Z'ed�j!�Z(ed�Z)e ee)�e BZ*ee ee*��Z+e+d�Z,e+Z-ed�d�Z.e(e.Z/e-ee&e-�Z0e"e
e0�e#d�Z1eej2ej3ej4B�Z5eej2ej3ej4B�Z6e5e6AZ7ee7ee&e7�ddd�d�Z8e
e$e8e%e8B�Z9e9j:dd��e	e9�d�Z;e;j:dd��e	e��d�Zej:d d��e'Z<e<eZ=e;e
e=�Z>e/e
e=�Z?e,e
e1�e?e>BZ@ee@eZAGd!d"�d"eB�ZCdS)#�)�absolute_import�division�print_functionN)�stringStart�	stringEnd�originalTextFor�ParseException)�
ZeroOrMore�Word�Optional�Regex�Combine)�Literal)�parse�)�MARKER_EXPR�Marker)�LegacySpecifier�	Specifier�SpecifierSetc@seZdZdZdS)�InvalidRequirementzJ
    An invalid requirement was found, users should refer to PEP 508.
    N)�__name__�
__module__�__qualname__�__doc__�rr�"/usr/lib/python3.6/requirements.pyrsr�[�]�(�)�,�;�@z-_.�namez[^ ]+�url�extrasF)Z
joinStringZadjacent�	_raw_speccCs
|jpdS)N�)r')�s�l�trrr�<lambda>8sr,�	specifiercCs|dS)Nrr)r)r*r+rrrr,;s�markercCst||j|j��S)N)rZ_original_startZ
_original_end)r)r*r+rrrr,?sc@s(eZdZdZdd�Zdd�Zdd�ZdS)	�Requirementz�Parse a requirement.

    Parse a given requirement string into its parts, such as name, specifier,
    URL, and extras. Raises InvalidRequirement on a badly-formed requirement
    string.
    cCs�ytj|�}Wn@tk
rN}z$tdj||j|jd����WYdd}~XnX|j|_|jr�tj|j�}|j	ot|j
s�|j	r�|j
r�td��|j|_nd|_t|jr�|jj
�ng�|_t|j�|_|jr�|jnd|_dS)Nz+Invalid requirement, parse error at "{0!r}"�zInvalid URL given)�REQUIREMENTZparseStringrr�format�locr$r%�urlparse�schemeZnetloc�setr&ZasListrr-r.)�selfZrequirement_stringZreq�eZ
parsed_urlrrr�__init__Zs"*
zRequirement.__init__cCsz|jg}|jr*|jdjdjt|j����|jr@|jt|j��|jrX|jdj|j��|j	rp|jdj|j	��dj|�S)Nz[{0}]r!z@ {0}z; {0}r()
r$r&�appendr2�join�sortedr-�strr%r.)r7�partsrrr�__str__oszRequirement.__str__cCsdjt|��S)Nz<Requirement({0!r})>)r2r=)r7rrr�__repr__�szRequirement.__repr__N)rrrrr9r?r@rrrrr/Msr/)DZ
__future__rrr�string�reZpip._vendor.pyparsingrrrrr	r
rrr
r�LZpip._vendor.six.moves.urllibrr4ZmarkersrrZ
specifiersrrr�
ValueErrorrZ
ascii_lettersZdigitsZALPHANUM�suppressZLBRACKETZRBRACKETZLPARENZRPAREN�COMMAZ	SEMICOLON�ATZPUNCTUATIONZIDENTIFIER_ENDZ
IDENTIFIER�NAMEZEXTRAZURIZURLZEXTRAS_LISTZEXTRASZ
_regex_str�VERBOSE�
IGNORECASEZVERSION_PEP440ZVERSION_LEGACYZVERSION_ONEZVERSION_MANYZ
_VERSION_SPECZsetParseActionZVERSION_SPECZMARKER_SEPERATORZMARKERZVERSION_AND_MARKERZURL_AND_MARKERZNAMED_REQUIREMENTr1�objectr/rrrr�<module>sZ
_vendor/packaging/__pycache__/requirements.cpython-36.pyc000064400000007306151733136460017524 0ustar003

�Pf��@srddlmZmZmZddlZddlZddlmZmZm	Z	m
Z
ddlmZmZm
Z
mZmZddlmZddlmZddlmZmZdd	lmZmZmZGd
d�de�Zeejej�Z ed�j!�Z"ed
�j!�Z#ed�j!�Z$ed�j!�Z%ed�j!�Z&ed�j!�Z'ed�j!�Z(ed�Z)e ee)�e BZ*ee ee*��Z+e+d�Z,e+Z-ed�d�Z.e(e.Z/e-ee&e-�Z0e"e
e0�e#d�Z1eej2ej3ej4B�Z5eej2ej3ej4B�Z6e5e6AZ7ee7ee&e7�ddd�d�Z8e
e$e8e%e8B�Z9e9j:dd��e	e9�d�Z;e;j:dd��e	e��d�Zej:d d��e'Z<e<eZ=e;e
e=�Z>e/e
e=�Z?e,e
e1�e?e>BZ@ee@eZAGd!d"�d"eB�ZCdS)#�)�absolute_import�division�print_functionN)�stringStart�	stringEnd�originalTextFor�ParseException)�
ZeroOrMore�Word�Optional�Regex�Combine)�Literal)�parse�)�MARKER_EXPR�Marker)�LegacySpecifier�	Specifier�SpecifierSetc@seZdZdZdS)�InvalidRequirementzJ
    An invalid requirement was found, users should refer to PEP 508.
    N)�__name__�
__module__�__qualname__�__doc__�rr�"/usr/lib/python3.6/requirements.pyrsr�[�]�(�)�,�;�@z-_.�namez[^ ]+�url�extrasF)Z
joinStringZadjacent�	_raw_speccCs
|jpdS)N�)r')�s�l�trrr�<lambda>8sr,�	specifiercCs|dS)Nrr)r)r*r+rrrr,;s�markercCst||j|j��S)N)rZ_original_startZ
_original_end)r)r*r+rrrr,?sc@s(eZdZdZdd�Zdd�Zdd�ZdS)	�Requirementz�Parse a requirement.

    Parse a given requirement string into its parts, such as name, specifier,
    URL, and extras. Raises InvalidRequirement on a badly-formed requirement
    string.
    cCs�ytj|�}Wn@tk
rN}z$tdj||j|jd����WYdd}~XnX|j|_|jr�tj|j�}|j	ot|j
s�|j	r�|j
r�td��|j|_nd|_t|jr�|jj
�ng�|_t|j�|_|jr�|jnd|_dS)Nz+Invalid requirement, parse error at "{0!r}"�zInvalid URL given)�REQUIREMENTZparseStringrr�format�locr$r%�urlparse�schemeZnetloc�setr&ZasListrr-r.)�selfZrequirement_stringZreq�eZ
parsed_urlrrr�__init__Zs"*
zRequirement.__init__cCsz|jg}|jr*|jdjdjt|j����|jr@|jt|j��|jrX|jdj|j��|j	rp|jdj|j	��dj|�S)Nz[{0}]r!z@ {0}z; {0}r()
r$r&�appendr2�join�sortedr-�strr%r.)r7�partsrrr�__str__oszRequirement.__str__cCsdjt|��S)Nz<Requirement({0!r})>)r2r=)r7rrr�__repr__�szRequirement.__repr__N)rrrrr9r?r@rrrrr/Msr/)DZ
__future__rrr�string�reZpip._vendor.pyparsingrrrrr	r
rrr
r�LZpip._vendor.six.moves.urllibrr4ZmarkersrrZ
specifiersrrr�
ValueErrorrZ
ascii_lettersZdigitsZALPHANUM�suppressZLBRACKETZRBRACKETZLPARENZRPAREN�COMMAZ	SEMICOLON�ATZPUNCTUATIONZIDENTIFIER_ENDZ
IDENTIFIER�NAMEZEXTRAZURIZURLZEXTRAS_LISTZEXTRASZ
_regex_str�VERBOSE�
IGNORECASEZVERSION_PEP440ZVERSION_LEGACYZVERSION_ONEZVERSION_MANYZ
_VERSION_SPECZsetParseActionZVERSION_SPECZMARKER_SEPERATORZMARKERZVERSION_AND_MARKERZURL_AND_MARKERZNAMED_REQUIREMENTr1�objectr/rrrr�<module>sZ
_vendor/packaging/__pycache__/version.cpython-36.opt-1.pyc000064400000024426151733136460017427 0ustar003

�Pf$-�@s�ddlmZmZmZddlZddlZddlZddlmZddddd	gZ	ej
d
ddd
dddg�Zdd�ZGdd�de
�ZGdd�de�ZGdd�de�Zejdej�Zdddddd�Zdd�Zdd�ZdZGd d�de�Zd!d"�Zejd#�Zd$d%�Zd&d'�ZdS)(�)�absolute_import�division�print_functionN�)�Infinity�parse�Version�
LegacyVersion�InvalidVersion�VERSION_PATTERN�_Version�epoch�release�dev�pre�post�localcCs&yt|�Stk
r t|�SXdS)z�
    Parse the given version string and return either a :class:`Version` object
    or a :class:`LegacyVersion` object depending on if the given version is
    a valid PEP 440 version or a legacy version.
    N)rr
r	)�version�r�/usr/lib/python3.6/version.pyrsc@seZdZdZdS)r
zF
    An invalid version was found, users should refer to PEP 440.
    N)�__name__�
__module__�__qualname__�__doc__rrrrr
$sc@sLeZdZdd�Zdd�Zdd�Zdd�Zd	d
�Zdd�Zd
d�Z	dd�Z
dS)�_BaseVersioncCs
t|j�S)N)�hash�_key)�selfrrr�__hash__,sz_BaseVersion.__hash__cCs|j|dd��S)NcSs||kS)Nr)�s�orrr�<lambda>0sz%_BaseVersion.__lt__.<locals>.<lambda>)�_compare)r�otherrrr�__lt__/sz_BaseVersion.__lt__cCs|j|dd��S)NcSs||kS)Nr)rr rrrr!3sz%_BaseVersion.__le__.<locals>.<lambda>)r")rr#rrr�__le__2sz_BaseVersion.__le__cCs|j|dd��S)NcSs||kS)Nr)rr rrrr!6sz%_BaseVersion.__eq__.<locals>.<lambda>)r")rr#rrr�__eq__5sz_BaseVersion.__eq__cCs|j|dd��S)NcSs||kS)Nr)rr rrrr!9sz%_BaseVersion.__ge__.<locals>.<lambda>)r")rr#rrr�__ge__8sz_BaseVersion.__ge__cCs|j|dd��S)NcSs||kS)Nr)rr rrrr!<sz%_BaseVersion.__gt__.<locals>.<lambda>)r")rr#rrr�__gt__;sz_BaseVersion.__gt__cCs|j|dd��S)NcSs||kS)Nr)rr rrrr!?sz%_BaseVersion.__ne__.<locals>.<lambda>)r")rr#rrr�__ne__>sz_BaseVersion.__ne__cCst|t�stS||j|j�S)N)�
isinstancer�NotImplementedr)rr#�methodrrrr"As
z_BaseVersion._compareN)rrrrr$r%r&r'r(r)r"rrrrr*src@s`eZdZdd�Zdd�Zdd�Zedd��Zed	d
��Zedd��Z	ed
d��Z
edd��ZdS)r	cCst|�|_t|j�|_dS)N)�str�_version�_legacy_cmpkeyr)rrrrr�__init__Js
zLegacyVersion.__init__cCs|jS)N)r.)rrrr�__str__NszLegacyVersion.__str__cCsdjtt|���S)Nz<LegacyVersion({0})>)�format�reprr-)rrrr�__repr__QszLegacyVersion.__repr__cCs|jS)N)r.)rrrr�publicTszLegacyVersion.publiccCs|jS)N)r.)rrrr�base_versionXszLegacyVersion.base_versioncCsdS)Nr)rrrrr\szLegacyVersion.localcCsdS)NFr)rrrr�
is_prerelease`szLegacyVersion.is_prereleasecCsdS)NFr)rrrr�is_postreleasedszLegacyVersion.is_postreleaseN)rrrr0r1r4�propertyr5r6rr7r8rrrrr	Hsz(\d+ | [a-z]+ | \.| -)�czfinal-�@)r�preview�-�rcrccsbxVtj|�D]H}tj||�}|s|dkr,q|dd�dkrJ|jd�Vqd|VqWdVdS)N�.r�
0123456789��*z*final)�_legacy_version_component_re�split�_legacy_version_replacement_map�get�zfill)r�partrrr�_parse_version_partsrsrIcCs�d}g}xlt|j��D]\}|jd�rh|dkrJx|rH|ddkrH|j�q.Wx|rf|ddkrf|j�qLW|j|�qWt|�}||fS)	NrrBz*finalz*final-Z00000000���rJrJ)rI�lower�
startswith�pop�append�tuple)rr
�partsrHrrrr/�s
r/a�
    v?
    (?:
        (?:(?P<epoch>[0-9]+)!)?                           # epoch
        (?P<release>[0-9]+(?:\.[0-9]+)*)                  # release segment
        (?P<pre>                                          # pre-release
            [-_\.]?
            (?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview))
            [-_\.]?
            (?P<pre_n>[0-9]+)?
        )?
        (?P<post>                                         # post release
            (?:-(?P<post_n1>[0-9]+))
            |
            (?:
                [-_\.]?
                (?P<post_l>post|rev|r)
                [-_\.]?
                (?P<post_n2>[0-9]+)?
            )
        )?
        (?P<dev>                                          # dev release
            [-_\.]?
            (?P<dev_l>dev)
            [-_\.]?
            (?P<dev_n>[0-9]+)?
        )?
    )
    (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?       # local version
c@s|eZdZejdedejejB�Zdd�Z	dd�Z
dd�Zed	d
��Z
edd��Zed
d��Zedd��Zedd��ZdS)rz^\s*z\s*$c	Cs�|jj|�}|stdj|���t|jd�r8t|jd��ndtdd�|jd�jd�D��t	|jd�|jd	��t	|jd
�|jd�p�|jd��t	|jd
�|jd��t
|jd��d�|_t|jj
|jj|jj|jj|jj|jj�|_dS)NzInvalid version: '{0}'r
rcss|]}t|�VqdS)N)�int)�.0�irrr�	<genexpr>�sz#Version.__init__.<locals>.<genexpr>rr?Zpre_lZpre_nZpost_lZpost_n1Zpost_n2Zdev_lZdev_nr)r
rrrrr)�_regex�searchr
r2r�grouprQrOrD�_parse_letter_version�_parse_local_versionr.�_cmpkeyr
rrrrrr)rr�matchrrrr0�s.

zVersion.__init__cCsdjtt|���S)Nz<Version({0})>)r2r3r-)rrrrr4�szVersion.__repr__cCs�g}|jjdkr$|jdj|jj��|jdjdd�|jjD���|jjdk	rl|jdjdd�|jjD���|jjdk	r�|jdj|jjd	��|jjdk	r�|jd
j|jjd	��|jj	dk	r�|jdjdjdd�|jj	D����dj|�S)
Nrz{0}!r?css|]}t|�VqdS)N)r-)rR�xrrrrT�sz"Version.__str__.<locals>.<genexpr>�css|]}t|�VqdS)N)r-)rRr\rrrrT�sz.post{0}rz.dev{0}z+{0}css|]}t|�VqdS)N)r-)rRr\rrrrTs)
r.r
rNr2�joinrrrrr)rrPrrrr1�s zVersion.__str__cCst|�jdd�dS)N�+rr)r-rD)rrrrr5
szVersion.publiccCsLg}|jjdkr$|jdj|jj��|jdjdd�|jjD���dj|�S)Nrz{0}!r?css|]}t|�VqdS)N)r-)rRr\rrrrTsz'Version.base_version.<locals>.<genexpr>r])r.r
rNr2r^r)rrPrrrr6s
zVersion.base_versioncCs$t|�}d|kr |jdd�dSdS)Nr_r)r-rD)rZversion_stringrrrrsz
Version.localcCst|jjp|jj�S)N)�boolr.rr)rrrrr7!szVersion.is_prereleasecCst|jj�S)N)r`r.r)rrrrr8%szVersion.is_postreleaseN)rrr�re�compiler�VERBOSE�
IGNORECASErUr0r4r1r9r5r6rr7r8rrrrr�s
#
cCsx|rZ|dkrd}|j�}|dkr&d}n(|dkr4d}n|d
krBd	}n|dkrNd}|t|�fS|rt|rtd}|t|�fSdS)NrZalpha�aZbeta�br:rr<r>�rev�rr)r:rr<)rgrh)rKrQ)ZletterZnumberrrrrX*s 
rXz[\._-]cCs$|dk	r tdd�tj|�D��SdS)zR
    Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
    Ncss&|]}|j�s|j�nt|�VqdS)N)�isdigitrKrQ)rRrHrrrrTRsz'_parse_local_version.<locals>.<genexpr>)rO�_local_version_seperatorsrD)rrrrrYLsrYcCs�ttttjdd�t|�����}|dkr@|dkr@|dk	r@t}n|dkrLt}|dkrZt}|dkrft}|dkrvt}ntdd�|D��}||||||fS)NcSs|dkS)Nrr)r\rrrr!`sz_cmpkey.<locals>.<lambda>css*|]"}t|t�r|dfnt|fVqdS)r]N)r*rQr)rRrSrrrrT�sz_cmpkey.<locals>.<genexpr>)rO�reversed�list�	itertools�	dropwhiler)r
rrrrrrrrrZWs&		
rZ)Z
__future__rrr�collectionsrmraZ_structuresr�__all__�
namedtuplerr�
ValueErrorr
�objectrr	rbrcrCrErIr/rrrXrjrYrZrrrr�<module>s.!
9k
_vendor/packaging/utils.py000064400000000645151733136460011654 0ustar00# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function

import re


_canonicalize_regex = re.compile(r"[-_.]+")


def canonicalize_name(name):
    """Normalize a project name per PEP 503.

    Runs of ``-``, ``_`` and ``.`` collapse into a single dash and the
    result is lower-cased.
    """
    collapsed = _canonicalize_regex.sub("-", name)
    return collapsed.lower()
_vendor/packaging/requirements.py000064400000010347151733136460013237 0ustar00# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function

import string
import re

from pip._vendor.pyparsing import (
    stringStart, stringEnd, originalTextFor, ParseException
)
from pip._vendor.pyparsing import ZeroOrMore, Word, Optional, Regex, Combine
from pip._vendor.pyparsing import Literal as L  # noqa
from pip._vendor.six.moves.urllib import parse as urlparse

from .markers import MARKER_EXPR, Marker
from .specifiers import LegacySpecifier, Specifier, SpecifierSet


class InvalidRequirement(ValueError):
    """Raised for a requirement string that does not conform to PEP 508."""


# --- Terminals -------------------------------------------------------------
ALPHANUM = Word(string.ascii_letters + string.digits)

# Bracket/separator literals are matched but suppressed from the results.
LBRACKET = L("[").suppress()
RBRACKET = L("]").suppress()
LPAREN = L("(").suppress()
RPAREN = L(")").suppress()
COMMA = L(",").suppress()
SEMICOLON = L(";").suppress()
AT = L("@").suppress()

# Identifiers: alphanumeric runs optionally joined by -, _ or ., always
# ending on an alphanumeric character.
PUNCTUATION = Word("-_.")
IDENTIFIER_END = ALPHANUM | (ZeroOrMore(PUNCTUATION) + ALPHANUM)
IDENTIFIER = Combine(ALPHANUM + ZeroOrMore(IDENTIFIER_END))

NAME = IDENTIFIER("name")
EXTRA = IDENTIFIER

# "@ <url>" form: everything up to the next space is taken as the URL.
URI = Regex(r'[^ ]+')("url")
URL = (AT + URI)

# "[extra1,extra2]" — the list inside the brackets may be empty.
EXTRAS_LIST = EXTRA + ZeroOrMore(COMMA + EXTRA)
EXTRAS = (LBRACKET + Optional(EXTRAS_LIST) + RBRACKET)("extras")

# Version clauses reuse the specifier regexes; "^" (pyparsing Or) keeps the
# longest of the PEP 440 and legacy matches.
VERSION_PEP440 = Regex(Specifier._regex_str, re.VERBOSE | re.IGNORECASE)
VERSION_LEGACY = Regex(LegacySpecifier._regex_str, re.VERBOSE | re.IGNORECASE)

VERSION_ONE = VERSION_PEP440 ^ VERSION_LEGACY
VERSION_MANY = Combine(VERSION_ONE + ZeroOrMore(COMMA + VERSION_ONE),
                       joinString=",", adjacent=False)("_raw_spec")
_VERSION_SPEC = Optional(((LPAREN + VERSION_MANY + RPAREN) | VERSION_MANY))
_VERSION_SPEC.setParseAction(lambda s, l, t: t._raw_spec or '')

VERSION_SPEC = originalTextFor(_VERSION_SPEC)("specifier")
VERSION_SPEC.setParseAction(lambda s, l, t: t[1])

# Wrap the imported markers grammar so the marker's original source text is
# captured and handed to Marker for parsing.
MARKER_EXPR = originalTextFor(MARKER_EXPR())("marker")
MARKER_EXPR.setParseAction(
    lambda s, l, t: Marker(s[t._original_start:t._original_end])
)
MARKER_SEPERATOR = SEMICOLON
MARKER = MARKER_SEPERATOR + MARKER_EXPR

VERSION_AND_MARKER = VERSION_SPEC + Optional(MARKER)
URL_AND_MARKER = URL + Optional(MARKER)

# Overall shape: name [extras] (@ url | version-spec) [; marker]
NAMED_REQUIREMENT = \
    NAME + Optional(EXTRAS) + (URL_AND_MARKER | VERSION_AND_MARKER)

# Anchored so the whole input string must be consumed.
REQUIREMENT = stringStart + NAMED_REQUIREMENT + stringEnd


class Requirement(object):
    """A parsed PEP 508 requirement string.

    Splits a requirement into its ``name``, ``extras``, ``specifier``,
    ``url`` and ``marker`` attributes.  Raises InvalidRequirement for a
    badly-formed requirement string.
    """

    # TODO: containment testing (by name? version? markers?) and
    #       normalization of the name / extra names are still open questions.

    def __init__(self, requirement_string):
        try:
            parsed = REQUIREMENT.parseString(requirement_string)
        except ParseException as e:
            raise InvalidRequirement(
                "Invalid requirement, parse error at \"{0!r}\"".format(
                    requirement_string[e.loc:e.loc + 8]))

        self.name = parsed.name
        if parsed.url:
            parsed_url = urlparse.urlparse(parsed.url)
            # A usable URL needs both a scheme and a network location
            # (same truth table as the original two-clause check).
            if not (parsed_url.scheme and parsed_url.netloc):
                raise InvalidRequirement("Invalid URL given")
            self.url = parsed.url
        else:
            self.url = None
        self.extras = set(parsed.extras.asList() if parsed.extras else [])
        self.specifier = SpecifierSet(parsed.specifier)
        self.marker = parsed.marker if parsed.marker else None

    def __str__(self):
        pieces = [self.name]

        if self.extras:
            pieces.append("[{0}]".format(",".join(sorted(self.extras))))

        if self.specifier:
            pieces.append(str(self.specifier))

        if self.url:
            pieces.append("@ {0}".format(self.url))

        if self.marker:
            pieces.append("; {0}".format(self.marker))

        return "".join(pieces)

    def __repr__(self):
        return "<Requirement({0!r})>".format(str(self))
_vendor/packaging/__init__.py000064400000001001151733136460012236 0ustar00# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function

from .__about__ import (
    __author__, __copyright__, __email__, __license__, __summary__, __title__,
    __uri__, __version__
)

# Public API: re-export the metadata names imported from __about__.
__all__ = [
    "__title__", "__summary__", "__uri__", "__version__", "__author__",
    "__email__", "__license__", "__copyright__",
]
_vendor/packaging/_compat.py000064400000001534151733136460012134 0ustar00# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function

import sys


# Interpreter major-version flags used to branch 2/3-specific code paths.
PY2 = sys.version_info[0] == 2
PY3 = sys.version_info[0] == 3

# flake8: noqa

# Tuple of text types for isinstance() checks, mirroring six:
# ``basestring`` covers both str and unicode on Python 2.
if PY3:
    string_types = str,
else:
    string_types = basestring,


def with_metaclass(meta, *bases):
    """Return a temporary base class that applies *meta* to subclasses.

    The returned object is a throwaway class whose metaclass intercepts
    the first real class creation and rebuilds it with the intended
    metaclass and bases, so the dummy never appears in the final MRO.
    """
    class _ReplacingMeta(meta):
        def __new__(cls, name, this_bases, d):
            # Swap in the real metaclass and the caller's bases.
            return meta(name, bases, d)
    return type.__new__(_ReplacingMeta, 'temporary_class', (), {})
_vendor/packaging/version.py000064400000026444151733136460012206 0ustar00# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function

import collections
import itertools
import re

from ._structures import Infinity


__all__ = [
    "parse", "Version", "LegacyVersion", "InvalidVersion", "VERSION_PATTERN"
]


_Version = collections.namedtuple(
    "_Version",
    ["epoch", "release", "dev", "pre", "post", "local"],
)


def parse(version):
    """Parse *version* into a :class:`Version` when it is valid PEP 440,
    falling back to a :class:`LegacyVersion` otherwise.
    """
    try:
        parsed = Version(version)
    except InvalidVersion:
        parsed = LegacyVersion(version)
    return parsed


class InvalidVersion(ValueError):
    """Raised for a version string that does not conform to PEP 440."""


class _BaseVersion(object):

    def __hash__(self):
        return hash(self._key)

    def __lt__(self, other):
        return self._compare(other, lambda s, o: s < o)

    def __le__(self, other):
        return self._compare(other, lambda s, o: s <= o)

    def __eq__(self, other):
        return self._compare(other, lambda s, o: s == o)

    def __ge__(self, other):
        return self._compare(other, lambda s, o: s >= o)

    def __gt__(self, other):
        return self._compare(other, lambda s, o: s > o)

    def __ne__(self, other):
        return self._compare(other, lambda s, o: s != o)

    def _compare(self, other, method):
        if not isinstance(other, _BaseVersion):
            return NotImplemented

        return method(self._key, other._key)


class LegacyVersion(_BaseVersion):
    """A non-PEP-440 version, ordered by the old setuptools scheme."""

    def __init__(self, version):
        # The version is kept verbatim as the string it was given as.
        self._version = str(version)
        self._key = _legacy_cmpkey(self._version)

    def __str__(self):
        return self._version

    def __repr__(self):
        return "<LegacyVersion({0})>".format(repr(str(self)))

    @property
    def public(self):
        # No local segment exists, so the public form is the whole string.
        return self._version

    @property
    def base_version(self):
        return self._version

    @property
    def local(self):
        # Legacy versions never carry a local segment.
        return None

    @property
    def is_prerelease(self):
        # Pre/post status is not defined for legacy versions.
        return False

    @property
    def is_postrelease(self):
        return False


# Capturing split: breaks a legacy version into numeric runs, alphabetic
# runs, "." and "-", keeping the separators in the result list.
_legacy_version_component_re = re.compile(
    r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE,
)

# Spelling normalizations applied per component before comparison.
# "@" compares below any ASCII letter, which pushes dev releases first.
_legacy_version_replacement_map = {
    "pre": "c", "preview": "c", "-": "final-", "rc": "c", "dev": "@",
}


def _parse_version_parts(s):
    """Yield string-comparable components of a legacy version.

    Numeric runs are zero-padded to width 8 so they order correctly as
    strings; other tags get a "*" prefix (ASCII "*" sorts below digits),
    and a closing "*final" marks the end of the version.
    """
    for component in _legacy_version_component_re.split(s):
        component = _legacy_version_replacement_map.get(component, component)

        # Drop empty splits and bare dots.
        if not component or component == ".":
            continue

        if component[:1] in "0123456789":
            # pad for numeric comparison
            yield component.zfill(8)
        else:
            yield "*" + component

    # ensure that alpha/beta/candidate sort before final
    yield "*final"


def _legacy_cmpkey(version):
    """Build the sort key for a legacy (pre-PEP-440) version string.

    The scheme is the one pkg_resources.parse_version used before
    setuptools adopted the packaging library.  The epoch is hard-coded to
    -1 so every legacy version orders before all PEP 440 versions, whose
    epochs are always >= 0.
    """
    key_parts = []
    for token in _parse_version_parts(version.lower()):
        if token.startswith("*"):
            # A tag sorting before "*final" cancels any pending "-" marker.
            if token < "*final":
                while key_parts and key_parts[-1] == "*final-":
                    key_parts.pop()

            # Trailing zeros in a numeric series are insignificant.
            while key_parts and key_parts[-1] == "00000000":
                key_parts.pop()

        key_parts.append(token)

    return -1, tuple(key_parts)

# Deliberately not anchored to the start and end of the string, to make it
# easier for 3rd party code to reuse
# NOTE: must be compiled with re.VERBOSE | re.IGNORECASE (see Version._regex).
VERSION_PATTERN = r"""
    v?
    (?:
        (?:(?P<epoch>[0-9]+)!)?                           # epoch
        (?P<release>[0-9]+(?:\.[0-9]+)*)                  # release segment
        (?P<pre>                                          # pre-release
            [-_\.]?
            (?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview))
            [-_\.]?
            (?P<pre_n>[0-9]+)?
        )?
        (?P<post>                                         # post release
            (?:-(?P<post_n1>[0-9]+))
            |
            (?:
                [-_\.]?
                (?P<post_l>post|rev|r)
                [-_\.]?
                (?P<post_n2>[0-9]+)?
            )
        )?
        (?P<dev>                                          # dev release
            [-_\.]?
            (?P<dev_l>dev)
            [-_\.]?
            (?P<dev_n>[0-9]+)?
        )?
    )
    (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?       # local version
"""


class Version(_BaseVersion):
    """A PEP 440 version, parsed and normalized at construction time.

    Raises InvalidVersion when the string does not match VERSION_PATTERN.
    """

    # Anchored (modulo surrounding whitespace) form of the shared pattern.
    _regex = re.compile(
        r"^\s*" + VERSION_PATTERN + r"\s*$",
        re.VERBOSE | re.IGNORECASE,
    )

    def __init__(self, version):
        # Validate the version and parse it into pieces
        match = self._regex.search(version)
        if not match:
            raise InvalidVersion("Invalid version: '{0}'".format(version))

        # Store the parsed out pieces of the version
        self._version = _Version(
            epoch=int(match.group("epoch")) if match.group("epoch") else 0,
            release=tuple(int(i) for i in match.group("release").split(".")),
            pre=_parse_letter_version(
                match.group("pre_l"),
                match.group("pre_n"),
            ),
            # post releases match either the "-N" or the ".post(N)" syntax
            post=_parse_letter_version(
                match.group("post_l"),
                match.group("post_n1") or match.group("post_n2"),
            ),
            dev=_parse_letter_version(
                match.group("dev_l"),
                match.group("dev_n"),
            ),
            local=_parse_local_version(match.group("local")),
        )

        # Generate a key which will be used for sorting
        self._key = _cmpkey(
            self._version.epoch,
            self._version.release,
            self._version.pre,
            self._version.post,
            self._version.dev,
            self._version.local,
        )

    def __repr__(self):
        return "<Version({0})>".format(repr(str(self)))

    def __str__(self):
        """Render the normalized form built from the parsed pieces."""
        parts = []

        # Epoch
        if self._version.epoch != 0:
            parts.append("{0}!".format(self._version.epoch))

        # Release segment
        parts.append(".".join(str(x) for x in self._version.release))

        # Pre-release
        if self._version.pre is not None:
            parts.append("".join(str(x) for x in self._version.pre))

        # Post-release
        if self._version.post is not None:
            parts.append(".post{0}".format(self._version.post[1]))

        # Development release
        if self._version.dev is not None:
            parts.append(".dev{0}".format(self._version.dev[1]))

        # Local version segment
        if self._version.local is not None:
            parts.append(
                "+{0}".format(".".join(str(x) for x in self._version.local))
            )

        return "".join(parts)

    @property
    def public(self):
        # Everything before the "+" local-segment separator.
        return str(self).split("+", 1)[0]

    @property
    def base_version(self):
        # Epoch and release segment only — no pre/post/dev/local parts.
        parts = []

        # Epoch
        if self._version.epoch != 0:
            parts.append("{0}!".format(self._version.epoch))

        # Release segment
        parts.append(".".join(str(x) for x in self._version.release))

        return "".join(parts)

    @property
    def local(self):
        # Text after the "+", or None when there is no local segment.
        version_string = str(self)
        if "+" in version_string:
            return version_string.split("+", 1)[1]

    @property
    def is_prerelease(self):
        # Dev releases are treated as pre-releases here as well.
        return bool(self._version.dev or self._version.pre)

    @property
    def is_postrelease(self):
        return bool(self._version.post)


def _parse_letter_version(letter, number):
    if letter:
        # We consider there to be an implicit 0 in a pre-release if there is
        # not a numeral associated with it.
        if number is None:
            number = 0

        # We normalize any letters to their lower case form
        letter = letter.lower()

        # We consider some words to be alternate spellings of other words and
        # in those cases we want to normalize the spellings to our preferred
        # spelling.
        if letter == "alpha":
            letter = "a"
        elif letter == "beta":
            letter = "b"
        elif letter in ["c", "pre", "preview"]:
            letter = "rc"
        elif letter in ["rev", "r"]:
            letter = "post"

        return letter, int(number)
    if not letter and number:
        # We assume if we are given a number, but we are not given a letter
        # then this is using the implicit post release syntax (e.g. 1.0-1)
        letter = "post"

        return letter, int(number)


_local_version_seperators = re.compile(r"[\._-]")


def _parse_local_version(local):
    """
    Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
    """
    if local is not None:
        return tuple(
            part.lower() if not part.isdigit() else int(part)
            for part in _local_version_seperators.split(local)
        )


def _cmpkey(epoch, release, pre, post, dev, local):
    """Build the tuple used to order PEP 440 versions.

    Absent segments are replaced with +/-Infinity sentinels so that the
    plain tuple comparison implements the PEP 440 ordering rules.
    """
    # Trailing zeros in the release are insignificant (1.0 == 1.0.0), so
    # strip them before comparison.
    trimmed = list(release)
    while trimmed and trimmed[-1] == 0:
        trimmed.pop()
    release = tuple(trimmed)

    if pre is None and post is None and dev is not None:
        # Trick: a bare dev release (1.0.dev0) must sort before 1.0a0,
        # which we get by abusing the pre slot.  Only safe when neither a
        # real pre nor post segment exists.
        pre = -Infinity
    elif pre is None:
        # No pre-release sorts after any pre-release.
        pre = Infinity

    # No post segment sorts before any post segment.
    if post is None:
        post = -Infinity

    # No dev segment sorts after any dev segment.
    if dev is None:
        dev = Infinity

    if local is None:
        # No local segment sorts before any local segment.
        local = -Infinity
    else:
        # PEP 440 local ordering: numeric segments beat alphanumeric ones,
        # numeric segments compare numerically, alphanumeric ones
        # lexicographically, and shorter prefixes sort first.
        local = tuple(
            (segment, "") if isinstance(segment, int) else (-Infinity, segment)
            for segment in local
        )

    return epoch, release, pre, post, dev, local
_vendor/packaging/markers.py000064400000020046151733136460012155 0ustar00# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function

import operator
import os
import platform
import sys

from pip._vendor.pyparsing import (
    ParseException, ParseResults, stringStart, stringEnd,
)
from pip._vendor.pyparsing import ZeroOrMore, Group, Forward, QuotedString
from pip._vendor.pyparsing import Literal as L  # noqa

from ._compat import string_types
from .specifiers import Specifier, InvalidSpecifier


__all__ = [
    "InvalidMarker", "UndefinedComparison", "UndefinedEnvironmentName",
    "Marker", "default_environment",
]


class InvalidMarker(ValueError):
    """Raised for a marker string that does not conform to PEP 508."""


class UndefinedComparison(ValueError):
    """Raised when an operator has no defined meaning for its operands."""


class UndefinedEnvironmentName(ValueError):
    """Raised when a marker references a name absent from the evaluation
    environment.
    """


class Node(object):
    """Base class for parsed marker atoms.

    Wraps a single ``value``; subclasses decide how it renders back into
    marker syntax via :meth:`serialize`.
    """

    def __init__(self, value):
        self.value = value

    def __str__(self):
        return str(self.value)

    def __repr__(self):
        return "<{0}({1!r})>".format(self.__class__.__name__, str(self))

    def serialize(self):
        # Rendering is subclass-specific.
        raise NotImplementedError


class Variable(Node):
    """A marker environment variable; serializes as its bare name."""

    def serialize(self):
        return str(self)


class Value(Node):
    """A literal string operand; serializes wrapped in double quotes."""

    def serialize(self):
        return '"{0}"'.format(self)


class Op(Node):
    """A comparison or membership operator; serializes as written."""

    def serialize(self):
        return str(self)


# Recognized environment variable names.  Longer alternatives precede their
# shorter lookalikes so MatchFirst ("|") picks the full name.
VARIABLE = (
    L("implementation_version") |
    L("platform_python_implementation") |
    L("implementation_name") |
    L("python_full_version") |
    L("platform_release") |
    L("platform_version") |
    L("platform_machine") |
    L("platform_system") |
    L("python_version") |
    L("sys_platform") |
    L("os_name") |
    L("os.name") |  # PEP-345
    L("sys.platform") |  # PEP-345
    L("platform.version") |  # PEP-345
    L("platform.machine") |  # PEP-345
    L("platform.python_implementation") |  # PEP-345
    L("python_implementation") |  # undocumented setuptools legacy
    L("extra")
)
# Dotted PEP-345 spellings (plus the setuptools legacy name) normalize to
# their canonical underscore forms.
ALIASES = {
    'os.name': 'os_name',
    'sys.platform': 'sys_platform',
    'platform.version': 'platform_version',
    'platform.machine': 'platform_machine',
    'platform.python_implementation': 'platform_python_implementation',
    'python_implementation': 'platform_python_implementation'
}
VARIABLE.setParseAction(lambda s, l, t: Variable(ALIASES.get(t[0], t[0])))

# Comparison operators, longest first so "===" is not read as "==" + "=".
VERSION_CMP = (
    L("===") |
    L("==") |
    L(">=") |
    L("<=") |
    L("!=") |
    L("~=") |
    L(">") |
    L("<")
)

MARKER_OP = VERSION_CMP | L("not in") | L("in")
MARKER_OP.setParseAction(lambda s, l, t: Op(t[0]))

# String literals accept either quote style.
MARKER_VALUE = QuotedString("'") | QuotedString('"')
MARKER_VALUE.setParseAction(lambda s, l, t: Value(t[0]))

BOOLOP = L("and") | L("or")

MARKER_VAR = VARIABLE | MARKER_VALUE

# One comparison, collapsed to a (lhs, op, rhs) tuple.
MARKER_ITEM = Group(MARKER_VAR + MARKER_OP + MARKER_VAR)
MARKER_ITEM.setParseAction(lambda s, l, t: tuple(t[0]))

LPAREN = L("(").suppress()
RPAREN = L(")").suppress()

# Recursive grammar: expr := atom (("and" | "or") expr)*
MARKER_EXPR = Forward()
MARKER_ATOM = MARKER_ITEM | Group(LPAREN + MARKER_EXPR + RPAREN)
MARKER_EXPR << MARKER_ATOM + ZeroOrMore(BOOLOP + MARKER_EXPR)

# Anchored so the whole input string must be consumed.
MARKER = stringStart + MARKER_EXPR + stringEnd


def _coerce_parse_result(results):
    """Recursively convert a ParseResults tree into plain nested lists,
    leaving leaf nodes (tuples/strings) untouched.
    """
    if not isinstance(results, ParseResults):
        return results
    return [_coerce_parse_result(item) for item in results]


def _format_marker(marker, first=True):
    """Render a coerced marker tree back into marker syntax.

    Nested lists become parenthesized groups (except at the top level),
    tuples become serialized comparisons, and strings ("and"/"or") pass
    through unchanged.
    """
    assert isinstance(marker, (list, tuple, string_types))

    # Collapse the [[...]] single-item wrapper so the outermost expression
    # does not pick up redundant parentheses.
    if (isinstance(marker, list) and len(marker) == 1 and
            isinstance(marker[0], (list, tuple))):
        return _format_marker(marker[0])

    if isinstance(marker, list):
        rendered = " ".join(_format_marker(sub, first=False) for sub in marker)
        return rendered if first else "(" + rendered + ")"
    if isinstance(marker, tuple):
        return " ".join(node.serialize() for node in marker)
    return marker


# Serialized operator -> plain-Python evaluation function; the fallback used
# by _eval_op when the comparison is not a valid PEP 440 specifier.
_operators = {
    "in": lambda lhs, rhs: lhs in rhs,
    "not in": lambda lhs, rhs: lhs not in rhs,
    "<": operator.lt,
    "<=": operator.le,
    "==": operator.eq,
    "!=": operator.ne,
    ">=": operator.ge,
    ">": operator.gt,
}


def _eval_op(lhs, op, rhs):
    """Evaluate one ``lhs <op> rhs`` marker comparison.

    PEP 440 specifier semantics are tried first; plain Python operators
    are the fallback.  Raises UndefinedComparison when the operator has no
    defined meaning for these operands.
    """
    op_text = op.serialize()

    try:
        spec = Specifier("".join([op_text, rhs]))
    except InvalidSpecifier:
        pass
    else:
        return spec.contains(lhs)

    comparator = _operators.get(op_text)
    if comparator is None:
        raise UndefinedComparison(
            "Undefined {0!r} on {1!r} and {2!r}.".format(op, lhs, rhs)
        )

    return comparator(lhs, rhs)


_undefined = object()


def _get_env(environment, name):
    value = environment.get(name, _undefined)

    if value is _undefined:
        raise UndefinedEnvironmentName(
            "{0!r} does not exist in evaluation environment.".format(name)
        )

    return value


def _evaluate_markers(markers, environment):
    """Evaluate a coerced marker tree against *environment*.

    "and" binds by accumulating into the current clause; "or" starts a new
    clause, so the result is an OR over AND-groups.
    """
    groups = [[]]

    for node in markers:
        assert isinstance(node, (list, tuple, string_types))

        if isinstance(node, list):
            # Parenthesized sub-expression: evaluate recursively.
            groups[-1].append(_evaluate_markers(node, environment))
        elif isinstance(node, tuple):
            lhs, op, rhs = node

            # Exactly one side is a Variable; resolve it from the
            # environment and take the other side literally.
            if isinstance(lhs, Variable):
                lhs_value = _get_env(environment, lhs.value)
                rhs_value = rhs.value
            else:
                lhs_value = lhs.value
                rhs_value = _get_env(environment, rhs.value)

            groups[-1].append(_eval_op(lhs_value, op, rhs_value))
        else:
            assert node in ["and", "or"]
            if node == "or":
                groups.append([])

    return any(all(clause) for clause in groups)


def format_full_version(info):
    """Format a sys.implementation.version-style struct as a string,
    appending the release-level initial and serial for non-final builds.
    """
    base = "{0}.{1}.{2}".format(info.major, info.minor, info.micro)
    if info.releaselevel == 'final':
        return base
    return base + info.releaselevel[0] + str(info.serial)


def default_environment():
    """Return the PEP 508 marker environment describing this interpreter."""
    if hasattr(sys, 'implementation'):
        impl_version = format_full_version(sys.implementation.version)
        impl_name = sys.implementation.name
    else:
        # Python 2 has no sys.implementation.
        impl_version = '0'
        impl_name = ''

    return {
        "implementation_name": impl_name,
        "implementation_version": impl_version,
        "os_name": os.name,
        "platform_machine": platform.machine(),
        "platform_release": platform.release(),
        "platform_system": platform.system(),
        "platform_version": platform.version(),
        "python_full_version": platform.python_version(),
        "platform_python_implementation": platform.python_implementation(),
        "python_version": platform.python_version()[:3],
        "sys_platform": sys.platform,
    }


class Marker(object):
    """A parsed PEP 508 environment marker."""

    def __init__(self, marker):
        try:
            parse_result = MARKER.parseString(marker)
        except ParseException as e:
            snippet = marker[e.loc:e.loc + 8]
            raise InvalidMarker(
                "Invalid marker: {0!r}, parse error at {1!r}".format(
                    marker, snippet))
        self._markers = _coerce_parse_result(parse_result)

    def __str__(self):
        return _format_marker(self._markers)

    def __repr__(self):
        return "<Marker({0!r})>".format(str(self))

    def evaluate(self, environment=None):
        """Evaluate a marker.

        Return the boolean result of evaluating this marker against the
        environment.  *environment*, when given, overrides all or part of
        the environment determined from the current Python process.
        """
        env = default_environment()
        if environment is not None:
            env.update(environment)

        return _evaluate_markers(self._markers, env)
_vendor/packaging/__about__.py000064400000001320151733136460012411 0ustar00# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function

# Names re-exported as the package's public metadata API.
__all__ = [
    "__title__", "__summary__", "__uri__", "__version__", "__author__",
    "__email__", "__license__", "__copyright__",
]

__title__ = "packaging"
__summary__ = "Core utilities for Python packages"
__uri__ = "https://github.com/pypa/packaging"

__version__ = "16.8"

__author__ = "Donald Stufft and individual contributors"
__email__ = "donald@stufft.io"

# Dual licensed; see the license header at the top of this file.
__license__ = "BSD or Apache License, Version 2.0"
__copyright__ = "Copyright 2014-2016 %s" % __author__
_vendor/packaging/_structures.py000064400000002610151733136460013070 0ustar00# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function


class Infinity(object):
    """Sentinel that orders after every other value.

    Used as an upper bound when comparing version components; negation
    yields the NegativeInfinity sentinel.
    """

    def __repr__(self):
        return "Infinity"

    def __hash__(self):
        return hash(repr(self))

    def __eq__(self, other):
        return isinstance(other, self.__class__)

    def __ne__(self, other):
        return not isinstance(other, self.__class__)

    # Nothing compares greater than this sentinel ...
    def __lt__(self, other):
        return False

    def __le__(self, other):
        return False

    # ... and it compares greater than everything else.
    def __gt__(self, other):
        return True

    def __ge__(self, other):
        return True

    def __neg__(self):
        return NegativeInfinity

# Replace the class with its singleton instance.
Infinity = Infinity()


class NegativeInfinity(object):
    """Sentinel that orders before every other value.

    Mirror image of Infinity; used as a lower bound when comparing
    version components.
    """

    def __repr__(self):
        return "-Infinity"

    def __hash__(self):
        return hash(repr(self))

    def __eq__(self, other):
        return isinstance(other, self.__class__)

    def __ne__(self, other):
        return not isinstance(other, self.__class__)

    # This sentinel compares less than everything else ...
    def __lt__(self, other):
        return True

    def __le__(self, other):
        return True

    # ... and nothing compares less than it.
    def __gt__(self, other):
        return False

    def __ge__(self, other):
        return False

    def __neg__(self):
        return Infinity

# Replace the class with its singleton instance.
NegativeInfinity = NegativeInfinity()
_vendor/packaging/specifiers.py000064400000066571151733136470012663 0ustar00# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function

import abc
import functools
import itertools
import re

from ._compat import string_types, with_metaclass
from .version import Version, LegacyVersion, parse


class InvalidSpecifier(ValueError):
    """
    An invalid specifier was found, users should refer to PEP 440.

    Raised when a specifier string cannot be parsed by Specifier or
    LegacySpecifier.
    """


class BaseSpecifier(with_metaclass(abc.ABCMeta, object)):
    """Abstract interface shared by the specifier classes: string form,
    equality/hashing, prerelease policy, and version containment and
    filtering.
    """

    @abc.abstractmethod
    def __str__(self):
        """
        Returns the str representation of this Specifier like object. This
        should be representative of the Specifier itself.
        """

    @abc.abstractmethod
    def __hash__(self):
        """
        Returns a hash value for this Specifier like object.
        """

    @abc.abstractmethod
    def __eq__(self, other):
        """
        Returns a boolean representing whether or not the two Specifier like
        objects are equal.
        """

    @abc.abstractmethod
    def __ne__(self, other):
        """
        Returns a boolean representing whether or not the two Specifier like
        objects are not equal.
        """

    @abc.abstractproperty
    def prereleases(self):
        """
        Returns whether or not pre-releases as a whole are allowed by this
        specifier.
        """

    @prereleases.setter
    def prereleases(self, value):
        """
        Sets whether or not pre-releases as a whole are allowed by this
        specifier.
        """

    @abc.abstractmethod
    def contains(self, item, prereleases=None):
        """
        Determines if the given item is contained within this specifier.
        """

    @abc.abstractmethod
    def filter(self, iterable, prereleases=None):
        """
        Takes an iterable of items and filters them so that only items which
        are contained within this specifier are allowed in it.
        """


class _IndividualSpecifier(BaseSpecifier):
    """Shared implementation of a single ``<operator><version>`` clause.

    Subclasses supply ``_regex`` (to parse the clause) and ``_operators``
    (mapping an operator string to the ``_compare_*`` method suffix that
    implements it).
    """

    _operators = {}

    def __init__(self, spec="", prereleases=None):
        # Parse the clause with the subclass-supplied regex.
        match = self._regex.search(spec)
        if not match:
            raise InvalidSpecifier("Invalid specifier: '{0}'".format(spec))

        self._spec = (
            match.group("operator").strip(),
            match.group("version").strip(),
        )

        # Store whether or not this Specifier should accept prereleases
        self._prereleases = prereleases

    def __repr__(self):
        pre = (
            ", prereleases={0!r}".format(self.prereleases)
            if self._prereleases is not None
            else ""
        )

        return "<{0}({1!r}{2})>".format(
            self.__class__.__name__,
            str(self),
            pre,
        )

    def __str__(self):
        return "{0}{1}".format(*self._spec)

    def __hash__(self):
        return hash(self._spec)

    def __eq__(self, other):
        # Strings are coerced through the subclass constructor so that
        # e.g. comparing against "==2.0" works.
        if isinstance(other, string_types):
            try:
                other = self.__class__(other)
            except InvalidSpecifier:
                return NotImplemented
        elif not isinstance(other, self.__class__):
            return NotImplemented

        return self._spec == other._spec

    def __ne__(self, other):
        if isinstance(other, string_types):
            try:
                other = self.__class__(other)
            except InvalidSpecifier:
                return NotImplemented
        elif not isinstance(other, self.__class__):
            return NotImplemented

        return self._spec != other._spec

    def _get_operator(self, op):
        # Dispatch to the _compare_* method named by the _operators table.
        return getattr(self, "_compare_{0}".format(self._operators[op]))

    def _coerce_version(self, version):
        if not isinstance(version, (LegacyVersion, Version)):
            version = parse(version)
        return version

    @property
    def operator(self):
        return self._spec[0]

    @property
    def version(self):
        return self._spec[1]

    @property
    def prereleases(self):
        return self._prereleases

    @prereleases.setter
    def prereleases(self, value):
        self._prereleases = value

    def __contains__(self, item):
        return self.contains(item)

    def contains(self, item, prereleases=None):
        # Determine if prereleases are to be allowed or not.
        if prereleases is None:
            prereleases = self.prereleases

        # Normalize item to a Version or LegacyVersion, this allows us to have
        # a shortcut for ``"2.0" in Specifier(">=2")``.
        item = self._coerce_version(item)

        # Determine if we should be supporting prereleases in this specifier
        # or not, if we do not support prereleases then we can short circuit
        # logic if this version is a prerelease.
        if item.is_prerelease and not prereleases:
            return False

        # Actually do the comparison to determine if this item is contained
        # within this Specifier or not.
        return self._get_operator(self.operator)(item, self.version)

    def filter(self, iterable, prereleases=None):
        yielded = False
        found_prereleases = []

        kw = {"prereleases": prereleases if prereleases is not None else True}

        # Attempt to iterate over all the values in the iterable and if any of
        # them match, yield them.
        for version in iterable:
            parsed_version = self._coerce_version(version)

            if self.contains(parsed_version, **kw):
                # If our version is a prerelease, and we were not set to allow
                # prereleases, then we'll store it for later in case nothing
                # else matches this specifier.
                if (parsed_version.is_prerelease and not
                        (prereleases or self.prereleases)):
                    found_prereleases.append(version)
                # Either this is not a prerelease, or we should have been
                # accepting prereleases from the beginning.
                else:
                    yielded = True
                    yield version

        # Now that we've iterated over everything, determine if we've yielded
        # any values, and if we have not and we have any prereleases stored up
        # then we will go ahead and yield the prereleases.
        if not yielded and found_prereleases:
            for version in found_prereleases:
                yield version


class LegacySpecifier(_IndividualSpecifier):
    """A single clause using pre-PEP 440 ("legacy") version ordering."""

    _regex_str = (
        r"""
        (?P<operator>(==|!=|<=|>=|<|>))
        \s*
        (?P<version>
            [^,;\s)]* # Since this is a "legacy" specifier, and the version
                      # string can be just about anything, we match everything
                      # except for whitespace, a semi-colon for marker support,
                      # a closing paren since versions can be enclosed in
                      # them, and a comma since it's a version separator.
        )
        """
    )

    _regex = re.compile(
        r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE)

    # Operator text -> _compare_* method suffix.
    _operators = {
        "==": "equal",
        "!=": "not_equal",
        "<=": "less_than_equal",
        ">=": "greater_than_equal",
        "<": "less_than",
        ">": "greater_than",
    }

    def _coerce_version(self, version):
        # Wrap anything that is not already a LegacyVersion so that the
        # comparisons below always operate on LegacyVersion instances.
        if isinstance(version, LegacyVersion):
            return version
        return LegacyVersion(str(version))

    def _compare_equal(self, candidate, spec):
        return candidate == self._coerce_version(spec)

    def _compare_not_equal(self, candidate, spec):
        return candidate != self._coerce_version(spec)

    def _compare_less_than(self, candidate, spec):
        return candidate < self._coerce_version(spec)

    def _compare_less_than_equal(self, candidate, spec):
        return candidate <= self._coerce_version(spec)

    def _compare_greater_than(self, candidate, spec):
        return candidate > self._coerce_version(spec)

    def _compare_greater_than_equal(self, candidate, spec):
        return candidate >= self._coerce_version(spec)


def _require_version_compare(fn):
    """Decorator: return False unless ``prospective`` is a PEP 440 Version.

    Legacy (non PEP 440) versions never satisfy the decorated comparison.
    """
    @functools.wraps(fn)
    def guarded(self, prospective, spec):
        if isinstance(prospective, Version):
            return fn(self, prospective, spec)
        return False
    return guarded


class Specifier(_IndividualSpecifier):
    """A single PEP 440 specifier clause, e.g. ``~=2.2`` or ``!=1.0.*``."""

    _regex_str = (
        r"""
        (?P<operator>(~=|==|!=|<=|>=|<|>|===))
        (?P<version>
            (?:
                # The identity operators allow for an escape hatch that will
                # do an exact string match of the version you wish to install.
                # This will not be parsed by PEP 440 and we cannot determine
                # any semantic meaning from it. This operator is discouraged
                # but included entirely as an escape hatch.
                (?<====)  # Only match for the identity operator
                \s*
                [^\s]*    # We just match everything, except for whitespace
                          # since we are only testing for strict identity.
            )
            |
            (?:
                # The (non)equality operators allow for wild card and local
                # versions to be specified so we have to define these two
                # operators separately to enable that.
                (?<===|!=)            # Only match for equals and not equals

                \s*
                v?
                (?:[0-9]+!)?          # epoch
                [0-9]+(?:\.[0-9]+)*   # release
                (?:                   # pre release
                    [-_\.]?
                    (a|b|c|rc|alpha|beta|pre|preview)
                    [-_\.]?
                    [0-9]*
                )?
                (?:                   # post release
                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
                )?

                # You cannot use a wild card and a dev or local version
                # together so group them with a | and make them optional.
                (?:
                    (?:[-_\.]?dev[-_\.]?[0-9]*)?         # dev release
                    (?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local
                    |
                    \.\*  # Wild card syntax of .*
                )?
            )
            |
            (?:
                # The compatible operator requires at least two digits in the
                # release segment.
                (?<=~=)               # Only match for the compatible operator

                \s*
                v?
                (?:[0-9]+!)?          # epoch
                [0-9]+(?:\.[0-9]+)+   # release  (We have a + instead of a *)
                (?:                   # pre release
                    [-_\.]?
                    (a|b|c|rc|alpha|beta|pre|preview)
                    [-_\.]?
                    [0-9]*
                )?
                (?:                                   # post release
                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
                )?
                (?:[-_\.]?dev[-_\.]?[0-9]*)?          # dev release
            )
            |
            (?:
                # All other operators only allow a sub set of what the
                # (non)equality operators do. Specifically they do not allow
                # local versions to be specified nor do they allow the prefix
                # matching wild cards.
                (?<!==|!=|~=)         # We have special cases for these
                                      # operators so we want to make sure they
                                      # don't match here.

                \s*
                v?
                (?:[0-9]+!)?          # epoch
                [0-9]+(?:\.[0-9]+)*   # release
                (?:                   # pre release
                    [-_\.]?
                    (a|b|c|rc|alpha|beta|pre|preview)
                    [-_\.]?
                    [0-9]*
                )?
                (?:                                   # post release
                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
                )?
                (?:[-_\.]?dev[-_\.]?[0-9]*)?          # dev release
            )
        )
        """
    )

    _regex = re.compile(
        r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE)

    # Operator text -> _compare_* method suffix.
    _operators = {
        "~=": "compatible",
        "==": "equal",
        "!=": "not_equal",
        "<=": "less_than_equal",
        ">=": "greater_than_equal",
        "<": "less_than",
        ">": "greater_than",
        "===": "arbitrary",
    }

    @_require_version_compare
    def _compare_compatible(self, prospective, spec):
        # Compatible releases have an equivalent combination of >= and ==. That
        # is that ~=2.2 is equivalent to >=2.2,==2.*. This allows us to
        # implement this in terms of the other specifiers instead of
        # implementing it ourselves. The only thing we need to do is construct
        # the other specifiers.

        # We want everything but the last item in the version, but we want to
        # ignore post and dev releases and we want to treat the pre-release as
        # its own separate segment.
        prefix = ".".join(
            list(
                itertools.takewhile(
                    lambda x: (not x.startswith("post") and not
                               x.startswith("dev")),
                    _version_split(spec),
                )
            )[:-1]
        )

        # Add the prefix notation to the end of our string
        prefix += ".*"

        return (self._get_operator(">=")(prospective, spec) and
                self._get_operator("==")(prospective, prefix))

    @_require_version_compare
    def _compare_equal(self, prospective, spec):
        # We need special logic to handle prefix matching
        if spec.endswith(".*"):
            # In the case of prefix matching we want to ignore local segment.
            prospective = Version(prospective.public)
            # Split the spec out by dots, and pretend that there is an implicit
            # dot in between a release segment and a pre-release segment.
            spec = _version_split(spec[:-2])  # Remove the trailing .*

            # Split the prospective version out by dots, and pretend that there
            # is an implicit dot in between a release segment and a pre-release
            # segment.
            prospective = _version_split(str(prospective))

            # Shorten the prospective version to be the same length as the spec
            # so that we can determine if the specifier is a prefix of the
            # prospective version or not.
            prospective = prospective[:len(spec)]

            # Pad out our two sides with zeros so that they both equal the same
            # length.
            spec, prospective = _pad_version(spec, prospective)
        else:
            # Convert our spec string into a Version
            spec = Version(spec)

            # If the specifier does not have a local segment, then we want to
            # act as if the prospective version also does not have a local
            # segment.
            if not spec.local:
                prospective = Version(prospective.public)

        return prospective == spec

    @_require_version_compare
    def _compare_not_equal(self, prospective, spec):
        return not self._compare_equal(prospective, spec)

    @_require_version_compare
    def _compare_less_than_equal(self, prospective, spec):
        return prospective <= Version(spec)

    @_require_version_compare
    def _compare_greater_than_equal(self, prospective, spec):
        return prospective >= Version(spec)

    @_require_version_compare
    def _compare_less_than(self, prospective, spec):
        # Convert our spec to a Version instance, since we'll want to work with
        # it as a version.
        spec = Version(spec)

        # Check to see if the prospective version is less than the spec
        # version. If it's not we can short circuit and just return False now
        # instead of doing extra unneeded work.
        if not prospective < spec:
            return False

        # This special case is here so that, unless the specifier itself
        # includes is a pre-release version, that we do not accept pre-release
        # versions for the version mentioned in the specifier (e.g. <3.1 should
        # not match 3.1.dev0, but should match 3.0.dev0).
        if not spec.is_prerelease and prospective.is_prerelease:
            if Version(prospective.base_version) == Version(spec.base_version):
                return False

        # If we've gotten to here, it means that prospective version is both
        # less than the spec version *and* it's not a pre-release of the same
        # version in the spec.
        return True

    @_require_version_compare
    def _compare_greater_than(self, prospective, spec):
        # Convert our spec to a Version instance, since we'll want to work with
        # it as a version.
        spec = Version(spec)

        # Check to see if the prospective version is greater than the spec
        # version. If it's not we can short circuit and just return False now
        # instead of doing extra unneeded work.
        if not prospective > spec:
            return False

        # This special case is here so that, unless the specifier itself
        # includes is a post-release version, that we do not accept
        # post-release versions for the version mentioned in the specifier
        # (e.g. >3.1 should not match 3.0.post0, but should match 3.2.post0).
        if not spec.is_postrelease and prospective.is_postrelease:
            if Version(prospective.base_version) == Version(spec.base_version):
                return False

        # Ensure that we do not allow a local version of the version mentioned
        # in the specifier, which is technically greater than, to match.
        if prospective.local is not None:
            if Version(prospective.base_version) == Version(spec.base_version):
                return False

        # If we've gotten to here, it means that prospective version is both
        # greater than the spec version *and* it's not a pre-release of the
        # same version in the spec.
        return True

    def _compare_arbitrary(self, prospective, spec):
        # === is a plain case-insensitive string comparison; no PEP 440
        # semantics are applied.
        return str(prospective).lower() == str(spec).lower()

    @property
    def prereleases(self):
        # If there is an explicit prereleases set for this, then we'll just
        # blindly use that.
        if self._prereleases is not None:
            return self._prereleases

        # Look at all of our specifiers and determine if they are inclusive
        # operators, and if they are if they are including an explicit
        # prerelease.
        operator, version = self._spec
        if operator in ["==", ">=", "<=", "~=", "==="]:
            # The == specifier can include a trailing .*, if it does we
            # want to remove before parsing.
            if operator == "==" and version.endswith(".*"):
                version = version[:-2]

            # Parse the version, and if it is a pre-release then this
            # specifier allows pre-releases.
            if parse(version).is_prerelease:
                return True

        return False

    @prereleases.setter
    def prereleases(self, value):
        self._prereleases = value


_prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$")


def _version_split(version):
    result = []
    for item in version.split("."):
        match = _prefix_regex.search(item)
        if match:
            result.extend(match.groups())
        else:
            result.append(item)
    return result


def _pad_version(left, right):
    left_split, right_split = [], []

    # Get the release segment of our versions
    left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left)))
    right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right)))

    # Get the rest of our versions
    left_split.append(left[len(left_split[0]):])
    right_split.append(right[len(right_split[0]):])

    # Insert our padding
    left_split.insert(
        1,
        ["0"] * max(0, len(right_split[0]) - len(left_split[0])),
    )
    right_split.insert(
        1,
        ["0"] * max(0, len(left_split[0]) - len(right_split[0])),
    )

    return (
        list(itertools.chain(*left_split)),
        list(itertools.chain(*right_split)),
    )


class SpecifierSet(BaseSpecifier):
    """A comma-separated set of individual specifiers, combined with a
    logical AND.
    """

    def __init__(self, specifiers="", prereleases=None):
        # Split on , to break each individual specifier into its own item, and
        # strip each item to remove leading/trailing whitespace.
        specifiers = [s.strip() for s in specifiers.split(",") if s.strip()]

        # Parse each individual specifier, attempting first to make it a
        # Specifier and falling back to a LegacySpecifier.
        parsed = set()
        for specifier in specifiers:
            try:
                parsed.add(Specifier(specifier))
            except InvalidSpecifier:
                parsed.add(LegacySpecifier(specifier))

        # Turn our parsed specifiers into a frozen set and save them for later.
        self._specs = frozenset(parsed)

        # Store our prereleases value so we can use it later to determine if
        # we accept prereleases or not.
        self._prereleases = prereleases

    def __repr__(self):
        pre = (
            ", prereleases={0!r}".format(self.prereleases)
            if self._prereleases is not None
            else ""
        )

        return "<SpecifierSet({0!r}{1})>".format(str(self), pre)

    def __str__(self):
        # Sorted so the string form is deterministic regardless of parse
        # order.
        return ",".join(sorted(str(s) for s in self._specs))

    def __hash__(self):
        return hash(self._specs)

    def __and__(self, other):
        # The & operator unions the underlying specifier sets (a logical AND
        # of the constraints).
        if isinstance(other, string_types):
            other = SpecifierSet(other)
        elif not isinstance(other, SpecifierSet):
            return NotImplemented

        specifier = SpecifierSet()
        specifier._specs = frozenset(self._specs | other._specs)

        # A prerelease override is inherited from whichever side has one;
        # conflicting overrides cannot be reconciled.
        if self._prereleases is None and other._prereleases is not None:
            specifier._prereleases = other._prereleases
        elif self._prereleases is not None and other._prereleases is None:
            specifier._prereleases = self._prereleases
        elif self._prereleases == other._prereleases:
            specifier._prereleases = self._prereleases
        else:
            raise ValueError(
                "Cannot combine SpecifierSets with True and False prerelease "
                "overrides."
            )

        return specifier

    def __eq__(self, other):
        if isinstance(other, string_types):
            other = SpecifierSet(other)
        elif isinstance(other, _IndividualSpecifier):
            other = SpecifierSet(str(other))
        elif not isinstance(other, SpecifierSet):
            return NotImplemented

        return self._specs == other._specs

    def __ne__(self, other):
        if isinstance(other, string_types):
            other = SpecifierSet(other)
        elif isinstance(other, _IndividualSpecifier):
            other = SpecifierSet(str(other))
        elif not isinstance(other, SpecifierSet):
            return NotImplemented

        return self._specs != other._specs

    def __len__(self):
        return len(self._specs)

    def __iter__(self):
        return iter(self._specs)

    @property
    def prereleases(self):
        # If we have been given an explicit prerelease modifier, then we'll
        # pass that through here.
        if self._prereleases is not None:
            return self._prereleases

        # If we don't have any specifiers, and we don't have a forced value,
        # then we'll just return None since we don't know if this should have
        # pre-releases or not.
        if not self._specs:
            return None

        # Otherwise we'll see if any of the given specifiers accept
        # prereleases, if any of them do we'll return True, otherwise False.
        return any(s.prereleases for s in self._specs)

    @prereleases.setter
    def prereleases(self, value):
        self._prereleases = value

    def __contains__(self, item):
        return self.contains(item)

    def contains(self, item, prereleases=None):
        # Ensure that our item is a Version or LegacyVersion instance.
        if not isinstance(item, (LegacyVersion, Version)):
            item = parse(item)

        # Determine if we're forcing a prerelease or not, if we're not forcing
        # one for this particular filter call, then we'll use whatever the
        # SpecifierSet thinks for whether or not we should support prereleases.
        if prereleases is None:
            prereleases = self.prereleases

        # We can determine if we're going to allow pre-releases by looking to
        # see if any of the underlying items supports them. If none of them do
        # and this item is a pre-release then we do not allow it and we can
        # short circuit that here.
        # Note: This means that 1.0.dev1 would not be contained in something
        #       like >=1.0.devabc however it would be in >=1.0.debabc,>0.0.dev0
        if not prereleases and item.is_prerelease:
            return False

        # We simply dispatch to the underlying specs here to make sure that the
        # given version is contained within all of them.
        # Note: This use of all() here means that an empty set of specifiers
        #       will always return True, this is an explicit design decision.
        return all(
            s.contains(item, prereleases=prereleases)
            for s in self._specs
        )

    def filter(self, iterable, prereleases=None):
        # Determine if we're forcing a prerelease or not, if we're not forcing
        # one for this particular filter call, then we'll use whatever the
        # SpecifierSet thinks for whether or not we should support prereleases.
        if prereleases is None:
            prereleases = self.prereleases

        # If we have any specifiers, then we want to wrap our iterable in the
        # filter method for each one, this will act as a logical AND amongst
        # each specifier.
        if self._specs:
            for spec in self._specs:
                iterable = spec.filter(iterable, prereleases=bool(prereleases))
            return iterable
        # If we do not have any specifiers, then we need to have a rough filter
        # which will filter out any pre-releases, unless there are no final
        # releases, and which will filter out LegacyVersion in general.
        else:
            filtered = []
            found_prereleases = []

            for item in iterable:
                # Ensure that we have some kind of Version class for this item.
                if not isinstance(item, (LegacyVersion, Version)):
                    parsed_version = parse(item)
                else:
                    parsed_version = item

                # Filter out any item which is parsed as a LegacyVersion
                if isinstance(parsed_version, LegacyVersion):
                    continue

                # Store any item which is a pre-release for later unless we've
                # already found a final version or we are accepting prereleases
                if parsed_version.is_prerelease and not prereleases:
                    if not filtered:
                        found_prereleases.append(item)
                else:
                    filtered.append(item)

            # If we've found no items except for pre-releases, then we'll go
            # ahead and use the pre-releases
            if not filtered and found_prereleases and prereleases is None:
                return found_prereleases

            return filtered
_vendor/six.py000064400000072622151733136470007400 0ustar00"""Utilities for writing code that runs on Python 2 and 3"""

# Copyright (c) 2010-2015 Benjamin Peterson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.

from __future__ import absolute_import

import functools
import itertools
import operator
import sys
import types

__author__ = "Benjamin Peterson <benjamin@python.org>"
__version__ = "1.10.0"


# Useful for very coarse version differentiation.
PY2 = sys.version_info[0] == 2
PY3 = sys.version_info[0] == 3
PY34 = sys.version_info[0:2] >= (3, 4)

# Aliases for types whose names differ between Python 2 and 3, so callers
# can write version-agnostic code (e.g. isinstance(x, string_types)).
if PY3:
    string_types = str,
    integer_types = int,
    class_types = type,
    text_type = str
    binary_type = bytes

    MAXSIZE = sys.maxsize
else:
    string_types = basestring,
    integer_types = (int, long)
    class_types = (type, types.ClassType)
    text_type = unicode
    binary_type = str

    if sys.platform.startswith("java"):
        # Jython always uses 32 bits.
        MAXSIZE = int((1 << 31) - 1)
    else:
        # It's possible to have sizeof(long) != sizeof(Py_ssize_t).
        class X(object):

            def __len__(self):
                return 1 << 31
        try:
            # len() raises OverflowError if __len__ exceeds Py_ssize_t.
            len(X())
        except OverflowError:
            # 32-bit
            MAXSIZE = int((1 << 31) - 1)
        else:
            # 64-bit
            MAXSIZE = int((1 << 63) - 1)
        del X


def _add_doc(func, doc):
    """Add documentation to a function."""
    func.__doc__ = doc


def _import_module(name):
    """Import module, returning the module after the last dot."""
    # __import__("a.b") returns the top-level package "a"; the leaf
    # module "a.b" is fetched from sys.modules instead.
    __import__(name)
    return sys.modules[name]


class _LazyDescr(object):
    """Descriptor that resolves its value on first attribute access.

    Subclasses provide ``_resolve()``; the resolved object is cached on
    the owning instance and this descriptor then deletes itself from the
    class so subsequent lookups bypass it entirely.
    """

    def __init__(self, name):
        # Attribute name this descriptor is installed under.
        self.name = name

    def __get__(self, obj, tp):
        result = self._resolve()
        setattr(obj, self.name, result)  # Invokes __set__.
        try:
            # This is a bit ugly, but it avoids running this again by
            # removing this descriptor.
            delattr(obj.__class__, self.name)
        except AttributeError:
            pass
        return result


class MovedModule(_LazyDescr):
    """Lazy reference to a module that was renamed between Python 2 and 3.

    ``old`` is the Python 2 module name; ``new`` (default: *name*) is the
    Python 3 name. Only the name matching the running interpreter is kept.
    """

    def __init__(self, name, old, new=None):
        super(MovedModule, self).__init__(name)
        if PY3:
            if new is None:
                new = name
            self.mod = new
        else:
            self.mod = old

    def _resolve(self):
        # Import the version-appropriate module on demand.
        return _import_module(self.mod)

    def __getattr__(self, attr):
        # Resolve the real module lazily and cache the looked-up attribute
        # on this object so __getattr__ is not hit again for it.
        _module = self._resolve()
        value = getattr(_module, attr)
        setattr(self, attr, value)
        return value


class _LazyModule(types.ModuleType):

    def __init__(self, name):
        super(_LazyModule, self).__init__(name)
        self.__doc__ = self.__class__.__doc__

    def __dir__(self):
        attrs = ["__doc__", "__name__"]
        attrs += [attr.name for attr in self._moved_attributes]
        return attrs

    # Subclasses should override this
    _moved_attributes = []


class MovedAttribute(_LazyDescr):
    """Lazy reference to an attribute that moved between Python 2 and 3.

    ``old_mod``/``old_attr`` name the Python 2 location, ``new_mod``/
    ``new_attr`` the Python 3 one; attribute names default to *name*.
    """

    def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
        super(MovedAttribute, self).__init__(name)
        if PY3:
            if new_mod is None:
                new_mod = name
            self.mod = new_mod
            # Fall back old_attr -> name when new_attr is not given, so a
            # rename on only one side can be expressed with four arguments.
            if new_attr is None:
                if old_attr is None:
                    new_attr = name
                else:
                    new_attr = old_attr
            self.attr = new_attr
        else:
            self.mod = old_mod
            if old_attr is None:
                old_attr = name
            self.attr = old_attr

    def _resolve(self):
        # Import the host module and pull the attribute out of it.
        module = _import_module(self.mod)
        return getattr(module, self.attr)


class _SixMetaPathImporter(object):

    """
    A meta path importer to import six.moves and its submodules.

    This class implements a PEP302 finder and loader. It should be compatible
    with Python 2.5 and all existing versions of Python3
    """

    def __init__(self, six_module_name):
        self.name = six_module_name
        # Maps fully-qualified names ("<six>.moves.x") to module objects
        # or MovedModule placeholders.
        self.known_modules = {}

    def _add_module(self, mod, *fullnames):
        # Register *mod* under one or more names relative to this package.
        for fullname in fullnames:
            self.known_modules[self.name + "." + fullname] = mod

    def _get_module(self, fullname):
        # Look up a module by its name relative to this package.
        return self.known_modules[self.name + "." + fullname]

    def find_module(self, fullname, path=None):
        # PEP 302 finder hook: claim only the names registered above.
        if fullname in self.known_modules:
            return self
        return None

    def __get_module(self, fullname):
        # Internal lookup by absolute name; raises ImportError (as the
        # import machinery expects) for unknown modules.
        try:
            return self.known_modules[fullname]
        except KeyError:
            raise ImportError("This loader does not know module " + fullname)

    def load_module(self, fullname):
        # PEP 302 loader hook.
        try:
            # in case of a reload
            return sys.modules[fullname]
        except KeyError:
            pass
        mod = self.__get_module(fullname)
        if isinstance(mod, MovedModule):
            mod = mod._resolve()
        else:
            mod.__loader__ = self
        sys.modules[fullname] = mod
        return mod

    def is_package(self, fullname):
        """
        Return true, if the named module is a package.

        We need this method to get correct spec objects with
        Python 3.4 (see PEP451)
        """
        return hasattr(self.__get_module(fullname), "__path__")

    def get_code(self, fullname):
        """Return None

        Required, if is_package is implemented"""
        self.__get_module(fullname)  # eventually raises ImportError
        return None
    get_source = get_code  # same as get_code

_importer = _SixMetaPathImporter(__name__)


class _MovedItems(_LazyModule):

    """Lazy loading of moved objects"""
    __path__ = []  # mark as package


# Table of everything exposed under six.moves: each entry records the
# Python 2 and Python 3 locations of a renamed attribute or module.
_moved_attributes = [
    MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
    MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
    MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"),
    MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
    MovedAttribute("intern", "__builtin__", "sys"),
    MovedAttribute("map", "itertools", "builtins", "imap", "map"),
    MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"),
    MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"),
    MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
    MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"),
    MovedAttribute("reduce", "__builtin__", "functools"),
    MovedAttribute("shlex_quote", "pipes", "shlex", "quote"),
    MovedAttribute("StringIO", "StringIO", "io"),
    MovedAttribute("UserDict", "UserDict", "collections"),
    MovedAttribute("UserList", "UserList", "collections"),
    MovedAttribute("UserString", "UserString", "collections"),
    MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
    MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
    MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"),
    MovedModule("builtins", "__builtin__"),
    MovedModule("configparser", "ConfigParser"),
    MovedModule("copyreg", "copy_reg"),
    MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),
    MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"),
    MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
    MovedModule("http_cookies", "Cookie", "http.cookies"),
    MovedModule("html_entities", "htmlentitydefs", "html.entities"),
    MovedModule("html_parser", "HTMLParser", "html.parser"),
    MovedModule("http_client", "httplib", "http.client"),
    MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
    MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"),
    MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
    MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
    MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
    MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
    MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
    MovedModule("cPickle", "cPickle", "pickle"),
    MovedModule("queue", "Queue"),
    MovedModule("reprlib", "repr"),
    MovedModule("socketserver", "SocketServer"),
    MovedModule("_thread", "thread", "_thread"),
    MovedModule("tkinter", "Tkinter"),
    MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
    MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
    MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
    MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
    MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
    MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"),
    MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
    MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
    MovedModule("tkinter_colorchooser", "tkColorChooser",
                "tkinter.colorchooser"),
    MovedModule("tkinter_commondialog", "tkCommonDialog",
                "tkinter.commondialog"),
    MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
    MovedModule("tkinter_font", "tkFont", "tkinter.font"),
    MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
    MovedModule("tkinter_tksimpledialog", "tkSimpleDialog",
                "tkinter.simpledialog"),
    MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"),
    MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"),
    MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"),
    MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
    MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"),
    MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"),
]
# Add windows specific modules.
if sys.platform == "win32":
    _moved_attributes += [
        MovedModule("winreg", "_winreg"),
    ]

# Install each entry as a lazy descriptor on _MovedItems; moved modules are
# additionally registered with the meta path importer so they can be
# imported as "six.moves.<name>".
for attr in _moved_attributes:
    setattr(_MovedItems, attr.name, attr)
    if isinstance(attr, MovedModule):
        _importer._add_module(attr, "moves." + attr.name)
del attr

_MovedItems._moved_attributes = _moved_attributes

# The public six.moves pseudo-module.
moves = _MovedItems(__name__ + ".moves")
_importer._add_module(moves, "moves")


class Module_six_moves_urllib_parse(_LazyModule):

    """Lazy loading of moved objects in six.moves.urllib_parse"""


# Names gathered from Python 2's urlparse/urllib into urllib.parse.
_urllib_parse_moved_attributes = [
    MovedAttribute("ParseResult", "urlparse", "urllib.parse"),
    MovedAttribute("SplitResult", "urlparse", "urllib.parse"),
    MovedAttribute("parse_qs", "urlparse", "urllib.parse"),
    MovedAttribute("parse_qsl", "urlparse", "urllib.parse"),
    MovedAttribute("urldefrag", "urlparse", "urllib.parse"),
    MovedAttribute("urljoin", "urlparse", "urllib.parse"),
    MovedAttribute("urlparse", "urlparse", "urllib.parse"),
    MovedAttribute("urlsplit", "urlparse", "urllib.parse"),
    MovedAttribute("urlunparse", "urlparse", "urllib.parse"),
    MovedAttribute("urlunsplit", "urlparse", "urllib.parse"),
    MovedAttribute("quote", "urllib", "urllib.parse"),
    MovedAttribute("quote_plus", "urllib", "urllib.parse"),
    MovedAttribute("unquote", "urllib", "urllib.parse"),
    MovedAttribute("unquote_plus", "urllib", "urllib.parse"),
    MovedAttribute("urlencode", "urllib", "urllib.parse"),
    MovedAttribute("splitquery", "urllib", "urllib.parse"),
    MovedAttribute("splittag", "urllib", "urllib.parse"),
    MovedAttribute("splituser", "urllib", "urllib.parse"),
    MovedAttribute("uses_fragment", "urlparse", "urllib.parse"),
    MovedAttribute("uses_netloc", "urlparse", "urllib.parse"),
    MovedAttribute("uses_params", "urlparse", "urllib.parse"),
    MovedAttribute("uses_query", "urlparse", "urllib.parse"),
    MovedAttribute("uses_relative", "urlparse", "urllib.parse"),
]
for attr in _urllib_parse_moved_attributes:
    setattr(Module_six_moves_urllib_parse, attr.name, attr)
del attr

Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes

# Reachable both as six.moves.urllib_parse and six.moves.urllib.parse.
_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"),
                      "moves.urllib_parse", "moves.urllib.parse")


class Module_six_moves_urllib_error(_LazyModule):

    """Lazy loading of moved objects in six.moves.urllib_error"""


# Names gathered from Python 2's urllib2/urllib into urllib.error.
_urllib_error_moved_attributes = [
    MovedAttribute("URLError", "urllib2", "urllib.error"),
    MovedAttribute("HTTPError", "urllib2", "urllib.error"),
    MovedAttribute("ContentTooShortError", "urllib", "urllib.error"),
]
for attr in _urllib_error_moved_attributes:
    setattr(Module_six_moves_urllib_error, attr.name, attr)
del attr

Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes

# Reachable both as six.moves.urllib_error and six.moves.urllib.error.
_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"),
                      "moves.urllib_error", "moves.urllib.error")


class Module_six_moves_urllib_request(_LazyModule):

    """Lazy loading of moved objects in six.moves.urllib_request"""


# Names gathered from Python 2's urllib2/urllib into urllib.request.
_urllib_request_moved_attributes = [
    MovedAttribute("urlopen", "urllib2", "urllib.request"),
    MovedAttribute("install_opener", "urllib2", "urllib.request"),
    MovedAttribute("build_opener", "urllib2", "urllib.request"),
    MovedAttribute("pathname2url", "urllib", "urllib.request"),
    MovedAttribute("url2pathname", "urllib", "urllib.request"),
    MovedAttribute("getproxies", "urllib", "urllib.request"),
    MovedAttribute("Request", "urllib2", "urllib.request"),
    MovedAttribute("OpenerDirector", "urllib2", "urllib.request"),
    MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"),
    MovedAttribute("ProxyHandler", "urllib2", "urllib.request"),
    MovedAttribute("BaseHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"),
    MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"),
    MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"),
    MovedAttribute("FileHandler", "urllib2", "urllib.request"),
    MovedAttribute("FTPHandler", "urllib2", "urllib.request"),
    MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"),
    MovedAttribute("UnknownHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"),
    MovedAttribute("urlretrieve", "urllib", "urllib.request"),
    MovedAttribute("urlcleanup", "urllib", "urllib.request"),
    MovedAttribute("URLopener", "urllib", "urllib.request"),
    MovedAttribute("FancyURLopener", "urllib", "urllib.request"),
    MovedAttribute("proxy_bypass", "urllib", "urllib.request"),
]
for attr in _urllib_request_moved_attributes:
    setattr(Module_six_moves_urllib_request, attr.name, attr)
del attr

Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes

# Reachable both as six.moves.urllib_request and six.moves.urllib.request.
_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"),
                      "moves.urllib_request", "moves.urllib.request")


class Module_six_moves_urllib_response(_LazyModule):

    """Lazy loading of moved objects in six.moves.urllib_response"""


# Names gathered from Python 2's urllib into urllib.response.
_urllib_response_moved_attributes = [
    MovedAttribute("addbase", "urllib", "urllib.response"),
    MovedAttribute("addclosehook", "urllib", "urllib.response"),
    MovedAttribute("addinfo", "urllib", "urllib.response"),
    MovedAttribute("addinfourl", "urllib", "urllib.response"),
]
for attr in _urllib_response_moved_attributes:
    setattr(Module_six_moves_urllib_response, attr.name, attr)
del attr

Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes

# Reachable both as six.moves.urllib_response and six.moves.urllib.response.
_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"),
                      "moves.urllib_response", "moves.urllib.response")


class Module_six_moves_urllib_robotparser(_LazyModule):

    """Lazy loading of moved objects in six.moves.urllib_robotparser"""


# Python 2's robotparser module became urllib.robotparser.
_urllib_robotparser_moved_attributes = [
    MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"),
]
for attr in _urllib_robotparser_moved_attributes:
    setattr(Module_six_moves_urllib_robotparser, attr.name, attr)
del attr

Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes

# Reachable both as six.moves.urllib_robotparser and six.moves.urllib.robotparser.
_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"),
                      "moves.urllib_robotparser", "moves.urllib.robotparser")


class Module_six_moves_urllib(types.ModuleType):

    """Create a six.moves.urllib namespace that resembles the Python 3 namespace"""
    __path__ = []  # mark as package
    # The five submodules mirror Python 3's urllib package layout; each one
    # was registered with the importer above.
    parse = _importer._get_module("moves.urllib_parse")
    error = _importer._get_module("moves.urllib_error")
    request = _importer._get_module("moves.urllib_request")
    response = _importer._get_module("moves.urllib_response")
    robotparser = _importer._get_module("moves.urllib_robotparser")

    def __dir__(self):
        return ['parse', 'error', 'request', 'response', 'robotparser']

_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"),
                      "moves.urllib")


def add_move(move):
    """Add an item to six.moves."""
    # Installing on the class (not the moves instance) lets the lazy
    # descriptor protocol of _LazyDescr apply to the new entry.
    setattr(_MovedItems, move.name, move)


def remove_move(name):
    """Remove item from six.moves.

    Raises AttributeError when *name* is registered neither on the
    _MovedItems class nor directly on the moves instance.
    """
    try:
        delattr(_MovedItems, name)
        return
    except AttributeError:
        pass
    if name in moves.__dict__:
        del moves.__dict__[name]
    else:
        raise AttributeError("no such move, %r" % (name,))


# Version-specific attribute names used by the get_method_*/get_function_*
# accessors defined below (Python 2 used im_*/func_* spellings).
if PY3:
    _meth_func = "__func__"
    _meth_self = "__self__"

    _func_closure = "__closure__"
    _func_code = "__code__"
    _func_defaults = "__defaults__"
    _func_globals = "__globals__"
else:
    _meth_func = "im_func"
    _meth_self = "im_self"

    _func_closure = "func_closure"
    _func_code = "func_code"
    _func_defaults = "func_defaults"
    _func_globals = "func_globals"


# next() builtin: present since Python 2.6; fall back to calling the
# iterator's Py2 .next() method on older interpreters.
try:
    advance_iterator = next
except NameError:
    def advance_iterator(it):
        return it.next()
next = advance_iterator


# callable() builtin: removed in Python 3.0/3.1; emulate it by scanning
# the MRO for a __call__ slot where it is missing.
try:
    callable = callable
except NameError:
    def callable(obj):
        return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)


# Helpers papering over the Python 2/3 differences in bound/unbound
# methods: Python 3 has no unbound-method wrapper, so these degrade to
# identity/plain functions there.
if PY3:
    def get_unbound_function(unbound):
        return unbound

    create_bound_method = types.MethodType

    def create_unbound_method(func, cls):
        return func

    # Any class is an iterator base on Py3 (the protocol is __next__).
    Iterator = object
else:
    def get_unbound_function(unbound):
        return unbound.im_func

    def create_bound_method(func, obj):
        return types.MethodType(func, obj, obj.__class__)

    def create_unbound_method(func, cls):
        return types.MethodType(func, None, cls)

    # Py2 base class that forwards the old .next() protocol to __next__.
    class Iterator(object):

        def next(self):
            return type(self).__next__(self)

    callable = callable
_add_doc(get_unbound_function,
         """Get the function out of a possibly unbound function""")


# Version-neutral accessors for method/function internals, built on the
# attribute-name constants selected above.
get_method_function = operator.attrgetter(_meth_func)
get_method_self = operator.attrgetter(_meth_self)
get_function_closure = operator.attrgetter(_func_closure)
get_function_code = operator.attrgetter(_func_code)
get_function_defaults = operator.attrgetter(_func_defaults)
get_function_globals = operator.attrgetter(_func_globals)


# Dictionary iteration/view helpers: Py3 .keys()/.values()/.items() are
# views, Py2 had separate iter*/view* methods. iterlists is for
# multi-value mappings (e.g. Django QueryDict) exposing .lists().
if PY3:
    def iterkeys(d, **kw):
        return iter(d.keys(**kw))

    def itervalues(d, **kw):
        return iter(d.values(**kw))

    def iteritems(d, **kw):
        return iter(d.items(**kw))

    def iterlists(d, **kw):
        return iter(d.lists(**kw))

    viewkeys = operator.methodcaller("keys")

    viewvalues = operator.methodcaller("values")

    viewitems = operator.methodcaller("items")
else:
    def iterkeys(d, **kw):
        return d.iterkeys(**kw)

    def itervalues(d, **kw):
        return d.itervalues(**kw)

    def iteritems(d, **kw):
        return d.iteritems(**kw)

    def iterlists(d, **kw):
        return d.iterlists(**kw)

    viewkeys = operator.methodcaller("viewkeys")

    viewvalues = operator.methodcaller("viewvalues")

    viewitems = operator.methodcaller("viewitems")

_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.")
_add_doc(itervalues, "Return an iterator over the values of a dictionary.")
_add_doc(iteritems,
         "Return an iterator over the (key, value) pairs of a dictionary.")
_add_doc(iterlists,
         "Return an iterator over the (key, [values]) pairs of a dictionary.")


# Byte/text literal helpers and byte-sequence accessors, plus the
# unittest assert-method name constants used by the wrappers below.
if PY3:
    def b(s):
        return s.encode("latin-1")

    def u(s):
        return s
    unichr = chr
    import struct
    int2byte = struct.Struct(">B").pack
    del struct
    byte2int = operator.itemgetter(0)
    indexbytes = operator.getitem
    iterbytes = iter
    import io
    StringIO = io.StringIO
    BytesIO = io.BytesIO
    _assertCountEqual = "assertCountEqual"
    # 3.1 and earlier used the *Regexp spellings.
    if sys.version_info[1] <= 1:
        _assertRaisesRegex = "assertRaisesRegexp"
        _assertRegex = "assertRegexpMatches"
    else:
        _assertRaisesRegex = "assertRaisesRegex"
        _assertRegex = "assertRegex"
else:
    def b(s):
        return s
    # Workaround for standalone backslash

    def u(s):
        return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape")
    unichr = unichr
    int2byte = chr

    def byte2int(bs):
        return ord(bs[0])

    def indexbytes(buf, i):
        return ord(buf[i])
    iterbytes = functools.partial(itertools.imap, ord)
    import StringIO
    StringIO = BytesIO = StringIO.StringIO
    _assertCountEqual = "assertItemsEqual"
    _assertRaisesRegex = "assertRaisesRegexp"
    _assertRegex = "assertRegexpMatches"
_add_doc(b, """Byte literal""")
_add_doc(u, """Text literal""")


def assertCountEqual(self, *args, **kwargs):
    """Dispatch to the version-appropriate assertCountEqual method."""
    method = getattr(self, _assertCountEqual)
    return method(*args, **kwargs)


def assertRaisesRegex(self, *args, **kwargs):
    """Dispatch to the version-appropriate assertRaisesRegex method."""
    method = getattr(self, _assertRaisesRegex)
    return method(*args, **kwargs)


def assertRegex(self, *args, **kwargs):
    """Dispatch to the version-appropriate assertRegex method."""
    method = getattr(self, _assertRegex)
    return method(*args, **kwargs)


# exec_() and reraise(): both need per-version source because Py2's
# exec/raise statements are syntax errors on Py3, hence the exec'd strings.
if PY3:
    exec_ = getattr(moves.builtins, "exec")

    def reraise(tp, value, tb=None):
        if value is None:
            value = tp()
        if value.__traceback__ is not tb:
            raise value.with_traceback(tb)
        raise value

else:
    def exec_(_code_, _globs_=None, _locs_=None):
        """Execute code in a namespace."""
        if _globs_ is None:
            # Default to the caller's globals/locals, like the exec statement.
            frame = sys._getframe(1)
            _globs_ = frame.f_globals
            if _locs_ is None:
                _locs_ = frame.f_locals
            del frame
        elif _locs_ is None:
            _locs_ = _globs_
        exec("""exec _code_ in _globs_, _locs_""")

    exec_("""def reraise(tp, value, tb=None):
    raise tp, value, tb
""")


# raise_from(): "raise ... from ..." is Py3-only syntax, so the Py3
# variants are compiled from strings; Py2 simply drops the cause.
if sys.version_info[:2] == (3, 2):
    # 3.2 lacks exception chaining when from_value is None.
    exec_("""def raise_from(value, from_value):
    if from_value is None:
        raise value
    raise value from from_value
""")
elif sys.version_info[:2] > (3, 2):
    exec_("""def raise_from(value, from_value):
    raise value from from_value
""")
else:
    def raise_from(value, from_value):
        raise value


# print_(): use the real print function when available (Py3 / Py2.6+),
# otherwise emulate it; a second wrapper adds the 3.3+ "flush" keyword.
print_ = getattr(moves.builtins, "print", None)
if print_ is None:
    def print_(*args, **kwargs):
        """The new-style print function for Python 2.4 and 2.5."""
        fp = kwargs.pop("file", sys.stdout)
        if fp is None:
            return

        def write(data):
            if not isinstance(data, basestring):
                data = str(data)
            # If the file has an encoding, encode unicode with it.
            if (isinstance(fp, file) and
                    isinstance(data, unicode) and
                    fp.encoding is not None):
                errors = getattr(fp, "errors", None)
                if errors is None:
                    errors = "strict"
                data = data.encode(fp.encoding, errors)
            fp.write(data)
        # If any argument/sep/end is unicode, print everything as unicode.
        want_unicode = False
        sep = kwargs.pop("sep", None)
        if sep is not None:
            if isinstance(sep, unicode):
                want_unicode = True
            elif not isinstance(sep, str):
                raise TypeError("sep must be None or a string")
        end = kwargs.pop("end", None)
        if end is not None:
            if isinstance(end, unicode):
                want_unicode = True
            elif not isinstance(end, str):
                raise TypeError("end must be None or a string")
        if kwargs:
            raise TypeError("invalid keyword arguments to print()")
        if not want_unicode:
            for arg in args:
                if isinstance(arg, unicode):
                    want_unicode = True
                    break
        if want_unicode:
            newline = unicode("\n")
            space = unicode(" ")
        else:
            newline = "\n"
            space = " "
        if sep is None:
            sep = space
        if end is None:
            end = newline
        for i, arg in enumerate(args):
            if i:
                write(sep)
            write(arg)
        write(end)
if sys.version_info[:2] < (3, 3):
    # Wrap to support the flush= keyword added in Python 3.3.
    _print = print_

    def print_(*args, **kwargs):
        fp = kwargs.get("file", sys.stdout)
        flush = kwargs.pop("flush", False)
        _print(*args, **kwargs)
        if flush and fp is not None:
            fp.flush()

_add_doc(reraise, """Reraise an exception.""")

# functools.wraps only sets __wrapped__ itself from Python 3.4 on; add a
# shim that records it on older interpreters so introspection works.
if sys.version_info[0:2] < (3, 4):
    def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS,
              updated=functools.WRAPPER_UPDATES):
        def wrapper(f):
            f = functools.wraps(wrapped, assigned, updated)(f)
            f.__wrapped__ = wrapped
            return f
        return wrapper
else:
    wraps = functools.wraps


def with_metaclass(meta, *bases):
    """Create a base class with a metaclass."""
    # This requires a bit of explanation: the basic idea is to make a dummy
    # metaclass for one level of class instantiation that replaces itself with
    # the actual metaclass.
    class metaclass(meta):

        def __new__(cls, name, this_bases, d):
            # Discard this_bases (the temporary class) and build the real
            # class with the caller-supplied bases and metaclass.
            return meta(name, bases, d)
    return type.__new__(metaclass, 'temporary_class', (), {})


def add_metaclass(metaclass):
    """Class decorator for creating a class with a metaclass.

    Rebuilds the decorated class through *metaclass*, copying its body and
    stripping slot descriptors so they are recreated by the new class.
    """
    def wrapper(cls):
        body = dict(cls.__dict__)
        declared_slots = body.get('__slots__')
        if declared_slots is not None:
            # A bare string means a single slot; normalize to a list.
            names = [declared_slots] if isinstance(declared_slots, str) else declared_slots
            for slot_name in names:
                body.pop(slot_name)
        # Drop per-instance machinery; the metaclass will recreate it.
        body.pop('__dict__', None)
        body.pop('__weakref__', None)
        return metaclass(cls.__name__, cls.__bases__, body)
    return wrapper


def python_2_unicode_compatible(klass):
    """
    A decorator that defines __unicode__ and __str__ methods under Python 2.
    Under Python 3 it does nothing.

    To support Python 2 and 3 with a single code base, define a __str__ method
    returning text and apply this decorator to the class.
    """
    if not PY2:
        # Python 3's __str__ already returns text; nothing to adapt.
        return klass
    if '__str__' not in klass.__dict__:
        raise ValueError("@python_2_unicode_compatible cannot be applied "
                         "to %s because it doesn't define __str__()." %
                         klass.__name__)
    # Under Python 2, __unicode__ must return text while __str__ returns
    # UTF-8 encoded bytes; route both through the class's own __str__.
    klass.__unicode__ = klass.__str__
    klass.__str__ = lambda self: self.__unicode__().encode('utf-8')
    return klass


# Complete the moves implementation.
# This code is at the end of this module to speed up module loading.
# Turn this module into a package.
__path__ = []  # required for PEP 302 and PEP 451
__package__ = __name__  # see PEP 366 @ReservedAssignment
if globals().get("__spec__") is not None:
    __spec__.submodule_search_locations = []  # PEP 451 @UndefinedVariable
# Remove other six meta path importers, since they cause problems. This can
# happen if six is removed from sys.modules and then reloaded. (Setuptools does
# this for some reason.)
if sys.meta_path:
    for i, importer in enumerate(sys.meta_path):
        # Here's some real nastiness: Another "instance" of the six module might
        # be floating around. Therefore, we can't use isinstance() to check for
        # the six meta path importer, since the other six instance will have
        # inserted an importer with different class.
        if (type(importer).__name__ == "_SixMetaPathImporter" and
                importer.name == __name__):
            del sys.meta_path[i]
            break
    # Keep the loop temporaries out of the module's public namespace.
    del i, importer
# Finally, add the importer to the meta path import hook.
sys.meta_path.append(_importer)
_vendor/progress/__pycache__/helpers.cpython-36.opt-1.pyc000064400000005517151733136470017325 0ustar003

�Pf&�@sdddlmZdZdZGdd�de�ZGdd�de�ZddlmZmZdd	l	m
Z
Gd
d�de�ZdS)
�)�print_functionz[?25lz[?25hcs2eZdZdZd	�fdd�	Zdd�Zdd�Z�ZS)
�
WriteMixinFNcsbtt|�jf|�d|_|r"||_|jj�r^|jrBtt	d|jd�t|jd|jd�|jj
�dS)Nr�)�end�file)�superr�__init__�_width�messager�isatty�hide_cursor�print�HIDE_CURSOR�flush)�selfr
�kwargs)�	__class__��/usr/lib/python3.6/helpers.pyrs
zWriteMixin.__init__cCsT|jj�rPd|j}|j|j�}t||d|jd�t|jt|��|_|jj�dS)N�r)rr)rrr	�ljustr
�max�lenr)r�s�b�crrr�write%s

zWriteMixin.writecCs$|jj�r |jr ttd|jd�dS)Nr)rr)rrrr
�SHOW_CURSOR)rrrr�finish-szWriteMixin.finish)N)�__name__�
__module__�__qualname__rrrr�
__classcell__rr)rrrsrcs:eZdZdZd�fdd�	Zdd�Zdd�Zd	d
�Z�ZS)�WritelnMixinFNcs@tt|�jf|�|r||_|jj�r<|jr<ttd|jd�dS)Nr)rr)	rr#rr
rrrr
r)rr
r)rrrr5s
zWritelnMixin.__init__cCs|jj�rtdd|jd�dS)Nz
r)rr)rrr
)rrrr�clearln=s
zWritelnMixin.clearlncCs0|jj�r,|j�t|d|jd�|jj�dS)Nr)rr)rrr$r
r)r�linerrr�writelnAs
zWritelnMixin.writelncCs0|jj�r,t|jd�|jr,ttd|jd�dS)N)rr)rr)rrr
rr)rrrrrGs
zWritelnMixin.finish)N)	rr r!rrr$r&rr"rr)rrr#2s
r#)�signal�SIGINT)�exitcs(eZdZdZ�fdd�Zdd�Z�ZS)�SigIntMixinz6Registers a signal handler that calls finish on SIGINTcs"tt|�j||�tt|j�dS)N)rr*rr'r(�_sigint_handler)r�argsr)rrrrUszSigIntMixin.__init__cCs|j�td�dS)Nr)rr))rZsignum�framerrrr+YszSigIntMixin._sigint_handler)rr r!�__doc__rr+r"rr)rrr*Rsr*N)Z
__future__rrr�objectrr#r'r(�sysr)r*rrrr�<module>s_vendor/progress/__pycache__/__init__.cpython-36.pyc000064400000007266151733136470016466 0ustar003

�Pf��@spddlmZddlmZddlmZddlmZddlm	Z	ddl
m
Z
dZGdd	�d	e�Z
Gd
d�de
�ZdS)
�)�division)�deque)�	timedelta)�ceil)�stderr)�timez1.2c@sreZdZeZdZdd�Zdd�Zedd��Z	edd	��Z
ed
d��Zdd
�Zdd�Z
dd�Zddd�Zdd�ZdS)�Infinite�
cOsJd|_t�|_|j|_t|jd�|_x |j�D]\}}t|||�q.WdS)Nr)�maxlen)	�indexr�start_ts�_tsr�
sma_window�_dt�items�setattr)�self�args�kwargs�key�val�r�/usr/lib/python3.6/__init__.py�__init__szInfinite.__init__cCs|jd�rdSt||d�S)N�_)�
startswith�getattr)rrrrr�__getitem__'s
zInfinite.__getitem__cCs|jrt|j�t|j�SdS)Nr)r�sum�len)rrrr�avg,szInfinite.avgcCstt�|j�S)N)�intrr)rrrr�elapsed0szInfinite.elapsedcCst|jd�S)N)�seconds)rr")rrrr�
elapsed_td4szInfinite.elapsed_tdcCsdS)Nr)rrrr�update8szInfinite.updatecCsdS)Nr)rrrr�start;szInfinite.startcCsdS)Nr)rrrr�finish>szInfinite.finish�cCsF|dkr.t�}||j|}|jj|�||_|j||_|j�dS)Nr)rr
r�appendrr%)r�nZnowZdtrrr�nextAsz
Infinite.nextccs(x|D]}|V|j�qW|j�dS)N)r+r')r�it�xrrr�iterKs
z
Infinite.iterN)r()�__name__�
__module__�__qualname__r�filerrr�propertyr r"r$r%r&r'r+r.rrrrrs

rcspeZdZ�fdd�Zedd��Zedd��Zedd��Zed	d
��Zedd��Z	d
d�Z
dd�Zdd�Z�Z
S)�Progresscs$tt|�j||�|jdd�|_dS)N�max�d)�superr4r�getr5)rrr)�	__class__rrrSszProgress.__init__cCstt|j|j��S)N)r!rr �	remaining)rrrr�etaWszProgress.etacCst|jd�S)N)r#)rr;)rrrr�eta_td[szProgress.eta_tdcCs
|jdS)Nr6)�progress)rrrr�percent_szProgress.percentcCstd|j|j�S)Nr()�minrr5)rrrrr=cszProgress.progresscCst|j|jd�S)Nr)r5r)rrrrr:gszProgress.remainingcCs|j�dS)N)r%)rrrrr&kszProgress.startcCs||j}|j|�dS)N)rr+)rrZincrrrr�gotons
z
Progress.gotoccsLyt|�|_Wntk
r"YnXx|D]}|V|j�q*W|j�dS)N)rr5�	TypeErrorr+r')rr,r-rrrr.rs
z
Progress.iter)r/r0r1rr3r;r<r>r=r:r&r@r.�
__classcell__rr)r9rr4Rsr4N)Z
__future__r�collectionsrZdatetimerZmathr�sysrr�__version__�objectrr4rrrr�<module>s7_vendor/progress/__pycache__/spinner.cpython-36.pyc000064400000002177151733136470016401 0ustar003

�Pf"�@s^ddlmZddlmZGdd�dee�ZGdd�de�ZGdd�de�ZGd	d
�d
e�ZdS)�)�Infinite)�
WriteMixinc@s eZdZdZd
ZdZdd�Zd	S)�Spinner��-�\�|�/TcCs$|jt|j�}|j|j|�dS)N)�index�len�phases�write)�self�i�r�/usr/lib/python3.6/spinner.py�updateszSpinner.updateN)rrrr	)�__name__�
__module__�__qualname__�messagerZhide_cursorrrrrrrsrc@seZdZddddgZdS)�
PieSpinneru◷u◶u◵u◴N)rrrrrrrrrsrc@seZdZddddgZdS)�MoonSpinneru◑u◒u◐u◓N)rrrrrrrrr#src@seZdZddddddgZdS)�LineSpinneru⎺u⎻u⎼u⎽N)rrrrrrrrr'srN)rrZhelpersrrrrrrrrr�<module>s

_vendor/progress/__pycache__/helpers.cpython-36.pyc000064400000005517151733136470016366 0ustar003

�Pf&�@sdddlmZdZdZGdd�de�ZGdd�de�ZddlmZmZdd	l	m
Z
Gd
d�de�ZdS)
�)�print_functionz[?25lz[?25hcs2eZdZdZd	�fdd�	Zdd�Zdd�Z�ZS)
�
WriteMixinFNcsbtt|�jf|�d|_|r"||_|jj�r^|jrBtt	d|jd�t|jd|jd�|jj
�dS)Nr�)�end�file)�superr�__init__�_width�messager�isatty�hide_cursor�print�HIDE_CURSOR�flush)�selfr
�kwargs)�	__class__��/usr/lib/python3.6/helpers.pyrs
zWriteMixin.__init__cCsT|jj�rPd|j}|j|j�}t||d|jd�t|jt|��|_|jj�dS)N�r)rr)rrr	�ljustr
�max�lenr)r�s�b�crrr�write%s

zWriteMixin.writecCs$|jj�r |jr ttd|jd�dS)Nr)rr)rrrr
�SHOW_CURSOR)rrrr�finish-szWriteMixin.finish)N)�__name__�
__module__�__qualname__rrrr�
__classcell__rr)rrrsrcs:eZdZdZd�fdd�	Zdd�Zdd�Zd	d
�Z�ZS)�WritelnMixinFNcs@tt|�jf|�|r||_|jj�r<|jr<ttd|jd�dS)Nr)rr)	rr#rr
rrrr
r)rr
r)rrrr5s
zWritelnMixin.__init__cCs|jj�rtdd|jd�dS)Nz
r)rr)rrr
)rrrr�clearln=s
zWritelnMixin.clearlncCs0|jj�r,|j�t|d|jd�|jj�dS)Nr)rr)rrr$r
r)r�linerrr�writelnAs
zWritelnMixin.writelncCs0|jj�r,t|jd�|jr,ttd|jd�dS)N)rr)rr)rrr
rr)rrrrrGs
zWritelnMixin.finish)N)	rr r!rrr$r&rr"rr)rrr#2s
r#)�signal�SIGINT)�exitcs(eZdZdZ�fdd�Zdd�Z�ZS)�SigIntMixinz6Registers a signal handler that calls finish on SIGINTcs"tt|�j||�tt|j�dS)N)rr*rr'r(�_sigint_handler)r�argsr)rrrrUszSigIntMixin.__init__cCs|j�td�dS)Nr)rr))rZsignum�framerrrr+YszSigIntMixin._sigint_handler)rr r!�__doc__rr+r"rr)rrr*Rsr*N)Z
__future__rrr�objectrr#r'r(�sysr)r*rrrr�<module>s_vendor/progress/__pycache__/bar.cpython-36.pyc000064400000004521151733136470015462 0ustar003

�Pf}
�@s~ddlmZddlmZGdd�dee�ZGdd�de�ZGdd�de�ZGd	d
�d
e�ZGdd�de�ZGd
d�de�Z	dS)�)�Progress)�WritelnMixinc@s4eZdZdZdZdZdZdZdZdZ	dZ
d	d
�ZdS)�Bar� �z%(index)d/%(max)dz |z| � �#TcCsjt|j|j�}|j|}|j|}|j|}|j|}|j|}dj||j|||j	|g�}|j
|�dS)Nr)�int�width�progress�message�fill�
empty_fill�suffix�join�
bar_prefix�
bar_suffix�writeln)�self�
filled_length�empty_lengthr�bar�emptyr�line�r�/usr/lib/python3.6/bar.py�updates




z
Bar.updateN)�__name__�
__module__�__qualname__r
rrrrrr
Zhide_cursorrrrrrrsrc@s eZdZdZdZdZdZdZdS)�ChargingBarz
%(percent)d%%ru∙u█N)rrrrrrrr
rrrrr ,s
r c@seZdZdZdZdS)�FillingSquaresBaru▢u▣N)rrrrr
rrrrr!4sr!c@seZdZdZdZdS)�FillingCirclesBaru◯u◉N)rrrrr
rrrrr"9sr"c	@seZdZd
Zd
d�ZdS)�IncrementalBarr�▏�▎�▍�▌�▋�▊�▉�█cCs�t|j�}t||j|j�}t|j|j�}|j|}|||}|j|}|jd|}|dkrn|j|nd}|jtd|t|��}	|j|}
dj	||j
|||	|j|
g�}|j|�dS)Nr�r���)
�len�phasesr	r
rrr�maxrrrrr)rZnphasesZexpanded_lengthrrZphaserrZcurrentrrrrrrrAs



zIncrementalBar.updateN)	rr$r%r&r'r(r)r*r+)rrrr/rrrrrr#>sr#c@seZdZdZdS)�ShadyBarr�░�▒�▓�█N)rr2r3r4r5)rrrr/rrrrr1Rsr1N)
rrZhelpersrrr r!r"r#r1rrrr�<module>s_vendor/progress/__pycache__/counter.cpython-36.pyc000064400000002745151733136470016403 0ustar003

�Pf��@sfddlmZmZddlmZGdd�dee�ZGdd�dee�ZGdd�dee�ZGd	d
�d
e�ZdS)�)�Infinite�Progress)�
WriteMixinc@seZdZdZdZdd�ZdS)�Counter�TcCs|jt|j��dS)N)�write�str�index)�self�r�/usr/lib/python3.6/counter.py�updateszCounter.updateN)�__name__�
__module__�__qualname__�message�hide_cursorr
rrrrrsrc@seZdZdZdd�ZdS)�	CountdownTcCs|jt|j��dS)N)rrZ	remaining)r
rrrr
 szCountdown.updateN)rrrrr
rrrrrsrc	@seZdZdZd
Zdd�Zd
S)�Stack� �▁�▂�▃�▄�▅�▆�▇�█TcCs6t|j�}t|dt|j|��}|j|j|�dS)Nr)�len�phases�min�intZprogressr)r
Znphases�irrrr
(s
zStack.updateN)	rrrrrrrrr)rrrrrr
rrrrr$src@seZdZdZdS)�Pie�○�◔�◑�◕�●N)r$r%r&r'r()rrrrrrrrr#.sr#N)	rrrZhelpersrrrrr#rrrr�<module>s

_vendor/progress/__pycache__/__init__.cpython-36.opt-1.pyc000064400000007266151733136470017425 0ustar003

�Pf��@spddlmZddlmZddlmZddlmZddlm	Z	ddl
m
Z
dZGdd	�d	e�Z
Gd
d�de
�ZdS)
�)�division)�deque)�	timedelta)�ceil)�stderr)�timez1.2c@sreZdZeZdZdd�Zdd�Zedd��Z	edd	��Z
ed
d��Zdd
�Zdd�Z
dd�Zddd�Zdd�ZdS)�Infinite�
cOsJd|_t�|_|j|_t|jd�|_x |j�D]\}}t|||�q.WdS)Nr)�maxlen)	�indexr�start_ts�_tsr�
sma_window�_dt�items�setattr)�self�args�kwargs�key�val�r�/usr/lib/python3.6/__init__.py�__init__szInfinite.__init__cCs|jd�rdSt||d�S)N�_)�
startswith�getattr)rrrrr�__getitem__'s
zInfinite.__getitem__cCs|jrt|j�t|j�SdS)Nr)r�sum�len)rrrr�avg,szInfinite.avgcCstt�|j�S)N)�intrr)rrrr�elapsed0szInfinite.elapsedcCst|jd�S)N)�seconds)rr")rrrr�
elapsed_td4szInfinite.elapsed_tdcCsdS)Nr)rrrr�update8szInfinite.updatecCsdS)Nr)rrrr�start;szInfinite.startcCsdS)Nr)rrrr�finish>szInfinite.finish�cCsF|dkr.t�}||j|}|jj|�||_|j||_|j�dS)Nr)rr
r�appendrr%)r�nZnowZdtrrr�nextAsz
Infinite.nextccs(x|D]}|V|j�qW|j�dS)N)r+r')r�it�xrrr�iterKs
z
Infinite.iterN)r()�__name__�
__module__�__qualname__r�filerrr�propertyr r"r$r%r&r'r+r.rrrrrs

rcspeZdZ�fdd�Zedd��Zedd��Zedd��Zed	d
��Zedd��Z	d
d�Z
dd�Zdd�Z�Z
S)�Progresscs$tt|�j||�|jdd�|_dS)N�max�d)�superr4r�getr5)rrr)�	__class__rrrSszProgress.__init__cCstt|j|j��S)N)r!rr �	remaining)rrrr�etaWszProgress.etacCst|jd�S)N)r#)rr;)rrrr�eta_td[szProgress.eta_tdcCs
|jdS)Nr6)�progress)rrrr�percent_szProgress.percentcCstd|j|j�S)Nr()�minrr5)rrrrr=cszProgress.progresscCst|j|jd�S)Nr)r5r)rrrrr:gszProgress.remainingcCs|j�dS)N)r%)rrrrr&kszProgress.startcCs||j}|j|�dS)N)rr+)rrZincrrrr�gotons
z
Progress.gotoccsLyt|�|_Wntk
r"YnXx|D]}|V|j�q*W|j�dS)N)rr5�	TypeErrorr+r')rr,r-rrrr.rs
z
Progress.iter)r/r0r1rr3r;r<r>r=r:r&r@r.�
__classcell__rr)r9rr4Rsr4N)Z
__future__r�collectionsrZdatetimerZmathr�sysrr�__version__�objectrr4rrrr�<module>s7_vendor/progress/__pycache__/bar.cpython-36.opt-1.pyc000064400000004521151733136470016421 0ustar003

�Pf}
�@s~ddlmZddlmZGdd�dee�ZGdd�de�ZGdd�de�ZGd	d
�d
e�ZGdd�de�ZGd
d�de�Z	dS)�)�Progress)�WritelnMixinc@s4eZdZdZdZdZdZdZdZdZ	dZ
d	d
�ZdS)�Bar� �z%(index)d/%(max)dz |z| � �#TcCsjt|j|j�}|j|}|j|}|j|}|j|}|j|}dj||j|||j	|g�}|j
|�dS)Nr)�int�width�progress�message�fill�
empty_fill�suffix�join�
bar_prefix�
bar_suffix�writeln)�self�
filled_length�empty_lengthr�bar�emptyr�line�r�/usr/lib/python3.6/bar.py�updates




z
Bar.updateN)�__name__�
__module__�__qualname__r
rrrrrr
Zhide_cursorrrrrrrsrc@s eZdZdZdZdZdZdZdS)�ChargingBarz
%(percent)d%%ru∙u█N)rrrrrrrr
rrrrr ,s
r c@seZdZdZdZdS)�FillingSquaresBaru▢u▣N)rrrrr
rrrrr!4sr!c@seZdZdZdZdS)�FillingCirclesBaru◯u◉N)rrrrr
rrrrr"9sr"c	@seZdZd
Zd
d�ZdS)�IncrementalBarr�▏�▎�▍�▌�▋�▊�▉�█cCs�t|j�}t||j|j�}t|j|j�}|j|}|||}|j|}|jd|}|dkrn|j|nd}|jtd|t|��}	|j|}
dj	||j
|||	|j|
g�}|j|�dS)Nr�r���)
�len�phasesr	r
rrr�maxrrrrr)rZnphasesZexpanded_lengthrrZphaserrZcurrentrrrrrrrAs



zIncrementalBar.updateN)	rr$r%r&r'r(r)r*r+)rrrr/rrrrrr#>sr#c@seZdZdZdS)�ShadyBarr�░�▒�▓�█N)rr2r3r4r5)rrrr/rrrrr1Rsr1N)
rrZhelpersrrr r!r"r#r1rrrr�<module>s_vendor/progress/__pycache__/spinner.cpython-36.opt-1.pyc000064400000002177151733136470017340 0ustar003

�Pf"�@s^ddlmZddlmZGdd�dee�ZGdd�de�ZGdd�de�ZGd	d
�d
e�ZdS)�)�Infinite)�
WriteMixinc@s eZdZdZd
ZdZdd�Zd	S)�Spinner��-�\�|�/TcCs$|jt|j�}|j|j|�dS)N)�index�len�phases�write)�self�i�r�/usr/lib/python3.6/spinner.py�updateszSpinner.updateN)rrrr	)�__name__�
__module__�__qualname__�messagerZhide_cursorrrrrrrsrc@seZdZddddgZdS)�
PieSpinneru◷u◶u◵u◴N)rrrrrrrrrsrc@seZdZddddgZdS)�MoonSpinneru◑u◒u◐u◓N)rrrrrrrrr#src@seZdZddddddgZdS)�LineSpinneru⎺u⎻u⎼u⎽N)rrrrrrrrr'srN)rrZhelpersrrrrrrrrr�<module>s

_vendor/progress/__pycache__/counter.cpython-36.opt-1.pyc000064400000002745151733136470017342 0ustar003

�Pf��@sfddlmZmZddlmZGdd�dee�ZGdd�dee�ZGdd�dee�ZGd	d
�d
e�ZdS)�)�Infinite�Progress)�
WriteMixinc@seZdZdZdZdd�ZdS)�Counter�TcCs|jt|j��dS)N)�write�str�index)�self�r�/usr/lib/python3.6/counter.py�updateszCounter.updateN)�__name__�
__module__�__qualname__�message�hide_cursorr
rrrrrsrc@seZdZdZdd�ZdS)�	CountdownTcCs|jt|j��dS)N)rrZ	remaining)r
rrrr
 szCountdown.updateN)rrrrr
rrrrrsrc	@seZdZdZd
Zdd�Zd
S)�Stack� �▁�▂�▃�▄�▅�▆�▇�█TcCs6t|j�}t|dt|j|��}|j|j|�dS)Nr)�len�phases�min�intZprogressr)r
Znphases�irrrr
(s
zStack.updateN)	rrrrrrrrr)rrrrrr
rrrrr$src@seZdZdZdS)�Pie�○�◔�◑�◕�●N)r$r%r&r'r()rrrrrrrrr#.sr#N)	rrrZhelpersrrrrr#rrrr�<module>s

_vendor/progress/helpers.py000064400000005446151733136470012103 0ustar00# Copyright (c) 2012 Giorgos Verigakis <verigak@gmail.com>
#
# Permission to use, copy, modify, and distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

from __future__ import print_function


# VT100/DEC private-mode escape sequences toggling cursor visibility
# (DECTCEM): "\x1b[?25l" hides the cursor, "\x1b[?25h" shows it again.
HIDE_CURSOR = '\x1b[?25l'
SHOW_CURSOR = '\x1b[?25h'


class WriteMixin(object):
    """Mixin that repaints a single in-place status string on a TTY.

    The host class must provide a ``file`` attribute; all output is
    suppressed when that file is not a terminal.
    """
    hide_cursor = False

    def __init__(self, message=None, **kwargs):
        super(WriteMixin, self).__init__(**kwargs)
        self._width = 0  # widest text printed so far, used to blank leftovers
        if message:
            self.message = message

        if not self.file.isatty():
            return
        if self.hide_cursor:
            print(HIDE_CURSOR, end='', file=self.file)
        print(self.message, end='', file=self.file)
        self.file.flush()

    def write(self, s):
        """Overwrite the previously written text with *s* (TTY only)."""
        if not self.file.isatty():
            return
        backspaces = '\b' * self._width
        padded = s.ljust(self._width)  # pad so shorter text erases leftovers
        print(backspaces + padded, end='', file=self.file)
        self._width = max(self._width, len(s))
        self.file.flush()

    def finish(self):
        """Restore the cursor if it was hidden on construction."""
        if self.file.isatty() and self.hide_cursor:
            print(SHOW_CURSOR, end='', file=self.file)


class WritelnMixin(object):
    """Mixin that redraws a whole line (clear, then rewrite) on a TTY."""
    hide_cursor = False

    def __init__(self, message=None, **kwargs):
        super(WritelnMixin, self).__init__(**kwargs)
        if message:
            self.message = message

        if self.file.isatty() and self.hide_cursor:
            print(HIDE_CURSOR, end='', file=self.file)

    def clearln(self):
        """Erase the current line: carriage return plus EL escape."""
        if self.file.isatty():
            print('\r\x1b[K', end='', file=self.file)

    def writeln(self, line):
        """Clear the current line and write *line* in its place."""
        if not self.file.isatty():
            return
        self.clearln()
        print(line, end='', file=self.file)
        self.file.flush()

    def finish(self):
        """Move to a fresh line, restoring the cursor if it was hidden."""
        if not self.file.isatty():
            return
        print(file=self.file)
        if self.hide_cursor:
            print(SHOW_CURSOR, end='', file=self.file)


from signal import signal, SIGINT
from sys import exit


class SigIntMixin(object):
    """Registers a signal handler that calls finish on SIGINT"""

    def __init__(self, *args, **kwargs):
        super(SigIntMixin, self).__init__(*args, **kwargs)
        # Route Ctrl-C through the display's finish() so the terminal is
        # left in a clean state before the process exits.
        signal(SIGINT, self._sigint_handler)

    def _sigint_handler(self, signum, frame):
        # Signal callback: finish the display, then exit with status 0.
        self.finish()
        exit(0)
_vendor/progress/__init__.py000064400000005717151733136470012201 0ustar00# Copyright (c) 2012 Giorgos Verigakis <verigak@gmail.com>
#
# Permission to use, copy, modify, and distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

from __future__ import division

from collections import deque
from datetime import timedelta
from math import ceil
from sys import stderr
from time import time


# Release version of the vendored progress package.
__version__ = '1.2'


class Infinite(object):
    """Open-ended progress tracker: counts iterations with no known total."""
    file = stderr
    sma_window = 10  # number of samples kept for the simple moving average

    def __init__(self, *args, **kwargs):
        self.index = 0
        self.start_ts = time()
        self._ts = self.start_ts
        # Ring buffer of per-item durations.  Created BEFORE kwargs are
        # applied, so a ``sma_window`` keyword does not resize it (this
        # matches the historical behaviour).
        self._dt = deque(maxlen=self.sma_window)
        for name, value in kwargs.items():
            setattr(self, name, value)

    def __getitem__(self, key):
        # Mapping-style access used by "%(index)d"-style format templates;
        # underscore-prefixed (private) attributes are hidden.
        return None if key.startswith('_') else getattr(self, key, None)

    @property
    def avg(self):
        """Average seconds per item over the sliding window (0 if empty)."""
        if not self._dt:
            return 0
        return sum(self._dt) / len(self._dt)

    @property
    def elapsed(self):
        """Whole seconds since the tracker was created."""
        return int(time() - self.start_ts)

    @property
    def elapsed_td(self):
        """``elapsed`` as a datetime.timedelta."""
        return timedelta(seconds=self.elapsed)

    def update(self):
        # Hook: redraw the display; no-op here, overridden by renderers.
        pass

    def start(self):
        # Hook: called before the first item.
        pass

    def finish(self):
        # Hook: called after the last item.
        pass

    def next(self, n=1):
        """Advance the counter by *n* items and redraw."""
        if n > 0:
            now = time()
            # Spread the elapsed interval evenly across the n items.
            self._dt.append((now - self._ts) / n)
            self._ts = now

        self.index += n
        self.update()

    def iter(self, it):
        """Yield from *it*, advancing once per item and finishing at the end."""
        for item in it:
            yield item
            self.next()
        self.finish()


class Progress(Infinite):
    """Progress tracker with a known total (``max``, default 100)."""

    def __init__(self, *args, **kwargs):
        super(Progress, self).__init__(*args, **kwargs)
        self.max = kwargs.get('max', 100)

    @property
    def eta(self):
        """Estimated seconds remaining, from the moving average."""
        return int(ceil(self.avg * self.remaining))

    @property
    def eta_td(self):
        """``eta`` as a datetime.timedelta."""
        return timedelta(seconds=self.eta)

    @property
    def percent(self):
        """Completion as a percentage in [0, 100]."""
        return self.progress * 100

    @property
    def progress(self):
        # Completion ratio clamped to 1.
        # NOTE(review): raises ZeroDivisionError when max == 0 (e.g. after
        # iter() over an empty sized iterable) -- confirm intended behaviour.
        return min(1, self.index / self.max)

    @property
    def remaining(self):
        """Items left to process (never negative)."""
        return max(self.max - self.index, 0)

    def start(self):
        self.update()

    def goto(self, index):
        """Jump the counter directly to *index*."""
        self.next(index - self.index)

    def iter(self, it):
        """Like Infinite.iter, but sizes ``max`` from len(it) when possible."""
        try:
            self.max = len(it)
        except TypeError:
            pass

        for item in it:
            yield item
            self.next()
        self.finish()
_vendor/progress/bar.py000064400000005175151733136470011204 0ustar00# -*- coding: utf-8 -*-

# Copyright (c) 2012 Giorgos Verigakis <verigak@gmail.com>
#
# Permission to use, copy, modify, and distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

from . import Progress
from .helpers import WritelnMixin


class Bar(WritelnMixin, Progress):
    """Classic text progress bar, e.g. ``msg |####    | 4/10``."""
    width = 32
    message = ''
    suffix = '%(index)d/%(max)d'
    bar_prefix = ' |'
    bar_suffix = '| '
    empty_fill = ' '
    fill = '#'
    hide_cursor = True

    def update(self):
        filled = int(self.width * self.progress)
        vacant = self.width - filled

        # %-formatting with ``self`` as the mapping (Infinite.__getitem__
        # resolves "%(index)d"-style keys to attributes).
        line = ''.join([
            self.message % self,
            self.bar_prefix,
            self.fill * filled,
            self.empty_fill * vacant,
            self.bar_suffix,
            self.suffix % self,
        ])
        self.writeln(line)


class ChargingBar(Bar):
    # Solid-block fill over a dotted track; suffix shows percent complete.
    suffix = '%(percent)d%%'
    bar_prefix = ' '
    bar_suffix = ' '
    empty_fill = u'∙'
    fill = u'█'


class FillingSquaresBar(ChargingBar):
    # Square-outline / filled-square glyph variant.
    empty_fill = u'▢'
    fill = u'▣'


class FillingCirclesBar(ChargingBar):
    # Circle-outline / filled-circle glyph variant.
    empty_fill = u'◯'
    fill = u'◉'


class IncrementalBar(Bar):
    """Bar whose leading cell fills gradually through sub-cell phases."""
    phases = (u' ', u'▏', u'▎', u'▍', u'▌', u'▋', u'▊', u'▉', u'█')

    def update(self):
        nphases = len(self.phases)
        # Progress measured both in sub-cell phase steps and in whole cells;
        # the difference selects the partially-filled glyph.
        sub_units = int(nphases * self.width * self.progress)
        whole_cells = int(self.width * self.progress)
        phase_index = sub_units - whole_cells * nphases

        partial = self.phases[phase_index] if phase_index > 0 else ''
        blank_cells = self.width - whole_cells
        line = ''.join([
            self.message % self,
            self.bar_prefix,
            self.phases[-1] * whole_cells,
            partial,
            self.empty_fill * max(0, blank_cells - len(partial)),
            self.bar_suffix,
            self.suffix % self,
        ])
        self.writeln(line)


class ShadyBar(IncrementalBar):
    # Shade-block glyph variant of IncrementalBar.
    phases = (u' ', u'░', u'▒', u'▓', u'█')
_vendor/progress/counter.py000064400000002736151733136470012117 0ustar00# -*- coding: utf-8 -*-

# Copyright (c) 2012 Giorgos Verigakis <verigak@gmail.com>
#
# Permission to use, copy, modify, and distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

from . import Infinite, Progress
from .helpers import WriteMixin


class Counter(WriteMixin, Infinite):
    # In-place counter: repaints the current iteration index.
    message = ''
    hide_cursor = True

    def update(self):
        """Repaint the display with the current index."""
        self.write(str(self.index))


class Countdown(WriteMixin, Progress):
    # In-place countdown: repaints the number of items still remaining.
    hide_cursor = True

    def update(self):
        """Repaint the display with the remaining count."""
        self.write(str(self.remaining))


class Stack(WriteMixin, Progress):
    # One-character vertical gauge drawn with eighth-block glyphs.
    phases = (u' ', u'▁', u'▂', u'▃', u'▄', u'▅', u'▆', u'▇', u'█')
    hide_cursor = True

    def update(self):
        """Repaint the single glyph matching the current progress ratio."""
        # Map progress in [0, 1] onto a phase index, clamped to the last glyph.
        nphases = len(self.phases)
        i = min(nphases - 1, int(self.progress * nphases))
        self.write(self.phases[i])


class Pie(Stack):
    # Same gauge as Stack, rendered as a filling circle.
    phases = (u'○', u'◔', u'◑', u'◕', u'●')
_vendor/progress/spinner.py000064400000002442151733136470012110 0ustar00# -*- coding: utf-8 -*-

# Copyright (c) 2012 Giorgos Verigakis <verigak@gmail.com>
#
# Permission to use, copy, modify, and distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

from . import Infinite
from .helpers import WriteMixin


class Spinner(WriteMixin, Infinite):
    # Endless one-character spinner; advances one phase per step.
    message = ''
    phases = ('-', '\\', '|', '/')
    hide_cursor = True

    def update(self):
        """Repaint the next spinner glyph."""
        # Wrap the running index around the phase sequence.
        i = self.index % len(self.phases)
        self.write(self.phases[i])


class PieSpinner(Spinner):
    # Quarter-circle glyph variant.
    phases = [u'◷', u'◶', u'◵', u'◴']


class MoonSpinner(Spinner):
    # Moon-phase glyph variant.
    phases = [u'◑', u'◒', u'◐', u'◓']


class LineSpinner(Spinner):
    # Horizontal-bar glyph sweeping down and back up.
    phases = [u'⎺', u'⎻', u'⎼', u'⎽', u'⎼', u'⎻']
_vendor/idna/idnadata.py000064400000100347151733136470011251 0ustar00# This file is automatically generated by tools/idna-data

__version__ = "6.3.0"  # Unicode version this table was generated from
# Script membership ranges used for IDNA contextual rule checks.
# NOTE(review): each entry appears to pack a codepoint range as
# (start << 32) | end, end exclusive (e.g. 0x37000000374 = U+0370..U+0373)
# -- confirm against the tools/idna-data generator before relying on it.
scripts = {
    'Greek': (
        0x37000000374,
        0x37500000378,
        0x37a0000037e,
        0x38400000385,
        0x38600000387,
        0x3880000038b,
        0x38c0000038d,
        0x38e000003a2,
        0x3a3000003e2,
        0x3f000000400,
        0x1d2600001d2b,
        0x1d5d00001d62,
        0x1d6600001d6b,
        0x1dbf00001dc0,
        0x1f0000001f16,
        0x1f1800001f1e,
        0x1f2000001f46,
        0x1f4800001f4e,
        0x1f5000001f58,
        0x1f5900001f5a,
        0x1f5b00001f5c,
        0x1f5d00001f5e,
        0x1f5f00001f7e,
        0x1f8000001fb5,
        0x1fb600001fc5,
        0x1fc600001fd4,
        0x1fd600001fdc,
        0x1fdd00001ff0,
        0x1ff200001ff5,
        0x1ff600001fff,
        0x212600002127,
        0x101400001018b,
        0x1d2000001d246,
    ),
    'Han': (
        0x2e8000002e9a,
        0x2e9b00002ef4,
        0x2f0000002fd6,
        0x300500003006,
        0x300700003008,
        0x30210000302a,
        0x30380000303c,
        0x340000004db6,
        0x4e0000009fcd,
        0xf9000000fa6e,
        0xfa700000fada,
        0x200000002a6d7,
        0x2a7000002b735,
        0x2b7400002b81e,
        0x2f8000002fa1e,
    ),
    'Hebrew': (
        0x591000005c8,
        0x5d0000005eb,
        0x5f0000005f5,
        0xfb1d0000fb37,
        0xfb380000fb3d,
        0xfb3e0000fb3f,
        0xfb400000fb42,
        0xfb430000fb45,
        0xfb460000fb50,
    ),
    'Hiragana': (
        0x304100003097,
        0x309d000030a0,
        0x1b0010001b002,
        0x1f2000001f201,
    ),
    'Katakana': (
        0x30a1000030fb,
        0x30fd00003100,
        0x31f000003200,
        0x32d0000032ff,
        0x330000003358,
        0xff660000ff70,
        0xff710000ff9e,
        0x1b0000001b001,
    ),
}
joining_types = {
    0x600: 85,
    0x601: 85,
    0x602: 85,
    0x603: 85,
    0x604: 85,
    0x608: 85,
    0x60b: 85,
    0x620: 68,
    0x621: 85,
    0x622: 82,
    0x623: 82,
    0x624: 82,
    0x625: 82,
    0x626: 68,
    0x627: 82,
    0x628: 68,
    0x629: 82,
    0x62a: 68,
    0x62b: 68,
    0x62c: 68,
    0x62d: 68,
    0x62e: 68,
    0x62f: 82,
    0x630: 82,
    0x631: 82,
    0x632: 82,
    0x633: 68,
    0x634: 68,
    0x635: 68,
    0x636: 68,
    0x637: 68,
    0x638: 68,
    0x639: 68,
    0x63a: 68,
    0x63b: 68,
    0x63c: 68,
    0x63d: 68,
    0x63e: 68,
    0x63f: 68,
    0x640: 67,
    0x641: 68,
    0x642: 68,
    0x643: 68,
    0x644: 68,
    0x645: 68,
    0x646: 68,
    0x647: 68,
    0x648: 82,
    0x649: 68,
    0x64a: 68,
    0x66e: 68,
    0x66f: 68,
    0x671: 82,
    0x672: 82,
    0x673: 82,
    0x674: 85,
    0x675: 82,
    0x676: 82,
    0x677: 82,
    0x678: 68,
    0x679: 68,
    0x67a: 68,
    0x67b: 68,
    0x67c: 68,
    0x67d: 68,
    0x67e: 68,
    0x67f: 68,
    0x680: 68,
    0x681: 68,
    0x682: 68,
    0x683: 68,
    0x684: 68,
    0x685: 68,
    0x686: 68,
    0x687: 68,
    0x688: 82,
    0x689: 82,
    0x68a: 82,
    0x68b: 82,
    0x68c: 82,
    0x68d: 82,
    0x68e: 82,
    0x68f: 82,
    0x690: 82,
    0x691: 82,
    0x692: 82,
    0x693: 82,
    0x694: 82,
    0x695: 82,
    0x696: 82,
    0x697: 82,
    0x698: 82,
    0x699: 82,
    0x69a: 68,
    0x69b: 68,
    0x69c: 68,
    0x69d: 68,
    0x69e: 68,
    0x69f: 68,
    0x6a0: 68,
    0x6a1: 68,
    0x6a2: 68,
    0x6a3: 68,
    0x6a4: 68,
    0x6a5: 68,
    0x6a6: 68,
    0x6a7: 68,
    0x6a8: 68,
    0x6a9: 68,
    0x6aa: 68,
    0x6ab: 68,
    0x6ac: 68,
    0x6ad: 68,
    0x6ae: 68,
    0x6af: 68,
    0x6b0: 68,
    0x6b1: 68,
    0x6b2: 68,
    0x6b3: 68,
    0x6b4: 68,
    0x6b5: 68,
    0x6b6: 68,
    0x6b7: 68,
    0x6b8: 68,
    0x6b9: 68,
    0x6ba: 68,
    0x6bb: 68,
    0x6bc: 68,
    0x6bd: 68,
    0x6be: 68,
    0x6bf: 68,
    0x6c0: 82,
    0x6c1: 68,
    0x6c2: 68,
    0x6c3: 82,
    0x6c4: 82,
    0x6c5: 82,
    0x6c6: 82,
    0x6c7: 82,
    0x6c8: 82,
    0x6c9: 82,
    0x6ca: 82,
    0x6cb: 82,
    0x6cc: 68,
    0x6cd: 82,
    0x6ce: 68,
    0x6cf: 82,
    0x6d0: 68,
    0x6d1: 68,
    0x6d2: 82,
    0x6d3: 82,
    0x6d5: 82,
    0x6dd: 85,
    0x6ee: 82,
    0x6ef: 82,
    0x6fa: 68,
    0x6fb: 68,
    0x6fc: 68,
    0x6ff: 68,
    0x710: 82,
    0x712: 68,
    0x713: 68,
    0x714: 68,
    0x715: 82,
    0x716: 82,
    0x717: 82,
    0x718: 82,
    0x719: 82,
    0x71a: 68,
    0x71b: 68,
    0x71c: 68,
    0x71d: 68,
    0x71e: 82,
    0x71f: 68,
    0x720: 68,
    0x721: 68,
    0x722: 68,
    0x723: 68,
    0x724: 68,
    0x725: 68,
    0x726: 68,
    0x727: 68,
    0x728: 82,
    0x729: 68,
    0x72a: 82,
    0x72b: 68,
    0x72c: 82,
    0x72d: 68,
    0x72e: 68,
    0x72f: 82,
    0x74d: 82,
    0x74e: 68,
    0x74f: 68,
    0x750: 68,
    0x751: 68,
    0x752: 68,
    0x753: 68,
    0x754: 68,
    0x755: 68,
    0x756: 68,
    0x757: 68,
    0x758: 68,
    0x759: 82,
    0x75a: 82,
    0x75b: 82,
    0x75c: 68,
    0x75d: 68,
    0x75e: 68,
    0x75f: 68,
    0x760: 68,
    0x761: 68,
    0x762: 68,
    0x763: 68,
    0x764: 68,
    0x765: 68,
    0x766: 68,
    0x767: 68,
    0x768: 68,
    0x769: 68,
    0x76a: 68,
    0x76b: 82,
    0x76c: 82,
    0x76d: 68,
    0x76e: 68,
    0x76f: 68,
    0x770: 68,
    0x771: 82,
    0x772: 68,
    0x773: 82,
    0x774: 82,
    0x775: 68,
    0x776: 68,
    0x777: 68,
    0x778: 82,
    0x779: 82,
    0x77a: 68,
    0x77b: 68,
    0x77c: 68,
    0x77d: 68,
    0x77e: 68,
    0x77f: 68,
    0x7ca: 68,
    0x7cb: 68,
    0x7cc: 68,
    0x7cd: 68,
    0x7ce: 68,
    0x7cf: 68,
    0x7d0: 68,
    0x7d1: 68,
    0x7d2: 68,
    0x7d3: 68,
    0x7d4: 68,
    0x7d5: 68,
    0x7d6: 68,
    0x7d7: 68,
    0x7d8: 68,
    0x7d9: 68,
    0x7da: 68,
    0x7db: 68,
    0x7dc: 68,
    0x7dd: 68,
    0x7de: 68,
    0x7df: 68,
    0x7e0: 68,
    0x7e1: 68,
    0x7e2: 68,
    0x7e3: 68,
    0x7e4: 68,
    0x7e5: 68,
    0x7e6: 68,
    0x7e7: 68,
    0x7e8: 68,
    0x7e9: 68,
    0x7ea: 68,
    0x7fa: 67,
    0x840: 82,
    0x841: 68,
    0x842: 68,
    0x843: 68,
    0x844: 68,
    0x845: 68,
    0x846: 82,
    0x847: 68,
    0x848: 68,
    0x849: 82,
    0x84a: 68,
    0x84b: 68,
    0x84c: 68,
    0x84d: 68,
    0x84e: 68,
    0x84f: 82,
    0x850: 68,
    0x851: 68,
    0x852: 68,
    0x853: 68,
    0x854: 82,
    0x855: 68,
    0x856: 85,
    0x857: 85,
    0x858: 85,
    0x8a0: 68,
    0x8a2: 68,
    0x8a3: 68,
    0x8a4: 68,
    0x8a5: 68,
    0x8a6: 68,
    0x8a7: 68,
    0x8a8: 68,
    0x8a9: 68,
    0x8aa: 82,
    0x8ab: 82,
    0x8ac: 82,
    0x1806: 85,
    0x1807: 68,
    0x180a: 67,
    0x180e: 85,
    0x1820: 68,
    0x1821: 68,
    0x1822: 68,
    0x1823: 68,
    0x1824: 68,
    0x1825: 68,
    0x1826: 68,
    0x1827: 68,
    0x1828: 68,
    0x1829: 68,
    0x182a: 68,
    0x182b: 68,
    0x182c: 68,
    0x182d: 68,
    0x182e: 68,
    0x182f: 68,
    0x1830: 68,
    0x1831: 68,
    0x1832: 68,
    0x1833: 68,
    0x1834: 68,
    0x1835: 68,
    0x1836: 68,
    0x1837: 68,
    0x1838: 68,
    0x1839: 68,
    0x183a: 68,
    0x183b: 68,
    0x183c: 68,
    0x183d: 68,
    0x183e: 68,
    0x183f: 68,
    0x1840: 68,
    0x1841: 68,
    0x1842: 68,
    0x1843: 68,
    0x1844: 68,
    0x1845: 68,
    0x1846: 68,
    0x1847: 68,
    0x1848: 68,
    0x1849: 68,
    0x184a: 68,
    0x184b: 68,
    0x184c: 68,
    0x184d: 68,
    0x184e: 68,
    0x184f: 68,
    0x1850: 68,
    0x1851: 68,
    0x1852: 68,
    0x1853: 68,
    0x1854: 68,
    0x1855: 68,
    0x1856: 68,
    0x1857: 68,
    0x1858: 68,
    0x1859: 68,
    0x185a: 68,
    0x185b: 68,
    0x185c: 68,
    0x185d: 68,
    0x185e: 68,
    0x185f: 68,
    0x1860: 68,
    0x1861: 68,
    0x1862: 68,
    0x1863: 68,
    0x1864: 68,
    0x1865: 68,
    0x1866: 68,
    0x1867: 68,
    0x1868: 68,
    0x1869: 68,
    0x186a: 68,
    0x186b: 68,
    0x186c: 68,
    0x186d: 68,
    0x186e: 68,
    0x186f: 68,
    0x1870: 68,
    0x1871: 68,
    0x1872: 68,
    0x1873: 68,
    0x1874: 68,
    0x1875: 68,
    0x1876: 68,
    0x1877: 68,
    0x1880: 85,
    0x1881: 85,
    0x1882: 85,
    0x1883: 85,
    0x1884: 85,
    0x1885: 85,
    0x1886: 85,
    0x1887: 68,
    0x1888: 68,
    0x1889: 68,
    0x188a: 68,
    0x188b: 68,
    0x188c: 68,
    0x188d: 68,
    0x188e: 68,
    0x188f: 68,
    0x1890: 68,
    0x1891: 68,
    0x1892: 68,
    0x1893: 68,
    0x1894: 68,
    0x1895: 68,
    0x1896: 68,
    0x1897: 68,
    0x1898: 68,
    0x1899: 68,
    0x189a: 68,
    0x189b: 68,
    0x189c: 68,
    0x189d: 68,
    0x189e: 68,
    0x189f: 68,
    0x18a0: 68,
    0x18a1: 68,
    0x18a2: 68,
    0x18a3: 68,
    0x18a4: 68,
    0x18a5: 68,
    0x18a6: 68,
    0x18a7: 68,
    0x18a8: 68,
    0x18aa: 68,
    0x200c: 85,
    0x200d: 67,
    0x2066: 85,
    0x2067: 85,
    0x2068: 85,
    0x2069: 85,
    0xa840: 68,
    0xa841: 68,
    0xa842: 68,
    0xa843: 68,
    0xa844: 68,
    0xa845: 68,
    0xa846: 68,
    0xa847: 68,
    0xa848: 68,
    0xa849: 68,
    0xa84a: 68,
    0xa84b: 68,
    0xa84c: 68,
    0xa84d: 68,
    0xa84e: 68,
    0xa84f: 68,
    0xa850: 68,
    0xa851: 68,
    0xa852: 68,
    0xa853: 68,
    0xa854: 68,
    0xa855: 68,
    0xa856: 68,
    0xa857: 68,
    0xa858: 68,
    0xa859: 68,
    0xa85a: 68,
    0xa85b: 68,
    0xa85c: 68,
    0xa85d: 68,
    0xa85e: 68,
    0xa85f: 68,
    0xa860: 68,
    0xa861: 68,
    0xa862: 68,
    0xa863: 68,
    0xa864: 68,
    0xa865: 68,
    0xa866: 68,
    0xa867: 68,
    0xa868: 68,
    0xa869: 68,
    0xa86a: 68,
    0xa86b: 68,
    0xa86c: 68,
    0xa86d: 68,
    0xa86e: 68,
    0xa86f: 68,
    0xa870: 68,
    0xa871: 68,
    0xa872: 76,
    0xa873: 85,
}
codepoint_classes = {
    'PVALID': (
        0x2d0000002e,
        0x300000003a,
        0x610000007b,
        0xdf000000f7,
        0xf800000100,
        0x10100000102,
        0x10300000104,
        0x10500000106,
        0x10700000108,
        0x1090000010a,
        0x10b0000010c,
        0x10d0000010e,
        0x10f00000110,
        0x11100000112,
        0x11300000114,
        0x11500000116,
        0x11700000118,
        0x1190000011a,
        0x11b0000011c,
        0x11d0000011e,
        0x11f00000120,
        0x12100000122,
        0x12300000124,
        0x12500000126,
        0x12700000128,
        0x1290000012a,
        0x12b0000012c,
        0x12d0000012e,
        0x12f00000130,
        0x13100000132,
        0x13500000136,
        0x13700000139,
        0x13a0000013b,
        0x13c0000013d,
        0x13e0000013f,
        0x14200000143,
        0x14400000145,
        0x14600000147,
        0x14800000149,
        0x14b0000014c,
        0x14d0000014e,
        0x14f00000150,
        0x15100000152,
        0x15300000154,
        0x15500000156,
        0x15700000158,
        0x1590000015a,
        0x15b0000015c,
        0x15d0000015e,
        0x15f00000160,
        0x16100000162,
        0x16300000164,
        0x16500000166,
        0x16700000168,
        0x1690000016a,
        0x16b0000016c,
        0x16d0000016e,
        0x16f00000170,
        0x17100000172,
        0x17300000174,
        0x17500000176,
        0x17700000178,
        0x17a0000017b,
        0x17c0000017d,
        0x17e0000017f,
        0x18000000181,
        0x18300000184,
        0x18500000186,
        0x18800000189,
        0x18c0000018e,
        0x19200000193,
        0x19500000196,
        0x1990000019c,
        0x19e0000019f,
        0x1a1000001a2,
        0x1a3000001a4,
        0x1a5000001a6,
        0x1a8000001a9,
        0x1aa000001ac,
        0x1ad000001ae,
        0x1b0000001b1,
        0x1b4000001b5,
        0x1b6000001b7,
        0x1b9000001bc,
        0x1bd000001c4,
        0x1ce000001cf,
        0x1d0000001d1,
        0x1d2000001d3,
        0x1d4000001d5,
        0x1d6000001d7,
        0x1d8000001d9,
        0x1da000001db,
        0x1dc000001de,
        0x1df000001e0,
        0x1e1000001e2,
        0x1e3000001e4,
        0x1e5000001e6,
        0x1e7000001e8,
        0x1e9000001ea,
        0x1eb000001ec,
        0x1ed000001ee,
        0x1ef000001f1,
        0x1f5000001f6,
        0x1f9000001fa,
        0x1fb000001fc,
        0x1fd000001fe,
        0x1ff00000200,
        0x20100000202,
        0x20300000204,
        0x20500000206,
        0x20700000208,
        0x2090000020a,
        0x20b0000020c,
        0x20d0000020e,
        0x20f00000210,
        0x21100000212,
        0x21300000214,
        0x21500000216,
        0x21700000218,
        0x2190000021a,
        0x21b0000021c,
        0x21d0000021e,
        0x21f00000220,
        0x22100000222,
        0x22300000224,
        0x22500000226,
        0x22700000228,
        0x2290000022a,
        0x22b0000022c,
        0x22d0000022e,
        0x22f00000230,
        0x23100000232,
        0x2330000023a,
        0x23c0000023d,
        0x23f00000241,
        0x24200000243,
        0x24700000248,
        0x2490000024a,
        0x24b0000024c,
        0x24d0000024e,
        0x24f000002b0,
        0x2b9000002c2,
        0x2c6000002d2,
        0x2ec000002ed,
        0x2ee000002ef,
        0x30000000340,
        0x34200000343,
        0x3460000034f,
        0x35000000370,
        0x37100000372,
        0x37300000374,
        0x37700000378,
        0x37b0000037e,
        0x39000000391,
        0x3ac000003cf,
        0x3d7000003d8,
        0x3d9000003da,
        0x3db000003dc,
        0x3dd000003de,
        0x3df000003e0,
        0x3e1000003e2,
        0x3e3000003e4,
        0x3e5000003e6,
        0x3e7000003e8,
        0x3e9000003ea,
        0x3eb000003ec,
        0x3ed000003ee,
        0x3ef000003f0,
        0x3f3000003f4,
        0x3f8000003f9,
        0x3fb000003fd,
        0x43000000460,
        0x46100000462,
        0x46300000464,
        0x46500000466,
        0x46700000468,
        0x4690000046a,
        0x46b0000046c,
        0x46d0000046e,
        0x46f00000470,
        0x47100000472,
        0x47300000474,
        0x47500000476,
        0x47700000478,
        0x4790000047a,
        0x47b0000047c,
        0x47d0000047e,
        0x47f00000480,
        0x48100000482,
        0x48300000488,
        0x48b0000048c,
        0x48d0000048e,
        0x48f00000490,
        0x49100000492,
        0x49300000494,
        0x49500000496,
        0x49700000498,
        0x4990000049a,
        0x49b0000049c,
        0x49d0000049e,
        0x49f000004a0,
        0x4a1000004a2,
        0x4a3000004a4,
        0x4a5000004a6,
        0x4a7000004a8,
        0x4a9000004aa,
        0x4ab000004ac,
        0x4ad000004ae,
        0x4af000004b0,
        0x4b1000004b2,
        0x4b3000004b4,
        0x4b5000004b6,
        0x4b7000004b8,
        0x4b9000004ba,
        0x4bb000004bc,
        0x4bd000004be,
        0x4bf000004c0,
        0x4c2000004c3,
        0x4c4000004c5,
        0x4c6000004c7,
        0x4c8000004c9,
        0x4ca000004cb,
        0x4cc000004cd,
        0x4ce000004d0,
        0x4d1000004d2,
        0x4d3000004d4,
        0x4d5000004d6,
        0x4d7000004d8,
        0x4d9000004da,
        0x4db000004dc,
        0x4dd000004de,
        0x4df000004e0,
        0x4e1000004e2,
        0x4e3000004e4,
        0x4e5000004e6,
        0x4e7000004e8,
        0x4e9000004ea,
        0x4eb000004ec,
        0x4ed000004ee,
        0x4ef000004f0,
        0x4f1000004f2,
        0x4f3000004f4,
        0x4f5000004f6,
        0x4f7000004f8,
        0x4f9000004fa,
        0x4fb000004fc,
        0x4fd000004fe,
        0x4ff00000500,
        0x50100000502,
        0x50300000504,
        0x50500000506,
        0x50700000508,
        0x5090000050a,
        0x50b0000050c,
        0x50d0000050e,
        0x50f00000510,
        0x51100000512,
        0x51300000514,
        0x51500000516,
        0x51700000518,
        0x5190000051a,
        0x51b0000051c,
        0x51d0000051e,
        0x51f00000520,
        0x52100000522,
        0x52300000524,
        0x52500000526,
        0x52700000528,
        0x5590000055a,
        0x56100000587,
        0x591000005be,
        0x5bf000005c0,
        0x5c1000005c3,
        0x5c4000005c6,
        0x5c7000005c8,
        0x5d0000005eb,
        0x5f0000005f3,
        0x6100000061b,
        0x62000000640,
        0x64100000660,
        0x66e00000675,
        0x679000006d4,
        0x6d5000006dd,
        0x6df000006e9,
        0x6ea000006f0,
        0x6fa00000700,
        0x7100000074b,
        0x74d000007b2,
        0x7c0000007f6,
        0x8000000082e,
        0x8400000085c,
        0x8a0000008a1,
        0x8a2000008ad,
        0x8e4000008ff,
        0x90000000958,
        0x96000000964,
        0x96600000970,
        0x97100000978,
        0x97900000980,
        0x98100000984,
        0x9850000098d,
        0x98f00000991,
        0x993000009a9,
        0x9aa000009b1,
        0x9b2000009b3,
        0x9b6000009ba,
        0x9bc000009c5,
        0x9c7000009c9,
        0x9cb000009cf,
        0x9d7000009d8,
        0x9e0000009e4,
        0x9e6000009f2,
        0xa0100000a04,
        0xa0500000a0b,
        0xa0f00000a11,
        0xa1300000a29,
        0xa2a00000a31,
        0xa3200000a33,
        0xa3500000a36,
        0xa3800000a3a,
        0xa3c00000a3d,
        0xa3e00000a43,
        0xa4700000a49,
        0xa4b00000a4e,
        0xa5100000a52,
        0xa5c00000a5d,
        0xa6600000a76,
        0xa8100000a84,
        0xa8500000a8e,
        0xa8f00000a92,
        0xa9300000aa9,
        0xaaa00000ab1,
        0xab200000ab4,
        0xab500000aba,
        0xabc00000ac6,
        0xac700000aca,
        0xacb00000ace,
        0xad000000ad1,
        0xae000000ae4,
        0xae600000af0,
        0xb0100000b04,
        0xb0500000b0d,
        0xb0f00000b11,
        0xb1300000b29,
        0xb2a00000b31,
        0xb3200000b34,
        0xb3500000b3a,
        0xb3c00000b45,
        0xb4700000b49,
        0xb4b00000b4e,
        0xb5600000b58,
        0xb5f00000b64,
        0xb6600000b70,
        0xb7100000b72,
        0xb8200000b84,
        0xb8500000b8b,
        0xb8e00000b91,
        0xb9200000b96,
        0xb9900000b9b,
        0xb9c00000b9d,
        0xb9e00000ba0,
        0xba300000ba5,
        0xba800000bab,
        0xbae00000bba,
        0xbbe00000bc3,
        0xbc600000bc9,
        0xbca00000bce,
        0xbd000000bd1,
        0xbd700000bd8,
        0xbe600000bf0,
        0xc0100000c04,
        0xc0500000c0d,
        0xc0e00000c11,
        0xc1200000c29,
        0xc2a00000c34,
        0xc3500000c3a,
        0xc3d00000c45,
        0xc4600000c49,
        0xc4a00000c4e,
        0xc5500000c57,
        0xc5800000c5a,
        0xc6000000c64,
        0xc6600000c70,
        0xc8200000c84,
        0xc8500000c8d,
        0xc8e00000c91,
        0xc9200000ca9,
        0xcaa00000cb4,
        0xcb500000cba,
        0xcbc00000cc5,
        0xcc600000cc9,
        0xcca00000cce,
        0xcd500000cd7,
        0xcde00000cdf,
        0xce000000ce4,
        0xce600000cf0,
        0xcf100000cf3,
        0xd0200000d04,
        0xd0500000d0d,
        0xd0e00000d11,
        0xd1200000d3b,
        0xd3d00000d45,
        0xd4600000d49,
        0xd4a00000d4f,
        0xd5700000d58,
        0xd6000000d64,
        0xd6600000d70,
        0xd7a00000d80,
        0xd8200000d84,
        0xd8500000d97,
        0xd9a00000db2,
        0xdb300000dbc,
        0xdbd00000dbe,
        0xdc000000dc7,
        0xdca00000dcb,
        0xdcf00000dd5,
        0xdd600000dd7,
        0xdd800000de0,
        0xdf200000df4,
        0xe0100000e33,
        0xe3400000e3b,
        0xe4000000e4f,
        0xe5000000e5a,
        0xe8100000e83,
        0xe8400000e85,
        0xe8700000e89,
        0xe8a00000e8b,
        0xe8d00000e8e,
        0xe9400000e98,
        0xe9900000ea0,
        0xea100000ea4,
        0xea500000ea6,
        0xea700000ea8,
        0xeaa00000eac,
        0xead00000eb3,
        0xeb400000eba,
        0xebb00000ebe,
        0xec000000ec5,
        0xec600000ec7,
        0xec800000ece,
        0xed000000eda,
        0xede00000ee0,
        0xf0000000f01,
        0xf0b00000f0c,
        0xf1800000f1a,
        0xf2000000f2a,
        0xf3500000f36,
        0xf3700000f38,
        0xf3900000f3a,
        0xf3e00000f43,
        0xf4400000f48,
        0xf4900000f4d,
        0xf4e00000f52,
        0xf5300000f57,
        0xf5800000f5c,
        0xf5d00000f69,
        0xf6a00000f6d,
        0xf7100000f73,
        0xf7400000f75,
        0xf7a00000f81,
        0xf8200000f85,
        0xf8600000f93,
        0xf9400000f98,
        0xf9900000f9d,
        0xf9e00000fa2,
        0xfa300000fa7,
        0xfa800000fac,
        0xfad00000fb9,
        0xfba00000fbd,
        0xfc600000fc7,
        0x10000000104a,
        0x10500000109e,
        0x10d0000010fb,
        0x10fd00001100,
        0x120000001249,
        0x124a0000124e,
        0x125000001257,
        0x125800001259,
        0x125a0000125e,
        0x126000001289,
        0x128a0000128e,
        0x1290000012b1,
        0x12b2000012b6,
        0x12b8000012bf,
        0x12c0000012c1,
        0x12c2000012c6,
        0x12c8000012d7,
        0x12d800001311,
        0x131200001316,
        0x13180000135b,
        0x135d00001360,
        0x138000001390,
        0x13a0000013f5,
        0x14010000166d,
        0x166f00001680,
        0x16810000169b,
        0x16a0000016eb,
        0x17000000170d,
        0x170e00001715,
        0x172000001735,
        0x174000001754,
        0x17600000176d,
        0x176e00001771,
        0x177200001774,
        0x1780000017b4,
        0x17b6000017d4,
        0x17d7000017d8,
        0x17dc000017de,
        0x17e0000017ea,
        0x18100000181a,
        0x182000001878,
        0x1880000018ab,
        0x18b0000018f6,
        0x19000000191d,
        0x19200000192c,
        0x19300000193c,
        0x19460000196e,
        0x197000001975,
        0x1980000019ac,
        0x19b0000019ca,
        0x19d0000019da,
        0x1a0000001a1c,
        0x1a2000001a5f,
        0x1a6000001a7d,
        0x1a7f00001a8a,
        0x1a9000001a9a,
        0x1aa700001aa8,
        0x1b0000001b4c,
        0x1b5000001b5a,
        0x1b6b00001b74,
        0x1b8000001bf4,
        0x1c0000001c38,
        0x1c4000001c4a,
        0x1c4d00001c7e,
        0x1cd000001cd3,
        0x1cd400001cf7,
        0x1d0000001d2c,
        0x1d2f00001d30,
        0x1d3b00001d3c,
        0x1d4e00001d4f,
        0x1d6b00001d78,
        0x1d7900001d9b,
        0x1dc000001de7,
        0x1dfc00001e00,
        0x1e0100001e02,
        0x1e0300001e04,
        0x1e0500001e06,
        0x1e0700001e08,
        0x1e0900001e0a,
        0x1e0b00001e0c,
        0x1e0d00001e0e,
        0x1e0f00001e10,
        0x1e1100001e12,
        0x1e1300001e14,
        0x1e1500001e16,
        0x1e1700001e18,
        0x1e1900001e1a,
        0x1e1b00001e1c,
        0x1e1d00001e1e,
        0x1e1f00001e20,
        0x1e2100001e22,
        0x1e2300001e24,
        0x1e2500001e26,
        0x1e2700001e28,
        0x1e2900001e2a,
        0x1e2b00001e2c,
        0x1e2d00001e2e,
        0x1e2f00001e30,
        0x1e3100001e32,
        0x1e3300001e34,
        0x1e3500001e36,
        0x1e3700001e38,
        0x1e3900001e3a,
        0x1e3b00001e3c,
        0x1e3d00001e3e,
        0x1e3f00001e40,
        0x1e4100001e42,
        0x1e4300001e44,
        0x1e4500001e46,
        0x1e4700001e48,
        0x1e4900001e4a,
        0x1e4b00001e4c,
        0x1e4d00001e4e,
        0x1e4f00001e50,
        0x1e5100001e52,
        0x1e5300001e54,
        0x1e5500001e56,
        0x1e5700001e58,
        0x1e5900001e5a,
        0x1e5b00001e5c,
        0x1e5d00001e5e,
        0x1e5f00001e60,
        0x1e6100001e62,
        0x1e6300001e64,
        0x1e6500001e66,
        0x1e6700001e68,
        0x1e6900001e6a,
        0x1e6b00001e6c,
        0x1e6d00001e6e,
        0x1e6f00001e70,
        0x1e7100001e72,
        0x1e7300001e74,
        0x1e7500001e76,
        0x1e7700001e78,
        0x1e7900001e7a,
        0x1e7b00001e7c,
        0x1e7d00001e7e,
        0x1e7f00001e80,
        0x1e8100001e82,
        0x1e8300001e84,
        0x1e8500001e86,
        0x1e8700001e88,
        0x1e8900001e8a,
        0x1e8b00001e8c,
        0x1e8d00001e8e,
        0x1e8f00001e90,
        0x1e9100001e92,
        0x1e9300001e94,
        0x1e9500001e9a,
        0x1e9c00001e9e,
        0x1e9f00001ea0,
        0x1ea100001ea2,
        0x1ea300001ea4,
        0x1ea500001ea6,
        0x1ea700001ea8,
        0x1ea900001eaa,
        0x1eab00001eac,
        0x1ead00001eae,
        0x1eaf00001eb0,
        0x1eb100001eb2,
        0x1eb300001eb4,
        0x1eb500001eb6,
        0x1eb700001eb8,
        0x1eb900001eba,
        0x1ebb00001ebc,
        0x1ebd00001ebe,
        0x1ebf00001ec0,
        0x1ec100001ec2,
        0x1ec300001ec4,
        0x1ec500001ec6,
        0x1ec700001ec8,
        0x1ec900001eca,
        0x1ecb00001ecc,
        0x1ecd00001ece,
        0x1ecf00001ed0,
        0x1ed100001ed2,
        0x1ed300001ed4,
        0x1ed500001ed6,
        0x1ed700001ed8,
        0x1ed900001eda,
        0x1edb00001edc,
        0x1edd00001ede,
        0x1edf00001ee0,
        0x1ee100001ee2,
        0x1ee300001ee4,
        0x1ee500001ee6,
        0x1ee700001ee8,
        0x1ee900001eea,
        0x1eeb00001eec,
        0x1eed00001eee,
        0x1eef00001ef0,
        0x1ef100001ef2,
        0x1ef300001ef4,
        0x1ef500001ef6,
        0x1ef700001ef8,
        0x1ef900001efa,
        0x1efb00001efc,
        0x1efd00001efe,
        0x1eff00001f08,
        0x1f1000001f16,
        0x1f2000001f28,
        0x1f3000001f38,
        0x1f4000001f46,
        0x1f5000001f58,
        0x1f6000001f68,
        0x1f7000001f71,
        0x1f7200001f73,
        0x1f7400001f75,
        0x1f7600001f77,
        0x1f7800001f79,
        0x1f7a00001f7b,
        0x1f7c00001f7d,
        0x1fb000001fb2,
        0x1fb600001fb7,
        0x1fc600001fc7,
        0x1fd000001fd3,
        0x1fd600001fd8,
        0x1fe000001fe3,
        0x1fe400001fe8,
        0x1ff600001ff7,
        0x214e0000214f,
        0x218400002185,
        0x2c3000002c5f,
        0x2c6100002c62,
        0x2c6500002c67,
        0x2c6800002c69,
        0x2c6a00002c6b,
        0x2c6c00002c6d,
        0x2c7100002c72,
        0x2c7300002c75,
        0x2c7600002c7c,
        0x2c8100002c82,
        0x2c8300002c84,
        0x2c8500002c86,
        0x2c8700002c88,
        0x2c8900002c8a,
        0x2c8b00002c8c,
        0x2c8d00002c8e,
        0x2c8f00002c90,
        0x2c9100002c92,
        0x2c9300002c94,
        0x2c9500002c96,
        0x2c9700002c98,
        0x2c9900002c9a,
        0x2c9b00002c9c,
        0x2c9d00002c9e,
        0x2c9f00002ca0,
        0x2ca100002ca2,
        0x2ca300002ca4,
        0x2ca500002ca6,
        0x2ca700002ca8,
        0x2ca900002caa,
        0x2cab00002cac,
        0x2cad00002cae,
        0x2caf00002cb0,
        0x2cb100002cb2,
        0x2cb300002cb4,
        0x2cb500002cb6,
        0x2cb700002cb8,
        0x2cb900002cba,
        0x2cbb00002cbc,
        0x2cbd00002cbe,
        0x2cbf00002cc0,
        0x2cc100002cc2,
        0x2cc300002cc4,
        0x2cc500002cc6,
        0x2cc700002cc8,
        0x2cc900002cca,
        0x2ccb00002ccc,
        0x2ccd00002cce,
        0x2ccf00002cd0,
        0x2cd100002cd2,
        0x2cd300002cd4,
        0x2cd500002cd6,
        0x2cd700002cd8,
        0x2cd900002cda,
        0x2cdb00002cdc,
        0x2cdd00002cde,
        0x2cdf00002ce0,
        0x2ce100002ce2,
        0x2ce300002ce5,
        0x2cec00002ced,
        0x2cee00002cf2,
        0x2cf300002cf4,
        0x2d0000002d26,
        0x2d2700002d28,
        0x2d2d00002d2e,
        0x2d3000002d68,
        0x2d7f00002d97,
        0x2da000002da7,
        0x2da800002daf,
        0x2db000002db7,
        0x2db800002dbf,
        0x2dc000002dc7,
        0x2dc800002dcf,
        0x2dd000002dd7,
        0x2dd800002ddf,
        0x2de000002e00,
        0x2e2f00002e30,
        0x300500003008,
        0x302a0000302e,
        0x303c0000303d,
        0x304100003097,
        0x30990000309b,
        0x309d0000309f,
        0x30a1000030fb,
        0x30fc000030ff,
        0x31050000312e,
        0x31a0000031bb,
        0x31f000003200,
        0x340000004db6,
        0x4e0000009fcd,
        0xa0000000a48d,
        0xa4d00000a4fe,
        0xa5000000a60d,
        0xa6100000a62c,
        0xa6410000a642,
        0xa6430000a644,
        0xa6450000a646,
        0xa6470000a648,
        0xa6490000a64a,
        0xa64b0000a64c,
        0xa64d0000a64e,
        0xa64f0000a650,
        0xa6510000a652,
        0xa6530000a654,
        0xa6550000a656,
        0xa6570000a658,
        0xa6590000a65a,
        0xa65b0000a65c,
        0xa65d0000a65e,
        0xa65f0000a660,
        0xa6610000a662,
        0xa6630000a664,
        0xa6650000a666,
        0xa6670000a668,
        0xa6690000a66a,
        0xa66b0000a66c,
        0xa66d0000a670,
        0xa6740000a67e,
        0xa67f0000a680,
        0xa6810000a682,
        0xa6830000a684,
        0xa6850000a686,
        0xa6870000a688,
        0xa6890000a68a,
        0xa68b0000a68c,
        0xa68d0000a68e,
        0xa68f0000a690,
        0xa6910000a692,
        0xa6930000a694,
        0xa6950000a696,
        0xa6970000a698,
        0xa69f0000a6e6,
        0xa6f00000a6f2,
        0xa7170000a720,
        0xa7230000a724,
        0xa7250000a726,
        0xa7270000a728,
        0xa7290000a72a,
        0xa72b0000a72c,
        0xa72d0000a72e,
        0xa72f0000a732,
        0xa7330000a734,
        0xa7350000a736,
        0xa7370000a738,
        0xa7390000a73a,
        0xa73b0000a73c,
        0xa73d0000a73e,
        0xa73f0000a740,
        0xa7410000a742,
        0xa7430000a744,
        0xa7450000a746,
        0xa7470000a748,
        0xa7490000a74a,
        0xa74b0000a74c,
        0xa74d0000a74e,
        0xa74f0000a750,
        0xa7510000a752,
        0xa7530000a754,
        0xa7550000a756,
        0xa7570000a758,
        0xa7590000a75a,
        0xa75b0000a75c,
        0xa75d0000a75e,
        0xa75f0000a760,
        0xa7610000a762,
        0xa7630000a764,
        0xa7650000a766,
        0xa7670000a768,
        0xa7690000a76a,
        0xa76b0000a76c,
        0xa76d0000a76e,
        0xa76f0000a770,
        0xa7710000a779,
        0xa77a0000a77b,
        0xa77c0000a77d,
        0xa77f0000a780,
        0xa7810000a782,
        0xa7830000a784,
        0xa7850000a786,
        0xa7870000a789,
        0xa78c0000a78d,
        0xa78e0000a78f,
        0xa7910000a792,
        0xa7930000a794,
        0xa7a10000a7a2,
        0xa7a30000a7a4,
        0xa7a50000a7a6,
        0xa7a70000a7a8,
        0xa7a90000a7aa,
        0xa7fa0000a828,
        0xa8400000a874,
        0xa8800000a8c5,
        0xa8d00000a8da,
        0xa8e00000a8f8,
        0xa8fb0000a8fc,
        0xa9000000a92e,
        0xa9300000a954,
        0xa9800000a9c1,
        0xa9cf0000a9da,
        0xaa000000aa37,
        0xaa400000aa4e,
        0xaa500000aa5a,
        0xaa600000aa77,
        0xaa7a0000aa7c,
        0xaa800000aac3,
        0xaadb0000aade,
        0xaae00000aaf0,
        0xaaf20000aaf7,
        0xab010000ab07,
        0xab090000ab0f,
        0xab110000ab17,
        0xab200000ab27,
        0xab280000ab2f,
        0xabc00000abeb,
        0xabec0000abee,
        0xabf00000abfa,
        0xac000000d7a4,
        0xfa0e0000fa10,
        0xfa110000fa12,
        0xfa130000fa15,
        0xfa1f0000fa20,
        0xfa210000fa22,
        0xfa230000fa25,
        0xfa270000fa2a,
        0xfb1e0000fb1f,
        0xfe200000fe27,
        0xfe730000fe74,
        0x100000001000c,
        0x1000d00010027,
        0x100280001003b,
        0x1003c0001003e,
        0x1003f0001004e,
        0x100500001005e,
        0x10080000100fb,
        0x101fd000101fe,
        0x102800001029d,
        0x102a0000102d1,
        0x103000001031f,
        0x1033000010341,
        0x103420001034a,
        0x103800001039e,
        0x103a0000103c4,
        0x103c8000103d0,
        0x104280001049e,
        0x104a0000104aa,
        0x1080000010806,
        0x1080800010809,
        0x1080a00010836,
        0x1083700010839,
        0x1083c0001083d,
        0x1083f00010856,
        0x1090000010916,
        0x109200001093a,
        0x10980000109b8,
        0x109be000109c0,
        0x10a0000010a04,
        0x10a0500010a07,
        0x10a0c00010a14,
        0x10a1500010a18,
        0x10a1900010a34,
        0x10a3800010a3b,
        0x10a3f00010a40,
        0x10a6000010a7d,
        0x10b0000010b36,
        0x10b4000010b56,
        0x10b6000010b73,
        0x10c0000010c49,
        0x1100000011047,
        0x1106600011070,
        0x11080000110bb,
        0x110d0000110e9,
        0x110f0000110fa,
        0x1110000011135,
        0x1113600011140,
        0x11180000111c5,
        0x111d0000111da,
        0x11680000116b8,
        0x116c0000116ca,
        0x120000001236f,
        0x130000001342f,
        0x1680000016a39,
        0x16f0000016f45,
        0x16f5000016f7f,
        0x16f8f00016fa0,
        0x1b0000001b002,
        0x200000002a6d7,
        0x2a7000002b735,
        0x2b7400002b81e,
    ),
    'CONTEXTJ': (
        0x200c0000200e,
    ),
    'CONTEXTO': (
        0xb7000000b8,
        0x37500000376,
        0x5f3000005f5,
        0x6600000066a,
        0x6f0000006fa,
        0x30fb000030fc,
    ),
}
_vendor/idna/__pycache__/intranges.cpython-36.pyc000064400000003317151733136470015761 0ustar003

�Pf��@s0dZddlZdd�Zdd�Zdd�Zd	d
�ZdS)a	
Given a list of integers, made up of (hopefully) a small number of long runs
of consecutive integers, compute a representation of the form
((start1, end1), (start2, end2) ...). Then answer the question "was x present
in the original list?" in time O(log(# runs)).
�NcCs�t|�}g}d}xrtt|��D]b}|dt|�krL||||ddkrLq||d|d�}|jt|d|dd��|}qWt|�S)aRepresent a list of integers as a sequence of ranges:
    ((start_0, end_0), (start_1, end_1), ...), such that the original
    integers are exactly those x such that start_i <= x < end_i for some i.

    Ranges are encoded as single integers (start << 32 | end), not as tuples.
    �r���r)�sorted�range�len�append�
_encode_range�tuple)Zlist_Zsorted_list�rangesZ
last_write�iZ
current_range�r�/usr/lib/python3.6/intranges.py�intranges_from_list
srcCs|d>|BS)N� r)�start�endrrr
rsrcCs|d?|d@fS)Nrrll��r)�rrrr
�
_decode_range"srcCszt|d�}tj||�}|dkrNt||d�\}}||koD|knrNdS|t|�krvt||�\}}||krvdSdS)z=Determine if `int_` falls into one of the ranges in `ranges`.rrTF)r�bisectZbisect_leftrr)Zint_r
Ztuple_�pos�left�right�_rrr
�intranges_contain&s
r)�__doc__rrrrrrrrr
�<module>s
_vendor/idna/__pycache__/uts46data.cpython-36.opt-1.pyc000064400000671047151733136470016560 0ustar003

�Pfp��@sdZdZdd�Zdd�Zdd�Zdd	�Zd
d�Zdd
�Zdd�Zdd�Z	dd�Z
dd�Zdd�Zdd�Z
dd�Zdd�Zdd�Zd d!�Zd"d#�Zd$d%�Zd&d'�Zd(d)�Zd*d+�Zd,d-�Zd.d/�Zd0d1�Zd2d3�Zd4d5�Zd6d7�Zd8d9�Zd:d;�Zd<d=�Zd>d?�Z d@dA�Z!dBdC�Z"dDdE�Z#dFdG�Z$dHdI�Z%dJdK�Z&dLdM�Z'dNdO�Z(dPdQ�Z)dRdS�Z*dTdU�Z+dVdW�Z,dXdY�Z-dZd[�Z.d\d]�Z/d^d_�Z0d`da�Z1dbdc�Z2ddde�Z3dfdg�Z4dhdi�Z5djdk�Z6dldm�Z7dndo�Z8dpdq�Z9drds�Z:dtdu�Z;dvdw�Z<dxdy�Z=dzd{�Z>d|d}�Z?d~d�Z@d�d��ZAd�d��ZBd�d��ZCd�d��ZDd�d��ZEd�d��ZFd�d��ZGd�d��ZHd�d��ZId�d��ZJeKe�e�e�e�e�e�e�e	�e
�e�e�e
�e�e�e�e�e�e�e�e�e�e�e�e�e�e�e�e�e�e�e �e!�e"�e#�e$�e%�e&�e'�e(�e)�e*�e+�e,�e-�e.�e/�e0�e1�e2�e3�e4�e5�e6�e7�e8�e9�e:�e;�e<�e=�e>�e?�e@�eA�eB�eC�eD�eE�eF�eG�eH�eI�eJ��ZLd�S)�zIDNA Mapping Table from UTS46.z6.3.0ceCs�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�gdS)�N��3���������	�
���
������������������� �!�"�#�$�%�&�'�(�)�*�+�,�-�V�.�/�0�1�2�3�4�5�6�7�8�9�:�;�<�=�>�?�@�A�M�a�B�b�C�c�D�d�E�e�F�f�G�g�H�h�I�i�J�j�K�k�L�l�M�m�N�n�O�o�P�p�Q�q�R�r�S�s�T�t�U�u�V�v�W�w�X�x�Y�y�Z�z�[�\�]�^�_�`�a�b�c)rr)rr)rr)rr)rr)rr)rr)r	r)r
r)rr)rr)r
r)rr)rr)rr)rr)rr)rr)rr)rr)rr)rr)rr)rr)rr)rr)rr)rr)rr)rr)r r)r!r)r"r)r#r)r$r)r%r)r&r)r'r)r(r)r)r)r*r)r+r)r,r)r-r)r.r)r/r0)r1r0)r2r)r3r0)r4r0)r5r0)r6r0)r7r0)r8r0)r9r0)r:r0)r;r0)r<r0)r=r)r>r)r?r)r@r)rAr)rBr)rCr)rDrErF)rGrErH)rIrErJ)rKrErL)rMrErN)rOrErP)rQrErR)rSrErT)rUrErV)rWrErX)rYrErZ)r[rEr\)r]rEr^)r_rEr`)rarErb)rcrErd)rerErf)rgrErh)rirErj)rkrErl)rmrErn)rorErp)rqrErr)rsrErt)rurErv)rwrErx)ryr)rzr)r{r)r|r)r}r)r~r)rr0)r�r0)r�r0�r�r�r��/usr/lib/python3.6/uts46data.py�_seg_0s�r�cfCs�dd�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�gdS)�N�dr0�e�f�g�h�i�j�k�l�m�n�o�p�q�r�s�t�u�v�w�x�y�z�{r�|�}�~���X��������������������������������� ��������� ̈��rErF����I��� ̄����2��� ́��μ���� ̧��1�rb���1⁄4��1⁄2��3⁄4���à��á���â���ã���ä���å���æ���ç)r�r0)r�r0)r�r0)r�r0)r�r0)r�r0)r�r0)r�r0)r�r0)r�r0)r�r0)r�r0)r�r0)r�r0)r�r0)r�r0)r�r0)r�r0)r�r0)r�r0)r�r0)r�r0)r�r0)r�r)r�r)r�r)r�r)r�r)r�r�)r�r�)r�r�)r�r�)r�r�)r�r�)r�r�)r�r�)r�r�)r�r�)r�r�)r�r�)r�r�)r�r�)r�r�)r�r�)r�r�)r�r�)r�r�)r�r�)r�r�)r�r�)r�r�)r�r�)r�r�)r�r�)r�r�)r�r�)r�r�)r�r�)r�r�)r�r�)r�rr�)r�r0)r�r0)r�r0)r�r0)r�r0)r�r0)r�r0)r�rr�)r�r0)r�rErF)r�r0)r�r0)r�r�)r�r0)r�rr�)r�r0)r�r0)r�rEr�)r�rEr)r�rr�)r�rEr�)r�r0)r�r0)r�rr�)r�rEr�)r�rErb)r�r0)r�rEr�)r�rEr�)r�rEr�)r�r0)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�r�r�r�r�r��_seg_1ps�r�ceCs�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�gdS)�N��rE�è���é���ê���ë���ì���í���î���ï���ð���ñ���ò���ó���ô���õ���ö��r0���ø���ù���ú���û���ü���ý���þ���D�ss�������������������������������������������������������ā���ă���ą���ć���ĉ�	�
�ċ���č�
��ď���đ���ē���ĕ���ė���ę���ě���ĝ���ğ�� �ġ�!�"�ģ�#�$�ĥ�%�&�ħ�'�(�ĩ�)�*�ī�+)r�rEr)rrEr)rrEr)rrEr)rrEr)r	rEr
)rrEr)r
rEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rr0)rrEr)r rEr!)r"rEr#)r$rEr%)r&rEr')r(rEr))r*rEr+)r,r-r.)r/r0)r0r0)r1r0)r2r0)r3r0)r4r0)r5r0)r6r0)r7r0)r8r0)r9r0)r:r0)r;r0)r<r0)r=r0)r>r0)r?r0)r@r0)rAr0)rBr0)rCr0)rDr0)rEr0)rFr0)rGr0)rHr0)rIr0)rJr0)rKr0)rLr0)rMr0)rNr0)rOrErP)rQr0)rRrErS)rTr0)rUrErV)rWr0)rXrErY)rZr0)r[rEr\)r]r0)r^rEr_)r`r0)rarErb)rcr0)rdrEre)rfr0)rgrErh)rir0)rjrErk)rlr0)rmrErn)ror0)rprErq)rrr0)rsrErt)rur0)rvrErw)rxr0)ryrErz)r{r0)r|rEr})r~r0)rrEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0r�r�r�r�r��_seg_2�s�r�cfCs�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�dgdS(N�,rE�ĭ�-r0�.�į�/�0�i̇�1�2�ij�4�ĵ�5�6�ķ�7�9�ĺ�:�;�ļ�<�=�ľ�>�?�l·�A�ł�B�C�ń�D�E�ņ�F�G�ň�H�I�ʼn�J�ŋ�K�L�ō�M�N�ŏ�O�P�ő�Q�R�œ�S�T�ŕ�U�V�ŗ�W�X�ř�Y�Z�ś�[�\�ŝ�]�^�ş�_�`�š�a�b�ţ�c�d�ť�e�f�ŧ�g�h�ũ�i�j�ū�k�l�ŭ�m�n�ů�o�p�ű�q�r�ų�s�t�ŵ�u�v�ŷ�w�x�ÿ�y�ź�z�{�ż�|�}�ž�~�rj���ɓ��ƃ���ƅ���ɔ��ƈ���ɖ��ɗ��ƌ���ǝ��ə��ɛ��ƒ���ɠ)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)rr0)rrEr)rrEr)rr0)rrEr)rr0)r	rEr
)rr0)rrErj)r
r0)rrEr)rrEr)rr0)rrEr)rr0)rrEr)rrEr)rr0)rrEr)rrEr)rrEr )r!r0)r"rEr#)r$rEr%)r&rEr')r(rEr))r*r0)r+rEr,r�r�r�r�r��_seg_3@s�r-cfCs�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�dgdS(N�rE�ɣ�r0��ɩ��ɨ��ƙ���ɯ��ɲ���ɵ��ơ���ƣ���ƥ���ʀ��ƨ���ʃ���ƭ���ʈ��ư���ʊ��ʋ��ƴ���ƶ���ʒ��ƹ���ƽ����dž���lj���nj���ǎ�����ǐ�����ǒ�����ǔ�����ǖ�����ǘ�����ǚ�����ǜ�����ǟ�����ǡ�����ǣ�����ǥ�����ǧ�����ǩ�����ǫ�����ǭ�����ǯ�����dz���ǵ���ƕ��ƿ��ǹ���ǻ���ǽ���ǿ���ȁ���ȃ���ȅ���ȇ���ȉ�	�
�ȋ���ȍ)r.rEr/)r0r0)r1rEr2)r3rEr4)r5rEr6)r7r0)r8rEr9)r:rEr;)r<r0)r=rEr>)r?rEr@)rAr0)rBrErC)rDr0)rErErF)rGr0)rHrErI)rJrErK)rLr0)rMrErN)rOr0)rPrErQ)rRr0)rSrErT)rUrErV)rWr0)rXrErY)rZrEr[)r\rEr])r^r0)r_rEr`)rar0)rbrErc)rdrEre)rfr0)rgrErh)rir0)rjrErk)rlrErm)rnrEro)rprErq)rrr0)rsrErt)rur0)rvrErw)rxr0)ryrErz)r{r0)r|rEr})r~r0)rrEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�rEr�)r�r0)r�rEr�)r�rEr�)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�r�r�r�r�r��_seg_4�s�r�ceCs�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�d�d�d�d�d	gdS(
N�
r0�rE�ȏ���ȑ���ȓ���ȕ���ȗ���ș���ț���ȝ���ȟ�� �ƞ�!�"�ȣ�#�$�ȥ�%�&�ȧ�'�(�ȩ�)�*�ȫ�+�,�ȭ�-�.�ȯ�/�0�ȱ�1�2�ȳ�3�:�ⱥ�;�ȼ�<�=�ƚ�>�ⱦ�?�A�ɂ�B�C�ƀ�D�ʉ�E�ʌ�F�ɇ�G�H�ɉ�I�J�ɋ�K�L�ɍ�M�N�ɏ�O�rT��ɦ�rX�rh��ɹ��ɻ��ʁ�rr�rv���r� ̆��� ̇��� ̊��� ̨��� ̃��� ̋�����ɣ��r\��rj��rt���ʕ���@�̀�A�́�B�C�̓�D�̈́�E�ι�F�Or��P�p�ͱ�q�r�ͳ�s�t�ʹ�u�v�ͷ�w)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)rr0)rrEr)rr0)rrEr)rr0)rrEr)r	rEr
)rr0)rrEr
)rrEr)rr0)rrEr)rr0)rrEr)rrEr)rrEr)rrEr)rr0)rrEr)rr0)r rEr!)r"r0)r#rEr$)r%r0)r&rEr')r(r0)r)rErT)r*rEr+)r,rErX)r-rErh)r.rEr/)r0rEr1)r2rEr3)r4rErr)r5rErv)r6r0)r7rr8)r9rr:)r;rr<)r=rr>)r?rr@)rArrB)rCr0)rDrErE)rFrEr\)rGrErj)rHrErt)rIrErJ)rKr0)rLrErM)rNrErO)rPr0)rQrErR)rSrErT)rUrErV)rWr0)rXr�)rYr0)rZrEr[)r\r0)r]rEr^)r_r0)r`rEra)rbr0)rcrErd)rer0r�r�r�r�r��_seg_5s�rfcfCs�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�d�d�d�d�d	gdS(
N�xr��zr� ι�{r0�~�;��� ́�� ̈́�rE�ά��·��έ��ή��ί���ό���ύ��ώ���α��β��γ��δ��ε��ζ��η��θ��ι��κ��λ��μ��ν��ξ��ο��π��ρ���σ��τ��υ��φ��χ��ψ��ω��ϊ��ϋ���r-�����ϗ�������������������ϙ�����ϛ�����ϝ�����ϟ�����ϡ�����ϣ�����ϥ�����ϧ�����ϩ�����ϫ�����ϭ�����ϯ����������������ϸ����ϻ���ͻ��ͼ��ͽ��ѐ��ё��ђ��ѓ)rgr�)rhrri)rjr0)rkrrl)rmr�)rnrro)rprrq)rrrErs)rtrEru)rvrErw)rxrEry)rzrEr{)r|r�)r}rEr~)rr�)r�rEr�)r�rEr�)r�r0)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�r�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�r0)r�r-r�)r�r0)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�rEr�)r�rEr�)r�r0)r�rEr�)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�rEr�)r�r0)r�rEr�)r�rEr�)r�rEr�)r�rEr)rrEr)rrEr)rrErr�r�r�r�r��_seg_6xs�rcfCs�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�d�d�d�d�d	�d
�d�d�d
�d�d�d�d�dgdS(N�rE�є��ѕ��і��ї��ј�	�љ�
�њ��ћ��ќ�
�ѝ��ў��џ��а��б��в��г��д��е��ж��з��и��й��к��л��м��н��о��п� �р�!�с�"�т�#�у�$�ф�%�х�&�ц�'�ч�(�ш�)�щ�*�ъ�+�ы�,�ь�-�э�.�ю�/�я�0r0�`�ѡ�a�b�ѣ�c�d�ѥ�e�f�ѧ�g�h�ѩ�i�j�ѫ�k�l�ѭ�m�n�ѯ�o�p�ѱ�q�r�ѳ�s�t�ѵ�u�v�ѷ�w�x�ѹ�y�z�ѻ�{�|�ѽ�}�~�ѿ���ҁ���ҋ���ҍ���ҏ���ґ���ғ���ҕ���җ���ҙ���қ���ҝ���ҟ)rrEr	)r
rEr)rrEr
)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)r rEr!)r"rEr#)r$rEr%)r&rEr')r(rEr))r*rEr+)r,rEr-)r.rEr/)r0rEr1)r2rEr3)r4rEr5)r6rEr7)r8rEr9)r:rEr;)r<rEr=)r>rEr?)r@rErA)rBrErC)rDrErE)rFrErG)rHrErI)rJrErK)rLrErM)rNrErO)rPrErQ)rRrErS)rTrErU)rVrErW)rXrErY)rZrEr[)r\rEr])r^rEr_)r`r0)rarErb)rcr0)rdrEre)rfr0)rgrErh)rir0)rjrErk)rlr0)rmrErn)ror0)rprErq)rrr0)rsrErt)rur0)rvrErw)rxr0)ryrErz)r{r0)r|rEr})r~r0)rrEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�r�r�r�r�r��_seg_7�s�r�ceCs�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�gdS)�N�r0�rE�ҡ���ң���ҥ���ҧ���ҩ���ҫ���ҭ���ү���ұ���ҳ���ҵ���ҷ���ҹ���һ���ҽ���ҿ��r���ӂ�����ӄ�����ӆ�����ӈ�����ӊ�����ӌ�����ӎ�����ӑ�����ӓ�����ӕ�����ӗ�����ә�����ӛ�����ӝ�����ӟ�����ӡ�����ӣ�����ӥ�����ӧ�����ө�����ӫ�����ӭ�����ӯ�����ӱ�����ӳ�����ӵ���ӷ���ӹ���ӻ���ӽ���ӿ���ԁ���ԃ�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�r�)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr)rr0)rrEr)rr0)rrEr)rr0)rrEr	)r
r0)rrEr)r
r0)rrEr)rr0)rrEr)rr0)rrEr)rr0)rrEr)rr0)rrEr)rr0)rrEr)rr0)r rEr!)r"r0)r#rEr$)r%r0)r&rEr')r(r0)r)rEr*)r+r0)r,rEr-)r.r0)r/rEr0)r1r0)r2rEr3)r4r0)r5rEr6)r7r0)r8rEr9)r:r0)r;rEr<)r=r0)r>rEr?)r@r0)rArErB)rCr0)rDrErE)rFr0)rGrErH)rIr0r�r�r�r�r��_seg_8Hs�rJceCs�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�d�d�d�dgdS(	N�rE�ԅ�r0��ԇ���ԉ�	�
�ԋ���ԍ�
��ԏ���ԑ���ԓ���ԕ���ԗ���ԙ���ԛ���ԝ���ԟ�� �ԡ�!�"�ԣ�#�$�ԥ�%�&�ԧ�'�(r��1�ա�2�բ�3�գ�4�դ�5�ե�6�զ�7�է�8�ը�9�թ�:�ժ�;�ի�<�լ�=�խ�>�ծ�?�կ�@�հ�A�ձ�B�ղ�C�ճ�D�մ�E�յ�F�ն�G�շ�H�ո�I�չ�J�պ�K�ջ�L�ռ�M�ս�N�վ�O�տ�P�ր�Q�ց�R�ւ�S�փ�T�ք�U�օ�V�ֆ�W�Y�`�a��եւ�������������������u�اٴ�v�وٴ�w�ۇٴ�x�يٴ�y��)rKrErL)rMr0)rNrErO)rPr0)rQrErR)rSr0)rTrErU)rVr0)rWrErX)rYr0)rZrEr[)r\r0)r]rEr^)r_r0)r`rEra)rbr0)rcrErd)rer0)rfrErg)rhr0)rirErj)rkr0)rlrErm)rnr0)rorErp)rqr0)rrrErs)rtr0)rurErv)rwr0)rxrEry)rzr0)r{rEr|)r}r0)r~rEr)r�r0)r�r�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�r�)r�r0)r�r�)r�r0)r�rEr�)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�r0)r�r�r�r�r�r�r��_seg_9�s�r�ceCs�dydzd{d|d}d~dd�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�gdS)�N��r0�r���K�M�����.�0�?�@�\�^�_��������	�X	rE�क़�Y	�ख़�Z	�ग़�[	�ज़�\	�ड़�]	�ढ़�^	�फ़�_	�य़�`	�x	�y	�	�	�	�	�	�	�	�	�	�	�	�	�	�	�	�	��	��	��	��	��	��	��	��	�ড়��	�ঢ়��	��	�য়��	��	��	�	�
�
�
�
�
�
�
�)
�*
�1
�2
�3
�ਲ਼�4
�5
�6
�ਸ਼�7
�8
�:
�<
�=
�>
�C
�G
�I
�K
�N
�Q
�R
�Y
�ਖ਼�Z
�ਗ਼�[
�ਜ਼�\
�]
�^
�ਫ਼�_
)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)rr�)rr0)rr�)rr0)rrEr)rrEr)rrEr	)r
rEr)rrEr
)rrEr)rrEr)rrEr)rr0)rr�)rr0)rr�)rr0)rr�)rr0)rr�)rr0)rr�)rr0)rr�)r r0)r!r�)r"r0)r#r�)r$r0)r%r�)r&r0)r'r�)r(r0)r)r�)r*r0)r+r�)r,r0)r-r�)r.rEr/)r0rEr1)r2r�)r3rEr4)r5r0)r6r�)r7r0)r8r�)r9r0)r:r�)r;r0)r<r�)r=r0)r>r�)r?r0)r@r�)rAr0)rBr�)rCr0)rDrErE)rFr�)rGr0)rHrErI)rJr�)rKr0)rLr�)rMr0)rNr�)rOr0)rPr�)rQr0)rRr�)rSr0)rTr�)rUr0)rVr�)rWrErX)rYrErZ)r[rEr\)r]r0)r^r�)r_rEr`)rar�r�r�r�r�r��_seg_10s�rbceCs�djdkdldmdndodpdqdrdsdtdudvdwdxdydzd{d|d}d~dd�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�gdS)�N�f
r0�v
r��
�
�
�
�
�
�
�
�
�
�
�
�
�
�
��
��
��
��
��
��
��
��
��
��
��
����
����)�*�1�2�4�5�:�<�E�G�I�K�N�V�X�\rE�ଡ଼�]�ଢ଼�^�_�d�f�x����������������������������������������������
����)�*�4�5)rcr0)rdr�)rer0)rfr�)rgr0)rhr�)rir0)rjr�)rkr0)rlr�)rmr0)rnr�)ror0)rpr�)rqr0)rrr�)rsr0)rtr�)rur0)rvr�)rwr0)rxr�)ryr0)rzr�)r{r0)r|r�)r}r0)r~r�)rr0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�rEr�)r�rEr�)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0r�r�r�r�r��_seg_11�s�r�ceCs�didjdkdldmdndodpdqdrdsdtdudvdwdxdydzd{d|d}d~dd�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�gdS)�N�:r��=r0�E�F�I�J�N�U�W�X�Z�`�d�f�p�x���������������������������������������������
�
�
�

�
�
�
�;
�=
�E
�F
�I
�J
�O
�W
�X
�`
�d
�f
�v
�y
�
�
�
�
�
�
�
�
�
�
�
�
��
��
��
��
��
��
��
��
��
��
�
��3rE�ํา�4�;�?�\�����)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)rr�)rr0)rr�)rr0)rr�)rr0)rr�)rr0)rr�)r	r0)r
r�)rr0)rr�)r
r0)rr�)rr0)rr�)rr0)rr�)rr0)rr�)rr0)rr�)rr0)rr�)rr0)rr�)rr0)rr�)rr0)rr�)rr0)r r�)r!r0)r"r�)r#r0)r$rEr%)r&r0)r'r�)r(r0)r)r�)r*r0)r+r�)r,r0)r-r�)r.r0r�r�r�r�r��_seg_12�s�r/ceCs�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�gdS)�N�r��r0�����������������rE�ໍາ����������������������ຫນ���ຫມ�������་�
�C�གྷ�D�H�I�M�ཌྷ�N�R�དྷ�S�W�བྷ�X�\�ཛྷ�]�i�ཀྵ�j�m�q�s�ཱི�t�u�ཱུ�v�ྲྀ�w�	ྲཱྀ�x�ླྀ�y�	ླཱྀ�z��ཱྀ���ྒྷ�����ྜྷ���ྡྷ���ྦྷ���ྫྷ���ྐྵ��������������ⴧ�����ⴭ������ნ��_�a�I�J�N�P�W�X)r0r�)r1r0)r2r�)r3r0)r4r�)r5r0)r6r�)r7r0)r8r�)r9r0)r:r�)r;r0)r<r�)r=r0)r>r�)r?r0)r@r�)rAr0)rBrErC)rDr0)rEr�)rFr0)rGr�)rHr0)rIr�)rJr0)rKr�)rLr0)rMr�)rNr0)rOr�)rPrErQ)rRrErS)rTr0)rUr�)rVr0)rWrErX)rYr0)rZrEr[)r\r0)r]r�)r^r0)r_rEr`)rar0)rbrErc)rdr0)rerErf)rgr0)rhrEri)rjr0)rkrErl)rmr0)rnr�)ror0)rprErq)rrr0)rsrErt)rurErv)rwrErx)ryrErz)r{rEr|)r}r0)r~rEr)r�r0)r�rEr�)r�r0)r�r�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�rEr�)r�r�)r�rEr�)r�r�)r�r0)r�rEr�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0r�r�r�r�r��_seg_13Ps�r�ceCs�dhdidjdkdldmdndodpdqdrdsdtdudvdwdxdydzd{d|d}d~dd�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�gdS)�N�Yr��Zr0�^�`��������������������������[�]�}�������������
��� �7�@�T�`�m�n�q�r�t����������������r����� �x������� �,�0�<�@�A�D�n�p�u��������������_�`�}����)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)rr�)rr0)rr�)rr0)rr�)rr0)rr�)rr0)rr�)r	r0)r
r�)rr0)rr�)r
r0)rr�)rr0)rr�)rr0)rr�r�r�r�r�r��_seg_14�s�rcfCs�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�dgdS(N�r0�r���L�P�}�����8�;�J�M���������,rErF�-r��.rH�/�0rL�1rN�2�ǝ�3rR�4rT�5rV�6rX�7rZ�8r\�9r^�:r`�;�<rb�=�ȣ�>rd�?rh�@rl�Arn�Brr�C�D�ɐ�E�ɑ�F�ᴂ�G�H�I�J�ə�K�ɛ�L�ɜ�M�N�O�P�Q�ŋ�R�S�ɔ�T�ᴖ�U�ᴗ�V�W�X�Y�ᴝ�Z�ɯ�[rp�\�ᴥ�]�β�^�γ�_�δ�`�φ�a�χ�b�c�d�e�f�g�h�ρ�i�j�k�x�н�y��ɒ�rJ��ɕ�r��rP��ɟ��ɡ��ɥ��ɨ��ɩ��ɪ��ᵻ��ʝ��ɭ)rr0)rr�)rr0)rr�)rr0)rr�)rr0)rr�)rr0)rr�)rr0)rr�)r r0)r!r�)r"r0)r#r�)r$r0)r%r�)r&r0)r'rErF)r(rEr�)r)rErH)r*r0)r+rErL)r,rErN)r-rEr.)r/rErR)r0rErT)r1rErV)r2rErX)r3rErZ)r4rEr\)r5rEr^)r6rEr`)r7r0)r8rErb)r9rEr:)r;rErd)r<rErh)r=rErl)r>rErn)r?rErr)r@rErF)rArErB)rCrErD)rErErF)rGrErH)rHrErL)rIrErN)rJrErK)rLrErM)rNrErO)rPrErR)rQr0)rRrErZ)rSrEr^)rTrErU)rVrErb)rWrErX)rYrErZ)r[rEr\)r]rErd)r^rErl)r_rErn)r`rEra)rbrErc)rdrErp)rerErf)rgrErh)rirErj)rkrErl)rmrErn)rorErp)rqrErV)rrrErh)rsrErn)rtrErp)rurErh)rvrErj)rwrErx)ryrErn)rzrErp)r{r0)r|rEr})r~r0)rrEr�)r�rErJ)r�rEr�)r�rEr)r�rErO)r�rErP)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�r�r�r�r�r��_seg_15 s�r�cfCs�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�d�d�dgdS(N�rE�ᶅ��ʟ��ɱ��ɰ��ɲ��ɳ��ɴ��ɵ��ɸ��ʂ��ʃ��ƫ��ʉ��ʊ��ᴜ��ʋ��ʌ�rx��ʐ��ʑ��ʒ��θ�r0��r����ḁ���ḃ���ḅ���ḇ���ḉ�	�
�ḋ���ḍ�
��ḏ���ḑ���ḓ���ḕ���ḗ���ḙ���ḛ���ḝ���ḟ�� �ḡ�!�"�ḣ�#�$�ḥ�%�&�ḧ�'�(�ḩ�)�*�ḫ�+�,�ḭ�-�.�ḯ�/�0�ḱ�1�2�ḳ�3�4�ḵ�5�6�ḷ�7�8�ḹ�9�:�ḻ�;�<�ḽ�=�>�ḿ�?�@�ṁ�A�B�ṃ�C�D�ṅ�E�F�ṇ�G�H�ṉ�I�J�ṋ)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rErx)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�r0)r�r�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r	r0)r	rEr	)r	r0)r	rEr	)r	r0)r	rEr	)r		r0)r
	rEr	)r	r0)r
	rEr	)r	r0)r	rEr	)r	r0)r	rEr	)r	r0)r	rEr	)r	r0)r	rEr	)r	r0)r	rEr	)r	r0)r	rEr 	)r!	r0)r"	rEr#	)r$	r0)r%	rEr&	)r'	r0)r(	rEr)	)r*	r0)r+	rEr,	)r-	r0)r.	rEr/	)r0	r0)r1	rEr2	)r3	r0)r4	rEr5	)r6	r0)r7	rEr8	r�r�r�r�r��_seg_16�s�r9	ceCs�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�gdS)�N�Kr0�LrE�ṍ�M�N�ṏ�O�P�ṑ�Q�R�ṓ�S�T�ṕ�U�V�ṗ�W�X�ṙ�Y�Z�ṛ�[�\�ṝ�]�^�ṟ�_�`�ṡ�a�b�ṣ�c�d�ṥ�e�f�ṧ�g�h�ṩ�i�j�ṫ�k�l�ṭ�m�n�ṯ�o�p�ṱ�q�r�ṳ�s�t�ṵ�u�v�ṷ�w�x�ṹ�y�z�ṻ�{�|�ṽ�}�~�ṿ���ẁ���ẃ���ẅ���ẇ���ẉ���ẋ���ẍ���ẏ���ẑ���ẓ���ẕ���aʾ���r.���ạ���ả���ấ���ầ���ẩ���ẫ���ậ���ắ���ằ���ẳ�)r:	r0)r;	rEr<	)r=	r0)r>	rEr?	)r@	r0)rA	rErB	)rC	r0)rD	rErE	)rF	r0)rG	rErH	)rI	r0)rJ	rErK	)rL	r0)rM	rErN	)rO	r0)rP	rErQ	)rR	r0)rS	rErT	)rU	r0)rV	rErW	)rX	r0)rY	rErZ	)r[	r0)r\	rEr]	)r^	r0)r_	rEr`	)ra	r0)rb	rErc	)rd	r0)re	rErf	)rg	r0)rh	rEri	)rj	r0)rk	rErl	)rm	r0)rn	rEro	)rp	r0)rq	rErr	)rs	r0)rt	rEru	)rv	r0)rw	rErx	)ry	r0)rz	rEr{	)r|	r0)r}	rEr~	)r	r0)r�	rEr�	)r�	r0)r�	rEr�	)r�	r0)r�	rEr�	)r�	r0)r�	rEr�	)r�	r0)r�	rEr�	)r�	r0)r�	rEr�	)r�	r0)r�	rEr�	)r�	r0)r�	rEr�	)r�	r0)r�	rEr�	)r�	r0)r�	rEr�	)r�	r0)r�	rEr�	)r�	r0)r�	rEr�	)r�	r0)r�	rEr�	)r�	r0)r�	rEr�	)r�	r0)r�	rEr�	)r�	rErZ	)r�	r0)r�	rEr.)r�	r0)r�	rEr�	)r�	r0)r�	rEr�	)r�	r0)r�	rEr�	)r�	r0)r�	rEr�	)r�	r0)r�	rEr�	)r�	r0)r�	rEr�	)r�	r0)r�	rEr�	)r�	r0)r�	rEr�	)r�	r0)r�	rEr�	)r�	r0)r�	rEr�	)r�	r0r�r�r�r�r��_seg_17�s�r�	cfCs�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�dgdS(N�rE�ẵ�r0��ặ���ẹ���ẻ���ẽ���ế���ề����ể�����ễ�����ệ�����ỉ�����ị�����ọ�����ỏ�����ố�����ồ�����ổ�����ỗ�����ộ�����ớ�����ờ�����ở�����ỡ�����ợ�����ụ�����ủ�����ứ�����ừ�����ử�����ữ�����ự�����ỳ�����ỵ���ỷ���ỹ���ỻ���ỽ���ỿ���ἀ�	�ἁ�
�ἂ��ἃ��ἄ�
�ἅ��ἆ��ἇ��r���ἐ��ἑ��ἒ��ἓ��ἔ��ἕ�� �(�ἠ�)�ἡ�*�ἢ�+�ἣ�,�ἤ�-�ἥ)r�	rEr�	)r�	r0)r�	rEr�	)r�	r0)r�	rEr�	)r�	r0)r�	rEr�	)r�	r0)r�	rEr�	)r�	r0)r�	rEr�	)r�	r0)r�	rEr�	)r�	r0)r�	rEr�	)r�	r0)r�	rEr�	)r�	r0)r�	rEr�	)r�	r0)r�	rEr�	)r�	r0)r�	rEr�	)r�	r0)r�	rEr�	)r�	r0)r�	rEr�	)r�	r0)r�	rEr�	)r�	r0)r�	rEr�	)r�	r0)r�	rEr
)r
r0)r
rEr
)r
r0)r
rEr
)r
r0)r
rEr	
)r

r0)r
rEr
)r
r0)r
rEr
)r
r0)r
rEr
)r
r0)r
rEr
)r
r0)r
rEr
)r
r0)r
rEr
)r
r0)r
rEr
)r
r0)r 
rEr!
)r"
r0)r#
rEr$
)r%
r0)r&
rEr'
)r(
r0)r)
rEr*
)r+
r0)r,
rEr-
)r.
r0)r/
rEr0
)r1
r0)r2
rEr3
)r4
r0)r5
rEr6
)r7
r0)r8
rEr9
)r:
r0)r;
rEr<
)r=
r0)r>
rEr?
)r@
r0)rA
rErB
)rC
rErD
)rE
rErF
)rG
rErH
)rI
rErJ
)rK
rErL
)rM
rErN
)rO
rErP
)rQ
r0)rR
r�)rS
rErT
)rU
rErV
)rW
rErX
)rY
rErZ
)r[
rEr\
)r]
rEr^
)r_
r�)r`
r0)ra
rErb
)rc
rErd
)re
rErf
)rg
rErh
)ri
rErj
)rk
rErl
r�r�r�r�r��_seg_18Xs�rm
cfCs�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�d�dgdS(N�.rE�ἦ�/�ἧ�0r0�8�ἰ�9�ἱ�:�ἲ�;�ἳ�<�ἴ�=�ἵ�>�ἶ�?�ἷ�@�Fr��H�ὀ�I�ὁ�J�ὂ�K�ὃ�L�ὄ�M�ὅ�N�P�X�Y�ὑ�Z�[�ὓ�\�]�ὕ�^�_�ὗ�`�h�ὠ�i�ὡ�j�ὢ�k�ὣ�l�ὤ�m�ὥ�n�ὦ�o�ὧ�p�q�ά�r�s�έ�t�u�ή�v�w�ί�x�y�ό�z�{�ύ�|�}�ώ�~��ἀι��ἁι��ἂι��ἃι��ἄι��ἅι��ἆι��ἇι����������ἠι��ἡι��ἢι��ἣι��ἤι��ἥι��ἦι��ἧι����������ὠι��ὡι��ὢι��ὣι��ὤι��ὥι��ὦι��ὧι�������)rn
rEro
)rp
rErq
)rr
r0)rs
rErt
)ru
rErv
)rw
rErx
)ry
rErz
)r{
rEr|
)r}
rEr~
)r
rEr�
)r�
rEr�
)r�
r0)r�
r�)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
r�)r�
r0)r�
r�)r�
rEr�
)r�
r�)r�
rEr�
)r�
r�)r�
rEr�
)r�
r�)r�
rEr�
)r�
r0)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
r0)r�
rEr�
)r�
r0)r�
rEr�
)r�
r0)r�
rEr�
)r�
r0)r�
rEr�
)r�
r0)r�
rEr�
)r�
r0)r�
rEr�
)r�
r0)r�
rEr�
)r�
r�)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)rrEr)rrEr)rrEr)rrEr�
)rrEr�
)rrEr�
)r	rEr�
)r
rEr�
)rrEr)rrErr�r�r�r�r��_seg_19�s�r
ceCs�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�d�d�d�d�d	�d
�d�d�d
�d�dgdS(N�rE�ὧι�r0��ὰι��αι��άι�r����ᾶι��ᾰ��ᾱ��ὰ��ά��r� ̓��ι��� ͂�� ̈͂���ὴι���ηι���ήι�������ῆι���ὲ���έ���ὴ���ή����� ̓̀��� ̓́��� ̓͂�����ΐ�������ῐ���ῑ���ὶ���ί����� ̔̀��� ̔́��� ̔͂�����ΰ�����ῠ���ῡ���ὺ���ύ���ῥ��� ̈̀��� ̈́���`�����ὼι���ωι���ώι����ῶι��ὸ��ό��ὼ��ώ��� ́�� ̔�� r�� r�� r-�� � � �‐� � � ̳� �$ �' �( �/ �0 �3 �′′�4 �	′′′�5 �6 �‵‵�7 �	‵‵‵�8 �< �!!�= �> � ̅�? �G �??�H �?!�I �!?�J �W �′′′′�X )rrEr)rr0)rrEr)rrEr)rrEr)rr�)rr0)rrEr)rrEr)rrEr)rrEr )r!rEr")r#rEr)r$rr%)r&rEr')r(rr%)r)rr*)r+rr,)r-rEr.)r/rEr0)r1rEr2)r3r�)r4r0)r5rEr6)r7rEr8)r9rEr:)r;rEr<)r=rEr>)r?rEr0)r@rrA)rBrrC)rDrrE)rFr0)rGrErH)rIr�)rJr0)rKrErL)rMrErN)rOrErP)rQrErR)rSr�)rTrrU)rVrrW)rXrrY)rZr0)r[rEr\)r]r0)r^rEr_)r`rEra)rbrErc)rdrEre)rfrErg)rhrri)rjrrk)rlrrm)rnr�)rorErp)rqrErr)rsrErt)rur�)rvr0)rwrErx)ryrErz)r{rEr|)r}rEr~)rrEr�)r�rErr)r�rr�)r�rr�)r�r�)r�rr�)r�r�)r�r-r�)r�r�)r�r0)r�rEr�)r�r0)r�rr�)r�r0)r�r�)r�r0)r�r�)r�rr�)r�r0)r�rEr�)r�rEr�)r�r0)r�rEr�)r�rEr�)r�r0)r�rr�)r�r0)r�rr�)r�r0)r�rr�)r�rr�)r�rr�)r�r0)r�rEr�)r�r0r�r�r�r�r��_seg_20(s�r�cfCs�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�dgdS(N�_ rr��` r��a r��d �e �p rE�0�q rV�r �t �4�u �5�v �6�w �7�x �8�y �9�z �+�{ �−�| �=�} �(�~ �)� r`� � r�� r�� � � � � � � � � � � � � � rF� rN� rb� rt� �ə� rT� rZ� r\� r^� � rd� rj� rl� � r0� �rs� � �� �� �!�a/c�!�a/s�!rJ�!�°c�!�!�c/o�!�c/u�!�ɛ�!�	!�°f�
!rR�!�!�ħ�!�!�!�!�!�no�!�!�!rf�!rh�!� !�sm�!!�tel�"!�tm�#!�$!rx�%!�&!�ω�'!�(!�)!�*!�+!r��,!rH�-!�.!�/!�1!rP�2!�3!�4!�5!�א)r�rr�)r�r�)r�r�)r�r�)r�r�)r�rEr�)r�rErV)r�r�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rr�)r�rEr�)r�rr�)r�rr�)r�rr�)r�rEr`)r�rEr�)r�rEr�)r�rEr�)r�rEr)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rr�)r�rEr�)r�rr�)r�rr�)r�rr�)r�r�)r�rErF)r�rErN)r�rErb)r�rErt)r�rEr�)r�rErT)r�rErZ)r�rEr\)r�rEr^)r�rEr`)r�rErd)r�rErj)r�rErl)r�r�)r�r0)r�rEr�)r�r0)r�r�)r�r0)r�r�)r�rr�)r�rr�)r�rErJ)r�rEr�)rr0)rrr)rrr)rrEr)rr0)rrEr	)r
rErR)rrErT)rrEr
)rrErV)rrEr\)rr0)rrEr`)rrEr)rr0)rrErd)rrErf)rrErh)rr0)rrEr)rrEr)rrEr)rr0)r rErx)r!r0)r"rEr#)r$r0)r%rErx)r&r0)r'rErZ)r(rEr�)r)rErH)r*rErJ)r+r0)r,rErN)r-rErP)r.r�)r/rEr^)r0rErb)r1rEr2r�r�r�r�r��_seg_21�s�r3cfCs�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�d�d�dgdS(N�6!rE�ב�7!�ג�8!�ד�9!rV�:!r0�;!�fax�<!�π�=!�γ�?!�@!�∑�A!�E!rL�G!rN�H!�I!rX�J!�P!�1⁄7�Q!�1⁄9�R!�1⁄10�S!�1⁄3�T!�2⁄3�U!�1⁄5�V!�2⁄5�W!�3⁄5�X!�4⁄5�Y!�1⁄6�Z!�5⁄6�[!�1⁄8�\!�3⁄8�]!�5⁄8�^!�7⁄8�_!�1⁄�`!�a!�ii�b!�iii�c!�iv�d!rp�e!�vi�f!�vii�g!�viii�h!�ix�i!rt�j!�xi�k!�xii�l!r\�m!rJ�n!�o!r^�p!�q!�r!�s!�t!�u!�v!�w!�x!�y!�z!�{!�|!�}!�~!�!�!�!r��!�!�0⁄3�!�!�,"�∫∫�-"�	∫∫∫�."�/"�∮∮�0"�	∮∮∮�1"�`"r�a"�n"�p"�)#�〈�*#�〉�+#��#�$�'$�@$�K$�`$r��a$r��b$�c$r��d$r��e$r��f$r��g$r��h$r��i$�10�j$�11�k$�12)r4rEr5)r6rEr7)r8rEr9)r:rErV)r;r0)r<rEr=)r>rEr?)r@rErA)rBrEr?)rCrErD)rEr0)rFrErL)rGrErN)rHrErV)rIrErX)rJr0)rKrErL)rMrErN)rOrErP)rQrErR)rSrErT)rUrErV)rWrErX)rYrErZ)r[rEr\)r]rEr^)r_rEr`)rarErb)rcrErd)rerErf)rgrErh)rirErj)rkrErV)rlrErm)rnrEro)rprErq)rrrErp)rsrErt)rurErv)rwrErx)ryrErz)r{rErt)r|rEr})r~rEr)r�rEr\)r�rErJ)r�rErL)r�rEr^)r�rErV)r�rErm)r�rEro)r�rErq)r�rErp)r�rErt)r�rErv)r�rErx)r�rErz)r�rErt)r�rEr})r�rEr)r�rEr\)r�rErJ)r�rErL)r�rEr^)r�r0)r�r�)r�r0)r�rEr�)r�r�)r�r0)r�rEr�)r�rEr�)r�r0)r�rEr�)r�rEr�)r�r0)r�r)r�r0)r�r)r�r0)r�rEr�)r�rEr�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�rEr�)r�rEr�)r�rEr)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�r�r�r�r�r��_seg_22�s�r�cfCsd�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�d�d�d�d�d	�d
�d�d�d
�d�d�d�d�d�d�d�d�d�d�d�d�d�dgdS(N�l$rE�13�m$�14�n$�15�o$�16�p$�17�q$�18�r$�19�s$�20�t$r�(1)�u$�(2)�v$�(3)�w$�(4)�x$�(5)�y$�(6)�z$�(7)�{$�(8)�|$�(9)�}$�(10)�~$�(11)�$�(12)�$�(13)�$�(14)�$�(15)�$�(16)�$�(17)�$�(18)�$�(19)�$�(20)�$r��$�(a)�$�(b)�$�(c)�$�(d)�$�(e)�$�(f)�$�(g)�$�(h)�$�(i)�$�(j)�$�(k)�$�(l)�$�(m)�$�(n)�$�(o)�$�(p)�$�(q)�$�(r)�$�(s)�$�(t)�$�(u)�$�(v)�$�(w)�$�(x)�$�(y)�$�(z)�$rF�$rH�$rJ�$rL�$rN�$rP�$rR�$rT�$rV�$rX�$rZ�$r\��$r^��$r`��$rb��$rd��$rf��$rh��$rj��$rl��$rn��$rp��$rr��$rt��$rv��$rx��$��$��$��$��$��$��$��$��$��$��$��$��$��$��$��$��$��$��$)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�r�)r�rr�)r�rr�)r
rr
)r
rr
)r
rr
)r
rr
)r
rr	
)r

rr
)r
rr

)r
rr
)r
rr
)r
rr
)r
rr
)r
rr
)r
rr
)r
rr
)r
rr
)r
rr
)r 
rr!
)r"
rr#
)r$
rr%
)r&
rr'
)r(
rr)
)r*
rr+
)r,
rr-
)r.
rr/
)r0
rErF)r1
rErH)r2
rErJ)r3
rErL)r4
rErN)r5
rErP)r6
rErR)r7
rErT)r8
rErV)r9
rErX)r:
rErZ)r;
rEr\)r<
rEr^)r=
rEr`)r>
rErb)r?
rErd)r@
rErf)rA
rErh)rB
rErj)rC
rErl)rD
rErn)rE
rErp)rF
rErr)rG
rErt)rH
rErv)rI
rErx)rJ
rErF)rK
rErH)rL
rErJ)rM
rErL)rN
rErN)rO
rErP)rP
rErR)rQ
rErT)rR
rErV)rS
rErX)rT
rErZ)rU
rEr\)rV
rEr^)rW
rEr`)rX
rErb)rY
rErd)rZ
rErf)r[
rErh)r\
rErjr�r�r�r�r��_seg_23`	s�r]
cfCsd�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�d�d�d�d�d	�d
�d�d�d
�d�d�d�d�d�d�d�d�d�d�d�d�dgdS(N��$rErl��$rn��$rp��$rr��$rt��$rv��$rx��$r���$r0�'r��'�*�∫∫∫∫�
*�t*r�::=�u*�==�v*�===�w*��*�⫝̸��*�M+�P+�Z+�,�ⰰ�,�ⰱ�,�ⰲ�,�ⰳ�,�ⰴ�,�ⰵ�,�ⰶ�,�ⰷ�,�ⰸ�	,�ⰹ�
,�ⰺ�,�ⰻ�,�ⰼ�
,�ⰽ�,�ⰾ�,�ⰿ�,�ⱀ�,�ⱁ�,�ⱂ�,�ⱃ�,�ⱄ�,�ⱅ�,�ⱆ�,�ⱇ�,�ⱈ�,�ⱉ�,�ⱊ�,�ⱋ�,�ⱌ�,�ⱍ�,�ⱎ�,�ⱏ� ,�ⱐ�!,�ⱑ�",�ⱒ�#,�ⱓ�$,�ⱔ�%,�ⱕ�&,�ⱖ�',�ⱗ�(,�ⱘ�),�ⱙ�*,�ⱚ�+,�ⱛ�,,�ⱜ�-,�ⱝ�.,�ⱞ�/,�0,�_,�`,�ⱡ�a,�b,�ɫ�c,�ᵽ�d,�ɽ�e,�g,�ⱨ�h,�i,�ⱪ�j,�k,�ⱬ�l,�m,�ɑ�n,�ɱ�o,�ɐ�p,�ɒ�q,�r,�ⱳ�s,�u,�ⱶ�v,�|,rX�},�~,�ȿ�,�ɀ�,�ⲁ�,�,�ⲃ)r^
rErl)r_
rErn)r`
rErp)ra
rErr)rb
rErt)rc
rErv)rd
rErx)re
rEr�)rf
r0)rg
r�)rh
r0)ri
rErj
)rk
r0)rl
rrm
)rn
rro
)rp
rrq
)rr
r0)rs
rErt
)ru
r0)rv
r�)rw
r0)rx
r�)ry
rErz
)r{
rEr|
)r}
rEr~
)r
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
r�)r�
r0)r�
r�)r�
rEr�
)r�
r0)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
r0)r�
rEr�
)r�
r0)r�
rEr�
)r�
r0)r�
rEr�
)r�
r0)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
r0)r�
rEr�
)r�
r0)r�
rEr�
)r�
r0)r�
rErX)r�
rErp)r�
rEr�
)rrEr)rrEr)rr0)rrErr�r�r�r�r��_seg_24�	s�rcfCs�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�gdS)�N�,r0�,rE�ⲅ�,�,�ⲇ�,�,�ⲉ�,�,�ⲋ�,�,�ⲍ�,�,�ⲏ�,�,�ⲑ�,�,�ⲓ�,�,�ⲕ�,�,�ⲗ�,�,�ⲙ�,�,�ⲛ�,�,�ⲝ�,�,�ⲟ�,�,�ⲡ�,�,�ⲣ�,�,�ⲥ�,�,�ⲧ�,�,�ⲩ�,�,�ⲫ�,�,�ⲭ�,�,�ⲯ�,�,�ⲱ�,�,�ⲳ�,�,�ⲵ�,�,�ⲷ�,�,�ⲹ�,�,�ⲻ�,�,�ⲽ�,�,�ⲿ�,�,�ⳁ�,��,�ⳃ��,��,�ⳅ��,��,�ⳇ��,��,�ⳉ��,��,�ⳋ��,��,�ⳍ��,��,�ⳏ��,��,�ⳑ��,��,�ⳓ��,��,�ⳕ��,��,�ⳗ��,��,�ⳙ��,��,�ⳛ��,��,�ⳝ��,��,�ⳟ��,��,�ⳡ��,��,�ⳣ��,��,�ⳬ��,��,�ⳮ)rr0)r	rEr
)rr0)rrEr
)rr0)rrEr)rr0)rrEr)rr0)rrEr)rr0)rrEr)rr0)rrEr)rr0)rrEr)r r0)r!rEr")r#r0)r$rEr%)r&r0)r'rEr()r)r0)r*rEr+)r,r0)r-rEr.)r/r0)r0rEr1)r2r0)r3rEr4)r5r0)r6rEr7)r8r0)r9rEr:)r;r0)r<rEr=)r>r0)r?rEr@)rAr0)rBrErC)rDr0)rErErF)rGr0)rHrErI)rJr0)rKrErL)rMr0)rNrErO)rPr0)rQrErR)rSr0)rTrErU)rVr0)rWrErX)rYr0)rZrEr[)r\r0)r]rEr^)r_r0)r`rEra)rbr0)rcrErd)rer0)rfrErg)rhr0)rirErj)rkr0)rlrErm)rnr0)rorErp)rqr0)rrrErs)rtr0)rurErv)rwr0)rxrEry)rzr0)r{rEr|)r}r0)r~rEr)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�r�r�r�r�r��_seg_250
s�r�cfCs�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�d�d�d�d�d	gdS(
N��,r0��,rE�ⳳ��,��,r��,�&-�'-�(-�--�.-�0-�h-�o-�ⵡ�p-�q-�-�-�-�-�-�-�-�-�-�-�-��-��-��-��-��-��-��-��-�<.�.�.�.�.�母�.��.�龟��.�/�一�/�丨�/�丶�/�丿�/�乙�/�亅�/�二�/�亠�/�人�	/�儿�
/�入�/�八�/�冂�
/�冖�/�冫�/�几�/�凵�/�刀�/�力�/�勹�/�匕�/�匚�/�匸�/�十�/�卜�/�卩�/�厂�/�厶�/�又�/�口�/�囗�/�土� /�士�!/�夂�"/�夊�#/�夕�$/�大�%/�女�&/�子�'/�宀�(/�寸�)/�小�*/�尢�+/�尸�,/�屮�-/�山�./�巛�//�工�0/�己�1/�巾�2/�干�3/�幺�4/�广�5/�廴�6/�廾�7/�弋�8/�弓�9/�彐)r�r0)r�rEr�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�rEr�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�rEr�)r�r0)r�rEr�)r�r�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr)rrEr)rrEr)rrEr)rrEr)r	rEr
)rrEr)r
rEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr )r!rEr")r#rEr$)r%rEr&)r'rEr()r)rEr*)r+rEr,)r-rEr.)r/rEr0)r1rEr2)r3rEr4)r5rEr6)r7rEr8)r9rEr:)r;rEr<)r=rEr>)r?rEr@r�r�r�r�r��_seg_26�
s�rAcfCs(d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�d�d�d�d�d	�d
�d�d�d
�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d �d!�d"�d#�d$�d%�d&�d'�d(�d)�d*�d+�d,�d-gdS(.N�:/rE�彡�;/�彳�</�心�=/�戈�>/�戶�?/�手�@/�支�A/�攴�B/�文�C/�斗�D/�斤�E/�方�F/�无�G/�日�H/�曰�I/�月�J/�木�K/�欠�L/�止�M/�歹�N/�殳�O/�毋�P/�比�Q/�毛�R/�氏�S/�气�T/�水�U/�火�V/�爪�W/�父�X/�爻�Y/�爿�Z/�片�[/�牙�\/�牛�]/�犬�^/�玄�_/�玉�`/�瓜�a/�瓦�b/�甘�c/�生�d/�用�e/�田�f/�疋�g/�疒�h/�癶�i/�白�j/�皮�k/�皿�l/�目�m/�矛�n/�矢�o/�石�p/�示�q/�禸�r/�禾�s/�穴�t/�立�u/�竹�v/�米�w/�糸�x/�缶�y/�网�z/�羊�{/�羽�|/�老�}/�而�~/�耒�/�耳�/�聿�/�肉�/�臣�/�自�/�至�/�臼�/�舌�/�舛�/�舟�/�艮�/�色�/�艸�/�虍�/�虫�/�血�/�行�/�衣�/�襾�/�見�/�角�/�言�/�谷�/�豆�/�豕�/�豸�/�貝�/�赤�/�走�/�足�/�身)rBrErC)rDrErE)rFrErG)rHrErI)rJrErK)rLrErM)rNrErO)rPrErQ)rRrErS)rTrErU)rVrErW)rXrErY)rZrEr[)r\rEr])r^rEr_)r`rEra)rbrErc)rdrEre)rfrErg)rhrEri)rjrErk)rlrErm)rnrEro)rprErq)rrrErs)rtrEru)rvrErw)rxrEry)rzrEr{)r|rEr})r~rEr)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)rrEr)rrEr)rrEr)rrEr)rrEr	r�r�r�r�r��_seg_27s�r
cfCsd�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�d�d�d�d�d	�d
�d�d�d
�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d �d!�d"gdS(#N�/rE�車�/�辛�/�辰�/�辵�/�邑�/�酉�/�釆�/�里�/�金�/�長�/�門�/�阜�/�隶�/�隹�/�雨�/�靑�/�非�/�面�/�革�/�韋�/�韭�/�音�/�頁�/�風�/�飛�/�食�/�首�/�香�/�馬�/�骨�/�高�/�髟�/�鬥�/�鬯�/�鬲�/�鬼��/�魚��/�鳥��/�鹵��/�鹿��/�麥��/�麻��/�黃��/�黍��/�黑��/�黹��/�黽��/�鼎��/�鼓��/�鼠��/�鼻��/�齊��/�齒��/�龍��/�龜��/�龠��/r��0rr��0r0�0�.�0�60�〒�70�80�十�90�卄�:0�卅�;0�@0�A0�0�0�0� ゙�0� ゚�0�0�より�0�0�コト�1�1�.1�11�ᄀ�21�ᄁ�31�ᆪ�41�ᄂ�51�ᆬ�61�ᆭ�71�ᄃ�81�ᄄ�91�ᄅ�:1�ᆰ�;1�ᆱ�<1�ᆲ�=1�ᆳ�>1�ᆴ�?1�ᆵ�@1�ᄚ�A1�ᄆ�B1�ᄇ�C1�ᄈ�D1�ᄡ)rrEr)r
rEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr )r!rEr")r#rEr$)r%rEr&)r'rEr()r)rEr*)r+rEr,)r-rEr.)r/rEr0)r1rEr2)r3rEr4)r5rEr6)r7rEr8)r9rEr:)r;rEr<)r=rEr>)r?rEr@)rArErB)rCrErD)rErErF)rGrErH)rIrErJ)rKrErL)rMrErN)rOrErP)rQrErR)rSrErT)rUrErV)rWrErX)rYrErZ)r[rEr\)r]rEr^)r_rEr`)rarErb)rcrErd)rerErf)rgrErh)rirErj)rkrErl)rmrErn)rorErp)rqrErr)rsrErt)rurErv)rwrErx)ryrErz)r{r�)r|rr�)r}r0)r~rEr)r�r0)r�rEr�)r�r0)r�rEr�)r�rEr�)r�rEr�)r�r0)r�r�)r�r0)r�r�)r�r0)r�rr�)r�rr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r�)r�r0)r�r�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�r�r�r�r�r��_seg_28hs�r�cfCsd�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�d�d�d�d�d	�d
�d�d�d
�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d �d!�d"�d#�d$�d%�d&�d'�d(gdS()N�E1rE�ᄉ�F1�ᄊ�G1�ᄋ�H1�ᄌ�I1�ᄍ�J1�ᄎ�K1�ᄏ�L1�ᄐ�M1�ᄑ�N1�ᄒ�O1�ᅡ�P1�ᅢ�Q1�ᅣ�R1�ᅤ�S1�ᅥ�T1�ᅦ�U1�ᅧ�V1�ᅨ�W1�ᅩ�X1�ᅪ�Y1�ᅫ�Z1�ᅬ�[1�ᅭ�\1�ᅮ�]1�ᅯ�^1�ᅰ�_1�ᅱ�`1�ᅲ�a1�ᅳ�b1�ᅴ�c1�ᅵ�d1r��e1�ᄔ�f1�ᄕ�g1�ᇇ�h1�ᇈ�i1�ᇌ�j1�ᇎ�k1�ᇓ�l1�ᇗ�m1�ᇙ�n1�ᄜ�o1�ᇝ�p1�ᇟ�q1�ᄝ�r1�ᄞ�s1�ᄠ�t1�ᄢ�u1�ᄣ�v1�ᄧ�w1�ᄩ�x1�ᄫ�y1�ᄬ�z1�ᄭ�{1�ᄮ�|1�ᄯ�}1�ᄲ�~1�ᄶ�1�ᅀ�1�ᅇ�1�ᅌ�1�ᇱ�1�ᇲ�1�ᅗ�1�ᅘ�1�ᅙ�1�ᆄ�1�ᆅ�1�ᆈ�1�ᆑ�1�ᆒ�1�ᆔ�1�ᆞ�1�ᆡ�1�1r0�1�一�1�二�1�三�1�四�1�上�1�中�1�下�1�甲�1�乙�1�丙�1�丁�1�天�1�地�1�人�1�1�1��1��1�2r�(ᄀ)�2�(ᄂ)�2�(ᄃ)�2�(ᄅ)�2�(ᄆ))r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr)rrEr)rr�)rrEr)rrEr)rrEr	)r
rEr)rrEr
)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)r rEr!)r"rEr#)r$rEr%)r&rEr')r(rEr))r*rEr+)r,rEr-)r.rEr/)r0rEr1)r2rEr3)r4rEr5)r6rEr7)r8rEr9)r:rEr;)r<rEr=)r>rEr?)r@rErA)rBrErC)rDrErE)rFrErG)rHrErI)rJrErK)rLrErM)rNrErO)rPrErQ)rRrErS)rTrErU)rVrErW)rXr�)rYr0)rZrEr[)r\rEr])r^rEr_)r`rEra)rbrErc)rdrEre)rfrErg)rhrEri)rjrErk)rlrErm)rnrEro)rprErq)rrrErs)rtrEru)rvr0)rwr�)rxr0)ryr�)rzr0)r{rr|)r}rr~)rrr�)r�rr�)r�rr�r�r�r�r�r��_seg_29�s�r�cfCs*d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�d�d�d�d�d	�d
�d�d�d
�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d �d!�d"�d#�d$�d%�d&�d'�d(�d)�d*�d+�d,�d-�d.gdS(/N�2r�(ᄇ)�2�(ᄉ)�2�(ᄋ)�2�(ᄌ)�	2�(ᄎ)�
2�(ᄏ)�2�(ᄐ)�2�(ᄑ)�
2�(ᄒ)�2�(가)�2�(나)�2�(다)�2�(라)�2�(마)�2�(바)�2�(사)�2�(아)�2�(자)�2�(차)�2�(카)�2�(타)�2�(파)�2�(하)�2�(주)�2�(오전)�2�(오후)�2r�� 2�(一)�!2�(二)�"2�(三)�#2�(四)�$2�(五)�%2�(六)�&2�(七)�'2�(八)�(2�(九)�)2�(十)�*2�(月)�+2�(火)�,2�(水)�-2�(木)�.2�(金)�/2�(土)�02�(日)�12�(株)�22�(有)�32�(社)�42�(名)�52�(特)�62�(財)�72�(祝)�82�(労)�92�(代)�:2�(呼)�;2�(学)�<2�(監)�=2�(企)�>2�(資)�?2�(協)�@2�(祭)�A2�(休)�B2�(自)�C2�(至)�D2rE�問�E2�幼�F2�文�G2�箏�H2r0�P2�pte�Q2�21�R2�22�S2�23�T2�24�U2�25�V2�26�W2�27�X2�28�Y2�29�Z2�30�[2�31�\2�32�]2�33�^2�34�_2�35�`2�ᄀ�a2�ᄂ�b2�ᄃ�c2�ᄅ�d2�ᄆ�e2�ᄇ�f2�ᄉ�g2�ᄋ�h2�ᄌ�i2�ᄎ�j2�ᄏ�k2�ᄐ�l2�ᄑ�m2�ᄒ�n2�가�o2�나)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�r�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr)rrr)rrEr)rrEr)rrEr)r	rEr
)rr0)rrEr
)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)r rEr!)r"rEr#)r$rEr%)r&rEr')r(rEr))r*rEr+)r,rEr-)r.rEr/)r0rEr1)r2rEr3)r4rEr5)r6rEr7)r8rEr9)r:rEr;)r<rEr=)r>rEr?)r@rErA)rBrErC)rDrErE)rFrErG)rHrErI)rJrErKr�r�r�r�r��_seg_308s�rLcfCs(d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�d�d�d�d�d	�d
�d�d�d
�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d �d!�d"�d#�d$�d%�d&�d'�d(�d)�d*�d+�d,�d-gdS(.N�p2rE�다�q2�라�r2�마�s2�바�t2�사�u2�아�v2�자�w2�차�x2�카�y2�타�z2�파�{2�하�|2�참고�}2�주의�~2�우�2r0�2�一�2�二�2�三�2�四�2�五�2�六�2�七�2�八�2�九�2�十�2�月�2�火�2�水�2�木�2�金�2�土�2�日�2�株�2�有�2�社�2�名�2�特�2�財�2�祝�2�労�2�秘�2�男�2�女�2�適�2�優�2�印�2�注�2�項�2�休�2�写�2�正�2�上�2�中�2�下�2�左�2�右�2�医�2�宗�2�学�2�監�2�企�2�資�2�協�2�夜�2�36�2�37�2�38�2�39�2�40�2�41�2�42�2�43�2�44�2�45�2�46�2�47�2�48�2�49�2�50�2�1月�2�2月��2�3月��2�4月��2�5月��2�6月��2�7月��2�8月��2�9月��2�10月��2�11月��2�12月��2�hg��2�erg��2�ev��2�ltd��2�ア��2�イ��2�ウ��2�エ)rMrErN)rOrErP)rQrErR)rSrErT)rUrErV)rWrErX)rYrErZ)r[rEr\)r]rEr^)r_rEr`)rarErb)rcrErd)rerErf)rgrErh)rirErj)rkr0)rlrErm)rnrEro)rprErq)rrrErs)rtrEru)rvrErw)rxrEry)rzrEr{)r|rEr})r~rEr)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)rrEr)rrEr)rrEr)rrEr)rrEr	)r
rEr)rrEr
)rrEr)rrEr)rrErr�r�r�r�r��_seg_31�s�rcfCs(d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�d�d�d�d�d	�d
�d�d�d
�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d �d!�d"�d#�d$�d%�d&�d'�d(�d)�d*�d+�d,�d-gdS(.N��2rE�オ��2�カ��2�キ��2�ク��2�ケ��2�コ��2�サ��2�シ��2�ス��2�セ��2�ソ��2�タ��2�チ��2�ツ��2�テ��2�ト��2�ナ��2�ニ��2�ヌ��2�ネ��2�ノ��2�ハ��2�ヒ��2�フ��2�ヘ��2�ホ��2�マ��2�ミ��2�ム��2�メ��2�モ��2�ヤ��2�ユ�2�ヨ�2�ラ�2�リ�2�ル�2�レ�2�ロ�2�ワ�2�ヰ�2�ヱ�2�ヲ�2r��3�アパート�3�アルファ�3�アンペア�3�	アール�3�イニング�3�	インチ�3�	ウォン�3�エスクード�3�エーカー�	3�	オンス�
3�	オーム�3�	カイリ�3�カラット�
3�カロリー�3�	ガロン�3�	ガンマ�3�ギガ�3�	ギニー�3�キュリー�3�ギルダー�3�キロ�3�キログラム�3�キロメートル�3�キロワット�3�	グラム�3�グラムトン�3�クルゼイロ�3�クローネ�3�	ケース�3�	コルナ�3�	コーポ�3�サイクル� 3�サンチーム�!3�シリング�"3�	センチ�#3�	セント�$3�	ダース�%3�デシ�&3�ドル�'3�トン�(3�ナノ�)3�	ノット�*3�	ハイツ�+3�パーセント�,3�	パーツ�-3�バーレル�.3�ピアストル�/3�	ピクル�03�ピコ�13�ビル�23�ファラッド�33�フィート�43�ブッシェル�53�	フラン�63�ヘクタール�73�ペソ)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr )r!rEr")r#rEr$)r%rEr&)r'rEr()r)rEr*)r+rEr,)r-rEr.)r/rEr0)r1rEr2)r3rEr4)r5rEr6)r7rEr8)r9rEr:)r;rEr<)r=rEr>)r?rEr@)rArErB)rCrErD)rErErF)rGrErH)rIrErJ)rKrErL)rMrErN)rOrErP)rQrErR)rSrErT)rUrErV)rWrErX)rYrErZ)r[rEr\)r]rEr^)r_rEr`)rarErb)rcrErd)rerErf)rgrErh)rirErj)rkr�)rlrErm)rnrEro)rprErq)rrrErs)rtrEru)rvrErw)rxrEry)rzrEr{)r|rEr})r~rEr)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�r�r�r�r�r��_seg_32
s�r�cfCs(d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�d�d�d�d�d	�d
�d�d�d
�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d �d!�d"�d#�d$�d%�d&�d'�d(�d)�d*�d+�d,�d-gdS(.N�83rE�	ペニヒ�93�	ヘルツ�:3�	ペンス�;3�	ページ�<3�	ベータ�=3�ポイント�>3�	ボルト�?3�ホン�@3�	ポンド�A3�	ホール�B3�	ホーン�C3�マイクロ�D3�	マイル�E3�	マッハ�F3�	マルク�G3�マンション�H3�ミクロン�I3�ミリ�J3�ミリバール�K3�メガ�L3�メガトン�M3�メートル�N3�	ヤード�O3�	ヤール�P3�	ユアン�Q3�リットル�R3�リラ�S3�	ルピー�T3�ルーブル�U3�レム�V3�レントゲン�W3�	ワット�X3�0点�Y3�1点�Z3�2点�[3�3点�\3�4点�]3�5点�^3�6点�_3�7点�`3�8点�a3�9点�b3�10点�c3�11点�d3�12点�e3�13点�f3�14点�g3�15点�h3�16点�i3�17点�j3�18点�k3�19点�l3�20点�m3�21点�n3�22点�o3�23点�p3�24点�q3�hpa�r3�da�s3�au�t3�bar�u3�ov�v3�pc�w3�dm�x3�dm2�y3�dm3�z3�iu�{3�平成�|3�昭和�}3�大正�~3�明治�3�株式会社�3�pa�3�na�3�μa�3�ma�3�ka�3�kb�3�mb�3�gb�3�cal�3�kcal�3�pf�3�nf�3�μf�3�μg�3�mg�3�kg�3�hz�3�khz�3�mhz�3�ghz�3�thz�3�μl�3�ml�3�dl�3�kl�3�fm�3�nm�3�μm)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr)rrEr)rrEr)rrEr)rrEr)r	rEr
)rrEr)r
rEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr )r!rEr")r#rEr$)r%rEr&)r'rEr()r)rEr*)r+rEr,)r-rEr.)r/rEr0)r1rEr2)r3rEr4)r5rEr6)r7rEr8)r9rEr:)r;rEr<)r=rEr>)r?rEr@)rArErB)rCrErD)rErErF)rGrErH)rIrErJ)rKrErL)rMrErN)rOrErP)rQrErR)rSrErT)rUrErV)rWrErX)rYrErZ)r[rEr\)r]rEr^)r_rEr`)rarErb)rcrErd)rerErf)rgrErh)rirErj)rkrErl)rmrErn)rorErp)rqrErr)rsrErt)rurErv)rwrErx)ryrErz)r{rEr|)r}rEr~)rrEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�r�r�r�r�r��_seg_33p
s�r�cfCsd�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�d�d�d�d�d	�d
�d�d�d
�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d �d!�d"�d#�d$�d%�d&�d'�d(gdS()N�3rE�mm�3�cm�3�km�3�mm2�3�cm2�3�m2�3�km2�3�mm3�3�cm3�3�m3�3�km3�3�m∕s�3�m∕s2�3rn�3�kpa�3�mpa�3�gpa�3�rad�3�rad∕s�3�rad∕s2�3�ps�3�ns�3�μs�3�ms�3�pv�3�nv�3�μv�3�mv�3�kv�3�3�pw�3�nw�3�μw�3�mw�3�kw�3�3�kω�3�mω��3r���3�bq��3�cc��3�cd��3�c∕kg��3��3�db��3�gy��3�ha��3�hp��3�in��3�kk��3��3�kt��3�lm��3�ln��3�log��3�lx��3rz��3�mil��3�mol��3�ph��3��3�ppm��3�pr��3�sr��3�sv��3�wb��3�v∕m��3�a∕m��3�1日��3�2日��3�3日��3�4日��3�5日��3�6日��3�7日��3�8日��3�9日��3�10日��3�11日��3�12日��3�13日��3�14日��3�15日��3�16日��3�17日��3�18日��3�19日��3�20日��3�21日�3�22日�3�23日�3�24日�3�25日�3�26日�3�27日�3�28日�3�29日�3�30日�3�31日�3�gal)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rErn)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�r�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�r�)r�rEr�)r�rEr�)r�rEr�)r�rEr)rrEr)rrEr)rrEr�)rrEr)rrEr	)r
rEr)rrEr
)rrEr)rrErz)rrEr)rrEr)rrEr)rr�)rrEr)rrEr)rrEr)rrEr)r rEr!)r"rEr#)r$rEr%)r&rEr')r(rEr))r*rEr+)r,rEr-)r.rEr/)r0rEr1)r2rEr3)r4rEr5)r6rEr7)r8rEr9)r:rEr;)r<rEr=)r>rEr?)r@rErA)rBrErC)rDrErE)rFrErG)rHrErI)rJrErK)rLrErM)rNrErO)rPrErQ)rRrErS)rTrErU)rVrErW)rXrErY)rZrEr[)r\rEr])r^rEr_)r`rEra)rbrErc)rdrErer�r�r�r�r��_seg_34�
s�rfceCs�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�gdS)�N�4r0�Mr��M�͟��鍤鐤�Ǥ�Ф�,��@�rE�ꙁ�A��B��ꙃ�C��D��ꙅ�E��F��ꙇ�G��H��ꙉ�I��J��ꙋ�K��L��ꙍ�M��N��ꙏ�O��P��ꙑ�Q��R��ꙓ�S��T��ꙕ�U��V��ꙗ�W��X��ꙙ�Y��Z��ꙛ�[��\��ꙝ�]��^��ꙟ�_��`��ꙡ�a��b��ꙣ�c��d��ꙥ�e��f��ꙧ�g��h��ꙩ�i��j��ꙫ�k��l��ꙭ�m�逦�ꚁ遦邦�ꚃ郦鄦�ꚅ酦醦�ꚇ釦鈦�ꚉ鉦銦�ꚋ鋦錦�ꚍ鍦鎦�ꚏ鏦鐦�ꚑ鑦钦�ꚓ铦锦�ꚕ镦閦�ꚗ闦阦韦����"��ꜣ�#��$��ꜥ�%��&��ꜧ�'��(��ꜩ�)��*��ꜫ�+��,��ꜭ�-��.��ꜯ�/��2��ꜳ�3�)rgr0)rhr�)rir0)rjr�)rkr0)rlr�)rmr0)rnr�)ror0)rpr�)rqrErr)rsr0)rtrEru)rvr0)rwrErx)ryr0)rzrEr{)r|r0)r}rEr~)rr0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�r�)r�r0)r�r�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0r�r�r�r�r��_seg_35@s�r�cfCs�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�gdS)�N�4�rE�ꜵ�5�r0�6��ꜷ�7��8��ꜹ�9��:��ꜻ�;��<��ꜽ�=��>��ꜿ�?��@��ꝁ�A��B��ꝃ�C��D��ꝅ�E��F��ꝇ�G��H��ꝉ�I��J��ꝋ�K��L��ꝍ�M��N��ꝏ�O��P��ꝑ�Q��R��ꝓ�S��T��ꝕ�U��V��ꝗ�W��X��ꝙ�Y��Z��ꝛ�[��\��ꝝ�]��^��ꝟ�_��`��ꝡ�a��b��ꝣ�c��d��ꝥ�e��f��ꝧ�g��h��ꝩ�i��j��ꝫ�k��l��ꝭ�m��n��ꝯ�o��p��q��y��ꝺ�z��{��ꝼ�|��}��ᵹ�~��ꝿ��逧�ꞁ遧邧�ꞃ郧鄧�ꞅ酧醧�ꞇ釧鋧�ꞌ錧鍧�ɥ鎧鏧r�鐧�ꞑ鑧钧�ꞓ铧锧頧�ꞡ顧颧�ꞣ飧餧�ꞥ饧馧�ꞧ駧騧�ꞩ驧骧�ɦ髧��ħ)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)rrEr)rr0)rrEr)rr0)rrEr)rr0)r	rEr
)rr0)rrEr
)rr0)rrEr)rr0)rrEr)rr0)rrEr)rr0)rrEr)rr0)rrEr)rr0)rrEr)r r0)r!rEr")r#r0)r$rEr%)r&r0)r'rEr()r)r0)r*rEr+)r,r0)r-rEr.)r/r0)r0rEr1)r2r0)r3rEr4)r5r0)r6rEr7)r8r0)r9rEr:)r;r0)r<rEr=)r>r0)r?rEr@)rAr0)rBrErC)rDr0)rErErF)rGr0)rHrErI)rJr0)rKrErL)rMr0)rNrErO)rPr0)rQrErO)rRr0)rSrErT)rUr0)rVrErW)rXr0)rYrErZ)r[rEr\)r]r0)r^rEr_)r`r0)rarErb)rcr0)rdrEre)rfr0)rgrErh)rir0)rjrErk)rlr0)rmrErn)ror0)rpr�)rqrErr)rsr0)rtrEru)rvr0)rwr�)rxrEry)rzr0)r{rEr|)r}r0)r~rEr)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r�)r�rEr�r�r�r�r�r��_seg_36�s�r�cfCs�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�gdS)�N�rE�œ�r0�,�r��0��:��@��x�逨�Ũ�Ψ�ڨ������T��_��}�逩�Ω�ϩ�ک�ީ�����7��@��N��P��Z��\��|�逪�ê�۪������	�������� ��'��(��/������������������������豈���更���車���賈���滑���串���句���龜�	��契�
��金���喇���奈�
��懶���癩���羅���蘿���螺���裸���邏���樂���洛���烙���珞���落���酪���駱���亂���卵���欄���爛���蘭� ��鸞�!��嵐�"��濫�#��藍�$��襤�%��拉�&��臘�'��蠟�(��廊�)��朗�*��浪�+��狼�,��郎�-��來)r�rEr�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr)rrEr)rrEr)rrEr)rrEr)r	rEr
)rrEr)r
rEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrErr�r�r�r�r��_seg_37s�rcfCs(d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�d�d�d�d�d	�d
�d�d�d
�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d �d!�d"�d#�d$�d%�d&�d'�d(�d)�d*�d+�d,�d-gdS(.N�.�rE�冷�/��勞�0��擄�1��櫓�2��爐�3��盧�4��老�5��蘆�6��虜�7��路�8��露�9��魯�:��鷺�;��碌�<��祿�=��綠�>��菉�?��錄�@��鹿�A��論�B��壟�C��弄�D��籠�E��聾�F��牢�G��磊�H��賂�I��雷�J��壘�K��屢�L��樓�M��淚�N��漏�O��累�P��縷�Q��陋�R��勒�S��肋�T��凜�U��凌�V��稜�W��綾�X��菱�Y��陵�Z��讀�[��拏�\��樂�]��諾�^��丹�_��寧�`��怒�a��率�b��異�c��北�d��磻�e��便�f��復�g��不�h��泌�i��數�j��索�k��參�l��塞�m��省�n��葉�o��說�p��殺�q��辰�r��沈�s��拾�t��若�u��掠�v��略�w��亮�x��兩�y��凉�z��梁�{��糧�|��良�}��諒�~��量���勵��呂��女��廬��旅��濾��礪��閭��驪��麗��黎��力��曆��歷��轢��年��憐��戀��撚)r rEr!)r"rEr#)r$rEr%)r&rEr')r(rEr))r*rEr+)r,rEr-)r.rEr/)r0rEr1)r2rEr3)r4rEr5)r6rEr7)r8rEr9)r:rEr;)r<rEr=)r>rEr?)r@rErA)rBrErC)rDrErE)rFrErG)rHrErI)rJrErK)rLrErM)rNrErO)rPrErQ)rRrErS)rTrErU)rVrErW)rXrErY)rZrEr[)r\rEr])r^rEr_)r`rEra)rbrErc)rdrEre)rfrErg)rhrEri)rjrErk)rlrErm)rnrEro)rprErq)rrrErs)rtrEru)rvrErw)rxrEry)rzrEr{)r|rEr})r~rEr)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�r�r�r�r�r��_seg_38xs�r�cfCs(d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�d�d�d�d�d	�d
�d�d�d
�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d �d!�d"�d#�d$�d%�d&�d'�d(�d)�d*�d+�d,�d-gdS(.N�rE�漣��煉��璉��秊��練��聯��輦��蓮��連��鍊��列��劣��咽��烈��裂��說��廉��念��捻��殮��簾��獵��令��囹��寧��嶺��怜��玲��瑩��羚��聆��鈴��零��靈��領��例��禮��醴��隸��惡��了��僚��寮��尿��料��樂��燎��療���蓼���遼���龍���暈���阮���劉���杻���柳���流���溜���琉���留���硫���紐���類���六���戮���陸���倫���崙���淪���輪���律���慄���栗���率���隆���利���吏���履���易���李���梨���泥���理���痢���罹���裏���裡���里���離���匿���溺���吝���燐���璘���藺���隣���鱗���麟���林��淋)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr)rrEr)rrEr)rrEr)rrEr)r	rEr
)rrEr)r
rEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr )r!rEr")r#rEr$)r%rEr&)r'rEr()r)rEr*)r+rEr,)r-rEr.)r/rEr0)r1rEr2)r3rEr4)r5rEr6)r7rEr8)r9rEr:)r;rEr<)r=rEr>)r?rEr@)rArErB)rCrErD)rErErF)rGrErH)rIrErJ)rKrErL)rMrErN)rOrErP)rQrErR)rSrErT)rUrErV)rWrErX)rYrErZ)r[rEr\)r]rEr^)r_rEr`)rarErb)rcrErd)rerErf)rgrErh)rirErj)rkrErl)rmrErn)rorErp)rqrErr)rsrErt)rurErv)rwrErx)ryrErz)r{rEr|)r}rEr~)rrEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�r�r�r�r�r��_seg_39�s�r�cfCsd�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�d�d�d�d�d	�d
�d�d�d
�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d �d!�d"�d#�d$�d%�d&�d'gdS((N�rE�臨��立��笠��粒��狀��炙��識��什��茶��刺���切���度���拓���糖���宅���洞���暴���輻���行�	��降�
��見���廓���兀�
��嗀��r0���塚�����晴�����凞���猪���益���礼���神���祥���福���靖���精���羽��� ��蘒�!��"��諸�#��%��逸�&��都�'��*��飯�+��飼�,��館�-��鶴�.��郞�/��隷�0��侮�1��僧�2��免�3��勉�4��勤�5��卑�6��喝�7��嘆�8��器�9��塀�:��墨�;��層�<��屮�=��悔�>��慨�?��憎�@��懲�A��敏�B��既�C��暑�D��梅�E��海�F��渚�G��漢�H��煮�I��爫�J��琢�K��碑�L��社�M��祉�N��祈�O��祐�P��祖�Q��祝�R��禍�S��禎�T��穀�U��突�V��節�W��練�X��縉�Y��繁�Z��署�[��者�\��臭�]��艹�_��著)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�r0)r�rEr�)rr0)rrEr)rr0)rrEr)rrEr)rr0)r	rEr
)rrEr)r
rEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr )r!rEr")r#rEr$)r%rEr&)r'rEr()r)rEr*)r+rEr,)r-rEr.)r/rEr0)r1rEr2)r3rEr4)r5rEr6)r7rEr8)r9rEr:)r;rEr<)r=rEr>)r?rEr@)rArErB)rCrErD)rErErF)rGrErH)rIrErJ)rKrErL)rMrErN)rOrErP)rQrErR)rSrErT)rUrErV)rWrErX)rYrErZ)r[rEr\)r]rEr^)r_rEr`)rarErb)rcrErd)rerErf)rgrErh)rirErj)rkrErl)rmrErn)rorErp)rqrErrr�r�r�r�r��_seg_40Hs�rscfCs d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�d�d�d�d�d	�d
�d�d�d
�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d �d!�d"�d#�d$�d%�d&�d'�d(�d)gdS(*N�`�rE�褐�a��視�b��謁�c��謹�d��賓�e��贈�f��辶�g��逸�h��難�i��響�j��頻�k��恵�l��𤋮�m��舘�n�r��p��並�q��况�r��全�s��侀�t��充�u��冀�v��勇�w��勺�x��喝�y��啕�z��喙�{��嗢�|��塚�}��墳�~��奄���奔��婢��嬨��廒��廙��彩��徭��惘��慎��愈��憎��慠��懲��戴��揄��搜��摒��敖��晴��朗��望��杖��歹��殺��流��滛��滋��漢��瀞��煮��瞧��爵��犯��猪��瑱��甆��画��瘝��瘟��益��盛��直��睊��着��磌��窱��節��类��絛��練��缾��者��荒��華��蝹��襁��覆���調��諸��請���諾��諭���變����輸���遲���醙)rtrEru)rvrErw)rxrEry)rzrEr{)r|rEr})r~rEr)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�r�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr)rrEr)rrEr)rrEr)rrEr)r	rEr
)rrEr)r
rEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr )r!rErw)r"rEr#)r$rEr%)r&rEr')r(rEry)r)rEr*)r+rEr,)r-rEr{)r.rEr/)r0rEr)r1rEr2)r3rEr4)r5rEr6r�r�r�r�r��_seg_41�s�r7cfCsd�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�d�d�d�d�d	�d
�d�d�d
�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d �d!�d"�d#�d$�d%�d&�d'gdS((N��rE�鉶���陼���難���靖���韛���響���頋���頻���鬒���龜���𢡊���𢡄���𣏕���㮝���䀘���䀹���𥉉���𥳐���𧻓���齃���龎��r����ff���fi���fl���ffi���ffl���st�����մն���մե���մի���վն���մխ�����יִ��r0���ײַ� ��ע�!��א�"��ד�#��ה�$��כ�%��ל�&��ם�'��ר�(��ת�)�rr��*��שׁ�+��שׂ�,��שּׁ�-��שּׂ�.��אַ�/��אָ�0��אּ�1��בּ�2��גּ�3��דּ�4��הּ�5��וּ�6��זּ�7��8��טּ�9��יּ�:��ךּ�;��כּ�<��לּ�=��>��מּ�?��@��נּ�A��סּ�B��C��ףּ�D��פּ�E��F��צּ�G��קּ�H��רּ�I��שּ�J��תּ�K��וֹ�L��בֿ�M��כֿ�N��פֿ�O��אל�P��ٱ�R��ٻ�V��پ�Z��ڀ�^��ٺ�b��ٿ�f��ٹ�j��ڤ�n��ڦ�r��ڄ�v��ڃ�z��چ�~��ڇ��ڍ)r8rEr9)r:rEr;)r<rEr=)r>rEr?)r@rErA)rBrErC)rDrErE)rFrErG)rHrErI)rJrErK)rLrErM)rNrErO)rPrErQ)rRrErS)rTrErU)rVrErW)rXrErY)rZrEr[)r\rEr])r^rEr_)r`rEra)rbr�)rcrErd)rerErf)rgrErh)rirErj)rkrErl)rmrErn)ror�)rprErq)rrrErs)rtrEru)rvrErw)rxrEry)rzr�)r{rEr|)r}r0)r~rEr)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�r�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�r�)r�rEr�)r�r�)r�rEr�)r�rEr�)r�r�)r�rEr�)r�rEr�)r�r�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�r�r�r�r�r��_seg_42s�r�cfCs&d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�d�d�d�d�d	�d
�d�d�d
�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d �d!�d"�d#�d$�d%�d&�d'�d(�d)�d*�d+�d,gdS(-N�rE�ڌ��ڎ��ڈ��ژ��ڑ��ک��گ��ڳ��ڱ��ں��ڻ��ۀ��ہ��ھ��ے��ۓ�r0��r����ڭ���ۇ���ۆ���ۈ���ۇٴ���ۋ���ۅ���ۉ���ې���ى���ئا���ئە���ئو���ئۇ���ئۆ���ئۈ��ئې��ئى��ی���ئج���ئح���ئم�����ئي���بج���بح���بخ���بم�	��بى�
��بي���تج���تح�
��تخ���تم���تى���تي���ثج���ثم���ثى���ثي���جح���جم���حج���حم���خج���خح���خم���سج���سح���سخ���سم� ��صح�!��صم�"��ضج�#��ضح�$��ضخ�%��ضم�&��طح�'��طم�(��ظم�)��عج�*��عم�+��غج�,��غم�-��فج�.��فح�/��فخ�0��فم�1��فى�2��في�3��قح�4��قم�5��قى�6��قي�7��كا�8��كج�9��كح�:��كخ�;��كل�<��كم�=��كى�>��كي)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr)rrEr)rrEr)rrEr)rrEr)r	rEr
)rrEr)r
rEr)rrEr)rrEr)rrEr)rrEr)rr0)rr�)rrEr)rrEr)rrEr)rrEr )r!rEr")r#rEr$)r%rEr&)r'rEr()r)rEr*)r+rEr,)r-rEr.)r/rEr0)r1rEr2)r3rEr4)r5rEr6)r7rEr8)r9rEr:)r;rEr<)r=rEr>)r?rEr@)rArErB)rCrErD)rErEr<)rFrErG)rHrErI)rJrErK)rLrErM)rNrErO)rPrErQ)rRrErS)rTrErU)rVrErW)rXrErY)rZrEr[)r\rEr])r^rEr_)r`rEra)rbrErc)rdrEre)rfrErg)rhrEri)rjrErk)rlrErm)rnrEro)rprErq)rrrErs)rtrEru)rvrErw)rxrEry)rzrEr{)r|rEr})r~rEr)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�r�r�r�r�r��_seg_43�s�r�cfCsd�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�d�d�d�d�d	�d
�d�d�d
�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d �d!gdS("N�?�rE�لج�@��لح�A��لخ�B��لم�C��لى�D��لي�E��مج�F��مح�G��مخ�H��مم�I��مى�J��مي�K��نج�L��نح�M��نخ�N��نم�O��نى�P��ني�Q��هج�R��هم�S��هى�T��هي�U��يج�V��يح�W��يخ�X��يم�Y��يى�Z��يي�[��ذٰ�\��رٰ�]��ىٰ�^�r� ٌّ�_�� ٍّ�`�� َّ�a�� ُّ�b�� ِّ�c�� ّٰ�d��ئر�e��ئز�f��ئم�g��ئن�h��ئى�i��ئي�j��بر�k��بز�l��بم�m��بن�n��بى�o��بي�p��تر�q��تز�r��تم�s��تن�t��تى�u��تي�v��ثر�w��ثز�x��ثم�y��ثن�z��ثى�{��ثي�|��فى�}��في�~��قى���قي��كا��كل��كم��كى��كي�����ما���نر��نز���نن�����ير��يز���ين����ئج��ئح��ئخ���ئه��بج��بح��بخ���به��تج��تح)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rr�)r�rr�)r�rr)rrr)rrr)rrr)rrEr)r	rEr
)rrEr)r
rEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr )r!rEr")r#rEr$)r%rEr&)r'rEr()r)rEr*)r+rEr,)r-rEr.)r/rEr0)r1rEr2)r3rEr4)r5rEr6)r7rEr8)r9rEr:)r;rEr<)r=rEr>)r?rEr@)rArErB)rCrErD)rErErF)rGrErH)rIrEr�)rJrEr�)rKrEr�)rLrErM)rNrEr�)rOrErP)rQrErR)rSrEr�)rTrErU)rVrEr�)rWrEr�)rXrEr�)rYrErZ)r[rEr\)r]rEr�)r^rEr_)r`rEr�)rarEr�)rbrErc)rdrEre)rfrErg)rhrEr)rirErj)rkrErl)rmrErn)rorErp)rqrEr)rrrErs)rtrEru)rvrErwr�r�r�r�r��_seg_44�s�rxcfCsd�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�d�d�d�d�d	�d
�d�d�d
�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d �d!�d"gdS(#N�rE�تخ��تم��ته��ثم��جح��جم��حج��حم��خج��خم��سج��سح��سخ��سم��صح��صخ��صم��ضج��ضح��ضخ��ضم��طح��ظم��عج��عم��غج��غم��فج��فح��فخ��فم���قح���قم���كج���كح���كخ���كل���كم���لج���لح���لخ���لم���له���مج���مح���مخ���مم���نج���نح���نخ���نم���نه���هج���هم���هٰ���يج���يح���يخ���يم���يه���ئم���ئه���بم���به���������ثه�����سه���شم���شه�����������������ـَّ���ـُّ���ـِّ��طى��طي��عى��عي��غى��غي��سى��سي��شى��شي��حى���حي���جى���جي���خى���خي���صى���صي)ryrErz)r{rEr|)r}rEr~)rrEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr|)r�rEr~)r�rEr�)r�rEr�)r�rEr�)r�rEr)rrEr)rrEr)rrEr�)rrEr�)rrEr�)rrEr�)r	rEr�)r
rEr�)rrEr�)rrEr
)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)r rEr!)r"rEr#)r$rEr%)r&rEr')r(rEr))r*rEr+)r,rEr-)r.rEr/)r0rEr1)r2rEr3)r4rEr5r�r�r�r�r��_seg_45Ps�r6cfCsd�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�d�d�d�d�d	�d
�d�d�d
�d�d�d�d�d�d�d�d�d�d�d�d�d�dgdS(N��rE�ضى���ضي�	��شج�
��شح���شخ���شم�
��شر���سر���صر���ضر���طى���طي���عى���عي���غى���غي���سى���سي���شى���شي���حى���حي���جى���جي���خى� ��خي�!��صى�"��صي�#��$��%��&��'��(��)��*��+��,��-��.��/��0��1��سه�2��شه�3��طم�4��سج�5��سح�6��سخ�7��8��9��:��;��ظم�<��اً�>�r0�@�r��P��تجم�Q��تحج�S��تحم�T��تخم�U��تمج�V��تمح�W��تمخ�X��جمح�Z��حمي�[��حمى�\��سحج�]��سجح�^��سجى�_��سمح�a��سمج�b��سمم�d��صحح�f��صمم�g��شحم�i��شجي�j��شمخ�l��شمم�n��ضحى�o��ضخم�q��طمح�s��طمم�t��طمي�u��عجم�v��عمم�x��عمى�y��غمم�z��غمي�{��غمى�|��فخم�~��قمح���قمم��لحم��لحي��لحى��لجج��لخم��لمح��محج��محم)r7rEr8)r9rEr:)r;rEr<)r=rEr>)r?rEr@)rArErB)rCrErD)rErErF)rGrErH)rIrErJ)rKrErL)rMrErN)rOrErP)rQrErR)rSrErT)rUrErV)rWrErX)rYrErZ)r[rEr\)r]rEr^)r_rEr`)rarErb)rcrErd)rerErf)rgrErh)rirErj)rkrErl)rmrErn)rorEr8)rprEr:)rqrEr<)rrrEr>)rsrEr@)rtrErB)rurErD)rvrErF)rwrErH)rxrErJ)ryrEr<)rzrEr>)r{rEr@)r|rErB)r}rEr~)rrEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr<)r�rEr>)r�rEr@)r�rEr�)r�rEr�)r�rEr�)r�r0)r�r�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�r�r�r�r�r��_seg_46�s�r�cfCsd�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�d�d�d�d�d	�d
�d�d�d
�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d �d!�d"�d#�d$�d%gdS(&N�rE�محي��مجح��مجم��مخج��مخم�r���مجخ��همج��همم��نحم��نحى��نجم��نجى��نمي��نمى��يمم��بخي��تجي��تجى��تخي��تخى��تمي��تمى��جمي��جحى��جمى��سخى��صحي��شحي��ضحي��لجي��لمي��يحي��يجي��يمي��ممي��قمي��نحي��قمح��لحم��عمي��كمي��نجح��مخي��لجم��كمم����جحي��حجي��مجي��فمي���بحي�����عجم���صمم���سخي���نجي�����صلے���قلے���الله���اكبر���محمد��صلعم��رسول��عليه��وسلم��صلى�r�!صلى الله عليه وسلم��جل جلاله��ریال�r0���r����,���、�����:��rl���!���?���〖���〗��� ��'��1��—�2��–�3��_�5�r��6�r��7��{�8��}�9��〔�:��〕�;��【�<��】�=��《�>��》)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�r�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr)rrEr)rrEr)rrEr)rrEr)r	rEr
)rrEr)r
rEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr )r!rEr")r#rEr$)r%rEr&)r'rEr()r)rEr*)r+rEr,)r-rEr.)r/rEr0)r1rEr2)r3rEr4)r5rEr6)r7rEr8)r9rEr:)r;rEr<)r=rEr>)r?rEr@)rArErB)rCrErD)rErErF)rGrErD)rHrEr@)rIrErJ)rKrErL)rMrErN)rOrErP)rQrErR)rSrErF)rTrErU)rVrErW)rXrErY)rZrEr[)r\r�)r]rEr^)r_rEr`)rarErb)rcrErd)rerErf)rgrErh)rirErj)rkrErl)rmrErn)rorErp)rqrrr)rsrrt)rurErv)rwr0)rxr�)ryr�)rzrr{)r|rEr})r~r�)rrr�)r�rrl)r�rr�)r�rr�)r�rEr�)r�rEr�)r�r�)r�r0)r�r�)r�rEr�)r�rEr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�r�r�r�r�r��_seg_47 s�r�cfCsd�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�d�d�d�d�d	�d
�d�d�d
�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d �d!�d"�d#�d$�d%�d&�d'gdS((N�?�rE�〈�@��〉�A��「�B��」�C��『�D��』�E�r0�G�r�[�H��]�I�� ̅�M�r��P�r{�Q��、�R�r��T�rl�U�r��V�r��W�r��X��—�Y�r��Z�r��[�r��\�r��]��〔�^��〕�_��#�`��&�a��*�b�r��c��-�d��<�e��>�f�r��g��h��\�i��$�j��%�k��@�l��p�� ً�q��ـً�r�� ٌ�s��t�� ٍ�u��v�� َ�w��ـَ�x�� ُ�y��ـُ�z�� ِ�{��ـِ�|�� ّ�}��ـّ�~�� ْ���ـْ��ء��آ��أ��ؤ��إ��ئ��ا��ب��ة��ت��ث��ج��ح��خ��د��ذ��ر��ز��س��ش��ص��ض��ط���ظ���ع���غ���ف���ق���ك���ل���م���ن���ه���و���ى���ي��لآ��لأ��لإ��لا��r��������")r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�r0)r�rr�)r�rr�)r�rr�)r�rr�)r�rr{)r�rEr�)r�r�)r�rrl)r�rr�)r�rr�)r�rr�)r�rEr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rEr�)r�rEr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rEr�)r�rr�)r�rr�)r�rr�)r�r�)r�rr�)r�rr�)r�rr�)r�rr�)r�r�)r�rr�)r�rEr�)r�rr�)r�r0)r�rr�)r�r�)r�rr�)r�rEr�)r�rr�)r�rEr�)r�rr�)r�rEr�)r�rr�)r�rEr�)r�rr�)rrEr)rrEr)rrEr)rrEr)rrEr	)r
rEr)rrEr
)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)r rEr!)r"rEr#)r$rEr%)r&rEr')r(rEr))r*rEr+)r,rEr-)r.rEr/)r0rEr1)r2rEr3)r4rEr5)r6rEr7)r8rEr9)r:rEr;)r<rEr=)r>rEr?)r@rErA)rBrErC)rDrErE)rFrErG)rHrErI)rJrErK)rLrErM)rNrErO)rPrErQ)rRr�)rSr�)rTr�)rUrr�)rVrrWr�r�r�r�r��_seg_48�s�rXcfCs�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�d�d�d�d�d	�d
�d�d�d
�d�d�d�d�dgdS(N��rr���r���r���r����'��r��	�r��
�r���r���r{�
�rEr���r���/��r���r���r�����r���r���r���r���r���r���r���rl��r���r���r���r�� �r��!�rF�"�rH�#�rJ�$�rL�%�rN�&�rP�'�rR�(�rT�)�rV�*�rX�+�rZ�,�r\�-�r^�.�r`�/�rb�0�rd�1�rf�2�rh�3�rj�4�rl�5�rn�6�rp�7�rr�8�rt�9�rv�:�rx�;�r��<�r��=�r��>��^�?�r��@�rm�A��B��C��D��E��F��G��H��I��J��K��L��M��N��O��P��Q��R��S��T��U��V��W��X��Y��Z��[�r��\��|�]�r��^��~�_��⦅�`��⦆�a��b��「�c��」�d��、�e��・�f��ヲ)rYrr�)rZrr�)r[rr�)r\rr�)r]rr^)r_rr�)r`rr�)rarr�)rbrr�)rcrr{)rdrEr�)rerEr)rfrrg)rhrEr�)rirEr�)rjrEr�)rkrEr)rlrEr�)rmrEr�)rnrEr�)rorEr�)rprEr�)rqrEr�)rrrr�)rsrrl)rtrr�)rurr�)rvrr�)rwrr�)rxrr�)ryrErF)rzrErH)r{rErJ)r|rErL)r}rErN)r~rErP)rrErR)r�rErT)r�rErV)r�rErX)r�rErZ)r�rEr\)r�rEr^)r�rEr`)r�rErb)r�rErd)r�rErf)r�rErh)r�rErj)r�rErl)r�rErn)r�rErp)r�rErr)r�rErt)r�rErv)r�rErx)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rrm)r�rErF)r�rErH)r�rErJ)r�rErL)r�rErN)r�rErP)r�rErR)r�rErT)r�rErV)r�rErX)r�rErZ)r�rEr\)r�rEr^)r�rEr`)r�rErb)r�rErd)r�rErf)r�rErh)r�rErj)r�rErl)r�rErn)r�rErp)r�rErr)r�rErt)r�rErv)r�rErx)r�rr�)r�rr�)r�rr�)r�rr�)r�rEr�)r�rEr�)r�rEr)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�r�r�r�r�r��_seg_49�s�r�cfCs$d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�d�d�d�d�d	�d
�d�d�d
�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d �d!�d"�d#�d$�d%�d&�d'�d(�d)�d*�d+gdS(,N�g�rE�ァ�h��ィ�i��ゥ�j��ェ�k��ォ�l��ャ�m��ュ�n��ョ�o��ッ�p��ー�q��ア�r��イ�s��ウ�t��エ�u��オ�v��カ�w��キ�x��ク�y��ケ�z��コ�{��サ�|��シ�}��ス�~��セ���ソ��タ��チ��ツ��テ��ト��ナ��ニ��ヌ��ネ��ノ��ハ��ヒ��フ��ヘ��ホ��マ��ミ��ム��メ��モ��ヤ��ユ��ヨ��ラ��リ��ル��レ��ロ��ワ��ン��゙��゚�r���ᄀ��ᄁ��ᆪ��ᄂ��ᆬ��ᆭ��ᄃ��ᄄ��ᄅ��ᆰ��ᆱ��ᆲ��ᆳ��ᆴ��ᆵ��ᄚ��ᄆ��ᄇ��ᄈ��ᄡ��ᄉ��ᄊ��ᄋ��ᄌ��ᄍ��ᄎ��ᄏ��ᄐ��ᄑ��ᄒ����ᅡ���ᅢ���ᅣ���ᅤ���ᅥ���ᅦ�����ᅧ���ᅨ���ᅩ���ᅪ)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r rEr )r rEr )r rEr )r rEr )r rEr	 )r
 rEr )r rEr
 )r rEr )r rEr )r rEr )r rEr )r rEr )r rEr )r rEr )r rEr )r rEr )r  rEr! )r" rEr# )r$ rEr% )r& rEr' )r( rEr) )r* rEr+ )r, rEr- )r. rEr/ )r0 rEr1 )r2 rEr3 )r4 rEr5 )r6 rEr7 )r8 rEr9 )r: rEr; )r< r�)r= rEr> )r? rEr@ )rA rErB )rC rErD )rE rErF )rG rErH )rI rErJ )rK rErL )rM rErN )rO rErP )rQ rErR )rS rErT )rU rErV )rW rErX )rY rErZ )r[ rEr\ )r] rEr^ )r_ rEr` )ra rErb )rc rErd )re rErf )rg rErh )ri rErj )rk rErl )rm rErn )ro rErp )rq rErr )rs rErt )ru rErv )rw rErx )ry r�)rz rEr{ )r| rEr} )r~ rEr )r� rEr� )r� rEr� )r� rEr� )r� r�)r� rEr� )r� rEr� )r� rEr� )r� rEr� r�r�r�r�r��_seg_50Xs�r� cfCs�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�dgdS(N��rE�ᅫ���ᅬ��r����ᅭ���ᅮ���ᅯ���ᅰ���ᅱ���ᅲ�����ᅳ���ᅴ���ᅵ�����¢���£���¬��r� ̄���¦���¥���₩�����│���←���↑���→���↓���■���○���r0��
�'�(�;�<�>�?�N�P�^������4�7�������������� �$�0�K�����������𐐨��𐐩��𐐪��𐐫��𐐬��𐐭��𐐮��𐐯��𐐰�	�𐐱�
�𐐲��𐐳��𐐴�
�𐐵��𐐶��𐐷��𐐸��𐐹��𐐺��𐐻��𐐼��𐐽��𐐾��𐐿��𐑀��𐑁��𐑂��𐑃��𐑄��𐑅)r� rEr� )r� rEr� )r� r�)r� rEr� )r� rEr� )r� rEr� )r� rEr� )r� rEr� )r� rEr� )r� r�)r� rEr� )r� rEr� )r� rEr� )r� r�)r� rEr� )r� rEr� )r� rEr� )r� rr� )r� rEr� )r� rEr� )r� rEr� )r� r�)r� rEr� )r� rEr� )r� rEr� )r� rEr� )r� rEr� )r� rEr� )r� rEr� )r� r�)r� r0)r� r�)r� r0)r� r�)r� r0)r� r�)r� r0)r� r�)r� r0)r� r�)r� r0)r� r�)r� r0)r� r�)r� r0)r� r�)r� r0)r� r�)r� r0)r� r�)r� r0)r� r�)r� r0)r� r�)r� r0)r� r�)r� r0)r� r�)r� r0)r� r�)r� r0)r� r�)r� r0)r� r�)r� r0)r� r�)r� r0)r� r�)r� r0)r� r�)r� rEr� )r� rEr� )r� rEr� )r� rEr� )r� rEr� )r� rEr� )r� rEr� )r� rEr� )r� rEr!)r!rEr!)r!rEr!)r!rEr!)r!rEr!)r	!rEr
!)r!rEr!)r
!rEr!)r!rEr!)r!rEr!)r!rEr!)r!rEr!)r!rEr!)r!rEr!)r!rEr!)r!rEr!)r!rEr !)r!!rEr"!)r#!rEr$!)r%!rEr&!)r'!rEr(!)r)!rEr*!r�r�r�r�r��_seg_51�s�r+!ceCs�drdsdtdudvdwdxdydzd{d|d}d~dd�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�gdS)�N�rE�𐑆��𐑇� �𐑈�!�𐑉�"�𐑊�#�𐑋�$�𐑌�%�𐑍�&�𐑎�'�𐑏�(r0�r�������	�
�6�7�9�<�=�?�V�W�`�	�	�	�:	�?	�@	�	�	�	�	�
�
�
�
�
�
�
�
�
�4
�8
�;
�?
�H
�P
�Y
�`
�
��6�9�V�X�s�x���I�`���N�R�p��������������5�6�D������������� �o#�$�c$�p$�t$�0�/4)r,!rEr-!)r.!rEr/!)r0!rEr1!)r2!rEr3!)r4!rEr5!)r6!rEr7!)r8!rEr9!)r:!rEr;!)r<!rEr=!)r>!rEr?!)r@!r0)rA!r�)rB!r0)rC!r�)rD!r0)rE!r�)rF!r0)rG!r�)rH!r0)rI!r�)rJ!r0)rK!r�)rL!r0)rM!r�)rN!r0)rO!r�)rP!r0)rQ!r�)rR!r0)rS!r�)rT!r0)rU!r�)rV!r0)rW!r�)rX!r0)rY!r�)rZ!r0)r[!r�)r\!r0)r]!r�)r^!r0)r_!r�)r`!r0)ra!r�)rb!r0)rc!r�)rd!r0)re!r�)rf!r0)rg!r�)rh!r0)ri!r�)rj!r0)rk!r�)rl!r0)rm!r�)rn!r0)ro!r�)rp!r0)rq!r�)rr!r0)rs!r�)rt!r0)ru!r�)rv!r0)rw!r�)rx!r0)ry!r�)rz!r0)r{!r�)r|!r0)r}!r�)r~!r0)r!r�)r�!r0)r�!r�)r�!r0)r�!r�)r�!r0)r�!r�)r�!r0)r�!r�)r�!r0)r�!r�)r�!r0)r�!r�)r�!r0)r�!r�)r�!r0)r�!r�)r�!r0)r�!r�)r�!r0)r�!r�)r�!r0)r�!r�)r�!r0)r�!r�)r�!r0)r�!r�r�r�r�r�r��_seg_52(s�r�!cfCs�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�gdS)�N�hr0�9jr��o�Eo�Po�o�o�o�����������'��)��^�rE�𝅗𝅥�_��𝅘𝅥�`��𝅘𝅥𝅮�a��𝅘𝅥𝅯�b��𝅘𝅥𝅰�c��𝅘𝅥𝅱�d��𝅘𝅥𝅲�e��s��{����𝆹𝅥���𝆺𝅥���𝆹𝅥𝅮���𝆺𝅥𝅮���𝆹𝅥𝅯���𝆺𝅥𝅯��������F����W��`��r���rF��rH��rJ��rL��rN��rP��rR��rT��rV�	�rX�
�rZ��r\��r^�
�r`��rb��rd��rf��rh��rj��rl��rn��rp��rr��rt��rv��rx������������� ��!��"��#��$��%��&��'��(��)��*��+��,��-��.��/��0��1��2��3��4��5��6��7��8��9��:��;��<�)r�!r0)r�!r�)r�!r0)r�!r�)r�!r0)r�!r�)r�!r0)r�!r�)r�!r0)r�!r�)r�!r0)r�!r�)r�!r0)r�!r�)r�!r0)r�!rEr�!)r�!rEr�!)r�!rEr�!)r�!rEr�!)r�!rEr�!)r�!rEr�!)r�!rEr�!)r�!r0)r�!r�)r�!r0)r�!rEr�!)r�!rEr�!)r�!rEr�!)r�!rEr�!)r�!rEr�!)r�!rEr�!)r�!r0)r�!r�)r�!r0)r�!r�)r�!r0)r�!r�)r�!r0)r�!r�)r�!rErF)r�!rErH)r�!rErJ)r�!rErL)r�!rErN)r�!rErP)r�!rErR)r�!rErT)r�!rErV)r�!rErX)r�!rErZ)r�!rEr\)r�!rEr^)r�!rEr`)r�!rErb)r�!rErd)r�!rErf)r�!rErh)r�!rErj)r�!rErl)r�!rErn)r�!rErp)r�!rErr)r�!rErt)r�!rErv)r�!rErx)r�!rErF)r�!rErH)r�!rErJ)r�!rErL)r�!rErN)r�!rErP)r�!rErR)r�!rErT)r�!rErV)r�!rErX)r�!rErZ)r�!rEr\)r�!rEr^)r�!rEr`)r�!rErb)r�!rErd)r�!rErf)r�!rErh)r�!rErj)r�!rErl)r�!rErn)r�!rErp)r�!rErr)r"rErt)r"rErv)r"rErx)r"rErF)r"rErH)r"rErJ)r"rErL)r"rErN)r"rErP)r	"rErR)r
"rErT)r"rErVr�r�r�r�r��_seg_53�s�r"ceCs�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�gdS)�N�=�rErX�>�rZ�?�r\�@�r^�A�r`�B�rb�C�rd�D�rf�E�rh�F�rj�G�rl�H�rn�I�rp�J�rr�K�rt�L�rv�M�rx�N�rF�O�rH�P�rJ�Q�rL�R�rN�S�rP�T�rR�U�r��V�rV�W��X��Y��Z��[��\��]��^��_��`��a��b��c��d��e��f��g��h��i��j��k��l��m��n��o�rT�p��q��r��s��t��u��v��w��x��y��z��{��|��}��~���������������������������������������������������������������������)r
"rErX)r"rErZ)r"rEr\)r"rEr^)r"rEr`)r"rErb)r"rErd)r"rErf)r"rErh)r"rErj)r"rErl)r"rErn)r"rErp)r"rErr)r"rErt)r"rErv)r"rErx)r"rErF)r"rErH)r "rErJ)r!"rErL)r""rErN)r#"rErP)r$"rErR)r%"r�)r&"rErV)r'"rErX)r("rErZ)r)"rEr\)r*"rEr^)r+"rEr`)r,"rErb)r-"rErd)r."rErf)r/"rErh)r0"rErj)r1"rErl)r2"rErn)r3"rErp)r4"rErr)r5"rErt)r6"rErv)r7"rErx)r8"rErF)r9"rErH)r:"rErJ)r;"rErL)r<"rErN)r="rErP)r>"rErR)r?"rErT)r@"rErV)rA"rErX)rB"rErZ)rC"rEr\)rD"rEr^)rE"rEr`)rF"rErb)rG"rErd)rH"rErf)rI"rErh)rJ"rErj)rK"rErl)rL"rErn)rM"rErp)rN"rErr)rO"rErt)rP"rErv)rQ"rErx)rR"rErF)rS"rErH)rT"rErJ)rU"rErL)rV"rErN)rW"rErP)rX"rErR)rY"rErT)rZ"rErV)r["rErX)r\"rErZ)r]"rEr\)r^"rEr^)r_"rEr`)r`"rErb)ra"rErd)rb"rErf)rc"rErh)rd"rErj)re"rErl)rf"rErn)rg"rErp)rh"rErr)ri"rErt)rj"rErv)rk"rErx)rl"rErF)rm"r�)rn"rErJ)ro"rErL)rp"r�r�r�r�r�r��_seg_54�s�rq"cfCs�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�gdS)�N��rErR��r���rX��rZ����r`��rb��rd��rf����rj��rl��rn��rp��rr��rt��rv��rx��rF��rH��rJ��rL����rP����rT��rV������r\���r^���������������rh���������������������������������������rN��������������������������������������������������������������������������������������������������������������������������������������)rr"rErR)rs"r�)rt"rErX)ru"rErZ)rv"r�)rw"rEr`)rx"rErb)ry"rErd)rz"rErf)r{"r�)r|"rErj)r}"rErl)r~"rErn)r"rErp)r�"rErr)r�"rErt)r�"rErv)r�"rErx)r�"rErF)r�"rErH)r�"rErJ)r�"rErL)r�"r�)r�"rErP)r�"r�)r�"rErT)r�"rErV)r�"rErX)r�"rErZ)r�"rEr\)r�"rEr^)r�"rEr`)r�"r�)r�"rErd)r�"rErf)r�"rErh)r�"rErj)r�"rErl)r�"rErn)r�"rErp)r�"rErr)r�"rErt)r�"rErv)r�"rErx)r�"rErF)r�"rErH)r�"rErJ)r�"rErL)r�"rErN)r�"rErP)r�"rErR)r�"rErT)r�"rErV)r�"rErX)r�"rErZ)r�"rEr\)r�"rEr^)r�"rEr`)r�"rErb)r�"rErd)r�"rErf)r�"rErh)r�"rErj)r�"rErl)r�"rErn)r�"rErp)r�"rErr)r�"rErt)r�"rErv)r�"rErx)r�"rErF)r�"rErH)r�"rErJ)r�"rErL)r�"rErN)r�"rErP)r�"rErR)r�"rErT)r�"rErV)r�"rErX)r�"rErZ)r�"rEr\)r�"rEr^)r�"rEr`)r�"rErb)r�"rErd)r�"rErf)r
�"rErh)r�"rErj)r�"rErl)r�"rErn)r�"rErp)r�"rErr)r�"rErt)r�"rErv)r�"rErx)r�"rErF)r�"rErH)r�"r�)r�"rErLr�r�r�r�r��_seg_55`s�r�"cfCs�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�gdS)�N��rErN�	�rP�
�rR��r��
�rX��rZ��r\��r^��r`��rb��rd��rf����rj��rl��rn��rp��rr��rt��rv����rF��rH� �rJ�!�rL�"��#��$��%�rT�&�rV�'��(��)��*��+��,��-��.��/�rh�0��1��2��3��4��5��6��7�rx�8��9��:��;��<��=��>��?��@��A��B��C��D��E��F��G��J��K��L��M��N��O��P��Q��R��S��T��U��V��W��X��Y��Z��[��\��]��^��_��`��a��b��c��d��e��f��g��h��i��j��k��l��m��n�)r�"rErN)r�"rErP)r�"rErR)r�"r�)r�"rErX)r�"rErZ)r�"rEr\)r�"rEr^)r�"rEr`)r�"rErb)r�"rErd)r�"rErf)r�"r�)r�"rErj)r�"rErl)r�"rErn)r�"rErp)r�"rErr)r�"rErt)r�"rErv)r�"r�)r�"rErF)r�"rErH)r�"rErJ)r�"rErL)r�"rErN)r�"rErP)r�"rErR)r�"rErT)r�"rErV)r�"rErX)r�"rErZ)r�"rEr\)r�"rEr^)r�"rEr`)r�"rErb)r�"rErd)r�"rErf)r�"rErh)r�"rErj)r�"rErl)r#rErn)r#rErp)r#rErr)r#rErt)r#rErv)r#rErx)r#rErF)r#rErH)r#r�)r	#rErL)r
#rErN)r#rErP)r#rErR)r
#r�)r#rErV)r#rErX)r#rErZ)r#rEr\)r#rEr^)r#r�)r#rErb)r#r�)r#rErj)r#rErl)r#rErn)r#rErp)r#rErr)r#rErt)r#rErv)r#r�)r#rErF)r#rErH)r #rErJ)r!#rErL)r"#rErN)r##rErP)r$#rErR)r%#rErT)r&#rErV)r'#rErX)r(#rErZ)r)#rEr\)r*#rEr^)r+#rEr`)r,#rErb)r-#rErd)r.#rErf)r/#rErh)r0#rErj)r1#rErl)r2#rErn)r3#rErp)r4#rErr)r5#rErt)r6#rErv)r7#rErx)r8#rErF)r9#rErH)r:#rErJr�r�r�r�r��_seg_56�s�r;#cfCs�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�gdS)�N�o�rErL�p�rN�q�rP�r�rR�s�rT�t�rV�u�rX�v�rZ�w�r\�x�r^�y�r`�z�rb�{�rd�|�rf�}�rh�~�rj��rl��rn��rp��rr��rt��rv��rx��rF��rH��rJ���������������������������������������������������������������������������������������������������������������������������������������������������������������������)r<#rErL)r=#rErN)r>#rErP)r?#rErR)r@#rErT)rA#rErV)rB#rErX)rC#rErZ)rD#rEr\)rE#rEr^)rF#rEr`)rG#rErb)rH#rErd)rI#rErf)rJ#rErh)rK#rErj)rL#rErl)rM#rErn)rN#rErp)rO#rErr)rP#rErt)rQ#rErv)rR#rErx)rS#rErF)rT#rErH)rU#rErJ)rV#rErL)rW#rErN)rX#rErP)rY#rErR)rZ#rErT)r[#rErV)r\#rErX)r]#rErZ)r^#rEr\)r_#rEr^)r`#rEr`)ra#rErb)rb#rErd)rc#rErf)rd#rErh)re#rErj)rf#rErl)rg#rErn)rh#rErp)ri#rErr)rj#rErt)rk#rErv)rl#rErx)rm#rErF)rn#rErH)ro#rErJ)rp#rErL)rq#rErN)rr#rErP)rs#rErR)rt#rErT)ru#rErV)rv#rErX)rw#rErZ)rx#rEr\)ry#rEr^)rz#rEr`)r{#rErb)r|#rErd)r}#rErf)r~#rErh)r#rErj)r�#rErl)r�#rErn)r�#rErp)r�#rErr)r�#rErt)r�#rErv)r�#rErx)r�#rErF)r�#rErH)r�#rErJ)r�#rErL)r�#rErN)r�#rErP)r�#rErR)r�#rErT)r�#rErV)r�#rErX)r�#rErZ)r�#rEr\)r�#rEr^)r�#rEr`)r�#rErb)r�#rErd)r�#rErf)r�#rErh)r�#rErj)r�#rErl)r�#rErn)r�#rErp)r�#rErr)r�#rErt)r�#rErvr�r�r�r�r��_seg_570s�r�#cfCs�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�gdS)�N���rErx���rF���rH���rJ���rL���rN���rP���rR���rT���rV���rX���rZ���r\���r^���r`���rb���rd���rf���rh���rj���
rl���rn���rp���rr���rt���rv�����������������������������������������������������������������	��
������
�������������������������������������� ��!��"��#��$��%��&��'��(��)��*��+��,��-��.��/��0��1��2��3��4��5��6�)r�#rErx)r�#rErF)r�#rErH)r�#rErJ)r�#rErL)r�#rErN)r�#rErP)r�#rErR)r�#rErT)r�#rErV)r�#rErX)r�#rErZ)r�#rEr\)r�#rEr^)r�#rEr`)r�#rErb)r�#rErd)r�#rErf)r�#rErh)r�#rErj)r�#rErl)r�#rErn)r�#rErp)r�#rErr)r�#rErt)r�#rErv)r�#rErx)r�#rErF)r�#rErH)r�#rErJ)r�#rErL)r�#rErN)r�#rErP)r�#rErR)r�#rErT)r�#rErV)r�#rErX)r�#rErZ)r�#rEr\)r�#rEr^)r�#rEr`)r�#rErb)r�#rErd)r�#rErf)r�#rErh)r�#rErj)r�#rErl)r�#rErn)r�#rErp)r�#rErr)r�#rErt)r�#rErv)r�#rErx)r�#rErF)r�#rErH)r�#rErJ)r�#rErL)r�#rErN)r�#rErP)r�#rErR)r�#rErT)r�#rErV)r�#rErX)r�#rErZ)r�#rEr\)r�#rEr^)r�#rEr`)r�#rErb)r�#rErd)r�#rErf)r�#rErh)r�#rErj)r�#rErl)r�#rErn)r�#rErp)r�#rErr)r�#rErt)r�#rErv)r�#rErx)r�#rErF)r�#rErH)r�#rErJ)r�#rErL)r�#rErN)r�#rErP)r�#rErR)r�#rErT)r�#rErV)r�#rErX)r�#rErZ)r�#rEr\)r�#rEr^)r�#rEr`)r�#rErb)r�#rErd)r$rErf)r$rErh)r$rErj)r$rErl)r$rErnr�r�r�r�r��_seg_58�s�r$cfCs�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�gdS)�N�7�rErp�8�rr�9�rt�:�rv�;�rx�<�rF�=�rH�>�rJ�?�rL�@�rN�A�rP�B�rR�C�rT�D�rV�E�rX�F�rZ�G�r\�H�r^�I�r`�J�rb�K�rd�L�rf�M�rh�N�rj�O�rl�P�rn�Q��R��S��T��U��V��W��X��Y��Z��[��\��]��^��_��`��a��b��c��d��e��f��g��h��i��j��k��l��m��n��o��p��q��r��s��t��u��v��w��x��y��z��{��|��}��~���������������������������������������������������������)r$rErp)r$rErr)r$rErt)r	$rErv)r
$rErx)r$rErF)r$rErH)r
$rErJ)r$rErL)r$rErN)r$rErP)r$rErR)r$rErT)r$rErV)r$rErX)r$rErZ)r$rEr\)r$rEr^)r$rEr`)r$rErb)r$rErd)r$rErf)r$rErh)r$rErj)r$rErl)r$rErn)r $rErp)r!$rErr)r"$rErt)r#$rErv)r$$rErx)r%$rErF)r&$rErH)r'$rErJ)r($rErL)r)$rErN)r*$rErP)r+$rErR)r,$rErT)r-$rErV)r.$rErX)r/$rErZ)r0$rEr\)r1$rEr^)r2$rEr`)r3$rErb)r4$rErd)r5$rErf)r6$rErh)r7$rErj)r8$rErl)r9$rErn)r:$rErp)r;$rErr)r<$rErt)r=$rErv)r>$rErx)r?$rErF)r@$rErH)rA$rErJ)rB$rErL)rC$rErN)rD$rErP)rE$rErR)rF$rErT)rG$rErV)rH$rErX)rI$rErZ)rJ$rEr\)rK$rEr^)rL$rEr`)rM$rErb)rN$rErd)rO$rErf)rP$rErh)rQ$rErj)rR$rErl)rS$rErn)rT$rErp)rU$rErr)rV$rErt)rW$rErv)rX$rErx)rY$rErF)rZ$rErH)r[$rErJ)r\$rErL)r]$rErN)r^$rErP)r_$rErR)r`$rErT)ra$rErV)rb$rErX)rc$rErZ)rd$rEr\)re$rEr^)rf$rEr`)rg$rErb)rh$rErd)ri$rErfr�r�r�r�r��_seg_59s�rj$cfCs�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�gdS)�N��rErh��rj��rl��rn��rp��rr��rt��rv��rx���ı���ȷ��r����α���β���γ���δ���ε���ζ���η���θ���ι���κ���λ���μ���ν���ξ���ο���π���ρ�����σ���τ���υ���φ���χ���ψ���ω���∇����������������������������������������������������������������������������∂���������������������������������������������������������������������������������������������������)rk$rErh)rl$rErj)rm$rErl)rn$rErn)ro$rErp)rp$rErr)rq$rErt)rr$rErv)rs$rErx)rt$rEru$)rv$rErw$)rx$r�)ry$rErz$)r{$rEr|$)r}$rEr~$)r$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rErz$)r�$rEr|$)r�$rEr~$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rErz$)r�$rEr|$)r�$rEr~$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�
$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rErz$)r�$rEr|$)r�$rEr~$)r�$rEr�$)r�$rEr�$r�r�r�r�r��_seg_60hs�r�$cfCs�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�gdS)�N��rE�ζ���η���θ���ι���κ���λ���μ���ν�	��ξ�
��ο���π���ρ�
��σ���τ���υ���φ���χ���ψ���ω���∂���ε�������������α���β���γ���δ� ��!��"��#��$��%��&��'��(��)��*��+��,��-��.��/��0��1��2��3��4��5��∇�6��7��8��9��:��;��<��=��>��?��@��A��B��C��D��E��F��G��I��J��K��L��M��N��O��P��Q��R��S��T��U��V��W��X��Y��Z��[��\��]��^��_��`��a��b��c��d��e��f�)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r%rEr%)r%rEr%)r%rEr%)r%rEr%)r%rEr	%)r
%rEr%)r%rEr
%)r%rEr%)r%rEr%)r%rEr%)r%rEr%)r%rEr�$)r%rEr�$)r%rEr%)r%rEr%)r%rEr%)r%rEr%)r%rEr%)r%rEr %)r!%rEr"%)r#%rEr%)r$%rEr�$)r%%rEr�$)r&%rEr�$)r'%rEr�$)r(%rEr�$)r)%rEr�$)r*%rEr�$)r+%rEr�$)r,%rEr�$)r-%rEr�$)r.%rEr%)r/%rEr%)r0%rEr�$)r1%rEr%)r2%rEr%)r3%rEr	%)r4%rEr%)r5%rEr
%)r6%rEr%)r7%rEr%)r8%rEr9%)r:%rEr%)r;%rEr%)r<%rEr %)r=%rEr"%)r>%rEr%)r?%rEr�$)r@%rEr�$)rA%rEr�$)rB%rEr�$)rC%rEr�$)rD%rEr�$)rE%rEr�$)rF%rEr�$)rG%rEr�$)rH%rEr�$)rI%rEr%)rJ%rEr%)rK%rEr%)rL%rEr%)rM%rEr	%)rN%rEr%)rO%rEr
%)rP%rEr%)rQ%rEr%)rR%rEr%)rS%rEr%)rT%rEr�$)rU%rEr�$)rV%rEr%)rW%rEr%)rX%rEr%)rY%rEr%)rZ%rEr%)r[%rEr %)r\%rEr"%)r]%rEr%)r^%rEr�$)r_%rEr�$)r`%rEr�$)ra%rEr�$)rb%rEr�$)rc%rEr�$)rd%rEr�$)re%rEr�$)rf%rEr�$)rg%rEr�$)rh%rEr%)ri%rEr%r�r�r�r�r��_seg_61�s�rj%cfCs�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�gdS)�N�g�rE�θ�h��σ�i��τ�j��υ�k��φ�l��χ�m��ψ�n��ω�o��∇�p��α�q��β�r��γ�s��δ�t��ε�u��ζ�v��η�w��x��ι�y��κ�z��λ�{��μ�|��ν�}��ξ�~��ο���π���ρ�����������������∂������������������������������������������������������������������������������������������������������������������������������������������ϝ���r����r�)rk%rErl%)rm%rErn%)ro%rErp%)rq%rErr%)rs%rErt%)ru%rErv%)rw%rErx%)ry%rErz%)r{%rEr|%)r}%rEr~%)r%rEr�%)r�%rEr�%)r�%rEr�%)r�%rEr�%)r�%rEr�%)r�%rEr�%)r�%rErl%)r�%rEr�%)r�%rEr�%)r�%rEr�%)r�%rEr�%)r�%rEr�%)r�%rEr�%)r�%rEr�%)r�%rEr�%)r�%rEr�%)r�%rErn%)r�%rErp%)r�%rErr%)r�%rErt%)r�%rErv%)r�%rErx%)r�%rErz%)r�%rEr�%)r�%rEr�%)r�%rErl%)r�%rEr�%)r�%rErt%)r�%rEr�%)r�%rEr�%)r�%rEr~%)r�%rEr�%)r�%rEr�%)r�%rEr�%)r�%rEr�%)r�%rEr�%)r�%rEr�%)r�%rErl%)r�%rEr�%)r�%rEr�%)r�%rEr�%)r�%rEr�%)r�%rEr�%)r�%rEr�%)r�%rEr�%)r�%rEr�%)r�%rEr�%)r�%rErl%)r�%rErn%)r�%rErp%)r�%rErr%)r�%rErt%)r�%rErv%)r�%rErx%)r�%rErz%)r�%rEr|%)r�%rEr~%)r�%rEr�%)r�%rEr�%)r�%rEr�%)r�%rEr�%)r�%rEr�%)r�%rEr�%)r�%rErl%)r�%rEr�%)r�%rEr�%)r�%rEr�%)r�%rEr�%)r�%rEr�%)r�%rEr�%)r�%rEr�%)r�%rEr�%)r�%rEr�%)r�%rErn%)r�%rErp%)r�%rErr%)r�%rErt%)r�%rErv%)r�%rErx%)r�%rErz%)r�%rEr�%)r�%rEr�%)r�%rErl%)r�%rEr�%)r�%rErt%)r�%rEr�%)r�%rEr�%)r�%rEr�%)r�%r�)r�%rEr�r�r�r�r�r��_seg_628s�r�%cfCs�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�gdS)�N���rEr����r����r���r����r����r����r����r����r����r�������������������������������������������������������������������
������������������������������������������r����ا���ب���ج���د�����و���ز���ح���ط�	��ي�
��ك���ل���م�
��ن���س���ع���ف���ص���ق���ر���ش���ت���ث���خ���ذ���ض���ظ���غ���ٮ���ں���ڡ���ٯ� ��!��"��#��$��ه�%��'��(��)��*��+��,��-��.��/��0��1��2�)r�%rEr�)r�%rEr�)r�%rEr)r�%rEr�)r�%rEr�)r�%rEr�)r�%rEr�)r�%rEr�)r�%rEr�)r�%rEr�)r�%rEr�)r�%rEr�)r�%rEr)r�%rEr�)r�%rEr�)r�%rEr�)r�%rEr�)r�%rEr�)r�%rEr�)r�%rEr�)r�%rEr�)r&rEr�)r&rEr)r&rEr�)r&rEr�)r&rEr�)r&rEr�)r&rEr�)r&rEr�)r&rEr�)r	&rEr�)r
&rEr�)r&rEr)r&rEr�)r
&rEr�)r&rEr�)r&rEr�)r&rEr�)r&rEr�)r&rEr�)r&rEr�)r&rEr�)r&rEr)r&rEr�)r&rEr�)r&rEr�)r&rEr�)r&rEr�)r&rEr�)r&r�)r&rEr&)r&rEr &)r!&rEr"&)r#&rEr$&)r%&r�)r&&rEr'&)r(&rEr)&)r*&rEr+&)r,&rEr-&)r.&rEr/&)r0&rEr1&)r2&rEr3&)r4&rEr5&)r6&rEr7&)r8&rEr9&)r:&rEr;&)r<&rEr=&)r>&rEr?&)r@&rErA&)rB&rErC&)rD&rErE&)rF&rErG&)rH&rErI&)rJ&rErK&)rL&rErM&)rN&rErO&)rP&rErQ&)rR&rErS&)rT&rErU&)rV&rErW&)rX&rErY&)rZ&rEr[&)r\&r�)r]&rEr &)r^&rEr"&)r_&r�)r`&rEra&)rb&r�)rc&rEr+&)rd&r�)re&rEr/&)rf&rEr1&)rg&rEr3&)rh&rEr5&)ri&rEr7&)rj&rEr9&)rk&rEr;&)rl&rEr=&)rm&rEr?&)rn&rErA&r�r�r�r�r��_seg_63�s�ro&ceCs�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�gdS)�N�3�r��4�rE�ش�5��ت�6��ث�7��خ�8��9��ض�:��;��غ�<��B��ج�C��G��ح�H��I��ي�J��K��ل�L��M��ن�N��س�O��ع�P��Q��ص�R��ق�S��T��U��W��X��Y��Z��[��\��]��ں�^��_��ٯ�`��a��ب�b��c��d��ه�e��g��h��ط�i��j��ك�k��l��م�m��n��o��p��ف�q��r��s��t��u��v��w��x��y��z��ظ�{��|��ٮ�}��~��ڡ�����ا�������د�����و���ز���������������������������ر�����������ذ����������������)rp&r�)rq&rErr&)rs&rErt&)ru&rErv&)rw&rErx&)ry&r�)rz&rEr{&)r|&r�)r}&rEr~&)r&r�)r�&rEr�&)r�&r�)r�&rEr�&)r�&r�)r�&rEr�&)r�&r�)r�&rEr�&)r�&r�)r�&rEr�&)r�&rEr�&)r�&rEr�&)r�&r�)r�&rEr�&)r�&rEr�&)r�&r�)r�&rErr&)r�&r�)r�&rErx&)r�&r�)r�&rEr{&)r�&r�)r�&rEr~&)r�&r�)r�&rEr�&)r�&r�)r�&rEr�&)r�&r�)r�&rEr�&)r�&rEr�&)r�&r�)r�&rEr�&)r�&r�)r�&rEr�&)r�&rEr�&)r�&rEr�&)r�&rEr�&)r�&r�)r�&rEr�&)r�&rEr�&)r�&rEr�&)r�&rEr�&)r�&rEr�&)r�&rEr�&)r�&rEr�&)r�&r�)r�&rErr&)r�&rErt&)r�&rErv&)r�&rErx&)r�&r�)r�&rEr{&)r�&rEr�&)r�&rEr~&)r�&rEr�&)r�&r�)r�&rEr�&)r�&r�)r�&rEr�&)r�&rEr�&)r�&rEr�&)r�&rEr�&)r�&rEr�&)r�&rEr�&)r�&rEr�&)r�&rEr�&)r�&rEr�&)r�&rEr�&)r�&r�)r�&rEr�&)r�&rEr�&)r�&rEr�&)r�&rEr�&)r�&rEr�&)r�&rEr�&)r�&rEr�&)r�&rEr�&)r�&rEr�&)r�&rErr&)r�&rErt&)r�&rErv&)r�&rErx&)r�&rEr�&)r�&rEr{&)r�&rEr�&)r�&rEr~&)r�&r�)r�&rEr�&)r�&rEr�&)r�&rEr�&)r�&r�r�r�r�r�r��_seg_64s�r�&cfCsd�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�
d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�d�d�d�d�d	�d
�d�d�d
�d�d�d�d�d�d�d�d�d�d�d�d�d�d�dgdS(N��rE�و���ز���ح���ط���ي��r����ل���م���ن���س���ع���ف���ص���ق���ر���ش���ت���ث���خ���ذ���ض���ظ���غ�����r0������,��0������������������������r�0,���1,���2,���3,���4,���5,���6,���7,�	��8,�
��9,�����(a)���(b)���(c)���(d)���(e)���(f)���(g)���(h)���(i)���(j)���(k)���(l)���(m)���(n)���(o)���(p)� ��(q)�!��(r)�"��(s)�#��(t)�$��(u)�%��(v)�&��(w)�'��(x)�(��(y)�)��(z)�*��〔s〕�+�rJ�,�rh�-�r��.��wz�/��0�rF�1�rH�2��3�rL�4�rN�5�rP�6�rR�7�rT�8�rV�9�rX�:�rZ�;�r\�<�r^�=�r`�>�rb�?�rd�@�rf�A��B�rj)r�&rEr�&)r�&rEr�&)r�&rEr�&)r�&rEr�&)r�&rEr�&)r�&r�)r'rEr')r'rEr')r'rEr')r'rEr')r'rEr	')r
'rEr')r'rEr
')r'rEr')r'rEr')r'rEr')r'rEr')r'rEr')r'rEr')r'rEr')r'rEr')r'rEr')r 'rEr!')r"'r�)r#'r0)r$'r�)r%'r0)r&'r�)r''r0)r('r�)r)'r0)r*'r�)r+'r0)r,'r�)r-'r0)r.'r�)r/'r0)r0'r�)r1'rr2')r3'rr4')r5'rr6')r7'rr8')r9'rr:')r;'rr<')r='rr>')r?'rr@')rA'rrB')rC'rrD')rE'r�)rF'rrG')rH'rrI')rJ'rrK')rL'rrM')rN'rrO')rP'rrQ')rR'rrS')rT'rrU')rV'rrW')rX'rrY')rZ'rr[')r\'rr]')r^'rr_')r`'rra')rb'rrc')rd'rre')rf'rrg')rh'rri')rj'rrk')rl'rrm')rn'rro')rp'rrq')rr'rrs')rt'rru')rv'rrw')rx'rry')rz'rEr{')r|'rErJ)r}'rErh)r~'rEr�)r'rEr�')r�'r�)r�'rErF)r�'rErH)r�'rErJ)r�'rErL)r�'rErN)r�'rErP)r�'rErR)r�'rErT)r�'rErV)r�'rErX)r�'rErZ)r�'rEr\)r�'rEr^)r�'rEr`)r�'rErb)r�'rErd)r�'rErf)r�'rErh)r�'rErjr�r�r�r�r��_seg_65ps�r�'ceCs�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�d�d�d�d�d	�d
�d�d�d
�d�d�d�d�d�d�dgdS(N�C�rErl�D�rn�E�rp�F�rr�G�rt�H�rv�I�rx�J��hv�K�r��L��sd�M�r.�N��ppv�O��wc�P�r0�j��mc�k��md�l�r��p����dj����������ほか���ココ���サ�����手���字���双���デ���二���多���解���天���交���映���無���料���前���後���再���新� ��初�!��終�"��生�#��販�$��声�%��吹�&��演�'��投�(��捕�)��一�*��三�+��遊�,��左�-��中�.��右�/��指�0��走�1��打�2��禁�3��空�4��合�5��満�6��有�7��月�8��申�9��割�:��営�;��@��	〔本〕�A��	〔三〕�B��	〔二〕�C��	〔安〕�D��	〔点〕�E��	〔打〕�F��	〔盗〕�G��	〔勝〕�H��	〔敗〕�I��P��得�Q��可�R����!��0��6��7��}�������������������������?��@�)r�'rErl)r�'rErn)r�'rErp)r�'rErr)r�'rErt)r�'rErv)r�'rErx)r�'rEr�')r�'rEr�)r�'rEr�')r�'rEr.)r�'rEr�')r�'rEr�')r�'r0)r�'rEr�')r�'rEr�')r�'r�)r�'r0)r�'rEr�')r�'r0)r�'r�)r�'r0)r�'rEr�')r�'rEr�')r�'rEr�')r�'r�)r�'rEr�')r�'rEr�')r�'rEr�')r�'rEr�')r�'rEr�')r�'rEr�')r�'rEr�')r�'rEr�')r�'rEr�')r�'rEr�')r�'rEr�')r�'rEr�')r�'rEr�')r�'rEr�')r�'rEr�')r�'rEr�')r�'rEr�')r�'rEr�')r�'rEr�')r�'rEr�')r�'rEr�')r�'rEr�')r�'rEr�')r�'rEr�')r�'rEr�')r�'rEr�')r�'rEr�')r�'rEr�')r�'rEr�')r�'rEr�')r�'rEr�')r�'rEr�')r�'rEr�')r�'rEr�')r�'rEr�')r(rEr()r(rEr()r(rEr()r(rEr()r(rEr	()r
(rEr()r(rEr
()r(rEr()r(r�)r(rEr()r(rEr()r(rEr()r(rEr()r(rEr()r(rEr()r(rEr()r(rEr ()r!(rEr"()r#(r�)r$(rEr%()r&(rEr'()r((r�)r)(r0)r*(r�)r+(r0)r,(r�)r-(r0)r.(r�)r/(r0)r0(r�)r1(r0)r2(r�)r3(r0)r4(r�)r5(r0)r6(r�)r7(r0)r8(r�)r9(r0r�r�r�r�r��_seg_66�s�r:(cfCs�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�d�d�d�d�d	�d
�d�d�d
�d�d�d�d�d�d�d�d�dgdS(N�A�r��B�r0���������>��@��D��P��h���A��E��P�������t���צ���5��@�����rE�丽���丸���乁���𠄢���你���侮���侻���倂���偺�	��備�
��僧���像���㒞�
��𠘺���免���兔���兤���具���𠔜���㒹���內���再���𠕋���冗���冤���仌���冬���况���𩇟���凵���刃���㓟� ��刻�!��剆�"��割�#��剷�$��㔕�%��勇�&��勉�'��勤�(��勺�)��包�*��匆�+��北�,��卉�-��卑�.��博�/��即�0��卽�1��卿�4��𠨬�5��灰�6��及�7��叟�8��𠭣�9��叫�:��叱�;��吆�<��咞�=��吸�>��呈�?��周�@��咢�A��哶�B��唐�C��啓�D��啣�E��善�G��喙�H��喫�I��喳�J��嗂�K��圖�L��嘆�M��圗)r;(r�)r<(r0)r=(r�)r>(r0)r?(r�)r@(r0)rA(r�)rB(r0)rC(r�)rD(r0)rE(r�)rF(r0)rG(r�)rH(r0)rI(r�)rJ(r0)rK(r�)rL(r0)rM(r�)rN(r0)rO(r�)rP(r0)rQ(r�)rR(r0)rS(r�)rT(rErU()rV(rErW()rX(rErY()rZ(rEr[()r\(rEr]()r^(rEr_()r`(rEra()rb(rErc()rd(rEre()rf(rErg()rh(rEri()rj(rErk()rl(rErm()rn(rEro()rp(rErq()rr(rErs()rt(rEru()rv(rErw()rx(rEry()rz(rEr{()r|(rEr}()r~(rEr()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�(r�r�r�r�r��_seg_67@s�r�(cfCs$d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�d�d�d�d�d	�d
�d�d�d
�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d �d!�d"�d#�d$�d%�d&�d'�d(�d)�d*�d+gdS(,N�N�rE�噑�O��噴�P��切�Q��壮�R��城�S��埴�T��堍�U��型�V��堲�W��報�X��墬�Y��𡓤�Z��売�[��壷�\��夆�]��多�^��夢�_��奢�`��𡚨�a��𡛪�b��姬�c��娛�d��娧�e��姘�f��婦�g��㛮�h�r��i��嬈�j��嬾�l��𡧈�m��寃�n��寘�o��寧�p��寳�q��𡬘�r��寿�s��将�t��u��尢�v��㞁�w��屠�x��屮�y��峀�z��岍�{��𡷤�|��嵃�}��𡷦�~��嵮���嵫��嵼��巡��巢��㠯��巽��帨��帽��幩��㡢��𢆃��㡼��庰��庳��庶��廊��𪎒��廾��𢌱��舁��弢��㣇��𣊸��𦇚��形��彫��㣣��徚��忍��志��忹��悁��㤺��㤜��悔��𢛔��惇��慈��慌��慎���慺��憎��憲��憤��憯��懞��懲��懶��成��戛��扝)r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr))r)rEr))r)rEr))r)rEr))r)rEr))r	)rEr
))r)rEr))r
)rEr))r)rEr))r)rEr))r)rEr))r)rEr))r)rEr))r)rEr))r)rEr))r)rEr))r)r�)r )rEr!))r")rEr#))r$)rEr%))r&)rEr'))r()rEr)))r*)rEr+))r,)rEr-))r.)rEr/))r0)rEr1))r2)rEr3))r4)r�)r5)rEr6))r7)rEr8))r9)rEr:))r;)rEr<))r=)rEr>))r?)rEr@))rA)rErB))rC)rErD))rE)rErF))rG)rErH))rI)rErJ))rK)rErL))rM)rErN))rO)rErP))rQ)rErR))rS)rErT))rU)rErV))rW)rErX))rY)rErZ))r[)rEr\))r])rEr^))r_)rEr`))ra)rErb))rc)rErd))re)rErf))rg)rErh))ri)rErj))rk)rErl))rm)rErn))ro)rErp))rq)rErr))rs)rErt))ru)rErv))rw)rErx))ry)rErz))r{)rEr|))r})rEr~))r)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�)r�r�r�r�r��_seg_68�s�r�)cfCs(d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�d�d�d�d�d	�d
�d�d�d
�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d �d!�d"�d#�d$�d%�d&�d'�d(�d)�d*�d+�d,�d-gdS(.N�rE�抱��拔��捐��𢬌��挽��拼��捨��掃��揤��𢯱��搢��揅��掩���㨮���摩���摾���撝���摷���㩬���敏���敬���𣀊���旣���書���晉���㬙���暑���㬈���㫤���冒���冕���最���暜���肭���䏙���朗���望���朡���杞���杓���𣏃���㭉���柺���枅���桒���梅���𣑭���梎���栟���椔���㮝���楂���榣���槪���檨���𣚣���櫛���㰘���次���𣢧���歔���㱎���歲���殟��殺��殻��𣪍��𡴋��𣫺��汎��𣲼��沿��泍��汧��洖���派���海���流���浩���浸���涅���𣴞���洴���港�	��湮�
��㴳���滋���滇�
��𣻑���淹���潮���𣽞���𣾎���濆���瀹���瀞���瀛���㶖���灊���災)r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr*)r*rEr*)r*rEr*)r*rEr*)r*rEr*)r	*rEr
*)r*rEr*)r
*rEr*)r*rEr*)r*rEr*)r*rEr*)r*rEr*)r*rEr*)r*rEr*)r*rEr*)r*rEr*)r*rEr *)r!*rEr"*)r#*rEr$*)r%*rEr&*)r'*rEr(*)r)*rEr**)r+*rEr,*)r-*rEr.*)r/*rEr0*)r1*rEr2*)r3*rEr4*)r5*rEr6*)r7*rEr8*)r9*rEr:*)r;*rEr<*)r=*rEr>*)r?*rEr@*)rA*rErB*)rC*rErD*)rE*rErF*)rG*rErH*)rI*rErJ*)rK*rErL*)rM*rErN*)rO*rErP*)rQ*rErR*)rS*rErT*)rU*rErV*)rW*rErX*)rY*rErZ*)r[*rEr\*)r]*rEr^*)r_*rEr`*)ra*rErb*)rc*rErd*)re*rErf*)rg*rErh*)ri*rErj*)rk*rErl*)rm*rErn*)ro*rErp*)rq*rErr*)rs*rErt*)ru*rErv*)rw*rErx*r�r�r�r�r��_seg_69s�ry*cfCs&d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�d�d�d�d�d	�d
�d�d�d
�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d �d!�d"�d#�d$�d%�d&�d'�d(�d)�d*�d+�d,gdS(-N��rE�灷���炭���𠔥���煅���𤉣���熜��r�� ��爨�!��爵�"��牐�#��𤘈�$��犀�%��犕�&��𤜵�'��𤠔�(��獺�)��王�*��㺬�+��玥�,��㺸�.��瑇�/��瑜�0��瑱�1��璅�2��瓊�3��㼛�4��甤�5��𤰶�6��甾�7��𤲒�8��異�9��𢆟�:��瘐�;��𤾡�<��𤾸�=��𥁄�>��㿼�?��䀈�@��直�A��𥃳�B��𥃲�C��𥄙�D��𥄳�E��眞�F��真�H��睊�I��䀹�J��瞋�K��䁆�L��䂖�M��𥐝�N��硎�O��碌�P��磌�Q��䃣�R��𥘦�S��祖�T��𥚚�U��𥛅�V��福�W��秫�X��䄯�Y��穀�Z��穊�[��穏�\��𥥼�]��𥪧�_��`��䈂�a��𥮫�b��篆�c��築�d��䈧�e��𥲀�f��糒�g��䊠�h��糨�i��糣�j��紀�k��𥾆�l��絣�m��䌁�n��緇�o��縂�p��繅�q��䌴�r��𦈨�s��𦉇�t��䍙�u��𦋙�v��罺�w��𦌾�x��羕�y��翺�z��者�{��𦓚�|��𦔣�}��聠�~��𦖨���聰)rz*rEr{*)r|*rEr}*)r~*rEr*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*r�)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*r�)r+rEr+)r+rEr+)r+rEr+)r+rEr+)r+rEr	+)r
+rEr+)r+rEr
+)r+rEr+)r+rEr+)r+rEr+)r+rEr+)r+rEr+)r+rEr+)r+rEr+)r+rEr+)r+rEr+)r +rEr!+)r"+rEr#+)r$+rEr%+)r&+rEr'+)r(+rEr)+)r*+rEr++)r,+rEr-+)r.+rEr/+)r0+rEr1+)r2+rEr3+)r4+rEr5+)r6+rEr7+)r8+rEr9+)r:+rEr;+)r<+rEr=+)r>+rEr?+r�r�r�r�r��_seg_70xs�r@+cfCs(d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�d�d�d�d�d	�d
�d�d�d
�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d �d!�d"�d#�d$�d%�d&�d'�d(�d)�d*�d+�d,�d-gdS(.N�rE�𣍟��䏕��育��脃��䐋��脾��媵��𦞧��𦞵��𣎓��𣎜��舁��舄��辞��䑫��芑��芋��芝��劳��花��芳��芽��苦��𦬼��若��茝��荣��莭��茣��莽��菧��著��荓��菊��菌��菜��𦰶��𦵫��𦳕��䔫��蓱��蓳��蔖��𧏊��蕤��𦼬��䕝��䕡��𦾱��𧃒��䕫��虐��虜��虧��虩��蚩��蚈��蜎��蛢��蝹��蜨��蝫��螆�r���蟡��蠁���䗹���衠���衣���𧙧���裗���裞���䘵���裺���㒻���𧢮���𧥦���䚾���䛇���誠���諭���變���豕���𧲨���貫���賁���贛���起���𧼯���𠠄���跋���趼���跰���𠣞���軔���輸���𨗒���𨗭���邔���郱)rA+rErB+)rC+rErD+)rE+rErF+)rG+rErH+)rI+rErJ+)rK+rErL+)rM+rErN+)rO+rErP+)rQ+rErR+)rS+rErT+)rU+rErV+)rW+rErX+)rY+rErZ+)r[+rEr\+)r]+rEr^+)r_+rEr`+)ra+rErb+)rc+rErd+)re+rErf+)rg+rErh+)ri+rErj+)rk+rErl+)rm+rErn+)ro+rErp+)rq+rErr+)rs+rErt+)ru+rErv+)rw+rErx+)ry+rErz+)r{+rEr|+)r}+rEr~+)r+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+r�)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r,rEr,)r,rEr,)r,rEr,)r,rEr,r�r�r�r�r��_seg_71�s�r,c=Cs|dydzd{d|d}d~dd�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�g<S)�N��rE�鄑���𨜮���鄛���鈸���鋗���鋘���鉼���鏹���鐕���𨯺���開���䦕���閷���𨵷���䧦���雃���嶲��霣��𩅅��𩈚��䩮��䩶��韠��𩐊��䪲��𩒖��頋���頩���𩖶���飢���䬳���餩���馧���駂���駾���䯎�	��𩬰�
��鬒���鱀���鳽�
��䳎���䳭���鵧���𪃎���䳸���𪄅���𪈎���𪊑���麻���䵖���黹���黾���鼅���鼏���鼖���鼻���𪘀��r��r���)r	,rEr
,)r,rEr,)r
,rEr,)r,rEr,)r,rEr,)r,rEr,)r,rEr,)r,rEr,)r,rEr,)r,rEr,)r,rEr,)r,rEr ,)r!,rEr",)r#,rEr$,)r%,rEr&,)r',rEr(,)r),rEr*,)r+,rEr,,)r-,rEr.,)r/,rEr0,)r1,rEr2,)r3,rEr4,)r5,rEr6,)r7,rEr8,)r9,rEr:,)r;,rEr<,)r=,rEr>,)r?,rEr@,)rA,rErB,)rC,rErD,)rE,rErF,)rG,rErH,)rI,rErJ,)rK,rErL,)rM,rErN,)rO,rErP,)rQ,rErR,)rS,rErT,)rU,rErV,)rW,rErX,)rY,rErZ,)r[,rEr\,)r],rEr^,)r_,rEr`,)ra,rErb,)rc,rErd,)re,rErf,)rg,rErh,)ri,rErj,)rk,rErl,)rm,rErn,)ro,rErp,)rq,rErr,)rs,rErt,)ru,rErv,)rw,rErx,)ry,rErz,)r{,r�)r|,r�)r},r�r�r�r�r�r��_seg_72Hsxr~,N)M�__doc__�__version__r�r�r�r-r�rfrr�rJr�rbr�r/r�rr�r9	r�	rm
r
r�r3r�r]
rr�rAr
r�r�rLrr�r�rfr�r�rr�r�rsr7r�r�rxr6r�r�rXr�r� r+!r�!r"rq"r�"r;#r�#r$rj$r�$rj%r�%ro&r�&r�'r:(r�(r�)ry*r@+r,r~,�tupleZ	uts46datar�r�r�r��<module>s�hhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhh@I_vendor/idna/__pycache__/codec.cpython-36.pyc000064400000005725151733136470015051 0ustar003

�Pf��@s�ddlmZmZmZmZmZddlZddlZejd�Z	Gdd�dej
�Z
Gdd�dej�ZGd	d
�d
ej
�ZGdd�de
ej�ZGd
d�de
ej�Zdd�ZdS)�)�encode�decode�alabel�ulabel�	IDNAError�Nu[.。.。]c@s eZdZddd�Zddd�ZdS)	�Codec�strictcCs.|dkrtdj|���|sdSt|�t|�fS)Nr	z Unsupported error handling "{0}"�r)r
r)r�formatr�len)�self�data�errors�r�/usr/lib/python3.6/codec.pyr	s
zCodec.encodecCs.|dkrtdj|���|sdSt|�t|�fS)Nr	z Unsupported error handling "{0}"r
r)r
r)rrrr)r
rrrrrrs
zCodec.decodeN)r	)r	)�__name__�
__module__�__qualname__rrrrrrrs

rc@seZdZdd�ZdS)�IncrementalEncoderc	Cs�|dkrtdj|���|sdStj|�}d}|rV|dsDd}|d	=n|sV|d
=|rVd}g}d}x2|D]*}|jt|��|r�|d7}|t|�7}qdWdj|�|}|t|�7}||fS)Nr	z Unsupported error handling "{0}"r
rr�.)r
r���rr)rr�_unicode_dots_re�split�appendrr�join)	r
rr�final�labels�trailing_dot�result�size�labelrrr�_buffer_encodes0

z!IncrementalEncoder._buffer_encodeN)rrrr"rrrrrsrc@seZdZdd�ZdS)�IncrementalDecoderc	Cs�|dkrtdj|���|sdSt|t�r4tj|�}nt|�}t|d�|jd�}d}|r~|d	sld}|d
=n|s~|d=|r~d}g}d}x2|D]*}|jt|��|r�|d7}|t	|�7}q�Wdj
|�|}|t	|�7}||fS)Nr	z Unsupported error handling "{0}"r
r�asciirr)r
rrrr)rr�
isinstanceZunicoderr�strrrrr)	r
rrrrrrr r!rrr�_buffer_decode?s8



z!IncrementalDecoder._buffer_decodeN)rrrr'rrrrr#>sr#c@seZdZdS)�StreamWriterN)rrrrrrrr(gsr(c@seZdZdS)�StreamReaderN)rrrrrrrr)jsr)c	Cs tjdt�jt�jttttd�S)NZidna)�namerr�incrementalencoder�incrementaldecoder�streamwriter�streamreader)	�codecs�	CodecInforrrrr#r(r)rrrr�getregentrymsr1)Zcorerrrrrr/�re�compilerr�BufferedIncrementalEncoderr�BufferedIncrementalDecoderr#r(r)r1rrrr�<module>s
!)_vendor/idna/__pycache__/idnadata.cpython-36.opt-1.pyc000064400000057557151733136500016504 0ustar003

�Pf���@s0dZ�d �d!�d"�d#�d$dF�ZdGdGdGdGdGdGdGdHdGdIdIdIdIdHdIdHdIdHdHdHdHdHdIdIdIdIdHdHdHdHdHdHdHdHdHdHdHdHdHdJdHdHdHdHdHdHdHdIdHdHdHdHdIdIdIdGdIdIdIdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdIdIdIdIdIdIdIdIdIdIdIdIdIdIdIdIdIdIdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdIdHdHdIdIdIdIdIdIdIdIdIdHdIdHdIdHdHdIdIdIdGdIdIdHdHdHdHdIdHdHdHdIdIdIdIdIdHdHdHdHdIdHdHdHdHdHdHdHdHdHdIdHdIdHdIdHdHdIdIdHdHdHdHdHdHdHdHdHdHdHdIdIdIdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdIdIdHdHdHdHdIdHdIdIdHdHdHdIdIdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdJdIdHdHdHdHdHdIdHdHdIdHdHdHdHdHdIdHdHdHdHdIdHdGdGdGdHdHdHdHdHdHdHdHdHdIdIdIdGdHdJdGdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdGdGdGdGdGdGdGdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdGdJdGdGdGdGdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdKdGdL���Z�d%�d&�d'�d�Z�dS((z6.3.0�t�
�x�
�~�
������ ��0��8������+�t�btu�k�u���v�|�`|�F�|�N }�X@}�Zd}�\l}�^t}�~|}��~���~����X��t�������'!����FRH��.:��.l:��/<�0@�0@�*0�@�<0�@��MP��8�nzd��z�i��&�57�8]�z`��D��@����7{tl�={�l�?{�l�B{m�E{m�P{m��0A��0tB�0@
�rH��0�B�1�C�2�G��2@K�X3L�p�}���}�0@
)ZGreekZHanZHebrewZHiraganaZKatakana�U�D�R�C�L(�iiiiiiii i!i"i#i$i%i&i'i(i)i*i+i,i-i.i/i0i1i2i3i4i5i6i7i8i9i:i;i<i=i>i?i@iAiBiCiDiEiFiGiHiIiJinioiqirisitiuiviwixiyizi{i|i}i~ii�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�iiiiiiiiiiiiiiii i!i"i#i$i%i&i'i(i)i*i+i,i-i.i/iMiNiOiPiQiRiSiTiUiViWiXiYiZi[i\i]i^i_i`iaibicidieifigihiiijikiliminioipiqirisitiuiviwixiyizi{i|i}i~ii�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i@iAiBiCiDiEiFiGiHiIiJiKiLiMiNiOiPiQiRiSiTiUiViWiXi�i�i�i�i�i�i�i�i�i�i�i�iii
ii i!i"i#i$i%i&i'i(i)i*i+i,i-i.i/i0i1i2i3i4i5i6i7i8i9i:i;i<i=i>i?i@iAiBiCiDiEiFiGiHiIiJiKiLiMiNiOiPiQiRiSiTiUiViWiXiYiZi[i\i]i^i_i`iaibicidieifigihiiijikiliminioipiqirisitiuiviwi�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i i
 if ig ih ii i@�iA�iB�iC�iD�iE�iF�iG�iH�iI�iJ�iK�iL�iM�iN�iO�iP�iQ�iR�iS�iT�iU�iV�iW�iX�iY�iZ�i[�i\�i]�i^�i_�i`�ia�ib�ic�id�ie�if�ig�ih�ii�ij�ik�il�im�in�io�ip�iq�ir�is��.��:��{���|�������
$�,�4�<�D�L�T�\�d�l�t� |�"��$��&��(��*��,��.��0��2��6��9��;��=��?��C�E�G�I �L,�N4�P<�RD�TL�VT�X\�Zd�\l�^t�`|�b��d��f��h��j��l��n��p��r��t��v��x��{��}����������� ��0��H��T��d��x�����������������������������������8��@��H��P��X��`��h��p��|�������������������������������������������
$�,�4�<�D�L�T�\�d�l�t� |�"��$��&��(��*��,��.��0��2��:��=��A��C	�H	�J$	�L,	�N4	��<	���
���������@�C
�O
�p@
�r�
�t�
�x�
�~�
��@�����\��d��l��t��|����������������������������������`��b��d��f��h��j��l��n��p��r��t��v��x��z��|��~����������,��4��<��D��L��T��\��d��l��t��|�������������������������������������������������������� ��(��0��8��D��L��T��\��d��l��t��|����������������������������������������������������
$�,�4�<�D�L�T�\�d�l�t� |�"��$��&��(��Zd�����D�������������@�@��`�u������T��|������K@��4���. �\!���"���"���#�X	$�d	�%�p	�%�x	�%��	�%��	&��	&��	<&��	L&��	�&��	�&��	�&��	�&��	'��	,'��	\'��	�'��	�'�
(�
(�
<(�)
L(�1
�(�3
�(�6
�(�:
�(�=
�(�C
�(�I
)�N
,)�R
D)�]
p)�v
�)��
*��
*��
<*��
L*��
�*��
�*��
�*��
�*��
+��
,+��
@+��
�+��
�+�,�
,�<,�)L,�1�,�4�,�:�,�E�,�I-�N,-�XX-�d|-�p�-�r�-��.��.��8.��H.��d.��p.��x.���.���.���.���.��/��(/��@/��\/���/�0�
0�80�)H0�4�0�:�0�E�0�I1�N(1�WT1�Z`1�d�1�p�1��2��2��82��H2���2���2���2��3��(3��T3��x3���3���3���3�
4�

4�
84�;
H4�E
�4�I
5�O
(5�X
\5�d
�5�p
�5��
�5��
6��
6��
h6��
�6��
�6��
7��
(7��
<7��
X7��
`7��
�7�38�;�8�O9�Z@9��:��:��:��(:��4:��P:��d:���:���:���:���:���:���:���:��;��;�� ;��@;��x;�<�,<�`<�*�<�6�<�8�<�:�<�C�<�H=�M$=�R8=�WL=�\`=�it=�m�=�s�=�u�=���=��>��>��P>��d>��x>���>���>���>���>��?�J@��@A��@C��C�IH�N(I�W@I�Y`I�^hI���I��(J��@J���J���J��K��K�� K�`K�HL�[`L�`tM��N���N�mP���Y��Z���Z�
\�8\�5�\�T]�m�]�q�]�t�]��^���^��\_��p_���_�@`�x�`��b���b�d�,�d�<�d�ne�u�e��f���f��@g�h�_�h�}�i���i��@j���j�Ll�Z@m�t�m��n�8p�Jq�~4q��@s��Ps�,t�0�t�<�t�O8u�x�u���u��w��w�x�x�x�x�
$x�,x�4x�<x�Dx�Lx�Tx�\x�dx�lx�tx� |x�"�x�$�x�&�x�(�x�*�x�,�x�.�x�0�x�2�x�4�x�6�x�8�x�:�x�<�x�>�x�@�x�By�Dy�Fy�Hy�J$y�L,y�N4y�P<y�RDy�TLy�VTy�X\y�Zdy�\ly�^ty�`|y�b�y�d�y�f�y�h�y�j�y�l�y�n�y�p�y�r�y�t�y�v�y�x�y�z�y�|�y�~�y���y��z��z��z��z��$z��,z��4z��<z��Dz��Lz��Tz��pz��|z���z���z���z���z���z���z���z���z���z���z���z���z���z���z���z���z��{��{��{��{��${��,{��4{��<{��D{��L{��T{��\{��d{��l{��t{��|{���{���{���{���{���{���{���{���{���{���{���{���{���{���{���{��{�@|�(�|�8�|�F}�h�}�q�}�s�}�u�}�w�}�y�}�{�}�}�}���~���~����@��X����������O!8��!�_,�0�b,�1�g,�1�i,�1�k,�1�m,�1�r,�1�u,�1�|,�1��,2��,2��,2��,2��,$2��,,2��,42��,<2��,D2��,L2��,T2��,\2��,d2��,l2��,t2��,|2��,�2��,�2��,�2��,�2��,�2��,�2��,�2��,�2��,�2��,�2��,�2��,�2��,�2��,�2��,�2��,�2��,3��,3��,3��,3��,$3��,,3��,43��,<3��,D3��,L3��,T3��,\3��,d3��,l3��,t3��,|3��,�3��,�3��,�3��,�3��,�3�&-4�(-�4�.-�4�h-�4��-�5��-�6��-�6��-�6��-�6��-7��- 7��-@7��-`7�.�7�0.�8�0@�.0�@�=0�@��0dB��0tB��0�C�.1D��1�F��$��$@�
&�,&@�B&�D&�F&�H&�J&$�L&,�N&4�P&<�R&D�T&L�V&T�X&\�Z&d�\&l�^&t�`&|�b&��d&��f&��h&��j&��l&��p&��~&���&���&��&��&��&��&$��&,��&4��&<��&D��&L��&T��&\��&|��&�� '\�$'��&'��('��*'��,'��.'��2'��4'��6'��8'��:'��<'��>'��@'��B'�D'�F'�H'�J'$�L',�N'4�P'<�R'D�T'L�V'T�X'\�Z'd�\'l�^'t�`'|�b'��d'��f'��h'��j'��l'��n'��p'��y'��{'��}'���'���'��'��'��'��'0��'8��'D��'L��'���'���'���'���'��((��t(!��("��(@#��(�#��(�#�.)$�T)�$��)&��)<'�7*(�N*)�Z*@)�w*�)�|*�)��**��*l+��*�+��*�+�+,�+$,�+D,�'+�,�/+�,��+/��+�/��+�/��W0�z8h�zDh�zLh� z|h�"z�h�%z�h�*z�h�{xl�'~�x�t~�y��'4�;��>��N��^@�������
���
��A��J
������� ������� �	  �6( �9� �=� �V� �	$�:	�$��	&��	�&�
(�
(�
0(�
T(�4
d(�;
�(�@
�(�}
�)�6,�V-�s�-�I0�G@�p�A��B��@C���C�5D�@�D��F��@G��Z��[�o#	�/4@	�9j �Eo<�o@=��o<>�0@
� 0����v�
����j������0�C)ZPVALIDZCONTEXTJZCONTEXTON)!rrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!)r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0)	r1r2r3r4r5r6r7r8r9)r:r;r<r=)r>r?r@rArBrCrDrE(�rKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r2r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLr:rMrNr>rOrPrQr@r)r*rRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrr-r.r/)r)rrrrrr)�__version__�scriptsZ
joining_typesZcodepoint_classes�rr�/usr/lib/python3.6/idnadata.py�<module>s0

_vendor/idna/__pycache__/codec.cpython-36.opt-1.pyc000064400000005725151733136500016002 0ustar003

�Pf��@s�ddlmZmZmZmZmZddlZddlZejd�Z	Gdd�dej
�Z
Gdd�dej�ZGd	d
�d
ej
�ZGdd�de
ej�ZGd
d�de
ej�Zdd�ZdS)�)�encode�decode�alabel�ulabel�	IDNAError�Nu[.。.。]c@s eZdZddd�Zddd�ZdS)	�Codec�strictcCs.|dkrtdj|���|sdSt|�t|�fS)Nr	z Unsupported error handling "{0}"�r)r
r)r�formatr�len)�self�data�errors�r�/usr/lib/python3.6/codec.pyr	s
zCodec.encodecCs.|dkrtdj|���|sdSt|�t|�fS)Nr	z Unsupported error handling "{0}"r
r)r
r)rrrr)r
rrrrrrs
zCodec.decodeN)r	)r	)�__name__�
__module__�__qualname__rrrrrrrs

rc@seZdZdd�ZdS)�IncrementalEncoderc	Cs�|dkrtdj|���|sdStj|�}d}|rV|dsDd}|d	=n|sV|d
=|rVd}g}d}x2|D]*}|jt|��|r�|d7}|t|�7}qdWdj|�|}|t|�7}||fS)Nr	z Unsupported error handling "{0}"r
rr�.)r
r���rr)rr�_unicode_dots_re�split�appendrr�join)	r
rr�final�labels�trailing_dot�result�size�labelrrr�_buffer_encodes0

z!IncrementalEncoder._buffer_encodeN)rrrr"rrrrrsrc@seZdZdd�ZdS)�IncrementalDecoderc	Cs�|dkrtdj|���|sdSt|t�r4tj|�}nt|�}t|d�|jd�}d}|r~|d	sld}|d
=n|s~|d=|r~d}g}d}x2|D]*}|jt|��|r�|d7}|t	|�7}q�Wdj
|�|}|t	|�7}||fS)Nr	z Unsupported error handling "{0}"r
r�asciirr)r
rrrr)rr�
isinstanceZunicoderr�strrrrr)	r
rrrrrrr r!rrr�_buffer_decode?s8



z!IncrementalDecoder._buffer_decodeN)rrrr'rrrrr#>sr#c@seZdZdS)�StreamWriterN)rrrrrrrr(gsr(c@seZdZdS)�StreamReaderN)rrrrrrrr)jsr)c	Cs tjdt�jt�jttttd�S)NZidna)�namerr�incrementalencoder�incrementaldecoder�streamwriter�streamreader)	�codecs�	CodecInforrrrr#r(r)rrrr�getregentrymsr1)Zcorerrrrrr/�re�compilerr�BufferedIncrementalEncoderr�BufferedIncrementalDecoderr#r(r)r1rrrr�<module>s
!)_vendor/idna/__pycache__/uts46data.cpython-36.pyc000064400000671047151733136500015613 0ustar003

�Pfp��@sdZdZdd�Zdd�Zdd�Zdd	�Zd
d�Zdd
�Zdd�Zdd�Z	dd�Z
dd�Zdd�Zdd�Z
dd�Zdd�Zdd�Zd d!�Zd"d#�Zd$d%�Zd&d'�Zd(d)�Zd*d+�Zd,d-�Zd.d/�Zd0d1�Zd2d3�Zd4d5�Zd6d7�Zd8d9�Zd:d;�Zd<d=�Zd>d?�Z d@dA�Z!dBdC�Z"dDdE�Z#dFdG�Z$dHdI�Z%dJdK�Z&dLdM�Z'dNdO�Z(dPdQ�Z)dRdS�Z*dTdU�Z+dVdW�Z,dXdY�Z-dZd[�Z.d\d]�Z/d^d_�Z0d`da�Z1dbdc�Z2ddde�Z3dfdg�Z4dhdi�Z5djdk�Z6dldm�Z7dndo�Z8dpdq�Z9drds�Z:dtdu�Z;dvdw�Z<dxdy�Z=dzd{�Z>d|d}�Z?d~d�Z@d�d��ZAd�d��ZBd�d��ZCd�d��ZDd�d��ZEd�d��ZFd�d��ZGd�d��ZHd�d��ZId�d��ZJeKe�e�e�e�e�e�e�e	�e
�e�e�e
�e�e�e�e�e�e�e�e�e�e�e�e�e�e�e�e�e�e�e �e!�e"�e#�e$�e%�e&�e'�e(�e)�e*�e+�e,�e-�e.�e/�e0�e1�e2�e3�e4�e5�e6�e7�e8�e9�e:�e;�e<�e=�e>�e?�e@�eA�eB�eC�eD�eE�eF�eG�eH�eI�eJ��ZLd�S)�zIDNA Mapping Table from UTS46.z6.3.0ceCs�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�gdS)�N��3���������	�
���
������������������� �!�"�#�$�%�&�'�(�)�*�+�,�-�V�.�/�0�1�2�3�4�5�6�7�8�9�:�;�<�=�>�?�@�A�M�a�B�b�C�c�D�d�E�e�F�f�G�g�H�h�I�i�J�j�K�k�L�l�M�m�N�n�O�o�P�p�Q�q�R�r�S�s�T�t�U�u�V�v�W�w�X�x�Y�y�Z�z�[�\�]�^�_�`�a�b�c)rr)rr)rr)rr)rr)rr)rr)r	r)r
r)rr)rr)r
r)rr)rr)rr)rr)rr)rr)rr)rr)rr)rr)rr)rr)rr)rr)rr)rr)rr)rr)r r)r!r)r"r)r#r)r$r)r%r)r&r)r'r)r(r)r)r)r*r)r+r)r,r)r-r)r.r)r/r0)r1r0)r2r)r3r0)r4r0)r5r0)r6r0)r7r0)r8r0)r9r0)r:r0)r;r0)r<r0)r=r)r>r)r?r)r@r)rAr)rBr)rCr)rDrErF)rGrErH)rIrErJ)rKrErL)rMrErN)rOrErP)rQrErR)rSrErT)rUrErV)rWrErX)rYrErZ)r[rEr\)r]rEr^)r_rEr`)rarErb)rcrErd)rerErf)rgrErh)rirErj)rkrErl)rmrErn)rorErp)rqrErr)rsrErt)rurErv)rwrErx)ryr)rzr)r{r)r|r)r}r)r~r)rr0)r�r0)r�r0�r�r�r��/usr/lib/python3.6/uts46data.py�_seg_0s�r�cfCs�dd�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�gdS)�N�dr0�e�f�g�h�i�j�k�l�m�n�o�p�q�r�s�t�u�v�w�x�y�z�{r�|�}�~���X��������������������������������� ��������� ̈��rErF����I��� ̄����2��� ́��μ���� ̧��1�rb���1⁄4��1⁄2��3⁄4���à��á���â���ã���ä���å���æ���ç)r�r0)r�r0)r�r0)r�r0)r�r0)r�r0)r�r0)r�r0)r�r0)r�r0)r�r0)r�r0)r�r0)r�r0)r�r0)r�r0)r�r0)r�r0)r�r0)r�r0)r�r0)r�r0)r�r0)r�r)r�r)r�r)r�r)r�r)r�r�)r�r�)r�r�)r�r�)r�r�)r�r�)r�r�)r�r�)r�r�)r�r�)r�r�)r�r�)r�r�)r�r�)r�r�)r�r�)r�r�)r�r�)r�r�)r�r�)r�r�)r�r�)r�r�)r�r�)r�r�)r�r�)r�r�)r�r�)r�r�)r�r�)r�r�)r�r�)r�rr�)r�r0)r�r0)r�r0)r�r0)r�r0)r�r0)r�r0)r�rr�)r�r0)r�rErF)r�r0)r�r0)r�r�)r�r0)r�rr�)r�r0)r�r0)r�rEr�)r�rEr)r�rr�)r�rEr�)r�r0)r�r0)r�rr�)r�rEr�)r�rErb)r�r0)r�rEr�)r�rEr�)r�rEr�)r�r0)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�r�r�r�r�r��_seg_1ps�r�ceCs�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�gdS)�N��rE�è���é���ê���ë���ì���í���î���ï���ð���ñ���ò���ó���ô���õ���ö��r0���ø���ù���ú���û���ü���ý���þ���D�ss�������������������������������������������������������ā���ă���ą���ć���ĉ�	�
�ċ���č�
��ď���đ���ē���ĕ���ė���ę���ě���ĝ���ğ�� �ġ�!�"�ģ�#�$�ĥ�%�&�ħ�'�(�ĩ�)�*�ī�+)r�rEr)rrEr)rrEr)rrEr)rrEr)r	rEr
)rrEr)r
rEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rr0)rrEr)r rEr!)r"rEr#)r$rEr%)r&rEr')r(rEr))r*rEr+)r,r-r.)r/r0)r0r0)r1r0)r2r0)r3r0)r4r0)r5r0)r6r0)r7r0)r8r0)r9r0)r:r0)r;r0)r<r0)r=r0)r>r0)r?r0)r@r0)rAr0)rBr0)rCr0)rDr0)rEr0)rFr0)rGr0)rHr0)rIr0)rJr0)rKr0)rLr0)rMr0)rNr0)rOrErP)rQr0)rRrErS)rTr0)rUrErV)rWr0)rXrErY)rZr0)r[rEr\)r]r0)r^rEr_)r`r0)rarErb)rcr0)rdrEre)rfr0)rgrErh)rir0)rjrErk)rlr0)rmrErn)ror0)rprErq)rrr0)rsrErt)rur0)rvrErw)rxr0)ryrErz)r{r0)r|rEr})r~r0)rrEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0r�r�r�r�r��_seg_2�s�r�cfCs�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�dgdS(N�,rE�ĭ�-r0�.�į�/�0�i̇�1�2�ij�4�ĵ�5�6�ķ�7�9�ĺ�:�;�ļ�<�=�ľ�>�?�l·�A�ł�B�C�ń�D�E�ņ�F�G�ň�H�I�ʼn�J�ŋ�K�L�ō�M�N�ŏ�O�P�ő�Q�R�œ�S�T�ŕ�U�V�ŗ�W�X�ř�Y�Z�ś�[�\�ŝ�]�^�ş�_�`�š�a�b�ţ�c�d�ť�e�f�ŧ�g�h�ũ�i�j�ū�k�l�ŭ�m�n�ů�o�p�ű�q�r�ų�s�t�ŵ�u�v�ŷ�w�x�ÿ�y�ź�z�{�ż�|�}�ž�~�rj���ɓ��ƃ���ƅ���ɔ��ƈ���ɖ��ɗ��ƌ���ǝ��ə��ɛ��ƒ���ɠ)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)rr0)rrEr)rrEr)rr0)rrEr)rr0)r	rEr
)rr0)rrErj)r
r0)rrEr)rrEr)rr0)rrEr)rr0)rrEr)rrEr)rr0)rrEr)rrEr)rrEr )r!r0)r"rEr#)r$rEr%)r&rEr')r(rEr))r*r0)r+rEr,r�r�r�r�r��_seg_3@s�r-cfCs�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�dgdS(N�rE�ɣ�r0��ɩ��ɨ��ƙ���ɯ��ɲ���ɵ��ơ���ƣ���ƥ���ʀ��ƨ���ʃ���ƭ���ʈ��ư���ʊ��ʋ��ƴ���ƶ���ʒ��ƹ���ƽ����dž���lj���nj���ǎ�����ǐ�����ǒ�����ǔ�����ǖ�����ǘ�����ǚ�����ǜ�����ǟ�����ǡ�����ǣ�����ǥ�����ǧ�����ǩ�����ǫ�����ǭ�����ǯ�����dz���ǵ���ƕ��ƿ��ǹ���ǻ���ǽ���ǿ���ȁ���ȃ���ȅ���ȇ���ȉ�	�
�ȋ���ȍ)r.rEr/)r0r0)r1rEr2)r3rEr4)r5rEr6)r7r0)r8rEr9)r:rEr;)r<r0)r=rEr>)r?rEr@)rAr0)rBrErC)rDr0)rErErF)rGr0)rHrErI)rJrErK)rLr0)rMrErN)rOr0)rPrErQ)rRr0)rSrErT)rUrErV)rWr0)rXrErY)rZrEr[)r\rEr])r^r0)r_rEr`)rar0)rbrErc)rdrEre)rfr0)rgrErh)rir0)rjrErk)rlrErm)rnrEro)rprErq)rrr0)rsrErt)rur0)rvrErw)rxr0)ryrErz)r{r0)r|rEr})r~r0)rrEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�rEr�)r�r0)r�rEr�)r�rEr�)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�r�r�r�r�r��_seg_4�s�r�ceCs�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�d�d�d�d�d	gdS(
N�
r0�rE�ȏ���ȑ���ȓ���ȕ���ȗ���ș���ț���ȝ���ȟ�� �ƞ�!�"�ȣ�#�$�ȥ�%�&�ȧ�'�(�ȩ�)�*�ȫ�+�,�ȭ�-�.�ȯ�/�0�ȱ�1�2�ȳ�3�:�ⱥ�;�ȼ�<�=�ƚ�>�ⱦ�?�A�ɂ�B�C�ƀ�D�ʉ�E�ʌ�F�ɇ�G�H�ɉ�I�J�ɋ�K�L�ɍ�M�N�ɏ�O�rT��ɦ�rX�rh��ɹ��ɻ��ʁ�rr�rv���r� ̆��� ̇��� ̊��� ̨��� ̃��� ̋�����ɣ��r\��rj��rt���ʕ���@�̀�A�́�B�C�̓�D�̈́�E�ι�F�Or��P�p�ͱ�q�r�ͳ�s�t�ʹ�u�v�ͷ�w)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)rr0)rrEr)rr0)rrEr)rr0)rrEr)r	rEr
)rr0)rrEr
)rrEr)rr0)rrEr)rr0)rrEr)rrEr)rrEr)rrEr)rr0)rrEr)rr0)r rEr!)r"r0)r#rEr$)r%r0)r&rEr')r(r0)r)rErT)r*rEr+)r,rErX)r-rErh)r.rEr/)r0rEr1)r2rEr3)r4rErr)r5rErv)r6r0)r7rr8)r9rr:)r;rr<)r=rr>)r?rr@)rArrB)rCr0)rDrErE)rFrEr\)rGrErj)rHrErt)rIrErJ)rKr0)rLrErM)rNrErO)rPr0)rQrErR)rSrErT)rUrErV)rWr0)rXr�)rYr0)rZrEr[)r\r0)r]rEr^)r_r0)r`rEra)rbr0)rcrErd)rer0r�r�r�r�r��_seg_5s�rfcfCs�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�d�d�d�d�d	gdS(
N�xr��zr� ι�{r0�~�;��� ́�� ̈́�rE�ά��·��έ��ή��ί���ό���ύ��ώ���α��β��γ��δ��ε��ζ��η��θ��ι��κ��λ��μ��ν��ξ��ο��π��ρ���σ��τ��υ��φ��χ��ψ��ω��ϊ��ϋ���r-�����ϗ�������������������ϙ�����ϛ�����ϝ�����ϟ�����ϡ�����ϣ�����ϥ�����ϧ�����ϩ�����ϫ�����ϭ�����ϯ����������������ϸ����ϻ���ͻ��ͼ��ͽ��ѐ��ё��ђ��ѓ)rgr�)rhrri)rjr0)rkrrl)rmr�)rnrro)rprrq)rrrErs)rtrEru)rvrErw)rxrEry)rzrEr{)r|r�)r}rEr~)rr�)r�rEr�)r�rEr�)r�r0)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�r�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�r0)r�r-r�)r�r0)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�rEr�)r�rEr�)r�r0)r�rEr�)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�rEr�)r�r0)r�rEr�)r�rEr�)r�rEr�)r�rEr)rrEr)rrEr)rrErr�r�r�r�r��_seg_6xs�rcfCs�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�d�d�d�d�d	�d
�d�d�d
�d�d�d�d�dgdS(N�rE�є��ѕ��і��ї��ј�	�љ�
�њ��ћ��ќ�
�ѝ��ў��џ��а��б��в��г��д��е��ж��з��и��й��к��л��м��н��о��п� �р�!�с�"�т�#�у�$�ф�%�х�&�ц�'�ч�(�ш�)�щ�*�ъ�+�ы�,�ь�-�э�.�ю�/�я�0r0�`�ѡ�a�b�ѣ�c�d�ѥ�e�f�ѧ�g�h�ѩ�i�j�ѫ�k�l�ѭ�m�n�ѯ�o�p�ѱ�q�r�ѳ�s�t�ѵ�u�v�ѷ�w�x�ѹ�y�z�ѻ�{�|�ѽ�}�~�ѿ���ҁ���ҋ���ҍ���ҏ���ґ���ғ���ҕ���җ���ҙ���қ���ҝ���ҟ)rrEr	)r
rEr)rrEr
)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)r rEr!)r"rEr#)r$rEr%)r&rEr')r(rEr))r*rEr+)r,rEr-)r.rEr/)r0rEr1)r2rEr3)r4rEr5)r6rEr7)r8rEr9)r:rEr;)r<rEr=)r>rEr?)r@rErA)rBrErC)rDrErE)rFrErG)rHrErI)rJrErK)rLrErM)rNrErO)rPrErQ)rRrErS)rTrErU)rVrErW)rXrErY)rZrEr[)r\rEr])r^rEr_)r`r0)rarErb)rcr0)rdrEre)rfr0)rgrErh)rir0)rjrErk)rlr0)rmrErn)ror0)rprErq)rrr0)rsrErt)rur0)rvrErw)rxr0)ryrErz)r{r0)r|rEr})r~r0)rrEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�r�r�r�r�r��_seg_7�s�r�ceCs�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�gdS)�N�r0�rE�ҡ���ң���ҥ���ҧ���ҩ���ҫ���ҭ���ү���ұ���ҳ���ҵ���ҷ���ҹ���һ���ҽ���ҿ��r���ӂ�����ӄ�����ӆ�����ӈ�����ӊ�����ӌ�����ӎ�����ӑ�����ӓ�����ӕ�����ӗ�����ә�����ӛ�����ӝ�����ӟ�����ӡ�����ӣ�����ӥ�����ӧ�����ө�����ӫ�����ӭ�����ӯ�����ӱ�����ӳ�����ӵ���ӷ���ӹ���ӻ���ӽ���ӿ���ԁ���ԃ�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�r�)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr)rr0)rrEr)rr0)rrEr)rr0)rrEr	)r
r0)rrEr)r
r0)rrEr)rr0)rrEr)rr0)rrEr)rr0)rrEr)rr0)rrEr)rr0)rrEr)rr0)r rEr!)r"r0)r#rEr$)r%r0)r&rEr')r(r0)r)rEr*)r+r0)r,rEr-)r.r0)r/rEr0)r1r0)r2rEr3)r4r0)r5rEr6)r7r0)r8rEr9)r:r0)r;rEr<)r=r0)r>rEr?)r@r0)rArErB)rCr0)rDrErE)rFr0)rGrErH)rIr0r�r�r�r�r��_seg_8Hs�rJceCs�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�d�d�d�dgdS(	N�rE�ԅ�r0��ԇ���ԉ�	�
�ԋ���ԍ�
��ԏ���ԑ���ԓ���ԕ���ԗ���ԙ���ԛ���ԝ���ԟ�� �ԡ�!�"�ԣ�#�$�ԥ�%�&�ԧ�'�(r��1�ա�2�բ�3�գ�4�դ�5�ե�6�զ�7�է�8�ը�9�թ�:�ժ�;�ի�<�լ�=�խ�>�ծ�?�կ�@�հ�A�ձ�B�ղ�C�ճ�D�մ�E�յ�F�ն�G�շ�H�ո�I�չ�J�պ�K�ջ�L�ռ�M�ս�N�վ�O�տ�P�ր�Q�ց�R�ւ�S�փ�T�ք�U�օ�V�ֆ�W�Y�`�a��եւ�������������������u�اٴ�v�وٴ�w�ۇٴ�x�يٴ�y��)rKrErL)rMr0)rNrErO)rPr0)rQrErR)rSr0)rTrErU)rVr0)rWrErX)rYr0)rZrEr[)r\r0)r]rEr^)r_r0)r`rEra)rbr0)rcrErd)rer0)rfrErg)rhr0)rirErj)rkr0)rlrErm)rnr0)rorErp)rqr0)rrrErs)rtr0)rurErv)rwr0)rxrEry)rzr0)r{rEr|)r}r0)r~rEr)r�r0)r�r�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�r�)r�r0)r�r�)r�r0)r�rEr�)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�r0)r�r�r�r�r�r�r��_seg_9�s�r�ceCs�dydzd{d|d}d~dd�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�gdS)�N��r0�r���K�M�����.�0�?�@�\�^�_��������	�X	rE�क़�Y	�ख़�Z	�ग़�[	�ज़�\	�ड़�]	�ढ़�^	�फ़�_	�य़�`	�x	�y	�	�	�	�	�	�	�	�	�	�	�	�	�	�	�	�	��	��	��	��	��	��	��	��	�ড়��	�ঢ়��	��	�য়��	��	��	�	�
�
�
�
�
�
�
�)
�*
�1
�2
�3
�ਲ਼�4
�5
�6
�ਸ਼�7
�8
�:
�<
�=
�>
�C
�G
�I
�K
�N
�Q
�R
�Y
�ਖ਼�Z
�ਗ਼�[
�ਜ਼�\
�]
�^
�ਫ਼�_
)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)rr�)rr0)rr�)rr0)rrEr)rrEr)rrEr	)r
rEr)rrEr
)rrEr)rrEr)rrEr)rr0)rr�)rr0)rr�)rr0)rr�)rr0)rr�)rr0)rr�)rr0)rr�)r r0)r!r�)r"r0)r#r�)r$r0)r%r�)r&r0)r'r�)r(r0)r)r�)r*r0)r+r�)r,r0)r-r�)r.rEr/)r0rEr1)r2r�)r3rEr4)r5r0)r6r�)r7r0)r8r�)r9r0)r:r�)r;r0)r<r�)r=r0)r>r�)r?r0)r@r�)rAr0)rBr�)rCr0)rDrErE)rFr�)rGr0)rHrErI)rJr�)rKr0)rLr�)rMr0)rNr�)rOr0)rPr�)rQr0)rRr�)rSr0)rTr�)rUr0)rVr�)rWrErX)rYrErZ)r[rEr\)r]r0)r^r�)r_rEr`)rar�r�r�r�r�r��_seg_10s�rbceCs�djdkdldmdndodpdqdrdsdtdudvdwdxdydzd{d|d}d~dd�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�gdS)�N�f
r0�v
r��
�
�
�
�
�
�
�
�
�
�
�
�
�
�
��
��
��
��
��
��
��
��
��
��
��
����
����)�*�1�2�4�5�:�<�E�G�I�K�N�V�X�\rE�ଡ଼�]�ଢ଼�^�_�d�f�x����������������������������������������������
����)�*�4�5)rcr0)rdr�)rer0)rfr�)rgr0)rhr�)rir0)rjr�)rkr0)rlr�)rmr0)rnr�)ror0)rpr�)rqr0)rrr�)rsr0)rtr�)rur0)rvr�)rwr0)rxr�)ryr0)rzr�)r{r0)r|r�)r}r0)r~r�)rr0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�rEr�)r�rEr�)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0r�r�r�r�r��_seg_11�s�r�ceCs�didjdkdldmdndodpdqdrdsdtdudvdwdxdydzd{d|d}d~dd�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�gdS)�N�:r��=r0�E�F�I�J�N�U�W�X�Z�`�d�f�p�x���������������������������������������������
�
�
�

�
�
�
�;
�=
�E
�F
�I
�J
�O
�W
�X
�`
�d
�f
�v
�y
�
�
�
�
�
�
�
�
�
�
�
�
��
��
��
��
��
��
��
��
��
��
�
��3rE�ํา�4�;�?�\�����)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)rr�)rr0)rr�)rr0)rr�)rr0)rr�)rr0)rr�)r	r0)r
r�)rr0)rr�)r
r0)rr�)rr0)rr�)rr0)rr�)rr0)rr�)rr0)rr�)rr0)rr�)rr0)rr�)rr0)rr�)rr0)rr�)rr0)r r�)r!r0)r"r�)r#r0)r$rEr%)r&r0)r'r�)r(r0)r)r�)r*r0)r+r�)r,r0)r-r�)r.r0r�r�r�r�r��_seg_12�s�r/ceCs�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�gdS)�N�r��r0�����������������rE�ໍາ����������������������ຫນ���ຫມ�������་�
�C�གྷ�D�H�I�M�ཌྷ�N�R�དྷ�S�W�བྷ�X�\�ཛྷ�]�i�ཀྵ�j�m�q�s�ཱི�t�u�ཱུ�v�ྲྀ�w�	ྲཱྀ�x�ླྀ�y�	ླཱྀ�z��ཱྀ���ྒྷ�����ྜྷ���ྡྷ���ྦྷ���ྫྷ���ྐྵ��������������ⴧ�����ⴭ������ნ��_�a�I�J�N�P�W�X)r0r�)r1r0)r2r�)r3r0)r4r�)r5r0)r6r�)r7r0)r8r�)r9r0)r:r�)r;r0)r<r�)r=r0)r>r�)r?r0)r@r�)rAr0)rBrErC)rDr0)rEr�)rFr0)rGr�)rHr0)rIr�)rJr0)rKr�)rLr0)rMr�)rNr0)rOr�)rPrErQ)rRrErS)rTr0)rUr�)rVr0)rWrErX)rYr0)rZrEr[)r\r0)r]r�)r^r0)r_rEr`)rar0)rbrErc)rdr0)rerErf)rgr0)rhrEri)rjr0)rkrErl)rmr0)rnr�)ror0)rprErq)rrr0)rsrErt)rurErv)rwrErx)ryrErz)r{rEr|)r}r0)r~rEr)r�r0)r�rEr�)r�r0)r�r�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�rEr�)r�r�)r�rEr�)r�r�)r�r0)r�rEr�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0r�r�r�r�r��_seg_13Ps�r�ceCs�dhdidjdkdldmdndodpdqdrdsdtdudvdwdxdydzd{d|d}d~dd�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�gdS)�N�Yr��Zr0�^�`��������������������������[�]�}�������������
��� �7�@�T�`�m�n�q�r�t����������������r����� �x������� �,�0�<�@�A�D�n�p�u��������������_�`�}����)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)rr�)rr0)rr�)rr0)rr�)rr0)rr�)rr0)rr�)r	r0)r
r�)rr0)rr�)r
r0)rr�)rr0)rr�)rr0)rr�r�r�r�r�r��_seg_14�s�rcfCs�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�dgdS(N�r0�r���L�P�}�����8�;�J�M���������,rErF�-r��.rH�/�0rL�1rN�2�ǝ�3rR�4rT�5rV�6rX�7rZ�8r\�9r^�:r`�;�<rb�=�ȣ�>rd�?rh�@rl�Arn�Brr�C�D�ɐ�E�ɑ�F�ᴂ�G�H�I�J�ə�K�ɛ�L�ɜ�M�N�O�P�Q�ŋ�R�S�ɔ�T�ᴖ�U�ᴗ�V�W�X�Y�ᴝ�Z�ɯ�[rp�\�ᴥ�]�β�^�γ�_�δ�`�φ�a�χ�b�c�d�e�f�g�h�ρ�i�j�k�x�н�y��ɒ�rJ��ɕ�r��rP��ɟ��ɡ��ɥ��ɨ��ɩ��ɪ��ᵻ��ʝ��ɭ)rr0)rr�)rr0)rr�)rr0)rr�)rr0)rr�)rr0)rr�)rr0)rr�)r r0)r!r�)r"r0)r#r�)r$r0)r%r�)r&r0)r'rErF)r(rEr�)r)rErH)r*r0)r+rErL)r,rErN)r-rEr.)r/rErR)r0rErT)r1rErV)r2rErX)r3rErZ)r4rEr\)r5rEr^)r6rEr`)r7r0)r8rErb)r9rEr:)r;rErd)r<rErh)r=rErl)r>rErn)r?rErr)r@rErF)rArErB)rCrErD)rErErF)rGrErH)rHrErL)rIrErN)rJrErK)rLrErM)rNrErO)rPrErR)rQr0)rRrErZ)rSrEr^)rTrErU)rVrErb)rWrErX)rYrErZ)r[rEr\)r]rErd)r^rErl)r_rErn)r`rEra)rbrErc)rdrErp)rerErf)rgrErh)rirErj)rkrErl)rmrErn)rorErp)rqrErV)rrrErh)rsrErn)rtrErp)rurErh)rvrErj)rwrErx)ryrErn)rzrErp)r{r0)r|rEr})r~r0)rrEr�)r�rErJ)r�rEr�)r�rEr)r�rErO)r�rErP)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�r�r�r�r�r��_seg_15 s�r�cfCs�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�d�d�dgdS(N�rE�ᶅ��ʟ��ɱ��ɰ��ɲ��ɳ��ɴ��ɵ��ɸ��ʂ��ʃ��ƫ��ʉ��ʊ��ᴜ��ʋ��ʌ�rx��ʐ��ʑ��ʒ��θ�r0��r����ḁ���ḃ���ḅ���ḇ���ḉ�	�
�ḋ���ḍ�
��ḏ���ḑ���ḓ���ḕ���ḗ���ḙ���ḛ���ḝ���ḟ�� �ḡ�!�"�ḣ�#�$�ḥ�%�&�ḧ�'�(�ḩ�)�*�ḫ�+�,�ḭ�-�.�ḯ�/�0�ḱ�1�2�ḳ�3�4�ḵ�5�6�ḷ�7�8�ḹ�9�:�ḻ�;�<�ḽ�=�>�ḿ�?�@�ṁ�A�B�ṃ�C�D�ṅ�E�F�ṇ�G�H�ṉ�I�J�ṋ)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rErx)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�r0)r�r�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r	r0)r	rEr	)r	r0)r	rEr	)r	r0)r	rEr	)r		r0)r
	rEr	)r	r0)r
	rEr	)r	r0)r	rEr	)r	r0)r	rEr	)r	r0)r	rEr	)r	r0)r	rEr	)r	r0)r	rEr	)r	r0)r	rEr 	)r!	r0)r"	rEr#	)r$	r0)r%	rEr&	)r'	r0)r(	rEr)	)r*	r0)r+	rEr,	)r-	r0)r.	rEr/	)r0	r0)r1	rEr2	)r3	r0)r4	rEr5	)r6	r0)r7	rEr8	r�r�r�r�r��_seg_16�s�r9	ceCs�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�gdS)�N�Kr0�LrE�ṍ�M�N�ṏ�O�P�ṑ�Q�R�ṓ�S�T�ṕ�U�V�ṗ�W�X�ṙ�Y�Z�ṛ�[�\�ṝ�]�^�ṟ�_�`�ṡ�a�b�ṣ�c�d�ṥ�e�f�ṧ�g�h�ṩ�i�j�ṫ�k�l�ṭ�m�n�ṯ�o�p�ṱ�q�r�ṳ�s�t�ṵ�u�v�ṷ�w�x�ṹ�y�z�ṻ�{�|�ṽ�}�~�ṿ���ẁ���ẃ���ẅ���ẇ���ẉ���ẋ���ẍ���ẏ���ẑ���ẓ���ẕ���aʾ���r.���ạ���ả���ấ���ầ���ẩ���ẫ���ậ���ắ���ằ���ẳ�)r:	r0)r;	rEr<	)r=	r0)r>	rEr?	)r@	r0)rA	rErB	)rC	r0)rD	rErE	)rF	r0)rG	rErH	)rI	r0)rJ	rErK	)rL	r0)rM	rErN	)rO	r0)rP	rErQ	)rR	r0)rS	rErT	)rU	r0)rV	rErW	)rX	r0)rY	rErZ	)r[	r0)r\	rEr]	)r^	r0)r_	rEr`	)ra	r0)rb	rErc	)rd	r0)re	rErf	)rg	r0)rh	rEri	)rj	r0)rk	rErl	)rm	r0)rn	rEro	)rp	r0)rq	rErr	)rs	r0)rt	rEru	)rv	r0)rw	rErx	)ry	r0)rz	rEr{	)r|	r0)r}	rEr~	)r	r0)r�	rEr�	)r�	r0)r�	rEr�	)r�	r0)r�	rEr�	)r�	r0)r�	rEr�	)r�	r0)r�	rEr�	)r�	r0)r�	rEr�	)r�	r0)r�	rEr�	)r�	r0)r�	rEr�	)r�	r0)r�	rEr�	)r�	r0)r�	rEr�	)r�	r0)r�	rEr�	)r�	r0)r�	rEr�	)r�	r0)r�	rEr�	)r�	r0)r�	rEr�	)r�	r0)r�	rEr�	)r�	rErZ	)r�	r0)r�	rEr.)r�	r0)r�	rEr�	)r�	r0)r�	rEr�	)r�	r0)r�	rEr�	)r�	r0)r�	rEr�	)r�	r0)r�	rEr�	)r�	r0)r�	rEr�	)r�	r0)r�	rEr�	)r�	r0)r�	rEr�	)r�	r0)r�	rEr�	)r�	r0)r�	rEr�	)r�	r0r�r�r�r�r��_seg_17�s�r�	cfCs�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�dgdS(N�rE�ẵ�r0��ặ���ẹ���ẻ���ẽ���ế���ề����ể�����ễ�����ệ�����ỉ�����ị�����ọ�����ỏ�����ố�����ồ�����ổ�����ỗ�����ộ�����ớ�����ờ�����ở�����ỡ�����ợ�����ụ�����ủ�����ứ�����ừ�����ử�����ữ�����ự�����ỳ�����ỵ���ỷ���ỹ���ỻ���ỽ���ỿ���ἀ�	�ἁ�
�ἂ��ἃ��ἄ�
�ἅ��ἆ��ἇ��r���ἐ��ἑ��ἒ��ἓ��ἔ��ἕ�� �(�ἠ�)�ἡ�*�ἢ�+�ἣ�,�ἤ�-�ἥ)r�	rEr�	)r�	r0)r�	rEr�	)r�	r0)r�	rEr�	)r�	r0)r�	rEr�	)r�	r0)r�	rEr�	)r�	r0)r�	rEr�	)r�	r0)r�	rEr�	)r�	r0)r�	rEr�	)r�	r0)r�	rEr�	)r�	r0)r�	rEr�	)r�	r0)r�	rEr�	)r�	r0)r�	rEr�	)r�	r0)r�	rEr�	)r�	r0)r�	rEr�	)r�	r0)r�	rEr�	)r�	r0)r�	rEr�	)r�	r0)r�	rEr
)r
r0)r
rEr
)r
r0)r
rEr
)r
r0)r
rEr	
)r

r0)r
rEr
)r
r0)r
rEr
)r
r0)r
rEr
)r
r0)r
rEr
)r
r0)r
rEr
)r
r0)r
rEr
)r
r0)r
rEr
)r
r0)r 
rEr!
)r"
r0)r#
rEr$
)r%
r0)r&
rEr'
)r(
r0)r)
rEr*
)r+
r0)r,
rEr-
)r.
r0)r/
rEr0
)r1
r0)r2
rEr3
)r4
r0)r5
rEr6
)r7
r0)r8
rEr9
)r:
r0)r;
rEr<
)r=
r0)r>
rEr?
)r@
r0)rA
rErB
)rC
rErD
)rE
rErF
)rG
rErH
)rI
rErJ
)rK
rErL
)rM
rErN
)rO
rErP
)rQ
r0)rR
r�)rS
rErT
)rU
rErV
)rW
rErX
)rY
rErZ
)r[
rEr\
)r]
rEr^
)r_
r�)r`
r0)ra
rErb
)rc
rErd
)re
rErf
)rg
rErh
)ri
rErj
)rk
rErl
r�r�r�r�r��_seg_18Xs�rm
cfCs�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�d�dgdS(N�.rE�ἦ�/�ἧ�0r0�8�ἰ�9�ἱ�:�ἲ�;�ἳ�<�ἴ�=�ἵ�>�ἶ�?�ἷ�@�Fr��H�ὀ�I�ὁ�J�ὂ�K�ὃ�L�ὄ�M�ὅ�N�P�X�Y�ὑ�Z�[�ὓ�\�]�ὕ�^�_�ὗ�`�h�ὠ�i�ὡ�j�ὢ�k�ὣ�l�ὤ�m�ὥ�n�ὦ�o�ὧ�p�q�ά�r�s�έ�t�u�ή�v�w�ί�x�y�ό�z�{�ύ�|�}�ώ�~��ἀι��ἁι��ἂι��ἃι��ἄι��ἅι��ἆι��ἇι����������ἠι��ἡι��ἢι��ἣι��ἤι��ἥι��ἦι��ἧι����������ὠι��ὡι��ὢι��ὣι��ὤι��ὥι��ὦι��ὧι�������)rn
rEro
)rp
rErq
)rr
r0)rs
rErt
)ru
rErv
)rw
rErx
)ry
rErz
)r{
rEr|
)r}
rEr~
)r
rEr�
)r�
rEr�
)r�
r0)r�
r�)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
r�)r�
r0)r�
r�)r�
rEr�
)r�
r�)r�
rEr�
)r�
r�)r�
rEr�
)r�
r�)r�
rEr�
)r�
r0)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
r0)r�
rEr�
)r�
r0)r�
rEr�
)r�
r0)r�
rEr�
)r�
r0)r�
rEr�
)r�
r0)r�
rEr�
)r�
r0)r�
rEr�
)r�
r0)r�
rEr�
)r�
r�)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)rrEr)rrEr)rrEr)rrEr�
)rrEr�
)rrEr�
)r	rEr�
)r
rEr�
)rrEr)rrErr�r�r�r�r��_seg_19�s�r
ceCs�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�d�d�d�d�d	�d
�d�d�d
�d�dgdS(N�rE�ὧι�r0��ὰι��αι��άι�r����ᾶι��ᾰ��ᾱ��ὰ��ά��r� ̓��ι��� ͂�� ̈͂���ὴι���ηι���ήι�������ῆι���ὲ���έ���ὴ���ή����� ̓̀��� ̓́��� ̓͂�����ΐ�������ῐ���ῑ���ὶ���ί����� ̔̀��� ̔́��� ̔͂�����ΰ�����ῠ���ῡ���ὺ���ύ���ῥ��� ̈̀��� ̈́���`�����ὼι���ωι���ώι����ῶι��ὸ��ό��ὼ��ώ��� ́�� ̔�� r�� r�� r-�� � � �‐� � � ̳� �$ �' �( �/ �0 �3 �′′�4 �	′′′�5 �6 �‵‵�7 �	‵‵‵�8 �< �!!�= �> � ̅�? �G �??�H �?!�I �!?�J �W �′′′′�X )rrEr)rr0)rrEr)rrEr)rrEr)rr�)rr0)rrEr)rrEr)rrEr)rrEr )r!rEr")r#rEr)r$rr%)r&rEr')r(rr%)r)rr*)r+rr,)r-rEr.)r/rEr0)r1rEr2)r3r�)r4r0)r5rEr6)r7rEr8)r9rEr:)r;rEr<)r=rEr>)r?rEr0)r@rrA)rBrrC)rDrrE)rFr0)rGrErH)rIr�)rJr0)rKrErL)rMrErN)rOrErP)rQrErR)rSr�)rTrrU)rVrrW)rXrrY)rZr0)r[rEr\)r]r0)r^rEr_)r`rEra)rbrErc)rdrEre)rfrErg)rhrri)rjrrk)rlrrm)rnr�)rorErp)rqrErr)rsrErt)rur�)rvr0)rwrErx)ryrErz)r{rEr|)r}rEr~)rrEr�)r�rErr)r�rr�)r�rr�)r�r�)r�rr�)r�r�)r�r-r�)r�r�)r�r0)r�rEr�)r�r0)r�rr�)r�r0)r�r�)r�r0)r�r�)r�rr�)r�r0)r�rEr�)r�rEr�)r�r0)r�rEr�)r�rEr�)r�r0)r�rr�)r�r0)r�rr�)r�r0)r�rr�)r�rr�)r�rr�)r�r0)r�rEr�)r�r0r�r�r�r�r��_seg_20(s�r�cfCs�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�dgdS(N�_ rr��` r��a r��d �e �p rE�0�q rV�r �t �4�u �5�v �6�w �7�x �8�y �9�z �+�{ �−�| �=�} �(�~ �)� r`� � r�� r�� � � � � � � � � � � � � � rF� rN� rb� rt� �ə� rT� rZ� r\� r^� � rd� rj� rl� � r0� �rs� � �� �� �!�a/c�!�a/s�!rJ�!�°c�!�!�c/o�!�c/u�!�ɛ�!�	!�°f�
!rR�!�!�ħ�!�!�!�!�!�no�!�!�!rf�!rh�!� !�sm�!!�tel�"!�tm�#!�$!rx�%!�&!�ω�'!�(!�)!�*!�+!r��,!rH�-!�.!�/!�1!rP�2!�3!�4!�5!�א)r�rr�)r�r�)r�r�)r�r�)r�r�)r�rEr�)r�rErV)r�r�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rr�)r�rEr�)r�rr�)r�rr�)r�rr�)r�rEr`)r�rEr�)r�rEr�)r�rEr�)r�rEr)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rr�)r�rEr�)r�rr�)r�rr�)r�rr�)r�r�)r�rErF)r�rErN)r�rErb)r�rErt)r�rEr�)r�rErT)r�rErZ)r�rEr\)r�rEr^)r�rEr`)r�rErd)r�rErj)r�rErl)r�r�)r�r0)r�rEr�)r�r0)r�r�)r�r0)r�r�)r�rr�)r�rr�)r�rErJ)r�rEr�)rr0)rrr)rrr)rrEr)rr0)rrEr	)r
rErR)rrErT)rrEr
)rrErV)rrEr\)rr0)rrEr`)rrEr)rr0)rrErd)rrErf)rrErh)rr0)rrEr)rrEr)rrEr)rr0)r rErx)r!r0)r"rEr#)r$r0)r%rErx)r&r0)r'rErZ)r(rEr�)r)rErH)r*rErJ)r+r0)r,rErN)r-rErP)r.r�)r/rEr^)r0rErb)r1rEr2r�r�r�r�r��_seg_21�s�r3cfCs�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�d�d�dgdS(N�6!rE�ב�7!�ג�8!�ד�9!rV�:!r0�;!�fax�<!�π�=!�γ�?!�@!�∑�A!�E!rL�G!rN�H!�I!rX�J!�P!�1⁄7�Q!�1⁄9�R!�1⁄10�S!�1⁄3�T!�2⁄3�U!�1⁄5�V!�2⁄5�W!�3⁄5�X!�4⁄5�Y!�1⁄6�Z!�5⁄6�[!�1⁄8�\!�3⁄8�]!�5⁄8�^!�7⁄8�_!�1⁄�`!�a!�ii�b!�iii�c!�iv�d!rp�e!�vi�f!�vii�g!�viii�h!�ix�i!rt�j!�xi�k!�xii�l!r\�m!rJ�n!�o!r^�p!�q!�r!�s!�t!�u!�v!�w!�x!�y!�z!�{!�|!�}!�~!�!�!�!r��!�!�0⁄3�!�!�,"�∫∫�-"�	∫∫∫�."�/"�∮∮�0"�	∮∮∮�1"�`"r�a"�n"�p"�)#�〈�*#�〉�+#��#�$�'$�@$�K$�`$r��a$r��b$�c$r��d$r��e$r��f$r��g$r��h$r��i$�10�j$�11�k$�12)r4rEr5)r6rEr7)r8rEr9)r:rErV)r;r0)r<rEr=)r>rEr?)r@rErA)rBrEr?)rCrErD)rEr0)rFrErL)rGrErN)rHrErV)rIrErX)rJr0)rKrErL)rMrErN)rOrErP)rQrErR)rSrErT)rUrErV)rWrErX)rYrErZ)r[rEr\)r]rEr^)r_rEr`)rarErb)rcrErd)rerErf)rgrErh)rirErj)rkrErV)rlrErm)rnrEro)rprErq)rrrErp)rsrErt)rurErv)rwrErx)ryrErz)r{rErt)r|rEr})r~rEr)r�rEr\)r�rErJ)r�rErL)r�rEr^)r�rErV)r�rErm)r�rEro)r�rErq)r�rErp)r�rErt)r�rErv)r�rErx)r�rErz)r�rErt)r�rEr})r�rEr)r�rEr\)r�rErJ)r�rErL)r�rEr^)r�r0)r�r�)r�r0)r�rEr�)r�r�)r�r0)r�rEr�)r�rEr�)r�r0)r�rEr�)r�rEr�)r�r0)r�r)r�r0)r�r)r�r0)r�rEr�)r�rEr�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�rEr�)r�rEr�)r�rEr)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�r�r�r�r�r��_seg_22�s�r�cfCsd�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�d�d�d�d�d	�d
�d�d�d
�d�d�d�d�d�d�d�d�d�d�d�d�d�dgdS(N�l$rE�13�m$�14�n$�15�o$�16�p$�17�q$�18�r$�19�s$�20�t$r�(1)�u$�(2)�v$�(3)�w$�(4)�x$�(5)�y$�(6)�z$�(7)�{$�(8)�|$�(9)�}$�(10)�~$�(11)�$�(12)�$�(13)�$�(14)�$�(15)�$�(16)�$�(17)�$�(18)�$�(19)�$�(20)�$r��$�(a)�$�(b)�$�(c)�$�(d)�$�(e)�$�(f)�$�(g)�$�(h)�$�(i)�$�(j)�$�(k)�$�(l)�$�(m)�$�(n)�$�(o)�$�(p)�$�(q)�$�(r)�$�(s)�$�(t)�$�(u)�$�(v)�$�(w)�$�(x)�$�(y)�$�(z)�$rF�$rH�$rJ�$rL�$rN�$rP�$rR�$rT�$rV�$rX�$rZ�$r\��$r^��$r`��$rb��$rd��$rf��$rh��$rj��$rl��$rn��$rp��$rr��$rt��$rv��$rx��$��$��$��$��$��$��$��$��$��$��$��$��$��$��$��$��$��$��$)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�r�)r�rr�)r�rr�)r
rr
)r
rr
)r
rr
)r
rr
)r
rr	
)r

rr
)r
rr

)r
rr
)r
rr
)r
rr
)r
rr
)r
rr
)r
rr
)r
rr
)r
rr
)r
rr
)r 
rr!
)r"
rr#
)r$
rr%
)r&
rr'
)r(
rr)
)r*
rr+
)r,
rr-
)r.
rr/
)r0
rErF)r1
rErH)r2
rErJ)r3
rErL)r4
rErN)r5
rErP)r6
rErR)r7
rErT)r8
rErV)r9
rErX)r:
rErZ)r;
rEr\)r<
rEr^)r=
rEr`)r>
rErb)r?
rErd)r@
rErf)rA
rErh)rB
rErj)rC
rErl)rD
rErn)rE
rErp)rF
rErr)rG
rErt)rH
rErv)rI
rErx)rJ
rErF)rK
rErH)rL
rErJ)rM
rErL)rN
rErN)rO
rErP)rP
rErR)rQ
rErT)rR
rErV)rS
rErX)rT
rErZ)rU
rEr\)rV
rEr^)rW
rEr`)rX
rErb)rY
rErd)rZ
rErf)r[
rErh)r\
rErjr�r�r�r�r��_seg_23`	s�r]
cfCsd�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�d�d�d�d�d	�d
�d�d�d
�d�d�d�d�d�d�d�d�d�d�d�d�dgdS(N��$rErl��$rn��$rp��$rr��$rt��$rv��$rx��$r���$r0�'r��'�*�∫∫∫∫�
*�t*r�::=�u*�==�v*�===�w*��*�⫝̸��*�M+�P+�Z+�,�ⰰ�,�ⰱ�,�ⰲ�,�ⰳ�,�ⰴ�,�ⰵ�,�ⰶ�,�ⰷ�,�ⰸ�	,�ⰹ�
,�ⰺ�,�ⰻ�,�ⰼ�
,�ⰽ�,�ⰾ�,�ⰿ�,�ⱀ�,�ⱁ�,�ⱂ�,�ⱃ�,�ⱄ�,�ⱅ�,�ⱆ�,�ⱇ�,�ⱈ�,�ⱉ�,�ⱊ�,�ⱋ�,�ⱌ�,�ⱍ�,�ⱎ�,�ⱏ� ,�ⱐ�!,�ⱑ�",�ⱒ�#,�ⱓ�$,�ⱔ�%,�ⱕ�&,�ⱖ�',�ⱗ�(,�ⱘ�),�ⱙ�*,�ⱚ�+,�ⱛ�,,�ⱜ�-,�ⱝ�.,�ⱞ�/,�0,�_,�`,�ⱡ�a,�b,�ɫ�c,�ᵽ�d,�ɽ�e,�g,�ⱨ�h,�i,�ⱪ�j,�k,�ⱬ�l,�m,�ɑ�n,�ɱ�o,�ɐ�p,�ɒ�q,�r,�ⱳ�s,�u,�ⱶ�v,�|,rX�},�~,�ȿ�,�ɀ�,�ⲁ�,�,�ⲃ)r^
rErl)r_
rErn)r`
rErp)ra
rErr)rb
rErt)rc
rErv)rd
rErx)re
rEr�)rf
r0)rg
r�)rh
r0)ri
rErj
)rk
r0)rl
rrm
)rn
rro
)rp
rrq
)rr
r0)rs
rErt
)ru
r0)rv
r�)rw
r0)rx
r�)ry
rErz
)r{
rEr|
)r}
rEr~
)r
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
r�)r�
r0)r�
r�)r�
rEr�
)r�
r0)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
r0)r�
rEr�
)r�
r0)r�
rEr�
)r�
r0)r�
rEr�
)r�
r0)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
rEr�
)r�
r0)r�
rEr�
)r�
r0)r�
rEr�
)r�
r0)r�
rErX)r�
rErp)r�
rEr�
)rrEr)rrEr)rr0)rrErr�r�r�r�r��_seg_24�	s�rcfCs�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�gdS)�N�,r0�,rE�ⲅ�,�,�ⲇ�,�,�ⲉ�,�,�ⲋ�,�,�ⲍ�,�,�ⲏ�,�,�ⲑ�,�,�ⲓ�,�,�ⲕ�,�,�ⲗ�,�,�ⲙ�,�,�ⲛ�,�,�ⲝ�,�,�ⲟ�,�,�ⲡ�,�,�ⲣ�,�,�ⲥ�,�,�ⲧ�,�,�ⲩ�,�,�ⲫ�,�,�ⲭ�,�,�ⲯ�,�,�ⲱ�,�,�ⲳ�,�,�ⲵ�,�,�ⲷ�,�,�ⲹ�,�,�ⲻ�,�,�ⲽ�,�,�ⲿ�,�,�ⳁ�,��,�ⳃ��,��,�ⳅ��,��,�ⳇ��,��,�ⳉ��,��,�ⳋ��,��,�ⳍ��,��,�ⳏ��,��,�ⳑ��,��,�ⳓ��,��,�ⳕ��,��,�ⳗ��,��,�ⳙ��,��,�ⳛ��,��,�ⳝ��,��,�ⳟ��,��,�ⳡ��,��,�ⳣ��,��,�ⳬ��,��,�ⳮ)rr0)r	rEr
)rr0)rrEr
)rr0)rrEr)rr0)rrEr)rr0)rrEr)rr0)rrEr)rr0)rrEr)rr0)rrEr)r r0)r!rEr")r#r0)r$rEr%)r&r0)r'rEr()r)r0)r*rEr+)r,r0)r-rEr.)r/r0)r0rEr1)r2r0)r3rEr4)r5r0)r6rEr7)r8r0)r9rEr:)r;r0)r<rEr=)r>r0)r?rEr@)rAr0)rBrErC)rDr0)rErErF)rGr0)rHrErI)rJr0)rKrErL)rMr0)rNrErO)rPr0)rQrErR)rSr0)rTrErU)rVr0)rWrErX)rYr0)rZrEr[)r\r0)r]rEr^)r_r0)r`rEra)rbr0)rcrErd)rer0)rfrErg)rhr0)rirErj)rkr0)rlrErm)rnr0)rorErp)rqr0)rrrErs)rtr0)rurErv)rwr0)rxrEry)rzr0)r{rEr|)r}r0)r~rEr)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�r�r�r�r�r��_seg_250
s�r�cfCs�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�d�d�d�d�d	gdS(
N��,r0��,rE�ⳳ��,��,r��,�&-�'-�(-�--�.-�0-�h-�o-�ⵡ�p-�q-�-�-�-�-�-�-�-�-�-�-�-��-��-��-��-��-��-��-��-�<.�.�.�.�.�母�.��.�龟��.�/�一�/�丨�/�丶�/�丿�/�乙�/�亅�/�二�/�亠�/�人�	/�儿�
/�入�/�八�/�冂�
/�冖�/�冫�/�几�/�凵�/�刀�/�力�/�勹�/�匕�/�匚�/�匸�/�十�/�卜�/�卩�/�厂�/�厶�/�又�/�口�/�囗�/�土� /�士�!/�夂�"/�夊�#/�夕�$/�大�%/�女�&/�子�'/�宀�(/�寸�)/�小�*/�尢�+/�尸�,/�屮�-/�山�./�巛�//�工�0/�己�1/�巾�2/�干�3/�幺�4/�广�5/�廴�6/�廾�7/�弋�8/�弓�9/�彐)r�r0)r�rEr�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�rEr�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�rEr�)r�r0)r�rEr�)r�r�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr)rrEr)rrEr)rrEr)rrEr)r	rEr
)rrEr)r
rEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr )r!rEr")r#rEr$)r%rEr&)r'rEr()r)rEr*)r+rEr,)r-rEr.)r/rEr0)r1rEr2)r3rEr4)r5rEr6)r7rEr8)r9rEr:)r;rEr<)r=rEr>)r?rEr@r�r�r�r�r��_seg_26�
s�rAcfCs(d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�d�d�d�d�d	�d
�d�d�d
�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d �d!�d"�d#�d$�d%�d&�d'�d(�d)�d*�d+�d,�d-gdS(.N�:/rE�彡�;/�彳�</�心�=/�戈�>/�戶�?/�手�@/�支�A/�攴�B/�文�C/�斗�D/�斤�E/�方�F/�无�G/�日�H/�曰�I/�月�J/�木�K/�欠�L/�止�M/�歹�N/�殳�O/�毋�P/�比�Q/�毛�R/�氏�S/�气�T/�水�U/�火�V/�爪�W/�父�X/�爻�Y/�爿�Z/�片�[/�牙�\/�牛�]/�犬�^/�玄�_/�玉�`/�瓜�a/�瓦�b/�甘�c/�生�d/�用�e/�田�f/�疋�g/�疒�h/�癶�i/�白�j/�皮�k/�皿�l/�目�m/�矛�n/�矢�o/�石�p/�示�q/�禸�r/�禾�s/�穴�t/�立�u/�竹�v/�米�w/�糸�x/�缶�y/�网�z/�羊�{/�羽�|/�老�}/�而�~/�耒�/�耳�/�聿�/�肉�/�臣�/�自�/�至�/�臼�/�舌�/�舛�/�舟�/�艮�/�色�/�艸�/�虍�/�虫�/�血�/�行�/�衣�/�襾�/�見�/�角�/�言�/�谷�/�豆�/�豕�/�豸�/�貝�/�赤�/�走�/�足�/�身)rBrErC)rDrErE)rFrErG)rHrErI)rJrErK)rLrErM)rNrErO)rPrErQ)rRrErS)rTrErU)rVrErW)rXrErY)rZrEr[)r\rEr])r^rEr_)r`rEra)rbrErc)rdrEre)rfrErg)rhrEri)rjrErk)rlrErm)rnrEro)rprErq)rrrErs)rtrEru)rvrErw)rxrEry)rzrEr{)r|rEr})r~rEr)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)rrEr)rrEr)rrEr)rrEr)rrEr	r�r�r�r�r��_seg_27s�r
cfCsd�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�d�d�d�d�d	�d
�d�d�d
�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d �d!�d"gdS(#N�/rE�車�/�辛�/�辰�/�辵�/�邑�/�酉�/�釆�/�里�/�金�/�長�/�門�/�阜�/�隶�/�隹�/�雨�/�靑�/�非�/�面�/�革�/�韋�/�韭�/�音�/�頁�/�風�/�飛�/�食�/�首�/�香�/�馬�/�骨�/�高�/�髟�/�鬥�/�鬯�/�鬲�/�鬼��/�魚��/�鳥��/�鹵��/�鹿��/�麥��/�麻��/�黃��/�黍��/�黑��/�黹��/�黽��/�鼎��/�鼓��/�鼠��/�鼻��/�齊��/�齒��/�龍��/�龜��/�龠��/r��0rr��0r0�0�.�0�60�〒�70�80�十�90�卄�:0�卅�;0�@0�A0�0�0�0� ゙�0� ゚�0�0�より�0�0�コト�1�1�.1�11�ᄀ�21�ᄁ�31�ᆪ�41�ᄂ�51�ᆬ�61�ᆭ�71�ᄃ�81�ᄄ�91�ᄅ�:1�ᆰ�;1�ᆱ�<1�ᆲ�=1�ᆳ�>1�ᆴ�?1�ᆵ�@1�ᄚ�A1�ᄆ�B1�ᄇ�C1�ᄈ�D1�ᄡ)rrEr)r
rEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr )r!rEr")r#rEr$)r%rEr&)r'rEr()r)rEr*)r+rEr,)r-rEr.)r/rEr0)r1rEr2)r3rEr4)r5rEr6)r7rEr8)r9rEr:)r;rEr<)r=rEr>)r?rEr@)rArErB)rCrErD)rErErF)rGrErH)rIrErJ)rKrErL)rMrErN)rOrErP)rQrErR)rSrErT)rUrErV)rWrErX)rYrErZ)r[rEr\)r]rEr^)r_rEr`)rarErb)rcrErd)rerErf)rgrErh)rirErj)rkrErl)rmrErn)rorErp)rqrErr)rsrErt)rurErv)rwrErx)ryrErz)r{r�)r|rr�)r}r0)r~rEr)r�r0)r�rEr�)r�r0)r�rEr�)r�rEr�)r�rEr�)r�r0)r�r�)r�r0)r�r�)r�r0)r�rr�)r�rr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r�)r�r0)r�r�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�r�r�r�r�r��_seg_28hs�r�cfCsd�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�d�d�d�d�d	�d
�d�d�d
�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d �d!�d"�d#�d$�d%�d&�d'�d(gdS()N�E1rE�ᄉ�F1�ᄊ�G1�ᄋ�H1�ᄌ�I1�ᄍ�J1�ᄎ�K1�ᄏ�L1�ᄐ�M1�ᄑ�N1�ᄒ�O1�ᅡ�P1�ᅢ�Q1�ᅣ�R1�ᅤ�S1�ᅥ�T1�ᅦ�U1�ᅧ�V1�ᅨ�W1�ᅩ�X1�ᅪ�Y1�ᅫ�Z1�ᅬ�[1�ᅭ�\1�ᅮ�]1�ᅯ�^1�ᅰ�_1�ᅱ�`1�ᅲ�a1�ᅳ�b1�ᅴ�c1�ᅵ�d1r��e1�ᄔ�f1�ᄕ�g1�ᇇ�h1�ᇈ�i1�ᇌ�j1�ᇎ�k1�ᇓ�l1�ᇗ�m1�ᇙ�n1�ᄜ�o1�ᇝ�p1�ᇟ�q1�ᄝ�r1�ᄞ�s1�ᄠ�t1�ᄢ�u1�ᄣ�v1�ᄧ�w1�ᄩ�x1�ᄫ�y1�ᄬ�z1�ᄭ�{1�ᄮ�|1�ᄯ�}1�ᄲ�~1�ᄶ�1�ᅀ�1�ᅇ�1�ᅌ�1�ᇱ�1�ᇲ�1�ᅗ�1�ᅘ�1�ᅙ�1�ᆄ�1�ᆅ�1�ᆈ�1�ᆑ�1�ᆒ�1�ᆔ�1�ᆞ�1�ᆡ�1�1r0�1�一�1�二�1�三�1�四�1�上�1�中�1�下�1�甲�1�乙�1�丙�1�丁�1�天�1�地�1�人�1�1�1��1��1�2r�(ᄀ)�2�(ᄂ)�2�(ᄃ)�2�(ᄅ)�2�(ᄆ))r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr)rrEr)rr�)rrEr)rrEr)rrEr	)r
rEr)rrEr
)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)r rEr!)r"rEr#)r$rEr%)r&rEr')r(rEr))r*rEr+)r,rEr-)r.rEr/)r0rEr1)r2rEr3)r4rEr5)r6rEr7)r8rEr9)r:rEr;)r<rEr=)r>rEr?)r@rErA)rBrErC)rDrErE)rFrErG)rHrErI)rJrErK)rLrErM)rNrErO)rPrErQ)rRrErS)rTrErU)rVrErW)rXr�)rYr0)rZrEr[)r\rEr])r^rEr_)r`rEra)rbrErc)rdrEre)rfrErg)rhrEri)rjrErk)rlrErm)rnrEro)rprErq)rrrErs)rtrEru)rvr0)rwr�)rxr0)ryr�)rzr0)r{rr|)r}rr~)rrr�)r�rr�)r�rr�r�r�r�r�r��_seg_29�s�r�cfCs*d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�d�d�d�d�d	�d
�d�d�d
�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d �d!�d"�d#�d$�d%�d&�d'�d(�d)�d*�d+�d,�d-�d.gdS(/N�2r�(ᄇ)�2�(ᄉ)�2�(ᄋ)�2�(ᄌ)�	2�(ᄎ)�
2�(ᄏ)�2�(ᄐ)�2�(ᄑ)�
2�(ᄒ)�2�(가)�2�(나)�2�(다)�2�(라)�2�(마)�2�(바)�2�(사)�2�(아)�2�(자)�2�(차)�2�(카)�2�(타)�2�(파)�2�(하)�2�(주)�2�(오전)�2�(오후)�2r�� 2�(一)�!2�(二)�"2�(三)�#2�(四)�$2�(五)�%2�(六)�&2�(七)�'2�(八)�(2�(九)�)2�(十)�*2�(月)�+2�(火)�,2�(水)�-2�(木)�.2�(金)�/2�(土)�02�(日)�12�(株)�22�(有)�32�(社)�42�(名)�52�(特)�62�(財)�72�(祝)�82�(労)�92�(代)�:2�(呼)�;2�(学)�<2�(監)�=2�(企)�>2�(資)�?2�(協)�@2�(祭)�A2�(休)�B2�(自)�C2�(至)�D2rE�問�E2�幼�F2�文�G2�箏�H2r0�P2�pte�Q2�21�R2�22�S2�23�T2�24�U2�25�V2�26�W2�27�X2�28�Y2�29�Z2�30�[2�31�\2�32�]2�33�^2�34�_2�35�`2�ᄀ�a2�ᄂ�b2�ᄃ�c2�ᄅ�d2�ᄆ�e2�ᄇ�f2�ᄉ�g2�ᄋ�h2�ᄌ�i2�ᄎ�j2�ᄏ�k2�ᄐ�l2�ᄑ�m2�ᄒ�n2�가�o2�나)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�r�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr)rrr)rrEr)rrEr)rrEr)r	rEr
)rr0)rrEr
)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)r rEr!)r"rEr#)r$rEr%)r&rEr')r(rEr))r*rEr+)r,rEr-)r.rEr/)r0rEr1)r2rEr3)r4rEr5)r6rEr7)r8rEr9)r:rEr;)r<rEr=)r>rEr?)r@rErA)rBrErC)rDrErE)rFrErG)rHrErI)rJrErKr�r�r�r�r��_seg_308s�rLcfCs(d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�d�d�d�d�d	�d
�d�d�d
�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d �d!�d"�d#�d$�d%�d&�d'�d(�d)�d*�d+�d,�d-gdS(.N�p2rE�다�q2�라�r2�마�s2�바�t2�사�u2�아�v2�자�w2�차�x2�카�y2�타�z2�파�{2�하�|2�참고�}2�주의�~2�우�2r0�2�一�2�二�2�三�2�四�2�五�2�六�2�七�2�八�2�九�2�十�2�月�2�火�2�水�2�木�2�金�2�土�2�日�2�株�2�有�2�社�2�名�2�特�2�財�2�祝�2�労�2�秘�2�男�2�女�2�適�2�優�2�印�2�注�2�項�2�休�2�写�2�正�2�上�2�中�2�下�2�左�2�右�2�医�2�宗�2�学�2�監�2�企�2�資�2�協�2�夜�2�36�2�37�2�38�2�39�2�40�2�41�2�42�2�43�2�44�2�45�2�46�2�47�2�48�2�49�2�50�2�1月�2�2月��2�3月��2�4月��2�5月��2�6月��2�7月��2�8月��2�9月��2�10月��2�11月��2�12月��2�hg��2�erg��2�ev��2�ltd��2�ア��2�イ��2�ウ��2�エ)rMrErN)rOrErP)rQrErR)rSrErT)rUrErV)rWrErX)rYrErZ)r[rEr\)r]rEr^)r_rEr`)rarErb)rcrErd)rerErf)rgrErh)rirErj)rkr0)rlrErm)rnrEro)rprErq)rrrErs)rtrEru)rvrErw)rxrEry)rzrEr{)r|rEr})r~rEr)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)rrEr)rrEr)rrEr)rrEr)rrEr	)r
rEr)rrEr
)rrEr)rrEr)rrErr�r�r�r�r��_seg_31�s�rcfCs(d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�d�d�d�d�d	�d
�d�d�d
�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d �d!�d"�d#�d$�d%�d&�d'�d(�d)�d*�d+�d,�d-gdS(.N��2rE�オ��2�カ��2�キ��2�ク��2�ケ��2�コ��2�サ��2�シ��2�ス��2�セ��2�ソ��2�タ��2�チ��2�ツ��2�テ��2�ト��2�ナ��2�ニ��2�ヌ��2�ネ��2�ノ��2�ハ��2�ヒ��2�フ��2�ヘ��2�ホ��2�マ��2�ミ��2�ム��2�メ��2�モ��2�ヤ��2�ユ�2�ヨ�2�ラ�2�リ�2�ル�2�レ�2�ロ�2�ワ�2�ヰ�2�ヱ�2�ヲ�2r��3�アパート�3�アルファ�3�アンペア�3�	アール�3�イニング�3�	インチ�3�	ウォン�3�エスクード�3�エーカー�	3�	オンス�
3�	オーム�3�	カイリ�3�カラット�
3�カロリー�3�	ガロン�3�	ガンマ�3�ギガ�3�	ギニー�3�キュリー�3�ギルダー�3�キロ�3�キログラム�3�キロメートル�3�キロワット�3�	グラム�3�グラムトン�3�クルゼイロ�3�クローネ�3�	ケース�3�	コルナ�3�	コーポ�3�サイクル� 3�サンチーム�!3�シリング�"3�	センチ�#3�	セント�$3�	ダース�%3�デシ�&3�ドル�'3�トン�(3�ナノ�)3�	ノット�*3�	ハイツ�+3�パーセント�,3�	パーツ�-3�バーレル�.3�ピアストル�/3�	ピクル�03�ピコ�13�ビル�23�ファラッド�33�フィート�43�ブッシェル�53�	フラン�63�ヘクタール�73�ペソ)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr )r!rEr")r#rEr$)r%rEr&)r'rEr()r)rEr*)r+rEr,)r-rEr.)r/rEr0)r1rEr2)r3rEr4)r5rEr6)r7rEr8)r9rEr:)r;rEr<)r=rEr>)r?rEr@)rArErB)rCrErD)rErErF)rGrErH)rIrErJ)rKrErL)rMrErN)rOrErP)rQrErR)rSrErT)rUrErV)rWrErX)rYrErZ)r[rEr\)r]rEr^)r_rEr`)rarErb)rcrErd)rerErf)rgrErh)rirErj)rkr�)rlrErm)rnrEro)rprErq)rrrErs)rtrEru)rvrErw)rxrEry)rzrEr{)r|rEr})r~rEr)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�r�r�r�r�r��_seg_32
s�r�cfCs(d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�d�d�d�d�d	�d
�d�d�d
�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d �d!�d"�d#�d$�d%�d&�d'�d(�d)�d*�d+�d,�d-gdS(.N�83rE�	ペニヒ�93�	ヘルツ�:3�	ペンス�;3�	ページ�<3�	ベータ�=3�ポイント�>3�	ボルト�?3�ホン�@3�	ポンド�A3�	ホール�B3�	ホーン�C3�マイクロ�D3�	マイル�E3�	マッハ�F3�	マルク�G3�マンション�H3�ミクロン�I3�ミリ�J3�ミリバール�K3�メガ�L3�メガトン�M3�メートル�N3�	ヤード�O3�	ヤール�P3�	ユアン�Q3�リットル�R3�リラ�S3�	ルピー�T3�ルーブル�U3�レム�V3�レントゲン�W3�	ワット�X3�0点�Y3�1点�Z3�2点�[3�3点�\3�4点�]3�5点�^3�6点�_3�7点�`3�8点�a3�9点�b3�10点�c3�11点�d3�12点�e3�13点�f3�14点�g3�15点�h3�16点�i3�17点�j3�18点�k3�19点�l3�20点�m3�21点�n3�22点�o3�23点�p3�24点�q3�hpa�r3�da�s3�au�t3�bar�u3�ov�v3�pc�w3�dm�x3�dm2�y3�dm3�z3�iu�{3�平成�|3�昭和�}3�大正�~3�明治�3�株式会社�3�pa�3�na�3�μa�3�ma�3�ka�3�kb�3�mb�3�gb�3�cal�3�kcal�3�pf�3�nf�3�μf�3�μg�3�mg�3�kg�3�hz�3�khz�3�mhz�3�ghz�3�thz�3�μl�3�ml�3�dl�3�kl�3�fm�3�nm�3�μm)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr)rrEr)rrEr)rrEr)rrEr)r	rEr
)rrEr)r
rEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr )r!rEr")r#rEr$)r%rEr&)r'rEr()r)rEr*)r+rEr,)r-rEr.)r/rEr0)r1rEr2)r3rEr4)r5rEr6)r7rEr8)r9rEr:)r;rEr<)r=rEr>)r?rEr@)rArErB)rCrErD)rErErF)rGrErH)rIrErJ)rKrErL)rMrErN)rOrErP)rQrErR)rSrErT)rUrErV)rWrErX)rYrErZ)r[rEr\)r]rEr^)r_rEr`)rarErb)rcrErd)rerErf)rgrErh)rirErj)rkrErl)rmrErn)rorErp)rqrErr)rsrErt)rurErv)rwrErx)ryrErz)r{rEr|)r}rEr~)rrEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�r�r�r�r�r��_seg_33p
s�r�cfCsd�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�d�d�d�d�d	�d
�d�d�d
�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d �d!�d"�d#�d$�d%�d&�d'�d(gdS()N�3rE�mm�3�cm�3�km�3�mm2�3�cm2�3�m2�3�km2�3�mm3�3�cm3�3�m3�3�km3�3�m∕s�3�m∕s2�3rn�3�kpa�3�mpa�3�gpa�3�rad�3�rad∕s�3�rad∕s2�3�ps�3�ns�3�μs�3�ms�3�pv�3�nv�3�μv�3�mv�3�kv�3�3�pw�3�nw�3�μw�3�mw�3�kw�3�3�kω�3�mω��3r���3�bq��3�cc��3�cd��3�c∕kg��3��3�db��3�gy��3�ha��3�hp��3�in��3�kk��3��3�kt��3�lm��3�ln��3�log��3�lx��3rz��3�mil��3�mol��3�ph��3��3�ppm��3�pr��3�sr��3�sv��3�wb��3�v∕m��3�a∕m��3�1日��3�2日��3�3日��3�4日��3�5日��3�6日��3�7日��3�8日��3�9日��3�10日��3�11日��3�12日��3�13日��3�14日��3�15日��3�16日��3�17日��3�18日��3�19日��3�20日��3�21日�3�22日�3�23日�3�24日�3�25日�3�26日�3�27日�3�28日�3�29日�3�30日�3�31日�3�gal)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rErn)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�r�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�r�)r�rEr�)r�rEr�)r�rEr�)r�rEr)rrEr)rrEr)rrEr�)rrEr)rrEr	)r
rEr)rrEr
)rrEr)rrErz)rrEr)rrEr)rrEr)rr�)rrEr)rrEr)rrEr)rrEr)r rEr!)r"rEr#)r$rEr%)r&rEr')r(rEr))r*rEr+)r,rEr-)r.rEr/)r0rEr1)r2rEr3)r4rEr5)r6rEr7)r8rEr9)r:rEr;)r<rEr=)r>rEr?)r@rErA)rBrErC)rDrErE)rFrErG)rHrErI)rJrErK)rLrErM)rNrErO)rPrErQ)rRrErS)rTrErU)rVrErW)rXrErY)rZrEr[)r\rEr])r^rEr_)r`rEra)rbrErc)rdrErer�r�r�r�r��_seg_34�
s�rfceCs�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�gdS)�N�4r0�Mr��M�͟��鍤鐤�Ǥ�Ф�,��@�rE�ꙁ�A��B��ꙃ�C��D��ꙅ�E��F��ꙇ�G��H��ꙉ�I��J��ꙋ�K��L��ꙍ�M��N��ꙏ�O��P��ꙑ�Q��R��ꙓ�S��T��ꙕ�U��V��ꙗ�W��X��ꙙ�Y��Z��ꙛ�[��\��ꙝ�]��^��ꙟ�_��`��ꙡ�a��b��ꙣ�c��d��ꙥ�e��f��ꙧ�g��h��ꙩ�i��j��ꙫ�k��l��ꙭ�m�逦�ꚁ遦邦�ꚃ郦鄦�ꚅ酦醦�ꚇ釦鈦�ꚉ鉦銦�ꚋ鋦錦�ꚍ鍦鎦�ꚏ鏦鐦�ꚑ鑦钦�ꚓ铦锦�ꚕ镦閦�ꚗ闦阦韦����"��ꜣ�#��$��ꜥ�%��&��ꜧ�'��(��ꜩ�)��*��ꜫ�+��,��ꜭ�-��.��ꜯ�/��2��ꜳ�3�)rgr0)rhr�)rir0)rjr�)rkr0)rlr�)rmr0)rnr�)ror0)rpr�)rqrErr)rsr0)rtrEru)rvr0)rwrErx)ryr0)rzrEr{)r|r0)r}rEr~)rr0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�r�)r�r0)r�r�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0r�r�r�r�r��_seg_35@s�r�cfCs�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�gdS)�N�4�rE�ꜵ�5�r0�6��ꜷ�7��8��ꜹ�9��:��ꜻ�;��<��ꜽ�=��>��ꜿ�?��@��ꝁ�A��B��ꝃ�C��D��ꝅ�E��F��ꝇ�G��H��ꝉ�I��J��ꝋ�K��L��ꝍ�M��N��ꝏ�O��P��ꝑ�Q��R��ꝓ�S��T��ꝕ�U��V��ꝗ�W��X��ꝙ�Y��Z��ꝛ�[��\��ꝝ�]��^��ꝟ�_��`��ꝡ�a��b��ꝣ�c��d��ꝥ�e��f��ꝧ�g��h��ꝩ�i��j��ꝫ�k��l��ꝭ�m��n��ꝯ�o��p��q��y��ꝺ�z��{��ꝼ�|��}��ᵹ�~��ꝿ��逧�ꞁ遧邧�ꞃ郧鄧�ꞅ酧醧�ꞇ釧鋧�ꞌ錧鍧�ɥ鎧鏧r�鐧�ꞑ鑧钧�ꞓ铧锧頧�ꞡ顧颧�ꞣ飧餧�ꞥ饧馧�ꞧ駧騧�ꞩ驧骧�ɦ髧��ħ)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)rrEr)rr0)rrEr)rr0)rrEr)rr0)r	rEr
)rr0)rrEr
)rr0)rrEr)rr0)rrEr)rr0)rrEr)rr0)rrEr)rr0)rrEr)rr0)rrEr)r r0)r!rEr")r#r0)r$rEr%)r&r0)r'rEr()r)r0)r*rEr+)r,r0)r-rEr.)r/r0)r0rEr1)r2r0)r3rEr4)r5r0)r6rEr7)r8r0)r9rEr:)r;r0)r<rEr=)r>r0)r?rEr@)rAr0)rBrErC)rDr0)rErErF)rGr0)rHrErI)rJr0)rKrErL)rMr0)rNrErO)rPr0)rQrErO)rRr0)rSrErT)rUr0)rVrErW)rXr0)rYrErZ)r[rEr\)r]r0)r^rEr_)r`r0)rarErb)rcr0)rdrEre)rfr0)rgrErh)rir0)rjrErk)rlr0)rmrErn)ror0)rpr�)rqrErr)rsr0)rtrEru)rvr0)rwr�)rxrEry)rzr0)r{rEr|)r}r0)r~rEr)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r�)r�rEr�r�r�r�r�r��_seg_36�s�r�cfCs�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�gdS)�N�rE�œ�r0�,�r��0��:��@��x�逨�Ũ�Ψ�ڨ������T��_��}�逩�Ω�ϩ�ک�ީ�����7��@��N��P��Z��\��|�逪�ê�۪������	�������� ��'��(��/������������������������豈���更���車���賈���滑���串���句���龜�	��契�
��金���喇���奈�
��懶���癩���羅���蘿���螺���裸���邏���樂���洛���烙���珞���落���酪���駱���亂���卵���欄���爛���蘭� ��鸞�!��嵐�"��濫�#��藍�$��襤�%��拉�&��臘�'��蠟�(��廊�)��朗�*��浪�+��狼�,��郎�-��來)r�rEr�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�r0)r�r�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr)rrEr)rrEr)rrEr)rrEr)r	rEr
)rrEr)r
rEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrErr�r�r�r�r��_seg_37s�rcfCs(d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�d�d�d�d�d	�d
�d�d�d
�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d �d!�d"�d#�d$�d%�d&�d'�d(�d)�d*�d+�d,�d-gdS(.N�.�rE�冷�/��勞�0��擄�1��櫓�2��爐�3��盧�4��老�5��蘆�6��虜�7��路�8��露�9��魯�:��鷺�;��碌�<��祿�=��綠�>��菉�?��錄�@��鹿�A��論�B��壟�C��弄�D��籠�E��聾�F��牢�G��磊�H��賂�I��雷�J��壘�K��屢�L��樓�M��淚�N��漏�O��累�P��縷�Q��陋�R��勒�S��肋�T��凜�U��凌�V��稜�W��綾�X��菱�Y��陵�Z��讀�[��拏�\��樂�]��諾�^��丹�_��寧�`��怒�a��率�b��異�c��北�d��磻�e��便�f��復�g��不�h��泌�i��數�j��索�k��參�l��塞�m��省�n��葉�o��說�p��殺�q��辰�r��沈�s��拾�t��若�u��掠�v��略�w��亮�x��兩�y��凉�z��梁�{��糧�|��良�}��諒�~��量���勵��呂��女��廬��旅��濾��礪��閭��驪��麗��黎��力��曆��歷��轢��年��憐��戀��撚)r rEr!)r"rEr#)r$rEr%)r&rEr')r(rEr))r*rEr+)r,rEr-)r.rEr/)r0rEr1)r2rEr3)r4rEr5)r6rEr7)r8rEr9)r:rEr;)r<rEr=)r>rEr?)r@rErA)rBrErC)rDrErE)rFrErG)rHrErI)rJrErK)rLrErM)rNrErO)rPrErQ)rRrErS)rTrErU)rVrErW)rXrErY)rZrEr[)r\rEr])r^rEr_)r`rEra)rbrErc)rdrEre)rfrErg)rhrEri)rjrErk)rlrErm)rnrEro)rprErq)rrrErs)rtrEru)rvrErw)rxrEry)rzrEr{)r|rEr})r~rEr)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�r�r�r�r�r��_seg_38xs�r�cfCs(d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�d�d�d�d�d	�d
�d�d�d
�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d �d!�d"�d#�d$�d%�d&�d'�d(�d)�d*�d+�d,�d-gdS(.N�rE�漣��煉��璉��秊��練��聯��輦��蓮��連��鍊��列��劣��咽��烈��裂��說��廉��念��捻��殮��簾��獵��令��囹��寧��嶺��怜��玲��瑩��羚��聆��鈴��零��靈��領��例��禮��醴��隸��惡��了��僚��寮��尿��料��樂��燎��療���蓼���遼���龍���暈���阮���劉���杻���柳���流���溜���琉���留���硫���紐���類���六���戮���陸���倫���崙���淪���輪���律���慄���栗���率���隆���利���吏���履���易���李���梨���泥���理���痢���罹���裏���裡���里���離���匿���溺���吝���燐���璘���藺���隣���鱗���麟���林��淋)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr)rrEr)rrEr)rrEr)rrEr)r	rEr
)rrEr)r
rEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr )r!rEr")r#rEr$)r%rEr&)r'rEr()r)rEr*)r+rEr,)r-rEr.)r/rEr0)r1rEr2)r3rEr4)r5rEr6)r7rEr8)r9rEr:)r;rEr<)r=rEr>)r?rEr@)rArErB)rCrErD)rErErF)rGrErH)rIrErJ)rKrErL)rMrErN)rOrErP)rQrErR)rSrErT)rUrErV)rWrErX)rYrErZ)r[rEr\)r]rEr^)r_rEr`)rarErb)rcrErd)rerErf)rgrErh)rirErj)rkrErl)rmrErn)rorErp)rqrErr)rsrErt)rurErv)rwrErx)ryrErz)r{rEr|)r}rEr~)rrEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�r�r�r�r�r��_seg_39�s�r�cfCsd�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�d�d�d�d�d	�d
�d�d�d
�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d �d!�d"�d#�d$�d%�d&�d'gdS((N�rE�臨��立��笠��粒��狀��炙��識��什��茶��刺���切���度���拓���糖���宅���洞���暴���輻���行�	��降�
��見���廓���兀�
��嗀��r0���塚�����晴�����凞���猪���益���礼���神���祥���福���靖���精���羽��� ��蘒�!��"��諸�#��%��逸�&��都�'��*��飯�+��飼�,��館�-��鶴�.��郞�/��隷�0��侮�1��僧�2��免�3��勉�4��勤�5��卑�6��喝�7��嘆�8��器�9��塀�:��墨�;��層�<��屮�=��悔�>��慨�?��憎�@��懲�A��敏�B��既�C��暑�D��梅�E��海�F��渚�G��漢�H��煮�I��爫�J��琢�K��碑�L��社�M��祉�N��祈�O��祐�P��祖�Q��祝�R��禍�S��禎�T��穀�U��突�V��節�W��練�X��縉�Y��繁�Z��署�[��者�\��臭�]��艹�_��著)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�r0)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�r0)r�rEr�)rr0)rrEr)rr0)rrEr)rrEr)rr0)r	rEr
)rrEr)r
rEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr )r!rEr")r#rEr$)r%rEr&)r'rEr()r)rEr*)r+rEr,)r-rEr.)r/rEr0)r1rEr2)r3rEr4)r5rEr6)r7rEr8)r9rEr:)r;rEr<)r=rEr>)r?rEr@)rArErB)rCrErD)rErErF)rGrErH)rIrErJ)rKrErL)rMrErN)rOrErP)rQrErR)rSrErT)rUrErV)rWrErX)rYrErZ)r[rEr\)r]rEr^)r_rEr`)rarErb)rcrErd)rerErf)rgrErh)rirErj)rkrErl)rmrErn)rorErp)rqrErrr�r�r�r�r��_seg_40Hs�rscfCs d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�d�d�d�d�d	�d
�d�d�d
�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d �d!�d"�d#�d$�d%�d&�d'�d(�d)gdS(*N�`�rE�褐�a��視�b��謁�c��謹�d��賓�e��贈�f��辶�g��逸�h��難�i��響�j��頻�k��恵�l��𤋮�m��舘�n�r��p��並�q��况�r��全�s��侀�t��充�u��冀�v��勇�w��勺�x��喝�y��啕�z��喙�{��嗢�|��塚�}��墳�~��奄���奔��婢��嬨��廒��廙��彩��徭��惘��慎��愈��憎��慠��懲��戴��揄��搜��摒��敖��晴��朗��望��杖��歹��殺��流��滛��滋��漢��瀞��煮��瞧��爵��犯��猪��瑱��甆��画��瘝��瘟��益��盛��直��睊��着��磌��窱��節��类��絛��練��缾��者��荒��華��蝹��襁��覆���調��諸��請���諾��諭���變����輸���遲���醙)rtrEru)rvrErw)rxrEry)rzrEr{)r|rEr})r~rEr)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�r�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr)rrEr)rrEr)rrEr)rrEr)r	rEr
)rrEr)r
rEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr )r!rErw)r"rEr#)r$rEr%)r&rEr')r(rEry)r)rEr*)r+rEr,)r-rEr{)r.rEr/)r0rEr)r1rEr2)r3rEr4)r5rEr6r�r�r�r�r��_seg_41�s�r7cfCsd�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�d�d�d�d�d	�d
�d�d�d
�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d �d!�d"�d#�d$�d%�d&�d'gdS((N��rE�鉶���陼���難���靖���韛���響���頋���頻���鬒���龜���𢡊���𢡄���𣏕���㮝���䀘���䀹���𥉉���𥳐���𧻓���齃���龎��r����ff���fi���fl���ffi���ffl���st�����մն���մե���մի���վն���մխ�����יִ��r0���ײַ� ��ע�!��א�"��ד�#��ה�$��כ�%��ל�&��ם�'��ר�(��ת�)�rr��*��שׁ�+��שׂ�,��שּׁ�-��שּׂ�.��אַ�/��אָ�0��אּ�1��בּ�2��גּ�3��דּ�4��הּ�5��וּ�6��זּ�7��8��טּ�9��יּ�:��ךּ�;��כּ�<��לּ�=��>��מּ�?��@��נּ�A��סּ�B��C��ףּ�D��פּ�E��F��צּ�G��קּ�H��רּ�I��שּ�J��תּ�K��וֹ�L��בֿ�M��כֿ�N��פֿ�O��אל�P��ٱ�R��ٻ�V��پ�Z��ڀ�^��ٺ�b��ٿ�f��ٹ�j��ڤ�n��ڦ�r��ڄ�v��ڃ�z��چ�~��ڇ��ڍ)r8rEr9)r:rEr;)r<rEr=)r>rEr?)r@rErA)rBrErC)rDrErE)rFrErG)rHrErI)rJrErK)rLrErM)rNrErO)rPrErQ)rRrErS)rTrErU)rVrErW)rXrErY)rZrEr[)r\rEr])r^rEr_)r`rEra)rbr�)rcrErd)rerErf)rgrErh)rirErj)rkrErl)rmrErn)ror�)rprErq)rrrErs)rtrEru)rvrErw)rxrEry)rzr�)r{rEr|)r}r0)r~rEr)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�r�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�r�)r�rEr�)r�r�)r�rEr�)r�rEr�)r�r�)r�rEr�)r�rEr�)r�r�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�r�r�r�r�r��_seg_42s�r�cfCs&d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�d�d�d�d�d	�d
�d�d�d
�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d �d!�d"�d#�d$�d%�d&�d'�d(�d)�d*�d+�d,gdS(-N�rE�ڌ��ڎ��ڈ��ژ��ڑ��ک��گ��ڳ��ڱ��ں��ڻ��ۀ��ہ��ھ��ے��ۓ�r0��r����ڭ���ۇ���ۆ���ۈ���ۇٴ���ۋ���ۅ���ۉ���ې���ى���ئا���ئە���ئو���ئۇ���ئۆ���ئۈ��ئې��ئى��ی���ئج���ئح���ئم�����ئي���بج���بح���بخ���بم�	��بى�
��بي���تج���تح�
��تخ���تم���تى���تي���ثج���ثم���ثى���ثي���جح���جم���حج���حم���خج���خح���خم���سج���سح���سخ���سم� ��صح�!��صم�"��ضج�#��ضح�$��ضخ�%��ضم�&��طح�'��طم�(��ظم�)��عج�*��عم�+��غج�,��غم�-��فج�.��فح�/��فخ�0��فم�1��فى�2��في�3��قح�4��قم�5��قى�6��قي�7��كا�8��كج�9��كح�:��كخ�;��كل�<��كم�=��كى�>��كي)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr)rrEr)rrEr)rrEr)rrEr)r	rEr
)rrEr)r
rEr)rrEr)rrEr)rrEr)rrEr)rr0)rr�)rrEr)rrEr)rrEr)rrEr )r!rEr")r#rEr$)r%rEr&)r'rEr()r)rEr*)r+rEr,)r-rEr.)r/rEr0)r1rEr2)r3rEr4)r5rEr6)r7rEr8)r9rEr:)r;rEr<)r=rEr>)r?rEr@)rArErB)rCrErD)rErEr<)rFrErG)rHrErI)rJrErK)rLrErM)rNrErO)rPrErQ)rRrErS)rTrErU)rVrErW)rXrErY)rZrEr[)r\rEr])r^rEr_)r`rEra)rbrErc)rdrEre)rfrErg)rhrEri)rjrErk)rlrErm)rnrEro)rprErq)rrrErs)rtrEru)rvrErw)rxrEry)rzrEr{)r|rEr})r~rEr)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�r�r�r�r�r��_seg_43�s�r�cfCsd�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�d�d�d�d�d	�d
�d�d�d
�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d �d!gdS("N�?�rE�لج�@��لح�A��لخ�B��لم�C��لى�D��لي�E��مج�F��مح�G��مخ�H��مم�I��مى�J��مي�K��نج�L��نح�M��نخ�N��نم�O��نى�P��ني�Q��هج�R��هم�S��هى�T��هي�U��يج�V��يح�W��يخ�X��يم�Y��يى�Z��يي�[��ذٰ�\��رٰ�]��ىٰ�^�r� ٌّ�_�� ٍّ�`�� َّ�a�� ُّ�b�� ِّ�c�� ّٰ�d��ئر�e��ئز�f��ئم�g��ئن�h��ئى�i��ئي�j��بر�k��بز�l��بم�m��بن�n��بى�o��بي�p��تر�q��تز�r��تم�s��تن�t��تى�u��تي�v��ثر�w��ثز�x��ثم�y��ثن�z��ثى�{��ثي�|��فى�}��في�~��قى���قي��كا��كل��كم��كى��كي�����ما���نر��نز���نن�����ير��يز���ين����ئج��ئح��ئخ���ئه��بج��بح��بخ���به��تج��تح)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rr�)r�rr�)r�rr)rrr)rrr)rrr)rrEr)r	rEr
)rrEr)r
rEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr )r!rEr")r#rEr$)r%rEr&)r'rEr()r)rEr*)r+rEr,)r-rEr.)r/rEr0)r1rEr2)r3rEr4)r5rEr6)r7rEr8)r9rEr:)r;rEr<)r=rEr>)r?rEr@)rArErB)rCrErD)rErErF)rGrErH)rIrEr�)rJrEr�)rKrEr�)rLrErM)rNrEr�)rOrErP)rQrErR)rSrEr�)rTrErU)rVrEr�)rWrEr�)rXrEr�)rYrErZ)r[rEr\)r]rEr�)r^rEr_)r`rEr�)rarEr�)rbrErc)rdrEre)rfrErg)rhrEr)rirErj)rkrErl)rmrErn)rorErp)rqrEr)rrrErs)rtrEru)rvrErwr�r�r�r�r��_seg_44�s�rxcfCsd�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�d�d�d�d�d	�d
�d�d�d
�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d �d!�d"gdS(#N�rE�تخ��تم��ته��ثم��جح��جم��حج��حم��خج��خم��سج��سح��سخ��سم��صح��صخ��صم��ضج��ضح��ضخ��ضم��طح��ظم��عج��عم��غج��غم��فج��فح��فخ��فم���قح���قم���كج���كح���كخ���كل���كم���لج���لح���لخ���لم���له���مج���مح���مخ���مم���نج���نح���نخ���نم���نه���هج���هم���هٰ���يج���يح���يخ���يم���يه���ئم���ئه���بم���به���������ثه�����سه���شم���شه�����������������ـَّ���ـُّ���ـِّ��طى��طي��عى��عي��غى��غي��سى��سي��شى��شي��حى���حي���جى���جي���خى���خي���صى���صي)ryrErz)r{rEr|)r}rEr~)rrEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr|)r�rEr~)r�rEr�)r�rEr�)r�rEr�)r�rEr)rrEr)rrEr)rrEr�)rrEr�)rrEr�)rrEr�)r	rEr�)r
rEr�)rrEr�)rrEr
)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)r rEr!)r"rEr#)r$rEr%)r&rEr')r(rEr))r*rEr+)r,rEr-)r.rEr/)r0rEr1)r2rEr3)r4rEr5r�r�r�r�r��_seg_45Ps�r6cfCsd�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�d�d�d�d�d	�d
�d�d�d
�d�d�d�d�d�d�d�d�d�d�d�d�d�dgdS(N��rE�ضى���ضي�	��شج�
��شح���شخ���شم�
��شر���سر���صر���ضر���طى���طي���عى���عي���غى���غي���سى���سي���شى���شي���حى���حي���جى���جي���خى� ��خي�!��صى�"��صي�#��$��%��&��'��(��)��*��+��,��-��.��/��0��1��سه�2��شه�3��طم�4��سج�5��سح�6��سخ�7��8��9��:��;��ظم�<��اً�>�r0�@�r��P��تجم�Q��تحج�S��تحم�T��تخم�U��تمج�V��تمح�W��تمخ�X��جمح�Z��حمي�[��حمى�\��سحج�]��سجح�^��سجى�_��سمح�a��سمج�b��سمم�d��صحح�f��صمم�g��شحم�i��شجي�j��شمخ�l��شمم�n��ضحى�o��ضخم�q��طمح�s��طمم�t��طمي�u��عجم�v��عمم�x��عمى�y��غمم�z��غمي�{��غمى�|��فخم�~��قمح���قمم��لحم��لحي��لحى��لجج��لخم��لمح��محج��محم)r7rEr8)r9rEr:)r;rEr<)r=rEr>)r?rEr@)rArErB)rCrErD)rErErF)rGrErH)rIrErJ)rKrErL)rMrErN)rOrErP)rQrErR)rSrErT)rUrErV)rWrErX)rYrErZ)r[rEr\)r]rEr^)r_rEr`)rarErb)rcrErd)rerErf)rgrErh)rirErj)rkrErl)rmrErn)rorEr8)rprEr:)rqrEr<)rrrEr>)rsrEr@)rtrErB)rurErD)rvrErF)rwrErH)rxrErJ)ryrEr<)rzrEr>)r{rEr@)r|rErB)r}rEr~)rrEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr<)r�rEr>)r�rEr@)r�rEr�)r�rEr�)r�rEr�)r�r0)r�r�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�r�r�r�r�r��_seg_46�s�r�cfCsd�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�d�d�d�d�d	�d
�d�d�d
�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d �d!�d"�d#�d$�d%gdS(&N�rE�محي��مجح��مجم��مخج��مخم�r���مجخ��همج��همم��نحم��نحى��نجم��نجى��نمي��نمى��يمم��بخي��تجي��تجى��تخي��تخى��تمي��تمى��جمي��جحى��جمى��سخى��صحي��شحي��ضحي��لجي��لمي��يحي��يجي��يمي��ممي��قمي��نحي��قمح��لحم��عمي��كمي��نجح��مخي��لجم��كمم����جحي��حجي��مجي��فمي���بحي�����عجم���صمم���سخي���نجي�����صلے���قلے���الله���اكبر���محمد��صلعم��رسول��عليه��وسلم��صلى�r�!صلى الله عليه وسلم��جل جلاله��ریال�r0���r����,���、�����:��rl���!���?���〖���〗��� ��'��1��—�2��–�3��_�5�r��6�r��7��{�8��}�9��〔�:��〕�;��【�<��】�=��《�>��》)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�r�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr)rrEr)rrEr)rrEr)rrEr)r	rEr
)rrEr)r
rEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr )r!rEr")r#rEr$)r%rEr&)r'rEr()r)rEr*)r+rEr,)r-rEr.)r/rEr0)r1rEr2)r3rEr4)r5rEr6)r7rEr8)r9rEr:)r;rEr<)r=rEr>)r?rEr@)rArErB)rCrErD)rErErF)rGrErD)rHrEr@)rIrErJ)rKrErL)rMrErN)rOrErP)rQrErR)rSrErF)rTrErU)rVrErW)rXrErY)rZrEr[)r\r�)r]rEr^)r_rEr`)rarErb)rcrErd)rerErf)rgrErh)rirErj)rkrErl)rmrErn)rorErp)rqrrr)rsrrt)rurErv)rwr0)rxr�)ryr�)rzrr{)r|rEr})r~r�)rrr�)r�rrl)r�rr�)r�rr�)r�rEr�)r�rEr�)r�r�)r�r0)r�r�)r�rEr�)r�rEr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�r�r�r�r�r��_seg_47 s�r�cfCsd�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�d�d�d�d�d	�d
�d�d�d
�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d �d!�d"�d#�d$�d%�d&�d'gdS((N�?�rE�〈�@��〉�A��「�B��」�C��『�D��』�E�r0�G�r�[�H��]�I�� ̅�M�r��P�r{�Q��、�R�r��T�rl�U�r��V�r��W�r��X��—�Y�r��Z�r��[�r��\�r��]��〔�^��〕�_��#�`��&�a��*�b�r��c��-�d��<�e��>�f�r��g��h��\�i��$�j��%�k��@�l��p�� ً�q��ـً�r�� ٌ�s��t�� ٍ�u��v�� َ�w��ـَ�x�� ُ�y��ـُ�z�� ِ�{��ـِ�|�� ّ�}��ـّ�~�� ْ���ـْ��ء��آ��أ��ؤ��إ��ئ��ا��ب��ة��ت��ث��ج��ح��خ��د��ذ��ر��ز��س��ش��ص��ض��ط���ظ���ع���غ���ف���ق���ك���ل���م���ن���ه���و���ى���ي��لآ��لأ��لإ��لا��r��������")r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�r0)r�rr�)r�rr�)r�rr�)r�rr�)r�rr{)r�rEr�)r�r�)r�rrl)r�rr�)r�rr�)r�rr�)r�rEr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rEr�)r�rEr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rEr�)r�rr�)r�rr�)r�rr�)r�r�)r�rr�)r�rr�)r�rr�)r�rr�)r�r�)r�rr�)r�rEr�)r�rr�)r�r0)r�rr�)r�r�)r�rr�)r�rEr�)r�rr�)r�rEr�)r�rr�)r�rEr�)r�rr�)r�rEr�)r�rr�)rrEr)rrEr)rrEr)rrEr)rrEr	)r
rEr)rrEr
)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)rrEr)r rEr!)r"rEr#)r$rEr%)r&rEr')r(rEr))r*rEr+)r,rEr-)r.rEr/)r0rEr1)r2rEr3)r4rEr5)r6rEr7)r8rEr9)r:rEr;)r<rEr=)r>rEr?)r@rErA)rBrErC)rDrErE)rFrErG)rHrErI)rJrErK)rLrErM)rNrErO)rPrErQ)rRr�)rSr�)rTr�)rUrr�)rVrrWr�r�r�r�r��_seg_48�s�rXcfCs�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�d�d�d�d�d	�d
�d�d�d
�d�d�d�d�dgdS(N��rr���r���r���r����'��r��	�r��
�r���r���r{�
�rEr���r���/��r���r���r�����r���r���r���r���r���r���r���rl��r���r���r���r�� �r��!�rF�"�rH�#�rJ�$�rL�%�rN�&�rP�'�rR�(�rT�)�rV�*�rX�+�rZ�,�r\�-�r^�.�r`�/�rb�0�rd�1�rf�2�rh�3�rj�4�rl�5�rn�6�rp�7�rr�8�rt�9�rv�:�rx�;�r��<�r��=�r��>��^�?�r��@�rm�A��B��C��D��E��F��G��H��I��J��K��L��M��N��O��P��Q��R��S��T��U��V��W��X��Y��Z��[�r��\��|�]�r��^��~�_��⦅�`��⦆�a��b��「�c��」�d��、�e��・�f��ヲ)rYrr�)rZrr�)r[rr�)r\rr�)r]rr^)r_rr�)r`rr�)rarr�)rbrr�)rcrr{)rdrEr�)rerEr)rfrrg)rhrEr�)rirEr�)rjrEr�)rkrEr)rlrEr�)rmrEr�)rnrEr�)rorEr�)rprEr�)rqrEr�)rrrr�)rsrrl)rtrr�)rurr�)rvrr�)rwrr�)rxrr�)ryrErF)rzrErH)r{rErJ)r|rErL)r}rErN)r~rErP)rrErR)r�rErT)r�rErV)r�rErX)r�rErZ)r�rEr\)r�rEr^)r�rEr`)r�rErb)r�rErd)r�rErf)r�rErh)r�rErj)r�rErl)r�rErn)r�rErp)r�rErr)r�rErt)r�rErv)r�rErx)r�rr�)r�rr�)r�rr�)r�rr�)r�rr�)r�rrm)r�rErF)r�rErH)r�rErJ)r�rErL)r�rErN)r�rErP)r�rErR)r�rErT)r�rErV)r�rErX)r�rErZ)r�rEr\)r�rEr^)r�rEr`)r�rErb)r�rErd)r�rErf)r�rErh)r�rErj)r�rErl)r�rErn)r�rErp)r�rErr)r�rErt)r�rErv)r�rErx)r�rr�)r�rr�)r�rr�)r�rr�)r�rEr�)r�rEr�)r�rEr)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�r�r�r�r�r��_seg_49�s�r�cfCs$d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�d�d�d�d�d	�d
�d�d�d
�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d �d!�d"�d#�d$�d%�d&�d'�d(�d)�d*�d+gdS(,N�g�rE�ァ�h��ィ�i��ゥ�j��ェ�k��ォ�l��ャ�m��ュ�n��ョ�o��ッ�p��ー�q��ア�r��イ�s��ウ�t��エ�u��オ�v��カ�w��キ�x��ク�y��ケ�z��コ�{��サ�|��シ�}��ス�~��セ���ソ��タ��チ��ツ��テ��ト��ナ��ニ��ヌ��ネ��ノ��ハ��ヒ��フ��ヘ��ホ��マ��ミ��ム��メ��モ��ヤ��ユ��ヨ��ラ��リ��ル��レ��ロ��ワ��ン��゙��゚�r���ᄀ��ᄁ��ᆪ��ᄂ��ᆬ��ᆭ��ᄃ��ᄄ��ᄅ��ᆰ��ᆱ��ᆲ��ᆳ��ᆴ��ᆵ��ᄚ��ᄆ��ᄇ��ᄈ��ᄡ��ᄉ��ᄊ��ᄋ��ᄌ��ᄍ��ᄎ��ᄏ��ᄐ��ᄑ��ᄒ����ᅡ���ᅢ���ᅣ���ᅤ���ᅥ���ᅦ�����ᅧ���ᅨ���ᅩ���ᅪ)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r�rEr�)r rEr )r rEr )r rEr )r rEr )r rEr	 )r
 rEr )r rEr
 )r rEr )r rEr )r rEr )r rEr )r rEr )r rEr )r rEr )r rEr )r rEr )r  rEr! )r" rEr# )r$ rEr% )r& rEr' )r( rEr) )r* rEr+ )r, rEr- )r. rEr/ )r0 rEr1 )r2 rEr3 )r4 rEr5 )r6 rEr7 )r8 rEr9 )r: rEr; )r< r�)r= rEr> )r? rEr@ )rA rErB )rC rErD )rE rErF )rG rErH )rI rErJ )rK rErL )rM rErN )rO rErP )rQ rErR )rS rErT )rU rErV )rW rErX )rY rErZ )r[ rEr\ )r] rEr^ )r_ rEr` )ra rErb )rc rErd )re rErf )rg rErh )ri rErj )rk rErl )rm rErn )ro rErp )rq rErr )rs rErt )ru rErv )rw rErx )ry r�)rz rEr{ )r| rEr} )r~ rEr )r� rEr� )r� rEr� )r� rEr� )r� r�)r� rEr� )r� rEr� )r� rEr� )r� rEr� r�r�r�r�r��_seg_50Xs�r� cfCs�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�dgdS(N��rE�ᅫ���ᅬ��r����ᅭ���ᅮ���ᅯ���ᅰ���ᅱ���ᅲ�����ᅳ���ᅴ���ᅵ�����¢���£���¬��r� ̄���¦���¥���₩�����│���←���↑���→���↓���■���○���r0��
�'�(�;�<�>�?�N�P�^������4�7�������������� �$�0�K�����������𐐨��𐐩��𐐪��𐐫��𐐬��𐐭��𐐮��𐐯��𐐰�	�𐐱�
�𐐲��𐐳��𐐴�
�𐐵��𐐶��𐐷��𐐸��𐐹��𐐺��𐐻��𐐼��𐐽��𐐾��𐐿��𐑀��𐑁��𐑂��𐑃��𐑄��𐑅)r� rEr� )r� rEr� )r� r�)r� rEr� )r� rEr� )r� rEr� )r� rEr� )r� rEr� )r� rEr� )r� r�)r� rEr� )r� rEr� )r� rEr� )r� r�)r� rEr� )r� rEr� )r� rEr� )r� rr� )r� rEr� )r� rEr� )r� rEr� )r� r�)r� rEr� )r� rEr� )r� rEr� )r� rEr� )r� rEr� )r� rEr� )r� rEr� )r� r�)r� r0)r� r�)r� r0)r� r�)r� r0)r� r�)r� r0)r� r�)r� r0)r� r�)r� r0)r� r�)r� r0)r� r�)r� r0)r� r�)r� r0)r� r�)r� r0)r� r�)r� r0)r� r�)r� r0)r� r�)r� r0)r� r�)r� r0)r� r�)r� r0)r� r�)r� r0)r� r�)r� r0)r� r�)r� r0)r� r�)r� r0)r� r�)r� r0)r� r�)r� rEr� )r� rEr� )r� rEr� )r� rEr� )r� rEr� )r� rEr� )r� rEr� )r� rEr� )r� rEr!)r!rEr!)r!rEr!)r!rEr!)r!rEr!)r	!rEr
!)r!rEr!)r
!rEr!)r!rEr!)r!rEr!)r!rEr!)r!rEr!)r!rEr!)r!rEr!)r!rEr!)r!rEr!)r!rEr !)r!!rEr"!)r#!rEr$!)r%!rEr&!)r'!rEr(!)r)!rEr*!r�r�r�r�r��_seg_51�s�r+!ceCs�drdsdtdudvdwdxdydzd{d|d}d~dd�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�gdS)�N�rE�𐑆��𐑇� �𐑈�!�𐑉�"�𐑊�#�𐑋�$�𐑌�%�𐑍�&�𐑎�'�𐑏�(r0�r�������	�
�6�7�9�<�=�?�V�W�`�	�	�	�:	�?	�@	�	�	�	�	�
�
�
�
�
�
�
�
�
�4
�8
�;
�?
�H
�P
�Y
�`
�
��6�9�V�X�s�x���I�`���N�R�p��������������5�6�D������������� �o#�$�c$�p$�t$�0�/4)r,!rEr-!)r.!rEr/!)r0!rEr1!)r2!rEr3!)r4!rEr5!)r6!rEr7!)r8!rEr9!)r:!rEr;!)r<!rEr=!)r>!rEr?!)r@!r0)rA!r�)rB!r0)rC!r�)rD!r0)rE!r�)rF!r0)rG!r�)rH!r0)rI!r�)rJ!r0)rK!r�)rL!r0)rM!r�)rN!r0)rO!r�)rP!r0)rQ!r�)rR!r0)rS!r�)rT!r0)rU!r�)rV!r0)rW!r�)rX!r0)rY!r�)rZ!r0)r[!r�)r\!r0)r]!r�)r^!r0)r_!r�)r`!r0)ra!r�)rb!r0)rc!r�)rd!r0)re!r�)rf!r0)rg!r�)rh!r0)ri!r�)rj!r0)rk!r�)rl!r0)rm!r�)rn!r0)ro!r�)rp!r0)rq!r�)rr!r0)rs!r�)rt!r0)ru!r�)rv!r0)rw!r�)rx!r0)ry!r�)rz!r0)r{!r�)r|!r0)r}!r�)r~!r0)r!r�)r�!r0)r�!r�)r�!r0)r�!r�)r�!r0)r�!r�)r�!r0)r�!r�)r�!r0)r�!r�)r�!r0)r�!r�)r�!r0)r�!r�)r�!r0)r�!r�)r�!r0)r�!r�)r�!r0)r�!r�)r�!r0)r�!r�)r�!r0)r�!r�)r�!r0)r�!r�r�r�r�r�r��_seg_52(s�r�!cfCs�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�gdS)�N�hr0�9jr��o�Eo�Po�o�o�o�����������'��)��^�rE�𝅗𝅥�_��𝅘𝅥�`��𝅘𝅥𝅮�a��𝅘𝅥𝅯�b��𝅘𝅥𝅰�c��𝅘𝅥𝅱�d��𝅘𝅥𝅲�e��s��{����𝆹𝅥���𝆺𝅥���𝆹𝅥𝅮���𝆺𝅥𝅮���𝆹𝅥𝅯���𝆺𝅥𝅯��������F����W��`��r���rF��rH��rJ��rL��rN��rP��rR��rT��rV�	�rX�
�rZ��r\��r^�
�r`��rb��rd��rf��rh��rj��rl��rn��rp��rr��rt��rv��rx������������� ��!��"��#��$��%��&��'��(��)��*��+��,��-��.��/��0��1��2��3��4��5��6��7��8��9��:��;��<�)r�!r0)r�!r�)r�!r0)r�!r�)r�!r0)r�!r�)r�!r0)r�!r�)r�!r0)r�!r�)r�!r0)r�!r�)r�!r0)r�!r�)r�!r0)r�!rEr�!)r�!rEr�!)r�!rEr�!)r�!rEr�!)r�!rEr�!)r�!rEr�!)r�!rEr�!)r�!r0)r�!r�)r�!r0)r�!rEr�!)r�!rEr�!)r�!rEr�!)r�!rEr�!)r�!rEr�!)r�!rEr�!)r�!r0)r�!r�)r�!r0)r�!r�)r�!r0)r�!r�)r�!r0)r�!r�)r�!rErF)r�!rErH)r�!rErJ)r�!rErL)r�!rErN)r�!rErP)r�!rErR)r�!rErT)r�!rErV)r�!rErX)r�!rErZ)r�!rEr\)r�!rEr^)r�!rEr`)r�!rErb)r�!rErd)r�!rErf)r�!rErh)r�!rErj)r�!rErl)r�!rErn)r�!rErp)r�!rErr)r�!rErt)r�!rErv)r�!rErx)r�!rErF)r�!rErH)r�!rErJ)r�!rErL)r�!rErN)r�!rErP)r�!rErR)r�!rErT)r�!rErV)r�!rErX)r�!rErZ)r�!rEr\)r�!rEr^)r�!rEr`)r�!rErb)r�!rErd)r�!rErf)r�!rErh)r�!rErj)r�!rErl)r�!rErn)r�!rErp)r�!rErr)r"rErt)r"rErv)r"rErx)r"rErF)r"rErH)r"rErJ)r"rErL)r"rErN)r"rErP)r	"rErR)r
"rErT)r"rErVr�r�r�r�r��_seg_53�s�r"ceCs�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�gdS)�N�=�rErX�>�rZ�?�r\�@�r^�A�r`�B�rb�C�rd�D�rf�E�rh�F�rj�G�rl�H�rn�I�rp�J�rr�K�rt�L�rv�M�rx�N�rF�O�rH�P�rJ�Q�rL�R�rN�S�rP�T�rR�U�r��V�rV�W��X��Y��Z��[��\��]��^��_��`��a��b��c��d��e��f��g��h��i��j��k��l��m��n��o�rT�p��q��r��s��t��u��v��w��x��y��z��{��|��}��~���������������������������������������������������������������������)r
"rErX)r"rErZ)r"rEr\)r"rEr^)r"rEr`)r"rErb)r"rErd)r"rErf)r"rErh)r"rErj)r"rErl)r"rErn)r"rErp)r"rErr)r"rErt)r"rErv)r"rErx)r"rErF)r"rErH)r "rErJ)r!"rErL)r""rErN)r#"rErP)r$"rErR)r%"r�)r&"rErV)r'"rErX)r("rErZ)r)"rEr\)r*"rEr^)r+"rEr`)r,"rErb)r-"rErd)r."rErf)r/"rErh)r0"rErj)r1"rErl)r2"rErn)r3"rErp)r4"rErr)r5"rErt)r6"rErv)r7"rErx)r8"rErF)r9"rErH)r:"rErJ)r;"rErL)r<"rErN)r="rErP)r>"rErR)r?"rErT)r@"rErV)rA"rErX)rB"rErZ)rC"rEr\)rD"rEr^)rE"rEr`)rF"rErb)rG"rErd)rH"rErf)rI"rErh)rJ"rErj)rK"rErl)rL"rErn)rM"rErp)rN"rErr)rO"rErt)rP"rErv)rQ"rErx)rR"rErF)rS"rErH)rT"rErJ)rU"rErL)rV"rErN)rW"rErP)rX"rErR)rY"rErT)rZ"rErV)r["rErX)r\"rErZ)r]"rEr\)r^"rEr^)r_"rEr`)r`"rErb)ra"rErd)rb"rErf)rc"rErh)rd"rErj)re"rErl)rf"rErn)rg"rErp)rh"rErr)ri"rErt)rj"rErv)rk"rErx)rl"rErF)rm"r�)rn"rErJ)ro"rErL)rp"r�r�r�r�r�r��_seg_54�s�rq"cfCs�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�gdS)�N��rErR��r���rX��rZ����r`��rb��rd��rf����rj��rl��rn��rp��rr��rt��rv��rx��rF��rH��rJ��rL����rP����rT��rV������r\���r^���������������rh���������������������������������������rN��������������������������������������������������������������������������������������������������������������������������������������)rr"rErR)rs"r�)rt"rErX)ru"rErZ)rv"r�)rw"rEr`)rx"rErb)ry"rErd)rz"rErf)r{"r�)r|"rErj)r}"rErl)r~"rErn)r"rErp)r�"rErr)r�"rErt)r�"rErv)r�"rErx)r�"rErF)r�"rErH)r�"rErJ)r�"rErL)r�"r�)r�"rErP)r�"r�)r�"rErT)r�"rErV)r�"rErX)r�"rErZ)r�"rEr\)r�"rEr^)r�"rEr`)r�"r�)r�"rErd)r�"rErf)r�"rErh)r�"rErj)r�"rErl)r�"rErn)r�"rErp)r�"rErr)r�"rErt)r�"rErv)r�"rErx)r�"rErF)r�"rErH)r�"rErJ)r�"rErL)r�"rErN)r�"rErP)r�"rErR)r�"rErT)r�"rErV)r�"rErX)r�"rErZ)r�"rEr\)r�"rEr^)r�"rEr`)r�"rErb)r�"rErd)r�"rErf)r�"rErh)r�"rErj)r�"rErl)r�"rErn)r�"rErp)r�"rErr)r�"rErt)r�"rErv)r�"rErx)r�"rErF)r�"rErH)r�"rErJ)r�"rErL)r�"rErN)r�"rErP)r�"rErR)r�"rErT)r�"rErV)r�"rErX)r�"rErZ)r�"rEr\)r�"rEr^)r�"rEr`)r�"rErb)r�"rErd)r�"rErf)r
�"rErh)r�"rErj)r�"rErl)r�"rErn)r�"rErp)r�"rErr)r�"rErt)r�"rErv)r�"rErx)r�"rErF)r�"rErH)r�"r�)r�"rErLr�r�r�r�r��_seg_55`s�r�"cfCs�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�gdS)�N��rErN�	�rP�
�rR��r��
�rX��rZ��r\��r^��r`��rb��rd��rf����rj��rl��rn��rp��rr��rt��rv����rF��rH� �rJ�!�rL�"��#��$��%�rT�&�rV�'��(��)��*��+��,��-��.��/�rh�0��1��2��3��4��5��6��7�rx�8��9��:��;��<��=��>��?��@��A��B��C��D��E��F��G��J��K��L��M��N��O��P��Q��R��S��T��U��V��W��X��Y��Z��[��\��]��^��_��`��a��b��c��d��e��f��g��h��i��j��k��l��m��n�)r�"rErN)r�"rErP)r�"rErR)r�"r�)r�"rErX)r�"rErZ)r�"rEr\)r�"rEr^)r�"rEr`)r�"rErb)r�"rErd)r�"rErf)r�"r�)r�"rErj)r�"rErl)r�"rErn)r�"rErp)r�"rErr)r�"rErt)r�"rErv)r�"r�)r�"rErF)r�"rErH)r�"rErJ)r�"rErL)r�"rErN)r�"rErP)r�"rErR)r�"rErT)r�"rErV)r�"rErX)r�"rErZ)r�"rEr\)r�"rEr^)r�"rEr`)r�"rErb)r�"rErd)r�"rErf)r�"rErh)r�"rErj)r�"rErl)r#rErn)r#rErp)r#rErr)r#rErt)r#rErv)r#rErx)r#rErF)r#rErH)r#r�)r	#rErL)r
#rErN)r#rErP)r#rErR)r
#r�)r#rErV)r#rErX)r#rErZ)r#rEr\)r#rEr^)r#r�)r#rErb)r#r�)r#rErj)r#rErl)r#rErn)r#rErp)r#rErr)r#rErt)r#rErv)r#r�)r#rErF)r#rErH)r #rErJ)r!#rErL)r"#rErN)r##rErP)r$#rErR)r%#rErT)r&#rErV)r'#rErX)r(#rErZ)r)#rEr\)r*#rEr^)r+#rEr`)r,#rErb)r-#rErd)r.#rErf)r/#rErh)r0#rErj)r1#rErl)r2#rErn)r3#rErp)r4#rErr)r5#rErt)r6#rErv)r7#rErx)r8#rErF)r9#rErH)r:#rErJr�r�r�r�r��_seg_56�s�r;#cfCs�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�gdS)�N�o�rErL�p�rN�q�rP�r�rR�s�rT�t�rV�u�rX�v�rZ�w�r\�x�r^�y�r`�z�rb�{�rd�|�rf�}�rh�~�rj��rl��rn��rp��rr��rt��rv��rx��rF��rH��rJ���������������������������������������������������������������������������������������������������������������������������������������������������������������������)r<#rErL)r=#rErN)r>#rErP)r?#rErR)r@#rErT)rA#rErV)rB#rErX)rC#rErZ)rD#rEr\)rE#rEr^)rF#rEr`)rG#rErb)rH#rErd)rI#rErf)rJ#rErh)rK#rErj)rL#rErl)rM#rErn)rN#rErp)rO#rErr)rP#rErt)rQ#rErv)rR#rErx)rS#rErF)rT#rErH)rU#rErJ)rV#rErL)rW#rErN)rX#rErP)rY#rErR)rZ#rErT)r[#rErV)r\#rErX)r]#rErZ)r^#rEr\)r_#rEr^)r`#rEr`)ra#rErb)rb#rErd)rc#rErf)rd#rErh)re#rErj)rf#rErl)rg#rErn)rh#rErp)ri#rErr)rj#rErt)rk#rErv)rl#rErx)rm#rErF)rn#rErH)ro#rErJ)rp#rErL)rq#rErN)rr#rErP)rs#rErR)rt#rErT)ru#rErV)rv#rErX)rw#rErZ)rx#rEr\)ry#rEr^)rz#rEr`)r{#rErb)r|#rErd)r}#rErf)r~#rErh)r#rErj)r�#rErl)r�#rErn)r�#rErp)r�#rErr)r�#rErt)r�#rErv)r�#rErx)r�#rErF)r�#rErH)r�#rErJ)r�#rErL)r�#rErN)r�#rErP)r�#rErR)r�#rErT)r�#rErV)r�#rErX)r�#rErZ)r�#rEr\)r�#rEr^)r�#rEr`)r�#rErb)r�#rErd)r�#rErf)r�#rErh)r�#rErj)r�#rErl)r�#rErn)r�#rErp)r�#rErr)r�#rErt)r�#rErvr�r�r�r�r��_seg_570s�r�#cfCs�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�gdS)�N���rErx���rF���rH���rJ���rL���rN���rP���rR���rT���rV���rX���rZ���r\���r^���r`���rb���rd���rf���rh���rj���
rl���rn���rp���rr���rt���rv�����������������������������������������������������������������	��
������
�������������������������������������� ��!��"��#��$��%��&��'��(��)��*��+��,��-��.��/��0��1��2��3��4��5��6�)r�#rErx)r�#rErF)r�#rErH)r�#rErJ)r�#rErL)r�#rErN)r�#rErP)r�#rErR)r�#rErT)r�#rErV)r�#rErX)r�#rErZ)r�#rEr\)r�#rEr^)r�#rEr`)r�#rErb)r�#rErd)r�#rErf)r�#rErh)r�#rErj)r�#rErl)r�#rErn)r�#rErp)r�#rErr)r�#rErt)r�#rErv)r�#rErx)r�#rErF)r�#rErH)r�#rErJ)r�#rErL)r�#rErN)r�#rErP)r�#rErR)r�#rErT)r�#rErV)r�#rErX)r�#rErZ)r�#rEr\)r�#rEr^)r�#rEr`)r�#rErb)r�#rErd)r�#rErf)r�#rErh)r�#rErj)r�#rErl)r�#rErn)r�#rErp)r�#rErr)r�#rErt)r�#rErv)r�#rErx)r�#rErF)r�#rErH)r�#rErJ)r�#rErL)r�#rErN)r�#rErP)r�#rErR)r�#rErT)r�#rErV)r�#rErX)r�#rErZ)r�#rEr\)r�#rEr^)r�#rEr`)r�#rErb)r�#rErd)r�#rErf)r�#rErh)r�#rErj)r�#rErl)r�#rErn)r�#rErp)r�#rErr)r�#rErt)r�#rErv)r�#rErx)r�#rErF)r�#rErH)r�#rErJ)r�#rErL)r�#rErN)r�#rErP)r�#rErR)r�#rErT)r�#rErV)r�#rErX)r�#rErZ)r�#rEr\)r�#rEr^)r�#rEr`)r�#rErb)r�#rErd)r$rErf)r$rErh)r$rErj)r$rErl)r$rErnr�r�r�r�r��_seg_58�s�r$cfCs�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�gdS)�N�7�rErp�8�rr�9�rt�:�rv�;�rx�<�rF�=�rH�>�rJ�?�rL�@�rN�A�rP�B�rR�C�rT�D�rV�E�rX�F�rZ�G�r\�H�r^�I�r`�J�rb�K�rd�L�rf�M�rh�N�rj�O�rl�P�rn�Q��R��S��T��U��V��W��X��Y��Z��[��\��]��^��_��`��a��b��c��d��e��f��g��h��i��j��k��l��m��n��o��p��q��r��s��t��u��v��w��x��y��z��{��|��}��~���������������������������������������������������������)r$rErp)r$rErr)r$rErt)r	$rErv)r
$rErx)r$rErF)r$rErH)r
$rErJ)r$rErL)r$rErN)r$rErP)r$rErR)r$rErT)r$rErV)r$rErX)r$rErZ)r$rEr\)r$rEr^)r$rEr`)r$rErb)r$rErd)r$rErf)r$rErh)r$rErj)r$rErl)r$rErn)r $rErp)r!$rErr)r"$rErt)r#$rErv)r$$rErx)r%$rErF)r&$rErH)r'$rErJ)r($rErL)r)$rErN)r*$rErP)r+$rErR)r,$rErT)r-$rErV)r.$rErX)r/$rErZ)r0$rEr\)r1$rEr^)r2$rEr`)r3$rErb)r4$rErd)r5$rErf)r6$rErh)r7$rErj)r8$rErl)r9$rErn)r:$rErp)r;$rErr)r<$rErt)r=$rErv)r>$rErx)r?$rErF)r@$rErH)rA$rErJ)rB$rErL)rC$rErN)rD$rErP)rE$rErR)rF$rErT)rG$rErV)rH$rErX)rI$rErZ)rJ$rEr\)rK$rEr^)rL$rEr`)rM$rErb)rN$rErd)rO$rErf)rP$rErh)rQ$rErj)rR$rErl)rS$rErn)rT$rErp)rU$rErr)rV$rErt)rW$rErv)rX$rErx)rY$rErF)rZ$rErH)r[$rErJ)r\$rErL)r]$rErN)r^$rErP)r_$rErR)r`$rErT)ra$rErV)rb$rErX)rc$rErZ)rd$rEr\)re$rEr^)rf$rEr`)rg$rErb)rh$rErd)ri$rErfr�r�r�r�r��_seg_59s�rj$cfCs�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�gdS)�N��rErh��rj��rl��rn��rp��rr��rt��rv��rx���ı���ȷ��r����α���β���γ���δ���ε���ζ���η���θ���ι���κ���λ���μ���ν���ξ���ο���π���ρ�����σ���τ���υ���φ���χ���ψ���ω���∇����������������������������������������������������������������������������∂���������������������������������������������������������������������������������������������������)rk$rErh)rl$rErj)rm$rErl)rn$rErn)ro$rErp)rp$rErr)rq$rErt)rr$rErv)rs$rErx)rt$rEru$)rv$rErw$)rx$r�)ry$rErz$)r{$rEr|$)r}$rEr~$)r$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rErz$)r�$rEr|$)r�$rEr~$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rErz$)r�$rEr|$)r�$rEr~$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�
$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rErz$)r�$rEr|$)r�$rEr~$)r�$rEr�$)r�$rEr�$r�r�r�r�r��_seg_60hs�r�$cfCs�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�gdS)�N��rE�ζ���η���θ���ι���κ���λ���μ���ν�	��ξ�
��ο���π���ρ�
��σ���τ���υ���φ���χ���ψ���ω���∂���ε�������������α���β���γ���δ� ��!��"��#��$��%��&��'��(��)��*��+��,��-��.��/��0��1��2��3��4��5��∇�6��7��8��9��:��;��<��=��>��?��@��A��B��C��D��E��F��G��I��J��K��L��M��N��O��P��Q��R��S��T��U��V��W��X��Y��Z��[��\��]��^��_��`��a��b��c��d��e��f�)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r�$rEr�$)r%rEr%)r%rEr%)r%rEr%)r%rEr%)r%rEr	%)r
%rEr%)r%rEr
%)r%rEr%)r%rEr%)r%rEr%)r%rEr%)r%rEr�$)r%rEr�$)r%rEr%)r%rEr%)r%rEr%)r%rEr%)r%rEr%)r%rEr %)r!%rEr"%)r#%rEr%)r$%rEr�$)r%%rEr�$)r&%rEr�$)r'%rEr�$)r(%rEr�$)r)%rEr�$)r*%rEr�$)r+%rEr�$)r,%rEr�$)r-%rEr�$)r.%rEr%)r/%rEr%)r0%rEr�$)r1%rEr%)r2%rEr%)r3%rEr	%)r4%rEr%)r5%rEr
%)r6%rEr%)r7%rEr%)r8%rEr9%)r:%rEr%)r;%rEr%)r<%rEr %)r=%rEr"%)r>%rEr%)r?%rEr�$)r@%rEr�$)rA%rEr�$)rB%rEr�$)rC%rEr�$)rD%rEr�$)rE%rEr�$)rF%rEr�$)rG%rEr�$)rH%rEr�$)rI%rEr%)rJ%rEr%)rK%rEr%)rL%rEr%)rM%rEr	%)rN%rEr%)rO%rEr
%)rP%rEr%)rQ%rEr%)rR%rEr%)rS%rEr%)rT%rEr�$)rU%rEr�$)rV%rEr%)rW%rEr%)rX%rEr%)rY%rEr%)rZ%rEr%)r[%rEr %)r\%rEr"%)r]%rEr%)r^%rEr�$)r_%rEr�$)r`%rEr�$)ra%rEr�$)rb%rEr�$)rc%rEr�$)rd%rEr�$)re%rEr�$)rf%rEr�$)rg%rEr�$)rh%rEr%)ri%rEr%r�r�r�r�r��_seg_61�s�rj%cfCs�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�gdS)�N�g�rE�θ�h��σ�i��τ�j��υ�k��φ�l��χ�m��ψ�n��ω�o��∇�p��α�q��β�r��γ�s��δ�t��ε�u��ζ�v��η�w��x��ι�y��κ�z��λ�{��μ�|��ν�}��ξ�~��ο���π���ρ�����������������∂������������������������������������������������������������������������������������������������������������������������������������������ϝ���r����r�)rk%rErl%)rm%rErn%)ro%rErp%)rq%rErr%)rs%rErt%)ru%rErv%)rw%rErx%)ry%rErz%)r{%rEr|%)r}%rEr~%)r%rEr�%)r�%rEr�%)r�%rEr�%)r�%rEr�%)r�%rEr�%)r�%rEr�%)r�%rErl%)r�%rEr�%)r�%rEr�%)r�%rEr�%)r�%rEr�%)r�%rEr�%)r�%rEr�%)r�%rEr�%)r�%rEr�%)r�%rEr�%)r�%rErn%)r�%rErp%)r�%rErr%)r�%rErt%)r�%rErv%)r�%rErx%)r�%rErz%)r�%rEr�%)r�%rEr�%)r�%rErl%)r�%rEr�%)r�%rErt%)r�%rEr�%)r�%rEr�%)r�%rEr~%)r�%rEr�%)r�%rEr�%)r�%rEr�%)r�%rEr�%)r�%rEr�%)r�%rEr�%)r�%rErl%)r�%rEr�%)r�%rEr�%)r�%rEr�%)r�%rEr�%)r�%rEr�%)r�%rEr�%)r�%rEr�%)r�%rEr�%)r�%rEr�%)r�%rErl%)r�%rErn%)r�%rErp%)r�%rErr%)r�%rErt%)r�%rErv%)r�%rErx%)r�%rErz%)r�%rEr|%)r�%rEr~%)r�%rEr�%)r�%rEr�%)r�%rEr�%)r�%rEr�%)r�%rEr�%)r�%rEr�%)r�%rErl%)r�%rEr�%)r�%rEr�%)r�%rEr�%)r�%rEr�%)r�%rEr�%)r�%rEr�%)r�%rEr�%)r�%rEr�%)r�%rEr�%)r�%rErn%)r�%rErp%)r�%rErr%)r�%rErt%)r�%rErv%)r�%rErx%)r�%rErz%)r�%rEr�%)r�%rEr�%)r�%rErl%)r�%rEr�%)r�%rErt%)r�%rEr�%)r�%rEr�%)r�%rEr�%)r�%r�)r�%rEr�r�r�r�r�r��_seg_628s�r�%cfCs�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�gdS)�N���rEr����r����r���r����r����r����r����r����r����r�������������������������������������������������������������������
������������������������������������������r����ا���ب���ج���د�����و���ز���ح���ط�	��ي�
��ك���ل���م�
��ن���س���ع���ف���ص���ق���ر���ش���ت���ث���خ���ذ���ض���ظ���غ���ٮ���ں���ڡ���ٯ� ��!��"��#��$��ه�%��'��(��)��*��+��,��-��.��/��0��1��2�)r�%rEr�)r�%rEr�)r�%rEr)r�%rEr�)r�%rEr�)r�%rEr�)r�%rEr�)r�%rEr�)r�%rEr�)r�%rEr�)r�%rEr�)r�%rEr�)r�%rEr)r�%rEr�)r�%rEr�)r�%rEr�)r�%rEr�)r�%rEr�)r�%rEr�)r�%rEr�)r�%rEr�)r&rEr�)r&rEr)r&rEr�)r&rEr�)r&rEr�)r&rEr�)r&rEr�)r&rEr�)r&rEr�)r	&rEr�)r
&rEr�)r&rEr)r&rEr�)r
&rEr�)r&rEr�)r&rEr�)r&rEr�)r&rEr�)r&rEr�)r&rEr�)r&rEr�)r&rEr)r&rEr�)r&rEr�)r&rEr�)r&rEr�)r&rEr�)r&rEr�)r&r�)r&rEr&)r&rEr &)r!&rEr"&)r#&rEr$&)r%&r�)r&&rEr'&)r(&rEr)&)r*&rEr+&)r,&rEr-&)r.&rEr/&)r0&rEr1&)r2&rEr3&)r4&rEr5&)r6&rEr7&)r8&rEr9&)r:&rEr;&)r<&rEr=&)r>&rEr?&)r@&rErA&)rB&rErC&)rD&rErE&)rF&rErG&)rH&rErI&)rJ&rErK&)rL&rErM&)rN&rErO&)rP&rErQ&)rR&rErS&)rT&rErU&)rV&rErW&)rX&rErY&)rZ&rEr[&)r\&r�)r]&rEr &)r^&rEr"&)r_&r�)r`&rEra&)rb&r�)rc&rEr+&)rd&r�)re&rEr/&)rf&rEr1&)rg&rEr3&)rh&rEr5&)ri&rEr7&)rj&rEr9&)rk&rEr;&)rl&rEr=&)rm&rEr?&)rn&rErA&r�r�r�r�r��_seg_63�s�ro&ceCs�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�gdS)�N�3�r��4�rE�ش�5��ت�6��ث�7��خ�8��9��ض�:��;��غ�<��B��ج�C��G��ح�H��I��ي�J��K��ل�L��M��ن�N��س�O��ع�P��Q��ص�R��ق�S��T��U��W��X��Y��Z��[��\��]��ں�^��_��ٯ�`��a��ب�b��c��d��ه�e��g��h��ط�i��j��ك�k��l��م�m��n��o��p��ف�q��r��s��t��u��v��w��x��y��z��ظ�{��|��ٮ�}��~��ڡ�����ا�������د�����و���ز���������������������������ر�����������ذ����������������)rp&r�)rq&rErr&)rs&rErt&)ru&rErv&)rw&rErx&)ry&r�)rz&rEr{&)r|&r�)r}&rEr~&)r&r�)r�&rEr�&)r�&r�)r�&rEr�&)r�&r�)r�&rEr�&)r�&r�)r�&rEr�&)r�&r�)r�&rEr�&)r�&rEr�&)r�&rEr�&)r�&r�)r�&rEr�&)r�&rEr�&)r�&r�)r�&rErr&)r�&r�)r�&rErx&)r�&r�)r�&rEr{&)r�&r�)r�&rEr~&)r�&r�)r�&rEr�&)r�&r�)r�&rEr�&)r�&r�)r�&rEr�&)r�&rEr�&)r�&r�)r�&rEr�&)r�&r�)r�&rEr�&)r�&rEr�&)r�&rEr�&)r�&rEr�&)r�&r�)r�&rEr�&)r�&rEr�&)r�&rEr�&)r�&rEr�&)r�&rEr�&)r�&rEr�&)r�&rEr�&)r�&r�)r�&rErr&)r�&rErt&)r�&rErv&)r�&rErx&)r�&r�)r�&rEr{&)r�&rEr�&)r�&rEr~&)r�&rEr�&)r�&r�)r�&rEr�&)r�&r�)r�&rEr�&)r�&rEr�&)r�&rEr�&)r�&rEr�&)r�&rEr�&)r�&rEr�&)r�&rEr�&)r�&rEr�&)r�&rEr�&)r�&rEr�&)r�&r�)r�&rEr�&)r�&rEr�&)r�&rEr�&)r�&rEr�&)r�&rEr�&)r�&rEr�&)r�&rEr�&)r�&rEr�&)r�&rEr�&)r�&rErr&)r�&rErt&)r�&rErv&)r�&rErx&)r�&rEr�&)r�&rEr{&)r�&rEr�&)r�&rEr~&)r�&r�)r�&rEr�&)r�&rEr�&)r�&rEr�&)r�&r�r�r�r�r�r��_seg_64s�r�&cfCsd�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�
d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�d�d�d�d�d	�d
�d�d�d
�d�d�d�d�d�d�d�d�d�d�d�d�d�d�dgdS(N��rE�و���ز���ح���ط���ي��r����ل���م���ن���س���ع���ف���ص���ق���ر���ش���ت���ث���خ���ذ���ض���ظ���غ�����r0������,��0������������������������r�0,���1,���2,���3,���4,���5,���6,���7,�	��8,�
��9,�����(a)���(b)���(c)���(d)���(e)���(f)���(g)���(h)���(i)���(j)���(k)���(l)���(m)���(n)���(o)���(p)� ��(q)�!��(r)�"��(s)�#��(t)�$��(u)�%��(v)�&��(w)�'��(x)�(��(y)�)��(z)�*��〔s〕�+�rJ�,�rh�-�r��.��wz�/��0�rF�1�rH�2��3�rL�4�rN�5�rP�6�rR�7�rT�8�rV�9�rX�:�rZ�;�r\�<�r^�=�r`�>�rb�?�rd�@�rf�A��B�rj)r�&rEr�&)r�&rEr�&)r�&rEr�&)r�&rEr�&)r�&rEr�&)r�&r�)r'rEr')r'rEr')r'rEr')r'rEr')r'rEr	')r
'rEr')r'rEr
')r'rEr')r'rEr')r'rEr')r'rEr')r'rEr')r'rEr')r'rEr')r'rEr')r'rEr')r 'rEr!')r"'r�)r#'r0)r$'r�)r%'r0)r&'r�)r''r0)r('r�)r)'r0)r*'r�)r+'r0)r,'r�)r-'r0)r.'r�)r/'r0)r0'r�)r1'rr2')r3'rr4')r5'rr6')r7'rr8')r9'rr:')r;'rr<')r='rr>')r?'rr@')rA'rrB')rC'rrD')rE'r�)rF'rrG')rH'rrI')rJ'rrK')rL'rrM')rN'rrO')rP'rrQ')rR'rrS')rT'rrU')rV'rrW')rX'rrY')rZ'rr[')r\'rr]')r^'rr_')r`'rra')rb'rrc')rd'rre')rf'rrg')rh'rri')rj'rrk')rl'rrm')rn'rro')rp'rrq')rr'rrs')rt'rru')rv'rrw')rx'rry')rz'rEr{')r|'rErJ)r}'rErh)r~'rEr�)r'rEr�')r�'r�)r�'rErF)r�'rErH)r�'rErJ)r�'rErL)r�'rErN)r�'rErP)r�'rErR)r�'rErT)r�'rErV)r�'rErX)r�'rErZ)r�'rEr\)r�'rEr^)r�'rEr`)r�'rErb)r�'rErd)r�'rErf)r�'rErh)r�'rErjr�r�r�r�r��_seg_65ps�r�'ceCs�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�d�d�d�d�d	�d
�d�d�d
�d�d�d�d�d�d�dgdS(N�C�rErl�D�rn�E�rp�F�rr�G�rt�H�rv�I�rx�J��hv�K�r��L��sd�M�r.�N��ppv�O��wc�P�r0�j��mc�k��md�l�r��p����dj����������ほか���ココ���サ�����手���字���双���デ���二���多���解���天���交���映���無���料���前���後���再���新� ��初�!��終�"��生�#��販�$��声�%��吹�&��演�'��投�(��捕�)��一�*��三�+��遊�,��左�-��中�.��右�/��指�0��走�1��打�2��禁�3��空�4��合�5��満�6��有�7��月�8��申�9��割�:��営�;��@��	〔本〕�A��	〔三〕�B��	〔二〕�C��	〔安〕�D��	〔点〕�E��	〔打〕�F��	〔盗〕�G��	〔勝〕�H��	〔敗〕�I��P��得�Q��可�R����!��0��6��7��}�������������������������?��@�)r�'rErl)r�'rErn)r�'rErp)r�'rErr)r�'rErt)r�'rErv)r�'rErx)r�'rEr�')r�'rEr�)r�'rEr�')r�'rEr.)r�'rEr�')r�'rEr�')r�'r0)r�'rEr�')r�'rEr�')r�'r�)r�'r0)r�'rEr�')r�'r0)r�'r�)r�'r0)r�'rEr�')r�'rEr�')r�'rEr�')r�'r�)r�'rEr�')r�'rEr�')r�'rEr�')r�'rEr�')r�'rEr�')r�'rEr�')r�'rEr�')r�'rEr�')r�'rEr�')r�'rEr�')r�'rEr�')r�'rEr�')r�'rEr�')r�'rEr�')r�'rEr�')r�'rEr�')r�'rEr�')r�'rEr�')r�'rEr�')r�'rEr�')r�'rEr�')r�'rEr�')r�'rEr�')r�'rEr�')r�'rEr�')r�'rEr�')r�'rEr�')r�'rEr�')r�'rEr�')r�'rEr�')r�'rEr�')r�'rEr�')r�'rEr�')r�'rEr�')r�'rEr�')r(rEr()r(rEr()r(rEr()r(rEr()r(rEr	()r
(rEr()r(rEr
()r(rEr()r(r�)r(rEr()r(rEr()r(rEr()r(rEr()r(rEr()r(rEr()r(rEr()r(rEr ()r!(rEr"()r#(r�)r$(rEr%()r&(rEr'()r((r�)r)(r0)r*(r�)r+(r0)r,(r�)r-(r0)r.(r�)r/(r0)r0(r�)r1(r0)r2(r�)r3(r0)r4(r�)r5(r0)r6(r�)r7(r0)r8(r�)r9(r0r�r�r�r�r��_seg_66�s�r:(cfCs�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�d�d�d�d�d	�d
�d�d�d
�d�d�d�d�d�d�d�d�dgdS(N�A�r��B�r0���������>��@��D��P��h���A��E��P�������t���צ���5��@�����rE�丽���丸���乁���𠄢���你���侮���侻���倂���偺�	��備�
��僧���像���㒞�
��𠘺���免���兔���兤���具���𠔜���㒹���內���再���𠕋���冗���冤���仌���冬���况���𩇟���凵���刃���㓟� ��刻�!��剆�"��割�#��剷�$��㔕�%��勇�&��勉�'��勤�(��勺�)��包�*��匆�+��北�,��卉�-��卑�.��博�/��即�0��卽�1��卿�4��𠨬�5��灰�6��及�7��叟�8��𠭣�9��叫�:��叱�;��吆�<��咞�=��吸�>��呈�?��周�@��咢�A��哶�B��唐�C��啓�D��啣�E��善�G��喙�H��喫�I��喳�J��嗂�K��圖�L��嘆�M��圗)r;(r�)r<(r0)r=(r�)r>(r0)r?(r�)r@(r0)rA(r�)rB(r0)rC(r�)rD(r0)rE(r�)rF(r0)rG(r�)rH(r0)rI(r�)rJ(r0)rK(r�)rL(r0)rM(r�)rN(r0)rO(r�)rP(r0)rQ(r�)rR(r0)rS(r�)rT(rErU()rV(rErW()rX(rErY()rZ(rEr[()r\(rEr]()r^(rEr_()r`(rEra()rb(rErc()rd(rEre()rf(rErg()rh(rEri()rj(rErk()rl(rErm()rn(rEro()rp(rErq()rr(rErs()rt(rEru()rv(rErw()rx(rEry()rz(rEr{()r|(rEr}()r~(rEr()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�(r�r�r�r�r��_seg_67@s�r�(cfCs$d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�d�d�d�d�d	�d
�d�d�d
�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d �d!�d"�d#�d$�d%�d&�d'�d(�d)�d*�d+gdS(,N�N�rE�噑�O��噴�P��切�Q��壮�R��城�S��埴�T��堍�U��型�V��堲�W��報�X��墬�Y��𡓤�Z��売�[��壷�\��夆�]��多�^��夢�_��奢�`��𡚨�a��𡛪�b��姬�c��娛�d��娧�e��姘�f��婦�g��㛮�h�r��i��嬈�j��嬾�l��𡧈�m��寃�n��寘�o��寧�p��寳�q��𡬘�r��寿�s��将�t��u��尢�v��㞁�w��屠�x��屮�y��峀�z��岍�{��𡷤�|��嵃�}��𡷦�~��嵮���嵫��嵼��巡��巢��㠯��巽��帨��帽��幩��㡢��𢆃��㡼��庰��庳��庶��廊��𪎒��廾��𢌱��舁��弢��㣇��𣊸��𦇚��形��彫��㣣��徚��忍��志��忹��悁��㤺��㤜��悔��𢛔��惇��慈��慌��慎���慺��憎��憲��憤��憯��懞��懲��懶��成��戛��扝)r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr�()r�(rEr))r)rEr))r)rEr))r)rEr))r)rEr))r	)rEr
))r)rEr))r
)rEr))r)rEr))r)rEr))r)rEr))r)rEr))r)rEr))r)rEr))r)rEr))r)rEr))r)r�)r )rEr!))r")rEr#))r$)rEr%))r&)rEr'))r()rEr)))r*)rEr+))r,)rEr-))r.)rEr/))r0)rEr1))r2)rEr3))r4)r�)r5)rEr6))r7)rEr8))r9)rEr:))r;)rEr<))r=)rEr>))r?)rEr@))rA)rErB))rC)rErD))rE)rErF))rG)rErH))rI)rErJ))rK)rErL))rM)rErN))rO)rErP))rQ)rErR))rS)rErT))rU)rErV))rW)rErX))rY)rErZ))r[)rEr\))r])rEr^))r_)rEr`))ra)rErb))rc)rErd))re)rErf))rg)rErh))ri)rErj))rk)rErl))rm)rErn))ro)rErp))rq)rErr))rs)rErt))ru)rErv))rw)rErx))ry)rErz))r{)rEr|))r})rEr~))r)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�)r�r�r�r�r��_seg_68�s�r�)cfCs(d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�d�d�d�d�d	�d
�d�d�d
�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d �d!�d"�d#�d$�d%�d&�d'�d(�d)�d*�d+�d,�d-gdS(.N�rE�抱��拔��捐��𢬌��挽��拼��捨��掃��揤��𢯱��搢��揅��掩���㨮���摩���摾���撝���摷���㩬���敏���敬���𣀊���旣���書���晉���㬙���暑���㬈���㫤���冒���冕���最���暜���肭���䏙���朗���望���朡���杞���杓���𣏃���㭉���柺���枅���桒���梅���𣑭���梎���栟���椔���㮝���楂���榣���槪���檨���𣚣���櫛���㰘���次���𣢧���歔���㱎���歲���殟��殺��殻��𣪍��𡴋��𣫺��汎��𣲼��沿��泍��汧��洖���派���海���流���浩���浸���涅���𣴞���洴���港�	��湮�
��㴳���滋���滇�
��𣻑���淹���潮���𣽞���𣾎���濆���瀹���瀞���瀛���㶖���灊���災)r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr�))r�)rEr*)r*rEr*)r*rEr*)r*rEr*)r*rEr*)r	*rEr
*)r*rEr*)r
*rEr*)r*rEr*)r*rEr*)r*rEr*)r*rEr*)r*rEr*)r*rEr*)r*rEr*)r*rEr*)r*rEr *)r!*rEr"*)r#*rEr$*)r%*rEr&*)r'*rEr(*)r)*rEr**)r+*rEr,*)r-*rEr.*)r/*rEr0*)r1*rEr2*)r3*rEr4*)r5*rEr6*)r7*rEr8*)r9*rEr:*)r;*rEr<*)r=*rEr>*)r?*rEr@*)rA*rErB*)rC*rErD*)rE*rErF*)rG*rErH*)rI*rErJ*)rK*rErL*)rM*rErN*)rO*rErP*)rQ*rErR*)rS*rErT*)rU*rErV*)rW*rErX*)rY*rErZ*)r[*rEr\*)r]*rEr^*)r_*rEr`*)ra*rErb*)rc*rErd*)re*rErf*)rg*rErh*)ri*rErj*)rk*rErl*)rm*rErn*)ro*rErp*)rq*rErr*)rs*rErt*)ru*rErv*)rw*rErx*r�r�r�r�r��_seg_69s�ry*cfCs&d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�d�d�d�d�d	�d
�d�d�d
�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d �d!�d"�d#�d$�d%�d&�d'�d(�d)�d*�d+�d,gdS(-N��rE�灷���炭���𠔥���煅���𤉣���熜��r�� ��爨�!��爵�"��牐�#��𤘈�$��犀�%��犕�&��𤜵�'��𤠔�(��獺�)��王�*��㺬�+��玥�,��㺸�.��瑇�/��瑜�0��瑱�1��璅�2��瓊�3��㼛�4��甤�5��𤰶�6��甾�7��𤲒�8��異�9��𢆟�:��瘐�;��𤾡�<��𤾸�=��𥁄�>��㿼�?��䀈�@��直�A��𥃳�B��𥃲�C��𥄙�D��𥄳�E��眞�F��真�H��睊�I��䀹�J��瞋�K��䁆�L��䂖�M��𥐝�N��硎�O��碌�P��磌�Q��䃣�R��𥘦�S��祖�T��𥚚�U��𥛅�V��福�W��秫�X��䄯�Y��穀�Z��穊�[��穏�\��𥥼�]��𥪧�_��`��䈂�a��𥮫�b��篆�c��築�d��䈧�e��𥲀�f��糒�g��䊠�h��糨�i��糣�j��紀�k��𥾆�l��絣�m��䌁�n��緇�o��縂�p��繅�q��䌴�r��𦈨�s��𦉇�t��䍙�u��𦋙�v��罺�w��𦌾�x��羕�y��翺�z��者�{��𦓚�|��𦔣�}��聠�~��𦖨���聰)rz*rEr{*)r|*rEr}*)r~*rEr*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*r�)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*rEr�*)r�*r�)r+rEr+)r+rEr+)r+rEr+)r+rEr+)r+rEr	+)r
+rEr+)r+rEr
+)r+rEr+)r+rEr+)r+rEr+)r+rEr+)r+rEr+)r+rEr+)r+rEr+)r+rEr+)r+rEr+)r +rEr!+)r"+rEr#+)r$+rEr%+)r&+rEr'+)r(+rEr)+)r*+rEr++)r,+rEr-+)r.+rEr/+)r0+rEr1+)r2+rEr3+)r4+rEr5+)r6+rEr7+)r8+rEr9+)r:+rEr;+)r<+rEr=+)r>+rEr?+r�r�r�r�r��_seg_70xs�r@+cfCs(d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d��d�d�d�d�d�d�d�d�d�d	�d
�d�d�d
�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d �d!�d"�d#�d$�d%�d&�d'�d(�d)�d*�d+�d,�d-gdS(.N�rE�𣍟��䏕��育��脃��䐋��脾��媵��𦞧��𦞵��𣎓��𣎜��舁��舄��辞��䑫��芑��芋��芝��劳��花��芳��芽��苦��𦬼��若��茝��荣��莭��茣��莽��菧��著��荓��菊��菌��菜��𦰶��𦵫��𦳕��䔫��蓱��蓳��蔖��𧏊��蕤��𦼬��䕝��䕡��𦾱��𧃒��䕫��虐��虜��虧��虩��蚩��蚈��蜎��蛢��蝹��蜨��蝫��螆�r���蟡��蠁���䗹���衠���衣���𧙧���裗���裞���䘵���裺���㒻���𧢮���𧥦���䚾���䛇���誠���諭���變���豕���𧲨���貫���賁���贛���起���𧼯���𠠄���跋���趼���跰���𠣞���軔���輸���𨗒���𨗭���邔���郱)rA+rErB+)rC+rErD+)rE+rErF+)rG+rErH+)rI+rErJ+)rK+rErL+)rM+rErN+)rO+rErP+)rQ+rErR+)rS+rErT+)rU+rErV+)rW+rErX+)rY+rErZ+)r[+rEr\+)r]+rEr^+)r_+rEr`+)ra+rErb+)rc+rErd+)re+rErf+)rg+rErh+)ri+rErj+)rk+rErl+)rm+rErn+)ro+rErp+)rq+rErr+)rs+rErt+)ru+rErv+)rw+rErx+)ry+rErz+)r{+rEr|+)r}+rEr~+)r+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+r�)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r�+rEr�+)r,rEr,)r,rEr,)r,rEr,)r,rEr,r�r�r�r�r��_seg_71�s�r,c=Cs|dydzd{d|d}d~dd�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�d�g<S)�N��rE�鄑���𨜮���鄛���鈸���鋗���鋘���鉼���鏹���鐕���𨯺���開���䦕���閷���𨵷���䧦���雃���嶲��霣��𩅅��𩈚��䩮��䩶��韠��𩐊��䪲��𩒖��頋���頩���𩖶���飢���䬳���餩���馧���駂���駾���䯎�	��𩬰�
��鬒���鱀���鳽�
��䳎���䳭���鵧���𪃎���䳸���𪄅���𪈎���𪊑���麻���䵖���黹���黾���鼅���鼏���鼖���鼻���𪘀��r��r���)r	,rEr
,)r,rEr,)r
,rEr,)r,rEr,)r,rEr,)r,rEr,)r,rEr,)r,rEr,)r,rEr,)r,rEr,)r,rEr,)r,rEr ,)r!,rEr",)r#,rEr$,)r%,rEr&,)r',rEr(,)r),rEr*,)r+,rEr,,)r-,rEr.,)r/,rEr0,)r1,rEr2,)r3,rEr4,)r5,rEr6,)r7,rEr8,)r9,rEr:,)r;,rEr<,)r=,rEr>,)r?,rEr@,)rA,rErB,)rC,rErD,)rE,rErF,)rG,rErH,)rI,rErJ,)rK,rErL,)rM,rErN,)rO,rErP,)rQ,rErR,)rS,rErT,)rU,rErV,)rW,rErX,)rY,rErZ,)r[,rEr\,)r],rEr^,)r_,rEr`,)ra,rErb,)rc,rErd,)re,rErf,)rg,rErh,)ri,rErj,)rk,rErl,)rm,rErn,)ro,rErp,)rq,rErr,)rs,rErt,)ru,rErv,)rw,rErx,)ry,rErz,)r{,r�)r|,r�)r},r�r�r�r�r�r��_seg_72Hsxr~,N)M�__doc__�__version__r�r�r�r-r�rfrr�rJr�rbr�r/r�rr�r9	r�	rm
r
r�r3r�r]
rr�rAr
r�r�rLrr�r�rfr�r�rr�r�rsr7r�r�rxr6r�r�rXr�r� r+!r�!r"rq"r�"r;#r�#r$rj$r�$rj%r�%ro&r�&r�'r:(r�(r�)ry*r@+r,r~,�tupleZ	uts46datar�r�r�r��<module>s�hhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhh@I_vendor/idna/__pycache__/intranges.cpython-36.opt-1.pyc000064400000003317151733136500016712 0ustar003

�Pf��@s0dZddlZdd�Zdd�Zdd�Zd	d
�ZdS)a	
Given a list of integers, made up of (hopefully) a small number of long runs
of consecutive integers, compute a representation of the form
((start1, end1), (start2, end2) ...). Then answer the question "was x present
in the original list?" in time O(log(# runs)).
�NcCs�t|�}g}d}xrtt|��D]b}|dt|�krL||||ddkrLq||d|d�}|jt|d|dd��|}qWt|�S)aRepresent a list of integers as a sequence of ranges:
    ((start_0, end_0), (start_1, end_1), ...), such that the original
    integers are exactly those x such that start_i <= x < end_i for some i.

    Ranges are encoded as single integers (start << 32 | end), not as tuples.
    �r���r)�sorted�range�len�append�
_encode_range�tuple)Zlist_Zsorted_list�rangesZ
last_write�iZ
current_range�r�/usr/lib/python3.6/intranges.py�intranges_from_list
srcCs|d>|BS)N� r)�start�endrrr
rsrcCs|d?|d@fS)Nrrll��r)�rrrr
�
_decode_range"srcCszt|d�}tj||�}|dkrNt||d�\}}||koD|knrNdS|t|�krvt||�\}}||krvdSdS)z=Determine if `int_` falls into one of the ranges in `ranges`.rrTF)r�bisectZbisect_leftrr)Zint_r
Ztuple_�pos�left�right�_rrr
�intranges_contain&s
r)�__doc__rrrrrrrrr
�<module>s
_vendor/idna/__pycache__/core.cpython-36.pyc000064400000021303151733136500014704 0ustar003

�Pf~,�@s>ddlmZddlZddlZddlZddlZddlmZdZdZ	ej
d�Zejddkr`e
ZeZGd	d
�d
e�ZGdd�de�ZGd
d�de�ZGdd�de�Zdd�Zdd�Zdd�Zdd�Zdd�Zdd�Zd7dd�Zd d!�Zd"d#�Zd$d%�Zd&d'�Z d8d(d)�Z!d*d+�Z"d,d-�Z#d.d/�Z$d9d1d2�Z%d:d3d4�Z&d;d5d6�Z'dS)<�)�idnadata�N)�intranges_contain�	sxn--u[.。.。]�c@seZdZdZdS)�	IDNAErrorz7 Base exception for all IDNA-encoding related problems N)�__name__�
__module__�__qualname__�__doc__�rr�/usr/lib/python3.6/core.pyrsrc@seZdZdZdS)�
IDNABidiErrorz= Exception when bidirectional requirements are not satisfied N)rr	r
rrrrr
rsrc@seZdZdZdS)�InvalidCodepointz> Exception when a disallowed or unallocated codepoint is used N)rr	r
rrrrr
rsrc@seZdZdZdS)�InvalidCodepointContextzE Exception when the codepoint is not valid in the context it is used N)rr	r
rrrrr
rsrcCstjt|��S)N)�unicodedataZ	combining�unichr)�cprrr
�_combining_class$srcCstt|�tj|�S)N)r�ordr�scripts)rZscriptrrr
�
_is_script'srcCs
|jd�S)N�punycode)�encode)�srrr
�	_punycode*srcCs
dj|�S)Nz	U+{0:04X})�format)rrrr
�_unot-srcCst|�dkrdSdS)N�?FT)�len)�labelrrr
�valid_label_length1sr!cCst|�|rdndkrdSdS)N��FT)r)r �trailing_dotrrr
�valid_string_length8sr%Fc	Cspd}xJt|d�D]<\}}tj|�}|dkr>tdjt|�|���|dkrd}PqW|r`|r`dStj|d	�}|dkr|d}n |d
kr�d}ntdjt|����d}d}x�t|d�D]�\}}tj|�}|�r&|dkr�tdj|���|dk�r�d}n|dk�r�d}|dk�rZ|�s|}n||k�rZtd��q�|dk�r>tdj|���|dk�rNd}q�|dkr�d}q�W|�sltd��dS)NFr�z3Unknown directionality in label {0} at position {1}�R�AL�ANTr�Lz>First codepoint in label {0} must be directionality L, R or AL�EN�ES�CS�ET�ON�BN�NSMzHInvalid direction for codepoint at position {0} in a right-to-left labelz2Can not mix numeral types in a right-to-left labelzHInvalid direction for codepoint at position {0} in a left-to-right labelz0Label ends with illegal codepoint directionality)r'r(r))r'r()
r'r(r)r+r,r-r.r/r0r1)r'r(r+r))r)r+)r*r+r,r-r.r/r0r1)r*r+)�	enumeraterZ
bidirectionalrr�repr)	r Z	check_ltrZ
bidi_label�idxr�	directionZrtlZvalid_endingZnumber_typerrr
�
check_bidi?sR








r6cCs"tj|d�ddkrtd��dS)Nr�Mz0Label begins with an illegal combining characterT)r�categoryr)r rrr
�check_initial_combiner|sr9cCs<|dd�dkrtd��|ddks0|d
dkr8td��d	S)N��z--z4Label has disallowed hyphens in 3rd and 4th positionr�-rz)Label must not start or end with a hyphenT���)r)r rrr
�check_hyphen_ok�s
r>cCstjd|�|krtd��dS)N�NFCz%Label must be in Normalization Form C)r�	normalizer)r rrr
�	check_nfc�srAcCs:t||�}|dk�r�|dkr:tt||d��tkr:dSd}xTt|ddd�D]@}tjjt||��}|td�krvqP|td�td�gkrPd}PqPW|s�dSd}xVt|dt|��D]@}tjjt||��}|td�kr�q�|td	�td�gkr�d}Pq�W|S|d
k�r2|dk�r.tt||d��tk�r.dSdSdSdS)
Ni rrTF�Tr*�Dr'i
 r=r=)rr�_virama_combining_class�rangerZ
joining_types�getr)r �pos�cp_value�ok�iZjoining_typerrr
�valid_contextj�s<


rKcCs�t||�}|dkrdd|ko.t|�dknr`t||d�dkr`t||d�dkr`dSdS|dkr�|t|�dkr�t|�dkr�t||dd�SdS|d	ks�|d
kr�|dkr�t||dd�SdS|dk�rx<|D]4}|d
kr�q�t|d��st|d��st|d�r�dSq�WdSd|k�o,dkn�rlx2|D]*}dt|�k�oVdkn�r:dS�q:WdSd|k�o�dkn�r�x2|D]*}dt|�k�o�dkn�r�dS�q�WdSdS)N�rr�lTFiuZGreeki�i�ZHebrewi�0u・ZHiraganaZKatakanaZHani`iii�i�)rrr)r rGZ	exceptionrHrrrr
�valid_contexto�s> (

"
 

 
rNcCst|ttf�r|jd�}t|�dkr,td��t|�t|�t|�x�t	|�D]�\}}t
|�}t|tj
d�rrqNqNt|tj
d�r�t||�s�tdjt|�|dt|����qNt|tj
d�r�t||�s�td	jt|�|dt|����qNtd
jt|�|dt|����qNWt|�dS)Nzutf-8rzEmpty LabelZPVALIDZCONTEXTJz-Joiner {0} not allowed at position {1} in {2}rZCONTEXTOz0Codepoint {0} not allowed at position {1} in {2}z0Codepoint {0} at position {1} of {2} not allowed)�
isinstance�bytes�	bytearray�decoderrrAr>r9r2rrrZcodepoint_classesrKrrrr3rNrr6)r rGrrHrrr
�check_label�s&

 
 "rScCs�yN|jd�}yt|�Wn"tk
r:tdj|���YnXt|�sLtd��|Stk
rbYnX|sptd��t|�}t|�t|�}t	|}t|�s�td��|S)N�asciiz$The label {0} is not a valid A-labelzLabel too longzNo Input)
r�ulabelrrr!�UnicodeEncodeError�unicoderSr�_alabel_prefix)r rrr
�alabels(
rYcCs�t|ttf�s:y|jd�}Wntk
r8t|�|SX|j�}|jt�r^|t	t�d�}nt|�|j
d�S|j
d�}t|�|S)NrTr)rOrPrQrrVrS�lower�
startswithrXrrR)r rrr
rUs


rUTcCs,ddlm}d}y�x�t|�D]�\}}t|�}||dkr:|ntj||df�d}|d}	t|�dkrl|dnd}
|	d	ks�|	d
kr�|s�|	dkr�|r�|
dkr�||7}q|
dk	r�|	dks�|	dkr�|s�|	d
kr�|r�||
7}q|	d
krt��qWtjd|�Stk
�r&t	dj
t|�|dt|����YnXdS)zBRe-map the characters in the string according to UTS46 processing.r)�	uts46datar&��Zrr:N�VrC�3r7�Ir?z0Codepoint {0} not allowed at position {1} in {2})
r\r2r�bisectZbisect_leftr�
IndexErrorrr@rrrr3)Zdomain�
std3_rules�transitionalr\�outputrG�charZ
code_pointZuts46rowZstatusZreplacementrrr
�uts46_remap3s0


rhc	Cs�t|ttf�r|jd�}|r(t|||�}d}g}|r@|jd�}n
tj|�}x|rb|drb|d=qLW|sptd��|ddkr�|d
=d}x|D]}|jt	|��q�W|r�|jd	�d
j
|�}t||�s�td��|S)NrTF�.rzEmpty domainrr&T��.zDomain too longr=r=)rOrPrQrRrh�split�_unicode_dots_rer�appendrY�joinr%)	r�strict�uts46rdrer$�result�labelsr rrr
rOs0






rcCs�t|ttf�r|jd�}|r(t||d�}d}g}|s@tj|�}n
|jd�}x|rb|drb|d=qLW|sptd��|d	s�|d
=d}x|D]}|jt	|��q�W|r�|jd�dj
|�S)NrTFrirzEmpty domainrTr&r=r=)rOrPrQrRrhrmrlrrnrUro)rrprqrdr$rrrsr rrr
rRls*




rR)F)F)TF)FFFF)FFF)(r&rrbr�re�sysZ	intrangesrrDrX�compilerm�version_info�strrW�chrr�UnicodeErrorrrrrrrrrr!r%r6r9r>rArKrNrSrYrUrhrrRrrrr
�<module>sB

=	,
)

_vendor/idna/__pycache__/__init__.cpython-36.pyc000064400000000266151733136500015520 0ustar003

�Pf:�@sddlmZddlTdS)�)�__version__)�*N)Zpackage_datarZcore�rr�/usr/lib/python3.6/__init__.py�<module>s_vendor/idna/__pycache__/compat.cpython-36.opt-1.pyc000064400000001036151733136500016177 0ustar003

�Pf��@s,ddlTddlTdd�Zdd�Zdd�ZdS)	�)�*cCst|�S)N)�encode)�label�r�/usr/lib/python3.6/compat.py�ToASCIIsrcCst|�S)N)�decode)rrrr�	ToUnicodesr	cCstd��dS)Nz,IDNA 2008 does not utilise nameprep protocol)�NotImplementedError)�srrr�nameprep
srN)Zcore�codecrr	rrrrr�<module>s_vendor/idna/__pycache__/package_data.cpython-36.opt-1.pyc000064400000000210151733136500017271 0ustar003

�Pf�@sdZdS)z2.6N)�__version__�rr�"/usr/lib/python3.6/package_data.py�<module>s_vendor/idna/__pycache__/compat.cpython-36.pyc000064400000001036151733136500015240 0ustar003

�Pf��@s,ddlTddlTdd�Zdd�Zdd�ZdS)	�)�*cCst|�S)N)�encode)�label�r�/usr/lib/python3.6/compat.py�ToASCIIsrcCst|�S)N)�decode)rrrr�	ToUnicodesr	cCstd��dS)Nz,IDNA 2008 does not utilise nameprep protocol)�NotImplementedError)�srrr�nameprep
srN)Zcore�codecrr	rrrrr�<module>s_vendor/idna/__pycache__/__init__.cpython-36.opt-1.pyc000064400000000266151733136500016457 0ustar003

�Pf:�@sddlmZddlTdS)�)�__version__)�*N)Zpackage_datarZcore�rr�/usr/lib/python3.6/__init__.py�<module>s_vendor/idna/__pycache__/idnadata.cpython-36.pyc000064400000057557151733136500015545 0ustar003

�Pf���@s0dZ�d �d!�d"�d#�d$dF�ZdGdGdGdGdGdGdGdHdGdIdIdIdIdHdIdHdIdHdHdHdHdHdIdIdIdIdHdHdHdHdHdHdHdHdHdHdHdHdHdJdHdHdHdHdHdHdHdIdHdHdHdHdIdIdIdGdIdIdIdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdIdIdIdIdIdIdIdIdIdIdIdIdIdIdIdIdIdIdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdIdHdHdIdIdIdIdIdIdIdIdIdHdIdHdIdHdHdIdIdIdGdIdIdHdHdHdHdIdHdHdHdIdIdIdIdIdHdHdHdHdIdHdHdHdHdHdHdHdHdHdIdHdIdHdIdHdHdIdIdHdHdHdHdHdHdHdHdHdHdHdIdIdIdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdIdIdHdHdHdHdIdHdIdIdHdHdHdIdIdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdJdIdHdHdHdHdHdIdHdHdIdHdHdHdHdHdIdHdHdHdHdIdHdGdGdGdHdHdHdHdHdHdHdHdHdIdIdIdGdHdJdGdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdGdGdGdGdGdGdGdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdGdJdGdGdGdGdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdHdKdGdL���Z�d%�d&�d'�d�Z�dS((z6.3.0�t�
�x�
�~�
������ ��0��8������+�t�btu�k�u���v�|�`|�F�|�N }�X@}�Zd}�\l}�^t}�~|}��~���~����X��t�������'!����FRH��.:��.l:��/<�0@�0@�*0�@�<0�@��MP��8�nzd��z�i��&�57�8]�z`��D��@����7{tl�={�l�?{�l�B{m�E{m�P{m��0A��0tB�0@
�rH��0�B�1�C�2�G��2@K�X3L�p�}���}�0@
)ZGreekZHanZHebrewZHiraganaZKatakana�U�D�R�C�L(�iiiiiiii i!i"i#i$i%i&i'i(i)i*i+i,i-i.i/i0i1i2i3i4i5i6i7i8i9i:i;i<i=i>i?i@iAiBiCiDiEiFiGiHiIiJinioiqirisitiuiviwixiyizi{i|i}i~ii�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�iiiiiiiiiiiiiiii i!i"i#i$i%i&i'i(i)i*i+i,i-i.i/iMiNiOiPiQiRiSiTiUiViWiXiYiZi[i\i]i^i_i`iaibicidieifigihiiijikiliminioipiqirisitiuiviwixiyizi{i|i}i~ii�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i@iAiBiCiDiEiFiGiHiIiJiKiLiMiNiOiPiQiRiSiTiUiViWiXi�i�i�i�i�i�i�i�i�i�i�i�iii
ii i!i"i#i$i%i&i'i(i)i*i+i,i-i.i/i0i1i2i3i4i5i6i7i8i9i:i;i<i=i>i?i@iAiBiCiDiEiFiGiHiIiJiKiLiMiNiOiPiQiRiSiTiUiViWiXiYiZi[i\i]i^i_i`iaibicidieifigihiiijikiliminioipiqirisitiuiviwi�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i�i i
 if ig ih ii i@�iA�iB�iC�iD�iE�iF�iG�iH�iI�iJ�iK�iL�iM�iN�iO�iP�iQ�iR�iS�iT�iU�iV�iW�iX�iY�iZ�i[�i\�i]�i^�i_�i`�ia�ib�ic�id�ie�if�ig�ih�ii�ij�ik�il�im�in�io�ip�iq�ir�is��.��:��{���|�������
$�,�4�<�D�L�T�\�d�l�t� |�"��$��&��(��*��,��.��0��2��6��9��;��=��?��C�E�G�I �L,�N4�P<�RD�TL�VT�X\�Zd�\l�^t�`|�b��d��f��h��j��l��n��p��r��t��v��x��{��}����������� ��0��H��T��d��x�����������������������������������8��@��H��P��X��`��h��p��|�������������������������������������������
$�,�4�<�D�L�T�\�d�l�t� |�"��$��&��(��*��,��.��0��2��:��=��A��C	�H	�J$	�L,	�N4	��<	���
���������@�C
�O
�p@
�r�
�t�
�x�
�~�
��@�����\��d��l��t��|����������������������������������`��b��d��f��h��j��l��n��p��r��t��v��x��z��|��~����������,��4��<��D��L��T��\��d��l��t��|�������������������������������������������������������� ��(��0��8��D��L��T��\��d��l��t��|����������������������������������������������������
$�,�4�<�D�L�T�\�d�l�t� |�"��$��&��(��Zd�����D�������������@�@��`�u������T��|������K@��4���. �\!���"���"���#�X	$�d	�%�p	�%�x	�%��	�%��	&��	&��	<&��	L&��	�&��	�&��	�&��	�&��	'��	,'��	\'��	�'��	�'�
(�
(�
<(�)
L(�1
�(�3
�(�6
�(�:
�(�=
�(�C
�(�I
)�N
,)�R
D)�]
p)�v
�)��
*��
*��
<*��
L*��
�*��
�*��
�*��
�*��
+��
,+��
@+��
�+��
�+�,�
,�<,�)L,�1�,�4�,�:�,�E�,�I-�N,-�XX-�d|-�p�-�r�-��.��.��8.��H.��d.��p.��x.���.���.���.���.��/��(/��@/��\/���/�0�
0�80�)H0�4�0�:�0�E�0�I1�N(1�WT1�Z`1�d�1�p�1��2��2��82��H2���2���2���2��3��(3��T3��x3���3���3���3�
4�

4�
84�;
H4�E
�4�I
5�O
(5�X
\5�d
�5�p
�5��
�5��
6��
6��
h6��
�6��
�6��
7��
(7��
<7��
X7��
`7��
�7�38�;�8�O9�Z@9��:��:��:��(:��4:��P:��d:���:���:���:���:���:���:���:��;��;�� ;��@;��x;�<�,<�`<�*�<�6�<�8�<�:�<�C�<�H=�M$=�R8=�WL=�\`=�it=�m�=�s�=�u�=���=��>��>��P>��d>��x>���>���>���>���>��?�J@��@A��@C��C�IH�N(I�W@I�Y`I�^hI���I��(J��@J���J���J��K��K�� K�`K�HL�[`L�`tM��N���N�mP���Y��Z���Z�
\�8\�5�\�T]�m�]�q�]�t�]��^���^��\_��p_���_�@`�x�`��b���b�d�,�d�<�d�ne�u�e��f���f��@g�h�_�h�}�i���i��@j���j�Ll�Z@m�t�m��n�8p�Jq�~4q��@s��Ps�,t�0�t�<�t�O8u�x�u���u��w��w�x�x�x�x�
$x�,x�4x�<x�Dx�Lx�Tx�\x�dx�lx�tx� |x�"�x�$�x�&�x�(�x�*�x�,�x�.�x�0�x�2�x�4�x�6�x�8�x�:�x�<�x�>�x�@�x�By�Dy�Fy�Hy�J$y�L,y�N4y�P<y�RDy�TLy�VTy�X\y�Zdy�\ly�^ty�`|y�b�y�d�y�f�y�h�y�j�y�l�y�n�y�p�y�r�y�t�y�v�y�x�y�z�y�|�y�~�y���y��z��z��z��z��$z��,z��4z��<z��Dz��Lz��Tz��pz��|z���z���z���z���z���z���z���z���z���z���z���z���z���z���z���z���z��{��{��{��{��${��,{��4{��<{��D{��L{��T{��\{��d{��l{��t{��|{���{���{���{���{���{���{���{���{���{���{���{���{���{���{���{��{�@|�(�|�8�|�F}�h�}�q�}�s�}�u�}�w�}�y�}�{�}�}�}���~���~����@��X����������O!8��!�_,�0�b,�1�g,�1�i,�1�k,�1�m,�1�r,�1�u,�1�|,�1��,2��,2��,2��,2��,$2��,,2��,42��,<2��,D2��,L2��,T2��,\2��,d2��,l2��,t2��,|2��,�2��,�2��,�2��,�2��,�2��,�2��,�2��,�2��,�2��,�2��,�2��,�2��,�2��,�2��,�2��,�2��,3��,3��,3��,3��,$3��,,3��,43��,<3��,D3��,L3��,T3��,\3��,d3��,l3��,t3��,|3��,�3��,�3��,�3��,�3��,�3�&-4�(-�4�.-�4�h-�4��-�5��-�6��-�6��-�6��-�6��-7��- 7��-@7��-`7�.�7�0.�8�0@�.0�@�=0�@��0dB��0tB��0�C�.1D��1�F��$��$@�
&�,&@�B&�D&�F&�H&�J&$�L&,�N&4�P&<�R&D�T&L�V&T�X&\�Z&d�\&l�^&t�`&|�b&��d&��f&��h&��j&��l&��p&��~&���&���&��&��&��&��&$��&,��&4��&<��&D��&L��&T��&\��&|��&�� '\�$'��&'��('��*'��,'��.'��2'��4'��6'��8'��:'��<'��>'��@'��B'�D'�F'�H'�J'$�L',�N'4�P'<�R'D�T'L�V'T�X'\�Z'd�\'l�^'t�`'|�b'��d'��f'��h'��j'��l'��n'��p'��y'��{'��}'���'���'��'��'��'��'0��'8��'D��'L��'���'���'���'���'��((��t(!��("��(@#��(�#��(�#�.)$�T)�$��)&��)<'�7*(�N*)�Z*@)�w*�)�|*�)��**��*l+��*�+��*�+�+,�+$,�+D,�'+�,�/+�,��+/��+�/��+�/��W0�z8h�zDh�zLh� z|h�"z�h�%z�h�*z�h�{xl�'~�x�t~�y��'4�;��>��N��^@�������
���
��A��J
������� ������� �	  �6( �9� �=� �V� �	$�:	�$��	&��	�&�
(�
(�
0(�
T(�4
d(�;
�(�@
�(�}
�)�6,�V-�s�-�I0�G@�p�A��B��@C���C�5D�@�D��F��@G��Z��[�o#	�/4@	�9j �Eo<�o@=��o<>�0@
� 0����v�
����j������0�C)ZPVALIDZCONTEXTJZCONTEXTON)!rrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!)r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0)	r1r2r3r4r5r6r7r8r9)r:r;r<r=)r>r?r@rArBrCrDrE(�rKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r2r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLrMrNrOrPrQrRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrrrrrrrrrrrrr r!r"r#r$r%r&r'r(r)r*r+r,r-r.r/r0r1r2r3r4r5r6r7r8r9r:r;r<r=r>r?r@rArBrCrDrErFrGrHrIrJrKrLr:rMrNr>rOrPrQr@r)r*rRrSrTrUrVrWrXrYrZr[r\r]r^r_r`rarbrcrdrerfrgrhrirjrkrlrmrnrorprqrrrsrtrurvrwrxryrzr{r|r}r~rr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�rrrrrrrrrr	r
rrr
rrrrrrrr-r.r/)r)rrrrrr)�__version__�scriptsZ
joining_typesZcodepoint_classes�rr�/usr/lib/python3.6/idnadata.py�<module>s0

_vendor/idna/__pycache__/package_data.cpython-36.pyc000064400000000210151733136500016332 0ustar003

�Pf�@sdZdS)z2.6N)�__version__�rr�"/usr/lib/python3.6/package_data.py�<module>s_vendor/idna/__pycache__/core.cpython-36.opt-1.pyc000064400000021303151733136500015643 0ustar003

�Pf~,�@s>ddlmZddlZddlZddlZddlZddlmZdZdZ	ej
d�Zejddkr`e
ZeZGd	d
�d
e�ZGdd�de�ZGd
d�de�ZGdd�de�Zdd�Zdd�Zdd�Zdd�Zdd�Zdd�Zd7dd�Zd d!�Zd"d#�Zd$d%�Zd&d'�Z d8d(d)�Z!d*d+�Z"d,d-�Z#d.d/�Z$d9d1d2�Z%d:d3d4�Z&d;d5d6�Z'dS)<�)�idnadata�N)�intranges_contain�	sxn--u[.。.。]�c@seZdZdZdS)�	IDNAErrorz7 Base exception for all IDNA-encoding related problems N)�__name__�
__module__�__qualname__�__doc__�rr�/usr/lib/python3.6/core.pyrsrc@seZdZdZdS)�
IDNABidiErrorz= Exception when bidirectional requirements are not satisfied N)rr	r
rrrrr
rsrc@seZdZdZdS)�InvalidCodepointz> Exception when a disallowed or unallocated codepoint is used N)rr	r
rrrrr
rsrc@seZdZdZdS)�InvalidCodepointContextzE Exception when the codepoint is not valid in the context it is used N)rr	r
rrrrr
rsrcCstjt|��S)N)�unicodedataZ	combining�unichr)�cprrr
�_combining_class$srcCstt|�tj|�S)N)r�ordr�scripts)rZscriptrrr
�
_is_script'srcCs
|jd�S)N�punycode)�encode)�srrr
�	_punycode*srcCs
dj|�S)Nz	U+{0:04X})�format)rrrr
�_unot-srcCst|�dkrdSdS)N�?FT)�len)�labelrrr
�valid_label_length1sr!cCst|�|rdndkrdSdS)N��FT)r)r �trailing_dotrrr
�valid_string_length8sr%Fc	Cspd}xJt|d�D]<\}}tj|�}|dkr>tdjt|�|���|dkrd}PqW|r`|r`dStj|d	�}|dkr|d}n |d
kr�d}ntdjt|����d}d}x�t|d�D]�\}}tj|�}|�r&|dkr�tdj|���|dk�r�d}n|dk�r�d}|dk�rZ|�s|}n||k�rZtd��q�|dk�r>tdj|���|dk�rNd}q�|dkr�d}q�W|�sltd��dS)NFr�z3Unknown directionality in label {0} at position {1}�R�AL�ANTr�Lz>First codepoint in label {0} must be directionality L, R or AL�EN�ES�CS�ET�ON�BN�NSMzHInvalid direction for codepoint at position {0} in a right-to-left labelz2Can not mix numeral types in a right-to-left labelzHInvalid direction for codepoint at position {0} in a left-to-right labelz0Label ends with illegal codepoint directionality)r'r(r))r'r()
r'r(r)r+r,r-r.r/r0r1)r'r(r+r))r)r+)r*r+r,r-r.r/r0r1)r*r+)�	enumeraterZ
bidirectionalrr�repr)	r Z	check_ltrZ
bidi_label�idxr�	directionZrtlZvalid_endingZnumber_typerrr
�
check_bidi?sR








r6cCs"tj|d�ddkrtd��dS)Nr�Mz0Label begins with an illegal combining characterT)r�categoryr)r rrr
�check_initial_combiner|sr9cCs<|dd�dkrtd��|ddks0|d
dkr8td��d	S)N��z--z4Label has disallowed hyphens in 3rd and 4th positionr�-rz)Label must not start or end with a hyphenT���)r)r rrr
�check_hyphen_ok�s
r>cCstjd|�|krtd��dS)N�NFCz%Label must be in Normalization Form C)r�	normalizer)r rrr
�	check_nfc�srAcCs:t||�}|dk�r�|dkr:tt||d��tkr:dSd}xTt|ddd�D]@}tjjt||��}|td�krvqP|td�td�gkrPd}PqPW|s�dSd}xVt|dt|��D]@}tjjt||��}|td�kr�q�|td	�td�gkr�d}Pq�W|S|d
k�r2|dk�r.tt||d��tk�r.dSdSdSdS)
Ni rrTF�Tr*�Dr'i
 r=r=)rr�_virama_combining_class�rangerZ
joining_types�getr)r �pos�cp_value�ok�iZjoining_typerrr
�valid_contextj�s<


rKcCs�t||�}|dkrdd|ko.t|�dknr`t||d�dkr`t||d�dkr`dSdS|dkr�|t|�dkr�t|�dkr�t||dd�SdS|d	ks�|d
kr�|dkr�t||dd�SdS|dk�rx<|D]4}|d
kr�q�t|d��st|d��st|d�r�dSq�WdSd|k�o,dkn�rlx2|D]*}dt|�k�oVdkn�r:dS�q:WdSd|k�o�dkn�r�x2|D]*}dt|�k�o�dkn�r�dS�q�WdSdS)N�rr�lTFiuZGreeki�i�ZHebrewi�0u・ZHiraganaZKatakanaZHani`iii�i�)rrr)r rGZ	exceptionrHrrrr
�valid_contexto�s> (

"
 

 
rNcCst|ttf�r|jd�}t|�dkr,td��t|�t|�t|�x�t	|�D]�\}}t
|�}t|tj
d�rrqNqNt|tj
d�r�t||�s�tdjt|�|dt|����qNt|tj
d�r�t||�s�td	jt|�|dt|����qNtd
jt|�|dt|����qNWt|�dS)Nzutf-8rzEmpty LabelZPVALIDZCONTEXTJz-Joiner {0} not allowed at position {1} in {2}rZCONTEXTOz0Codepoint {0} not allowed at position {1} in {2}z0Codepoint {0} at position {1} of {2} not allowed)�
isinstance�bytes�	bytearray�decoderrrAr>r9r2rrrZcodepoint_classesrKrrrr3rNrr6)r rGrrHrrr
�check_label�s&

 
 "rScCs�yN|jd�}yt|�Wn"tk
r:tdj|���YnXt|�sLtd��|Stk
rbYnX|sptd��t|�}t|�t|�}t	|}t|�s�td��|S)N�asciiz$The label {0} is not a valid A-labelzLabel too longzNo Input)
r�ulabelrrr!�UnicodeEncodeError�unicoderSr�_alabel_prefix)r rrr
�alabels(
rYcCs�t|ttf�s:y|jd�}Wntk
r8t|�|SX|j�}|jt�r^|t	t�d�}nt|�|j
d�S|j
d�}t|�|S)NrTr)rOrPrQrrVrS�lower�
startswithrXrrR)r rrr
rUs


rUTcCs,ddlm}d}y�x�t|�D]�\}}t|�}||dkr:|ntj||df�d}|d}	t|�dkrl|dnd}
|	d	ks�|	d
kr�|s�|	dkr�|r�|
dkr�||7}q|
dk	r�|	dks�|	dkr�|s�|	d
kr�|r�||
7}q|	d
krt��qWtjd|�Stk
�r&t	dj
t|�|dt|����YnXdS)zBRe-map the characters in the string according to UTS46 processing.r)�	uts46datar&��Zrr:N�VrC�3r7�Ir?z0Codepoint {0} not allowed at position {1} in {2})
r\r2r�bisectZbisect_leftr�
IndexErrorrr@rrrr3)Zdomain�
std3_rules�transitionalr\�outputrG�charZ
code_pointZuts46rowZstatusZreplacementrrr
�uts46_remap3s0


rhc	Cs�t|ttf�r|jd�}|r(t|||�}d}g}|r@|jd�}n
tj|�}x|rb|drb|d=qLW|sptd��|ddkr�|d
=d}x|D]}|jt	|��q�W|r�|jd	�d
j
|�}t||�s�td��|S)NrTF�.rzEmpty domainrr&T��.zDomain too longr=r=)rOrPrQrRrh�split�_unicode_dots_rer�appendrY�joinr%)	r�strict�uts46rdrer$�result�labelsr rrr
rOs0






rcCs�t|ttf�r|jd�}|r(t||d�}d}g}|s@tj|�}n
|jd�}x|rb|drb|d=qLW|sptd��|d	s�|d
=d}x|D]}|jt	|��q�W|r�|jd�dj
|�S)NrTFrirzEmpty domainrTr&r=r=)rOrPrQrRrhrmrlrrnrUro)rrprqrdr$rrrsr rrr
rRls*




rR)F)F)TF)FFFF)FFF)(r&rrbr�re�sysZ	intrangesrrDrX�compilerm�version_info�strrW�chrr�UnicodeErrorrrrrrrrrr!r%r6r9r>rArKrNrSrYrUrhrrRrrrr
�<module>sB

=	,
)

_vendor/idna/uts46data.py000064400000551160151733136500011320 0ustar00# This file is automatically generated by tools/idna-data
# vim: set fileencoding=utf-8 :

"""IDNA Mapping Table from UTS46."""


# Version of the Unicode data release this table was generated from
# (by tools/idna-data; do not edit the table by hand).
__version__ = "6.3.0"
def _seg_0():
    """Return rows 0-99 of the UTS46 mapping table.

    Each row is ``(codepoint, status)`` or ``(codepoint, status, mapping)``,
    ordered by codepoint; the segments are concatenated and bisected by the
    consumer.  Auto-generated data — do not edit by hand.
    """
    return [
    (0x0, '3'),
    (0x1, '3'),
    (0x2, '3'),
    (0x3, '3'),
    (0x4, '3'),
    (0x5, '3'),
    (0x6, '3'),
    (0x7, '3'),
    (0x8, '3'),
    (0x9, '3'),
    (0xA, '3'),
    (0xB, '3'),
    (0xC, '3'),
    (0xD, '3'),
    (0xE, '3'),
    (0xF, '3'),
    (0x10, '3'),
    (0x11, '3'),
    (0x12, '3'),
    (0x13, '3'),
    (0x14, '3'),
    (0x15, '3'),
    (0x16, '3'),
    (0x17, '3'),
    (0x18, '3'),
    (0x19, '3'),
    (0x1A, '3'),
    (0x1B, '3'),
    (0x1C, '3'),
    (0x1D, '3'),
    (0x1E, '3'),
    (0x1F, '3'),
    (0x20, '3'),
    (0x21, '3'),
    (0x22, '3'),
    (0x23, '3'),
    (0x24, '3'),
    (0x25, '3'),
    (0x26, '3'),
    (0x27, '3'),
    (0x28, '3'),
    (0x29, '3'),
    (0x2A, '3'),
    (0x2B, '3'),
    (0x2C, '3'),
    (0x2D, 'V'),
    (0x2E, 'V'),
    (0x2F, '3'),
    (0x30, 'V'),
    (0x31, 'V'),
    (0x32, 'V'),
    (0x33, 'V'),
    (0x34, 'V'),
    (0x35, 'V'),
    (0x36, 'V'),
    (0x37, 'V'),
    (0x38, 'V'),
    (0x39, 'V'),
    (0x3A, '3'),
    (0x3B, '3'),
    (0x3C, '3'),
    (0x3D, '3'),
    (0x3E, '3'),
    (0x3F, '3'),
    (0x40, '3'),
    (0x41, 'M', u'a'),
    (0x42, 'M', u'b'),
    (0x43, 'M', u'c'),
    (0x44, 'M', u'd'),
    (0x45, 'M', u'e'),
    (0x46, 'M', u'f'),
    (0x47, 'M', u'g'),
    (0x48, 'M', u'h'),
    (0x49, 'M', u'i'),
    (0x4A, 'M', u'j'),
    (0x4B, 'M', u'k'),
    (0x4C, 'M', u'l'),
    (0x4D, 'M', u'm'),
    (0x4E, 'M', u'n'),
    (0x4F, 'M', u'o'),
    (0x50, 'M', u'p'),
    (0x51, 'M', u'q'),
    (0x52, 'M', u'r'),
    (0x53, 'M', u's'),
    (0x54, 'M', u't'),
    (0x55, 'M', u'u'),
    (0x56, 'M', u'v'),
    (0x57, 'M', u'w'),
    (0x58, 'M', u'x'),
    (0x59, 'M', u'y'),
    (0x5A, 'M', u'z'),
    (0x5B, '3'),
    (0x5C, '3'),
    (0x5D, '3'),
    (0x5E, '3'),
    (0x5F, '3'),
    (0x60, '3'),
    (0x61, 'V'),
    (0x62, 'V'),
    (0x63, 'V'),
    ]

def _seg_1():
    """Return rows 100-199 of the UTS46 mapping table.

    Same row format as ``_seg_0``: ``(codepoint, status[, mapping])``,
    ordered by codepoint.  Auto-generated data — do not edit by hand.
    """
    return [
    (0x64, 'V'),
    (0x65, 'V'),
    (0x66, 'V'),
    (0x67, 'V'),
    (0x68, 'V'),
    (0x69, 'V'),
    (0x6A, 'V'),
    (0x6B, 'V'),
    (0x6C, 'V'),
    (0x6D, 'V'),
    (0x6E, 'V'),
    (0x6F, 'V'),
    (0x70, 'V'),
    (0x71, 'V'),
    (0x72, 'V'),
    (0x73, 'V'),
    (0x74, 'V'),
    (0x75, 'V'),
    (0x76, 'V'),
    (0x77, 'V'),
    (0x78, 'V'),
    (0x79, 'V'),
    (0x7A, 'V'),
    (0x7B, '3'),
    (0x7C, '3'),
    (0x7D, '3'),
    (0x7E, '3'),
    (0x7F, '3'),
    (0x80, 'X'),
    (0x81, 'X'),
    (0x82, 'X'),
    (0x83, 'X'),
    (0x84, 'X'),
    (0x85, 'X'),
    (0x86, 'X'),
    (0x87, 'X'),
    (0x88, 'X'),
    (0x89, 'X'),
    (0x8A, 'X'),
    (0x8B, 'X'),
    (0x8C, 'X'),
    (0x8D, 'X'),
    (0x8E, 'X'),
    (0x8F, 'X'),
    (0x90, 'X'),
    (0x91, 'X'),
    (0x92, 'X'),
    (0x93, 'X'),
    (0x94, 'X'),
    (0x95, 'X'),
    (0x96, 'X'),
    (0x97, 'X'),
    (0x98, 'X'),
    (0x99, 'X'),
    (0x9A, 'X'),
    (0x9B, 'X'),
    (0x9C, 'X'),
    (0x9D, 'X'),
    (0x9E, 'X'),
    (0x9F, 'X'),
    (0xA0, '3', u' '),
    (0xA1, 'V'),
    (0xA2, 'V'),
    (0xA3, 'V'),
    (0xA4, 'V'),
    (0xA5, 'V'),
    (0xA6, 'V'),
    (0xA7, 'V'),
    (0xA8, '3', u' ̈'),
    (0xA9, 'V'),
    (0xAA, 'M', u'a'),
    (0xAB, 'V'),
    (0xAC, 'V'),
    (0xAD, 'I'),
    (0xAE, 'V'),
    (0xAF, '3', u' ̄'),
    (0xB0, 'V'),
    (0xB1, 'V'),
    (0xB2, 'M', u'2'),
    (0xB3, 'M', u'3'),
    (0xB4, '3', u' ́'),
    (0xB5, 'M', u'μ'),
    (0xB6, 'V'),
    (0xB7, 'V'),
    (0xB8, '3', u' ̧'),
    (0xB9, 'M', u'1'),
    (0xBA, 'M', u'o'),
    (0xBB, 'V'),
    (0xBC, 'M', u'1⁄4'),
    (0xBD, 'M', u'1⁄2'),
    (0xBE, 'M', u'3⁄4'),
    (0xBF, 'V'),
    (0xC0, 'M', u'à'),
    (0xC1, 'M', u'á'),
    (0xC2, 'M', u'â'),
    (0xC3, 'M', u'ã'),
    (0xC4, 'M', u'ä'),
    (0xC5, 'M', u'å'),
    (0xC6, 'M', u'æ'),
    (0xC7, 'M', u'ç'),
    ]

def _seg_2():
    """Return rows 200-299 of the UTS46 mapping table.

    Same row format as ``_seg_0``: ``(codepoint, status[, mapping])``,
    ordered by codepoint.  Auto-generated data — do not edit by hand.
    """
    return [
    (0xC8, 'M', u'è'),
    (0xC9, 'M', u'é'),
    (0xCA, 'M', u'ê'),
    (0xCB, 'M', u'ë'),
    (0xCC, 'M', u'ì'),
    (0xCD, 'M', u'í'),
    (0xCE, 'M', u'î'),
    (0xCF, 'M', u'ï'),
    (0xD0, 'M', u'ð'),
    (0xD1, 'M', u'ñ'),
    (0xD2, 'M', u'ò'),
    (0xD3, 'M', u'ó'),
    (0xD4, 'M', u'ô'),
    (0xD5, 'M', u'õ'),
    (0xD6, 'M', u'ö'),
    (0xD7, 'V'),
    (0xD8, 'M', u'ø'),
    (0xD9, 'M', u'ù'),
    (0xDA, 'M', u'ú'),
    (0xDB, 'M', u'û'),
    (0xDC, 'M', u'ü'),
    (0xDD, 'M', u'ý'),
    (0xDE, 'M', u'þ'),
    (0xDF, 'D', u'ss'),
    (0xE0, 'V'),
    (0xE1, 'V'),
    (0xE2, 'V'),
    (0xE3, 'V'),
    (0xE4, 'V'),
    (0xE5, 'V'),
    (0xE6, 'V'),
    (0xE7, 'V'),
    (0xE8, 'V'),
    (0xE9, 'V'),
    (0xEA, 'V'),
    (0xEB, 'V'),
    (0xEC, 'V'),
    (0xED, 'V'),
    (0xEE, 'V'),
    (0xEF, 'V'),
    (0xF0, 'V'),
    (0xF1, 'V'),
    (0xF2, 'V'),
    (0xF3, 'V'),
    (0xF4, 'V'),
    (0xF5, 'V'),
    (0xF6, 'V'),
    (0xF7, 'V'),
    (0xF8, 'V'),
    (0xF9, 'V'),
    (0xFA, 'V'),
    (0xFB, 'V'),
    (0xFC, 'V'),
    (0xFD, 'V'),
    (0xFE, 'V'),
    (0xFF, 'V'),
    (0x100, 'M', u'ā'),
    (0x101, 'V'),
    (0x102, 'M', u'ă'),
    (0x103, 'V'),
    (0x104, 'M', u'ą'),
    (0x105, 'V'),
    (0x106, 'M', u'ć'),
    (0x107, 'V'),
    (0x108, 'M', u'ĉ'),
    (0x109, 'V'),
    (0x10A, 'M', u'ċ'),
    (0x10B, 'V'),
    (0x10C, 'M', u'č'),
    (0x10D, 'V'),
    (0x10E, 'M', u'ď'),
    (0x10F, 'V'),
    (0x110, 'M', u'đ'),
    (0x111, 'V'),
    (0x112, 'M', u'ē'),
    (0x113, 'V'),
    (0x114, 'M', u'ĕ'),
    (0x115, 'V'),
    (0x116, 'M', u'ė'),
    (0x117, 'V'),
    (0x118, 'M', u'ę'),
    (0x119, 'V'),
    (0x11A, 'M', u'ě'),
    (0x11B, 'V'),
    (0x11C, 'M', u'ĝ'),
    (0x11D, 'V'),
    (0x11E, 'M', u'ğ'),
    (0x11F, 'V'),
    (0x120, 'M', u'ġ'),
    (0x121, 'V'),
    (0x122, 'M', u'ģ'),
    (0x123, 'V'),
    (0x124, 'M', u'ĥ'),
    (0x125, 'V'),
    (0x126, 'M', u'ħ'),
    (0x127, 'V'),
    (0x128, 'M', u'ĩ'),
    (0x129, 'V'),
    (0x12A, 'M', u'ī'),
    (0x12B, 'V'),
    ]

def _seg_3():
    """Return rows 300-399 of the UTS46 mapping table.

    Same row format as ``_seg_0``: ``(codepoint, status[, mapping])``,
    ordered by codepoint.  Auto-generated data — do not edit by hand.
    """
    return [
    (0x12C, 'M', u'ĭ'),
    (0x12D, 'V'),
    (0x12E, 'M', u'į'),
    (0x12F, 'V'),
    (0x130, 'M', u'i̇'),
    (0x131, 'V'),
    (0x132, 'M', u'ij'),
    (0x134, 'M', u'ĵ'),
    (0x135, 'V'),
    (0x136, 'M', u'ķ'),
    (0x137, 'V'),
    (0x139, 'M', u'ĺ'),
    (0x13A, 'V'),
    (0x13B, 'M', u'ļ'),
    (0x13C, 'V'),
    (0x13D, 'M', u'ľ'),
    (0x13E, 'V'),
    (0x13F, 'M', u'l·'),
    (0x141, 'M', u'ł'),
    (0x142, 'V'),
    (0x143, 'M', u'ń'),
    (0x144, 'V'),
    (0x145, 'M', u'ņ'),
    (0x146, 'V'),
    (0x147, 'M', u'ň'),
    (0x148, 'V'),
    (0x149, 'M', u'ʼn'),
    (0x14A, 'M', u'ŋ'),
    (0x14B, 'V'),
    (0x14C, 'M', u'ō'),
    (0x14D, 'V'),
    (0x14E, 'M', u'ŏ'),
    (0x14F, 'V'),
    (0x150, 'M', u'ő'),
    (0x151, 'V'),
    (0x152, 'M', u'œ'),
    (0x153, 'V'),
    (0x154, 'M', u'ŕ'),
    (0x155, 'V'),
    (0x156, 'M', u'ŗ'),
    (0x157, 'V'),
    (0x158, 'M', u'ř'),
    (0x159, 'V'),
    (0x15A, 'M', u'ś'),
    (0x15B, 'V'),
    (0x15C, 'M', u'ŝ'),
    (0x15D, 'V'),
    (0x15E, 'M', u'ş'),
    (0x15F, 'V'),
    (0x160, 'M', u'š'),
    (0x161, 'V'),
    (0x162, 'M', u'ţ'),
    (0x163, 'V'),
    (0x164, 'M', u'ť'),
    (0x165, 'V'),
    (0x166, 'M', u'ŧ'),
    (0x167, 'V'),
    (0x168, 'M', u'ũ'),
    (0x169, 'V'),
    (0x16A, 'M', u'ū'),
    (0x16B, 'V'),
    (0x16C, 'M', u'ŭ'),
    (0x16D, 'V'),
    (0x16E, 'M', u'ů'),
    (0x16F, 'V'),
    (0x170, 'M', u'ű'),
    (0x171, 'V'),
    (0x172, 'M', u'ų'),
    (0x173, 'V'),
    (0x174, 'M', u'ŵ'),
    (0x175, 'V'),
    (0x176, 'M', u'ŷ'),
    (0x177, 'V'),
    (0x178, 'M', u'ÿ'),
    (0x179, 'M', u'ź'),
    (0x17A, 'V'),
    (0x17B, 'M', u'ż'),
    (0x17C, 'V'),
    (0x17D, 'M', u'ž'),
    (0x17E, 'V'),
    (0x17F, 'M', u's'),
    (0x180, 'V'),
    (0x181, 'M', u'ɓ'),
    (0x182, 'M', u'ƃ'),
    (0x183, 'V'),
    (0x184, 'M', u'ƅ'),
    (0x185, 'V'),
    (0x186, 'M', u'ɔ'),
    (0x187, 'M', u'ƈ'),
    (0x188, 'V'),
    (0x189, 'M', u'ɖ'),
    (0x18A, 'M', u'ɗ'),
    (0x18B, 'M', u'ƌ'),
    (0x18C, 'V'),
    (0x18E, 'M', u'ǝ'),
    (0x18F, 'M', u'ə'),
    (0x190, 'M', u'ɛ'),
    (0x191, 'M', u'ƒ'),
    (0x192, 'V'),
    (0x193, 'M', u'ɠ'),
    ]

def _seg_4():
    """Return rows 400-499 of the UTS46 mapping table.

    Same row format as ``_seg_0``: ``(codepoint, status[, mapping])``,
    ordered by codepoint.  Auto-generated data — do not edit by hand.
    """
    return [
    (0x194, 'M', u'ɣ'),
    (0x195, 'V'),
    (0x196, 'M', u'ɩ'),
    (0x197, 'M', u'ɨ'),
    (0x198, 'M', u'ƙ'),
    (0x199, 'V'),
    (0x19C, 'M', u'ɯ'),
    (0x19D, 'M', u'ɲ'),
    (0x19E, 'V'),
    (0x19F, 'M', u'ɵ'),
    (0x1A0, 'M', u'ơ'),
    (0x1A1, 'V'),
    (0x1A2, 'M', u'ƣ'),
    (0x1A3, 'V'),
    (0x1A4, 'M', u'ƥ'),
    (0x1A5, 'V'),
    (0x1A6, 'M', u'ʀ'),
    (0x1A7, 'M', u'ƨ'),
    (0x1A8, 'V'),
    (0x1A9, 'M', u'ʃ'),
    (0x1AA, 'V'),
    (0x1AC, 'M', u'ƭ'),
    (0x1AD, 'V'),
    (0x1AE, 'M', u'ʈ'),
    (0x1AF, 'M', u'ư'),
    (0x1B0, 'V'),
    (0x1B1, 'M', u'ʊ'),
    (0x1B2, 'M', u'ʋ'),
    (0x1B3, 'M', u'ƴ'),
    (0x1B4, 'V'),
    (0x1B5, 'M', u'ƶ'),
    (0x1B6, 'V'),
    (0x1B7, 'M', u'ʒ'),
    (0x1B8, 'M', u'ƹ'),
    (0x1B9, 'V'),
    (0x1BC, 'M', u'ƽ'),
    (0x1BD, 'V'),
    (0x1C4, 'M', u'dž'),
    (0x1C7, 'M', u'lj'),
    (0x1CA, 'M', u'nj'),
    (0x1CD, 'M', u'ǎ'),
    (0x1CE, 'V'),
    (0x1CF, 'M', u'ǐ'),
    (0x1D0, 'V'),
    (0x1D1, 'M', u'ǒ'),
    (0x1D2, 'V'),
    (0x1D3, 'M', u'ǔ'),
    (0x1D4, 'V'),
    (0x1D5, 'M', u'ǖ'),
    (0x1D6, 'V'),
    (0x1D7, 'M', u'ǘ'),
    (0x1D8, 'V'),
    (0x1D9, 'M', u'ǚ'),
    (0x1DA, 'V'),
    (0x1DB, 'M', u'ǜ'),
    (0x1DC, 'V'),
    (0x1DE, 'M', u'ǟ'),
    (0x1DF, 'V'),
    (0x1E0, 'M', u'ǡ'),
    (0x1E1, 'V'),
    (0x1E2, 'M', u'ǣ'),
    (0x1E3, 'V'),
    (0x1E4, 'M', u'ǥ'),
    (0x1E5, 'V'),
    (0x1E6, 'M', u'ǧ'),
    (0x1E7, 'V'),
    (0x1E8, 'M', u'ǩ'),
    (0x1E9, 'V'),
    (0x1EA, 'M', u'ǫ'),
    (0x1EB, 'V'),
    (0x1EC, 'M', u'ǭ'),
    (0x1ED, 'V'),
    (0x1EE, 'M', u'ǯ'),
    (0x1EF, 'V'),
    (0x1F1, 'M', u'dz'),
    (0x1F4, 'M', u'ǵ'),
    (0x1F5, 'V'),
    (0x1F6, 'M', u'ƕ'),
    (0x1F7, 'M', u'ƿ'),
    (0x1F8, 'M', u'ǹ'),
    (0x1F9, 'V'),
    (0x1FA, 'M', u'ǻ'),
    (0x1FB, 'V'),
    (0x1FC, 'M', u'ǽ'),
    (0x1FD, 'V'),
    (0x1FE, 'M', u'ǿ'),
    (0x1FF, 'V'),
    (0x200, 'M', u'ȁ'),
    (0x201, 'V'),
    (0x202, 'M', u'ȃ'),
    (0x203, 'V'),
    (0x204, 'M', u'ȅ'),
    (0x205, 'V'),
    (0x206, 'M', u'ȇ'),
    (0x207, 'V'),
    (0x208, 'M', u'ȉ'),
    (0x209, 'V'),
    (0x20A, 'M', u'ȋ'),
    (0x20B, 'V'),
    (0x20C, 'M', u'ȍ'),
    ]

def _seg_5():
    """Return rows 500-599 of the UTS46 mapping table.

    Same row format as ``_seg_0``: ``(codepoint, status[, mapping])``,
    ordered by codepoint.  Auto-generated data — do not edit by hand.
    """
    return [
    (0x20D, 'V'),
    (0x20E, 'M', u'ȏ'),
    (0x20F, 'V'),
    (0x210, 'M', u'ȑ'),
    (0x211, 'V'),
    (0x212, 'M', u'ȓ'),
    (0x213, 'V'),
    (0x214, 'M', u'ȕ'),
    (0x215, 'V'),
    (0x216, 'M', u'ȗ'),
    (0x217, 'V'),
    (0x218, 'M', u'ș'),
    (0x219, 'V'),
    (0x21A, 'M', u'ț'),
    (0x21B, 'V'),
    (0x21C, 'M', u'ȝ'),
    (0x21D, 'V'),
    (0x21E, 'M', u'ȟ'),
    (0x21F, 'V'),
    (0x220, 'M', u'ƞ'),
    (0x221, 'V'),
    (0x222, 'M', u'ȣ'),
    (0x223, 'V'),
    (0x224, 'M', u'ȥ'),
    (0x225, 'V'),
    (0x226, 'M', u'ȧ'),
    (0x227, 'V'),
    (0x228, 'M', u'ȩ'),
    (0x229, 'V'),
    (0x22A, 'M', u'ȫ'),
    (0x22B, 'V'),
    (0x22C, 'M', u'ȭ'),
    (0x22D, 'V'),
    (0x22E, 'M', u'ȯ'),
    (0x22F, 'V'),
    (0x230, 'M', u'ȱ'),
    (0x231, 'V'),
    (0x232, 'M', u'ȳ'),
    (0x233, 'V'),
    (0x23A, 'M', u'ⱥ'),
    (0x23B, 'M', u'ȼ'),
    (0x23C, 'V'),
    (0x23D, 'M', u'ƚ'),
    (0x23E, 'M', u'ⱦ'),
    (0x23F, 'V'),
    (0x241, 'M', u'ɂ'),
    (0x242, 'V'),
    (0x243, 'M', u'ƀ'),
    (0x244, 'M', u'ʉ'),
    (0x245, 'M', u'ʌ'),
    (0x246, 'M', u'ɇ'),
    (0x247, 'V'),
    (0x248, 'M', u'ɉ'),
    (0x249, 'V'),
    (0x24A, 'M', u'ɋ'),
    (0x24B, 'V'),
    (0x24C, 'M', u'ɍ'),
    (0x24D, 'V'),
    (0x24E, 'M', u'ɏ'),
    (0x24F, 'V'),
    (0x2B0, 'M', u'h'),
    (0x2B1, 'M', u'ɦ'),
    (0x2B2, 'M', u'j'),
    (0x2B3, 'M', u'r'),
    (0x2B4, 'M', u'ɹ'),
    (0x2B5, 'M', u'ɻ'),
    (0x2B6, 'M', u'ʁ'),
    (0x2B7, 'M', u'w'),
    (0x2B8, 'M', u'y'),
    (0x2B9, 'V'),
    (0x2D8, '3', u' ̆'),
    (0x2D9, '3', u' ̇'),
    (0x2DA, '3', u' ̊'),
    (0x2DB, '3', u' ̨'),
    (0x2DC, '3', u' ̃'),
    (0x2DD, '3', u' ̋'),
    (0x2DE, 'V'),
    (0x2E0, 'M', u'ɣ'),
    (0x2E1, 'M', u'l'),
    (0x2E2, 'M', u's'),
    (0x2E3, 'M', u'x'),
    (0x2E4, 'M', u'ʕ'),
    (0x2E5, 'V'),
    (0x340, 'M', u'̀'),
    (0x341, 'M', u'́'),
    (0x342, 'V'),
    (0x343, 'M', u'̓'),
    (0x344, 'M', u'̈́'),
    (0x345, 'M', u'ι'),
    (0x346, 'V'),
    (0x34F, 'I'),
    (0x350, 'V'),
    (0x370, 'M', u'ͱ'),
    (0x371, 'V'),
    (0x372, 'M', u'ͳ'),
    (0x373, 'V'),
    (0x374, 'M', u'ʹ'),
    (0x375, 'V'),
    (0x376, 'M', u'ͷ'),
    (0x377, 'V'),
    ]

def _seg_6():
    """Return rows 600-699 of the UTS46 mapping table.

    Same row format as ``_seg_0``: ``(codepoint, status[, mapping])``,
    ordered by codepoint.  Auto-generated data — do not edit by hand.
    """
    return [
    (0x378, 'X'),
    (0x37A, '3', u' ι'),
    (0x37B, 'V'),
    (0x37E, '3', u';'),
    (0x37F, 'X'),
    (0x384, '3', u' ́'),
    (0x385, '3', u' ̈́'),
    (0x386, 'M', u'ά'),
    (0x387, 'M', u'·'),
    (0x388, 'M', u'έ'),
    (0x389, 'M', u'ή'),
    (0x38A, 'M', u'ί'),
    (0x38B, 'X'),
    (0x38C, 'M', u'ό'),
    (0x38D, 'X'),
    (0x38E, 'M', u'ύ'),
    (0x38F, 'M', u'ώ'),
    (0x390, 'V'),
    (0x391, 'M', u'α'),
    (0x392, 'M', u'β'),
    (0x393, 'M', u'γ'),
    (0x394, 'M', u'δ'),
    (0x395, 'M', u'ε'),
    (0x396, 'M', u'ζ'),
    (0x397, 'M', u'η'),
    (0x398, 'M', u'θ'),
    (0x399, 'M', u'ι'),
    (0x39A, 'M', u'κ'),
    (0x39B, 'M', u'λ'),
    (0x39C, 'M', u'μ'),
    (0x39D, 'M', u'ν'),
    (0x39E, 'M', u'ξ'),
    (0x39F, 'M', u'ο'),
    (0x3A0, 'M', u'π'),
    (0x3A1, 'M', u'ρ'),
    (0x3A2, 'X'),
    (0x3A3, 'M', u'σ'),
    (0x3A4, 'M', u'τ'),
    (0x3A5, 'M', u'υ'),
    (0x3A6, 'M', u'φ'),
    (0x3A7, 'M', u'χ'),
    (0x3A8, 'M', u'ψ'),
    (0x3A9, 'M', u'ω'),
    (0x3AA, 'M', u'ϊ'),
    (0x3AB, 'M', u'ϋ'),
    (0x3AC, 'V'),
    (0x3C2, 'D', u'σ'),
    (0x3C3, 'V'),
    (0x3CF, 'M', u'ϗ'),
    (0x3D0, 'M', u'β'),
    (0x3D1, 'M', u'θ'),
    (0x3D2, 'M', u'υ'),
    (0x3D3, 'M', u'ύ'),
    (0x3D4, 'M', u'ϋ'),
    (0x3D5, 'M', u'φ'),
    (0x3D6, 'M', u'π'),
    (0x3D7, 'V'),
    (0x3D8, 'M', u'ϙ'),
    (0x3D9, 'V'),
    (0x3DA, 'M', u'ϛ'),
    (0x3DB, 'V'),
    (0x3DC, 'M', u'ϝ'),
    (0x3DD, 'V'),
    (0x3DE, 'M', u'ϟ'),
    (0x3DF, 'V'),
    (0x3E0, 'M', u'ϡ'),
    (0x3E1, 'V'),
    (0x3E2, 'M', u'ϣ'),
    (0x3E3, 'V'),
    (0x3E4, 'M', u'ϥ'),
    (0x3E5, 'V'),
    (0x3E6, 'M', u'ϧ'),
    (0x3E7, 'V'),
    (0x3E8, 'M', u'ϩ'),
    (0x3E9, 'V'),
    (0x3EA, 'M', u'ϫ'),
    (0x3EB, 'V'),
    (0x3EC, 'M', u'ϭ'),
    (0x3ED, 'V'),
    (0x3EE, 'M', u'ϯ'),
    (0x3EF, 'V'),
    (0x3F0, 'M', u'κ'),
    (0x3F1, 'M', u'ρ'),
    (0x3F2, 'M', u'σ'),
    (0x3F3, 'V'),
    (0x3F4, 'M', u'θ'),
    (0x3F5, 'M', u'ε'),
    (0x3F6, 'V'),
    (0x3F7, 'M', u'ϸ'),
    (0x3F8, 'V'),
    (0x3F9, 'M', u'σ'),
    (0x3FA, 'M', u'ϻ'),
    (0x3FB, 'V'),
    (0x3FD, 'M', u'ͻ'),
    (0x3FE, 'M', u'ͼ'),
    (0x3FF, 'M', u'ͽ'),
    (0x400, 'M', u'ѐ'),
    (0x401, 'M', u'ё'),
    (0x402, 'M', u'ђ'),
    (0x403, 'M', u'ѓ'),
    ]

def _seg_7():
    """Return rows 700-799 of the UTS46 mapping table.

    Same row format as ``_seg_0``: ``(codepoint, status[, mapping])``,
    ordered by codepoint.  Auto-generated data — do not edit by hand.
    """
    return [
    (0x404, 'M', u'є'),
    (0x405, 'M', u'ѕ'),
    (0x406, 'M', u'і'),
    (0x407, 'M', u'ї'),
    (0x408, 'M', u'ј'),
    (0x409, 'M', u'љ'),
    (0x40A, 'M', u'њ'),
    (0x40B, 'M', u'ћ'),
    (0x40C, 'M', u'ќ'),
    (0x40D, 'M', u'ѝ'),
    (0x40E, 'M', u'ў'),
    (0x40F, 'M', u'џ'),
    (0x410, 'M', u'а'),
    (0x411, 'M', u'б'),
    (0x412, 'M', u'в'),
    (0x413, 'M', u'г'),
    (0x414, 'M', u'д'),
    (0x415, 'M', u'е'),
    (0x416, 'M', u'ж'),
    (0x417, 'M', u'з'),
    (0x418, 'M', u'и'),
    (0x419, 'M', u'й'),
    (0x41A, 'M', u'к'),
    (0x41B, 'M', u'л'),
    (0x41C, 'M', u'м'),
    (0x41D, 'M', u'н'),
    (0x41E, 'M', u'о'),
    (0x41F, 'M', u'п'),
    (0x420, 'M', u'р'),
    (0x421, 'M', u'с'),
    (0x422, 'M', u'т'),
    (0x423, 'M', u'у'),
    (0x424, 'M', u'ф'),
    (0x425, 'M', u'х'),
    (0x426, 'M', u'ц'),
    (0x427, 'M', u'ч'),
    (0x428, 'M', u'ш'),
    (0x429, 'M', u'щ'),
    (0x42A, 'M', u'ъ'),
    (0x42B, 'M', u'ы'),
    (0x42C, 'M', u'ь'),
    (0x42D, 'M', u'э'),
    (0x42E, 'M', u'ю'),
    (0x42F, 'M', u'я'),
    (0x430, 'V'),
    (0x460, 'M', u'ѡ'),
    (0x461, 'V'),
    (0x462, 'M', u'ѣ'),
    (0x463, 'V'),
    (0x464, 'M', u'ѥ'),
    (0x465, 'V'),
    (0x466, 'M', u'ѧ'),
    (0x467, 'V'),
    (0x468, 'M', u'ѩ'),
    (0x469, 'V'),
    (0x46A, 'M', u'ѫ'),
    (0x46B, 'V'),
    (0x46C, 'M', u'ѭ'),
    (0x46D, 'V'),
    (0x46E, 'M', u'ѯ'),
    (0x46F, 'V'),
    (0x470, 'M', u'ѱ'),
    (0x471, 'V'),
    (0x472, 'M', u'ѳ'),
    (0x473, 'V'),
    (0x474, 'M', u'ѵ'),
    (0x475, 'V'),
    (0x476, 'M', u'ѷ'),
    (0x477, 'V'),
    (0x478, 'M', u'ѹ'),
    (0x479, 'V'),
    (0x47A, 'M', u'ѻ'),
    (0x47B, 'V'),
    (0x47C, 'M', u'ѽ'),
    (0x47D, 'V'),
    (0x47E, 'M', u'ѿ'),
    (0x47F, 'V'),
    (0x480, 'M', u'ҁ'),
    (0x481, 'V'),
    (0x48A, 'M', u'ҋ'),
    (0x48B, 'V'),
    (0x48C, 'M', u'ҍ'),
    (0x48D, 'V'),
    (0x48E, 'M', u'ҏ'),
    (0x48F, 'V'),
    (0x490, 'M', u'ґ'),
    (0x491, 'V'),
    (0x492, 'M', u'ғ'),
    (0x493, 'V'),
    (0x494, 'M', u'ҕ'),
    (0x495, 'V'),
    (0x496, 'M', u'җ'),
    (0x497, 'V'),
    (0x498, 'M', u'ҙ'),
    (0x499, 'V'),
    (0x49A, 'M', u'қ'),
    (0x49B, 'V'),
    (0x49C, 'M', u'ҝ'),
    (0x49D, 'V'),
    (0x49E, 'M', u'ҟ'),
    ]

def _seg_8():
    """Return rows 800-899 of the UTS46 mapping table.

    Same row format as ``_seg_0``: ``(codepoint, status[, mapping])``,
    ordered by codepoint.  Auto-generated data — do not edit by hand.
    """
    return [
    (0x49F, 'V'),
    (0x4A0, 'M', u'ҡ'),
    (0x4A1, 'V'),
    (0x4A2, 'M', u'ң'),
    (0x4A3, 'V'),
    (0x4A4, 'M', u'ҥ'),
    (0x4A5, 'V'),
    (0x4A6, 'M', u'ҧ'),
    (0x4A7, 'V'),
    (0x4A8, 'M', u'ҩ'),
    (0x4A9, 'V'),
    (0x4AA, 'M', u'ҫ'),
    (0x4AB, 'V'),
    (0x4AC, 'M', u'ҭ'),
    (0x4AD, 'V'),
    (0x4AE, 'M', u'ү'),
    (0x4AF, 'V'),
    (0x4B0, 'M', u'ұ'),
    (0x4B1, 'V'),
    (0x4B2, 'M', u'ҳ'),
    (0x4B3, 'V'),
    (0x4B4, 'M', u'ҵ'),
    (0x4B5, 'V'),
    (0x4B6, 'M', u'ҷ'),
    (0x4B7, 'V'),
    (0x4B8, 'M', u'ҹ'),
    (0x4B9, 'V'),
    (0x4BA, 'M', u'һ'),
    (0x4BB, 'V'),
    (0x4BC, 'M', u'ҽ'),
    (0x4BD, 'V'),
    (0x4BE, 'M', u'ҿ'),
    (0x4BF, 'V'),
    (0x4C0, 'X'),
    (0x4C1, 'M', u'ӂ'),
    (0x4C2, 'V'),
    (0x4C3, 'M', u'ӄ'),
    (0x4C4, 'V'),
    (0x4C5, 'M', u'ӆ'),
    (0x4C6, 'V'),
    (0x4C7, 'M', u'ӈ'),
    (0x4C8, 'V'),
    (0x4C9, 'M', u'ӊ'),
    (0x4CA, 'V'),
    (0x4CB, 'M', u'ӌ'),
    (0x4CC, 'V'),
    (0x4CD, 'M', u'ӎ'),
    (0x4CE, 'V'),
    (0x4D0, 'M', u'ӑ'),
    (0x4D1, 'V'),
    (0x4D2, 'M', u'ӓ'),
    (0x4D3, 'V'),
    (0x4D4, 'M', u'ӕ'),
    (0x4D5, 'V'),
    (0x4D6, 'M', u'ӗ'),
    (0x4D7, 'V'),
    (0x4D8, 'M', u'ә'),
    (0x4D9, 'V'),
    (0x4DA, 'M', u'ӛ'),
    (0x4DB, 'V'),
    (0x4DC, 'M', u'ӝ'),
    (0x4DD, 'V'),
    (0x4DE, 'M', u'ӟ'),
    (0x4DF, 'V'),
    (0x4E0, 'M', u'ӡ'),
    (0x4E1, 'V'),
    (0x4E2, 'M', u'ӣ'),
    (0x4E3, 'V'),
    (0x4E4, 'M', u'ӥ'),
    (0x4E5, 'V'),
    (0x4E6, 'M', u'ӧ'),
    (0x4E7, 'V'),
    (0x4E8, 'M', u'ө'),
    (0x4E9, 'V'),
    (0x4EA, 'M', u'ӫ'),
    (0x4EB, 'V'),
    (0x4EC, 'M', u'ӭ'),
    (0x4ED, 'V'),
    (0x4EE, 'M', u'ӯ'),
    (0x4EF, 'V'),
    (0x4F0, 'M', u'ӱ'),
    (0x4F1, 'V'),
    (0x4F2, 'M', u'ӳ'),
    (0x4F3, 'V'),
    (0x4F4, 'M', u'ӵ'),
    (0x4F5, 'V'),
    (0x4F6, 'M', u'ӷ'),
    (0x4F7, 'V'),
    (0x4F8, 'M', u'ӹ'),
    (0x4F9, 'V'),
    (0x4FA, 'M', u'ӻ'),
    (0x4FB, 'V'),
    (0x4FC, 'M', u'ӽ'),
    (0x4FD, 'V'),
    (0x4FE, 'M', u'ӿ'),
    (0x4FF, 'V'),
    (0x500, 'M', u'ԁ'),
    (0x501, 'V'),
    (0x502, 'M', u'ԃ'),
    (0x503, 'V'),
    ]

def _seg_9():
    """Return rows 900-999 of the UTS46 mapping table.

    Same row format as ``_seg_0``: ``(codepoint, status[, mapping])``,
    ordered by codepoint.  Auto-generated data — do not edit by hand.
    """
    return [
    (0x504, 'M', u'ԅ'),
    (0x505, 'V'),
    (0x506, 'M', u'ԇ'),
    (0x507, 'V'),
    (0x508, 'M', u'ԉ'),
    (0x509, 'V'),
    (0x50A, 'M', u'ԋ'),
    (0x50B, 'V'),
    (0x50C, 'M', u'ԍ'),
    (0x50D, 'V'),
    (0x50E, 'M', u'ԏ'),
    (0x50F, 'V'),
    (0x510, 'M', u'ԑ'),
    (0x511, 'V'),
    (0x512, 'M', u'ԓ'),
    (0x513, 'V'),
    (0x514, 'M', u'ԕ'),
    (0x515, 'V'),
    (0x516, 'M', u'ԗ'),
    (0x517, 'V'),
    (0x518, 'M', u'ԙ'),
    (0x519, 'V'),
    (0x51A, 'M', u'ԛ'),
    (0x51B, 'V'),
    (0x51C, 'M', u'ԝ'),
    (0x51D, 'V'),
    (0x51E, 'M', u'ԟ'),
    (0x51F, 'V'),
    (0x520, 'M', u'ԡ'),
    (0x521, 'V'),
    (0x522, 'M', u'ԣ'),
    (0x523, 'V'),
    (0x524, 'M', u'ԥ'),
    (0x525, 'V'),
    (0x526, 'M', u'ԧ'),
    (0x527, 'V'),
    (0x528, 'X'),
    (0x531, 'M', u'ա'),
    (0x532, 'M', u'բ'),
    (0x533, 'M', u'գ'),
    (0x534, 'M', u'դ'),
    (0x535, 'M', u'ե'),
    (0x536, 'M', u'զ'),
    (0x537, 'M', u'է'),
    (0x538, 'M', u'ը'),
    (0x539, 'M', u'թ'),
    (0x53A, 'M', u'ժ'),
    (0x53B, 'M', u'ի'),
    (0x53C, 'M', u'լ'),
    (0x53D, 'M', u'խ'),
    (0x53E, 'M', u'ծ'),
    (0x53F, 'M', u'կ'),
    (0x540, 'M', u'հ'),
    (0x541, 'M', u'ձ'),
    (0x542, 'M', u'ղ'),
    (0x543, 'M', u'ճ'),
    (0x544, 'M', u'մ'),
    (0x545, 'M', u'յ'),
    (0x546, 'M', u'ն'),
    (0x547, 'M', u'շ'),
    (0x548, 'M', u'ո'),
    (0x549, 'M', u'չ'),
    (0x54A, 'M', u'պ'),
    (0x54B, 'M', u'ջ'),
    (0x54C, 'M', u'ռ'),
    (0x54D, 'M', u'ս'),
    (0x54E, 'M', u'վ'),
    (0x54F, 'M', u'տ'),
    (0x550, 'M', u'ր'),
    (0x551, 'M', u'ց'),
    (0x552, 'M', u'ւ'),
    (0x553, 'M', u'փ'),
    (0x554, 'M', u'ք'),
    (0x555, 'M', u'օ'),
    (0x556, 'M', u'ֆ'),
    (0x557, 'X'),
    (0x559, 'V'),
    (0x560, 'X'),
    (0x561, 'V'),
    (0x587, 'M', u'եւ'),
    (0x588, 'X'),
    (0x589, 'V'),
    (0x58B, 'X'),
    (0x58F, 'V'),
    (0x590, 'X'),
    (0x591, 'V'),
    (0x5C8, 'X'),
    (0x5D0, 'V'),
    (0x5EB, 'X'),
    (0x5F0, 'V'),
    (0x5F5, 'X'),
    (0x606, 'V'),
    (0x61C, 'X'),
    (0x61E, 'V'),
    (0x675, 'M', u'اٴ'),
    (0x676, 'M', u'وٴ'),
    (0x677, 'M', u'ۇٴ'),
    (0x678, 'M', u'يٴ'),
    (0x679, 'V'),
    (0x6DD, 'X'),
    ]

def _seg_10():
    """Generated mapping-table segment covering codepoints 0x6DE-0xA5F.

    Each tuple is (codepoint, status) or (codepoint, status, mapping);
    an entry applies from its codepoint up to (not including) the next
    entry's.  Status letters appear to be UTS #46 style ('V' valid,
    'M' mapped to the given string, 'X' disallowed, 'I' ignored,
    '3' disallowed_STD3) -- NOTE(review): confirm against the table
    generator.  Machine-generated data; do not edit by hand.
    """
    return [
    (0x6DE, 'V'),
    (0x70E, 'X'),
    (0x710, 'V'),
    (0x74B, 'X'),
    (0x74D, 'V'),
    (0x7B2, 'X'),
    (0x7C0, 'V'),
    (0x7FB, 'X'),
    (0x800, 'V'),
    (0x82E, 'X'),
    (0x830, 'V'),
    (0x83F, 'X'),
    (0x840, 'V'),
    (0x85C, 'X'),
    (0x85E, 'V'),
    (0x85F, 'X'),
    (0x8A0, 'V'),
    (0x8A1, 'X'),
    (0x8A2, 'V'),
    (0x8AD, 'X'),
    (0x8E4, 'V'),
    (0x8FF, 'X'),
    (0x900, 'V'),
    (0x958, 'M', u'क़'),
    (0x959, 'M', u'ख़'),
    (0x95A, 'M', u'ग़'),
    (0x95B, 'M', u'ज़'),
    (0x95C, 'M', u'ड़'),
    (0x95D, 'M', u'ढ़'),
    (0x95E, 'M', u'फ़'),
    (0x95F, 'M', u'य़'),
    (0x960, 'V'),
    (0x978, 'X'),
    (0x979, 'V'),
    (0x980, 'X'),
    (0x981, 'V'),
    (0x984, 'X'),
    (0x985, 'V'),
    (0x98D, 'X'),
    (0x98F, 'V'),
    (0x991, 'X'),
    (0x993, 'V'),
    (0x9A9, 'X'),
    (0x9AA, 'V'),
    (0x9B1, 'X'),
    (0x9B2, 'V'),
    (0x9B3, 'X'),
    (0x9B6, 'V'),
    (0x9BA, 'X'),
    (0x9BC, 'V'),
    (0x9C5, 'X'),
    (0x9C7, 'V'),
    (0x9C9, 'X'),
    (0x9CB, 'V'),
    (0x9CF, 'X'),
    (0x9D7, 'V'),
    (0x9D8, 'X'),
    (0x9DC, 'M', u'ড়'),
    (0x9DD, 'M', u'ঢ়'),
    (0x9DE, 'X'),
    (0x9DF, 'M', u'য়'),
    (0x9E0, 'V'),
    (0x9E4, 'X'),
    (0x9E6, 'V'),
    (0x9FC, 'X'),
    (0xA01, 'V'),
    (0xA04, 'X'),
    (0xA05, 'V'),
    (0xA0B, 'X'),
    (0xA0F, 'V'),
    (0xA11, 'X'),
    (0xA13, 'V'),
    (0xA29, 'X'),
    (0xA2A, 'V'),
    (0xA31, 'X'),
    (0xA32, 'V'),
    (0xA33, 'M', u'ਲ਼'),
    (0xA34, 'X'),
    (0xA35, 'V'),
    (0xA36, 'M', u'ਸ਼'),
    (0xA37, 'X'),
    (0xA38, 'V'),
    (0xA3A, 'X'),
    (0xA3C, 'V'),
    (0xA3D, 'X'),
    (0xA3E, 'V'),
    (0xA43, 'X'),
    (0xA47, 'V'),
    (0xA49, 'X'),
    (0xA4B, 'V'),
    (0xA4E, 'X'),
    (0xA51, 'V'),
    (0xA52, 'X'),
    (0xA59, 'M', u'ਖ਼'),
    (0xA5A, 'M', u'ਗ਼'),
    (0xA5B, 'M', u'ਜ਼'),
    (0xA5C, 'V'),
    (0xA5D, 'X'),
    (0xA5E, 'M', u'ਫ਼'),
    (0xA5F, 'X'),
    ]

def _seg_11():
    """Generated mapping-table segment covering codepoints 0xA66-0xC35.

    Each tuple is (codepoint, status) or (codepoint, status, mapping);
    an entry applies from its codepoint up to (not including) the next
    entry's.  Status letters appear to be UTS #46 style ('V' valid,
    'M' mapped to the given string, 'X' disallowed, 'I' ignored,
    '3' disallowed_STD3) -- NOTE(review): confirm against the table
    generator.  Machine-generated data; do not edit by hand.
    """
    return [
    (0xA66, 'V'),
    (0xA76, 'X'),
    (0xA81, 'V'),
    (0xA84, 'X'),
    (0xA85, 'V'),
    (0xA8E, 'X'),
    (0xA8F, 'V'),
    (0xA92, 'X'),
    (0xA93, 'V'),
    (0xAA9, 'X'),
    (0xAAA, 'V'),
    (0xAB1, 'X'),
    (0xAB2, 'V'),
    (0xAB4, 'X'),
    (0xAB5, 'V'),
    (0xABA, 'X'),
    (0xABC, 'V'),
    (0xAC6, 'X'),
    (0xAC7, 'V'),
    (0xACA, 'X'),
    (0xACB, 'V'),
    (0xACE, 'X'),
    (0xAD0, 'V'),
    (0xAD1, 'X'),
    (0xAE0, 'V'),
    (0xAE4, 'X'),
    (0xAE6, 'V'),
    (0xAF2, 'X'),
    (0xB01, 'V'),
    (0xB04, 'X'),
    (0xB05, 'V'),
    (0xB0D, 'X'),
    (0xB0F, 'V'),
    (0xB11, 'X'),
    (0xB13, 'V'),
    (0xB29, 'X'),
    (0xB2A, 'V'),
    (0xB31, 'X'),
    (0xB32, 'V'),
    (0xB34, 'X'),
    (0xB35, 'V'),
    (0xB3A, 'X'),
    (0xB3C, 'V'),
    (0xB45, 'X'),
    (0xB47, 'V'),
    (0xB49, 'X'),
    (0xB4B, 'V'),
    (0xB4E, 'X'),
    (0xB56, 'V'),
    (0xB58, 'X'),
    (0xB5C, 'M', u'ଡ଼'),
    (0xB5D, 'M', u'ଢ଼'),
    (0xB5E, 'X'),
    (0xB5F, 'V'),
    (0xB64, 'X'),
    (0xB66, 'V'),
    (0xB78, 'X'),
    (0xB82, 'V'),
    (0xB84, 'X'),
    (0xB85, 'V'),
    (0xB8B, 'X'),
    (0xB8E, 'V'),
    (0xB91, 'X'),
    (0xB92, 'V'),
    (0xB96, 'X'),
    (0xB99, 'V'),
    (0xB9B, 'X'),
    (0xB9C, 'V'),
    (0xB9D, 'X'),
    (0xB9E, 'V'),
    (0xBA0, 'X'),
    (0xBA3, 'V'),
    (0xBA5, 'X'),
    (0xBA8, 'V'),
    (0xBAB, 'X'),
    (0xBAE, 'V'),
    (0xBBA, 'X'),
    (0xBBE, 'V'),
    (0xBC3, 'X'),
    (0xBC6, 'V'),
    (0xBC9, 'X'),
    (0xBCA, 'V'),
    (0xBCE, 'X'),
    (0xBD0, 'V'),
    (0xBD1, 'X'),
    (0xBD7, 'V'),
    (0xBD8, 'X'),
    (0xBE6, 'V'),
    (0xBFB, 'X'),
    (0xC01, 'V'),
    (0xC04, 'X'),
    (0xC05, 'V'),
    (0xC0D, 'X'),
    (0xC0E, 'V'),
    (0xC11, 'X'),
    (0xC12, 'V'),
    (0xC29, 'X'),
    (0xC2A, 'V'),
    (0xC34, 'X'),
    (0xC35, 'V'),
    ]

def _seg_12():
    """Generated mapping-table segment covering codepoints 0xC3A-0xE87.

    Each tuple is (codepoint, status) or (codepoint, status, mapping);
    an entry applies from its codepoint up to (not including) the next
    entry's.  Status letters appear to be UTS #46 style ('V' valid,
    'M' mapped to the given string, 'X' disallowed, 'I' ignored,
    '3' disallowed_STD3) -- NOTE(review): confirm against the table
    generator.  Machine-generated data; do not edit by hand.
    """
    return [
    (0xC3A, 'X'),
    (0xC3D, 'V'),
    (0xC45, 'X'),
    (0xC46, 'V'),
    (0xC49, 'X'),
    (0xC4A, 'V'),
    (0xC4E, 'X'),
    (0xC55, 'V'),
    (0xC57, 'X'),
    (0xC58, 'V'),
    (0xC5A, 'X'),
    (0xC60, 'V'),
    (0xC64, 'X'),
    (0xC66, 'V'),
    (0xC70, 'X'),
    (0xC78, 'V'),
    (0xC80, 'X'),
    (0xC82, 'V'),
    (0xC84, 'X'),
    (0xC85, 'V'),
    (0xC8D, 'X'),
    (0xC8E, 'V'),
    (0xC91, 'X'),
    (0xC92, 'V'),
    (0xCA9, 'X'),
    (0xCAA, 'V'),
    (0xCB4, 'X'),
    (0xCB5, 'V'),
    (0xCBA, 'X'),
    (0xCBC, 'V'),
    (0xCC5, 'X'),
    (0xCC6, 'V'),
    (0xCC9, 'X'),
    (0xCCA, 'V'),
    (0xCCE, 'X'),
    (0xCD5, 'V'),
    (0xCD7, 'X'),
    (0xCDE, 'V'),
    (0xCDF, 'X'),
    (0xCE0, 'V'),
    (0xCE4, 'X'),
    (0xCE6, 'V'),
    (0xCF0, 'X'),
    (0xCF1, 'V'),
    (0xCF3, 'X'),
    (0xD02, 'V'),
    (0xD04, 'X'),
    (0xD05, 'V'),
    (0xD0D, 'X'),
    (0xD0E, 'V'),
    (0xD11, 'X'),
    (0xD12, 'V'),
    (0xD3B, 'X'),
    (0xD3D, 'V'),
    (0xD45, 'X'),
    (0xD46, 'V'),
    (0xD49, 'X'),
    (0xD4A, 'V'),
    (0xD4F, 'X'),
    (0xD57, 'V'),
    (0xD58, 'X'),
    (0xD60, 'V'),
    (0xD64, 'X'),
    (0xD66, 'V'),
    (0xD76, 'X'),
    (0xD79, 'V'),
    (0xD80, 'X'),
    (0xD82, 'V'),
    (0xD84, 'X'),
    (0xD85, 'V'),
    (0xD97, 'X'),
    (0xD9A, 'V'),
    (0xDB2, 'X'),
    (0xDB3, 'V'),
    (0xDBC, 'X'),
    (0xDBD, 'V'),
    (0xDBE, 'X'),
    (0xDC0, 'V'),
    (0xDC7, 'X'),
    (0xDCA, 'V'),
    (0xDCB, 'X'),
    (0xDCF, 'V'),
    (0xDD5, 'X'),
    (0xDD6, 'V'),
    (0xDD7, 'X'),
    (0xDD8, 'V'),
    (0xDE0, 'X'),
    (0xDF2, 'V'),
    (0xDF5, 'X'),
    (0xE01, 'V'),
    (0xE33, 'M', u'ํา'),
    (0xE34, 'V'),
    (0xE3B, 'X'),
    (0xE3F, 'V'),
    (0xE5C, 'X'),
    (0xE81, 'V'),
    (0xE83, 'X'),
    (0xE84, 'V'),
    (0xE85, 'X'),
    (0xE87, 'V'),
    ]

def _seg_13():
    """Generated mapping-table segment covering codepoints 0xE89-0x1258.

    Each tuple is (codepoint, status) or (codepoint, status, mapping);
    an entry applies from its codepoint up to (not including) the next
    entry's.  Status letters appear to be UTS #46 style ('V' valid,
    'M' mapped to the given string, 'X' disallowed, 'I' ignored,
    '3' disallowed_STD3) -- NOTE(review): confirm against the table
    generator.  Machine-generated data; do not edit by hand.
    """
    return [
    (0xE89, 'X'),
    (0xE8A, 'V'),
    (0xE8B, 'X'),
    (0xE8D, 'V'),
    (0xE8E, 'X'),
    (0xE94, 'V'),
    (0xE98, 'X'),
    (0xE99, 'V'),
    (0xEA0, 'X'),
    (0xEA1, 'V'),
    (0xEA4, 'X'),
    (0xEA5, 'V'),
    (0xEA6, 'X'),
    (0xEA7, 'V'),
    (0xEA8, 'X'),
    (0xEAA, 'V'),
    (0xEAC, 'X'),
    (0xEAD, 'V'),
    (0xEB3, 'M', u'ໍາ'),
    (0xEB4, 'V'),
    (0xEBA, 'X'),
    (0xEBB, 'V'),
    (0xEBE, 'X'),
    (0xEC0, 'V'),
    (0xEC5, 'X'),
    (0xEC6, 'V'),
    (0xEC7, 'X'),
    (0xEC8, 'V'),
    (0xECE, 'X'),
    (0xED0, 'V'),
    (0xEDA, 'X'),
    (0xEDC, 'M', u'ຫນ'),
    (0xEDD, 'M', u'ຫມ'),
    (0xEDE, 'V'),
    (0xEE0, 'X'),
    (0xF00, 'V'),
    (0xF0C, 'M', u'་'),
    (0xF0D, 'V'),
    (0xF43, 'M', u'གྷ'),
    (0xF44, 'V'),
    (0xF48, 'X'),
    (0xF49, 'V'),
    (0xF4D, 'M', u'ཌྷ'),
    (0xF4E, 'V'),
    (0xF52, 'M', u'དྷ'),
    (0xF53, 'V'),
    (0xF57, 'M', u'བྷ'),
    (0xF58, 'V'),
    (0xF5C, 'M', u'ཛྷ'),
    (0xF5D, 'V'),
    (0xF69, 'M', u'ཀྵ'),
    (0xF6A, 'V'),
    (0xF6D, 'X'),
    (0xF71, 'V'),
    (0xF73, 'M', u'ཱི'),
    (0xF74, 'V'),
    (0xF75, 'M', u'ཱུ'),
    (0xF76, 'M', u'ྲྀ'),
    (0xF77, 'M', u'ྲཱྀ'),
    (0xF78, 'M', u'ླྀ'),
    (0xF79, 'M', u'ླཱྀ'),
    (0xF7A, 'V'),
    (0xF81, 'M', u'ཱྀ'),
    (0xF82, 'V'),
    (0xF93, 'M', u'ྒྷ'),
    (0xF94, 'V'),
    (0xF98, 'X'),
    (0xF99, 'V'),
    (0xF9D, 'M', u'ྜྷ'),
    (0xF9E, 'V'),
    (0xFA2, 'M', u'ྡྷ'),
    (0xFA3, 'V'),
    (0xFA7, 'M', u'ྦྷ'),
    (0xFA8, 'V'),
    (0xFAC, 'M', u'ྫྷ'),
    (0xFAD, 'V'),
    (0xFB9, 'M', u'ྐྵ'),
    (0xFBA, 'V'),
    (0xFBD, 'X'),
    (0xFBE, 'V'),
    (0xFCD, 'X'),
    (0xFCE, 'V'),
    (0xFDB, 'X'),
    (0x1000, 'V'),
    (0x10A0, 'X'),
    (0x10C7, 'M', u'ⴧ'),
    (0x10C8, 'X'),
    (0x10CD, 'M', u'ⴭ'),
    (0x10CE, 'X'),
    (0x10D0, 'V'),
    (0x10FC, 'M', u'ნ'),
    (0x10FD, 'V'),
    (0x115F, 'X'),
    (0x1161, 'V'),
    (0x1249, 'X'),
    (0x124A, 'V'),
    (0x124E, 'X'),
    (0x1250, 'V'),
    (0x1257, 'X'),
    (0x1258, 'V'),
    ]

def _seg_14():
    """Generated mapping-table segment covering codepoints 0x1259-0x1A9A.

    Each tuple is (codepoint, status) or (codepoint, status, mapping);
    an entry applies from its codepoint up to (not including) the next
    entry's.  Status letters appear to be UTS #46 style ('V' valid,
    'M' mapped to the given string, 'X' disallowed, 'I' ignored,
    '3' disallowed_STD3) -- NOTE(review): confirm against the table
    generator.  Machine-generated data; do not edit by hand.
    """
    return [
    (0x1259, 'X'),
    (0x125A, 'V'),
    (0x125E, 'X'),
    (0x1260, 'V'),
    (0x1289, 'X'),
    (0x128A, 'V'),
    (0x128E, 'X'),
    (0x1290, 'V'),
    (0x12B1, 'X'),
    (0x12B2, 'V'),
    (0x12B6, 'X'),
    (0x12B8, 'V'),
    (0x12BF, 'X'),
    (0x12C0, 'V'),
    (0x12C1, 'X'),
    (0x12C2, 'V'),
    (0x12C6, 'X'),
    (0x12C8, 'V'),
    (0x12D7, 'X'),
    (0x12D8, 'V'),
    (0x1311, 'X'),
    (0x1312, 'V'),
    (0x1316, 'X'),
    (0x1318, 'V'),
    (0x135B, 'X'),
    (0x135D, 'V'),
    (0x137D, 'X'),
    (0x1380, 'V'),
    (0x139A, 'X'),
    (0x13A0, 'V'),
    (0x13F5, 'X'),
    (0x1400, 'V'),
    (0x1680, 'X'),
    (0x1681, 'V'),
    (0x169D, 'X'),
    (0x16A0, 'V'),
    (0x16F1, 'X'),
    (0x1700, 'V'),
    (0x170D, 'X'),
    (0x170E, 'V'),
    (0x1715, 'X'),
    (0x1720, 'V'),
    (0x1737, 'X'),
    (0x1740, 'V'),
    (0x1754, 'X'),
    (0x1760, 'V'),
    (0x176D, 'X'),
    (0x176E, 'V'),
    (0x1771, 'X'),
    (0x1772, 'V'),
    (0x1774, 'X'),
    (0x1780, 'V'),
    (0x17B4, 'X'),
    (0x17B6, 'V'),
    (0x17DE, 'X'),
    (0x17E0, 'V'),
    (0x17EA, 'X'),
    (0x17F0, 'V'),
    (0x17FA, 'X'),
    (0x1800, 'V'),
    (0x1806, 'X'),
    (0x1807, 'V'),
    (0x180B, 'I'),
    (0x180E, 'X'),
    (0x1810, 'V'),
    (0x181A, 'X'),
    (0x1820, 'V'),
    (0x1878, 'X'),
    (0x1880, 'V'),
    (0x18AB, 'X'),
    (0x18B0, 'V'),
    (0x18F6, 'X'),
    (0x1900, 'V'),
    (0x191D, 'X'),
    (0x1920, 'V'),
    (0x192C, 'X'),
    (0x1930, 'V'),
    (0x193C, 'X'),
    (0x1940, 'V'),
    (0x1941, 'X'),
    (0x1944, 'V'),
    (0x196E, 'X'),
    (0x1970, 'V'),
    (0x1975, 'X'),
    (0x1980, 'V'),
    (0x19AC, 'X'),
    (0x19B0, 'V'),
    (0x19CA, 'X'),
    (0x19D0, 'V'),
    (0x19DB, 'X'),
    (0x19DE, 'V'),
    (0x1A1C, 'X'),
    (0x1A1E, 'V'),
    (0x1A5F, 'X'),
    (0x1A60, 'V'),
    (0x1A7D, 'X'),
    (0x1A7F, 'V'),
    (0x1A8A, 'X'),
    (0x1A90, 'V'),
    (0x1A9A, 'X'),
    ]

def _seg_15():
    """Generated mapping-table segment covering codepoints 0x1AA0-0x1DA9.

    Each tuple is (codepoint, status) or (codepoint, status, mapping);
    an entry applies from its codepoint up to (not including) the next
    entry's.  Status letters appear to be UTS #46 style ('V' valid,
    'M' mapped to the given string, 'X' disallowed, 'I' ignored,
    '3' disallowed_STD3) -- NOTE(review): confirm against the table
    generator.  Machine-generated data; do not edit by hand.
    """
    return [
    (0x1AA0, 'V'),
    (0x1AAE, 'X'),
    (0x1B00, 'V'),
    (0x1B4C, 'X'),
    (0x1B50, 'V'),
    (0x1B7D, 'X'),
    (0x1B80, 'V'),
    (0x1BF4, 'X'),
    (0x1BFC, 'V'),
    (0x1C38, 'X'),
    (0x1C3B, 'V'),
    (0x1C4A, 'X'),
    (0x1C4D, 'V'),
    (0x1C80, 'X'),
    (0x1CC0, 'V'),
    (0x1CC8, 'X'),
    (0x1CD0, 'V'),
    (0x1CF7, 'X'),
    (0x1D00, 'V'),
    (0x1D2C, 'M', u'a'),
    (0x1D2D, 'M', u'æ'),
    (0x1D2E, 'M', u'b'),
    (0x1D2F, 'V'),
    (0x1D30, 'M', u'd'),
    (0x1D31, 'M', u'e'),
    (0x1D32, 'M', u'ǝ'),
    (0x1D33, 'M', u'g'),
    (0x1D34, 'M', u'h'),
    (0x1D35, 'M', u'i'),
    (0x1D36, 'M', u'j'),
    (0x1D37, 'M', u'k'),
    (0x1D38, 'M', u'l'),
    (0x1D39, 'M', u'm'),
    (0x1D3A, 'M', u'n'),
    (0x1D3B, 'V'),
    (0x1D3C, 'M', u'o'),
    (0x1D3D, 'M', u'ȣ'),
    (0x1D3E, 'M', u'p'),
    (0x1D3F, 'M', u'r'),
    (0x1D40, 'M', u't'),
    (0x1D41, 'M', u'u'),
    (0x1D42, 'M', u'w'),
    (0x1D43, 'M', u'a'),
    (0x1D44, 'M', u'ɐ'),
    (0x1D45, 'M', u'ɑ'),
    (0x1D46, 'M', u'ᴂ'),
    (0x1D47, 'M', u'b'),
    (0x1D48, 'M', u'd'),
    (0x1D49, 'M', u'e'),
    (0x1D4A, 'M', u'ə'),
    (0x1D4B, 'M', u'ɛ'),
    (0x1D4C, 'M', u'ɜ'),
    (0x1D4D, 'M', u'g'),
    (0x1D4E, 'V'),
    (0x1D4F, 'M', u'k'),
    (0x1D50, 'M', u'm'),
    (0x1D51, 'M', u'ŋ'),
    (0x1D52, 'M', u'o'),
    (0x1D53, 'M', u'ɔ'),
    (0x1D54, 'M', u'ᴖ'),
    (0x1D55, 'M', u'ᴗ'),
    (0x1D56, 'M', u'p'),
    (0x1D57, 'M', u't'),
    (0x1D58, 'M', u'u'),
    (0x1D59, 'M', u'ᴝ'),
    (0x1D5A, 'M', u'ɯ'),
    (0x1D5B, 'M', u'v'),
    (0x1D5C, 'M', u'ᴥ'),
    (0x1D5D, 'M', u'β'),
    (0x1D5E, 'M', u'γ'),
    (0x1D5F, 'M', u'δ'),
    (0x1D60, 'M', u'φ'),
    (0x1D61, 'M', u'χ'),
    (0x1D62, 'M', u'i'),
    (0x1D63, 'M', u'r'),
    (0x1D64, 'M', u'u'),
    (0x1D65, 'M', u'v'),
    (0x1D66, 'M', u'β'),
    (0x1D67, 'M', u'γ'),
    (0x1D68, 'M', u'ρ'),
    (0x1D69, 'M', u'φ'),
    (0x1D6A, 'M', u'χ'),
    (0x1D6B, 'V'),
    (0x1D78, 'M', u'н'),
    (0x1D79, 'V'),
    (0x1D9B, 'M', u'ɒ'),
    (0x1D9C, 'M', u'c'),
    (0x1D9D, 'M', u'ɕ'),
    (0x1D9E, 'M', u'ð'),
    (0x1D9F, 'M', u'ɜ'),
    (0x1DA0, 'M', u'f'),
    (0x1DA1, 'M', u'ɟ'),
    (0x1DA2, 'M', u'ɡ'),
    (0x1DA3, 'M', u'ɥ'),
    (0x1DA4, 'M', u'ɨ'),
    (0x1DA5, 'M', u'ɩ'),
    (0x1DA6, 'M', u'ɪ'),
    (0x1DA7, 'M', u'ᵻ'),
    (0x1DA8, 'M', u'ʝ'),
    (0x1DA9, 'M', u'ɭ'),
    ]

def _seg_16():
    """Generated mapping-table segment covering codepoints 0x1DAA-0x1E4A.

    Each tuple is (codepoint, status) or (codepoint, status, mapping);
    an entry applies from its codepoint up to (not including) the next
    entry's.  Status letters appear to be UTS #46 style ('V' valid,
    'M' mapped to the given string, 'X' disallowed, 'I' ignored,
    '3' disallowed_STD3) -- NOTE(review): confirm against the table
    generator.  Machine-generated data; do not edit by hand.
    """
    return [
    (0x1DAA, 'M', u'ᶅ'),
    (0x1DAB, 'M', u'ʟ'),
    (0x1DAC, 'M', u'ɱ'),
    (0x1DAD, 'M', u'ɰ'),
    (0x1DAE, 'M', u'ɲ'),
    (0x1DAF, 'M', u'ɳ'),
    (0x1DB0, 'M', u'ɴ'),
    (0x1DB1, 'M', u'ɵ'),
    (0x1DB2, 'M', u'ɸ'),
    (0x1DB3, 'M', u'ʂ'),
    (0x1DB4, 'M', u'ʃ'),
    (0x1DB5, 'M', u'ƫ'),
    (0x1DB6, 'M', u'ʉ'),
    (0x1DB7, 'M', u'ʊ'),
    (0x1DB8, 'M', u'ᴜ'),
    (0x1DB9, 'M', u'ʋ'),
    (0x1DBA, 'M', u'ʌ'),
    (0x1DBB, 'M', u'z'),
    (0x1DBC, 'M', u'ʐ'),
    (0x1DBD, 'M', u'ʑ'),
    (0x1DBE, 'M', u'ʒ'),
    (0x1DBF, 'M', u'θ'),
    (0x1DC0, 'V'),
    (0x1DE7, 'X'),
    (0x1DFC, 'V'),
    (0x1E00, 'M', u'ḁ'),
    (0x1E01, 'V'),
    (0x1E02, 'M', u'ḃ'),
    (0x1E03, 'V'),
    (0x1E04, 'M', u'ḅ'),
    (0x1E05, 'V'),
    (0x1E06, 'M', u'ḇ'),
    (0x1E07, 'V'),
    (0x1E08, 'M', u'ḉ'),
    (0x1E09, 'V'),
    (0x1E0A, 'M', u'ḋ'),
    (0x1E0B, 'V'),
    (0x1E0C, 'M', u'ḍ'),
    (0x1E0D, 'V'),
    (0x1E0E, 'M', u'ḏ'),
    (0x1E0F, 'V'),
    (0x1E10, 'M', u'ḑ'),
    (0x1E11, 'V'),
    (0x1E12, 'M', u'ḓ'),
    (0x1E13, 'V'),
    (0x1E14, 'M', u'ḕ'),
    (0x1E15, 'V'),
    (0x1E16, 'M', u'ḗ'),
    (0x1E17, 'V'),
    (0x1E18, 'M', u'ḙ'),
    (0x1E19, 'V'),
    (0x1E1A, 'M', u'ḛ'),
    (0x1E1B, 'V'),
    (0x1E1C, 'M', u'ḝ'),
    (0x1E1D, 'V'),
    (0x1E1E, 'M', u'ḟ'),
    (0x1E1F, 'V'),
    (0x1E20, 'M', u'ḡ'),
    (0x1E21, 'V'),
    (0x1E22, 'M', u'ḣ'),
    (0x1E23, 'V'),
    (0x1E24, 'M', u'ḥ'),
    (0x1E25, 'V'),
    (0x1E26, 'M', u'ḧ'),
    (0x1E27, 'V'),
    (0x1E28, 'M', u'ḩ'),
    (0x1E29, 'V'),
    (0x1E2A, 'M', u'ḫ'),
    (0x1E2B, 'V'),
    (0x1E2C, 'M', u'ḭ'),
    (0x1E2D, 'V'),
    (0x1E2E, 'M', u'ḯ'),
    (0x1E2F, 'V'),
    (0x1E30, 'M', u'ḱ'),
    (0x1E31, 'V'),
    (0x1E32, 'M', u'ḳ'),
    (0x1E33, 'V'),
    (0x1E34, 'M', u'ḵ'),
    (0x1E35, 'V'),
    (0x1E36, 'M', u'ḷ'),
    (0x1E37, 'V'),
    (0x1E38, 'M', u'ḹ'),
    (0x1E39, 'V'),
    (0x1E3A, 'M', u'ḻ'),
    (0x1E3B, 'V'),
    (0x1E3C, 'M', u'ḽ'),
    (0x1E3D, 'V'),
    (0x1E3E, 'M', u'ḿ'),
    (0x1E3F, 'V'),
    (0x1E40, 'M', u'ṁ'),
    (0x1E41, 'V'),
    (0x1E42, 'M', u'ṃ'),
    (0x1E43, 'V'),
    (0x1E44, 'M', u'ṅ'),
    (0x1E45, 'V'),
    (0x1E46, 'M', u'ṇ'),
    (0x1E47, 'V'),
    (0x1E48, 'M', u'ṉ'),
    (0x1E49, 'V'),
    (0x1E4A, 'M', u'ṋ'),
    ]

def _seg_17():
    """Generated mapping-table segment covering codepoints 0x1E4B-0x1EB3.

    Each tuple is (codepoint, status) or (codepoint, status, mapping);
    an entry applies from its codepoint up to (not including) the next
    entry's.  Status letters appear to be UTS #46 style ('V' valid,
    'M' mapped to the given string, 'X' disallowed, 'I' ignored,
    '3' disallowed_STD3) -- NOTE(review): confirm against the table
    generator.  Machine-generated data; do not edit by hand.
    """
    return [
    (0x1E4B, 'V'),
    (0x1E4C, 'M', u'ṍ'),
    (0x1E4D, 'V'),
    (0x1E4E, 'M', u'ṏ'),
    (0x1E4F, 'V'),
    (0x1E50, 'M', u'ṑ'),
    (0x1E51, 'V'),
    (0x1E52, 'M', u'ṓ'),
    (0x1E53, 'V'),
    (0x1E54, 'M', u'ṕ'),
    (0x1E55, 'V'),
    (0x1E56, 'M', u'ṗ'),
    (0x1E57, 'V'),
    (0x1E58, 'M', u'ṙ'),
    (0x1E59, 'V'),
    (0x1E5A, 'M', u'ṛ'),
    (0x1E5B, 'V'),
    (0x1E5C, 'M', u'ṝ'),
    (0x1E5D, 'V'),
    (0x1E5E, 'M', u'ṟ'),
    (0x1E5F, 'V'),
    (0x1E60, 'M', u'ṡ'),
    (0x1E61, 'V'),
    (0x1E62, 'M', u'ṣ'),
    (0x1E63, 'V'),
    (0x1E64, 'M', u'ṥ'),
    (0x1E65, 'V'),
    (0x1E66, 'M', u'ṧ'),
    (0x1E67, 'V'),
    (0x1E68, 'M', u'ṩ'),
    (0x1E69, 'V'),
    (0x1E6A, 'M', u'ṫ'),
    (0x1E6B, 'V'),
    (0x1E6C, 'M', u'ṭ'),
    (0x1E6D, 'V'),
    (0x1E6E, 'M', u'ṯ'),
    (0x1E6F, 'V'),
    (0x1E70, 'M', u'ṱ'),
    (0x1E71, 'V'),
    (0x1E72, 'M', u'ṳ'),
    (0x1E73, 'V'),
    (0x1E74, 'M', u'ṵ'),
    (0x1E75, 'V'),
    (0x1E76, 'M', u'ṷ'),
    (0x1E77, 'V'),
    (0x1E78, 'M', u'ṹ'),
    (0x1E79, 'V'),
    (0x1E7A, 'M', u'ṻ'),
    (0x1E7B, 'V'),
    (0x1E7C, 'M', u'ṽ'),
    (0x1E7D, 'V'),
    (0x1E7E, 'M', u'ṿ'),
    (0x1E7F, 'V'),
    (0x1E80, 'M', u'ẁ'),
    (0x1E81, 'V'),
    (0x1E82, 'M', u'ẃ'),
    (0x1E83, 'V'),
    (0x1E84, 'M', u'ẅ'),
    (0x1E85, 'V'),
    (0x1E86, 'M', u'ẇ'),
    (0x1E87, 'V'),
    (0x1E88, 'M', u'ẉ'),
    (0x1E89, 'V'),
    (0x1E8A, 'M', u'ẋ'),
    (0x1E8B, 'V'),
    (0x1E8C, 'M', u'ẍ'),
    (0x1E8D, 'V'),
    (0x1E8E, 'M', u'ẏ'),
    (0x1E8F, 'V'),
    (0x1E90, 'M', u'ẑ'),
    (0x1E91, 'V'),
    (0x1E92, 'M', u'ẓ'),
    (0x1E93, 'V'),
    (0x1E94, 'M', u'ẕ'),
    (0x1E95, 'V'),
    (0x1E9A, 'M', u'aʾ'),
    (0x1E9B, 'M', u'ṡ'),
    (0x1E9C, 'V'),
    (0x1E9E, 'M', u'ss'),
    (0x1E9F, 'V'),
    (0x1EA0, 'M', u'ạ'),
    (0x1EA1, 'V'),
    (0x1EA2, 'M', u'ả'),
    (0x1EA3, 'V'),
    (0x1EA4, 'M', u'ấ'),
    (0x1EA5, 'V'),
    (0x1EA6, 'M', u'ầ'),
    (0x1EA7, 'V'),
    (0x1EA8, 'M', u'ẩ'),
    (0x1EA9, 'V'),
    (0x1EAA, 'M', u'ẫ'),
    (0x1EAB, 'V'),
    (0x1EAC, 'M', u'ậ'),
    (0x1EAD, 'V'),
    (0x1EAE, 'M', u'ắ'),
    (0x1EAF, 'V'),
    (0x1EB0, 'M', u'ằ'),
    (0x1EB1, 'V'),
    (0x1EB2, 'M', u'ẳ'),
    (0x1EB3, 'V'),
    ]

def _seg_18():
    """Generated mapping-table segment covering codepoints 0x1EB4-0x1F2D.

    Each tuple is (codepoint, status) or (codepoint, status, mapping);
    an entry applies from its codepoint up to (not including) the next
    entry's.  Status letters appear to be UTS #46 style ('V' valid,
    'M' mapped to the given string, 'X' disallowed, 'I' ignored,
    '3' disallowed_STD3) -- NOTE(review): confirm against the table
    generator.  Machine-generated data; do not edit by hand.
    """
    return [
    (0x1EB4, 'M', u'ẵ'),
    (0x1EB5, 'V'),
    (0x1EB6, 'M', u'ặ'),
    (0x1EB7, 'V'),
    (0x1EB8, 'M', u'ẹ'),
    (0x1EB9, 'V'),
    (0x1EBA, 'M', u'ẻ'),
    (0x1EBB, 'V'),
    (0x1EBC, 'M', u'ẽ'),
    (0x1EBD, 'V'),
    (0x1EBE, 'M', u'ế'),
    (0x1EBF, 'V'),
    (0x1EC0, 'M', u'ề'),
    (0x1EC1, 'V'),
    (0x1EC2, 'M', u'ể'),
    (0x1EC3, 'V'),
    (0x1EC4, 'M', u'ễ'),
    (0x1EC5, 'V'),
    (0x1EC6, 'M', u'ệ'),
    (0x1EC7, 'V'),
    (0x1EC8, 'M', u'ỉ'),
    (0x1EC9, 'V'),
    (0x1ECA, 'M', u'ị'),
    (0x1ECB, 'V'),
    (0x1ECC, 'M', u'ọ'),
    (0x1ECD, 'V'),
    (0x1ECE, 'M', u'ỏ'),
    (0x1ECF, 'V'),
    (0x1ED0, 'M', u'ố'),
    (0x1ED1, 'V'),
    (0x1ED2, 'M', u'ồ'),
    (0x1ED3, 'V'),
    (0x1ED4, 'M', u'ổ'),
    (0x1ED5, 'V'),
    (0x1ED6, 'M', u'ỗ'),
    (0x1ED7, 'V'),
    (0x1ED8, 'M', u'ộ'),
    (0x1ED9, 'V'),
    (0x1EDA, 'M', u'ớ'),
    (0x1EDB, 'V'),
    (0x1EDC, 'M', u'ờ'),
    (0x1EDD, 'V'),
    (0x1EDE, 'M', u'ở'),
    (0x1EDF, 'V'),
    (0x1EE0, 'M', u'ỡ'),
    (0x1EE1, 'V'),
    (0x1EE2, 'M', u'ợ'),
    (0x1EE3, 'V'),
    (0x1EE4, 'M', u'ụ'),
    (0x1EE5, 'V'),
    (0x1EE6, 'M', u'ủ'),
    (0x1EE7, 'V'),
    (0x1EE8, 'M', u'ứ'),
    (0x1EE9, 'V'),
    (0x1EEA, 'M', u'ừ'),
    (0x1EEB, 'V'),
    (0x1EEC, 'M', u'ử'),
    (0x1EED, 'V'),
    (0x1EEE, 'M', u'ữ'),
    (0x1EEF, 'V'),
    (0x1EF0, 'M', u'ự'),
    (0x1EF1, 'V'),
    (0x1EF2, 'M', u'ỳ'),
    (0x1EF3, 'V'),
    (0x1EF4, 'M', u'ỵ'),
    (0x1EF5, 'V'),
    (0x1EF6, 'M', u'ỷ'),
    (0x1EF7, 'V'),
    (0x1EF8, 'M', u'ỹ'),
    (0x1EF9, 'V'),
    (0x1EFA, 'M', u'ỻ'),
    (0x1EFB, 'V'),
    (0x1EFC, 'M', u'ỽ'),
    (0x1EFD, 'V'),
    (0x1EFE, 'M', u'ỿ'),
    (0x1EFF, 'V'),
    (0x1F08, 'M', u'ἀ'),
    (0x1F09, 'M', u'ἁ'),
    (0x1F0A, 'M', u'ἂ'),
    (0x1F0B, 'M', u'ἃ'),
    (0x1F0C, 'M', u'ἄ'),
    (0x1F0D, 'M', u'ἅ'),
    (0x1F0E, 'M', u'ἆ'),
    (0x1F0F, 'M', u'ἇ'),
    (0x1F10, 'V'),
    (0x1F16, 'X'),
    (0x1F18, 'M', u'ἐ'),
    (0x1F19, 'M', u'ἑ'),
    (0x1F1A, 'M', u'ἒ'),
    (0x1F1B, 'M', u'ἓ'),
    (0x1F1C, 'M', u'ἔ'),
    (0x1F1D, 'M', u'ἕ'),
    (0x1F1E, 'X'),
    (0x1F20, 'V'),
    (0x1F28, 'M', u'ἠ'),
    (0x1F29, 'M', u'ἡ'),
    (0x1F2A, 'M', u'ἢ'),
    (0x1F2B, 'M', u'ἣ'),
    (0x1F2C, 'M', u'ἤ'),
    (0x1F2D, 'M', u'ἥ'),
    ]

def _seg_19():
    """Generated mapping-table segment covering codepoints 0x1F2E-0x1FAE.

    Each tuple is (codepoint, status) or (codepoint, status, mapping);
    an entry applies from its codepoint up to (not including) the next
    entry's.  Status letters appear to be UTS #46 style ('V' valid,
    'M' mapped to the given string, 'X' disallowed, 'I' ignored,
    '3' disallowed_STD3) -- NOTE(review): confirm against the table
    generator.  Machine-generated data; do not edit by hand.
    """
    return [
    (0x1F2E, 'M', u'ἦ'),
    (0x1F2F, 'M', u'ἧ'),
    (0x1F30, 'V'),
    (0x1F38, 'M', u'ἰ'),
    (0x1F39, 'M', u'ἱ'),
    (0x1F3A, 'M', u'ἲ'),
    (0x1F3B, 'M', u'ἳ'),
    (0x1F3C, 'M', u'ἴ'),
    (0x1F3D, 'M', u'ἵ'),
    (0x1F3E, 'M', u'ἶ'),
    (0x1F3F, 'M', u'ἷ'),
    (0x1F40, 'V'),
    (0x1F46, 'X'),
    (0x1F48, 'M', u'ὀ'),
    (0x1F49, 'M', u'ὁ'),
    (0x1F4A, 'M', u'ὂ'),
    (0x1F4B, 'M', u'ὃ'),
    (0x1F4C, 'M', u'ὄ'),
    (0x1F4D, 'M', u'ὅ'),
    (0x1F4E, 'X'),
    (0x1F50, 'V'),
    (0x1F58, 'X'),
    (0x1F59, 'M', u'ὑ'),
    (0x1F5A, 'X'),
    (0x1F5B, 'M', u'ὓ'),
    (0x1F5C, 'X'),
    (0x1F5D, 'M', u'ὕ'),
    (0x1F5E, 'X'),
    (0x1F5F, 'M', u'ὗ'),
    (0x1F60, 'V'),
    (0x1F68, 'M', u'ὠ'),
    (0x1F69, 'M', u'ὡ'),
    (0x1F6A, 'M', u'ὢ'),
    (0x1F6B, 'M', u'ὣ'),
    (0x1F6C, 'M', u'ὤ'),
    (0x1F6D, 'M', u'ὥ'),
    (0x1F6E, 'M', u'ὦ'),
    (0x1F6F, 'M', u'ὧ'),
    (0x1F70, 'V'),
    (0x1F71, 'M', u'ά'),
    (0x1F72, 'V'),
    (0x1F73, 'M', u'έ'),
    (0x1F74, 'V'),
    (0x1F75, 'M', u'ή'),
    (0x1F76, 'V'),
    (0x1F77, 'M', u'ί'),
    (0x1F78, 'V'),
    (0x1F79, 'M', u'ό'),
    (0x1F7A, 'V'),
    (0x1F7B, 'M', u'ύ'),
    (0x1F7C, 'V'),
    (0x1F7D, 'M', u'ώ'),
    (0x1F7E, 'X'),
    (0x1F80, 'M', u'ἀι'),
    (0x1F81, 'M', u'ἁι'),
    (0x1F82, 'M', u'ἂι'),
    (0x1F83, 'M', u'ἃι'),
    (0x1F84, 'M', u'ἄι'),
    (0x1F85, 'M', u'ἅι'),
    (0x1F86, 'M', u'ἆι'),
    (0x1F87, 'M', u'ἇι'),
    (0x1F88, 'M', u'ἀι'),
    (0x1F89, 'M', u'ἁι'),
    (0x1F8A, 'M', u'ἂι'),
    (0x1F8B, 'M', u'ἃι'),
    (0x1F8C, 'M', u'ἄι'),
    (0x1F8D, 'M', u'ἅι'),
    (0x1F8E, 'M', u'ἆι'),
    (0x1F8F, 'M', u'ἇι'),
    (0x1F90, 'M', u'ἠι'),
    (0x1F91, 'M', u'ἡι'),
    (0x1F92, 'M', u'ἢι'),
    (0x1F93, 'M', u'ἣι'),
    (0x1F94, 'M', u'ἤι'),
    (0x1F95, 'M', u'ἥι'),
    (0x1F96, 'M', u'ἦι'),
    (0x1F97, 'M', u'ἧι'),
    (0x1F98, 'M', u'ἠι'),
    (0x1F99, 'M', u'ἡι'),
    (0x1F9A, 'M', u'ἢι'),
    (0x1F9B, 'M', u'ἣι'),
    (0x1F9C, 'M', u'ἤι'),
    (0x1F9D, 'M', u'ἥι'),
    (0x1F9E, 'M', u'ἦι'),
    (0x1F9F, 'M', u'ἧι'),
    (0x1FA0, 'M', u'ὠι'),
    (0x1FA1, 'M', u'ὡι'),
    (0x1FA2, 'M', u'ὢι'),
    (0x1FA3, 'M', u'ὣι'),
    (0x1FA4, 'M', u'ὤι'),
    (0x1FA5, 'M', u'ὥι'),
    (0x1FA6, 'M', u'ὦι'),
    (0x1FA7, 'M', u'ὧι'),
    (0x1FA8, 'M', u'ὠι'),
    (0x1FA9, 'M', u'ὡι'),
    (0x1FAA, 'M', u'ὢι'),
    (0x1FAB, 'M', u'ὣι'),
    (0x1FAC, 'M', u'ὤι'),
    (0x1FAD, 'M', u'ὥι'),
    (0x1FAE, 'M', u'ὦι'),
    ]

def _seg_20():
    """Generated mapping-table segment covering codepoints 0x1FAF-0x2058.

    Each tuple is (codepoint, status) or (codepoint, status, mapping);
    an entry applies from its codepoint up to (not including) the next
    entry's.  Status letters appear to be UTS #46 style ('V' valid,
    'M' mapped to the given string, 'X' disallowed, 'I' ignored,
    'D' deviation, '3' disallowed_STD3) -- NOTE(review): confirm
    against the table generator.  Machine-generated data; do not
    edit by hand.
    """
    return [
    (0x1FAF, 'M', u'ὧι'),
    (0x1FB0, 'V'),
    (0x1FB2, 'M', u'ὰι'),
    (0x1FB3, 'M', u'αι'),
    (0x1FB4, 'M', u'άι'),
    (0x1FB5, 'X'),
    (0x1FB6, 'V'),
    (0x1FB7, 'M', u'ᾶι'),
    (0x1FB8, 'M', u'ᾰ'),
    (0x1FB9, 'M', u'ᾱ'),
    (0x1FBA, 'M', u'ὰ'),
    (0x1FBB, 'M', u'ά'),
    (0x1FBC, 'M', u'αι'),
    (0x1FBD, '3', u' ̓'),
    (0x1FBE, 'M', u'ι'),
    (0x1FBF, '3', u' ̓'),
    (0x1FC0, '3', u' ͂'),
    (0x1FC1, '3', u' ̈͂'),
    (0x1FC2, 'M', u'ὴι'),
    (0x1FC3, 'M', u'ηι'),
    (0x1FC4, 'M', u'ήι'),
    (0x1FC5, 'X'),
    (0x1FC6, 'V'),
    (0x1FC7, 'M', u'ῆι'),
    (0x1FC8, 'M', u'ὲ'),
    (0x1FC9, 'M', u'έ'),
    (0x1FCA, 'M', u'ὴ'),
    (0x1FCB, 'M', u'ή'),
    (0x1FCC, 'M', u'ηι'),
    (0x1FCD, '3', u' ̓̀'),
    (0x1FCE, '3', u' ̓́'),
    (0x1FCF, '3', u' ̓͂'),
    (0x1FD0, 'V'),
    (0x1FD3, 'M', u'ΐ'),
    (0x1FD4, 'X'),
    (0x1FD6, 'V'),
    (0x1FD8, 'M', u'ῐ'),
    (0x1FD9, 'M', u'ῑ'),
    (0x1FDA, 'M', u'ὶ'),
    (0x1FDB, 'M', u'ί'),
    (0x1FDC, 'X'),
    (0x1FDD, '3', u' ̔̀'),
    (0x1FDE, '3', u' ̔́'),
    (0x1FDF, '3', u' ̔͂'),
    (0x1FE0, 'V'),
    (0x1FE3, 'M', u'ΰ'),
    (0x1FE4, 'V'),
    (0x1FE8, 'M', u'ῠ'),
    (0x1FE9, 'M', u'ῡ'),
    (0x1FEA, 'M', u'ὺ'),
    (0x1FEB, 'M', u'ύ'),
    (0x1FEC, 'M', u'ῥ'),
    (0x1FED, '3', u' ̈̀'),
    (0x1FEE, '3', u' ̈́'),
    (0x1FEF, '3', u'`'),
    (0x1FF0, 'X'),
    (0x1FF2, 'M', u'ὼι'),
    (0x1FF3, 'M', u'ωι'),
    (0x1FF4, 'M', u'ώι'),
    (0x1FF5, 'X'),
    (0x1FF6, 'V'),
    (0x1FF7, 'M', u'ῶι'),
    (0x1FF8, 'M', u'ὸ'),
    (0x1FF9, 'M', u'ό'),
    (0x1FFA, 'M', u'ὼ'),
    (0x1FFB, 'M', u'ώ'),
    (0x1FFC, 'M', u'ωι'),
    (0x1FFD, '3', u' ́'),
    (0x1FFE, '3', u' ̔'),
    (0x1FFF, 'X'),
    (0x2000, '3', u' '),
    (0x200B, 'I'),
    (0x200C, 'D', u''),
    (0x200E, 'X'),
    (0x2010, 'V'),
    (0x2011, 'M', u'‐'),
    (0x2012, 'V'),
    (0x2017, '3', u' ̳'),
    (0x2018, 'V'),
    (0x2024, 'X'),
    (0x2027, 'V'),
    (0x2028, 'X'),
    (0x202F, '3', u' '),
    (0x2030, 'V'),
    (0x2033, 'M', u'′′'),
    (0x2034, 'M', u'′′′'),
    (0x2035, 'V'),
    (0x2036, 'M', u'‵‵'),
    (0x2037, 'M', u'‵‵‵'),
    (0x2038, 'V'),
    (0x203C, '3', u'!!'),
    (0x203D, 'V'),
    (0x203E, '3', u' ̅'),
    (0x203F, 'V'),
    (0x2047, '3', u'??'),
    (0x2048, '3', u'?!'),
    (0x2049, '3', u'!?'),
    (0x204A, 'V'),
    (0x2057, 'M', u'′′′′'),
    (0x2058, 'V'),
    ]

def _seg_21():
    """Generated mapping-table segment covering codepoints 0x205F-0x2135.

    Each tuple is (codepoint, status) or (codepoint, status, mapping);
    an entry applies from its codepoint up to (not including) the next
    entry's.  Status letters appear to be UTS #46 style ('V' valid,
    'M' mapped to the given string, 'X' disallowed, 'I' ignored,
    '3' disallowed_STD3) -- NOTE(review): confirm against the table
    generator.  Machine-generated data; do not edit by hand.
    """
    return [
    (0x205F, '3', u' '),
    (0x2060, 'I'),
    (0x2061, 'X'),
    (0x2064, 'I'),
    (0x2065, 'X'),
    (0x2070, 'M', u'0'),
    (0x2071, 'M', u'i'),
    (0x2072, 'X'),
    (0x2074, 'M', u'4'),
    (0x2075, 'M', u'5'),
    (0x2076, 'M', u'6'),
    (0x2077, 'M', u'7'),
    (0x2078, 'M', u'8'),
    (0x2079, 'M', u'9'),
    (0x207A, '3', u'+'),
    (0x207B, 'M', u'−'),
    (0x207C, '3', u'='),
    (0x207D, '3', u'('),
    (0x207E, '3', u')'),
    (0x207F, 'M', u'n'),
    (0x2080, 'M', u'0'),
    (0x2081, 'M', u'1'),
    (0x2082, 'M', u'2'),
    (0x2083, 'M', u'3'),
    (0x2084, 'M', u'4'),
    (0x2085, 'M', u'5'),
    (0x2086, 'M', u'6'),
    (0x2087, 'M', u'7'),
    (0x2088, 'M', u'8'),
    (0x2089, 'M', u'9'),
    (0x208A, '3', u'+'),
    (0x208B, 'M', u'−'),
    (0x208C, '3', u'='),
    (0x208D, '3', u'('),
    (0x208E, '3', u')'),
    (0x208F, 'X'),
    (0x2090, 'M', u'a'),
    (0x2091, 'M', u'e'),
    (0x2092, 'M', u'o'),
    (0x2093, 'M', u'x'),
    (0x2094, 'M', u'ə'),
    (0x2095, 'M', u'h'),
    (0x2096, 'M', u'k'),
    (0x2097, 'M', u'l'),
    (0x2098, 'M', u'm'),
    (0x2099, 'M', u'n'),
    (0x209A, 'M', u'p'),
    (0x209B, 'M', u's'),
    (0x209C, 'M', u't'),
    (0x209D, 'X'),
    (0x20A0, 'V'),
    (0x20A8, 'M', u'rs'),
    (0x20A9, 'V'),
    (0x20BB, 'X'),
    (0x20D0, 'V'),
    (0x20F1, 'X'),
    (0x2100, '3', u'a/c'),
    (0x2101, '3', u'a/s'),
    (0x2102, 'M', u'c'),
    (0x2103, 'M', u'°c'),
    (0x2104, 'V'),
    (0x2105, '3', u'c/o'),
    (0x2106, '3', u'c/u'),
    (0x2107, 'M', u'ɛ'),
    (0x2108, 'V'),
    (0x2109, 'M', u'°f'),
    (0x210A, 'M', u'g'),
    (0x210B, 'M', u'h'),
    (0x210F, 'M', u'ħ'),
    (0x2110, 'M', u'i'),
    (0x2112, 'M', u'l'),
    (0x2114, 'V'),
    (0x2115, 'M', u'n'),
    (0x2116, 'M', u'no'),
    (0x2117, 'V'),
    (0x2119, 'M', u'p'),
    (0x211A, 'M', u'q'),
    (0x211B, 'M', u'r'),
    (0x211E, 'V'),
    (0x2120, 'M', u'sm'),
    (0x2121, 'M', u'tel'),
    (0x2122, 'M', u'tm'),
    (0x2123, 'V'),
    (0x2124, 'M', u'z'),
    (0x2125, 'V'),
    (0x2126, 'M', u'ω'),
    (0x2127, 'V'),
    (0x2128, 'M', u'z'),
    (0x2129, 'V'),
    (0x212A, 'M', u'k'),
    (0x212B, 'M', u'å'),
    (0x212C, 'M', u'b'),
    (0x212D, 'M', u'c'),
    (0x212E, 'V'),
    (0x212F, 'M', u'e'),
    (0x2131, 'M', u'f'),
    (0x2132, 'X'),
    (0x2133, 'M', u'm'),
    (0x2134, 'M', u'o'),
    (0x2135, 'M', u'א'),
    ]

def _seg_22():
    """Generated mapping-table segment covering codepoints 0x2136-0x246B.

    Each tuple is (codepoint, status) or (codepoint, status, mapping);
    an entry applies from its codepoint up to (not including) the next
    entry's.  Status letters appear to be UTS #46 style ('V' valid,
    'M' mapped to the given string, 'X' disallowed, 'I' ignored,
    '3' disallowed_STD3) -- NOTE(review): confirm against the table
    generator.  Machine-generated data; do not edit by hand.
    """
    return [
    (0x2136, 'M', u'ב'),
    (0x2137, 'M', u'ג'),
    (0x2138, 'M', u'ד'),
    (0x2139, 'M', u'i'),
    (0x213A, 'V'),
    (0x213B, 'M', u'fax'),
    (0x213C, 'M', u'π'),
    (0x213D, 'M', u'γ'),
    (0x213F, 'M', u'π'),
    (0x2140, 'M', u'∑'),
    (0x2141, 'V'),
    (0x2145, 'M', u'd'),
    (0x2147, 'M', u'e'),
    (0x2148, 'M', u'i'),
    (0x2149, 'M', u'j'),
    (0x214A, 'V'),
    (0x2150, 'M', u'1⁄7'),
    (0x2151, 'M', u'1⁄9'),
    (0x2152, 'M', u'1⁄10'),
    (0x2153, 'M', u'1⁄3'),
    (0x2154, 'M', u'2⁄3'),
    (0x2155, 'M', u'1⁄5'),
    (0x2156, 'M', u'2⁄5'),
    (0x2157, 'M', u'3⁄5'),
    (0x2158, 'M', u'4⁄5'),
    (0x2159, 'M', u'1⁄6'),
    (0x215A, 'M', u'5⁄6'),
    (0x215B, 'M', u'1⁄8'),
    (0x215C, 'M', u'3⁄8'),
    (0x215D, 'M', u'5⁄8'),
    (0x215E, 'M', u'7⁄8'),
    (0x215F, 'M', u'1⁄'),
    (0x2160, 'M', u'i'),
    (0x2161, 'M', u'ii'),
    (0x2162, 'M', u'iii'),
    (0x2163, 'M', u'iv'),
    (0x2164, 'M', u'v'),
    (0x2165, 'M', u'vi'),
    (0x2166, 'M', u'vii'),
    (0x2167, 'M', u'viii'),
    (0x2168, 'M', u'ix'),
    (0x2169, 'M', u'x'),
    (0x216A, 'M', u'xi'),
    (0x216B, 'M', u'xii'),
    (0x216C, 'M', u'l'),
    (0x216D, 'M', u'c'),
    (0x216E, 'M', u'd'),
    (0x216F, 'M', u'm'),
    (0x2170, 'M', u'i'),
    (0x2171, 'M', u'ii'),
    (0x2172, 'M', u'iii'),
    (0x2173, 'M', u'iv'),
    (0x2174, 'M', u'v'),
    (0x2175, 'M', u'vi'),
    (0x2176, 'M', u'vii'),
    (0x2177, 'M', u'viii'),
    (0x2178, 'M', u'ix'),
    (0x2179, 'M', u'x'),
    (0x217A, 'M', u'xi'),
    (0x217B, 'M', u'xii'),
    (0x217C, 'M', u'l'),
    (0x217D, 'M', u'c'),
    (0x217E, 'M', u'd'),
    (0x217F, 'M', u'm'),
    (0x2180, 'V'),
    (0x2183, 'X'),
    (0x2184, 'V'),
    (0x2189, 'M', u'0⁄3'),
    (0x218A, 'X'),
    (0x2190, 'V'),
    (0x222C, 'M', u'∫∫'),
    (0x222D, 'M', u'∫∫∫'),
    (0x222E, 'V'),
    (0x222F, 'M', u'∮∮'),
    (0x2230, 'M', u'∮∮∮'),
    (0x2231, 'V'),
    (0x2260, '3'),
    (0x2261, 'V'),
    (0x226E, '3'),
    (0x2270, 'V'),
    (0x2329, 'M', u'〈'),
    (0x232A, 'M', u'〉'),
    (0x232B, 'V'),
    (0x23F4, 'X'),
    (0x2400, 'V'),
    (0x2427, 'X'),
    (0x2440, 'V'),
    (0x244B, 'X'),
    (0x2460, 'M', u'1'),
    (0x2461, 'M', u'2'),
    (0x2462, 'M', u'3'),
    (0x2463, 'M', u'4'),
    (0x2464, 'M', u'5'),
    (0x2465, 'M', u'6'),
    (0x2466, 'M', u'7'),
    (0x2467, 'M', u'8'),
    (0x2468, 'M', u'9'),
    (0x2469, 'M', u'10'),
    (0x246A, 'M', u'11'),
    (0x246B, 'M', u'12'),
    ]

def _seg_23():
    """Generated mapping-table segment covering codepoints 0x246C-0x24E2.

    Each tuple is (codepoint, status) or (codepoint, status, mapping);
    an entry applies from its codepoint up to (not including) the next
    entry's.  Status letters appear to be UTS #46 style ('V' valid,
    'M' mapped to the given string, 'X' disallowed, 'I' ignored,
    '3' disallowed_STD3) -- NOTE(review): confirm against the table
    generator.  Machine-generated data; do not edit by hand.
    """
    return [
    (0x246C, 'M', u'13'),
    (0x246D, 'M', u'14'),
    (0x246E, 'M', u'15'),
    (0x246F, 'M', u'16'),
    (0x2470, 'M', u'17'),
    (0x2471, 'M', u'18'),
    (0x2472, 'M', u'19'),
    (0x2473, 'M', u'20'),
    (0x2474, '3', u'(1)'),
    (0x2475, '3', u'(2)'),
    (0x2476, '3', u'(3)'),
    (0x2477, '3', u'(4)'),
    (0x2478, '3', u'(5)'),
    (0x2479, '3', u'(6)'),
    (0x247A, '3', u'(7)'),
    (0x247B, '3', u'(8)'),
    (0x247C, '3', u'(9)'),
    (0x247D, '3', u'(10)'),
    (0x247E, '3', u'(11)'),
    (0x247F, '3', u'(12)'),
    (0x2480, '3', u'(13)'),
    (0x2481, '3', u'(14)'),
    (0x2482, '3', u'(15)'),
    (0x2483, '3', u'(16)'),
    (0x2484, '3', u'(17)'),
    (0x2485, '3', u'(18)'),
    (0x2486, '3', u'(19)'),
    (0x2487, '3', u'(20)'),
    (0x2488, 'X'),
    (0x249C, '3', u'(a)'),
    (0x249D, '3', u'(b)'),
    (0x249E, '3', u'(c)'),
    (0x249F, '3', u'(d)'),
    (0x24A0, '3', u'(e)'),
    (0x24A1, '3', u'(f)'),
    (0x24A2, '3', u'(g)'),
    (0x24A3, '3', u'(h)'),
    (0x24A4, '3', u'(i)'),
    (0x24A5, '3', u'(j)'),
    (0x24A6, '3', u'(k)'),
    (0x24A7, '3', u'(l)'),
    (0x24A8, '3', u'(m)'),
    (0x24A9, '3', u'(n)'),
    (0x24AA, '3', u'(o)'),
    (0x24AB, '3', u'(p)'),
    (0x24AC, '3', u'(q)'),
    (0x24AD, '3', u'(r)'),
    (0x24AE, '3', u'(s)'),
    (0x24AF, '3', u'(t)'),
    (0x24B0, '3', u'(u)'),
    (0x24B1, '3', u'(v)'),
    (0x24B2, '3', u'(w)'),
    (0x24B3, '3', u'(x)'),
    (0x24B4, '3', u'(y)'),
    (0x24B5, '3', u'(z)'),
    (0x24B6, 'M', u'a'),
    (0x24B7, 'M', u'b'),
    (0x24B8, 'M', u'c'),
    (0x24B9, 'M', u'd'),
    (0x24BA, 'M', u'e'),
    (0x24BB, 'M', u'f'),
    (0x24BC, 'M', u'g'),
    (0x24BD, 'M', u'h'),
    (0x24BE, 'M', u'i'),
    (0x24BF, 'M', u'j'),
    (0x24C0, 'M', u'k'),
    (0x24C1, 'M', u'l'),
    (0x24C2, 'M', u'm'),
    (0x24C3, 'M', u'n'),
    (0x24C4, 'M', u'o'),
    (0x24C5, 'M', u'p'),
    (0x24C6, 'M', u'q'),
    (0x24C7, 'M', u'r'),
    (0x24C8, 'M', u's'),
    (0x24C9, 'M', u't'),
    (0x24CA, 'M', u'u'),
    (0x24CB, 'M', u'v'),
    (0x24CC, 'M', u'w'),
    (0x24CD, 'M', u'x'),
    (0x24CE, 'M', u'y'),
    (0x24CF, 'M', u'z'),
    (0x24D0, 'M', u'a'),
    (0x24D1, 'M', u'b'),
    (0x24D2, 'M', u'c'),
    (0x24D3, 'M', u'd'),
    (0x24D4, 'M', u'e'),
    (0x24D5, 'M', u'f'),
    (0x24D6, 'M', u'g'),
    (0x24D7, 'M', u'h'),
    (0x24D8, 'M', u'i'),
    (0x24D9, 'M', u'j'),
    (0x24DA, 'M', u'k'),
    (0x24DB, 'M', u'l'),
    (0x24DC, 'M', u'm'),
    (0x24DD, 'M', u'n'),
    (0x24DE, 'M', u'o'),
    (0x24DF, 'M', u'p'),
    (0x24E0, 'M', u'q'),
    (0x24E1, 'M', u'r'),
    (0x24E2, 'M', u's'),
    ]

def _seg_24():
    # Auto-generated Unicode code point table segment (do not edit by hand).
    # Each entry is (codepoint, status[, mapping]); status codes seen here are
    # 'V', 'M', 'X' and '3' — presumably valid / mapped / disallowed /
    # STD3-mapped per UTS #46; TODO confirm against the table generator.
    return [
    (0x24E3, 'M', u't'),
    (0x24E4, 'M', u'u'),
    (0x24E5, 'M', u'v'),
    (0x24E6, 'M', u'w'),
    (0x24E7, 'M', u'x'),
    (0x24E8, 'M', u'y'),
    (0x24E9, 'M', u'z'),
    (0x24EA, 'M', u'0'),
    (0x24EB, 'V'),
    (0x2700, 'X'),
    (0x2701, 'V'),
    (0x2A0C, 'M', u'∫∫∫∫'),
    (0x2A0D, 'V'),
    (0x2A74, '3', u'::='),
    (0x2A75, '3', u'=='),
    (0x2A76, '3', u'==='),
    (0x2A77, 'V'),
    (0x2ADC, 'M', u'⫝̸'),
    (0x2ADD, 'V'),
    (0x2B4D, 'X'),
    (0x2B50, 'V'),
    (0x2B5A, 'X'),
    (0x2C00, 'M', u'ⰰ'),
    (0x2C01, 'M', u'ⰱ'),
    (0x2C02, 'M', u'ⰲ'),
    (0x2C03, 'M', u'ⰳ'),
    (0x2C04, 'M', u'ⰴ'),
    (0x2C05, 'M', u'ⰵ'),
    (0x2C06, 'M', u'ⰶ'),
    (0x2C07, 'M', u'ⰷ'),
    (0x2C08, 'M', u'ⰸ'),
    (0x2C09, 'M', u'ⰹ'),
    (0x2C0A, 'M', u'ⰺ'),
    (0x2C0B, 'M', u'ⰻ'),
    (0x2C0C, 'M', u'ⰼ'),
    (0x2C0D, 'M', u'ⰽ'),
    (0x2C0E, 'M', u'ⰾ'),
    (0x2C0F, 'M', u'ⰿ'),
    (0x2C10, 'M', u'ⱀ'),
    (0x2C11, 'M', u'ⱁ'),
    (0x2C12, 'M', u'ⱂ'),
    (0x2C13, 'M', u'ⱃ'),
    (0x2C14, 'M', u'ⱄ'),
    (0x2C15, 'M', u'ⱅ'),
    (0x2C16, 'M', u'ⱆ'),
    (0x2C17, 'M', u'ⱇ'),
    (0x2C18, 'M', u'ⱈ'),
    (0x2C19, 'M', u'ⱉ'),
    (0x2C1A, 'M', u'ⱊ'),
    (0x2C1B, 'M', u'ⱋ'),
    (0x2C1C, 'M', u'ⱌ'),
    (0x2C1D, 'M', u'ⱍ'),
    (0x2C1E, 'M', u'ⱎ'),
    (0x2C1F, 'M', u'ⱏ'),
    (0x2C20, 'M', u'ⱐ'),
    (0x2C21, 'M', u'ⱑ'),
    (0x2C22, 'M', u'ⱒ'),
    (0x2C23, 'M', u'ⱓ'),
    (0x2C24, 'M', u'ⱔ'),
    (0x2C25, 'M', u'ⱕ'),
    (0x2C26, 'M', u'ⱖ'),
    (0x2C27, 'M', u'ⱗ'),
    (0x2C28, 'M', u'ⱘ'),
    (0x2C29, 'M', u'ⱙ'),
    (0x2C2A, 'M', u'ⱚ'),
    (0x2C2B, 'M', u'ⱛ'),
    (0x2C2C, 'M', u'ⱜ'),
    (0x2C2D, 'M', u'ⱝ'),
    (0x2C2E, 'M', u'ⱞ'),
    (0x2C2F, 'X'),
    (0x2C30, 'V'),
    (0x2C5F, 'X'),
    (0x2C60, 'M', u'ⱡ'),
    (0x2C61, 'V'),
    (0x2C62, 'M', u'ɫ'),
    (0x2C63, 'M', u'ᵽ'),
    (0x2C64, 'M', u'ɽ'),
    (0x2C65, 'V'),
    (0x2C67, 'M', u'ⱨ'),
    (0x2C68, 'V'),
    (0x2C69, 'M', u'ⱪ'),
    (0x2C6A, 'V'),
    (0x2C6B, 'M', u'ⱬ'),
    (0x2C6C, 'V'),
    (0x2C6D, 'M', u'ɑ'),
    (0x2C6E, 'M', u'ɱ'),
    (0x2C6F, 'M', u'ɐ'),
    (0x2C70, 'M', u'ɒ'),
    (0x2C71, 'V'),
    (0x2C72, 'M', u'ⱳ'),
    (0x2C73, 'V'),
    (0x2C75, 'M', u'ⱶ'),
    (0x2C76, 'V'),
    (0x2C7C, 'M', u'j'),
    (0x2C7D, 'M', u'v'),
    (0x2C7E, 'M', u'ȿ'),
    (0x2C7F, 'M', u'ɀ'),
    (0x2C80, 'M', u'ⲁ'),
    (0x2C81, 'V'),
    (0x2C82, 'M', u'ⲃ'),
    ]

def _seg_25():
    # Auto-generated Unicode code point table segment (do not edit by hand).
    # Each entry is (codepoint, status[, mapping]); status codes seen here are
    # 'V' and 'M' — presumably valid / mapped per UTS #46; TODO confirm
    # against the table generator.
    return [
    (0x2C83, 'V'),
    (0x2C84, 'M', u'ⲅ'),
    (0x2C85, 'V'),
    (0x2C86, 'M', u'ⲇ'),
    (0x2C87, 'V'),
    (0x2C88, 'M', u'ⲉ'),
    (0x2C89, 'V'),
    (0x2C8A, 'M', u'ⲋ'),
    (0x2C8B, 'V'),
    (0x2C8C, 'M', u'ⲍ'),
    (0x2C8D, 'V'),
    (0x2C8E, 'M', u'ⲏ'),
    (0x2C8F, 'V'),
    (0x2C90, 'M', u'ⲑ'),
    (0x2C91, 'V'),
    (0x2C92, 'M', u'ⲓ'),
    (0x2C93, 'V'),
    (0x2C94, 'M', u'ⲕ'),
    (0x2C95, 'V'),
    (0x2C96, 'M', u'ⲗ'),
    (0x2C97, 'V'),
    (0x2C98, 'M', u'ⲙ'),
    (0x2C99, 'V'),
    (0x2C9A, 'M', u'ⲛ'),
    (0x2C9B, 'V'),
    (0x2C9C, 'M', u'ⲝ'),
    (0x2C9D, 'V'),
    (0x2C9E, 'M', u'ⲟ'),
    (0x2C9F, 'V'),
    (0x2CA0, 'M', u'ⲡ'),
    (0x2CA1, 'V'),
    (0x2CA2, 'M', u'ⲣ'),
    (0x2CA3, 'V'),
    (0x2CA4, 'M', u'ⲥ'),
    (0x2CA5, 'V'),
    (0x2CA6, 'M', u'ⲧ'),
    (0x2CA7, 'V'),
    (0x2CA8, 'M', u'ⲩ'),
    (0x2CA9, 'V'),
    (0x2CAA, 'M', u'ⲫ'),
    (0x2CAB, 'V'),
    (0x2CAC, 'M', u'ⲭ'),
    (0x2CAD, 'V'),
    (0x2CAE, 'M', u'ⲯ'),
    (0x2CAF, 'V'),
    (0x2CB0, 'M', u'ⲱ'),
    (0x2CB1, 'V'),
    (0x2CB2, 'M', u'ⲳ'),
    (0x2CB3, 'V'),
    (0x2CB4, 'M', u'ⲵ'),
    (0x2CB5, 'V'),
    (0x2CB6, 'M', u'ⲷ'),
    (0x2CB7, 'V'),
    (0x2CB8, 'M', u'ⲹ'),
    (0x2CB9, 'V'),
    (0x2CBA, 'M', u'ⲻ'),
    (0x2CBB, 'V'),
    (0x2CBC, 'M', u'ⲽ'),
    (0x2CBD, 'V'),
    (0x2CBE, 'M', u'ⲿ'),
    (0x2CBF, 'V'),
    (0x2CC0, 'M', u'ⳁ'),
    (0x2CC1, 'V'),
    (0x2CC2, 'M', u'ⳃ'),
    (0x2CC3, 'V'),
    (0x2CC4, 'M', u'ⳅ'),
    (0x2CC5, 'V'),
    (0x2CC6, 'M', u'ⳇ'),
    (0x2CC7, 'V'),
    (0x2CC8, 'M', u'ⳉ'),
    (0x2CC9, 'V'),
    (0x2CCA, 'M', u'ⳋ'),
    (0x2CCB, 'V'),
    (0x2CCC, 'M', u'ⳍ'),
    (0x2CCD, 'V'),
    (0x2CCE, 'M', u'ⳏ'),
    (0x2CCF, 'V'),
    (0x2CD0, 'M', u'ⳑ'),
    (0x2CD1, 'V'),
    (0x2CD2, 'M', u'ⳓ'),
    (0x2CD3, 'V'),
    (0x2CD4, 'M', u'ⳕ'),
    (0x2CD5, 'V'),
    (0x2CD6, 'M', u'ⳗ'),
    (0x2CD7, 'V'),
    (0x2CD8, 'M', u'ⳙ'),
    (0x2CD9, 'V'),
    (0x2CDA, 'M', u'ⳛ'),
    (0x2CDB, 'V'),
    (0x2CDC, 'M', u'ⳝ'),
    (0x2CDD, 'V'),
    (0x2CDE, 'M', u'ⳟ'),
    (0x2CDF, 'V'),
    (0x2CE0, 'M', u'ⳡ'),
    (0x2CE1, 'V'),
    (0x2CE2, 'M', u'ⳣ'),
    (0x2CE3, 'V'),
    (0x2CEB, 'M', u'ⳬ'),
    (0x2CEC, 'V'),
    (0x2CED, 'M', u'ⳮ'),
    ]

def _seg_26():
    # Auto-generated Unicode code point table segment (do not edit by hand).
    # Each entry is (codepoint, status[, mapping]); status codes seen here are
    # 'V', 'M' and 'X' — presumably valid / mapped / disallowed per UTS #46;
    # TODO confirm against the table generator.
    return [
    (0x2CEE, 'V'),
    (0x2CF2, 'M', u'ⳳ'),
    (0x2CF3, 'V'),
    (0x2CF4, 'X'),
    (0x2CF9, 'V'),
    (0x2D26, 'X'),
    (0x2D27, 'V'),
    (0x2D28, 'X'),
    (0x2D2D, 'V'),
    (0x2D2E, 'X'),
    (0x2D30, 'V'),
    (0x2D68, 'X'),
    (0x2D6F, 'M', u'ⵡ'),
    (0x2D70, 'V'),
    (0x2D71, 'X'),
    (0x2D7F, 'V'),
    (0x2D97, 'X'),
    (0x2DA0, 'V'),
    (0x2DA7, 'X'),
    (0x2DA8, 'V'),
    (0x2DAF, 'X'),
    (0x2DB0, 'V'),
    (0x2DB7, 'X'),
    (0x2DB8, 'V'),
    (0x2DBF, 'X'),
    (0x2DC0, 'V'),
    (0x2DC7, 'X'),
    (0x2DC8, 'V'),
    (0x2DCF, 'X'),
    (0x2DD0, 'V'),
    (0x2DD7, 'X'),
    (0x2DD8, 'V'),
    (0x2DDF, 'X'),
    (0x2DE0, 'V'),
    (0x2E3C, 'X'),
    (0x2E80, 'V'),
    (0x2E9A, 'X'),
    (0x2E9B, 'V'),
    (0x2E9F, 'M', u'母'),
    (0x2EA0, 'V'),
    (0x2EF3, 'M', u'龟'),
    (0x2EF4, 'X'),
    (0x2F00, 'M', u'一'),
    (0x2F01, 'M', u'丨'),
    (0x2F02, 'M', u'丶'),
    (0x2F03, 'M', u'丿'),
    (0x2F04, 'M', u'乙'),
    (0x2F05, 'M', u'亅'),
    (0x2F06, 'M', u'二'),
    (0x2F07, 'M', u'亠'),
    (0x2F08, 'M', u'人'),
    (0x2F09, 'M', u'儿'),
    (0x2F0A, 'M', u'入'),
    (0x2F0B, 'M', u'八'),
    (0x2F0C, 'M', u'冂'),
    (0x2F0D, 'M', u'冖'),
    (0x2F0E, 'M', u'冫'),
    (0x2F0F, 'M', u'几'),
    (0x2F10, 'M', u'凵'),
    (0x2F11, 'M', u'刀'),
    (0x2F12, 'M', u'力'),
    (0x2F13, 'M', u'勹'),
    (0x2F14, 'M', u'匕'),
    (0x2F15, 'M', u'匚'),
    (0x2F16, 'M', u'匸'),
    (0x2F17, 'M', u'十'),
    (0x2F18, 'M', u'卜'),
    (0x2F19, 'M', u'卩'),
    (0x2F1A, 'M', u'厂'),
    (0x2F1B, 'M', u'厶'),
    (0x2F1C, 'M', u'又'),
    (0x2F1D, 'M', u'口'),
    (0x2F1E, 'M', u'囗'),
    (0x2F1F, 'M', u'土'),
    (0x2F20, 'M', u'士'),
    (0x2F21, 'M', u'夂'),
    (0x2F22, 'M', u'夊'),
    (0x2F23, 'M', u'夕'),
    (0x2F24, 'M', u'大'),
    (0x2F25, 'M', u'女'),
    (0x2F26, 'M', u'子'),
    (0x2F27, 'M', u'宀'),
    (0x2F28, 'M', u'寸'),
    (0x2F29, 'M', u'小'),
    (0x2F2A, 'M', u'尢'),
    (0x2F2B, 'M', u'尸'),
    (0x2F2C, 'M', u'屮'),
    (0x2F2D, 'M', u'山'),
    (0x2F2E, 'M', u'巛'),
    (0x2F2F, 'M', u'工'),
    (0x2F30, 'M', u'己'),
    (0x2F31, 'M', u'巾'),
    (0x2F32, 'M', u'干'),
    (0x2F33, 'M', u'幺'),
    (0x2F34, 'M', u'广'),
    (0x2F35, 'M', u'廴'),
    (0x2F36, 'M', u'廾'),
    (0x2F37, 'M', u'弋'),
    (0x2F38, 'M', u'弓'),
    (0x2F39, 'M', u'彐'),
    ]

def _seg_27():
    # Auto-generated Unicode code point table segment (do not edit by hand).
    # Each entry is (codepoint, 'M', mapping): Kangxi radical code points
    # mapped to replacement characters — presumably per UTS #46; TODO
    # confirm against the table generator.
    return [
    (0x2F3A, 'M', u'彡'),
    (0x2F3B, 'M', u'彳'),
    (0x2F3C, 'M', u'心'),
    (0x2F3D, 'M', u'戈'),
    (0x2F3E, 'M', u'戶'),
    (0x2F3F, 'M', u'手'),
    (0x2F40, 'M', u'支'),
    (0x2F41, 'M', u'攴'),
    (0x2F42, 'M', u'文'),
    (0x2F43, 'M', u'斗'),
    (0x2F44, 'M', u'斤'),
    (0x2F45, 'M', u'方'),
    (0x2F46, 'M', u'无'),
    (0x2F47, 'M', u'日'),
    (0x2F48, 'M', u'曰'),
    (0x2F49, 'M', u'月'),
    (0x2F4A, 'M', u'木'),
    (0x2F4B, 'M', u'欠'),
    (0x2F4C, 'M', u'止'),
    (0x2F4D, 'M', u'歹'),
    (0x2F4E, 'M', u'殳'),
    (0x2F4F, 'M', u'毋'),
    (0x2F50, 'M', u'比'),
    (0x2F51, 'M', u'毛'),
    (0x2F52, 'M', u'氏'),
    (0x2F53, 'M', u'气'),
    (0x2F54, 'M', u'水'),
    (0x2F55, 'M', u'火'),
    (0x2F56, 'M', u'爪'),
    (0x2F57, 'M', u'父'),
    (0x2F58, 'M', u'爻'),
    (0x2F59, 'M', u'爿'),
    (0x2F5A, 'M', u'片'),
    (0x2F5B, 'M', u'牙'),
    (0x2F5C, 'M', u'牛'),
    (0x2F5D, 'M', u'犬'),
    (0x2F5E, 'M', u'玄'),
    (0x2F5F, 'M', u'玉'),
    (0x2F60, 'M', u'瓜'),
    (0x2F61, 'M', u'瓦'),
    (0x2F62, 'M', u'甘'),
    (0x2F63, 'M', u'生'),
    (0x2F64, 'M', u'用'),
    (0x2F65, 'M', u'田'),
    (0x2F66, 'M', u'疋'),
    (0x2F67, 'M', u'疒'),
    (0x2F68, 'M', u'癶'),
    (0x2F69, 'M', u'白'),
    (0x2F6A, 'M', u'皮'),
    (0x2F6B, 'M', u'皿'),
    (0x2F6C, 'M', u'目'),
    (0x2F6D, 'M', u'矛'),
    (0x2F6E, 'M', u'矢'),
    (0x2F6F, 'M', u'石'),
    (0x2F70, 'M', u'示'),
    (0x2F71, 'M', u'禸'),
    (0x2F72, 'M', u'禾'),
    (0x2F73, 'M', u'穴'),
    (0x2F74, 'M', u'立'),
    (0x2F75, 'M', u'竹'),
    (0x2F76, 'M', u'米'),
    (0x2F77, 'M', u'糸'),
    (0x2F78, 'M', u'缶'),
    (0x2F79, 'M', u'网'),
    (0x2F7A, 'M', u'羊'),
    (0x2F7B, 'M', u'羽'),
    (0x2F7C, 'M', u'老'),
    (0x2F7D, 'M', u'而'),
    (0x2F7E, 'M', u'耒'),
    (0x2F7F, 'M', u'耳'),
    (0x2F80, 'M', u'聿'),
    (0x2F81, 'M', u'肉'),
    (0x2F82, 'M', u'臣'),
    (0x2F83, 'M', u'自'),
    (0x2F84, 'M', u'至'),
    (0x2F85, 'M', u'臼'),
    (0x2F86, 'M', u'舌'),
    (0x2F87, 'M', u'舛'),
    (0x2F88, 'M', u'舟'),
    (0x2F89, 'M', u'艮'),
    (0x2F8A, 'M', u'色'),
    (0x2F8B, 'M', u'艸'),
    (0x2F8C, 'M', u'虍'),
    (0x2F8D, 'M', u'虫'),
    (0x2F8E, 'M', u'血'),
    (0x2F8F, 'M', u'行'),
    (0x2F90, 'M', u'衣'),
    (0x2F91, 'M', u'襾'),
    (0x2F92, 'M', u'見'),
    (0x2F93, 'M', u'角'),
    (0x2F94, 'M', u'言'),
    (0x2F95, 'M', u'谷'),
    (0x2F96, 'M', u'豆'),
    (0x2F97, 'M', u'豕'),
    (0x2F98, 'M', u'豸'),
    (0x2F99, 'M', u'貝'),
    (0x2F9A, 'M', u'赤'),
    (0x2F9B, 'M', u'走'),
    (0x2F9C, 'M', u'足'),
    (0x2F9D, 'M', u'身'),
    ]

def _seg_28():
    # Auto-generated Unicode code point table segment (do not edit by hand).
    # Each entry is (codepoint, status[, mapping]); status codes seen here are
    # 'V', 'M', 'X' and '3' — presumably valid / mapped / disallowed /
    # STD3-mapped per UTS #46; TODO confirm against the table generator.
    return [
    (0x2F9E, 'M', u'車'),
    (0x2F9F, 'M', u'辛'),
    (0x2FA0, 'M', u'辰'),
    (0x2FA1, 'M', u'辵'),
    (0x2FA2, 'M', u'邑'),
    (0x2FA3, 'M', u'酉'),
    (0x2FA4, 'M', u'釆'),
    (0x2FA5, 'M', u'里'),
    (0x2FA6, 'M', u'金'),
    (0x2FA7, 'M', u'長'),
    (0x2FA8, 'M', u'門'),
    (0x2FA9, 'M', u'阜'),
    (0x2FAA, 'M', u'隶'),
    (0x2FAB, 'M', u'隹'),
    (0x2FAC, 'M', u'雨'),
    (0x2FAD, 'M', u'靑'),
    (0x2FAE, 'M', u'非'),
    (0x2FAF, 'M', u'面'),
    (0x2FB0, 'M', u'革'),
    (0x2FB1, 'M', u'韋'),
    (0x2FB2, 'M', u'韭'),
    (0x2FB3, 'M', u'音'),
    (0x2FB4, 'M', u'頁'),
    (0x2FB5, 'M', u'風'),
    (0x2FB6, 'M', u'飛'),
    (0x2FB7, 'M', u'食'),
    (0x2FB8, 'M', u'首'),
    (0x2FB9, 'M', u'香'),
    (0x2FBA, 'M', u'馬'),
    (0x2FBB, 'M', u'骨'),
    (0x2FBC, 'M', u'高'),
    (0x2FBD, 'M', u'髟'),
    (0x2FBE, 'M', u'鬥'),
    (0x2FBF, 'M', u'鬯'),
    (0x2FC0, 'M', u'鬲'),
    (0x2FC1, 'M', u'鬼'),
    (0x2FC2, 'M', u'魚'),
    (0x2FC3, 'M', u'鳥'),
    (0x2FC4, 'M', u'鹵'),
    (0x2FC5, 'M', u'鹿'),
    (0x2FC6, 'M', u'麥'),
    (0x2FC7, 'M', u'麻'),
    (0x2FC8, 'M', u'黃'),
    (0x2FC9, 'M', u'黍'),
    (0x2FCA, 'M', u'黑'),
    (0x2FCB, 'M', u'黹'),
    (0x2FCC, 'M', u'黽'),
    (0x2FCD, 'M', u'鼎'),
    (0x2FCE, 'M', u'鼓'),
    (0x2FCF, 'M', u'鼠'),
    (0x2FD0, 'M', u'鼻'),
    (0x2FD1, 'M', u'齊'),
    (0x2FD2, 'M', u'齒'),
    (0x2FD3, 'M', u'龍'),
    (0x2FD4, 'M', u'龜'),
    (0x2FD5, 'M', u'龠'),
    (0x2FD6, 'X'),
    (0x3000, '3', u' '),
    (0x3001, 'V'),
    (0x3002, 'M', u'.'),
    (0x3003, 'V'),
    (0x3036, 'M', u'〒'),
    (0x3037, 'V'),
    (0x3038, 'M', u'十'),
    (0x3039, 'M', u'卄'),
    (0x303A, 'M', u'卅'),
    (0x303B, 'V'),
    (0x3040, 'X'),
    (0x3041, 'V'),
    (0x3097, 'X'),
    (0x3099, 'V'),
    (0x309B, '3', u' ゙'),
    (0x309C, '3', u' ゚'),
    (0x309D, 'V'),
    (0x309F, 'M', u'より'),
    (0x30A0, 'V'),
    (0x30FF, 'M', u'コト'),
    (0x3100, 'X'),
    (0x3105, 'V'),
    (0x312E, 'X'),
    (0x3131, 'M', u'ᄀ'),
    (0x3132, 'M', u'ᄁ'),
    (0x3133, 'M', u'ᆪ'),
    (0x3134, 'M', u'ᄂ'),
    (0x3135, 'M', u'ᆬ'),
    (0x3136, 'M', u'ᆭ'),
    (0x3137, 'M', u'ᄃ'),
    (0x3138, 'M', u'ᄄ'),
    (0x3139, 'M', u'ᄅ'),
    (0x313A, 'M', u'ᆰ'),
    (0x313B, 'M', u'ᆱ'),
    (0x313C, 'M', u'ᆲ'),
    (0x313D, 'M', u'ᆳ'),
    (0x313E, 'M', u'ᆴ'),
    (0x313F, 'M', u'ᆵ'),
    (0x3140, 'M', u'ᄚ'),
    (0x3141, 'M', u'ᄆ'),
    (0x3142, 'M', u'ᄇ'),
    (0x3143, 'M', u'ᄈ'),
    (0x3144, 'M', u'ᄡ'),
    ]

def _seg_29():
    # Auto-generated Unicode code point table segment (do not edit by hand).
    # Each entry is (codepoint, status[, mapping]); status codes seen here are
    # 'V', 'M', 'X' and '3' — presumably valid / mapped / disallowed /
    # STD3-mapped per UTS #46; TODO confirm against the table generator.
    return [
    (0x3145, 'M', u'ᄉ'),
    (0x3146, 'M', u'ᄊ'),
    (0x3147, 'M', u'ᄋ'),
    (0x3148, 'M', u'ᄌ'),
    (0x3149, 'M', u'ᄍ'),
    (0x314A, 'M', u'ᄎ'),
    (0x314B, 'M', u'ᄏ'),
    (0x314C, 'M', u'ᄐ'),
    (0x314D, 'M', u'ᄑ'),
    (0x314E, 'M', u'ᄒ'),
    (0x314F, 'M', u'ᅡ'),
    (0x3150, 'M', u'ᅢ'),
    (0x3151, 'M', u'ᅣ'),
    (0x3152, 'M', u'ᅤ'),
    (0x3153, 'M', u'ᅥ'),
    (0x3154, 'M', u'ᅦ'),
    (0x3155, 'M', u'ᅧ'),
    (0x3156, 'M', u'ᅨ'),
    (0x3157, 'M', u'ᅩ'),
    (0x3158, 'M', u'ᅪ'),
    (0x3159, 'M', u'ᅫ'),
    (0x315A, 'M', u'ᅬ'),
    (0x315B, 'M', u'ᅭ'),
    (0x315C, 'M', u'ᅮ'),
    (0x315D, 'M', u'ᅯ'),
    (0x315E, 'M', u'ᅰ'),
    (0x315F, 'M', u'ᅱ'),
    (0x3160, 'M', u'ᅲ'),
    (0x3161, 'M', u'ᅳ'),
    (0x3162, 'M', u'ᅴ'),
    (0x3163, 'M', u'ᅵ'),
    (0x3164, 'X'),
    (0x3165, 'M', u'ᄔ'),
    (0x3166, 'M', u'ᄕ'),
    (0x3167, 'M', u'ᇇ'),
    (0x3168, 'M', u'ᇈ'),
    (0x3169, 'M', u'ᇌ'),
    (0x316A, 'M', u'ᇎ'),
    (0x316B, 'M', u'ᇓ'),
    (0x316C, 'M', u'ᇗ'),
    (0x316D, 'M', u'ᇙ'),
    (0x316E, 'M', u'ᄜ'),
    (0x316F, 'M', u'ᇝ'),
    (0x3170, 'M', u'ᇟ'),
    (0x3171, 'M', u'ᄝ'),
    (0x3172, 'M', u'ᄞ'),
    (0x3173, 'M', u'ᄠ'),
    (0x3174, 'M', u'ᄢ'),
    (0x3175, 'M', u'ᄣ'),
    (0x3176, 'M', u'ᄧ'),
    (0x3177, 'M', u'ᄩ'),
    (0x3178, 'M', u'ᄫ'),
    (0x3179, 'M', u'ᄬ'),
    (0x317A, 'M', u'ᄭ'),
    (0x317B, 'M', u'ᄮ'),
    (0x317C, 'M', u'ᄯ'),
    (0x317D, 'M', u'ᄲ'),
    (0x317E, 'M', u'ᄶ'),
    (0x317F, 'M', u'ᅀ'),
    (0x3180, 'M', u'ᅇ'),
    (0x3181, 'M', u'ᅌ'),
    (0x3182, 'M', u'ᇱ'),
    (0x3183, 'M', u'ᇲ'),
    (0x3184, 'M', u'ᅗ'),
    (0x3185, 'M', u'ᅘ'),
    (0x3186, 'M', u'ᅙ'),
    (0x3187, 'M', u'ᆄ'),
    (0x3188, 'M', u'ᆅ'),
    (0x3189, 'M', u'ᆈ'),
    (0x318A, 'M', u'ᆑ'),
    (0x318B, 'M', u'ᆒ'),
    (0x318C, 'M', u'ᆔ'),
    (0x318D, 'M', u'ᆞ'),
    (0x318E, 'M', u'ᆡ'),
    (0x318F, 'X'),
    (0x3190, 'V'),
    (0x3192, 'M', u'一'),
    (0x3193, 'M', u'二'),
    (0x3194, 'M', u'三'),
    (0x3195, 'M', u'四'),
    (0x3196, 'M', u'上'),
    (0x3197, 'M', u'中'),
    (0x3198, 'M', u'下'),
    (0x3199, 'M', u'甲'),
    (0x319A, 'M', u'乙'),
    (0x319B, 'M', u'丙'),
    (0x319C, 'M', u'丁'),
    (0x319D, 'M', u'天'),
    (0x319E, 'M', u'地'),
    (0x319F, 'M', u'人'),
    (0x31A0, 'V'),
    (0x31BB, 'X'),
    (0x31C0, 'V'),
    (0x31E4, 'X'),
    (0x31F0, 'V'),
    (0x3200, '3', u'(ᄀ)'),
    (0x3201, '3', u'(ᄂ)'),
    (0x3202, '3', u'(ᄃ)'),
    (0x3203, '3', u'(ᄅ)'),
    (0x3204, '3', u'(ᄆ)'),
    ]

def _seg_30():
    # Auto-generated Unicode code point table segment (do not edit by hand).
    # Each entry is (codepoint, status[, mapping]); status codes seen here are
    # 'V', 'M', 'X' and '3' — presumably valid / mapped / disallowed /
    # STD3-mapped per UTS #46; TODO confirm against the table generator.
    return [
    (0x3205, '3', u'(ᄇ)'),
    (0x3206, '3', u'(ᄉ)'),
    (0x3207, '3', u'(ᄋ)'),
    (0x3208, '3', u'(ᄌ)'),
    (0x3209, '3', u'(ᄎ)'),
    (0x320A, '3', u'(ᄏ)'),
    (0x320B, '3', u'(ᄐ)'),
    (0x320C, '3', u'(ᄑ)'),
    (0x320D, '3', u'(ᄒ)'),
    (0x320E, '3', u'(가)'),
    (0x320F, '3', u'(나)'),
    (0x3210, '3', u'(다)'),
    (0x3211, '3', u'(라)'),
    (0x3212, '3', u'(마)'),
    (0x3213, '3', u'(바)'),
    (0x3214, '3', u'(사)'),
    (0x3215, '3', u'(아)'),
    (0x3216, '3', u'(자)'),
    (0x3217, '3', u'(차)'),
    (0x3218, '3', u'(카)'),
    (0x3219, '3', u'(타)'),
    (0x321A, '3', u'(파)'),
    (0x321B, '3', u'(하)'),
    (0x321C, '3', u'(주)'),
    (0x321D, '3', u'(오전)'),
    (0x321E, '3', u'(오후)'),
    (0x321F, 'X'),
    (0x3220, '3', u'(一)'),
    (0x3221, '3', u'(二)'),
    (0x3222, '3', u'(三)'),
    (0x3223, '3', u'(四)'),
    (0x3224, '3', u'(五)'),
    (0x3225, '3', u'(六)'),
    (0x3226, '3', u'(七)'),
    (0x3227, '3', u'(八)'),
    (0x3228, '3', u'(九)'),
    (0x3229, '3', u'(十)'),
    (0x322A, '3', u'(月)'),
    (0x322B, '3', u'(火)'),
    (0x322C, '3', u'(水)'),
    (0x322D, '3', u'(木)'),
    (0x322E, '3', u'(金)'),
    (0x322F, '3', u'(土)'),
    (0x3230, '3', u'(日)'),
    (0x3231, '3', u'(株)'),
    (0x3232, '3', u'(有)'),
    (0x3233, '3', u'(社)'),
    (0x3234, '3', u'(名)'),
    (0x3235, '3', u'(特)'),
    (0x3236, '3', u'(財)'),
    (0x3237, '3', u'(祝)'),
    (0x3238, '3', u'(労)'),
    (0x3239, '3', u'(代)'),
    (0x323A, '3', u'(呼)'),
    (0x323B, '3', u'(学)'),
    (0x323C, '3', u'(監)'),
    (0x323D, '3', u'(企)'),
    (0x323E, '3', u'(資)'),
    (0x323F, '3', u'(協)'),
    (0x3240, '3', u'(祭)'),
    (0x3241, '3', u'(休)'),
    (0x3242, '3', u'(自)'),
    (0x3243, '3', u'(至)'),
    (0x3244, 'M', u'問'),
    (0x3245, 'M', u'幼'),
    (0x3246, 'M', u'文'),
    (0x3247, 'M', u'箏'),
    (0x3248, 'V'),
    (0x3250, 'M', u'pte'),
    (0x3251, 'M', u'21'),
    (0x3252, 'M', u'22'),
    (0x3253, 'M', u'23'),
    (0x3254, 'M', u'24'),
    (0x3255, 'M', u'25'),
    (0x3256, 'M', u'26'),
    (0x3257, 'M', u'27'),
    (0x3258, 'M', u'28'),
    (0x3259, 'M', u'29'),
    (0x325A, 'M', u'30'),
    (0x325B, 'M', u'31'),
    (0x325C, 'M', u'32'),
    (0x325D, 'M', u'33'),
    (0x325E, 'M', u'34'),
    (0x325F, 'M', u'35'),
    (0x3260, 'M', u'ᄀ'),
    (0x3261, 'M', u'ᄂ'),
    (0x3262, 'M', u'ᄃ'),
    (0x3263, 'M', u'ᄅ'),
    (0x3264, 'M', u'ᄆ'),
    (0x3265, 'M', u'ᄇ'),
    (0x3266, 'M', u'ᄉ'),
    (0x3267, 'M', u'ᄋ'),
    (0x3268, 'M', u'ᄌ'),
    (0x3269, 'M', u'ᄎ'),
    (0x326A, 'M', u'ᄏ'),
    (0x326B, 'M', u'ᄐ'),
    (0x326C, 'M', u'ᄑ'),
    (0x326D, 'M', u'ᄒ'),
    (0x326E, 'M', u'가'),
    (0x326F, 'M', u'나'),
    ]

def _seg_31():
    # Auto-generated Unicode code point table segment (do not edit by hand).
    # Each entry is (codepoint, status[, mapping]); status codes seen here are
    # 'V' and 'M' — presumably valid / mapped per UTS #46; TODO confirm
    # against the table generator.
    return [
    (0x3270, 'M', u'다'),
    (0x3271, 'M', u'라'),
    (0x3272, 'M', u'마'),
    (0x3273, 'M', u'바'),
    (0x3274, 'M', u'사'),
    (0x3275, 'M', u'아'),
    (0x3276, 'M', u'자'),
    (0x3277, 'M', u'차'),
    (0x3278, 'M', u'카'),
    (0x3279, 'M', u'타'),
    (0x327A, 'M', u'파'),
    (0x327B, 'M', u'하'),
    (0x327C, 'M', u'참고'),
    (0x327D, 'M', u'주의'),
    (0x327E, 'M', u'우'),
    (0x327F, 'V'),
    (0x3280, 'M', u'一'),
    (0x3281, 'M', u'二'),
    (0x3282, 'M', u'三'),
    (0x3283, 'M', u'四'),
    (0x3284, 'M', u'五'),
    (0x3285, 'M', u'六'),
    (0x3286, 'M', u'七'),
    (0x3287, 'M', u'八'),
    (0x3288, 'M', u'九'),
    (0x3289, 'M', u'十'),
    (0x328A, 'M', u'月'),
    (0x328B, 'M', u'火'),
    (0x328C, 'M', u'水'),
    (0x328D, 'M', u'木'),
    (0x328E, 'M', u'金'),
    (0x328F, 'M', u'土'),
    (0x3290, 'M', u'日'),
    (0x3291, 'M', u'株'),
    (0x3292, 'M', u'有'),
    (0x3293, 'M', u'社'),
    (0x3294, 'M', u'名'),
    (0x3295, 'M', u'特'),
    (0x3296, 'M', u'財'),
    (0x3297, 'M', u'祝'),
    (0x3298, 'M', u'労'),
    (0x3299, 'M', u'秘'),
    (0x329A, 'M', u'男'),
    (0x329B, 'M', u'女'),
    (0x329C, 'M', u'適'),
    (0x329D, 'M', u'優'),
    (0x329E, 'M', u'印'),
    (0x329F, 'M', u'注'),
    (0x32A0, 'M', u'項'),
    (0x32A1, 'M', u'休'),
    (0x32A2, 'M', u'写'),
    (0x32A3, 'M', u'正'),
    (0x32A4, 'M', u'上'),
    (0x32A5, 'M', u'中'),
    (0x32A6, 'M', u'下'),
    (0x32A7, 'M', u'左'),
    (0x32A8, 'M', u'右'),
    (0x32A9, 'M', u'医'),
    (0x32AA, 'M', u'宗'),
    (0x32AB, 'M', u'学'),
    (0x32AC, 'M', u'監'),
    (0x32AD, 'M', u'企'),
    (0x32AE, 'M', u'資'),
    (0x32AF, 'M', u'協'),
    (0x32B0, 'M', u'夜'),
    (0x32B1, 'M', u'36'),
    (0x32B2, 'M', u'37'),
    (0x32B3, 'M', u'38'),
    (0x32B4, 'M', u'39'),
    (0x32B5, 'M', u'40'),
    (0x32B6, 'M', u'41'),
    (0x32B7, 'M', u'42'),
    (0x32B8, 'M', u'43'),
    (0x32B9, 'M', u'44'),
    (0x32BA, 'M', u'45'),
    (0x32BB, 'M', u'46'),
    (0x32BC, 'M', u'47'),
    (0x32BD, 'M', u'48'),
    (0x32BE, 'M', u'49'),
    (0x32BF, 'M', u'50'),
    (0x32C0, 'M', u'1月'),
    (0x32C1, 'M', u'2月'),
    (0x32C2, 'M', u'3月'),
    (0x32C3, 'M', u'4月'),
    (0x32C4, 'M', u'5月'),
    (0x32C5, 'M', u'6月'),
    (0x32C6, 'M', u'7月'),
    (0x32C7, 'M', u'8月'),
    (0x32C8, 'M', u'9月'),
    (0x32C9, 'M', u'10月'),
    (0x32CA, 'M', u'11月'),
    (0x32CB, 'M', u'12月'),
    (0x32CC, 'M', u'hg'),
    (0x32CD, 'M', u'erg'),
    (0x32CE, 'M', u'ev'),
    (0x32CF, 'M', u'ltd'),
    (0x32D0, 'M', u'ア'),
    (0x32D1, 'M', u'イ'),
    (0x32D2, 'M', u'ウ'),
    (0x32D3, 'M', u'エ'),
    ]

def _seg_32():
    # Auto-generated Unicode code point table segment (do not edit by hand).
    # Each entry is (codepoint, status[, mapping]); status codes seen here are
    # 'M' and 'X' — presumably mapped / disallowed per UTS #46; TODO
    # confirm against the table generator.
    return [
    (0x32D4, 'M', u'オ'),
    (0x32D5, 'M', u'カ'),
    (0x32D6, 'M', u'キ'),
    (0x32D7, 'M', u'ク'),
    (0x32D8, 'M', u'ケ'),
    (0x32D9, 'M', u'コ'),
    (0x32DA, 'M', u'サ'),
    (0x32DB, 'M', u'シ'),
    (0x32DC, 'M', u'ス'),
    (0x32DD, 'M', u'セ'),
    (0x32DE, 'M', u'ソ'),
    (0x32DF, 'M', u'タ'),
    (0x32E0, 'M', u'チ'),
    (0x32E1, 'M', u'ツ'),
    (0x32E2, 'M', u'テ'),
    (0x32E3, 'M', u'ト'),
    (0x32E4, 'M', u'ナ'),
    (0x32E5, 'M', u'ニ'),
    (0x32E6, 'M', u'ヌ'),
    (0x32E7, 'M', u'ネ'),
    (0x32E8, 'M', u'ノ'),
    (0x32E9, 'M', u'ハ'),
    (0x32EA, 'M', u'ヒ'),
    (0x32EB, 'M', u'フ'),
    (0x32EC, 'M', u'ヘ'),
    (0x32ED, 'M', u'ホ'),
    (0x32EE, 'M', u'マ'),
    (0x32EF, 'M', u'ミ'),
    (0x32F0, 'M', u'ム'),
    (0x32F1, 'M', u'メ'),
    (0x32F2, 'M', u'モ'),
    (0x32F3, 'M', u'ヤ'),
    (0x32F4, 'M', u'ユ'),
    (0x32F5, 'M', u'ヨ'),
    (0x32F6, 'M', u'ラ'),
    (0x32F7, 'M', u'リ'),
    (0x32F8, 'M', u'ル'),
    (0x32F9, 'M', u'レ'),
    (0x32FA, 'M', u'ロ'),
    (0x32FB, 'M', u'ワ'),
    (0x32FC, 'M', u'ヰ'),
    (0x32FD, 'M', u'ヱ'),
    (0x32FE, 'M', u'ヲ'),
    (0x32FF, 'X'),
    (0x3300, 'M', u'アパート'),
    (0x3301, 'M', u'アルファ'),
    (0x3302, 'M', u'アンペア'),
    (0x3303, 'M', u'アール'),
    (0x3304, 'M', u'イニング'),
    (0x3305, 'M', u'インチ'),
    (0x3306, 'M', u'ウォン'),
    (0x3307, 'M', u'エスクード'),
    (0x3308, 'M', u'エーカー'),
    (0x3309, 'M', u'オンス'),
    (0x330A, 'M', u'オーム'),
    (0x330B, 'M', u'カイリ'),
    (0x330C, 'M', u'カラット'),
    (0x330D, 'M', u'カロリー'),
    (0x330E, 'M', u'ガロン'),
    (0x330F, 'M', u'ガンマ'),
    (0x3310, 'M', u'ギガ'),
    (0x3311, 'M', u'ギニー'),
    (0x3312, 'M', u'キュリー'),
    (0x3313, 'M', u'ギルダー'),
    (0x3314, 'M', u'キロ'),
    (0x3315, 'M', u'キログラム'),
    (0x3316, 'M', u'キロメートル'),
    (0x3317, 'M', u'キロワット'),
    (0x3318, 'M', u'グラム'),
    (0x3319, 'M', u'グラムトン'),
    (0x331A, 'M', u'クルゼイロ'),
    (0x331B, 'M', u'クローネ'),
    (0x331C, 'M', u'ケース'),
    (0x331D, 'M', u'コルナ'),
    (0x331E, 'M', u'コーポ'),
    (0x331F, 'M', u'サイクル'),
    (0x3320, 'M', u'サンチーム'),
    (0x3321, 'M', u'シリング'),
    (0x3322, 'M', u'センチ'),
    (0x3323, 'M', u'セント'),
    (0x3324, 'M', u'ダース'),
    (0x3325, 'M', u'デシ'),
    (0x3326, 'M', u'ドル'),
    (0x3327, 'M', u'トン'),
    (0x3328, 'M', u'ナノ'),
    (0x3329, 'M', u'ノット'),
    (0x332A, 'M', u'ハイツ'),
    (0x332B, 'M', u'パーセント'),
    (0x332C, 'M', u'パーツ'),
    (0x332D, 'M', u'バーレル'),
    (0x332E, 'M', u'ピアストル'),
    (0x332F, 'M', u'ピクル'),
    (0x3330, 'M', u'ピコ'),
    (0x3331, 'M', u'ビル'),
    (0x3332, 'M', u'ファラッド'),
    (0x3333, 'M', u'フィート'),
    (0x3334, 'M', u'ブッシェル'),
    (0x3335, 'M', u'フラン'),
    (0x3336, 'M', u'ヘクタール'),
    (0x3337, 'M', u'ペソ'),
    ]

def _seg_33():
    # Auto-generated Unicode code point table segment (do not edit by hand).
    # Each entry is (codepoint, 'M', mapping): squared/compatibility symbols
    # mapped to replacement strings — presumably per UTS #46; TODO confirm
    # against the table generator.
    return [
    (0x3338, 'M', u'ペニヒ'),
    (0x3339, 'M', u'ヘルツ'),
    (0x333A, 'M', u'ペンス'),
    (0x333B, 'M', u'ページ'),
    (0x333C, 'M', u'ベータ'),
    (0x333D, 'M', u'ポイント'),
    (0x333E, 'M', u'ボルト'),
    (0x333F, 'M', u'ホン'),
    (0x3340, 'M', u'ポンド'),
    (0x3341, 'M', u'ホール'),
    (0x3342, 'M', u'ホーン'),
    (0x3343, 'M', u'マイクロ'),
    (0x3344, 'M', u'マイル'),
    (0x3345, 'M', u'マッハ'),
    (0x3346, 'M', u'マルク'),
    (0x3347, 'M', u'マンション'),
    (0x3348, 'M', u'ミクロン'),
    (0x3349, 'M', u'ミリ'),
    (0x334A, 'M', u'ミリバール'),
    (0x334B, 'M', u'メガ'),
    (0x334C, 'M', u'メガトン'),
    (0x334D, 'M', u'メートル'),
    (0x334E, 'M', u'ヤード'),
    (0x334F, 'M', u'ヤール'),
    (0x3350, 'M', u'ユアン'),
    (0x3351, 'M', u'リットル'),
    (0x3352, 'M', u'リラ'),
    (0x3353, 'M', u'ルピー'),
    (0x3354, 'M', u'ルーブル'),
    (0x3355, 'M', u'レム'),
    (0x3356, 'M', u'レントゲン'),
    (0x3357, 'M', u'ワット'),
    (0x3358, 'M', u'0点'),
    (0x3359, 'M', u'1点'),
    (0x335A, 'M', u'2点'),
    (0x335B, 'M', u'3点'),
    (0x335C, 'M', u'4点'),
    (0x335D, 'M', u'5点'),
    (0x335E, 'M', u'6点'),
    (0x335F, 'M', u'7点'),
    (0x3360, 'M', u'8点'),
    (0x3361, 'M', u'9点'),
    (0x3362, 'M', u'10点'),
    (0x3363, 'M', u'11点'),
    (0x3364, 'M', u'12点'),
    (0x3365, 'M', u'13点'),
    (0x3366, 'M', u'14点'),
    (0x3367, 'M', u'15点'),
    (0x3368, 'M', u'16点'),
    (0x3369, 'M', u'17点'),
    (0x336A, 'M', u'18点'),
    (0x336B, 'M', u'19点'),
    (0x336C, 'M', u'20点'),
    (0x336D, 'M', u'21点'),
    (0x336E, 'M', u'22点'),
    (0x336F, 'M', u'23点'),
    (0x3370, 'M', u'24点'),
    (0x3371, 'M', u'hpa'),
    (0x3372, 'M', u'da'),
    (0x3373, 'M', u'au'),
    (0x3374, 'M', u'bar'),
    (0x3375, 'M', u'ov'),
    (0x3376, 'M', u'pc'),
    (0x3377, 'M', u'dm'),
    (0x3378, 'M', u'dm2'),
    (0x3379, 'M', u'dm3'),
    (0x337A, 'M', u'iu'),
    (0x337B, 'M', u'平成'),
    (0x337C, 'M', u'昭和'),
    (0x337D, 'M', u'大正'),
    (0x337E, 'M', u'明治'),
    (0x337F, 'M', u'株式会社'),
    (0x3380, 'M', u'pa'),
    (0x3381, 'M', u'na'),
    (0x3382, 'M', u'μa'),
    (0x3383, 'M', u'ma'),
    (0x3384, 'M', u'ka'),
    (0x3385, 'M', u'kb'),
    (0x3386, 'M', u'mb'),
    (0x3387, 'M', u'gb'),
    (0x3388, 'M', u'cal'),
    (0x3389, 'M', u'kcal'),
    (0x338A, 'M', u'pf'),
    (0x338B, 'M', u'nf'),
    (0x338C, 'M', u'μf'),
    (0x338D, 'M', u'μg'),
    (0x338E, 'M', u'mg'),
    (0x338F, 'M', u'kg'),
    (0x3390, 'M', u'hz'),
    (0x3391, 'M', u'khz'),
    (0x3392, 'M', u'mhz'),
    (0x3393, 'M', u'ghz'),
    (0x3394, 'M', u'thz'),
    (0x3395, 'M', u'μl'),
    (0x3396, 'M', u'ml'),
    (0x3397, 'M', u'dl'),
    (0x3398, 'M', u'kl'),
    (0x3399, 'M', u'fm'),
    (0x339A, 'M', u'nm'),
    (0x339B, 'M', u'μm'),
    ]

def _seg_34():
    # Auto-generated Unicode code point table segment (do not edit by hand).
    # Each entry is (codepoint, status[, mapping]); status codes seen here are
    # 'M' and 'X' — presumably mapped / disallowed per UTS #46; TODO
    # confirm against the table generator.
    return [
    (0x339C, 'M', u'mm'),
    (0x339D, 'M', u'cm'),
    (0x339E, 'M', u'km'),
    (0x339F, 'M', u'mm2'),
    (0x33A0, 'M', u'cm2'),
    (0x33A1, 'M', u'm2'),
    (0x33A2, 'M', u'km2'),
    (0x33A3, 'M', u'mm3'),
    (0x33A4, 'M', u'cm3'),
    (0x33A5, 'M', u'm3'),
    (0x33A6, 'M', u'km3'),
    (0x33A7, 'M', u'm∕s'),
    (0x33A8, 'M', u'm∕s2'),
    (0x33A9, 'M', u'pa'),
    (0x33AA, 'M', u'kpa'),
    (0x33AB, 'M', u'mpa'),
    (0x33AC, 'M', u'gpa'),
    (0x33AD, 'M', u'rad'),
    (0x33AE, 'M', u'rad∕s'),
    (0x33AF, 'M', u'rad∕s2'),
    (0x33B0, 'M', u'ps'),
    (0x33B1, 'M', u'ns'),
    (0x33B2, 'M', u'μs'),
    (0x33B3, 'M', u'ms'),
    (0x33B4, 'M', u'pv'),
    (0x33B5, 'M', u'nv'),
    (0x33B6, 'M', u'μv'),
    (0x33B7, 'M', u'mv'),
    (0x33B8, 'M', u'kv'),
    (0x33B9, 'M', u'mv'),
    (0x33BA, 'M', u'pw'),
    (0x33BB, 'M', u'nw'),
    (0x33BC, 'M', u'μw'),
    (0x33BD, 'M', u'mw'),
    (0x33BE, 'M', u'kw'),
    (0x33BF, 'M', u'mw'),
    (0x33C0, 'M', u'kω'),
    (0x33C1, 'M', u'mω'),
    (0x33C2, 'X'),
    (0x33C3, 'M', u'bq'),
    (0x33C4, 'M', u'cc'),
    (0x33C5, 'M', u'cd'),
    (0x33C6, 'M', u'c∕kg'),
    (0x33C7, 'X'),
    (0x33C8, 'M', u'db'),
    (0x33C9, 'M', u'gy'),
    (0x33CA, 'M', u'ha'),
    (0x33CB, 'M', u'hp'),
    (0x33CC, 'M', u'in'),
    (0x33CD, 'M', u'kk'),
    (0x33CE, 'M', u'km'),
    (0x33CF, 'M', u'kt'),
    (0x33D0, 'M', u'lm'),
    (0x33D1, 'M', u'ln'),
    (0x33D2, 'M', u'log'),
    (0x33D3, 'M', u'lx'),
    (0x33D4, 'M', u'mb'),
    (0x33D5, 'M', u'mil'),
    (0x33D6, 'M', u'mol'),
    (0x33D7, 'M', u'ph'),
    (0x33D8, 'X'),
    (0x33D9, 'M', u'ppm'),
    (0x33DA, 'M', u'pr'),
    (0x33DB, 'M', u'sr'),
    (0x33DC, 'M', u'sv'),
    (0x33DD, 'M', u'wb'),
    (0x33DE, 'M', u'v∕m'),
    (0x33DF, 'M', u'a∕m'),
    (0x33E0, 'M', u'1日'),
    (0x33E1, 'M', u'2日'),
    (0x33E2, 'M', u'3日'),
    (0x33E3, 'M', u'4日'),
    (0x33E4, 'M', u'5日'),
    (0x33E5, 'M', u'6日'),
    (0x33E6, 'M', u'7日'),
    (0x33E7, 'M', u'8日'),
    (0x33E8, 'M', u'9日'),
    (0x33E9, 'M', u'10日'),
    (0x33EA, 'M', u'11日'),
    (0x33EB, 'M', u'12日'),
    (0x33EC, 'M', u'13日'),
    (0x33ED, 'M', u'14日'),
    (0x33EE, 'M', u'15日'),
    (0x33EF, 'M', u'16日'),
    (0x33F0, 'M', u'17日'),
    (0x33F1, 'M', u'18日'),
    (0x33F2, 'M', u'19日'),
    (0x33F3, 'M', u'20日'),
    (0x33F4, 'M', u'21日'),
    (0x33F5, 'M', u'22日'),
    (0x33F6, 'M', u'23日'),
    (0x33F7, 'M', u'24日'),
    (0x33F8, 'M', u'25日'),
    (0x33F9, 'M', u'26日'),
    (0x33FA, 'M', u'27日'),
    (0x33FB, 'M', u'28日'),
    (0x33FC, 'M', u'29日'),
    (0x33FD, 'M', u'30日'),
    (0x33FE, 'M', u'31日'),
    (0x33FF, 'M', u'gal'),
    ]

def _seg_35():
    # Auto-generated Unicode code point table segment (do not edit by hand).
    # Each entry is (codepoint, status[, mapping]); status codes seen here are
    # 'V', 'M' and 'X' — presumably valid / mapped / disallowed per UTS #46;
    # TODO confirm against the table generator.
    return [
    (0x3400, 'V'),
    (0x4DB6, 'X'),
    (0x4DC0, 'V'),
    (0x9FCD, 'X'),
    (0xA000, 'V'),
    (0xA48D, 'X'),
    (0xA490, 'V'),
    (0xA4C7, 'X'),
    (0xA4D0, 'V'),
    (0xA62C, 'X'),
    (0xA640, 'M', u'ꙁ'),
    (0xA641, 'V'),
    (0xA642, 'M', u'ꙃ'),
    (0xA643, 'V'),
    (0xA644, 'M', u'ꙅ'),
    (0xA645, 'V'),
    (0xA646, 'M', u'ꙇ'),
    (0xA647, 'V'),
    (0xA648, 'M', u'ꙉ'),
    (0xA649, 'V'),
    (0xA64A, 'M', u'ꙋ'),
    (0xA64B, 'V'),
    (0xA64C, 'M', u'ꙍ'),
    (0xA64D, 'V'),
    (0xA64E, 'M', u'ꙏ'),
    (0xA64F, 'V'),
    (0xA650, 'M', u'ꙑ'),
    (0xA651, 'V'),
    (0xA652, 'M', u'ꙓ'),
    (0xA653, 'V'),
    (0xA654, 'M', u'ꙕ'),
    (0xA655, 'V'),
    (0xA656, 'M', u'ꙗ'),
    (0xA657, 'V'),
    (0xA658, 'M', u'ꙙ'),
    (0xA659, 'V'),
    (0xA65A, 'M', u'ꙛ'),
    (0xA65B, 'V'),
    (0xA65C, 'M', u'ꙝ'),
    (0xA65D, 'V'),
    (0xA65E, 'M', u'ꙟ'),
    (0xA65F, 'V'),
    (0xA660, 'M', u'ꙡ'),
    (0xA661, 'V'),
    (0xA662, 'M', u'ꙣ'),
    (0xA663, 'V'),
    (0xA664, 'M', u'ꙥ'),
    (0xA665, 'V'),
    (0xA666, 'M', u'ꙧ'),
    (0xA667, 'V'),
    (0xA668, 'M', u'ꙩ'),
    (0xA669, 'V'),
    (0xA66A, 'M', u'ꙫ'),
    (0xA66B, 'V'),
    (0xA66C, 'M', u'ꙭ'),
    (0xA66D, 'V'),
    (0xA680, 'M', u'ꚁ'),
    (0xA681, 'V'),
    (0xA682, 'M', u'ꚃ'),
    (0xA683, 'V'),
    (0xA684, 'M', u'ꚅ'),
    (0xA685, 'V'),
    (0xA686, 'M', u'ꚇ'),
    (0xA687, 'V'),
    (0xA688, 'M', u'ꚉ'),
    (0xA689, 'V'),
    (0xA68A, 'M', u'ꚋ'),
    (0xA68B, 'V'),
    (0xA68C, 'M', u'ꚍ'),
    (0xA68D, 'V'),
    (0xA68E, 'M', u'ꚏ'),
    (0xA68F, 'V'),
    (0xA690, 'M', u'ꚑ'),
    (0xA691, 'V'),
    (0xA692, 'M', u'ꚓ'),
    (0xA693, 'V'),
    (0xA694, 'M', u'ꚕ'),
    (0xA695, 'V'),
    (0xA696, 'M', u'ꚗ'),
    (0xA697, 'V'),
    (0xA698, 'X'),
    (0xA69F, 'V'),
    (0xA6F8, 'X'),
    (0xA700, 'V'),
    (0xA722, 'M', u'ꜣ'),
    (0xA723, 'V'),
    (0xA724, 'M', u'ꜥ'),
    (0xA725, 'V'),
    (0xA726, 'M', u'ꜧ'),
    (0xA727, 'V'),
    (0xA728, 'M', u'ꜩ'),
    (0xA729, 'V'),
    (0xA72A, 'M', u'ꜫ'),
    (0xA72B, 'V'),
    (0xA72C, 'M', u'ꜭ'),
    (0xA72D, 'V'),
    (0xA72E, 'M', u'ꜯ'),
    (0xA72F, 'V'),
    (0xA732, 'M', u'ꜳ'),
    (0xA733, 'V'),
    ]

def _seg_36():
    # Auto-generated Unicode code point table segment (do not edit by hand).
    # Each entry is (codepoint, status[, mapping]); status codes seen here are
    # 'V', 'M' and 'X' — presumably valid / mapped / disallowed per UTS #46;
    # TODO confirm against the table generator.
    return [
    (0xA734, 'M', u'ꜵ'),
    (0xA735, 'V'),
    (0xA736, 'M', u'ꜷ'),
    (0xA737, 'V'),
    (0xA738, 'M', u'ꜹ'),
    (0xA739, 'V'),
    (0xA73A, 'M', u'ꜻ'),
    (0xA73B, 'V'),
    (0xA73C, 'M', u'ꜽ'),
    (0xA73D, 'V'),
    (0xA73E, 'M', u'ꜿ'),
    (0xA73F, 'V'),
    (0xA740, 'M', u'ꝁ'),
    (0xA741, 'V'),
    (0xA742, 'M', u'ꝃ'),
    (0xA743, 'V'),
    (0xA744, 'M', u'ꝅ'),
    (0xA745, 'V'),
    (0xA746, 'M', u'ꝇ'),
    (0xA747, 'V'),
    (0xA748, 'M', u'ꝉ'),
    (0xA749, 'V'),
    (0xA74A, 'M', u'ꝋ'),
    (0xA74B, 'V'),
    (0xA74C, 'M', u'ꝍ'),
    (0xA74D, 'V'),
    (0xA74E, 'M', u'ꝏ'),
    (0xA74F, 'V'),
    (0xA750, 'M', u'ꝑ'),
    (0xA751, 'V'),
    (0xA752, 'M', u'ꝓ'),
    (0xA753, 'V'),
    (0xA754, 'M', u'ꝕ'),
    (0xA755, 'V'),
    (0xA756, 'M', u'ꝗ'),
    (0xA757, 'V'),
    (0xA758, 'M', u'ꝙ'),
    (0xA759, 'V'),
    (0xA75A, 'M', u'ꝛ'),
    (0xA75B, 'V'),
    (0xA75C, 'M', u'ꝝ'),
    (0xA75D, 'V'),
    (0xA75E, 'M', u'ꝟ'),
    (0xA75F, 'V'),
    (0xA760, 'M', u'ꝡ'),
    (0xA761, 'V'),
    (0xA762, 'M', u'ꝣ'),
    (0xA763, 'V'),
    (0xA764, 'M', u'ꝥ'),
    (0xA765, 'V'),
    (0xA766, 'M', u'ꝧ'),
    (0xA767, 'V'),
    (0xA768, 'M', u'ꝩ'),
    (0xA769, 'V'),
    (0xA76A, 'M', u'ꝫ'),
    (0xA76B, 'V'),
    (0xA76C, 'M', u'ꝭ'),
    (0xA76D, 'V'),
    (0xA76E, 'M', u'ꝯ'),
    (0xA76F, 'V'),
    (0xA770, 'M', u'ꝯ'),
    (0xA771, 'V'),
    (0xA779, 'M', u'ꝺ'),
    (0xA77A, 'V'),
    (0xA77B, 'M', u'ꝼ'),
    (0xA77C, 'V'),
    (0xA77D, 'M', u'ᵹ'),
    (0xA77E, 'M', u'ꝿ'),
    (0xA77F, 'V'),
    (0xA780, 'M', u'ꞁ'),
    (0xA781, 'V'),
    (0xA782, 'M', u'ꞃ'),
    (0xA783, 'V'),
    (0xA784, 'M', u'ꞅ'),
    (0xA785, 'V'),
    (0xA786, 'M', u'ꞇ'),
    (0xA787, 'V'),
    (0xA78B, 'M', u'ꞌ'),
    (0xA78C, 'V'),
    (0xA78D, 'M', u'ɥ'),
    (0xA78E, 'V'),
    (0xA78F, 'X'),
    (0xA790, 'M', u'ꞑ'),
    (0xA791, 'V'),
    (0xA792, 'M', u'ꞓ'),
    (0xA793, 'V'),
    (0xA794, 'X'),
    (0xA7A0, 'M', u'ꞡ'),
    (0xA7A1, 'V'),
    (0xA7A2, 'M', u'ꞣ'),
    (0xA7A3, 'V'),
    (0xA7A4, 'M', u'ꞥ'),
    (0xA7A5, 'V'),
    (0xA7A6, 'M', u'ꞧ'),
    (0xA7A7, 'V'),
    (0xA7A8, 'M', u'ꞩ'),
    (0xA7A9, 'V'),
    (0xA7AA, 'M', u'ɦ'),
    (0xA7AB, 'X'),
    (0xA7F8, 'M', u'ħ'),
    ]

def _seg_37():
    """IDNA/UTS #46 code point mapping table, segment 37 (auto-generated data).

    Entries are (codepoint, status) or (codepoint, status, mapping); statuses
    'M' and '3' carry a replacement string, 'V'/'X'/'I' do not.
    Presumably 'V' = valid, 'M' = mapped, 'X' = disallowed, 'I' = ignored per
    UTS #46 — confirm against the generator. Do not hand-edit.
    """
    return [
    (0xA7F9, 'M', u'œ'),
    (0xA7FA, 'V'),
    (0xA82C, 'X'),
    (0xA830, 'V'),
    (0xA83A, 'X'),
    (0xA840, 'V'),
    (0xA878, 'X'),
    (0xA880, 'V'),
    (0xA8C5, 'X'),
    (0xA8CE, 'V'),
    (0xA8DA, 'X'),
    (0xA8E0, 'V'),
    (0xA8FC, 'X'),
    (0xA900, 'V'),
    (0xA954, 'X'),
    (0xA95F, 'V'),
    (0xA97D, 'X'),
    (0xA980, 'V'),
    (0xA9CE, 'X'),
    (0xA9CF, 'V'),
    (0xA9DA, 'X'),
    (0xA9DE, 'V'),
    (0xA9E0, 'X'),
    (0xAA00, 'V'),
    (0xAA37, 'X'),
    (0xAA40, 'V'),
    (0xAA4E, 'X'),
    (0xAA50, 'V'),
    (0xAA5A, 'X'),
    (0xAA5C, 'V'),
    (0xAA7C, 'X'),
    (0xAA80, 'V'),
    (0xAAC3, 'X'),
    (0xAADB, 'V'),
    (0xAAF7, 'X'),
    (0xAB01, 'V'),
    (0xAB07, 'X'),
    (0xAB09, 'V'),
    (0xAB0F, 'X'),
    (0xAB11, 'V'),
    (0xAB17, 'X'),
    (0xAB20, 'V'),
    (0xAB27, 'X'),
    (0xAB28, 'V'),
    (0xAB2F, 'X'),
    (0xABC0, 'V'),
    (0xABEE, 'X'),
    (0xABF0, 'V'),
    (0xABFA, 'X'),
    (0xAC00, 'V'),
    (0xD7A4, 'X'),
    (0xD7B0, 'V'),
    (0xD7C7, 'X'),
    (0xD7CB, 'V'),
    (0xD7FC, 'X'),
    (0xF900, 'M', u'豈'),
    (0xF901, 'M', u'更'),
    (0xF902, 'M', u'車'),
    (0xF903, 'M', u'賈'),
    (0xF904, 'M', u'滑'),
    (0xF905, 'M', u'串'),
    (0xF906, 'M', u'句'),
    (0xF907, 'M', u'龜'),
    (0xF909, 'M', u'契'),
    (0xF90A, 'M', u'金'),
    (0xF90B, 'M', u'喇'),
    (0xF90C, 'M', u'奈'),
    (0xF90D, 'M', u'懶'),
    (0xF90E, 'M', u'癩'),
    (0xF90F, 'M', u'羅'),
    (0xF910, 'M', u'蘿'),
    (0xF911, 'M', u'螺'),
    (0xF912, 'M', u'裸'),
    (0xF913, 'M', u'邏'),
    (0xF914, 'M', u'樂'),
    (0xF915, 'M', u'洛'),
    (0xF916, 'M', u'烙'),
    (0xF917, 'M', u'珞'),
    (0xF918, 'M', u'落'),
    (0xF919, 'M', u'酪'),
    (0xF91A, 'M', u'駱'),
    (0xF91B, 'M', u'亂'),
    (0xF91C, 'M', u'卵'),
    (0xF91D, 'M', u'欄'),
    (0xF91E, 'M', u'爛'),
    (0xF91F, 'M', u'蘭'),
    (0xF920, 'M', u'鸞'),
    (0xF921, 'M', u'嵐'),
    (0xF922, 'M', u'濫'),
    (0xF923, 'M', u'藍'),
    (0xF924, 'M', u'襤'),
    (0xF925, 'M', u'拉'),
    (0xF926, 'M', u'臘'),
    (0xF927, 'M', u'蠟'),
    (0xF928, 'M', u'廊'),
    (0xF929, 'M', u'朗'),
    (0xF92A, 'M', u'浪'),
    (0xF92B, 'M', u'狼'),
    (0xF92C, 'M', u'郎'),
    (0xF92D, 'M', u'來'),
    ]

def _seg_38():
    """IDNA/UTS #46 code point mapping table, segment 38 (auto-generated data).

    Entries are (codepoint, status) or (codepoint, status, mapping); statuses
    'M' and '3' carry a replacement string. This segment holds CJK
    Compatibility Ideograph mappings (U+F92E onward). Do not hand-edit.
    """
    return [
    (0xF92E, 'M', u'冷'),
    (0xF92F, 'M', u'勞'),
    (0xF930, 'M', u'擄'),
    (0xF931, 'M', u'櫓'),
    (0xF932, 'M', u'爐'),
    (0xF933, 'M', u'盧'),
    (0xF934, 'M', u'老'),
    (0xF935, 'M', u'蘆'),
    (0xF936, 'M', u'虜'),
    (0xF937, 'M', u'路'),
    (0xF938, 'M', u'露'),
    (0xF939, 'M', u'魯'),
    (0xF93A, 'M', u'鷺'),
    (0xF93B, 'M', u'碌'),
    (0xF93C, 'M', u'祿'),
    (0xF93D, 'M', u'綠'),
    (0xF93E, 'M', u'菉'),
    (0xF93F, 'M', u'錄'),
    (0xF940, 'M', u'鹿'),
    (0xF941, 'M', u'論'),
    (0xF942, 'M', u'壟'),
    (0xF943, 'M', u'弄'),
    (0xF944, 'M', u'籠'),
    (0xF945, 'M', u'聾'),
    (0xF946, 'M', u'牢'),
    (0xF947, 'M', u'磊'),
    (0xF948, 'M', u'賂'),
    (0xF949, 'M', u'雷'),
    (0xF94A, 'M', u'壘'),
    (0xF94B, 'M', u'屢'),
    (0xF94C, 'M', u'樓'),
    (0xF94D, 'M', u'淚'),
    (0xF94E, 'M', u'漏'),
    (0xF94F, 'M', u'累'),
    (0xF950, 'M', u'縷'),
    (0xF951, 'M', u'陋'),
    (0xF952, 'M', u'勒'),
    (0xF953, 'M', u'肋'),
    (0xF954, 'M', u'凜'),
    (0xF955, 'M', u'凌'),
    (0xF956, 'M', u'稜'),
    (0xF957, 'M', u'綾'),
    (0xF958, 'M', u'菱'),
    (0xF959, 'M', u'陵'),
    (0xF95A, 'M', u'讀'),
    (0xF95B, 'M', u'拏'),
    (0xF95C, 'M', u'樂'),
    (0xF95D, 'M', u'諾'),
    (0xF95E, 'M', u'丹'),
    (0xF95F, 'M', u'寧'),
    (0xF960, 'M', u'怒'),
    (0xF961, 'M', u'率'),
    (0xF962, 'M', u'異'),
    (0xF963, 'M', u'北'),
    (0xF964, 'M', u'磻'),
    (0xF965, 'M', u'便'),
    (0xF966, 'M', u'復'),
    (0xF967, 'M', u'不'),
    (0xF968, 'M', u'泌'),
    (0xF969, 'M', u'數'),
    (0xF96A, 'M', u'索'),
    (0xF96B, 'M', u'參'),
    (0xF96C, 'M', u'塞'),
    (0xF96D, 'M', u'省'),
    (0xF96E, 'M', u'葉'),
    (0xF96F, 'M', u'說'),
    (0xF970, 'M', u'殺'),
    (0xF971, 'M', u'辰'),
    (0xF972, 'M', u'沈'),
    (0xF973, 'M', u'拾'),
    (0xF974, 'M', u'若'),
    (0xF975, 'M', u'掠'),
    (0xF976, 'M', u'略'),
    (0xF977, 'M', u'亮'),
    (0xF978, 'M', u'兩'),
    (0xF979, 'M', u'凉'),
    (0xF97A, 'M', u'梁'),
    (0xF97B, 'M', u'糧'),
    (0xF97C, 'M', u'良'),
    (0xF97D, 'M', u'諒'),
    (0xF97E, 'M', u'量'),
    (0xF97F, 'M', u'勵'),
    (0xF980, 'M', u'呂'),
    (0xF981, 'M', u'女'),
    (0xF982, 'M', u'廬'),
    (0xF983, 'M', u'旅'),
    (0xF984, 'M', u'濾'),
    (0xF985, 'M', u'礪'),
    (0xF986, 'M', u'閭'),
    (0xF987, 'M', u'驪'),
    (0xF988, 'M', u'麗'),
    (0xF989, 'M', u'黎'),
    (0xF98A, 'M', u'力'),
    (0xF98B, 'M', u'曆'),
    (0xF98C, 'M', u'歷'),
    (0xF98D, 'M', u'轢'),
    (0xF98E, 'M', u'年'),
    (0xF98F, 'M', u'憐'),
    (0xF990, 'M', u'戀'),
    (0xF991, 'M', u'撚'),
    ]

def _seg_39():
    """IDNA/UTS #46 code point mapping table, segment 39 (auto-generated data).

    Entries are (codepoint, status) or (codepoint, status, mapping); statuses
    'M' and '3' carry a replacement string. This segment continues the CJK
    Compatibility Ideograph mappings (U+F992 onward). Do not hand-edit.
    """
    return [
    (0xF992, 'M', u'漣'),
    (0xF993, 'M', u'煉'),
    (0xF994, 'M', u'璉'),
    (0xF995, 'M', u'秊'),
    (0xF996, 'M', u'練'),
    (0xF997, 'M', u'聯'),
    (0xF998, 'M', u'輦'),
    (0xF999, 'M', u'蓮'),
    (0xF99A, 'M', u'連'),
    (0xF99B, 'M', u'鍊'),
    (0xF99C, 'M', u'列'),
    (0xF99D, 'M', u'劣'),
    (0xF99E, 'M', u'咽'),
    (0xF99F, 'M', u'烈'),
    (0xF9A0, 'M', u'裂'),
    (0xF9A1, 'M', u'說'),
    (0xF9A2, 'M', u'廉'),
    (0xF9A3, 'M', u'念'),
    (0xF9A4, 'M', u'捻'),
    (0xF9A5, 'M', u'殮'),
    (0xF9A6, 'M', u'簾'),
    (0xF9A7, 'M', u'獵'),
    (0xF9A8, 'M', u'令'),
    (0xF9A9, 'M', u'囹'),
    (0xF9AA, 'M', u'寧'),
    (0xF9AB, 'M', u'嶺'),
    (0xF9AC, 'M', u'怜'),
    (0xF9AD, 'M', u'玲'),
    (0xF9AE, 'M', u'瑩'),
    (0xF9AF, 'M', u'羚'),
    (0xF9B0, 'M', u'聆'),
    (0xF9B1, 'M', u'鈴'),
    (0xF9B2, 'M', u'零'),
    (0xF9B3, 'M', u'靈'),
    (0xF9B4, 'M', u'領'),
    (0xF9B5, 'M', u'例'),
    (0xF9B6, 'M', u'禮'),
    (0xF9B7, 'M', u'醴'),
    (0xF9B8, 'M', u'隸'),
    (0xF9B9, 'M', u'惡'),
    (0xF9BA, 'M', u'了'),
    (0xF9BB, 'M', u'僚'),
    (0xF9BC, 'M', u'寮'),
    (0xF9BD, 'M', u'尿'),
    (0xF9BE, 'M', u'料'),
    (0xF9BF, 'M', u'樂'),
    (0xF9C0, 'M', u'燎'),
    (0xF9C1, 'M', u'療'),
    (0xF9C2, 'M', u'蓼'),
    (0xF9C3, 'M', u'遼'),
    (0xF9C4, 'M', u'龍'),
    (0xF9C5, 'M', u'暈'),
    (0xF9C6, 'M', u'阮'),
    (0xF9C7, 'M', u'劉'),
    (0xF9C8, 'M', u'杻'),
    (0xF9C9, 'M', u'柳'),
    (0xF9CA, 'M', u'流'),
    (0xF9CB, 'M', u'溜'),
    (0xF9CC, 'M', u'琉'),
    (0xF9CD, 'M', u'留'),
    (0xF9CE, 'M', u'硫'),
    (0xF9CF, 'M', u'紐'),
    (0xF9D0, 'M', u'類'),
    (0xF9D1, 'M', u'六'),
    (0xF9D2, 'M', u'戮'),
    (0xF9D3, 'M', u'陸'),
    (0xF9D4, 'M', u'倫'),
    (0xF9D5, 'M', u'崙'),
    (0xF9D6, 'M', u'淪'),
    (0xF9D7, 'M', u'輪'),
    (0xF9D8, 'M', u'律'),
    (0xF9D9, 'M', u'慄'),
    (0xF9DA, 'M', u'栗'),
    (0xF9DB, 'M', u'率'),
    (0xF9DC, 'M', u'隆'),
    (0xF9DD, 'M', u'利'),
    (0xF9DE, 'M', u'吏'),
    (0xF9DF, 'M', u'履'),
    (0xF9E0, 'M', u'易'),
    (0xF9E1, 'M', u'李'),
    (0xF9E2, 'M', u'梨'),
    (0xF9E3, 'M', u'泥'),
    (0xF9E4, 'M', u'理'),
    (0xF9E5, 'M', u'痢'),
    (0xF9E6, 'M', u'罹'),
    (0xF9E7, 'M', u'裏'),
    (0xF9E8, 'M', u'裡'),
    (0xF9E9, 'M', u'里'),
    (0xF9EA, 'M', u'離'),
    (0xF9EB, 'M', u'匿'),
    (0xF9EC, 'M', u'溺'),
    (0xF9ED, 'M', u'吝'),
    (0xF9EE, 'M', u'燐'),
    (0xF9EF, 'M', u'璘'),
    (0xF9F0, 'M', u'藺'),
    (0xF9F1, 'M', u'隣'),
    (0xF9F2, 'M', u'鱗'),
    (0xF9F3, 'M', u'麟'),
    (0xF9F4, 'M', u'林'),
    (0xF9F5, 'M', u'淋'),
    ]

def _seg_40():
    """IDNA/UTS #46 code point mapping table, segment 40 (auto-generated data).

    Entries are (codepoint, status) or (codepoint, status, mapping); statuses
    'M' and '3' carry a replacement string, 'V'/'X' do not. This segment
    covers U+F9F6 through the start of the U+FA5x block. Do not hand-edit.
    """
    return [
    (0xF9F6, 'M', u'臨'),
    (0xF9F7, 'M', u'立'),
    (0xF9F8, 'M', u'笠'),
    (0xF9F9, 'M', u'粒'),
    (0xF9FA, 'M', u'狀'),
    (0xF9FB, 'M', u'炙'),
    (0xF9FC, 'M', u'識'),
    (0xF9FD, 'M', u'什'),
    (0xF9FE, 'M', u'茶'),
    (0xF9FF, 'M', u'刺'),
    (0xFA00, 'M', u'切'),
    (0xFA01, 'M', u'度'),
    (0xFA02, 'M', u'拓'),
    (0xFA03, 'M', u'糖'),
    (0xFA04, 'M', u'宅'),
    (0xFA05, 'M', u'洞'),
    (0xFA06, 'M', u'暴'),
    (0xFA07, 'M', u'輻'),
    (0xFA08, 'M', u'行'),
    (0xFA09, 'M', u'降'),
    (0xFA0A, 'M', u'見'),
    (0xFA0B, 'M', u'廓'),
    (0xFA0C, 'M', u'兀'),
    (0xFA0D, 'M', u'嗀'),
    (0xFA0E, 'V'),
    (0xFA10, 'M', u'塚'),
    (0xFA11, 'V'),
    (0xFA12, 'M', u'晴'),
    (0xFA13, 'V'),
    (0xFA15, 'M', u'凞'),
    (0xFA16, 'M', u'猪'),
    (0xFA17, 'M', u'益'),
    (0xFA18, 'M', u'礼'),
    (0xFA19, 'M', u'神'),
    (0xFA1A, 'M', u'祥'),
    (0xFA1B, 'M', u'福'),
    (0xFA1C, 'M', u'靖'),
    (0xFA1D, 'M', u'精'),
    (0xFA1E, 'M', u'羽'),
    (0xFA1F, 'V'),
    (0xFA20, 'M', u'蘒'),
    (0xFA21, 'V'),
    (0xFA22, 'M', u'諸'),
    (0xFA23, 'V'),
    (0xFA25, 'M', u'逸'),
    (0xFA26, 'M', u'都'),
    (0xFA27, 'V'),
    (0xFA2A, 'M', u'飯'),
    (0xFA2B, 'M', u'飼'),
    (0xFA2C, 'M', u'館'),
    (0xFA2D, 'M', u'鶴'),
    (0xFA2E, 'M', u'郞'),
    (0xFA2F, 'M', u'隷'),
    (0xFA30, 'M', u'侮'),
    (0xFA31, 'M', u'僧'),
    (0xFA32, 'M', u'免'),
    (0xFA33, 'M', u'勉'),
    (0xFA34, 'M', u'勤'),
    (0xFA35, 'M', u'卑'),
    (0xFA36, 'M', u'喝'),
    (0xFA37, 'M', u'嘆'),
    (0xFA38, 'M', u'器'),
    (0xFA39, 'M', u'塀'),
    (0xFA3A, 'M', u'墨'),
    (0xFA3B, 'M', u'層'),
    (0xFA3C, 'M', u'屮'),
    (0xFA3D, 'M', u'悔'),
    (0xFA3E, 'M', u'慨'),
    (0xFA3F, 'M', u'憎'),
    (0xFA40, 'M', u'懲'),
    (0xFA41, 'M', u'敏'),
    (0xFA42, 'M', u'既'),
    (0xFA43, 'M', u'暑'),
    (0xFA44, 'M', u'梅'),
    (0xFA45, 'M', u'海'),
    (0xFA46, 'M', u'渚'),
    (0xFA47, 'M', u'漢'),
    (0xFA48, 'M', u'煮'),
    (0xFA49, 'M', u'爫'),
    (0xFA4A, 'M', u'琢'),
    (0xFA4B, 'M', u'碑'),
    (0xFA4C, 'M', u'社'),
    (0xFA4D, 'M', u'祉'),
    (0xFA4E, 'M', u'祈'),
    (0xFA4F, 'M', u'祐'),
    (0xFA50, 'M', u'祖'),
    (0xFA51, 'M', u'祝'),
    (0xFA52, 'M', u'禍'),
    (0xFA53, 'M', u'禎'),
    (0xFA54, 'M', u'穀'),
    (0xFA55, 'M', u'突'),
    (0xFA56, 'M', u'節'),
    (0xFA57, 'M', u'練'),
    (0xFA58, 'M', u'縉'),
    (0xFA59, 'M', u'繁'),
    (0xFA5A, 'M', u'署'),
    (0xFA5B, 'M', u'者'),
    (0xFA5C, 'M', u'臭'),
    (0xFA5D, 'M', u'艹'),
    (0xFA5F, 'M', u'著'),
    ]

def _seg_41():
    """IDNA/UTS #46 code point mapping table, segment 41 (auto-generated data).

    Entries are (codepoint, status) or (codepoint, status, mapping); statuses
    'M' and '3' carry a replacement string, 'V'/'X' do not. This segment
    covers CJK Compatibility Ideographs U+FA60 onward. Do not hand-edit.
    """
    return [
    (0xFA60, 'M', u'褐'),
    (0xFA61, 'M', u'視'),
    (0xFA62, 'M', u'謁'),
    (0xFA63, 'M', u'謹'),
    (0xFA64, 'M', u'賓'),
    (0xFA65, 'M', u'贈'),
    (0xFA66, 'M', u'辶'),
    (0xFA67, 'M', u'逸'),
    (0xFA68, 'M', u'難'),
    (0xFA69, 'M', u'響'),
    (0xFA6A, 'M', u'頻'),
    (0xFA6B, 'M', u'恵'),
    (0xFA6C, 'M', u'𤋮'),
    (0xFA6D, 'M', u'舘'),
    (0xFA6E, 'X'),
    (0xFA70, 'M', u'並'),
    (0xFA71, 'M', u'况'),
    (0xFA72, 'M', u'全'),
    (0xFA73, 'M', u'侀'),
    (0xFA74, 'M', u'充'),
    (0xFA75, 'M', u'冀'),
    (0xFA76, 'M', u'勇'),
    (0xFA77, 'M', u'勺'),
    (0xFA78, 'M', u'喝'),
    (0xFA79, 'M', u'啕'),
    (0xFA7A, 'M', u'喙'),
    (0xFA7B, 'M', u'嗢'),
    (0xFA7C, 'M', u'塚'),
    (0xFA7D, 'M', u'墳'),
    (0xFA7E, 'M', u'奄'),
    (0xFA7F, 'M', u'奔'),
    (0xFA80, 'M', u'婢'),
    (0xFA81, 'M', u'嬨'),
    (0xFA82, 'M', u'廒'),
    (0xFA83, 'M', u'廙'),
    (0xFA84, 'M', u'彩'),
    (0xFA85, 'M', u'徭'),
    (0xFA86, 'M', u'惘'),
    (0xFA87, 'M', u'慎'),
    (0xFA88, 'M', u'愈'),
    (0xFA89, 'M', u'憎'),
    (0xFA8A, 'M', u'慠'),
    (0xFA8B, 'M', u'懲'),
    (0xFA8C, 'M', u'戴'),
    (0xFA8D, 'M', u'揄'),
    (0xFA8E, 'M', u'搜'),
    (0xFA8F, 'M', u'摒'),
    (0xFA90, 'M', u'敖'),
    (0xFA91, 'M', u'晴'),
    (0xFA92, 'M', u'朗'),
    (0xFA93, 'M', u'望'),
    (0xFA94, 'M', u'杖'),
    (0xFA95, 'M', u'歹'),
    (0xFA96, 'M', u'殺'),
    (0xFA97, 'M', u'流'),
    (0xFA98, 'M', u'滛'),
    (0xFA99, 'M', u'滋'),
    (0xFA9A, 'M', u'漢'),
    (0xFA9B, 'M', u'瀞'),
    (0xFA9C, 'M', u'煮'),
    (0xFA9D, 'M', u'瞧'),
    (0xFA9E, 'M', u'爵'),
    (0xFA9F, 'M', u'犯'),
    (0xFAA0, 'M', u'猪'),
    (0xFAA1, 'M', u'瑱'),
    (0xFAA2, 'M', u'甆'),
    (0xFAA3, 'M', u'画'),
    (0xFAA4, 'M', u'瘝'),
    (0xFAA5, 'M', u'瘟'),
    (0xFAA6, 'M', u'益'),
    (0xFAA7, 'M', u'盛'),
    (0xFAA8, 'M', u'直'),
    (0xFAA9, 'M', u'睊'),
    (0xFAAA, 'M', u'着'),
    (0xFAAB, 'M', u'磌'),
    (0xFAAC, 'M', u'窱'),
    (0xFAAD, 'M', u'節'),
    (0xFAAE, 'M', u'类'),
    (0xFAAF, 'M', u'絛'),
    (0xFAB0, 'M', u'練'),
    (0xFAB1, 'M', u'缾'),
    (0xFAB2, 'M', u'者'),
    (0xFAB3, 'M', u'荒'),
    (0xFAB4, 'M', u'華'),
    (0xFAB5, 'M', u'蝹'),
    (0xFAB6, 'M', u'襁'),
    (0xFAB7, 'M', u'覆'),
    (0xFAB8, 'M', u'視'),
    (0xFAB9, 'M', u'調'),
    (0xFABA, 'M', u'諸'),
    (0xFABB, 'M', u'請'),
    (0xFABC, 'M', u'謁'),
    (0xFABD, 'M', u'諾'),
    (0xFABE, 'M', u'諭'),
    (0xFABF, 'M', u'謹'),
    (0xFAC0, 'M', u'變'),
    (0xFAC1, 'M', u'贈'),
    (0xFAC2, 'M', u'輸'),
    (0xFAC3, 'M', u'遲'),
    (0xFAC4, 'M', u'醙'),
    ]

def _seg_42():
    """IDNA/UTS #46 code point mapping table, segment 42 (auto-generated data).

    Entries are (codepoint, status) or (codepoint, status, mapping); statuses
    'M' and '3' carry a replacement string. This segment spans the end of the
    CJK Compatibility Ideographs, the Latin/Armenian/Hebrew ligatures
    (U+FB00 onward), and the start of the Arabic presentation forms.
    Do not hand-edit.
    """
    return [
    (0xFAC5, 'M', u'鉶'),
    (0xFAC6, 'M', u'陼'),
    (0xFAC7, 'M', u'難'),
    (0xFAC8, 'M', u'靖'),
    (0xFAC9, 'M', u'韛'),
    (0xFACA, 'M', u'響'),
    (0xFACB, 'M', u'頋'),
    (0xFACC, 'M', u'頻'),
    (0xFACD, 'M', u'鬒'),
    (0xFACE, 'M', u'龜'),
    (0xFACF, 'M', u'𢡊'),
    (0xFAD0, 'M', u'𢡄'),
    (0xFAD1, 'M', u'𣏕'),
    (0xFAD2, 'M', u'㮝'),
    (0xFAD3, 'M', u'䀘'),
    (0xFAD4, 'M', u'䀹'),
    (0xFAD5, 'M', u'𥉉'),
    (0xFAD6, 'M', u'𥳐'),
    (0xFAD7, 'M', u'𧻓'),
    (0xFAD8, 'M', u'齃'),
    (0xFAD9, 'M', u'龎'),
    (0xFADA, 'X'),
    (0xFB00, 'M', u'ff'),
    (0xFB01, 'M', u'fi'),
    (0xFB02, 'M', u'fl'),
    (0xFB03, 'M', u'ffi'),
    (0xFB04, 'M', u'ffl'),
    (0xFB05, 'M', u'st'),
    (0xFB07, 'X'),
    (0xFB13, 'M', u'մն'),
    (0xFB14, 'M', u'մե'),
    (0xFB15, 'M', u'մի'),
    (0xFB16, 'M', u'վն'),
    (0xFB17, 'M', u'մխ'),
    (0xFB18, 'X'),
    (0xFB1D, 'M', u'יִ'),
    (0xFB1E, 'V'),
    (0xFB1F, 'M', u'ײַ'),
    (0xFB20, 'M', u'ע'),
    (0xFB21, 'M', u'א'),
    (0xFB22, 'M', u'ד'),
    (0xFB23, 'M', u'ה'),
    (0xFB24, 'M', u'כ'),
    (0xFB25, 'M', u'ל'),
    (0xFB26, 'M', u'ם'),
    (0xFB27, 'M', u'ר'),
    (0xFB28, 'M', u'ת'),
    (0xFB29, '3', u'+'),
    (0xFB2A, 'M', u'שׁ'),
    (0xFB2B, 'M', u'שׂ'),
    (0xFB2C, 'M', u'שּׁ'),
    (0xFB2D, 'M', u'שּׂ'),
    (0xFB2E, 'M', u'אַ'),
    (0xFB2F, 'M', u'אָ'),
    (0xFB30, 'M', u'אּ'),
    (0xFB31, 'M', u'בּ'),
    (0xFB32, 'M', u'גּ'),
    (0xFB33, 'M', u'דּ'),
    (0xFB34, 'M', u'הּ'),
    (0xFB35, 'M', u'וּ'),
    (0xFB36, 'M', u'זּ'),
    (0xFB37, 'X'),
    (0xFB38, 'M', u'טּ'),
    (0xFB39, 'M', u'יּ'),
    (0xFB3A, 'M', u'ךּ'),
    (0xFB3B, 'M', u'כּ'),
    (0xFB3C, 'M', u'לּ'),
    (0xFB3D, 'X'),
    (0xFB3E, 'M', u'מּ'),
    (0xFB3F, 'X'),
    (0xFB40, 'M', u'נּ'),
    (0xFB41, 'M', u'סּ'),
    (0xFB42, 'X'),
    (0xFB43, 'M', u'ףּ'),
    (0xFB44, 'M', u'פּ'),
    (0xFB45, 'X'),
    (0xFB46, 'M', u'צּ'),
    (0xFB47, 'M', u'קּ'),
    (0xFB48, 'M', u'רּ'),
    (0xFB49, 'M', u'שּ'),
    (0xFB4A, 'M', u'תּ'),
    (0xFB4B, 'M', u'וֹ'),
    (0xFB4C, 'M', u'בֿ'),
    (0xFB4D, 'M', u'כֿ'),
    (0xFB4E, 'M', u'פֿ'),
    (0xFB4F, 'M', u'אל'),
    (0xFB50, 'M', u'ٱ'),
    (0xFB52, 'M', u'ٻ'),
    (0xFB56, 'M', u'پ'),
    (0xFB5A, 'M', u'ڀ'),
    (0xFB5E, 'M', u'ٺ'),
    (0xFB62, 'M', u'ٿ'),
    (0xFB66, 'M', u'ٹ'),
    (0xFB6A, 'M', u'ڤ'),
    (0xFB6E, 'M', u'ڦ'),
    (0xFB72, 'M', u'ڄ'),
    (0xFB76, 'M', u'ڃ'),
    (0xFB7A, 'M', u'چ'),
    (0xFB7E, 'M', u'ڇ'),
    (0xFB82, 'M', u'ڍ'),
    ]

def _seg_43():
    """IDNA/UTS #46 code point mapping table, segment 43 (auto-generated data).

    Entries are (codepoint, status) or (codepoint, status, mapping); statuses
    'M' and '3' carry a replacement string. This segment covers Arabic
    presentation-form ligatures (U+FB84 onward). Do not hand-edit.
    """
    return [
    (0xFB84, 'M', u'ڌ'),
    (0xFB86, 'M', u'ڎ'),
    (0xFB88, 'M', u'ڈ'),
    (0xFB8A, 'M', u'ژ'),
    (0xFB8C, 'M', u'ڑ'),
    (0xFB8E, 'M', u'ک'),
    (0xFB92, 'M', u'گ'),
    (0xFB96, 'M', u'ڳ'),
    (0xFB9A, 'M', u'ڱ'),
    (0xFB9E, 'M', u'ں'),
    (0xFBA0, 'M', u'ڻ'),
    (0xFBA4, 'M', u'ۀ'),
    (0xFBA6, 'M', u'ہ'),
    (0xFBAA, 'M', u'ھ'),
    (0xFBAE, 'M', u'ے'),
    (0xFBB0, 'M', u'ۓ'),
    (0xFBB2, 'V'),
    (0xFBC2, 'X'),
    (0xFBD3, 'M', u'ڭ'),
    (0xFBD7, 'M', u'ۇ'),
    (0xFBD9, 'M', u'ۆ'),
    (0xFBDB, 'M', u'ۈ'),
    (0xFBDD, 'M', u'ۇٴ'),
    (0xFBDE, 'M', u'ۋ'),
    (0xFBE0, 'M', u'ۅ'),
    (0xFBE2, 'M', u'ۉ'),
    (0xFBE4, 'M', u'ې'),
    (0xFBE8, 'M', u'ى'),
    (0xFBEA, 'M', u'ئا'),
    (0xFBEC, 'M', u'ئە'),
    (0xFBEE, 'M', u'ئو'),
    (0xFBF0, 'M', u'ئۇ'),
    (0xFBF2, 'M', u'ئۆ'),
    (0xFBF4, 'M', u'ئۈ'),
    (0xFBF6, 'M', u'ئې'),
    (0xFBF9, 'M', u'ئى'),
    (0xFBFC, 'M', u'ی'),
    (0xFC00, 'M', u'ئج'),
    (0xFC01, 'M', u'ئح'),
    (0xFC02, 'M', u'ئم'),
    (0xFC03, 'M', u'ئى'),
    (0xFC04, 'M', u'ئي'),
    (0xFC05, 'M', u'بج'),
    (0xFC06, 'M', u'بح'),
    (0xFC07, 'M', u'بخ'),
    (0xFC08, 'M', u'بم'),
    (0xFC09, 'M', u'بى'),
    (0xFC0A, 'M', u'بي'),
    (0xFC0B, 'M', u'تج'),
    (0xFC0C, 'M', u'تح'),
    (0xFC0D, 'M', u'تخ'),
    (0xFC0E, 'M', u'تم'),
    (0xFC0F, 'M', u'تى'),
    (0xFC10, 'M', u'تي'),
    (0xFC11, 'M', u'ثج'),
    (0xFC12, 'M', u'ثم'),
    (0xFC13, 'M', u'ثى'),
    (0xFC14, 'M', u'ثي'),
    (0xFC15, 'M', u'جح'),
    (0xFC16, 'M', u'جم'),
    (0xFC17, 'M', u'حج'),
    (0xFC18, 'M', u'حم'),
    (0xFC19, 'M', u'خج'),
    (0xFC1A, 'M', u'خح'),
    (0xFC1B, 'M', u'خم'),
    (0xFC1C, 'M', u'سج'),
    (0xFC1D, 'M', u'سح'),
    (0xFC1E, 'M', u'سخ'),
    (0xFC1F, 'M', u'سم'),
    (0xFC20, 'M', u'صح'),
    (0xFC21, 'M', u'صم'),
    (0xFC22, 'M', u'ضج'),
    (0xFC23, 'M', u'ضح'),
    (0xFC24, 'M', u'ضخ'),
    (0xFC25, 'M', u'ضم'),
    (0xFC26, 'M', u'طح'),
    (0xFC27, 'M', u'طم'),
    (0xFC28, 'M', u'ظم'),
    (0xFC29, 'M', u'عج'),
    (0xFC2A, 'M', u'عم'),
    (0xFC2B, 'M', u'غج'),
    (0xFC2C, 'M', u'غم'),
    (0xFC2D, 'M', u'فج'),
    (0xFC2E, 'M', u'فح'),
    (0xFC2F, 'M', u'فخ'),
    (0xFC30, 'M', u'فم'),
    (0xFC31, 'M', u'فى'),
    (0xFC32, 'M', u'في'),
    (0xFC33, 'M', u'قح'),
    (0xFC34, 'M', u'قم'),
    (0xFC35, 'M', u'قى'),
    (0xFC36, 'M', u'قي'),
    (0xFC37, 'M', u'كا'),
    (0xFC38, 'M', u'كج'),
    (0xFC39, 'M', u'كح'),
    (0xFC3A, 'M', u'كخ'),
    (0xFC3B, 'M', u'كل'),
    (0xFC3C, 'M', u'كم'),
    (0xFC3D, 'M', u'كى'),
    (0xFC3E, 'M', u'كي'),
    ]

def _seg_44():
    """IDNA/UTS #46 code point mapping table, segment 44 (auto-generated data).

    Entries are (codepoint, status) or (codepoint, status, mapping); statuses
    'M' and '3' carry a replacement string. This segment covers Arabic
    presentation-form ligatures (U+FC3F onward); '3' entries map to strings
    that begin with a plain space. Do not hand-edit.
    """
    return [
    (0xFC3F, 'M', u'لج'),
    (0xFC40, 'M', u'لح'),
    (0xFC41, 'M', u'لخ'),
    (0xFC42, 'M', u'لم'),
    (0xFC43, 'M', u'لى'),
    (0xFC44, 'M', u'لي'),
    (0xFC45, 'M', u'مج'),
    (0xFC46, 'M', u'مح'),
    (0xFC47, 'M', u'مخ'),
    (0xFC48, 'M', u'مم'),
    (0xFC49, 'M', u'مى'),
    (0xFC4A, 'M', u'مي'),
    (0xFC4B, 'M', u'نج'),
    (0xFC4C, 'M', u'نح'),
    (0xFC4D, 'M', u'نخ'),
    (0xFC4E, 'M', u'نم'),
    (0xFC4F, 'M', u'نى'),
    (0xFC50, 'M', u'ني'),
    (0xFC51, 'M', u'هج'),
    (0xFC52, 'M', u'هم'),
    (0xFC53, 'M', u'هى'),
    (0xFC54, 'M', u'هي'),
    (0xFC55, 'M', u'يج'),
    (0xFC56, 'M', u'يح'),
    (0xFC57, 'M', u'يخ'),
    (0xFC58, 'M', u'يم'),
    (0xFC59, 'M', u'يى'),
    (0xFC5A, 'M', u'يي'),
    (0xFC5B, 'M', u'ذٰ'),
    (0xFC5C, 'M', u'رٰ'),
    (0xFC5D, 'M', u'ىٰ'),
    (0xFC5E, '3', u' ٌّ'),
    (0xFC5F, '3', u' ٍّ'),
    (0xFC60, '3', u' َّ'),
    (0xFC61, '3', u' ُّ'),
    (0xFC62, '3', u' ِّ'),
    (0xFC63, '3', u' ّٰ'),
    (0xFC64, 'M', u'ئر'),
    (0xFC65, 'M', u'ئز'),
    (0xFC66, 'M', u'ئم'),
    (0xFC67, 'M', u'ئن'),
    (0xFC68, 'M', u'ئى'),
    (0xFC69, 'M', u'ئي'),
    (0xFC6A, 'M', u'بر'),
    (0xFC6B, 'M', u'بز'),
    (0xFC6C, 'M', u'بم'),
    (0xFC6D, 'M', u'بن'),
    (0xFC6E, 'M', u'بى'),
    (0xFC6F, 'M', u'بي'),
    (0xFC70, 'M', u'تر'),
    (0xFC71, 'M', u'تز'),
    (0xFC72, 'M', u'تم'),
    (0xFC73, 'M', u'تن'),
    (0xFC74, 'M', u'تى'),
    (0xFC75, 'M', u'تي'),
    (0xFC76, 'M', u'ثر'),
    (0xFC77, 'M', u'ثز'),
    (0xFC78, 'M', u'ثم'),
    (0xFC79, 'M', u'ثن'),
    (0xFC7A, 'M', u'ثى'),
    (0xFC7B, 'M', u'ثي'),
    (0xFC7C, 'M', u'فى'),
    (0xFC7D, 'M', u'في'),
    (0xFC7E, 'M', u'قى'),
    (0xFC7F, 'M', u'قي'),
    (0xFC80, 'M', u'كا'),
    (0xFC81, 'M', u'كل'),
    (0xFC82, 'M', u'كم'),
    (0xFC83, 'M', u'كى'),
    (0xFC84, 'M', u'كي'),
    (0xFC85, 'M', u'لم'),
    (0xFC86, 'M', u'لى'),
    (0xFC87, 'M', u'لي'),
    (0xFC88, 'M', u'ما'),
    (0xFC89, 'M', u'مم'),
    (0xFC8A, 'M', u'نر'),
    (0xFC8B, 'M', u'نز'),
    (0xFC8C, 'M', u'نم'),
    (0xFC8D, 'M', u'نن'),
    (0xFC8E, 'M', u'نى'),
    (0xFC8F, 'M', u'ني'),
    (0xFC90, 'M', u'ىٰ'),
    (0xFC91, 'M', u'ير'),
    (0xFC92, 'M', u'يز'),
    (0xFC93, 'M', u'يم'),
    (0xFC94, 'M', u'ين'),
    (0xFC95, 'M', u'يى'),
    (0xFC96, 'M', u'يي'),
    (0xFC97, 'M', u'ئج'),
    (0xFC98, 'M', u'ئح'),
    (0xFC99, 'M', u'ئخ'),
    (0xFC9A, 'M', u'ئم'),
    (0xFC9B, 'M', u'ئه'),
    (0xFC9C, 'M', u'بج'),
    (0xFC9D, 'M', u'بح'),
    (0xFC9E, 'M', u'بخ'),
    (0xFC9F, 'M', u'بم'),
    (0xFCA0, 'M', u'به'),
    (0xFCA1, 'M', u'تج'),
    (0xFCA2, 'M', u'تح'),
    ]

def _seg_45():
    """IDNA/UTS #46 code point mapping table, segment 45 (auto-generated data).

    Entries are (codepoint, status) or (codepoint, status, mapping); statuses
    'M' and '3' carry a replacement string. This segment covers Arabic
    presentation-form ligatures (U+FCA3 onward). Do not hand-edit.
    """
    return [
    (0xFCA3, 'M', u'تخ'),
    (0xFCA4, 'M', u'تم'),
    (0xFCA5, 'M', u'ته'),
    (0xFCA6, 'M', u'ثم'),
    (0xFCA7, 'M', u'جح'),
    (0xFCA8, 'M', u'جم'),
    (0xFCA9, 'M', u'حج'),
    (0xFCAA, 'M', u'حم'),
    (0xFCAB, 'M', u'خج'),
    (0xFCAC, 'M', u'خم'),
    (0xFCAD, 'M', u'سج'),
    (0xFCAE, 'M', u'سح'),
    (0xFCAF, 'M', u'سخ'),
    (0xFCB0, 'M', u'سم'),
    (0xFCB1, 'M', u'صح'),
    (0xFCB2, 'M', u'صخ'),
    (0xFCB3, 'M', u'صم'),
    (0xFCB4, 'M', u'ضج'),
    (0xFCB5, 'M', u'ضح'),
    (0xFCB6, 'M', u'ضخ'),
    (0xFCB7, 'M', u'ضم'),
    (0xFCB8, 'M', u'طح'),
    (0xFCB9, 'M', u'ظم'),
    (0xFCBA, 'M', u'عج'),
    (0xFCBB, 'M', u'عم'),
    (0xFCBC, 'M', u'غج'),
    (0xFCBD, 'M', u'غم'),
    (0xFCBE, 'M', u'فج'),
    (0xFCBF, 'M', u'فح'),
    (0xFCC0, 'M', u'فخ'),
    (0xFCC1, 'M', u'فم'),
    (0xFCC2, 'M', u'قح'),
    (0xFCC3, 'M', u'قم'),
    (0xFCC4, 'M', u'كج'),
    (0xFCC5, 'M', u'كح'),
    (0xFCC6, 'M', u'كخ'),
    (0xFCC7, 'M', u'كل'),
    (0xFCC8, 'M', u'كم'),
    (0xFCC9, 'M', u'لج'),
    (0xFCCA, 'M', u'لح'),
    (0xFCCB, 'M', u'لخ'),
    (0xFCCC, 'M', u'لم'),
    (0xFCCD, 'M', u'له'),
    (0xFCCE, 'M', u'مج'),
    (0xFCCF, 'M', u'مح'),
    (0xFCD0, 'M', u'مخ'),
    (0xFCD1, 'M', u'مم'),
    (0xFCD2, 'M', u'نج'),
    (0xFCD3, 'M', u'نح'),
    (0xFCD4, 'M', u'نخ'),
    (0xFCD5, 'M', u'نم'),
    (0xFCD6, 'M', u'نه'),
    (0xFCD7, 'M', u'هج'),
    (0xFCD8, 'M', u'هم'),
    (0xFCD9, 'M', u'هٰ'),
    (0xFCDA, 'M', u'يج'),
    (0xFCDB, 'M', u'يح'),
    (0xFCDC, 'M', u'يخ'),
    (0xFCDD, 'M', u'يم'),
    (0xFCDE, 'M', u'يه'),
    (0xFCDF, 'M', u'ئم'),
    (0xFCE0, 'M', u'ئه'),
    (0xFCE1, 'M', u'بم'),
    (0xFCE2, 'M', u'به'),
    (0xFCE3, 'M', u'تم'),
    (0xFCE4, 'M', u'ته'),
    (0xFCE5, 'M', u'ثم'),
    (0xFCE6, 'M', u'ثه'),
    (0xFCE7, 'M', u'سم'),
    (0xFCE8, 'M', u'سه'),
    (0xFCE9, 'M', u'شم'),
    (0xFCEA, 'M', u'شه'),
    (0xFCEB, 'M', u'كل'),
    (0xFCEC, 'M', u'كم'),
    (0xFCED, 'M', u'لم'),
    (0xFCEE, 'M', u'نم'),
    (0xFCEF, 'M', u'نه'),
    (0xFCF0, 'M', u'يم'),
    (0xFCF1, 'M', u'يه'),
    (0xFCF2, 'M', u'ـَّ'),
    (0xFCF3, 'M', u'ـُّ'),
    (0xFCF4, 'M', u'ـِّ'),
    (0xFCF5, 'M', u'طى'),
    (0xFCF6, 'M', u'طي'),
    (0xFCF7, 'M', u'عى'),
    (0xFCF8, 'M', u'عي'),
    (0xFCF9, 'M', u'غى'),
    (0xFCFA, 'M', u'غي'),
    (0xFCFB, 'M', u'سى'),
    (0xFCFC, 'M', u'سي'),
    (0xFCFD, 'M', u'شى'),
    (0xFCFE, 'M', u'شي'),
    (0xFCFF, 'M', u'حى'),
    (0xFD00, 'M', u'حي'),
    (0xFD01, 'M', u'جى'),
    (0xFD02, 'M', u'جي'),
    (0xFD03, 'M', u'خى'),
    (0xFD04, 'M', u'خي'),
    (0xFD05, 'M', u'صى'),
    (0xFD06, 'M', u'صي'),
    ]

def _seg_46():
    """IDNA/UTS #46 code point mapping table, segment 46 (auto-generated data).

    Entries are (codepoint, status) or (codepoint, status, mapping); statuses
    'M' and '3' carry a replacement string. This segment covers two- and
    three-letter Arabic presentation-form ligatures (U+FD07 onward).
    Do not hand-edit.
    """
    return [
    (0xFD07, 'M', u'ضى'),
    (0xFD08, 'M', u'ضي'),
    (0xFD09, 'M', u'شج'),
    (0xFD0A, 'M', u'شح'),
    (0xFD0B, 'M', u'شخ'),
    (0xFD0C, 'M', u'شم'),
    (0xFD0D, 'M', u'شر'),
    (0xFD0E, 'M', u'سر'),
    (0xFD0F, 'M', u'صر'),
    (0xFD10, 'M', u'ضر'),
    (0xFD11, 'M', u'طى'),
    (0xFD12, 'M', u'طي'),
    (0xFD13, 'M', u'عى'),
    (0xFD14, 'M', u'عي'),
    (0xFD15, 'M', u'غى'),
    (0xFD16, 'M', u'غي'),
    (0xFD17, 'M', u'سى'),
    (0xFD18, 'M', u'سي'),
    (0xFD19, 'M', u'شى'),
    (0xFD1A, 'M', u'شي'),
    (0xFD1B, 'M', u'حى'),
    (0xFD1C, 'M', u'حي'),
    (0xFD1D, 'M', u'جى'),
    (0xFD1E, 'M', u'جي'),
    (0xFD1F, 'M', u'خى'),
    (0xFD20, 'M', u'خي'),
    (0xFD21, 'M', u'صى'),
    (0xFD22, 'M', u'صي'),
    (0xFD23, 'M', u'ضى'),
    (0xFD24, 'M', u'ضي'),
    (0xFD25, 'M', u'شج'),
    (0xFD26, 'M', u'شح'),
    (0xFD27, 'M', u'شخ'),
    (0xFD28, 'M', u'شم'),
    (0xFD29, 'M', u'شر'),
    (0xFD2A, 'M', u'سر'),
    (0xFD2B, 'M', u'صر'),
    (0xFD2C, 'M', u'ضر'),
    (0xFD2D, 'M', u'شج'),
    (0xFD2E, 'M', u'شح'),
    (0xFD2F, 'M', u'شخ'),
    (0xFD30, 'M', u'شم'),
    (0xFD31, 'M', u'سه'),
    (0xFD32, 'M', u'شه'),
    (0xFD33, 'M', u'طم'),
    (0xFD34, 'M', u'سج'),
    (0xFD35, 'M', u'سح'),
    (0xFD36, 'M', u'سخ'),
    (0xFD37, 'M', u'شج'),
    (0xFD38, 'M', u'شح'),
    (0xFD39, 'M', u'شخ'),
    (0xFD3A, 'M', u'طم'),
    (0xFD3B, 'M', u'ظم'),
    (0xFD3C, 'M', u'اً'),
    (0xFD3E, 'V'),
    (0xFD40, 'X'),
    (0xFD50, 'M', u'تجم'),
    (0xFD51, 'M', u'تحج'),
    (0xFD53, 'M', u'تحم'),
    (0xFD54, 'M', u'تخم'),
    (0xFD55, 'M', u'تمج'),
    (0xFD56, 'M', u'تمح'),
    (0xFD57, 'M', u'تمخ'),
    (0xFD58, 'M', u'جمح'),
    (0xFD5A, 'M', u'حمي'),
    (0xFD5B, 'M', u'حمى'),
    (0xFD5C, 'M', u'سحج'),
    (0xFD5D, 'M', u'سجح'),
    (0xFD5E, 'M', u'سجى'),
    (0xFD5F, 'M', u'سمح'),
    (0xFD61, 'M', u'سمج'),
    (0xFD62, 'M', u'سمم'),
    (0xFD64, 'M', u'صحح'),
    (0xFD66, 'M', u'صمم'),
    (0xFD67, 'M', u'شحم'),
    (0xFD69, 'M', u'شجي'),
    (0xFD6A, 'M', u'شمخ'),
    (0xFD6C, 'M', u'شمم'),
    (0xFD6E, 'M', u'ضحى'),
    (0xFD6F, 'M', u'ضخم'),
    (0xFD71, 'M', u'طمح'),
    (0xFD73, 'M', u'طمم'),
    (0xFD74, 'M', u'طمي'),
    (0xFD75, 'M', u'عجم'),
    (0xFD76, 'M', u'عمم'),
    (0xFD78, 'M', u'عمى'),
    (0xFD79, 'M', u'غمم'),
    (0xFD7A, 'M', u'غمي'),
    (0xFD7B, 'M', u'غمى'),
    (0xFD7C, 'M', u'فخم'),
    (0xFD7E, 'M', u'قمح'),
    (0xFD7F, 'M', u'قمم'),
    (0xFD80, 'M', u'لحم'),
    (0xFD81, 'M', u'لحي'),
    (0xFD82, 'M', u'لحى'),
    (0xFD83, 'M', u'لجج'),
    (0xFD85, 'M', u'لخم'),
    (0xFD87, 'M', u'لمح'),
    (0xFD89, 'M', u'محج'),
    (0xFD8A, 'M', u'محم'),
    ]

def _seg_47():
    """IDNA/UTS #46 code point mapping table, segment 47 (auto-generated data).

    Entries are (codepoint, status) or (codepoint, status, mapping); statuses
    'M' and '3' carry a replacement string. This segment spans the remaining
    Arabic presentation-form ligatures (including the word ligatures such as
    U+FDFA) and the start of the variation selectors / vertical forms
    (U+FE00 onward). Do not hand-edit.
    """
    return [
    (0xFD8B, 'M', u'محي'),
    (0xFD8C, 'M', u'مجح'),
    (0xFD8D, 'M', u'مجم'),
    (0xFD8E, 'M', u'مخج'),
    (0xFD8F, 'M', u'مخم'),
    (0xFD90, 'X'),
    (0xFD92, 'M', u'مجخ'),
    (0xFD93, 'M', u'همج'),
    (0xFD94, 'M', u'همم'),
    (0xFD95, 'M', u'نحم'),
    (0xFD96, 'M', u'نحى'),
    (0xFD97, 'M', u'نجم'),
    (0xFD99, 'M', u'نجى'),
    (0xFD9A, 'M', u'نمي'),
    (0xFD9B, 'M', u'نمى'),
    (0xFD9C, 'M', u'يمم'),
    (0xFD9E, 'M', u'بخي'),
    (0xFD9F, 'M', u'تجي'),
    (0xFDA0, 'M', u'تجى'),
    (0xFDA1, 'M', u'تخي'),
    (0xFDA2, 'M', u'تخى'),
    (0xFDA3, 'M', u'تمي'),
    (0xFDA4, 'M', u'تمى'),
    (0xFDA5, 'M', u'جمي'),
    (0xFDA6, 'M', u'جحى'),
    (0xFDA7, 'M', u'جمى'),
    (0xFDA8, 'M', u'سخى'),
    (0xFDA9, 'M', u'صحي'),
    (0xFDAA, 'M', u'شحي'),
    (0xFDAB, 'M', u'ضحي'),
    (0xFDAC, 'M', u'لجي'),
    (0xFDAD, 'M', u'لمي'),
    (0xFDAE, 'M', u'يحي'),
    (0xFDAF, 'M', u'يجي'),
    (0xFDB0, 'M', u'يمي'),
    (0xFDB1, 'M', u'ممي'),
    (0xFDB2, 'M', u'قمي'),
    (0xFDB3, 'M', u'نحي'),
    (0xFDB4, 'M', u'قمح'),
    (0xFDB5, 'M', u'لحم'),
    (0xFDB6, 'M', u'عمي'),
    (0xFDB7, 'M', u'كمي'),
    (0xFDB8, 'M', u'نجح'),
    (0xFDB9, 'M', u'مخي'),
    (0xFDBA, 'M', u'لجم'),
    (0xFDBB, 'M', u'كمم'),
    (0xFDBC, 'M', u'لجم'),
    (0xFDBD, 'M', u'نجح'),
    (0xFDBE, 'M', u'جحي'),
    (0xFDBF, 'M', u'حجي'),
    (0xFDC0, 'M', u'مجي'),
    (0xFDC1, 'M', u'فمي'),
    (0xFDC2, 'M', u'بحي'),
    (0xFDC3, 'M', u'كمم'),
    (0xFDC4, 'M', u'عجم'),
    (0xFDC5, 'M', u'صمم'),
    (0xFDC6, 'M', u'سخي'),
    (0xFDC7, 'M', u'نجي'),
    (0xFDC8, 'X'),
    (0xFDF0, 'M', u'صلے'),
    (0xFDF1, 'M', u'قلے'),
    (0xFDF2, 'M', u'الله'),
    (0xFDF3, 'M', u'اكبر'),
    (0xFDF4, 'M', u'محمد'),
    (0xFDF5, 'M', u'صلعم'),
    (0xFDF6, 'M', u'رسول'),
    (0xFDF7, 'M', u'عليه'),
    (0xFDF8, 'M', u'وسلم'),
    (0xFDF9, 'M', u'صلى'),
    (0xFDFA, '3', u'صلى الله عليه وسلم'),
    (0xFDFB, '3', u'جل جلاله'),
    (0xFDFC, 'M', u'ریال'),
    (0xFDFD, 'V'),
    (0xFDFE, 'X'),
    (0xFE00, 'I'),
    (0xFE10, '3', u','),
    (0xFE11, 'M', u'、'),
    (0xFE12, 'X'),
    (0xFE13, '3', u':'),
    (0xFE14, '3', u';'),
    (0xFE15, '3', u'!'),
    (0xFE16, '3', u'?'),
    (0xFE17, 'M', u'〖'),
    (0xFE18, 'M', u'〗'),
    (0xFE19, 'X'),
    (0xFE20, 'V'),
    (0xFE27, 'X'),
    (0xFE31, 'M', u'—'),
    (0xFE32, 'M', u'–'),
    (0xFE33, '3', u'_'),
    (0xFE35, '3', u'('),
    (0xFE36, '3', u')'),
    (0xFE37, '3', u'{'),
    (0xFE38, '3', u'}'),
    (0xFE39, 'M', u'〔'),
    (0xFE3A, 'M', u'〕'),
    (0xFE3B, 'M', u'【'),
    (0xFE3C, 'M', u'】'),
    (0xFE3D, 'M', u'《'),
    (0xFE3E, 'M', u'》'),
    ]

def _seg_48():
    """IDNA/UTS #46 code point mapping table, segment 48 (auto-generated data).

    Entries are (codepoint, status) or (codepoint, status, mapping); statuses
    'M' and '3' carry a replacement string, 'I' entries are mapped to nothing.
    This segment spans the CJK compatibility/small forms (U+FE3F onward) and
    the Arabic presentation forms B (U+FE70 onward). Do not hand-edit.
    """
    return [
    (0xFE3F, 'M', u'〈'),
    (0xFE40, 'M', u'〉'),
    (0xFE41, 'M', u'「'),
    (0xFE42, 'M', u'」'),
    (0xFE43, 'M', u'『'),
    (0xFE44, 'M', u'』'),
    (0xFE45, 'V'),
    (0xFE47, '3', u'['),
    (0xFE48, '3', u']'),
    (0xFE49, '3', u' ̅'),
    (0xFE4D, '3', u'_'),
    (0xFE50, '3', u','),
    (0xFE51, 'M', u'、'),
    (0xFE52, 'X'),
    (0xFE54, '3', u';'),
    (0xFE55, '3', u':'),
    (0xFE56, '3', u'?'),
    (0xFE57, '3', u'!'),
    (0xFE58, 'M', u'—'),
    (0xFE59, '3', u'('),
    (0xFE5A, '3', u')'),
    (0xFE5B, '3', u'{'),
    (0xFE5C, '3', u'}'),
    (0xFE5D, 'M', u'〔'),
    (0xFE5E, 'M', u'〕'),
    (0xFE5F, '3', u'#'),
    (0xFE60, '3', u'&'),
    (0xFE61, '3', u'*'),
    (0xFE62, '3', u'+'),
    (0xFE63, 'M', u'-'),
    (0xFE64, '3', u'<'),
    (0xFE65, '3', u'>'),
    (0xFE66, '3', u'='),
    (0xFE67, 'X'),
    (0xFE68, '3', u'\\'),
    (0xFE69, '3', u'$'),
    (0xFE6A, '3', u'%'),
    (0xFE6B, '3', u'@'),
    (0xFE6C, 'X'),
    (0xFE70, '3', u' ً'),
    (0xFE71, 'M', u'ـً'),
    (0xFE72, '3', u' ٌ'),
    (0xFE73, 'V'),
    (0xFE74, '3', u' ٍ'),
    (0xFE75, 'X'),
    (0xFE76, '3', u' َ'),
    (0xFE77, 'M', u'ـَ'),
    (0xFE78, '3', u' ُ'),
    (0xFE79, 'M', u'ـُ'),
    (0xFE7A, '3', u' ِ'),
    (0xFE7B, 'M', u'ـِ'),
    (0xFE7C, '3', u' ّ'),
    (0xFE7D, 'M', u'ـّ'),
    (0xFE7E, '3', u' ْ'),
    (0xFE7F, 'M', u'ـْ'),
    (0xFE80, 'M', u'ء'),
    (0xFE81, 'M', u'آ'),
    (0xFE83, 'M', u'أ'),
    (0xFE85, 'M', u'ؤ'),
    (0xFE87, 'M', u'إ'),
    (0xFE89, 'M', u'ئ'),
    (0xFE8D, 'M', u'ا'),
    (0xFE8F, 'M', u'ب'),
    (0xFE93, 'M', u'ة'),
    (0xFE95, 'M', u'ت'),
    (0xFE99, 'M', u'ث'),
    (0xFE9D, 'M', u'ج'),
    (0xFEA1, 'M', u'ح'),
    (0xFEA5, 'M', u'خ'),
    (0xFEA9, 'M', u'د'),
    (0xFEAB, 'M', u'ذ'),
    (0xFEAD, 'M', u'ر'),
    (0xFEAF, 'M', u'ز'),
    (0xFEB1, 'M', u'س'),
    (0xFEB5, 'M', u'ش'),
    (0xFEB9, 'M', u'ص'),
    (0xFEBD, 'M', u'ض'),
    (0xFEC1, 'M', u'ط'),
    (0xFEC5, 'M', u'ظ'),
    (0xFEC9, 'M', u'ع'),
    (0xFECD, 'M', u'غ'),
    (0xFED1, 'M', u'ف'),
    (0xFED5, 'M', u'ق'),
    (0xFED9, 'M', u'ك'),
    (0xFEDD, 'M', u'ل'),
    (0xFEE1, 'M', u'م'),
    (0xFEE5, 'M', u'ن'),
    (0xFEE9, 'M', u'ه'),
    (0xFEED, 'M', u'و'),
    (0xFEEF, 'M', u'ى'),
    (0xFEF1, 'M', u'ي'),
    (0xFEF5, 'M', u'لآ'),
    (0xFEF7, 'M', u'لأ'),
    (0xFEF9, 'M', u'لإ'),
    (0xFEFB, 'M', u'لا'),
    (0xFEFD, 'X'),
    (0xFEFF, 'I'),
    (0xFF00, 'X'),
    (0xFF01, '3', u'!'),
    (0xFF02, '3', u'"'),
    ]

def _seg_49():
    """IDNA/UTS #46 code point mapping table, segment 49 (auto-generated data).

    Entries are (codepoint, status) or (codepoint, status, mapping); statuses
    'M' and '3' carry a replacement string. This segment covers the fullwidth
    ASCII forms (U+FF03 onward) — note both uppercase U+FF21..FF3A and
    lowercase U+FF41..FF5A map to lowercase ASCII — and the start of the
    halfwidth CJK punctuation/kana. Do not hand-edit.
    """
    return [
    (0xFF03, '3', u'#'),
    (0xFF04, '3', u'$'),
    (0xFF05, '3', u'%'),
    (0xFF06, '3', u'&'),
    (0xFF07, '3', u'\''),
    (0xFF08, '3', u'('),
    (0xFF09, '3', u')'),
    (0xFF0A, '3', u'*'),
    (0xFF0B, '3', u'+'),
    (0xFF0C, '3', u','),
    (0xFF0D, 'M', u'-'),
    (0xFF0E, 'M', u'.'),
    (0xFF0F, '3', u'/'),
    (0xFF10, 'M', u'0'),
    (0xFF11, 'M', u'1'),
    (0xFF12, 'M', u'2'),
    (0xFF13, 'M', u'3'),
    (0xFF14, 'M', u'4'),
    (0xFF15, 'M', u'5'),
    (0xFF16, 'M', u'6'),
    (0xFF17, 'M', u'7'),
    (0xFF18, 'M', u'8'),
    (0xFF19, 'M', u'9'),
    (0xFF1A, '3', u':'),
    (0xFF1B, '3', u';'),
    (0xFF1C, '3', u'<'),
    (0xFF1D, '3', u'='),
    (0xFF1E, '3', u'>'),
    (0xFF1F, '3', u'?'),
    (0xFF20, '3', u'@'),
    (0xFF21, 'M', u'a'),
    (0xFF22, 'M', u'b'),
    (0xFF23, 'M', u'c'),
    (0xFF24, 'M', u'd'),
    (0xFF25, 'M', u'e'),
    (0xFF26, 'M', u'f'),
    (0xFF27, 'M', u'g'),
    (0xFF28, 'M', u'h'),
    (0xFF29, 'M', u'i'),
    (0xFF2A, 'M', u'j'),
    (0xFF2B, 'M', u'k'),
    (0xFF2C, 'M', u'l'),
    (0xFF2D, 'M', u'm'),
    (0xFF2E, 'M', u'n'),
    (0xFF2F, 'M', u'o'),
    (0xFF30, 'M', u'p'),
    (0xFF31, 'M', u'q'),
    (0xFF32, 'M', u'r'),
    (0xFF33, 'M', u's'),
    (0xFF34, 'M', u't'),
    (0xFF35, 'M', u'u'),
    (0xFF36, 'M', u'v'),
    (0xFF37, 'M', u'w'),
    (0xFF38, 'M', u'x'),
    (0xFF39, 'M', u'y'),
    (0xFF3A, 'M', u'z'),
    (0xFF3B, '3', u'['),
    (0xFF3C, '3', u'\\'),
    (0xFF3D, '3', u']'),
    (0xFF3E, '3', u'^'),
    (0xFF3F, '3', u'_'),
    (0xFF40, '3', u'`'),
    (0xFF41, 'M', u'a'),
    (0xFF42, 'M', u'b'),
    (0xFF43, 'M', u'c'),
    (0xFF44, 'M', u'd'),
    (0xFF45, 'M', u'e'),
    (0xFF46, 'M', u'f'),
    (0xFF47, 'M', u'g'),
    (0xFF48, 'M', u'h'),
    (0xFF49, 'M', u'i'),
    (0xFF4A, 'M', u'j'),
    (0xFF4B, 'M', u'k'),
    (0xFF4C, 'M', u'l'),
    (0xFF4D, 'M', u'm'),
    (0xFF4E, 'M', u'n'),
    (0xFF4F, 'M', u'o'),
    (0xFF50, 'M', u'p'),
    (0xFF51, 'M', u'q'),
    (0xFF52, 'M', u'r'),
    (0xFF53, 'M', u's'),
    (0xFF54, 'M', u't'),
    (0xFF55, 'M', u'u'),
    (0xFF56, 'M', u'v'),
    (0xFF57, 'M', u'w'),
    (0xFF58, 'M', u'x'),
    (0xFF59, 'M', u'y'),
    (0xFF5A, 'M', u'z'),
    (0xFF5B, '3', u'{'),
    (0xFF5C, '3', u'|'),
    (0xFF5D, '3', u'}'),
    (0xFF5E, '3', u'~'),
    (0xFF5F, 'M', u'⦅'),
    (0xFF60, 'M', u'⦆'),
    (0xFF61, 'M', u'.'),
    (0xFF62, 'M', u'「'),
    (0xFF63, 'M', u'」'),
    (0xFF64, 'M', u'、'),
    (0xFF65, 'M', u'・'),
    (0xFF66, 'M', u'ヲ'),
    ]

def _seg_50():
    """Return segment 50 of the generated Unicode code-point mapping table.

    Each entry is ``(codepoint, status)`` or ``(codepoint, status, mapping)``:
    rows with status 'M' or '3' carry a replacement string, rows with 'V' or
    'X' do not (presumably the UTS #46 valid/mapped/disallowed statuses --
    confirm against the idna table generator). Generated data; do not edit
    by hand.
    """
    return [
    (0xFF67, 'M', u'ァ'),
    (0xFF68, 'M', u'ィ'),
    (0xFF69, 'M', u'ゥ'),
    (0xFF6A, 'M', u'ェ'),
    (0xFF6B, 'M', u'ォ'),
    (0xFF6C, 'M', u'ャ'),
    (0xFF6D, 'M', u'ュ'),
    (0xFF6E, 'M', u'ョ'),
    (0xFF6F, 'M', u'ッ'),
    (0xFF70, 'M', u'ー'),
    (0xFF71, 'M', u'ア'),
    (0xFF72, 'M', u'イ'),
    (0xFF73, 'M', u'ウ'),
    (0xFF74, 'M', u'エ'),
    (0xFF75, 'M', u'オ'),
    (0xFF76, 'M', u'カ'),
    (0xFF77, 'M', u'キ'),
    (0xFF78, 'M', u'ク'),
    (0xFF79, 'M', u'ケ'),
    (0xFF7A, 'M', u'コ'),
    (0xFF7B, 'M', u'サ'),
    (0xFF7C, 'M', u'シ'),
    (0xFF7D, 'M', u'ス'),
    (0xFF7E, 'M', u'セ'),
    (0xFF7F, 'M', u'ソ'),
    (0xFF80, 'M', u'タ'),
    (0xFF81, 'M', u'チ'),
    (0xFF82, 'M', u'ツ'),
    (0xFF83, 'M', u'テ'),
    (0xFF84, 'M', u'ト'),
    (0xFF85, 'M', u'ナ'),
    (0xFF86, 'M', u'ニ'),
    (0xFF87, 'M', u'ヌ'),
    (0xFF88, 'M', u'ネ'),
    (0xFF89, 'M', u'ノ'),
    (0xFF8A, 'M', u'ハ'),
    (0xFF8B, 'M', u'ヒ'),
    (0xFF8C, 'M', u'フ'),
    (0xFF8D, 'M', u'ヘ'),
    (0xFF8E, 'M', u'ホ'),
    (0xFF8F, 'M', u'マ'),
    (0xFF90, 'M', u'ミ'),
    (0xFF91, 'M', u'ム'),
    (0xFF92, 'M', u'メ'),
    (0xFF93, 'M', u'モ'),
    (0xFF94, 'M', u'ヤ'),
    (0xFF95, 'M', u'ユ'),
    (0xFF96, 'M', u'ヨ'),
    (0xFF97, 'M', u'ラ'),
    (0xFF98, 'M', u'リ'),
    (0xFF99, 'M', u'ル'),
    (0xFF9A, 'M', u'レ'),
    (0xFF9B, 'M', u'ロ'),
    (0xFF9C, 'M', u'ワ'),
    (0xFF9D, 'M', u'ン'),
    (0xFF9E, 'M', u'゙'),
    (0xFF9F, 'M', u'゚'),
    (0xFFA0, 'X'),
    (0xFFA1, 'M', u'ᄀ'),
    (0xFFA2, 'M', u'ᄁ'),
    (0xFFA3, 'M', u'ᆪ'),
    (0xFFA4, 'M', u'ᄂ'),
    (0xFFA5, 'M', u'ᆬ'),
    (0xFFA6, 'M', u'ᆭ'),
    (0xFFA7, 'M', u'ᄃ'),
    (0xFFA8, 'M', u'ᄄ'),
    (0xFFA9, 'M', u'ᄅ'),
    (0xFFAA, 'M', u'ᆰ'),
    (0xFFAB, 'M', u'ᆱ'),
    (0xFFAC, 'M', u'ᆲ'),
    (0xFFAD, 'M', u'ᆳ'),
    (0xFFAE, 'M', u'ᆴ'),
    (0xFFAF, 'M', u'ᆵ'),
    (0xFFB0, 'M', u'ᄚ'),
    (0xFFB1, 'M', u'ᄆ'),
    (0xFFB2, 'M', u'ᄇ'),
    (0xFFB3, 'M', u'ᄈ'),
    (0xFFB4, 'M', u'ᄡ'),
    (0xFFB5, 'M', u'ᄉ'),
    (0xFFB6, 'M', u'ᄊ'),
    (0xFFB7, 'M', u'ᄋ'),
    (0xFFB8, 'M', u'ᄌ'),
    (0xFFB9, 'M', u'ᄍ'),
    (0xFFBA, 'M', u'ᄎ'),
    (0xFFBB, 'M', u'ᄏ'),
    (0xFFBC, 'M', u'ᄐ'),
    (0xFFBD, 'M', u'ᄑ'),
    (0xFFBE, 'M', u'ᄒ'),
    (0xFFBF, 'X'),
    (0xFFC2, 'M', u'ᅡ'),
    (0xFFC3, 'M', u'ᅢ'),
    (0xFFC4, 'M', u'ᅣ'),
    (0xFFC5, 'M', u'ᅤ'),
    (0xFFC6, 'M', u'ᅥ'),
    (0xFFC7, 'M', u'ᅦ'),
    (0xFFC8, 'X'),
    (0xFFCA, 'M', u'ᅧ'),
    (0xFFCB, 'M', u'ᅨ'),
    (0xFFCC, 'M', u'ᅩ'),
    (0xFFCD, 'M', u'ᅪ'),
    ]

def _seg_51():
    """Return segment 51 of the generated Unicode code-point mapping table.

    Each entry is ``(codepoint, status)`` or ``(codepoint, status, mapping)``:
    rows with status 'M' or '3' carry a replacement string, rows with 'V' or
    'X' do not (presumably the UTS #46 valid/mapped/disallowed statuses --
    confirm against the idna table generator). Generated data; do not edit
    by hand.
    """
    return [
    (0xFFCE, 'M', u'ᅫ'),
    (0xFFCF, 'M', u'ᅬ'),
    (0xFFD0, 'X'),
    (0xFFD2, 'M', u'ᅭ'),
    (0xFFD3, 'M', u'ᅮ'),
    (0xFFD4, 'M', u'ᅯ'),
    (0xFFD5, 'M', u'ᅰ'),
    (0xFFD6, 'M', u'ᅱ'),
    (0xFFD7, 'M', u'ᅲ'),
    (0xFFD8, 'X'),
    (0xFFDA, 'M', u'ᅳ'),
    (0xFFDB, 'M', u'ᅴ'),
    (0xFFDC, 'M', u'ᅵ'),
    (0xFFDD, 'X'),
    (0xFFE0, 'M', u'¢'),
    (0xFFE1, 'M', u'£'),
    (0xFFE2, 'M', u'¬'),
    (0xFFE3, '3', u' ̄'),
    (0xFFE4, 'M', u'¦'),
    (0xFFE5, 'M', u'¥'),
    (0xFFE6, 'M', u'₩'),
    (0xFFE7, 'X'),
    (0xFFE8, 'M', u'│'),
    (0xFFE9, 'M', u'←'),
    (0xFFEA, 'M', u'↑'),
    (0xFFEB, 'M', u'→'),
    (0xFFEC, 'M', u'↓'),
    (0xFFED, 'M', u'■'),
    (0xFFEE, 'M', u'○'),
    (0xFFEF, 'X'),
    (0x10000, 'V'),
    (0x1000C, 'X'),
    (0x1000D, 'V'),
    (0x10027, 'X'),
    (0x10028, 'V'),
    (0x1003B, 'X'),
    (0x1003C, 'V'),
    (0x1003E, 'X'),
    (0x1003F, 'V'),
    (0x1004E, 'X'),
    (0x10050, 'V'),
    (0x1005E, 'X'),
    (0x10080, 'V'),
    (0x100FB, 'X'),
    (0x10100, 'V'),
    (0x10103, 'X'),
    (0x10107, 'V'),
    (0x10134, 'X'),
    (0x10137, 'V'),
    (0x1018B, 'X'),
    (0x10190, 'V'),
    (0x1019C, 'X'),
    (0x101D0, 'V'),
    (0x101FE, 'X'),
    (0x10280, 'V'),
    (0x1029D, 'X'),
    (0x102A0, 'V'),
    (0x102D1, 'X'),
    (0x10300, 'V'),
    (0x1031F, 'X'),
    (0x10320, 'V'),
    (0x10324, 'X'),
    (0x10330, 'V'),
    (0x1034B, 'X'),
    (0x10380, 'V'),
    (0x1039E, 'X'),
    (0x1039F, 'V'),
    (0x103C4, 'X'),
    (0x103C8, 'V'),
    (0x103D6, 'X'),
    (0x10400, 'M', u'𐐨'),
    (0x10401, 'M', u'𐐩'),
    (0x10402, 'M', u'𐐪'),
    (0x10403, 'M', u'𐐫'),
    (0x10404, 'M', u'𐐬'),
    (0x10405, 'M', u'𐐭'),
    (0x10406, 'M', u'𐐮'),
    (0x10407, 'M', u'𐐯'),
    (0x10408, 'M', u'𐐰'),
    (0x10409, 'M', u'𐐱'),
    (0x1040A, 'M', u'𐐲'),
    (0x1040B, 'M', u'𐐳'),
    (0x1040C, 'M', u'𐐴'),
    (0x1040D, 'M', u'𐐵'),
    (0x1040E, 'M', u'𐐶'),
    (0x1040F, 'M', u'𐐷'),
    (0x10410, 'M', u'𐐸'),
    (0x10411, 'M', u'𐐹'),
    (0x10412, 'M', u'𐐺'),
    (0x10413, 'M', u'𐐻'),
    (0x10414, 'M', u'𐐼'),
    (0x10415, 'M', u'𐐽'),
    (0x10416, 'M', u'𐐾'),
    (0x10417, 'M', u'𐐿'),
    (0x10418, 'M', u'𐑀'),
    (0x10419, 'M', u'𐑁'),
    (0x1041A, 'M', u'𐑂'),
    (0x1041B, 'M', u'𐑃'),
    (0x1041C, 'M', u'𐑄'),
    (0x1041D, 'M', u'𐑅'),
    ]

def _seg_52():
    """Return segment 52 of the generated Unicode code-point mapping table.

    Each entry is ``(codepoint, status)`` or ``(codepoint, status, mapping)``:
    rows with status 'M' or '3' carry a replacement string, rows with 'V' or
    'X' do not (presumably the UTS #46 valid/mapped/disallowed statuses --
    confirm against the idna table generator). Generated data; do not edit
    by hand.
    """
    return [
    (0x1041E, 'M', u'𐑆'),
    (0x1041F, 'M', u'𐑇'),
    (0x10420, 'M', u'𐑈'),
    (0x10421, 'M', u'𐑉'),
    (0x10422, 'M', u'𐑊'),
    (0x10423, 'M', u'𐑋'),
    (0x10424, 'M', u'𐑌'),
    (0x10425, 'M', u'𐑍'),
    (0x10426, 'M', u'𐑎'),
    (0x10427, 'M', u'𐑏'),
    (0x10428, 'V'),
    (0x1049E, 'X'),
    (0x104A0, 'V'),
    (0x104AA, 'X'),
    (0x10800, 'V'),
    (0x10806, 'X'),
    (0x10808, 'V'),
    (0x10809, 'X'),
    (0x1080A, 'V'),
    (0x10836, 'X'),
    (0x10837, 'V'),
    (0x10839, 'X'),
    (0x1083C, 'V'),
    (0x1083D, 'X'),
    (0x1083F, 'V'),
    (0x10856, 'X'),
    (0x10857, 'V'),
    (0x10860, 'X'),
    (0x10900, 'V'),
    (0x1091C, 'X'),
    (0x1091F, 'V'),
    (0x1093A, 'X'),
    (0x1093F, 'V'),
    (0x10940, 'X'),
    (0x10980, 'V'),
    (0x109B8, 'X'),
    (0x109BE, 'V'),
    (0x109C0, 'X'),
    (0x10A00, 'V'),
    (0x10A04, 'X'),
    (0x10A05, 'V'),
    (0x10A07, 'X'),
    (0x10A0C, 'V'),
    (0x10A14, 'X'),
    (0x10A15, 'V'),
    (0x10A18, 'X'),
    (0x10A19, 'V'),
    (0x10A34, 'X'),
    (0x10A38, 'V'),
    (0x10A3B, 'X'),
    (0x10A3F, 'V'),
    (0x10A48, 'X'),
    (0x10A50, 'V'),
    (0x10A59, 'X'),
    (0x10A60, 'V'),
    (0x10A80, 'X'),
    (0x10B00, 'V'),
    (0x10B36, 'X'),
    (0x10B39, 'V'),
    (0x10B56, 'X'),
    (0x10B58, 'V'),
    (0x10B73, 'X'),
    (0x10B78, 'V'),
    (0x10B80, 'X'),
    (0x10C00, 'V'),
    (0x10C49, 'X'),
    (0x10E60, 'V'),
    (0x10E7F, 'X'),
    (0x11000, 'V'),
    (0x1104E, 'X'),
    (0x11052, 'V'),
    (0x11070, 'X'),
    (0x11080, 'V'),
    (0x110BD, 'X'),
    (0x110BE, 'V'),
    (0x110C2, 'X'),
    (0x110D0, 'V'),
    (0x110E9, 'X'),
    (0x110F0, 'V'),
    (0x110FA, 'X'),
    (0x11100, 'V'),
    (0x11135, 'X'),
    (0x11136, 'V'),
    (0x11144, 'X'),
    (0x11180, 'V'),
    (0x111C9, 'X'),
    (0x111D0, 'V'),
    (0x111DA, 'X'),
    (0x11680, 'V'),
    (0x116B8, 'X'),
    (0x116C0, 'V'),
    (0x116CA, 'X'),
    (0x12000, 'V'),
    (0x1236F, 'X'),
    (0x12400, 'V'),
    (0x12463, 'X'),
    (0x12470, 'V'),
    (0x12474, 'X'),
    (0x13000, 'V'),
    (0x1342F, 'X'),
    ]

def _seg_53():
    """Return segment 53 of the generated Unicode code-point mapping table.

    Each entry is ``(codepoint, status)`` or ``(codepoint, status, mapping)``:
    rows with status 'M' or '3' carry a replacement string, rows with 'V' or
    'X' do not (presumably the UTS #46 valid/mapped/disallowed statuses --
    confirm against the idna table generator). Generated data; do not edit
    by hand.
    """
    return [
    (0x16800, 'V'),
    (0x16A39, 'X'),
    (0x16F00, 'V'),
    (0x16F45, 'X'),
    (0x16F50, 'V'),
    (0x16F7F, 'X'),
    (0x16F8F, 'V'),
    (0x16FA0, 'X'),
    (0x1B000, 'V'),
    (0x1B002, 'X'),
    (0x1D000, 'V'),
    (0x1D0F6, 'X'),
    (0x1D100, 'V'),
    (0x1D127, 'X'),
    (0x1D129, 'V'),
    (0x1D15E, 'M', u'𝅗𝅥'),
    (0x1D15F, 'M', u'𝅘𝅥'),
    (0x1D160, 'M', u'𝅘𝅥𝅮'),
    (0x1D161, 'M', u'𝅘𝅥𝅯'),
    (0x1D162, 'M', u'𝅘𝅥𝅰'),
    (0x1D163, 'M', u'𝅘𝅥𝅱'),
    (0x1D164, 'M', u'𝅘𝅥𝅲'),
    (0x1D165, 'V'),
    (0x1D173, 'X'),
    (0x1D17B, 'V'),
    (0x1D1BB, 'M', u'𝆹𝅥'),
    (0x1D1BC, 'M', u'𝆺𝅥'),
    (0x1D1BD, 'M', u'𝆹𝅥𝅮'),
    (0x1D1BE, 'M', u'𝆺𝅥𝅮'),
    (0x1D1BF, 'M', u'𝆹𝅥𝅯'),
    (0x1D1C0, 'M', u'𝆺𝅥𝅯'),
    (0x1D1C1, 'V'),
    (0x1D1DE, 'X'),
    (0x1D200, 'V'),
    (0x1D246, 'X'),
    (0x1D300, 'V'),
    (0x1D357, 'X'),
    (0x1D360, 'V'),
    (0x1D372, 'X'),
    (0x1D400, 'M', u'a'),
    (0x1D401, 'M', u'b'),
    (0x1D402, 'M', u'c'),
    (0x1D403, 'M', u'd'),
    (0x1D404, 'M', u'e'),
    (0x1D405, 'M', u'f'),
    (0x1D406, 'M', u'g'),
    (0x1D407, 'M', u'h'),
    (0x1D408, 'M', u'i'),
    (0x1D409, 'M', u'j'),
    (0x1D40A, 'M', u'k'),
    (0x1D40B, 'M', u'l'),
    (0x1D40C, 'M', u'm'),
    (0x1D40D, 'M', u'n'),
    (0x1D40E, 'M', u'o'),
    (0x1D40F, 'M', u'p'),
    (0x1D410, 'M', u'q'),
    (0x1D411, 'M', u'r'),
    (0x1D412, 'M', u's'),
    (0x1D413, 'M', u't'),
    (0x1D414, 'M', u'u'),
    (0x1D415, 'M', u'v'),
    (0x1D416, 'M', u'w'),
    (0x1D417, 'M', u'x'),
    (0x1D418, 'M', u'y'),
    (0x1D419, 'M', u'z'),
    (0x1D41A, 'M', u'a'),
    (0x1D41B, 'M', u'b'),
    (0x1D41C, 'M', u'c'),
    (0x1D41D, 'M', u'd'),
    (0x1D41E, 'M', u'e'),
    (0x1D41F, 'M', u'f'),
    (0x1D420, 'M', u'g'),
    (0x1D421, 'M', u'h'),
    (0x1D422, 'M', u'i'),
    (0x1D423, 'M', u'j'),
    (0x1D424, 'M', u'k'),
    (0x1D425, 'M', u'l'),
    (0x1D426, 'M', u'm'),
    (0x1D427, 'M', u'n'),
    (0x1D428, 'M', u'o'),
    (0x1D429, 'M', u'p'),
    (0x1D42A, 'M', u'q'),
    (0x1D42B, 'M', u'r'),
    (0x1D42C, 'M', u's'),
    (0x1D42D, 'M', u't'),
    (0x1D42E, 'M', u'u'),
    (0x1D42F, 'M', u'v'),
    (0x1D430, 'M', u'w'),
    (0x1D431, 'M', u'x'),
    (0x1D432, 'M', u'y'),
    (0x1D433, 'M', u'z'),
    (0x1D434, 'M', u'a'),
    (0x1D435, 'M', u'b'),
    (0x1D436, 'M', u'c'),
    (0x1D437, 'M', u'd'),
    (0x1D438, 'M', u'e'),
    (0x1D439, 'M', u'f'),
    (0x1D43A, 'M', u'g'),
    (0x1D43B, 'M', u'h'),
    (0x1D43C, 'M', u'i'),
    ]

def _seg_54():
    """Return segment 54 of the generated Unicode code-point mapping table.

    Each entry is ``(codepoint, status)`` or ``(codepoint, status, mapping)``:
    rows with status 'M' or '3' carry a replacement string, rows with 'V' or
    'X' do not (presumably the UTS #46 valid/mapped/disallowed statuses --
    confirm against the idna table generator). Generated data; do not edit
    by hand.
    """
    return [
    (0x1D43D, 'M', u'j'),
    (0x1D43E, 'M', u'k'),
    (0x1D43F, 'M', u'l'),
    (0x1D440, 'M', u'm'),
    (0x1D441, 'M', u'n'),
    (0x1D442, 'M', u'o'),
    (0x1D443, 'M', u'p'),
    (0x1D444, 'M', u'q'),
    (0x1D445, 'M', u'r'),
    (0x1D446, 'M', u's'),
    (0x1D447, 'M', u't'),
    (0x1D448, 'M', u'u'),
    (0x1D449, 'M', u'v'),
    (0x1D44A, 'M', u'w'),
    (0x1D44B, 'M', u'x'),
    (0x1D44C, 'M', u'y'),
    (0x1D44D, 'M', u'z'),
    (0x1D44E, 'M', u'a'),
    (0x1D44F, 'M', u'b'),
    (0x1D450, 'M', u'c'),
    (0x1D451, 'M', u'd'),
    (0x1D452, 'M', u'e'),
    (0x1D453, 'M', u'f'),
    (0x1D454, 'M', u'g'),
    (0x1D455, 'X'),
    (0x1D456, 'M', u'i'),
    (0x1D457, 'M', u'j'),
    (0x1D458, 'M', u'k'),
    (0x1D459, 'M', u'l'),
    (0x1D45A, 'M', u'm'),
    (0x1D45B, 'M', u'n'),
    (0x1D45C, 'M', u'o'),
    (0x1D45D, 'M', u'p'),
    (0x1D45E, 'M', u'q'),
    (0x1D45F, 'M', u'r'),
    (0x1D460, 'M', u's'),
    (0x1D461, 'M', u't'),
    (0x1D462, 'M', u'u'),
    (0x1D463, 'M', u'v'),
    (0x1D464, 'M', u'w'),
    (0x1D465, 'M', u'x'),
    (0x1D466, 'M', u'y'),
    (0x1D467, 'M', u'z'),
    (0x1D468, 'M', u'a'),
    (0x1D469, 'M', u'b'),
    (0x1D46A, 'M', u'c'),
    (0x1D46B, 'M', u'd'),
    (0x1D46C, 'M', u'e'),
    (0x1D46D, 'M', u'f'),
    (0x1D46E, 'M', u'g'),
    (0x1D46F, 'M', u'h'),
    (0x1D470, 'M', u'i'),
    (0x1D471, 'M', u'j'),
    (0x1D472, 'M', u'k'),
    (0x1D473, 'M', u'l'),
    (0x1D474, 'M', u'm'),
    (0x1D475, 'M', u'n'),
    (0x1D476, 'M', u'o'),
    (0x1D477, 'M', u'p'),
    (0x1D478, 'M', u'q'),
    (0x1D479, 'M', u'r'),
    (0x1D47A, 'M', u's'),
    (0x1D47B, 'M', u't'),
    (0x1D47C, 'M', u'u'),
    (0x1D47D, 'M', u'v'),
    (0x1D47E, 'M', u'w'),
    (0x1D47F, 'M', u'x'),
    (0x1D480, 'M', u'y'),
    (0x1D481, 'M', u'z'),
    (0x1D482, 'M', u'a'),
    (0x1D483, 'M', u'b'),
    (0x1D484, 'M', u'c'),
    (0x1D485, 'M', u'd'),
    (0x1D486, 'M', u'e'),
    (0x1D487, 'M', u'f'),
    (0x1D488, 'M', u'g'),
    (0x1D489, 'M', u'h'),
    (0x1D48A, 'M', u'i'),
    (0x1D48B, 'M', u'j'),
    (0x1D48C, 'M', u'k'),
    (0x1D48D, 'M', u'l'),
    (0x1D48E, 'M', u'm'),
    (0x1D48F, 'M', u'n'),
    (0x1D490, 'M', u'o'),
    (0x1D491, 'M', u'p'),
    (0x1D492, 'M', u'q'),
    (0x1D493, 'M', u'r'),
    (0x1D494, 'M', u's'),
    (0x1D495, 'M', u't'),
    (0x1D496, 'M', u'u'),
    (0x1D497, 'M', u'v'),
    (0x1D498, 'M', u'w'),
    (0x1D499, 'M', u'x'),
    (0x1D49A, 'M', u'y'),
    (0x1D49B, 'M', u'z'),
    (0x1D49C, 'M', u'a'),
    (0x1D49D, 'X'),
    (0x1D49E, 'M', u'c'),
    (0x1D49F, 'M', u'd'),
    (0x1D4A0, 'X'),
    ]

def _seg_55():
    """Return segment 55 of the generated Unicode code-point mapping table.

    Each entry is ``(codepoint, status)`` or ``(codepoint, status, mapping)``:
    rows with status 'M' or '3' carry a replacement string, rows with 'V' or
    'X' do not (presumably the UTS #46 valid/mapped/disallowed statuses --
    confirm against the idna table generator). Generated data; do not edit
    by hand.
    """
    return [
    (0x1D4A2, 'M', u'g'),
    (0x1D4A3, 'X'),
    (0x1D4A5, 'M', u'j'),
    (0x1D4A6, 'M', u'k'),
    (0x1D4A7, 'X'),
    (0x1D4A9, 'M', u'n'),
    (0x1D4AA, 'M', u'o'),
    (0x1D4AB, 'M', u'p'),
    (0x1D4AC, 'M', u'q'),
    (0x1D4AD, 'X'),
    (0x1D4AE, 'M', u's'),
    (0x1D4AF, 'M', u't'),
    (0x1D4B0, 'M', u'u'),
    (0x1D4B1, 'M', u'v'),
    (0x1D4B2, 'M', u'w'),
    (0x1D4B3, 'M', u'x'),
    (0x1D4B4, 'M', u'y'),
    (0x1D4B5, 'M', u'z'),
    (0x1D4B6, 'M', u'a'),
    (0x1D4B7, 'M', u'b'),
    (0x1D4B8, 'M', u'c'),
    (0x1D4B9, 'M', u'd'),
    (0x1D4BA, 'X'),
    (0x1D4BB, 'M', u'f'),
    (0x1D4BC, 'X'),
    (0x1D4BD, 'M', u'h'),
    (0x1D4BE, 'M', u'i'),
    (0x1D4BF, 'M', u'j'),
    (0x1D4C0, 'M', u'k'),
    (0x1D4C1, 'M', u'l'),
    (0x1D4C2, 'M', u'm'),
    (0x1D4C3, 'M', u'n'),
    (0x1D4C4, 'X'),
    (0x1D4C5, 'M', u'p'),
    (0x1D4C6, 'M', u'q'),
    (0x1D4C7, 'M', u'r'),
    (0x1D4C8, 'M', u's'),
    (0x1D4C9, 'M', u't'),
    (0x1D4CA, 'M', u'u'),
    (0x1D4CB, 'M', u'v'),
    (0x1D4CC, 'M', u'w'),
    (0x1D4CD, 'M', u'x'),
    (0x1D4CE, 'M', u'y'),
    (0x1D4CF, 'M', u'z'),
    (0x1D4D0, 'M', u'a'),
    (0x1D4D1, 'M', u'b'),
    (0x1D4D2, 'M', u'c'),
    (0x1D4D3, 'M', u'd'),
    (0x1D4D4, 'M', u'e'),
    (0x1D4D5, 'M', u'f'),
    (0x1D4D6, 'M', u'g'),
    (0x1D4D7, 'M', u'h'),
    (0x1D4D8, 'M', u'i'),
    (0x1D4D9, 'M', u'j'),
    (0x1D4DA, 'M', u'k'),
    (0x1D4DB, 'M', u'l'),
    (0x1D4DC, 'M', u'm'),
    (0x1D4DD, 'M', u'n'),
    (0x1D4DE, 'M', u'o'),
    (0x1D4DF, 'M', u'p'),
    (0x1D4E0, 'M', u'q'),
    (0x1D4E1, 'M', u'r'),
    (0x1D4E2, 'M', u's'),
    (0x1D4E3, 'M', u't'),
    (0x1D4E4, 'M', u'u'),
    (0x1D4E5, 'M', u'v'),
    (0x1D4E6, 'M', u'w'),
    (0x1D4E7, 'M', u'x'),
    (0x1D4E8, 'M', u'y'),
    (0x1D4E9, 'M', u'z'),
    (0x1D4EA, 'M', u'a'),
    (0x1D4EB, 'M', u'b'),
    (0x1D4EC, 'M', u'c'),
    (0x1D4ED, 'M', u'd'),
    (0x1D4EE, 'M', u'e'),
    (0x1D4EF, 'M', u'f'),
    (0x1D4F0, 'M', u'g'),
    (0x1D4F1, 'M', u'h'),
    (0x1D4F2, 'M', u'i'),
    (0x1D4F3, 'M', u'j'),
    (0x1D4F4, 'M', u'k'),
    (0x1D4F5, 'M', u'l'),
    (0x1D4F6, 'M', u'm'),
    (0x1D4F7, 'M', u'n'),
    (0x1D4F8, 'M', u'o'),
    (0x1D4F9, 'M', u'p'),
    (0x1D4FA, 'M', u'q'),
    (0x1D4FB, 'M', u'r'),
    (0x1D4FC, 'M', u's'),
    (0x1D4FD, 'M', u't'),
    (0x1D4FE, 'M', u'u'),
    (0x1D4FF, 'M', u'v'),
    (0x1D500, 'M', u'w'),
    (0x1D501, 'M', u'x'),
    (0x1D502, 'M', u'y'),
    (0x1D503, 'M', u'z'),
    (0x1D504, 'M', u'a'),
    (0x1D505, 'M', u'b'),
    (0x1D506, 'X'),
    (0x1D507, 'M', u'd'),
    ]

def _seg_56():
    """Return segment 56 of the generated Unicode code-point mapping table.

    Each entry is ``(codepoint, status)`` or ``(codepoint, status, mapping)``:
    rows with status 'M' or '3' carry a replacement string, rows with 'V' or
    'X' do not (presumably the UTS #46 valid/mapped/disallowed statuses --
    confirm against the idna table generator). Generated data; do not edit
    by hand.
    """
    return [
    (0x1D508, 'M', u'e'),
    (0x1D509, 'M', u'f'),
    (0x1D50A, 'M', u'g'),
    (0x1D50B, 'X'),
    (0x1D50D, 'M', u'j'),
    (0x1D50E, 'M', u'k'),
    (0x1D50F, 'M', u'l'),
    (0x1D510, 'M', u'm'),
    (0x1D511, 'M', u'n'),
    (0x1D512, 'M', u'o'),
    (0x1D513, 'M', u'p'),
    (0x1D514, 'M', u'q'),
    (0x1D515, 'X'),
    (0x1D516, 'M', u's'),
    (0x1D517, 'M', u't'),
    (0x1D518, 'M', u'u'),
    (0x1D519, 'M', u'v'),
    (0x1D51A, 'M', u'w'),
    (0x1D51B, 'M', u'x'),
    (0x1D51C, 'M', u'y'),
    (0x1D51D, 'X'),
    (0x1D51E, 'M', u'a'),
    (0x1D51F, 'M', u'b'),
    (0x1D520, 'M', u'c'),
    (0x1D521, 'M', u'd'),
    (0x1D522, 'M', u'e'),
    (0x1D523, 'M', u'f'),
    (0x1D524, 'M', u'g'),
    (0x1D525, 'M', u'h'),
    (0x1D526, 'M', u'i'),
    (0x1D527, 'M', u'j'),
    (0x1D528, 'M', u'k'),
    (0x1D529, 'M', u'l'),
    (0x1D52A, 'M', u'm'),
    (0x1D52B, 'M', u'n'),
    (0x1D52C, 'M', u'o'),
    (0x1D52D, 'M', u'p'),
    (0x1D52E, 'M', u'q'),
    (0x1D52F, 'M', u'r'),
    (0x1D530, 'M', u's'),
    (0x1D531, 'M', u't'),
    (0x1D532, 'M', u'u'),
    (0x1D533, 'M', u'v'),
    (0x1D534, 'M', u'w'),
    (0x1D535, 'M', u'x'),
    (0x1D536, 'M', u'y'),
    (0x1D537, 'M', u'z'),
    (0x1D538, 'M', u'a'),
    (0x1D539, 'M', u'b'),
    (0x1D53A, 'X'),
    (0x1D53B, 'M', u'd'),
    (0x1D53C, 'M', u'e'),
    (0x1D53D, 'M', u'f'),
    (0x1D53E, 'M', u'g'),
    (0x1D53F, 'X'),
    (0x1D540, 'M', u'i'),
    (0x1D541, 'M', u'j'),
    (0x1D542, 'M', u'k'),
    (0x1D543, 'M', u'l'),
    (0x1D544, 'M', u'm'),
    (0x1D545, 'X'),
    (0x1D546, 'M', u'o'),
    (0x1D547, 'X'),
    (0x1D54A, 'M', u's'),
    (0x1D54B, 'M', u't'),
    (0x1D54C, 'M', u'u'),
    (0x1D54D, 'M', u'v'),
    (0x1D54E, 'M', u'w'),
    (0x1D54F, 'M', u'x'),
    (0x1D550, 'M', u'y'),
    (0x1D551, 'X'),
    (0x1D552, 'M', u'a'),
    (0x1D553, 'M', u'b'),
    (0x1D554, 'M', u'c'),
    (0x1D555, 'M', u'd'),
    (0x1D556, 'M', u'e'),
    (0x1D557, 'M', u'f'),
    (0x1D558, 'M', u'g'),
    (0x1D559, 'M', u'h'),
    (0x1D55A, 'M', u'i'),
    (0x1D55B, 'M', u'j'),
    (0x1D55C, 'M', u'k'),
    (0x1D55D, 'M', u'l'),
    (0x1D55E, 'M', u'm'),
    (0x1D55F, 'M', u'n'),
    (0x1D560, 'M', u'o'),
    (0x1D561, 'M', u'p'),
    (0x1D562, 'M', u'q'),
    (0x1D563, 'M', u'r'),
    (0x1D564, 'M', u's'),
    (0x1D565, 'M', u't'),
    (0x1D566, 'M', u'u'),
    (0x1D567, 'M', u'v'),
    (0x1D568, 'M', u'w'),
    (0x1D569, 'M', u'x'),
    (0x1D56A, 'M', u'y'),
    (0x1D56B, 'M', u'z'),
    (0x1D56C, 'M', u'a'),
    (0x1D56D, 'M', u'b'),
    (0x1D56E, 'M', u'c'),
    ]

def _seg_57():
    """Return segment 57 of the generated Unicode code-point mapping table.

    Each entry is ``(codepoint, status)`` or ``(codepoint, status, mapping)``:
    rows with status 'M' or '3' carry a replacement string, rows with 'V' or
    'X' do not (presumably the UTS #46 valid/mapped/disallowed statuses --
    confirm against the idna table generator). Generated data; do not edit
    by hand.
    """
    return [
    (0x1D56F, 'M', u'd'),
    (0x1D570, 'M', u'e'),
    (0x1D571, 'M', u'f'),
    (0x1D572, 'M', u'g'),
    (0x1D573, 'M', u'h'),
    (0x1D574, 'M', u'i'),
    (0x1D575, 'M', u'j'),
    (0x1D576, 'M', u'k'),
    (0x1D577, 'M', u'l'),
    (0x1D578, 'M', u'm'),
    (0x1D579, 'M', u'n'),
    (0x1D57A, 'M', u'o'),
    (0x1D57B, 'M', u'p'),
    (0x1D57C, 'M', u'q'),
    (0x1D57D, 'M', u'r'),
    (0x1D57E, 'M', u's'),
    (0x1D57F, 'M', u't'),
    (0x1D580, 'M', u'u'),
    (0x1D581, 'M', u'v'),
    (0x1D582, 'M', u'w'),
    (0x1D583, 'M', u'x'),
    (0x1D584, 'M', u'y'),
    (0x1D585, 'M', u'z'),
    (0x1D586, 'M', u'a'),
    (0x1D587, 'M', u'b'),
    (0x1D588, 'M', u'c'),
    (0x1D589, 'M', u'd'),
    (0x1D58A, 'M', u'e'),
    (0x1D58B, 'M', u'f'),
    (0x1D58C, 'M', u'g'),
    (0x1D58D, 'M', u'h'),
    (0x1D58E, 'M', u'i'),
    (0x1D58F, 'M', u'j'),
    (0x1D590, 'M', u'k'),
    (0x1D591, 'M', u'l'),
    (0x1D592, 'M', u'm'),
    (0x1D593, 'M', u'n'),
    (0x1D594, 'M', u'o'),
    (0x1D595, 'M', u'p'),
    (0x1D596, 'M', u'q'),
    (0x1D597, 'M', u'r'),
    (0x1D598, 'M', u's'),
    (0x1D599, 'M', u't'),
    (0x1D59A, 'M', u'u'),
    (0x1D59B, 'M', u'v'),
    (0x1D59C, 'M', u'w'),
    (0x1D59D, 'M', u'x'),
    (0x1D59E, 'M', u'y'),
    (0x1D59F, 'M', u'z'),
    (0x1D5A0, 'M', u'a'),
    (0x1D5A1, 'M', u'b'),
    (0x1D5A2, 'M', u'c'),
    (0x1D5A3, 'M', u'd'),
    (0x1D5A4, 'M', u'e'),
    (0x1D5A5, 'M', u'f'),
    (0x1D5A6, 'M', u'g'),
    (0x1D5A7, 'M', u'h'),
    (0x1D5A8, 'M', u'i'),
    (0x1D5A9, 'M', u'j'),
    (0x1D5AA, 'M', u'k'),
    (0x1D5AB, 'M', u'l'),
    (0x1D5AC, 'M', u'm'),
    (0x1D5AD, 'M', u'n'),
    (0x1D5AE, 'M', u'o'),
    (0x1D5AF, 'M', u'p'),
    (0x1D5B0, 'M', u'q'),
    (0x1D5B1, 'M', u'r'),
    (0x1D5B2, 'M', u's'),
    (0x1D5B3, 'M', u't'),
    (0x1D5B4, 'M', u'u'),
    (0x1D5B5, 'M', u'v'),
    (0x1D5B6, 'M', u'w'),
    (0x1D5B7, 'M', u'x'),
    (0x1D5B8, 'M', u'y'),
    (0x1D5B9, 'M', u'z'),
    (0x1D5BA, 'M', u'a'),
    (0x1D5BB, 'M', u'b'),
    (0x1D5BC, 'M', u'c'),
    (0x1D5BD, 'M', u'd'),
    (0x1D5BE, 'M', u'e'),
    (0x1D5BF, 'M', u'f'),
    (0x1D5C0, 'M', u'g'),
    (0x1D5C1, 'M', u'h'),
    (0x1D5C2, 'M', u'i'),
    (0x1D5C3, 'M', u'j'),
    (0x1D5C4, 'M', u'k'),
    (0x1D5C5, 'M', u'l'),
    (0x1D5C6, 'M', u'm'),
    (0x1D5C7, 'M', u'n'),
    (0x1D5C8, 'M', u'o'),
    (0x1D5C9, 'M', u'p'),
    (0x1D5CA, 'M', u'q'),
    (0x1D5CB, 'M', u'r'),
    (0x1D5CC, 'M', u's'),
    (0x1D5CD, 'M', u't'),
    (0x1D5CE, 'M', u'u'),
    (0x1D5CF, 'M', u'v'),
    (0x1D5D0, 'M', u'w'),
    (0x1D5D1, 'M', u'x'),
    (0x1D5D2, 'M', u'y'),
    ]

def _seg_58():
    """Return segment 58 of the generated Unicode code-point mapping table.

    Each entry is ``(codepoint, status)`` or ``(codepoint, status, mapping)``:
    rows with status 'M' or '3' carry a replacement string, rows with 'V' or
    'X' do not (presumably the UTS #46 valid/mapped/disallowed statuses --
    confirm against the idna table generator). Generated data; do not edit
    by hand.
    """
    return [
    (0x1D5D3, 'M', u'z'),
    (0x1D5D4, 'M', u'a'),
    (0x1D5D5, 'M', u'b'),
    (0x1D5D6, 'M', u'c'),
    (0x1D5D7, 'M', u'd'),
    (0x1D5D8, 'M', u'e'),
    (0x1D5D9, 'M', u'f'),
    (0x1D5DA, 'M', u'g'),
    (0x1D5DB, 'M', u'h'),
    (0x1D5DC, 'M', u'i'),
    (0x1D5DD, 'M', u'j'),
    (0x1D5DE, 'M', u'k'),
    (0x1D5DF, 'M', u'l'),
    (0x1D5E0, 'M', u'm'),
    (0x1D5E1, 'M', u'n'),
    (0x1D5E2, 'M', u'o'),
    (0x1D5E3, 'M', u'p'),
    (0x1D5E4, 'M', u'q'),
    (0x1D5E5, 'M', u'r'),
    (0x1D5E6, 'M', u's'),
    (0x1D5E7, 'M', u't'),
    (0x1D5E8, 'M', u'u'),
    (0x1D5E9, 'M', u'v'),
    (0x1D5EA, 'M', u'w'),
    (0x1D5EB, 'M', u'x'),
    (0x1D5EC, 'M', u'y'),
    (0x1D5ED, 'M', u'z'),
    (0x1D5EE, 'M', u'a'),
    (0x1D5EF, 'M', u'b'),
    (0x1D5F0, 'M', u'c'),
    (0x1D5F1, 'M', u'd'),
    (0x1D5F2, 'M', u'e'),
    (0x1D5F3, 'M', u'f'),
    (0x1D5F4, 'M', u'g'),
    (0x1D5F5, 'M', u'h'),
    (0x1D5F6, 'M', u'i'),
    (0x1D5F7, 'M', u'j'),
    (0x1D5F8, 'M', u'k'),
    (0x1D5F9, 'M', u'l'),
    (0x1D5FA, 'M', u'm'),
    (0x1D5FB, 'M', u'n'),
    (0x1D5FC, 'M', u'o'),
    (0x1D5FD, 'M', u'p'),
    (0x1D5FE, 'M', u'q'),
    (0x1D5FF, 'M', u'r'),
    (0x1D600, 'M', u's'),
    (0x1D601, 'M', u't'),
    (0x1D602, 'M', u'u'),
    (0x1D603, 'M', u'v'),
    (0x1D604, 'M', u'w'),
    (0x1D605, 'M', u'x'),
    (0x1D606, 'M', u'y'),
    (0x1D607, 'M', u'z'),
    (0x1D608, 'M', u'a'),
    (0x1D609, 'M', u'b'),
    (0x1D60A, 'M', u'c'),
    (0x1D60B, 'M', u'd'),
    (0x1D60C, 'M', u'e'),
    (0x1D60D, 'M', u'f'),
    (0x1D60E, 'M', u'g'),
    (0x1D60F, 'M', u'h'),
    (0x1D610, 'M', u'i'),
    (0x1D611, 'M', u'j'),
    (0x1D612, 'M', u'k'),
    (0x1D613, 'M', u'l'),
    (0x1D614, 'M', u'm'),
    (0x1D615, 'M', u'n'),
    (0x1D616, 'M', u'o'),
    (0x1D617, 'M', u'p'),
    (0x1D618, 'M', u'q'),
    (0x1D619, 'M', u'r'),
    (0x1D61A, 'M', u's'),
    (0x1D61B, 'M', u't'),
    (0x1D61C, 'M', u'u'),
    (0x1D61D, 'M', u'v'),
    (0x1D61E, 'M', u'w'),
    (0x1D61F, 'M', u'x'),
    (0x1D620, 'M', u'y'),
    (0x1D621, 'M', u'z'),
    (0x1D622, 'M', u'a'),
    (0x1D623, 'M', u'b'),
    (0x1D624, 'M', u'c'),
    (0x1D625, 'M', u'd'),
    (0x1D626, 'M', u'e'),
    (0x1D627, 'M', u'f'),
    (0x1D628, 'M', u'g'),
    (0x1D629, 'M', u'h'),
    (0x1D62A, 'M', u'i'),
    (0x1D62B, 'M', u'j'),
    (0x1D62C, 'M', u'k'),
    (0x1D62D, 'M', u'l'),
    (0x1D62E, 'M', u'm'),
    (0x1D62F, 'M', u'n'),
    (0x1D630, 'M', u'o'),
    (0x1D631, 'M', u'p'),
    (0x1D632, 'M', u'q'),
    (0x1D633, 'M', u'r'),
    (0x1D634, 'M', u's'),
    (0x1D635, 'M', u't'),
    (0x1D636, 'M', u'u'),
    ]

def _seg_59():
    """Return segment 59 of the generated Unicode code-point mapping table.

    Each entry is ``(codepoint, status)`` or ``(codepoint, status, mapping)``:
    rows with status 'M' or '3' carry a replacement string, rows with 'V' or
    'X' do not (presumably the UTS #46 valid/mapped/disallowed statuses --
    confirm against the idna table generator). Generated data; do not edit
    by hand.
    """
    return [
    (0x1D637, 'M', u'v'),
    (0x1D638, 'M', u'w'),
    (0x1D639, 'M', u'x'),
    (0x1D63A, 'M', u'y'),
    (0x1D63B, 'M', u'z'),
    (0x1D63C, 'M', u'a'),
    (0x1D63D, 'M', u'b'),
    (0x1D63E, 'M', u'c'),
    (0x1D63F, 'M', u'd'),
    (0x1D640, 'M', u'e'),
    (0x1D641, 'M', u'f'),
    (0x1D642, 'M', u'g'),
    (0x1D643, 'M', u'h'),
    (0x1D644, 'M', u'i'),
    (0x1D645, 'M', u'j'),
    (0x1D646, 'M', u'k'),
    (0x1D647, 'M', u'l'),
    (0x1D648, 'M', u'm'),
    (0x1D649, 'M', u'n'),
    (0x1D64A, 'M', u'o'),
    (0x1D64B, 'M', u'p'),
    (0x1D64C, 'M', u'q'),
    (0x1D64D, 'M', u'r'),
    (0x1D64E, 'M', u's'),
    (0x1D64F, 'M', u't'),
    (0x1D650, 'M', u'u'),
    (0x1D651, 'M', u'v'),
    (0x1D652, 'M', u'w'),
    (0x1D653, 'M', u'x'),
    (0x1D654, 'M', u'y'),
    (0x1D655, 'M', u'z'),
    (0x1D656, 'M', u'a'),
    (0x1D657, 'M', u'b'),
    (0x1D658, 'M', u'c'),
    (0x1D659, 'M', u'd'),
    (0x1D65A, 'M', u'e'),
    (0x1D65B, 'M', u'f'),
    (0x1D65C, 'M', u'g'),
    (0x1D65D, 'M', u'h'),
    (0x1D65E, 'M', u'i'),
    (0x1D65F, 'M', u'j'),
    (0x1D660, 'M', u'k'),
    (0x1D661, 'M', u'l'),
    (0x1D662, 'M', u'm'),
    (0x1D663, 'M', u'n'),
    (0x1D664, 'M', u'o'),
    (0x1D665, 'M', u'p'),
    (0x1D666, 'M', u'q'),
    (0x1D667, 'M', u'r'),
    (0x1D668, 'M', u's'),
    (0x1D669, 'M', u't'),
    (0x1D66A, 'M', u'u'),
    (0x1D66B, 'M', u'v'),
    (0x1D66C, 'M', u'w'),
    (0x1D66D, 'M', u'x'),
    (0x1D66E, 'M', u'y'),
    (0x1D66F, 'M', u'z'),
    (0x1D670, 'M', u'a'),
    (0x1D671, 'M', u'b'),
    (0x1D672, 'M', u'c'),
    (0x1D673, 'M', u'd'),
    (0x1D674, 'M', u'e'),
    (0x1D675, 'M', u'f'),
    (0x1D676, 'M', u'g'),
    (0x1D677, 'M', u'h'),
    (0x1D678, 'M', u'i'),
    (0x1D679, 'M', u'j'),
    (0x1D67A, 'M', u'k'),
    (0x1D67B, 'M', u'l'),
    (0x1D67C, 'M', u'm'),
    (0x1D67D, 'M', u'n'),
    (0x1D67E, 'M', u'o'),
    (0x1D67F, 'M', u'p'),
    (0x1D680, 'M', u'q'),
    (0x1D681, 'M', u'r'),
    (0x1D682, 'M', u's'),
    (0x1D683, 'M', u't'),
    (0x1D684, 'M', u'u'),
    (0x1D685, 'M', u'v'),
    (0x1D686, 'M', u'w'),
    (0x1D687, 'M', u'x'),
    (0x1D688, 'M', u'y'),
    (0x1D689, 'M', u'z'),
    (0x1D68A, 'M', u'a'),
    (0x1D68B, 'M', u'b'),
    (0x1D68C, 'M', u'c'),
    (0x1D68D, 'M', u'd'),
    (0x1D68E, 'M', u'e'),
    (0x1D68F, 'M', u'f'),
    (0x1D690, 'M', u'g'),
    (0x1D691, 'M', u'h'),
    (0x1D692, 'M', u'i'),
    (0x1D693, 'M', u'j'),
    (0x1D694, 'M', u'k'),
    (0x1D695, 'M', u'l'),
    (0x1D696, 'M', u'm'),
    (0x1D697, 'M', u'n'),
    (0x1D698, 'M', u'o'),
    (0x1D699, 'M', u'p'),
    (0x1D69A, 'M', u'q'),
    ]

def _seg_60():
    """Return segment 60 of the generated Unicode code-point mapping table.

    Each entry is ``(codepoint, status)`` or ``(codepoint, status, mapping)``:
    rows with status 'M' or '3' carry a replacement string, rows with 'V' or
    'X' do not (presumably the UTS #46 valid/mapped/disallowed statuses --
    confirm against the idna table generator). Generated data; do not edit
    by hand.
    """
    return [
    (0x1D69B, 'M', u'r'),
    (0x1D69C, 'M', u's'),
    (0x1D69D, 'M', u't'),
    (0x1D69E, 'M', u'u'),
    (0x1D69F, 'M', u'v'),
    (0x1D6A0, 'M', u'w'),
    (0x1D6A1, 'M', u'x'),
    (0x1D6A2, 'M', u'y'),
    (0x1D6A3, 'M', u'z'),
    (0x1D6A4, 'M', u'ı'),
    (0x1D6A5, 'M', u'ȷ'),
    (0x1D6A6, 'X'),
    (0x1D6A8, 'M', u'α'),
    (0x1D6A9, 'M', u'β'),
    (0x1D6AA, 'M', u'γ'),
    (0x1D6AB, 'M', u'δ'),
    (0x1D6AC, 'M', u'ε'),
    (0x1D6AD, 'M', u'ζ'),
    (0x1D6AE, 'M', u'η'),
    (0x1D6AF, 'M', u'θ'),
    (0x1D6B0, 'M', u'ι'),
    (0x1D6B1, 'M', u'κ'),
    (0x1D6B2, 'M', u'λ'),
    (0x1D6B3, 'M', u'μ'),
    (0x1D6B4, 'M', u'ν'),
    (0x1D6B5, 'M', u'ξ'),
    (0x1D6B6, 'M', u'ο'),
    (0x1D6B7, 'M', u'π'),
    (0x1D6B8, 'M', u'ρ'),
    (0x1D6B9, 'M', u'θ'),
    (0x1D6BA, 'M', u'σ'),
    (0x1D6BB, 'M', u'τ'),
    (0x1D6BC, 'M', u'υ'),
    (0x1D6BD, 'M', u'φ'),
    (0x1D6BE, 'M', u'χ'),
    (0x1D6BF, 'M', u'ψ'),
    (0x1D6C0, 'M', u'ω'),
    (0x1D6C1, 'M', u'∇'),
    (0x1D6C2, 'M', u'α'),
    (0x1D6C3, 'M', u'β'),
    (0x1D6C4, 'M', u'γ'),
    (0x1D6C5, 'M', u'δ'),
    (0x1D6C6, 'M', u'ε'),
    (0x1D6C7, 'M', u'ζ'),
    (0x1D6C8, 'M', u'η'),
    (0x1D6C9, 'M', u'θ'),
    (0x1D6CA, 'M', u'ι'),
    (0x1D6CB, 'M', u'κ'),
    (0x1D6CC, 'M', u'λ'),
    (0x1D6CD, 'M', u'μ'),
    (0x1D6CE, 'M', u'ν'),
    (0x1D6CF, 'M', u'ξ'),
    (0x1D6D0, 'M', u'ο'),
    (0x1D6D1, 'M', u'π'),
    (0x1D6D2, 'M', u'ρ'),
    (0x1D6D3, 'M', u'σ'),
    (0x1D6D5, 'M', u'τ'),
    (0x1D6D6, 'M', u'υ'),
    (0x1D6D7, 'M', u'φ'),
    (0x1D6D8, 'M', u'χ'),
    (0x1D6D9, 'M', u'ψ'),
    (0x1D6DA, 'M', u'ω'),
    (0x1D6DB, 'M', u'∂'),
    (0x1D6DC, 'M', u'ε'),
    (0x1D6DD, 'M', u'θ'),
    (0x1D6DE, 'M', u'κ'),
    (0x1D6DF, 'M', u'φ'),
    (0x1D6E0, 'M', u'ρ'),
    (0x1D6E1, 'M', u'π'),
    (0x1D6E2, 'M', u'α'),
    (0x1D6E3, 'M', u'β'),
    (0x1D6E4, 'M', u'γ'),
    (0x1D6E5, 'M', u'δ'),
    (0x1D6E6, 'M', u'ε'),
    (0x1D6E7, 'M', u'ζ'),
    (0x1D6E8, 'M', u'η'),
    (0x1D6E9, 'M', u'θ'),
    (0x1D6EA, 'M', u'ι'),
    (0x1D6EB, 'M', u'κ'),
    (0x1D6EC, 'M', u'λ'),
    (0x1D6ED, 'M', u'μ'),
    (0x1D6EE, 'M', u'ν'),
    (0x1D6EF, 'M', u'ξ'),
    (0x1D6F0, 'M', u'ο'),
    (0x1D6F1, 'M', u'π'),
    (0x1D6F2, 'M', u'ρ'),
    (0x1D6F3, 'M', u'θ'),
    (0x1D6F4, 'M', u'σ'),
    (0x1D6F5, 'M', u'τ'),
    (0x1D6F6, 'M', u'υ'),
    (0x1D6F7, 'M', u'φ'),
    (0x1D6F8, 'M', u'χ'),
    (0x1D6F9, 'M', u'ψ'),
    (0x1D6FA, 'M', u'ω'),
    (0x1D6FB, 'M', u'∇'),
    (0x1D6FC, 'M', u'α'),
    (0x1D6FD, 'M', u'β'),
    (0x1D6FE, 'M', u'γ'),
    (0x1D6FF, 'M', u'δ'),
    (0x1D700, 'M', u'ε'),
    ]

def _seg_61():
    """Return segment 61 of the generated Unicode code-point mapping table.

    Each entry is ``(codepoint, status)`` or ``(codepoint, status, mapping)``:
    rows with status 'M' or '3' carry a replacement string, rows with 'V' or
    'X' do not (presumably the UTS #46 valid/mapped/disallowed statuses --
    confirm against the idna table generator). Generated data; do not edit
    by hand.
    """
    return [
    (0x1D701, 'M', u'ζ'),
    (0x1D702, 'M', u'η'),
    (0x1D703, 'M', u'θ'),
    (0x1D704, 'M', u'ι'),
    (0x1D705, 'M', u'κ'),
    (0x1D706, 'M', u'λ'),
    (0x1D707, 'M', u'μ'),
    (0x1D708, 'M', u'ν'),
    (0x1D709, 'M', u'ξ'),
    (0x1D70A, 'M', u'ο'),
    (0x1D70B, 'M', u'π'),
    (0x1D70C, 'M', u'ρ'),
    (0x1D70D, 'M', u'σ'),
    (0x1D70F, 'M', u'τ'),
    (0x1D710, 'M', u'υ'),
    (0x1D711, 'M', u'φ'),
    (0x1D712, 'M', u'χ'),
    (0x1D713, 'M', u'ψ'),
    (0x1D714, 'M', u'ω'),
    (0x1D715, 'M', u'∂'),
    (0x1D716, 'M', u'ε'),
    (0x1D717, 'M', u'θ'),
    (0x1D718, 'M', u'κ'),
    (0x1D719, 'M', u'φ'),
    (0x1D71A, 'M', u'ρ'),
    (0x1D71B, 'M', u'π'),
    (0x1D71C, 'M', u'α'),
    (0x1D71D, 'M', u'β'),
    (0x1D71E, 'M', u'γ'),
    (0x1D71F, 'M', u'δ'),
    (0x1D720, 'M', u'ε'),
    (0x1D721, 'M', u'ζ'),
    (0x1D722, 'M', u'η'),
    (0x1D723, 'M', u'θ'),
    (0x1D724, 'M', u'ι'),
    (0x1D725, 'M', u'κ'),
    (0x1D726, 'M', u'λ'),
    (0x1D727, 'M', u'μ'),
    (0x1D728, 'M', u'ν'),
    (0x1D729, 'M', u'ξ'),
    (0x1D72A, 'M', u'ο'),
    (0x1D72B, 'M', u'π'),
    (0x1D72C, 'M', u'ρ'),
    (0x1D72D, 'M', u'θ'),
    (0x1D72E, 'M', u'σ'),
    (0x1D72F, 'M', u'τ'),
    (0x1D730, 'M', u'υ'),
    (0x1D731, 'M', u'φ'),
    (0x1D732, 'M', u'χ'),
    (0x1D733, 'M', u'ψ'),
    (0x1D734, 'M', u'ω'),
    (0x1D735, 'M', u'∇'),
    (0x1D736, 'M', u'α'),
    (0x1D737, 'M', u'β'),
    (0x1D738, 'M', u'γ'),
    (0x1D739, 'M', u'δ'),
    (0x1D73A, 'M', u'ε'),
    (0x1D73B, 'M', u'ζ'),
    (0x1D73C, 'M', u'η'),
    (0x1D73D, 'M', u'θ'),
    (0x1D73E, 'M', u'ι'),
    (0x1D73F, 'M', u'κ'),
    (0x1D740, 'M', u'λ'),
    (0x1D741, 'M', u'μ'),
    (0x1D742, 'M', u'ν'),
    (0x1D743, 'M', u'ξ'),
    (0x1D744, 'M', u'ο'),
    (0x1D745, 'M', u'π'),
    (0x1D746, 'M', u'ρ'),
    (0x1D747, 'M', u'σ'),
    (0x1D749, 'M', u'τ'),
    (0x1D74A, 'M', u'υ'),
    (0x1D74B, 'M', u'φ'),
    (0x1D74C, 'M', u'χ'),
    (0x1D74D, 'M', u'ψ'),
    (0x1D74E, 'M', u'ω'),
    (0x1D74F, 'M', u'∂'),
    (0x1D750, 'M', u'ε'),
    (0x1D751, 'M', u'θ'),
    (0x1D752, 'M', u'κ'),
    (0x1D753, 'M', u'φ'),
    (0x1D754, 'M', u'ρ'),
    (0x1D755, 'M', u'π'),
    (0x1D756, 'M', u'α'),
    (0x1D757, 'M', u'β'),
    (0x1D758, 'M', u'γ'),
    (0x1D759, 'M', u'δ'),
    (0x1D75A, 'M', u'ε'),
    (0x1D75B, 'M', u'ζ'),
    (0x1D75C, 'M', u'η'),
    (0x1D75D, 'M', u'θ'),
    (0x1D75E, 'M', u'ι'),
    (0x1D75F, 'M', u'κ'),
    (0x1D760, 'M', u'λ'),
    (0x1D761, 'M', u'μ'),
    (0x1D762, 'M', u'ν'),
    (0x1D763, 'M', u'ξ'),
    (0x1D764, 'M', u'ο'),
    (0x1D765, 'M', u'π'),
    (0x1D766, 'M', u'ρ'),
    ]

def _seg_62():
    """Return segment 62 of the generated Unicode code-point mapping table.

    Each entry is ``(codepoint, status)`` or ``(codepoint, status, mapping)``:
    rows with status 'M' or '3' carry a replacement string, rows with 'V' or
    'X' do not (presumably the UTS #46 valid/mapped/disallowed statuses --
    confirm against the idna table generator). Generated data; do not edit
    by hand.
    """
    return [
    (0x1D767, 'M', u'θ'),
    (0x1D768, 'M', u'σ'),
    (0x1D769, 'M', u'τ'),
    (0x1D76A, 'M', u'υ'),
    (0x1D76B, 'M', u'φ'),
    (0x1D76C, 'M', u'χ'),
    (0x1D76D, 'M', u'ψ'),
    (0x1D76E, 'M', u'ω'),
    (0x1D76F, 'M', u'∇'),
    (0x1D770, 'M', u'α'),
    (0x1D771, 'M', u'β'),
    (0x1D772, 'M', u'γ'),
    (0x1D773, 'M', u'δ'),
    (0x1D774, 'M', u'ε'),
    (0x1D775, 'M', u'ζ'),
    (0x1D776, 'M', u'η'),
    (0x1D777, 'M', u'θ'),
    (0x1D778, 'M', u'ι'),
    (0x1D779, 'M', u'κ'),
    (0x1D77A, 'M', u'λ'),
    (0x1D77B, 'M', u'μ'),
    (0x1D77C, 'M', u'ν'),
    (0x1D77D, 'M', u'ξ'),
    (0x1D77E, 'M', u'ο'),
    (0x1D77F, 'M', u'π'),
    (0x1D780, 'M', u'ρ'),
    (0x1D781, 'M', u'σ'),
    (0x1D783, 'M', u'τ'),
    (0x1D784, 'M', u'υ'),
    (0x1D785, 'M', u'φ'),
    (0x1D786, 'M', u'χ'),
    (0x1D787, 'M', u'ψ'),
    (0x1D788, 'M', u'ω'),
    (0x1D789, 'M', u'∂'),
    (0x1D78A, 'M', u'ε'),
    (0x1D78B, 'M', u'θ'),
    (0x1D78C, 'M', u'κ'),
    (0x1D78D, 'M', u'φ'),
    (0x1D78E, 'M', u'ρ'),
    (0x1D78F, 'M', u'π'),
    (0x1D790, 'M', u'α'),
    (0x1D791, 'M', u'β'),
    (0x1D792, 'M', u'γ'),
    (0x1D793, 'M', u'δ'),
    (0x1D794, 'M', u'ε'),
    (0x1D795, 'M', u'ζ'),
    (0x1D796, 'M', u'η'),
    (0x1D797, 'M', u'θ'),
    (0x1D798, 'M', u'ι'),
    (0x1D799, 'M', u'κ'),
    (0x1D79A, 'M', u'λ'),
    (0x1D79B, 'M', u'μ'),
    (0x1D79C, 'M', u'ν'),
    (0x1D79D, 'M', u'ξ'),
    (0x1D79E, 'M', u'ο'),
    (0x1D79F, 'M', u'π'),
    (0x1D7A0, 'M', u'ρ'),
    (0x1D7A1, 'M', u'θ'),
    (0x1D7A2, 'M', u'σ'),
    (0x1D7A3, 'M', u'τ'),
    (0x1D7A4, 'M', u'υ'),
    (0x1D7A5, 'M', u'φ'),
    (0x1D7A6, 'M', u'χ'),
    (0x1D7A7, 'M', u'ψ'),
    (0x1D7A8, 'M', u'ω'),
    (0x1D7A9, 'M', u'∇'),
    (0x1D7AA, 'M', u'α'),
    (0x1D7AB, 'M', u'β'),
    (0x1D7AC, 'M', u'γ'),
    (0x1D7AD, 'M', u'δ'),
    (0x1D7AE, 'M', u'ε'),
    (0x1D7AF, 'M', u'ζ'),
    (0x1D7B0, 'M', u'η'),
    (0x1D7B1, 'M', u'θ'),
    (0x1D7B2, 'M', u'ι'),
    (0x1D7B3, 'M', u'κ'),
    (0x1D7B4, 'M', u'λ'),
    (0x1D7B5, 'M', u'μ'),
    (0x1D7B6, 'M', u'ν'),
    (0x1D7B7, 'M', u'ξ'),
    (0x1D7B8, 'M', u'ο'),
    (0x1D7B9, 'M', u'π'),
    (0x1D7BA, 'M', u'ρ'),
    (0x1D7BB, 'M', u'σ'),
    (0x1D7BD, 'M', u'τ'),
    (0x1D7BE, 'M', u'υ'),
    (0x1D7BF, 'M', u'φ'),
    (0x1D7C0, 'M', u'χ'),
    (0x1D7C1, 'M', u'ψ'),
    (0x1D7C2, 'M', u'ω'),
    (0x1D7C3, 'M', u'∂'),
    (0x1D7C4, 'M', u'ε'),
    (0x1D7C5, 'M', u'θ'),
    (0x1D7C6, 'M', u'κ'),
    (0x1D7C7, 'M', u'φ'),
    (0x1D7C8, 'M', u'ρ'),
    (0x1D7C9, 'M', u'π'),
    (0x1D7CA, 'M', u'ϝ'),
    (0x1D7CC, 'X'),
    (0x1D7CE, 'M', u'0'),
    ]

def _seg_63():
    # Machine-generated UTS #46 mapping-table segment: rows are
    # (codepoint, status) or (codepoint, status, replacement);
    # 'M'/'3' rows carry a replacement string, 'V'/'X'/'I' rows do not.
    return [
    (0x1D7CF, 'M', u'1'),
    (0x1D7D0, 'M', u'2'),
    (0x1D7D1, 'M', u'3'),
    (0x1D7D2, 'M', u'4'),
    (0x1D7D3, 'M', u'5'),
    (0x1D7D4, 'M', u'6'),
    (0x1D7D5, 'M', u'7'),
    (0x1D7D6, 'M', u'8'),
    (0x1D7D7, 'M', u'9'),
    (0x1D7D8, 'M', u'0'),
    (0x1D7D9, 'M', u'1'),
    (0x1D7DA, 'M', u'2'),
    (0x1D7DB, 'M', u'3'),
    (0x1D7DC, 'M', u'4'),
    (0x1D7DD, 'M', u'5'),
    (0x1D7DE, 'M', u'6'),
    (0x1D7DF, 'M', u'7'),
    (0x1D7E0, 'M', u'8'),
    (0x1D7E1, 'M', u'9'),
    (0x1D7E2, 'M', u'0'),
    (0x1D7E3, 'M', u'1'),
    (0x1D7E4, 'M', u'2'),
    (0x1D7E5, 'M', u'3'),
    (0x1D7E6, 'M', u'4'),
    (0x1D7E7, 'M', u'5'),
    (0x1D7E8, 'M', u'6'),
    (0x1D7E9, 'M', u'7'),
    (0x1D7EA, 'M', u'8'),
    (0x1D7EB, 'M', u'9'),
    (0x1D7EC, 'M', u'0'),
    (0x1D7ED, 'M', u'1'),
    (0x1D7EE, 'M', u'2'),
    (0x1D7EF, 'M', u'3'),
    (0x1D7F0, 'M', u'4'),
    (0x1D7F1, 'M', u'5'),
    (0x1D7F2, 'M', u'6'),
    (0x1D7F3, 'M', u'7'),
    (0x1D7F4, 'M', u'8'),
    (0x1D7F5, 'M', u'9'),
    (0x1D7F6, 'M', u'0'),
    (0x1D7F7, 'M', u'1'),
    (0x1D7F8, 'M', u'2'),
    (0x1D7F9, 'M', u'3'),
    (0x1D7FA, 'M', u'4'),
    (0x1D7FB, 'M', u'5'),
    (0x1D7FC, 'M', u'6'),
    (0x1D7FD, 'M', u'7'),
    (0x1D7FE, 'M', u'8'),
    (0x1D7FF, 'M', u'9'),
    (0x1D800, 'X'),
    (0x1EE00, 'M', u'ا'),
    (0x1EE01, 'M', u'ب'),
    (0x1EE02, 'M', u'ج'),
    (0x1EE03, 'M', u'د'),
    (0x1EE04, 'X'),
    (0x1EE05, 'M', u'و'),
    (0x1EE06, 'M', u'ز'),
    (0x1EE07, 'M', u'ح'),
    (0x1EE08, 'M', u'ط'),
    (0x1EE09, 'M', u'ي'),
    (0x1EE0A, 'M', u'ك'),
    (0x1EE0B, 'M', u'ل'),
    (0x1EE0C, 'M', u'م'),
    (0x1EE0D, 'M', u'ن'),
    (0x1EE0E, 'M', u'س'),
    (0x1EE0F, 'M', u'ع'),
    (0x1EE10, 'M', u'ف'),
    (0x1EE11, 'M', u'ص'),
    (0x1EE12, 'M', u'ق'),
    (0x1EE13, 'M', u'ر'),
    (0x1EE14, 'M', u'ش'),
    (0x1EE15, 'M', u'ت'),
    (0x1EE16, 'M', u'ث'),
    (0x1EE17, 'M', u'خ'),
    (0x1EE18, 'M', u'ذ'),
    (0x1EE19, 'M', u'ض'),
    (0x1EE1A, 'M', u'ظ'),
    (0x1EE1B, 'M', u'غ'),
    (0x1EE1C, 'M', u'ٮ'),
    (0x1EE1D, 'M', u'ں'),
    (0x1EE1E, 'M', u'ڡ'),
    (0x1EE1F, 'M', u'ٯ'),
    (0x1EE20, 'X'),
    (0x1EE21, 'M', u'ب'),
    (0x1EE22, 'M', u'ج'),
    (0x1EE23, 'X'),
    (0x1EE24, 'M', u'ه'),
    (0x1EE25, 'X'),
    (0x1EE27, 'M', u'ح'),
    (0x1EE28, 'X'),
    (0x1EE29, 'M', u'ي'),
    (0x1EE2A, 'M', u'ك'),
    (0x1EE2B, 'M', u'ل'),
    (0x1EE2C, 'M', u'م'),
    (0x1EE2D, 'M', u'ن'),
    (0x1EE2E, 'M', u'س'),
    (0x1EE2F, 'M', u'ع'),
    (0x1EE30, 'M', u'ف'),
    (0x1EE31, 'M', u'ص'),
    (0x1EE32, 'M', u'ق'),
    ]

def _seg_64():
    # Machine-generated UTS #46 mapping-table segment: rows are
    # (codepoint, status) or (codepoint, status, replacement).
    return [
    (0x1EE33, 'X'),
    (0x1EE34, 'M', u'ش'),
    (0x1EE35, 'M', u'ت'),
    (0x1EE36, 'M', u'ث'),
    (0x1EE37, 'M', u'خ'),
    (0x1EE38, 'X'),
    (0x1EE39, 'M', u'ض'),
    (0x1EE3A, 'X'),
    (0x1EE3B, 'M', u'غ'),
    (0x1EE3C, 'X'),
    (0x1EE42, 'M', u'ج'),
    (0x1EE43, 'X'),
    (0x1EE47, 'M', u'ح'),
    (0x1EE48, 'X'),
    (0x1EE49, 'M', u'ي'),
    (0x1EE4A, 'X'),
    (0x1EE4B, 'M', u'ل'),
    (0x1EE4C, 'X'),
    (0x1EE4D, 'M', u'ن'),
    (0x1EE4E, 'M', u'س'),
    (0x1EE4F, 'M', u'ع'),
    (0x1EE50, 'X'),
    (0x1EE51, 'M', u'ص'),
    (0x1EE52, 'M', u'ق'),
    (0x1EE53, 'X'),
    (0x1EE54, 'M', u'ش'),
    (0x1EE55, 'X'),
    (0x1EE57, 'M', u'خ'),
    (0x1EE58, 'X'),
    (0x1EE59, 'M', u'ض'),
    (0x1EE5A, 'X'),
    (0x1EE5B, 'M', u'غ'),
    (0x1EE5C, 'X'),
    (0x1EE5D, 'M', u'ں'),
    (0x1EE5E, 'X'),
    (0x1EE5F, 'M', u'ٯ'),
    (0x1EE60, 'X'),
    (0x1EE61, 'M', u'ب'),
    (0x1EE62, 'M', u'ج'),
    (0x1EE63, 'X'),
    (0x1EE64, 'M', u'ه'),
    (0x1EE65, 'X'),
    (0x1EE67, 'M', u'ح'),
    (0x1EE68, 'M', u'ط'),
    (0x1EE69, 'M', u'ي'),
    (0x1EE6A, 'M', u'ك'),
    (0x1EE6B, 'X'),
    (0x1EE6C, 'M', u'م'),
    (0x1EE6D, 'M', u'ن'),
    (0x1EE6E, 'M', u'س'),
    (0x1EE6F, 'M', u'ع'),
    (0x1EE70, 'M', u'ف'),
    (0x1EE71, 'M', u'ص'),
    (0x1EE72, 'M', u'ق'),
    (0x1EE73, 'X'),
    (0x1EE74, 'M', u'ش'),
    (0x1EE75, 'M', u'ت'),
    (0x1EE76, 'M', u'ث'),
    (0x1EE77, 'M', u'خ'),
    (0x1EE78, 'X'),
    (0x1EE79, 'M', u'ض'),
    (0x1EE7A, 'M', u'ظ'),
    (0x1EE7B, 'M', u'غ'),
    (0x1EE7C, 'M', u'ٮ'),
    (0x1EE7D, 'X'),
    (0x1EE7E, 'M', u'ڡ'),
    (0x1EE7F, 'X'),
    (0x1EE80, 'M', u'ا'),
    (0x1EE81, 'M', u'ب'),
    (0x1EE82, 'M', u'ج'),
    (0x1EE83, 'M', u'د'),
    (0x1EE84, 'M', u'ه'),
    (0x1EE85, 'M', u'و'),
    (0x1EE86, 'M', u'ز'),
    (0x1EE87, 'M', u'ح'),
    (0x1EE88, 'M', u'ط'),
    (0x1EE89, 'M', u'ي'),
    (0x1EE8A, 'X'),
    (0x1EE8B, 'M', u'ل'),
    (0x1EE8C, 'M', u'م'),
    (0x1EE8D, 'M', u'ن'),
    (0x1EE8E, 'M', u'س'),
    (0x1EE8F, 'M', u'ع'),
    (0x1EE90, 'M', u'ف'),
    (0x1EE91, 'M', u'ص'),
    (0x1EE92, 'M', u'ق'),
    (0x1EE93, 'M', u'ر'),
    (0x1EE94, 'M', u'ش'),
    (0x1EE95, 'M', u'ت'),
    (0x1EE96, 'M', u'ث'),
    (0x1EE97, 'M', u'خ'),
    (0x1EE98, 'M', u'ذ'),
    (0x1EE99, 'M', u'ض'),
    (0x1EE9A, 'M', u'ظ'),
    (0x1EE9B, 'M', u'غ'),
    (0x1EE9C, 'X'),
    (0x1EEA1, 'M', u'ب'),
    (0x1EEA2, 'M', u'ج'),
    (0x1EEA3, 'M', u'د'),
    (0x1EEA4, 'X'),
    ]

def _seg_65():
    # Machine-generated UTS #46 mapping-table segment: rows are
    # (codepoint, status) or (codepoint, status, replacement).
    return [
    (0x1EEA5, 'M', u'و'),
    (0x1EEA6, 'M', u'ز'),
    (0x1EEA7, 'M', u'ح'),
    (0x1EEA8, 'M', u'ط'),
    (0x1EEA9, 'M', u'ي'),
    (0x1EEAA, 'X'),
    (0x1EEAB, 'M', u'ل'),
    (0x1EEAC, 'M', u'م'),
    (0x1EEAD, 'M', u'ن'),
    (0x1EEAE, 'M', u'س'),
    (0x1EEAF, 'M', u'ع'),
    (0x1EEB0, 'M', u'ف'),
    (0x1EEB1, 'M', u'ص'),
    (0x1EEB2, 'M', u'ق'),
    (0x1EEB3, 'M', u'ر'),
    (0x1EEB4, 'M', u'ش'),
    (0x1EEB5, 'M', u'ت'),
    (0x1EEB6, 'M', u'ث'),
    (0x1EEB7, 'M', u'خ'),
    (0x1EEB8, 'M', u'ذ'),
    (0x1EEB9, 'M', u'ض'),
    (0x1EEBA, 'M', u'ظ'),
    (0x1EEBB, 'M', u'غ'),
    (0x1EEBC, 'X'),
    (0x1EEF0, 'V'),
    (0x1EEF2, 'X'),
    (0x1F000, 'V'),
    (0x1F02C, 'X'),
    (0x1F030, 'V'),
    (0x1F094, 'X'),
    (0x1F0A0, 'V'),
    (0x1F0AF, 'X'),
    (0x1F0B1, 'V'),
    (0x1F0BF, 'X'),
    (0x1F0C1, 'V'),
    (0x1F0D0, 'X'),
    (0x1F0D1, 'V'),
    (0x1F0E0, 'X'),
    (0x1F101, '3', u'0,'),
    (0x1F102, '3', u'1,'),
    (0x1F103, '3', u'2,'),
    (0x1F104, '3', u'3,'),
    (0x1F105, '3', u'4,'),
    (0x1F106, '3', u'5,'),
    (0x1F107, '3', u'6,'),
    (0x1F108, '3', u'7,'),
    (0x1F109, '3', u'8,'),
    (0x1F10A, '3', u'9,'),
    (0x1F10B, 'X'),
    (0x1F110, '3', u'(a)'),
    (0x1F111, '3', u'(b)'),
    (0x1F112, '3', u'(c)'),
    (0x1F113, '3', u'(d)'),
    (0x1F114, '3', u'(e)'),
    (0x1F115, '3', u'(f)'),
    (0x1F116, '3', u'(g)'),
    (0x1F117, '3', u'(h)'),
    (0x1F118, '3', u'(i)'),
    (0x1F119, '3', u'(j)'),
    (0x1F11A, '3', u'(k)'),
    (0x1F11B, '3', u'(l)'),
    (0x1F11C, '3', u'(m)'),
    (0x1F11D, '3', u'(n)'),
    (0x1F11E, '3', u'(o)'),
    (0x1F11F, '3', u'(p)'),
    (0x1F120, '3', u'(q)'),
    (0x1F121, '3', u'(r)'),
    (0x1F122, '3', u'(s)'),
    (0x1F123, '3', u'(t)'),
    (0x1F124, '3', u'(u)'),
    (0x1F125, '3', u'(v)'),
    (0x1F126, '3', u'(w)'),
    (0x1F127, '3', u'(x)'),
    (0x1F128, '3', u'(y)'),
    (0x1F129, '3', u'(z)'),
    (0x1F12A, 'M', u'〔s〕'),
    (0x1F12B, 'M', u'c'),
    (0x1F12C, 'M', u'r'),
    (0x1F12D, 'M', u'cd'),
    (0x1F12E, 'M', u'wz'),
    (0x1F12F, 'X'),
    (0x1F130, 'M', u'a'),
    (0x1F131, 'M', u'b'),
    (0x1F132, 'M', u'c'),
    (0x1F133, 'M', u'd'),
    (0x1F134, 'M', u'e'),
    (0x1F135, 'M', u'f'),
    (0x1F136, 'M', u'g'),
    (0x1F137, 'M', u'h'),
    (0x1F138, 'M', u'i'),
    (0x1F139, 'M', u'j'),
    (0x1F13A, 'M', u'k'),
    (0x1F13B, 'M', u'l'),
    (0x1F13C, 'M', u'm'),
    (0x1F13D, 'M', u'n'),
    (0x1F13E, 'M', u'o'),
    (0x1F13F, 'M', u'p'),
    (0x1F140, 'M', u'q'),
    (0x1F141, 'M', u'r'),
    (0x1F142, 'M', u's'),
    ]

def _seg_66():
    # Machine-generated UTS #46 mapping-table segment: rows are
    # (codepoint, status) or (codepoint, status, replacement).
    return [
    (0x1F143, 'M', u't'),
    (0x1F144, 'M', u'u'),
    (0x1F145, 'M', u'v'),
    (0x1F146, 'M', u'w'),
    (0x1F147, 'M', u'x'),
    (0x1F148, 'M', u'y'),
    (0x1F149, 'M', u'z'),
    (0x1F14A, 'M', u'hv'),
    (0x1F14B, 'M', u'mv'),
    (0x1F14C, 'M', u'sd'),
    (0x1F14D, 'M', u'ss'),
    (0x1F14E, 'M', u'ppv'),
    (0x1F14F, 'M', u'wc'),
    (0x1F150, 'V'),
    (0x1F16A, 'M', u'mc'),
    (0x1F16B, 'M', u'md'),
    (0x1F16C, 'X'),
    (0x1F170, 'V'),
    (0x1F190, 'M', u'dj'),
    (0x1F191, 'V'),
    (0x1F19B, 'X'),
    (0x1F1E6, 'V'),
    (0x1F200, 'M', u'ほか'),
    (0x1F201, 'M', u'ココ'),
    (0x1F202, 'M', u'サ'),
    (0x1F203, 'X'),
    (0x1F210, 'M', u'手'),
    (0x1F211, 'M', u'字'),
    (0x1F212, 'M', u'双'),
    (0x1F213, 'M', u'デ'),
    (0x1F214, 'M', u'二'),
    (0x1F215, 'M', u'多'),
    (0x1F216, 'M', u'解'),
    (0x1F217, 'M', u'天'),
    (0x1F218, 'M', u'交'),
    (0x1F219, 'M', u'映'),
    (0x1F21A, 'M', u'無'),
    (0x1F21B, 'M', u'料'),
    (0x1F21C, 'M', u'前'),
    (0x1F21D, 'M', u'後'),
    (0x1F21E, 'M', u'再'),
    (0x1F21F, 'M', u'新'),
    (0x1F220, 'M', u'初'),
    (0x1F221, 'M', u'終'),
    (0x1F222, 'M', u'生'),
    (0x1F223, 'M', u'販'),
    (0x1F224, 'M', u'声'),
    (0x1F225, 'M', u'吹'),
    (0x1F226, 'M', u'演'),
    (0x1F227, 'M', u'投'),
    (0x1F228, 'M', u'捕'),
    (0x1F229, 'M', u'一'),
    (0x1F22A, 'M', u'三'),
    (0x1F22B, 'M', u'遊'),
    (0x1F22C, 'M', u'左'),
    (0x1F22D, 'M', u'中'),
    (0x1F22E, 'M', u'右'),
    (0x1F22F, 'M', u'指'),
    (0x1F230, 'M', u'走'),
    (0x1F231, 'M', u'打'),
    (0x1F232, 'M', u'禁'),
    (0x1F233, 'M', u'空'),
    (0x1F234, 'M', u'合'),
    (0x1F235, 'M', u'満'),
    (0x1F236, 'M', u'有'),
    (0x1F237, 'M', u'月'),
    (0x1F238, 'M', u'申'),
    (0x1F239, 'M', u'割'),
    (0x1F23A, 'M', u'営'),
    (0x1F23B, 'X'),
    (0x1F240, 'M', u'〔本〕'),
    (0x1F241, 'M', u'〔三〕'),
    (0x1F242, 'M', u'〔二〕'),
    (0x1F243, 'M', u'〔安〕'),
    (0x1F244, 'M', u'〔点〕'),
    (0x1F245, 'M', u'〔打〕'),
    (0x1F246, 'M', u'〔盗〕'),
    (0x1F247, 'M', u'〔勝〕'),
    (0x1F248, 'M', u'〔敗〕'),
    (0x1F249, 'X'),
    (0x1F250, 'M', u'得'),
    (0x1F251, 'M', u'可'),
    (0x1F252, 'X'),
    (0x1F300, 'V'),
    (0x1F321, 'X'),
    (0x1F330, 'V'),
    (0x1F336, 'X'),
    (0x1F337, 'V'),
    (0x1F37D, 'X'),
    (0x1F380, 'V'),
    (0x1F394, 'X'),
    (0x1F3A0, 'V'),
    (0x1F3C5, 'X'),
    (0x1F3C6, 'V'),
    (0x1F3CB, 'X'),
    (0x1F3E0, 'V'),
    (0x1F3F1, 'X'),
    (0x1F400, 'V'),
    (0x1F43F, 'X'),
    (0x1F440, 'V'),
    ]

def _seg_67():
    # Machine-generated UTS #46 mapping-table segment: rows are
    # (codepoint, status) or (codepoint, status, replacement).
    return [
    (0x1F441, 'X'),
    (0x1F442, 'V'),
    (0x1F4F8, 'X'),
    (0x1F4F9, 'V'),
    (0x1F4FD, 'X'),
    (0x1F500, 'V'),
    (0x1F53E, 'X'),
    (0x1F540, 'V'),
    (0x1F544, 'X'),
    (0x1F550, 'V'),
    (0x1F568, 'X'),
    (0x1F5FB, 'V'),
    (0x1F641, 'X'),
    (0x1F645, 'V'),
    (0x1F650, 'X'),
    (0x1F680, 'V'),
    (0x1F6C6, 'X'),
    (0x1F700, 'V'),
    (0x1F774, 'X'),
    (0x20000, 'V'),
    (0x2A6D7, 'X'),
    (0x2A700, 'V'),
    (0x2B735, 'X'),
    (0x2B740, 'V'),
    (0x2B81E, 'X'),
    (0x2F800, 'M', u'丽'),
    (0x2F801, 'M', u'丸'),
    (0x2F802, 'M', u'乁'),
    (0x2F803, 'M', u'𠄢'),
    (0x2F804, 'M', u'你'),
    (0x2F805, 'M', u'侮'),
    (0x2F806, 'M', u'侻'),
    (0x2F807, 'M', u'倂'),
    (0x2F808, 'M', u'偺'),
    (0x2F809, 'M', u'備'),
    (0x2F80A, 'M', u'僧'),
    (0x2F80B, 'M', u'像'),
    (0x2F80C, 'M', u'㒞'),
    (0x2F80D, 'M', u'𠘺'),
    (0x2F80E, 'M', u'免'),
    (0x2F80F, 'M', u'兔'),
    (0x2F810, 'M', u'兤'),
    (0x2F811, 'M', u'具'),
    (0x2F812, 'M', u'𠔜'),
    (0x2F813, 'M', u'㒹'),
    (0x2F814, 'M', u'內'),
    (0x2F815, 'M', u'再'),
    (0x2F816, 'M', u'𠕋'),
    (0x2F817, 'M', u'冗'),
    (0x2F818, 'M', u'冤'),
    (0x2F819, 'M', u'仌'),
    (0x2F81A, 'M', u'冬'),
    (0x2F81B, 'M', u'况'),
    (0x2F81C, 'M', u'𩇟'),
    (0x2F81D, 'M', u'凵'),
    (0x2F81E, 'M', u'刃'),
    (0x2F81F, 'M', u'㓟'),
    (0x2F820, 'M', u'刻'),
    (0x2F821, 'M', u'剆'),
    (0x2F822, 'M', u'割'),
    (0x2F823, 'M', u'剷'),
    (0x2F824, 'M', u'㔕'),
    (0x2F825, 'M', u'勇'),
    (0x2F826, 'M', u'勉'),
    (0x2F827, 'M', u'勤'),
    (0x2F828, 'M', u'勺'),
    (0x2F829, 'M', u'包'),
    (0x2F82A, 'M', u'匆'),
    (0x2F82B, 'M', u'北'),
    (0x2F82C, 'M', u'卉'),
    (0x2F82D, 'M', u'卑'),
    (0x2F82E, 'M', u'博'),
    (0x2F82F, 'M', u'即'),
    (0x2F830, 'M', u'卽'),
    (0x2F831, 'M', u'卿'),
    (0x2F834, 'M', u'𠨬'),
    (0x2F835, 'M', u'灰'),
    (0x2F836, 'M', u'及'),
    (0x2F837, 'M', u'叟'),
    (0x2F838, 'M', u'𠭣'),
    (0x2F839, 'M', u'叫'),
    (0x2F83A, 'M', u'叱'),
    (0x2F83B, 'M', u'吆'),
    (0x2F83C, 'M', u'咞'),
    (0x2F83D, 'M', u'吸'),
    (0x2F83E, 'M', u'呈'),
    (0x2F83F, 'M', u'周'),
    (0x2F840, 'M', u'咢'),
    (0x2F841, 'M', u'哶'),
    (0x2F842, 'M', u'唐'),
    (0x2F843, 'M', u'啓'),
    (0x2F844, 'M', u'啣'),
    (0x2F845, 'M', u'善'),
    (0x2F847, 'M', u'喙'),
    (0x2F848, 'M', u'喫'),
    (0x2F849, 'M', u'喳'),
    (0x2F84A, 'M', u'嗂'),
    (0x2F84B, 'M', u'圖'),
    (0x2F84C, 'M', u'嘆'),
    (0x2F84D, 'M', u'圗'),
    ]

def _seg_68():
    # Machine-generated UTS #46 mapping-table segment: rows are
    # (codepoint, status) or (codepoint, status, replacement).
    return [
    (0x2F84E, 'M', u'噑'),
    (0x2F84F, 'M', u'噴'),
    (0x2F850, 'M', u'切'),
    (0x2F851, 'M', u'壮'),
    (0x2F852, 'M', u'城'),
    (0x2F853, 'M', u'埴'),
    (0x2F854, 'M', u'堍'),
    (0x2F855, 'M', u'型'),
    (0x2F856, 'M', u'堲'),
    (0x2F857, 'M', u'報'),
    (0x2F858, 'M', u'墬'),
    (0x2F859, 'M', u'𡓤'),
    (0x2F85A, 'M', u'売'),
    (0x2F85B, 'M', u'壷'),
    (0x2F85C, 'M', u'夆'),
    (0x2F85D, 'M', u'多'),
    (0x2F85E, 'M', u'夢'),
    (0x2F85F, 'M', u'奢'),
    (0x2F860, 'M', u'𡚨'),
    (0x2F861, 'M', u'𡛪'),
    (0x2F862, 'M', u'姬'),
    (0x2F863, 'M', u'娛'),
    (0x2F864, 'M', u'娧'),
    (0x2F865, 'M', u'姘'),
    (0x2F866, 'M', u'婦'),
    (0x2F867, 'M', u'㛮'),
    (0x2F868, 'X'),
    (0x2F869, 'M', u'嬈'),
    (0x2F86A, 'M', u'嬾'),
    (0x2F86C, 'M', u'𡧈'),
    (0x2F86D, 'M', u'寃'),
    (0x2F86E, 'M', u'寘'),
    (0x2F86F, 'M', u'寧'),
    (0x2F870, 'M', u'寳'),
    (0x2F871, 'M', u'𡬘'),
    (0x2F872, 'M', u'寿'),
    (0x2F873, 'M', u'将'),
    (0x2F874, 'X'),
    (0x2F875, 'M', u'尢'),
    (0x2F876, 'M', u'㞁'),
    (0x2F877, 'M', u'屠'),
    (0x2F878, 'M', u'屮'),
    (0x2F879, 'M', u'峀'),
    (0x2F87A, 'M', u'岍'),
    (0x2F87B, 'M', u'𡷤'),
    (0x2F87C, 'M', u'嵃'),
    (0x2F87D, 'M', u'𡷦'),
    (0x2F87E, 'M', u'嵮'),
    (0x2F87F, 'M', u'嵫'),
    (0x2F880, 'M', u'嵼'),
    (0x2F881, 'M', u'巡'),
    (0x2F882, 'M', u'巢'),
    (0x2F883, 'M', u'㠯'),
    (0x2F884, 'M', u'巽'),
    (0x2F885, 'M', u'帨'),
    (0x2F886, 'M', u'帽'),
    (0x2F887, 'M', u'幩'),
    (0x2F888, 'M', u'㡢'),
    (0x2F889, 'M', u'𢆃'),
    (0x2F88A, 'M', u'㡼'),
    (0x2F88B, 'M', u'庰'),
    (0x2F88C, 'M', u'庳'),
    (0x2F88D, 'M', u'庶'),
    (0x2F88E, 'M', u'廊'),
    (0x2F88F, 'M', u'𪎒'),
    (0x2F890, 'M', u'廾'),
    (0x2F891, 'M', u'𢌱'),
    (0x2F893, 'M', u'舁'),
    (0x2F894, 'M', u'弢'),
    (0x2F896, 'M', u'㣇'),
    (0x2F897, 'M', u'𣊸'),
    (0x2F898, 'M', u'𦇚'),
    (0x2F899, 'M', u'形'),
    (0x2F89A, 'M', u'彫'),
    (0x2F89B, 'M', u'㣣'),
    (0x2F89C, 'M', u'徚'),
    (0x2F89D, 'M', u'忍'),
    (0x2F89E, 'M', u'志'),
    (0x2F89F, 'M', u'忹'),
    (0x2F8A0, 'M', u'悁'),
    (0x2F8A1, 'M', u'㤺'),
    (0x2F8A2, 'M', u'㤜'),
    (0x2F8A3, 'M', u'悔'),
    (0x2F8A4, 'M', u'𢛔'),
    (0x2F8A5, 'M', u'惇'),
    (0x2F8A6, 'M', u'慈'),
    (0x2F8A7, 'M', u'慌'),
    (0x2F8A8, 'M', u'慎'),
    (0x2F8A9, 'M', u'慌'),
    (0x2F8AA, 'M', u'慺'),
    (0x2F8AB, 'M', u'憎'),
    (0x2F8AC, 'M', u'憲'),
    (0x2F8AD, 'M', u'憤'),
    (0x2F8AE, 'M', u'憯'),
    (0x2F8AF, 'M', u'懞'),
    (0x2F8B0, 'M', u'懲'),
    (0x2F8B1, 'M', u'懶'),
    (0x2F8B2, 'M', u'成'),
    (0x2F8B3, 'M', u'戛'),
    (0x2F8B4, 'M', u'扝'),
    ]

def _seg_69():
    # Machine-generated UTS #46 mapping-table segment: rows are
    # (codepoint, status) or (codepoint, status, replacement).
    return [
    (0x2F8B5, 'M', u'抱'),
    (0x2F8B6, 'M', u'拔'),
    (0x2F8B7, 'M', u'捐'),
    (0x2F8B8, 'M', u'𢬌'),
    (0x2F8B9, 'M', u'挽'),
    (0x2F8BA, 'M', u'拼'),
    (0x2F8BB, 'M', u'捨'),
    (0x2F8BC, 'M', u'掃'),
    (0x2F8BD, 'M', u'揤'),
    (0x2F8BE, 'M', u'𢯱'),
    (0x2F8BF, 'M', u'搢'),
    (0x2F8C0, 'M', u'揅'),
    (0x2F8C1, 'M', u'掩'),
    (0x2F8C2, 'M', u'㨮'),
    (0x2F8C3, 'M', u'摩'),
    (0x2F8C4, 'M', u'摾'),
    (0x2F8C5, 'M', u'撝'),
    (0x2F8C6, 'M', u'摷'),
    (0x2F8C7, 'M', u'㩬'),
    (0x2F8C8, 'M', u'敏'),
    (0x2F8C9, 'M', u'敬'),
    (0x2F8CA, 'M', u'𣀊'),
    (0x2F8CB, 'M', u'旣'),
    (0x2F8CC, 'M', u'書'),
    (0x2F8CD, 'M', u'晉'),
    (0x2F8CE, 'M', u'㬙'),
    (0x2F8CF, 'M', u'暑'),
    (0x2F8D0, 'M', u'㬈'),
    (0x2F8D1, 'M', u'㫤'),
    (0x2F8D2, 'M', u'冒'),
    (0x2F8D3, 'M', u'冕'),
    (0x2F8D4, 'M', u'最'),
    (0x2F8D5, 'M', u'暜'),
    (0x2F8D6, 'M', u'肭'),
    (0x2F8D7, 'M', u'䏙'),
    (0x2F8D8, 'M', u'朗'),
    (0x2F8D9, 'M', u'望'),
    (0x2F8DA, 'M', u'朡'),
    (0x2F8DB, 'M', u'杞'),
    (0x2F8DC, 'M', u'杓'),
    (0x2F8DD, 'M', u'𣏃'),
    (0x2F8DE, 'M', u'㭉'),
    (0x2F8DF, 'M', u'柺'),
    (0x2F8E0, 'M', u'枅'),
    (0x2F8E1, 'M', u'桒'),
    (0x2F8E2, 'M', u'梅'),
    (0x2F8E3, 'M', u'𣑭'),
    (0x2F8E4, 'M', u'梎'),
    (0x2F8E5, 'M', u'栟'),
    (0x2F8E6, 'M', u'椔'),
    (0x2F8E7, 'M', u'㮝'),
    (0x2F8E8, 'M', u'楂'),
    (0x2F8E9, 'M', u'榣'),
    (0x2F8EA, 'M', u'槪'),
    (0x2F8EB, 'M', u'檨'),
    (0x2F8EC, 'M', u'𣚣'),
    (0x2F8ED, 'M', u'櫛'),
    (0x2F8EE, 'M', u'㰘'),
    (0x2F8EF, 'M', u'次'),
    (0x2F8F0, 'M', u'𣢧'),
    (0x2F8F1, 'M', u'歔'),
    (0x2F8F2, 'M', u'㱎'),
    (0x2F8F3, 'M', u'歲'),
    (0x2F8F4, 'M', u'殟'),
    (0x2F8F5, 'M', u'殺'),
    (0x2F8F6, 'M', u'殻'),
    (0x2F8F7, 'M', u'𣪍'),
    (0x2F8F8, 'M', u'𡴋'),
    (0x2F8F9, 'M', u'𣫺'),
    (0x2F8FA, 'M', u'汎'),
    (0x2F8FB, 'M', u'𣲼'),
    (0x2F8FC, 'M', u'沿'),
    (0x2F8FD, 'M', u'泍'),
    (0x2F8FE, 'M', u'汧'),
    (0x2F8FF, 'M', u'洖'),
    (0x2F900, 'M', u'派'),
    (0x2F901, 'M', u'海'),
    (0x2F902, 'M', u'流'),
    (0x2F903, 'M', u'浩'),
    (0x2F904, 'M', u'浸'),
    (0x2F905, 'M', u'涅'),
    (0x2F906, 'M', u'𣴞'),
    (0x2F907, 'M', u'洴'),
    (0x2F908, 'M', u'港'),
    (0x2F909, 'M', u'湮'),
    (0x2F90A, 'M', u'㴳'),
    (0x2F90B, 'M', u'滋'),
    (0x2F90C, 'M', u'滇'),
    (0x2F90D, 'M', u'𣻑'),
    (0x2F90E, 'M', u'淹'),
    (0x2F90F, 'M', u'潮'),
    (0x2F910, 'M', u'𣽞'),
    (0x2F911, 'M', u'𣾎'),
    (0x2F912, 'M', u'濆'),
    (0x2F913, 'M', u'瀹'),
    (0x2F914, 'M', u'瀞'),
    (0x2F915, 'M', u'瀛'),
    (0x2F916, 'M', u'㶖'),
    (0x2F917, 'M', u'灊'),
    (0x2F918, 'M', u'災'),
    ]

def _seg_70():
    # Machine-generated UTS #46 mapping-table segment: rows are
    # (codepoint, status) or (codepoint, status, replacement).
    return [
    (0x2F919, 'M', u'灷'),
    (0x2F91A, 'M', u'炭'),
    (0x2F91B, 'M', u'𠔥'),
    (0x2F91C, 'M', u'煅'),
    (0x2F91D, 'M', u'𤉣'),
    (0x2F91E, 'M', u'熜'),
    (0x2F91F, 'X'),
    (0x2F920, 'M', u'爨'),
    (0x2F921, 'M', u'爵'),
    (0x2F922, 'M', u'牐'),
    (0x2F923, 'M', u'𤘈'),
    (0x2F924, 'M', u'犀'),
    (0x2F925, 'M', u'犕'),
    (0x2F926, 'M', u'𤜵'),
    (0x2F927, 'M', u'𤠔'),
    (0x2F928, 'M', u'獺'),
    (0x2F929, 'M', u'王'),
    (0x2F92A, 'M', u'㺬'),
    (0x2F92B, 'M', u'玥'),
    (0x2F92C, 'M', u'㺸'),
    (0x2F92E, 'M', u'瑇'),
    (0x2F92F, 'M', u'瑜'),
    (0x2F930, 'M', u'瑱'),
    (0x2F931, 'M', u'璅'),
    (0x2F932, 'M', u'瓊'),
    (0x2F933, 'M', u'㼛'),
    (0x2F934, 'M', u'甤'),
    (0x2F935, 'M', u'𤰶'),
    (0x2F936, 'M', u'甾'),
    (0x2F937, 'M', u'𤲒'),
    (0x2F938, 'M', u'異'),
    (0x2F939, 'M', u'𢆟'),
    (0x2F93A, 'M', u'瘐'),
    (0x2F93B, 'M', u'𤾡'),
    (0x2F93C, 'M', u'𤾸'),
    (0x2F93D, 'M', u'𥁄'),
    (0x2F93E, 'M', u'㿼'),
    (0x2F93F, 'M', u'䀈'),
    (0x2F940, 'M', u'直'),
    (0x2F941, 'M', u'𥃳'),
    (0x2F942, 'M', u'𥃲'),
    (0x2F943, 'M', u'𥄙'),
    (0x2F944, 'M', u'𥄳'),
    (0x2F945, 'M', u'眞'),
    (0x2F946, 'M', u'真'),
    (0x2F948, 'M', u'睊'),
    (0x2F949, 'M', u'䀹'),
    (0x2F94A, 'M', u'瞋'),
    (0x2F94B, 'M', u'䁆'),
    (0x2F94C, 'M', u'䂖'),
    (0x2F94D, 'M', u'𥐝'),
    (0x2F94E, 'M', u'硎'),
    (0x2F94F, 'M', u'碌'),
    (0x2F950, 'M', u'磌'),
    (0x2F951, 'M', u'䃣'),
    (0x2F952, 'M', u'𥘦'),
    (0x2F953, 'M', u'祖'),
    (0x2F954, 'M', u'𥚚'),
    (0x2F955, 'M', u'𥛅'),
    (0x2F956, 'M', u'福'),
    (0x2F957, 'M', u'秫'),
    (0x2F958, 'M', u'䄯'),
    (0x2F959, 'M', u'穀'),
    (0x2F95A, 'M', u'穊'),
    (0x2F95B, 'M', u'穏'),
    (0x2F95C, 'M', u'𥥼'),
    (0x2F95D, 'M', u'𥪧'),
    (0x2F95F, 'X'),
    (0x2F960, 'M', u'䈂'),
    (0x2F961, 'M', u'𥮫'),
    (0x2F962, 'M', u'篆'),
    (0x2F963, 'M', u'築'),
    (0x2F964, 'M', u'䈧'),
    (0x2F965, 'M', u'𥲀'),
    (0x2F966, 'M', u'糒'),
    (0x2F967, 'M', u'䊠'),
    (0x2F968, 'M', u'糨'),
    (0x2F969, 'M', u'糣'),
    (0x2F96A, 'M', u'紀'),
    (0x2F96B, 'M', u'𥾆'),
    (0x2F96C, 'M', u'絣'),
    (0x2F96D, 'M', u'䌁'),
    (0x2F96E, 'M', u'緇'),
    (0x2F96F, 'M', u'縂'),
    (0x2F970, 'M', u'繅'),
    (0x2F971, 'M', u'䌴'),
    (0x2F972, 'M', u'𦈨'),
    (0x2F973, 'M', u'𦉇'),
    (0x2F974, 'M', u'䍙'),
    (0x2F975, 'M', u'𦋙'),
    (0x2F976, 'M', u'罺'),
    (0x2F977, 'M', u'𦌾'),
    (0x2F978, 'M', u'羕'),
    (0x2F979, 'M', u'翺'),
    (0x2F97A, 'M', u'者'),
    (0x2F97B, 'M', u'𦓚'),
    (0x2F97C, 'M', u'𦔣'),
    (0x2F97D, 'M', u'聠'),
    (0x2F97E, 'M', u'𦖨'),
    (0x2F97F, 'M', u'聰'),
    ]

def _seg_71():
    # Machine-generated UTS #46 mapping-table segment: rows are
    # (codepoint, status) or (codepoint, status, replacement).
    return [
    (0x2F980, 'M', u'𣍟'),
    (0x2F981, 'M', u'䏕'),
    (0x2F982, 'M', u'育'),
    (0x2F983, 'M', u'脃'),
    (0x2F984, 'M', u'䐋'),
    (0x2F985, 'M', u'脾'),
    (0x2F986, 'M', u'媵'),
    (0x2F987, 'M', u'𦞧'),
    (0x2F988, 'M', u'𦞵'),
    (0x2F989, 'M', u'𣎓'),
    (0x2F98A, 'M', u'𣎜'),
    (0x2F98B, 'M', u'舁'),
    (0x2F98C, 'M', u'舄'),
    (0x2F98D, 'M', u'辞'),
    (0x2F98E, 'M', u'䑫'),
    (0x2F98F, 'M', u'芑'),
    (0x2F990, 'M', u'芋'),
    (0x2F991, 'M', u'芝'),
    (0x2F992, 'M', u'劳'),
    (0x2F993, 'M', u'花'),
    (0x2F994, 'M', u'芳'),
    (0x2F995, 'M', u'芽'),
    (0x2F996, 'M', u'苦'),
    (0x2F997, 'M', u'𦬼'),
    (0x2F998, 'M', u'若'),
    (0x2F999, 'M', u'茝'),
    (0x2F99A, 'M', u'荣'),
    (0x2F99B, 'M', u'莭'),
    (0x2F99C, 'M', u'茣'),
    (0x2F99D, 'M', u'莽'),
    (0x2F99E, 'M', u'菧'),
    (0x2F99F, 'M', u'著'),
    (0x2F9A0, 'M', u'荓'),
    (0x2F9A1, 'M', u'菊'),
    (0x2F9A2, 'M', u'菌'),
    (0x2F9A3, 'M', u'菜'),
    (0x2F9A4, 'M', u'𦰶'),
    (0x2F9A5, 'M', u'𦵫'),
    (0x2F9A6, 'M', u'𦳕'),
    (0x2F9A7, 'M', u'䔫'),
    (0x2F9A8, 'M', u'蓱'),
    (0x2F9A9, 'M', u'蓳'),
    (0x2F9AA, 'M', u'蔖'),
    (0x2F9AB, 'M', u'𧏊'),
    (0x2F9AC, 'M', u'蕤'),
    (0x2F9AD, 'M', u'𦼬'),
    (0x2F9AE, 'M', u'䕝'),
    (0x2F9AF, 'M', u'䕡'),
    (0x2F9B0, 'M', u'𦾱'),
    (0x2F9B1, 'M', u'𧃒'),
    (0x2F9B2, 'M', u'䕫'),
    (0x2F9B3, 'M', u'虐'),
    (0x2F9B4, 'M', u'虜'),
    (0x2F9B5, 'M', u'虧'),
    (0x2F9B6, 'M', u'虩'),
    (0x2F9B7, 'M', u'蚩'),
    (0x2F9B8, 'M', u'蚈'),
    (0x2F9B9, 'M', u'蜎'),
    (0x2F9BA, 'M', u'蛢'),
    (0x2F9BB, 'M', u'蝹'),
    (0x2F9BC, 'M', u'蜨'),
    (0x2F9BD, 'M', u'蝫'),
    (0x2F9BE, 'M', u'螆'),
    (0x2F9BF, 'X'),
    (0x2F9C0, 'M', u'蟡'),
    (0x2F9C1, 'M', u'蠁'),
    (0x2F9C2, 'M', u'䗹'),
    (0x2F9C3, 'M', u'衠'),
    (0x2F9C4, 'M', u'衣'),
    (0x2F9C5, 'M', u'𧙧'),
    (0x2F9C6, 'M', u'裗'),
    (0x2F9C7, 'M', u'裞'),
    (0x2F9C8, 'M', u'䘵'),
    (0x2F9C9, 'M', u'裺'),
    (0x2F9CA, 'M', u'㒻'),
    (0x2F9CB, 'M', u'𧢮'),
    (0x2F9CC, 'M', u'𧥦'),
    (0x2F9CD, 'M', u'䚾'),
    (0x2F9CE, 'M', u'䛇'),
    (0x2F9CF, 'M', u'誠'),
    (0x2F9D0, 'M', u'諭'),
    (0x2F9D1, 'M', u'變'),
    (0x2F9D2, 'M', u'豕'),
    (0x2F9D3, 'M', u'𧲨'),
    (0x2F9D4, 'M', u'貫'),
    (0x2F9D5, 'M', u'賁'),
    (0x2F9D6, 'M', u'贛'),
    (0x2F9D7, 'M', u'起'),
    (0x2F9D8, 'M', u'𧼯'),
    (0x2F9D9, 'M', u'𠠄'),
    (0x2F9DA, 'M', u'跋'),
    (0x2F9DB, 'M', u'趼'),
    (0x2F9DC, 'M', u'跰'),
    (0x2F9DD, 'M', u'𠣞'),
    (0x2F9DE, 'M', u'軔'),
    (0x2F9DF, 'M', u'輸'),
    (0x2F9E0, 'M', u'𨗒'),
    (0x2F9E1, 'M', u'𨗭'),
    (0x2F9E2, 'M', u'邔'),
    (0x2F9E3, 'M', u'郱'),
    ]

def _seg_72():
    # Final machine-generated UTS #46 mapping-table segment: rows are
    # (codepoint, status) or (codepoint, status, replacement).
    return [
    (0x2F9E4, 'M', u'鄑'),
    (0x2F9E5, 'M', u'𨜮'),
    (0x2F9E6, 'M', u'鄛'),
    (0x2F9E7, 'M', u'鈸'),
    (0x2F9E8, 'M', u'鋗'),
    (0x2F9E9, 'M', u'鋘'),
    (0x2F9EA, 'M', u'鉼'),
    (0x2F9EB, 'M', u'鏹'),
    (0x2F9EC, 'M', u'鐕'),
    (0x2F9ED, 'M', u'𨯺'),
    (0x2F9EE, 'M', u'開'),
    (0x2F9EF, 'M', u'䦕'),
    (0x2F9F0, 'M', u'閷'),
    (0x2F9F1, 'M', u'𨵷'),
    (0x2F9F2, 'M', u'䧦'),
    (0x2F9F3, 'M', u'雃'),
    (0x2F9F4, 'M', u'嶲'),
    (0x2F9F5, 'M', u'霣'),
    (0x2F9F6, 'M', u'𩅅'),
    (0x2F9F7, 'M', u'𩈚'),
    (0x2F9F8, 'M', u'䩮'),
    (0x2F9F9, 'M', u'䩶'),
    (0x2F9FA, 'M', u'韠'),
    (0x2F9FB, 'M', u'𩐊'),
    (0x2F9FC, 'M', u'䪲'),
    (0x2F9FD, 'M', u'𩒖'),
    (0x2F9FE, 'M', u'頋'),
    (0x2FA00, 'M', u'頩'),
    (0x2FA01, 'M', u'𩖶'),
    (0x2FA02, 'M', u'飢'),
    (0x2FA03, 'M', u'䬳'),
    (0x2FA04, 'M', u'餩'),
    (0x2FA05, 'M', u'馧'),
    (0x2FA06, 'M', u'駂'),
    (0x2FA07, 'M', u'駾'),
    (0x2FA08, 'M', u'䯎'),
    (0x2FA09, 'M', u'𩬰'),
    (0x2FA0A, 'M', u'鬒'),
    (0x2FA0B, 'M', u'鱀'),
    (0x2FA0C, 'M', u'鳽'),
    (0x2FA0D, 'M', u'䳎'),
    (0x2FA0E, 'M', u'䳭'),
    (0x2FA0F, 'M', u'鵧'),
    (0x2FA10, 'M', u'𪃎'),
    (0x2FA11, 'M', u'䳸'),
    (0x2FA12, 'M', u'𪄅'),
    (0x2FA13, 'M', u'𪈎'),
    (0x2FA14, 'M', u'𪊑'),
    (0x2FA15, 'M', u'麻'),
    (0x2FA16, 'M', u'䵖'),
    (0x2FA17, 'M', u'黹'),
    (0x2FA18, 'M', u'黾'),
    (0x2FA19, 'M', u'鼅'),
    (0x2FA1A, 'M', u'鼏'),
    (0x2FA1B, 'M', u'鼖'),
    (0x2FA1C, 'M', u'鼻'),
    (0x2FA1D, 'M', u'𪘀'),
    (0x2FA1E, 'X'),
    (0xE0100, 'I'),
    (0xE01F0, 'X'),
    ]

# Complete UTS #46 mapping table, assembled from the generated _seg_* chunks.
# The data appears to be split across many small functions so no single code
# object holds the whole literal — TODO confirm against the idna generator.
uts46data = tuple(
    _seg_0()
    + _seg_1()
    + _seg_2()
    + _seg_3()
    + _seg_4()
    + _seg_5()
    + _seg_6()
    + _seg_7()
    + _seg_8()
    + _seg_9()
    + _seg_10()
    + _seg_11()
    + _seg_12()
    + _seg_13()
    + _seg_14()
    + _seg_15()
    + _seg_16()
    + _seg_17()
    + _seg_18()
    + _seg_19()
    + _seg_20()
    + _seg_21()
    + _seg_22()
    + _seg_23()
    + _seg_24()
    + _seg_25()
    + _seg_26()
    + _seg_27()
    + _seg_28()
    + _seg_29()
    + _seg_30()
    + _seg_31()
    + _seg_32()
    + _seg_33()
    + _seg_34()
    + _seg_35()
    + _seg_36()
    + _seg_37()
    + _seg_38()
    + _seg_39()
    + _seg_40()
    + _seg_41()
    + _seg_42()
    + _seg_43()
    + _seg_44()
    + _seg_45()
    + _seg_46()
    + _seg_47()
    + _seg_48()
    + _seg_49()
    + _seg_50()
    + _seg_51()
    + _seg_52()
    + _seg_53()
    + _seg_54()
    + _seg_55()
    + _seg_56()
    + _seg_57()
    + _seg_58()
    + _seg_59()
    + _seg_60()
    + _seg_61()
    + _seg_62()
    + _seg_63()
    + _seg_64()
    + _seg_65()
    + _seg_66()
    + _seg_67()
    + _seg_68()
    + _seg_69()
    + _seg_70()
    + _seg_71()
    + _seg_72()
)
_vendor/idna/__init__.py000064400000000072151733136500011227 0ustar00from .package_data import __version__
from .core import *
_vendor/idna/codec.py000064400000006343151733136500010554 0ustar00from .core import encode, decode, alabel, ulabel, IDNAError
import codecs
import re

# Label separators recognised by IDNA: FULL STOP, IDEOGRAPHIC FULL STOP,
# FULLWIDTH FULL STOP, HALFWIDTH IDEOGRAPHIC FULL STOP.
_unicode_dots_re = re.compile(u'[\u002e\u3002\uff0e\uff61]')

class Codec(codecs.Codec):
    """Stateless IDNA codec: whole-string conversion via the core
    ``encode()``/``decode()`` functions."""

    def encode(self, data, errors='strict'):
        """Return ``(encoded, consumed)`` for *data*; only 'strict' errors."""
        if errors != 'strict':
            raise IDNAError("Unsupported error handling \"{0}\"".format(errors))
        # Empty input short-circuits: nothing to encode, nothing consumed.
        return (encode(data), len(data)) if data else ("", 0)

    def decode(self, data, errors='strict'):
        """Return ``(decoded, consumed)`` for *data*; only 'strict' errors."""
        if errors != 'strict':
            raise IDNAError("Unsupported error handling \"{0}\"".format(errors))
        return (decode(data), len(data)) if data else (u"", 0)

class IncrementalEncoder(codecs.BufferedIncrementalEncoder):
    # Streaming IDNA encoder: encodes complete labels as they arrive and
    # buffers a trailing, possibly unfinished label until the next call.
    def _buffer_encode(self, data, errors, final):
        # Only strict error handling is supported by this codec.
        if errors != 'strict':
            raise IDNAError("Unsupported error handling \"{0}\"".format(errors))

        if not data:
            return ("", 0)

        # Split on any IDNA dot separator (., 。, ．, ｡).
        labels = _unicode_dots_re.split(data)
        trailing_dot = u''
        if labels:
            if not labels[-1]:
                # Input ended on a separator: every label is complete.
                trailing_dot = '.'
                del labels[-1]
            elif not final:
                # Keep potentially unfinished label until the next call
                del labels[-1]
                if labels:
                    trailing_dot = '.'

        result = []
        size = 0
        # `size` counts consumed input characters: each label's length plus
        # one separator between consecutive labels.
        for label in labels:
            result.append(alabel(label))
            if size:
                size += 1
            size += len(label)

        # Join with U+002E
        # NOTE(review): if alabel() returns bytes (as in idna.core),
        # ".".join(result) would raise TypeError on Python 3 — confirm
        # before relying on the incremental encoder there.
        result = ".".join(result) + trailing_dot
        size += len(trailing_dot)
        return (result, size)

class IncrementalDecoder(codecs.BufferedIncrementalDecoder):
    """Streaming IDNA decoder.

    Decodes complete labels as data arrives and buffers a trailing,
    potentially unfinished label until the next call (or until *final*).
    """

    def _buffer_decode(self, data, errors, final):
        # Only strict error handling is supported by this codec.
        if errors != 'strict':
            raise IDNAError("Unsupported error handling \"{0}\"".format(errors))

        if not data:
            return (u"", 0)

        # BUG FIX: the original referenced the bare name ``unicode``, which
        # does not exist on Python 3 and raised NameError for any non-empty
        # input.  Resolve the text type once, falling back to ``str``.
        try:
            text_type = unicode          # Python 2
        except NameError:
            text_type = str              # Python 3

        # IDNA allows decoding to operate on Unicode strings, too.
        if isinstance(data, text_type):
            labels = _unicode_dots_re.split(data)
        else:
            # Byte input must be pure ASCII; decoding both converts and
            # validates it (UnicodeDecodeError otherwise).
            data = bytes(data).decode("ascii")
            labels = data.split(u".")

        trailing_dot = u''
        if labels:
            if not labels[-1]:
                # Input ended on a separator: every label is complete.
                trailing_dot = u'.'
                del labels[-1]
            elif not final:
                # Keep potentially unfinished label until the next call
                del labels[-1]
                if labels:
                    trailing_dot = u'.'

        result = []
        size = 0
        # `size` counts consumed input characters: each label's length plus
        # one separator between consecutive labels.
        for label in labels:
            result.append(ulabel(label))
            if size:
                size += 1
            size += len(label)

        # Join with U+002E and report how much input was consumed.
        result = u".".join(result) + trailing_dot
        size += len(trailing_dot)
        return (result, size)


class StreamWriter(Codec, codecs.StreamWriter):
    # Stream writer: the stateless Codec combined with codecs' stream plumbing.
    pass

class StreamReader(Codec, codecs.StreamReader):
    # Stream reader: the stateless Codec combined with codecs' stream plumbing.
    pass

def getregentry():
    # Codec-registry hook: returns the CodecInfo describing this module's
    # encoder/decoder classes (for use with codecs.register/lookup).
    return codecs.CodecInfo(
        name='idna',
        encode=Codec().encode,
        decode=Codec().decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamwriter=StreamWriter,
        streamreader=StreamReader,
    )
_vendor/idna/compat.py000064400000000350151733136500010752 0ustar00from .core import *
from .codec import *

def ToASCII(label):
    # Compatibility shim (IDNA2003-style API name) around core encode().
    return encode(label)

def ToUnicode(label):
    # Compatibility shim (IDNA2003-style API name) around core decode().
    return decode(label)

def nameprep(s):
    # Deliberately unimplemented: IDNA 2008 does not use the nameprep step,
    # so legacy callers fail loudly instead of getting wrong results.
    raise NotImplementedError("IDNA 2008 does not utilise nameprep protocol")

_vendor/idna/core.py000064400000026176151733136500010435 0ustar00from . import idnadata
import bisect
import unicodedata
import re
import sys
from .intranges import intranges_contain

# Unicode canonical combining class assigned to virama codepoints.
_virama_combining_class = 9
# ACE prefix that marks an A-label.
_alabel_prefix = b'xn--'
# All codepoints IDNA treats as label separators.
_unicode_dots_re = re.compile(u'[\u002e\u3002\uff0e\uff61]')

# Python 3 compatibility: alias the Python 2 text-type/constructor names so
# the rest of the module can use `unicode`/`unichr` on both major versions.
if sys.version_info[0] == 3:
    unicode = str
    unichr = chr

class IDNAError(UnicodeError):
    """ Base exception for all IDNA-encoding related problems
    (subclasses UnicodeError so generic Unicode handlers still catch it) """
    pass


class IDNABidiError(IDNAError):
    """ Exception when bidirectional requirements are not satisfied """
    pass


class InvalidCodepoint(IDNAError):
    """ Exception when a disallowed or unallocated codepoint is used """
    pass


class InvalidCodepointContext(IDNAError):
    """ Exception when the codepoint is not valid in the context it is used """
    pass


def _combining_class(cp):
    """Return the Unicode canonical combining class of codepoint *cp*."""
    ch = unichr(cp)
    return unicodedata.combining(ch)

def _is_script(cp, script):
    """True if character *cp* falls in one of *script*'s codepoint ranges."""
    ranges = idnadata.scripts[script]
    return intranges_contain(ord(cp), ranges)

def _punycode(s):
    return s.encode('punycode')

def _unot(s):
    return 'U+{0:04X}'.format(s)


def valid_label_length(label):
    """True if *label* fits the 63-character DNS label limit."""
    return len(label) <= 63


def valid_string_length(label, trailing_dot):
    """True if the whole name fits the DNS length limit: 253 characters,
    or 254 when a trailing dot is present."""
    limit = 254 if trailing_dot else 253
    return len(label) <= limit


def check_bidi(label, check_ltr=False):
    """Check that `label` satisfies the Bidi rules of RFC 5893.

    :param label: unicode label to validate.
    :param check_ltr: if True, apply the rules even when the label
        contains no right-to-left codepoints.
    :raises IDNABidiError: if a rule is violated or a codepoint has
        unknown directionality.
    :return: True when the label passes (or the rules do not apply).
    """

    # Bidi rules should only be applied if string contains RTL characters
    bidi_label = False
    for (idx, cp) in enumerate(label, 1):
        direction = unicodedata.bidirectional(cp)
        if direction == '':
            # String likely comes from a newer version of Unicode
            raise IDNABidiError('Unknown directionality in label {0} at position {1}'.format(repr(label), idx))
        if direction in ['R', 'AL', 'AN']:
            bidi_label = True
            break
    if not bidi_label and not check_ltr:
        return True

    # Bidi rule 1: the first character fixes the label's direction.
    direction = unicodedata.bidirectional(label[0])
    if direction in ['R', 'AL']:
        rtl = True
    elif direction == 'L':
        rtl = False
    else:
        raise IDNABidiError('First codepoint in label {0} must be directionality L, R or AL'.format(repr(label)))

    valid_ending = False
    # Records which digit family (AN or EN) was seen first (rule 4).
    number_type = False
    for (idx, cp) in enumerate(label, 1):
        direction = unicodedata.bidirectional(cp)

        if rtl:
            # Bidi rule 2: characters permitted in an RTL label.
            if not direction in ['R', 'AL', 'AN', 'EN', 'ES', 'CS', 'ET', 'ON', 'BN', 'NSM']:
                raise IDNABidiError('Invalid direction for codepoint at position {0} in a right-to-left label'.format(idx))
            # Bidi rule 3: label must end in R/AL/EN/AN (trailing NSMs ok).
            if direction in ['R', 'AL', 'EN', 'AN']:
                valid_ending = True
            elif direction != 'NSM':
                valid_ending = False
            # Bidi rule 4: AN and EN digits must not be mixed.
            if direction in ['AN', 'EN']:
                if not number_type:
                    number_type = direction
                else:
                    if number_type != direction:
                        raise IDNABidiError('Can not mix numeral types in a right-to-left label')
        else:
            # Bidi rule 5: characters permitted in an LTR label.
            if not direction in ['L', 'EN', 'ES', 'CS', 'ET', 'ON', 'BN', 'NSM']:
                raise IDNABidiError('Invalid direction for codepoint at position {0} in a left-to-right label'.format(idx))
            # Bidi rule 6: label must end in L or EN (trailing NSMs ok).
            if direction in ['L', 'EN']:
                valid_ending = True
            elif direction != 'NSM':
                valid_ending = False

    if not valid_ending:
        raise IDNABidiError('Label ends with illegal codepoint directionality')

    return True


def check_initial_combiner(label):
    """Reject labels whose first codepoint is a combining mark (category M*)."""
    first_category = unicodedata.category(label[0])
    if first_category.startswith('M'):
        raise IDNAError('Label begins with an illegal combining character')
    return True


def check_hyphen_ok(label):
    """Enforce the RFC 5891 hyphen restrictions on a non-empty label."""
    # Positions 3-4 are reserved for the ACE prefix ('xn--').
    if label[2:4] == '--':
        raise IDNAError('Label has disallowed hyphens in 3rd and 4th position')
    # No leading or trailing hyphen.
    if '-' in (label[0], label[-1]):
        raise IDNAError('Label must not start or end with a hyphen')
    return True


def check_nfc(label):
    """Raise IDNAError unless `label` is already in Normalization Form C."""
    normalized = unicodedata.normalize('NFC', label)
    if normalized != label:
        raise IDNAError('Label must be in Normalization Form C')


def valid_contextj(label, pos):
    """Apply the CONTEXTJ rules (RFC 5892 Appendix A) to the joiner at
    `label[pos]`.

    Only ZERO WIDTH NON-JOINER (U+200C) and ZERO WIDTH JOINER (U+200D)
    are CONTEXTJ codepoints; any other codepoint returns False.
    """

    cp_value = ord(label[pos])

    if cp_value == 0x200c:

        # ZWNJ is valid directly after a virama...
        if pos > 0:
            if _combining_class(ord(label[pos - 1])) == _virama_combining_class:
                return True

        # ...or between a left/dual-joining character and a
        # right/dual-joining character, skipping transparent (T)
        # characters on either side. First scan left for L or D:
        ok = False
        for i in range(pos-1, -1, -1):
            joining_type = idnadata.joining_types.get(ord(label[i]))
            if joining_type == ord('T'):
                continue
            if joining_type in [ord('L'), ord('D')]:
                ok = True
                break

        if not ok:
            return False

        # Then scan right for R or D:
        ok = False
        for i in range(pos+1, len(label)):
            joining_type = idnadata.joining_types.get(ord(label[i]))
            if joining_type == ord('T'):
                continue
            if joining_type in [ord('R'), ord('D')]:
                ok = True
                break
        return ok

    if cp_value == 0x200d:

        # ZWJ is only valid directly after a virama.
        if pos > 0:
            if _combining_class(ord(label[pos - 1])) == _virama_combining_class:
                return True
        return False

    else:

        return False


def valid_contexto(label, pos, exception=False):
    """Apply the CONTEXTO rules (RFC 5892 Appendix A) to `label[pos]`.

    Covers MIDDLE DOT (U+00B7), GREEK LOWER NUMERAL SIGN (U+0375),
    HEBREW GERESH/GERSHAYIM (U+05F3/U+05F4), KATAKANA MIDDLE DOT
    (U+30FB) and the two Arabic-Indic digit ranges. Falls through and
    returns None (falsy) for codepoints with no CONTEXTO rule here.
    `exception` is accepted for API compatibility but unused.
    """

    cp_value = ord(label[pos])

    if cp_value == 0x00b7:
        # MIDDLE DOT: only between two 'l' characters (Catalan l·l).
        if 0 < pos < len(label)-1:
            if ord(label[pos - 1]) == 0x006c and ord(label[pos + 1]) == 0x006c:
                return True
        return False

    elif cp_value == 0x0375:
        # GREEK LOWER NUMERAL SIGN: must be followed by a Greek character.
        if pos < len(label)-1 and len(label) > 1:
            return _is_script(label[pos + 1], 'Greek')
        return False

    elif cp_value == 0x05f3 or cp_value == 0x05f4:
        # HEBREW GERESH / GERSHAYIM: must follow a Hebrew character.
        if pos > 0:
            return _is_script(label[pos - 1], 'Hebrew')
        return False

    elif cp_value == 0x30fb:
        # KATAKANA MIDDLE DOT: the label must contain at least one
        # Hiragana, Katakana or Han character.
        for cp in label:
            if cp == u'\u30fb':
                continue
            if _is_script(cp, 'Hiragana') or _is_script(cp, 'Katakana') or _is_script(cp, 'Han'):
                return True
        return False

    elif 0x660 <= cp_value <= 0x669:
        # ARABIC-INDIC DIGITS: must not be mixed with the extended set.
        for cp in label:
            if 0x6f0 <= ord(cp) <= 0x06f9:
                return False
        return True

    elif 0x6f0 <= cp_value <= 0x6f9:
        # EXTENDED ARABIC-INDIC DIGITS: must not be mixed with the basic set.
        for cp in label:
            if 0x660 <= ord(cp) <= 0x0669:
                return False
        return True


def check_label(label):
    """Validate a single label against the IDNA 2008 rules.

    Bytes input is decoded as UTF-8 first. Checks, in order: non-empty,
    NFC normalization, hyphen placement, no leading combining mark, the
    per-codepoint PVALID/CONTEXTJ/CONTEXTO classification, and finally
    the Bidi rules.

    :raises IDNAError: (or a subclass thereof) when any check fails.
    """

    if isinstance(label, (bytes, bytearray)):
        label = label.decode('utf-8')
    if len(label) == 0:
        raise IDNAError('Empty Label')

    check_nfc(label)
    check_hyphen_ok(label)
    check_initial_combiner(label)

    for (pos, cp) in enumerate(label):
        cp_value = ord(cp)
        if intranges_contain(cp_value, idnadata.codepoint_classes['PVALID']):
            continue
        elif intranges_contain(cp_value, idnadata.codepoint_classes['CONTEXTJ']):
            if not valid_contextj(label, pos):
                raise InvalidCodepointContext('Joiner {0} not allowed at position {1} in {2}'.format(_unot(cp_value), pos+1, repr(label)))
        elif intranges_contain(cp_value, idnadata.codepoint_classes['CONTEXTO']):
            if not valid_contexto(label, pos):
                raise InvalidCodepointContext('Codepoint {0} not allowed at position {1} in {2}'.format(_unot(cp_value), pos+1, repr(label)))
        else:
            raise InvalidCodepoint('Codepoint {0} at position {1} of {2} not allowed'.format(_unot(cp_value), pos+1, repr(label)))

    check_bidi(label)


def alabel(label):
    """Convert a single label to its ASCII ("A-label") form as bytes.

    A label that already encodes to ASCII is round-trip validated via
    ulabel() and returned as-is; otherwise the text label is validated
    with check_label() and punycode-encoded behind the 'xn--' ACE prefix.

    :raises IDNAError: if the label is empty, too long, or not valid.
    """

    try:
        label = label.encode('ascii')
        try:
            ulabel(label)
        except IDNAError:
            raise IDNAError('The label {0} is not a valid A-label'.format(label))
        if not valid_label_length(label):
            raise IDNAError('Label too long')
        return label
    except UnicodeEncodeError:
        # Non-ASCII input: fall through to the encoding path below.
        pass

    if not label:
        raise IDNAError('No Input')

    label = unicode(label)
    check_label(label)
    label = _punycode(label)
    label = _alabel_prefix + label

    if not valid_label_length(label):
        raise IDNAError('Label too long')

    return label


def ulabel(label):
    """Convert a single label to its Unicode ("U-label") form.

    ASCII labels carrying the 'xn--' ACE prefix are punycode-decoded and
    validated; ASCII labels without the prefix are validated and
    returned as text. Non-ASCII text input is validated as-is.

    :raises IDNAError: if the (decoded) label fails validation.
    """

    if not isinstance(label, (bytes, bytearray)):
        try:
            label = label.encode('ascii')
        except UnicodeEncodeError:
            # Already a non-ASCII U-label: just validate and return it.
            check_label(label)
            return label

    label = label.lower()
    if label.startswith(_alabel_prefix):
        # Strip the ACE prefix before punycode-decoding below.
        label = label[len(_alabel_prefix):]
    else:
        check_label(label)
        return label.decode('ascii')

    label = label.decode('punycode')
    check_label(label)
    return label


def uts46_remap(domain, std3_rules=True, transitional=False):
    """Re-map the characters in the string according to UTS46 processing.

    Each codepoint is looked up in the UTS 46 mapping table and handled
    by its status: 'V' (valid) and 'D' (deviation, when not
    transitional) pass through; 'M' (mapped) and 'D' (when transitional)
    are replaced; 'I' (ignored) is dropped; '3' (disallowed under STD3
    rules) is only permitted/mapped when std3_rules is False, per the
    UTS #46 UseSTD3ASCIIRules flag; everything else is disallowed.

    :param domain: domain name to remap.
    :param std3_rules: if True, enforce STD3 ASCII rules, i.e. treat
        status-'3' codepoints as disallowed.
    :param transitional: use transitional processing for deviation chars.
    :return: the remapped domain, NFC-normalized.
    :raises InvalidCodepoint: for a disallowed codepoint.
    """
    from .uts46data import uts46data
    output = u""
    try:
        for pos, char in enumerate(domain):
            code_point = ord(char)
            # Codepoints < 256 index the table directly; others are found
            # by bisecting the sorted (codepoint, status, ...) rows.
            uts46row = uts46data[code_point if code_point < 256 else
                bisect.bisect_left(uts46data, (code_point, "Z")) - 1]
            status = uts46row[1]
            replacement = uts46row[2] if len(uts46row) == 3 else None
            # BUGFIX: status '3' means "disallowed under STD3 rules", so it
            # may only be kept/mapped when std3_rules is *off*. The previous
            # code tested `std3_rules` instead of `not std3_rules`, inverting
            # the flag (fixed upstream in later idna releases).
            if (status == "V" or
                    (status == "D" and not transitional) or
                    (status == "3" and not std3_rules and replacement is None)):
                output += char
            elif replacement is not None and (status == "M" or
                    (status == "3" and not std3_rules) or
                    (status == "D" and transitional)):
                output += replacement
            elif status != "I":
                raise IndexError()
        return unicodedata.normalize("NFC", output)
    except IndexError:
        raise InvalidCodepoint(
            "Codepoint {0} not allowed at position {1} in {2}".format(
            _unot(code_point), pos + 1, repr(domain)))


def encode(s, strict=False, uts46=False, std3_rules=False, transitional=False):
    """Encode a domain name to its IDNA 2008 ASCII form.

    :param s: domain name (text, or ASCII-only bytes/bytearray).
    :param strict: split labels on '.' only, instead of all IDNA dot
        variants.
    :param uts46: apply UTS 46 remapping before encoding.
    :param std3_rules: passed through to uts46_remap when `uts46` is set.
    :param transitional: use UTS 46 transitional processing.
    :return: the encoded domain as bytes; a trailing root dot is kept.
    :raises IDNAError: if the domain is empty/too long or any label is
        invalid.
    """

    if isinstance(s, (bytes, bytearray)):
        s = s.decode("ascii")
    if uts46:
        s = uts46_remap(s, std3_rules, transitional)
    trailing_dot = False
    result = []
    if strict:
        labels = s.split('.')
    else:
        labels = _unicode_dots_re.split(s)
    # Leading empty labels (i.e. leading dots) are silently dropped.
    while labels and not labels[0]:
        del labels[0]
    if not labels:
        raise IDNAError('Empty domain')
    if labels[-1] == '':
        # Remember a trailing root dot so it can be re-appended below.
        del labels[-1]
        trailing_dot = True
    for label in labels:
        result.append(alabel(label))
    if trailing_dot:
        result.append(b'')
    s = b'.'.join(result)
    if not valid_string_length(s, trailing_dot):
        raise IDNAError('Domain too long')
    return s


def decode(s, strict=False, uts46=False, std3_rules=False):
    """Decode an IDNA-encoded domain name to its Unicode form.

    :param s: domain name (text, or ASCII-only bytes/bytearray).
    :param strict: split labels on '.' only, instead of all IDNA dot
        variants.
    :param uts46: apply UTS 46 remapping (non-transitional) first.
    :param std3_rules: passed through to uts46_remap when `uts46` is set.
    :return: the decoded domain as text; a trailing root dot is kept.
    :raises IDNAError: if the domain is empty or any label is invalid.
    """

    if isinstance(s, (bytes, bytearray)):
        s = s.decode("ascii")
    if uts46:
        s = uts46_remap(s, std3_rules, False)
    trailing_dot = False
    result = []
    if not strict:
        labels = _unicode_dots_re.split(s)
    else:
        labels = s.split(u'.')
    # Leading empty labels (i.e. leading dots) are silently dropped.
    while labels and not labels[0]:
        del labels[0]
    if not labels:
        raise IDNAError('Empty domain')
    if not labels[-1]:
        # Remember a trailing root dot so it can be re-appended below.
        del labels[-1]
        trailing_dot = True
    for label in labels:
        result.append(ulabel(label))
    if trailing_dot:
        result.append(u'')
    return u'.'.join(result)
_vendor/idna/package_data.py000064400000000025151733136500012052 0ustar00__version__ = '2.6'

_vendor/idna/intranges.py000064400000003325151733136500011466 0ustar00"""
Given a list of integers, made up of (hopefully) a small number of long runs
of consecutive integers, compute a representation of the form
((start1, end1), (start2, end2) ...). Then answer the question "was x present
in the original list?" in time O(log(# runs)).
"""

import bisect

def intranges_from_list(list_):
    """Represent a list of integers as a sequence of ranges:
    ((start_0, end_0), (start_1, end_1), ...), such that the original
    integers are exactly those x such that start_i <= x < end_i for some i.

    Ranges are encoded as single integers (start << 32 | end), not as tuples.
    """
    ordered = sorted(list_)
    encoded = []
    run_start = 0  # index of the first element of the current run
    for idx, value in enumerate(ordered):
        # Extend the run while the next element is exactly one greater.
        if idx + 1 < len(ordered) and ordered[idx + 1] - 1 == value:
            continue
        run = ordered[run_start:idx + 1]
        # Pack the half-open run [run[0], run[-1] + 1) into one integer.
        encoded.append((run[0] << 32) | (run[-1] + 1))
        run_start = idx + 1
    return tuple(encoded)

def _encode_range(start, end):
    return (start << 32) | end

def _decode_range(r):
    return (r >> 32), (r & ((1 << 32) - 1))


def intranges_contain(int_, ranges):
    """Determine if `int_` falls into one of the ranges in `ranges`."""
    # Probe value sorts between ranges starting before and at int_
    # (equivalent to encoding the range (int_, 0)).
    probe = int_ << 32
    pos = bisect.bisect_left(ranges, probe)
    # The range starting strictly before int_ sorts just left of the
    # probe; check whether it actually covers int_.
    if pos > 0:
        prev = ranges[pos - 1]
        if (prev >> 32) <= int_ < (prev & ((1 << 32) - 1)):
            return True
    # A range starting exactly at int_ sorts at (or right of) the probe.
    if pos < len(ranges):
        if (ranges[pos] >> 32) == int_:
            return True
    return False
_vendor/pkg_resources/__pycache__/__init__.cpython-36.pyc000064400000271767151733136500017500 0ustar003

�Pf>��^@s�dZddlmZddlZddlZddlZddlZddlZddlZddl	Z	ddl
Z
ddlZddlZddl
Z
ddlZddlZddlZddlZddlZddlZddlZddlZddlZddlmZyddlZWnek
r�ddlZYnXddlmZddlmZm Z m!Z!ddlm"Z"yddlm#Z#m$Z$m%Z%d	Z&Wnek
�rHd
Z&YnXddlm'Z(ddl)m*Z*m+Z+yddl,j-Z.e.j/Wnek
�r�dZ.YnXdd
lm0Z0ddlm1Z1e2d�e2d�e2d�e2d�d�ej3k�o�d�kn�r�dZ4ej5e4�dZ6dZ7Gdd�de8�Z9Gdd�de:�Z;Gdd�de;e1j<j=�Z>Gdd�de;e1j<j?�Z@dd�ZAiZBdd �ZCd!d"�ZDd#d$�ZEd%d&�ZFd'd(�ZGd)d*�ZHd+d,�ZId-d.�ZJZKd/d0�ZLd1d2d3d4d5d6d7d8d9d:d;d<d=d>d?d@dAdBdCdDdEdFdGdHdIdJdKdLdMdNdOdPddQddRdSdTdUdVdWdXdYdZd[d\d]d^d_d`dadbdcdddedfdgdhdidjdkdldmdndodpdqdrdsdtgFZMGdudL�dLeN�ZOGdvdM�dMeO�ZPGdwdx�dxeP�ZQGdydN�dNeO�ZRGdzdO�dOeO�ZSiZTej<dd�ZUdZVd{ZWd|ZXdZYd�ZZd}dp�Z[d~d3�Z\gfdd��Z]d�d��Z^d�d��Z_ej`d��Zaej`d��Zbe_Zcd�dU�Zdd�d2�ZeeeZfd�d4�Zgd�d5�Zhd�d�d6�Zid�d7�ZjGd�dc�dc�ZkGd�dd�ddek�ZlGd�dG�dGe:�ZmGd�d��d�en�ZoGd�dF�dFe:�ZpepZqGd�dP�dPer�ZsGd�dH�dH�Ztd�dE�Zud�dR�Zvd�dS�Zwd�dX�Zxd�dY�Zyd�dZ�Zzd�d�d[�Z{Gd�dj�dj�Z|e[e:e|�Gd�dk�dke|�Z}Gd�dl�dle}�Z~e~j�Gd�dh�dhe|�Z�e��Z�Gd�d��d�en�Z�Gd�d��d�e��Z�Gd�d��d�e	j��Z�Gd�dm�dme}�Z�e[e
j�e��Gd�de�dee��Z�Gd�df�dfe~�Z�Gd�dg�dge��Z�eCd�id��d�dn�Z�d�d�dB�Z�d�d�d��Z�e�e
j�e��d�d�d��Z�e�e:e��d�d��Z�d�d�d��Z�e�ej�e��e�e.d���r"e�e.j�e��eCd�id��eCd�id��d�do�Z�d�d��Z�d�d��Z�d�d?�Z�d�d�dq�Z�d�d��Z�e�ej�e��e�e
j�e��e�e.d���r�e�e.j�e��d�dÄZ�e�e:e��d�d]�Z�ifd�dƄZ�d�dȄZ�d�dʄZ�d�dV�Z�ej`d̃j�Z�ej`d�ej�ej�B�j�Z�Gd�dK�dKe:�Z�d�dЄZ�d�d҄Z�Gd�dI�dIe:�Z�Gd�dՄd�e��Z�Gd�dׄd�e��Z�e�e�e�d؜Z�d�dڄZ�Gd�d܄d�e��Z�d�dQ�Z�Gd�dJ�dJe1j�j��Z�d�d�Z�d�d�Z�d�d\�Z�d�d�Z�d�dW�Z�d�d�Z�ej�d�e9d	d�d�d�Z�e�e��fd�d��Z�e�d�d��Z�dS)�aZ
Package resource API
--------------------

A resource is a logical file contained within a package, or a logical
subdirectory thereof.  The package resource API expects resource names
to have their path parts separated with ``/``, *not* whatever the local
path separator is.  Do not use os.path operations to manipulate resource
names being passed into the API.

The package resource API is designed to work with normal filesystem packages,
.egg files, and unpacked .egg files.  It can also work in a limited way with
.zip files and with custom PEP 302 loaders that support the ``get_data()``
method.
�)�absolute_importN)�get_importer)�six)�urllib�map�filter)�utime)�mkdir�rename�unlinkTF)�open)�isdir�split)�appdirs)�	packagingzpip._vendor.packaging.versionz pip._vendor.packaging.specifiersz"pip._vendor.packaging.requirementszpip._vendor.packaging.markers�zLSupport for Python 3.0-3.2 has been dropped. Future versions will fail here.c@seZdZdZdS)�
PEP440Warningza
    Used when there is an issue with a version or specifier not complying with
    PEP 440.
    N)�__name__�
__module__�__qualname__�__doc__�rr�/usr/lib/python3.6/__init__.pyr[srcsteZdZ�fdd�Z�fdd�Z�fdd�Z�fdd�Z�fd	d
�Z�fdd�Z�fd
d�Z	dd�Z
dd�Z�ZS)�_SetuptoolsVersionMixincstt|�j�S)N)�superr�__hash__)�self)�	__class__rrrcsz _SetuptoolsVersionMixin.__hash__cs*t|t�rt|�|kStt|�j|�SdS)N)�
isinstance�tuplerr�__lt__)r�other)rrrr fs
z_SetuptoolsVersionMixin.__lt__cs*t|t�rt|�|kStt|�j|�SdS)N)rrrr�__le__)rr!)rrrr"ls
z_SetuptoolsVersionMixin.__le__cs*t|t�rt|�|kStt|�j|�SdS)N)rrrr�__eq__)rr!)rrrr#rs
z_SetuptoolsVersionMixin.__eq__cs*t|t�rt|�|kStt|�j|�SdS)N)rrrr�__ge__)rr!)rrrr$xs
z_SetuptoolsVersionMixin.__ge__cs*t|t�rt|�|kStt|�j|�SdS)N)rrrr�__gt__)rr!)rrrr%~s
z_SetuptoolsVersionMixin.__gt__cs*t|t�rt|�|kStt|�j|�SdS)N)rrrr�__ne__)rr!)rrrr&�s
z_SetuptoolsVersionMixin.__ne__cCst|�|S)N)r)r�keyrrr�__getitem__�sz#_SetuptoolsVersionMixin.__getitem__c#sjtjdtj��dddddd�j���fdd���fdd	�}tjd
tdd�x|t|��D]
}|VqXWdS)
Nz(\d+ | [a-z]+ | \.| -)�czfinal-�@)ZpreZpreview�-ZrcZdevc3s`xT�j|�D]F}�||�}|s|dkr*q|dd�dkrH|jd�Vqd|VqWdVdS)N�.��
0123456789��*z*final)r�zfill)�s�part)�component_re�replacerr�_parse_version_parts�s
z>_SetuptoolsVersionMixin.__iter__.<locals>._parse_version_partscszg}xl�|j��D]\}|jd�rd|dkrFx|rD|ddkrD|j�q*Wx|rb|ddkrb|j�qHW|j|�qWt|�S)Nr0z*finalr-z*final-Z00000000���r7)�lower�
startswith�pop�appendr)r2�partsr3)r6rr�old_parse_version�s
z;_SetuptoolsVersionMixin.__iter__.<locals>.old_parse_versiona�You have iterated over the result of pkg_resources.parse_version. This is a legacy behavior which is inconsistent with the new version class introduced in setuptools 8.0. In most cases, conversion to a tuple is unnecessary. For comparison of versions, sort the Version instances directly. If you have another use case requiring the tuple, please file a bug with the setuptools project describing that need.r-)�
stacklevel)�re�compile�VERBOSE�get�warnings�warn�RuntimeWarning�str)rr=r3r)r6r4r5r�__iter__�s
z _SetuptoolsVersionMixin.__iter__)
rrrrr r"r#r$r%r&r(rG�
__classcell__rr)rrrbsrc@seZdZdS)�SetuptoolsVersionN)rrrrrrrrI�srIc@seZdZdS)�SetuptoolsLegacyVersionN)rrrrrrrrJ�srJcCs*yt|�Stjjk
r$t|�SXdS)N)rIr�version�InvalidVersionrJ)�vrrr�
parse_version�srNcKs"t�j|�tjtj||��dS)N)�globals�update�_state_vars�dict�fromkeys)Zvartype�kwrrr�_declare_state�srUcCs<i}t�}x,tj�D] \}}|d|||�||<qW|S)NZ_sget_)rOrQ�items)�state�g�krMrrr�__getstate__�s
rZcCs<t�}x0|j�D]$\}}|dt|||||�qW|S)NZ_sset_)rOrVrQ)rWrXrYrMrrr�__setstate__�s r[cCs|j�S)N)�copy)�valrrr�
_sget_dict�sr^cCs|j�|j|�dS)N)�clearrP)r'�obrWrrr�
_sset_dict�sracCs|j�S)N)rZ)r]rrr�_sget_object�srbcCs|j|�dS)N)r[)r'r`rWrrr�_sset_object�srccGsdS)Nr)�argsrrr�<lambda>�srecCsbt�}tj|�}|dk	r^tjdkr^y&ddjt�dd��|jd�f}Wntk
r\YnX|S)aZReturn this platform's maximum compatible version.

    distutils.util.get_platform() normally reports the minimum version
    of Mac OS X that would be required to *use* extensions produced by
    distutils.  But what we want when checking compatibility is to know the
    version of Mac OS X that we are *running*.  To allow usage of packages that
    explicitly require a newer version of Mac OS X, we must also know the
    current version of the OS.

    If this condition occurs for any other platform with a version in its
    platform strings, this function should be extended accordingly.
    N�darwinzmacosx-%s-%sr,�r)	�get_build_platform�macosVersionString�match�sys�platform�join�_macosx_vers�group�
ValueError)�plat�mrrr�get_supported_platform�s

&rs�require�
run_script�get_provider�get_distribution�load_entry_point�
get_entry_map�get_entry_info�iter_entry_points�resource_string�resource_stream�resource_filename�resource_listdir�resource_exists�resource_isdir�declare_namespace�working_set�add_activation_listener�find_distributions�set_extraction_path�cleanup_resources�get_default_cache�Environment�
WorkingSet�ResourceManager�Distribution�Requirement�
EntryPoint�ResolutionError�VersionConflict�DistributionNotFound�UnknownExtra�ExtractionError�parse_requirements�	safe_name�safe_version�get_platform�compatible_platforms�yield_lines�split_sections�
safe_extra�to_filename�invalid_marker�evaluate_marker�ensure_directory�normalize_path�EGG_DIST�BINARY_DIST�SOURCE_DIST�
CHECKOUT_DIST�DEVELOP_DIST�IMetadataProvider�IResourceProvider�FileMetadata�PathMetadata�EggMetadata�
EmptyProvider�empty_provider�NullProvider�EggProvider�DefaultProvider�ZipProvider�register_finder�register_namespace_handler�register_loader_type�fixup_namespace_packagesr�run_main�AvailableDistributionsc@seZdZdZdd�ZdS)r�z.Abstract base for dependency resolution errorscCs|jjt|j�S)N)rr�reprrd)rrrr�__repr__IszResolutionError.__repr__N)rrrrr�rrrrr�Fsc@s<eZdZdZdZedd��Zedd��Zdd�Zd	d
�Z	dS)r�z�
    An already-installed version conflicts with the requested version.

    Should be initialized with the installed Distribution and the requested
    Requirement.
    z3{self.dist} is installed but {self.req} is requiredcCs
|jdS)Nr)rd)rrrr�distWszVersionConflict.distcCs
|jdS)Nr-)rd)rrrr�req[szVersionConflict.reqcCs|jjft��S)N)�	_template�format�locals)rrrr�report_szVersionConflict.reportcCs|s|S|j|f}t|�S)zt
        If required_by is non-empty, return a version of self that is a
        ContextualVersionConflict.
        )rd�ContextualVersionConflict)r�required_byrdrrr�with_contextbszVersionConflict.with_contextN)
rrrrr��propertyr�r�r�r�rrrrr�Msc@s&eZdZdZejdZedd��ZdS)r�z�
    A VersionConflict that accepts a third parameter, the set of the
    requirements that required the installed Distribution.
    z by {self.required_by}cCs
|jdS)Nrg)rd)rrrrr�usz%ContextualVersionConflict.required_byN)rrrrr�r�r�r�rrrrr�ms
r�c@sHeZdZdZdZedd��Zedd��Zedd��Zd	d
�Z	dd�Z
d
S)r�z&A requested distribution was not foundzSThe '{self.req}' distribution was not found and is required by {self.requirers_str}cCs
|jdS)Nr)rd)rrrrr��szDistributionNotFound.reqcCs
|jdS)Nr-)rd)rrrr�	requirers�szDistributionNotFound.requirerscCs|js
dSdj|j�S)Nzthe applicationz, )r�rm)rrrr�
requirers_str�sz"DistributionNotFound.requirers_strcCs|jjft��S)N)r�r�r�)rrrrr��szDistributionNotFound.reportcCs|j�S)N)r�)rrrr�__str__�szDistributionNotFound.__str__N)rrrrr�r�r�r�r�r�r�rrrrr�zsc@seZdZdZdS)r�z>Distribution doesn't have an "extra feature" of the given nameN)rrrrrrrrr��srgr-cCs|t|<dS)aRegister `provider_factory` to make providers for `loader_type`

    `loader_type` is the type or class of a PEP 302 ``module.__loader__``,
    and `provider_factory` is a function that, passed a *module* object,
    returns an ``IResourceProvider`` for that module.
    N)�_provider_factories)Zloader_typeZprovider_factoryrrrr��scCstt|t�r$tj|�p"tt|��dSytj|}Wn&tk
rXt	|�tj|}YnXt
|dd�}tt|�|�S)z?Return an IResourceProvider for the named module or requirementr�
__loader__N)
rr�r��findrtrFrk�modules�KeyError�
__import__�getattr�
_find_adapterr�)ZmoduleOrReq�module�loaderrrrrv�s
cCsd|s\tj�d}|dkrLd}tjj|�rLttd�rLtj|�}d|krL|d}|j|j	d��|dS)Nr�z0/System/Library/CoreServices/SystemVersion.plist�	readPlistZProductVersionr,)
rlZmac_ver�os�path�exists�hasattr�plistlibr�r;r)�_cacherKZplistZ
plist_contentrrrrn�s

rncCsddd�j||�S)NZppc)ZPowerPCZPower_Macintosh)rB)�machinerrr�_macosx_arch�sr�cCs�yddlm}Wn tk
r0ddlm}YnX|�}tjdkr�|jd�r�y<t�}tj	�dj
dd�}dt|d�t|d	�t|�fSt
k
r�YnX|S)
z�Return this platform's string for platform-specific distributions

    XXX Currently this is the same as ``distutils.util.get_platform()``, but it
    needs some hacks for Linux and Mac OS X.
    r)r�rfzmacosx-�� �_zmacosx-%d.%d-%sr-)�	sysconfigr��ImportErrorZdistutils.utilrkrlr9rnr��unamer5�intr�rp)r�rqrKr�rrrrh�srhzmacosx-(\d+)\.(\d+)-(.*)zdarwin-(\d+)\.(\d+)\.(\d+)-(.*)cCs�|dks|dks||krdStj|�}|r�tj|�}|s�tj|�}|r�t|jd��}d|jd�|jd�f}|dkr||dks�|dkr�|d	kr�dSd
S|jd�|jd�ks�|jd�|jd�kr�d
St|jd��t|jd��kr�d
SdSd
S)z�Can code for the `provided` platform run on the `required` platform?

    Returns true if either platform is ``None``, or the platforms are equal.

    XXX Needs compatibility checks for Linux and other unixy OSes.
    NTr-z%s.%srg�z10.3r/z10.4Fr)rirj�darwinVersionStringr�ro)ZprovidedZrequiredZreqMacZprovMacZ
provDarwinZdversionZmacosversionrrrr��s*


cCs<tjd�j}|d}|j�||d<t|�dj||�dS)z@Locate distribution `dist_spec` and run its `script_name` scriptr-rrN)rk�	_getframe�	f_globalsr_rtru)Z	dist_spec�script_name�ns�namerrrrus
cCs@t|tj�rtj|�}t|t�r(t|�}t|t�s<td|��|S)z@Return a current distribution object for a Requirement or stringz-Expected string, Requirement, or Distribution)rr�string_typesr��parservr��	TypeError)r�rrrrw)s



cCst|�j||�S)zDReturn `name` entry point of `group` for `dist` or raise ImportError)rwrx)r�ror�rrrrx4scCst|�j|�S)z=Return the entry point map for `group`, or the full entry map)rwry)r�rorrrry9scCst|�j||�S)z<Return the EntryPoint object for `group`+`name`, or ``None``)rwrz)r�ror�rrrrz>sc@s<eZdZdd�Zdd�Zdd�Zdd�Zd	d
�Zdd�Zd
S)r�cCsdS)z;Does the package's distribution contain the named metadata?Nr)r�rrr�has_metadataDszIMetadataProvider.has_metadatacCsdS)z'The named metadata resource as a stringNr)r�rrr�get_metadataGszIMetadataProvider.get_metadatacCsdS)z�Yield named metadata resource as list of non-blank non-comment lines

       Leading and trailing whitespace is stripped from each line, and lines
       with ``#`` as the first non-blank character are omitted.Nr)r�rrr�get_metadata_linesJsz$IMetadataProvider.get_metadata_linescCsdS)z>Is the named metadata a directory?  (like ``os.path.isdir()``)Nr)r�rrr�metadata_isdirPsz IMetadataProvider.metadata_isdircCsdS)z?List of metadata names in the directory (like ``os.listdir()``)Nr)r�rrr�metadata_listdirSsz"IMetadataProvider.metadata_listdircCsdS)z=Execute the named script in the supplied namespace dictionaryNr)r��	namespacerrrruVszIMetadataProvider.run_scriptN)	rrrr�r�r�r�r�rurrrrr�Csc@s@eZdZdZdd�Zdd�Zdd�Zdd	�Zd
d�Zdd
�Z	dS)r�z3An object that provides access to package resourcescCsdS)zdReturn a true filesystem path for `resource_name`

        `manager` must be an ``IResourceManager``Nr)�manager�
resource_namerrr�get_resource_filename]sz'IResourceProvider.get_resource_filenamecCsdS)ziReturn a readable file-like object for `resource_name`

        `manager` must be an ``IResourceManager``Nr)r�r�rrr�get_resource_streambsz%IResourceProvider.get_resource_streamcCsdS)zmReturn a string containing the contents of `resource_name`

        `manager` must be an ``IResourceManager``Nr)r�r�rrr�get_resource_stringgsz%IResourceProvider.get_resource_stringcCsdS)z,Does the package contain the named resource?Nr)r�rrr�has_resourcelszIResourceProvider.has_resourcecCsdS)z>Is the named resource a directory?  (like ``os.path.isdir()``)Nr)r�rrrr�osz IResourceProvider.resource_isdircCsdS)z?List of resource names in the directory (like ``os.listdir()``)Nr)r�rrrrrsz"IResourceProvider.resource_listdirN)
rrrrr�r�r�r�r�rrrrrr�Zsc@s�eZdZdZd'dd�Zedd��Zedd��Zd	d
�Zdd�Z	d
d�Z
d(dd�Zdd�Zdd�Z
d)dd�Zd*dd�Zd+dd�Zdd�Zd,dd �Zd!d"�Zd#d$�Zd%d&�ZdS)-r�zDA collection of active distributions on sys.path (or a similar list)NcCsBg|_i|_i|_g|_|dkr&tj}x|D]}|j|�q,WdS)z?Create working set from list of path entries (default=sys.path)N)�entries�
entry_keys�by_key�	callbacksrkr��	add_entry)rr��entryrrr�__init__ys
zWorkingSet.__init__cCsZ|�}yddlm}Wntk
r*|SXy|j|�Wntk
rT|j|�SX|S)z1
        Prepare the master working set.
        r)�__requires__)�__main__r�r�rtr��_build_from_requirements)�cls�wsr�rrr�
_build_master�szWorkingSet._build_mastercCsn|g�}t|�}|j|t��}x|D]}|j|�q$Wx"tjD]}||jkr>|j|�q>W|jtjdd�<|S)zQ
        Build a working set from a requirement spec. Rewrites sys.path.
        N)r��resolver��addrkr�r�r�)rZreq_specr�reqs�distsr�r�rrrr�s

z#WorkingSet._build_from_requirementscCs@|jj|g�|jj|�x t|d�D]}|j||d�q&WdS)a�Add a path item to ``.entries``, finding any distributions on it

        ``find_distributions(entry, True)`` is used to find distributions
        corresponding to the path entry, and they are added.  `entry` is
        always appended to ``.entries``, even if it is already present.
        (This is because ``sys.path`` can contain the same value more than
        once, and the ``.entries`` of the ``sys.path`` WorkingSet should always
        equal ``sys.path``.)
        TFN)r��
setdefaultr�r;r�r)rr�r�rrrr��s
zWorkingSet.add_entrycCs|jj|j�|kS)z9True if `dist` is the active distribution for its project)r�rBr')rr�rrr�__contains__�szWorkingSet.__contains__cCs,|jj|j�}|dk	r(||kr(t||��|S)a�Find a distribution matching requirement `req`

        If there is an active distribution for the requested project, this
        returns it as long as it meets the version requirement specified by
        `req`.  But, if there is an active distribution for the project and it
        does *not* meet the `req` requirement, ``VersionConflict`` is raised.
        If there is no active distribution for the requested project, ``None``
        is returned.
        N)r�rBr'r�)rr�r�rrrr��s

zWorkingSet.findccsPxJ|D]B}|j|�}|dkr6x*|j�D]
}|Vq&Wq||kr||VqWdS)aYield entry point objects from `group` matching `name`

        If `name` is None, yields all entry points in `group` from all
        distributions in the working set, otherwise only ones matching
        both `group` and `name` are yielded (in distribution order).
        N)ry�values)rror�r�r��eprrrr{�s

zWorkingSet.iter_entry_pointscCs>tjd�j}|d}|j�||d<|j|�dj||�dS)z?Locate distribution for `requires` and run `script_name` scriptr-rrN)rkr�r�r_rtru)r�requiresr�r�r�rrrru�s
zWorkingSet.run_scriptccsTi}xJ|jD]@}||jkrqx.|j|D] }||kr(d||<|j|Vq(WqWdS)z�Yield distributions for non-duplicate projects in the working set

        The yield order is the order in which the items' path entries were
        added to the working set.
        r-N)r�r�r�)r�seen�itemr'rrrrG�s
zWorkingSet.__iter__TFcCs�|r|j|j||d�|dkr$|j}|jj|g�}|jj|jg�}|rX|j|jkrXdS||j|j<|j|krz|j|j�|j|kr�|j|j�|j|�dS)aAdd `dist` to working set, associated with `entry`

        If `entry` is unspecified, it defaults to the ``.location`` of `dist`.
        On exit from this routine, `entry` is added to the end of the working
        set's ``.entries`` (if it wasn't already present).

        `dist` is only added to the working set if it's for a project that
        doesn't already have a distribution in the set, unless `replace=True`.
        If it's added, any callbacks registered with the ``subscribe()`` method
        will be called.
        )r5N)	�	insert_onr��locationr�rr'r�r;�
_added_new)rr�r��insertr5�keysZkeys2rrrr�s

zWorkingSet.addcCs|t|�ddd�}i}i}g}t�}tjt�}	�xF|�rv|jd�}
|
|krLq2|j|
�sXq2|j|
j�}|dk�r|j	j|
j�}|dks�||
kr�|r�|}|dkr�|dkr�t
|j�}nt
g�}tg�}|j
|
||�}||
j<|dkr�|	j|
d�}
t|
|
��|j|�||
k�r"|	|
}t||
�j|��|j|
j�ddd�}|j|�x(|D] }|	|j|
j�|
j||<�qHWd||
<q2W|S)aeList all distributions needed to (recursively) meet `requirements`

        `requirements` must be a sequence of ``Requirement`` objects.  `env`,
        if supplied, should be an ``Environment`` instance.  If
        not supplied, it defaults to all distributions available within any
        entry or distribution in the working set.  `installer`, if supplied,
        will be invoked with each requirement that cannot be met by an
        already-installed distribution; it should return a ``Distribution`` or
        ``None``.

        Unless `replace_conflicting=True`, raises a VersionConflict exception if
        any requirements are found on the path that have the correct name but
        the wrong version.  Otherwise, if an `installer` is supplied it will be
        invoked to obtain the correct version of the requirement and activate
        it.
        Nr-rTr7r7)�list�
_ReqExtras�collections�defaultdict�setr:�markers_passrBr'r�r�r�r��
best_matchr�r;r�r�r�extras�extendr�project_name)r�requirements�env�	installerZreplace_conflictingZ	processedZbestZto_activateZ
req_extrasr�r�r�rr�Z
dependent_reqZnew_requirementsZnew_requirementrrrrsJ









zWorkingSet.resolvecCst|�}|j�i}i}|dkr4t|j�}||7}n||}|jg�}	tt|	j|��x�|D]�}
x�||
D]x}|j�g}y|	j|||�}
Wn4t	k
r�}z|||<|r�wjnPWYdd}~XqjXtt|	j|
��|j
tj|
��PqjWq\Wt|�}|j�||fS)asFind all activatable distributions in `plugin_env`

        Example usage::

            distributions, errors = working_set.find_plugins(
                Environment(plugin_dirlist)
            )
            # add plugins+libs to sys.path
            map(working_set.add, distributions)
            # display errors
            print('Could not load', errors)

        The `plugin_env` should be an ``Environment`` instance that contains
        only distributions that are in the project's "plugin directory" or
        directories. The `full_env`, if supplied, should be an ``Environment``
        contains all currently-available distributions.  If `full_env` is not
        supplied, one is created automatically from the ``WorkingSet`` this
        method is called on, which will typically mean that every directory on
        ``sys.path`` will be scanned for distributions.

        `installer` is a standard installer callback as used by the
        ``resolve()`` method. The `fallback` flag indicates whether we should
        attempt to resolve older versions of a plugin if the newest version
        cannot be resolved.

        This method returns a 2-tuple: (`distributions`, `error_info`), where
        `distributions` is a list of the distributions found in `plugin_env`
        that were loadable, along with any other distributions that are needed
        to resolve their dependencies.  `error_info` is a dictionary mapping
        unloadable plugin distributions to an exception instance describing the
        error that occurred. Usually this will be a ``DistributionNotFound`` or
        ``VersionConflict`` instance.
        N)
r�sortr�r�rrr�as_requirementrr�rPrRrS)rZ
plugin_envZfull_envr ZfallbackZplugin_projectsZ
error_infoZ
distributionsrZ
shadow_setrr�r�Z	resolveesrMrrr�find_pluginsks4$





zWorkingSet.find_pluginscGs*|jt|��}x|D]}|j|�qW|S)a�Ensure that distributions matching `requirements` are activated

        `requirements` must be a string or a (possibly-nested) sequence
        thereof, specifying the distributions and versions required.  The
        return value is a sequence of the distributions that needed to be
        activated to fulfill the requirements; all relevant distributions are
        included, even if they were already activated in this working set.
        )rr�r)rrZneededr�rrrrt�s	
zWorkingSet.requirecCs<||jkrdS|jj|�|s"dSx|D]}||�q(WdS)z�Invoke `callback` for all distributions

        If `existing=True` (default),
        call on all existing ones, as well.
        N)r�r;)r�callback�existingr�rrr�	subscribe�s

zWorkingSet.subscribecCsx|jD]}||�qWdS)N)r�)rr�r$rrrr�szWorkingSet._added_newcCs,|jdd�|jj�|jj�|jdd�fS)N)r�r�r\r�r�)rrrrrZ�szWorkingSet.__getstate__cCs@|\}}}}|dd�|_|j�|_|j�|_|dd�|_dS)N)r�r\r�r�r�)rZe_k_b_cr�rr�r�rrrr[�s


zWorkingSet.__setstate__)N)N)NTF)NNF)NNT)T)rrrrr��classmethodrrr�r	r�r{rurGrrr#rtr&rrZr[rrrrr�vs(




Q
S
c@seZdZdZdd�ZdS)rz>
    Map each requirement to the extras that demanded it.
    cs.�fdd�|j�f�dD�}�jp,t|�S)z�
        Evaluate markers for req against each extra that
        demanded it.

        Return False if the req has a marker and fails
        evaluation. Otherwise, return True.
        c3s|]}�jjd|i�VqdS)�extraN)�marker�evaluate)�.0r()r�rr�	<genexpr>�sz*_ReqExtras.markers_pass.<locals>.<genexpr>N)N)rBr)�any)rr�Zextra_evalsr)r�rr�s	
z_ReqExtras.markers_passN)rrrrrrrrrr�src@sxeZdZdZde�efdd�Zdd�Zdd�Zdd	d
�Z	dd�Z
d
d�Zddd�Zddd�Z
dd�Zdd�Zdd�ZdS)r�z5Searchable snapshot of distributions on a search pathNcCs i|_||_||_|j|�dS)a!Snapshot distributions available on a search path

        Any distributions found on `search_path` are added to the environment.
        `search_path` should be a sequence of ``sys.path`` items.  If not
        supplied, ``sys.path`` is used.

        `platform` is an optional string specifying the name of the platform
        that platform-specific distributions must be compatible with.  If
        unspecified, it defaults to the current platform.  `python` is an
        optional string naming the desired version of Python (e.g. ``'3.3'``);
        it defaults to the current version.

        You may explicitly set `platform` (and/or `python`) to ``None`` if you
        wish to map *all* distributions, not just those compatible with the
        running platform or Python version.
        N)�_distmaprl�python�scan)r�search_pathrlr/rrrr�szEnvironment.__init__cCs.|jdks |jdks |j|jko,t|j|j�S)z�Is distribution `dist` acceptable for this environment?

        The distribution must match the platform and python version
        requirements specified when this environment was created, or False
        is returned.
        N)r/�
py_versionr�rl)rr�rrr�can_addszEnvironment.can_addcCs|j|jj|�dS)z"Remove `dist` from the environmentN)r.r'�remove)rr�rrrr4(szEnvironment.removecCs<|dkrtj}x(|D] }xt|�D]}|j|�q"WqWdS)adScan `search_path` for distributions usable in this environment

        Any distributions found are added to the environment.
        `search_path` should be a sequence of ``sys.path`` items.  If not
        supplied, ``sys.path`` is used.  Only distributions conforming to
        the platform/python version defined at initialization are added.
        N)rkr�r�r)rr1rr�rrrr0,s

zEnvironment.scancCs|j�}|jj|g�S)aReturn a newest-to-oldest list of distributions for `project_name`

        Uses case-insensitive `project_name` comparison, assuming all the
        project's distributions use their project's name converted to all
        lowercase as their key.

        )r8r.rB)rrZdistribution_keyrrrr(;szEnvironment.__getitem__cCsL|j|�rH|j�rH|jj|jg�}||krH|j|�|jtjd�dd�dS)zLAdd `dist` if we ``can_add()`` it and it has not already been added
        �hashcmpT)r'�reverseN)	r3�has_versionr.rr'r;r!�operator�
attrgetter)rr�rrrrrFs

zEnvironment.addcCsB|j|�}|dk	r|Sx||jD]}||kr"|Sq"W|j||�S)a�Find distribution best matching `req` and usable on `working_set`

        This calls the ``find(req)`` method of the `working_set` to see if a
        suitable distribution is already active.  (This may raise
        ``VersionConflict`` if an unsuitable version of the project is already
        active in the specified `working_set`.)  If a suitable distribution
        isn't active, this method returns the newest distribution in the
        environment that meets the ``Requirement`` in `req`.  If no suitable
        distribution is found, and `installer` is supplied, then the result of
        calling the environment's ``obtain(req, installer)`` method will be
        returned.
        N)r�r'�obtain)rr�r�r r�rrrrOs
zEnvironment.best_matchcCs|dk	r||�SdS)a�Obtain a distribution matching `requirement` (e.g. via download)

        Obtain a distro that matches requirement (e.g. via download).  In the
        base ``Environment`` class, this routine just returns
        ``installer(requirement)``, unless `installer` is None, in which case
        None is returned instead.  This method is a hook that allows subclasses
        to attempt other ways of obtaining a distribution before falling back
        to the `installer` argument.Nr)rZrequirementr rrrr:es	zEnvironment.obtainccs&x |jj�D]}||r|VqWdS)z=Yield the unique project names of the available distributionsN)r.r)rr'rrrrGqszEnvironment.__iter__cCs^t|t�r|j|�nDt|t�rLx8|D] }x||D]}|j|�q4Wq&Wntd|f��|S)z2In-place addition of a distribution or environmentzCan't add %r to environment)rr�rr�r�)rr!Zprojectr�rrr�__iadd__ws


zEnvironment.__iadd__cCs.|jgddd�}x||fD]}||7}qW|S)z4Add an environment or distribution to an environmentN)rlr/)r)rr!�newrrrr�__add__�szEnvironment.__add__)N)N)N)rrrrrs�PY_MAJORr�r3r4r0r(rrr:rGr;r=rrrrr�s
	

c@seZdZdZdS)r�aTAn error occurred extracting a resource

    The following attributes are available from instances of this exception:

    manager
        The resource manager that raised this exception

    cache_path
        The base directory for resource extraction

    original_error
        The exception instance that caused extraction to fail
    N)rrrrrrrrr��s
c@s�eZdZdZdZdd�Zdd�Zdd�Zd	d
�Zdd�Z	d
d�Z
dd�Zdd�Zffdd�Z
edd��Zdd�Zdd�Zddd�ZdS)r�z'Manage resource extraction and packagesNcCs
i|_dS)N)�cached_files)rrrrr��szResourceManager.__init__cCst|�j|�S)zDoes the named resource exist?)rvr�)r�package_or_requirementr�rrrr��szResourceManager.resource_existscCst|�j|�S)z,Is the named resource an existing directory?)rvr�)rr@r�rrrr��szResourceManager.resource_isdircCst|�j||�S)z4Return a true filesystem path for specified resource)rvr�)rr@r�rrrr~�sz!ResourceManager.resource_filenamecCst|�j||�S)z9Return a readable file-like object for specified resource)rvr�)rr@r�rrrr}�szResourceManager.resource_streamcCst|�j||�S)z%Return specified resource as a string)rvr�)rr@r�rrrr|�szResourceManager.resource_stringcCst|�j|�S)z1List the contents of the named resource directory)rvr)rr@r�rrrr�sz ResourceManager.resource_listdircCsRtj�d}|jpt�}tjd�j�}t|jft	���}||_
||_||_|�dS)z5Give an error message for problems extracting file(s)r-a
            Can't extract file(s) to egg cache

            The following error occurred while trying to extract file(s) to the Python egg
            cache:

              {old_exc}

            The Python egg cache directory is currently set to:

              {cache_path}

            Perhaps your account does not have write access to this directory?  You can
            change the cache directory by setting the PYTHON_EGG_CACHE environment
            variable to point to an accessible directory.
            N)
rk�exc_info�extraction_pathr��textwrap�dedent�lstripr�r�r�r��
cache_pathZoriginal_error)r�old_excrF�tmpl�errrrr�extraction_error�s
z ResourceManager.extraction_errorc	Cs^|jp
t�}tjj||df|��}yt|�Wn|j�YnX|j|�d|j|<|S)a�Return absolute location in cache for `archive_name` and `names`

        The parent directory of the resulting path will be created if it does
        not already exist.  `archive_name` should be the base filename of the
        enclosing egg (which may not be the name of the enclosing zipfile!),
        including its ".egg" extension.  `names`, if provided, should be a
        sequence of path name parts "under" the egg's extraction location.

        This method should only be called by resource providers that need to
        obtain an extraction location, and only for names they intend to
        extract, as it tracks the generated names for possible cleanup later.
        z-tmpr-)	rBr�r�r�rm�_bypass_ensure_directoryrJ�_warn_unsafe_extraction_pathr?)rZarchive_name�namesZextract_pathZtarget_pathrrr�get_cache_path�s


zResourceManager.get_cache_pathcCsXtjdkr |jtjd�r dStj|�j}|tj@s@|tj@rTd|}tj	|t
�dS)aN
        If the default extraction path is overridden and set to an insecure
        location, such as /tmp, it opens up an opportunity for an attacker to
        replace an extracted file with an unauthorized payload. Warn the user
        if a known insecure location is used.

        See Distribute #375 for more details.
        �ntZwindirNz�%s is writable by group/others and vulnerable to attack when used with get_resource_filename. Consider a more secure location (set with .set_extraction_path or the PYTHON_EGG_CACHE environment variable).)r�r�r9�environ�stat�st_mode�S_IWOTH�S_IWGRPrCrD�UserWarning)r��mode�msgrrrrL�s
z,ResourceManager._warn_unsafe_extraction_pathcCs.tjdkr*tj|�jdBd@}tj||�dS)a4Perform any platform-specific postprocessing of `tempname`

        This is where Mac header rewrites should be done; other platforms don't
        have anything special they should do.

        Resource providers should call this method ONLY after successfully
        extracting a compressed resource.  They must NOT call it on resources
        that are already in the filesystem.

        `tempname` is the current (temporary) name of the file, and `filename`
        is the name it will be renamed to by the caller after this routine
        returns.
        �posiximi�N)r�r�rQrR�chmod)rZtempname�filenamerVrrr�postprocesss
zResourceManager.postprocesscCs|jrtd��||_dS)a�Set the base path where resources will be extracted to, if needed.

        If you do not call this routine before any extractions take place, the
        path defaults to the return value of ``get_default_cache()``.  (Which
        is based on the ``PYTHON_EGG_CACHE`` environment variable, with various
        platform-specific fallbacks.  See that routine's documentation for more
        details.)

        Resources are extracted to subdirectories of this path based upon
        information given by the ``IResourceProvider``.  You may set this to a
        temporary directory, but then you must call ``cleanup_resources()`` to
        delete the extracted files when done.  There is no guarantee that
        ``cleanup_resources()`` will be able to remove all extracted files.

        (Note: you may not change the extraction path for a given resource
        manager once resources have been extracted, unless you first call
        ``cleanup_resources()``.)
        z5Can't change extraction path, files already extractedN)r?rprB)rr�rrrr�)sz#ResourceManager.set_extraction_pathFcCsdS)aB
        Delete all extracted resource files and directories, returning a list
        of the file and directory names that could not be successfully removed.
        This function does not have any concurrency protection, so it should
        generally only be called when the extraction path is a temporary
        directory exclusive to a single process.  This method is not
        automatically called; you must call it explicitly or register it as an
        ``atexit`` function if you wish to ensure cleanup of a temporary
        directory used for extractions.
        Nr)r�forcerrrr�Csz!ResourceManager.cleanup_resources)F)rrrrrBr�r�r�r~r}r|rrJrN�staticmethodrLr[r�r�rrrrr��scCstjjd�ptjdd�S)z�
    Return the ``PYTHON_EGG_CACHE`` environment variable
    or a platform-relevant user cache dir for an app
    named "Python-Eggs".
    ZPYTHON_EGG_CACHEzPython-Eggs)Zappname)r�rPrBrZuser_cache_dirrrrrr�QscCstjdd|�S)z�Convert an arbitrary string to a standard distribution name

    Any runs of non-alphanumeric/. characters are replaced with a single '-'.
    z[^A-Za-z0-9.]+r+)r?�sub)r�rrrr�]scCsDyttjj|��Stjjk
r>|jdd�}tjdd|�SXdS)zB
    Convert an arbitrary string to a standard version string
    r�r,z[^A-Za-z0-9.]+r+N)rFrrK�VersionrLr5r?r^)rKrrrr�es
cCstjdd|�j�S)z�Convert an arbitrary string to a standard 'extra' name

    Any runs of non-alphanumeric characters are replaced with a single '_',
    and the result is always lowercased.
    z[^A-Za-z0-9.-]+r�)r?r^r8)r(rrrr�qscCs|jdd�S)z|Convert a project or version name to its filename-escaped form

    Any '-' characters are currently replaced with '_'.
    r+r�)r5)r�rrrr�zscCs>yt|�Wn,tk
r8}zd|_d|_|Sd}~XnXdS)zo
    Validate text as a PEP 508 environment marker; return an exception
    if invalid or False otherwise.
    NF)r��SyntaxErrorrZ�lineno)�text�errrr��scCsHytjj|�}|j�Stjjk
rB}zt|��WYdd}~XnXdS)z�
    Evaluate a PEP 508 environment marker.
    Return a boolean indicating the marker result in this environment.
    Raise SyntaxError if marker is invalid.

    This implementation uses the 'pyparsing' module.
    N)rZmarkersZMarkerr*Z
InvalidMarkerr`)rbr(r)rcrrrr��s
c@s�eZdZdZdZdZdZdd�Zdd�Zdd�Z	d	d
�Z
dd�Zd
d�Zdd�Z
dd�Zdd�Zdd�Zdd�Zdd�Zdd�Zdd�Zdd �Zd!d"�Zd#d$�Zd%d&�ZdS)'r�zETry to implement resources and metadata for arbitrary PEP 302 loadersNcCs(t|dd�|_tjjt|dd��|_dS)Nr��__file__r�)r�r�r�r��dirname�module_path)rr�rrrr��szNullProvider.__init__cCs|j|j|�S)N)�_fnrf)rr�r�rrrr��sz"NullProvider.get_resource_filenamecCstj|j||��S)N)�io�BytesIOr�)rr�r�rrrr��sz NullProvider.get_resource_streamcCs|j|j|j|��S)N)�_getrgrf)rr�r�rrrr��sz NullProvider.get_resource_stringcCs|j|j|j|��S)N)�_hasrgrf)rr�rrrr��szNullProvider.has_resourcecCs|jo|j|j|j|��S)N)�egg_inforkrg)rr�rrrr��szNullProvider.has_metadatacCs2|js
dS|j|j|j|��}tjr.|jd�S|S)Nr�zutf-8)rlrjrgrZPY3�decode)rr��valuerrrr��szNullProvider.get_metadatacCst|j|��S)N)r�r�)rr�rrrr��szNullProvider.get_metadata_linescCs|j|j|j|��S)N)�_isdirrgrf)rr�rrrr��szNullProvider.resource_isdircCs|jo|j|j|j|��S)N)rlrorg)rr�rrrr��szNullProvider.metadata_isdircCs|j|j|j|��S)N)�_listdirrgrf)rr�rrrr�szNullProvider.resource_listdircCs|jr|j|j|j|��SgS)N)rlrprg)rr�rrrr��szNullProvider.metadata_listdirc
Cs�d|}|j|�std|��|j|�jdd�}|jdd�}|j|j|�}||d<tjj|�r�t	|�j
�}t||d�}t|||�n>dd	l
m}t|�d|jd�|f||<t||d�}	t|	||�dS)
Nzscripts/zNo script named %rz
�
�
rd�execr)�cache)r�r�r�r5rgrlr�r�r�r�readr@rs�	linecachert�lenr)
rr�r�ZscriptZscript_textZscript_filename�source�codertZscript_coderrrru�s
zNullProvider.run_scriptcCstd��dS)Nz9Can't perform this operation for unregistered loader type)�NotImplementedError)rr�rrrrk�szNullProvider._hascCstd��dS)Nz9Can't perform this operation for unregistered loader type)rz)rr�rrrro�szNullProvider._isdircCstd��dS)Nz9Can't perform this operation for unregistered loader type)rz)rr�rrrrp�szNullProvider._listdircCs |rtjj|f|jd���S|S)N�/)r�r�rmr)r�baser�rrrrg�szNullProvider._fncCs$t|jd�r|jj|�Std��dS)N�get_dataz=Can't perform this operation for loaders without 'get_data()')r�r�r}rz)rr�rrrrj�szNullProvider._get)rrrr�egg_namerlr�r�r�r�r�r�r�r�r�r�r�rr�rurkrorprgrjrrrrr��s,c@s eZdZdZdd�Zdd�ZdS)r�z&Provider based on a virtual filesystemcCstj||�|j�dS)N)r�r��
_setup_prefix)rr�rrrr�szEggProvider.__init__cCs^|j}d}xN||krXt|�rBtjj|�|_tjj|d�|_||_P|}tjj	|�\}}qWdS)NzEGG-INFO)
rf�_is_unpacked_eggr�r��basenamer~rmrl�egg_rootr)rr��oldr|rrrr
s
zEggProvider._setup_prefixN)rrrrr�rrrrrr�sc@sDeZdZdZdd�Zdd�Zdd�Zdd	�Zd
d�Ze	dd
��Z
dS)r�z6Provides access to package resources in the filesystemcCstjj|�S)N)r�r�r�)rr�rrrrkszDefaultProvider._hascCstjj|�S)N)r�r�r
)rr�rrrroszDefaultProvider._isdircCs
tj|�S)N)r��listdir)rr�rrrrp"szDefaultProvider._listdircCst|j|j|�d�S)N�rb)rrgrf)rr�r�rrrr�%sz#DefaultProvider.get_resource_streamc	Cst|d��
}|j�SQRXdS)Nr�)rru)rr��streamrrrrj(szDefaultProvider._getcCsttdtd��}t||�dS)N�SourceFileLoader)r��importlib_machinery�typer�)rZ
loader_clsrrr�	_register,s
zDefaultProvider._registerN)rrrrrkrorpr�rjr'r�rrrrr�sc@s8eZdZdZdd�ZZdd�Zdd�ZdZdd�Z	dS)	r�z.Provider that returns nothing for all requestscCsdS)NFr)rr�rrrre9szEmptyProvider.<lambda>cCsdS)Nr�r)rr�rrrre:scCsgS)Nr)rr�rrrre;sNcCsdS)Nr)rrrrr�>szEmptyProvider.__init__)
rrrrrorkrjrprfr�rrrrr�6sc@s eZdZdZedd��ZeZdS)�ZipManifestsz
    zip manifest builder
    c
s2t|�� ��fdd��j�D�}t|�SQRXdS)a
        Build a dictionary similar to the zipimport directory
        caches, except instead of tuples, store ZipInfo objects.

        Use a platform-specific path separator (os.sep) for the path keys
        for compatibility with pypy on Windows.
        c3s&|]}|jdtj��j|�fVqdS)r{N)r5r��sepZgetinfo)r+r�)�zfilerrr,Usz%ZipManifests.build.<locals>.<genexpr>N)�ContextualZipFileZnamelistrR)rr�rVr)r�r�buildJs	

zZipManifests.buildN)rrrrr'r��loadrrrrr�Esr�c@s$eZdZdZejdd�Zdd�ZdS)�MemoizedZipManifestsz%
    Memoized zipfile manifests.
    �manifest_modzmanifest mtimecCsRtjj|�}tj|�j}||ks.||j|krH|j|�}|j||�||<||jS)zW
        Load a manifest at path or return a suitable manifest already loaded.
        )	r�r��normpathrQ�st_mtime�mtimer�r��manifest)rr�r�r�rrrr�fs
zMemoizedZipManifests.loadN)rrrrr�
namedtupler�r�rrrrr�`sr�cs0eZdZdZdd�Zdd�Z�fdd�Z�ZS)r�zL
    Supplement ZipFile class to support context manager for Python 2.6
    cCs|S)Nr)rrrr�	__enter__yszContextualZipFile.__enter__cCs|j�dS)N)�close)rr�rn�	tracebackrrr�__exit__|szContextualZipFile.__exit__cs(ttjd�rtj||�Stt|�j|�S)zI
        Construct a ZipFile or ContextualZipFile as appropriate
        r�)r��zipfile�ZipFilerr��__new__)rrd�kwargs)rrrr�szContextualZipFile.__new__)rrrrr�r�r�rHrr)rrr�tsr�c@s�eZdZdZdZe�Zdd�Zdd�Zdd�Z	e
d	d
��Zdd�Ze
d
d��Zdd�Zdd�Zdd�Zdd�Zdd�Zdd�Zdd�Zdd�Zdd �ZdS)!r�z"Resource support for zips and eggsNcCs tj||�|jjtj|_dS)N)r�r�r��archiver�r��zip_pre)rr�rrrr��szZipProvider.__init__cCs4|j|j�r|t|j�d�Std||jf��dS)Nz%s is not a subpath of %s)r9r�rw�AssertionError)r�fspathrrr�
_zipinfo_name�szZipProvider._zipinfo_namecCsP|j|}|j|jtj�r:|t|j�dd�jtj�Std||jf��dS)Nr-z%s is not a subpath of %s)r�r9r�r�r�rwrr�)r�zip_pathr�rrr�_parts�s

zZipProvider._partscCs|jj|jj�S)N)�_zip_manifestsr�r�r�)rrrr�zipinfo�szZipProvider.zipinfocCs`|jstd��|j|�}|j�}dj|j|��|krTx|D]}|j||j|��q:W|j||�S)Nz5resource_filename() only supported for .egg, not .zipr{)r~rz�_resource_to_zip�_get_eager_resourcesrmr��_extract_resource�
_eager_to_zip)rr�r�r��eagersr�rrrr��s

z!ZipProvider.get_resource_filenamecCs"|j}|jd}tj|�}||fS)Nrr-r7)rrr7)Z	file_size�	date_time�timeZmktime)Zzip_stat�sizer��	timestamprrr�_get_date_and_size�s

zZipProvider._get_date_and_sizec
Csn||j�krDx*|j�|D]}|j|tjj||��}qWtjj|�S|j|j|�\}}tsdt	d��y�|j
|j|j|��}|j
||�r�|Stdtjj|�d�\}}	tj||jj|��tj|�t|	||f�|j|	|�yt|	|�Wn\tjk
�rDtjj|��r>|j
||��r|Stjdk�r>t|�t|	|�|S�YnXWn tjk
�rh|j�YnX|S)Nz>"os.rename" and "os.unlink" are not supported on this platformz	.$extract)�dirrO)�_indexr�r�r�rmrer�r��
WRITE_SUPPORT�IOErrorrNr~r��_is_current�_mkstemp�writer�r}r�rr[r
�error�isfiler�rrJ)
rr�r�r�Zlastr�r�Z	real_pathZoutfZtmpnamrrrr��s@

zZipProvider._extract_resourcec		Csx|j|j|�\}}tjj|�s$dStj|�}|j|ksB|j|krFdS|jj	|�}t
|d��}|j�}WdQRX||kS)zK
        Return True if the file_path is current for this zip_path
        Fr�N)r�r�r�r�r�rQ�st_sizer�r�r}rru)	rZ	file_pathr�r�r�rQZzip_contents�fZ
file_contentsrrrr��s
zZipProvider._is_currentcCsB|jdkr<g}x&dD]}|j|�r|j|j|��qW||_|jS)N�native_libs.txt�eager_resources.txt)r�r�)r�r�rr�)rr�r�rrrr�s


z ZipProvider._get_eager_resourcescCs�y|jStk
r�i}xd|jD]Z}|jtj�}xH|rztjj|dd��}||krj||j|d�Pq4|j�g||<q4Wq"W||_|SXdS)Nr-r7r7)	Z	_dirindex�AttributeErrorr�rr�r�rmr;r:)rZindr�r<�parentrrrr�szZipProvider._indexcCs |j|�}||jkp||j�kS)N)r�r�r�)rr�r�rrrrks
zZipProvider._hascCs|j|�|j�kS)N)r�r�)rr�rrrro!szZipProvider._isdircCst|j�j|j|�f��S)N)rr�rBr�)rr�rrrrp$szZipProvider._listdircCs|j|j|j|��S)N)r�rgr�)rr�rrrr�'szZipProvider._eager_to_zipcCs|j|j|j|��S)N)r�rgrf)rr�rrrr�*szZipProvider._resource_to_zip)rrrrr�r�r�r�r�r�r�r�r�r]r�r�r�r�r�rkrorpr�r�rrrrr��s$	

	4	c@s8eZdZdZdd�Zdd�Zdd�Zdd	�Zd
d�ZdS)
r�a*Metadata handler for standalone PKG-INFO files

    Usage::

        metadata = FileMetadata("/path/to/PKG-INFO")

    This provider rejects all data and metadata requests except for PKG-INFO,
    which is treated as existing, and will be the contents of the file at
    the provided location.
    cCs
||_dS)N)r�)rr�rrrr�=szFileMetadata.__init__cCs|dkotjj|j�S)NzPKG-INFO)r�r�r�)rr�rrrr�@szFileMetadata.has_metadatac	CsD|dkrtd��tj|jddd��}|j�}WdQRX|j|�|S)NzPKG-INFOz(No metadata except PKG-INFO is availablezutf-8r5)�encoding�errors)r�rhrr�ru�_warn_on_replacement)rr�r��metadatarrrr�Cs
zFileMetadata.get_metadatacCs2djd�}||kr.d}|jft��}tj|�dS)Ns�zutf-8z2{self.path} could not be properly decoded in UTF-8)rmr�r�rCrD)rr�Zreplacement_charrHrWrrrr�Ls

z!FileMetadata._warn_on_replacementcCst|j|��S)N)r�r�)rr�rrrr�TszFileMetadata.get_metadata_linesN)	rrrrr�r�r�r�r�rrrrr�1s
	c@seZdZdZdd�ZdS)r�asMetadata provider for egg directories

    Usage::

        # Development eggs:

        egg_info = "/path/to/PackageName.egg-info"
        base_dir = os.path.dirname(egg_info)
        metadata = PathMetadata(base_dir, egg_info)
        dist_name = os.path.splitext(os.path.basename(egg_info))[0]
        dist = Distribution(basedir, project_name=dist_name, metadata=metadata)

        # Unpacked egg directories:

        egg_path = "/path/to/PackageName-ver-pyver-etc.egg"
        metadata = PathMetadata(egg_path, os.path.join(egg_path,'EGG-INFO'))
        dist = Distribution.from_filename(egg_path, metadata=metadata)
    cCs||_||_dS)N)rfrl)rr�rlrrrr�lszPathMetadata.__init__N)rrrrr�rrrrr�Xsc@seZdZdZdd�ZdS)r�z Metadata provider for .egg filescCsD|jtj|_||_|jr0tjj|j|j�|_n|j|_|j	�dS)z-Create a metadata provider from a zipimporterN)
r�r�r�r�r��prefixr�rmrfr)r�importerrrrr�tszEggMetadata.__init__N)rrrrr�rrrrr�qsrR)�_distribution_finderscCs|t|<dS)axRegister `distribution_finder` to find distributions in sys.path items

    `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
    handler), and `distribution_finder` is a callable that, passed a path
    item and the importer instance, yields ``Distribution`` instances found on
    that path item.  See ``pkg_resources.find_on_path`` for an example.N)r�)�
importer_typeZdistribution_finderrrrr��scCst|�}tt|�}||||�S)z.Yield distributions accessible via `path_item`)rr�r�)�	path_item�onlyr��finderrrrr��s
ccs�|jjd�rdSt|�}|jd�r2tj||d�V|r:dSxH|jd�D]:}t|�rFtj	j
||�}xttj
|�|�D]
}|VqrWqFWdS)z@
    Find eggs in zip files; possibly multiple nested eggs.
    z.whlNzPKG-INFO)r�r{)r��endswithr�r�r��
from_filenamerr�r�r�rm�find_eggs_in_zip�	zipimport�zipimporter)r�r�r�r�Zsubitem�subpathr�rrrr��s
r�cCsfS)Nr)r�r�r�rrr�find_nothing�sr�cCsdd�}t||dd�S)aL
    Given a list of filenames, return them in descending order
    by version number.

    >>> names = 'bar', 'foo', 'Python-2.7.10.egg', 'Python-2.7.2.egg'
    >>> _by_version_descending(names)
    ['Python-2.7.10.egg', 'Python-2.7.2.egg', 'foo', 'bar']
    >>> names = 'Setuptools-1.2.3b1.egg', 'Setuptools-1.2.3.egg'
    >>> _by_version_descending(names)
    ['Setuptools-1.2.3.egg', 'Setuptools-1.2.3b1.egg']
    >>> names = 'Setuptools-1.2.3b1.egg', 'Setuptools-1.2.3.post1.egg'
    >>> _by_version_descending(names)
    ['Setuptools-1.2.3.post1.egg', 'Setuptools-1.2.3b1.egg']
    cSs2tjj|�\}}tj|jd�|g�}dd�|D�S)z6
        Parse each component of the filename
        r+cSsg|]}tjj|��qSr)rrKr�)r+r3rrr�
<listcomp>�sz?_by_version_descending.<locals>._by_version.<locals>.<listcomp>)r�r��splitext�	itertools�chainr)r��extr<rrr�_by_version�sz+_by_version_descending.<locals>._by_versionT)r'r6)�sorted)rMr�rrr�_by_version_descending�sr�ccs�t|�}tjj|�o tj|tj��r�t|�rPtj|t	|tjj
|d��d�V�nTttj|��}�xB|D�]8}|j
�}|jd�s�|jd�r�tjj
||�}tjj|�r�ttj|��dkr�qft	||�}nt|�}tj|||td�Vqf|o�t|��rttjj
||��}x�|D]}	|	V�qWqf|rf|jd�rfttjj
||���}
|
j�}WdQRXxN|D]F}|j��sh�qVtjj
||j��}
t|
�}x|D]}|V�q�WP�qVWqfWdS)	z6Yield distributions accessible on a sys.path directoryzEGG-INFO)r�z	.egg-infoz
.dist-infor)�
precedencez	.egg-linkN)�_normalize_cachedr�r�r
�access�R_OKr�r�r�r�rmr�r�r8r�rwr��
from_locationr�r�r�	readlines�strip�rstrip)r�r�r�Zpath_item_entriesr�r8Zfullpathr�rr�Z
entry_fileZentry_lines�liner�rrrr�find_on_path�sB



r��
FileFinder)�_namespace_handlers)�_namespace_packagescCs|t|<dS)a�Register `namespace_handler` to declare namespace packages

    `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
    handler), and `namespace_handler` is a callable like this::

        def namespace_handler(importer, path_entry, moduleName, module):
            # return a path_entry to use for child packages

    Namespace handlers are only called if the importer object has already
    agreed that it can handle the relevant path item, and they should only
    return a subpath if the module __path__ does not already contain an
    equivalent subpath.  For an example namespace handler, see
    ``pkg_resources.file_ns_handler``.
    N)r�)r�Znamespace_handlerrrrr�scCs�t|�}|dkrdS|j|�}|dkr*dStjj|�}|dkrbtj|�}tj|<g|_t|�nt	|d�svt
d|��tt|�}|||||�}|dk	r�|j}|j
|�|j|�t|||�|S)zEEnsure that named package includes a subpath of path_item (if needed)N�__path__zNot a package:)r�find_modulerkr�rB�types�
ModuleTyper��_set_parent_nsr�r�r�r�r;�load_module�_rebuild_mod_path)�packageNamer�r�r�r�Zhandlerr�r�rrr�
_handle_nss*






r�csRdd�tjD���fdd����fdd�}|j|d�dd�|D�|jd	d	�<d	S)
zq
    Rebuild module.__path__ ensuring that all entries are ordered
    corresponding to their sys.path order
    cSsg|]}t|��qSr)r�)r+�prrrr�5sz%_rebuild_mod_path.<locals>.<listcomp>cs(y
�j|�Stk
r"td�SXdS)z/
        Workaround for #520 and #513.
        �infN)�indexrp�float)r�)�sys_pathrr�safe_sys_path_index7s
z._rebuild_mod_path.<locals>.safe_sys_path_indexcs<|jtj�}�jd�d}|d|�}�ttjj|���S)zR
        Return the ordinal of the path based on its position in sys.path
        r,r-N)rr�r��countr�rm)r��
path_partsZmodule_partsr<)�package_namer�rr�position_in_sys_path@sz/_rebuild_mod_path.<locals>.position_in_sys_path)r'cSsg|]}t|��qSr)r�)r+r�rrrr�JsN)rkr�r!r�)Z	orig_pathr�r�r�r)r�r�r�rr�0s
		r�cCs�tj�z�|tkrdStjd}}d|kr�dj|jd�dd��}t|�|tkrZt|�ytj	|j
}Wntk
r�td|��YnXtj
|g�j|�tj
|g�x|D]}t||�q�WWdtj�XdS)z9Declare that package 'packageName' is a namespace packageNr,r-zNot a package:r7)�_imp�acquire_lockr�rkr�rmrr�r�r�r�r�r�rr;r��release_lock)r�r�r�r�rrrr�Ms&
c
CsJtj�z2x,tj|f�D]}t||�}|rt||�qWWdtj�XdS)zDEnsure that previously-declared namespace packages include path_itemN)r�r�r�rBr�r�r�)r�r��packager�rrrr�ns
cCsFtjj||jd�d�}t|�}x |jD]}t|�|kr(Pq(W|SdS)zBCompute an ns-package subpath for a filesystem or zipfile importerr,r-Nr7)r�r�rmrr�r�)r�r�r�r�r�Z
normalizedrrrr�file_ns_handlerzsrcCsdS)Nr)r�r�r�r�rrr�null_ns_handler�srcCstjjtjj|��S)z1Normalize a file/dir name for comparison purposes)r�r��normcase�realpath)rZrrrr��scCs2y||Stk
r,t|�||<}|SXdS)N)r�r�)rZr��resultrrrr��s
r�cCs|j�jd�S)z@
    Determine if given path appears to be an unpacked egg.
    z.egg)r8r�)r�rrrr��sr�cCs<|jd�}|j�}|r8dj|�}ttj||tj|�dS)Nr,)rr:rm�setattrrkr�)r�r<r�r�rrrr��s


r�ccsht|tj�r>xV|j�D]"}|j�}|r|jd�r|VqWn&x$|D]}xt|�D]
}|VqRWqDWdS)z9Yield non-empty/non-comment lines of a string or sequence�#N)rrr��
splitlinesr�r9r�)�strsr2Zssrrrr��s
z\w+(\.\w+)*$z�
    (?P<name>[^-]+) (
        -(?P<ver>[^-]+) (
            -py(?P<pyver>[^-]+) (
                -(?P<plat>.+)
            )?
        )?
    )?
    c@s�eZdZdZffdfdd�Zdd�Zdd�Zdd
d�Zdd
�Zddd�Z	e
jd�Ze
ddd��Ze
dd��Ze
ddd��Ze
ddd��ZdS)r�z3Object representing an advertised importable objectNcCsJt|�std|��||_||_t|�|_tjddj|��j	|_	||_
dS)NzInvalid module namezx[%s]�,)�MODULErpr��module_namer�attrsr�r�rmrr�)rr�rrrr�rrrr��s

zEntryPoint.__init__cCsHd|j|jf}|jr*|ddj|j�7}|jrD|ddj|j�7}|S)Nz%s = %s�:r,z [%s]r	)r�rrrmr)rr2rrrr��szEntryPoint.__str__cCsdt|�S)NzEntryPoint.parse(%r))rF)rrrrr��szEntryPoint.__repr__TcOs6|s|s|rtjdtdd�|r.|j||�|j�S)zH
        Require packages for this EntryPoint, then resolve it.
        zJParameters to load are deprecated.  Call .resolve and .require separately.rg)r>)rCrD�DeprecationWarningrtr)rrtrdr�rrrr��szEntryPoint.loadcCsVt|jdgdd�}ytjt|j|�Stk
rP}ztt|���WYdd}~XnXdS)zD
        Resolve the entry point from its module and attrs.
        rr)�fromlist�levelN)	r�r�	functools�reducer�rr�r�rF)rr��excrrrr�s
zEntryPoint.resolvecCsH|jr|jrtd|��|jj|j�}tj|||�}tttj|��dS)Nz&Can't require() without a distribution)	rr�r�rr�rrrr)rrr rrVrrrrt	s

zEntryPoint.requirez]\s*(?P<name>.+?)\s*=\s*(?P<module>[\w.]+)\s*(:\s*(?P<attr>[\w.]+))?\s*(?P<extras>\[.*\])?\s*$cCsf|jj|�}|sd}t||��|j�}|j|d�}|drJ|djd�nf}||d|d|||�S)aParse a single entry point from string `src`

        Entry point syntax follows the form::

            name = some.module:some.attr [extra1, extra2]

        The entry name and module name are required, but the ``:attrs`` and
        ``[extras]`` parts are optional
        z9EntryPoint must be in 'name=module:attrs [extras]' formatr�attrr,r�r�)�patternrjrp�	groupdict�
_parse_extrasr)r�srcr�rrrW�resrrrrrr�	s
zEntryPoint.parsecCs(|sfStjd|�}|jr"t��|jS)N�x)r�r��specsrpr)rZextras_specr�rrrr$	szEntryPoint._parse_extrascCsZt|�std|��i}x>t|�D]2}|j||�}|j|krHtd||j��|||j<q W|S)zParse an entry point groupzInvalid group namezDuplicate entry point)r
rpr�r�r�)rro�linesr��thisr�rrrr�parse_group-	s

zEntryPoint.parse_groupcCsxt|t�r|j�}nt|�}i}xR|D]J\}}|dkrD|s<q&td��|j�}||kr^td|��|j|||�||<q&W|S)z!Parse a map of entry point groupsNz%Entry points must be listed in groupszDuplicate group name)rrRrVr�rpr�r)r�datar��mapsrorrrr�	parse_map:	s


zEntryPoint.parse_map)T)NN)N)N)N)rrrrr�r�r�r�rrtr?r@rr'r�rrr!rrrrr��s 	


	cCs>|sdStjj|�}|djd�r:tjj|dd�d�S|S)Nr�r-zmd5=r7r7)r�)rr�Zurlparser9Z
urlunparse)rZparsedrrr�_remove_md5_fragmentN	sr"cCs@dd�}t||�}tt|�d�}|jd�\}}}t|j��p>dS)z�
    Given an iterable of lines from a Metadata file, return
    the value of the Version field, if present, or None otherwise.
    cSs|j�jd�S)Nzversion:)r8r9)r�rrrre\	sz$_version_from_file.<locals>.<lambda>r�r
N)r�next�iter�	partitionr�r�)rZis_version_lineZ
version_linesr�r�rnrrr�_version_from_fileW	s

r&c@sZeZdZdZdZddddedefdd�ZedGdd��Z	dd	�Z
ed
d��Zdd
�Z
dd�Zdd�Zdd�Zdd�Zdd�Zdd�Zedd��Zedd��Zdd�Zed d!��Zed"d#��Zffd$d%�Zd&d'�ZdHd)d*�Zd+d,�Zd-d.�Zd/d0�Zd1d2�ZedId3d4��Z d5d6�Z!d7d8�Z"dJd9d:�Z#d;d<�Z$dKd=d>�Z%d?d@�Z&dAdB�Z'dCdD�Z(edEdF��Z)dS)Lr�z5Wrap an actual or potential sys.path entry w/metadatazPKG-INFONcCsFt|pd�|_|dk	r t|�|_||_||_||_||_|p>t|_	dS)NZUnknown)
r�rr��_versionr2rlrr�r��	_provider)rrr�rrKr2rlr�rrrr�g	s
zDistribution.__init__cKs~dgd\}}}}tjj|�\}}	|	j�tkr^t|	j�}t|�}
|
r^|
jdddd�\}}}}|||f||||d�|��j�S)Nr�r�ZverZpyverrq)rrKr2rl)r�r�r�r8�_distributionImpl�EGG_NAMEro�_reload_version)rrr�r�rTrrKr2rlr�rjrrrr�s	s
zDistribution.from_locationcCs|S)Nr)rrrrr+�	szDistribution._reload_versioncCs(|j|j|jt|j�|jpd|jp$dfS)Nr�)�parsed_versionr�r'r"rr2rl)rrrrr5�	szDistribution.hashcmpcCs
t|j�S)N)�hashr5)rrrrr�	szDistribution.__hash__cCs|j|jkS)N)r5)rr!rrrr �	szDistribution.__lt__cCs|j|jkS)N)r5)rr!rrrr"�	szDistribution.__le__cCs|j|jkS)N)r5)rr!rrrr%�	szDistribution.__gt__cCs|j|jkS)N)r5)rr!rrrr$�	szDistribution.__ge__cCst||j�sdS|j|jkS)NF)rrr5)rr!rrrr#�	szDistribution.__eq__cCs
||kS)Nr)rr!rrrr&�	szDistribution.__ne__cCs0y|jStk
r*|jj�|_}|SXdS)N)Z_keyr�rr8)rr'rrrr'�	s
zDistribution.keycCst|d�st|j�|_|jS)N�_parsed_version)r�rNrKr.)rrrrr,�	s
zDistribution.parsed_versioncCsXtjj}t|j|�}|sdS|js&dStjd�j�jdd�}t	j
|jft|��t
�dS)Na>
            '{project_name} ({version})' is being parsed as a legacy,
            non PEP 440,
            version. You may find odd behavior and sort order.
            In particular it will be sorted as less than 0.0. It
            is recommended to migrate to PEP 440 compatible
            versions.
            rqr�)rrK�
LegacyVersionrr.rCrDr�r5rCrDr��varsr)rZLVZ	is_legacyrHrrr�_warn_legacy_version�	sz!Distribution._warn_legacy_versioncCsLy|jStk
rFt|j|j��}|dkrBd}t||j|��|SXdS)Nz(Missing 'Version:' header and/or %s file)r'r�r&�
_get_metadata�PKG_INFOrp)rrKrHrrrrK�	szDistribution.versioncCs�y|jStk
r�dgi}|_x�dD]x}xrt|j|��D]`\}}|r�d|kr||jdd�\}}t|�rpg}nt|�s|g}t|�p�d}|j|g�j	t
|��q>Wq*W|SXdS)N�requires.txt�depends.txtr
r-)r4r5)Z_Distribution__dep_mapr�r�r2rr�r�r�rrr�)r�dmr�r(rr)rrr�_dep_map�	s 
zDistribution._dep_mapcCsj|j}g}|j|jdf��xH|D]@}y|j|t|��Wq"tk
r`td||f��Yq"Xq"W|S)z@List of Requirements needed for this distro if `extras` are usedNz%s has no such extra feature %r)r7rrBr�r�r�)rrr6Zdepsr�rrrr�	s
zDistribution.requiresccs(|j|�r$x|j|�D]
}|VqWdS)N)r�r�)rr�r�rrrr2
s
zDistribution._get_metadataFcCsZ|dkrtj}|j||d�|tjkrVt|j�x$|jd�D]}|tjkr<t|�q<WdS)z>Ensure distribution is importable on `path` (default=sys.path)N)r5znamespace_packages.txt)rkr�rr�rr2r�r�)rr�r5Zpkgrrr�activate	
s


zDistribution.activatecCs8dt|j�t|j�|jptf}|jr4|d|j7}|S)z@Return what this distribution's standard .egg filename should bez
%s-%s-py%sr+)r�rrKr2r>rl)rrZrrrr~
szDistribution.egg_namecCs |jrd||jfSt|�SdS)Nz%s (%s))rrF)rrrrr�
szDistribution.__repr__cCs@yt|dd�}Wntk
r(d}YnX|p0d}d|j|fS)NrKz[unknown version]z%s %s)r�rpr)rrKrrrr�%
s
zDistribution.__str__cCs|jd�rt|��t|j|�S)zADelegate all unrecognized public attributes to .metadata providerr�)r9r�r�r()rrrrr�__getattr__-
s
zDistribution.__getattr__cKs|jt|�tjj|�|f|�S)N)r�r�r�r�r�)rrZr�rTrrrr�3
szDistribution.from_filenamecCs<t|jtjj�r"d|j|jf}nd|j|jf}tj|�S)z?Return a ``Requirement`` that matches this distribution exactlyz%s==%sz%s===%s)rr,rrKr_rr�r�)r�specrrrr":
szDistribution.as_requirementcCs.|j||�}|dkr&td||ff��|j�S)z=Return the `name` entry point of `group` or raise ImportErrorNzEntry point %r not found)rzr�r�)rror�rrrrrxC
szDistribution.load_entry_pointcCsPy
|j}Wn,tk
r6tj|jd�|�}|_YnX|dk	rL|j|i�S|S)z=Return the entry point map for `group`, or the full entry mapzentry_points.txtN)Z_ep_mapr�r�r!r2rB)rroZep_maprrrryJ
s
zDistribution.get_entry_mapcCs|j|�j|�S)z<Return the EntryPoint object for `group`+`name`, or ``None``)ryrB)rror�rrrrzV
szDistribution.get_entry_infoc
Cs2|p|j}|sdSt|�}tjj|�}dd�|D�}x�t|�D]v\}}||kr\|rVPq�dSq>||kr>|jtkr>|r�|||d�kr�dS|tjkr�|j	�|j
||�|j
||�Pq>W|tjkr�|j	�|r�|j
d|�n
|j|�dSxBy|j||d�}	Wnt
k
�rPYq�X||	=||	=|	}q�WdS)a�Ensure self.location is on path

        If replace=False (default):
            - If location is already in path anywhere, do nothing.
            - Else:
              - If it's an egg and its parent directory is on path,
                insert just ahead of the parent.
              - Else: add to the end of path.
        If replace=True:
            - If location is already on path anywhere (not eggs)
              or higher priority than its parent (eggs)
              do nothing.
            - Else:
              - If it's an egg and its parent directory is on path,
                insert just ahead of the parent,
                removing any lower-priority entries.
              - Else: add it to the front of path.
        NcSsg|]}|rt|�p|�qSr)r�)r+r�rrrr�t
sz*Distribution.insert_on.<locals>.<listcomp>rr-)rr�r�r�re�	enumerater�r�rk�check_version_conflictrr;r�rp)
rr��locr5ZnlocZbdirZnpathr�rZnprrrrZ
sB



zDistribution.insert_oncCs�|jdkrdStj|jd��}t|j�}x~|jd�D]p}|tjks4||ks4|tkrTq4|dkr^q4t	tj|dd�}|r�t|�j
|�s4|j
|j�r�q4td|||jf�q4WdS)	N�
setuptoolsznamespace_packages.txtz
top_level.txt�
pkg_resources�siterdzIModule %s was already imported from %s, but %s is being added to sys.path)r?r>r@)r'rRrSr2r�rrkr�r�r�r9�
issue_warning)rZnspr=�modname�fnrrrr<�
s"

z#Distribution.check_version_conflictcCs4y
|jWn$tk
r.tdt|��dSXdS)NzUnbuilt egg for FT)rKrprAr�)rrrrr7�
s
zDistribution.has_versioncKsDd}x$|j�D]}|j|t||d��qW|jd|j�|jf|�S)z@Copy this distribution, substituting in any changed keyword argsz<project_name version py_version platform location precedenceNr�)rrr�r(r)rrTrMrrrr�clone�
s
zDistribution.clonecCsdd�|jD�S)NcSsg|]}|r|�qSrr)r+Zdeprrrr��
sz'Distribution.extras.<locals>.<listcomp>)r7)rrrrr�
szDistribution.extras)N)NF)N)N)NF)*rrrrr3r>r�r�r'r�r+r�r5rr r"r%r$r#r&r'r,r1rKr7rr2r8r~r�r�r9r�r"rxryrzrr<r7rDrrrrrr�c	sN

	

Cc@seZdZdd�ZdS)�EggInfoDistributioncCst|j|j��}|r||_|S)a�
        Packages installed by distutils (e.g. numpy or scipy),
        which uses an old safe_version, and so
        their version numbers can get mangled when
        converted to filenames (e.g., 1.11.0.dev0+2329eae to
        1.11.0.dev0_2329eae). These distributions will not be
        parsed properly
        downstream by Distribution and safe_version, so
        take an extra step and try to get the version number from
        the metadata file itself instead of the filename.
        )r&r2r3r')rZ
md_versionrrrr+�
sz#EggInfoDistribution._reload_versionN)rrrr+rrrrrE�
srEc@s>eZdZdZdZejd�Zedd��Z	edd��Z
dd	�Zd
S)�DistInfoDistributionzGWrap an actual or potential sys.path entry w/metadata, .dist-info styleZMETADATAz([\(,])\s*(\d.*?)\s*([,\)])cCs@y|jStk
r:|j|j�}tjj�j|�|_|jSXdS)zParse and cache metadataN)Z	_pkg_infor�r�r3�email�parserZParserZparsestr)rr�rrr�_parsed_pkg_info�
sz%DistInfoDistribution._parsed_pkg_infocCs,y|jStk
r&|j�|_|jSXdS)N)�_DistInfoDistribution__dep_mapr��_compute_dependencies)rrrrr7�
s

zDistInfoDistribution._dep_mapcs�dgi}|_g�x&|jjd�p"gD]}�jt|��q$W�fdd�}t|d��}|dj|�x<|jjd�ppgD](}t|j��}tt||��|�||<qrW|S)z+Recompute this distribution's dependencies.Nz
Requires-Distc3s0x*�D]"}|js"|jjd|i�r|VqWdS)Nr()r)r*)r(r�)rrr�reqs_for_extra�
s
zBDistInfoDistribution._compute_dependencies.<locals>.reqs_for_extrazProvides-Extra)	rJrIZget_allrr��	frozensetr�r�r)rr6r�rL�commonr(Zs_extrar)rrrK�
sz*DistInfoDistribution._compute_dependenciesN)rrrrr3r?r@ZEQEQr�rIr7rKrrrrrF�
s

rF)z.eggz	.egg-infoz
.dist-infoc
Os^d}t�}y"xtj|�j|kr(|d7}qWWntk
r@YnXtj|d|di|��dS)Nr-r>)rOrkr�r�rprCrD)rdrTrrXrrrrAsrAc@seZdZdd�ZdS)�RequirementParseErrorcCsdj|j�S)Nr�)rmrd)rrrrr�szRequirementParseError.__str__N)rrrr�rrrrrOsrOccshtt|��}xV|D]N}d|kr0|d|jd��}|jd�rV|dd�j�}|t|�7}t|�VqWdS)z�Yield ``Requirement`` objects for each specification in `strs`

    `strs` must be a string, or a (possibly-nested) iterable thereof.
    z #N�\rg���)r$r�r�r�r�r#r�)rrr�rrrr�#s

csPeZdZ�fdd�Zdd�Zdd�Zdd�Zd	d
�Zdd�Ze	d
d��Z
�ZS)r�cs�ytt|�j|�Wn2tjjk
rF}ztt|���WYdd}~XnX|j|_	t
|j�}||j�|_|_
dd�|jD�|_ttt|j��|_|j
|jt|j�|jr�t|j�ndf|_t|j�|_dS)z>DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!NcSsg|]}|j|jf�qSr)r8rK)r+r:rrrr�Asz(Requirement.__init__.<locals>.<listcomp>)rr�r�rrZInvalidRequirementrOrFr�Zunsafe_namer�r8rr'�	specifierrrrr�rrMr)�hashCmpr-�_Requirement__hash)rZrequirement_stringrcr)rrrr�7s
zRequirement.__init__cCst|t�o|j|jkS)N)rr�rS)rr!rrrr#Ks
zRequirement.__eq__cCs
||kS)Nr)rr!rrrr&QszRequirement.__ne__cCs0t|t�r |j|jkrdS|j}|jj|dd�S)NFT)Zprereleases)rr�r'rKrR�contains)rrrrrr	Ts

zRequirement.__contains__cCs|jS)N)rT)rrrrr`szRequirement.__hash__cCsdt|�S)NzRequirement.parse(%r))rF)rrrrr�cszRequirement.__repr__cCst|�\}|S)N)r�)r2r�rrrr�es
zRequirement.parse)rrrr�r#r&r	rr�r]r�rHrr)rrr�6scCs0t|t�s*Gdd�d|t�}|jdd�S|jS)z&Get an mro for a type or classic classc@seZdZdS)z_get_mro.<locals>.clsN)rrrrrrrrosrr-N)rr��object�__mro__)rrrr�_get_mroks
rXcCs2x,tt|dt|���D]}||kr||SqWdS)z2Return an adapter factory for `ob` from `registry`rN)rXr�r�)�registryr`�trrrr�vsr�cCs&tjj|�}tjj|�s"tj|�dS)z1Ensure that the parent directory of `path` existsN)r�r�rer
�makedirs)r�rerrrr�}scCs@tstd��t|�\}}|r<|r<t|�r<t|�t|d�dS)z/Sandbox-bypassing version of ensure_directory()z*"os.mkdir" not supported on this platform.i�N)r�r�rr
rKr	)r�rerZrrrrK�srKccszd}g}xbt|�D]V}|jd�r^|jd�rR|s2|r<||fV|dd�j�}g}qhtd|��q|j|�qW||fVdS)asSplit a string or iterable thereof into (section, content) pairs

    Each ``section`` is a stripped version of the section header ("[section]")
    and each ``content`` is a list of stripped lines excluding blank lines and
    comment-only lines.  If there are any such lines before the first section
    header, they're returned in a first ``section`` of ``None``.
    N�[�]r-zInvalid section headingr7)r�r9r�r�rpr;)r2ZsectionZcontentr�rrrr��s


cOs&tj}ztt_tj||�S|t_XdS)N)r�r�os_open�tempfileZmkstemp)rdrTZold_openrrrr��s
r��ignore)�categoryr;cOs|||�|S)Nr)r�rdr�rrr�_call_aside�s
rbcCs<t�}||d<x(t|�D]}|jd�st||�||<qWdS)z=Set up global resource manager (deliberately not state-saved)Z_managerr�N)r�r�r9r�)rXr�r�rrr�_initialize�s

rccCs�tj�}td|d�|j}|j}|j}|j}|}d}x|D]}|jdd�q:W~|dd�dd�g|_t	t
|jtj
��t�jt��dS)	aE
    Prepare the master working set and make the ``require()``
    API available.

    This function has explicit effects on the global state
    of pkg_resources. It is intended to be invoked once at
    the initialization of this module.

    Invocation by other packages is unsupported and done
    at their own risk.
    rV)r�NF)r5cSs|jdd�S)NT)r5)r8)r�rrrre�sz0_initialize_master_working_set.<locals>.<lambda>)r%)r�rrUrtr{r&rur8r�rrr�rkr�rOrPr�)r�rtr{r�rur�r�rrr�_initialize_master_working_set�s

rd)rr)rrr7)N)N)F)F)F)F)N)�rZ
__future__rrkr�rhr�r?r�r�r�rCrQrZpkgutilr8rlrr�Zemail.parserrGr_rCr�rr�r�ZimpZpip._vendorrZpip._vendor.six.movesrrrrr	r
rr�rr^Zos.pathr
rZimportlib.machinery�	machineryr�rrrr��version_inforWrDrtr�rErrVrrKr_rIr/rJrNrQrUrZr[r^rarbrcZ
_sget_noneZ
_sset_noners�__all__�	Exceptionr�r�r�r�r�r�r>r�r�r�r�r�r�rvrnr�rhr@rir�r�r�rur�rwrxryrzr�r�r�rRrr�r��RuntimeErrorr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�ZImpImporterr�r�r�r�r�r�r�rrr�r�r�r�r�rjr
rA�
IGNORECASEr*r�r"r&r�rErFr)rArprOr�rr�rXr�r�rKr�r��filterwarningsrbrOrcrdrrrr�<module>s�




b

 




.

{
3	
a
''





.!


		~	g0
5
	_vendor/pkg_resources/__pycache__/__init__.cpython-36.opt-1.pyc000064400000271767151733136500020437 0ustar003

�Pf>��^@s�dZddlmZddlZddlZddlZddlZddlZddlZddl	Z	ddl
Z
ddlZddlZddl
Z
ddlZddlZddlZddlZddlZddlZddlZddlZddlZddlmZyddlZWnek
r�ddlZYnXddlmZddlmZm Z m!Z!ddlm"Z"yddlm#Z#m$Z$m%Z%d	Z&Wnek
�rHd
Z&YnXddlm'Z(ddl)m*Z*m+Z+yddl,j-Z.e.j/Wnek
�r�dZ.YnXdd
lm0Z0ddlm1Z1e2d�e2d�e2d�e2d�d�ej3k�o�d�kn�r�dZ4ej5e4�dZ6dZ7Gdd�de8�Z9Gdd�de:�Z;Gdd�de;e1j<j=�Z>Gdd�de;e1j<j?�Z@dd�ZAiZBdd �ZCd!d"�ZDd#d$�ZEd%d&�ZFd'd(�ZGd)d*�ZHd+d,�ZId-d.�ZJZKd/d0�ZLd1d2d3d4d5d6d7d8d9d:d;d<d=d>d?d@dAdBdCdDdEdFdGdHdIdJdKdLdMdNdOdPddQddRdSdTdUdVdWdXdYdZd[d\d]d^d_d`dadbdcdddedfdgdhdidjdkdldmdndodpdqdrdsdtgFZMGdudL�dLeN�ZOGdvdM�dMeO�ZPGdwdx�dxeP�ZQGdydN�dNeO�ZRGdzdO�dOeO�ZSiZTej<dd�ZUdZVd{ZWd|ZXdZYd�ZZd}dp�Z[d~d3�Z\gfdd��Z]d�d��Z^d�d��Z_ej`d��Zaej`d��Zbe_Zcd�dU�Zdd�d2�ZeeeZfd�d4�Zgd�d5�Zhd�d�d6�Zid�d7�ZjGd�dc�dc�ZkGd�dd�ddek�ZlGd�dG�dGe:�ZmGd�d��d�en�ZoGd�dF�dFe:�ZpepZqGd�dP�dPer�ZsGd�dH�dH�Ztd�dE�Zud�dR�Zvd�dS�Zwd�dX�Zxd�dY�Zyd�dZ�Zzd�d�d[�Z{Gd�dj�dj�Z|e[e:e|�Gd�dk�dke|�Z}Gd�dl�dle}�Z~e~j�Gd�dh�dhe|�Z�e��Z�Gd�d��d�en�Z�Gd�d��d�e��Z�Gd�d��d�e	j��Z�Gd�dm�dme}�Z�e[e
j�e��Gd�de�dee��Z�Gd�df�dfe~�Z�Gd�dg�dge��Z�eCd�id��d�dn�Z�d�d�dB�Z�d�d�d��Z�e�e
j�e��d�d�d��Z�e�e:e��d�d��Z�d�d�d��Z�e�ej�e��e�e.d���r"e�e.j�e��eCd�id��eCd�id��d�do�Z�d�d��Z�d�d��Z�d�d?�Z�d�d�dq�Z�d�d��Z�e�ej�e��e�e
j�e��e�e.d���r�e�e.j�e��d�dÄZ�e�e:e��d�d]�Z�ifd�dƄZ�d�dȄZ�d�dʄZ�d�dV�Z�ej`d̃j�Z�ej`d�ej�ej�B�j�Z�Gd�dK�dKe:�Z�d�dЄZ�d�d҄Z�Gd�dI�dIe:�Z�Gd�dՄd�e��Z�Gd�dׄd�e��Z�e�e�e�d؜Z�d�dڄZ�Gd�d܄d�e��Z�d�dQ�Z�Gd�dJ�dJe1j�j��Z�d�d�Z�d�d�Z�d�d\�Z�d�d�Z�d�dW�Z�d�d�Z�ej�d�e9d	d�d�d�Z�e�e��fd�d��Z�e�d�d��Z�dS)�aZ
Package resource API
--------------------

A resource is a logical file contained within a package, or a logical
subdirectory thereof.  The package resource API expects resource names
to have their path parts separated with ``/``, *not* whatever the local
path separator is.  Do not use os.path operations to manipulate resource
names being passed into the API.

The package resource API is designed to work with normal filesystem packages,
.egg files, and unpacked .egg files.  It can also work in a limited way with
.zip files and with custom PEP 302 loaders that support the ``get_data()``
method.
�)�absolute_importN)�get_importer)�six)�urllib�map�filter)�utime)�mkdir�rename�unlinkTF)�open)�isdir�split)�appdirs)�	packagingzpip._vendor.packaging.versionz pip._vendor.packaging.specifiersz"pip._vendor.packaging.requirementszpip._vendor.packaging.markers�zLSupport for Python 3.0-3.2 has been dropped. Future versions will fail here.c@seZdZdZdS)�
PEP440Warningza
    Used when there is an issue with a version or specifier not complying with
    PEP 440.
    N)�__name__�
__module__�__qualname__�__doc__�rr�/usr/lib/python3.6/__init__.pyr[srcsteZdZ�fdd�Z�fdd�Z�fdd�Z�fdd�Z�fd	d
�Z�fdd�Z�fd
d�Z	dd�Z
dd�Z�ZS)�_SetuptoolsVersionMixincstt|�j�S)N)�superr�__hash__)�self)�	__class__rrrcsz _SetuptoolsVersionMixin.__hash__cs*t|t�rt|�|kStt|�j|�SdS)N)�
isinstance�tuplerr�__lt__)r�other)rrrr fs
z_SetuptoolsVersionMixin.__lt__cs*t|t�rt|�|kStt|�j|�SdS)N)rrrr�__le__)rr!)rrrr"ls
z_SetuptoolsVersionMixin.__le__cs*t|t�rt|�|kStt|�j|�SdS)N)rrrr�__eq__)rr!)rrrr#rs
z_SetuptoolsVersionMixin.__eq__cs*t|t�rt|�|kStt|�j|�SdS)N)rrrr�__ge__)rr!)rrrr$xs
z_SetuptoolsVersionMixin.__ge__cs*t|t�rt|�|kStt|�j|�SdS)N)rrrr�__gt__)rr!)rrrr%~s
z_SetuptoolsVersionMixin.__gt__cs*t|t�rt|�|kStt|�j|�SdS)N)rrrr�__ne__)rr!)rrrr&�s
z_SetuptoolsVersionMixin.__ne__cCst|�|S)N)r)r�keyrrr�__getitem__�sz#_SetuptoolsVersionMixin.__getitem__c#sjtjdtj��dddddd�j���fdd���fdd	�}tjd
tdd�x|t|��D]
}|VqXWdS)
Nz(\d+ | [a-z]+ | \.| -)�czfinal-�@)ZpreZpreview�-ZrcZdevc3s`xT�j|�D]F}�||�}|s|dkr*q|dd�dkrH|jd�Vqd|VqWdVdS)N�.��
0123456789��*z*final)r�zfill)�s�part)�component_re�replacerr�_parse_version_parts�s
z>_SetuptoolsVersionMixin.__iter__.<locals>._parse_version_partscszg}xl�|j��D]\}|jd�rd|dkrFx|rD|ddkrD|j�q*Wx|rb|ddkrb|j�qHW|j|�qWt|�S)Nr0z*finalr-z*final-Z00000000���r7)�lower�
startswith�pop�appendr)r2�partsr3)r6rr�old_parse_version�s
z;_SetuptoolsVersionMixin.__iter__.<locals>.old_parse_versiona�You have iterated over the result of pkg_resources.parse_version. This is a legacy behavior which is inconsistent with the new version class introduced in setuptools 8.0. In most cases, conversion to a tuple is unnecessary. For comparison of versions, sort the Version instances directly. If you have another use case requiring the tuple, please file a bug with the setuptools project describing that need.r-)�
stacklevel)�re�compile�VERBOSE�get�warnings�warn�RuntimeWarning�str)rr=r3r)r6r4r5r�__iter__�s
z _SetuptoolsVersionMixin.__iter__)
rrrrr r"r#r$r%r&r(rG�
__classcell__rr)rrrbsrc@seZdZdS)�SetuptoolsVersionN)rrrrrrrrI�srIc@seZdZdS)�SetuptoolsLegacyVersionN)rrrrrrrrJ�srJcCs*yt|�Stjjk
r$t|�SXdS)N)rIr�version�InvalidVersionrJ)�vrrr�
parse_version�srNcKs"t�j|�tjtj||��dS)N)�globals�update�_state_vars�dict�fromkeys)Zvartype�kwrrr�_declare_state�srUcCs<i}t�}x,tj�D] \}}|d|||�||<qW|S)NZ_sget_)rOrQ�items)�state�g�krMrrr�__getstate__�s
rZcCs<t�}x0|j�D]$\}}|dt|||||�qW|S)NZ_sset_)rOrVrQ)rWrXrYrMrrr�__setstate__�s r[cCs|j�S)N)�copy)�valrrr�
_sget_dict�sr^cCs|j�|j|�dS)N)�clearrP)r'�obrWrrr�
_sset_dict�sracCs|j�S)N)rZ)r]rrr�_sget_object�srbcCs|j|�dS)N)r[)r'r`rWrrr�_sset_object�srccGsdS)Nr)�argsrrr�<lambda>�srecCsbt�}tj|�}|dk	r^tjdkr^y&ddjt�dd��|jd�f}Wntk
r\YnX|S)aZReturn this platform's maximum compatible version.

    distutils.util.get_platform() normally reports the minimum version
    of Mac OS X that would be required to *use* extensions produced by
    distutils.  But what we want when checking compatibility is to know the
    version of Mac OS X that we are *running*.  To allow usage of packages that
    explicitly require a newer version of Mac OS X, we must also know the
    current version of the OS.

    If this condition occurs for any other platform with a version in its
    platform strings, this function should be extended accordingly.
    N�darwinzmacosx-%s-%sr,�r)	�get_build_platform�macosVersionString�match�sys�platform�join�_macosx_vers�group�
ValueError)�plat�mrrr�get_supported_platform�s

&rs�require�
run_script�get_provider�get_distribution�load_entry_point�
get_entry_map�get_entry_info�iter_entry_points�resource_string�resource_stream�resource_filename�resource_listdir�resource_exists�resource_isdir�declare_namespace�working_set�add_activation_listener�find_distributions�set_extraction_path�cleanup_resources�get_default_cache�Environment�
WorkingSet�ResourceManager�Distribution�Requirement�
EntryPoint�ResolutionError�VersionConflict�DistributionNotFound�UnknownExtra�ExtractionError�parse_requirements�	safe_name�safe_version�get_platform�compatible_platforms�yield_lines�split_sections�
safe_extra�to_filename�invalid_marker�evaluate_marker�ensure_directory�normalize_path�EGG_DIST�BINARY_DIST�SOURCE_DIST�
CHECKOUT_DIST�DEVELOP_DIST�IMetadataProvider�IResourceProvider�FileMetadata�PathMetadata�EggMetadata�
EmptyProvider�empty_provider�NullProvider�EggProvider�DefaultProvider�ZipProvider�register_finder�register_namespace_handler�register_loader_type�fixup_namespace_packagesr�run_main�AvailableDistributionsc@seZdZdZdd�ZdS)r�z.Abstract base for dependency resolution errorscCs|jjt|j�S)N)rr�reprrd)rrrr�__repr__IszResolutionError.__repr__N)rrrrr�rrrrr�Fsc@s<eZdZdZdZedd��Zedd��Zdd�Zd	d
�Z	dS)r�z�
    An already-installed version conflicts with the requested version.

    Should be initialized with the installed Distribution and the requested
    Requirement.
    z3{self.dist} is installed but {self.req} is requiredcCs
|jdS)Nr)rd)rrrr�distWszVersionConflict.distcCs
|jdS)Nr-)rd)rrrr�req[szVersionConflict.reqcCs|jjft��S)N)�	_template�format�locals)rrrr�report_szVersionConflict.reportcCs|s|S|j|f}t|�S)zt
        If required_by is non-empty, return a version of self that is a
        ContextualVersionConflict.
        )rd�ContextualVersionConflict)r�required_byrdrrr�with_contextbszVersionConflict.with_contextN)
rrrrr��propertyr�r�r�r�rrrrr�Msc@s&eZdZdZejdZedd��ZdS)r�z�
    A VersionConflict that accepts a third parameter, the set of the
    requirements that required the installed Distribution.
    z by {self.required_by}cCs
|jdS)Nrg)rd)rrrrr�usz%ContextualVersionConflict.required_byN)rrrrr�r�r�r�rrrrr�ms
r�c@sHeZdZdZdZedd��Zedd��Zedd��Zd	d
�Z	dd�Z
d
S)r�z&A requested distribution was not foundzSThe '{self.req}' distribution was not found and is required by {self.requirers_str}cCs
|jdS)Nr)rd)rrrrr��szDistributionNotFound.reqcCs
|jdS)Nr-)rd)rrrr�	requirers�szDistributionNotFound.requirerscCs|js
dSdj|j�S)Nzthe applicationz, )r�rm)rrrr�
requirers_str�sz"DistributionNotFound.requirers_strcCs|jjft��S)N)r�r�r�)rrrrr��szDistributionNotFound.reportcCs|j�S)N)r�)rrrr�__str__�szDistributionNotFound.__str__N)rrrrr�r�r�r�r�r�r�rrrrr�zsc@seZdZdZdS)r�z>Distribution doesn't have an "extra feature" of the given nameN)rrrrrrrrr��srgr-cCs|t|<dS)aRegister `provider_factory` to make providers for `loader_type`

    `loader_type` is the type or class of a PEP 302 ``module.__loader__``,
    and `provider_factory` is a function that, passed a *module* object,
    returns an ``IResourceProvider`` for that module.
    N)�_provider_factories)Zloader_typeZprovider_factoryrrrr��scCstt|t�r$tj|�p"tt|��dSytj|}Wn&tk
rXt	|�tj|}YnXt
|dd�}tt|�|�S)z?Return an IResourceProvider for the named module or requirementr�
__loader__N)
rr�r��findrtrFrk�modules�KeyError�
__import__�getattr�
_find_adapterr�)ZmoduleOrReq�module�loaderrrrrv�s
cCsd|s\tj�d}|dkrLd}tjj|�rLttd�rLtj|�}d|krL|d}|j|j	d��|dS)Nr�z0/System/Library/CoreServices/SystemVersion.plist�	readPlistZProductVersionr,)
rlZmac_ver�os�path�exists�hasattr�plistlibr�r;r)�_cacherKZplistZ
plist_contentrrrrn�s

rncCsddd�j||�S)NZppc)ZPowerPCZPower_Macintosh)rB)�machinerrr�_macosx_arch�sr�cCs�yddlm}Wn tk
r0ddlm}YnX|�}tjdkr�|jd�r�y<t�}tj	�dj
dd�}dt|d�t|d	�t|�fSt
k
r�YnX|S)
z�Return this platform's string for platform-specific distributions

    XXX Currently this is the same as ``distutils.util.get_platform()``, but it
    needs some hacks for Linux and Mac OS X.
    r)r�rfzmacosx-�� �_zmacosx-%d.%d-%sr-)�	sysconfigr��ImportErrorZdistutils.utilrkrlr9rnr��unamer5�intr�rp)r�rqrKr�rrrrh�srhzmacosx-(\d+)\.(\d+)-(.*)zdarwin-(\d+)\.(\d+)\.(\d+)-(.*)cCs�|dks|dks||krdStj|�}|r�tj|�}|s�tj|�}|r�t|jd��}d|jd�|jd�f}|dkr||dks�|dkr�|d	kr�dSd
S|jd�|jd�ks�|jd�|jd�kr�d
St|jd��t|jd��kr�d
SdSd
S)z�Can code for the `provided` platform run on the `required` platform?

    Returns true if either platform is ``None``, or the platforms are equal.

    XXX Needs compatibility checks for Linux and other unixy OSes.
    NTr-z%s.%srg�z10.3r/z10.4Fr)rirj�darwinVersionStringr�ro)ZprovidedZrequiredZreqMacZprovMacZ
provDarwinZdversionZmacosversionrrrr��s*


cCs<tjd�j}|d}|j�||d<t|�dj||�dS)z@Locate distribution `dist_spec` and run its `script_name` scriptr-rrN)rk�	_getframe�	f_globalsr_rtru)Z	dist_spec�script_name�ns�namerrrrus
cCs@t|tj�rtj|�}t|t�r(t|�}t|t�s<td|��|S)z@Return a current distribution object for a Requirement or stringz-Expected string, Requirement, or Distribution)rr�string_typesr��parservr��	TypeError)r�rrrrw)s



cCst|�j||�S)zDReturn `name` entry point of `group` for `dist` or raise ImportError)rwrx)r�ror�rrrrx4scCst|�j|�S)z=Return the entry point map for `group`, or the full entry map)rwry)r�rorrrry9scCst|�j||�S)z<Return the EntryPoint object for `group`+`name`, or ``None``)rwrz)r�ror�rrrrz>sc@s<eZdZdd�Zdd�Zdd�Zdd�Zd	d
�Zdd�Zd
S)r�cCsdS)z;Does the package's distribution contain the named metadata?Nr)r�rrr�has_metadataDszIMetadataProvider.has_metadatacCsdS)z'The named metadata resource as a stringNr)r�rrr�get_metadataGszIMetadataProvider.get_metadatacCsdS)z�Yield named metadata resource as list of non-blank non-comment lines

       Leading and trailing whitespace is stripped from each line, and lines
       with ``#`` as the first non-blank character are omitted.Nr)r�rrr�get_metadata_linesJsz$IMetadataProvider.get_metadata_linescCsdS)z>Is the named metadata a directory?  (like ``os.path.isdir()``)Nr)r�rrr�metadata_isdirPsz IMetadataProvider.metadata_isdircCsdS)z?List of metadata names in the directory (like ``os.listdir()``)Nr)r�rrr�metadata_listdirSsz"IMetadataProvider.metadata_listdircCsdS)z=Execute the named script in the supplied namespace dictionaryNr)r��	namespacerrrruVszIMetadataProvider.run_scriptN)	rrrr�r�r�r�r�rurrrrr�Csc@s@eZdZdZdd�Zdd�Zdd�Zdd	�Zd
d�Zdd
�Z	dS)r�z3An object that provides access to package resourcescCsdS)zdReturn a true filesystem path for `resource_name`

        `manager` must be an ``IResourceManager``Nr)�manager�
resource_namerrr�get_resource_filename]sz'IResourceProvider.get_resource_filenamecCsdS)ziReturn a readable file-like object for `resource_name`

        `manager` must be an ``IResourceManager``Nr)r�r�rrr�get_resource_streambsz%IResourceProvider.get_resource_streamcCsdS)zmReturn a string containing the contents of `resource_name`

        `manager` must be an ``IResourceManager``Nr)r�r�rrr�get_resource_stringgsz%IResourceProvider.get_resource_stringcCsdS)z,Does the package contain the named resource?Nr)r�rrr�has_resourcelszIResourceProvider.has_resourcecCsdS)z>Is the named resource a directory?  (like ``os.path.isdir()``)Nr)r�rrrr�osz IResourceProvider.resource_isdircCsdS)z?List of resource names in the directory (like ``os.listdir()``)Nr)r�rrrrrsz"IResourceProvider.resource_listdirN)
rrrrr�r�r�r�r�rrrrrr�Zsc@s�eZdZdZd'dd�Zedd��Zedd��Zd	d
�Zdd�Z	d
d�Z
d(dd�Zdd�Zdd�Z
d)dd�Zd*dd�Zd+dd�Zdd�Zd,dd �Zd!d"�Zd#d$�Zd%d&�ZdS)-r�zDA collection of active distributions on sys.path (or a similar list)NcCsBg|_i|_i|_g|_|dkr&tj}x|D]}|j|�q,WdS)z?Create working set from list of path entries (default=sys.path)N)�entries�
entry_keys�by_key�	callbacksrkr��	add_entry)rr��entryrrr�__init__ys
zWorkingSet.__init__cCsZ|�}yddlm}Wntk
r*|SXy|j|�Wntk
rT|j|�SX|S)z1
        Prepare the master working set.
        r)�__requires__)�__main__r�r�rtr��_build_from_requirements)�cls�wsr�rrr�
_build_master�szWorkingSet._build_mastercCsn|g�}t|�}|j|t��}x|D]}|j|�q$Wx"tjD]}||jkr>|j|�q>W|jtjdd�<|S)zQ
        Build a working set from a requirement spec. Rewrites sys.path.
        N)r��resolver��addrkr�r�r�)rZreq_specr�reqs�distsr�r�rrrr�s

z#WorkingSet._build_from_requirementscCs@|jj|g�|jj|�x t|d�D]}|j||d�q&WdS)a�Add a path item to ``.entries``, finding any distributions on it

        ``find_distributions(entry, True)`` is used to find distributions
        corresponding to the path entry, and they are added.  `entry` is
        always appended to ``.entries``, even if it is already present.
        (This is because ``sys.path`` can contain the same value more than
        once, and the ``.entries`` of the ``sys.path`` WorkingSet should always
        equal ``sys.path``.)
        TFN)r��
setdefaultr�r;r�r)rr�r�rrrr��s
zWorkingSet.add_entrycCs|jj|j�|kS)z9True if `dist` is the active distribution for its project)r�rBr')rr�rrr�__contains__�szWorkingSet.__contains__cCs,|jj|j�}|dk	r(||kr(t||��|S)a�Find a distribution matching requirement `req`

        If there is an active distribution for the requested project, this
        returns it as long as it meets the version requirement specified by
        `req`.  But, if there is an active distribution for the project and it
        does *not* meet the `req` requirement, ``VersionConflict`` is raised.
        If there is no active distribution for the requested project, ``None``
        is returned.
        N)r�rBr'r�)rr�r�rrrr��s

zWorkingSet.findccsPxJ|D]B}|j|�}|dkr6x*|j�D]
}|Vq&Wq||kr||VqWdS)aYield entry point objects from `group` matching `name`

        If `name` is None, yields all entry points in `group` from all
        distributions in the working set, otherwise only ones matching
        both `group` and `name` are yielded (in distribution order).
        N)ry�values)rror�r�r��eprrrr{�s

zWorkingSet.iter_entry_pointscCs>tjd�j}|d}|j�||d<|j|�dj||�dS)z?Locate distribution for `requires` and run `script_name` scriptr-rrN)rkr�r�r_rtru)r�requiresr�r�r�rrrru�s
zWorkingSet.run_scriptccsTi}xJ|jD]@}||jkrqx.|j|D] }||kr(d||<|j|Vq(WqWdS)z�Yield distributions for non-duplicate projects in the working set

        The yield order is the order in which the items' path entries were
        added to the working set.
        r-N)r�r�r�)r�seen�itemr'rrrrG�s
zWorkingSet.__iter__TFcCs�|r|j|j||d�|dkr$|j}|jj|g�}|jj|jg�}|rX|j|jkrXdS||j|j<|j|krz|j|j�|j|kr�|j|j�|j|�dS)aAdd `dist` to working set, associated with `entry`

        If `entry` is unspecified, it defaults to the ``.location`` of `dist`.
        On exit from this routine, `entry` is added to the end of the working
        set's ``.entries`` (if it wasn't already present).

        `dist` is only added to the working set if it's for a project that
        doesn't already have a distribution in the set, unless `replace=True`.
        If it's added, any callbacks registered with the ``subscribe()`` method
        will be called.
        )r5N)	�	insert_onr��locationr�rr'r�r;�
_added_new)rr�r��insertr5�keysZkeys2rrrr�s

zWorkingSet.addcCs|t|�ddd�}i}i}g}t�}tjt�}	�xF|�rv|jd�}
|
|krLq2|j|
�sXq2|j|
j�}|dk�r|j	j|
j�}|dks�||
kr�|r�|}|dkr�|dkr�t
|j�}nt
g�}tg�}|j
|
||�}||
j<|dkr�|	j|
d�}
t|
|
��|j|�||
k�r"|	|
}t||
�j|��|j|
j�ddd�}|j|�x(|D] }|	|j|
j�|
j||<�qHWd||
<q2W|S)aeList all distributions needed to (recursively) meet `requirements`

        `requirements` must be a sequence of ``Requirement`` objects.  `env`,
        if supplied, should be an ``Environment`` instance.  If
        not supplied, it defaults to all distributions available within any
        entry or distribution in the working set.  `installer`, if supplied,
        will be invoked with each requirement that cannot be met by an
        already-installed distribution; it should return a ``Distribution`` or
        ``None``.

        Unless `replace_conflicting=True`, raises a VersionConflict exception if
        any requirements are found on the path that have the correct name but
        the wrong version.  Otherwise, if an `installer` is supplied it will be
        invoked to obtain the correct version of the requirement and activate
        it.
        Nr-rTr7r7)�list�
_ReqExtras�collections�defaultdict�setr:�markers_passrBr'r�r�r�r��
best_matchr�r;r�r�r�extras�extendr�project_name)r�requirements�env�	installerZreplace_conflictingZ	processedZbestZto_activateZ
req_extrasr�r�r�rr�Z
dependent_reqZnew_requirementsZnew_requirementrrrrsJ









zWorkingSet.resolvecCst|�}|j�i}i}|dkr4t|j�}||7}n||}|jg�}	tt|	j|��x�|D]�}
x�||
D]x}|j�g}y|	j|||�}
Wn4t	k
r�}z|||<|r�wjnPWYdd}~XqjXtt|	j|
��|j
tj|
��PqjWq\Wt|�}|j�||fS)asFind all activatable distributions in `plugin_env`

        Example usage::

            distributions, errors = working_set.find_plugins(
                Environment(plugin_dirlist)
            )
            # add plugins+libs to sys.path
            map(working_set.add, distributions)
            # display errors
            print('Could not load', errors)

        The `plugin_env` should be an ``Environment`` instance that contains
        only distributions that are in the project's "plugin directory" or
        directories. The `full_env`, if supplied, should be an ``Environment``
        contains all currently-available distributions.  If `full_env` is not
        supplied, one is created automatically from the ``WorkingSet`` this
        method is called on, which will typically mean that every directory on
        ``sys.path`` will be scanned for distributions.

        `installer` is a standard installer callback as used by the
        ``resolve()`` method. The `fallback` flag indicates whether we should
        attempt to resolve older versions of a plugin if the newest version
        cannot be resolved.

        This method returns a 2-tuple: (`distributions`, `error_info`), where
        `distributions` is a list of the distributions found in `plugin_env`
        that were loadable, along with any other distributions that are needed
        to resolve their dependencies.  `error_info` is a dictionary mapping
        unloadable plugin distributions to an exception instance describing the
        error that occurred. Usually this will be a ``DistributionNotFound`` or
        ``VersionConflict`` instance.
        N)
r�sortr�r�rrr�as_requirementrr�rPrRrS)rZ
plugin_envZfull_envr ZfallbackZplugin_projectsZ
error_infoZ
distributionsrZ
shadow_setrr�r�Z	resolveesrMrrr�find_pluginsks4$





zWorkingSet.find_pluginscGs*|jt|��}x|D]}|j|�qW|S)a�Ensure that distributions matching `requirements` are activated

        `requirements` must be a string or a (possibly-nested) sequence
        thereof, specifying the distributions and versions required.  The
        return value is a sequence of the distributions that needed to be
        activated to fulfill the requirements; all relevant distributions are
        included, even if they were already activated in this working set.
        )rr�r)rrZneededr�rrrrt�s	
zWorkingSet.requirecCs<||jkrdS|jj|�|s"dSx|D]}||�q(WdS)z�Invoke `callback` for all distributions

        If `existing=True` (default),
        call on all existing ones, as well.
        N)r�r;)r�callback�existingr�rrr�	subscribe�s

zWorkingSet.subscribecCsx|jD]}||�qWdS)N)r�)rr�r$rrrr�szWorkingSet._added_newcCs,|jdd�|jj�|jj�|jdd�fS)N)r�r�r\r�r�)rrrrrZ�szWorkingSet.__getstate__cCs@|\}}}}|dd�|_|j�|_|j�|_|dd�|_dS)N)r�r\r�r�r�)rZe_k_b_cr�rr�r�rrrr[�s


zWorkingSet.__setstate__)N)N)NTF)NNF)NNT)T)rrrrr��classmethodrrr�r	r�r{rurGrrr#rtr&rrZr[rrrrr�vs(




Q
S
c@seZdZdZdd�ZdS)rz>
    Map each requirement to the extras that demanded it.
    cs.�fdd�|j�f�dD�}�jp,t|�S)z�
        Evaluate markers for req against each extra that
        demanded it.

        Return False if the req has a marker and fails
        evaluation. Otherwise, return True.
        c3s|]}�jjd|i�VqdS)�extraN)�marker�evaluate)�.0r()r�rr�	<genexpr>�sz*_ReqExtras.markers_pass.<locals>.<genexpr>N)N)rBr)�any)rr�Zextra_evalsr)r�rr�s	
z_ReqExtras.markers_passN)rrrrrrrrrr�src@sxeZdZdZde�efdd�Zdd�Zdd�Zdd	d
�Z	dd�Z
d
d�Zddd�Zddd�Z
dd�Zdd�Zdd�ZdS)r�z5Searchable snapshot of distributions on a search pathNcCs i|_||_||_|j|�dS)a!Snapshot distributions available on a search path

        Any distributions found on `search_path` are added to the environment.
        `search_path` should be a sequence of ``sys.path`` items.  If not
        supplied, ``sys.path`` is used.

        `platform` is an optional string specifying the name of the platform
        that platform-specific distributions must be compatible with.  If
        unspecified, it defaults to the current platform.  `python` is an
        optional string naming the desired version of Python (e.g. ``'3.3'``);
        it defaults to the current version.

        You may explicitly set `platform` (and/or `python`) to ``None`` if you
        wish to map *all* distributions, not just those compatible with the
        running platform or Python version.
        N)�_distmaprl�python�scan)r�search_pathrlr/rrrr�szEnvironment.__init__cCs.|jdks |jdks |j|jko,t|j|j�S)z�Is distribution `dist` acceptable for this environment?

        The distribution must match the platform and python version
        requirements specified when this environment was created, or False
        is returned.
        N)r/�
py_versionr�rl)rr�rrr�can_addszEnvironment.can_addcCs|j|jj|�dS)z"Remove `dist` from the environmentN)r.r'�remove)rr�rrrr4(szEnvironment.removecCs<|dkrtj}x(|D] }xt|�D]}|j|�q"WqWdS)adScan `search_path` for distributions usable in this environment

        Any distributions found are added to the environment.
        `search_path` should be a sequence of ``sys.path`` items.  If not
        supplied, ``sys.path`` is used.  Only distributions conforming to
        the platform/python version defined at initialization are added.
        N)rkr�r�r)rr1rr�rrrr0,s

zEnvironment.scancCs|j�}|jj|g�S)aReturn a newest-to-oldest list of distributions for `project_name`

        Uses case-insensitive `project_name` comparison, assuming all the
        project's distributions use their project's name converted to all
        lowercase as their key.

        )r8r.rB)rrZdistribution_keyrrrr(;szEnvironment.__getitem__cCsL|j|�rH|j�rH|jj|jg�}||krH|j|�|jtjd�dd�dS)zLAdd `dist` if we ``can_add()`` it and it has not already been added
        �hashcmpT)r'�reverseN)	r3�has_versionr.rr'r;r!�operator�
attrgetter)rr�rrrrrFs

zEnvironment.addcCsB|j|�}|dk	r|Sx||jD]}||kr"|Sq"W|j||�S)a�Find distribution best matching `req` and usable on `working_set`

        This calls the ``find(req)`` method of the `working_set` to see if a
        suitable distribution is already active.  (This may raise
        ``VersionConflict`` if an unsuitable version of the project is already
        active in the specified `working_set`.)  If a suitable distribution
        isn't active, this method returns the newest distribution in the
        environment that meets the ``Requirement`` in `req`.  If no suitable
        distribution is found, and `installer` is supplied, then the result of
        calling the environment's ``obtain(req, installer)`` method will be
        returned.
        N)r�r'�obtain)rr�r�r r�rrrrOs
zEnvironment.best_matchcCs|dk	r||�SdS)a�Obtain a distribution matching `requirement` (e.g. via download)

        Obtain a distro that matches requirement (e.g. via download).  In the
        base ``Environment`` class, this routine just returns
        ``installer(requirement)``, unless `installer` is None, in which case
        None is returned instead.  This method is a hook that allows subclasses
        to attempt other ways of obtaining a distribution before falling back
        to the `installer` argument.Nr)rZrequirementr rrrr:es	zEnvironment.obtainccs&x |jj�D]}||r|VqWdS)z=Yield the unique project names of the available distributionsN)r.r)rr'rrrrGqszEnvironment.__iter__cCs^t|t�r|j|�nDt|t�rLx8|D] }x||D]}|j|�q4Wq&Wntd|f��|S)z2In-place addition of a distribution or environmentzCan't add %r to environment)rr�rr�r�)rr!Zprojectr�rrr�__iadd__ws


zEnvironment.__iadd__cCs.|jgddd�}x||fD]}||7}qW|S)z4Add an environment or distribution to an environmentN)rlr/)r)rr!�newrrrr�__add__�szEnvironment.__add__)N)N)N)rrrrrs�PY_MAJORr�r3r4r0r(rrr:rGr;r=rrrrr�s
	

c@seZdZdZdS)r�aTAn error occurred extracting a resource

    The following attributes are available from instances of this exception:

    manager
        The resource manager that raised this exception

    cache_path
        The base directory for resource extraction

    original_error
        The exception instance that caused extraction to fail
    N)rrrrrrrrr��s
c@s�eZdZdZdZdd�Zdd�Zdd�Zd	d
�Zdd�Z	d
d�Z
dd�Zdd�Zffdd�Z
edd��Zdd�Zdd�Zddd�ZdS)r�z'Manage resource extraction and packagesNcCs
i|_dS)N)�cached_files)rrrrr��szResourceManager.__init__cCst|�j|�S)zDoes the named resource exist?)rvr�)r�package_or_requirementr�rrrr��szResourceManager.resource_existscCst|�j|�S)z,Is the named resource an existing directory?)rvr�)rr@r�rrrr��szResourceManager.resource_isdircCst|�j||�S)z4Return a true filesystem path for specified resource)rvr�)rr@r�rrrr~�sz!ResourceManager.resource_filenamecCst|�j||�S)z9Return a readable file-like object for specified resource)rvr�)rr@r�rrrr}�szResourceManager.resource_streamcCst|�j||�S)z%Return specified resource as a string)rvr�)rr@r�rrrr|�szResourceManager.resource_stringcCst|�j|�S)z1List the contents of the named resource directory)rvr)rr@r�rrrr�sz ResourceManager.resource_listdircCsRtj�d}|jpt�}tjd�j�}t|jft	���}||_
||_||_|�dS)z5Give an error message for problems extracting file(s)r-a
            Can't extract file(s) to egg cache

            The following error occurred while trying to extract file(s) to the Python egg
            cache:

              {old_exc}

            The Python egg cache directory is currently set to:

              {cache_path}

            Perhaps your account does not have write access to this directory?  You can
            change the cache directory by setting the PYTHON_EGG_CACHE environment
            variable to point to an accessible directory.
            N)
rk�exc_info�extraction_pathr��textwrap�dedent�lstripr�r�r�r��
cache_pathZoriginal_error)r�old_excrF�tmpl�errrrr�extraction_error�s
z ResourceManager.extraction_errorc	Cs^|jp
t�}tjj||df|��}yt|�Wn|j�YnX|j|�d|j|<|S)a�Return absolute location in cache for `archive_name` and `names`

        The parent directory of the resulting path will be created if it does
        not already exist.  `archive_name` should be the base filename of the
        enclosing egg (which may not be the name of the enclosing zipfile!),
        including its ".egg" extension.  `names`, if provided, should be a
        sequence of path name parts "under" the egg's extraction location.

        This method should only be called by resource providers that need to
        obtain an extraction location, and only for names they intend to
        extract, as it tracks the generated names for possible cleanup later.
        z-tmpr-)	rBr�r�r�rm�_bypass_ensure_directoryrJ�_warn_unsafe_extraction_pathr?)rZarchive_name�namesZextract_pathZtarget_pathrrr�get_cache_path�s


zResourceManager.get_cache_pathcCsXtjdkr |jtjd�r dStj|�j}|tj@s@|tj@rTd|}tj	|t
�dS)aN
        If the default extraction path is overridden and set to an insecure
        location, such as /tmp, it opens up an opportunity for an attacker to
        replace an extracted file with an unauthorized payload. Warn the user
        if a known insecure location is used.

        See Distribute #375 for more details.
        �ntZwindirNz�%s is writable by group/others and vulnerable to attack when used with get_resource_filename. Consider a more secure location (set with .set_extraction_path or the PYTHON_EGG_CACHE environment variable).)r�r�r9�environ�stat�st_mode�S_IWOTH�S_IWGRPrCrD�UserWarning)r��mode�msgrrrrL�s
z,ResourceManager._warn_unsafe_extraction_pathcCs.tjdkr*tj|�jdBd@}tj||�dS)a4Perform any platform-specific postprocessing of `tempname`

        This is where Mac header rewrites should be done; other platforms don't
        have anything special they should do.

        Resource providers should call this method ONLY after successfully
        extracting a compressed resource.  They must NOT call it on resources
        that are already in the filesystem.

        `tempname` is the current (temporary) name of the file, and `filename`
        is the name it will be renamed to by the caller after this routine
        returns.
        �posiximi�N)r�r�rQrR�chmod)rZtempname�filenamerVrrr�postprocesss
zResourceManager.postprocesscCs|jrtd��||_dS)a�Set the base path where resources will be extracted to, if needed.

        If you do not call this routine before any extractions take place, the
        path defaults to the return value of ``get_default_cache()``.  (Which
        is based on the ``PYTHON_EGG_CACHE`` environment variable, with various
        platform-specific fallbacks.  See that routine's documentation for more
        details.)

        Resources are extracted to subdirectories of this path based upon
        information given by the ``IResourceProvider``.  You may set this to a
        temporary directory, but then you must call ``cleanup_resources()`` to
        delete the extracted files when done.  There is no guarantee that
        ``cleanup_resources()`` will be able to remove all extracted files.

        (Note: you may not change the extraction path for a given resource
        manager once resources have been extracted, unless you first call
        ``cleanup_resources()``.)
        z5Can't change extraction path, files already extractedN)r?rprB)rr�rrrr�)sz#ResourceManager.set_extraction_pathFcCsdS)aB
        Delete all extracted resource files and directories, returning a list
        of the file and directory names that could not be successfully removed.
        This function does not have any concurrency protection, so it should
        generally only be called when the extraction path is a temporary
        directory exclusive to a single process.  This method is not
        automatically called; you must call it explicitly or register it as an
        ``atexit`` function if you wish to ensure cleanup of a temporary
        directory used for extractions.
        Nr)r�forcerrrr�Csz!ResourceManager.cleanup_resources)F)rrrrrBr�r�r�r~r}r|rrJrN�staticmethodrLr[r�r�rrrrr��scCstjjd�ptjdd�S)z�
    Return the ``PYTHON_EGG_CACHE`` environment variable
    or a platform-relevant user cache dir for an app
    named "Python-Eggs".
    ZPYTHON_EGG_CACHEzPython-Eggs)Zappname)r�rPrBrZuser_cache_dirrrrrr�QscCstjdd|�S)z�Convert an arbitrary string to a standard distribution name

    Any runs of non-alphanumeric/. characters are replaced with a single '-'.
    z[^A-Za-z0-9.]+r+)r?�sub)r�rrrr�]scCsDyttjj|��Stjjk
r>|jdd�}tjdd|�SXdS)zB
    Convert an arbitrary string to a standard version string
    r�r,z[^A-Za-z0-9.]+r+N)rFrrK�VersionrLr5r?r^)rKrrrr�es
cCstjdd|�j�S)z�Convert an arbitrary string to a standard 'extra' name

    Any runs of non-alphanumeric characters are replaced with a single '_',
    and the result is always lowercased.
    z[^A-Za-z0-9.-]+r�)r?r^r8)r(rrrr�qscCs|jdd�S)z|Convert a project or version name to its filename-escaped form

    Any '-' characters are currently replaced with '_'.
    r+r�)r5)r�rrrr�zscCs>yt|�Wn,tk
r8}zd|_d|_|Sd}~XnXdS)zo
    Validate text as a PEP 508 environment marker; return an exception
    if invalid or False otherwise.
    NF)r��SyntaxErrorrZ�lineno)�text�errrr��scCsHytjj|�}|j�Stjjk
rB}zt|��WYdd}~XnXdS)z�
    Evaluate a PEP 508 environment marker.
    Return a boolean indicating the marker result in this environment.
    Raise SyntaxError if marker is invalid.

    This implementation uses the 'pyparsing' module.
    N)rZmarkersZMarkerr*Z
InvalidMarkerr`)rbr(r)rcrrrr��s
c@s�eZdZdZdZdZdZdd�Zdd�Zdd�Z	d	d
�Z
dd�Zd
d�Zdd�Z
dd�Zdd�Zdd�Zdd�Zdd�Zdd�Zdd�Zdd �Zd!d"�Zd#d$�Zd%d&�ZdS)'r�zETry to implement resources and metadata for arbitrary PEP 302 loadersNcCs(t|dd�|_tjjt|dd��|_dS)Nr��__file__r�)r�r�r�r��dirname�module_path)rr�rrrr��szNullProvider.__init__cCs|j|j|�S)N)�_fnrf)rr�r�rrrr��sz"NullProvider.get_resource_filenamecCstj|j||��S)N)�io�BytesIOr�)rr�r�rrrr��sz NullProvider.get_resource_streamcCs|j|j|j|��S)N)�_getrgrf)rr�r�rrrr��sz NullProvider.get_resource_stringcCs|j|j|j|��S)N)�_hasrgrf)rr�rrrr��szNullProvider.has_resourcecCs|jo|j|j|j|��S)N)�egg_inforkrg)rr�rrrr��szNullProvider.has_metadatacCs2|js
dS|j|j|j|��}tjr.|jd�S|S)Nr�zutf-8)rlrjrgrZPY3�decode)rr��valuerrrr��szNullProvider.get_metadatacCst|j|��S)N)r�r�)rr�rrrr��szNullProvider.get_metadata_linescCs|j|j|j|��S)N)�_isdirrgrf)rr�rrrr��szNullProvider.resource_isdircCs|jo|j|j|j|��S)N)rlrorg)rr�rrrr��szNullProvider.metadata_isdircCs|j|j|j|��S)N)�_listdirrgrf)rr�rrrr�szNullProvider.resource_listdircCs|jr|j|j|j|��SgS)N)rlrprg)rr�rrrr��szNullProvider.metadata_listdirc
Cs�d|}|j|�std|��|j|�jdd�}|jdd�}|j|j|�}||d<tjj|�r�t	|�j
�}t||d�}t|||�n>dd	l
m}t|�d|jd�|f||<t||d�}	t|	||�dS)
Nzscripts/zNo script named %rz
�
�
rd�execr)�cache)r�r�r�r5rgrlr�r�r�r�readr@rs�	linecachert�lenr)
rr�r�ZscriptZscript_textZscript_filename�source�codertZscript_coderrrru�s
zNullProvider.run_scriptcCstd��dS)Nz9Can't perform this operation for unregistered loader type)�NotImplementedError)rr�rrrrk�szNullProvider._hascCstd��dS)Nz9Can't perform this operation for unregistered loader type)rz)rr�rrrro�szNullProvider._isdircCstd��dS)Nz9Can't perform this operation for unregistered loader type)rz)rr�rrrrp�szNullProvider._listdircCs |rtjj|f|jd���S|S)N�/)r�r�rmr)r�baser�rrrrg�szNullProvider._fncCs$t|jd�r|jj|�Std��dS)N�get_dataz=Can't perform this operation for loaders without 'get_data()')r�r�r}rz)rr�rrrrj�szNullProvider._get)rrrr�egg_namerlr�r�r�r�r�r�r�r�r�r�r�rr�rurkrorprgrjrrrrr��s,c@s eZdZdZdd�Zdd�ZdS)r�z&Provider based on a virtual filesystemcCstj||�|j�dS)N)r�r��
_setup_prefix)rr�rrrr�szEggProvider.__init__cCs^|j}d}xN||krXt|�rBtjj|�|_tjj|d�|_||_P|}tjj	|�\}}qWdS)NzEGG-INFO)
rf�_is_unpacked_eggr�r��basenamer~rmrl�egg_rootr)rr��oldr|rrrr
s
zEggProvider._setup_prefixN)rrrrr�rrrrrr�sc@sDeZdZdZdd�Zdd�Zdd�Zdd	�Zd
d�Ze	dd
��Z
dS)r�z6Provides access to package resources in the filesystemcCstjj|�S)N)r�r�r�)rr�rrrrkszDefaultProvider._hascCstjj|�S)N)r�r�r
)rr�rrrroszDefaultProvider._isdircCs
tj|�S)N)r��listdir)rr�rrrrp"szDefaultProvider._listdircCst|j|j|�d�S)N�rb)rrgrf)rr�r�rrrr�%sz#DefaultProvider.get_resource_streamc	Cst|d��
}|j�SQRXdS)Nr�)rru)rr��streamrrrrj(szDefaultProvider._getcCsttdtd��}t||�dS)N�SourceFileLoader)r��importlib_machinery�typer�)rZ
loader_clsrrr�	_register,s
zDefaultProvider._registerN)rrrrrkrorpr�rjr'r�rrrrr�sc@s8eZdZdZdd�ZZdd�Zdd�ZdZdd�Z	dS)	r�z.Provider that returns nothing for all requestscCsdS)NFr)rr�rrrre9szEmptyProvider.<lambda>cCsdS)Nr�r)rr�rrrre:scCsgS)Nr)rr�rrrre;sNcCsdS)Nr)rrrrr�>szEmptyProvider.__init__)
rrrrrorkrjrprfr�rrrrr�6sc@s eZdZdZedd��ZeZdS)�ZipManifestsz
    zip manifest builder
    c
s2t|�� ��fdd��j�D�}t|�SQRXdS)a
        Build a dictionary similar to the zipimport directory
        caches, except instead of tuples, store ZipInfo objects.

        Use a platform-specific path separator (os.sep) for the path keys
        for compatibility with pypy on Windows.
        c3s&|]}|jdtj��j|�fVqdS)r{N)r5r��sepZgetinfo)r+r�)�zfilerrr,Usz%ZipManifests.build.<locals>.<genexpr>N)�ContextualZipFileZnamelistrR)rr�rVr)r�r�buildJs	

zZipManifests.buildN)rrrrr'r��loadrrrrr�Esr�c@s$eZdZdZejdd�Zdd�ZdS)�MemoizedZipManifestsz%
    Memoized zipfile manifests.
    �manifest_modzmanifest mtimecCsRtjj|�}tj|�j}||ks.||j|krH|j|�}|j||�||<||jS)zW
        Load a manifest at path or return a suitable manifest already loaded.
        )	r�r��normpathrQ�st_mtime�mtimer�r��manifest)rr�r�r�rrrr�fs
zMemoizedZipManifests.loadN)rrrrr�
namedtupler�r�rrrrr�`sr�cs0eZdZdZdd�Zdd�Z�fdd�Z�ZS)r�zL
    Supplement ZipFile class to support context manager for Python 2.6
    cCs|S)Nr)rrrr�	__enter__yszContextualZipFile.__enter__cCs|j�dS)N)�close)rr�rn�	tracebackrrr�__exit__|szContextualZipFile.__exit__cs(ttjd�rtj||�Stt|�j|�S)zI
        Construct a ZipFile or ContextualZipFile as appropriate
        r�)r��zipfile�ZipFilerr��__new__)rrd�kwargs)rrrr�szContextualZipFile.__new__)rrrrr�r�r�rHrr)rrr�tsr�c@s�eZdZdZdZe�Zdd�Zdd�Zdd�Z	e
d	d
��Zdd�Ze
d
d��Zdd�Zdd�Zdd�Zdd�Zdd�Zdd�Zdd�Zdd�Zdd �ZdS)!r�z"Resource support for zips and eggsNcCs tj||�|jjtj|_dS)N)r�r�r��archiver�r��zip_pre)rr�rrrr��szZipProvider.__init__cCs4|j|j�r|t|j�d�Std||jf��dS)Nz%s is not a subpath of %s)r9r�rw�AssertionError)r�fspathrrr�
_zipinfo_name�szZipProvider._zipinfo_namecCsP|j|}|j|jtj�r:|t|j�dd�jtj�Std||jf��dS)Nr-z%s is not a subpath of %s)r�r9r�r�r�rwrr�)r�zip_pathr�rrr�_parts�s

zZipProvider._partscCs|jj|jj�S)N)�_zip_manifestsr�r�r�)rrrr�zipinfo�szZipProvider.zipinfocCs`|jstd��|j|�}|j�}dj|j|��|krTx|D]}|j||j|��q:W|j||�S)Nz5resource_filename() only supported for .egg, not .zipr{)r~rz�_resource_to_zip�_get_eager_resourcesrmr��_extract_resource�
_eager_to_zip)rr�r�r��eagersr�rrrr��s

z!ZipProvider.get_resource_filenamecCs"|j}|jd}tj|�}||fS)Nrr-r7)rrr7)Z	file_size�	date_time�timeZmktime)Zzip_stat�sizer��	timestamprrr�_get_date_and_size�s

zZipProvider._get_date_and_sizec
Csn||j�krDx*|j�|D]}|j|tjj||��}qWtjj|�S|j|j|�\}}tsdt	d��y�|j
|j|j|��}|j
||�r�|Stdtjj|�d�\}}	tj||jj|��tj|�t|	||f�|j|	|�yt|	|�Wn\tjk
�rDtjj|��r>|j
||��r|Stjdk�r>t|�t|	|�|S�YnXWn tjk
�rh|j�YnX|S)Nz>"os.rename" and "os.unlink" are not supported on this platformz	.$extract)�dirrO)�_indexr�r�r�rmrer�r��
WRITE_SUPPORT�IOErrorrNr~r��_is_current�_mkstemp�writer�r}r�rr[r
�error�isfiler�rrJ)
rr�r�r�Zlastr�r�Z	real_pathZoutfZtmpnamrrrr��s@

zZipProvider._extract_resourcec		Csx|j|j|�\}}tjj|�s$dStj|�}|j|ksB|j|krFdS|jj	|�}t
|d��}|j�}WdQRX||kS)zK
        Return True if the file_path is current for this zip_path
        Fr�N)r�r�r�r�r�rQ�st_sizer�r�r}rru)	rZ	file_pathr�r�r�rQZzip_contents�fZ
file_contentsrrrr��s
zZipProvider._is_currentcCsB|jdkr<g}x&dD]}|j|�r|j|j|��qW||_|jS)N�native_libs.txt�eager_resources.txt)r�r�)r�r�rr�)rr�r�rrrr�s


z ZipProvider._get_eager_resourcescCs�y|jStk
r�i}xd|jD]Z}|jtj�}xH|rztjj|dd��}||krj||j|d�Pq4|j�g||<q4Wq"W||_|SXdS)Nr-r7r7)	Z	_dirindex�AttributeErrorr�rr�r�rmr;r:)rZindr�r<�parentrrrr�szZipProvider._indexcCs |j|�}||jkp||j�kS)N)r�r�r�)rr�r�rrrrks
zZipProvider._hascCs|j|�|j�kS)N)r�r�)rr�rrrro!szZipProvider._isdircCst|j�j|j|�f��S)N)rr�rBr�)rr�rrrrp$szZipProvider._listdircCs|j|j|j|��S)N)r�rgr�)rr�rrrr�'szZipProvider._eager_to_zipcCs|j|j|j|��S)N)r�rgrf)rr�rrrr�*szZipProvider._resource_to_zip)rrrrr�r�r�r�r�r�r�r�r�r]r�r�r�r�r�rkrorpr�r�rrrrr��s$	

	4	c@s8eZdZdZdd�Zdd�Zdd�Zdd	�Zd
d�ZdS)
r�a*Metadata handler for standalone PKG-INFO files

    Usage::

        metadata = FileMetadata("/path/to/PKG-INFO")

    This provider rejects all data and metadata requests except for PKG-INFO,
    which is treated as existing, and will be the contents of the file at
    the provided location.
    cCs
||_dS)N)r�)rr�rrrr�=szFileMetadata.__init__cCs|dkotjj|j�S)NzPKG-INFO)r�r�r�)rr�rrrr�@szFileMetadata.has_metadatac	CsD|dkrtd��tj|jddd��}|j�}WdQRX|j|�|S)NzPKG-INFOz(No metadata except PKG-INFO is availablezutf-8r5)�encoding�errors)r�rhrr�ru�_warn_on_replacement)rr�r��metadatarrrr�Cs
zFileMetadata.get_metadatacCs2djd�}||kr.d}|jft��}tj|�dS)Ns�zutf-8z2{self.path} could not be properly decoded in UTF-8)rmr�r�rCrD)rr�Zreplacement_charrHrWrrrr�Ls

z!FileMetadata._warn_on_replacementcCst|j|��S)N)r�r�)rr�rrrr�TszFileMetadata.get_metadata_linesN)	rrrrr�r�r�r�r�rrrrr�1s
	c@seZdZdZdd�ZdS)r�asMetadata provider for egg directories

    Usage::

        # Development eggs:

        egg_info = "/path/to/PackageName.egg-info"
        base_dir = os.path.dirname(egg_info)
        metadata = PathMetadata(base_dir, egg_info)
        dist_name = os.path.splitext(os.path.basename(egg_info))[0]
        dist = Distribution(basedir, project_name=dist_name, metadata=metadata)

        # Unpacked egg directories:

        egg_path = "/path/to/PackageName-ver-pyver-etc.egg"
        metadata = PathMetadata(egg_path, os.path.join(egg_path,'EGG-INFO'))
        dist = Distribution.from_filename(egg_path, metadata=metadata)
    cCs||_||_dS)N)rfrl)rr�rlrrrr�lszPathMetadata.__init__N)rrrrr�rrrrr�Xsc@seZdZdZdd�ZdS)r�z Metadata provider for .egg filescCsD|jtj|_||_|jr0tjj|j|j�|_n|j|_|j	�dS)z-Create a metadata provider from a zipimporterN)
r�r�r�r�r��prefixr�rmrfr)r�importerrrrr�tszEggMetadata.__init__N)rrrrr�rrrrr�qsrR)�_distribution_finderscCs|t|<dS)axRegister `distribution_finder` to find distributions in sys.path items

    `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
    handler), and `distribution_finder` is a callable that, passed a path
    item and the importer instance, yields ``Distribution`` instances found on
    that path item.  See ``pkg_resources.find_on_path`` for an example.N)r�)�
importer_typeZdistribution_finderrrrr��scCst|�}tt|�}||||�S)z.Yield distributions accessible via `path_item`)rr�r�)�	path_item�onlyr��finderrrrr��s
ccs�|jjd�rdSt|�}|jd�r2tj||d�V|r:dSxH|jd�D]:}t|�rFtj	j
||�}xttj
|�|�D]
}|VqrWqFWdS)z@
    Find eggs in zip files; possibly multiple nested eggs.
    z.whlNzPKG-INFO)r�r{)r��endswithr�r�r��
from_filenamerr�r�r�rm�find_eggs_in_zip�	zipimport�zipimporter)r�r�r�r�Zsubitem�subpathr�rrrr��s
r�cCsfS)Nr)r�r�r�rrr�find_nothing�sr�cCsdd�}t||dd�S)aL
    Given a list of filenames, return them in descending order
    by version number.

    >>> names = 'bar', 'foo', 'Python-2.7.10.egg', 'Python-2.7.2.egg'
    >>> _by_version_descending(names)
    ['Python-2.7.10.egg', 'Python-2.7.2.egg', 'foo', 'bar']
    >>> names = 'Setuptools-1.2.3b1.egg', 'Setuptools-1.2.3.egg'
    >>> _by_version_descending(names)
    ['Setuptools-1.2.3.egg', 'Setuptools-1.2.3b1.egg']
    >>> names = 'Setuptools-1.2.3b1.egg', 'Setuptools-1.2.3.post1.egg'
    >>> _by_version_descending(names)
    ['Setuptools-1.2.3.post1.egg', 'Setuptools-1.2.3b1.egg']
    cSs2tjj|�\}}tj|jd�|g�}dd�|D�S)z6
        Parse each component of the filename
        r+cSsg|]}tjj|��qSr)rrKr�)r+r3rrr�
<listcomp>�sz?_by_version_descending.<locals>._by_version.<locals>.<listcomp>)r�r��splitext�	itertools�chainr)r��extr<rrr�_by_version�sz+_by_version_descending.<locals>._by_versionT)r'r6)�sorted)rMr�rrr�_by_version_descending�sr�ccs�t|�}tjj|�o tj|tj��r�t|�rPtj|t	|tjj
|d��d�V�nTttj|��}�xB|D�]8}|j
�}|jd�s�|jd�r�tjj
||�}tjj|�r�ttj|��dkr�qft	||�}nt|�}tj|||td�Vqf|o�t|��rttjj
||��}x�|D]}	|	V�qWqf|rf|jd�rfttjj
||���}
|
j�}WdQRXxN|D]F}|j��sh�qVtjj
||j��}
t|
�}x|D]}|V�q�WP�qVWqfWdS)	z6Yield distributions accessible on a sys.path directoryzEGG-INFO)r�z	.egg-infoz
.dist-infor)�
precedencez	.egg-linkN)�_normalize_cachedr�r�r
�access�R_OKr�r�r�r�rmr�r�r8r�rwr��
from_locationr�r�r�	readlines�strip�rstrip)r�r�r�Zpath_item_entriesr�r8Zfullpathr�rr�Z
entry_fileZentry_lines�liner�rrrr�find_on_path�sB



r��
FileFinder)�_namespace_handlers)�_namespace_packagescCs|t|<dS)a�Register `namespace_handler` to declare namespace packages

    `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
    handler), and `namespace_handler` is a callable like this::

        def namespace_handler(importer, path_entry, moduleName, module):
            # return a path_entry to use for child packages

    Namespace handlers are only called if the importer object has already
    agreed that it can handle the relevant path item, and they should only
    return a subpath if the module __path__ does not already contain an
    equivalent subpath.  For an example namespace handler, see
    ``pkg_resources.file_ns_handler``.
    N)r�)r�Znamespace_handlerrrrr�scCs�t|�}|dkrdS|j|�}|dkr*dStjj|�}|dkrbtj|�}tj|<g|_t|�nt	|d�svt
d|��tt|�}|||||�}|dk	r�|j}|j
|�|j|�t|||�|S)zEEnsure that named package includes a subpath of path_item (if needed)N�__path__zNot a package:)r�find_modulerkr�rB�types�
ModuleTyper��_set_parent_nsr�r�r�r�r;�load_module�_rebuild_mod_path)�packageNamer�r�r�r�Zhandlerr�r�rrr�
_handle_nss*






r�csRdd�tjD���fdd����fdd�}|j|d�dd�|D�|jd	d	�<d	S)
zq
    Rebuild module.__path__ ensuring that all entries are ordered
    corresponding to their sys.path order
    cSsg|]}t|��qSr)r�)r+�prrrr�5sz%_rebuild_mod_path.<locals>.<listcomp>cs(y
�j|�Stk
r"td�SXdS)z/
        Workaround for #520 and #513.
        �infN)�indexrp�float)r�)�sys_pathrr�safe_sys_path_index7s
z._rebuild_mod_path.<locals>.safe_sys_path_indexcs<|jtj�}�jd�d}|d|�}�ttjj|���S)zR
        Return the ordinal of the path based on its position in sys.path
        r,r-N)rr�r��countr�rm)r��
path_partsZmodule_partsr<)�package_namer�rr�position_in_sys_path@sz/_rebuild_mod_path.<locals>.position_in_sys_path)r'cSsg|]}t|��qSr)r�)r+r�rrrr�JsN)rkr�r!r�)Z	orig_pathr�r�r�r)r�r�r�rr�0s
		r�cCs�tj�z�|tkrdStjd}}d|kr�dj|jd�dd��}t|�|tkrZt|�ytj	|j
}Wntk
r�td|��YnXtj
|g�j|�tj
|g�x|D]}t||�q�WWdtj�XdS)z9Declare that package 'packageName' is a namespace packageNr,r-zNot a package:r7)�_imp�acquire_lockr�rkr�rmrr�r�r�r�r�r�rr;r��release_lock)r�r�r�r�rrrr�Ms&
c
CsJtj�z2x,tj|f�D]}t||�}|rt||�qWWdtj�XdS)zDEnsure that previously-declared namespace packages include path_itemN)r�r�r�rBr�r�r�)r�r��packager�rrrr�ns
cCsFtjj||jd�d�}t|�}x |jD]}t|�|kr(Pq(W|SdS)zBCompute an ns-package subpath for a filesystem or zipfile importerr,r-Nr7)r�r�rmrr�r�)r�r�r�r�r�Z
normalizedrrrr�file_ns_handlerzsrcCsdS)Nr)r�r�r�r�rrr�null_ns_handler�srcCstjjtjj|��S)z1Normalize a file/dir name for comparison purposes)r�r��normcase�realpath)rZrrrr��scCs2y||Stk
r,t|�||<}|SXdS)N)r�r�)rZr��resultrrrr��s
r�cCs|j�jd�S)z@
    Determine if given path appears to be an unpacked egg.
    z.egg)r8r�)r�rrrr��sr�cCs<|jd�}|j�}|r8dj|�}ttj||tj|�dS)Nr,)rr:rm�setattrrkr�)r�r<r�r�rrrr��s


r�ccsht|tj�r>xV|j�D]"}|j�}|r|jd�r|VqWn&x$|D]}xt|�D]
}|VqRWqDWdS)z9Yield non-empty/non-comment lines of a string or sequence�#N)rrr��
splitlinesr�r9r�)�strsr2Zssrrrr��s
z\w+(\.\w+)*$z�
    (?P<name>[^-]+) (
        -(?P<ver>[^-]+) (
            -py(?P<pyver>[^-]+) (
                -(?P<plat>.+)
            )?
        )?
    )?
    c@s�eZdZdZffdfdd�Zdd�Zdd�Zdd
d�Zdd
�Zddd�Z	e
jd�Ze
ddd��Ze
dd��Ze
ddd��Ze
ddd��ZdS)r�z3Object representing an advertised importable objectNcCsJt|�std|��||_||_t|�|_tjddj|��j	|_	||_
dS)NzInvalid module namezx[%s]�,)�MODULErpr��module_namer�attrsr�r�rmrr�)rr�rrrr�rrrr��s

zEntryPoint.__init__cCsHd|j|jf}|jr*|ddj|j�7}|jrD|ddj|j�7}|S)Nz%s = %s�:r,z [%s]r	)r�rrrmr)rr2rrrr��szEntryPoint.__str__cCsdt|�S)NzEntryPoint.parse(%r))rF)rrrrr��szEntryPoint.__repr__TcOs6|s|s|rtjdtdd�|r.|j||�|j�S)zH
        Require packages for this EntryPoint, then resolve it.
        zJParameters to load are deprecated.  Call .resolve and .require separately.rg)r>)rCrD�DeprecationWarningrtr)rrtrdr�rrrr��szEntryPoint.loadcCsVt|jdgdd�}ytjt|j|�Stk
rP}ztt|���WYdd}~XnXdS)zD
        Resolve the entry point from its module and attrs.
        rr)�fromlist�levelN)	r�r�	functools�reducer�rr�r�rF)rr��excrrrr�s
zEntryPoint.resolvecCsH|jr|jrtd|��|jj|j�}tj|||�}tttj|��dS)Nz&Can't require() without a distribution)	rr�r�rr�rrrr)rrr rrVrrrrt	s

zEntryPoint.requirez]\s*(?P<name>.+?)\s*=\s*(?P<module>[\w.]+)\s*(:\s*(?P<attr>[\w.]+))?\s*(?P<extras>\[.*\])?\s*$cCsf|jj|�}|sd}t||��|j�}|j|d�}|drJ|djd�nf}||d|d|||�S)aParse a single entry point from string `src`

        Entry point syntax follows the form::

            name = some.module:some.attr [extra1, extra2]

        The entry name and module name are required, but the ``:attrs`` and
        ``[extras]`` parts are optional
        z9EntryPoint must be in 'name=module:attrs [extras]' formatr�attrr,r�r�)�patternrjrp�	groupdict�
_parse_extrasr)r�srcr�rrrW�resrrrrrr�	s
zEntryPoint.parsecCs(|sfStjd|�}|jr"t��|jS)N�x)r�r��specsrpr)rZextras_specr�rrrr$	szEntryPoint._parse_extrascCsZt|�std|��i}x>t|�D]2}|j||�}|j|krHtd||j��|||j<q W|S)zParse an entry point groupzInvalid group namezDuplicate entry point)r
rpr�r�r�)rro�linesr��thisr�rrrr�parse_group-	s

zEntryPoint.parse_groupcCsxt|t�r|j�}nt|�}i}xR|D]J\}}|dkrD|s<q&td��|j�}||kr^td|��|j|||�||<q&W|S)z!Parse a map of entry point groupsNz%Entry points must be listed in groupszDuplicate group name)rrRrVr�rpr�r)r�datar��mapsrorrrr�	parse_map:	s


zEntryPoint.parse_map)T)NN)N)N)N)rrrrr�r�r�r�rrtr?r@rr'r�rrr!rrrrr��s 	


	cCs>|sdStjj|�}|djd�r:tjj|dd�d�S|S)Nr�r-zmd5=r7r7)r�)rr�Zurlparser9Z
urlunparse)rZparsedrrr�_remove_md5_fragmentN	sr"cCs@dd�}t||�}tt|�d�}|jd�\}}}t|j��p>dS)z�
    Given an iterable of lines from a Metadata file, return
    the value of the Version field, if present, or None otherwise.
    cSs|j�jd�S)Nzversion:)r8r9)r�rrrre\	sz$_version_from_file.<locals>.<lambda>r�r
N)r�next�iter�	partitionr�r�)rZis_version_lineZ
version_linesr�r�rnrrr�_version_from_fileW	s

r&c@sZeZdZdZdZddddedefdd�ZedGdd��Z	dd	�Z
ed
d��Zdd
�Z
dd�Zdd�Zdd�Zdd�Zdd�Zdd�Zedd��Zedd��Zdd�Zed d!��Zed"d#��Zffd$d%�Zd&d'�ZdHd)d*�Zd+d,�Zd-d.�Zd/d0�Zd1d2�ZedId3d4��Z d5d6�Z!d7d8�Z"dJd9d:�Z#d;d<�Z$dKd=d>�Z%d?d@�Z&dAdB�Z'dCdD�Z(edEdF��Z)dS)Lr�z5Wrap an actual or potential sys.path entry w/metadatazPKG-INFONcCsFt|pd�|_|dk	r t|�|_||_||_||_||_|p>t|_	dS)NZUnknown)
r�rr��_versionr2rlrr�r��	_provider)rrr�rrKr2rlr�rrrr�g	s
zDistribution.__init__cKs~dgd\}}}}tjj|�\}}	|	j�tkr^t|	j�}t|�}
|
r^|
jdddd�\}}}}|||f||||d�|��j�S)Nr�r�ZverZpyverrq)rrKr2rl)r�r�r�r8�_distributionImpl�EGG_NAMEro�_reload_version)rrr�r�rTrrKr2rlr�rjrrrr�s	s
zDistribution.from_locationcCs|S)Nr)rrrrr+�	szDistribution._reload_versioncCs(|j|j|jt|j�|jpd|jp$dfS)Nr�)�parsed_versionr�r'r"rr2rl)rrrrr5�	szDistribution.hashcmpcCs
t|j�S)N)�hashr5)rrrrr�	szDistribution.__hash__cCs|j|jkS)N)r5)rr!rrrr �	szDistribution.__lt__cCs|j|jkS)N)r5)rr!rrrr"�	szDistribution.__le__cCs|j|jkS)N)r5)rr!rrrr%�	szDistribution.__gt__cCs|j|jkS)N)r5)rr!rrrr$�	szDistribution.__ge__cCst||j�sdS|j|jkS)NF)rrr5)rr!rrrr#�	szDistribution.__eq__cCs
||kS)Nr)rr!rrrr&�	szDistribution.__ne__cCs0y|jStk
r*|jj�|_}|SXdS)N)Z_keyr�rr8)rr'rrrr'�	s
zDistribution.keycCst|d�st|j�|_|jS)N�_parsed_version)r�rNrKr.)rrrrr,�	s
zDistribution.parsed_versioncCsXtjj}t|j|�}|sdS|js&dStjd�j�jdd�}t	j
|jft|��t
�dS)Na>
            '{project_name} ({version})' is being parsed as a legacy,
            non PEP 440,
            version. You may find odd behavior and sort order.
            In particular it will be sorted as less than 0.0. It
            is recommended to migrate to PEP 440 compatible
            versions.
            rqr�)rrK�
LegacyVersionrr.rCrDr�r5rCrDr��varsr)rZLVZ	is_legacyrHrrr�_warn_legacy_version�	sz!Distribution._warn_legacy_versioncCsLy|jStk
rFt|j|j��}|dkrBd}t||j|��|SXdS)Nz(Missing 'Version:' header and/or %s file)r'r�r&�
_get_metadata�PKG_INFOrp)rrKrHrrrrK�	szDistribution.versioncCs�y|jStk
r�dgi}|_x�dD]x}xrt|j|��D]`\}}|r�d|kr||jdd�\}}t|�rpg}nt|�s|g}t|�p�d}|j|g�j	t
|��q>Wq*W|SXdS)N�requires.txt�depends.txtr
r-)r4r5)Z_Distribution__dep_mapr�r�r2rr�r�r�rrr�)r�dmr�r(rr)rrr�_dep_map�	s 
zDistribution._dep_mapcCsj|j}g}|j|jdf��xH|D]@}y|j|t|��Wq"tk
r`td||f��Yq"Xq"W|S)z@List of Requirements needed for this distro if `extras` are usedNz%s has no such extra feature %r)r7rrBr�r�r�)rrr6Zdepsr�rrrr�	s
zDistribution.requiresccs(|j|�r$x|j|�D]
}|VqWdS)N)r�r�)rr�r�rrrr2
s
zDistribution._get_metadataFcCsZ|dkrtj}|j||d�|tjkrVt|j�x$|jd�D]}|tjkr<t|�q<WdS)z>Ensure distribution is importable on `path` (default=sys.path)N)r5znamespace_packages.txt)rkr�rr�rr2r�r�)rr�r5Zpkgrrr�activate	
s


zDistribution.activatecCs8dt|j�t|j�|jptf}|jr4|d|j7}|S)z@Return what this distribution's standard .egg filename should bez
%s-%s-py%sr+)r�rrKr2r>rl)rrZrrrr~
szDistribution.egg_namecCs |jrd||jfSt|�SdS)Nz%s (%s))rrF)rrrrr�
szDistribution.__repr__cCs@yt|dd�}Wntk
r(d}YnX|p0d}d|j|fS)NrKz[unknown version]z%s %s)r�rpr)rrKrrrr�%
s
zDistribution.__str__cCs|jd�rt|��t|j|�S)zADelegate all unrecognized public attributes to .metadata providerr�)r9r�r�r()rrrrr�__getattr__-
s
zDistribution.__getattr__cKs|jt|�tjj|�|f|�S)N)r�r�r�r�r�)rrZr�rTrrrr�3
szDistribution.from_filenamecCs<t|jtjj�r"d|j|jf}nd|j|jf}tj|�S)z?Return a ``Requirement`` that matches this distribution exactlyz%s==%sz%s===%s)rr,rrKr_rr�r�)r�specrrrr":
szDistribution.as_requirementcCs.|j||�}|dkr&td||ff��|j�S)z=Return the `name` entry point of `group` or raise ImportErrorNzEntry point %r not found)rzr�r�)rror�rrrrrxC
szDistribution.load_entry_pointcCsPy
|j}Wn,tk
r6tj|jd�|�}|_YnX|dk	rL|j|i�S|S)z=Return the entry point map for `group`, or the full entry mapzentry_points.txtN)Z_ep_mapr�r�r!r2rB)rroZep_maprrrryJ
s
zDistribution.get_entry_mapcCs|j|�j|�S)z<Return the EntryPoint object for `group`+`name`, or ``None``)ryrB)rror�rrrrzV
szDistribution.get_entry_infoc
Cs2|p|j}|sdSt|�}tjj|�}dd�|D�}x�t|�D]v\}}||kr\|rVPq�dSq>||kr>|jtkr>|r�|||d�kr�dS|tjkr�|j	�|j
||�|j
||�Pq>W|tjkr�|j	�|r�|j
d|�n
|j|�dSxBy|j||d�}	Wnt
k
�rPYq�X||	=||	=|	}q�WdS)a�Ensure self.location is on path

        If replace=False (default):
            - If location is already in path anywhere, do nothing.
            - Else:
              - If it's an egg and its parent directory is on path,
                insert just ahead of the parent.
              - Else: add to the end of path.
        If replace=True:
            - If location is already on path anywhere (not eggs)
              or higher priority than its parent (eggs)
              do nothing.
            - Else:
              - If it's an egg and its parent directory is on path,
                insert just ahead of the parent,
                removing any lower-priority entries.
              - Else: add it to the front of path.
        NcSsg|]}|rt|�p|�qSr)r�)r+r�rrrr�t
sz*Distribution.insert_on.<locals>.<listcomp>rr-)rr�r�r�re�	enumerater�r�rk�check_version_conflictrr;r�rp)
rr��locr5ZnlocZbdirZnpathr�rZnprrrrZ
sB



zDistribution.insert_oncCs�|jdkrdStj|jd��}t|j�}x~|jd�D]p}|tjks4||ks4|tkrTq4|dkr^q4t	tj|dd�}|r�t|�j
|�s4|j
|j�r�q4td|||jf�q4WdS)	N�
setuptoolsznamespace_packages.txtz
top_level.txt�
pkg_resources�siterdzIModule %s was already imported from %s, but %s is being added to sys.path)r?r>r@)r'rRrSr2r�rrkr�r�r�r9�
issue_warning)rZnspr=�modname�fnrrrr<�
s"

z#Distribution.check_version_conflictcCs4y
|jWn$tk
r.tdt|��dSXdS)NzUnbuilt egg for FT)rKrprAr�)rrrrr7�
s
zDistribution.has_versioncKsDd}x$|j�D]}|j|t||d��qW|jd|j�|jf|�S)z@Copy this distribution, substituting in any changed keyword argsz<project_name version py_version platform location precedenceNr�)rrr�r(r)rrTrMrrrr�clone�
s
zDistribution.clonecCsdd�|jD�S)NcSsg|]}|r|�qSrr)r+Zdeprrrr��
sz'Distribution.extras.<locals>.<listcomp>)r7)rrrrr�
szDistribution.extras)N)NF)N)N)NF)*rrrrr3r>r�r�r'r�r+r�r5rr r"r%r$r#r&r'r,r1rKr7rr2r8r~r�r�r9r�r"rxryrzrr<r7rDrrrrrr�c	sN

	

Cc@seZdZdd�ZdS)�EggInfoDistributioncCst|j|j��}|r||_|S)a�
        Packages installed by distutils (e.g. numpy or scipy),
        which uses an old safe_version, and so
        their version numbers can get mangled when
        converted to filenames (e.g., 1.11.0.dev0+2329eae to
        1.11.0.dev0_2329eae). These distributions will not be
        parsed properly
        downstream by Distribution and safe_version, so
        take an extra step and try to get the version number from
        the metadata file itself instead of the filename.
        )r&r2r3r')rZ
md_versionrrrr+�
sz#EggInfoDistribution._reload_versionN)rrrr+rrrrrE�
srEc@s>eZdZdZdZejd�Zedd��Z	edd��Z
dd	�Zd
S)�DistInfoDistributionzGWrap an actual or potential sys.path entry w/metadata, .dist-info styleZMETADATAz([\(,])\s*(\d.*?)\s*([,\)])cCs@y|jStk
r:|j|j�}tjj�j|�|_|jSXdS)zParse and cache metadataN)Z	_pkg_infor�r�r3�email�parserZParserZparsestr)rr�rrr�_parsed_pkg_info�
sz%DistInfoDistribution._parsed_pkg_infocCs,y|jStk
r&|j�|_|jSXdS)N)�_DistInfoDistribution__dep_mapr��_compute_dependencies)rrrrr7�
s

zDistInfoDistribution._dep_mapcs�dgi}|_g�x&|jjd�p"gD]}�jt|��q$W�fdd�}t|d��}|dj|�x<|jjd�ppgD](}t|j��}tt||��|�||<qrW|S)z+Recompute this distribution's dependencies.Nz
Requires-Distc3s0x*�D]"}|js"|jjd|i�r|VqWdS)Nr()r)r*)r(r�)rrr�reqs_for_extra�
s
zBDistInfoDistribution._compute_dependencies.<locals>.reqs_for_extrazProvides-Extra)	rJrIZget_allrr��	frozensetr�r�r)rr6r�rL�commonr(Zs_extrar)rrrK�
sz*DistInfoDistribution._compute_dependenciesN)rrrrr3r?r@ZEQEQr�rIr7rKrrrrrF�
s

rF)z.eggz	.egg-infoz
.dist-infoc
Os^d}t�}y"xtj|�j|kr(|d7}qWWntk
r@YnXtj|d|di|��dS)Nr-r>)rOrkr�r�rprCrD)rdrTrrXrrrrAsrAc@seZdZdd�ZdS)�RequirementParseErrorcCsdj|j�S)Nr�)rmrd)rrrrr�szRequirementParseError.__str__N)rrrr�rrrrrOsrOccshtt|��}xV|D]N}d|kr0|d|jd��}|jd�rV|dd�j�}|t|�7}t|�VqWdS)z�Yield ``Requirement`` objects for each specification in `strs`

    `strs` must be a string, or a (possibly-nested) iterable thereof.
    z #N�\rg���)r$r�r�r�r�r#r�)rrr�rrrr�#s

csPeZdZ�fdd�Zdd�Zdd�Zdd�Zd	d
�Zdd�Ze	d
d��Z
�ZS)r�cs�ytt|�j|�Wn2tjjk
rF}ztt|���WYdd}~XnX|j|_	t
|j�}||j�|_|_
dd�|jD�|_ttt|j��|_|j
|jt|j�|jr�t|j�ndf|_t|j�|_dS)z>DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!NcSsg|]}|j|jf�qSr)r8rK)r+r:rrrr�Asz(Requirement.__init__.<locals>.<listcomp>)rr�r�rrZInvalidRequirementrOrFr�Zunsafe_namer�r8rr'�	specifierrrrr�rrMr)�hashCmpr-�_Requirement__hash)rZrequirement_stringrcr)rrrr�7s
zRequirement.__init__cCst|t�o|j|jkS)N)rr�rS)rr!rrrr#Ks
zRequirement.__eq__cCs
||kS)Nr)rr!rrrr&QszRequirement.__ne__cCs0t|t�r |j|jkrdS|j}|jj|dd�S)NFT)Zprereleases)rr�r'rKrR�contains)rrrrrr	Ts

zRequirement.__contains__cCs|jS)N)rT)rrrrr`szRequirement.__hash__cCsdt|�S)NzRequirement.parse(%r))rF)rrrrr�cszRequirement.__repr__cCst|�\}|S)N)r�)r2r�rrrr�es
zRequirement.parse)rrrr�r#r&r	rr�r]r�rHrr)rrr�6scCs0t|t�s*Gdd�d|t�}|jdd�S|jS)z&Get an mro for a type or classic classc@seZdZdS)z_get_mro.<locals>.clsN)rrrrrrrrosrr-N)rr��object�__mro__)rrrr�_get_mroks
rXcCs2x,tt|dt|���D]}||kr||SqWdS)z2Return an adapter factory for `ob` from `registry`rN)rXr�r�)�registryr`�trrrr�vsr�cCs&tjj|�}tjj|�s"tj|�dS)z1Ensure that the parent directory of `path` existsN)r�r�rer
�makedirs)r�rerrrr�}scCs@tstd��t|�\}}|r<|r<t|�r<t|�t|d�dS)z/Sandbox-bypassing version of ensure_directory()z*"os.mkdir" not supported on this platform.i�N)r�r�rr
rKr	)r�rerZrrrrK�srKccszd}g}xbt|�D]V}|jd�r^|jd�rR|s2|r<||fV|dd�j�}g}qhtd|��q|j|�qW||fVdS)asSplit a string or iterable thereof into (section, content) pairs

    Each ``section`` is a stripped version of the section header ("[section]")
    and each ``content`` is a list of stripped lines excluding blank lines and
    comment-only lines.  If there are any such lines before the first section
    header, they're returned in a first ``section`` of ``None``.
    N�[�]r-zInvalid section headingr7)r�r9r�r�rpr;)r2ZsectionZcontentr�rrrr��s


cOs&tj}ztt_tj||�S|t_XdS)N)r�r�os_open�tempfileZmkstemp)rdrTZold_openrrrr��s
r��ignore)�categoryr;cOs|||�|S)Nr)r�rdr�rrr�_call_aside�s
rbcCs<t�}||d<x(t|�D]}|jd�st||�||<qWdS)z=Set up global resource manager (deliberately not state-saved)Z_managerr�N)r�r�r9r�)rXr�r�rrr�_initialize�s

rccCs�tj�}td|d�|j}|j}|j}|j}|}d}x|D]}|jdd�q:W~|dd�dd�g|_t	t
|jtj
��t�jt��dS)	aE
    Prepare the master working set and make the ``require()``
    API available.

    This function has explicit effects on the global state
    of pkg_resources. It is intended to be invoked once at
    the initialization of this module.

    Invocation by other packages is unsupported and done
    at their own risk.
    rV)r�NF)r5cSs|jdd�S)NT)r5)r8)r�rrrre�sz0_initialize_master_working_set.<locals>.<lambda>)r%)r�rrUrtr{r&rur8r�rrr�rkr�rOrPr�)r�rtr{r�rur�r�rrr�_initialize_master_working_set�s

rd)rr)rrr7)N)N)F)F)F)F)N)�rZ
__future__rrkr�rhr�r?r�r�r�rCrQrZpkgutilr8rlrr�Zemail.parserrGr_rCr�rr�r�ZimpZpip._vendorrZpip._vendor.six.movesrrrrr	r
rr�rr^Zos.pathr
rZimportlib.machinery�	machineryr�rrrr��version_inforWrDrtr�rErrVrrKr_rIr/rJrNrQrUrZr[r^rarbrcZ
_sget_noneZ
_sset_noners�__all__�	Exceptionr�r�r�r�r�r�r>r�r�r�r�r�r�rvrnr�rhr@rir�r�r�rur�rwrxryrzr�r�r�rRrr�r��RuntimeErrorr�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�r�ZImpImporterr�r�r�r�r�r�r�rrr�r�r�r�r�rjr
rA�
IGNORECASEr*r�r"r&r�rErFr)rArprOr�rr�rXr�r�rKr�r��filterwarningsrbrOrcrdrrrr�<module>s�




b

 




.

{
3	
a
''





.!


		~	g0
5
	_vendor/pkg_resources/__init__.py000064400000311476151733136500013204 0ustar00# coding: utf-8
"""
Package resource API
--------------------

A resource is a logical file contained within a package, or a logical
subdirectory thereof.  The package resource API expects resource names
to have their path parts separated with ``/``, *not* whatever the local
path separator is.  Do not use os.path operations to manipulate resource
names being passed into the API.

The package resource API is designed to work with normal filesystem packages,
.egg files, and unpacked .egg files.  It can also work in a limited way with
.zip files and with custom PEP 302 loaders that support the ``get_data()``
method.
"""

from __future__ import absolute_import

import sys
import os
import io
import time
import re
import types
import zipfile
import zipimport
import warnings
import stat
import functools
import pkgutil
import operator
import platform
import collections
import plistlib
import email.parser
import tempfile
import textwrap
import itertools
from pkgutil import get_importer

try:
    import _imp
except ImportError:
    # Python 3.2 compatibility
    import imp as _imp

from pip._vendor import six
from pip._vendor.six.moves import urllib, map, filter

# capture these to bypass sandboxing
from os import utime
try:
    from os import mkdir, rename, unlink
    WRITE_SUPPORT = True
except ImportError:
    # no write support, probably under GAE
    WRITE_SUPPORT = False

from os import open as os_open
from os.path import isdir, split

try:
    import importlib.machinery as importlib_machinery
    # access attribute to force import under delayed import mechanisms.
    importlib_machinery.__name__
except ImportError:
    importlib_machinery = None

from pip._vendor import appdirs
from pip._vendor import packaging
__import__('pip._vendor.packaging.version')
__import__('pip._vendor.packaging.specifiers')
__import__('pip._vendor.packaging.requirements')
__import__('pip._vendor.packaging.markers')


# Warn (without failing yet) on Python 3.0-3.2, whose support has been
# dropped; a future version will refuse to run here.
if (3, 0) < sys.version_info < (3, 3):
    msg = (
        "Support for Python 3.0-3.2 has been dropped. Future versions "
        "will fail here."
    )
    warnings.warn(msg)

# declare some globals that will be defined later to
# satisfy the linters.  Both are populated by
# _initialize_master_working_set() at module-import time.
require = None
working_set = None


class PEP440Warning(RuntimeWarning):
    """
    Used when there is an issue with a version or specifier not complying with
    PEP 440.
    """
    # Subclasses RuntimeWarning so it is shown by default warning filters.


class _SetuptoolsVersionMixin(object):
    """
    Mixin layered over the ``packaging.version`` classes that restores
    pre-setuptools-8.0 behaviors: comparison against legacy
    parsed-version tuples, indexing, and iteration over the legacy
    tuple parts (the latter emits a RuntimeWarning).
    """

    def __hash__(self):
        return super(_SetuptoolsVersionMixin, self).__hash__()

    def __lt__(self, other):
        # Comparing against a tuple uses the legacy parsed form
        # produced by __iter__; anything else defers to packaging.
        if isinstance(other, tuple):
            return tuple(self) < other
        else:
            return super(_SetuptoolsVersionMixin, self).__lt__(other)

    def __le__(self, other):
        if isinstance(other, tuple):
            return tuple(self) <= other
        else:
            return super(_SetuptoolsVersionMixin, self).__le__(other)

    def __eq__(self, other):
        if isinstance(other, tuple):
            return tuple(self) == other
        else:
            return super(_SetuptoolsVersionMixin, self).__eq__(other)

    def __ge__(self, other):
        if isinstance(other, tuple):
            return tuple(self) >= other
        else:
            return super(_SetuptoolsVersionMixin, self).__ge__(other)

    def __gt__(self, other):
        if isinstance(other, tuple):
            return tuple(self) > other
        else:
            return super(_SetuptoolsVersionMixin, self).__gt__(other)

    def __ne__(self, other):
        if isinstance(other, tuple):
            return tuple(self) != other
        else:
            return super(_SetuptoolsVersionMixin, self).__ne__(other)

    def __getitem__(self, key):
        # Index into the legacy parsed-version tuple.
        return tuple(self)[key]

    def __iter__(self):
        # Legacy tokenizer: digit runs, letter runs, '.' and '-' are
        # each separate parts (VERBOSE regex, whitespace ignored).
        component_re = re.compile(r'(\d+ | [a-z]+ | \.| -)', re.VERBOSE)
        # Normalize pre-release spellings; 'dev' becomes '@' so it
        # sorts before every other tag.
        replace = {
            'pre': 'c',
            'preview': 'c',
            '-': 'final-',
            'rc': 'c',
            'dev': '@',
        }.get

        def _parse_version_parts(s):
            # Yield normalized parts: numbers zero-padded to width 8
            # for string comparison, non-numeric parts prefixed '*'.
            for part in component_re.split(s):
                part = replace(part, part)
                if not part or part == '.':
                    continue
                if part[:1] in '0123456789':
                    # pad for numeric comparison
                    yield part.zfill(8)
                else:
                    yield '*' + part

            # ensure that alpha/beta/candidate are before final
            yield '*final'

        def old_parse_version(s):
            parts = []
            for part in _parse_version_parts(s.lower()):
                if part.startswith('*'):
                    # remove '-' before a prerelease tag
                    if part < '*final':
                        while parts and parts[-1] == '*final-':
                            parts.pop()
                    # remove trailing zeros from each series of numeric parts
                    while parts and parts[-1] == '00000000':
                        parts.pop()
                parts.append(part)
            return tuple(parts)

        # Warn for use of this function
        warnings.warn(
            "You have iterated over the result of "
            "pkg_resources.parse_version. This is a legacy behavior which is "
            "inconsistent with the new version class introduced in setuptools "
            "8.0. In most cases, conversion to a tuple is unnecessary. For "
            "comparison of versions, sort the Version instances directly. If "
            "you have another use case requiring the tuple, please file a "
            "bug with the setuptools project describing that need.",
            RuntimeWarning,
            stacklevel=1,
        )

        for part in old_parse_version(str(self)):
            yield part


class SetuptoolsVersion(_SetuptoolsVersionMixin, packaging.version.Version):
    """A PEP 440 version that also supports legacy tuple behaviors."""
    pass


class SetuptoolsLegacyVersion(_SetuptoolsVersionMixin,
                              packaging.version.LegacyVersion):
    """A non-PEP-440 version that also supports legacy tuple behaviors."""
    pass


def parse_version(v):
    """Parse `v` into a ``SetuptoolsVersion`` (PEP 440), falling back to
    ``SetuptoolsLegacyVersion`` when the string is not PEP 440 compliant.
    """
    try:
        return SetuptoolsVersion(v)
    except packaging.version.InvalidVersion:
        return SetuptoolsLegacyVersion(v)


_state_vars = {}


def _declare_state(vartype, **kw):
    """
    Install `kw` as module globals and record each name's type tag in
    ``_state_vars`` so that __getstate__/__setstate__ know which
    ``_sget_*``/``_sset_*`` handler pair to apply.
    """
    globals().update(kw)
    for name in kw:
        _state_vars[name] = vartype


def __getstate__():
    """Snapshot every registered module state variable.

    Each variable recorded in ``_state_vars`` is captured via the
    ``_sget_<type>`` handler matching its type tag.
    """
    g = globals()
    return dict(
        (name, g['_sget_' + kind](g[name]))
        for name, kind in _state_vars.items()
    )


def __setstate__(state):
    """Restore module state captured by ``__getstate__``.

    Dispatches each entry to the ``_sset_<type>`` handler matching the
    variable's recorded type tag; returns `state` unchanged.
    """
    g = globals()
    for key, value in state.items():
        setter = g['_sset_' + _state_vars[key]]
        setter(key, g[key], value)
    return state


def _sget_dict(val):
    return val.copy()


def _sset_dict(key, ob, state):
    ob.clear()
    ob.update(state)


def _sget_object(val):
    return val.__getstate__()


def _sset_object(key, ob, state):
    ob.__setstate__(state)


_sget_none = _sset_none = lambda *args: None


def get_supported_platform():
    """Return this platform's maximum compatible version.

    distutils.util.get_platform() normally reports the *minimum* Mac OS X
    version an extension needs; for compatibility checks we instead want
    the version currently *running*, so on darwin the version component of
    the macosx tag is replaced with the live OS version.  Other platforms
    get the build platform string unchanged.
    """
    plat = get_build_platform()
    m = macosVersionString.match(plat)
    if m is None or sys.platform != "darwin":
        return plat
    try:
        running = '.'.join(_macosx_vers()[:2])
        return 'macosx-%s-%s' % (running, m.group(3))
    except ValueError:
        # not Mac OS X after all; keep the build tag
        return plat


# Public API of pkg_resources: controls ``from pkg_resources import *``
# and documents the supported surface area.
__all__ = [
    # Basic resource access and distribution/entry point discovery
    'require', 'run_script', 'get_provider', 'get_distribution',
    'load_entry_point', 'get_entry_map', 'get_entry_info',
    'iter_entry_points',
    'resource_string', 'resource_stream', 'resource_filename',
    'resource_listdir', 'resource_exists', 'resource_isdir',

    # Environmental control
    'declare_namespace', 'working_set', 'add_activation_listener',
    'find_distributions', 'set_extraction_path', 'cleanup_resources',
    'get_default_cache',

    # Primary implementation classes
    'Environment', 'WorkingSet', 'ResourceManager',
    'Distribution', 'Requirement', 'EntryPoint',

    # Exceptions
    'ResolutionError', 'VersionConflict', 'DistributionNotFound',
    'UnknownExtra', 'ExtractionError',

    # Warnings
    'PEP440Warning',

    # Parsing functions and string utilities
    'parse_requirements', 'parse_version', 'safe_name', 'safe_version',
    'get_platform', 'compatible_platforms', 'yield_lines', 'split_sections',
    'safe_extra', 'to_filename', 'invalid_marker', 'evaluate_marker',

    # filesystem utilities
    'ensure_directory', 'normalize_path',

    # Distribution "precedence" constants
    'EGG_DIST', 'BINARY_DIST', 'SOURCE_DIST', 'CHECKOUT_DIST', 'DEVELOP_DIST',

    # "Provider" interfaces, implementations, and registration/lookup APIs
    'IMetadataProvider', 'IResourceProvider', 'FileMetadata',
    'PathMetadata', 'EggMetadata', 'EmptyProvider', 'empty_provider',
    'NullProvider', 'EggProvider', 'DefaultProvider', 'ZipProvider',
    'register_finder', 'register_namespace_handler', 'register_loader_type',
    'fixup_namespace_packages', 'get_importer',

    # Deprecated/backward compatibility only
    'run_main', 'AvailableDistributions',
]


class ResolutionError(Exception):
    """Abstract base for dependency resolution errors"""

    def __repr__(self):
        # e.g. ResolutionError('some requirement',)
        return "{}{}".format(type(self).__name__, repr(self.args))


class VersionConflict(ResolutionError):
    """
    An already-installed version conflicts with the requested version.

    Should be initialized with the installed Distribution and the requested
    Requirement.
    """

    _template = "{self.dist} is installed but {self.req} is required"

    @property
    def dist(self):
        # args[0]: the installed Distribution
        return self.args[0]

    @property
    def req(self):
        # args[1]: the conflicting Requirement
        return self.args[1]

    def report(self):
        """Render a human-readable description of the conflict."""
        return self._template.format(self=self)

    def with_context(self, required_by):
        """
        If required_by is non-empty, return a version of self that is a
        ContextualVersionConflict.
        """
        if required_by:
            return ContextualVersionConflict(*(self.args + (required_by,)))
        return self


class ContextualVersionConflict(VersionConflict):
    """
    A VersionConflict that accepts a third parameter, the set of the
    requirements that required the installed Distribution.
    """

    # Extends the parent message with the requirers that pulled in the
    # conflicting installed distribution.
    _template = VersionConflict._template + ' by {self.required_by}'

    @property
    def required_by(self):
        # args[2]: the set of requirements that required the dist
        return self.args[2]


class DistributionNotFound(ResolutionError):
    """A requested distribution was not found"""

    _template = ("The '{self.req}' distribution was not found "
                 "and is required by {self.requirers_str}")

    @property
    def req(self):
        # args[0]: the unsatisfied Requirement
        return self.args[0]

    @property
    def requirers(self):
        # args[1]: whatever required the missing distribution
        return self.args[1]

    @property
    def requirers_str(self):
        """Comma-joined requirer names, or 'the application' if none."""
        requirers = self.requirers
        if requirers:
            return ', '.join(requirers)
        return 'the application'

    def report(self):
        """Render a human-readable description of the failure."""
        return self._template.format(self=self)

    def __str__(self):
        return self.report()


class UnknownExtra(ResolutionError):
    """Distribution doesn't have an "extra feature" of the given name"""


# Registry of PEP 302 loader type -> provider factory; populated via
# register_loader_type() and consulted by get_provider().
_provider_factories = {}

# Major Python version string, e.g. '2.7' or '3.6' (first 3 characters of
# sys.version).
PY_MAJOR = sys.version[:3]
# Distribution "precedence" constants: when multiple distributions for a
# project are found, the one with the higher precedence is preferred.
EGG_DIST = 3
BINARY_DIST = 2
SOURCE_DIST = 1
CHECKOUT_DIST = 0
DEVELOP_DIST = -1


def register_loader_type(loader_type, provider_factory):
    """Register `provider_factory` to make providers for `loader_type`

    `loader_type` is the type or class of a PEP 302 ``module.__loader__``,
    and `provider_factory` is a function that, passed a *module* object,
    returns an ``IResourceProvider`` for that module.
    """
    # Registrations are looked up by get_provider() via the module's
    # __loader__ attribute.
    _provider_factories[loader_type] = provider_factory


def get_provider(moduleOrReq):
    """Return an IResourceProvider for the named module or requirement"""
    if isinstance(moduleOrReq, Requirement):
        # Requirement: find (or activate) a matching distribution.
        found = working_set.find(moduleOrReq)
        return found or require(str(moduleOrReq))[0]
    # Module name: import it if necessary, then adapt its loader.
    if moduleOrReq not in sys.modules:
        __import__(moduleOrReq)
    module = sys.modules[moduleOrReq]
    loader = getattr(module, '__loader__', None)
    factory = _find_adapter(_provider_factories, loader)
    return factory(module)


def _macosx_vers(_cache=[]):
    if not _cache:
        version = platform.mac_ver()[0]
        # fallback for MacPorts
        if version == '':
            plist = '/System/Library/CoreServices/SystemVersion.plist'
            if os.path.exists(plist):
                if hasattr(plistlib, 'readPlist'):
                    plist_content = plistlib.readPlist(plist)
                    if 'ProductVersion' in plist_content:
                        version = plist_content['ProductVersion']

        _cache.append(version.split('.'))
    return _cache[0]


def _macosx_arch(machine):
    return {'PowerPC': 'ppc', 'Power_Macintosh': 'ppc'}.get(machine, machine)


def get_build_platform():
    """Return this platform's string for platform-specific distributions.

    Equivalent to ``distutils.util.get_platform()``, with extra handling
    so darwin always yields a ``macosx-<major>.<minor>-<arch>`` tag when
    the default tag does not already have that form.
    """
    try:
        # sysconfig exists on Python 2.7 and >= 3.2
        from sysconfig import get_platform
    except ImportError:
        from distutils.util import get_platform

    plat = get_platform()
    if sys.platform != "darwin" or plat.startswith('macosx-'):
        return plat
    try:
        version = _macosx_vers()
        machine = os.uname()[4].replace(" ", "_")
        return "macosx-%d.%d-%s" % (
            int(version[0]), int(version[1]), _macosx_arch(machine))
    except ValueError:
        # Non-Mac darwin systems fall through to the default tag.
        return plat


# Matches modern tags like 'macosx-10.11-x86_64' -> (major, minor, arch).
macosVersionString = re.compile(r"macosx-(\d+)\.(\d+)-(.*)")
# Matches legacy tags like 'darwin-8.11.1-i386' used before setuptools 0.6.
darwinVersionString = re.compile(r"darwin-(\d+)\.(\d+)\.(\d+)-(.*)")
# XXX backward compat
get_platform = get_build_platform


def compatible_platforms(provided, required):
    """Can code for the `provided` platform run on the `required` platform?

    Returns true if either platform is ``None``, or the platforms are equal.
    Otherwise only the Mac OS X special cases are handled.

    XXX Needs compatibility checks for Linux and other unixy OSes.
    """
    # Trivially compatible: unspecified platforms or an exact match.
    if provided is None or required is None or provided == required:
        return True

    req_mac = macosVersionString.match(required)
    if not req_mac:
        # XXX Linux and other platforms' special cases should go here
        return False

    prov_mac = macosVersionString.match(provided)
    if not prov_mac:
        # Backwards compatibility: packages built before setuptools 0.6
        # used a darwin-* tag instead of macosx-*.  Accept the known
        # darwin/macosx combinations.
        prov_darwin = darwinVersionString.match(provided)
        if prov_darwin:
            dversion = int(prov_darwin.group(1))
            macosversion = "%s.%s" % (req_mac.group(1), req_mac.group(2))
            if dversion == 7 and macosversion >= "10.3" or \
                    dversion == 8 and macosversion >= "10.4":
                return True
        # egg isn't macosx or legacy darwin
        return False

    # Same major OS version and machine type required.
    if prov_mac.group(1) != req_mac.group(1) or \
            prov_mac.group(3) != req_mac.group(3):
        return False

    # The required OS minor update must be >= the provided one.
    if int(prov_mac.group(2)) > int(req_mac.group(2)):
        return False

    return True


def run_script(dist_spec, script_name):
    """Locate distribution `dist_spec` and run its `script_name` script"""
    # Run the script against the *caller's* global namespace, scrubbed of
    # everything except __name__, so the script behaves as if executed
    # directly.  The clear/re-set order is deliberate.
    ns = sys._getframe(1).f_globals
    name = ns['__name__']
    ns.clear()
    ns['__name__'] = name
    require(dist_spec)[0].run_script(script_name, ns)


# backward compatibility
run_main = run_script


def get_distribution(dist):
    """Return a current distribution object for a Requirement or string"""
    # Normalize in two steps: string -> Requirement -> Distribution.
    if isinstance(dist, six.string_types):
        dist = Requirement.parse(dist)
    if isinstance(dist, Requirement):
        dist = get_provider(dist)
    if isinstance(dist, Distribution):
        return dist
    raise TypeError("Expected string, Requirement, or Distribution", dist)


def load_entry_point(dist, group, name):
    """Return `name` entry point of `group` for `dist` or raise ImportError"""
    # Convenience wrapper: `dist` may be a string, Requirement, or
    # Distribution (see get_distribution).
    return get_distribution(dist).load_entry_point(group, name)


def get_entry_map(dist, group=None):
    """Return the entry point map for `group`, or the full entry map"""
    # Convenience wrapper: `dist` may be a string, Requirement, or
    # Distribution (see get_distribution).
    return get_distribution(dist).get_entry_map(group)


def get_entry_info(dist, group, name):
    """Return the EntryPoint object for `group`+`name`, or ``None``"""
    # Convenience wrapper: `dist` may be a string, Requirement, or
    # Distribution (see get_distribution).
    return get_distribution(dist).get_entry_info(group, name)


class IMetadataProvider:
    """Interface for objects that expose a distribution's metadata files.

    This is an interface specification: method bodies are empty and the
    signatures deliberately omit ``self``.
    """

    def has_metadata(name):
        """Does the package's distribution contain the named metadata?"""

    def get_metadata(name):
        """The named metadata resource as a string"""

    def get_metadata_lines(name):
        """Yield named metadata resource as list of non-blank non-comment lines

       Leading and trailing whitespace is stripped from each line, and lines
       with ``#`` as the first non-blank character are omitted."""

    def metadata_isdir(name):
        """Is the named metadata a directory?  (like ``os.path.isdir()``)"""

    def metadata_listdir(name):
        """List of metadata names in the directory (like ``os.listdir()``)"""

    def run_script(script_name, namespace):
        """Execute the named script in the supplied namespace dictionary"""


class IResourceProvider(IMetadataProvider):
    """An object that provides access to package resources

    Interface specification (signatures deliberately omit ``self``).
    Resource names use ``/``-separated paths, per the module docstring.
    """

    def get_resource_filename(manager, resource_name):
        """Return a true filesystem path for `resource_name`

        `manager` must be an ``IResourceManager``"""

    def get_resource_stream(manager, resource_name):
        """Return a readable file-like object for `resource_name`

        `manager` must be an ``IResourceManager``"""

    def get_resource_string(manager, resource_name):
        """Return a string containing the contents of `resource_name`

        `manager` must be an ``IResourceManager``"""

    def has_resource(resource_name):
        """Does the package contain the named resource?"""

    def resource_isdir(resource_name):
        """Is the named resource a directory?  (like ``os.path.isdir()``)"""

    def resource_listdir(resource_name):
        """List of resource names in the directory (like ``os.listdir()``)"""


class WorkingSet(object):
    """A collection of active distributions on sys.path (or a similar list)"""

    def __init__(self, entries=None):
        """Create working set from list of path entries (default=sys.path)"""
        # entries: ordered path items (mirrors sys.path, duplicates allowed)
        # entry_keys: path entry -> list of project keys found on that entry
        # by_key: project key -> the single active Distribution for it
        # callbacks: functions invoked for each newly added distribution
        self.entries = []
        self.entry_keys = {}
        self.by_key = {}
        self.callbacks = []

        if entries is None:
            entries = sys.path

        for entry in entries:
            self.add_entry(entry)

    @classmethod
    def _build_master(cls):
        """
        Prepare the master working set.

        Builds from sys.path; if ``__main__.__requires__`` exists, the
        requirements are validated, and on conflict the set is rebuilt
        from those requirements instead (rewriting sys.path).
        """
        ws = cls()
        try:
            from __main__ import __requires__
        except ImportError:
            # The main program does not list any requirements
            return ws

        # ensure the requirements are met
        try:
            ws.require(__requires__)
        except VersionConflict:
            return cls._build_from_requirements(__requires__)

        return ws

    @classmethod
    def _build_from_requirements(cls, req_spec):
        """
        Build a working set from a requirement spec. Rewrites sys.path.
        """
        # try it without defaults already on sys.path
        # by starting with an empty path
        ws = cls([])
        reqs = parse_requirements(req_spec)
        dists = ws.resolve(reqs, Environment())
        for dist in dists:
            ws.add(dist)

        # add any missing entries from sys.path
        for entry in sys.path:
            if entry not in ws.entries:
                ws.add_entry(entry)

        # then copy back to sys.path
        sys.path[:] = ws.entries
        return ws

    def add_entry(self, entry):
        """Add a path item to ``.entries``, finding any distributions on it

        ``find_distributions(entry, True)`` is used to find distributions
        corresponding to the path entry, and they are added.  `entry` is
        always appended to ``.entries``, even if it is already present.
        (This is because ``sys.path`` can contain the same value more than
        once, and the ``.entries`` of the ``sys.path`` WorkingSet should always
        equal ``sys.path``.)
        """
        self.entry_keys.setdefault(entry, [])
        self.entries.append(entry)
        for dist in find_distributions(entry, True):
            # insert=False: the entry is already in .entries (just above)
            self.add(dist, entry, False)

    def __contains__(self, dist):
        """True if `dist` is the active distribution for its project"""
        return self.by_key.get(dist.key) == dist

    def find(self, req):
        """Find a distribution matching requirement `req`

        If there is an active distribution for the requested project, this
        returns it as long as it meets the version requirement specified by
        `req`.  But, if there is an active distribution for the project and it
        does *not* meet the `req` requirement, ``VersionConflict`` is raised.
        If there is no active distribution for the requested project, ``None``
        is returned.
        """
        dist = self.by_key.get(req.key)
        if dist is not None and dist not in req:
            # XXX add more info
            raise VersionConflict(dist, req)
        return dist

    def iter_entry_points(self, group, name=None):
        """Yield entry point objects from `group` matching `name`

        If `name` is None, yields all entry points in `group` from all
        distributions in the working set, otherwise only ones matching
        both `group` and `name` are yielded (in distribution order).
        """
        for dist in self:
            entries = dist.get_entry_map(group)
            if name is None:
                for ep in entries.values():
                    yield ep
            elif name in entries:
                yield entries[name]

    def run_script(self, requires, script_name):
        """Locate distribution for `requires` and run `script_name` script"""
        # Reset the *caller's* global namespace (keeping only __name__) so
        # the script runs as if it were the main program.
        ns = sys._getframe(1).f_globals
        name = ns['__name__']
        ns.clear()
        ns['__name__'] = name
        self.require(requires)[0].run_script(script_name, ns)

    def __iter__(self):
        """Yield distributions for non-duplicate projects in the working set

        The yield order is the order in which the items' path entries were
        added to the working set.
        """
        seen = {}
        for item in self.entries:
            if item not in self.entry_keys:
                # workaround a cache issue
                continue

            for key in self.entry_keys[item]:
                if key not in seen:
                    seen[key] = 1
                    yield self.by_key[key]

    def add(self, dist, entry=None, insert=True, replace=False):
        """Add `dist` to working set, associated with `entry`

        If `entry` is unspecified, it defaults to the ``.location`` of `dist`.
        On exit from this routine, `entry` is added to the end of the working
        set's ``.entries`` (if it wasn't already present).

        `dist` is only added to the working set if it's for a project that
        doesn't already have a distribution in the set, unless `replace=True`.
        If it's added, any callbacks registered with the ``subscribe()`` method
        will be called.
        """
        if insert:
            dist.insert_on(self.entries, entry, replace=replace)

        if entry is None:
            entry = dist.location
        # Record the project key under both the requested entry and the
        # distribution's own location (they may differ).
        keys = self.entry_keys.setdefault(entry, [])
        keys2 = self.entry_keys.setdefault(dist.location, [])
        if not replace and dist.key in self.by_key:
            # ignore hidden distros
            return

        self.by_key[dist.key] = dist
        if dist.key not in keys:
            keys.append(dist.key)
        if dist.key not in keys2:
            keys2.append(dist.key)
        self._added_new(dist)

    def resolve(self, requirements, env=None, installer=None,
            replace_conflicting=False):
        """List all distributions needed to (recursively) meet `requirements`

        `requirements` must be a sequence of ``Requirement`` objects.  `env`,
        if supplied, should be an ``Environment`` instance.  If
        not supplied, it defaults to all distributions available within any
        entry or distribution in the working set.  `installer`, if supplied,
        will be invoked with each requirement that cannot be met by an
        already-installed distribution; it should return a ``Distribution`` or
        ``None``.

        Unless `replace_conflicting=True`, raises a VersionConflict exception if
        any requirements are found on the path that have the correct name but
        the wrong version.  Otherwise, if an `installer` is supplied it will be
        invoked to obtain the correct version of the requirement and activate
        it.
        """

        # set up the stack
        requirements = list(requirements)[::-1]
        # set of processed requirements
        processed = {}
        # key -> dist
        best = {}
        to_activate = []

        req_extras = _ReqExtras()

        # Mapping of requirement to set of distributions that required it;
        # useful for reporting info about conflicts.
        required_by = collections.defaultdict(set)

        while requirements:
            # process dependencies breadth-first
            req = requirements.pop(0)
            if req in processed:
                # Ignore cyclic or redundant dependencies
                continue

            if not req_extras.markers_pass(req):
                # Requirement's environment marker fails for every extra
                # that demanded it; skip entirely.
                continue

            dist = best.get(req.key)
            if dist is None:
                # Find the best distribution and add it to the map
                dist = self.by_key.get(req.key)
                if dist is None or (dist not in req and replace_conflicting):
                    ws = self
                    if env is None:
                        if dist is None:
                            env = Environment(self.entries)
                        else:
                            # Use an empty environment and workingset to avoid
                            # any further conflicts with the conflicting
                            # distribution
                            env = Environment([])
                            ws = WorkingSet([])
                    dist = best[req.key] = env.best_match(req, ws, installer)
                    if dist is None:
                        requirers = required_by.get(req, None)
                        raise DistributionNotFound(req, requirers)
                to_activate.append(dist)
            if dist not in req:
                # Oops, the "best" so far conflicts with a dependency
                dependent_req = required_by[req]
                raise VersionConflict(dist, req).with_context(dependent_req)

            # push the new requirements onto the stack
            new_requirements = dist.requires(req.extras)[::-1]
            requirements.extend(new_requirements)

            # Register the new requirements needed by req
            for new_requirement in new_requirements:
                required_by[new_requirement].add(req.project_name)
                req_extras[new_requirement] = req.extras

            processed[req] = True

        # return list of distros to activate
        return to_activate

    def find_plugins(self, plugin_env, full_env=None, installer=None,
            fallback=True):
        """Find all activatable distributions in `plugin_env`

        Example usage::

            distributions, errors = working_set.find_plugins(
                Environment(plugin_dirlist)
            )
            # add plugins+libs to sys.path
            map(working_set.add, distributions)
            # display errors
            print('Could not load', errors)

        The `plugin_env` should be an ``Environment`` instance that contains
        only distributions that are in the project's "plugin directory" or
        directories. The `full_env`, if supplied, should be an ``Environment``
        contains all currently-available distributions.  If `full_env` is not
        supplied, one is created automatically from the ``WorkingSet`` this
        method is called on, which will typically mean that every directory on
        ``sys.path`` will be scanned for distributions.

        `installer` is a standard installer callback as used by the
        ``resolve()`` method. The `fallback` flag indicates whether we should
        attempt to resolve older versions of a plugin if the newest version
        cannot be resolved.

        This method returns a 2-tuple: (`distributions`, `error_info`), where
        `distributions` is a list of the distributions found in `plugin_env`
        that were loadable, along with any other distributions that are needed
        to resolve their dependencies.  `error_info` is a dictionary mapping
        unloadable plugin distributions to an exception instance describing the
        error that occurred. Usually this will be a ``DistributionNotFound`` or
        ``VersionConflict`` instance.
        """

        plugin_projects = list(plugin_env)
        # scan project names in alphabetic order
        plugin_projects.sort()

        error_info = {}
        distributions = {}

        if full_env is None:
            env = Environment(self.entries)
            env += plugin_env
        else:
            env = full_env + plugin_env

        # Resolve against a shadow copy so failed plugins don't pollute
        # this working set.
        shadow_set = self.__class__([])
        # put all our entries in shadow_set
        list(map(shadow_set.add, self))

        for project_name in plugin_projects:

            for dist in plugin_env[project_name]:

                req = [dist.as_requirement()]

                try:
                    resolvees = shadow_set.resolve(req, env, installer)

                except ResolutionError as v:
                    # save error info
                    error_info[dist] = v
                    if fallback:
                        # try the next older version of project
                        continue
                    else:
                        # give up on this project, keep going
                        break

                else:
                    list(map(shadow_set.add, resolvees))
                    distributions.update(dict.fromkeys(resolvees))

                    # success, no need to try any more versions of this project
                    break

        distributions = list(distributions)
        distributions.sort()

        return distributions, error_info

    def require(self, *requirements):
        """Ensure that distributions matching `requirements` are activated

        `requirements` must be a string or a (possibly-nested) sequence
        thereof, specifying the distributions and versions required.  The
        return value is a sequence of the distributions that needed to be
        activated to fulfill the requirements; all relevant distributions are
        included, even if they were already activated in this working set.
        """
        needed = self.resolve(parse_requirements(requirements))

        for dist in needed:
            self.add(dist)

        return needed

    def subscribe(self, callback, existing=True):
        """Invoke `callback` for all distributions

        If `existing=True` (default),
        call on all existing ones, as well.
        """
        if callback in self.callbacks:
            return
        self.callbacks.append(callback)
        if not existing:
            return
        for dist in self:
            callback(dist)

    def _added_new(self, dist):
        """Notify every registered subscriber that `dist` was activated."""
        for callback in self.callbacks:
            callback(dist)

    def __getstate__(self):
        # Shallow-copy all state so the pickled snapshot is detached
        # from this instance.
        return (
            self.entries[:], self.entry_keys.copy(), self.by_key.copy(),
            self.callbacks[:]
        )

    def __setstate__(self, e_k_b_c):
        entries, keys, by_key, callbacks = e_k_b_c
        self.entries = entries[:]
        self.entry_keys = keys.copy()
        self.by_key = by_key.copy()
        self.callbacks = callbacks[:]


class _ReqExtras(dict):
    """
    Map each requirement to the tuple of extras that demanded it.
    """

    def markers_pass(self, req):
        """
        Evaluate ``req``'s environment marker against each extra that
        demanded it (plus ``None`` for the no-extra case).

        Return False if the req has a marker and every evaluation fails;
        otherwise return True.
        """
        if not req.marker:
            return True
        candidate_extras = self.get(req, ()) + (None,)
        return any(
            req.marker.evaluate({'extra': extra})
            for extra in candidate_extras
        )


class Environment(object):
    """Searchable snapshot of distributions on a search path"""

    # NOTE: the `platform` default is evaluated once, at class-definition
    # time; `platform=None` is meaningful (match all platforms), so a
    # None-sentinel rewrite is not applicable here.
    def __init__(self, search_path=None, platform=get_supported_platform(),
            python=PY_MAJOR):
        """Snapshot distributions available on a search path

        Any distributions found on `search_path` are added to the environment.
        `search_path` should be a sequence of ``sys.path`` items.  If not
        supplied, ``sys.path`` is used.

        `platform` is an optional string specifying the name of the platform
        that platform-specific distributions must be compatible with.  If
        unspecified, it defaults to the current platform.  `python` is an
        optional string naming the desired version of Python (e.g. ``'3.3'``);
        it defaults to the current version.

        You may explicitly set `platform` (and/or `python`) to ``None`` if you
        wish to map *all* distributions, not just those compatible with the
        running platform or Python version.
        """
        # key (lowercased project name) -> newest-first list of Distributions
        self._distmap = {}
        self.platform = platform
        self.python = python
        self.scan(search_path)

    def can_add(self, dist):
        """Is distribution `dist` acceptable for this environment?

        The distribution must match the platform and python version
        requirements specified when this environment was created, or False
        is returned.
        """
        return (self.python is None or dist.py_version is None
            or dist.py_version == self.python) \
            and compatible_platforms(dist.platform, self.platform)

    def remove(self, dist):
        """Remove `dist` from the environment"""
        self._distmap[dist.key].remove(dist)

    def scan(self, search_path=None):
        """Scan `search_path` for distributions usable in this environment

        Any distributions found are added to the environment.
        `search_path` should be a sequence of ``sys.path`` items.  If not
        supplied, ``sys.path`` is used.  Only distributions conforming to
        the platform/python version defined at initialization are added.
        """
        if search_path is None:
            search_path = sys.path

        for item in search_path:
            for dist in find_distributions(item):
                self.add(dist)

    def __getitem__(self, project_name):
        """Return a newest-to-oldest list of distributions for `project_name`

        Uses case-insensitive `project_name` comparison, assuming all the
        project's distributions use their project's name converted to all
        lowercase as their key.

        """
        distribution_key = project_name.lower()
        return self._distmap.get(distribution_key, [])

    def add(self, dist):
        """Add `dist` if we ``can_add()`` it and it has not already been added
        """
        if self.can_add(dist) and dist.has_version():
            dists = self._distmap.setdefault(dist.key, [])
            if dist not in dists:
                dists.append(dist)
                # keep newest-first ordering for __getitem__/best_match
                dists.sort(key=operator.attrgetter('hashcmp'), reverse=True)

    def best_match(self, req, working_set, installer=None):
        """Find distribution best matching `req` and usable on `working_set`

        This calls the ``find(req)`` method of the `working_set` to see if a
        suitable distribution is already active.  (This may raise
        ``VersionConflict`` if an unsuitable version of the project is already
        active in the specified `working_set`.)  If a suitable distribution
        isn't active, this method returns the newest distribution in the
        environment that meets the ``Requirement`` in `req`.  If no suitable
        distribution is found, and `installer` is supplied, then the result of
        calling the environment's ``obtain(req, installer)`` method will be
        returned.
        """
        dist = working_set.find(req)
        if dist is not None:
            return dist
        for dist in self[req.key]:
            if dist in req:
                return dist
        # try to download/install
        return self.obtain(req, installer)

    def obtain(self, requirement, installer=None):
        """Obtain a distribution matching `requirement` (e.g. via download)

        Obtain a distro that matches requirement (e.g. via download).  In the
        base ``Environment`` class, this routine just returns
        ``installer(requirement)``, unless `installer` is None, in which case
        None is returned instead.  This method is a hook that allows subclasses
        to attempt other ways of obtaining a distribution before falling back
        to the `installer` argument."""
        if installer is not None:
            return installer(requirement)

    def __iter__(self):
        """Yield the unique project names of the available distributions"""
        for key in self._distmap.keys():
            if self[key]:
                yield key

    def __iadd__(self, other):
        """In-place addition of a distribution or environment"""
        if isinstance(other, Distribution):
            self.add(other)
        elif isinstance(other, Environment):
            for project in other:
                for dist in other[project]:
                    self.add(dist)
        else:
            raise TypeError("Can't add %r to environment" % (other,))
        return self

    def __add__(self, other):
        """Add an environment or distribution to an environment"""
        # platform=None/python=None so the union accepts everything
        # either operand accepted.
        new = self.__class__([], platform=None, python=None)
        for env in self, other:
            new += env
        return new


# XXX backward compatibility: old public name for Environment, kept so
# legacy callers importing `AvailableDistributions` continue to work.
AvailableDistributions = Environment


class ExtractionError(RuntimeError):
    """An error occurred extracting a resource

    Raised by ``ResourceManager.extraction_error()``.  The following
    attributes are available from instances of this exception:

    manager
        The resource manager that raised this exception

    cache_path
        The base directory for resource extraction

    original_error
        The exception instance that caused extraction to fail
    """


class ResourceManager:
    """Manage resource extraction and packages"""

    # Base path for extraction; None means "use get_default_cache()".
    extraction_path = None

    def __init__(self):
        # target paths handed out by get_cache_path(), for later cleanup
        self.cached_files = {}

    def resource_exists(self, package_or_requirement, resource_name):
        """Does the named resource exist?"""
        return get_provider(package_or_requirement).has_resource(resource_name)

    def resource_isdir(self, package_or_requirement, resource_name):
        """Is the named resource an existing directory?"""
        return get_provider(package_or_requirement).resource_isdir(
            resource_name
        )

    def resource_filename(self, package_or_requirement, resource_name):
        """Return a true filesystem path for specified resource"""
        return get_provider(package_or_requirement).get_resource_filename(
            self, resource_name
        )

    def resource_stream(self, package_or_requirement, resource_name):
        """Return a readable file-like object for specified resource"""
        return get_provider(package_or_requirement).get_resource_stream(
            self, resource_name
        )

    def resource_string(self, package_or_requirement, resource_name):
        """Return specified resource as a string"""
        return get_provider(package_or_requirement).get_resource_string(
            self, resource_name
        )

    def resource_listdir(self, package_or_requirement, resource_name):
        """List the contents of the named resource directory"""
        return get_provider(package_or_requirement).resource_listdir(
            resource_name
        )

    def extraction_error(self):
        """Give an error message for problems extracting file(s)

        Wraps the currently-handled exception in an ``ExtractionError``
        and raises it; must be called from inside an ``except`` block.
        """

        old_exc = sys.exc_info()[1]
        cache_path = self.extraction_path or get_default_cache()

        tmpl = textwrap.dedent("""
            Can't extract file(s) to egg cache

            The following error occurred while trying to extract file(s) to the Python egg
            cache:

              {old_exc}

            The Python egg cache directory is currently set to:

              {cache_path}

            Perhaps your account does not have write access to this directory?  You can
            change the cache directory by setting the PYTHON_EGG_CACHE environment
            variable to point to an accessible directory.
            """).lstrip()
        err = ExtractionError(tmpl.format(**locals()))
        err.manager = self
        err.cache_path = cache_path
        err.original_error = old_exc
        raise err

    def get_cache_path(self, archive_name, names=()):
        """Return absolute location in cache for `archive_name` and `names`

        The parent directory of the resulting path will be created if it does
        not already exist.  `archive_name` should be the base filename of the
        enclosing egg (which may not be the name of the enclosing zipfile!),
        including its ".egg" extension.  `names`, if provided, should be a
        sequence of path name parts "under" the egg's extraction location.

        This method should only be called by resource providers that need to
        obtain an extraction location, and only for names they intend to
        extract, as it tracks the generated names for possible cleanup later.
        """
        extract_path = self.extraction_path or get_default_cache()
        target_path = os.path.join(extract_path, archive_name + '-tmp', *names)
        try:
            _bypass_ensure_directory(target_path)
        # NOTE(review): bare except is deliberate here -- extraction_error()
        # re-raises using the in-flight exception from sys.exc_info().
        except:
            self.extraction_error()

        self._warn_unsafe_extraction_path(extract_path)

        self.cached_files[target_path] = 1
        return target_path

    @staticmethod
    def _warn_unsafe_extraction_path(path):
        """
        If the default extraction path is overridden and set to an insecure
        location, such as /tmp, it opens up an opportunity for an attacker to
        replace an extracted file with an unauthorized payload. Warn the user
        if a known insecure location is used.

        See Distribute #375 for more details.
        """
        if os.name == 'nt' and not path.startswith(os.environ['windir']):
            # On Windows, permissions are generally restrictive by default
            #  and temp directories are not writable by other users, so
            #  bypass the warning.
            return
        mode = os.stat(path).st_mode
        if mode & stat.S_IWOTH or mode & stat.S_IWGRP:
            msg = ("%s is writable by group/others and vulnerable to attack "
                "when "
                "used with get_resource_filename. Consider a more secure "
                "location (set with .set_extraction_path or the "
                "PYTHON_EGG_CACHE environment variable)." % path)
            warnings.warn(msg, UserWarning)

    def postprocess(self, tempname, filename):
        """Perform any platform-specific postprocessing of `tempname`

        This is where Mac header rewrites should be done; other platforms don't
        have anything special they should do.

        Resource providers should call this method ONLY after successfully
        extracting a compressed resource.  They must NOT call it on resources
        that are already in the filesystem.

        `tempname` is the current (temporary) name of the file, and `filename`
        is the name it will be renamed to by the caller after this routine
        returns.
        """

        if os.name == 'posix':
            # Make the resource executable
            mode = ((os.stat(tempname).st_mode) | 0o555) & 0o7777
            os.chmod(tempname, mode)

    def set_extraction_path(self, path):
        """Set the base path where resources will be extracted to, if needed.

        If you do not call this routine before any extractions take place, the
        path defaults to the return value of ``get_default_cache()``.  (Which
        is based on the ``PYTHON_EGG_CACHE`` environment variable, with various
        platform-specific fallbacks.  See that routine's documentation for more
        details.)

        Resources are extracted to subdirectories of this path based upon
        information given by the ``IResourceProvider``.  You may set this to a
        temporary directory, but then you must call ``cleanup_resources()`` to
        delete the extracted files when done.  There is no guarantee that
        ``cleanup_resources()`` will be able to remove all extracted files.

        (Note: you may not change the extraction path for a given resource
        manager once resources have been extracted, unless you first call
        ``cleanup_resources()``.)
        """
        if self.cached_files:
            raise ValueError(
                "Can't change extraction path, files already extracted"
            )

        self.extraction_path = path

    def cleanup_resources(self, force=False):
        """
        Delete all extracted resource files and directories, returning a list
        of the file and directory names that could not be successfully removed.
        This function does not have any concurrency protection, so it should
        generally only be called when the extraction path is a temporary
        directory exclusive to a single process.  This method is not
        automatically called; you must call it explicitly or register it as an
        ``atexit`` function if you wish to ensure cleanup of a temporary
        directory used for extractions.
        """
        # XXX intentionally unimplemented in this base class

def get_default_cache():
    """
    Return the ``PYTHON_EGG_CACHE`` environment variable
    or a platform-relevant user cache dir for an app
    named "Python-Eggs".
    """
    env_cache = os.environ.get('PYTHON_EGG_CACHE')
    if env_cache:
        return env_cache
    return appdirs.user_cache_dir(appname='Python-Eggs')


def safe_name(name):
    """Convert an arbitrary string to a standard distribution name

    Any runs of non-alphanumeric/. characters are replaced with a single '-'.
    """
    illegal_run = re.compile('[^A-Za-z0-9.]+')
    return illegal_run.sub('-', name)


def safe_version(version):
    """
    Convert an arbitrary string to a standard version string
    """
    try:
        # prefer the canonical PEP 440 normalization
        normalized = packaging.version.Version(version)
    except packaging.version.InvalidVersion:
        # fall back to a lossy cleanup for legacy version strings
        cleaned = version.replace(' ', '.')
        return re.sub('[^A-Za-z0-9.]+', '-', cleaned)
    return str(normalized)


def safe_extra(extra):
    """Convert an arbitrary string to a standard 'extra' name

    Any runs of non-alphanumeric characters are replaced with a single '_',
    and the result is always lowercased.
    """
    underscored = re.sub('[^A-Za-z0-9.-]+', '_', extra)
    return underscored.lower()


def to_filename(name):
    """Convert a project or version name to its filename-escaped form

    Any '-' characters are currently replaced with '_'.
    """
    return '_'.join(name.split('-'))


def invalid_marker(text):
    """
    Validate text as a PEP 508 environment marker; return an exception
    if invalid or False otherwise.
    """
    try:
        evaluate_marker(text)
    except SyntaxError as exc:
        # strip location info that is meaningless for a marker string
        exc.filename = None
        exc.lineno = None
        return exc
    else:
        return False


def evaluate_marker(text, extra=None):
    """
    Evaluate a PEP 508 environment marker.
    Return a boolean indicating the marker result in this environment.
    Raise SyntaxError if marker is invalid.

    This implementation uses the 'pyparsing' module.
    """
    try:
        return packaging.markers.Marker(text).evaluate()
    except packaging.markers.InvalidMarker as exc:
        # present parse failures uniformly as SyntaxError
        raise SyntaxError(exc)


class NullProvider:
    """Try to implement resources and metadata for arbitrary PEP 302 loaders"""

    egg_name = None
    egg_info = None
    loader = None

    def __init__(self, module):
        self.loader = getattr(module, '__loader__', None)
        self.module_path = os.path.dirname(getattr(module, '__file__', ''))

    def get_resource_filename(self, manager, resource_name):
        return self._fn(self.module_path, resource_name)

    def get_resource_stream(self, manager, resource_name):
        return io.BytesIO(self.get_resource_string(manager, resource_name))

    def get_resource_string(self, manager, resource_name):
        return self._get(self._fn(self.module_path, resource_name))

    def has_resource(self, resource_name):
        return self._has(self._fn(self.module_path, resource_name))

    def has_metadata(self, name):
        # falsy (no egg_info) short-circuits to the falsy egg_info itself
        return self.egg_info and self._has(self._fn(self.egg_info, name))

    def get_metadata(self, name):
        if not self.egg_info:
            return ""
        value = self._get(self._fn(self.egg_info, name))
        # _get returns bytes; callers expect text on Python 3
        return value.decode('utf-8') if six.PY3 else value

    def get_metadata_lines(self, name):
        return yield_lines(self.get_metadata(name))

    def resource_isdir(self, resource_name):
        return self._isdir(self._fn(self.module_path, resource_name))

    def metadata_isdir(self, name):
        return self.egg_info and self._isdir(self._fn(self.egg_info, name))

    def resource_listdir(self, resource_name):
        return self._listdir(self._fn(self.module_path, resource_name))

    def metadata_listdir(self, name):
        if self.egg_info:
            return self._listdir(self._fn(self.egg_info, name))
        return []

    def run_script(self, script_name, namespace):
        """Execute the egg's ``scripts/<script_name>`` in `namespace`.

        Raises ``ResolutionError`` if the script is not present in the
        egg's metadata.  NOTE: executes the script text via ``exec`` --
        callers must only run scripts from trusted distributions.
        """
        script = 'scripts/' + script_name
        if not self.has_metadata(script):
            raise ResolutionError("No script named %r" % script_name)
        # normalize all line endings to \n before compiling
        script_text = self.get_metadata(script).replace('\r\n', '\n')
        script_text = script_text.replace('\r', '\n')
        script_filename = self._fn(self.egg_info, script)
        namespace['__file__'] = script_filename
        if os.path.exists(script_filename):
            source = open(script_filename).read()
            code = compile(source, script_filename, 'exec')
            exec(code, namespace, namespace)
        else:
            # script only exists inside a zip: seed linecache so tracebacks
            # can still display source lines for the virtual filename
            from linecache import cache
            cache[script_filename] = (
                len(script_text), 0, script_text.split('\n'), script_filename
            )
            script_code = compile(script_text, script_filename, 'exec')
            exec(script_code, namespace, namespace)

    # The _has/_isdir/_listdir primitives below are hooks; subclasses for
    # concrete loader types (filesystem, zip, ...) override them.

    def _has(self, path):
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
        )

    def _isdir(self, path):
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
        )

    def _listdir(self, path):
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
        )

    def _fn(self, base, resource_name):
        # join '/'-separated resource names beneath `base` using os separators
        if resource_name:
            return os.path.join(base, *resource_name.split('/'))
        return base

    def _get(self, path):
        if hasattr(self.loader, 'get_data'):
            return self.loader.get_data(path)
        raise NotImplementedError(
            "Can't perform this operation for loaders without 'get_data()'"
        )


# Fallback registration: any loader type without a more specific provider
# is handled by NullProvider.
register_loader_type(object, NullProvider)


class EggProvider(NullProvider):
    """Provider based on a virtual filesystem"""

    def __init__(self, module):
        NullProvider.__init__(self, module)
        self._setup_prefix()

    def _setup_prefix(self):
        # Metadata may live inside a "basket" directory that holds several
        # eggs, so walk upward from module_path (not .archive) until the
        # nearest enclosing *.egg directory is found -- or the root is hit.
        current = self.module_path
        previous = None
        while current != previous:
            if _is_unpacked_egg(current):
                self.egg_name = os.path.basename(current)
                self.egg_info = os.path.join(current, 'EGG-INFO')
                self.egg_root = current
                break
            previous = current
            current, _tail = os.path.split(current)


class DefaultProvider(EggProvider):
    """Provides access to package resources in the filesystem"""

    def _has(self, path):
        return os.path.exists(path)

    def _isdir(self, path):
        return os.path.isdir(path)

    def _listdir(self, path):
        return os.listdir(path)

    def get_resource_stream(self, manager, resource_name):
        """Return a binary file object for the named resource."""
        return open(self._fn(self.module_path, resource_name), 'rb')

    def _get(self, path):
        with open(path, 'rb') as stream:
            return stream.read()

    @classmethod
    def _register(cls):
        # SourceFileLoader is absent on very old Pythons; fall back to a
        # type nothing is an instance of, making registration a no-op there.
        loader_cls = getattr(
            importlib_machinery, 'SourceFileLoader', type(None))
        register_loader_type(loader_cls, cls)


# Hook DefaultProvider up to plain source-file loaders.
DefaultProvider._register()


class EmptyProvider(NullProvider):
    """Provider that returns nothing for all requests"""

    # No backing module, so there is no path to resolve against.
    module_path = None

    def _has(self, path):
        return False

    def _isdir(self, path):
        return False

    def _get(self, path):
        return ''

    def _listdir(self, path):
        return []

    def __init__(self):
        pass


# Shared singleton used when a distribution has no metadata at all.
empty_provider = EmptyProvider()


class ZipManifests(dict):
    """
    zip manifest builder
    """

    @classmethod
    def build(cls, path):
        """
        Build a dictionary similar to the zipimport directory
        caches, except instead of tuples, store ZipInfo objects.

        Use a platform-specific path separator (os.sep) for the path keys
        for compatibility with pypy on Windows.
        """
        with ContextualZipFile(path) as zfile:
            return dict(
                (name.replace('/', os.sep), zfile.getinfo(name))
                for name in zfile.namelist()
            )

    load = build


class MemoizedZipManifests(ZipManifests):
    """
    Memoized zipfile manifests.
    """
    manifest_mod = collections.namedtuple('manifest_mod', 'manifest mtime')

    def load(self, path):
        """
        Load a manifest at path or return a suitable manifest already loaded.
        """
        path = os.path.normpath(path)
        mtime = os.stat(path).st_mtime
        cached = self.get(path)
        # Rebuild when this zip was never seen or has changed on disk.
        if cached is None or cached.mtime != mtime:
            cached = self.manifest_mod(self.build(path), mtime)
            self[path] = cached
        return cached.manifest


class ContextualZipFile(zipfile.ZipFile):
    """
    Supplement ZipFile class to support context manager for Python 2.6
    """

    def __new__(cls, *args, **kwargs):
        """
        Construct a ZipFile or ContextualZipFile as appropriate
        """
        # Modern ZipFile is already a context manager; use it directly and
        # only fall back to this shim on ancient Pythons.
        if hasattr(zipfile.ZipFile, '__exit__'):
            return zipfile.ZipFile(*args, **kwargs)
        return super(ContextualZipFile, cls).__new__(cls)

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, exc_tb):
        self.close()


class ZipProvider(EggProvider):
    """Resource support for zips and eggs"""

    eagers = None
    # Shared, mtime-aware manifest cache keyed by archive path.
    _zip_manifests = MemoizedZipManifests()

    def __init__(self, module):
        EggProvider.__init__(self, module)
        # Every name inside the archive is addressed relative to this prefix.
        self.zip_pre = self.loader.archive + os.sep

    def _zipinfo_name(self, fspath):
        # Convert a virtual filename (full path to file) into a zipfile subpath
        # usable with the zipimport directory cache for our target archive
        if fspath.startswith(self.zip_pre):
            return fspath[len(self.zip_pre):]
        raise AssertionError(
            "%s is not a subpath of %s" % (fspath, self.zip_pre)
        )

    def _parts(self, zip_path):
        # Convert a zipfile subpath into an egg-relative path part list.
        # pseudo-fs path
        fspath = self.zip_pre + zip_path
        if fspath.startswith(self.egg_root + os.sep):
            return fspath[len(self.egg_root) + 1:].split(os.sep)
        raise AssertionError(
            "%s is not a subpath of %s" % (fspath, self.egg_root)
        )

    @property
    def zipinfo(self):
        # Mapping of os.sep-separated subpath -> ZipInfo for this archive.
        return self._zip_manifests.load(self.loader.archive)

    def get_resource_filename(self, manager, resource_name):
        """Extract the resource to the cache and return its filesystem path."""
        if not self.egg_name:
            raise NotImplementedError(
                "resource_filename() only supported for .egg, not .zip"
            )
        # no need to lock for extraction, since we use temp names
        zip_path = self._resource_to_zip(resource_name)
        eagers = self._get_eager_resources()
        if '/'.join(self._parts(zip_path)) in eagers:
            # Eager resources (e.g. native libs) are all extracted together,
            # since one may load another by relative path.
            for name in eagers:
                self._extract_resource(manager, self._eager_to_zip(name))
        return self._extract_resource(manager, zip_path)

    @staticmethod
    def _get_date_and_size(zip_stat):
        # Returns (mtime-as-epoch, size) for a ZipInfo entry.
        size = zip_stat.file_size
        # ymdhms+wday, yday, dst
        date_time = zip_stat.date_time + (0, 0, -1)
        # 1980 offset already done
        timestamp = time.mktime(date_time)
        return timestamp, size

    def _extract_resource(self, manager, zip_path):

        # Directories are extracted recursively, entry by entry.
        if zip_path in self._index():
            # NOTE(review): assumes the directory has at least one entry;
            # `last` would be unbound for an empty directory -- confirm.
            for name in self._index()[zip_path]:
                last = self._extract_resource(
                    manager, os.path.join(zip_path, name)
                )
            # return the extracted directory name
            return os.path.dirname(last)

        timestamp, size = self._get_date_and_size(self.zipinfo[zip_path])

        if not WRITE_SUPPORT:
            raise IOError('"os.rename" and "os.unlink" are not supported '
                          'on this platform')
        try:

            real_path = manager.get_cache_path(
                self.egg_name, self._parts(zip_path)
            )

            if self._is_current(real_path, zip_path):
                return real_path

            # Write to a unique temp name first, then rename into place, so
            # concurrent extractors never observe a partially written file.
            outf, tmpnam = _mkstemp(".$extract", dir=os.path.dirname(real_path))
            os.write(outf, self.loader.get_data(zip_path))
            os.close(outf)
            utime(tmpnam, (timestamp, timestamp))
            manager.postprocess(tmpnam, real_path)

            try:
                rename(tmpnam, real_path)

            except os.error:
                if os.path.isfile(real_path):
                    if self._is_current(real_path, zip_path):
                        # the file became current since it was checked above,
                        #  so proceed.
                        return real_path
                    # Windows, del old file and retry
                    elif os.name == 'nt':
                        unlink(real_path)
                        rename(tmpnam, real_path)
                        return real_path
                raise

        except os.error:
            # report a user-friendly error
            manager.extraction_error()

        return real_path

    def _is_current(self, file_path, zip_path):
        """
        Return True if the file_path is current for this zip_path
        """
        timestamp, size = self._get_date_and_size(self.zipinfo[zip_path])
        if not os.path.isfile(file_path):
            return False
        stat = os.stat(file_path)
        if stat.st_size != size or stat.st_mtime != timestamp:
            return False
        # check that the contents match
        zip_contents = self.loader.get_data(zip_path)
        with open(file_path, 'rb') as f:
            file_contents = f.read()
        return zip_contents == file_contents

    def _get_eager_resources(self):
        # Lazily parse native_libs.txt / eager_resources.txt metadata once.
        if self.eagers is None:
            eagers = []
            for name in ('native_libs.txt', 'eager_resources.txt'):
                if self.has_metadata(name):
                    eagers.extend(self.get_metadata_lines(name))
            self.eagers = eagers
        return self.eagers

    def _index(self):
        # Build (once, memoized on self._dirindex) a mapping of directory
        # subpath -> list of immediate child names.
        try:
            return self._dirindex
        except AttributeError:
            ind = {}
            for path in self.zipinfo:
                parts = path.split(os.sep)
                while parts:
                    parent = os.sep.join(parts[:-1])
                    if parent in ind:
                        # Ancestors already indexed; stop walking upward.
                        ind[parent].append(parts[-1])
                        break
                    else:
                        ind[parent] = [parts.pop()]
            self._dirindex = ind
            return ind

    def _has(self, fspath):
        zip_path = self._zipinfo_name(fspath)
        # Files appear in the manifest; directories only in the index.
        return zip_path in self.zipinfo or zip_path in self._index()

    def _isdir(self, fspath):
        return self._zipinfo_name(fspath) in self._index()

    def _listdir(self, fspath):
        return list(self._index().get(self._zipinfo_name(fspath), ()))

    def _eager_to_zip(self, resource_name):
        return self._zipinfo_name(self._fn(self.egg_root, resource_name))

    def _resource_to_zip(self, resource_name):
        return self._zipinfo_name(self._fn(self.module_path, resource_name))


# zipimporter-loaded modules get resource access through ZipProvider.
register_loader_type(zipimport.zipimporter, ZipProvider)


class FileMetadata(EmptyProvider):
    """Metadata handler for standalone PKG-INFO files

    Usage::

        metadata = FileMetadata("/path/to/PKG-INFO")

    This provider rejects all data and metadata requests except for PKG-INFO,
    which is treated as existing, and will be the contents of the file at
    the provided location.
    """

    def __init__(self, path):
        self.path = path

    def has_metadata(self, name):
        # The single PKG-INFO file is the only metadata ever reported.
        return name == 'PKG-INFO' and os.path.isfile(self.path)

    def get_metadata(self, name):
        if name != 'PKG-INFO':
            raise KeyError("No metadata except PKG-INFO is available")

        with io.open(self.path, encoding='utf-8', errors="replace") as f:
            contents = f.read()
        self._warn_on_replacement(contents)
        return contents

    def get_metadata_lines(self, name):
        return yield_lines(self.get_metadata(name))

    def _warn_on_replacement(self, metadata):
        # Python 2.6 and 3.2 compat for: replacement_char = '�'
        replacement_char = b'\xef\xbf\xbd'.decode('utf-8')
        if replacement_char not in metadata:
            return
        tmpl = "{self.path} could not be properly decoded in UTF-8"
        msg = tmpl.format(**locals())
        warnings.warn(msg)


class PathMetadata(DefaultProvider):
    """Metadata provider for egg directories

    Usage::

        # Development eggs:

        egg_info = "/path/to/PackageName.egg-info"
        base_dir = os.path.dirname(egg_info)
        metadata = PathMetadata(base_dir, egg_info)
        dist_name = os.path.splitext(os.path.basename(egg_info))[0]
        dist = Distribution(
            base_dir, project_name=dist_name, metadata=metadata)

        # Unpacked egg directories:

        egg_path = "/path/to/PackageName-ver-pyver-etc.egg"
        metadata = PathMetadata(egg_path, os.path.join(egg_path, 'EGG-INFO'))
        dist = Distribution.from_filename(egg_path, metadata=metadata)
    """

    def __init__(self, path, egg_info):
        # `path` is the sys.path entry; `egg_info` the metadata directory.
        self.module_path = path
        self.egg_info = egg_info


class EggMetadata(ZipProvider):
    """Metadata provider for .egg files"""

    def __init__(self, importer):
        """Create a metadata provider from a zipimporter"""
        self.zip_pre = importer.archive + os.sep
        self.loader = importer
        # A non-empty prefix means the importer targets a subdirectory of
        # the archive rather than its root.
        self.module_path = (
            os.path.join(importer.archive, importer.prefix)
            if importer.prefix
            else importer.archive
        )
        self._setup_prefix()


# Module-level registry mapping importer types to distribution finders.
_declare_state('dict', _distribution_finders={})


def register_finder(importer_type, distribution_finder):
    """Register `distribution_finder` to find distributions in sys.path items

    `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
    handler), and `distribution_finder` is a callable that, passed a path
    item and the importer instance, yields ``Distribution`` instances found on
    that path item.  See ``pkg_resources.find_on_path`` for an example."""
    # A later registration for the same importer type silently replaces
    # the earlier one.
    _distribution_finders[importer_type] = distribution_finder


def find_distributions(path_item, only=False):
    """Yield distributions accessible via `path_item`"""
    # Dispatch to the finder registered for this path item's importer type.
    importer = get_importer(path_item)
    find = _find_adapter(_distribution_finders, importer)
    return find(importer, path_item, only)


def find_eggs_in_zip(importer, path_item, only=False):
    """
    Find eggs in zip files; possibly multiple nested eggs.
    """
    if importer.archive.endswith('.whl'):
        # wheels are not supported with this finder
        # they don't have PKG-INFO metadata, and won't ever contain eggs
        return
    metadata = EggMetadata(importer)
    if metadata.has_metadata('PKG-INFO'):
        # The archive itself is a distribution.
        yield Distribution.from_filename(path_item, metadata=metadata)
    if only:
        # don't yield nested distros
        return
    # Recurse into any *.egg entries bundled inside this archive ("baskets").
    for subitem in metadata.resource_listdir('/'):
        if _is_unpacked_egg(subitem):
            subpath = os.path.join(path_item, subitem)
            for dist in find_eggs_in_zip(zipimport.zipimporter(subpath), subpath):
                yield dist


# Zip archives (including eggs) are scanned with find_eggs_in_zip.
register_finder(zipimport.zipimporter, find_eggs_in_zip)


def find_nothing(importer, path_item, only=False):
    """Finder that never yields any distributions (the default fallback)."""
    return ()


# Unknown importer types yield no distributions.
register_finder(object, find_nothing)


def _by_version_descending(names):
    """
    Given a list of filenames, return them in descending order
    by version number.

    >>> names = 'bar', 'foo', 'Python-2.7.10.egg', 'Python-2.7.2.egg'
    >>> _by_version_descending(names)
    ['Python-2.7.10.egg', 'Python-2.7.2.egg', 'foo', 'bar']
    >>> names = 'Setuptools-1.2.3b1.egg', 'Setuptools-1.2.3.egg'
    >>> _by_version_descending(names)
    ['Setuptools-1.2.3.egg', 'Setuptools-1.2.3b1.egg']
    >>> names = 'Setuptools-1.2.3b1.egg', 'Setuptools-1.2.3.post1.egg'
    >>> _by_version_descending(names)
    ['Setuptools-1.2.3.post1.egg', 'Setuptools-1.2.3b1.egg']
    """
    def _by_version(name):
        """
        Parse each component of the filename
        """
        stem, ext = os.path.splitext(name)
        pieces = itertools.chain(stem.split('-'), [ext])
        return [packaging.version.parse(piece) for piece in pieces]

    return sorted(names, key=_by_version, reverse=True)


def find_on_path(importer, path_item, only=False):
    """Yield distributions accessible on a sys.path directory"""
    path_item = _normalize_cached(path_item)

    if os.path.isdir(path_item) and os.access(path_item, os.R_OK):
        if _is_unpacked_egg(path_item):
            # The path item itself is an unpacked egg directory.
            yield Distribution.from_filename(
                path_item, metadata=PathMetadata(
                    path_item, os.path.join(path_item, 'EGG-INFO')
                )
            )
        else:
            # scan for .egg and .egg-info in directory
            # (newest versions first, so duplicates resolve predictably)
            path_item_entries = _by_version_descending(os.listdir(path_item))
            for entry in path_item_entries:
                lower = entry.lower()
                if lower.endswith('.egg-info') or lower.endswith('.dist-info'):
                    fullpath = os.path.join(path_item, entry)
                    if os.path.isdir(fullpath):
                        # egg-info directory, allow getting metadata
                        if len(os.listdir(fullpath)) == 0:
                            # Empty egg directory, skip.
                            continue
                        metadata = PathMetadata(path_item, fullpath)
                    else:
                        # A single PKG-INFO-style metadata file.
                        metadata = FileMetadata(fullpath)
                    yield Distribution.from_location(
                        path_item, entry, metadata, precedence=DEVELOP_DIST
                    )
                elif not only and _is_unpacked_egg(entry):
                    # Nested egg: delegate back through the finder registry.
                    dists = find_distributions(os.path.join(path_item, entry))
                    for dist in dists:
                        yield dist
                elif not only and lower.endswith('.egg-link'):
                    # An egg-link points at a development checkout elsewhere;
                    # only the first non-blank line (a path) is honored.
                    with open(os.path.join(path_item, entry)) as entry_file:
                        entry_lines = entry_file.readlines()
                    for line in entry_lines:
                        if not line.strip():
                            continue
                        path = os.path.join(path_item, line.rstrip())
                        dists = find_distributions(path)
                        for item in dists:
                            yield item
                        break


register_finder(pkgutil.ImpImporter, find_on_path)

# FileFinder exists on Python >= 3.3; register it when present.
if hasattr(importlib_machinery, 'FileFinder'):
    register_finder(importlib_machinery.FileFinder, find_on_path)

# Module-level registries backing namespace-package support.
_declare_state('dict', _namespace_handlers={})
_declare_state('dict', _namespace_packages={})


def register_namespace_handler(importer_type, namespace_handler):
    """Register `namespace_handler` to declare namespace packages

    `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
    handler), and `namespace_handler` is a callable like this::

        def namespace_handler(importer, path_entry, moduleName, module):
            # return a path_entry to use for child packages

    Namespace handlers are only called if the importer object has already
    agreed that it can handle the relevant path item, and they should only
    return a subpath if the module __path__ does not already contain an
    equivalent subpath.  For an example namespace handler, see
    ``pkg_resources.file_ns_handler``.
    """
    # A later registration for the same importer type silently replaces
    # the earlier one.
    _namespace_handlers[importer_type] = namespace_handler


def _handle_ns(packageName, path_item):
    """Ensure that named package includes a subpath of path_item (if needed)"""

    importer = get_importer(path_item)
    if importer is None:
        return None
    loader = importer.find_module(packageName)
    if loader is None:
        # This path item doesn't provide the package at all.
        return None
    module = sys.modules.get(packageName)
    if module is None:
        # Create a stub namespace module so submodules have a home.
        module = sys.modules[packageName] = types.ModuleType(packageName)
        module.__path__ = []
        _set_parent_ns(packageName)
    elif not hasattr(module, '__path__'):
        raise TypeError("Not a package:", packageName)
    handler = _find_adapter(_namespace_handlers, importer)
    subpath = handler(importer, path_item, packageName, module)
    if subpath is not None:
        # New path entry for this namespace: record it, import the package
        # from it, then re-sort __path__ to match sys.path ordering.
        path = module.__path__
        path.append(subpath)
        loader.load_module(packageName)
        _rebuild_mod_path(path, packageName, module)
    return subpath


def _rebuild_mod_path(orig_path, package_name, module):
    """
    Rebuild module.__path__ ensuring that all entries are ordered
    corresponding to their sys.path order
    """
    sys_path = [_normalize_cached(p) for p in sys.path]

    def safe_sys_path_index(entry):
        """
        Workaround for #520 and #513.
        """
        # Entries not found on sys.path sort after everything that is.
        try:
            return sys_path.index(entry)
        except ValueError:
            return float('inf')

    def position_in_sys_path(path):
        """
        Return the ordinal of the path based on its position in sys.path
        """
        # Strip the trailing package directories (one per dotted component)
        # to recover the sys.path entry this __path__ element came from.
        path_parts = path.split(os.sep)
        module_parts = package_name.count('.') + 1
        parts = path_parts[:-module_parts]
        return safe_sys_path_index(_normalize_cached(os.sep.join(parts)))

    # Sort in place, then write the normalized result back onto the module.
    orig_path.sort(key=position_in_sys_path)
    module.__path__[:] = [_normalize_cached(p) for p in orig_path]


def declare_namespace(packageName):
    """Declare that package 'packageName' is a namespace package"""

    # The import lock guards the _namespace_packages registry and the
    # recursive parent declarations below.
    _imp.acquire_lock()
    try:
        if packageName in _namespace_packages:
            # Already declared; nothing to do.
            return

        path, parent = sys.path, None
        if '.' in packageName:
            # Recursively declare every ancestor as a namespace package,
            # then search for this package on the parent's __path__.
            parent = '.'.join(packageName.split('.')[:-1])
            declare_namespace(parent)
            if parent not in _namespace_packages:
                __import__(parent)
            try:
                path = sys.modules[parent].__path__
            except AttributeError:
                raise TypeError("Not a package:", parent)

        # Track what packages are namespaces, so when new path items are added,
        # they can be updated
        _namespace_packages.setdefault(parent, []).append(packageName)
        _namespace_packages.setdefault(packageName, [])

        for path_item in path:
            # Ensure all the parent's path items are reflected in the child,
            # if they apply
            _handle_ns(packageName, path_item)

    finally:
        _imp.release_lock()


def fixup_namespace_packages(path_item, parent=None):
    """Ensure that previously-declared namespace packages include path_item"""
    _imp.acquire_lock()
    try:
        # Walk depth-first through the declared namespace children of
        # `parent`, adding this path item wherever it applies.
        for package in _namespace_packages.get(parent, ()):
            subpath = _handle_ns(package, path_item)
            if subpath:
                fixup_namespace_packages(subpath, package)
    finally:
        _imp.release_lock()


def file_ns_handler(importer, path_item, packageName, module):
    """Compute an ns-package subpath for a filesystem or zipfile importer"""
    subpath = os.path.join(path_item, packageName.split('.')[-1])
    normalized = _normalize_cached(subpath)
    already_present = any(
        _normalize_cached(item) == normalized for item in module.__path__
    )
    # Only return the path if it's not already there
    if not already_present:
        return subpath


register_namespace_handler(pkgutil.ImpImporter, file_ns_handler)
register_namespace_handler(zipimport.zipimporter, file_ns_handler)

# FileFinder exists on Python >= 3.3; register it when present.
if hasattr(importlib_machinery, 'FileFinder'):
    register_namespace_handler(importlib_machinery.FileFinder, file_ns_handler)


def null_ns_handler(importer, path_item, packageName, module):
    """Fallback namespace handler: contributes no subpath."""
    return None


# Unknown importer types contribute no namespace subpaths.
register_namespace_handler(object, null_ns_handler)


def normalize_path(filename):
    """Normalize a file/dir name for comparison purposes"""
    # Resolve symlinks first, then fold case on case-insensitive systems.
    real = os.path.realpath(filename)
    return os.path.normcase(real)


def _normalize_cached(filename, _cache={}):
    """Memoizing wrapper around normalize_path."""
    # NB: the shared mutable default is deliberate -- it *is* the cache.
    if filename not in _cache:
        _cache[filename] = normalize_path(filename)
    return _cache[filename]


def _is_unpacked_egg(path):
    """
    Determine if given path appears to be an unpacked egg.
    """
    return path.lower().endswith('.egg')


def _set_parent_ns(packageName):
    """Bind the module `packageName` as an attribute of its parent package."""
    parts = packageName.split('.')
    leaf = parts.pop()
    if not parts:
        # Top-level module: nothing to attach it to.
        return
    parent_name = '.'.join(parts)
    setattr(sys.modules[parent_name], leaf, sys.modules[packageName])


def yield_lines(strs):
    """Yield non-empty/non-comment lines of a string or sequence"""
    if isinstance(strs, six.string_types):
        for raw in strs.splitlines():
            line = raw.strip()
            # skip blank lines/comments
            if line and not line.startswith('#'):
                yield line
    else:
        # Recursively flatten nested iterables of strings.
        for item in strs:
            for line in yield_lines(item):
                yield line


# Matches a dotted module path, e.g. "pkg.sub.mod".
MODULE = re.compile(r"\w+(\.\w+)*$").match
# Parses egg basenames of the form name[-version[-pyX.Y[-platform]]].
EGG_NAME = re.compile(
    r"""
    (?P<name>[^-]+) (
        -(?P<ver>[^-]+) (
            -py(?P<pyver>[^-]+) (
                -(?P<plat>.+)
            )?
        )?
    )?
    """,
    re.VERBOSE | re.IGNORECASE,
).match


class EntryPoint(object):
    """Object representing an advertised importable object"""

    def __init__(self, name, module_name, attrs=(), extras=(), dist=None):
        """
        :param name: advertised name of the entry point
        :param module_name: dotted module path containing the object
        :param attrs: attribute path within the module, as a sequence
        :param extras: extras (feature names) this entry point requires
        :param dist: the Distribution advertising this entry point, if any
        :raises ValueError: if `module_name` is not a valid dotted name
        """
        if not MODULE(module_name):
            raise ValueError("Invalid module name", module_name)
        self.name = name
        self.module_name = module_name
        self.attrs = tuple(attrs)
        # Validate/normalize extras by round-tripping through a Requirement.
        self.extras = Requirement.parse(("x[%s]" % ','.join(extras))).extras
        self.dist = dist

    def __str__(self):
        # Render in the same "name = module:attrs [extras]" syntax that
        # parse() accepts, so str/parse round-trip.
        s = "%s = %s" % (self.name, self.module_name)
        if self.attrs:
            s += ':' + '.'.join(self.attrs)
        if self.extras:
            s += ' [%s]' % ','.join(self.extras)
        return s

    def __repr__(self):
        return "EntryPoint.parse(%r)" % str(self)

    def load(self, require=True, *args, **kwargs):
        """
        Require packages for this EntryPoint, then resolve it.

        Passing ``require=False`` or extra arguments is deprecated; call
        :meth:`resolve` and :meth:`require` separately instead.
        """
        if not require or args or kwargs:
            warnings.warn(
                "Parameters to load are deprecated.  Call .resolve and "
                ".require separately.",
                DeprecationWarning,
                stacklevel=2,
            )
        if require:
            self.require(*args, **kwargs)
        return self.resolve()

    def resolve(self):
        """
        Resolve the entry point from its module and attrs.

        :raises ImportError: if the module or named attribute chain
            cannot be resolved
        """
        module = __import__(self.module_name, fromlist=['__name__'], level=0)
        try:
            return functools.reduce(getattr, self.attrs, module)
        except AttributeError as exc:
            # Surface a missing attribute as an import failure, which is
            # what callers of resolve() expect to handle.
            raise ImportError(str(exc))

    def require(self, env=None, installer=None):
        """
        Ensure this entry point's distribution (and any dependencies of its
        extras) is available, adding resolved items to the working set.
        """
        if not self.dist:
            # Previously a dist-less entry point without extras crashed with
            # a bare AttributeError from ``None.requires(...)``.  Preserve
            # the exception types callers may catch, but raise explicitly
            # with a useful message.
            error_cls = UnknownExtra if self.extras else AttributeError
            raise error_cls("Can't require() without a distribution", self)
        reqs = self.dist.requires(self.extras)
        items = working_set.resolve(reqs, env, installer)
        list(map(working_set.add, items))

    # name = module:attrs [extras]
    pattern = re.compile(
        r'\s*'
        r'(?P<name>.+?)\s*'
        r'=\s*'
        r'(?P<module>[\w.]+)\s*'
        r'(:\s*(?P<attr>[\w.]+))?\s*'
        r'(?P<extras>\[.*\])?\s*$'
    )

    @classmethod
    def parse(cls, src, dist=None):
        """Parse a single entry point from string `src`

        Entry point syntax follows the form::

            name = some.module:some.attr [extra1, extra2]

        The entry name and module name are required, but the ``:attrs`` and
        ``[extras]`` parts are optional
        """
        m = cls.pattern.match(src)
        if not m:
            msg = "EntryPoint must be in 'name=module:attrs [extras]' format"
            raise ValueError(msg, src)
        res = m.groupdict()
        extras = cls._parse_extras(res['extras'])
        attrs = res['attr'].split('.') if res['attr'] else ()
        return cls(res['name'], res['module'], attrs, extras, dist)

    @classmethod
    def _parse_extras(cls, extras_spec):
        # "[a,b]" -> ('a', 'b'); version specifiers are not allowed here.
        if not extras_spec:
            return ()
        req = Requirement.parse('x' + extras_spec)
        if req.specs:
            raise ValueError()
        return req.extras

    @classmethod
    def parse_group(cls, group, lines, dist=None):
        """Parse an entry point group"""
        if not MODULE(group):
            raise ValueError("Invalid group name", group)
        this = {}
        for line in yield_lines(lines):
            ep = cls.parse(line, dist)
            if ep.name in this:
                raise ValueError("Duplicate entry point", group, ep.name)
            this[ep.name] = ep
        return this

    @classmethod
    def parse_map(cls, data, dist=None):
        """Parse a map of entry point groups"""
        if isinstance(data, dict):
            data = data.items()
        else:
            data = split_sections(data)
        maps = {}
        for group, lines in data:
            if group is None:
                # Lines outside any [section] header are only legal if blank.
                if not lines:
                    continue
                raise ValueError("Entry points must be listed in groups")
            group = group.strip()
            if group in maps:
                raise ValueError("Duplicate group name", group)
            maps[group] = cls.parse_group(group, lines, dist)
        return maps


def _remove_md5_fragment(location):
    """Strip a trailing ``#md5=...`` fragment from a URL, if present."""
    if not location:
        return ''
    parsed = urllib.parse.urlparse(location)
    fragment = parsed[-1]
    if not fragment.startswith('md5='):
        return location
    return urllib.parse.urlunparse(parsed[:-1] + ('',))


def _version_from_file(lines):
    """
    Given an iterable of lines from a Metadata file, return
    the value of the Version field, if present, or None otherwise.
    """
    def is_version_line(line):
        return line.lower().startswith('version:')

    # First matching line wins; '' yields None below.
    line = next((l for l in lines if is_version_line(l)), '')
    _, _, value = line.partition(':')
    return safe_version(value.strip()) or None


class Distribution(object):
    """Wrap an actual or potential sys.path entry w/metadata"""
    # Metadata file the version is read from; subclasses override
    # (DistInfoDistribution uses 'METADATA').
    PKG_INFO = 'PKG-INFO'

    def __init__(self, location=None, metadata=None, project_name=None,
            version=None, py_version=PY_MAJOR, platform=None,
            precedence=EGG_DIST):
        # Fall back to 'Unknown' so key/egg_name always have a usable name.
        self.project_name = safe_name(project_name or 'Unknown')
        if version is not None:
            self._version = safe_version(version)
        self.py_version = py_version
        self.platform = platform
        self.location = location
        self.precedence = precedence
        # Metadata provider; empty_provider safely answers "no metadata".
        self._provider = metadata or empty_provider

    @classmethod
    def from_location(cls, location, basename, metadata=None, **kw):
        """Build a Distribution (of the subclass matching `basename`'s
        extension) from a path entry and its basename."""
        project_name, version, py_version, platform = [None] * 4
        basename, ext = os.path.splitext(basename)
        if ext.lower() in _distributionImpl:
            cls = _distributionImpl[ext.lower()]

            # Recover name/version/python/platform from an egg-style
            # filename such as "Foo-1.0-py2.7-linux-x86_64".
            match = EGG_NAME(basename)
            if match:
                project_name, version, py_version, platform = match.group(
                    'name', 'ver', 'pyver', 'plat'
                )
        return cls(
            location, metadata, project_name=project_name, version=version,
            py_version=py_version, platform=platform, **kw
        )._reload_version()

    def _reload_version(self):
        # Hook for subclasses to re-read the version from metadata; the
        # base implementation keeps the filename-derived version.
        return self

    @property
    def hashcmp(self):
        # Tuple used for hashing and for ordering comparisons between
        # distributions (newest/highest-precedence sorts accordingly).
        return (
            self.parsed_version,
            self.precedence,
            self.key,
            _remove_md5_fragment(self.location),
            self.py_version or '',
            self.platform or '',
        )

    def __hash__(self):
        return hash(self.hashcmp)

    def __lt__(self, other):
        return self.hashcmp < other.hashcmp

    def __le__(self, other):
        return self.hashcmp <= other.hashcmp

    def __gt__(self, other):
        return self.hashcmp > other.hashcmp

    def __ge__(self, other):
        return self.hashcmp >= other.hashcmp

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            # It's not a Distribution, so they are not equal
            return False
        return self.hashcmp == other.hashcmp

    def __ne__(self, other):
        return not self == other

    # These properties have to be lazy so that we don't have to load any
    # metadata until/unless it's actually needed.  (i.e., some distributions
    # may not know their name or version without loading PKG-INFO)

    @property
    def key(self):
        # Lower-cased project name, cached on first access.
        try:
            return self._key
        except AttributeError:
            self._key = key = self.project_name.lower()
            return key

    @property
    def parsed_version(self):
        # Parsed (PEP 440 or legacy) version object, computed lazily.
        if not hasattr(self, "_parsed_version"):
            self._parsed_version = parse_version(self.version)

        return self._parsed_version

    def _warn_legacy_version(self):
        LV = packaging.version.LegacyVersion
        is_legacy = isinstance(self._parsed_version, LV)
        if not is_legacy:
            return

        # While an empty version is technically a legacy version and
        # is not a valid PEP 440 version, it's also unlikely to
        # actually come from someone and instead it is more likely that
        # it comes from setuptools attempting to parse a filename and
        # including it in the list. So for that we'll gate this warning
        # on if the version is anything at all or not.
        if not self.version:
            return

        tmpl = textwrap.dedent("""
            '{project_name} ({version})' is being parsed as a legacy,
            non PEP 440,
            version. You may find odd behavior and sort order.
            In particular it will be sorted as less than 0.0. It
            is recommended to migrate to PEP 440 compatible
            versions.
            """).strip().replace('\n', ' ')

        warnings.warn(tmpl.format(**vars(self)), PEP440Warning)

    @property
    def version(self):
        try:
            return self._version
        except AttributeError:
            # No version supplied at construction; read it from metadata.
            version = _version_from_file(self._get_metadata(self.PKG_INFO))
            if version is None:
                tmpl = "Missing 'Version:' header and/or %s file"
                raise ValueError(tmpl % self.PKG_INFO, self)
            return version

    @property
    def _dep_map(self):
        # Mapping of extra name (None = unconditional) -> list of
        # Requirement objects, parsed lazily from requires.txt/depends.txt
        # and cached.
        try:
            return self.__dep_map
        except AttributeError:
            dm = self.__dep_map = {None: []}
            for name in 'requires.txt', 'depends.txt':
                for extra, reqs in split_sections(self._get_metadata(name)):
                    if extra:
                        if ':' in extra:
                            # "extra:marker" section header; drop the
                            # requirements when the marker is invalid or
                            # evaluates false in this environment.
                            extra, marker = extra.split(':', 1)
                            if invalid_marker(marker):
                                # XXX warn
                                reqs = []
                            elif not evaluate_marker(marker):
                                reqs = []
                        extra = safe_extra(extra) or None
                    dm.setdefault(extra, []).extend(parse_requirements(reqs))
            return dm

    def requires(self, extras=()):
        """List of Requirements needed for this distro if `extras` are used"""
        dm = self._dep_map
        deps = []
        deps.extend(dm.get(None, ()))
        for ext in extras:
            try:
                deps.extend(dm[safe_extra(ext)])
            except KeyError:
                raise UnknownExtra(
                    "%s has no such extra feature %r" % (self, ext)
                )
        return deps

    def _get_metadata(self, name):
        # Yield the lines of metadata file `name`, or nothing if absent.
        if self.has_metadata(name):
            for line in self.get_metadata_lines(name):
                yield line

    def activate(self, path=None, replace=False):
        """Ensure distribution is importable on `path` (default=sys.path)"""
        if path is None:
            path = sys.path
        self.insert_on(path, replace=replace)
        if path is sys.path:
            # Only fix up namespace packages when modifying the real import
            # path.
            fixup_namespace_packages(self.location)
            for pkg in self._get_metadata('namespace_packages.txt'):
                if pkg in sys.modules:
                    declare_namespace(pkg)

    def egg_name(self):
        """Return what this distribution's standard .egg filename should be"""
        filename = "%s-%s-py%s" % (
            to_filename(self.project_name), to_filename(self.version),
            self.py_version or PY_MAJOR
        )

        if self.platform:
            filename += '-' + self.platform
        return filename

    def __repr__(self):
        if self.location:
            return "%s (%s)" % (self, self.location)
        else:
            return str(self)

    def __str__(self):
        try:
            version = getattr(self, 'version', None)
        except ValueError:
            # The version property raises ValueError when metadata is
            # missing; show a placeholder instead.
            version = None
        version = version or "[unknown version]"
        return "%s %s" % (self.project_name, version)

    def __getattr__(self, attr):
        """Delegate all unrecognized public attributes to .metadata provider"""
        if attr.startswith('_'):
            raise AttributeError(attr)
        return getattr(self._provider, attr)

    @classmethod
    def from_filename(cls, filename, metadata=None, **kw):
        return cls.from_location(
            _normalize_cached(filename), os.path.basename(filename), metadata,
            **kw
        )

    def as_requirement(self):
        """Return a ``Requirement`` that matches this distribution exactly"""
        if isinstance(self.parsed_version, packaging.version.Version):
            spec = "%s==%s" % (self.project_name, self.parsed_version)
        else:
            # Legacy (non PEP 440) version: use the arbitrary-equality
            # operator, which matches the exact version string.
            spec = "%s===%s" % (self.project_name, self.parsed_version)

        return Requirement.parse(spec)

    def load_entry_point(self, group, name):
        """Return the `name` entry point of `group` or raise ImportError"""
        ep = self.get_entry_info(group, name)
        if ep is None:
            raise ImportError("Entry point %r not found" % ((group, name),))
        return ep.load()

    def get_entry_map(self, group=None):
        """Return the entry point map for `group`, or the full entry map"""
        try:
            ep_map = self._ep_map
        except AttributeError:
            # Parse entry_points.txt once and cache the resulting map.
            ep_map = self._ep_map = EntryPoint.parse_map(
                self._get_metadata('entry_points.txt'), self
            )
        if group is not None:
            return ep_map.get(group, {})
        return ep_map

    def get_entry_info(self, group, name):
        """Return the EntryPoint object for `group`+`name`, or ``None``"""
        return self.get_entry_map(group).get(name)

    def insert_on(self, path, loc=None, replace=False):
        """Ensure self.location is on path

        If replace=False (default):
            - If location is already in path anywhere, do nothing.
            - Else:
              - If it's an egg and its parent directory is on path,
                insert just ahead of the parent.
              - Else: add to the end of path.
        If replace=True:
            - If location is already on path anywhere (not eggs)
              or higher priority than its parent (eggs)
              do nothing.
            - Else:
              - If it's an egg and its parent directory is on path,
                insert just ahead of the parent,
                removing any lower-priority entries.
              - Else: add it to the front of path.
        """

        loc = loc or self.location
        if not loc:
            return

        nloc = _normalize_cached(loc)
        bdir = os.path.dirname(nloc)
        # Normalized copy of `path` for comparisons; empty entries pass
        # through unchanged.
        npath = [(p and _normalize_cached(p) or p) for p in path]

        for p, item in enumerate(npath):
            if item == nloc:
                if replace:
                    break
                else:
                    # don't modify path (even removing duplicates) if
                    # found and not replace
                    return
            elif item == bdir and self.precedence == EGG_DIST:
                # if it's an .egg, give it precedence over its directory
                # UNLESS it's already been added to sys.path and replace=False
                if (not replace) and nloc in npath[p:]:
                    return
                if path is sys.path:
                    self.check_version_conflict()
                path.insert(p, loc)
                npath.insert(p, nloc)
                break
        else:
            if path is sys.path:
                self.check_version_conflict()
            if replace:
                path.insert(0, loc)
            else:
                path.append(loc)
            return

        # p is the spot where we found or inserted loc; now remove duplicates
        while True:
            try:
                np = npath.index(nloc, p + 1)
            except ValueError:
                break
            else:
                del npath[np], path[np]
                # continue scanning from the removed slot
                p = np

        return

    def check_version_conflict(self):
        # Warn when a top-level module of this distribution was already
        # imported from somewhere outside this distribution's location.
        if self.key == 'setuptools':
            # ignore the inevitable setuptools self-conflicts  :(
            return

        nsp = dict.fromkeys(self._get_metadata('namespace_packages.txt'))
        loc = normalize_path(self.location)
        for modname in self._get_metadata('top_level.txt'):
            if (modname not in sys.modules or modname in nsp
                    or modname in _namespace_packages):
                continue
            if modname in ('pkg_resources', 'setuptools', 'site'):
                continue
            fn = getattr(sys.modules[modname], '__file__', None)
            if fn and (normalize_path(fn).startswith(loc) or
                       fn.startswith(self.location)):
                continue
            issue_warning(
                "Module %s was already imported from %s, but %s is being added"
                " to sys.path" % (modname, fn, self.location),
            )

    def has_version(self):
        # True when a version can be determined; warns and returns False
        # for unbuilt eggs whose metadata is missing.
        try:
            self.version
        except ValueError:
            issue_warning("Unbuilt egg for " + repr(self))
            return False
        return True

    def clone(self, **kw):
        """Copy this distribution, substituting in any changed keyword args"""
        names = 'project_name version py_version platform location precedence'
        for attr in names.split():
            kw.setdefault(attr, getattr(self, attr, None))
        kw.setdefault('metadata', self._provider)
        return self.__class__(**kw)

    @property
    def extras(self):
        # Names of all declared extras; the None key (unconditional deps)
        # is excluded by the truthiness test.
        return [dep for dep in self._dep_map if dep]


class EggInfoDistribution(Distribution):
    def _reload_version(self):
        """
        Prefer the version recorded in the metadata file over the version
        recovered from the filename.

        Packages installed by distutils (e.g. numpy or scipy) used an old
        safe_version, so versions such as 1.11.0.dev0+2329eae can get
        mangled when converted to filenames (1.11.0.dev0_2329eae) and will
        not be parsed properly downstream by Distribution and safe_version.
        Reading the metadata file itself avoids that mangling.
        """
        metadata_version = _version_from_file(self._get_metadata(self.PKG_INFO))
        if metadata_version:
            self._version = metadata_version
        return self


class DistInfoDistribution(Distribution):
    """Wrap an actual or potential sys.path entry w/metadata, .dist-info style"""
    PKG_INFO = 'METADATA'
    # Matches version pins like "(1.0" / ",2.0)" inside requirement specs.
    EQEQ = re.compile(r"([\(,])\s*(\d.*?)\s*([,\)])")

    @property
    def _parsed_pkg_info(self):
        """Parse and cache metadata"""
        try:
            return self._pkg_info
        except AttributeError:
            # METADATA uses RFC 822-style headers, so parse it with the
            # email parser and cache the resulting message object.
            metadata = self.get_metadata(self.PKG_INFO)
            self._pkg_info = email.parser.Parser().parsestr(metadata)
            return self._pkg_info

    @property
    def _dep_map(self):
        # NOTE: the double underscore is name-mangled to
        # _DistInfoDistribution__dep_map, so this cache is distinct from
        # the base class's own __dep_map attribute.
        try:
            return self.__dep_map
        except AttributeError:
            self.__dep_map = self._compute_dependencies()
            return self.__dep_map

    def _compute_dependencies(self):
        """Recompute this distribution's dependencies."""
        dm = self.__dep_map = {None: []}

        reqs = []
        # Including any condition expressions
        for req in self._parsed_pkg_info.get_all('Requires-Dist') or []:
            reqs.extend(parse_requirements(req))

        def reqs_for_extra(extra):
            # Yield requirements whose environment marker matches `extra`;
            # unmarked requirements always match.
            for req in reqs:
                if not req.marker or req.marker.evaluate({'extra': extra}):
                    yield req

        common = frozenset(reqs_for_extra(None))
        dm[None].extend(common)

        for extra in self._parsed_pkg_info.get_all('Provides-Extra') or []:
            s_extra = safe_extra(extra.strip())
            # Record only the requirements beyond the unconditional set.
            dm[s_extra] = list(frozenset(reqs_for_extra(extra)) - common)

        return dm


# Map a metadata file/directory extension to the Distribution subclass
# used to represent distributions of that layout (see from_location).
_distributionImpl = {
    '.egg': Distribution,
    '.egg-info': EggInfoDistribution,
    '.dist-info': DistInfoDistribution,
    }


def issue_warning(*args, **kw):
    """Issue a warning attributed to the first caller outside this module.

    Walks up the stack until it leaves frames whose globals are this
    module's, then warns with a stacklevel pointing at that caller.
    """
    level = 1
    module_globals = globals()
    try:
        # find the first stack frame that is *not* code in
        # the pkg_resources module, to use for the warning
        while sys._getframe(level).f_globals is module_globals:
            level += 1
    except ValueError:
        # ran off the top of the stack; keep whatever level we reached
        pass
    warnings.warn(stacklevel=level + 1, *args, **kw)


class RequirementParseError(ValueError):
    """Raised when a requirement string cannot be parsed."""

    def __str__(self):
        # Join all constructor args into one human-readable message.
        parts = self.args
        return " ".join(parts)


def parse_requirements(strs):
    """Yield ``Requirement`` objects for each specification in `strs`

    `strs` must be a string, or a (possibly-nested) iterable thereof.
    """
    # create a steppable iterator, so we can handle \-continuations
    lines = iter(yield_lines(strs))

    for line in lines:
        # Drop comments -- a hash without a space may be in a URL.
        if ' #' in line:
            line = line[:line.find(' #')]
        # If there is a line continuation, drop it, and append the next line.
        if line.endswith('\\'):
            # NOTE(review): [:-2] removes the backslash *and* the character
            # before it (typically the space in "foo \"); a continuation
            # backslash that directly follows a meaningful character would
            # lose that character -- confirm intended.
            line = line[:-2].strip()
            line += next(lines)
        yield Requirement(line)


class Requirement(packaging.requirements.Requirement):
    """A packaging Requirement augmented with setuptools-compatible
    attributes (unsafe_name, project_name, key, specs, extras, hashCmp)."""

    def __init__(self, requirement_string):
        """DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!"""
        try:
            super(Requirement, self).__init__(requirement_string)
        except packaging.requirements.InvalidRequirement as e:
            # Re-raise under the local error type so callers only need to
            # catch RequirementParseError (a ValueError subclass).
            raise RequirementParseError(str(e))
        self.unsafe_name = self.name
        project_name = safe_name(self.name)
        self.project_name, self.key = project_name, project_name.lower()
        # Legacy (operator, version) pairs mirroring the specifier set.
        self.specs = [
            (spec.operator, spec.version) for spec in self.specifier]
        self.extras = tuple(map(safe_extra, self.extras))
        # Everything that participates in equality and hashing.
        self.hashCmp = (
            self.key,
            self.specifier,
            frozenset(self.extras),
            str(self.marker) if self.marker else None,
        )
        self.__hash = hash(self.hashCmp)

    def __eq__(self, other):
        return (
            isinstance(other, Requirement) and
            self.hashCmp == other.hashCmp
        )

    def __ne__(self, other):
        return not self == other

    def __contains__(self, item):
        # Accept either a Distribution (matched by key, then version)
        # or a bare version.
        if isinstance(item, Distribution):
            if item.key != self.key:
                return False

            item = item.version

        # Allow prereleases always in order to match the previous behavior of
        # this method. In the future this should be smarter and follow PEP 440
        # more accurately.
        return self.specifier.contains(item, prereleases=True)

    def __hash__(self):
        return self.__hash

    def __repr__(self): return "Requirement.parse(%r)" % str(self)

    @staticmethod
    def parse(s):
        # `s` must contain exactly one requirement specification.
        req, = parse_requirements(s)
        return req


def _get_mro(cls):
    """Get an mro for a type or classic class"""
    if not isinstance(cls, type):

        class cls(cls, object):
            pass

        return cls.__mro__[1:]
    return cls.__mro__


def _find_adapter(registry, ob):
    """Return an adapter factory for `ob` from `registry`"""
    # Walk the MRO of ob's class (falling back to type(ob) for objects
    # without __class__) and return the first registered factory.
    candidates = _get_mro(getattr(ob, '__class__', type(ob)))
    for candidate in candidates:
        if candidate in registry:
            return registry[candidate]
    return None


def ensure_directory(path):
    """Ensure that the parent directory of `path` exists"""
    parent = os.path.dirname(path)
    if not os.path.isdir(parent):
        # Creates all missing intermediate directories as well.
        os.makedirs(parent)


def _bypass_ensure_directory(path):
    """Sandbox-bypassing version of ensure_directory()"""
    if not WRITE_SUPPORT:
        raise IOError('"os.mkdir" not supported on this platform.')
    # `split`, `isdir` and `mkdir` are module-level bindings of the real os
    # functions, so this works even while the setup sandbox is active.
    dirname, filename = split(path)
    if dirname and filename and not isdir(dirname):
        # Recursively create ancestors first (like os.makedirs).
        _bypass_ensure_directory(dirname)
        mkdir(dirname, 0o755)


def split_sections(s):
    """Split a string or iterable thereof into (section, content) pairs

    Each ``section`` is a stripped version of the section header ("[section]")
    and each ``content`` is a list of stripped lines excluding blank lines and
    comment-only lines.  If there are any such lines before the first section
    header, they're returned in a first ``section`` of ``None``.
    """
    current_section = None
    current_content = []
    for line in yield_lines(s):
        if not line.startswith("["):
            current_content.append(line)
            continue
        if not line.endswith("]"):
            raise ValueError("Invalid section heading", line)
        # New section header: flush the previous segment if non-empty.
        if current_section or current_content:
            yield current_section, current_content
        current_section = line[1:-1].strip()
        current_content = []

    # wrap up last segment
    yield current_section, current_content


def _mkstemp(*args, **kw):
    """Create a temporary file via tempfile.mkstemp with the sandbox's
    os.open temporarily replaced by the un-sandboxed os_open."""
    saved_open = os.open
    try:
        # temporarily bypass sandboxing
        os.open = os_open
        return tempfile.mkstemp(*args, **kw)
    finally:
        # restore whatever open was installed before
        os.open = saved_open


# Silence the PEP440Warning by default, so that end users don't get hit by it
# randomly just because they use pkg_resources. We want to append the rule
# because we want earlier uses of filterwarnings to take precedence over this
# one.
warnings.filterwarnings("ignore", category=PEP440Warning, append=True)


# from jaraco.functools 1.3
def _call_aside(f, *args, **kwargs):
    f(*args, **kwargs)
    return f


@_call_aside
def _initialize(g=globals()):
    "Set up global resource manager (deliberately not state-saved)"
    manager = ResourceManager()
    g['_manager'] = manager
    # Re-export every public ResourceManager method as a module-level
    # function (resource_filename, resource_stream, ...).
    for name in dir(manager):
        if not name.startswith('_'):
            g[name] = getattr(manager, name)


@_call_aside
def _initialize_master_working_set():
    """
    Prepare the master working set and make the ``require()``
    API available.

    This function has explicit effects on the global state
    of pkg_resources. It is intended to be invoked once at
    the initialization of this module.

    Invocation by other packages is unsupported and done
    at their own risk.
    """
    working_set = WorkingSet._build_master()
    _declare_state('object', working_set=working_set)

    # Expose the working set's bound methods as module-level API (published
    # via the globals().update(locals()) at the end of this function).
    require = working_set.require
    iter_entry_points = working_set.iter_entry_points
    add_activation_listener = working_set.subscribe
    run_script = working_set.run_script
    # backward compatibility
    run_main = run_script
    # Activate all distributions already on sys.path with replace=False and
    # ensure that all distributions added to the working set in the future
    # (e.g. by calling ``require()``) will get activated as well,
    # with higher priority (replace=True).
    dist = None  # ensure dist is defined for del dist below
    for dist in working_set:
        dist.activate(replace=False)
    del dist
    add_activation_listener(lambda dist: dist.activate(replace=True), existing=False)
    working_set.entries = []
    # match order
    list(map(working_set.add_entry, sys.path))
    globals().update(locals())
_vendor/retrying.py000064400000023364151733136500010431 0ustar00## Copyright 2013-2014 Ray Holder
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.

import random
from pip._vendor import six
import sys
import time
import traceback


# sys.maxint / 2, since Python 3.2 doesn't have a sys.maxint...
# Used below as the default upper bound for exponential backoff waits.
MAX_WAIT = 1073741823


def retry(*dargs, **dkw):
    """
    Decorator function that instantiates the Retrying object
    @param *dargs: positional arguments passed to Retrying object
    @param **dkw: keyword arguments passed to the Retrying object
    """
    # support both @retry and @retry() as valid syntax
    used_bare = len(dargs) == 1 and callable(dargs[0])

    if used_bare:
        # @retry with no arguments: decorate with a default Retrying.
        target = dargs[0]

        @six.wraps(target)
        def wrapped_f(*args, **kw):
            return Retrying().call(target, *args, **kw)

        return wrapped_f

    # @retry(...): build a decorator that forwards the configuration.
    def wrap(f):

        @six.wraps(f)
        def wrapped_f(*args, **kw):
            return Retrying(*dargs, **dkw).call(f, *args, **kw)

        return wrapped_f

    return wrap


class Retrying(object):
    """Core retry engine: holds the stop policy, the wait policy, and the
    rejection filters that decide whether an attempt should be retried."""

    def __init__(self,
                 stop=None, wait=None,
                 stop_max_attempt_number=None,
                 stop_max_delay=None,
                 wait_fixed=None,
                 wait_random_min=None, wait_random_max=None,
                 wait_incrementing_start=None, wait_incrementing_increment=None,
                 wait_exponential_multiplier=None, wait_exponential_max=None,
                 retry_on_exception=None,
                 retry_on_result=None,
                 wrap_exception=False,
                 stop_func=None,
                 wait_func=None,
                 wait_jitter_max=None):

        # All delay/wait values below are in milliseconds (see call(), which
        # sleeps wait/1000.0 seconds).
        self._stop_max_attempt_number = 5 if stop_max_attempt_number is None else stop_max_attempt_number
        self._stop_max_delay = 100 if stop_max_delay is None else stop_max_delay
        self._wait_fixed = 1000 if wait_fixed is None else wait_fixed
        self._wait_random_min = 0 if wait_random_min is None else wait_random_min
        self._wait_random_max = 1000 if wait_random_max is None else wait_random_max
        self._wait_incrementing_start = 0 if wait_incrementing_start is None else wait_incrementing_start
        self._wait_incrementing_increment = 100 if wait_incrementing_increment is None else wait_incrementing_increment
        self._wait_exponential_multiplier = 1 if wait_exponential_multiplier is None else wait_exponential_multiplier
        self._wait_exponential_max = MAX_WAIT if wait_exponential_max is None else wait_exponential_max
        self._wait_jitter_max = 0 if wait_jitter_max is None else wait_jitter_max

        # TODO add chaining of stop behaviors
        # stop behavior
        stop_funcs = []
        if stop_max_attempt_number is not None:
            stop_funcs.append(self.stop_after_attempt)

        if stop_max_delay is not None:
            stop_funcs.append(self.stop_after_delay)

        if stop_func is not None:
            self.stop = stop_func

        elif stop is None:
            # Stop as soon as any configured stop condition fires.
            self.stop = lambda attempts, delay: any(f(attempts, delay) for f in stop_funcs)

        else:
            # `stop` names one of the stop_* methods, e.g. "stop_after_attempt".
            self.stop = getattr(self, stop)

        # TODO add chaining of wait behaviors
        # wait behavior
        wait_funcs = [lambda *args, **kwargs: 0]
        if wait_fixed is not None:
            wait_funcs.append(self.fixed_sleep)

        if wait_random_min is not None or wait_random_max is not None:
            wait_funcs.append(self.random_sleep)

        if wait_incrementing_start is not None or wait_incrementing_increment is not None:
            wait_funcs.append(self.incrementing_sleep)

        if wait_exponential_multiplier is not None or wait_exponential_max is not None:
            wait_funcs.append(self.exponential_sleep)

        if wait_func is not None:
            self.wait = wait_func

        elif wait is None:
            # Wait for the longest of all configured wait behaviors.
            self.wait = lambda attempts, delay: max(f(attempts, delay) for f in wait_funcs)

        else:
            # `wait` names one of the *_sleep methods, e.g. "fixed_sleep".
            self.wait = getattr(self, wait)

        # retry on exception filter
        if retry_on_exception is None:
            self._retry_on_exception = self.always_reject
        else:
            self._retry_on_exception = retry_on_exception

        # TODO simplify retrying by Exception types
        # retry on result filter
        if retry_on_result is None:
            self._retry_on_result = self.never_reject
        else:
            self._retry_on_result = retry_on_result

        self._wrap_exception = wrap_exception

    def stop_after_attempt(self, previous_attempt_number, delay_since_first_attempt_ms):
        """Stop after the previous attempt >= stop_max_attempt_number."""
        return previous_attempt_number >= self._stop_max_attempt_number

    def stop_after_delay(self, previous_attempt_number, delay_since_first_attempt_ms):
        """Stop after the time from the first attempt >= stop_max_delay."""
        return delay_since_first_attempt_ms >= self._stop_max_delay

    def no_sleep(self, previous_attempt_number, delay_since_first_attempt_ms):
        """Don't sleep at all before retrying."""
        return 0

    def fixed_sleep(self, previous_attempt_number, delay_since_first_attempt_ms):
        """Sleep a fixed amount of time between each retry."""
        return self._wait_fixed

    def random_sleep(self, previous_attempt_number, delay_since_first_attempt_ms):
        """Sleep a random amount of time between wait_random_min and wait_random_max"""
        return random.randint(self._wait_random_min, self._wait_random_max)

    def incrementing_sleep(self, previous_attempt_number, delay_since_first_attempt_ms):
        """
        Sleep an incremental amount of time after each attempt, starting at
        wait_incrementing_start and incrementing by wait_incrementing_increment
        """
        result = self._wait_incrementing_start + (self._wait_incrementing_increment * (previous_attempt_number - 1))
        if result < 0:
            result = 0
        return result

    def exponential_sleep(self, previous_attempt_number, delay_since_first_attempt_ms):
        """Sleep multiplier * 2**attempt ms, clamped to [0, wait_exponential_max]."""
        exp = 2 ** previous_attempt_number
        result = self._wait_exponential_multiplier * exp
        if result > self._wait_exponential_max:
            result = self._wait_exponential_max
        if result < 0:
            result = 0
        return result

    def never_reject(self, result):
        """Default result filter: no result value triggers a retry."""
        return False

    def always_reject(self, result):
        """Default exception filter: every exception triggers a retry."""
        return True

    def should_reject(self, attempt):
        """Return True when `attempt` should be rejected (i.e. retried)."""
        reject = False
        if attempt.has_exception:
            # attempt.value holds the sys.exc_info() triple; filter on the
            # exception instance itself.
            reject |= self._retry_on_exception(attempt.value[1])
        else:
            reject |= self._retry_on_result(attempt.value)

        return reject

    def call(self, fn, *args, **kwargs):
        """Call `fn`, retrying per the configured stop/wait/reject policies.

        Returns fn's result, or raises the last exception (wrapped in
        RetryError when wrap_exception is set) once the stop policy fires.
        """
        start_time = int(round(time.time() * 1000))
        attempt_number = 1
        while True:
            try:
                attempt = Attempt(fn(*args, **kwargs), attempt_number, False)
            except:
                # NOTE(review): bare except also captures BaseException
                # subclasses such as KeyboardInterrupt -- confirm intended.
                tb = sys.exc_info()
                attempt = Attempt(tb, attempt_number, True)

            if not self.should_reject(attempt):
                return attempt.get(self._wrap_exception)

            delay_since_first_attempt_ms = int(round(time.time() * 1000)) - start_time
            if self.stop(attempt_number, delay_since_first_attempt_ms):
                if not self._wrap_exception and attempt.has_exception:
                    # get() on an attempt with an exception should cause it to be raised, but raise just in case
                    raise attempt.get()
                else:
                    raise RetryError(attempt)
            else:
                sleep = self.wait(attempt_number, delay_since_first_attempt_ms)
                if self._wait_jitter_max:
                    jitter = random.random() * self._wait_jitter_max
                    sleep = sleep + max(0, jitter)
                time.sleep(sleep / 1000.0)

            attempt_number += 1


class Attempt(object):
    """
    An Attempt encapsulates a call to a target function that may end as a
    normal return value from the function or an Exception depending on what
    occurred during the execution.
    """

    def __init__(self, value, attempt_number, has_exception):
        # `value` is either the function's return value or, when
        # has_exception is true, the sys.exc_info() triple.
        self.value = value
        self.attempt_number = attempt_number
        self.has_exception = has_exception

    def get(self, wrap_exception=False):
        """
        Return the return value of this Attempt instance or raise an Exception.
        If wrap_exception is true, this Attempt is wrapped inside of a
        RetryError before being raised.
        """
        if not self.has_exception:
            return self.value
        if wrap_exception:
            raise RetryError(self)
        six.reraise(self.value[0], self.value[1], self.value[2])

    def __repr__(self):
        if self.has_exception:
            tb_text = "".join(traceback.format_tb(self.value[2]))
            return "Attempts: {0}, Error:\n{1}".format(self.attempt_number, tb_text)
        return "Attempts: {0}, Value: {1}".format(self.attempt_number, self.value)


class RetryError(Exception):
    """
    Raised when retrying gives up.

    Wraps the terminal Attempt so callers can inspect the final value or
    exception after all retries were exhausted.
    """

    def __init__(self, last_attempt):
        # Keep the final attempt available for post-mortem inspection.
        self.last_attempt = last_attempt

    def __str__(self):
        return "RetryError[{0}]".format(self.last_attempt)
utils/__pycache__/encoding.cpython-36.opt-1.pyc000064400000001747151733136500015304 0ustar003

�Pf��@sjddlZddlZddlZejdfejdfejdfejdfejdfejdfej	dfgZ
ejd	�Zd
d�Z
dS)�N�utf8�utf16zutf16-bezutf16-le�utf32zutf32-bezutf32-lescoding[:=]\s*([-\w.]+)cCs�x0tD](\}}|j|�r|t|�d�j|�SqWxV|jd�dd�D]@}|dd�dkrFtj|�rFtj|�j�djd�}|j|�SqFW|jtj	d��S)	z�Check a bytes string for a BOM to correctly detect the encoding

    Fallback to locale.getpreferredencoding(False) like open() on Python3N�
�r��#�asciiF)
�BOMS�
startswith�len�decode�split�ENCODING_RE�search�groups�locale�getpreferredencoding)�dataZbom�encoding�line�r�/usr/lib/python3.6/encoding.py�auto_decodes
r)�codecsr�re�BOM_UTF8�	BOM_UTF16�BOM_UTF16_BE�BOM_UTF16_LE�	BOM_UTF32�BOM_UTF32_BE�BOM_UTF32_LEr
�compilerrrrrr�<module>s
utils/__pycache__/logging.cpython-36.pyc000064400000007447151733136510014211 0ustar003

�Pf��@sddlmZddlZddlZddlZddlZyddlZWnek
rTddlZYnXddl	m
Z
ddlmZyddl
mZWnek
r�dZYnXej�Zde_ejddd��Zd	d
�ZGdd�dej�Zd
d�ZGdd�dej�ZGdd�dejj�ZGdd�dej�ZdS)�)�absolute_importN)�WINDOWS)�
ensure_dir)�colorama�ccs.tj|7_z
dVWdtj|8_XdS)zv
    A context manager which will cause the log output to be indented for any
    log messages emitted inside it.
    N)�
_log_state�indentation)Znum�r	�/usr/lib/python3.6/logging.py�
indent_logs
rcCsttdd�S)Nrr)�getattrrr	r	r	r
�get_indentation)sr
c@seZdZdd�ZdS)�IndentingFormattercCs,tjj||�}djdd�|jd�D��}|S)z�
        Calls the standard formatter, but will indent all of the log messages
        by our current indentation level.
        �cSsg|]}dt�|�qS)� )r
)�.0�liner	r	r
�
<listcomp>6sz-IndentingFormatter.format.<locals>.<listcomp>T)�logging�	Formatter�format�join�
splitlines)�self�recordZ	formattedr	r	r
r/s
zIndentingFormatter.formatN)�__name__�
__module__�__qualname__rr	r	r	r
r-srcs�fdd�}|S)Ncsdjt��|tjjg�S)Nr)r�listrZStyleZ	RESET_ALL)Zinp)�colorsr	r
�wrapped=sz_color_wrap.<locals>.wrappedr	)rr r	)rr
�_color_wrap<sr!c@sTeZdZer2ejeejj�fej	eejj
�fgZngZddd�Zdd�Z
dd�ZdS)	�ColorizedStreamHandlerNcCs(tjj||�tr$tr$tj|j�|_dS)N)r�
StreamHandler�__init__rr�AnsiToWin32�stream)rr&r	r	r
r$NszColorizedStreamHandler.__init__cCsRtsdSt|jtj�s|jn|jj}t|d�r:|j�r:dStjj	d�dkrNdSdS)NF�isattyTZTERMZANSI)
r�
isinstancer&r%r �hasattrr'�os�environ�get)rZreal_streamr	r	r
�should_colorTsz#ColorizedStreamHandler.should_colorcCsBtjj||�}|j�r>x&|jD]\}}|j|kr||�}PqW|S)N)rr#rr-�COLORS�levelno)rr�msg�levelZcolorr	r	r
ris
zColorizedStreamHandler.format)N)rrrrrZERRORr!ZForeZREDZWARNINGZYELLOWr.r$r-rr	r	r	r
r"Bs
r"c@seZdZdd�ZdS)�BetterRotatingFileHandlercCs ttjj|j��tjjj|�S)N)	rr*�path�dirnameZbaseFilenamer�handlers�RotatingFileHandler�_open)rr	r	r
r7wszBetterRotatingFileHandler._openN)rrrr7r	r	r	r
r2usr2c@seZdZdd�Zdd�ZdS)�MaxLevelFiltercCs
||_dS)N)r1)rr1r	r	r
r$~szMaxLevelFilter.__init__cCs|j|jkS)N)r/r1)rrr	r	r
�filter�szMaxLevelFilter.filterN)rrrr$r9r	r	r	r
r8|sr8)r) Z
__future__r�
contextlibrZlogging.handlersr*Z	threading�ImportErrorZdummy_threadingZ
pip.compatrZ	pip.utilsrZpip._vendorr�	ExceptionZlocalrr�contextmanagerrr
rrr!r#r"r5r6r2�Filterr8r	r	r	r
�<module>s0
3utils/__pycache__/filesystem.cpython-36.pyc000064400000001064151733136510014734 0ustar003

�Pf��@s(ddlZddlZddlmZdd�ZdS)�N)�get_path_uidcCs�ttd�sdSd}xp||kr�tjj|�rntj�dkr^yt|�}Wntk
rTdSX|dkStj|tj�Sq|tjj	|�}}qWdS)N�geteuidTrF)
�hasattr�os�path�lexistsrr�OSError�access�W_OK�dirname)rZpreviousZpath_uid�r� /usr/lib/python3.6/filesystem.py�check_path_owners

r)rZos.pathZ
pip.compatrrrrrr
�<module>sutils/__pycache__/packaging.cpython-36.pyc000064400000003657151733136510014506 0ustar003

�Pf �@s~ddlmZddlmZddlZddlZddlmZddlmZddl	m
Z
ddlmZej
e�Zdd	�Zd
d�Zdd
�ZdS)�)�absolute_import)�
FeedParserN)�
specifiers)�version)�
pkg_resources)�
exceptionscCs>|dkrdStj|�}tjdjtttjdd����}||kS)aG
    Check if the python version in use match the `requires_python` specifier.

    Returns `True` if the version of python in use matches the requirement.
    Returns `False` if the version of python in use does not matches the
    requirement.

    Raises an InvalidSpecifier if `requires_python` have an invalid format.
    NT�.�)	rZSpecifierSetr�parse�join�map�str�sys�version_info)�requires_pythonZrequires_python_specifierZpython_version�r�/usr/lib/python3.6/packaging.py�check_requires_pythons


 rcCs8t|tj�r |jd�r |jd�S|jd�r4|jd�SdS)NZMETADATAzPKG-INFO)�
isinstancerZDistInfoDistributionZhas_metadata�get_metadata)�distrrrr%s



rcCs�t|�}t�}|j|�|j�}|jd�}y8t|�s`tjd|j|dj	t
ttj
dd���f��Wn8tjk
r�}ztjd|j||f�dSd}~XnXdS)NzRequires-Pythonz4%s requires Python '%s' but the running Python is %srr	z7Package %s has an invalid Requires-Python entry %s - %s)rrZfeed�close�getrrZUnsupportedPythonVersionZproject_namerrr
rrrZInvalidSpecifier�loggerZwarning)rZmetadataZfeed_parserZ
pkg_info_dictr�errr�check_dist_requires_python-s"

$r)Z
__future__rZemail.parserrZloggingrZpip._vendor.packagingrrZpip._vendorrZpiprZ	getLogger�__name__rrrrrrrr�<module>s
utils/__pycache__/glibc.cpython-36.opt-1.pyc000064400000002503151733136510014566 0ustar003

�Pf{�@sPddlmZddlZddlZddlZddlZdd�Zdd�Zdd�Zd	d
�Z	dS)�)�absolute_importNcCsPtjd�}y
|j}Wntk
r(dSXtj|_|�}t|t�sL|jd�}|S)z9Returns glibc version string, or None if not using glibc.N�ascii)	�ctypesZCDLL�gnu_get_libc_version�AttributeErrorZc_char_pZrestype�
isinstance�str�decode)Zprocess_namespacer�version_str�r�/usr/lib/python3.6/glibc.py�glibc_version_string	s



r
cCsHtjd|�}|s$tjd|t�dSt|jd��|koFt|jd��|kS)Nz$(?P<major>[0-9]+)\.(?P<minor>[0-9]+)z=Expected glibc version with 2 components major.minor, got: %sF�major�minor)�re�match�warnings�warn�RuntimeWarning�int�group)r
�required_major�
minimum_minor�mrrr�check_glibc_version#s
rcCst�}|dkrdSt|||�S)NF)r
r)rrr
rrr�have_compatible_glibc3srcCs"t�}|dkrtj�Sd|fSdS)NZglibc)r
�platform�libc_ver)Z
glibc_versionrrrrKsr)
Z
__future__rrrrrr
rrrrrrr�<module>sutils/__pycache__/build.cpython-36.pyc000064400000002420151733136510013644 0ustar003

�Pf �@s<ddlmZddlZddlZddlmZGdd�de�ZdS)�)�absolute_importN)�rmtreec@s6eZdZddd�Zdd�Zdd�Zdd	�Zd
d�ZdS)
�BuildDirectoryNcCsL|dkr|dkrd}|dkr<tjjtjdd��}|dkr<d}||_||_dS)NTz
pip-build-)�prefix)�os�path�realpath�tempfileZmkdtemp�name�delete)�selfr
r�r
�/usr/lib/python3.6/build.py�__init__szBuildDirectory.__init__cCsdj|jj|j�S)Nz	<{} {!r}>)�format�	__class__�__name__r
)rr
r
r�__repr__szBuildDirectory.__repr__cCs|jS)N)r
)rr
r
r�	__enter__"szBuildDirectory.__enter__cCs|j�dS)N)�cleanup)r�exc�value�tbr
r
r�__exit__%szBuildDirectory.__exit__cCs|jrt|j�dS)N)rrr
)rr
r
rr(szBuildDirectory.cleanup)NN)r�
__module__�__qualname__rrrrrr
r
r
rr	s

r)	Z
__future__rZos.pathrr	Z	pip.utilsr�objectrr
r
r
r�<module>sutils/__pycache__/packaging.cpython-36.opt-1.pyc000064400000003657151733136510015445 0ustar003

�Pf �@s~ddlmZddlmZddlZddlZddlmZddlmZddl	m
Z
ddlmZej
e�Zdd	�Zd
d�Zdd
�ZdS)�)�absolute_import)�
FeedParserN)�
specifiers)�version)�
pkg_resources)�
exceptionscCs>|dkrdStj|�}tjdjtttjdd����}||kS)aG
    Check if the python version in use match the `requires_python` specifier.

    Returns `True` if the version of python in use matches the requirement.
    Returns `False` if the version of python in use does not matches the
    requirement.

    Raises an InvalidSpecifier if `requires_python` have an invalid format.
    NT�.�)	rZSpecifierSetr�parse�join�map�str�sys�version_info)�requires_pythonZrequires_python_specifierZpython_version�r�/usr/lib/python3.6/packaging.py�check_requires_pythons


 rcCs8t|tj�r |jd�r |jd�S|jd�r4|jd�SdS)NZMETADATAzPKG-INFO)�
isinstancerZDistInfoDistributionZhas_metadata�get_metadata)�distrrrr%s



rcCs�t|�}t�}|j|�|j�}|jd�}y8t|�s`tjd|j|dj	t
ttj
dd���f��Wn8tjk
r�}ztjd|j||f�dSd}~XnXdS)NzRequires-Pythonz4%s requires Python '%s' but the running Python is %srr	z7Package %s has an invalid Requires-Python entry %s - %s)rrZfeed�close�getrrZUnsupportedPythonVersionZproject_namerrr
rrrZInvalidSpecifier�loggerZwarning)rZmetadataZfeed_parserZ
pkg_info_dictr�errr�check_dist_requires_python-s"

$r)Z
__future__rZemail.parserrZloggingrZpip._vendor.packagingrrZpip._vendorrZpiprZ	getLogger�__name__rrrrrrrr�<module>s
utils/__pycache__/encoding.cpython-36.pyc000064400000001747151733136510014346 0ustar003

�Pf��@sjddlZddlZddlZejdfejdfejdfejdfejdfejdfej	dfgZ
ejd	�Zd
d�Z
dS)�N�utf8�utf16zutf16-bezutf16-le�utf32zutf32-bezutf32-lescoding[:=]\s*([-\w.]+)cCs�x0tD](\}}|j|�r|t|�d�j|�SqWxV|jd�dd�D]@}|dd�dkrFtj|�rFtj|�j�djd�}|j|�SqFW|jtj	d��S)	z�Check a bytes string for a BOM to correctly detect the encoding

    Fallback to locale.getpreferredencoding(False) like open() on Python3N�
�r��#�asciiF)
�BOMS�
startswith�len�decode�split�ENCODING_RE�search�groups�locale�getpreferredencoding)�dataZbom�encoding�line�r�/usr/lib/python3.6/encoding.py�auto_decodes
r)�codecsr�re�BOM_UTF8�	BOM_UTF16�BOM_UTF16_BE�BOM_UTF16_LE�	BOM_UTF32�BOM_UTF32_BE�BOM_UTF32_LEr
�compilerrrrrr�<module>s
utils/__pycache__/outdated.cpython-36.opt-1.pyc000064400000011147151733136510015323 0ustar003

�Pfe�@s�ddlmZddlZddlZddlZddlZddlZddlm	Z	ddl
mZddl
mZmZddlmZddlmZmZddlmZmZdd	lmZd
Zeje�ZGdd�de�ZGd
d�de�Z dd�Z!dd�Z"dd�Z#dS)�)�absolute_importN)�lockfile)�version)�
total_seconds�WINDOWS)�PyPI)�USER_CACHE_DIR�running_under_virtualenv)�
ensure_dir�get_installed_version)�check_path_ownerz%Y-%m-%dT%H:%M:%SZc@seZdZdd�Zdd�ZdS)�VirtualenvSelfCheckStatecCs\tjjtjd�|_y&t|j��}tj|�|_	WdQRXWnt
tfk
rVi|_	YnXdS)Nzpip-selfcheck.json)�os�path�join�sys�prefix�statefile_path�open�json�load�state�IOError�
ValueError)�self�	statefile�r�/usr/lib/python3.6/outdated.py�__init__sz!VirtualenvSelfCheckState.__init__c
Cs:t|jd��$}tj|jt�|d�|ddd�WdQRXdS)N�w)�
last_check�pypi_versionT�,�:)�	sort_keys�
separators)r"r#)rrr�dump�strftime�SELFCHECK_DATE_FMT)rr!�current_timerrrr�save$szVirtualenvSelfCheckState.saveN)�__name__�
__module__�__qualname__rr*rrrrr
s
r
c@seZdZdd�Zdd�ZdS)�GlobalSelfCheckStatecCsbtjjtd�|_y,t|j��}tj|�tj	|_
WdQRXWn ttt
fk
r\i|_
YnXdS)Nzselfcheck.json)rrrrrrrrrrrrr�KeyError)rrrrrr3s zGlobalSelfCheckState.__init__cCs�ttjj|j��sdSttjj|j��tj|j��ztjj|j�rft	|j��}t
j|�}WdQRXni}|jt
�|d�|tj<t	|jd��}t
j||ddd�WdQRXWdQRXdS)N)r r!rTr"r#)r$r%)r"r#)rrr�dirnamerr
rZLockFile�existsrrrr'r(rrr&)rr!r)rrrrrr*=s
zGlobalSelfCheckState.saveN)r+r,r-rr*rrrrr.2s
r.cCst�rt�St�SdS)N)r	r
r.rrrr�load_selfcheck_statefileXsr2cCsFddl}y"|jd�}|jd�o*d|jd�kS|jk
r@dSXdS)z�Checks whether pip was installed by pip

    This is used not to display the upgrade message when pip is in fact
    installed by system package manager, such as dnf on Fedora.
    rN�pipZ	INSTALLERF)�
pkg_resourcesZget_distributionZhas_metadataZget_metadata_linesZDistributionNotFound)r4Zdistrrr�pip_installed_by_pip_s

r5c
CsFtd�}|dkrdStj|�}d}�y�t�}tjj�}d|jkrxd|jkrxtjj|jdt�}t	||�dkrx|jd}|dkr�|j
tjdd	id
�}|j
�dd�tt|j�d
�tjd�D�d}|j||�tj|�}||k�r|j|jk�rt��rt�rd}	nd}	tjd|||	�Wn$tk
�r@tjddd�YnXdS)z�Check for an update for pip.

    Limit the frequency of checks to once per week. State is stored either in
    the active virtualenv or in the user's USER_CACHE_DIR keyed off the prefix
    of the pip script path.
    r3Nr r!���<ZAcceptzapplication/json)ZheaderscSsg|]}tj|�js|�qSr)�packaging_version�parseZ
is_prerelease)�.0�vrrr�
<listcomp>�sz%pip_version_check.<locals>.<listcomp>Zreleases)�key�z
python -m pipz�You are using pip version %s, however version %s is available.
You should consider upgrading via the '%s install --upgrade pip' command.z5There was an error checking the latest version of pipT)�exc_info�i`'i�:	���)rr9r:r2�datetimeZutcnowrZstrptimer(r�getrZpip_json_urlZraise_for_status�sorted�listrr*Zbase_versionr5r�loggerZwarning�	Exception�debug)
ZsessionZinstalled_versionZpip_versionr!rr)r ZrespZremote_versionZpip_cmdrrr�pip_version_checknsL




rJ)$Z
__future__rrCrZloggingZos.pathrrZpip._vendorrZpip._vendor.packagingrr9Z
pip.compatrrZ
pip.modelsrZ
pip.locationsrr	Z	pip.utilsr
rZpip.utils.filesystemrr(Z	getLoggerr+rG�objectr
r.r2r5rJrrrr�<module>s&
&utils/__pycache__/glibc.cpython-36.pyc000064400000002503151733136510013627 0ustar003

�Pf{�@sPddlmZddlZddlZddlZddlZdd�Zdd�Zdd�Zd	d
�Z	dS)�)�absolute_importNcCsPtjd�}y
|j}Wntk
r(dSXtj|_|�}t|t�sL|jd�}|S)z9Returns glibc version string, or None if not using glibc.N�ascii)	�ctypesZCDLL�gnu_get_libc_version�AttributeErrorZc_char_pZrestype�
isinstance�str�decode)Zprocess_namespacer�version_str�r�/usr/lib/python3.6/glibc.py�glibc_version_string	s



r
cCsHtjd|�}|s$tjd|t�dSt|jd��|koFt|jd��|kS)Nz$(?P<major>[0-9]+)\.(?P<minor>[0-9]+)z=Expected glibc version with 2 components major.minor, got: %sF�major�minor)�re�match�warnings�warn�RuntimeWarning�int�group)r
�required_major�
minimum_minor�mrrr�check_glibc_version#s
rcCst�}|dkrdSt|||�S)NF)r
r)rrr
rrr�have_compatible_glibc3srcCs"t�}|dkrtj�Sd|fSdS)NZglibc)r
�platform�libc_ver)Z
glibc_versionrrrrKsr)
Z
__future__rrrrrr
rrrrrrr�<module>sutils/__pycache__/hashes.cpython-36.pyc000064400000006227151733136510014031 0ustar003

�Pf2�@szddlmZddlZddlmZmZmZddlmZddl	m
Z
mZmZdZ
dddgZGd	d
�d
e�ZGdd�de�ZdS)
�)�absolute_importN)�HashMismatch�HashMissing�InstallationError)�read_chunks)�	iteritems�iterkeys�
itervaluesZsha256Zsha384Zsha512c@sJeZdZdZddd�Zdd�Zdd�Zd	d
�Zdd�Zd
d�Z	dd�Z
dS)�HasheszaA wrapper that builds multiple hashes at once and checks them against
    known-good values

    NcCs|dkrin||_dS)zo
        :param hashes: A dict of algorithm names pointing to lists of allowed
            hex digests
        N)�_allowed)�self�hashes�r�/usr/lib/python3.6/hashes.py�__init__szHashes.__init__c
Cs�i}xJt|j�D]<}ytj|�||<Wqttfk
rJtd|��YqXqWx(|D] }xt|�D]}|j|�qdWqVWx*t	|�D]\}}|j
�|j|kr�dSq�W|j|�dS)z�Check good hashes against ones built from iterable of chunks of
        data.

        Raise HashMismatch if none match.

        zUnknown hash name: %sN)rr�hashlib�new�
ValueError�	TypeErrorrr	�updater�	hexdigest�_raise)rZchunks�gotsZ	hash_name�chunk�hashZgotrrr�check_against_chunks s
zHashes.check_against_chunkscCst|j|��dS)N)rr)rrrrrr7sz
Hashes._raisecCs|jt|��S)zaCheck good hashes against a file-like object

        Raise HashMismatch if none match.

        )rr)r�filerrr�check_against_file:szHashes.check_against_filec	Cs t|d��}|j|�SQRXdS)N�rb)�openr)r�pathrrrr�check_against_pathBszHashes.check_against_pathcCs
t|j�S)z,Return whether I know any known-good hashes.)�boolr)rrrr�__nonzero__FszHashes.__nonzero__cCs|j�S)N)r#)rrrr�__bool__JszHashes.__bool__)N)�__name__�
__module__�__qualname__�__doc__rrrrr!r#r$rrrrr
s
r
cs(eZdZdZ�fdd�Zdd�Z�ZS)�
MissingHashesz�A workalike for Hashes used when we're missing a hash for a requirement

    It computes the actual hash of the requirement and raises a HashMissing
    exception showing it to the user.

    cstt|�jtgid�dS)z!Don't offer the ``hashes`` kwarg.)r
N)�superr)r�
FAVORITE_HASH)r)�	__class__rrrUszMissingHashes.__init__cCst|tj���dS)N)rr+r)rrrrrr[szMissingHashes._raise)r%r&r'r(rr�
__classcell__rr)r,rr)Nsr))Z
__future__rrZpip.exceptionsrrrZ	pip.utilsrZpip._vendor.sixrrr	r+Z
STRONG_HASHES�objectr
r)rrrr�<module>s
:utils/__pycache__/ui.cpython-36.pyc000064400000022534151733136510013172 0ustar003

�PfM-�@s�ddlmZddlmZddlZddlZddlmZmZmZddlZddl	Z	ddl
Z
ddlmZddl
mZddlmZddlmZdd	lmZmZdd
lmZmZmZddlmZyddlmZWnek
r�dZYnXe
je�Z d
d�Z!e!ee�Z"Gdd�de#�Z$Gdd�de#�Z%Gdd�de#�Z&Gdd�de&e$e%e"�Z'Gdd�de&e$e%ee�Z(e	j)dd��Z*Gdd�de#�Z+Gdd�de#�Z,Gdd �d e#�Z-e	j)d!d"��Z.dS)#�)�absolute_import)�divisionN)�signal�SIGINT�default_int_handler)�WINDOWS)�format_size)�get_indentation)�six)�Bar�IncrementalBar)�WritelnMixin�HIDE_CURSOR�SHOW_CURSOR)�Spinner)�coloramacCs�t|jdd�}|s|St|dtj��t|dtj��g}|tt|dg��7}ytj�j|�j|�Wntk
rv|SX|SdS)N�encodingZ
empty_fillZfill�phases)�getattr�filer
Z	text_type�list�join�encode�UnicodeEncodeError)Z	preferredZfallbackrZ
characters�r�/usr/lib/python3.6/ui.py�_select_progress_classsrcs4eZdZdZ�fdd�Z�fdd�Zdd�Z�ZS)�InterruptibleMixina�
    Helper to ensure that self.finish() gets called on keyboard interrupt.

    This allows downloads to be interrupted without leaving temporary state
    (like hidden cursors) behind.

    This class is similar to the progress library's existing SigIntMixin
    helper, but as of version 1.2, that helper has the following problems:

    1. It calls sys.exit().
    2. It discards the existing SIGINT handler completely.
    3. It leaves its own handler in place even after an uninterrupted finish,
       which will have unexpected delayed effects if the user triggers an
       unrelated keyboard interrupt some time after a progress-displaying
       download has already completed, for example.
    cs4tt|�j||�tt|j�|_|jdkr0t|_dS)z=
        Save the original SIGINT handler for later.
        N)�superr�__init__rr�
handle_sigint�original_handlerr)�self�args�kwargs)�	__class__rrrNs
zInterruptibleMixin.__init__cstt|�j�tt|j�dS)z�
        Restore the original SIGINT handler after finishing.

        This should happen regardless of whether the progress display finishes
        normally, or gets interrupted.
        N)rr�finishrrr!)r")r%rrr&^szInterruptibleMixin.finishcCs|j�|j||�dS)z�
        Call self.finish() before delegating to the original SIGINT handler.

        This handler should only be in place while the progress display is
        active.
        N)r&r!)r"Zsignum�framerrrr hsz InterruptibleMixin.handle_sigint)�__name__�
__module__�__qualname__�__doc__rr&r �
__classcell__rr)r%rr<s
rcsJeZdZ�fdd�Zedd��Zedd��Zedd��Zdd
d�Z�Z	S)
�DownloadProgressMixincs,tt|�j||�dt�d|j|_dS)N� �)rr-rr	�message)r"r#r$)r%rrruszDownloadProgressMixin.__init__cCs
t|j�S)N)r�index)r"rrr�
downloadedysz DownloadProgressMixin.downloadedcCs |jdkrdStd|j�dS)Ngz...�z/s)Zavgr)r"rrr�download_speed}s
z$DownloadProgressMixin.download_speedcCs|jrd|jSdS)Nzeta %s�)ZetaZeta_td)r"rrr�
pretty_eta�s
z DownloadProgressMixin.pretty_etar3ccs*x|D]}|V|j|�qW|j�dS)N)�nextr&)r"�it�n�xrrr�iter�s
zDownloadProgressMixin.iter)r3)
r(r)r*r�propertyr2r4r6r;r,rr)r%rr-ss
r-cseZdZ�fdd�Z�ZS)�WindowsMixincs\tr�jrd�_tt��j||�trXtrXtj�j��_�fdd��j_�fdd��j_	dS)NFcs�jjj�S)N)r�wrapped�isattyr)r"rr�<lambda>�sz'WindowsMixin.__init__.<locals>.<lambda>cs�jjj�S)N)rr>�flushr)r"rrr@�s)
rZhide_cursorrr=rrZAnsiToWin32rr?rA)r"r#r$)r%)r"rr�s
zWindowsMixin.__init__)r(r)r*rr,rr)r%rr=�sr=c@seZdZejZdZdZdS)�DownloadProgressBarz
%(percent)d%%z0%(downloaded)s %(download_speed)s %(pretty_eta)sN)r(r)r*�sys�stdoutrr0�suffixrrrrrB�srBc@s&eZdZejZdZdd�Zdd�ZdS)�DownloadProgressSpinnerz!%(downloaded)s %(download_speed)scCs"t|d�stj|j�|_t|j�S)N�_phaser)�hasattr�	itertools�cyclerrGr7)r"rrr�
next_phase�s
z"DownloadProgressSpinner.next_phasecCsN|j|}|j�}|j|}dj||r*dnd||r6dnd|g�}|j|�dS)Nr5r.)r0rKrErZwriteln)r"r0ZphaserE�linerrr�update�s



zDownloadProgressSpinner.updateN)	r(r)r*rCrDrrErKrMrrrrrF�srFccsRtrdVnB|j�s$tj�tjkr,dVn"|jt�z
dVWd|jt�XdS)N)	rr?�logger�getEffectiveLevel�logging�INFO�writerr)rrrr�
hidden_cursor�s

rSc@s$eZdZdd�Zdd�Zdd�ZdS)�RateLimitercCs||_d|_dS)Nr)�_min_update_interval_seconds�_last_update)r"�min_update_interval_secondsrrrr�szRateLimiter.__init__cCstj�}||j}||jkS)N)�timerVrU)r"ZnowZdeltarrr�ready�s
zRateLimiter.readycCstj�|_dS)N)rXrV)r"rrr�reset�szRateLimiter.resetN)r(r)r*rrYrZrrrrrT�srTc@s.eZdZddd�Zdd�Zdd	�Zd
d�ZdS)
�InteractiveSpinnerN�-\|/��?cCs\||_|dkrtj}||_t|�|_d|_tj|�|_	|jj
dt�|jd�d|_dS)NFr.z ... r)
�_messagerCrD�_filerT�
_rate_limiter�	_finishedrIrJ�_spin_cyclerRr	�_width)r"r0rZ
spin_charsrWrrrr�s
zInteractiveSpinner.__init__cCs^|jst�d|j}|jj|d|j|�|jj|�t|�|_|jj�|jj�dS)N�r.)	ra�AssertionErrorrcr_rR�lenrAr`rZ)r"�statusZbackuprrr�_write	s


zInteractiveSpinner._writecCs,|jr
dS|jj�sdS|jt|j��dS)N)rar`rYrhr7rb)r"rrr�spins

zInteractiveSpinner.spincCs4|jr
dS|j|�|jjd�|jj�d|_dS)N�
T)rarhr_rRrA)r"�final_statusrrrr&s

zInteractiveSpinner.finish)Nr\r])r(r)r*rrhrir&rrrrr[�s


r[c@s.eZdZddd�Zdd�Zdd�Zdd	�Zd
S)�NonInteractiveSpinner�<cCs$||_d|_t|�|_|jd�dS)NFZstarted)r^rarTr`�_update)r"r0rWrrrr*s
zNonInteractiveSpinner.__init__cCs*|jst�|jj�tjd|j|�dS)Nz%s: %s)rarer`rZrN�infor^)r"rgrrrrn0s
zNonInteractiveSpinner._updatecCs&|jr
dS|jj�sdS|jd�dS)Nzstill running...)rar`rYrn)r"rrrri5s

zNonInteractiveSpinner.spincCs$|jr
dS|jd|f�d|_dS)Nzfinished with status '%s'T)rarn)r"rkrrrr&<szNonInteractiveSpinner.finishN)rm)r(r)r*rrnrir&rrrrrl)s
rlccs�tjj�r"tj�tjkr"t|�}nt|�}y t	tj��|VWdQRXWn>t
k
rj|jd��Yn*tk
r�|jd��YnX|jd�dS)NZcanceled�error�done)
rCrDr?rNrOrPrQr[rlrS�KeyboardInterruptr&�	Exception)r0Zspinnerrrr�open_spinnerCs


rt)/Z
__future__rrrIrCrrrrX�
contextlibrPZ
pip.compatrZ	pip.utilsrZpip.utils.loggingr	Zpip._vendorr
Zpip._vendor.progress.barrrZpip._vendor.progress.helpersr
rrZpip._vendor.progress.spinnerrrrsZ	getLoggerr(rNrZ_BaseBar�objectrr-r=rBrF�contextmanagerrSrTr[rlrtrrrr�<module>sB


7
!0utils/__pycache__/appdirs.cpython-36.pyc000064400000017002151733136510014211 0ustar003

�Pfk"�@s�dZddlmZddlZddlZddlmZmZddlm	Z	m
Z
dd�Zdd	d
�Zddd
�Z
dd�Zdd�Zdd�Zer�yddlZeZWnek
r�eZYnXdd�ZdS)zd
This code was taken from https://github.com/ActiveState/appdirs and modified
to suit our purposes.
�)�absolute_importN)�WINDOWS�
expanduser)�PY2�	text_typecCs�tr<tjjtd��}tr*t|t�r*t|�}tjj	||d�}n@t
jdkr^td�}tjj	||�}ntj
dtd��}tjj	||�}|S)a5
    Return full path to the user-specific cache dir for this application.

        "appname" is the name of application.

    Typical user cache directories are:
        macOS:      ~/Library/Caches/<AppName>
        Unix:       ~/.cache/<AppName> (XDG default)
        Windows:    C:\Users\<username>\AppData\Local\<AppName>\Cache

    On Windows the only suggestion in the MSDN docs is that local settings go
    in the `CSIDL_LOCAL_APPDATA` directory. This is identical to the
    non-roaming app data dir (the default returned by `user_data_dir`). Apps
    typically put cache data somewhere *under* the given dir here. Some
    examples:
        ...\Mozilla\Firefox\Profiles\<ProfileName>\Cache
        ...\Acme\SuperApp\Cache\1.0

    OPINION: This function appends "Cache" to the `CSIDL_LOCAL_APPDATA` value.
    �CSIDL_LOCAL_APPDATAZCache�darwinz~/Library/CachesZXDG_CACHE_HOMEz~/.cache)r�os�path�normpath�_get_win_folderr�
isinstancer�_win_path_to_bytes�join�sys�platformr�getenv)�appnamer
�r�/usr/lib/python3.6/appdirs.py�user_cache_dirs
rFcCshtr,|rdpd}tjjtjjt|��|�}n8tjdkrJtjjtd�|�}ntjjtj	dtd��|�}|S)aS
    Return full path to the user-specific data dir for this application.

        "appname" is the name of application.
            If None, just the system directory is returned.
        "roaming" (boolean, default False) can be set True to use the Windows
            roaming appdata directory. That means that for users on a Windows
            network setup for roaming profiles, this user data will be
            sync'd on login. See
            <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
            for a discussion of issues.

    Typical user data directories are:
        macOS:                  ~/Library/Application Support/<AppName>
        Unix:                   ~/.local/share/<AppName>    # or in
                                $XDG_DATA_HOME, if defined
        Win XP (not roaming):   C:\Documents and Settings\<username>\ ...
                                ...Application Data\<AppName>
        Win XP (roaming):       C:\Documents and Settings\<username>\Local ...
                                ...Settings\Application Data\<AppName>
        Win 7  (not roaming):   C:\Users\<username>\AppData\Local\<AppName>
        Win 7  (roaming):       C:\Users\<username>\AppData\Roaming\<AppName>

    For Unix, we follow the XDG spec and support $XDG_DATA_HOME.
    That means, by default "~/.local/share/<AppName>".
    �
CSIDL_APPDATArrz~/Library/Application Support/Z
XDG_DATA_HOMEz~/.local/share)
rr	r
rrrrrrr)r�roaming�constr
rrr�
user_data_dir>s
rTcCsHtrt||d�}n2tjdkr&t|�}ntjdtd��}tjj||�}|S)arReturn full path to the user-specific config dir for this application.

        "appname" is the name of application.
            If None, just the system directory is returned.
        "roaming" (boolean, default True) can be set False to not use the
            Windows roaming appdata directory. That means that for users on a
            Windows network setup for roaming profiles, this user data will be
            sync'd on login. See
            <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
            for a discussion of issues.

    Typical user data directories are:
        macOS:                  same as user_data_dir
        Unix:                   ~/.config/<AppName>
        Win *:                  same as user_data_dir

    For Unix, we follow the XDG spec and support $XDG_CONFIG_HOME.
    That means, by default "~/.config/<AppName>".
    )rrZXDG_CONFIG_HOMEz	~/.config)	rrrrr	rrr
r)rrr
rrr�user_config_dirjs

rcs�tr&tjjtd��}tjj|��g}nVtjdkrBtjjd��g}n:tjdd�}|rn�fdd�|j	tj
�D�}ng}|jd�|S)	a�Return a list of potential user-shared config dirs for this application.

        "appname" is the name of application.

    Typical user config directories are:
        macOS:      /Library/Application Support/<AppName>/
        Unix:       /etc or $XDG_CONFIG_DIRS[i]/<AppName>/ for each value in
                    $XDG_CONFIG_DIRS
        Win XP:     C:\Documents and Settings\All Users\Application ...
                    ...Data\<AppName>        Vista:      (Fail! "C:\ProgramData" is a hidden *system* directory
                    on Vista.)
        Win 7:      Hidden, but writeable on Win 7:
                    C:\ProgramData\<AppName>    �CSIDL_COMMON_APPDATArz/Library/Application SupportZXDG_CONFIG_DIRSz/etc/xdgcsg|]}tjjt|����qSr)r	r
rr)�.0�x)rrr�
<listcomp>�sz$site_config_dirs.<locals>.<listcomp>z/etc)rr	r
rrrrrr�split�pathsep�append)rr
ZpathlistZxdg_config_dirsr)rr�site_config_dirs�s


r#cCs:ddl}dddd�|}|j|jd�}|j||�\}}|S)z�
    This is a fallback technique at best. I'm not sure if using the
    registry for this guarantees us the correct answer for all CSIDL_*
    names.
    rNZAppDatazCommon AppDataz
Local AppData)rrrz@Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders)�_winreg�OpenKey�HKEY_CURRENT_USERZQueryValueEx)�
csidl_namer$Zshell_folder_name�keyZ	directoryZ_typerrr�_get_win_folder_from_registry�sr)cCs�dddd�|}tjd�}tjjjd|dd|�d}x|D]}t|�dkr:d	}Pq:W|rztjd�}tjjj|j|d�rz|}|jS)
N��#�)rrrirF�T)	�ctypesZcreate_unicode_bufferZwindllZshell32ZSHGetFolderPathW�ordZkernel32ZGetShortPathNameW�value)r'Zcsidl_constZbufZ
has_high_char�cZbuf2rrr�_get_win_folder_with_ctypes�s 


r2c
Cs6x0dD](}y
|j|�Sttfk
r,YqXqW|S)a�Encode Windows paths to bytes. Only used on Python 2.

    Motivation is to be consistent with other operating systems where paths
    are also returned as bytes. This avoids problems mixing bytes and Unicode
    elsewhere in the codebase. For more details and discussion see
    <https://github.com/pypa/pip/issues/3463>.

    If encoding using ASCII and MBCS fails, return the original Unicode path.
    �ASCII�MBCS)r3r4)�encode�UnicodeEncodeError�LookupError)r
�encodingrrrr�s



r)F)T)�__doc__Z
__future__rr	rZ
pip.compatrrZpip._vendor.sixrrrrrr#r)r2r.r�ImportErrorrrrrr�<module>s$0
,
!(
utils/__pycache__/__init__.cpython-36.pyc000064400000054152151733136510014315 0ustar003

�Pfml�@s�ddlmZddlmZddlZddlZddlZddlZddlZ	ddl
Z
ddlZddlZddl
Z
ddlZddlZddlZddlZddlZddlmZddlmZmZmZddlmZmZmZmZmZmZddl m!Z!ddl"m#Z#dd	l$m%Z%dd
l&m'Z'e%�rddlm(Z)nddlm)Z)d
ddddddddddddddddddd d!d"d#d$gZ*e	j+e,�Z-dzZ.d{Z/d|Z0d}Z1e0e.e1e/Z2e0e1Z3yddl4Z4e3e.7Z3Wn e5k
�r�e-j6d1�YnXyddl7Z7e3e/7Z3Wn e5k
�r�e-j6d2�YnXd3d4�Z8d5d!�Z9d6d�Z:e'd7d8d9�d~d;d
��Z;d<d=�Z<d>d�Z=dd@d�Z>dAdB�Z?dCd�Z@dDd�ZAdEd�ZBdFd�ZCdGd�ZDejEfdHdI�ZFdJd�ZGdKd�ZHd�dMd�ZIdNd�ZJdOd�ZKdPdQ�ZLdRdS�ZMdTdU�ZNdVdW�ZOdXdY�ZPdZd[�ZQdLedLd:d:fd\d]�ZRd^d_�ZSd`da�ZTdbd�ZUdcdd�ZVd�ded�ZWdfd�ZXdgd�ZYd�did�ZZdjdk�Z[dldm�Z\Gdndo�doe]�Z^Gdpdq�dqe)�Z_ej`drds��Zadtd �ZbGdudv�dve]�Zcd�dwd$�Zddxdy�ZedS)��)�absolute_import)�dequeN)�InstallationError)�console_to_str�
expanduser�stdlib_pkgs)�
site_packages�	user_site�running_under_virtualenv�virtualenv_no_global�write_delete_marker_file�distutils_scheme)�
pkg_resources)�input)�PY2)�retry)�BytesIO)�StringIO�rmtree�display_path�
backup_dir�ask�splitext�format_size�is_installable_dir�is_svn_page�
file_contents�split_leading_dir�has_leading_dir�normalize_path�renames�get_terminal_size�get_prog�
unzip_file�
untar_file�unpack_file�call_subprocess�captured_stdout�
ensure_dir�ARCHIVE_EXTENSIONS�SUPPORTED_EXTENSIONS�get_installed_version�.tar.bz2�.tbz�.tar.xz�.txz�.tlz�.tar.lz�	.tar.lzma�.zip�.whl�.tar.gz�.tgz�.tarzbz2 module is not availablezlzma module is not availablecOs,yt|�Stk
r&|||��YnXdS)N)�
__import__�ImportError)Zpkg_or_module_stringZ
ExceptionType�args�kwargs�r<�/usr/lib/python3.6/__init__.py�import_or_raiseIsr>cCsDytj|�Wn0tk
r>}z|jtjkr.�WYdd}~XnXdS)z os.path.makedirs without EEXIST.N)�os�makedirs�OSError�errnoZEEXIST)�path�er<r<r=r(Ps
c
CsDy$tjjtjd�dkr"dtjSWntttfk
r>YnXdS)Nr�__main__.py�-cz	%s -m pipZpip)rErF)	r?rC�basename�sys�argv�
executable�AttributeError�	TypeError�
IndexErrorr<r<r<r=r"Ysi�i�)Zstop_max_delayZ
wait_fixedFcCstj||td�dS)N)�
ignore_errors�onerror)�shutilr�rmtree_errorhandler)�dirrNr<r<r=rcscCs2tj|�jtj@r,tj|tj�||�dS�dS)z�On Windows, the files in .svn are read-only, so when rmtree() tries to
    remove them, an exception is thrown.  We catch that here, remove the
    read-only attribute, and hopefully continue without problems.N)r?�stat�st_mode�S_IREAD�chmod�S_IWRITE)�funcrC�exc_infor<r<r=rQis
rQcCsttjjtjj|��}tjddkrB|jtj�d�}|jtj	�d�}|j
tj�tjj�rpd|t
tj��d�}|S)zTGives the display value for a given path, making it relative to cwd
    if possible.r��replace�.N)r?rC�normcase�abspathrH�version_info�decode�getfilesystemencoding�encode�getdefaultencoding�
startswith�getcwd�sep�len)rCr<r<r=rxs�.bakcCs:d}|}x(tjj||�r0|d7}|t|�}q
W||S)z\Figure out the name of a directory to back up the given dir to
    (adding .bak, .bak2, etc)�)r?rC�exists�str)rR�ext�n�	extensionr<r<r=r�scCs2x&tjjdd�j�D]}||kr|SqWt||�S)NZPIP_EXISTS_ACTION�)r?�environ�get�splitr)�message�options�actionr<r<r=�ask_path_exists�srvcCsZxTtjjd�rtd|��t|�}|j�j�}||krNtd|dj|�f�q|SqWdS)z@Ask the message interactively, with the given possible responsesZPIP_NO_INPUTz7No input was expected ($PIP_NO_INPUT set); question: %sz<Your response (%r) was not one of the expected responses: %sz, N)	r?rprq�	Exceptionr�strip�lower�print�join)rsrtZresponser<r<r=r�scCsL|dkrd|ddS|d	kr,d|dS|dkr@d|dSd|SdS)
Ni�z%.1fMBg@�@�
z%ikBz%.1fkBz%ibytesi@Bi'r<)�bytesr<r<r=r�scCs2tjj|�sdStjj|d�}tjj|�r.dSdS)z@Return True if `path` is a directory containing a setup.py file.Fzsetup.pyT)r?rC�isdirr{�isfile)rCZsetup_pyr<r<r=r�scCstjd|�otjd|tj�S)zT
    Returns true if the page appears to be the index page of an svn repository
    z<title>[^<]*Revision \d+:z#Powered by (?:<a[^>]*?>)?Subversion)�re�search�I)Zhtmlr<r<r=r�sc	Cs$t|d��}|j�jd�SQRXdS)N�rbzutf-8)�open�readr`)�filename�fpr<r<r=r�sccs x|j|�}|sP|VqWdS)z7Yield pieces of data from a file-like object until EOF.N)r�)�file�size�chunkr<r<r=�read_chunks�s

r�cCsh|jd�jd�}d|krHd|kr4|jd�|jd�ks<d|krH|jdd�Sd|kr\|jdd�S|dfSdS)N�/�\riro)�lstrip�findrr)rCr<r<r=r�s$cCsDd}x:|D]2}t|�\}}|s"dS|dkr0|}q
||kr
dSq
WdS)zyReturns true if all the paths have the same leading path name
    (i.e., everything is in one subdirectory in an archive)NFT)r)�pathsZ
common_prefixrC�prefix�restr<r<r=r�s
TcCs2t|�}|rtjj|�}ntjj|�}tjj|�S)zN
    Convert a path to its canonical, case-normalized, absolute version.

    )rr?rC�realpathr^r])rCZresolve_symlinksr<r<r=r�s
cCs@tj|�\}}|j�jd�r8|dd�|}|dd�}||fS)z,Like os.path.splitext, but take off .tar tooz.tar�N���r�)�	posixpathrry�endswith)rC�baserlr<r<r=r�s
cCs|tjj|�\}}|r0|r0tjj|�r0tj|�tj||�tjj|�\}}|rx|rxytj|�Wntk
rvYnXdS)z7Like os.renames(), but handles renaming across devices.N)	r?rCrrrjr@rPZmove�
removedirsrA)�old�new�head�tailr<r<r=r s
cCst�s
dSt|�jttj��S)z�
    Return True if path is within sys.prefix, if we're running in a virtualenv.

    If we're not in a virtualenv, all paths are considered "local."

    T)r
rrdrHr�)rCr<r<r=�is_localsr�cCstt|��S)z�
    Return True if given Distribution object is installed locally
    (i.e. within current virtualenv).

    Always True if we're not in a virtualenv.

    )r��
dist_location)�distr<r<r=�
dist_is_local!sr�cCstt|��}|jtt��S)zF
    Return True if given Distribution is installed in user site.
    )rr�rdr	)r��	norm_pathr<r<r=�dist_in_usersite,sr�cCstt|��jtt��S)ze
    Return True if given Distribution is installed in
    distutils.sysconfig.get_python_lib().
    )rr�rdr)r�r<r<r=�dist_in_site_packages4s
r�cCs,tt|��}|jttd�djd�d��S)zf
    Return True if given Distribution is installed in
    path matching distutils_scheme layout.
    ro�purelib�pythonr)rr�rdr
rr)r�r�r<r<r=�dist_in_install_path>sr�cCs8x2tjD](}tjj||jd�}tjj|�rdSqWdS)z$Is distribution an editable install?z	.egg-linkTF)rHrCr?r{�project_namer)r�Z	path_item�egg_linkr<r<r=�dist_is_editableHs
r�csl|r
t�ndd��|r dd��ndd��|r6dd��ndd��|rHt�nd	d
�������fdd�tjD�S)
a�
    Return a list of installed Distribution objects.

    If ``local_only`` is True (default), only return installations
    local to the current virtualenv, if in a virtualenv.

    ``skip`` argument is an iterable of lower-case project names to
    ignore; defaults to stdlib_pkgs

    If ``editables`` is False, don't report editables.

    If ``editables_only`` is True , only report editables.

    If ``user_only`` is True , only report installations in the user
    site directory.

    cSsdS)NTr<)�dr<r<r=�
local_testjsz/get_installed_distributions.<locals>.local_testcSsdS)NTr<)r�r<r<r=�
editable_testnsz2get_installed_distributions.<locals>.editable_testcSs
t|�S)N)r�)r�r<r<r=r�qscSst|�S)N)r�)r�r<r<r=�editables_only_testusz8get_installed_distributions.<locals>.editables_only_testcSsdS)NTr<)r�r<r<r=r�xscSsdS)NTr<)r�r<r<r=�	user_test~sz.get_installed_distributions.<locals>.user_testcs:g|]2}�|�r|j�kr�|�r�|�r�|�r|�qSr<)�key)�.0r�)r�r�r��skipr�r<r=�
<listcomp>�s
z/get_installed_distributions.<locals>.<listcomp>)r�r�r�working_set)Z
local_onlyr�Zinclude_editablesZeditables_onlyZ	user_onlyr<)r�r�r�r�r�r=�get_installed_distributionsQs

r�cCs�g}t�r6t�r|jt�qN|jt�trN|jt�ntrD|jt�|jt�x0|D](}tjj||j�d}tjj	|�rT|SqTWdS)a
    Return the path for the .egg-link file if it exists, otherwise, None.

    There's 3 scenarios:
    1) not in a virtualenv
       try to find in site.USER_SITE, then site_packages
    2) in a no-global virtualenv
       try to find in site_packages
    3) in a yes-global virtualenv
       try to find in site_packages, then site.USER_SITE
       (don't look in global location)

    For #1 and #3, there could be odd cases, where there's an egg-link in 2
    locations.

    This method will just return the first one found.
    z	.egg-linkN)
r
r�appendrr	r?rCr{r�r)r�ZsitesZsiteZegglinkr<r<r=�
egg_link_path�s



r�cCst|�}|r|S|jS)z�
    Get the site-packages location of this distribution. Generally
    this is dist.location, except in the case of develop-installed
    packages, where dist.location is the source code location, and we
    want to know where the egg-link file is.

    )r��location)r�r�r<r<r=r��sr�c
Cs�dd�}|d�p|d�p|d�}|sZy(tjtj�tj�}||�}tj|�WnYnX|sztjjdd�tjjdd	�f}t|d�t|d�fS)
zlReturns a tuple (x, y) representing the width(x) and the height(x)
    in characters of the terminal window.cSsPy4ddl}ddl}ddl}|jd|j||jd��}Wn
dS|dkrLdS|S)NrZhhZ1234)rr)�fcntl�termios�struct�unpackZioctlZ
TIOCGWINSZ)�fdr�r�r��crr<r<r=�ioctl_GWINSZ�sz'get_terminal_size.<locals>.ioctl_GWINSZrrirZZLINES�ZCOLUMNS�P)r?r��ctermid�O_RDONLY�closerprq�int)r�r�r�r<r<r=r!�scCstjd�}tj|�|S)zBGet the current umask which involves having to set it temporarily.r)r?�umask)�maskr<r<r=�
current_umask�s

r�c
Cst|�t|d�}z�tj|dd�}t|j��o0|}x�|j�D]�}|j}|j|�}|}	|rdt	|�d}	t
jj||	�}	t
jj
|	�}
|	jd�s�|	jd�r�t|	�q<t|
�t|	d�}z|j|�Wd|j�|jd	?}|r�tj|�r�|d
@r�t
j|	dt�d
B�Xq<WWd|j�XdS)a�
    Unzip the file (with path `filename`) to the destination `location`.  All
    files are written based on system defaults and umask (i.e. permissions are
    not preserved), except that regular file members with any execute
    permissions (user, group, or world) have "chmod +x" applied after being
    written. Note that for windows, any execute changes using os.chmod are
    no-ops per the python docs.
    r�T)Z
allowZip64rir�r��wbN��Ii�)r(r��zipfileZZipFilerZnamelistZinfolistr�r�rr?rCr{�dirnamer��writer�Z
external_attrrS�S_ISREGrVr�)
r�r��flattenZzipfp�zip�leading�info�name�data�fnrRr��moder<r<r=r#�s0	




 c(Cs@t|�|j�jd�s$|j�jd�r*d}nL|j�jt�r>d}n8|j�jt�rRd}n$|j�jd�rfd}ntjd|�d	}tj||�}�z�t	d
d�|j
�D��}�x�|j
�D�]�}|j}|dkr�q�|r�t|�d
}t
jj||�}ytj|j|d�|�Wntjk
�rYnX|j��r"t|�q�|j��rxy|j||�Wn8tk
�rt}ztjd||j|�w�WYdd}~XnXq�y|j|�}	Wn<ttfk
�r�}ztjd||j|�w�WYdd}~XnXtt
jj|��t|d��}
tj|	|
�WdQRX|	j�|j||�|jd@r�t
j |dt!�dB�q�WWd|j�XdS)a�
    Untar the file (with path `filename`) to the destination `location`.
    All files are written based on system defaults and umask (i.e. permissions
    are not preserved), except that regular file members with any execute
    permissions (user, group, or world) have "chmod +x" applied after being
    written.  Note that for windows, any execute changes using os.chmod are
    no-ops per the python docs.
    z.gzz.tgzzr:gzzr:bz2zr:xzz.tar�rz-Cannot determine compression type for file %szr:*cSsg|]}|jdkr|j�qS)�pax_global_header)r�)r��memberr<r<r=r�(szuntar_file.<locals>.<listcomp>r�ri)r�z/In the tar file %s the member %s is invalid: %sNr�r�i�)"r(ryr��BZ2_EXTENSIONS�
XZ_EXTENSIONS�logger�warning�tarfiler�rZ
getmembersr�rr?rCr{Zdata_filterr[ZLinkOutsideDestinationErrorr~ZissymZ_extract_memberrwZextractfile�KeyErrorrKr�rPZcopyfileobjr��utimer�rVr�)r�r�r�Ztarr�r�r�rC�excr�Zdestfpr<r<r=r$
sh	



cCs�tjj|�}|dks,|j�jt�s,tj|�rDt|||jd�d�n�|dkslt	j
|�sl|j�jttt
�rxt||�nX|r�|jd�r�tt|��r�ddlm}|d|j�j|�ntjd	|||�td
|��dS)Nzapplication/zipz.whl)r�zapplication/x-gzipz	text/htmlr)�
Subversionzsvn+zZCannot unpack file %s (downloaded from %s, content-type: %s); cannot detect archive formatz%Cannot determine archive format of %s)r?rCr�ryr��ZIP_EXTENSIONSr�Z
is_zipfiler#r�Z
is_tarfile�TAR_EXTENSIONSr�r�r$rdrrZpip.vcs.subversionr�Zurlr�r��criticalr)r�r�Zcontent_type�linkr�r<r<r=r%`s,


�raisecCs,|r
d}ntj}|dkrng}xF|D]>}	d|	ksFd|	ksFd|	ksFd|	krVd|	jdd�}	|j|	�q"Wdj|�}tjd|�tjj	�}
|r�|
j
|�ytj|tjd|||
d�}Wn2t
k
r�}ztjd	||��WYdd}~XnX|dk	�rNg}
x\t|jj��}|�sP|j�}|
j|d�tj�tjk�r:tj|�q�|dk	r�|j�q�W|j�|dk	�r~|j�rt|jd
�n
|jd�|j�r|dk�r�tj�tjk�r�|�r�tjd
|�tjdj|
�d�td||j|f��n:|dk�r�tjd||j|�n|dk�rntdt|���|�s(dj|
�SdS)N� �
�"�'z"%s"z\"zRunning command %s)�stderr�stdin�stdout�cwd�envz#Error %s while executing command %s�error�doner�z Complete output from command %s:roz)
----------------------------------------z,Command "%s" failed with error code %s in %s�warnz$Command "%s" had error code %s in %s�ignorezInvalid value: on_returncode=%s)�
subprocess�PIPEr[r�r{r��debugr?rp�copy�update�PopenZSTDOUTrwr�rr��readline�rstripZgetEffectiveLevel�std_logging�DEBUGZspin�wait�
returncodeZfinishr�rr��
ValueError�repr)�cmdZshow_stdoutr�Z
on_returncodeZcommand_descZ
extra_environZspinnerr�Z	cmd_parts�partr��procr�Z
all_output�liner<r<r=r&�sz
 










cCsxt|d��}|j�}WdQRXdtjd�dg}x4|D],}y|j|�}Wntk
r\w4YnXPq4Wt|�tkstt�|S)aRReturn the contents of *filename*.

    Try to decode the file contents with utf-8, the preferred system encoding
    (e.g., cp1252 on some Windows machines), and latin1, in that order.
    Decoding a byte string with latin1 will never raise an error. In the worst
    case, the returned string will contain some garbage characters.

    r�Nzutf-8F�latin1)	r�r��locale�getpreferredencodingr`�UnicodeDecodeError�typer}�AssertionError)r�r�r�Z	encodings�encr<r<r=�read_text_file�s	
rcCstj|�t|�dS)N)r?r@r)Z	build_dirr<r<r=�_make_build_dir�s
rc@s(eZdZdZdd�Zdd�Zdd�ZdS)	�FakeFilezQWrap a list of lines in an object with readline() to make
    ConfigParser happy.cCsdd�|D�|_dS)Ncss|]
}|VqdS)Nr<)r��lr<r<r=�	<genexpr>sz$FakeFile.__init__.<locals>.<genexpr>)�_gen)�self�linesr<r<r=�__init__szFakeFile.__init__cCsDy*y
t|j�Stk
r&|jj�SXWntk
r>dSXdS)Nro)�nextr�	NameError�
StopIteration)rr<r<r=r�s
zFakeFile.readlinecCs|jS)N)r)rr<r<r=�__iter__szFakeFile.__iter__N)�__name__�
__module__�__qualname__�__doc__rr�rr<r<r<r=rs	rc@s$eZdZedd��Zedd��ZdS)�
StreamWrappercCs||_|�S)N)�orig_stream)�clsr!r<r<r=�from_streamszStreamWrapper.from_streamcCs|jjS)N)r!�encoding)rr<r<r=r$szStreamWrapper.encodingN)rrr�classmethodr#�propertyr$r<r<r<r=r sr c
cs@tt|�}tt|tj|��ztt|�VWdtt||�XdS)z�Return a context manager used by captured_stdout/stdin/stderr
    that temporarily replaces the sys stream *stream_name* with a StringIO.

    Taken from Lib/support/__init__.py in the CPython repo.
    N)�getattrrH�setattrr r#)Zstream_nameZorig_stdoutr<r<r=�captured_output s

r)cCstd�S)z�Capture the output of sys.stdout:

       with captured_stdout() as stdout:
           print('hello')
       self.assertEqual(stdout.getvalue(), 'hello
')

    Taken from Lib/support/__init__.py in the CPython repo.
    r�)r)r<r<r<r=r'/s	c@s eZdZdZdd�Zdd�ZdS)�cached_propertyz�A property that is only computed once per instance and then replaces
       itself with an ordinary attribute. Deleting the attribute resets the
       property.

       Source: https://github.com/bottlepy/bottle/blob/0.11.5/bottle.py#L175
    cCst|d�|_||_dS)Nr)r'rrX)rrXr<r<r=rCszcached_property.__init__cCs(|dkr|S|j|�}|j|jj<|S)N)rX�__dict__r)r�objr"�valuer<r<r=�__get__Gszcached_property.__get__N)rrrrrr.r<r<r<r=r*;sr*cCs@tjj|�}|dkrtj�}n
tj|�}|j|�}|r<|jSdS)zCGet the installed version of dist_name avoiding pkg_resources cacheN)rZRequirement�parseZ
WorkingSetr��version)Z	dist_nameZlookup_dirsZreqr�r�r<r<r=r+Os


cCst|dd�dS)zConsume an iterable at C speed.r)�maxlenN)r)�iteratorr<r<r=�consumecsr3)r,r-)r.r/r0r1r2)r3r4)r5r6r7)F)rh)T)T)TNr�NNN)N)fZ
__future__r�collectionsr�
contextlibrB�ior	Zloggingr�r�r?r�rPrSr�rHr�r�Zpip.exceptionsrZ
pip.compatrrrZ
pip.locationsrr	r
rrr
Zpip._vendorrZpip._vendor.six.movesrZpip._vendor.sixrZpip._vendor.retryingrrr�__all__Z	getLoggerrr�r�r�r�r�r)r*�bz2r9r�Zlzmar>r(r"rrQrrrvrrrrr�DEFAULT_BUFFER_SIZEr�rrrrr r�r�r�r�r�r�r�r�r�r!r�r#r$r%r&rr�objectrr �contextmanagerr)r'r*r+r3r<r<r<r=�<module>s� 
	



	

	

	5%
+S!
_

utils/__pycache__/outdated.cpython-36.pyc000064400000011147151733136510014364 0ustar003

�Pfe�@s�ddlmZddlZddlZddlZddlZddlZddlm	Z	ddl
mZddl
mZmZddlmZddlmZmZddlmZmZdd	lmZd
Zeje�ZGdd�de�ZGd
d�de�Z dd�Z!dd�Z"dd�Z#dS)�)�absolute_importN)�lockfile)�version)�
total_seconds�WINDOWS)�PyPI)�USER_CACHE_DIR�running_under_virtualenv)�
ensure_dir�get_installed_version)�check_path_ownerz%Y-%m-%dT%H:%M:%SZc@seZdZdd�Zdd�ZdS)�VirtualenvSelfCheckStatecCs\tjjtjd�|_y&t|j��}tj|�|_	WdQRXWnt
tfk
rVi|_	YnXdS)Nzpip-selfcheck.json)�os�path�join�sys�prefix�statefile_path�open�json�load�state�IOError�
ValueError)�self�	statefile�r�/usr/lib/python3.6/outdated.py�__init__sz!VirtualenvSelfCheckState.__init__c
Cs:t|jd��$}tj|jt�|d�|ddd�WdQRXdS)N�w)�
last_check�pypi_versionT�,�:)�	sort_keys�
separators)r"r#)rrr�dump�strftime�SELFCHECK_DATE_FMT)rr!�current_timerrrr�save$szVirtualenvSelfCheckState.saveN)�__name__�
__module__�__qualname__rr*rrrrr
s
r
c@seZdZdd�Zdd�ZdS)�GlobalSelfCheckStatecCsbtjjtd�|_y,t|j��}tj|�tj	|_
WdQRXWn ttt
fk
r\i|_
YnXdS)Nzselfcheck.json)rrrrrrrrrrrrr�KeyError)rrrrrr3s zGlobalSelfCheckState.__init__cCs�ttjj|j��sdSttjj|j��tj|j��ztjj|j�rft	|j��}t
j|�}WdQRXni}|jt
�|d�|tj<t	|jd��}t
j||ddd�WdQRXWdQRXdS)N)r r!rTr"r#)r$r%)r"r#)rrr�dirnamerr
rZLockFile�existsrrrr'r(rrr&)rr!r)rrrrrr*=s
zGlobalSelfCheckState.saveN)r+r,r-rr*rrrrr.2s
r.cCst�rt�St�SdS)N)r	r
r.rrrr�load_selfcheck_statefileXsr2cCsFddl}y"|jd�}|jd�o*d|jd�kS|jk
r@dSXdS)z�Checks whether pip was installed by pip

    This is used not to display the upgrade message when pip is in fact
    installed by system package manager, such as dnf on Fedora.
    rN�pipZ	INSTALLERF)�
pkg_resourcesZget_distributionZhas_metadataZget_metadata_linesZDistributionNotFound)r4Zdistrrr�pip_installed_by_pip_s

r5c
CsFtd�}|dkrdStj|�}d}�y�t�}tjj�}d|jkrxd|jkrxtjj|jdt�}t	||�dkrx|jd}|dkr�|j
tjdd	id
�}|j
�dd�tt|j�d
�tjd�D�d}|j||�tj|�}||k�r|j|jk�rt��rt�rd}	nd}	tjd|||	�Wn$tk
�r@tjddd�YnXdS)z�Check for an update for pip.

    Limit the frequency of checks to once per week. State is stored either in
    the active virtualenv or in the user's USER_CACHE_DIR keyed off the prefix
    of the pip script path.
    r3Nr r!���<ZAcceptzapplication/json)ZheaderscSsg|]}tj|�js|�qSr)�packaging_version�parseZ
is_prerelease)�.0�vrrr�
<listcomp>�sz%pip_version_check.<locals>.<listcomp>Zreleases)�key�z
python -m pipz�You are using pip version %s, however version %s is available.
You should consider upgrading via the '%s install --upgrade pip' command.z5There was an error checking the latest version of pipT)�exc_info�i`'i�:	���)rr9r:r2�datetimeZutcnowrZstrptimer(r�getrZpip_json_urlZraise_for_status�sorted�listrr*Zbase_versionr5r�loggerZwarning�	Exception�debug)
ZsessionZinstalled_versionZpip_versionr!rr)r ZrespZremote_versionZpip_cmdrrr�pip_version_checknsL




rJ)$Z
__future__rrCrZloggingZos.pathrrZpip._vendorrZpip._vendor.packagingrr9Z
pip.compatrrZ
pip.modelsrZ
pip.locationsrr	Z	pip.utilsr
rZpip.utils.filesystemrr(Z	getLoggerr+rG�objectr
r.r2r5rJrrrr�<module>s&
&utils/__pycache__/setuptools_build.cpython-36.pyc000064400000000464151733136510016153 0ustar003

�Pf�@sdZdS)z�import setuptools, tokenize;__file__=%r;f=getattr(tokenize, 'open', open)(__file__);code=f.read().replace('\r\n', '\n');f.close();exec(compile(code, __file__, 'exec'))N)ZSETUPTOOLS_SHIM�rr�&/usr/lib/python3.6/setuptools_build.py�<module>sutils/__pycache__/hashes.cpython-36.opt-1.pyc000064400000006227151733136510014770 0ustar003

�Pf2�@szddlmZddlZddlmZmZmZddlmZddl	m
Z
mZmZdZ
dddgZGd	d
�d
e�ZGdd�de�ZdS)
�)�absolute_importN)�HashMismatch�HashMissing�InstallationError)�read_chunks)�	iteritems�iterkeys�
itervaluesZsha256Zsha384Zsha512c@sJeZdZdZddd�Zdd�Zdd�Zd	d
�Zdd�Zd
d�Z	dd�Z
dS)�HasheszaA wrapper that builds multiple hashes at once and checks them against
    known-good values

    NcCs|dkrin||_dS)zo
        :param hashes: A dict of algorithm names pointing to lists of allowed
            hex digests
        N)�_allowed)�self�hashes�r�/usr/lib/python3.6/hashes.py�__init__szHashes.__init__c
Cs�i}xJt|j�D]<}ytj|�||<Wqttfk
rJtd|��YqXqWx(|D] }xt|�D]}|j|�qdWqVWx*t	|�D]\}}|j
�|j|kr�dSq�W|j|�dS)z�Check good hashes against ones built from iterable of chunks of
        data.

        Raise HashMismatch if none match.

        zUnknown hash name: %sN)rr�hashlib�new�
ValueError�	TypeErrorrr	�updater�	hexdigest�_raise)rZchunks�gotsZ	hash_name�chunk�hashZgotrrr�check_against_chunks s
zHashes.check_against_chunkscCst|j|��dS)N)rr)rrrrrr7sz
Hashes._raisecCs|jt|��S)zaCheck good hashes against a file-like object

        Raise HashMismatch if none match.

        )rr)r�filerrr�check_against_file:szHashes.check_against_filec	Cs t|d��}|j|�SQRXdS)N�rb)�openr)r�pathrrrr�check_against_pathBszHashes.check_against_pathcCs
t|j�S)z,Return whether I know any known-good hashes.)�boolr)rrrr�__nonzero__FszHashes.__nonzero__cCs|j�S)N)r#)rrrr�__bool__JszHashes.__bool__)N)�__name__�
__module__�__qualname__�__doc__rrrrr!r#r$rrrrr
s
r
cs(eZdZdZ�fdd�Zdd�Z�ZS)�
MissingHashesz�A workalike for Hashes used when we're missing a hash for a requirement

    It computes the actual hash of the requirement and raises a HashMissing
    exception showing it to the user.

    cstt|�jtgid�dS)z!Don't offer the ``hashes`` kwarg.)r
N)�superr)r�
FAVORITE_HASH)r)�	__class__rrrUszMissingHashes.__init__cCst|tj���dS)N)rr+r)rrrrrr[szMissingHashes._raise)r%r&r'r(rr�
__classcell__rr)r,rr)Nsr))Z
__future__rrZpip.exceptionsrrrZ	pip.utilsrZpip._vendor.sixrrr	r+Z
STRONG_HASHES�objectr
r)rrrr�<module>s
:utils/__pycache__/logging.cpython-36.opt-1.pyc000064400000007447151733136510015150 0ustar003

�Pf��@sddlmZddlZddlZddlZddlZyddlZWnek
rTddlZYnXddl	m
Z
ddlmZyddl
mZWnek
r�dZYnXej�Zde_ejddd��Zd	d
�ZGdd�dej�Zd
d�ZGdd�dej�ZGdd�dejj�ZGdd�dej�ZdS)�)�absolute_importN)�WINDOWS)�
ensure_dir)�colorama�ccs.tj|7_z
dVWdtj|8_XdS)zv
    A context manager which will cause the log output to be indented for any
    log messages emitted inside it.
    N)�
_log_state�indentation)Znum�r	�/usr/lib/python3.6/logging.py�
indent_logs
rcCsttdd�S)Nrr)�getattrrr	r	r	r
�get_indentation)sr
c@seZdZdd�ZdS)�IndentingFormattercCs,tjj||�}djdd�|jd�D��}|S)z�
        Calls the standard formatter, but will indent all of the log messages
        by our current indentation level.
        �cSsg|]}dt�|�qS)� )r
)�.0�liner	r	r
�
<listcomp>6sz-IndentingFormatter.format.<locals>.<listcomp>T)�logging�	Formatter�format�join�
splitlines)�self�recordZ	formattedr	r	r
r/s
zIndentingFormatter.formatN)�__name__�
__module__�__qualname__rr	r	r	r
r-srcs�fdd�}|S)Ncsdjt��|tjjg�S)Nr)r�listrZStyleZ	RESET_ALL)Zinp)�colorsr	r
�wrapped=sz_color_wrap.<locals>.wrappedr	)rr r	)rr
�_color_wrap<sr!c@sTeZdZer2ejeejj�fej	eejj
�fgZngZddd�Zdd�Z
dd�ZdS)	�ColorizedStreamHandlerNcCs(tjj||�tr$tr$tj|j�|_dS)N)r�
StreamHandler�__init__rr�AnsiToWin32�stream)rr&r	r	r
r$NszColorizedStreamHandler.__init__cCsRtsdSt|jtj�s|jn|jj}t|d�r:|j�r:dStjj	d�dkrNdSdS)NF�isattyTZTERMZANSI)
r�
isinstancer&r%r �hasattrr'�os�environ�get)rZreal_streamr	r	r
�should_colorTsz#ColorizedStreamHandler.should_colorcCsBtjj||�}|j�r>x&|jD]\}}|j|kr||�}PqW|S)N)rr#rr-�COLORS�levelno)rr�msg�levelZcolorr	r	r
ris
zColorizedStreamHandler.format)N)rrrrrZERRORr!ZForeZREDZWARNINGZYELLOWr.r$r-rr	r	r	r
r"Bs
r"c@seZdZdd�ZdS)�BetterRotatingFileHandlercCs ttjj|j��tjjj|�S)N)	rr*�path�dirnameZbaseFilenamer�handlers�RotatingFileHandler�_open)rr	r	r
r7wszBetterRotatingFileHandler._openN)rrrr7r	r	r	r
r2usr2c@seZdZdd�Zdd�ZdS)�MaxLevelFiltercCs
||_dS)N)r1)rr1r	r	r
r$~szMaxLevelFilter.__init__cCs|j|jkS)N)r/r1)rrr	r	r
�filter�szMaxLevelFilter.filterN)rrrr$r9r	r	r	r
r8|sr8)r) Z
__future__r�
contextlibrZlogging.handlersr*Z	threading�ImportErrorZdummy_threadingZ
pip.compatrZ	pip.utilsrZpip._vendorr�	ExceptionZlocalrr�contextmanagerrr
rrr!r#r"r5r6r2�Filterr8r	r	r	r
�<module>s0
3utils/__pycache__/deprecation.cpython-36.opt-1.pyc000064400000003255151733136510016010 0ustar003

�Pf��@s�dZddlmZddlZddlZGdd�de�ZGdd�de�ZGdd	�d	e�Z	Gd
d�dee�Z
Gdd
�d
e�Zdaddd�Z
dd�ZdS)zN
A module that implements tooling to enable easy warnings about deprecations.
�)�absolute_importNc@seZdZdS)�PipDeprecationWarningN)�__name__�
__module__�__qualname__�rr�!/usr/lib/python3.6/deprecation.pyr
src@seZdZdS)�PendingN)rrrrrrrr	sr	c@seZdZdS)�RemovedInPip10WarningN)rrrrrrrr
sr
c@seZdZdS)�RemovedInPip11WarningN)rrrrrrrrsrc@seZdZdS)�Python26DeprecationWarningN)rrrrrrrrsrcCsx|dk	r$tdk	rtt||||||�nPt|t�rbtjd�}d|}t|t�rV|j|�qt|j|�nt||||||�dS)Nzpip.deprecationszDEPRECATION: %s)�_warnings_showwarning�
issubclassr�loggingZ	getLoggerr	Zwarning�error)�message�category�filename�lineno�file�lineZloggerZlog_messagerrr�_showwarning$s


rcCs(tjdtdd�tdkr$tjatt_dS)N�defaultT)�append)�warnings�simplefilterrr
�showwarningrrrrr�install_warning_loggerDsr)NN)�__doc__Z
__future__rrr�Warningr�objectr	r
rrr
rrrrrr�<module>s
 utils/__pycache__/filesystem.cpython-36.opt-1.pyc000064400000001064151733136510015673 0ustar003

�Pf��@s(ddlZddlZddlmZdd�ZdS)�N)�get_path_uidcCs�ttd�sdSd}xp||kr�tjj|�rntj�dkr^yt|�}Wntk
rTdSX|dkStj|tj�Sq|tjj	|�}}qWdS)N�geteuidTrF)
�hasattr�os�path�lexistsrr�OSError�access�W_OK�dirname)rZpreviousZpath_uid�r� /usr/lib/python3.6/filesystem.py�check_path_owners

r)rZos.pathZ
pip.compatrrrrrr
�<module>sutils/__pycache__/deprecation.cpython-36.pyc000064400000003255151733136510015051 0ustar003

�Pf��@s�dZddlmZddlZddlZGdd�de�ZGdd�de�ZGdd	�d	e�Z	Gd
d�dee�Z
Gdd
�d
e�Zdaddd�Z
dd�ZdS)zN
A module that implements tooling to enable easy warnings about deprecations.
�)�absolute_importNc@seZdZdS)�PipDeprecationWarningN)�__name__�
__module__�__qualname__�rr�!/usr/lib/python3.6/deprecation.pyr
src@seZdZdS)�PendingN)rrrrrrrr	sr	c@seZdZdS)�RemovedInPip10WarningN)rrrrrrrr
sr
c@seZdZdS)�RemovedInPip11WarningN)rrrrrrrrsrc@seZdZdS)�Python26DeprecationWarningN)rrrrrrrrsrcCsx|dk	r$tdk	rtt||||||�nPt|t�rbtjd�}d|}t|t�rV|j|�qt|j|�nt||||||�dS)Nzpip.deprecationszDEPRECATION: %s)�_warnings_showwarning�
issubclassr�loggingZ	getLoggerr	Zwarning�error)�message�category�filename�lineno�file�lineZloggerZlog_messagerrr�_showwarning$s


rcCs(tjdtdd�tdkr$tjatt_dS)N�defaultT)�append)�warnings�simplefilterrr
�showwarningrrrrr�install_warning_loggerDsr)NN)�__doc__Z
__future__rrr�Warningr�objectr	r
rrr
rrrrrr�<module>s
 utils/__pycache__/__init__.cpython-36.opt-1.pyc000064400000054075151733136510015260 0ustar003

�Pfml�@s�ddlmZddlmZddlZddlZddlZddlZddlZ	ddl
Z
ddlZddlZddl
Z
ddlZddlZddlZddlZddlZddlmZddlmZmZmZddlmZmZmZmZmZmZddl m!Z!ddl"m#Z#dd	l$m%Z%dd
l&m'Z'e%�rddlm(Z)nddlm)Z)d
ddddddddddddddddddd d!d"d#d$gZ*e	j+e,�Z-dzZ.d{Z/d|Z0d}Z1e0e.e1e/Z2e0e1Z3yddl4Z4e3e.7Z3Wn e5k
�r�e-j6d1�YnXyddl7Z7e3e/7Z3Wn e5k
�r�e-j6d2�YnXd3d4�Z8d5d!�Z9d6d�Z:e'd7d8d9�d~d;d
��Z;d<d=�Z<d>d�Z=dd@d�Z>dAdB�Z?dCd�Z@dDd�ZAdEd�ZBdFd�ZCdGd�ZDejEfdHdI�ZFdJd�ZGdKd�ZHd�dMd�ZIdNd�ZJdOd�ZKdPdQ�ZLdRdS�ZMdTdU�ZNdVdW�ZOdXdY�ZPdZd[�ZQdLedLd:d:fd\d]�ZRd^d_�ZSd`da�ZTdbd�ZUdcdd�ZVd�ded�ZWdfd�ZXdgd�ZYd�did�ZZdjdk�Z[dldm�Z\Gdndo�doe]�Z^Gdpdq�dqe)�Z_ej`drds��Zadtd �ZbGdudv�dve]�Zcd�dwd$�Zddxdy�ZedS)��)�absolute_import)�dequeN)�InstallationError)�console_to_str�
expanduser�stdlib_pkgs)�
site_packages�	user_site�running_under_virtualenv�virtualenv_no_global�write_delete_marker_file�distutils_scheme)�
pkg_resources)�input)�PY2)�retry)�BytesIO)�StringIO�rmtree�display_path�
backup_dir�ask�splitext�format_size�is_installable_dir�is_svn_page�
file_contents�split_leading_dir�has_leading_dir�normalize_path�renames�get_terminal_size�get_prog�
unzip_file�
untar_file�unpack_file�call_subprocess�captured_stdout�
ensure_dir�ARCHIVE_EXTENSIONS�SUPPORTED_EXTENSIONS�get_installed_version�.tar.bz2�.tbz�.tar.xz�.txz�.tlz�.tar.lz�	.tar.lzma�.zip�.whl�.tar.gz�.tgz�.tarzbz2 module is not availablezlzma module is not availablecOs,yt|�Stk
r&|||��YnXdS)N)�
__import__�ImportError)Zpkg_or_module_stringZ
ExceptionType�args�kwargs�r<�/usr/lib/python3.6/__init__.py�import_or_raiseIsr>cCsDytj|�Wn0tk
r>}z|jtjkr.�WYdd}~XnXdS)z os.path.makedirs without EEXIST.N)�os�makedirs�OSError�errnoZEEXIST)�path�er<r<r=r(Ps
c
CsDy$tjjtjd�dkr"dtjSWntttfk
r>YnXdS)Nr�__main__.py�-cz	%s -m pipZpip)rErF)	r?rC�basename�sys�argv�
executable�AttributeError�	TypeError�
IndexErrorr<r<r<r=r"Ysi�i�)Zstop_max_delayZ
wait_fixedFcCstj||td�dS)N)�
ignore_errors�onerror)�shutilr�rmtree_errorhandler)�dirrNr<r<r=rcscCs2tj|�jtj@r,tj|tj�||�dS�dS)z�On Windows, the files in .svn are read-only, so when rmtree() tries to
    remove them, an exception is thrown.  We catch that here, remove the
    read-only attribute, and hopefully continue without problems.N)r?�stat�st_mode�S_IREAD�chmod�S_IWRITE)�funcrC�exc_infor<r<r=rQis
rQcCsttjjtjj|��}tjddkrB|jtj�d�}|jtj	�d�}|j
tj�tjj�rpd|t
tj��d�}|S)zTGives the display value for a given path, making it relative to cwd
    if possible.r��replace�.N)r?rC�normcase�abspathrH�version_info�decode�getfilesystemencoding�encode�getdefaultencoding�
startswith�getcwd�sep�len)rCr<r<r=rxs�.bakcCs:d}|}x(tjj||�r0|d7}|t|�}q
W||S)z\Figure out the name of a directory to back up the given dir to
    (adding .bak, .bak2, etc)�)r?rC�exists�str)rR�ext�n�	extensionr<r<r=r�scCs2x&tjjdd�j�D]}||kr|SqWt||�S)NZPIP_EXISTS_ACTION�)r?�environ�get�splitr)�message�options�actionr<r<r=�ask_path_exists�srvcCsZxTtjjd�rtd|��t|�}|j�j�}||krNtd|dj|�f�q|SqWdS)z@Ask the message interactively, with the given possible responsesZPIP_NO_INPUTz7No input was expected ($PIP_NO_INPUT set); question: %sz<Your response (%r) was not one of the expected responses: %sz, N)	r?rprq�	Exceptionr�strip�lower�print�join)rsrtZresponser<r<r=r�scCsL|dkrd|ddS|d	kr,d|dS|dkr@d|dSd|SdS)
Ni�z%.1fMBg@�@�
z%ikBz%.1fkBz%ibytesi@Bi'r<)�bytesr<r<r=r�scCs2tjj|�sdStjj|d�}tjj|�r.dSdS)z@Return True if `path` is a directory containing a setup.py file.Fzsetup.pyT)r?rC�isdirr{�isfile)rCZsetup_pyr<r<r=r�scCstjd|�otjd|tj�S)zT
    Returns true if the page appears to be the index page of an svn repository
    z<title>[^<]*Revision \d+:z#Powered by (?:<a[^>]*?>)?Subversion)�re�search�I)Zhtmlr<r<r=r�sc	Cs$t|d��}|j�jd�SQRXdS)N�rbzutf-8)�open�readr`)�filename�fpr<r<r=r�sccs x|j|�}|sP|VqWdS)z7Yield pieces of data from a file-like object until EOF.N)r�)�file�size�chunkr<r<r=�read_chunks�s

r�cCsh|jd�jd�}d|krHd|kr4|jd�|jd�ks<d|krH|jdd�Sd|kr\|jdd�S|dfSdS)N�/�\riro)�lstrip�findrr)rCr<r<r=r�s$cCsDd}x:|D]2}t|�\}}|s"dS|dkr0|}q
||kr
dSq
WdS)zyReturns true if all the paths have the same leading path name
    (i.e., everything is in one subdirectory in an archive)NFT)r)�pathsZ
common_prefixrC�prefix�restr<r<r=r�s
TcCs2t|�}|rtjj|�}ntjj|�}tjj|�S)zN
    Convert a path to its canonical, case-normalized, absolute version.

    )rr?rC�realpathr^r])rCZresolve_symlinksr<r<r=r�s
cCs@tj|�\}}|j�jd�r8|dd�|}|dd�}||fS)z,Like os.path.splitext, but take off .tar tooz.tar�N���r�)�	posixpathrry�endswith)rC�baserlr<r<r=r�s
cCs|tjj|�\}}|r0|r0tjj|�r0tj|�tj||�tjj|�\}}|rx|rxytj|�Wntk
rvYnXdS)z7Like os.renames(), but handles renaming across devices.N)	r?rCrrrjr@rPZmove�
removedirsrA)�old�new�head�tailr<r<r=r s
cCst�s
dSt|�jttj��S)z�
    Return True if path is within sys.prefix, if we're running in a virtualenv.

    If we're not in a virtualenv, all paths are considered "local."

    T)r
rrdrHr�)rCr<r<r=�is_localsr�cCstt|��S)z�
    Return True if given Distribution object is installed locally
    (i.e. within current virtualenv).

    Always True if we're not in a virtualenv.

    )r��
dist_location)�distr<r<r=�
dist_is_local!sr�cCstt|��}|jtt��S)zF
    Return True if given Distribution is installed in user site.
    )rr�rdr	)r��	norm_pathr<r<r=�dist_in_usersite,sr�cCstt|��jtt��S)ze
    Return True if given Distribution is installed in
    distutils.sysconfig.get_python_lib().
    )rr�rdr)r�r<r<r=�dist_in_site_packages4s
r�cCs,tt|��}|jttd�djd�d��S)zf
    Return True if given Distribution is installed in
    path matching distutils_scheme layout.
    ro�purelib�pythonr)rr�rdr
rr)r�r�r<r<r=�dist_in_install_path>sr�cCs8x2tjD](}tjj||jd�}tjj|�rdSqWdS)z$Is distribution an editable install?z	.egg-linkTF)rHrCr?r{�project_namer)r�Z	path_item�egg_linkr<r<r=�dist_is_editableHs
r�csl|r
t�ndd��|r dd��ndd��|r6dd��ndd��|rHt�nd	d
�������fdd�tjD�S)
a�
    Return a list of installed Distribution objects.

    If ``local_only`` is True (default), only return installations
    local to the current virtualenv, if in a virtualenv.

    ``skip`` argument is an iterable of lower-case project names to
    ignore; defaults to stdlib_pkgs

    If ``editables`` is False, don't report editables.

    If ``editables_only`` is True , only report editables.

    If ``user_only`` is True , only report installations in the user
    site directory.

    cSsdS)NTr<)�dr<r<r=�
local_testjsz/get_installed_distributions.<locals>.local_testcSsdS)NTr<)r�r<r<r=�
editable_testnsz2get_installed_distributions.<locals>.editable_testcSs
t|�S)N)r�)r�r<r<r=r�qscSst|�S)N)r�)r�r<r<r=�editables_only_testusz8get_installed_distributions.<locals>.editables_only_testcSsdS)NTr<)r�r<r<r=r�xscSsdS)NTr<)r�r<r<r=�	user_test~sz.get_installed_distributions.<locals>.user_testcs:g|]2}�|�r|j�kr�|�r�|�r�|�r|�qSr<)�key)�.0r�)r�r�r��skipr�r<r=�
<listcomp>�s
z/get_installed_distributions.<locals>.<listcomp>)r�r�r�working_set)Z
local_onlyr�Zinclude_editablesZeditables_onlyZ	user_onlyr<)r�r�r�r�r�r=�get_installed_distributionsQs

r�cCs�g}t�r6t�r|jt�qN|jt�trN|jt�ntrD|jt�|jt�x0|D](}tjj||j�d}tjj	|�rT|SqTWdS)a
    Return the path for the .egg-link file if it exists, otherwise, None.

    There's 3 scenarios:
    1) not in a virtualenv
       try to find in site.USER_SITE, then site_packages
    2) in a no-global virtualenv
       try to find in site_packages
    3) in a yes-global virtualenv
       try to find in site_packages, then site.USER_SITE
       (don't look in global location)

    For #1 and #3, there could be odd cases, where there's an egg-link in 2
    locations.

    This method will just return the first one found.
    z	.egg-linkN)
r
r�appendrr	r?rCr{r�r)r�ZsitesZsiteZegglinkr<r<r=�
egg_link_path�s



r�cCst|�}|r|S|jS)z�
    Get the site-packages location of this distribution. Generally
    this is dist.location, except in the case of develop-installed
    packages, where dist.location is the source code location, and we
    want to know where the egg-link file is.

    )r��location)r�r�r<r<r=r��sr�c
Cs�dd�}|d�p|d�p|d�}|sZy(tjtj�tj�}||�}tj|�WnYnX|sztjjdd�tjjdd	�f}t|d�t|d�fS)
zlReturns a tuple (x, y) representing the width(x) and the height(x)
    in characters of the terminal window.cSsPy4ddl}ddl}ddl}|jd|j||jd��}Wn
dS|dkrLdS|S)NrZhhZ1234)rr)�fcntl�termios�struct�unpackZioctlZ
TIOCGWINSZ)�fdr�r�r��crr<r<r=�ioctl_GWINSZ�sz'get_terminal_size.<locals>.ioctl_GWINSZrrirZZLINES�ZCOLUMNS�P)r?r��ctermid�O_RDONLY�closerprq�int)r�r�r�r<r<r=r!�scCstjd�}tj|�|S)zBGet the current umask which involves having to set it temporarily.r)r?�umask)�maskr<r<r=�
current_umask�s

r�c
Cst|�t|d�}z�tj|dd�}t|j��o0|}x�|j�D]�}|j}|j|�}|}	|rdt	|�d}	t
jj||	�}	t
jj
|	�}
|	jd�s�|	jd�r�t|	�q<t|
�t|	d�}z|j|�Wd|j�|jd	?}|r�tj|�r�|d
@r�t
j|	dt�d
B�Xq<WWd|j�XdS)a�
    Unzip the file (with path `filename`) to the destination `location`.  All
    files are written based on system defaults and umask (i.e. permissions are
    not preserved), except that regular file members with any execute
    permissions (user, group, or world) have "chmod +x" applied after being
    written. Note that for windows, any execute changes using os.chmod are
    no-ops per the python docs.
    r�T)Z
allowZip64rir�r��wbN��Ii�)r(r��zipfileZZipFilerZnamelistZinfolistr�r�rr?rCr{�dirnamer��writer�Z
external_attrrS�S_ISREGrVr�)
r�r��flattenZzipfp�zip�leading�info�name�data�fnrRr��moder<r<r=r#�s0	




 c(Cs@t|�|j�jd�s$|j�jd�r*d}nL|j�jt�r>d}n8|j�jt�rRd}n$|j�jd�rfd}ntjd|�d	}tj||�}�z�t	d
d�|j
�D��}�x�|j
�D�]�}|j}|dkr�q�|r�t|�d
}t
jj||�}ytj|j|d�|�Wntjk
�rYnX|j��r"t|�q�|j��rxy|j||�Wn8tk
�rt}ztjd||j|�w�WYdd}~XnXq�y|j|�}	Wn<ttfk
�r�}ztjd||j|�w�WYdd}~XnXtt
jj|��t|d��}
tj|	|
�WdQRX|	j�|j||�|jd@r�t
j |dt!�dB�q�WWd|j�XdS)a�
    Untar the file (with path `filename`) to the destination `location`.
    All files are written based on system defaults and umask (i.e. permissions
    are not preserved), except that regular file members with any execute
    permissions (user, group, or world) have "chmod +x" applied after being
    written.  Note that for windows, any execute changes using os.chmod are
    no-ops per the python docs.
    z.gzz.tgzzr:gzzr:bz2zr:xzz.tar�rz-Cannot determine compression type for file %szr:*cSsg|]}|jdkr|j�qS)�pax_global_header)r�)r��memberr<r<r=r�(szuntar_file.<locals>.<listcomp>r�ri)r�z/In the tar file %s the member %s is invalid: %sNr�r�i�)"r(ryr��BZ2_EXTENSIONS�
XZ_EXTENSIONS�logger�warning�tarfiler�rZ
getmembersr�rr?rCr{Zdata_filterr[ZLinkOutsideDestinationErrorr~ZissymZ_extract_memberrwZextractfile�KeyErrorrKr�rPZcopyfileobjr��utimer�rVr�)r�r�r�Ztarr�r�r�rC�excr�Zdestfpr<r<r=r$
sh	



cCs�tjj|�}|dks,|j�jt�s,tj|�rDt|||jd�d�n�|dkslt	j
|�sl|j�jttt
�rxt||�nX|r�|jd�r�tt|��r�ddlm}|d|j�j|�ntjd	|||�td
|��dS)Nzapplication/zipz.whl)r�zapplication/x-gzipz	text/htmlr)�
Subversionzsvn+zZCannot unpack file %s (downloaded from %s, content-type: %s); cannot detect archive formatz%Cannot determine archive format of %s)r?rCr�ryr��ZIP_EXTENSIONSr�Z
is_zipfiler#r�Z
is_tarfile�TAR_EXTENSIONSr�r�r$rdrrZpip.vcs.subversionr�Zurlr�r��criticalr)r�r�Zcontent_type�linkr�r<r<r=r%`s,


�raisecCs,|r
d}ntj}|dkrng}xF|D]>}	d|	ksFd|	ksFd|	ksFd|	krVd|	jdd�}	|j|	�q"Wdj|�}tjd|�tjj	�}
|r�|
j
|�ytj|tjd|||
d�}Wn2t
k
r�}ztjd	||��WYdd}~XnX|dk	�rNg}
x\t|jj��}|�sP|j�}|
j|d�tj�tjk�r:tj|�q�|dk	r�|j�q�W|j�|dk	�r~|j�rt|jd
�n
|jd�|j�r|dk�r�tj�tjk�r�|�r�tjd
|�tjdj|
�d�td||j|f��n:|dk�r�tjd||j|�n|dk�rntdt|���|�s(dj|
�SdS)N� �
�"�'z"%s"z\"zRunning command %s)�stderr�stdin�stdout�cwd�envz#Error %s while executing command %s�error�doner�z Complete output from command %s:roz)
----------------------------------------z,Command "%s" failed with error code %s in %s�warnz$Command "%s" had error code %s in %s�ignorezInvalid value: on_returncode=%s)�
subprocess�PIPEr[r�r{r��debugr?rp�copy�update�PopenZSTDOUTrwr�rr��readline�rstripZgetEffectiveLevel�std_logging�DEBUGZspin�wait�
returncodeZfinishr�rr��
ValueError�repr)�cmdZshow_stdoutr�Z
on_returncodeZcommand_descZ
extra_environZspinnerr�Z	cmd_parts�partr��procr�Z
all_output�liner<r<r=r&�sz
 










cCsht|d��}|j�}WdQRXdtjd�dg}x4|D],}y|j|�}Wntk
r\w4YnXPq4W|S)aRReturn the contents of *filename*.

    Try to decode the file contents with utf-8, the preferred system encoding
    (e.g., cp1252 on some Windows machines), and latin1, in that order.
    Decoding a byte string with latin1 will never raise an error. In the worst
    case, the returned string will contain some garbage characters.

    r�Nzutf-8F�latin1)r�r��locale�getpreferredencodingr`�UnicodeDecodeError)r�r�r�Z	encodings�encr<r<r=�read_text_file�s	
r
cCstj|�t|�dS)N)r?r@r)Z	build_dirr<r<r=�_make_build_dir�s
rc@s(eZdZdZdd�Zdd�Zdd�ZdS)	�FakeFilezQWrap a list of lines in an object with readline() to make
    ConfigParser happy.cCsdd�|D�|_dS)Ncss|]
}|VqdS)Nr<)r��lr<r<r=�	<genexpr>sz$FakeFile.__init__.<locals>.<genexpr>)�_gen)�self�linesr<r<r=�__init__szFakeFile.__init__cCsDy*y
t|j�Stk
r&|jj�SXWntk
r>dSXdS)Nro)�nextr�	NameError�
StopIteration)rr<r<r=r�s
zFakeFile.readlinecCs|jS)N)r)rr<r<r=�__iter__szFakeFile.__iter__N)�__name__�
__module__�__qualname__�__doc__rr�rr<r<r<r=rs	rc@s$eZdZedd��Zedd��ZdS)�
StreamWrappercCs||_|�S)N)�orig_stream)�clsrr<r<r=�from_streamszStreamWrapper.from_streamcCs|jjS)N)r�encoding)rr<r<r=r"szStreamWrapper.encodingN)rrr�classmethodr!�propertyr"r<r<r<r=rsrc
cs@tt|�}tt|tj|��ztt|�VWdtt||�XdS)z�Return a context manager used by captured_stdout/stdin/stderr
    that temporarily replaces the sys stream *stream_name* with a StringIO.

    Taken from Lib/support/__init__.py in the CPython repo.
    N)�getattrrH�setattrrr!)Zstream_nameZorig_stdoutr<r<r=�captured_output s

r'cCstd�S)z�Capture the output of sys.stdout:

       with captured_stdout() as stdout:
           print('hello')
       self.assertEqual(stdout.getvalue(), 'hello
')

    Taken from Lib/support/__init__.py in the CPython repo.
    r�)r'r<r<r<r=r'/s	c@s eZdZdZdd�Zdd�ZdS)�cached_propertyz�A property that is only computed once per instance and then replaces
       itself with an ordinary attribute. Deleting the attribute resets the
       property.

       Source: https://github.com/bottlepy/bottle/blob/0.11.5/bottle.py#L175
    cCst|d�|_||_dS)Nr)r%rrX)rrXr<r<r=rCszcached_property.__init__cCs(|dkr|S|j|�}|j|jj<|S)N)rX�__dict__r)r�objr �valuer<r<r=�__get__Gszcached_property.__get__N)rrrrrr,r<r<r<r=r(;sr(cCs@tjj|�}|dkrtj�}n
tj|�}|j|�}|r<|jSdS)zCGet the installed version of dist_name avoiding pkg_resources cacheN)rZRequirement�parseZ
WorkingSetr��version)Z	dist_nameZlookup_dirsZreqr�r�r<r<r=r+Os


cCst|dd�dS)zConsume an iterable at C speed.r)�maxlenN)r)�iteratorr<r<r=�consumecsr1)r,r-)r.r/r0r1r2)r3r4)r5r6r7)F)rh)T)T)TNr�NNN)N)fZ
__future__r�collectionsr�
contextlibrB�ior	Zloggingr�r�r?r�rPrSr�rHr�r�Zpip.exceptionsrZ
pip.compatrrrZ
pip.locationsrr	r
rrr
Zpip._vendorrZpip._vendor.six.movesrZpip._vendor.sixrZpip._vendor.retryingrrr�__all__Z	getLoggerrr�r�r�r�r�r)r*�bz2r9r�Zlzmar>r(r"rrQrrrvrrrrr�DEFAULT_BUFFER_SIZEr�rrrrr r�r�r�r�r�r�r�r�r�r!r�r#r$r%r&r
r�objectrr�contextmanagerr'r'r(r+r1r<r<r<r=�<module>s� 
	



	

	

	5%
+S!
_

utils/__pycache__/build.cpython-36.opt-1.pyc000064400000002420151733136510014603 0ustar003

�Pf �@s<ddlmZddlZddlZddlmZGdd�de�ZdS)�)�absolute_importN)�rmtreec@s6eZdZddd�Zdd�Zdd�Zdd	�Zd
d�ZdS)
�BuildDirectoryNcCsL|dkr|dkrd}|dkr<tjjtjdd��}|dkr<d}||_||_dS)NTz
pip-build-)�prefix)�os�path�realpath�tempfileZmkdtemp�name�delete)�selfr
r�r
�/usr/lib/python3.6/build.py�__init__szBuildDirectory.__init__cCsdj|jj|j�S)Nz	<{} {!r}>)�format�	__class__�__name__r
)rr
r
r�__repr__szBuildDirectory.__repr__cCs|jS)N)r
)rr
r
r�	__enter__"szBuildDirectory.__enter__cCs|j�dS)N)�cleanup)r�exc�value�tbr
r
r�__exit__%szBuildDirectory.__exit__cCs|jrt|j�dS)N)rrr
)rr
r
rr(szBuildDirectory.cleanup)NN)r�
__module__�__qualname__rrrrrr
r
r
rr	s

r)	Z
__future__rZos.pathrr	Z	pip.utilsr�objectrr
r
r
r�<module>sutils/__pycache__/setuptools_build.cpython-36.opt-1.pyc000064400000000464151733136510017112 0ustar003

�Pf�@sdZdS)z�import setuptools, tokenize;__file__=%r;f=getattr(tokenize, 'open', open)(__file__);code=f.read().replace('\r\n', '\n');f.close();exec(compile(code, __file__, 'exec'))N)ZSETUPTOOLS_SHIM�rr�&/usr/lib/python3.6/setuptools_build.py�<module>sutils/__pycache__/appdirs.cpython-36.opt-1.pyc000064400000017002151733136510015150 0ustar003

�Pfk"�@s�dZddlmZddlZddlZddlmZmZddlm	Z	m
Z
dd�Zdd	d
�Zddd
�Z
dd�Zdd�Zdd�Zer�yddlZeZWnek
r�eZYnXdd�ZdS)zd
This code was taken from https://github.com/ActiveState/appdirs and modified
to suit our purposes.
�)�absolute_importN)�WINDOWS�
expanduser)�PY2�	text_typecCs�tr<tjjtd��}tr*t|t�r*t|�}tjj	||d�}n@t
jdkr^td�}tjj	||�}ntj
dtd��}tjj	||�}|S)a5
    Return full path to the user-specific cache dir for this application.

        "appname" is the name of application.

    Typical user cache directories are:
        macOS:      ~/Library/Caches/<AppName>
        Unix:       ~/.cache/<AppName> (XDG default)
        Windows:    C:\Users\<username>\AppData\Local\<AppName>\Cache

    On Windows the only suggestion in the MSDN docs is that local settings go
    in the `CSIDL_LOCAL_APPDATA` directory. This is identical to the
    non-roaming app data dir (the default returned by `user_data_dir`). Apps
    typically put cache data somewhere *under* the given dir here. Some
    examples:
        ...\Mozilla\Firefox\Profiles\<ProfileName>\Cache
        ...\Acme\SuperApp\Cache\1.0

    OPINION: This function appends "Cache" to the `CSIDL_LOCAL_APPDATA` value.
    �CSIDL_LOCAL_APPDATAZCache�darwinz~/Library/CachesZXDG_CACHE_HOMEz~/.cache)r�os�path�normpath�_get_win_folderr�
isinstancer�_win_path_to_bytes�join�sys�platformr�getenv)�appnamer
�r�/usr/lib/python3.6/appdirs.py�user_cache_dirs
rFcCshtr,|rdpd}tjjtjjt|��|�}n8tjdkrJtjjtd�|�}ntjjtj	dtd��|�}|S)aS
    Return full path to the user-specific data dir for this application.

        "appname" is the name of application.
            If None, just the system directory is returned.
        "roaming" (boolean, default False) can be set True to use the Windows
            roaming appdata directory. That means that for users on a Windows
            network setup for roaming profiles, this user data will be
            sync'd on login. See
            <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
            for a discussion of issues.

    Typical user data directories are:
        macOS:                  ~/Library/Application Support/<AppName>
        Unix:                   ~/.local/share/<AppName>    # or in
                                $XDG_DATA_HOME, if defined
        Win XP (not roaming):   C:\Documents and Settings\<username>\ ...
                                ...Application Data\<AppName>
        Win XP (roaming):       C:\Documents and Settings\<username>\Local ...
                                ...Settings\Application Data\<AppName>
        Win 7  (not roaming):   C:\Users\<username>\AppData\Local\<AppName>
        Win 7  (roaming):       C:\Users\<username>\AppData\Roaming\<AppName>

    For Unix, we follow the XDG spec and support $XDG_DATA_HOME.
    That means, by default "~/.local/share/<AppName>".
    �
CSIDL_APPDATArrz~/Library/Application Support/Z
XDG_DATA_HOMEz~/.local/share)
rr	r
rrrrrrr)r�roaming�constr
rrr�
user_data_dir>s
rTcCsHtrt||d�}n2tjdkr&t|�}ntjdtd��}tjj||�}|S)arReturn full path to the user-specific config dir for this application.

        "appname" is the name of application.
            If None, just the system directory is returned.
        "roaming" (boolean, default True) can be set False to not use the
            Windows roaming appdata directory. That means that for users on a
            Windows network setup for roaming profiles, this user data will be
            sync'd on login. See
            <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
            for a discussion of issues.

    Typical user data directories are:
        macOS:                  same as user_data_dir
        Unix:                   ~/.config/<AppName>
        Win *:                  same as user_data_dir

    For Unix, we follow the XDG spec and support $XDG_CONFIG_HOME.
    That means, by default "~/.config/<AppName>".
    )rrZXDG_CONFIG_HOMEz	~/.config)	rrrrr	rrr
r)rrr
rrr�user_config_dirjs

rcs�tr&tjjtd��}tjj|��g}nVtjdkrBtjjd��g}n:tjdd�}|rn�fdd�|j	tj
�D�}ng}|jd�|S)	a�Return a list of potential user-shared config dirs for this application.

        "appname" is the name of application.

    Typical user config directories are:
        macOS:      /Library/Application Support/<AppName>/
        Unix:       /etc or $XDG_CONFIG_DIRS[i]/<AppName>/ for each value in
                    $XDG_CONFIG_DIRS
        Win XP:     C:\Documents and Settings\All Users\Application ...
                    ...Data\<AppName>        Vista:      (Fail! "C:\ProgramData" is a hidden *system* directory
                    on Vista.)
        Win 7:      Hidden, but writeable on Win 7:
                    C:\ProgramData\<AppName>    �CSIDL_COMMON_APPDATArz/Library/Application SupportZXDG_CONFIG_DIRSz/etc/xdgcsg|]}tjjt|����qSr)r	r
rr)�.0�x)rrr�
<listcomp>�sz$site_config_dirs.<locals>.<listcomp>z/etc)rr	r
rrrrrr�split�pathsep�append)rr
ZpathlistZxdg_config_dirsr)rr�site_config_dirs�s


r#cCs:ddl}dddd�|}|j|jd�}|j||�\}}|S)z�
    This is a fallback technique at best. I'm not sure if using the
    registry for this guarantees us the correct answer for all CSIDL_*
    names.
    rNZAppDatazCommon AppDataz
Local AppData)rrrz@Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders)�_winreg�OpenKey�HKEY_CURRENT_USERZQueryValueEx)�
csidl_namer$Zshell_folder_name�keyZ	directoryZ_typerrr�_get_win_folder_from_registry�sr)cCs�dddd�|}tjd�}tjjjd|dd|�d}x|D]}t|�dkr:d	}Pq:W|rztjd�}tjjj|j|d�rz|}|jS)
N��#�)rrrirF�T)	�ctypesZcreate_unicode_bufferZwindllZshell32ZSHGetFolderPathW�ordZkernel32ZGetShortPathNameW�value)r'Zcsidl_constZbufZ
has_high_char�cZbuf2rrr�_get_win_folder_with_ctypes�s 


r2c
Cs6x0dD](}y
|j|�Sttfk
r,YqXqW|S)a�Encode Windows paths to bytes. Only used on Python 2.

    Motivation is to be consistent with other operating systems where paths
    are also returned as bytes. This avoids problems mixing bytes and Unicode
    elsewhere in the codebase. For more details and discussion see
    <https://github.com/pypa/pip/issues/3463>.

    If encoding using ASCII and MBCS fails, return the original Unicode path.
    �ASCII�MBCS)r3r4)�encode�UnicodeEncodeError�LookupError)r
�encodingrrrr�s



r)F)T)�__doc__Z
__future__rr	rZ
pip.compatrrZpip._vendor.sixrrrrrr#r)r2r.r�ImportErrorrrrrr�<module>s$0
,
!(
utils/__pycache__/ui.cpython-36.opt-1.pyc000064400000022441151733136510014126 0ustar003

�PfM-�@s�ddlmZddlmZddlZddlZddlmZmZmZddlZddl	Z	ddl
Z
ddlmZddl
mZddlmZddlmZdd	lmZmZdd
lmZmZmZddlmZyddlmZWnek
r�dZYnXe
je�Z d
d�Z!e!ee�Z"Gdd�de#�Z$Gdd�de#�Z%Gdd�de#�Z&Gdd�de&e$e%e"�Z'Gdd�de&e$e%ee�Z(e	j)dd��Z*Gdd�de#�Z+Gdd�de#�Z,Gdd �d e#�Z-e	j)d!d"��Z.dS)#�)�absolute_import)�divisionN)�signal�SIGINT�default_int_handler)�WINDOWS)�format_size)�get_indentation)�six)�Bar�IncrementalBar)�WritelnMixin�HIDE_CURSOR�SHOW_CURSOR)�Spinner)�coloramacCs�t|jdd�}|s|St|dtj��t|dtj��g}|tt|dg��7}ytj�j|�j|�Wntk
rv|SX|SdS)N�encodingZ
empty_fillZfill�phases)�getattr�filer
Z	text_type�list�join�encode�UnicodeEncodeError)Z	preferredZfallbackrZ
characters�r�/usr/lib/python3.6/ui.py�_select_progress_classsrcs4eZdZdZ�fdd�Z�fdd�Zdd�Z�ZS)�InterruptibleMixina�
    Helper to ensure that self.finish() gets called on keyboard interrupt.

    This allows downloads to be interrupted without leaving temporary state
    (like hidden cursors) behind.

    This class is similar to the progress library's existing SigIntMixin
    helper, but as of version 1.2, that helper has the following problems:

    1. It calls sys.exit().
    2. It discards the existing SIGINT handler completely.
    3. It leaves its own handler in place even after an uninterrupted finish,
       which will have unexpected delayed effects if the user triggers an
       unrelated keyboard interrupt some time after a progress-displaying
       download has already completed, for example.
    cs4tt|�j||�tt|j�|_|jdkr0t|_dS)z=
        Save the original SIGINT handler for later.
        N)�superr�__init__rr�
handle_sigint�original_handlerr)�self�args�kwargs)�	__class__rrrNs
zInterruptibleMixin.__init__cstt|�j�tt|j�dS)z�
        Restore the original SIGINT handler after finishing.

        This should happen regardless of whether the progress display finishes
        normally, or gets interrupted.
        N)rr�finishrrr!)r")r%rrr&^szInterruptibleMixin.finishcCs|j�|j||�dS)z�
        Call self.finish() before delegating to the original SIGINT handler.

        This handler should only be in place while the progress display is
        active.
        N)r&r!)r"Zsignum�framerrrr hsz InterruptibleMixin.handle_sigint)�__name__�
__module__�__qualname__�__doc__rr&r �
__classcell__rr)r%rr<s
rcsJeZdZ�fdd�Zedd��Zedd��Zedd��Zdd
d�Z�Z	S)
�DownloadProgressMixincs,tt|�j||�dt�d|j|_dS)N� �)rr-rr	�message)r"r#r$)r%rrruszDownloadProgressMixin.__init__cCs
t|j�S)N)r�index)r"rrr�
downloadedysz DownloadProgressMixin.downloadedcCs |jdkrdStd|j�dS)Ngz...�z/s)Zavgr)r"rrr�download_speed}s
z$DownloadProgressMixin.download_speedcCs|jrd|jSdS)Nzeta %s�)ZetaZeta_td)r"rrr�
pretty_eta�s
z DownloadProgressMixin.pretty_etar3ccs*x|D]}|V|j|�qW|j�dS)N)�nextr&)r"�it�n�xrrr�iter�s
zDownloadProgressMixin.iter)r3)
r(r)r*r�propertyr2r4r6r;r,rr)r%rr-ss
r-cseZdZ�fdd�Z�ZS)�WindowsMixincs\tr�jrd�_tt��j||�trXtrXtj�j��_�fdd��j_�fdd��j_	dS)NFcs�jjj�S)N)r�wrapped�isattyr)r"rr�<lambda>�sz'WindowsMixin.__init__.<locals>.<lambda>cs�jjj�S)N)rr>�flushr)r"rrr@�s)
rZhide_cursorrr=rrZAnsiToWin32rr?rA)r"r#r$)r%)r"rr�s
zWindowsMixin.__init__)r(r)r*rr,rr)r%rr=�sr=c@seZdZejZdZdZdS)�DownloadProgressBarz
%(percent)d%%z0%(downloaded)s %(download_speed)s %(pretty_eta)sN)r(r)r*�sys�stdoutrr0�suffixrrrrrB�srBc@s&eZdZejZdZdd�Zdd�ZdS)�DownloadProgressSpinnerz!%(downloaded)s %(download_speed)scCs"t|d�stj|j�|_t|j�S)N�_phaser)�hasattr�	itertools�cyclerrGr7)r"rrr�
next_phase�s
z"DownloadProgressSpinner.next_phasecCsN|j|}|j�}|j|}dj||r*dnd||r6dnd|g�}|j|�dS)Nr5r.)r0rKrErZwriteln)r"r0ZphaserE�linerrr�update�s



zDownloadProgressSpinner.updateN)	r(r)r*rCrDrrErKrMrrrrrF�srFccsRtrdVnB|j�s$tj�tjkr,dVn"|jt�z
dVWd|jt�XdS)N)	rr?�logger�getEffectiveLevel�logging�INFO�writerr)rrrr�
hidden_cursor�s

rSc@s$eZdZdd�Zdd�Zdd�ZdS)�RateLimitercCs||_d|_dS)Nr)�_min_update_interval_seconds�_last_update)r"�min_update_interval_secondsrrrr�szRateLimiter.__init__cCstj�}||j}||jkS)N)�timerVrU)r"ZnowZdeltarrr�ready�s
zRateLimiter.readycCstj�|_dS)N)rXrV)r"rrr�reset�szRateLimiter.resetN)r(r)r*rrYrZrrrrrT�srTc@s.eZdZddd�Zdd�Zdd	�Zd
d�ZdS)
�InteractiveSpinnerN�-\|/��?cCs\||_|dkrtj}||_t|�|_d|_tj|�|_	|jj
dt�|jd�d|_dS)NFr.z ... r)
�_messagerCrD�_filerT�
_rate_limiter�	_finishedrIrJ�_spin_cyclerRr	�_width)r"r0rZ
spin_charsrWrrrr�s
zInteractiveSpinner.__init__cCsRd|j}|jj|d|j|�|jj|�t|�|_|jj�|jj�dS)N�r.)rcr_rR�lenrAr`rZ)r"�statusZbackuprrr�_write	s


zInteractiveSpinner._writecCs,|jr
dS|jj�sdS|jt|j��dS)N)rar`rYrgr7rb)r"rrr�spins

zInteractiveSpinner.spincCs4|jr
dS|j|�|jjd�|jj�d|_dS)N�
T)rargr_rRrA)r"�final_statusrrrr&s

zInteractiveSpinner.finish)Nr\r])r(r)r*rrgrhr&rrrrr[�s


r[c@s.eZdZddd�Zdd�Zdd�Zdd	�Zd
S)�NonInteractiveSpinner�<cCs$||_d|_t|�|_|jd�dS)NFZstarted)r^rarTr`�_update)r"r0rWrrrr*s
zNonInteractiveSpinner.__init__cCs|jj�tjd|j|�dS)Nz%s: %s)r`rZrN�infor^)r"rfrrrrm0s
zNonInteractiveSpinner._updatecCs&|jr
dS|jj�sdS|jd�dS)Nzstill running...)rar`rYrm)r"rrrrh5s

zNonInteractiveSpinner.spincCs$|jr
dS|jd|f�d|_dS)Nzfinished with status '%s'T)rarm)r"rjrrrr&<szNonInteractiveSpinner.finishN)rl)r(r)r*rrmrhr&rrrrrk)s
rkccs�tjj�r"tj�tjkr"t|�}nt|�}y t	tj��|VWdQRXWn>t
k
rj|jd��Yn*tk
r�|jd��YnX|jd�dS)NZcanceled�error�done)
rCrDr?rNrOrPrQr[rkrS�KeyboardInterruptr&�	Exception)r0Zspinnerrrr�open_spinnerCs


rs)/Z
__future__rrrIrCrrrrX�
contextlibrPZ
pip.compatrZ	pip.utilsrZpip.utils.loggingr	Zpip._vendorr
Zpip._vendor.progress.barrrZpip._vendor.progress.helpersr
rrZpip._vendor.progress.spinnerrrrrZ	getLoggerr(rNrZ_BaseBar�objectrr-r=rBrF�contextmanagerrSrTr[rkrsrrrr�<module>sB


7
!0utils/build.py000064400000002440151733136510007362 0ustar00from __future__ import absolute_import

import os.path
import tempfile

from pip.utils import rmtree


class BuildDirectory(object):
    """A (possibly temporary) build directory usable as a context manager.

    :param name: directory path to use; when None, a fresh temporary
        directory is created with a ``pip-build-`` prefix.
    :param delete: whether :meth:`cleanup` removes the directory.  When
        both ``name`` and ``delete`` are None, defaults to True (we own
        the temporary directory, so we clean it up).  When an explicit
        ``name`` is given and ``delete`` is None, it stays None, which is
        falsy -- the caller's directory is left alone.
    """

    def __init__(self, name=None, delete=None):
        # If we were not given an explicit directory, and we were not given
        # an explicit delete option, then we'll default to deleting.
        if name is None and delete is None:
            delete = True

        if name is None:
            # We realpath here because some systems have their default tmpdir
            # symlinked to another directory.  This tends to confuse build
            # scripts, so we canonicalize the path by traversing potential
            # symlinks here.
            name = os.path.realpath(tempfile.mkdtemp(prefix="pip-build-"))
            # NOTE: the original code repeated the ``delete is None``
            # defaulting here; that branch was unreachable (it was already
            # handled above) and has been removed.

        self.name = name
        self.delete = delete

    def __repr__(self):
        return "<{} {!r}>".format(self.__class__.__name__, self.name)

    def __enter__(self):
        return self.name

    def __exit__(self, exc, value, tb):
        self.cleanup()

    def cleanup(self):
        # Removal is delegated to pip's retrying rmtree helper.
        if self.delete:
            rmtree(self.name)
utils/__init__.py000064400000066155151733136510010037 0ustar00from __future__ import absolute_import

from collections import deque
import contextlib
import errno
import io
import locale
# we have a submodule named 'logging' which would shadow this if we used the
# regular name:
import logging as std_logging
import re
import os
import posixpath
import shutil
import stat
import subprocess
import sys
import tarfile
import zipfile

from pip.exceptions import InstallationError
from pip.compat import console_to_str, expanduser, stdlib_pkgs
from pip.locations import (
    site_packages, user_site, running_under_virtualenv, virtualenv_no_global,
    write_delete_marker_file, distutils_scheme,
)
from pip._vendor import pkg_resources
from pip._vendor.six.moves import input
from pip._vendor.six import PY2
from pip._vendor.retrying import retry

if PY2:
    from io import BytesIO as StringIO
else:
    from io import StringIO

# Public API of this module, re-exported via ``from pip.utils import *``.
__all__ = ['rmtree', 'display_path', 'backup_dir',
           'ask', 'splitext',
           'format_size', 'is_installable_dir',
           'is_svn_page', 'file_contents',
           'split_leading_dir', 'has_leading_dir',
           'normalize_path',
           'renames', 'get_terminal_size', 'get_prog',
           'unzip_file', 'untar_file', 'unpack_file', 'call_subprocess',
           'captured_stdout', 'ensure_dir',
           'ARCHIVE_EXTENSIONS', 'SUPPORTED_EXTENSIONS',
           'get_installed_version']


logger = std_logging.getLogger(__name__)

# Archive filename extensions grouped by container/compression type.
BZ2_EXTENSIONS = ('.tar.bz2', '.tbz')
XZ_EXTENSIONS = ('.tar.xz', '.txz', '.tlz', '.tar.lz', '.tar.lzma')
ZIP_EXTENSIONS = ('.zip', '.whl')
TAR_EXTENSIONS = ('.tar.gz', '.tgz', '.tar')
ARCHIVE_EXTENSIONS = (
    ZIP_EXTENSIONS + BZ2_EXTENSIONS + TAR_EXTENSIONS + XZ_EXTENSIONS)
# SUPPORTED_EXTENSIONS grows below only when the matching decompression
# module can actually be imported on this interpreter.
SUPPORTED_EXTENSIONS = ZIP_EXTENSIONS + TAR_EXTENSIONS
try:
    import bz2  # noqa
    SUPPORTED_EXTENSIONS += BZ2_EXTENSIONS
except ImportError:
    logger.debug('bz2 module is not available')

try:
    # Only for Python 3.3+
    import lzma  # noqa
    SUPPORTED_EXTENSIONS += XZ_EXTENSIONS
except ImportError:
    logger.debug('lzma module is not available')


def import_or_raise(pkg_or_module_string, ExceptionType, *args, **kwargs):
    """Import *pkg_or_module_string* and return the top-level module.

    On ImportError, raise ``ExceptionType(*args, **kwargs)`` instead so the
    caller can surface a domain-specific error.
    """
    try:
        imported = __import__(pkg_or_module_string)
    except ImportError:
        raise ExceptionType(*args, **kwargs)
    return imported


def ensure_dir(path):
    """Create *path* (and any missing parents), tolerating "already exists"."""
    try:
        os.makedirs(path)
    except OSError as e:
        # Only an already-existing directory is acceptable; anything else
        # (permissions, invalid parent, ...) propagates to the caller.
        if e.errno == errno.EEXIST:
            return
        raise


def get_prog():
    """Return the program name for usage text: ``pip`` normally, or
    ``<python> -m pip`` when invoked as ``python -m pip`` / ``python -c``."""
    try:
        invoked_as = os.path.basename(sys.argv[0])
    except (AttributeError, TypeError, IndexError):
        # sys.argv may be absent or malformed (embedded interpreters).
        return 'pip'
    if invoked_as in ('__main__.py', '-c'):
        return "%s -m pip" % sys.executable
    return 'pip'


# Retry every half second for up to 3 seconds
@retry(stop_max_delay=3000, wait_fixed=500)
def rmtree(dir, ignore_errors=False):
    """Remove the directory tree *dir*, retrying transient failures.

    ``rmtree_errorhandler`` clears the read-only bit (e.g. Windows .svn
    files) and retries the failing operation; the ``@retry`` decorator
    re-runs the whole removal for up to 3 seconds if it keeps raising.
    """
    shutil.rmtree(dir, ignore_errors=ignore_errors,
                  onerror=rmtree_errorhandler)


def rmtree_errorhandler(func, path, exc_info):
    """``shutil.rmtree`` onerror hook for read-only files (Windows .svn).

    When the failing path carries the owner-read bit, grant owner-write and
    retry the original operation; otherwise re-raise the active exception.
    """
    mode = os.stat(path).st_mode
    if not (mode & stat.S_IREAD):
        # Not a permission problem we know how to fix -- re-raise the
        # exception shutil.rmtree was handling.
        raise
    os.chmod(path, stat.S_IWRITE)
    func(path)


def display_path(path):
    """Return *path* normalized for display, shortened to ``./...`` when it
    lies under the current working directory."""
    shown = os.path.normcase(os.path.abspath(path))
    if sys.version_info[0] == 2:
        # Python 2: round-trip through the filesystem encoding so the
        # result is printable with the default encoding.
        shown = shown.decode(sys.getfilesystemencoding(), 'replace')
        shown = shown.encode(sys.getdefaultencoding(), 'replace')
    cwd = os.getcwd()
    if shown.startswith(cwd + os.path.sep):
        shown = '.' + shown[len(cwd):]
    return shown


def backup_dir(dir, ext='.bak'):
    """Return a non-existing backup name for *dir*: dir.bak, dir.bak2, ..."""
    suffix = ext
    counter = 1
    while os.path.exists(dir + suffix):
        counter += 1
        suffix = ext + str(counter)
    return dir + suffix


def ask_path_exists(message, options):
    """Like ask(), but honour a pre-seeded answer from $PIP_EXISTS_ACTION."""
    preset = os.environ.get('PIP_EXISTS_ACTION', '')
    for candidate in preset.split():
        if candidate in options:
            return candidate
    return ask(message, options)


def ask(message, options):
    """Prompt the user with *message* until one of *options* is entered."""
    while True:
        # Refuse to block on stdin when the user said no prompts allowed.
        if os.environ.get('PIP_NO_INPUT'):
            raise Exception(
                'No input was expected ($PIP_NO_INPUT set); question: %s' %
                message
            )
        answer = input(message).strip().lower()
        if answer in options:
            return answer
        print(
            'Your response (%r) was not one of the expected responses: '
            '%s' % (answer, ', '.join(options))
        )


def format_size(bytes):
    """Pretty-print a byte count using decimal (1000-based) units."""
    one_kb = 1000
    one_mb = one_kb * one_kb
    # Order matters: test the largest threshold first.
    if bytes > one_mb:
        return '%.1fMB' % (bytes / 1000.0 / 1000)
    if bytes > 10 * one_kb:
        return '%ikB' % (bytes / 1000)
    if bytes > one_kb:
        return '%.1fkB' % (bytes / 1000.0)
    return '%ibytes' % bytes


def is_installable_dir(path):
    """Return True if `path` is a directory containing a setup.py file."""
    return (os.path.isdir(path) and
            os.path.isfile(os.path.join(path, 'setup.py')))


def is_svn_page(html):
    """True-ish when *html* looks like an svn repository index page: the
    title mentions a revision number and the page says it is powered by
    Subversion."""
    has_revision_title = re.search(r'<title>[^<]*Revision \d+:', html)
    return has_revision_title and re.search(
        r'Powered by (?:<a[^>]*?>)?Subversion', html, re.I)


def file_contents(filename):
    """Read *filename* as binary and return its content decoded as UTF-8."""
    with open(filename, 'rb') as fp:
        raw = fp.read()
    return raw.decode('utf-8')


def read_chunks(file, size=io.DEFAULT_BUFFER_SIZE):
    """Yield successive reads of at most *size* from *file* until EOF."""
    while True:
        piece = file.read(size)
        if not piece:
            # Empty read ('' or b'') signals end-of-file for both text
            # and binary streams.
            return
        yield piece


def split_leading_dir(path):
    """Split off the first path component of *path*.

    Returns a two-item sequence.  Whichever of '/' or '\\' appears first is
    used as the separator (a list from str.split); with no separator at all
    the historical ``(path, '')`` tuple is returned.
    """
    path = path.lstrip('/').lstrip('\\')
    slash = path.find('/')
    backslash = path.find('\\')
    if slash != -1 and (backslash == -1 or slash < backslash):
        return path.split('/', 1)
    if backslash != -1:
        return path.split('\\', 1)
    return path, ''


def has_leading_dir(paths):
    """Return True when every path in *paths* shares the same non-empty
    leading component (i.e. the archive has a single top-level directory)."""
    seen = None
    for path in paths:
        first, _ = split_leading_dir(path)
        if not first:
            return False
        if seen is None:
            seen = first
        elif first != seen:
            return False
    return True


def normalize_path(path, resolve_symlinks=True):
    """Return the canonical, case-normalized, absolute form of *path*.

    ``~`` is expanded first; symlinks are resolved unless
    *resolve_symlinks* is False, in which case only an absolute path is
    computed.
    """
    expanded = expanduser(path)
    if resolve_symlinks:
        absolute = os.path.realpath(expanded)
    else:
        absolute = os.path.abspath(expanded)
    return os.path.normcase(absolute)


def splitext(path):
    """Like os.path.splitext, but take off .tar too"""
    base, ext = posixpath.splitext(path)
    # Fold a trailing '.tar' into the extension so 'x.tar.gz' splits as
    # ('x', '.tar.gz') rather than ('x.tar', '.gz').
    if base.lower().endswith('.tar'):
        return base[:-4], base[-4:] + ext
    return base, ext


def renames(old, new):
    """Like os.renames(), but handles renaming across devices."""
    # Create the destination's parent directories if needed.
    new_dir, new_name = os.path.split(new)
    if new_dir and new_name and not os.path.exists(new_dir):
        os.makedirs(new_dir)

    # shutil.move copes with cross-device moves, unlike os.rename.
    shutil.move(old, new)

    # Best-effort pruning of now-empty directories left at the source.
    old_dir, old_name = os.path.split(old)
    if old_dir and old_name:
        try:
            os.removedirs(old_dir)
        except OSError:
            pass


def is_local(path):
    """Whether *path* lives under sys.prefix of the active virtualenv.

    Outside a virtualenv every path counts as "local".
    """
    if not running_under_virtualenv():
        return True
    prefix = normalize_path(sys.prefix)
    return normalize_path(path).startswith(prefix)


def dist_is_local(dist):
    """Whether the given Distribution is installed locally (inside the
    current virtualenv); always True when not in a virtualenv."""
    location = dist_location(dist)
    return is_local(location)


def dist_in_usersite(dist):
    """Whether the given Distribution is installed under the per-user
    site directory."""
    prefix = normalize_path(user_site)
    return normalize_path(dist_location(dist)).startswith(prefix)


def dist_in_site_packages(dist):
    """Whether the given Distribution lives under
    distutils.sysconfig.get_python_lib()'s site-packages path."""
    location = normalize_path(dist_location(dist))
    return location.startswith(normalize_path(site_packages))


def dist_in_install_path(dist):
    """Whether the given Distribution is installed under the
    distutils_scheme install root."""
    # Strip everything from 'python' onward to get the scheme's root prefix.
    install_root = distutils_scheme("")['purelib'].split('python')[0]
    return normalize_path(dist_location(dist)).startswith(
        normalize_path(install_root))


def dist_is_editable(dist):
    """Is distribution an editable install?

    Editable installs leave a ``<project>.egg-link`` file on sys.path.
    """
    link_name = dist.project_name + '.egg-link'
    return any(
        os.path.isfile(os.path.join(entry, link_name))
        for entry in sys.path
    )


def get_installed_distributions(local_only=True,
                                skip=stdlib_pkgs,
                                include_editables=True,
                                editables_only=False,
                                user_only=False):
    """
    Return a list of installed Distribution objects.

    ``local_only``:   when True (default) and running in a virtualenv,
                      only distributions installed inside it are returned.
    ``skip``:         iterable of lower-case project names to ignore
                      (defaults to stdlib_pkgs).
    ``include_editables``: when False, editable installs are excluded.
    ``editables_only``:    when True, ONLY editable installs are returned.
    ``user_only``:    when True, only user-site installations are returned.
    """
    def _always(d):
        # Predicate used for every filter the caller did not request.
        return True

    local_test = dist_is_local if local_only else _always
    editable_test = _always if include_editables else (
        lambda d: not dist_is_editable(d))
    editables_only_test = dist_is_editable if editables_only else _always
    user_test = dist_in_usersite if user_only else _always

    return [
        d for d in pkg_resources.working_set
        if local_test(d) and
        d.key not in skip and
        editable_test(d) and
        editables_only_test(d) and
        user_test(d)
    ]


def egg_link_path(dist):
    """
    Return the path for the .egg-link file if it exists, otherwise None.

    The search locations depend on the environment:

    1) not in a virtualenv:
       site.USER_SITE first, then site_packages
    2) in a no-global virtualenv:
       site_packages only
    3) in a yes-global virtualenv:
       site_packages first, then site.USER_SITE
       (the global location is never searched)

    For #1 and #3 an egg-link can, in odd cases, exist in both
    locations; the first match wins.
    """
    if running_under_virtualenv():
        candidates = [site_packages]
        if not virtualenv_no_global() and user_site:
            candidates.append(user_site)
    else:
        candidates = [user_site] if user_site else []
        candidates.append(site_packages)

    link_name = dist.project_name + '.egg-link'
    for directory in candidates:
        link = os.path.join(directory, link_name)
        if os.path.isfile(link):
            return link


def dist_location(dist):
    """
    Return the site-packages location of *dist*.

    Normally this is ``dist.location``; for develop-installed packages
    (where ``dist.location`` points at the source tree) the path of the
    egg-link file is returned instead.
    """
    # egg_link_path() yields None for non-editable installs, and paths
    # are always non-empty strings, so `or` is a safe fallback here.
    return egg_link_path(dist) or dist.location


def get_terminal_size():
    """Return a tuple ``(x, y)`` — the width (x) and height (y) of the
    attached terminal, in characters.

    Tries TIOCGWINSZ on stdin/stdout/stderr, then on the controlling
    terminal, and finally falls back to the LINES/COLUMNS environment
    variables (defaults: 25 rows, 80 columns).
    """
    def ioctl_GWINSZ(fd):
        # Ask the kernel for the window size of this fd; returns None
        # when the fd is not a terminal (or the platform lacks fcntl).
        try:
            import fcntl
            import termios
            import struct
            cr = struct.unpack(
                'hh',
                fcntl.ioctl(fd, termios.TIOCGWINSZ, '1234')
            )
        except Exception:
            # Was a bare `except:` — that also swallowed
            # KeyboardInterrupt/SystemExit.
            return None
        if cr == (0, 0):
            return None
        return cr
    cr = ioctl_GWINSZ(0) or ioctl_GWINSZ(1) or ioctl_GWINSZ(2)
    if not cr:
        try:
            fd = os.open(os.ctermid(), os.O_RDONLY)
            try:
                cr = ioctl_GWINSZ(fd)
            finally:
                # Previously the fd leaked if anything between open()
                # and close() raised.
                os.close(fd)
        except Exception:
            pass
    if not cr:
        cr = (os.environ.get('LINES', 25), os.environ.get('COLUMNS', 80))
    # cr is (rows, cols); callers get (cols, rows).
    return int(cr[1]), int(cr[0])


def current_umask():
    """Return the current umask.

    There is no portable read-only query for the umask, so it is set to
    0 and immediately restored to the value just read.
    """
    previous = os.umask(0)
    os.umask(previous)
    return previous


def unzip_file(filename, location, flatten=True):
    """
    Unzip the file (with path `filename`) to the destination `location`.

    All files are written based on system defaults and umask (i.e.
    permissions are not preserved), except that regular file members
    with any execute permission (user, group, or world) get "chmod +x"
    applied after being written.  On Windows execute-bit changes via
    os.chmod are no-ops per the Python docs.

    With ``flatten`` (the default), a single leading directory shared by
    every member is stripped from the extracted paths.
    """
    ensure_dir(location)
    zip_fh = open(filename, 'rb')
    try:
        archive = zipfile.ZipFile(zip_fh, allowZip64=True)
        strip_leading = has_leading_dir(archive.namelist()) and flatten
        for info in archive.infolist():
            name = info.filename
            contents = archive.read(name)
            target = split_leading_dir(name)[1] if strip_leading else name
            target = os.path.join(location, target)
            if target.endswith('/') or target.endswith('\\'):
                # Directory entry: just make sure it exists.
                ensure_dir(target)
                continue
            ensure_dir(os.path.dirname(target))
            out = open(target, 'wb')
            try:
                out.write(contents)
            finally:
                out.close()
                # High 16 bits of external_attr carry the Unix mode.
                mode = info.external_attr >> 16
                if mode and stat.S_ISREG(mode) and mode & 0o111:
                    # chmod +x for user/group/world, honouring the umask.
                    os.chmod(target, (0o777 - current_umask() | 0o111))
    finally:
        zip_fh.close()


def untar_file(filename, location):
    """
    Untar the file (with path `filename`) to the destination `location`.
    All files are written based on system defaults and umask (i.e. permissions
    are not preserved), except that regular file members with any execute
    permissions (user, group, or world) have "chmod +x" applied after being
    written.  Note that for windows, any execute changes using os.chmod are
    no-ops per the python docs.

    :param filename: path to the tar archive; the compression mode is
        picked from the extension (.gz/.tgz, bz2, xz, plain .tar), with
        a transparent-detection fallback ('r:*') plus a warning.
    :param location: directory to extract into (created if missing).
    """
    ensure_dir(location)
    if filename.lower().endswith('.gz') or filename.lower().endswith('.tgz'):
        mode = 'r:gz'
    elif filename.lower().endswith(BZ2_EXTENSIONS):
        mode = 'r:bz2'
    elif filename.lower().endswith(XZ_EXTENSIONS):
        mode = 'r:xz'
    elif filename.lower().endswith('.tar'):
        mode = 'r'
    else:
        logger.warning(
            'Cannot determine compression type for file %s', filename,
        )
        mode = 'r:*'
    tar = tarfile.open(filename, mode)
    try:
        # note: python<=2.5 doesn't seem to know about pax headers, filter them
        leading = has_leading_dir([
            member.name for member in tar.getmembers()
            if member.name != 'pax_global_header'
        ])
        for member in tar.getmembers():
            fn = member.name
            if fn == 'pax_global_header':
                continue
            if leading:
                # Strip the single top-level directory shared by all members.
                fn = split_leading_dir(fn)[1]
            path = os.path.join(location, fn)

            # Call the `data` filter for its side effect (raising exception)
            # tarfile.data_filter (Python 3.12+ / PEP 706 backport) rejects
            # unsafe members such as absolute paths or path traversal;
            # NOTE(review): link-outside-destination errors are deliberately
            # tolerated here rather than fatal — confirm that matches the
            # intended hardening policy.
            try:
                tarfile.data_filter(member.replace(name=fn), location)
            except tarfile.LinkOutsideDestinationError:
                pass

            if member.isdir():
                ensure_dir(path)
            elif member.issym():
                try:
                    # Private tarfile API used to materialise the symlink.
                    tar._extract_member(member, path)
                except Exception as exc:
                    # Some corrupt tar files seem to produce this
                    # (specifically bad symlinks)
                    logger.warning(
                        'In the tar file %s the member %s is invalid: %s',
                        filename, member.name, exc,
                    )
                    continue
            else:
                try:
                    fp = tar.extractfile(member)
                except (KeyError, AttributeError) as exc:
                    # Some corrupt tar files seem to produce this
                    # (specifically bad symlinks)
                    logger.warning(
                        'In the tar file %s the member %s is invalid: %s',
                        filename, member.name, exc,
                    )
                    continue
                ensure_dir(os.path.dirname(path))
                with open(path, 'wb') as destfp:
                    shutil.copyfileobj(fp, destfp)
                fp.close()
                # Update the timestamp (useful for cython compiled files)
                tar.utime(member, path)
                # member have any execute permissions for user/group/world?
                if member.mode & 0o111:
                    # make dest file have execute for user/group/world
                    # no-op on windows per python docs
                    os.chmod(path, (0o777 - current_umask() | 0o111))
    finally:
        tar.close()


def unpack_file(filename, location, content_type, link):
    """Dispatch *filename* to the unpacker matching its type.

    Zips (including wheels, which are not flattened) go to unzip_file,
    tarballs to untar_file, and HTML that looks like a Subversion page
    is checked out via the svn VCS backend.  Anything else raises
    InstallationError.
    """
    filename = os.path.realpath(filename)
    lower = filename.lower()
    if (content_type == 'application/zip' or
            lower.endswith(ZIP_EXTENSIONS) or
            zipfile.is_zipfile(filename)):
        unzip_file(
            filename,
            location,
            flatten=not filename.endswith('.whl')
        )
    elif (content_type == 'application/x-gzip' or
            tarfile.is_tarfile(filename) or
            lower.endswith(
                TAR_EXTENSIONS + BZ2_EXTENSIONS + XZ_EXTENSIONS)):
        untar_file(filename, location)
    elif (content_type and content_type.startswith('text/html') and
            is_svn_page(file_contents(filename))):
        # Imported here at function scope, as in the original module.
        from pip.vcs.subversion import Subversion
        Subversion('svn+' + link.url).unpack(location)
    else:
        # FIXME: handle?
        # FIXME: magic signatures?
        logger.critical(
            'Cannot unpack file %s (downloaded from %s, content-type: %s); '
            'cannot detect archive format',
            filename, location, content_type,
        )
        raise InstallationError(
            'Cannot determine archive format of %s' % location
        )


def call_subprocess(cmd, show_stdout=True, cwd=None,
                    on_returncode='raise',
                    command_desc=None,
                    extra_environ=None, spinner=None):
    """Run *cmd* in a subprocess and handle its output and exit status.

    :param cmd: the command as an argv-style list of strings.
    :param show_stdout: if True the child writes directly to our stdout;
        if False (the usual case in pip) output is captured via a pipe.
    :param cwd: working directory for the child process.
    :param on_returncode: 'raise' (default), 'warn' or 'ignore' — what to
        do when the child exits nonzero.
    :param command_desc: human-readable description used in log messages;
        built by quoting *cmd* when not supplied.
    :param extra_environ: mapping merged over os.environ for the child.
    :param spinner: optional spinner advanced while output is read, and
        finished with "done"/"error" according to the exit status.
    :returns: the captured output as one string when show_stdout is
        False; otherwise None.
    :raises InstallationError: on nonzero exit with on_returncode='raise'.
    """
    # This function's handling of subprocess output is confusing and I
    # previously broke it terribly, so as penance I will write a long comment
    # explaining things.
    #
    # The obvious thing that affects output is the show_stdout=
    # kwarg. show_stdout=True means, let the subprocess write directly to our
    # stdout. Even though it is nominally the default, it is almost never used
    # inside pip (and should not be used in new code without a very good
    # reason); as of 2016-02-22 it is only used in a few places inside the VCS
    # wrapper code. Ideally we should get rid of it entirely, because it
    # creates a lot of complexity here for a rarely used feature.
    #
    # Most places in pip set show_stdout=False. What this means is:
    # - We connect the child stdout to a pipe, which we read.
    # - By default, we hide the output but show a spinner -- unless the
    #   subprocess exits with an error, in which case we show the output.
    # - If the --verbose option was passed (= loglevel is DEBUG), then we show
    #   the output unconditionally. (But in this case we don't want to show
    #   the output a second time if it turns out that there was an error.)
    #
    # stderr is always merged with stdout (even if show_stdout=True).
    if show_stdout:
        stdout = None
    else:
        stdout = subprocess.PIPE
    if command_desc is None:
        # Build a shell-like rendering of the command for log messages.
        cmd_parts = []
        for part in cmd:
            if ' ' in part or '\n' in part or '"' in part or "'" in part:
                part = '"%s"' % part.replace('"', '\\"')
            cmd_parts.append(part)
        command_desc = ' '.join(cmd_parts)
    logger.debug("Running command %s", command_desc)
    env = os.environ.copy()
    if extra_environ:
        env.update(extra_environ)
    try:
        proc = subprocess.Popen(
            cmd, stderr=subprocess.STDOUT, stdin=None, stdout=stdout,
            cwd=cwd, env=env)
    except Exception as exc:
        logger.critical(
            "Error %s while executing command %s", exc, command_desc,
        )
        raise
    # NOTE: all_output only exists when the pipe was used; the error-report
    # branch below relies on `not show_stdout` guarding every access to it.
    if stdout is not None:
        all_output = []
        while True:
            line = console_to_str(proc.stdout.readline())
            if not line:
                break
            line = line.rstrip()
            all_output.append(line + '\n')
            if logger.getEffectiveLevel() <= std_logging.DEBUG:
                # Show the line immediately
                logger.debug(line)
            else:
                # Update the spinner
                if spinner is not None:
                    spinner.spin()
    proc.wait()
    if spinner is not None:
        if proc.returncode:
            spinner.finish("error")
        else:
            spinner.finish("done")
    if proc.returncode:
        if on_returncode == 'raise':
            if (logger.getEffectiveLevel() > std_logging.DEBUG and
                    not show_stdout):
                # Not verbose: the output was hidden, so dump it now.
                logger.info(
                    'Complete output from command %s:', command_desc,
                )
                logger.info(
                    ''.join(all_output) +
                    '\n----------------------------------------'
                )
            raise InstallationError(
                'Command "%s" failed with error code %s in %s'
                % (command_desc, proc.returncode, cwd))
        elif on_returncode == 'warn':
            logger.warning(
                'Command "%s" had error code %s in %s',
                command_desc, proc.returncode, cwd,
            )
        elif on_returncode == 'ignore':
            pass
        else:
            raise ValueError('Invalid value: on_returncode=%s' %
                             repr(on_returncode))
    if not show_stdout:
        return ''.join(all_output)


def read_text_file(filename):
    """Return the contents of *filename* as text.

    The raw bytes are decoded with utf-8 first, then the preferred
    system encoding (e.g., cp1252 on some Windows machines), and finally
    latin1, in that order.  Decoding a byte string with latin1 never
    raises, so in the worst case the returned string contains some
    garbage characters.
    """
    with open(filename, 'rb') as fp:
        data = fp.read()

    encodings = ['utf-8', locale.getpreferredencoding(False), 'latin1']
    for enc in encodings:
        try:
            # Returning here replaces the old post-loop
            # `assert type(data) != bytes` check (asserts vanish under
            # `python -O`) with a structural guarantee.
            return data.decode(enc)
        except UnicodeDecodeError:
            continue
    # Unreachable: latin1 maps every possible byte.
    raise AssertionError('latin1 decoding cannot fail')


def _make_build_dir(build_dir):
    """Create *build_dir* (os.makedirs raises if it already exists) and
    drop pip's delete-marker file into it via write_delete_marker_file."""
    os.makedirs(build_dir)
    write_delete_marker_file(build_dir)


class FakeFile(object):
    """Wrap a list of lines in an object with a file-like readline()
    interface, to make ConfigParser happy.

    Iterating the object consumes the same underlying iterator that
    readline() reads from.
    """

    def __init__(self, lines):
        self._gen = iter(lines)

    def readline(self):
        # Return '' at exhaustion, like a real file.  The next() builtin
        # exists on every Python this codebase supports (2.6+), so the
        # old NameError fallback to gen.next() was dead code; the
        # default argument also subsumes the StopIteration handling.
        return next(self._gen, '')

    def __iter__(self):
        return self._gen


class StreamWrapper(StringIO):
    """An in-memory stream that remembers the stream it replaced, so code
    probing attributes of the original (e.g. ``encoding``) keeps working
    while output is being captured."""

    @classmethod
    def from_stream(cls, orig_stream):
        # NOTE(review): the original stream is stored on the *class*,
        # not the instance, so nested/concurrent captures would share
        # it — confirm this is acceptable for the single capture at a
        # time done by captured_output().
        cls.orig_stream = orig_stream
        return cls()

    # compileall.compile_dir() needs stdout.encoding to print to stdout
    @property
    def encoding(self):
        return self.orig_stream.encoding


@contextlib.contextmanager
def captured_output(stream_name):
    """Context manager backing captured_stdout/stdin/stderr: temporarily
    swaps the sys stream *stream_name* for a StringIO-based
    StreamWrapper and yields it; the original stream is restored on
    exit.

    Taken from Lib/support/__init__.py in the CPython repo.
    """
    original = getattr(sys, stream_name)
    setattr(sys, stream_name, StreamWrapper.from_stream(original))
    try:
        yield getattr(sys, stream_name)
    finally:
        setattr(sys, stream_name, original)


def captured_stdout():
    """Capture the output of sys.stdout, for example::

       with captured_stdout() as stdout:
           print('hello')
       self.assertEqual(stdout.getvalue(), 'hello\n')

    Taken from Lib/support/__init__.py in the CPython repo.
    """
    return captured_output('stdout')


class cached_property(object):
    """Non-data descriptor that computes the wrapped function once per
    instance, then stores the result in the instance ``__dict__`` so
    later lookups bypass the descriptor entirely.  Deleting the
    attribute resets the property.

    Source: https://github.com/bottlepy/bottle/blob/0.11.5/bottle.py#L175
    """

    def __init__(self, func):
        self.__doc__ = getattr(func, '__doc__')
        self.func = func

    def __get__(self, obj, cls):
        if obj is None:
            # Accessed on the class, not an instance: return the
            # descriptor itself.
            return self
        result = self.func(obj)
        obj.__dict__[self.func.__name__] = result
        return result


def get_installed_version(dist_name, lookup_dirs=None):
    """Get the installed version of dist_name avoiding pkg_resources cache"""
    # Build a requirement object for the lookup inside setuptools.
    requirement = pkg_resources.Requirement.parse(dist_name)

    # Construct a fresh working set on every call so pkg_resources'
    # global cache cannot return stale data.
    if lookup_dirs is None:
        working_set = pkg_resources.WorkingSet()
    else:
        working_set = pkg_resources.WorkingSet(lookup_dirs)

    # None when the distribution is not installed.
    installed = working_set.find(requirement)
    return installed.version if installed else None


def consume(iterator):
    """Exhaust *iterator*, discarding every item (a zero-length deque
    drains it at C speed)."""
    deque(iterator, maxlen=0)
utils/filesystem.py000064400000001603151733136510010447 0ustar00import os
import os.path

from pip.compat import get_path_uid


def check_path_owner(path):
    """Return True when the current user may write under *path*.

    Walks upward from *path* to the first component that exists and
    checks that one.  Root gets a stricter test — the component must
    itself be owned by uid 0 — to cope with ``sudo`` invocations that
    omit ``-H``.
    """
    # Without geteuid() there is no way to check; assume ownership.
    if not hasattr(os, "geteuid"):
        return True

    previous = None
    while path != previous:
        if not os.path.lexists(path):
            previous, path = path, os.path.dirname(path)
            continue
        if os.geteuid() != 0:
            return os.access(path, os.W_OK)
        # Running as root: require the existing component to be owned
        # by root as well (handles sudo-without--H home directories).
        try:
            path_uid = get_path_uid(path)
        except OSError:
            return False
        return path_uid == 0
utils/appdirs.py000064400000021153151733136510007727 0ustar00"""
This code was taken from https://github.com/ActiveState/appdirs and modified
to suit our purposes.
"""
from __future__ import absolute_import

import os
import sys

from pip.compat import WINDOWS, expanduser
from pip._vendor.six import PY2, text_type


def user_cache_dir(appname):
    r"""
    Return full path to the user-specific cache dir for this application.

        "appname" is the name of application.

    Typical user cache directories are:
        macOS:      ~/Library/Caches/<AppName>
        Unix:       ~/.cache/<AppName> (XDG default)
        Windows:    C:\Users\<username>\AppData\Local\<AppName>\Cache

    On Windows, the MSDN docs only suggest that local settings go in the
    `CSIDL_LOCAL_APPDATA` directory (identical to the non-roaming app
    data dir returned by `user_data_dir`); apps typically nest their
    cache somewhere under it, e.g.:
        ...\Mozilla\Firefox\Profiles\<ProfileName>\Cache
        ...\Acme\SuperApp\Cache\1.0

    OPINION: This function appends "Cache" to the `CSIDL_LOCAL_APPDATA` value.
    """
    if WINDOWS:
        base = os.path.normpath(_get_win_folder("CSIDL_LOCAL_APPDATA"))
        # On Python 2, return bytes on Windows like we do on the other
        # operating systems; see _win_path_to_bytes for details.
        if PY2 and isinstance(base, text_type):
            base = _win_path_to_bytes(base)
        return os.path.join(base, appname, "Cache")

    if sys.platform == "darwin":
        return os.path.join(expanduser("~/Library/Caches"), appname)

    # Linux/Unix: honour XDG_CACHE_HOME, defaulting to ~/.cache.
    base = os.getenv("XDG_CACHE_HOME", expanduser("~/.cache"))
    return os.path.join(base, appname)


def user_data_dir(appname, roaming=False):
    r"""
    Return full path to the user-specific data dir for this application.

        "appname" is the name of application.
            If None, just the system directory is returned.
        "roaming" (boolean, default False) can be set True to use the
            Windows roaming appdata directory, so the data is sync'd on
            login for users on a roaming-profile network. See
            <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
            for a discussion of issues.

    Typical user data directories are:
        macOS:                  ~/Library/Application Support/<AppName>
        Unix:                   ~/.local/share/<AppName>    # or in
                                $XDG_DATA_HOME, if defined
        Win XP (not roaming):   C:\Documents and Settings\<username>\ ...
                                ...Application Data\<AppName>
        Win XP (roaming):       C:\Documents and Settings\<username>\Local ...
                                ...Settings\Application Data\<AppName>
        Win 7  (not roaming):   C:\\Users\<username>\AppData\Local\<AppName>
        Win 7  (roaming):       C:\\Users\<username>\AppData\Roaming\<AppName>

    For Unix, we follow the XDG spec and support $XDG_DATA_HOME.
    That means, by default "~/.local/share/<AppName>".
    """
    if WINDOWS:
        const = "CSIDL_APPDATA" if roaming else "CSIDL_LOCAL_APPDATA"
        return os.path.join(os.path.normpath(_get_win_folder(const)), appname)

    if sys.platform == "darwin":
        return os.path.join(
            expanduser('~/Library/Application Support/'),
            appname,
        )

    base = os.getenv('XDG_DATA_HOME', expanduser("~/.local/share"))
    return os.path.join(base, appname)


def user_config_dir(appname, roaming=True):
    """Return full path to the user-specific config dir for this application.

        "appname" is the name of application.
            If None, just the system directory is returned.
        "roaming" (boolean, default True) can be set False to not use
            the Windows roaming appdata directory, so the data is not
            sync'd on login for users on a roaming-profile network. See
            <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
            for a discussion of issues.

    Typical user data directories are:
        macOS:                  same as user_data_dir
        Unix:                   ~/.config/<AppName>
        Win *:                  same as user_data_dir

    For Unix, we follow the XDG spec and support $XDG_CONFIG_HOME.
    That means, by default "~/.config/<AppName>".
    """
    if WINDOWS:
        return user_data_dir(appname, roaming=roaming)
    if sys.platform == "darwin":
        return user_data_dir(appname)
    # Linux/Unix: honour XDG_CONFIG_HOME, defaulting to ~/.config.
    base = os.getenv('XDG_CONFIG_HOME', expanduser("~/.config"))
    return os.path.join(base, appname)


# for the discussion regarding site_config_dirs locations
# see <https://github.com/pypa/pip/issues/1733>
def site_config_dirs(appname):
    r"""Return a list of potential user-shared config dirs for this application.

        "appname" is the name of application.

    Typical user config directories are:
        macOS:      /Library/Application Support/<AppName>/
        Unix:       /etc or $XDG_CONFIG_DIRS[i]/<AppName>/ for each value in
                    $XDG_CONFIG_DIRS
        Win XP:     C:\Documents and Settings\All Users\Application ...
                    ...Data\<AppName>\
        Vista:      (Fail! "C:\ProgramData" is a hidden *system* directory
                    on Vista.)
        Win 7:      Hidden, but writeable on Win 7:
                    C:\ProgramData\<AppName>\
    """
    if WINDOWS:
        base = os.path.normpath(_get_win_folder("CSIDL_COMMON_APPDATA"))
        return [os.path.join(base, appname)]
    if sys.platform == 'darwin':
        return [os.path.join('/Library/Application Support', appname)]

    # Unix: every entry of $XDG_CONFIG_DIRS (an empty value disables
    # the XDG list entirely), then /etc directly as a last resort.
    xdg_dirs = os.getenv('XDG_CONFIG_DIRS', '/etc/xdg')
    if xdg_dirs:
        dirs = [
            os.path.join(expanduser(entry), appname)
            for entry in xdg_dirs.split(os.pathsep)
        ]
    else:
        dirs = []
    dirs.append('/etc')
    return dirs


# -- Windows support functions --

def _get_win_folder_from_registry(csidl_name):
    """Resolve a CSIDL_* name to a folder path via the Windows registry.

    This is a fallback technique at best: using the registry is not
    guaranteed to give the correct answer for all CSIDL_* names.
    """
    import _winreg

    value_name = {
        "CSIDL_APPDATA": "AppData",
        "CSIDL_COMMON_APPDATA": "Common AppData",
        "CSIDL_LOCAL_APPDATA": "Local AppData",
    }[csidl_name]

    shell_folders_key = _winreg.OpenKey(
        _winreg.HKEY_CURRENT_USER,
        r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders"
    )
    directory, _value_type = _winreg.QueryValueEx(
        shell_folders_key, value_name)
    return directory


def _get_win_folder_with_ctypes(csidl_name):
    """Resolve a CSIDL_* name to a folder path via SHGetFolderPathW."""
    csidl_const = {
        "CSIDL_APPDATA": 26,
        "CSIDL_COMMON_APPDATA": 35,
        "CSIDL_LOCAL_APPDATA": 28,
    }[csidl_name]

    buf = ctypes.create_unicode_buffer(1024)
    ctypes.windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf)

    # Downgrade to the short path name if the value has characters
    # above 255. See
    # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
    if any(ord(ch) > 255 for ch in buf):
        short_buf = ctypes.create_unicode_buffer(1024)
        if ctypes.windll.kernel32.GetShortPathNameW(buf.value,
                                                    short_buf, 1024):
            buf = short_buf

    return buf.value

# Pick the Windows folder resolver at import time: prefer the ctypes
# implementation and fall back to the registry-based one when ctypes is
# unavailable.  (_get_win_folder is only defined — and only needed — on
# Windows.)
if WINDOWS:
    try:
        import ctypes
        _get_win_folder = _get_win_folder_with_ctypes
    except ImportError:
        _get_win_folder = _get_win_folder_from_registry


def _win_path_to_bytes(path):
    """Encode Windows paths to bytes. Only used on Python 2.

    Motivation is to be consistent with other operating systems where paths
    are also returned as bytes. This avoids problems mixing bytes and Unicode
    elsewhere in the codebase. For more details and discussion see
    <https://github.com/pypa/pip/issues/3463>.

    If encoding using ASCII and MBCS fails, return the original Unicode path.
    """
    for encoding in ('ASCII', 'MBCS'):
        try:
            return path.encode(encoding)
        except (UnicodeEncodeError, LookupError):
            pass
    return path
utils/ui.py000064400000026515151733136510006711 0ustar00from __future__ import absolute_import
from __future__ import division

import itertools
import sys
from signal import signal, SIGINT, default_int_handler
import time
import contextlib
import logging

from pip.compat import WINDOWS
from pip.utils import format_size
from pip.utils.logging import get_indentation
from pip._vendor import six
from pip._vendor.progress.bar import Bar, IncrementalBar
from pip._vendor.progress.helpers import (WritelnMixin,
                                          HIDE_CURSOR, SHOW_CURSOR)
from pip._vendor.progress.spinner import Spinner

try:
    from pip._vendor import colorama
# Lots of different errors can come from this, including SystemError and
# ImportError.
except Exception:
    colorama = None

logger = logging.getLogger(__name__)


def _select_progress_class(preferred, fallback):
    """Return *preferred* only when its output file's encoding can
    represent every character the bar draws; otherwise *fallback*."""
    encoding = getattr(preferred.file, "encoding", None)

    # Unknown encoding: assume no unicode support and use the ASCII bar.
    if not encoding:
        return fallback

    # Gather every character the preferred bar may emit.
    glyphs = [
        getattr(preferred, "empty_fill", six.text_type()),
        getattr(preferred, "fill", six.text_type()),
    ]
    glyphs.extend(getattr(preferred, "phases", []))

    # If all of them survive a round trip through the file's encoding,
    # the fancier bar is safe; otherwise fall back to plaintext.
    try:
        six.text_type().join(glyphs).encode(encoding)
    except UnicodeEncodeError:
        return fallback
    return preferred


# Use IncrementalBar when stdout's encoding can render its glyphs,
# otherwise the plain ASCII Bar (see _select_progress_class above).
_BaseBar = _select_progress_class(IncrementalBar, Bar)


class InterruptibleMixin(object):
    """
    Ensure that self.finish() gets called on keyboard interrupt.

    This lets downloads be interrupted without leaving temporary state
    (like hidden cursors) behind.

    The progress library ships a similar SigIntMixin helper, but as of
    version 1.2 that helper has the following problems:

    1. It calls sys.exit().
    2. It discards the existing SIGINT handler completely.
    3. It leaves its own handler in place even after an uninterrupted
       finish, which can fire with unexpected delayed effects if the
       user presses Ctrl-C some time after a progress-displaying
       download has already completed, for example.
    """

    def __init__(self, *args, **kwargs):
        """Install our SIGINT handler, remembering the previous one."""
        super(InterruptibleMixin, self).__init__(*args, **kwargs)

        self.original_handler = signal(SIGINT, self.handle_sigint)

        # signal() returns None when the previous handler was not
        # installed from Python; such a handler cannot be restored, so
        # fall back to something sensible: Python's default SIGINT
        # handler, which just raises KeyboardInterrupt.
        if self.original_handler is None:
            self.original_handler = default_int_handler

    def finish(self):
        """
        Restore the original SIGINT handler after finishing.

        This happens whether the progress display finishes normally or
        gets interrupted.
        """
        super(InterruptibleMixin, self).finish()
        signal(SIGINT, self.original_handler)

    def handle_sigint(self, signum, frame):
        """
        Call self.finish() before delegating to the original handler.

        This handler is only installed while the progress display is
        active.
        """
        self.finish()
        self.original_handler(signum, frame)


class DownloadProgressMixin(object):
    """Add download-oriented fields (bytes downloaded, speed, ETA) to a
    progress display, indented to the current logging indentation."""

    def __init__(self, *args, **kwargs):
        super(DownloadProgressMixin, self).__init__(*args, **kwargs)
        indent = " " * (get_indentation() + 2)
        self.message = indent + self.message

    @property
    def downloaded(self):
        # Human-readable rendering of the current progress index.
        return format_size(self.index)

    @property
    def download_speed(self):
        # self.avg == 0.0 means no data yet; avoid dividing by zero.
        # (presumably avg is seconds per byte, so speed is its inverse
        # — inherited from the progress library.)
        if self.avg == 0.0:
            return "..."
        return format_size(1 / self.avg) + "/s"

    @property
    def pretty_eta(self):
        return "eta %s" % self.eta_td if self.eta else ""

    def iter(self, it, n=1):
        """Yield from *it*, advancing the display by *n* per item and
        finishing it when the iterable is exhausted."""
        for item in it:
            yield item
            self.next(n)
        self.finish()


class WindowsMixin(object):
    """Adapt a progress display to Windows consoles via colorama."""

    def __init__(self, *args, **kwargs):
        # The Windows terminal does not support the hide/show cursor ANSI codes
        # even with colorama. So we'll ensure that hide_cursor is False on
        # Windows.
        # This call needs to go before the super() call, so that hide_cursor
        # is set in time. The base progress bar class writes the "hide cursor"
        # code to the terminal in its init, so if we don't set this soon
        # enough, we get a "hide" with no corresponding "show"...
        if WINDOWS and self.hide_cursor:
            self.hide_cursor = False

        super(WindowsMixin, self).__init__(*args, **kwargs)

        # Check if we are running on Windows and we have the colorama module,
        # if we do then wrap our file with it.
        if WINDOWS and colorama:
            self.file = colorama.AnsiToWin32(self.file)
            # The progress code expects to be able to call self.file.isatty()
            # but the colorama.AnsiToWin32() object doesn't have that, so we'll
            # add it.
            self.file.isatty = lambda: self.file.wrapped.isatty()
            # The progress code expects to be able to call self.file.flush()
            # but the colorama.AnsiToWin32() object doesn't have that, so we'll
            # add it.
            self.file.flush = lambda: self.file.wrapped.flush()


class DownloadProgressBar(WindowsMixin, InterruptibleMixin,
                          DownloadProgressMixin, _BaseBar):
    """Determinate progress bar used when the download size is known.

    ``message`` and ``suffix`` are %%-templates expanded by the progress
    library against this object's attributes/properties.
    """

    file = sys.stdout
    message = "%(percent)d%%"
    suffix = "%(downloaded)s %(download_speed)s %(pretty_eta)s"


class DownloadProgressSpinner(WindowsMixin, InterruptibleMixin,
                              DownloadProgressMixin, WritelnMixin, Spinner):
    """Indeterminate spinner shown when the total download size is unknown."""

    file = sys.stdout
    suffix = "%(downloaded)s %(download_speed)s"

    def next_phase(self):
        """Return the next spinner glyph, cycling forever over ``phases``."""
        try:
            phaser = self._phaser
        except AttributeError:
            # Lazily create the cycle on first use.
            phaser = self._phaser = itertools.cycle(self.phases)
        return next(phaser)

    def update(self):
        """Render the current spinner line and write it to the terminal."""
        rendered_message = self.message % self
        rendered_suffix = self.suffix % self
        pieces = [
            rendered_message,
            " " if rendered_message else "",
            self.next_phase(),
            " " if rendered_suffix else "",
            rendered_suffix,
        ]
        self.writeln("".join(pieces))


################################################################
# Generic "something is happening" spinners
#
# We don't even try using progress.spinner.Spinner here because it's actually
# simpler to reimplement from scratch than to coerce their code into doing
# what we need.
################################################################

@contextlib.contextmanager
def hidden_cursor(file):
    """Context manager that hides the terminal cursor while it is active.

    The cursor-control codes are skipped entirely on Windows (the console
    doesn't understand them, even via colorama), when *file* isn't a tty,
    and when logging is quieter than INFO -- we don't want control
    characters in redirected output or under --quiet.
    See https://github.com/pypa/pip/issues/3418
    """
    use_control_codes = (
        not WINDOWS and
        file.isatty() and
        logger.getEffectiveLevel() <= logging.INFO
    )
    if not use_control_codes:
        yield
    else:
        file.write(HIDE_CURSOR)
        try:
            yield
        finally:
            file.write(SHOW_CURSOR)


class RateLimiter(object):
    """Wall-clock throttle: ``ready()`` reports whether at least the
    configured interval has elapsed since the last ``reset()``."""

    def __init__(self, min_update_interval_seconds):
        self._min_update_interval_seconds = min_update_interval_seconds
        self._last_update = 0

    def ready(self):
        """Return True once enough time has passed since the last reset."""
        elapsed = time.time() - self._last_update
        return elapsed >= self._min_update_interval_seconds

    def reset(self):
        """Record now as the moment of the most recent update."""
        self._last_update = time.time()


class InteractiveSpinner(object):
    """Spinner for interactive (tty) use: redraws an ASCII spinner in place,
    throttled to at most one repaint per rate-limit interval."""

    def __init__(self, message, file=None, spin_chars="-\\|/",
                 # Empirically, 8 updates/second looks nice
                 min_update_interval_seconds=0.125):
        self._message = message
        if file is None:
            file = sys.stdout
        self._file = file
        self._rate_limiter = RateLimiter(min_update_interval_seconds)
        self._finished = False

        self._spin_cycle = itertools.cycle(spin_chars)

        self._file.write(" " * get_indentation() + self._message + " ... ")
        # Width of the last status written, so _write() can erase it.
        self._width = 0

    def _write(self, status):
        assert not self._finished
        # Erase what we wrote before by backspacing to the beginning, writing
        # spaces to overwrite the old text, and then backspacing again
        backup = "\b" * self._width
        self._file.write(backup + " " * self._width + backup)
        # Now we have a blank slate to add our status
        self._file.write(status)
        self._width = len(status)
        self._file.flush()
        self._rate_limiter.reset()

    def spin(self):
        # Advance the spinner one glyph, unless finished or rate-limited.
        if self._finished:
            return
        if not self._rate_limiter.ready():
            return
        self._write(next(self._spin_cycle))

    def finish(self, final_status):
        # Replace the spinner with *final_status* and move to a fresh line.
        if self._finished:
            return
        self._write(final_status)
        self._file.write("\n")
        self._file.flush()
        self._finished = True


# Used for dumb terminals, non-interactive installs (no tty), etc.
# We still print updates occasionally (once every 60 seconds by default) to
# act as a keep-alive for systems like Travis-CI that take lack-of-output as
# an indication that a task has frozen.
class NonInteractiveSpinner(object):
    """Spinner replacement for non-tty output: logs occasional status lines
    via the logging system instead of drawing on the terminal."""

    def __init__(self, message, min_update_interval_seconds=60):
        self._message = message
        self._finished = False
        self._rate_limiter = RateLimiter(min_update_interval_seconds)
        self._update("started")

    def _update(self, status):
        assert not self._finished
        self._rate_limiter.reset()
        logger.info("%s: %s", self._message, status)

    def spin(self):
        """Emit a rate-limited keep-alive line; no-op once finished."""
        if self._finished or not self._rate_limiter.ready():
            return
        self._update("still running...")

    def finish(self, final_status):
        """Log the final status exactly once."""
        if not self._finished:
            self._update("finished with status '%s'" % (final_status,))
            self._finished = True


@contextlib.contextmanager
def open_spinner(message):
    """Yield a spinner appropriate for the current environment.

    The spinner is finished with "done", "canceled" (KeyboardInterrupt), or
    "error" (any other exception) when the with-block exits.
    """
    # Interactive spinner goes directly to sys.stdout rather than being routed
    # through the logging system, but it acts like it has level INFO,
    # i.e. it's only displayed if we're at level INFO or better.
    # Non-interactive spinner goes through the logging system, so it is always
    # in sync with logging configuration.
    if sys.stdout.isatty() and logger.getEffectiveLevel() <= logging.INFO:
        spinner = InteractiveSpinner(message)
    else:
        spinner = NonInteractiveSpinner(message)
    try:
        with hidden_cursor(sys.stdout):
            yield spinner
    except KeyboardInterrupt:
        spinner.finish("canceled")
        raise
    except Exception:
        spinner.finish("error")
        raise
    else:
        spinner.finish("done")
utils/encoding.py000064400000001713151733136510010053 0ustar00import codecs
import locale
import re


# BOM marker -> codec name, consulted in order by auto_decode().  The names
# are canonical codec names understood by ``codecs.lookup``: the previous
# "utf16-be"-style spellings are not registered codec aliases, so decoding
# with them raised LookupError.
BOMS = [
    (codecs.BOM_UTF8, 'utf-8'),
    (codecs.BOM_UTF16, 'utf-16'),
    (codecs.BOM_UTF16_BE, 'utf-16-be'),
    (codecs.BOM_UTF16_LE, 'utf-16-le'),
    (codecs.BOM_UTF32, 'utf-32'),
    (codecs.BOM_UTF32_BE, 'utf-32-be'),
    (codecs.BOM_UTF32_LE, 'utf-32-le'),
]

# PEP 263 coding declaration, e.g. "# -*- coding: utf-8 -*-".  The raw bytes
# literal makes "\s" and "\w" genuine regex escapes instead of invalid
# string escape sequences (deprecated, and a future SyntaxError).
ENCODING_RE = re.compile(br'coding[:=]\s*([-\w.]+)')


def auto_decode(data):
    """Decode *data* (bytes) to text, auto-detecting the encoding.

    A BOM prefix wins; otherwise a PEP 263 coding declaration in one of the
    first two lines is honoured; otherwise fall back to
    locale.getpreferredencoding(False), mirroring Python 3's open().
    """
    for bom, encoding in BOMS:
        if data.startswith(bom):
            return data[len(bom):].decode(encoding)
    # No BOM: look for "# ... coding: xxx" in the first two lines (PEP 263).
    for line in data.split(b'\n')[:2]:
        if line[0:1] == b'#':
            match = ENCODING_RE.search(line)
            if match:
                declared = match.groups()[0].decode('ascii')
                return data.decode(declared)
    return data.decode(locale.getpreferredencoding(False))
utils/logging.py000064400000006377151733136510007726 0ustar00from __future__ import absolute_import

import contextlib
import logging
import logging.handlers
import os

try:
    import threading
except ImportError:
    import dummy_threading as threading

from pip.compat import WINDOWS
from pip.utils import ensure_dir

try:
    from pip._vendor import colorama
# Lots of different errors can come from this, including SystemError and
# ImportError.
except Exception:
    colorama = None


_log_state = threading.local()
_log_state.indentation = 0


@contextlib.contextmanager
def indent_log(num=2):
    """
    A context manager which will cause the log output to be indented for any
    log messages emitted inside it.

    :param num: number of spaces to add to the current indentation.
    """
    # ``_log_state`` is a threading.local, and its ``indentation`` attribute
    # is only pre-set in the thread that imported this module.  Initialise
    # through get_indentation() (which defaults to 0) so indent_log() works
    # in worker threads too instead of raising AttributeError on ``+=``.
    _log_state.indentation = get_indentation() + num
    try:
        yield
    finally:
        _log_state.indentation -= num


def get_indentation():
    """Return this thread's current log indentation (0 when unset)."""
    try:
        return _log_state.indentation
    except AttributeError:
        # threading.local attributes only exist in threads that set them.
        return 0


class IndentingFormatter(logging.Formatter):
    """Formatter that prefixes every output line with the indentation
    currently established by indent_log()."""

    def format(self, record):
        """Format *record* with the standard formatter, then indent each
        resulting line by the current indentation level."""
        prefix = " " * get_indentation()
        rendered = logging.Formatter.format(self, record)
        # splitlines(True) keeps the newlines so joining reassembles the
        # message exactly, just indented.
        return "".join(
            prefix + line for line in rendered.splitlines(True)
        )


def _color_wrap(*colors):
    """Return a function that surrounds its input with *colors* and a
    trailing colorama reset code."""
    def wrapped(inp):
        return "".join(colors) + inp + colorama.Style.RESET_ALL
    return wrapped


class ColorizedStreamHandler(logging.StreamHandler):
    """StreamHandler that colorizes WARNING/ERROR output when possible."""

    # Don't build up a list of colors if we don't have colorama
    if colorama:
        COLORS = [
            # This needs to be in order from highest logging level to lowest.
            (logging.ERROR, _color_wrap(colorama.Fore.RED)),
            (logging.WARNING, _color_wrap(colorama.Fore.YELLOW)),
        ]
    else:
        COLORS = []

    def __init__(self, stream=None):
        logging.StreamHandler.__init__(self, stream)

        # On Windows, route output through colorama so ANSI codes work.
        if WINDOWS and colorama:
            self.stream = colorama.AnsiToWin32(self.stream)

    def should_color(self):
        # Don't colorize things if we do not have colorama
        if not colorama:
            return False

        # Unwrap a colorama AnsiToWin32 proxy to reach the real stream.
        real_stream = (
            self.stream if not isinstance(self.stream, colorama.AnsiToWin32)
            else self.stream.wrapped
        )

        # If the stream is a tty we should color it
        if hasattr(real_stream, "isatty") and real_stream.isatty():
            return True

        # If we have an ANSI terminal (TERM=ANSI) we should color it
        if os.environ.get("TERM") == "ANSI":
            return True

        # If anything else we should not color it
        return False

    def format(self, record):
        # Apply the first (most severe) matching color to the whole message.
        msg = logging.StreamHandler.format(self, record)

        if self.should_color():
            for level, color in self.COLORS:
                if record.levelno >= level:
                    msg = color(msg)
                    break

        return msg


class BetterRotatingFileHandler(logging.handlers.RotatingFileHandler):
    """RotatingFileHandler that creates the log directory on demand."""

    def _open(self):
        # Make sure the parent directory exists before the base class tries
        # to open (and possibly create) the log file.
        ensure_dir(os.path.dirname(self.baseFilename))
        return logging.handlers.RotatingFileHandler._open(self)


class MaxLevelFilter(logging.Filter):
    """Logging filter that passes only records *below* a given level.

    Useful for sending e.g. INFO-and-lower to stdout while a second handler
    takes WARNING-and-higher.
    """

    def __init__(self, level):
        self.level = level

    def filter(self, record):
        """Return True for records strictly less severe than ``self.level``."""
        if record.levelno >= self.level:
            return False
        return True
utils/setuptools_build.py000064400000000426151733136510011665 0ustar00# Shim to wrap setup.py invocation with setuptools
# %%-template run as ``python -c (SETUPTOOLS_SHIM % setup_py_path)`` -- note
# the single %r placeholder for the setup.py path.  It imports setuptools
# before executing the project's setup.py so distutils-only scripts still
# produce setuptools-style behavior, and reads the file via tokenize.open
# when available (PEP 263 aware), normalizing CRLF line endings for exec().
SETUPTOOLS_SHIM = (
    "import setuptools, tokenize;__file__=%r;"
    "f=getattr(tokenize, 'open', open)(__file__);"
    "code=f.read().replace('\\r\\n', '\\n');"
    "f.close();"
    "exec(compile(code, __file__, 'exec'))"
)
utils/glibc.py000064400000005573151733136510007355 0ustar00from __future__ import absolute_import

import re
import ctypes
import platform
import warnings


def glibc_version_string():
    """Return the glibc version string, or None when not linked to glibc."""
    # ctypes.CDLL(None) is dlopen(NULL): per the dlopen manpage, "If filename
    # is NULL, then the returned handle is for the main program", so the
    # dynamic linker tells us which libc this very process is running on.
    main_program = ctypes.CDLL(None)
    version_fn = getattr(main_program, "gnu_get_libc_version", None)
    if version_fn is None:
        # The symbol is glibc-specific; its absence means some other libc.
        return None

    # gnu_get_libc_version() returns a C string like "2.5".
    version_fn.restype = ctypes.c_char_p
    result = version_fn()
    # py2 / py3 compatibility: normalise bytes to str.
    if not isinstance(result, str):
        result = result.decode("ascii")
    return result


# Separated out from have_compatible_glibc for easier unit testing
def check_glibc_version(version_str, required_major, minimum_minor):
    """Return True when *version_str* is ``required_major.x`` with
    ``x >= minimum_minor``.

    Only the leading "major.minor" is parsed, so vendor suffixes such as
    Linaro's "2.20-2014.11" are tolerated (see gh-3588).  A string without
    a parseable major.minor triggers a RuntimeWarning and returns False.
    """
    match = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str)
    if match is None:
        warnings.warn("Expected glibc version with 2 components major.minor,"
                      " got: %s" % version_str, RuntimeWarning)
        return False
    major = int(match.group("major"))
    minor = int(match.group("minor"))
    return major == required_major and minor >= minimum_minor


def have_compatible_glibc(required_major, minimum_minor):
    """True when this process runs on glibc with the required major version
    and at least the given minor version; False on non-glibc systems."""
    version_str = glibc_version_string()
    return (version_str is not None and
            check_glibc_version(version_str, required_major, minimum_minor))


# platform.libc_ver regularly returns completely nonsensical glibc
# versions. E.g. on my computer, platform says:
#
#   ~$ python2.7 -c 'import platform; print(platform.libc_ver())'
#   ('glibc', '2.7')
#   ~$ python3.5 -c 'import platform; print(platform.libc_ver())'
#   ('glibc', '2.9')
#
# But the truth is:
#
#   ~$ ldd --version
#   ldd (Debian GLIBC 2.22-11) 2.22
#
# This is unfortunate, because it means that the linehaul data on libc
# versions that was generated by pip 8.1.2 and earlier is useless and
# misleading. Solution: instead of using platform, use our code that actually
# works.
def libc_ver():
    """Return ("glibc", version) from the runtime linker when on glibc,
    otherwise fall back to platform.libc_ver().

    platform.libc_ver() inspects the interpreter binary rather than the
    process and regularly reports wrong glibc versions, so ask glibc itself
    first (see comment block above).
    """
    glibc_version = glibc_version_string()
    if glibc_version is not None:
        return ("glibc", glibc_version)
    return platform.libc_ver()
utils/packaging.py000064400000004040151733136510010205 0ustar00from __future__ import absolute_import

from email.parser import FeedParser

import logging
import sys

from pip._vendor.packaging import specifiers
from pip._vendor.packaging import version
from pip._vendor import pkg_resources

from pip import exceptions

logger = logging.getLogger(__name__)


def check_requires_python(requires_python):
    """Return True when the running interpreter satisfies *requires_python*.

    ``None`` (package supplied no information) counts as compatible.

    :raises specifiers.InvalidSpecifier: if *requires_python* is malformed.
    """
    if requires_python is None:
        # The package provides no information
        return True
    allowed = specifiers.SpecifierSet(requires_python)

    # Only major.minor.micro of the running interpreter is considered.
    running = version.parse(
        '.'.join(str(piece) for piece in sys.version_info[:3])
    )
    return running in allowed


def get_metadata(dist):
    """Return the distribution's metadata text (METADATA for dist-info
    distributions, otherwise PKG-INFO), or None when neither is present."""
    uses_dist_info = isinstance(dist, pkg_resources.DistInfoDistribution)
    if uses_dist_info and dist.has_metadata('METADATA'):
        return dist.get_metadata('METADATA')
    if dist.has_metadata('PKG-INFO'):
        return dist.get_metadata('PKG-INFO')
    return None


def check_dist_requires_python(dist):
    """Raise UnsupportedPythonVersion when *dist* declares a Requires-Python
    that the running interpreter does not satisfy.

    A malformed Requires-Python entry is logged and ignored; a distribution
    with no readable metadata is skipped entirely.
    """
    metadata = get_metadata(dist)
    if metadata is None:
        # Neither METADATA nor PKG-INFO is available.  Feeding None to
        # FeedParser.feed() would raise TypeError, and with no metadata
        # there is nothing to check anyway.
        return
    feed_parser = FeedParser()
    feed_parser.feed(metadata)
    pkg_info_dict = feed_parser.close()
    requires_python = pkg_info_dict.get('Requires-Python')
    try:
        if not check_requires_python(requires_python):
            raise exceptions.UnsupportedPythonVersion(
                "%s requires Python '%s' but the running Python is %s" % (
                    dist.project_name,
                    requires_python,
                    '.'.join(map(str, sys.version_info[:3])),)
            )
    except specifiers.InvalidSpecifier as e:
        # Don't fail the install over a bad specifier; just tell the user.
        logger.warning(
            "Package %s has an invalid Requires-Python entry %s - %s" % (
                dist.project_name, requires_python, e))
        return
utils/deprecation.py000064400000004270151733136520010564 0ustar00"""
A module that implements tooling to enable easy warnings about deprecations.
"""
from __future__ import absolute_import

import logging
import warnings


class PipDeprecationWarning(Warning):
    # Root of pip's deprecation-warning hierarchy; _showwarning below routes
    # subclasses of this through the "pip.deprecations" logger.
    pass


class Pending(object):
    # Marker mixin: the deprecation is still at least two releases away, so
    # it is logged as a warning rather than an error.
    pass


class RemovedInPip10Warning(PipDeprecationWarning):
    # Behaviour scheduled for removal in pip 10.
    pass


class RemovedInPip11Warning(PipDeprecationWarning, Pending):
    # Behaviour scheduled for removal in pip 11 (still pending).
    pass


class Python26DeprecationWarning(PipDeprecationWarning):
    # Warns that support for Python 2.6 is deprecated.
    pass


# Warnings <-> Logging Integration


_warnings_showwarning = None


def _showwarning(message, category, filename, lineno, file=None, line=None):
    """Replacement for warnings.showwarning installed by
    install_warning_logger(): routes PipDeprecationWarning through the
    logging system and delegates everything else to the original
    implementation saved in _warnings_showwarning."""
    if file is not None:
        # An explicit file was requested: defer to the stock behaviour.
        if _warnings_showwarning is not None:
            _warnings_showwarning(
                message, category, filename, lineno, file, line,
            )
    else:
        if issubclass(category, PipDeprecationWarning):
            # We use a specially named logger which will handle all of the
            # deprecation messages for pip.
            logger = logging.getLogger("pip.deprecations")

            # This is purposely using the % formatter here instead of letting
            # the logging module handle the interpolation. This is because we
            # want it to appear as if someone typed this entire message out.
            log_message = "DEPRECATION: %s" % message

            # PipDeprecationWarnings that are Pending still have at least 2
            # versions to go until they are removed so they can just be
            # warnings.  Otherwise, they will be removed in the very next
            # version of pip. We want these to be more obvious so we use the
            # ERROR logging level.
            if issubclass(category, Pending):
                logger.warning(log_message)
            else:
                logger.error(log_message)
        else:
            _warnings_showwarning(
                message, category, filename, lineno, file, line,
            )


def install_warning_logger():
    """Install _showwarning so PipDeprecationWarnings flow through logging."""
    global _warnings_showwarning

    # Make sure our deprecation warnings are displayed by default.
    warnings.simplefilter("default", PipDeprecationWarning, append=True)

    if _warnings_showwarning is None:
        # Remember the stock implementation so other warnings still work.
        _warnings_showwarning = warnings.showwarning
        warnings.showwarning = _showwarning
utils/outdated.py000064400000013545151733136520010105 0ustar00from __future__ import absolute_import

import datetime
import json
import logging
import os.path
import sys

from pip._vendor import lockfile
from pip._vendor.packaging import version as packaging_version

from pip.compat import total_seconds, WINDOWS
from pip.models import PyPI
from pip.locations import USER_CACHE_DIR, running_under_virtualenv
from pip.utils import ensure_dir, get_installed_version
from pip.utils.filesystem import check_path_owner


SELFCHECK_DATE_FMT = "%Y-%m-%dT%H:%M:%SZ"


logger = logging.getLogger(__name__)


class VirtualenvSelfCheckState(object):
    """Self-check state stored as pip-selfcheck.json inside the virtualenv."""

    def __init__(self):
        self.statefile_path = os.path.join(sys.prefix, "pip-selfcheck.json")

        # Best effort: a missing or corrupt state file just means no state.
        try:
            with open(self.statefile_path) as statefile:
                self.state = json.load(statefile)
        except (IOError, ValueError):
            self.state = {}

    def save(self, pypi_version, current_time):
        """Persist the latest PyPI version and the time of this check."""
        payload = {
            "last_check": current_time.strftime(SELFCHECK_DATE_FMT),
            "pypi_version": pypi_version,
        }
        with open(self.statefile_path, "w") as statefile:
            json.dump(
                payload,
                statefile,
                sort_keys=True,
                separators=(",", ":")
            )


class GlobalSelfCheckState(object):
    """Self-check state shared across environments: a single selfcheck.json
    in USER_CACHE_DIR, keyed by sys.prefix."""

    def __init__(self):
        self.statefile_path = os.path.join(USER_CACHE_DIR, "selfcheck.json")

        # Load the existing state
        try:
            with open(self.statefile_path) as statefile:
                self.state = json.load(statefile)[sys.prefix]
        except (IOError, ValueError, KeyError):
            # Missing, unreadable or corrupt state just means "no state".
            self.state = {}

    def save(self, pypi_version, current_time):
        # Check to make sure that we own the directory
        if not check_path_owner(os.path.dirname(self.statefile_path)):
            return

        # Now that we've ensured the directory is owned by this user, we'll go
        # ahead and make sure that all our directories are created.
        ensure_dir(os.path.dirname(self.statefile_path))

        # Attempt to write out our version check file
        with lockfile.LockFile(self.statefile_path):
            # Re-read under the lock so entries written by concurrent pip
            # runs for other prefixes are preserved.
            if os.path.exists(self.statefile_path):
                with open(self.statefile_path) as statefile:
                    state = json.load(statefile)
            else:
                state = {}

            state[sys.prefix] = {
                "last_check": current_time.strftime(SELFCHECK_DATE_FMT),
                "pypi_version": pypi_version,
            }

            with open(self.statefile_path, "w") as statefile:
                json.dump(state, statefile, sort_keys=True,
                          separators=(",", ":"))


def load_selfcheck_statefile():
    """Pick the self-check state store matching the current environment."""
    if running_under_virtualenv():
        return VirtualenvSelfCheckState()
    return GlobalSelfCheckState()


def pip_installed_by_pip():
    """Return True when the installed pip was itself installed by pip.

    Lets us suppress the upgrade hint when pip actually came from a system
    package manager (such as dnf on Fedora) instead.
    """
    import pkg_resources
    try:
        dist = pkg_resources.get_distribution('pip')
    except pkg_resources.DistributionNotFound:
        return False
    return (dist.has_metadata('INSTALLER') and
            'pip' in dist.get_metadata_lines('INSTALLER'))


def pip_version_check(session):
    """Check for an update for pip.

    Limit the frequency of checks to once per week. State is stored either in
    the active virtualenv or in the user's USER_CACHE_DIR keyed off the prefix
    of the pip script path.

    :param session: the pip session used for the PyPI JSON API request.
    """
    installed_version = get_installed_version("pip")
    if installed_version is None:
        # pip isn't installed as a distribution (e.g. running from a checkout)
        return

    pip_version = packaging_version.parse(installed_version)
    pypi_version = None

    try:
        state = load_selfcheck_statefile()

        current_time = datetime.datetime.utcnow()
        # Determine if we need to refresh the state
        if "last_check" in state.state and "pypi_version" in state.state:
            last_check = datetime.datetime.strptime(
                state.state["last_check"],
                SELFCHECK_DATE_FMT
            )
            # Reuse the cached answer if the last check was under a week ago.
            if total_seconds(current_time - last_check) < 7 * 24 * 60 * 60:
                pypi_version = state.state["pypi_version"]

        # Refresh the version if we need to or just see if we need to warn
        if pypi_version is None:
            resp = session.get(
                PyPI.pip_json_url,
                headers={"Accept": "application/json"},
            )
            resp.raise_for_status()
            # Pick the newest non-prerelease version listed on PyPI.
            pypi_version = [
                v for v in sorted(
                    list(resp.json()["releases"]),
                    key=packaging_version.parse,
                )
                if not packaging_version.parse(v).is_prerelease
            ][-1]

            # save that we've performed a check
            state.save(pypi_version, current_time)

        remote_version = packaging_version.parse(pypi_version)

        # Determine if our pypi_version is older
        if (pip_version < remote_version and
                pip_version.base_version != remote_version.base_version and
                pip_installed_by_pip()):
            # Advise "python -m pip" on Windows to avoid issues
            # with overwriting pip.exe.
            if WINDOWS:
                pip_cmd = "python -m pip"
            else:
                pip_cmd = "pip"
            logger.warning(
                "You are using pip version %s, however version %s is "
                "available.\nYou should consider upgrading via the "
                "'%s install --upgrade pip' command.",
                pip_version, pypi_version, pip_cmd
            )

    except Exception:
        # The version check is best-effort; never break the actual command.
        logger.debug(
            "There was an error checking the latest version of pip",
            exc_info=True,
        )
utils/hashes.py000064400000005462151733136520007546 0ustar00from __future__ import absolute_import

import hashlib

from pip.exceptions import HashMismatch, HashMissing, InstallationError
from pip.utils import read_chunks
from pip._vendor.six import iteritems, iterkeys, itervalues


# The recommended hash algo of the moment. Change this whenever the state of
# the art changes; it won't hurt backward compatibility.
FAVORITE_HASH = 'sha256'


# Names of hashlib algorithms allowed by the --hash option and ``pip hash``
# Currently, those are the ones at least as collision-resistant as sha256.
STRONG_HASHES = ['sha256', 'sha384', 'sha512']


class Hashes(object):
    """Computes several hashes over the same data in one pass and verifies
    the results against a set of known-good digests."""

    def __init__(self, hashes=None):
        """
        :param hashes: A dict mapping hash-algorithm names to lists of
            acceptable hex digests
        """
        self._allowed = hashes if hashes is not None else {}

    def check_against_chunks(self, chunks):
        """Feed every chunk to one hasher per allowed algorithm.

        Raise HashMismatch unless at least one resulting digest matches its
        allowed list.
        """
        hashers = {}
        for hash_name in iterkeys(self._allowed):
            try:
                hashers[hash_name] = hashlib.new(hash_name)
            except (ValueError, TypeError):
                raise InstallationError('Unknown hash name: %s' % hash_name)

        for chunk in chunks:
            for hasher in itervalues(hashers):
                hasher.update(chunk)

        for hash_name, hasher in iteritems(hashers):
            if hasher.hexdigest() in self._allowed[hash_name]:
                # One good digest is enough.
                return
        self._raise(hashers)

    def _raise(self, gots):
        raise HashMismatch(self._allowed, gots)

    def check_against_file(self, file):
        """Verify a file-like object's contents.

        Raise HashMismatch if nothing matches.
        """
        return self.check_against_chunks(read_chunks(file))

    def check_against_path(self, path):
        """Open *path* in binary mode and verify its contents."""
        with open(path, 'rb') as file:
            return self.check_against_file(file)

    def __nonzero__(self):
        """True when at least one known-good digest is registered."""
        return bool(self._allowed)

    def __bool__(self):
        # Python 3 spelling of __nonzero__.
        return self.__nonzero__()


class MissingHashes(Hashes):
    """A workalike for Hashes used when we're missing a hash for a requirement

    It computes the actual hash of the requirement and raises a HashMissing
    exception showing it to the user.

    """
    def __init__(self):
        """Don't offer the ``hashes`` kwarg."""
        # Pass our favorite hash in to generate a "gotten hash". With the
        # empty list, it will never match, so an error will always raise.
        super(MissingHashes, self).__init__(hashes={FAVORITE_HASH: []})

    def _raise(self, gots):
        # Report the digest we actually computed so the user can pin it.
        raise HashMissing(gots[FAVORITE_HASH].hexdigest())
vcs/mercurial.py000064400000006620151733136520007706 0ustar00from __future__ import absolute_import

import logging
import os
import tempfile

from pip.utils import display_path, rmtree
from pip.vcs import vcs, VersionControl
from pip.download import path_to_url
from pip._vendor.six.moves import configparser


logger = logging.getLogger(__name__)


class Mercurial(VersionControl):
    """Mercurial (hg) version-control backend for pip."""

    name = 'hg'
    dirname = '.hg'
    repo_name = 'clone'
    schemes = ('hg', 'hg+http', 'hg+https', 'hg+ssh', 'hg+static-http')

    def export(self, location):
        """Export the Hg repository at the url to the destination location"""
        temp_dir = tempfile.mkdtemp('-export', 'pip-')
        self.unpack(temp_dir)
        try:
            self.run_command(
                ['archive', location], show_stdout=False, cwd=temp_dir)
        finally:
            # Always remove the scratch clone, even if archiving failed.
            rmtree(temp_dir)

    def switch(self, dest, url, rev_options):
        """Repoint the checkout at ``dest`` to ``url`` and update it."""
        repo_config = os.path.join(dest, self.dirname, 'hgrc')
        config = configparser.SafeConfigParser()
        try:
            config.read(repo_config)
            config.set('paths', 'default', url)
            with open(repo_config, 'w') as config_file:
                config.write(config_file)
        except (OSError, configparser.NoSectionError) as exc:
            # Best effort: if hgrc can't be rewritten, warn and leave the
            # checkout untouched rather than failing the whole install.
            logger.warning(
                'Could not switch Mercurial repository to %s: %s', url, exc,
            )
        else:
            self.run_command(['update', '-q'] + rev_options, cwd=dest)

    def update(self, dest, rev_options):
        """Pull new changesets and update the working copy to rev_options."""
        self.run_command(['pull', '-q'], cwd=dest)
        self.run_command(['update', '-q'] + rev_options, cwd=dest)

    def obtain(self, dest):
        """Clone the repository into ``dest`` (if appropriate) and update
        the working copy to the requested revision, if any."""
        url, rev = self.get_url_rev()
        if rev:
            rev_options = [rev]
            rev_display = ' (to revision %s)' % rev
        else:
            rev_options = []
            rev_display = ''
        if self.check_destination(dest, url, rev_options, rev_display):
            logger.info(
                'Cloning hg %s%s to %s',
                url,
                rev_display,
                display_path(dest),
            )
            # --noupdate defers checkout so the explicit update below can
            # land on the requested revision in one step.
            self.run_command(['clone', '--noupdate', '-q', url, dest])
            self.run_command(['update', '-q'] + rev_options, cwd=dest)

    def get_url(self, location):
        """Return the checkout's default remote URL (``paths.default``),
        converted to a file: URL when it is a local path."""
        url = self.run_command(
            ['showconfig', 'paths.default'],
            show_stdout=False, cwd=location).strip()
        if self._is_local_repository(url):
            url = path_to_url(url)
        return url

    def get_revision(self, location):
        """Return the local revision number of the working copy parent."""
        current_revision = self.run_command(
            ['parents', '--template={rev}'],
            show_stdout=False, cwd=location).strip()
        return current_revision

    def get_revision_hash(self, location):
        """Return the full changeset hash of the working copy parent."""
        current_rev_hash = self.run_command(
            ['parents', '--template={node}'],
            show_stdout=False, cwd=location).strip()
        return current_rev_hash

    def get_src_requirement(self, dist, location):
        """Return an 'hg+<url>@<rev>#egg=<name>' requirement string for the
        checkout at ``location``, or None if it has no remote URL.

        Note: the empty-URL check must precede the 'hg+' prefixing;
        otherwise ``repo`` becomes the truthy string 'hg+' and the guard
        can never fire (previously yielded a malformed requirement).
        """
        repo = self.get_url(location)
        if not repo:
            return None
        if not repo.lower().startswith('hg:'):
            repo = 'hg+' + repo
        egg_project_name = dist.egg_name().split('-', 1)[0]
        current_rev_hash = self.get_revision_hash(location)
        return '%s@%s#egg=%s' % (repo, current_rev_hash, egg_project_name)

    def check_version(self, dest, rev_options):
        """Always assume the versions don't match"""
        return False

vcs.register(Mercurial)
vcs/__pycache__/git.cpython-36.pyc000064400000021350151733136520012767 0ustar003

�Pf,-�@s�ddlmZddlZddlZddlZddlmZddlm	Z	ddl
mZddl
m
ZddlmZddlmZmZddlmZmZejZejZeje�ZGd	d
�d
e�Zeje�dS)�)�absolute_importN)�samefile)�
BadCommand)�parse)�request)�display_path�rmtree)�vcs�VersionControlcs�eZdZdZdZdZd7Zd8�fd
d�	Zdd
�Zdd�Z	dd�Z
dd�Zdd�Zdd�Z
dd�Zdd�Zdd�Zd9dd �Zd!d"�Zd#d$�Zd%d&�Zd'd(�Zd)d*�Zd:d+d,�Zd-d.�Zd/d0�Z�fd1d2�Zd3d4�Ze�fd5d6��Z�ZS);�Git�gitz.git�clone�git+http�	git+https�git+ssh�git+git�git+fileNcs�|r�t|�\}}}}}|jd�r�|dt|jd���}	|	tj|�jdd�jd�}
t|||
||f�}|jd�d}|d|�t||d�||
||f�}t	t
|�j|f|�|�dS)N�file�/�\�+�)�urlsplit�endswith�len�lstrip�urllib_requestZurl2pathname�replace�
urlunsplit�find�superr�__init__)�self�url�args�kwargs�schemeZnetloc�pathZqueryZfragment�initial_slashes�newpathZ
after_plus)�	__class__��/usr/lib/python3.6/git.pyr! s

zGit.__init__cCsTd}|jdgdd�}|j|�r0|t|�d�}nd}dj|jd�dd��}t|�S)Nzgit version �versionF)�show_stdout��.�)�run_command�
startswithr�join�split�
parse_version)r"ZVERSION_PFXr-r+r+r,�get_git_version5s
zGit.get_git_versioncCsVtjdd�}|j|�z0|jd�s*|d}|jdddd|gd|d	�Wd
t|�Xd
S)z@Export the Git repository at the url to the destination locationz-exportzpip-rzcheckout-indexz-az-fz--prefixF)r.�cwdN)�tempfileZmkdtemp�unpackrr2r)r"�locationZtemp_dirr+r+r,�exportBs

z
Git.exportcCsL|j||�}d|}||kr&||gS||kr8||gStjd|�|SdS)z�Check the revision options before checkout to compensate that tags
        and branches may need origin/ as a prefix.
        Returns the SHA1 of the branch or tag if found.
        z	origin/%sz5Could not find a tag or branch '%s', assuming commit.N)�get_short_refs�logger�warning)r"�rev�dest�rev_optionsZ	revisionsZ
origin_revr+r+r,�check_rev_optionsOs

zGit.check_rev_optionscCs|j|�j|d�S)a

        Compare the current sha to the ref. ref may be a branch or tag name,
        but current rev will always point to a sha. This means that a branch
        or tag will never compare as True. So this ultimately only matches
        against exact shas.
        r)�get_revisionr3)r"rArBr+r+r,�
check_versioncszGit.check_versioncCs8|jdd|g|d�|jddg||d�|j|�dS)N�configzremote.origin.url)r8�checkoutz-q)r2�update_submodules)r"rAr#rBr+r+r,�switchlsz
Git.switchcCst|j�td�kr&|jdddg|d�n|jddg|d�|rN|j|d||�}|jdddg||d�|j|�dS)	Nz1.9.0Zfetchz-qz--tags)r8r�resetz--hard)r7r6r2rCrH)r"rArBr+r+r,�updatersz
Git.updatecCs�|j�\}}|r |g}d|}n
dg}d}|j||||�r�tjd||t|��|jdd||g�|r�|j|||�}|j||�s�|jddg||d�|j|�dS)	Nz (to %s)z
origin/masterr/zCloning %s%s to %sr
z-qrG)r8)	�get_url_revZcheck_destinationr>�inforr2rCrErH)r"rAr#r@rBZrev_displayr+r+r,�obtain�s"

z
Git.obtaincCsZ|jdddgd|d�}|j�}|d}x|D]}|jd�r,|}Pq,W|jd�d	}|j�S)
z+Return URL of the first remote encountered.rFz--get-regexpzremote\..*\.urlF)r.r8rzremote.origin.url � r)r2�
splitlinesr3r5�strip)r"r;ZremotesZfound_remoteZremoter#r+r+r,�get_url�s


zGit.get_urlcCs|jddgd|d�}|j�S)Nz	rev-parseZHEADF)r.r8)r2rQ)r"r;�current_revr+r+r,rD�szGit.get_revisionr/ccs�|jd|gd|d�}xl|jd�D]^}|jd�}|s4q y|jdd�\}}Wn"tk
rjtd|����YnX|j�|j�fVq Wd	S)
z4Yields tuples of (commit, ref) for branches and tagszshow-refF)r.r8�
�
rOrzunexpected show-ref line: N)r2r5�rstrip�
ValueErrorrQ)r"r;�pattern�output�line�commit�refr+r+r,�
get_full_refs�s


zGit.get_full_refscCs
|jd�S)Nz
refs/remotes/)r3)r"r\r+r+r,�
is_ref_remote�szGit.is_ref_remotecCs
|jd�S)Nzrefs/heads/)r3)r"r\r+r+r,�
is_ref_branch�szGit.is_ref_branchcCs
|jd�S)Nz
refs/tags/)r3)r"r\r+r+r,�
is_ref_tag�szGit.is_ref_tagcCs"t|j|�|j|�|j|�f�S)z0A ref is a commit sha if it is not anything else)�anyr^r_r`)r"r\r+r+r,�
is_ref_commit�szGit.is_ref_commitcCs
|j|�S)N)r=)r"r;r+r+r,�get_refs�szGit.get_refscCs�i}x~|j||�D]n\}}d}|j|�r:|td�d�}n6|j|�rV|td�d�}n|j|�rp|td�d�}|dk	r|||<qW|S)z=Return map of named refs (branches or tags) to commit hashes.Nz
refs/remotes/zrefs/heads/z
refs/tags/)r]r^rr_r`)r"r;rX�rvr[r\Zref_namer+r+r,r=�s


zGit.get_short_refscCs�|jddgd|d�j�}tjj|�s2tjj||�}tjj|d�}|}xBtjjtjj|d��s�|}tjj|�}||krFtj	d|�dSqFWt
||�r�dStjj||�S)	z:Return the relative path of setup.py to the git repo root.z	rev-parsez	--git-dirF)r.r8z..zsetup.pyzGCould not find setup.py for directory %s (tried all parent directories)N)r2rQ�osr'�isabsr4�exists�dirnamer>r?r�relpath)r"r;Zgit_dirZroot_dirZ
orig_locationZ
last_locationr+r+r,�_get_subdirectory�s"

zGit._get_subdirectorycCsr|j|�}|j�jd�s d|}|j�jdd�d}|s<dS|j|�}d|||f}|j|�}|rn|d|7}|S)Nzgit:zgit+�-rrz%s@%s#egg=%sz&subdirectory=)rR�lowerr3Zegg_namer5rDrj)r"Zdistr;ZrepoZegg_project_namerSZreqZsubdirectoryr+r+r,�get_src_requirement�s


zGit.get_src_requirementcsbd|jkrHd|jkst�|jjdd�|_tt|�j�\}}|jdd�}ntt|�j�\}}||fS)a;
        Prefixes stub URLs like 'user@hostname:user/repo.git' with 'ssh://'.
        That's required because although they use SSH they sometimes doesn't
        work with a ssh:// scheme (e.g. Github). But we need a scheme for
        parsing. Hence we remove it again afterwards and return it as a stub.
        z://zfile:zgit+z
git+ssh://zssh://r/)r#�AssertionErrorrr rrL)r"r#r@)r*r+r,rLs
zGit.get_url_revcCs6tjjtjj|d��sdS|jdddddg|d�dS)Nz.gitmodulesZ	submodulerKz--initz--recursivez-q)r8)rer'rgr4r2)r"r;r+r+r,rHs
zGit.update_submodulescsVtt|�j|�rdSy|�jdg|ddd�}|Stk
rPtjd|�dSXdS)NTz	rev-parseF�ignore)r8r.Z
on_returncodezKcould not determine if %s is under git control because git is not available)r r�controls_locationr2rr>�debug)�clsr;�r)r*r+r,rp$s
zGit.controls_location)rrrrrr)N)r/)r/)�__name__�
__module__�__qualname__�namerhZ	repo_nameZschemesr!r7r<rCrErIrKrNrRrDr]r^r_r`rbrcr=rjrmrLrH�classmethodrp�
__classcell__r+r+)r*r,rs4

	
	
r)Z
__future__rZloggingr9Zos.pathreZ
pip.compatrZpip.exceptionsrZpip._vendor.six.moves.urllibrZurllib_parserrZpip._vendor.packaging.versionr6Z	pip.utilsrrZpip.vcsr	r
rrZ	getLoggerrtr>r�registerr+r+r+r,�<module>s"
vcs/__pycache__/subversion.cpython-36.opt-1.pyc000064400000015517151733136520015352 0ustar003

�Pf�$�@s�ddlmZddlZddlZddlZddlmZddlm	Z	ddl
mZmZddl
mZddlmZmZejd�Zejd	�Zejd
�Zejd�Zejd�Zejd
�Zeje�ZGdd�de�Zdd�Zeje�dS)�)�absolute_importN)�parse)�Link)�rmtree�display_path)�
indent_log)�vcs�VersionControlz
url="([^"]+)"zcommitted-rev="(\d+)"z	URL: (.+)zRevision: (.+)z\s*revision="(\d+)"z<url>(.*)</url>cs�eZdZdZdZdZd"Zdd	�Zd
d�Zdd
�Z	dd�Z
dd�Zdd�Zdd�Z
�fdd�Zdd�Zdd�Zdd�Zdd�Zed d!��Z�ZS)#�
Subversion�svnz.svn�checkout�svn+ssh�svn+http�	svn+https�svn+svncCs�|jd|gdddid�}tj|�}|sFtjdt|��tjd|�dS|jd	�j�}t	j|�}|s�tjd
t|��tjd|�|dfS||jd	�fS)z/Returns (url, revision), where both are strings�infoFZLANG�C)�show_stdoutZ
extra_environz'Cannot determine URL of svn checkout %sz!Output that cannot be parsed: 
%sN�z,Cannot determine revision of svn checkout %s)NN)
�run_command�_svn_url_re�search�logger�warningr�debug�group�strip�_svn_revision_re)�self�location�output�match�url�r#� /usr/lib/python3.6/subversion.py�get_infos(



zSubversion.get_infocCst|j�\}}t||�}|j|�}tjd||�t��6tjj|�rJt	|�|j
dg|||gdd�WdQRXdS)z@Export the svn repository at the url to the destination locationz!Exporting svn repository %s to %s�exportF)rN)�get_url_rev�get_rev_options�remove_auth_from_urlrrr�os�path�existsrr)rrr"�rev�rev_optionsr#r#r$r&;s

zSubversion.exportcCs|jdg|||g�dS)N�switch)r)r�destr"r.r#r#r$r/JszSubversion.switchcCs|jdg||g�dS)N�update)r)rr0r.r#r#r$r1MszSubversion.updatecCst|j�\}}t||�}|j|�}|r.d|}nd}|j||||�rptjd||t|��|jddg|||g�dS)Nz (to revision %s)�zChecking out %s%s to %srz-q)r'r(r)Zcheck_destinationrrrr)rr0r"r-r.Zrev_displayr#r#r$�obtainPs



zSubversion.obtaincCsfx`|D]X}t|�j}|sqd|kr@dj|jd�dd��j�}n|}||jkr|jdd�dSqWdS)N�-r�#r���)r�egg_fragment�join�split�lower�key)r�distZdependency_linksr"r7r;r#r#r$�get_locationas


zSubversion.get_locationc
Cs�d}x�tj|�D]�\}}}|j|kr2g|dd�<q|j|j�tjj||jd�}tjj|�s^q|j|�\}}||kr~|d}	n |s�|j|	�r�g|dd�<qt	||�}qW|S)zR
        Return the maximum revision for all files under a given location
        rN�entries�/)
r*�walk�dirname�remover+r8r,�_get_svn_url_rev�
startswith�max)
rrZrevision�base�dirs�filesZ
entries_fnZdirurlZlocalrevZbase_urlr#r#r$�get_revisionos"

zSubversion.get_revisioncs,tt|�j�\}}|jd�r$d|}||fS)Nzssh://zsvn+)�superr
r'rD)rr"r-)�	__class__r#r$r'�s
zSubversion.get_url_revcCsV|}xBtjjtjj|d��sF|}tjj|�}||krtjd|�dSqW|j|�dS)Nzsetup.pyzGCould not find setup.py for directory %s (tried all parent directories)r)r*r+r,r8rArrrC)rrZ
orig_locationZ
last_locationr#r#r$�get_url�szSubversion.get_urlcCspddlm}tjj||jd�}tjj|�rHt|��}|j�}WdQRXnd}|j	d�sj|j	d�sj|j	d�r�t
ttj
|jd���}|dd=|dd	}d
d�|D�dg}n�|j	d�r�tj|�}|s�td
|��|jd�}dd�tj|�D�dg}n^y<|jdd|gdd�}	tj|	�jd�}dd�tj|	�D�}Wn |k
�rRdg}}YnX|�rdt|�}
nd}
||
fS)Nr)�InstallationErrorr>r2�8�9Z10z

�cSs,g|]$}t|�dkr|drt|d��qS)�	)�len�int)�.0�dr#r#r$�
<listcomp>�sz/Subversion._get_svn_url_rev.<locals>.<listcomp>z<?xmlzBadly formatted data: %rrcSsg|]}t|jd���qS)r)rSr)rT�mr#r#r$rV�srz--xmlF)rcSsg|]}t|jd���qS)r)rSr)rTrWr#r#r$rV�s)Zpip.exceptionsrMr*r+r8rAr,�open�readrD�list�map�str�
splitlinesr9�_svn_xml_url_rer�
ValueErrorr�_svn_rev_re�finditerr�_svn_info_xml_url_re�_svn_info_xml_rev_rerE)rrrMZentries_path�f�datar"Zrevsr!Zxmlr-r#r#r$rC�s>








zSubversion._get_svn_url_revcCsB|j|�}|dkrdS|j�jdd�d}|j|�}d|||fS)Nr4rrzsvn+%s@%s#egg=%s)rLZegg_namer9rI)rr<rZrepoZegg_project_namer-r#r#r$�get_src_requirement�s

zSubversion.get_src_requirementcCsdS)z&Always assume the versions don't matchFr#)rr0r.r#r#r$�
check_version�szSubversion.check_versioncCs>tj|�}|jjd�d}|j||j|j|jf}tj|�}|S)N�@rr6)	�urllib_parse�urlsplit�netlocr9�schemer+ZqueryZfragmentZ
urlunsplit)r"ZpurlZstripped_netlocZ
url_piecesZsurlr#r#r$r)�s


zSubversion.remove_auth_from_url)rr
rrr)�__name__�
__module__�__qualname__�namerAZ	repo_nameZschemesr%r&r/r1r3r=rIr'rLrCrfrg�staticmethodr)�
__classcell__r#r#)rKr$r
s",	r
cCs�|rd|g}ng}tj|�}t|d�r6|j|j}}nL|d}d|krz|jd�d}d|krn|jdd�\}}q�|d}}nd	\}}|r�|d|g7}|r�|d|g7}|S)
Nz-r�usernamerrhr�:z
--usernamez
--password)NN)rirj�hasattrrs�passwordr9)r"r-r.�rrsrvrkZauthr#r#r$r(�s$


r()Z
__future__rZloggingr*�reZpip._vendor.six.moves.urllibrriZ	pip.indexrZ	pip.utilsrrZpip.utils.loggingrZpip.vcsrr	�compiler^r`rrrcrbZ	getLoggerrmrr
r(�registerr#r#r#r$�<module>s&






Yvcs/__pycache__/git.cpython-36.opt-1.pyc000064400000021301151733136520013722 0ustar003

�Pf,-�@s�ddlmZddlZddlZddlZddlmZddlm	Z	ddl
mZddl
m
ZddlmZddlmZmZddlmZmZejZejZeje�ZGd	d
�d
e�Zeje�dS)�)�absolute_importN)�samefile)�
BadCommand)�parse)�request)�display_path�rmtree)�vcs�VersionControlcs�eZdZdZdZdZd7Zd8�fd
d�	Zdd
�Zdd�Z	dd�Z
dd�Zdd�Zdd�Z
dd�Zdd�Zdd�Zd9dd �Zd!d"�Zd#d$�Zd%d&�Zd'd(�Zd)d*�Zd:d+d,�Zd-d.�Zd/d0�Z�fd1d2�Zd3d4�Ze�fd5d6��Z�ZS);�Git�gitz.git�clone�git+http�	git+https�git+ssh�git+git�git+fileNcs�|r�t|�\}}}}}|jd�r�|dt|jd���}	|	tj|�jdd�jd�}
t|||
||f�}|jd�d}|d|�t||d�||
||f�}t	t
|�j|f|�|�dS)N�file�/�\�+�)�urlsplit�endswith�len�lstrip�urllib_requestZurl2pathname�replace�
urlunsplit�find�superr�__init__)�self�url�args�kwargs�schemeZnetloc�pathZqueryZfragment�initial_slashes�newpathZ
after_plus)�	__class__��/usr/lib/python3.6/git.pyr! s

zGit.__init__cCsTd}|jdgdd�}|j|�r0|t|�d�}nd}dj|jd�dd��}t|�S)Nzgit version �versionF)�show_stdout��.�)�run_command�
startswithr�join�split�
parse_version)r"ZVERSION_PFXr-r+r+r,�get_git_version5s
zGit.get_git_versioncCsVtjdd�}|j|�z0|jd�s*|d}|jdddd|gd|d	�Wd
t|�Xd
S)z@Export the Git repository at the url to the destination locationz-exportzpip-rzcheckout-indexz-az-fz--prefixF)r.�cwdN)�tempfileZmkdtemp�unpackrr2r)r"�locationZtemp_dirr+r+r,�exportBs

z
Git.exportcCsL|j||�}d|}||kr&||gS||kr8||gStjd|�|SdS)z�Check the revision options before checkout to compensate that tags
        and branches may need origin/ as a prefix.
        Returns the SHA1 of the branch or tag if found.
        z	origin/%sz5Could not find a tag or branch '%s', assuming commit.N)�get_short_refs�logger�warning)r"�rev�dest�rev_optionsZ	revisionsZ
origin_revr+r+r,�check_rev_optionsOs

zGit.check_rev_optionscCs|j|�j|d�S)a

        Compare the current sha to the ref. ref may be a branch or tag name,
        but current rev will always point to a sha. This means that a branch
        or tag will never compare as True. So this ultimately only matches
        against exact shas.
        r)�get_revisionr3)r"rArBr+r+r,�
check_versioncszGit.check_versioncCs8|jdd|g|d�|jddg||d�|j|�dS)N�configzremote.origin.url)r8�checkoutz-q)r2�update_submodules)r"rAr#rBr+r+r,�switchlsz
Git.switchcCst|j�td�kr&|jdddg|d�n|jddg|d�|rN|j|d||�}|jdddg||d�|j|�dS)	Nz1.9.0Zfetchz-qz--tags)r8r�resetz--hard)r7r6r2rCrH)r"rArBr+r+r,�updatersz
Git.updatecCs�|j�\}}|r |g}d|}n
dg}d}|j||||�r�tjd||t|��|jdd||g�|r�|j|||�}|j||�s�|jddg||d�|j|�dS)	Nz (to %s)z
origin/masterr/zCloning %s%s to %sr
z-qrG)r8)	�get_url_revZcheck_destinationr>�inforr2rCrErH)r"rAr#r@rBZrev_displayr+r+r,�obtain�s"

z
Git.obtaincCsZ|jdddgd|d�}|j�}|d}x|D]}|jd�r,|}Pq,W|jd�d	}|j�S)
z+Return URL of the first remote encountered.rFz--get-regexpzremote\..*\.urlF)r.r8rzremote.origin.url � r)r2�
splitlinesr3r5�strip)r"r;ZremotesZfound_remoteZremoter#r+r+r,�get_url�s


zGit.get_urlcCs|jddgd|d�}|j�S)Nz	rev-parseZHEADF)r.r8)r2rQ)r"r;�current_revr+r+r,rD�szGit.get_revisionr/ccs�|jd|gd|d�}xl|jd�D]^}|jd�}|s4q y|jdd�\}}Wn"tk
rjtd|����YnX|j�|j�fVq Wd	S)
z4Yields tuples of (commit, ref) for branches and tagszshow-refF)r.r8�
�
rOrzunexpected show-ref line: N)r2r5�rstrip�
ValueErrorrQ)r"r;�pattern�output�line�commit�refr+r+r,�
get_full_refs�s


zGit.get_full_refscCs
|jd�S)Nz
refs/remotes/)r3)r"r\r+r+r,�
is_ref_remote�szGit.is_ref_remotecCs
|jd�S)Nzrefs/heads/)r3)r"r\r+r+r,�
is_ref_branch�szGit.is_ref_branchcCs
|jd�S)Nz
refs/tags/)r3)r"r\r+r+r,�
is_ref_tag�szGit.is_ref_tagcCs"t|j|�|j|�|j|�f�S)z0A ref is a commit sha if it is not anything else)�anyr^r_r`)r"r\r+r+r,�
is_ref_commit�szGit.is_ref_commitcCs
|j|�S)N)r=)r"r;r+r+r,�get_refs�szGit.get_refscCs�i}x~|j||�D]n\}}d}|j|�r:|td�d�}n6|j|�rV|td�d�}n|j|�rp|td�d�}|dk	r|||<qW|S)z=Return map of named refs (branches or tags) to commit hashes.Nz
refs/remotes/zrefs/heads/z
refs/tags/)r]r^rr_r`)r"r;rX�rvr[r\Zref_namer+r+r,r=�s


zGit.get_short_refscCs�|jddgd|d�j�}tjj|�s2tjj||�}tjj|d�}|}xBtjjtjj|d��s�|}tjj|�}||krFtj	d|�dSqFWt
||�r�dStjj||�S)	z:Return the relative path of setup.py to the git repo root.z	rev-parsez	--git-dirF)r.r8z..zsetup.pyzGCould not find setup.py for directory %s (tried all parent directories)N)r2rQ�osr'�isabsr4�exists�dirnamer>r?r�relpath)r"r;Zgit_dirZroot_dirZ
orig_locationZ
last_locationr+r+r,�_get_subdirectory�s"

zGit._get_subdirectorycCsr|j|�}|j�jd�s d|}|j�jdd�d}|s<dS|j|�}d|||f}|j|�}|rn|d|7}|S)Nzgit:zgit+�-rrz%s@%s#egg=%sz&subdirectory=)rR�lowerr3Zegg_namer5rDrj)r"Zdistr;ZrepoZegg_project_namerSZreqZsubdirectoryr+r+r,�get_src_requirement�s


zGit.get_src_requirementcsTd|jkr:|jjdd�|_tt|�j�\}}|jdd�}ntt|�j�\}}||fS)a;
        Prefixes stub URLs like 'user@hostname:user/repo.git' with 'ssh://'.
        That's required because although they use SSH they sometimes doesn't
        work with a ssh:// scheme (e.g. Github). But we need a scheme for
        parsing. Hence we remove it again afterwards and return it as a stub.
        z://zgit+z
git+ssh://zssh://r/)r#rr rrL)r"r#r@)r*r+r,rLs
zGit.get_url_revcCs6tjjtjj|d��sdS|jdddddg|d�dS)Nz.gitmodulesZ	submodulerKz--initz--recursivez-q)r8)rer'rgr4r2)r"r;r+r+r,rHs
zGit.update_submodulescsVtt|�j|�rdSy|�jdg|ddd�}|Stk
rPtjd|�dSXdS)NTz	rev-parseF�ignore)r8r.Z
on_returncodezKcould not determine if %s is under git control because git is not available)r r�controls_locationr2rr>�debug)�clsr;�r)r*r+r,ro$s
zGit.controls_location)rrrrrr)N)r/)r/)�__name__�
__module__�__qualname__�namerhZ	repo_nameZschemesr!r7r<rCrErIrKrNrRrDr]r^r_r`rbrcr=rjrmrLrH�classmethodro�
__classcell__r+r+)r*r,rs4

	
	
r)Z
__future__rZloggingr9Zos.pathreZ
pip.compatrZpip.exceptionsrZpip._vendor.six.moves.urllibrZurllib_parserrZpip._vendor.packaging.versionr6Z	pip.utilsrrZpip.vcsr	r
rrZ	getLoggerrsr>r�registerr+r+r+r,�<module>s"
vcs/__pycache__/mercurial.cpython-36.opt-1.pyc000064400000006737151733136520015142 0ustar003

�Pf�
�@s�ddlmZddlZddlZddlZddlmZmZddlm	Z	m
Z
ddlmZddl
mZeje�ZGdd�de
�Ze	je�dS)	�)�absolute_importN)�display_path�rmtree)�vcs�VersionControl)�path_to_url)�configparserc@sdeZdZdZdZdZdZdd	�Zd
d�Zdd
�Z	dd�Z
dd�Zdd�Zdd�Z
dd�Zdd�ZdS)�	Mercurial�hgz.hg�clone�hg+http�hg+https�hg+ssh�hg+static-httpcCs>tjdd�}|j|�z|jd|gd|d�Wdt|�XdS)z?Export the Hg repository at the url to the destination locationz-exportzpip-�archiveF)�show_stdout�cwdN)�tempfileZmkdtemp�unpack�run_commandr)�self�locationZtemp_dir�r�/usr/lib/python3.6/mercurial.py�exports
zMercurial.exportcCs�tjj||jd�}tj�}y<|j|�|jdd|�t|d��}|j	|�WdQRXWn6t
tjfk
r�}ztj
d||�WYdd}~XnX|jddg||d�dS)	NZhgrc�paths�default�wz/Could not switch Mercurial repository to %s: %s�updatez-q)r)�os�path�join�dirnamerZSafeConfigParser�read�set�open�write�OSErrorZNoSectionError�loggerZwarningr)r�dest�url�rev_optionsZrepo_config�configZconfig_file�excrrr�switch s
zMercurial.switchcCs,|jddg|d�|jddg||d�dS)NZpullz-q)rr)r)rr)r+rrrr/szMercurial.updatecCsz|j�\}}|r |g}d|}ng}d}|j||||�rvtjd||t|��|jddd||g�|jddg||d�dS)	Nz (to revision %s)�zCloning hg %s%s to %srz
--noupdatez-qr)r)Zget_url_revZcheck_destinationr(�inforr)rr)r*Zrevr+Zrev_displayrrr�obtain3s

zMercurial.obtaincCs2|jddgd|d�j�}|j|�r*t|�}|j�S)NZ
showconfigz
paths.defaultF)rr)r�stripZ_is_local_repositoryr)rrr*rrr�get_urlEs
zMercurial.get_urlcCs|jddgd|d�j�}|S)N�parentsz--template={rev}F)rr)rr2)rrZcurrent_revisionrrr�get_revisionMszMercurial.get_revisioncCs|jddgd|d�j�}|S)Nr4z--template={node}F)rr)rr2)rr�current_rev_hashrrr�get_revision_hashSszMercurial.get_revision_hashcCsT|j|�}|j�jd�s d|}|j�jdd�d}|s<dS|j|�}d|||fS)Nzhg:zhg+�-�rz%s@%s#egg=%s)r3�lower�
startswithZegg_name�splitr7)rZdistrZrepoZegg_project_namer6rrr�get_src_requirementYs

zMercurial.get_src_requirementcCsdS)z&Always assume the versions don't matchFr)rr)r+rrr�
check_versioncszMercurial.check_versionN)r
rr
rr)�__name__�
__module__�__qualname__�namer"Z	repo_nameZschemesrr.rr1r3r5r7r=r>rrrrr	s

r	)Z
__future__rZloggingrrZ	pip.utilsrrZpip.vcsrrZpip.downloadrZpip._vendor.six.movesrZ	getLoggerr?r(r	�registerrrrr�<module>s
Wvcs/__pycache__/bazaar.cpython-36.opt-1.pyc000064400000007066151733136520014413 0ustar003

�Pf��@s�ddlmZddlZddlZddlZyddlmZWnek
rPddl	ZYnXddl
mZmZddl
mZmZddlmZeje�ZGdd�de�Zeje�dS)	�)�absolute_importN)�parse)�rmtree�display_path)�vcs�VersionControl)�path_to_urlcszeZdZdZdZdZdZd �fdd�	Zd
d�Zdd�Z	dd�Z
dd�Z�fdd�Zdd�Z
dd�Zdd�Zdd�Z�ZS)!�Bazaar�bzrz.bzr�branch�bzr+http�	bzr+https�bzr+ssh�bzr+sftp�bzr+ftp�bzr+lpNcsDtt|�j|f|�|�ttdd�r@tjjdg�tjjdg�dS)N�
uses_fragmentZlp)�superr	�__init__�getattr�urllib_parser�extendZnon_hierarchical)�self�url�args�kwargs)�	__class__��/usr/lib/python3.6/bazaar.pyrszBazaar.__init__cCsRtjdd�}|j|�tjj|�r*t|�z|jd|g|dd�Wdt|�XdS)zU
        Export the Bazaar repository at the url to the destination location
        z-exportzpip-�exportF)�cwd�show_stdoutN)�tempfileZmkdtemp�unpack�os�path�existsr�run_command)r�locationZtemp_dirrrrr&s
z
Bazaar.exportcCs|jd|g|d�dS)N�switch)r )r')r�destr�rev_optionsrrrr)5sz
Bazaar.switchcCs|jddg||d�dS)NZpullz-q)r )r')rr*r+rrr�update8sz
Bazaar.updatecCsl|j�\}}|r"d|g}d|}ng}d}|j||||�rhtjd||t|��|jddg|||g�dS)Nz-rz (to revision %s)�zChecking out %s%s to %srz-q)�get_url_revZcheck_destination�logger�inforr')rr*r�revr+Zrev_displayrrr�obtain;s

z
Bazaar.obtaincs,tt|�j�\}}|jd�r$d|}||fS)Nzssh://zbzr+)rr	r.�
startswith)rrr1)rrrr.Ls
zBazaar.get_url_revcCsl|jdgd|d�}xT|j�D]H}|j�}x:dD]2}|j|�r.|j|�d}|j|�r\t|�S|Sq.WqWdS)Nr0F)r!r �checkout of branch: �parent branch: �)r4r5)r'�
splitlines�stripr3�splitZ_is_local_repositoryr)rr(Zurls�line�x�reporrr�get_urlSs

zBazaar.get_urlcCs|jdgd|d�}|j�dS)NZrevnoF)r!r r6���)r'r7)rr(Zrevisionrrr�get_revision`szBazaar.get_revisioncCsT|j|�}|sdS|j�jd�s(d|}|j�jdd�d}|j|�}d|||fS)Nzbzr:zbzr+�-r6rz%s@%s#egg=%s)r=�lowerr3Zegg_namer9r?)rZdistr(r<Zegg_project_nameZcurrent_revrrr�get_src_requirementes

zBazaar.get_src_requirementcCsdS)z&Always assume the versions don't matchFr)rr*r+rrr�
check_versionoszBazaar.check_version)r
rr
rrrr)N)�__name__�
__module__�__qualname__�name�dirnameZ	repo_nameZschemesrrr)r,r2r.r=r?rBrC�
__classcell__rr)rrr	s

r	)Z
__future__rZloggingr$r"Zurllibrr�ImportErrorZurlparseZ	pip.utilsrrZpip.vcsrrZpip.downloadrZ	getLoggerrDr/r	�registerrrrr�<module>s
_vcs/__pycache__/subversion.cpython-36.pyc000064400000015662151733136520014414 0ustar003

�Pf�$�@s�ddlmZddlZddlZddlZddlmZddlm	Z	ddl
mZmZddl
mZddlmZmZejd�Zejd	�Zejd
�Zejd�Zejd�Zejd
�Zeje�ZGdd�de�Zdd�Zeje�dS)�)�absolute_importN)�parse)�Link)�rmtree�display_path)�
indent_log)�vcs�VersionControlz
url="([^"]+)"zcommitted-rev="(\d+)"z	URL: (.+)zRevision: (.+)z\s*revision="(\d+)"z<url>(.*)</url>cs�eZdZdZdZdZd"Zdd	�Zd
d�Zdd
�Z	dd�Z
dd�Zdd�Zdd�Z
�fdd�Zdd�Zdd�Zdd�Zdd�Zed d!��Z�ZS)#�
Subversion�svnz.svn�checkout�svn+ssh�svn+http�	svn+https�svn+svncCs�|jd�j|j�s td|��|jd|gdddid�}tj|�}|sftjdt	|��tj
d	|�d
S|jd�j�}t
j|�}|s�tjdt	|��tj
d	|�|d
fS||jd�fS)z/Returns (url, revision), where both are strings�/zBad directory: %s�infoFZLANG�C)�show_stdoutZ
extra_environz'Cannot determine URL of svn checkout %sz!Output that cannot be parsed: 
%sN�z,Cannot determine revision of svn checkout %s)NN)�rstrip�endswith�dirname�AssertionError�run_command�_svn_url_re�search�logger�warningr�debug�group�strip�_svn_revision_re)�self�location�output�match�url�r(� /usr/lib/python3.6/subversion.py�get_infos,




zSubversion.get_infocCst|j�\}}t||�}|j|�}tjd||�t��6tjj|�rJt	|�|j
dg|||gdd�WdQRXdS)z@Export the svn repository at the url to the destination locationz!Exporting svn repository %s to %s�exportF)rN)�get_url_rev�get_rev_options�remove_auth_from_urlrrr�os�path�existsrr)r#r$r'�rev�rev_optionsr(r(r)r+;s

zSubversion.exportcCs|jdg|||g�dS)N�switch)r)r#�destr'r3r(r(r)r4JszSubversion.switchcCs|jdg||g�dS)N�update)r)r#r5r3r(r(r)r6MszSubversion.updatecCst|j�\}}t||�}|j|�}|r.d|}nd}|j||||�rptjd||t|��|jddg|||g�dS)Nz (to revision %s)�zChecking out %s%s to %srz-q)r,r-r.Zcheck_destinationrrrr)r#r5r'r2r3Zrev_displayr(r(r)�obtainPs



zSubversion.obtaincCsfx`|D]X}t|�j}|sqd|kr@dj|jd�dd��j�}n|}||jkr|jdd�dSqWdS)N�-r�#r���)r�egg_fragment�join�split�lower�key)r#�distZdependency_linksr'r<r@r(r(r)�get_locationas


zSubversion.get_locationc
Cs�d}x�tj|�D]�\}}}|j|kr2g|dd�<q|j|j�tjj||jd�}tjj|�s^q|j|�\}}||kr~|d}	n |s�|j|	�r�g|dd�<qt	||�}qW|S)zR
        Return the maximum revision for all files under a given location
        rN�entriesr)
r/�walkr�remover0r=r1�_get_svn_url_rev�
startswith�max)
r#r$Zrevision�base�dirs�filesZ
entries_fnZdirurlZlocalrevZbase_urlr(r(r)�get_revisionos"

zSubversion.get_revisioncs,tt|�j�\}}|jd�r$d|}||fS)Nzssh://zsvn+)�superr
r,rG)r#r'r2)�	__class__r(r)r,�s
zSubversion.get_url_revcCsV|}xBtjjtjj|d��sF|}tjj|�}||krtjd|�dSqW|j|�dS)Nzsetup.pyzGCould not find setup.py for directory %s (tried all parent directories)r)r/r0r1r=rrrrF)r#r$Z
orig_locationZ
last_locationr(r(r)�get_url�szSubversion.get_urlcCspddlm}tjj||jd�}tjj|�rHt|��}|j�}WdQRXnd}|j	d�sj|j	d�sj|j	d�r�t
ttj
|jd���}|dd=|dd	}d
d�|D�dg}n�|j	d�r�tj|�}|s�td
|��|jd�}dd�tj|�D�dg}n^y<|jdd|gdd�}	tj|	�jd�}dd�tj|	�D�}Wn |k
�rRdg}}YnX|�rdt|�}
nd}
||
fS)Nr)�InstallationErrorrCr7�8�9Z10z

�cSs,g|]$}t|�dkr|drt|d��qS)�	)�len�int)�.0�dr(r(r)�
<listcomp>�sz/Subversion._get_svn_url_rev.<locals>.<listcomp>z<?xmlzBadly formatted data: %rrcSsg|]}t|jd���qS)r)rVr )rW�mr(r(r)rY�srz--xmlF)rcSsg|]}t|jd���qS)r)rVr )rWrZr(r(r)rY�s)Zpip.exceptionsrPr/r0r=rr1�open�readrG�list�map�str�
splitlinesr>�_svn_xml_url_rer�
ValueErrorr �_svn_rev_re�finditerr�_svn_info_xml_url_re�_svn_info_xml_rev_rerH)r#r$rPZentries_path�f�datar'Zrevsr&Zxmlr2r(r(r)rF�s>








zSubversion._get_svn_url_revcCsB|j|�}|dkrdS|j�jdd�d}|j|�}d|||fS)Nr9rrzsvn+%s@%s#egg=%s)rOZegg_namer>rL)r#rAr$ZrepoZegg_project_namer2r(r(r)�get_src_requirement�s

zSubversion.get_src_requirementcCsdS)z&Always assume the versions don't matchFr()r#r5r3r(r(r)�
check_version�szSubversion.check_versioncCs>tj|�}|jjd�d}|j||j|j|jf}tj|�}|S)N�@rr;)	�urllib_parse�urlsplit�netlocr>�schemer0ZqueryZfragmentZ
urlunsplit)r'ZpurlZstripped_netlocZ
url_piecesZsurlr(r(r)r.�s


zSubversion.remove_auth_from_url)rr
rrr)�__name__�
__module__�__qualname__�namerZ	repo_nameZschemesr*r+r4r6r8rBrLr,rOrFrirj�staticmethodr.�
__classcell__r(r()rNr)r
s",	r
cCs�|rd|g}ng}tj|�}t|d�r6|j|j}}nL|d}d|krz|jd�d}d|krn|jdd�\}}q�|d}}nd	\}}|r�|d|g7}|r�|d|g7}|S)
Nz-r�usernamerrkr�:z
--usernamez
--password)NN)rlrm�hasattrrv�passwordr>)r'r2r3�rrvryrnZauthr(r(r)r-�s$


r-)Z
__future__rZloggingr/�reZpip._vendor.six.moves.urllibrrlZ	pip.indexrZ	pip.utilsrrZpip.utils.loggingrZpip.vcsrr	�compilerarcrr"rfreZ	getLoggerrprr
r-�registerr(r(r(r)�<module>s&






Yvcs/__pycache__/__init__.cpython-36.pyc000064400000025515151733136520013752 0ustar003

�PfV0�@s�dZddlmZddlZddlZddlZddlZddlZddlm	Z
ddlmZddl
mZmZmZmZmZddgZeje�ZGd	d
�d
e�Ze�ZGdd�de�Zd
d�ZdS)z)Handles all VCS (version control) support�)�absolute_importN)�parse)�
BadCommand)�display_path�
backup_dir�call_subprocess�rmtree�ask_path_exists�vcs�get_src_requirementcs�eZdZiZddddddgZ�fdd�Zd	d
�Zedd��Zed
d��Z	edd��Z
dd�Zddd�Zdd�Z
dd�Zdd�Z�ZS)�
VcsSupportZsshZgitZhgZbzrZsftpZsvncs:tjj|j�ttdd�r(tjj|j�tt|�j�dS)N�
uses_fragment)	�urllib_parseZuses_netloc�extend�schemes�getattrr
�superr�__init__)�self)�	__class__��/usr/lib/python3.6/__init__.pyrszVcsSupport.__init__cCs
|jj�S)N)�	_registry�__iter__)rrrrr$szVcsSupport.__iter__cCst|jj��S)N)�listr�values)rrrr�backends'szVcsSupport.backendscCsdd�|jD�S)NcSsg|]
}|j�qSr)�dirname)�.0�backendrrr�
<listcomp>-sz'VcsSupport.dirnames.<locals>.<listcomp>)r)rrrr�dirnames+szVcsSupport.dirnamescCs$g}x|jD]}|j|j�qW|S)N)rrr)rrrrrr�all_schemes/szVcsSupport.all_schemescCsFt|d�stjd|j�dS|j|jkrB||j|j<tjd|j�dS)N�namezCannot register VCS %szRegistered VCS backend: %s)�hasattr�logger�warning�__name__r#r�debug)r�clsrrr�register6s
zVcsSupport.registerNcCs<||jkr|j|=n$||jj�kr.|j|j=n
tjd�dS)Nz0Cannot unregister because no class or name given)rrr#r%r&)rr)r#rrr�
unregister>s


zVcsSupport.unregistercCs8x2|jj�D]$}|j|�rtjd||j�|jSqWdS)z�
        Return the name of the version control backend if found at given
        location, e.g. vcs.get_backend_name('/path/to/vcs/checkout')
        zDetermine that %s uses VCS: %sN)rr�controls_locationr%r(r#)r�location�vc_typerrr�get_backend_nameFs


zVcsSupport.get_backend_namecCs |j�}||jkr|j|SdS)N)�lowerr)rr#rrr�get_backendRs
zVcsSupport.get_backendcCs|j|�}|r|j|�SdS)N)r/r1)rr-r.rrr�get_backend_from_locationWs

z$VcsSupport.get_backend_from_location)NN)r'�
__module__�__qualname__rrrr�propertyrr!r"r*r+r/r1r2�
__classcell__rr)rrrs	
rcs�eZdZdZdZfZd+�fdd�	Zdd�Zdd�Zd	d
�Z	dd�Z
d
d�Zdd�Zdd�Z
dd�Zdd�Zdd�Zdd�Zdd�Zdd�Zdd �Zd!d"�Zd#d$�Zd,d'd(�Zed)d*��Z�ZS)-�VersionControl�Ncs||_tt|�j||�dS)N)�urlrr7r)rr9�args�kwargs)rrrrgszVersionControl.__init__cCs"tjj|�\}}|jtjj�p |S)zy
           posix absolute paths start with os.path.sep,
           win32 ones start with drive (like c:\folder)
        )�os�path�
splitdrive�
startswith�sep)rZrepoZdrive�tailrrr�_is_local_repositoryksz#VersionControl._is_local_repositorycCs|jdd�S)N�/�_)�replace)rZsurnamerrr�translate_egg_surnameusz$VersionControl.translate_egg_surnamecCst�dS)z�
        Export the repository at the url to the destination location
        i.e. only download the files, without vcs informations
        N)�NotImplementedError)rr-rrr�exportyszVersionControl.exportc	Cszd}d|jkst||j��|jjdd�d}tj|�\}}}}}d}d|kr^|jdd�\}}tj||||df�}||fS)zm
        Returns the correct repository URL and revision by parsing the given
        repository URL
        zvSorry, '%s' is a malformed VCS url. The format is <vcs>+<protocol>://<url>, e.g. svn+http://myrepo/svn/MyApp#egg=MyApp�+�N�@r8)r9�AssertionError�splitrZurlsplit�rsplitZ
urlunsplit)	rZ
error_messager9�schemeZnetlocr=ZqueryZfragZrevrrr�get_url_rev�szVersionControl.get_url_revcCs4|jd�j|j�s td|��|j|�|j|�fS)zA
        Returns (url, revision), where both are strings
        rCzBad directory: %s)�rstrip�endswithrrL�get_url�get_revision)rr-rrr�get_info�s
zVersionControl.get_infocCstj|�jd�S)zi
        Normalize a URL for comparison by unquoting it and removing any
        trailing slash.
        rC)rZunquoterQ)rr9rrr�
normalize_url�szVersionControl.normalize_urlcCs|j|�|j|�kS)zV
        Compare two repo URLs for identity, ignoring incidental differences.
        )rV)rZurl1Zurl2rrr�compare_urls�szVersionControl.compare_urlscCst�dS)zx
        Called when installing or updating an editable package, takes the
        source path of the checkout.
        N)rG)r�destrrr�obtain�szVersionControl.obtaincCst�dS)zB
        Switch the repo at ``dest`` to point to ``URL``.
        N)rG)rrXr9�rev_optionsrrr�switch�szVersionControl.switchcCst�dS)zO
        Update an already-existing repo to the given ``rev_options``.
        N)rG)rrXrZrrr�update�szVersionControl.updatecCst�dS)zp
        Return True if the version is identical to what exists and
        doesn't need to be updated.
        N)rG)rrXrZrrr�
check_version�szVersionControl.check_versionc
Cs�d}d}tjj|�r�d}tjjtjj||j��r�|j|�}|j||�r�tjd|j	j
�t|�|�|j||�s�tj
dt|�|j	|�|j||�q�tj
d�q�tjd|j|j	t|�|�d}ntjd||j|j	�d}|�r�tjd|j|�td|d|d�}|dk�r2tj
d|j	t|�||�|j|||�n~|d	k�r>nr|d
k�rftjdt|��t|�d}nJ|dk�r�t|�}	tjdt|�|	�tj||	�d}n|dk�r�tjd�|S)z�
        Prepare a location to receive a checkout/clone.

        Return True if the location is ready for (and requires) a
        checkout/clone, False otherwise.
        TFz)%s in %s exists, and has correct URL (%s)zUpdating %s %s%sz$Skipping because already up-to-date.z%s %s in %s exists with URL %s�%(s)witch, (i)gnore, (w)ipe, (b)ackup �s�i�w�bz0Directory %s already exists, and is not a %s %s.�(i)gnore, (w)ipe, (b)ackup z+The plan is to install the %s repository %szWhat to do?  %srrJzSwitching %s %s to %s%szDeleting %szBacking up %s to %s�a�r_r`rarb)r^re�r`rarb)rcrf���)r<r=�exists�joinrrSrWr%r(Z	repo_name�titlerr]�infor\r&r#r	r[rr�shutilZmove�sys�exit)
rrXr9rZZrev_displayZcheckout�promptZexisting_urlZresponseZdest_dirrrr�check_destination�s�







z VersionControl.check_destinationcCs"tjj|�rt|�|j|�dS)zq
        Clean up current location and download the url repository
        (and vcs infos) into location
        N)r<r=rhrrY)rr-rrr�unpackszVersionControl.unpackcCst�dS)z�
        Return a string representing the requirement needed to
        redownload the files currently present in location, something
        like:
          {repository_url}@{revision}#egg={project_name}-{version_identifier}
        N)rG)r�distr-rrrr sz"VersionControl.get_src_requirementcCst�dS)z_
        Return the url used at location
        Used in get_info or check_destination
        N)rG)rr-rrrrS)szVersionControl.get_urlcCst�dS)z_
        Return the current revision of the files at location
        Used in get_info
        N)rG)rr-rrrrT0szVersionControl.get_revisionT�raisec	Csf|jg|}yt|||||||�Stk
r`}z$|jtjkrNtd|j��n�WYdd}~XnXdS)z�
        Run a VCS subcommand
        This is simply a wrapper around call_subprocess that adds the VCS
        command name, and checks that the VCS is available
        zCannot find command %rN)r#r�OSError�errno�ENOENTr)	r�cmdZshow_stdout�cwdZ
on_returncodeZcommand_descZ
extra_environZspinner�errr�run_command7s	zVersionControl.run_commandcCs0tjd||j|j�tjj||j�}tjj|�S)z�
        Check if a location is controlled by the vcs.
        It is meant to be overridden to implement smarter detection
        mechanisms for specific vcs.
        zChecking in %s for %s (%s)...)r%r(rr#r<r=rirh)r)r-r=rrrr,Nsz VersionControl.controls_location)N)TNrsNNN)r'r3r4r#rrrrBrFrHrPrUrVrWrYr[r\r]rprqrrSrTrz�classmethodr,r6rr)rrr7as2
U		
r7cCsZtj|�}|rFy|�j||�Stk
rDtjd||j�|j�SXtjd|�|j�S)NzPcannot determine version of editable source in %s (%s command not found in path)ztcannot determine version of editable source in %s (is not SVN checkout, Git clone, Mercurial clone or Bazaar branch))r
r2rrr%r&r#Zas_requirement)rrr-Zversion_controlrrrr[s

)�__doc__Z
__future__rruZloggingr<rlrmZpip._vendor.six.moves.urllibrrZpip.exceptionsrZ	pip.utilsrrrrr	�__all__Z	getLoggerr'r%�objectrr
r7rrrrr�<module>s 
G{vcs/__pycache__/bazaar.cpython-36.pyc000064400000007066151733136520013454 0ustar003

�Pf��@s�ddlmZddlZddlZddlZyddlmZWnek
rPddl	ZYnXddl
mZmZddl
mZmZddlmZeje�ZGdd�de�Zeje�dS)	�)�absolute_importN)�parse)�rmtree�display_path)�vcs�VersionControl)�path_to_urlcszeZdZdZdZdZdZd �fdd�	Zd
d�Zdd�Z	dd�Z
dd�Z�fdd�Zdd�Z
dd�Zdd�Zdd�Z�ZS)!�Bazaar�bzrz.bzr�branch�bzr+http�	bzr+https�bzr+ssh�bzr+sftp�bzr+ftp�bzr+lpNcsDtt|�j|f|�|�ttdd�r@tjjdg�tjjdg�dS)N�
uses_fragmentZlp)�superr	�__init__�getattr�urllib_parser�extendZnon_hierarchical)�self�url�args�kwargs)�	__class__��/usr/lib/python3.6/bazaar.pyrszBazaar.__init__cCsRtjdd�}|j|�tjj|�r*t|�z|jd|g|dd�Wdt|�XdS)zU
        Export the Bazaar repository at the url to the destination location
        z-exportzpip-�exportF)�cwd�show_stdoutN)�tempfileZmkdtemp�unpack�os�path�existsr�run_command)r�locationZtemp_dirrrrr&s
z
Bazaar.exportcCs|jd|g|d�dS)N�switch)r )r')r�destr�rev_optionsrrrr)5sz
Bazaar.switchcCs|jddg||d�dS)NZpullz-q)r )r')rr*r+rrr�update8sz
Bazaar.updatecCsl|j�\}}|r"d|g}d|}ng}d}|j||||�rhtjd||t|��|jddg|||g�dS)Nz-rz (to revision %s)�zChecking out %s%s to %srz-q)�get_url_revZcheck_destination�logger�inforr')rr*r�revr+Zrev_displayrrr�obtain;s

z
Bazaar.obtaincs,tt|�j�\}}|jd�r$d|}||fS)Nzssh://zbzr+)rr	r.�
startswith)rrr1)rrrr.Ls
zBazaar.get_url_revcCsl|jdgd|d�}xT|j�D]H}|j�}x:dD]2}|j|�r.|j|�d}|j|�r\t|�S|Sq.WqWdS)Nr0F)r!r �checkout of branch: �parent branch: �)r4r5)r'�
splitlines�stripr3�splitZ_is_local_repositoryr)rr(Zurls�line�x�reporrr�get_urlSs

zBazaar.get_urlcCs|jdgd|d�}|j�dS)NZrevnoF)r!r r6���)r'r7)rr(Zrevisionrrr�get_revision`szBazaar.get_revisioncCsT|j|�}|sdS|j�jd�s(d|}|j�jdd�d}|j|�}d|||fS)Nzbzr:zbzr+�-r6rz%s@%s#egg=%s)r=�lowerr3Zegg_namer9r?)rZdistr(r<Zegg_project_nameZcurrent_revrrr�get_src_requirementes

zBazaar.get_src_requirementcCsdS)z&Always assume the versions don't matchFr)rr*r+rrr�
check_versionoszBazaar.check_version)r
rr
rrrr)N)�__name__�
__module__�__qualname__�name�dirnameZ	repo_nameZschemesrrr)r,r2r.r=r?rBrC�
__classcell__rr)rrr	s

r	)Z
__future__rZloggingr$r"Zurllibrr�ImportErrorZurlparseZ	pip.utilsrrZpip.vcsrrZpip.downloadrZ	getLoggerrDr/r	�registerrrrr�<module>s
_vcs/__pycache__/__init__.cpython-36.opt-1.pyc000064400000025316151733136520014710 0ustar003

�PfV0�@s�dZddlmZddlZddlZddlZddlZddlZddlm	Z
ddlmZddl
mZmZmZmZmZddgZeje�ZGd	d
�d
e�Ze�ZGdd�de�Zd
d�ZdS)z)Handles all VCS (version control) support�)�absolute_importN)�parse)�
BadCommand)�display_path�
backup_dir�call_subprocess�rmtree�ask_path_exists�vcs�get_src_requirementcs�eZdZiZddddddgZ�fdd�Zd	d
�Zedd��Zed
d��Z	edd��Z
dd�Zddd�Zdd�Z
dd�Zdd�Z�ZS)�
VcsSupportZsshZgitZhgZbzrZsftpZsvncs:tjj|j�ttdd�r(tjj|j�tt|�j�dS)N�
uses_fragment)	�urllib_parseZuses_netloc�extend�schemes�getattrr
�superr�__init__)�self)�	__class__��/usr/lib/python3.6/__init__.pyrszVcsSupport.__init__cCs
|jj�S)N)�	_registry�__iter__)rrrrr$szVcsSupport.__iter__cCst|jj��S)N)�listr�values)rrrr�backends'szVcsSupport.backendscCsdd�|jD�S)NcSsg|]
}|j�qSr)�dirname)�.0�backendrrr�
<listcomp>-sz'VcsSupport.dirnames.<locals>.<listcomp>)r)rrrr�dirnames+szVcsSupport.dirnamescCs$g}x|jD]}|j|j�qW|S)N)rrr)rrrrrr�all_schemes/szVcsSupport.all_schemescCsFt|d�stjd|j�dS|j|jkrB||j|j<tjd|j�dS)N�namezCannot register VCS %szRegistered VCS backend: %s)�hasattr�logger�warning�__name__r#r�debug)r�clsrrr�register6s
zVcsSupport.registerNcCs<||jkr|j|=n$||jj�kr.|j|j=n
tjd�dS)Nz0Cannot unregister because no class or name given)rrr#r%r&)rr)r#rrr�
unregister>s


zVcsSupport.unregistercCs8x2|jj�D]$}|j|�rtjd||j�|jSqWdS)z�
        Return the name of the version control backend if found at given
        location, e.g. vcs.get_backend_name('/path/to/vcs/checkout')
        zDetermine that %s uses VCS: %sN)rr�controls_locationr%r(r#)r�location�vc_typerrr�get_backend_nameFs


zVcsSupport.get_backend_namecCs |j�}||jkr|j|SdS)N)�lowerr)rr#rrr�get_backendRs
zVcsSupport.get_backendcCs|j|�}|r|j|�SdS)N)r/r1)rr-r.rrr�get_backend_from_locationWs

z$VcsSupport.get_backend_from_location)NN)r'�
__module__�__qualname__rrrr�propertyrr!r"r*r+r/r1r2�
__classcell__rr)rrrs	
rcs�eZdZdZdZfZd+�fdd�	Zdd�Zdd�Zd	d
�Z	dd�Z
d
d�Zdd�Zdd�Z
dd�Zdd�Zdd�Zdd�Zdd�Zdd�Zdd �Zd!d"�Zd#d$�Zd,d'd(�Zed)d*��Z�ZS)-�VersionControl�Ncs||_tt|�j||�dS)N)�urlrr7r)rr9�args�kwargs)rrrrgszVersionControl.__init__cCs"tjj|�\}}|jtjj�p |S)zy
           posix absolute paths start with os.path.sep,
           win32 ones start with drive (like c:\folder)
        )�os�path�
splitdrive�
startswith�sep)rZrepoZdrive�tailrrr�_is_local_repositoryksz#VersionControl._is_local_repositorycCs|jdd�S)N�/�_)�replace)rZsurnamerrr�translate_egg_surnameusz$VersionControl.translate_egg_surnamecCst�dS)z�
        Export the repository at the url to the destination location
        i.e. only download the files, without vcs informations
        N)�NotImplementedError)rr-rrr�exportyszVersionControl.exportc	Csbd}|jjdd�d}tj|�\}}}}}d}d|krF|jdd�\}}tj||||df�}||fS)zm
        Returns the correct repository URL and revision by parsing the given
        repository URL
        zvSorry, '%s' is a malformed VCS url. The format is <vcs>+<protocol>://<url>, e.g. svn+http://myrepo/svn/MyApp#egg=MyApp�+�N�@r8)r9�splitrZurlsplit�rsplitZ
urlunsplit)	rZ
error_messager9�schemeZnetlocr=ZqueryZfragZrevrrr�get_url_rev�szVersionControl.get_url_revcCs|j|�|j|�fS)zA
        Returns (url, revision), where both are strings
        )�get_url�get_revision)rr-rrr�get_info�szVersionControl.get_infocCstj|�jd�S)zi
        Normalize a URL for comparison by unquoting it and removing any
        trailing slash.
        rC)rZunquote�rstrip)rr9rrr�
normalize_url�szVersionControl.normalize_urlcCs|j|�|j|�kS)zV
        Compare two repo URLs for identity, ignoring incidental differences.
        )rT)rZurl1Zurl2rrr�compare_urls�szVersionControl.compare_urlscCst�dS)zx
        Called when installing or updating an editable package, takes the
        source path of the checkout.
        N)rG)r�destrrr�obtain�szVersionControl.obtaincCst�dS)zB
        Switch the repo at ``dest`` to point to ``URL``.
        N)rG)rrVr9�rev_optionsrrr�switch�szVersionControl.switchcCst�dS)zO
        Update an already-existing repo to the given ``rev_options``.
        N)rG)rrVrXrrr�update�szVersionControl.updatecCst�dS)zp
        Return True if the version is identical to what exists and
        doesn't need to be updated.
        N)rG)rrVrXrrr�
check_version�szVersionControl.check_versionc
Cs�d}d}tjj|�r�d}tjjtjj||j��r�|j|�}|j||�r�tjd|j	j
�t|�|�|j||�s�tj
dt|�|j	|�|j||�q�tj
d�q�tjd|j|j	t|�|�d}ntjd||j|j	�d}|�r�tjd|j|�td|d|d�}|dk�r2tj
d|j	t|�||�|j|||�n~|d	k�r>nr|d
k�rftjdt|��t|�d}nJ|dk�r�t|�}	tjdt|�|	�tj||	�d}n|dk�r�tjd�|S)z�
        Prepare a location to receive a checkout/clone.

        Return True if the location is ready for (and requires) a
        checkout/clone, False otherwise.
        TFz)%s in %s exists, and has correct URL (%s)zUpdating %s %s%sz$Skipping because already up-to-date.z%s %s in %s exists with URL %s�%(s)witch, (i)gnore, (w)ipe, (b)ackup �s�i�w�bz0Directory %s already exists, and is not a %s %s.�(i)gnore, (w)ipe, (b)ackup z+The plan is to install the %s repository %szWhat to do?  %srrJzSwitching %s %s to %s%szDeleting %szBacking up %s to %s�a�r]r^r_r`)r\rc�r^r_r`)rard���)r<r=�exists�joinrrPrUr%r(Z	repo_name�titlerr[�inforZr&r#r	rYrr�shutilZmove�sys�exit)
rrVr9rXZrev_displayZcheckout�promptZexisting_urlZresponseZdest_dirrrr�check_destination�s�







z VersionControl.check_destinationcCs"tjj|�rt|�|j|�dS)zq
        Clean up current location and download the url repository
        (and vcs infos) into location
        N)r<r=rfrrW)rr-rrr�unpackszVersionControl.unpackcCst�dS)z�
        Return a string representing the requirement needed to
        redownload the files currently present in location, something
        like:
          {repository_url}@{revision}#egg={project_name}-{version_identifier}
        N)rG)r�distr-rrrr sz"VersionControl.get_src_requirementcCst�dS)z_
        Return the url used at location
        Used in get_info or check_destination
        N)rG)rr-rrrrP)szVersionControl.get_urlcCst�dS)z_
        Return the current revision of the files at location
        Used in get_info
        N)rG)rr-rrrrQ0szVersionControl.get_revisionT�raisec	Csf|jg|}yt|||||||�Stk
r`}z$|jtjkrNtd|j��n�WYdd}~XnXdS)z�
        Run a VCS subcommand
        This is simply a wrapper around call_subprocess that adds the VCS
        command name, and checks that the VCS is available
        zCannot find command %rN)r#r�OSError�errno�ENOENTr)	r�cmdZshow_stdout�cwdZ
on_returncodeZcommand_descZ
extra_environZspinner�errr�run_command7s	zVersionControl.run_commandcCs0tjd||j|j�tjj||j�}tjj|�S)z�
        Check if a location is controlled by the vcs.
        It is meant to be overridden to implement smarter detection
        mechanisms for specific vcs.
        zChecking in %s for %s (%s)...)r%r(rr#r<r=rgrf)r)r-r=rrrr,Nsz VersionControl.controls_location)N)TNrqNNN)r'r3r4r#rrrrBrFrHrOrRrTrUrWrYrZr[rnrorrPrQrx�classmethodr,r6rr)rrr7as2
U		
r7cCsZtj|�}|rFy|�j||�Stk
rDtjd||j�|j�SXtjd|�|j�S)NzPcannot determine version of editable source in %s (%s command not found in path)ztcannot determine version of editable source in %s (is not SVN checkout, Git clone, Mercurial clone or Bazaar branch))r
r2rrr%r&r#Zas_requirement)rpr-Zversion_controlrrrr[s

)�__doc__Z
__future__rrsZloggingr<rjrkZpip._vendor.six.moves.urllibrrZpip.exceptionsrZ	pip.utilsrrrrr	�__all__Z	getLoggerr'r%�objectrr
r7rrrrr�<module>s 
G{vcs/__pycache__/mercurial.cpython-36.pyc000064400000006737151733136520014203 0ustar003

�Pf�
�@s�ddlmZddlZddlZddlZddlmZmZddlm	Z	m
Z
ddlmZddl
mZeje�ZGdd�de
�Ze	je�dS)	�)�absolute_importN)�display_path�rmtree)�vcs�VersionControl)�path_to_url)�configparserc@sdeZdZdZdZdZdZdd	�Zd
d�Zdd
�Z	dd�Z
dd�Zdd�Zdd�Z
dd�Zdd�ZdS)�	Mercurial�hgz.hg�clone�hg+http�hg+https�hg+ssh�hg+static-httpcCs>tjdd�}|j|�z|jd|gd|d�Wdt|�XdS)z?Export the Hg repository at the url to the destination locationz-exportzpip-�archiveF)�show_stdout�cwdN)�tempfileZmkdtemp�unpack�run_commandr)�self�locationZtemp_dir�r�/usr/lib/python3.6/mercurial.py�exports
zMercurial.exportcCs�tjj||jd�}tj�}y<|j|�|jdd|�t|d��}|j	|�WdQRXWn6t
tjfk
r�}ztj
d||�WYdd}~XnX|jddg||d�dS)	NZhgrc�paths�default�wz/Could not switch Mercurial repository to %s: %s�updatez-q)r)�os�path�join�dirnamerZSafeConfigParser�read�set�open�write�OSErrorZNoSectionError�loggerZwarningr)r�dest�url�rev_optionsZrepo_config�configZconfig_file�excrrr�switch s
zMercurial.switchcCs,|jddg|d�|jddg||d�dS)NZpullz-q)rr)r)rr)r+rrrr/szMercurial.updatecCsz|j�\}}|r |g}d|}ng}d}|j||||�rvtjd||t|��|jddd||g�|jddg||d�dS)	Nz (to revision %s)�zCloning hg %s%s to %srz
--noupdatez-qr)r)Zget_url_revZcheck_destinationr(�inforr)rr)r*Zrevr+Zrev_displayrrr�obtain3s

zMercurial.obtaincCs2|jddgd|d�j�}|j|�r*t|�}|j�S)NZ
showconfigz
paths.defaultF)rr)r�stripZ_is_local_repositoryr)rrr*rrr�get_urlEs
zMercurial.get_urlcCs|jddgd|d�j�}|S)N�parentsz--template={rev}F)rr)rr2)rrZcurrent_revisionrrr�get_revisionMszMercurial.get_revisioncCs|jddgd|d�j�}|S)Nr4z--template={node}F)rr)rr2)rr�current_rev_hashrrr�get_revision_hashSszMercurial.get_revision_hashcCsT|j|�}|j�jd�s d|}|j�jdd�d}|s<dS|j|�}d|||fS)Nzhg:zhg+�-�rz%s@%s#egg=%s)r3�lower�
startswithZegg_name�splitr7)rZdistrZrepoZegg_project_namer6rrr�get_src_requirementYs

zMercurial.get_src_requirementcCsdS)z&Always assume the versions don't matchFr)rr)r+rrr�
check_versioncszMercurial.check_versionN)r
rr
rr)�__name__�
__module__�__qualname__�namer"Z	repo_nameZschemesrr.rr1r3r5r7r=r>rrrrr	s

r	)Z
__future__rZloggingrrZ	pip.utilsrrZpip.vcsrrZpip.downloadrZpip._vendor.six.movesrZ	getLoggerr?r(r	�registerrrrr�<module>s
Wvcs/__init__.py000064400000030126151733136520007460 0ustar00"""Handles all VCS (version control) support"""
from __future__ import absolute_import

import errno
import logging
import os
import shutil
import sys

from pip._vendor.six.moves.urllib import parse as urllib_parse

from pip.exceptions import BadCommand
from pip.utils import (display_path, backup_dir, call_subprocess,
                       rmtree, ask_path_exists)


# Public API of this module: the shared backend registry and the
# module-level helper defined at the bottom of the file.
__all__ = ['vcs', 'get_src_requirement']


logger = logging.getLogger(__name__)


class VcsSupport(object):
    """Registry of the available version-control backends.

    Backend classes add themselves via :meth:`register` (see the
    ``vcs.register(...)`` calls in the backend modules) and are looked
    up by name or by inspecting a checkout directory.
    """

    # Maps backend name (e.g. 'bzr') -> backend class; shared by design.
    _registry = {}
    schemes = ['ssh', 'git', 'hg', 'bzr', 'sftp', 'svn']

    def __init__(self):
        # Teach urlparse about the VCS schemes so URL splitting treats
        # them like hierarchical network URLs.
        urllib_parse.uses_netloc.extend(self.schemes)
        # Python >= 2.7.4, 3.3 doesn't have uses_fragment
        if getattr(urllib_parse, 'uses_fragment', None):
            urllib_parse.uses_fragment.extend(self.schemes)
        super(VcsSupport, self).__init__()

    def __iter__(self):
        # Iterating the registry yields backend names.
        return iter(self._registry)

    @property
    def backends(self):
        """All currently registered backend classes."""
        return list(self._registry.values())

    @property
    def dirnames(self):
        """Control-directory names (e.g. '.bzr') of every backend."""
        return [cls.dirname for cls in self.backends]

    @property
    def all_schemes(self):
        """Every URL scheme supported by any registered backend."""
        result = []
        for cls in self.backends:
            result.extend(cls.schemes)
        return result

    def register(self, cls):
        """Register ``cls`` under its ``name``; duplicates are ignored."""
        if not hasattr(cls, 'name'):
            logger.warning('Cannot register VCS %s', cls.__name__)
            return
        if cls.name not in self._registry:
            self._registry[cls.name] = cls
            logger.debug('Registered VCS backend: %s', cls.name)

    def unregister(self, cls=None, name=None):
        """Remove a backend, identified either by ``name`` or by class."""
        if name in self._registry:
            del self._registry[name]
        elif cls in self._registry.values():
            del self._registry[cls.name]
        else:
            logger.warning('Cannot unregister because no class or name given')

    def get_backend_name(self, location):
        """
        Return the name of the version control backend if found at given
        location, e.g. vcs.get_backend_name('/path/to/vcs/checkout')
        """
        for vc_type in self._registry.values():
            if vc_type.controls_location(location):
                logger.debug('Determine that %s uses VCS: %s',
                             location, vc_type.name)
                return vc_type.name
        return None

    def get_backend(self, name):
        """Look up a backend class by case-insensitive name (or None)."""
        return self._registry.get(name.lower())

    def get_backend_from_location(self, location):
        """Return the backend class controlling ``location``, if any."""
        backend_name = self.get_backend_name(location)
        if backend_name:
            return self.get_backend(backend_name)
        return None


# Shared singleton registry instance; backend modules call
# ``vcs.register(...)`` against it at import time.
vcs = VcsSupport()


class VersionControl(object):
    """Base class for version-control backends (e.g. Bazaar below).

    Subclasses set ``name``/``dirname``/``repo_name``/``schemes`` and
    implement the methods that raise NotImplementedError here.
    """

    # Short name of the VCS; also the executable run_command() invokes.
    name = ''
    # Name of the VCS control directory (e.g. '.bzr'); see controls_location.
    dirname = ''
    # List of supported schemes for this Version Control
    schemes = ()

    def __init__(self, url=None, *args, **kwargs):
        # Repository URL, typically "<vcs>+<protocol>://..." form
        # (parsed by get_url_rev below); may be None.
        self.url = url
        super(VersionControl, self).__init__(*args, **kwargs)

    def _is_local_repository(self, repo):
        """
           posix absolute paths start with os.path.sep,
           win32 ones start with drive (like c:\\folder)
        """
        drive, tail = os.path.splitdrive(repo)
        # NOTE: on win32 this returns the (truthy) drive string, not a bool;
        # callers only use the result in boolean context.
        return repo.startswith(os.path.sep) or drive

    # See issue #1083 for why this method was introduced:
    # https://github.com/pypa/pip/issues/1083
    def translate_egg_surname(self, surname):
        # For example, Django has branches of the form "stable/1.7.x".
        return surname.replace('/', '_')

    def export(self, location):
        """
        Export the repository at the url to the destination location
        i.e. only download the files, without vcs informations
        """
        raise NotImplementedError

    def get_url_rev(self):
        """
        Returns the correct repository URL and revision by parsing the given
        repository URL
        """
        error_message = (
            "Sorry, '%s' is a malformed VCS url. "
            "The format is <vcs>+<protocol>://<url>, "
            "e.g. svn+http://myrepo/svn/MyApp#egg=MyApp"
        )
        assert '+' in self.url, error_message % self.url
        # Strip the "<vcs>+" prefix before URL-splitting.
        url = self.url.split('+', 1)[1]
        scheme, netloc, path, query, frag = urllib_parse.urlsplit(url)
        rev = None
        # A trailing "@rev" on the path selects a specific revision.
        if '@' in path:
            path, rev = path.rsplit('@', 1)
        # The fragment (e.g. "#egg=name") is deliberately dropped here.
        url = urllib_parse.urlunsplit((scheme, netloc, path, query, ''))
        return url, rev

    def get_info(self, location):
        """
        Returns (url, revision), where both are strings
        """
        # ``location`` must be the checkout root, not its control directory.
        assert not location.rstrip('/').endswith(self.dirname), \
            'Bad directory: %s' % location
        return self.get_url(location), self.get_revision(location)

    def normalize_url(self, url):
        """
        Normalize a URL for comparison by unquoting it and removing any
        trailing slash.
        """
        return urllib_parse.unquote(url).rstrip('/')

    def compare_urls(self, url1, url2):
        """
        Compare two repo URLs for identity, ignoring incidental differences.
        """
        return (self.normalize_url(url1) == self.normalize_url(url2))

    def obtain(self, dest):
        """
        Called when installing or updating an editable package, takes the
        source path of the checkout.
        """
        raise NotImplementedError

    def switch(self, dest, url, rev_options):
        """
        Switch the repo at ``dest`` to point to ``URL``.
        """
        raise NotImplementedError

    def update(self, dest, rev_options):
        """
        Update an already-existing repo to the given ``rev_options``.
        """
        raise NotImplementedError

    def check_version(self, dest, rev_options):
        """
        Return True if the version is identical to what exists and
        doesn't need to be updated.
        """
        raise NotImplementedError

    def check_destination(self, dest, url, rev_options, rev_display):
        """
        Prepare a location to receive a checkout/clone.

        Return True if the location is ready for (and requires) a
        checkout/clone, False otherwise.
        """
        checkout = True
        # ``prompt`` stays False, or becomes a (message, choices) tuple
        # consumed by ask_path_exists() below.
        prompt = False
        if os.path.exists(dest):
            checkout = False
            if os.path.exists(os.path.join(dest, self.dirname)):
                # ``dest`` already holds a checkout of this VCS type.
                existing_url = self.get_url(dest)
                if self.compare_urls(existing_url, url):
                    # Same repository: update in place unless current.
                    logger.debug(
                        '%s in %s exists, and has correct URL (%s)',
                        self.repo_name.title(),
                        display_path(dest),
                        url,
                    )
                    if not self.check_version(dest, rev_options):
                        logger.info(
                            'Updating %s %s%s',
                            display_path(dest),
                            self.repo_name,
                            rev_display,
                        )
                        self.update(dest, rev_options)
                    else:
                        logger.info(
                            'Skipping because already up-to-date.')
                else:
                    # Same VCS type but a different URL: ask the user.
                    logger.warning(
                        '%s %s in %s exists with URL %s',
                        self.name,
                        self.repo_name,
                        display_path(dest),
                        existing_url,
                    )
                    prompt = ('(s)witch, (i)gnore, (w)ipe, (b)ackup ',
                              ('s', 'i', 'w', 'b'))
            else:
                # Directory exists but is not a checkout of this VCS, so
                # "switch" is not offered as an option.
                logger.warning(
                    'Directory %s already exists, and is not a %s %s.',
                    dest,
                    self.name,
                    self.repo_name,
                )
                prompt = ('(i)gnore, (w)ipe, (b)ackup ', ('i', 'w', 'b'))
        if prompt:
            logger.warning(
                'The plan is to install the %s repository %s',
                self.name,
                url,
            )
            response = ask_path_exists('What to do?  %s' % prompt[0],
                                       prompt[1])

            if response == 's':
                # Repoint the existing checkout at the requested URL.
                logger.info(
                    'Switching %s %s to %s%s',
                    self.repo_name,
                    display_path(dest),
                    url,
                    rev_display,
                )
                self.switch(dest, url, rev_options)
            elif response == 'i':
                # do nothing
                pass
            elif response == 'w':
                # Wipe and fall through to a fresh checkout.
                logger.warning('Deleting %s', display_path(dest))
                rmtree(dest)
                checkout = True
            elif response == 'b':
                # Move the old directory aside, then do a fresh checkout.
                dest_dir = backup_dir(dest)
                logger.warning(
                    'Backing up %s to %s', display_path(dest), dest_dir,
                )
                shutil.move(dest, dest_dir)
                checkout = True
            elif response == 'a':
                # NOTE(review): 'a' is not among the choices offered above;
                # presumably ask_path_exists can also return an "abort"
                # answer -- confirm before relying on this branch.
                sys.exit(-1)
        return checkout

    def unpack(self, location):
        """
        Clean up current location and download the url repository
        (and vcs infos) into location
        """
        if os.path.exists(location):
            rmtree(location)
        self.obtain(location)

    def get_src_requirement(self, dist, location):
        """
        Return a string representing the requirement needed to
        redownload the files currently present in location, something
        like:
          {repository_url}@{revision}#egg={project_name}-{version_identifier}
        """
        raise NotImplementedError

    def get_url(self, location):
        """
        Return the url used at location
        Used in get_info or check_destination
        """
        raise NotImplementedError

    def get_revision(self, location):
        """
        Return the current revision of the files at location
        Used in get_info
        """
        raise NotImplementedError

    def run_command(self, cmd, show_stdout=True, cwd=None,
                    on_returncode='raise',
                    command_desc=None,
                    extra_environ=None, spinner=None):
        """
        Run a VCS subcommand
        This is simply a wrapper around call_subprocess that adds the VCS
        command name, and checks that the VCS is available
        """
        # Prefix with the VCS executable name, e.g. ['bzr'] + cmd.
        cmd = [self.name] + cmd
        try:
            return call_subprocess(cmd, show_stdout, cwd,
                                   on_returncode,
                                   command_desc, extra_environ,
                                   spinner)
        except OSError as e:
            # errno.ENOENT = no such file or directory
            # In other words, the VCS executable isn't available
            if e.errno == errno.ENOENT:
                raise BadCommand('Cannot find command %r' % self.name)
            else:
                raise  # re-raise exception if a different error occurred

    @classmethod
    def controls_location(cls, location):
        """
        Check if a location is controlled by the vcs.
        It is meant to be overridden to implement smarter detection
        mechanisms for specific vcs.
        """
        logger.debug('Checking in %s for %s (%s)...',
                     location, cls.dirname, cls.name)
        path = os.path.join(location, cls.dirname)
        return os.path.exists(path)


def get_src_requirement(dist, location):
    """Return the pip requirement string describing the checkout at
    ``location``, falling back to ``dist.as_requirement()`` when no VCS
    backend recognises the directory or its executable is missing."""
    backend = vcs.get_backend_from_location(location)
    if backend is None:
        logger.warning(
            'cannot determine version of editable source in %s (is not SVN '
            'checkout, Git clone, Mercurial clone or Bazaar branch)',
            location,
        )
        return dist.as_requirement()
    try:
        return backend().get_src_requirement(dist, location)
    except BadCommand:
        logger.warning(
            'cannot determine version of editable source in %s '
            '(%s command not found in path)',
            location,
            backend.name,
        )
        return dist.as_requirement()
vcs/bazaar.py000064400000007333151733136520007165 0ustar00from __future__ import absolute_import

import logging
import os
import tempfile

# TODO: Get this into six.moves.urllib.parse
try:
    from urllib import parse as urllib_parse
except ImportError:
    import urlparse as urllib_parse

from pip.utils import rmtree, display_path
from pip.vcs import vcs, VersionControl
from pip.download import path_to_url


# Module-level logger for the Bazaar backend.
logger = logging.getLogger(__name__)


class Bazaar(VersionControl):
    """Version-control backend for Bazaar (bzr) branches."""

    name = 'bzr'
    dirname = '.bzr'
    repo_name = 'branch'
    schemes = (
        'bzr', 'bzr+http', 'bzr+https', 'bzr+ssh', 'bzr+sftp', 'bzr+ftp',
        'bzr+lp',
    )

    def __init__(self, url=None, *args, **kwargs):
        super(Bazaar, self).__init__(url, *args, **kwargs)
        # Python >= 2.7.4, 3.3 doesn't have uses_fragment or non_hierarchical
        # Register lp but do not expose as a scheme to support bzr+lp.
        if getattr(urllib_parse, 'uses_fragment', None):
            urllib_parse.uses_fragment.extend(['lp'])
            urllib_parse.non_hierarchical.extend(['lp'])

    def export(self, location):
        """
        Export the Bazaar repository at the url to the destination location
        """
        scratch_dir = tempfile.mkdtemp('-export', 'pip-')
        self.unpack(scratch_dir)
        if os.path.exists(location):
            # Remove the location to make sure Bazaar can export it correctly
            rmtree(location)
        try:
            self.run_command(['export', location], cwd=scratch_dir,
                             show_stdout=False)
        finally:
            rmtree(scratch_dir)

    def switch(self, dest, url, rev_options):
        self.run_command(['switch', url], cwd=dest)

    def update(self, dest, rev_options):
        cmd = ['pull', '-q'] + rev_options
        self.run_command(cmd, cwd=dest)

    def obtain(self, dest):
        url, rev = self.get_url_rev()
        rev_options = ['-r', rev] if rev else []
        rev_display = ' (to revision %s)' % rev if rev else ''
        if not self.check_destination(dest, url, rev_options, rev_display):
            return
        logger.info(
            'Checking out %s%s to %s',
            url,
            rev_display,
            display_path(dest),
        )
        self.run_command(['branch', '-q'] + rev_options + [url, dest])

    def get_url_rev(self):
        # hotfix the URL scheme after removing bzr+ from bzr+ssh:// readd it
        url, rev = super(Bazaar, self).get_url_rev()
        if url.startswith('ssh://'):
            return 'bzr+' + url, rev
        return url, rev

    def get_url(self, location):
        info_output = self.run_command(
            ['info'], show_stdout=False, cwd=location)
        markers = ('checkout of branch: ',
                   'parent branch: ')
        for raw_line in info_output.splitlines():
            stripped = raw_line.strip()
            for marker in markers:
                if stripped.startswith(marker):
                    repo = stripped.split(marker)[1]
                    if self._is_local_repository(repo):
                        return path_to_url(repo)
                    return repo
        return None

    def get_revision(self, location):
        output = self.run_command(
            ['revno'], show_stdout=False, cwd=location)
        # The branch revno is the last line of output.
        return output.splitlines()[-1]

    def get_src_requirement(self, dist, location):
        repo = self.get_url(location)
        if not repo:
            return None
        if not repo.lower().startswith('bzr:'):
            repo = 'bzr+' + repo
        project_name = dist.egg_name().split('-', 1)[0]
        revision = self.get_revision(location)
        return '%s@%s#egg=%s' % (repo, revision, project_name)

    def check_version(self, dest, rev_options):
        """Always assume the versions don't match"""
        return False


# Make the Bazaar backend discoverable through the shared registry.
vcs.register(Bazaar)
vcs/subversion.py000064400000022206151733136520010120 0ustar00from __future__ import absolute_import

import logging
import os
import re

from pip._vendor.six.moves.urllib import parse as urllib_parse

from pip.index import Link
from pip.utils import rmtree, display_path
from pip.utils.logging import indent_log
from pip.vcs import vcs, VersionControl

# Patterns for scraping URL and revision data out of svn metadata output.
# ``_svn_url_re``/``_svn_revision_re`` match the plain-text ``svn info``
# format (see get_info below); the others match attribute/XML-style output.
_svn_xml_url_re = re.compile('url="([^"]+)"')
_svn_rev_re = re.compile('committed-rev="(\d+)"')
_svn_url_re = re.compile(r'URL: (.+)')
_svn_revision_re = re.compile(r'Revision: (.+)')
_svn_info_xml_rev_re = re.compile(r'\s*revision="(\d+)"')
_svn_info_xml_url_re = re.compile(r'<url>(.*)</url>')


logger = logging.getLogger(__name__)


class Subversion(VersionControl):
    """Version-control backend for Subversion working copies."""

    name = 'svn'
    dirname = '.svn'
    repo_name = 'checkout'
    schemes = ('svn', 'svn+ssh', 'svn+http', 'svn+https', 'svn+svn')

    def get_info(self, location):
        """Returns (url, revision), where both are strings"""
        assert not location.rstrip('/').endswith(self.dirname), \
            'Bad directory: %s' % location
        # LANG=C forces untranslated svn output so the regexes below match.
        output = self.run_command(
            ['info', location],
            show_stdout=False,
            extra_environ={'LANG': 'C'},
        )
        match = _svn_url_re.search(output)
        if not match:
            logger.warning(
                'Cannot determine URL of svn checkout %s',
                display_path(location),
            )
            logger.debug('Output that cannot be parsed: \n%s', output)
            return None, None
        url = match.group(1).strip()
        match = _svn_revision_re.search(output)
        if not match:
            logger.warning(
                'Cannot determine revision of svn checkout %s',
                display_path(location),
            )
            logger.debug('Output that cannot be parsed: \n%s', output)
            # URL was found but the revision was not; return a partial result.
            return url, None
        return url, match.group(1)

    def export(self, location):
        """Export the svn repository at the url to the destination location"""
        url, rev = self.get_url_rev()
        rev_options = get_rev_options(url, rev)
        # Credentials are passed as flags (in rev_options), not in the URL.
        url = self.remove_auth_from_url(url)
        logger.info('Exporting svn repository %s to %s', url, location)
        with indent_log():
            if os.path.exists(location):
                # Subversion doesn't like to check out over an existing
                # directory --force fixes this, but was only added in svn 1.5
                rmtree(location)
            self.run_command(
                ['export'] + rev_options + [url, location],
                show_stdout=False)

    def switch(self, dest, url, rev_options):
        """Point the existing checkout at ``dest`` to a new ``url``."""
        self.run_command(['switch'] + rev_options + [url, dest])

    def update(self, dest, rev_options):
        """Update the checkout at ``dest`` to the requested revision."""
        self.run_command(['update'] + rev_options + [dest])

    def obtain(self, dest):
        """Check the repository out into ``dest`` (or reuse/replace it)."""
        url, rev = self.get_url_rev()
        rev_options = get_rev_options(url, rev)
        # Credentials are passed as flags (in rev_options), not in the URL.
        url = self.remove_auth_from_url(url)
        if rev:
            rev_display = ' (to revision %s)' % rev
        else:
            rev_display = ''
        # check_destination decides whether a fresh checkout is needed.
        if self.check_destination(dest, url, rev_options, rev_display):
            logger.info(
                'Checking out %s%s to %s',
                url,
                rev_display,
                display_path(dest),
            )
            self.run_command(['checkout', '-q'] + rev_options + [url, dest])

    def get_location(self, dist, dependency_links):
        """Return the first dependency link whose egg fragment matches
        ``dist``'s key, stripped of its fragment; None if no match."""
        for url in dependency_links:
            egg_fragment = Link(url).egg_fragment
            if not egg_fragment:
                continue
            if '-' in egg_fragment:
                # FIXME: will this work when a package has - in the name?
                key = '-'.join(egg_fragment.split('-')[:-1]).lower()
            else:
                key = egg_fragment
            if key == dist.key:
                return url.split('#', 1)[0]
        return None

    def get_revision(self, location):
        """
        Return the maximum revision for all files under a given location
        """
        # Note: taken from setuptools.command.egg_info
        revision = 0

        for base, dirs, files in os.walk(location):
            if self.dirname not in dirs:
                dirs[:] = []
                continue    # no sense walking uncontrolled subdirs
            dirs.remove(self.dirname)
            entries_fn = os.path.join(base, self.dirname, 'entries')
            if not os.path.exists(entries_fn):
                # FIXME: should we warn?
                continue

            dirurl, localrev = self._get_svn_url_rev(base)

            if base == location:
                base_url = dirurl + '/'   # save the root url
            elif not dirurl or not dirurl.startswith(base_url):
                dirs[:] = []
                continue    # not part of the same svn tree, skip it
            revision = max(revision, localrev)
        return revision

    def get_url_rev(self):
        # hotfix the URL scheme after removing svn+ from svn+ssh:// readd it
        url, rev = super(Subversion, self).get_url_rev()
        if url.startswith('ssh://'):
            url = 'svn+' + url
        return url, rev

    def get_url(self, location):
        """Return the repository URL of the checkout containing
        ``location``, or None if no setup.py can be found above it."""
        # In cases where the source is in a subdirectory, not alongside
        # setup.py we have to look up in the location until we find a real
        # setup.py
        orig_location = location
        while not os.path.exists(os.path.join(location, 'setup.py')):
            last_location = location
            location = os.path.dirname(location)
            if location == last_location:
                # We've traversed up to the root of the filesystem without
                # finding setup.py
                logger.warning(
                    "Could not find setup.py for directory %s (tried all "
                    "parent directories)",
                    orig_location,
                )
                return None

        return self._get_svn_url_rev(location)[0]

    def _get_svn_url_rev(self, location):
        """Return (url, rev) for the working copy at ``location``,
        handling several generations of .svn metadata formats."""
        from pip.exceptions import InstallationError

        entries_path = os.path.join(location, self.dirname, 'entries')
        if os.path.exists(entries_path):
            with open(entries_path) as f:
                data = f.read()
        else:  # subversion >= 1.7 does not have the 'entries' file
            data = ''

        if (data.startswith('8') or
                data.startswith('9') or
                data.startswith('10')):
            # Plain-text 'entries' format (svn 1.4-1.6); records are
            # separated by '\n\x0c\n' form-feed markers.
            data = list(map(str.splitlines, data.split('\n\x0c\n')))
            del data[0][0]  # get rid of the '8'
            url = data[0][3]
            revs = [int(d[9]) for d in data if len(d) > 9 and d[9]] + [0]
        elif data.startswith('<?xml'):
            # XML 'entries' format (svn <= 1.3).
            match = _svn_xml_url_re.search(data)
            if not match:
                raise ValueError('Badly formatted data: %r' % data)
            url = match.group(1)    # get repository URL
            revs = [int(m.group(1)) for m in _svn_rev_re.finditer(data)] + [0]
        else:
            try:
                # subversion >= 1.7
                xml = self.run_command(
                    ['info', '--xml', location],
                    show_stdout=False,
                )
                url = _svn_info_xml_url_re.search(xml).group(1)
                revs = [
                    int(m.group(1)) for m in _svn_info_xml_rev_re.finditer(xml)
                ]
            except InstallationError:
                # 'svn info' failed; treat as unknown URL / revision 0.
                url, revs = None, []

        if revs:
            rev = max(revs)
        else:
            rev = 0

        return url, rev

    def get_src_requirement(self, dist, location):
        """Return a 'svn+URL@rev#egg=name' requirement string, or None."""
        repo = self.get_url(location)
        if repo is None:
            return None
        # FIXME: why not project name?
        egg_project_name = dist.egg_name().split('-', 1)[0]
        rev = self.get_revision(location)
        return 'svn+%s@%s#egg=%s' % (repo, rev, egg_project_name)

    def check_version(self, dest, rev_options):
        """Always assume the versions don't match"""
        return False

    @staticmethod
    def remove_auth_from_url(url):
        # Return a copy of url with 'username:password@' removed.
        # username/pass params are passed to subversion through flags
        # and are not recognized in the url.

        # parsed url
        purl = urllib_parse.urlsplit(url)
        stripped_netloc = \
            purl.netloc.split('@')[-1]

        # stripped url
        url_pieces = (
            purl.scheme, stripped_netloc, purl.path, purl.query, purl.fragment
        )
        surl = urllib_parse.urlunsplit(url_pieces)
        return surl


def get_rev_options(url, rev):
    """Build the extra 'svn' command-line options for a checkout/export.

    :param url: the (possibly authenticated) repository URL.  Any
        'user:password@' credentials embedded in its netloc are turned
        into explicit --username/--password flags, since Subversion does
        not accept them inside the URL itself.
    :param rev: the requested revision string, or a falsy value for HEAD.
    :return: a (possibly empty) list of svn argument strings.
    """
    rev_options = ['-r', rev] if rev else []

    # urlsplit results have exposed .username/.password since Python 2.5,
    # so the old hasattr() fallback that parsed the netloc by hand is no
    # longer needed on any supported interpreter.
    parsed = urllib_parse.urlsplit(url)
    username, password = parsed.username, parsed.password

    if username:
        rev_options += ['--username', username]
    if password:
        rev_options += ['--password', password]
    return rev_options


# Make the Subversion backend available to pip's VCS dispatch registry.
vcs.register(Subversion)
vcs/git.py000064400000026454151733136520006515 0ustar00from __future__ import absolute_import

import logging
import tempfile
import os.path

from pip.compat import samefile
from pip.exceptions import BadCommand
from pip._vendor.six.moves.urllib import parse as urllib_parse
from pip._vendor.six.moves.urllib import request as urllib_request
from pip._vendor.packaging.version import parse as parse_version

from pip.utils import display_path, rmtree
from pip.vcs import vcs, VersionControl


# Short aliases for the vendored urllib.parse helpers used below.
urlsplit = urllib_parse.urlsplit
urlunsplit = urllib_parse.urlunsplit


# Module-level logger, named after this module per stdlib convention.
logger = logging.getLogger(__name__)


class Git(VersionControl):
    """Version-control backend for Git repositories."""

    name = 'git'
    dirname = '.git'
    repo_name = 'clone'
    schemes = (
        'git', 'git+http', 'git+https', 'git+ssh', 'git+git', 'git+file',
    )

    def __init__(self, url=None, *args, **kwargs):
        """Normalize file-scheme URLs before handing off to the base class."""
        # Works around an apparent Git bug
        # (see http://article.gmane.org/gmane.comp.version-control.git/146500)
        if url:
            scheme, netloc, path, query, fragment = urlsplit(url)
            if scheme.endswith('file'):
                initial_slashes = path[:-len(path.lstrip('/'))]
                newpath = (
                    initial_slashes +
                    urllib_request.url2pathname(path)
                    .replace('\\', '/').lstrip('/')
                )
                url = urlunsplit((scheme, netloc, newpath, query, fragment))
                after_plus = scheme.find('+') + 1
                url = scheme[:after_plus] + urlunsplit(
                    (scheme[after_plus:], netloc, newpath, query, fragment),
                )

        super(Git, self).__init__(url, *args, **kwargs)

    def get_git_version(self):
        """Return the installed git version as a packaging Version object."""
        VERSION_PFX = 'git version '
        version = self.run_command(['version'], show_stdout=False)
        if version.startswith(VERSION_PFX):
            version = version[len(VERSION_PFX):]
        else:
            version = ''
        # get first 3 positions of the git version becasue
        # on windows it is x.y.z.windows.t, and this parses as
        # LegacyVersion which always smaller than a Version.
        version = '.'.join(version.split('.')[:3])
        return parse_version(version)

    def export(self, location):
        """Export the Git repository at the url to the destination location"""
        temp_dir = tempfile.mkdtemp('-export', 'pip-')
        self.unpack(temp_dir)
        try:
            if not location.endswith('/'):
                location = location + '/'
            # checkout-index writes the worktree contents without the .git dir.
            self.run_command(
                ['checkout-index', '-a', '-f', '--prefix', location],
                show_stdout=False, cwd=temp_dir)
        finally:
            rmtree(temp_dir)

    def check_rev_options(self, rev, dest, rev_options):
        """Check the revision options before checkout to compensate that tags
        and branches may need origin/ as a prefix.
        Returns the SHA1 of the branch or tag if found.
        """
        revisions = self.get_short_refs(dest, rev)

        origin_rev = 'origin/%s' % rev
        if origin_rev in revisions:
            # remote branch
            return [revisions[origin_rev]]
        elif rev in revisions:
            # a local tag or branch name
            return [revisions[rev]]
        else:
            # Not a known ref; fall back to treating rev as a commit sha.
            logger.warning(
                "Could not find a tag or branch '%s', assuming commit.", rev,
            )
            return rev_options

    def check_version(self, dest, rev_options):
        """
        Compare the current sha to the ref. ref may be a branch or tag name,
        but current rev will always point to a sha. This means that a branch
        or tag will never compare as True. So this ultimately only matches
        against exact shas.
        """
        return self.get_revision(dest).startswith(rev_options[0])

    def switch(self, dest, url, rev_options):
        """Repoint the clone at ``dest`` to ``url`` and check out the rev."""
        self.run_command(['config', 'remote.origin.url', url], cwd=dest)
        self.run_command(['checkout', '-q'] + rev_options, cwd=dest)

        self.update_submodules(dest)

    def update(self, dest, rev_options):
        """Fetch from origin and hard-reset ``dest`` to the wanted revision."""
        # First fetch changes from the default remote
        if self.get_git_version() >= parse_version('1.9.0'):
            # fetch tags in addition to everything else
            self.run_command(['fetch', '-q', '--tags'], cwd=dest)
        else:
            self.run_command(['fetch', '-q'], cwd=dest)
        # Then reset to wanted revision (maybe even origin/master)
        if rev_options:
            rev_options = self.check_rev_options(
                rev_options[0], dest, rev_options,
            )
        self.run_command(['reset', '--hard', '-q'] + rev_options, cwd=dest)
        #: update submodules
        self.update_submodules(dest)

    def obtain(self, dest):
        """Clone the repository into ``dest`` and check out the revision."""
        url, rev = self.get_url_rev()
        if rev:
            rev_options = [rev]
            rev_display = ' (to %s)' % rev
        else:
            rev_options = ['origin/master']
            rev_display = ''
        if self.check_destination(dest, url, rev_options, rev_display):
            logger.info(
                'Cloning %s%s to %s', url, rev_display, display_path(dest),
            )
            self.run_command(['clone', '-q', url, dest])

            if rev:
                rev_options = self.check_rev_options(rev, dest, rev_options)
                # Only do a checkout if rev_options differs from HEAD
                if not self.check_version(dest, rev_options):
                    self.run_command(
                        ['checkout', '-q'] + rev_options,
                        cwd=dest,
                    )
            #: repo may contain submodules
            self.update_submodules(dest)

    def get_url(self, location):
        """Return URL of the first remote encountered."""
        # Raw string so the backslash escapes reach the regex engine verbatim.
        remotes = self.run_command(
            ['config', '--get-regexp', r'remote\..*\.url'],
            show_stdout=False, cwd=location)
        remotes = remotes.splitlines()
        found_remote = remotes[0]
        # Prefer 'origin' over whichever remote happens to be listed first.
        for remote in remotes:
            if remote.startswith('remote.origin.url '):
                found_remote = remote
                break
        url = found_remote.split(' ')[1]
        return url.strip()

    def get_revision(self, location):
        """Return the sha of HEAD in the clone at ``location``."""
        current_rev = self.run_command(
            ['rev-parse', 'HEAD'], show_stdout=False, cwd=location)
        return current_rev.strip()

    def get_full_refs(self, location, pattern=''):
        """Yields tuples of (commit, ref) for branches and tags"""
        output = self.run_command(['show-ref', pattern],
                                  show_stdout=False, cwd=location)
        for line in output.split("\n"):
            line = line.rstrip("\r")
            if not line:
                continue
            try:
                commit, ref = line.split(' ', 1)
            except ValueError:
                # Include the offending line to simplify troubleshooting if
                # this error ever occurs.  %-formatting keeps this module
                # importable on Python 2 (f-strings require 3.6+).
                raise ValueError('unexpected show-ref line: %r' % line)
            yield commit.strip(), ref.strip()

    def is_ref_remote(self, ref):
        """True for refs under refs/remotes/ (remote-tracking branches)."""
        return ref.startswith('refs/remotes/')

    def is_ref_branch(self, ref):
        """True for refs under refs/heads/ (local branches)."""
        return ref.startswith('refs/heads/')

    def is_ref_tag(self, ref):
        """True for refs under refs/tags/."""
        return ref.startswith('refs/tags/')

    def is_ref_commit(self, ref):
        """A ref is a commit sha if it is not anything else"""
        return not any((
            self.is_ref_remote(ref),
            self.is_ref_branch(ref),
            self.is_ref_tag(ref),
        ))

    # Should deprecate `get_refs` since it's ambiguous
    def get_refs(self, location):
        return self.get_short_refs(location)

    def get_short_refs(self, location, pattern=''):
        """Return map of named refs (branches or tags) to commit hashes."""
        rv = {}
        for commit, ref in self.get_full_refs(location, pattern):
            ref_name = None
            if self.is_ref_remote(ref):
                ref_name = ref[len('refs/remotes/'):]
            elif self.is_ref_branch(ref):
                ref_name = ref[len('refs/heads/'):]
            elif self.is_ref_tag(ref):
                ref_name = ref[len('refs/tags/'):]
            if ref_name is not None:
                rv[ref_name] = commit
        return rv

    def _get_subdirectory(self, location):
        """Return the relative path of setup.py to the git repo root."""
        # find the repo root
        git_dir = self.run_command(['rev-parse', '--git-dir'],
                                   show_stdout=False, cwd=location).strip()
        if not os.path.isabs(git_dir):
            git_dir = os.path.join(location, git_dir)
        root_dir = os.path.join(git_dir, '..')
        # find setup.py
        orig_location = location
        while not os.path.exists(os.path.join(location, 'setup.py')):
            last_location = location
            location = os.path.dirname(location)
            if location == last_location:
                # We've traversed up to the root of the filesystem without
                # finding setup.py
                logger.warning(
                    "Could not find setup.py for directory %s (tried all "
                    "parent directories)",
                    orig_location,
                )
                return None
        # relative path of setup.py to repo root
        if samefile(root_dir, location):
            return None
        return os.path.relpath(location, root_dir)

    def get_src_requirement(self, dist, location):
        """Return a 'git+URL@sha#egg=name[&subdirectory=...]' string, or None.

        None is returned when no remote URL can be determined.
        """
        repo = self.get_url(location)
        # Guard before touching the URL: previously this check sat after
        # repo.lower(), which made it unreachable (the call would have
        # raised first on a falsy repo).
        if not repo:
            return None
        if not repo.lower().startswith('git:'):
            repo = 'git+' + repo
        egg_project_name = dist.egg_name().split('-', 1)[0]
        current_rev = self.get_revision(location)
        req = '%s@%s#egg=%s' % (repo, current_rev, egg_project_name)
        subdirectory = self._get_subdirectory(location)
        if subdirectory:
            req += '&subdirectory=' + subdirectory
        return req

    def get_url_rev(self):
        """
        Prefixes stub URLs like 'user@hostname:user/repo.git' with 'ssh://'.
        That's required because although they use SSH they sometimes doesn't
        work with a ssh:// scheme (e.g. Github). But we need a scheme for
        parsing. Hence we remove it again afterwards and return it as a stub.
        """
        if '://' not in self.url:
            assert 'file:' not in self.url
            self.url = self.url.replace('git+', 'git+ssh://')
            url, rev = super(Git, self).get_url_rev()
            url = url.replace('ssh://', '')
        else:
            url, rev = super(Git, self).get_url_rev()

        return url, rev

    def update_submodules(self, location):
        """Initialize and update submodules if a .gitmodules file exists."""
        if not os.path.exists(os.path.join(location, '.gitmodules')):
            return
        self.run_command(
            ['submodule', 'update', '--init', '--recursive', '-q'],
            cwd=location,
        )

    @classmethod
    def controls_location(cls, location):
        """Return True if ``location`` lies inside a git working tree."""
        if super(Git, cls).controls_location(location):
            return True
        try:
            # 'git rev-parse' exits non-zero outside a repository.
            r = cls().run_command(['rev-parse'],
                                  cwd=location,
                                  show_stdout=False,
                                  on_returncode='ignore')
            return not r
        except BadCommand:
            logger.debug("could not determine if %s is under git control "
                         "because git is not available", location)
            return False


# Make the Git backend available to pip's VCS dispatch registry.
vcs.register(Git)
exceptions.py000064400000017671151733136520007321 0ustar00"""Exceptions used throughout package"""
from __future__ import absolute_import

from itertools import chain, groupby, repeat

from pip._vendor.six import iteritems


class PipError(Exception):
    """Base pip exception; all pip-specific errors derive from this."""


class InstallationError(PipError):
    """General exception raised during package installation."""


class UninstallationError(PipError):
    """General exception raised during package uninstallation."""


class DistributionNotFound(InstallationError):
    """Raised when no distribution can be found to satisfy a requirement."""


class RequirementsFileParseError(InstallationError):
    """Raised when a general error occurs parsing a requirements file line."""


class BestVersionAlreadyInstalled(PipError):
    """Raised when the most up-to-date version of a package is already
    installed."""


class BadCommand(PipError):
    """Raised when virtualenv or a required external command is not found."""


class CommandError(PipError):
    """Raised when there is an error in command-line arguments."""


class PreviousBuildDirError(PipError):
    """Raised when there's a previous conflicting build directory."""


class InvalidWheelFilename(InstallationError):
    """Raised for a wheel whose filename cannot be parsed."""


class UnsupportedWheel(InstallationError):
    """Raised for a wheel that pip cannot use."""


class HashErrors(InstallationError):
    """Multiple HashError instances rolled into one for reporting"""

    def __init__(self):
        # Accumulated HashError instances, in insertion order.
        self.errors = []

    def append(self, error):
        """Add one HashError to the collection."""
        self.errors.append(error)

    def __str__(self):
        """Render all errors, grouped by class under each class's heading.

        Always returns a string: the previous implementation implicitly
        returned None when no errors were collected, which made str()
        raise TypeError; an empty string is returned instead.
        """
        lines = []
        # Sort so grouping by class is stable and the report order is
        # deterministic (HashError.order ranks classes by severity).
        self.errors.sort(key=lambda e: e.order)
        for cls, errors_of_cls in groupby(self.errors, lambda e: e.__class__):
            lines.append(cls.head)
            lines.extend(e.body() for e in errors_of_cls)
        return '\n'.join(lines)

    def __nonzero__(self):
        # Python 2 truthiness: truthy when any errors have been collected.
        return bool(self.errors)

    def __bool__(self):
        # Python 3 truthiness delegates to the Python 2 implementation.
        return self.__nonzero__()


class HashError(InstallationError):
    """
    A failure to verify a package against known-good hashes

    :cvar order: An int sorting hash exception classes by difficulty of
        recovery (lower being harder), so the user doesn't bother fretting
        about unpinned packages when he has deeper issues, like VCS
        dependencies, to deal with. Also keeps error reports in a
        deterministic order.
    :cvar head: A section heading for display above potentially many
        exceptions of this kind
    :ivar req: The InstallRequirement that triggered this error. This is
        pasted on after the exception is instantiated, because it's not
        typically available earlier.

    """
    req = None
    head = ''

    def body(self):
        """Return a summary of me for display under the heading.

        This default implementation simply prints a description of the
        triggering requirement.

        :param req: The InstallRequirement that provoked this error, with
            populate_link() having already been called

        """
        return '    ' + self._requirement_name()

    def __str__(self):
        return '{0}\n{1}'.format(self.head, self.body())

    def _requirement_name(self):
        """Return a description of the requirement that triggered me.

        This default implementation returns long description of the req, with
        line numbers

        """
        if self.req:
            return str(self.req)
        return 'unknown package'


class VcsHashUnsupported(HashError):
    """A hash was provided for a version-control-system-based requirement, but
    we don't have a method for hashing those."""

    order = 0
    head = ("Can't verify hashes for these requirements because we don't "
            "have a way to hash version control repositories:")


class DirectoryUrlHashUnsupported(HashError):
    """A hash was provided for a file:// requirement that points to a
    directory, and we don't have a method for hashing those."""

    order = 1
    head = ("Can't verify hashes for these file:// requirements because they "
            "point to directories:")


class HashMissing(HashError):
    """A hash was needed for a requirement but is absent."""

    order = 2
    head = ('Hashes are required in --require-hashes mode, but they are '
            'missing from some requirements. Here is a list of those '
            'requirements along with the hashes their downloaded archives '
            'actually had. Add lines like these to your requirements files to '
            'prevent tampering. (If you did not enable --require-hashes '
            'manually, note that it turns on automatically when any package '
            'has a hash.)')

    def __init__(self, gotten_hash):
        """
        :param gotten_hash: The hash of the (possibly malicious) archive we
            just downloaded
        """
        self.gotten_hash = gotten_hash

    def body(self):
        """Render '    <package> --hash=<alg>:<digest>' for this error."""
        from pip.utils.hashes import FAVORITE_HASH  # Dodge circular import.

        package = None
        if self.req:
            # Prefer the original requirements-file URL over the package
            # name, so the output can be pasted straight back into the
            # requirements file.
            if self.req.original_link:
                package = self.req.original_link
            else:
                # In case someone feeds something downright stupid
                # to InstallRequirement's constructor.
                package = getattr(self.req, 'req', None)
        return '    %s --hash=%s:%s' % (package or 'unknown package',
                                        FAVORITE_HASH,
                                        self.gotten_hash)


class HashUnpinned(HashError):
    """A requirement had a hash specified but was not pinned to a specific
    version."""

    order = 3
    head = ('In --require-hashes mode, all requirements must have their '
            'versions pinned with ==. These do not:')


class HashMismatch(HashError):
    """
    Distribution file hash values don't match.

    :ivar package_name: The name of the package that triggered the hash
        mismatch. Feel free to write to this after the exception is raised to
        improve its error message.

    """
    order = 4
    head = ('THESE PACKAGES DO NOT MATCH THE HASHES FROM THE REQUIREMENTS '
            'FILE. If you have updated the package versions, please update '
            'the hashes. Otherwise, examine the package contents carefully; '
            'someone may have tampered with them.')

    def __init__(self, allowed, gots):
        """
        :param allowed: A dict of algorithm names pointing to lists of allowed
            hex digests
        :param gots: A dict of algorithm names pointing to hashes we
            actually got from the files under suspicion
        """
        self.allowed = allowed
        self.gots = gots

    def body(self):
        """Render the requirement name followed by the hash comparison."""
        return '    %s:\n%s' % (self._requirement_name(),
                                self._hash_comparison())

    def _hash_comparison(self):
        """
        Return a comparison of actual and expected hash values.

        Example::

               Expected sha256 abcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcde
                            or 123451234512345123451234512345123451234512345
                    Got        bcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdef

        """
        def hash_then_or(hash_name):
            # For now, all the decent hashes have 6-char names, so we can get
            # away with hard-coding space literals.
            return chain([hash_name], repeat('    or'))

        lines = []
        for hash_name, expecteds in iteritems(self.allowed):
            # The iterator yields the algorithm name once, then '    or'
            # forever, so only the first expected digest shows the name.
            prefix = hash_then_or(hash_name)
            lines.extend(('        Expected %s %s' % (next(prefix), e))
                         for e in expecteds)
            lines.append('             Got        %s\n' %
                         self.gots[hash_name].hexdigest())
            # (A dead re-assignment of `prefix` that was overwritten at the
            # top of every iteration has been removed.)
        return '\n'.join(lines)


class UnsupportedPythonVersion(InstallationError):
    """Unsupported python version according to Requires-Python package
    metadata."""