Mirror of https://github.com/jellyfin/jellyfin-kodi.git (synced 2025-05-07 18:08:50 +00:00)

Commit: Move libraries import
This commit is contained in: parent 3ea6890c34, commit b2bc90cb06
209 changed files with 36 additions and 14941 deletions
93  libraries/requests/packages/urllib3/__init__.py  Normal file
@@ -0,0 +1,93 @@
"""
urllib3 - Thread-safe connection pooling and re-using.
"""

from __future__ import absolute_import
import warnings

from .connectionpool import (
    HTTPConnectionPool,
    HTTPSConnectionPool,
    connection_from_url
)

from . import exceptions
from .filepost import encode_multipart_formdata
from .poolmanager import PoolManager, ProxyManager, proxy_from_url
from .response import HTTPResponse
from .util.request import make_headers
from .util.url import get_host
from .util.timeout import Timeout
from .util.retry import Retry


# Set default logging handler to avoid "No handler found" warnings.
import logging
try:  # Python 2.7+
    from logging import NullHandler
except ImportError:
    class NullHandler(logging.Handler):
        def emit(self, record):
            pass

__author__ = 'Andrey Petrov (andrey.petrov@shazow.net)'
__license__ = 'MIT'
__version__ = '1.13.1'

__all__ = (
    'HTTPConnectionPool',
    'HTTPSConnectionPool',
    'PoolManager',
    'ProxyManager',
    'HTTPResponse',
    'Retry',
    'Timeout',
    'add_stderr_logger',
    'connection_from_url',
    'disable_warnings',
    'encode_multipart_formdata',
    'get_host',
    'make_headers',
    'proxy_from_url',
)

logging.getLogger(__name__).addHandler(NullHandler())


def add_stderr_logger(level=logging.DEBUG):
    """
    Helper for quickly adding a StreamHandler to the logger. Useful for
    debugging.

    Returns the handler after adding it.
    """
    # This method needs to be in this __init__.py to get the __name__ correct
    # even if urllib3 is vendored within another package.
    logger = logging.getLogger(__name__)
    handler = logging.StreamHandler()
    handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s %(message)s'))
    logger.addHandler(handler)
    logger.setLevel(level)
    logger.debug('Added a stderr logging handler to logger: %s' % __name__)
    return handler

# ... Clean up.
del NullHandler


# SecurityWarning's always go off by default.
warnings.simplefilter('always', exceptions.SecurityWarning, append=True)
# SubjectAltNameWarning's should go off once per host
warnings.simplefilter('default', exceptions.SubjectAltNameWarning)
# InsecurePlatformWarning's don't vary between requests, so we keep it default.
warnings.simplefilter('default', exceptions.InsecurePlatformWarning,
                      append=True)
# SNIMissingWarnings should go off only once.
warnings.simplefilter('default', exceptions.SNIMissingWarning)


def disable_warnings(category=exceptions.HTTPWarning):
    """
    Helper for quickly disabling all urllib3 warnings.
    """
    warnings.simplefilter('ignore', category)
324  libraries/requests/packages/urllib3/_collections.py  Normal file
@@ -0,0 +1,324 @@
from __future__ import absolute_import
from collections import Mapping, MutableMapping
try:
    from threading import RLock
except ImportError:  # Platform-specific: No threads available
    class RLock:
        def __enter__(self):
            pass

        def __exit__(self, exc_type, exc_value, traceback):
            pass


try:  # Python 2.7+
    from collections import OrderedDict
except ImportError:
    from .packages.ordered_dict import OrderedDict
from .packages.six import iterkeys, itervalues, PY3


__all__ = ['RecentlyUsedContainer', 'HTTPHeaderDict']


_Null = object()


class RecentlyUsedContainer(MutableMapping):
    """
    Provides a thread-safe dict-like container which maintains up to
    ``maxsize`` keys while throwing away the least-recently-used keys beyond
    ``maxsize``.

    :param maxsize:
        Maximum number of recent elements to retain.

    :param dispose_func:
        Every time an item is evicted from the container,
        ``dispose_func(value)`` is called on the evicted value.
    """

    ContainerCls = OrderedDict

    def __init__(self, maxsize=10, dispose_func=None):
        self._maxsize = maxsize
        self.dispose_func = dispose_func

        self._container = self.ContainerCls()
        self.lock = RLock()

    def __getitem__(self, key):
        # Re-insert the item, moving it to the end of the eviction line.
        with self.lock:
            item = self._container.pop(key)
            self._container[key] = item
            return item

    def __setitem__(self, key, value):
        evicted_value = _Null
        with self.lock:
            # Possibly evict the existing value of 'key'
            evicted_value = self._container.get(key, _Null)
            self._container[key] = value

            # If we didn't evict an existing value, we might have to evict the
            # least recently used item from the beginning of the container.
            if len(self._container) > self._maxsize:
                _key, evicted_value = self._container.popitem(last=False)

        if self.dispose_func and evicted_value is not _Null:
            self.dispose_func(evicted_value)

    def __delitem__(self, key):
        with self.lock:
            value = self._container.pop(key)

        if self.dispose_func:
            self.dispose_func(value)

    def __len__(self):
        with self.lock:
            return len(self._container)

    def __iter__(self):
        raise NotImplementedError('Iteration over this class is unlikely to be threadsafe.')

    def clear(self):
        with self.lock:
            # Copy pointers to all values, then wipe the mapping
            values = list(itervalues(self._container))
            self._container.clear()

        if self.dispose_func:
            for value in values:
                self.dispose_func(value)

    def keys(self):
        with self.lock:
            return list(iterkeys(self._container))


class HTTPHeaderDict(MutableMapping):
    """
    :param headers:
        An iterable of field-value pairs. Must not contain multiple field names
        when compared case-insensitively.

    :param kwargs:
        Additional field-value pairs to pass in to ``dict.update``.

    A ``dict`` like container for storing HTTP Headers.

    Field names are stored and compared case-insensitively in compliance with
    RFC 7230. Iteration provides the first case-sensitive key seen for each
    case-insensitive pair.

    Using ``__setitem__`` syntax overwrites fields that compare equal
    case-insensitively in order to maintain ``dict``'s api. For fields that
    compare equal, instead create a new ``HTTPHeaderDict`` and use ``.add``
    in a loop.

    If multiple fields that are equal case-insensitively are passed to the
    constructor or ``.update``, the behavior is undefined and some will be
    lost.

    >>> headers = HTTPHeaderDict()
    >>> headers.add('Set-Cookie', 'foo=bar')
    >>> headers.add('set-cookie', 'baz=quxx')
    >>> headers['content-length'] = '7'
    >>> headers['SET-cookie']
    'foo=bar, baz=quxx'
    >>> headers['Content-Length']
    '7'
    """

    def __init__(self, headers=None, **kwargs):
        super(HTTPHeaderDict, self).__init__()
        self._container = {}
        if headers is not None:
            if isinstance(headers, HTTPHeaderDict):
                self._copy_from(headers)
            else:
                self.extend(headers)
        if kwargs:
            self.extend(kwargs)

    def __setitem__(self, key, val):
        self._container[key.lower()] = (key, val)
        return self._container[key.lower()]

    def __getitem__(self, key):
        val = self._container[key.lower()]
        return ', '.join(val[1:])

    def __delitem__(self, key):
        del self._container[key.lower()]

    def __contains__(self, key):
        return key.lower() in self._container

    def __eq__(self, other):
        if not isinstance(other, Mapping) and not hasattr(other, 'keys'):
            return False
        if not isinstance(other, type(self)):
            other = type(self)(other)
        return (dict((k.lower(), v) for k, v in self.itermerged()) ==
                dict((k.lower(), v) for k, v in other.itermerged()))

    def __ne__(self, other):
        return not self.__eq__(other)

    if not PY3:  # Python 2
        iterkeys = MutableMapping.iterkeys
        itervalues = MutableMapping.itervalues

    __marker = object()

    def __len__(self):
        return len(self._container)

    def __iter__(self):
        # Only provide the originally cased names
        for vals in self._container.values():
            yield vals[0]

    def pop(self, key, default=__marker):
        '''D.pop(k[,d]) -> v, remove specified key and return the corresponding value.
        If key is not found, d is returned if given, otherwise KeyError is raised.
        '''
        # Using the MutableMapping function directly fails due to the private marker.
        # Using ordinary dict.pop would expose the internal structures.
        # So let's reinvent the wheel.
        try:
            value = self[key]
        except KeyError:
            if default is self.__marker:
                raise
            return default
        else:
            del self[key]
            return value

    def discard(self, key):
        try:
            del self[key]
        except KeyError:
            pass

    def add(self, key, val):
        """Adds a (name, value) pair, doesn't overwrite the value if it already
        exists.

        >>> headers = HTTPHeaderDict(foo='bar')
        >>> headers.add('Foo', 'baz')
        >>> headers['foo']
        'bar, baz'
        """
        key_lower = key.lower()
        new_vals = key, val
        # Keep the common case aka no item present as fast as possible
        vals = self._container.setdefault(key_lower, new_vals)
        if new_vals is not vals:
            # new_vals was not inserted, as there was a previous one
            if isinstance(vals, list):
                # If already several items got inserted, we have a list
                vals.append(val)
            else:
                # vals should be a tuple then, i.e. only one item so far
                # Need to convert the tuple to list for further extension
                self._container[key_lower] = [vals[0], vals[1], val]

    def extend(self, *args, **kwargs):
        """Generic import function for any type of header-like object.
        Adapted version of MutableMapping.update in order to insert items
        with self.add instead of self.__setitem__
        """
        if len(args) > 1:
            raise TypeError("extend() takes at most 1 positional "
                            "arguments ({0} given)".format(len(args)))
        other = args[0] if len(args) >= 1 else ()

        if isinstance(other, HTTPHeaderDict):
            for key, val in other.iteritems():
                self.add(key, val)
        elif isinstance(other, Mapping):
            for key in other:
                self.add(key, other[key])
        elif hasattr(other, "keys"):
            for key in other.keys():
                self.add(key, other[key])
        else:
            for key, value in other:
                self.add(key, value)

        for key, value in kwargs.items():
            self.add(key, value)

    def getlist(self, key):
        """Returns a list of all the values for the named field. Returns an
        empty list if the key doesn't exist."""
        try:
            vals = self._container[key.lower()]
        except KeyError:
            return []
        else:
            if isinstance(vals, tuple):
                return [vals[1]]
            else:
                return vals[1:]

    # Backwards compatibility for httplib
    getheaders = getlist
    getallmatchingheaders = getlist
    iget = getlist

    def __repr__(self):
        return "%s(%s)" % (type(self).__name__, dict(self.itermerged()))

    def _copy_from(self, other):
        for key in other:
            val = other.getlist(key)
            if isinstance(val, list):
                # Don't need to convert tuples
                val = list(val)
            self._container[key.lower()] = [key] + val

    def copy(self):
        clone = type(self)()
        clone._copy_from(self)
        return clone

    def iteritems(self):
        """Iterate over all header lines, including duplicate ones."""
        for key in self:
            vals = self._container[key.lower()]
            for val in vals[1:]:
                yield vals[0], val

    def itermerged(self):
        """Iterate over all headers, merging duplicate ones together."""
        for key in self:
            val = self._container[key.lower()]
            yield val[0], ', '.join(val[1:])

    def items(self):
        return list(self.iteritems())

    @classmethod
    def from_httplib(cls, message):  # Python 2
        """Read headers from a Python 2 httplib message object."""
        # python2.7 does not expose a proper API for exporting multiheaders
        # efficiently. This function re-reads raw lines from the message
        # object and extracts the multiheaders properly.
        headers = []

        for line in message.headers:
            if line.startswith((' ', '\t')):
                key, value = headers[-1]
                headers[-1] = (key, value + '\r\n' + line.rstrip())
                continue

            key, value = line.split(':', 1)
            headers.append((key, value.strip()))

        return cls(headers)
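A short sketch of how these two containers behave, mirroring the docstrings above (the vendored import path is an assumption):

    # Hedged sketch, not part of the commit.
    from requests.packages.urllib3._collections import (
        HTTPHeaderDict, RecentlyUsedContainer)

    # Duplicate header fields are kept and merged on read, per RFC 7230.
    headers = HTTPHeaderDict()
    headers.add('Set-Cookie', 'foo=bar')
    headers.add('set-cookie', 'baz=qux')   # same field, compared case-insensitively
    assert headers['SET-COOKIE'] == 'foo=bar, baz=qux'
    assert headers.getlist('set-cookie') == ['foo=bar', 'baz=qux']

    # The LRU container calls dispose_func on evicted values; PoolManager
    # relies on this to close connection pools that age out of its cache.
    def _disposed(value):
        print('evicted: %r' % value)

    lru = RecentlyUsedContainer(maxsize=2, dispose_func=_disposed)
    lru['a'] = 1
    lru['b'] = 2
    lru['a']            # touching 'a' makes 'b' the least recently used
    lru['c'] = 3        # evicts 'b' and prints "evicted: 2"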
288  libraries/requests/packages/urllib3/connection.py  Normal file
@@ -0,0 +1,288 @@
from __future__ import absolute_import
import datetime
import os
import sys
import socket
from socket import error as SocketError, timeout as SocketTimeout
import warnings
from .packages import six

try:  # Python 3
    from http.client import HTTPConnection as _HTTPConnection
    from http.client import HTTPException  # noqa: unused in this module
except ImportError:
    from httplib import HTTPConnection as _HTTPConnection
    from httplib import HTTPException  # noqa: unused in this module

try:  # Compiled with SSL?
    import ssl
    BaseSSLError = ssl.SSLError
except (ImportError, AttributeError):  # Platform-specific: No SSL.
    ssl = None

    class BaseSSLError(BaseException):
        pass


try:  # Python 3:
    # Not a no-op, we're adding this to the namespace so it can be imported.
    ConnectionError = ConnectionError
except NameError:  # Python 2:
    class ConnectionError(Exception):
        pass


from .exceptions import (
    NewConnectionError,
    ConnectTimeoutError,
    SubjectAltNameWarning,
    SystemTimeWarning,
)
from .packages.ssl_match_hostname import match_hostname

from .util.ssl_ import (
    resolve_cert_reqs,
    resolve_ssl_version,
    ssl_wrap_socket,
    assert_fingerprint,
)


from .util import connection

port_by_scheme = {
    'http': 80,
    'https': 443,
}

RECENT_DATE = datetime.date(2014, 1, 1)


class DummyConnection(object):
    """Used to detect a failed ConnectionCls import."""
    pass


class HTTPConnection(_HTTPConnection, object):
    """
    Based on httplib.HTTPConnection but provides an extra constructor
    backwards-compatibility layer between older and newer Pythons.

    Additional keyword parameters are used to configure attributes of the connection.
    Accepted parameters include:

    - ``strict``: See the documentation on :class:`urllib3.connectionpool.HTTPConnectionPool`
    - ``source_address``: Set the source address for the current connection.

      .. note:: This is ignored for Python 2.6. It is only applied for 2.7 and 3.x

    - ``socket_options``: Set specific options on the underlying socket. If not specified, then
      defaults are loaded from ``HTTPConnection.default_socket_options`` which includes disabling
      Nagle's algorithm (sets TCP_NODELAY to 1) unless the connection is behind a proxy.

      For example, if you wish to enable TCP Keep Alive in addition to the defaults,
      you might pass::

          HTTPConnection.default_socket_options + [
              (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1),
          ]

      Or you may want to disable the defaults by passing an empty list (e.g., ``[]``).
    """

    default_port = port_by_scheme['http']

    #: Disable Nagle's algorithm by default.
    #: ``[(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]``
    default_socket_options = [(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]

    #: Whether this connection verifies the host's certificate.
    is_verified = False

    def __init__(self, *args, **kw):
        if six.PY3:  # Python 3
            kw.pop('strict', None)

        # Pre-set source_address in case we have an older Python like 2.6.
        self.source_address = kw.get('source_address')

        if sys.version_info < (2, 7):  # Python 2.6
            # _HTTPConnection on Python 2.6 will balk at this keyword arg, but
            # not newer versions. We can still use it when creating a
            # connection though, so we pop it *after* we have saved it as
            # self.source_address.
            kw.pop('source_address', None)

        #: The socket options provided by the user. If no options are
        #: provided, we use the default options.
        self.socket_options = kw.pop('socket_options', self.default_socket_options)

        # Superclass also sets self.source_address in Python 2.7+.
        _HTTPConnection.__init__(self, *args, **kw)

    def _new_conn(self):
        """ Establish a socket connection and set nodelay settings on it.

        :return: New socket connection.
        """
        extra_kw = {}
        if self.source_address:
            extra_kw['source_address'] = self.source_address

        if self.socket_options:
            extra_kw['socket_options'] = self.socket_options

        try:
            conn = connection.create_connection(
                (self.host, self.port), self.timeout, **extra_kw)

        except SocketTimeout as e:
            raise ConnectTimeoutError(
                self, "Connection to %s timed out. (connect timeout=%s)" %
                (self.host, self.timeout))

        except SocketError as e:
            raise NewConnectionError(
                self, "Failed to establish a new connection: %s" % e)

        return conn

    def _prepare_conn(self, conn):
        self.sock = conn
        # the _tunnel_host attribute was added in python 2.6.3 (via
        # http://hg.python.org/cpython/rev/0f57b30a152f) so pythons 2.6(0-2) do
        # not have them.
        if getattr(self, '_tunnel_host', None):
            # TODO: Fix tunnel so it doesn't depend on self.sock state.
            self._tunnel()
            # Mark this connection as not reusable
            self.auto_open = 0

    def connect(self):
        conn = self._new_conn()
        self._prepare_conn(conn)


class HTTPSConnection(HTTPConnection):
    default_port = port_by_scheme['https']

    def __init__(self, host, port=None, key_file=None, cert_file=None,
                 strict=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, **kw):

        HTTPConnection.__init__(self, host, port, strict=strict,
                                timeout=timeout, **kw)

        self.key_file = key_file
        self.cert_file = cert_file

        # Required property for Google AppEngine 1.9.0 which otherwise causes
        # HTTPS requests to go out as HTTP. (See Issue #356)
        self._protocol = 'https'

    def connect(self):
        conn = self._new_conn()
        self._prepare_conn(conn)
        self.sock = ssl.wrap_socket(conn, self.key_file, self.cert_file)


class VerifiedHTTPSConnection(HTTPSConnection):
    """
    Based on httplib.HTTPSConnection but wraps the socket with
    SSL certification.
    """
    cert_reqs = None
    ca_certs = None
    ca_cert_dir = None
    ssl_version = None
    assert_fingerprint = None

    def set_cert(self, key_file=None, cert_file=None,
                 cert_reqs=None, ca_certs=None,
                 assert_hostname=None, assert_fingerprint=None,
                 ca_cert_dir=None):

        if (ca_certs or ca_cert_dir) and cert_reqs is None:
            cert_reqs = 'CERT_REQUIRED'

        self.key_file = key_file
        self.cert_file = cert_file
        self.cert_reqs = cert_reqs
        self.assert_hostname = assert_hostname
        self.assert_fingerprint = assert_fingerprint
        self.ca_certs = ca_certs and os.path.expanduser(ca_certs)
        self.ca_cert_dir = ca_cert_dir and os.path.expanduser(ca_cert_dir)

    def connect(self):
        # Add certificate verification
        conn = self._new_conn()

        resolved_cert_reqs = resolve_cert_reqs(self.cert_reqs)
        resolved_ssl_version = resolve_ssl_version(self.ssl_version)

        hostname = self.host
        if getattr(self, '_tunnel_host', None):
            # _tunnel_host was added in Python 2.6.3
            # (See: http://hg.python.org/cpython/rev/0f57b30a152f)

            self.sock = conn
            # Calls self._set_hostport(), so self.host is
            # self._tunnel_host below.
            self._tunnel()
            # Mark this connection as not reusable
            self.auto_open = 0

            # Override the host with the one we're requesting data from.
            hostname = self._tunnel_host

        is_time_off = datetime.date.today() < RECENT_DATE
        if is_time_off:
            warnings.warn((
                'System time is way off (before {0}). This will probably '
                'lead to SSL verification errors').format(RECENT_DATE),
                SystemTimeWarning
            )

        # Wrap socket using verification with the root certs in
        # trusted_root_certs
        self.sock = ssl_wrap_socket(conn, self.key_file, self.cert_file,
                                    cert_reqs=resolved_cert_reqs,
                                    ca_certs=self.ca_certs,
                                    ca_cert_dir=self.ca_cert_dir,
                                    server_hostname=hostname,
                                    ssl_version=resolved_ssl_version)

        if self.assert_fingerprint:
            assert_fingerprint(self.sock.getpeercert(binary_form=True),
                               self.assert_fingerprint)
        elif resolved_cert_reqs != ssl.CERT_NONE \
                and self.assert_hostname is not False:
            cert = self.sock.getpeercert()
            if not cert.get('subjectAltName', ()):
                warnings.warn((
                    'Certificate for {0} has no `subjectAltName`, falling back to check for a '
                    '`commonName` for now. This feature is being removed by major browsers and '
                    'deprecated by RFC 2818. (See https://github.com/shazow/urllib3/issues/497 '
                    'for details.)'.format(hostname)),
                    SubjectAltNameWarning
                )

            # In case the hostname is an IPv6 address, strip the square
            # brackets from it before using it to validate. This is because
            # a certificate with an IPv6 address in it won't have square
            # brackets around that address. Sadly, match_hostname won't do this
            # for us: it expects the plain host part without any extra work
            # that might have been done to make it palatable to httplib.
            asserted_hostname = self.assert_hostname or hostname
            asserted_hostname = asserted_hostname.strip('[]')
            match_hostname(cert, asserted_hostname)

        self.is_verified = (resolved_cert_reqs == ssl.CERT_REQUIRED or
                            self.assert_fingerprint is not None)


if ssl:
    # Make a copy for testing.
    UnverifiedHTTPSConnection = HTTPSConnection
    HTTPSConnection = VerifiedHTTPSConnection
else:
    HTTPSConnection = DummyConnection
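Normally the pool in the next file drives these classes, but a hedged sketch of using VerifiedHTTPSConnection directly (the CA-bundle path and host are assumptions):

    # Hedged sketch, not part of the commit.
    from requests.packages.urllib3.connection import VerifiedHTTPSConnection

    conn = VerifiedHTTPSConnection('example.org', 443, timeout=10)
    conn.set_cert(cert_reqs='CERT_REQUIRED',
                  ca_certs='/etc/ssl/certs/ca-certificates.crt')  # assumed path
    conn.connect()                 # wraps the socket and verifies the certificate
    assert conn.is_verified
    conn.request('GET', '/')
    print(conn.getresponse().status)
    conn.close()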
818  libraries/requests/packages/urllib3/connectionpool.py  Normal file
@@ -0,0 +1,818 @@
from __future__ import absolute_import
import errno
import logging
import sys
import warnings

from socket import error as SocketError, timeout as SocketTimeout
import socket

try:  # Python 3
    from queue import LifoQueue, Empty, Full
except ImportError:
    from Queue import LifoQueue, Empty, Full
    # Queue is imported for side effects on MS Windows
    import Queue as _unused_module_Queue  # noqa: unused


from .exceptions import (
    ClosedPoolError,
    ProtocolError,
    EmptyPoolError,
    HeaderParsingError,
    HostChangedError,
    LocationValueError,
    MaxRetryError,
    ProxyError,
    ReadTimeoutError,
    SSLError,
    TimeoutError,
    InsecureRequestWarning,
    NewConnectionError,
)
from .packages.ssl_match_hostname import CertificateError
from .packages import six
from .connection import (
    port_by_scheme,
    DummyConnection,
    HTTPConnection, HTTPSConnection, VerifiedHTTPSConnection,
    HTTPException, BaseSSLError,
)
from .request import RequestMethods
from .response import HTTPResponse

from .util.connection import is_connection_dropped
from .util.response import assert_header_parsing
from .util.retry import Retry
from .util.timeout import Timeout
from .util.url import get_host, Url


xrange = six.moves.xrange

log = logging.getLogger(__name__)

_Default = object()


# Pool objects
class ConnectionPool(object):
    """
    Base class for all connection pools, such as
    :class:`.HTTPConnectionPool` and :class:`.HTTPSConnectionPool`.
    """

    scheme = None
    QueueCls = LifoQueue

    def __init__(self, host, port=None):
        if not host:
            raise LocationValueError("No host specified.")

        self.host = host
        self.port = port

    def __str__(self):
        return '%s(host=%r, port=%r)' % (type(self).__name__,
                                         self.host, self.port)

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.close()
        # Return False to re-raise any potential exceptions
        return False

    def close(self):
        """
        Close all pooled connections and disable the pool.
        """
        pass


# This is taken from http://hg.python.org/cpython/file/7aaba721ebc0/Lib/socket.py#l252
_blocking_errnos = set([errno.EAGAIN, errno.EWOULDBLOCK])


class HTTPConnectionPool(ConnectionPool, RequestMethods):
    """
    Thread-safe connection pool for one host.

    :param host:
        Host used for this HTTP Connection (e.g. "localhost"), passed into
        :class:`httplib.HTTPConnection`.

    :param port:
        Port used for this HTTP Connection (None is equivalent to 80), passed
        into :class:`httplib.HTTPConnection`.

    :param strict:
        Causes BadStatusLine to be raised if the status line can't be parsed
        as a valid HTTP/1.0 or 1.1 status line, passed into
        :class:`httplib.HTTPConnection`.

        .. note::
           Only works in Python 2. This parameter is ignored in Python 3.

    :param timeout:
        Socket timeout in seconds for each individual connection. This can
        be a float or integer, which sets the timeout for the HTTP request,
        or an instance of :class:`urllib3.util.Timeout` which gives you more
        fine-grained control over request timeouts. After the constructor has
        been parsed, this is always a `urllib3.util.Timeout` object.

    :param maxsize:
        Number of connections to save that can be reused. More than 1 is useful
        in multithreaded situations. If ``block`` is set to False, more
        connections will be created but they will not be saved once they've
        been used.

    :param block:
        If set to True, no more than ``maxsize`` connections will be used at
        a time. When no free connections are available, the call will block
        until a connection has been released. This is a useful side effect for
        particular multithreaded situations where one does not want to use more
        than maxsize connections per host to prevent flooding.

    :param headers:
        Headers to include with all requests, unless other headers are given
        explicitly.

    :param retries:
        Retry configuration to use by default with requests in this pool.

    :param _proxy:
        Parsed proxy URL, should not be used directly, instead, see
        :class:`urllib3.connectionpool.ProxyManager`

    :param _proxy_headers:
        A dictionary with proxy headers, should not be used directly,
        instead, see :class:`urllib3.connectionpool.ProxyManager`

    :param \**conn_kw:
        Additional parameters are used to create fresh :class:`urllib3.connection.HTTPConnection`,
        :class:`urllib3.connection.HTTPSConnection` instances.
    """

    scheme = 'http'
    ConnectionCls = HTTPConnection

    def __init__(self, host, port=None, strict=False,
                 timeout=Timeout.DEFAULT_TIMEOUT, maxsize=1, block=False,
                 headers=None, retries=None,
                 _proxy=None, _proxy_headers=None,
                 **conn_kw):
        ConnectionPool.__init__(self, host, port)
        RequestMethods.__init__(self, headers)

        self.strict = strict

        if not isinstance(timeout, Timeout):
            timeout = Timeout.from_float(timeout)

        if retries is None:
            retries = Retry.DEFAULT

        self.timeout = timeout
        self.retries = retries

        self.pool = self.QueueCls(maxsize)
        self.block = block

        self.proxy = _proxy
        self.proxy_headers = _proxy_headers or {}

        # Fill the queue up so that doing get() on it will block properly
        for _ in xrange(maxsize):
            self.pool.put(None)

        # These are mostly for testing and debugging purposes.
        self.num_connections = 0
        self.num_requests = 0
        self.conn_kw = conn_kw

        if self.proxy:
            # Enable Nagle's algorithm for proxies, to avoid packet fragmentation.
            # We cannot know if the user has added default socket options, so we cannot replace the
            # list.
            self.conn_kw.setdefault('socket_options', [])

    def _new_conn(self):
        """
        Return a fresh :class:`HTTPConnection`.
        """
        self.num_connections += 1
        log.info("Starting new HTTP connection (%d): %s" %
                 (self.num_connections, self.host))

        conn = self.ConnectionCls(host=self.host, port=self.port,
                                  timeout=self.timeout.connect_timeout,
                                  strict=self.strict, **self.conn_kw)
        return conn

    def _get_conn(self, timeout=None):
        """
        Get a connection. Will return a pooled connection if one is available.

        If no connections are available and :prop:`.block` is ``False``, then a
        fresh connection is returned.

        :param timeout:
            Seconds to wait before giving up and raising
            :class:`urllib3.exceptions.EmptyPoolError` if the pool is empty and
            :prop:`.block` is ``True``.
        """
        conn = None
        try:
            conn = self.pool.get(block=self.block, timeout=timeout)

        except AttributeError:  # self.pool is None
            raise ClosedPoolError(self, "Pool is closed.")

        except Empty:
            if self.block:
                raise EmptyPoolError(self,
                                     "Pool reached maximum size and no more "
                                     "connections are allowed.")
            pass  # Oh well, we'll create a new connection then

        # If this is a persistent connection, check if it got disconnected
        if conn and is_connection_dropped(conn):
            log.info("Resetting dropped connection: %s" % self.host)
            conn.close()
            if getattr(conn, 'auto_open', 1) == 0:
                # This is a proxied connection that has been mutated by
                # httplib._tunnel() and cannot be reused (since it would
                # attempt to bypass the proxy)
                conn = None

        return conn or self._new_conn()

    def _put_conn(self, conn):
        """
        Put a connection back into the pool.

        :param conn:
            Connection object for the current host and port as returned by
            :meth:`._new_conn` or :meth:`._get_conn`.

        If the pool is already full, the connection is closed and discarded
        because we exceeded maxsize. If connections are discarded frequently,
        then maxsize should be increased.

        If the pool is closed, then the connection will be closed and discarded.
        """
        try:
            self.pool.put(conn, block=False)
            return  # Everything is dandy, done.
        except AttributeError:
            # self.pool is None.
            pass
        except Full:
            # This should never happen if self.block == True
            log.warning(
                "Connection pool is full, discarding connection: %s" %
                self.host)

        # Connection never got put back into the pool, close it.
        if conn:
            conn.close()

    def _validate_conn(self, conn):
        """
        Called right before a request is made, after the socket is created.
        """
        pass

    def _prepare_proxy(self, conn):
        # Nothing to do for HTTP connections.
        pass

    def _get_timeout(self, timeout):
        """ Helper that always returns a :class:`urllib3.util.Timeout` """
        if timeout is _Default:
            return self.timeout.clone()

        if isinstance(timeout, Timeout):
            return timeout.clone()
        else:
            # User passed us an int/float. This is for backwards compatibility,
            # can be removed later
            return Timeout.from_float(timeout)

    def _raise_timeout(self, err, url, timeout_value):
        """Is the error actually a timeout? Will raise a ReadTimeout or pass"""

        if isinstance(err, SocketTimeout):
            raise ReadTimeoutError(self, url, "Read timed out. (read timeout=%s)" % timeout_value)

        # See the above comment about EAGAIN in Python 3. In Python 2 we have
        # to specifically catch it and throw the timeout error
        if hasattr(err, 'errno') and err.errno in _blocking_errnos:
            raise ReadTimeoutError(self, url, "Read timed out. (read timeout=%s)" % timeout_value)

        # Catch possible read timeouts thrown as SSL errors. If not the
        # case, rethrow the original. We need to do this because of:
        # http://bugs.python.org/issue10272
        if 'timed out' in str(err) or 'did not complete (read)' in str(err):  # Python 2.6
            raise ReadTimeoutError(self, url, "Read timed out. (read timeout=%s)" % timeout_value)

    def _make_request(self, conn, method, url, timeout=_Default,
                      **httplib_request_kw):
        """
        Perform a request on a given urllib connection object taken from our
        pool.

        :param conn:
            a connection from one of our connection pools

        :param timeout:
            Socket timeout in seconds for the request. This can be a
            float or integer, which will set the same timeout value for
            the socket connect and the socket read, or an instance of
            :class:`urllib3.util.Timeout`, which gives you more fine-grained
            control over your timeouts.
        """
        self.num_requests += 1

        timeout_obj = self._get_timeout(timeout)
        timeout_obj.start_connect()
        conn.timeout = timeout_obj.connect_timeout

        # Trigger any extra validation we need to do.
        try:
            self._validate_conn(conn)
        except (SocketTimeout, BaseSSLError) as e:
            # Py2 raises this as a BaseSSLError, Py3 raises it as socket timeout.
            self._raise_timeout(err=e, url=url, timeout_value=conn.timeout)
            raise

        # conn.request() calls httplib.*.request, not the method in
        # urllib3.request. It also calls makefile (recv) on the socket.
        conn.request(method, url, **httplib_request_kw)

        # Reset the timeout for the recv() on the socket
        read_timeout = timeout_obj.read_timeout

        # App Engine doesn't have a sock attr
        if getattr(conn, 'sock', None):
            # In Python 3 socket.py will catch EAGAIN and return None when you
            # try and read into the file pointer created by http.client, which
            # instead raises a BadStatusLine exception. Instead of catching
            # the exception and assuming all BadStatusLine exceptions are read
            # timeouts, check for a zero timeout before making the request.
            if read_timeout == 0:
                raise ReadTimeoutError(
                    self, url, "Read timed out. (read timeout=%s)" % read_timeout)
            if read_timeout is Timeout.DEFAULT_TIMEOUT:
                conn.sock.settimeout(socket.getdefaulttimeout())
            else:  # None or a value
                conn.sock.settimeout(read_timeout)

        # Receive the response from the server
        try:
            try:  # Python 2.7, use buffering of HTTP responses
                httplib_response = conn.getresponse(buffering=True)
            except TypeError:  # Python 2.6 and older
                httplib_response = conn.getresponse()
        except (SocketTimeout, BaseSSLError, SocketError) as e:
            self._raise_timeout(err=e, url=url, timeout_value=read_timeout)
            raise

        # AppEngine doesn't have a version attr.
        http_version = getattr(conn, '_http_vsn_str', 'HTTP/?')
        log.debug("\"%s %s %s\" %s %s" % (method, url, http_version,
                                          httplib_response.status,
                                          httplib_response.length))

        try:
            assert_header_parsing(httplib_response.msg)
        except HeaderParsingError as hpe:  # Platform-specific: Python 3
            log.warning(
                'Failed to parse headers (url=%s): %s',
                self._absolute_url(url), hpe, exc_info=True)

        return httplib_response

    def _absolute_url(self, path):
        return Url(scheme=self.scheme, host=self.host, port=self.port, path=path).url

    def close(self):
        """
        Close all pooled connections and disable the pool.
        """
        # Disable access to the pool
        old_pool, self.pool = self.pool, None

        try:
            while True:
                conn = old_pool.get(block=False)
                if conn:
                    conn.close()

        except Empty:
            pass  # Done.

    def is_same_host(self, url):
        """
        Check if the given ``url`` is a member of the same host as this
        connection pool.
        """
        if url.startswith('/'):
            return True

        # TODO: Add optional support for socket.gethostbyname checking.
        scheme, host, port = get_host(url)

        # Use explicit default port for comparison when none is given
        if self.port and not port:
            port = port_by_scheme.get(scheme)
        elif not self.port and port == port_by_scheme.get(scheme):
            port = None

        return (scheme, host, port) == (self.scheme, self.host, self.port)

    def urlopen(self, method, url, body=None, headers=None, retries=None,
                redirect=True, assert_same_host=True, timeout=_Default,
                pool_timeout=None, release_conn=None, **response_kw):
        """
        Get a connection from the pool and perform an HTTP request. This is the
        lowest level call for making a request, so you'll need to specify all
        the raw details.

        .. note::

           More commonly, it's appropriate to use a convenience method provided
           by :class:`.RequestMethods`, such as :meth:`request`.

        .. note::

           `release_conn` will only behave as expected if
           `preload_content=False` because we want to make
           `preload_content=False` the default behaviour someday soon without
           breaking backwards compatibility.

        :param method:
            HTTP request method (such as GET, POST, PUT, etc.)

        :param body:
            Data to send in the request body (useful for creating
            POST requests, see HTTPConnectionPool.post_url for
            more convenience).

        :param headers:
            Dictionary of custom headers to send, such as User-Agent,
            If-None-Match, etc. If None, pool headers are used. If provided,
            these headers completely replace any pool-specific headers.

        :param retries:
            Configure the number of retries to allow before raising a
            :class:`~urllib3.exceptions.MaxRetryError` exception.

            Pass ``None`` to retry until you receive a response. Pass a
            :class:`~urllib3.util.retry.Retry` object for fine-grained control
            over different types of retries.
            Pass an integer number to retry connection errors that many times,
            but no other types of errors. Pass zero to never retry.

            If ``False``, then retries are disabled and any exception is raised
            immediately. Also, instead of raising a MaxRetryError on redirects,
            the redirect response will be returned.

        :type retries: :class:`~urllib3.util.retry.Retry`, False, or an int.

        :param redirect:
            If True, automatically handle redirects (status codes 301, 302,
            303, 307, 308). Each redirect counts as a retry. Disabling retries
            will disable redirect, too.

        :param assert_same_host:
            If ``True``, will make sure that the host of the pool requests is
            consistent else will raise HostChangedError. When False, you can
            use the pool on an HTTP proxy and request foreign hosts.

        :param timeout:
            If specified, overrides the default timeout for this one
            request. It may be a float (in seconds) or an instance of
            :class:`urllib3.util.Timeout`.

        :param pool_timeout:
            If set and the pool is set to block=True, then this method will
            block for ``pool_timeout`` seconds and raise EmptyPoolError if no
            connection is available within the time period.

        :param release_conn:
            If False, then the urlopen call will not release the connection
            back into the pool once a response is received (but will release if
            you read the entire contents of the response such as when
            `preload_content=True`). This is useful if you're not preloading
            the response's content immediately. You will need to call
            ``r.release_conn()`` on the response ``r`` to return the connection
            back into the pool. If None, it takes the value of
            ``response_kw.get('preload_content', True)``.

        :param \**response_kw:
            Additional parameters are passed to
            :meth:`urllib3.response.HTTPResponse.from_httplib`
        """
        if headers is None:
            headers = self.headers

        if not isinstance(retries, Retry):
            retries = Retry.from_int(retries, redirect=redirect, default=self.retries)

        if release_conn is None:
            release_conn = response_kw.get('preload_content', True)

        # Check host
        if assert_same_host and not self.is_same_host(url):
            raise HostChangedError(self, url, retries)

        conn = None

        # Merge the proxy headers. Only do this in HTTP. We have to copy the
        # headers dict so we can safely change it without those changes being
        # reflected in anyone else's copy.
        if self.scheme == 'http':
            headers = headers.copy()
            headers.update(self.proxy_headers)

        # Must keep the exception bound to a separate variable or else Python 3
        # complains about UnboundLocalError.
        err = None

        try:
            # Request a connection from the queue.
            timeout_obj = self._get_timeout(timeout)
            conn = self._get_conn(timeout=pool_timeout)

            conn.timeout = timeout_obj.connect_timeout

            is_new_proxy_conn = self.proxy is not None and not getattr(conn, 'sock', None)
            if is_new_proxy_conn:
                self._prepare_proxy(conn)

            # Make the request on the httplib connection object.
            httplib_response = self._make_request(conn, method, url,
                                                  timeout=timeout_obj,
                                                  body=body, headers=headers)

            # If we're going to release the connection in ``finally:``, then
            # the request doesn't need to know about the connection. Otherwise
            # it will also try to release it and we'll have a double-release
            # mess.
            response_conn = not release_conn and conn

            # Import httplib's response into our own wrapper object
            response = HTTPResponse.from_httplib(httplib_response,
                                                 pool=self,
                                                 connection=response_conn,
                                                 **response_kw)

            # else:
            #     The connection will be put back into the pool when
            #     ``response.release_conn()`` is called (implicitly by
            #     ``response.read()``)

        except Empty:
            # Timed out by queue.
            raise EmptyPoolError(self, "No pool connections are available.")

        except (BaseSSLError, CertificateError) as e:
            # Close the connection. If a connection is reused on which there
            # was a Certificate error, the next request will certainly raise
            # another Certificate error.
            conn = conn and conn.close()
            release_conn = True
            raise SSLError(e)

        except SSLError:
            # Treat SSLError separately from BaseSSLError to preserve
            # traceback.
            conn = conn and conn.close()
            release_conn = True
            raise

        except (TimeoutError, HTTPException, SocketError, ProtocolError) as e:
            # Discard the connection for these exceptions. It will be
            # replaced during the next _get_conn() call.
            conn = conn and conn.close()
            release_conn = True

            if isinstance(e, (SocketError, NewConnectionError)) and self.proxy:
                e = ProxyError('Cannot connect to proxy.', e)
            elif isinstance(e, (SocketError, HTTPException)):
                e = ProtocolError('Connection aborted.', e)

            retries = retries.increment(method, url, error=e, _pool=self,
                                        _stacktrace=sys.exc_info()[2])
            retries.sleep()

            # Keep track of the error for the retry warning.
            err = e

        finally:
            if release_conn:
                # Put the connection back to be reused. If the connection is
                # expired then it will be None, which will get replaced with a
                # fresh connection during _get_conn.
                self._put_conn(conn)

        if not conn:
            # Try again
            log.warning("Retrying (%r) after connection "
                        "broken by '%r': %s" % (retries, err, url))
            return self.urlopen(method, url, body, headers, retries,
                                redirect, assert_same_host,
                                timeout=timeout, pool_timeout=pool_timeout,
                                release_conn=release_conn, **response_kw)

        # Handle redirect?
        redirect_location = redirect and response.get_redirect_location()
        if redirect_location:
            if response.status == 303:
                method = 'GET'

            try:
                retries = retries.increment(method, url, response=response, _pool=self)
            except MaxRetryError:
                if retries.raise_on_redirect:
                    # Release the connection for this response, since we're not
                    # returning it to be released manually.
                    response.release_conn()
                    raise
                return response

            log.info("Redirecting %s -> %s" % (url, redirect_location))
            return self.urlopen(
                method, redirect_location, body, headers,
                retries=retries, redirect=redirect,
                assert_same_host=assert_same_host,
                timeout=timeout, pool_timeout=pool_timeout,
                release_conn=release_conn, **response_kw)

        # Check if we should retry the HTTP response.
        if retries.is_forced_retry(method, status_code=response.status):
            retries = retries.increment(method, url, response=response, _pool=self)
            retries.sleep()
            log.info("Forced retry: %s" % url)
            return self.urlopen(
                method, url, body, headers,
                retries=retries, redirect=redirect,
                assert_same_host=assert_same_host,
                timeout=timeout, pool_timeout=pool_timeout,
                release_conn=release_conn, **response_kw)

        return response


class HTTPSConnectionPool(HTTPConnectionPool):
    """
    Same as :class:`.HTTPConnectionPool`, but HTTPS.

    When Python is compiled with the :mod:`ssl` module, then
    :class:`.VerifiedHTTPSConnection` is used, which *can* verify certificates,
    instead of :class:`.HTTPSConnection`.

    :class:`.VerifiedHTTPSConnection` uses one of ``assert_fingerprint``,
    ``assert_hostname`` and ``host`` in this order to verify connections.
    If ``assert_hostname`` is False, no verification is done.

    The ``key_file``, ``cert_file``, ``cert_reqs``, ``ca_certs``,
    ``ca_cert_dir``, and ``ssl_version`` are only used if :mod:`ssl` is
    available and are fed into :meth:`urllib3.util.ssl_wrap_socket` to upgrade
    the connection socket into an SSL socket.
    """

    scheme = 'https'
    ConnectionCls = HTTPSConnection

    def __init__(self, host, port=None,
                 strict=False, timeout=Timeout.DEFAULT_TIMEOUT, maxsize=1,
                 block=False, headers=None, retries=None,
                 _proxy=None, _proxy_headers=None,
                 key_file=None, cert_file=None, cert_reqs=None,
                 ca_certs=None, ssl_version=None,
                 assert_hostname=None, assert_fingerprint=None,
                 ca_cert_dir=None, **conn_kw):

        HTTPConnectionPool.__init__(self, host, port, strict, timeout, maxsize,
                                    block, headers, retries, _proxy, _proxy_headers,
                                    **conn_kw)

        if ca_certs and cert_reqs is None:
            cert_reqs = 'CERT_REQUIRED'

        self.key_file = key_file
        self.cert_file = cert_file
        self.cert_reqs = cert_reqs
        self.ca_certs = ca_certs
        self.ca_cert_dir = ca_cert_dir
        self.ssl_version = ssl_version
        self.assert_hostname = assert_hostname
        self.assert_fingerprint = assert_fingerprint

    def _prepare_conn(self, conn):
        """
        Prepare the ``connection`` for :meth:`urllib3.util.ssl_wrap_socket`
        and establish the tunnel if proxy is used.
        """

        if isinstance(conn, VerifiedHTTPSConnection):
            conn.set_cert(key_file=self.key_file,
                          cert_file=self.cert_file,
                          cert_reqs=self.cert_reqs,
                          ca_certs=self.ca_certs,
                          ca_cert_dir=self.ca_cert_dir,
                          assert_hostname=self.assert_hostname,
                          assert_fingerprint=self.assert_fingerprint)
            conn.ssl_version = self.ssl_version

        return conn

    def _prepare_proxy(self, conn):
        """
        Establish tunnel connection early, because otherwise httplib
        would improperly set Host: header to proxy's IP:port.
        """
        # Python 2.7+
        try:
            set_tunnel = conn.set_tunnel
        except AttributeError:  # Platform-specific: Python 2.6
            set_tunnel = conn._set_tunnel

        if sys.version_info <= (2, 6, 4) and not self.proxy_headers:  # Python 2.6.4 and older
            set_tunnel(self.host, self.port)
        else:
            set_tunnel(self.host, self.port, self.proxy_headers)

        conn.connect()

    def _new_conn(self):
        """
        Return a fresh :class:`httplib.HTTPSConnection`.
        """
        self.num_connections += 1
        log.info("Starting new HTTPS connection (%d): %s"
                 % (self.num_connections, self.host))

        if not self.ConnectionCls or self.ConnectionCls is DummyConnection:
            raise SSLError("Can't connect to HTTPS URL because the SSL "
                           "module is not available.")

        actual_host = self.host
        actual_port = self.port
        if self.proxy is not None:
            actual_host = self.proxy.host
            actual_port = self.proxy.port

        conn = self.ConnectionCls(host=actual_host, port=actual_port,
                                  timeout=self.timeout.connect_timeout,
                                  strict=self.strict, **self.conn_kw)

        return self._prepare_conn(conn)

    def _validate_conn(self, conn):
        """
        Called right before a request is made, after the socket is created.
        """
        super(HTTPSConnectionPool, self)._validate_conn(conn)

        # Force connect early to allow us to validate the connection.
        if not getattr(conn, 'sock', None):  # AppEngine might not have `.sock`
            conn.connect()

        if not conn.is_verified:
            warnings.warn((
                'Unverified HTTPS request is being made. '
                'Adding certificate verification is strongly advised. See: '
                'https://urllib3.readthedocs.org/en/latest/security.html'),
                InsecureRequestWarning)


def connection_from_url(url, **kw):
    """
    Given a url, return an :class:`.ConnectionPool` instance of its host.

    This is a shortcut for not having to parse out the scheme, host, and port
    of the url before creating an :class:`.ConnectionPool` instance.

    :param url:
        Absolute URL string that must include the scheme. Port is optional.

    :param \**kw:
        Passes additional parameters to the constructor of the appropriate
        :class:`.ConnectionPool`. Useful for specifying things like
        timeout, maxsize, headers, etc.

    Example::

        >>> conn = connection_from_url('http://google.com/')
        >>> r = conn.request('GET', '/')
    """
    scheme, host, port = get_host(url)
    if scheme == 'https':
        return HTTPSConnectionPool(host, port=port, **kw)
    else:
        return HTTPConnectionPool(host, port=port, **kw)
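A hedged sketch of the pool in use, tying together the Timeout and Retry objects the docstrings above describe (import path assumed as before; host is a placeholder):

    # Hedged sketch, not part of the commit.
    from requests.packages.urllib3 import connection_from_url
    from requests.packages.urllib3.util.retry import Retry
    from requests.packages.urllib3.util.timeout import Timeout

    # Separate connect/read timeouts and a bounded retry budget per request.
    pool = connection_from_url('https://example.org/',
                               maxsize=4, block=True,
                               timeout=Timeout(connect=3.0, read=10.0),
                               retries=Retry(total=3, backoff_factor=0.5))
    r = pool.urlopen('GET', '/')
    print(r.status, len(r.data))
    pool.close()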
0  libraries/requests/packages/urllib3/contrib/__init__.py  Normal file
223  libraries/requests/packages/urllib3/contrib/appengine.py  Normal file
|
@ -0,0 +1,223 @@
|
|||
from __future__ import absolute_import
import logging
import os
import warnings

from ..exceptions import (
    HTTPError,
    HTTPWarning,
    MaxRetryError,
    ProtocolError,
    TimeoutError,
    SSLError
)

from ..packages.six import BytesIO
from ..request import RequestMethods
from ..response import HTTPResponse
from ..util.timeout import Timeout
from ..util.retry import Retry

try:
    from google.appengine.api import urlfetch
except ImportError:
    urlfetch = None


log = logging.getLogger(__name__)


class AppEnginePlatformWarning(HTTPWarning):
    pass


class AppEnginePlatformError(HTTPError):
    pass


class AppEngineManager(RequestMethods):
    """
    Connection manager for Google App Engine sandbox applications.

    This manager uses the URLFetch service directly instead of using the
    emulated httplib, and is subject to URLFetch limitations as described in
    the App Engine documentation here:

        https://cloud.google.com/appengine/docs/python/urlfetch

    Notably it will raise an AppEnginePlatformError if:
        * URLFetch is not available.
        * If you attempt to use this on GAEv2 (Managed VMs), as full socket
          support is available.
        * If a request size is more than 10 megabytes.
        * If a response size is more than 32 megabytes.
        * If you use an unsupported request method such as OPTIONS.

    Beyond those cases, it will raise normal urllib3 errors.
    """

    def __init__(self, headers=None, retries=None, validate_certificate=True):
        if not urlfetch:
            raise AppEnginePlatformError(
                "URLFetch is not available in this environment.")

        if is_prod_appengine_mvms():
            raise AppEnginePlatformError(
                "Use normal urllib3.PoolManager instead of AppEngineManager "
                "on Managed VMs, as using URLFetch is not necessary in "
                "this environment.")

        warnings.warn(
            "urllib3 is using URLFetch on Google App Engine sandbox instead "
            "of sockets. To use sockets directly instead of URLFetch see "
            "https://urllib3.readthedocs.org/en/latest/contrib.html.",
            AppEnginePlatformWarning)

        RequestMethods.__init__(self, headers)
        self.validate_certificate = validate_certificate

        self.retries = retries or Retry.DEFAULT

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Return False to re-raise any potential exceptions
        return False

    def urlopen(self, method, url, body=None, headers=None,
                retries=None, redirect=True, timeout=Timeout.DEFAULT_TIMEOUT,
                **response_kw):

        retries = self._get_retries(retries, redirect)

        try:
            response = urlfetch.fetch(
                url,
                payload=body,
                method=method,
                headers=headers or {},
                allow_truncated=False,
                follow_redirects=(
                    redirect and
                    retries.redirect != 0 and
                    retries.total),
                deadline=self._get_absolute_timeout(timeout),
                validate_certificate=self.validate_certificate,
            )
        except urlfetch.DeadlineExceededError as e:
            raise TimeoutError(self, e)

        except urlfetch.InvalidURLError as e:
            if 'too large' in str(e):
                raise AppEnginePlatformError(
                    "URLFetch request too large, URLFetch only "
                    "supports requests up to 10mb in size.", e)
            raise ProtocolError(e)

        except urlfetch.DownloadError as e:
            if 'Too many redirects' in str(e):
                raise MaxRetryError(self, url, reason=e)
            raise ProtocolError(e)

        except urlfetch.ResponseTooLargeError as e:
            raise AppEnginePlatformError(
                "URLFetch response too large, URLFetch only supports "
                "responses up to 32mb in size.", e)

        except urlfetch.SSLCertificateError as e:
            raise SSLError(e)

        except urlfetch.InvalidMethodError as e:
            raise AppEnginePlatformError(
                "URLFetch does not support method: %s" % method, e)

        http_response = self._urlfetch_response_to_http_response(
            response, **response_kw)

        # Check for redirect response
        if (http_response.get_redirect_location() and
                retries.raise_on_redirect and redirect):
            raise MaxRetryError(self, url, "too many redirects")

        # Check if we should retry the HTTP response.
        if retries.is_forced_retry(method, status_code=http_response.status):
            retries = retries.increment(
                method, url, response=http_response, _pool=self)
            log.info("Forced retry: %s" % url)
            retries.sleep()
            return self.urlopen(
                method, url,
                body=body, headers=headers,
                retries=retries, redirect=redirect,
                timeout=timeout, **response_kw)

        return http_response

    def _urlfetch_response_to_http_response(self, urlfetch_resp, **response_kw):

        if is_prod_appengine():
            # Production GAE handles deflate encoding automatically, but does
            # not remove the encoding header.
            content_encoding = urlfetch_resp.headers.get('content-encoding')

            if content_encoding == 'deflate':
                del urlfetch_resp.headers['content-encoding']

        return HTTPResponse(
            # In order for decoding to work, we must present the content as
            # a file-like object.
            body=BytesIO(urlfetch_resp.content),
            headers=urlfetch_resp.headers,
            status=urlfetch_resp.status_code,
            **response_kw
        )

    def _get_absolute_timeout(self, timeout):
        if timeout is Timeout.DEFAULT_TIMEOUT:
            return 5  # 5s is the default timeout for URLFetch.
        if isinstance(timeout, Timeout):
            if timeout.read is not timeout.connect:
                warnings.warn(
                    "URLFetch does not support granular timeout settings, "
                    "reverting to total timeout.", AppEnginePlatformWarning)
            return timeout.total
        return timeout

    def _get_retries(self, retries, redirect):
        if not isinstance(retries, Retry):
            retries = Retry.from_int(
                retries, redirect=redirect, default=self.retries)

        if retries.connect or retries.read or retries.redirect:
            warnings.warn(
                "URLFetch only supports total retries and does not "
                "recognize connect, read, or redirect retry parameters.",
                AppEnginePlatformWarning)

        return retries


def is_appengine():
    return (is_local_appengine() or
            is_prod_appengine() or
            is_prod_appengine_mvms())


def is_appengine_sandbox():
    return is_appengine() and not is_prod_appengine_mvms()


def is_local_appengine():
    return ('APPENGINE_RUNTIME' in os.environ and
            'Development/' in os.environ['SERVER_SOFTWARE'])


def is_prod_appengine():
    return ('APPENGINE_RUNTIME' in os.environ and
            'Google App Engine/' in os.environ['SERVER_SOFTWARE'] and
            not is_prod_appengine_mvms())


def is_prod_appengine_mvms():
    return os.environ.get('GAE_VM', False) == 'true'
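The manager's docstring above describes URLFetch-backed requests; here is a hypothetical usage sketch (the URL is illustrative, and the guard assumes an App Engine runtime where google.appengine.api.urlfetch is importable):

# Hypothetical sketch: AppEngineManager stands in for PoolManager on GAE.
from urllib3.contrib.appengine import AppEngineManager, is_appengine_sandbox

if is_appengine_sandbox():
    http = AppEngineManager()  # raises AppEnginePlatformError outside the sandbox
    r = http.request('GET', 'https://example.com/')
    print(r.status, len(r.data))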
115
libraries/requests/packages/urllib3/contrib/ntlmpool.py
Normal file

@@ -0,0 +1,115 @@
"""
|
||||
NTLM authenticating pool, contributed by erikcederstran
|
||||
|
||||
Issue #10, see: http://code.google.com/p/urllib3/issues/detail?id=10
|
||||
"""
|
||||
from __future__ import absolute_import
|
||||
|
||||
try:
|
||||
from http.client import HTTPSConnection
|
||||
except ImportError:
|
||||
from httplib import HTTPSConnection
|
||||
from logging import getLogger
|
||||
from ntlm import ntlm
|
||||
|
||||
from urllib3 import HTTPSConnectionPool
|
||||
|
||||
|
||||
log = getLogger(__name__)
|
||||
|
||||
|
||||
class NTLMConnectionPool(HTTPSConnectionPool):
|
||||
"""
|
||||
Implements an NTLM authentication version of an urllib3 connection pool
|
||||
"""
|
||||
|
||||
scheme = 'https'
|
||||
|
||||
def __init__(self, user, pw, authurl, *args, **kwargs):
|
||||
"""
|
||||
authurl is a random URL on the server that is protected by NTLM.
|
||||
user is the Windows user, probably in the DOMAIN\\username format.
|
||||
pw is the password for the user.
|
||||
"""
|
||||
super(NTLMConnectionPool, self).__init__(*args, **kwargs)
|
||||
self.authurl = authurl
|
||||
self.rawuser = user
|
||||
user_parts = user.split('\\', 1)
|
||||
self.domain = user_parts[0].upper()
|
||||
self.user = user_parts[1]
|
||||
self.pw = pw
|
||||
|
||||
def _new_conn(self):
|
||||
# Performs the NTLM handshake that secures the connection. The socket
|
||||
# must be kept open while requests are performed.
|
||||
self.num_connections += 1
|
||||
log.debug('Starting NTLM HTTPS connection no. %d: https://%s%s' %
|
||||
(self.num_connections, self.host, self.authurl))
|
||||
|
||||
headers = {}
|
||||
headers['Connection'] = 'Keep-Alive'
|
||||
req_header = 'Authorization'
|
||||
resp_header = 'www-authenticate'
|
||||
|
||||
conn = HTTPSConnection(host=self.host, port=self.port)
|
||||
|
||||
# Send negotiation message
|
||||
headers[req_header] = (
|
||||
'NTLM %s' % ntlm.create_NTLM_NEGOTIATE_MESSAGE(self.rawuser))
|
||||
log.debug('Request headers: %s' % headers)
|
||||
conn.request('GET', self.authurl, None, headers)
|
||||
res = conn.getresponse()
|
||||
reshdr = dict(res.getheaders())
|
||||
log.debug('Response status: %s %s' % (res.status, res.reason))
|
||||
log.debug('Response headers: %s' % reshdr)
|
||||
log.debug('Response data: %s [...]' % res.read(100))
|
||||
|
||||
# Remove the reference to the socket, so that it can not be closed by
|
||||
# the response object (we want to keep the socket open)
|
||||
res.fp = None
|
||||
|
||||
# Server should respond with a challenge message
|
||||
auth_header_values = reshdr[resp_header].split(', ')
|
||||
auth_header_value = None
|
||||
for s in auth_header_values:
|
||||
if s[:5] == 'NTLM ':
|
||||
auth_header_value = s[5:]
|
||||
if auth_header_value is None:
|
||||
raise Exception('Unexpected %s response header: %s' %
|
||||
(resp_header, reshdr[resp_header]))
|
||||
|
||||
# Send authentication message
|
||||
ServerChallenge, NegotiateFlags = \
|
||||
ntlm.parse_NTLM_CHALLENGE_MESSAGE(auth_header_value)
|
||||
auth_msg = ntlm.create_NTLM_AUTHENTICATE_MESSAGE(ServerChallenge,
|
||||
self.user,
|
||||
self.domain,
|
||||
self.pw,
|
||||
NegotiateFlags)
|
||||
headers[req_header] = 'NTLM %s' % auth_msg
|
||||
log.debug('Request headers: %s' % headers)
|
||||
conn.request('GET', self.authurl, None, headers)
|
||||
res = conn.getresponse()
|
||||
log.debug('Response status: %s %s' % (res.status, res.reason))
|
||||
log.debug('Response headers: %s' % dict(res.getheaders()))
|
||||
log.debug('Response data: %s [...]' % res.read()[:100])
|
||||
if res.status != 200:
|
||||
if res.status == 401:
|
||||
raise Exception('Server rejected request: wrong '
|
||||
'username or password')
|
||||
raise Exception('Wrong server response: %s %s' %
|
||||
(res.status, res.reason))
|
||||
|
||||
res.fp = None
|
||||
log.debug('Connection established')
|
||||
return conn
|
||||
|
||||
def urlopen(self, method, url, body=None, headers=None, retries=3,
|
||||
redirect=True, assert_same_host=True):
|
||||
if headers is None:
|
||||
headers = {}
|
||||
headers['Connection'] = 'Keep-Alive'
|
||||
return super(NTLMConnectionPool, self).urlopen(method, url, body,
|
||||
headers, retries,
|
||||
redirect,
|
||||
assert_same_host)
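A hypothetical sketch of how the pool above is constructed (credentials, hostname, and path are made up; the ntlm package imported at the top of the file must be installed):

# Hypothetical sketch: the DOMAIN\\user string is split by __init__ as
# documented in its docstring.
from urllib3.contrib.ntlmpool import NTLMConnectionPool

pool = NTLMConnectionPool('EXAMPLE\\jdoe', 'secret', authurl='/protected',
                          host='intranet.example.com', port=443)
r = pool.urlopen('GET', '/protected')
print(r.status)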
310
libraries/requests/packages/urllib3/contrib/pyopenssl.py
Normal file

@@ -0,0 +1,310 @@
'''SSL with SNI_-support for Python 2. Follow these instructions if you would
like to verify SSL certificates in Python 2. Note, the default libraries do
*not* do certificate checking; you need to do additional work to validate
certificates yourself.

This needs the following packages installed:

* pyOpenSSL (tested with 0.13)
* ndg-httpsclient (tested with 0.3.2)
* pyasn1 (tested with 0.1.6)

You can install them with the following command:

    pip install pyopenssl ndg-httpsclient pyasn1

To activate certificate checking, call
:func:`~urllib3.contrib.pyopenssl.inject_into_urllib3` from your Python code
before you begin making HTTP requests. This can be done in a ``sitecustomize``
module, or at any other time before your application begins using ``urllib3``,
like this::

    try:
        import urllib3.contrib.pyopenssl
        urllib3.contrib.pyopenssl.inject_into_urllib3()
    except ImportError:
        pass

Now you can use :mod:`urllib3` as you normally would, and it will support SNI
when the required modules are installed.

Activating this module also has the positive side effect of disabling SSL/TLS
compression in Python 2 (see `CRIME attack`_).

If you want to configure the default list of supported cipher suites, you can
set the ``urllib3.contrib.pyopenssl.DEFAULT_SSL_CIPHER_LIST`` variable.

Module Variables
----------------

:var DEFAULT_SSL_CIPHER_LIST: The list of supported SSL/TLS cipher suites.

.. _sni: https://en.wikipedia.org/wiki/Server_Name_Indication
.. _crime attack: https://en.wikipedia.org/wiki/CRIME_(security_exploit)

'''
from __future__ import absolute_import

try:
    from ndg.httpsclient.ssl_peer_verification import SUBJ_ALT_NAME_SUPPORT
    from ndg.httpsclient.subj_alt_name import SubjectAltName as BaseSubjectAltName
except SyntaxError as e:
    raise ImportError(e)

import OpenSSL.SSL
from pyasn1.codec.der import decoder as der_decoder
from pyasn1.type import univ, constraint
from socket import _fileobject, timeout, error as SocketError
import ssl
import select

from .. import connection
from .. import util

__all__ = ['inject_into_urllib3', 'extract_from_urllib3']

# SNI only *really* works if we can read the subjectAltName of certificates.
HAS_SNI = SUBJ_ALT_NAME_SUPPORT

# Map from urllib3 to PyOpenSSL compatible parameter-values.
_openssl_versions = {
    ssl.PROTOCOL_SSLv23: OpenSSL.SSL.SSLv23_METHOD,
    ssl.PROTOCOL_TLSv1: OpenSSL.SSL.TLSv1_METHOD,
}

if hasattr(ssl, 'PROTOCOL_TLSv1_1') and hasattr(OpenSSL.SSL, 'TLSv1_1_METHOD'):
    _openssl_versions[ssl.PROTOCOL_TLSv1_1] = OpenSSL.SSL.TLSv1_1_METHOD

if hasattr(ssl, 'PROTOCOL_TLSv1_2') and hasattr(OpenSSL.SSL, 'TLSv1_2_METHOD'):
    _openssl_versions[ssl.PROTOCOL_TLSv1_2] = OpenSSL.SSL.TLSv1_2_METHOD

try:
    _openssl_versions.update({ssl.PROTOCOL_SSLv3: OpenSSL.SSL.SSLv3_METHOD})
except AttributeError:
    pass

_openssl_verify = {
    ssl.CERT_NONE: OpenSSL.SSL.VERIFY_NONE,
    ssl.CERT_OPTIONAL: OpenSSL.SSL.VERIFY_PEER,
    ssl.CERT_REQUIRED:
        OpenSSL.SSL.VERIFY_PEER + OpenSSL.SSL.VERIFY_FAIL_IF_NO_PEER_CERT,
}

DEFAULT_SSL_CIPHER_LIST = util.ssl_.DEFAULT_CIPHERS

# OpenSSL will only write 16K at a time
SSL_WRITE_BLOCKSIZE = 16384

orig_util_HAS_SNI = util.HAS_SNI
orig_connection_ssl_wrap_socket = connection.ssl_wrap_socket


def inject_into_urllib3():
    'Monkey-patch urllib3 with PyOpenSSL-backed SSL-support.'

    connection.ssl_wrap_socket = ssl_wrap_socket
    util.HAS_SNI = HAS_SNI


def extract_from_urllib3():
    'Undo monkey-patching by :func:`inject_into_urllib3`.'

    connection.ssl_wrap_socket = orig_connection_ssl_wrap_socket
    util.HAS_SNI = orig_util_HAS_SNI


# Note: This is a slightly bug-fixed version of same from ndg-httpsclient.
class SubjectAltName(BaseSubjectAltName):
    '''ASN.1 implementation for subjectAltNames support'''

    # There is no limit to how many SAN certificates a certificate may have,
    # however this needs to have some limit so we'll set an arbitrarily high
    # limit.
    sizeSpec = univ.SequenceOf.sizeSpec + \
        constraint.ValueSizeConstraint(1, 1024)


# Note: This is a slightly bug-fixed version of same from ndg-httpsclient.
def get_subj_alt_name(peer_cert):
    # Search through extensions
    dns_name = []
    if not SUBJ_ALT_NAME_SUPPORT:
        return dns_name

    general_names = SubjectAltName()
    for i in range(peer_cert.get_extension_count()):
        ext = peer_cert.get_extension(i)
        ext_name = ext.get_short_name()
        if ext_name != 'subjectAltName':
            continue

        # PyOpenSSL returns extension data in ASN.1 encoded form
        ext_dat = ext.get_data()
        decoded_dat = der_decoder.decode(ext_dat,
                                         asn1Spec=general_names)

        for name in decoded_dat:
            if not isinstance(name, SubjectAltName):
                continue
            for entry in range(len(name)):
                component = name.getComponentByPosition(entry)
                if component.getName() != 'dNSName':
                    continue
                dns_name.append(str(component.getComponent()))

    return dns_name


class WrappedSocket(object):
    '''API-compatibility wrapper for Python OpenSSL's Connection-class.

    Note: _makefile_refs, _drop() and _reuse() are needed for the garbage
    collector of pypy.
    '''

    def __init__(self, connection, socket, suppress_ragged_eofs=True):
        self.connection = connection
        self.socket = socket
        self.suppress_ragged_eofs = suppress_ragged_eofs
        self._makefile_refs = 0

    def fileno(self):
        return self.socket.fileno()

    def makefile(self, mode, bufsize=-1):
        self._makefile_refs += 1
        return _fileobject(self, mode, bufsize, close=True)

    def recv(self, *args, **kwargs):
        try:
            data = self.connection.recv(*args, **kwargs)
        except OpenSSL.SSL.SysCallError as e:
            if self.suppress_ragged_eofs and e.args == (-1, 'Unexpected EOF'):
                return b''
            else:
                raise SocketError(e)
        except OpenSSL.SSL.ZeroReturnError as e:
            if self.connection.get_shutdown() == OpenSSL.SSL.RECEIVED_SHUTDOWN:
                return b''
            else:
                raise
        except OpenSSL.SSL.WantReadError:
            rd, wd, ed = select.select(
                [self.socket], [], [], self.socket.gettimeout())
            if not rd:
                raise timeout('The read operation timed out')
            else:
                return self.recv(*args, **kwargs)
        else:
            return data

    def settimeout(self, timeout):
        return self.socket.settimeout(timeout)

    def _send_until_done(self, data):
        while True:
            try:
                return self.connection.send(data)
            except OpenSSL.SSL.WantWriteError:
                _, wlist, _ = select.select([], [self.socket], [],
                                            self.socket.gettimeout())
                if not wlist:
                    raise timeout()
                continue

    def sendall(self, data):
        total_sent = 0
        while total_sent < len(data):
            sent = self._send_until_done(data[total_sent:total_sent + SSL_WRITE_BLOCKSIZE])
            total_sent += sent

    def shutdown(self):
        # FIXME rethrow compatible exceptions should we ever use this
        self.connection.shutdown()

    def close(self):
        if self._makefile_refs < 1:
            try:
                return self.connection.close()
            except OpenSSL.SSL.Error:
                return
        else:
            self._makefile_refs -= 1

    def getpeercert(self, binary_form=False):
        x509 = self.connection.get_peer_certificate()

        if not x509:
            return x509

        if binary_form:
            return OpenSSL.crypto.dump_certificate(
                OpenSSL.crypto.FILETYPE_ASN1,
                x509)

        return {
            'subject': (
                (('commonName', x509.get_subject().CN),),
            ),
            'subjectAltName': [
                ('DNS', value)
                for value in get_subj_alt_name(x509)
            ]
        }

    def _reuse(self):
        self._makefile_refs += 1

    def _drop(self):
        if self._makefile_refs < 1:
            self.close()
        else:
            self._makefile_refs -= 1


def _verify_callback(cnx, x509, err_no, err_depth, return_code):
    return err_no == 0


def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None,
                    ca_certs=None, server_hostname=None,
                    ssl_version=None, ca_cert_dir=None):
    ctx = OpenSSL.SSL.Context(_openssl_versions[ssl_version])
    if certfile:
        keyfile = keyfile or certfile  # Match behaviour of the normal python ssl library
        ctx.use_certificate_file(certfile)
    if keyfile:
        ctx.use_privatekey_file(keyfile)
    if cert_reqs != ssl.CERT_NONE:
        ctx.set_verify(_openssl_verify[cert_reqs], _verify_callback)
    if ca_certs or ca_cert_dir:
        try:
            ctx.load_verify_locations(ca_certs, ca_cert_dir)
        except OpenSSL.SSL.Error as e:
            raise ssl.SSLError('bad ca_certs: %r' % ca_certs, e)
    else:
        ctx.set_default_verify_paths()

    # Disable TLS compression to mitigate CRIME attack (issue #309)
    OP_NO_COMPRESSION = 0x20000
    ctx.set_options(OP_NO_COMPRESSION)

    # Set list of supported ciphersuites.
    ctx.set_cipher_list(DEFAULT_SSL_CIPHER_LIST)

    cnx = OpenSSL.SSL.Connection(ctx, sock)
    cnx.set_tlsext_host_name(server_hostname)
    cnx.set_connect_state()
    while True:
        try:
            cnx.do_handshake()
        except OpenSSL.SSL.WantReadError:
            rd, _, _ = select.select([sock], [], [], sock.gettimeout())
            if not rd:
                raise timeout('select timed out')
            continue
        except OpenSSL.SSL.Error as e:
            raise ssl.SSLError('bad handshake: %r' % e)
        break

    return WrappedSocket(cnx, sock)
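The module docstring above already shows the activation idiom; for completeness, a sketch of the full monkey-patch round trip (guarded, since pyOpenSSL, ndg-httpsclient, and pyasn1 may be absent):

# Sketch: inject_into_urllib3() swaps in ssl_wrap_socket; extract_from_urllib3()
# restores the stdlib implementation saved in orig_connection_ssl_wrap_socket.
try:
    from urllib3.contrib import pyopenssl
    pyopenssl.inject_into_urllib3()   # SNI-capable TLS on Python 2 from here on
    # ... make verified HTTPS requests ...
    pyopenssl.extract_from_urllib3()  # undo the patch
except ImportError:
    pass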
201
libraries/requests/packages/urllib3/exceptions.py
Normal file

@@ -0,0 +1,201 @@
from __future__ import absolute_import
# Base Exceptions


class HTTPError(Exception):
    "Base exception used by this module."
    pass


class HTTPWarning(Warning):
    "Base warning used by this module."
    pass


class PoolError(HTTPError):
    "Base exception for errors caused within a pool."
    def __init__(self, pool, message):
        self.pool = pool
        HTTPError.__init__(self, "%s: %s" % (pool, message))

    def __reduce__(self):
        # For pickling purposes.
        return self.__class__, (None, None)


class RequestError(PoolError):
    "Base exception for PoolErrors that have associated URLs."
    def __init__(self, pool, url, message):
        self.url = url
        PoolError.__init__(self, pool, message)

    def __reduce__(self):
        # For pickling purposes.
        return self.__class__, (None, self.url, None)


class SSLError(HTTPError):
    "Raised when SSL certificate fails in an HTTPS connection."
    pass


class ProxyError(HTTPError):
    "Raised when the connection to a proxy fails."
    pass


class DecodeError(HTTPError):
    "Raised when automatic decoding based on Content-Type fails."
    pass


class ProtocolError(HTTPError):
    "Raised when something unexpected happens mid-request/response."
    pass


#: Renamed to ProtocolError but aliased for backwards compatibility.
ConnectionError = ProtocolError


# Leaf Exceptions

class MaxRetryError(RequestError):
    """Raised when the maximum number of retries is exceeded.

    :param pool: The connection pool
    :type pool: :class:`~urllib3.connectionpool.HTTPConnectionPool`
    :param string url: The requested URL
    :param exceptions.Exception reason: The underlying error

    """

    def __init__(self, pool, url, reason=None):
        self.reason = reason

        message = "Max retries exceeded with url: %s (Caused by %r)" % (
            url, reason)

        RequestError.__init__(self, pool, url, message)


class HostChangedError(RequestError):
    "Raised when an existing pool gets a request for a foreign host."

    def __init__(self, pool, url, retries=3):
        message = "Tried to open a foreign host with url: %s" % url
        RequestError.__init__(self, pool, url, message)
        self.retries = retries


class TimeoutStateError(HTTPError):
    """ Raised when passing an invalid state to a timeout """
    pass


class TimeoutError(HTTPError):
    """ Raised when a socket timeout error occurs.

    Catching this error will catch both :exc:`ReadTimeoutErrors
    <ReadTimeoutError>` and :exc:`ConnectTimeoutErrors <ConnectTimeoutError>`.
    """
    pass


class ReadTimeoutError(TimeoutError, RequestError):
    "Raised when a socket timeout occurs while receiving data from a server"
    pass


# This timeout error does not have a URL attached and needs to inherit from the
# base HTTPError
class ConnectTimeoutError(TimeoutError):
    "Raised when a socket timeout occurs while connecting to a server"
    pass


class NewConnectionError(ConnectTimeoutError, PoolError):
    "Raised when we fail to establish a new connection. Usually ECONNREFUSED."
    pass


class EmptyPoolError(PoolError):
    "Raised when a pool runs out of connections and no more are allowed."
    pass


class ClosedPoolError(PoolError):
    "Raised when a request enters a pool after the pool has been closed."
    pass


class LocationValueError(ValueError, HTTPError):
    "Raised when there is something wrong with a given URL input."
    pass


class LocationParseError(LocationValueError):
    "Raised when get_host or similar fails to parse the URL input."

    def __init__(self, location):
        message = "Failed to parse: %s" % location
        HTTPError.__init__(self, message)

        self.location = location


class ResponseError(HTTPError):
    "Used as a container for an error reason supplied in a MaxRetryError."
    GENERIC_ERROR = 'too many error responses'
    SPECIFIC_ERROR = 'too many {status_code} error responses'


class SecurityWarning(HTTPWarning):
    "Warned when performing security reducing actions"
    pass


class SubjectAltNameWarning(SecurityWarning):
    "Warned when connecting to a host with a certificate missing a SAN."
    pass


class InsecureRequestWarning(SecurityWarning):
    "Warned when making an unverified HTTPS request."
    pass


class SystemTimeWarning(SecurityWarning):
    "Warned when system time is suspected to be wrong"
    pass


class InsecurePlatformWarning(SecurityWarning):
    "Warned when certain SSL configuration is not available on a platform."
    pass


class SNIMissingWarning(HTTPWarning):
    "Warned when making an HTTPS request without SNI available."
    pass


class ResponseNotChunked(ProtocolError, ValueError):
    "Response needs to be chunked in order to read it as chunks."
    pass


class ProxySchemeUnknown(AssertionError, ValueError):
    "ProxyManager does not support the supplied scheme"
    # TODO(t-8ch): Stop inheriting from AssertionError in v2.0.

    def __init__(self, scheme):
        message = "Not supported proxy scheme %s" % scheme
        super(ProxySchemeUnknown, self).__init__(message)


class HeaderParsingError(HTTPError):
    "Raised by assert_header_parsing, but we convert it to a log.warning statement."
    def __init__(self, defects, unparsed_data):
        message = '%s, unparsed data: %r' % (defects or 'Unknown', unparsed_data)
        super(HeaderParsingError, self).__init__(message)
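A minimal sketch of how callers typically consume these classes (the unreachable port is an assumption chosen to force connection retries to exhaust):

# Illustrative sketch: MaxRetryError exposes the URL and the underlying reason.
import urllib3

http = urllib3.PoolManager()
try:
    http.request('GET', 'http://localhost:9/', retries=2)  # port 9 assumed closed
except urllib3.exceptions.MaxRetryError as e:
    print(e.url, '->', type(e.reason).__name__)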
178
libraries/requests/packages/urllib3/fields.py
Normal file

@@ -0,0 +1,178 @@
from __future__ import absolute_import
import email.utils
import mimetypes

from .packages import six


def guess_content_type(filename, default='application/octet-stream'):
    """
    Guess the "Content-Type" of a file.

    :param filename:
        The filename to guess the "Content-Type" of using :mod:`mimetypes`.
    :param default:
        If no "Content-Type" can be guessed, default to `default`.
    """
    if filename:
        return mimetypes.guess_type(filename)[0] or default
    return default


def format_header_param(name, value):
    """
    Helper function to format and quote a single header parameter.

    Particularly useful for header parameters which might contain
    non-ASCII values, like file names. This follows RFC 2231, as
    suggested by RFC 2388 Section 4.4.

    :param name:
        The name of the parameter, a string expected to be ASCII only.
    :param value:
        The value of the parameter, provided as a unicode string.
    """
    if not any(ch in value for ch in '"\\\r\n'):
        result = '%s="%s"' % (name, value)
        try:
            result.encode('ascii')
        except UnicodeEncodeError:
            pass
        else:
            return result
    if not six.PY3:  # Python 2:
        value = value.encode('utf-8')
    value = email.utils.encode_rfc2231(value, 'utf-8')
    value = '%s*=%s' % (name, value)
    return value


class RequestField(object):
    """
    A data container for request body parameters.

    :param name:
        The name of this request field.
    :param data:
        The data/value body.
    :param filename:
        An optional filename of the request field.
    :param headers:
        An optional dict-like object of headers to initially use for the field.
    """
    def __init__(self, name, data, filename=None, headers=None):
        self._name = name
        self._filename = filename
        self.data = data
        self.headers = {}
        if headers:
            self.headers = dict(headers)

    @classmethod
    def from_tuples(cls, fieldname, value):
        """
        A :class:`~urllib3.fields.RequestField` factory from old-style tuple parameters.

        Supports constructing :class:`~urllib3.fields.RequestField` from
        parameter of key/value strings AND key/filetuple. A filetuple is a
        (filename, data, MIME type) tuple where the MIME type is optional.
        For example::

            'foo': 'bar',
            'fakefile': ('foofile.txt', 'contents of foofile'),
            'realfile': ('barfile.txt', open('realfile').read()),
            'typedfile': ('bazfile.bin', open('bazfile').read(), 'image/jpeg'),
            'nonamefile': 'contents of nonamefile field',

        Field names and filenames must be unicode.
        """
        if isinstance(value, tuple):
            if len(value) == 3:
                filename, data, content_type = value
            else:
                filename, data = value
                content_type = guess_content_type(filename)
        else:
            filename = None
            content_type = None
            data = value

        request_param = cls(fieldname, data, filename=filename)
        request_param.make_multipart(content_type=content_type)

        return request_param

    def _render_part(self, name, value):
        """
        Overridable helper function to format a single header parameter.

        :param name:
            The name of the parameter, a string expected to be ASCII only.
        :param value:
            The value of the parameter, provided as a unicode string.
        """
        return format_header_param(name, value)

    def _render_parts(self, header_parts):
        """
        Helper function to format and quote a single header.

        Useful for single headers that are composed of multiple items. E.g.,
        'Content-Disposition' fields.

        :param header_parts:
            A sequence of (k, v) tuples or a :class:`dict` of (k, v) to format
            as `k1="v1"; k2="v2"; ...`.
        """
        parts = []
        iterable = header_parts
        if isinstance(header_parts, dict):
            iterable = header_parts.items()

        for name, value in iterable:
            if value:
                parts.append(self._render_part(name, value))

        return '; '.join(parts)

    def render_headers(self):
        """
        Renders the headers for this request field.
        """
        lines = []

        sort_keys = ['Content-Disposition', 'Content-Type', 'Content-Location']
        for sort_key in sort_keys:
            if self.headers.get(sort_key, False):
                lines.append('%s: %s' % (sort_key, self.headers[sort_key]))

        for header_name, header_value in self.headers.items():
            if header_name not in sort_keys:
                if header_value:
                    lines.append('%s: %s' % (header_name, header_value))

        lines.append('\r\n')
        return '\r\n'.join(lines)

    def make_multipart(self, content_disposition=None, content_type=None,
                       content_location=None):
        """
        Makes this request field into a multipart request field.

        This method overrides "Content-Disposition", "Content-Type" and
        "Content-Location" headers to the request parameter.

        :param content_type:
            The 'Content-Type' of the request body.
        :param content_location:
            The 'Content-Location' of the request body.

        """
        self.headers['Content-Disposition'] = content_disposition or 'form-data'
        self.headers['Content-Disposition'] += '; '.join([
            '', self._render_parts(
                (('name', self._name), ('filename', self._filename))
            )
        ])
        self.headers['Content-Type'] = content_type
        self.headers['Content-Location'] = content_location
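A short sketch of the field lifecycle described above (field name, filename, and content type are illustrative):

# Sketch: make_multipart() fills in the Content-Disposition header that
# render_headers() then serializes first.
from urllib3.fields import RequestField

field = RequestField('upload', b'file-bytes', filename='report.txt')
field.make_multipart(content_type='text/plain')
print(field.render_headers())
# Content-Disposition: form-data; name="upload"; filename="report.txt"
# Content-Type: text/plain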
94
libraries/requests/packages/urllib3/filepost.py
Normal file

@@ -0,0 +1,94 @@
from __future__ import absolute_import
import codecs

from uuid import uuid4
from io import BytesIO

from .packages import six
from .packages.six import b
from .fields import RequestField

writer = codecs.lookup('utf-8')[3]


def choose_boundary():
    """
    Our embarrassingly-simple replacement for mimetools.choose_boundary.
    """
    return uuid4().hex


def iter_field_objects(fields):
    """
    Iterate over fields.

    Supports list of (k, v) tuples and dicts, and lists of
    :class:`~urllib3.fields.RequestField`.

    """
    if isinstance(fields, dict):
        i = six.iteritems(fields)
    else:
        i = iter(fields)

    for field in i:
        if isinstance(field, RequestField):
            yield field
        else:
            yield RequestField.from_tuples(*field)


def iter_fields(fields):
    """
    .. deprecated:: 1.6

    Iterate over fields.

    The addition of :class:`~urllib3.fields.RequestField` makes this function
    obsolete. Instead, use :func:`iter_field_objects`, which returns
    :class:`~urllib3.fields.RequestField` objects.

    Supports list of (k, v) tuples and dicts.
    """
    if isinstance(fields, dict):
        return ((k, v) for k, v in six.iteritems(fields))

    return ((k, v) for k, v in fields)


def encode_multipart_formdata(fields, boundary=None):
    """
    Encode a dictionary of ``fields`` using the multipart/form-data MIME format.

    :param fields:
        Dictionary of fields or list of (key, :class:`~urllib3.fields.RequestField`).

    :param boundary:
        If not specified, then a random boundary will be generated using
        :func:`mimetools.choose_boundary`.
    """
    body = BytesIO()
    if boundary is None:
        boundary = choose_boundary()

    for field in iter_field_objects(fields):
        body.write(b('--%s\r\n' % (boundary)))

        writer(body).write(field.render_headers())
        data = field.data

        if isinstance(data, int):
            data = str(data)  # Backwards compatibility

        if isinstance(data, six.text_type):
            writer(body).write(data)
        else:
            body.write(data)

        body.write(b'\r\n')

    body.write(b('--%s--\r\n' % (boundary)))

    content_type = str('multipart/form-data; boundary=%s' % boundary)

    return body.getvalue(), content_type
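A sketch of the encoder in use (field names and contents are illustrative):

# Sketch: plain strings and (filename, data, MIME type) tuples both work,
# as handled by iter_field_objects() above.
from urllib3.filepost import encode_multipart_formdata

body, content_type = encode_multipart_formdata({
    'token': 'abc123',
    'attachment': ('notes.txt', 'hello world', 'text/plain'),
})
print(content_type)        # multipart/form-data; boundary=<random hex>
print(len(body), 'bytes')  # ready to send as a request body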
5
libraries/requests/packages/urllib3/packages/__init__.py
Normal file

@@ -0,0 +1,5 @@
from __future__ import absolute_import

from . import ssl_match_hostname

__all__ = ('ssl_match_hostname', )
259
libraries/requests/packages/urllib3/packages/ordered_dict.py
Normal file

@@ -0,0 +1,259 @@
# Backport of OrderedDict() class that runs on Python 2.4, 2.5, 2.6, 2.7 and pypy.
# Passes Python2.7's test suite and incorporates all the latest updates.
# Copyright 2009 Raymond Hettinger, released under the MIT License.
# http://code.activestate.com/recipes/576693/
try:
    from thread import get_ident as _get_ident
except ImportError:
    from dummy_thread import get_ident as _get_ident

try:
    from _abcoll import KeysView, ValuesView, ItemsView
except ImportError:
    pass


class OrderedDict(dict):
    'Dictionary that remembers insertion order'
    # An inherited dict maps keys to values.
    # The inherited dict provides __getitem__, __len__, __contains__, and get.
    # The remaining methods are order-aware.
    # Big-O running times for all methods are the same as for regular dictionaries.

    # The internal self.__map dictionary maps keys to links in a doubly linked list.
    # The circular doubly linked list starts and ends with a sentinel element.
    # The sentinel element never gets deleted (this simplifies the algorithm).
    # Each link is stored as a list of length three: [PREV, NEXT, KEY].

    def __init__(self, *args, **kwds):
        '''Initialize an ordered dictionary. Signature is the same as for
        regular dictionaries, but keyword arguments are not recommended
        because their insertion order is arbitrary.

        '''
        if len(args) > 1:
            raise TypeError('expected at most 1 arguments, got %d' % len(args))
        try:
            self.__root
        except AttributeError:
            self.__root = root = []  # sentinel node
            root[:] = [root, root, None]
            self.__map = {}
        self.__update(*args, **kwds)

    def __setitem__(self, key, value, dict_setitem=dict.__setitem__):
        'od.__setitem__(i, y) <==> od[i]=y'
        # Setting a new item creates a new link which goes at the end of the linked
        # list, and the inherited dictionary is updated with the new key/value pair.
        if key not in self:
            root = self.__root
            last = root[0]
            last[1] = root[0] = self.__map[key] = [last, root, key]
        dict_setitem(self, key, value)

    def __delitem__(self, key, dict_delitem=dict.__delitem__):
        'od.__delitem__(y) <==> del od[y]'
        # Deleting an existing item uses self.__map to find the link which is
        # then removed by updating the links in the predecessor and successor nodes.
        dict_delitem(self, key)
        link_prev, link_next, key = self.__map.pop(key)
        link_prev[1] = link_next
        link_next[0] = link_prev

    def __iter__(self):
        'od.__iter__() <==> iter(od)'
        root = self.__root
        curr = root[1]
        while curr is not root:
            yield curr[2]
            curr = curr[1]

    def __reversed__(self):
        'od.__reversed__() <==> reversed(od)'
        root = self.__root
        curr = root[0]
        while curr is not root:
            yield curr[2]
            curr = curr[0]

    def clear(self):
        'od.clear() -> None. Remove all items from od.'
        try:
            for node in self.__map.itervalues():
                del node[:]
            root = self.__root
            root[:] = [root, root, None]
            self.__map.clear()
        except AttributeError:
            pass
        dict.clear(self)

    def popitem(self, last=True):
        '''od.popitem() -> (k, v), return and remove a (key, value) pair.
        Pairs are returned in LIFO order if last is true or FIFO order if false.

        '''
        if not self:
            raise KeyError('dictionary is empty')
        root = self.__root
        if last:
            link = root[0]
            link_prev = link[0]
            link_prev[1] = root
            root[0] = link_prev
        else:
            link = root[1]
            link_next = link[1]
            root[1] = link_next
            link_next[0] = root
        key = link[2]
        del self.__map[key]
        value = dict.pop(self, key)
        return key, value

    # -- the following methods do not depend on the internal structure --

    def keys(self):
        'od.keys() -> list of keys in od'
        return list(self)

    def values(self):
        'od.values() -> list of values in od'
        return [self[key] for key in self]

    def items(self):
        'od.items() -> list of (key, value) pairs in od'
        return [(key, self[key]) for key in self]

    def iterkeys(self):
        'od.iterkeys() -> an iterator over the keys in od'
        return iter(self)

    def itervalues(self):
        'od.itervalues -> an iterator over the values in od'
        for k in self:
            yield self[k]

    def iteritems(self):
        'od.iteritems -> an iterator over the (key, value) items in od'
        for k in self:
            yield (k, self[k])

    def update(*args, **kwds):
        '''od.update(E, **F) -> None. Update od from dict/iterable E and F.

        If E is a dict instance, does:           for k in E: od[k] = E[k]
        If E has a .keys() method, does:         for k in E.keys(): od[k] = E[k]
        Or if E is an iterable of items, does:   for k, v in E: od[k] = v
        In either case, this is followed by:     for k, v in F.items(): od[k] = v

        '''
        if len(args) > 2:
            raise TypeError('update() takes at most 2 positional '
                            'arguments (%d given)' % (len(args),))
        elif not args:
            raise TypeError('update() takes at least 1 argument (0 given)')
        self = args[0]
        # Make progressively weaker assumptions about "other"
        other = ()
        if len(args) == 2:
            other = args[1]
        if isinstance(other, dict):
            for key in other:
                self[key] = other[key]
        elif hasattr(other, 'keys'):
            for key in other.keys():
                self[key] = other[key]
        else:
            for key, value in other:
                self[key] = value
        for key, value in kwds.items():
            self[key] = value

    __update = update  # let subclasses override update without breaking __init__

    __marker = object()

    def pop(self, key, default=__marker):
        '''od.pop(k[,d]) -> v, remove specified key and return the corresponding value.
        If key is not found, d is returned if given, otherwise KeyError is raised.

        '''
        if key in self:
            result = self[key]
            del self[key]
            return result
        if default is self.__marker:
            raise KeyError(key)
        return default

    def setdefault(self, key, default=None):
        'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od'
        if key in self:
            return self[key]
        self[key] = default
        return default

    def __repr__(self, _repr_running={}):
        'od.__repr__() <==> repr(od)'
        call_key = id(self), _get_ident()
        if call_key in _repr_running:
            return '...'
        _repr_running[call_key] = 1
        try:
            if not self:
                return '%s()' % (self.__class__.__name__,)
            return '%s(%r)' % (self.__class__.__name__, self.items())
        finally:
            del _repr_running[call_key]

    def __reduce__(self):
        'Return state information for pickling'
        items = [[k, self[k]] for k in self]
        inst_dict = vars(self).copy()
        for k in vars(OrderedDict()):
            inst_dict.pop(k, None)
        if inst_dict:
            return (self.__class__, (items,), inst_dict)
        return self.__class__, (items,)

    def copy(self):
        'od.copy() -> a shallow copy of od'
        return self.__class__(self)

    @classmethod
    def fromkeys(cls, iterable, value=None):
        '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S
        and values equal to v (which defaults to None).

        '''
        d = cls()
        for key in iterable:
            d[key] = value
        return d

    def __eq__(self, other):
        '''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive
        while comparison to a regular mapping is order-insensitive.

        '''
        if isinstance(other, OrderedDict):
            return len(self) == len(other) and self.items() == other.items()
        return dict.__eq__(self, other)

    def __ne__(self, other):
        return not self == other

    # -- the following methods are only used in Python 2.7 --

    def viewkeys(self):
        "od.viewkeys() -> a set-like object providing a view on od's keys"
        return KeysView(self)

    def viewvalues(self):
        "od.viewvalues() -> an object providing a view on od's values"
        return ValuesView(self)

    def viewitems(self):
        "od.viewitems() -> a set-like object providing a view on od's items"
        return ItemsView(self)
385
libraries/requests/packages/urllib3/packages/six.py
Normal file

@@ -0,0 +1,385 @@
"""Utilities for writing code that runs on Python 2 and 3"""
|
||||
|
||||
#Copyright (c) 2010-2011 Benjamin Peterson
|
||||
|
||||
#Permission is hereby granted, free of charge, to any person obtaining a copy of
|
||||
#this software and associated documentation files (the "Software"), to deal in
|
||||
#the Software without restriction, including without limitation the rights to
|
||||
#use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
|
||||
#the Software, and to permit persons to whom the Software is furnished to do so,
|
||||
#subject to the following conditions:
|
||||
|
||||
#The above copyright notice and this permission notice shall be included in all
|
||||
#copies or substantial portions of the Software.
|
||||
|
||||
#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
#IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
|
||||
#FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
|
||||
#COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
|
||||
#IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
|
||||
#CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
|
||||
import operator
|
||||
import sys
|
||||
import types
|
||||
|
||||
__author__ = "Benjamin Peterson <benjamin@python.org>"
|
||||
__version__ = "1.2.0" # Revision 41c74fef2ded
|
||||
|
||||
|
||||
# True if we are running on Python 3.
|
||||
PY3 = sys.version_info[0] == 3
|
||||
|
||||
if PY3:
|
||||
string_types = str,
|
||||
integer_types = int,
|
||||
class_types = type,
|
||||
text_type = str
|
||||
binary_type = bytes
|
||||
|
||||
MAXSIZE = sys.maxsize
|
||||
else:
|
||||
string_types = basestring,
|
||||
integer_types = (int, long)
|
||||
class_types = (type, types.ClassType)
|
||||
text_type = unicode
|
||||
binary_type = str
|
||||
|
||||
if sys.platform.startswith("java"):
|
||||
# Jython always uses 32 bits.
|
||||
MAXSIZE = int((1 << 31) - 1)
|
||||
else:
|
||||
# It's possible to have sizeof(long) != sizeof(Py_ssize_t).
|
||||
class X(object):
|
||||
def __len__(self):
|
||||
return 1 << 31
|
||||
try:
|
||||
len(X())
|
||||
except OverflowError:
|
||||
# 32-bit
|
||||
MAXSIZE = int((1 << 31) - 1)
|
||||
else:
|
||||
# 64-bit
|
||||
MAXSIZE = int((1 << 63) - 1)
|
||||
del X
|
||||
|
||||
|
||||
def _add_doc(func, doc):
|
||||
"""Add documentation to a function."""
|
||||
func.__doc__ = doc
|
||||
|
||||
|
||||
def _import_module(name):
|
||||
"""Import module, returning the module after the last dot."""
|
||||
__import__(name)
|
||||
return sys.modules[name]
|
||||
|
||||
|
||||
class _LazyDescr(object):
|
||||
|
||||
def __init__(self, name):
|
||||
self.name = name
|
||||
|
||||
def __get__(self, obj, tp):
|
||||
result = self._resolve()
|
||||
setattr(obj, self.name, result)
|
||||
# This is a bit ugly, but it avoids running this again.
|
||||
delattr(tp, self.name)
|
||||
return result
|
||||
|
||||
|
||||
class MovedModule(_LazyDescr):
|
||||
|
||||
def __init__(self, name, old, new=None):
|
||||
super(MovedModule, self).__init__(name)
|
||||
if PY3:
|
||||
if new is None:
|
||||
new = name
|
||||
self.mod = new
|
||||
else:
|
||||
self.mod = old
|
||||
|
||||
def _resolve(self):
|
||||
return _import_module(self.mod)
|
||||
|
||||
|
||||
class MovedAttribute(_LazyDescr):
|
||||
|
||||
def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
|
||||
super(MovedAttribute, self).__init__(name)
|
||||
if PY3:
|
||||
if new_mod is None:
|
||||
new_mod = name
|
||||
self.mod = new_mod
|
||||
if new_attr is None:
|
||||
if old_attr is None:
|
||||
new_attr = name
|
||||
else:
|
||||
new_attr = old_attr
|
||||
self.attr = new_attr
|
||||
else:
|
||||
self.mod = old_mod
|
||||
if old_attr is None:
|
||||
old_attr = name
|
||||
self.attr = old_attr
|
||||
|
||||
def _resolve(self):
|
||||
module = _import_module(self.mod)
|
||||
return getattr(module, self.attr)
|
||||
|
||||
|
||||
|
||||
class _MovedItems(types.ModuleType):
|
||||
"""Lazy loading of moved objects"""
|
||||
|
||||
|
||||
_moved_attributes = [
|
||||
MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
|
||||
MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
|
||||
MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
|
||||
MovedAttribute("map", "itertools", "builtins", "imap", "map"),
|
||||
MovedAttribute("reload_module", "__builtin__", "imp", "reload"),
|
||||
MovedAttribute("reduce", "__builtin__", "functools"),
|
||||
MovedAttribute("StringIO", "StringIO", "io"),
|
||||
MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
|
||||
MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
|
||||
|
||||
MovedModule("builtins", "__builtin__"),
|
||||
MovedModule("configparser", "ConfigParser"),
|
||||
MovedModule("copyreg", "copy_reg"),
|
||||
MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
|
||||
MovedModule("http_cookies", "Cookie", "http.cookies"),
|
||||
MovedModule("html_entities", "htmlentitydefs", "html.entities"),
|
||||
MovedModule("html_parser", "HTMLParser", "html.parser"),
|
||||
MovedModule("http_client", "httplib", "http.client"),
|
||||
MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
|
||||
MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
|
||||
MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
|
||||
MovedModule("cPickle", "cPickle", "pickle"),
|
||||
MovedModule("queue", "Queue"),
|
||||
MovedModule("reprlib", "repr"),
|
||||
MovedModule("socketserver", "SocketServer"),
|
||||
MovedModule("tkinter", "Tkinter"),
|
||||
MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
|
||||
MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
|
||||
MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
|
||||
MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
|
||||
MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
|
||||
MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
|
||||
MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
|
||||
MovedModule("tkinter_colorchooser", "tkColorChooser",
|
||||
"tkinter.colorchooser"),
|
||||
MovedModule("tkinter_commondialog", "tkCommonDialog",
|
||||
"tkinter.commondialog"),
|
||||
MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
|
||||
MovedModule("tkinter_font", "tkFont", "tkinter.font"),
|
||||
MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
|
||||
MovedModule("tkinter_tksimpledialog", "tkSimpleDialog",
|
||||
"tkinter.simpledialog"),
|
||||
MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
|
||||
MovedModule("winreg", "_winreg"),
|
||||
]
|
||||
for attr in _moved_attributes:
|
||||
setattr(_MovedItems, attr.name, attr)
|
||||
del attr
|
||||
|
||||
moves = sys.modules[__name__ + ".moves"] = _MovedItems("moves")
|
||||
|
||||
|
||||
def add_move(move):
|
||||
"""Add an item to six.moves."""
|
||||
setattr(_MovedItems, move.name, move)
|
||||
|
||||
|
||||
def remove_move(name):
|
||||
"""Remove item from six.moves."""
|
||||
try:
|
||||
delattr(_MovedItems, name)
|
||||
except AttributeError:
|
||||
try:
|
||||
del moves.__dict__[name]
|
||||
except KeyError:
|
||||
raise AttributeError("no such move, %r" % (name,))
|
||||
|
||||
|
||||
if PY3:
|
||||
_meth_func = "__func__"
|
||||
    _meth_self = "__self__"

    _func_code = "__code__"
    _func_defaults = "__defaults__"

    _iterkeys = "keys"
    _itervalues = "values"
    _iteritems = "items"
else:
    _meth_func = "im_func"
    _meth_self = "im_self"

    _func_code = "func_code"
    _func_defaults = "func_defaults"

    _iterkeys = "iterkeys"
    _itervalues = "itervalues"
    _iteritems = "iteritems"


try:
    advance_iterator = next
except NameError:
    def advance_iterator(it):
        return it.next()
next = advance_iterator


if PY3:
    def get_unbound_function(unbound):
        return unbound

    Iterator = object

    def callable(obj):
        return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)
else:
    def get_unbound_function(unbound):
        return unbound.im_func

    class Iterator(object):

        def next(self):
            return type(self).__next__(self)

    callable = callable
_add_doc(get_unbound_function,
         """Get the function out of a possibly unbound function""")


get_method_function = operator.attrgetter(_meth_func)
get_method_self = operator.attrgetter(_meth_self)
get_function_code = operator.attrgetter(_func_code)
get_function_defaults = operator.attrgetter(_func_defaults)


def iterkeys(d):
    """Return an iterator over the keys of a dictionary."""
    return iter(getattr(d, _iterkeys)())


def itervalues(d):
    """Return an iterator over the values of a dictionary."""
    return iter(getattr(d, _itervalues)())


def iteritems(d):
    """Return an iterator over the (key, value) pairs of a dictionary."""
    return iter(getattr(d, _iteritems)())


if PY3:
    def b(s):
        return s.encode("latin-1")

    def u(s):
        return s
    if sys.version_info[1] <= 1:
        def int2byte(i):
            return bytes((i,))
    else:
        # This is about 2x faster than the implementation above on 3.2+
        int2byte = operator.methodcaller("to_bytes", 1, "big")
    import io
    StringIO = io.StringIO
    BytesIO = io.BytesIO
else:
    def b(s):
        return s

    def u(s):
        return unicode(s, "unicode_escape")
    int2byte = chr
    import StringIO
    StringIO = BytesIO = StringIO.StringIO
_add_doc(b, """Byte literal""")
_add_doc(u, """Text literal""")


if PY3:
    import builtins
    exec_ = getattr(builtins, "exec")

    def reraise(tp, value, tb=None):
        if value.__traceback__ is not tb:
            raise value.with_traceback(tb)
        raise value

    print_ = getattr(builtins, "print")
    del builtins

else:
    def exec_(code, globs=None, locs=None):
        """Execute code in a namespace."""
        if globs is None:
            frame = sys._getframe(1)
            globs = frame.f_globals
            if locs is None:
                locs = frame.f_locals
            del frame
        elif locs is None:
            locs = globs
        exec("""exec code in globs, locs""")

    exec_("""def reraise(tp, value, tb=None):
    raise tp, value, tb
""")

    def print_(*args, **kwargs):
        """The new-style print function."""
        fp = kwargs.pop("file", sys.stdout)
        if fp is None:
            return

        def write(data):
            if not isinstance(data, basestring):
                data = str(data)
            fp.write(data)
        want_unicode = False
        sep = kwargs.pop("sep", None)
        if sep is not None:
            if isinstance(sep, unicode):
                want_unicode = True
            elif not isinstance(sep, str):
                raise TypeError("sep must be None or a string")
        end = kwargs.pop("end", None)
        if end is not None:
            if isinstance(end, unicode):
                want_unicode = True
            elif not isinstance(end, str):
                raise TypeError("end must be None or a string")
        if kwargs:
            raise TypeError("invalid keyword arguments to print()")
        if not want_unicode:
            for arg in args:
                if isinstance(arg, unicode):
                    want_unicode = True
                    break
        if want_unicode:
            newline = unicode("\n")
            space = unicode(" ")
        else:
            newline = "\n"
            space = " "
        if sep is None:
            sep = space
        if end is None:
            end = newline
        for i, arg in enumerate(args):
            if i:
                write(sep)
            write(arg)
        write(end)

_add_doc(reraise, """Reraise an exception.""")


def with_metaclass(meta, base=object):
    """Create a base class with a metaclass."""
    return meta("NewBase", (base,), {})
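For illustration, a minimal sketch (not part of the vendored code) of how the compatibility helpers above are typically used; the import path assumes the vendored copy at requests/packages/urllib3/packages/six.py.

import sys
from requests.packages.urllib3.packages import six  # vendored path (assumption)

counts = {'a': 1, 'b': 2}
for key, value in six.iteritems(counts):  # dict.iteritems() on Py2, dict.items() on Py3
    six.print_(key, value, sep='=')

def fail():
    raise ValueError("original")

try:
    try:
        fail()
    except ValueError:
        # Re-raise while preserving the original traceback on both Python 2 and 3
        six.reraise(*sys.exc_info())
except ValueError as exc:
    assert str(exc) == "original"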
@@ -0,0 +1,13 @@
try:
    # Python 3.2+
    from ssl import CertificateError, match_hostname
except ImportError:
    try:
        # Backport of the function from a pypi module
        from backports.ssl_match_hostname import CertificateError, match_hostname
    except ImportError:
        # Our vendored copy
        from ._implementation import CertificateError, match_hostname

# Not needed, but documenting what we provide.
__all__ = ('CertificateError', 'match_hostname')
@@ -0,0 +1,105 @@
"""The match_hostname() function from Python 3.3.3, essential when using SSL."""
|
||||
|
||||
# Note: This file is under the PSF license as the code comes from the python
|
||||
# stdlib. http://docs.python.org/3/license.html
|
||||
|
||||
import re
|
||||
|
||||
__version__ = '3.4.0.2'
|
||||
|
||||
class CertificateError(ValueError):
|
||||
pass
|
||||
|
||||
|
||||
def _dnsname_match(dn, hostname, max_wildcards=1):
|
||||
"""Matching according to RFC 6125, section 6.4.3
|
||||
|
||||
http://tools.ietf.org/html/rfc6125#section-6.4.3
|
||||
"""
|
||||
pats = []
|
||||
if not dn:
|
||||
return False
|
||||
|
||||
# Ported from python3-syntax:
|
||||
# leftmost, *remainder = dn.split(r'.')
|
||||
parts = dn.split(r'.')
|
||||
leftmost = parts[0]
|
||||
remainder = parts[1:]
|
||||
|
||||
wildcards = leftmost.count('*')
|
||||
if wildcards > max_wildcards:
|
||||
# Issue #17980: avoid denials of service by refusing more
|
||||
# than one wildcard per fragment. A survey of established
|
||||
# policy among SSL implementations showed it to be a
|
||||
# reasonable choice.
|
||||
raise CertificateError(
|
||||
"too many wildcards in certificate DNS name: " + repr(dn))
|
||||
|
||||
# speed up common case w/o wildcards
|
||||
if not wildcards:
|
||||
return dn.lower() == hostname.lower()
|
||||
|
||||
# RFC 6125, section 6.4.3, subitem 1.
|
||||
# The client SHOULD NOT attempt to match a presented identifier in which
|
||||
# the wildcard character comprises a label other than the left-most label.
|
||||
if leftmost == '*':
|
||||
# When '*' is a fragment by itself, it matches a non-empty dotless
|
||||
# fragment.
|
||||
pats.append('[^.]+')
|
||||
elif leftmost.startswith('xn--') or hostname.startswith('xn--'):
|
||||
# RFC 6125, section 6.4.3, subitem 3.
|
||||
# The client SHOULD NOT attempt to match a presented identifier
|
||||
# where the wildcard character is embedded within an A-label or
|
||||
# U-label of an internationalized domain name.
|
||||
pats.append(re.escape(leftmost))
|
||||
else:
|
||||
# Otherwise, '*' matches any dotless string, e.g. www*
|
||||
pats.append(re.escape(leftmost).replace(r'\*', '[^.]*'))
|
||||
|
||||
# add the remaining fragments, ignore any wildcards
|
||||
for frag in remainder:
|
||||
pats.append(re.escape(frag))
|
||||
|
||||
pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE)
|
||||
return pat.match(hostname)
|
||||
|
||||
|
||||
def match_hostname(cert, hostname):
|
||||
"""Verify that *cert* (in decoded format as returned by
|
||||
SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125
|
||||
rules are followed, but IP addresses are not accepted for *hostname*.
|
||||
|
||||
CertificateError is raised on failure. On success, the function
|
||||
returns nothing.
|
||||
"""
|
||||
if not cert:
|
||||
raise ValueError("empty or no certificate")
|
||||
dnsnames = []
|
||||
san = cert.get('subjectAltName', ())
|
||||
for key, value in san:
|
||||
if key == 'DNS':
|
||||
if _dnsname_match(value, hostname):
|
||||
return
|
||||
dnsnames.append(value)
|
||||
if not dnsnames:
|
||||
# The subject is only checked when there is no dNSName entry
|
||||
# in subjectAltName
|
||||
for sub in cert.get('subject', ()):
|
||||
for key, value in sub:
|
||||
# XXX according to RFC 2818, the most specific Common Name
|
||||
# must be used.
|
||||
if key == 'commonName':
|
||||
if _dnsname_match(value, hostname):
|
||||
return
|
||||
dnsnames.append(value)
|
||||
if len(dnsnames) > 1:
|
||||
raise CertificateError("hostname %r "
|
||||
"doesn't match either of %s"
|
||||
% (hostname, ', '.join(map(repr, dnsnames))))
|
||||
elif len(dnsnames) == 1:
|
||||
raise CertificateError("hostname %r "
|
||||
"doesn't match %r"
|
||||
% (hostname, dnsnames[0]))
|
||||
else:
|
||||
raise CertificateError("no appropriate commonName or "
|
||||
"subjectAltName fields were found")
|
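For illustration, a minimal sketch (not part of the vendored code) of how match_hostname() consumes the dict produced by SSLSocket.getpeercert(); the certificate data below is made up, and the import path assumes the vendored package location.

from requests.packages.urllib3.packages.ssl_match_hostname import (  # vendored path (assumption)
    CertificateError, match_hostname)

cert = {
    'subject': ((('commonName', 'example.com'),),),
    'subjectAltName': (('DNS', 'example.com'), ('DNS', '*.example.com')),
}
match_hostname(cert, 'www.example.com')   # wildcard SAN entry matches; returns None

try:
    match_hostname(cert, 'example.org')
except CertificateError as exc:
    print(exc)  # hostname 'example.org' doesn't match either of ...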
281
libraries/requests/packages/urllib3/poolmanager.py
Normal file

@@ -0,0 +1,281 @@
from __future__ import absolute_import
import logging

try:  # Python 3
    from urllib.parse import urljoin
except ImportError:
    from urlparse import urljoin

from ._collections import RecentlyUsedContainer
from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool
from .connectionpool import port_by_scheme
from .exceptions import LocationValueError, MaxRetryError, ProxySchemeUnknown
from .request import RequestMethods
from .util.url import parse_url
from .util.retry import Retry


__all__ = ['PoolManager', 'ProxyManager', 'proxy_from_url']


pool_classes_by_scheme = {
    'http': HTTPConnectionPool,
    'https': HTTPSConnectionPool,
}

log = logging.getLogger(__name__)

SSL_KEYWORDS = ('key_file', 'cert_file', 'cert_reqs', 'ca_certs',
                'ssl_version', 'ca_cert_dir')


class PoolManager(RequestMethods):
    """
    Allows for arbitrary requests while transparently keeping track of
    necessary connection pools for you.

    :param num_pools:
        Number of connection pools to cache before discarding the least
        recently used pool.

    :param headers:
        Headers to include with all requests, unless other headers are given
        explicitly.

    :param \**connection_pool_kw:
        Additional parameters are used to create fresh
        :class:`urllib3.connectionpool.ConnectionPool` instances.

    Example::

        >>> manager = PoolManager(num_pools=2)
        >>> r = manager.request('GET', 'http://google.com/')
        >>> r = manager.request('GET', 'http://google.com/mail')
        >>> r = manager.request('GET', 'http://yahoo.com/')
        >>> len(manager.pools)
        2

    """

    proxy = None

    def __init__(self, num_pools=10, headers=None, **connection_pool_kw):
        RequestMethods.__init__(self, headers)
        self.connection_pool_kw = connection_pool_kw
        self.pools = RecentlyUsedContainer(num_pools,
                                           dispose_func=lambda p: p.close())

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.clear()
        # Return False to re-raise any potential exceptions
        return False

    def _new_pool(self, scheme, host, port):
        """
        Create a new :class:`ConnectionPool` based on host, port and scheme.

        This method is used to actually create the connection pools handed out
        by :meth:`connection_from_url` and companion methods. It is intended
        to be overridden for customization.
        """
        pool_cls = pool_classes_by_scheme[scheme]
        kwargs = self.connection_pool_kw
        if scheme == 'http':
            kwargs = self.connection_pool_kw.copy()
            for kw in SSL_KEYWORDS:
                kwargs.pop(kw, None)

        return pool_cls(host, port, **kwargs)

    def clear(self):
        """
        Empty our store of pools and direct them all to close.

        This will not affect in-flight connections, but they will not be
        re-used after completion.
        """
        self.pools.clear()

    def connection_from_host(self, host, port=None, scheme='http'):
        """
        Get a :class:`ConnectionPool` based on the host, port, and scheme.

        If ``port`` isn't given, it will be derived from the ``scheme`` using
        ``urllib3.connectionpool.port_by_scheme``.
        """

        if not host:
            raise LocationValueError("No host specified.")

        scheme = scheme or 'http'
        port = port or port_by_scheme.get(scheme, 80)
        pool_key = (scheme, host, port)

        with self.pools.lock:
            # If the scheme, host, or port doesn't match existing open
            # connections, open a new ConnectionPool.
            pool = self.pools.get(pool_key)
            if pool:
                return pool

            # Make a fresh ConnectionPool of the desired type
            pool = self._new_pool(scheme, host, port)
            self.pools[pool_key] = pool

        return pool

    def connection_from_url(self, url):
        """
        Similar to :func:`urllib3.connectionpool.connection_from_url` but
        doesn't pass any additional parameters to the
        :class:`urllib3.connectionpool.ConnectionPool` constructor.

        Additional parameters are taken from the :class:`.PoolManager`
        constructor.
        """
        u = parse_url(url)
        return self.connection_from_host(u.host, port=u.port, scheme=u.scheme)

    def urlopen(self, method, url, redirect=True, **kw):
        """
        Same as :meth:`urllib3.connectionpool.HTTPConnectionPool.urlopen`
        with custom cross-host redirect logic and only sends the request-uri
        portion of the ``url``.

        The given ``url`` parameter must be absolute, such that an appropriate
        :class:`urllib3.connectionpool.ConnectionPool` can be chosen for it.
        """
        u = parse_url(url)
        conn = self.connection_from_host(u.host, port=u.port, scheme=u.scheme)

        kw['assert_same_host'] = False
        kw['redirect'] = False
        if 'headers' not in kw:
            kw['headers'] = self.headers

        if self.proxy is not None and u.scheme == "http":
            response = conn.urlopen(method, url, **kw)
        else:
            response = conn.urlopen(method, u.request_uri, **kw)

        redirect_location = redirect and response.get_redirect_location()
        if not redirect_location:
            return response

        # Support relative URLs for redirecting.
        redirect_location = urljoin(url, redirect_location)

        # RFC 7231, Section 6.4.4
        if response.status == 303:
            method = 'GET'

        retries = kw.get('retries')
        if not isinstance(retries, Retry):
            retries = Retry.from_int(retries, redirect=redirect)

        try:
            retries = retries.increment(method, url, response=response, _pool=conn)
        except MaxRetryError:
            if retries.raise_on_redirect:
                raise
            return response

        kw['retries'] = retries
        kw['redirect'] = redirect

        log.info("Redirecting %s -> %s" % (url, redirect_location))
        return self.urlopen(method, redirect_location, **kw)


class ProxyManager(PoolManager):
    """
    Behaves just like :class:`PoolManager`, but sends all requests through
    the defined proxy, using the CONNECT method for HTTPS URLs.

    :param proxy_url:
        The URL of the proxy to be used.

    :param proxy_headers:
        A dictionary containing headers that will be sent to the proxy. In case
        of HTTP they are being sent with each request, while in the
        HTTPS/CONNECT case they are sent only once. Could be used for proxy
        authentication.

    Example:
        >>> proxy = urllib3.ProxyManager('http://localhost:3128/')
        >>> r1 = proxy.request('GET', 'http://google.com/')
        >>> r2 = proxy.request('GET', 'http://httpbin.org/')
        >>> len(proxy.pools)
        1
        >>> r3 = proxy.request('GET', 'https://httpbin.org/')
        >>> r4 = proxy.request('GET', 'https://twitter.com/')
        >>> len(proxy.pools)
        3

    """

    def __init__(self, proxy_url, num_pools=10, headers=None,
                 proxy_headers=None, **connection_pool_kw):

        if isinstance(proxy_url, HTTPConnectionPool):
            proxy_url = '%s://%s:%i' % (proxy_url.scheme, proxy_url.host,
                                        proxy_url.port)
        proxy = parse_url(proxy_url)
        if not proxy.port:
            port = port_by_scheme.get(proxy.scheme, 80)
            proxy = proxy._replace(port=port)

        if proxy.scheme not in ("http", "https"):
            raise ProxySchemeUnknown(proxy.scheme)

        self.proxy = proxy
        self.proxy_headers = proxy_headers or {}

        connection_pool_kw['_proxy'] = self.proxy
        connection_pool_kw['_proxy_headers'] = self.proxy_headers

        super(ProxyManager, self).__init__(
            num_pools, headers, **connection_pool_kw)

    def connection_from_host(self, host, port=None, scheme='http'):
        if scheme == "https":
            return super(ProxyManager, self).connection_from_host(
                host, port, scheme)

        return super(ProxyManager, self).connection_from_host(
            self.proxy.host, self.proxy.port, self.proxy.scheme)

    def _set_proxy_headers(self, url, headers=None):
        """
        Sets headers needed by proxies: specifically, the Accept and Host
        headers. Only sets headers not provided by the user.
        """
        headers_ = {'Accept': '*/*'}

        netloc = parse_url(url).netloc
        if netloc:
            headers_['Host'] = netloc

        if headers:
            headers_.update(headers)
        return headers_

    def urlopen(self, method, url, redirect=True, **kw):
        "Same as HTTP(S)ConnectionPool.urlopen, ``url`` must be absolute."
        u = parse_url(url)

        if u.scheme == "http":
            # For proxied HTTPS requests, httplib sets the necessary headers
            # on the CONNECT to the proxy. For HTTP, we'll definitely
            # need to set 'Host' at the very least.
            headers = kw.get('headers', self.headers)
            kw['headers'] = self._set_proxy_headers(url, headers)

        return super(ProxyManager, self).urlopen(method, url, redirect=redirect, **kw)


def proxy_from_url(url, **kw):
    return ProxyManager(proxy_url=url, **kw)
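For illustration, a minimal sketch (not part of the vendored code) of the pooling behaviour above: pools are keyed by (scheme, host, port), and once num_pools is exceeded the least recently used pool is disposed. No network access happens here, since connection_from_host only constructs pools.

from requests.packages.urllib3 import PoolManager  # vendored path (assumption)

manager = PoolManager(num_pools=2)
pool_a = manager.connection_from_host('example.com', 80, 'http')
pool_b = manager.connection_from_host('example.com', 80, 'http')
assert pool_a is pool_b          # same (scheme, host, port) key -> same cached pool
manager.connection_from_host('example.org', 443, 'https')
manager.connection_from_host('example.net', 80, 'http')
assert len(manager.pools) == 2   # the oldest pool was evicted and closed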
151
libraries/requests/packages/urllib3/request.py
Normal file

@@ -0,0 +1,151 @@
from __future__ import absolute_import
try:
    from urllib.parse import urlencode
except ImportError:
    from urllib import urlencode

from .filepost import encode_multipart_formdata


__all__ = ['RequestMethods']


class RequestMethods(object):
    """
    Convenience mixin for classes who implement a :meth:`urlopen` method, such
    as :class:`~urllib3.connectionpool.HTTPConnectionPool` and
    :class:`~urllib3.poolmanager.PoolManager`.

    Provides behavior for making common types of HTTP request methods and
    decides which type of request field encoding to use.

    Specifically,

    :meth:`.request_encode_url` is for sending requests whose fields are
    encoded in the URL (such as GET, HEAD, DELETE).

    :meth:`.request_encode_body` is for sending requests whose fields are
    encoded in the *body* of the request using multipart or www-form-urlencoded
    (such as for POST, PUT, PATCH).

    :meth:`.request` is for making any kind of request; it will look up the
    appropriate encoding format and use one of the above two methods to make
    the request.

    Initializer parameters:

    :param headers:
        Headers to include with all requests, unless other headers are given
        explicitly.
    """

    _encode_url_methods = set(['DELETE', 'GET', 'HEAD', 'OPTIONS'])

    def __init__(self, headers=None):
        self.headers = headers or {}

    def urlopen(self, method, url, body=None, headers=None,
                encode_multipart=True, multipart_boundary=None,
                **kw):  # Abstract
        raise NotImplementedError("Classes extending RequestMethods must implement "
                                  "their own ``urlopen`` method.")

    def request(self, method, url, fields=None, headers=None, **urlopen_kw):
        """
        Make a request using :meth:`urlopen` with the appropriate encoding of
        ``fields`` based on the ``method`` used.

        This is a convenience method that requires the least amount of manual
        effort. It can be used in most situations, while still having the
        option to drop down to more specific methods when necessary, such as
        :meth:`request_encode_url`, :meth:`request_encode_body`,
        or even the lowest level :meth:`urlopen`.
        """
        method = method.upper()

        if method in self._encode_url_methods:
            return self.request_encode_url(method, url, fields=fields,
                                           headers=headers,
                                           **urlopen_kw)
        else:
            return self.request_encode_body(method, url, fields=fields,
                                            headers=headers,
                                            **urlopen_kw)

    def request_encode_url(self, method, url, fields=None, headers=None,
                           **urlopen_kw):
        """
        Make a request using :meth:`urlopen` with the ``fields`` encoded in
        the url. This is useful for request methods like GET, HEAD, DELETE, etc.
        """
        if headers is None:
            headers = self.headers

        extra_kw = {'headers': headers}
        extra_kw.update(urlopen_kw)

        if fields:
            url += '?' + urlencode(fields)

        return self.urlopen(method, url, **extra_kw)

    def request_encode_body(self, method, url, fields=None, headers=None,
                            encode_multipart=True, multipart_boundary=None,
                            **urlopen_kw):
        """
        Make a request using :meth:`urlopen` with the ``fields`` encoded in
        the body. This is useful for request methods like POST, PUT, PATCH, etc.

        When ``encode_multipart=True`` (default), then
        :meth:`urllib3.filepost.encode_multipart_formdata` is used to encode
        the payload with the appropriate content type. Otherwise
        :meth:`urllib.urlencode` is used with the
        'application/x-www-form-urlencoded' content type.

        Multipart encoding must be used when posting files, and it's reasonably
        safe to use it at other times too. However, it may break request
        signing, such as with OAuth.

        Supports an optional ``fields`` parameter of key/value strings AND
        key/filetuple. A filetuple is a (filename, data, MIME type) tuple where
        the MIME type is optional. For example::

            fields = {
                'foo': 'bar',
                'fakefile': ('foofile.txt', 'contents of foofile'),
                'realfile': ('barfile.txt', open('realfile').read()),
                'typedfile': ('bazfile.bin', open('bazfile').read(),
                              'image/jpeg'),
                'nonamefile': 'contents of nonamefile field',
            }

        When uploading a file, providing a filename (the first parameter of the
        tuple) is optional but recommended to best mimic behavior of browsers.

        Note that if ``headers`` are supplied, the 'Content-Type' header will
        be overwritten because it depends on the dynamic random boundary string
        which is used to compose the body of the request. The random boundary
        string can be explicitly set with the ``multipart_boundary`` parameter.
        """
        if headers is None:
            headers = self.headers

        extra_kw = {'headers': {}}

        if fields:
            if 'body' in urlopen_kw:
                raise TypeError(
                    "request got values for both 'fields' and 'body', can only specify one.")

            if encode_multipart:
                body, content_type = encode_multipart_formdata(fields, boundary=multipart_boundary)
            else:
                body, content_type = urlencode(fields), 'application/x-www-form-urlencoded'

            extra_kw['body'] = body
            extra_kw['headers'] = {'Content-Type': content_type}

        extra_kw['headers'].update(headers)
        extra_kw.update(urlopen_kw)

        return self.urlopen(method, url, **extra_kw)
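For illustration, a minimal sketch (not part of the vendored code) of the method-based dispatch above, using a throwaway subclass that records what ``urlopen`` would receive instead of hitting the network.

from requests.packages.urllib3.request import RequestMethods  # vendored path (assumption)

class Recorder(RequestMethods):
    def urlopen(self, method, url, **kw):
        return (method, url, kw)

r = Recorder()
method, url, kw = r.request('GET', 'http://example.com/', fields={'q': 'kodi'})
assert url == 'http://example.com/?q=kodi'  # GET: fields encoded into the URL

method, url, kw = r.request('POST', 'http://example.com/', fields={'q': 'kodi'},
                            encode_multipart=False)
assert kw['body'] == 'q=kodi'               # POST: fields encoded into the body
assert kw['headers']['Content-Type'] == 'application/x-www-form-urlencoded'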
514
libraries/requests/packages/urllib3/response.py
Normal file

@@ -0,0 +1,514 @@
from __future__ import absolute_import
from contextlib import contextmanager
import zlib
import io
from socket import timeout as SocketTimeout
from socket import error as SocketError

from ._collections import HTTPHeaderDict
from .exceptions import (
    ProtocolError, DecodeError, ReadTimeoutError, ResponseNotChunked
)
from .packages.six import string_types as basestring, binary_type, PY3
from .packages.six.moves import http_client as httplib
from .connection import HTTPException, BaseSSLError
from .util.response import is_fp_closed, is_response_to_head


class DeflateDecoder(object):

    def __init__(self):
        self._first_try = True
        self._data = binary_type()
        self._obj = zlib.decompressobj()

    def __getattr__(self, name):
        return getattr(self._obj, name)

    def decompress(self, data):
        if not data:
            return data

        if not self._first_try:
            return self._obj.decompress(data)

        self._data += data
        try:
            return self._obj.decompress(data)
        except zlib.error:
            self._first_try = False
            self._obj = zlib.decompressobj(-zlib.MAX_WBITS)
            try:
                return self.decompress(self._data)
            finally:
                self._data = None


class GzipDecoder(object):

    def __init__(self):
        self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS)

    def __getattr__(self, name):
        return getattr(self._obj, name)

    def decompress(self, data):
        if not data:
            return data
        return self._obj.decompress(data)


def _get_decoder(mode):
    if mode == 'gzip':
        return GzipDecoder()

    return DeflateDecoder()


class HTTPResponse(io.IOBase):
    """
    HTTP Response container.

    Backwards-compatible to httplib's HTTPResponse but the response ``body`` is
    loaded and decoded on-demand when the ``data`` property is accessed. This
    class is also compatible with the Python standard library's :mod:`io`
    module, and can hence be treated as a readable object in the context of that
    framework.

    Extra parameters for behaviour not present in httplib.HTTPResponse:

    :param preload_content:
        If True, the response's body will be preloaded during construction.

    :param decode_content:
        If True, attempts will be made to decode specific content-encodings
        based on headers (like 'gzip' and 'deflate'); if False, decoding is
        skipped and the raw data is used instead.

    :param original_response:
        When this HTTPResponse wrapper is generated from an httplib.HTTPResponse
        object, it's convenient to include the original for debug purposes. It's
        otherwise unused.
    """

    CONTENT_DECODERS = ['gzip', 'deflate']
    REDIRECT_STATUSES = [301, 302, 303, 307, 308]

    def __init__(self, body='', headers=None, status=0, version=0, reason=None,
                 strict=0, preload_content=True, decode_content=True,
                 original_response=None, pool=None, connection=None):

        if isinstance(headers, HTTPHeaderDict):
            self.headers = headers
        else:
            self.headers = HTTPHeaderDict(headers)
        self.status = status
        self.version = version
        self.reason = reason
        self.strict = strict
        self.decode_content = decode_content

        self._decoder = None
        self._body = None
        self._fp = None
        self._original_response = original_response
        self._fp_bytes_read = 0

        if body and isinstance(body, (basestring, binary_type)):
            self._body = body

        self._pool = pool
        self._connection = connection

        if hasattr(body, 'read'):
            self._fp = body

        # Are we using the chunked-style of transfer encoding?
        self.chunked = False
        self.chunk_left = None
        tr_enc = self.headers.get('transfer-encoding', '').lower()
        # Don't incur the penalty of creating a list and then discarding it
        encodings = (enc.strip() for enc in tr_enc.split(","))
        if "chunked" in encodings:
            self.chunked = True

        # If requested, preload the body.
        if preload_content and not self._body:
            self._body = self.read(decode_content=decode_content)

    def get_redirect_location(self):
        """
        Should we redirect and where to?

        :returns: Truthy redirect location string if we got a redirect status
            code and valid location. ``None`` if redirect status and no
            location. ``False`` if not a redirect status code.
        """
        if self.status in self.REDIRECT_STATUSES:
            return self.headers.get('location')

        return False

    def release_conn(self):
        if not self._pool or not self._connection:
            return

        self._pool._put_conn(self._connection)
        self._connection = None

    @property
    def data(self):
        # For backwards-compat with urllib3 0.4 and earlier.
        if self._body:
            return self._body

        if self._fp:
            return self.read(cache_content=True)

    def tell(self):
        """
        Obtain the number of bytes pulled over the wire so far. May differ from
        the amount of content returned by :meth:``HTTPResponse.read`` if bytes
        are encoded on the wire (e.g, compressed).
        """
        return self._fp_bytes_read

    def _init_decoder(self):
        """
        Set-up the _decoder attribute if necessary.
        """
        # Note: content-encoding value should be case-insensitive, per RFC 7230
        # Section 3.2
        content_encoding = self.headers.get('content-encoding', '').lower()
        if self._decoder is None and content_encoding in self.CONTENT_DECODERS:
            self._decoder = _get_decoder(content_encoding)

    def _decode(self, data, decode_content, flush_decoder):
        """
        Decode the data passed in and potentially flush the decoder.
        """
        try:
            if decode_content and self._decoder:
                data = self._decoder.decompress(data)
        except (IOError, zlib.error) as e:
            content_encoding = self.headers.get('content-encoding', '').lower()
            raise DecodeError(
                "Received response with content-encoding: %s, but "
                "failed to decode it." % content_encoding, e)

        if flush_decoder and decode_content:
            data += self._flush_decoder()

        return data

    def _flush_decoder(self):
        """
        Flushes the decoder. Should only be called if the decoder is actually
        being used.
        """
        if self._decoder:
            buf = self._decoder.decompress(b'')
            return buf + self._decoder.flush()

        return b''

    @contextmanager
    def _error_catcher(self):
        """
        Catch low-level python exceptions, instead re-raising urllib3
        variants, so that low-level exceptions are not leaked in the
        high-level api.

        On exit, release the connection back to the pool.
        """
        try:
            try:
                yield

            except SocketTimeout:
                # FIXME: Ideally we'd like to include the url in the ReadTimeoutError but
                # there is yet no clean way to get at it from this context.
                raise ReadTimeoutError(self._pool, None, 'Read timed out.')

            except BaseSSLError as e:
                # FIXME: Is there a better way to differentiate between SSLErrors?
                if 'read operation timed out' not in str(e):  # Defensive:
                    # This shouldn't happen but just in case we're missing an edge
                    # case, let's avoid swallowing SSL errors.
                    raise

                raise ReadTimeoutError(self._pool, None, 'Read timed out.')

            except (HTTPException, SocketError) as e:
                # This includes IncompleteRead.
                raise ProtocolError('Connection broken: %r' % e, e)

        except Exception:
            # The response may not be closed but we're not going to use it anymore
            # so close it now to ensure that the connection is released back to the pool.
            if self._original_response and not self._original_response.isclosed():
                self._original_response.close()

            # Closing the response may not actually be sufficient to close
            # everything, so if we have a hold of the connection close that
            # too.
            if self._connection is not None:
                self._connection.close()

            raise
        finally:
            if self._original_response and self._original_response.isclosed():
                self.release_conn()

    def read(self, amt=None, decode_content=None, cache_content=False):
        """
        Similar to :meth:`httplib.HTTPResponse.read`, but with two additional
        parameters: ``decode_content`` and ``cache_content``.

        :param amt:
            How much of the content to read. If specified, caching is skipped
            because it doesn't make sense to cache partial content as the full
            response.

        :param decode_content:
            If True, will attempt to decode the body based on the
            'content-encoding' header.

        :param cache_content:
            If True, will save the returned data such that the same result is
            returned regardless of the state of the underlying file object. This
            is useful if you want the ``.data`` property to continue working
            after having ``.read()`` the file object. (Overridden if ``amt`` is
            set.)
        """
        self._init_decoder()
        if decode_content is None:
            decode_content = self.decode_content

        if self._fp is None:
            return

        flush_decoder = False
        data = None

        with self._error_catcher():
            if amt is None:
                # cStringIO doesn't like amt=None
                data = self._fp.read()
                flush_decoder = True
            else:
                cache_content = False
                data = self._fp.read(amt)
                if amt != 0 and not data:  # Platform-specific: Buggy versions of Python.
                    # Close the connection when no data is returned
                    #
                    # This is redundant to what httplib/http.client _should_
                    # already do. However, versions of python released before
                    # December 15, 2012 (http://bugs.python.org/issue16298) do
                    # not properly close the connection in all cases. There is
                    # no harm in redundantly calling close.
                    self._fp.close()
                    flush_decoder = True

        if data:
            self._fp_bytes_read += len(data)

            data = self._decode(data, decode_content, flush_decoder)

            if cache_content:
                self._body = data

        return data

    def stream(self, amt=2**16, decode_content=None):
        """
        A generator wrapper for the read() method. A call will block until
        ``amt`` bytes have been read from the connection or until the
        connection is closed.

        :param amt:
            How much of the content to read. The generator will return up to
            this much data per iteration, but may return less. This is
            particularly likely when using compressed data. However, the
            empty string will never be returned.

        :param decode_content:
            If True, will attempt to decode the body based on the
            'content-encoding' header.
        """
        if self.chunked:
            for line in self.read_chunked(amt, decode_content=decode_content):
                yield line
        else:
            while not is_fp_closed(self._fp):
                data = self.read(amt=amt, decode_content=decode_content)

                if data:
                    yield data

    @classmethod
    def from_httplib(ResponseCls, r, **response_kw):
        """
        Given an :class:`httplib.HTTPResponse` instance ``r``, return a
        corresponding :class:`urllib3.response.HTTPResponse` object.

        Remaining parameters are passed to the HTTPResponse constructor, along
        with ``original_response=r``.
        """
        headers = r.msg

        if not isinstance(headers, HTTPHeaderDict):
            if PY3:  # Python 3
                headers = HTTPHeaderDict(headers.items())
            else:  # Python 2
                headers = HTTPHeaderDict.from_httplib(headers)

        # HTTPResponse objects in Python 3 don't have a .strict attribute
        strict = getattr(r, 'strict', 0)
        resp = ResponseCls(body=r,
                           headers=headers,
                           status=r.status,
                           version=r.version,
                           reason=r.reason,
                           strict=strict,
                           original_response=r,
                           **response_kw)
        return resp

    # Backwards-compatibility methods for httplib.HTTPResponse
    def getheaders(self):
        return self.headers

    def getheader(self, name, default=None):
        return self.headers.get(name, default)

    # Overrides from io.IOBase
    def close(self):
        if not self.closed:
            self._fp.close()

    @property
    def closed(self):
        if self._fp is None:
            return True
        elif hasattr(self._fp, 'closed'):
            return self._fp.closed
        elif hasattr(self._fp, 'isclosed'):  # Python 2
            return self._fp.isclosed()
        else:
            return True

    def fileno(self):
        if self._fp is None:
            raise IOError("HTTPResponse has no file to get a fileno from")
        elif hasattr(self._fp, "fileno"):
            return self._fp.fileno()
        else:
            raise IOError("The file-like object this HTTPResponse is wrapped "
                          "around has no file descriptor")

    def flush(self):
        if self._fp is not None and hasattr(self._fp, 'flush'):
            return self._fp.flush()

    def readable(self):
        # This method is required for `io` module compatibility.
        return True

    def readinto(self, b):
        # This method is required for `io` module compatibility.
        temp = self.read(len(b))
        if len(temp) == 0:
            return 0
        else:
            b[:len(temp)] = temp
            return len(temp)

    def _update_chunk_length(self):
        # First, we'll figure out length of a chunk and then
        # we'll try to read it from socket.
        if self.chunk_left is not None:
            return
        line = self._fp.fp.readline()
        line = line.split(b';', 1)[0]
        try:
            self.chunk_left = int(line, 16)
        except ValueError:
            # Invalid chunked protocol response, abort.
            self.close()
            raise httplib.IncompleteRead(line)

    def _handle_chunk(self, amt):
        returned_chunk = None
        if amt is None:
            chunk = self._fp._safe_read(self.chunk_left)
            returned_chunk = chunk
            self._fp._safe_read(2)  # Toss the CRLF at the end of the chunk.
            self.chunk_left = None
        elif amt < self.chunk_left:
            value = self._fp._safe_read(amt)
            self.chunk_left = self.chunk_left - amt
            returned_chunk = value
        elif amt == self.chunk_left:
            value = self._fp._safe_read(amt)
            self._fp._safe_read(2)  # Toss the CRLF at the end of the chunk.
            self.chunk_left = None
            returned_chunk = value
        else:  # amt > self.chunk_left
            returned_chunk = self._fp._safe_read(self.chunk_left)
            self._fp._safe_read(2)  # Toss the CRLF at the end of the chunk.
            self.chunk_left = None
        return returned_chunk

    def read_chunked(self, amt=None, decode_content=None):
        """
        Similar to :meth:`HTTPResponse.read`, but with an additional
        parameter: ``decode_content``.

        :param decode_content:
            If True, will attempt to decode the body based on the
            'content-encoding' header.
        """
        self._init_decoder()
        # FIXME: Rewrite this method and make it a class with a better structured logic.
        if not self.chunked:
            raise ResponseNotChunked(
                "Response is not chunked. "
                "Header 'transfer-encoding: chunked' is missing.")

        # Don't bother reading the body of a HEAD request.
        if self._original_response and is_response_to_head(self._original_response):
            self._original_response.close()
            return

        with self._error_catcher():
            while True:
                self._update_chunk_length()
                if self.chunk_left == 0:
                    break
                chunk = self._handle_chunk(amt)
                decoded = self._decode(chunk, decode_content=decode_content,
                                       flush_decoder=False)
                if decoded:
                    yield decoded

            if decode_content:
                # On CPython and PyPy, we should never need to flush the
                # decoder. However, on Jython we *might* need to, so
                # let's defensively do it anyway.
                decoded = self._flush_decoder()
                if decoded:  # Platform-specific: Jython.
                    yield decoded

            # Chunk content ends with \r\n: discard it.
            while True:
                line = self._fp.fp.readline()
                if not line:
                    # Some sites may not end with '\r\n'.
                    break
                if line == b'\r\n':
                    break

            # We read everything; close the "file".
            if self._original_response:
                self._original_response.close()
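For illustration, a minimal sketch (not part of the vendored code) of the on-demand decoding described above, wrapping a gzip-compressed buffer in an HTTPResponse instead of a live socket.

import gzip
import io
from requests.packages.urllib3.response import HTTPResponse  # vendored path (assumption)

raw = io.BytesIO()
with gzip.GzipFile(fileobj=raw, mode='wb') as f:
    f.write(b'hello jellyfin')

resp = HTTPResponse(body=io.BytesIO(raw.getvalue()),
                    headers={'content-encoding': 'gzip'},
                    status=200, preload_content=False)
assert resp.read() == b'hello jellyfin'  # decoded transparently via GzipDecoder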
44
libraries/requests/packages/urllib3/util/__init__.py
Normal file

@@ -0,0 +1,44 @@
from __future__ import absolute_import
# For backwards compatibility, provide imports that used to be here.
from .connection import is_connection_dropped
from .request import make_headers
from .response import is_fp_closed
from .ssl_ import (
    SSLContext,
    HAS_SNI,
    assert_fingerprint,
    resolve_cert_reqs,
    resolve_ssl_version,
    ssl_wrap_socket,
)
from .timeout import (
    current_time,
    Timeout,
)

from .retry import Retry
from .url import (
    get_host,
    parse_url,
    split_first,
    Url,
)

__all__ = (
    'HAS_SNI',
    'SSLContext',
    'Retry',
    'Timeout',
    'Url',
    'assert_fingerprint',
    'current_time',
    'is_connection_dropped',
    'is_fp_closed',
    'get_host',
    'parse_url',
    'make_headers',
    'resolve_cert_reqs',
    'resolve_ssl_version',
    'split_first',
    'ssl_wrap_socket',
)
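For illustration, a minimal sketch (not part of the vendored code) of what this shim buys downstream code: the symbols keep their historical ``urllib3.util`` import path even though the implementations now live in submodules.

from requests.packages.urllib3.util import parse_url, Timeout  # vendored path (assumption)

u = parse_url('http://example.com:8096/web/index.html')
assert (u.scheme, u.host, u.port) == ('http', 'example.com', 8096)

timeout = Timeout(connect=2.0, read=7.0)  # reusable timeout policy object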
101
libraries/requests/packages/urllib3/util/connection.py
Normal file

@@ -0,0 +1,101 @@
from __future__ import absolute_import
import socket
try:
    from select import poll, POLLIN
except ImportError:  # `poll` doesn't exist on OSX and other platforms
    poll = False
try:
    from select import select
except ImportError:  # `select` doesn't exist on AppEngine.
    select = False


def is_connection_dropped(conn):  # Platform-specific
    """
    Returns True if the connection is dropped and should be closed.

    :param conn:
        :class:`httplib.HTTPConnection` object.

    Note: For platforms like AppEngine, this will always return ``False`` to
    let the platform handle connection recycling transparently for us.
    """
    sock = getattr(conn, 'sock', False)
    if sock is False:  # Platform-specific: AppEngine
        return False
    if sock is None:  # Connection already closed (such as by httplib).
        return True

    if not poll:
        if not select:  # Platform-specific: AppEngine
            return False

        try:
            return select([sock], [], [], 0.0)[0]
        except socket.error:
            return True

    # This version is better on platforms that support it.
    p = poll()
    p.register(sock, POLLIN)
    for (fno, ev) in p.poll(0.0):
        if fno == sock.fileno():
            # Either data is buffered (bad), or the connection is dropped.
            return True


# This function is copied from socket.py in the Python 2.7 standard
# library test suite. Added to its signature is only `socket_options`.
def create_connection(address, timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
                      source_address=None, socket_options=None):
    """Connect to *address* and return the socket object.

    Convenience function.  Connect to *address* (a 2-tuple ``(host,
    port)``) and return the socket object.  Passing the optional
    *timeout* parameter will set the timeout on the socket instance
    before attempting to connect.  If no *timeout* is supplied, the
    global default timeout setting returned by :func:`getdefaulttimeout`
    is used.  If *source_address* is set it must be a tuple of (host, port)
    for the socket to bind as a source address before making the connection.
    A host of '' or port 0 tells the OS to use the default.
    """

    host, port = address
    if host.startswith('['):
        host = host.strip('[]')
    err = None
    for res in socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM):
        af, socktype, proto, canonname, sa = res
        sock = None
        try:
            sock = socket.socket(af, socktype, proto)

            # If provided, set socket level options before connecting.
            # This is the only addition urllib3 makes to this function.
            _set_socket_options(sock, socket_options)

            if timeout is not socket._GLOBAL_DEFAULT_TIMEOUT:
                sock.settimeout(timeout)
            if source_address:
                sock.bind(source_address)
            sock.connect(sa)
            return sock

        except socket.error as e:
            err = e
            if sock is not None:
                sock.close()
                sock = None

    if err is not None:
        raise err

    raise socket.error("getaddrinfo returns an empty list")


def _set_socket_options(sock, options):
    if options is None:
        return

    for opt in options:
        sock.setsockopt(*opt)
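For illustration, a minimal sketch (not part of the vendored code) of the ``socket_options`` extension noted above: options are (level, optname, value) tuples applied before connect(), here enabling TCP_NODELAY against a throwaway local listener.

import socket
from requests.packages.urllib3.util.connection import create_connection  # vendored path (assumption)

server = socket.socket()
server.bind(('127.0.0.1', 0))
server.listen(1)

sock = create_connection(server.getsockname(), timeout=2.0,
                         socket_options=[(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)])
assert sock.getsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY) != 0
sock.close()
server.close()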
72
libraries/requests/packages/urllib3/util/request.py
Normal file

@@ -0,0 +1,72 @@
from __future__ import absolute_import
from base64 import b64encode

from ..packages.six import b

ACCEPT_ENCODING = 'gzip,deflate'


def make_headers(keep_alive=None, accept_encoding=None, user_agent=None,
                 basic_auth=None, proxy_basic_auth=None, disable_cache=None):
    """
    Shortcuts for generating request headers.

    :param keep_alive:
        If ``True``, adds 'connection: keep-alive' header.

    :param accept_encoding:
        Can be a boolean, list, or string.
        ``True`` translates to 'gzip,deflate'.
        List will get joined by comma.
        String will be used as provided.

    :param user_agent:
        String representing the user-agent you want, such as
        "python-urllib3/0.6"

    :param basic_auth:
        Colon-separated username:password string for 'authorization: basic ...'
        auth header.

    :param proxy_basic_auth:
        Colon-separated username:password string for 'proxy-authorization: basic ...'
        auth header.

    :param disable_cache:
        If ``True``, adds 'cache-control: no-cache' header.

    Example::

        >>> make_headers(keep_alive=True, user_agent="Batman/1.0")
        {'connection': 'keep-alive', 'user-agent': 'Batman/1.0'}
        >>> make_headers(accept_encoding=True)
        {'accept-encoding': 'gzip,deflate'}
    """
    headers = {}
    if accept_encoding:
        if isinstance(accept_encoding, str):
            pass
        elif isinstance(accept_encoding, list):
            accept_encoding = ','.join(accept_encoding)
        else:
            accept_encoding = ACCEPT_ENCODING
        headers['accept-encoding'] = accept_encoding

    if user_agent:
        headers['user-agent'] = user_agent

    if keep_alive:
        headers['connection'] = 'keep-alive'

    if basic_auth:
        headers['authorization'] = 'Basic ' + \
            b64encode(b(basic_auth)).decode('utf-8')

    if proxy_basic_auth:
        headers['proxy-authorization'] = 'Basic ' + \
            b64encode(b(proxy_basic_auth)).decode('utf-8')

    if disable_cache:
        headers['cache-control'] = 'no-cache'

    return headers
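For illustration, a minimal sketch (not part of the vendored code) of the ``basic_auth`` branch above, which the docstring examples don't cover: the "user:password" string is base64-encoded into an Authorization header.

import base64
from requests.packages.urllib3.util.request import make_headers  # vendored path (assumption)

headers = make_headers(basic_auth='user:password')
scheme, encoded = headers['authorization'].split(' ', 1)
assert scheme == 'Basic'
assert base64.b64decode(encoded) == b'user:password'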
74
libraries/requests/packages/urllib3/util/response.py
Normal file

@@ -0,0 +1,74 @@
from __future__ import absolute_import
from ..packages.six.moves import http_client as httplib

from ..exceptions import HeaderParsingError


def is_fp_closed(obj):
    """
    Checks whether a given file-like object is closed.

    :param obj:
        The file-like object to check.
    """

    try:
        # Check via the official file-like-object way.
        return obj.closed
    except AttributeError:
        pass

    try:
        # Check if the object is a container for another file-like object that
        # gets released on exhaustion (e.g. HTTPResponse).
        return obj.fp is None
    except AttributeError:
        pass

    raise ValueError("Unable to determine whether fp is closed.")


def assert_header_parsing(headers):
    """
    Asserts whether all headers have been successfully parsed.
    Extracts encountered errors from the result of parsing headers.

    Only works on Python 3.

    :param headers: Headers to verify.
    :type headers: `httplib.HTTPMessage`.

    :raises urllib3.exceptions.HeaderParsingError:
        If parsing errors are found.
    """

    # This will fail silently if we pass in the wrong kind of parameter.
    # To make debugging easier add an explicit check.
    if not isinstance(headers, httplib.HTTPMessage):
        raise TypeError('expected httplib.Message, got {0}.'.format(
            type(headers)))

    defects = getattr(headers, 'defects', None)
    get_payload = getattr(headers, 'get_payload', None)

    unparsed_data = None
    if get_payload:  # Platform-specific: Python 3.
        unparsed_data = get_payload()

    if defects or unparsed_data:
        raise HeaderParsingError(defects=defects, unparsed_data=unparsed_data)


def is_response_to_head(response):
    """
    Checks whether the request of a response was a HEAD request.
    Handles the quirks of AppEngine.

    :param response:
    :type response: :class:`httplib.HTTPResponse`
    """
    # FIXME: Can we do this somehow without accessing private httplib _method?
    method = response._method
    if isinstance(method, int):  # Platform-specific: Appengine
        return method == 3
    return method.upper() == 'HEAD'
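For illustration, a minimal sketch (not part of the vendored code) of the two duck-typing checks in is_fp_closed(): a plain file object exposes ``.closed``, while an httplib-style wrapper signals exhaustion by setting ``.fp`` to None.

import io
from requests.packages.urllib3.util.response import is_fp_closed  # vendored path (assumption)

buf = io.BytesIO(b'data')
assert is_fp_closed(buf) is False
buf.close()
assert is_fp_closed(buf) is True

class HttplibLike(object):  # stand-in for an httplib.HTTPResponse (hypothetical)
    fp = None

assert is_fp_closed(HttplibLike()) is True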
286
libraries/requests/packages/urllib3/util/retry.py
Normal file

@@ -0,0 +1,286 @@
from __future__ import absolute_import
|
||||
import time
|
||||
import logging
|
||||
|
||||
from ..exceptions import (
|
||||
ConnectTimeoutError,
|
||||
MaxRetryError,
|
||||
ProtocolError,
|
||||
ReadTimeoutError,
|
||||
ResponseError,
|
||||
)
|
||||
from ..packages import six
|
||||
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Retry(object):
|
||||
""" Retry configuration.
|
||||
|
||||
Each retry attempt will create a new Retry object with updated values, so
|
||||
they can be safely reused.
|
||||
|
||||
Retries can be defined as a default for a pool::
|
||||
|
||||
retries = Retry(connect=5, read=2, redirect=5)
|
||||
http = PoolManager(retries=retries)
|
||||
response = http.request('GET', 'http://example.com/')
|
||||
|
||||
Or per-request (which overrides the default for the pool)::
|
||||
|
||||
response = http.request('GET', 'http://example.com/', retries=Retry(10))
|
||||
|
||||
Retries can be disabled by passing ``False``::
|
||||
|
||||
response = http.request('GET', 'http://example.com/', retries=False)
|
||||
|
||||
Errors will be wrapped in :class:`~urllib3.exceptions.MaxRetryError` unless
|
||||
retries are disabled, in which case the causing exception will be raised.
|
||||
|
||||
:param int total:
|
||||
Total number of retries to allow. Takes precedence over other counts.
|
||||
|
||||
Set to ``None`` to remove this constraint and fall back on other
|
||||
counts. It's a good idea to set this to some sensibly-high value to
|
||||
account for unexpected edge cases and avoid infinite retry loops.
|
||||
|
||||
Set to ``0`` to fail on the first retry.
|
||||
|
||||
Set to ``False`` to disable and imply ``raise_on_redirect=False``.
|
||||
|
||||
:param int connect:
|
||||
How many connection-related errors to retry on.
|
||||
|
||||
These are errors raised before the request is sent to the remote server,
|
||||
which we assume has not triggered the server to process the request.
|
||||
|
||||
Set to ``0`` to fail on the first retry of this type.
|
||||
|
||||
:param int read:
|
||||
How many times to retry on read errors.
|
||||
|
||||
These errors are raised after the request was sent to the server, so the
|
||||
request may have side-effects.
|
||||
|
||||
Set to ``0`` to fail on the first retry of this type.
|
||||
|
||||
:param int redirect:
|
||||
How many redirects to perform. Limit this to avoid infinite redirect
|
||||
loops.
|
||||
|
||||
A redirect is a HTTP response with a status code 301, 302, 303, 307 or
|
||||
308.
|
||||
|
||||
Set to ``0`` to fail on the first retry of this type.
|
||||
|
||||
Set to ``False`` to disable and imply ``raise_on_redirect=False``.
|
||||
|
||||
:param iterable method_whitelist:
|
||||
Set of uppercased HTTP method verbs that we should retry on.
|
||||
|
||||
By default, we only retry on methods which are considered to be
|
||||
indempotent (multiple requests with the same parameters end with the
|
||||
same state). See :attr:`Retry.DEFAULT_METHOD_WHITELIST`.
|
||||
|
||||
:param iterable status_forcelist:
|
||||
A set of HTTP status codes that we should force a retry on.
|
||||
|
||||
By default, this is disabled with ``None``.
|
||||
|
||||
:param float backoff_factor:
|
||||
A backoff factor to apply between attempts. urllib3 will sleep for::
|
||||
|
||||
{backoff factor} * (2 ^ ({number of total retries} - 1))
|
||||
|
||||
seconds. If the backoff_factor is 0.1, then :func:`.sleep` will sleep
|
||||
for [0.1s, 0.2s, 0.4s, ...] between retries. It will never be longer
|
||||
than :attr:`Retry.BACKOFF_MAX`.
|
||||
|
||||
By default, backoff is disabled (set to 0).
|
||||
|
||||
:param bool raise_on_redirect: Whether, if the number of redirects is
|
||||
exhausted, to raise a MaxRetryError, or to return a response with a
|
||||
response code in the 3xx range.
|
||||
"""
|
||||
|
||||
DEFAULT_METHOD_WHITELIST = frozenset([
|
||||
'HEAD', 'GET', 'PUT', 'DELETE', 'OPTIONS', 'TRACE'])
|
||||
|
||||
#: Maximum backoff time.
|
||||
BACKOFF_MAX = 120
|
||||
|
||||
def __init__(self, total=10, connect=None, read=None, redirect=None,
|
||||
method_whitelist=DEFAULT_METHOD_WHITELIST, status_forcelist=None,
|
||||
backoff_factor=0, raise_on_redirect=True, _observed_errors=0):
|
||||
|
||||
self.total = total
|
||||
self.connect = connect
|
||||
self.read = read
|
||||
|
||||
if redirect is False or total is False:
|
||||
redirect = 0
|
||||
raise_on_redirect = False
|
||||
|
||||
self.redirect = redirect
|
||||
self.status_forcelist = status_forcelist or set()
|
||||
self.method_whitelist = method_whitelist
|
||||
self.backoff_factor = backoff_factor
|
||||
self.raise_on_redirect = raise_on_redirect
|
||||
self._observed_errors = _observed_errors # TODO: use .history instead?
|
||||
|
||||
def new(self, **kw):
|
||||
params = dict(
|
||||
total=self.total,
|
||||
connect=self.connect, read=self.read, redirect=self.redirect,
|
||||
method_whitelist=self.method_whitelist,
|
||||
status_forcelist=self.status_forcelist,
|
||||
backoff_factor=self.backoff_factor,
|
||||
raise_on_redirect=self.raise_on_redirect,
|
||||
_observed_errors=self._observed_errors,
|
||||
)
|
||||
params.update(kw)
|
||||
return type(self)(**params)
|
||||
|
||||
@classmethod
|
||||
def from_int(cls, retries, redirect=True, default=None):
|
||||
""" Backwards-compatibility for the old retries format."""
|
||||
if retries is None:
|
||||
retries = default if default is not None else cls.DEFAULT
|
||||
|
||||
if isinstance(retries, Retry):
|
||||
return retries
|
||||
|
||||
redirect = bool(redirect) and None
|
||||
new_retries = cls(retries, redirect=redirect)
|
||||
log.debug("Converted retries value: %r -> %r" % (retries, new_retries))
|
||||
return new_retries
|
||||
|
||||
def get_backoff_time(self):
|
||||
""" Formula for computing the current backoff
|
||||
|
||||
:rtype: float
|
||||
"""
|
||||
if self._observed_errors <= 1:
|
||||
return 0
|
||||
|
||||
backoff_value = self.backoff_factor * (2 ** (self._observed_errors - 1))
|
||||
return min(self.BACKOFF_MAX, backoff_value)
|
||||
|
||||
def sleep(self):
|
||||
""" Sleep between retry attempts using an exponential backoff.
|
||||
|
||||
By default, the backoff factor is 0 and this method will return
|
||||
immediately.
|
||||
"""
|
||||
backoff = self.get_backoff_time()
|
||||
if backoff <= 0:
|
||||
return
|
||||
time.sleep(backoff)
|
||||
|
||||
def _is_connection_error(self, err):
|
||||
""" Errors when we're fairly sure that the server did not receive the
|
||||
request, so it should be safe to retry.
|
||||
"""
|
||||
return isinstance(err, ConnectTimeoutError)
|
||||
|
||||
def _is_read_error(self, err):
|
||||
""" Errors that occur after the request has been started, so we should
|
||||
assume that the server began processing it.
|
||||
"""
|
||||
return isinstance(err, (ReadTimeoutError, ProtocolError))
|
||||
|
||||
def is_forced_retry(self, method, status_code):
|
||||
""" Is this method/status code retryable? (Based on method/codes whitelists)
|
||||
"""
|
||||
if self.method_whitelist and method.upper() not in self.method_whitelist:
|
||||
return False
|
||||
|
||||
return self.status_forcelist and status_code in self.status_forcelist
|
||||
|
||||
def is_exhausted(self):
|
||||
""" Are we out of retries? """
|
||||
retry_counts = (self.total, self.connect, self.read, self.redirect)
|
||||
retry_counts = list(filter(None, retry_counts))
|
||||
if not retry_counts:
|
||||
return False
|
||||
|
||||
return min(retry_counts) < 0
|
||||
|
||||
    def increment(self, method=None, url=None, response=None, error=None,
                  _pool=None, _stacktrace=None):
        """ Return a new Retry object with incremented retry counters.

        :param response: A response object, or None, if the server did not
            return a response.
        :type response: :class:`~urllib3.response.HTTPResponse`
        :param Exception error: An error encountered during the request, or
            None if the response was received successfully.

        :return: A new ``Retry`` object.
        """
        if self.total is False and error:
            # Disabled, indicate to re-raise the error.
            raise six.reraise(type(error), error, _stacktrace)

        total = self.total
        if total is not None:
            total -= 1

        _observed_errors = self._observed_errors
        connect = self.connect
        read = self.read
        redirect = self.redirect
        cause = 'unknown'

        if error and self._is_connection_error(error):
            # Connect retry?
            if connect is False:
                raise six.reraise(type(error), error, _stacktrace)
            elif connect is not None:
                connect -= 1
            _observed_errors += 1

        elif error and self._is_read_error(error):
            # Read retry?
            if read is False:
                raise six.reraise(type(error), error, _stacktrace)
            elif read is not None:
                read -= 1
            _observed_errors += 1

        elif response and response.get_redirect_location():
            # Redirect retry?
            if redirect is not None:
                redirect -= 1
            cause = 'too many redirects'

        else:
            # Incrementing because of a server error like a 500 in
            # status_forcelist and the given method is in the whitelist
            _observed_errors += 1
            cause = ResponseError.GENERIC_ERROR
            if response and response.status:
                cause = ResponseError.SPECIFIC_ERROR.format(
                    status_code=response.status)

        new_retry = self.new(
            total=total,
            connect=connect, read=read, redirect=redirect,
            _observed_errors=_observed_errors)

        if new_retry.is_exhausted():
            raise MaxRetryError(_pool, url, error or ResponseError(cause))

        log.debug("Incremented Retry for (url='%s'): %r" % (url, new_retry))

        return new_retry
    def __repr__(self):
        return ('{cls.__name__}(total={self.total}, connect={self.connect}, '
                'read={self.read}, redirect={self.redirect})').format(
                    cls=type(self), self=self)


# For backwards compatibility (equivalent to pre-v1.9):
Retry.DEFAULT = Retry(3)
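A minimal usage sketch (not part of the diff; the parameter values are
illustrative): building a Retry policy and passing it per request through a
PoolManager, which forwards it to HTTPConnectionPool.urlopen().

    from requests.packages.urllib3 import PoolManager
    from requests.packages.urllib3.util.retry import Retry

    retries = Retry(total=3, backoff_factor=0.5,
                    status_forcelist=set([500, 502, 503]))
    http = PoolManager()
    response = http.request('GET', 'http://example.com/', retries=retries)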
317
libraries/requests/packages/urllib3/util/ssl_.py
Normal file
@@ -0,0 +1,317 @@
from __future__ import absolute_import
import errno
import warnings
import hmac

from binascii import hexlify, unhexlify
from hashlib import md5, sha1, sha256

from ..exceptions import SSLError, InsecurePlatformWarning, SNIMissingWarning


SSLContext = None
HAS_SNI = False
create_default_context = None

# Maps the length of a digest to a possible hash function producing this digest
HASHFUNC_MAP = {
    32: md5,
    40: sha1,
    64: sha256,
}
def _const_compare_digest_backport(a, b):
    """
    Compare two digests of equal length in constant time.

    The digests must be of type str/bytes.
    Returns True if the digests match, and False otherwise.
    """
    result = abs(len(a) - len(b))
    for l, r in zip(bytearray(a), bytearray(b)):
        result |= l ^ r
    return result == 0


_const_compare_digest = getattr(hmac, 'compare_digest',
                                _const_compare_digest_backport)
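# Illustrative (not part of the vendored file): the comparison touches every
# byte regardless of where a mismatch occurs, so timing does not leak the
# position of the first differing byte:
#
#   _const_compare_digest(b'\x01\x02', b'\x01\x02')  # -> True
#   _const_compare_digest(b'\x01\x02', b'\x01\x03')  # -> False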
try:  # Test for SSL features
    import ssl
    from ssl import wrap_socket, CERT_NONE, PROTOCOL_SSLv23
    from ssl import HAS_SNI  # Has SNI?
except ImportError:
    pass


try:
    from ssl import OP_NO_SSLv2, OP_NO_SSLv3, OP_NO_COMPRESSION
except ImportError:
    OP_NO_SSLv2, OP_NO_SSLv3 = 0x1000000, 0x2000000
    OP_NO_COMPRESSION = 0x20000
# A secure default.
# Sources for more information on TLS ciphers:
#
# - https://wiki.mozilla.org/Security/Server_Side_TLS
# - https://www.ssllabs.com/projects/best-practices/index.html
# - https://hynek.me/articles/hardening-your-web-servers-ssl-ciphers/
#
# The general intent is:
# - Prefer cipher suites that offer perfect forward secrecy (DHE/ECDHE),
# - prefer ECDHE over DHE for better performance,
# - prefer any AES-GCM over any AES-CBC for better performance and security,
# - use 3DES as fallback which is secure but slow,
# - disable NULL authentication, MD5 MACs and DSS for security reasons.
DEFAULT_CIPHERS = (
    'ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:ECDH+AES128:DH+AES:ECDH+HIGH:'
    'DH+HIGH:ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+HIGH:RSA+3DES:!aNULL:'
    '!eNULL:!MD5'
)
try:
    from ssl import SSLContext  # Modern SSL?
except ImportError:
    import sys

    class SSLContext(object):  # Platform-specific: Python 2 & 3.1
        supports_set_ciphers = ((2, 7) <= sys.version_info < (3,) or
                                (3, 2) <= sys.version_info)

        def __init__(self, protocol_version):
            self.protocol = protocol_version
            # Use default values from a real SSLContext
            self.check_hostname = False
            self.verify_mode = ssl.CERT_NONE
            self.ca_certs = None
            self.options = 0
            self.certfile = None
            self.keyfile = None
            self.ciphers = None

        def load_cert_chain(self, certfile, keyfile):
            self.certfile = certfile
            self.keyfile = keyfile

        def load_verify_locations(self, cafile=None, capath=None):
            self.ca_certs = cafile

            if capath is not None:
                raise SSLError("CA directories not supported in older Pythons")

        def set_ciphers(self, cipher_suite):
            if not self.supports_set_ciphers:
                raise TypeError(
                    'Your version of Python does not support setting '
                    'a custom cipher suite. Please upgrade to Python '
                    '2.7, 3.2, or later if you need this functionality.'
                )
            self.ciphers = cipher_suite

        def wrap_socket(self, socket, server_hostname=None):
            warnings.warn(
                'A true SSLContext object is not available. This prevents '
                'urllib3 from configuring SSL appropriately and may cause '
                'certain SSL connections to fail. For more information, see '
                'https://urllib3.readthedocs.org/en/latest/security.html'
                '#insecureplatformwarning.',
                InsecurePlatformWarning
            )
            kwargs = {
                'keyfile': self.keyfile,
                'certfile': self.certfile,
                'ca_certs': self.ca_certs,
                'cert_reqs': self.verify_mode,
                'ssl_version': self.protocol,
            }
            if self.supports_set_ciphers:  # Platform-specific: Python 2.7+
                return wrap_socket(socket, ciphers=self.ciphers, **kwargs)
            else:  # Platform-specific: Python 2.6
                return wrap_socket(socket, **kwargs)
def assert_fingerprint(cert, fingerprint):
    """
    Checks if given fingerprint matches the supplied certificate.

    :param cert:
        Certificate as bytes object.
    :param fingerprint:
        Fingerprint as string of hexdigits, can be interspersed by colons.
    """

    fingerprint = fingerprint.replace(':', '').lower()
    digest_length = len(fingerprint)
    hashfunc = HASHFUNC_MAP.get(digest_length)
    if not hashfunc:
        raise SSLError(
            'Fingerprint of invalid length: {0}'.format(fingerprint))

    # We need encode() here for py32; works on py2 and py3.
    fingerprint_bytes = unhexlify(fingerprint.encode())

    cert_digest = hashfunc(cert).digest()

    if not _const_compare_digest(cert_digest, fingerprint_bytes):
        raise SSLError('Fingerprints did not match. Expected "{0}", got "{1}".'
                       .format(fingerprint, hexlify(cert_digest)))
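# Illustrative usage (not part of the vendored file): pin a peer certificate
# by its SHA-256 digest. 'der_cert' stands in for a bytes object holding the
# certificate in DER form.
#
#   import hashlib
#   expected = hashlib.sha256(der_cert).hexdigest()  # 64 hex chars -> sha256
#   assert_fingerprint(der_cert, expected)           # returns silently on match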
def resolve_cert_reqs(candidate):
    """
    Resolves the argument to a numeric constant, which can be passed to
    the wrap_socket function/method from the ssl module.
    Defaults to :data:`ssl.CERT_NONE`.
    If given a string it is assumed to be the name of the constant in the
    :mod:`ssl` module or its abbreviation.
    (So you can specify `REQUIRED` instead of `CERT_REQUIRED`.)
    If it's neither `None` nor a string we assume it is already the numeric
    constant which can directly be passed to wrap_socket.
    """
    if candidate is None:
        return CERT_NONE

    if isinstance(candidate, str):
        res = getattr(ssl, candidate, None)
        if res is None:
            res = getattr(ssl, 'CERT_' + candidate)
        return res

    return candidate
def resolve_ssl_version(candidate):
    """
    Like resolve_cert_reqs(), but resolves protocol version names
    (e.g. `TLSv1` for `PROTOCOL_TLSv1`).
    """
    if candidate is None:
        return PROTOCOL_SSLv23

    if isinstance(candidate, str):
        res = getattr(ssl, candidate, None)
        if res is None:
            res = getattr(ssl, 'PROTOCOL_' + candidate)
        return res

    return candidate
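# Illustrative: both resolvers accept a constant name with or without its
# prefix, and pass numeric constants through unchanged:
#
#   resolve_cert_reqs(None)        # -> ssl.CERT_NONE
#   resolve_cert_reqs('REQUIRED')  # -> ssl.CERT_REQUIRED
#   resolve_ssl_version('TLSv1')   # -> ssl.PROTOCOL_TLSv1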
def create_urllib3_context(ssl_version=None, cert_reqs=None,
                           options=None, ciphers=None):
    """All arguments have the same meaning as ``ssl_wrap_socket``.

    By default, this function does a lot of the same work that
    ``ssl.create_default_context`` does on Python 3.4+. It:

    - Disables SSLv2, SSLv3, and compression
    - Sets a restricted set of server ciphers

    If you wish to enable SSLv3, you can do::

        from urllib3.util import ssl_
        context = ssl_.create_urllib3_context()
        context.options &= ~ssl_.OP_NO_SSLv3

    You can do the same to enable compression (substituting ``COMPRESSION``
    for ``SSLv3`` in the last line above).

    :param ssl_version:
        The desired protocol version to use. This will default to
        PROTOCOL_SSLv23 which will negotiate the highest protocol that both
        the server and your installation of OpenSSL support.
    :param cert_reqs:
        Whether to require the certificate verification. This defaults to
        ``ssl.CERT_REQUIRED``.
    :param options:
        Specific OpenSSL options. These default to ``ssl.OP_NO_SSLv2``,
        ``ssl.OP_NO_SSLv3``, ``ssl.OP_NO_COMPRESSION``.
    :param ciphers:
        Which cipher suites to allow the server to select.
    :returns:
        Constructed SSLContext object with specified options
    :rtype: SSLContext
    """
    context = SSLContext(ssl_version or ssl.PROTOCOL_SSLv23)

    # Setting the default here, as we may have no ssl module on import
    cert_reqs = ssl.CERT_REQUIRED if cert_reqs is None else cert_reqs

    if options is None:
        options = 0
        # SSLv2 is easily broken and is considered harmful and dangerous
        options |= OP_NO_SSLv2
        # SSLv3 has several problems and is now dangerous
        options |= OP_NO_SSLv3
        # Disable compression to prevent CRIME attacks for OpenSSL 1.0+
        # (issue #309)
        options |= OP_NO_COMPRESSION

    context.options |= options

    if getattr(context, 'supports_set_ciphers', True):  # Platform-specific: Python 2.6
        context.set_ciphers(ciphers or DEFAULT_CIPHERS)

    context.verify_mode = cert_reqs
    if getattr(context, 'check_hostname', None) is not None:  # Platform-specific: Python 3.2
        # We do our own verification, including fingerprints and alternative
        # hostnames. So disable it here
        context.check_hostname = False
    return context
def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None,
                    ca_certs=None, server_hostname=None,
                    ssl_version=None, ciphers=None, ssl_context=None,
                    ca_cert_dir=None):
    """
    All arguments except for server_hostname, ssl_context, and ca_cert_dir have
    the same meaning as they do when using :func:`ssl.wrap_socket`.

    :param server_hostname:
        When SNI is supported, the expected hostname of the certificate
    :param ssl_context:
        A pre-made :class:`SSLContext` object. If none is provided, one will
        be created using :func:`create_urllib3_context`.
    :param ciphers:
        A string of ciphers we wish the client to support. This is not
        supported on Python 2.6 as the ssl module does not support it.
    :param ca_cert_dir:
        A directory containing CA certificates in multiple separate files, as
        supported by OpenSSL's -CApath flag or the capath argument to
        SSLContext.load_verify_locations().
    """
    context = ssl_context
    if context is None:
        context = create_urllib3_context(ssl_version, cert_reqs,
                                         ciphers=ciphers)

    if ca_certs or ca_cert_dir:
        try:
            context.load_verify_locations(ca_certs, ca_cert_dir)
        except IOError as e:  # Platform-specific: Python 2.6, 2.7, 3.2
            raise SSLError(e)
        # Py33 raises FileNotFoundError which subclasses OSError
        # These are not equivalent unless we check the errno attribute
        except OSError as e:  # Platform-specific: Python 3.3 and beyond
            if e.errno == errno.ENOENT:
                raise SSLError(e)
            raise

    if certfile:
        context.load_cert_chain(certfile, keyfile)
    if HAS_SNI:  # Platform-specific: OpenSSL with enabled SNI
        return context.wrap_socket(sock, server_hostname=server_hostname)

    warnings.warn(
        'An HTTPS request has been made, but the SNI (Subject Name '
        'Indication) extension to TLS is not available on this platform. '
        'This may cause the server to present an incorrect TLS '
        'certificate, which can cause validation failures. For more '
        'information, see '
        'https://urllib3.readthedocs.org/en/latest/security.html'
        '#snimissingwarning.',
        SNIMissingWarning
    )
    return context.wrap_socket(sock)
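A minimal sketch (not part of the diff) of wrapping a plain socket with the
helper above; the hostname, port, and CA bundle path are illustrative:

    import socket
    from requests.packages.urllib3.util.ssl_ import (
        ssl_wrap_socket, resolve_cert_reqs)

    sock = socket.create_connection(('example.com', 443))
    tls_sock = ssl_wrap_socket(
        sock,
        cert_reqs=resolve_cert_reqs('CERT_REQUIRED'),
        ca_certs='/etc/ssl/certs/ca-certificates.crt',
        server_hostname='example.com')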
242
libraries/requests/packages/urllib3/util/timeout.py
Normal file
@@ -0,0 +1,242 @@
from __future__ import absolute_import
# The default socket timeout, used by httplib to indicate that no timeout was
# specified by the user
from socket import _GLOBAL_DEFAULT_TIMEOUT
import time

from ..exceptions import TimeoutStateError

# A sentinel value to indicate that no timeout was specified by the user in
# urllib3
_Default = object()


def current_time():
    """
    Retrieve the current time. This function is mocked out in unit testing.
    """
    return time.time()
class Timeout(object):
    """ Timeout configuration.

    Timeouts can be defined as a default for a pool::

        timeout = Timeout(connect=2.0, read=7.0)
        http = PoolManager(timeout=timeout)
        response = http.request('GET', 'http://example.com/')

    Or per-request (which overrides the default for the pool)::

        response = http.request('GET', 'http://example.com/', timeout=Timeout(10))

    Timeouts can be disabled by setting all the parameters to ``None``::

        no_timeout = Timeout(connect=None, read=None)
        response = http.request('GET', 'http://example.com/', timeout=no_timeout)


    :param total:
        This combines the connect and read timeouts into one; the read timeout
        will be set to the time leftover from the connect attempt. In the
        event that both a connect timeout and a total are specified, or a read
        timeout and a total are specified, the shorter timeout will be applied.

        Defaults to None.

    :type total: integer, float, or None

    :param connect:
        The maximum amount of time to wait for a connection attempt to a server
        to succeed. Omitting the parameter will default the connect timeout to
        the system default, probably `the global default timeout in socket.py
        <http://hg.python.org/cpython/file/603b4d593758/Lib/socket.py#l535>`_.
        None will set an infinite timeout for connection attempts.

    :type connect: integer, float, or None

    :param read:
        The maximum amount of time to wait between consecutive
        read operations for a response from the server. Omitting
        the parameter will default the read timeout to the system
        default, probably `the global default timeout in socket.py
        <http://hg.python.org/cpython/file/603b4d593758/Lib/socket.py#l535>`_.
        None will set an infinite timeout.

    :type read: integer, float, or None

    .. note::

        Many factors can affect the total amount of time for urllib3 to return
        an HTTP response.

        For example, Python's DNS resolver does not obey the timeout specified
        on the socket. Other factors that can affect total request time include
        high CPU load, high swap, the program running at a low priority level,
        or other behaviors.

        In addition, the read and total timeouts only measure the time between
        read operations on the socket connecting the client and the server,
        not the total amount of time for the request to return a complete
        response. For most requests, the timeout is raised because the server
        has not sent the first byte in the specified time. This is not always
        the case; if a server streams one byte every fifteen seconds, a timeout
        of 20 seconds will not trigger, even though the request will take
        several minutes to complete.

        If your goal is to cut off any request after a set amount of wall clock
        time, consider having a second "watcher" thread to cut off a slow
        request.
    """

    #: A sentinel object representing the default timeout value
    DEFAULT_TIMEOUT = _GLOBAL_DEFAULT_TIMEOUT
    def __init__(self, total=None, connect=_Default, read=_Default):
        self._connect = self._validate_timeout(connect, 'connect')
        self._read = self._validate_timeout(read, 'read')
        self.total = self._validate_timeout(total, 'total')
        self._start_connect = None

    def __str__(self):
        return '%s(connect=%r, read=%r, total=%r)' % (
            type(self).__name__, self._connect, self._read, self.total)
    @classmethod
    def _validate_timeout(cls, value, name):
        """ Check that a timeout attribute is valid.

        :param value: The timeout value to validate
        :param name: The name of the timeout attribute to validate. This is
            used to specify in error messages.
        :return: The validated and casted version of the given value.
        :raises ValueError: If the type is not an integer or a float, or if it
            is a numeric value less than zero.
        """
        if value is _Default:
            return cls.DEFAULT_TIMEOUT

        if value is None or value is cls.DEFAULT_TIMEOUT:
            return value

        try:
            float(value)
        except (TypeError, ValueError):
            raise ValueError("Timeout value %s was %s, but it must be an "
                             "int or float." % (name, value))

        try:
            if value < 0:
                raise ValueError("Attempted to set %s timeout to %s, but the "
                                 "timeout cannot be set to a value less "
                                 "than 0." % (name, value))
        except TypeError:  # Python 3
            raise ValueError("Timeout value %s was %s, but it must be an "
                             "int or float." % (name, value))

        return value
    @classmethod
    def from_float(cls, timeout):
        """ Create a new Timeout from a legacy timeout value.

        The timeout value used by httplib.py sets the same timeout on the
        connect(), and recv() socket requests. This creates a :class:`Timeout`
        object that sets the individual timeouts to the ``timeout`` value
        passed to this function.

        :param timeout: The legacy timeout value.
        :type timeout: integer, float, sentinel default object, or None
        :return: Timeout object
        :rtype: :class:`Timeout`
        """
        return Timeout(read=timeout, connect=timeout)
    def clone(self):
        """ Create a copy of the timeout object

        Timeout properties are stored per-pool but each request needs a fresh
        Timeout object to ensure each one has its own start/stop configured.

        :return: a copy of the timeout object
        :rtype: :class:`Timeout`
        """
        # We can't use copy.deepcopy because that will also create a new object
        # for _GLOBAL_DEFAULT_TIMEOUT, which socket.py uses as a sentinel to
        # detect the user default.
        return Timeout(connect=self._connect, read=self._read,
                       total=self.total)
    def start_connect(self):
        """ Start the timeout clock, used during a connect() attempt

        :raises urllib3.exceptions.TimeoutStateError: if you attempt
            to start a timer that has been started already.
        """
        if self._start_connect is not None:
            raise TimeoutStateError("Timeout timer has already been started.")
        self._start_connect = current_time()
        return self._start_connect
    def get_connect_duration(self):
        """ Gets the time elapsed since the call to :meth:`start_connect`.

        :return: Elapsed time.
        :rtype: float
        :raises urllib3.exceptions.TimeoutStateError: if you attempt
            to get duration for a timer that hasn't been started.
        """
        if self._start_connect is None:
            raise TimeoutStateError("Can't get connect duration for timer "
                                    "that has not started.")
        return current_time() - self._start_connect
    @property
    def connect_timeout(self):
        """ Get the value to use when setting a connection timeout.

        This will be a positive float or integer, the value None
        (never timeout), or the default system timeout.

        :return: Connect timeout.
        :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None
        """
        if self.total is None:
            return self._connect

        if self._connect is None or self._connect is self.DEFAULT_TIMEOUT:
            return self.total

        return min(self._connect, self.total)
    @property
    def read_timeout(self):
        """ Get the value for the read timeout.

        This assumes some time has elapsed in the connection timeout and
        computes the read timeout appropriately.

        If self.total is set, the read timeout is dependent on the amount of
        time taken by the connect timeout. If the connection time has not been
        established, a :exc:`~urllib3.exceptions.TimeoutStateError` will be
        raised.

        :return: Value to use for the read timeout.
        :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None
        :raises urllib3.exceptions.TimeoutStateError: If :meth:`start_connect`
            has not yet been called on this object.
        """
        if (self.total is not None and
                self.total is not self.DEFAULT_TIMEOUT and
                self._read is not None and
                self._read is not self.DEFAULT_TIMEOUT):
            # In case the connect timeout has not yet been established.
            if self._start_connect is None:
                return self._read
            return max(0, min(self.total - self.get_connect_duration(),
                              self._read))
        elif self.total is not None and self.total is not self.DEFAULT_TIMEOUT:
            return max(0, self.total - self.get_connect_duration())
        else:
            return self._read
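An illustrative sketch (values made up) of how the connect and read budgets
interact with ``total``:

    from requests.packages.urllib3.util.timeout import Timeout

    t = Timeout(total=5.0, connect=2.0, read=4.0).clone()  # fresh per-request state
    t.start_connect()            # begin the connect clock
    print(t.connect_timeout)     # min(2.0, 5.0) -> 2.0
    print(t.read_timeout)        # whatever remains of the 5.0s total, capped at 4.0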
217
libraries/requests/packages/urllib3/util/url.py
Normal file
@@ -0,0 +1,217 @@
from __future__ import absolute_import
from collections import namedtuple

from ..exceptions import LocationParseError


url_attrs = ['scheme', 'auth', 'host', 'port', 'path', 'query', 'fragment']
class Url(namedtuple('Url', url_attrs)):
    """
    Datastructure for representing an HTTP URL. Used as a return value for
    :func:`parse_url`.
    """
    __slots__ = ()

    def __new__(cls, scheme=None, auth=None, host=None, port=None, path=None,
                query=None, fragment=None):
        if path and not path.startswith('/'):
            path = '/' + path
        return super(Url, cls).__new__(cls, scheme, auth, host, port, path,
                                       query, fragment)

    @property
    def hostname(self):
        """For backwards-compatibility with urlparse. We're nice like that."""
        return self.host
    @property
    def request_uri(self):
        """Absolute path including the query string."""
        uri = self.path or '/'

        if self.query is not None:
            uri += '?' + self.query

        return uri

    @property
    def netloc(self):
        """Network location including host and port"""
        if self.port:
            return '%s:%d' % (self.host, self.port)
        return self.host
    @property
    def url(self):
        """
        Convert self into a url

        This function should more or less round-trip with :func:`.parse_url`. The
        returned url may not be exactly the same as the url inputted to
        :func:`.parse_url`, but it should be equivalent by the RFC (e.g., urls
        with a blank port will have : removed).

        Example: ::

            >>> U = parse_url('http://google.com/mail/')
            >>> U.url
            'http://google.com/mail/'
            >>> Url('http', 'username:password', 'host.com', 80,
            ... '/path', 'query', 'fragment').url
            'http://username:password@host.com:80/path?query#fragment'
        """
        scheme, auth, host, port, path, query, fragment = self
        url = ''

        # We use "is not None" because we want things to happen with empty
        # strings (or 0 port)
        if scheme is not None:
            url += scheme + '://'
        if auth is not None:
            url += auth + '@'
        if host is not None:
            url += host
        if port is not None:
            url += ':' + str(port)
        if path is not None:
            url += path
        if query is not None:
            url += '?' + query
        if fragment is not None:
            url += '#' + fragment

        return url

    def __str__(self):
        return self.url
def split_first(s, delims):
    """
    Given a string and an iterable of delimiters, split on the first found
    delimiter. Return two split parts and the matched delimiter.

    If not found, then the first part is the full input string.

    Example::

        >>> split_first('foo/bar?baz', '?/=')
        ('foo', 'bar?baz', '/')
        >>> split_first('foo/bar?baz', '123')
        ('foo/bar?baz', '', None)

    Scales linearly with number of delims. Not ideal for large number of delims.
    """
    min_idx = None
    min_delim = None
    for d in delims:
        idx = s.find(d)
        if idx < 0:
            continue

        if min_idx is None or idx < min_idx:
            min_idx = idx
            min_delim = d

    if min_idx is None or min_idx < 0:
        return s, '', None

    return s[:min_idx], s[min_idx + 1:], min_delim
def parse_url(url):
    """
    Given a url, return a parsed :class:`.Url` namedtuple. Best-effort is
    performed to parse incomplete urls. Fields not provided will be None.

    Partly backwards-compatible with :mod:`urlparse`.

    Example::

        >>> parse_url('http://google.com/mail/')
        Url(scheme='http', host='google.com', port=None, path='/mail/', ...)
        >>> parse_url('google.com:80')
        Url(scheme=None, host='google.com', port=80, path=None, ...)
        >>> parse_url('/foo?bar')
        Url(scheme=None, host=None, port=None, path='/foo', query='bar', ...)
    """

    # While this code has overlap with stdlib's urlparse, it is much
    # simplified for our needs and less annoying.
    # Additionally, this implementation does silly things to be optimal
    # on CPython.

    if not url:
        # Empty
        return Url()

    scheme = None
    auth = None
    host = None
    port = None
    path = None
    fragment = None
    query = None

    # Scheme
    if '://' in url:
        scheme, url = url.split('://', 1)

    # Find the earliest Authority Terminator
    # (http://tools.ietf.org/html/rfc3986#section-3.2)
    url, path_, delim = split_first(url, ['/', '?', '#'])

    if delim:
        # Reassemble the path
        path = delim + path_

    # Auth
    if '@' in url:
        # Last '@' denotes end of auth part
        auth, url = url.rsplit('@', 1)

    # IPv6
    if url and url[0] == '[':
        host, url = url.split(']', 1)
        host += ']'

    # Port
    if ':' in url:
        _host, port = url.split(':', 1)

        if not host:
            host = _host

        if port:
            # If given, ports must be integers.
            if not port.isdigit():
                raise LocationParseError(url)
            port = int(port)
        else:
            # Blank ports are cool, too. (rfc3986#section-3.2.3)
            port = None

    elif not host and url:
        host = url

    if not path:
        return Url(scheme, auth, host, port, path, query, fragment)

    # Fragment
    if '#' in path:
        path, fragment = path.split('#', 1)

    # Query
    if '?' in path:
        path, query = path.split('?', 1)

    return Url(scheme, auth, host, port, path, query, fragment)
def get_host(url):
    """
    Deprecated. Use :func:`.parse_url` instead.
    """
    p = parse_url(url)
    return p.scheme or 'http', p.hostname, p.port
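An illustrative round trip with the helpers above (mirrors the doctests; the
URL is made up):

    from requests.packages.urllib3.util.url import Url, parse_url

    u = parse_url('http://user:secret@example.com:8080/path?q=1#frag')
    print(u.host, u.port, u.request_uri)   # example.com 8080 /path?q=1
    print(Url(scheme='https', host=u.host, path=u.path).url)
    # -> https://example.com/path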