# -*- coding: utf-8 -*-

"""
requests.session
~~~~~~~~~~~~~~~~

This module provides a Session object to manage and persist settings across
requests (cookies, auth, proxies).

"""
import os
from collections import Mapping
from datetime import datetime

from .auth import _basic_auth_str
from .compat import cookielib, OrderedDict, urljoin, urlparse
from .cookies import (
    cookiejar_from_dict, extract_cookies_to_jar, RequestsCookieJar, merge_cookies)
from .models import Request, PreparedRequest, DEFAULT_REDIRECT_LIMIT
from .hooks import default_hooks, dispatch_hook
from .utils import to_key_val_list, default_headers, to_native_string
from .exceptions import (
    TooManyRedirects, InvalidSchema, ChunkedEncodingError, ContentDecodingError)
from .packages.urllib3._collections import RecentlyUsedContainer
from .structures import CaseInsensitiveDict

from .adapters import HTTPAdapter

from .utils import (
    requote_uri, get_environ_proxies, get_netrc_auth, should_bypass_proxies,
    get_auth_from_url
)

from .status_codes import codes

# formerly defined here, reexposed here for backward compatibility
from .models import REDIRECT_STATI

REDIRECT_CACHE_SIZE = 1000


def merge_setting(request_setting, session_setting, dict_class=OrderedDict):
    """
    Determines appropriate setting for a given request, taking into account the
    explicit setting on that request, and the setting in the session. If a
    setting is a dictionary, they will be merged together using `dict_class`
    """

    if session_setting is None:
        return request_setting

    if request_setting is None:
        return session_setting

    # Bypass if not a dictionary (e.g. verify)
    if not (
        isinstance(session_setting, Mapping) and
        isinstance(request_setting, Mapping)
    ):
        return request_setting

    merged_setting = dict_class(to_key_val_list(session_setting))
    merged_setting.update(to_key_val_list(request_setting))

    # Remove keys that are set to None. Extract keys first to avoid altering
    # the dictionary during iteration.
    none_keys = [k for (k, v) in merged_setting.items() if v is None]
    for key in none_keys:
        del merged_setting[key]

    return merged_setting
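
# Illustrative sketch only (not part of the upstream source): merge_setting()
# lays request-level values over session-level ones and drops any key the
# request explicitly sets to None, e.g.
#
#     merge_setting({'Accept': 'text/html', 'X-Token': None},
#                   {'X-Token': 'abc', 'User-Agent': 'example-agent'})
#     -> {'User-Agent': 'example-agent', 'Accept': 'text/html'}
#
# ('X-Token', 'abc' and 'example-agent' are hypothetical values.)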


def merge_hooks(request_hooks, session_hooks, dict_class=OrderedDict):
    """
    Properly merges both requests and session hooks.

    This is necessary because when request_hooks == {'response': []}, the
    merge breaks Session hooks entirely.
    """
    if session_hooks is None or session_hooks.get('response') == []:
        return request_hooks

    if request_hooks is None or request_hooks.get('response') == []:
        return session_hooks

    return merge_setting(request_hooks, session_hooks, dict_class)


class SessionRedirectMixin(object):
    def resolve_redirects(self, resp, req, stream=False, timeout=None,
                          verify=True, cert=None, proxies=None, **adapter_kwargs):
        """Receives a Response. Returns a generator of Responses."""

        i = 0
        hist = []  # keep track of history

        while resp.is_redirect:
            prepared_request = req.copy()

            if i > 0:
                # Update history and keep track of redirects.
                hist.append(resp)
                new_hist = list(hist)
                resp.history = new_hist

            try:
                resp.content  # Consume socket so it can be released
            except (ChunkedEncodingError, ContentDecodingError, RuntimeError):
                resp.raw.read(decode_content=False)

            if i >= self.max_redirects:
                raise TooManyRedirects('Exceeded %s redirects.' % self.max_redirects)

            # Release the connection back into the pool.
            resp.close()

            url = resp.headers['location']
            method = req.method

            # Handle redirection without scheme (see: RFC 1808 Section 4)
            if url.startswith('//'):
                parsed_rurl = urlparse(resp.url)
                url = '%s:%s' % (parsed_rurl.scheme, url)

            # The scheme should be lower case...
            parsed = urlparse(url)
            url = parsed.geturl()

            # Facilitate relative 'location' headers, as allowed by RFC 7231.
            # (e.g. '/path/to/resource' instead of 'http://domain.tld/path/to/resource')
            # Compliant with RFC3986, we percent encode the url.
            if not parsed.netloc:
                url = urljoin(resp.url, requote_uri(url))
            else:
                url = requote_uri(url)

            prepared_request.url = to_native_string(url)
            # Cache the url, unless it redirects to itself.
            if resp.is_permanent_redirect and req.url != prepared_request.url:
                self.redirect_cache[req.url] = prepared_request.url

            # http://tools.ietf.org/html/rfc7231#section-6.4.4
            if (resp.status_code == codes.see_other and
                    method != 'HEAD'):
                method = 'GET'

            # Do what the browsers do, despite standards...
            # First, turn 302s into GETs.
            if resp.status_code == codes.found and method != 'HEAD':
                method = 'GET'

            # Second, if a POST is responded to with a 301, turn it into a GET.
            # This bizarre behaviour is explained in Issue 1704.
            if resp.status_code == codes.moved and method == 'POST':
                method = 'GET'

            prepared_request.method = method

            # https://github.com/kennethreitz/requests/issues/1084
            if resp.status_code not in (codes.temporary_redirect, codes.permanent_redirect):
                if 'Content-Length' in prepared_request.headers:
                    del prepared_request.headers['Content-Length']

                prepared_request.body = None

            headers = prepared_request.headers
            try:
                del headers['Cookie']
            except KeyError:
                pass

            # Extract any cookies sent on the response to the cookiejar
            # in the new request. Because we've mutated our copied prepared
            # request, use the old one that we haven't yet touched.
            extract_cookies_to_jar(prepared_request._cookies, req, resp.raw)
            prepared_request._cookies.update(self.cookies)
            prepared_request.prepare_cookies(prepared_request._cookies)

            # Rebuild auth and proxy information.
            proxies = self.rebuild_proxies(prepared_request, proxies)
            self.rebuild_auth(prepared_request, resp)

            # Override the original request.
            req = prepared_request

            resp = self.send(
                req,
                stream=stream,
                timeout=timeout,
                verify=verify,
                cert=cert,
                proxies=proxies,
                allow_redirects=False,
                **adapter_kwargs
            )

            extract_cookies_to_jar(self.cookies, prepared_request, resp.raw)

            i += 1
            yield resp

    def rebuild_auth(self, prepared_request, response):
        """
        When being redirected we may want to strip authentication from the
        request to avoid leaking credentials. This method intelligently removes
        and reapplies authentication where possible to avoid credential loss.
        """
        headers = prepared_request.headers
        url = prepared_request.url

        if 'Authorization' in headers:
            # If we get redirected to a new host, we should strip out any
            # authentication headers.
            original_parsed = urlparse(response.request.url)
            redirect_parsed = urlparse(url)

            if (original_parsed.hostname != redirect_parsed.hostname):
                del headers['Authorization']

        # .netrc might have more auth for us on our new host.
        new_auth = get_netrc_auth(url) if self.trust_env else None
        if new_auth is not None:
            prepared_request.prepare_auth(new_auth)

        return

    def rebuild_proxies(self, prepared_request, proxies):
        """
        This method re-evaluates the proxy configuration by considering the
        environment variables. If we are redirected to a URL covered by
        NO_PROXY, we strip the proxy configuration. Otherwise, we set missing
        proxy keys for this URL (in case they were stripped by a previous
        redirect).

        This method also replaces the Proxy-Authorization header where
        necessary.
        """
        headers = prepared_request.headers
        url = prepared_request.url
        scheme = urlparse(url).scheme
        new_proxies = proxies.copy() if proxies is not None else {}

        if self.trust_env and not should_bypass_proxies(url):
            environ_proxies = get_environ_proxies(url)

            proxy = environ_proxies.get(scheme)

            if proxy:
                new_proxies.setdefault(scheme, environ_proxies[scheme])

        if 'Proxy-Authorization' in headers:
            del headers['Proxy-Authorization']

        try:
            username, password = get_auth_from_url(new_proxies[scheme])
        except KeyError:
            username, password = None, None

        if username and password:
            headers['Proxy-Authorization'] = _basic_auth_str(username, password)

        return new_proxies


class Session(SessionRedirectMixin):
    """A Requests session.

    Provides cookie persistence, connection-pooling, and configuration.

    Basic Usage::

      >>> import requests
      >>> s = requests.Session()
      >>> s.get('http://httpbin.org/get')
      <Response [200]>

    Or as a context manager::

      >>> with requests.Session() as s:
      >>>     s.get('http://httpbin.org/get')
      <Response [200]>
    """

    __attrs__ = [
        'headers', 'cookies', 'auth', 'proxies', 'hooks', 'params', 'verify',
        'cert', 'prefetch', 'adapters', 'stream', 'trust_env',
        'max_redirects',
    ]

    def __init__(self):

        #: A case-insensitive dictionary of headers to be sent on each
        #: :class:`Request <Request>` sent from this
        #: :class:`Session <Session>`.
        self.headers = default_headers()

        #: Default Authentication tuple or object to attach to
        #: :class:`Request <Request>`.
        self.auth = None

        #: Dictionary mapping protocol or protocol and host to the URL of the proxy
        #: (e.g. {'http': 'foo.bar:3128', 'http://host.name': 'foo.bar:4012'}) to
        #: be used on each :class:`Request <Request>`.
        self.proxies = {}

        #: Event-handling hooks.
        self.hooks = default_hooks()

        #: Dictionary of querystring data to attach to each
        #: :class:`Request <Request>`. The dictionary values may be lists for
        #: representing multivalued query parameters.
        self.params = {}

        #: Stream response content default.
        self.stream = False

        #: SSL Verification default.
        self.verify = True

        #: SSL certificate default.
        self.cert = None

        #: Maximum number of redirects allowed. If the request exceeds this
        #: limit, a :class:`TooManyRedirects` exception is raised.
        self.max_redirects = DEFAULT_REDIRECT_LIMIT

        #: Trust environment settings for proxy configuration, default
        #: authentication and similar.
        self.trust_env = True

        #: A CookieJar containing all currently outstanding cookies set on this
        #: session. By default it is a
        #: :class:`RequestsCookieJar <requests.cookies.RequestsCookieJar>`, but
        #: may be any other ``cookielib.CookieJar`` compatible object.
        self.cookies = cookiejar_from_dict({})

        # Default connection adapters.
        self.adapters = OrderedDict()
        self.mount('https://', HTTPAdapter())
        self.mount('http://', HTTPAdapter())

        # Only store 1000 redirects to prevent using infinite memory
        self.redirect_cache = RecentlyUsedContainer(REDIRECT_CACHE_SIZE)

    def __enter__(self):
        return self

    def __exit__(self, *args):
        self.close()

    def prepare_request(self, request):
        """Constructs a :class:`PreparedRequest <PreparedRequest>` for
        transmission and returns it. The :class:`PreparedRequest` has settings
        merged from the :class:`Request <Request>` instance and those of the
        :class:`Session`.

        :param request: :class:`Request` instance to prepare with this
            session's settings.
        """
        cookies = request.cookies or {}

        # Bootstrap CookieJar.
        if not isinstance(cookies, cookielib.CookieJar):
            cookies = cookiejar_from_dict(cookies)

        # Merge with session cookies
        merged_cookies = merge_cookies(
            merge_cookies(RequestsCookieJar(), self.cookies), cookies)

        # Set environment's basic authentication if not explicitly set.
        auth = request.auth
        if self.trust_env and not auth and not self.auth:
            auth = get_netrc_auth(request.url)

        p = PreparedRequest()
        p.prepare(
            method=request.method.upper(),
            url=request.url,
            files=request.files,
            data=request.data,
            json=request.json,
            headers=merge_setting(request.headers, self.headers, dict_class=CaseInsensitiveDict),
            params=merge_setting(request.params, self.params),
            auth=merge_setting(auth, self.auth),
            cookies=merged_cookies,
            hooks=merge_hooks(request.hooks, self.hooks),
        )
        return p

    def request(self, method, url,
        params=None,
        data=None,
        headers=None,
        cookies=None,
        files=None,
        auth=None,
        timeout=None,
        allow_redirects=True,
        proxies=None,
        hooks=None,
        stream=None,
        verify=None,
        cert=None,
        json=None):
        """Constructs a :class:`Request <Request>`, prepares it and sends it.
        Returns :class:`Response <Response>` object.

        :param method: method for the new :class:`Request` object.
        :param url: URL for the new :class:`Request` object.
        :param params: (optional) Dictionary or bytes to be sent in the query
            string for the :class:`Request`.
        :param data: (optional) Dictionary, bytes, or file-like object to send
            in the body of the :class:`Request`.
        :param json: (optional) json to send in the body of the
            :class:`Request`.
        :param headers: (optional) Dictionary of HTTP Headers to send with the
            :class:`Request`.
        :param cookies: (optional) Dict or CookieJar object to send with the
            :class:`Request`.
        :param files: (optional) Dictionary of ``'filename': file-like-objects``
            for multipart encoding upload.
        :param auth: (optional) Auth tuple or callable to enable
            Basic/Digest/Custom HTTP Auth.
        :param timeout: (optional) How long to wait for the server to send
            data before giving up, as a float, or a :ref:`(connect timeout,
            read timeout) <timeouts>` tuple.
        :type timeout: float or tuple
        :param allow_redirects: (optional) Set to True by default.
        :type allow_redirects: bool
        :param proxies: (optional) Dictionary mapping protocol or protocol and
            hostname to the URL of the proxy.
        :param stream: (optional) whether to immediately download the response
            content. Defaults to ``False``.
        :param verify: (optional) whether the SSL cert will be verified.
            A CA_BUNDLE path can also be provided. Defaults to ``True``.
        :param cert: (optional) if String, path to ssl client cert file (.pem).
            If Tuple, ('cert', 'key') pair.
        """
        # Create the Request.
        req = Request(
            method = method.upper(),
            url = url,
            headers = headers,
            files = files,
            data = data or {},
            json = json,
            params = params or {},
            auth = auth,
            cookies = cookies,
            hooks = hooks,
        )
        prep = self.prepare_request(req)

        proxies = proxies or {}

        settings = self.merge_environment_settings(
            prep.url, proxies, stream, verify, cert
        )

        # Send the request.
        send_kwargs = {
            'timeout': timeout,
            'allow_redirects': allow_redirects,
        }
        send_kwargs.update(settings)
        resp = self.send(prep, **send_kwargs)

        return resp

    def get(self, url, **kwargs):
        """Sends a GET request. Returns :class:`Response` object.

        :param url: URL for the new :class:`Request` object.
        :param \*\*kwargs: Optional arguments that ``request`` takes.
        """

        kwargs.setdefault('allow_redirects', True)
        return self.request('GET', url, **kwargs)

    def options(self, url, **kwargs):
        """Sends an OPTIONS request. Returns :class:`Response` object.

        :param url: URL for the new :class:`Request` object.
        :param \*\*kwargs: Optional arguments that ``request`` takes.
        """

        kwargs.setdefault('allow_redirects', True)
        return self.request('OPTIONS', url, **kwargs)

    def head(self, url, **kwargs):
        """Sends a HEAD request. Returns :class:`Response` object.

        :param url: URL for the new :class:`Request` object.
        :param \*\*kwargs: Optional arguments that ``request`` takes.
        """

        kwargs.setdefault('allow_redirects', False)
        return self.request('HEAD', url, **kwargs)

    def post(self, url, data=None, json=None, **kwargs):
        """Sends a POST request. Returns :class:`Response` object.

        :param url: URL for the new :class:`Request` object.
        :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
        :param json: (optional) json to send in the body of the :class:`Request`.
        :param \*\*kwargs: Optional arguments that ``request`` takes.
        """

        return self.request('POST', url, data=data, json=json, **kwargs)

    def put(self, url, data=None, **kwargs):
        """Sends a PUT request. Returns :class:`Response` object.

        :param url: URL for the new :class:`Request` object.
        :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
        :param \*\*kwargs: Optional arguments that ``request`` takes.
        """

        return self.request('PUT', url, data=data, **kwargs)

    def patch(self, url, data=None, **kwargs):
        """Sends a PATCH request. Returns :class:`Response` object.

        :param url: URL for the new :class:`Request` object.
        :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
        :param \*\*kwargs: Optional arguments that ``request`` takes.
        """

        return self.request('PATCH', url, data=data, **kwargs)

    def delete(self, url, **kwargs):
        """Sends a DELETE request. Returns :class:`Response` object.

        :param url: URL for the new :class:`Request` object.
        :param \*\*kwargs: Optional arguments that ``request`` takes.
        """

        return self.request('DELETE', url, **kwargs)

    def send(self, request, **kwargs):
        """Send a given PreparedRequest."""
        # Set defaults that the hooks can utilize to ensure they always have
        # the correct parameters to reproduce the previous request.
        kwargs.setdefault('stream', self.stream)
        kwargs.setdefault('verify', self.verify)
        kwargs.setdefault('cert', self.cert)
        kwargs.setdefault('proxies', self.proxies)

        # It's possible that users might accidentally send a Request object.
        # Guard against that specific failure case.
        if not isinstance(request, PreparedRequest):
            raise ValueError('You can only send PreparedRequests.')
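
        # Follow any permanent redirects already recorded in redirect_cache;
        # checked_urls guards against looping on a cyclic cache entry.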
        checked_urls = set()
        while request.url in self.redirect_cache:
            checked_urls.add(request.url)
            new_url = self.redirect_cache.get(request.url)
            if new_url in checked_urls:
                break
            request.url = new_url

        # Set up variables needed for resolve_redirects and dispatching of hooks
        allow_redirects = kwargs.pop('allow_redirects', True)
        stream = kwargs.get('stream')
        hooks = request.hooks

        # Get the appropriate adapter to use
        adapter = self.get_adapter(url=request.url)

        # Start time (approximately) of the request
        start = datetime.utcnow()

        # Send the request
        r = adapter.send(request, **kwargs)

        # Total elapsed time of the request (approximately)
        r.elapsed = datetime.utcnow() - start

        # Response manipulation hooks
        r = dispatch_hook('response', hooks, r, **kwargs)

        # Persist cookies
        if r.history:

            # If the hooks create history then we want those cookies too
            for resp in r.history:
                extract_cookies_to_jar(self.cookies, resp.request, resp.raw)

        extract_cookies_to_jar(self.cookies, request, r.raw)

        # Redirect resolving generator.
        gen = self.resolve_redirects(r, request, **kwargs)

        # Resolve redirects if allowed.
        history = [resp for resp in gen] if allow_redirects else []

        # Shuffle things around if there's history.
        if history:
            # Insert the first (original) request at the start
            history.insert(0, r)
            # Get the last request made
            r = history.pop()
            r.history = history

        if not stream:
            r.content

        return r

    def merge_environment_settings(self, url, proxies, stream, verify, cert):
        """Check the environment and merge it with some settings."""
        # Gather clues from the surrounding environment.
        if self.trust_env:
            # Set environment's proxies.
            env_proxies = get_environ_proxies(url) or {}
            for (k, v) in env_proxies.items():
                proxies.setdefault(k, v)

            # Look for requests environment configuration and be compatible
            # with cURL.
            if verify is True or verify is None:
                verify = (os.environ.get('REQUESTS_CA_BUNDLE') or
                          os.environ.get('CURL_CA_BUNDLE'))

        # Merge all the kwargs.
        proxies = merge_setting(proxies, self.proxies)
        stream = merge_setting(stream, self.stream)
        verify = merge_setting(verify, self.verify)
        cert = merge_setting(cert, self.cert)

        return {'verify': verify, 'proxies': proxies, 'stream': stream,
                'cert': cert}

    def get_adapter(self, url):
        """Returns the appropriate connection adapter for the given URL."""
        for (prefix, adapter) in self.adapters.items():

            if url.lower().startswith(prefix):
                return adapter

        # Nothing matches :-/
        raise InvalidSchema("No connection adapters were found for '%s'" % url)

    def close(self):
        """Closes all adapters and as such the session"""
        for v in self.adapters.values():
            v.close()

    def mount(self, prefix, adapter):
        """Registers a connection adapter to a prefix.

        Adapters are sorted in descending order by key length."""

        self.adapters[prefix] = adapter
        keys_to_move = [k for k in self.adapters if len(k) < len(prefix)]

        for key in keys_to_move:
            self.adapters[key] = self.adapters.pop(key)
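
    # Illustrative sketch only (not part of the upstream source): mounting a
    # longer, more specific prefix makes get_adapter() prefer it over the
    # default 'http://' / 'https://' adapters, e.g.
    #
    #     s = Session()
    #     s.mount('http://media.example/', HTTPAdapter(max_retries=3))  # hypothetical host
    #     s.get_adapter('http://media.example/stream')  # -> the retrying adapter
    #     s.get_adapter('http://other.example/')        # -> the default adapter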

    def __getstate__(self):
        state = dict((attr, getattr(self, attr, None)) for attr in self.__attrs__)
        state['redirect_cache'] = dict(self.redirect_cache)
        return state

    def __setstate__(self, state):
        redirect_cache = state.pop('redirect_cache', {})
        for attr, value in state.items():
            setattr(self, attr, value)

        self.redirect_cache = RecentlyUsedContainer(REDIRECT_CACHE_SIZE)
        for redirect, to in redirect_cache.items():
            self.redirect_cache[redirect] = to


def session():
    """Returns a :class:`Session` for context-management."""

    return Session()
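
# Illustrative sketch only (not shipped with requests): a Session persists
# headers and cookies across calls and reuses pooled connections; the
# hostnames below are hypothetical placeholders.
#
#     s = session()
#     s.headers.update({'Accept': 'application/json'})
#     s.get('http://api.example/login')    # response cookies land in s.cookies
#     s.get('http://api.example/profile')  # same cookies and headers reused
#     s.close()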