adapters.py

# -*- coding: utf-8 -*-

"""
requests.adapters
~~~~~~~~~~~~~~~~~

This module contains the transport adapters that Requests uses to define
and maintain connections.
"""

import socket

from .models import Response
from .packages.urllib3.poolmanager import PoolManager, proxy_from_url
from .packages.urllib3.response import HTTPResponse
from .packages.urllib3.util import Timeout as TimeoutSauce
from .packages.urllib3.util.retry import Retry
from .compat import urlparse, basestring
from .utils import (DEFAULT_CA_BUNDLE_PATH, get_encoding_from_headers,
                    prepend_scheme_if_needed, get_auth_from_url, urldefragauth)
from .structures import CaseInsensitiveDict
from .packages.urllib3.exceptions import ConnectTimeoutError
from .packages.urllib3.exceptions import HTTPError as _HTTPError
from .packages.urllib3.exceptions import MaxRetryError
from .packages.urllib3.exceptions import ProxyError as _ProxyError
from .packages.urllib3.exceptions import ProtocolError
from .packages.urllib3.exceptions import ReadTimeoutError
from .packages.urllib3.exceptions import SSLError as _SSLError
from .packages.urllib3.exceptions import ResponseError
from .cookies import extract_cookies_to_jar
from .exceptions import (ConnectionError, ConnectTimeout, ReadTimeout, SSLError,
                         ProxyError, RetryError)
from .auth import _basic_auth_str

DEFAULT_POOLBLOCK = False
DEFAULT_POOLSIZE = 10
DEFAULT_RETRIES = 0

class BaseAdapter(object):
    """The Base Transport Adapter"""

    def __init__(self):
        super(BaseAdapter, self).__init__()

    def send(self):
        raise NotImplementedError

    def close(self):
        raise NotImplementedError

class HTTPAdapter(BaseAdapter):
    """The built-in HTTP Adapter for urllib3.

    Provides a general-case interface for Requests sessions to contact HTTP and
    HTTPS urls by implementing the Transport Adapter interface. This class will
    usually be created by the :class:`Session <Session>` class under the
    covers.

    :param pool_connections: The number of urllib3 connection pools to cache.
    :param pool_maxsize: The maximum number of connections to save in the pool.
    :param int max_retries: The maximum number of retries each connection
        should attempt. Note, this applies only to failed DNS lookups, socket
        connections and connection timeouts, never to requests where data has
        made it to the server. By default, Requests does not retry failed
        connections. If you need granular control over the conditions under
        which we retry a request, import urllib3's ``Retry`` class and pass
        that instead.
    :param pool_block: Whether the connection pool should block for connections.

    Usage::

      >>> import requests
      >>> s = requests.Session()
      >>> a = requests.adapters.HTTPAdapter(max_retries=3)
      >>> s.mount('http://', a)
    """
    __attrs__ = ['max_retries', 'config', '_pool_connections', '_pool_maxsize',
                 '_pool_block']

    def __init__(self, pool_connections=DEFAULT_POOLSIZE,
                 pool_maxsize=DEFAULT_POOLSIZE, max_retries=DEFAULT_RETRIES,
                 pool_block=DEFAULT_POOLBLOCK):
        if max_retries == DEFAULT_RETRIES:
            self.max_retries = Retry(0, read=False)
        else:
            self.max_retries = Retry.from_int(max_retries)
        self.config = {}
        self.proxy_manager = {}

        super(HTTPAdapter, self).__init__()

        self._pool_connections = pool_connections
        self._pool_maxsize = pool_maxsize
        self._pool_block = pool_block

        self.init_poolmanager(pool_connections, pool_maxsize, block=pool_block)

    def __getstate__(self):
        return dict((attr, getattr(self, attr, None)) for attr in
                    self.__attrs__)

    def __setstate__(self, state):
        # Can't handle by adding 'proxy_manager' to self.__attrs__ because
        # self.poolmanager uses a lambda function, which isn't pickleable.
        self.proxy_manager = {}
        self.config = {}

        for attr, value in state.items():
            setattr(self, attr, value)

        self.init_poolmanager(self._pool_connections, self._pool_maxsize,
                              block=self._pool_block)

    def init_poolmanager(self, connections, maxsize, block=DEFAULT_POOLBLOCK, **pool_kwargs):
        """Initializes a urllib3 PoolManager.

        This method should not be called from user code, and is only
        exposed for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param connections: The number of urllib3 connection pools to cache.
        :param maxsize: The maximum number of connections to save in the pool.
        :param block: Block when no free connections are available.
        :param pool_kwargs: Extra keyword arguments used to initialize the Pool Manager.
        """
        # Save these values for pickling.
        self._pool_connections = connections
        self._pool_maxsize = maxsize
        self._pool_block = block

        self.poolmanager = PoolManager(num_pools=connections, maxsize=maxsize,
                                       block=block, strict=True, **pool_kwargs)

    def proxy_manager_for(self, proxy, **proxy_kwargs):
        """Return urllib3 ProxyManager for the given proxy.

        This method should not be called from user code, and is only
        exposed for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param proxy: The proxy to return a urllib3 ProxyManager for.
        :param proxy_kwargs: Extra keyword arguments used to configure the Proxy Manager.
        :returns: ProxyManager
        """
        if proxy not in self.proxy_manager:
            proxy_headers = self.proxy_headers(proxy)
            self.proxy_manager[proxy] = proxy_from_url(
                proxy,
                proxy_headers=proxy_headers,
                num_pools=self._pool_connections,
                maxsize=self._pool_maxsize,
                block=self._pool_block,
                **proxy_kwargs)

        return self.proxy_manager[proxy]

    def cert_verify(self, conn, url, verify, cert):
        """Verify an SSL certificate. This method should not be called from user
        code, and is only exposed for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param conn: The urllib3 connection object associated with the cert.
        :param url: The requested URL.
        :param verify: Whether we should actually verify the certificate.
        :param cert: The SSL certificate to verify.
        """
        if url.lower().startswith('https') and verify:
            cert_loc = None

            # Allow self-specified cert location.
            if verify is not True:
                cert_loc = verify

            if not cert_loc:
                cert_loc = DEFAULT_CA_BUNDLE_PATH

            if not cert_loc:
                raise Exception("Could not find a suitable SSL CA certificate bundle.")

            conn.cert_reqs = 'CERT_REQUIRED'
            conn.ca_certs = cert_loc
        else:
            conn.cert_reqs = 'CERT_NONE'
            conn.ca_certs = None

        if cert:
            if not isinstance(cert, basestring):
                conn.cert_file = cert[0]
                conn.key_file = cert[1]
            else:
                conn.cert_file = cert

    def build_response(self, req, resp):
        """Builds a :class:`Response <requests.Response>` object from a urllib3
        response. This should not be called from user code, and is only exposed
        for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param req: The :class:`PreparedRequest <PreparedRequest>` used to generate the response.
        :param resp: The urllib3 response object.
        """
        response = Response()

        # Fallback to None if there's no status_code, for whatever reason.
        response.status_code = getattr(resp, 'status', None)

        # Make headers case-insensitive.
        response.headers = CaseInsensitiveDict(getattr(resp, 'headers', {}))

        # Set encoding.
        response.encoding = get_encoding_from_headers(response.headers)
        response.raw = resp
        response.reason = response.raw.reason

        if isinstance(req.url, bytes):
            response.url = req.url.decode('utf-8')
        else:
            response.url = req.url

        # Add new cookies from the server.
        extract_cookies_to_jar(response.cookies, req, resp)

        # Give the Response some context.
        response.request = req
        response.connection = self

        return response

    def get_connection(self, url, proxies=None):
        """Returns a urllib3 connection for the given URL. This should not be
        called from user code, and is only exposed for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param url: The URL to connect to.
        :param proxies: (optional) A Requests-style dictionary of proxies used on this request.
        """
        proxies = proxies or {}
        proxy = proxies.get(urlparse(url.lower()).scheme)

        if proxy:
            proxy = prepend_scheme_if_needed(proxy, 'http')
            proxy_manager = self.proxy_manager_for(proxy)
            conn = proxy_manager.connection_from_url(url)
        else:
            # Only the scheme should be lower case.
            parsed = urlparse(url)
            url = parsed.geturl()
            conn = self.poolmanager.connection_from_url(url)

        return conn

    def close(self):
        """Disposes of any internal state.

        Currently, this just closes the PoolManager, which closes pooled
        connections.
        """
        self.poolmanager.clear()

    def request_url(self, request, proxies):
        """Obtain the url to use when making the final request.

        If the message is being sent through an HTTP proxy, the full URL has to
        be used. Otherwise, we should only use the path portion of the URL.

        This should not be called from user code, and is only exposed for use
        when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
        :param proxies: A dictionary of schemes to proxy URLs.
        """
        proxies = proxies or {}
        scheme = urlparse(request.url).scheme
        proxy = proxies.get(scheme)

        if proxy and scheme != 'https':
            url = urldefragauth(request.url)
        else:
            url = request.path_url

        return url

    def add_headers(self, request, **kwargs):
        """Add any headers needed by the connection. As of v2.0 this does
        nothing by default, but is left for overriding by users that subclass
        the :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        This should not be called from user code, and is only exposed for use
        when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param request: The :class:`PreparedRequest <PreparedRequest>` to add headers to.
        :param kwargs: The keyword arguments from the call to send().
        """
        pass

    def proxy_headers(self, proxy):
        """Returns a dictionary of the headers to add to any request sent
        through a proxy. This works with urllib3 magic to ensure that they are
        correctly sent to the proxy, rather than in a tunnelled request if
        CONNECT is being used.

        This should not be called from user code, and is only exposed for use
        when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param proxy: The url of the proxy being used for this request.
        """
        headers = {}
        username, password = get_auth_from_url(proxy)

        if username and password:
            headers['Proxy-Authorization'] = _basic_auth_str(username,
                                                             password)

        return headers

    def send(self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None):
        """Sends PreparedRequest object. Returns Response object.

        :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
        :param stream: (optional) Whether to stream the request content.
        :param timeout: (optional) How long to wait for the server to send
            data before giving up, as a float, or a (`connect timeout, read
            timeout <user/advanced.html#timeouts>`_) tuple.
        :type timeout: float or tuple
        :param verify: (optional) Whether to verify SSL certificates.
        :param cert: (optional) Any user-provided SSL certificate to be trusted.
        :param proxies: (optional) The proxies dictionary to apply to the request.
        """
        conn = self.get_connection(request.url, proxies)

        self.cert_verify(conn, request.url, verify, cert)
        url = self.request_url(request, proxies)
        self.add_headers(request)

        chunked = not (request.body is None or 'Content-Length' in request.headers)

        if isinstance(timeout, tuple):
            try:
                connect, read = timeout
                timeout = TimeoutSauce(connect=connect, read=read)
            except ValueError as e:
                # This may raise a string formatting error.
                err = ("Invalid timeout {0}. Pass a (connect, read) "
                       "timeout tuple, or a single float to set "
                       "both timeouts to the same value".format(timeout))
                raise ValueError(err)
        else:
            timeout = TimeoutSauce(connect=timeout, read=timeout)

        try:
            # Send the request.
            if not chunked:
                resp = conn.urlopen(
                    method=request.method,
                    url=url,
                    body=request.body,
                    headers=request.headers,
                    redirect=False,
                    assert_same_host=False,
                    preload_content=False,
                    decode_content=False,
                    retries=self.max_retries,
                    timeout=timeout
                )
            else:
                if hasattr(conn, 'proxy_pool'):
                    conn = conn.proxy_pool

                low_conn = conn._get_conn(timeout=timeout)

                try:
                    low_conn.putrequest(request.method,
                                        url,
                                        skip_accept_encoding=True)

                    for header, value in request.headers.items():
                        low_conn.putheader(header, value)

                    low_conn.endheaders()

                    # Write the body using chunked transfer encoding: each
                    # chunk is its length in hex, CRLF, the data, CRLF, and
                    # the stream ends with a zero-length chunk.
                    for i in request.body:
                        low_conn.send(hex(len(i))[2:].encode('utf-8'))
                        low_conn.send(b'\r\n')
                        low_conn.send(i)
                        low_conn.send(b'\r\n')
                    low_conn.send(b'0\r\n\r\n')

                    r = low_conn.getresponse()
                    resp = HTTPResponse.from_httplib(
                        r,
                        pool=conn,
                        connection=low_conn,
                        preload_content=False,
                        decode_content=False
                    )
                except:
                    # If we hit any problems here, clean up the connection.
                    # Then, reraise so that we can handle the actual exception.
                    low_conn.close()
                    raise
                else:
                    # All is well, return the connection to the pool.
                    conn._put_conn(low_conn)

        except (ProtocolError, socket.error) as err:
            raise ConnectionError(err, request=request)

        except MaxRetryError as e:
            if isinstance(e.reason, ConnectTimeoutError):
                raise ConnectTimeout(e, request=request)

            if isinstance(e.reason, ResponseError):
                raise RetryError(e, request=request)

            raise ConnectionError(e, request=request)

        except _ProxyError as e:
            raise ProxyError(e)

        except (_SSLError, _HTTPError) as e:
            if isinstance(e, _SSLError):
                raise SSLError(e, request=request)
            elif isinstance(e, ReadTimeoutError):
                raise ReadTimeout(e, request=request)
            else:
                raise

        return self.build_response(request, resp)
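

# ---------------------------------------------------------------------------
# Minimal usage sketch (illustrative only, not part of the module's public
# surface): it shows how the adapter defined above is typically mounted on a
# Session, and how urllib3's ``Retry`` can be passed as ``max_retries`` for
# the granular retry control mentioned in the HTTPAdapter docstring. The pool
# sizes and status_forcelist values are arbitrary example choices.
if __name__ == '__main__':
    import requests

    # Retry up to 3 times, backing off between attempts, and also retry
    # responses with these status codes (for retryable methods).
    retries = Retry(total=3, backoff_factor=0.5,
                    status_forcelist=[500, 502, 503, 504])

    # One adapter instance can serve both schemes; it keeps up to 20 pools
    # of up to 20 connections each (the defaults are 10 and 10).
    adapter = HTTPAdapter(pool_connections=20, pool_maxsize=20,
                          max_retries=retries)

    session = requests.Session()
    session.mount('http://', adapter)
    session.mount('https://', adapter)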