
import os.path
import logging
import socket
from base64 import b64encode
import sys

from urllib3 import PoolManager, ProxyManager, proxy_from_url, Timeout
from urllib3.util.retry import Retry
from urllib3.util.ssl_ import (
    ssl, OP_NO_SSLv2, OP_NO_SSLv3, OP_NO_COMPRESSION, DEFAULT_CIPHERS,
)
from urllib3.exceptions import SSLError as URLLib3SSLError
from urllib3.exceptions import ReadTimeoutError as URLLib3ReadTimeoutError
from urllib3.exceptions import ConnectTimeoutError as URLLib3ConnectTimeoutError
from urllib3.exceptions import NewConnectionError, ProtocolError, ProxyError
try:
    # Always import the original SSLContext, even if it has been patched
    from urllib3.contrib.pyopenssl import orig_util_SSLContext as SSLContext
except ImportError:
    from urllib3.util.ssl_ import SSLContext

import botocore.awsrequest
from botocore.vendored import six
from botocore.vendored.six.moves.urllib_parse import unquote
from botocore.compat import filter_ssl_warnings, urlparse
from botocore.exceptions import (
    ConnectionClosedError, EndpointConnectionError, HTTPClientError,
    ReadTimeoutError, ProxyConnectionError, ConnectTimeoutError, SSLError,
    InvalidProxiesConfigError
)

filter_ssl_warnings()
logger = logging.getLogger(__name__)
DEFAULT_TIMEOUT = 60
MAX_POOL_CONNECTIONS = 10
DEFAULT_CA_BUNDLE = os.path.join(os.path.dirname(__file__), 'cacert.pem')

try:
    from certifi import where
except ImportError:
    def where():
        return DEFAULT_CA_BUNDLE


def get_cert_path(verify):
    if verify is not True:
        return verify

    cert_path = where()
    logger.debug("Certificate path: {0}".format(cert_path))

    return cert_path
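
# For example, given the function above:
#
#   get_cert_path(True)               # resolves to certifi's bundle, or DEFAULT_CA_BUNDLE
#   get_cert_path('/path/to/ca.pem')  # returned unchanged; the caller supplied a bundle
#   get_cert_path(False)              # returned unchanged; verification was disabled by the caller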


def create_urllib3_context(ssl_version=None, cert_reqs=None,
                           options=None, ciphers=None):
    """This function is a vendored version of the same function in urllib3.

    We vendor this function to ensure that the SSL contexts we construct
    always use the std lib SSLContext instead of pyopenssl.
    """
    context = SSLContext(ssl_version or ssl.PROTOCOL_SSLv23)

    # Setting the default here, as we may have no ssl module on import
    cert_reqs = ssl.CERT_REQUIRED if cert_reqs is None else cert_reqs

    if options is None:
        options = 0
        # SSLv2 is easily broken and is considered harmful and dangerous
        options |= OP_NO_SSLv2
        # SSLv3 has several problems and is now dangerous
        options |= OP_NO_SSLv3
        # Disable compression to prevent CRIME attacks for OpenSSL 1.0+
        # (issue urllib3#309)
        options |= OP_NO_COMPRESSION

    context.options |= options

    if getattr(context, 'supports_set_ciphers', True):
        # Platform-specific: Python 2.6
        context.set_ciphers(ciphers or DEFAULT_CIPHERS)

    context.verify_mode = cert_reqs
    if getattr(context, 'check_hostname', None) is not None:
        # Platform-specific: Python 3.2
        # We do our own verification, including fingerprints and alternative
        # hostnames. So disable it here
        context.check_hostname = False

    # Enable logging of TLS session keys via the de facto standard environment
    # variable 'SSLKEYLOGFILE', if the feature is available (Python 3.8+).
    # Skip empty values.
    if hasattr(context, 'keylog_filename'):
        keylogfile = os.environ.get('SSLKEYLOGFILE')
        if keylogfile and not sys.flags.ignore_environment:
            context.keylog_filename = keylogfile

    return context
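
# A minimal sketch (not called anywhere in this module) of how the vendored
# context is consumed: URLLib3Session below hands it to urllib3 through the
# 'ssl_context' pool keyword, so the std lib SSLContext is used even when
# pyopenssl has patched urllib3.
#
#   ctx = create_urllib3_context()
#   pool = PoolManager(ssl_context=ctx, maxsize=MAX_POOL_CONNECTIONS)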


class ProxyConfiguration(object):
    """Represents a proxy configuration dictionary and additional settings.

    This class represents a proxy configuration dictionary and provides utility
    functions to retrieve well structured proxy urls and proxy headers from the
    proxy configuration dictionary.
    """
    def __init__(self, proxies=None, proxies_settings=None):
        if proxies is None:
            proxies = {}
        if proxies_settings is None:
            proxies_settings = {}

        self._proxies = proxies
        self._proxies_settings = proxies_settings

    def proxy_url_for(self, url):
        """Retrieves the corresponding proxy url for a given url. """
        parsed_url = urlparse(url)
        proxy = self._proxies.get(parsed_url.scheme)
        if proxy:
            proxy = self._fix_proxy_url(proxy)
        return proxy

    def proxy_headers_for(self, proxy_url):
        """Retrieves the corresponding proxy headers for a given proxy url. """
        headers = {}
        username, password = self._get_auth_from_url(proxy_url)
        if username and password:
            basic_auth = self._construct_basic_auth(username, password)
            headers['Proxy-Authorization'] = basic_auth
        return headers

    @property
    def settings(self):
        return self._proxies_settings

    def _fix_proxy_url(self, proxy_url):
        if proxy_url.startswith('http:') or proxy_url.startswith('https:'):
            return proxy_url
        elif proxy_url.startswith('//'):
            return 'http:' + proxy_url
        else:
            return 'http://' + proxy_url

    def _construct_basic_auth(self, username, password):
        auth_str = '{0}:{1}'.format(username, password)
        encoded_str = b64encode(auth_str.encode('ascii')).strip().decode()
        return 'Basic {0}'.format(encoded_str)

    def _get_auth_from_url(self, url):
        parsed_url = urlparse(url)
        try:
            return unquote(parsed_url.username), unquote(parsed_url.password)
        except (AttributeError, TypeError):
            return None, None
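
# Illustrative usage of ProxyConfiguration; the proxy host and credentials
# below are placeholders, not values used by botocore:
#
#   config = ProxyConfiguration(proxies={'https': 'user:pass@proxy.example.com:8080'})
#   config.proxy_url_for('https://s3.amazonaws.com/')
#   # -> 'http://user:pass@proxy.example.com:8080'
#   config.proxy_headers_for('http://user:pass@proxy.example.com:8080')
#   # -> {'Proxy-Authorization': 'Basic dXNlcjpwYXNz'}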


class URLLib3Session(object):
    """A basic HTTP client that supports connection pooling and proxies.

    This class is inspired by requests.adapters.HTTPAdapter, but has been
    boiled down to meet the use cases needed by botocore. For the most part
    this class matches the functionality of HTTPAdapter in requests v2.7.0
    (the same as our vendored version). The only major difference of note is
    that we currently do not support sending chunked requests. While requests
    v2.7.0 implemented this themselves, later versions of urllib3 support it
    directly via a flag to urlopen, so enabling it if needed should be trivial.
    """
    def __init__(self,
                 verify=True,
                 proxies=None,
                 timeout=None,
                 max_pool_connections=MAX_POOL_CONNECTIONS,
                 socket_options=None,
                 client_cert=None,
                 proxies_config=None,
    ):
        self._verify = verify
        self._proxy_config = ProxyConfiguration(proxies=proxies,
                                                proxies_settings=proxies_config)
        self._pool_classes_by_scheme = {
            'http': botocore.awsrequest.AWSHTTPConnectionPool,
            'https': botocore.awsrequest.AWSHTTPSConnectionPool,
        }
        if timeout is None:
            timeout = DEFAULT_TIMEOUT
        if not isinstance(timeout, (int, float)):
            timeout = Timeout(connect=timeout[0], read=timeout[1])

        self._cert_file = None
        self._key_file = None
        if isinstance(client_cert, str):
            self._cert_file = client_cert
        elif isinstance(client_cert, tuple):
            self._cert_file, self._key_file = client_cert

        self._timeout = timeout
        self._max_pool_connections = max_pool_connections
        self._socket_options = socket_options
        if socket_options is None:
            self._socket_options = []
        self._proxy_managers = {}
        self._manager = PoolManager(**self._get_pool_manager_kwargs())
        self._manager.pool_classes_by_scheme = self._pool_classes_by_scheme

    @property
    def _proxies_kwargs(self):
        proxies_settings = self._proxy_config.settings
        proxy_ssl_context = self._setup_proxy_ssl_context(proxies_settings)
        proxies_kwargs = {
            'proxy_ssl_context': proxy_ssl_context,
            'use_forwarding_for_https': proxies_settings.get(
                'proxy_use_forwarding_for_https'),
        }
        return {k: v for k, v in proxies_kwargs.items() if v is not None}

    def _get_pool_manager_kwargs(self, **extra_kwargs):
        pool_manager_kwargs = {
            'strict': True,
            'timeout': self._timeout,
            'maxsize': self._max_pool_connections,
            'ssl_context': self._get_ssl_context(),
            'socket_options': self._socket_options,
            'cert_file': self._cert_file,
            'key_file': self._key_file,
        }
        pool_manager_kwargs.update(**extra_kwargs)
        return pool_manager_kwargs

    def _get_ssl_context(self):
        return create_urllib3_context()

    def _get_proxy_manager(self, proxy_url):
        if proxy_url not in self._proxy_managers:
            proxy_headers = self._proxy_config.proxy_headers_for(proxy_url)
            proxy_manager_kwargs = self._get_pool_manager_kwargs(
                proxy_headers=proxy_headers)
            proxy_manager_kwargs.update(**self._proxies_kwargs)
            proxy_manager = proxy_from_url(proxy_url, **proxy_manager_kwargs)
            proxy_manager.pool_classes_by_scheme = self._pool_classes_by_scheme
            self._proxy_managers[proxy_url] = proxy_manager

        return self._proxy_managers[proxy_url]

    def _path_url(self, url):
        parsed_url = urlparse(url)
        path = parsed_url.path
        if not path:
            path = '/'
        if parsed_url.query:
            path = path + '?' + parsed_url.query
        return path

    def _setup_ssl_cert(self, conn, url, verify):
        if url.lower().startswith('https') and verify:
            conn.cert_reqs = 'CERT_REQUIRED'
            conn.ca_certs = get_cert_path(verify)
        else:
            conn.cert_reqs = 'CERT_NONE'
            conn.ca_certs = None

    def _setup_proxy_ssl_context(self, proxies_settings):
        proxy_ca_bundle = proxies_settings.get('proxy_ca_bundle')
        proxy_cert = proxies_settings.get('proxy_client_cert')
        if proxy_ca_bundle is None and proxy_cert is None:
            return None

        context = self._get_ssl_context()
        try:
            # urllib3 disables this by default but we need
            # it for proper proxy tls negotiation.
            context.check_hostname = True
            if proxy_ca_bundle is not None:
                context.load_verify_locations(cafile=proxy_ca_bundle)

            if isinstance(proxy_cert, tuple):
                context.load_cert_chain(proxy_cert[0], keyfile=proxy_cert[1])
            elif isinstance(proxy_cert, str):
                context.load_cert_chain(proxy_cert)

            return context
        except (IOError, URLLib3SSLError) as e:
            raise InvalidProxiesConfigError(error=e)
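
    # The proxies_settings dict read above comes from the proxies_config option
    # passed to __init__. As a sketch of the shape this module expects (all
    # paths are placeholders), the keys consumed here are:
    #
    #   {
    #       'proxy_ca_bundle': '/path/to/proxy-ca.pem',
    #       'proxy_client_cert': ('/path/to/client.crt', '/path/to/client.key'),
    #       'proxy_use_forwarding_for_https': True,
    #   }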

    def _get_connection_manager(self, url, proxy_url=None):
        if proxy_url:
            manager = self._get_proxy_manager(proxy_url)
        else:
            manager = self._manager
        return manager

    def _get_request_target(self, url, proxy_url):
        has_proxy = proxy_url is not None

        if not has_proxy:
            return self._path_url(url)

        # HTTP proxies expect the request_target to be the absolute url to know
        # which host to establish a connection to. urllib3 also supports
        # forwarding for HTTPS through the 'use_forwarding_for_https' parameter.
        proxy_scheme = urlparse(proxy_url).scheme
        using_https_forwarding_proxy = (
            proxy_scheme == 'https' and
            self._proxies_kwargs.get('use_forwarding_for_https', False)
        )

        if using_https_forwarding_proxy or url.startswith('http:'):
            return url
        else:
            return self._path_url(url)
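
    # For example, a request to https://example.com/key?acl sent directly (or
    # through a CONNECT tunnel) uses the origin-form target '/key?acl', whereas
    # the same request through a forwarding proxy uses the absolute form
    # 'https://example.com/key?acl' so the proxy knows which host to contact.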

    def _chunked(self, headers):
        return headers.get('Transfer-Encoding', '') == 'chunked'

    def send(self, request):
        try:
            proxy_url = self._proxy_config.proxy_url_for(request.url)
            manager = self._get_connection_manager(request.url, proxy_url)
            conn = manager.connection_from_url(request.url)
            self._setup_ssl_cert(conn, request.url, self._verify)

            request_target = self._get_request_target(request.url, proxy_url)
            urllib_response = conn.urlopen(
                method=request.method,
                url=request_target,
                body=request.body,
                headers=request.headers,
                retries=Retry(False),
                assert_same_host=False,
                preload_content=False,
                decode_content=False,
                chunked=self._chunked(request.headers),
            )

            http_response = botocore.awsrequest.AWSResponse(
                request.url,
                urllib_response.status,
                urllib_response.headers,
                urllib_response,
            )

            if not request.stream_output:
                # Cause the raw stream to be exhausted immediately. We do it
                # this way instead of using preload_content because
                # preload_content will never buffer chunked responses
                http_response.content

            return http_response
        except URLLib3SSLError as e:
            raise SSLError(endpoint_url=request.url, error=e)
        except (NewConnectionError, socket.gaierror) as e:
            raise EndpointConnectionError(endpoint_url=request.url, error=e)
        except ProxyError as e:
            raise ProxyConnectionError(proxy_url=proxy_url, error=e)
        except URLLib3ConnectTimeoutError as e:
            raise ConnectTimeoutError(endpoint_url=request.url, error=e)
        except URLLib3ReadTimeoutError as e:
            raise ReadTimeoutError(endpoint_url=request.url, error=e)
        except ProtocolError as e:
            raise ConnectionClosedError(
                error=e,
                request=request,
                endpoint_url=request.url
            )
        except Exception as e:
            message = 'Exception received when sending urllib3 HTTP request'
            logger.debug(message, exc_info=True)
            raise HTTPClientError(error=e)
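
# A minimal usage sketch (assumes botocore.awsrequest.AWSRequest and its
# prepare() method for building the request object; this client is not an
# advertised public API, and the endpoint/proxy values below are placeholders):
#
#   from botocore.awsrequest import AWSRequest
#
#   session = URLLib3Session(timeout=(5, 30),
#                            proxies={'https': 'proxy.example.com:8080'})
#   request = AWSRequest(method='GET', url='https://s3.amazonaws.com/').prepare()
#   response = session.send(request)
#   print(response.status_code, len(response.content))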