From 992e038ef38afbaed76a7edd72b200687ae28382 Mon Sep 17 00:00:00 2001 From: Tom Christie Date: Tue, 7 Jan 2020 13:48:59 +0000 Subject: [PATCH 01/13] BaseClient and AsyncClient --- httpx/client.py | 308 ++++++++++++++++++++++++++---------------------- 1 file changed, 166 insertions(+), 142 deletions(-) diff --git a/httpx/client.py b/httpx/client.py index 2cc45c024a..4bd9a59f0e 100644 --- a/httpx/client.py +++ b/httpx/client.py @@ -53,59 +53,7 @@ logger = get_logger(__name__) -class AsyncClient: - """ - An asynchronous HTTP client, with connection pooling, HTTP/2, redirects, - cookie persistence, etc. - - Usage: - - ```python - >>> client = httpx.AsyncClient() - >>> response = client.get('https://example.org') - ``` - - **Parameters:** - - * **auth** - *(optional)* An authentication class to use when sending - requests. - * **params** - *(optional)* Query parameters to include in request URLs, as - a string, dictionary, or list of two-tuples. - * **headers** - *(optional)* Dictionary of HTTP headers to include when - sending requests. - * **cookies** - *(optional)* Dictionary of Cookie items to include when - sending requests. - * **verify** - *(optional)* SSL certificates (a.k.a CA bundle) used to - verify the identity of requested hosts. Either `True` (default CA bundle), - a path to an SSL certificate file, or `False` (disable verification). - * **cert** - *(optional)* An SSL certificate used by the requested host - to authenticate the client. Either a path to an SSL certificate file, or - two-tuple of (certificate file, key file), or a three-tuple of (certificate - file, key file, password). - * **http2** - *(optional)* A boolean indicating if HTTP/2 support should be - enabled. Defaults to `False`. - * **proxies** - *(optional)* A dictionary mapping HTTP protocols to proxy - URLs. - * **timeout** - *(optional)* The timeout configuration to use when sending - requests. 
- * **pool_limits** - *(optional)* The connection pool configuration to use - when determining the maximum number of concurrently open HTTP connections. - * **max_redirects** - *(optional)* The maximum number of redirect responses - that should be followed. - * **base_url** - *(optional)* A URL to use as the base when building - request URLs. - * **dispatch** - *(optional)* A dispatch class to use for sending requests - over the network. - * **app** - *(optional)* An ASGI application to send requests to, - rather than sending actual network requests. - * **backend** - *(optional)* A concurrency backend to use when issuing - async requests. Either 'auto', 'asyncio', 'trio', or a `ConcurrencyBackend` - instance. Defaults to 'auto', for autodetection. - * **trust_env** - *(optional)* Enables or disables usage of environment - variables for configuration. - * **uds** - *(optional)* A path to a Unix domain socket to connect through. - """ - +class BaseClient: def __init__( self, *, @@ -113,19 +61,10 @@ def __init__( params: QueryParamTypes = None, headers: HeaderTypes = None, cookies: CookieTypes = None, - verify: VerifyTypes = True, - cert: CertTypes = None, - http2: bool = False, - proxies: ProxiesTypes = None, timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, - pool_limits: PoolLimits = DEFAULT_POOL_LIMITS, max_redirects: int = DEFAULT_MAX_REDIRECTS, base_url: URLTypes = None, - dispatch: AsyncDispatcher = None, - app: typing.Callable = None, - backend: typing.Union[str, ConcurrencyBackend] = "auto", trust_env: bool = True, - uds: str = None, ): if base_url is None: self.base_url = URL("", allow_relative=True) @@ -135,9 +74,6 @@ def __init__( if params is None: params = {} - if proxies is None and trust_env: - proxies = typing.cast(ProxiesTypes, get_environment_proxies()) - self.auth = auth self._params = QueryParams(params) self._headers = Headers(headers) @@ -147,82 +83,6 @@ def __init__( self.trust_env = trust_env self.netrc = NetRCInfo() - proxy_map = 
self.get_proxy_map(proxies, trust_env) - - self.dispatch = self.init_dispatch( - verify=verify, - cert=cert, - http2=http2, - pool_limits=pool_limits, - dispatch=dispatch, - app=app, - backend=backend, - trust_env=trust_env, - uds=uds, - ) - self.proxies: typing.Dict[str, AsyncDispatcher] = { - key: self.init_proxy_dispatch( - proxy, - verify=verify, - cert=cert, - http2=http2, - pool_limits=pool_limits, - backend=backend, - trust_env=trust_env, - ) - for key, proxy in proxy_map.items() - } - - def init_dispatch( - self, - verify: VerifyTypes = True, - cert: CertTypes = None, - http2: bool = False, - pool_limits: PoolLimits = DEFAULT_POOL_LIMITS, - dispatch: AsyncDispatcher = None, - app: typing.Callable = None, - backend: typing.Union[str, ConcurrencyBackend] = "auto", - trust_env: bool = True, - uds: str = None, - ) -> AsyncDispatcher: - if dispatch is not None: - return dispatch - - if app is not None: - return ASGIDispatch(app=app) - - return ConnectionPool( - verify=verify, - cert=cert, - http2=http2, - pool_limits=pool_limits, - backend=backend, - trust_env=trust_env, - uds=uds, - ) - - def init_proxy_dispatch( - self, - proxy: Proxy, - verify: VerifyTypes = True, - cert: CertTypes = None, - http2: bool = False, - pool_limits: PoolLimits = DEFAULT_POOL_LIMITS, - backend: typing.Union[str, ConcurrencyBackend] = "auto", - trust_env: bool = True, - ) -> AsyncDispatcher: - return HTTPProxy( - proxy_url=proxy.url, - proxy_headers=proxy.headers, - proxy_mode=proxy.mode, - verify=verify, - cert=cert, - http2=http2, - pool_limits=pool_limits, - backend=backend, - trust_env=trust_env, - ) - def get_proxy_map( self, proxies: typing.Optional[ProxiesTypes], trust_env: bool, ) -> typing.Dict[str, Proxy]: @@ -520,6 +380,168 @@ def redirect_stream( return request.stream + +class AsyncClient(BaseClient): + """ + An asynchronous HTTP client, with connection pooling, HTTP/2, redirects, + cookie persistence, etc. 
+ + Usage: + + ```python + >>> client = httpx.AsyncClient() + >>> response = client.get('https://example.org') + ``` + + **Parameters:** + + * **auth** - *(optional)* An authentication class to use when sending + requests. + * **params** - *(optional)* Query parameters to include in request URLs, as + a string, dictionary, or list of two-tuples. + * **headers** - *(optional)* Dictionary of HTTP headers to include when + sending requests. + * **cookies** - *(optional)* Dictionary of Cookie items to include when + sending requests. + * **verify** - *(optional)* SSL certificates (a.k.a CA bundle) used to + verify the identity of requested hosts. Either `True` (default CA bundle), + a path to an SSL certificate file, or `False` (disable verification). + * **cert** - *(optional)* An SSL certificate used by the requested host + to authenticate the client. Either a path to an SSL certificate file, or + two-tuple of (certificate file, key file), or a three-tuple of (certificate + file, key file, password). + * **http2** - *(optional)* A boolean indicating if HTTP/2 support should be + enabled. Defaults to `False`. + * **proxies** - *(optional)* A dictionary mapping HTTP protocols to proxy + URLs. + * **timeout** - *(optional)* The timeout configuration to use when sending + requests. + * **pool_limits** - *(optional)* The connection pool configuration to use + when determining the maximum number of concurrently open HTTP connections. + * **max_redirects** - *(optional)* The maximum number of redirect responses + that should be followed. + * **base_url** - *(optional)* A URL to use as the base when building + request URLs. + * **dispatch** - *(optional)* A dispatch class to use for sending requests + over the network. + * **app** - *(optional)* An ASGI application to send requests to, + rather than sending actual network requests. + * **backend** - *(optional)* A concurrency backend to use when issuing + async requests. 
Either 'auto', 'asyncio', 'trio', or a `ConcurrencyBackend` + instance. Defaults to 'auto', for autodetection. + * **trust_env** - *(optional)* Enables or disables usage of environment + variables for configuration. + * **uds** - *(optional)* A path to a Unix domain socket to connect through. + """ + + def __init__( + self, + *, + auth: AuthTypes = None, + params: QueryParamTypes = None, + headers: HeaderTypes = None, + cookies: CookieTypes = None, + verify: VerifyTypes = True, + cert: CertTypes = None, + http2: bool = False, + proxies: ProxiesTypes = None, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + pool_limits: PoolLimits = DEFAULT_POOL_LIMITS, + max_redirects: int = DEFAULT_MAX_REDIRECTS, + base_url: URLTypes = None, + dispatch: AsyncDispatcher = None, + app: typing.Callable = None, + backend: typing.Union[str, ConcurrencyBackend] = "auto", + trust_env: bool = True, + uds: str = None, + ): + super().__init__( + auth=auth, + params=params, + headers=headers, + cookies=cookies, + timeout=timeout, + max_redirects=max_redirects, + base_url=base_url, + trust_env=trust_env, + ) + + proxy_map = self.get_proxy_map(proxies, trust_env) + + self.dispatch = self.init_dispatch( + verify=verify, + cert=cert, + http2=http2, + pool_limits=pool_limits, + dispatch=dispatch, + app=app, + backend=backend, + trust_env=trust_env, + uds=uds, + ) + self.proxies: typing.Dict[str, AsyncDispatcher] = { + key: self.init_proxy_dispatch( + proxy, + verify=verify, + cert=cert, + http2=http2, + pool_limits=pool_limits, + backend=backend, + trust_env=trust_env, + ) + for key, proxy in proxy_map.items() + } + + def init_dispatch( + self, + verify: VerifyTypes = True, + cert: CertTypes = None, + http2: bool = False, + pool_limits: PoolLimits = DEFAULT_POOL_LIMITS, + dispatch: AsyncDispatcher = None, + app: typing.Callable = None, + backend: typing.Union[str, ConcurrencyBackend] = "auto", + trust_env: bool = True, + uds: str = None, + ) -> AsyncDispatcher: + if dispatch is not None: + 
return dispatch + + if app is not None: + return ASGIDispatch(app=app) + + return ConnectionPool( + verify=verify, + cert=cert, + http2=http2, + pool_limits=pool_limits, + backend=backend, + trust_env=trust_env, + uds=uds, + ) + + def init_proxy_dispatch( + self, + proxy: Proxy, + verify: VerifyTypes = True, + cert: CertTypes = None, + http2: bool = False, + pool_limits: PoolLimits = DEFAULT_POOL_LIMITS, + backend: typing.Union[str, ConcurrencyBackend] = "auto", + trust_env: bool = True, + ) -> AsyncDispatcher: + return HTTPProxy( + proxy_url=proxy.url, + proxy_headers=proxy.headers, + proxy_mode=proxy.mode, + verify=verify, + cert=cert, + http2=http2, + pool_limits=pool_limits, + backend=backend, + trust_env=trust_env, + ) + def dispatcher_for_url(self, url: URL) -> AsyncDispatcher: """ Returns the AsyncDispatcher instance that should be used for a given URL. @@ -887,7 +909,7 @@ async def __aexit__( class StreamContextManager: def __init__( self, - client: AsyncClient, + client: BaseClient, request: Request, *, auth: AuthTypes = None, @@ -903,6 +925,7 @@ def __init__( self.close_client = close_client async def __aenter__(self) -> "Response": + assert isinstance(self.client, AsyncClient) self.response = await self.client.send( request=self.request, auth=self.auth, @@ -918,6 +941,7 @@ async def __aexit__( exc_value: BaseException = None, traceback: TracebackType = None, ) -> None: + assert isinstance(self.client, AsyncClient) await self.response.aclose() if self.close_client: await self.client.aclose() From f97d27f6f80900af03d4643aab32335d8a2b0d45 Mon Sep 17 00:00:00 2001 From: Tom Christie Date: Tue, 7 Jan 2020 13:58:19 +0000 Subject: [PATCH 02/13] Introduce 'httpx.Client' --- httpx/client.py | 514 ++++++++++++++++++++++++++++++++++++++++- httpx/dispatch/base.py | 12 + httpx/models.py | 12 + 3 files changed, 533 insertions(+), 5 deletions(-) diff --git a/httpx/client.py b/httpx/client.py index 4bd9a59f0e..a24f638e91 100644 --- a/httpx/client.py +++ 
b/httpx/client.py @@ -22,7 +22,7 @@ ) from .content_streams import ContentStream from .dispatch.asgi import ASGIDispatch -from .dispatch.base import AsyncDispatcher +from .dispatch.base import AsyncDispatcher, Dispatcher from .dispatch.connection_pool import ConnectionPool from .dispatch.proxy_http import HTTPProxy from .exceptions import ( @@ -381,6 +381,492 @@ def redirect_stream( return request.stream +class Client(BaseClient): + """ + An HTTP client, with connection pooling, HTTP/2, redirects, cookie persistence, etc. + + Usage: + + ```python + >>> client = httpx.Client() + >>> response = client.get('https://example.org') + ``` + + **Parameters:** + + * **auth** - *(optional)* An authentication class to use when sending + requests. + * **params** - *(optional)* Query parameters to include in request URLs, as + a string, dictionary, or list of two-tuples. + * **headers** - *(optional)* Dictionary of HTTP headers to include when + sending requests. + * **cookies** - *(optional)* Dictionary of Cookie items to include when + sending requests. + * **verify** - *(optional)* SSL certificates (a.k.a CA bundle) used to + verify the identity of requested hosts. Either `True` (default CA bundle), + a path to an SSL certificate file, or `False` (disable verification). + * **cert** - *(optional)* An SSL certificate used by the requested host + to authenticate the client. Either a path to an SSL certificate file, or + two-tuple of (certificate file, key file), or a three-tuple of (certificate + file, key file, password). + * **proxies** - *(optional)* A dictionary mapping HTTP protocols to proxy + URLs. + * **timeout** - *(optional)* The timeout configuration to use when sending + requests. + * **pool_limits** - *(optional)* The connection pool configuration to use + when determining the maximum number of concurrently open HTTP connections. + * **max_redirects** - *(optional)* The maximum number of redirect responses + that should be followed. 
+ * **base_url** - *(optional)* A URL to use as the base when building + request URLs. + * **dispatch** - *(optional)* A dispatch class to use for sending requests + over the network. + * **app** - *(optional)* An ASGI application to send requests to, + rather than sending actual network requests. + * **backend** - *(optional)* A concurrency backend to use when issuing + async requests. Either 'auto', 'asyncio', 'trio', or a `ConcurrencyBackend` + instance. Defaults to 'auto', for autodetection. + * **trust_env** - *(optional)* Enables or disables usage of environment + variables for configuration. + """ + + def __init__( + self, + *, + auth: AuthTypes = None, + params: QueryParamTypes = None, + headers: HeaderTypes = None, + cookies: CookieTypes = None, + verify: VerifyTypes = True, + cert: CertTypes = None, + proxies: ProxiesTypes = None, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + pool_limits: PoolLimits = DEFAULT_POOL_LIMITS, + max_redirects: int = DEFAULT_MAX_REDIRECTS, + base_url: URLTypes = None, + dispatch: AsyncDispatcher = None, + app: typing.Callable = None, + backend: typing.Union[str, ConcurrencyBackend] = "auto", + trust_env: bool = True, + ): + super().__init__( + auth=auth, + params=params, + headers=headers, + cookies=cookies, + timeout=timeout, + max_redirects=max_redirects, + base_url=base_url, + trust_env=trust_env, + ) + + proxy_map = self.get_proxy_map(proxies, trust_env) + + self.dispatch = self.init_dispatch( + verify=verify, + cert=cert, + pool_limits=pool_limits, + dispatch=dispatch, + app=app, + backend=backend, + trust_env=trust_env, + ) + self.proxies: typing.Dict[str, Dispatcher] = { + key: self.init_proxy_dispatch( + proxy, + verify=verify, + cert=cert, + pool_limits=pool_limits, + backend=backend, + trust_env=trust_env, + ) + for key, proxy in proxy_map.items() + } + + def init_dispatch( + self, + verify: VerifyTypes = True, + cert: CertTypes = None, + pool_limits: PoolLimits = DEFAULT_POOL_LIMITS, + dispatch: AsyncDispatcher 
= None, + app: typing.Callable = None, + backend: typing.Union[str, ConcurrencyBackend] = "auto", + trust_env: bool = True, + ) -> Dispatcher: + raise NotImplementedError() + + def init_proxy_dispatch( + self, + proxy: Proxy, + verify: VerifyTypes = True, + cert: CertTypes = None, + pool_limits: PoolLimits = DEFAULT_POOL_LIMITS, + backend: typing.Union[str, ConcurrencyBackend] = "auto", + trust_env: bool = True, + ) -> Dispatcher: + raise NotImplementedError() + + def dispatcher_for_url(self, url: URL) -> Dispatcher: + """ + Returns the Dispatcher instance that should be used for a given URL. + This will either be the standard connection pool, or a proxy. + """ + if self.proxies: + is_default_port = (url.scheme == "http" and url.port == 80) or ( + url.scheme == "https" and url.port == 443 + ) + hostname = f"{url.host}:{url.port}" + proxy_keys = ( + f"{url.scheme}://{hostname}", + f"{url.scheme}://{url.host}" if is_default_port else None, + f"all://{hostname}", + f"all://{url.host}" if is_default_port else None, + url.scheme, + "all", + ) + for proxy_key in proxy_keys: + if proxy_key and proxy_key in self.proxies: + dispatcher = self.proxies[proxy_key] + return dispatcher + + return self.dispatch + + def request( + self, + method: str, + url: URLTypes, + *, + data: RequestData = None, + files: RequestFiles = None, + json: typing.Any = None, + params: QueryParamTypes = None, + headers: HeaderTypes = None, + cookies: CookieTypes = None, + auth: AuthTypes = None, + allow_redirects: bool = True, + timeout: typing.Union[TimeoutTypes, UnsetType] = UNSET, + ) -> Response: + request = self.build_request( + method=method, + url=url, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + ) + return self.send( + request, auth=auth, allow_redirects=allow_redirects, timeout=timeout, + ) + + def send( + self, + request: Request, + *, + stream: bool = False, + auth: AuthTypes = None, + allow_redirects: bool = True, + timeout: 
typing.Union[TimeoutTypes, UnsetType] = UNSET, + ) -> Response: + if request.url.scheme not in ("http", "https"): + raise InvalidURL('URL scheme must be "http" or "https".') + + timeout = self.timeout if isinstance(timeout, UnsetType) else Timeout(timeout) + + auth = self.build_auth(request, auth) + + response = self.send_handling_redirects( + request, auth=auth, timeout=timeout, allow_redirects=allow_redirects, + ) + + if not stream: + try: + response.read() + finally: + response.close() + + return response + + def send_handling_redirects( + self, + request: Request, + auth: Auth, + timeout: Timeout, + allow_redirects: bool = True, + history: typing.List[Response] = None, + ) -> Response: + if history is None: + history = [] + + while True: + if len(history) > self.max_redirects: + raise TooManyRedirects() + if request.url in (response.url for response in history): + raise RedirectLoop() + + response = self.send_handling_auth( + request, auth=auth, timeout=timeout, history=history + ) + response.history = list(history) + + if not response.is_redirect: + return response + + response.read() + request = self.build_redirect_request(request, response) + history = history + [response] + + if not allow_redirects: + response.call_next = functools.partial( + self.send_handling_redirects, + request=request, + auth=auth, + timeout=timeout, + allow_redirects=False, + history=history, + ) + return response + + def send_handling_auth( + self, + request: Request, + history: typing.List[Response], + auth: Auth, + timeout: Timeout, + ) -> Response: + if auth.requires_request_body: + request.read() + + auth_flow = auth.auth_flow(request) + request = next(auth_flow) + while True: + response = self.send_single_request(request, timeout) + try: + next_request = auth_flow.send(response) + except StopIteration: + return response + except BaseException as exc: + response.close() + raise exc from None + else: + response.history = list(history) + response.read() + request = next_request + 
history.append(response) + + def send_single_request(self, request: Request, timeout: Timeout,) -> Response: + """ + Sends a single request, without handling any redirections. + """ + + dispatcher = self.dispatcher_for_url(request.url) + + try: + response = dispatcher.send(request, timeout=timeout) + except HTTPError as exc: + # Add the original request to any HTTPError unless + # there'a already a request attached in the case of + # a ProxyError. + if exc.request is None: + exc.request = request + raise + + self.cookies.extract_cookies(response) + + status = f"{response.status_code} {response.reason_phrase}" + response_line = f"{response.http_version} {status}" + logger.debug(f'HTTP Request: {request.method} {request.url} "{response_line}"') + + return response + + def get( + self, + url: URLTypes, + *, + params: QueryParamTypes = None, + headers: HeaderTypes = None, + cookies: CookieTypes = None, + auth: AuthTypes = None, + allow_redirects: bool = True, + timeout: typing.Union[TimeoutTypes, UnsetType] = UNSET, + ) -> Response: + return self.request( + "GET", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + allow_redirects=allow_redirects, + timeout=timeout, + ) + + def options( + self, + url: URLTypes, + *, + params: QueryParamTypes = None, + headers: HeaderTypes = None, + cookies: CookieTypes = None, + auth: AuthTypes = None, + allow_redirects: bool = True, + timeout: typing.Union[TimeoutTypes, UnsetType] = UNSET, + ) -> Response: + return self.request( + "OPTIONS", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + allow_redirects=allow_redirects, + timeout=timeout, + ) + + def head( + self, + url: URLTypes, + *, + params: QueryParamTypes = None, + headers: HeaderTypes = None, + cookies: CookieTypes = None, + auth: AuthTypes = None, + allow_redirects: bool = False, # NOTE: Differs to usual default. 
+ timeout: typing.Union[TimeoutTypes, UnsetType] = UNSET, + ) -> Response: + return self.request( + "HEAD", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + allow_redirects=allow_redirects, + timeout=timeout, + ) + + def post( + self, + url: URLTypes, + *, + data: RequestData = None, + files: RequestFiles = None, + json: typing.Any = None, + params: QueryParamTypes = None, + headers: HeaderTypes = None, + cookies: CookieTypes = None, + auth: AuthTypes = None, + allow_redirects: bool = True, + timeout: typing.Union[TimeoutTypes, UnsetType] = UNSET, + ) -> Response: + return self.request( + "POST", + url, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + allow_redirects=allow_redirects, + timeout=timeout, + ) + + def put( + self, + url: URLTypes, + *, + data: RequestData = None, + files: RequestFiles = None, + json: typing.Any = None, + params: QueryParamTypes = None, + headers: HeaderTypes = None, + cookies: CookieTypes = None, + auth: AuthTypes = None, + allow_redirects: bool = True, + timeout: typing.Union[TimeoutTypes, UnsetType] = UNSET, + ) -> Response: + return self.request( + "PUT", + url, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + allow_redirects=allow_redirects, + timeout=timeout, + ) + + def patch( + self, + url: URLTypes, + *, + data: RequestData = None, + files: RequestFiles = None, + json: typing.Any = None, + params: QueryParamTypes = None, + headers: HeaderTypes = None, + cookies: CookieTypes = None, + auth: AuthTypes = None, + allow_redirects: bool = True, + timeout: typing.Union[TimeoutTypes, UnsetType] = UNSET, + ) -> Response: + return self.request( + "PATCH", + url, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + allow_redirects=allow_redirects, + timeout=timeout, + ) + + def delete( + self, + url: URLTypes, + *, + params: 
QueryParamTypes = None, + headers: HeaderTypes = None, + cookies: CookieTypes = None, + auth: AuthTypes = None, + allow_redirects: bool = True, + timeout: typing.Union[TimeoutTypes, UnsetType] = UNSET, + ) -> Response: + return self.request( + "DELETE", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + allow_redirects=allow_redirects, + timeout=timeout, + ) + + def close(self) -> None: + self.dispatch.close() + + def __enter__(self) -> "Client": + return self + + def __exit__( + self, + exc_type: typing.Type[BaseException] = None, + exc_value: BaseException = None, + traceback: TracebackType = None, + ) -> None: + self.close() + + class AsyncClient(BaseClient): """ An asynchronous HTTP client, with connection pooling, HTTP/2, redirects, @@ -924,6 +1410,28 @@ def __init__( self.timeout = timeout self.close_client = close_client + def __enter__(self) -> "Response": + assert isinstance(self.client, Client) + self.response = self.client.send( + request=self.request, + auth=self.auth, + allow_redirects=self.allow_redirects, + timeout=self.timeout, + stream=True, + ) + return self.response + + def __exit__( + self, + exc_type: typing.Type[BaseException] = None, + exc_value: BaseException = None, + traceback: TracebackType = None, + ) -> None: + assert isinstance(self.client, Client) + self.response.close() + if self.close_client: + self.client.close() + async def __aenter__(self) -> "Response": assert isinstance(self.client, AsyncClient) self.response = await self.client.send( @@ -945,7 +1453,3 @@ async def __aexit__( await self.response.aclose() if self.close_client: await self.client.aclose() - - -# For compatibility with 0.9.x. -Client = AsyncClient diff --git a/httpx/dispatch/base.py b/httpx/dispatch/base.py index 4879ea8296..178f878636 100644 --- a/httpx/dispatch/base.py +++ b/httpx/dispatch/base.py @@ -12,6 +12,18 @@ ) +class Dispatcher: + """ + Base class for Dispatcher classes, that handle sending the request. 
+ """ + + def send(self, request: Request, timeout: Timeout = None) -> Response: + raise NotImplementedError() # pragma: nocover + + def close(self) -> None: + pass # pragma: nocover + + class AsyncDispatcher: """ Base class for AsyncDispatcher classes, that handle sending the request. diff --git a/httpx/models.py b/httpx/models.py index 0cc13ed1ad..acaaf66d4a 100644 --- a/httpx/models.py +++ b/httpx/models.py @@ -648,6 +648,18 @@ def content(self) -> bytes: raise RequestNotRead() return self._content + def read(self) -> bytes: + """ + Read and return the request content. + """ + if not hasattr(self, "_content"): + self._content = b"".join([part for part in self.stream]) + # If a streaming request has been read entirely into memory, then + # we can replace the stream with a raw bytes implementation, + # to ensure that any non-replayable streams can still be used. + self.stream = ByteStream(self._content) + return self._content + async def aread(self) -> bytes: """ Read and return the request content. 
From 1d16f5a8c4c67dbefb00ab3978c93918fd67cdb5 Mon Sep 17 00:00:00 2001 From: Tom Christie Date: Tue, 7 Jan 2020 14:17:48 +0000 Subject: [PATCH 03/13] Top level API -> sync --- httpx/api.py | 38 +++++++++++++------------- httpx/client.py | 35 ++++++++++++------------ httpx/dispatch/__init__.py | 7 ----- httpx/dispatch/base.py | 2 +- httpx/dispatch/urllib3.py | 56 ++++++++++++++++++++++++++++++++++++++ setup.py | 1 + tests/test_api.py | 49 ++++++++++++++------------------- 7 files changed, 115 insertions(+), 73 deletions(-) create mode 100644 httpx/dispatch/urllib3.py diff --git a/httpx/api.py b/httpx/api.py index bf9d60320f..b030c5b524 100644 --- a/httpx/api.py +++ b/httpx/api.py @@ -1,7 +1,7 @@ import typing from .auth import AuthTypes -from .client import AsyncClient, StreamContextManager +from .client import Client, StreamContextManager from .config import DEFAULT_TIMEOUT_CONFIG, CertTypes, TimeoutTypes, VerifyTypes from .models import ( CookieTypes, @@ -15,7 +15,7 @@ ) -async def request( +def request( method: str, url: URLTypes, *, @@ -80,10 +80,10 @@ async def request( ``` """ - async with AsyncClient( + with Client( cert=cert, verify=verify, timeout=timeout, trust_env=trust_env, ) as client: - return await client.request( + return client.request( method=method, url=url, data=data, @@ -114,7 +114,7 @@ def stream( cert: CertTypes = None, trust_env: bool = True, ) -> StreamContextManager: - client = AsyncClient(cert=cert, verify=verify, trust_env=trust_env) + client = Client(cert=cert, verify=verify, trust_env=trust_env) request = Request( method=method, url=url, @@ -135,7 +135,7 @@ def stream( ) -async def get( +def get( url: URLTypes, *, params: QueryParamTypes = None, @@ -156,7 +156,7 @@ async def get( Note that the `data`, `files`, and `json` parameters are not available on this function, as `GET` requests should not include a request body. 
""" - return await request( + return request( "GET", url, params=params, @@ -171,7 +171,7 @@ async def get( ) -async def options( +def options( url: URLTypes, *, params: QueryParamTypes = None, @@ -192,7 +192,7 @@ async def options( Note that the `data`, `files`, and `json` parameters are not available on this function, as `OPTIONS` requests should not include a request body. """ - return await request( + return request( "OPTIONS", url, params=params, @@ -207,7 +207,7 @@ async def options( ) -async def head( +def head( url: URLTypes, *, params: QueryParamTypes = None, @@ -230,7 +230,7 @@ async def head( `HEAD` method also differs from the other cases in that `allow_redirects` defaults to `False`. """ - return await request( + return request( "HEAD", url, params=params, @@ -245,7 +245,7 @@ async def head( ) -async def post( +def post( url: URLTypes, *, data: RequestData = None, @@ -266,7 +266,7 @@ async def post( **Parameters**: See `httpx.request`. """ - return await request( + return request( "POST", url, data=data, @@ -284,7 +284,7 @@ async def post( ) -async def put( +def put( url: URLTypes, *, data: RequestData = None, @@ -305,7 +305,7 @@ async def put( **Parameters**: See `httpx.request`. """ - return await request( + return request( "PUT", url, data=data, @@ -323,7 +323,7 @@ async def put( ) -async def patch( +def patch( url: URLTypes, *, data: RequestData = None, @@ -344,7 +344,7 @@ async def patch( **Parameters**: See `httpx.request`. """ - return await request( + return request( "PATCH", url, data=data, @@ -362,7 +362,7 @@ async def patch( ) -async def delete( +def delete( url: URLTypes, *, params: QueryParamTypes = None, @@ -383,7 +383,7 @@ async def delete( Note that the `data`, `files`, and `json` parameters are not available on this function, as `DELETE` requests should not include a request body. 
""" - return await request( + return request( "DELETE", url, params=params, diff --git a/httpx/client.py b/httpx/client.py index a24f638e91..ed1dad7428 100644 --- a/httpx/client.py +++ b/httpx/client.py @@ -22,9 +22,10 @@ ) from .content_streams import ContentStream from .dispatch.asgi import ASGIDispatch -from .dispatch.base import AsyncDispatcher, Dispatcher +from .dispatch.base import AsyncDispatcher, SyncDispatcher from .dispatch.connection_pool import ConnectionPool from .dispatch.proxy_http import HTTPProxy +from .dispatch.urllib3 import URLLib3Dispatcher from .exceptions import ( HTTPError, InvalidURL, @@ -423,9 +424,6 @@ class Client(BaseClient): over the network. * **app** - *(optional)* An ASGI application to send requests to, rather than sending actual network requests. - * **backend** - *(optional)* A concurrency backend to use when issuing - async requests. Either 'auto', 'asyncio', 'trio', or a `ConcurrencyBackend` - instance. Defaults to 'auto', for autodetection. * **trust_env** - *(optional)* Enables or disables usage of environment variables for configuration. 
""" @@ -444,9 +442,8 @@ def __init__( pool_limits: PoolLimits = DEFAULT_POOL_LIMITS, max_redirects: int = DEFAULT_MAX_REDIRECTS, base_url: URLTypes = None, - dispatch: AsyncDispatcher = None, + dispatch: SyncDispatcher = None, app: typing.Callable = None, - backend: typing.Union[str, ConcurrencyBackend] = "auto", trust_env: bool = True, ): super().__init__( @@ -468,16 +465,14 @@ def __init__( pool_limits=pool_limits, dispatch=dispatch, app=app, - backend=backend, trust_env=trust_env, ) - self.proxies: typing.Dict[str, Dispatcher] = { + self.proxies: typing.Dict[str, SyncDispatcher] = { key: self.init_proxy_dispatch( proxy, verify=verify, cert=cert, pool_limits=pool_limits, - backend=backend, trust_env=trust_env, ) for key, proxy in proxy_map.items() @@ -488,12 +483,19 @@ def init_dispatch( verify: VerifyTypes = True, cert: CertTypes = None, pool_limits: PoolLimits = DEFAULT_POOL_LIMITS, - dispatch: AsyncDispatcher = None, + dispatch: SyncDispatcher = None, app: typing.Callable = None, - backend: typing.Union[str, ConcurrencyBackend] = "auto", trust_env: bool = True, - ) -> Dispatcher: - raise NotImplementedError() + ) -> SyncDispatcher: + if dispatch is not None: + return dispatch + + # if app is not None: + # return WSGIDispatch(app=app) + + return URLLib3Dispatcher( + verify=verify, cert=cert, pool_limits=pool_limits, trust_env=trust_env, + ) def init_proxy_dispatch( self, @@ -501,14 +503,13 @@ def init_proxy_dispatch( verify: VerifyTypes = True, cert: CertTypes = None, pool_limits: PoolLimits = DEFAULT_POOL_LIMITS, - backend: typing.Union[str, ConcurrencyBackend] = "auto", trust_env: bool = True, - ) -> Dispatcher: + ) -> SyncDispatcher: raise NotImplementedError() - def dispatcher_for_url(self, url: URL) -> Dispatcher: + def dispatcher_for_url(self, url: URL) -> SyncDispatcher: """ - Returns the Dispatcher instance that should be used for a given URL. + Returns the SyncDispatcher instance that should be used for a given URL. 
This will either be the standard connection pool, or a proxy. """ if self.proxies: diff --git a/httpx/dispatch/__init__.py b/httpx/dispatch/__init__.py index 2be8dd6d62..e69de29bb2 100644 --- a/httpx/dispatch/__init__.py +++ b/httpx/dispatch/__init__.py @@ -1,7 +0,0 @@ -""" -Dispatch classes handle the raw network connections and the implementation -details of making the HTTP request and receiving the response. -""" -from .asgi import ASGIDispatch - -__all__ = ["ASGIDispatch"] diff --git a/httpx/dispatch/base.py b/httpx/dispatch/base.py index 178f878636..ae25e4db4c 100644 --- a/httpx/dispatch/base.py +++ b/httpx/dispatch/base.py @@ -12,7 +12,7 @@ ) -class Dispatcher: +class SyncDispatcher: """ Base class for Dispatcher classes, that handle sending the request. """ diff --git a/httpx/dispatch/urllib3.py b/httpx/dispatch/urllib3.py new file mode 100644 index 0000000000..d3140985b7 --- /dev/null +++ b/httpx/dispatch/urllib3.py @@ -0,0 +1,56 @@ +import typing + +import urllib3 + +from ..config import ( + DEFAULT_POOL_LIMITS, + CertTypes, + PoolLimits, + SSLConfig, + Timeout, + VerifyTypes, +) +from ..content_streams import IteratorStream +from ..models import Request, Response +from .base import SyncDispatcher + + +class URLLib3Dispatcher(SyncDispatcher): + def __init__( + self, + *, + verify: VerifyTypes = True, + cert: CertTypes = None, + trust_env: bool = None, + pool_limits: PoolLimits = DEFAULT_POOL_LIMITS, + ): + ssl = SSLConfig(verify=verify, cert=cert, trust_env=trust_env, http2=False) + self.pool = urllib3.PoolManager(ssl_context=ssl.ssl_context) + + def send(self, request: Request, timeout: Timeout = None) -> Response: + conn = self.pool.urlopen( + method=request.method, + url=str(request.url), + headers=dict(request.headers), + redirect=False, + assert_same_host=False, + retries=0, + preload_content=False, + ) + + def response_bytes() -> typing.Iterator[bytes]: + for chunk in conn.stream(4096, decode_content=False): + yield chunk + + return Response( + 
status_code=conn.status, + http_version="HTTP/1.1", + headers=list(conn.headers.items()), + stream=IteratorStream( + iterator=response_bytes(), close_func=conn.release_conn + ), + request=request, + ) + + def close(self) -> None: + self.pool.clear() diff --git a/setup.py b/setup.py index d1f15a12da..2f313bdd9f 100644 --- a/setup.py +++ b/setup.py @@ -59,6 +59,7 @@ def get_packages(package): "idna==2.*", "rfc3986>=1.3,<2", "sniffio==1.*", + "urllib3==1.*", ], classifiers=[ "Development Status :: 3 - Alpha", diff --git a/tests/test_api.py b/tests/test_api.py index 1c658e91a0..4c1d611620 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -3,73 +3,64 @@ import httpx -@pytest.mark.asyncio -async def test_get(server): - response = await httpx.get(server.url) +def test_get(server): + response = httpx.get(server.url) assert response.status_code == 200 assert response.reason_phrase == "OK" assert response.text == "Hello, world!" assert response.http_version == "HTTP/1.1" -@pytest.mark.asyncio -async def test_post(server): - response = await httpx.post(server.url, data=b"Hello, world!") +def test_post(server): + response = httpx.post(server.url, data=b"Hello, world!") assert response.status_code == 200 assert response.reason_phrase == "OK" -@pytest.mark.asyncio -async def test_post_byte_iterator(server): - async def data(): +def test_post_byte_iterator(server): + def data(): yield b"Hello" yield b", " yield b"world!" 
- response = await httpx.post(server.url, data=data()) + response = httpx.post(server.url, data=data()) assert response.status_code == 200 assert response.reason_phrase == "OK" -@pytest.mark.asyncio -async def test_options(server): - response = await httpx.options(server.url) +def test_options(server): + response = httpx.options(server.url) assert response.status_code == 200 assert response.reason_phrase == "OK" -@pytest.mark.asyncio -async def test_head(server): - response = await httpx.head(server.url) +def test_head(server): + response = httpx.head(server.url) assert response.status_code == 200 assert response.reason_phrase == "OK" -@pytest.mark.asyncio -async def test_put(server): - response = await httpx.put(server.url, data=b"Hello, world!") +def test_put(server): + response = httpx.put(server.url, data=b"Hello, world!") assert response.status_code == 200 assert response.reason_phrase == "OK" -@pytest.mark.asyncio -async def test_patch(server): - response = await httpx.patch(server.url, data=b"Hello, world!") +def test_patch(server): + response = httpx.patch(server.url, data=b"Hello, world!") assert response.status_code == 200 assert response.reason_phrase == "OK" -@pytest.mark.asyncio -async def test_delete(server): - response = await httpx.delete(server.url) +def test_delete(server): + response = httpx.delete(server.url) assert response.status_code == 200 assert response.reason_phrase == "OK" -@pytest.mark.asyncio -async def test_stream(server): - async with httpx.stream("GET", server.url) as response: - await response.aread() +def test_stream(server): + with httpx.stream("GET", server.url) as response: + response.read() assert response.status_code == 200 assert response.reason_phrase == "OK" From 8412915eacb9c6ee5e743b129827291fbe797653 Mon Sep 17 00:00:00 2001 From: Tom Christie Date: Tue, 7 Jan 2020 14:28:11 +0000 Subject: [PATCH 04/13] Top level API -> sync --- httpx/dispatch/urllib3.py | 4 ++ setup.cfg | 2 +- tests/client/test_client.py | 125 
+++++++++++++--------------------- tests/models/test_requests.py | 21 ++++-- 4 files changed, 68 insertions(+), 84 deletions(-) diff --git a/httpx/dispatch/urllib3.py b/httpx/dispatch/urllib3.py index d3140985b7..82fa595a12 100644 --- a/httpx/dispatch/urllib3.py +++ b/httpx/dispatch/urllib3.py @@ -28,14 +28,18 @@ def __init__( self.pool = urllib3.PoolManager(ssl_context=ssl.ssl_context) def send(self, request: Request, timeout: Timeout = None) -> Response: + chunked = request.headers.get("Transfer-Encoding") == "chunked" + conn = self.pool.urlopen( method=request.method, url=str(request.url), headers=dict(request.headers), + body=request.stream, redirect=False, assert_same_host=False, retries=0, preload_content=False, + chunked=chunked, ) def response_bytes() -> typing.Iterator[bytes]: diff --git a/setup.cfg b/setup.cfg index b69228a5a3..5b2e962ff7 100644 --- a/setup.cfg +++ b/setup.cfg @@ -14,7 +14,7 @@ combine_as_imports = True force_grid_wrap = 0 include_trailing_comma = True known_first_party = httpx,tests -known_third_party = brotli,certifi,chardet,cryptography,h11,h2,hstspreload,pytest,rfc3986,setuptools,sniffio,trio,trustme,uvicorn +known_third_party = brotli,certifi,chardet,cryptography,h11,h2,hstspreload,pytest,rfc3986,setuptools,sniffio,trio,trustme,urllib3,uvicorn line_length = 88 multi_line_output = 3 diff --git a/tests/client/test_client.py b/tests/client/test_client.py index 7c87722404..71687e64b6 100644 --- a/tests/client/test_client.py +++ b/tests/client/test_client.py @@ -5,11 +5,10 @@ import httpx -@pytest.mark.asyncio -async def test_get(server): +def test_get(server): url = server.url - async with httpx.AsyncClient() as http: - response = await http.get(url) + with httpx.Client() as http: + response = http.get(url) assert response.status_code == 200 assert response.url == url assert response.content == b"Hello, world!" 
@@ -23,15 +22,14 @@ async def test_get(server): assert response.elapsed > timedelta(0) -@pytest.mark.asyncio -async def test_build_request(server): +def test_build_request(server): url = server.url.copy_with(path="/echo_headers") headers = {"Custom-header": "value"} - async with httpx.AsyncClient() as client: + with httpx.Client() as client: request = client.build_request("GET", url) request.headers.update(headers) - response = await client.send(request) + response = client.send(request) assert response.status_code == 200 assert response.url == url @@ -39,62 +37,56 @@ async def test_build_request(server): assert response.json()["Custom-header"] == "value" -@pytest.mark.asyncio -async def test_post(server): - async with httpx.AsyncClient() as client: - response = await client.post(server.url, data=b"Hello, world!") +def test_post(server): + with httpx.Client() as client: + response = client.post(server.url, data=b"Hello, world!") assert response.status_code == 200 assert response.reason_phrase == "OK" -@pytest.mark.asyncio -async def test_post_json(server): - async with httpx.AsyncClient() as client: - response = await client.post(server.url, json={"text": "Hello, world!"}) +def test_post_json(server): + with httpx.Client() as client: + response = client.post(server.url, json={"text": "Hello, world!"}) assert response.status_code == 200 assert response.reason_phrase == "OK" -@pytest.mark.asyncio -async def test_stream_response(server): - async with httpx.AsyncClient() as client: - async with client.stream("GET", server.url) as response: - content = await response.aread() +def test_stream_response(server): + with httpx.Client() as client: + with client.stream("GET", server.url) as response: + content = response.read() assert response.status_code == 200 assert content == b"Hello, world!" 
-@pytest.mark.asyncio -async def test_stream_iterator(server): +def test_stream_iterator(server): body = b"" - async with httpx.AsyncClient() as client: - async with client.stream("GET", server.url) as response: - async for chunk in response.aiter_bytes(): + with httpx.Client() as client: + with client.stream("GET", server.url) as response: + for chunk in response.iter_bytes(): body += chunk assert response.status_code == 200 assert body == b"Hello, world!" -@pytest.mark.asyncio -async def test_raw_iterator(server): +def test_raw_iterator(server): body = b"" - async with httpx.AsyncClient() as client: - async with client.stream("GET", server.url) as response: - async for chunk in response.aiter_raw(): + with httpx.Client() as client: + with client.stream("GET", server.url) as response: + for chunk in response.iter_raw(): body += chunk assert response.status_code == 200 assert body == b"Hello, world!" -@pytest.mark.asyncio -async def test_raise_for_status(server): - async with httpx.AsyncClient() as client: +def test_raise_for_status(server): + with httpx.Client() as client: for status_code in (200, 400, 404, 500, 505): - response = await client.request( + response = client.request( "GET", server.url.copy_with(path="/status/{}".format(status_code)) ) if 400 <= status_code < 600: @@ -105,78 +97,59 @@ async def test_raise_for_status(server): assert response.raise_for_status() is None -@pytest.mark.asyncio -async def test_options(server): - async with httpx.AsyncClient() as client: - response = await client.options(server.url) +def test_options(server): + with httpx.Client() as client: + response = client.options(server.url) assert response.status_code == 200 assert response.reason_phrase == "OK" -@pytest.mark.asyncio -async def test_head(server): - async with httpx.AsyncClient() as client: - response = await client.head(server.url) +def test_head(server): + with httpx.Client() as client: + response = client.head(server.url) assert response.status_code == 200 assert 
response.reason_phrase == "OK" -@pytest.mark.asyncio -async def test_put(server): - async with httpx.AsyncClient() as client: - response = await client.put(server.url, data=b"Hello, world!") +def test_put(server): + with httpx.Client() as client: + response = client.put(server.url, data=b"Hello, world!") assert response.status_code == 200 assert response.reason_phrase == "OK" -@pytest.mark.asyncio -async def test_patch(server): - async with httpx.AsyncClient() as client: - response = await client.patch(server.url, data=b"Hello, world!") +def test_patch(server): + with httpx.Client() as client: + response = client.patch(server.url, data=b"Hello, world!") assert response.status_code == 200 assert response.reason_phrase == "OK" -@pytest.mark.asyncio -async def test_delete(server): - async with httpx.AsyncClient() as client: - response = await client.delete(server.url) +def test_delete(server): + with httpx.Client() as client: + response = client.delete(server.url) assert response.status_code == 200 assert response.reason_phrase == "OK" -@pytest.mark.asyncio -async def test_base_url(server): +def test_base_url(server): base_url = server.url - async with httpx.AsyncClient(base_url=base_url) as client: - response = await client.get("/") + with httpx.Client(base_url=base_url) as client: + response = client.get("/") assert response.status_code == 200 assert response.url == base_url -@pytest.mark.asyncio -async def test_uds(uds_server): - url = uds_server.url - uds = uds_server.config.uds - assert uds is not None - async with httpx.AsyncClient(uds=uds) as client: - response = await client.get(url) - assert response.status_code == 200 - assert response.text == "Hello, world!" 
- assert response.encoding == "iso-8859-1" - - def test_merge_url(): - client = httpx.AsyncClient(base_url="https://www.paypal.com/") + client = httpx.Client(base_url="https://www.paypal.com/") url = client.merge_url("http://www.paypal.com") assert url.scheme == "https" assert url.is_ssl -@pytest.mark.asyncio -async def test_elapsed_delay(server): +def test_elapsed_delay(server): url = server.url.copy_with(path="/slow_response/100") - async with httpx.AsyncClient() as client: - response = await client.get(url) + with httpx.Client() as client: + response = client.get(url) assert response.elapsed.total_seconds() > 0.0 diff --git a/tests/models/test_requests.py b/tests/models/test_requests.py index 0c7269e219..c72e0af964 100644 --- a/tests/models/test_requests.py +++ b/tests/models/test_requests.py @@ -18,26 +18,33 @@ def test_content_length_header(): assert request.headers["Content-Length"] == "8" -@pytest.mark.asyncio -async def test_url_encoded_data(): +def test_url_encoded_data(): request = httpx.Request("POST", "http://example.org", data={"test": "123"}) - await request.aread() + request.read() assert request.headers["Content-Type"] == "application/x-www-form-urlencoded" assert request.content == b"test=123" -@pytest.mark.asyncio -async def test_json_encoded_data(): +def test_json_encoded_data(): request = httpx.Request("POST", "http://example.org", json={"test": 123}) - await request.aread() + request.read() assert request.headers["Content-Type"] == "application/json" assert request.content == b'{"test": 123}' +def test_read_and_stream_data(): + # Ensure a request may still be streamed if it has been read. + # Needed for cases such as authentication classes that read the request body. 
+ request = httpx.Request("POST", "http://example.org", json={"test": 123}) + request.read() + content = b"".join([part for part in request.stream]) + assert content == request.content + + @pytest.mark.asyncio -async def test_read_and_stream_data(): +async def test_aread_and_stream_data(): # Ensure a request may still be streamed if it has been read. # Needed for cases such as authentication classes that read the request body. request = httpx.Request("POST", "http://example.org", json={"test": 123}) From f1c02057310cf203c8e97b8f892f3a6d178eb02b Mon Sep 17 00:00:00 2001 From: Tom Christie Date: Tue, 7 Jan 2020 14:59:15 +0000 Subject: [PATCH 05/13] Add WSGI support, drop deprecated imports --- httpx/__init__.py | 9 +- httpx/client.py | 5 +- httpx/config.py | 3 - httpx/dispatch/asgi.py | 3 - httpx/dispatch/wsgi.py | 156 ++++++++++++++++++++++++++++++ tests/client/test_proxies.py | 1 - tests/dispatch/test_proxy_http.py | 13 +-- tests/test_wsgi.py | 95 ++++++++++++++++++ 8 files changed, 265 insertions(+), 20 deletions(-) create mode 100644 httpx/dispatch/wsgi.py create mode 100644 tests/test_wsgi.py diff --git a/httpx/__init__.py b/httpx/__init__.py index 80c29da713..4a133e8efd 100644 --- a/httpx/__init__.py +++ b/httpx/__init__.py @@ -2,9 +2,9 @@ from .api import delete, get, head, options, patch, post, put, request, stream from .auth import Auth, BasicAuth, DigestAuth from .client import AsyncClient, Client -from .config import TimeoutConfig # For 0.8 backwards compat. from .config import PoolLimits, Proxy, Timeout -from .dispatch.proxy_http import HTTPProxy, HTTPProxyMode +from .dispatch.asgi import ASGIDispatch +from .dispatch.wsgi import WSGIDispatch from .exceptions import ( ConnectionClosed, ConnectTimeout, @@ -45,6 +45,7 @@ "request", "stream", "codes", + "ASGIDispatch", "AsyncClient", "Auth", "BasicAuth", @@ -53,9 +54,6 @@ "PoolLimits", "Proxy", "Timeout", - "TimeoutConfig", # For 0.8 backwards compat. 
- "HTTPProxy", - "HTTPProxyMode", # For 0.8 backwards compat. "ConnectTimeout", "CookieConflict", "ConnectionClosed", @@ -84,4 +82,5 @@ "TimeoutException", "Response", "DigestAuth", + "WSGIDispatch", ] diff --git a/httpx/client.py b/httpx/client.py index ed1dad7428..696d408431 100644 --- a/httpx/client.py +++ b/httpx/client.py @@ -26,6 +26,7 @@ from .dispatch.connection_pool import ConnectionPool from .dispatch.proxy_http import HTTPProxy from .dispatch.urllib3 import URLLib3Dispatcher +from .dispatch.wsgi import WSGIDispatch from .exceptions import ( HTTPError, InvalidURL, @@ -490,8 +491,8 @@ def init_dispatch( if dispatch is not None: return dispatch - # if app is not None: - # return WSGIDispatch(app=app) + if app is not None: + return WSGIDispatch(app=app) return URLLib3Dispatcher( verify=verify, cert=cert, pool_limits=pool_limits, trust_env=trust_env, diff --git a/httpx/config.py b/httpx/config.py index ee82a3822d..ef413bde0e 100644 --- a/httpx/config.py +++ b/httpx/config.py @@ -330,9 +330,6 @@ def __repr__(self) -> str: ) -TimeoutConfig = Timeout # Synonym for backwards compat - - DEFAULT_TIMEOUT_CONFIG = Timeout(timeout=5.0) DEFAULT_POOL_LIMITS = PoolLimits(soft_limit=10, hard_limit=100) DEFAULT_MAX_REDIRECTS = 20 diff --git a/httpx/dispatch/asgi.py b/httpx/dispatch/asgi.py index 335540b6ef..a0b55e2b16 100644 --- a/httpx/dispatch/asgi.py +++ b/httpx/dispatch/asgi.py @@ -9,10 +9,7 @@ class ASGIDispatch(AsyncDispatcher): """ A custom AsyncDispatcher that handles sending requests directly to an ASGI app. - The simplest way to use this functionality is to use the `app` argument. 
-    This will automatically infer if 'app' is a WSGI or an ASGI application,
-    and will setup an appropriate dispatch class:
 
     ```
     client = httpx.AsyncClient(app=app)
diff --git a/httpx/dispatch/wsgi.py b/httpx/dispatch/wsgi.py
new file mode 100644
index 0000000000..96ceacee14
--- /dev/null
+++ b/httpx/dispatch/wsgi.py
@@ -0,0 +1,156 @@
+import io
+import typing
+
+from ..config import TimeoutTypes
+from ..content_streams import IteratorStream
+from ..models import Request, Response
+from .base import SyncDispatcher
+
+
+class WSGIDispatch(SyncDispatcher):
+    """
+    A custom SyncDispatcher that handles sending requests directly to a WSGI app.
+    The simplest way to use this functionality is to use the `app` argument.
+
+    ```
+    client = httpx.Client(app=app)
+    ```
+
+    Alternatively, you can setup the dispatch instance explicitly.
+    This allows you to include any additional configuration arguments specific
+    to the WSGIDispatch class:
+
+    ```
+    dispatch = httpx.WSGIDispatch(
+        app=app,
+        script_name="/submount",
+        remote_addr="1.2.3.4"
+    )
+    client = httpx.Client(dispatch=dispatch)
+    ```
+
+    Arguments:
+
+    * `app` - The WSGI application.
+    * `raise_app_exceptions` - Boolean indicating if exceptions in the application
+    should be raised. Defaults to `True`. Can be set to `False` for use cases
+    such as testing the content of a client 500 response.
+    * `script_name` - The root path on which the WSGI application should be mounted.
+    * `remote_addr` - A string indicating the client IP of incoming requests.
+ ``` + """ + + def __init__( + self, + app: typing.Callable, + raise_app_exceptions: bool = True, + script_name: str = "", + remote_addr: str = "127.0.0.1", + ) -> None: + self.app = app + self.raise_app_exceptions = raise_app_exceptions + self.script_name = script_name + self.remote_addr = remote_addr + + def send(self, request: Request, timeout: TimeoutTypes = None) -> Response: + environ = { + "wsgi.version": (1, 0), + "wsgi.url_scheme": request.url.scheme, + "wsgi.input": BodyStream(request.stream.__iter__()), + "wsgi.errors": io.BytesIO(), + "wsgi.multithread": True, + "wsgi.multiprocess": False, + "wsgi.run_once": False, + "REQUEST_METHOD": request.method, + "SCRIPT_NAME": self.script_name, + "PATH_INFO": request.url.path, + "QUERY_STRING": request.url.query, + "SERVER_NAME": request.url.host, + "SERVER_PORT": str(request.url.port), + "REMOTE_ADDR": self.remote_addr, + } + for key, value in request.headers.items(): + key = key.upper().replace("-", "_") + if key not in ("CONTENT_TYPE", "CONTENT_LENGTH"): + key = "HTTP_" + key + environ[key] = value + + seen_status = None + seen_response_headers = None + seen_exc_info = None + + def start_response( + status: str, response_headers: list, exc_info: typing.Any = None + ) -> None: + nonlocal seen_status, seen_response_headers, seen_exc_info + seen_status = status + seen_response_headers = response_headers + seen_exc_info = exc_info + + result = self.app(environ, start_response) + + assert seen_status is not None + assert seen_response_headers is not None + if seen_exc_info and self.raise_app_exceptions: + raise seen_exc_info[1] + + return Response( + status_code=int(seen_status.split()[0]), + http_version="HTTP/1.1", + headers=seen_response_headers, + stream=IteratorStream(chunk for chunk in result), + request=request, + ) + + +class BodyStream(io.RawIOBase): + def __init__(self, iterator: typing.Iterator[bytes]) -> None: + self._iterator = iterator + self._buffer = b"" + self._closed = False + + def read(self, 
size: int = -1) -> bytes: + if self._closed: + return b"" + + if size == -1: + return self.readall() + + try: + while len(self._buffer) < size: + self._buffer += next(self._iterator) + except StopIteration: + self._closed = True + return self._buffer + + output = self._buffer[:size] + self._buffer = self._buffer[size:] + return output + + def readall(self) -> bytes: + if self._closed: + raise OSError("Stream closed") # pragma: nocover + + for chunk in self._iterator: + self._buffer += chunk + + self._closed = True + return self._buffer + + def readinto(self, b: bytearray) -> typing.Optional[int]: # pragma: nocover + output = self.read(len(b)) + count = len(output) + b[:count] = output + return count + + def write(self, b: bytes) -> int: + raise OSError("Operation not supported") # pragma: nocover + + def fileno(self) -> int: + raise OSError("Operation not supported") # pragma: nocover + + def seek(self, offset: int, whence: int = 0) -> int: + raise OSError("Operation not supported") # pragma: nocover + + def truncate(self, size: int = None) -> int: + raise OSError("Operation not supported") # pragma: nocover diff --git a/tests/client/test_proxies.py b/tests/client/test_proxies.py index b2e704747a..a1008739c9 100644 --- a/tests/client/test_proxies.py +++ b/tests/client/test_proxies.py @@ -81,7 +81,6 @@ def test_dispatcher_for_request(url, proxies, expected): if expected is None: assert dispatcher is client.dispatch else: - assert isinstance(dispatcher, httpx.HTTPProxy) assert dispatcher.proxy_url == expected diff --git a/tests/dispatch/test_proxy_http.py b/tests/dispatch/test_proxy_http.py index 5acb6579d0..bdb4d6bf8b 100644 --- a/tests/dispatch/test_proxy_http.py +++ b/tests/dispatch/test_proxy_http.py @@ -1,6 +1,7 @@ import pytest import httpx +from httpx.dispatch.proxy_http import HTTPProxy from .utils import MockRawSocketBackend @@ -21,7 +22,7 @@ async def test_proxy_tunnel_success(): ] ), ) - async with httpx.HTTPProxy( + async with HTTPProxy( 
proxy_url="http://127.0.0.1:8000", backend=raw_io, proxy_mode="TUNNEL_ONLY", ) as proxy: response = await proxy.request("GET", "http://example.com") @@ -57,7 +58,7 @@ async def test_proxy_tunnel_non_2xx_response(status_code): ) with pytest.raises(httpx.ProxyError) as e: - async with httpx.HTTPProxy( + async with HTTPProxy( proxy_url="http://127.0.0.1:8000", backend=raw_io, proxy_mode="TUNNEL_ONLY", ) as proxy: await proxy.request("GET", "http://example.com") @@ -107,7 +108,7 @@ async def test_proxy_tunnel_start_tls(): ] ), ) - async with httpx.HTTPProxy( + async with HTTPProxy( proxy_url="http://127.0.0.1:8000", backend=raw_io, proxy_mode="TUNNEL_ONLY", ) as proxy: resp = await proxy.request("GET", "https://example.com") @@ -157,7 +158,7 @@ async def test_proxy_forwarding(proxy_mode): ] ), ) - async with httpx.HTTPProxy( + async with HTTPProxy( proxy_url="http://127.0.0.1:8000", backend=raw_io, proxy_mode=proxy_mode, @@ -186,14 +187,14 @@ async def test_proxy_forwarding(proxy_mode): def test_proxy_url_with_username_and_password(): - proxy = httpx.HTTPProxy("http://user:password@example.com:1080") + proxy = HTTPProxy("http://user:password@example.com:1080") assert proxy.proxy_url == "http://example.com:1080" assert proxy.proxy_headers["Proxy-Authorization"] == "Basic dXNlcjpwYXNzd29yZA==" def test_proxy_repr(): - proxy = httpx.HTTPProxy( + proxy = HTTPProxy( "http://127.0.0.1:1080", proxy_headers={"Custom": "Header"}, proxy_mode="DEFAULT", diff --git a/tests/test_wsgi.py b/tests/test_wsgi.py new file mode 100644 index 0000000000..0dcf99535d --- /dev/null +++ b/tests/test_wsgi.py @@ -0,0 +1,95 @@ +import sys + +import pytest + +import httpx + + +def hello_world(environ, start_response): + status = "200 OK" + output = b"Hello, World!" 
+ + response_headers = [ + ("Content-type", "text/plain"), + ("Content-Length", str(len(output))), + ] + + start_response(status, response_headers) + + return [output] + + +def echo_body(environ, start_response): + status = "200 OK" + output = environ["wsgi.input"].read() + + response_headers = [ + ("Content-type", "text/plain"), + ("Content-Length", str(len(output))), + ] + + start_response(status, response_headers) + + return [output] + + +def echo_body_with_response_stream(environ, start_response): + status = "200 OK" + + response_headers = [("Content-Type", "text/plain")] + + start_response(status, response_headers) + + def output_generator(f): + while True: + output = f.read(2) + if not output: + break + yield output + + return output_generator(f=environ["wsgi.input"]) + + +def raise_exc(environ, start_response): + status = "500 Server Error" + output = b"Nope!" + + response_headers = [ + ("Content-type", "text/plain"), + ("Content-Length", str(len(output))), + ] + + try: + raise ValueError() + except ValueError: + exc_info = sys.exc_info() + start_response(status, response_headers, exc_info=exc_info) + + return [output] + + +def test_wsgi(): + client = httpx.Client(app=hello_world) + response = client.get("http://www.example.org/") + assert response.status_code == 200 + assert response.text == "Hello, World!" 
+ + +def test_wsgi_upload(): + client = httpx.Client(app=echo_body) + response = client.post("http://www.example.org/", data=b"example") + assert response.status_code == 200 + assert response.text == "example" + + +def test_wsgi_upload_with_response_stream(): + client = httpx.Client(app=echo_body_with_response_stream) + response = client.post("http://www.example.org/", data=b"example") + assert response.status_code == 200 + assert response.text == "example" + + +def test_wsgi_exc(): + client = httpx.Client(app=raise_exc) + with pytest.raises(ValueError): + client.get("http://www.example.org/") From 90ff641823643101b43b571e363083fcddcb0f52 Mon Sep 17 00:00:00 2001 From: Tom Christie Date: Tue, 7 Jan 2020 15:28:53 +0000 Subject: [PATCH 06/13] Wire up timeouts to urllib3 --- httpx/dispatch/urllib3.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/httpx/dispatch/urllib3.py b/httpx/dispatch/urllib3.py index 82fa595a12..c27b81cf71 100644 --- a/httpx/dispatch/urllib3.py +++ b/httpx/dispatch/urllib3.py @@ -25,9 +25,13 @@ def __init__( pool_limits: PoolLimits = DEFAULT_POOL_LIMITS, ): ssl = SSLConfig(verify=verify, cert=cert, trust_env=trust_env, http2=False) - self.pool = urllib3.PoolManager(ssl_context=ssl.ssl_context) + self.pool = urllib3.PoolManager(ssl_context=ssl.ssl_context, block=True) def send(self, request: Request, timeout: Timeout = None) -> Response: + timeout = Timeout() if timeout is None else timeout + urllib3_timeout = urllib3.util.Timeout( + connect=timeout.connect_timeout, read=timeout.read_timeout + ) chunked = request.headers.get("Transfer-Encoding") == "chunked" conn = self.pool.urlopen( @@ -40,6 +44,8 @@ def send(self, request: Request, timeout: Timeout = None) -> Response: retries=0, preload_content=False, chunked=chunked, + timeout=urllib3_timeout, + pool_timeout=timeout.pool_timeout, ) def response_bytes() -> typing.Iterator[bytes]: From 14d52783498575aead038de715e1d660830e811d Mon Sep 17 00:00:00 2001 From: Tom Christie 
Date: Tue, 7 Jan 2020 15:47:30 +0000 Subject: [PATCH 07/13] Wire up pool_limits --- httpx/dispatch/urllib3.py | 26 +++++++++++++++++++++++++- 1 file changed, 25 insertions(+), 1 deletion(-) diff --git a/httpx/dispatch/urllib3.py b/httpx/dispatch/urllib3.py index c27b81cf71..2f01baf52f 100644 --- a/httpx/dispatch/urllib3.py +++ b/httpx/dispatch/urllib3.py @@ -1,3 +1,4 @@ +import math import typing import urllib3 @@ -25,7 +26,30 @@ def __init__( pool_limits: PoolLimits = DEFAULT_POOL_LIMITS, ): ssl = SSLConfig(verify=verify, cert=cert, trust_env=trust_env, http2=False) - self.pool = urllib3.PoolManager(ssl_context=ssl.ssl_context, block=True) + hard_limit = pool_limits.hard_limit + soft_limit = pool_limits.soft_limit + + # Our connection pool configuration doesn't quite match up with urllib3's + # controls, but we can put sensible mappings in place: + if hard_limit is None: + block = False + if soft_limit is None: + num_pools = 1000 + maxsize = 1000 + else: + num_pools = int(math.sqrt(soft_limit)) + maxsize = int(math.sqrt(soft_limit)) + else: + block = True + num_pools = int(math.sqrt(hard_limit)) + maxsize = int(math.sqrt(hard_limit)) + + self.pool = urllib3.PoolManager( + ssl_context=ssl.ssl_context, + num_pools=num_pools, + maxsize=maxsize, + block=block, + ) def send(self, request: Request, timeout: Timeout = None) -> Response: timeout = Timeout() if timeout is None else timeout From 1e48a7b5c25cbaf6fadcc2304420528f3a6229fe Mon Sep 17 00:00:00 2001 From: Tom Christie Date: Tue, 7 Jan 2020 16:10:01 +0000 Subject: [PATCH 08/13] Add urllib3 proxy support --- httpx/client.py | 8 +++++++- httpx/dispatch/urllib3.py | 37 ++++++++++++++++++++++++++++++++++--- 2 files changed, 41 insertions(+), 4 deletions(-) diff --git a/httpx/client.py b/httpx/client.py index 696d408431..38525d93fa 100644 --- a/httpx/client.py +++ b/httpx/client.py @@ -506,7 +506,13 @@ def init_proxy_dispatch( pool_limits: PoolLimits = DEFAULT_POOL_LIMITS, trust_env: bool = True, ) -> SyncDispatcher: - 
raise NotImplementedError() + return URLLib3Dispatcher( + proxy=proxy, + verify=verify, + cert=cert, + pool_limits=pool_limits, + trust_env=trust_env, + ) def dispatcher_for_url(self, url: URL) -> SyncDispatcher: """ diff --git a/httpx/dispatch/urllib3.py b/httpx/dispatch/urllib3.py index 2f01baf52f..d51f49e061 100644 --- a/httpx/dispatch/urllib3.py +++ b/httpx/dispatch/urllib3.py @@ -1,4 +1,5 @@ import math +import ssl import typing import urllib3 @@ -7,6 +8,7 @@ DEFAULT_POOL_LIMITS, CertTypes, PoolLimits, + Proxy, SSLConfig, Timeout, VerifyTypes, @@ -20,12 +22,15 @@ class URLLib3Dispatcher(SyncDispatcher): def __init__( self, *, + proxy: Proxy = None, verify: VerifyTypes = True, cert: CertTypes = None, trust_env: bool = None, pool_limits: PoolLimits = DEFAULT_POOL_LIMITS, ): - ssl = SSLConfig(verify=verify, cert=cert, trust_env=trust_env, http2=False) + ssl_config = SSLConfig( + verify=verify, cert=cert, trust_env=trust_env, http2=False + ) hard_limit = pool_limits.hard_limit soft_limit = pool_limits.soft_limit @@ -44,13 +49,39 @@ def __init__( num_pools = int(math.sqrt(hard_limit)) maxsize = int(math.sqrt(hard_limit)) - self.pool = urllib3.PoolManager( - ssl_context=ssl.ssl_context, + self.pool = self.init_pool_manager( + proxy=proxy, + ssl_context=ssl_config.ssl_context, num_pools=num_pools, maxsize=maxsize, block=block, ) + def init_pool_manager( + self, + proxy: typing.Optional[Proxy], + ssl_context: ssl.SSLContext, + num_pools: int, + maxsize: int, + block: bool, + ) -> typing.Union[urllib3.PoolManager, urllib3.ProxyManager]: + if proxy is None: + return urllib3.PoolManager( + ssl_context=ssl_context, + num_pools=num_pools, + maxsize=maxsize, + block=block, + ) + else: + return urllib3.ProxyManager( + proxy_url=proxy.url, + proxy_headers=dict(proxy.headers), + ssl_context=ssl_context, + num_pools=num_pools, + maxsize=maxsize, + block=block, + ) + def send(self, request: Request, timeout: Timeout = None) -> Response: timeout = Timeout() if timeout is None 
else timeout urllib3_timeout = urllib3.util.Timeout( From 2ad83b0408adac63f6343d5074b8644f011ed79c Mon Sep 17 00:00:00 2001 From: Tom Christie Date: Tue, 7 Jan 2020 16:14:38 +0000 Subject: [PATCH 09/13] Pull #734 into sync Client --- httpx/client.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/httpx/client.py b/httpx/client.py index b297b64647..051826e09d 100644 --- a/httpx/client.py +++ b/httpx/client.py @@ -610,7 +610,8 @@ def send_handling_redirects( while True: if len(history) > self.max_redirects: raise TooManyRedirects() - if request.url in (response.url for response in history): + urls = ((resp.request.method, resp.url) for resp in history) + if (request.method, request.url) in urls: raise RedirectLoop() response = self.send_handling_auth( From a0ba9641bbde8b147488a4aa19773ed2763fbbed Mon Sep 17 00:00:00 2001 From: Tom Christie Date: Wed, 8 Jan 2020 09:02:48 +0000 Subject: [PATCH 10/13] Update AsyncClient docstring --- httpx/client.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/httpx/client.py b/httpx/client.py index 051826e09d..9eec6641bb 100644 --- a/httpx/client.py +++ b/httpx/client.py @@ -884,8 +884,8 @@ class AsyncClient(BaseClient): Usage: ```python - >>> client = httpx.AsyncClient() - >>> response = client.get('https://example.org') + >>> async with httpx.AsyncClient() as client: + >>> response = await client.get('https://example.org') ``` **Parameters:** From 58f90264ff0d823f08e8ec6b3e51022a5c352866 Mon Sep 17 00:00:00 2001 From: Tom Christie Date: Wed, 8 Jan 2020 09:04:09 +0000 Subject: [PATCH 11/13] Simpler WSGI implementation --- httpx/dispatch/wsgi.py | 55 +----------------------------------------- 1 file changed, 1 insertion(+), 54 deletions(-) diff --git a/httpx/dispatch/wsgi.py b/httpx/dispatch/wsgi.py index 96ceacee14..2a90187c60 100644 --- a/httpx/dispatch/wsgi.py +++ b/httpx/dispatch/wsgi.py @@ -56,7 +56,7 @@ def send(self, request: Request, timeout: TimeoutTypes = None) -> Response: environ 
= { "wsgi.version": (1, 0), "wsgi.url_scheme": request.url.scheme, - "wsgi.input": BodyStream(request.stream.__iter__()), + "wsgi.input": io.BytesIO(request.read()), "wsgi.errors": io.BytesIO(), "wsgi.multithread": True, "wsgi.multiprocess": False, @@ -101,56 +101,3 @@ def start_response( stream=IteratorStream(chunk for chunk in result), request=request, ) - - -class BodyStream(io.RawIOBase): - def __init__(self, iterator: typing.Iterator[bytes]) -> None: - self._iterator = iterator - self._buffer = b"" - self._closed = False - - def read(self, size: int = -1) -> bytes: - if self._closed: - return b"" - - if size == -1: - return self.readall() - - try: - while len(self._buffer) < size: - self._buffer += next(self._iterator) - except StopIteration: - self._closed = True - return self._buffer - - output = self._buffer[:size] - self._buffer = self._buffer[size:] - return output - - def readall(self) -> bytes: - if self._closed: - raise OSError("Stream closed") # pragma: nocover - - for chunk in self._iterator: - self._buffer += chunk - - self._closed = True - return self._buffer - - def readinto(self, b: bytearray) -> typing.Optional[int]: # pragma: nocover - output = self.read(len(b)) - count = len(output) - b[:count] = output - return count - - def write(self, b: bytes) -> int: - raise OSError("Operation not supported") # pragma: nocover - - def fileno(self) -> int: - raise OSError("Operation not supported") # pragma: nocover - - def seek(self, offset: int, whence: int = 0) -> int: - raise OSError("Operation not supported") # pragma: nocover - - def truncate(self, size: int = None) -> int: - raise OSError("Operation not supported") # pragma: nocover From cbde50e9eaa4dd36675fccfa1f9bdf59ea4e3851 Mon Sep 17 00:00:00 2001 From: Tom Christie Date: Wed, 8 Jan 2020 09:43:21 +0000 Subject: [PATCH 12/13] Set body=None when no content --- httpx/dispatch/urllib3.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/httpx/dispatch/urllib3.py 
b/httpx/dispatch/urllib3.py index d51f49e061..0cbd5eaa73 100644 --- a/httpx/dispatch/urllib3.py +++ b/httpx/dispatch/urllib3.py @@ -88,12 +88,14 @@ def send(self, request: Request, timeout: Timeout = None) -> Response: connect=timeout.connect_timeout, read=timeout.read_timeout ) chunked = request.headers.get("Transfer-Encoding") == "chunked" + content_length = int(request.headers.get("Content-Length", "0")) + body = request.stream if chunked or content_length else None conn = self.pool.urlopen( method=request.method, url=str(request.url), headers=dict(request.headers), - body=request.stream, + body=body, redirect=False, assert_same_host=False, retries=0, From 067cef2842c98747d7a9111ce54633b3f87573f1 Mon Sep 17 00:00:00 2001 From: Tom Christie Date: Wed, 8 Jan 2020 09:58:25 +0000 Subject: [PATCH 13/13] Wrap urllib3 connection/read exceptions --- httpx/dispatch/urllib3.py | 35 ++++++++++++++++++++--------------- 1 file changed, 20 insertions(+), 15 deletions(-) diff --git a/httpx/dispatch/urllib3.py b/httpx/dispatch/urllib3.py index 0cbd5eaa73..1782834400 100644 --- a/httpx/dispatch/urllib3.py +++ b/httpx/dispatch/urllib3.py @@ -1,8 +1,10 @@ import math +import socket import ssl import typing import urllib3 +from urllib3.exceptions import MaxRetryError, SSLError from ..config import ( DEFAULT_POOL_LIMITS, @@ -15,6 +17,7 @@ ) from ..content_streams import IteratorStream from ..models import Request, Response +from ..utils import as_network_error from .base import SyncDispatcher @@ -91,23 +94,25 @@ def send(self, request: Request, timeout: Timeout = None) -> Response: content_length = int(request.headers.get("Content-Length", "0")) body = request.stream if chunked or content_length else None - conn = self.pool.urlopen( - method=request.method, - url=str(request.url), - headers=dict(request.headers), - body=body, - redirect=False, - assert_same_host=False, - retries=0, - preload_content=False, - chunked=chunked, - timeout=urllib3_timeout, - 
pool_timeout=timeout.pool_timeout, - ) + with as_network_error(MaxRetryError, SSLError, socket.error): + conn = self.pool.urlopen( + method=request.method, + url=str(request.url), + headers=dict(request.headers), + body=body, + redirect=False, + assert_same_host=False, + retries=0, + preload_content=False, + chunked=chunked, + timeout=urllib3_timeout, + pool_timeout=timeout.pool_timeout, + ) def response_bytes() -> typing.Iterator[bytes]: - for chunk in conn.stream(4096, decode_content=False): - yield chunk + with as_network_error(socket.error): + for chunk in conn.stream(4096, decode_content=False): + yield chunk return Response( status_code=conn.status,