How to use the requests3.basics.urlparse function in requests3

To help you get started, we've selected a few requests3 examples based on popular ways the urlparse function is used in public projects.
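Before diving into the project examples, here is a minimal standalone sketch of what the function gives you. It assumes requests3.basics.urlparse behaves like urllib.parse.urlparse (requests-style code bases typically re-export the standard-library parser), so the standard library stands in below; the requests3 import path mentioned in the comment follows this page's title and is an assumption.

# A minimal sketch, assuming requests3.basics.urlparse matches urllib.parse.urlparse.
# With requests3 installed, `from requests3.basics import urlparse` (per the title
# above) is the assumed equivalent import.
from urllib.parse import urlparse

parsed = urlparse('https://user:secret@example.com:8443/path/to/resource?q=1#frag')

print(parsed.scheme)    # 'https'
print(parsed.hostname)  # 'example.com'
print(parsed.port)      # 8443
print(parsed.path)      # '/path/to/resource'
print(parsed.query)     # 'q=1'
print(parsed.geturl())  # reassembles the full URL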


github psf / requests / tests / test_requests.py
def test_proxy_env_vars_override_default(var, url, proxy, s):

    prep = PreparedRequest()
    prep.prepare(method='GET', url=url)
    kwargs = {var: proxy}
    scheme = urlparse(url).scheme
    with override_environ(**kwargs):
        proxies = s.rebuild_proxies(prep, {})
        assert scheme in proxies
        assert proxies[scheme] == proxy
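The test above keys its expected proxy mapping by the URL's scheme. A standalone sketch of that lookup pattern, with urllib.parse.urlparse standing in for the library's parser and a hypothetical pick_proxy helper:

from urllib.parse import urlparse

def pick_proxy(url, proxies):
    # Hypothetical helper: look up a proxy by the URL's scheme, mirroring the
    # test's `scheme in proxies` / `proxies[scheme]` checks.
    scheme = urlparse(url).scheme
    return proxies.get(scheme)

proxies = {'http': 'http://proxy.local:3128', 'https': 'http://proxy.local:3128'}
assert pick_proxy('http://example.com/', proxies) == 'http://proxy.local:3128'
assert pick_proxy('ftp://example.com/', proxies) is None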
github psf / requests / requests3 / sessions.py
            if len(response.history) >= self.max_redirects:
                raise TooManyRedirects(
                    'Exceeded %s redirects.' % self.max_redirects,
                    response=response,
                )

            # Release the connection back into the pool.
            response.close()
            # Handle redirection without scheme (see: RFC 1808 Section 4)
            if location_url.startswith('//'):
                parsed_rurl = urlparse(response.url)
                location_url = '%s:%s' % (
                    to_native_string(parsed_rurl.scheme), location_url
                )
            # The scheme should be lower case...
            parsed = urlparse(location_url)
            location_url = parsed.geturl()
            # Facilitate relative 'location' headers, as allowed by RFC 7231.
            # (e.g. '/path/to/resource' instead of 'http://domain.tld/path/to/resource')
            # Compliant with RFC3986, we percent encode the url.
            if not parsed.netloc:
                location_url = urljoin(response.url, requote_uri(location_url))
            else:
                location_url = requote_uri(location_url)
            prepared_request.url = to_native_string(location_url)
            method_changed = self.rebuild_method(prepared_request, response)
            # https://github.com/kennethreitz/requests/issues/2590
            # If method is changed to GET we need to remove body and associated headers.
            if method_changed and prepared_request.method == 'GET':
                # https://github.com/requests/requests/issues/3490
                purged_headers = (
                    'Content-Length', 'Content-Type', 'Transfer-Encoding'
                )
                for header in purged_headers:
                    prepared_request.headers.pop(header, None)
                prepared_request.body = None
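The redirect-handling excerpt above fixes up a scheme-relative Location ('//host/path') by borrowing the scheme from the previous response URL, and resolves relative locations with urljoin. A minimal sketch of those two branches, assuming urlparse and urljoin behave like their urllib.parse counterparts (percent re-quoting omitted):

from urllib.parse import urlparse, urljoin

def resolve_location(response_url, location):
    # Sketch of the normalization steps above, not the library's exact code.
    if location.startswith('//'):
        # Scheme-relative redirect: reuse the scheme of the URL we came from.
        location = '%s:%s' % (urlparse(response_url).scheme, location)
    parsed = urlparse(location)
    if not parsed.netloc:
        # Relative Location header (RFC 7231): resolve against the current URL.
        return urljoin(response_url, location)
    return parsed.geturl()

assert resolve_location('https://a.example/x', '//b.example/y') == 'https://b.example/y'
assert resolve_location('https://a.example/x/', 'z') == 'https://a.example/x/z'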
github psf / requests / requests3 / sessions.py
    def rebuild_auth(self, prepared_request, response):
        """When being redirected we may want to strip authentication from the
        request to avoid leaking credentials. This method intelligently removes
        and reapplies authentication where possible to avoid credential loss.
        """
        headers = prepared_request.headers
        url = prepared_request.url
        if 'Authorization' in headers:
            # If we get redirected to a new host, we should strip out any
            # authentication headers.
            original_parsed = urlparse(response.request.url)
            redirect_parsed = urlparse(url)
            if (original_parsed.hostname != redirect_parsed.hostname):
                del headers['Authorization']
        # .netrc might have more auth for us on our new host.
        new_auth = get_netrc_auth(url) if self.trust_env else None
        if new_auth is not None:
            prepared_request.prepare_auth(new_auth)
        return
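The hostname comparison above can be shown in isolation. The sketch below, with urllib.parse.urlparse standing in and a hypothetical helper name, drops the Authorization header whenever a redirect lands on a different host:

from urllib.parse import urlparse

def strip_auth_on_host_change(original_url, redirect_url, headers):
    # Hypothetical helper mirroring rebuild_auth's check: compare hostnames and
    # drop credentials when the redirect target is a different host.
    if urlparse(original_url).hostname != urlparse(redirect_url).hostname:
        headers.pop('Authorization', None)
    return headers

headers = {'Authorization': 'Basic Zm9vOmJhcg=='}
strip_auth_on_host_change('https://api.example.com/v1', 'https://evil.example.net/v1', headers)
assert 'Authorization' not in headers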
github psf / requests / requests3 / adapters.py
    def request_url(self, request, proxies):
        """Obtain the url to use when making the final request.

        If the message is being sent through a HTTP proxy, the full URL has to
        be used. Otherwise, we should only use the path portion of the URL.

        This should not be called from user code, and is only exposed for use
        when subclassing the
        :class:`HTTPAdapter`.

        :param request: The :class:`PreparedRequest` being sent.
        :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs.
        :rtype: str
        """
        proxy = select_proxy(request.url, proxies)
        scheme = urlparse(request.url).scheme
        is_proxied_http_request = (proxy and scheme != 'https')
        using_socks_proxy = False
        if proxy:
            proxy_scheme = urlparse(proxy).scheme.lower()
            using_socks_proxy = proxy_scheme.startswith('socks')
        url = request.path_url
        if is_proxied_http_request and not using_socks_proxy:
            url = urldefragauth(request.url)
        return url
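request_url's decision can be sketched on its own: send the absolute URL when routing plain HTTP through a non-SOCKS proxy, and only the path (plus query) otherwise. The helper below is hypothetical, uses a plain scheme-keyed proxy dict, and leans on urllib.parse in place of the library's helpers:

from urllib.parse import urlparse

def choose_request_target(url, proxies):
    # Hypothetical sketch of the request_url logic above.
    parsed = urlparse(url)
    proxy = proxies.get(parsed.scheme)
    using_socks = bool(proxy) and urlparse(proxy).scheme.lower().startswith('socks')
    if proxy and parsed.scheme != 'https' and not using_socks:
        # Plain HTTP through a forward proxy: the request line needs the full URL.
        return url
    # Direct connections, HTTPS (CONNECT tunnel), and SOCKS proxies use the path.
    path = parsed.path or '/'
    return path + ('?' + parsed.query if parsed.query else '')

assert choose_request_target('http://example.com/a?b=1',
                             {'http': 'http://proxy.local:3128'}) == 'http://example.com/a?b=1'
assert choose_request_target('https://example.com/a?b=1',
                             {'https': 'http://proxy.local:3128'}) == '/a?b=1'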
github psf / requests / requests3 / sessions.py
    def rebuild_proxies(self, prepared_request, proxies):
        """This method re-evaluates the proxy configuration by
        considering the environment variables. If we are redirected to a
        URL covered by NO_PROXY, we strip the proxy configuration.
        Otherwise, we set missing proxy keys for this URL (in case they
        were stripped by a previous redirect).

        This method also replaces the Proxy-Authorization header where
        necessary.

        :rtype: dict
        """
        proxies = proxies if proxies is not None else {}
        headers = prepared_request.headers
        url = prepared_request.url
        scheme = urlparse(url).scheme
        new_proxies = proxies.copy()
        no_proxy = proxies.get('no_proxy')
        bypass_proxy = should_bypass_proxies(url, no_proxy=no_proxy)
        if self.trust_env and not bypass_proxy:
            environ_proxies = get_environ_proxies(url, no_proxy=no_proxy)
            proxy = environ_proxies.get(scheme, environ_proxies.get('all'))
            if proxy:
                new_proxies.setdefault(scheme, proxy)
        if 'Proxy-Authorization' in headers:
            del headers['Proxy-Authorization']
        try:
            username, password = get_auth_from_url(new_proxies[scheme])
        except KeyError:
            username, password = None, None
        if username and password:
            headers['Proxy-Authorization'] = _basic_auth_str(
                username, password
            )
        return new_proxies
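The username and password pulled from the proxy URL at the end of rebuild_proxies can also be read straight off the parse result. The sketch below builds a Proxy-Authorization value that way with the standard-library parser and base64; it approximates what get_auth_from_url and _basic_auth_str do rather than reproducing the library's code:

from base64 import b64encode
from urllib.parse import urlparse

def proxy_auth_header(proxy_url):
    # Sketch: read credentials from the proxy URL and build a Basic auth value.
    parsed = urlparse(proxy_url)
    if not (parsed.username and parsed.password):
        return None
    token = b64encode(('%s:%s' % (parsed.username, parsed.password)).encode('utf-8'))
    return 'Basic ' + token.decode('ascii')

assert proxy_auth_header('http://user:secret@proxy.local:3128') == 'Basic dXNlcjpzZWNyZXQ='
assert proxy_auth_header('http://proxy.local:3128') is None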
github psf / requests / requests3 / adapters.py
        :param url: The URL to connect to.
        :param proxies: (optional) A Requests-style dictionary of proxies used on this request.
        :rtype: urllib3.ConnectionPool
        """
        pool_kwargs = _pool_kwargs(verify, cert)
        proxy = select_proxy(url, proxies)
        if proxy:
            proxy = prepend_scheme_if_needed(proxy, 'http')
            proxy_manager = self.proxy_manager_for(proxy)
            conn = proxy_manager.connection_from_url(
                url, pool_kwargs=pool_kwargs
            )
        else:
            # Only scheme should be lower case
            parsed = urlparse(url)
            url = parsed.geturl()
            conn = self.poolmanager.connection_from_url(
                url, pool_kwargs=pool_kwargs
            )
        return conn
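The 'Only scheme should be lower case' comment works because urlparse lower-cases the scheme while parsing, so a parse/geturl round-trip normalizes the scheme without touching the rest of the URL. A quick standard-library check of that behaviour, assuming requests3's urlparse matches urllib.parse here:

from urllib.parse import urlparse

# urlparse normalizes the scheme to lower case on parsing, so re-serializing
# with geturl() lower-cases the scheme and leaves host case, path and query alone.
parsed = urlparse('HTTP://Example.COM/Path?Q=1')

assert parsed.scheme == 'http'
assert parsed.geturl() == 'http://Example.COM/Path?Q=1'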