How to use the distlib.locators module in distlib

To help you get started, we’ve selected a few distlib examples, based on popular ways it is used in public projects.

Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.

github clarete / curdling / curdling / services / downloader.py View on Github external
http_proxy, proxy=True)
        return urllib3.ProxyManager(
            proxy_url=parsed_url.geturl(),
            proxy_headers=proxy_headers)
    return urllib3.PoolManager()


class ComparableLocator(object):
    """Mixin that makes locator instances comparable by their ``base_url``.

    Subclasses are expected to set ``self.base_url`` in their constructor.
    """

    def __eq__(self, other):
        # Defer politely (NotImplemented) when the other operand has no
        # base_url, instead of raising AttributeError from the comparison.
        try:
            return self.base_url == other.base_url
        except AttributeError:
            return NotImplemented

    def __hash__(self):
        # Defining __eq__ alone would make instances unhashable on
        # Python 3; hash consistently with equality.
        return hash(self.base_url)

    def __repr__(self):
        return '{0}(\'{1}\')'.format(self.__class__.__name__, self.base_url)


class AggregatingLocator(locators.AggregatingLocator):

    def locate(self, requirement, prereleases=True):
        """Return the first non-empty package set produced by our locators.

        Returns None implicitly when no locator yields a match.
        """
        parsed = util.parse_requirement(requirement)
        for child in self.locators:
            found = find_packages(child, parsed, child.get_project(parsed.name))
            if found:
                return found


class PyPiLocator(locators.SimpleScrapingLocator, ComparableLocator):
    """Scraping locator for a PyPI-style simple index.

    Mixes in ComparableLocator so instances compare equal by base_url.
    """

    def __init__(self, url, **kwargs):
        super(PyPiLocator, self).__init__(url, **kwargs)
        # NOTE(review): get_opener() is presumably the proxy-aware urllib3
        # pool/manager helper visible earlier in this file — confirm.
        self.opener = get_opener()

    def _get_project(self, name):
github clarete / curdling / curdling / services / downloader.py View on Github external
encoding = response.headers.get('content-encoding')
            if encoding:
                decoder = self.decoders[encoding]   # fail if not found
                data = decoder(data)
            encoding = 'utf-8'
            m = locators.CHARSET.search(content_type)
            if m:
                encoding = m.group(1)
            try:
                data = data.decode(encoding)
            except UnicodeError:
                data = data.decode('latin-1')    # fallback
            return locators.Page(data, final_url)


class CurdlingLocator(locators.Locator, ComparableLocator):

    def __init__(self, url, **kwargs):
        """Create a locator that talks to a curdling index rooted at *url*."""
        super(CurdlingLocator, self).__init__(**kwargs)
        # base_url feeds ComparableLocator; url is what the API calls use.
        self.url = url
        self.base_url = url
        self.opener = get_opener()
        self.requirements_not_found = []

    def get_distribution_names(self):
        """Fetch and decode the server's list of known distribution names."""
        response = http_retrieve(self.opener, compat.urljoin(self.url, 'api'))[0]
        return json.loads(response.data)

    def _get_project(self, name):
        # Retrieve the info
        url = compat.urljoin(self.url, 'api/' + name)
github clarete / curdling / curdling / services / downloader.py View on Github external
class Pool(urllib3.PoolManager):

    def retrieve(self, url):
        """GET *url* and return ``(response, final_url)``.

        ``preload_content`` must be False, otherwise ``read()`` won't
        honor ``decode_content``.
        """
        auth_headers = util.get_auth_info_from_url(url)

        # Issue the request and report the final location after redirects.
        response = self.request(
            'GET', url, headers=auth_headers, preload_content=False)
        final_url = response.get_redirect_location() or url
        return response, final_url


class AggregatingLocator(locators.AggregatingLocator):

    def locate(self, requirement, prereleases=True):
        """Return the first package match for *requirement*, or None."""
        pkg = dutil.parse_requirement(requirement)

        # Deliberately not `util.safe_name`: it does the opposite of what we
        # need here. PyPI packages with underscores (`_`) fall back to
        # hyphens (`-`), but the reverse substitution does not hold.
        normalized = pkg.name.lower().replace('-', '_')

        for child in self.locators:
            found = find_packages(child, pkg, child.get_project(normalized))
            if found:
                return found


class PyPiLocator(locators.SimpleScrapingLocator):
    def __init__(self, url, **kwargs):
github stevearc / pypicloud / pypicloud / util.py View on Github external
not (self.prefer_wheel ^ filename.endswith(".whl")),
            "pypi.org" in t.netloc,
            filename,
        )


# Distlib checks if wheels are compatible before returning them.
# This is useful if you are attempting to install on the system running
# distlib, but we actually want ALL wheels so we can display them to the
# clients.  So we have to monkey patch the method. I'm sorry.
def is_compatible(wheel, tags=None):
    """Monkey-patch replacement for distlib's wheel compatibility check.

    Unconditionally reports every wheel as compatible, so wheels for all
    platforms are surfaced rather than only those usable on this host.
    """
    return True


# Install the permissive check so distlib returns wheels for all platforms.
distlib.locators.is_compatible = is_compatible


def create_matcher(queries, query_type):
    """
    Create a matcher for a list of queries

    Parameters
    ----------
    queries : list
        List of queries

    query_type: str
        Type of query to run: ["or"|"and"]

    Returns
    -------
github brettcannon / caniusepython3 / caniusepython3 / dependencies.py View on Github external
def dependencies(project_name):
    """Get the dependencies for a project."""
    log = logging.getLogger('ciu')
    log.info('Locating dependencies for {}'.format(project_name))
    located = distlib.locators.locate(project_name, prereleases=True)
    if not located:
        log.warning('{0} not found'.format(project_name))
        return None
    # Canonicalize each declared requirement's bare name into a set.
    names = set()
    for dep in located.run_requires:
        names.add(packaging.utils.canonicalize_name(pypi.just_name(dep)))
    return names
github clarete / curdling / curdling / services / downloader.py View on Github external
encoding = response.headers.get('content-encoding')
            if encoding:
                decoder = self.decoders[encoding]   # fail if not found
                data = decoder(data)
            encoding = 'utf-8'
            m = locators.CHARSET.search(content_type)
            if m:
                encoding = m.group(1)
            try:
                data = data.decode(encoding)
            except UnicodeError:
                data = data.decode('latin-1')    # fallback
            return locators.Page(data, final_url)


class CurdlingLocator(locators.Locator):

    def __init__(self, url, **kwargs):
        """Locator backed by a curdling server rooted at *url*."""
        super(CurdlingLocator, self).__init__(**kwargs)
        # base_url feeds comparison helpers; url is used to build API calls.
        self.url = url
        self.base_url = url
        self.opener = Pool()
        self.packages_not_found = []

    def get_distribution_names(self):
        """Fetch and decode the server's list of known distribution names."""
        response = self.opener.retrieve(urljoin(self.url, 'api'))[0]
        return json.loads(response.data)

    def _get_project(self, name):
        # Retrieve the info
        url = urljoin(self.url, 'api/' + name)