# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
# NOTE(review): fragment — this parser definition is cut off mid-call below
# (the "--output" add_argument has no closing paren in view) and the file's
# indentation has been stripped.
def parse_args(args: Optional[List[str]]) -> argparse.Namespace:
# Build the CLI argument parser for a package-resolution tool.
parser = argparse.ArgumentParser()
parser.add_argument(
"package_names",
metavar="PACKAGE",
nargs="+",
# Normalize each positional package name per PEP 503 at parse time.
type=packaging.utils.canonicalize_name,
)
parser.add_argument(
"--python-version",
dest="python_version",
type=_parse_python_version,
# Default to the running interpreter's "major.minor", e.g. "3.11".
default=".".join(str(v) for v in sys.version_info[:2]),
)
parser.add_argument(
"--interpreter", default=None,
)
parser.add_argument(
# action="append" collects repeated --platform flags into a list.
"--platform", dest="platforms", action="append", default=None,
)
parser.add_argument(
"--output",
type=pathlib.Path,
def _is_valid_dependency(self, dependency_name: str) -> bool:
    """Return True unless *dependency_name* canonicalizes to this package's own name.

    Used to filter out self-referential dependencies.
    """
    return canonicalize_name(dependency_name) != self.name
# NOTE(review): fragment — interior of a method whose `def` line is outside
# this view; `candidate`, `self.graph`, `self._dependencies` and
# `self._active_requirements` are defined elsewhere.
# Update the graph!
node_name, node = self._get_node_for(candidate)
# Presumably marks the candidate's node as rejected/backtracked — TODO confirm.
node.attr.update(shape="signature", color="red")
for edge in self.graph.out_edges_iter([node_name]):
# De-emphasize outgoing edges; restore the target node's color.
edge.attr.update(style="dotted", arrowhead="vee", color="#FF9999")
_, to = edge
to.attr.update(color="black")
for edge in self.graph.in_edges_iter([node_name]):
edge.attr.update(style="dotted", color="#808080")
# Trim "active" requirements to remove anything not relevant now.
for requirement in self._dependencies[candidate]:
active = self._active_requirements[
canonicalize_name(requirement.name)
]
# Reference-count bookkeeping: drop the requirement once its count
# reaches zero.
active[requirement] -= 1
if not active[requirement]:
del active[requirement]
# NOTE(review): fragment — interior of a constructor (the `def __init__` line
# is outside this view); `req_string`, `parent`, `index_url`, `env` and
# `extra_index_url` appear to be its parameters.
self.parent = parent
self.index_url = index_url
self.env = env
self.extra_index_url = extra_index_url
self._recursed = False
# A requirement given as a path to an existing wheel file is inspected
# locally instead of being resolved against an index.
if req_string.endswith(".whl") and os.path.isfile(req_string):
whl = distlib.wheel.Wheel(req_string)
self.name = canonicalize_name(whl.name)
# Pin exactly to the wheel's own version.
self.specifier = "==" + canonicalize_version(whl.version)
self.req = pkg_resources.Requirement.parse(self.name + self.specifier)
self.import_names = _discover_import_names(req_string)
self.metadata = _extract_metadata(req_string)
else:
# Otherwise treat the string as a requirement spec and fetch metadata
# from the configured index.
self.req = pkg_resources.Requirement.parse(req_string)
self.name = canonicalize_name(self.req.name)
self.specifier = str(self.req.specifier)
log.debug("fetching best wheel")
self.import_names, self.metadata = _get_info(
dist_name=req_string,
index_url=index_url,
env=env,
extra_index_url=extra_index_url,
)
self.extras_requested = sorted(self.req.extras)
# Root nodes (no parent) have an empty required_by list.
if parent is None:
if env:
log.debug("root node target env", **dict(env))
self.required_by = []
else:
self.required_by = [str(parent.req)]
def create(
    cls,
    *,
    filename: str,
    hash: Optional[str] = None,
    upload_timestamp: Optional[int] = None,
    uploaded_by: Optional[str] = None,
) -> 'Package':
    """Build a Package record from an uploaded *filename*.

    The filename is validated against a conservative whitelist before any
    parsing so that path-traversal or shell-unsafe names are rejected up
    front.

    Raises:
        ValueError: if the filename contains characters outside
            ``[a-zA-Z0-9_.+-]`` or the substring ``..``.
    """
    # re.match anchors only at the start, so the trailing `$` is required;
    # the explicit '..' check rejects traversal-style names even though
    # '/' is already outside the character class.
    if not re.match(r'[a-zA-Z0-9_\-\.\+]+$', filename) or '..' in filename:
        # BUG FIX: the message previously printed the literal text
        # "(unknown)" — an f-string with no placeholder — instead of the
        # offending filename.
        raise ValueError(f'Unsafe package name: {filename}')
    name, version = guess_name_version_from_filename(filename)
    return cls(
        filename=filename,
        # Store the PEP 503 canonical name for stable lookups.
        name=packaging.utils.canonicalize_name(name),
        version=version,
        # Fall back to '0' so a missing version still parses.
        parsed_version=packaging.version.parse(version or '0'),
        hash=hash,
        upload_timestamp=upload_timestamp,
        uploaded_by=uploaded_by,
    )
# NOTE(review): fragment — interior of a dependency-lookup function; `sources`,
# `ireq` and `version` come from the enclosing (unseen) scope.
# Keep only configured index URLs ending in "/simple", with that suffix
# stripped so they can serve as JSON-API prefixes.
url_prefixes = [
proc_url[:-7]  # Strip "/simple".
for proc_url in (
raw_url.rstrip("/")
for raw_url in (source.get("url", "") for source in sources)
)
if proc_url.endswith("/simple")
]
session = requests.session()
for prefix in url_prefixes:
# Query the PyPI-style JSON API for this exact name/version.
url = "{prefix}/pypi/{name}/{version}/json".format(
prefix=prefix,
name=packaging.utils.canonicalize_name(ireq.name),
version=version,
)
try:
dependencies = _get_dependencies_from_json_url(url, session)
if dependencies is not None:
return dependencies
# NOTE(review): broad catch looks like deliberate best-effort — a failing
# index is reported and the next prefix is tried. Returns None (implicit)
# when every prefix fails.
except Exception as e:
print("unable to read dependencies via {0} ({1})".format(url, e))
return
# NOTE(review): fragment — the body is cut off at the dangling `else:` at the
# end of this view.
def do_clean(
ctx, three=None, python=None, dry_run=False, bare=False, pypi_mirror=None,
system=False
):
# Uninstall packages installed in the virtualenv but absent from the lockfile.
# Ensure that virtualenv is available.
from packaging.utils import canonicalize_name
ensure_project(three=three, python=python, validate=False, pypi_mirror=pypi_mirror)
ensure_lockfile(pypi_mirror=pypi_mirror)
# Make sure that the virtualenv's site packages are configured correctly
# otherwise we may end up removing from the global site packages directory
installed_package_names = project.installed_package_names.copy()
# Remove known "bad packages" from the list.
for bad_package in BAD_PACKAGES:
if canonicalize_name(bad_package) in installed_package_names:
if environments.is_verbose():
click.echo("Ignoring {0}.".format(bad_package), err=True)
installed_package_names.remove(canonicalize_name(bad_package))
# Intelligently detect if --dev should be used or not.
locked_packages = {
canonicalize_name(pkg) for pkg in project.lockfile_package_names["combined"]
}
# Anything locked is legitimate: remove it from the candidate list.
for used_package in locked_packages:
if used_package in installed_package_names:
installed_package_names.remove(used_package)
failure = False
# What remains is not in the lockfile: uninstall it (or just print the
# name when dry-running without --bare).
cmd = [which_pip(allow_global=system), "uninstall", "-y", "-qq"]
for apparent_bad_package in installed_package_names:
if dry_run and not bare:
click.echo(apparent_bad_package)
else:
def projects_from_parsed(parsed):
    """Build the set of project names selected by the parsed CLI arguments.

    Gathers projects from requirements files, metadata files, and names
    given directly on the command line, then drops anything excluded.
    """
    collected = []
    collected.extend(projects_.projects_from_requirements(parsed.requirements))
    raw_metadata = []
    for path in parsed.metadata:
        with io.open(path) as handle:
            raw_metadata.append(handle.read())
    collected.extend(projects_.projects_from_metadata(raw_metadata))
    # Names passed directly are canonicalized (PEP 503) like the others.
    collected.extend(map(packaging.utils.canonicalize_name, parsed.projects))
    excluded = parsed.exclude
    return {name for name in collected if name not in excluded}
# NOTE(review): fragment — the loop body (and the eventual return) continues
# past the end of this view.
def get_releases(self, dep) -> tuple:
# Collect per-version release info for *dep* from index links.
links = self._get_links(name=dep.base_name)
releases_info = dict()
for link in links:
name, version = self._parse_name(link['name'])
# Skip links whose canonicalized name does not match the requested package.
if canonicalize_name(name) != canonicalize_name(dep.base_name):
logger.warning('bad dist name', extra=dict(
dist_name=link['name'],
package_name=dep.base_name,
reason='package name does not match',
))
continue
# Skip links from which no version could be parsed.
if not version:
logger.warning('bad dist name', extra=dict(
dist_name=link['name'],
package_name=dep.base_name,
reason='no version specified',
))
continue
if version not in releases_info:
releases_info[version] = dict(hashes=[], pythons=[])