from requirementslib import Requirement


def _parse_requirement_line(line):
    # Skip blank lines and index options such as "-i https://pypi.org/simple".
    if len(line) == 0:
        return None
    if line[:2] == '-i':
        return None
    return {
        "line": line,
        "requirement": Requirement.from_line(line),
    }
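For context, a minimal usage sketch (not part of the original source) showing what `Requirement.from_line` produces for a typical requirements.txt line; the values in the comments are illustrative:

parsed = _parse_requirement_line("requests>=2.20.0")
if parsed is not None:
    req = parsed["requirement"]
    print(req.name)       # "requests"
    print(req.as_line())  # round-trips to a PEP 508 style line, e.g. "requests>=2.20.0"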
import requirementslib


def _get_requirements(model, section_name):
    """Produce a mapping of identifier: requirement from the section.
    """
    if not model:
        return {}
    return {identify_requirment(r): r for r in (
        requirementslib.Requirement.from_pipfile(name, package._data)
        for name, package in model.get(section_name, {}).items()
    )}
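The `identify_requirment` helper (spelled as in the original) is not shown in this snippet. A minimal sketch, assuming it simply keys each requirement by its normalized project name:

def identify_requirment(r):
    # Hypothetical helper: derive the mapping key for a requirement, assumed
    # here to be the PEP 503 normalized name exposed by requirementslib.
    return r.normalized_name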
* `uptodate`: These match the specifications.
* `outdated`: These installations are specified, but don't match the
  specifications in `packages`.
* `unneeded`: These are installed, but not specified in `packages`.
"""
groupcoll = GroupCollection(set(), set(), set(), set())
for distro in pkg_resources.working_set:
    name = distro.key
    try:
        package = packages[name]
    except KeyError:
        groupcoll.unneeded.add(name)
        continue
    r = requirementslib.Requirement.from_pipfile(name, package)
    if not r.is_named:
        # Always mark non-named. I think pip does something similar?
        groupcoll.outdated.add(name)
    elif not _is_up_to_date(distro, r.get_version()):
        groupcoll.outdated.add(name)
    else:
        groupcoll.uptodate.add(name)
return groupcoll
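The `_is_up_to_date` helper is referenced above but not shown. A minimal sketch, assuming it compares the installed distribution's version against the locked version string using `packaging`:

from packaging.version import parse as parse_version

def _is_up_to_date(distro, version):
    # Hypothetical helper: treat the installation as up to date only when the
    # installed version parses as exactly the locked version.
    if not version:
        return False
    return parse_version(distro.version) == parse_version(version)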
lockfile = plette.Lockfile.load(f)

libdir = OUTPUT_DIR.joinpath('lib')
paths = {'purelib': libdir, 'platlib': libdir}
sources = lockfile.meta.sources._data
maker = distlib.scripts.ScriptMaker(None, None)

# Install packages from Pipfile.lock.
for name, package in lockfile.default._data.items():
    if name in DONT_PACKAGE:
        continue
    print(f'[pack] Installing {name}')
    package.pop('editable', None)  # Don't install things as editable.
    package.pop('markers', None)   # Always install everything.
    r = requirementslib.Requirement.from_pipfile(name, package)
    wheel = passa.internals._pip.build_wheel(
        r.as_ireq(), sources, r.hashes or None,
    )
    wheel.install(paths, maker, lib_only=True)

for pattern in IGNORE_LIB_PATTERNS:
    for path in libdir.rglob(pattern):
        print(f'[pack] Removing {path}')
        path.unlink()

# Pack everything into ZIP.
zipname = OUTPUT_DIR.joinpath('passa.zip')
with zipfile.ZipFile(zipname, 'w') as zf:
    _recursive_write_to_zip(zf, OUTPUT_DIR)
    _recursive_write_to_zip(zf, STUBFILES_DIR)
print(f'[pack] Written archive {zipname}')
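`_recursive_write_to_zip` is not shown above. A minimal sketch, assuming it walks a directory with pathlib and stores files under archive names relative to the directory root, skipping the archive so it is not packed into itself:

def _recursive_write_to_zip(zf, path, root=None):
    # Hypothetical helper: add every file under `path` to the open ZipFile,
    # using paths relative to `root` (defaults to `path`) as archive names.
    if root is None:
        root = path
    for child in sorted(path.iterdir()):
        if child.name == 'passa.zip':
            continue  # don't pack the archive into itself
        if child.is_dir():
            _recursive_write_to_zip(zf, child, root)
        else:
            zf.write(str(child), str(child.relative_to(root)))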
def format_remote_package(
    package_name, config, dev=False
):  # type: (str, LockConfig, bool) -> Tuple[str, str]
    """
    Format a package entry into a pair that can be fed into either
    install_requires, dependency_links, or extras_require.

    :param package_name: the name of the package
    :param config: the package's configuration mapping from the lock file
    :param dev: whether the package is a development package
    :return: Tuple[keyword_target, list_argument]
    :raise ValueError: if a package config is not understood
    """
    if dev:
        return (
            "extras_require",
            Requirement.from_pipfile(package_name, config).as_line(
                include_hashes=False
            ),
        )
    else:
        # fixme: stronger checks?
        # https://setuptools.readthedocs.io/en/latest/setuptools.html#dependencies-that-aren-t-in-pypi
        if "file" in config:  # a remote built distribution, e.g. a '.zip' file
            return "dependency_links", config["file"]
        if "version" in config:  # PyPI package
            return (
                "install_requires",
                Requirement.from_pipfile(package_name, config).as_line(
                    include_hashes=False
                ),
            )
        else:  # vcs
getters = [
    _get_dependencies_from_cache,
    _cached(_get_dependencies_from_json, sources=sources),
    _cached(_get_dependencies_from_pip, sources=sources),
]
ireq = requirement.as_ireq()
last_exc = None
# Try each getter in order, falling through to the next one on failure.
for getter in getters:
    try:
        result = getter(ireq)
    except Exception:
        last_exc = sys.exc_info()
        continue
    if result is not None:
        deps, pyreq = result
        reqs = [requirementslib.Requirement.from_line(d) for d in deps]
        return reqs, pyreq
# Every getter failed: re-raise the last error, or report a generic failure.
if last_exc:
    six.reraise(*last_exc)
raise RuntimeError("failed to get dependencies for {}".format(
    requirement.as_line(),
))
response = session.get(url)
response.raise_for_status()
info = response.json()["info"]

requires_python = info["requires_python"] or ""
try:
    requirement_lines = info["requires_dist"]
except KeyError:
    requirement_lines = info["requires"]

# The JSON API returns null both when there are no requirements and when the
# requirement list simply could not be retrieved. We can't tell which, so it's
# safer to drop the result and fall back to downloading the package.
try:
    dependency_requirements_iterator = (
        requirementslib.Requirement.from_line(line)
        for line in requirement_lines
    )
except TypeError:
    return

dependencies = [
    dep_req.as_line(include_hashes=False)
    for dep_req in dependency_requirements_iterator
    if not contains_extra(dep_req.markers)
]
return dependencies, requires_python
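`contains_extra` is referenced above but not defined here. A rough sketch, assuming it reports whether a parsed marker expression tests the `extra` variable (such requirements only apply when the corresponding extra is requested):

def contains_extra(markers):
    # Hypothetical helper: crude string-based check; a real implementation
    # would walk the parsed marker tree instead of inspecting its str() form.
    return markers is not None and "extra" in str(markers)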
def format_remote_package(
    package_name, config, dev=False
):  # type: (str, PipfileConfig, bool) -> Tuple[str, str]
    """
    Format a package entry into a pair that can be fed into either
    install_requires, dependency_links, or extras_require.

    :param package_name: the name of the package
    :param config: the package's Pipfile configuration
    :param dev: whether the package is a development package
    :return: Tuple[keyword_target, list_argument]
    :raise ValueError: if a package config is not understood
    """
    if dev:
        return (
            "extras_require",
            Requirement.from_pipfile(package_name, config).as_line(
                include_hashes=False
            ),
        )
    else:
        # fixme: stronger checks?
        # https://setuptools.readthedocs.io/en/latest/setuptools.html#dependencies-that-aren-t-in-pypi
        if "file" in config:  # a remote built distribution, e.g. a '.zip' file
            assert isinstance(config, dict)
            return "dependency_links", config["file"]
        if is_pypi_package(config):  # PyPI package
            return (
                "install_requires",
                Requirement.from_pipfile(package_name, config).as_line(
                    include_hashes=False
                ),
            )
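A usage sketch for the function above (not from the original source); it assumes `is_pypi_package()` accepts a plain version pin, and the returned strings are illustrative since the exact formatting comes from requirementslib:

# A plain PyPI pin is routed to install_requires.
format_remote_package("requests", {"version": ">=2.20.0"})
# -> ("install_requires", "requests>=2.20.0")  (illustrative)

# A URL to a built distribution is routed to dependency_links.
format_remote_package("example-pkg", {"file": "https://example.com/example_pkg-1.0.zip"})
# -> ("dependency_links", "https://example.com/example_pkg-1.0.zip")

# Development packages go to extras_require.
format_remote_package("pytest", {"version": ">=5.0"}, dev=True)
# -> ("extras_require", "pytest>=5.0")  (illustrative)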
def add_line_to_pipfile(self, line, develop):
    from requirementslib import Requirement

    requirement = Requirement.from_line(line)
    section = self._get_pipfile_section(develop=develop)
    key = requirement.normalized_name
    entry = next(iter(requirement.as_pipfile().values()))
    if isinstance(entry, dict):
        # HACK: TOMLKit prefers to expand tables by default, but we
        # always want inline tables here. Also tomlkit.inline_table
        # does not have `update()`.
        table = tomlkit.inline_table()
        for k, v in entry.items():
            table[k] = v
        entry = table
    section[key] = entry
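For context on the `entry = next(iter(...))` line, a small sketch (not from the original source) of what `Requirement.as_pipfile()` typically returns and why the inline-table conversion is needed; the dictionary shapes in the comments are approximate:

from requirementslib import Requirement

req = Requirement.from_line("requests[security]>=2.20.0")
print(req.as_pipfile())
# Roughly: {"requests": {"extras": ["security"], "version": ">=2.20.0"}}

# A plain version pin collapses to a string instead of a dict:
print(Requirement.from_line("requests==2.20.0").as_pipfile())
# Roughly: {"requests": "==2.20.0"}

# Dict entries are rewritten into tomlkit inline tables so the Pipfile keeps
# one package per line instead of an expanded [packages.requests] table.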