# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
async def _handle_component(self, component, release, release_file, file_references):
    """
    Create a ReleaseComponent for ``component`` and queue up its package indices.

    NOTE(review): the second half of this body (from the ``sync_udebs`` branch
    onward) appears to be spliced in from a different coroutine -- it references
    an undefined ``distribution`` and re-creates ``release_file``/``release``,
    which this method already receives as arguments. Verify against the
    original module before relying on that code path.
    """
    # Create release_component
    release_component_dc = DeclarativeContent(
        content=ReleaseComponent(component=component, release=release)
    )
    release_component = await self._create_unit(release_component_dc)
    # Restrict the Release file's architecture list to those requested on the remote.
    architectures = _filter_split(release_file.architectures, self.architectures)
    pending_tasks = []
    # Handle package indices -- one pending task per architecture.
    pending_tasks.extend(
        [
            self._handle_package_index(
                release_file, release_component, architecture, file_references
            )
            for architecture in architectures
        ]
    )
    # Handle installer package indices
    if self.remote.sync_udebs:
        # NOTE(review): everything below looks like distribution handling, not
        # udeb index handling; ``distribution`` is undefined in this scope.
        # Create release_file
        release_file_dc = DeclarativeContent(
            content=ReleaseFile(distribution=distribution),
            d_artifacts=[
                self._to_d_artifact(os.path.join("dists", distribution, filename))
                for filename in ["Release", "InRelease", "Release.gpg"]
            ],
        )
        release_file = await self._create_unit(release_file_dc)
        if release_file is None:
            return
        # Create release object
        release_unit = Release(
            codename=release_file.codename, suite=release_file.suite, distribution=distribution
        )
        release_dc = DeclarativeContent(content=release_unit)
        release = await self._create_unit(release_dc)
        # Create release architectures
        for architecture in _filter_split(release_file.architectures, self.architectures):
            release_architecture_dc = DeclarativeContent(
                content=ReleaseArchitecture(architecture=architecture, release=release)
            )
            await self.put(release_architecture_dc)
        # Parse release file
        log.info('Parsing Release file for release: "{}"'.format(release_file.codename))
        release_file_dict = deb822.Release(release_file.main_artifact.file)
        # collect file references in new dict
        file_references = defaultdict(deb822.Deb822Dict)
        # Iterate strongest digest first; update() merges weaker-digest fields
        # into the same per-file entry keyed by the "Name" field.
        for digest_name in ["SHA512", "SHA256", "SHA1", "MD5sum"]:
            if digest_name in release_file_dict:
                for unit in release_file_dict[digest_name]:
                    file_references[unit["Name"]].update(unit)
# NOTE(review): fragment -- the call these first keyword arguments close off,
# and the definitions of ``digest``/``relative_url``/``manifest_list_data``/
# ``tag_dc``, begin before this chunk; indentation reconstructed.
        digest=digest,
    )
    url = urljoin(self.remote.url, relative_url)
    # Model the manifest list content unit from the already-parsed JSON document.
    manifest_list = Manifest(
        digest=digest,
        schema_version=manifest_list_data['schemaVersion'],
        media_type=manifest_list_data['mediaType'],
    )
    # Reuse the tag's first downloaded artifact so the manifest-list bytes are
    # not fetched a second time; it is stored under its digest.
    da = DeclarativeArtifact(
        artifact=tag_dc.d_artifacts[0].artifact,
        url=url,
        relative_path=digest,
        remote=self.remote,
        extra_data={'headers': V2_ACCEPT_HEADERS}
    )
    list_dc = DeclarativeContent(content=manifest_list, d_artifacts=[da])
    return list_dc
# NOTE(review): fragment -- the matching ``if`` branch and the loop providing
# ``record``/``remote_url`` are outside this chunk.
elif record.type not in PACKAGE_DB_REPODATA:
    # Any repodata record that is neither package metadata nor a database file
    # is preserved verbatim as a RepoMetadataFile content unit.
    file_data = {record.checksum_type: record.checksum, "size": record.size}
    da = DeclarativeArtifact(
        artifact=Artifact(**file_data),
        url=urljoin(remote_url, record.location_href),
        relative_path=record.location_href,
        remote=self.remote,
        deferred_download=False  # metadata is always fetched eagerly
    )
    repo_metadata_file = RepoMetadataFile(
        data_type=record.type,
        checksum_type=record.checksum_type,
        checksum=record.checksum,
    )
    dc = DeclarativeContent(content=repo_metadata_file, d_artifacts=[da])
    await self.put(dc)
# Every repository must provide the mandatory package repodata types; fail
# loudly, naming exactly which XML file(s) are absent.
missing_type = set(PACKAGE_REPODATA) - main_types
if missing_type:
    # BUG FIX: the message template lacked the ``{filename}`` placeholder, so
    # ``.format(filename=...)`` never inserted the missing file names.
    raise FileNotFoundError(_("XML file(s): {filename} not found").format(
        filename=", ".join(missing_type)))
# we have to sync module.yaml first if it exists, to make relations to packages
if modulemd_results:
    modulemd_index = mmdlib.ModuleIndex.new()
    # modules.yaml may be served gzip-compressed; pick the opener by suffix.
    open_func = gzip.open if modulemd_results.url.endswith('.gz') else open
    with open_func(modulemd_results.path, 'r') as moduleyaml:
        modulemd_index.update_from_string(
            moduleyaml.read().decode(), True
        )
# NOTE(review): fragment -- the loop populating ``filename``/``algorithm``/
# ``digest`` and the definitions of ``file_list``/``installer_file_index``/
# ``deferred_download`` begin before this chunk; indentation reconstructed.
        if filename in InstallerFileIndex.FILE_ALGORITHM:  # strangely they may appear here
            continue
        file_list[filename][algorithm] = digest
# Emit one GenericContent unit per installer file, identified by its sha256.
for filename, digests in file_list.items():
    relpath = os.path.join(installer_file_index.relative_path, filename)
    urlpath = os.path.join(self.parsed_url.path, relpath)
    content_unit = GenericContent(sha256=digests["sha256"], relative_path=relpath)
    d_artifact = DeclarativeArtifact(
        # ``digests`` maps digest-algorithm names to values -- TODO confirm the
        # keys match Artifact's field names.
        artifact=Artifact(**digests),
        url=urlunparse(self.parsed_url._replace(path=urlpath)),
        relative_path=relpath,
        remote=self.remote,
        deferred_download=deferred_download,
    )
    d_content = DeclarativeContent(content=content_unit, d_artifacts=[d_artifact])
    await self.put(d_content)
# NOTE(review): fragment -- the constructor these keyword arguments close off,
# and the definitions of ``blob_data``/``digest``/``blob_artifact``/``blob``,
# are outside this chunk; indentation reconstructed.
        media_type=blob_data.get('mediaType', MEDIA_TYPE.REGULAR_BLOB),
    )
    # Blobs are addressed by digest under the upstream repository namespace.
    relative_url = '/v2/{name}/blobs/{digest}'.format(
        name=self.remote.namespaced_upstream_name,
        digest=digest,
    )
    blob_url = urljoin(self.remote.url, relative_url)
    da = DeclarativeArtifact(
        artifact=blob_artifact,
        url=blob_url,
        relative_path=digest,
        remote=self.remote,
        extra_data={'headers': V2_ACCEPT_HEADERS},
        # Blob payloads can be large; honor this stage's on-demand policy.
        deferred_download=self.deferred_download
    )
    blob_dc = DeclarativeContent(
        content=blob,
        d_artifacts=[da],
    )
    return blob_dc
paths = [path for path in file_references.keys() if path.startswith(translation_dir)]
translations = {}
for path in paths:
relative_path = os.path.join(os.path.dirname(release_file.relative_path))
d_artifact = self._to_d_artifact(relative_path, file_references[path])
key, ext = os.path.splitext(relative_path)
if key not in translations:
translations[key] = {"sha256": None, "d_artifacts": []}
if not ext:
translations[key]["sha256"] = d_artifact.artifact.sha256
translations[key]["d_artifacts"].append(d_artifact)
for relative_path, translation in translations.items():
content_unit = GenericContent(sha256=translation["sha256"], relative_path=relative_path)
await self.put(
DeclarativeContent(content=content_unit, d_artifacts=translation["d_artifacts"])
)
# NOTE(review): fragment -- the ``try:`` paired with the ``except KeyError``
# below, plus the definitions of ``package_content_unit``/``package_da``/
# ``deferred_download``/``package_futures``/``package_paragraph``, begin
# before this chunk; indentation reconstructed.
            deferred_download=deferred_download,
        )
        package_dc = DeclarativeContent(
            content=package_content_unit, d_artifacts=[package_da]
        )
        # Keep the declarative content so its resolution can be awaited after
        # it has passed through the pipeline.
        package_futures.append(package_dc)
        await self.put(package_dc)
    except KeyError:
        log.warning("Ignoring invalid package paragraph. {}".format(package_paragraph))
# Assign packages to this release_component
for package_future in package_futures:
    package = await package_future.resolution()
    if not isinstance(package, Package):
        # TODO repeat this for installer packages
        continue
    package_release_component_dc = DeclarativeContent(
        content=PackageReleaseComponent(
            package=package, release_component=release_component
        )
    )
    await self.put(package_release_component_dc)
# If a treeinfo file was parsed, publish the distribution tree it describes
# together with every image artifact it references.
if self.treeinfo:
    d_artifacts = []
    for path, checksum in self.treeinfo["download"]["images"].items():
        # ``checksum`` is passed straight through as Artifact field kwargs
        # (digest type -> value, size, ...) -- TODO confirm against the parser.
        artifact = Artifact(**checksum)
        da = DeclarativeArtifact(
            artifact=artifact,
            url=urljoin(remote_url, path),
            relative_path=path,
            remote=self.remote,
            deferred_download=self.deferred_download
        )
        d_artifacts.append(da)
    distribution_tree = DistributionTree(**self.treeinfo["distribution_tree"])
    dc = DeclarativeContent(content=distribution_tree, d_artifacts=d_artifacts)
    # Stash the raw treeinfo so later pipeline stages can read it.
    dc.extra_data = self.treeinfo
    await self.put(dc)
# Parse the downloaded repomd.xml and initialize the accumulators used while
# walking its records (the walk itself continues past this chunk).
repomd_path = result.path
repomd = cr.Repomd(repomd_path)
package_repodata_urls = {}
downloaders = []
modulemd_list = list()
dc_groups = []
dc_categories = []
dc_environments = []
# Lookup tables linking packages to modules and to comps groups/categories/
# environments, filled in as the corresponding metadata is parsed.
nevra_to_module = defaultdict(dict)
pkgname_to_groups = defaultdict(list)
group_to_categories = defaultdict(list)
group_to_environments = defaultdict(list)
optionalgroup_to_environments = defaultdict(list)