async def create_and_process_tagged_manifest(self, tag_dc, manifest_data, out_q):
    """
    Create a Manifest and nested ManifestBlobs from the Tag artifact.

    Args:
        tag_dc (pulpcore.plugin.stages.DeclarativeContent): dc for a Tag
        manifest_data (dict): Data about a single new ImageManifest.
        out_q (asyncio.Queue): Queue to put created ImageManifest dcs and Blob dcs.
    """
    manifest = ImageManifest(
        digest=tag_dc.d_artifacts[0].artifact.sha256,
        schema_version=manifest_data['schemaVersion'],
        media_type=manifest_data['mediaType'],
    )
    man_dc = DeclarativeContent(content=manifest, d_artifacts=[tag_dc.d_artifacts[0]])
    for layer in manifest_data.get('layers', []):
        blob_dc = await self.create_pending_blob(man_dc, layer, out_q)
        blob_dc.extra_data['relation'] = man_dc
        await out_q.put(blob_dc)
    config_layer = manifest_data.get('config')
    if config_layer:
        config_blob_dc = await self.create_pending_blob(man_dc, config_layer, out_q)
        config_blob_dc.extra_data['config_relation'] = man_dc
        await out_q.put(config_blob_dc)
    man_dc.extra_data['relation'] = tag_dc
    tag_dc.extra_data['processed'] = True
    man_dc.extra_data['processed'] = True
    await out_q.put(man_dc)

async def parse_distribution_tree(self):
    """Parse content from the file treeinfo if present."""
    if self.treeinfo:
        d_artifacts = [
            DeclarativeArtifact(
                artifact=Artifact(),
                url=urljoin(self.data.remote_url, self.treeinfo["filename"]),
                relative_path=".treeinfo",
                remote=self.remote,
                deferred_download=False,
            )
        ]
        for path, checksum in self.treeinfo["download"]["images"].items():
            artifact = Artifact(**checksum)
            da = DeclarativeArtifact(
                artifact=artifact,
                url=urljoin(self.data.remote_url, path),
                relative_path=path,
                remote=self.remote,
                deferred_download=self.deferred_download,
            )
            d_artifacts.append(da)
        distribution_tree = DistributionTree(**self.treeinfo["distribution_tree"])
        dc = DeclarativeContent(content=distribution_tree, d_artifacts=d_artifacts)
        dc.extra_data = self.treeinfo
        await self.put(dc)

def _init_dc_groups(self, comps):
    if comps.groups:
        for group in comps.groups:
            group_dict = PackageGroup.libcomps_to_dict(group)
            group_dict["digest"] = dict_digest(group_dict)
            packagegroup = PackageGroup(**group_dict)
            dc = DeclarativeContent(content=packagegroup)
            dc.extra_data = defaultdict(list)

            if packagegroup.packages:
                for package in packagegroup.packages:
                    self.data.pkgname_to_groups[package["name"]].append(dc)

            if dc.content.id in self.group_to_categories.keys():
                for dc_category in self.group_to_categories[dc.content.id]:
                    dc.extra_data["category_relations"].append(dc_category)
                    dc_category.extra_data["packagegroups"].append(dc)

            if dc.content.id in self.group_to_environments.keys():
                for dc_environment in self.group_to_environments[dc.content.id]:
                    dc.extra_data["environment_relations"].append(dc_environment)
                    dc_environment.extra_data["packagegroups"].append(dc)

            if dc.content.id in self.optionalgroup_to_environments.keys():
                for dc_environment in self.optionalgroup_to_environments[dc.content.id]:
                    dc.extra_data["env_relations_optional"].append(dc_environment)
                    dc_environment.extra_data["optionalgroups"].append(dc)

            self.data.dc_groups.append(dc)

async def create_and_process_tagged_manifest_list(self, tag_dc, manifest_list_data):
    """
    Create a ManifestList and nested ImageManifests from the Tag artifact.

    Args:
        tag_dc (pulpcore.plugin.stages.DeclarativeContent): dc for a Tag
        manifest_list_data (dict): Data about a ManifestList
    """
    tag_dc.content = ManifestListTag(name=tag_dc.content.name)
    digest = "sha256:{digest}".format(digest=tag_dc.d_artifacts[0].artifact.sha256)
    relative_url = '/v2/{name}/manifests/{digest}'.format(
        name=self.remote.namespaced_upstream_name,
        digest=digest,
    )
    url = urljoin(self.remote.url, relative_url)
    manifest_list = ManifestList(
        digest=digest,
        schema_version=manifest_list_data['schemaVersion'],
        media_type=manifest_list_data['mediaType'],
    )
    da = DeclarativeArtifact(
        artifact=tag_dc.d_artifacts[0].artifact,
        url=url,
        relative_path=digest,
        remote=self.remote,
        extra_data={'headers': V2_ACCEPT_HEADERS},
    )
    list_dc = DeclarativeContent(content=manifest_list, d_artifacts=[da])
    for manifest in manifest_list_data.get('manifests', []):
        await self.create_pending_manifest(list_dc, manifest)
    list_dc.extra_data['relation'] = tag_dc
    list_dc.extra_data['processed'] = True
    tag_dc.extra_data['processed'] = True
    await self.put(list_dc)

def test_content_associated_using_repo_key(self):
    stage = QueryExistingRepoContentAndArtifacts(new_version=self.new_version_all_content())
    # c1: existing content unit with an Artifact
    c1 = CookbookPackageContent(name="c1", version="1.0.0", dependencies={})
    # c2: content unit does not exist in the DB
    c2 = CookbookPackageContent(name="c2", version="1.0.0", dependencies={})
    # c3: content unit exists and has a content_artifact association,
    # but no artifact (i.e. it is a non-immediate content unit)
    c3 = CookbookPackageContent(name="c3", version="1.0.0", dependencies={})

    d_c1_d_a1 = DeclarativeArtifact(
        artifact=Artifact(),
        url="http://a1",
        relative_path=c1.relative_path(),
        remote=self.remote,
    )
    d_c2_d_a2 = DeclarativeArtifact(
        artifact=Artifact(),
        url="http://a2",
        relative_path=c2.relative_path(),
        remote=self.remote,
    )
    d_c3_d_a3 = DeclarativeArtifact(
        artifact=Artifact(),
        url="http://a3",
        relative_path=c3.relative_path(),
        remote=self.remote,
    )
    batch = [
        DeclarativeContent(content=c1, d_artifacts=[d_c1_d_a1]),
        DeclarativeContent(content=c2, d_artifacts=[d_c2_d_a2]),
        DeclarativeContent(content=c3, d_artifacts=[d_c3_d_a3]),
    ]

    stage._process_batch(batch)

    self.assertEqual(batch[0].content.content_id, "1")
    self.assertEqual(batch[0].content.pk, self.c1.pk)
    self.assertEqual(batch[0].d_artifacts[0].artifact.pk, self.a1.pk)

    self.assertIsNone(batch[1].content.pk)
    self.assertTrue(batch[1].d_artifacts[0].artifact._state.adding)

    self.assertEqual(batch[2].content.pk, self.c3.pk)
    self.assertTrue(batch[2].d_artifacts[0].artifact._state.adding)

async def migrate_to_pulp3(self, batch, pb=None):
    """
    Docker-specific implementation of DeclarativeContent creation for migrating
    docker content to Pulp 3.

    Args:
        batch: A batch of Pulp2Content objects to migrate to Pulp 3
        pb: A progress bar to increment for each migrated content unit (optional)
    """
    for pulp2content in batch:
        pulp_2to3_detail_content = pulp2content.detail_model
        pulp3content = pulp_2to3_detail_content.create_pulp3_content()
        future_relations = {'pulp2content': pulp2content}

        # store digests for future pulp3 content relations
        if pulp_2to3_detail_content.type == 'docker_manifest':
            future_relations['blob_rel'] = pulp_2to3_detail_content.blobs
            future_relations['config_blob_rel'] = pulp_2to3_detail_content.config_blob
        if pulp_2to3_detail_content.type == 'docker_manifest_list':
            future_relations['man_rel'] = pulp_2to3_detail_content.listed_manifests
        if pulp_2to3_detail_content.type == 'docker_tag':
            future_relations['tag_rel'] = pulp_2to3_detail_content.tagged_manifest

        if pulp_2to3_detail_content.type == 'docker_tag':
            # dc without artifact; the artifact is assigned in the _pre_save hook
            dc = DeclarativeContent(content=pulp3content)
        else:
            artifact = await self.create_artifact(
                pulp2content.pulp2_storage_path,
                pulp_2to3_detail_content.expected_digests,
                pulp_2to3_detail_content.expected_size,
            )
            da = DeclarativeArtifact(
                artifact=artifact,
                url=NOT_USED,
                relative_path=pulp_2to3_detail_content.relative_path_for_content_artifact,
                remote=NOT_USED,
                deferred_download=False,
            )
            dc = DeclarativeContent(content=pulp3content, d_artifacts=[da], does_batch=False)

        dc.extra_data = future_relations
        await self.put(dc)
        if pb:
            pb.increment()

async def _handle_distribution(self, distribution):
    log.info(_('Downloading Release file for distribution: "{}"').format(distribution))
    # Create release_file
    if distribution[-1] == "/":
        release_file_dir = distribution.strip("/")
    else:
        release_file_dir = os.path.join("dists", distribution)
    release_file_dc = DeclarativeContent(
        content=ReleaseFile(distribution=distribution),
        d_artifacts=[
            self._to_d_artifact(os.path.join(release_file_dir, filename))
            for filename in ["Release", "InRelease", "Release.gpg"]
        ],
    )
    release_file = await self._create_unit(release_file_dc)
    if release_file is None:
        return
    # Create release object
    release_unit = Release(
        codename=release_file.codename, suite=release_file.suite, distribution=distribution
    )
    release_dc = DeclarativeContent(content=release_unit)
    release = await self._create_unit(release_dc)
    # Create release architectures
    architectures = _filter_split_architectures(
        release_file.architectures, self.remote.architectures, distribution
    )
    for architecture in architectures:
        release_architecture_dc = DeclarativeContent(
            content=ReleaseArchitecture(architecture=architecture, release=release)
        )
        await self.put(release_architecture_dc)
    # Parse release file
    log.info(_('Parsing Release file at distribution="{}"').format(distribution))
    release_artifact = await _get_main_artifact_blocking(release_file)
    release_file_dict = deb822.Release(release_artifact.file)
    # Collect file references in a new dict
    file_references = defaultdict(deb822.Deb822Dict)
    for digest_name in ["SHA512", "SHA256", "SHA1", "MD5sum"]:
        if digest_name in release_file_dict:
            for unit in release_file_dict[digest_name]:
                file_references[unit["Name"]].update(unit)
    await asyncio.gather(
        *[
            self._handle_component(
                component, release, release_file, file_references, architectures
            )
            for component in _filter_split_components(
                release_file.components, self.remote.components, distribution
            )
        ]
    )

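# A standalone illustration (not pulp_deb code; plain dicts stand in for
# python-debian's Deb822 objects, and the file names/digests are made up) of
# the file_references merge above: every digest row that names the same file
# is folded into a single record keyed by its "Name".
from collections import defaultdict

release_file_dict = {
    "SHA256": [{"Name": "main/binary-amd64/Packages", "SHA256": "aaaa", "Size": "10"}],
    "MD5sum": [{"Name": "main/binary-amd64/Packages", "MD5sum": "bbbb", "Size": "10"}],
}
file_references = defaultdict(dict)
for digest_name in ["SHA512", "SHA256", "SHA1", "MD5sum"]:
    if digest_name in release_file_dict:
        for unit in release_file_dict[digest_name]:
            file_references[unit["Name"]].update(unit)

# Both checksum rows end up merged under one file reference.
assert file_references["main/binary-amd64/Packages"] == {
    "Name": "main/binary-amd64/Packages", "SHA256": "aaaa", "Size": "10", "MD5sum": "bbbb",
}
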
def _init_dc_categories(self, comps):
    if comps.categories:
        for category in comps.categories:
            category_dict = PackageCategory.libcomps_to_dict(category)
            category_dict["digest"] = dict_digest(category_dict)
            packagecategory = PackageCategory(**category_dict)
            dc = DeclarativeContent(content=packagecategory)
            dc.extra_data = defaultdict(list)

            if packagecategory.group_ids:
                for group_id in packagecategory.group_ids:
                    self.group_to_categories[group_id["name"]].append(dc)

            self.dc_categories.append(dc)

async def _parse_packages(self, packages):
    progress_data = {
        "message": "Parsed Packages",
        "code": "sync.parsing.packages",
        "total": len(packages),
    }
    with ProgressReport(**progress_data) as packages_pb:
        while True:
            try:
                (_, pkg) = packages.popitem(last=False)
            except KeyError:
                break
            package = Package(**Package.createrepo_to_dict(pkg))
            del pkg

            artifact = Artifact(size=package.size_package)
            checksum_type = getattr(CHECKSUM_TYPES, package.checksum_type.upper())
            setattr(artifact, checksum_type, package.pkgId)
            url = urlpath_sanitize(self.data.remote_url, package.location_href)
            filename = os.path.basename(package.location_href)
            da = DeclarativeArtifact(
                artifact=artifact,
                url=url,
                relative_path=filename,
                remote=self.remote,
                deferred_download=self.deferred_download,
            )
            dc = DeclarativeContent(content=package, d_artifacts=[da])
            dc.extra_data = defaultdict(list)

            # find out if the package relates to a modulemd
            if dc.content.nevra in self.data.nevra_to_module.keys():
                dc.content.is_modular = True
                for dc_modulemd in self.data.nevra_to_module[dc.content.nevra]:
                    dc.extra_data["modulemd_relation"].append(dc_modulemd)
                    dc_modulemd.extra_data["package_relation"].append(dc)

            if dc.content.name in self.data.pkgname_to_groups.keys():
                for dc_group in self.data.pkgname_to_groups[dc.content.name]:
                    dc.extra_data["group_relations"].append(dc_group)
                    dc_group.extra_data["related_packages"].append(dc)

            packages_pb.increment()
            await self.put(dc)

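# A standalone sketch (the class and values are made up, not pulp_rpm's models)
# of the dynamic checksum assignment above: the repo metadata names the checksum
# type as a string, so the matching Artifact field is picked with setattr().
class FakeArtifact:
    sha256 = None
    sha512 = None

checksum_type = "sha256"      # e.g. the package's checksum_type from metadata
pkg_id = "deadbeef" * 8       # made-up checksum value
fake = FakeArtifact()
setattr(fake, checksum_type, pkg_id)
assert fake.sha256 == pkg_id and fake.sha512 is None
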
async def sign_collection_version(self, collection_version):
    """Signs the collection version."""

    def _extract_manifest():
        cartifact = collection_version.contentartifact_set.select_related("artifact").first()
        artifact_name = cartifact.artifact.file.name
        artifact_file = storage.open(artifact_name)
        with tarfile.open(fileobj=artifact_file, mode="r") as tar:
            manifest = get_file_obj_from_tarball(tar, "MANIFEST.json", artifact_name)
            return manifest.read()

    # Limits the number of subprocesses spawned/running at one time
    async with self.semaphore:
        # We use the manifest to create the signature
        # OpenPGP doesn't take the filename into account for signatures, not sure about others
        async with aiofiles.tempfile.NamedTemporaryFile(dir=".", mode="wb") as m:
            manifest_data = await sync_to_async(_extract_manifest)()
            await m.write(manifest_data)
            await m.flush()
            result = await self.signing_service.asign(m.name)
        async with aiofiles.open(result["signature"], "rb") as sig:
            data = await sig.read()
        cv_signature = CollectionVersionSignature(
            data=data,
            digest=hashlib.sha256(data).hexdigest(),
            signed_collection=collection_version,
            pubkey_fingerprint=self.signing_service.pubkey_fingerprint,
            signing_service=self.signing_service,
        )
        dc = DeclarativeContent(content=cv_signature)
        await self.progress_report.aincrement()
        await self.put(dc)

async def run(self):
    """Signs collections if they have not been signed with key."""
    tasks = []
    # Filter out any content that already has a signature with pubkey_fingerprint
    current_signatures = CollectionVersionSignature.objects.filter(
        pubkey_fingerprint=self.signing_service.pubkey_fingerprint
    )
    new_content = self.content.exclude(signatures__in=current_signatures)
    ntotal = await sync_to_async(new_content.count)()
    nmsg = _("Signing new CollectionVersions")
    async with ProgressReport(message=nmsg, code="sign.new.signature", total=ntotal) as p:
        self.progress_report = p
        async for collection_version in sync_to_async_iterable(new_content.iterator()):
            tasks.append(asyncio.create_task(self.sign_collection_version(collection_version)))
        await asyncio.gather(*tasks)

    # Add any signatures already present in Pulp if part of content list
    present_content = current_signatures.filter(signed_collection__in=self.content).exclude(
        pk__in=self.repos_current_signatures
    )
    ptotal = await sync_to_async(present_content.count)()
    pmsg = _("Adding present CollectionVersionSignatures")
    async with ProgressReport(message=pmsg, code="sign.present.signature", total=ptotal) as np:
        async for signature in sync_to_async_iterable(present_content.iterator()):
            await np.aincrement()
            await self.put(DeclarativeContent(content=signature))

async def _handle_translation_files(self, release_file, release_component, file_references):
    translation_dir = os.path.join(release_component.plain_component, "i18n")
    paths = [path for path in file_references.keys() if path.startswith(translation_dir)]
    translations = {}
    for path in paths:
        relative_path = os.path.join(os.path.dirname(release_file.relative_path), path)
        d_artifact = self._to_d_artifact(relative_path, file_references[path])
        key, ext = os.path.splitext(relative_path)
        if key not in translations:
            translations[key] = {"sha256": None, "d_artifacts": []}
        if not ext:
            translations[key]["sha256"] = d_artifact.artifact.sha256
        translations[key]["d_artifacts"].append(d_artifact)

    for relative_path, translation in translations.items():
        content_unit = GenericContent(sha256=translation["sha256"], relative_path=relative_path)
        await self.put(
            DeclarativeContent(content=content_unit, d_artifacts=translation["d_artifacts"])
        )

def create_blob(self, man_dc, blob_data):
    """
    Create blob.

    Args:
        man_dc (pulpcore.plugin.stages.DeclarativeContent): dc for an ImageManifest
        blob_data (dict): Data about a blob
    """
    digest = blob_data.get("digest") or blob_data.get("blobSum")
    blob_artifact = Artifact(sha256=digest[len("sha256:"):])
    blob = Blob(digest=digest, media_type=blob_data.get("mediaType", MEDIA_TYPE.REGULAR_BLOB))
    relative_url = "/v2/{name}/blobs/{digest}".format(
        name=self.remote.namespaced_upstream_name, digest=digest
    )
    blob_url = urljoin(self.remote.url, relative_url)
    da = DeclarativeArtifact(
        artifact=blob_artifact,
        url=blob_url,
        relative_path=digest,
        remote=self.remote,
        deferred_download=self.deferred_download,
    )
    blob_dc = DeclarativeContent(content=blob, d_artifacts=[da])
    return blob_dc

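# A standalone check (registry host, repository name, and digest are made up)
# of the digest handling above: the full "sha256:..." string is used for the
# Blob digest, URL, and relative_path, while Artifact(sha256=...) gets only
# the 64-character hex portion.
from urllib.parse import urljoin

digest = "sha256:" + "ab" * 32
relative_url = "/v2/{name}/blobs/{digest}".format(name="library/busybox", digest=digest)
assert digest[len("sha256:"):] == "ab" * 32
assert urljoin("https://registry.example.com", relative_url) == (
    "https://registry.example.com/v2/library/busybox/blobs/" + digest
)
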
def create_tagged_manifest(self, tag_dc, manifest_data, raw_data):
    """
    Create an Image Manifest.

    Args:
        tag_dc (pulpcore.plugin.stages.DeclarativeContent): dc for a Tag
        manifest_data (dict): Data about a single new ImageManifest.
        raw_data (str): The raw JSON representation of the ImageManifest.
    """
    media_type = manifest_data.get('mediaType', MEDIA_TYPE.MANIFEST_V1)
    if media_type in (MEDIA_TYPE.MANIFEST_V2, MEDIA_TYPE.MANIFEST_OCI):
        digest = "sha256:{digest}".format(digest=tag_dc.d_artifacts[0].artifact.sha256)
    else:
        digest = self._calculate_digest(raw_data)
    manifest = Manifest(
        digest=digest,
        schema_version=manifest_data['schemaVersion'],
        media_type=media_type,
    )
    relative_url = '/v2/{name}/manifests/{digest}'.format(
        name=self.remote.namespaced_upstream_name,
        digest=digest,
    )
    url = urljoin(self.remote.url, relative_url)
    da = DeclarativeArtifact(
        artifact=tag_dc.d_artifacts[0].artifact,
        url=url,
        relative_path=digest,
        remote=self.remote,
        extra_data={'headers': V2_ACCEPT_HEADERS},
    )
    man_dc = DeclarativeContent(content=manifest, d_artifacts=[da])
    return man_dc

def create_tag(self, saved_artifact, url):
    """
    Create `DeclarativeContent` for each tag.

    Each dc contains enough information to be downloaded by an ArtifactDownload Stage.

    Args:
        saved_artifact (Artifact): Previously saved artifact for the tag
        url (str): The tag URL; the tag name is its last path component

    Returns:
        pulpcore.plugin.stages.DeclarativeContent: A Tag DeclarativeContent object
    """
    tag_name = url.split('/')[-1]
    relative_url = '/v2/{name}/manifests/{tag}'.format(
        name=self.remote.namespaced_upstream_name,
        tag=tag_name,
    )
    url = urljoin(self.remote.url, relative_url)
    tag = Tag(name=tag_name)
    da = DeclarativeArtifact(
        artifact=saved_artifact,
        url=url,
        relative_path=tag_name,
        remote=self.remote,
        extra_data={'headers': V2_ACCEPT_HEADERS},
    )
    tag_dc = DeclarativeContent(content=tag, d_artifacts=[da])
    return tag_dc

def generate_installer_file_index(component, architecture):
    nonlocal release
    nonlocal file_references
    installer_file_index_dir = os.path.join(
        os.path.basename(component),
        "installer-{}".format(architecture),
        "current",
        "images",
    )
    log.info("Downloading installer files from {}".format(installer_file_index_dir))
    d_artifacts = []
    for filename in InstallerFileIndex.FILE_ALGORITHM.keys():
        path = os.path.join(installer_file_index_dir, filename)
        if path in file_references:
            d_artifacts.append(to_d_artifact(file_references.pop(path)))
    if not d_artifacts:
        return
    content_unit = InstallerFileIndex(
        release=release,
        component=component,
        architecture=architecture,
        sha256=d_artifacts[0].artifact.sha256,
        relative_path=os.path.join(
            os.path.dirname(release.relative_path), installer_file_index_dir
        ),
    )
    d_content = DeclarativeContent(
        content=content_unit, d_artifacts=d_artifacts, does_batch=False
    )
    yield d_content

async def run(self):
    """
    Build and emit `DeclarativeContent` from the ansible metadata.
    """
    async with ProgressReport(
        message="Parsing Role Metadata", code="sync.parsing.metadata"
    ) as pb:
        async for metadata in self._fetch_roles():
            for version in metadata["summary_fields"]["versions"]:
                url = GITHUB_URL % (
                    metadata["github_user"],
                    metadata["github_repo"],
                    version["name"],
                )
                role = Role(
                    version=version["name"],
                    name=metadata["name"],
                    namespace=metadata["namespace"],
                )
                relative_path = "%s/%s/%s.tar.gz" % (
                    metadata["namespace"],
                    metadata["name"],
                    version["name"],
                )
                d_artifact = DeclarativeArtifact(
                    artifact=Artifact(),
                    url=url,
                    relative_path=relative_path,
                    remote=self.remote,
                    deferred_download=self.deferred_download,
                )
                d_content = DeclarativeContent(content=role, d_artifacts=[d_artifact])
                await pb.aincrement()
                await self.put(d_content)

async def run(self):
    """
    Build and emit `DeclarativeContent` from the Manifest data.
    """
    deferred_download = (self.remote.policy != Remote.IMMEDIATE)  # Interpret download policy
    with ProgressBar(message='Downloading Metadata') as pb:
        parsed_url = urlparse(self.remote.url)
        root_dir = os.path.dirname(parsed_url.path)
        downloader = self.remote.get_downloader(url=self.remote.url)
        result = await downloader.run()
        pb.increment()

    with ProgressBar(message='Parsing Metadata') as pb:
        manifest = Manifest(result.path)
        for entry in manifest.read():
            path = os.path.join(root_dir, entry.relative_path)
            url = urlunparse(parsed_url._replace(path=path))
            file = FileContent(relative_path=entry.relative_path, digest=entry.digest)
            artifact = Artifact(size=entry.size, sha256=entry.digest)
            da = DeclarativeArtifact(
                artifact=artifact,
                url=url,
                relative_path=entry.relative_path,
                remote=self.remote,
                deferred_download=deferred_download,
            )
            dc = DeclarativeContent(content=file, d_artifacts=[da])
            pb.increment()
            await self.put(dc)

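# A hedged sketch of how a first stage like the one above is typically driven
# (the stage and variable names here are illustrative assumptions, not taken
# from this file): pulpcore's DeclarativeVersion wires the first stage into
# the full stages pipeline and runs it against a repository.
#
# from pulpcore.plugin.stages import DeclarativeVersion
#
# first_stage = FileFirstStage(remote)
# DeclarativeVersion(first_stage, repository).create()
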
async def create_pending_blob(self, man_dc, blob_data, out_q):
    """
    Create a pending blob from a layer in the ImageManifest.

    Args:
        man_dc (pulpcore.plugin.stages.DeclarativeContent): dc for an ImageManifest
        blob_data (dict): Data about a single new blob.
        out_q (asyncio.Queue): Queue to put created blob dcs.
    """
    digest = blob_data['digest']
    blob_artifact = Artifact(sha256=digest[len("sha256:"):])
    blob = ManifestBlob(
        digest=digest,
        media_type=blob_data['mediaType'],
    )
    relative_url = '/v2/{name}/blobs/{digest}'.format(
        name=self.remote.namespaced_upstream_name,
        digest=digest,
    )
    blob_url = urljoin(self.remote.url, relative_url)
    da = DeclarativeArtifact(
        artifact=blob_artifact,
        url=blob_url,
        relative_path=digest,
        remote=self.remote,
        extra_data={'headers': V2_ACCEPT_HEADERS},
    )
    blob_dc = DeclarativeContent(
        content=blob,
        d_artifacts=[da],
    )
    return blob_dc

async def create_pending_manifest(self, list_dc, manifest_data, out_q):
    """
    Create a pending manifest from manifest data in a ManifestList.

    Args:
        list_dc (pulpcore.plugin.stages.DeclarativeContent): dc for a ManifestList
        manifest_data (dict): Data about a single new ImageManifest.
        out_q (asyncio.Queue): Queue to put created ImageManifest dcs.
    """
    digest = manifest_data['digest']
    relative_url = '/v2/{name}/manifests/{digest}'.format(
        name=self.remote.namespaced_upstream_name,
        digest=digest,
    )
    manifest_url = urljoin(self.remote.url, relative_url)
    manifest_artifact = Artifact(sha256=digest[len("sha256:"):])
    da = DeclarativeArtifact(
        artifact=manifest_artifact,
        url=manifest_url,
        relative_path=digest,
        remote=self.remote,
        extra_data={'headers': V2_ACCEPT_HEADERS},
    )
    manifest = ImageManifest(
        digest=digest,
        schema_version=2,
        media_type=manifest_data['mediaType'],
    )
    man_dc = DeclarativeContent(
        content=manifest,
        d_artifacts=[da],
        extra_data={'relation': list_dc},
    )
    await out_q.put(man_dc)

async def run(self):
    """
    Build and emit `DeclarativeContent` from the Manifest data.

    If a cookbook specifier is set in the remote, cookbooks are filtered
    using this specifier.
    """
    with ProgressBar(message="Downloading Metadata", total=1) as pb:
        downloader = self.remote.get_downloader(url=urljoin(self.remote.url + "/", "universe"))
        result = await downloader.run()
        pb.increment()

    cookbook_names = self.remote.specifier_cookbook_names()

    with ProgressBar(message="Parsing Metadata") as pb:
        universe = Universe(result.path)
        for entry in universe.read():
            if cookbook_names and entry.name not in cookbook_names:
                continue
            cookbook = CookbookPackageContent(
                name=entry.name,
                version=entry.version,
                dependencies=entry.dependencies,
            )
            artifact = Artifact()
            da = DeclarativeArtifact(
                artifact=artifact,
                url=entry.download_url,
                relative_path=cookbook.relative_path(),
                remote=self.remote,
                deferred_download=not self.download_artifacts,
            )
            dc = DeclarativeContent(content=cookbook, d_artifacts=[da])
            pb.increment()
            await self.put(dc)

async def run(self):
    """
    Build and emit `DeclarativeContent` from the ansible metadata.
    """
    with ProgressReport(
        message="Parsing Collection Metadata", code="parsing.metadata"
    ) as pb:
        async for metadata in self._fetch_collections():
            url = metadata["download_url"]
            collection_version = CollectionVersion(
                namespace=metadata["namespace"]["name"],
                name=metadata["collection"]["name"],
                version=metadata["version"],
            )
            artifact = metadata["artifact"]
            d_artifact = DeclarativeArtifact(
                artifact=Artifact(sha256=artifact["sha256"], size=artifact["size"]),
                url=url,
                relative_path=collection_version.relative_path,
                remote=self.remote,
                deferred_download=self.deferred_download,
            )
            d_content = DeclarativeContent(
                content=collection_version, d_artifacts=[d_artifact]
            )
            pb.increment()
            await self.put(d_content)

def generate_package_index(component, architecture, infix=""):
    nonlocal release
    nonlocal file_references
    package_index_dir = os.path.join(
        os.path.basename(component), infix, "binary-{}".format(architecture)
    )
    log.info("Downloading: {}/Packages".format(package_index_dir))
    d_artifacts = []
    for filename in ["Packages", "Packages.gz", "Packages.xz", "Release"]:
        path = os.path.join(package_index_dir, filename)
        if path in file_references:
            d_artifacts.append(to_d_artifact(file_references.pop(path)))
    if not d_artifacts:
        return
    content_unit = PackageIndex(
        release=release,
        component=component,
        architecture=architecture,
        sha256=d_artifacts[0].artifact.sha256,
        relative_path=os.path.join(
            os.path.dirname(release.relative_path), package_index_dir, "Packages"
        ),
    )
    d_content = DeclarativeContent(
        content=content_unit, d_artifacts=d_artifacts, does_batch=False
    )
    yield d_content

def create_blob(self, blob_data, deferred_download=True):
    """
    Create blob.

    Args:
        blob_data (dict): Data about a blob
        deferred_download (bool): Whether the blob download may be deferred.
            The config blob is downloaded immediately, regardless of the
            remote's settings.
    """
    digest = blob_data.get("digest") or blob_data.get("blobSum")
    blob_artifact = Artifact(sha256=digest[len("sha256:"):])
    blob = Blob(digest=digest, media_type=blob_data.get("mediaType", MEDIA_TYPE.REGULAR_BLOB))
    relative_url = "/v2/{name}/blobs/{digest}".format(
        name=self.remote.namespaced_upstream_name, digest=digest
    )
    blob_url = urljoin(self.remote.url, relative_url)
    da = DeclarativeArtifact(
        artifact=blob_artifact,
        url=blob_url,
        relative_path=digest,
        remote=self.remote,
        deferred_download=deferred_download and self.deferred_download,
    )
    blob_dc = DeclarativeContent(content=blob, d_artifacts=[da])
    return blob_dc

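# A standalone truth-table check (plain booleans, no plugin objects) of the
# deferral logic above: a blob's download is deferred only when both the
# caller allows it and the remote's policy is non-immediate, so a config
# blob passed with deferred_download=False is always fetched right away.
cases = {
    # (deferred_download argument, remote defers) -> effective deferral
    (True, True): True,
    (True, False): False,
    (False, True): False,  # config blob: downloaded immediately
    (False, False): False,
}
for (requested, remote_defers), expected in cases.items():
    assert (requested and remote_defers) == expected
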
def generate_translation_files(component):
    nonlocal release
    nonlocal file_references
    translation_dir = os.path.join(os.path.basename(component), "i18n")
    paths = [path for path in file_references.keys() if path.startswith(translation_dir)]
    translations = {}
    for path in paths:
        d_artifact = to_d_artifact(file_references.pop(path))
        key, ext = os.path.splitext(path)
        if key not in translations:
            translations[key] = {"sha256": None, "d_artifacts": []}
        if not ext:
            translations[key]["sha256"] = d_artifact.artifact.sha256
        translations[key]["d_artifacts"].append(d_artifact)

    for path, translation in translations.items():
        content_unit = GenericContent(
            sha256=translation["sha256"],
            relative_path=os.path.join(os.path.dirname(release.relative_path), path),
        )
        d_content = DeclarativeContent(
            content=content_unit, d_artifacts=translation["d_artifacts"]
        )
        yield d_content

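# A standalone demonstration (file names are made up) of the os.path.splitext
# grouping used above: compressed and uncompressed variants of one translation
# file share a key, and only the extensionless entry carries the sha256 that
# identifies the GenericContent unit.
import os.path

paths = ["main/i18n/Translation-en", "main/i18n/Translation-en.gz", "main/i18n/Translation-de.xz"]
translations = {}
for path in paths:
    key, ext = os.path.splitext(path)
    entry = translations.setdefault(key, {"has_uncompressed": False, "files": []})
    if not ext:
        entry["has_uncompressed"] = True
    entry["files"].append(path)

assert sorted(translations) == ["main/i18n/Translation-de", "main/i18n/Translation-en"]
assert translations["main/i18n/Translation-en"]["has_uncompressed"] is True
assert translations["main/i18n/Translation-de"]["has_uncompressed"] is False
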
async def _find_all_collections_from_unpaginated_data(self):
    tasks = []
    loop = asyncio.get_event_loop()
    for collection_namespace_dict in self._unpaginated_collection_metadata.values():
        for collection in collection_namespace_dict.values():
            if collection["deprecated"]:
                d_content = DeclarativeContent(
                    content=AnsibleCollectionDeprecated(
                        namespace=collection["namespace"], name=collection["name"]
                    ),
                )
                self.deprecation_after_sync.add(
                    f"{collection['namespace']}.{collection['name']}"
                )
                await self.put(d_content)

    for collections_in_namespace in self._unpaginated_collection_version_metadata.values():
        for collection_versions in collections_in_namespace.values():
            for collection_version in collection_versions:
                collection_version_url = urljoin(
                    self.remote.url, f"{collection_version['href']}"
                )
                tasks.append(
                    loop.create_task(
                        self._add_collection_version(
                            self._api_version, collection_version_url, collection_version
                        )
                    )
                )
    await asyncio.gather(*tasks)

def create_tagged_manifest_list(self, tag_dc, manifest_list_data):
    """
    Create a ManifestList.

    Args:
        tag_dc (pulpcore.plugin.stages.DeclarativeContent): dc for a Tag
        manifest_list_data (dict): Data about a ManifestList
    """
    digest = "sha256:{digest}".format(digest=tag_dc.d_artifacts[0].artifact.sha256)
    relative_url = '/v2/{name}/manifests/{digest}'.format(
        name=self.remote.namespaced_upstream_name,
        digest=digest,
    )
    url = urljoin(self.remote.url, relative_url)
    manifest_list = Manifest(
        digest=digest,
        schema_version=manifest_list_data['schemaVersion'],
        media_type=manifest_list_data['mediaType'],
    )
    da = DeclarativeArtifact(
        artifact=tag_dc.d_artifacts[0].artifact,
        url=url,
        relative_path=digest,
        remote=self.remote,
        extra_data={'headers': V2_ACCEPT_HEADERS},
    )
    list_dc = DeclarativeContent(content=manifest_list, d_artifacts=[da])
    return list_dc

async def __call__(self, in_q, out_q):
    """
    Build and emit `DeclarativeContent` from the Manifest data.

    Args:
        in_q (asyncio.Queue): Unused because the first stage doesn't read from an input queue.
        out_q (asyncio.Queue): The out_q to send `DeclarativeContent` objects to
    """
    with ProgressBar(message='Downloading Metadata') as pb:
        parsed_url = urlparse(self.remote.url)
        root_dir = os.path.dirname(parsed_url.path)
        downloader = self.remote.get_downloader(self.remote.url)
        result = await downloader.run()
        pb.increment()

    with ProgressBar(message='Parsing Metadata') as pb:
        manifest = Manifest(result.path)
        for entry in manifest.read():
            path = os.path.join(root_dir, entry.relative_path)
            url = urlunparse(parsed_url._replace(path=path))
            file = FileContent(relative_path=entry.relative_path, digest=entry.digest)
            artifact = Artifact(size=entry.size, sha256=entry.digest)
            da = DeclarativeArtifact(artifact, url, entry.relative_path, self.remote)
            dc = DeclarativeContent(content=file, d_artifacts=[da])
            pb.increment()
            await out_q.put(dc)
    await out_q.put(None)

def create_blob(self, man_dc, blob_data):
    """
    Create blob.

    Args:
        man_dc (pulpcore.plugin.stages.DeclarativeContent): dc for an ImageManifest
        blob_data (dict): Data about a blob
    """
    digest = blob_data.get('digest') or blob_data.get('blobSum')
    blob_artifact = Artifact(sha256=digest[len("sha256:"):])
    blob = Blob(
        digest=digest,
        media_type=blob_data.get('mediaType', MEDIA_TYPE.REGULAR_BLOB),
    )
    relative_url = '/v2/{name}/blobs/{digest}'.format(
        name=self.remote.namespaced_upstream_name,
        digest=digest,
    )
    blob_url = urljoin(self.remote.url, relative_url)
    da = DeclarativeArtifact(
        artifact=blob_artifact,
        url=blob_url,
        relative_path=digest,
        remote=self.remote,
        extra_data={'headers': V2_ACCEPT_HEADERS},
        deferred_download=self.deferred_download,
    )
    blob_dc = DeclarativeContent(
        content=blob,
        d_artifacts=[da],
    )
    return blob_dc

def queue_dc(self, delays=[], artifact_path=None):
    """Put a DeclarativeContent instance into `in_q`.

    For each `delay` in `delays`, associate a DeclarativeArtifact with download
    duration `delay` to the content unit. `delay == None` means that the artifact
    is already present (pk is set) and no download is required.
    `artifact_path != None` means that the Artifact already has a file associated
    with it and a download does not need to be scheduled.
    """
    das = []
    for delay in delays:
        artifact = mock.Mock()
        artifact.pk = uuid4()
        artifact._state.adding = delay is not None
        artifact.DIGEST_FIELDS = []
        artifact.file = artifact_path
        remote = mock.Mock()
        remote.get_downloader = DownloaderMock
        das.append(
            DeclarativeArtifact(
                artifact=artifact, url=str(delay), relative_path="path", remote=remote
            )
        )
    dc = DeclarativeContent(content=mock.Mock(), d_artifacts=das)
    self.in_q.put_nowait(dc)

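# A hedged usage sketch for queue_dc (the test method and expectations below
# are assumptions, not taken from the suite): one artifact is already present
# (delay=None) and one takes 0.5 "seconds" to download via DownloaderMock, so
# only the second should result in a scheduled download.
#
# def test_mixed_artifacts(self):
#     self.queue_dc(delays=[None, 0.5])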