def handle_uploaded_unit(repo, type_id, unit_key, metadata, file_path, conduit):
    """
    Handles an upload unit request to the importer. This call is responsible
    for moving the unit from its temporary location where Pulp stored the
    upload to the final storage location (as dictated by Pulp) for the unit.
    This call will also update the database in Pulp to reflect the unit and
    its association to the repository.

    :param repo: repository into which the unit is being uploaded
    :type  repo: pulp.plugins.model.Repository
    :param type_id: type of unit being uploaded
    :type  type_id: str
    :param unit_key: unique identifier for the unit
    :type  unit_key: dict
    :param metadata: extra data about the unit
    :type  metadata: dict
    :param file_path: temporary location of the uploaded file
    :type  file_path: str
    :param conduit: for calls back into Pulp
    :type  conduit: pulp.plugins.conduit.upload.UploadConduit

    :raise NotImplementedError: if type_id is anything other than the deb type
    """
    # Only deb packages can be uploaded through this importer.
    if type_id != constants.TYPE_DEB:
        raise NotImplementedError()

    # Create a deb model out of the uploaded unit key combined with metadata
    combined = copy.copy(unit_key)
    combined.update(metadata)
    deb = Package.from_dict(combined)

    # Extract the metadata from the uploaded package file.
    # BUGFIX(review): the original code passed the undefined name `package`
    # here, which raised NameError; the package object built above is `deb`.
    metadata_parser.extract_metadata(deb, file_path, repo.working_dir)

    # Create the Pulp unit from the deb model's key/metadata
    type_id = constants.TYPE_DEB
    unit_key = deb.unit_key()
    unit_metadata = deb.unit_metadata()
    relative_path = constants.STORAGE_DEB_RELATIVE_PATH % deb.filename()
    unit = conduit.init_unit(type_id, unit_key, unit_metadata, relative_path)

    # Copy from the upload temporary location into where Pulp wants it to live
    shutil.copy(file_path, unit.storage_path)

    # Save the unit into the destination repository
    conduit.save_unit(unit)
def _do_import_packages(self):
    """
    Actual logic of the import. This method will do a best effort per
    package; if an individual package fails it will be recorded and the
    import will continue. This method will only raise an exception in an
    extreme case where it cannot react and continue.
    """
    downloader = self._create_downloader()

    # Ease lookup of packages by their unit key
    packages_by_key = dict((p.key, p) for p in self.dist.packages)

    # Collect information about the repository's packages before changing it
    package_criteria = UnitAssociationCriteria(type_ids=[constants.TYPE_DEB])
    existing_units = self.sync_conduit.get_units(criteria=package_criteria)
    existing_packages = [Package.from_unit(u) for u in existing_units]
    existing_package_keys = [p.key for p in existing_packages]

    new_unit_keys = self._resolve_new_units(existing_package_keys,
                                            packages_by_key.keys())
    remove_unit_keys = self._resolve_remove_units(existing_package_keys,
                                                  packages_by_key.keys())

    # Once we know how many things need to be processed, we can update the
    # progress report
    self.progress_report.packages_total_count = len(new_unit_keys)
    self.progress_report.packages_finished_count = 0
    self.progress_report.packages_error_count = 0
    self.progress_report.update_progress()

    # Add new units, recording (not raising) per-package failures so one bad
    # package does not abort the rest of the import.
    for key in new_unit_keys:
        package = packages_by_key[key]
        try:
            self._add_new_package(downloader, package)
            self.progress_report.packages_finished_count += 1
        # NOTE(review): replaced the Python 2-only "except Exception, e"
        # syntax with "except ... as", valid on Python 2.6+ and Python 3.
        except Exception as e:
            self.progress_report.add_failed_package(package, e,
                                                    sys.exc_info()[2])
        self.progress_report.update_progress()