def __init__(self, repo, publish_conduit, config):
    """
    Set up the v1 web publishing step tree.

    :param repo: Pulp managed Yum repository
    :type repo: pulp.plugins.model.Repository
    :param publish_conduit: Conduit providing access to relative Pulp functionality
    :type publish_conduit: pulp.plugins.conduits.repo_publish.RepoPublishConduit
    :param config: Pulp configuration for the distributor
    :type config: pulp.plugins.config.PluginCallConfiguration
    """
    super(WebPublisher, self).__init__(
        step_type=constants.PUBLISH_STEP_WEB_PUBLISHER, repo=repo,
        publish_conduit=publish_conduit, config=config)
    api_version = 'v1'
    # Final locations the atomic publish step will expose.
    web_publish_dir = configuration.get_web_publish_dir(repo, config, api_version)
    redirect_file = configuration.get_redirect_file_name(repo)
    redirect_destination = os.path.join(
        configuration.get_app_publish_dir(config, api_version), redirect_file)
    master_publish_dir = configuration.get_master_publish_dir(repo, config, api_version)
    # Scratch space for this publish run.
    self.working_dir = os.path.join(self.get_working_dir(), api_version)
    misc.mkdir(self.working_dir)
    self.web_working_dir = os.path.join(self.get_working_dir(), 'web')
    atomic_publish_step = AtomicDirectoryPublishStep(
        self.get_working_dir(),
        [('web', web_publish_dir), (redirect_file, redirect_destination)],
        master_publish_dir,
        step_type=constants.PUBLISH_STEP_OVER_HTTP)
    atomic_publish_step.description = _('Making v1 files available via web.')
    # Publish the images first, then atomically move everything into place.
    self.add_child(PublishImagesStep())
    self.add_child(atomic_publish_step)
def process_main(self, item=None):
    """
    Build the DownloadRequests for every binary package that still needs to
    be fetched, and record a dest-path -> unit mapping for the download step.

    :param item: unused; present to satisfy the step interface
    """
    # os.path.join() with a single argument was a no-op; use the path directly.
    wdir = self.get_working_dir()
    # Index the units still to be downloaded by their SHA256 checksum.
    csums_to_download = dict(
        (u.checksum, u)
        for u in self.parent.step_local_units.units_to_download)
    repometa = self.parent.apt_repo_meta
    reqs = []
    # upstream_url points to the dist itself, dists/stable; strip the trailing
    # slash and climb two levels to reach the repository root.
    upstream_url = repometa.upstream_url.rstrip('/')
    upstream_url = os.path.dirname(os.path.dirname(upstream_url))
    step_download_units = self.parent.step_download_units
    step_download_units.path_to_unit = dict()
    for ca in repometa.iter_component_arch_binaries():
        dest_dir = os.path.join(wdir, "packages", ca.component)
        misc.mkdir(dest_dir)
        for pkg in ca.iter_packages():
            unit = csums_to_download.get(pkg['SHA256'])
            if not unit:
                # Not scheduled for download (already available locally).
                continue
            url = os.path.join(upstream_url, pkg['Filename'])
            dest = os.path.join(dest_dir, os.path.basename(url))
            reqs.append(DownloadRequest(url, dest))
            step_download_units.path_to_unit[dest] = unit
    step_download_units._downloads = reqs
def __init__(self, repo, conduit, config, working_dir=None, **kwargs):
    """
    :param repo: The repository being published.
    :type repo: pulp.plugins.model.Repository
    :param conduit: Conduit providing access to relative Pulp functionality
    :type conduit: pulp.plugins.conduits.repo_publish.RepoPublishConduit
    :param config: Pulp configuration for the distributor
    :type config: pulp.plugins.config.PluginCallConfiguration
    :param working_dir: The temp directory this step should use for processing.
    :type working_dir: str
    """
    super(WebPublisher, self).__init__(step_type=constants.PUBLISH_STEP_WEB_PUBLISHER,
                                       repo=repo, conduit=conduit, config=config,
                                       working_dir=working_dir,
                                       plugin_type=constants.WEB_DISTRIBUTOR_TYPE_ID,
                                       **kwargs)
    self.publish_dir = os.path.join(self.get_working_dir(), repo.id)
    # Content is staged under the working dir, then swapped into place atomically.
    web_dir = configuration.get_web_publish_dir(repo.repo_obj, config)
    master_dir = configuration.get_master_publish_dir(repo.repo_obj, config)
    atomic_publish = AtomicDirectoryPublishStep(
        self.get_working_dir(), [(repo.id, web_dir)], master_dir,
        step_type=constants.PUBLISH_STEP_OVER_HTTP)
    atomic_publish.description = _('Making files available via web.')
    self.add_child(MainStep(config=config))
    self.add_child(atomic_publish)
    mkdir(self.publish_dir)
def __init__(self, repo, publish_conduit, config):
    """
    Initialize the V2WebPublisher.

    :param repo: Pulp managed Yum repository
    :type repo: pulp.plugins.model.Repository
    :param publish_conduit: Conduit providing access to relative Pulp functionality
    :type publish_conduit: pulp.plugins.conduits.repo_publish.RepoPublishConduit
    :param config: Pulp configuration for the distributor
    :type config: pulp.plugins.config.PluginCallConfiguration
    """
    super(V2WebPublisher, self).__init__(constants.PUBLISH_STEP_WEB_PUBLISHER, repo,
                                         publish_conduit, config)
    # Map tags we've seen to the "newest" manifests that go with them
    self.tags = {}
    api_version = 'v2'
    web_publish_dir = configuration.get_web_publish_dir(repo, config, api_version)
    redirect_file = configuration.get_redirect_file_name(repo)
    redirect_destination = os.path.join(
        configuration.get_app_publish_dir(config, api_version), redirect_file)
    master_dir = configuration.get_master_publish_dir(repo, config, api_version)
    # Per-run scratch directories.
    self.working_dir = os.path.join(self.get_working_dir(), api_version)
    misc.mkdir(self.working_dir)
    self.web_working_dir = os.path.join(self.get_working_dir(), 'web')
    atomic_step = publish_step.AtomicDirectoryPublishStep(
        self.get_working_dir(),
        [('', web_publish_dir), (redirect_file, redirect_destination)],
        master_dir,
        step_type=constants.PUBLISH_STEP_OVER_HTTP)
    atomic_step.description = _('Making v2 files available via web.')
    # Child order matters: blobs, manifests, tags, atomic move, redirect file.
    self.add_child(PublishBlobsStep())
    self.publish_manifests_step = PublishManifestsStep()
    self.add_child(self.publish_manifests_step)
    self.add_child(PublishTagsStep())
    self.add_child(atomic_step)
    self.add_child(RedirectFileStep(redirect_destination))
def __init__(self, repo, conduit, config, working_dir=None, **kwargs):
    """
    :param repo: The repository being published.
    :type repo: pulp.plugins.model.Repository
    :param conduit: Conduit providing access to relative Pulp functionality
    :type conduit: pulp.plugins.conduits.repo_publish.RepoPublishConduit
    :param config: Pulp configuration for the distributor
    :type config: pulp.plugins.config.PluginCallConfiguration
    :param working_dir: The temp directory this step should use for processing.
    :type working_dir: str
    """
    super(WebPublisher, self).__init__(
        step_type=constants.PUBLISH_STEP_WEB_PUBLISHER, repo=repo, conduit=conduit,
        config=config, working_dir=working_dir,
        plugin_type=constants.WEB_DISTRIBUTOR_TYPE_ID, **kwargs)
    self.publish_dir = os.path.join(self.get_working_dir(), repo.id)
    mkdir(self.publish_dir)
    # Stage everything under the working dir, then flip it into place.
    atomic_step = AtomicDirectoryPublishStep(
        self.get_working_dir(),
        [(repo.id, configuration.get_web_publish_dir(repo.repo_obj, config))],
        configuration.get_master_publish_dir(repo.repo_obj, config),
        step_type=constants.PUBLISH_STEP_OVER_HTTP)
    atomic_step.description = _('Making files available via web.')
    self.add_child(MainStep())
    self.add_child(atomic_step)
def open(self):
    """
    Open the shared storage, creating the backing directories as needed.
    """
    for path in (self.content_dir, self.links_dir):
        misc.mkdir(path)
def process_main(self):
    """
    Publish the JSON redirect file (format version 4) for Crane.
    """
    repo = self.get_repo()
    config = self.get_config()
    payload = {
        'type': 'pulp-docker-redirect',
        'version': 4,
        'repository': repo.id,
        'repo-registry-id': configuration.get_repo_registry_id(repo, config),
        'url': configuration.get_redirect_url(config, repo, 'v2'),
        'protected': config.get('protected', False),
        'schema2_data': list(self.redirect_data[2]),
        'manifest_list_data': list(self.redirect_data['list']),
        'manifest_list_amd64_tags': self.redirect_data['amd64']
    }
    misc.mkdir(os.path.dirname(self.app_publish_location))
    with open(self.app_publish_location, 'w') as app_file:
        app_file.write(json.dumps(payload))
def __init__(self, repo, conduit, config):
    """
    :param repo: Pulp managed Yum repository
    :type repo: pulp.plugins.model.Repository
    :param conduit: Conduit providing access to relative Pulp functionality
    :type conduit: pulp.plugins.conduits.repo_publish.RepoPublishConduit
    :param config: Pulp configuration for the distributor
    :type config: pulp.plugins.config.PluginCallConfiguration
    """
    super(WebPublisher, self).__init__(constants.PUBLISH_STEP_WEB_PUBLISHER, repo,
                                       conduit, config)
    self.web_working_dir = os.path.join(self.get_working_dir(), repo.id)
    # Build first, then expose the result atomically over HTTP.
    atomic_publish = AtomicDirectoryPublishStep(
        self.get_working_dir(),
        [(repo.id, configuration.get_web_publish_dir(repo, config))],
        configuration.get_master_publish_dir(repo, config),
        step_type=constants.PUBLISH_STEP_OVER_HTTP)
    atomic_publish.description = _('Making files available via web.')
    self.add_child(MainStep())
    self.add_child(atomic_publish)
    mkdir(self.web_working_dir)
def generate_download_requests(self):
    """
    generator that yields DownloadRequests for needed units.

    :return: generator of DownloadRequest instances
    :rtype: collections.Iterable[DownloadRequest]
    """
    download_url = self.get_config().get("download-root")
    for deb_unit in self.step_get_local_units.units_to_download:
        key_hash = deb_unit.unit_key_hash
        # Don't save all the units in one directory as there could be 50k + units
        hash_dir = generate_internal_storage_path(self.deb_data[key_hash]['file_name'])
        # make sure the download directory exists
        dest_dir = os.path.join(self.working_dir, hash_dir)
        download_dir = os.path.dirname(dest_dir)
        misc.mkdir(download_dir)
        file_path = self.deb_data[key_hash]['file_path']
        packages_url = urlparse.urljoin(download_url, file_path)
        # check that the package is not already saved on local disk
        # NOTE(review): hash_dir is already an internal storage path, so applying
        # generate_internal_storage_path() to it a second time looks suspicious —
        # confirm against the storage layout before relying on this check.
        # NOTE(review): the /var/lib/pulp prefix is hard-coded; presumably it
        # should come from the server storage_dir configuration — verify.
        test_path = "/var/lib/pulp/content/deb/" + generate_internal_storage_path(hash_dir)
        if os.path.exists(test_path):
            continue
        yield DownloadRequest(packages_url, dest_dir)
def __init__(self, repo, conduit, config):
    """
    :param repo: Pulp managed Yum repository
    :type repo: pulp.plugins.model.Repository
    :param conduit: Conduit providing access to relative Pulp functionality
    :type conduit: pulp.plugins.conduits.repo_publish.RepoPublishConduit
    :param config: Pulp configuration for the distributor
    :type config: pulp.plugins.config.PluginCallConfiguration
    """
    super(WebPublisher, self).__init__(
        constants.PUBLISH_STEP_WEB_PUBLISHER, repo, conduit, config)
    self.web_working_dir = os.path.join(self.get_working_dir(), repo.id)
    mkdir(self.web_working_dir)
    http_dir = configuration.get_web_publish_dir(repo, config)
    master_dir = configuration.get_master_publish_dir(repo, config)
    # The main step builds the content; the atomic step swaps it live.
    atomic_publish = AtomicDirectoryPublishStep(
        self.get_working_dir(), [(repo.id, http_dir)], master_dir,
        step_type=constants.PUBLISH_STEP_OVER_HTTP)
    atomic_publish.description = _('Making files available via web.')
    self.add_child(MainStep())
    self.add_child(atomic_publish)
def migrate(plan, unit_id, path, new_path):
    """
    Migrate the unit by moving its content and then updating the DB record.

    :param plan: A plan object.
    :type plan: Plan
    :param unit_id: A unit UUID.
    :type unit_id: str
    :param path: The current storage path.
    :type path: str
    :param new_path: The new storage path.
    :type new_path: str
    """
    if not os.path.exists(path):
        # Nothing on disk to migrate; leave the record untouched.
        return
    mkdir(os.path.dirname(new_path))
    shutil.move(path, new_path)
    plan.collection.update_one(
        filter={'_id': unit_id},
        update={'$set': {'_storage_path': new_path}})
def __init__(self, repo, publish_conduit, config):
    """
    :param repo: Pulp managed Yum repository
    :type repo: pulp.plugins.model.Repository
    :param publish_conduit: Conduit providing access to relative Pulp functionality
    :type publish_conduit: pulp.plugins.conduits.repo_publish.RepoPublishConduit
    :param config: Pulp configuration for the distributor
    :type config: pulp.plugins.config.PluginCallConfiguration
    """
    super(WebPublisher, self).__init__(constants.PUBLISH_STEP_WEB_PUBLISHER, repo,
                                       publish_conduit, config)
    docker_api_version = "v1"
    # Scratch directories for this publish run.
    self.working_dir = os.path.join(self.get_working_dir(), docker_api_version)
    misc.mkdir(self.working_dir)
    self.web_working_dir = os.path.join(self.get_working_dir(), "web")
    # Locations that will be served once the publish completes.
    app_file = configuration.get_redirect_file_name(repo)
    publish_locations = [
        ("web", configuration.get_web_publish_dir(repo, config, docker_api_version)),
        (app_file,
         os.path.join(configuration.get_app_publish_dir(config, docker_api_version),
                      app_file)),
    ]
    atomic_step = AtomicDirectoryPublishStep(
        self.get_working_dir(), publish_locations,
        configuration.get_master_publish_dir(repo, config, docker_api_version),
        step_type=constants.PUBLISH_STEP_OVER_HTTP)
    atomic_step.description = _("Making v1 files available via web.")
    self.add_child(PublishImagesStep())
    self.add_child(atomic_step)
def rsync(self):
    """
    This method formulates the rsync command based on parameters passed in
    to the __init__ and then executes it.

    :return: (boolean indicating success or failure, str made up of stdout and
             stderr generated by rsync command)
    :rtype: tuple
    """
    if not self.file_list and not self.delete:
        return (True, _("Nothing to sync"))
    misc.mkdir(self.src_directory)
    output = ""
    list_of_files = os.path.join(self.get_working_dir(), str(uuid.uuid4()))
    # Use a context manager so the file list is flushed and closed before
    # rsync reads it; the previous code left the handle dangling.
    with open(list_of_files, 'w') as file_list_fp:
        file_list_fp.write("\n".join(sorted(self.file_list)))
    # copy files here, not symlinks
    (is_successful, this_output) = self.remote_mkdir(self.dest_directory)
    if not is_successful:
        params = {'directory': self.dest_directory, 'output': this_output}
        _logger.error(
            _("Cannot create directory %(directory)s: %(output)s") % params)
        return (is_successful, this_output)
    output += this_output
    rsync_args = self.make_rsync_args(list_of_files, self.src_directory,
                                      self.dest_directory, self.exclude)
    (is_successful, this_output) = self.call(rsync_args)
    _logger.info(this_output)
    if not is_successful:
        _logger.error(this_output)
        return (is_successful, this_output)
    output += this_output
    return (is_successful, output)
def __init__(self, repo, publish_conduit, config, repo_content_unit_q=None):
    """
    :param repo: Pulp managed Yum repository
    :type repo: pulp.plugins.model.Repository
    :param publish_conduit: Conduit providing access to relative Pulp functionality
    :type publish_conduit: pulp.plugins.conduits.repo_publish.RepoPublishConduit
    :param config: Pulp configuration for the distributor
    :type config: pulp.plugins.config.PluginCallConfiguration
    :param repo_content_unit_q: optional Q object that will be applied to the
                                queries performed against RepoContentUnit model
    :type repo_content_unit_q: mongoengine.Q
    """
    super(WebPublisher, self).__init__(
        step_type=constants.PUBLISH_STEP_WEB_PUBLISHER, repo=repo,
        publish_conduit=publish_conduit, config=config)
    api_version = 'v1'
    # Scratch space for this publish run.
    self.working_dir = os.path.join(self.get_working_dir(), api_version)
    misc.mkdir(self.working_dir)
    self.web_working_dir = os.path.join(self.get_working_dir(), 'web')
    app_file = configuration.get_redirect_file_name(repo)
    app_dest = os.path.join(
        configuration.get_app_publish_dir(config, api_version), app_file)
    atomic_step = AtomicDirectoryPublishStep(
        self.get_working_dir(),
        [('web', configuration.get_web_publish_dir(repo, config, api_version)),
         (app_file, app_dest)],
        configuration.get_master_publish_dir(repo, config, api_version),
        step_type=constants.PUBLISH_STEP_OVER_HTTP)
    atomic_step.description = _('Making v1 files available via web.')
    # Honor the caller-supplied unit filter when publishing images.
    self.add_child(PublishImagesStep(repo_content_unit_q=repo_content_unit_q))
    self.add_child(atomic_step)
def _ensure_destination_dir(self, destination):
    """
    Ensure that the directory specified by destination exists

    :param destination: The full path to the directory to create
    :type destination: str
    """
    # NOTE(review): assumes mkdir() tolerates an already-existing
    # directory — confirm against its implementation.
    mkdir(destination)
def __init__(self, repo, publish_conduit, config, repo_content_unit_q=None):
    """
    Initialize the V2WebPublisher.

    :param repo: Pulp managed Yum repository
    :type repo: pulp.plugins.model.Repository
    :param publish_conduit: Conduit providing access to relative Pulp functionality
    :type publish_conduit: pulp.plugins.conduits.repo_publish.RepoPublishConduit
    :param config: Pulp configuration for the distributor
    :type config: pulp.plugins.config.PluginCallConfiguration
    :param repo_content_unit_q: optional Q object that will be applied to the
                                queries performed against RepoContentUnit model
    :type repo_content_unit_q: mongoengine.Q
    """
    super(V2WebPublisher, self).__init__(step_type=constants.PUBLISH_STEP_WEB_PUBLISHER,
                                         repo=repo,
                                         publish_conduit=publish_conduit,
                                         config=config)
    # Shared accumulator filled in by the child steps and consumed by the
    # redirect file step. Keys 1 and 2 are presumably schema1/schema2
    # manifest data, 'list' manifest lists, 'amd64' an amd64 tag map —
    # confirm against the step implementations.
    self.redirect_data = {1: set(), 2: set(), 'list': set(), 'amd64': {}}
    docker_api_version = 'v2'
    publish_dir = configuration.get_web_publish_dir(
        repo, config, docker_api_version)
    app_file = configuration.get_redirect_file_name(repo)
    app_publish_location = os.path.join(
        configuration.get_app_publish_dir(config, docker_api_version), app_file)
    # Per-run scratch directories.
    self.working_dir = os.path.join(self.get_working_dir(), docker_api_version)
    misc.mkdir(self.working_dir)
    self.web_working_dir = os.path.join(self.get_working_dir(), 'web')
    master_publish_dir = configuration.get_master_publish_dir(
        repo, config, docker_api_version)
    atomic_publish_step = publish_step.AtomicDirectoryPublishStep(
        self.get_working_dir(),
        [('', publish_dir), (app_file, app_publish_location)],
        master_publish_dir,
        step_type=constants.PUBLISH_STEP_OVER_HTTP)
    atomic_publish_step.description = _(
        'Making v2 files available via web.')
    # Child order: blobs, manifests, manifest lists, tags, atomic move,
    # then the Crane redirect file.
    self.add_child(
        PublishBlobsStep(repo_content_unit_q=repo_content_unit_q))
    self.publish_manifests_step = PublishManifestsStep(
        self.redirect_data, repo_content_unit_q=repo_content_unit_q)
    self.add_child(self.publish_manifests_step)
    self.publish_manifest_lists_step = PublishManifestListsStep(
        self.redirect_data,
        repo_content_unit_q=repo_content_unit_q)
    self.add_child(self.publish_manifest_lists_step)
    self.add_child(PublishTagsStep(self.redirect_data))
    self.add_child(atomic_publish_step)
    self.add_child(
        RedirectFileStep(app_publish_location, self.redirect_data))
def process_main(self):
    """
    Ensure the local ostree repository and its links directory exist, then
    initialize the repository.

    :raises PulpCodedException:
    """
    repo_path = self.parent.storage_path
    links_path = os.path.join(os.path.dirname(repo_path), constants.LINKS_DIR)
    for directory in (repo_path, links_path):
        mkdir(directory)
    self._init_repository(repo_path)
def _handle_cProfile(self, task_id): """ If cProfiling is enabled, stop the profiler and write out the data. :param task_id: the id of the task :type task_id: unicode """ if config.getboolean('profiling', 'enabled') is True: self.pr.disable() profile_directory = config.get('profiling', 'directory') misc.mkdir(profile_directory, mode=0755) self.pr.dump_stats("%s/%s" % (profile_directory, task_id))
def publish_repo(self, repo, publish_conduit, config): """ Publish the repository by "installing" each puppet module into the given destination directory. This effectively means extracting each module's tarball in that directory. :param repo: plugin repository object :type repo: pulp.plugins.model.Repository :param publish_conduit: provides access to relevant Pulp functionality :type publish_conduit: pulp.plugins.conduits.repo_publish.RepoPublishConduit :param config: plugin configuration :type config: pulp.plugins.config.PluginConfiguration :return: report describing the publish run :rtype: pulp.plugins.model.PublishReport """ # get dir from config destination = config.get(constants.CONFIG_INSTALL_PATH) subdir = config.get(constants.CONFIG_SUBDIR) if not destination: return publish_conduit.build_failure_report( _('install path not provided'), self.detail_report.report) if subdir: destination = os.path.join(destination, subdir) units = list( repo_controller.find_repo_content_units(repo.repo_obj, yield_content_unit=True)) duplicate_units = self._find_duplicate_names(units) if duplicate_units: for unit in duplicate_units: self.detail_report.error( unit.unit_key, 'another unit in this repo also has this name') return publish_conduit.build_failure_report( _('duplicate unit names'), self.detail_report.report) # check for unsafe paths in tarballs, and fail early if problems are found self._check_for_unsafe_archive_paths(units, destination) if self.detail_report.has_errors: return publish_conduit.build_failure_report( 'failed', self.detail_report.report) # ensure the destination directory exists try: mkdir(destination) temporarydestination = self._create_temporary_destination_directory( destination) except OSError, e: return publish_conduit.build_failure_report( _('failed to create destination directory: %s') % str(e), self.detail_report.report)
def finalize(self):
    """
    Write the Tag list file so that clients can retrieve the list of
    available Tags.
    """
    tags_dir = os.path.join(self.parent.get_working_dir(), 'tags')
    misc.mkdir(tags_dir)
    payload = json.dumps({
        'name': configuration.get_repo_registry_id(self.get_repo(),
                                                   self.get_config()),
        'tags': list(self._tag_names)})
    with open(os.path.join(tags_dir, 'list'), 'w') as list_file:
        list_file.write(payload)
    # The tag names are no longer needed; free them.
    del self._tag_names
def process_main(self):
    """
    Ensure the local ostree repository and its links directory exist, then
    initialize the repository with the remote configuration.

    :raises PulpCodedException:
    """
    storage_path = self.parent.storage_path
    links_path = os.path.join(os.path.dirname(storage_path), constants.LINKS_DIR)
    mkdir(storage_path)
    mkdir(links_path)
    self._init_repository(storage_path, self.parent.remote_id, self.parent.feed_url)
def finalize(self):
    """
    Write the Tag list file so that clients can retrieve the list of
    available Tags.
    """
    tags_dir = os.path.join(self.parent.get_working_dir(), '.relative', 'tags')
    misc.mkdir(tags_dir)
    payload = json.dumps({'name': self.repo_registry_id,
                          'tags': list(self._tag_names)})
    with open(os.path.join(tags_dir, 'list'), 'w') as list_file:
        list_file.write(payload)
    # The tag names are no longer needed; drop them.
    del self._tag_names
def get_root_content_dir(self, content_type):
    """
    Get the full path to Pulp's root content directory for a given content
    type, creating it if it does not exist.

    @param content_type: unique id of content collection
    @type content_type: str

    @return: file system path for content type's root directory
    @rtype: str
    """
    # Content is partitioned on the file system by content type.
    root = os.path.join(pulp_config.config.get('server', 'storage_dir'),
                        'content', content_type)
    mkdir(root)
    return root
def process_main(self, item=False):
    """
    Write a manifest-list tag file for every image name in the repository.

    :param item: unused; present to satisfy the step interface
    """
    registry_id = configuration.get_repo_registry_id(self.get_repo(),
                                                     self.get_config())
    for img_name, tags in self.parent.imgname_to_tags.items():
        list_path = os.path.join(self.get_working_dir(), img_name, "tags",
                                 constants.MANIFEST_LIST_TYPE)
        misc.mkdir(os.path.dirname(list_path))
        payload = json.dumps({
            'name': os.path.join(registry_id, img_name),
            'tags': sorted(tags),
        })
        with open(list_path, 'w') as list_file:
            list_file.write(payload)
def process_main(self):
    """
    Publish the JSON redirect file (format version 2) for Crane.
    """
    repo = self.get_repo()
    config = self.get_config()
    redirect_data = {
        'type': 'pulp-docker-redirect',
        'version': 2,
        'repository': repo.id,
        'repo-registry-id': configuration.get_repo_registry_id(repo, config),
        'url': configuration.get_redirect_url(config, repo, 'v2'),
        'protected': config.get('protected', False)}
    misc.mkdir(os.path.dirname(self.app_publish_location))
    with open(self.app_publish_location, 'w') as app_file:
        app_file.write(json.dumps(redirect_data))
def _add_ref(path, branch, commit):
    """
    Write a branch (ref) file into the published repository.

    :param path: The absolute path to the repository.
    :type path: str
    :param branch: The branch relative path.
    :type branch: str
    :param commit: The commit hash.
    :type commit: str
    """
    # refs/heads mirrors the branch's own directory structure.
    heads_dir = os.path.join(path, 'refs', 'heads', os.path.dirname(branch))
    mkdir(heads_dir)
    ref_path = os.path.join(heads_dir, os.path.basename(branch))
    with open(ref_path, 'w+') as fp:
        fp.write(commit)
def migrate():
    """
    Move all files and directories from /var/lib/pulp/published/docker/ to
    /var/lib/pulp/published/docker/v1/.
    """
    misc.mkdir(NEW_DOCKER_V1_PUBLISH_PATH)
    for folder in os.listdir(OLD_DOCKER_V1_PUBLISH_PATH):
        if folder == 'v1':
            # Never move the migration target into itself.
            continue
        source_folder = os.path.join(OLD_DOCKER_V1_PUBLISH_PATH, folder)
        destination_folder = os.path.join(NEW_DOCKER_V1_PUBLISH_PATH, folder)
        # Guard against a partially completed earlier run: shutil.move onto an
        # existing destination would nest the folder inside it instead of
        # replacing it.
        if os.path.exists(source_folder) and not os.path.exists(destination_folder):
            shutil.move(source_folder, NEW_DOCKER_V1_PUBLISH_PATH)
    # Now we must look for and repair broken symlinks
    _repair_links()
def _write_pem_file(self, config_key, path):
    """
    Write the PEM data from self.config[config_key] to the given path, if the
    key is defined and is "truthy".

    :param config_key: The key corresponding to a value in self.config to write to path.
    :type config_key: basestring
    :param path: The path to write the PEM data to.
    :type path: basestring
    """
    if config_key in self.config and self.config[config_key]:
        if not os.path.exists(self._pki_path):
            misc.mkdir(os.path.dirname(self._pki_path))
            # Restrict the PKI directory to the owner only.
            os.mkdir(self._pki_path, 0700)
        # O_WRONLY|O_CREAT with mode 0600 keeps new key files private.
        # NOTE(review): open() does not change the mode of a pre-existing
        # file — confirm that is acceptable here.
        with os.fdopen(os.open(path, os.O_WRONLY | os.O_CREAT, 0600), 'w') as pem_file:
            # Encode so unicode config values can be written as bytes.
            pem_file.write(self.config[config_key].encode('utf-8'))
def _upload_storage_dir():
    """
    Calculate (and create, if needed) the directory uploaded files are
    stored in.

    This is resolved dynamically so unit tests have the opportunity to
    change the constants entry for local storage.

    :return: full path to the upload directory
    """
    upload_dir = os.path.join(
        pulp_config.config.get('server', 'storage_dir'), 'uploads')
    misc.mkdir(upload_dir)
    return upload_dir
def _write_pem_file(self, config_key, path):
    """
    Write the PEM data from self.config[config_key] to the given path, if the
    key is defined and is "truthy".

    :param config_key: The key corresponding to a value in self.config to write to path.
    :type config_key: basestring
    :param path: The path to write the PEM data to.
    :type path: basestring
    """
    if config_key in self.config and self.config[config_key]:
        if not os.path.exists(self._pki_path):
            misc.mkdir(os.path.dirname(self._pki_path))
            # Restrict the PKI directory to the owner only.
            os.mkdir(self._pki_path, 0700)
        # O_WRONLY|O_CREAT with mode 0600 keeps new key files private.
        # NOTE(review): the value is written as-is; a unicode value with
        # non-ASCII characters may raise UnicodeEncodeError — confirm the
        # config values are byte strings.
        with os.fdopen(os.open(path, os.O_WRONLY | os.O_CREAT, 0600), 'w') as pem_file:
            pem_file.write(self.config[config_key])
def publish_repo(self, repo, publish_conduit, config): """ Publish the repository by "installing" each puppet module into the given destination directory. This effectively means extracting each module's tarball in that directory. :param repo: plugin repository object :type repo: pulp.plugins.model.Repository :param publish_conduit: provides access to relevant Pulp functionality :type publish_conduit: pulp.plugins.conduits.repo_publish.RepoPublishConduit :param config: plugin configuration :type config: pulp.plugins.config.PluginConfiguration :return: report describing the publish run :rtype: pulp.plugins.model.PublishReport """ # get dir from config destination = config.get(constants.CONFIG_INSTALL_PATH) subdir = config.get(constants.CONFIG_SUBDIR) if not destination: return publish_conduit.build_failure_report(_('install path not provided'), self.detail_report.report) if subdir: destination = os.path.join(destination, subdir) units = list(repo_controller.find_repo_content_units(repo.repo_obj, yield_content_unit=True)) duplicate_units = self._find_duplicate_names(units) if duplicate_units: for unit in duplicate_units: self.detail_report.error(unit.unit_key, 'another unit in this repo also has this name') return publish_conduit.build_failure_report(_('duplicate unit names'), self.detail_report.report) # check for unsafe paths in tarballs, and fail early if problems are found self._check_for_unsafe_archive_paths(units, destination) if self.detail_report.has_errors: return publish_conduit.build_failure_report('failed', self.detail_report.report) # ensure the destination directory exists try: mkdir(destination) temporarydestination = self._create_temporary_destination_directory(destination) except OSError, e: return publish_conduit.build_failure_report( _('failed to create destination directory: %s') % str(e), self.detail_report.report)
def migrate(self, unit_id, path, new_path):
    """
    Migrate the unit by copying its content and then updating the DB record.

    The content is copied (and not moved) due to issue #1944.

    :param unit_id: A unit UUID.
    :type unit_id: str
    :param path: The current storage path.
    :type path: str
    :param new_path: The new storage path.
    :type new_path: str
    """
    if not os.path.exists(path):
        # Nothing on disk to migrate; leave the record untouched.
        return
    mkdir(os.path.dirname(new_path))
    shutil.copy(path, new_path)
    self.collection.update_one(filter={'_id': unit_id},
                               update={'$set': {'_storage_path': new_path}})
def process_main(self):
    """
    Publish the per-image JSON redirect files for Crane.
    """
    repo = self.get_repo()
    config = self.get_config()
    registry_prefix = configuration.get_repo_registry_id(repo, config)
    # Register the generated "app" directory with the atomic publish step so
    # it is exposed when the publish is flipped live.
    global_app_publish_dir = os.path.join(
        configuration.get_app_publish_dir(config, self.parent.docker_api_version),
        registry_prefix)
    self.parent.atomic_publish_step.publish_locations.append(
        ("app", global_app_publish_dir))
    # Compute image references: tags and manifest IDs
    img_refs = dict(
        (img_name, set(tags))
        for img_name, tags in self.parent.imgname_to_tags.items())
    for img_name, refs in self.parent.imgname_to_mfid.items():
        img_refs.setdefault(img_name, set()).update(refs)
    redirect_url = configuration.get_redirect_url(
        config, repo, self.parent.docker_api_version)
    # Defaults shared by every per-image redirect file.
    redirect_tmpl = {
        'type': 'pulp-docker-redirect',
        'version': 4,
        'repository': self.get_repo().id,
        'protected': config.get('protected', False),
        'manifest_list_data': [],
        'manifest_list_amd64_tags': {}
    }
    working_dir = self.get_working_dir()
    for img_name, refs in img_refs.items():
        app_publish_path = os.path.join("app", "%s.json" % img_name)
        app_publish_location = os.path.join(working_dir, app_publish_path)
        misc.mkdir(os.path.dirname(app_publish_location))
        # Shallow copy of the template; per-image keys are filled in below.
        redirect_data = dict(redirect_tmpl)
        redirect_data['repo-registry-id'] = os.path.join(
            registry_prefix, img_name)
        redirect_data['url'] = os.path.join(redirect_url, img_name)
        redirect_data['schema2_data'] = sorted(refs)
        with open(app_publish_location, 'w') as app_file:
            app_file.write(json.dumps(redirect_data))
def migrate():
    """
    Move all files and directories from /var/lib/pulp/published/docker/ to
    /var/lib/pulp/published/docker/v1/.
    """
    misc.mkdir(NEW_DOCKER_V1_PUBLISH_PATH)
    for entry in os.listdir(OLD_DOCKER_V1_PUBLISH_PATH):
        if entry == 'v1':
            # Skip the migration target itself.
            continue
        src = os.path.join(OLD_DOCKER_V1_PUBLISH_PATH, entry)
        dst = os.path.join(NEW_DOCKER_V1_PUBLISH_PATH, entry)
        # Only move entries that still exist and were not already migrated.
        if os.path.exists(src) and not os.path.exists(dst):
            shutil.move(src, NEW_DOCKER_V1_PUBLISH_PATH)
    # Fix any symlinks the move left dangling.
    _repair_links()
def generate_download_requests(self):
    """
    generator that yields DownloadRequests for needed units.

    :return: generator of DownloadRequest instances
    :rtype: collections.Iterable[DownloadRequest]
    """
    feed_url = self.get_config().get('feed')
    for unit_key in self.step_get_local_units.units_to_download:
        unit_info = self.deb_data[get_key_hash(unit_key)]
        # Spread units across hashed subdirectories; a flat directory could
        # otherwise hold 50k+ entries.
        dest = os.path.join(self.working_dir,
                            generate_internal_storage_path(unit_info['file_name']))
        misc.mkdir(os.path.dirname(dest))
        yield DownloadRequest(urlparse.urljoin(feed_url, unit_info['file_path']),
                              dest)
def request_content_unit_file_path(self, content_type, relative_path):
    """
    @param content_type: unique id of content collection
    @type content_type: str
    @param relative_path: on disk path of a content unit relative to the root
                          directory for the given content type
    @type relative_path: str
    @return: full file system path for given relative path
    @rtype: str
    """
    # A sloppy importer may hand in a leading '/'; an absolute component would
    # make os.path.join discard the root, so strip a single one.
    if relative_path.startswith('/'):
        relative_path = relative_path[1:]
    unit_path = os.path.join(self.get_root_content_dir(content_type), relative_path)
    mkdir(os.path.dirname(unit_path))
    return unit_path
def generate_download_requests(self):
    """
    generator that yields DownloadRequests for needed units.

    :return: generator of DownloadRequest instances
    :rtype: collections.Iterable[DownloadRequest]
    """
    base_url = self.get_config().get('feed')
    for unit_key in self.step_get_local_units.units_to_download:
        entry = self.deb_data[get_key_hash(unit_key)]
        # Units are spread over hashed subdirectories so no single directory
        # ends up holding tens of thousands of files.
        relative = generate_internal_storage_path(entry['file_name'])
        destination = os.path.join(self.working_dir, relative)
        misc.mkdir(os.path.dirname(destination))
        source = urlparse.urljoin(base_url, entry['file_path'])
        yield DownloadRequest(source, destination)
def process_main(self):
    """
    Write the tag list file and link each tagged manifest.

    Creates <working_dir>/tags/list containing the repo registry id and the
    tag names as JSON, then symlinks every tag inside the manifests publish
    directory so manifests are reachable by tag as well.

    (Fixed docstring: the original documented a ``unit`` parameter that
    this method does not accept.)
    """
    tags_path = os.path.join(self.parent.get_working_dir(), 'tags')
    misc.mkdir(tags_path)
    with open(os.path.join(tags_path, 'list'), 'w') as list_file:
        tag_data = {
            'name': configuration.get_repo_registry_id(
                self.get_repo(), self.get_config()),
            # self.parent.tags maps tag name -> manifest unit; only the
            # names go into the list file
            'tags': list(self.parent.tags)}
        list_file.write(json.dumps(tag_data))
    # Add the links to make Manifests accessible by tags as well
    for tag, unit in self.parent.tags.items():
        self.parent.publish_manifests_step._create_symlink(
            unit.storage_path,
            os.path.join(
                self.parent.publish_manifests_step.get_manifests_directory(),
                tag))
def migrate(self, unit_id, path, new_path):
    """
    Migrate the unit.
    1. move content
    2. update the DB

    :param unit_id: A unit UUID.
    :type unit_id: str
    :param path: The current storage path.
    :type path: str
    :param new_path: The new storage path.
    :type new_path: str
    """
    new_location = {'$set': {'_storage_path': new_path}}
    if os.path.exists(path):
        mkdir(os.path.dirname(new_path))
        shutil.move(path, new_path)
    # record the new storage path so the DB matches the new layout
    self.collection.update_one(
        filter={'_id': unit_id},
        update=new_location)
def migrate(self, unit_id, path, new_path):
    """
    Migrate the unit.
    1. copy content
    2. update the DB

    :param unit_id: A unit UUID.
    :type unit_id: str
    :param path: The current storage path.
    :type path: str
    :param new_path: The new storage path.
    :type new_path: str
    """
    new_location = {'$set': {'_storage_path': new_path}}
    # the content should be copied(and not moved) due to this issue #1944
    if os.path.exists(path):
        mkdir(os.path.dirname(new_path))
        shutil.copy(path, new_path)
    self.collection.update_one(
        filter={'_id': unit_id},
        update=new_location)
def process_main(self):
    """
    Publish the JSON file for Crane.
    """
    repo = self.get_repo()
    config = self.get_config()
    redirect_data = {
        'type': 'pulp-docker-redirect',
        'version': 2,
        'repository': repo.id,
        'repo-registry-id': configuration.get_repo_registry_id(repo, config),
        'url': configuration.get_redirect_url(config, repo, 'v2'),
        'protected': config.get('protected', False)
    }
    # the app file's directory may not exist yet
    misc.mkdir(os.path.dirname(self.app_publish_location))
    with open(self.app_publish_location, 'w') as app_file:
        app_file.write(json.dumps(redirect_data))
def _symlink_unit(self, build_dir, unit, target_paths):
    """
    For each target path, place a symlink in the build dir pointing at the
    unit's canonical location on disk.

    :param build_dir: The path on the local filesystem that we want to
        symlink the units into. This path should already exist.
    :type  build_dir: basestring
    :param unit: The unit to be symlinked
    :type  unit: pulp.plugins.model.AssociatedUnit
    :param target_paths: The list of paths the unit should be symlinked to.
    :type  target_paths: list of L{str}
    """
    for target_path in target_paths:
        link_path = self._target_symlink_path(build_dir, target_path)
        # exists() is False for a dangling symlink, hence the islink() check
        if os.path.exists(link_path) or os.path.islink(link_path):
            # Something already occupies the link name. Keep it if it is a
            # symlink that already points at the unit; otherwise remove it
            # so the correct link can be created below.
            try:
                if os.readlink(link_path) == unit.storage_path:
                    # already correct -- nothing more to do for this target
                    continue
                # the existing symlink points elsewhere; drop it
                os.remove(link_path)
            except OSError as e:
                # readlink() raises EINVAL when the path is a regular file;
                # in that case remove the file and fall through to recreate
                # the link. Any other errno is a real error.
                if e.errno != errno.EINVAL:
                    raise e
                os.remove(link_path)
        # make sure any required subdirectory exists, then create the link
        misc.mkdir(os.path.dirname(link_path))
        os.symlink(unit.storage_path, link_path)
def put(self, unit, path, location=None):
    """
    Put the content defined by the content unit into storage.

    The file at *path* is transferred safely: it is first copied to a
    temporary name inside the final directory, size-verified when the unit
    supports it, and then atomically renamed into place.

    :param unit: The content unit to be stored.
    :type unit: pulp.sever.db.model.ContentUnit
    :param path: The absolute path to the file (or directory) to be stored.
    :type path: str
    :param location: The (optional) location within the path where the
        content is to be stored.
    :type location: str
    """
    destination = unit.storage_path
    if location:
        destination = os.path.join(destination, location.lstrip('/'))
    target_dir = os.path.dirname(destination)
    misc.mkdir(target_dir)
    # mkstemp also opens the file; close the descriptor right away so it
    # does not leak -- only the temporary name is needed here.
    fd, tmp_path = tempfile.mkstemp(dir=target_dir)
    os.close(fd)
    shutil.copy(path, tmp_path)
    try:
        unit.verify_size(tmp_path)
    except AttributeError:
        # verify_size method is not implemented for the unit
        pass
    except Exception:
        # verification failed -- do not leave the temp file behind
        os.remove(tmp_path)
        raise
    # same-directory rename is atomic on POSIX filesystems
    os.rename(tmp_path, destination)
def process_main(self, item=None):
    """
    Build the DownloadRequest list for every unit that must be fetched and
    hand it to the download step.
    """
    work_dir = self.get_working_dir()
    download_step = self.parent.step_download_units
    download_step.path_to_unit = {}
    requests = []
    needed_dirs = set()
    for unit in self.parent.step_local_units.units_to_download:
        url = os.path.join(self.parent.feed_url,
                           self.parent.unit_relative_urls[unit.checksum])
        filename = os.path.basename(url)
        dest_dir = os.path.join(work_dir, "packages",
                                generate_internal_storage_path(filename))
        needed_dirs.add(dest_dir)
        dest = os.path.join(dest_dir, filename)
        requests.append(DownloadRequest(url, dest))
        download_step.path_to_unit[dest] = unit
    # create each destination directory exactly once, after collecting them
    for dest_dir in needed_dirs:
        misc.mkdir(dest_dir)
    download_step._downloads = requests
def _open_metadata_file_handle(self):
    """
    Open the metadata file handle, creating any missing parent directories.

    If the file already exists, this will overwrite it.
    """
    assert self.metadata_file_handle is None
    _LOG.debug('Opening metadata file: %s' % self.metadata_file_path)

    if os.path.exists(self.metadata_file_path):
        # the file is already there; warn and check we may clobber it
        msg = _('Overwriting existing metadata file [%(p)s]')
        _LOG.warn(msg % {'p': self.metadata_file_path})
        if not os.access(self.metadata_file_path, os.R_OK | os.W_OK):
            msg = _('Insufficient permissions to overwrite [%(p)s]')
            raise RuntimeError(msg % {'p': self.metadata_file_path})
    else:
        parent_dir = os.path.dirname(self.metadata_file_path)
        if not os.path.exists(parent_dir):
            misc.mkdir(parent_dir, mode=0o770)
        elif not os.access(parent_dir, os.R_OK | os.W_OK | os.X_OK):
            msg = _('Insufficient permissions to write metadata file in directory [%(d)s]')
            raise RuntimeError(msg % {'d': parent_dir})

    msg = _('Opening metadata file handle for [%(p)s]')
    _LOG.debug(msg % {'p': self.metadata_file_path})
    # transparently gzip the output when the file name asks for it
    if self.metadata_file_path.endswith('.gz'):
        self.metadata_file_handle = gzip.open(self.metadata_file_path, 'w')
    else:
        self.metadata_file_handle = open(self.metadata_file_path, 'w')
def test_succeeded(self, fake_mkdir):
    """A fresh path is handed to the patched mkdir exactly once."""
    some_path = "path-123"
    misc.mkdir(some_path)
    fake_mkdir.assert_called_once_with(some_path)
def initialize(self):
    """
    Perform setup required before we start processing the individual units.
    """
    # ensure the step's working directory exists before any unit is handled
    misc.mkdir(self.get_working_dir())
def test_already_exists(self, fake_mkdir):
    """mkdir must silently swallow EEXIST from the underlying mkdir."""
    path = "path-123"
    # Install the failure BEFORE invoking mkdir. The original set the
    # side_effect after the call, so the EEXIST-handling path was never
    # actually exercised.
    fake_mkdir.side_effect = OSError(errno.EEXIST, path)
    # must not raise
    misc.mkdir(path)
    fake_mkdir.assert_called_once_with(path)
def test_other_exception(self, fake_mkdir):
    """mkdir must propagate OSErrors other than EEXIST."""
    path = "path-123"
    # The original made an initial, un-asserted misc.mkdir(path) call before
    # installing the side effect; it proved nothing and muddied the mock's
    # call record, so it is removed.
    fake_mkdir.side_effect = OSError(errno.EPERM, path)
    self.assertRaises(OSError, misc.mkdir, path)
def test_mode(self, fake_mkdir):
    """An explicit mode is forwarded to the underlying mkdir call."""
    some_path = "path-123"
    some_mode = 0o750
    misc.mkdir(some_path, some_mode)
    fake_mkdir.assert_called_once_with(some_path, some_mode)
def _add_ref(path, branch, commit):
    """
    Write *commit* into refs/heads/<branch> beneath *path*.

    The branch name may contain '/' separators; the intermediate
    directories are created as needed.
    """
    heads_dir = os.path.join(path, 'refs', 'heads', os.path.dirname(branch))
    mkdir(heads_dir)
    ref_file = os.path.join(heads_dir, os.path.basename(branch))
    with open(ref_file, 'w+') as fp:
        fp.write(commit)