def test_process_main(self):
    """CopyDirectoryStep.process_main copies files from source into target."""
    source_dir = os.path.join(self.working_directory, 'source')
    os.makedirs(source_dir)
    touch(os.path.join(source_dir, 'foo.txt'))
    target_dir = os.path.join(self.working_directory, 'target')
    target_file = os.path.join(target_dir, 'foo.txt')

    step = publish_step.CopyDirectoryStep(source_dir, target_dir)
    step.process_main()

    # The file created in source must have been copied to target.
    self.assertTrue(os.path.exists(target_file))
def __init__(self, transfer_repo, publish_conduit, config, distributor_type,
             **kwargs):
    """
    Build the step tree for publishing a yum repository to the web,
    fast-forwarding from the previous publish when possible.

    :param transfer_repo: repository being published
    :type transfer_repo: pulp.plugins.db.model.Repository
    :param publish_conduit: Conduit providing access to relative Pulp functionality
    :type publish_conduit: pulp.plugins.conduits.repo_publish.RepoPublishConduit
    :param config: Pulp configuration for the distributor
    :type config: pulp.plugins.config.PluginCallConfiguration
    :param distributor_type: The type of the distributor that is being published
    :type distributor_type: str
    """
    repo = transfer_repo.repo_obj
    repo_relative_path = configuration.get_repo_relative_path(repo, config)
    last_published = publish_conduit.last_publish()
    last_deleted = repo.last_unit_removed
    date_filter = None

    # A fast-forward publish is only possible when there was a previous
    # publish and no units have been removed from the repo since then.
    if last_published and \
            ((last_deleted and last_published > last_deleted) or not last_deleted):
        # Locate the currently published directory so it can be copied into place.
        specific_master = None

        if config.get(constants.PUBLISH_HTTPS_KEYWORD):
            root_publish_dir = configuration.get_https_publish_dir(config)
            repo_publish_dir = os.path.join(root_publish_dir, repo_relative_path)
            specific_master = os.path.realpath(repo_publish_dir)
        if not specific_master and config.get(constants.PUBLISH_HTTP_KEYWORD):
            root_publish_dir = configuration.get_http_publish_dir(config)
            repo_publish_dir = os.path.join(root_publish_dir, repo_relative_path)
            specific_master = os.path.realpath(repo_publish_dir)

        # Only do an incremental publish if the previous publish can be found.
        # Guard against specific_master being None (neither http nor https
        # publishing enabled): os.path.exists(None) raises TypeError.
        if specific_master and os.path.exists(specific_master):
            # Pass something useful to the super so that it knows the publish info
            string_date = dateutils.format_iso8601_datetime(last_published)
            date_filter = mongoengine.Q(created__gte=string_date)

    super(Publisher, self).__init__(transfer_repo, publish_conduit, config,
                                    distributor_type,
                                    association_filters=date_filter, **kwargs)

    if date_filter:
        # Seed the working directory with the previous publish so only the
        # units added since then need processing.
        insert_step = platform_steps.CopyDirectoryStep(
            specific_master, self.get_working_dir(), preserve_symlinks=True)
        self.insert_child(0, insert_step)
        self.rpm_step.fast_forward = True

    # Add the web specific directory publishing processing steps
    target_directories = []

    # it's convenient to create these now, but we won't add them until later,
    # because we want them to run last
    listing_steps = []

    if config.get(constants.PUBLISH_HTTPS_KEYWORD):
        root_publish_dir = configuration.get_https_publish_dir(config)
        repo_publish_dir = os.path.join(root_publish_dir, repo_relative_path)
        target_directories.append(['/', repo_publish_dir])
        listing_steps.append(
            GenerateListingFileStep(root_publish_dir, repo_publish_dir))
    if config.get(constants.PUBLISH_HTTP_KEYWORD):
        root_publish_dir = configuration.get_http_publish_dir(config)
        repo_publish_dir = os.path.join(root_publish_dir, repo_relative_path)
        target_directories.append(['/', repo_publish_dir])
        listing_steps.append(
            GenerateListingFileStep(root_publish_dir, repo_publish_dir))

    master_publish_dir = configuration.get_master_publish_dir(
        repo, distributor_type)
    atomic_publish_step = platform_steps.AtomicDirectoryPublishStep(
        self.get_working_dir(), target_directories, master_publish_dir)
    atomic_publish_step.description = _("Publishing files to web")
    self.add_child(atomic_publish_step)

    # add the listing file generation step(s)
    for step in listing_steps:
        self.add_child(step)
def __init__(self, repo, publish_conduit, config, distributor_type, **kwargs):
    """
    Build the step tree for exporting a yum repository to a directory or ISO.

    :param repo: Pulp managed Yum repository
    :type repo: pulp.plugins.model.Repository
    :param publish_conduit: Conduit providing access to relative Pulp functionality
    :type publish_conduit: pulp.plugins.conduits.repo_publish.RepoPublishConduit
    :param config: Pulp configuration for the distributor
    :type config: pulp.plugins.config.PluginCallConfiguration
    :param distributor_type: The type of the distributor that is being published
    :type distributor_type: str
    """
    super(ExportRepoPublisher, self).__init__(repo, publish_conduit, config,
                                              distributor_type, **kwargs)

    date_q = export_utils.create_date_range_filter(config)
    if date_q:
        # Since this is a partial export we don't generate metadata;
        # we have to clear out the previously added steps because
        # we only need special versions of the rpm, drpm, and errata steps
        self.clear_children()
        self.add_child(
            PublishRpmAndDrpmStepIncremental(repo_content_unit_q=date_q))
        self.add_child(
            PublishErrataStepIncremental(repo_content_unit_q=date_q))

    working_directory = self.get_working_dir()
    export_dir = config.get(constants.EXPORT_DIRECTORY_KEYWORD)
    if export_dir:
        # Exporting straight to a directory: copy the content there and
        # generate the listing file alongside it.
        target_dir = os.path.join(
            export_dir,
            configuration.get_repo_relative_path(repo.repo_obj, config))
        self.add_child(
            platform_steps.CopyDirectoryStep(working_directory, target_dir))
        self.add_child(GenerateListingFileStep(export_dir, target_dir))
    else:
        # Reset the steps to use an internal scratch directory other than the base working dir
        content_dir = os.path.join(working_directory, 'scratch')
        for step in self.children:
            step.working_dir = content_dir
        self.working_dir = content_dir

        # Set up step to copy all the files to a realized directory with no symlinks
        # This could be optimized with a pathspec so that we don't create all the files
        # separately
        realized_dir = os.path.join(working_directory, 'realized')
        copy_target = os.path.join(
            realized_dir,
            configuration.get_repo_relative_path(repo.repo_obj, config))
        self.add_child(
            platform_steps.CopyDirectoryStep(content_dir, copy_target))
        self.add_child(GenerateListingFileStep(realized_dir, copy_target))

        # Create the steps to generate the ISO and publish them to their final location
        output_dir = os.path.join(working_directory, 'output')
        self.add_child(CreateIsoStep(realized_dir, output_dir))

        # create the PULP_MANIFEST file if requested in the config
        if config.get_boolean(constants.CREATE_PULP_MANIFEST) is True:
            self.add_child(
                platform_steps.CreatePulpManifestStep(output_dir))

        dirs = configuration.get_export_repo_publish_dirs(
            repo.repo_obj, config)
        publish_location = [('/', location) for location in dirs]

        master_dir = configuration.get_master_publish_dir(
            repo.repo_obj, self.get_plugin_type())
        atomic_publish = platform_steps.AtomicDirectoryPublishStep(
            output_dir, publish_location, master_dir)
        atomic_publish.description = _('Moving ISO to final location')
        self.add_child(atomic_publish)
def __init__(self, transfer_repo, publish_conduit, config, distributor_type,
             association_filters=None, **kwargs):
    """
    Build the step tree for publishing a yum repository to the web,
    fast-forwarding from the previous publish when possible.

    :param transfer_repo: repository being published
    :type transfer_repo: pulp.plugins.db.model.Repository
    :param publish_conduit: Conduit providing access to relative Pulp functionality
    :type publish_conduit: pulp.plugins.conduits.repo_publish.RepoPublishConduit
    :param config: Pulp configuration for the distributor
    :type config: pulp.plugins.config.PluginCallConfiguration
    :param association_filters: Any filters to be applied to the list of RPMs being published
    :type association_filters: mongoengine.Q
    :param distributor_type: The type of the distributor that is being published
    :type distributor_type: str
    """
    repo = transfer_repo.repo_obj
    repo_relative_path = configuration.get_repo_relative_path(repo, config)
    last_published = publish_conduit.last_publish()
    last_deleted = repo.last_unit_removed

    # NB: there is an "incremental publish optimization" (aka fast-forward
    # publish), and an unrelated "incremental publish". The former is
    # related to avoiding extra disk IO on publishes, and the latter is for
    # publishing units in a date range. In order to do the "incremental
    # publish", we need to disable the "incremental publish optimization"
    # to ensure the prior published repo contents are cleared out. This is
    # done via the "force_full" option.
    if association_filters:
        force_full = True
        date_filter = association_filters
    else:
        force_full = config.get(constants.FORCE_FULL_KEYWORD, False)
        date_filter = None

    # Track the fast-forward decision separately from date_filter:
    # date_filter may also hold caller-supplied association_filters, in
    # which case no previous-publish directory was located and the
    # fast-forward copy step must not be inserted (it would reference an
    # unset specific_master).
    specific_master = None
    fast_forward = False

    if last_published and \
            (last_deleted is None or last_published > last_deleted) and \
            not force_full:
        # Add the step to copy the current published directory into place
        if config.get(constants.PUBLISH_HTTPS_KEYWORD):
            root_publish_dir = configuration.get_https_publish_dir(config)
            repo_publish_dir = os.path.join(root_publish_dir, repo_relative_path)
            specific_master = os.path.realpath(repo_publish_dir)
        if not specific_master and config.get(constants.PUBLISH_HTTP_KEYWORD):
            root_publish_dir = configuration.get_http_publish_dir(config)
            repo_publish_dir = os.path.join(root_publish_dir, repo_relative_path)
            specific_master = os.path.realpath(repo_publish_dir)

        # Only do an incremental publish if the previous publish can be found.
        # Guard against specific_master being None (neither http nor https
        # publishing enabled): os.path.exists(None) raises TypeError.
        if specific_master and os.path.exists(specific_master):
            # Pass something useful to the super so that it knows the publish info
            string_date = dateutils.format_iso8601_datetime(last_published)
            date_filter = mongoengine.Q(created__gte=string_date)
            fast_forward = True

    super(Publisher, self).__init__(transfer_repo, publish_conduit, config,
                                    distributor_type,
                                    association_filters=date_filter, **kwargs)

    if fast_forward:
        # Seed the working directory with the previous publish so only the
        # units added since then need processing.
        insert_step = platform_steps.CopyDirectoryStep(
            specific_master, self.get_working_dir(), preserve_symlinks=True)
        self.insert_child(0, insert_step)
        self.rpm_step.fast_forward = True

    # Add the web specific directory publishing processing steps
    target_directories = []

    # it's convenient to create these now, but we won't add them until later,
    # because we want them to run last
    listing_steps = []

    if config.get(constants.PUBLISH_HTTPS_KEYWORD):
        root_publish_dir = configuration.get_https_publish_dir(config)
        repo_publish_dir = os.path.join(root_publish_dir, repo_relative_path)
        target_directories.append(['/', repo_publish_dir])
        listing_steps.append(
            GenerateListingFileStep(root_publish_dir, repo_publish_dir))
    if config.get(constants.PUBLISH_HTTP_KEYWORD):
        root_publish_dir = configuration.get_http_publish_dir(config)
        repo_publish_dir = os.path.join(root_publish_dir, repo_relative_path)
        target_directories.append(['/', repo_publish_dir])
        listing_steps.append(
            GenerateListingFileStep(root_publish_dir, repo_publish_dir))

    self.add_child(GenerateRepoviewStep(self.get_working_dir()))

    master_publish_dir = configuration.get_master_publish_dir(
        repo, distributor_type)
    atomic_publish_step = platform_steps.AtomicDirectoryPublishStep(
        self.get_working_dir(), target_directories, master_publish_dir)
    atomic_publish_step.description = _("Publishing files to web")
    self.add_child(atomic_publish_step)

    # add the listing file generation step(s)
    for step in listing_steps:
        self.add_child(step)