def test_end_date_only(self):
    """Only an end date set: the filter must be Q(created__lte=end_date)."""
    # Arrange: a config carrying just the end-date keyword.
    self.repo_config[constants.END_DATE_KEYWORD] = self.end_date
    call_config = PluginCallConfiguration({}, self.repo_config)
    expected_q = mongoengine.Q(created__lte=self.end_date)

    # Act
    result = export_utils.create_date_range_filter(call_config)

    # Assert: compare the underlying query dicts of both Q objects.
    self.assertDictEqual(expected_q.query, result.query)
def test_end_date_only(self):
    """Only an end date set: expect a mongo spec with a bare $lte clause."""
    # Arrange: a config carrying just the end-date keyword.
    self.repo_config[constants.END_DATE_KEYWORD] = self.test_date
    call_config = PluginCallConfiguration({}, self.repo_config)

    # Act
    result = export_utils.create_date_range_filter(call_config)

    # Assert: the filter is keyed by the associated-unit date field.
    self.assertEqual(
        {export_utils.ASSOCIATED_UNIT_DATE_KEYWORD: {'$lte': self.test_date}},
        result)
def test_start_and_end_date(self):
    """Both dates set: the filter must bound `created` on both sides."""
    # Arrange: a config carrying both date keywords.
    self.repo_config[constants.START_DATE_KEYWORD] = self.start_date
    self.repo_config[constants.END_DATE_KEYWORD] = self.end_date
    call_config = PluginCallConfiguration({}, self.repo_config)

    # Act
    result = export_utils.create_date_range_filter(call_config)

    # Assert: compare the underlying query dicts of both Q objects.
    expected_q = mongoengine.Q(created__gte=self.start_date,
                               created__lte=self.end_date)
    self.assertDictEqual(expected_q.query, result.query)
def __init__(self, repo, publish_conduit, config, distributor_type):
    """
    Build the step pipeline for an export (ISO / export-directory) publish.

    After the base class installs the default publish steps, this either
    swaps in incremental-only steps (when a date-range filter is configured)
    and then routes output to an export directory, or stages content through
    scratch/realized/output directories to build ISOs and atomically publish
    them.

    :param repo: Pulp managed Yum repository
    :type repo: pulp.plugins.model.Repository
    :param publish_conduit: Conduit providing access to relative Pulp functionality
    :type publish_conduit: pulp.plugins.conduits.repo_publish.RepoPublishConduit
    :param config: Pulp configuration for the distributor
    :type config: pulp.plugins.config.PluginCallConfiguration
    :param distributor_type: The type of the distributor that is being published
    :type distributor_type: str
    """
    super(ExportRepoPublisher, self).__init__(repo, publish_conduit, config,
                                              distributor_type)

    date_filter = export_utils.create_date_range_filter(config)
    if date_filter:
        # Since this is a partial export we don't generate metadata;
        # we have to clear out the previously added steps because
        # we only need special versions of the rpm, drpm, and errata steps.
        self.clear_children()
        self.add_child(PublishRpmAndDrpmStepIncremental(association_filters=date_filter))
        self.add_child(PublishErrataStepIncremental(association_filters=date_filter))

    working_directory = self.get_working_dir()
    export_dir = config.get(constants.EXPORT_DIRECTORY_KEYWORD)
    if export_dir:
        # Export-directory mode: copy the working dir straight to the
        # configured export location and generate a listing file there.
        target_dir = os.path.join(export_dir,
                                  configuration.get_repo_relative_path(repo, config))
        self.add_child(CopyDirectoryStep(working_directory, target_dir))
        self.add_child(GenerateListingFileStep(export_dir, target_dir))
    else:
        # ISO mode.
        # Reset the steps to use an internal scratch directory other than
        # the base working dir.
        content_dir = os.path.join(working_directory, 'scratch')
        for step in self.children:
            step.working_dir = content_dir

        # Set up step to copy all the files to a realized directory with no
        # symlinks. This could be optimized with a pathspec so that we don't
        # create all the files separately.
        realized_dir = os.path.join(working_directory, 'realized')
        copy_target = os.path.join(realized_dir,
                                   configuration.get_repo_relative_path(repo, config))
        self.add_child(CopyDirectoryStep(content_dir, copy_target))
        self.add_child(GenerateListingFileStep(realized_dir, copy_target))

        # Create the steps to generate the ISO and publish them to their
        # final location.
        output_dir = os.path.join(working_directory, 'output')
        self.add_child(CreateIsoStep(realized_dir, output_dir))

        # Each publish dir is mounted at '/' relative to the output tree.
        publish_location = [('/', location) for location
                            in configuration.get_export_repo_publish_dirs(repo, config)]
        master_dir = configuration.get_master_publish_dir(repo, self.get_distributor_type())
        # Atomic move so consumers never see a half-published ISO tree.
        atomic_publish = AtomicDirectoryPublishStep(output_dir, publish_location, master_dir)
        atomic_publish.description = _('Moving ISO to final location')
        self.add_child(atomic_publish)
def test_start_and_end_date(self):
    """Both dates set: expect a spec with both $gte and $lte clauses."""
    # Arrange: a config carrying both date keywords (same date is fine).
    self.repo_config[constants.START_DATE_KEYWORD] = self.test_date
    self.repo_config[constants.END_DATE_KEYWORD] = self.test_date
    call_config = PluginCallConfiguration({}, self.repo_config)

    # Act
    result = export_utils.create_date_range_filter(call_config)

    # Assert: the filter is keyed by the associated-unit date field.
    expected = {export_utils.ASSOCIATED_UNIT_DATE_KEYWORD:
                {'$gte': self.test_date, '$lte': self.test_date}}
    self.assertEqual(expected, result)
def __init__(self, repo, publish_conduit, config, distributor_type, **kwargs):
    """
    Build the step pipeline for a standard web (HTTP/HTTPS) yum publish.

    If the repo has been published before and nothing has been removed since
    that publish, this sets up a fast-forward (incremental) publish: the
    previously published tree is copied into the working directory and the
    unit association filter is limited to units added after the last publish.
    Otherwise a full publish pipeline is built.

    :param repo: Pulp managed Yum repository
    :type repo: pulp.plugins.model.Repository
    :param publish_conduit: Conduit providing access to relative Pulp functionality
    :type publish_conduit: pulp.plugins.conduits.repo_publish.RepoPublishConduit
    :param config: Pulp configuration for the distributor
    :type config: pulp.plugins.config.PluginCallConfiguration
    :param distributor_type: The type of the distributor that is being published
    :type distributor_type: str
    """
    repo_relative_path = configuration.get_repo_relative_path(repo, config)
    last_published = publish_conduit.last_publish()
    last_deleted = repo.last_unit_removed
    date_filter = None
    insert_step = None

    # Incremental publish is only valid when a previous publish exists and
    # no unit has been removed since it (removals require a full rebuild).
    if last_published and \
            ((last_deleted and last_published > last_deleted) or not last_deleted):
        # Add the step to copy the current published directory into place.
        working_dir = repo.working_dir
        specific_master = None

        # Prefer the HTTPS tree as the fast-forward source; fall back to HTTP.
        if config.get(constants.PUBLISH_HTTPS_KEYWORD):
            root_publish_dir = configuration.get_https_publish_dir(config)
            repo_publish_dir = os.path.join(root_publish_dir, repo_relative_path)
            specific_master = os.path.realpath(repo_publish_dir)
        if not specific_master and config.get(constants.PUBLISH_HTTP_KEYWORD):
            root_publish_dir = configuration.get_http_publish_dir(config)
            repo_publish_dir = os.path.join(root_publish_dir, repo_relative_path)
            specific_master = os.path.realpath(repo_publish_dir)

        # Only do an incremental publish if the previous publish can be found.
        # NOTE(review): if neither HTTP nor HTTPS publishing is enabled,
        # specific_master is still None here and os.path.exists would be
        # handed None — presumably at least one is always enabled; confirm.
        if os.path.exists(specific_master):
            insert_step = CopyDirectoryStep(specific_master, working_dir,
                                            preserve_symlinks=True)
            # Pass something useful to the super so that it knows the publish info.
            string_date = dateutils.format_iso8601_datetime(last_published)
            date_filter = export_utils.create_date_range_filter(
                {constants.START_DATE_KEYWORD: string_date})

    super(Publisher, self).__init__(repo, publish_conduit, config, distributor_type,
                                    association_filters=date_filter, **kwargs)

    if insert_step:
        # Copy the old publish tree in before any other step runs, and tell
        # the rpm step it can fast-forward instead of rebuilding everything.
        self.insert_child(0, insert_step)
        self.rpm_step.fast_forward = True

    # Add the web specific directory publishing processing steps.
    target_directories = []
    # it's convenient to create these now, but we won't add them until later,
    # because we want them to run last
    listing_steps = []

    if config.get(constants.PUBLISH_HTTPS_KEYWORD):
        root_publish_dir = configuration.get_https_publish_dir(config)
        repo_publish_dir = os.path.join(root_publish_dir, repo_relative_path)
        target_directories.append(['/', repo_publish_dir])
        listing_steps.append(GenerateListingFileStep(root_publish_dir, repo_publish_dir))
    if config.get(constants.PUBLISH_HTTP_KEYWORD):
        root_publish_dir = configuration.get_http_publish_dir(config)
        repo_publish_dir = os.path.join(root_publish_dir, repo_relative_path)
        target_directories.append(['/', repo_publish_dir])
        listing_steps.append(GenerateListingFileStep(root_publish_dir, repo_publish_dir))

    master_publish_dir = configuration.get_master_publish_dir(repo, distributor_type)
    # Atomic swap into the live web directories so clients never see a
    # partially published repository.
    atomic_publish_step = AtomicDirectoryPublishStep(self.get_working_dir(),
                                                     target_directories, master_publish_dir)
    atomic_publish_step.description = _("Publishing files to web")
    self.add_child(atomic_publish_step)

    # add the listing file generation step(s)
    for step in listing_steps:
        self.add_child(step)
def test_no_filter(self):
    """create_date_range_filter returns None when the config has no dates."""
    # The repo config carries neither a start nor an end date, so no
    # date-range filter should be produced.
    config = PluginCallConfiguration({}, self.repo_config)

    date_filter = export_utils.create_date_range_filter(config)

    # assertIsNone yields a clearer failure message than assertTrue(x is None)
    # and asserts the exact identity we care about.
    self.assertIsNone(date_filter)