Example 1
    def test_process_main_only_publish_directory_contents(self):
        source_dir = os.path.join(self.working_directory, 'source')
        master_dir = os.path.join(self.working_directory, 'master')
        publish_dir = os.path.join(self.working_directory, 'publish', 'bar')
        publish_dir += '/'
        step = publish_step.AtomicDirectoryPublishStep(
            source_dir, [('/', publish_dir)], master_dir, only_publish_directory_contents=True)
        step.parent = Mock(timestamp=str(time.time()))

        # create some files to test
        sub_file = os.path.join(source_dir, 'bar.html')
        touch(sub_file)

        # create a pre-existing file in the publish directory that will be maintained
        existing_file = os.path.join(publish_dir, 'foo.html')
        touch(existing_file)

        # Create an old directory to test
        old_dir = os.path.join(master_dir, 'foo')
        os.makedirs(old_dir)
        step.process_main()

        target_file = os.path.join(publish_dir, 'bar.html')
        self.assertEquals(True, os.path.exists(target_file))
        self.assertTrue(os.path.exists(existing_file))
        self.assertEquals(1, len(os.listdir(master_dir)))
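
For orientation, here is a minimal standalone sketch of the flow this test exercises. The module path pulp.plugins.util.publish_step and the Mock-supplied parent timestamp mirror the test above; the directory arguments and the helper name are illustrative assumptions, not part of the examples.

import time

from mock import Mock

from pulp.plugins.util import publish_step


def publish_directory_contents(source_dir, publish_dir, master_dir):
    # Copy source_dir into a timestamped directory under master_dir, then link
    # its contents file by file into publish_dir, preserving any files that
    # already live there (only_publish_directory_contents=True).
    step = publish_step.AtomicDirectoryPublishStep(
        source_dir, [('/', publish_dir)], master_dir,
        only_publish_directory_contents=True)
    # Outside a full publisher hierarchy the parent step normally supplies the
    # timestamp; a Mock stands in for it here, exactly as the tests do.
    step.parent = Mock(timestamp=str(time.time()))
    step.process_main()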
Example 2
    def __init__(self,
                 repo,
                 publish_conduit,
                 config,
                 repo_content_unit_q=None):
        """
        Initialize the V2WebPublisher.

        :param repo: Pulp managed Docker repository
        :type  repo: pulp.plugins.model.Repository
        :param publish_conduit: Conduit providing access to relevant Pulp functionality
        :type  publish_conduit: pulp.plugins.conduits.repo_publish.RepoPublishConduit
        :param config: Pulp configuration for the distributor
        :type  config: pulp.plugins.config.PluginCallConfiguration
        :param repo_content_unit_q: optional Q object that will be applied to the queries performed
                                    against RepoContentUnit model
        :type  repo_content_unit_q: mongoengine.Q
        """
        super(V2WebPublisher,
              self).__init__(step_type=constants.PUBLISH_STEP_WEB_PUBLISHER,
                             repo=repo,
                             publish_conduit=publish_conduit,
                             config=config)

        self.redirect_data = {1: set(), 2: set(), 'list': set(), 'amd64': {}}

        docker_api_version = 'v2'
        publish_dir = configuration.get_web_publish_dir(
            repo, config, docker_api_version)
        app_file = configuration.get_redirect_file_name(repo)
        app_publish_location = os.path.join(
            configuration.get_app_publish_dir(config, docker_api_version),
            app_file)
        self.working_dir = os.path.join(self.get_working_dir(),
                                        docker_api_version)
        misc.mkdir(self.working_dir)
        self.web_working_dir = os.path.join(self.get_working_dir(), 'web')
        master_publish_dir = configuration.get_master_publish_dir(
            repo, config, docker_api_version)
        atomic_publish_step = publish_step.AtomicDirectoryPublishStep(
            self.get_working_dir(), [('', publish_dir),
                                     (app_file, app_publish_location)],
            master_publish_dir,
            step_type=constants.PUBLISH_STEP_OVER_HTTP)
        atomic_publish_step.description = _(
            'Making v2 files available via web.')
        self.add_child(
            PublishBlobsStep(repo_content_unit_q=repo_content_unit_q))
        self.publish_manifests_step = PublishManifestsStep(
            self.redirect_data, repo_content_unit_q=repo_content_unit_q)
        self.add_child(self.publish_manifests_step)
        self.publish_manifest_lists_step = PublishManifestListsStep(
            self.redirect_data, repo_content_unit_q=repo_content_unit_q)
        self.add_child(self.publish_manifest_lists_step)
        self.add_child(PublishTagsStep(self.redirect_data))
        self.add_child(atomic_publish_step)
        self.add_child(
            RedirectFileStep(app_publish_location, self.redirect_data))
Example 3
    def __init__(self,
                 repo,
                 publish_conduit,
                 config,
                 repo_content_unit_q=None):
        """
        Initialize the V2MultiWebPublisher.

        :param repo: Pulp managed Docker repository
        :type  repo: pulp.plugins.model.Repository
        :param publish_conduit: Conduit providing access to relevant Pulp functionality
        :type  publish_conduit: pulp.plugins.conduits.repo_publish.RepoPublishConduit
        :param config: Pulp configuration for the distributor
        :type  config: pulp.plugins.config.PluginCallConfiguration
        :param repo_content_unit_q: optional Q object that will be applied to the queries performed
                                    against RepoContentUnit model
        :type  repo_content_unit_q: mongoengine.Q
        """
        super(V2MultiWebPublisher,
              self).__init__(step_type=constants.PUBLISH_STEP_WEB_PUBLISHER,
                             repo=repo,
                             publish_conduit=publish_conduit,
                             config=config)
        self.docker_api_version = 'v2'
        self.manifest_to_imgname_to_tags = {}
        # Needed when publishing tags and redirect files
        self.imgname_to_tags = {}
        # Needed when publishing redirect files
        self.imgname_to_mfid = {}
        # Needed when publishing blobs
        self.layer_to_manifests = {}
        # Will be set by PublishV2FileStep
        self.redirect_files = None
        self.add_child(V2MultiCollectTagsStep())
        self.add_child(V2MultiPublishManifestsStep(repo_content_unit_q))
        self.add_child(
            V2MultiPublishBlobsStep(repo_content_unit_q=repo_content_unit_q))
        self.add_child(V2MultiPublishTagsStep())
        self.add_child(V2MultiRedirectFilesStep())
        self.add_child(V2MultiImageListingStep())

        publish_dir = configuration.get_web_publish_dir(
            repo, config, self.docker_api_version)
        master_publish_dir = configuration.get_master_publish_dir(
            repo, config, self.docker_api_version)
        self.atomic_publish_step = publish_step.AtomicDirectoryPublishStep(
            self.get_working_dir(), [('', publish_dir)],
            master_publish_dir,
            step_type=constants.PUBLISH_STEP_OVER_HTTP)
        self.atomic_publish_step.description = _(
            'Making v2 files available via web.')
        self.add_child(self.atomic_publish_step)
Example 4
    def __init__(self, repo, publish_conduit, config):
        """
        Initialize the V2WebPublisher.

        :param repo: Pulp managed Docker repository
        :type  repo: pulp.plugins.model.Repository
        :param publish_conduit: Conduit providing access to relevant Pulp functionality
        :type  publish_conduit: pulp.plugins.conduits.repo_publish.RepoPublishConduit
        :param config: Pulp configuration for the distributor
        :type  config: pulp.plugins.config.PluginCallConfiguration
        """
        super(V2WebPublisher,
              self).__init__(step_type=constants.PUBLISH_STEP_WEB_PUBLISHER,
                             repo=repo,
                             publish_conduit=publish_conduit,
                             config=config)

        docker_api_version = 'v2'
        publish_dir = configuration.get_web_publish_dir(
            repo, config, docker_api_version)
        app_file = configuration.get_redirect_file_name(repo)
        app_publish_location = os.path.join(
            configuration.get_app_publish_dir(config, docker_api_version),
            app_file)
        self.working_dir = os.path.join(self.get_working_dir(),
                                        docker_api_version)
        misc.mkdir(self.working_dir)
        self.web_working_dir = os.path.join(self.get_working_dir(), 'web')
        master_publish_dir = configuration.get_master_publish_dir(
            repo, config, docker_api_version)
        atomic_publish_step = publish_step.AtomicDirectoryPublishStep(
            self.get_working_dir(), [('', publish_dir),
                                     (app_file, app_publish_location)],
            master_publish_dir,
            step_type=constants.PUBLISH_STEP_OVER_HTTP)
        atomic_publish_step.description = _(
            'Making v2 files available via web.')
        self.add_child(PublishBlobsStep())
        self.publish_manifests_step = PublishManifestsStep()
        self.add_child(self.publish_manifests_step)
        self.add_child(PublishTagsStep())
        self.add_child(atomic_publish_step)
        self.add_child(RedirectFileStep(app_publish_location))
Example 5
    def test_process_main_multiple_targets(self):
        source_dir = os.path.join(self.working_directory, 'source')
        master_dir = os.path.join(self.working_directory, 'master')
        publish_dir = os.path.join(self.working_directory, 'publish', 'bar')
        publish_dir += '/'
        # create some files to test
        sub_file = os.path.join(source_dir, 'foo', 'bar.html')
        touch(sub_file)
        sub_file = os.path.join(source_dir, 'qux', 'quux.html')
        touch(sub_file)

        target_qux = os.path.join(self.working_directory, 'publish', 'qux.html')

        step = publish_step.AtomicDirectoryPublishStep(
            source_dir, [('/', publish_dir), ('qux/quux.html', target_qux)], master_dir)
        step.parent = Mock(timestamp=str(time.time()))

        step.process_main()

        target_file = os.path.join(publish_dir, 'foo', 'bar.html')
        self.assertEquals(True, os.path.exists(target_file))
        self.assertEquals(True, os.path.exists(target_qux))
Example 6
    def __init__(self, transfer_repo, publish_conduit, config,
                 distributor_type, **kwargs):
        """
        :param transfer_repo: repository being published
        :type  transfer_repo: pulp.plugins.db.model.Repository
        :param publish_conduit: Conduit providing access to relevant Pulp functionality
        :type  publish_conduit: pulp.plugins.conduits.repo_publish.RepoPublishConduit
        :param config: Pulp configuration for the distributor
        :type  config: pulp.plugins.config.PluginCallConfiguration
        :param distributor_type: The type of the distributor that is being published
        :type distributor_type: str
        """
        repo = transfer_repo.repo_obj

        repo_relative_path = configuration.get_repo_relative_path(repo, config)

        last_published = publish_conduit.last_publish()
        last_deleted = repo.last_unit_removed
        date_filter = None

        if last_published and \
                ((last_deleted and last_published > last_deleted) or not last_deleted):
            # Add the step to copy the current published directory into place
            specific_master = None
            if config.get(constants.PUBLISH_HTTPS_KEYWORD):
                root_publish_dir = configuration.get_https_publish_dir(config)
                repo_publish_dir = os.path.join(root_publish_dir,
                                                repo_relative_path)
                specific_master = os.path.realpath(repo_publish_dir)
            if not specific_master and config.get(
                    constants.PUBLISH_HTTP_KEYWORD):
                root_publish_dir = configuration.get_http_publish_dir(config)
                repo_publish_dir = os.path.join(root_publish_dir,
                                                repo_relative_path)
                specific_master = os.path.realpath(repo_publish_dir)

            # Only do an incremental publish if the previous publish can be found
            if specific_master and os.path.exists(specific_master):
                # Pass something useful to the super so that it knows the publish info
                string_date = dateutils.format_iso8601_datetime(last_published)
                date_filter = mongoengine.Q(created__gte=string_date)

        super(Publisher, self).__init__(transfer_repo,
                                        publish_conduit,
                                        config,
                                        distributor_type,
                                        association_filters=date_filter,
                                        **kwargs)

        if date_filter:
            insert_step = platform_steps.CopyDirectoryStep(
                specific_master,
                self.get_working_dir(),
                preserve_symlinks=True)
            self.insert_child(0, insert_step)
            self.rpm_step.fast_forward = True

        # Add the web specific directory publishing processing steps
        target_directories = []

        # it's convenient to create these now, but we won't add them until later,
        # because we want them to run last
        listing_steps = []

        if config.get(constants.PUBLISH_HTTPS_KEYWORD):
            root_publish_dir = configuration.get_https_publish_dir(config)
            repo_publish_dir = os.path.join(root_publish_dir,
                                            repo_relative_path)
            target_directories.append(['/', repo_publish_dir])
            listing_steps.append(
                GenerateListingFileStep(root_publish_dir, repo_publish_dir))
        if config.get(constants.PUBLISH_HTTP_KEYWORD):
            root_publish_dir = configuration.get_http_publish_dir(config)
            repo_publish_dir = os.path.join(root_publish_dir,
                                            repo_relative_path)
            target_directories.append(['/', repo_publish_dir])
            listing_steps.append(
                GenerateListingFileStep(root_publish_dir, repo_publish_dir))

        master_publish_dir = configuration.get_master_publish_dir(
            repo, distributor_type)
        atomic_publish_step = platform_steps.AtomicDirectoryPublishStep(
            self.get_working_dir(), target_directories, master_publish_dir)
        atomic_publish_step.description = _("Publishing files to web")

        self.add_child(atomic_publish_step)

        # add the listing file generation step(s)
        for step in listing_steps:
            self.add_child(step)
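
Examples 6 and 11 decide between a fast-forward publish and a full publish before building the rest of the step tree. A condensed sketch of that decision follows; it leaves out the check that the previously published directory still exists on disk, and the dateutils import path is an assumption.

import mongoengine

from pulp.common import dateutils


def build_date_filter(last_published, last_deleted, force_full=False):
    # Return a Q object limiting the publish to units created since the last
    # publish, or None when a full publish is required: never published before,
    # units removed since the last publish, or an explicit force_full request.
    if force_full or not last_published:
        return None
    if last_deleted and last_published <= last_deleted:
        return None
    string_date = dateutils.format_iso8601_datetime(last_published)
    return mongoengine.Q(created__gte=string_date)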
Example 7
    def __init__(self, repo_group, publish_conduit, config, distributor_type):
        """
        :param repo_group: Pulp managed Yum repository group
        :type  repo_group: pulp.plugins.model.RepositoryGroup
        :param publish_conduit: Conduit providing access to relevant Pulp functionality
        :type  publish_conduit: pulp.plugins.conduits.repo_publish.RepoGroupPublishConduit
        :param config: Pulp configuration for the distributor
        :type  config: pulp.plugins.config.PluginCallConfiguration
        :param distributor_type: The type of the distributor that is being published
        :type distributor_type: str
        """
        super(ExportRepoGroupPublisher,
              self).__init__(constants.PUBLISH_STEP_EXPORT_REPO_GROUP,
                             repo_group,
                             publish_conduit,
                             config,
                             plugin_type=distributor_type)

        working_dir = self.get_working_dir()
        scratch_dir = os.path.join(working_dir, 'scratch')
        realized_dir = os.path.join(working_dir, 'realized')

        flat_config = config.flatten()
        export_dir = config.get(constants.EXPORT_DIRECTORY_KEYWORD)
        if export_dir:
            repo_config = config
        else:
            repo_config = PluginCallConfiguration(
                flat_config,
                {constants.EXPORT_DIRECTORY_KEYWORD: realized_dir})

        repo_objs = model.Repository.objects(repo_id__in=repo_group.repo_ids)
        empty_repos = True
        for repo_obj in repo_objs:
            empty_repos = False
            repo = repo_obj.to_transfer_repo()
            # Make sure we only publish rpm repos
            if repo.notes['_repo-type'] != 'rpm-repo':
                continue

            repo_config_copy = copy.deepcopy(repo_config)

            # Pull the export distributor so we can reuse its configured relative_url
            distributor = model.Distributor.objects(
                repo_id=repo_obj['repo_id'],
                distributor_id=ids.EXPORT_DISTRIBUTOR_ID,
                config__relative_url__exists=True).first()

            if distributor is not None:
                relative_url = distributor['config']['relative_url']
            else:
                relative_url = repo_obj['repo_id']

            if not export_dir:
                repo_config_copy.override_config['relative_url'] = relative_url
            else:
                merged_rel = repo_config_copy.get('relative_url',
                                                  '') + '/' + relative_url
                repo_config_copy.override_config['relative_url'] = merged_rel

            repo_working_dir = os.path.join(scratch_dir, repo.id)
            repo_conduit = RepoPublishConduit(repo.id, distributor_type)
            publisher = ExportRepoPublisher(repo,
                                            repo_conduit,
                                            repo_config_copy,
                                            distributor_type,
                                            working_dir=repo_working_dir)
            publisher.description = _("Exporting Repo: %s") % repo.id
            self.add_child(publisher)
        if empty_repos:
            os.makedirs(realized_dir)
            self.add_child(GenerateListingFileStep(realized_dir, realized_dir))

        # If we aren't exporting to a directory add the ISO create & publish steps
        if not export_dir:
            # Create the steps to generate the ISO and publish them to their final location
            output_dir = os.path.join(working_dir, 'output')
            self.add_child(CreateIsoStep(realized_dir, output_dir))

            # create the PULP_MANIFEST file if requested in the config
            if config.get_boolean(constants.CREATE_PULP_MANIFEST) is True:
                self.add_child(
                    platform_steps.CreatePulpManifestStep(output_dir))

            export_dirs = configuration.get_export_repo_group_publish_dirs(
                repo_group, config)
            publish_location = [('/', location) for location in export_dirs]

            master_dir = configuration.get_master_publish_dir_from_group(
                repo_group, distributor_type)
            self.add_child(
                platform_steps.AtomicDirectoryPublishStep(
                    output_dir, publish_location, master_dir))
Example 8
    def __init__(self, repo, publish_conduit, config, distributor_type,
                 **kwargs):
        """
        :param repo: Pulp managed Yum repository
        :type  repo: pulp.plugins.model.Repository
        :param publish_conduit: Conduit providing access to relevant Pulp functionality
        :type  publish_conduit: pulp.plugins.conduits.repo_publish.RepoPublishConduit
        :param config: Pulp configuration for the distributor
        :type  config: pulp.plugins.config.PluginCallConfiguration
        :param distributor_type: The type of the distributor that is being published
        :type distributor_type: str
        """
        super(ExportRepoPublisher,
              self).__init__(repo, publish_conduit, config, distributor_type,
                             **kwargs)

        date_q = export_utils.create_date_range_filter(config)
        if date_q:
            # Since this is a partial export we don't generate metadata;
            # we have to clear out the previously added steps because
            # we only need special versions of the rpm, drpm, and errata steps
            self.clear_children()
            self.add_child(
                PublishRpmAndDrpmStepIncremental(repo_content_unit_q=date_q))
            self.add_child(
                PublishErrataStepIncremental(repo_content_unit_q=date_q))

        working_directory = self.get_working_dir()
        export_dir = config.get(constants.EXPORT_DIRECTORY_KEYWORD)
        if export_dir:
            target_dir = os.path.join(
                export_dir,
                configuration.get_repo_relative_path(repo.repo_obj, config))
            self.add_child(
                platform_steps.CopyDirectoryStep(working_directory,
                                                 target_dir))
            self.add_child(GenerateListingFileStep(export_dir, target_dir))
        else:
            # Reset the steps to use an internal scratch directory other than the base working dir
            content_dir = os.path.join(working_directory, 'scratch')
            for step in self.children:
                step.working_dir = content_dir
            self.working_dir = content_dir

            # Set up step to copy all the files to a realized directory with no symlinks
            # This could be optimized with a pathspec so that we don't create all the files
            # separately
            realized_dir = os.path.join(working_directory, 'realized')
            copy_target = os.path.join(
                realized_dir,
                configuration.get_repo_relative_path(repo.repo_obj, config))
            self.add_child(
                platform_steps.CopyDirectoryStep(content_dir, copy_target))
            self.add_child(GenerateListingFileStep(realized_dir, copy_target))

            # Create the steps to generate the ISO and publish them to their final location
            output_dir = os.path.join(working_directory, 'output')
            self.add_child(CreateIsoStep(realized_dir, output_dir))

            # create the PULP_MANIFEST file if requested in the config
            if config.get_boolean(constants.CREATE_PULP_MANIFEST) is True:
                self.add_child(
                    platform_steps.CreatePulpManifestStep(output_dir))

            dirs = configuration.get_export_repo_publish_dirs(
                repo.repo_obj, config)
            publish_location = [('/', location) for location in dirs]

            master_dir = configuration.get_master_publish_dir(
                repo.repo_obj, self.get_plugin_type())
            atomic_publish = platform_steps.AtomicDirectoryPublishStep(
                output_dir, publish_location, master_dir)
            atomic_publish.description = _('Moving ISO to final location')
            self.add_child(atomic_publish)
Example 9
    def test_process_main_default_id(self):
        step = publish_step.AtomicDirectoryPublishStep('foo', 'bar', 'baz')
        self.assertEquals(step.step_id, reporting_constants.PUBLISH_STEP_DIRECTORY)
Example 10
    def test_process_main_alternate_id(self):
        step = publish_step.AtomicDirectoryPublishStep('foo', 'bar', 'baz', step_type='alternate')
        self.assertEquals(step.step_id, 'alternate')
Example 11
    def __init__(self,
                 transfer_repo,
                 publish_conduit,
                 config,
                 distributor_type,
                 association_filters=None,
                 **kwargs):
        """
        :param transfer_repo: repository being published
        :type  transfer_repo: pulp.plugins.db.model.Repository
        :param publish_conduit: Conduit providing access to relevant Pulp functionality
        :type  publish_conduit: pulp.plugins.conduits.repo_publish.RepoPublishConduit
        :param config: Pulp configuration for the distributor
        :type  config: pulp.plugins.config.PluginCallConfiguration
        :param association_filters: Any filters to be applied to the list of RPMs being published
        :type association_filters: mongoengine.Q
        :param distributor_type: The type of the distributor that is being published
        :type distributor_type: str
        """
        repo = transfer_repo.repo_obj

        repo_relative_path = configuration.get_repo_relative_path(repo, config)

        last_published = publish_conduit.last_publish()
        last_deleted = repo.last_unit_removed

        # NB: there is an "incremental publish optimization" (aka fast-forward
        # publish), and an unrelated "incremental publish". The former is
        # related to avoiding extra disk IO on publishes, and the latter is for
        # publishing units in a date range.  In order to do the "incremental
        # publish", we need to disable the "incremental publish optimization"
        # to ensure the prior published repo contents are cleared out. This is
        # done via the "force_full" option.

        if association_filters:
            force_full = True
            date_filter = association_filters
        else:
            force_full = config.get(constants.FORCE_FULL_KEYWORD, False)
            date_filter = None

        if last_published and \
                (last_deleted is None or last_published > last_deleted) and \
                not force_full:
            # Add the step to copy the current published directory into place
            specific_master = None
            if config.get(constants.PUBLISH_HTTPS_KEYWORD):
                root_publish_dir = configuration.get_https_publish_dir(config)
                repo_publish_dir = os.path.join(root_publish_dir,
                                                repo_relative_path)
                specific_master = os.path.realpath(repo_publish_dir)
            if not specific_master and config.get(
                    constants.PUBLISH_HTTP_KEYWORD):
                root_publish_dir = configuration.get_http_publish_dir(config)
                repo_publish_dir = os.path.join(root_publish_dir,
                                                repo_relative_path)
                specific_master = os.path.realpath(repo_publish_dir)

            # Only do an incremental publish if the previous publish can be found
            if specific_master and os.path.exists(specific_master):
                # Pass something useful to the super so that it knows the publish info
                string_date = dateutils.format_iso8601_datetime(last_published)
                date_filter = mongoengine.Q(created__gte=string_date)

        super(Publisher, self).__init__(transfer_repo,
                                        publish_conduit,
                                        config,
                                        distributor_type,
                                        association_filters=date_filter,
                                        **kwargs)

        if date_filter:
            insert_step = platform_steps.CopyDirectoryStep(
                specific_master,
                self.get_working_dir(),
                preserve_symlinks=True)
            self.insert_child(0, insert_step)
            self.rpm_step.fast_forward = True

        # Add the web specific directory publishing processing steps
        target_directories = []

        # it's convenient to create these now, but we won't add them until later,
        # because we want them to run last
        listing_steps = []

        if config.get(constants.PUBLISH_HTTPS_KEYWORD):
            root_publish_dir = configuration.get_https_publish_dir(config)
            repo_publish_dir = os.path.join(root_publish_dir,
                                            repo_relative_path)
            target_directories.append(['/', repo_publish_dir])
            listing_steps.append(
                GenerateListingFileStep(root_publish_dir, repo_publish_dir))
        if config.get(constants.PUBLISH_HTTP_KEYWORD):
            root_publish_dir = configuration.get_http_publish_dir(config)
            repo_publish_dir = os.path.join(root_publish_dir,
                                            repo_relative_path)
            target_directories.append(['/', repo_publish_dir])
            listing_steps.append(
                GenerateListingFileStep(root_publish_dir, repo_publish_dir))

        self.add_child(GenerateRepoviewStep(self.get_working_dir()))

        master_publish_dir = configuration.get_master_publish_dir(
            repo, distributor_type)
        atomic_publish_step = platform_steps.AtomicDirectoryPublishStep(
            self.get_working_dir(), target_directories, master_publish_dir)
        atomic_publish_step.description = _("Publishing files to web")

        self.add_child(atomic_publish_step)

        # add the listing file generation step(s)
        for step in listing_steps:
            self.add_child(step)
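
Finally, the call sites above imply the constructor shape sketched below. The parameter names, the paths, the step_type string, and the Mock parent are assumptions for illustration, not quoted from the library.

import time

from mock import Mock

from pulp.plugins.util import publish_step

# Shape inferred from the examples:
#   AtomicDirectoryPublishStep(source_location, publish_locations, master_publish_dir,
#                              step_type=None, only_publish_directory_contents=False)
# publish_locations is a list of (relative_path_inside_source, target_path) tuples,
# so one atomic swap can expose a whole tree and an extra standalone file at once.
step = publish_step.AtomicDirectoryPublishStep(
    '/var/cache/example/working',
    [('', '/var/www/pub/example/'),
     ('example.json', '/var/www/pub/app/example.json')],
    '/var/lib/example/master',
    step_type='example_atomic_web_publish')
step.description = 'Making example files available via web.'
step.parent = Mock(timestamp=str(time.time()))
# step.process_main() would copy the working directory under the master directory
# and repoint both publish targets at the new timestamped copy.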