コード例 #1
0
ファイル: test_plugins.py プロジェクト: bartwo/pulp
 def test_import(self):
     """
     Publish a repo with the nodes HTTP distributor, wipe the database,
     then sync it back with the nodes HTTP importer and verify that all
     units were recreated.
     """
     # Setup: publish the parent repository so a manifest exists to import.
     self.populate()
     pulp_conf.set('server', 'storage_dir', self.parentfs)
     dist = NodesHttpDistributor()
     repo = Repository(self.REPO_ID)
     cfg = {
         'protocol': 'file',
         'http': {'alias': self.alias},
         'https': {'alias': self.alias},
         'file': {'alias': self.alias},
     }
     conduit = RepoPublishConduit(self.REPO_ID, constants.HTTP_DISTRIBUTOR)
     dist.publish_repo(repo, conduit, cfg)
     # Wipe the database so the importer has to recreate everything.
     Repo.get_collection().remove()
     RepoDistributor.get_collection().remove()
     RepoContentUnit.get_collection().remove()
     unit_db.clean()
     # Test
     importer = NodesHttpImporter()
     publisher = dist.publisher(repo, cfg)
     manifest_url = 'file://' + publisher.manifest_path()
     cfg = dict(manifest_url=manifest_url, strategy=constants.MIRROR_STRATEGY)
     conduit = RepoSyncConduit(
         self.REPO_ID,
         constants.HTTP_IMPORTER,
         RepoContentUnit.OWNER_TYPE_IMPORTER,
         constants.HTTP_IMPORTER)
     importer.sync_repo(repo, conduit, cfg)
     # Verify: every published unit was imported back.
     units = conduit.get_units()
     self.assertEqual(len(units), self.NUM_UNITS)
コード例 #2
0
ファイル: test_repo_publish.py プロジェクト: maxamillion/pulp
class RepoPublishConduitTests(base.PulpServerTests):
    """Tests for RepoPublishConduit backed by the mongoengine models."""

    def clean(self):
        super(RepoPublishConduitTests, self).clean()

        mock_plugins.reset()
        model.Repository.objects.delete()
        model.Distributor.objects.delete()

    @mock.patch('pulp.server.controllers.distributor.model.Repository.objects')
    def setUp(self, mock_repo_qs):
        super(RepoPublishConduitTests, self).setUp()
        mock_plugins.install()
        manager_factory.initialize()

        # Populate the database with a repo with units
        dist_controller.add_distributor('repo-1', 'mock-distributor', {}, True,
                                        distributor_id='dist-1')

        self.conduit = RepoPublishConduit('repo-1', 'dist-1')

    def tearDown(self):
        super(RepoPublishConduitTests, self).tearDown()
        mock_plugins.reset()

    def test_str(self):
        """
        Makes sure the __str__ implementation doesn't crash.
        """
        str(self.conduit)

    def test_last_publish(self):
        """
        Tests retrieving the last publish time in both the unpublished and
        previously published cases.
        """
        # Test - Unpublished
        unpublished = self.conduit.last_publish()
        self.assertIsNone(unpublished)

        # Setup - Previous publish
        last_publish = datetime.datetime(2015, 4, 29, 20, 23, 56, 0)
        repo_dist = model.Distributor.objects.get_or_404(repo_id='repo-1')
        repo_dist['last_publish'] = last_publish
        repo_dist.save()

        # Test - Last publish
        found = self.conduit.last_publish()
        self.assertIsInstance(found, datetime.datetime)  # check returned format

        # The conduit returns an aware datetime; the stored value is naive.
        self.assertEqual(found.tzinfo, dateutils.utc_tz())
        self.assertEqual(repo_dist['last_publish'], found.replace(tzinfo=None))

    @mock.patch('pulp.plugins.conduits.repo_publish.model.Distributor.objects')
    def test_last_publish_with_error(self, m_dist_qs):
        """
        Test the handling of an error getting last_publish information.
        """
        m_dist_qs.only.return_value.get_or_404.side_effect = exceptions.MissingResource
        self.assertRaises(DistributorConduitException, self.conduit.last_publish)
コード例 #3
0
ファイル: test_plugins.py プロジェクト: maxamillion/pulp
 def test_import_cached_manifest_missing_units(self, *unused):
     """
     Sync with a cached manifest present in the working dir but no cached
     units file; the importer must still import every unit.
     """
     # Setup
     self.populate()
     with mock_config.patch({'server': {'storage_dir': self.parentfs}}):
         dist = NodesHttpDistributor()
         working_dir = os.path.join(self.childfs, 'working_dir')
         os.makedirs(working_dir)
         repo = Repository(self.REPO_ID, working_dir)
         configuration = self.dist_conf()
         conduit = RepoPublishConduit(self.REPO_ID, constants.HTTP_DISTRIBUTOR)
         dist.publish_repo(repo, conduit, configuration)
         model.Distributor.objects.delete()
         RepoContentUnit.get_collection().remove()
         unit_db.clean()
         self.define_plugins()
         publisher = dist.publisher(repo, configuration)
         manifest_path = publisher.manifest_path()
         manifest = Manifest(manifest_path)
         manifest.read()
         # Seed the working dir with the manifest only (no units file).
         shutil.copy(manifest_path, os.path.join(working_dir, MANIFEST_FILE_NAME))
         # Test
         importer = NodesHttpImporter()
         manifest_url = pathlib.url_join(publisher.base_url, manifest_path)
         configuration = {
             constants.MANIFEST_URL_KEYWORD: manifest_url,
             constants.STRATEGY_KEYWORD: constants.MIRROR_STRATEGY,
         }
         configuration = PluginCallConfiguration(configuration, {})
         conduit = RepoSyncConduit(self.REPO_ID, constants.HTTP_IMPORTER, Mock())
     with mock_config.patch({'server': {'storage_dir': self.childfs}}):
         with patch('pulp_node.constants.CONTENT_PATH', self.parentfs):
             importer.sync_repo(repo, conduit, configuration)
         # Verify
         units = conduit.get_units()
         self.assertEqual(len(units), self.NUM_UNITS)
コード例 #4
0
ファイル: test_repo_publish.py プロジェクト: nbetm/pulp
class RepoPublishConduitTests(base.PulpServerTests):
    """Tests for RepoPublishConduit backed by the RepoDistributor collection."""

    def clean(self):
        super(RepoPublishConduitTests, self).clean()

        mock_plugins.reset()
        model.Repository.drop_collection()
        RepoDistributor.get_collection().remove()

    @mock.patch('pulp.server.managers.repo.importer.model.Repository.objects')
    def setUp(self, mock_repo_qs):
        super(RepoPublishConduitTests, self).setUp()
        mock_plugins.install()
        manager_factory.initialize()

        self.distributor_manager = manager_factory.repo_distributor_manager()

        # Populate the database with a repo with units
        self.distributor_manager.add_distributor('repo-1', 'mock-distributor', {}, True,
                                                 distributor_id='dist-1')

        self.conduit = RepoPublishConduit('repo-1', 'dist-1')

    def tearDown(self):
        super(RepoPublishConduitTests, self).tearDown()
        mock_plugins.reset()

    def test_str(self):
        """
        Makes sure the __str__ implementation doesn't crash.
        """
        str(self.conduit)

    def test_last_publish(self):
        """
        Tests retrieving the last publish time in both the unpublished and
        previously published cases.
        """
        # Test - Unpublished
        unpublished = self.conduit.last_publish()
        self.assertIsNone(unpublished)

        # Setup - Previous publish
        last_publish = datetime.datetime(2015, 4, 29, 20, 23, 56, 0)
        repo_dist = RepoDistributor.get_collection().find_one({'repo_id': 'repo-1'})
        repo_dist['last_publish'] = last_publish
        RepoDistributor.get_collection().save(repo_dist, safe=True)

        # Test - Last publish
        found = self.conduit.last_publish()
        self.assertIsInstance(found, datetime.datetime)  # check returned format
        self.assertEqual(repo_dist['last_publish'], found)

    @mock.patch('pulp.plugins.conduits.repo_publish.RepoDistributor')
    def test_last_publish_with_error(self, mock_dist):
        """
        Test the handling of an error getting last_publish information.
        """
        mock_dist.get_collection().find_one.return_value = None
        self.assertRaises(DistributorConduitException, self.conduit.last_publish)
コード例 #5
0
    def _init_publisher(self):
        """Build the repo, conduit, config and BaseYumRepoPublisher under test."""
        self.repo = Repository(self.repo_id, working_dir=self.working_dir)
        repo = self.repo

        conduit = RepoPublishConduit(repo.id, YUM_DISTRIBUTOR_ID)
        conduit.last_publish = mock.Mock(return_value=None)
        conduit.get_repo_scratchpad = mock.Mock(return_value={})

        defaults = {
            'http': True,
            'https': True,
            'relative_url': None,
            'http_publish_dir': os.path.join(self.published_dir, 'http'),
            'https_publish_dir': os.path.join(self.published_dir, 'https'),
        }
        config = PluginCallConfiguration(None, None)
        config.default_config.update(defaults)

        self.publisher = publish.BaseYumRepoPublisher(
            repo, conduit, config, YUM_DISTRIBUTOR_ID,
            working_dir=self.working_dir)
        self.publisher.get_checksum_type = mock.Mock(return_value=None)

        # Stub the repomd context and step list so individual _publish_<step>
        # methods can be exercised outside of the publish() entry point.
        self.publisher.repomd_file_context = mock.MagicMock()
        self.publisher.all_steps = mock.MagicMock()
コード例 #6
0
class RepoPublishConduitTests(base.PulpServerTests):
    """Tests for RepoPublishConduit using the repo/distributor managers."""

    def clean(self):
        super(RepoPublishConduitTests, self).clean()

        mock_plugins.reset()

        Repo.get_collection().remove()
        RepoDistributor.get_collection().remove()

    def setUp(self):
        super(RepoPublishConduitTests, self).setUp()
        mock_plugins.install()
        manager_factory.initialize()

        self.repo_manager = manager_factory.repo_manager()
        self.distributor_manager = manager_factory.repo_distributor_manager()

        # Populate the database with a repo with units
        self.repo_manager.create_repo('repo-1')
        self.distributor_manager.add_distributor('repo-1', 'mock-distributor', {}, True,
                                                 distributor_id='dist-1')

        self.conduit = RepoPublishConduit('repo-1', 'dist-1')

    def test_str(self):
        """
        Makes sure the __str__ implementation doesn't crash.
        """
        str(self.conduit)

    def test_last_publish(self):
        """
        Tests retrieving the last publish time in both the unpublished and
        previously published cases.
        """
        # Test - Unpublished
        unpublished = self.conduit.last_publish()
        self.assertIsNone(unpublished)

        # Setup - Previous publish; stored as an ISO-8601 string.
        last_publish = datetime.datetime.now()
        repo_dist = RepoDistributor.get_collection().find_one({'repo_id': 'repo-1'})
        repo_dist['last_publish'] = dateutils.format_iso8601_datetime(last_publish)
        RepoDistributor.get_collection().save(repo_dist, safe=True)

        # Test - Last publish
        found = self.conduit.last_publish()
        self.assertIsInstance(found, datetime.datetime)  # check returned format
        self.assertEqual(repo_dist['last_publish'], dateutils.format_iso8601_datetime(found))

    @mock.patch('pulp.server.managers.repo.publish.RepoPublishManager.last_publish')
    def test_last_publish_with_error(self, mock_call):
        # Setup: any manager error must surface as a conduit exception.
        mock_call.side_effect = Exception()

        # Test
        self.assertRaises(DistributorConduitException, self.conduit.last_publish)
コード例 #7
0
class RepoPublishConduitTests(base.PulpServerTests):
    """Tests for RepoPublishConduit built on the repo/distributor managers."""

    def clean(self):
        super(RepoPublishConduitTests, self).clean()

        mock_plugins.reset()

        Repo.get_collection().remove()
        RepoDistributor.get_collection().remove()

    def setUp(self):
        super(RepoPublishConduitTests, self).setUp()
        mock_plugins.install()
        manager_factory.initialize()

        self.repo_manager = manager_factory.repo_manager()
        self.distributor_manager = manager_factory.repo_distributor_manager()

        # Populate the database with a repo with units
        self.repo_manager.create_repo('repo-1')
        self.distributor_manager.add_distributor('repo-1', 'mock-distributor', {}, True,
                                                 distributor_id='dist-1')

        self.conduit = RepoPublishConduit('repo-1', 'dist-1')

    def test_str(self):
        """
        Makes sure the __str__ implementation doesn't crash.
        """
        str(self.conduit)

    def test_last_publish(self):
        """
        Tests retrieving the last publish time in both the unpublished and
        previously published cases.
        """
        # Test - Unpublished
        unpublished = self.conduit.last_publish()
        self.assertIsNone(unpublished)

        # Setup - Previous publish; the collection stores an ISO-8601 string.
        last_publish = datetime.datetime.now()
        repo_dist = RepoDistributor.get_collection().find_one({'repo_id': 'repo-1'})
        repo_dist['last_publish'] = dateutils.format_iso8601_datetime(last_publish)
        RepoDistributor.get_collection().save(repo_dist, safe=True)

        # Test - Last publish
        found = self.conduit.last_publish()
        self.assertIsInstance(found, datetime.datetime)  # check returned format
        self.assertEqual(repo_dist['last_publish'], dateutils.format_iso8601_datetime(found))

    @mock.patch('pulp.server.managers.repo.publish.RepoPublishManager.last_publish')
    def test_last_publish_with_error(self, mock_call):
        # Setup: any manager error must surface as a conduit exception.
        mock_call.side_effect = Exception()

        # Test
        self.assertRaises(DistributorConduitException, self.conduit.last_publish)
コード例 #8
0
ファイル: test_steps.py プロジェクト: beav/pulp_ostree
 def setUp(self):
     """Create scratch content/work dirs, a repo and the parent PluginStep."""
     self.working_directory = tempfile.mkdtemp()
     self.content_dir = os.path.join(self.working_directory, 'content')
     self.working_dir = os.path.join(self.working_directory, 'work')
     os.makedirs(self.working_dir)
     self.repo = Repository(id='foo', working_dir=self.working_dir)
     publish_conduit = RepoPublishConduit(self.repo.id, 'foo_repo')
     publish_conduit.get_repo_scratchpad = Mock(return_value={u'tags': {}})
     plugin_config = PluginCallConfiguration(None, None)
     self.parent = steps.PluginStep('test-step', self.repo, publish_conduit,
                                    plugin_config)
コード例 #9
0
ファイル: test_steps.py プロジェクト: pombreda/pulp_deb
    def setUp(self):
        """Lay out src/target scratch dirs, a repo and the parent PluginStep."""
        self.working_directory = tempfile.mkdtemp()
        self.source_dir = os.path.join(self.working_directory, 'src')
        self.target_dir = os.path.join(self.working_directory, 'target')

        for directory in (self.source_dir, self.target_dir):
            os.makedirs(directory)
        self.repo = Repository(id='foo', working_dir=self.target_dir)
        publish_conduit = RepoPublishConduit(self.repo.id, 'foo_repo')
        publish_conduit.get_repo_scratchpad = Mock(return_value={})
        self.parent = steps.PluginStep('test-step', self.repo, publish_conduit,
                                       PluginCallConfiguration(None, None))
コード例 #10
0
ファイル: test_repo_publish.py プロジェクト: shubham90/pulp
    def setUp(self, mock_repo_qs):
        """Install mock plugins, register a distributor and build the conduit."""
        super(RepoPublishConduitTests, self).setUp()
        mock_plugins.install()
        manager_factory.initialize()

        # Give the test repo a distributor to publish through.
        dist_controller.add_distributor(
            'repo-1', 'mock-distributor', {}, True, distributor_id='dist-1')

        self.conduit = RepoPublishConduit('repo-1', 'dist-1')
コード例 #11
0
 def setUp(self):
     """Create working/publish/content dirs and the parent PublishStep."""
     self.temp_dir = tempfile.mkdtemp()
     self.working_directory = os.path.join(self.temp_dir, 'working')
     self.publish_directory = os.path.join(self.temp_dir, 'publish')
     self.content_directory = os.path.join(self.temp_dir, 'content')
     for path in (self.working_directory, self.publish_directory,
                  self.content_directory):
         os.makedirs(path)
     test_repo = Repository('foo_repo_id', working_dir=self.working_directory)
     publish_conduit = RepoPublishConduit(test_repo.id, 'foo_repo')
     publish_conduit.get_repo_scratchpad = Mock(return_value={u'tags': {}})
     self.parent = PublishStep('test-step', test_repo, publish_conduit,
                               PluginCallConfiguration(None, None))
コード例 #12
0
 def setUp(self):
     """Prepare temp directories plus a repo/conduit pair for PublishStep."""
     self.temp_dir = tempfile.mkdtemp()
     self.working_directory = os.path.join(self.temp_dir, 'working')
     self.publish_directory = os.path.join(self.temp_dir, 'publish')
     self.content_directory = os.path.join(self.temp_dir, 'content')
     for directory in (self.working_directory,
                       self.publish_directory,
                       self.content_directory):
         os.makedirs(directory)
     repo = Repository('foo_repo_id', working_dir=self.working_directory)
     conduit = RepoPublishConduit(repo.id, 'foo_repo')
     conduit.get_repo_scratchpad = Mock(return_value={u'tags': {}})
     step_config = PluginCallConfiguration(None, None)
     self.parent = PublishStep('test-step', repo, conduit, step_config)
コード例 #13
0
ファイル: test_plugins.py プロジェクト: taftsanders/pulp
 def test_import_unit_files_already_exist_size_mismatch(
         self, mock_get_working, *mocks):
     """
     When unit files already exist on the child but their sizes differ from
     the parent's, the importer must re-download them during the sync.
     """
     # Setup
     self.populate()
     mock_get_working.return_value = self.temp_dir
     with mock_config.patch({'server': {'storage_dir': self.parentfs}}):
         dist = NodesHttpDistributor()
         working_dir = os.path.join(self.childfs, 'working_dir')
         os.makedirs(working_dir)
         repo = Repository(self.REPO_ID, working_dir)
         cfg = self.dist_conf()
         conduit = RepoPublishConduit(self.REPO_ID,
                                      constants.HTTP_DISTRIBUTOR)
         dist.publish_repo(repo, conduit, cfg)
         model.Distributor.objects.delete()
         RepoContentUnit.get_collection().remove()
         unit_db.clean()
         self.define_plugins()
         parent_content = os.path.join(self.parentfs, 'content')
         child_content = os.path.join(self.childfs, 'content')
         shutil.copytree(parent_content, child_content)
         # Empty each copied file to force a size mismatch with the parent.
         for fn in os.listdir(child_content):
             path = os.path.join(child_content, fn)
             if os.path.isdir(path):
                 continue
             with open(path, 'w') as fp:
                 fp.truncate()
         # Test
         importer = NodesHttpImporter()
         publisher = dist.publisher(repo, cfg)
         manifest_url = pathlib.url_join(publisher.base_url,
                                         publisher.manifest_path())
         configuration = {
             constants.MANIFEST_URL_KEYWORD: manifest_url,
             constants.STRATEGY_KEYWORD: constants.MIRROR_STRATEGY,
         }
         configuration = PluginCallConfiguration(configuration, {})
         conduit = RepoSyncConduit(self.REPO_ID, constants.HTTP_IMPORTER,
                                   Mock())
     with mock_config.patch({'server': {'storage_dir': self.childfs}}):
         with patch('pulp_node.constants.CONTENT_PATH', self.parentfs):
             importer.sync_repo(repo, conduit, configuration)
         # Verify
         units = conduit.get_units()
         self.assertEqual(len(units), self.NUM_UNITS)
         mock_importer_config_to_nectar_config = mocks[0]
         mock_importer_config_to_nectar_config.assert_called_with(
             configuration.flatten())
コード例 #14
0
 def setUp(self):
     """Create a scratch working dir, a test repo and an export conduit."""
     self.working_dir = tempfile.mkdtemp()
     self.repo = Repository('test')
     self.repo.working_dir = self.working_dir
     self.config = PluginCallConfiguration(None, None)
     self.conduit = RepoPublishConduit(
         self.repo.id, TYPE_ID_DISTRIBUTOR_EXPORT)
コード例 #15
0
    def publish(repo_id, distributor_id, publish_config_override=None):
        """
        Requests the given distributor publish the repository it is configured
        on.

        The publish operation is executed synchronously in the caller's thread
        and will block until it is completed. The caller must take the necessary
        steps to address the fact that a publish call may be time intensive.

        :param repo_id: identifies the repo being published
        :type  repo_id: str
        :param distributor_id: identifies the repo's distributor to publish
        :type  distributor_id: str
        :param publish_config_override: optional config values to use for this
                                        publish call only
        :type  publish_config_override: dict, None
        :return: report of the details of the publish
        :rtype: pulp.server.db.model.repository.RepoPublishResult
        :raises MissingResource: if the repo or the named distributor does not
                                 exist
        """
        repo_coll = Repo.get_collection()
        distributor_coll = RepoDistributor.get_collection()

        # Validation: both the repo and its distributor must exist before
        # anything is published.
        repo = repo_coll.find_one({'id': repo_id})
        if repo is None:
            raise MissingResource(repo_id)

        repo_distributor = distributor_coll.find_one({
            'repo_id': repo_id,
            'id': distributor_id
        })
        if repo_distributor is None:
            raise MissingResource(repository=repo_id,
                                  distributor=distributor_id)

        distributor_instance, distributor_config = RepoPublishManager.\
            _get_distributor_instance_and_config(repo_id, distributor_id)

        # Assemble the data needed for the publish
        conduit = RepoPublishConduit(repo_id, distributor_id)

        # Config layering: plugin-level config, then the repo's stored
        # distributor config, then the per-call override on top.
        call_config = PluginCallConfiguration(distributor_config,
                                              repo_distributor['config'],
                                              publish_config_override)
        transfer_repo = common_utils.to_transfer_repo(repo)
        transfer_repo.working_dir = common_utils.get_working_directory()

        # Fire events describing the publish state
        fire_manager = manager_factory.event_fire_manager()
        fire_manager.fire_repo_publish_started(repo_id, distributor_id)
        result = RepoPublishManager._do_publish(repo, distributor_id,
                                                distributor_instance,
                                                transfer_repo, conduit,
                                                call_config)
        fire_manager.fire_repo_publish_finished(result)

        return result
コード例 #16
0
    def test_publish_repo(self, mock_publisher):
        """
        publish_repo should hand off to the patched publisher.
        """
        repo = Repository('test')
        config = PluginCallConfiguration(None, None)
        conduit = RepoPublishConduit(repo.id, 'foo_repo')
        self.distributor.publish_repo(repo, conduit, config)

        # NOTE(review): on older mock releases, calling assert_called_once()
        # on a Mock attribute passes silently (it is auto-created, not a real
        # assertion) -- confirm this actually verifies the publisher ran.
        mock_publisher.return_value.assert_called_once()
コード例 #17
0
    def test_publish(self, mock_repo_ctrl):
        """
        Publish with the nodes HTTP distributor, then fetch the generated
        manifest remotely and verify every published unit matches the unit
        it was created from.
        """
        # Setup
        self.populate()

        with mock_config.patch({'server': {'storage_dir': self.parentfs}}):
            # Test
            dist = NodesHttpDistributor()
            repo = Repository(self.REPO_ID)
            conduit = RepoPublishConduit(self.REPO_ID,
                                         constants.HTTP_DISTRIBUTOR)
            dist.publish_repo(repo, conduit, self.dist_conf())
            # Verify
            conf = DownloaderConfig()
            downloader = LocalFileDownloader(conf)
            pub = dist.publisher(repo, self.dist_conf())
            url = pathlib.url_join(pub.base_url, pub.manifest_path())
            working_dir = self.childfs
            manifest = RemoteManifest(url, downloader, working_dir)
            manifest.fetch()
            manifest.fetch_units()
            units = [u for u, r in manifest.get_units()]
            self.assertEqual(len(units), self.NUM_UNITS)
            for n in range(self.NUM_UNITS):
                unit = units[n]
                created = self.units[n]
                for p, v in unit['unit_key'].items():
                    self.assertEqual(created[p], v)
                for p, v in unit['metadata'].items():
                    # Internal bookkeeping fields are not round-tripped.
                    if p in ('_ns', '_content_type_id'):
                        continue
                    self.assertEqual(created[p], v)
                self.assertEqual(created.get('_storage_path'),
                                 unit['storage_path'])
                self.assertEqual(unit['type_id'], self.UNIT_TYPE_ID)
コード例 #18
0
ファイル: publish.py プロジェクト: pombreda/pulp_rpm
    def __init__(self, repo_group, publish_conduit, config, distributor_type):
        """
        :param repo_group: Pulp managed Yum repository group
        :type  repo_group: pulp.plugins.model.RepositoryGroup
        :param publish_conduit: Conduit providing access to relative Pulp functionality
        :type  publish_conduit: pulp.plugins.conduits.repo_publish.RepoGroupPublishConduit
        :param config: Pulp configuration for the distributor
        :type  config: pulp.plugins.config.PluginCallConfiguration
        :param distributor_type: The type of the distributor that is being published
        :type distributor_type: str
        """
        super(ExportRepoGroupPublisher, self).__init__(constants.PUBLISH_STEP_EXPORT_REPO_GROUP,
                                                       repo_group, publish_conduit, config,
                                                       working_dir=repo_group.working_dir,
                                                       distributor_type=distributor_type)

        # scratch: per-repo working dirs; realized: assembled export tree.
        working_dir = self.get_working_dir()
        scratch_dir = os.path.join(working_dir, 'scratch')
        realized_dir = os.path.join(working_dir, 'realized')

        # With an explicit export dir, repos publish straight into it;
        # otherwise they are realized locally and packed into ISOs below.
        flat_config = config.flatten()
        export_dir = config.get(constants.EXPORT_DIRECTORY_KEYWORD)
        if export_dir:
            repo_config = config
        else:
            repo_config = PluginCallConfiguration(flat_config, {constants.EXPORT_DIRECTORY_KEYWORD:
                                                                realized_dir})
        query_manager = RepoQueryManager()

        repos = query_manager.find_by_id_list(repo_group.repo_ids)
        empty_repos = True
        for repo in repos:
            empty_repos = False
            repo = common_utils.to_transfer_repo(repo)
            # Make sure we only publish rpm repo's
            if repo.notes['_repo-type'] != 'rpm-repo':
                continue

            # Each child publisher gets its own config copy and scratch dir.
            repo_config_copy = copy.deepcopy(repo_config)
            repo.working_dir = os.path.join(scratch_dir, repo.id)
            repo_conduit = RepoPublishConduit(repo.id, distributor_type)
            publisher = ExportRepoPublisher(repo, repo_conduit, repo_config_copy,
                                            distributor_type)
            publisher.description = _("Exporting Repo: %s") % repo.id
            self.add_child(publisher)
        if empty_repos:
            # No repos in the group: still emit an (empty) listing file.
            os.makedirs(realized_dir)
            self.add_child(GenerateListingFileStep(realized_dir, realized_dir))

        # If we aren't exporting to a directory add the ISO create & publish steps
        if not export_dir:
            # Create the steps to generate the ISO and publish them to their final location
            output_dir = os.path.join(working_dir, 'output')
            self.add_child(CreateIsoStep(realized_dir, output_dir))
            export_dirs = configuration.get_export_repo_group_publish_dirs(repo_group, config)
            publish_location = [('/', location) for location in export_dirs]

            master_dir = configuration.get_master_publish_dir(repo_group, distributor_type)
            self.add_child(AtomicDirectoryPublishStep(output_dir, publish_location, master_dir))
コード例 #19
0
ファイル: test_distributor.py プロジェクト: pombreda/pulp_rpm
    def test_publish_repo(self, mock_publish):
        """
        publish_repo should delegate to the patched Publisher's publish().
        """
        repo = Repository('test')
        config = PluginCallConfiguration(None, None)
        conduit = RepoPublishConduit(repo.id, TYPE_ID_DISTRIBUTOR_YUM)

        self.distributor.publish_repo(repo, conduit, config)

        # NOTE(review): on older mock releases assert_called_once() is an
        # auto-created attribute, not an assertion, and passes silently --
        # confirm this check actually fails when publish() is not called.
        mock_publish.Publisher.return_value.publish.assert_called_once()
コード例 #20
0
ファイル: test_plugins.py プロジェクト: ipanova/pulp
 def test_import_unit_files_already_exist_size_mismatch(self, *mocks):
     """
     When unit files already exist on the child but their sizes differ from
     the parent's, the importer must re-download them during the sync.
     """
     # Setup
     self.populate()
     pulp_conf.set('server', 'storage_dir', self.parentfs)
     dist = NodesHttpDistributor()
     working_dir = os.path.join(self.childfs, 'working_dir')
     os.makedirs(working_dir)
     repo = Repository(self.REPO_ID, working_dir)
     cfg = self.dist_conf()
     conduit = RepoPublishConduit(self.REPO_ID, constants.HTTP_DISTRIBUTOR)
     dist.publish_repo(repo, conduit, cfg)
     Repo.get_collection().remove()
     RepoDistributor.get_collection().remove()
     RepoContentUnit.get_collection().remove()
     unit_db.clean()
     self.define_plugins()
     parent_content = os.path.join(self.parentfs, 'content')
     child_content = os.path.join(self.childfs, 'content')
     shutil.copytree(parent_content, child_content)
     # Empty each copied file to force a size mismatch with the parent.
     for fn in os.listdir(child_content):
         path = os.path.join(child_content, fn)
         if os.path.isdir(path):
             continue
         with open(path, 'w') as fp:
             fp.truncate()
     # Test
     importer = NodesHttpImporter()
     publisher = dist.publisher(repo, cfg)
     manifest_url = pathlib.url_join(publisher.base_url, publisher.manifest_path())
     configuration = {
         constants.MANIFEST_URL_KEYWORD: manifest_url,
         constants.STRATEGY_KEYWORD: constants.MIRROR_STRATEGY,
     }
     configuration = PluginCallConfiguration(configuration, {})
     conduit = RepoSyncConduit(
         self.REPO_ID,
         constants.HTTP_IMPORTER,
         RepoContentUnit.OWNER_TYPE_IMPORTER,
         constants.HTTP_IMPORTER)
     pulp_conf.set('server', 'storage_dir', self.childfs)
     importer.sync_repo(repo, conduit, configuration)
     # Verify
     units = conduit.get_units()
     self.assertEqual(len(units), self.NUM_UNITS)
     mock_importer_config_to_nectar_config = mocks[0]
     mock_importer_config_to_nectar_config.assert_called_with(configuration.flatten())
コード例 #21
0
ファイル: test_plugins.py プロジェクト: taftsanders/pulp
 def test_import_cached_manifest_matched(self, mock_get_working, mock_fetch,
                                         *unused):
     """
     When the cached manifest and units file in the working dir match the
     remote manifest, the importer must use the cache and skip the fetch.
     """
     # Setup
     self.populate()
     mock_get_working.return_value = self.temp_dir
     with mock_config.patch({'server': {'storage_dir': self.parentfs}}):
         dist = NodesHttpDistributor()
         working_dir = os.path.join(self.childfs, 'working_dir')
         os.makedirs(working_dir)
         repo = Repository(self.REPO_ID, working_dir)
         configuration = self.dist_conf()
         conduit = RepoPublishConduit(self.REPO_ID,
                                      constants.HTTP_DISTRIBUTOR)
         dist.publish_repo(repo, conduit, configuration)
         model.Distributor.objects.delete()
         RepoContentUnit.get_collection().remove()
         unit_db.clean()
         self.define_plugins()
         publisher = dist.publisher(repo, configuration)
         manifest_path = publisher.manifest_path()
         units_path = os.path.join(os.path.dirname(manifest_path),
                                   UNITS_FILE_NAME)
         manifest = Manifest(manifest_path)
         manifest.read()
         # Seed the working dir with both the manifest and the units file.
         shutil.copy(manifest_path,
                     os.path.join(working_dir, MANIFEST_FILE_NAME))
         shutil.copy(units_path, os.path.join(working_dir, UNITS_FILE_NAME))
         # Test
         importer = NodesHttpImporter()
         manifest_url = pathlib.url_join(publisher.base_url, manifest_path)
         configuration = {
             constants.MANIFEST_URL_KEYWORD: manifest_url,
             constants.STRATEGY_KEYWORD: constants.MIRROR_STRATEGY,
         }
         configuration = PluginCallConfiguration(configuration, {})
         conduit = RepoSyncConduit(self.REPO_ID, constants.HTTP_IMPORTER,
                                   Mock())
     with mock_config.patch({'server': {'storage_dir': self.childfs}}):
         with patch('pulp_node.constants.CONTENT_PATH', self.parentfs):
             importer.sync_repo(repo, conduit, configuration)
         # Verify: units imported and the cached units file was not re-fetched.
         units = conduit.get_units()
         self.assertEqual(len(units), self.NUM_UNITS)
         self.assertFalse(mock_fetch.called)
コード例 #22
0
ファイル: test_repo_publish.py プロジェクト: maxamillion/pulp
    def setUp(self, mock_repo_qs):
        """Install mock plugins, add a distributor, and build the conduit."""
        super(RepoPublishConduitTests, self).setUp()
        mock_plugins.install()
        manager_factory.initialize()

        # Register a distributor for the test repo to publish through.
        dist_controller.add_distributor(
            'repo-1', 'mock-distributor', {}, True, distributor_id='dist-1')

        self.conduit = RepoPublishConduit('repo-1', 'dist-1')
コード例 #23
0
ファイル: test_plugins.py プロジェクト: ipanova/pulp
 def test_import_cached_manifest_matched(self, mock_fetch, *unused):
     """
     When the cached manifest and units file in the working dir match the
     remote manifest, the importer must use the cache and skip the fetch.
     """
     # Setup
     self.populate()
     pulp_conf.set('server', 'storage_dir', self.parentfs)
     dist = NodesHttpDistributor()
     working_dir = os.path.join(self.childfs, 'working_dir')
     os.makedirs(working_dir)
     repo = Repository(self.REPO_ID, working_dir)
     configuration = self.dist_conf()
     conduit = RepoPublishConduit(self.REPO_ID, constants.HTTP_DISTRIBUTOR)
     dist.publish_repo(repo, conduit, configuration)
     Repo.get_collection().remove()
     RepoDistributor.get_collection().remove()
     RepoContentUnit.get_collection().remove()
     unit_db.clean()
     self.define_plugins()
     publisher = dist.publisher(repo, configuration)
     manifest_path = publisher.manifest_path()
     units_path = os.path.join(os.path.dirname(manifest_path), UNITS_FILE_NAME)
     manifest = Manifest(manifest_path)
     manifest.read()
     # Seed the working dir with both the manifest and the units file.
     shutil.copy(manifest_path, os.path.join(working_dir, MANIFEST_FILE_NAME))
     shutil.copy(units_path, os.path.join(working_dir, UNITS_FILE_NAME))
     # Test
     importer = NodesHttpImporter()
     manifest_url = pathlib.url_join(publisher.base_url, manifest_path)
     configuration = {
         constants.MANIFEST_URL_KEYWORD: manifest_url,
         constants.STRATEGY_KEYWORD: constants.MIRROR_STRATEGY,
     }
     configuration = PluginCallConfiguration(configuration, {})
     conduit = RepoSyncConduit(
         self.REPO_ID,
         constants.HTTP_IMPORTER,
         RepoContentUnit.OWNER_TYPE_IMPORTER,
         constants.HTTP_IMPORTER)
     pulp_conf.set('server', 'storage_dir', self.childfs)
     importer.sync_repo(repo, conduit, configuration)
     # Verify: units imported and the cached units file was not re-fetched.
     units = conduit.get_units()
     self.assertEqual(len(units), self.NUM_UNITS)
     self.assertFalse(mock_fetch.called)
コード例 #24
0
ファイル: test_plugins.py プロジェクト: zjhuntin/pulp
    def test_import(self, *mocks):
        """Publish on the parent node, then mirror-sync into the child.

        Download concurrency/bandwidth settings must reach the nectar
        downloader config (checked via the patched converter in mocks[0]).
        """
        # Setup
        self.populate()
        max_concurrency = 5
        max_bandwidth = 12345

        with mock_config.patch({'server': {'storage_dir': self.parentfs}}):
            distributor = NodesHttpDistributor()
            working_dir = os.path.join(self.childfs, 'working_dir')
            os.makedirs(working_dir)
            repo = Repository(self.REPO_ID, working_dir)
            dist_config = self.dist_conf()
            publish_conduit = RepoPublishConduit(self.REPO_ID,
                                                 constants.HTTP_DISTRIBUTOR)
            distributor.publish_repo(repo, publish_conduit, dist_config)
            # reset persisted state so the sync starts fresh
            model.Repository.drop_collection()
            RepoDistributor.get_collection().remove()
            RepoContentUnit.get_collection().remove()
            unit_db.clean()
            self.define_plugins()
            # Test
            importer = NodesHttpImporter()
            publisher = distributor.publisher(repo, dist_config)
            manifest_url = pathlib.url_join(publisher.base_url,
                                            publisher.manifest_path())
            sync_settings = {
                constants.MANIFEST_URL_KEYWORD: manifest_url,
                constants.STRATEGY_KEYWORD: constants.MIRROR_STRATEGY,
                importer_constants.KEY_MAX_DOWNLOADS: max_concurrency,
                importer_constants.KEY_MAX_SPEED: max_bandwidth,
            }
            configuration = PluginCallConfiguration(sync_settings, {})
            conduit = RepoSyncConduit(self.REPO_ID, constants.HTTP_IMPORTER)

        with mock_config.patch({'server': {'storage_dir': self.childfs}}):
            importer.sync_repo(repo, conduit, configuration)
            # Verify
            self.assertEquals(len(conduit.get_units()), self.NUM_UNITS)
            mocks[0].assert_called_with(configuration.flatten())
コード例 #25
0
    def setUp(self):
        """Build the repo/conduit/config fixture and a PluginStep under test."""
        # Scratch directory for the step's working files.
        self.working_dir = tempfile.mkdtemp(prefix='working_')

        self.repo_id = 'publish-test-repo'
        self.repo = Repository(self.repo_id, working_dir=self.working_dir)
        self.conduit = RepoPublishConduit(self.repo_id, 'test_plugin_id')
        # Steps read the repo scratchpad; stub it to an empty dict.
        self.conduit.get_repo_scratchpad = Mock(return_value={})

        self.config = PluginCallConfiguration(None, None)
        self.pluginstep = PluginStep("base-step", repo=self.repo, conduit=self.conduit,
                                     config=self.config, plugin_type='test_plugin_type')
コード例 #26
0
ファイル: test_plugins.py プロジェクト: ipanova/pulp
 def test_import(self, *mocks):
     """Publish on the parent, mirror-sync the child, and verify the units.

     Concurrency/bandwidth limits must be passed through to the downloader
     configuration (checked via the patched converter in mocks[0]).
     """
     # Setup
     self.populate()
     max_concurrency = 5
     max_bandwidth = 12345
     pulp_conf.set('server', 'storage_dir', self.parentfs)
     distributor = NodesHttpDistributor()
     working_dir = os.path.join(self.childfs, 'working_dir')
     os.makedirs(working_dir)
     repo = Repository(self.REPO_ID, working_dir)
     dist_config = self.dist_conf()
     publish_conduit = RepoPublishConduit(self.REPO_ID, constants.HTTP_DISTRIBUTOR)
     distributor.publish_repo(repo, publish_conduit, dist_config)
     # clear persisted state so the sync starts fresh
     Repo.get_collection().remove()
     RepoDistributor.get_collection().remove()
     RepoContentUnit.get_collection().remove()
     unit_db.clean()
     self.define_plugins()
     # Test
     importer = NodesHttpImporter()
     publisher = distributor.publisher(repo, dist_config)
     manifest_url = pathlib.url_join(publisher.base_url, publisher.manifest_path())
     sync_settings = {
         constants.MANIFEST_URL_KEYWORD: manifest_url,
         constants.STRATEGY_KEYWORD: constants.MIRROR_STRATEGY,
         importer_constants.KEY_MAX_DOWNLOADS: max_concurrency,
         importer_constants.KEY_MAX_SPEED: max_bandwidth,
     }
     configuration = PluginCallConfiguration(sync_settings, {})
     sync_conduit = RepoSyncConduit(
         self.REPO_ID,
         constants.HTTP_IMPORTER,
         RepoContentUnit.OWNER_TYPE_IMPORTER,
         constants.HTTP_IMPORTER)
     pulp_conf.set('server', 'storage_dir', self.childfs)
     importer.sync_repo(repo, sync_conduit, configuration)
     # Verify
     self.assertEquals(len(sync_conduit.get_units()), self.NUM_UNITS)
     mocks[0].assert_called_with(configuration.flatten())
コード例 #27
0
ファイル: test_plugins.py プロジェクト: zjhuntin/pulp
    def test_import_cached_manifest_units_invalid(self, *unused):
        """A cached manifest with a corrupt units file must be ignored.

        The sync should fall back to the published units and still import
        every unit.
        """
        # Setup
        self.populate()

        with mock_config.patch({'server': {'storage_dir': self.parentfs}}):
            distributor = NodesHttpDistributor()
            working_dir = os.path.join(self.childfs, 'working_dir')
            os.makedirs(working_dir)
            repo = Repository(self.REPO_ID, working_dir)
            dist_config = self.dist_conf()
            publish_conduit = RepoPublishConduit(self.REPO_ID,
                                                 constants.HTTP_DISTRIBUTOR)
            distributor.publish_repo(repo, publish_conduit, dist_config)
            model.Repository.drop_collection()
            RepoDistributor.get_collection().remove()
            RepoContentUnit.get_collection().remove()
            unit_db.clean()
            self.define_plugins()
            publisher = distributor.publisher(repo, dist_config)
            manifest_path = publisher.manifest_path()
            manifest = Manifest(manifest_path)
            manifest.read()
            # cache a valid manifest but a deliberately corrupt units file
            shutil.copy(manifest_path,
                        os.path.join(working_dir, MANIFEST_FILE_NAME))
            with open(os.path.join(working_dir, UNITS_FILE_NAME), 'w+') as fp:
                fp.write('invalid-units')
            # Test
            importer = NodesHttpImporter()
            manifest_url = pathlib.url_join(publisher.base_url, manifest_path)
            configuration = PluginCallConfiguration({
                constants.MANIFEST_URL_KEYWORD: manifest_url,
                constants.STRATEGY_KEYWORD: constants.MIRROR_STRATEGY,
            }, {})
            conduit = RepoSyncConduit(self.REPO_ID, constants.HTTP_IMPORTER)

        with mock_config.patch({'server': {'storage_dir': self.childfs}}):
            importer.sync_repo(repo, conduit, configuration)
            # Verify
            self.assertEquals(len(conduit.get_units()), self.NUM_UNITS)
コード例 #28
0
def publish(repo_id,
            dist_id,
            publish_config_override=None,
            scheduled_call_id=None):
    """
    Publish the repository synchronously via the named distributor.

    Runs in the caller's thread and blocks until the publish completes, so
    callers must plan for a potentially long-running operation.

    :param repo_id: identifies the repo being published
    :type  repo_id: str
    :param dist_id: identifies the repo's distributor to publish
    :type  dist_id: str
    :param publish_config_override: optional config values to use for this publish call only
    :type  publish_config_override: dict, None
    :param scheduled_call_id: id of scheduled call that dispatched this task
    :type  scheduled_call_id: str

    :return: report of the details of the publish
    :rtype:  pulp.server.db.model.repository.RepoPublishResult

    :raises pulp_exceptions.MissingResource: if distributor/repo pair does not exist
    """
    distributor_coll = RepoDistributor.get_collection()
    repo_obj = model.Repository.objects.get_repo_or_missing_resource(repo_id)
    repo_distributor = distributor_coll.find_one(
        {'repo_id': repo_id, 'id': dist_id})
    if repo_distributor is None:
        raise pulp_exceptions.MissingResource(repository=repo_id,
                                              distributor=dist_id)

    dist_inst, dist_conf = _get_distributor_instance_and_config(repo_id,
                                                                dist_id)

    # Assemble the data needed for the publish.
    conduit = RepoPublishConduit(repo_id, dist_id)
    call_config = PluginCallConfiguration(dist_conf,
                                          repo_distributor['config'],
                                          publish_config_override)
    transfer_repo = repo_obj.to_transfer_repo()
    transfer_repo.working_dir = common_utils.get_working_directory()

    # Bracket the publish with start/finish events.
    fire_manager = manager_factory.event_fire_manager()
    fire_manager.fire_repo_publish_started(repo_id, dist_id)
    result = _do_publish(repo_obj, dist_id, dist_inst, transfer_repo, conduit,
                         call_config)
    fire_manager.fire_repo_publish_finished(result)
    return result
コード例 #29
0
 def setUp(self):
     """Create the install distributor with a conduit yielding two modules."""
     self.distributor = installdistributor.PuppetModuleInstallDistributor()
     self.puppet_dir = '/opt/my/modules/'
     self.repo = Repository('repo1', '', repo_obj=mock.MagicMock())
     self.conduit = RepoPublishConduit('repo1', self.distributor.metadata()['id'])
     self.uk1 = {
         'author': 'puppetlabs',
         'name': 'stdlib',
         'version': '1.2.0',
     }
     self.uk2 = {
         'author': 'puppetlabs',
         'name': 'java',
         'version': '1.3.1',
     }
     self.units = [Module(_storage_path='/a/b/x', **self.uk1),
                   Module(_storage_path='/a/b/y', **self.uk2)]
     # stub get_units() so no repository query is performed
     self.conduit.get_units = mock.MagicMock(return_value=self.units,
                                             spec_set=self.conduit.get_units)
コード例 #30
0
ファイル: test_plugins.py プロジェクト: aweiteka/pulp
 def test_import_unit_files_already_exist(self, *mocks):
     """Sync must succeed when the child already has every unit file on disk."""
     # Setup
     self.populate()
     pulp_conf.set('server', 'storage_dir', self.parentfs)
     distributor = NodesHttpDistributor()
     working_dir = os.path.join(self.childfs, 'working_dir')
     os.makedirs(working_dir)
     repo = Repository(self.REPO_ID, working_dir)
     dist_config = self.dist_conf()
     publish_conduit = RepoPublishConduit(self.REPO_ID, constants.HTTP_DISTRIBUTOR)
     distributor.publish_repo(repo, publish_conduit, dist_config)
     Repo.get_collection().remove()
     RepoDistributor.get_collection().remove()
     RepoContentUnit.get_collection().remove()
     unit_db.clean()
     self.define_plugins()
     # pre-seed the child with the parent's content files
     parent_content = os.path.join(self.parentfs, 'content')
     child_content = os.path.join(self.childfs, 'content')
     shutil.copytree(parent_content, child_content)
     # Test
     importer = NodesHttpImporter()
     publisher = distributor.publisher(repo, dist_config)
     manifest_url = pathlib.url_join(publisher.base_url,
                                     publisher.manifest_path())
     configuration = PluginCallConfiguration({
         constants.MANIFEST_URL_KEYWORD: manifest_url,
         constants.STRATEGY_KEYWORD: constants.MIRROR_STRATEGY,
     }, {})
     sync_conduit = RepoSyncConduit(self.REPO_ID, constants.HTTP_IMPORTER,
                                    RepoContentUnit.OWNER_TYPE_IMPORTER,
                                    constants.HTTP_IMPORTER)
     pulp_conf.set('server', 'storage_dir', self.childfs)
     importer.sync_repo(repo, sync_conduit, configuration)
     # Verify
     self.assertEquals(len(sync_conduit.get_units()), self.NUM_UNITS)
     mocks[0].assert_called_with(configuration.flatten())
コード例 #31
0
    def _init_publisher(self):
        """Build a Publisher wired to a mocked conduit and default config."""
        repo = Repository(self.repo_id, working_dir=self.working_dir)

        conduit = RepoPublishConduit(repo.id, YUM_DISTRIBUTOR_ID)
        conduit.get_repo_scratchpad = mock.Mock(return_value={})

        config = PluginCallConfiguration(None, None)
        config.default_config.update({
            "http": True,
            "https": True,
            "relative_url": None,
            "http_publish_dir": os.path.join(self.published_dir, "http"),
            "https_publish_dir": os.path.join(self.published_dir, "https"),
        })

        self.publisher = publish.Publisher(repo, conduit, config)

        # mock out the repomd_file_context, so _publish_<step> can be called
        # outside of the publish() method
        self.publisher.repomd_file_context = mock.MagicMock()
コード例 #32
0
ファイル: test_plugins.py プロジェクト: taftsanders/pulp
 def test_import_modified_units(self, mock_get_working, *mocks):
     """Units with an older _last_updated on the child must be refreshed."""
     # Setup
     self.populate()
     mock_get_working.return_value = self.temp_dir
     max_concurrency = 5
     max_bandwidth = 12345
     with mock_config.patch({'server': {'storage_dir': self.parentfs}}):
         distributor = NodesHttpDistributor()
         working_dir = os.path.join(self.childfs, 'working_dir')
         os.makedirs(working_dir)
         repo = Repository(self.REPO_ID, working_dir)
         dist_config = self.dist_conf()
         publish_conduit = RepoPublishConduit(self.REPO_ID,
                                              constants.HTTP_DISTRIBUTOR)
         distributor.publish_repo(repo, publish_conduit, dist_config)
         # make the published unit have a newer _last_updated.
         collection = connection.get_collection(
             unit_db.unit_collection_name(self.UNIT_TYPE_ID))
         # N=0 (no file)
         unit = collection.find_one({'N': 0})
         unit['age'] = 84  # this will be updated back to 42.
         unit['_last_updated'] -= 1
         unit['_storage_path'] = None
         collection.update({'N': 0}, unit)
         # N=1
         unit = collection.find_one({'N': 1})
         unit['age'] = 85  # this will be updated back to 42.
         unit['_last_updated'] -= 1
         collection.update({'N': 1}, unit)
         # Test
         importer = NodesHttpImporter()
         publisher = distributor.publisher(repo, dist_config)
         manifest_url = pathlib.url_join(publisher.base_url,
                                         publisher.manifest_path())
         configuration = PluginCallConfiguration({
             constants.MANIFEST_URL_KEYWORD: manifest_url,
             constants.STRATEGY_KEYWORD: constants.MIRROR_STRATEGY,
             importer_constants.KEY_MAX_DOWNLOADS: max_concurrency,
             importer_constants.KEY_MAX_SPEED: max_bandwidth,
         }, {})
         conduit = RepoSyncConduit(self.REPO_ID, constants.HTTP_IMPORTER,
                                   Mock())
     with mock_config.patch({'server': {'storage_dir': self.childfs}}):
         with patch('pulp_node.constants.CONTENT_PATH', self.parentfs):
             importer.sync_repo(repo, conduit, configuration)
         # Verify: both modified units were restored to the published value.
         for index in (0, 1):
             self.assertEqual(collection.find_one({'N': index})['age'], 42)
コード例 #33
0
    def setUp(self):
        """Create temp dirs, a repo, a publish conduit, and a PublishStep."""
        self.working_dir = tempfile.mkdtemp(prefix='working_')
        self.published_dir = tempfile.mkdtemp(prefix='published_')
        self.master_dir = os.path.join(self.published_dir, 'master')

        self.repo_id = 'publish-test-repo'
        self.repo = Repository(self.repo_id, working_dir=self.working_dir)
        # The Mock() formerly assigned here was dead code -- it was
        # immediately overwritten by the real conduit on the next line.
        self.conduit = RepoPublishConduit(self.repo_id, 'test_distributor_id')
        # Steps read the repo scratchpad; stub it to an empty dict.
        self.conduit.get_repo_scratchpad = Mock(return_value={})

        self.config = PluginCallConfiguration(None, None)
        self.publisher = PublishStep("base-step", repo=self.repo, publish_conduit=self.conduit,
                                     config=self.config, distributor_type='test_distributor_type')
コード例 #34
0
 def setUp(self):
     """Build a repo/conduit/config fixture and a RedirectFileContext."""
     self.working_directory = tempfile.mkdtemp()
     self.repo = Repository('foo_repo_id', working_dir=self.working_directory)
     self.config = PluginCallConfiguration(None, None)
     self.conduit = RepoPublishConduit(self.repo.id, 'foo_repo')
     # The scratchpad holds the tag metadata the context renders.  Set the
     # return value once; the original assigned {u'tags': []} and then
     # immediately overwrote it with the real tag list.
     tag_list = [{constants.IMAGE_TAG_KEY: u'latest',
                  constants.IMAGE_ID_KEY: u'image_id'}]
     self.conduit.get_repo_scratchpad = Mock(return_value={u'tags': tag_list})
     self.context = metadata.RedirectFileContext(self.working_directory,
                                                 self.conduit,
                                                 self.config,
                                                 self.repo)
     # Avoid writing a real metadata file during tests.
     self.context.metadata_file_handle = Mock()
コード例 #35
0
ファイル: test_plugins.py プロジェクト: BrnoPCmaniak/pulp
 def test_import_unit_files_already_exist(self, mock_get_working, *mocks):
     """Sync should complete when unit files are already present on the child."""
     # Setup
     self.populate()
     mock_get_working.return_value = self.temp_dir
     with mock_config.patch({'server': {'storage_dir': self.parentfs}}):
         distributor = NodesHttpDistributor()
         working_dir = os.path.join(self.childfs, 'working_dir')
         os.makedirs(working_dir)
         repo = Repository(self.REPO_ID, working_dir)
         dist_config = self.dist_conf()
         publish_conduit = RepoPublishConduit(self.REPO_ID, constants.HTTP_DISTRIBUTOR)
         distributor.publish_repo(repo, publish_conduit, dist_config)
         model.Distributor.objects.delete()
         RepoContentUnit.get_collection().remove()
         unit_db.clean()
         self.define_plugins()
         # copy the parent's content tree so the files already exist
         parent_content = os.path.join(self.parentfs, 'content')
         child_content = os.path.join(self.childfs, 'content')
         shutil.copytree(parent_content, child_content)
         # Test
         importer = NodesHttpImporter()
         publisher = distributor.publisher(repo, dist_config)
         manifest_url = pathlib.url_join(publisher.base_url, publisher.manifest_path())
         configuration = PluginCallConfiguration({
             constants.MANIFEST_URL_KEYWORD: manifest_url,
             constants.STRATEGY_KEYWORD: constants.MIRROR_STRATEGY,
         }, {})
         conduit = RepoSyncConduit(self.REPO_ID, constants.HTTP_IMPORTER, Mock())
     with mock_config.patch({'server': {'storage_dir': self.childfs}}):
         with patch('pulp_node.constants.CONTENT_PATH', self.parentfs):
             importer.sync_repo(repo, conduit, configuration)
         # Verify
         self.assertEquals(len(conduit.get_units()), self.NUM_UNITS)
         mocks[0].assert_called_with(configuration.flatten())
コード例 #36
0
def _re_publish_repository(repo_obj, distributor):
    """
    Re-publish the repository using the new yum distributor.

    NOTE: this may be a bit time-consuming.
    """
    transfer_repo = repo_obj.to_transfer_repo()
    transfer_repo.working_dir = distributor_working_dir(
        distributor['distributor_type_id'], transfer_repo.id)

    conduit = RepoPublishConduit(transfer_repo.id, distributor['distributor_id'])
    call_config = PluginCallConfiguration(NEW_DISTRIBUTOR_CONF,
                                          distributor['config'])

    publisher = Publisher(transfer_repo, conduit, call_config, YUM_DISTRIBUTOR_ID)
    publisher.process_lifecycle()
コード例 #37
0
 def setUp(self):
     """Create the install distributor plus a conduit returning two units."""
     self.distributor = installdistributor.PuppetModuleInstallDistributor()
     self.repo = Repository('repo1', '', {})
     self.conduit = RepoPublishConduit('repo1',
                                       self.distributor.metadata()['id'])
     self.uk1 = {'author': 'puppetlabs', 'name': 'stdlib', 'version': '1.2.0'}
     self.uk2 = {'author': 'puppetlabs', 'name': 'java', 'version': '1.3.1'}
     self.units = [
         AssociatedUnit(constants.TYPE_PUPPET_MODULE, self.uk1, {},
                        '/a/b/x', '', '', '', ''),
         AssociatedUnit(constants.TYPE_PUPPET_MODULE, self.uk2, {},
                        '/a/b/y', '', '', '', ''),
     ]
     # stub get_units so no real repository query happens
     self.conduit.get_units = mock.MagicMock(return_value=self.units,
                                             spec_set=self.conduit.get_units)
コード例 #38
0
ファイル: test_plugins.py プロジェクト: aweiteka/pulp
 def test_import_modified_units(self, *mocks):
     """A unit modified on the child must be overwritten by the parent copy."""
     # Setup
     self.populate()
     max_concurrency = 5
     max_bandwidth = 12345
     pulp_conf.set('server', 'storage_dir', self.parentfs)
     distributor = NodesHttpDistributor()
     working_dir = os.path.join(self.childfs, 'working_dir')
     os.makedirs(working_dir)
     repo = Repository(self.REPO_ID, working_dir)
     dist_config = self.dist_conf()
     publish_conduit = RepoPublishConduit(self.REPO_ID, constants.HTTP_DISTRIBUTOR)
     distributor.publish_repo(repo, publish_conduit, dist_config)
     # make the published unit have a newer _last_updated.
     collection = connection.get_collection(
         unit_db.unit_collection_name(self.UNIT_TYPE_ID))
     unit = collection.find_one({'N': 0})
     unit['age'] = 84
     unit['_last_updated'] -= 1
     collection.update({'N': 0}, unit, safe=True)
     # Test
     importer = NodesHttpImporter()
     publisher = distributor.publisher(repo, dist_config)
     manifest_url = pathlib.url_join(publisher.base_url,
                                     publisher.manifest_path())
     configuration = PluginCallConfiguration({
         constants.MANIFEST_URL_KEYWORD: manifest_url,
         constants.STRATEGY_KEYWORD: constants.MIRROR_STRATEGY,
         importer_constants.KEY_MAX_DOWNLOADS: max_concurrency,
         importer_constants.KEY_MAX_SPEED: max_bandwidth,
     }, {})
     sync_conduit = RepoSyncConduit(
         self.REPO_ID,
         constants.HTTP_IMPORTER,
         RepoContentUnit.OWNER_TYPE_IMPORTER,
         constants.HTTP_IMPORTER)
     pulp_conf.set('server', 'storage_dir', self.childfs)
     importer.sync_repo(repo, sync_conduit, configuration)
     # Verify: the age was restored to the published value.
     self.assertEqual(collection.find_one({'N': 0})['age'], 42)
コード例 #39
0
    def __init__(self, repo_group, publish_conduit, config, distributor_type):
        """
        Assemble the child steps that export every rpm repo in the group.

        :param repo_group: Pulp managed Yum repository
        :type  repo_group: pulp.plugins.model.RepositoryGroup
        :param publish_conduit: Conduit providing access to relative Pulp functionality
        :type  publish_conduit: pulp.plugins.conduits.repo_publish.RepoGroupPublishConduit
        :param config: Pulp configuration for the distributor
        :type  config: pulp.plugins.config.PluginCallConfiguration
        :param distributor_type: The type of the distributor that is being published
        :type distributor_type: str
        """
        super(ExportRepoGroupPublisher,
              self).__init__(constants.PUBLISH_STEP_EXPORT_REPO_GROUP,
                             repo_group,
                             publish_conduit,
                             config,
                             plugin_type=distributor_type)

        working_dir = self.get_working_dir()
        # 'scratch' holds per-repo working files; 'realized' is the exported tree.
        scratch_dir = os.path.join(working_dir, 'scratch')
        realized_dir = os.path.join(working_dir, 'realized')

        flat_config = config.flatten()
        export_dir = config.get(constants.EXPORT_DIRECTORY_KEYWORD)
        # When no export directory was given, redirect the child repos into
        # the local 'realized' directory so they can be rolled into an ISO.
        if export_dir:
            repo_config = config
        else:
            repo_config = PluginCallConfiguration(
                flat_config,
                {constants.EXPORT_DIRECTORY_KEYWORD: realized_dir})

        repo_objs = model.Repository.objects(repo_id__in=repo_group.repo_ids)
        empty_repos = True
        for repo_obj in repo_objs:
            empty_repos = False
            repo = repo_obj.to_transfer_repo()
            # Make sure we only publish rpm repo's
            if repo.notes['_repo-type'] != 'rpm-repo':
                continue

            repo_config_copy = copy.deepcopy(repo_config)

            # Need some code to pull the distributor
            distributor = model.Distributor.objects(
                repo_id=repo_obj['repo_id'],
                distributor_id=ids.EXPORT_DISTRIBUTOR_ID,
                config__relative_url__exists=True).first()

            # Fall back to the repo id when no export distributor declares a
            # relative_url for this repo.
            if distributor is not None:
                relative_url = distributor['config']['relative_url']
            else:
                relative_url = repo_obj['repo_id']

            if not export_dir:
                repo_config_copy.override_config['relative_url'] = relative_url
            else:
                merged_rel = repo_config_copy.get('relative_url',
                                                  '') + '/' + relative_url
                repo_config_copy.override_config['relative_url'] = merged_rel

            repo_working_dir = os.path.join(scratch_dir, repo.id)
            repo_conduit = RepoPublishConduit(repo.id, distributor_type)
            publisher = ExportRepoPublisher(repo,
                                            repo_conduit,
                                            repo_config_copy,
                                            distributor_type,
                                            working_dir=repo_working_dir)
            publisher.description = _("Exporting Repo: %s") % repo.id
            self.add_child(publisher)
        # With no repos in the group, still produce an (empty) listing file.
        if empty_repos:
            os.makedirs(realized_dir)
            self.add_child(GenerateListingFileStep(realized_dir, realized_dir))

        # If we aren't exporting to a directory add the ISO create & publish steps
        if not export_dir:
            # Create the steps to generate the ISO and publish them to their final location
            output_dir = os.path.join(working_dir, 'output')
            self.add_child(CreateIsoStep(realized_dir, output_dir))

            # create the PULP_MANIFEST file if requested in the config
            if config.get_boolean(constants.CREATE_PULP_MANIFEST) is True:
                self.add_child(
                    platform_steps.CreatePulpManifestStep(output_dir))

            export_dirs = configuration.get_export_repo_group_publish_dirs(
                repo_group, config)
            publish_location = [('/', location) for location in export_dirs]

            master_dir = configuration.get_master_publish_dir_from_group(
                repo_group, distributor_type)
            self.add_child(
                platform_steps.AtomicDirectoryPublishStep(
                    output_dir, publish_location, master_dir))
コード例 #40
0
ファイル: test_repo_publish.py プロジェクト: zjhuntin/pulp
class RepoPublishConduitTests(base.PulpServerTests):
    """Tests for RepoPublishConduit backed by a real (test) database."""

    def clean(self):
        """Remove repository/distributor state created by a test."""
        super(RepoPublishConduitTests, self).clean()

        mock_plugins.reset()
        model.Repository.drop_collection()
        RepoDistributor.get_collection().remove()

    @mock.patch('pulp.server.managers.repo.importer.model.Repository.objects')
    def setUp(self, mock_repo_qs):
        """Install mock plugins and register the distributor under test."""
        super(RepoPublishConduitTests, self).setUp()
        mock_plugins.install()
        manager_factory.initialize()

        self.distributor_manager = manager_factory.repo_distributor_manager()

        # Populate the database with a repo with units
        self.distributor_manager.add_distributor('repo-1',
                                                 'mock-distributor', {},
                                                 True,
                                                 distributor_id='dist-1')

        self.conduit = RepoPublishConduit('repo-1', 'dist-1')

    def tearDown(self):
        """Undo the mock plugin installation."""
        super(RepoPublishConduitTests, self).tearDown()
        mock_plugins.reset()

    def test_str(self):
        """
        Makes sure the __str__ implementation doesn't crash.
        """
        str(self.conduit)

    def test_last_publish(self):
        """
        Tests retrieving the last publish time in both the unpublish and previously published cases.
        """

        # Test - Unpublished
        unpublished = self.conduit.last_publish()
        self.assertTrue(unpublished is None)

        # Setup - Previous publish
        last_publish = datetime.datetime(2015, 4, 29, 20, 23, 56, 0)
        repo_dist = RepoDistributor.get_collection().find_one(
            {'repo_id': 'repo-1'})
        repo_dist['last_publish'] = last_publish
        RepoDistributor.get_collection().save(repo_dist, safe=True)

        # Test - Last publish
        found = self.conduit.last_publish()
        self.assertTrue(isinstance(found,
                                   datetime.datetime))  # check returned format
        self.assertEqual(repo_dist['last_publish'], found)

    @mock.patch('pulp.plugins.conduits.repo_publish.RepoDistributor')
    def test_last_publish_with_error(self, mock_dist):
        """
        Test the handling of an error getting last_publish information.
        """
        # A missing distributor record must surface as a conduit exception.
        mock_dist.get_collection().find_one.return_value = None
        self.assertRaises(DistributorConduitException,
                          self.conduit.last_publish)
コード例 #41
0
ファイル: test_repo_publish.py プロジェクト: shubham90/pulp
class RepoPublishConduitTests(base.PulpServerTests):
    """
    Tests for RepoPublishConduit backed by mongoengine Distributor documents.
    """

    def clean(self):
        super(RepoPublishConduitTests, self).clean()

        mock_plugins.reset()
        # Remove any repository/distributor documents left behind by a test.
        model.Repository.objects.delete()
        model.Distributor.objects.delete()

    @mock.patch('pulp.server.controllers.distributor.model.Repository.objects')
    def setUp(self, mock_repo_qs):
        super(RepoPublishConduitTests, self).setUp()
        mock_plugins.install()
        manager_factory.initialize()

        # Populate the database with a repo with units
        dist_controller.add_distributor('repo-1',
                                        'mock-distributor', {},
                                        True,
                                        distributor_id='dist-1')

        self.conduit = RepoPublishConduit('repo-1', 'dist-1')

    def tearDown(self):
        super(RepoPublishConduitTests, self).tearDown()
        mock_plugins.reset()

    def test_str(self):
        """
        Makes sure the __str__ implementation doesn't crash.
        """
        str(self.conduit)

    def test_last_publish(self):
        """
        Tests retrieving the last publish time in both the unpublished and
        previously published cases.
        """

        # Test - Unpublished: never-published distributors report None.
        unpublished = self.conduit.last_publish()
        self.assertIsNone(unpublished)

        # Setup - Previous publish: store a naive timestamp on the document.
        last_publish = datetime.datetime(2015, 4, 29, 20, 23, 56, 0)
        repo_dist = model.Distributor.objects.get_or_404(repo_id='repo-1')
        repo_dist['last_publish'] = last_publish
        repo_dist.save()

        # Test - Last publish
        found = self.conduit.last_publish()
        self.assertIsInstance(found, datetime.datetime)  # check returned format

        # The conduit returns a tz-aware (UTC) datetime even though the
        # stored value is naive, so strip tzinfo before comparing.
        self.assertEqual(found.tzinfo, dateutils.utc_tz())
        self.assertEqual(repo_dist['last_publish'], found.replace(tzinfo=None))

    @mock.patch('pulp.plugins.conduits.repo_publish.model.Distributor.objects')
    def test_last_publish_with_error(self, m_dist_qs):
        """
        Test the handling of an error getting last_publish information.
        """
        # A MissingResource from the lookup must surface to callers as a
        # DistributorConduitException.
        m_dist_qs.only.return_value.get_or_404.side_effect = exceptions.MissingResource
        self.assertRaises(DistributorConduitException,
                          self.conduit.last_publish)
コード例 #42
0
ファイル: publish.py プロジェクト: tomlanyon/pulp
    def publish(self, repo_id, distributor_id, publish_config_override=None):
        """
        Requests the given distributor publish the repository it is configured
        on.

        The publish operation is executed synchronously in the caller's thread
        and will block until it is completed. The caller must take the necessary
        steps to address the fact that a publish call may be time intensive.

        @param repo_id: identifies the repo being published
        @type  repo_id: str

        @param distributor_id: identifies the repo's distributor to publish
        @type  distributor_id: str

        @param publish_config_override: optional config values to use for this
                                        publish call only
        @type  publish_config_override: dict, None

        @raise MissingResource: if the repo, the repo/distributor association,
               or the distributor plugin instance cannot be found
        """

        repo_coll = Repo.get_collection()
        distributor_coll = RepoDistributor.get_collection()

        # Validation
        repo = repo_coll.find_one({'id': repo_id})
        if repo is None:
            raise MissingResource(repo_id)

        # The distributor must already be associated with this repository.
        repo_distributor = distributor_coll.find_one({
            'repo_id': repo_id,
            'id': distributor_id
        })
        if repo_distributor is None:
            raise MissingResource(repository=repo_id,
                                  distributor=distributor_id)

        distributor_instance, distributor_config = self._get_distributor_instance_and_config(
            repo_id, distributor_id)

        if distributor_instance is None:
            # NOTE(review): Python 2 three-expression raise. Presumably
            # sys.exc_info()[2] carries the traceback of an exception caught
            # inside _get_distributor_instance_and_config -- confirm; if no
            # exception is being handled here, the traceback is None and the
            # extra expressions are dead weight.
            raise MissingResource(repo_id), None, sys.exc_info()[2]

        # Register a cancel hook so an in-flight publish can be aborted via
        # the dispatch system.
        dispatch_context = dispatch_factory.context()
        dispatch_context.set_cancel_control_hook(
            distributor_instance.cancel_publish_repo)

        # Assemble the data needed for the publish
        conduit = RepoPublishConduit(repo_id, distributor_id)

        # Per-call overrides are layered on top of the stored distributor
        # config and the plugin-level config.
        call_config = PluginCallConfiguration(distributor_config,
                                              repo_distributor['config'],
                                              publish_config_override)
        transfer_repo = common_utils.to_transfer_repo(repo)
        transfer_repo.working_dir = common_utils.distributor_working_dir(
            repo_distributor['distributor_type_id'], repo_id, mkdir=True)

        # Fire events describing the publish state
        fire_manager = manager_factory.event_fire_manager()
        fire_manager.fire_repo_publish_started(repo_id, distributor_id)
        result = self._do_publish(repo, distributor_id, distributor_instance,
                                  transfer_repo, conduit, call_config)
        fire_manager.fire_repo_publish_finished(result)

        # Publish is done; the cancel hook no longer applies.
        dispatch_context.clear_cancel_control_hook()