Example #1
 def setUp(self):
     super(QueryTests, self).setUp()
     RepoContentUnit.get_collection().remove()
     unit_db.clean()
     self.define_plugins()
     plugin_api._create_manager()
     plugin_api._MANAGER.importers.add_plugin(constants.HTTP_IMPORTER, NodesHttpImporter, {})
Example #2
 def setUp(self):
     ServerTests.setUp(self)
     self.parentfs = self.tmpdir('parent-')
     self.childfs = self.tmpdir('child-')
     self.alias = (self.parentfs, self.parentfs)
     self.temp_dir = tempfile.mkdtemp()
     Consumer.get_collection().remove()
     Bind.get_collection().remove()
     model.Repository.objects.delete()
     model.Distributor.objects.delete()
     model.Importer.objects.delete()
     RepoContentUnit.get_collection().remove()
     unit_db.clean()
     self.define_plugins()
     plugin_api._create_manager()
     imp_conf = dict(strategy=constants.MIRROR_STRATEGY)
     plugin_api._MANAGER.importers.add_plugin(constants.HTTP_IMPORTER,
                                              NodesHttpImporter, imp_conf)
     plugin_api._MANAGER.distributors.add_plugin(constants.HTTP_DISTRIBUTOR,
                                                 NodesHttpDistributor, {})
     plugin_api._MANAGER.distributors.add_plugin(FAKE_DISTRIBUTOR,
                                                 FakeDistributor,
                                                 FAKE_DISTRIBUTOR_CONFIG)
     plugin_api._MANAGER.profilers.add_plugin(constants.PROFILER_ID,
                                              NodeProfiler, {})
Example #3
 def test_import(self):
     # Setup
     self.populate()
     pulp_conf.set('server', 'storage_dir', self.parentfs)
     dist = NodesHttpDistributor()
     repo = Repository(self.REPO_ID)
     cfg = {
         'protocol':'file',
         'http':{'alias':self.alias},
         'https':{'alias':self.alias},
         'file':{'alias':self.alias},
     }
     conduit = RepoPublishConduit(self.REPO_ID, constants.HTTP_DISTRIBUTOR)
     dist.publish_repo(repo, conduit, cfg)
     Repo.get_collection().remove()
     RepoDistributor.get_collection().remove()
     RepoContentUnit.get_collection().remove()
     unit_db.clean()
     # Test
     importer = NodesHttpImporter()
     publisher = dist.publisher(repo, cfg)
     manifest_url = 'file://' + publisher.manifest_path()
     cfg = dict(manifest_url=manifest_url, strategy=constants.MIRROR_STRATEGY)
     conduit = RepoSyncConduit(
         self.REPO_ID,
         constants.HTTP_IMPORTER,
         RepoContentUnit.OWNER_TYPE_IMPORTER,
         constants.HTTP_IMPORTER)
     importer.sync_repo(repo, conduit, cfg)
     # Verify
     units = conduit.get_units()
     self.assertEquals(len(units), self.NUM_UNITS)
Example #4
def _fix_pkg_group_category_repoid(repoid, typeid):
    """
    Looks up units within a repo and validates that the repo_id in the unit metadata matches the
    repo the unit is associated with. If they don't match,
     * take a deep copy of the pkg group or category unit
     * create(save) new unit with fixed repoid
     * re-associate new unit with the repo
    """
    units = ass_query_mgr.get_units(repo_id=repoid,
                                    criteria=UnitAssociationCriteria(type_ids=typeid))
    for unit in units:
        if unit['metadata']['repo_id'] != repoid:
            _log.debug("Found unit %s to migrate" % unit['id'])
            # take a copy of the unit and fix the repoid
            new_unit_metadata = _safe_copy_unit(unit['metadata'])
            new_unit_metadata['repo_id'] = repoid
            try:
                new_unit_id = content_mgr.add_content_unit(content_type=typeid, unit_id=None,
                                                           unit_metadata=new_unit_metadata)
                # Grab the association doc itself from the DB directly
                association = RepoContentUnit.get_collection().find_one({'_id': unit['_id']})
                # Update to point to the new unit
                association['unit_id'] = new_unit_id
                # Save it back to the DB
                RepoContentUnit.get_collection().save(association)
            except pymongo.errors.DuplicateKeyError:
                # If migrating this Unit to have the correct repo_id causes a
                # duplicate, then there is already a Unit with the correct
                # metadata in place in this repository. Because of this, we
                # should remove the association of the unit with the repository.
                RepoContentUnit.get_collection().remove({'_id': unit['_id']})
                # Since we removed a Unit from the repo, we should decrement the repo unit count
                repo_controller.update_unit_count(repoid, typeid, -1)
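The migration in Example #4 calls a _safe_copy_unit helper that is not shown. A minimal, hypothetical sketch of what such a helper could look like, assuming all it needs to do is deep-copy the metadata and strip MongoDB bookkeeping keys such as _id so that saving the copy creates a brand new unit:

import copy


def _safe_copy_unit(metadata):
    # Hypothetical sketch; the real helper used by the migration may differ.
    # Deep-copy the unit metadata and drop internal fields (anything that
    # starts with '_', including '_id') so the copy gets a fresh identity
    # when it is saved as a new content unit.
    new_metadata = copy.deepcopy(metadata)
    for key in list(new_metadata.keys()):
        if key.startswith('_'):
            del new_metadata[key]
    return new_metadata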
Example #5
def add_content_unit_to_repo(repo_id, content_unit):
    associated_unit = RepoContentUnit(repo_id, content_unit['_id'],
                                      _TYPE_YUM_REPO_METADATA_FILE,
                                      RepoContentUnit.OWNER_TYPE_IMPORTER,
                                      _TYPE_YUM_IMPORTER)
    collection = RepoContentUnit.get_collection()
    collection.insert(associated_unit)
Example #6
 def test_import_cached_manifest_missing_units(self, *unused):
     # Setup
     self.populate()
     with mock_config.patch({'server': {'storage_dir': self.parentfs}}):
         dist = NodesHttpDistributor()
         working_dir = os.path.join(self.childfs, 'working_dir')
         os.makedirs(working_dir)
         repo = Repository(self.REPO_ID, working_dir)
         configuration = self.dist_conf()
         conduit = RepoPublishConduit(self.REPO_ID, constants.HTTP_DISTRIBUTOR)
         dist.publish_repo(repo, conduit, configuration)
         model.Distributor.objects.delete()
         RepoContentUnit.get_collection().remove()
         unit_db.clean()
         self.define_plugins()
         publisher = dist.publisher(repo, configuration)
         manifest_path = publisher.manifest_path()
         manifest = Manifest(manifest_path)
         manifest.read()
         shutil.copy(manifest_path, os.path.join(working_dir, MANIFEST_FILE_NAME))
         # Test
         importer = NodesHttpImporter()
         manifest_url = pathlib.url_join(publisher.base_url, manifest_path)
         configuration = {
             constants.MANIFEST_URL_KEYWORD: manifest_url,
             constants.STRATEGY_KEYWORD: constants.MIRROR_STRATEGY,
         }
         configuration = PluginCallConfiguration(configuration, {})
         conduit = RepoSyncConduit(self.REPO_ID, constants.HTTP_IMPORTER, Mock())
     with mock_config.patch({'server': {'storage_dir': self.childfs}}):
         with patch('pulp_node.constants.CONTENT_PATH', self.parentfs):
             importer.sync_repo(repo, conduit, configuration)
         # Verify
         units = conduit.get_units()
         self.assertEquals(len(units), self.NUM_UNITS)
Example #7
    def process_main(self, item=None):
        repo = self.get_repo()

        units_coll = RepoContentUnit.get_collection()
        units = self._get_units(units_coll, repo.id)

        snapshot_name = repo.notes.get(REPO_SNAPSHOT_NAME)
        if snapshot_name:
            old_units = self._get_units(units_coll, snapshot_name)
        else:
            old_units = []
        units = self._units_to_set(units)
        old_units = self._units_to_set(old_units)
        # Create a snapshot if one did not exist before (snapshot_name is
        # None) and the repo is not empty, or if the unit contents have
        # changed; otherwise return a report for the existing snapshot.
        if units == old_units and (snapshot_name or not units):
            return self._build_report(snapshot_name)

        now = time.time()
        suffix = time.strftime("%Y%m%d%H%M%S", time.gmtime(now))
        suffix = "__%s.%04dZ" % (suffix, 10000 * (now - int(now)))
        new_name = "%s%s" % (repo.id, suffix)
        notes = {}
        notes[REPO_SNAPSHOT_NAME] = new_name
        notes[REPO_SNAPSHOT_TIMESTAMP] = now
        if '_repo-type' in repo.notes:
            notes['_repo-type'] = repo.notes['_repo-type']
        distributors = []
        # Fetch the repo's existing importer, if any
        repo_importer = RepoImporter.objects.filter(repo_id=repo.id).first()
        if repo_importer is not None:
            importer_type_id = repo_importer['importer_type_id']
        else:
            importer_type_id = None

        repo_obj = repo_controller.create_repo(
            new_name,
            notes=notes,
            importer_type_id=importer_type_id,
            importer_repo_plugin_config={},
            distributor_list=distributors)
        copied = []
        for unit in sorted(units):
            copied.append(
                RepoContentUnit(
                    repo_id=new_name,
                    unit_id=unit.unit_id,
                    unit_type_id=unit.unit_type_id,
                ))
        if copied:
            units_coll.insert(copied)
        repo_controller.rebuild_content_unit_counts(repo_obj)

        group_coll = RepoGroup.get_collection()
        group_coll.update(dict(repo_ids=repo.id),
                          {'$addToSet': dict(repo_ids=new_name)})
        return self._build_report(new_name)
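For reference, the suffix built in process_main above is a UTC timestamp plus four digits derived from the fractional seconds, so the snapshot repository created for a repo with id 'repo1' is named along these lines (the timestamp value is illustrative):

import time

now = 1435056000.5                       # example: 2015-06-23 10:40:00.5 UTC
suffix = time.strftime("%Y%m%d%H%M%S", time.gmtime(now))
suffix = "__%s.%04dZ" % (suffix, 10000 * (now - int(now)))
new_name = "%s%s" % ('repo1', suffix)
# new_name == 'repo1__20150623104000.5000Z'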
Example #8
 def setUp(self):
     super(QueryTests, self).setUp()
     RepoContentUnit.get_collection().remove()
     unit_db.clean()
     self.define_plugins()
     plugin_api._create_manager()
     plugin_api._MANAGER.importers.add_plugin(constants.HTTP_IMPORTER, NodesHttpImporter, {})
Example #9
 def tearDown(self):
     super(OrphanManagerTests, self).tearDown()
     RepoContentUnit.get_collection().remove(safe=True)
     content_type_db.clean()
     if os.path.exists(
             self.content_root):  # can be removed by delete operations
         shutil.rmtree(self.content_root)
Example #10
def associate_content_unit_with_repo(content_unit):
    repo_content_unit = RepoContentUnit(PHONY_REPO_ID, content_unit['_id'],
                                        content_unit['_content_type_id'],
                                        RepoContentUnit.OWNER_TYPE_USER,
                                        PHONY_USER_ID)
    collection = RepoContentUnit.get_collection()
    collection.insert(repo_content_unit, safe=True)
Example #11
    def clean(self):
        super(RepoSyncConduitTests, self).clean()
        types_database.clean()
        mock_plugins.reset()

        RepoContentUnit.get_collection().remove()
        Repo.get_collection().remove()
Example #12
 def clean(self):
     Bind.get_collection().remove()
     Repo.get_collection().remove()
     RepoDistributor.get_collection().remove()
     RepoImporter.get_collection().remove()
     RepoContentUnit.get_collection().remove()
     unit_db.clean()
Example #13
    def associate_unit_by_id(self,
                             repo_id,
                             unit_type_id,
                             unit_id,
                             update_repo_metadata=True):
        """
        Creates an association between the given repository and content unit.

        If there is already an association between the given repo and content
        unit where all other metadata matches the input to this method,
        this call has no effect.

        Both repo and unit must exist in the database prior to this call,
        however this call will not verify that for performance reasons. Care
        should be taken by the caller to preserve the data integrity.

        @param repo_id: identifies the repo
        @type  repo_id: str

        @param unit_type_id: identifies the type of unit being added
        @type  unit_type_id: str

        @param unit_id: uniquely identifies the unit within the given type
        @type  unit_id: str

        @param update_repo_metadata: if True, updates the unit association count
                                  after the new association is made. The last
                                  unit added field will also be updated.  Set this
                                  to False when doing bulk associations, and
                                  make one call to update the count at the end.
                                  defaults to True
        @type  update_repo_metadata: bool

        @raise InvalidType: if the given owner type is not of the valid enumeration
        """

        # If the association already exists, no need to do anything else
        spec = {
            'repo_id': repo_id,
            'unit_id': unit_id,
            'unit_type_id': unit_type_id
        }
        existing_association = RepoContentUnit.get_collection().find_one(spec)
        if existing_association is not None:
            return

        similar_exists = False
        if update_repo_metadata:
            similar_exists = RepoUnitAssociationManager.association_exists(
                repo_id, unit_id, unit_type_id)

        # Create the database entry
        association = RepoContentUnit(repo_id, unit_id, unit_type_id)
        RepoContentUnit.get_collection().save(association, safe=True)

        # update the count and times of associated units on the repo object
        if update_repo_metadata and not similar_exists:
            repo_controller.update_unit_count(repo_id, unit_type_id, 1)
            repo_controller.update_last_unit_added(repo_id)
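As the docstring notes, update_repo_metadata should be set to False for bulk associations, with one metadata update at the end. A small usage sketch of that pattern; the manager instance, the repo_controller module, and the repo/type/unit ids are assumed here for illustration:

unit_ids = ['unit-1', 'unit-2', 'unit-3']      # hypothetical unit ids
for unit_id in unit_ids:
    manager.associate_unit_by_id('repo-1', 'rpm', unit_id,
                                 update_repo_metadata=False)

# One count/timestamp update for the whole batch instead of one per unit.
repo_controller.update_unit_count('repo-1', 'rpm', len(unit_ids))
repo_controller.update_last_unit_added('repo-1')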
Example #14
 def tearDown(self):
     PulpRPMTests.tearDown(self)
     Consumer.get_collection().remove()
     Repo.get_collection().remove()
     RepoContentUnit.get_collection().remove()
     RepoDistributor.get_collection().remove()
     database.clean()
     plugins.finalize()
Example #15
    def tearDown(self):
        super(Migration0004Tests, self).tearDown()

        # Delete any sample data added for the test
        types_db.clean()

        RepoContentUnit.get_collection().remove()
        RepoImporter.get_collection().remove()
        Repo.get_collection().remove()
Example #16
    def associate_unit_by_id(self, repo_id, unit_type_id, unit_id, update_repo_metadata=True):
        """
        Creates an association between the given repository and content unit.

        If there is already an association between the given repo and content
        unit where all other metadata matches the input to this method,
        this call has no effect.

        Both repo and unit must exist in the database prior to this call,
        however this call will not verify that for performance reasons. Care
        should be taken by the caller to preserve the data integrity.

        @param repo_id: identifies the repo
        @type  repo_id: str

        @param unit_type_id: identifies the type of unit being added
        @type  unit_type_id: str

        @param unit_id: uniquely identifies the unit within the given type
        @type  unit_id: str

        @param update_repo_metadata: if True, updates the unit association count
                                  after the new association is made. The last
                                  unit added field will also be updated.  Set this
                                  to False when doing bulk associations, and
                                  make one call to update the count at the end.
                                  defaults to True
        @type  update_repo_metadata: bool

        @raise InvalidType: if the given owner type is not of the valid enumeration
        """

        # If the association already exists, no need to do anything else
        spec = {'repo_id': repo_id,
                'unit_id': unit_id,
                'unit_type_id': unit_type_id}
        existing_association = RepoContentUnit.get_collection().find_one(spec)
        if existing_association is not None:
            return

        similar_exists = False
        if update_repo_metadata:
            similar_exists = RepoUnitAssociationManager.association_exists(repo_id, unit_id,
                                                                           unit_type_id)

        # Create the database entry
        association = RepoContentUnit(repo_id, unit_id, unit_type_id)
        RepoContentUnit.get_collection().save(association, safe=True)

        manager = manager_factory.repo_manager()

        # update the count of associated units on the repo object
        if update_repo_metadata and not similar_exists:
            manager.update_unit_count(repo_id, unit_type_id, 1)

            # update the record for the last added field
            manager.update_last_unit_added(repo_id)
Example #17
    def tearDown(self):
        super(Migration0004Tests, self).tearDown()

        # Delete any sample data added for the test
        types_db.clean()

        RepoContentUnit.get_collection().remove()
        get_collection('repo_importers').remove()
        model.Repository.drop_collection()
Example #18
    def tearDown(self):
        super(Migration0004Tests, self).tearDown()

        # Delete any sample data added for the test
        types_db.clean()

        RepoContentUnit.get_collection().remove()
        get_collection('repo_importers').remove()
        model.Repository.drop_collection()
Example #19
    def tearDown(self):
        super(Migration0004Tests, self).tearDown()

        # Delete any sample data added for the test
        types_db.clean()

        RepoContentUnit.get_collection().remove()
        RepoImporter.get_collection().remove()
        Repo.get_collection().remove()
Example #20
 def setUp(self):
     super(BaseProfilerConduitTests, self).setUp()
     Consumer.get_collection().remove()
     RepoDistributor.get_collection().remove()
     Bind.get_collection().remove()
     RepoContentUnit.get_collection().remove()
     UnitProfile.get_collection().remove()
     plugin_api._create_manager()
     typedb.update_database([self.TYPE_1_DEF, self.TYPE_2_DEF])
     mock_plugins.install()
Example #21
 def tearDown(self):
     super(BaseProfilerConduitTests, self).tearDown()
     Consumer.get_collection().remove()
     Repo.get_collection().remove()
     RepoDistributor.get_collection().remove()
     Bind.get_collection().remove()
     RepoContentUnit.get_collection().remove()
     UnitProfile.get_collection().remove()
     typedb.clean()
     factory.reset()
Example #22
    def clean(self):
        super(DependencyManagerTests, self).clean()

        database.clean()

        Repo.get_collection().remove()
        RepoImporter.get_collection().remove()
        RepoContentUnit.get_collection().remove()

        mock_plugins.MOCK_IMPORTER.resolve_dependencies.return_value = None
Example #23
 def setUp(self):
     super(BaseProfilerConduitTests, self).setUp()
     Consumer.get_collection().remove()
     RepoDistributor.get_collection().remove()
     Bind.get_collection().remove()
     RepoContentUnit.get_collection().remove()
     UnitProfile.get_collection().remove()
     plugin_api._create_manager()
     typedb.update_database([self.TYPE_1_DEF, self.TYPE_2_DEF])
     mock_plugins.install()
Example #24
    def clean(self):
        super(DependencyManagerTests, self).clean()

        database.clean()

        Repo.get_collection().remove()
        RepoImporter.get_collection().remove()
        RepoContentUnit.get_collection().remove()

        mock_plugins.MOCK_IMPORTER.resolve_dependencies.return_value = None
Example #25
 def setUp(self):
     super(QueryTests, self).setUp()
     Repo.get_collection().remove()
     RepoContentUnit.get_collection().remove()
     unit_db.clean()
     for type_id in ALL_TYPES:
         unit_db.type_definition = Mock(return_value=dict(id=type_id, unit_key=UNIT_METADATA))
     unit_db.type_units_unit_key = Mock(return_value=['A', 'B', 'C', 'N'])
     plugin_api._create_manager()
     plugin_api._MANAGER.importers.add_plugin(constants.HTTP_IMPORTER, NodesHttpImporter, {})
Example #26
 def tearDown(self):
     super(BaseProfilerConduitTests, self).tearDown()
     Consumer.get_collection().remove()
     Repo.get_collection().remove()
     RepoDistributor.get_collection().remove()
     Bind.get_collection().remove()
     RepoContentUnit.get_collection().remove()
     UnitProfile.get_collection().remove()
     typedb.clean()
     factory.reset()
Example #27
 def tearDown(self):
     WebTest.tearDown(self)
     shutil.rmtree(self.parentfs)
     shutil.rmtree(self.childfs)
     Consumer.get_collection().remove()
     Bind.get_collection().remove()
     Repo.get_collection().remove()
     RepoDistributor.get_collection().remove()
     RepoImporter.get_collection().remove()
     RepoContentUnit.get_collection().remove()
     unit_db.clean()
Example #28
 def tearDown(self):
     WebTest.tearDown(self)
     shutil.rmtree(self.parentfs)
     shutil.rmtree(self.childfs)
     Consumer.get_collection().remove()
     Bind.get_collection().remove()
     Repo.get_collection().remove()
     RepoDistributor.get_collection().remove()
     RepoImporter.get_collection().remove()
     RepoContentUnit.get_collection().remove()
     unit_db.clean()
Example #29
 def tearDown(self):
     ServerTests.tearDown(self)
     shutil.rmtree(self.parentfs)
     shutil.rmtree(self.childfs)
     Consumer.get_collection().remove()
     Bind.get_collection().remove()
     model.Repository.objects.delete()
     model.Distributor.objects.delete()
     model.Importer.objects.delete()
     RepoContentUnit.get_collection().remove()
     unit_db.clean()
Example #30
 def tearDown(self):
     ServerTests.tearDown(self)
     shutil.rmtree(self.parentfs)
     shutil.rmtree(self.childfs)
     Consumer.get_collection().remove()
     Bind.get_collection().remove()
     model.Repository.objects.delete()
     model.Distributor.objects.delete()
     model.Importer.objects.delete()
     RepoContentUnit.get_collection().remove()
     unit_db.clean()
Example #31
 def clean(self, units_only=False, plugins=False):
     RepoContentUnit.get_collection().remove()
     unit_db.clean()
     if units_only:
         return
     Bind.get_collection().remove()
     Repo.get_collection().remove()
     RepoDistributor.get_collection().remove()
     RepoImporter.get_collection().remove()
     if plugins:
         plugin_api._MANAGER.distributors.plugins = {}
Example #32
 def tearDown(self):
     super(BaseProfilerConduitTests, self).tearDown()
     Consumer.get_collection().remove()
     model.Repository.objects.delete()
     model.Distributor.objects.delete()
     Bind.get_collection().remove()
     RepoContentUnit.get_collection().remove()
     UnitProfile.get_collection().remove()
     typedb.clean()
     factory.reset()
     mock_plugins.reset()
Example #33
 def tearDown(self):
     super(BaseProfilerConduitTests, self).tearDown()
     Consumer.get_collection().remove()
     model.Repository.objects.delete()
     model.Distributor.objects.delete()
     Bind.get_collection().remove()
     RepoContentUnit.get_collection().remove()
     UnitProfile.get_collection().remove()
     typedb.clean()
     factory.reset()
     mock_plugins.reset()
Example #34
 def clean(self, just_units=False, purge_plugins=False):
     RepoContentUnit.get_collection().remove()
     unit_db.clean()
     if just_units:
         return
     Bind.get_collection().remove()
     Repo.get_collection().remove()
     RepoDistributor.get_collection().remove()
     RepoImporter.get_collection().remove()
     if purge_plugins:
         plugin_api._MANAGER.importers.plugins = {}
         plugin_api._MANAGER.distributors.plugins = {}
Example #35
 def test_import_unit_files_already_exist_size_mismatch(
         self, mock_get_working, *mocks):
     # Setup
     self.populate()
     mock_get_working.return_value = self.temp_dir
     with mock_config.patch({'server': {'storage_dir': self.parentfs}}):
         dist = NodesHttpDistributor()
         working_dir = os.path.join(self.childfs, 'working_dir')
         os.makedirs(working_dir)
         repo = Repository(self.REPO_ID, working_dir)
         cfg = self.dist_conf()
         conduit = RepoPublishConduit(self.REPO_ID,
                                      constants.HTTP_DISTRIBUTOR)
         dist.publish_repo(repo, conduit, cfg)
         model.Distributor.objects.delete()
         RepoContentUnit.get_collection().remove()
         unit_db.clean()
         self.define_plugins()
         parent_content = os.path.join(self.parentfs, 'content')
         child_content = os.path.join(self.childfs, 'content')
         shutil.copytree(parent_content, child_content)
         for fn in os.listdir(child_content):
             path = os.path.join(child_content, fn)
             if os.path.isdir(path):
                 continue
             with open(path, 'w') as fp:
                 fp.truncate()
         # Test
         importer = NodesHttpImporter()
         publisher = dist.publisher(repo, cfg)
         manifest_url = pathlib.url_join(publisher.base_url,
                                         publisher.manifest_path())
         configuration = {
             constants.MANIFEST_URL_KEYWORD: manifest_url,
             constants.STRATEGY_KEYWORD: constants.MIRROR_STRATEGY,
         }
         configuration = PluginCallConfiguration(configuration, {})
         conduit = RepoSyncConduit(self.REPO_ID, constants.HTTP_IMPORTER,
                                   Mock())
     with mock_config.patch({'server': {'storage_dir': self.childfs}}):
         with patch('pulp_node.constants.CONTENT_PATH', self.parentfs):
             importer.sync_repo(repo, conduit, configuration)
         # Verify
         units = conduit.get_units()
         self.assertEquals(len(units), self.NUM_UNITS)
         mock_importer_config_to_nectar_config = mocks[0]
         mock_importer_config_to_nectar_config.assert_called_with(
             configuration.flatten())
Example #36
    def test_unassociate_all(self):
        """
        Tests unassociating multiple units in a single call.
        """

        # Setup
        self.manager.associate_unit_by_id(self.repo_id, self.unit_type_id, self.unit_id, OWNER_TYPE_USER, "admin")
        self.manager.associate_unit_by_id(self.repo_id, self.unit_type_id, self.unit_id_2, OWNER_TYPE_USER, "admin")
        self.manager.associate_unit_by_id(self.repo_id, "type-2", "unit-1", OWNER_TYPE_USER, "admin")
        self.manager.associate_unit_by_id(self.repo_id, "type-2", "unit-2", OWNER_TYPE_USER, "admin")

        unit_coll = RepoContentUnit.get_collection()
        self.assertEqual(4, len(list(unit_coll.find({"repo_id": self.repo_id}))))

        # Test
        self.manager.unassociate_all_by_ids(
            self.repo_id, self.unit_type_id, [self.unit_id, self.unit_id_2], OWNER_TYPE_USER, "admin"
        )

        # Verify
        self.assertEqual(2, len(list(unit_coll.find({"repo_id": self.repo_id}))))

        self.assertTrue(
            unit_coll.find_one({"repo_id": self.repo_id, "unit_type_id": "type-2", "unit_id": "unit-1"}) is not None
        )
        self.assertTrue(
            unit_coll.find_one({"repo_id": self.repo_id, "unit_type_id": "type-2", "unit_id": "unit-2"}) is not None
        )
Example #37
    def association_exists(repo_id, unit_id, unit_type_id):
        """
        Determines if an identical association already exists.

        I know the order of arguments does not match other methods in this
        module, but it does match the constructor for the RepoContentUnit
        object, which I think is the higher authority.

        @param repo_id: identifies the repo
        @type  repo_id: str

        @param unit_type_id: identifies the type of unit being checked
        @type  unit_type_id: str

        @param unit_id: uniquely identifies the unit within the given type
        @type  unit_id: str

        @return: True if the association already exists, False otherwise
        @rtype:  bool
        """
        spec = {
            'repo_id' : repo_id,
            'unit_id' : unit_id,
            'unit_type_id' : unit_type_id,
        }
        unit_coll = RepoContentUnit.get_collection()

        existing_count = unit_coll.find(spec).count()
        return bool(existing_count)
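In the unassociation examples further below, association_exists acts as a guard: the repository's unit count is only decremented for units that no longer have any association left with the repo. A condensed sketch of that pattern, with repo_controller and the ids assumed for illustration:

removed_unit_ids = ['unit-1', 'unit-2']        # hypothetical ids just removed
unique_count = sum(
    1 for unit_id in removed_unit_ids
    if not RepoUnitAssociationManager.association_exists('repo-1', unit_id, 'rpm'))
if unique_count:
    # Decrement only for units that have truly disappeared from the repo.
    repo_controller.update_unit_count('repo-1', 'rpm', -unique_count)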
Example #38
    def rebuild_content_unit_counts(repo_ids=None):
        """
        WARNING: This might take a long time, and it should not be used unless
        absolutely necessary. Not responsible for melted servers.

        This will iterate through the given repositories, which defaults to ALL
        repositories, and recalculate the content unit counts for each content
        type.

        This method is called from platform migration 0004, so consult that
        migration before changing this method.

        :param repo_ids:    list of repository IDs. DEFAULTS TO ALL REPO IDs!!!
        :type  repo_ids:    list
        """
        association_collection = RepoContentUnit.get_collection()
        repo_collection = Repo.get_collection()

        # default to all repos if none were specified
        if not repo_ids:
            repo_ids = [repo['id'] for repo in repo_collection.find(fields=['id'])]

        _LOG.info('regenerating content unit counts for %d repositories' % len(repo_ids))

        for repo_id in repo_ids:
            _LOG.debug('regenerating content unit count for repository "%s"' % repo_id)
            counts = {}
            cursor = association_collection.find({'repo_id':repo_id})
            type_ids = cursor.distinct('unit_type_id')
            cursor.close()
            for type_id in type_ids:
                spec = {'repo_id': repo_id, 'unit_type_id': type_id}
                counts[type_id] = association_collection.find(spec).count()
            repo_collection.update({'id': repo_id}, {'$set':{'content_unit_counts': counts}}, safe=True)
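A hedged usage sketch: the method can be pointed at specific repositories, or called with no arguments to recount everything, which the docstring warns can take a long time. The repo ids are made up, and it is assumed the method is reachable through the repo manager returned by manager_factory.repo_manager(), as other examples here suggest:

repo_manager = manager_factory.repo_manager()

# Recount just a couple of repositories (hypothetical ids)...
repo_manager.rebuild_content_unit_counts(repo_ids=['zoo', 'updates'])

# ...or recount every repository (slow on large deployments).
repo_manager.rebuild_content_unit_counts()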
Example #39
    def test_unassociate_all(self):
        """
        Tests unassociating multiple units in a single call.
        """

        # Setup
        self.manager.associate_unit_by_id(self.repo_id, self.unit_type_id, self.unit_id, OWNER_TYPE_USER, 'admin')
        self.manager.associate_unit_by_id(self.repo_id, self.unit_type_id, self.unit_id_2, OWNER_TYPE_USER, 'admin')
        self.manager.associate_unit_by_id(self.repo_id, 'type-2', 'unit-1', OWNER_TYPE_USER, 'admin')
        self.manager.associate_unit_by_id(self.repo_id, 'type-2', 'unit-2', OWNER_TYPE_USER, 'admin')

        unit_coll = RepoContentUnit.get_collection()
        self.assertEqual(4, len(list(unit_coll.find({'repo_id' : self.repo_id}))))

        # Test
        unassociated = self.manager.unassociate_all_by_ids(self.repo_id, self.unit_type_id,
                                                           [self.unit_id, self.unit_id_2],
                                                           OWNER_TYPE_USER, 'admin')

        # Verify
        self.assertEqual(len(unassociated), 2)
        for u in unassociated:
            self.assertTrue(isinstance(u, dict))
            self.assertTrue(u['type_id'], self.unit_type_id)
            self.assertTrue(u['unit_key'] in [self.unit_key, self.unit_key_2])

        self.assertEqual(2, len(list(unit_coll.find({'repo_id' : self.repo_id}))))

        self.assertTrue(unit_coll.find_one({'repo_id' : self.repo_id, 'unit_type_id' : 'type-2', 'unit_id' : 'unit-1'}) is not None)
        self.assertTrue(unit_coll.find_one({'repo_id' : self.repo_id, 'unit_type_id' : 'type-2', 'unit_id' : 'unit-2'}) is not None)
Example #40
    def test_with_db(self):
        REPO_ID = 'repo123'
        repo_collection = Repo.get_collection()
        repo_collection.save({'id': REPO_ID, 'content_unit_count': 0})

        assoc_collection = RepoContentUnit.get_collection()
        assoc_collection.insert({
            'repo_id': REPO_ID,
            'unit_type_id': 'rpm',
            'unit_id': 'unit1'
        })
        assoc_collection.insert({
            'repo_id': REPO_ID,
            'unit_type_id': 'rpm',
            'unit_id': 'unit2'
        })

        self.module.migrate()

        repo = repo_collection.find({'id': REPO_ID})[0]

        self.assertTrue('content_unit_count' not in repo)
        self.assertEqual(repo['content_unit_counts'], {'rpm': 2})

        # cleanup
        repo_collection.remove({'id': REPO_ID})
        assoc_collection.remove({'repo_id': REPO_ID})
Example #41
    def test_get_remove_unit(self):
        """
        Tests retrieving units through the conduit and removing them.
        """
        model.Repository(repo_id='repo-1').save()
        unit_1_key = {'key-1': 'unit_1'}
        unit_1_metadata = {'meta_1': 'value_1'}
        unit_1 = self.conduit.init_unit(TYPE_1_DEF.id, unit_1_key, unit_1_metadata, '/foo/bar')
        self.conduit.save_unit(unit_1)

        # Test - get_units
        units = self.conduit.get_units()

        #   Verify returned units
        self.assertEqual(1, len(units))
        self.assertEqual(unit_1_key, units[0].unit_key)
        self.assertTrue(units[0].id is not None)

        # Test - remove_units
        self.conduit.remove_unit(units[0])

        # Verify repo association removed in the database
        associated_units = list(RepoContentUnit.get_collection().find({'repo_id': 'repo-1'}))
        self.assertEqual(0, len(associated_units))

        #   Verify the unit itself is still in the database
        db_unit = self.query_manager.get_content_unit_by_id(TYPE_1_DEF.id, unit_1.id)
        self.assertTrue(db_unit is not None)
Example #42
    def unassociate_by_criteria(repo_id, criteria, notify_plugins=True):
        """
        Unassociate units that are matched by the given criteria.

        :param repo_id:        identifies the repo
        :type  repo_id:        str
        :param criteria:       criteria used to select the unit associations to remove
        :param notify_plugins: if true, relevant plugins will be informed of the removal
        :type  notify_plugins: bool
        """
        criteria = UnitAssociationCriteria.from_dict(criteria)
        association_query_manager = manager_factory.repo_unit_association_query_manager(
        )
        unassociate_units = association_query_manager.get_units(
            repo_id, criteria=criteria)

        if len(unassociate_units) == 0:
            return {}

        unit_map = {}  # maps unit_type_id to a list of unit_ids

        for unit in unassociate_units:
            id_list = unit_map.setdefault(unit['unit_type_id'], [])
            id_list.append(unit['unit_id'])

        collection = RepoContentUnit.get_collection()

        for unit_type_id, unit_ids in unit_map.items():
            spec = {
                'repo_id': repo_id,
                'unit_type_id': unit_type_id,
                'unit_id': {
                    '$in': unit_ids
                }
            }
            collection.remove(spec)

            unique_count = sum(
                1 for unit_id in unit_ids
                if not RepoUnitAssociationManager.association_exists(
                    repo_id, unit_id, unit_type_id))
            if not unique_count:
                continue

            repo_controller.update_unit_count(repo_id, unit_type_id,
                                              -unique_count)

        repo_controller.update_last_unit_removed(repo_id)

        # Convert the units into transfer units. This happens regardless of whether or not
        # the plugin will be notified as it's used to generate the return result.
        transfer_units = create_transfer_units(unassociate_units)

        if notify_plugins:
            remove_from_importer(repo_id, transfer_units)

        # Match the return type/format as copy
        serializable_units = [u.to_id_dict() for u in transfer_units]

        return {'units_successful': serializable_units}
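A sketch of how a caller might drive unassociate_by_criteria. The type_ids key mirrors the UnitAssociationCriteria usage seen elsewhere in these examples; the unit_filters key, the repo id, and the filter value are assumptions made for illustration. The returned report carries the removed units in the same id-dict form that copy operations use:

criteria = {
    'type_ids': ['rpm'],                           # limit removal to one unit type
    'unit_filters': {'name': 'obsolete-package'},  # hypothetical unit filter
}
report = RepoUnitAssociationManager.unassociate_by_criteria(
    'repo-1', criteria, notify_plugins=True)
# Each entry has 'type_id' and 'unit_key', as Example #44 also verifies.
removed = [(u['type_id'], u['unit_key'])
           for u in report.get('units_successful', [])]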
Example #43
    def unassociate_by_criteria(cls, repo_id, criteria, notify_plugins=True):
        """
        Unassociate units that are matched by the given criteria.

        :param repo_id:        identifies the repo
        :type  repo_id:        str
        :param criteria:       criteria used to select the unit associations to remove
        :param notify_plugins: if true, relevant plugins will be informed of the removal
        :type  notify_plugins: bool
        """
        criteria = UnitAssociationCriteria.from_dict(criteria)
        repo = model.Repository.objects.get_repo_or_missing_resource(repo_id)

        unassociate_units = load_associated_units(repo_id, criteria)

        if len(unassociate_units) == 0:
            return {}

        # Convert the units into transfer units. This happens regardless of whether or not
        # the plugin will be notified as it's used to generate the return result.
        # If all source types have been converted to mongo, search via new style.
        repo_unit_types = set(repo.content_unit_counts.keys())
        if repo_unit_types.issubset(set(plugin_api.list_unit_models())):
            transfer_units = list(cls._units_from_criteria(repo, criteria))
        else:
            transfer_units = None
            if unassociate_units is not None:
                transfer_units = list(create_transfer_units(unassociate_units))

        if notify_plugins:
            remove_from_importer(repo_id, transfer_units)

        unit_map = {}  # maps unit_type_id to a list of unit_ids

        for unit in unassociate_units:
            id_list = unit_map.setdefault(unit["unit_type_id"], [])
            id_list.append(unit["unit_id"])

        collection = RepoContentUnit.get_collection()

        for unit_type_id, unit_ids in unit_map.items():
            spec = {"repo_id": repo_id, "unit_type_id": unit_type_id, "unit_id": {"$in": unit_ids}}
            collection.remove(spec)

            unique_count = sum(
                1
                for unit_id in unit_ids
                if not RepoUnitAssociationManager.association_exists(repo_id, unit_id, unit_type_id)
            )
            if not unique_count:
                continue

            repo_controller.update_unit_count(repo_id, unit_type_id, -unique_count)

        repo_controller.update_last_unit_removed(repo_id)

        # Match the return type/format as copy
        serializable_units = [u.to_id_dict() for u in transfer_units]

        return {"units_successful": serializable_units}
Example #44
    def test_unassociate_all(self, mock_ctrl, mock_repo_qs):
        """
        Tests unassociating multiple units in a single call.
        """

        # Setup
        self.manager.associate_unit_by_id(self.repo_id, self.unit_type_id, self.unit_id)
        # Add more associations to ensure they all remove properly
        self.manager.associate_unit_by_id(self.repo_id, self.unit_type_id, self.unit_id_2)
        self.manager.associate_unit_by_id(self.repo_id, 'type-2', 'unit-1')
        self.manager.associate_unit_by_id(self.repo_id, 'type-2', 'unit-2')

        unit_coll = RepoContentUnit.get_collection()
        self.assertEqual(4, len(list(unit_coll.find({'repo_id': self.repo_id}))))

        # Test
        results = self.manager.unassociate_all_by_ids(self.repo_id, self.unit_type_id,
                                                      [self.unit_id, self.unit_id_2])
        unassociated = results['units_successful']

        # Verify
        self.assertEqual(len(unassociated), 2)
        for u in unassociated:
            self.assertTrue(isinstance(u, dict))
            self.assertTrue(u['type_id'], self.unit_type_id)
            self.assertTrue(u['unit_key'] in [self.unit_key, self.unit_key_2])

        self.assertEqual(2, len(list(unit_coll.find({'repo_id': self.repo_id}))))

        self.assertTrue(unit_coll.find_one({'repo_id': self.repo_id, 'unit_type_id': 'type-2',
                                            'unit_id': 'unit-1'}) is not None)
        self.assertTrue(unit_coll.find_one({'repo_id': self.repo_id, 'unit_type_id': 'type-2',
                                            'unit_id': 'unit-2'}) is not None)
Example #45
    def generate_orphans_by_type(content_type_id, fields=None):
        """
        Return a generator of all orphaned content units of the given content type.

        If fields is not specified, only the `_id` field will be present.

        :param content_type_id: id of the content type
        :type content_type_id: basestring
        :param fields: list of fields to include in each content unit
        :type fields: list or None
        :return: generator of orphaned content units for the given content type
        :rtype: generator
        """

        fields = fields if fields is not None else ['_id']
        content_units_collection = content_types_db.type_units_collection(
            content_type_id)
        repo_content_units_collection = RepoContentUnit.get_collection()

        for content_unit in content_units_collection.find({}, fields=fields):

            repo_content_units_cursor = repo_content_units_collection.find(
                {'unit_id': content_unit['_id']})

            if repo_content_units_cursor.count() > 0:
                continue

            yield content_unit
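generate_orphans_by_type only yields candidates; deleting them is left to the caller. A minimal, hypothetical follow-up sketch using the same collections the generator touches (purge_orphaned_units is an invented name, and real orphan cleanup would also remove each unit's files from storage, which is skipped here):

def purge_orphaned_units(content_type_id):
    # Collection holding the units of this content type.
    collection = content_types_db.type_units_collection(content_type_id)
    for orphan in OrphanManager.generate_orphans_by_type(content_type_id):
        # The orphan has no repo associations left, so drop the unit document.
        collection.remove({'_id': orphan['_id']}, safe=True)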
Example #46
    def test_get_remove_unit(self, mock_repo_qs):
        """
        Tests retrieving units through the conduit and removing them.
        """

        # Setup
        unit_1_key = {'key-1': 'unit_1'}
        unit_1_metadata = {'meta_1': 'value_1'}
        unit_1 = self.conduit.init_unit(TYPE_1_DEF.id, unit_1_key,
                                        unit_1_metadata, '/foo/bar')
        self.conduit.save_unit(unit_1)

        # Test - get_units
        units = self.conduit.get_units()

        #   Verify returned units
        self.assertEqual(1, len(units))
        self.assertEqual(unit_1_key, units[0].unit_key)
        self.assertTrue(units[0].id is not None)

        # Test - remove_units
        self.conduit.remove_unit(units[0])

        # Verify repo association removed in the database
        associated_units = list(RepoContentUnit.get_collection().find(
            {'repo_id': 'repo-1'}))
        self.assertEqual(0, len(associated_units))

        #   Verify the unit itself is still in the database
        db_unit = self.query_manager.get_content_unit_by_id(
            TYPE_1_DEF.id, unit_1.id)
        self.assertTrue(db_unit is not None)
Example #47
    def association_exists(repo_id, unit_id, unit_type_id):
        """
        Determines if an identical association already exists.

        I know the order of arguments does not match other methods in this
        module, but it does match the constructor for the RepoContentUnit
        object, which I think is the higher authority.

        @param repo_id: identifies the repo
        @type  repo_id: str

        @param unit_type_id: identifies the type of unit being checked
        @type  unit_type_id: str

        @param unit_id: uniquely identifies the unit within the given type
        @type  unit_id: str

        @return: True if the association already exists, False otherwise
        @rtype:  bool
        """
        spec = {
            'repo_id': repo_id,
            'unit_id': unit_id,
            'unit_type_id': unit_type_id,
        }
        unit_coll = RepoContentUnit.get_collection()

        existing_count = unit_coll.find(spec).count()
        return bool(existing_count)
Example #48
    def generate_orphans_by_type(content_type_id, fields=None):
        """
        Return a generator of all orphaned content units of the given content type.

        If fields is not specified, only the `_id` field will be present.

        :param content_type_id: id of the content type
        :type content_type_id: basestring
        :param fields: list of fields to include in each content unit
        :type fields: list or None
        :return: generator of orphaned content units for the given content type
        :rtype: generator
        """

        fields = fields if fields is not None else ['_id']
        content_units_collection = content_types_db.type_units_collection(content_type_id)
        repo_content_units_collection = RepoContentUnit.get_collection()

        for content_unit in content_units_collection.find({}, fields=fields):

            repo_content_units_cursor = repo_content_units_collection.find(
                {'unit_id': content_unit['_id']})

            if repo_content_units_cursor.count() > 0:
                continue

            yield content_unit
Example #49
    def rebuild_content_unit_counts(repo_ids=None):
        """
        WARNING: This might take a long time, and it should not be used unless
        absolutely necessary. Not responsible for melted servers.

        This will iterate through the given repositories, which defaults to ALL
        repositories, and recalculate the content unit counts for each content
        type.

        This method is called from platform migration 0004, so consult that
        migration before changing this method.

        :param repo_ids:    list of repository IDs. DEFAULTS TO ALL REPO IDs!!!
        :type  repo_ids:    list
        """
        association_collection = RepoContentUnit.get_collection()
        repo_collection = Repo.get_collection()

        # default to all repos if none were specified
        if not repo_ids:
            repo_ids = [repo['id'] for repo in repo_collection.find(fields=['id'])]

        logger.info('regenerating content unit counts for %d repositories' % len(repo_ids))

        for repo_id in repo_ids:
            logger.debug('regenerating content unit count for repository "%s"' % repo_id)
            counts = {}
            cursor = association_collection.find({'repo_id':repo_id})
            type_ids = cursor.distinct('unit_type_id')
            cursor.close()
            for type_id in type_ids:
                spec = {'repo_id': repo_id, 'unit_type_id': type_id}
                counts[type_id] = association_collection.find(spec).count()
            repo_collection.update({'id': repo_id}, {'$set':{'content_unit_counts': counts}}, safe=True)
Example #50
    def unassociate_by_criteria(repo_id, criteria, notify_plugins=True):
        """
        Unassociate units that are matched by the given criteria.

        :param repo_id:        identifies the repo
        :type  repo_id:        str
        :param criteria:       criteria used to select the unit associations to remove
        :param notify_plugins: if true, relevant plugins will be informed of the removal
        :type  notify_plugins: bool
        """
        criteria = UnitAssociationCriteria.from_dict(criteria)
        repo = model.Repository.objects.get_repo_or_missing_resource(repo_id)

        unassociate_units = load_associated_units(repo_id, criteria)

        if len(unassociate_units) == 0:
            return {}

        # Convert the units into transfer units. This happens regardless of whether or not
        # the plugin will be notified as it's used to generate the return result.
        # If all source types have been converted to mongo, search via new style.
        repo_unit_types = set(repo.content_unit_counts.keys())
        if repo_unit_types.issubset(set(plugin_api.list_unit_models())):
            transfer_units = list(
                RepoUnitAssociationManager._units_from_criteria(
                    repo, criteria))
        else:
            transfer_units = None
            if unassociate_units is not None:
                transfer_units = list(create_transfer_units(unassociate_units))

        if notify_plugins:
            remove_from_importer(repo_id, transfer_units)

        unit_map = {}  # maps unit_type_id to a list of unit_ids

        for unit in unassociate_units:
            id_list = unit_map.setdefault(unit['unit_type_id'], [])
            id_list.append(unit['unit_id'])

        collection = RepoContentUnit.get_collection()

        for unit_type_id, unit_ids in unit_map.items():
            spec = {
                'repo_id': repo_id,
                'unit_type_id': unit_type_id,
                'unit_id': {
                    '$in': unit_ids
                }
            }
            collection.remove(spec)

        repo_controller.update_last_unit_removed(repo_id)
        repo_controller.rebuild_content_unit_counts(repo)

        # Match the return type/format as copy
        serializable_units = [u.to_id_dict() for u in transfer_units]

        return {'units_successful': serializable_units}
Example #51
def associate_content_unit_with_repo(content_unit):
    repo_content_unit = RepoContentUnit(PHONY_REPO_ID,
                                        content_unit['_id'],
                                        content_unit['_content_type_id'],
                                        RepoContentUnit.OWNER_TYPE_USER,
                                        PHONY_USER_ID)
    collection = RepoContentUnit.get_collection()
    collection.insert(repo_content_unit, safe=True)
Example #52
 def test_import_unit_files_already_exist_size_mismatch(self, *mocks):
     # Setup
     self.populate()
     pulp_conf.set('server', 'storage_dir', self.parentfs)
     dist = NodesHttpDistributor()
     working_dir = os.path.join(self.childfs, 'working_dir')
     os.makedirs(working_dir)
     repo = Repository(self.REPO_ID, working_dir)
     cfg = self.dist_conf()
     conduit = RepoPublishConduit(self.REPO_ID, constants.HTTP_DISTRIBUTOR)
     dist.publish_repo(repo, conduit, cfg)
     Repo.get_collection().remove()
     RepoDistributor.get_collection().remove()
     RepoContentUnit.get_collection().remove()
     unit_db.clean()
     self.define_plugins()
     parent_content = os.path.join(self.parentfs, 'content')
     child_content = os.path.join(self.childfs, 'content')
     shutil.copytree(parent_content, child_content)
     for fn in os.listdir(child_content):
         path = os.path.join(child_content, fn)
         if os.path.isdir(path):
             continue
         with open(path, 'w') as fp:
             fp.truncate()
     # Test
     importer = NodesHttpImporter()
     publisher = dist.publisher(repo, cfg)
     manifest_url = pathlib.url_join(publisher.base_url, publisher.manifest_path())
     configuration = {
         constants.MANIFEST_URL_KEYWORD: manifest_url,
         constants.STRATEGY_KEYWORD: constants.MIRROR_STRATEGY,
     }
     configuration = PluginCallConfiguration(configuration, {})
     conduit = RepoSyncConduit(
         self.REPO_ID,
         constants.HTTP_IMPORTER,
         RepoContentUnit.OWNER_TYPE_IMPORTER,
         constants.HTTP_IMPORTER)
     pulp_conf.set('server', 'storage_dir', self.childfs)
     importer.sync_repo(repo, conduit, configuration)
     # Verify
     units = conduit.get_units()
     self.assertEquals(len(units), self.NUM_UNITS)
     mock_importer_config_to_nectar_config = mocks[0]
     mock_importer_config_to_nectar_config.assert_called_with(configuration.flatten())
Example #53
    def unassociate_by_criteria(repo_id, criteria, owner_type, owner_id, notify_plugins=True):
        """
        Unassociate units that are matched by the given criteria.

        :param repo_id:        identifies the repo
        :type  repo_id:        str
        :param criteria:       criteria used to select the unit associations to remove
        :param owner_type:     category of the caller who created the association
        :type  owner_type:     str
        :param owner_id:       identifies the caller who created the association
        :type  owner_id:       str
        :param notify_plugins: if true, relevant plugins will be informed of the removal
        :type  notify_plugins: bool
        """
        association_query_manager = manager_factory.repo_unit_association_query_manager()
        unassociate_units = association_query_manager.get_units(repo_id, criteria=criteria)

        if len(unassociate_units) == 0:
            return {}

        unit_map = {}  # maps unit_type_id to a list of unit_ids

        for unit in unassociate_units:
            id_list = unit_map.setdefault(unit['unit_type_id'], [])
            id_list.append(unit['unit_id'])

        collection = RepoContentUnit.get_collection()
        repo_manager = manager_factory.repo_manager()

        for unit_type_id, unit_ids in unit_map.items():
            spec = {'repo_id': repo_id,
                    'unit_type_id': unit_type_id,
                    'unit_id': {'$in': unit_ids}
                    }
            collection.remove(spec, safe=True)

            unique_count = sum(
                1 for unit_id in unit_ids if not RepoUnitAssociationManager.association_exists(
                    repo_id, unit_id, unit_type_id))
            if not unique_count:
                continue

            repo_manager.update_unit_count(repo_id, unit_type_id, -unique_count)

        repo_manager.update_last_unit_removed(repo_id)

        # Convert the units into transfer units. This happens regardless of whether or not
        # the plugin will be notified as it's used to generate the return result.
        unit_type_ids = calculate_associated_type_ids(repo_id, unassociate_units)
        transfer_units = create_transfer_units(unassociate_units, unit_type_ids)

        if notify_plugins:
            remove_from_importer(repo_id, transfer_units)

        # Match the return type/format as copy
        serializable_units = [u.to_id_dict() for u in transfer_units]

        return {'units_successful': serializable_units}
Example #54
 def test_import_cached_manifest_matched(self, mock_get_working, mock_fetch,
                                         *unused):
     # Setup
     self.populate()
     mock_get_working.return_value = self.temp_dir
     with mock_config.patch({'server': {'storage_dir': self.parentfs}}):
         dist = NodesHttpDistributor()
         working_dir = os.path.join(self.childfs, 'working_dir')
         os.makedirs(working_dir)
         repo = Repository(self.REPO_ID, working_dir)
         configuration = self.dist_conf()
         conduit = RepoPublishConduit(self.REPO_ID,
                                      constants.HTTP_DISTRIBUTOR)
         dist.publish_repo(repo, conduit, configuration)
         model.Distributor.objects.delete()
         RepoContentUnit.get_collection().remove()
         unit_db.clean()
         self.define_plugins()
         publisher = dist.publisher(repo, configuration)
         manifest_path = publisher.manifest_path()
         units_path = os.path.join(os.path.dirname(manifest_path),
                                   UNITS_FILE_NAME)
         manifest = Manifest(manifest_path)
         manifest.read()
         shutil.copy(manifest_path,
                     os.path.join(working_dir, MANIFEST_FILE_NAME))
         shutil.copy(units_path, os.path.join(working_dir, UNITS_FILE_NAME))
         # Test
         importer = NodesHttpImporter()
         manifest_url = pathlib.url_join(publisher.base_url, manifest_path)
         configuration = {
             constants.MANIFEST_URL_KEYWORD: manifest_url,
             constants.STRATEGY_KEYWORD: constants.MIRROR_STRATEGY,
         }
         configuration = PluginCallConfiguration(configuration, {})
         conduit = RepoSyncConduit(self.REPO_ID, constants.HTTP_IMPORTER,
                                   Mock())
     with mock_config.patch({'server': {'storage_dir': self.childfs}}):
         with patch('pulp_node.constants.CONTENT_PATH', self.parentfs):
             importer.sync_repo(repo, conduit, configuration)
         # Verify
         units = conduit.get_units()
         self.assertEquals(len(units), self.NUM_UNITS)
         self.assertFalse(mock_fetch.called)
Example #55
 def test_associate_by_id_other_owner(self, mock_repo_qs):
     """
     Tests making a second association using a different owner.
     """
     self.manager.associate_unit_by_id(self.repo_id, 'type-1', 'unit-1')
     self.manager.associate_unit_by_id(self.repo_id, 'type-1', 'unit-1')
     repo_units = list(RepoContentUnit.get_collection().find(
         {'repo_id': self.repo_id}))
     self.assertEqual(1, len(repo_units))
     self.assertEqual('unit-1', repo_units[0]['unit_id'])
Example #56
    def unassociate_by_criteria(self, repo_id, criteria, owner_type, owner_id, notify_plugins=True):
        """
        Unassociate units that are matched by the given criteria.
        @param repo_id: identifies the repo
        @type repo_id: str
        @param criteria: criteria used to select the unit associations to remove
        @param owner_type: category of the caller who created the association
        @type owner_type: str
        @param owner_id: identifies the caller who created the association
        @type owner_id: str
        @param notify_plugins: if true, relevant plugins will be informed of the
               removal
        @type  notify_plugins: bool
        """
        association_query_manager = manager_factory.repo_unit_association_query_manager()
        unassociate_units = association_query_manager.get_units(repo_id, criteria=criteria)

        if len(unassociate_units) == 0:
            return []

        unit_map = {} # maps unit_type_id to a list of unit_ids

        for unit in unassociate_units:
            id_list = unit_map.setdefault(unit['unit_type_id'], [])
            id_list.append(unit['unit_id'])

        collection = RepoContentUnit.get_collection()
        repo_manager = manager_factory.repo_manager()

        for unit_type_id, unit_ids in unit_map.items():
            spec = {'repo_id': repo_id,
                    'unit_type_id': unit_type_id,
                    'unit_id': {'$in': unit_ids},
                    'owner_type': owner_type,
                    'owner_id': owner_id}
            collection.remove(spec, safe=True)

            unique_count = sum(1 for unit_id in unit_ids
                               if not self.association_exists(repo_id, unit_id, unit_type_id))
            if not unique_count:
                continue

            repo_manager.update_unit_count(repo_id, unit_type_id, -unique_count)

        # Convert the units into transfer units. This happens regardless of whether or not
        # the plugin will be notified as it's used to generate the return result.
        unit_type_ids = calculate_associated_type_ids(repo_id, unassociate_units)
        transfer_units = create_transfer_units(unassociate_units, unit_type_ids)

        if notify_plugins:
            remove_from_importer(repo_id, transfer_units)

        # Match the return type/format as copy
        serializable_units = [u.to_id_dict() for u in transfer_units]
        return serializable_units
Example #57
    def test_import(self, *mocks):
        # Setup
        self.populate()
        max_concurrency = 5
        max_bandwidth = 12345

        with mock_config.patch({'server': {'storage_dir': self.parentfs}}):
            dist = NodesHttpDistributor()
            working_dir = os.path.join(self.childfs, 'working_dir')
            os.makedirs(working_dir)
            repo = Repository(self.REPO_ID, working_dir)
            cfg = self.dist_conf()
            conduit = RepoPublishConduit(self.REPO_ID,
                                         constants.HTTP_DISTRIBUTOR)
            dist.publish_repo(repo, conduit, cfg)
            model.Repository.drop_collection()
            RepoDistributor.get_collection().remove()
            RepoContentUnit.get_collection().remove()
            unit_db.clean()
            self.define_plugins()
            # Test
            importer = NodesHttpImporter()
            publisher = dist.publisher(repo, cfg)
            manifest_url = pathlib.url_join(publisher.base_url,
                                            publisher.manifest_path())
            configuration = {
                constants.MANIFEST_URL_KEYWORD: manifest_url,
                constants.STRATEGY_KEYWORD: constants.MIRROR_STRATEGY,
                importer_constants.KEY_MAX_DOWNLOADS: max_concurrency,
                importer_constants.KEY_MAX_SPEED: max_bandwidth,
            }
            configuration = PluginCallConfiguration(configuration, {})
            conduit = RepoSyncConduit(self.REPO_ID, constants.HTTP_IMPORTER)

        with mock_config.patch({'server': {'storage_dir': self.childfs}}):
            importer.sync_repo(repo, conduit, configuration)
            # Verify
            units = conduit.get_units()
            self.assertEquals(len(units), self.NUM_UNITS)
            mock_importer_config_to_nectar_config = mocks[0]
            mock_importer_config_to_nectar_config.assert_called_with(
                configuration.flatten())
Example #58
    def test_import_cached_manifest_units_invalid(self, *unused):
        # Setup
        self.populate()

        with mock_config.patch({'server': {'storage_dir': self.parentfs}}):
            dist = NodesHttpDistributor()
            working_dir = os.path.join(self.childfs, 'working_dir')
            os.makedirs(working_dir)
            repo = Repository(self.REPO_ID, working_dir)
            configuration = self.dist_conf()
            conduit = RepoPublishConduit(self.REPO_ID,
                                         constants.HTTP_DISTRIBUTOR)
            dist.publish_repo(repo, conduit, configuration)
            model.Repository.drop_collection()
            RepoDistributor.get_collection().remove()
            RepoContentUnit.get_collection().remove()
            unit_db.clean()
            self.define_plugins()
            publisher = dist.publisher(repo, configuration)
            manifest_path = publisher.manifest_path()
            manifest = Manifest(manifest_path)
            manifest.read()
            shutil.copy(manifest_path,
                        os.path.join(working_dir, MANIFEST_FILE_NAME))
            with open(os.path.join(working_dir, UNITS_FILE_NAME), 'w+') as fp:
                fp.write('invalid-units')
            # Test
            importer = NodesHttpImporter()
            manifest_url = pathlib.url_join(publisher.base_url, manifest_path)
            configuration = {
                constants.MANIFEST_URL_KEYWORD: manifest_url,
                constants.STRATEGY_KEYWORD: constants.MIRROR_STRATEGY,
            }
            configuration = PluginCallConfiguration(configuration, {})
            conduit = RepoSyncConduit(self.REPO_ID, constants.HTTP_IMPORTER)

        with mock_config.patch({'server': {'storage_dir': self.childfs}}):
            importer.sync_repo(repo, conduit, configuration)
            # Verify
            units = conduit.get_units()
            self.assertEquals(len(units), self.NUM_UNITS)
Example #59
        def make_association(repo_id, type_id, unit_id, owner_type, owner_id, index):
            """
            Utility to perform standard association test data stuff such as
            setting the created/updated timestamps.
            """

            association_collection = RepoContentUnit.get_collection()
            self.association_manager.associate_unit_by_id(repo_id, type_id, unit_id, owner_type, owner_id)
            a = association_collection.find_one({'repo_id' : repo_id, 'unit_type_id' : type_id, 'unit_id' : unit_id})
            a['created'] = self.timestamps[index]
            a['updated'] = self.timestamps[index]
            association_collection.save(a, safe=True)