def setUp(self): super(Migration0004Tests, self).setUp() # Special way to import modules that start with a number self.migration = _import_all_the_way( 'pulp_rpm.plugins.migrations.0004_pkg_group_category_repoid') factory.initialize() types_db.update_database([TYPE_DEF_GROUP, TYPE_DEF_CATEGORY]) # Create the repositories necessary for the tests self.source_repo_id = 'source-repo' # where units were copied from with the bad code self.dest_repo_id = 'dest-repo' # where bad units were copied to source_repo = Repo(self.source_repo_id, '') Repo.get_collection().insert(source_repo, safe=True) dest_repo = Repo(self.dest_repo_id, '') Repo.get_collection().insert(dest_repo, safe=True) source_importer = RepoImporter(self.source_repo_id, 'yum_importer', 'yum_importer', {}) RepoImporter.get_collection().insert(source_importer, safe=True) dest_importer = RepoImporter(self.dest_repo_id, 'yum_importer', 'yum_importer', {}) RepoImporter.get_collection().insert(dest_importer, safe=True)
def setUp(self):
    """
    Create six repos with distributors whose relative_url configs overlap in
    various ways, so conflict-detection in RepoConfigConduit can be exercised.
    """
    super(RepoConfigConduitTests, self).setUp()
    mock_plugins.install()
    manager_factory.initialize()

    self.repo_manager = manager_factory.repo_manager()
    self.distributor_manager = manager_factory.repo_distributor_manager()

    # Populate the database with a repo with units
    self.repo_manager.create_repo('repo-1')
    self.distributor_manager.add_distributor(
        'repo-1', 'mock-distributor', {"relative_url": "/a/bc/d"}, True,
        distributor_id='dist-1')
    self.distributor_manager.add_distributor(
        'repo-1', 'mock-distributor', {"relative_url": "/a/c"}, True,
        distributor_id='dist-2')

    self.repo_manager.create_repo('repo-2')
    self.distributor_manager.add_distributor(
        'repo-2', 'mock-distributor', {"relative_url": "/a/bc/e"}, True,
        distributor_id='dist-3')

    # repo-3 has no relative_url at all
    self.repo_manager.create_repo('repo-3')
    self.distributor_manager.add_distributor('repo-3', 'mock-distributor', {}, True,
                                             distributor_id='dist-4')

    # repo-4's relative_url collides with repo-5's repo id
    self.repo_manager.create_repo('repo-4')
    self.distributor_manager.add_distributor(
        'repo-4', 'mock-distributor', {"relative_url": "repo-5"}, True,
        distributor_id='dist-5')

    self.repo_manager.create_repo('repo-5')
    self.distributor_manager.add_distributor(
        'repo-5', 'mock-distributor', {"relative_url": "a/bcd/e"}, True,
        distributor_id='dist-1')

    self.repo_manager.create_repo('repo-6')
    self.distributor_manager.add_distributor(
        'repo-6', 'mock-distributor', {"relative_url": "a/bcde/f/"}, True,
        distributor_id='dist-1')

    self.conduit = RepoConfigConduit('rpm')
def setUp(self):
    """Wire up an AddUnitMixin against a fixed repo/importer pair."""
    manager_factory.initialize()
    repo_id, importer_id = 'add-repo', 'add-importer'
    self.repo_id = repo_id
    self.importer_id = importer_id
    self.mixin = mixins.AddUnitMixin(repo_id, importer_id)
def setUp(self):
    """
    Add distributors with overlapping relative_url configs for six repo ids,
    with the Repository model patched out so no real repo documents are needed.
    """
    super(RepoConfigConduitTests, self).setUp()
    mock_plugins.install()
    manager_factory.initialize()
    self.distributor_manager = manager_factory.repo_distributor_manager()

    with mock.patch("pulp.server.managers.repo.distributor.model.Repository.objects"):
        # Populate the database with a repo with units
        self.distributor_manager.add_distributor(
            "repo-1", "mock-distributor", {"relative_url": "/a/bc/d"}, True,
            distributor_id="dist-1"
        )
        self.distributor_manager.add_distributor(
            "repo-1", "mock-distributor", {"relative_url": "/a/c"}, True,
            distributor_id="dist-2"
        )
        self.distributor_manager.add_distributor(
            "repo-2", "mock-distributor", {"relative_url": "/a/bc/e"}, True,
            distributor_id="dist-3"
        )
        # repo-3 carries no relative_url
        self.distributor_manager.add_distributor("repo-3", "mock-distributor", {}, True,
                                                 distributor_id="dist-4")
        # repo-4's relative_url collides with the repo id "repo-5"
        self.distributor_manager.add_distributor(
            "repo-4", "mock-distributor", {"relative_url": "repo-5"}, True,
            distributor_id="dist-5"
        )
        self.distributor_manager.add_distributor(
            "repo-5", "mock-distributor", {"relative_url": "a/bcd/e"}, True,
            distributor_id="dist-1"
        )
        self.distributor_manager.add_distributor(
            "repo-6", "mock-distributor", {"relative_url": "a/bcde/f/"}, True,
            distributor_id="dist-1"
        )

    self.conduit = RepoConfigConduit("rpm")
def setUp(self):
    """
    Add distributors with overlapping relative_url configs via the distributor
    controller, with the Repository model patched out.
    """
    super(RepoConfigConduitTests, self).setUp()
    mock_plugins.install()
    manager_factory.initialize()

    with mock.patch('pulp.server.controllers.distributor.model.Repository.objects'):
        # Populate the database with a repo with units
        dist_controller.add_distributor(
            'repo-1', 'mock-distributor', {"relative_url": "/a/bc/d"}, True,
            distributor_id='dist-1')
        dist_controller.add_distributor(
            'repo-1', 'mock-distributor', {"relative_url": "/a/c"}, True,
            distributor_id='dist-2')
        dist_controller.add_distributor(
            'repo-2', 'mock-distributor', {"relative_url": "/a/bc/e"}, True,
            distributor_id='dist-3')
        # repo-3 carries no relative_url
        dist_controller.add_distributor(
            'repo-3', 'mock-distributor', {}, True, distributor_id='dist-4')
        # repo-4's relative_url collides with the repo id 'repo-5'
        dist_controller.add_distributor(
            'repo-4', 'mock-distributor', {"relative_url": "repo-5"}, True,
            distributor_id='dist-5')
        dist_controller.add_distributor(
            'repo-5', 'mock-distributor', {"relative_url": "a/bcd/e"}, True,
            distributor_id='dist-1')
        dist_controller.add_distributor(
            'repo-6', 'mock-distributor', {"relative_url": "a/bcde/f/"}, True,
            distributor_id='dist-1')

    self.conduit = RepoConfigConduit('rpm')
def main():
    """
    Populate ldap server with some test data
    """
    print("See populate.log for descriptive output.")
    factory.initialize()
    connection.initialize()
    # NOTE(review): the credentials and server URL below look redacted
    # ('******' / '*****'); restore real test values before running.
    # NOTE(review): 'userid' and 'lattr' are referenced but never defined in
    # this function -- a user-creation loop appears to have been lost from
    # this code; confirm against version control before relying on it.
    ldapserv = LDAPConnection(admin='cn=Directory Manager',
                              password='******',
                              server='ldap://*****:*****@redhat.com' % userid)
    lattr.setDN("uid=%s,dc=rdu,dc=redhat,dc=com" % userid)
    attr, dn = lattr.buildBody()
    ldapserv.add_users(dn, attrs=attr)
    ldapserv.lookup_user("dc=rdu,dc=redhat,dc=com", "pulpuser1")
    ldapserv.authenticate_user("dc=rdu,dc=redhat,dc=com", "pulpuser1", "redhat")
    ldapserv.disconnect()
def setUp(self):
    """Build a RepoGroupDistributorScratchPadMixin for a fixed group/distributor."""
    manager_factory.initialize()
    group_id, distributor_id = 'group-id', 'group-dist'
    self.group_id = group_id
    self.distributor_id = distributor_id
    self.mixin = mixins.RepoGroupDistributorScratchPadMixin(group_id, distributor_id)
def test_syntactic_sugar_methods(self):
    """
    Tests the syntactic sugar methods for retrieving specific managers.
    """
    # Setup
    factory.initialize()

    # Test: each sugar method must hand back an instance of its manager class.
    sugar_to_manager_class = [
        (factory.authentication_manager, AuthenticationManager),
        (factory.cert_generation_manager, CertGenerationManager),
        (factory.certificate_manager, CertificateManager),
        (factory.password_manager, PasswordManager),
        (factory.permission_manager, PermissionManager),
        (factory.permission_query_manager, PermissionQueryManager),
        (factory.role_manager, RoleManager),
        (factory.role_query_manager, RoleQueryManager),
        (factory.user_manager, UserManager),
        (factory.user_query_manager, UserQueryManager),
        (factory.repo_manager, RepoManager),
        (factory.repo_unit_association_manager, RepoUnitAssociationManager),
        (factory.repo_publish_manager, RepoPublishManager),
        (factory.repo_query_manager, RepoQueryManager),
        (factory.repo_sync_manager, RepoSyncManager),
        (factory.content_manager, ContentManager),
        (factory.content_query_manager, ContentQueryManager),
        (factory.content_upload_manager, ContentUploadManager),
        (factory.consumer_manager, ConsumerManager),
        (factory.topic_publish_manager, TopicPublishManager),
    ]
    for sugar_method, manager_class in sugar_to_manager_class:
        self.assertTrue(isinstance(sugar_method(), manager_class))
def _initialize_pulp():
    """
    Bring up every pulp server subsystem exactly once.

    Safe to call repeatedly; only the first call does any work.
    """
    # XXX ORDERING COUNTS
    # This initialization order is very sensitive, and each touches a number of
    # sub-systems in pulp. If you get this wrong, you will have pulp tripping
    # over itself on start up. If you do not know where to add something, ASK!
    global _IS_INITIALIZED, STACK_TRACER
    if _IS_INITIALIZED:
        return
    _IS_INITIALIZED = True

    # check our db version and other support
    check_version()

    # pulp generic content initialization
    manager_factory.initialize()
    plugin_api.initialize()

    # new async dispatch initialization
    dispatch_factory.initialize()

    # ensure necessary infrastructure
    ensure_builtin_roles()
    ensure_admin()

    # agent services
    AgentServices.start()

    # setup debugging, if configured
    if config.config.getboolean('server', 'debugging_mode'):
        STACK_TRACER = StacktraceDumper()
        STACK_TRACER.start()
def _auto_manage_db(options):
    """
    Find and apply all available database migrations, and install or update
    all available content types.

    :param options: The command line parameters from the user.
    :return: os.EX_OK on success
    """
    message = _('Loading content types.')
    logger.info(message)
    load_content_types()
    message = _('Content types loaded.')
    logger.info(message)

    message = _('Ensuring the admin role and user are in place.')
    logger.info(message)
    # Due to the silliness of the factory, we have to initialize it because the UserManager and
    # RoleManager are going to try to use it.
    factory.initialize()
    role_manager = RoleManager()
    role_manager.ensure_super_user_role()
    user_manager = UserManager()
    user_manager.ensure_admin()
    message = _('Admin role and user are in place.')
    logger.info(message)

    message = _('Beginning database migrations.')
    logger.info(message)
    migrate_database(options)
    message = _('Database migrations complete.')
    logger.info(message)

    return os.EX_OK
def setUp(self): super(Migration0004Tests, self).setUp() # Special way to import modules that start with a number self.migration = _import_all_the_way( 'pulp_rpm.plugins.migrations.0004_pkg_group_category_repoid') factory.initialize() api.initialize(False) types_db.update_database([TYPE_DEF_GROUP, TYPE_DEF_CATEGORY]) # Create the repositories necessary for the tests self.source_repo_id = 'source-repo' # where units were copied from with the bad code self.dest_repo_id = 'dest-repo' # where bad units were copied to source_repo = model.Repository(repo_id=self.source_repo_id) source_repo.save() dest_repo = model.Repository(repo_id=self.dest_repo_id) dest_repo.save() source_importer = model.Importer(self.source_repo_id, 'yum_importer', {}) source_importer.save() dest_importer = model.Importer(self.dest_repo_id, 'yum_importer', {}) dest_importer.save()
def setUp(self):
    """Build a DistributorScratchPadMixin for a fixed repo/distributor pair."""
    manager_factory.initialize()
    repo_id, distributor_id = 'dsp-repo', 'dsp-distributor'
    self.repo_id = repo_id
    self.distributor_id = distributor_id
    self.mixin = mixins.DistributorScratchPadMixin(repo_id, distributor_id)
def setUp(self):
    """Build an ImporterScratchPadMixin for a fixed repo/importer pair."""
    manager_factory.initialize()
    repo_id, importer_id = 'isp-repo', 'isp-importer'
    self.repo_id = repo_id
    self.importer_id = importer_id
    self.mixin = mixins.ImporterScratchPadMixin(repo_id, importer_id)
def setUp(self):
    """
    Create four repos with distributors whose relative_url configs overlap in
    various ways, so conflict-detection in RepoConfigConduit can be exercised.
    """
    super(RepoConfigConduitTests, self).setUp()
    mock_plugins.install()
    manager_factory.initialize()

    self.repo_manager = manager_factory.repo_manager()
    self.distributor_manager = manager_factory.repo_distributor_manager()

    # Populate the database with a repo with units
    self.repo_manager.create_repo('repo-1')
    self.distributor_manager.add_distributor('repo-1', 'mock-distributor',
                                             {"relative_url": "/a/bc/d"}, True,
                                             distributor_id='dist-1')
    self.distributor_manager.add_distributor('repo-1', 'mock-distributor',
                                             {"relative_url": "/a/c"}, True,
                                             distributor_id='dist-2')

    self.repo_manager.create_repo('repo-2')
    self.distributor_manager.add_distributor('repo-2', 'mock-distributor',
                                             {"relative_url": "/a/bc/e"}, True,
                                             distributor_id='dist-3')

    # repo-3 has no relative_url at all
    self.repo_manager.create_repo('repo-3')
    self.distributor_manager.add_distributor('repo-3', 'mock-distributor', {}, True,
                                             distributor_id='dist-4')

    # repo-4's relative_url points at another repo id
    self.repo_manager.create_repo('repo-4')
    self.distributor_manager.add_distributor('repo-4', 'mock-distributor',
                                             {"relative_url": "/repo-5"}, True,
                                             distributor_id='dist-5')

    self.conduit = RepoConfigConduit('rpm')
def _initialize_pulp():
    """
    Bring up every pulp server subsystem exactly once.

    :raise InitializationException: if the database is not migrated to the
        current version or one or more plugins fail to initialize
    """
    # This initialization order is very sensitive, and each touches a number of
    # sub-systems in pulp. If you get this wrong, you will have pulp tripping
    # over itself on start up.
    global _IS_INITIALIZED, STACK_TRACER
    if _IS_INITIALIZED:
        return

    # Verify the database has been migrated to the correct version. This is
    # very likely a reason the server will fail to start.
    try:
        migration_models.check_package_versions()
    except Exception:
        msg = 'The database has not been migrated to the current version. '
        msg += 'Run pulp-manage-db and restart the application.'
        # Python 2 three-expression raise: re-raise as an
        # InitializationException while preserving the original traceback.
        raise InitializationException(msg), None, sys.exc_info()[2]

    # Load plugins and resolve against types. This is also a likely candidate
    # for causing the server to fail to start.
    try:
        plugin_api.initialize()
    except Exception:
        msg = 'One or more plugins failed to initialize. If a new type has '
        msg += 'been added, run pulp-manage-db to load the type into the '
        msg += 'database and restart the application.'
        raise InitializationException(msg), None, sys.exc_info()[2]

    # There's a significantly smaller chance the following calls will fail.
    # The previous two are likely user errors, but the remainder represent
    # something gone horribly wrong. As such, I'm not going to account for each
    # and instead simply let the exception itself bubble up.

    # Load the mappings of manager type to managers
    manager_factory.initialize()

    # Initialize the tasking subsystem
    dispatch_factory.initialize()

    # Ensure the minimal auth configuration
    role_manager = manager_factory.role_manager()
    role_manager.ensure_super_user_role()
    user_manager = manager_factory.user_manager()
    user_manager.ensure_admin()

    # database document reaper
    reaper.initialize()

    # agent services
    AgentServices.start()

    # Setup debugging, if configured
    if config.config.getboolean('server', 'debugging_mode'):
        STACK_TRACER = StacktraceDumper()
        STACK_TRACER.start()

    # If we got this far, it was successful, so flip the flag
    _IS_INITIALIZED = True
def test_fire(self, mock_get_collection, mock_getbool, mock_smtp, mock_publish):
    """
    Firing a repo-sync-finished event must deliver mail for the configured
    listener (two sendmail calls: one per recipient/message).
    """
    # verify that the event system will trigger listeners of this type
    mock_get_collection.return_value.find.return_value = [self.event_doc]
    event = data.Event(data.TYPE_REPO_SYNC_FINISHED, 'stuff')
    factory.initialize()
    factory.event_fire_manager()._do_fire(event)

    # verify that the mail event handler was called and processed something.
    # BUG FIX: assertTrue(call_count, 2) treated 2 as the failure *message*,
    # so the assertion passed for ANY truthy call_count; assertEqual actually
    # pins the expected number of sendmail calls.
    self.assertEqual(mock_smtp.return_value.sendmail.call_count, 2)
def setUpClass(cls):
    """
    One-time setup: ensure /tmp/pulp exists, load the test override config
    (with logging paused around the read), and point distro storage at a
    temp dir.
    """
    if not os.path.exists('/tmp/pulp'):
        os.makedirs('/tmp/pulp')
    # Pause logging while the config is swapped so handlers pick up the
    # override settings.
    stop_logging()
    config_filename = os.path.join(TEST_DATA_DIR, 'test-override-pulp.conf')
    config.config.read(config_filename)
    start_logging()
    manager_factory.initialize()
    constants.DISTRIBUTION_STORAGE_PATH = TEMP_DISTRO_STORAGE_DIR
def _auto_manage_db(options):
    """
    Find and apply all available database migrations, and install or update
    all available content types.

    :param options: The command line parameters from the user.
    :return: os.EX_OK on success; 1 if --dry-run found work left to do
    """
    unperformed_migrations = False

    message = _('Loading content types.')
    _logger.info(message)
    # Note that if dry_run is False, None is always returned
    old_content_types = load_content_types(dry_run=options.dry_run)
    if old_content_types:
        for content_type in old_content_types:
            message = _(
                'Would have created or updated the following type definition: ' +
                content_type.id)
            _logger.info(message)
    message = _('Content types loaded.')
    _logger.info(message)

    message = _('Ensuring the admin role and user are in place.')
    _logger.info(message)
    # Due to the silliness of the factory, we have to initialize it because the UserManager and
    # RoleManager are going to try to use it.
    factory.initialize()
    role_manager = RoleManager()
    if options.dry_run:
        if not role_manager.get_role(SUPER_USER_ROLE):
            unperformed_migrations = True
            message = _('Would have created the admin role.')
            _logger.info(message)
    else:
        role_manager.ensure_super_user_role()

    user_manager = UserManager()
    if options.dry_run:
        if not user_manager.get_admins():
            unperformed_migrations = True
            message = _('Would have created the default admin user.')
            _logger.info(message)
    else:
        user_manager.ensure_admin()
    message = _('Admin role and user are in place.')
    _logger.info(message)

    message = _('Beginning database migrations.')
    _logger.info(message)
    migrate_database(options)
    message = _('Database migrations complete.')
    _logger.info(message)

    if unperformed_migrations:
        return 1

    return os.EX_OK
def _auto_manage_db(options):
    """
    Find and apply all available database migrations, and install or update
    all available content types.

    :param options: The command line parameters from the user.
    :return: os.EX_OK on success; 1 if --dry-run found work left to do
    """
    unperformed_migrations = False

    message = _('Loading content types.')
    _logger.info(message)
    # Note that if dry_run is False, None is always returned
    old_content_types = load_content_types(dry_run=options.dry_run)
    if old_content_types:
        for content_type in old_content_types:
            message = _(
                'Would have created or updated the following type definition: ' +
                content_type.id)
            _logger.info(message)
    message = _('Content types loaded.')
    _logger.info(message)

    message = _('Ensuring the admin role and user are in place.')
    _logger.info(message)
    # Due to the silliness of the factory, we have to initialize it because the UserManager and
    # RoleManager are going to try to use it.
    factory.initialize()
    role_manager = RoleManager()
    if options.dry_run:
        if not role_manager.get_role(SUPER_USER_ROLE):
            unperformed_migrations = True
            message = _('Would have created the admin role.')
            _logger.info(message)
    else:
        role_manager.ensure_super_user_role()

    user_manager = managers.UserManager()
    if options.dry_run:
        if not user_manager.get_admins():
            unperformed_migrations = True
            message = _('Would have created the default admin user.')
            _logger.info(message)
    else:
        user_manager.ensure_admin()
    message = _('Admin role and user are in place.')
    _logger.info(message)

    message = _('Beginning database migrations.')
    _logger.info(message)
    migrate_database(options)
    message = _('Database migrations complete.')
    _logger.info(message)

    if unperformed_migrations:
        return 1

    return os.EX_OK
def test_fire(self, mock_get_collection, mock_getbool, mock_smtp, mock_publish,
              mock_task_ser):
    """
    Firing a repo-sync-finished event must deliver mail for the configured
    listener (two sendmail calls expected).
    """
    # verify that the event system will trigger listeners of this type
    mock_get_collection.return_value.find.return_value = [self.event_doc]
    mock_task_ser.return_value = 'serialized task'
    event = data.Event(data.TYPE_REPO_SYNC_FINISHED, 'stuff')
    factory.initialize()
    factory.event_fire_manager()._do_fire(event)

    # verify that the mail event handler was called and processed something
    self.assertEqual(mock_smtp.return_value.sendmail.call_count, 2)
def setUp(self, mock_repo_qs):
    """
    Install mock plugins, add one distributor to 'repo-1' and build the
    RepoPublishConduit under test.
    """
    super(RepoPublishConduitTests, self).setUp()
    mock_plugins.install()
    manager_factory.initialize()

    # Populate the database with a repo with units
    dist_controller.add_distributor('repo-1', 'mock-distributor', {}, True,
                                    distributor_id='dist-1')
    self.conduit = RepoPublishConduit('repo-1', 'dist-1')
def setUp(self):
    """Wire up an AddUnitMixin with explicit association-owner details."""
    manager_factory.initialize()
    repo_id, importer_id = 'add-repo', 'add-importer'
    owner_type, owner_id = 'importer', 'imp-id'
    self.repo_id = repo_id
    self.importer_id = importer_id
    self.association_owner_type = owner_type
    self.association_owner_id = owner_id
    self.mixin = mixins.AddUnitMixin(repo_id, importer_id, owner_type, owner_id)
def setUpClass(cls):
    """
    One-time setup: ensure /tmp/pulp exists, load the test override config
    and connect to the test database named in it.
    """
    if not os.path.exists('/tmp/pulp'):
        os.makedirs('/tmp/pulp')
    # Pause logging while the config is swapped so handlers pick up the
    # override settings.
    stop_logging()
    config_filename = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'data',
                                   'test-override-pulp.conf')
    config.config.read(config_filename)
    start_logging()
    name = config.config.get('database', 'name')
    connection.initialize(name)
    manager_factory.initialize()
def setUpClass(cls):
    """
    One-time setup: ensure /tmp/pulp exists, load the pulp_rpm test override
    config, connect to the test database and point distro storage at a
    temp dir.
    """
    if not os.path.exists('/tmp/pulp'):
        os.makedirs('/tmp/pulp')
    # Pause logging while the config is swapped so handlers pick up the
    # override settings.
    stop_logging()
    config_filename = os.path.join(os.path.abspath(os.path.dirname(__file__)),
                                   '../../../pulp_rpm/test/unit/data',
                                   'test-override-pulp.conf')
    config.config.read(config_filename)
    start_logging()
    name = config.config.get('database', 'name')
    connection.initialize(name)
    manager_factory.initialize()
    constants.DISTRIBUTION_STORAGE_PATH = TEMP_DISTRO_STORAGE_DIR
def preserve_custom_metadata_on_repo_scratchpad():
    """
    Look up all the yum based repos in pulp, grab any custom (non-base)
    repodata from the importer working dir and store it on each repo's
    scratchpad.
    """
    factory.initialize()
    repos = factory.repo_query_manager().find_with_importer_type(
        "yum_importer")
    if not repos:
        _log.debug("No repos found to perform db migrate")
        return
    repo_ids = [repo['id'] for repo in repos]
    for repo_id in repo_ids:
        _log.debug("Processing repo %s" % repo_id)
        repo_scratchpad = factory.repo_manager().get_repo_scratchpad(repo_id)
        if repo_scratchpad.get("repodata"):
            # repo scratchpad already has repodata, skip migration
            _log.debug(
                "repo [%s] scratchpad already has repodata, skip migration" %
                repo_id)
            continue
        repo_working_dir = importer_working_dir('yum_importer', repo_id)
        importer_repodata_dir = os.path.join(repo_working_dir, repo_id,
                                             "repodata")
        repomd_xml_path = os.path.join(importer_repodata_dir, "repomd.xml")
        if not os.path.exists(repomd_xml_path):
            # repodata doesn't exist on filesystem cannot lookup custom data, continue to next
            continue
        ftypes = util.get_repomd_filetypes(repomd_xml_path)
        base_ftypes = [
            'primary', 'primary_db', 'filelists_db', 'filelists', 'other',
            'other_db', 'group', 'group_gz', 'updateinfo', 'updateinfo_db'
        ]
        for ftype in ftypes:
            if ftype in base_ftypes:
                # no need to process these again
                continue
            filetype_path = os.path.join(
                importer_repodata_dir,
                os.path.basename(
                    util.get_repomd_filetype_path(repomd_xml_path, ftype)))
            # gzip-compressed metadata must be decompressed; close the handle
            # either way (the previous code leaked open file objects).
            opener = gzip.open if filetype_path.endswith('.gz') else open
            fh = opener(filetype_path)
            try:
                data = fh.read().decode("utf-8", "replace")
            finally:
                fh.close()
            # BUG FIX: the guard above only skips repos whose scratchpad has a
            # truthy "repodata" entry; a scratchpad missing the key entirely
            # used to raise KeyError here. setdefault covers both cases.
            repo_scratchpad.setdefault("repodata", {})[ftype] = data
        # set the custom metadata on scratchpad
        factory.repo_manager().set_repo_scratchpad(repo_id, repo_scratchpad)
        _log.info("Updated repo [%s] scratchpad with new custom repodata" %
                  repo_id)
def setUp(self):
    """
    Install mock plugins and build the ImporterScratchPadMixin under test.
    """
    super(ImporterScratchPadMixinTests, self).setUp()
    mock_plugins.install()
    self.importer_manager = RepoImporterManager()

    self.repo_id = 'repo-1'
    # NOTE(review): self.conduit and self.mixin below are both
    # ImporterScratchPadMixin instances built with different importer ids
    # ('test-importer' vs 'isp-importer') -- confirm both are intentional.
    self.conduit = mixins.ImporterScratchPadMixin(self.repo_id, 'test-importer')
    manager_factory.initialize()
    self.importer_id = 'isp-importer'
    self.mixin = mixins.ImporterScratchPadMixin(self.repo_id, self.importer_id)
def test_get_manager(self):
    """
    Tests retrieving a manager instance for a valid manager mapping.
    """
    # Setup
    factory.initialize()

    # Test
    retrieved = factory.get_manager(factory.TYPE_REPO)

    # Verify: a concrete RepoManager instance comes back
    self.assertTrue(retrieved is not None)
    self.assertTrue(isinstance(retrieved, RepoManager))
def setUp(self):
    """
    Create a repo group with one group distributor and build the
    RepoGroupPublishConduit under test.
    """
    super(RepoGroupPublishConduitTests, self).setUp()
    mock_plugins.install()
    manager_factory.initialize()

    self.group_manager = manager_factory.repo_group_manager()
    self.distributor_manager = manager_factory.repo_group_distributor_manager()

    self.group_id = 'conduit-group'
    self.distributor_id = 'conduit-distributor'
    self.group_manager.create_repo_group(self.group_id)
    self.distributor_manager.add_distributor(self.group_id, 'mock-group-distributor', {},
                                             distributor_id=self.distributor_id)

    self.conduit = RepoGroupPublishConduit(self.group_id, self.distributor_id)
def setUp(self):
    """
    Create scratch working/publish directories and redirect the master
    publish dir into them; the original value is saved for tearDown.
    """
    super(BaseYumDistributorPublishTests, self).setUp()

    manager_factory.initialize()

    self.working_dir = tempfile.mkdtemp(prefix='working_')
    self.published_dir = tempfile.mkdtemp(prefix='published_')
    self.master_dir = os.path.join(self.published_dir, 'master')

    self.repo_id = 'yum-distributor-publish-test'
    self.publisher = None

    # make sure the master dir is someplace we can actually write to
    self._original_master_dir = configuration.MASTER_PUBLISH_DIR
    configuration.MASTER_PUBLISH_DIR = self.master_dir
def setUpClass(cls):
    """
    One-time setup: load the test pulp.conf, reset the storage dir and
    connect to the test database.
    """
    if not os.path.exists(cls.TMP_ROOT):
        os.makedirs(cls.TMP_ROOT)
    # Pause logging while the config is swapped so handlers pick up the
    # test settings.
    stop_logging()
    path = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'data', 'pulp.conf')
    pulp_conf.read(path)
    start_logging()
    storage_dir = pulp_conf.get('server', 'storage_dir')
    if not os.path.exists(storage_dir):
        os.makedirs(storage_dir)
    # BUG FIX: shutil.rmtree() does not expand shell globs, so the previous
    # rmtree(storage_dir + '/*', ignore_errors=True) call silently did
    # nothing. Remove each entry under the storage dir explicitly instead.
    for entry in os.listdir(storage_dir):
        entry_path = os.path.join(storage_dir, entry)
        if os.path.isdir(entry_path):
            shutil.rmtree(entry_path, ignore_errors=True)
        else:
            os.remove(entry_path)
    name = pulp_conf.get('database', 'name')
    connection.initialize(name)
    managers.initialize()
def setUpClass(cls):
    """
    One-time setup: force Celery eager mode, load the test pulp.conf and
    reset the storage dir.
    """
    # This will make Celery tasks run synchronously
    celery_instance.celery.conf.CELERY_ALWAYS_EAGER = True
    if not os.path.exists(cls.TMP_ROOT):
        os.makedirs(cls.TMP_ROOT)
    # Pause logging while the config is swapped so handlers pick up the
    # test settings.
    stop_logging()
    path = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'data', 'pulp.conf')
    pulp_conf.read(path)
    start_logging()
    storage_dir = pulp_conf.get('server', 'storage_dir')
    if not os.path.exists(storage_dir):
        os.makedirs(storage_dir)
    # BUG FIX: shutil.rmtree() does not expand shell globs, so the previous
    # rmtree(storage_dir + '/*', ignore_errors=True) call silently did
    # nothing. Remove each entry under the storage dir explicitly instead.
    for entry in os.listdir(storage_dir):
        entry_path = os.path.join(storage_dir, entry)
        if os.path.isdir(entry_path):
            shutil.rmtree(entry_path, ignore_errors=True)
        else:
            os.remove(entry_path)
    managers.initialize()
def setUpClass(cls):
    """
    One-time setup: load the test pulp.conf, reset the storage dir and
    connect to the test database.
    """
    if not os.path.exists(cls.TMP_ROOT):
        os.makedirs(cls.TMP_ROOT)
    # Pause logging while the config is swapped so handlers pick up the
    # test settings.
    stop_logging()
    path = os.path.join(
        os.path.abspath(os.path.dirname(__file__)), 'data', 'pulp.conf')
    pulp_conf.read(path)
    start_logging()
    storage_dir = pulp_conf.get('server', 'storage_dir')
    if not os.path.exists(storage_dir):
        os.makedirs(storage_dir)
    # BUG FIX: shutil.rmtree() does not expand shell globs, so the previous
    # rmtree(storage_dir + '/*', ignore_errors=True) call silently did
    # nothing. Remove each entry under the storage dir explicitly instead.
    for entry in os.listdir(storage_dir):
        entry_path = os.path.join(storage_dir, entry)
        if os.path.isdir(entry_path):
            shutil.rmtree(entry_path, ignore_errors=True)
        else:
            os.remove(entry_path)
    name = pulp_conf.get('database', 'name')
    connection.initialize(name)
    managers.initialize()
def setUp(self):
    """
    Add distributors with overlapping relative_url configs via the distributor
    controller, with the Repository model patched out.
    """
    super(RepoConfigConduitTests, self).setUp()
    mock_plugins.install()
    manager_factory.initialize()

    with mock.patch(
            'pulp.server.controllers.distributor.model.Repository.objects'
    ):
        # Populate the database with a repo with units
        dist_controller.add_distributor('repo-1', 'mock-distributor',
                                        {"relative_url": "/a/bc/d"}, True,
                                        distributor_id='dist-1')
        dist_controller.add_distributor('repo-1', 'mock-distributor',
                                        {"relative_url": "/a/c"}, True,
                                        distributor_id='dist-2')
        dist_controller.add_distributor('repo-2', 'mock-distributor',
                                        {"relative_url": "/a/bc/e"}, True,
                                        distributor_id='dist-3')
        # repo-3 carries no relative_url
        dist_controller.add_distributor('repo-3', 'mock-distributor', {}, True,
                                        distributor_id='dist-4')
        # repo-4's relative_url collides with the repo id 'repo-5'
        dist_controller.add_distributor('repo-4', 'mock-distributor',
                                        {"relative_url": "repo-5"}, True,
                                        distributor_id='dist-5')
        dist_controller.add_distributor('repo-5', 'mock-distributor',
                                        {"relative_url": "a/bcd/e"}, True,
                                        distributor_id='dist-1')
        dist_controller.add_distributor('repo-6', 'mock-distributor',
                                        {"relative_url": "a/bcde/f/"}, True,
                                        distributor_id='dist-1')

    self.conduit = RepoConfigConduit('rpm')
def setUpClass(cls):
    """
    One-time setup: force Celery eager mode, load the test pulp.conf and
    reset the storage dir.
    """
    # This will make Celery tasks run synchronously
    celery_instance.celery.conf.CELERY_ALWAYS_EAGER = True
    if not os.path.exists(cls.TMP_ROOT):
        os.makedirs(cls.TMP_ROOT)
    # Pause logging while the config is swapped so handlers pick up the
    # test settings.
    stop_logging()
    path = os.path.join(
        os.path.abspath(os.path.dirname(__file__)), 'data', 'pulp.conf')
    pulp_conf.read(path)
    start_logging()
    storage_dir = pulp_conf.get('server', 'storage_dir')
    if not os.path.exists(storage_dir):
        os.makedirs(storage_dir)
    # BUG FIX: shutil.rmtree() does not expand shell globs, so the previous
    # rmtree(storage_dir + '/*', ignore_errors=True) call silently did
    # nothing. Remove each entry under the storage dir explicitly instead.
    for entry in os.listdir(storage_dir):
        entry_path = os.path.join(storage_dir, entry)
        if os.path.isdir(entry_path):
            shutil.rmtree(entry_path, ignore_errors=True)
        else:
            os.remove(entry_path)
    managers.initialize()
def preserve_custom_metadata_on_repo_scratchpad():
    """
    Look up all the yum based repos in pulp, grab any custom (non-base)
    repodata from the importer working dir and store it on each repo's
    scratchpad.
    """
    factory.initialize()
    repos = factory.repo_query_manager().find_with_importer_type("yum_importer")
    if not repos:
        _log.debug("No repos found to perform db migrate")
        return
    repo_ids = [repo['id'] for repo in repos]
    for repo_id in repo_ids:
        _log.debug("Processing repo %s" % repo_id)
        repo_scratchpad = factory.repo_manager().get_repo_scratchpad(repo_id)
        if repo_scratchpad.get("repodata"):
            # repo scratchpad already has repodata, skip migration
            _log.debug("repo [%s] scratchpad already has repodata, skip migration" % repo_id)
            continue
        repo_working_dir = importer_working_dir('yum_importer', repo_id)
        importer_repodata_dir = os.path.join(repo_working_dir, repo_id, "repodata")
        repomd_xml_path = os.path.join(importer_repodata_dir, "repomd.xml")
        if not os.path.exists(repomd_xml_path):
            # repodata doesn't exist on filesystem cannot lookup custom data, continue to next
            continue
        ftypes = util.get_repomd_filetypes(repomd_xml_path)
        base_ftypes = ['primary', 'primary_db', 'filelists_db', 'filelists', 'other',
                       'other_db', 'group', 'group_gz', 'updateinfo', 'updateinfo_db']
        for ftype in ftypes:
            if ftype in base_ftypes:
                # no need to process these again
                continue
            filetype_path = os.path.join(importer_repodata_dir, os.path.basename(
                util.get_repomd_filetype_path(repomd_xml_path, ftype)))
            # gzip-compressed metadata must be decompressed; close the handle
            # either way (the previous code leaked open file objects).
            opener = gzip.open if filetype_path.endswith('.gz') else open
            fh = opener(filetype_path)
            try:
                data = fh.read().decode("utf-8", "replace")
            finally:
                fh.close()
            # BUG FIX: the guard above only skips repos whose scratchpad has a
            # truthy "repodata" entry; a scratchpad missing the key entirely
            # used to raise KeyError here. setdefault covers both cases.
            repo_scratchpad.setdefault("repodata", {})[ftype] = data
        # set the custom metadata on scratchpad
        factory.repo_manager().set_repo_scratchpad(repo_id, repo_scratchpad)
        _log.info("Updated repo [%s] scratchpad with new custom repodata" % repo_id)
def setUpClass(cls):
    """
    One-time setup: force Celery eager mode, stub out resource reservation,
    load the test pulp.conf, reset the storage dir and connect to the test
    database.
    """
    # This will make Celery tasks run synchronously
    celery_instance.celery.conf.CELERY_ALWAYS_EAGER = True
    # Stub out resource reservation so no real broker/queue is required
    cls.reserve_resources_patch = mock.patch(
        'pulp.server.async.tasks._reserve_resource.'
        'apply_async')
    mock_patch = cls.reserve_resources_patch.start()
    mock_patch.return_value.get.return_value = 'some_queue'
    if not os.path.exists(cls.TMP_ROOT):
        os.makedirs(cls.TMP_ROOT)
    # Pause logging while the config is swapped so handlers pick up the
    # test settings.
    stop_logging()
    path = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'data', 'pulp.conf')
    pulp_conf.read(path)
    start_logging()
    storage_dir = pulp_conf.get('server', 'storage_dir')
    if not os.path.exists(storage_dir):
        os.makedirs(storage_dir)
    # BUG FIX: shutil.rmtree() does not expand shell globs, so the previous
    # rmtree(storage_dir + '/*', ignore_errors=True) call silently did
    # nothing. Remove each entry under the storage dir explicitly instead.
    for entry in os.listdir(storage_dir):
        entry_path = os.path.join(storage_dir, entry)
        if os.path.isdir(entry_path):
            shutil.rmtree(entry_path, ignore_errors=True)
        else:
            os.remove(entry_path)
    name = pulp_conf.get('database', 'name')
    connection.initialize(name)
    managers.initialize()
def setUpClass(cls):
    """
    One-time setup: force Celery eager mode, stub out resource reservation,
    load the test pulp.conf, reset the storage dir and connect to the test
    database.
    """
    # This will make Celery tasks run synchronously
    celery_instance.celery.conf.CELERY_ALWAYS_EAGER = True
    # Stub out resource reservation so no real broker/queue is required
    cls.reserve_resources_patch = mock.patch('pulp.server.async.tasks._reserve_resource.'
                                             'apply_async')
    mock_patch = cls.reserve_resources_patch.start()
    mock_patch.return_value.get.return_value = 'some_queue'
    if not os.path.exists(cls.TMP_ROOT):
        os.makedirs(cls.TMP_ROOT)
    # Pause logging while the config is swapped so handlers pick up the
    # test settings.
    stop_logging()
    path = os.path.join(
        os.path.abspath(os.path.dirname(__file__)), 'data', 'pulp.conf')
    pulp_conf.read(path)
    start_logging()
    storage_dir = pulp_conf.get('server', 'storage_dir')
    if not os.path.exists(storage_dir):
        os.makedirs(storage_dir)
    # BUG FIX: shutil.rmtree() does not expand shell globs, so the previous
    # rmtree(storage_dir + '/*', ignore_errors=True) call silently did
    # nothing. Remove each entry under the storage dir explicitly instead.
    for entry in os.listdir(storage_dir):
        entry_path = os.path.join(storage_dir, entry)
        if os.path.isdir(entry_path):
            shutil.rmtree(entry_path, ignore_errors=True)
        else:
            os.remove(entry_path)
    name = pulp_conf.get('database', 'name')
    connection.initialize(name)
    managers.initialize()
from datetime import datetime, timedelta
import time
import unittest

from celery.beat import ScheduleEntry
import mock

from pulp.server.async import scheduler
from pulp.server.async.celery_instance import celery as app
from pulp.server.db.model import dispatch, resources
from pulp.server.db.model.criteria import Criteria
from pulp.server.managers.factory import initialize


# The manager mappings must be registered before any scheduler code runs.
initialize()


class TestFailureWatcherLen(unittest.TestCase):

    def test_empty(self):
        """A freshly created FailureWatcher tracks no tasks."""
        watcher = scheduler.FailureWatcher()

        self.assertEqual(len(watcher), 0)

    def test_increments(self):
        """len() grows by one for each task added."""
        watcher = scheduler.FailureWatcher()

        for i in range(1, 10):
            watcher.add('task-%d' % i, 'schedule-%d' % i, False)
            self.assertEqual(len(watcher), i)
def initialize():
    """
    This function performs common initialization tasks that all of our processes need to
    perform. It starts the database connection, initializes the plugin API, and starts the
    manager factory. Idempotent: only the first call does any work.
    """
    global _IS_INITIALIZED
    if _IS_INITIALIZED:
        return

    db_connection.initialize()

    # Load plugins and resolve against types. This is also a likely candidate
    # for causing the server to fail to start.
    try:
        plugin_api.initialize()
    except Exception, e:  # Python 2 except syntax; this module predates py3
        msg = _(
            'One or more plugins failed to initialize. If a new type has been added, '
            'run pulp-manage-db to load the type into the database and restart the application. '
            'Error message: %s')
        msg = msg % str(e)
        logger.error(msg)
        raise

    # Load the mappings of manager type to managers
    manager_factory.initialize()

    _IS_INITIALIZED = True
def setUp(self):
    """Build the RepoScratchpadReadMixin under test."""
    manager_factory.initialize()
    # NOTE(review): the mixin is constructed with an exception class as its
    # only argument -- confirm this matches RepoScratchpadReadMixin's
    # signature (other mixins here take ids as leading arguments).
    self.mixin = mixins.RepoScratchpadReadMixin(mixins.DistributorConduitException)
def setUp(self):
    """Load the manager-type -> manager-class mappings the tests rely on."""
    manager_factory.initialize()
def setUpClass(cls):
    """
    One-time setup: load the test config, connect to the database and force
    Celery into eager (synchronous) mode.
    """
    PulpServerTests.CONFIG = load_test_config()
    connection.initialize()
    manager_factory.initialize()
    # This will make Celery tasks run synchronously
    celery_instance.celery.conf.CELERY_ALWAYS_EAGER = True
def setUp(self):
    """Build a RepoGroupDistributorScratchPadMixin for a fixed group/distributor."""
    manager_factory.initialize()
    group_id, distributor_id = 'group-id', 'group-dist'
    self.group_id = group_id
    self.distributor_id = distributor_id
    self.mixin = mixins.RepoGroupDistributorScratchPadMixin(group_id, distributor_id)
def setUp(self):
    """
    Initialize the managers and unpack the test tarball fixture into a
    scratch temp dir.
    """
    TestCase.setUp(self)
    managers.initialize()
    self.tmp_dir = mkdtemp()
    # closing() guarantees the tar handle is released even if extraction
    # fails. TAR_PATH is trusted test-fixture data.
    with closing(TarFile(TAR_PATH)) as tar:
        tar.extractall(self.tmp_dir)
def setUpClass(cls):
    """One-time setup: manager mappings plus synchronous (eager) Celery."""
    # Force Celery into eager mode so tasks execute synchronously in tests.
    celery_instance.celery.conf.CELERY_ALWAYS_EAGER = True
    manager_factory.initialize()
import mongoengine
from mock import Mock, patch, MagicMock

from nectar.downloaders.local import LocalFileDownloader
from nectar.request import DownloadRequest

from pulp.common.plugins import reporting_constants, importer_constants
from pulp.devel.unit.util import touch, compare_dict
from pulp.plugins.conduits.repo_publish import RepoPublishConduit
from pulp.plugins.conduits.repo_sync import RepoSyncConduit
from pulp.plugins.config import PluginCallConfiguration
from pulp.plugins.model import Repository, SyncReport, Unit
from pulp.plugins.util import publish_step
from pulp.server.db import model
from pulp.server.managers import factory


# The manager mappings must be loaded before any conduit/publish code runs.
factory.initialize()


class PublisherBase(unittest.TestCase):
    """Shared fixture: scratch dirs, a test Repository and a publish conduit."""

    def setUp(self):
        """Create scratch working/publish dirs and a conduit with a stubbed scratchpad."""
        self.working_dir = tempfile.mkdtemp(prefix='working_')
        self.published_dir = tempfile.mkdtemp(prefix='published_')
        self.master_dir = os.path.join(self.published_dir, 'master')

        self.repo_id = 'publish-test-repo'
        self.repo = Repository(self.repo_id, working_dir=self.working_dir)
        # NOTE(review): this Mock() is immediately replaced by the real
        # RepoPublishConduit below -- the first assignment looks redundant.
        self.conduit = Mock()
        self.conduit = RepoPublishConduit(self.repo_id, 'test_distributor_id')
        self.conduit.get_repo_scratchpad = Mock(return_value={})
import pickle import time import unittest import bson import celery from celery.schedules import schedule as CelerySchedule import mock from pulp.common import dateutils from pulp.server.db.model.auth import User from pulp.server.db.model.dispatch import TaskStatus, ScheduledCall, ScheduleEntry from pulp.server.managers.factory import initialize initialize() class TestTaskStatus(unittest.TestCase): """ Test the TaskStatus class. """ def tearDown(self): """ Remove the TaskStatus objects that were generated by these tests. """ TaskStatus.get_collection().remove() def test___init__(self): """ Test the __init__() method.