def test_associate_from_repo_no_criteria(self):
    """
    With no criteria supplied, associate_from_repo() should pass units=None to
    the importer and report the units the importer claims it imported.
    """
    # Setup
    source_repo_id = 'source-repo'
    dest_repo_id = 'dest-repo'
    self.repo_manager.create_repo(source_repo_id)
    self.importer_manager.set_importer(source_repo_id, 'mock-importer', {})
    self.repo_manager.create_repo(dest_repo_id)
    self.importer_manager.set_importer(dest_repo_id, 'mock-importer', {})
    self.content_manager.add_content_unit('mock-type', 'unit-1', {'key-1': 'unit-1'})
    self.content_manager.add_content_unit('mock-type', 'unit-2', {'key-1': 'unit-2'})
    self.content_manager.add_content_unit('mock-type', 'unit-3', {'key-1': 'unit-3'})
    self.manager.associate_unit_by_id(source_repo_id, 'mock-type', 'unit-1')
    self.manager.associate_unit_by_id(source_repo_id, 'mock-type', 'unit-2')
    self.manager.associate_unit_by_id(source_repo_id, 'mock-type', 'unit-3')
    # Perform the association as a specific (non-system) logged-in user.
    fake_user = User('associate-user', '')
    manager_factory.principal_manager().set_principal(principal=fake_user)
    # The mock importer reports exactly one imported unit.
    mock_plugins.MOCK_IMPORTER.import_units.return_value = [
        Unit('mock-type', {'k': 'v'}, {}, '')
    ]

    # Test
    results = self.manager.associate_from_repo(source_repo_id, dest_repo_id)
    associated = results['units_successful']

    # Verify
    self.assertEqual(1, len(associated))
    self.assertEqual(associated[0]['type_id'], 'mock-type')
    self.assertEqual(associated[0]['unit_key'], {'k': 'v'})
    self.assertEqual(1, mock_plugins.MOCK_IMPORTER.import_units.call_count)
    args = mock_plugins.MOCK_IMPORTER.import_units.call_args[0]
    kwargs = mock_plugins.MOCK_IMPORTER.import_units.call_args[1]
    self.assertTrue(isinstance(args[0], Repository))  # repository transfer object
    self.assertEqual(args[0].id, 'source-repo')  # repo importing units from
    self.assertEqual(args[1].id, 'dest-repo')  # repo importing units into
    self.assertEqual(None, kwargs['units'])  # units to import
    self.assertTrue(isinstance(args[3], PluginCallConfiguration))  # config
    conduit = args[2]
    self.assertTrue(isinstance(conduit, ImportUnitConduit))

    # Clean Up
    manager_factory.principal_manager().set_principal(principal=None)
def test_associate_from_repo_no_criteria(self):
    """
    With no criteria supplied, associate_from_repo() should pass units=None to
    the importer; the conduit it builds must carry the logged-in user as the
    association owner.
    """
    # Setup
    source_repo_id = 'source-repo'
    dest_repo_id = 'dest-repo'
    self.repo_manager.create_repo(source_repo_id)
    self.importer_manager.set_importer(source_repo_id, 'mock-importer', {})
    self.repo_manager.create_repo(dest_repo_id)
    self.importer_manager.set_importer(dest_repo_id, 'mock-importer', {})
    self.content_manager.add_content_unit('mock-type', 'unit-1', {'key-1': 'unit-1'})
    self.content_manager.add_content_unit('mock-type', 'unit-2', {'key-1': 'unit-2'})
    self.content_manager.add_content_unit('mock-type', 'unit-3', {'key-1': 'unit-3'})
    self.manager.associate_unit_by_id(source_repo_id, 'mock-type', 'unit-1',
                                      OWNER_TYPE_USER, 'admin')
    self.manager.associate_unit_by_id(source_repo_id, 'mock-type', 'unit-2',
                                      OWNER_TYPE_USER, 'admin')
    self.manager.associate_unit_by_id(source_repo_id, 'mock-type', 'unit-3',
                                      OWNER_TYPE_USER, 'admin')
    # Perform the association as a specific (non-system) logged-in user.
    fake_user = User('associate-user', '')
    manager_factory.principal_manager().set_principal(principal=fake_user)
    # The mock importer reports exactly one imported unit.
    mock_plugins.MOCK_IMPORTER.import_units.return_value = [Unit('mock-type', {'k': 'v'}, {}, '')]

    # Test
    results = self.manager.associate_from_repo(source_repo_id, dest_repo_id)
    associated = results['units_successful']

    # Verify
    self.assertEqual(1, len(associated))
    self.assertEqual(associated[0]['type_id'], 'mock-type')
    self.assertEqual(associated[0]['unit_key'], {'k': 'v'})
    self.assertEqual(1, mock_plugins.MOCK_IMPORTER.import_units.call_count)
    args = mock_plugins.MOCK_IMPORTER.import_units.call_args[0]
    kwargs = mock_plugins.MOCK_IMPORTER.import_units.call_args[1]
    self.assertTrue(isinstance(args[0], Repository))  # repository transfer object
    self.assertEqual(args[0].id, 'source-repo')  # repo importing units from
    self.assertEqual(args[1].id, 'dest-repo')  # repo importing units into
    self.assertEqual(None, kwargs['units'])  # units to import
    self.assertTrue(isinstance(args[3], PluginCallConfiguration))  # config
    conduit = args[2]
    self.assertTrue(isinstance(conduit, ImportUnitConduit))
    # The conduit must record the principal set above as the association owner.
    self.assertEqual(conduit.association_owner_type, OWNER_TYPE_USER)
    self.assertEqual(conduit.association_owner_id, fake_user.login)

    # Clean Up
    manager_factory.principal_manager().set_principal(principal=None)
def setUp(self):
    """
    Build the auth-related managers exercised by the permission tests and
    reset any principal state left over from a previous test.
    """
    super(PermissionManagerTests, self).setUp()
    # Character pool (Python 2 string.letters) — presumably used by tests to
    # generate random names; confirm against the test bodies.
    self.alpha_num = string.letters + string.digits
    self.role_manager = manager_factory.role_manager()
    self.role_query_manager = manager_factory.role_query_manager()
    self.permission_manager = manager_factory.permission_manager()
    self.permission_query_manager = manager_factory.permission_query_manager()
    # The built-in super-user role must exist before permission logic is used.
    self.role_manager.ensure_super_user_role()
    # Start every test with no logged-in principal.
    manager_factory.principal_manager().clear_principal()
def setUp(self):
    """
    Build the auth-related managers exercised by the role tests and reset any
    principal state left over from a previous test.
    """
    super(RoleManagerTests, self).setUp()
    # Character pool (Python 2 string.letters) — presumably used by tests to
    # generate random names; confirm against the test bodies.
    self.alpha_num = string.letters + string.digits
    self.role_manager = manager_factory.role_manager()
    self.role_query_manager = manager_factory.role_query_manager()
    self.permission_manager = manager_factory.permission_manager()
    self.permission_query_manager = manager_factory.permission_query_manager()
    # The built-in super-user role must exist before role logic is used.
    self.role_manager.ensure_super_user_role()
    # Start every test with no logged-in principal.
    manager_factory.principal_manager().clear_principal()
def test_import_uploaded_unit(self, mock_repo_qs, mock_rebuild):
    """
    import_uploaded_unit() should call the importer's upload_unit() with the
    transfer repo, unit key/metadata, staged file path and an upload conduit,
    return the importer's report, rebuild unit counts and bump
    last_unit_added.
    """
    importer_controller.set_importer('repo-u', 'mock-importer', {})
    key = {'key': 'value'}
    metadata = {'k1': 'v1'}
    # Captured before the import so the last_unit_added update can be checked.
    timestamp_pre_upload = dateutils.now_utc_datetime_with_tzinfo()
    mock_repo = mock_repo_qs.get_repo_or_missing_resource.return_value
    importer_return_report = {
        'success_flag': True,
        'summary': '',
        'details': {}
    }
    mock_plugins.MOCK_IMPORTER.upload_unit.return_value = importer_return_report
    upload_id = self.upload_manager.initialize_upload()
    file_path = self.upload_manager._upload_file_path(upload_id)
    # Perform the import as a specific logged-in user.
    fake_user = model.User('import-user', '')
    manager_factory.principal_manager().set_principal(principal=fake_user)
    response = self.upload_manager.import_uploaded_unit(
        'repo-u', 'mock-type', key, metadata, upload_id)

    # import_uploaded_unit() should have returned our importer_return_report
    self.assertTrue(response is importer_return_report)
    call_args = mock_plugins.MOCK_IMPORTER.upload_unit.call_args[0]
    self.assertTrue(call_args[0] is mock_repo.to_transfer_repo())
    self.assertEqual(call_args[1], 'mock-type')
    self.assertEqual(call_args[2], key)
    self.assertEqual(call_args[3], metadata)
    self.assertEqual(call_args[4], file_path)
    conduit = call_args[5]
    self.assertTrue(isinstance(conduit, UploadConduit))
    self.assertEqual(call_args[5].repo_id, 'repo-u')
    # It is now platform's responsibility to update plugin content unit counts
    self.assertTrue(mock_rebuild.called, "rebuild_content_unit_counts must be called")
    # Make sure that the last_unit_added timestamp was updated
    self.assertTrue(mock_repo.last_unit_added > timestamp_pre_upload)
    # Clean up
    mock_plugins.MOCK_IMPORTER.upload_unit.return_value = None
    manager_factory.principal_manager().set_principal(principal=None)
def POST(self, repo_id): params = self.params() criteria = params.get('criteria', None) if criteria is not None: try: criteria = UnitAssociationCriteria.from_client_input(criteria) except: _logger.error('Error parsing unassociation criteria [%s]' % criteria) raise exceptions.PulpDataException(), None, sys.exc_info()[2] task_tags = [ tags.resource_tag(tags.RESOURCE_REPOSITORY_TYPE, repo_id), tags.action_tag('unassociate') ] async_result = unassociate_by_criteria.apply_async_with_reservation( tags.RESOURCE_REPOSITORY_TYPE, repo_id, [ repo_id, criteria, RepoContentUnit.OWNER_TYPE_USER, manager_factory.principal_manager().get_principal()['login'] ], tags=task_tags) raise exceptions.OperationPostponed(async_result)
def _run(self): """ Run the call in the call request. Generally the target of a new thread. """ # used for calling _run directly during testing principal_manager = managers_factory.principal_manager() principal_manager.set_principal(self.call_request.principal) # generally set in the wrapper, but not when called directly if self.call_report.state in dispatch_constants.CALL_READY_STATES: self.call_report.state = dispatch_constants.CALL_RUNNING_STATE self.call_report.start_time = datetime.datetime.now(dateutils.utc_tz()) dispatch_context.CONTEXT.set_task_attributes(self) call = self.call_request.call args = copy.copy(self.call_request.args) kwargs = copy.copy(self.call_request.kwargs) try: result = call(*args, **kwargs) except: e, tb = sys.exc_info()[1:] _LOG.exception(e) return self._failed(e, tb) else: return self._succeeded(result) finally: principal_manager.clear_principal() dispatch_context.CONTEXT.clear_task_attributes()
def _run(self):
    """
    Run the call in the call request.
    Generally the target of a new thread.
    """
    # used for calling _run directly during testing
    principal_manager = managers_factory.principal_manager()
    principal_manager.set_principal(self.call_request.principal)
    # usually set in the wrapper, unless called directly
    if self.call_report.state in dispatch_constants.CALL_READY_STATES:
        self.call_report.state = dispatch_constants.CALL_RUNNING_STATE
    self.call_report.start_time = datetime.datetime.now(dateutils.utc_tz())
    dispatch_context.CONTEXT.set_task_attributes(self)
    call = self.call_request.call
    # Shallow-copy so the recorded request args are not replaced by the call.
    args = copy.copy(self.call_request.args)
    kwargs = copy.copy(self.call_request.kwargs)
    try:
        result = call(*args, **kwargs)
    except:
        # NOTE: this is making an assumption here that the call failed to
        # execute, if this isn't the case, or it got far enough, we may be
        # faced with _succeeded or _failed being called again
        e, tb = sys.exc_info()[1:]
        _LOG.exception(e)
        # too bad 2.4 doesn't support try/except/finally blocks
        principal_manager.clear_principal()
        dispatch_context.CONTEXT.clear_task_attributes()
        return self._failed(e, tb)
    principal_manager.clear_principal()
    dispatch_context.CONTEXT.clear_task_attributes()
    # Fix: route the success path through _succeeded(), mirroring the failure
    # path's _failed() call, instead of returning the raw result and leaving
    # the call report unfinished.
    return self._succeeded(result)
def POST(self, repo_id): params = self.params() criteria = params.get("criteria", None) if criteria is not None: try: criteria = UnitAssociationCriteria.from_client_input(criteria) except: logger.error("Error parsing unassociation criteria [%s]" % criteria) raise exceptions.PulpDataException(), None, sys.exc_info()[2] tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id), action_tag("unassociate")] async_result = unassociate_by_criteria.apply_async_with_reservation( dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id, [ repo_id, criteria, RepoContentUnit.OWNER_TYPE_USER, manager_factory.principal_manager().get_principal()["login"], ], tags=tags, ) raise exceptions.OperationPostponed(async_result)
def __call__(self, call_request, call_report):
    """
    Revoke the READ/DELETE permissions on the task resource that were granted
    to the user; skipped when the user is the system login.

    :param call_request: the call request the task was created from (unused
        beyond the signature)
    :param call_report: report carrying the task's call_request_id
    """
    # Fix: compare logins by value with '==' instead of 'is'. Identity
    # comparison of strings depends on interning, so the system user could
    # intermittently fail this guard and have its permissions revoked.
    if self.user_name == factory.principal_manager().system_login:
        return
    resource = '/v2/tasks/%s/' % call_report.call_request_id
    operations = ['READ', 'DELETE']
    factory.permission_manager().revoke(resource, self.user_name, operations)
def __call__(self, task):
    """
    Revoke the READ/DELETE permissions on the task resource that were granted
    to the user; skipped when the user is the system login.

    :param task: completed task object carrying the task id
    """
    # Fix: compare logins by value with '==' instead of 'is'. Identity
    # comparison of strings depends on interning, so the system user could
    # intermittently fail this guard and have its permissions revoked.
    if self.user_name == factory.principal_manager().system_login:
        return
    resource = '/tasks/%s/' % task.id
    operations = ['READ', 'DELETE']
    factory.permission_manager().revoke(resource, self.user_name, operations)
def POST(self, repo_id): params = self.params() criteria = params.get('criteria', None) if criteria is not None: try: criteria = UnitAssociationCriteria.from_client_input(criteria) except: _LOG.error('Error parsing unassociation criteria [%s]' % criteria) raise exceptions.PulpDataException(), None, sys.exc_info()[2] association_manager = manager_factory.repo_unit_association_manager() tags = [ resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id), action_tag('unassociate') ] call_request = CallRequest( association_manager.unassociate_by_criteria, [ repo_id, criteria, RepoContentUnit.OWNER_TYPE_USER, manager_factory.principal_manager().get_principal()['login'] ], tags=tags, archive=True) call_request.updates_resource( dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id) return execution.execute_async(self, call_request)
def test_associate_from_repo_no_criteria(self, mock_repo_qs):
    """
    With no criteria supplied, associate_from_repo() should pass units=None
    to the importer along with the transfer repos and an import conduit.
    """
    source_repo_id = 'source-repo'
    dest_repo_id = 'dest-repo'
    self.importer_manager.set_importer(source_repo_id, 'mock-importer', {})
    self.importer_manager.set_importer(dest_repo_id, 'mock-importer', {})
    self.content_manager.add_content_unit('mock-type', 'unit-1', {'key-1': 'unit-1'})
    self.content_manager.add_content_unit('mock-type', 'unit-2', {'key-1': 'unit-2'})
    self.content_manager.add_content_unit('mock-type', 'unit-3', {'key-1': 'unit-3'})
    self.manager.associate_unit_by_id(source_repo_id, 'mock-type', 'unit-1')
    self.manager.associate_unit_by_id(source_repo_id, 'mock-type', 'unit-2')
    self.manager.associate_unit_by_id(source_repo_id, 'mock-type', 'unit-3')
    # Perform the association as a specific (non-system) logged-in user.
    fake_user = User('associate-user', '')
    manager_factory.principal_manager().set_principal(principal=fake_user)
    # The mock importer reports exactly one imported unit.
    mock_plugins.MOCK_IMPORTER.import_units.return_value = [Unit('mock-type', {'k': 'v'}, {}, '')]

    # Test
    results = self.manager.associate_from_repo(source_repo_id, dest_repo_id)
    associated = results['units_successful']

    # Verify
    self.assertEqual(1, len(associated))
    self.assertEqual(associated[0]['type_id'], 'mock-type')
    self.assertEqual(associated[0]['unit_key'], {'k': 'v'})
    self.assertEqual(1, mock_plugins.MOCK_IMPORTER.import_units.call_count)
    # The mocked repo querier returns the same mock for any repo lookup, so
    # both the source and destination transfer repos compare equal to it.
    mock_repo = mock_repo_qs.get_repo_or_missing_resource.return_value
    args = mock_plugins.MOCK_IMPORTER.import_units.call_args[0]
    kwargs = mock_plugins.MOCK_IMPORTER.import_units.call_args[1]
    self.assertEqual(args[0], mock_repo.to_transfer_repo())
    self.assertEqual(args[1], mock_repo.to_transfer_repo())
    self.assertEqual(None, kwargs['units'])  # units to import
    self.assertTrue(isinstance(args[3], PluginCallConfiguration))  # config
    conduit = args[2]
    self.assertTrue(isinstance(conduit, ImportUnitConduit))

    # Clean Up
    manager_factory.principal_manager().set_principal(principal=None)
def _originator(self):
    """
    Return the login to record as the originator of the consumer event —
    the currently set principal (either the consumer itself or an admin user).

    :return: login of the originator value to use in the event
    :rtype: str
    """
    principal = managers_factory.principal_manager().get_principal()
    return principal['login']
def _test_generate_user_certificate(self):
    """
    generate_user_certificate() should produce a certificate whose CN decodes
    back to the login and id of the currently logged-in user.
    NOTE(review): the leading underscore keeps this from being collected as a
    test — presumably disabled on purpose; confirm before re-enabling.
    """
    # Setup
    admin_user = self.user_manager.create_user('test-admin')
    manager_factory.principal_manager().set_principal(admin_user)  # pretend the user is logged in

    # Test
    cert = self.user_manager.generate_user_certificate()

    # Verify
    self.assertTrue(cert is not None)
    certificate = manager_factory.certificate_manager(content=cert)
    cn = certificate.subject()['CN']
    username, id = self.cert_generation_manager.decode_admin_user(cn)
    self.assertEqual(username, admin_user['login'])
    self.assertEqual(id, admin_user['id'])
def _auth_decorator(self, *args, **kwargs):
    """
    Authenticate the request through each registered auth mechanism, authorize
    the resulting user (or consumer) for the requested operation, then invoke
    the wrapped method with the principal set for the duration of the call.

    :raises PulpCodedAuthenticationException: PLP0025 when no mechanism
        authenticates the request; PLP0026 when authorization fails
    """
    # Check Authentication
    # Run through each registered and enabled auth function
    is_consumer = False
    registered_auth_functions = [check_preauthenticated,
                                 password_authentication,
                                 user_cert_authentication,
                                 consumer_cert_authentication,
                                 oauth_authentication]
    user_authenticated = False
    for authenticate_user in registered_auth_functions:
        if authenticate_user == oauth_authentication:
            # oauth also reports whether the caller is a consumer
            userid, is_consumer = authenticate_user()
        else:
            userid = authenticate_user()
        if userid is not None:
            user_authenticated = True
            if authenticate_user == consumer_cert_authentication:
                is_consumer = True
            break
    if not user_authenticated:
        raise PulpCodedAuthenticationException(error_code=error_codes.PLP0025)

    # Check Authorization
    principal_manager = factory.principal_manager()
    user_query_manager = factory.user_query_manager()
    if super_user_only and not user_query_manager.is_superuser(userid):
        raise PulpCodedAuthenticationException(error_code=error_codes.PLP0026, user=userid,
                                               operation=OPERATION_NAMES[operation])
    # if the operation is None, don't check authorization
    elif operation is not None:
        if is_consumer:
            if is_consumer_authorized(http.resource_path(), userid, operation):
                # set default principal = SYSTEM
                principal_manager.set_principal()
            else:
                raise PulpCodedAuthenticationException(error_code=error_codes.PLP0026,
                                                       user=userid,
                                                       operation=OPERATION_NAMES[operation])
        elif user_query_manager.is_authorized(http.resource_path(), userid, operation):
            user = user_query_manager.find_by_login(userid)
            principal_manager.set_principal(user)
        else:
            raise PulpCodedAuthenticationException(error_code=error_codes.PLP0026, user=userid,
                                                   operation=OPERATION_NAMES[operation])

    # Authentication and authorization succeeded. Call method and then clear principal.
    # Fix: clear the principal even when the wrapped method raises; previously
    # an exception left the principal set, letting it leak into a later
    # request handled on the same thread.
    try:
        value = method(self, *args, **kwargs)
    finally:
        principal_manager.clear_principal()
    return value
def test_associate_from_repo_no_criteria(self):
    """
    With no criteria supplied, associate_from_repo() should pass units=None to
    the importer; the conduit must carry the logged-in user as the
    association owner.
    """
    # Setup
    source_repo_id = "source-repo"
    dest_repo_id = "dest-repo"
    self.repo_manager.create_repo(source_repo_id)
    self.importer_manager.set_importer(source_repo_id, "mock-importer", {})
    self.repo_manager.create_repo(dest_repo_id)
    self.importer_manager.set_importer(dest_repo_id, "mock-importer", {})
    self.content_manager.add_content_unit("mock-type", "unit-1", {"key-1": "unit-1"})
    self.content_manager.add_content_unit("mock-type", "unit-2", {"key-1": "unit-2"})
    self.content_manager.add_content_unit("mock-type", "unit-3", {"key-1": "unit-3"})
    self.manager.associate_unit_by_id(source_repo_id, "mock-type", "unit-1",
                                      OWNER_TYPE_USER, "admin")
    self.manager.associate_unit_by_id(source_repo_id, "mock-type", "unit-2",
                                      OWNER_TYPE_USER, "admin")
    self.manager.associate_unit_by_id(source_repo_id, "mock-type", "unit-3",
                                      OWNER_TYPE_USER, "admin")
    # Perform the association as a specific (non-system) logged-in user.
    fake_user = User("associate-user", "")
    manager_factory.principal_manager().set_principal(principal=fake_user)

    # Test
    self.manager.associate_from_repo(source_repo_id, dest_repo_id)

    # Verify
    self.assertEqual(1, mock_plugins.MOCK_IMPORTER.import_units.call_count)
    args = mock_plugins.MOCK_IMPORTER.import_units.call_args[0]
    kwargs = mock_plugins.MOCK_IMPORTER.import_units.call_args[1]
    self.assertTrue(isinstance(args[0], Repository))  # repository transfer object
    self.assertEqual(args[0].id, "source-repo")  # repo importing units from
    self.assertEqual(args[1].id, "dest-repo")  # repo importing units into
    self.assertEqual(None, kwargs["units"])  # units to import
    self.assertTrue(isinstance(args[3], PluginCallConfiguration))  # config
    conduit = args[2]
    self.assertTrue(isinstance(conduit, ImportUnitConduit))
    # The conduit must record the principal set above as the association owner.
    self.assertEqual(conduit.association_owner_type, OWNER_TYPE_USER)
    self.assertEqual(conduit.association_owner_id, fake_user.login)

    # Clean Up
    manager_factory.principal_manager().set_principal(principal=None)
def _test_generate_user_certificate(self):
    """
    generate_user_certificate() should produce a certificate whose CN decodes
    back to the login and id of the currently logged-in user.
    NOTE(review): the leading underscore keeps this from being collected as a
    test — presumably disabled on purpose; confirm before re-enabling.
    """
    # Setup
    admin_user = self.user_manager.create_user('test-admin')
    manager_factory.principal_manager().set_principal(
        admin_user)  # pretend the user is logged in

    # Test
    cert = self.user_manager.generate_user_certificate()

    # Verify
    self.assertTrue(cert is not None)
    certificate = manager_factory.certificate_manager(content=cert)
    cn = certificate.subject()['CN']
    username, id = self.cert_generation_manager.decode_admin_user(cn)
    self.assertEqual(username, admin_user['login'])
    self.assertEqual(id, admin_user['id'])
def test_import_uploaded_unit(self, mock_repo_qs, mock_rebuild):
    """
    import_uploaded_unit() should call the importer's upload_unit() with the
    transfer repo, unit key/metadata, staged file path and an upload conduit,
    return the importer's report, rebuild unit counts and bump
    last_unit_added.
    """
    importer_controller.set_importer('repo-u', 'mock-importer', {})
    key = {'key': 'value'}
    metadata = {'k1': 'v1'}
    # Captured before the import so the last_unit_added update can be checked.
    timestamp_pre_upload = dateutils.now_utc_datetime_with_tzinfo()
    mock_repo = mock_repo_qs.get_repo_or_missing_resource.return_value
    importer_return_report = {'success_flag': True, 'summary': '', 'details': {}}
    mock_plugins.MOCK_IMPORTER.upload_unit.return_value = importer_return_report
    upload_id = self.upload_manager.initialize_upload()
    file_path = self.upload_manager._upload_file_path(upload_id)
    # Perform the import as a specific logged-in user.
    fake_user = model.User('import-user', '')
    manager_factory.principal_manager().set_principal(principal=fake_user)
    response = self.upload_manager.import_uploaded_unit('repo-u', 'mock-type', key, metadata,
                                                        upload_id)

    # import_uploaded_unit() should have returned our importer_return_report
    self.assertTrue(response is importer_return_report)
    call_args = mock_plugins.MOCK_IMPORTER.upload_unit.call_args[0]
    self.assertTrue(call_args[0] is mock_repo.to_transfer_repo())
    self.assertEqual(call_args[1], 'mock-type')
    self.assertEqual(call_args[2], key)
    self.assertEqual(call_args[3], metadata)
    self.assertEqual(call_args[4], file_path)
    conduit = call_args[5]
    self.assertTrue(isinstance(conduit, UploadConduit))
    self.assertEqual(call_args[5].repo_id, 'repo-u')
    # It is now platform's responsibility to update plugin content unit counts
    self.assertTrue(mock_rebuild.called, "rebuild_content_unit_counts must be called")
    # Make sure that the last_unit_added timestamp was updated
    self.assertTrue(mock_repo.last_unit_added > timestamp_pre_upload)
    # Clean up
    mock_plugins.MOCK_IMPORTER.upload_unit.return_value = None
    manager_factory.principal_manager().set_principal(principal=None)
def _auth_decorator(self, *args, **kwargs):
    """
    Authenticate the request through each registered auth mechanism, authorize
    the resulting user (or consumer) for the requested operation, then invoke
    the wrapped method with the principal set for the duration of the call.

    :raises AuthenticationFailed: CODE_FAILED when no mechanism authenticates
        the request; CODE_PERMISSION when authorization fails
    """
    # Check Authentication
    # Run through each registered and enabled auth function
    is_consumer = False
    registered_auth_functions = [check_preauthenticated,
                                 password_authentication,
                                 user_cert_authentication,
                                 consumer_cert_authentication,
                                 oauth_authentication]
    user_authenticated = False
    for authenticate_user in registered_auth_functions:
        if authenticate_user == oauth_authentication:
            # oauth also reports whether the caller is a consumer
            userid, is_consumer = authenticate_user()
        else:
            userid = authenticate_user()
        if userid is not None:
            user_authenticated = True
            if authenticate_user == consumer_cert_authentication:
                is_consumer = True
            break
    if not user_authenticated:
        raise AuthenticationFailed(auth_utils.CODE_FAILED)

    # Check Authorization
    principal_manager = factory.principal_manager()
    user_query_manager = factory.user_query_manager()
    if super_user_only and not user_query_manager.is_superuser(userid):
        raise AuthenticationFailed(auth_utils.CODE_PERMISSION)
    # if the operation is None, don't check authorization
    elif operation is not None:
        if is_consumer:
            if is_consumer_authorized(http.resource_path(), userid, operation):
                # set default principal = SYSTEM
                principal_manager.set_principal()
            else:
                raise AuthenticationFailed(auth_utils.CODE_PERMISSION)
        elif user_query_manager.is_authorized(http.resource_path(), userid, operation):
            user = user_query_manager.find_by_login(userid)
            principal_manager.set_principal(user)
        else:
            raise AuthenticationFailed(auth_utils.CODE_PERMISSION)

    # Authentication and authorization succeeded. Call method and then clear principal.
    # Fix: clear the principal even when the wrapped method raises; previously
    # an exception left the principal set, letting it leak into a later
    # request handled on the same thread.
    try:
        value = method(self, *args, **kwargs)
    finally:
        principal_manager.clear_principal()
    return value
def __init__(self, call, args=None, kwargs=None, principal=None, tags=None, resources=None,
             dependencies=None, weight=1, asynchronous=False, archive=False,
             kwarg_blacklist=()):
    """
    Build a dispatch call request wrapping `call` with its arguments and
    scheduling/bookkeeping metadata.

    :param call: callable to execute
    :param args: positional arguments for the call (tuple or list)
    :param kwargs: keyword arguments for the call (dict)
    :param principal: User or dict to run as; defaults to the current principal
    :param tags: list of tags describing the call
    :param resources: dict of resources the call touches — presumably
        resource id to operation; confirm against the dispatch coordinator
    :param dependencies: dict of dependencies — presumably other call request
        ids; confirm against the dispatch coordinator
    :param weight: non-negative scheduling weight
    :param asynchronous: whether the call completes asynchronously
    :param archive: whether to archive the finished call
    :param kwarg_blacklist: kwarg names excluded from serialization/display —
        TODO confirm exact use at the consumer of this attribute
    """
    # NOTE(review): assert-based validation is stripped under `python -O`;
    # arguments are then accepted unchecked.
    assert callable(call)
    assert isinstance(args, (NoneType, tuple, list))
    assert isinstance(kwargs, (NoneType, dict))
    assert isinstance(principal, (NoneType, User, dict))
    assert isinstance(tags, (NoneType, list))
    assert isinstance(resources, (NoneType, dict))
    assert isinstance(dependencies, (NoneType, dict))
    assert isinstance(weight, int)
    assert weight > -1
    assert isinstance(asynchronous, bool)
    assert isinstance(archive, bool)
    assert isinstance(kwarg_blacklist, (list, tuple))
    self.id = str(uuid.uuid4())
    self.group_id = None
    self.schedule_id = None
    self.call = call
    self.args = args or []
    self.kwargs = kwargs or {}
    # Fall back to whoever is currently logged in when no principal is given.
    self.principal = principal or managers_factory.principal_manager().get_principal()
    self.tags = tags or []
    self.resources = resources or {}
    self.dependencies = dependencies or {}
    self.weight = weight
    self.asynchronous = asynchronous
    self.archive = archive
    self.kwarg_blacklist = kwarg_blacklist
    # One hook list per life-cycle callback slot and one control hook slot
    # per control hook constant.
    self.execution_hooks = [[] for i in range(len(dispatch_constants.CALL_LIFE_CYCLE_CALLBACKS))]
    self.control_hooks = [None for i in range(len(dispatch_constants.CALL_CONTROL_HOOKS))]
def post(self, request):
    """
    Generate an admin client SSL certificate/key pair for the currently
    logged-in principal and return it as JSON.

    :param request: WSGI request object
    :type request: django.core.handlers.wsgi.WSGIRequest
    :return: Response containing cert and key
    :rtype: django.http.HttpResponse
    """
    principal = factory.principal_manager().get_principal()
    cert_manager = factory.cert_generation_manager()
    key, certificate = cert_manager.make_admin_user_cert(principal)
    return generate_json_response({'key': key, 'certificate': certificate})
def test_import_uploaded_unit(self):
    """
    import_uploaded_unit() should call the importer's upload_unit() with the
    transfer repo, unit key/metadata, staged file path and an upload conduit
    owned by the logged-in user.
    """
    # Setup
    self.repo_manager.create_repo('repo-u')
    self.importer_manager.set_importer('repo-u', 'mock-importer', {})
    key = {'key': 'value'}
    metadata = {'k1': 'v1'}
    importer_return_report = object()
    mock_plugins.MOCK_IMPORTER.upload_unit.return_value = importer_return_report
    upload_id = self.upload_manager.initialize_upload()
    file_path = self.upload_manager._upload_file_path(upload_id)
    # Perform the import as a specific logged-in user.
    fake_user = User('import-user', '')
    manager_factory.principal_manager().set_principal(principal=fake_user)

    # Test
    self.upload_manager.import_uploaded_unit('repo-u', 'mock-type', key, metadata, upload_id)

    # Verify
    call_args = mock_plugins.MOCK_IMPORTER.upload_unit.call_args[0]
    self.assertTrue(isinstance(call_args[0], Repository))
    self.assertEqual(call_args[1], 'mock-type')
    self.assertEqual(call_args[2], key)
    self.assertEqual(call_args[3], metadata)
    self.assertEqual(call_args[4], file_path)
    conduit = call_args[5]
    self.assertTrue(isinstance(conduit, UploadConduit))
    self.assertEqual(call_args[5].repo_id, 'repo-u')
    # The conduit must record the principal set above as the owner.
    self.assertEqual(conduit.association_owner_type, OWNER_TYPE_USER)
    self.assertEqual(conduit.association_owner_id, fake_user.login)

    # Clean up
    mock_plugins.MOCK_IMPORTER.upload_unit.return_value = None
    manager_factory.principal_manager().set_principal(principal=None)
def test_import_uploaded_unit(self):
    """
    import_uploaded_unit() should return the importer's report and call
    upload_unit() with the transfer repo, unit key/metadata, staged file path
    and an upload conduit owned by the logged-in user.
    """
    self.repo_manager.create_repo('repo-u')
    self.importer_manager.set_importer('repo-u', 'mock-importer', {})
    key = {'key': 'value'}
    metadata = {'k1': 'v1'}
    importer_return_report = object()
    mock_plugins.MOCK_IMPORTER.upload_unit.return_value = importer_return_report
    upload_id = self.upload_manager.initialize_upload()
    file_path = self.upload_manager._upload_file_path(upload_id)
    # Perform the import as a specific logged-in user.
    fake_user = User('import-user', '')
    manager_factory.principal_manager().set_principal(principal=fake_user)
    response = self.upload_manager.import_uploaded_unit('repo-u', 'mock-type', key, metadata,
                                                        upload_id)

    # import_uploaded_unit() should have returned our importer_return_report
    self.assertTrue(response is importer_return_report)
    call_args = mock_plugins.MOCK_IMPORTER.upload_unit.call_args[0]
    self.assertTrue(isinstance(call_args[0], Repository))
    self.assertEqual(call_args[1], 'mock-type')
    self.assertEqual(call_args[2], key)
    self.assertEqual(call_args[3], metadata)
    self.assertEqual(call_args[4], file_path)
    conduit = call_args[5]
    self.assertTrue(isinstance(conduit, UploadConduit))
    self.assertEqual(call_args[5].repo_id, 'repo-u')
    # The conduit must record the principal set above as the owner.
    self.assertEqual(conduit.association_owner_type, OWNER_TYPE_USER)
    self.assertEqual(conduit.association_owner_id, fake_user.login)

    # Clean up
    mock_plugins.MOCK_IMPORTER.upload_unit.return_value = None
    manager_factory.principal_manager().set_principal(principal=None)
def post(self, request):
    """
    Hand back a freshly generated admin client certificate and private key
    for the current principal, serialized as JSON.

    :param request: WSGI request object
    :type request: django.core.handlers.wsgi.WSGIRequest
    :return: Response containing cert and key
    :rtype: django.http.HttpResponse
    """
    current_user = factory.principal_manager().get_principal()
    key, certificate = factory.cert_generation_manager().make_admin_user_cert(current_user)
    payload = {'key': key, 'certificate': certificate}
    return generate_json_response(payload)
def test_import_uploaded_unit(self):
    """
    import_uploaded_unit() should call the importer's upload_unit() with the
    transfer repo, unit key/metadata, staged file path and an upload conduit
    owned by the logged-in user.
    """
    # Setup
    self.repo_manager.create_repo("repo-u")
    self.importer_manager.set_importer("repo-u", "mock-importer", {})
    key = {"key": "value"}
    metadata = {"k1": "v1"}
    importer_return_report = object()
    mock_plugins.MOCK_IMPORTER.upload_unit.return_value = importer_return_report
    upload_id = self.upload_manager.initialize_upload()
    file_path = self.upload_manager._upload_file_path(upload_id)
    # Perform the import as a specific logged-in user.
    fake_user = User("import-user", "")
    manager_factory.principal_manager().set_principal(principal=fake_user)

    # Test
    self.upload_manager.import_uploaded_unit("repo-u", "mock-type", key, metadata, upload_id)

    # Verify
    call_args = mock_plugins.MOCK_IMPORTER.upload_unit.call_args[0]
    self.assertTrue(isinstance(call_args[0], Repository))
    self.assertEqual(call_args[1], "mock-type")
    self.assertEqual(call_args[2], key)
    self.assertEqual(call_args[3], metadata)
    self.assertEqual(call_args[4], file_path)
    conduit = call_args[5]
    self.assertTrue(isinstance(conduit, UploadConduit))
    self.assertEqual(call_args[5].repo_id, "repo-u")
    # The conduit must record the principal set above as the owner.
    self.assertEqual(conduit.association_owner_type, OWNER_TYPE_USER)
    self.assertEqual(conduit.association_owner_id, fake_user.login)

    # Clean up
    mock_plugins.MOCK_IMPORTER.upload_unit.return_value = None
    manager_factory.principal_manager().set_principal(principal=None)
def test_import_uploaded_unit(self):
    """
    import_uploaded_unit() should return the importer's report and call
    upload_unit() with the transfer repo, unit key/metadata, staged file path
    and an upload conduit.
    """
    self.repo_manager.create_repo('repo-u')
    self.importer_manager.set_importer('repo-u', 'mock-importer', {})
    key = {'key': 'value'}
    metadata = {'k1': 'v1'}
    importer_return_report = object()
    mock_plugins.MOCK_IMPORTER.upload_unit.return_value = importer_return_report
    upload_id = self.upload_manager.initialize_upload()
    file_path = self.upload_manager._upload_file_path(upload_id)
    # Perform the import as a specific logged-in user.
    fake_user = User('import-user', '')
    manager_factory.principal_manager().set_principal(principal=fake_user)
    response = self.upload_manager.import_uploaded_unit(
        'repo-u', 'mock-type', key, metadata, upload_id)

    # import_uploaded_unit() should have returned our importer_return_report
    self.assertTrue(response is importer_return_report)
    call_args = mock_plugins.MOCK_IMPORTER.upload_unit.call_args[0]
    self.assertTrue(isinstance(call_args[0], Repository))
    self.assertEqual(call_args[1], 'mock-type')
    self.assertEqual(call_args[2], key)
    self.assertEqual(call_args[3], metadata)
    self.assertEqual(call_args[4], file_path)
    conduit = call_args[5]
    self.assertTrue(isinstance(conduit, UploadConduit))
    self.assertEqual(call_args[5].repo_id, 'repo-u')

    # Clean up
    mock_plugins.MOCK_IMPORTER.upload_unit.return_value = None
    manager_factory.principal_manager().set_principal(principal=None)
def test_import_uploaded_unit(self, mock_repo_qs):
    """
    import_uploaded_unit() should return the importer's report and call
    upload_unit() with the mocked transfer repo, unit key/metadata, staged
    file path and an upload conduit.
    """
    self.importer_manager.set_importer('repo-u', 'mock-importer', {})
    key = {'key': 'value'}
    metadata = {'k1': 'v1'}
    mock_repo = mock_repo_qs.get_repo_or_missing_resource.return_value
    importer_return_report = object()
    mock_plugins.MOCK_IMPORTER.upload_unit.return_value = importer_return_report
    upload_id = self.upload_manager.initialize_upload()
    file_path = self.upload_manager._upload_file_path(upload_id)
    # Perform the import as a specific logged-in user.
    fake_user = User('import-user', '')
    manager_factory.principal_manager().set_principal(principal=fake_user)
    response = self.upload_manager.import_uploaded_unit('repo-u', 'mock-type', key, metadata,
                                                        upload_id)

    # import_uploaded_unit() should have returned our importer_return_report
    self.assertTrue(response is importer_return_report)
    call_args = mock_plugins.MOCK_IMPORTER.upload_unit.call_args[0]
    self.assertTrue(call_args[0] is mock_repo.to_transfer_repo())
    self.assertEqual(call_args[1], 'mock-type')
    self.assertEqual(call_args[2], key)
    self.assertEqual(call_args[3], metadata)
    self.assertEqual(call_args[4], file_path)
    conduit = call_args[5]
    self.assertTrue(isinstance(conduit, UploadConduit))
    self.assertEqual(call_args[5].repo_id, 'repo-u')

    # Clean up
    mock_plugins.MOCK_IMPORTER.upload_unit.return_value = None
    manager_factory.principal_manager().set_principal(principal=None)
def __init__(self, call, args=None, kwargs=None, principal=None, tags=None, resources=None,
             dependencies=None, weight=1, asynchronous=False, archive=False,
             kwarg_blacklist=()):
    """
    Build a dispatch call request wrapping `call` with its arguments and
    scheduling/bookkeeping metadata.

    :param call: callable to execute
    :param args: positional arguments for the call (tuple or list)
    :param kwargs: keyword arguments for the call (dict)
    :param principal: User or dict to run as; defaults to the current principal
    :param tags: list of tags describing the call
    :param resources: dict of resources the call touches — presumably
        resource id to operation; confirm against the dispatch coordinator
    :param dependencies: dict of dependencies — presumably other call request
        ids; confirm against the dispatch coordinator
    :param weight: non-negative scheduling weight
    :param asynchronous: whether the call completes asynchronously
    :param archive: whether to archive the finished call
    :param kwarg_blacklist: kwarg names excluded from serialization/display —
        TODO confirm exact use at the consumer of this attribute
    """
    # NOTE(review): assert-based validation is stripped under `python -O`;
    # arguments are then accepted unchecked.
    assert callable(call)
    assert isinstance(args, (NoneType, tuple, list))
    assert isinstance(kwargs, (NoneType, dict))
    assert isinstance(principal, (NoneType, User, dict))
    assert isinstance(tags, (NoneType, list))
    assert isinstance(resources, (NoneType, dict))
    assert isinstance(dependencies, (NoneType, dict))
    assert isinstance(weight, int)
    assert weight > -1
    assert isinstance(asynchronous, bool)
    assert isinstance(archive, bool)
    assert isinstance(kwarg_blacklist, (list, tuple))
    self.id = str(uuid.uuid4())
    self.group_id = None
    self.schedule_id = None
    self.call = call
    self.args = args or []
    self.kwargs = kwargs or {}
    # Fall back to whoever is currently logged in when no principal is given.
    self.principal = principal or managers_factory.principal_manager().get_principal()
    self.tags = tags or []
    self.resources = resources or {}
    self.dependencies = dependencies or {}
    self.weight = weight
    self.asynchronous = asynchronous
    self.archive = archive
    self.kwarg_blacklist = kwarg_blacklist
    # One hook list per life-cycle callback slot and one control hook slot
    # per control hook constant.
    self.execution_hooks = [[] for i in range(len(dispatch_constants.CALL_LIFE_CYCLE_CALLBACKS))]
    self.control_hooks = [None for i in range(len(dispatch_constants.CALL_CONTROL_HOOKS))]
def grant_automatic_permissions_for_resource(self, resource):
    """
    Grant CRUDE permissions for a newly created resource to current principal.

    :param resource: resource path to grant permissions to
    :type resource: str

    :raises PulpExecutionException: if the system principal has not been set
    """
    pm = factory.principal_manager()
    current_user = pm.get_principal()
    if not pm.is_system_principal():
        # Normal case: grant the full operation set to the acting user.
        self.grant(resource, current_user['login'], authorization.OPERATION_NAMES)
        return
    # The system principal is not a real user, so there is nobody to grant to.
    raise PulpExecutionException(
        _('Cannot grant automatic permissions for [%(user)s] on resource [%(resource)s]')
        % {'user': current_user, 'resource': resource})
def POST(self, repo_id): params = self.params() criteria = params.get('criteria', None) if criteria is not None: try: criteria = UnitAssociationCriteria.from_client_input(criteria) except: _logger.error('Error parsing unassociation criteria [%s]' % criteria) raise exceptions.PulpDataException(), None, sys.exc_info()[2] task_tags = [tags.resource_tag(tags.RESOURCE_REPOSITORY_TYPE, repo_id), tags.action_tag('unassociate')] async_result = unassociate_by_criteria.apply_async_with_reservation( tags.RESOURCE_REPOSITORY_TYPE, repo_id, [repo_id, criteria, RepoContentUnit.OWNER_TYPE_USER, manager_factory.principal_manager().get_principal()['login']], tags=task_tags) raise exceptions.OperationPostponed(async_result)
def grant_automatic_permissions_for_resource(self, resource):
    """
    Grant CRUDE permissions for a newly created resource to current principal.

    @type resource: str
    @param resource: resource path to grant permissions to

    @rtype: bool
    @return: True on success, False otherwise

    @raise PulpExecutionException: if the system principal has not been set
    """
    pm = factory.principal_manager()
    user = pm.get_principal()
    if pm.is_system_principal():
        # The system principal is not a real user; nothing to grant to.
        raise PulpExecutionException(
            _('Cannot grant automatic permissions for [%s] on resource [%s]') % (user, resource))
    # Full CRUDE set: create, read, update, delete, execute.
    crude = [self.CREATE, self.READ, self.UPDATE, self.DELETE, self.EXECUTE]
    self.grant(resource, user['login'], crude)
def _run(self): """ Run the call in the call request. Generally the target of a new thread. """ # used for calling _run directly during testing principal_manager = managers_factory.principal_manager() principal_manager.set_principal(self.call_request.principal) # usually set in the wrapper, unless called directly if self.call_report.state in dispatch_constants.CALL_READY_STATES: self.call_report.state = dispatch_constants.CALL_RUNNING_STATE self.call_report.start_time = datetime.datetime.now(dateutils.utc_tz()) dispatch_context.CONTEXT.set_task_attributes(self) call = self.call_request.call args = copy.copy(self.call_request.args) kwargs = copy.copy(self.call_request.kwargs) try: result = call(*args, **kwargs) except: # NOTE: this is making an assumption here that the call failed to # execute, if this isn't the case, or it got far enough, we may be # faced with _succeeded or _failed being called again e, tb = sys.exc_info()[1:] _LOG.exception(e) return self._failed(e, tb) else: return result finally: principal_manager.clear_principal() dispatch_context.CONTEXT.clear_task_attributes()
def grant_automatic_permissions_for_resource(self, resource):
    """
    Grant CRUDE permissions for a newly created resource to current principal.

    @type resource: str
    @param resource: resource path to grant permissions to

    @rtype: bool
    @return: True on success, False otherwise

    @raise PulpExecutionException: if the system principal has not been set
    """
    principal_manager = factory.principal_manager()
    current = principal_manager.get_principal()
    if principal_manager.is_system_principal():
        # No real user is acting, so automatic permissions cannot be granted.
        message = _('Cannot grant automatic permissions for [%s] on resource [%s]'
                    ) % (current, resource)
        raise PulpExecutionException(message)
    # Grant the complete CRUDE operation set to the acting user.
    self.grant(resource, current['login'],
               [self.CREATE, self.READ, self.UPDATE, self.DELETE, self.EXECUTE])
def import_uploaded_unit(repo_id, unit_type_id, unit_key, unit_metadata, upload_id): """ Called to trigger the importer's handling of an uploaded unit. This should not be called until the bits have finished uploading. The importer is then responsible for moving the file to the correct location, adding it to the Pulp server's inventory, and associating it with the repository. This call will first call is_valid_upload to check the integrity of the destination repository. See that method's documentation for exception possibilities. :param repo_id: identifies the repository into which the unit is uploaded :type repo_id: str :param unit_type_id: type of unit being uploaded :type unit_type_id: str :param unit_key: unique identifier for the unit (user-specified) :type unit_key: dict :param unit_metadata: any user-specified information about the unit :type unit_metadata: dict :param upload_id: upload being imported :type upload_id: str :return: A SyncReport indicating the success or failure of the upload :rtype: pulp.plugins.model.SyncReport """ # If it doesn't raise an exception, it's good to go ContentUploadManager.is_valid_upload(repo_id, unit_type_id) repo_query_manager = manager_factory.repo_query_manager() importer_manager = manager_factory.repo_importer_manager() repo = repo_query_manager.find_by_id(repo_id) repo_importer = importer_manager.get_importer(repo_id) try: importer_instance, plugin_config = plugin_api.get_importer_by_id( repo_importer['importer_type_id']) except plugin_exceptions.PluginNotFound: raise MissingResource(repo_id), None, sys.exc_info()[2] # Assemble the data needed for the import conduit = UploadConduit(repo_id, repo_importer['id'], RepoContentUnit.OWNER_TYPE_USER, manager_factory.principal_manager().get_principal()['login']) call_config = PluginCallConfiguration(plugin_config, repo_importer['config'], None) transfer_repo = repo_common_utils.to_transfer_repo(repo) transfer_repo.working_dir = repo_common_utils.importer_working_dir( 
repo_importer['importer_type_id'], repo_id, mkdir=True) file_path = ContentUploadManager._upload_file_path(upload_id) # Invoke the importer try: return importer_instance.upload_unit(transfer_repo, unit_type_id, unit_key, unit_metadata, file_path, conduit, call_config) except PulpException: msg = _('Error from the importer while importing uploaded unit to repository [%(r)s]') msg = msg % {'r': repo_id} logger.exception(msg) raise except Exception, e: msg = _('Error from the importer while importing uploaded unit to repository [%(r)s]') msg = msg % {'r': repo_id} logger.exception(msg) raise PulpExecutionException(e), None, sys.exc_info()[2]
def _verify_auth(self, operation, super_user_only, method, *args, **kwargs):
    """
    Internal method for checking authentication and authorization. This code
    is kept outside of the decorator which calls it so that it can be mocked.
    This allows for the decorator itself which calls here to have assertions
    made about the operation and super_user values set in the view code.

    An operation of None means not to check authorization; only check
    authentication.

    The super_user_only flag set to True means that only members of the
    built in SuperUsers role are authorized.

    :type operation: int or None
    :param operation: the operation a user needs permission for, or None to
                      skip authorization
    :type super_user_only: bool
    :param super_user_only: only authorize a user if they are a super user
    :param method: view callable invoked once auth succeeds
    :raises PulpCodedAuthenticationException: PLP0025 when no auth mechanism
        identifies a user; PLP0026 when the user lacks authorization
    :return: whatever `method` returns
    """
    # Check Authentication
    # Run through each registered and enabled auth function; the first one
    # that yields a userid wins.
    is_consumer = False
    registered_auth_functions = [check_preauthenticated,
                                 password_authentication,
                                 user_cert_authentication,
                                 consumer_cert_authentication,
                                 oauth_authentication]
    user_authenticated = False
    for authenticate_user in registered_auth_functions:
        if authenticate_user == oauth_authentication:
            # OAuth additionally reports whether the caller is a consumer.
            userid, is_consumer = authenticate_user()
        else:
            userid = authenticate_user()
        if userid is not None:
            user_authenticated = True
            if authenticate_user == consumer_cert_authentication:
                is_consumer = True
            break
    if not user_authenticated:
        raise PulpCodedAuthenticationException(error_code=error_codes.PLP0025)

    # Check Authorization
    principal_manager = factory.principal_manager()
    user_query_manager = factory.user_query_manager()
    if super_user_only and not user_query_manager.is_superuser(userid):
        raise PulpCodedAuthenticationException(error_code=error_codes.PLP0026, user=userid,
                                               operation=OPERATION_NAMES[operation])
    # if the operation is None, don't check authorization
    elif operation is not None:
        if is_consumer:
            if is_consumer_authorized(http.resource_path(), userid, operation):
                # set default principal = SYSTEM
                principal_manager.set_principal()
            else:
                raise PulpCodedAuthenticationException(error_code=error_codes.PLP0026,
                                                       user=userid,
                                                       operation=OPERATION_NAMES[operation])
        elif user_query_manager.is_authorized(http.resource_path(), userid, operation):
            user = user_query_manager.find_by_login(userid)
            principal_manager.set_principal(user)
        else:
            raise PulpCodedAuthenticationException(error_code=error_codes.PLP0026, user=userid,
                                                   operation=OPERATION_NAMES[operation])

    # Authentication and authorization succeeded. Call method and then clear principal.
    # NOTE(review): if `method` raises, clear_principal() is never reached --
    # confirm an upstream layer resets the principal on exceptions.
    value = method(self, *args, **kwargs)
    principal_manager.clear_principal()
    return value
def __init__(self):
    """Capture the login of the currently active principal."""
    current_principal = factory.principal_manager().get_principal()
    self.user_name = current_principal['login']
def associate_from_repo(source_repo_id, dest_repo_id, criteria=None, import_config_override=None): """ Creates associations in a repository based on the contents of a source repository. Units from the source repository can be filtered by specifying a criteria object. The destination repository must have an importer that can support the types of units being associated. This is done by analyzing the unit list and the importer metadata and takes place before the destination repository is called. Pulp does not actually perform the associations as part of this call. The unit list is determined and passed to the destination repository's importer. It is the job of the importer to make the associate calls back into Pulp where applicable. If criteria is None, the effect of this call is to copy the source repository's associations into the destination repository. :param source_repo_id: identifies the source repository :type source_repo_id: str :param dest_repo_id: identifies the destination repository :type dest_repo_id: str :param criteria: optional; if specified, will filter the units retrieved from the source repository :type criteria: UnitAssociationCriteria :param import_config_override: optional config containing values to use for this import only :type import_config_override: dict :return: list of unit IDs (see pulp.plugins.model.Unit.to_id_dict) for units that were associated by this operation :rtype: list :raise MissingResource: if either of the specified repositories don't exist """ # Validation repo_query_manager = manager_factory.repo_query_manager() importer_manager = manager_factory.repo_importer_manager() source_repo = repo_query_manager.get_repository(source_repo_id) dest_repo = repo_query_manager.get_repository(dest_repo_id) # This will raise MissingResource if there isn't one, which is the # behavior we want this method to exhibit, so just let it bubble up. 
dest_repo_importer = importer_manager.get_importer(dest_repo_id) source_repo_importer = importer_manager.get_importer(source_repo_id) # The docs are incorrect on the list_importer_types call; it actually # returns a dict with the types under key "types" for some reason. supported_type_ids = plugin_api.list_importer_types( dest_repo_importer['importer_type_id'])['types'] # If criteria is specified, retrieve the list of units now associate_us = None if criteria is not None: associate_us = load_associated_units(source_repo_id, criteria) # If units were supposed to be filtered but none matched, we're done if len(associate_us) is 0: # Return an empty list to indicate nothing was copied return [] # Now we can make sure the destination repository's importer is capable # of importing either the selected units or all of the units associated_unit_type_ids = calculate_associated_type_ids(source_repo_id, associate_us) unsupported_types = [t for t in associated_unit_type_ids if t not in supported_type_ids] if len(unsupported_types) > 0: raise exceptions.InvalidValue(['types']) # Convert all of the units into the plugin standard representation if # a filter was specified transfer_units = None if associate_us is not None: transfer_units = create_transfer_units(associate_us, associated_unit_type_ids) # Convert the two repos into the plugin API model transfer_dest_repo = common_utils.to_transfer_repo(dest_repo) transfer_dest_repo.working_dir = common_utils.importer_working_dir( dest_repo_importer['importer_type_id'], dest_repo['id'], mkdir=True) transfer_source_repo = common_utils.to_transfer_repo(source_repo) transfer_source_repo.working_dir = common_utils.importer_working_dir( source_repo_importer['importer_type_id'], source_repo['id'], mkdir=True) # Invoke the importer importer_instance, plugin_config = plugin_api.get_importer_by_id( dest_repo_importer['importer_type_id']) call_config = PluginCallConfiguration(plugin_config, dest_repo_importer['config'], import_config_override) 
login = manager_factory.principal_manager().get_principal()['login'] conduit = ImportUnitConduit( source_repo_id, dest_repo_id, source_repo_importer['id'], dest_repo_importer['id'], RepoContentUnit.OWNER_TYPE_USER, login) try: copied_units = importer_instance.import_units( transfer_source_repo, transfer_dest_repo, conduit, call_config, units=transfer_units) unit_ids = [u.to_id_dict() for u in copied_units] return {'units_successful': unit_ids} except Exception: msg = _('Exception from importer [%(i)s] while importing units into repository [%(r)s]') msg = msg % {'i': dest_repo_importer['importer_type_id'], 'r': dest_repo_id} logger.exception(msg) raise exceptions.PulpExecutionException(), None, sys.exc_info()[2]
def associate_from_repo(self, source_repo_id, dest_repo_id, criteria=None, import_config_override=None): """ Creates associations in a repository based on the contents of a source repository. Units from the source repository can be filtered by specifying a criteria object. The destination repository must have an importer that can support the types of units being associated. This is done by analyzing the unit list and the importer metadata and takes place before the destination repository is called. Pulp does not actually perform the associations as part of this call. The unit list is determined and passed to the destination repository's importer. It is the job of the importer to make the associate calls back into Pulp where applicable. If criteria is None, the effect of this call is to copy the source repository's associations into the destination repository. @param source_repo_id: identifies the source repository @type source_repo_id: str @param dest_repo_id: identifies the destination repository @type dest_repo_id: str @param criteria: optional; if specified, will filter the units retrieved from the source repository @type criteria: L{UnitAssociationCriteria} @param import_config_override: optional config containing values to use for this import only @type import_config_override: dict @return: list of unit IDs (see pulp.plugins.model.Unit.to_id_dict) for units that were associated by this operation @rtype: list @raise MissingResource: if either of the specified repositories don't exist """ # Validation repo_query_manager = manager_factory.repo_query_manager() importer_manager = manager_factory.repo_importer_manager() source_repo = repo_query_manager.get_repository(source_repo_id) dest_repo = repo_query_manager.get_repository(dest_repo_id) # This will raise MissingResource if there isn't one, which is the # behavior we want this method to exhibit, so just let it bubble up. 
dest_repo_importer = importer_manager.get_importer(dest_repo_id) source_repo_importer = importer_manager.get_importer(source_repo_id) # The docs are incorrect on the list_importer_types call; it actually # returns a dict with the types under key "types" for some reason. supported_type_ids = plugin_api.list_importer_types(dest_repo_importer['importer_type_id'])['types'] # If criteria is specified, retrieve the list of units now associate_us = None if criteria is not None: associate_us = load_associated_units(source_repo_id, criteria) # If units were supposed to be filtered but none matched, we're done if len(associate_us) is 0: # Return an empty list to indicate nothing was copied return [] # Now we can make sure the destination repository's importer is capable # of importing either the selected units or all of the units associated_unit_type_ids = calculate_associated_type_ids(source_repo_id, associate_us) unsupported_types = [t for t in associated_unit_type_ids if t not in supported_type_ids] if len(unsupported_types) > 0: raise exceptions.InvalidValue(['types']) # Convert all of the units into the plugin standard representation if # a filter was specified transfer_units = None if associate_us is not None: transfer_units = create_transfer_units(associate_us, associated_unit_type_ids) # Convert the two repos into the plugin API model transfer_dest_repo = common_utils.to_transfer_repo(dest_repo) transfer_dest_repo.working_dir = common_utils.importer_working_dir(dest_repo_importer['importer_type_id'], dest_repo['id'], mkdir=True) transfer_source_repo = common_utils.to_transfer_repo(source_repo) transfer_source_repo.working_dir = common_utils.importer_working_dir(source_repo_importer['importer_type_id'], source_repo['id'], mkdir=True) # Invoke the importer importer_instance, plugin_config = plugin_api.get_importer_by_id(dest_repo_importer['importer_type_id']) call_config = PluginCallConfiguration(plugin_config, dest_repo_importer['config'], import_config_override) 
login = manager_factory.principal_manager().get_principal()['login'] conduit = ImportUnitConduit(source_repo_id, dest_repo_id, source_repo_importer['id'], dest_repo_importer['id'], RepoContentUnit.OWNER_TYPE_USER, login) try: copied_units = importer_instance.import_units(transfer_source_repo, transfer_dest_repo, conduit, call_config, units=transfer_units) unit_ids = [u.to_id_dict() for u in copied_units] return unit_ids except Exception: _LOG.exception('Exception from importer [%s] while importing units into repository [%s]' % (dest_repo_importer['importer_type_id'], dest_repo_id)) raise exceptions.PulpExecutionException(), None, sys.exc_info()[2]
def __init__(self, iso_schedule, task, total_run_count=0, next_run=None,
             schedule=None, args=None, kwargs=None, principal=None, last_updated=None,
             consecutive_failures=0, enabled=True, failure_threshold=None,
             last_run_at=None, first_run=None, remaining_runs=None, id=None,
             tags=None, name=None, options=None, resource=None):
    """
    :param iso_schedule:    string representing the schedule in ISO8601 format
    :type  iso_schedule:    basestring
    :param task:            the task that should be run on a schedule. This
                            can be an instance of a celery task or the name of
                            the task, as taken from a task's "name" attribute
    :type  task:            basestring or celery.Task
    :param total_run_count: total number of times this schedule has run
    :type  total_run_count: int
    :param next_run:        ignored, because it is always re-calculated at
                            instantiation
    :param schedule:        pickled instance of celery.schedules.schedule,
                            representing the schedule that should be run.
                            This is optional.
    :type  schedule:        basestring or None
    :param args:            list of arguments that should be passed to the
                            task's apply_async function as its "args" argument
    :type  args:            list
    :param kwargs:          dict of keyword arguments that should be passed to
                            the task's apply_async function as its "kwargs"
                            argument
    :type  kwargs:          dict
    :param principal:       pickled instance of pulp.server.db.model.auth.User
                            representing the pulp user who the task should be
                            run as. This is optional.
    :type  principal:       basestring or None
    :param last_updated:    timestamp for the last time this schedule was
                            updated in the database as seconds since the epoch
    :type  last_updated:    float
    :param consecutive_failures: number of times this task has failed
                            consecutively. This gets reset to zero if the task
                            succeeds.
    :type  consecutive_failures: int
    :param enabled:         boolean indicating whether this schedule should be
                            actively run by the scheduler. If False, the
                            schedule will be ignored.
    :type  enabled:         bool
    :param failure_threshold: number of consecutive failures after which this
                            task should be automatically disabled. Because
                            these tasks run asynchronously, they may finish in
                            a different order than they were queued in. Thus,
                            it is possible that n consecutive failures will be
                            reported by jobs that were not queued
                            consecutively. So do not depend on the queuing
                            order when using this feature. If this value is 0,
                            no automatic disabling will occur.
    :type  failure_threshold: int
    :param last_run_at:     ISO8601 string representing when this schedule
                            last ran.
    :type  last_run_at:     basestring
    :param first_run:       ISO8601 string or datetime instance (in UTC
                            timezone) representing when this schedule should
                            run or should have been run for the first time. If
                            the schedule has a specified date and time to
                            start, this will be that value. If not, the value
                            from the first time the schedule was actually run
                            will be used.
    :type  first_run:       basestring or datetime.datetime or NoneType
    :param remaining_runs:  number of runs remaining until this schedule will
                            be automatically disabled.
    :type  remaining_runs:  int or NoneType
    :param id:              unique ID used by mongodb to identify this
                            schedule
    :type  id:              basestring
    :param tags:            ignored, but allowed to exist as historical data
                            for now
    :param name:            ignored, because the "id" value is used for this
                            now. The value is here for backward compatibility.
    :param options:         dictionary that should be passed to the
                            apply_async function as its "options" argument.
    :type  options:         dict
    :param resource:        optional string indicating a unique resource that
                            should be used to find this schedule. For example,
                            to find all schedules for a given repository, a
                            resource string will be derived for that repo, and
                            this collection will be searched for that resource
                            string.
    :type  resource:        basestring
    """
    if id is None:
        # this creates self._id and self.id
        super(ScheduledCall, self).__init__()
        self._new = True
    else:
        self.id = id
        self._id = ObjectId(id)
        self._new = False

    # Accept either a celery Task instance or a task name string.
    if hasattr(task, 'name'):
        task = task.name

    # generate this if it wasn't passed in
    if schedule is None:
        # only the interval is needed here; start_time/occurrences are unused
        interval, start_time, occurrences = dateutils.parse_iso8601_interval(
            iso_schedule)
        schedule = pickle.dumps(CelerySchedule(interval))

    # generate this if it wasn't passed in
    principal = principal or factory.principal_manager().get_principal()

    self.args = args or []
    self.consecutive_failures = consecutive_failures
    self.enabled = enabled
    self.failure_threshold = failure_threshold
    self.iso_schedule = iso_schedule
    self.kwargs = kwargs or {}
    self.last_run_at = last_run_at
    self.last_updated = last_updated or time.time()
    # "name" mirrors the id; the name parameter itself is intentionally ignored
    self.name = id
    self.options = options or {}
    self.principal = principal
    self.resource = resource
    self.schedule = schedule
    self.task = task
    self.total_run_count = total_run_count

    if first_run is None:
        # get the date and time from the iso_schedule value, and if it does not have a date and
        # time, use the current date and time
        self.first_run = dateutils.format_iso8601_datetime(
            dateutils.parse_iso8601_interval(iso_schedule)[1] or
            datetime.utcnow().replace(tzinfo=isodate.UTC))
    elif isinstance(first_run, datetime):
        self.first_run = dateutils.format_iso8601_datetime(first_run)
    else:
        self.first_run = first_run
    if remaining_runs is None:
        # default to the occurrence count embedded in the ISO8601 interval
        self.remaining_runs = dateutils.parse_iso8601_interval(
            iso_schedule)[2]
    else:
        self.remaining_runs = remaining_runs

    self.next_run = self.calculate_next_run()
if criteria_body: try: criteria = UnitAssociationCriteria.from_client_input(criteria_body) except pulp_exceptions.InvalidValue, e: invalid_criteria = pulp_exceptions.InvalidValue('criteria') invalid_criteria.add_child_exception(e) raise invalid_criteria else: criteria = None task_tags = [tags.resource_tag(tags.RESOURCE_REPOSITORY_TYPE, repo_id), tags.action_tag('unassociate')] async_result = unassociate_by_criteria.apply_async_with_reservation( tags.RESOURCE_REPOSITORY_TYPE, repo_id, [repo_id, criteria, RepoContentUnit.OWNER_TYPE_USER, manager_factory.principal_manager().get_principal()['login']], tags=task_tags) raise pulp_exceptions.OperationPostponed(async_result) class RepoImportUpload(View): """ View to import units into a repository. """ @auth_required(authorization.UPDATE) @json_body_required def post(self, request, repo_id): """ Import an uploaded unit into the given repository. :param request: WSGI request object
def POST(self):
    """Generate an admin certificate for the current principal and return key + cert."""
    current_user = factory.principal_manager().get_principal()
    key, certificate = factory.cert_generation_manager().make_admin_user_cert(current_user)
    # The response body is the private key followed by the certificate.
    return self.ok(key + certificate)
def __init__(self, iso_schedule, task, total_run_count=0, next_run=None,
             schedule=None, args=None, kwargs=None, principal=None, last_updated=None,
             consecutive_failures=0, enabled=True, failure_threshold=None,
             last_run_at=None, first_run=None, remaining_runs=None, id=None,
             tags=None, name=None, options=None, resource=None):
    """
    :param iso_schedule:    string representing the schedule in ISO8601 format
    :type  iso_schedule:    basestring
    :param task:            the task that should be run on a schedule. This
                            can be an instance of a celery task or the name of
                            the task, as taken from a task's "name" attribute
    :type  task:            basestring or celery.Task
    :param total_run_count: total number of times this schedule has run
    :type  total_run_count: int
    :param next_run:        ignored, because it is always re-calculated at
                            instantiation
    :param schedule:        pickled instance of celery.schedules.schedule,
                            representing the schedule that should be run.
                            This is optional.
    :type  schedule:        basestring or None
    :param args:            list of arguments that should be passed to the
                            task's apply_async function as its "args" argument
    :type  args:            list
    :param kwargs:          dict of keyword arguments that should be passed to
                            the task's apply_async function as its "kwargs"
                            argument
    :type  kwargs:          dict
    :param principal:       pickled instance of pulp.server.db.model.auth.User
                            representing the pulp user who the task should be
                            run as. This is optional.
    :type  principal:       basestring or None
    :param last_updated:    timestamp for the last time this schedule was
                            updated in the database as seconds since the epoch
    :type  last_updated:    float
    :param consecutive_failures: number of times this task has failed
                            consecutively. This gets reset to zero if the task
                            succeeds.
    :type  consecutive_failures: int
    :param enabled:         boolean indicating whether this schedule should be
                            actively run by the scheduler. If False, the
                            schedule will be ignored.
    :type  enabled:         bool
    :param failure_threshold: number of consecutive failures after which this
                            task should be automatically disabled. Because
                            these tasks run asynchronously, they may finish in
                            a different order than they were queued in. Thus,
                            it is possible that n consecutive failures will be
                            reported by jobs that were not queued
                            consecutively. So do not depend on the queuing
                            order when using this feature. If this value is 0,
                            no automatic disabling will occur.
    :type  failure_threshold: int
    :param last_run_at:     ISO8601 string representing when this schedule
                            last ran.
    :type  last_run_at:     basestring
    :param first_run:       ISO8601 string or datetime instance (in UTC
                            timezone) representing when this schedule should
                            run or should have been run for the first time. If
                            the schedule has a specified date and time to
                            start, this will be that value. If not, the value
                            from the first time the schedule was actually run
                            will be used.
    :type  first_run:       basestring or datetime.datetime or NoneType
    :param remaining_runs:  number of runs remaining until this schedule will
                            be automatically disabled.
    :type  remaining_runs:  int or NoneType
    :param id:              unique ID used by mongodb to identify this
                            schedule
    :type  id:              basestring
    :param tags:            ignored, but allowed to exist as historical data
                            for now
    :param name:            ignored, because the "id" value is used for this
                            now. The value is here for backward compatibility.
    :param options:         dictionary that should be passed to the
                            apply_async function as its "options" argument.
    :type  options:         dict
    :param resource:        optional string indicating a unique resource that
                            should be used to find this schedule. For example,
                            to find all schedules for a given repository, a
                            resource string will be derived for that repo, and
                            this collection will be searched for that resource
                            string.
    :type  resource:        basestring
    """
    if id is None:
        # this creates self._id and self.id
        super(ScheduledCall, self).__init__()
        self._new = True
    else:
        self.id = id
        self._id = ObjectId(id)
        self._new = False

    # Accept either a celery Task instance or a task name string.
    if hasattr(task, 'name'):
        task = task.name

    # generate this if it wasn't passed in
    if schedule is None:
        # only the interval is needed here; start_time/occurrences are unused
        interval, start_time, occurrences = dateutils.parse_iso8601_interval(iso_schedule)
        schedule = pickle.dumps(CelerySchedule(interval))

    # generate this if it wasn't passed in
    principal = principal or factory.principal_manager().get_principal()

    self.args = args or []
    self.consecutive_failures = consecutive_failures
    self.enabled = enabled
    self.failure_threshold = failure_threshold
    self.iso_schedule = iso_schedule
    self.kwargs = kwargs or {}
    self.last_run_at = last_run_at
    self.last_updated = last_updated or time.time()
    # "name" mirrors the id; the name parameter itself is intentionally ignored
    self.name = id
    self.options = options or {}
    self.principal = principal
    self.resource = resource
    self.schedule = schedule
    self.task = task
    self.total_run_count = total_run_count

    if first_run is None:
        # get the date and time from the iso_schedule value, and if it does not have a date and
        # time, use the current date and time
        self.first_run = dateutils.format_iso8601_datetime(
            dateutils.parse_iso8601_interval(iso_schedule)[1] or
            datetime.utcnow().replace(tzinfo=isodate.UTC))
    elif isinstance(first_run, datetime):
        self.first_run = dateutils.format_iso8601_datetime(first_run)
    else:
        self.first_run = first_run
    if remaining_runs is None:
        # default to the occurrence count embedded in the ISO8601 interval
        self.remaining_runs = dateutils.parse_iso8601_interval(iso_schedule)[2]
    else:
        self.remaining_runs = remaining_runs

    self.next_run = self.calculate_next_run()
def POST(self, repo_id): params = self.params() criteria = params.get('criteria', None) if criteria is not None: try: criteria = UnitAssociationCriteria.from_client_input(criteria) except: _LOG.exception('Error parsing unassociation criteria [%s]' % criteria) raise exceptions.PulpDataException(), None, sys.exc_info()[2] association_manager = manager_factory.repo_unit_association_manager() tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id), action_tag('unassociate')] call_request = CallRequest(association_manager.unassociate_by_criteria, [repo_id, criteria, RepoContentUnit.OWNER_TYPE_USER, manager_factory.principal_manager().get_principal()['login']], tags=tags, archive=True) call_request.updates_resource(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id) return execution.execute_async(self, call_request)
def _verify_auth(self, operation, super_user_only, method, *args, **kwargs):
    """
    Internal method for checking authentication and authorization.

    This code is kept outside of the decorator which calls it so that it can be
    mocked. This allows for the decorator itself which calls here to have
    assertions made about the operation and super_user values set in the view
    code.

    An operation of None means not to check authorization; only check
    authentication.

    The super_user_only flag set to True means that only members of the
    built in SuperUsers role are authorized.

    :type operation: int or None
    :param operation: The operation a user needs permission for, or None to
                      skip authorization.

    :type super_user_only: bool
    :param super_user_only: Only authorize a user if they are a super user.

    :return: whatever the wrapped method returns
    :raise PulpCodedAuthenticationException: if authentication fails (PLP0025)
           or the user is not authorized for the operation (PLP0026)
    """
    # Check Authentication

    # Run through each registered and enabled auth function
    is_consumer = False
    registered_auth_functions = [check_preauthenticated,
                                 password_authentication,
                                 user_cert_authentication,
                                 consumer_cert_authentication,
                                 oauth_authentication]

    user_authenticated = False
    for authenticate_user in registered_auth_functions:
        if authenticate_user == oauth_authentication:
            # oauth is the only auth function that also reports whether the
            # caller is a consumer
            userid, is_consumer = authenticate_user()
        else:
            userid = authenticate_user()

        if userid is not None:
            user_authenticated = True
            if authenticate_user == consumer_cert_authentication:
                is_consumer = True
            break

    if not user_authenticated:
        raise PulpCodedAuthenticationException(error_code=error_codes.PLP0025)

    # Check Authorization

    principal_manager = factory.principal_manager()
    user_query_manager = factory.user_query_manager()

    if super_user_only and not user_query_manager.is_superuser(userid):
        raise PulpCodedAuthenticationException(error_code=error_codes.PLP0026, user=userid,
                                               operation=OPERATION_NAMES[operation])
    # if the operation is None, don't check authorization
    elif operation is not None:
        if is_consumer:
            if is_consumer_authorized(http.resource_path(), userid, operation):
                # set default principal = SYSTEM
                principal_manager.set_principal()
            else:
                raise PulpCodedAuthenticationException(error_code=error_codes.PLP0026,
                                                       user=userid,
                                                       operation=OPERATION_NAMES[operation])
        elif user_query_manager.is_authorized(http.resource_path(), userid, operation):
            user = user_query_manager.find_by_login(userid)
            principal_manager.set_principal(user)
        else:
            raise PulpCodedAuthenticationException(error_code=error_codes.PLP0026, user=userid,
                                                   operation=OPERATION_NAMES[operation])

    # Authentication and authorization succeeded. Call the wrapped method and
    # then clear the principal. The clear happens in a finally block so that a
    # stale principal never leaks into the next request handled by this thread
    # if the wrapped method raises.
    try:
        value = method(self, *args, **kwargs)
    finally:
        principal_manager.clear_principal()
    return value
def POST(self):
    """
    Generate an admin certificate for the currently authenticated user and
    return the key/certificate pair in the response body.
    """
    current_user = factory.principal_manager().get_principal()
    cert_manager = factory.cert_generation_manager()
    key, certificate = cert_manager.make_admin_user_cert(current_user)
    return self.ok({"key": key, "certificate": certificate})
def import_uploaded_unit(repo_id, unit_type_id, unit_key, unit_metadata, upload_id): """ Called to trigger the importer's handling of an uploaded unit. This should not be called until the bits have finished uploading. The importer is then responsible for moving the file to the correct location, adding it to the Pulp server's inventory, and associating it with the repository. This call will first call is_valid_upload to check the integrity of the destination repository. See that method's documentation for exception possibilities. :param repo_id: identifies the repository into which the unit is uploaded :type repo_id: str :param unit_type_id: type of unit being uploaded :type unit_type_id: str :param unit_key: unique identifier for the unit (user-specified) :type unit_key: dict :param unit_metadata: any user-specified information about the unit :type unit_metadata: dict :param upload_id: upload being imported :type upload_id: str :return: A SyncReport indicating the success or failure of the upload :rtype: pulp.plugins.model.SyncReport """ # If it doesn't raise an exception, it's good to go ContentUploadManager.is_valid_upload(repo_id, unit_type_id) repo_query_manager = manager_factory.repo_query_manager() importer_manager = manager_factory.repo_importer_manager() repo = repo_query_manager.find_by_id(repo_id) repo_importer = importer_manager.get_importer(repo_id) try: importer_instance, plugin_config = plugin_api.get_importer_by_id( repo_importer['importer_type_id']) except plugin_exceptions.PluginNotFound: raise MissingResource(repo_id), None, sys.exc_info()[2] # Assemble the data needed for the import conduit = UploadConduit( repo_id, repo_importer['id'], RepoContentUnit.OWNER_TYPE_USER, manager_factory.principal_manager().get_principal()['login']) call_config = PluginCallConfiguration(plugin_config, repo_importer['config'], None) transfer_repo = repo_common_utils.to_transfer_repo(repo) transfer_repo.working_dir = repo_common_utils.importer_working_dir( 
repo_importer['importer_type_id'], repo_id, mkdir=True) file_path = ContentUploadManager._upload_file_path(upload_id) # Invoke the importer try: return importer_instance.upload_unit(transfer_repo, unit_type_id, unit_key, unit_metadata, file_path, conduit, call_config) except PulpException: msg = _( 'Error from the importer while importing uploaded unit to repository [%(r)s]' ) msg = msg % {'r': repo_id} logger.exception(msg) raise except Exception, e: msg = _( 'Error from the importer while importing uploaded unit to repository [%(r)s]' ) msg = msg % {'r': repo_id} logger.exception(msg) raise PulpExecutionException(e), None, sys.exc_info()[2]
def POST(self):
    """
    Issue an admin key/certificate pair for the authenticated principal.
    """
    principal = factory.principal_manager().get_principal()
    key, certificate = factory.cert_generation_manager().make_admin_user_cert(principal)
    key_cert = {"key": key, "certificate": certificate}
    return self.ok(key_cert)