def POST(self, repo_id, importer_id):
    """
    Create a new scheduled sync for the repository's importer.

    @param repo_id: unique id of the repository
    @param importer_id: unique id of the importer; must match the importer
                        currently attached to the repository
    @return: 201 response with the serialized schedule
    @raise exceptions.MissingResource: if importer_id is not the repo's importer
    """
    importer_manager = manager_factory.repo_importer_manager()
    importer = importer_manager.get_importer(repo_id)
    if importer_id != importer['id']:
        raise exceptions.MissingResource(importer=importer_id)
    # 'override_config' is destined for the sync call itself; everything else
    # in the request body is schedule configuration
    schedule_options = self.params()
    sync_options = {'override_config': schedule_options.pop('override_config', {})}
    schedule_manager = manager_factory.schedule_manager()
    resources = {dispatch_constants.RESOURCE_REPOSITORY_TYPE:
                     {repo_id: dispatch_constants.RESOURCE_READ_OPERATION},
                 dispatch_constants.RESOURCE_REPOSITORY_IMPORTER_TYPE:
                     {importer_id: dispatch_constants.RESOURCE_UPDATE_OPERATION}}
    weight = pulp_config.config.getint('tasks', 'create_weight')
    tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id),
            resource_tag(dispatch_constants.RESOURCE_REPOSITORY_IMPORTER_TYPE, importer_id),
            action_tag('create_sync_schedule')]
    call_request = CallRequest(schedule_manager.create_sync_schedule,
                               [repo_id, importer_id, sync_options, schedule_options],
                               resources=resources,
                               weight=weight,
                               tags=tags,
                               archive=True)
    # synchronous execution: we need the new schedule id to build the response
    schedule_id = execution.execute_sync(call_request)
    scheduler = dispatch_factory.scheduler()
    schedule = scheduler.get(schedule_id)
    obj = serialization.dispatch.scheduled_sync_obj(schedule)
    obj.update(serialization.link.child_link_obj(schedule_id))
    return self.created(obj['_href'], obj)
def create_publish_schedule(self, repo_id, distributor_id, publish_options, schedule_data):
    """
    Create a new scheduled publish for the given repository and distributor.

    @param repo_id: unique id of the repository
    @param distributor_id: unique id of the distributor on the repository
    @param publish_options: publish options; only keys in _PUBLISH_OPTION_KEYS allowed
    @param schedule_data: scheduling data; must contain a 'schedule' entry
    @return: unique id of the new schedule
    """
    # validate before touching the scheduler
    self._validate_distributor(repo_id, distributor_id)
    schedule_utils.validate_keys(publish_options, _PUBLISH_OPTION_KEYS)
    if 'schedule' not in schedule_data:
        raise pulp_exceptions.MissingValue(['schedule'])

    # assemble the call request the scheduler will execute on each run
    publish_args = [repo_id, distributor_id]
    publish_kwargs = {'overrides': publish_options['override_config']}
    request = CallRequest(publish_itinerary, publish_args, publish_kwargs, weight=0)

    # register with the scheduler and record the schedule on the distributor
    scheduler = dispatch_factory.scheduler()
    new_schedule_id = scheduler.add(request, **schedule_data)
    distributor_manager = managers_factory.repo_distributor_manager()
    distributor_manager.add_publish_schedule(repo_id, distributor_id, new_schedule_id)
    return new_schedule_id
def update_sync_schedule(self, repo_id, importer_id, schedule_id, sync_options, schedule_data):
    """
    Update an existing sync schedule.

    @param repo_id: unique id of the repository
    @param importer_id: unique id of the importer on the repository
    @param schedule_id: unique id of the schedule being updated
    @param sync_options: sync option updates; may contain 'override_config'
    @param schedule_data: schedule field updates
    """
    self._validate_importer(repo_id, importer_id)
    updates = copy.copy(schedule_data)
    scheduler = dispatch_factory.scheduler()
    # only rebuild the stored call request when sync options actually changed
    if sync_options:
        existing = scheduler.get(schedule_id)
        request = existing['call_request']
        if 'override_config' in sync_options:
            request.kwargs = {'overrides': sync_options['override_config']}
            updates['call_request'] = request
    scheduler.update(schedule_id, **updates)
def test_update_schedule(self):
    # create a publish schedule to be updated
    options = {'override_config': {}}
    data = {'schedule': 'R1/P1DT'}
    sched_id = self.schedule_manager.create_publish_schedule(self.repo_id,
                                                             self.distributor_id,
                                                             options,
                                                             data)
    scheduler = dispatch_factory.scheduler()
    report = scheduler.get(sched_id)
    # sanity check the freshly created schedule
    self.assertTrue(sched_id == report['_id'])
    self.assertTrue(options['override_config'] == report['call_request'].kwargs['overrides'])
    self.assertTrue(data['schedule'] == report['schedule'])
    # apply an update and verify every field round-trips
    updated_options = {'override_config': {'option_1': 'new_option'}}
    updated_data = {'schedule': 'R4/PT24H', 'failure_threshold': 4}
    self.schedule_manager.update_publish_schedule(self.repo_id, self.distributor_id,
                                                  sched_id, updated_options, updated_data)
    report = scheduler.get(sched_id)
    self.assertTrue(sched_id == report['_id'])
    self.assertTrue(updated_options['override_config'] == report['call_request'].kwargs['overrides'])
    self.assertTrue(updated_data['schedule'] == report['schedule'])
    self.assertTrue(updated_data['failure_threshold'] == report['failure_threshold'])
def PUT(self, repo_id, distributor_id, schedule_id):
    """
    Update a scheduled publish for the repository's distributor.

    @param repo_id: unique id of the repository
    @param distributor_id: unique id of the distributor
    @param schedule_id: unique id of the schedule being updated
    @return: 200 response with the serialized, updated schedule
    @raise exceptions.MissingResource: if the schedule is not on the distributor
    """
    distributor_manager = manager_factory.repo_distributor_manager()
    schedule_list = distributor_manager.list_publish_schedules(repo_id, distributor_id)
    if schedule_id not in schedule_list:
        raise exceptions.MissingResource(repo=repo_id, distributor=distributor_id, publish_schedule=schedule_id)
    # split the request body: 'override_config' belongs to the publish call,
    # everything else updates the schedule itself
    publish_update = {}
    schedule_update = self.params()
    if 'override_config' in schedule_update:
        publish_update['override_config'] = schedule_update.pop('override_config')
    schedule_manager = manager_factory.schedule_manager()
    resources = {dispatch_constants.RESOURCE_REPOSITORY_TYPE:
                     {repo_id: dispatch_constants.RESOURCE_READ_OPERATION},
                 dispatch_constants.RESOURCE_REPOSITORY_DISTRIBUTOR_TYPE:
                     {distributor_id: dispatch_constants.RESOURCE_READ_OPERATION},
                 dispatch_constants.RESOURCE_SCHEDULE_TYPE:
                     {schedule_id: dispatch_constants.RESOURCE_UPDATE_OPERATION}}
    tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id),
            resource_tag(dispatch_constants.RESOURCE_REPOSITORY_DISTRIBUTOR_TYPE, distributor_id),
            resource_tag(dispatch_constants.RESOURCE_SCHEDULE_TYPE, schedule_id),
            action_tag('update_publish_schedule')]
    call_request = CallRequest(schedule_manager.update_publish_schedule,
                               [repo_id, distributor_id, schedule_id, publish_update, schedule_update],
                               resources=resources,
                               tags=tags,
                               archive=True)
    execution.execute(call_request)
    # re-read the schedule so the response reflects the applied update
    scheduler = dispatch_factory.scheduler()
    schedule = scheduler.get(schedule_id)
    obj = serialization.dispatch.scheduled_publish_obj(schedule)
    obj.update(serialization.link.current_link_obj())
    return self.ok(obj)
def create_sync_schedule(self, repo_id, importer_id, sync_options, schedule_data): """ Create a new sync schedule for a given repository using the given importer. @param repo_id: @param importer_id: @param sync_options: @param schedule_data: @return: """ # validate the input self._validate_importer(repo_id, importer_id) self._validate_keys(sync_options, _SYNC_OPTION_KEYS) if 'schedule' not in schedule_data: raise pulp_exceptions.MissingValue(['schedule']) # build the sync call request sync_manager = managers_factory.repo_sync_manager() args = [repo_id] kwargs = {'sync_config_override': sync_options['override_config']} weight = pulp_config.config.getint('tasks', 'sync_weight') tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id), resource_tag(dispatch_constants.RESOURCE_REPOSITORY_IMPORTER_TYPE, importer_id)] call_request = CallRequest(sync_manager.sync, args, kwargs, weight=weight, tags=tags, archive=True) call_request.reads_resource(dispatch_constants.RESOURCE_REPOSITORY_IMPORTER_TYPE, importer_id) call_request.updates_resource(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id) call_request.add_life_cycle_callback(dispatch_constants.CALL_ENQUEUE_LIFE_CYCLE_CALLBACK, sync_manager.prep_sync) # schedule the sync scheduler = dispatch_factory.scheduler() schedule_id = scheduler.add(call_request, **schedule_data) importer_manager = managers_factory.repo_importer_manager() importer_manager.add_sync_schedule(repo_id, schedule_id) return schedule_id
def create_sync_schedule(self, repo_id, importer_id, sync_options, schedule_data):
    """
    Create a new sync schedule for a given repository using the given importer.

    @param repo_id: unique id of the repository
    @param importer_id: unique id of the importer on the repository
    @param sync_options: sync options; only keys in _SYNC_OPTION_KEYS allowed
    @param schedule_data: scheduling data; must contain a 'schedule' entry
    @return: unique id of the new schedule
    """
    # validate before touching the scheduler
    self._validate_importer(repo_id, importer_id)
    schedule_utils.validate_keys(sync_options, _SYNC_OPTION_KEYS)
    if 'schedule' not in schedule_data:
        raise pulp_exceptions.MissingValue(['schedule'])

    # assemble the call request the scheduler will execute on each run
    sync_args = [repo_id]
    sync_kwargs = {'overrides': sync_options['override_config']}
    request = CallRequest(sync_with_auto_publish_itinerary, sync_args, sync_kwargs, weight=0)

    # register with the scheduler and record the schedule on the importer
    scheduler = dispatch_factory.scheduler()
    new_schedule_id = scheduler.add(request, **schedule_data)
    importer_manager = managers_factory.repo_importer_manager()
    importer_manager.add_sync_schedule(repo_id, new_schedule_id)
    return new_schedule_id
def PUT(self, repo_id, importer_id, schedule_id):
    """
    Update a scheduled sync for the repository's importer.

    @param repo_id: unique id of the repository
    @param importer_id: unique id of the importer
    @param schedule_id: unique id of the schedule being updated
    @return: 200 response with the serialized, updated schedule
    @raise exceptions.MissingResource: if the schedule is not on the importer
    """
    importer_manager = manager_factory.repo_importer_manager()
    schedule_list = importer_manager.list_sync_schedules(repo_id)
    if schedule_id not in schedule_list:
        # BUG FIX: the missing resource was previously mislabeled
        # 'publish_schedule'; this endpoint deals with sync schedules
        raise exceptions.MissingResource(repo=repo_id, importer=importer_id, sync_schedule=schedule_id)
    # 'override_config' belongs to the sync call; the rest updates the schedule
    sync_updates = {}
    schedule_updates = self.params()
    if 'override_config' in schedule_updates:
        sync_updates['override_config'] = schedule_updates.pop('override_config')
    schedule_manager = manager_factory.schedule_manager()
    tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id),
            resource_tag(dispatch_constants.RESOURCE_REPOSITORY_IMPORTER_TYPE, importer_id),
            resource_tag(dispatch_constants.RESOURCE_SCHEDULE_TYPE, schedule_id),
            action_tag('update_sync_schedule')]
    call_request = CallRequest(schedule_manager.update_sync_schedule,
                               [repo_id, importer_id, schedule_id, sync_updates, schedule_updates],
                               tags=tags,
                               archive=True)
    call_request.reads_resource(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id)
    call_request.reads_resource(dispatch_constants.RESOURCE_REPOSITORY_IMPORTER_TYPE, importer_id)
    call_request.updates_resource(dispatch_constants.RESOURCE_SCHEDULE_TYPE, schedule_id)
    execution.execute(call_request)
    # re-read the schedule so the response reflects the applied update
    scheduler = dispatch_factory.scheduler()
    schedule = scheduler.get(schedule_id)
    obj = serialization.dispatch.scheduled_sync_obj(schedule)
    obj.update(serialization.link.current_link_obj())
    return self.ok(obj)
def POST(self, consumer_id):
    """
    Create a scheduled unit uninstall for a consumer.

    @param consumer_id: unique id of the consumer
    @return: 201 response with the serialized schedule
    @raise MissingValue: if the request body contains no 'units'
    """
    consumer_manager = managers.consumer_manager()
    # raises MissingResource if the consumer does not exist
    consumer_manager.get_consumer(consumer_id)
    # pull the uninstall-specific fields out of the request body; the
    # remainder is schedule configuration
    schedule_data = self.params()
    units = schedule_data.pop('units', None)
    uninstall_options = {'options': schedule_data.pop('options', {})}
    if not units:
        raise MissingValue(['units'])
    schedule_manager = managers.schedule_manager()
    weight = pulp_config.config.getint('tasks', 'create_weight')
    tags = [resource_tag(dispatch_constants.RESOURCE_CONSUMER_TYPE, consumer_id),
            action_tag('create_unit_uninstall_schedule')]
    call_request = CallRequest(schedule_manager.create_unit_uninstall_schedule,
                               [consumer_id, units, uninstall_options, schedule_data],
                               weight=weight,
                               tags=tags,
                               archive=True)
    call_request.reads_resource(dispatch_constants.RESOURCE_CONSUMER_TYPE, consumer_id)
    # synchronous execution: we need the new schedule id to build the response
    schedule_id = execution.execute_sync(call_request)
    scheduler = dispatch_factory.scheduler()
    scheduled_call = scheduler.get(schedule_id)
    scheduled_obj = serialization.dispatch.scheduled_unit_management_obj(scheduled_call)
    scheduled_obj.update(serialization.link.child_link_obj(schedule_id))
    return self.created(scheduled_obj['_href'], scheduled_obj)
def test_update_schedule(self):
    # create an uninstall schedule to be updated
    units = copy.copy(_TEST_UNITS)
    uninstall_options = {'options': {}}
    schedule_data = {'schedule': 'R1/P1DT'}
    schedule_id = self.schedule_manager.create_unit_uninstall_schedule(self.consumer_id, units, uninstall_options, schedule_data)
    scheduler = dispatch_factory.scheduler()
    scheduled_call = scheduler.get(schedule_id)
    # sanity check the created schedule before updating it
    self.assertFalse(scheduled_call is None)
    self.assertTrue(schedule_data['schedule'] == scheduled_call['schedule'])
    self.assertTrue(self.consumer_id in scheduled_call['call_request'].args)
    self.assertTrue(units == scheduled_call['call_request'].kwargs['units'])
    self.assertTrue(uninstall_options['options'] == scheduled_call['call_request'].kwargs['options'])
    # mutate the inputs in place and push the update
    units.append({'type_id': 'mock-type', 'unit_key': {'id': 'redis'}})
    uninstall_options['options'] = {'option': 'value'}
    schedule_data['schedule'] = 'R3/P1DT'
    self.schedule_manager.update_unit_uninstall_schedule(self.consumer_id, schedule_id, units, uninstall_options, schedule_data)
    # verify every updated field round-trips through the scheduler
    updated_call = scheduler.get(schedule_id)
    self.assertFalse(updated_call is None)
    self.assertTrue(schedule_data['schedule'] == updated_call['schedule'],
                    '%s != %s' % (schedule_data['schedule'], updated_call['schedule']))
    self.assertTrue(self.consumer_id in updated_call['call_request'].args)
    self.assertTrue(units == updated_call['call_request'].kwargs['units'])
    self.assertTrue(uninstall_options['options'] == updated_call['call_request'].kwargs['options'])
def PUT(self, consumer_id, schedule_id):
    """
    Update a scheduled unit uninstall for a consumer.

    @param consumer_id: unique id of the consumer
    @param schedule_id: unique id of the schedule being updated
    @return: 200 response with the serialized, updated schedule
    """
    # raises MissingResource if the consumer does not exist
    managers.consumer_manager().get_consumer(consumer_id)
    # split the request body: units/options feed the uninstall call, the
    # remainder updates the schedule itself
    body = self.params()
    uninstall_options = None
    units = body.pop('units', None)
    if 'options' in body:
        uninstall_options = {'options': body.pop('options')}
    schedule_manager = managers.schedule_manager()
    task_tags = [resource_tag(dispatch_constants.RESOURCE_CONSUMER_TYPE, consumer_id),
                 resource_tag(dispatch_constants.RESOURCE_SCHEDULE_TYPE, schedule_id),
                 action_tag('update_unit_uninstall_schedule')]
    request = CallRequest(schedule_manager.update_unit_uninstall_schedule,
                          [consumer_id, schedule_id, units, uninstall_options, body],
                          tags=task_tags,
                          archive=True)
    request.reads_resource(dispatch_constants.RESOURCE_CONSUMER_TYPE, consumer_id)
    request.updates_resource(dispatch_constants.RESOURCE_SCHEDULE_TYPE, schedule_id)
    execution.execute(request)
    # re-read the schedule so the response reflects the applied update
    updated_call = dispatch_factory.scheduler().get(schedule_id)
    serialized = serialization.dispatch.scheduled_unit_management_obj(updated_call)
    serialized.update(serialization.link.current_link_obj())
    return self.ok(serialized)
def update_unit_install_schedule(self, consumer_id, schedule_id, units=None, install_options=None, schedule_data=None):
    """
    Update an existing schedule for installing content units on a consumer.

    @param consumer_id: unique id for the consumer
    @param schedule_id: unique id for the schedule
    @param units: optional list of units to install
    @param install_options: optional options to pass to the install manager
    @param schedule_data: optional schedule updates
    """
    self._validate_consumer(consumer_id)
    updates = copy.copy(schedule_data) or {}
    scheduler = dispatch_factory.scheduler()
    existing = scheduler.get(schedule_id)
    request = existing['call_request']
    # patch the stored call request with whichever pieces were supplied
    request_changed = False
    if units is not None:
        request.kwargs['units'] = units
        request_changed = True
    if install_options is not None and 'options' in install_options:
        request.kwargs['options'] = install_options['options']
        request_changed = True
    if request_changed:
        updates['call_request'] = request
    scheduler.update(schedule_id, **updates)
def update_sync_schedule(self, repo_id, importer_id, schedule_id, sync_options, schedule_data): """ Update an existing sync schedule. @param repo_id: @param importer_id: @param schedule_id: @param sync_options: @param schedule_data: @return: """ # validate the input self._validate_importer(repo_id, importer_id) schedule_updates = copy.copy(schedule_data) # prepare the call request if there are changes to the sync itself scheduler = dispatch_factory.scheduler() if sync_options: report = scheduler.get(schedule_id) call_request = report['call_request'] if 'override_config' in sync_options: call_request.kwargs = {'sync_config_override': sync_options['override_config']} schedule_updates['call_request'] = call_request # update the scheduled sync scheduler.update(schedule_id, **schedule_updates)
def test_update_schedule(self):
    # create a publish schedule to be updated
    publish_options = {'override_config': {}}
    schedule_data = {'schedule': 'R1/P1DT'}
    schedule_id = self.schedule_manager.create_publish_schedule(self.repo_id, self.distributor_id, publish_options, schedule_data)
    scheduler = dispatch_factory.scheduler()
    schedule_report = scheduler.get(schedule_id)
    # sanity check the freshly created schedule
    self.assertTrue(schedule_id == schedule_report['_id'])
    self.assertTrue(publish_options['override_config'] == schedule_report['call_request'].kwargs['publish_config_override'])
    self.assertTrue(schedule_data['schedule'] == schedule_report['schedule'])
    # apply an update and verify every field round-trips
    new_publish_options = {'override_config': {'option_1': 'new_option'}}
    new_schedule_data = {'schedule': 'R4/PT24H', 'failure_threshold': 4}
    self.schedule_manager.update_publish_schedule(self.repo_id, self.distributor_id, schedule_id, new_publish_options, new_schedule_data)
    schedule_report = scheduler.get(schedule_id)
    self.assertTrue(schedule_id == schedule_report['_id'])
    self.assertTrue(new_publish_options['override_config'] == schedule_report['call_request'].kwargs['publish_config_override'])
    self.assertTrue(new_schedule_data['schedule'] == schedule_report['schedule'])
    self.assertTrue(new_schedule_data['failure_threshold'] == schedule_report['failure_threshold'])
def POST(self, repo_id, distributor_id):
    """
    Create a new scheduled publish for the repository's distributor.

    @param repo_id: unique id of the repository
    @param distributor_id: unique id of the distributor
    @return: 201 response with the serialized schedule
    """
    distributor_manager = manager_factory.repo_distributor_manager()
    # raises MissingResource if the distributor is not on the repository
    distributor_manager.get_distributor(repo_id, distributor_id)
    # 'override_config' is destined for the publish call itself; everything
    # else in the request body is schedule configuration
    schedule_options = self.params()
    publish_options = {'override_config': schedule_options.pop('override_config', {})}
    schedule_manager = manager_factory.schedule_manager()
    resources = {dispatch_constants.RESOURCE_REPOSITORY_TYPE:
                     {repo_id: dispatch_constants.RESOURCE_READ_OPERATION},
                 dispatch_constants.RESOURCE_REPOSITORY_DISTRIBUTOR_TYPE:
                     {distributor_id: dispatch_constants.RESOURCE_UPDATE_OPERATION}}
    weight = pulp_config.config.getint('tasks', 'create_weight')
    tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id),
            resource_tag(dispatch_constants.RESOURCE_REPOSITORY_DISTRIBUTOR_TYPE, distributor_id),
            action_tag('create_publish_schedule')]
    call_request = CallRequest(schedule_manager.create_publish_schedule,
                               [repo_id, distributor_id, publish_options, schedule_options],
                               resources=resources,
                               weight=weight,
                               tags=tags,
                               archive=True)
    # synchronous execution: we need the new schedule id to build the response
    schedule_id = execution.execute_sync(call_request)
    scheduler = dispatch_factory.scheduler()
    schedule = scheduler.get(schedule_id)
    obj = serialization.dispatch.scheduled_publish_obj(schedule)
    obj.update(serialization.link.child_link_obj(schedule_id))
    return self.created(obj['_href'], obj)
def update_publish_schedule(self, repo_id, distributor_id, schedule_id, publish_options, schedule_data): """ Update an existing scheduled publish for the given repository and distributor. @param repo_id: @param distributor_id: @param schedule_id: @param publish_options: @param schedule_data: @return: """ # validate the input self._validate_distributor(repo_id, distributor_id) schedule_updates = copy.copy(schedule_data) # prepare the call request if there are changes to the publish itself scheduler = dispatch_factory.scheduler() if publish_options: report = scheduler.get(schedule_id) call_request = report['call_request'] if 'override_config' in publish_options: call_request.kwargs = {'publish_config_override': publish_options['override_config']} schedule_updates['call_request'] = call_request # update the scheduled publish scheduler.update(schedule_id, **schedule_updates)
def create_unit_install_schedule(self, consumer_id, units, install_options, schedule_data):
    """
    Create a schedule for installing content units on a consumer.

    @param consumer_id: unique id for the consumer
    @param units: list of unit type and unit key dicts
    @param install_options: options to pass to the install manager; only keys
                            in _UNIT_INSTALL_OPTION_KEYS allowed
    @param schedule_data: scheduling data; must contain a 'schedule' entry
    @return: schedule id
    """
    self._validate_consumer(consumer_id)
    self._validate_keys(install_options, _UNIT_INSTALL_OPTION_KEYS)
    if 'schedule' not in schedule_data:
        raise pulp_exceptions.MissingValue(['schedule'])
    # build the install call request the scheduler will run
    manager = managers_factory.consumer_agent_manager()
    args = [consumer_id]
    kwargs = {'units': units, 'options': install_options.get('options', {})}
    weight = pulp_config.config.getint('tasks', 'consumer_content_weight')
    tags = [resource_tag(dispatch_constants.RESOURCE_CONSUMER_TYPE, consumer_id),
            action_tag('unit_install'),
            action_tag('scheduled_unit_install')]
    call_request = CallRequest(manager.install_content, args, kwargs, weight=weight, tags=tags, archive=True)
    call_request.reads_resource(dispatch_constants.RESOURCE_CONSUMER_TYPE, consumer_id)
    scheduler = dispatch_factory.scheduler()
    schedule_id = scheduler.add(call_request, **schedule_data)
    return schedule_id
def update_publish_schedule(self, repo_id, distributor_id, schedule_id, publish_options, schedule_data):
    """
    Update an existing scheduled publish for the given repository and distributor.

    @param repo_id: unique id of the repository
    @param distributor_id: unique id of the distributor on the repository
    @param schedule_id: unique id of the schedule being updated
    @param publish_options: publish option updates; may contain 'override_config'
    @param schedule_data: schedule field updates
    """
    self._validate_distributor(repo_id, distributor_id)
    updates = copy.copy(schedule_data)
    scheduler = dispatch_factory.scheduler()
    # only rebuild the stored call request when publish options actually changed
    if publish_options:
        existing = scheduler.get(schedule_id)
        request = existing['call_request']
        if 'override_config' in publish_options:
            request.kwargs = {'overrides': publish_options['override_config']}
            updates['call_request'] = request
    scheduler.update(schedule_id, **updates)
def create_publish_schedule(self, repo_id, distributor_id, publish_options, schedule_data): """ Create a new scheduled publish for the given repository and distributor. @param repo_id: @param distributor_id: @param publish_options: @param schedule_data: @return: """ # validate the input self._validate_distributor(repo_id, distributor_id) self._validate_keys(publish_options, _PUBLISH_OPTION_KEYS) if 'schedule' not in schedule_data: raise pulp_exceptions.MissingValue(['schedule']) # build the publish call publish_manager = managers_factory.repo_publish_manager() args = [repo_id, distributor_id] kwargs = {'publish_config_override': publish_options['override_config']} weight = pulp_config.config.getint('tasks', 'publish_weight') tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id), resource_tag(dispatch_constants.RESOURCE_REPOSITORY_DISTRIBUTOR_TYPE, distributor_id)] call_request = CallRequest(publish_manager.publish, args, kwargs, weight=weight, tags=tags, archive=True) call_request.reads_resource(dispatch_constants.RESOURCE_REPOSITORY_DISTRIBUTOR_TYPE, distributor_id) call_request.updates_resource(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id) call_request.add_life_cycle_callback(dispatch_constants.CALL_ENQUEUE_LIFE_CYCLE_CALLBACK, publish_manager.prep_publish) # schedule the publish scheduler = dispatch_factory.scheduler() schedule_id = scheduler.add(call_request, **schedule_data) distributor_manager = managers_factory.repo_distributor_manager() distributor_manager.add_publish_schedule(repo_id, distributor_id, schedule_id) return schedule_id
def GET(self, repo_id, importer_id, schedule_id):
    """
    Look up a single scheduled sync for the repository's importer.

    @param repo_id: unique id of the repository
    @param importer_id: unique id of the importer
    @param schedule_id: unique id of the schedule
    @return: 200 response with the serialized schedule
    @raise exceptions.MissingResource: if the schedule is not on the importer
    """
    importer_manager = manager_factory.repo_importer_manager()
    schedule_list = importer_manager.list_sync_schedules(repo_id)
    if schedule_id not in schedule_list:
        # BUG FIX: the missing resource was previously mislabeled
        # 'publish_schedule'; this endpoint deals with sync schedules
        raise exceptions.MissingResource(repo=repo_id, importer=importer_id, sync_schedule=schedule_id)
    scheduler = dispatch_factory.scheduler()
    schedule = scheduler.get(schedule_id)
    obj = serialization.dispatch.scheduled_sync_obj(schedule)
    obj.update(serialization.link.current_link_obj())
    return self.ok(obj)
def delete_unit_install_schedule(self, consumer_id, schedule_id):
    """
    Delete an existing schedule for installing content units on a consumer.

    @param consumer_id: unique id of the consumer
    @param schedule_id: unique id of the schedule
    """
    # the consumer must exist; the schedule itself needs no prior lookup
    self._validate_consumer(consumer_id)
    dispatch_factory.scheduler().remove(schedule_id)
def _delete_all_schedules(self, management_action_name, consumer_id):
    # Remove every schedule of the given management action type for a
    # consumer; schedules are located via their consumer + action tags.
    self._validate_consumer(consumer_id)
    scheduler = dispatch_factory.scheduler()
    by_consumer = resource_tag(dispatch_constants.RESOURCE_CONSUMER_TYPE, consumer_id)
    by_action = action_tag(management_action_name)
    for report in scheduler.find(by_consumer, by_action):
        scheduler.remove(report['call_report']['schedule_id'])
def test_delete_schedule(self):
    # create a schedule and prove it exists
    data = {'schedule': 'R1/P1DT'}
    sched_id = self.schedule_manager.create_unit_uninstall_schedule(self.consumer_id, _TEST_UNITS, {}, data)
    scheduler = dispatch_factory.scheduler()
    self.assertFalse(scheduler.get(sched_id) is None)
    # delete it; the scheduler should no longer know about it
    self.schedule_manager.delete_unit_uninstall_schedule(self.consumer_id, sched_id)
    self.assertRaises(pulp_exceptions.MissingResource, scheduler.get, sched_id)
def scheduler_complete_callback(call_request, call_report):
    """
    Call back for call request results and rescheduling.

    @param call_request: the call request that completed
    @param call_report: report for the completed call; carries the schedule id
    """
    scheduled_call_collection = ScheduledCall.get_collection()
    schedule_id = call_report.schedule_id
    scheduled_call = scheduled_call_collection.find_one({'_id': ObjectId(schedule_id)})
    # BUG FIX: the schedule may have been deleted while the call was running,
    # in which case find_one returns None and there is nothing to update
    if scheduled_call is None:
        return
    scheduler = dispatch_factory.scheduler()
    scheduler.update_last_run(scheduled_call, call_report)
    scheduler.update_next_run(scheduled_call)
def delete_repo(self, repo_id): """ Deletes the given repository, optionally requesting the associated importer clean up any content in the repository. :param repo_id: identifies the repo being deleted :type repo_id: str :raise MissingResource: if the given repo does not exist :raise OperationFailed: if any part of the delete process fails; the exception will contain information on which sections failed """ # Validation found = Repo.get_collection().find_one({'id': repo_id}) if found is None: raise MissingResource(repo_id) # With so much going on during a delete, it's possible that a few things # could go wrong while others are successful. We track lesser errors # that shouldn't abort the entire process until the end and then raise # an exception describing the incompleteness of the delete. The exception # arguments are captured as the second element in the tuple, but the user # will have to look at the server logs for more information. error_tuples = [] # tuple of failed step and exception arguments # Remove and scheduled activities scheduler = dispatch_factory.scheduler() importer_manager = manager_factory.repo_importer_manager() importers = importer_manager.get_importers(repo_id) if importers: for schedule_id in importer_manager.list_sync_schedules(repo_id): scheduler.remove(schedule_id) distributor_manager = manager_factory.repo_distributor_manager() for distributor in distributor_manager.get_distributors(repo_id): for schedule_id in distributor_manager.list_publish_schedules( repo_id, distributor['id']): scheduler.remove(schedule_id) # Inform the importer importer_coll = RepoImporter.get_collection() repo_importer = importer_coll.find_one({'repo_id': repo_id}) if repo_importer is not None: try: importer_manager.remove_importer(repo_id) except Exception, e: _LOG.exception( 'Error received removing importer [%s] from repo [%s]' % (repo_importer['importer_type_id'], repo_id)) error_tuples.append((_('Importer Delete Error'), e.args))
def GET(self, consumer_id, schedule_id):
    """
    Look up a single scheduled unit uninstall for a consumer.
    """
    # raises MissingResource if the consumer does not exist
    managers.consumer_manager().get_consumer(consumer_id)
    scheduled_call = dispatch_factory.scheduler().get(schedule_id)
    # the schedule must actually belong to this consumer
    if consumer_id not in scheduled_call['call_request'].args:
        raise MissingResource(consumer=consumer_id, unit_uninstall_schedule=schedule_id)
    serialized = serialization.dispatch.scheduled_unit_management_obj(scheduled_call)
    serialized.update(serialization.link.current_link_obj())
    return self.ok(serialized)
def scheduler_complete_callback(call_request, call_report):
    """
    Call back for call request results and rescheduling
    """
    collection = ScheduledCall.get_collection()
    scheduled_call = collection.find_one({'_id': ObjectId(call_report.schedule_id)})
    if scheduled_call is None:
        # schedule was deleted while call was running
        return
    dispatch_factory.scheduler().update_last_run(scheduled_call, call_report)
def test_create_schedule(self):
    # create a schedule and verify it is retrievable from the scheduler
    uninstall_options = {'options': {}}
    schedule_data = {'schedule': 'R1/P1DT'}
    schedule_id = self.schedule_manager.create_unit_uninstall_schedule(self.consumer_id, _TEST_UNITS, uninstall_options, schedule_data)
    scheduler = dispatch_factory.scheduler()
    scheduled_call = scheduler.get(schedule_id)
    self.assertFalse(scheduled_call is None)
    # the stored call request should carry the consumer, units, and options
    self.assertTrue(schedule_data['schedule'] == scheduled_call['schedule'])
    self.assertTrue(self.consumer_id in scheduled_call['call_request'].args)
    self.assertTrue(_TEST_UNITS == scheduled_call['call_request'].kwargs['units'])
    self.assertTrue(uninstall_options['options'] == scheduled_call['call_request'].kwargs['options'])
def PUT(self, repo_id, distributor_id, schedule_id):
    """
    Update a scheduled publish for the repository's distributor.

    @param repo_id: unique id of the repository
    @param distributor_id: unique id of the distributor
    @param schedule_id: unique id of the schedule being updated
    @return: 200 response with the serialized, updated schedule
    @raise exceptions.MissingResource: if the schedule is not on the distributor
    """
    distributor_manager = manager_factory.repo_distributor_manager()
    schedule_list = distributor_manager.list_publish_schedules(
        repo_id, distributor_id)
    if schedule_id not in schedule_list:
        raise exceptions.MissingResource(repo=repo_id,
                                         distributor=distributor_id,
                                         publish_schedule=schedule_id)
    # split the request body: 'override_config' belongs to the publish call,
    # everything else updates the schedule itself
    publish_update = {}
    schedule_update = self.params()
    if 'override_config' in schedule_update:
        publish_update['override_config'] = schedule_update.pop(
            'override_config')
    schedule_manager = manager_factory.schedule_manager()
    resources = {
        dispatch_constants.RESOURCE_REPOSITORY_TYPE: {
            repo_id: dispatch_constants.RESOURCE_READ_OPERATION
        },
        dispatch_constants.RESOURCE_REPOSITORY_DISTRIBUTOR_TYPE: {
            distributor_id: dispatch_constants.RESOURCE_READ_OPERATION
        },
        dispatch_constants.RESOURCE_SCHEDULE_TYPE: {
            schedule_id: dispatch_constants.RESOURCE_UPDATE_OPERATION
        }
    }
    tags = [
        resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id),
        resource_tag(
            dispatch_constants.RESOURCE_REPOSITORY_DISTRIBUTOR_TYPE,
            distributor_id),
        resource_tag(dispatch_constants.RESOURCE_SCHEDULE_TYPE, schedule_id),
        action_tag('update_publish_schedule')
    ]
    call_request = CallRequest(schedule_manager.update_publish_schedule, [
        repo_id, distributor_id, schedule_id, publish_update, schedule_update
    ],
                               resources=resources,
                               tags=tags,
                               archive=True)
    execution.execute(call_request)
    # re-read the schedule so the response reflects the applied update
    scheduler = dispatch_factory.scheduler()
    schedule = scheduler.get(schedule_id)
    obj = serialization.dispatch.scheduled_publish_obj(schedule)
    obj.update(serialization.link.current_link_obj())
    return self.ok(obj)
def delete_repo(self, repo_id): """ Deletes the given repository, optionally requesting the associated importer clean up any content in the repository. :param repo_id: identifies the repo being deleted :type repo_id: str :raise MissingResource: if the given repo does not exist :raise OperationFailed: if any part of the delete process fails; the exception will contain information on which sections failed """ # Validation found = Repo.get_collection().find_one({'id' : repo_id}) if found is None: raise MissingResource(repo_id) # With so much going on during a delete, it's possible that a few things # could go wrong while others are successful. We track lesser errors # that shouldn't abort the entire process until the end and then raise # an exception describing the incompleteness of the delete. The exception # arguments are captured as the second element in the tuple, but the user # will have to look at the server logs for more information. error_tuples = [] # tuple of failed step and exception arguments # Remove any scheduled activities scheduler = dispatch_factory.scheduler() importer_manager = manager_factory.repo_importer_manager() importers = importer_manager.get_importers(repo_id) if importers: for schedule_id in importer_manager.list_sync_schedules(repo_id): scheduler.remove(schedule_id) distributor_manager = manager_factory.repo_distributor_manager() for distributor in distributor_manager.get_distributors(repo_id): for schedule_id in distributor_manager.list_publish_schedules(repo_id, distributor['id']): scheduler.remove(schedule_id) # Inform the importer importer_coll = RepoImporter.get_collection() repo_importer = importer_coll.find_one({'repo_id' : repo_id}) if repo_importer is not None: try: importer_manager.remove_importer(repo_id) except Exception, e: _LOG.exception('Error received removing importer [%s] from repo [%s]' % (repo_importer['importer_type_id'], repo_id)) error_tuples.append( (_('Importer Delete Error'), e.args) )
def delete_all_unit_install_schedules(self, consumer_id):
    """
    Delete all unit install schedules for a consumer.

    Useful for unassociating consumers from the server.

    @param consumer_id: unique id of the consumer
    """
    self._validate_consumer(consumer_id)
    scheduler = dispatch_factory.scheduler()
    # install schedules are located via their consumer + action tags
    by_consumer = resource_tag(dispatch_constants.RESOURCE_CONSUMER_TYPE, consumer_id)
    by_action = action_tag('unit_install')
    for report in scheduler.find(by_consumer, by_action):
        scheduler.remove(report['call_report']['schedule_id'])
def GET(self, consumer_id):
    """
    List all scheduled unit uninstalls for the given consumer.
    """
    # Raises if the consumer does not exist
    managers.consumer_manager().get_consumer(consumer_id)

    scheduler = dispatch_factory.scheduler()
    matches = scheduler.find(
        resource_tag(dispatch_constants.RESOURCE_CONSUMER_TYPE, consumer_id),
        action_tag('scheduled_unit_uninstall'))

    serialized = []
    for scheduled_call in matches:
        entry = serialization.dispatch.scheduled_unit_management_obj(scheduled_call)
        entry.update(serialization.link.child_link_obj(entry['_id']))
        serialized.append(entry)
    return self.ok(serialized)
def GET(self, repo_id, distributor_id):
    """
    List every scheduled publish registered for this repo/distributor pair.
    """
    distributor_manager = manager_factory.repo_distributor_manager()
    scheduler = dispatch_factory.scheduler()

    serialized = []
    for schedule_id in distributor_manager.list_publish_schedules(repo_id, distributor_id):
        try:
            scheduled_call = scheduler.get(schedule_id)
        except exceptions.MissingResource:
            # Stale reference: the distributor still lists a schedule the
            # scheduler no longer knows about. Log and skip it.
            template = _('Repository %(r)s; Distributor %(d)s: scheduled publish does not exist: %(s)s')
            _LOG.warn(template % {'r': repo_id, 'd': distributor_id, 's': schedule_id})
        else:
            entry = serialization.dispatch.scheduled_publish_obj(scheduled_call)
            entry.update(serialization.link.child_link_obj(schedule_id))
            serialized.append(entry)
    return self.ok(serialized)
def _update_schedule(self, consumer_id, schedule_id, units=None, options=None, schedule_data=None):
    """
    Apply updates to an existing scheduled unit-management call.

    Only the pieces that are provided (units, options['options'],
    schedule_data) are changed; anything omitted is left untouched.
    """
    self._validate_consumer(consumer_id)

    # Start from a shallow copy of the caller's schedule changes so we
    # never mutate the dict that was passed in.
    updates = copy.copy(schedule_data) or {}

    scheduler = dispatch_factory.scheduler()
    call_request = scheduler.get(schedule_id)['call_request']

    if units is not None:
        call_request.kwargs['units'] = units
        updates['call_request'] = call_request
    if options is not None and 'options' in options:
        call_request.kwargs['options'] = options['options']
        updates['call_request'] = call_request

    scheduler.update(schedule_id, **updates)
def scheduler_complete_callback(call_request, call_report):
    """
    Call back for task (call_request) results and rescheduling
    """
    scheduler = dispatch_factory.scheduler()

    # The schedule id is carried in one of the call request's tags,
    # prefixed with the schedule resource type. Falls back to the first
    # tag (index 0) when no tag carries the prefix, as before.
    tag_prefix = resource_tag(dispatch_constants.RESOURCE_SCHEDULE_TYPE, '')
    index = next((i for i, tag in enumerate(call_request.tags)
                  if tag.startswith(tag_prefix)), 0)
    schedule_id = call_request.tags[index][len(tag_prefix):]

    scheduled_call = ScheduledCall.get_collection().find_one({'_id': ObjectId(schedule_id)})

    scheduler.update_last_run(scheduled_call, call_report)
    scheduler.update_next_run(scheduled_call)
def delete_sync_schedule(self, repo_id, importer_id, schedule_id):
    """
    Delete a scheduled sync from a given repository and importer.

    @param repo_id: identifies the repository
    @param importer_id: identifies the importer the schedule belongs to
    @param schedule_id: identifies the schedule to remove
    """
    # validate the input
    self._validate_importer(repo_id, importer_id)

    # drop the schedule itself, then the importer's reference to it
    dispatch_factory.scheduler().remove(schedule_id)
    managers_factory.repo_importer_manager().remove_sync_schedule(repo_id, schedule_id)
def delete_publish_schedule(self, repo_id, distributor_id, schedule_id):
    """
    Delete an existing scheduled publish from the given repository and
    distributor.

    @param repo_id: identifies the repository
    @param distributor_id: identifies the distributor the schedule belongs to
    @param schedule_id: identifies the schedule to remove
    """
    # validate the input
    self._validate_distributor(repo_id, distributor_id)

    # drop the schedule itself, then the distributor's reference to it
    dispatch_factory.scheduler().remove(schedule_id)
    distributor_manager = managers_factory.repo_distributor_manager()
    distributor_manager.remove_publish_schedule(repo_id, distributor_id, schedule_id)
def POST(self, repo_id, importer_id):
    """
    Create a new scheduled sync for the given repository/importer pair and
    return the serialized schedule with a link to the new resource.
    """
    importer_manager = manager_factory.repo_importer_manager()
    importer = importer_manager.get_importer(repo_id)
    if importer_id != importer['id']:
        raise exceptions.MissingResource(importer=importer_id)

    # Everything in the request body except 'override_config' configures the
    # schedule itself; 'override_config' is handed to the sync call.
    schedule_options = self.params()
    sync_options = {'override_config': schedule_options.pop('override_config', {})}

    schedule_manager = manager_factory.schedule_manager()
    resources = {
        dispatch_constants.RESOURCE_REPOSITORY_TYPE: {
            repo_id: dispatch_constants.RESOURCE_READ_OPERATION},
        dispatch_constants.RESOURCE_REPOSITORY_IMPORTER_TYPE: {
            importer_id: dispatch_constants.RESOURCE_UPDATE_OPERATION},
    }
    tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id),
            resource_tag(dispatch_constants.RESOURCE_REPOSITORY_IMPORTER_TYPE, importer_id),
            action_tag('create_sync_schedule')]
    call_request = CallRequest(schedule_manager.create_sync_schedule,
                               [repo_id, importer_id, sync_options, schedule_options],
                               resources=resources,
                               weight=pulp_config.config.getint('tasks', 'create_weight'),
                               tags=tags,
                               archive=True)
    schedule_id = execution.execute_sync(call_request)

    scheduler = dispatch_factory.scheduler()
    obj = serialization.dispatch.scheduled_sync_obj(scheduler.get(schedule_id))
    obj.update(serialization.link.child_link_obj(schedule_id))
    return self.created(obj['_href'], obj)
def PUT(self, repo_id, importer_id, schedule_id):
    """
    Update an existing scheduled sync for the given repository/importer
    pair and return the updated, serialized schedule.
    """
    importer_manager = manager_factory.repo_importer_manager()
    schedule_list = importer_manager.list_sync_schedules(repo_id)
    if schedule_id not in schedule_list:
        # BUGFIX: this is the sync-schedule endpoint; the original raised
        # MissingResource with a copy-pasted 'publish_schedule' key, which
        # mislabels the missing resource in the 404 payload.
        raise exceptions.MissingResource(repo=repo_id, importer=importer_id,
                                         sync_schedule=schedule_id)

    # Split out the sync's override_config; the rest of the body updates
    # the schedule itself.
    sync_updates = {}
    schedule_updates = self.params()
    if 'override_config' in schedule_updates:
        sync_updates['override_config'] = schedule_updates.pop('override_config')

    schedule_manager = manager_factory.schedule_manager()
    tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id),
            resource_tag(dispatch_constants.RESOURCE_REPOSITORY_IMPORTER_TYPE, importer_id),
            resource_tag(dispatch_constants.RESOURCE_SCHEDULE_TYPE, schedule_id),
            action_tag('update_sync_schedule')]
    call_request = CallRequest(schedule_manager.update_sync_schedule,
                               [repo_id, importer_id, schedule_id, sync_updates, schedule_updates],
                               tags=tags,
                               archive=True)
    call_request.reads_resource(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id)
    call_request.reads_resource(dispatch_constants.RESOURCE_REPOSITORY_IMPORTER_TYPE, importer_id)
    call_request.updates_resource(dispatch_constants.RESOURCE_SCHEDULE_TYPE, schedule_id)
    execution.execute(call_request)

    scheduler = dispatch_factory.scheduler()
    schedule = scheduler.get(schedule_id)
    obj = serialization.dispatch.scheduled_sync_obj(schedule)
    obj.update(serialization.link.current_link_obj())
    return self.ok(obj)
def POST(self, repo_id, distributor_id):
    """
    Create a new scheduled publish for the given repository/distributor
    pair and return the serialized schedule with a link to the new resource.
    """
    # Raises if the distributor does not exist on the repo
    manager_factory.repo_distributor_manager().get_distributor(repo_id, distributor_id)

    # 'override_config' belongs to the publish call; everything else in the
    # request body configures the schedule.
    schedule_options = self.params()
    publish_options = {'override_config': schedule_options.pop('override_config', {})}

    schedule_manager = manager_factory.schedule_manager()
    resources = {
        dispatch_constants.RESOURCE_REPOSITORY_TYPE: {
            repo_id: dispatch_constants.RESOURCE_READ_OPERATION},
        dispatch_constants.RESOURCE_REPOSITORY_DISTRIBUTOR_TYPE: {
            distributor_id: dispatch_constants.RESOURCE_UPDATE_OPERATION},
    }
    tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id),
            resource_tag(dispatch_constants.RESOURCE_REPOSITORY_DISTRIBUTOR_TYPE, distributor_id),
            action_tag('create_publish_schedule')]
    call_request = CallRequest(schedule_manager.create_publish_schedule,
                               [repo_id, distributor_id, publish_options, schedule_options],
                               resources=resources,
                               weight=pulp_config.config.getint('tasks', 'create_weight'),
                               tags=tags,
                               archive=True)
    schedule_id = execution.execute_sync(call_request)

    scheduler = dispatch_factory.scheduler()
    obj = serialization.dispatch.scheduled_publish_obj(scheduler.get(schedule_id))
    obj.update(serialization.link.child_link_obj(schedule_id))
    return self.created(obj['_href'], obj)
def _create_schedule(self, itinerary_method, action_name, consumer_id, units, options, schedule_data):
    """
    Build and register a scheduled unit-management call for a consumer.

    @return: id of the newly added schedule
    """
    self._validate_consumer(consumer_id)
    schedule_utils.validate_keys(options, _UNIT_OPTION_KEYS)
    if 'schedule' not in schedule_data:
        raise pulp_exceptions.MissingValue(['schedule'])

    call_request = CallRequest(
        itinerary_method,
        [consumer_id],
        {'units': units, 'options': options.get('options', {})},
        weight=0,
        tags=[resource_tag(dispatch_constants.RESOURCE_CONSUMER_TYPE, consumer_id),
              action_tag(action_name)])

    return dispatch_factory.scheduler().add(call_request, **schedule_data)
def GET(self, repo_id, distributor_id):
    """
    Serialize and return the scheduled publishes for this repo/distributor,
    skipping (and logging) any schedule ids the scheduler no longer holds.
    """
    scheduler = dispatch_factory.scheduler()
    distributor_manager = manager_factory.repo_distributor_manager()
    known_ids = distributor_manager.list_publish_schedules(repo_id, distributor_id)

    results = []
    for sid in known_ids:
        try:
            call = scheduler.get(sid)
        except exceptions.MissingResource:
            warning = _('Repository %(r)s; Distributor %(d)s: scheduled publish does not exist: %(s)s')
            _LOG.warn(warning % {'r': repo_id, 'd': distributor_id, 's': sid})
            continue
        obj = serialization.dispatch.scheduled_publish_obj(call)
        obj.update(serialization.link.child_link_obj(sid))
        results.append(obj)
    return self.ok(results)
def GET(self, repo_id, importer_id):
    """
    Serialize and return the scheduled syncs for this repo/importer,
    skipping (and logging) any schedule ids the scheduler no longer holds.
    """
    importer_manager = manager_factory.repo_importer_manager()
    importer = importer_manager.get_importer(repo_id)
    if importer_id != importer['id']:
        raise exceptions.MissingResource(importer=importer_id)

    scheduler = dispatch_factory.scheduler()
    results = []
    for sid in importer_manager.list_sync_schedules(repo_id):
        try:
            scheduled_call = scheduler.get(sid)
        except exceptions.MissingResource:
            warning = _('Repository %(r)s; Importer %(i)s: scheduled sync does not exist: %(s)s')
            _LOG.warn(warning % {'r': repo_id, 'i': importer_id, 's': sid})
            continue
        obj = serialization.dispatch.scheduled_sync_obj(scheduled_call)
        obj.update(serialization.link.child_link_obj(sid))
        results.append(obj)
    return self.ok(results)
def _delete_schedule(self, consumer_id, schedule_id):
    """
    Remove a scheduled call after confirming the consumer exists.
    """
    self._validate_consumer(consumer_id)
    dispatch_factory.scheduler().remove(schedule_id)