def get(self, request, repo_id):
    """
    Retrieve sync history for a specified repository.

    :param request: WSGI request object
    :type request: django.core.handlers.wsgi.WSGIRequest
    :param repo_id: id of the repository
    :type repo_id: str
    :return: Response containing a list of dicts, one for each sync event
    :rtype: django.http.HttpResponse
    :raises pulp_exceptions.InvalidValue: if limit is not an integer
    """
    query = request.GET
    limit = query.get(constants.REPO_HISTORY_FILTER_LIMIT)
    start_date = query.get(constants.REPO_HISTORY_FILTER_START_DATE)
    end_date = query.get(constants.REPO_HISTORY_FILTER_END_DATE)
    # Fall back to descending sort when the caller supplied no sort order
    sort = query.get(constants.REPO_HISTORY_FILTER_SORT) or constants.SORT_DESCENDING

    if limit:
        try:
            limit = int(limit)
        except ValueError:
            raise pulp_exceptions.InvalidValue([constants.REPO_HISTORY_FILTER_LIMIT])

    # The sync manager performs its own validation of these options before the
    # database is queried.
    entries = manager_factory.repo_sync_manager().sync_history(
        repo_id, limit=limit, sort=sort, start_date=start_date, end_date=end_date)
    return generate_json_response_with_pulp_encoder(entries)
def create_sync_schedule(self, repo_id, importer_id, sync_options, schedule_data):
    """
    Create a new sync schedule for a given repository using the given importer.

    :param repo_id: id of the repository the schedule will sync
    :param importer_id: id of the importer that performs the scheduled sync
    :param sync_options: sync options dict; must contain 'override_config'
    :param schedule_data: scheduler options dict; must contain 'schedule'
    :return: id of the newly created schedule
    """
    # Validate the input before building anything
    self._validate_importer(repo_id, importer_id)
    self._validate_keys(sync_options, _SYNC_OPTION_KEYS)
    if 'schedule' not in schedule_data:
        raise pulp_exceptions.MissingValue(['schedule'])

    # Build the sync call request
    sync_manager = managers_factory.repo_sync_manager()
    request_tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id),
                    resource_tag(dispatch_constants.RESOURCE_REPOSITORY_IMPORTER_TYPE,
                                 importer_id)]
    request = CallRequest(sync_manager.sync,
                          [repo_id],
                          {'sync_config_override': sync_options['override_config']},
                          weight=pulp_config.config.getint('tasks', 'sync_weight'),
                          tags=request_tags,
                          archive=True)
    request.reads_resource(dispatch_constants.RESOURCE_REPOSITORY_IMPORTER_TYPE, importer_id)
    request.updates_resource(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id)
    request.add_life_cycle_callback(dispatch_constants.CALL_ENQUEUE_LIFE_CYCLE_CALLBACK,
                                    sync_manager.prep_sync)

    # Schedule the sync, then record the schedule id on the importer
    schedule_id = dispatch_factory.scheduler().add(request, **schedule_data)
    managers_factory.repo_importer_manager().add_sync_schedule(repo_id, schedule_id)
    return schedule_id
def test_syntactic_sugar_methods(self):
    """
    Tests the syntactic sugar methods for retrieving specific managers.
    """
    # Setup
    factory.initialize()

    # Test: each sugar method must return an instance of its manager class
    expected_pairs = (
        (factory.authentication_manager, AuthenticationManager),
        (factory.cert_generation_manager, CertGenerationManager),
        (factory.certificate_manager, CertificateManager),
        (factory.password_manager, PasswordManager),
        (factory.permission_manager, PermissionManager),
        (factory.permission_query_manager, PermissionQueryManager),
        (factory.role_manager, RoleManager),
        (factory.role_query_manager, RoleQueryManager),
        (factory.user_manager, UserManager),
        (factory.user_query_manager, UserQueryManager),
        (factory.repo_manager, RepoManager),
        (factory.repo_unit_association_manager, RepoUnitAssociationManager),
        (factory.repo_publish_manager, RepoPublishManager),
        (factory.repo_query_manager, RepoQueryManager),
        (factory.repo_sync_manager, RepoSyncManager),
        (factory.content_manager, ContentManager),
        (factory.content_query_manager, ContentQueryManager),
        (factory.content_upload_manager, ContentUploadManager),
        (factory.consumer_manager, ConsumerManager),
        (factory.topic_publish_manager, TopicPublishManager),
    )
    for sugar_method, manager_class in expected_pairs:
        self.assertTrue(isinstance(sugar_method(), manager_class))
def GET(self, repo_id):
    """
    Return the sync history for a repository, honoring the limit, sort and
    date-range filters supplied as query parameters.
    """
    # Query parameters arrive from self.filters() as lists of values
    filters = self.filters([constants.REPO_HISTORY_FILTER_LIMIT,
                            constants.REPO_HISTORY_FILTER_SORT,
                            constants.REPO_HISTORY_FILTER_START_DATE,
                            constants.REPO_HISTORY_FILTER_END_DATE])
    limit = filters.get(constants.REPO_HISTORY_FILTER_LIMIT, None)
    sort = filters.get(constants.REPO_HISTORY_FILTER_SORT, None)
    start_date = filters.get(constants.REPO_HISTORY_FILTER_START_DATE, None)
    end_date = filters.get(constants.REPO_HISTORY_FILTER_END_DATE, None)

    if limit is not None:
        try:
            limit = int(limit[0])
        except ValueError:
            _LOG.error('Invalid limit specified [%s]' % limit)
            raise exceptions.InvalidValue([constants.REPO_HISTORY_FILTER_LIMIT])

    # Error checking is done on these options in the sync manager before the
    # database is queried.
    sort = constants.SORT_DESCENDING if sort is None else sort[0]
    if start_date:
        start_date = start_date[0]
    if end_date:
        end_date = end_date[0]

    entries = manager_factory.repo_sync_manager().sync_history(
        repo_id, limit=limit, sort=sort, start_date=start_date, end_date=end_date)
    return self.ok(entries)
def sync_with_auto_publish(repo_id, overrides=None):
    """
    Sync a repository and upon successful completion, publish any distributors
    that are configured for auto publish.

    :param repo_id: id of the repository to create a sync call request list for
    :type repo_id: str
    :param overrides: dictionary of configuration overrides for this sync
    :type overrides: dict or None
    :return: A task result containing the details of the task executed and any
             spawned tasks
    :rtype: TaskResult
    """
    sync_result = managers.repo_sync_manager().sync(repo_id, sync_config_override=overrides)
    result = TaskResult(sync_result)

    # Publish every distributor configured for auto publish, recording the
    # spawned publish tasks on the result.
    auto_distributors = managers.repo_publish_manager().auto_distributors(repo_id)
    result.spawned_tasks = [publish(repo_id, distributor['id'])
                            for distributor in auto_distributors]
    return result
def POST(self, repo_id):
    """
    Dispatch an asynchronous sync of the repository followed by a publish of
    every distributor configured for auto publish.
    """
    # TODO: Add timeout support

    # Params
    overrides = self.params().get('override_config', None)

    # Execute the sync asynchronously
    sync_manager = manager_factory.repo_sync_manager()
    sync_request = CallRequest(
        sync_manager.sync,
        [repo_id],
        {'sync_config_override': overrides},
        weight=pulp_config.config.getint('tasks', 'sync_weight'),
        tags=[resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id),
              action_tag('sync')],
        archive=True)
    sync_request.updates_resource(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id)
    sync_request.add_life_cycle_callback(
        dispatch_constants.CALL_ENQUEUE_LIFE_CYCLE_CALLBACK, sync_manager.prep_sync)

    call_requests = [sync_request]

    # One publish request per auto-publish distributor, each depending on the sync
    publish_manager = manager_factory.repo_publish_manager()
    publish_tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id),
                    action_tag('auto_publish'),
                    action_tag('publish')]
    for distributor in publish_manager.auto_distributors(repo_id):
        publish_request = CallRequest(publish_manager.publish,
                                      [repo_id, distributor['id']],
                                      tags=publish_tags,
                                      archive=True)
        publish_request.updates_resource(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id)
        publish_request.add_life_cycle_callback(
            dispatch_constants.CALL_ENQUEUE_LIFE_CYCLE_CALLBACK,
            publish_manager.prep_publish)
        publish_request.depends_on(sync_request)
        call_requests.append(publish_request)

    # execute_multiple raises an exception that is handled by the middleware,
    # so no return is needed
    execution.execute_multiple(call_requests)
def sync_with_auto_publish_itinerary(repo_id, overrides=None):
    """
    Create a call request list for the synchronization of a repository and the
    publishing of any distributors that are configured for auto publish.

    :param repo_id: id of the repository to create a sync call request list for
    :type repo_id: str
    :param overrides: dictionary of configuration overrides for this sync
    :type overrides: dict or None
    :return: list of call request instances
    :rtype: list
    """
    sync_manager = manager_factory.repo_sync_manager()
    sync_request = CallRequest(
        sync_manager.sync,
        [repo_id],
        {'sync_config_override': overrides},
        weight=pulp_config.config.getint('tasks', 'sync_weight'),
        tags=[resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id),
              action_tag('sync')],
        archive=True)
    sync_request.updates_resource(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id)

    itinerary = [sync_request]

    # A publish request for each auto-publish distributor, gated on the sync
    # finishing first.
    publish_manager = manager_factory.repo_publish_manager()
    publish_tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id),
                    action_tag('auto_publish'),
                    action_tag('publish')]
    for distributor in publish_manager.auto_distributors(repo_id):
        publish_request = CallRequest(publish_manager.publish,
                                      [repo_id, distributor['id']],
                                      tags=publish_tags,
                                      archive=True)
        publish_request.updates_resource(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id)
        publish_request.depends_on(sync_request.id,
                                   [dispatch_constants.CALL_FINISHED_STATE])
        itinerary.append(publish_request)

    return itinerary
def sync_with_auto_publish(repo_id, overrides=None):
    """
    Sync a repository and upon successful completion, publish any distributors
    that are configured for auto publish.

    :param repo_id: id of the repository to create a sync call request list for
    :type repo_id: str
    :param overrides: dictionary of configuration overrides for this sync
    :type overrides: dict or None
    :return: A task result containing the details of the task executed and any
             spawned tasks
    :rtype: TaskResult
    """
    # Delegate entirely to the sync manager, which queues the publish tasks too
    sync_manager = managers.repo_sync_manager()
    return sync_manager.queue_sync_with_auto_publish(repo_id, overrides)
def sync_with_auto_publish(repo_id, overrides=None):
    """
    Sync a repository and upon successful completion, publish any distributors
    that are configured for auto publish.

    :param repo_id: id of the repository to create a sync call request list for
    :type repo_id: str
    :param overrides: dictionary of configuration overrides for this sync
    :type overrides: dict or None
    :return: A task result containing the details of the task executed and any
             spawned tasks
    :rtype: TaskResult
    """
    # The manager handles both the sync and the follow-up auto-publish queueing
    result = managers.repo_sync_manager().queue_sync_with_auto_publish(repo_id, overrides)
    return result
def sync_with_auto_publish_itinerary(repo_id, overrides=None):
    """
    Create a call request list for the synchronization of a repository and the
    publishing of any distributors that are configured for auto publish.

    :param repo_id: id of the repository to create a sync call request list for
    :type repo_id: str
    :param overrides: dictionary of configuration overrides for this sync
    :type overrides: dict or None
    :return: list of call request instances
    :rtype: list
    """
    # Build the sync request first; every publish request depends on it
    sync_manager = manager_factory.repo_sync_manager()
    sync_weight = pulp_config.config.getint('tasks', 'sync_weight')
    sync_tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id),
                 action_tag('sync')]
    sync_request = CallRequest(sync_manager.sync,
                               [repo_id],
                               {'sync_config_override': overrides},
                               weight=sync_weight,
                               tags=sync_tags,
                               archive=True)
    sync_request.updates_resource(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id)
    sync_request.add_life_cycle_callback(dispatch_constants.CALL_ENQUEUE_LIFE_CYCLE_CALLBACK,
                                         sync_manager.prep_sync)

    requests = [sync_request]

    publish_manager = manager_factory.repo_publish_manager()
    publish_tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id),
                    action_tag('auto_publish'),
                    action_tag('publish')]
    for distributor in publish_manager.auto_distributors(repo_id):
        publish_request = CallRequest(publish_manager.publish,
                                      [repo_id, distributor['id']],
                                      tags=publish_tags,
                                      archive=True)
        publish_request.updates_resource(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id)
        publish_request.add_life_cycle_callback(
            dispatch_constants.CALL_ENQUEUE_LIFE_CYCLE_CALLBACK,
            publish_manager.prep_publish)
        # Only run the publish once the sync has finished
        publish_request.depends_on(sync_request.id,
                                   [dispatch_constants.CALL_FINISHED_STATE])
        requests.append(publish_request)

    return requests
def GET(self, repo_id):
    """
    Return the sync history for a repository.

    :param repo_id: id of the repository
    :type repo_id: str
    :return: 200 response whose body is the list of sync history entries
    :raises exceptions.InvalidValue: if the limit filter is not an integer
    """
    # Params — filter values arrive as lists of strings
    filters = self.filters(['limit'])
    limit = filters.get('limit', None)

    if limit is not None:
        try:
            limit = int(limit[0])
        except ValueError:
            # Lazy %-args: the message is only rendered if the record is emitted;
            # .exception() also captures the traceback for debugging
            _LOG.exception('Invalid limit specified [%s]', limit)
            raise exceptions.InvalidValue(['limit'])

    sync_manager = manager_factory.repo_sync_manager()
    entries = sync_manager.sync_history(repo_id, limit=limit)
    return self.ok(entries)
def GET(self, repo_id):
    """
    Return the sync history for a repository, optionally capped by a 'limit'
    query parameter.
    """
    # Params — filter values arrive as lists of strings
    history_filters = self.filters(['limit'])
    limit = history_filters.get('limit', None)

    if limit is not None:
        try:
            limit = int(limit[0])
        except ValueError:
            _LOG.error('Invalid limit specified [%s]' % limit)
            raise exceptions.InvalidValue(['limit'])

    entries = manager_factory.repo_sync_manager().sync_history(repo_id, limit=limit)
    return self.ok(entries)
def test_syntactic_sugar_methods(self):
    """
    Tests the syntactic sugar methods for retrieving specific managers.
    """
    # Test: each factory sugar method returns an instance of its manager class
    cases = [
        (factory.repo_manager, RepoManager),
        (factory.repo_unit_association_manager, RepoUnitAssociationManager),
        (factory.repo_publish_manager, RepoPublishManager),
        (factory.repo_query_manager, RepoQueryManager),
        (factory.repo_sync_manager, RepoSyncManager),
        (factory.content_manager, ContentManager),
        (factory.content_query_manager, ContentQueryManager),
        (factory.content_upload_manager, ContentUploadManager),
        (factory.consumer_manager, ConsumerManager),
    ]
    for sugar_method, manager_class in cases:
        self.assertTrue(isinstance(sugar_method(), manager_class))
def create_sync_schedule(self, repo_id, importer_id, sync_options, schedule_data): """ Create a new sync schedule for a given repository using the given importer. @param repo_id: @param importer_id: @param sync_options: @param schedule_data: @return: """ # validate the input self._validate_importer(repo_id, importer_id) self._validate_keys(sync_options, _SYNC_OPTION_KEYS) if "schedule" not in schedule_data: raise pulp_exceptions.MissingValue(["schedule"]) # build the sync call request sync_manager = managers_factory.repo_sync_manager() args = [repo_id] kwargs = {"sync_config_override": sync_options["override_config"]} resources = { dispatch_constants.RESOURCE_REPOSITORY_TYPE: {repo_id: dispatch_constants.RESOURCE_UPDATE_OPERATION}, dispatch_constants.RESOURCE_REPOSITORY_IMPORTER_TYPE: { importer_id: dispatch_constants.RESOURCE_READ_OPERATION }, } weight = pulp_config.config.getint("tasks", "sync_weight") tags = [ resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id), resource_tag(dispatch_constants.RESOURCE_REPOSITORY_IMPORTER_TYPE, importer_id), ] call_request = CallRequest(sync_manager.sync, args, kwargs, resources, None, weight, tags, archive=True) # schedule the sync scheduler = dispatch_factory.scheduler() schedule_id = scheduler.add(call_request, **schedule_data) importer_manager = managers_factory.repo_importer_manager() importer_manager.add_sync_schedule(repo_id, schedule_id) return schedule_id
def GET(self, repo_id):
    """
    Return the sync history for the given repository, honoring the limit, sort
    and date-range query filters.
    """
    filters = self.filters([constants.REPO_HISTORY_FILTER_LIMIT,
                            constants.REPO_HISTORY_FILTER_SORT,
                            constants.REPO_HISTORY_FILTER_START_DATE,
                            constants.REPO_HISTORY_FILTER_END_DATE])

    # limit is validated here; note the error message logs the raw filter value
    limit = filters.get(constants.REPO_HISTORY_FILTER_LIMIT, None)
    if limit is not None:
        try:
            limit = int(limit[0])
        except ValueError:
            _logger.error('Invalid limit specified [%s]' % limit)
            raise exceptions.InvalidValue([constants.REPO_HISTORY_FILTER_LIMIT])

    # Error checking is done on these options in the sync manager before the
    # database is queried.
    sort = filters.get(constants.REPO_HISTORY_FILTER_SORT, None)
    sort = constants.SORT_DESCENDING if sort is None else sort[0]

    start_date = filters.get(constants.REPO_HISTORY_FILTER_START_DATE, None)
    if start_date:
        start_date = start_date[0]
    end_date = filters.get(constants.REPO_HISTORY_FILTER_END_DATE, None)
    if end_date:
        end_date = end_date[0]

    entries = manager_factory.repo_sync_manager().sync_history(
        repo_id, limit=limit, sort=sort, start_date=start_date, end_date=end_date)
    return self.ok(entries)