def PUT(self, request_id):
    """
    Update Request properties with a given id.

    HTTP Success:
        200 OK
    HTTP Error:
        400 Bad request
        404 Not Found
        500 Internal Error
    """
    raw_body = data()
    # Only these keys may be updated through the API; anything else is dropped.
    allowed_keys = ('scope', 'name', 'data_type', 'status', 'granularity_type',
                    'granularity_level', 'priority', 'request_meta')
    try:
        decoded = json.loads(raw_body)
        parameters = {key: value for key, value in decoded.items()
                      if key in allowed_keys}
    except ValueError:
        raise self.generate_http_response(HTTP_STATUS_CODE.BadRequest,
                                          exc_cls=exceptions.BadRequest.__name__,
                                          exc_msg='Cannot decode json parameter dictionary')

    try:
        update_request(request_id, parameters)
    except exceptions.NoObject as error:
        raise self.generate_http_response(HTTP_STATUS_CODE.NotFound,
                                          exc_cls=error.__class__.__name__,
                                          exc_msg=error)
    except exceptions.ESSException as error:
        raise self.generate_http_response(HTTP_STATUS_CODE.InternalError,
                                          exc_cls=error.__class__.__name__,
                                          exc_msg=error)
    except Exception as error:
        print(error)
        print(format_exc())
        raise self.generate_http_response(HTTP_STATUS_CODE.InternalError,
                                          exc_cls=exceptions.CoreException.__name__,
                                          exc_msg=error)

    # The framework delivers responses by raising them, success included.
    raise self.generate_http_response(HTTP_STATUS_CODE.OK,
                                      data={'status': 0, 'message': 'update successfully'})
def test_create_and_check_for_request_core(self):
    """ Request (CORE): Test the creation, query, and deletion of a Request """
    props = {
        'scope': 'test_scope',
        'name': 'test_name_%s' % str(uuid()),
        'data_type': 'DATASET',
        'granularity_type': 'FILE',
        'granularity_level': 1,
        'priority': 99,
        'edge_id': None,
        'status': 'NEW',
        'request_meta': {'taskid': 975, 'job_id': 864},
        'processing_meta': None,
        'errors': None,
    }

    request_id = add_request(**props)

    fetched = get_request(request_id=request_id)
    assert_equal(request_id, fetched.request_id)

    # The row must round-trip through its dict form without a serialization error.
    json.dumps(fetched.to_dict())

    assert_equal(fetched.scope, props['scope'])
    assert_equal(fetched.name, props['name'])
    assert_equal(str(fetched.data_type), props['data_type'])
    assert_equal(str(fetched.granularity_type), props['granularity_type'])
    assert_equal(fetched.granularity_level, props['granularity_level'])
    assert_equal(fetched.priority, props['priority'])
    assert_equal(fetched.edge_id, props['edge_id'])
    assert_equal(str(fetched.status), props['status'])
    assert_equal(str(fetched.request_meta['taskid']), str(props['request_meta']['taskid']))
    assert_equal(str(fetched.request_meta['job_id']), str(props['request_meta']['job_id']))
    assert_equal(fetched.errors, props['errors'])

    # A second, identical request can be created and deleted independently.
    duplicate_id = add_request(**props)
    delete_request(duplicate_id)

    # Status updates must be visible on re-read.
    update_request(request_id, parameters={'status': 'ERROR'})
    fetched = get_request(request_id=request_id)
    assert_equal(str(fetched.status), 'ERROR')

    # Unknown ids raise NoObject.
    with assert_raises(exceptions.NoObject):
        get_request(request_id=999999)

    # After deletion the request is gone.
    delete_request(request_id)
    with assert_raises(exceptions.NoObject):
        get_request(request_id=request_id)
def get_tasks(self):
    """ Get tasks to process """
    assigned = get_requests(status=RequestStatus.ASSIGNED, edge_name=self.resource_name)
    self.logger.info("Main thread get %s tasks" % len(assigned))
    for request in assigned:
        # Queue the task, then mark it so it is not picked up again.
        self.tasks.put(request)
        update_request(request_id=request.request_id,
                       parameters={'status': RequestStatus.PRECACHING})
def get_tasks(self):
    """ Get tasks to process """
    new_requests = get_requests(status=RequestStatus.NEW)
    self.logger.info("Main thread get %s tasks" % len(new_requests))
    for request in new_requests:
        # Clear stale errors before brokering the request.
        request.errors = None
        self.tasks.put(request)
        update_request(request_id=request.request_id,
                       parameters={'status': RequestStatus.BROKERING})
    if new_requests:
        # Refresh resource information only when there is work to broker.
        self.get_resources()
def finish_tasks(self):
    """ Finish processing the finished tasks, for example, update db status. """
    while not self.finished_tasks.empty():
        req = self.finished_tasks.get()
        self.logger.info("Main thread finishing task: %s" % req)
        try:
            parameters = {
                'status': req.status,
                'edge_id': req.edge_id,
                'errors': req.errors,
            }
            # processing_meta is only written back when the task produced one.
            if req.processing_meta:
                parameters['processing_meta'] = req.processing_meta
            self.logger.info("Updating request %s: %s" % (req.request_id, parameters))
            update_request(request_id=req.request_id, parameters=parameters)
        except Exception as error:
            # Best-effort: log and continue draining the queue.
            self.logger.critical("Failed to update request %s: %s, %s"
                                 % (req, error, traceback.format_exc()))
def prepare_split_request_task(self):
    """ Prepare split request """
    precached = get_requests(status=RequestStatus.PRECACHED, edge_name=self.resource_name)
    if not precached:
        return
    self.logger.info("Main thread get %s split requests" % len(precached))
    for request in precached:
        self.logger.info("Prepare to_split files for request %s" % request.request_id)
        # TOSPLITTING while preparing, SPLITTING once the files are registered.
        update_request(request_id=request.request_id,
                       parameters={'status': RequestStatus.TOSPLITTING})
        self.prepare_to_split_files(request)
        update_request(request_id=request.request_id,
                       parameters={'status': RequestStatus.SPLITTING})
def assign_local_requests(self):
    """Promote this edge's ASSIGNING requests to ASSIGNED."""
    pending = get_requests(edge_name=self.resource_name, status=RequestStatus.ASSIGNING)
    for request in pending:
        request.status = RequestStatus.ASSIGNED
        update_request(request.request_id, {'status': request.status})
def finish_local_requests(self):
    """
    Finalize SPLITTING requests on this edge.

    For each request in SPLITTING status, check the content statistics of the
    request's granularity (FILE or PARTIAL). When every content item is
    AVAILABLE, sync the contents to the head service, mark the request
    AVAILABLE and, if messaging is enabled, publish a REQUEST_DONE message.
    """
    reqs = get_requests(edge_name=self.resource_name, status=RequestStatus.SPLITTING)
    for req in reqs:
        if req.granularity_type == GranularityType.FILE:
            self._finish_local_request(req, ContentType.FILE, 'files')
        if req.granularity_type == GranularityType.PARTIAL:
            self._finish_local_request(req, ContentType.PARTIAL, 'partial files')

def _finish_local_request(self, req, content_type, label):
    """
    Check one request's content statistics; finish the request if all of its
    contents (of the given content_type) are AVAILABLE.

    :param req: the request row being checked.
    :param content_type: ContentType.FILE or ContentType.PARTIAL.
    :param label: human-readable noun for log messages ('files'/'partial files').
    """
    statistics = get_contents_statistics(edge_name=self.resource_name,
                                         edge_id=req.edge_id,
                                         coll_id=req.processing_meta['coll_id'],
                                         content_type=content_type)
    # Map content status -> number of contents in that status.
    # BUGFIX: the FILE branch previously stored `items.counter` (an attribute
    # of the plain dict, which does not exist); the statistics row's
    # `item.counter` is what the PARTIAL branch correctly used.
    items = {}
    for item in statistics:
        items[item.status] = item.counter

    # All contents are in exactly one status, that status is AVAILABLE, and
    # there is at least one content.
    # BUGFIX: `items.keys()[0]` / `items.values()[0]` are not valid on
    # Python 3 dict views; materialize them first.
    statuses = list(items.keys())
    counts = list(items.values())
    if len(statuses) == 1 and statuses[0] == ContentStatus.AVAILABLE and counts[0] > 0:
        self.logger.info('All %s are available for request(%s): %s'
                         % (label, req.request_id, items))
        # To sync content info to the head service
        self.sync_contents(collection_scope=req.scope,
                           collection_name=req.name,
                           edge_name=self.resource_name,
                           edge_id=req.edge_id,
                           coll_id=req.processing_meta['coll_id'])
        req.status = RequestStatus.AVAILABLE
        self.logger.info("Updating request %s to status %s" % (req.request_id, req.status))
        update_request(req.request_id, {'status': req.status})
        if self.send_messaging:
            msg = {
                'event_type': 'REQUEST_DONE',
                'payload': {
                    'scope': req.scope,
                    'name': req.name,
                    # NOTE(review): the FILE branch previously read
                    # `req.request_metadata`, a name used nowhere else in this
                    # file; `request_meta` matches the model attribute used by
                    # the PARTIAL branch and the rest of the code — confirm
                    # against the Request model.
                    'metadata': req.request_meta,
                },
                'created_at': date_to_str(datetime.datetime.utcnow()),
            }
            self.messaging_queue.put(msg)
    else:
        self.logger.info('Not all %s are available for request(%s): %s'
                         % (label, req.request_id, items))