def test_create_and_check_for_request_rest(self):
    """ Request (REST): Test the creation, query, and deletion of a Request """
    rest_host = get_rest_host()
    props = get_request_properties()
    rest_client = Client(host=rest_host)

    # Create a request, then read it back by its id.
    req_id = rest_client.add_request(**props)
    fetched = rest_client.get_requests(request_id=req_id)
    assert_equal(len(fetched), 1)
    assert_equal(req_id, fetched[0]['request_id'])

    # Every submitted property must round-trip unchanged;
    # 'lifetime' is deliberately excluded from the comparison.
    for key, value in props.items():
        if key == 'lifetime':
            continue
        assert_equal(fetched[0][key], value)

    # A status update must be visible on the next query.
    rest_client.update_request(req_id, parameters={'status': RequestStatus.Failed})
    fetched = rest_client.get_requests(request_id=req_id)
    assert_equal(len(fetched), 1)
    assert_equal(fetched[0]['status'], RequestStatus.Failed)

    # Querying a non-existent id yields an empty result set.
    missing = rest_client.get_requests(request_id=999999)
    assert_equal(len(missing), 0)
def test_workflow_request(self):
    """Build a Workflow-type request from self.init() and submit it via REST."""
    wf = self.init()
    request_props = {
        'scope': 'workflow',
        'name': wf.get_name(),
        'requester': 'panda',
        'request_type': RequestType.Workflow,
        'transform_tag': 'workflow',
        'status': RequestStatus.New,
        'priority': 0,
        'lifetime': 30,
        'request_metadata': {'workload_id': '20776840',
                             'workflow': wf.serialize()},
    }
    rest_client = Client(host=get_rest_host())
    new_request_id = rest_client.add_request(**request_props)
    print(new_request_id)
def submit(self, workflow):
    """
    Submit a workflow to the iDDS server as a new request.

    :param workflow: the workflow to submit; must provide get_name()
                     and get_workload_id().
    :returns: the request id assigned by the server.
    """
    props = {
        'scope': 'workflow',
        'name': workflow.get_name(),
        'requester': 'panda',
        'request_type': RequestType.Workflow,
        'transform_tag': 'workflow',
        'status': RequestStatus.New,
        'priority': 0,
        'lifetime': 30,
        'workload_id': workflow.get_workload_id(),
        'request_metadata': {
            'workload_id': workflow.get_workload_id(),
            'workflow': workflow
        }
    }
    # Debug print of the full properties removed: it dumped the whole
    # workflow to stdout on every submission (ClientManager.submit keeps
    # the same line commented out).
    # print(props)
    client = Client(host=self.host)
    request_id = client.add_request(**props)
    return request_id
def get_req_properties():
    """Return the properties of an example tape StageIn request."""
    return {
        'scope': 'data16_13TeV',
        'name': 'data16_13TeV.00298862.physics_Main.daq.RAW',
        'requester': 'panda',
        'request_type': RequestType.StageIn,
        'transform_tag': 'prodsys2',
        'status': RequestStatus.New,
        'priority': 0,
        'lifetime': 30,
        'request_metadata': {'workload_id': '20776840',
                             'src_rse': 'NDGF-T1_DATATAPE',
                             'rule_id': '236e4bf87e11490291e3259b14724e30'},
    }


host = get_rest_host()
props = get_req_properties()
# props = get_example_real_tape_stagein_request()
# props = get_example_prodsys2_tape_stagein_request()
# props = get_example_active_learning_request()
client = Client(host=host)
request_id = client.add_request(**props)
print(request_id)
def __init__(self, host=None):
    """Bind to *host*, falling back to the configured REST host when None."""
    self.host = host if host is not None else get_rest_host()
    self.client = Client(host=self.host)
class ClientManager:
    """
    High-level wrapper around the iDDS REST Client.

    Provides request lifecycle operations (submit/abort/suspend/resume/
    retry/finish), status reporting, cacher file transfer, hyperparameter
    access and message retrieval. Most mutating operations return either a
    list of (0, message) tuples on success or a single (-1, message) tuple
    on error rather than raising.
    """

    def __init__(self, host=None):
        # Fall back to the configured REST host when none is given.
        self.host = host
        if self.host is None:
            self.host = get_rest_host()
        self.client = Client(host=self.host)

    @exception_handler
    def submit(self, workflow):
        """
        Submit the workflow as a request to iDDS server.

        :param workflow: The workflow to be submitted.
        :returns: the request id assigned by the server.
        """
        props = {
            'scope': 'workflow',
            'name': workflow.name,
            'requester': 'panda',
            'request_type': RequestType.Workflow,
            'transform_tag': 'workflow',
            'status': RequestStatus.New,
            'priority': 0,
            'lifetime': workflow.lifetime,
            'workload_id': workflow.get_workload_id(),
            'request_metadata': {
                'version': release_version,
                'workload_id': workflow.get_workload_id(),
                'workflow': workflow
            }
        }
        workflow.add_proxy()
        # When the workflow declares a primary initial collection, its
        # scope/name replace the generic 'workflow' scope/name on the request.
        primary_init_work = workflow.get_primary_initial_collection()
        if primary_init_work:
            if type(primary_init_work) in [Collection]:
                props['scope'] = primary_init_work.scope
                props['name'] = primary_init_work.name
            else:
                # presumably a dict-like with 'scope'/'name' keys
                props['scope'] = primary_init_work['scope']
                props['name'] = primary_init_work['name']
        # print(props)
        request_id = self.client.add_request(**props)
        return request_id

    @exception_handler
    def abort(self, request_id=None, workload_id=None):
        """
        Abort requests.

        :param workload_id: the workload id.
        :param request_id: the request.
        :returns: list of (0, message) per aborted request, or (-1, message).
        """
        if request_id is None and workload_id is None:
            logging.error("Both request_id and workload_id are None. One of them should not be None")
            return (-1, "Both request_id and workload_id are None. One of them should not be None")
        reqs = self.client.get_requests(request_id=request_id, workload_id=workload_id)
        if reqs:
            rets = []
            for req in reqs:
                logging.info("Aborting request: %s" % req['request_id'])
                # Delivered as a message rather than a direct update:
                # self.client.update_request(request_id=req['request_id'], parameters={'substatus': RequestStatus.ToCancel})
                self.client.send_message(request_id=req['request_id'],
                                         msg={'command': 'update_request',
                                              'parameters': {'status': RequestStatus.ToCancel}})
                logging.info("Abort request registered successfully: %s" % req['request_id'])
                ret = (0, "Abort request registered successfully: %s" % req['request_id'])
                rets.append(ret)
            return rets
        else:
            return (-1, 'No matching requests')

    @exception_handler
    def suspend(self, request_id=None, workload_id=None):
        """
        Suspend requests.

        :param workload_id: the workload id.
        :param request_id: the request.
        :returns: list of (0, message) per suspended request, or (-1, message).
        """
        if request_id is None and workload_id is None:
            logging.error("Both request_id and workload_id are None. One of them should not be None")
            return (-1, "Both request_id and workload_id are None. One of them should not be None")
        reqs = self.client.get_requests(request_id=request_id, workload_id=workload_id)
        if reqs:
            rets = []
            for req in reqs:
                logging.info("Suspending request: %s" % req['request_id'])
                # self.client.update_request(request_id=req['request_id'], parameters={'substatus': RequestStatus.ToSuspend})
                self.client.send_message(request_id=req['request_id'],
                                         msg={'command': 'update_request',
                                              'parameters': {'status': RequestStatus.ToSuspend}})
                logging.info("Suspend request registered successfully: %s" % req['request_id'])
                ret = (0, "Suspend request registered successfully: %s" % req['request_id'])
                rets.append(ret)
            return rets
        else:
            return (-1, 'No matching requests')

    @exception_handler
    def resume(self, request_id=None, workload_id=None):
        """
        Resume requests.

        :param workload_id: the workload id.
        :param request_id: the request.
        :returns: list of (0, message) per resumed request, or (-1, message).
        """
        if request_id is None and workload_id is None:
            logging.error("Both request_id and workload_id are None. One of them should not be None")
            return (-1, "Both request_id and workload_id are None. One of them should not be None")
        reqs = self.client.get_requests(request_id=request_id, workload_id=workload_id)
        if reqs:
            rets = []
            for req in reqs:
                logging.info("Resuming request: %s" % req['request_id'])
                # self.client.update_request(request_id=req['request_id'], parameters={'substatus': RequestStatus.ToResume})
                self.client.send_message(request_id=req['request_id'],
                                         msg={'command': 'update_request',
                                              'parameters': {'status': RequestStatus.ToResume}})
                logging.info("Resume request registered successfully: %s" % req['request_id'])
                ret = (0, "Resume request registered successfully: %s" % req['request_id'])
                rets.append(ret)
            return rets
        else:
            return (-1, 'No matching requests')

    @exception_handler
    def retry(self, request_id=None, workload_id=None):
        """
        Retry requests.

        NOTE(review): retry sends RequestStatus.ToResume, i.e. it is
        implemented as a resume on the server side — confirm this is intended.

        :param workload_id: the workload id.
        :param request_id: the request.
        :returns: list of (0, message) per retried request, or (-1, message).
        """
        if request_id is None and workload_id is None:
            logging.error("Both request_id and workload_id are None. One of them should not be None")
            return (-1, "Both request_id and workload_id are None. One of them should not be None")
        reqs = self.client.get_requests(request_id=request_id, workload_id=workload_id)
        if reqs:
            rets = []
            for req in reqs:
                logging.info("Retrying request: %s" % req['request_id'])
                # self.client.update_request(request_id=req['request_id'], parameters={'substatus': RequestStatus.ToResume})
                self.client.send_message(request_id=req['request_id'],
                                         msg={'command': 'update_request',
                                              'parameters': {'status': RequestStatus.ToResume}})
                logging.info("Retry request registered successfully: %s" % req['request_id'])
                ret = (0, "Retry request registered successfully: %s" % req['request_id'])
                rets.append(ret)
            return rets
        else:
            return (-1, 'No matching requests')

    @exception_handler
    def finish(self, request_id=None, workload_id=None, set_all_finished=False):
        """
        Finish requests.

        :param workload_id: the workload id.
        :param request_id: the request.
        :param set_all_finished: when True send ToForceFinish instead of ToFinish.
        :returns: list of (0, message) per finished request, or (-1, message).
        """
        if request_id is None and workload_id is None:
            logging.error("Both request_id and workload_id are None. One of them should not be None")
            return (-1, "Both request_id and workload_id are None. One of them should not be None")
        reqs = self.client.get_requests(request_id=request_id, workload_id=workload_id)
        if reqs:
            rets = []
            for req in reqs:
                logging.info("Finishing request: %s" % req['request_id'])
                if set_all_finished:
                    # self.client.update_request(request_id=req['request_id'], parameters={'substatus': RequestStatus.ToForceFinish})
                    self.client.send_message(request_id=req['request_id'],
                                             msg={'command': 'update_request',
                                                  'parameters': {'status': RequestStatus.ToForceFinish}})
                else:
                    # self.client.update_request(request_id=req['request_id'], parameters={'substatus': RequestStatus.ToFinish})
                    self.client.send_message(request_id=req['request_id'],
                                             msg={'command': 'update_request',
                                                  'parameters': {'status': RequestStatus.ToFinish}})
                logging.info("ToFinish request registered successfully: %s" % req['request_id'])
                ret = (0, "ToFinish request registered successfully: %s" % req['request_id'])
                rets.append(ret)
            return rets
        else:
            return (-1, 'No matching requests')

    @exception_handler
    def get_requests(self, request_id=None, workload_id=None, with_detail=False, with_metadata=False):
        """
        Get requests.

        :param workload_id: the workload id.
        :param request_id: the request.
        :param with_detail: Whether to show detail info.
        :param with_metadata: Whether to include request metadata.
        :returns: the raw request records from the server.
        """
        reqs = self.client.get_requests(request_id=request_id, workload_id=workload_id,
                                        with_detail=with_detail, with_metadata=with_metadata)
        return reqs

    @exception_handler
    def get_status(self, request_id=None, workload_id=None, with_detail=False, with_metadata=False):
        """
        Get the status progress report of requests.

        :param workload_id: the workload id.
        :param request_id: the request.
        :param with_detail: Whether to show detail info.
        :param with_metadata: Whether to include request metadata.
        :returns: a plain-text table rendered with tabulate.
        """
        reqs = self.client.get_requests(request_id=request_id, workload_id=workload_id,
                                        with_detail=with_detail, with_metadata=with_metadata)
        if with_detail:
            # One row per transform, including output-collection file counters.
            table = []
            for req in reqs:
                table.append([req['request_id'], req['transform_id'], req['workload_id'],
                              req['transform_workload_id'],
                              "%s:%s" % (req['output_coll_scope'], req['output_coll_name']),
                              "%s[%s/%s/%s]" % (req['transform_status'].name,
                                                req['output_total_files'],
                                                req['output_processed_files'],
                                                req['output_processing_files']),
                              req['errors']])
            ret = tabulate.tabulate(table, tablefmt='simple',
                                    headers=['request_id', 'transform_id', 'request_workload_id',
                                             'transform_workload_id', 'scope:name',
                                             'status[Total/OK/Processing]', 'errors'])
            # print(ret)
            return str(ret)
        else:
            # One row per request with its overall status.
            table = []
            for req in reqs:
                table.append([req['request_id'], req['workload_id'],
                              "%s:%s" % (req['scope'], req['name']),
                              req['status'].name, req['errors']])
            ret = tabulate.tabulate(table, tablefmt='simple',
                                    headers=['request_id', 'request_workload_id',
                                             'scope:name', 'status', 'errors'])
            # print(ret)
            return str(ret)

    @exception_handler
    def download_logs(self, request_id=None, workload_id=None, dest_dir='./', filename=None):
        """
        Download logs for a request.

        :param workload_id: the workload id.
        :param request_id: the request.
        :param dest_dir: The destination directory.
        :param filename: The destination filename to be saved. If it's None, default filename will be saved.
        :returns: (0, message) on success, (-1, message) on failure.
        """
        filename = self.client.download_logs(request_id=request_id, workload_id=workload_id,
                                             dest_dir=dest_dir, filename=filename)
        if filename:
            logging.info("Logs are downloaded to %s" % filename)
            return (0, "Logs are downloaded to %s" % filename)
        else:
            logging.info("Failed to download logs for workload_id(%s) and request_id(%s)" % (workload_id, request_id))
            return (-1, "Failed to download logs for workload_id(%s) and request_id(%s)" % (workload_id, request_id))

    @exception_handler
    def upload_to_cacher(self, filename):
        """
        Upload file to iDDS cacher: On the cacher, the filename will be the basename of the file.
        """
        return self.client.upload(filename)

    @exception_handler
    def download_from_cacher(self, filename):
        """
        Download file from iDDS cacher: On the cacher, the filename will be the basename of the file.
        """
        return self.client.download(filename)

    @exception_handler
    def get_hyperparameters(self, workload_id, request_id, id=None, status=None, limit=None):
        """
        Get hyperparameters from the Head service.

        :param workload_id: the workload id.
        :param request_id: the request id.
        :param id: id of a specific hyperparameter (optional).
        :param status: the status of the hyperparameters.
        :param limit: limit number of hyperparameters
        :raise exceptions if it's not got successfully.
        """
        return self.client.get_hyperparameters(workload_id=workload_id, request_id=request_id,
                                               id=id, status=status, limit=limit)

    @exception_handler
    def update_hyperparameter(self, workload_id, request_id, id, loss):
        """
        Update hyperparameter to the Head service.

        :param workload_id: the workload id.
        :param request_id: the request.
        :param id: id of the hyper parameter.
        :param loss: the loss.
        :raise exceptions if it's not updated successfully.
        """
        return self.client.update_hyperparameter(workload_id=workload_id, request_id=request_id,
                                                 id=id, loss=loss)

    @exception_handler
    def get_messages(self, request_id=None, workload_id=None):
        """
        Get messages.

        :param workload_id: the workload id.
        :param request_id: the request.
        :returns: (0, messages) on success, (-1, message) on bad arguments.
        """
        if request_id is None and workload_id is None:
            logging.error("Both request_id and workload_id are None. One of them should not be None")
            return (-1, "Both request_id and workload_id are None. One of them should not be None")
        logging.info("Retrieving messages for request_id: %s, workload_id: %s" % (request_id, workload_id))
        msgs = self.client.get_messages(request_id=request_id, workload_id=workload_id)
        logging.info("Retrieved %s messages for request_id: %s, workload_id: %s" % (len(msgs), request_id, workload_id))
        return (0, msgs)
#!/usr/bin/env python
#
# Licensed under the Apache License, Version 2.0 (the "License");
# You may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Authors:
# - Wen Guan, <*****@*****.**>, 2020

"""
Test cacher: upload files to the iDDS cacher and fetch them back into /tmp.
"""

import os

from idds.client.client import Client
from idds.common.utils import get_rest_host


def _roundtrip(cacher_client, path):
    # Upload *path*, then download it into /tmp under its basename.
    cacher_client.upload(path)
    cacher_client.download(os.path.join('/tmp', os.path.basename(path)))


client = Client(host=get_rest_host())
_roundtrip(client, os.path.abspath(__file__))
_roundtrip(client, '/bin/hostname')
# Licensed under the Apache License, Version 2.0 (the "License");
# You may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Authors:
# - Wen Guan, <*****@*****.**>, 2020

"""
Download the logs of a request identified by workload_id (and optionally
request_id) into /tmp.
"""

import sys

from idds.client.client import Client
from idds.common.utils import get_rest_host

if len(sys.argv) == 2:
    workload_id = sys.argv[1]
    request_id = None
elif len(sys.argv) == 3:
    workload_id = sys.argv[1]
    request_id = sys.argv[2]
else:
    # Bug fix: without this branch the script crashed with a NameError
    # (workload_id/request_id undefined) for any other argument count.
    print("Usage: %s <workload_id> [<request_id>]" % sys.argv[0])
    sys.exit(1)

host = get_rest_host()
client = Client(host=host)
filename = client.download_logs(workload_id=workload_id, request_id=request_id, dest_dir='/tmp')
print(filename)
def test_catalog_rest(self):
    """ Catalog (Rest): Test catalog rest functions """
    host = get_rest_host()
    client = Client(host=host)

    # Build the request -> transform -> output-collection chain that the
    # catalog queries below are run against.
    req_properties = get_request_properties()
    origin_request_id = add_request(**req_properties)
    trans_properties = get_transform_properties()
    trans_properties['request_id'] = origin_request_id
    origin_trans_id = add_transform(**trans_properties)
    coll_properties = get_collection_properties()
    coll_properties['transform_id'] = origin_trans_id
    coll_properties['request_id'] = origin_request_id
    coll_properties['relation_type'] = CollectionRelationType.Output
    origin_coll_id = add_collection(**coll_properties)

    # get_collections by request_id: expect exactly the single request,
    # transform and collection created above.
    req_trans_colls = client.get_collections(request_id=origin_request_id, workload_id=None)
    assert_equal(len(req_trans_colls.keys()), 1)
    req_id = list(req_trans_colls.keys())[0]
    assert_equal(origin_request_id, req_id)
    assert_equal(len(req_trans_colls[req_id].keys()), 1)
    trans_id = list(req_trans_colls[req_id].keys())[0]
    assert_equal(trans_id, origin_trans_id)
    colls = req_trans_colls[req_id][trans_id]
    assert_equal(len(colls), 1)
    assert_equal(colls[0]['coll_id'], origin_coll_id)

    # Looking up by workload_id, or by scope/name, must yield the same result.
    req_trans_colls1 = client.get_collections(request_id=None, workload_id=req_properties['request_metadata']['workload_id'])
    assert_equal(is_same_req_trans_colls(req_trans_colls, req_trans_colls1), True)
    req_trans_colls1 = client.get_collections(scope=coll_properties['scope'], name=coll_properties['name'], request_id=None, workload_id=None)
    assert_equal(is_same_req_trans_colls(req_trans_colls, req_trans_colls1), True)

    # Register four contents in the output collection: a File spanning
    # [0, 1000), two narrower ranges [0, 100) and [100, 200), and one
    # content with a different name (suffix '_1').
    content_output_properties = get_content_properties()
    content_output_properties['content_type'] = ContentType.File
    content_output_properties['min_id'] = 0
    content_output_properties['max_id'] = 1000
    content_output_properties['coll_id'] = origin_coll_id
    origin_content_output_id_0_1000 = add_content(returning_id=True, **content_output_properties)
    content_output_properties_0_100 = copy.deepcopy(content_output_properties)
    content_output_properties_0_100['min_id'] = 0
    content_output_properties_0_100['max_id'] = 100
    content_output_properties['content_type'] = ContentType.Event
    origin_content_output_id_0_100 = add_content(returning_id=True, **content_output_properties_0_100)
    content_output_properties_100_200 = copy.deepcopy(content_output_properties)
    content_output_properties_100_200['min_id'] = 100
    content_output_properties_100_200['max_id'] = 200
    content_output_properties['content_type'] = ContentType.Event
    origin_content_output_id_100_200 = add_content(returning_id=True, **content_output_properties_100_200)
    content_output_properties_name1 = copy.deepcopy(content_output_properties)
    content_output_properties_name1['name'] = content_output_properties_name1['name'] + '_1'
    content_output_properties_name1_id = add_content(returning_id=True, **content_output_properties_name1)

    # get_contents must return all four contents, keyed by 'scope:name'.
    req_trans_coll_contents = client.get_contents(coll_scope=coll_properties['scope'], coll_name=coll_properties['name'], request_id=origin_request_id, workload_id=req_properties['request_metadata']['workload_id'])
    coll_contents = req_trans_coll_contents[origin_request_id][origin_trans_id]
    coll_scope_name = '%s:%s' % (coll_properties['scope'], coll_properties['name'])
    coll_scope_names = [scope_name for scope_name in coll_contents]
    assert_equal(coll_scope_names, [coll_scope_name])
    contents = coll_contents[coll_scope_name]['contents']
    assert_equal(len(contents), 4)
    output_content_ids = [output_content['content_id'] for output_content in contents]
    assert_equal(output_content_ids, [origin_content_output_id_0_1000, origin_content_output_id_0_100, origin_content_output_id_100_200, content_output_properties_name1_id])

    # Filtering on relation_type=Output must not change the result set
    # (the collection was registered as Output above).
    req_trans_coll_contents1 = client.get_contents(coll_scope=coll_properties['scope'], coll_name=coll_properties['name'], request_id=origin_request_id, workload_id=req_properties['request_metadata']['workload_id'], relation_type=CollectionRelationType.Output)
    assert_equal(is_same_req_trans_coll_contents(req_trans_coll_contents, req_trans_coll_contents1), True)

    # get_match_contents without an id range: the three contents sharing
    # the original name match (the '_1' content does not).
    contents = client.get_match_contents(coll_scope=coll_properties['scope'], coll_name=coll_properties['name'], scope=content_output_properties['scope'], name=content_output_properties['name'], min_id=None, max_id=None, request_id=origin_request_id, workload_id=req_properties['request_metadata']['workload_id'], only_return_best_match=False)
    assert_equal(len(contents), 3)
    content_ids = [content['content_id'] for content in contents]
    content_ids.sort()
    content_ids1 = [origin_content_output_id_0_1000, origin_content_output_id_0_100, origin_content_output_id_100_200]
    content_ids1.sort()
    assert_equal(content_ids, content_ids1)

    # Range [0, 50): only the [0, 1000) and [0, 100) contents cover it.
    contents = client.get_match_contents(coll_scope=coll_properties['scope'], coll_name=coll_properties['name'], scope=content_output_properties['scope'], name=content_output_properties['name'], min_id=0, max_id=50, request_id=origin_request_id, workload_id=req_properties['request_metadata']['workload_id'], only_return_best_match=False)
    assert_equal(len(contents), 2)
    content_ids = [content['content_id'] for content in contents]
    content_ids.sort()
    content_ids1 = [origin_content_output_id_0_1000, origin_content_output_id_0_100]
    content_ids1.sort()
    assert_equal(content_ids, content_ids1)

    # With only_return_best_match=True the narrowest covering range
    # ([0, 100)) is the single expected match.
    contents = client.get_match_contents(coll_scope=coll_properties['scope'], coll_name=coll_properties['name'], scope=content_output_properties['scope'], name=content_output_properties['name'], min_id=0, max_id=50, request_id=origin_request_id, workload_id=req_properties['request_metadata']['workload_id'], only_return_best_match=True)
    assert_equal(len(contents), 1)
    content_ids = [content['content_id'] for content in contents]
    assert_equal(content_ids, [origin_content_output_id_0_100])

    # register_contents updates the status (and optional path) of the
    # existing contents; verify via direct core reads.
    contents = [{'scope': content_output_properties['scope'], 'name': content_output_properties['name'], 'min_id': content_output_properties['min_id'], 'max_id': content_output_properties['max_id'], 'status': ContentStatus.Available, 'path': '/abc/test_path'},
                {'scope': content_output_properties_name1['scope'], 'name': content_output_properties_name1['name'], 'min_id': content_output_properties_name1['min_id'], 'max_id': content_output_properties_name1['max_id'], 'status': ContentStatus.Failed}]
    client.register_contents(coll_scope=coll_properties['scope'], coll_name=coll_properties['name'], contents=contents, request_id=origin_request_id, workload_id=req_properties['request_metadata']['workload_id'])
    content = get_content(content_id=origin_content_output_id_0_1000)
    assert_equal(content['status'], ContentStatus.Available)
    assert_equal(content['path'], '/abc/test_path')
    content = get_content(content_id=content_output_properties_name1_id)
    assert_equal(content['status'], ContentStatus.Failed)
# http://www.apache.org/licenses/LICENSE-2.0
#
# Authors:
# - Wen Guan, <*****@*****.**>, 2020

"""
Test hyper parameter optimization test client.
"""

from idds.client.client import Client
from idds.common.constants import CollectionRelationType, ContentStatus
from idds.common.utils import get_rest_host

# host = "https://aipanda181.cern.ch:443/idds"
host = get_rest_host()
client = Client(host=host)

# Example stage-in collection to look up.
scope = 'data16_13TeV'
name = 'data16_13TeV.00298862.physics_Main.daq.RAW.idds.stagein'
request_id = 12
workload_id = 1601235010
relation_type = CollectionRelationType.Output  # other options: Input, Log
# Illustrative only — not passed to the query below.
status = ContentStatus.Available  # New, Processing, Available, ...

colls = client.get_collections(scope=scope,
                               name=name,
                               request_id=request_id,
                               workload_id=workload_id,
                               relation_type=relation_type)
print(colls)
'requester': 'panda', 'request_type': RequestType.ActiveLearning, 'transform_tag': 'prodsys2', 'status': RequestStatus.New, 'priority': 0, 'lifetime': 30, 'request_metadata': { 'workload_id': '20525134', 'sandbox': 'https://', 'executable': 'hostname', 'arguments': '-s --input %IN', 'output_json': 'output.json' } } return req_properties host = get_rest_host() props = get_req_properties() test_codes = get_test_codes() client = Client(host=host) test_codes_url = client.upload(test_codes) props['request_metadata']['sandbox'] = test_codes_url props['request_metadata']['executable'] = 'test.sh' props['request_metadata']['arguments'] = '-1 -2 test' # props['request_metadata']['result_parser'] = 'default' request_id = client.add_request(**props) print(request_id)
import sys

from idds.client.client import Client
from idds.common.utils import get_rest_host

if len(sys.argv) == 2:
    workload_id = sys.argv[1]
    request_id = None
elif len(sys.argv) == 3:
    workload_id = sys.argv[1]
    request_id = sys.argv[2]
else:
    # Bug fix: without this branch the script crashed with a NameError
    # (workload_id/request_id undefined) for any other argument count.
    print("Usage: %s <workload_id> [<request_id>]" % sys.argv[0])
    sys.exit(1)

# host = "https://aipanda181.cern.ch:443/idds"
host = get_rest_host()
client = Client(host=host)

# props['request_metadata']['result_parser'] = 'default'
params = client.get_hyperparameters(workload_id=workload_id, request_id=request_id)
print(params)

if not params:
    print("No parameters")
else:
    # Report a loss for the first hyperparameter point that has none yet.
    for param in params:
        param_id = param['id']  # renamed from 'id' to avoid shadowing the builtin
        if param['loss'] is None:
            print("updating %s" % param_id)
            ret = client.update_hyperparameter(workload_id=workload_id, request_id=request_id, id=param_id, loss=0.3)
            print(ret)
            break