Code example #1
    def test_create_and_check_for_request_rest(self):
        """ Request (REST): Test the creation, query, and deletion of a Request """
        host = get_rest_host()

        properties = get_request_properties()

        client = Client(host=host)

        request_id = client.add_request(**properties)

        requests = client.get_requests(request_id=request_id)
        assert_equal(len(requests), 1)
        assert_equal(request_id, requests[0]['request_id'])

        # 'lifetime' is not echoed back by the server, so skip it when
        # comparing the stored request against the input properties
        for key in properties:
            if key in ['lifetime']:
                continue
            assert_equal(requests[0][key], properties[key])

        client.update_request(request_id,
                              parameters={'status': RequestStatus.Failed})
        requests = client.get_requests(request_id=request_id)
        assert_equal(len(requests), 1)
        assert_equal(requests[0]['status'], RequestStatus.Failed)

        # querying a non-existent request_id should return an empty list
        reqs = client.get_requests(request_id=999999)
        assert_equal(len(reqs), 0)
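
The test above relies on imports that the page does not show. A plausible preamble, sketched under the assumption that the module paths follow the usual iDDS source layout and that the suite uses nose-style assertions:

# Assumed preamble for the test above; the module paths are a best guess
# at the iDDS layout, not confirmed by the snippet itself.
from nose.tools import assert_equal

from idds.client.client import Client
from idds.common.constants import RequestStatus
from idds.common.utils import get_rest_host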
Code example #2
def convert_req2reqv2(req):
    # Example of a v1 request record:
    # {'request_id': 3775, 'request_type': <RequestType.StageIn: 2>,
    #  'status': <RequestStatus.Cancelled: 9>, 'locking': <RequestLocking.Idle: 0>,
    #  'scope': 'valid1',
    #  'name': 'valid1.361027.Pythia8EvtGen_A14NNPDF23LO_jetjet_JZ7W.simul.HITS.e5112_s3227_tid12560193_00',  # noqa E501
    #  'requester': 'panda', 'transform_tag': '2', 'priority': 0,
    #  'workload_id': 23083304, 'substatus': None, 'errors': None,
    #  'request_metadata': {'workload_id': 23083304, 'rule_id': 'bef3da17f17c49ac97863bb9e96af672'},
    #  'processing_metadata': None,
    #  'created_at': datetime.datetime(2020, 11, 3, 10, 9, 32),
    #  'accessed_at': datetime.datetime(2020, 11, 3, 10, 9, 32),
    #  'updated_at': datetime.datetime(2020, 11, 3, 10, 9, 32),
    #  'expired_at': datetime.datetime(2020, 12, 3, 10, 9, 32),
    #  'next_poll_at': datetime.datetime(2020, 11, 3, 10, 9, 32)}

    if req['request_type'] == RequestType.StageIn:
        request_metadata = req['request_metadata']
        work = ATLASStageinWork(executable=None, arguments=None, parameters=None, setup=None,
                                exec_type='local', sandbox=None,
                                primary_input_collection={'scope': req['scope'], 'name': req['name']},
                                other_input_collections=None,
                                output_collections={'scope': req['scope'], 'name': req['name'] + '.idds.stagein'},
                                log_collections=None,
                                logger=None,
                                max_waiting_time=request_metadata.get('max_waiting_time', 3600 * 7 * 24),
                                src_rse=request_metadata.get('src_rse', None),
                                dest_rse=request_metadata.get('dest_rse', None),
                                rule_id=request_metadata.get('rule_id', None))
    elif req['request_type'] == RequestType.Workflow:
        ori_workflow = req['request_metadata']['workflow']
        ori_work = ori_workflow.works[ori_workflow.primary_initial_work]
        input_coll = ori_work.collections[ori_work.primary_input_collection]
        work = ATLASStageinWork(executable=None, arguments=None, parameters=None, setup=None,
                                exec_type='local', sandbox=None,
                                primary_input_collection={'scope': input_coll['scope'], 'name': input_coll['name']},
                                other_input_collections=None,
                                output_collections={'scope': input_coll['scope'], 'name': input_coll['name'] + '.idds.stagein'},
                                log_collections=None,
                                logger=None,
                                max_waiting_time=ori_work.max_waiting_time,
                                src_rse=ori_work.src_rse,
                                dest_rse=ori_work.dest_rse,
                                rule_id=ori_work.rule_id)
    else:
        # without this guard, 'work' would be unbound below
        raise ValueError('unsupported request_type: {}'.format(req['request_type']))

    # read the metadata from the request record directly; the local
    # request_metadata variable is only bound in the StageIn branch above
    workload_id = req['workload_id']
    if not workload_id and 'workload_id' in req['request_metadata']:
        workload_id = req['request_metadata']['workload_id']

    wf = Workflow()
    wf.set_workload_id(workload_id)
    wf.add_work(work)

    host = get_rest_host()
    wm = ClientManager(host=host)
    request_id = wm.submit(wf)
    # print(request_id)
    return request_id
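
A hedged usage sketch for the converter: assuming a Client configured as in example #1, a fetched v1 record can be resubmitted through the v2 workflow path. The request id 3775 is just the one from the comment above.

# Sketch: resubmit a v1 request through the v2 workflow API.
# Assumes `client` is an idds Client as in example #1.
for req in client.get_requests(request_id=3775):
    new_request_id = convert_req2reqv2(req)
    print('v1 request {} resubmitted as v2 request {}'.format(req['request_id'], new_request_id))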
Code example #3
File: test_workflow.py Project: SergeyPod/iDDS
    def test_workflow_request(self):
        workflow = self.init()

        props = {
            'scope': 'workflow',
            'name': workflow.get_name(),
            'requester': 'panda',
            'request_type': RequestType.Workflow,
            'transform_tag': 'workflow',
            'status': RequestStatus.New,
            'priority': 0,
            'lifetime': 30,
            'request_metadata': {'workload_id': '20776840', 'workflow': workflow.serialize()}
        }

        # print(props)
        host = get_rest_host()
        client = Client(host=host)
        request_id = client.add_request(**props)
        print(request_id)
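
A possible follow-up, assuming get_requests behaves as in example #1: poll the new request until it leaves the New state. The 30-second interval is arbitrary.

import time

# Sketch: poll the freshly created request until its status changes.
# Reuses `client`, `request_id` and RequestStatus from the snippet above.
while True:
    reqs = client.get_requests(request_id=request_id)
    if reqs and reqs[0]['status'] != RequestStatus.New:
        print('request {} moved to {}'.format(request_id, reqs[0]['status']))
        break
    time.sleep(30)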
Code example #4
File: datacarousel_test.py Project: chnzhangrui/iDDS

def get_req_properties():
    req_properties = {
        'scope': 'data16_13TeV',
        'name': 'data16_13TeV.00298862.physics_Main.daq.RAW',
        'requester': 'panda',
        'request_type': RequestType.StageIn,
        'transform_tag': 'prodsys2',
        'status': RequestStatus.New,
        'priority': 0,
        'lifetime': 30,
        'request_metadata': {
            'workload_id': '20776840',
            'src_rse': 'NDGF-T1_DATATAPE',
            'rule_id': '236e4bf87e11490291e3259b14724e30'
        }
    }
    return req_properties


host = get_rest_host()
props = get_req_properties()
# props = get_example_real_tape_stagein_request()
# props = get_example_prodsys2_tape_stagein_request()
# props = get_example_active_learning_request()

client = Client(host=host)
request_id = client.add_request(**props)
print(request_id)
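
The hard-coded dictionary generalizes naturally to other datasets. make_stagein_props below is a hypothetical helper that simply parameterizes the same fields:

def make_stagein_props(scope, name, workload_id, src_rse=None, rule_id=None, lifetime=30):
    # Hypothetical helper: same fields as get_req_properties above,
    # parameterized for an arbitrary dataset.
    return {
        'scope': scope,
        'name': name,
        'requester': 'panda',
        'request_type': RequestType.StageIn,
        'transform_tag': 'prodsys2',
        'status': RequestStatus.New,
        'priority': 0,
        'lifetime': lifetime,
        'request_metadata': {
            'workload_id': workload_id,
            'src_rse': src_rse,
            'rule_id': rule_id,
        },
    }

props = make_stagein_props('data16_13TeV', 'data16_13TeV.00298862.physics_Main.daq.RAW',
                           '20776840', src_rse='NDGF-T1_DATATAPE',
                           rule_id='236e4bf87e11490291e3259b14724e30')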
Code example #5
    def __init__(self, host=None):
        self.host = host
        if self.host is None:
            self.host = get_rest_host()
        self.client = Client(host=self.host)
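
The snippet is the constructor of a thin wrapper around Client. A minimal sketch of what the full wrapper might look like; the class name RequestHelper and the submit method are invented, with add_request taken from example #1:

class RequestHelper:
    # Hypothetical wrapper: defaults the host to the configured REST host.
    def __init__(self, host=None):
        self.host = host
        if self.host is None:
            self.host = get_rest_host()
        self.client = Client(host=self.host)

    def submit(self, **properties):
        # delegate to the wrapped Client, as in example #1
        return self.client.add_request(**properties)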
Code example #6
def core_exec(sandbox_url, log_token, dump_workflow, ops_file, user_name, test_mode):
    tmpLog = LogWrapper(_logger, log_token)
    is_OK = True
    is_fatal = False
    request_id = None
    # the flags arrive as strings from the command line, so convert them
    dump_workflow = (dump_workflow == 'True')
    test_mode = (test_mode == 'True')
    try:
        with open(ops_file) as f:
            ops = json.load(f)
        try:
            os.remove(ops_file)
        except Exception:
            pass
        # go to temp dir
        cur_dir = os.getcwd()
        with tempfile.TemporaryDirectory() as tmp_dirname:
            os.chdir(tmp_dirname)
            # download sandbox
            tmpLog.info('downloading sandbox from {}'.format(sandbox_url))
            with requests.get(sandbox_url, allow_redirects=True, verify=False, stream=True) as r:
                if r.status_code == 400:
                    tmpLog.error("not found")
                    is_fatal = True
                    is_OK = False
                elif r.status_code != 200:
                    tmpLog.error("bad HTTP response {}".format(r.status_code))
                    is_OK = False
                # extract sandbox
                if is_OK:
                    with open(ops['data']['sandbox'], 'wb') as fs:
                        for chunk in r.raw.stream(1024, decode_content=False):
                            if chunk:
                                fs.write(chunk)
                        fs.close()
                        tmp_stat, tmp_out = commands_get_status_output(
                            'tar xvfz {}'.format(ops['data']['sandbox']))
                        if tmp_stat != 0:
                            tmpLog.error(tmp_out)
                            dump_str = 'failed to extract {}'.format(ops['data']['sandbox'])
                            tmpLog.error(dump_str)
                            is_fatal = True
                            is_OK = False
                # parse workflow files
                if is_OK:
                    tmpLog.info('parse workflow')
                    if ops['data']['language'] == 'cwl':
                        nodes, root_in = pcwl_utils.parse_workflow_file(ops['data']['workflowSpecFile'],
                                                                        tmpLog)
                        with open(ops['data']['workflowInputFile']) as workflow_input:
                            data = yaml.safe_load(workflow_input)
                        s_id, t_nodes, nodes = pcwl_utils.resolve_nodes(nodes, root_in, data, 0, set(),
                                                                        ops['data']['outDS'], tmpLog)
                        workflow_utils.set_workflow_outputs(nodes)
                        id_node_map = workflow_utils.get_node_id_map(nodes)
                        for node in nodes:
                            node.resolve_params(ops['data']['taskParams'], id_node_map)
                        dump_str = "the description was internally converted as follows\n" \
                                   + workflow_utils.dump_nodes(nodes)
                        tmpLog.info(dump_str)
                        for node in nodes:
                            s_check, o_check = node.verify()
                            tmp_str = 'Verification failure in ID:{} {}'.format(node.id, o_check)
                            if not s_check:
                                tmpLog.error(tmp_str)
                                dump_str += tmp_str
                                dump_str += '\n'
                                is_fatal = True
                                is_OK = False
                    else:
                        dump_str = "{} is not supported to describe the workflow"
                        tmpLog.error(dump_str)
                        is_fatal = True
                        is_OK = False
                    # convert to workflow
                    if is_OK:
                        workflow_to_submit, dump_str_list = workflow_utils.convert_nodes_to_workflow(nodes)
                        try:
                            if workflow_to_submit:
                                if not test_mode:
                                    tmpLog.info('submit workflow')
                                    wm = ClientManager(host=get_rest_host())
                                    request_id = wm.submit(workflow_to_submit, username=user_name)
                            else:
                                dump_str = 'workflow is empty'
                                tmpLog.error(dump_str)
                                is_fatal = True
                                is_OK = False
                        except Exception as e:
                            dump_str = 'failed to submit the workflow with {}'.format(str(e))
                            tmpLog.error('{} {}'.format(dump_str, traceback.format_exc()))
                            # a failed submission should not be reported as success
                            is_OK = False
                        if dump_workflow:
                            tmpLog.debug('\n' + ''.join(dump_str_list))
        os.chdir(cur_dir)
    except Exception as e:
        is_OK = False
        is_fatal = True
        tmpLog.error("failed to run with {} {}".format(str(e), traceback.format_exc()))

    with tempfile.NamedTemporaryFile(delete=False, mode='w') as tmp_json:
        json.dump([is_OK, is_fatal, request_id, tmpLog.dumpToString()], tmp_json)
        print(tmp_json.name)
    sys.exit(0)
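
core_exec reports its outcome by dumping the JSON list [is_OK, is_fatal, request_id, log] into a named temporary file and printing that file's name before exiting. A hedged sketch of the caller side; the command line is illustrative, only the output contract comes from the function above:

import json
import subprocess

# Sketch: run the helper in a subprocess and read back its JSON result.
# 'run_workflow.py' is a placeholder for whatever script wraps core_exec;
# the last stdout line is the path of the JSON result file.
proc = subprocess.run(['python', 'run_workflow.py'], capture_output=True, text=True)
result_path = proc.stdout.strip().splitlines()[-1]
with open(result_path) as f:
    is_ok, is_fatal, request_id, log_str = json.load(f)
print('ok={} fatal={} request_id={}'.format(is_ok, is_fatal, request_id))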
Code example #7
    def test_catalog_rest(self):
        """ Catalog (Rest): Test catalog rest functions """
        host = get_rest_host()
        client = Client(host=host)

        req_properties = get_request_properties()
        origin_request_id = add_request(**req_properties)

        trans_properties = get_transform_properties()
        trans_properties['request_id'] = origin_request_id
        origin_trans_id = add_transform(**trans_properties)

        coll_properties = get_collection_properties()
        coll_properties['transform_id'] = origin_trans_id
        coll_properties['request_id'] = origin_request_id
        coll_properties['relation_type'] = CollectionRelationType.Output
        origin_coll_id = add_collection(**coll_properties)

        req_trans_colls = client.get_collections(request_id=origin_request_id, workload_id=None)
        assert_equal(len(req_trans_colls.keys()), 1)
        req_id = list(req_trans_colls.keys())[0]
        assert_equal(origin_request_id, req_id)
        assert_equal(len(req_trans_colls[req_id].keys()), 1)
        trans_id = list(req_trans_colls[req_id].keys())[0]
        assert_equal(trans_id, origin_trans_id)
        colls = req_trans_colls[req_id][trans_id]
        assert_equal(len(colls), 1)
        assert_equal(colls[0]['coll_id'], origin_coll_id)

        req_trans_colls1 = client.get_collections(request_id=None, workload_id=req_properties['request_metadata']['workload_id'])
        assert_equal(is_same_req_trans_colls(req_trans_colls, req_trans_colls1), True)

        req_trans_colls1 = client.get_collections(scope=coll_properties['scope'], name=coll_properties['name'],
                                                  request_id=None, workload_id=None)
        assert_equal(is_same_req_trans_colls(req_trans_colls, req_trans_colls1), True)

        content_output_properties = get_content_properties()
        content_output_properties['content_type'] = ContentType.File
        content_output_properties['min_id'] = 0
        content_output_properties['max_id'] = 1000
        content_output_properties['coll_id'] = origin_coll_id
        origin_content_output_id_0_1000 = add_content(returning_id=True, **content_output_properties)
        # the sub-range contents are Event-type slices of the File-type
        # content added above, so set content_type on each copy
        content_output_properties_0_100 = copy.deepcopy(content_output_properties)
        content_output_properties_0_100['min_id'] = 0
        content_output_properties_0_100['max_id'] = 100
        content_output_properties_0_100['content_type'] = ContentType.Event
        origin_content_output_id_0_100 = add_content(returning_id=True, **content_output_properties_0_100)
        content_output_properties_100_200 = copy.deepcopy(content_output_properties)
        content_output_properties_100_200['min_id'] = 100
        content_output_properties_100_200['max_id'] = 200
        content_output_properties_100_200['content_type'] = ContentType.Event
        origin_content_output_id_100_200 = add_content(returning_id=True, **content_output_properties_100_200)
        content_output_properties_name1 = copy.deepcopy(content_output_properties)
        content_output_properties_name1['name'] = content_output_properties_name1['name'] + '_1'
        content_output_properties_name1_id = add_content(returning_id=True, **content_output_properties_name1)

        req_trans_coll_contents = client.get_contents(coll_scope=coll_properties['scope'], coll_name=coll_properties['name'],
                                                      request_id=origin_request_id, workload_id=req_properties['request_metadata']['workload_id'])
        coll_contents = req_trans_coll_contents[origin_request_id][origin_trans_id]
        coll_scope_name = '%s:%s' % (coll_properties['scope'], coll_properties['name'])
        coll_scope_names = [scope_name for scope_name in coll_contents]
        assert_equal(coll_scope_names, [coll_scope_name])
        contents = coll_contents[coll_scope_name]['contents']
        assert_equal(len(contents), 4)
        output_content_ids = [output_content['content_id'] for output_content in contents]
        assert_equal(output_content_ids, [origin_content_output_id_0_1000, origin_content_output_id_0_100,
                                          origin_content_output_id_100_200, content_output_properties_name1_id])

        req_trans_coll_contents1 = client.get_contents(coll_scope=coll_properties['scope'], coll_name=coll_properties['name'],
                                                       request_id=origin_request_id, workload_id=req_properties['request_metadata']['workload_id'],
                                                       relation_type=CollectionRelationType.Output)
        assert_equal(is_same_req_trans_coll_contents(req_trans_coll_contents, req_trans_coll_contents1), True)

        contents = client.get_match_contents(coll_scope=coll_properties['scope'], coll_name=coll_properties['name'],
                                             scope=content_output_properties['scope'], name=content_output_properties['name'],
                                             min_id=None, max_id=None, request_id=origin_request_id,
                                             workload_id=req_properties['request_metadata']['workload_id'], only_return_best_match=False)
        assert_equal(len(contents), 3)
        content_ids = [content['content_id'] for content in contents]
        content_ids.sort()
        content_ids1 = [origin_content_output_id_0_1000, origin_content_output_id_0_100, origin_content_output_id_100_200]
        content_ids1.sort()
        assert_equal(content_ids, content_ids1)

        contents = client.get_match_contents(coll_scope=coll_properties['scope'], coll_name=coll_properties['name'],
                                             scope=content_output_properties['scope'], name=content_output_properties['name'],
                                             min_id=0, max_id=50, request_id=origin_request_id,
                                             workload_id=req_properties['request_metadata']['workload_id'], only_return_best_match=False)
        assert_equal(len(contents), 2)
        content_ids = [content['content_id'] for content in contents]
        content_ids.sort()
        content_ids1 = [origin_content_output_id_0_1000, origin_content_output_id_0_100]
        content_ids1.sort()
        assert_equal(content_ids, content_ids1)

        contents = client.get_match_contents(coll_scope=coll_properties['scope'], coll_name=coll_properties['name'],
                                             scope=content_output_properties['scope'], name=content_output_properties['name'],
                                             min_id=0, max_id=50, request_id=origin_request_id,
                                             workload_id=req_properties['request_metadata']['workload_id'], only_return_best_match=True)
        assert_equal(len(contents), 1)
        content_ids = [content['content_id'] for content in contents]
        assert_equal(content_ids, [origin_content_output_id_0_100])

        contents = [{'scope': content_output_properties['scope'], 'name': content_output_properties['name'],
                     'min_id': content_output_properties['min_id'], 'max_id': content_output_properties['max_id'],
                     'status': ContentStatus.Available, 'path': '/abc/test_path'},
                    {'scope': content_output_properties_name1['scope'], 'name': content_output_properties_name1['name'],
                     'min_id': content_output_properties_name1['min_id'], 'max_id': content_output_properties_name1['max_id'],
                     'status': ContentStatus.Failed}]
        client.register_contents(coll_scope=coll_properties['scope'], coll_name=coll_properties['name'],
                                 contents=contents, request_id=origin_request_id,
                                 workload_id=req_properties['request_metadata']['workload_id'])
        content = get_content(content_id=origin_content_output_id_0_1000)
        assert_equal(content['status'], ContentStatus.Available)
        assert_equal(content['path'], '/abc/test_path')
        content = get_content(content_id=content_output_properties_name1_id)
        assert_equal(content['status'], ContentStatus.Failed)
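
The get_contents result asserted above is nested as {request_id: {transform_id: {'scope:name': {'contents': [...]}}}}. A small sketch of a generic traversal; the helper name iter_contents is invented:

def iter_contents(req_trans_coll_contents):
    # Hypothetical helper: flatten the nested get_contents structure into
    # (request_id, transform_id, 'scope:name', content) tuples, following
    # the nesting used in the assertions above.
    for req_id, trans_map in req_trans_coll_contents.items():
        for trans_id, coll_map in trans_map.items():
            for scope_name, coll in coll_map.items():
                for content in coll['contents']:
                    yield req_id, trans_id, scope_name, content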