def update_job_status(job_id, role, party_id, job_info, create=False):
    """Persist job status for one (role, party); on creation, also save the job
    configuration, initialize tracking and log the job view."""
    job_info['f_run_ip'] = RuntimeConfig.JOB_SERVER_HOST
    if create:
        dsl = json_loads(job_info['f_dsl'])
        runtime_conf = json_loads(job_info['f_runtime_conf'])
        train_runtime_conf = json_loads(job_info['f_train_runtime_conf'])
        if USE_AUTHENTICATION:
            # Verify that the submitting party is allowed to run this job.
            authentication_check(src_role=job_info.get('src_role', None),
                                 src_party_id=job_info.get('src_party_id', None),
                                 dsl=dsl,
                                 runtime_conf=runtime_conf,
                                 role=role,
                                 party_id=party_id)
        save_job_conf(job_id=job_id,
                      job_dsl=dsl,
                      job_runtime_conf=runtime_conf,
                      train_runtime_conf=train_runtime_conf,
                      pipeline_dsl=None)
        job_parameters = runtime_conf['job_parameters']
        job_tracker = Tracking(job_id=job_id,
                               role=role,
                               party_id=party_id,
                               model_id=job_parameters["model_id"],
                               model_version=job_parameters["model_version"])
        if job_parameters.get("job_type", "") != "predict":
            # Training jobs get a fresh pipelined model store.
            job_tracker.init_pipelined_model()
        roles = json_loads(job_info['f_roles'])
        partner = {}
        show_role = {}
        is_initiator = job_info.get('f_is_initiator', 0)
        for _role, _role_party in roles.items():
            # The initiator sees every party; other parties only see themselves.
            if is_initiator or _role == role:
                show_role[_role] = show_role.get(_role, [])
                for _party_id in _role_party:
                    if is_initiator or _party_id == party_id:
                        show_role[_role].append(_party_id)
            # Every other participant is recorded as a partner.
            if _role != role:
                partner[_role] = partner.get(_role, [])
                partner[_role].extend(_role_party)
            else:
                for _party_id in _role_party:
                    if _party_id != party_id:
                        partner[_role] = partner.get(_role, [])
                        partner[_role].append(_party_id)
        dag = get_job_dsl_parser(dsl=dsl,
                                 runtime_conf=runtime_conf,
                                 train_runtime_conf=train_runtime_conf)
        job_args = dag.get_args_input()
        dataset = {}
        # Record each visible party's input tables as "namespace.name".
        for _role, _role_party_args in job_args.items():
            if is_initiator or _role == role:
                for _party_index in range(len(_role_party_args)):
                    _party_id = roles[_role][_party_index]
                    if is_initiator or _party_id == party_id:
                        dataset[_role] = dataset.get(_role, {})
                        dataset[_role][_party_id] = dataset[_role].get(_party_id, {})
                        for _data_type, _data_location in _role_party_args[_party_index]['args']['data'].items():
                            dataset[_role][_party_id][_data_type] = '{}.{}'.format(_data_location['namespace'],
                                                                                   _data_location['name'])
        job_tracker.log_job_view({'partner': partner, 'dataset': dataset, 'roles': show_role})
    else:
        job_tracker = Tracking(job_id=job_id, role=role, party_id=party_id)
    job_tracker.save_job_info(role=role, party_id=party_id, job_info=job_info, create=create)
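
# --- Illustrative sketch (not part of the original module) ---
# Rough shape of the `job_info` record that update_job_status() expects when
# create=True: the f_dsl / f_runtime_conf / f_train_runtime_conf / f_roles
# fields are JSON strings, matching the json_loads() calls above. All concrete
# values below are made up for illustration only.
def _example_job_info():
    import json
    return {
        'f_dsl': json.dumps({'components': {}}),
        'f_runtime_conf': json.dumps({
            'job_parameters': {'model_id': 'guest-9999#host-10000#model',
                               'model_version': '20200101000000',
                               'job_type': 'train'}
        }),
        'f_train_runtime_conf': json.dumps({}),
        'f_roles': json.dumps({'guest': [9999], 'host': [10000]}),
        'f_is_initiator': 1,
        'f_status': 'running',
    }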
# Shared lock so that concurrent submissions serialize the check below; a lock
# created inside the function would be private to each call and guard nothing.
job_quantity_lock = Lock()


def job_quantity_constraint(job_id, role, party_id, job_info):
    """Return False when the host side has reached its concurrent-job limit;
    otherwise tag the job as 'ready' and return True."""
    with job_quantity_lock:
        time.sleep(1)
        if RuntimeConfig.WORK_MODE == WorkMode.CLUSTER:
            if role == LIMIT_ROLE:
                running_jobs = job_utils.query_job(status='running', role=role)
                ready_jobs = job_utils.query_job(tag='ready', role=role)
                if len(running_jobs) + len(ready_jobs) >= MAX_CONCURRENT_JOB_RUN_HOST:
                    return False
                else:
                    tracker = Tracking(job_id=job_id, role=role, party_id=party_id)
                    tracker.save_job_info(role=role, party_id=party_id, job_info={'f_tag': 'ready'})
    return True
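
# --- Illustrative sketch (not part of the original module) ---
# A minimal example of how a caller might combine the two helpers above:
# enforce the host-side concurrency limit first, then persist the job as
# created and running. `schedule_job_sketch` is a hypothetical name, and
# `job_info` is assumed to carry the f_dsl / f_runtime_conf / f_roles fields
# consumed by update_job_status() when create=True.
def schedule_job_sketch(job_id, role, party_id, job_info):
    if not job_quantity_constraint(job_id, role, party_id, job_info):
        # Too many running/ready jobs on this host: refuse the submission.
        return False
    job_info['f_status'] = 'running'
    update_job_status(job_id=job_id, role=role, party_id=party_id,
                      job_info=job_info, create=True)
    return True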