def clean(job_id, role, party_id, roles, party_ids):
    JobController.clean_job(job_id=job_id, role=role, party_id=party_id,
                            roles=roles, party_ids=party_ids)
    return get_json_result(retcode=0, retmsg='success')

def kill_job(job_id, role, party_id):
    JobController.kill_job(job_id=job_id, role=role, party_id=int(party_id),
                           job_initiator=request.json.get('job_initiator', {}),
                           timeout=request.json.get('timeout', False))
    return get_json_result(retcode=0, retmsg='success')

def cancel_job(job_id, role, party_id):
    JobController.cancel_job(job_id=job_id, role=role, party_id=int(party_id),
                             job_initiator=request.json.get('job_initiator', {}))
    return get_json_result(retcode=0, retmsg='success')

def job_status(job_id, role, party_id):
    JobController.update_job_status(job_id=job_id, role=role, party_id=int(party_id),
                                    job_info=request.json, create=False)
    return get_json_result(retcode=0, retmsg='success')

def save_pipeline(job_id, role, party_id, model_id, model_version):
    # model_id and model_version arrive base64-encoded in the URL path
    JobController.save_pipeline(job_id=job_id, role=role, party_id=party_id,
                                model_id=base64_decode(model_id),
                                model_version=base64_decode(model_version))
    return get_json_result(retcode=0, retmsg='success')

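# A minimal sketch of the caller's side of save_pipeline() above: the model_id and
# model_version path segments must be base64-encoded to mirror the base64_decode()
# calls. This assumes plain standard-library base64; FATE's own encode helper may
# differ, so treat encode_path_segment() as a hypothetical example.
import base64

def encode_path_segment(value):
    # Encode a raw identifier so it can be embedded in the request URL path.
    return base64.b64encode(value.encode('utf-8')).decode('utf-8')
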
def submit_job():
    work_mode = request.json.get('job_runtime_conf', {}).get('job_parameters', {}).get('work_mode', None)
    detect_utils.check_config({'work_mode': work_mode},
                              required_arguments=[('work_mode', (WorkMode.CLUSTER, WorkMode.STANDALONE))])
    if work_mode == RuntimeConfig.WORK_MODE:
        job_id, job_dsl_path, job_runtime_conf_path, model_info, board_url = JobController.submit_job(request.json)
        return get_json_result(job_id=job_id, data={'job_dsl_path': job_dsl_path,
                                                    'job_runtime_conf_path': job_runtime_conf_path,
                                                    'model_info': model_info,
                                                    'board_url': board_url})
    elif RuntimeConfig.WORK_MODE == WorkMode.CLUSTER and work_mode == WorkMode.STANDALONE:
        # use the cluster's standalone job server to execute standalone jobs
        return request_execute_server(request=request,
                                      execute_host='{}:{}'.format(request.remote_addr,
                                                                  CLUSTER_STANDALONE_JOB_SERVER_PORT))
    else:
        raise Exception('a server running in standalone mode cannot execute cluster-mode jobs')

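# A hedged sketch of the payload submit_job() above expects. Only the keys the
# handler actually reads are shown; the DSL/conf contents are illustrative
# placeholders, and the WorkMode values (STANDALONE=0, CLUSTER=1) are an
# assumption of this example.
example_submit_payload = {
    'job_dsl': {},  # component definitions and wiring, omitted here
    'job_runtime_conf': {
        'job_parameters': {
            'work_mode': 1,  # must be one of WorkMode.CLUSTER / WorkMode.STANDALONE
        },
    },
}
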
def cancel_job(job_id, role, party_id):
    res = JobController.cancel_job(job_id=job_id, role=role, party_id=int(party_id),
                                   job_initiator=request.json.get('job_initiator', {}))
    if res:
        return get_json_result(retcode=0, retmsg='cancel job success')
    return get_json_result(retcode=101, retmsg='cancel job failed')

def submit_job():
    job_id, job_dsl_path, job_runtime_conf_path, model_info, board_url = JobController.submit_job(request.json)
    return get_json_result(job_id=job_id, data={'job_dsl_path': job_dsl_path,
                                                'job_runtime_conf_path': job_runtime_conf_path,
                                                'model_info': model_info,
                                                'board_url': board_url})

def query_task_input_args(job_id, component_name, task_id, role, party_id):
    task_input_args = JobController.query_task_input_args(job_id, task_id, role, party_id,
                                                          job_args=request.json.get('job_args', {}),
                                                          job_parameters=request.json.get('job_parameters', {}),
                                                          input_dsl=request.json.get('input', {}),
                                                          filter_type=['data'],
                                                          filter_attr={'data': ['partitions']})
    return get_json_result(retcode=0, retmsg='success', data=task_input_args)

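# Illustrative request body for query_task_input_args() above; the keys mirror the
# request.json.get(...) lookups and the values are placeholders. With
# filter_type=['data'] and filter_attr={'data': ['partitions']}, only data inputs
# are resolved and only their 'partitions' attribute is returned.
example_task_args_body = {
    'job_args': {},        # job-level arguments, e.g. input data descriptors
    'job_parameters': {},  # job-level runtime parameters
    'input': {},           # the task's input DSL, e.g. {'data': {...}}
}
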
def job_check(job_id, role, party_id):
    status = JobController.check_job_run(job_id, role, party_id, job_info=request.json)
    if status:
        return get_json_result(retcode=0, retmsg='success')
    else:
        return get_json_result(retcode=101,
                               retmsg='the number of jobs running on the host side exceeds the maximum allowed')

def task_status(job_id, component_name, task_id, role, party_id):
    JobController.update_task_status(job_id, component_name, task_id, role, party_id, request.json)
    return get_json_result(retcode=0, retmsg='success')

def download_upload(access_module):
    request_config = request.json
    required_arguments = ['work_mode', 'namespace', 'table_name']
    if access_module == 'upload':
        required_arguments.extend(['file', 'head', 'partition'])
    elif access_module == 'download':
        required_arguments.extend(['output_path'])
    else:
        raise Exception('can not support this operation: {}'.format(access_module))
    detect_utils.check_config(request_config, required_arguments=required_arguments)
    data = {}
    if access_module == "upload":
        data['table_name'] = request_config["table_name"]
        data['namespace'] = request_config["namespace"]
    job_dsl, job_runtime_conf = gen_data_access_job_config(request_config, access_module)
    job_id, job_dsl_path, job_runtime_conf_path, logs_directory, model_info, board_url = JobController.submit_job(
        {'job_dsl': job_dsl, 'job_runtime_conf': job_runtime_conf})
    data.update({'job_dsl_path': job_dsl_path,
                 'job_runtime_conf_path': job_runtime_conf_path,
                 'board_url': board_url,
                 'logs_directory': logs_directory})
    return get_json_result(job_id=job_id, data=data)

def download_upload(access_module):
    job_id = generate_job_id()
    if access_module == "upload" and USE_LOCAL_DATA and not (request.json and request.json.get("use_local_data") == 0):
        # the file is posted as multipart form data and staged under the job directory
        file = request.files['file']
        filename = os.path.join(get_job_directory(job_id), 'fate_upload_tmp', file.filename)
        os.makedirs(os.path.dirname(filename), exist_ok=True)
        try:
            file.save(filename)
        except Exception as e:
            # remove the staging directory that was just created, not an unrelated 'tmp' path
            shutil.rmtree(os.path.join(get_job_directory(job_id), 'fate_upload_tmp'))
            raise e
        request_config = request.args.to_dict()
        request_config['file'] = filename
    else:
        request_config = request.json
    required_arguments = ['work_mode', 'namespace', 'table_name']
    if access_module == 'upload':
        required_arguments.extend(['file', 'head', 'partition'])
    elif access_module in ['download', 'download_test']:
        required_arguments.extend(['output_path'])
    else:
        raise Exception('can not support this operation: {}'.format(access_module))
    detect_utils.check_config(request_config, required_arguments=required_arguments)
    data = {}
    if access_module == "upload":
        data['table_name'] = request_config["table_name"]
        data['namespace'] = request_config["namespace"]
        if WORK_MODE != 0:  # skip the existence check in standalone mode
            data_table = session.get_data_table(name=request_config["table_name"],
                                                namespace=request_config["namespace"])
            count = data_table.count()
            if count and int(request_config.get('drop', 2)) == 2:
                return get_json_result(retcode=100,
                                       retmsg='The data table already exists, table data count: {}. '
                                              'If you still want to continue uploading, please add the parameter -drop: '
                                              '0 means do not delete and continue uploading, '
                                              '1 means delete the table and upload again'.format(count))
            elif count and int(request_config.get('drop', 2)) == 1:
                data_table.destroy()
    job_dsl, job_runtime_conf = gen_data_access_job_config(request_config, access_module)
    job_id, job_dsl_path, job_runtime_conf_path, logs_directory, model_info, board_url = JobController.submit_job(
        {'job_dsl': job_dsl, 'job_runtime_conf': job_runtime_conf}, job_id=job_id)
    data.update({'job_dsl_path': job_dsl_path,
                 'job_runtime_conf_path': job_runtime_conf_path,
                 'board_url': board_url,
                 'logs_directory': logs_directory})
    return get_json_result(job_id=job_id, data=data)

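# A hedged example of an upload request config for download_upload() above. The
# field names come from the required_arguments checks; the values are placeholders,
# and the WorkMode encoding (0 = standalone) is an assumption of this sketch.
example_upload_config = {
    'work_mode': 0,               # 0 = standalone, 1 = cluster (assumed)
    'namespace': 'experiment',
    'table_name': 'my_table',
    'file': '/path/to/data.csv',  # server-side path, or multipart upload when USE_LOCAL_DATA applies
    'head': 1,                    # 1 = first row is a header
    'partition': 4,               # number of storage partitions
    'drop': 1,                    # optional: 1 = destroy an existing table and re-upload
}
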
def operate_model(model_operation):
    request_config = request.json or request.form.to_dict()
    job_id = generate_job_id()
    required_arguments = ["model_id", "model_version"]
    if model_operation not in [ModelOperation.STORE, ModelOperation.RESTORE,
                               ModelOperation.EXPORT, ModelOperation.IMPORT]:
        raise Exception('can not support this operation now: {}'.format(model_operation))
    check_config(request_config, required_arguments=required_arguments)
    if model_operation in [ModelOperation.EXPORT, ModelOperation.IMPORT]:
        if model_operation == ModelOperation.IMPORT:
            file = request.files.get('file')
            file_path = os.path.join(TEMP_DIRECTORY, file.filename)
            try:
                os.makedirs(os.path.dirname(file_path), exist_ok=True)
                file.save(file_path)
            except Exception as e:
                # remove the partially written archive; it is a file, so os.remove
                # is used rather than shutil.rmtree, which would fail on a file path
                if os.path.exists(file_path):
                    os.remove(file_path)
                raise e
            request_config['file'] = file_path
            model = pipelined_model.PipelinedModel(model_id=request_config["model_id"],
                                                   model_version=request_config["model_version"])
            model.unpack_model(file_path)
            return get_json_result()
        else:
            model = pipelined_model.PipelinedModel(model_id=request_config["model_id"],
                                                   model_version=request_config["model_version"])
            archive_file_path = model.packaging_model()
            return send_file(archive_file_path,
                             attachment_filename=os.path.basename(archive_file_path),
                             as_attachment=True)
    else:
        data = {}
        job_dsl, job_runtime_conf = gen_model_operation_job_config(request_config, model_operation)
        job_id, job_dsl_path, job_runtime_conf_path, logs_directory, model_info, board_url = JobController.submit_job(
            {'job_dsl': job_dsl, 'job_runtime_conf': job_runtime_conf}, job_id=job_id)
        data.update({'job_dsl_path': job_dsl_path,
                     'job_runtime_conf_path': job_runtime_conf_path,
                     'board_url': board_url,
                     'logs_directory': logs_directory})
        return get_json_result(job_id=job_id, data=data)

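# A client-side sketch of the EXPORT branch of operate_model() above, assuming the
# handler is mounted at /model/<model_operation> (the route decorator is not shown
# in this section) and that the `requests` library is available; both are
# assumptions of this example, not part of the handler code.
import requests

def export_model(base_url, model_id, model_version, save_path):
    # Stream the archive produced by packaging_model() / send_file() to disk.
    with requests.post('{}/model/export'.format(base_url),
                       json={'model_id': model_id, 'model_version': model_version},
                       stream=True) as resp:
        resp.raise_for_status()
        with open(save_path, 'wb') as f:
            for chunk in resp.iter_content(chunk_size=8192):
                f.write(chunk)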