def toy(ctx, **kwargs):
    """Submit a toy test job and poll its status until it finishes or times out.

    Builds a FlowClient from the connection settings stored in ``ctx.obj``,
    submits the toy test, then queries the job status once per second for up
    to ``kwargs["timeout"]`` attempts. Terminal states trigger a log check.

    :param ctx: CLI context; ``ctx.obj`` must carry ip/http_port/api_version
                and optionally app_key/secret_key.
    :param kwargs: forwarded to the toy test; must include ``timeout`` (int)
                   and ``guest_party_id``.
    """
    flow_sdk = FlowClient(ip=ctx.obj["ip"], port=ctx.obj["http_port"], version=ctx.obj["api_version"],
                          app_key=ctx.obj.get("app_key"), secret_key=ctx.obj.get("secret_key"))
    submit_result = flow_sdk.test.toy(**kwargs)
    if submit_result["retcode"] == 0:
        # Bug fix: job_id is loop-invariant, so hoist it; job_status is
        # initialized up front so the for/else timeout branch below cannot
        # raise UnboundLocalError when every query attempt failed.
        job_id = submit_result["jobId"]
        job_status = None
        for _ in range(kwargs["timeout"]):
            r = flow_sdk.job.query(job_id=job_id, role="guest", party_id=kwargs["guest_party_id"])
            if r["retcode"] == 0 and len(r["data"]):
                job_status = r["data"][0]["f_status"]
                print(f"toy test job {job_id} is {job_status}")
                if job_status in {"success", "failed", "canceled"}:
                    check_log(flow_sdk, kwargs["guest_party_id"], job_id, job_status)
                    break
            time.sleep(1)
        else:
            # Loop exhausted without reaching a terminal state.
            print("check job status timeout")
            check_log(flow_sdk, kwargs["guest_party_id"], job_id, job_status)
    else:
        prettify(submit_result)
def load(ctx, **kwargs):
    """
    \b
    - DESCRIPTION:
        Load Model Command

    \b
    - USAGE:
        flow model load -c fate_flow/examples/publish_load_model.json
        flow model load -j $JOB_ID
    """
    # Exactly one of conf path / job id must be supplied.
    has_conf = bool(kwargs.get("conf_path"))
    has_job_id = bool(kwargs.get("job_id"))
    if not has_conf and not has_job_id:
        prettify({
            "retcode": 100,
            "retmsg": "Load model failed. No arguments received, "
                      "please provide one of arguments from job id and conf path."
        })
    elif has_conf and has_job_id:
        prettify({
            "retcode": 100,
            "retmsg": "Load model failed. Please do not provide job id and "
                      "conf path at the same time."
        })
    else:
        config_data, dsl_data = preprocess(**kwargs)
        access_server('post', ctx, 'model/load', config_data)
def get_predict_conf(ctx, **kwargs):
    """
    \b
    - DESCRIPTION:
        Get predict conf template.

    \b
    - USAGE:
        flow model get-predict-conf --model_id $MODEL_ID --model_version $MODEL_VERSION -o ./examples/
    """
    config_data, dsl_data = preprocess(**kwargs)
    # Timestamped file name keeps repeated downloads from clobbering each other.
    conf_filename = "predict_conf_{}.json".format(datetime.now().strftime('%Y%m%d%H%M%S'))
    output_path = os.path.join(check_abs_path(kwargs.get("output_path")), conf_filename)
    config_data["filename"] = conf_filename
    with closing(access_server('post', ctx, 'model/get/predict/conf', config_data, False, stream=True)) as response:
        if response.status_code == 200:
            os.makedirs(os.path.dirname(output_path), exist_ok=True)
            # Stream the template to disk in 1 KiB chunks.
            with open(output_path, "wb") as fw:
                for chunk in response.iter_content(1024):
                    if chunk:
                        fw.write(chunk)
            res = {'retcode': 0,
                   'retmsg': "Query predict conf successfully. "
                             "File path is: {}".format(output_path)}
        else:
            try:
                res = response.json() if isinstance(response, requests.models.Response) else response
            except Exception:
                # Bug fix: the two adjacent literals lacked a separating space,
                # producing "failed.For more details".
                res = {'retcode': 100,
                       'retmsg': "Query predict conf failed. "
                                 "For more details, please check logs/fate_flow/fate_flow_stat.log"}
    prettify(res)
def import_model(ctx, **kwargs):
    """
    \b
    - DESCRIPTION:
        Import Model Command. Users can currently import models from files or
        databases (including mysql and redis).

    \b
    - USAGE:
        flow model import -c fate_flow/examples/import_model.json
        flow model import -c fate_flow/examples/restore_model.json --from-database
    """
    config_data, dsl_data = preprocess(**kwargs)
    if not config_data.pop('from_database'):
        file_path = config_data.get("file", None)
        if file_path:
            # Relative paths are resolved against the project base directory.
            if not os.path.isabs(file_path):
                file_path = os.path.join(get_project_base_directory(), file_path)
            if os.path.exists(file_path):
                # Bug fix: open the model archive in a context manager so the
                # file handle is always closed (it previously leaked).
                with open(file_path, 'rb') as fp:
                    files = {'file': fp}
                    access_server('post', ctx, 'model/import', data=config_data, files=files)
            else:
                prettify({'retcode': 100,
                          'retmsg': 'Import model failed. The file is obtained from the fate flow client machine, '
                                    'but it does not exist, please check the path: {}'.format(file_path)})
        else:
            prettify({
                'retcode': 100,
                'retmsg': "Import model failed. Please specify the valid model file path and try again."
            })
    else:
        access_server('post', ctx, 'model/restore', config_data)
def export_model(ctx, **kwargs):
    """
    \b
    - DESCRIPTION:
        Export Model Command. Users can currently export models to files or
        databases (including mysql and redis).

    \b
    - USAGE:
        flow model export -c fate_flow/examples/export_model.json
        flow model export -c fate_flow/examplse/store_model.json --to-database
    """
    config_data, dsl_data = preprocess(**kwargs)
    if config_data.pop('to_database'):
        # Database target: the server stores the model; just forward the request.
        access_server('post', ctx, 'model/store', config_data)
        return
    # File target: stream the archive down and save it locally.
    with closing(access_server('get', ctx, 'model/export', config_data, False, stream=True)) as response:
        if response.status_code == 200:
            # The server names the archive via the Content-Disposition header.
            archive_file_name = re.findall("filename=(.+)", response.headers["Content-Disposition"])[0]
            os.makedirs(config_data["output_path"], exist_ok=True)
            archive_file_path = os.path.join(config_data["output_path"], archive_file_name)
            with open(archive_file_path, 'wb') as fw:
                for chunk in response.iter_content(1024):
                    if not chunk:
                        continue
                    fw.write(chunk)
            response_dict = {
                'retcode': 0,
                'file': archive_file_path,
                'retmsg': 'download successfully, please check {}'.format(archive_file_path),
            }
        else:
            response_dict = response.json() if isinstance(response, requests.models.Response) else response.json
    prettify(response_dict)
def homo_deploy_model(ctx, **kwargs):
    """
    \b
    - DESCRIPTION:
        Deploy trained homogenous model to a target online serving system.
        The model must be converted beforehand. Currently the supported
        target serving system is KFServing. Refer to the example json for
        detailed parameters.

    \b
    - USAGE:
        flow model h**o-deploy -c fate_flow/examples/homo_deploy_model.json
    """
    config_data, dsl_data = preprocess(**kwargs)
    if config_data.get('deployment_type') == "kfserving":
        kube_config = config_data.get('deployment_parameters', {}).get('config_file')
        if kube_config:
            if not check_abs_path(kube_config):
                prettify({
                    "retcode": 100,
                    "retmsg": "The kube_config file is obtained from the fate flow client machine, "
                              "but it does not exist. Please check the path: {}".format(
                        kube_config)
                })
                return
            # Inline the kubeconfig contents so the server side does not need
            # filesystem access to the client machine, then drop the path key.
            with open(kube_config, 'r') as fp:
                config_data['deployment_parameters']['config_file_content'] = fp.read()
            del config_data['deployment_parameters']['config_file']
    access_server('post', ctx, 'model/h**o/deploy', config_data)
def config(ctx, **kwargs):
    """
    \b
    - DESCRIPTION:
        Download Configurations of A Specified Job.

    \b
    - USAGE:
        flow job config -j $JOB_ID -r host -p 10000 --output-path ./examples/
    """
    config_data, dsl_data = preprocess(**kwargs)
    response = access_server('post', ctx, 'job/config', config_data, False)
    # Normalize the raw HTTP response (requests or flask flavour) to a dict.
    if isinstance(response, requests.models.Response):
        response = response.json()
    if isinstance(response, flask.wrappers.Response):
        response = response.json
    if response['retcode'] == 0:
        job_id = response['data']['job_id']
        download_directory = os.path.join(
            os.path.abspath(config_data['output_path']), 'job_{}_config'.format(job_id))
        os.makedirs(download_directory, exist_ok=True)
        # Write every data section (except the job id itself) as its own JSON file.
        for section_name, section_body in response['data'].items():
            if section_name == 'job_id':
                continue
            with open('{}/{}.json'.format(download_directory, section_name), 'w') as fw:
                json.dump(section_body, fw, indent=4)
        # The bulky dsl/runtime_conf bodies are on disk now; keep the printed
        # summary short by dropping them from the response.
        del response['data']['dsl']
        del response['data']['runtime_conf']
        response['directory'] = download_directory
        response['retmsg'] = 'download successfully, please check {} directory'.format(download_directory)
    prettify(response)
def output_data(ctx, **kwargs):
    """
    \b
    - DESCRIPTION:
        Download the Output Data of A Specified Component.

    \b
    - USAGE:
        flow component output-data -j $JOB_ID -r host -p 10000 -cpn hetero_feature_binning_0 --output-path ./examples/
    """
    config_data, dsl_data = preprocess(**kwargs)
    tar_file_name = 'job_{}_{}_{}_{}_output_data.tar.gz'.format(config_data['job_id'],
                                                                config_data['component_name'],
                                                                config_data['role'],
                                                                config_data['party_id'])
    # Extract next to the requested output path, into a directory named
    # after the archive (minus its .tar.gz suffix).
    extract_dir = os.path.join(config_data['output_path'], tar_file_name.replace('.tar.gz', ''))
    with closing(access_server('get', ctx, 'tracking/component/output/data/download',
                               config_data, False, stream=True)) as response:
        if response.status_code == 200:
            try:
                download_from_request(http_response=response, tar_file_name=tar_file_name, extract_dir=extract_dir)
                res = {'retcode': 0,
                       'directory': os.path.abspath(extract_dir),
                       'retmsg': 'Download successfully, please check {} directory'.format(os.path.abspath(extract_dir))}
            # Bug fix: was a bare `except:`, which also swallowed
            # SystemExit/KeyboardInterrupt; narrow it to Exception.
            except Exception:
                res = {'retcode': 100,
                       'retmsg': 'Download failed, please check if the parameters are correct.'}
        else:
            try:
                res = response.json() if isinstance(response, requests.models.Response) else response
            except Exception:
                res = {'retcode': 100,
                       'retmsg': 'Download failed, for more details please check logs/fate_flow/fate_flow_stat.log.'}
    prettify(res)
def upload(ctx, **kwargs):
    """
    \b
    - DESCRIPTION:
        Upload Data Table.

    \b
    - Usage:
        flow data upload -c fate_flow/examples/upload_guest.json
        flow data upload -c fate_flow/examples/upload_host.json --verbose --drop
    """
    # Encode CLI flags for the server: drop uses 1/2 (presumably 1 = drop
    # existing table, 2 = keep — TODO confirm against the server API);
    # verbose becomes a plain int.
    kwargs['drop'] = 1 if kwargs['drop'] else 2
    kwargs['verbose'] = int(kwargs['verbose'])
    config_data, dsl_data = preprocess(**kwargs)
    # use_local_data defaults to truthy: the file lives on this client machine
    # and must be streamed up; otherwise the server reads it from its own side.
    if config_data.get('use_local_data', 1):
        file_name = check_abs_path(config_data.get('file'))
        if os.path.exists(file_name):
            with open(file_name, 'rb') as fp:
                # Multipart body streams the file without loading it into memory.
                data = MultipartEncoder(
                    fields={
                        'file': (os.path.basename(file_name), fp, 'application/octet-stream')
                    })
                # Mutable cell shared with the closure below; counts how many
                # times the monitor reports 100% so the trailing newline is
                # printed exactly once (the final chunk is read twice).
                tag = [0]

                def read_callback(monitor):
                    # Progress bar on stdout, only when --verbose was passed.
                    if config_data.get('verbose') == 1:
                        sys.stdout.write("\r UPLOADING:{0}{1}".format(
                            "|" * (monitor.bytes_read * 100 // monitor.len),
                            '%.2f%%' % (monitor.bytes_read * 100 // monitor.len)))
                        sys.stdout.flush()
                        if monitor.bytes_read / monitor.len == 1:
                            tag[0] += 1
                            if tag[0] == 2:
                                sys.stdout.write('\n')
                data = MultipartEncoderMonitor(data, read_callback)
                # Note: the request must run inside the `with` so fp stays open
                # while the encoder streams it.
                access_server('post', ctx, 'data/upload', json_data=None,
                              data=data, params=config_data,
                              headers={'Content-Type': data.content_type})
        else:
            prettify({
                "retcode": 100,
                "retmsg": "The file is obtained from the fate flow client machine, but it does not exist, "
                          "please check the path: {}".format(file_name)
            })
    else:
        # Server-side file: just forward the config.
        access_server('post', ctx, 'data/upload', config_data)
def download_summary(ctx, **kwargs):
    """
    \b
    - DESCRIPTION:
        Download summary of a specified component and save it as a json file.

    \b
    - USAGE:
        flow component download-summary -j $JOB_ID -r host -p 10000 -cpn hetero_feature_binning_0
        flow component download-summary -j $JOB_ID -r host -p 10000 -cpn hetero_feature_binning_0 -o ./examples/
    """
    config_data, dsl_data = preprocess(**kwargs)
    output_dir = config_data.get("output_path")
    if not output_dir:
        # No local output path given: forward the request and let the server reply.
        access_server("post", ctx, "tracking/component/summary/download", config_data)
        return
    if not os.path.isdir(output_dir):
        res = {
            "retcode": 100,
            "retmsg": "Please input a valid directory path."
        }
    else:
        # Timestamped per-component file name under the requested directory.
        config_data["filename"] = "summary_{}_{}.json".format(config_data['component_name'],
                                                              datetime.now().strftime('%Y%m%d%H%M%S'))
        config_data["output_path"] = os.path.join(check_abs_path(output_dir), config_data["filename"])
        with closing(access_server("post", ctx, "tracking/component/summary/download",
                                   config_data, False, stream=True)) as response:
            if response.status_code == 200:
                with open(config_data["output_path"], "wb") as fout:
                    for piece in response.iter_content(1024):
                        if piece:
                            fout.write(piece)
                res = {
                    "retcode": 0,
                    "retmsg": "The summary of component <{}> has been stored successfully. "
                              "File path is: {}.".format(config_data["component_name"],
                                                         config_data["output_path"])
                }
            else:
                try:
                    res = response.json() if isinstance(response, requests.models.Response) else response
                except Exception:
                    res = {"retcode": 100,
                           "retmsg": "Download component summary failed, "
                                     "for more details, please check logs/fate_flow/fate_flow_stat.log."}
    prettify(res)
def initialization(**kwargs):
    """
    \b
    - DESCRIPTION:
        Flow CLI Init Command. Custom can choose to provide an absolute path of server conf file,
        or provide ip address and http port of a valid fate flow server. Notice that, if custom
        provides both, the server conf would be loaded in priority. In this case, ip address and
        http port would be ignored.

    \b
    - USAGE:
        flow init -c /data/projects/fate/python/conf/service_conf.yaml
        flow init --ip 127.0.0.1 --port 9380
    """
    settings_path = os.path.join(os.path.dirname(__file__), "settings.yaml")
    with open(settings_path, "r") as fin:
        config = yaml.safe_load(fin)

    if kwargs.get('reset'):
        # Wipe every stored connection setting and persist the cleared file.
        config["server_conf_path"] = None
        config["ip"] = None
        config["port"] = None
        with open(settings_path, "w") as fout:
            yaml.dump(config, fout, Dumper=yaml.RoundTripDumper)
        prettify({
            "retcode": 0,
            "retmsg": "Fate Flow CLI has been reset successfully. "
                      "Please do initialization again before you using flow CLI v2."
        })
        return

    # Merge whichever settings the caller supplied into the loaded config.
    if kwargs.get("server_conf_path"):
        config["server_conf_path"] = os.path.abspath(kwargs.get("server_conf_path"))
    if kwargs.get("ip"):
        config["ip"] = kwargs.get("ip")
    if kwargs.get("port"):
        config["port"] = kwargs.get("port")

    # A conf file alone, or an ip+port pair, is enough to initialize.
    if kwargs.get("server_conf_path") or (kwargs.get("ip") and kwargs.get("port")):
        with open(settings_path, "w") as fout:
            yaml.dump(config, fout, Dumper=yaml.RoundTripDumper)
        prettify({
            "retcode": 0,
            "retmsg": "Fate Flow CLI has been initialized successfully."
        })
    else:
        prettify({
            "retcode": 100,
            "retmsg": "Fate Flow CLI initialization failed. Please provides server configuration file path "
                      "or server http ip address and port information."
        })
def log(ctx, **kwargs):
    """
    \b
    - DESCRIPTION:
        Download Log Files of A Specified Job.

    \b
    - USAGE:
        flow job log -j JOB_ID --output-path ./examples/
    """
    config_data, dsl_data = preprocess(**kwargs)
    job_id = config_data['job_id']
    tar_file_name = 'job_{}_log.tar.gz'.format(job_id)
    extract_dir = os.path.join(config_data['output_path'], 'job_{}_log'.format(job_id))
    with closing(access_server('get', ctx, 'job/log', config_data, False, stream=True)) as response:
        if response.status_code != 200:
            # Pass the server's error payload through unchanged.
            res = response.json() if isinstance(response, requests.models.Response) else response
        else:
            download_from_request(http_response=response,
                                  tar_file_name=tar_file_name,
                                  extract_dir=extract_dir)
            res = {
                'retcode': 0,
                'directory': extract_dir,
                'retmsg': 'download successfully, please check {} directory'.format(extract_dir)
            }
    prettify(res)
def query(ctx, **kwargs):
    """
    \b
    - DESCRIPTION:
        Query job information by filters. Used to be 'query_job'.

    \b
    - USAGE:
        flow job query -r guest -p 9999 -s success
        flow job query -j $JOB_ID -cpn hetero_feature_binning_0
    """
    config_data, dsl_data = preprocess(**kwargs)
    response = access_server('post', ctx, "job/query", config_data, False)
    # Normalize the raw HTTP response (requests or flask flavour) to a dict.
    if isinstance(response, requests.models.Response):
        response = response.json()
    if isinstance(response, flask.wrappers.Response):
        response = response.json
    if response['retcode'] == 0:
        # Idiom fix: iterate the job records directly instead of
        # `for i in range(len(...))`. pop(..., None) also keeps the command
        # from crashing on a record that lacks one of the bulky fields.
        for job in response['data']:
            job.pop('f_runtime_conf', None)
            job.pop('f_dsl', None)
    prettify(response)
def download(ctx, **kwargs):
    """
    \b
    - DESCRIPTION:
        Download template conf/dsl/data files

    \b
    - USAGE:
        flow template download --min-data 1 --output-path ./examples/
    """
    config_data, dsl_data = preprocess(**kwargs)
    tar_file_name = 'template.tar.gz'
    extract_dir = config_data['output_path']
    with closing(access_server('post', ctx, 'template/download',
                               config_data, False, stream=True)) as response:
        if response.status_code != 200:
            # Pass the server's error payload through unchanged.
            res = response.json() if isinstance(response, requests.models.Response) else response
        else:
            download_from_request(http_response=response,
                                  tar_file_name=tar_file_name,
                                  extract_dir=extract_dir)
            res = {
                'retcode': 0,
                'directory': extract_dir,
                'retmsg': 'download successfully, please check {} directory'.format(extract_dir)
            }
    prettify(res)
def dsl_generator(ctx, **kwargs):
    """
    \b
    - DESCRIPTION:
        A predict dsl generator. Before using predict dsl generator, users should prepare:
            1. name list of component which you are going to use in predict progress,
            2. the train dsl file path you specified in train progress.

    \b
        Notice that users can choose to specify the component name list by using a text file,
        or, by typing in terminal. We, however, strongly recommend users using prepared files
        to specify the component list in order to avoid some unnecessary mistakes.

    \b
    - USAGE:
        flow job dsl --cpn-path fate_flow/examples/component_list.txt --train-dsl-path fate_flow/examples/test_hetero_lr_job_dsl.json -o fate_flow/examples/
        flow job dsl --cpn-list "dataio_0, hetero_feature_binning_0, hetero_feature_selection_0, evaluation_0" --train-dsl-path fate_flow/examples/test_hetero_lr_job_dsl.json -o fate_flow/examples/
        flow job dsl --cpn-list [dataio_0,hetero_feature_binning_0,hetero_feature_selection_0,evaluation_0] --train-dsl-path fate_flow/examples/test_hetero_lr_job_dsl.json -o fate_flow/examples/
    """
    # Component list may come inline (--cpn-list) or from a file (--cpn-path);
    # an empty string means "no explicit component list".
    if kwargs.get("cpn_list"):
        cpn_str = kwargs.get("cpn_list")
    elif kwargs.get("cpn_path"):
        with open(kwargs.get("cpn_path"), "r") as fp:
            cpn_str = fp.read()
    else:
        cpn_str = ""
    with open(kwargs.get("train_dsl_path"), "r") as ft:
        train_dsl = ft.read()
    config_data = {
        "cpn_str": cpn_str,
        "train_dsl": train_dsl,
        "version": "2",
    }
    if kwargs.get("output_path"):
        dsl_filename = "predict_dsl_{}.json".format(datetime.now().strftime('%Y%m%d%H%M%S'))
        output_path = os.path.join(check_abs_path(kwargs.get("output_path")), dsl_filename)
        config_data["filename"] = dsl_filename
        with closing(access_server('post', ctx, 'job/dsl/generate',
                                   config_data, False, stream=True)) as response:
            if response.status_code == 200:
                os.makedirs(os.path.dirname(output_path), exist_ok=True)
                with open(output_path, "wb") as fw:
                    for chunk in response.iter_content(1024):
                        if chunk:
                            fw.write(chunk)
                res = {
                    'retcode': 0,
                    'retmsg': "New predict dsl file has been generated successfully. "
                              "File path is: {}".format(output_path)
                }
            else:
                try:
                    res = response.json() if isinstance(response, requests.models.Response) else response
                except Exception:
                    # Bug fix: the two adjacent literals lacked a separating
                    # space, producing "failed.For more details".
                    res = {
                        'retcode': 100,
                        'retmsg': "New predict dsl file generated failed. "
                                  "For more details, please check logs/fate_flow/fate_flow_stat.log"
                    }
        prettify(res)
    else:
        # No output path: the server returns the generated dsl in its response.
        access_server('post', ctx, 'job/dsl/generate', config_data)