def get_predict_conf(ctx, **kwargs):
    """
    \b
    - DESCRIPTION:
        Get predict conf template.

    \b
    - USAGE:
        flow model get-predict-conf --model_id $MODEL_ID --model_version $MODEL_VERSION -o ./examples/
    """
    config_data, dsl_data = preprocess(**kwargs)
    timestamp = datetime.now().strftime('%Y%m%d%H%M%S')
    conf_filename = "predict_conf_{}.json".format(timestamp)
    output_path = os.path.join(check_abs_path(kwargs.get("output_path")), conf_filename)
    config_data["filename"] = conf_filename
    # Stream the template from the server and persist it under output_path.
    with closing(access_server('post', ctx, 'model/get/predict/conf', config_data,
                               False, stream=True)) as response:
        if response.status_code != 200:
            # Non-200: surface the server's JSON body when available,
            # otherwise fall back to a generic failure message.
            try:
                res = response.json() if isinstance(response, requests.models.Response) else response
            except Exception:
                res = {'retcode': 100,
                       'retmsg': "Query predict conf failed."
                                 "For more details, please check logs/fate_flow/fate_flow_stat.log"}
        else:
            os.makedirs(os.path.dirname(output_path), exist_ok=True)
            with open(output_path, "wb") as out_file:
                for block in response.iter_content(1024):
                    if block:
                        out_file.write(block)
            res = {'retcode': 0,
                   'retmsg': "Query predict conf successfully. "
                             "File path is: {}".format(output_path)}
    prettify(res)
def homo_deploy_model(ctx, **kwargs):
    """
    \b
    - DESCRIPTION:
        Deploy trained homogenous model to a target online serving system.
        The model must be converted beforehand.
        Currently the supported target serving system is KFServing.
        Refer to the example json for detailed parameters.

    \b
    - USAGE:
        flow model homo-deploy -c fate_flow/examples/homo_deploy_model.json
    """
    config_data, dsl_data = preprocess(**kwargs)
    if config_data.get('deployment_type') == "kfserving":
        # The kube config lives on the client machine, which may not share a
        # filesystem with the fate flow server — so read it here and ship its
        # content inline, dropping the now-meaningless path.
        kube_config = config_data.get('deployment_parameters', {}).get('config_file')
        if kube_config:
            # NOTE(review): this relies on check_abs_path() returning a falsy
            # value for a missing file — confirm that helper actually checks
            # existence, as the error message below implies.
            if check_abs_path(kube_config):
                with open(kube_config, 'r') as fp:
                    config_data['deployment_parameters']['config_file_content'] = fp.read()
                del config_data['deployment_parameters']['config_file']
            else:
                prettify({
                    "retcode": 100,
                    "retmsg": "The kube_config file is obtained from the fate flow client machine, "
                              "but it does not exist. Please check the path: {}".format(kube_config)
                })
                return
    # Fixed garbled endpoint: 'model/h**o/deploy' could never match the
    # server route; the correct path is 'model/homo/deploy'.
    access_server('post', ctx, 'model/homo/deploy', config_data)
def upload(ctx, **kwargs):
    """
    \b
    - DESCRIPTION:
        Upload Data Table.

    \b
    - Usage:
        flow data upload -c fate_flow/examples/upload_guest.json
        flow data upload -c fate_flow/examples/upload_host.json --verbose --drop
    """
    # Encode CLI flags for the server: drop maps to 1 (drop) / 2 (keep),
    # verbose to 1/0.
    kwargs['drop'] = 1 if kwargs['drop'] else 2
    kwargs['verbose'] = int(kwargs['verbose'])
    config_data, dsl_data = preprocess(**kwargs)
    if config_data.get('use_local_data', 1):
        # Local mode (the default): stream the file from this client
        # machine to the server as a multipart upload.
        file_name = check_abs_path(config_data.get('file'))
        if os.path.exists(file_name):
            with open(file_name, 'rb') as fp:
                data = MultipartEncoder(
                    fields={
                        'file': (os.path.basename(file_name), fp, 'application/octet-stream')
                    })
                # Mutable cell captured by the closure below; ensures the
                # trailing newline is printed only once after 100% is reached.
                tag = [0]

                def read_callback(monitor):
                    # Render a textual progress bar when --verbose was given.
                    if config_data.get('verbose') == 1:
                        sys.stdout.write("\r UPLOADING:{0}{1}".format(
                            "|" * (monitor.bytes_read * 100 // monitor.len),
                            '%.2f%%' % (monitor.bytes_read * 100 // monitor.len)))
                        sys.stdout.flush()
                        if monitor.bytes_read / monitor.len == 1:
                            tag[0] += 1
                            if tag[0] == 2:
                                sys.stdout.write('\n')

                data = MultipartEncoderMonitor(data, read_callback)
                access_server('post', ctx, 'data/upload', json_data=None,
                              data=data, params=config_data,
                              headers={'Content-Type': data.content_type})
        else:
            prettify({
                "retcode": 100,
                "retmsg": "The file is obtained from the fate flow client machine, but it does not exist, "
                          "please check the path: {}".format(file_name)
            })
    else:
        # Server-local data: the file path in config_data is resolved on the
        # fate flow server itself.
        access_server('post', ctx, 'data/upload', config_data)
def download_summary(ctx, **kwargs):
    """
    \b
    - DESCRIPTION:
        Download summary of a specified component and save it as a json file.

    \b
    - USAGE:
        flow component download-summary -j $JOB_ID -r host -p 10000 -cpn hetero_feature_binning_0
        flow component download-summary -j $JOB_ID -r host -p 10000 -cpn hetero_feature_binning_0 -o ./examples/
    """
    config_data, dsl_data = preprocess(**kwargs)
    # Without an output path, simply forward the request to the server.
    if not config_data.get("output_path"):
        access_server("post", ctx, "tracking/component/summary/download", config_data)
        return
    if not os.path.isdir(config_data.get("output_path")):
        res = {
            "retcode": 100,
            "retmsg": "Please input a valid directory path."
        }
    else:
        timestamp = datetime.now().strftime('%Y%m%d%H%M%S')
        config_data["filename"] = "summary_{}_{}.json".format(
            config_data['component_name'], timestamp)
        config_data["output_path"] = os.path.join(
            check_abs_path(config_data["output_path"]), config_data["filename"])
        # Stream the summary from the server into the timestamped file.
        with closing(access_server("post", ctx, "tracking/component/summary/download",
                                   config_data, False, stream=True)) as resp:
            if resp.status_code != 200:
                # Non-200: prefer the server's JSON body; otherwise report a
                # generic failure pointing at the server-side log.
                try:
                    res = resp.json() if isinstance(resp, requests.models.Response) else resp
                except Exception:
                    res = {"retcode": 100,
                           "retmsg": "Download component summary failed, "
                                     "for more details, please check logs/fate_flow/fate_flow_stat.log."}
            else:
                with open(config_data["output_path"], "wb") as fh:
                    for part in resp.iter_content(1024):
                        if part:
                            fh.write(part)
                res = {
                    "retcode": 0,
                    "retmsg": "The summary of component <{}> has been stored successfully. "
                              "File path is: {}.".format(config_data["component_name"],
                                                         config_data["output_path"])
                }
    prettify(res)
def register(ctx, **kwargs):
    """Register a component provider with the fate flow server.

    Normalizes the provider's "path" entry to an absolute path before
    posting the config to the "provider/register" endpoint.
    """
    config_data, dsl_data = preprocess(**kwargs)
    config_data["path"] = check_abs_path(config_data.get("path"))
    access_server("post", ctx, "provider/register", config_data)
def dsl_generator(ctx, **kwargs):
    """
    \b
    - DESCRIPTION:
        A predict dsl generator.
        Before using predict dsl generator, users should prepare:
            1. name list of component which you are going to use in predict progress,
            2. the train dsl file path you specified in train progress.

    \b
        Notice that users can choose to specify the component name list by using a text file,
        or, by typing in terminal. We, however, strongly recommend users using prepared files
        to specify the component list in order to avoid some unnecessary mistakes.

    \b
    - USAGE:
        flow job dsl --cpn-path fate_flow/examples/component_list.txt --train-dsl-path fate_flow/examples/test_hetero_lr_job_dsl.json -o fate_flow/examples/
        flow job dsl --cpn-list "dataio_0, hetero_feature_binning_0, hetero_feature_selection_0, evaluation_0" --train-dsl-path fate_flow/examples/test_hetero_lr_job_dsl.json -o fate_flow/examples/
        flow job dsl --cpn-list [dataio_0,hetero_feature_binning_0,hetero_feature_selection_0,evaluation_0] --train-dsl-path fate_flow/examples/test_hetero_lr_job_dsl.json -o fate_flow/examples/
    """
    # The component list may come inline (--cpn-list) or from a text file
    # (--cpn-path); the inline form takes precedence.
    if kwargs.get("cpn_list"):
        cpn_str = kwargs.get("cpn_list")
    elif kwargs.get("cpn_path"):
        with open(kwargs.get("cpn_path"), "r") as fp:
            cpn_str = fp.read()
    else:
        cpn_str = ""
    with open(kwargs.get("train_dsl_path"), "r") as ft:
        train_dsl = ft.read()
    config_data = {
        "cpn_str": cpn_str,
        "train_dsl": train_dsl,
        "version": "2",
    }
    if kwargs.get("output_path"):
        # Stream the generated predict dsl into a timestamped json file
        # under the requested output directory.
        dsl_filename = "predict_dsl_{}.json".format(
            datetime.now().strftime('%Y%m%d%H%M%S'))
        output_path = os.path.join(check_abs_path(kwargs.get("output_path")), dsl_filename)
        config_data["filename"] = dsl_filename
        with closing(
                access_server('post', ctx, 'job/dsl/generate', config_data, False,
                              stream=True)) as response:
            if response.status_code == 200:
                os.makedirs(os.path.dirname(output_path), exist_ok=True)
                with open(output_path, "wb") as fw:
                    for chunk in response.iter_content(1024):
                        if chunk:
                            fw.write(chunk)
                res = {
                    'retcode': 0,
                    'retmsg': "New predict dsl file has been generated successfully. "
                              "File path is: {}".format(output_path)
                }
            else:
                # Non-200: prefer the server's JSON body when available,
                # otherwise report a generic failure.
                try:
                    res = response.json() if isinstance(
                        response, requests.models.Response) else response
                except Exception:
                    res = {
                        'retcode': 100,
                        'retmsg': "New predict dsl file generated failed."
                                  "For more details, please check logs/fate_flow/fate_flow_stat.log"
                    }
        prettify(res)
    else:
        # No output path: the server returns the dsl in the response body.
        access_server('post', ctx, 'job/dsl/generate', config_data)