def homo_deploy_model(ctx, **kwargs):
    """
    \b
    - DESCRIPTION:
        Deploy trained homogenous model to a target online serving system.
        The model must be converted beforehand. Currently the supported target
        serving system is KFServing. Refer to the example json for detailed parameters.

    \b
    - USAGE:
        flow model homo-deploy -c fate_flow/examples/homo_deploy_model.json
    """
    config_data, dsl_data = preprocess(**kwargs)
    if config_data.get('deployment_type') == "kfserving":
        kube_config = config_data.get('deployment_parameters', {}).get('config_file')
        if kube_config:
            if check_abs_path(kube_config):
                # Inline the kubeconfig content so the server does not need
                # filesystem access to the client machine.
                with open(kube_config, 'r') as fp:
                    config_data['deployment_parameters']['config_file_content'] = fp.read()
                del config_data['deployment_parameters']['config_file']
            else:
                prettify({
                    "retcode": 100,
                    "retmsg": "The kube_config file is obtained from the fate flow client machine, "
                              "but it does not exist. Please check the path: {}".format(kube_config)
                })
                return
    # Fixed: endpoint and usage string were garbled as 'h**o'; the function
    # name and description establish this is the homo(genous) deploy route.
    access_server('post', ctx, 'model/homo/deploy', config_data)
def import_model(ctx, **kwargs):
    """
    \b
    - DESCRIPTION:
        Import Model Command. Users can currently import models from files or
        databases (including mysql and redis).

    \b
    - USAGE:
        flow model import -c fate_flow/examples/import_model.json
        flow model import -c fate_flow/examples/restore_model.json --from-database
    """
    config_data, dsl_data = preprocess(**kwargs)
    if not config_data.pop('from_database'):
        file_path = config_data.get("file", None)
        if file_path:
            if not os.path.isabs(file_path):
                file_path = os.path.join(get_project_base_directory(), file_path)
            if os.path.exists(file_path):
                # Fixed: the file handle was opened without ever being closed
                # (leak); keep it open for the duration of the upload only.
                with open(file_path, 'rb') as fp:
                    files = {'file': fp}
                    access_server('post', ctx, 'model/import', data=config_data, files=files)
            else:
                prettify({'retcode': 100,
                          'retmsg': 'Import model failed. The file is obtained from the fate flow client machine, '
                                    'but it does not exist, please check the path: {}'.format(file_path)})
        else:
            prettify({
                'retcode': 100,
                'retmsg': "Import model failed. Please specify the valid model file path and try again."
            })
    else:
        access_server('post', ctx, 'model/restore', config_data)
def export_model(ctx, **kwargs):
    """
    \b
    - DESCRIPTION:
        Export Model Command. Users can currently export models to files or
        databases (including mysql and redis).

    \b
    - USAGE:
        flow model export -c fate_flow/examples/export_model.json
        flow model export -c fate_flow/examplse/store_model.json --to-database
    """
    conf, _ = preprocess(**kwargs)
    if conf.pop('to_database'):
        # Database-backed export is handled entirely server-side.
        access_server('post', ctx, 'model/store', conf)
        return
    # File export: stream the archive down and save it under output_path.
    with closing(access_server('get', ctx, 'model/export', conf, False, stream=True)) as resp:
        if resp.status_code == 200:
            archive_name = re.findall("filename=(.+)", resp.headers["Content-Disposition"])[0]
            os.makedirs(conf["output_path"], exist_ok=True)
            archive_path = os.path.join(conf["output_path"], archive_name)
            with open(archive_path, 'wb') as out:
                for piece in resp.iter_content(1024):
                    if piece:
                        out.write(piece)
            result = {
                'retcode': 0,
                'file': archive_path,
                'retmsg': 'download successfully, please check {}'.format(archive_path),
            }
        else:
            # requests responses need .json() called; flask responses expose .json.
            result = resp.json() if isinstance(resp, requests.models.Response) else resp.json
        prettify(result)
def config(ctx, **kwargs):
    """
    \b
    - DESCRIPTION:
        Download Configurations of A Specified Job.

    \b
    - USAGE:
        flow job config -j $JOB_ID -r host -p 10000 --output-path ./examples/
    """
    conf, _ = preprocess(**kwargs)
    resp = access_server('post', ctx, 'job/config', conf, False)
    # Normalize both requests and flask response objects to a plain dict.
    if isinstance(resp, requests.models.Response):
        resp = resp.json()
    if isinstance(resp, flask.wrappers.Response):
        resp = resp.json
    if resp['retcode'] == 0:
        job_id = resp['data']['job_id']
        target_dir = os.path.join(
            os.path.abspath(conf['output_path']), 'job_{}_config'.format(job_id))
        os.makedirs(target_dir, exist_ok=True)
        # Persist every config section (except the job id itself) as a json file.
        for section, payload in resp['data'].items():
            if section == 'job_id':
                continue
            with open('{}/{}.json'.format(target_dir, section), 'w') as fw:
                json.dump(payload, fw, indent=4)
        # The bulky sections were written to disk; drop them from the summary.
        del resp['data']['dsl']
        del resp['data']['runtime_conf']
        resp['directory'] = target_dir
        resp['retmsg'] = 'download successfully, please check {} directory'.format(target_dir)
    prettify(resp)
def get_checkpoint(ctx, **kwargs):
    """Query a checkpoint; exactly one of --step-index / --step-name is required."""
    conf, _ = preprocess(**kwargs)
    # Exactly one of the two selectors must be present.
    selectors = conf.keys() & {'step_index', 'step_name'}
    if len(selectors) != 1:
        click.echo("Error: Missing option '--step-index' or '--step-name'.", err=True)
        sys.exit(2)
    access_server('post', ctx, 'checkpoint/get', conf)
def get_predict_conf(ctx, **kwargs):
    """
    \b
    - DESCRIPTION:
        Get predict conf template.

    \b
    - USAGE:
        flow model get-predict-conf --model_id $MODEL_ID --model_version $MODEL_VERSION -o ./examples/
    """
    config_data, dsl_data = preprocess(**kwargs)
    # Timestamped filename so repeated queries never overwrite each other.
    conf_filename = "predict_conf_{}.json".format(datetime.now().strftime('%Y%m%d%H%M%S'))
    output_path = os.path.join(check_abs_path(kwargs.get("output_path")), conf_filename)
    config_data["filename"] = conf_filename
    with closing(access_server('post', ctx, 'model/get/predict/conf', config_data, False,
                               stream=True)) as response:
        if response.status_code == 200:
            os.makedirs(os.path.dirname(output_path), exist_ok=True)
            with open(output_path, "wb") as fw:
                for chunk in response.iter_content(1024):
                    if chunk:
                        fw.write(chunk)
            res = {'retcode': 0,
                   'retmsg': "Query predict conf successfully. "
                             "File path is: {}".format(output_path)}
        else:
            try:
                res = response.json() if isinstance(response, requests.models.Response) else response
            except Exception:
                # Fixed: the two string fragments were previously concatenated
                # without a space, rendering as "failed.For more details".
                res = {'retcode': 100,
                       'retmsg': "Query predict conf failed. "
                                 "For more details, please check logs/fate_flow/fate_flow_stat.log"}
    prettify(res)
def output_data(ctx, **kwargs):
    """
    \b
    - DESCRIPTION:
        Download the Output Data of A Specified Component.

    \b
    - USAGE:
        flow component output-data -j $JOB_ID -r host -p 10000 -cpn hetero_feature_binning_0 --output-path ./examples/
    """
    config_data, dsl_data = preprocess(**kwargs)
    tar_file_name = 'job_{}_{}_{}_{}_output_data.tar.gz'.format(config_data['job_id'],
                                                               config_data['component_name'],
                                                               config_data['role'],
                                                               config_data['party_id'])
    extract_dir = os.path.join(config_data['output_path'], tar_file_name.replace('.tar.gz', ''))
    with closing(access_server('get', ctx, 'tracking/component/output/data/download',
                               config_data, False, stream=True)) as response:
        if response.status_code == 200:
            try:
                download_from_request(http_response=response, tar_file_name=tar_file_name,
                                      extract_dir=extract_dir)
                res = {'retcode': 0,
                       'directory': os.path.abspath(extract_dir),
                       'retmsg': 'Download successfully, please check {} directory'.format(
                           os.path.abspath(extract_dir))}
            # Fixed: was a bare `except:`, which also swallowed KeyboardInterrupt
            # and SystemExit; narrowed to Exception.
            except Exception:
                res = {'retcode': 100,
                       'retmsg': 'Download failed, please check if the parameters are correct.'}
        else:
            try:
                res = response.json() if isinstance(response, requests.models.Response) else response
            except Exception:
                res = {'retcode': 100,
                       'retmsg': 'Download failed, for more details please check logs/fate_flow/fate_flow_stat.log.'}
    prettify(res)
def list_providers(ctx, **kwargs):
    """Query registered component providers, optionally filtered by provider name."""
    conf, _ = preprocess(**kwargs)
    provider = kwargs.get("provider_name")
    if provider:
        access_server("post", ctx, f"provider/{provider}/get", conf)
    else:
        access_server("post", ctx, "provider/get", conf)
def load(ctx, **kwargs):
    """
    \b
    - DESCRIPTION:
        Load Model Command

    \b
    - USAGE:
        flow model load -c fate_flow/examples/publish_load_model.json
        flow model load -j $JOB_ID
    """
    has_conf = bool(kwargs.get("conf_path"))
    has_job = bool(kwargs.get("job_id"))
    # Exactly one of conf path / job id must be supplied.
    if not has_conf and not has_job:
        prettify({
            "retcode": 100,
            "retmsg": "Load model failed. No arguments received, "
                      "please provide one of arguments from job id and conf path."
        })
    elif has_conf and has_job:
        prettify({
            "retcode": 100,
            "retmsg": "Load model failed. Please do not provide job id and "
                      "conf path at the same time."
        })
    else:
        conf, _ = preprocess(**kwargs)
        access_server('post', ctx, 'model/load', conf)
def deploy(ctx, **kwargs):
    """
    \b
    - DESCRIPTION:
        Deploy model.

    \b
    - USAGE:
        flow model deploy --model-id $MODEL_ID --model-version $MODEL_VERSION
    """
    request_data = {
        'model_id': kwargs['model_id'],
        'model_version': kwargs['model_version'],
    }
    if kwargs.get("cpn_list") or kwargs.get("cpn_path"):
        if kwargs.get("cpn_list"):
            cpn_str = kwargs["cpn_list"]
        elif kwargs.get("cpn_path"):
            with open(kwargs["cpn_path"], "r") as fp:
                cpn_str = fp.read()
        else:
            cpn_str = ""
        if isinstance(cpn_str, list):
            cpn_list = cpn_str
        else:
            # Fixed: `(cpn_str.find("/") and cpn_str.find("\\")) != -1` only
            # rejected a *leading* '/' (or any '\\') because find() returns -1
            # (truthy) when absent; the intent is to reject either character
            # anywhere in the string.
            if "/" in cpn_str or "\\" in cpn_str:
                raise Exception(
                    "Component list string should not contain '/' or '\\'.")
            # Normalize "[a, b, c]"-style input into a plain comma list.
            cpn_str = cpn_str.replace(" ", "").replace("\n", "").strip(",[]")
            cpn_list = cpn_str.split(",")
        request_data['cpn_list'] = cpn_list
    elif kwargs.get("dsl_path"):
        with open(kwargs["dsl_path"], "r") as ft:
            predict_dsl = ft.read()
        request_data['dsl'] = predict_dsl
    # Collect per-component checkpoint selectors, given as "component:value".
    request_data['components_checkpoint'] = {}
    for i in ('cpn_step_index', 'cpn_step_name'):
        for j in kwargs[i]:
            component, checkpoint = j.rsplit(':', 1)
            if i == 'cpn_step_index':
                checkpoint = int(checkpoint)
            if component in request_data['components_checkpoint']:
                raise KeyError(f"Duplicated component name '{component}'.")
            # i[4:] strips the 'cpn_' prefix -> 'step_index' / 'step_name'.
            request_data['components_checkpoint'][component] = {
                i[4:]: checkpoint,
            }
    config_data, dsl_data = preprocess(**request_data)
    access_server('post', ctx, 'model/deploy', config_data)
def upload(ctx, **kwargs):
    """
    \b
    - DESCRIPTION:
        Upload Data Table.

    \b
    - Usage:
        flow data upload -c fate_flow/examples/upload_guest.json
        flow data upload -c fate_flow/examples/upload_host.json --verbose --drop
    """
    # Server-side protocol: drop flag is encoded as 1 (drop) / 2 (keep),
    # not as a boolean — presumably a legacy wire convention; confirm server-side.
    kwargs['drop'] = 1 if kwargs['drop'] else 2
    kwargs['verbose'] = int(kwargs['verbose'])
    config_data, dsl_data = preprocess(**kwargs)
    # use_local_data defaults to truthy: the file lives on this client machine
    # and must be streamed up; otherwise the server reads its own local copy.
    if config_data.get('use_local_data', 1):
        file_name = check_abs_path(config_data.get('file'))
        if os.path.exists(file_name):
            with open(file_name, 'rb') as fp:
                # Stream the file as multipart form data instead of loading it
                # into memory.
                data = MultipartEncoder(
                    fields={
                        'file': (os.path.basename(file_name), fp, 'application/octet-stream')
                    })
                # Mutable cell so the closure below can count completion events.
                tag = [0]

                def read_callback(monitor):
                    # Progress bar, printed only in verbose mode.
                    # NOTE(review): '%.2f%%' is applied to an integer floor
                    # division, so it always renders as N.00% — looks like '/'
                    # was intended; confirm before changing output format.
                    if config_data.get('verbose') == 1:
                        sys.stdout.write("\r UPLOADING:{0}{1}".format(
                            "|" * (monitor.bytes_read * 100 // monitor.len),
                            '%.2f%%' % (monitor.bytes_read * 100 // monitor.len)))
                        sys.stdout.flush()
                        # The monitor reaches 100% twice (encode + send);
                        # only emit the trailing newline on the second pass.
                        if monitor.bytes_read / monitor.len == 1:
                            tag[0] += 1
                            if tag[0] == 2:
                                sys.stdout.write('\n')
                data = MultipartEncoderMonitor(data, read_callback)
                access_server('post', ctx, 'data/upload',
                              json_data=None, data=data,
                              params=config_data,
                              headers={'Content-Type': data.content_type})
        else:
            prettify({
                "retcode": 100,
                "retmsg": "The file is obtained from the fate flow client machine, but it does not exist, "
                          "please check the path: {}".format(file_name)
            })
    else:
        access_server('post', ctx, 'data/upload', config_data)
def parameters(ctx, **kwargs):
    """
    \b
    - DESCRIPTION:
        Query the parameters of a specified component.

    \b
    - USAGE:
        flow component parameters -j $JOB_ID -r host -p 10000 -cpn hetero_feature_binning_0
    """
    conf, _ = preprocess(**kwargs)
    access_server('post', ctx, 'tracking/component/parameters', conf)
def output_data_table(ctx, **kwargs):
    """
    \b
    - DESCRIPTION:
        View Table Name and Namespace.

    \b
    - USAGE:
        flow component output-data-table -j $JOB_ID -r host -p 10000 -cpn hetero_feature_binning_0
    """
    conf, _ = preprocess(**kwargs)
    access_server('post', ctx, 'tracking/component/output/data/table', conf)
def download(ctx, **kwargs):
    """
    \b
    - DESCRIPTION:
        Download Data Table.

    \b
    - Usage:
        flow data download -c fate_flow/examples/download_host.json
    """
    conf, _ = preprocess(**kwargs)
    access_server('post', ctx, "data/download", conf)
def delete_tag(ctx, **kwargs):
    """
    \b
    - DESCRIPTION:
        Delete Tag Command. Note that model information is preserved even
        after the tag itself is removed.

    \b
    - USAGE:
        flow tag delete -t tag1
    """
    conf, _ = preprocess(**kwargs)
    access_server('post', ctx, 'model/tag/destroy', conf)
def stop(ctx, **kwargs):
    """
    \b
    - DESCRIPTION:
        Stop a specified job.

    \b
    - USAGE:
        flow job stop -j $JOB_ID
    """
    conf, _ = preprocess(**kwargs)
    access_server('post', ctx, "job/stop", conf)
def resource_return(ctx, **kwargs):
    """
    \b
    - DESCRIPTION:
        Return Job Resource Command

    \b
    - USAGE:
        flow resource return -j $JobId
    """
    conf, _ = preprocess(**kwargs)
    access_server('post', ctx, 'resource/return', conf)
def disable(ctx, **kwargs):
    """
    \b
    - DESCRIPTION:
        Enable A Specified Table.

    \b
    - USAGE:
        flow table enable -n $NAMESPACE -t $TABLE_NAME
    """
    # NOTE(review): function is named `disable`, but both the help text and the
    # endpoint ('table/enable') say "enable" — confirm which side is wrong.
    config_data, dsl_data = preprocess(**kwargs)
    access_server('post', ctx, 'table/enable', config_data)
def metrics(ctx, **kwargs):
    """
    \b
    - DESCRIPTION:
        Query the List of Metrics.

    \b
    - USAGE:
        flow component metrics -j $JOB_ID -r host -p 10000 -cpn hetero_feature_binning_0
    """
    conf, _ = preprocess(**kwargs)
    access_server('post', ctx, 'tracking/component/metrics', conf)
def output_model(ctx, **kwargs):
    """
    \b
    - DESCRIPTION:
        Query the Model of A Specified Component.

    \b
    - USAGE:
        flow component output-model -j $JOB_ID -r host -p 10000 -cpn hetero_feature_binning_0
    """
    conf, _ = preprocess(**kwargs)
    access_server('post', ctx, 'tracking/component/output/model', conf)
def writer(ctx, **kwargs):
    """
    \b
    - DESCRIPTION:
        Write a data table out through the writer endpoint (e.g. to external
        storage). (Previous help text said "Download Data Table" / "flow data
        download" — a copy-paste from the download command; the endpoint here
        is 'data/writer'.)

    \b
    - Usage:
        flow data writer -c fateflow/examples/writer/external_storage.json
    """
    config_data, dsl_data = preprocess(**kwargs)
    access_server('post', ctx, "data/writer", config_data)
def list(ctx, **kwargs):
    """
    \b
    - DESCRIPTION:
        List components of a specified job.

    \b
    - USAGE:
        flow component list -j $JOB_ID
    """
    # NOTE: shadows the builtin `list`, but click commands are conventionally
    # named after the CLI verb.
    conf, _ = preprocess(**kwargs)
    access_server('post', ctx, 'tracking/component/list', conf)
def info(ctx, **kwargs):
    """
    \b
    - DESCRIPTION:
        Query Table Information.

    \b
    - USAGE:
        flow table info -n $NAMESPACE -t $TABLE_NAME
    """
    conf, _ = preprocess(**kwargs)
    access_server('post', ctx, 'table/table_info', conf)
def metric_all(ctx, **kwargs):
    """
    \b
    - DESCRIPTION:
        Query All Metric Data.

    \b
    - USAGE:
        flow component metric-all -j $JOB_ID -r host -p 10000 -cpn hetero_feature_binning_0
    """
    conf, _ = preprocess(**kwargs)
    access_server('post', ctx, 'tracking/component/metric/all', conf)
def disable(ctx, **kwargs):
    """
    \b
    - DESCRIPTION:
        Delete disabled table(s). (Original help text "Delete Disable A
        Specified Table" appears garbled; the endpoint is
        'table/disable/delete'.)

    \b
    - USAGE:
        flow table disable-delete
    """
    config_data, dsl_data = preprocess(**kwargs)
    access_server('post', ctx, 'table/disable/delete', config_data)
def migrate(ctx, **kwargs):
    """
    \b
    - DESCRIPTION:
        Migrate Model Command.

    \b
    - USAGE:
        flow model migrate -c fate_flow/examples/migrate_model.json
    """
    conf, _ = preprocess(**kwargs)
    access_server('post', ctx, 'model/migrate', conf)
def query(ctx, **kwargs):
    """
    \b
    - DESCRIPTION:
        Query Resource Information.

    \b
    - USAGE:
        flow resource query
    """
    conf, _ = preprocess(**kwargs)
    access_server('post', ctx, 'resource/query', conf)
def create_tag(ctx, **kwargs):
    """
    \b
    - DESCRIPTION:
        Create Tag Command.

    \b
    - USAGE:
        flow tag create -t $TAG_NAME -d $TEST_DESCRIPTION
    """
    conf, _ = preprocess(**kwargs)
    access_server('post', ctx, 'model/tag/create', conf)
def bind(ctx, **kwargs):
    """
    \b
    - DESCRIPTION:
        Bind Model Command

    \b
    - USAGE:
        flow model bind -c fate_flow/examples/bind_model_service.json
        flow model bind -c fate_flow/examples/bind_model_service.json -j $JOB_ID
    """
    conf, _ = preprocess(**kwargs)
    access_server('post', ctx, 'model/bind', conf)
def list_tag(ctx, **kwargs):
    """
    \b
    - DESCRIPTION:
        List Tags of Model Command. Users can query a model by a valid job id
        and get the tag list of that model.

    \b
    - USAGE:
        flow model tag-list -j $JOB_ID
    """
    conf, _ = preprocess(**kwargs)
    access_server('post', ctx, 'model/model_tag/retrieve', conf)