def job_config(self, max_iter):
    """Fetch the current job's runtime config via the fate_flow CLI and validate it.

    Runs the ``job_config`` command for ``self.job_id`` as guest and saves the
    output under ``self.output_path``, then loads the returned
    ``runtime_conf.json`` and checks that this component's ``max_iter`` equals
    the expected value.

    :param max_iter: expected ``max_iter`` for ``self.component_name``.
    :return: the CLI retcode (0) when the fetched config matches, ``None`` on
        any failure (non-zero retcode, unparsable output, or mismatch).
    """
    try:
        subp = subprocess.Popen(
            ["python", self.fate_flow_path, "-f", "job_config",
             "-j", self.job_id, "-r", "guest",
             "-p", str(self.guest_party_id[0]), "-o", self.output_path],
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT)  # stderr folded into stdout, so only one stream to read
        raw, _ = subp.communicate()
        response = json.loads(raw.decode("utf-8"))
        if response.get('retcode'):
            self.error_log('job config: {}'.format(response.get('retmsg')) + '\n')
            # Fix: previously fell through here and crashed on a missing
            # 'directory' key (None + str -> TypeError, silently swallowed
            # by the bare except). Fail fast instead.
            return None
        job_conf_path = response.get('directory') + '/runtime_conf.json'
        job_conf = get_dict_from_file(job_conf_path)
        if max_iter == job_conf['component_parameters']['common'][
                self.component_name]['max_iter']:
            return response.get('retcode')
        return None  # config retrieved, but max_iter does not match
    except Exception:
        return
def data_upload(self, upload_path, work_mode, table_index=None):
    """Upload a dataset through the fate_flow CLI and wait on the job status.

    :param upload_path: path of the JSON upload-config template to read.
    :param work_mode: work mode written into the upload config.
    :param table_index: optional suffix; when given, the target table name
        becomes ``test_api_<table_index>``.
    :return: result of ``query_status`` for the upload job, or ``None`` when
        the CLI call fails or its output cannot be parsed.
    """
    # Build the effective upload config and materialize it in the cache dir.
    conf = get_dict_from_file(upload_path)
    conf["use_local_data"] = 0
    conf["work_mode"] = work_mode
    if table_index is not None:
        conf["table_name"] = f'test_api_{table_index}'
    cached_conf_path = self.cache_directory + 'upload_file.json'
    with open(cached_conf_path, 'w') as fp:
        json.dump(conf, fp)
    try:
        # -drop 1: overwrite any existing table with the same name.
        proc = subprocess.Popen(
            ["python", self.fate_flow_path, "-f", "upload",
             "-c", cached_conf_path, "-drop", "1"],
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT)
        raw, _ = proc.communicate()
        response = json.loads(raw.decode("utf-8"))
        if response.get('retcode'):
            self.error_log('data upload: {}'.format(response.get('retmsg')) + '\n')
        return self.query_status(response.get("jobId"))
    except Exception:
        return
def run_test_api(config_json):
    """Smoke-test the fate_flow CLI APIs end to end.

    Exercises the data, table, job, task, component and model command groups
    in a fixed order (the order matters: e.g. stop runs before submit, and the
    component table is printed only after the metric-delete check).  Results
    are rendered as PrettyTable sections and printed; nothing is returned.
    """
    output_path = './output/flow_test_data/'
    os.makedirs(os.path.dirname(output_path), exist_ok=True)
    fate_flow_path = str(
        config_json['data_base_dir']) + '/python/fate_flow/fate_flow_client.py'
    test_api = TestModel(fate_flow_path, config_json['component_name'])
    test_api.dsl_path = config_json['train_dsl_path']
    test_api.cache_directory = config_json['cache_directory']
    test_api.output_path = str(os.path.abspath(output_path)) + '/'
    # Party ids and file paths from the test configuration.
    conf_path = config_json['train_conf_path']
    guest_party_id = config_json['guest_party_id']
    host_party_id = config_json['host_party_id']
    arbiter_party_id = config_json['arbiter_party_id']
    upload_file_path = config_json['upload_file_path']
    conf_file = get_dict_from_file(upload_file_path)
    work_mode = config_json['work_mode']
    # Local model cache directory, later used to verify model import.
    remove_path = str(
        config_json['data_base_dir']
    ) + '/model_local_cache/guest#{}#arbiter-{}#guest-{}#host-{}#model/'.format(
        guest_party_id[0], arbiter_party_id[0], guest_party_id[0],
        host_party_id[0])
    # set_config returns the expected max_iter used by later checks.
    max_iter = test_api.set_config(guest_party_id, host_party_id,
                                   arbiter_party_id, conf_path, work_mode)
    # --- data api ---
    data = PrettyTable()
    data.set_style(ORGMODE)
    data.field_names = ['data api name', 'status']
    data.add_row([
        'data upload',
        judging_state(test_api.data_upload(upload_file_path, work_mode=work_mode))
    ])
    data.add_row([
        'data download',
        judging_state(test_api.data_download(conf_file, output_path, work_mode))
    ])
    data.add_row([
        'data upload history',
        judging_state(test_api.data_upload_history(upload_file_path, work_mode=work_mode))
    ])
    print(data.get_string(title="data api"))
    # --- table api ---
    table = PrettyTable()
    table.set_style(ORGMODE)
    table.field_names = ['table api name', 'status']
    table.add_row([
        'table info',
        judging_state(test_api.table_api('table_info', conf_file))
    ])
    table.add_row([
        'delete table',
        judging_state(test_api.table_api('table_delete', conf_file))
    ])
    print(table.get_string(title="table api"))
    # --- job api --- (printed last: clean_job/clean_queue rows are added at the end)
    job = PrettyTable()
    job.set_style(ORGMODE)
    job.field_names = ['job api name', 'status']
    job.add_row(['job stop', judging_state(test_api.job_api('stop_job'))])
    job.add_row(['job submit', judging_state(test_api.submit_job(stop=False))])
    job.add_row([
        'job query',
        judging_state(False if test_api.query_job() == "success" else True)
    ])
    job.add_row(
        ['job data view', judging_state(test_api.job_api('data_view_query'))])
    job.add_row(
        ['job config', judging_state(test_api.job_config(max_iter=max_iter))])
    job.add_row(['job log', judging_state(test_api.job_api('job_log'))])
    # --- task api ---
    task = PrettyTable()
    task.set_style(ORGMODE)
    task.field_names = ['task api name', 'status']
    task.add_row(['task query', judging_state(test_api.query_task())])
    print(task.get_string(title="task api"))
    # --- component api --- (printed after the model section, once metric delete ran)
    component = PrettyTable()
    component.set_style(ORGMODE)
    component.field_names = ['component api name', 'status']
    component.add_row([
        'output data',
        judging_state(test_api.component_api('component_output_data'))
    ])
    component.add_row([
        'output table',
        judging_state(test_api.component_api('component_output_data_table'))
    ])
    component.add_row([
        'output model',
        judging_state(test_api.component_api('component_output_model'))
    ])
    component.add_row([
        'component parameters',
        judging_state(test_api.component_api('component_parameters', max_iter=max_iter))
    ])
    component.add_row([
        'metrics',
        judging_state(test_api.component_api('component_metrics'))
    ])
    component.add_row([
        'metrics all',
        judging_state(test_api.component_api('component_metric_all'))
    ])
    # --- model api ---
    model = PrettyTable()
    model.set_style(ORGMODE)
    model.field_names = ['model api name', 'status']
    model.add_row(['model load', judging_state(test_api.model_api('load'))])
    model.add_row(['model bind', judging_state(test_api.model_api('bind'))])
    # export returns (retcode, exported file path); the path feeds the import test.
    status, model_path = test_api.model_api('export')
    model.add_row(['model export', judging_state(status)])
    model.add_row([
        'model import',
        (judging_state(test_api.model_api('import', remove_path=remove_path, model_path=model_path)))
    ])
    model.add_row(['model store', (judging_state(test_api.model_api('store')))])
    model.add_row(['model restore', (judging_state(test_api.model_api('restore')))])
    print(model.get_string(title="model api"))
    component.add_row([
        'metrics delete',
        judging_state(test_api.component_api('component_metric_delete'))
    ])
    print(component.get_string(title="component api"))
    # Queue three extra jobs so clean_job / clean_queue have something to act on.
    test_api.submit_job()
    test_api.submit_job()
    test_api.submit_job()
    job.add_row(['clean job', judging_state(test_api.job_api('clean_job'))])
    job.add_row(['clean queue', judging_state(test_api.job_api('clean_queue'))])
    print(job.get_string(title="job api"))
def job_api(self, command):
    """Exercise one job-level fate_flow CLI command and verify its effect.

    Supported commands: ``stop_job``, ``job_log``, ``data_view_query``,
    ``clean_job``, ``clean_queue``.

    :param command: CLI sub-command name (also used as the ``-f`` argument).
    :return: the CLI retcode (0) when the command succeeded AND its
        post-condition holds (job canceled, log fetched, view count matches
        the DSL, metrics gone, queue empty); ``None`` otherwise.
    """

    def run_cmd(args):
        # Invoke the fate_flow client once and parse its JSON reply.
        # (Extracted: the original duplicated this boilerplate in every branch.)
        subp = subprocess.Popen(["python", self.fate_flow_path] + args,
                                stdout=subprocess.PIPE,
                                stderr=subprocess.STDOUT)
        out, _ = subp.communicate()
        return json.loads(out.decode("utf-8"))

    if command == 'stop_job':
        # Need a running job to stop; give it a moment to start.
        self.submit_job()
        time.sleep(5)
        try:
            stdout = run_cmd(["-f", command, "-j", self.job_id])
            if stdout.get('retcode'):
                self.error_log(
                    'job stop: {}'.format(stdout.get('retmsg')) + '\n')
            if self.query_job() == "canceled":
                return stdout.get('retcode')
        except Exception:
            return
    elif command == 'job_log':
        log_file_dir = os.path.join(self.output_path,
                                    'job_{}_log'.format(self.job_id))
        try:
            stdout = run_cmd(["-f", command, "-j", self.job_id,
                              "-o", log_file_dir])
            if stdout.get('retcode'):
                self.error_log('job log: {}'.format(stdout.get('retmsg')) + '\n')
            return stdout.get('retcode')
        except Exception:
            return
    elif command == 'data_view_query':
        try:
            stdout = run_cmd(["-f", command, "-j", self.job_id])
            if stdout.get('retcode'):
                self.error_log(
                    'data view queue: {}'.format(stdout.get('retmsg')) + '\n')
            # One view entry per DSL component, minus one (presumably the
            # reader/input component has no view) — TODO confirm.
            if len(stdout.get("data")) == len(
                    list(get_dict_from_file(
                        self.dsl_path)['components'].keys())) - 1:
                return stdout.get('retcode')
        except Exception:
            return
    elif command == 'clean_job':
        try:
            stdout = run_cmd(["-f", command, "-j", self.job_id])
            if stdout.get('retcode'):
                self.error_log(
                    'clean job: {}'.format(stdout.get('retmsg')) + '\n')
            # After cleaning, the job's metrics must be gone.
            metric = run_cmd(["-f", "component_metrics", "-j", self.job_id,
                              "-r", "guest", "-p", str(self.guest_party_id[0]),
                              "-cpn", 'evaluation_0'])
            if not metric.get('data'):
                return stdout.get('retcode')
        except Exception:
            return
    elif command == 'clean_queue':
        try:
            stdout = run_cmd(["-f", command])
            if stdout.get('retcode'):
                self.error_log(
                    'clean queue: {}'.format(stdout.get('retmsg')) + '\n')
            # After cleaning, no waiting jobs may remain in the queue.
            if not self.query_job(queue=True):
                return stdout.get('retcode')
        except Exception:
            return
def run_test_api(config_json):
    """Smoke-test the fate_flow SDK (flow client) APIs end to end.

    Exercises data, table, job, task, tag, component, model and queue API
    groups in a fixed order; each call's pass/fail status is collected in a
    PrettyTable section and printed.  Nothing is returned.
    """
    output_path = './output/flow_test_data/'
    os.makedirs(os.path.dirname(output_path), exist_ok=True)
    output_path = str(os.path.abspath(output_path)) + '/'
    # Party ids and file paths from the test configuration.
    guest_party_id = config_json['guest_party_id']
    host_party_id = config_json['host_party_id']
    arbiter_party_id = config_json['arbiter_party_id']
    train_conf_path = config_json['train_conf_path']
    train_dsl_path = config_json['train_dsl_path']
    upload_file_path = config_json['upload_file_path']
    work_mode = config_json['work_mode']
    # Local model cache directory, later used to verify model import.
    remove_path = str(config_json[
        'data_base_dir']) + '/model_local_cache/guest#{}#arbiter-{}#guest-{}#host-{}#model/'.format(
        guest_party_id[0], arbiter_party_id[0], guest_party_id[0], host_party_id[0])
    test_api = TestModel(config_json['server_url'],
                         component_name=config_json['component_name'])
    job_conf = test_api.set_config(guest_party_id, host_party_id,
                                   arbiter_party_id, train_conf_path, work_mode)
    # Expected iteration count (hard-wired to the hetero_lr_0 component here),
    # used by the job-config and component-parameters checks below.
    max_iter = job_conf['component_parameters']['common']['hetero_lr_0']['max_iter']
    test_api.set_dsl(train_dsl_path)
    conf_file = get_dict_from_file(upload_file_path)
    # --- data api ---
    data = PrettyTable()
    data.set_style(ORGMODE)
    data.field_names = ['data api name', 'status']
    data.add_row(['data upload',
                  judging_state(test_api.data_upload(conf_file, work_mode=work_mode))])
    data.add_row(['data download',
                  judging_state(test_api.data_download(conf_file, output_path, work_mode))])
    data.add_row(['data upload history',
                  judging_state(test_api.data_upload_history(conf_file, work_mode=work_mode))])
    print(data.get_string(title="data api"))
    # --- table api ---
    table = PrettyTable()
    table.set_style(ORGMODE)
    table.field_names = ['table api name', 'status']
    table.add_row(['table info',
                   judging_state(test_api.table_api('table/info', conf_file))])
    table.add_row(['delete table',
                   judging_state(test_api.table_api('table/delete', conf_file))])
    print(table.get_string(title="table api"))
    # --- job api --- (stop runs first, against a freshly submitted job)
    job = PrettyTable()
    job.set_style(ORGMODE)
    job.field_names = ['job api name', 'status']
    job.add_row(['job stop', judging_state(test_api.job_api('stop'))])
    job.add_row(['job rerun', judging_state(test_api.job_api('rerun'))])
    job.add_row(['job submit', judging_state(test_api.submit_job(stop=False))])
    job.add_row(['job query',
                 judging_state(False if test_api.query_job() == "success" else True)])
    job.add_row(['job data view',
                 judging_state(test_api.job_api('data/view/query'))])
    job.add_row(['job list', judging_state(test_api.job_api('list/job'))])
    job.add_row(['job config',
                 judging_state(test_api.job_config(max_iter=max_iter, output_path=output_path))])
    job.add_row(['job log', judging_state(test_api.job_api('log', output_path))])
    job.add_row(['job dsl generate', judging_state(test_api.job_dsl_generate())])
    print(job.get_string(title="job api"))
    # --- task api ---
    task = PrettyTable()
    task.set_style(ORGMODE)
    task.field_names = ['task api name', 'status']
    task.add_row(['task list', judging_state(test_api.list_task())])
    task.add_row(['task query', judging_state(test_api.query_task())])
    print(task.get_string(title="task api"))
    # --- tag api --- (create -> update -> list -> retrieve -> destroy)
    tag = PrettyTable()
    tag.set_style(ORGMODE)
    tag.field_names = ['tag api name', 'status']
    tag.add_row(['create tag',
                 judging_state(test_api.tag_api('tag/create', 'create_job_tag'))])
    tag.add_row(['update tag',
                 judging_state(test_api.tag_api('tag/update', 'create_job_tag', 'update_job_tag'))])
    tag.add_row(['list tag', judging_state(test_api.tag_api('tag/list'))])
    # retrieve should hand back the updated tag name; anything else is a failure.
    tag.add_row(['retrieve tag',
                 judging_state(not test_api.tag_api('tag/retrieve', 'update_job_tag') == 'update_job_tag')])
    tag.add_row(['destroy tag',
                 judging_state(test_api.tag_api('tag/destroy', 'update_job_tag'))])
    print(tag.get_string(title="tag api"))
    # --- component api --- (printed later, once the metric delete check ran)
    component = PrettyTable()
    component.set_style(ORGMODE)
    component.field_names = ['component api name', 'status']
    component.add_row(['output data',
                       judging_state(test_api.component_api('output/data', output_path=output_path))])
    component.add_row(['output table',
                       judging_state(test_api.component_api('output/data/table'))])
    component.add_row(['output model',
                       judging_state(test_api.component_api('output/model'))])
    component.add_row(['component parameters',
                       judging_state(test_api.component_api('parameters', max_iter=max_iter))])
    component.add_row(['component summary',
                       judging_state(test_api.component_api('summary/download', output_path=output_path))])
    component.add_row(['component list', judging_state(test_api.component_list())])
    component.add_row(['metrics',
                       judging_state(test_api.component_metric('metrics', output_path=output_path))])
    component.add_row(['metrics all',
                       judging_state(test_api.component_metric('metric/all', output_path=output_path))])
    # --- model api ---
    model = PrettyTable()
    model.set_style(ORGMODE)
    model.field_names = ['model api name', 'status']
    model.add_row(['model load', judging_state(test_api.model_api('model/load'))])
    model.add_row(['model bind', judging_state(test_api.model_api('model/bind'))])
    # export returns (retcode, exported file path); the path feeds the import test.
    status, model_path = test_api.model_api('model/export', output_path=output_path)
    model.add_row(['model export', judging_state(status)])
    model.add_row(['model import',
                   (judging_state(test_api.model_api('model/import',
                                                     remove_path=remove_path,
                                                     model_path=model_path)))])
    model.add_row(['model_tag create',
                   judging_state(test_api.model_api('model_tag/create', tag_name='model_tag_create'))])
    model.add_row(['model_tag remove',
                   judging_state(test_api.model_api('model_tag/remove', tag_name='model_tag_create'))])
    model.add_row(['model_tag retrieve',
                   judging_state(len(test_api.model_api('model_tag/retrieve')))])
    model.add_row(['model migrate',
                   judging_state(test_api.model_api('model/migrate', arbiter_party_id=arbiter_party_id))])
    model.add_row(['model query', judging_state(test_api.model_api('model/query'))])
    model.add_row(['model deploy', judging_state(test_api.model_api('model/deploy'))])
    model.add_row(['model conf', judging_state(test_api.model_api('model/conf'))])
    model.add_row(['model dsl', judging_state(test_api.model_api('model/dsl'))])
    print(model.get_string(title="model api"))
    component.add_row(['metrics delete',
                       judging_state(test_api.component_metric('metric/delete', output_path=output_path))])
    print(component.get_string(title="component api"))
    # --- queue --- (queue three jobs so clean/queue has something to remove)
    queue = PrettyTable()
    queue.set_style(ORGMODE)
    queue.field_names = ['api name', 'status']
    test_api.submit_job()
    test_api.submit_job()
    test_api.submit_job()
    queue.add_row(['clean/queue', judging_state(test_api.job_api('clean/queue'))])
    print(queue.get_string(title="queue job"))
def job_api(self, command):
    """Exercise one job-level flow-SDK call and verify its effect.

    Supported commands: ``stop``, ``list/job``, ``view``, ``log``,
    ``clean/queue``.

    :param command: SDK operation selector.
    :return: the response retcode (0) when the call succeeded AND its
        post-condition holds (job canceled, exactly 3 jobs listed, view count
        matches the DSL, queue empty); ``None`` otherwise.
    """
    if command == 'stop':
        # Need a running job to stop; give it a moment to start.
        self.submit_job()
        time.sleep(5)
        try:
            stdout = self.client.job.stop(job_id=self.job_id)
            if stdout.get('retcode'):
                self.error_log(
                    'job stop: {}'.format(stdout.get('retmsg')) + '\n')
            # Success only counts if the job really ended up canceled.
            if self.query_job() == "canceled":
                return stdout.get('retcode')
        except Exception:
            return
    elif command == 'list/job':
        try:
            stdout = self.client.job.list(limit=3)
            if stdout.get('retcode'):
                self.error_log(
                    'job list: {}'.format(stdout.get('retmsg')) + '\n')
            # The limit=3 request must return exactly 3 entries.
            if len(stdout.get('data')) == 3:
                return stdout.get('retcode')
        except Exception:
            return
    elif command == 'view':
        try:
            stdout = self.client.job.view(job_id=self.job_id, role="guest")
            if stdout.get('retcode'):
                self.error_log(
                    'job view: {}'.format(stdout.get('retmsg')) + '\n')
            # One view entry per DSL component, minus one (presumably the
            # reader/input component has no view) — TODO confirm.
            if len(stdout.get("data")) == len(
                    list(
                        get_dict_from_file(
                            self.dsl_path)['components'].keys())) - 1:
                return stdout.get('retcode')
        except Exception:
            return
    elif command == 'log':
        log_file_dir = os.path.join(self.output_path,
                                    'job_{}_log'.format(self.job_id))
        try:
            stdout = self.client.job.log(job_id=self.job_id,
                                         output_path=log_file_dir)
            if stdout.get('retcode'):
                self.error_log('job log: {}'.format(stdout.get('retmsg')) + '\n')
            return stdout.get('retcode')
        except Exception:
            return
    elif command == 'clean/queue':
        try:
            stdout = self.client.queue.clean()
            if stdout.get('retcode'):
                self.error_log(
                    'clean queue: {}'.format(stdout.get('retmsg')) + '\n')
            # After cleaning, no waiting jobs may remain in the queue.
            if not self.query_job(queue=True):
                return stdout.get('retcode')
        except Exception:
            return
def run_test_api(config_json, namespace):
    """Smoke-test the fate_flow APIs end to end (serving-aware variant).

    Like the other drivers, but model load/bind/deploy/migrate tests are
    gated on serving connectivity and on whether the component is h**o
    (``component_is_homo``).  Results are printed as PrettyTable sections;
    nothing is returned.
    """
    output_path = './output/flow_test_data/'
    os.makedirs(os.path.dirname(output_path), exist_ok=True)
    # server_url looks like 'http://host:port'; split('//')[1] keeps host:port.
    test_api = TestModel(config_json['data_base_dir'],
                         config_json['server_url'].split('//')[1],
                         config_json['component_name'], namespace)
    test_api.dsl_path = config_json['train_dsl_path']
    test_api.cache_directory = config_json['cache_directory']
    test_api.output_path = str(os.path.abspath(output_path)) + '/'
    # Party ids and file paths from the test configuration.
    conf_path = config_json['train_conf_path']
    guest_party_id = config_json['guest_party_id']
    host_party_id = config_json['host_party_id']
    arbiter_party_id = config_json['arbiter_party_id']
    upload_file_path = config_json['upload_file_path']
    model_file_path = config_json['model_file_path']
    conf_file = get_dict_from_file(upload_file_path)
    # Gate serving-dependent model tests on actual connectivity.
    serving_connect_bool = serving_connect(config_json['serving_setting'])
    # Local model cache directory, later used to verify model import.
    remove_path = str(config_json['data_base_dir']).split(
        "python"
    )[0] + '/model_local_cache/guest#{}#arbiter-{}#guest-{}#host-{}#model/'.format(
        guest_party_id[0], arbiter_party_id[0], guest_party_id[0],
        host_party_id[0])
    max_iter = test_api.set_config(guest_party_id, host_party_id,
                                   arbiter_party_id, conf_path,
                                   config_json['component_name'])
    # --- data api ---
    data = PrettyTable()
    data.set_style(ORGMODE)
    data.field_names = ['data api name', 'status']
    data.add_row(
        ['data upload', judging_state(test_api.data_upload(upload_file_path))])
    data.add_row(
        ['data download', judging_state(test_api.data_download(conf_file))])
    data.add_row([
        'data upload history',
        judging_state(test_api.data_upload_history(upload_file_path))
    ])
    print(data.get_string(title="data api"))
    # --- table api ---
    table = PrettyTable()
    table.set_style(ORGMODE)
    table.field_names = ['table api name', 'status']
    table.add_row([
        'table info',
        judging_state(test_api.table_api('table/info', conf_file))
    ])
    table.add_row([
        'delete table',
        judging_state(test_api.table_api('table/delete', conf_file))
    ])
    print(table.get_string(title="table api"))
    # --- job api ---
    job = PrettyTable()
    job.set_style(ORGMODE)
    job.field_names = ['job api name', 'status']
    job.add_row(['job stop', judging_state(test_api.job_api('stop'))])
    job.add_row(['job submit', judging_state(test_api.submit_job(stop=False))])
    job.add_row([
        'job query',
        judging_state(False if test_api.query_job() == "success" else True)
    ])
    job.add_row(['job view', judging_state(test_api.job_api('view'))])
    job.add_row(['job list', judging_state(test_api.job_api('list/job'))])
    job.add_row(
        ['job config', judging_state(test_api.job_config(max_iter=max_iter))])
    job.add_row(['job log', judging_state(test_api.job_api('log'))])
    job.add_row(
        ['job dsl generate', judging_state(test_api.job_dsl_generate())])
    print(job.get_string(title="job api"))
    # --- task api ---
    task = PrettyTable()
    task.set_style(ORGMODE)
    task.field_names = ['task api name', 'status']
    task.add_row(['task list', judging_state(test_api.list_task())])
    task.add_row(['task query', judging_state(test_api.query_task())])
    print(task.get_string(title="task api"))
    # --- tag api --- (create -> update -> list -> query -> delete)
    tag = PrettyTable()
    tag.set_style(ORGMODE)
    tag.field_names = ['tag api name', 'status']
    tag.add_row([
        'create tag',
        judging_state(test_api.tag_api('tag/create', 'create_job_tag'))
    ])
    tag.add_row([
        'update tag',
        judging_state(
            test_api.tag_api('tag/update', 'create_job_tag', 'update_job_tag'))
    ])
    tag.add_row(['list tag', judging_state(test_api.tag_api('tag/list'))])
    # query should hand back the updated tag name; anything else is a failure.
    tag.add_row([
        'query tag',
        judging_state(not test_api.tag_api('tag/query', 'update_job_tag') ==
                      'update_job_tag')
    ])
    tag.add_row([
        'delete tag',
        judging_state(test_api.tag_api('tag/delete', 'update_job_tag'))
    ])
    print(tag.get_string(title="tag api"))
    # --- component api --- (printed later, once the metric delete check ran)
    component = PrettyTable()
    component.set_style(ORGMODE)
    component.field_names = ['component api name', 'status']
    component.add_row(
        ['output data', judging_state(test_api.component_api('output/data'))])
    component.add_row([
        'output table',
        judging_state(test_api.component_api('output/data/table'))
    ])
    component.add_row([
        'output model',
        judging_state(test_api.component_api('output/model'))
    ])
    component.add_row([
        'component parameters',
        judging_state(test_api.component_api('parameters', max_iter=max_iter))
    ])
    component.add_row([
        'component summary',
        judging_state(test_api.component_api('summary'))
    ])
    component.add_row(
        ['component list', judging_state(test_api.component_list())])
    component.add_row(
        ['metrics', judging_state(test_api.component_api('metrics'))])
    component.add_row(
        ['metrics all', judging_state(test_api.component_api('metric/all'))])
    # --- model api ---
    model = PrettyTable()
    model.set_style(ORGMODE)
    model.field_names = ['model api name', 'status']
    if config_json.get('component_is_homo'):
        # H**o components get convert + deploy tests instead of load/bind.
        homo_deploy_path = config_json.get('homo_deploy_path')
        homo_deploy_kube_config_path = config_json.get(
            'homo_deploy_kube_config_path')
        model.add_row([
            'model h**o convert',
            judging_state(test_api.model_api('model/h**o/convert'))
        ])
        model.add_row([
            'model h**o deploy',
            judging_state(
                test_api.model_api(
                    'model/h**o/deploy',
                    homo_deploy_path=homo_deploy_path,
                    homo_deploy_kube_config_path=homo_deploy_kube_config_path))
        ])
    if not config_json.get('component_is_homo') and serving_connect_bool:
        # Hetero + serving reachable: test load and bind against serving.
        model_load_conf = get_dict_from_file(model_file_path)
        model_load_conf["initiator"]["party_id"] = guest_party_id
        model_load_conf["role"].update({
            "guest": [guest_party_id],
            "host": [host_party_id],
            "arbiter": [arbiter_party_id]
        })
        model.add_row([
            'model load',
            judging_state(
                test_api.model_api('model/load',
                                   model_load_conf=model_load_conf))
        ])
        model.add_row([
            'model bind',
            judging_state(
                test_api.model_api('model/bind',
                                   model_load_conf=model_load_conf,
                                   servings=config_json['serving_setting']))
        ])
    # export returns (retcode, exported file path); the path feeds the import test.
    status, model_path = test_api.model_api('model/export')
    model.add_row(['model export', judging_state(status)])
    model.add_row([
        'model import',
        (judging_state(
            test_api.model_api('model/import',
                               remove_path=remove_path,
                               model_path=model_path)))
    ])
    model.add_row([
        'tag model',
        judging_state(
            test_api.model_api('model_tag/model', tag_name='model_tag_create'))
    ])
    model.add_row([
        'tag list',
        judging_state(
            test_api.model_api('model_tag/list', tag_name='model_tag_create'))
    ])
    model.add_row([
        'tag remove',
        judging_state(
            test_api.model_api('model_tag/model',
                               tag_name='model_tag_create',
                               remove=True))
    ])
    if serving_connect_bool:
        model.add_row([
            'model migrate',
            judging_state(test_api.model_api('model/migrate'))
        ])
    model.add_row(
        ['model query', judging_state(test_api.model_api('model/query'))])
    # NOTE(review): flattened source lost indentation; conf/dsl are grouped
    # under this condition because they depend on the deploy step updating
    # model_id/model_version — confirm against the original file.
    if not config_json.get('component_is_homo') and serving_connect_bool:
        model.add_row([
            'model deploy',
            judging_state(test_api.model_api('model/deploy'))
        ])
        model.add_row(
            ['model conf', judging_state(test_api.model_api('model/conf'))])
        model.add_row(
            ['model dsl', judging_state(test_api.model_api('model/dsl'))])
    print(model.get_string(title="model api"))
    component.add_row([
        'metrics delete',
        judging_state(test_api.component_api('metric/delete'))
    ])
    print(component.get_string(title="component api"))
    # --- queue --- (queue three jobs so clean/queue has something to remove)
    queue = PrettyTable()
    queue.set_style(ORGMODE)
    queue.field_names = ['api name', 'status']
    test_api.submit_job()
    test_api.submit_job()
    test_api.submit_job()
    queue.add_row(
        ['clean/queue', judging_state(test_api.job_api('clean/queue'))])
    print(queue.get_string(title="queue job"))
    print('Please check the error content: {}'.format(
        test_api.error_log(None)))
def model_api(self,
              command,
              remove_path=None,
              model_path=None,
              tag_name=None,
              homo_deploy_path=None,
              homo_deploy_kube_config_path=None,
              remove=False,
              model_load_conf=None,
              servings=None):
    """Exercise one model-level flow-SDK call and verify its effect.

    Dispatches on ``command`` (load, bind, import, export, migrate,
    h**o convert/deploy, tag model/list, deploy, conf, dsl, query).
    Most branches return the response retcode (0) when the call succeeded
    AND its post-condition holds, or ``None`` on failure; the
    ``model/export`` branch instead returns ``(retcode, exported_path)``.
    ``model/deploy`` additionally mutates ``self.model_id``,
    ``self.model_version`` and ``self.job_id`` to the deployed model.
    """
    if model_load_conf is not None:
        # Point the load/bind config at the model produced by this run.
        model_load_conf["job_parameters"].update({
            "model_id": self.model_id,
            "model_version": self.model_version
        })
    if command == 'model/load':
        try:
            stdout = self.client.model.load(config_data=model_load_conf)
            if stdout.get('retcode'):
                self.error_log(
                    'model load: {}'.format(stdout.get('retmsg')) + '\n')
            return stdout.get('retcode')
        except Exception:
            return
    elif command == 'model/bind':
        # Random 8-digit service id for this bind request.
        service_id = "".join(
            [str(i) for i in np.random.randint(9, size=8)])
        model_load_conf.update({
            "service_id": service_id,
            "servings": [servings]
        })
        try:
            stdout = self.client.model.bind(config_data=model_load_conf)
            if stdout.get('retcode'):
                self.error_log(
                    'model bind: {}'.format(stdout.get('retmsg')) + '\n')
            else:
                return stdout.get('retcode')
        except Exception:
            return
    elif command == 'model/import':
        config_data = {
            "model_id": self.model_id,
            "model_version": self.model_version,
            "role": "guest",
            "party_id": self.guest_party_id[0],
            "file": model_path
        }
        # config_file_path = self.cache_directory + 'model_import.json'
        # with open(config_file_path, 'w') as fp:
        #     json.dump(config_data, fp)
        try:
            # Wipe the local cache entry first so a successful import is
            # detectable by the directory reappearing.
            remove_path = Path(remove_path + self.model_version)
            if os.path.isdir(remove_path):
                shutil.rmtree(remove_path)
            stdout = self.client.model.import_model(
                config_data=config_data)
            if not stdout.get('retcode') and os.path.isdir(remove_path):
                return 0
            else:
                self.error_log(
                    'model import: {}'.format(stdout.get('retmsg')) + '\n')
        except Exception:
            return
    elif command == 'model/export':
        config_data = {
            "model_id": self.model_id,
            "model_version": self.model_version,
            "role": "guest",
            "party_id": self.guest_party_id[0],
            "output_path": self.output_path
        }
        # config_file_path = self.cache_directory + 'model_export.json'
        # with open(config_file_path, 'w') as fp:
        #     json.dump(config_data, fp)
        stdout = self.client.model.export_model(config_data=config_data)
        if stdout.get('retcode'):
            self.error_log(
                'model export: {}'.format(stdout.get('retmsg')) + '\n')
        else:
            # Only branch returning a tuple: (retcode, exported file path).
            export_model_path = stdout.get('file')
            return stdout.get('retcode'), export_model_path
    elif command == 'model/migrate':
        config_data = {
            "job_parameters": {
                "federated_mode": "MULTIPLE"
            },
            "migrate_initiator": {
                "role": "guest",
                "party_id": self.guest_party_id[0]
            },
            "role": {
                "guest": self.guest_party_id,
                "arbiter": self.arbiter_party_id,
                "host": self.host_party_id
            },
            # Migrating onto the same parties, under a new model version.
            "migrate_role": {
                "guest": self.guest_party_id,
                "arbiter": self.arbiter_party_id,
                "host": self.host_party_id
            },
            "execute_party": {
                "guest": self.guest_party_id,
                "arbiter": self.arbiter_party_id,
                "host": self.host_party_id
            },
            "model_id": self.model_id,
            "model_version": self.model_version,
            "unify_model_version": self.job_id + '_01'
        }
        # config_file_path = self.cache_directory + 'model_migrate.json'
        # with open(config_file_path, 'w') as fp:
        #     json.dump(config_data, fp)
        try:
            stdout = self.client.model.migrate(config_data=config_data)
            if stdout.get('retcode'):
                self.error_log(
                    'model migrate: {}'.format(stdout.get('retmsg')) + '\n')
            return stdout.get('retcode')
        except Exception:
            return
    elif command == 'model/h**o/convert':
        config_data = {
            "model_id": self.model_id,
            "model_version": self.model_version,
            "role": "guest",
            "party_id": self.guest_party_id[0],
        }
        config_file_path = self.cache_directory + 'model_homo_convert.json'
        with open(config_file_path, 'w') as fp:
            json.dump(config_data, fp)
        try:
            stdout = self.client.model.homo_convert(
                conf_path=config_file_path)
            if stdout.get('retcode'):
                self.error_log(
                    'model h**o convert: {}'.format(stdout.get('retmsg')) +
                    '\n')
            return stdout.get('retcode')
        except Exception:
            return
    elif command == 'model/h**o/deploy':
        job_data = {
            "model_id": self.model_id,
            "model_version": self.model_version,
            "role": "guest",
            "party_id": self.guest_party_id[0],
            "component_name": self.component_name
        }
        # Deployment template from file, patched with this job's identity.
        config_data = get_dict_from_file(homo_deploy_path)
        config_data.update(job_data)
        if homo_deploy_kube_config_path:
            config_data['deployment_parameters'][
                'config_file'] = homo_deploy_kube_config_path
        config_file_path = self.cache_directory + 'model_homo_deploy.json'
        with open(config_file_path, 'w') as fp:
            json.dump(config_data, fp)
        try:
            stdout = self.client.model.homo_deploy(
                conf_path=config_file_path)
            if stdout.get('retcode'):
                self.error_log(
                    'model h**o deploy: {}'.format(stdout.get('retmsg')) +
                    '\n')
            return stdout.get('retcode')
        except Exception:
            return
    elif command == 'model_tag/model':
        try:
            stdout = self.client.model.tag_model(job_id=self.job_id,
                                                 tag_name=tag_name,
                                                 remove=remove)
            if stdout.get('retcode'):
                self.error_log(
                    'model tag model: {}'.format(stdout.get('retmsg')) + '\n')
            # NOTE(review): remove=True makes the list check expect ZERO tags
            # even right after adding one — confirm remove=remove was not
            # intended here.
            return self.model_api('model_tag/list',
                                  tag_name=tag_name,
                                  remove=True)
        except Exception:
            return
    elif command == 'model_tag/list':
        try:
            stdout = self.client.model.tag_list(job_id=self.job_id)
            if stdout.get('retcode'):
                self.error_log(
                    'model tag retrieve: {}'.format(stdout.get('retmsg')) +
                    '\n')
            # After a removal, success means no tags remain.
            if remove and len(stdout.get('data').get('tags')) == 0:
                return stdout.get('retcode')
            # Otherwise the first listed tag must carry the expected name.
            if stdout.get('data').get('tags')[0].get('name') == tag_name:
                return stdout.get('retcode')
        except Exception:
            return
    elif command == 'model/deploy':
        try:
            stdout = self.client.model.deploy(
                model_id=self.model_id, model_version=self.model_version)
            if stdout.get('retcode'):
                self.error_log(
                    'model deploy: {}'.format(stdout.get('retmsg')) + '\n')
            # Deploy keeps the model_id but must mint a new model_version;
            # switch self to the deployed model for the conf/dsl checks.
            if stdout.get('data')['model_id'] == self.model_id and\
                    stdout.get('data')['model_version'] != self.model_version:
                self.model_id = stdout.get('data')['model_id']
                self.model_version = stdout.get('data')['model_version']
                self.job_id = stdout.get('data')['model_version']
                return stdout.get('retcode')
        except Exception:
            return
    elif command == 'model/conf':
        try:
            stdout = self.client.model.get_predict_conf(
                model_id=self.model_id, model_version=self.model_version)
            if stdout.get('retcode'):
                self.error_log(
                    'model conf: {}'.format(stdout.get('retmsg')) + '\n')
            # The predict conf must echo our model identity and initiator.
            if stdout.get('data'):
                if stdout.get('data')['job_parameters']['common']['model_id'] == self.model_id \
                        and stdout.get('data')['job_parameters']['common']['model_version'] == \
                        self.model_version and stdout.get('data')['initiator']['party_id'] == \
                        self.guest_party_id[0] and stdout.get('data')['initiator']['role'] == 'guest':
                    return stdout.get('retcode')
        except Exception:
            return
    elif command == 'model/dsl':
        try:
            stdout = self.client.model.get_predict_dsl(
                model_id=self.model_id, model_version=self.model_version)
            if stdout.get('retcode'):
                self.error_log(
                    'model dsl: {}'.format(stdout.get('retmsg')) + '\n')
            # Every training DSL component must appear in the predict DSL.
            model_dsl_cpn = list(stdout.get('data')['components'].keys())
            train_dsl_cpn = list(
                get_dict_from_file(self.dsl_path)['components'].keys())
            if len([k for k in model_dsl_cpn if k in train_dsl_cpn]) == len(train_dsl_cpn):
                return stdout.get('retcode')
        except Exception:
            return
    elif command == 'model/query':
        try:
            stdout = self.client.model.get_model_info(
                model_id=self.model_id,
                model_version=self.model_version,
                role="guest",
                party_id=self.guest_party_id[0])
            if stdout.get('retcode'):
                self.error_log(
                    'model query: {}'.format(stdout.get('retmsg')) + '\n')
            # The stored record must match our identity exactly (party id is
            # stored as a string server-side).
            if stdout.get('data')[0].get('f_model_id') == self.model_id and \
                    stdout.get('data')[0].get('f_model_version') == self.model_version and \
                    stdout.get('data')[0].get('f_role') == "guest" and \
                    stdout.get('data')[0].get('f_party_id') == str(self.guest_party_id[0]):
                return stdout.get('retcode')
        except Exception:
            return
def run_test_api(config_json, namespace):
    """Smoke-test the FATE Flow client APIs end to end and print result tables.

    Runs the data, table, job, task, component and model API groups against a
    running FATE Flow instance, collecting a pass/fail row per call into
    PrettyTable reports that are printed as each group completes.

    :param config_json: dict-like test configuration; must provide
        ``data_base_dir`` (a ``pathlib.Path``), party ids, dsl/conf/upload/model
        file paths, ``component_name``, ``serving_setting`` and optionally
        ``component_is_homo``.
    :param namespace: namespace string forwarded to ``TestModel`` (used to
        isolate uploaded table names per test run).
    :return: None; results are printed, errors are accumulated via
        ``test_api.error_log`` and dumped at the end.
    :raises FileNotFoundError: if the fate_flow client script is missing.
    """
    output_path = './output/flow_test_data/'
    os.makedirs(os.path.dirname(output_path), exist_ok=True)
    fate_flow_path = config_json[
        'data_base_dir'] / 'fateflow' / 'python' / 'fate_flow' / 'fate_flow_client.py'
    if not fate_flow_path.exists():
        raise FileNotFoundError(
            f'fate_flow not found. filepath: {fate_flow_path}')
    test_api = TestModel(config_json['data_base_dir'], str(fate_flow_path),
                         config_json['component_name'], namespace)
    test_api.dsl_path = config_json['train_dsl_path']
    test_api.cache_directory = config_json['cache_directory']
    test_api.output_path = str(os.path.abspath(output_path)) + '/'

    conf_path = config_json['train_conf_path']
    guest_party_id = config_json['guest_party_id']
    host_party_id = config_json['host_party_id']
    arbiter_party_id = config_json['arbiter_party_id']
    upload_file_path = config_json['upload_file_path']
    model_file_path = config_json['model_file_path']
    conf_file = get_dict_from_file(upload_file_path)
    serving_connect_bool = serving_connect(config_json['serving_setting'])
    # Local model cache directory for this party combination; the 'model import'
    # test removes it first so the import is exercised from a clean state.
    remove_path = str(config_json['data_base_dir']).split(
        "python"
    )[0] + '/model_local_cache/guest#{}#arbiter-{}#guest-{}#host-{}#model/'.format(
        guest_party_id[0], arbiter_party_id[0], guest_party_id[0],
        host_party_id[0])
    max_iter = test_api.set_config(guest_party_id, host_party_id,
                                   arbiter_party_id, conf_path,
                                   config_json['component_name'])

    # --- data api ---
    data = PrettyTable()
    data.set_style(ORGMODE)
    data.field_names = ['data api name', 'status']
    data.add_row(
        ['data upload', judging_state(test_api.data_upload(upload_file_path))])
    data.add_row([
        'data download',
        judging_state(test_api.data_download(conf_file, output_path))
    ])
    data.add_row([
        'data upload history',
        judging_state(test_api.data_upload_history(upload_file_path))
    ])
    print(data.get_string(title="data api"))

    # --- table api ---
    table = PrettyTable()
    table.set_style(ORGMODE)
    table.field_names = ['table api name', 'status']
    table.add_row([
        'table info',
        judging_state(test_api.table_api('table_info', conf_file))
    ])
    table.add_row([
        'delete table',
        judging_state(test_api.table_api('table_delete', conf_file))
    ])
    print(table.get_string(title="table api"))

    # --- job api ---
    # Rows are collected now but printed last: the clean job / clean queue
    # checks must run after every other group has finished with the job.
    job = PrettyTable()
    job.set_style(ORGMODE)
    job.field_names = ['job api name', 'status']
    job.add_row(['job stop', judging_state(test_api.job_api('stop_job'))])
    job.add_row(['job submit', judging_state(test_api.submit_job(stop=False))])
    job.add_row([
        'job query',
        judging_state(False if test_api.query_job() == "success" else True)
    ])
    job.add_row(
        ['job data view', judging_state(test_api.job_api('data_view_query'))])
    job.add_row(
        ['job config', judging_state(test_api.job_config(max_iter=max_iter))])
    job.add_row(['job log', judging_state(test_api.job_api('job_log'))])

    # --- task api ---
    task = PrettyTable()
    task.set_style(ORGMODE)
    task.field_names = ['task api name', 'status']
    task.add_row(['task query', judging_state(test_api.query_task())])
    print(task.get_string(title="task api"))

    # --- component api ---
    # The 'metrics delete' row is deliberately added only after the model
    # tests so the metrics still exist while models are exported/imported.
    component = PrettyTable()
    component.set_style(ORGMODE)
    component.field_names = ['component api name', 'status']
    component.add_row([
        'output data',
        judging_state(test_api.component_api('component_output_data'))
    ])
    component.add_row([
        'output table',
        judging_state(test_api.component_api('component_output_data_table'))
    ])
    component.add_row([
        'output model',
        judging_state(test_api.component_api('component_output_model'))
    ])
    component.add_row([
        'component parameters',
        judging_state(
            test_api.component_api('component_parameters', max_iter=max_iter))
    ])
    component.add_row([
        'metrics',
        judging_state(test_api.component_api('component_metrics'))
    ])
    component.add_row([
        'metrics all',
        judging_state(test_api.component_api('component_metric_all'))
    ])

    # --- model api ---
    # Skipped (table stays empty and unprinted) for homo components or when
    # the serving endpoint is unreachable.
    model = PrettyTable()
    model.set_style(ORGMODE)
    model.field_names = ['model api name', 'status']
    if not config_json.get('component_is_homo') and serving_connect_bool:
        model_load_conf = get_dict_from_file(model_file_path)
        # NOTE(review): guest_party_id is a list here, so party_id becomes a
        # list and the role values become nested lists — confirm that is the
        # shape the load/bind conf expects.
        model_load_conf["initiator"]["party_id"] = guest_party_id
        model_load_conf["role"].update({
            "guest": [guest_party_id],
            "host": [host_party_id],
            "arbiter": [arbiter_party_id]
        })
        model.add_row([
            'model load',
            judging_state(
                test_api.model_api('load', model_load_conf=model_load_conf))
        ])
        model.add_row([
            'model bind',
            judging_state(
                test_api.model_api('bind',
                                   model_load_conf=model_load_conf,
                                   servings=config_json['serving_setting']))
        ])
        # model_api returns None on an internal exception (its handlers end in
        # `except Exception: return`); guard the unpack so a failed export is
        # reported as a failed row instead of raising TypeError here.
        export_result = test_api.model_api('export')
        if export_result is None:
            status = model_path = None
        else:
            status, model_path = export_result
        model.add_row(['model export', judging_state(status)])
        model.add_row([
            'model import', (judging_state(
                test_api.model_api('import',
                                   remove_path=remove_path,
                                   model_path=model_path)))
        ])
        model.add_row(
            ['model store', (judging_state(test_api.model_api('store')))])
        model.add_row(
            ['model restore', (judging_state(test_api.model_api('restore')))])
        print(model.get_string(title="model api"))

    component.add_row([
        'metrics delete',
        judging_state(test_api.component_api('component_metric_delete'))
    ])
    print(component.get_string(title="component api"))

    # Queue up extra jobs so 'clean job' and 'clean queue' have work to do.
    test_api.submit_job()
    test_api.submit_job()
    test_api.submit_job()
    job.add_row(['clean job', judging_state(test_api.job_api('clean_job'))])
    job.add_row(
        ['clean queue', judging_state(test_api.job_api('clean_queue'))])
    print(job.get_string(title="job api"))

    # Dump the accumulated error log collected across all API calls.
    print('Please check the error content: {}'.format(
        test_api.error_log(None)))