def load(self, conf_path=None, job_id=None):
    """Load a model via the flow server.

    Exactly one of *conf_path* and *job_id* must be supplied; otherwise a
    local error response dict (retcode 100) is returned without contacting
    the server.

    :param conf_path: path of a load-conf file on the client machine.
    :param job_id: id of the job whose model should be loaded.
    :return: the JSON response of the flow server, or a local error dict.
    :raises FileNotFoundError: if *conf_path* is given but does not exist.
    """
    kwargs = locals()
    # The previous implementation built these error responses but fell
    # through and posted anyway (twice); return them instead.
    if not kwargs.get("conf_path") and not kwargs.get("job_id"):
        return {
            "retcode": 100,
            "retmsg": "Load model failed. No arguments received, "
                      "please provide one of arguments from job id and conf path."
        }
    if kwargs.get("conf_path") and kwargs.get("job_id"):
        return {
            "retcode": 100,
            "retmsg": "Load model failed. Please do not provide job id and "
                      "conf path at the same time."
        }
    # Only validate the conf file when it was actually provided; the old
    # unconditional check raised TypeError when only job_id was set.
    if kwargs.get("conf_path") and not os.path.exists(conf_path):
        raise FileNotFoundError('Invalid conf path, file not exists.')
    config_data, dsl_data = preprocess(**kwargs)
    return self._post(url='model/load', json=config_data)
def config(self, job_id, role, party_id, output_path):
    """Fetch a job's configuration and save each part as a JSON file.

    On success the server response is augmented with the download
    directory, and the bulky 'dsl'/'runtime_conf' entries are stripped.
    """
    kwargs = locals()
    config_data, dsl_data = preprocess(**kwargs)
    check_config(config=config_data,
                 required_arguments=['job_id', 'role', 'party_id', 'output_path'])
    response = self._post(url='job/config', json=config_data)
    if response['retcode'] != 0:
        return response
    job_id = response['data']['job_id']
    dest_dir = os.path.join(config_data['output_path'],
                            'job_{}_config'.format(job_id))
    os.makedirs(dest_dir, exist_ok=True)
    # Persist every returned section except the job id itself.
    for name, content in response['data'].items():
        if name == 'job_id':
            continue
        with open('{}/{}.json'.format(dest_dir, name), 'w') as fw:
            json.dump(content, fw, indent=4)
    del response['data']['dsl']
    del response['data']['runtime_conf']
    response['directory'] = dest_dir
    response['retmsg'] = 'download successfully, please check {} directory'.format(dest_dir)
    return response
def log(self, job_id, output_path):
    """Download a job's logs as a tar.gz and unpack them under *output_path*."""
    kwargs = locals()
    config_data, dsl_data = preprocess(**kwargs)
    check_config(config=config_data,
                 required_arguments=['job_id', 'output_path'])
    job_id = config_data['job_id']
    tar_name = 'job_{}_log.tar.gz'.format(job_id)
    target_dir = os.path.join(config_data['output_path'],
                              'job_{}_log'.format(job_id))
    # Stream the archive; closing() guarantees the connection is released.
    with closing(self._get(url='job/log', handle_result=False,
                           json=config_data, stream=True)) as resp:
        if resp.status_code != 200:
            return resp.json()
        download_from_request(http_response=resp,
                              tar_file_name=tar_name,
                              extract_dir=target_dir)
    return {
        'retcode': 0,
        'directory': target_dir,
        'retmsg': 'download successfully, please check {} directory'.format(target_dir)
    }
def tag_model(self, job_id, tag_name, remove=False):
    """Attach a tag to (or detach it from) the model produced by a job."""
    kwargs = locals()
    config_data, dsl_data = preprocess(**kwargs)
    # 'remove' only selects the endpoint; it must not be sent in the payload.
    if config_data.pop('remove'):
        endpoint = 'model/model_tag/remove'
    else:
        endpoint = 'model/model_tag/create'
    return self._post(url=endpoint, json=config_data)
def submit(self, config_data, dsl_data=None):
    """Submit a job from already-loaded runtime-conf and DSL dicts."""
    params = locals()
    conf, dsl = preprocess(**params)
    payload = {
        'job_runtime_conf': conf,
        'job_dsl': dsl,
    }
    return self._post(url='job/submit', json=payload)
def metric_delete(self, date=None, job_id=None):
    """Delete tracking metrics, selected either by date or by job id.

    :param date: date filter; when given it is forwarded to the server
        under the key 'model' (see NOTE below).
    :param job_id: id of the job whose metrics should be deleted.
    :return: JSON response of the flow server.
    """
    kwargs = locals()
    config_data, dsl_data = preprocess(**kwargs)
    # NOTE(review): 'date' is renamed to 'model' before posting. That key
    # name looks suspicious for a date value — confirm against the
    # server-side handler of tracking/component/metric/delete.
    if config_data.get('date'):
        config_data['model'] = config_data.pop('date')
    return self._post(url='tracking/component/metric/delete', json=config_data)
def export_model(self, conf_path, to_database=False):
    """Export a model, either as a downloaded archive file or into a database.

    :param conf_path: path of an export-conf file on the client machine;
        the output directory is read from that conf (key 'output_path' —
        populated by preprocess(), not visible here; TODO confirm).
    :param to_database: when truthy, ask the server to store the model
        in a database instead of downloading an archive.
    :return: a local result dict (file download path) or the server's
        JSON response.
    :raises FileNotFoundError: if *conf_path* does not exist.
    """
    if not os.path.exists(conf_path):
        raise FileNotFoundError('Invalid conf path, file not exists.')
    kwargs = locals()
    config_data, dsl_data = preprocess(**kwargs)
    # 'to_database' only selects the mode; strip it from the payload.
    if not config_data.pop("to_database"):
        # Stream the archive; closing() releases the connection on exit.
        with closing(
                self._get(url='model/export', handle_result=False,
                          json=config_data, stream=True)) as response:
            if response.status_code == 200:
                # Archive name comes from the HTTP Content-Disposition header.
                archive_file_name = re.findall(
                    "filename=(.+)",
                    response.headers["Content-Disposition"])[0]
                os.makedirs(config_data["output_path"], exist_ok=True)
                archive_file_path = os.path.join(
                    config_data["output_path"], archive_file_name)
                # Write the body to disk in 1 KiB chunks.
                with open(archive_file_path, 'wb') as fw:
                    for chunk in response.iter_content(1024):
                        if chunk:
                            fw.write(chunk)
                response = {
                    'retcode': 0,
                    'file': archive_file_path,
                    'retmsg': 'download successfully, please check {}'.format(
                        archive_file_path)
                }
            else:
                response = response.json()
        return response
    return self._post(url='model/store', json=config_data)
def list_checkpoints(self, **kwargs):
    """List all checkpoints of a model component."""
    conf, _ = preprocess(**kwargs)
    required = ['role', 'party_id', 'model_id', 'model_version', 'component_name']
    check_config(config=conf, required_arguments=required)
    return self._post(url='checkpoint/list', json=conf)
def metrics(self, job_id, role, party_id, component_name):
    """Fetch all metric data recorded by a job component."""
    params = locals()
    conf, _ = preprocess(**params)
    required = ['job_id', 'component_name', 'role', 'party_id']
    check_config(config=conf, required_arguments=required)
    return self._post(url='tracking/component/metrics', json=conf)
def query(self, job_id=None, role=None, party_id=None, component_name=None, status=None):
    """Query jobs on the flow server; every filter argument is optional."""
    filters = locals()
    conf, _ = preprocess(**filters)
    return self._post(url='job/query', json=conf)
def deploy(self, model_id, model_version, cpn_list=None, predict_dsl=None, components_checkpoint=None):
    """Create a deployable (predict) model from a trained parent model."""
    params = locals()
    conf, _ = preprocess(**params)
    return self._post(url='model/deploy', json=conf)
def clean(self, job_id=None, role=None, party_id=None, component_name=None):
    """Clean up intermediate data of a job; 'job_id' is mandatory."""
    params = locals()
    conf, _ = preprocess(**params)
    check_config(config=conf, required_arguments=['job_id'])
    return self._post(url='job/clean', json=conf)
def delete(self, namespace=None, table_name=None, job_id=None, role=None, party_id=None, component_name=None):
    """Delete a data table, addressed directly or via job coordinates."""
    params = locals()
    conf, _ = preprocess(**params)
    return self._post(url='table/delete', json=conf)
def submit(self, conf_path, dsl_path=None):
    """Submit a job from conf/DSL files on the client machine."""
    if not os.path.exists(conf_path):
        raise FileNotFoundError('Invalid conf path, file not exists.')
    params = locals()
    conf, dsl = preprocess(**params)
    return self._post(url='job/submit',
                      json={'job_dsl': dsl, 'job_runtime_conf': conf})
def get_model_info(self, model_id=None, model_version=None, role=None,
                   party_id=None, query_filters=None, **kwargs):
    """Query model information matching the given filters.

    :param model_id: id of the model to look up.
    :param model_version: version of the model to look up.
    :param role: role filter.
    :param party_id: party id filter.
    :param query_filters: additional server-side query filters.
    :param kwargs: extra query keys forwarded to the server.
    :return: JSON response of the flow server.
    """
    params = locals()
    # BUG FIX: the old code did `kwargs = locals()`, which captures the
    # **kwargs dict as one nested entry under the key 'kwargs', so extra
    # keyword arguments never reached preprocess() as top-level keys.
    # Flatten them instead.
    params.update(params.pop('kwargs'))
    config_data, dsl_data = preprocess(**params)
    return self._post(url='model/query', json=config_data)
def upload_history(self, limit=10, job_id=None):
    """List upload-job history, retrying once after starting the standalone server."""
    params = locals()
    conf, _ = preprocess(**params)
    result = self._post(url='data/upload/history', json=conf)
    if result['retcode'] != 999:
        return result
    # retcode 999: start the standalone job server and retry exactly once.
    start_cluster_standalone_job_server()
    return self._post(url='data/upload/history', json=conf)
def download(self, config_data):
    """Download data according to a conf dict, retrying once after starting the standalone server."""
    params = locals()
    conf, _ = preprocess(**params)
    result = self._post(url='data/download', json=conf)
    if result['retcode'] != 999:
        return result
    # retcode 999: start the standalone job server and retry exactly once.
    start_cluster_standalone_job_server()
    return self._post(url='data/download', json=conf)
def get_checkpoint(self, **kwargs):
    """Fetch a single checkpoint, addressed by step_index OR step_name."""
    conf, _ = preprocess(**kwargs)
    check_config(config=conf,
                 required_arguments=['role', 'party_id', 'model_id',
                                     'model_version', 'component_name'])
    # Exactly one of the two step selectors must be present.
    selectors = conf.keys() & {'step_index', 'step_name'}
    if len(selectors) != 1:
        raise KeyError('step_index or step_name is required')
    return self._post(url='checkpoint/get', json=conf)
def get_summary(self, job_id, role, party_id, component_name):
    """Download a component's summary; the result always carries a 'data' key."""
    params = locals()
    conf, _ = preprocess(**params)
    check_config(config=conf,
                 required_arguments=['job_id', 'component_name', 'role', 'party_id'])
    result = self._post(url='tracking/component/summary/download',
                        handle_result=True, json=conf)
    # Normalize a missing/empty payload to an empty dict for callers.
    if not result.get('data'):
        result['data'] = {}
    return result
def toy(self, guest_party_id: str, host_party_id: str, guest_user_name: str = "",
        host_user_name: str = "", task_cores: int = 2, timeout: int = 60):
    """Submit the built-in toy example job between one guest and one host."""
    params = locals()
    # NOTE(review): params comes from locals() and therefore includes
    # 'self'; preprocess() and toy_conf() are assumed to tolerate that
    # extra key — TODO confirm.
    conf_unused, _ = preprocess(**params)
    runtime_conf = self.toy_conf(**params)
    return self._post(url='job/submit', json={
        'job_runtime_conf': runtime_conf,
        'job_dsl': self.toy_dsl(),
    })
def output_data(self, job_id, role, party_id, component_name, output_path, limit=-1):
    """Download a component's output data archive and unpack it under *output_path*.

    :param job_id: id of the job.
    :param role: role of the requesting party.
    :param party_id: party id of the requesting party.
    :param component_name: name of the component whose output is wanted.
    :param output_path: local directory to extract into.
    :param limit: row limit; -1 presumably means "no limit" — TODO confirm
        against the server-side handler.
    :return: a local result dict describing success/failure, or the
        server's JSON error response.
    """
    kwargs = locals()
    config_data, dsl_data = preprocess(**kwargs)
    check_config(config=config_data,
                 required_arguments=[
                     'job_id', 'component_name', 'role', 'party_id',
                     'output_path'
                 ])
    tar_file_name = 'job_{}_{}_{}_{}_output_data.tar.gz'.format(
        config_data['job_id'], config_data['component_name'],
        config_data['role'], config_data['party_id'])
    # Extraction directory mirrors the archive name without its suffix.
    extract_dir = os.path.join(config_data['output_path'],
                               tar_file_name.replace('.tar.gz', ''))
    # Stream the archive; closing() releases the connection on exit.
    with closing(
            self._get(url='tracking/component/output/data/download',
                      handle_result=False,
                      json=config_data,
                      stream=True)) as response:
        if response.status_code == 200:
            try:
                download_from_request(http_response=response,
                                      tar_file_name=tar_file_name,
                                      extract_dir=extract_dir)
                response = {
                    'retcode': 0,
                    'directory': extract_dir,
                    'retmsg': 'download successfully, please check {} directory'.
                    format(extract_dir)
                }
            # NOTE(review): BaseException also swallows KeyboardInterrupt/
            # SystemExit; kept as-is to preserve behavior.
            except BaseException:
                response = {
                    'retcode': 100,
                    'retmsg': 'download failed, please check if the parameters are correct'
                }
        else:
            response = response.json()
    return response
def import_model(self, conf_path, from_database=False):
    """Import a model, either by uploading an archive file or from a database.

    :param conf_path: path of an import-conf file on the client machine;
        the archive path is read from that conf under the key 'file'.
    :param from_database: when truthy, ask the server to restore the
        model from a database instead of uploading a file.
    :return: JSON response of the flow server.
    :raises FileNotFoundError: if *conf_path* does not exist.
    :raises Exception: if the referenced archive file does not exist.
    """
    if not os.path.exists(conf_path):
        raise FileNotFoundError('Invalid conf path, file not exists.')
    kwargs = locals()
    config_data, dsl_data = preprocess(**kwargs)
    if not kwargs.pop("from_database"):
        file_path = config_data["file"]
        # Relative paths are resolved against the project base directory.
        if not os.path.isabs(file_path):
            file_path = os.path.join(get_project_base_directory(), file_path)
        if not os.path.exists(file_path):
            raise Exception('The file is obtained from the fate flow client machine, but it does not exist, '
                            'please check the path: {}'.format(file_path))
        # BUG FIX: the old code opened the file without ever closing it,
        # leaking the handle; use a context manager instead.
        with open(file_path, 'rb') as fp:
            files = {'file': fp}
            return self._post(url='model/import', data=config_data, files=files)
    return self._post(url='model/restore', json=config_data)
def upload(self, config_data, verbose=0, drop=0):
    """Upload a data file to FATE via the flow server.

    When the conf says the file lives on this client machine
    ('use_local_data', default truthy), the file is streamed as a
    multipart upload with an optional progress bar; otherwise only the
    conf is posted and the server reads the file itself.

    :param config_data: upload conf dict (must carry a 'file' path when
        uploading local data).
    :param verbose: 1 to print a progress bar while uploading.
    :param drop: forwarded to the server; presumably controls replacing
        an existing table — TODO confirm.
    :return: JSON response of the flow server.
    :raises Exception: if the local file referenced by the conf is missing.
    """
    kwargs = locals()
    # CLI callers may pass these as strings; the server expects integers.
    kwargs['drop'] = int(kwargs['drop'])
    kwargs['verbose'] = int(kwargs['verbose'])
    config_data, dsl_data = preprocess(**kwargs)
    if config_data.get('use_local_data', 1):
        file_name = config_data.get('file')
        # Relative paths are resolved against the project base directory.
        if not os.path.isabs(file_name):
            file_name = os.path.join(get_project_base_directory(),
                                     file_name)
        if os.path.exists(file_name):
            with open(file_name, 'rb') as fp:
                # Stream the file instead of loading it into memory.
                data = MultipartEncoder(
                    fields={
                        'file': (os.path.basename(file_name), fp,
                                 'application/octet-stream')
                    })
                # tag is a one-element list so the closure below can
                # mutate it (no nonlocal needed).
                tag = [0]

                def read_callback(monitor):
                    # Progress bar, only in verbose mode.
                    if config_data.get('verbose') == 1:
                        sys.stdout.write("\r UPLOADING:{0}{1}".format(
                            "|" * (monitor.bytes_read * 100 // monitor.len),
                            '%.2f%%' % (monitor.bytes_read * 100 // monitor.len)))
                        sys.stdout.flush()
                        # The callback fires more than once at 100%;
                        # print the newline only on the second hit.
                        if monitor.bytes_read / monitor.len == 1:
                            tag[0] += 1
                            if tag[0] == 2:
                                sys.stdout.write('\n')

                data = MultipartEncoderMonitor(data, read_callback)
                # NOTE(review): config_data is serialized into the query
                # string via params= — presumably the server parses it
                # from there; confirm against the data/upload handler.
                return self._post(
                    url='data/upload',
                    data=data,
                    params=json_dumps(config_data),
                    headers={'Content-Type': data.content_type})
        else:
            raise Exception(
                'The file is obtained from the fate flow client machine, but it does not exist, '
                'please check the path: {}'.format(file_name))
    else:
        return self._post(url='data/upload', json=config_data)
def homo_deploy(self, config_data):
    """Deploy a homogeneous (homo) federated model to a serving target.

    When deployment_type is "kfserving" and the deployment parameters
    reference a kube config file, the file is read on this client machine
    and its content is inlined into the payload (the flow server cannot
    read files from the client host).

    :param config_data: deployment conf dict.
    :return: JSON response of the flow server.
    :raises Exception: if the referenced kube_config file is missing.
    """
    kwargs = locals()
    config_data, dsl_data = preprocess(**kwargs)
    if config_data.get('deployment_type') == "kfserving":
        kube_config = config_data.get('deployment_parameters', {}).get('config_file')
        if kube_config:
            # Relative paths are resolved against the project base directory.
            if not os.path.isabs(kube_config):
                kube_config = os.path.join(get_project_base_directory(),
                                           kube_config)
            if os.path.exists(kube_config):
                with open(kube_config, 'r') as fp:
                    config_data['deployment_parameters'][
                        'config_file_content'] = fp.read()
                # Replace the path with the inlined content.
                del config_data['deployment_parameters']['config_file']
            else:
                raise Exception(
                    'The kube_config file is obtained from the fate flow client machine, '
                    'but it does not exist, please check the path: {}'.
                    format(kube_config))
    # BUG FIX: the endpoint string was corrupted to 'model/h**o/deploy';
    # the FATE Flow endpoint for this API is 'model/homo/deploy'.
    return self._post(url='model/homo/deploy', json=config_data)
def load(self, config_data=None, job_id=None):
    """Load a model from an in-memory conf dict or a job id (exactly one).

    Returns a local error dict (retcode 100) when zero or both arguments
    are given; otherwise posts to the flow server.
    """
    # Both-or-neither provided is invalid; decide which error applies.
    if (config_data is None) == (job_id is None):
        if config_data is None:
            return {
                "retcode": 100,
                "retmsg": "Load model failed. No arguments received, "
                          "please provide one of arguments from job id and conf path."
            }
        return {
            "retcode": 100,
            "retmsg": "Load model failed. Please do not provide job id and "
                      "conf path at the same time."
        }
    kwargs = locals()
    config_data, dsl_data = preprocess(**kwargs)
    return self._post(url='model/load', json=config_data)
def migrate(self, conf_path):
    """Migrate a model according to a migration conf file."""
    if not os.path.exists(conf_path):
        raise FileNotFoundError('Invalid conf path, file not exists.')
    params = locals()
    conf, _ = preprocess(**params)
    return self._post(url='model/migrate', json=conf)
def get_predict_conf(self, model_id, model_version):
    """Fetch the predict-conf template of a trained model."""
    params = locals()
    conf, _ = preprocess(**params)
    return self._post(url='model/get/predict/conf', json=conf)
def tag_list(self, job_id):
    """List the tags attached to the model produced by a job."""
    params = locals()
    conf, _ = preprocess(**params)
    return self._post(url='model/model_tag/retrieve', json=conf)
def delete(self, src_party_id, src_role):
    """Revoke privileges granted to a source party/role pair."""
    params = locals()
    conf, _ = preprocess(**params)
    check_config(config=conf,
                 required_arguments=['src_party_id', 'src_role'])
    return self._post(url='permission/delete/privilege', json=conf)
def stop(self, job_id):
    """Stop a running job."""
    # The API expects job_id as a string.
    job_id = str(job_id)
    params = locals()
    conf, _ = preprocess(**params)
    check_config(config=conf, required_arguments=['job_id'])
    return self._post(url='job/stop', json=conf)