"""Demo script: upload CSV files to nn00002's test data node, then configure
its source info and preprocessing via the workflow REST API.

Fix: the script had been collapsed onto a single physical line (a SyntaxError
as-is); statement structure is restored. Behavior is otherwise unchanged.
"""
import requests
import json, os
from demo.api_basic_0_util import get_all_files

# API host comes from the environment; the demo server listens on 8989.
url = "{0}:{1}".format(os.environ['HOSTNAME'], "8989")

train_files = get_all_files('/home/dev/csv/')

# Upload raw source files to the test data node.
resp = requests.post('http://' + url + '/api/v1/type/wf/state/textdata/src/local/form/raw/prg/source/nnid/nn00002/ver/1/node/test_data_node/',
                     files=train_files)
# NOTE(review): resp.json() is re-parsed with json.loads — this only works if
# the server returns a JSON-encoded string (double-encoded); confirm with API.
data = json.loads(resp.json())
print("evaluation result : {0}".format(data))

# update source_info
resp = requests.put('http://' + url + '/api/v1/type/wf/state/textdata/src/local/form/raw/prg/source/nnid/nn00002/ver/1/node/test_data_node/',
                    json={
                        "source_server": "local",
                        "source_sql": "all",
                        "max_sentence_len": 50
                    })
data = json.loads(resp.json())
print("evaluation result : {0}".format(data))

# update preprocess
# preprocess : kkma, twiter, mecab, nltk
resp = requests.put('http://' + url + '/api/v1/type/wf/state/textdata/src/local/form/raw/prg/pre/nnid/nn00002/ver/1/node/test_data_node/',
                    json={
                        "preprocess": "mecab",
                    })
data = json.loads(resp.json())
print("evaluation result : {0}".format(data))
"""Demo script: upload image files to nn00005's data source node, then set
the image-source configuration (resize/channel options) via the REST API.

Fix: the script had been collapsed onto a single physical line (a SyntaxError
as-is); statement structure is restored. Behavior is otherwise unchanged.
"""
import requests
import json, os
from demo.api_basic_0_util import get_all_files

# API host comes from the environment; the demo server listens on 8989.
url = "{0}:{1}".format(os.environ['HOSTNAME'], "8989")

train_files = get_all_files('/home/dev/img/')

# Upload the image files to the data source node.
resp = requests.post(
    'http://' + url + '/api/v1/type/wf/state/imgdata/src/local/form/file/prg/source/nnid/nn00005/ver/1/node/datasrc/',
    files=train_files)
# NOTE(review): resp.json() is re-parsed with json.loads — this only works if
# the server returns a JSON-encoded string (double-encoded); confirm with API.
data = json.loads(resp.json())
print("evaluation result : {0}".format(data))

# Configure how the uploaded images are stored and preprocessed.
resp = requests.put(
    'http://' + url + '/api/v1/type/wf/state/imgdata/src/local/form/file/prg/source/nnid/nn00005/ver/1/node/datasrc/',
    json={
        "type": "local image",
        "source_path": "/hoya_src_root/nn00005/1/datasrc",
        # Resize to 100x100, 3 channels (RGB — TODO confirm channel order).
        "preprocess": {
            "x_size": 100,
            "y_size": 100,
            "channel": 3
        },
        "labels": [],
        "store_path": "/hoya_str_root/nn00005/1/datasrc"
    })
data = json.loads(resp.json())
print("evaluation result : {0}".format(data))
"""Demo script: upload text files to nn00002's data node, then configure its
source info (max sentence length 10) and preprocessing via the REST API.

Fix: the script had been collapsed onto a single physical line (a SyntaxError
as-is); statement structure is restored. Behavior is otherwise unchanged.
"""
import requests
import json, os
from demo.api_basic_0_util import get_all_files

# API host comes from the environment; the demo server listens on 8989.
url = "{0}:{1}".format(os.environ['HOSTNAME'], "8989")

train_files = get_all_files('/home/dev/text/')

# Upload raw source files to the data node.
resp = requests.post('http://' + url + '/api/v1/type/wf/state/textdata/src/local/form/raw/prg/source/nnid/nn00002/ver/1/node/data_node/',
                     files=train_files)
# NOTE(review): resp.json() is re-parsed with json.loads — this only works if
# the server returns a JSON-encoded string (double-encoded); confirm with API.
data = json.loads(resp.json())
print("evaluation result : {0}".format(data))

# update source_info
resp = requests.put('http://' + url + '/api/v1/type/wf/state/textdata/src/local/form/raw/prg/source/nnid/nn00002/ver/1/node/data_node/',
                    json={
                        "source_server": "local",
                        "source_sql": "all",
                        "max_sentence_len": 10
                    })
data = json.loads(resp.json())
print("evaluation result : {0}".format(data))

# update preprocess
# preprocess : kkma, twiter, mecab, nltk
resp = requests.put('http://' + url + '/api/v1/type/wf/state/textdata/src/local/form/raw/prg/pre/nnid/nn00002/ver/1/node/data_node/',
                    json={
                        "preprocess": "mecab",
                    })
data = json.loads(resp.json())
print("evaluation result : {0}".format(data))
"""Demo script: upload training text files to nn00004's data encode node and
configure its source info via the workflow REST API.

Fix: the script had been collapsed onto a single physical line (a SyntaxError
as-is); statement structure is restored. Behavior is otherwise unchanged.
"""
import requests
import json, os
from demo.api_basic_0_util import get_all_files

# API host comes from the environment; the demo server listens on 8989.
url = "{0}:{1}".format(os.environ['HOSTNAME'], "8989")

train_files = get_all_files('/home/dev/train/')
# NOTE(review): eval_files is collected but never used in the visible portion
# of this script — presumably uploaded by a later step; confirm.
eval_files = get_all_files('/home/dev/eval/')

# update source_info
resp = requests.post(
    'http://' + url + '/api/v1/type/wf/state/textdata/src/local/form/raw/prg/source/nnid/nn00004/ver/1/node/data_encode_node/',
    files=train_files,
)
# NOTE(review): resp.json() is re-parsed with json.loads — this only works if
# the server returns a JSON-encoded string (double-encoded); confirm with API.
data = json.loads(resp.json())
print("evaluation result : {0}".format(data))

# update source_info
resp = requests.put(
    'http://' + url + '/api/v1/type/wf/state/textdata/src/local/form/raw/prg/source/nnid/nn00004/ver/1/node/data_encode_node/',
    json={
        "source_server": "local",
        "source_sql": "all",
        "max_sentence_len": 50
    })
data = json.loads(resp.json())
print("evaluation result : {0}".format(data))

# update preprocess
"""Demo script: upload image files to nn00005's data source node and set its
image-source configuration (resize/channel options) via the REST API.

Fix: the script had been collapsed onto a single physical line (a SyntaxError
as-is); statement structure is restored. Behavior is otherwise unchanged.
"""
import requests
import json, os
from demo.api_basic_0_util import get_all_files

# API host comes from the environment; the demo server listens on 8989.
url = "{0}:{1}".format(os.environ['HOSTNAME'], "8989")

train_files = get_all_files('/home/dev/img/')

# Upload the image files to the data source node.
resp = requests.post('http://' + url + '/api/v1/type/wf/state/imgdata/src/local/form/file/prg/source/nnid/nn00005/ver/1/node/datasrc/',
                     files=train_files)
# NOTE(review): resp.json() is re-parsed with json.loads — this only works if
# the server returns a JSON-encoded string (double-encoded); confirm with API.
data = json.loads(resp.json())
print("evaluation result : {0}".format(data))

# Configure how the uploaded images are stored and preprocessed.
resp = requests.put('http://' + url + '/api/v1/type/wf/state/imgdata/src/local/form/file/prg/source/nnid/nn00005/ver/1/node/datasrc/',
                    json={
                        "type": "local image",
                        "source_path": "/hoya_src_root/nn00005/1/datasrc",
                        # Resize to 100x100, 3 channels (RGB — TODO confirm).
                        "preprocess": {"x_size": 100,
                                       "y_size": 100,
                                       "channel": 3},
                        "labels": [],
                        "store_path": "/hoya_str_root/nn00005/1/datasrc"
                    })
data = json.loads(resp.json())
print("evaluation result : {0}".format(data))
"""Demo script: upload CSV evaluation files to nn00001's evaldata node and
configure its frame-data source via the workflow REST API.

Fix: the script had been collapsed onto a single physical line (a SyntaxError
as-is); statement structure is restored. The original text also ended mid-
statement — the truncated fragment is preserved below as a comment.
"""
import requests
import json, os
from demo.api_basic_0_util import get_all_files

# API host comes from the environment; the demo server listens on 8989.
url = "{0}:{1}".format(os.environ['HOSTNAME'], "8989")

train_files = get_all_files('/home/dev/csv/')

# Upload the CSV files to the evaluation data node.
resp = requests.post(
    'http://' + url + '/api/v1/type/wf/state/framedata/src/local/form/raw/prg/source/nnid/nn00001/ver/1/node/evaldata/',
    files=train_files)
# NOTE(review): resp.json() is re-parsed with json.loads — this only works if
# the server returns a JSON-encoded string (double-encoded); confirm with API.
data = json.loads(resp.json())
print("evaluation result : {0}".format(data))

# Configure the frame-data source for the evaluation node.
resp = requests.put(
    'http://' + url + '/api/v1/type/wf/state/framedata/src/local/form/raw/prg/source/nnid/nn00001/ver/1/node/evaldata/',
    json={
        "type": "csv",
        "source_server": "local",
        "source_sql": "all",
        "source_path": "test",
        "multi_node_flag": False
    })
data = json.loads(resp.json())
print("evaluation result : {0}".format(data))

# #update preprocess
# NOTE(review): the source was truncated here, mid-statement:
#     resp = requests.put('http://' + url +
# The preprocess-update request (URL tail and JSON body) is missing and must
# be restored from the original file before this step can run.
"""Demo script: upload training text files to nn00004's data encode node,
then configure its source info and preprocessing via the REST API.

Fix: the script had been collapsed onto a single physical line (a SyntaxError
as-is); statement structure is restored. Behavior is otherwise unchanged.
"""
import requests
import json, os
from demo.api_basic_0_util import get_all_files

# API host comes from the environment; the demo server listens on 8989.
url = "{0}:{1}".format(os.environ['HOSTNAME'], "8989")

train_files = get_all_files('/home/dev/train/')
# NOTE(review): eval_files is collected but never used in the visible portion
# of this script — presumably uploaded by a later step; confirm.
eval_files = get_all_files('/home/dev/eval/')

# update source_info
resp = requests.post('http://' + url + '/api/v1/type/wf/state/textdata/src/local/form/raw/prg/source/nnid/nn00004/ver/1/node/data_encode_node/',
                     files=train_files,)
# NOTE(review): resp.json() is re-parsed with json.loads — this only works if
# the server returns a JSON-encoded string (double-encoded); confirm with API.
data = json.loads(resp.json())
print("evaluation result : {0}".format(data))

# update source_info
resp = requests.put('http://' + url + '/api/v1/type/wf/state/textdata/src/local/form/raw/prg/source/nnid/nn00004/ver/1/node/data_encode_node/',
                    json={
                        "source_server": "local",
                        "source_sql": "all",
                        "max_sentence_len": 50
                    })
data = json.loads(resp.json())
print("evaluation result : {0}".format(data))

# update preprocess
# preprocess : kkma, twiter, mecab, nltk
resp = requests.put('http://' + url + '/api/v1/type/wf/state/textdata/src/local/form/raw/prg/pre/nnid/nn00004/ver/1/node/data_encode_node/',
                    json={
                        "preprocess": "mecab",
                    })
"""Demo script: upload text files to nn00002's data node and configure its
source info (max sentence length 10) via the workflow REST API.

Fix: the script had been collapsed onto a single physical line (a SyntaxError
as-is); statement structure is restored. The original text also ended mid-
statement — the truncated fragment is preserved below as a comment.
"""
import requests
import json, os
from demo.api_basic_0_util import get_all_files

# API host comes from the environment; the demo server listens on 8989.
url = "{0}:{1}".format(os.environ['HOSTNAME'], "8989")

train_files = get_all_files('/home/dev/text/')

# Upload raw source files to the data node.
resp = requests.post(
    'http://' + url + '/api/v1/type/wf/state/textdata/src/local/form/raw/prg/source/nnid/nn00002/ver/1/node/data_node/',
    files=train_files)
# NOTE(review): resp.json() is re-parsed with json.loads — this only works if
# the server returns a JSON-encoded string (double-encoded); confirm with API.
data = json.loads(resp.json())
print("evaluation result : {0}".format(data))

# update source_info
resp = requests.put(
    'http://' + url + '/api/v1/type/wf/state/textdata/src/local/form/raw/prg/source/nnid/nn00002/ver/1/node/data_node/',
    json={
        "source_server": "local",
        "source_sql": "all",
        "max_sentence_len": 10
    })
data = json.loads(resp.json())
print("evaluation result : {0}".format(data))

# update preprocess
# preprocess : kkma, twiter, mecab, nltk
# NOTE(review): the source was truncated here, mid-call:
#     resp = requests.put(
#         'http://' + url +
#         '/api/v1/type/wf/state/textdata/src/local/form/raw/prg/pre/nnid/nn00002/ver/1/node/data_node/',
# The request's JSON body (and the closing of the call) is missing and must
# be restored from the original file before this step can run.