Example #1
def upload_data(data_path, label_path, project_uid, task_num):
    data = np.load(data_path, allow_pickle=True)
    label = np.load(label_path, allow_pickle=True)
    data_split = np.split(data, task_num)
    label_split = np.split(label, task_num)

    init_time = time.time()
    for idx in range(task_num):
        res = requests.post(
            f'{base_url}/project/data/upload',
            data={  # request the presigned upload URLs
                'project_uid': project_uid,
                'data': f'train_data_{idx}',
                'label': f'train_label_{idx}',
                'index': idx
            },
            headers={'AUTH': get_auth_header()})

        url = res.json()['label_url']  # presigned url
        with TemporaryFile() as tf:
            np.save(tf, label_split[idx])
            _ = tf.seek(0)
            requests.put(url=url, data=tf)  # upload labels

        url = res.json()['data_url']  # presigned url
        with TemporaryFile() as tf:
            np.save(tf, data_split[idx])
            _ = tf.seek(0)
            requests.put(url=url, data=tf)  # upload data
        print(f'{idx} uploaded')

    print('data uploading finished')
    print(time.time() - init_time)
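These functions are excerpts from a larger client module; judging from the names they use, the shared preamble would look roughly like the following (base_url, get_auth_header and, for Example #6, the callback and helper functions are defined elsewhere in the same project):

import time
from tempfile import TemporaryFile

import numpy as np
import requests

# Example #6 additionally relies on:
# import h5py
# from tensorflow import keras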
Example #2
def create_project(initial_weight, data=None):
    with TemporaryFile() as tf:
        np.save(tf, np.array(initial_weight, dtype=object))
        _ = tf.seek(0)
        res = requests.post(f'{base_url}/project/create/',
                            files={'weight': tf},
                            data=data,
                            headers={'AUTH': get_auth_header()})

    return res.json()
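A minimal usage sketch, assuming a compiled Keras model is at hand; the model architecture and the metadata fields passed in `data` are assumptions, not part of the original code:

from tensorflow import keras

# Hypothetical model; any compiled Keras model works the same way.
model = keras.Sequential([
    keras.layers.Dense(10, activation='relu', input_shape=(4,)),
    keras.layers.Dense(3, activation='softmax'),
])
model.compile(optimizer='adam', loss='sparse_categorical_crossentropy')

# model.get_weights() returns a list of ndarrays; create_project wraps it in an
# object-dtype array, serializes it with np.save and posts it as the project's
# starting weights.
project = create_project(model.get_weights(),
                         data={'name': 'demo-project'})  # field name is an assumption
print(project)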
Example #3
def get_weight(project_id, params=None):
    res = requests.get(f'{base_url}/project/{project_id}/project/weight',
                       params=params,
                       headers={'AUTH': get_auth_header()})

    with TemporaryFile() as tf:
        tf.write(res.content)
        _ = tf.seek(0)
        weight = np.load(tf, allow_pickle=True)

    return weight
Example #4
def result_learning(project_id, params=None):
    res = requests.get(f'{base_url}/project/{project_id}/result',
                       params=params,
                       headers={'AUTH': get_auth_header()})

    with TemporaryFile() as tf:
        tf.write(res.content)
        _ = tf.seek(0)
        weight = np.load(tf, allow_pickle=True)
    # if res.status_code not in [200, 201, 204]:
    #     raise exc.ResponseException(res)
    return weight
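A short consumption sketch, assuming a Keras model with the matching architecture (e.g. the one from the previous example) is in scope; the project_id values are placeholders:

current = get_weight(1)         # aggregated weights for the current round
final = result_learning(1)      # final weights once training has finished

# Both return an object-dtype ndarray of per-layer weights, so they can be
# loaded into a model with the same architecture:
model.set_weights(final.tolist())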
Example #5
    def upload_each_data(self, data, label, project_uid, idx):
        res = requests.post(
            f'{base_url}/project/data/upload',
            data={  # request the presigned upload URLs
                'project_uid': project_uid,
                'data': f'train_data_{idx}',
                'label': f'train_label_{idx}',
                'index': idx
            },
            headers={'AUTH': self.auth})

        url = res.json()['label_url']  # presigned url
        with TemporaryFile() as tf:
            np.save(tf, label)
            _ = tf.seek(0)
            requests.put(url=url, data=tf)  # upload labels

        url = res.json()['data_url']  # presigned url
        with TemporaryFile() as tf:
            np.save(tf, data)
            _ = tf.seek(0)
            requests.put(url=url, data=tf)  # upload data
Example #6
def start_learning(project_id, params=None):
    callback.stop_learning_tok = False

    res = requests.get(f'{base_url}/project/{project_id}/task/get',
                       params=params,
                       headers={'AUTH': get_auth_header()})
    res_json = res.json()
    print(res_json)

    if not res_json['is_successful']:
        return 'FAIL'
    occupy_task(project_id, {'task_index': res_json['task_index']})

    model_url = res_json['model_url']
    data_url = res_json['data_url']
    label_url = res_json['label_url']

    start_time = time.time()

    with TemporaryFile() as tf:
        tf.write(requests.get(url=model_url).content)
        _ = tf.seek(0)
        model = keras.models.load_model(h5py.File(tf, mode='r'))

    with TemporaryFile() as tf:
        tf.write(requests.get(url=data_url).content)
        _ = tf.seek(0)
        train_data = np.asarray(np.load(tf,
                                        allow_pickle=True)).astype(np.float32)

    with TemporaryFile() as tf:
        tf.write(requests.get(url=label_url).content)
        _ = tf.seek(0)
        train_label = np.asarray(np.load(tf,
                                         allow_pickle=True)).astype(np.float32)

    init_weight = get_weight(project_id)

    model.set_weights(init_weight.tolist())
    task_index = int(res_json['task_index'])
    epoch = int(res_json['epoch'])
    batch_size = int(res_json['batch_size'])
    valid_rate = float(res_json['valid_rate'])

    print(train_data.shape)

    if task_index == -1:
        validate(project_id)
        return 'STOP'

    try:
        model.fit(train_data,
                  train_label,
                  batch_size=batch_size,
                  epochs=epoch,
                  validation_split=valid_rate,
                  callbacks=[callback],
                  verbose=2)
    except (ValueError, TypeError) as e:
        report_error(project_id=project_id, params={'error_message': str(e)})
        return 'ERROR'

    if callback.stop_learning_tok:
        return 'STOP'

    spent_time = time.time() - start_time

    with TemporaryFile() as tf:
        np.save(
            tf,
            np.array(model.get_weights(), dtype=object) -
            np.array(init_weight, dtype=object))
        _ = tf.seek(0)
        update_success = update_learning(project_id=project_id,
                                         params={
                                             'task_index':
                                             res_json['task_index'],
                                             'spent_time': spent_time
                                         },
                                         gradient={'gradient': tf})

    if not update_success:
        return 'FAIL'

    return 'SUCCESS'
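A hedged driver sketch showing how start_learning might be polled by a worker until the project stops; the project id and the back-off interval are assumptions, not part of the original code:

import time

status = 'SUCCESS'
while status in ('SUCCESS', 'FAIL'):
    status = start_learning(project_id=1)  # placeholder project id
    if status == 'FAIL':
        time.sleep(5)  # no task assigned yet; back off before polling again

print('worker exited with status:', status)  # 'STOP' or 'ERROR'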