Example #1
from flask import flash, redirect, render_template, url_for

import utils


def job(query: str):
    message = None
    # Queries starting with 'test' are used directly as the job id;
    # otherwise the id is looked up from the query string.
    if not query.startswith('test'):
        job_id = utils.get_job_from_string(query)
    else:
        job_id = query

    job = utils.get_job(job_id)
    if not job:
        message = f'There is no job for the word: "{query}"'
    elif job.get_error_path():
        message = f'Error in the job for the word: "{query}"'
    if message:
        flash(message, 'errors')
        return redirect(url_for('main'))
    try:
        job_results = utils.apply_window(utils.get_results(job))
    except FileNotFoundError:
        # Results are not on disk yet, so the job is still running.
        return render_template("job.html",
                               meta=job.get_meta(),
                               inprogress=True,
                               data=[],
                               labels=[])

    return render_template("job.html",
                           meta=job.get_meta(),
                           data=job_results.data,
                           labels=list(map(str, job_results.labels)))
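The view above is shown without a URL rule. A minimal sketch of how it might be wired up, assuming a Flask `app` object in the same module and that the query arrives as a path segment (both are assumptions, the real routing is not shown):

from flask import Flask

app = Flask(__name__)  # assumption: the application object lives here


@app.route('/job/<query>')  # hypothetical URL rule
def job_view(query: str):
    # Delegate to the job() view defined above.
    return job(query)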
Example #2
import pickle

import numpy as np

# root_dir and apply_window are assumed to be defined elsewhere in the module.
def get_test_data(batch_size):
    file_dir = root_dir + 'severance_data/imgs/'
    input_list_pkl = root_dir + 'severance_data/pixel_diff/val_x.pkl'
    target_list_pkl = root_dir + 'severance_data/pixel_diff/val_y.pkl'
    with open(input_list_pkl, 'rb') as f:
        full_input_list = pickle.load(f)
    with open(target_list_pkl, 'rb') as f:
        full_target_list = pickle.load(f)

    assert len(full_input_list) == len(full_target_list)
    # Shuffle inputs and targets with the same permutation so pairs stay aligned.
    indexes = np.arange(len(full_input_list))
    np.random.shuffle(indexes)
    full_input_list = np.array(full_input_list)[indexes]
    full_target_list = np.array(full_target_list)[indexes]
    input_imgs = []
    target_imgs = []
    # Load and window the first batch_size input/target image pairs.
    for x, y in zip(full_input_list[:batch_size],
                    full_target_list[:batch_size]):
        input_imgs.append(apply_window(np.load(file_dir + x + '.npy')))
        target_imgs.append(apply_window(np.load(file_dir + y + '.npy')))
    return np.array(input_imgs), np.array(target_imgs)
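`apply_window` is used throughout these examples but never shown. A minimal sketch of a typical CT intensity-windowing helper, assuming a window defined by a center and width (the default values below are placeholders, not the project's actual settings):

import numpy as np


def apply_window(img, center=40.0, width=400.0):
    # Clamp intensities to [center - width/2, center + width/2] and rescale to [0, 1].
    # center/width are assumed defaults; the real window used here is not shown.
    low, high = center - width / 2.0, center + width / 2.0
    img = np.clip(img.astype(np.float32), low, high)
    return (img - low) / (high - low)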
Example #3
import os

import numpy as np

# root_dir and apply_window are assumed to be defined elsewhere in the module.
def get_train_pair(batch_size):
    file_dir = root_dir + 'severance_data/supervision/'
    d_list = os.listdir(file_dir)
    xs = []
    ys = []
    np.random.shuffle(d_list)
    for dname in d_list[:batch_size]:
        # The supervision target and its source image share the same file name.
        ys.append(np.load(file_dir + dname))
        img_name = root_dir + 'severance_data/imgs/' + dname
        img = np.load(img_name)
        img = apply_window(img)
        xs.append(img)
    return np.array(xs), np.array(ys)
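A quick usage sketch for the two loaders above, just checking shapes on a small batch (assumes both functions live in the same module and `root_dir` points at the data root):

xs, ys = get_train_pair(batch_size=8)
val_x, val_y = get_test_data(batch_size=8)
print(xs.shape, ys.shape)        # e.g. (8, H, W) each, depending on the stored arrays
print(val_x.shape, val_y.shape)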
Example #4
import time

import numpy as np
import pydicom

    # Fragment: the tail of get_data_all(wo_name=...). file_dir, full_input_list,
    # full_target_list, input_imgs and target_imgs are set up earlier (not shown).
    for x, y in zip(full_input_list, full_target_list):
        d1 = pydicom.dcmread(file_dir + x)
        d2 = pydicom.dcmread(file_dir + y)
        input_imgs.append(d1.pixel_array)
        target_imgs.append(d2.pixel_array)
    # wo_name=True: return only the image arrays; otherwise also return the file names.
    if wo_name:
        return np.array(input_imgs), np.array(target_imgs)
    else:
        return np.array(input_imgs), np.array(target_imgs), full_input_list


batch_size = 10
x, y, names = get_data_all(wo_name=False)
supervisions = []
# Build a supervision target for each image pair: a masked pixel-difference map.
for i in range(x.shape[0]):
    start = time.time()
    a = apply_window(x[i])
    b = apply_window(y[i])

    # Mask from the windowed input, cleaned up with a local 'max' then 'min' pass.
    mask = get_mask(a)
    mask2 = get_local_(mask, 'max', 5)
    mask2 = get_local_(mask2, 'min', 7)

    diff = pixel_diff(a, b)
    #    supervisions.append(mask2 * diff)

    end = time.time()
    print('time per img: %.3f,  step: %5d' % (end - start, i))

    # Save the masked difference map as the supervision target for this image.
    np.save('supervision/' + names[i], mask2 * diff)
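`get_mask`, `get_local_` and `pixel_diff` are not shown in these examples. A sketch of plausible implementations, assuming `get_mask` thresholds the windowed image, `get_local_` is a sliding local max/min filter, and `pixel_diff` is an absolute per-pixel difference (all of these are assumptions about the missing code):

import numpy as np
from scipy import ndimage


def get_mask(img, threshold=0.1):
    # Assumed behaviour: binary foreground mask from the windowed image.
    return (img > threshold).astype(np.float32)


def get_local_(img, mode, size):
    # Assumed behaviour: sliding-window local max ('max') or local min ('min'),
    # i.e. a grayscale dilation/erosion with a size x size neighbourhood.
    if mode == 'max':
        return ndimage.maximum_filter(img, size=size)
    return ndimage.minimum_filter(img, size=size)


def pixel_diff(a, b):
    # Assumed behaviour: per-pixel absolute difference between two windowed images.
    return np.abs(a - b)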