Example #1
def load_secrets(mount_path='/secret/kubeaction'):
    # Read every file mounted under mount_path (e.g. a Kubernetes secret
    # volume) into a dict keyed by file name.
    _secrets = {}
    for f in files_list(mount_path):
        with open(path.join(mount_path, f), 'r') as raw:
            _secrets[f] = raw.read()

    print('secrets', _secrets)
    return _secrets
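
Both this and the later examples call a files_list helper defined elsewhere in their modules (path above is presumably os.path). For reference only, here is a minimal sketch of what such a helper might look like, assuming it simply lists the regular files under a directory; the optional second argument used by the other examples is not specified, so the sketch ignores it:

import os

def files_list(folder, mode=None):
    # Hypothetical stand-in, not part of the original examples: return the
    # names of the regular files found directly under `folder`. The `mode`
    # argument appears in the other examples, but its use is not shown,
    # so it is ignored in this sketch.
    return [f for f in os.listdir(folder)
            if os.path.isfile(os.path.join(folder, f))]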
Example #2
def main(mode):
    # Collect the image files for this run mode and downsample them in parallel.
    in_fold = os.path.join(root_folder, mode)
    print("Reading images from %s" % in_fold)
    print("Run mode: %s" % mode.upper())
    names = files_list(in_fold, mode)
    print("Total number of files: %d" % len(names))

    # Redirect stderr to a log file and create a parallel pool
    errf = open('err.log', 'w')
    sys.stderr = errf
    with Parallel(n_jobs=8) as parallel:
        rets = parallel(delayed(downsample)(fname_label, mode)
                        for fname_label in names)
        print("Done. A total of %d files processed." % len(rets))
    errf.close()
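
This snippet likewise depends on names defined elsewhere in its module. A sketch of the imports it would plausibly need, assuming Parallel and delayed come from joblib, with root_folder, files_list and downsample provided by the surrounding code:

import os
import sys
from joblib import Parallel, delayed

# root_folder, files_list and downsample are assumed to be defined in the
# original module; they are not shown in the example.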
Example #3
test_path = '/home/ubuntu/dataset/test/test/'
job_id = "model_303_01_14.csv"

# net01 = caffe.Net(dep_path01, model_path01, caffe.TEST)
# net01.blobs['data'].reshape(*(batch_size, 3, input_shape[0], input_shape[1]))

# net14 = caffe.Net(dep_path14, model_path14, caffe.TEST)
# net14.blobs['data'].reshape(*(batch_size, 3, input_shape[0], input_shape[1]))

# transformer = caffe.io.Transformer({'data': net01.blobs['data'].data.shape})
# transformer.set_mean('data', (caffe.io.load_image(mean_img)*255).mean(0).mean(0))
# transformer.set_transpose('data', (2,0,1))
# transformer.set_channel_swap('data', (2,1,0))
# transformer.set_raw_scale('data', 255.0)

# Sort the test file names with the key_names ordering defined elsewhere.
names = sorted(files_list(test_path, "test"), key=key_names)
num_files = len(names)
print("Total number of test files: %d" % num_files)

# buffering=0 gives an unbuffered file object (only valid for text mode in Python 2).
out_file = open("pred_%s.csv" % (job_id), "w", 0)

# with Parallel(n_jobs=5) as parallel:
#     for i in xrange(0, num_files, uniq_im_per_batch):
#         upper_idx = min(i + uniq_im_per_batch, num_files)
#         files_batch = names[i:upper_idx]
#         num_uniq_im = (upper_idx - i)
#         ret = parallel(delayed(process_img)(fname_lab, crop_shape, scale, random_draws, "test", False) 
#                        for fname_lab in files_batch)
#         ret = np.asarray(ret).reshape((num_uniq_im * (random_draws + 1), crop_shape[0], crop_shape[1], 3))
#         if ret.shape[0] < batch_size:
#             pad = np.zeros((batch_size - num_uniq_im, crop_shape[0], crop_shape[1], 3), dtype=ret.dtype)