Example #1
def compute(file, yaml_file, stack, section, client, bucket, key):
    t0 = time()
    img_fn = os.path.join(os.environ['ROOT_DIR'], file)
    setup_download_from_s3(file, recursive=False)
    run('python {0}/extractPatches.py {1} {2}'.format(os.environ['REPO_DIR'],
                                                      img_fn, yaml_file))
    params = configuration(yaml_file).getParams()
    size_thresholds = params['normalization']['size_thresholds']
    # Local cell directory is derived from the image basename (no extension).
    dot = img_fn.rfind('.')
    slash = img_fn.rfind('/')
    local_dir = 'cells/' + img_fn[slash + 1:dot] + '_cells/'

    def s3_exist(s3_fp):
        try:
            client.stat_object(bucket, s3_fp)
            return True
        except Exception:
            return False

    for size in size_thresholds:
        key_item = 'size_of_' + str(size)
        local_fp = local_dir + str(size) + '.bin'
        s3_fp = stack + '/cells/' + str(section) + '_cells/' + str(size) + '.bin'

        # Retry the upload until the object is visible in the bucket.
        while not s3_exist(s3_fp):
            setup_upload_from_s3(s3_fp, local_fp, recursive=False)
        report = client.stat_object(bucket, s3_fp)
        key[key_item] = int(report.size / 1000)  # object size in KB
        os.remove(os.path.join(os.environ['ROOT_DIR'], local_fp))
    print(file, 'finished in', time() - t0, 'seconds')
    os.remove(img_fn)
    return key
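# Hypothetical driver for compute() (the endpoint, bucket, and file names
# below are illustrative, not from the original repo). client.stat_object
# suggests a MinIO client, so a single-section call might look like:
from minio import Minio

client = Minio('s3.amazonaws.com')  # anonymous read-only client (assumed endpoint)
key = compute('DK39/neuroglancer/140.tif', 'shape_params-aws.yaml',
              stack='DK39', section=140, client=client,
              bucket='mousebraindata-open', key={})
print(key)  # e.g. {'size_of_<threshold>': <object size in KB>, ...}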
Example #2
def main(argv):
    args = parse_cmd_args(argv[1:])
    config = configuration(args)

    dictConfig(config.LOGGING)
    logger.info(
        u'Run main loop. Worker pool size={}. Sleep time is {}.'.format(
            config.WORKER_POOL_SIZE, config.SLEEP
        ))
    parent_pid = os.getpid()
    while keep_running:
        main_loop_function(config, parent_pid)
        sleep(config.SLEEP)

    return config.EXIT_CODE
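# The loop above polls a module-level keep_running flag that is not shown in
# this excerpt. A minimal sketch of how such a flag is typically wired up,
# assuming SIGTERM/SIGINT should trigger a graceful shutdown:
import signal

keep_running = True

def _request_shutdown(signum, frame):
    # Let the current main_loop_function iteration finish, then exit the loop.
    global keep_running
    keep_running = False

signal.signal(signal.SIGTERM, _request_shutdown)
signal.signal(signal.SIGINT, _request_shutdown)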
Example #3
    from time import time
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "filestem",
        type=str,
        help="Process <filestem>.tif into <filestem>_extracted.pkl")
    parser.add_argument("yaml",
                        type=str,
                        help="Path to Yaml file with parameters")

    # Add parameters for the Mexican-hat size, cell size, threshold, and percentile.
    # Define the file name based on size; use it for the log file and the contours image.
    # Save the parameters in a log file.

    args = parser.parse_args()
    config = configuration(args.yaml)
    params = config.getParams()

    _dir = params['paths']['data_dir'] + '/tiles/'
    stem = args.filestem
    infile = _dir + stem + '.tif'
    out_stem = stem + '.' + params['name']
    pkl_dir = params['paths']['pickle_subdir']
    pkl_out_file = _dir + pkl_dir + '/' + out_stem + '.pkl'
    annotated_infile = _dir + out_stem + '_contours.jpg'

    extractor = patch_extractor(infile, params)

    tile = cv2.imread(infile)
    gray = cv2.cvtColor(tile, cv2.COLOR_BGR2GRAY)
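    # Caveat (hypothetical note, not part of the original): cv2.imread returns
    # None rather than raising when infile is missing or unreadable, so the
    # cvtColor call above fails with an opaque error. A guard between the two
    # calls gives a clearer failure:
    #
    #     tile = cv2.imread(infile)
    #     if tile is None:
    #         sys.exit('Could not read image: ' + infile)
    #     gray = cv2.cvtColor(tile, cv2.COLOR_BGR2GRAY)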
Example #4
            self.count.append(0)
        # Write K shuffled shards: each draws 100000 patches, picking source
        # files according to the probability vector prob.
        for i in range(self.K):
            self.fp = open(self.saveDir + '/permuted-' + str(i) + '.bin', 'bw')
            for choice in np.random.choice(len(files), 100000, p=prob):
                fn = files[choice]
                V = np.fromfile(fn, np.float16)
                V = V.reshape([-1, self.size**2])
                # Consume the next unread row of the chosen file.
                self.fp.write(V[self.count[choice], :])
                self.count[choice] += 1
            self.fp.close()
            clock(str(i) + ' files finished')


if __name__ == '__main__':

    yamlfile = os.environ['REPO_DIR'] + 'shape_params-aws.yaml'
    params = configuration(yamlfile).getParams()
    stack = 'DK39'
    root_dir = os.environ['ROOT_DIR']

    clock('Process Begin')
    t0 = time()
    setup_download_from_s3(stack + '/cells/')
    clock('Download From S3')
    sorter = Sorter(src_root=root_dir + stack + '/cells/')
    size_thresholds = params['normalization']['size_thresholds']
    for size in size_thresholds:
        sorter.sort_file(size, stem=root_dir + 'permute/permuted')
        clock('Complete files of size ' + str(size))
        print('Complete files of size ' + str(size), time() - t0, 'seconds')
    log_fp = 'TimeLog/'
    if not os.path.exists(log_fp):
        os.makedirs(log_fp)
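    # Sanity-check sketch (hypothetical, not in the original script): each
    # permuted shard is a flat float16 stream with one size**2 row per patch,
    # so it can be read back and inspected like this (path is illustrative):
    chk_size = size_thresholds[0]
    V = np.fromfile(root_dir + 'permute/permuted-0.bin', dtype=np.float16)
    V = V.reshape([-1, chk_size ** 2])
    print(V.shape[0], 'patches of side', chk_size)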
Example #5
#!/usr/bin/env python3

from os.path import isfile, getmtime
from glob import glob
from time import sleep, time
from os import system
from subprocess import Popen, PIPE
from lib.utils import configuration

config = configuration('shape_params.yaml')
params = config.getParams()

scripts_dir = params['paths']['scripts_dir']

local_data = params['paths']['data_dir']
script = 'process_file.py'
yaml_file = 'shape_params.yaml'
stack = 's3://mousebraindata-open/MD657'
exec_dir = params['paths']['scripts_dir']


def runPipe(command):
    # Run a command without a shell, capturing decoded stdout/stderr lines.
    print('cmd=', command)
    p = Popen(command.split(), stdout=PIPE, stderr=PIPE)
    out, err = p.communicate()
    stdout = out.decode("utf-8").split('\n')
    stderr = err.decode("utf-8").split('\n')
    return stdout, stderr
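
# Hypothetical usage (not in the original script): runPipe handles any
# command that needs no shell features, since the string is split on
# whitespace, e.g.
#
#     stdout, stderr = runPipe('ls ' + local_data)
#     print('\n'.join(stdout))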


def run(command, out):