Example #1
    def compute_custom_transform(self):
        """
        Compute transform based on added control points.
        """

        curr_points = []
        for i, c in enumerate(self.curr_gscene.anchor_circle_items):
            pos = c.scenePos()
            curr_points.append((pos.x(), pos.y()))

        prev_points = []
        for i, c in enumerate(self.prev_gscene.anchor_circle_items):
            pos = c.scenePos()
            prev_points.append((pos.x(), pos.y()))

        print(self.curr_gscene.active_section, np.array(curr_points))
        print(self.prev_gscene.active_section, np.array(prev_points))

        curr_points = np.array(curr_points)
        prev_points = np.array(prev_points)
        curr_centroid = curr_points.mean(axis=0)
        prev_centroid = prev_points.mean(axis=0)
        curr_points0 = curr_points - curr_centroid
        prev_points0 = prev_points - prev_centroid

        H = np.dot(curr_points0.T, prev_points0)
        U, S, VT = np.linalg.svd(H)
        R = np.dot(VT.T, U.T)

        t = -np.dot(R, curr_centroid) + prev_centroid

        print(R, t)

        # Write to custom transform file
        curr_section_fn = self.curr_gscene.active_section
        prev_section_fn = self.prev_gscene.active_section

        custom_tf_dir = os.path.join(self.stack_data_dir, self.stack + '_custom_transforms', curr_section_fn + '_to_' + prev_section_fn)

        execute_command("rm -rf %(out_dir)s; mkdir -p %(out_dir)s" % dict(out_dir=custom_tf_dir))
        custom_tf_fp = os.path.join(custom_tf_dir, '%(curr_fn)s_to_%(prev_fn)s_customTransform.txt' % \
                    dict(curr_fn=curr_section_fn, prev_fn=prev_section_fn))

        with open(custom_tf_fp, 'w') as f:
            f.write('%f %f %f %f %f %f\n' % (R[0,0], R[0,1], t[0], R[1,0], R[1,1], t[1]))

        self.apply_custom_transform()
        self.update_transformed_images_feeder()
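The SVD step above is the closed-form solution to the orthogonal Procrustes problem (the Kabsch algorithm). A minimal standalone sketch, including the determinant check that guards against a reflection, which the method above omits:

import numpy as np

def rigid_transform_2d(src_points, dst_points):
    # Least-squares rigid transform (R, t) with dst ~ R @ src + t (Kabsch).
    src = np.asarray(src_points, dtype=float)
    dst = np.asarray(dst_points, dtype=float)
    src_c, dst_c = src.mean(axis=0), dst.mean(axis=0)
    H = (src - src_c).T @ (dst - dst_c)
    U, S, VT = np.linalg.svd(H)
    R = VT.T @ U.T
    if np.linalg.det(R) < 0:  # correct an improper rotation (reflection)
        VT[-1, :] *= -1
        R = VT.T @ U.T
    t = dst_c - R @ src_c
    return R, t

# Round-trip check on synthetic points.
theta = 0.3
R_true = np.array([[np.cos(theta), -np.sin(theta)],
                   [np.sin(theta),  np.cos(theta)]])
pts = np.random.rand(5, 2)
R, t = rigid_transform_2d(pts, pts @ R_true.T + [1.0, 2.0])
assert np.allclose(R, R_true) and np.allclose(t, [1.0, 2.0])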
Example #2
    def align_using_elastix(self):
        selected_elastix_parameter_name = str(self.alignment_ui.comboBox_parameters.currentText())
        param_fn = os.path.join(UTILITY_DIR, 'preprocess', 'parameters', 'Parameters_' + selected_elastix_parameter_name + '.txt')

        curr_fn = self.curr_gscene.active_section
        prev_fn = self.prev_gscene.active_section
        out_dir = os.path.join(self.stack_data_dir, self.stack + '_custom_transforms', curr_fn + '_to_' + prev_fn)

        curr_fp = DataManager.get_image_filepath_v2(stack=self.stack, prep_id=None, fn=curr_fn, resol=self.tb_res, version=self.tb_version)
        prev_fp = DataManager.get_image_filepath_v2(stack=self.stack, prep_id=None, fn=prev_fn, resol=self.tb_res, version=self.tb_version )

        # curr_fp = os.path.join(RAW_DATA_DIR, self.stack, curr_fn + '.' + self.tb_fmt)
        # prev_fp = os.path.join(RAW_DATA_DIR, self.stack, prev_fn + '.' + self.tb_fmt)

        execute_command('rm -rf %(out_dir)s; mkdir -p %(out_dir)s; elastix -f %(fixed_fn)s -m %(moving_fn)s -out %(out_dir)s -p %(param_fn)s' % \
        dict(param_fn=param_fn, out_dir=out_dir, fixed_fn=prev_fp, moving_fn=curr_fp))
        # section_filenames = self.get_sorted_filenames(valid_only=self.show_valid_only)

        self.update_transformed_images_feeder()
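execute_command interpolates paths straight into a shell string, which breaks on paths containing spaces. A sketch of the same call using an argument list instead (assuming the elastix binary is on PATH):

import os
import shutil
import subprocess

def run_elastix(fixed_fp, moving_fp, param_fn, out_dir):
    # Recreate the output directory, then call elastix with a list of
    # arguments so no shell quoting is needed.
    shutil.rmtree(out_dir, ignore_errors=True)
    os.makedirs(out_dir)
    subprocess.run(['elastix', '-f', fixed_fp, '-m', moving_fp,
                    '-out', out_dir, '-p', param_fn], check=True)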
Example #3
    def save_everything(self):

        # Dump preprocessing info
        placeholder_indices = [idx+1 for idx, fn in enumerate(self.sorted_filenames) if fn == 'Placeholder']
        placeholder_slide_positions = [(slide_name, pos) for slide_name, x in self.slide_position_to_fn.items() for pos, fn in x.items() if fn == 'Placeholder']
        rescan_indices = [idx+1 for idx, fn in enumerate(self.sorted_filenames) if fn == 'Rescan']
        rescan_slide_positions = [(slide_name, pos) for slide_name, x in self.slide_position_to_fn.items() for pos, fn in x.items() if fn == 'Rescan']

        ul_pos = self.sorted_sections_gscene.corners['ul'].scenePos()
        lr_pos = self.sorted_sections_gscene.corners['lr'].scenePos()
        ul_x = int(ul_pos.x())
        ul_y = int(ul_pos.y())
        lr_x = int(lr_pos.x())
        lr_y = int(lr_pos.y())

        info = {'placeholder_indices': placeholder_indices,
        'placeholder_slide_positions': placeholder_slide_positions,
        'rescan_indices': rescan_indices,
        'rescan_slide_positions': rescan_slide_positions,
        'sorted_filenames': self.sorted_filenames,
        'slide_position_to_fn': self.slide_position_to_fn,
        'first_section': self.first_section,
        'last_section': self.last_section,
        'anchor_fn': self.anchor_fn,
        # 'bbox': (ul_x, lr_x, ul_y, lr_y) #xmin,xmax,ymin,ymax
        'bbox': (ul_x, ul_y, lr_x+1-ul_x, lr_y+1-ul_y) #xmin,ymin,w,h
        }

        from datetime import datetime
        timestamp = datetime.now().strftime("%m%d%Y%H%M%S")
        info_fp = self.stack_data_dir + '/%(stack)s_preprocessInfo_%(timestamp)s.pkl' % {'stack': self.stack, 'timestamp': timestamp}
        with open(info_fp, 'wb') as f:  # pickle requires a binary-mode file
            pickle.dump(info, f)

        execute_command('cd %(stack_data_dir)s && rm -f %(stack)s_preprocessInfo.pkl && ln -s %(stack)s_preprocessInfo_%(timestamp)s.pkl %(stack)s_preprocessInfo.pkl' % {'stack': self.stack, 'timestamp':timestamp, 'stack_data_dir':self.stack_data_dir})

        self.save_crop()
        self.save_sorted_filenames()
        self.save()
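The timestamped-pickle-plus-symlink pattern above shells out for the link; a sketch of the same bookkeeping in pure Python:

import os
import pickle
from datetime import datetime

def dump_versioned_info(info, data_dir, stack):
    # Write a timestamped snapshot and repoint a stable symlink at it.
    timestamp = datetime.now().strftime("%m%d%Y%H%M%S")
    versioned = '%s_preprocessInfo_%s.pkl' % (stack, timestamp)
    with open(os.path.join(data_dir, versioned), 'wb') as f:
        pickle.dump(info, f)
    link = os.path.join(data_dir, '%s_preprocessInfo.pkl' % stack)
    if os.path.lexists(link):
        os.remove(link)
    os.symlink(versioned, link)  # relative target, like `ln -s` run in data_dir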
Example #4
    def apply_custom_transform(self):

        # section_filenames = self.get_sorted_filenames(valid_only=self.show_valid_only)

        # curr_section_fn = section_filenames[self.valid_section_indices[self.curr_gscene.active_i]-1]
        # prev_section_fn = section_filenames[self.valid_section_indices[self.prev_gscene.active_i]-1]
        curr_section_fn = self.curr_gscene.active_section
        prev_section_fn = self.prev_gscene.active_section

        custom_tf_fn = os.path.join(self.stack_data_dir, self.stack+'_custom_transforms', curr_section_fn + '_to_' + prev_section_fn, curr_section_fn + '_to_' + prev_section_fn + '_customTransform.txt')
        with open(custom_tf_fn, 'r') as f:
            t11, t12, t13, t21, t22, t23 = map(float, f.readline().split())

        prev_fp = DataManager.get_image_filepath_v2(stack=self.stack, prep_id=None, fn=prev_section_fn, resol=self.tb_res, version=self.tb_version )
        curr_fp = DataManager.get_image_filepath_v2(stack=self.stack, prep_id=None, fn=curr_section_fn, resol=self.tb_res, version=self.tb_version )
        prev_img_w, prev_img_h = identify_shape(prev_fp)

        output_image_fp = os.path.join(self.stack_data_dir, '%(stack)s_custom_transforms/%(curr_fn)s_to_%(prev_fn)s/%(curr_fn)s_alignedTo_%(prev_fn)s.tif' % \
                        dict(stack=self.stack,
                        curr_fn=curr_section_fn,
                        prev_fn=prev_section_fn) )

        execute_command("convert %(curr_fp)s -virtual-pixel background +distort AffineProjection '%(sx)f,%(rx)f,%(ry)f,%(sy)f,%(tx)f,%(ty)f' -crop %(w)sx%(h)s%(x)s%(y)s\! -flatten -compress lzw %(output_fp)s" %\
        dict(curr_fp=curr_fp,
            output_fp=output_image_fp,
            sx=t11,
            sy=t22,
            rx=t21,
            ry=t12,
            tx=t13,
            ty=t23,
            w=prev_img_w,
            h=prev_img_h,
            x='+0',
            y='+0'))
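For reference, ImageMagick's +distort AffineProjection takes its six parameters as sx,rx,ry,sy,tx,ty and maps (x, y) to (sx*x + ry*y + tx, rx*x + sy*y + ty); since the transform file stores the matrix row by row (t11 t12 t13 / t21 t22 t23), rx and ry must be swapped as above. A small illustrative check:

import numpy as np

def apply_affine(t11, t12, t13, t21, t22, t23, xy):
    # Apply the row-major 2x3 affine from the custom transform file to a point.
    A = np.array([[t11, t12], [t21, t22]])
    b = np.array([t13, t23])
    return A @ np.asarray(xy, dtype=float) + b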
parser.add_argument("out_version", type=str, help="Output image version")
args = parser.parse_args()


out_version = args.out_version

input_spec = load_ini(args.input_spec)
stack = input_spec['stack']
image_name_list = input_spec['image_name_list']
if image_name_list == 'all':
    image_name_list = DataManager.load_sorted_filenames(stack=stack)[0].keys()
prep_id = input_spec['prep_id']
if prep_id == 'None':
    prep_id = None
resol = input_spec['resol']
version = input_spec['version']
if version == 'None':
    version = None

for img_name in image_name_list:
    t = time.time()

    in_fp = DataManager.get_image_filepath(stack=stack, resol=resol, version=version, fn=img_name)
    out_fp = DataManager.get_image_filepath(stack=stack, resol=resol, version=out_version, fn=img_name)
    create_parent_dir_if_not_exists(out_fp)
        
    cmd = """convert "%(in_fp)s" -normalize -depth 8 "%(out_fp)s" """ % {'in_fp': in_fp, 'out_fp': out_fp}
    execute_command(cmd)
    
    sys.stderr.write("Intensity normalize: %.2f seconds.\n" % (time.time() - t))
Example #6

def run_distributed(stack,
                    command,
                    argument_type='single',
                    kwargs_list=None,
                    jobs_per_node=1,
                    node_list=None,
                    local_only=False,
                    use_aws=False):
    """
    Distributed executing a command.
    Args:
        local_only: run on local computer instead of AWS cluster
        jobs_per_node:
        kwargs_list: either dict of lists {kA: [vA1, vA2, ...], kB: [vB1, vB2, ...]} or list of dicts [{kA:vA1, kB:vB1}, {kA:vA2, kB:vB2}, ...].
        argument_type: one of list, list2, single. If command takes one input item as argument, use "single". If command takes a list of input items as argument, use "list2". If command takes an argument called "kwargs_str", use "list".
    """
    fileLocationManager = FileLocationManager(stack)
    print('run_distributed: kwargs_list =', kwargs_list)
    if use_aws:
        execute_command(
            'rm -f /home/ubuntu/stderr_*; rm -f /home/ubuntu/stdout_*')
    else:
        execute_command('rm -f %s; rm -f %s' %
                        (os.path.join(fileLocationManager.mouseatlas_tmp, 'stderr_*'),
                         os.path.join(fileLocationManager.mouseatlas_tmp, 'stdout_*')))

    if local_only:
        sys.stderr.write("Run locally.\n")

        n_hosts = 1

    else:

        # Use a fixed node list rather than letting SGE automatically determine the node list.
        # This allows for control over which input items go to which node.
        if node_list is None:
            node_list = get_node_list()

        n_hosts = len(node_list)
        sys.stderr.write('%d nodes available.\n' % (n_hosts))
        if n_hosts == 0:
            print('NODE LIST LENGTH IS 0. NO HOSTS AVAILABLE')
            return

    if kwargs_list is None:
        kwargs_list = {'dummy': [None] * n_hosts}

    if isinstance(kwargs_list, dict):
        keys, vals = zip(*kwargs_list.items())
        kwargs_list_as_list = [dict(zip(keys, t)) for t in zip(*vals)]
        kwargs_list_as_dict = kwargs_list
    else:
        kwargs_list_as_list = kwargs_list
        keys = kwargs_list[0].keys()
        vals = [t.values() for t in kwargs_list]
        kwargs_list_as_dict = dict(zip(keys, zip(*vals)))

    assert argument_type in [
        'single', 'list', 'list2'
    ], 'argument_type must be one of single, list, list2.'

    create_if_not_exists(fileLocationManager.mouseatlas_tmp)
    for node_i, (fi, li) in enumerate(
            first_last_tuples_distribute_over(0,
                                              len(kwargs_list_as_list) - 1,
                                              n_hosts)):

        temp_script = os.path.join(fileLocationManager.mouseatlas_tmp,
                                   'runall.sh')
        temp_f = open(temp_script, 'w')

        for j, (fj, lj) in enumerate(
                first_last_tuples_distribute_over(fi, li, jobs_per_node)):
            print('node %d, job %d: items %d..%d' % (node_i, j, fj, lj))
            if argument_type == 'list':
                line = command % {
                    'kwargs_str': json.dumps(kwargs_list_as_list[fj:lj + 1])
                }
            elif argument_type == 'list2':
                line = command % {
                    key: json.dumps(vals[fj:lj + 1])
                    for key, vals in kwargs_list_as_dict.items()
                }
            elif argument_type == 'single':
                # It is important to wrap command_template and kwargs_list_str in apostrophes.
                # That lets bash treat them as single strings.
                # Reference: http://stackoverflow.com/questions/15783701/which-characters-need-to-be-escaped-in-bash-how-do-we-know-it
                line = "%(generic_launcher_path)s %(command_template)s %(kwargs_list_str)s" % \
                       {'generic_launcher_path': os.path.join(UTILITY_DIR, 'sequential_dispatcher.py'),
                        'command_template': shell_escape(command),
                        'kwargs_list_str': shell_escape(json.dumps(kwargs_list_as_list[fj:lj+1]))
                        }

            temp_f.write(line + ' &\n')

        temp_f.write('wait')
        temp_f.close()
        os.chmod(temp_script, 0o777)

        # Explicitly specify the node to submit jobs.
        # By doing so, we can control which files are available in the local scratch space of which node.
        # One can then assign downstream programs to specific nodes so they can read corresponding files from local scratch.
        if use_aws:
            stdout_template = '/home/ubuntu/stdout_%d.log'
            stderr_template = '/home/ubuntu/stderr_%d.log'
        else:
            stdout_template = os.path.join(fileLocationManager.mouseatlas_tmp,
                                           'stdout_%d.log')
            stderr_template = os.path.join(fileLocationManager.mouseatlas_tmp,
                                           'stderr_%d.log')

        if local_only:
            print('running temp_script', temp_script)
            with open(stdout_template % node_i, "w") as stdout_f, \
                 open(stderr_template % node_i, "w") as stderr_f:
                subprocess.run(temp_script,
                               shell=True,
                               stdout=stdout_f,
                               stderr=stderr_f)
        else:
            print('qsub -V -q all.q@%(node)s -o %(stdout_log)s -e %(stderr_log)s %(script)s' % \
                  dict(node=node_list[node_i], script=temp_script, stdout_log=stdout_template % node_i, stderr_log=stderr_template % node_i))

            subprocess.call('qsub -V -q all.q@%(node)s -o %(stdout_log)s -e %(stderr_log)s %(script)s' % \
                 dict(node=node_list[node_i], script=temp_script,
                      stdout_log=stdout_template % node_i, stderr_log=stderr_template % node_i),
                 shell=True)

    sys.stderr.write(
        'Jobs submitted. Use wait_qsub_complete() to wait for all execution to finish.\n'
    )
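The two accepted kwargs_list shapes, and the reshaping run_distributed performs between them, as a tiny self-contained demonstration:

# dict of lists -> list of dicts (the same conversion as in the function body)
as_dict = {'fn': ['a', 'b'], 'idx': [0, 1]}
keys, vals = zip(*as_dict.items())
as_list = [dict(zip(keys, t)) for t in zip(*vals)]
assert as_list == [{'fn': 'a', 'idx': 0}, {'fn': 'b', 'idx': 1}]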
Example #7

def transfer_data(from_fp,
                  to_fp,
                  from_hostname,
                  to_hostname,
                  is_dir,
                  include_only=None,
                  exclude_only=None,
                  includes=None):
    assert from_hostname in [
        'localhost', 'workstation', 'oasis', 's3', 'ec2', 's3raw', 'ec2scratch'
    ], 'from_hostname must be one of localhost, workstation, oasis, s3, s3raw, ec2 or ec2scratch.'
    assert to_hostname in [
        'localhost', 'workstation', 'oasis', 's3', 'ec2', 's3raw', 'ec2scratch'
    ], 'to_hostname must be one of localhost, workstation, oasis, s3, s3raw, ec2 or ec2scratch.'

    to_parent = os.path.dirname(to_fp)

    if from_hostname in ['localhost', 'ec2', 'workstation', 'ec2scratch']:
        # upload
        if to_hostname in ['s3', 's3raw']:
            if is_dir:
                if includes is not None:
                    execute_command(
                        'aws s3 cp --recursive \"%(from_fp)s\" \"s3://%(to_fp)s\" --exclude \"*\" %(includes_str)s'
                        % dict(from_fp=from_fp,
                               to_fp=to_fp,
                               includes_str=" ".join(
                                   ['--include ' + incl
                                    for incl in includes])))
                elif include_only is not None:
                    execute_command(
                        'aws s3 cp --recursive \"%(from_fp)s\" \"s3://%(to_fp)s\" --exclude \"*\" --include \"%(include)s\"'
                        % dict(from_fp=from_fp,
                               to_fp=to_fp,
                               include=include_only))
                elif exclude_only is not None:
                    execute_command(
                        'aws s3 cp --recursive \"%(from_fp)s\" \"s3://%(to_fp)s\" --include \"*\" --exclude \"%(exclude)s\"'
                        % dict(from_fp=from_fp,
                               to_fp=to_fp,
                               exclude=exclude_only))
                else:
                    execute_command('aws s3 cp --recursive \"%(from_fp)s\" \"s3://%(to_fp)s\"' % \
                                    dict(from_fp=from_fp, to_fp=to_fp))
            else:
                execute_command('aws s3 cp \"%(from_fp)s\" \"s3://%(to_fp)s\"' % \
                                dict(from_fp=from_fp, to_fp=to_fp))
        else:
            execute_command(
                "ssh %(to_hostname)s 'rm -rf \"%(to_fp)s\" && mkdir -p \"%(to_parent)s\"' && scp -r \"%(from_fp)s\" %(to_hostname)s:\"%(to_fp)s\"" % \
                dict(from_fp=from_fp, to_fp=to_fp, to_hostname=to_hostname, to_parent=to_parent))
    elif to_hostname in ['localhost', 'ec2', 'workstation', 'ec2scratch']:
        # download
        if from_hostname in ['s3', 's3raw']:

            # Clear existing folder/file
            if not include_only and not includes and not exclude_only:
                execute_command(
                    'rm -rf \"%(to_fp)s\" && mkdir -p \"%(to_parent)s\"' %
                    dict(to_parent=to_parent, to_fp=to_fp))

            # Download from S3 using aws commandline interface.
            if is_dir:
                if includes is not None:
                    execute_command(
                        'aws s3 cp --recursive \"s3://%(from_fp)s\" \"%(to_fp)s\" --exclude \"*\" %(includes_str)s'
                        % dict(from_fp=from_fp,
                               to_fp=to_fp,
                               includes_str=" ".join(
                                   ['--include ' + incl
                                    for incl in includes])))
                elif include_only is not None:
                    execute_command(
                        'aws s3 cp --recursive \"s3://%(from_fp)s\" \"%(to_fp)s\" --exclude \"*\" --include \"%(include)s\"'
                        % dict(from_fp=from_fp,
                               to_fp=to_fp,
                               include=include_only))
                elif exclude_only is not None:
                    execute_command(
                        'aws s3 cp --recursive \"s3://%(from_fp)s\" \"%(to_fp)s\" --include \"*\" --exclude \"%(exclude)s\"'
                        % dict(from_fp=from_fp,
                               to_fp=to_fp,
                               exclude=exclude_only))
                else:
                    execute_command(
                        'aws s3 cp --recursive \"s3://%(from_fp)s\" \"%(to_fp)s\"'
                        % dict(from_fp=from_fp, to_fp=to_fp))
            else:
                execute_command(
                    'aws s3 cp \"s3://%(from_fp)s\" \"%(to_fp)s\"' %
                    dict(from_fp=from_fp, to_fp=to_fp))
        else:
            execute_command(
                "scp -r %(from_hostname)s:\"%(from_fp)s\" \"%(to_fp)s\"" %
                dict(from_fp=from_fp, to_fp=to_fp,
                     from_hostname=from_hostname))
    else:
        # log onto another machine and perform upload from there.
        execute_command(
            "ssh %(from_hostname)s \"ssh %(to_hostname)s \'rm -rf \"%(to_fp)s\" && mkdir -p %(to_parent)s && scp -r \"%(from_fp)s\" %(to_hostname)s:\"%(to_fp)s\"\'\"" % \
            dict(from_fp=from_fp, to_fp=to_fp, from_hostname=from_hostname, to_hostname=to_hostname,
                 to_parent=to_parent))
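A hypothetical call (the bucket and paths are illustrative only), pulling one processed directory down from S3 while keeping only TIFFs:

transfer_data(from_fp='my-bucket/CSHL_data_processed/STACK1/prep2',
              to_fp='/data/CSHL_data_processed/STACK1/prep2',
              from_hostname='s3', to_hostname='localhost',
              is_dir=True, include_only='*.tif')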
Example #8
    """Align consecutive images. Possible bad alignment pairs are written into a separate file.
Usage 1: align_compose.py in.ini --op from_none_to_aligned
""")

parser.add_argument("input_spec", type=str, help="input specifier. ini")
parser.add_argument("--op", type=str, help="operation id")

args = parser.parse_args()
print('args', args)

input_spec = load_ini(args.input_spec)
stack = input_spec['stack']
sqlController = SqlController()
fileLocationManager = FileLocationManager(stack)

execute_command('python align_v3.py %s --op %s' % (args.input_spec, args.op))
# execute_command('python compose_v3.py %s --op %s' % (args.input_spec, args.op))

#image_name_list = input_spec['image_name_list']
image_name_list = sqlController.get_image_list(stack, 'destination')
#if image_name_list == 'all':
#    #image_name_list = DataManager.load_sorted_filenames(stack=stack)[0].keys()
#    image_name_list = map(lambda x: x[0], sorted(DataManager.load_sorted_filenames(stack=input_spec['stack'])[0].items(), key=lambda x: x[1]))

#op = load_ini(os.path.join(DATA_ROOTDIR, 'CSHL_data_processed', input_spec['stack'], 'operation_configs', args.op + '.ini'))
op = load_ini(
    os.path.join(fileLocationManager.operation_configs, args.op + '.ini'))
assert op['type'] == 'warp', "Op type must be warp."
assert op['base_prep_id'] == input_spec[
    'prep_id'], "Op requires %s, but input has prep %s." % (
        op['base_prep_id'], input_spec['prep_id'])
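load_ini is a project helper not shown in these examples; a minimal stand-in might look like this sketch (assuming a flat .ini whose values are plain strings; the real helper may coerce types such as lists):

import configparser

def load_ini(fp, section='DEFAULT'):
    # Hypothetical stand-in: return one section of an .ini file as a dict.
    cp = configparser.ConfigParser()
    cp.read(fp)
    return dict(cp[section])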
Example #9

    prev_img_name = kwarg['prev_img_name']
    curr_img_name = kwarg['curr_img_name']
    prev_fp = kwarg['prev_fp']
    curr_fp = kwarg['curr_fp']

    output_subdir = os.path.join(output_dir, curr_img_name + '_to_' + prev_img_name)

    if os.path.exists(output_subdir) and 'TransformParameters.0.txt' in os.listdir(output_subdir):
        sys.stderr.write('Result for aligning %s to %s already exists.\n' % (curr_img_name, prev_img_name))
        if not regenerate:
            sys.stderr.write('Skip.\n')
            continue

    execute_command('rm -rf \"%s\"' % output_subdir)
    create_if_not_exists(output_subdir)

    """
    ret = execute_command('%(elastix_bin)s -f \"%(fixed_fp)s\" -m \"%(moving_fp)s\" -out \"%(output_subdir)s\" -p \"%(param_fp)s\"' % \
            {'elastix_bin': ELASTIX_BIN,
            'param_fp': param_fp,
            'output_subdir': output_subdir,
            'fixed_fp': prev_fp,
            'moving_fp': curr_fp
            })
    """
    command = [ELASTIX_BIN, '-f', prev_fp, '-m', curr_fp, '-p', param_fp, '-out', output_subdir]
    print(" ".join(command))
    ret = subprocess.run(command)
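subprocess.run returns a CompletedProcess, so a follow-up check on the exit status might look like this sketch:

    if ret.returncode != 0:
        sys.stderr.write('elastix failed aligning %s to %s (exit code %d)\n'
                         % (curr_img_name, prev_img_name, ret.returncode))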
Example #10
                'w': str(w),
                'h': str(h)
            }

        elif op_type == 'rotate':
            op_str += ' ' + orientation_argparse_str_to_imagemagick_str[
                op_params]

        else:
            raise Exception("Op_id must be either warp or crop.")

    assert args.input_fp is not None and args.output_fp is not None
    input_fp = args.input_fp
    output_fp = args.output_fp

    create_parent_dir_if_not_exists(output_fp)

    try:
        execute_command("convert \"%(input_fp)s\"  +repage -virtual-pixel background -background %(bg_color)s %(op_str)s -flatten -compress lzw \"%(output_fp)s\"" % \
                {'op_str': op_str,
                 'input_fp': input_fp,
                 'output_fp': output_fp,
                 'bg_color': pad_color})
    except Exception as e:
        sys.stderr.write("ImageMagick convert failed for input_fp %s: %s\n" %
                         (input_fp, e))
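The orientation lookup referenced above is not shown in this example; hypothetical contents (illustrative only, built from standard ImageMagick operators) might be:

orientation_argparse_str_to_imagemagick_str = {
    'rotate90':  '-rotate 90',
    'rotate180': '-rotate 180',
    'rotate270': '-rotate 270',
    'flip':      '-flip',  # mirror vertically
    'flop':      '-flop',  # mirror horizontally
}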