Example #1
    def calibrate_DM(self, rcond=0.1, fname_append=None, force_new=False):
        """Calculate and save the imagae reconstruction matrix for the AO system"""
        from os import path

        if fname_append is not None:
            fname = "rmat_" + str(
                self.DM.num_actuators) + "_" + str(rcond).replace(
                    ".", "") + "_" + fname_append
        else:
            fname = "rmat_" + str(
                self.DM.num_actuators) + "_" + str(rcond).replace(".", "")

        #first we make a reference image for an unaberrated wavefront passing through the pwfs
        wf = hc.Wavefront(self.ap, wavelength=self.reference_wavelength)

        wf.total_power = 1

        wf_pwfs = self.pwfs.forward(wf)

        self.ref_image = self.read_out(wf_pwfs, poisson=False)

        if path.exists(fname + ".npy") and not force_new:
            print("trying to load reconstruction matrix from " + fname)
            rmat = np.load(fname + ".npy")
            print("loaded cached rmat")
        else:
            print("computing reconstruction matrix")
            #compute the interaction matrix relating incoming aberrations to the WFS response
            probe_disp = 0.01 * self.wavelength
            slopes = []

            for i in range(self.DM.num_actuators):
                printProgressBar(i, self.DM.num_actuators)

                slope = 0

                for s in (-1, 1):
                    disps = np.zeros((self.DM.num_actuators, ))
                    disps[i] = s * probe_disp
                    self.DM.actuators = disps

                    wf_dm = self.DM.forward(wf)  #pass through DM
                    wf_dm_pwfs = self.pwfs.forward(wf_dm)  #pass through wfs

                    image = self.read_out(wf_dm_pwfs)
                    slope += s * (image - self.ref_image) / (2 * probe_disp)

                slopes.append(slope)

            print("matrix inversion...")
            basis = hc.ModeBasis(slopes)
            rmat = hc.inverse_tikhonov(basis.transformation_matrix,
                                       rcond=rcond,
                                       svd=None)
            np.save(fname, rmat)

            self.DM.actuators = np.zeros((self.DM.num_actuators, ))

        self.rmat = rmat
        return rmat
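
Every example on this page relies on a printProgressBar helper that is not shown. A minimal sketch consistent with how it is called here (positional iteration and total, plus optional prefix, suffix and length keywords) might look like the following; the actual helper in these projects may differ.

import sys

def printProgressBar(iteration, total, prefix='', suffix='', length=50, fill='#'):
    """Render an in-place terminal progress bar (hypothetical helper, signature inferred from the calls above)."""
    fraction = iteration / float(total) if total else 1.0
    filled = int(length * fraction)
    bar = fill * filled + '-' * (length - filled)
    sys.stdout.write('\r%s |%s| %5.1f%% %s' % (prefix, bar, 100 * fraction, suffix))
    sys.stdout.flush()
    if iteration >= total:
        sys.stdout.write('\n')
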
Example #2
    def run_closed_loop(self,
                        leakage,
                        gain,
                        fileout,
                        run_for=10,
                        freq=1000,
                        save_every=100,
                        wl=None):
        """like the original function but use's Mike's phase screen code"""

        #reset DM
        self.DM.actuators[:] = 0.

        t_arr = np.linspace(0, run_for, int(run_for * freq) + 1)

        wf_pupil, wf_pupil_hires, wf_focal = self.wf_pupil, self.wf_pupil_hires, self.wf_focal

        perfect_u = get_u(wf_focal)

        print("perfect psf of current optical system: ")
        plt.imshow(np.abs(perfect_u))
        plt.show()

        num_saves = int(len(t_arr) / save_every)

        DMshapes = np.empty((num_saves, len(self.DM.actuators)))
        avg = np.zeros_like(perfect_u, dtype=float)  # np.float was removed in NumPy 1.24
        psf_arr = np.zeros((num_saves, perfect_u.shape[0], perfect_u.shape[1]),
                           dtype=np.complex128)
        times = []

        norm = np.sqrt(np.sum(perfect_u * np.conj(perfect_u)))

        print("simulating AO system...")

        j = 0
        for i in range(len(t_arr)):
            self.get_screen()
            self.update_DM(wf_pupil, leakage, gain)

            full_prop = (i != 0 and i % save_every == 0)

            if full_prop:
                # do a full optical propagation
                _wf = self.propagate(wf_pupil_hires)
                u = get_u(_wf, norm)
                avg += np.real(u * np.conj(u))
                psf_arr[j] = u
                DMshapes[j] = self.DM.actuators
                times.append(t_arr[i])
                j += 1
                printProgressBar(j, num_saves)

        avg /= j
        avg = np.sqrt(avg)

        s = get_strehl(avg, perfect_u / norm)

        with h5py.File(fileout + ".hdf5", "w") as f:
            f.create_dataset("DMs", data=DMshapes)
            f.create_dataset("psfs", data=psf_arr)
            f.create_dataset("ts", data=times)

            self.save_args_to_file(f)

        return u, avg, s
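
run_closed_loop writes the DM shapes, complex PSF frames, and save times to an HDF5 file. A short sketch of reading them back with h5py, using the dataset names written above and a hypothetical output path, could be:

import numpy as np
import h5py

# hypothetical path: whatever string was passed to run_closed_loop as `fileout`
with h5py.File("ao_run.hdf5", "r") as f:
    DMshapes = f["DMs"][:]   # (num_saves, num_actuators) actuator commands
    psfs = f["psfs"][:]      # (num_saves, ny, nx) complex focal-plane fields
    ts = f["ts"][:]          # save times in seconds

# time-averaged intensity over the saved frames
avg_intensity = np.mean(np.abs(psfs) ** 2, axis=0)
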
Example #3
def main(config):

    rm_mkdir(config.train_path)
    rm_mkdir(config.train_GT_path)
    rm_mkdir(config.valid_path)
    rm_mkdir(config.valid_GT_path)
    rm_mkdir(config.test_path)
    rm_mkdir(config.test_GT_path)

    filenames = os.listdir(config.origin_data_path)
    data_list = []
    GT_list = []

    for filename in filenames:
        ext = os.path.splitext(filename)[-1]
        if ext == '.jpg':
            filename = filename.split('_')[-1][:-len('.jpg')]
            data_list.append('ISIC_' + filename + '.jpg')
            GT_list.append('ISIC_' + filename + '_segmentation.png')

    num_total = len(data_list)
    num_train = int(
        (config.train_ratio /
         (config.train_ratio + config.valid_ratio + config.test_ratio)) *
        num_total)
    num_valid = int(
        (config.valid_ratio /
         (config.train_ratio + config.valid_ratio + config.test_ratio)) *
        num_total)
    num_test = num_total - num_train - num_valid

    print('\nNum of train set : ', num_train)
    print('\nNum of valid set : ', num_valid)
    print('\nNum of test set : ', num_test)

    Arange = list(range(num_total))
    random.shuffle(Arange)

    for i in range(num_train):
        idx = Arange.pop()

        src = os.path.join(config.origin_data_path, data_list[idx])
        dst = os.path.join(config.train_path, data_list[idx])
        copyfile(src, dst)

        src = os.path.join(config.origin_GT_path, GT_list[idx])
        dst = os.path.join(config.train_GT_path, GT_list[idx])
        copyfile(src, dst)

        printProgressBar(i + 1,
                         num_train,
                         prefix='Producing train set:',
                         suffix='Complete',
                         length=50)

    for i in range(num_valid):
        idx = Arange.pop()

        src = os.path.join(config.origin_data_path, data_list[idx])
        dst = os.path.join(config.valid_path, data_list[idx])
        copyfile(src, dst)

        src = os.path.join(config.origin_GT_path, GT_list[idx])
        dst = os.path.join(config.valid_GT_path, GT_list[idx])
        copyfile(src, dst)

        printProgressBar(i + 1,
                         num_valid,
                         prefix='Producing valid set:',
                         suffix='Complete',
                         length=50)

    for i in range(num_test):
        idx = Arange.pop()

        src = os.path.join(config.origin_data_path, data_list[idx])
        dst = os.path.join(config.test_path, data_list[idx])
        copyfile(src, dst)

        src = os.path.join(config.origin_GT_path, GT_list[idx])
        dst = os.path.join(config.test_GT_path, GT_list[idx])
        copyfile(src, dst)

        printProgressBar(i + 1,
                         num_test,
                         prefix='Producing test set:',
                         suffix='Complete',
                         length=50)
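
rm_mkdir is not defined in this example; judging from its name and how it is used (resetting the output folders before the split), it presumably removes a directory if it exists and recreates it empty. A sketch under that assumption:

import os
import shutil

def rm_mkdir(dir_path):
    """Delete dir_path if it exists, then recreate it empty (assumed behavior of the helper used above)."""
    if os.path.exists(dir_path):
        shutil.rmtree(dir_path)
    os.makedirs(dir_path)
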
Example #4
def main(config):

    rm_mkdir(config.train_path)
    rm_mkdir(config.train_GT_path)
    rm_mkdir(config.valid_path)
    rm_mkdir(config.valid_GT_path)
    rm_mkdir(config.test_path)
    rm_mkdir(config.test_GT_path)

    # Get list of all masks
    filenames = os.listdir(config.origin_GT_path)

    data_list = filenames
    GT_list = filenames

    num_total = len(data_list)
    num_train = int(
        (config.train_ratio /
         (config.train_ratio + config.valid_ratio + config.test_ratio)) *
        num_total)
    num_valid = int(
        (config.valid_ratio /
         (config.train_ratio + config.valid_ratio + config.test_ratio)) *
        num_total)
    num_test = num_total - num_train - num_valid

    print('\nNum of train set : ', num_train)
    print('\nNum of valid set : ', num_valid)
    print('\nNum of test set : ', num_test)

    Arange = list(range(num_total))
    random.shuffle(Arange)

    for i in range(num_train):
        idx = Arange.pop()

        src = os.path.join(config.origin_data_path, data_list[idx])
        dst = os.path.join(config.train_path, data_list[idx])
        copyfile(src, dst)

        src = os.path.join(config.origin_GT_path, GT_list[idx])
        dst = os.path.join(config.train_GT_path, GT_list[idx])
        copyfile(src, dst)

        printProgressBar(i + 1,
                         num_train,
                         prefix='Producing train set:',
                         suffix='Complete',
                         length=50)

    for i in range(num_valid):
        idx = Arange.pop()

        src = os.path.join(config.origin_data_path, data_list[idx])
        dst = os.path.join(config.valid_path, data_list[idx])
        copyfile(src, dst)

        src = os.path.join(config.origin_GT_path, GT_list[idx])
        dst = os.path.join(config.valid_GT_path, GT_list[idx])
        copyfile(src, dst)

        printProgressBar(i + 1,
                         num_valid,
                         prefix='Producing valid set:',
                         suffix='Complete',
                         length=50)

    for i in range(num_test):
        idx = Arange.pop()

        src = os.path.join(config.origin_data_path, data_list[idx])
        dst = os.path.join(config.test_path, data_list[idx])
        copyfile(src, dst)

        src = os.path.join(config.origin_GT_path, GT_list[idx])
        dst = os.path.join(config.test_GT_path, GT_list[idx])
        copyfile(src, dst)

        printProgressBar(i + 1,
                         num_test,
                         prefix='Producing test set:',
                         suffix='Complete',
                         length=50)
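
Examples #3 and #4 repeat the same copy loop for the train, valid and test splits. One way to remove that duplication, keeping the same copyfile and printProgressBar behavior and assuming the same config fields, is a small helper (a sketch, not the original code):

import os
from shutil import copyfile

def copy_split(indices, data_list, GT_list, config, data_dst, GT_dst, name):
    """Copy one split's images and masks, mirroring the loops in the examples above."""
    for i, idx in enumerate(indices):
        copyfile(os.path.join(config.origin_data_path, data_list[idx]),
                 os.path.join(data_dst, data_list[idx]))
        copyfile(os.path.join(config.origin_GT_path, GT_list[idx]),
                 os.path.join(GT_dst, GT_list[idx]))
        printProgressBar(i + 1, len(indices),
                         prefix='Producing %s set:' % name,
                         suffix='Complete', length=50)

# e.g. the train split, with Arange already shuffled:
# copy_split([Arange.pop() for _ in range(num_train)], data_list, GT_list,
#            config, config.train_path, config.train_GT_path, 'train')
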
Example #5
def main(config):
    rm_mkdir(config.train_path)
    rm_mkdir(config.train_GT_path)
    rm_mkdir(config.valid_path)
    rm_mkdir(config.valid_GT_path)

    origin_GT_file = csv.DictReader(open(config.origin_GT_path+config.origin_GT_file,  encoding='UTF-8-sig'))
    origin_data_list = [i for i in origin_GT_file]
    data_list = []
    for index in range(len(origin_data_list)):
        filename = origin_data_list[index]['FileName']
        multi_GT = origin_data_list[index]['Code'].split(';')
        for j in multi_GT:
            data = {}
            data['FileName'] = filename
            data['Code'] = j
            data_list.append(data)

    train_list = []
    valid_list = []
    test_file = csv.DictReader(open(config.test_GT_path+'test_label.csv', encoding='UTF-8-sig'))
    test_list = [i for i in test_file]

    num_total = len(data_list)
    num_train = int((config.train_ratio/(config.train_ratio+config.valid_ratio+config.test_ratio))*num_total)
    num_valid = int((config.valid_ratio/(config.train_ratio+config.valid_ratio+config.test_ratio))*num_total)
    num_test = len(test_list)

    print('\nNum of train set : ', num_train)
    print('\nNum of valid set : ', num_valid)
    print('\nNum of test set : ', num_test)
    
    Arange = list(range(num_total))
    random.shuffle(Arange)

    for i in range(num_train):
        idx = Arange.pop()
        filename = data_list[idx]['FileName']
        src = os.path.join(config.origin_data_path, filename)
        dst = os.path.join(config.train_path, filename)
        copyfile(src, dst)
        
        train_list.append(data_list[idx])

        printProgressBar(i + 1, num_train, prefix='Producing train set:', suffix='Complete', length=50)

    # write the train labels once, after the whole train split has been copied
    train_data_writer = csv.DictWriter(open(config.train_GT_path + 'train_label.csv', 'w'), origin_GT_file.fieldnames)
    train_data_writer.writeheader()
    train_data_writer.writerows(train_list)

    for i in range(num_valid):
        idx = Arange.pop()
        filename = data_list[idx]['FileName']
        src = os.path.join(config.origin_data_path, filename)
        dst = os.path.join(config.valid_path, filename)
        copyfile(src, dst)

        valid_list.append(data_list[idx])
        
        printProgressBar(i + 1, num_valid, prefix='Producing valid set:', suffix='Complete', length=50)

    valid_data_writer = csv.DictWriter(open(config.valid_GT_path+'valid_label.csv','w'), origin_GT_file.fieldnames)
    valid_data_writer.writeheader()
    valid_data_writer.writerows(valid_list)
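
Example #5 opens its CSV files without ever closing them, so the label files are only flushed when the interpreter exits. A sketch of the same label-writing step with a with block (same fieldnames and train_list as above) is:

import csv

# write the collected train labels once, closing the file handle deterministically
with open(config.train_GT_path + 'train_label.csv', 'w', newline='') as f:
    writer = csv.DictWriter(f, origin_GT_file.fieldnames)
    writer.writeheader()
    writer.writerows(train_list)
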
Example #6
    def prop2end(self,
                 u,
                 xyslice,
                 zslice,
                 u1_func=None,
                 writeto=None,
                 ucrit=5.e-3,
                 remesh_every=20,
                 dynamic_n0=True):
        mesh = self.mesh
        PML = mesh.PML

        za_keep = mesh.za[zslice]
        if isinstance(za_keep, np.ndarray):
            minz, maxz = za_keep[0], za_keep[-1]
            shape = (len(za_keep), *mesh.xg[xyslice].shape)
        else:
            raise NotImplementedError("zslice must select a range of z planes, not a single index")

        self.field = np.zeros(shape, dtype=c128)

        xa_in = np.linspace(-mesh.xw / 2, mesh.xw / 2, u.shape[0])
        ya_in = np.linspace(-mesh.yw / 2, mesh.yw / 2, u.shape[1])

        dx0 = xa_in[1] - xa_in[0]
        dy0 = ya_in[1] - ya_in[0]

        print("normalizing input file")
        normalize(u, weight=dx0 * dy0)

        __z = 0

        #pull xy mesh
        xy = mesh.xy
        dx, dy = xy.dx0, xy.dy0

        #resample the field onto the smaller xy mesh (in the smaller mesh's computation zone!)
        u0 = xy.resample_complex(u, xa_in, ya_in, xy.xa[PML:-PML],
                                 xy.ya[PML:-PML])

        #now we pad w/ zeros to extend it into the PML zone
        u0 = np.pad(u0, ((PML, PML), (PML, PML)))

        #initial mesh refinement
        print("initial remesh")
        xy.refine_base(u0, ucrit)

        #xy.plot_mesh(4)

        weights = xy.get_weights()

        #now resample the field onto the smaller *non-uniform* xy mesh
        u = xy.resample_complex(u, xa_in, ya_in, xy.xa[PML:-PML],
                                xy.ya[PML:-PML])
        u = np.pad(u, ((PML, PML), (PML, PML)))

        #renormalize to correct for the slight power change introduced by resampling (~0.1% measured for psflo; worth rechecking)
        norm_nonu(u, weights)

        counter = 0
        total_iters = self.mesh.zres

        print("propagating field...")

        z__ = 0

        #step 0 setup

        self.update_grid_cor_facs('x')
        self.update_grid_cor_facs('y')

        # initial array allocation
        _trimatsx, rmatx, gx, _trimatsy, rmaty, gy, IORsq__, _IORsq_, __IORsq = self.allocate_mats(
        )

        self.precomp_trimats('x')
        self.precomp_trimats('y')

        self.rmat_precomp('x')
        self.rmat_precomp('y')

        self._pmlcorrect(_trimatsx, 'x')
        self._pmlcorrect(_trimatsy, 'y')

        #get the current IOR dist
        self.set_IORsq(IORsq__, z__)

        print("initial shape: ", xy.shape)
        for i in range(total_iters):
            printProgressBar(i, total_iters - 1)
            u0 = xy.get_base_field(u)
            u0c = np.conj(u0)
            weights = xy.get_weights()

            ## Total power monitor ##
            self.totalpower[i] = overlap_nonu(u, u, weights)
            print(self.totalpower[i])

            ## Other monitors ##
            if u1_func is not None:
                lp = norm_nonu(u1_func(xy.xg, xy.yg), weights)
                self.power[i] = power(overlap_nonu(u, lp, weights), 2)

            _z_ = z__ + mesh.half_dz
            __z = z__ + mesh.dz

            if minz <= __z <= maxz:
                ix0, ix1, ix2, ix3 = mesh.get_loc()
                mid = int(u0.shape[1] / 2)

                self.field[counter][ix0:ix1 + 1] = u0[:, mid]  ## FIX ##
                counter += 1

            #avoid remeshing on step 0
            if (i + 1) % remesh_every == 0:
                #xy.plot_mesh(4)
                ## update the effective index
                if dynamic_n0:
                    #update the effective index
                    base = xy.get_base_field(IORsq__)
                    self.n02 = xy.dx0 * xy.dy0 * np.real(
                        np.sum(u0c * u0 * base)) / self.k02

                oldxm, oldxM = xy.xm, xy.xM
                oldym, oldyM = xy.ym, xy.yM

                oldxw, oldyw = xy.xw, xy.yw

                #expand the grid if necessary
                new_xw = mesh.xwfunc(__z)
                new_yw = mesh.ywfunc(__z)

                new_xw, new_yw = xy.snapto(new_xw, new_yw)

                xy.reinit(new_xw,
                          new_yw)  #set grid back to base res with new dims

                if (xy.xw > oldxw or xy.yw > oldyw):
                    #now we need to pad u,u0 with zeros to make sure it matches the new space
                    xpad = int((xy.shape0[0] - u0.shape[0]) / 2)
                    ypad = int((xy.shape0[1] - u0.shape[1]) / 2)

                    u = np.pad(u, ((xpad, xpad), (ypad, ypad)))
                    u0 = np.pad(u0, ((xpad, xpad), (ypad, ypad)))

                    #pad coord arrays to do interpolation
                    xy.xa_last = np.hstack(
                        (np.linspace(xy.xm, oldxm - dx, xpad), xy.xa_last,
                         np.linspace(oldxM + dx, xy.xM, xpad)))
                    xy.ya_last = np.hstack(
                        (np.linspace(xy.ym, oldym - dy, ypad), xy.ya_last,
                         np.linspace(oldyM + dy, xy.yM, ypad)))

                #subdivide into nonuniform grid
                xy.refine_base(u0, ucrit)

                #interp the field to the new grid
                u = xy.resample_complex(u)

                #give the grid to the optical sys obj so it can compute IORs
                self.optical_system.set_sampling(xy)

                #compute nonuniform grid correction factors R_i
                self.update_grid_cor_facs('x')
                self.update_grid_cor_facs('y')

                # grid size has changed, so now we need to reallocate arrays for at least the next remesh_period iters
                _trimatsx, rmatx, gx, _trimatsy, rmaty, gy, IORsq__, _IORsq_, __IORsq = self.allocate_mats(
                )

                #get the current IOR dist
                self.set_IORsq(IORsq__, z__)

                #precompute things that will be reused
                self.precomp_trimats('x')
                self.precomp_trimats('y')

                self.rmat_precomp('x')
                self.rmat_precomp('y')

                self._pmlcorrect(_trimatsx, 'x')
                self._pmlcorrect(_trimatsy, 'y')

            self.set_IORsq(_IORsq_, _z_)
            self.set_IORsq(__IORsq, __z)

            self.rmat(rmatx, u, IORsq__, 'x')
            self.rmat_pmlcorrect(rmatx, u, 'x')

            self._trimats(_trimatsx, _IORsq_, 'x')
            self._trimats(_trimatsy, __IORsq.T, 'y')

            tri_solve_vec(_trimatsx[0], _trimatsx[1], _trimatsx[2], rmatx, gx,
                          u)

            self.rmat(rmaty, u.T, _IORsq_.T, 'y')
            self.rmat_pmlcorrect(rmaty, u.T, 'y')

            tri_solve_vec(_trimatsy[0], _trimatsy[1], _trimatsy[2], rmaty, gy,
                          u.T)

            z__ = __z
            if (i + 2) % remesh_every != 0:
                IORsq__[:, :] = __IORsq

        print("final total power", self.totalpower[-1])

        if writeto:
            np.save(writeto, self.field)
        return u
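
prop2end optionally dumps the accumulated field slices with np.save. Assuming xyslice selects a one-dimensional cut (so the saved array is 2-D, one row per kept z plane) and using a hypothetical file name, the output can be inspected afterwards like this:

import numpy as np
import matplotlib.pyplot as plt

field = np.load("prop_field.npy")       # hypothetical path: whatever was passed as `writeto`
plt.imshow(np.abs(field) ** 2, aspect='auto', origin='lower')
plt.xlabel("transverse sample index")
plt.ylabel("z slice index")
plt.colorbar(label="|u|^2")
plt.show()
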
Example #7

                cell_label_map[cell_label_map == region.label] = 0

        cell_mask = cell_label_map > 0
        postprocessed_path = config.result_path + 'postprocessed_png/'
        if not os.path.exists(postprocessed_path):
            os.mkdir(postprocessed_path)    
        save_folder = postprocessed_path + '%s/' % well
        if not os.path.exists(save_folder):
            os.mkdir(save_folder)
        plt.imsave(save_folder + whole_img_predic_list[slice_index].split('/')[-1].replace('.npy', '_postprocessed.png'), cell_mask)
        try:
            cell_mask_3D[:, :, slice_index - 30] = cell_mask
            nuclei_mask_3D[:, :, slice_index - 30] = nuclei_mask
            nuclei_segmentation_3D[:, :, slice_index - 30] = nucleus_image * nuclei_mask
            nuclei_channel_cell_segmentation_3D[:, :, slice_index - 30] = nucleus_image * cell_mask
            printProgressBar(slice_index, 30)
        except Exception:
            printProgressBar(slice_index, len(sub_prediction_list))

    # The Nifti shell is an original nifti scan used to keep all the files with the same affine and header
    # You may need to change your nifti shell here
    try:
        NIFTI_shell_file = './Step02_channel_combined_input/demo/1600*1600_nifti_shell.nii.gz'
        # change the result path to where you want to save this file
        result_path = config.result_path + 'nifti_30frames/'
        if not os.path.exists(result_path):
            os.mkdir(result_path)
        NIFTI_shell = nib.load(NIFTI_shell_file)
        cell_mask_3D_patch_nii = nib.Nifti1Image(cell_mask_3D, NIFTI_shell.affine)
        nuclei_mask_3D_patch_nii = nib.Nifti1Image(nuclei_mask_3D, NIFTI_shell.affine)
        nuclei_segmentation_3D_patch_nii = nib.Nifti1Image(nuclei_segmentation_3D, NIFTI_shell.affine)
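
The fragment above builds Nifti1Image objects but is cut off before they are written to disk. The usual next step with nibabel, sketched with the variables defined above and illustrative file names, would be:

import nibabel as nib

# write each 3-D volume next to the other results (file names are illustrative)
nib.save(cell_mask_3D_patch_nii, result_path + 'cell_mask_3D.nii.gz')
nib.save(nuclei_mask_3D_patch_nii, result_path + 'nuclei_mask_3D.nii.gz')
nib.save(nuclei_segmentation_3D_patch_nii, result_path + 'nuclei_segmentation_3D.nii.gz')
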
Example #8
            keep_region = False
            for pixel in region.coords:
                if seed[pixel[0], pixel[1]] == 1:
                    keep_region = True
            if not keep_region:
                cell_label_map[cell_label_map == region.label] = 0

        cell_mask = cell_label_map > 0

        cell_mask_3D[:, :, slice_index - 30] = cell_mask
        nuclei_mask_3D[:, :, slice_index - 30] = nuclei_mask
        nuclei_segmentation_3D[:, :, slice_index - 30] = nucleus_image * nuclei_mask
        nuclei_channel_cell_segmentation_3D[:, :, slice_index - 30] = nucleus_image * cell_mask
        printProgressBar(slice_index, 30)

    # The Nifti shell is an original nifti scan used to keep all the files with the same affine and header
    # You need to change your nifti shell here
    NIFTI_shell_file = '/media/sail/SSD1T/Tal_cell_tracking/Nifti/20190621_111240_generate_nifti_1_1/strct/1600*1600.nii.gz'
    # change the result path to where you want to save this file
    result_path = '/media/sail/SSD1T/Tal_cell_tracking/Nifti/%s_Prediction_comparison/' % well
    if not os.path.exists(result_path):
        os.mkdir(result_path)
    NIFTI_shell = nib.load(NIFTI_shell_file)
    cell_mask_3D_patch_nii = nib.Nifti1Image(cell_mask_3D, NIFTI_shell.affine)
    nuclei_mask_3D_patch_nii = nib.Nifti1Image(nuclei_mask_3D,
                                               NIFTI_shell.affine)
    nuclei_segmentation_3D_patch_nii = nib.Nifti1Image(nuclei_segmentation_3D,
                                                       NIFTI_shell.affine)
    nuclei_channel_cell_segmentation_3D_patch_nii = nib.Nifti1Image(