Example #1
def setup_search(path, tfile):
    # Assumes json is imported and the kbmod bindings are imported as kb.
    with open(path, 'r') as fp:
        params = json.load(fp)
    print("Loading trajectories from " + tfile + "... ", end="")
    t_list = kb.load_trajectories(tfile)  # or params['trajectories_file']
    print("Done.")
    psf = kb.psf(params['psf_sigma'])
    print("Generating images... ", end="", flush=True)
    imgs = []
    for i in range(params['img_count']):
        time = i / (params['img_count'] - 1)  # normalized to [0, 1]
        im = kb.layered_image('img' + str(i),
            params['x_dim'],
            params['y_dim'],
            params['noise'],
            params['noise'] * params['noise'],  # variance plane = noise stdev squared
            time)
        for t in t_list:
            im.add_object(t.x+time*t.x_v, t.y+time*t.y_v, t.flux, psf)
        imgs.append(im)

    stack = kb.image_stack(imgs)
    del imgs
    search = kb.stack_search(stack, psf)
    search.set_debug(True)
    del stack
    print("Done.")
    return search, t_list, params
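
A minimal driver for this helper might look like the following; both file names are hypothetical, and the JSON keys match exactly what setup_search reads above (psf_sigma, img_count, x_dim, y_dim, noise):

# params.json (hypothetical) could contain:
# {"psf_sigma": 1.0, "img_count": 10, "x_dim": 500, "y_dim": 500, "noise": 8.0}
search, t_list, params = setup_search('params.json', 'trajectories.txt')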
Example #2
def run_search(par, run_number):
    # Assumes os, math, numpy as np, random as rd, the kbmod bindings as kb,
    # and the Bunch and add_trajectory helpers are available.

    par = Bunch(par)  # attribute-style access to the parameter dict

    files = os.listdir(par.path)

    files.sort()
    files = [par.path + f for f in files]
    files = files[:par.max_images]

    images = [kb.layered_image(f) for f in files]

    p = kb.psf(par.psf_width)
    angle_range = par.angle_range
    velocity_range = par.velocity_range

    results_key = []
    for _ in range(par.object_count):
        traj = kb.trajectory()
        traj.x = int(rd.uniform(*par.x_range))
        traj.y = int(rd.uniform(*par.y_range))
        ang = rd.uniform(*angle_range)
        vel = rd.uniform(*velocity_range)
        traj.x_v = vel * math.cos(ang)
        traj.y_v = vel * math.sin(ang)
        traj.flux = rd.uniform(*par.flux_range)
        results_key.append(traj)

    results_key.extend(par.real_results)

    for t in results_key:
        add_trajectory(images, t, p)

    stack = kb.image_stack(images)

    stack.apply_mask_flags(par.flags, par.flag_exceptions)
    stack.apply_master_mask(par.master_flags, 2)

    search = kb.stack_search(stack, p)

    search_angle_r = (angle_range[0] / par.search_margin,
                      angle_range[1] * par.search_margin)
    search_velocity_r = (velocity_range[0] / par.search_margin,
                         velocity_range[1] * par.search_margin)
    search.gpu(par.angle_steps, par.velocity_steps, *search_angle_r,
               *search_velocity_r, par.min_observations)

    search.save_results(
        par.results_file_path + 'run{0:03d}.txt'.format(run_number + 1), 0.03)

    if par.save_science:
        images = stack.sciences()
        for i in range(len(images)):
            np.save(
                par.img_save_path + 'R{0:03d}'.format(run_number + 1) +
                'SCI{0:03d}.npy'.format(i + 1), images[i])

    return results_key
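
For reference, a parameter dictionary covering every key this function reads might look like the sketch below; all values are illustrative, not taken from the source:

par = {
    'path': './images/', 'max_images': 10, 'psf_width': 1.4,
    'angle_range': (0.0, 1.5), 'velocity_range': (100.0, 500.0),
    'object_count': 5, 'x_range': (5, 495), 'y_range': (5, 495),
    'flux_range': (200.0, 500.0), 'real_results': [],
    'flags': ~0, 'flag_exceptions': [32, 39],
    'master_flags': int('100111', 2), 'search_margin': 1.1,
    'angle_steps': 10, 'velocity_steps': 10, 'min_observations': 3,
    'results_file_path': './results/', 'save_science': False,
    'img_save_path': './sci/',
}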
Example #3
    def setUp(self):
        self.im_count = 5
        p = kb.psf(0.05)
        self.images = []
        for c in range(self.im_count):
            im = kb.layered_image(str(c), 10, 10, 0.0, 1.0, c)
            im.add_object(2 + c * 0.5 + 0.5, 2 + c * 0.5 + 0.5, 1, p)
            self.images.append(im)
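
A test built on this fixture could stack the images and exercise the accessors seen in the other examples; a sketch, assuming the accessors behave as they do elsewhere in this listing:

    def test_stack(self):
        stack = kb.image_stack(self.images)
        self.assertEqual(len(stack.get_images()), self.im_count)
        self.assertEqual(stack.get_width(), 10)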
Example #4
    def test_pooling(self):
        depth = 10
        res = 2**depth
        im = kb.layered_image("test", res, res, 0.0, 1.0, 0.0)
        im = im.get_science()
        for _ in range(depth):
            im = im.pool_max()
        im = np.array(im)
        self.assertEqual(im[0][0], 0.0)

        im = kb.layered_image("test", res, res, 0.0, 1.0, 0.0)
        im = im.get_science()
        for _ in range(depth):
            im = im.pool_min()
        im = np.array(im)
        self.assertEqual(im[0][0], 0.0)

        im = kb.layered_image("test", res, res, 3.0, 9.0, 0.0)
        im = im.get_science()
        test_high = 142.6
        test_low = -302.2
        im.set_pixel(51, 55, test_high)
        im.set_pixel(20, 18, test_low)
        # reduce to max
        imax = im.pool_max()
        for _ in range(depth - 1):
            imax = imax.pool_max()
        imax = np.array(imax)
        self.assertAlmostEqual(imax[0][0], test_high, delta=0.001)

        # reduce to min (depth pools total, mirroring the max case above)
        imin = im.pool_min()
        for _ in range(depth - 1):
            imin = imin.pool_min()
        imin = np.array(imin)
        self.assertAlmostEqual(imin[0][0], test_low, delta=0.001)
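
Each pool_max/pool_min call halves the image side, which is why an image of side 2**depth collapses to a single pixel after depth calls. A generic helper capturing that arithmetic (a sketch, assuming a power-of-two side and only the pool_max API used above):

import math

def pool_to_pixel(img, side):
    # log2(side) poolings reduce a side length of 2**depth down to 1.
    for _ in range(int(math.log2(side))):
        img = img.pool_max()
    return img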
Example #5
    def setUp(self):
        # test pass thresholds
        im_count = 10
        self.ix = 136
        self.iy = 103
        self.xv = 34.0
        self.yv = 21.0
        self.flux = 350.0
        p = kb.psf(1.0)

        imgs = []
        for i in range(im_count):
            im = kb.layered_image("im" + str(i + 1), 500, 500, 0.0, 100.0,
                                  i * 0.1)
            im.add_object(self.ix + 0.1 * i * self.xv,
                          self.iy + 0.1 * i * self.yv, self.flux, p)
            imgs.append(im)
        stack = kb.image_stack(imgs)
        self.search = kb.stack_search(stack, p)
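
A test body for this fixture could sweep a grid that brackets the inserted motion and check that the top result recovers it; the step counts and bounds below are illustrative, and the gpu argument order follows the other examples (angle_steps, velocity_steps, min_angle, max_angle, min_velocity, max_velocity, min_observations):

    def test_search(self):
        # The inserted object moves at (34, 21) px per time unit, i.e. about
        # 40 px at an angle of about 0.55 rad; the grid brackets both.
        self.search.gpu(10, 10, 0.0, 1.5, 20.0, 60.0, 7)
        best = self.search.get_results(0, 1)[0]
        self.assertEqual(best.x, self.ix)
        self.assertEqual(best.y, self.iy)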
Example #6
    def setUp(self):
        # test pass thresholds
        im = kb.layered_image("", 171, 111, 5.0, 25.0, 0)
        stack = kb.image_stack([im])
        p = kb.psf(1.0)
        self.search = kb.stack_search(stack, p)

        max_img = im.get_science()
        max_img.set_pixel(38, 39, 117)
        max_img.set_pixel(24, 63, 1000)
        max_img.set_pixel(50, 27, 1000)
        max_img.set_pixel(80, 82, 1000)
        self.pooled_max = []
        while max_img.get_ppi() > 1:  # pool until a single pixel remains
            self.pooled_max.append(max_img)
            max_img = max_img.pool_max()

        min_img = im.get_science()
        self.pooled_min = []
        while min_img.get_ppi() > 1:
            self.pooled_min.append(min_img)
            min_img = min_img.pool_min()
Example #7
    def setUp(self):
        p = kb.psf(1.0)
        img = kb.layered_image("test", 4, 4, 0.0, 0.0, 0.0)
        stack = kb.image_stack([img])
        self.search = kb.stack_search(stack, p)
Example #8
    def run_search(self,
                   im_filepath,
                   res_filepath,
                   out_suffix,
                   time_file,
                   likelihood_level=10.,
                   mjd_lims=None,
                   num_fakes=25,
                   rand_seed=42):

        visit_nums, visit_times = np.genfromtxt(time_file, unpack=True)
        image_time_dict = OrderedDict()
        for visit_num, visit_time in zip(visit_nums, visit_times):
            image_time_dict[str(int(visit_num))] = visit_time

        start = time.time()

        patch_visits = sorted(os.listdir(im_filepath))
        patch_visit_ids = np.array(
            [int(visit_name[1:7]) for visit_name in patch_visits])
        patch_visit_times = np.array(
            [image_time_dict[str(visit_id)] for visit_id in patch_visit_ids])

        if mjd_lims is None:
            use_images = patch_visit_ids
        else:
            visit_only = np.where(((patch_visit_times > mjd_lims[0])
                                   & (patch_visit_times < mjd_lims[1])))[0]
            print(visit_only)
            use_images = patch_visit_ids[visit_only]

        image_mjd = np.array(
            [image_time_dict[str(visit_id)] for visit_id in use_images])
        times = image_mjd - image_mjd[0]

        flags = ~0  # mask pixels with any flags
        flag_exceptions = [
            32, 39
        ]  # unless it has one of these special combinations of flags
        master_flags = int('100111', 2)  # mask any pixels which have any of
        # these flags in more than two images

        hdulist = fits.open('%s/v%i-fg.fits' % (im_filepath, use_images[0]))
        f0 = hdulist[0].header['FLUXMAG0']
        w = WCS(hdulist[1].header)
        ec_angle = self.calc_ecliptic_angle(w)
        del hdulist

        images = [
            kb.layered_image('%s/v%i-fg.fits' % (im_filepath, f))
            for f in np.sort(use_images)
        ]
        print('Images Loaded')

        p = kb.psf(1.4)

        # Add fakes steps
        print('Adding fake objects')
        x_fake_range = (5, 3650)
        y_fake_range = (5, 3650)
        angle_range = (ec_angle - (np.pi / 15.), ec_angle + (np.pi / 15.))
        velocity_range = (100, 500)
        mag_range = (20, 26)

        fake_results = []
        fake_output = []

        np.random.seed(rand_seed)
        for val in range(num_fakes):
            traj = kb.trajectory()
            traj.x = int(np.random.uniform(*x_fake_range))
            traj.y = int(np.random.uniform(*y_fake_range))
            ang = np.random.uniform(*angle_range)
            vel = np.random.uniform(*velocity_range)
            traj.x_v = vel * np.cos(ang)
            traj.y_v = vel * np.sin(ang)
            mag_val = np.random.uniform(*mag_range)
            traj.flux = f0 * np.power(10, -0.4 * mag_val)
            fake_results.append(traj)
            fake_output.append(
                [traj.x, traj.y, traj.x_v, traj.y_v, traj.flux, mag_val])

        for fake_obj in fake_results:
            tf.add_trajectory(images, fake_obj, p, times)

        stack = kb.image_stack(images)
        del images
        stack.apply_mask_flags(flags, flag_exceptions)
        stack.apply_master_mask(master_flags, 2)

        stack.grow_mask()
        stack.grow_mask()

        stack.apply_mask_threshold(120.)

        stack.set_times(times)
        print("Times set")
        x_size = stack.get_width()
        y_size = stack.get_height()

        search = kb.stack_search(stack, p)
        del stack
        ang_min = ec_angle - self.ang_arr[0]
        ang_max = ec_angle + self.ang_arr[1]
        vel_min = self.v_arr[0]
        vel_max = self.v_arr[1]
        print("Starting Search")
        print('---------------------------------------')
        param_headers = ("Ecliptic Angle", "Min. Search Angle",
                         "Max Search Angle", "Min Velocity", "Max Velocity")
        param_values = (ec_angle, ang_min, ang_max, vel_min, vel_max)
        for header, val in zip(param_headers, param_values):
            print('%s = %.4f' % (header, val))
        search.gpu(int(self.ang_arr[2]), int(self.v_arr[2]), ang_min, ang_max,
                   vel_min, vel_max, int(self.num_obs))

        keep_stamps = []
        keep_new_lh = []
        keep_results = []
        keep_times = []
        memory_error = False
        keep_lc = []

        likelihood_limit = False
        res_num = 0
        chunk_size = 500000
        print('---------------------------------------')
        print("Processing Results")
        print('---------------------------------------')
        while likelihood_limit is False:
            pool = mp.Pool(processes=16)
            results = search.get_results(res_num, chunk_size)
            chunk_headers = ("Chunk Start", "Chunk Size",
                             "Chunk Max Likelihood", "Chunk Min. Likelihood")
            chunk_values = (res_num, len(keep_results), results[0].lh,
                            results[-1].lh)
            for header, val in zip(chunk_headers, chunk_values):
                if isinstance(val, (int, np.integer)):
                    print('%s = %i' % (header, val))
                else:
                    print('%s = %.2f' % (header, val))
            print('---------------------------------------')
            psi_curves = []
            phi_curves = []
            for line in results:
                psi_curve, phi_curve = search.lightcurve(line)
                psi_curves.append(np.array(psi_curve).flatten())
                phi_curve = np.array(phi_curve).flatten()
                # Replace zeros so later psi/phi divisions cannot blow up.
                phi_curve[phi_curve == 0.] = 99999999.
                phi_curves.append(phi_curve)
                if line.lh < likelihood_level:
                    likelihood_limit = True
                    break
            keep_idx_results = pool.starmap_async(
                return_indices,
                zip(psi_curves, phi_curves, range(len(psi_curves))))
            pool.close()
            pool.join()
            keep_idx_results = keep_idx_results.get()

            if len(keep_idx_results[0]) < 3:
                keep_idx_results = [(0, [-1], 0.)]

            for result_on in range(len(psi_curves)):
                if keep_idx_results[result_on][1][0] == -1:
                    continue
                elif len(keep_idx_results[result_on][1]) < 3:
                    continue
                elif keep_idx_results[result_on][2] < likelihood_level:
                    continue
                else:
                    keep_idx = keep_idx_results[result_on][1]
                    new_likelihood = keep_idx_results[result_on][2]
                    keep_results.append(results[result_on])
                    keep_new_lh.append(new_likelihood)
                    stamps = search.sci_stamps(results[result_on], 10)
                    stamp_arr = np.array(
                        [np.array(stamps[s_idx]) for s_idx in keep_idx])
                    keep_stamps.append(np.sum(stamp_arr, axis=0))
                    keep_lc.append((psi_curves[result_on] /
                                    phi_curves[result_on])[keep_idx])
                    #keep_times.append(image_mjd[keep_idx])
                    keep_times.append(keep_idx)

            # if len(keep_results) > 800000:
            #     with open('%s/memory_error_tr_%s.txt' %
            #               (res_filepath, out_suffix), 'w') as f:
            #         f.write('In %i total results, %i were kept. Needs manual look.' %
            #                 (res_num + chunk_size, len(keep_results)))
            #     memory_error = True
            #     likelihood_limit = True

            # if res_num+chunk_size >= 8000000:
            #     likelihood_level = 20.
            #     with open('%s/overload_error_tr_%s.txt' %
            #               (res_filepath, out_suffix), 'w') as f:
            #         f.write('In %i total results, %i were kept. Likelihood level down to %f.' %
            #                 (res_num + chunk_size, len(keep_results), line.lh))

            res_num += chunk_size

        del search

        lh_sorted_idx = np.argsort(np.array(keep_new_lh))[::-1]

        if len(lh_sorted_idx) > 0:
            print("Stamp filtering %i results" % len(lh_sorted_idx))
            pool = mp.Pool(processes=16)
            stamp_filt_pool = pool.map_async(
                stamp_filter_parallel,
                np.array(keep_stamps)[lh_sorted_idx])
            pool.close()
            pool.join()
            stamp_filt_results = stamp_filt_pool.get()
            stamp_filt_idx = lh_sorted_idx[np.where(
                np.array(stamp_filt_results) == 1)]
            if len(stamp_filt_idx) > 0:
                print("Clustering %i results" % len(stamp_filt_idx))
                cluster_idx = self.cluster_results(
                    np.array(keep_results)[stamp_filt_idx], x_size, y_size,
                    [vel_min, vel_max], [ang_min, ang_max])
                final_results = stamp_filt_idx[cluster_idx]
            else:
                cluster_idx = []
                final_results = []
            del cluster_idx
            del stamp_filt_results
            del stamp_filt_idx
            del stamp_filt_pool
        else:
            final_results = lh_sorted_idx

        print('Keeping %i results' % len(final_results))

        np.savetxt('%s/results_%s.txt' % (res_filepath, out_suffix),
                   np.array(keep_results)[final_results],
                   fmt='%s')
        np.savetxt('%s/results_fakes_%s.txt' % (res_filepath, out_suffix),
                   np.array(fake_output),
                   header='x,y,xv,yv,flux,mag')
        # np.savetxt('%s/lc_%s.txt' % (res_filepath, out_suffix),
        #            np.array(keep_lc)[final_results], fmt='%s')
        with open('%s/lc_%s.txt' % (res_filepath, out_suffix), 'w') as f:
            writer = csv.writer(f)
            writer.writerows(np.array(keep_lc)[final_results])
        # np.savetxt('%s/times_%s.txt' % (res_filepath, out_suffix),
        #            np.array(keep_times)[final_results], fmt='%s')
        with open('%s/times_%s.txt' % (res_filepath, out_suffix), 'w') as f:
            writer = csv.writer(f)
            writer.writerows(np.array(keep_times)[final_results])
        np.savetxt('%s/filtered_likes_%s.txt' % (res_filepath, out_suffix),
                   np.array(keep_new_lh)[final_results],
                   fmt='%.4f')
        np.savetxt('%s/ps_%s.txt' % (res_filepath, out_suffix),
                   np.array(keep_stamps).reshape(len(keep_stamps),
                                                 441)[final_results],
                   fmt='%.4f')  # 441 = 21*21 pixels per radius-10 stamp

        end = time.time()

        del keep_stamps
        del keep_times
        del keep_results
        del keep_new_lh
        del keep_lc

        print("Time taken for patch: ", end - start)
Example #9
    def load_images(
        self, im_filepath, time_file, mjd_lims, visit_in_filename,
        file_format):
        """
        This function loads images and ingests them into a search object.
        INPUT-
            im_filepath : string
                Image file path from which to load images
            time_file : string
                File name containing image times
            mjd_lims : list of floats
                Optional MJD limits on the images to search.
            visit_in_filename : int list
                A list containing the first and last character of the visit ID
                contained in the filename. By default, the first six characters
                of the filenames in this folder should contain the visit ID.
            file_format : string
                An unformatted string to be passed to return_filename(). When
                str.format() passes a visit ID to file_format, file_format
                should return the name of a file corresponding to that visit
                ID.
        OUTPUT-
            stack : kbmod.image_stack object
            image_params : dictionary
                Contains the following image parameters:
                Julian day, x size of the images, y size of the images,
                ecliptic angle of the images, and observation times.
        """
        image_params = {}
        print('---------------------------------------')
        print("Loading Images")
        print('---------------------------------------')
        visit_nums, visit_times = np.genfromtxt(time_file, unpack=True)
        image_time_dict = OrderedDict()
        for visit_num, visit_time in zip(visit_nums, visit_times):
            image_time_dict[str(int(visit_num))] = visit_time
        patch_visits = sorted(os.listdir(im_filepath))
        patch_visit_ids = self.get_folder_visits(patch_visits,
                                                 visit_in_filename)
        patch_visit_times = np.array([image_time_dict[str(int(visit_id))]
                                      for visit_id in patch_visit_ids])
        if mjd_lims is None:
            use_images = patch_visit_ids
        else:
            visit_only = np.where(((patch_visit_times >= mjd_lims[0])
                                   & (patch_visit_times <= mjd_lims[1])))[0]
            print(visit_only)
            use_images = patch_visit_ids[visit_only].astype(int)

        image_params['mjd'] = np.array([image_time_dict[str(int(visit_id))]
                                        for visit_id in use_images])
        times = image_params['mjd'] - image_params['mjd'][0]
        file_name = self.return_filename(int(use_images[0]), file_format)
        file_path = os.path.join(im_filepath, file_name)
        hdulist = fits.open(file_path)
        wcs = WCS(hdulist[1].header)
        image_params['ec_angle'] = self._calc_ecliptic_angle(wcs)
        del hdulist

        images = [kb.layered_image('{0:s}/{1:s}'.format(
            im_filepath, self.return_filename(f, file_format)))
            for f in np.sort(use_images)]

        print('Loaded {0:d} images'.format(len(images)))
        stack = kb.image_stack(images)

        stack.set_times(times)
        print("Times set", flush=True)

        image_params['x_size'] = stack.get_width()
        image_params['y_size'] = stack.get_height()
        image_params['times'] = stack.get_times()
        return stack, image_params
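
A call under the documented contract might look like this; the directory, time file, and file-name pattern are hypothetical (the 'v...-fg.fits' pattern mirrors Example #8), and visit_in_filename=[0, 6] matches the default described in the docstring:

stack, image_params = self.load_images(
    im_filepath='./patch/', time_file='./times.dat', mjd_lims=None,
    visit_in_filename=[0, 6], file_format='v{0:06d}-fg.fits')
print(image_params['ec_angle'], image_params['x_size'])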
Example #10
def run_search(par):
    # Assumes os, math, random as rd, and the kbmod bindings as kb are
    # imported, along with the Bunch, add_trajectory, cluster_trajectories,
    # match_trajectories, and create_postage_stamp helpers.

    par = Bunch(par)

    files = os.listdir(par.path)
    files.sort()
    files = [par.path + f for f in files]
    files = files[:par.max_images]

    images = [kb.layered_image(f) for f in files]

    p = kb.psf(par.psf_width)
    angle_range = par.angle_range
    velocity_range = par.velocity_range

    results_key = []
    for _ in range(par.object_count):
        traj = kb.trajectory()
        traj.x = int(rd.uniform(*par.x_range))
        traj.y = int(rd.uniform(*par.y_range))
        ang = rd.uniform(*angle_range)
        vel = rd.uniform(*velocity_range)
        traj.x_v = vel * math.cos(ang)
        traj.y_v = vel * math.sin(ang)
        traj.flux = rd.uniform(*par.flux_range)
        results_key.append(traj)

    results_key.extend(par.real_results)

    for t in results_key:
        add_trajectory(images, t, p)

    stack = kb.image_stack(images)

    stack.apply_mask_flags(par.flags, par.flag_exceptions)
    stack.apply_master_mask(par.master_flags, 2)

    search = kb.stack_search(stack, p)

    search_angle_r = (angle_range[0] / par.search_margin,
                      angle_range[1] * par.search_margin)
    search_velocity_r = (velocity_range[0] / par.search_margin,
                         velocity_range[1] * par.search_margin)
    search.gpu(par.angle_steps, par.velocity_steps,
               *search_angle_r, *search_velocity_r, par.min_observations)

    results = search.get_results(0, par.results_count)

    results_clustered = [results[i] for i in cluster_trajectories(
        results, dbscan_args=dict(eps=par.cluster_eps,
                                  n_jobs=-1, min_samples=1))[1]]

    results_matched, results_unmatched = match_trajectories(
        results_clustered, results_key, par.match_v, par.match_coord)

    results_to_plot = results_unmatched

    images = [i.science() for i in stack.get_images()]
    stamps = [create_postage_stamp(images, t, stack.get_times(), [21, 21])[0]
              for t in results_to_plot]

    return results_matched, results_unmatched, stamps
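
Relative to Example #2, this variant additionally reads results_count, cluster_eps, match_v, and match_coord from par, and instead of writing files it returns the matched and unmatched trajectories along with 21x21 postage stamps of the unmatched ones.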
Example #11
    def load_images(self, im_filepath, time_file, psf_val=1.4, mjd_lims=None):
        """
        This function loads images and ingests them into a search object

        Input
        ---------

        im_filepath : string
            Image file path from which to load images

        time_file : string
            File name containing image times

        psf_val : float
            Gaussian width parameter used to build the search PSF
            (defaults to 1.4)

        mjd_lims : list of floats, optional
            MJD limits on the images to search (defaults to None)

        Output
        ---------

        search : kb.stack_search object

        image_params : dictionary
            Contains image parameters such as ecliptic angle and mean Julian day
        """

        # Empty for now. Will contain x_size, y_size, ec_angle, and mjd before being returned.
        image_params = {}

        visit_nums, visit_times = np.genfromtxt(time_file, unpack=True)
        image_time_dict = OrderedDict()
        for visit_num, visit_time in zip(visit_nums, visit_times):
            image_time_dict[str(int(visit_num))] = visit_time

        patch_visits = sorted(os.listdir(im_filepath))
        patch_visit_ids = self.get_folder_visits(patch_visits)
        patch_visit_times = np.array(
            [image_time_dict[str(visit_id)] for visit_id in patch_visit_ids])

        if mjd_lims is None:
            use_images = patch_visit_ids
        else:
            visit_only = np.where(((patch_visit_times > mjd_lims[0])
                                   & (patch_visit_times < mjd_lims[1])))[0]
            print(visit_only)
            use_images = patch_visit_ids[visit_only]

        image_params['mjd'] = np.array(
            [image_time_dict[str(visit_id)] for visit_id in use_images])
        times = image_params['mjd'] - image_params['mjd'][0]

        flags = ~0  # mask pixels with any flags
        flag_exceptions = [
            32, 39
        ]  # unless it has one of these special combinations of flags
        master_flags = int('100111', 2)  # mask any pixels which have any of
        # these flags in more than two images

        hdulist = fits.open('%s/%s' %
                            (im_filepath, self.return_filename(use_images[0])))
        w = WCS(hdulist[1].header)
        image_params['ec_angle'] = self.calc_ecliptic_angle(w)
        del hdulist

        images = [
            kb.layered_image('%s/%s' % (im_filepath, self.return_filename(f)))
            for f in np.sort(use_images)
        ]

        print('Loaded %i images' % (len(images)))

        p = kb.psf(psf_val)
        stack = kb.image_stack(images)

        # Apply masks
        stack.apply_mask_flags(flags, flag_exceptions)
        stack.apply_master_mask(master_flags, 2)

        stack.grow_mask()
        stack.grow_mask()

        stack.apply_mask_threshold(120.)

        stack.set_times(times)
        print("Times set")

        image_params['x_size'] = stack.get_width()
        image_params['y_size'] = stack.get_height()

        search = kb.stack_search(stack, p)

        return search, image_params
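
With the returned search object, a GPU sweep centered on the measured ecliptic angle follows the pattern of the other examples; the paths are hypothetical, and the step counts, angle margin, and velocity bounds below are illustrative (the pi/15 margin mirrors Example #8):

search, image_params = self.load_images('./patch/', './times.dat')
ec = image_params['ec_angle']
search.gpu(10, 10, ec - np.pi / 15., ec + np.pi / 15., 100., 500., 3)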
Example #12
    # Assumes sys, os, numpy as np, and the kbmod bindings as kb are imported;
    # this fragment runs inside a script's top-level main block.
    image_folder = sys.argv[1]
    results_file = sys.argv[2]

    # Following sets up ability to create psi/phi and is from
    # HITS_Main_Belt_Example.ipynb
    flags = ~0
    flag_exceptions = [32, 39]
    master_flags = int('100111', 2)

    image_file_list = [
        image_folder + '/' + filename
        for filename in os.listdir(image_folder)
    ]
    image_file_list.sort()
    images = [kb.layered_image(f) for f in image_file_list]
    p = kb.psf(1.4)
    stack = kb.image_stack(images)
    stack.apply_mask_flags(flags, flag_exceptions)
    stack.apply_master_mask(master_flags, 2)

    image_array = stack.get_images()
    search = kb.stack_search(stack, p)

    # Argument order (per the other examples): angle_steps, velocity_steps,
    # min_angle, max_angle, min_velocity, max_velocity, min_observations.
    search.gpu(1, 2, -0.0442959674533, 0.741102195944, 1920.0, 4032.0, 3)

    psi = search.get_psi()
    phi = search.get_phi()

    image_times = np.array(stack.get_times())
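
    # The psi and phi layers are the matched-filter components that the GPU
    # search sums along candidate trajectories; per the light curves in
    # Example #8, psi/phi per epoch is a flux estimate. A sketch of scoring
    # one trajectory by hand follows; the trajectory values are hypothetical,
    # and it assumes each psi/phi element converts to a 2-D numpy array
    # indexed [row, column] and that the positions stay in-bounds.
    psi_arr = [np.array(p) for p in psi]
    phi_arr = [np.array(q) for q in phi]
    x0, y0, xv, yv = 100, 100, 2000.0, 0.0  # illustrative values
    psi_sum = sum(psi_arr[i][int(y0 + t * yv), int(x0 + t * xv)]
                  for i, t in enumerate(image_times))
    phi_sum = sum(phi_arr[i][int(y0 + t * yv), int(x0 + t * xv)]
                  for i, t in enumerate(image_times))
    print('flux estimate: %.2f' % (psi_sum / phi_sum))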