Example #1
    def generate_effective_noise(self, neighbour_data, central_data, selection):
        print "Creating neighbour realisation ",
        if self.index is None:
            i = np.random.randint(neighbour_data.truth[selection].size)
        else:
            i=self.index
        print "random index: %d"%i

        tile = central_data.res[selection][i]["tilename"]
        cid = central_data.res[selection][i]["coadd_objects_id"]

        # Load the data from the MEDS file or the cache
        if tile in meds_cache.keys():
            meds, meds_ref = meds_cache[tile]
        else:
            filename = glob.glob("/share/des/disc6/samuroff/y1/hoopoe/y1a1-v2.2_10/meds/noise/%s*.fits*"%tile)[0]
            print "Loading MEDS file from %s"%filename
            meds = s.meds_wrapper(filename)
            filename = glob.glob("/share/des/disc6/samuroff/y1/hoopoe/y1a1-v2.2_10/meds/%s*.fits*"%tile)[0]
            meds_ref = s.meds_wrapper(filename)
            meds_cache[tile] = meds, meds_ref

        # Find the relevant ids and extract the cutouts
        object_data = meds._fits["object_data"].read()
        ind = np.argwhere( object_data["id"]== cid )[0,0]

        model = meds_ref.get_cutout_list(ind, type="model")[1:]
        image = meds.get_cutout_list(ind)[1:]
        seg = meds.get_cutout_list(ind, type="seg")[0]

        # Calculate the weights
        try:
            wts = get_cweight_cutout_nearest(image[0],seg)
        except:
            print "No weights could be generated (probably the seg map is empty)"
            wts = np.ones_like(seg)

        if (wts==0).all():
            wts = np.ones_like(seg)

        pixel_variance = []
        for allco in zip(image,model):
            im = allco[0]
            mod = allco[1]

            boxsize = im.shape[0]

            pixels = im - mod*im[boxsize/2,boxsize/2]/mod[boxsize/2,boxsize/2]
            pixels *= wts
            pixels = pixels.flatten()

            pixel_variance.append(pixels.std())

        # Take the mean noise sigma in this stack
        noise_sigma = np.mean(pixel_variance)
        noise = np.random.normal(0,noise_sigma,(32,32))

        print "Effective noise sigma = %3.3f"%noise_sigma

        return noise
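
The method above depends on module-level state that is not shown in this excerpt. A minimal sketch of the assumed context follows; the import path for the wrapper module `s` is a guess and is left commented out:

import glob
import numpy as np

# Hypothetical: `s` must expose meds_wrapper(), and get_cweight_cutout_nearest()
# must be importable; their real locations are not shown in the excerpt.
# import tools.shapes as s

# Cache of opened MEDS files, keyed by tile name and shared between calls
meds_cache = {}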
Example #2
def run(f, args):
    new_meds = f
    #"%s/%s"%(args.output,os.path.basename(f))

    truth_file = os.path.basename(f).replace("-meds-", "-truth-")

    cat = s.shapecat(res=None, truth="%s/%s" % (args.truth, truth_file))
    if args.mode == "model":
        load_truth = True
        cat.load(res=False, truth=True)

    meds = s.meds_wrapper(new_meds, update=False)

    if (args.mode == "model"):
        meds.remove_model_bias(cat,
                               silent=True,
                               outdir=args.output,
                               noise=False,
                               neighbours=False)
    elif (args.mode == "neighbour"):
        meds.remove_neighbours(silent=False, outdir=args.output, noise=True)
    elif (args.mode == "noise"):
        meds.remove_noise(silent=True, outdir=args.output)
    elif (args.mode == "neighbour_noise"):
        meds.remove_neighbours(silent=False, outdir=args.output, noise=False)
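
For reference, a hedged sketch of how run() might be driven over a directory of tiles. The flags mirror the attributes the function reads (truth, mode, output) plus an input directory, following the driver pattern in Example #6; the glob pattern is illustrative:

import argparse
import glob

parser = argparse.ArgumentParser()
parser.add_argument('-i', '--input', type=str, default=".", help='Directory to source MEDS files')
parser.add_argument('-o', '--output', type=str, default="resimulated", help='Directory to write to')
parser.add_argument('-t', '--truth', type=str, default="truth", help='Directory to source truth tables')
parser.add_argument('-m', '--mode', type=str, default="model", help='Type of bias to remove')
args = parser.parse_args()

# Apply run() to every MEDS tile found in the input directory
for f in glob.glob("%s/DES*-meds-*fits*.fz" % args.input):
    run(f, args)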
Example #3
def reimpose_neighbour_masks(source_galaxies, source_masks, target, compressed=True):
    """Direct this function towards a set of neighbour free MEDS files"""
    if compressed:
        suffix = ".fz"
    else:
        suffix = ""

    # NB: the original excerpt referenced an undefined name "source"; the
    # source_galaxies argument appears to be the intended directory here
    files = glob.glob("%s/DES*.fits%s"%(source_galaxies, suffix))

    print "Will remove neighbour masking in %d MEDS files."

    for i, f in enumerate(files):
        tile = os.path.basename(f)[:12]
        print i, tile
        m = s.meds_wrapper(f)
Example #4
def reimpose_neighbour_masks(source_galaxies,
                             source_masks,
                             target,
                             compressed=True):
    """Direct this function towards a set of neighbour free MEDS files"""
    if compressed:
        suffix = ".fz"
    else:
        suffix = ""

    # NB: the original excerpt referenced an undefined name "source"; the
    # source_galaxies argument appears to be the intended directory here
    files = glob.glob("%s/DES*.fits%s" % (source_galaxies, suffix))

    print "Will remove neighbour masking in %d MEDS files."

    for i, f in enumerate(files):
        tile = os.path.basename(f)[:12]
        print i, tile
        m = s.meds_wrapper(f)
Example #5
def main():
    global args

    parser = argparse.ArgumentParser(add_help=True)
    parser.add_argument('-v', '--verbosity', type=int, action='store', default=2, choices=(0, 1, 2, 3), help='integer verbosity level: min=0, max=3 [default=2]')
    parser.add_argument('-i', '--input', type=str, default=".", help='Directory to source MEDS files')

    args = parser.parse_args()

    filelist = glob.glob("%s/DES*-meds-*fits*.fz"%args.input)
    print "found %d tiles"%len(filelist)

    for i, f in enumerate(filelist):
        #os.system("cp -rf %s %s"%(f, args.output))
        if args.verbosity > 0:
            print i, f

        meds = s.meds_wrapper(f, update=False)
        meds.download_source_images()
Example #6
def main():
    global args

    parser = argparse.ArgumentParser(add_help=True)
    parser.add_argument('-v', '--verbosity', type=int, action='store', default=2, choices=(0, 1, 2, 3), help='integer verbosity level: min=0, max=3 [default=2]')
    parser.add_argument('-o', '--output', type=str, default="resimulated", help='Directory to write to.')
    parser.add_argument('-i', '--input', type=str, default=".", help='Directory to source MEDS files')
    parser.add_argument('-t', '--truth', type=str, default="truth", help='Directory to source truth tables')
    parser.add_argument('-m', '--mode', type=str, default="model", help='type of bias to remove.')
    parser.add_argument('-f', '--field', type=str, default="*", help='Simulation run to resimulate.')
    parser.add_argument('--mpi',  action='store_true', help='Split the tiles by MPI rank' )
    args = parser.parse_args()

    if args.mpi:
        print "Setting up MPI."
        import mpi4py.MPI
        rank = mpi4py.MPI.COMM_WORLD.Get_rank()
        size = mpi4py.MPI.COMM_WORLD.Get_size()
    else:
        print "Not using MPI (set the --mpi flag if you do want to parallelise the calculation)"
        rank = 0
        size = 1

    os.system("mkdir -p %s"%args.output)

    print "Will remove %s bias"%args.mode
    print "Resimulated tiles will be written to %s"%args.output

    filelist = glob.glob("%s/DES*%s*fits*.fz"%(args.input,args.field))
    print "found %d tiles"%len(filelist)

    for i, f in enumerate(filelist):
        if os.path.exists("%s/%s" % (args.output, os.path.basename(f))):
            print "file exists."
            continue
        if i % size != rank:
            continue
        print i, f

        new_meds = f
        #"%s/%s"%(args.output,os.path.basename(f))

        truth_file = os.path.basename(f).replace("-meds-", "-truth-")

        cat = s.shapecat(res=None, truth="%s/%s" % (args.truth, truth_file))
        if args.mode == "model":
            load_truth = True
            cat.load(res=False, truth=True)

        meds = s.meds_wrapper(new_meds, update=False)

        if (args.mode == "model"):
            try:
                meds.remove_model_bias(cat, silent=True, outdir=args.output, noise=False, neighbours=False)
            except:
                continue
        elif (args.mode == "neighbour"):
            meds.remove_neighbours(silent=False, outdir=args.output, noise=True)
        elif (args.mode == "noise"):
            meds.remove_noise(silent=True, outdir=args.output)
        elif (args.mode == "neighbour_noise"):
            meds.remove_neighbours(silent=False, outdir=args.output, noise=False)
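
The i % size != rank test above distributes tiles across MPI ranks in round-robin fashion. A standalone sketch of the same pattern, assuming mpi4py is installed; the tile names are placeholders:

from mpi4py import MPI

rank = MPI.COMM_WORLD.Get_rank()
size = MPI.COMM_WORLD.Get_size()

tiles = ["DES0000-0000", "DES0001-0001", "DES0002-0002"]  # placeholder tile names
for i, tile in enumerate(tiles):
    if i % size != rank:
        continue  # another rank handles this tile
    print "rank %d processing tile %s" % (rank, tile)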
Example #7
    def run(self,
            distributions,
            niterations=2000,
            filename="mc_toy_model-results",
            size=1,
            rank=0):
        # Highest level loop - Sets model parameters
        #-----------------------------------------------------------------------

        self.m = []
        self.centroid = []

        print "Setting up model"

        meds = s.meds_wrapper(
            "/share/des/disc8/cambridge/meds/DES2111+0043-r-sim-ohioA6-meds-y1a1-beta.fits.fz"
        )

        shears = [-0.02, 0.02]
        angles = np.linspace(0, 2 * np.pi, 31)[:-1]

        self.params = {
            "fc": 0,
            "fn": 0,
            "dgn": 0,
            "Rc": 0,
            "Rn": 0,
            "psf_size": 0
        }
        self.priors = {
            "fc": [200, 8000],
            "fn": [2, 9000],
            "dgn": [1, 70],
            "Rc": [0.1, 6.0],
            "Rn": [0.1, 6.0],
            "psf_size": [0.1, 4.0]
        }
        index_c = np.random.choice(distributions.size, niterations * 50)
        index_n = np.random.choice(distributions.size, niterations * 50)

        idone = 0

        for ireal, (icent, ineigh) in enumerate(zip(index_c, index_n)):
            if ireal % size != rank:
                continue
            self.get_realisation(distributions, icent, ineigh, ireal)
            outside_allowed = self.sanity_check()
            if outside_allowed:
                continue

            if idone > niterations: continue

            evec_g = []
            restart = False
            # Second level loop - input shear
            #-----------------------------------------------------------------------
            print "Will evaluate m using %d shear values" % len(shears)
            for ishear, g in enumerate(shears):
                print "g = (%2.2f, 0.00)" % g

                evec_t = []
                centroid = []

                # Third level loop - neighbour position
                #-----------------------------------------------------------------------
                print "Will use %d neighbour angles" % len(angles)
                for ipos, theta in enumerate(angles):
                    if restart: continue

                    x = self.params["dgn"] * np.cos(theta)
                    y = self.params["dgn"] * np.sin(theta)
                    print "theta = %2.3f degrees, position = (%3.2f,%3.2f)" % (
                        theta * 60., x, y)

                    gal, psf = i3s.setup_simple(
                        boxsize=96,
                        shear=(g, 0.0),
                        psf_size=self.params["psf_size"],
                        size=self.params["Rc"],
                        neighbour_ellipticity=(0.0, 0.0),
                        neighbour_flux=self.params["fn"],
                        flux=self.params["fc"],
                        neighbour_size=self.params["Rn"],
                        neighbour=[x, y],
                        opt=meds.options)
                    res = i3s.i3s([gal.array], [psf], meds=meds)

                    evec_t.append([res.e1, res.e2])
                    centroid.append(
                        np.sqrt(res.ra_as * res.ra_as +
                                res.dec_as * res.dec_as))

                meane1 = np.array(evec_t).T[0].mean()
                meane2 = np.array(evec_t).T[1].mean()
                evec_g.append([meane1, meane2])

            # Finally we have a vector containing one mean measured shape for each input shear value
            # Calculate m
            residual_e1 = np.array(evec_g).T[0] - np.array(shears)
            residual_e2 = np.array(evec_g).T[1]

            m = (residual_e1[-1] - residual_e1[0]) / (shears[-1] - shears[0])

            print "---------------------- m=%f" % m
            print centroid

            self.m.append([
                (residual_e1[-1] - residual_e1[0]) / (shears[-1] - shears[0]),
                (residual_e2[-1] - residual_e2[0]) / (shears[-1] - shears[0])
            ])
            self.centroid.append(
                [np.array(centroid).mean(),
                 np.array(centroid).max()])
            if abs(self.m[-1][0]) > 2: continue
            if (self.m[-1][0] < -0.01) and (np.array(centroid).mean() < 1):
                import pdb
                pdb.set_trace()
            self.write_output_line(filename)
            idone += 1

        print "Done all loops"
Example #8
    def generate_effective_noise(self, neighbour_data, central_data,
                                 selection):
        print "Creating neighbour realisation ",
        if self.index is None:
            i = np.random.randint(neighbour_data.truth[selection].size)
        else:
            i = self.index
        print "random index: %d" % i

        tile = central_data.res[selection][i]["tilename"]
        cid = central_data.res[selection][i]["coadd_objects_id"]

        # Load the data from the MEDS file or the cache
        if tile in meds_cache.keys():
            meds, meds_ref = meds_cache[tile]
        else:
            filename = glob.glob(
                "/share/des/disc6/samuroff/y1/hoopoe/y1a1-v2.2_10/meds/noise/%s*.fits*"
                % tile)[0]
            print "Loading MEDS file from %s" % filename
            meds = s.meds_wrapper(filename)
            filename = glob.glob(
                "/share/des/disc6/samuroff/y1/hoopoe/y1a1-v2.2_10/meds/%s*.fits*"
                % tile)[0]
            meds_ref = s.meds_wrapper(filename)
            meds_cache[tile] = meds, meds_ref

        # Find the relevant ids and extract the cutouts
        object_data = meds._fits["object_data"].read()
        ind = np.argwhere(object_data["id"] == cid)[0, 0]

        model = meds_ref.get_cutout_list(ind, type="model")[1:]
        image = meds.get_cutout_list(ind)[1:]
        seg = meds.get_cutout_list(ind, type="seg")[0]

        # Calculate the weights
        try:
            wts = get_cweight_cutout_nearest(image[0], seg)
        except:
            print "No weights could be generated (probably the seg map is empty)"
            wts = np.ones_like(seg)

        if (wts == 0).all():
            wts = np.ones_like(seg)

        pixel_variance = []
        for allco in zip(image, model):
            im = allco[0]
            mod = allco[1]

            boxsize = im.shape[0]

            pixels = im - mod * im[boxsize / 2, boxsize / 2] / mod[boxsize / 2,
                                                                   boxsize / 2]
            pixels *= wts
            pixels = pixels.flatten()

            pixel_variance.append(pixels.std())

        # Take the mean noise sigma in this stack
        noise_sigma = np.mean(pixel_variance)
        noise = np.random.normal(0, noise_sigma, (32, 32))

        print "Effective noise sigma = %3.3f" % noise_sigma

        return noise
Example #9
import os
import numpy as np

import py3shape as p3s
from py3shape import utils
from tools.im3shape import basic as i3s
# NB: the module `s` used below (providing meds_wrapper) is imported elsewhere
# in the original script; its import path is not shown in this excerpt.

Rc = 2.1
Rp = 1.3
Rn = 2.9
fc = 1650
fn = 473

Rc_med = 2.1
fc_med = 945
fn_med = 475
Rn_med = 1.47

m = s.meds_wrapper("/share/des/disc8/cambridge/meds/DES2111+0043-r-sim-ohioA6-meds-y1a1-beta.fits.fz")

def make_sersic(e1, e2):
    gal = i3s.setup_simple(boxsize=32, shear=(e1, e2), psf_size=Rp, size=Rc_med,
                           neighbour_ellipticity=(0.0, 0.0), neighbour_flux=fn_med,
                           flux=fc_med, neighbour_size=Rn_med, neighbour=[np.inf, 0],
                           opt=m.options)
    return gal[0].array

g, p = i3s.setup_simple(boxsize=32, shear=(0.05, 0.2), psf_size=Rp, size=Rc_med,
                        neighbour_ellipticity=(0.0, 0.0), neighbour_flux=fn_med,
                        flux=fc_med, neighbour_size=Rn_med, neighbour=[np.inf, 0],
                        opt=m.options)

os.system("touch /home/samuroff/hoopoe_paper/toy_model_data/simple_1parfit_e_maxsamples_g1_0.05_g2_0.2_noise8.txt")



gvec = np.linspace(-0.06, 0.2, 200)

for i in xrange(30000):
    lvec_noisy0=[]