# Example #1
def m2fs_imcombine(scifnames, outfname):
    """Combine science frames with inverse-variance weighting.

    Loads every frame in ``scifnames``, averages them pixelwise with
    weights 1/err^2, propagates the error as sqrt(1/sum(w)), and writes
    image + error to ``outfname``.  Header handling is still a TODO:
    only the first frame's header is carried over.
    """
    images, errors, headers = m2fs_load_files_two(scifnames)
    # Inverse-variance weights per frame
    weights = errors**-2.
    weight_total = np.sum(weights, axis=0)
    combined = np.sum((images*weights),axis=0)/weight_total
    combined_err = np.sqrt(1/weight_total)
    # TODO fix header!!!
    print("Combined science frames to {}".format(outfname))
    print("TODO: fix header!!!")
    write_fits_two(outfname, combined, combined_err, headers[0])
# Example #2
def m2fs_make_master_dark(filenames, outfname, exptime=3600., corners_only=False):
    """
    Make a master dark by taking the median of all dark frames
    """
    # Load all dark frames and their headers
    darks, darkerrs, headers = m2fs_load_files_two(filenames)
    outheader = headers[0]

    # Rescale every frame to the common exposure time
    for i, hdr in enumerate(headers):
        texp = hdr["EXPTIME"]
        if texp != exptime:
            darks[i] = darks[i] * exptime/texp
            # NOTE(review): error scales by sqrt(texp/exptime) here; for
            # Poisson noise one might expect sqrt(exptime/texp) -- confirm.
            darkerrs[i] = darkerrs[i] * np.sqrt(texp/exptime)

    # Collapse: pixelwise median image, errors summed in quadrature
    master = np.median(darks, axis=0)
    master_err = np.sqrt(np.sum(darkerrs**2, axis=0))

    if corners_only:
        # HACK: zero the dark everywhere except the four 200-pixel
        # corner squares, so dark current is only subtracted there.
        ncorner = 200
        zero_out = np.ones_like(master, dtype=bool)
        nx, ny = zero_out.shape
        for xs in (slice(0, ncorner), slice(nx - ncorner, nx)):
            for ys in (slice(0, ncorner), slice(ny - ncorner, ny)):
                zero_out[xs, ys] = False
        master[zero_out] = 0.0
    # else: keep the full median dark (an old percentile-clipping
    # experiment used to live here).

    # Record the common exposure time in the output header
    _ = outheader.pop("EXPTIME")
    outheader["EXPTIME"] = exptime

    write_fits_two(outfname, master, master_err, outheader)
    print("Created dark frame with texp={} and wrote to {}".format(
            exptime, outfname))
    if corners_only:
        print("Only dark subtracting in the corners!")
# Example #3
def m2fs_remove_cosmics(filenames,suffix="crr",output_masks=False):
    """
    Remove cosmic rays by finding huge outliers.
    Replace with the median of all filenames.

    Parameters
    ----------
    filenames : list of science FITS filenames (same field, same setup)
    suffix : appended to each basename for the cleaned output file
    output_masks : if True, also write a <name>_mask.fits with the
        integer cosmic-ray mask for each frame
    """
    imgain = 1.0 # gain has already been applied in m2fs_biastrim
    sciimgs, scierrs, headers = m2fs_load_files_two(filenames)
    readnoises = [h["ENOISE"] for h in headers]
    exptimes = [h["EXPTIME"] for h in headers]
    # Rescale by exptime so frames of different lengths are comparable,
    # then take the pixelwise median across frames (counts per second)
    imgmeds = sciimgs.copy()
    for k in range(len(filenames)):
        exptime = exptimes[k]
        imgmeds[k] = imgmeds[k] / exptime
    bigmedian = np.median(imgmeds, axis=0)
    def make_out_name(fname):
        # foo.fits -> foo<suffix>.fits in the same directory
        name = os.path.basename(fname)
        twd = os.path.dirname(fname)
        assert name.endswith(".fits")
        return os.path.join(twd,name[:-5]+suffix+".fits")
    def make_mask_name(fname):
        # foo.fits -> foo_mask.fits in the same directory
        name = os.path.basename(fname)
        twd = os.path.dirname(fname)
        assert name.endswith(".fits")
        return os.path.join(twd,name[:-5]+"_mask.fits")
    for k in range(len(filenames)):
        # Calculate sigma value = sqrt(readnoise^2+median*gain)
        bigmedian_k = bigmedian * exptimes[k]
        #sigma_d = scierrs[k]
        sigma_d = np.sqrt(readnoises[k]**2 + imgain * bigmedian_k)
        # Pixels > 5 sigma from the median are considered cosmic rays
        # Replace them with the median value
        # They should have a big error already because large counts, so do not change the error?
        mask = np.abs(sciimgs[k]) > 5*sigma_d + bigmedian_k
        sciimgs[k][mask] = bigmedian_k[mask]
        # Write out cleaned data
        headers[k].add_history("m2fs_crbye: replaced {} cosmic ray pixels".format(mask.sum()))
        write_fits_two(make_out_name(filenames[k]), sciimgs[k], scierrs[k], headers[k])
        print("m2fs_crbye: removed {}/{} pixels from {}".format(mask.sum(),mask.size, filenames[k]))
        
        if output_masks:
            hdu = fits.PrimaryHDU(mask.T.astype(int), headers[k])
            # BUG FIX: previously used `fname`, which is undefined in this
            # scope (NameError whenever output_masks=True); use the current
            # frame's filename instead.
            hdu.writeto(make_mask_name(filenames[k]))
# Example #4
def m2fs_make_master_flat(filenames, outfname):
    """Build a master flat as the pixelwise median of the input flats."""
    imgs, errs, headers = m2fs_load_files_two(filenames)
    flat = np.median(imgs, axis=0)
    # NOTE(review): this is the median of the per-frame errors, not a
    # propagated error for a median -- confirm this is intended.
    flaterr = np.median(errs, axis=0)
    write_fits_two(outfname, flat, flaterr, headers[0])
    print("Created master flat and wrote to {}".format(outfname))
# Example #5
def m2fs_flat(scifnames, flatfname, tracefname, fibermapfname,
              yaper = 7, x_begin=900, x_end=1300):
    """Flat-field the science frames.

    From the flat frame and fiber traces, builds:
      * "flatsun"   -- each extraction window normalized by its column max,
      * "flatsmooth" -- a rejected 6th-order polynomial fit along x per fiber,
      * "flatpix"   -- the pixel-to-pixel flat = flatclean / flatsmooth,
    writes diagnostic FITS images for each, then divides every science
    frame (image and error) by the pixel flat and writes <name>f.fits.

    Parameters
    ----------
    scifnames : list of science FITS filenames
    flatfname : flat-field FITS filename
    tracefname : text file of per-fiber trace polynomial coefficients
    fibermapfname : text file mapping fibers to orders (norder = #columns)
    yaper : extraction aperture height in pixels around each trace
    x_begin, x_end : x-pixel limits applied to the first / last order
    """
    owd = os.path.dirname(flatfname)
    flatname = os.path.basename(flatfname)[:-5]

    # NOTE(review): yaper_arr is only referenced by the commented-out
    # spatial-profile fit further below.
    yaper_arr = np.arange(1,1+yaper)
    sciimgs, scierrs, sciheaders = m2fs_load_files_two(scifnames)
    flatimg, flaterr, flatheaders= read_fits_two(flatfname)
    tracecoefs = np.loadtxt(tracefname)
    fibmap = np.loadtxt(fibermapfname)
    norder = fibmap.shape[1]
    
    nfib = len(tracecoefs)
    nx, ny = flatimg.shape
    xarr = np.arange(nx)
    
    # Evaluate each fiber's trace polynomial at every x position
    ypeak = np.zeros((nx,nfib))
    for i in range(nfib):
        ypeak[:,i] = np.polyval(tracecoefs[i,:], xarr)
    vround = np.vectorize(lambda x: int(round(x)))
    # Bottom edge of the yaper-tall extraction window at each (x, fiber)
    ymins = vround(ypeak) - int(yaper/2.)
    
    # Generate valid pixels: first order starts at x_begin, last order ends at x_end
    xarrlist = [np.arange(nx) for i in range(norder)]
    xarrlist[0] = np.arange(nx-x_begin)+x_begin
    xarrlist[-1] = np.arange(x_end)
    
    # Do the fibers from bottom to top
    flatsun = np.ones((nx,ny))
    for i in range(nfib):
        sys.stdout.write("\r  Running fib {} order {}".format(i+1, i % norder + 1))
        xarr = xarrlist[i % norder]
        auxarr = np.zeros(nx)
        for j in xarr:
            ymin = ymins[j,i]
            # NOTE(review): window here is yaper-1 pixels tall, but every
            # other extraction below uses yaper -- possible off-by-one,
            # confirm which is intended.
            auxarr[j] = np.max(flatimg[j,ymin:(ymin+yaper-1)])
        auxarr_max = np.max(auxarr)
        auxarr_norm = auxarr/auxarr_max
        # Create flat with each row equal to max in each column
        for k in range(yaper):
            for j in xarr:
                ymin = ymins[j,i]
                flatsun[j,ymin+k] = auxarr_norm[j]
    flatclean = flatimg/flatsun
    print("\nFinished flatsun, flatclean")
    fits.PrimaryHDU(flatsun).writeto(os.path.join(owd,flatname+"_flatsun.fits"), overwrite=True)
    fits.PrimaryHDU(flatclean).writeto(os.path.join(owd,flatname+"_flatclean.fits"), overwrite=True)
    
    # Get another flat: a smooth polynomial fit along x within each fiber
    start = time.time()
    flatsmooth_all = np.zeros((nx,ny))
    # flatsmooth_correct is allocated but only used by commented-out code below
    flatsmooth_correct = np.zeros((nx,ny))
    flatpix = np.zeros((nx,ny))
    for i in range(nfib):
        sys.stdout.write("\r  Running fib {} order {}".format(i+1, i % norder + 1))
        mean_xarr = np.zeros(nx)
        max_xarr = np.zeros(nx)
        xarr = xarrlist[i % norder]
        for j in xarr:
            ymin = ymins[j,i]
            max_xarr[j] = np.max(flatclean[j,ymin:(ymin+yaper)])
            mean_xarr[j]=np.mean(flatclean[j,ymin:(ymin+yaper)])
        # Scale the mean profile up to the level of the max profile
        factor = np.mean(max_xarr)/np.mean(mean_xarr)
        corr_xarr = mean_xarr*factor
        
        # 6th-order polynomial fit along x with iterative rejection
        flatsmooth = np.zeros(nx)
        yfit, coeff = jds_poly_reject(xarr,corr_xarr[xarr], 6, 2, 2)
        smoothfit_coeff=coeff
        flatsmooth[xarr]=np.polyval(smoothfit_coeff, xarr)
        
        # a flat with all rows in each order equal to above fit
        for k in range(yaper):
            for j in xarr:
                ymin = ymins[j,i]
                flatsmooth_all[j,ymin+k] = flatsmooth[j]
        # Create flat with fit in y direction at all places
        #for j in xarr:
            #yflat_arr = np.zeros(yaper)
            #ymin = ymins[j,i]
            #yflat_arr[0:yaper] = flatimg[j,ymin:(ymin+yaper)]
            
            ## normalize y array
            #yflat_arr_max=np.max(yflat_arr)
            #yflat_arr_norm=yflat_arr/yflat_arr_max
            ## poly fit to spectra spatial profile at each x
            #yarr_coeff = np.polyfit(yaper_arr, yflat_arr_norm, 2)
            #yarr_fit=np.polyval(yarr_coeff,yaper_arr)
            #flatsmooth_correct[j,ymin:(ymin+yaper)] = flatsmooth_all[j,(ymin+yaper)]
        for j in xarr:
            ymin = ymins[j,i]
            # Pixel-to-pixel flat: residual of the cleaned flat vs the smooth fit
            #flatpix[j,ymin:(ymin+yaper)] = flatclean[j,ymin:(ymin+yaper)]/(flatsmooth_correct[j,ymin:(ymin+yaper)])
            flatpix[j,ymin:(ymin+yaper)] = flatclean[j,ymin:(ymin+yaper)]/(flatsmooth_all[j,ymin:(ymin+yaper)])
    print("\nFinished flatpix ({:.1f}s)".format(time.time()-start))
    fits.PrimaryHDU(flatpix).writeto(os.path.join(owd,flatname+"_flatpix.fits"), overwrite=True)
    # NOTE(review): flatpix_corr aliases flatpix (no copy), so the line below
    # also mutates flatpix; flatpix is not used afterwards, so this is benign.
    flatpix_corr = flatpix
    # Guard against divide-by-zero / negative flat values
    flatpix_corr[flatpix <= 0] = 1
    
    outs = sciimgs/flatpix_corr
    outerrs = scierrs/flatpix_corr
    for k, scifname in enumerate(scifnames):
        outdir = os.path.dirname(scifname)
        outname= os.path.basename(scifname)[:-5]
        outfname = outdir+"/"+outname+"f.fits"
        print("Writing",outfname)
        sciheaders[k].add_history("m2fs_flat: Applied flat field")
        sciheaders[k].add_history("m2fs_flat: Flat: {}".format(flatfname))
        write_fits_two(outfname, outs[k], outerrs[k], sciheaders[k])