Example #1
import os
import shutil
import tempfile

import numpy as np
import pyfits
from pyraf import iraf

# debug(), info(), and warn() are assumed to be this module's logging helpers;
# parse_coo_file(), combine_cube_frames(), and frames_to_cube() are likewise
# assumed to be defined elsewhere in the same module.

def split_frames(cubefile, range_pairs, target_dir):
    dirname, filename = os.path.split(cubefile)
    filebase = filename.rsplit('.', 1)[0]
    if len(filebase) > 8:
        warn("IRAF doesn't like long filenames. "
             "Consider shortening the cube filename ({0})".format(filebase))
    
    outfiles = []
    
    for fromidx, toidx in range_pairs:
        for i in range(fromidx, toidx+1):
            infile = cubefile + "[*,*,{0}]".format(i)
            outfile = os.path.join(target_dir, 'frame_{0:04d}.fit'.format(i))
            debug("imcopy", infile, outfile)
            iraf.imcopy(  # easier to preserve headers with imcopy than with pyfits
                input=infile,
                output=outfile
            )
            outfiles.append(outfile)
        
        # Alternative approach (disabled): sum each (fromidx, toidx) range into
        # a single frame with iraf.imcombine(combine="sum", reject="none")
        # instead of copying the frames out individually.
    return outfiles
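
A minimal usage sketch (the cube path, frame ranges, and output directory here are hypothetical): extract frames 1-10 and 21-30 of a cube into per-frame FITS files.

ranges = [(1, 10), (21, 30)]  # 1-based, inclusive frame ranges
frames = split_frames('cube.fit', ranges, '/tmp/frames')
# one output file per selected plane, e.g. /tmp/frames/frame_0001.fit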
Example #2
def daofind_brightest(filename, fwhmpsf=2.5, threshold=20.0):
    debug("finding brightest in", filename)
    data = pyfits.getdata(filename)
    sigma = np.std(data) * 0.75  # approximate background stddev; the full-frame value is inflated because we don't know where the star is yet
    
    _dao_setup(fwhmpsf, threshold, sigma)
    
    tmp_target_dir = tempfile.mkdtemp()
    outfile = os.path.join(tmp_target_dir, 'daofind.coo')
    
    iraf.noao.digiphot.apphot.daofind.run(
        image=filename,
        output=outfile,
        interactive=False,
        verify=False,
    )

    found_stars = parse_coo_file(outfile)
    if len(found_stars) == 0:
        warn("HAX: halving fwhmpsf to try and get a detection")
        # FIXME: kludge to get AAS 2013 data 
        _dao_setup(fwhmpsf, threshold / 2.0, sigma)
        outfile = os.path.join(tmp_target_dir, 'daofind.coo.2')
    
        iraf.noao.digiphot.apphot.daofind.run(
            image=filename,
            output=outfile,
            interactive=False,
            verify=False,
        )
        found_stars = parse_coo_file(outfile)
        
    debug(found_stars)
    found_stars.sort(order=['MAG'])
    brightest = found_stars[0]
    debug("brightest found @", brightest['XCENTER'], ',', brightest['YCENTER'], 'with mag', brightest['MAG'])
    
    shutil.rmtree(tmp_target_dir)
    return brightest
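
_dao_setup is not shown in these examples. A plausible sketch, assuming it simply pushes the three values into the shared apphot parameter sets that daofind reads (datapars for the image statistics, findpars for the detection threshold):

def _dao_setup(fwhmpsf, threshold, sigma):
    # Assumed implementation: daofind takes these settings from the shared
    # datapars/findpars parameter sets rather than as direct arguments.
    apphot = iraf.noao.digiphot.apphot
    apphot.datapars.fwhmpsf = fwhmpsf      # stellar FWHM in scale units
    apphot.datapars.sigma = sigma          # background standard deviation
    apphot.findpars.threshold = threshold  # detection threshold in sigmas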
Example #3
def aoavgcube(cubefile, outfile, newexposure, fromidx=1, toidx=None):
    """
    Combine (without rejection) many short exposures in a data cube
    to approximate a "seeing disk" from a longer integration time.
    
    cubefile - input data cube path
    outfile - output data cube path
    fromidx - 1-based index of start of combining range (default: 1)
    toidx - 1-based index of end of range (default: None, end of cube)
    """
    newexposure = float(newexposure)
    ffile = pyfits.open(cubefile)
    header = ffile[0].header
    data = ffile[0].data

    if 'EXPOSURE' in header:
        oldexposure = float(header['EXPOSURE'])
    elif 'EXPTIME' in header:
        oldexposure = float(header['EXPTIME'])
    else:
        raise ValueError("No exposure time keyword (EXPOSURE or EXPTIME) found in the data cube header!")

    assert newexposure > oldexposure, ("Can't get a shorter exposure time by combining frames! "
        "oldexposure {0} >= newexposure {1}".format(oldexposure, newexposure))

    frames_per_combined = int(newexposure / oldexposure)
    epsilon = (newexposure / oldexposure) - frames_per_combined
    if epsilon > 0.01:
        warn("New exposure is not an integer multiple of old exposure; truncating to", frames_per_combined, "frames per combined frame")
    assert frames_per_combined > 1, "Only one old frame per combined frame; this is probably not what you want"
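    # Illustrative arithmetic (hypothetical values): oldexposure = 0.04 s and
    # newexposure = 2.0 s give frames_per_combined = 50, i.e. fifty cube planes
    # summed into each output frame.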

    if toidx is None or toidx <= fromidx:
        # use the entire cube
        toidx = data.shape[0]
    else:
        assert toidx <= data.shape[0], "toidx ({0}) > number of frames ({1})".format(toidx, data.shape[0])
    assert data.shape[0] > 1, "Only one frame found! Is this a data cube?"

    total_frames = toidx - fromidx + 1  # both indices are inclusive
    debug("toidx=", toidx, "fromidx=", fromidx)

    if total_frames % frames_per_combined != 0:
        warn("Total frames in range (", total_frames, ") is not an integer multiple "
             "of the number of frames per combined exposure (", frames_per_combined,
             ") so", total_frames % frames_per_combined, "frames from the end of "
             "the range will be left off.")
        toidx = toidx - (total_frames % frames_per_combined)
        total_frames = toidx - fromidx + 1

    total_combined_frames = int(total_frames / frames_per_combined)
    info("Output data cube will have", total_combined_frames, "total frames of", newexposure, "sec exposure")
    info("Processing input data cube frames", fromidx, "to", toidx)
    target_dir = tempfile.mkdtemp()
    info("Created working directory {0} for intermediate data".format(target_dir))

    try:
        range_pairs = [(
            fromidx + n * frames_per_combined,
            fromidx + (n + 1) * frames_per_combined - 1
        ) for n in range(total_combined_frames)]
        
        frame_paths = combine_cube_frames(cubefile, range_pairs, target_dir)
        frames_to_cube(frame_paths, outfile)
    finally:
        # Don't leave clutter behind, even if the task fails
        shutil.rmtree(target_dir)
        info("Removed working directory {0}".format(target_dir))