Example #1
def mfs_image(**kwargs):
    cmd, p = img_cmd(**kwargs)

    # Perform MFS imaging
    utils.log("...starting MFS image of {} data".format(p.mfstype), f=p.lf, verbose=p.verbose)
    icmd = cmd + ['--image_mfs', '--msin', p.datafile, '--spw', p.spw]
    if p.mfstype == 'resid':
        icmd += ['--uvsub']
    if p.source_ext == '':
        source_ext = ''
    else:
        source_ext = '{}_'.format(p.source_ext)
    if p.mfstype == '':
        mfstype = ''
    else:
        mfstype = '_{}'.format(p.mfstype)
    icmd += ['--source_ext', "{}{}".format(source_ext, mfstype)]

    ecode = subprocess.check_call(icmd)

    if p.mfstype == 'resid':
        # Apply gaintables to make CORRECTED column as it was
        utils.log("...reapplying gaintables to CORRECTED data", f=p.lf, verbose=p.verbose)
        cmd2 = p.casa + ["-c", "{}/sky_cal.py".format(casa_scripts), "--msin", p.datafile, "--gaintables"] + p.gaintables
        ecode = subprocess.check_call(cmd2)
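A minimal sketch of the keyword arguments mfs_image() evidently consumes, inferred from the attribute accesses above; every value below is hypothetical, as is the CASA launch list, and img_cmd() will expect its own imaging parameters on top of these:

mfs_kwargs = dict(
    datafile="zen.2458101.HH.uvR.MS",        # hypothetical measurement set
    mfstype="corr",                          # '', 'corr', 'model', or 'resid'
    spw="0:100~900",                         # hypothetical spectral-window selection
    source_ext="",                           # optional filename tag
    lf=None,                                 # open log-file handle (or None)
    verbose=True,
    casa=["casa", "--nologger", "--nogui"],  # hypothetical CASA launch command
    gaintables=[],                           # reapplied only when mfstype == 'resid'
)
# mfs_image(**mfs_kwargs)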
Example #2
def gen_model(**kwargs):
    p = Dict2Obj(**kwargs)
    utils.log("\n{}\n...Generating a Flux Model", f=p.lf, verbose=p.verbose)

    # compile complist_gleam.py command
    cmd = casa + ["-c", "{}/complist_gleam.py".format(casa_scripts)]
    cmd += [
        '--point_ra', p.source_ra, '--point_dec', p.latitude, '--outdir',
        p.out_dir, '--gleamfile', p.gleamfile, '--radius', p.radius,
        '--min_flux', p.min_flux, '--freqs', p.freqs, '--cell', p.cell,
        '--imsize', p.imsize
    ]
    if p.image:
        cmd += ['--image']
    if p.use_peak:
        cmd += ['--use_peak']
    if p.overwrite:
        cmd += ['--overwrite']
    if hasattr(p, 'regions'):
        cmd += [
            '--regions', p.regions, '--exclude', '--region_radius',
            p.region_radius
        ]
    if hasattr(p, 'file_ext'):
        cmd += ['--ext', p.file_ext]
    else:
        p.file_ext = ''
    cmd = map(str, cmd)
    ecode = subprocess.call(cmd)

    modelstem = os.path.join(p.out_dir, "gleam{}.cl".format(p.file_ext))
    model = modelstem
    if p.image:
        model += ".image"

    # print the observer location / time and beam arguments being used
    print(p.longitude)
    print(p.latitude)
    print(p.time)

    # beam args
    print(p.beamfile)
    print(p.out_dir)

    # pbcorrect
    if p.pbcorr:
        # Do primary beam correction
        pbcorr(modelname=modelstem)
        # importfits
        cmd = p.casa + [
            "-c", "importfits('{}', '{}', overwrite={})".format(
                modelstem + '.pbcorr.fits', modelstem + '.pbcorr.image',
                p.overwrite)
        ]
        ecode = subprocess.call(cmd)
        model = modelstem + ".pbcorr.image"

    return model
Example #3
def gen_model(**kwargs):
    p = Dict2Obj(**kwargs)
    utils.log("\n{}\n...Generating a Flux Model", f=p.lf, verbose=p.verbose)

    # compile complist_gleam.py command
    cmd = casa + ["-c", "{}/complist_gleam.py".format(casa_scripts)]
    cmd += ['--point_ra', p.source_ra, '--point_dec', p.latitude, '--outdir', p.out_dir, 
            '--gleamfile', p.gleamfile, '--radius', p.radius, '--min_flux', p.min_flux,
            '--freqs', p.freqs, '--cell', p.cell, '--imsize', p.imsize]
    if p.image:
        cmd += ['--image']
    if p.use_peak:
        cmd += ['--use_peak']
    if p.overwrite:
        cmd += ['--overwrite']
    if hasattr(p, 'regions'):
        cmd += ['--regions', p.regions, '--exclude', '--region_radius', p.region_radius]
    if hasattr(p, 'file_ext'):
        cmd += ['--ext', p.file_ext]
    else:
        p.file_ext = ''
    cmd = map(str, cmd)
    ecode = subprocess.check_call(cmd)

    modelstem = os.path.join(p.out_dir, "gleam{}.cl".format(p.file_ext))
    model = modelstem
    if p.image:
        model += ".image"

    # pbcorrect
    if p.pbcorr:
        utils.log("...applying PB to model", f=p.lf, verbose=p.verbose)
        assert p.image, "Cannot pbcorrect flux model without image == True"
        cmd = ["pbcorr.py", "--lon", p.longitude, "--lat", p.latitude, "--time", p.time, "--pols"] \
               + [uvutils.polstr2num(pol) for pol in p.pols] \
               + ["--outdir", p.out_dir, "--multiply", "--beamfile", p.beamfile]
        if p.overwrite:
            cmd.append("--overwrite")
        cmd.append(modelstem + '.fits')

        # generate component list and / or image cube flux model
        cmd = map(str, cmd)
        ecode = subprocess.check_call(cmd)
        modelstem = os.path.join(p.out_dir, modelstem)

        # importfits
        cmd = p.casa + ["-c", "importfits('{}', '{}', overwrite={})".format(modelstem + '.pbcorr.fits', modelstem + '.pbcorr.image', p.overwrite)]
        ecode = subprocess.check_call(cmd)
        model = modelstem + ".pbcorr.image"

    return model
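A minimal sketch of the keyword arguments this gen_model() variant reads (all values hypothetical; note that --point_dec is fed p.latitude above, and the pols list is run through uvutils.polstr2num):

model_kwargs = dict(
    out_dir="./", gleamfile="gleam_catalog.fits",        # hypothetical GLEAM catalog path
    radius=20.0, min_flux=0.5,                           # source selection cuts
    freqs="100,150,200", cell="300arcsec", imsize=512,   # hypothetical model image grid
    source_ra=30.0, latitude=-30.72, longitude=21.43,
    time="2018-01-01T00:00:00",                          # hypothetical observation time
    image=True, use_peak=False, overwrite=True,
    pbcorr=True, pols=["XX", "YY"], beamfile="beam.fits",  # hypothetical beam model
    casa=["casa", "--nologger", "--nogui"],              # hypothetical CASA launch command
    lf=None, verbose=True,
)
# model = gen_model(**model_kwargs)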
Example #4
def image(**img_kwargs):
    # Perform MFS of corrected data
    if img_kwargs['image_mfs']:
        kwargs = dict(
            list(img_kwargs.items()) + list(global_vars(varlist).items()))
        kwargs['mfstype'] = 'corr'
        mfs_image(**kwargs)

    # Perform MFS of model data
    if img_kwargs['image_mdl']:
        kwargs = dict(
            list(img_kwargs.items()) + list(global_vars(varlist).items()))
        mfile = "{}.model".format(datafile)
        if not os.path.exists(mfile):
            utils.log(
                "Didn't split model from datafile, which is required to image the model",
                f=lf,
                verbose=verbose)
        else:
            kwargs['datafile'] = mfile
            kwargs['mfstype'] = 'model'
            mfs_image(**kwargs)

    # Perform MFS of residual data
    if img_kwargs['image_res']:
        kwargs = dict(
            list(img_kwargs.items()) + list(global_vars(varlist).items()))
        kwargs['mfstype'] = 'resid'
        mfs_image(**kwargs)

    # Get spectral cube of corrected data
    if img_kwargs['image_spec']:
        kwargs = dict(
            list(img_kwargs.items()) + list(global_vars(varlist).items()))
        spec_image(**kwargs)

    # Get spectral cube of model data
    if img_kwargs['image_mdl_spec']:
        kwargs = dict(
            list(img_kwargs.items()) + list(global_vars(varlist).items()))
        mfile = "{}.model".format(datafile)
        if not os.path.exists(mfile):
            utils.log(
                "Didn't split model from datafile, which is required to image the model",
                f=lf,
                verbose=verbose)
        else:
            kwargs['datafile'] = mfile
            spec_image(**kwargs)
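The dict(list(...items()) + list(...items())) construction above merges the per-call keyword arguments with the module globals selected by global_vars(varlist), with the right-hand dict winning on key collisions. On Python 3 the same merge can be written more directly (dicts below are hypothetical):

img_kwargs = {"image_mfs": True, "spw": "0:100~900"}        # hypothetical per-call options
globals_subset = {"datafile": "zen.2458101.HH.uvR.MS",      # hypothetical global_vars() output
                  "verbose": True}
kwargs = {**img_kwargs, **globals_subset}                   # later entries override earlier ones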
Example #5
def spec_image(**kwargs):
    cmd, p = img_cmd(**kwargs)

    # Perform Spectral Cube imaging
    utils.log("...starting {} spectral cube imaging".format(p.datafile), f=p.lf, verbose=p.verbose)
    icmd = cmd + ['--spec_cube', '--msin', p.datafile,
                  '--spec_start', str(p.spec_start), '--spec_end', str(p.spec_end),
                  '--spec_dchan', str(p.spec_dchan)] 
    if p.source_ext == '':
        source_ext = ''
    else:
        source_ext = '{}_'.format(p.source_ext)
    icmd += ['--source_ext', "{}spec".format(source_ext)]

    ecode = subprocess.check_call(icmd)

    # Collate output images and run a source extraction
    img_cube_template = "{}.{}spec.chan????.image.fits".format(p.datafile, source_ext)
    img_cube = sorted(glob.glob(img_cube_template))
    if p.source_extract:
        utils.log("...extracting {} from {} cube".format(p.source, img_cube_template), f=p.lf, verbose=p.verbose)
        if len(img_cube) == 0:
            utils.log("...no image cube files found, cannot extract spectrum", f=p.lf, verbose=p.verbose)
        else:
            cmd = ["source_extract.py", "--source", p.source, "--source_ra", p.source_ra, "--source_dec", p.source_dec,
                   "--radius", p.radius, '--pols'] + p.pols \
                   + ["--outdir", p.out_dir, "--gaussfit_mult", p.gauss_mult, "--source_ext", p.source_ext]
            if p.overwrite:
                cmd += ["--overwrite"]
            if p.plot_fit:
                cmd += ["--plot_fit"]
            cmd += img_cube
            cmd = map(str, cmd)
            ecode = subprocess.check_call(cmd)
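A small, self-contained sketch of what the glob template above is meant to collect, for a hypothetical measurement set and an empty source_ext (the chan???? wildcard matches the four-character channel tag in each per-channel image name):

import glob
# e.g. zen.2458101.HH.MS.spec.chan0000.image.fits, zen.2458101.HH.MS.spec.chan0040.image.fits, ...
img_cube = sorted(glob.glob("zen.2458101.HH.MS.spec.chan????.image.fits"))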
Example #6
def calibrate(**cal_kwargs):
    kwargs = dict(list(cal_kwargs.items()) + list(global_vars(varlist).items()))
    p = Dict2Obj(**kwargs)

    # compile command
    cmd = p.casa + ["-c", "{}/sky_cal.py".format(casa_scripts)]
    cmd += ["--msin", p.datafile, "--out_dir", p.out_dir, "--model", p.model,
            "--refant", p.refant, "--gain_spw", p.gain_spw, "--uvrange", p.uvrange, "--timerange",
            p.timerange, "--ex_ants", p.ex_ants, "--gain_ext", p.gain_ext, '--bp_spw', p.bp_spw]
    if p.source_ra is not None:
        cmd += ["--source_ra", p.source_ra]
    if p.source_dec is not None:
        cmd += ["--source_dec", p.source_dec]

    if isinstance(p.gaintables, list):
        gtables = p.gaintables
    else:
        if p.gaintables in ['', None, 'None', 'none']:
            gtables = []
        else:
            gtables = [p.gaintables]
    if len(gtables) > 0:
        cmd += ['--gaintables'] + gtables

    if p.rflag:
        cmd += ["--rflag"]
    if p.KGcal:
        cmd += ["--KGcal", "--KGsnr", p.KGsnr]
    if p.Acal:
        cmd += ["--Acal", "--Asnr", p.Asnr]
    if p.BPcal:
        cmd += ["--BPcal", "--BPsnr", p.BPsnr, "--bp_spw", p.bp_spw]
    if p.BPsolnorm:
        cmd += ['--BPsolnorm']
    if p.split_cal:
        cmd += ["--split_cal", "--cal_ext", p.cal_ext]
    if p.split_model:
        cmd += ["--split_model"]
    cmd = [' '.join(_cmd) if isinstance(_cmd, list) else str(_cmd) for _cmd in cmd]

    utils.log("...starting calibration", f=p.lf, verbose=p.verbose)
    ecode = subprocess.check_call(cmd)

    # Gather gaintables
    gext = ''
    if p.gain_ext not in ['', None]:
        gext = '.{}'.format(p.gain_ext)
    gts = sorted(glob.glob("{}{}.?.cal".format(p.datafile, gext)) + glob.glob("{}{}.????.cal".format(p.datafile, gext)))

    # export to calfits if desired
    if p.export_gains:
        utils.log("...exporting\n{}\n to calfits and combining into a single cal table".format('\n'.join(gts)), f=p.lf, verbose=p.verbose)
        # do file checks
        mirvis = os.path.splitext(p.datafile)[0]
        gtsnpz = ["{}.npz".format(gt) for gt in gts]
        if not os.path.exists(mirvis):
            utils.log("...{} doesn't exist: cannot export gains to calfits".format(mirvis), f=p.lf, verbose=p.verbose)

        elif len(gts) == 0:
            utils.log("...no gaintables found, cannot export gains to calfits", f=p.lf, verbose=p.verbose)

        elif not np.all([os.path.exists(gtnpz) for gtnpz in gtsnpz]):
            utils.log("...couldn't find a .npz file for all input .cal tables, can't export to calfits", f=p.lf, verbose=p.verbose)

        else:
            calfits_fname = "{}.{}{}.calfits".format(os.path.basename(mirvis), p.source, p.gain_ext)
            cmd = ['skynpz2calfits.py', "--fname", calfits_fname, "--uv_file", mirvis, '--out_dir', p.out_dir]
            if p.overwrite:
                cmd += ["--overwrite"]
            # add a delay and phase solution
            matchK = ["K.cal.npz" in gt for gt in gtsnpz]
            matchGphs = ["Gphs.cal.npz" in gt for gt in gtsnpz]
            if np.any(matchK):
                dly_files = [gtsnpz[i] for i, b in enumerate(matchK) if b]
                cmd += ["--plot_dlys"]
                cmd += ["--dly_files"] + dly_files
                if not np.any(matchGphs):
                    utils.log("...WARNING: delay file(s) {} found, but no mean phase file, which is needed when a delay file is present.".format(dly_files), f=p.lf, verbose=p.verbose)
            if np.any(matchGphs):
                cmd += ["--plot_phs"]
                cmd += ["--phs_files"] + [gtsnpz[i] for i, b in enumerate(matchGphs) if b == True]

            # add a mean amp solution
            matchGamp = ["Gamp.cal.npz" in gt for gt in gtsnpz]
            if np.any(matchGamp):
                cmd += ["--plot_amp"]
                cmd += ["--amp_files"] + [gtsnpz[i] for i, b in enumerate(matchGamp) if b == True]

            # add a bandpass solution
            matchB = ["B.cal.npz" in gt for gt in gtsnpz]
            if np.any(matchB):
                cmd += ["--plot_bp"]
                cmd += ["--bp_files"] + [gtsnpz[i] for i, b in enumerate(matchB) if b == True]

            # additional smoothing options
            if p.smooth:
                cmd += ["--bp_gp_smooth", "--bp_gp_max_dly", p.gp_max_dly]
            if p.medfilt:
                cmd += ["--bp_medfilt", "--medfilt_kernel", p.kernel]
            if p.bp_broad_flags:
                cmd += ["--bp_broad_flags", "--bp_flag_frac", p.bp_flag_frac]
            if not p.verbose:
                cmd += ['--silence']

            cmd = map(str, cmd)
            ecode = subprocess.check_call(cmd)

            # convert calfits back to a single Btotal.cal table
            if np.any(matchB):
                # convert to cal
                bfile = gts[matchB.index(True)]
                btot_file = os.path.join(p.out_dir, "{}{}.Btotal.cal".format(os.path.basename(p.datafile), gext))
                cmd = p.casa + ["-c", "{}/calfits_to_Bcal.py".format(casa_scripts), "--cfits", os.path.join(p.out_dir, calfits_fname), "--inp_cfile", bfile, "--out_cfile", btot_file]
                if p.overwrite:
                    cmd += ["--overwrite"]
                ecode = subprocess.check_call(cmd)
                # replace gaintables with Btotal.cal
                gts = [btot_file]

    # append to gaintables
    gtables += gts

    return gtables
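A self-contained sketch of the substring-match pattern used above to pair exported .cal.npz tables with their solution type before building the skynpz2calfits.py call (file names are hypothetical):

import numpy as np

gtsnpz = ["data.MS.K.cal.npz", "data.MS.Gphs.cal.npz", "data.MS.B.cal.npz"]
matchK = ["K.cal.npz" in gt for gt in gtsnpz]        # delay solutions
matchB = ["B.cal.npz" in gt for gt in gtsnpz]        # bandpass solutions
if np.any(matchK):
    dly_files = [gtsnpz[i] for i, b in enumerate(matchK) if b]
    print(dly_files)                                 # -> ['data.MS.K.cal.npz']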
Example #7
# Setup (Small) Global Variable Dictionary
varlist = ['datafile', 'verbose', 'overwrite', 'out_dir', 'casa', 'source_ra', 'source_dec', 'source',
           'longitude', 'latitude', 'lf', 'gaintables']

def global_vars(varlist=[]):
    d = []
    for v in varlist:
        try:
            d.append((v, globals()[v]))
        except KeyError:
            continue
    return dict(d)

# Print out parameter header
time = datetime.utcnow()
utils.log("Starting skycal_pipe.py on {}\n{}\n".format(time, '-'*60), 
             f=lf, verbose=verbose)
_cf = copy.copy(cf)
_cf.pop('algorithm')
utils.log(json.dumps(_cf, indent=1) + '\n', f=lf, verbose=verbose)

# Setup a dict->object converter
class Dict2Obj:
    def __init__(self, **entries):
        self.__dict__.update(entries)
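
# A quick, hypothetical illustration of the two helpers above: global_vars()
# snapshots whichever names in varlist are currently defined as module globals,
# and Dict2Obj exposes a merged kwargs dict through attribute access, e.g.
#   p = Dict2Obj(**dict(list({'spw': '0:100~900'}.items())
#                       + list(global_vars(['datafile', 'verbose']).items())))
#   p.spw        -> '0:100~900'
#   p.datafile   -> value of the module-level `datafile` (if defined)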

#-------------------------------------------------------------------------------
# Search for a Source and Prepare Data for MS Conversion
#-------------------------------------------------------------------------------
if params['prep_data']:
    # start block
    time = datetime.utcnow()