Example no. 1
def run_casa(cmdlist):
    """
    Use drivecasa to run CASA commands
    """
    # Instantiate the drivecasa Casapy wrapper
    casa = drivecasa.Casapy()
    print('Running CASA command {}'.format(cmdlist))
    casa_output, casa_error = casa.run_script(cmdlist)
def casa_imaging(ms_file, Nfacet, NID):
    casa = drivecasa.Casapy(working_dir=os.path.curdir,
                            casa_logfile=False,
                            timeout=1200,
                            echo_to_stdout=False)
    casa.run_script(["ms_file='{}'".format(ms_file)])

    casa.run_script(["Nfacet={}".format(Nfacet)])
    casa.run_script(["NID={}".format(NID)])

    casa.run_script_from_file('/BIGDATA1/ac_shao_tan_1/OSKAR/IDOS/test/OSKAR_CASA/MPI/image.py', timeout=60000000)
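
For context, a minimal usage sketch of the two helpers above (the measurement-set path and the facet/ID values are placeholders, not taken from the original code):

# Hypothetical invocation; 'example.ms' is a placeholder path.
run_casa(['listobs(vis="example.ms")'])
casa_imaging('example.ms', Nfacet=4, NID=0)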
Example no. 3
 def setUp(self):
     self.casa = drivecasa.Casapy(echo_to_stdout=True)
     self.output_dir = os.path.join(drivecasa.default_test_ouput_dir,
                                    'ms_generation_test')
     self.output_ms_path = os.path.join(self.output_dir,
                                        'foobar.ms')
     # Ensure that the output dir will be created if not already present
     # (Also deletes the output ms if present)
     if os.path.isdir(self.output_dir):
         shutil.rmtree(self.output_dir)
     os.makedirs(self.output_dir)
Example no. 4
def run_casa(cmd, raise_on_severe=False, timeout=1800):
    """Run a list of casa commands"""
    casa = drivecasa.Casapy()
    try:
        casa_output, casa_error = casa.run_script(cmd,
                                                  raise_on_severe=True,
                                                  timeout=timeout)
        logger.debug('\n'.join(casa_error))
    except RuntimeError:
        logger.error("Casa command failed")
        if raise_on_severe:
            raise
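
For illustration, a hedged call to this wrapper (the flagdata command string is a placeholder, not from the original code):

# Hypothetical usage; 'obs.ms' is a placeholder path.
run_casa(['flagdata(vis="obs.ms", mode="summary")'], raise_on_severe=True, timeout=600)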
Example no. 5
def check_calibrated(msfile):
    if not _CASA:
        logger.error('requires drivecasa')
        return None

    try:
        casa = drivecasa.Casapy(casa_dir=casapy,
                                working_dir=os.path.abspath(os.curdir),
                                timeout=1200)
    except Exception as e:
        logger.error('Unable to instantiate casa:\n%s' % e)
        return None
Example no. 6
    def invoke_split(self, q, infile, outdir):

        import drivecasa
        try:
            script = []
            casa = drivecasa.Casapy(casa_dir=self.casapy_path, timeout=self.timeout)
            drivecasa.commands.mstransform(script, infile, outdir, self.transform_args, overwrite=True)
            casa.run_script(script)
            q.put(0)

        except Exception:
            q.put(-1)
            raise
Example no. 7
def make_image_map_fits(vis_ms_path,
                        output_dir,
                        image_size,
                        cell_size,
                        niter=150,
                        threshold_in_jy=0.1):
    ensure_dir(output_dir)

    script = []

    img_n_pix = int(image_size.to(u.pixel).value)
    cell_arcsec = cell_size.to(u.arcsec).value
    clean_args = {
        "imsize": [img_n_pix, img_n_pix],
        "cell": [str(cell_arcsec) + 'arcsec'],
        "weighting": 'briggs',
        "robust": 0.5,
    }
    dirty_maps = drivecasa.commands.clean(script,
                                          vis_paths=vis_ms_path,
                                          niter=0,
                                          threshold_in_jy=threshold_in_jy,
                                          other_clean_args=clean_args,
                                          out_dir=output_dir,
                                          overwrite=True)

    dirty_fits_path = drivecasa.commands.export_fits(script,
                                                     dirty_maps.image,
                                                     overwrite=True)

    clean_maps = drivecasa.commands.clean(script,
                                          vis_paths=vis_ms_path,
                                          niter=niter,
                                          threshold_in_jy=threshold_in_jy,
                                          other_clean_args=clean_args,
                                          out_dir=output_dir,
                                          overwrite=True)

    clean_fits_path = drivecasa.commands.export_fits(script,
                                                     clean_maps.image,
                                                     overwrite=True)

    logfile_basename = os.path.basename(
        vis_ms_path) + ".casa-clean-commands.log"
    commands_logfile = os.path.join(output_dir, logfile_basename)
    if os.path.isfile(commands_logfile):
        os.unlink(commands_logfile)
    casa = drivecasa.Casapy(commands_logfile=commands_logfile)
    casa.run_script(script)
    return dirty_fits_path, clean_fits_path
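
A hedged usage sketch for the function above (the measurement-set path and sizes are placeholders; it assumes astropy.units is imported as u, as elsewhere in these examples):

# Hypothetical call; returns the paths of the exported dirty and clean FITS images.
dirty_fits, clean_fits = make_image_map_fits('obs.ms',
                                             output_dir='./images',
                                             image_size=1024 * u.pixel,
                                             cell_size=3.5 * u.arcsec)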
Example no. 8
    def run(self):
        inp = self.inputs[0]
        out = self.outputs[0]

        if logger.isEnabledFor(logging.INFO):
            logger.info('Calculating source flux on %s.image' % (inp.path))

        import drivecasa
        casa = drivecasa.Casapy(casa_dir=self.casapy_path, timeout=self.timeout)
        casa.run_script(['ia.open("%s")' % (inp.path + '.image')])
        casa.run_script(['flux = ia.pixelvalue([128,128,0,179])["value"]["value"]'])
        casaout, _ = casa.run_script(['print flux'])
        flux = float(casaout[0])
        if flux > 9E-4:
            if logger.isEnabledFor(logging.INFO):
                logger.info('Valid flux found: %f' % (flux))
        out.write(str(flux))
Example no. 9
    def test_command_logging(self):
        with tempfile.NamedTemporaryFile(delete=False) as tmpfile:
            commands_logfile = tmpfile.name
            #Use the temporary file path, but delete the empty file first...
            os.remove(commands_logfile)
        casa = drivecasa.Casapy(commands_logfile=commands_logfile,
                                echo_to_stdout=False)
        script = ['tasklist()']
        out, errors = casa.run_script(script)
        with open(commands_logfile) as f:
            commands_log = f.read()

        commands_logged = commands_log.split('\n')
        if not commands_logged[-1]:
            commands_logged.pop()
        self.assertEqual(commands_logged, script)
        # print "Command log", commands_log
        os.remove(commands_logfile)
Example no. 10
    def invoke_clean(self, q, vis, outcube):

        import drivecasa
        try:
            script = []
            casa = drivecasa.Casapy(casa_dir=self.casapy_path, timeout=self.timeout)
            drivecasa.commands.clean(script,
                                     vis_paths=vis,
                                     out_path=outcube,
                                     niter=0,
                                     threshold_in_jy=0,
                                     other_clean_args=self.clean_args,
                                     overwrite=True)
            casa.run_script(script)
            q.put(0)

        except Exception:
            q.put(-1)
            raise
Example no. 11
 def setUpClass(cls):
     casa_dir = os.environ.get('CASA_DIR', drivecasa.default_casa_dir)
     cls.casa = drivecasa.Casapy(casa_dir, echo_to_stdout=False)
Example no. 12
import os
import sys
import logging
import drivecasa

casa = drivecasa.Casapy(log2term=True,
                        echo_to_stdout=True,
                        timeout=24 * 3600 * 10)

sys.path.append("/scratch/stimela")

utils = __import__('utils')

CONFIG = os.environ["CONFIG"]
INPUT = os.environ["INPUT"]
OUTPUT = os.environ["OUTPUT"]
MSDIR = os.environ["MSDIR"]

cab = utils.readJson(CONFIG)

args = {}
for param in cab['parameters']:
    name = param['name']
    value = param['value']

    if value is None:
        continue

    args[name] = value

script = ['{0}(**{1})'.format(cab['binary'], args)]
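
The excerpt ends after building the script list; presumably it is then executed with the Casapy instance created above, along these lines:

# Assumed continuation (not part of the original excerpt):
casa.run_script(script)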
Example no. 13
    def ms2miriad(self):
        '''
        Converts the data from MS to MIRIAD format via UVFITS using drivecasa. Does it for the flux calibrator, polarisation calibrator, and target field independently.
        '''
        subs.setinit.setinitdirs(self)
        beams = 37

        # Create the parameters for the parameter file for converting from MS to UVFITS format

        convertfluxcalmsavailable = get_param_def(self, 'convert_fluxcal_MSavailable', False ) # Flux calibrator MS dataset available?
        convertpolcalmsavailable = get_param_def(self, 'convert_polcal_MSavailable', False ) # Polarised calibrator MS dataset available?
        converttargetbeamsmsavailable = get_param_def(self, 'convert_targetbeams_MSavailable', np.full((beams), False) ) # Target beam MS dataset available?
        convertfluxcalms2uvfits = get_param_def(self, 'convert_fluxcal_MS2UVFITS', False ) # Flux calibrator MS dataset converted to UVFITS?
        convertpolcalms2uvfits = get_param_def(self, 'convert_polcal_MS2UVFITS', False ) # Polarised calibrator MS dataset converted to UVFITS?
        converttargetbeamsms2uvfits = get_param_def(self, 'convert_targetbeams_MS2UVFITS', np.full((beams), False) ) # Target beam MS dataset converted to UVFITS?
        convertfluxcaluvfitsavailable = get_param_def(self, 'convert_fluxcal_UVFITSavailable', False ) # Flux calibrator UVFITS dataset available?
        convertpolcaluvfitsavailable = get_param_def(self, 'convert_polcal_UVFITSavailable', False ) # Polarised calibrator UVFITS dataset available?
        converttargetbeamsuvfitsavailable = get_param_def(self, 'convert_targetbeams_UVFITSavailable', np.full((beams), False) ) # Target beam UVFITS dataset available?
        convertfluxcaluvfits2miriad = get_param_def(self, 'convert_fluxcal_UVFITS2MIRIAD', False ) # Flux calibrator UVFITS dataset converted to MIRIAD?
        convertpolcaluvfits2miriad = get_param_def(self, 'convert_polcal_UVFITS2MIRIAD', False ) # Polarised calibrator UVFITS dataset converted to MIRIAD?
        converttargetbeamsuvfits2miriad = get_param_def(self, 'convert_targetbeams_UVFITS2MIRIAD', np.full((beams), False) ) # Target beam UVFITS dataset converted to MIRIAD?

        ###################################################
        # Check which datasets are available in MS format #
        ###################################################

        if self.fluxcal != '':
            convertfluxcalmsavailable = os.path.isdir(self.basedir + '00' + '/' + self.rawsubdir + '/' + self.fluxcal)
        else:
            self.logger.warning('# Flux calibrator dataset not specified. Cannot convert flux calibrator! #')
        if self.polcal != '':
            convertpolcalmsavailable = os.path.isdir(self.basedir + '00' + '/' + self.rawsubdir + '/' + self.polcal)
        else:
            self.logger.warning('# Polarised calibrator dataset not specified. Cannot convert polarised calibrator! #')
        if self.target != '':
            for b in range(beams):
                converttargetbeamsmsavailable[b] = os.path.isdir(self.basedir + str(b).zfill(2) + '/' + self.rawsubdir + '/' + self.target)
        else:
            self.logger.warning('# Target beam dataset not specified. Cannot convert target beams! #')

        # Save the derived parameters for the availability to the parameter file

        subs.param.add_param(self, 'convert_fluxcal_MSavailable', convertfluxcalmsavailable)
        subs.param.add_param(self, 'convert_polcal_MSavailable', convertpolcalmsavailable)
        subs.param.add_param(self, 'convert_targetbeams_MSavailable', converttargetbeamsmsavailable)

        ###############################################
        # Convert the available MS-datasets to UVFITS #
        ###############################################

        # Convert the flux calibrator
        if self.convert_fluxcal:
            if self.fluxcal != '':
                if convertfluxcaluvfits2miriad == False:
                    if convertfluxcalmsavailable:
                        self.logger.debug('# Converting flux calibrator dataset from MS to UVFITS format. #')
                        subs.managefiles.director(self, 'mk', self.basedir + '00' + '/' + self.crosscalsubdir, verbose=False)
                        fc_convert = 'exportuvfits(vis="' + self.basedir + '00' + '/' + self.rawsubdir + '/' + self.fluxcal + '", fitsfile="' + self.basedir + '00' + '/' + self.crosscalsubdir + '/' + str(self.fluxcal).rstrip('MS') + 'UVFITS' + '", datacolumn="data", combinespw=True, padwithflags=True, multisource=True, writestation=True)'
                        casacmd = [fc_convert]
                        casa = drivecasa.Casapy()
                        casa.run_script(casacmd, raise_on_severe=False, timeout=3600)
                        if os.path.isfile( self.basedir + '00' + '/' + self.crosscalsubdir + '/' + self.fluxcal.rstrip('MS') + 'UVFITS'):
                            convertfluxcalms2uvfits = True
                            self.logger.info('# Converted flux calibrator dataset from MS to UVFITS format! #')
                        else:
                            convertfluxcalms2uvfits = False
                            self.logger.warning('# Could not convert flux calibrator dataset from MS to UVFITS format! #')
                    else:
                        self.logger.warning('# Flux calibrator dataset not available! #')
                else:
                    self.logger.info('# Flux calibrator dataset was already converted from MS to UVFITS format #')
            else:
                self.logger.warning('# Flux calibrator dataset not specified. Cannot convert flux calibrator! #')
        else:
            self.logger.warning('# Not converting flux calibrator dataset! #')
        # Convert the polarised calibrator
        if self.convert_polcal:
            if self.polcal != '':
                if convertpolcaluvfits2miriad == False:
                    if convertpolcalmsavailable:
                        self.logger.debug('# Converting polarised calibrator dataset from MS to UVFITS format. #')
                        subs.managefiles.director(self, 'mk', self.basedir + '00' + '/' + self.crosscalsubdir, verbose=False)
                        pc_convert = 'exportuvfits(vis="' + self.basedir + '00' + '/' + self.rawsubdir + '/' + self.polcal + '", fitsfile="' + self.basedir + '00' + '/' + self.crosscalsubdir + '/' + str(self.polcal).rstrip('MS') + 'UVFITS' + '", datacolumn="data", combinespw=True, padwithflags=True, multisource=True, writestation=True)'
                        casacmd = [pc_convert]
                        casa = drivecasa.Casapy()
                        casa.run_script(casacmd, raise_on_severe=False, timeout=3600)
                        if os.path.isfile( self.basedir + '00' + '/' + self.crosscalsubdir + '/' + self.polcal.rstrip('MS') + 'UVFITS'):
                            convertpolcalms2uvfits = True
                            self.logger.info('# Converted polarised calibrator dataset from MS to UVFITS format! #')
                        else:
                            convertpolcalms2uvfits = False
                            self.logger.warning('# Could not convert polarised calibrator dataset from MS to UVFITS format! #')
                    else:
                        self.logger.warning('# Polarised calibrator dataset not available! #')
                else:
                    self.logger.info('# Polarised calibrator dataset was already converted from MS to UVFITS format #')
            else:
                self.logger.warning('# Polarised calibrator dataset not specified. Cannot convert polarised calibrator! #')
        else:
            self.logger.warning('# Not converting polarised calibrator dataset! #')
        # Convert the target beams
        if self.convert_target:
            if self.target != '':
                self.logger.info('# Converting target beam datasets from MS to UVFITS format. #')
                if self.convert_targetbeams == 'all':
                    datasets = glob.glob(self.basedir + '[0-9][0-9]' + '/' + self.rawsubdir + '/' + self.target)
                    self.logger.debug('# Converting all available target beam datasets #')
                else:
                    # Use a separate name so the integer 'beams' defined above is not shadowed
                    beamlist = self.convert_targetbeams.split(",")
                    datasets = [self.basedir + str(b).zfill(2) + '/' + self.rawsubdir + '/' + self.target for b in beamlist]
                    self.logger.debug('# Converting all selected target beam datasets #')
                for vis in datasets:
                    if converttargetbeamsuvfits2miriad[int(vis.split('/')[-3])] == False:
                        if converttargetbeamsmsavailable[int(vis.split('/')[-3])]:
                            subs.managefiles.director(self, 'mk', self.basedir + vis.split('/')[-3] + '/' + self.crosscalsubdir, verbose=False)
                            tg_convert = 'exportuvfits(vis="' + self.basedir + vis.split('/')[-3] + '/' + self.rawsubdir + '/' + self.target + '", fitsfile="' + self.basedir + vis.split('/')[-3] + '/' + self.crosscalsubdir + '/' + self.target.rstrip('MS') + 'UVFITS' + '", datacolumn="data", combinespw=True, padwithflags=True, multisource=True, writestation=True)'
                            casacmd = [tg_convert]
                            casa = drivecasa.Casapy()
                            casa.run_script(casacmd, raise_on_severe=False, timeout=7200)
                            if os.path.isfile( self.basedir + vis.split('/')[-3] + '/' + self.crosscalsubdir + '/' + self.target.rstrip('MS') + 'UVFITS'):
                                converttargetbeamsms2uvfits[int(vis.split('/')[-3])] = True
                                self.logger.debug('# Converted dataset of target beam ' + vis.split('/')[-3] + ' from MS to UVFITS format! #')
                            else:
                                converttargetbeamsms2uvfits[int(vis.split('/')[-3])] = False
                                self.logger.warning('# Could not convert dataset for target beam ' + vis.split('/')[-3] + ' from MS to UVFITS format! #')
                        else:
                            self.logger.warning('# Dataset for target beam ' + vis.split('/')[-3] + ' not available! #')
                    else:
                        self.logger.info('# Dataset for target beam ' + vis.split('/')[-3] + ' was already converted from MS to UVFITS format #')
            else:
                self.logger.warning('# Target beam dataset(s) not specified. Cannot convert target beam datasets! #')
        else:
            self.logger.warning('# Not converting target beam dataset(s)! #')

        # Save the derived parameters for the MS to UVFITS conversion to the parameter file

        subs.param.add_param(self, 'convert_fluxcal_MS2UVFITS', convertfluxcalms2uvfits)
        subs.param.add_param(self, 'convert_polcal_MS2UVFITS', convertpolcalms2uvfits)
        subs.param.add_param(self, 'convert_targetbeams_MS2UVFITS', converttargetbeamsms2uvfits)

        #######################################################
        # Check which datasets are available in UVFITS format #
        #######################################################

        if self.fluxcal != '':
            convertfluxcaluvfitsavailable = os.path.isfile(self.basedir + '00' + '/' + self.crosscalsubdir + '/' + self.fluxcal.rstrip('MS') + 'UVFITS')
        else:
            self.logger.warning('# Flux calibrator dataset not specified. Cannot convert flux calibrator! #')
        if self.polcal != '':
            convertpolcaluvfitsavailable = os.path.isfile(self.basedir + '00' + '/' + self.crosscalsubdir + '/' + self.polcal.rstrip('MS') + 'UVFITS')
        else:
            self.logger.warning('# Polarised calibrator dataset not specified. Cannot convert polarised calibrator! #')
        if self.target != '':
            for b in range(beams):
                converttargetbeamsuvfitsavailable[b] = os.path.isfile(self.basedir + str(b).zfill(2) + '/' + self.crosscalsubdir + '/' + self.target.rstrip('MS') + 'UVFITS')
        else:
            self.logger.warning('# Target beam dataset not specified. Cannot convert target beams! #')

        # Save the derived parameters for the availability to the parameter file

        subs.param.add_param(self, 'convert_fluxcal_UVFITSavailable', convertfluxcaluvfitsavailable)
        subs.param.add_param(self, 'convert_polcal_UVFITSavailable', convertpolcaluvfitsavailable)
        subs.param.add_param(self, 'convert_targetbeams_UVFITSavailable', converttargetbeamsuvfitsavailable)

        ##########################################################
        # Convert the available UVFITS-datasets to MIRIAD format #
        ##########################################################

        # Convert the flux calibrator
        if self.convert_fluxcal:
            if self.fluxcal != '':
                if convertfluxcaluvfits2miriad == False:
                    if convertfluxcaluvfitsavailable:
                        self.logger.debug('# Converting flux calibrator dataset from UVFITS to MIRIAD format. #')
                        subs.managefiles.director(self, 'ch', self.basedir + '00' + '/' + self.crosscalsubdir, verbose=False)
                        fits = lib.miriad('fits')
                        fits.op = 'uvin'
                        fits.in_ = self.basedir + '00' + '/' + self.crosscalsubdir + '/' + self.fluxcal.rstrip('MS') + 'UVFITS'
                        fits.out = self.basedir + '00' + '/' + self.crosscalsubdir + '/' + self.fluxcal.rstrip('MS') + 'mir'
                        fits.go()
                        if os.path.isdir( self.basedir + '00' + '/' + self.crosscalsubdir + '/' + self.fluxcal.rstrip('MS') + 'mir'):
                            convertfluxcaluvfits2miriad = True
                            self.logger.info('# Converted flux calibrator dataset from UVFITS to MIRIAD format! #')
                        else:
                            convertfluxcaluvfits2miriad = False
                            self.logger.warning('# Could not convert flux calibrator dataset from UVFITS to MIRIAD format! #')
                    else:
                        self.logger.warning('# Flux calibrator dataset not available! #')
                else:
                    self.logger.info('# Flux calibrator dataset was already converted from UVFITS to MIRIAD format #')
            else:
                self.logger.warning('# Flux calibrator dataset not specified. Cannot convert flux calibrator! #')
        else:
            self.logger.warning('# Not converting flux calibrator dataset! #')
        # Convert the polarised calibrator
        if self.convert_polcal:
            if self.polcal != '':
                if convertpolcaluvfits2miriad == False:
                    if convertpolcaluvfitsavailable:
                        self.logger.debug('# Converting polarised calibrator dataset from UVFITS to MIRIAD format. #')
                        subs.managefiles.director(self, 'ch',self.basedir + '00' + '/' + self.crosscalsubdir, verbose=False)
                        fits = lib.miriad('fits')
                        fits.op = 'uvin'
                        fits.in_ = self.basedir + '00' + '/' + self.crosscalsubdir + '/' + self.polcal.rstrip('MS') + 'UVFITS'
                        fits.out = self.basedir + '00' + '/' + self.crosscalsubdir + '/' + self.polcal.rstrip('MS') + 'mir'
                        fits.go()
                        if os.path.isdir(self.basedir + '00' + '/' + self.crosscalsubdir + '/' + self.polcal.rstrip('MS') + 'mir'):
                            convertpolcaluvfits2miriad = True
                            self.logger.info('# Converted polarised calibrator dataset from UVFITS to MIRIAD format! #')
                        else:
                            convertpolcaluvfits2miriad = False
                            self.logger.warning('# Could not convert polarised calibrator dataset from UVFITS to MIRIAD format! #')
                    else:
                        self.logger.warning('# Polarised calibrator dataset not available! #')
                else:
                    self.logger.info('# Polarised calibrator dataset was already converted from UVFITS to MIRIAD format #')
            else:
                self.logger.warning('# Polarised calibrator dataset not specified. Cannot convert polarised calibrator! #')
        else:
            self.logger.warning('# Not converting polarised calibrator dataset! #')
        # Convert the target beams
        if self.convert_target:
            if self.target != '':
                self.logger.info('# Converting target beam datasets from UVFITS to MIRIAD format. #')
                if self.convert_targetbeams == 'all':
                    datasets = glob.glob(self.basedir + '[0-9][0-9]' + '/' + self.crosscalsubdir + '/' + self.target.rstrip('MS') + 'UVFITS')
                    self.logger.debug('# Converting all available target beam datasets #')
                else:
                    beamlist = self.convert_targetbeams.split(",")
                    datasets = [self.basedir + str(b).zfill(2) + '/' + self.crosscalsubdir + '/' + self.target.rstrip('MS') + 'UVFITS' for b in beamlist]
                    self.logger.debug('# Converting all selected target beam datasets #')
                for vis in datasets:
                    if converttargetbeamsuvfits2miriad[int(vis.split('/')[-3])] == False:
                        if converttargetbeamsuvfitsavailable[int(vis.split('/')[-3])]:
                            subs.managefiles.director(self, 'ch', self.basedir + vis.split('/')[-3] + '/' + self.crosscalsubdir, verbose=False)
                            fits = lib.miriad('fits')
                            fits.op = 'uvin'
                            fits.in_ = self.basedir + vis.split('/')[-3] + '/' + self.crosscalsubdir + '/' + self.target.rstrip('MS') + 'UVFITS'
                            fits.out = self.basedir + vis.split('/')[-3] + '/' + self.crosscalsubdir + '/' + self.target.rstrip('MS') + 'mir'
                            fits.go()
                            if os.path.isdir( self.basedir + vis.split('/')[-3] + '/' + self.crosscalsubdir + '/' + self.target.rstrip('MS') + 'mir'):
                                converttargetbeamsuvfits2miriad[int(vis.split('/')[-3])] = True
                                self.logger.debug('# Converted dataset of target beam ' + vis.split('/')[-3] + ' from UVFITS to MIRIAD format! #')
                            else:
                                converttargetbeamsuvfits2miriad[int(vis.split('/')[-3])] = False
                                self.logger.warning('# Could not convert dataset for target beam ' + vis.split('/')[-3] + ' from UVFITS to MIRIAD format! #')
                        else:
                            self.logger.warning('# Dataset for target beam ' + vis.split('/')[-3] + ' not available! #')
                    else:
                        self.logger.info('# Dataset for target beam ' + vis.split('/')[-3] + ' was already converted from UVFITS to MIRIAD format #')
            else:
                self.logger.warning('# Target beam dataset(s) not specified. Cannot convert target beam datasets! #')
        else:
            self.logger.warning('# Not converting target beam dataset(s)! #')

        # Save the derived parameters for the UVFITS to MIRIAD conversion to the parameter file

        subs.param.add_param(self, 'convert_fluxcal_UVFITS2MIRIAD', convertfluxcaluvfits2miriad)
        subs.param.add_param(self, 'convert_polcal_UVFITS2MIRIAD', convertpolcaluvfits2miriad)
        subs.param.add_param(self, 'convert_targetbeams_UVFITS2MIRIAD', converttargetbeamsuvfits2miriad)

        #####################################
        # Remove the UVFITS files if wanted #
        #####################################

        if self.convert_removeuvfits:
            self.logger.info('# Removing all UVFITS files #')
            subs.managefiles.director(self, 'rm', self.basedir + '00' + '/' + self.crosscalsubdir + '/' + self.fluxcal.rstrip('MS') + 'UVFITS')
            subs.managefiles.director(self, 'rm', self.basedir + '00' + '/' + self.crosscalsubdir + '/' + self.polcal.rstrip('MS') + 'UVFITS')
            for beam in range(beams):
                if os.path.isdir(self.basedir + str(beam).zfill(2) + '/' + self.crosscalsubdir):
                    subs.managefiles.director(self, 'rm', self.basedir + str(beam).zfill(2) + '/' + self.crosscalsubdir + '/' + self.target.rstrip('MS') + 'UVFITS')
                else:
                    pass
Example no. 14
 def setUpClass(cls):
     cls.casa = drivecasa.Casapy(echo_to_stdout=False)
Example no. 15
from __future__ import print_function
import drivecasa
casa = drivecasa.Casapy()
script = []
uvfits_path = '/path/to/uvdata.fits'
vis = drivecasa.commands.import_uvfits(script, uvfits_path, out_dir='./')
clean_args = {
    "imsize": [512, 512],
    "cell": ['5.0arcsec'],
    "weighting": 'briggs',
    "robust": 0.5,
}
dirty_maps = drivecasa.commands.clean(script,
                                      vis,
                                      niter=0,
                                      threshold_in_jy=1,
                                      other_clean_args=clean_args)
dirty_map_fits_image = drivecasa.commands.export_fits(script, dirty_maps.image)
print(script)
casa.run_script(script)
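
As a follow-up (not part of the original example), the exported dirty-map path could be inspected with astropy.io.fits:

# Hypothetical follow-up: open the exported dirty map and check its shape.
from astropy.io import fits
with fits.open(dirty_map_fits_image) as hdulist:
    print(hdulist[0].data.shape)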
Example no. 16
def simulate_vis_with_casa(pointing_centre, source_list, output_dir):
    """
    Use casapy to simulate a visibility measurementset with noise.

    (This also produces an initial set of UVW data)

    Args:
        pointing_centre (:class:`astropy.coordinates.SkyCoord`)
        source_list: list of :class:`fastimgproto.skymodel.helpers.SkySource`
        output_dir (str): Output directory which will contain `vis.ms`

    Returns (str): Full path to `vis.ms`.
    """

    ensure_dir(output_dir)

    commands_logfile = os.path.join(
        output_dir, "./casa-visibilities_for_point_source-commands.log")
    component_list_path = os.path.join(output_dir, './sources.cl')
    output_visibility = os.path.abspath(os.path.join(output_dir, './vis.ms'))

    if os.path.isfile(commands_logfile):
        os.unlink(commands_logfile)
    casa = drivecasa.Casapy(commands_logfile=commands_logfile)
    script = []

    # For VLA reference numbers, see:
    # https://science.nrao.edu/facilities/vla/docs/manuals/oss/performance/fov
    # https://science.nrao.edu/facilities/vla/docs/manuals/oss/performance/resolution

    # Define some observation parameters:
    obs_central_frequency = 3. * u.GHz
    obs_frequency_bandwidth = 0.125 * u.GHz
    primary_beam_fwhm = (45. * u.GHz / obs_central_frequency) * u.arcmin

    # Convert the sources to a CASA 'componentlist'
    component_list_path = sim.make_componentlist(
        script,
        source_list=[(s.position, s.flux, s.frequency) for s in source_list],
        out_path=component_list_path)

    # Open the visibility file
    sim.open_sim(script, output_visibility)

    # Configure the virtual telescope
    # sim.setpb(script,
    #           telescope_name='VLA',
    #           primary_beam_hwhm=primary_beam_fwhm * 0.5,
    #           frequency=obs_central_frequency)
    sim.setconfig(script,
                  telescope_name='VLA',
                  antennalist_path=vla_c_antennalist_path)
    sim.setspwindow(
        script,
        freq_start=obs_central_frequency - 0.5 * obs_frequency_bandwidth,
        freq_resolution=obs_frequency_bandwidth,
        freq_delta=obs_frequency_bandwidth,
        n_channels=1,
    )
    sim.setfeed(script, )
    sim.setfield(script, pointing_centre)
    sim.setlimits(script)
    sim.setauto(script)
    ref_time = Time('2014-05-01T19:55:45', format='isot', scale='tai')
    sim.settimes(script, integration_time=10 * u.s, reference_time=ref_time)

    # Generate the visibilities
    sim.observe(script, stop_delay=10 * u.s)

    sim.predict(script, component_list_path)

    sim.set_simplenoise(script, noise_std_dev=1 * u.mJy)
    sim.corrupt(script)
    sim.close_sim(script)

    casa.run_script(script)
    return output_visibility
Example no. 17
def Run_MockObs(bulked,
                GClrealisations,
                CASAmock=False,
                saveFITS=False,
                writeClusters=False,
                savewodetect=False,
                log=False,
                side_effects=False,
                filter_sp_phase=False,
                extract_subtracted=True):
    """ Runs a mock observation
        side_effects: put   True if you want the input galaxy cluster to be changed,
                            False if you want only a copy to be influenced """
    (snap, Rmodel, emptySurvey) = bulked
    savefolder = emptySurvey.outfolder
    iom.check_mkdir(savefolder)

    #Variant B: Clean mask and .fits --> Source parameters; like variant A from step 4 on
    if CASAmock:
        import drivecasa as drica
        casa = drica.Casapy()

    smt = iom.SmartTiming(
        rate=5e4
    )  # logf=outf+'smt.log'  #;  print( '###==== Step2a:  Loading configuration files ====###'  )

    #  Units, conversion factors, and input variables
    fac_rho = loadsnap.conversion_fact_gadget_rho_to_nb(
        snap.head) * loadsnap.conversion_fact_ne_per_nb()  #electrons/cm^-3
    fac_T = loadsnap.conversion_fact_gadget_U_to_keV(snap.head)  # in [keV]
    fac_T2 = loadsnap.conversion_fact_gadget_U_to_keV(
        snap.head) / 8.61732814974056e-08  # to K
    """ determines if you want to change the galaxy cluster or not """
    if side_effects:
        GClrealisations_used = GClrealisations
    else:
        GClrealisations_used = copy.deepcopy(GClrealisations)

    for jj, gcl in enumerate(GClrealisations_used):
        #  Load variables and setting survey parameters
        mockobs = gcl.mockobs
        z = gcl.z.value
        hsize = mockobs.hsize
        dinf = gcl.dinfo  # Some parameters of dinfo could change, because of adaptive pixelsize etc.
        fac_x = loadsnap.comH_to_phys(snap.head, z)
        eff = Rmodel.effList[0]

        #  Units, conversion factors, and input variables
        radiounit = myu.radiounit_A * eff  # erg/s/Hz    --- Unit of particle luminousity in .radio snaps
        rot = mathut.Kep3D_seb(Omega=mockobs.theta,
                               i=mockobs.phi,
                               omega=mockobs.psi)
        posrot = np.dot(snap.pos, rot) * fac_x
        #velrot   = np.dot(snap.vel, rot)  Taken out, as long as we don't need to plot the velocity vectors

        smt(task='Bin_radio_[sub]')
        #print( '###==== Step 3b:  Binning cluster data cube  (radio) ====###'
        # Parameters implied
        # See Nuza+ 2012 Equ (1)
        relativistics = (1 + z)
        s_radio_SI = radiounit / myu.Jy_SI / (
            4 * np.pi * (gcl.cosmoDL * 1e-2)**2
        ) * relativistics  # radiounit*s_radioSI is Jy/particle        #Umrechnung
        nbins = int(2 * hsize / (gcl.cosmoPS * dinf.spixel))
        if nbins > mockobs.binmax:
            binsold = nbins
            spixelold = dinf.spixel
            dinf.spixel = dinf.spixel * np.power(
                float(nbins) / float(mockobs.binmax), 0.5)
            mockobs.hsize = mockobs.hsize * np.power(
                float(nbins) / float(mockobs.binmax), -0.5)
            dinf.update_Abeam()
            nbins = mockobs.binmax
            if log:
                print(
                    'At z=%.3f with a pixel size of %.1f arcsec, the number of pixels per image is %i^2. The pixel scale was therefore increased to %.1f arcsec and the binned box size decreased to %i kpc.'
                    % (z, spixelold, binsold, dinf.spixel, mockobs.hsize))
            hsize = mockobs.hsize
        dinf.pcenter = [nbins / 2, nbins / 2]

        if filter_sp_phase:
            """ Filteres the cooled particles that no longer belong to the hot-ICM"""
            iL = np.where((
                np.sqrt(snap.pos[:, 0]**2 + snap.pos[:, 1]**2 +
                        snap.pos[:, 2]**2) * fac_x < 2.0 * gcl.R200())
                          & ((8.9 + 3.3 - np.log(snap.u * fac_T2) -
                              0.65 * np.log10(snap.rho * fac_rho)) < 0)
                          & ((8.9 - 11.0 - np.log(snap.u * fac_T2) -
                              3.50 * np.log10(snap.rho * fac_rho)) < 0)
                          & ((8.9 + 6.9 - np.log(snap.u * fac_T2) +
                              0.50 * np.log10(snap.rho * fac_rho)) < 0)
                          & (snap.mach < 10))[0]
        else:
            iL = np.where(
                np.sqrt(snap.pos[:, 0]**2 + snap.pos[:, 1]**2 +
                        snap.pos[:, 2]**2) * fac_x < 2.0 * gcl.R200())[0]

        if hasattr(snap, 'radiPre'):
            if log:
                print('Run_MockObs:: Ratio of PREs to total emission',
                      (np.sum(snap.radiPre[iL])) /
                      (np.sum(snap.radi[iL]) + np.sum(snap.radiPre[iL])))

        H1, xedges, yedges = np.histogram2d(-posrot[iL, 0],
                                            -posrot[iL, 1],
                                            weights=s_radio_SI * snap.radi[iL],
                                            range=[[-hsize, hsize],
                                                   [-hsize, hsize]],
                                            bins=nbins)
        """ Difference of gaussians method - accomplishing a simple subtraction of compact sources"
        
        We do this iteratively three times to also remove those particles that where shadowed by other 
        bright particles before
        
        This method is defines by
        
        thresh: A threshold for masking
        scale_1: Smaller scale in kpc
        scale_2: Larger  scale in kpc        
        """
        thresh = 0.75
        scale_1 = 20
        scale_2 = 60

        DoG1_filter = copy.deepcopy(dinf)
        DoG1_filter.beam = [scale_1 / gcl.cosmoPS, scale_1 / gcl.cosmoPS, 0]
        DoG1_filter.update_Abeam()

        DoG2_filter = copy.deepcopy(dinf)
        DoG2_filter.beam = [scale_2 / gcl.cosmoPS, scale_2 / gcl.cosmoPS, 0]
        DoG2_filter.update_Abeam()

        DoG_mask = np.ones_like(H1)
        for no_use in range(2):
            convolved_sigma1 = DoG1_filter.convolve_map(
                H1 * DoG_mask)  ## gaussian convolution
            convolved_sigma2 = DoG2_filter.convolve_map(
                H1 * DoG_mask)  ## gaussian convolution
            DoG_rel = np.divide(
                np.abs(convolved_sigma2 - convolved_sigma1) + 1e-20,
                convolved_sigma2 + 1e-20)
            DoG_mask[np.where(
                DoG_rel < thresh)] = 0.2 * DoG_mask[np.where(DoG_rel < thresh)]
        #convolved_sigma1 = DoG1_filter.convolve_map(H1)  ## gaussian convolution
        #convolved_sigma2 = DoG2_filter.convolve_map(H1)  ## gaussian convolution

        H2 = dinf.convolve_map(H1 * DoG_mask)
        #            print('____ Masked/Unmasked flux (mJy):  %6.3f %6.3f' % (np.sum(H2)/dinf.Abeam[0]*1000,s_radio_SI*np.sum(snap.radi[iL])*1000))

        smt(task='WriteDilMask_[sub]')
        #print( '###==== -- 4b:  Writing dilated .mask  ====###'
        #        mask       =  maput.numpy2mask (H2, dinf.limit, Ndil) # outfile = outfile.replace('.fits','') + '_mask.fits',

        #img = bdsm.process_image(filename= outfile+'_simple_conv.fits', thresh_isl=args.tIs, thresh_pix=args.tPi, mean_map = 'zero', beam = (0.0125,0.0125,0), rms_map = False, rms_value = 0.00045, thresh = 'hard')
        #img.export_image(outfile= outfile+'_simple_conv.ismk.fits'    , img_type='island_mask', img_format='fits', mask_dilation=5, clobber=True)
        #img.export_image(outfile= outfile+'_simple_conv.ismk.mask'    , img_type='island_mask', img_format='casa', mask_dilation=5, clobber=True)

        if CASAmock:
            """ Removed implementation; original idea:
            #1. Python: Create convolved perfect simulational output image
            #2. PyBDSM/Python: Create clean mask on that with some dilation
            #3. Casa/Python: Create constant rms and beam-corrected image (clean)
            #4. Casa/Python: Apply this clean mask with immath on constant-rms image
            #5. PyBDSM/python: Use pybdsm with detection_image= 'masked_constant rms_image'
            #6. Python. Create masked .fits mock imagename
            #7  Python. Extract radio relics """
        else:
            if log:
                print(
                    '###====          - Using the simple convolved image ====###'
                )
            IM0 = H2  #(fits.open(simpleconv))[0].data

        smt(task='CreateMask_[sub]')
        #print( '###==== Step 6:  Create masked .fits mock image ====###'
        IM1 = np.squeeze(
            IM0)  #!!! Here unmasked! ... np.multiply(np.squeeze(IM0), mask)
        """Outdated saveFITS, please update and put to end of procedure """
        if saveFITS and CASAmock:
            maput.numpy2FITS(IM1, 'sim.vla.d.masked.fits', dinf.spixel)

        smt(task='RelicExtr_[sub]')

        relics = relex.RelicExtraction(
            IM1,
            z,
            GCl=gcl,
            dinfo=dinf,
            rinfo=cbclass.RelicRegion(
                '', [], rtype=1))  #, faintexcl=0.4, Mach=Hmach, Dens=Hdens,

        smt(task='RelicHandling_[sub]')
        relics = sorted(relics, key=lambda x: x.flux, reverse=True)
        gcl.add_relics(relics)
        if savewodetect or len(relics) > 0:

            if log:
                print(
                    '  ++The brightest relic found has a flux density of %f mJy'
                    % (relics[0].flux)
                )  # could raise an error if no relic is in the list
            iom.check_mkdir(savefolder + '/maps/z%04.f' %
                            (gcl.mockobs.z_snap * 1000))
            """ Part to derive additional relic information like average and  mach number and alpha. We also get the 
            emission weighted density, as this works only on the bright parts it is fine to work with the subset of 
            particles
            """
            alpha_help = (snap.mach[iL]**2 + 1) / (snap.mach[iL]**2 - 1)

            Hmach = SPH_binning(
                snap,
                posrot,
                dinf,
                iL,
                immask=DoG_mask,
                HSize_z=hsize,
                nbins=nbins,
                weights=lambda x: s_radio_SI * x.radi[iL] * x.mach[iL])
            Halpha = SPH_binning(
                snap,
                posrot,
                dinf,
                iL,
                immask=DoG_mask,
                HSize_z=hsize,
                nbins=nbins,
                weights=lambda x: s_radio_SI * x.radi[iL] * alpha_help)
            Hrho_up = SPH_binning(snap,
                                  posrot,
                                  dinf,
                                  iL,
                                  immask=DoG_mask,
                                  HSize_z=hsize,
                                  nbins=nbins,
                                  weights=lambda x: s_radio_SI * x.radi[iL] * x
                                  .rup[iL] * fac_rho)
            Htemp = SPH_binning(
                snap,
                posrot,
                dinf,
                iL,
                immask=DoG_mask,
                HSize_z=hsize,
                nbins=nbins,
                weights=lambda x: s_radio_SI * x.radi[iL] * x.u[iL] * fac_T)
            Harea = SPH_binning(
                snap,
                posrot,
                dinf,
                iL,
                immask=DoG_mask,
                HSize_z=hsize,
                nbins=nbins,
                weights=lambda x: s_radio_SI * x.radi[iL] * x.area[iL])
            Hmag = SPH_binning(
                snap,
                posrot,
                dinf,
                iL,
                immask=DoG_mask,
                HSize_z=hsize,
                nbins=nbins,
                weights=lambda x: s_radio_SI * x.radi[iL] * x.B[iL])
            Hpre = SPH_binning(snap,
                               posrot,
                               dinf,
                               iL,
                               immask=DoG_mask,
                               HSize_z=hsize,
                               nbins=nbins,
                               weights=lambda x: s_radio_SI * x.radiPre[iL])

            allflux = np.asarray([])
            for relic in relics:
                relic.wMach = Hmach[relic.pmask]
                relic.wT = Htemp[relic.pmask]
                relic.wArea = Harea[relic.pmask]
                relic.wAlpha = Halpha[relic.pmask]
                relic.wB = Hmag[relic.pmask]
                relic.wPre = Hpre[relic.pmask]
                relic.wRho_up = Hrho_up[relic.pmask]
                #                    relic.wRho        =  Hrho [relic.pmask]
                #                    relic.wRho_down   =  Hrho_down[relic.pmask]
                #                    relic.wT_up       =  Htemp_up[relic.pmask]
                #                    relic.wT_down     =  Htemp_down[relic.pmask]

                relic.wDoG_rel = DoG_rel[relic.pmask]
                allflux = np.concatenate((relic.sparseW, allflux), axis=0)
                relic.averages_quantities()
            """Save maps"""
            allflux = allflux.flatten()
            """ I couldn't come up with something better to take the inverse """
            mask = np.ones(snap.rho.shape, dtype=bool)
            mask[iL] = 0
            Subtracted, xedges, yedges = np.histogram2d(
                -posrot[mask, 0],
                -posrot[mask, 1],
                weights=s_radio_SI * snap.radi[mask],
                range=[[-hsize, hsize], [-hsize, hsize]],
                bins=nbins)
            Subtracted += H1 * (1 - DoG_mask)
            Subtracted_conv = dinf.convolve_map(Subtracted)
            if extract_subtracted:
                relics_subtracted = relex.RelicExtraction(
                    Subtracted_conv,
                    z,
                    GCl=gcl,
                    dinfo=dinf,
                    rinfo=cbclass.RelicRegion(
                        '', [],
                        rtype=1))  # , faintexcl=0.4, Mach=Hmach, Dens=Hdens,
                for relic in relics_subtracted:
                    relic.wMach = Hmach[relic.pmask]
                    relic.wT = Htemp[relic.pmask]
                    relic.wArea = Harea[relic.pmask]
                    relic.wAlpha = Halpha[relic.pmask]
                    relic.wB = Hmag[relic.pmask]
                    relic.wPre = Hpre[relic.pmask]
                    relic.wRho_up = Hrho_up[relic.pmask]
                    #                    relic.wRho        =  Hrho [relic.pmask]
                    #                    relic.wRho_down   =  Hrho_down[relic.pmask]
                    #                    relic.wT_up       =  Htemp_up[relic.pmask]
                    #                    relic.wT_down     =  Htemp_down[relic.pmask]

                    relic.wDoG_rel = DoG_rel[relic.pmask]
                    relic.averages_quantities()
                gcl.compacts = relics_subtracted

            if saveFITS:
                """ Here the maps are already masked with the detection region """

                smt(task='WriteFits_[writes,sub]')
                if log:
                    print(
                        '###==== Step 4:  Preparing FITS file & folders ====###'
                    )

                parlist = (savefolder, gcl.mockobs.z_snap * 1000, gcl.name,
                           Rmodel.id)
                gcl.maps_update(H1, 'Raw',
                                '%s/maps/z%04.f/%s-%04i_native.fits' % parlist)
                gcl.maps_update(IM1, 'Diffuse',
                                '%s/maps/z%04.f/%s-%04i.fits' % parlist)
                gcl.maps_update(
                    Subtracted, 'CompModell',
                    '%s/maps/z%04.f/%s-%04i_compact.fits' % parlist)
                gcl.maps_update(
                    Subtracted_conv, 'Subtracted',
                    '%s/maps/z%04.f/%s-%04i_compactObserved.fits' % parlist)
                if len(relics) > 0:
                    gcl.maps_update(
                        gcl.Mask_Map(Hmach, normalize=allflux), 'Mach',
                        '%s/maps/z%04.f/%s-%04i_mach.fits' % parlist)
                    gcl.maps_update(
                        gcl.Mask_Map(Hrho_up, normalize=allflux), 'RhoUp',
                        '%s/maps/z%04.f/%s-%04i_rhoup.fits' % parlist)
                    gcl.maps_update(
                        gcl.Mask_Map(Htemp, normalize=allflux), 'Temp',
                        '%s/maps/z%04.f/%s-%04i_temp.fits' % parlist)
                    gcl.maps_update(gcl.Mask_Map(Hmag, normalize=allflux), 'B',
                                    '%s/maps/z%04.f/%s-%04i_B.fits' % parlist)
                    gcl.maps_update(
                        gcl.Mask_Map(Hpre, normalize=allflux), 'PreRatio',
                        '%s/maps/z%04.f/%s-%04i_prerat.fits' % parlist)
                gcl.maps_update(
                    DoG_rel, 'DoG_rel',
                    '%s/maps/z%04.f/%s-%04i_DoG_rel.fits' % parlist)
                gcl.maps_update(
                    DoG_mask, 'DoG_mask',
                    '%s/maps/z%04.f/%s-%04i_DoG_mask.fits' % parlist)
            """ PhD feature --> plot the DoF images in a subplot
##                import matplotlib.pyplot as plt   
##                with np.errstate(divide='ignore', invalid='ignore'):
##                    DoG_rel            = np.divide(np.abs(convolved_sigma2-convolved_sigma1)+1e-20,convolved_sigma2+1e-20)
##                pixR200 =        gcl.R200()/(gcl.cosmoPS*dinf.spixel)     
##                bou     =  gcl.R200()*1.5   # pixR200*1
##                f, ((ax1, ax2, ax3)) = plt.subplots(1, 3, figsize=(30,12)) #, sharey='row', sharex='col', sharey='row'
##                ax1.imshow( np.power((np.abs(convolved_sigma1-convolved_sigma2)), 1/1 )     , extent=(-hsize, hsize, -hsize, hsize)) #-bou+cen
##                im2 = ax2.imshow( DoG_rel                                 , extent=(-hsize, hsize, -hsize, hsize), vmin=0.2, vmax=1 ) #-bou+cen           
##                XX,YY = np.meshgrid(xedges[0:-1]+0.5*gcl.cosmoPS*dinf.spixel,yedges[0:-1][::-1]+0.5*gcl.cosmoPS*dinf.spixel) #yedges[-1:0]
##                ax2.contour(XX, YY, DoG_mask,  colors='r', levels=[0.5])
##                ax3.imshow( np.power(dinf.convolve_map(H1*DoG_mask), 1/1 ), extent=(-hsize, hsize, -hsize, hsize)  ) #-bou+cen
##                ax1.set_xlim(-bou, bou)
##                ax1.set_ylim(-bou, bou)
##                ax2.set_xlim(-bou, bou)
##                ax2.set_ylim(-bou, bou) 
##                ax3.set_xlim(-bou, bou)
##                ax3.set_ylim(-bou, bou)
##                
##                ax1.set_title('DoG')
##                ax2.set_title('DoG/LowResImage + mask (contours)')
##                ax3.set_title('Filtered NVSS')
##                
##                print('CreateMokObs',  pixR200, gcl.R200(),dinf.spixel, gcl.cosmoPS)
##                circle1 = plt.Circle((0, 0), gcl.R200(), fill=False, color='w', ls='-')
##                circle2 = plt.Circle((0, 0), gcl.R200(), fill=False, color='w', ls='-')      
##                circle3 = plt.Circle((0, 0), gcl.R200(), fill=False, color='w', ls='-')   
##
##                ax1.add_artist(circle1)
##                ax2.add_artist(circle2)
##                ax3.add_artist(circle3)
##                
##                cax2 = f.add_axes([0.42, 0.12, 0.2, 0.03]) 
##                cb2  = f.colorbar(im2, format='%.2f', ticks=[0.0, 0.25, 0.5, 0.75, 1.0], cax = cax2, orientation="horizontal")  #label='average Mach', 
##                
##                plt.savefig('%s/%s-%04i_joined.png'        % (savefolder, gcl.name, Rmodel.id)) #dpi=400
##                plt.savefig('%s/%s-%04i_joined.pdf'        % (savefolder, gcl.name, Rmodel.id)) #dpi=400          
#                """ """
            
            gcl.add_relics(relics) 
            PhD feature end """
    if writeClusters:
        """This is here because some outputs get lost in a multiprocessing heavy input/output queue process"""
        for gcl in GClrealisations_used:
            filename = 'GCl-%05i' % (gcl.mockobs.id)
            iom.pickleObject((gcl, Rmodel),
                             savefolder + '/pickled/',
                             filename,
                             append=False)

    if log: print('Finished with all efficiency values')
    return True, smt, GClrealisations_used, Rmodel
Example no. 18
import os

import astropy.units as u
import drivecasa
import drivecasa.commands.simulation as sim
from astropy.coordinates import SkyCoord
from astropy.time import Time
from drivecasa.utils import ensure_dir

output_dir = './simulation_output'
ensure_dir(output_dir)

commands_logfile = os.path.join(output_dir, "./casa-commands.log")
component_list_path = os.path.join(output_dir, './sources.cl')
output_visibility = os.path.join(output_dir, './foovis.ms')

if os.path.isfile(commands_logfile):
    os.unlink(commands_logfile)
casa = drivecasa.Casapy(
    commands_logfile=commands_logfile,
    echo_to_stdout=True,
)
script = []

# For VLA reference numbers, see:
# https://science.nrao.edu/facilities/vla/docs/manuals/oss/performance/fov
# https://science.nrao.edu/facilities/vla/docs/manuals/oss/performance/resolution

# Define some observation parameters:
obs_central_frequency = 3. * u.GHz
obs_frequency_bandwidth = 0.125 * u.GHz
primary_beam_fwhm = (45. * u.GHz / obs_central_frequency) * u.arcmin

# Define the field centre and sources:
centre_ra = 180 * u.deg
centre_dec = 34 * u.deg
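
The excerpt stops here; a plausible continuation (an assumption, modelled on Examples 16 and 19) would wrap the coordinates in a SkyCoord and then build and run the simulation script:

# Assumed continuation, not part of the original snippet:
pointing_centre = SkyCoord(ra=centre_ra, dec=centre_dec)
# ... build the script with sim.open_sim(script, output_visibility),
# sim.setconfig(...), sim.observe(...), etc., and finally execute it:
# casa.run_script(script)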
Example no. 19
def simulate_vis_with_casa(pointing_centre,
                           source_list,
                           noise_std_dev,
                           vis_path,
                           overwrite=True,
                           echo=False):
    """
    Use casapy to simulate a visibility measurementset with noise.

    (This also produces an initial set of UVW data)

    Args:
        pointing_centre (:class:`astropy.coordinates.SkyCoord`)
        source_list: list of :class:`fastimgproto.skymodel.helpers.SkySource`
        noise_std_dev (astropy.units.Quantity): Standard deviation of the noise
            (units of Jy).
        vis_path (str): Path to visibilities generated.
        overwrite (bool): If True, delete any pre-existing outputs at these
            paths; otherwise raise an error when they are present.
        echo (bool): Echo the CASA output to terminal during processing.

    Returns: The (stdout, errors) tuple returned by casa.run_script(script).
    """

    vis_abspath = os.path.abspath(vis_path)
    commands_logfile = vis_abspath + "_casa_commands.log"
    casa_logfile = vis_abspath + "_casa_log.log"
    component_list_path = vis_abspath + "_sources.cl"

    for outdir in vis_abspath, component_list_path:
        if os.path.isdir(outdir):
            if overwrite:
                shutil.rmtree(outdir)
            else:
                raise IOError(
                    "{} already present and overwrite==False.".format(outdir))
    if os.path.isfile(commands_logfile):
        os.remove(commands_logfile)
    ensure_dir(os.path.dirname(vis_abspath))

    casa = drivecasa.Casapy(commands_logfile=commands_logfile,
                            casa_logfile=casa_logfile,
                            echo_to_stdout=echo)
    script = []
    # Add subroutine definition, for manual reproduction with CASA:
    script.append(drivecasa.commands.subroutines.def_load_antennalist)

    # Define some observation parameters...
    # For VLA reference numbers, see:
    # https://science.nrao.edu/facilities/vla/docs/manuals/oss/performance/fov
    # https://science.nrao.edu/facilities/vla/docs/manuals/oss/performance/resolution
    obs_central_frequency = 3. * u.GHz
    obs_frequency_bandwidth = 0.125 * u.GHz
    primary_beam_fwhm = (45. * u.GHz / obs_central_frequency) * u.arcmin

    # Convert the sources to a CASA 'componentlist'
    component_list_path = sim.make_componentlist(
        script,
        source_list=[(s.position, s.flux, s.frequency) for s in source_list],
        out_path=component_list_path)

    # Open the visibility file
    sim.open_sim(script, vis_abspath)

    # Configure the virtual telescope
    # sim.setpb(script,
    #           telescope_name='VLA',
    #           primary_beam_hwhm=primary_beam_fwhm * 0.5,
    #           frequency=obs_central_frequency)
    sim.setconfig(script,
                  telescope_name='VLA',
                  antennalist_path=vla_c_antennalist_path)
    sim.setspwindow(
        script,
        freq_start=obs_central_frequency - 0.5 * obs_frequency_bandwidth,
        freq_resolution=obs_frequency_bandwidth,
        freq_delta=obs_frequency_bandwidth,
        n_channels=1,
    )
    sim.setfeed(script, )
    sim.setfield(script, pointing_centre)
    sim.setlimits(script)
    sim.setauto(script)
    ref_time = Time('2014-05-01T19:55:45', format='isot', scale='tai')
    sim.settimes(script, integration_time=10 * u.s, reference_time=ref_time)

    # Generate the visibilities
    sim.observe(script, stop_delay=10 * u.s)

    sim.predict(script, component_list_path)

    sim.set_simplenoise(script, noise_std_dev=noise_std_dev)
    sim.corrupt(script)
    sim.close_sim(script)

    casa_output = casa.run_script(script)
    return casa_output
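
A hedged usage sketch for this variant (the pointing centre, noise level, and output path are placeholders, and source_list is assumed to be a list of SkySource objects as described in the docstring):

# Hypothetical call; returns whatever casa.run_script produced.
casa_out = simulate_vis_with_casa(pointing_centre=SkyCoord(180 * u.deg, 34 * u.deg),
                                  source_list=source_list,
                                  noise_std_dev=1 * u.mJy,
                                  vis_path='./vis.ms')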