Example #1
    def test_split_MMS_weight_corr_sel(self):
        '''mstransform: Split MMS in parallel. Check WEIGHT shape when selecting correlation'''
        # Create an MMS in the setup. It creates self.testmms
        self.createMMS(self.vis, axis='scan', spws='0,1')
        
        self.outputms = 'corrRR_LL.mms'
        mstransform(vis=self.testmms, outputvis=self.outputms, datacolumn='data', correlation='RR,LL',spw='0')
        
        self.assertTrue(ParallelTaskHelper.isParallelMS(self.outputms),'Output is not an MMS')
        
        mslocal = mstool()
        mslocal.open(self.outputms)
        sublist = mslocal.getreferencedtables()
        self.assertEqual(len(sublist), 2)
        
        # Test DD table
        msmdt = msmdtool()
        msmdt.open(self.outputms)
        out_dds = msmdt.datadescids()
        msmdt.done()
        
        ref = [0]
        for i in out_dds:
            self.assertEqual(out_dds[i], ref[i])

        # The separation axis should be copied to the output MMS
        in_sepaxis = ph.axisType(self.testmms)
        out_sepaxis = ph.axisType(self.outputms)
        self.assertEqual(in_sepaxis, out_sepaxis, 'AxisTypes from input and output MMS do not match')

        # Check the dimensions of the WEIGHT and SIGMA columns. CAS-6946
        out_ws = th.getColShape(self.outputms,'WEIGHT')
        out_ss = th.getColShape(self.outputms,'SIGMA')
        self.assertEqual(out_ws[0],'[2]','WEIGHT shape is not correct')
        self.assertEqual(out_ss[0],'[2]','SIGMA shape is not correct')
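A minimal sketch of the same WEIGHT/SIGMA shape check done directly with the table tool, assuming the taskinit tbtool is available as in the other examples; 'corrRR_LL.mms' is the output MMS created in the test above:

# Sketch only: inspect the per-row WEIGHT/SIGMA arrays of the output MMS.
tblocal = tbtool()
tblocal.open('corrRR_LL.mms')
weight0 = tblocal.getcell('WEIGHT', 0)   # one weight per correlation
sigma0 = tblocal.getcell('SIGMA', 0)
tblocal.close()
print(weight0.shape)   # expected (2,) after selecting correlation='RR,LL'
print(sigma0.shape)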
Example #2
def modeltransfer(msfile, spw='', reference='XX', transfer='YY'):
    from taskinit import mstool
    pol_dict = {'XX': 0, 'YY': 1, 'XY': 2, 'YX': 3}
    refidx = pol_dict[reference]
    trfidx = pol_dict[transfer]
    datams = mstool()
    datams.open(msfile, nomodify=False)

    if '~' in spw:
        sp0, sp1 = spw.split('~')
        for sp in range(int(sp0), int(sp1) + 1):
            staql = {'spw': str(sp)}
            datams.selectinit(reset=True)
            datams.msselect(staql)
            modeldata = datams.getdata(['model_data'])
            modeldata['model_data'][trfidx, ...] = modeldata['model_data'][refidx, ...]
            datams.putdata(modeldata)
        datams.close()
    else:
        datams.selectinit(reset=True)
        staql = {'spw': spw}
        datams.msselect(staql)
        modeldata = datams.getdata(['model_data'])
        modeldata['model_data'][trfidx, ...] = modeldata['model_data'][refidx, ...]
        datams.putdata(modeldata)
        datams.close()
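A possible call of modeltransfer, copying the XX model into YY over a range of spws; the MS name is a placeholder and the MS is assumed to already contain a MODEL_DATA column:

# Sketch only: 'calibrated.ms' is a placeholder file name.
modeltransfer('calibrated.ms', spw='0~3', reference='XX', transfer='YY')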
Example #3
    def test_split_MMS(self):
        '''mstransform: Split MMS in parallel'''
        # Create an MMS in the setup. It creates self.testmms
        self.createMMS(self.vis, axis='scan', spws='0,1')
        
        self.outputms = 'scan30.mms'
        mstransform(vis=self.testmms, outputvis=self.outputms, datacolumn='data', scan='30')
        
        self.assertTrue(ParallelTaskHelper.isParallelMS(self.outputms),'Output is not an MMS')
        
        mslocal = mstool()
        mslocal.open(self.outputms)
        sublist = mslocal.getreferencedtables()
        self.assertEqual(len(sublist), 1)
        
        # Test DD table
        msmdt = msmdtool()
        msmdt.open(self.outputms)
        out_dds = msmdt.datadescids()
        msmdt.done()
        
        ref = [0,1]
        for i in out_dds:
            self.assertEqual(out_dds[i], ref[i])

        # The separation axis should be copied to the output MMS
        in_sepaxis = ph.axisType(self.testmms)
        out_sepaxis = ph.axisType(self.outputms)
        self.assertEqual(in_sepaxis, out_sepaxis, 'AxisTypes from input and output MMS do not match')
Example #4
    def test_MMS1(self):
        '''mstransform: input MMS should be the same as output MMS'''
        
        # Create an MMS in the setup
        self.createMMS(self.vis, axis='scan', spws='0,1')
                
        # Create another MS and compare. They should be the same
        self.outputms = 'thesame.mms'
        mstransform(vis=self.testmms, outputvis=self.outputms, datacolumn='data')
        
        self.assertTrue(ParallelDataHelper.isParallelMS(self.outputms),'Output is not an MMS')
                
        # Sort the MSs so that they can be compared
        myms = mstool()
        
        myms.open(self.testmms)
        myms.sort('input_sorted.ms',['OBSERVATION_ID','ARRAY_ID','SCAN_NUMBER','FIELD_ID','DATA_DESC_ID','ANTENNA1','ANTENNA2','TIME'])
        myms.done()
        
        myms.open(self.outputms)
        myms.sort('output_sorted.ms',['OBSERVATION_ID','ARRAY_ID','SCAN_NUMBER','FIELD_ID','DATA_DESC_ID','ANTENNA1','ANTENNA2','TIME'])
        myms.done()

        # Compare both tables. Ignore the DATA column and compare it in next line
        self.assertTrue(th.compTables('input_sorted.ms','output_sorted.ms', 
                                      ['FLAG_CATEGORY','FLAG','WEIGHT_SPECTRUM','SIGMA_SPECTRUM','DATA']))
        
        # Compare the DATA column
        self.assertTrue(th.compVarColTables('input_sorted.ms','output_sorted.ms','DATA'))
        
        # The separation axis should be copied to the output MMS
        in_sepaxis = ph.axisType(self.testmms)
        out_sepaxis = ph.axisType(self.outputms)
        self.assertEqual(in_sepaxis, out_sepaxis, 'AxisTypes from input and output MMS do not match')
Example #5
    def test_mms_spw_selection(self):
        '''mstransform: Create MMS and select two spws with different polarization shapes'''
        self.outputms = '3cspw12.mms'
        mstransform(vis=self.vis, outputvis=self.outputms, datacolumn='data', spw='1,2',
                    createmms=True, separationaxis='spw')

        # Verify the input versus the output
        myms = mstool()
        myms.open(self.vis)
        myms.msselect({'spw':'1,2'})
        inp_nrow = myms.nrow()
        myms.close()

        myms.open(self.outputms)
        out_nrow = myms.nrow()
        myms.close()
        self.assertEqual(inp_nrow, out_nrow)

        # Verify that DATA_DESCRIPTION table is properly re-indexed.
        spw_col = th.getVarCol(self.outputms+'/DATA_DESCRIPTION', 'SPECTRAL_WINDOW_ID')
        self.assertEqual(spw_col.keys().__len__(), 2, 'Wrong number of rows in DD table')
        self.assertEqual(spw_col['r1'][0], 0,'Error re-indexing SPECTRAL_WINDOW_ID of DATA_DESCRIPTION table')
        self.assertEqual(spw_col['r2'][0], 1,'Error re-indexing SPECTRAL_WINDOW_ID of DATA_DESCRIPTION table')

        pol_col = th.getVarCol(self.outputms+'/DATA_DESCRIPTION', 'POLARIZATION_ID')
        self.assertEqual(pol_col['r1'][0], 2,'Error in POLARIZATION_ID of DATA_DESCRIPTION table')
        self.assertEqual(pol_col['r2'][0], 3,'Error in POLARIZATION_ID of DATA_DESCRIPTION table')

        # Verify that POLARIZATION table is not re-sized.
        corr_col = th.getVarCol(self.outputms+'/POLARIZATION', 'NUM_CORR')
        self.assertEqual(corr_col.keys().__len__(), 4, 'Wrong number of rows in POLARIZATION table')

        # Check the FEED table
        out_feed_spw = th.getVarCol(self.outputms+'/FEED', 'SPECTRAL_WINDOW_ID')
        self.assertEqual(len(out_feed_spw.keys()), 52)
Example #6
    def test6(self):
        '''cvel2 6: I/O vis set, more complex input vis, one field selected, one spw selected, passall = True'''
        if testmms:
            return
        self.setUp_vis_a()
        myvis = vis_a
        os.system('ln -sf ' + myvis + ' myinput.ms')
        rval = cvel2(vis='myinput.ms',
                     outputvis=outfile,
                     field='1',
                     spw='0',
                     nchan=32,
                     start=10,
                     passall=True)
        self.assertNotEqual(rval, False)

        # Simulate the passall=True. This MS has fields 0~6
        desel = outfile + '.deselected'
        split(vis='myinput.ms',
              outputvis=desel,
              field='0,2,3,4,5,6',
              spw='0',
              datacolumn='all')
        mslocal = mstool()
        mslocal.open(outfile, nomodify=False)
        mslocal.concatenate(msfile=desel)
        mslocal.close()
        ret = (verify_ms(outfile, 2, 32, 0))
        self.assertTrue(ret[0], ret[1])
Example #7
    def test_nomms(self):
        '''Partition: Create a normal MS with createmms=False'''
        partition(vis=self.msfile, outputvis=self.mmsfile, createmms=False)

        self.assertTrue(os.path.exists(self.mmsfile),
                        'MMS was not created for this test')

        # Sort the output MSs so that they can be compared
        myms = mstool()

        myms.open(self.msfile)
        myms.sort('ms_sorted.ms', [
            'OBSERVATION_ID', 'ARRAY_ID', 'SCAN_NUMBER', 'FIELD_ID',
            'DATA_DESC_ID', 'ANTENNA1', 'ANTENNA2', 'TIME'
        ])
        myms.done()

        myms.open(self.mmsfile)
        myms.sort('mms_sorted.ms', [
            'OBSERVATION_ID', 'ARRAY_ID', 'SCAN_NUMBER', 'FIELD_ID',
            'DATA_DESC_ID', 'ANTENNA1', 'ANTENNA2', 'TIME'
        ])
        myms.done()

        # Compare both tables. Ignore the DATA column and compare it in next line
        self.assertTrue(
            th.compTables(
                'ms_sorted.ms', 'mms_sorted.ms',
                ['FLAG_CATEGORY', 'FLAG', 'WEIGHT_SPECTRUM', 'DATA']))

        # Compare the DATA column
        self.assertTrue(
            th.compVarColTables('ms_sorted.ms', 'mms_sorted.ms', 'DATA'))
Example #8
    def test_freqavg10(self):
        '''mstransform: Average using different bins, channel selection, both axes, output MMS'''
        self.outputms = "favg10.ms"
        mstransform(vis=self.vis, outputvis=self.outputms, spw='2,12,10:1~10', chanaverage=True,
                    chanbin=[32,128,5], createmms=True, separationaxis='auto', numsubms=6)

        self.assertTrue(os.path.exists(self.outputms))

        # Should create 6 subMSs
        mslocal = mstool()
        mslocal.open(thems=self.outputms)
        sublist = mslocal.getreferencedtables()
        mslocal.close()
        self.assertEqual(sublist.__len__(), 6, 'Should have created 6 subMSs')

        # Output should be:
        # spw=0 4 channels
        # spw=1 1 channel
        # spw=2 2 channels
        ret = th.verifyMS(self.outputms, 3, 4, 0, ignoreflags=True)
        self.assertTrue(ret[0],ret[1])
        ret = th.verifyMS(self.outputms, 3, 1, 1, ignoreflags=True)
        self.assertTrue(ret[0],ret[1])
        ret = th.verifyMS(self.outputms, 3, 2, 2, ignoreflags=True)
        self.assertTrue(ret[0],ret[1])

        # Verify that some sub-tables are properly re-indexed.
        spw_col = th.getVarCol(self.outputms+'/DATA_DESCRIPTION', 'SPECTRAL_WINDOW_ID')
        self.assertEqual(spw_col.keys().__len__(), 3, 'Wrong number of rows in DD table')
        self.assertEqual(spw_col['r1'][0], 0,'Error re-indexing DATA_DESCRIPTION table')
        self.assertEqual(spw_col['r2'][0], 1,'Error re-indexing DATA_DESCRIPTION table')
        self.assertEqual(spw_col['r3'][0], 2,'Error re-indexing DATA_DESCRIPTION table')
Example #9
    def test_nomms(self):
        """Partition: Create a normal MS with createmms=False"""
        partition(vis=self.msfile, outputvis=self.mmsfile, createmms=False)

        self.assertTrue(os.path.exists(self.mmsfile), "MMS was not created for this test")

        # Sort the output MSs so that they can be compared
        myms = mstool()

        myms.open(self.msfile)
        myms.sort(
            "ms_sorted.ms",
            ["OBSERVATION_ID", "ARRAY_ID", "SCAN_NUMBER", "FIELD_ID", "DATA_DESC_ID", "ANTENNA1", "ANTENNA2", "TIME"],
        )
        myms.done()

        myms.open(self.mmsfile)
        myms.sort(
            "mms_sorted.ms",
            ["OBSERVATION_ID", "ARRAY_ID", "SCAN_NUMBER", "FIELD_ID", "DATA_DESC_ID", "ANTENNA1", "ANTENNA2", "TIME"],
        )
        myms.done()

        # Compare both tables. Ignore the DATA column and compare it in next line
        self.assertTrue(
            th.compTables("ms_sorted.ms", "mms_sorted.ms", ["FLAG_CATEGORY", "FLAG", "WEIGHT_SPECTRUM", "DATA"])
        )

        # Compare the DATA column
        self.assertTrue(th.compVarColTables("ms_sorted.ms", "mms_sorted.ms", "DATA"))
Example #10
    def test2_setjy_scratchless_mode_multiple_model(self):
        """Test 2: Set vis model header in one multiple fields """

        retval = setjy(vis=self.vis, field='1331+305*',fluxdensity=[1331.,0.,0.,0.], 
                       scalebychan=False, usescratch=False,standard='manual')
        self.assertTrue(retval, "setjy run failed")
        retval = setjy(vis=self.vis, field='1445+099*',fluxdensity=[1445.,0.,0.,0.], 
                       scalebychan=False, usescratch=False,standard='manual')
        self.assertTrue(retval, "setjy run failed")
                   
        mslocal = mstool()
        mslocal.open(self.vis)
        listSubMSs = mslocal.getreferencedtables()
        mslocal.close()
        #listSubMSs.append(self.vis)
        for subMS in listSubMSs:
            tblocal = tbtool()
            tblocal.open(subMS + '/SOURCE')
            nrows = tblocal.nrows()
            for row_i in range(0,nrows):
                try:
                    model_i = tblocal.getcell('SOURCE_MODEL',row_i)
                    if (row_i == 0):
                        self.assertEqual(model_i['cl_0']['fields'][0],row_i)
                        self.assertEqual(model_i['cl_0']['container']['component0']['flux']['value'][0],1331.)
                    elif (row_i == 1):
                        self.assertEqual(model_i['cl_0']['fields'][0],row_i)
                        self.assertEqual(model_i['cl_0']['container']['component0']['flux']['value'][0],1445.)                    
                    else:
                        self.assertEqual(len(model_i),0)
                except:
                    casalog.post("Problem accesing SOURCE_MODEL col from subMS %s" % subMS ,
                                 "SEVERE","test2_setjy_scratchless_mode_multiple_model")                        
            tblocal.close()            
Example #11
def clearcal(
    vis=None,
    field=None,
    spw=None,
    intent=None,
    addmodel=None,
):

    casalog.origin('clearcal')

    # Do the trivial parallelization
    if ParallelTaskHelper.isParallelMS(vis):
        helper = ParallelTaskHelper('clearcal', locals())
        helper.go()
        return

    # Local versions of the tools
    tblocal = tbtool()
    cblocal = cbtool()
    mslocal = mstool()

    try:

        # we will initialize scr cols only if we don't create them
        doinit = False

        if (type(vis) == str) & os.path.exists(vis):
            tblocal.open(vis)
            doinit = tblocal.colnames().count('CORRECTED_DATA') > 0
            tblocal.close()

            # We ignore selection if creating the scratch columns
            if not doinit:
                casalog.post(
                    'Need to create scratch columns; ignoring selection.')

            cblocal.setvi(old=True, quiet=False)
            # Old VI for now
            cblocal.open(vis, addmodel=addmodel)
        else:
            raise Exception, \
                'Visibility data set not found - please verify the name'

        # If necessary (scr col not just created), initialize scr cols
        if doinit:
            cblocal.selectvis(field=field, spw=spw, intent=intent)
            cblocal.initcalset(1)
        cblocal.close()

        # Write history to the MS
        param_names = clearcal.func_code.co_varnames[:clearcal.func_code.
                                                     co_argcount]
        param_vals = [eval(p) for p in param_names]
        casalog.post('Updating the history in the output', 'DEBUG1')
        write_history(mslocal, vis, 'clearcal', param_names, param_vals,
                      casalog)

    except Exception, instance:

        print '*** Error ***', instance
Example #12
 def test6(self):
     '''cvel2 6: I/O vis set, more complex input vis, one field selected, one spw selected, passall = True'''
     if testmms:
         return
     self.setUp_vis_a()
     myvis = vis_a
     os.system('ln -sf ' + myvis + ' myinput.ms')
     rval = cvel2(
         vis = 'myinput.ms',
         outputvis = outfile,
         field = '1',
         spw = '0',
         nchan = 32,
         start = 10,
         passall = True
         )
     self.assertNotEqual(rval,False)
     
     # Simulate the passall=True. This MS has fields 0~6
     desel = outfile+'.deselected'
     split(vis='myinput.ms',outputvis=desel,field='0,2,3,4,5,6',spw='0',datacolumn='all')
     mslocal = mstool()
     mslocal.open(outfile, nomodify=False)
     mslocal.concatenate(msfile=desel)            
     mslocal.close()
     ret = (verify_ms(outfile, 2, 32, 0))
     self.assertTrue(ret[0],ret[1])
Example #13
 def setUpTest(self, nomodify=True):
     # Despite setUp/tearDown, some tests with writable MS left
     # MS in a changed state and caused later tests to fail.
     # So will copy RO testms to a RW one in those tests
     self.ms = mstool()
     if not os.path.exists(self.testms):
         shutil.copytree(datafile, self.testms)
     self.ms.open(self.testms, nomodify)
Example #14
 def setUpTest(self, nomodify=True):
     # Despite setUp/tearDown, some tests with writable MS left
     # MS in a changed state and caused later tests to fail.
     # So will copy RO testms to a RW one in those tests
     self.ms = mstool()
     if not os.path.exists(self.testms):
         shutil.copytree(datafile, self.testms)
     self.ms.open(self.testms, nomodify)
Example #15
def sortFile(input_file,output_file,sort_order=None):
    
    if sort_order is None:
        sort_order = ['OBSERVATION_ID','ARRAY_ID','SCAN_NUMBER','FIELD_ID','DATA_DESC_ID','ANTENNA1','ANTENNA2','TIME']
        
    mymstool = mstool()
    mymstool.open(input_file)
    mymstool.sort(output_file,sort_order)
    mymstool.done()
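A minimal usage sketch of sortFile with its default sort order; the file names are placeholders:

# Sketch only: write a row-sorted copy of a hypothetical input MS.
sortFile('myinput.ms', 'myinput_sorted.ms')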
Example #16
def clearcal(
    vis=None,
    field=None,
    spw=None,
    intent=None,
    addmodel=None,
    ):

    casalog.origin('clearcal')

    # Do the trivial parallelization
    if ParallelTaskHelper.isParallelMS(vis):
        helper = ParallelTaskHelper('clearcal', locals())
        helper.go()
        return

    # Local versions of the tools
    tblocal = tbtool()
    cblocal = cbtool()
    mslocal = mstool()

    try:

        # we will initialize scr cols only if we don't create them
        doinit = False

        if (type(vis) == str) & os.path.exists(vis):
            tblocal.open(vis)
            doinit = tblocal.colnames().count('CORRECTED_DATA') > 0
            tblocal.close()

            # We ignore selection if creating the scratch columns
            if not doinit:
                casalog.post('Need to create scratch columns; ignoring selection.'
                             )

            cblocal.open(vis, addmodel=addmodel)
        else:
            raise Exception, \
                'Visibility data set not found - please verify the name'

        # If necessary (scr col not just created), initialize scr cols
        if doinit:
            cblocal.selectvis(field=field, spw=spw, intent=intent)
            cblocal.initcalset(1)
        cblocal.close()

        # Write history to the MS
        param_names = clearcal.func_code.co_varnames[:clearcal.func_code.co_argcount]
        param_vals = [eval(p) for p in param_names]
        casalog.post('Updating the history in the output', 'DEBUG1')
        write_history(mslocal, vis, 'clearcal', param_names,
                      param_vals, casalog)
        
    except Exception, instance:

        print '*** Error ***', instance
Example #17
def uvsub(vis=None,reverse=False):

    """Subtract model from the corrected visibility data
    
        uvsub(vis='ngc5921.ms', reverse=false)
        
        This function subtracts model visibility data from corrected visibility
        data leaving the residuals in the corrected data column.  If the
        parameter 'reverse' is set true, the process is reversed.
        
        Keyword arguments:
        vis -- Name of input visibility file (MS)
                default: none; example: vis='ngc5921.ms'
        reverse -- Reverse the operation (add rather than subtract)
                default: false; example: reverse=true
        
        uvsub(vis='ngc5921.ms', reverse=false)
    
    """

    #Python script
    #
    try:
        ms = mstool()
        casalog.origin('uvsub')
        if ((type(vis)==str) & (os.path.exists(vis))):
            ms.open(thems=vis,nomodify=False)
        else:
            raise Exception, 'Visibility data set not found - please verify the name'
            return
        ms.uvsub(reverse)
        ms.close()
        
        # Write history to MS
        try:
            param_names = uvsub.func_code.co_varnames[:uvsub.func_code.co_argcount]
            param_vals = [eval(p) for p in param_names]
            write_history(mstool(), vis, 'uvsub', param_names,
                          param_vals, casalog)
        except Exception, instance:
            casalog.post("*** Error \'%s\' updating HISTORY" % (instance),
                     'WARN')            

        return
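A usage sketch based on the docstring above; 'ngc5921.ms' is the docstring's example data set:

# Sketch only: subtract the model from CORRECTED_DATA, then add it back.
uvsub(vis='ngc5921.ms', reverse=False)
uvsub(vis='ngc5921.ms', reverse=True)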
Example #18
    def test_default_scan(self):
        '''Partition: create an MMS with default values and axis=scan'''
        partition(vis=self.msfile,
                  outputvis=self.mmsfile,
                  separationaxis='scan')

        self.assertTrue(os.path.exists(self.mmsfile),
                        'MMS was not created for this test')

        # Take the dictionary and compare with original MS
        thisdict = listpartition(vis=self.mmsfile, createdict=True)

        # Compare nrows of all scans
        slist = ph.getMMSScans(thisdict)

        for s in slist:
            mmsN = ph.getMMSScanNrows(thisdict, s)
            msN = ph.getScanNrows(self.msfile, s)
            self.assertEqual(
                mmsN, msN,
                'Nrows in scan=%s differs: mms_nrows=%s <--> ms_nrows=%s' %
                (s, mmsN, msN))

        # Compare spw IDs
        for s in slist:
            mms_spw = ph.getSpwIds(self.mmsfile, s)
            ms_spw = ph.getSpwIds(self.msfile, s)
            self.assertEqual(mms_spw, ms_spw, 'list of spws in scan=%s differs: '\
                             'mms_spw=%s <--> ms_spw=%s' %(s, mms_spw, ms_spw))

        # Sort the output MSs so that they can be compared
        myms = mstool()

        myms.open(self.msfile)
        myms.sort('ms_sorted.ms', [
            'OBSERVATION_ID', 'ARRAY_ID', 'SCAN_NUMBER', 'FIELD_ID',
            'DATA_DESC_ID', 'ANTENNA1', 'ANTENNA2', 'TIME'
        ])
        myms.done()

        myms.open(self.mmsfile)
        myms.sort('mms_sorted.ms', [
            'OBSERVATION_ID', 'ARRAY_ID', 'SCAN_NUMBER', 'FIELD_ID',
            'DATA_DESC_ID', 'ANTENNA1', 'ANTENNA2', 'TIME'
        ])
        myms.done()

        self.assertTrue(
            th.compTables('ms_sorted.ms', 'mms_sorted.ms', [
                'FLAG', 'FLAG_CATEGORY', 'TIME_CENTROID', 'WEIGHT_SPECTRUM',
                'DATA'
            ]))

        # Compare the DATA column
        self.assertTrue(
            th.compVarColTables('ms_sorted.ms', 'mms_sorted.ms', 'DATA'))
Example #19
def statwt2(vis, timebin, chanbin):
    casalog.origin('statwt2')
    try:
        myms = mstool()
        myms.open(vis, nomodify=False)
        myms.statwt2(timebin=timebin, chanbin=chanbin)
        return True
    except Exception, instance:
        casalog.post('*** Error ***' + str(instance), 'SEVERE')
        raise
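A possible call of this statwt2 wrapper; the MS name and bin sizes are placeholders:

# Sketch only: re-compute weights over 10-minute time bins and 8-channel bins.
statwt2('myinput.ms', timebin='600s', chanbin=8)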
Example #20
    def test_default_scan(self):
        """Partition: create an MMS with default values and axis=scan"""
        partition(vis=self.msfile, outputvis=self.mmsfile, separationaxis="scan")

        self.assertTrue(os.path.exists(self.mmsfile), "MMS was not created for this test")

        # Take the dictionary and compare with original MS
        thisdict = listpartition(vis=self.mmsfile, createdict=True)

        # Compare nrows of all scans
        slist = ph.getMMSScans(thisdict)

        for s in slist:
            mmsN = ph.getMMSScanNrows(thisdict, s)
            msN = ph.getScanNrows(self.msfile, s)
            self.assertEqual(mmsN, msN, "Nrows in scan=%s differs: mms_nrows=%s <--> ms_nrows=%s" % (s, mmsN, msN))

        # Compare spw IDs
        for s in slist:
            mms_spw = ph.getSpwIds(self.mmsfile, s)
            ms_spw = ph.getSpwIds(self.msfile, s)
            self.assertEqual(
                mms_spw, ms_spw, "list of spws in scan=%s differs: " "mms_spw=%s <--> ms_spw=%s" % (s, mms_spw, ms_spw)
            )

        # Sort the output MSs so that they can be compared
        myms = mstool()

        myms.open(self.msfile)
        myms.sort(
            "ms_sorted.ms",
            ["OBSERVATION_ID", "ARRAY_ID", "SCAN_NUMBER", "FIELD_ID", "DATA_DESC_ID", "ANTENNA1", "ANTENNA2", "TIME"],
        )
        myms.done()

        myms.open(self.mmsfile)
        myms.sort(
            "mms_sorted.ms",
            ["OBSERVATION_ID", "ARRAY_ID", "SCAN_NUMBER", "FIELD_ID", "DATA_DESC_ID", "ANTENNA1", "ANTENNA2", "TIME"],
        )
        myms.done()

        self.assertTrue(
            th.compTables(
                "ms_sorted.ms", "mms_sorted.ms", ["FLAG", "FLAG_CATEGORY", "TIME_CENTROID", "WEIGHT_SPECTRUM", "DATA"]
            )
        )

        # Compare the DATA column
        self.assertTrue(th.compVarColTables("ms_sorted.ms", "mms_sorted.ms", "DATA"))
Example #21
 def test_linked_cols(self):
     '''partition: Verify that SYSPOWER, POINTING and SYSCAL are links'''
     self.outputms = "linkcols.mms"
     partition(self.vis, outputvis=self.outputms, numsubms=4, flagbackup=False)
     self.assertTrue(os.path.exists(self.outputms),'Output MMS does not exist')
     
     # Columns SYSPOWER, POINTING and SYSCAL should be links in most subMS
     mslocal = mstool()
     mslocal.open(self.outputms)
     subms = mslocal.getreferencedtables()
     mslocal.close()
     cols = ['POINTING','SYSCAL','SYSPOWER']
     for col in cols:
         self.assertTrue(os.path.islink(subms[1] + '/' + col))
Example #22
    def test_linked_cols(self):
        '''partition: Verify that SYSPOWER, POINTING and SYSCAL are links'''
        self.outputms = "linkcols.mms"
        partition(self.vis,
                  outputvis=self.outputms,
                  numsubms=4,
                  flagbackup=False)
        self.assertTrue(os.path.exists(self.outputms),
                        'Output MMS does not exist')

        # Columns SYSPOWER, POINTING and SYSCAL should be links in most subMS
        mslocal = mstool()
        mslocal.open(self.outputms)
        subms = mslocal.getreferencedtables()
        mslocal.close()
        cols = ['POINTING', 'SYSCAL', 'SYSPOWER']
        for col in cols:
            self.assertTrue(os.path.islink(subms[1] + '/' + col))
Example #23
    def test_parallel5(self):
        '''mstransform: Do not combine spws and create MMS with axis scan in parallel.'''
        self.setUp_jupiter()
        self.outputms = 'parallel5.mms'
        mstransform(vis=self.vis, outputvis=self.outputms, combinespws=False, spw='0,1',field = '12',
             datacolumn='DATA', createmms=True, separationaxis='scan',numsubms=6)

        self.assertTrue(os.path.exists(self.outputms))

        # Should create 6 subMSs
        mslocal = mstool()
        mslocal.open(thems=self.outputms)
        sublist = mslocal.getreferencedtables()
        mslocal.close()
        self.assertEqual(sublist.__len__(), 6, 'Should have created 6 subMSs')

        ret = th.verifyMS(self.outputms, 2, 1, 0)
        self.assertTrue(ret[0],ret[1])
Example #24
    def test_default_scan(self):
        '''Partition: create an MMS with default values and axis=scan'''
        partition(vis=self.msfile, outputvis=self.mmsfile, separationaxis='scan')
        
        self.assertTrue(os.path.exists(self.mmsfile), 'MMS was not created for this test')
        
        # Take the dictionary and compare with original MS
        thisdict = listpartition(vis=self.mmsfile, createdict=True)
        
        # Compare nrows of all scans
        slist = ph.getMMSScans(thisdict)
        
        for s in slist:
            mmsN = ph.getMMSScanNrows(thisdict, s)
            msN = ph.getScanNrows(self.msfile, s)
            self.assertEqual(mmsN, msN, 'Nrows in scan=%s differs: mms_nrows=%s <--> ms_nrows=%s'
                             %(s, mmsN, msN))
 
        # Compare spw IDs
        for s in slist:
            mms_spw = ph.getSpwIds(self.mmsfile, s)
            ms_spw = ph.getSpwIds(self.msfile, s)
            self.assertEqual(mms_spw, ms_spw, 'list of spws in scan=%s differs: '\
                             'mms_spw=%s <--> ms_spw=%s' %(s, mms_spw, ms_spw))

        # Sort the output MSs so that they can be compared
        myms = mstool()
        
        myms.open(self.msfile)
        myms.sort('ms_sorted.ms',['OBSERVATION_ID','ARRAY_ID','SCAN_NUMBER','FIELD_ID','DATA_DESC_ID','ANTENNA1','ANTENNA2','TIME'])
        myms.done()
        
        myms.open(self.mmsfile)
        myms.sort('mms_sorted.ms',['OBSERVATION_ID','ARRAY_ID','SCAN_NUMBER','FIELD_ID','DATA_DESC_ID','ANTENNA1','ANTENNA2','TIME'])
        myms.done()

        self.assertTrue(th.compTables('ms_sorted.ms', 'mms_sorted.ms', 
                                      ['FLAG','FLAG_CATEGORY','TIME_CENTROID',
                                       'WEIGHT_SPECTRUM','DATA']))
        
        # Compare the DATA column
        self.assertTrue(th.compVarColTables('ms_sorted.ms','mms_sorted.ms','DATA'))
Example #25
    def test_nomms(self):
        '''Partition: Create a normal MS with createmms=False'''
        partition(vis=self.msfile, outputvis=self.mmsfile, createmms=False)
        
        self.assertTrue(os.path.exists(self.mmsfile), 'MMS was not created for this test')
        
        # Sort the output MSs so that they can be compared
        myms = mstool()
        
        myms.open(self.msfile)
        myms.sort('ms_sorted.ms',['OBSERVATION_ID','ARRAY_ID','SCAN_NUMBER','FIELD_ID','DATA_DESC_ID','ANTENNA1','ANTENNA2','TIME'])
        myms.done()
        
        myms.open(self.mmsfile)
        myms.sort('mms_sorted.ms',['OBSERVATION_ID','ARRAY_ID','SCAN_NUMBER','FIELD_ID','DATA_DESC_ID','ANTENNA1','ANTENNA2','TIME'])
        myms.done()

        # Compare both tables. Ignore the DATA column and compare it in next line
        self.assertTrue(th.compTables('ms_sorted.ms','mms_sorted.ms', 
                                      ['FLAG_CATEGORY','FLAG','WEIGHT_SPECTRUM','DATA']))
        
        # Compare the DATA column
        self.assertTrue(th.compVarColTables('ms_sorted.ms','mms_sorted.ms','DATA'))
Example #26
def delmod(vis=None, otf=None, field=None, scr=None):

    casalog.origin('delmod')

    # Do the trivial parallelization
    if ParallelTaskHelper.isParallelMS(vis):
        helper = ParallelTaskHelper('delmod', locals())
        helper.go()
        return

    #Python script
    try:

        # only if vis exists...
        if ((type(vis) == str) & (os.path.exists(vis))):
            # ... and we are asked to do something...
            # open without adding anything!
            _cb.open(vis, addcorr=False, addmodel=False)
            _cb.delmod(otf=otf, field=field, scr=scr)
            _cb.close()
        else:
            raise Exception, 'Visibility data set not found - please verify the name'

        # Write history to MS
        try:
            param_names = delmod.func_code.co_varnames[:delmod.func_code.
                                                       co_argcount]
            param_vals = [eval(p) for p in param_names]
            write_history(mstool(), vis, 'delmod', param_names, param_vals,
                          casalog)
        except Exception, instance:
            casalog.post("*** Error \'%s\' updating HISTORY" % (instance),
                         'WARN')

    except Exception, instance:
        print '*** Error ***', instance
Example #27
def getPower(vis,
             scan,
             spw,
             duration,
             fromEnd=False,
             skipStartSecs=1.0,
             skipEndSecs=1.0,
             verbose=True):
    """
    Return a per-antenna list of total power values for the two polarizations of the specified scan and spw.
    duration: number of samples to use starting from the start of the scan
    I think the idea here is that the sky subscan is always the first subscan of a Tsys scan.  If this ever
    changes to one of the loads, then the result will be less than optimal. It would likely result in very
    small changes from the original tsys table (i.e. it will not get normalized).   - Todd
    """
    myms = taskinit.mstool()
    myms.open(vis)
    myms.selectinit(datadescid=spw)
    myms.selecttaql(
        'SCAN_NUMBER==%d AND DATA_DESC_ID==%d AND ANTENNA1==ANTENNA2' %
        (scan, spw))
    #    nrows = myms.nrow()
    print("    Working on spw: %d" % (spw))
    d = myms.getdata(['real', 'axis_info'], ifraxis=True)
    myms.close()
    if verbose:
        print("keys = ", d.keys())
    if 'real' not in d.keys():
        return
    powers = d['real']
    ants = d['axis_info']['ifr_axis']['ifr_name']
    pols = list(d['axis_info']['corr_axis'])
    idxPol0 = pols.index("XX")
    idxPol1 = pols.index("YY")
    if verbose:
        print("Pol 0,1 indexes: %d, %d" % (idxPol0, idxPol1))
    ts = d['axis_info']['time_axis']['MJDseconds']
    t0 = ts[0]
    tf = ts[-1]
    ts -= t0
    if verbose:
        print("times:", ts)
        print("pols:", pols)
    # choose the time samples we want
    sampStart = -99
    sampEnd = -99
    for i in range(len(ts)):
        if ts[i] > skipStartSecs:
            sampStart = i
            break
    if sampStart >= len(ts):
        sampStart = len(ts) - 1
    for i in range(len(ts) - 1, sampStart, -1):
        if tf - ts[i] > skipEndSecs:
            sampEnd = i
            break
    if sampEnd <= sampStart:
        sampEnd = sampStart + 1
    if not fromEnd:
        # take duration samples from start
        for i in range(sampStart + 1, sampEnd, 1):
            if ts[i] - ts[sampStart] > duration:
                sampEnd = i
                break
    else:
        # instead from end
        for i in range(sampEnd - 1, sampStart, -1):
            if ts[sampEnd] - ts[i] > duration:
                sampStart = i
                break
    if verbose:
        print("chosen sample range: %d to %d" % (sampStart, sampEnd))
    # indexing is pol, baseline(=0), ant, sample
    if verbose:
        print("number of antennas to produce powers for:", len(ants))
    result = []
    for ant in range(len(ants)):
        powersPol0 = powers[idxPol0][0][ant][sampStart:sampEnd]
        powersPol1 = powers[idxPol1][0][ant][sampStart:sampEnd]
        #print "Ant %d powers pol 0: %s, pol 1: %s" % (ant, str(powersPol0), str(powersPol1))
        medianP0 = np.median(powersPol0)
        medianP1 = np.median(powersPol1)
        result.append([medianP0, medianP1])
        #print "Ant %2d (%s) median powers for pols 0,1: %12.6f, %12.6f (nSamples = %d, %d)" % (ant, ants[ant], medianP0, medianP1, len(powersPol0), len(powersPol1))
    return result
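A usage sketch of getPower, assuming numpy is imported as np as the function body requires; the MS name, scan and spw are placeholders:

# Sketch only: median XX/YY total power per antenna for a hypothetical Tsys scan.
powers = getPower('uid_target.ms', scan=3, spw=17, duration=2.0, verbose=False)
for iant, (p0, p1) in enumerate(powers):
    print("antenna %d: XX=%g YY=%g" % (iant, p0, p1))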
Example #28
def visstat2(vis=None,
             axis=None,
             datacolumn=None,
             useflags=None,
             spw=None,
             field=None,
             selectdata=None,
             antenna=None,
             uvrange=None,
             timerange=None,
             correlation=None,
             scan=None,
             array=None,
             observation=None,
             timeaverage=None,
             timebin=None,
             timespan=None,
             maxuvwdistance=None,
             disableparallel=None,
             ddistart=None,
             taql=None,
             monolithic_processing=None,
             intent=None,
             reportingaxes=None):

    casalog.origin('visstat2')

    mslocal = mstool()

    mslocal.open(vis)

    if axis in ['amp', 'amplitude', 'phase', 'imag', 'imaginary', 'real']:
        complex_type = axis
        col = datacolumn
    else:
        complex_type = ''
        col = axis

    if (not selectdata):
        antenna = ''
        uvrange = ''
        timerange = ''
        correlation = ''
        scan = ''
        array = ''
        observation = ''

    s = mslocal.statistics2(
        column=col.upper(),
        complex_value=complex_type,
        useflags=useflags,
        useweights=False,
        spw=spw,
        field=field,
        #                            feed="",
        baseline=antenna,
        uvrange=uvrange,
        time=timerange,
        correlation=correlation,
        scan=scan,
        intent=intent,
        array=array,
        obs=str(observation),
        reportingaxes=str(reportingaxes),
        timeaverage=timeaverage,
        timebin=timebin,
        timespan=timespan,
        maxuvwdistance=maxuvwdistance)

    mslocal.close()

    for stats in s.keys():
        casalog.post(stats + " values --- ", "NORMAL")

        if s[stats]['npts'] > 0:
            casalog.post(
                "         -- number of points [npts]:           " +
                str(int(round(s[stats]['npts']))), "NORMAL")
            casalog.post(
                "         -- minimum value [min]:               " +
                str(s[stats]['min']), "NORMAL")
            casalog.post(
                "         -- maximum value [max]:               " +
                str(s[stats]['max']), "NORMAL")
            casalog.post(
                "         -- Sum of values [sum]:               " +
                str(s[stats]['sum']), "NORMAL")
            casalog.post(
                "         -- Sum of squared values [sumsq]:     " +
                str(s[stats]['sumsq']), "NORMAL")

        casalog.post(stats + " statistics --- ", "NORMAL")
        if s[stats]['npts'] > 0:
            casalog.post(
                "        -- Mean of the values [mean]:                 " +
                str(s[stats]['mean']), "NORMAL")
            casalog.post(
                "        -- Variance of the values [variance]:         " +
                str(s[stats]['variance']), "NORMAL")
            casalog.post(
                "        -- Standard deviation of the values [stddev]: " +
                str(s[stats]['stddev']), "NORMAL")
            casalog.post(
                "        -- Root mean square [rms]:                    " +
                str(s[stats]['rms']), "NORMAL")
            casalog.post(
                "        -- Median of the pixel values [median]:       " +
                str(s[stats]['median']), "NORMAL")
            casalog.post(
                "        -- Median of the deviations [medabsdevmed]:   " +
                str(s[stats]['medabsdevmed']), "NORMAL")
            casalog.post(
                "        -- Quartile [quartile]:                       " +
                str(s[stats]['quartile']), "NORMAL")
        else:
            casalog.post(stats + " -- No valid points found", "WARN")

    return s
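A possible call of visstat2; every argument is spelled out because the function above passes them straight to ms.statistics2 without applying defaults, and all values here are placeholders:

# Sketch only: amplitude statistics of the DATA column of one spw.
stats = visstat2(vis='myinput.ms', axis='amp', datacolumn='data', useflags=True,
                 spw='0', field='', selectdata=False, antenna='', uvrange='',
                 timerange='', correlation='', scan='', array='', observation='',
                 timeaverage=False, timebin='0s', timespan='', maxuvwdistance=0.0,
                 intent='', reportingaxes='ddid')
print(stats.keys())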
Example #29
def importatca(files=None,
               vis=None,
               options=None,
               spw=None,
               nscans=None,
               lowfreq=None,
               highfreq=None,
               fields=None,
               edge=8):
    """Convert an RPFITS file into a CASA visibility file (MS).
           The conversion of the RPFITS format into a measurement set.  
           This version has been tested for both old ATCA and CABB data.
................          
           Keyword arguments:
       files -- Name of input RPFITS file(s)
               default: none; example: file='2010-01-02_1234.c999'

....   vis -- Output ms name, note a postfix (.ms) is NOT appended to this name
               default: none
               
....   options -- Processing options, comma separated list
                 birdie - flag parts of spectrum known to be bad
                 reweight - (pre-CABB) reweight lag spectrum to avoid ringing
                 noautoflag - don't apply automatic flags (e.g. pointing scans)
                 noxycorr - don't apply xyphase correction
                 fastmosaic - use for large mosaics to speed up data access
                 hires  - turn time binned data into fast sampled data
                 notsys - undo online Tsys calibration
                 noac - don't load autocorrelations
....   spw -- specify the input spectral windows to use. For CABB the order is
              first continuum, 2nd continuum, then any zooms for first band,
              followed by zooms for 2nd band. Pre-CABB data just has 0 and 1.
              The output may have more spectral windows if there are frequency
              changes.
........ default: all

....   nscans -- Number of scans to skip followed by number of scans to read
....       default: 0,0 (read all)

....   lowfreq -- Lowest reference frequency to select
....       default: 0 (all)

....   highfreq -- highest reference frequency to select
....       default: 0 (all)

....   fields -- List of field names to select
........ default: all

....   edge -- Percentage of edge channels to flag. For combined zooms, this 
               specifies the percentage for a single zoom
........ default: 8 (flags 4% of channels at lower and upper edge)
            
        """

    # Python script
    myaf = casac.atcafiller()
    try:
        try:
            casalog.origin('importatca')
            # -----------------------------------------
            # beginning of importatca implementation
            # -----------------------------------------
            myaf.open(vis, files, options)
            firstscan = 0
            lastscan = 9999
            if (nscans != None):
                if len(nscans) > 0:
                    firstscan = nscans[0]
                if len(nscans) > 1:
                    lastscan = nscans[1]
            myaf.select(firstscan, lastscan, spw, lowfreq, highfreq, fields,
                        edge)
            myaf.fill()
        except Exception, e:
            print e
            casalog.post("Failed to import atca rpfits file(s) %s" % files)
            raise
        # Write the args to HISTORY.
        try:
            mslocal = mstool()
            param_names = importatca.func_code.co_varnames[:importatca.
                                                           func_code.
                                                           co_argcount]
            param_vals = [eval(p) for p in param_names]
            write_history(mslocal, vis, 'importatca', param_names, param_vals,
                          casalog)
        except Exception, instance:
            casalog.post("Failed to updated HISTORY", 'WARN')
Example #30
    try:
        import metadata_tools
        os.environ['ALMAIMF_ROOTDIR'] = os.path.split(
            metadata_tools.__file__)[0]
    except ImportError:
        raise ValueError("metadata_tools not found on path; make sure to "
                         "specify ALMAIMF_ROOTDIR environment variable "
                         "or your PYTHONPATH variable to include the directory"
                         " containing the ALMAIMF code.")
else:
    sys.path.append(os.getenv('ALMAIMF_ROOTDIR'))

from metadata_tools import check_channel_flags

msmd = msmdtool()
ms = mstool()
tb = tbtool()

# band name : frequency range (GHz)
bands = {
    'B3': (80, 110),
    'B6': (210, 250),
}


def logprint(string):
    casalog.post(string, origin='split_line_windows')
    print(string)


logprint("ALMAIMF_ROOTDIR directory set to {0}".format(
Example #31
    
if os.environ.has_key('BYPASS_PARALLEL_PROCESSING'):
    ParallelTaskHelper.bypassParallelProcessing(1)

myname = 'test_cvel'
vis_a = 'ngc4826.ms'
vis_b = 'test.ms'
vis_c = 'jupiter6cm.demo-thinned.ms'
vis_d = 'g19_d2usb_targets_line-shortened-thinned.ms'
vis_e = 'evla-highres-sample-thinned.ms'
vis_f = 'test_cvel1.ms'
vis_g = 'jup.ms'
outfile = 'cvel-output.ms'

mytb = tbtool()
myms = mstool()

def verify_ms(msname, expnumspws, expnumchan, inspw, expchanfreqs=[]):
    msg = ''
    mytb.open(msname+'/SPECTRAL_WINDOW')
    nc = mytb.getcell("NUM_CHAN", inspw)
    nr = mytb.nrows()
    cf = mytb.getcell("CHAN_FREQ", inspw)
    mytb.close()
    mytb.open(msname)
    dimdata = mytb.getcell("FLAG", 0)[0].size
    mytb.close()
    if not (nr==expnumspws):
        msg =  "Found "+str(nr)+", expected "+str(expnumspws)+" spectral windows in "+msname
        return [False,msg]
    if not (nc == expnumchan):
Example #32
    def test4(self):
        """hanningsmooth - Test 4: Theoretical and calculated values should be the same for MMS-case"""

        # Split the input to decrease the running time
        split(self.msfile, outputvis="splithan.ms", scan="1,2", datacolumn="data")
        self.msfile = "splithan.ms"

        # create a test MMS. It creates self.testmms
        self.createMMS(self.msfile)
        self.outputms = "hann4.mms"

        # check correct flagging (just for one row as a sample)
        mslocal = mstool()
        mslocal.open(self.msfile)
        mslocal.sort(
            "sorted.ms",
            ["OBSERVATION_ID", "ARRAY_ID", "SCAN_NUMBER", "FIELD_ID", "DATA_DESC_ID", "ANTENNA1", "ANTENNA2", "TIME"],
        )
        mslocal.close()
        self.msfile = "sorted.ms"
        flag_col = th.getVarCol(self.msfile, "FLAG")
        self.assertTrue(flag_col["r1"][0][0] == [False])
        self.assertTrue(flag_col["r1"][0][1] == [False])
        self.assertTrue(flag_col["r1"][0][61] == [False])
        self.assertTrue(flag_col["r1"][0][62] == [False])

        data_col = th.getVarCol(self.msfile, "DATA")
        hanningsmooth(vis=self.testmms, outputvis=self.outputms, datacolumn="data", keepmms=True)
        self.assertTrue(ParallelDataHelper.isParallelMS(self.outputms), "Output should be an MMS")

        # Sort the MMS
        mslocal.open(self.outputms)
        mslocal.sort(
            "sorted.mms",
            ["OBSERVATION_ID", "ARRAY_ID", "SCAN_NUMBER", "FIELD_ID", "DATA_DESC_ID", "ANTENNA1", "ANTENNA2", "TIME"],
        )
        mslocal.close()
        self.outputms = "sorted.mms"

        corr_col = th.getVarCol(self.outputms, "DATA")
        nrows = len(corr_col)

        # check correct flagging (just for one row as a sample)
        flag_col = th.getVarCol(self.outputms, "FLAG")
        self.assertTrue(flag_col["r1"][0][0] == [True])
        self.assertTrue(flag_col["r1"][0][1] == [False])
        self.assertTrue(flag_col["r1"][0][61] == [False])
        self.assertTrue(flag_col["r1"][0][62] == [True])

        # Loop over every 2nd row,pol and get the data for each channel
        max = 1e-05
        for i in range(1, nrows, 2):
            row = "r%s" % i
            # polarization is 0-1
            for pol in range(0, 2):
                # array's channels is 0-63
                for chan in range(1, 62):
                    # channels must start from second and end before the last
                    data = data_col[row][pol][chan]
                    dataB = data_col[row][pol][chan - 1]
                    dataA = data_col[row][pol][chan + 1]

                    Smoothed = th.calculateHanning(dataB, data, dataA)
                    CorData = corr_col[row][pol][chan]

                    # Check the difference
                    self.assertTrue(abs(CorData - Smoothed) < max)
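The reference value computed by th.calculateHanning above follows the standard Hanning smoothing kernel (1/4, 1/2, 1/4); a self-contained numpy sketch of that smoothing, illustrative and not the task implementation:

import numpy as np

def hanning_smooth(spectrum):
    # Each inner channel becomes 0.25*previous + 0.5*current + 0.25*next;
    # edge channels are left untouched here (the task flags them instead).
    spectrum = np.asarray(spectrum, dtype=float)
    out = spectrum.copy()
    out[1:-1] = 0.25 * spectrum[:-2] + 0.5 * spectrum[1:-1] + 0.25 * spectrum[2:]
    return out

print(hanning_smooth([1.0, 2.0, 4.0, 2.0, 1.0]))   # toy 5-channel spectrum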
Example #33
from suncasa.utils import signal_utils as su
import sys

if sys.version_info.major > 2:
    from casatools import ms, quanta, msmetadata
    from casatasks import casalog

    casalog.showconsole(True)
    datams = ms()
    ms_in = ms()
    datamsmd = msmetadata()
    qa = quanta()
else:
    from taskinit import ms, qa, mstool, msmdtool, casalog

    datams = mstool()
    ms_in = mstool()
    datamsmd = msmdtool()

# from taskinit import *
# from callibrary import *
# import pdb


def subvs2(vis=None,
           outputvis=None,
           timerange='',
           spw='',
           mode='linear',
           subtime1='',
           subtime2='',
Example #34
def main(thislist, axis='auto', numsubms=4):
    
    if thislist == []:
        print 'Need list of tasks to run.'
        usage()
        os._exit(0)
        
    print "Will create MMS for the following tasks %s"%thislist
    print
    
    # Loop through task list
    for t in thislist:
        if t not in TASKLIST:
            print 'ERROR: task '+t+' is not in TASKLIST. Run this script with -l for the full list.'
            os._exit(0)
            
 #       if t == 'flagdata':
#            axis='scan'
            
        mmstest(t, axis, numsubms)

    from tasks import partition,importuvfits

#    if 'listvis' in thislist:
#        # NOTE for test_listvis data:
#        # You need to run partition by hand to create an MMS for the single-dish data set
#        SDPATH = DATAPATH + 'unittest/listvis/'
#        SDMMS = './unittest_mms/listvis/'
#    
#        partition(vis=SDPATH+'OrionS_rawACSmod', outputvis=SDMMS+'OrionS_rawACSmod.mms', 
#                  datacolumn='float_data', createmms=True, flagbackup=False)

    if 'split' in thislist:
        # some additional MMSs
        SPLITMMSPATH = './unittest_mms/split/'
        specialcase = ['0420+417/0420+417.ms',
                       'viewertest/ctb80-vsm.ms',
                       'split/labelled_by_time+ichan.ms']
        for myms in specialcase:
            shutil.rmtree(SPLITMMSPATH+os.path.basename(myms), ignore_errors=True)
            partition(vis=DATAPATH+myms, outputvis=SPLITMMSPATH+os.path.basename(myms), 
                      datacolumn='all', flagbackup=False)

        # workaround for a partition shortcoming: column keywords not copied
        tb.open(SPLITMMSPATH+'hasfc.mms/SUBMSS/hasfc.0000.ms/', nomodify=False)
        tb.putcolkeyword('FLAG_CATEGORY','CATEGORY', ['FLAG_CMD', 'ORIGINAL', 'USER'])
        tb.close()


    if 'wvrgcal' in thislist:
        WVRGCALMMSPATH = './unittest_mms/wvrgcal/'
        WVRGCALPATH = DATAPATH+'unittest/wvrgcal/input/'
        origwd = os.getcwd()
        os.chdir(WVRGCALMMSPATH)
        shutil.rmtree('input', ignore_errors=True)
        os.mkdir('input')
        os.chdir('input')
        mydirs = os.listdir(WVRGCALPATH)
        for d in mydirs:
            print d
            if  os.path.splitext(d)[1]=='.ms':
                partition(vis=WVRGCALPATH+d, outputvis=d, datacolumn='all', numsubms=5,
                          flagbackup=False)
            else:
                os.symlink(WVRGCALPATH+d, d)
        os.chdir(origwd)

    if ('concat' in thislist):
        CONCATMMSPATH = './unittest_mms/concat/'
        CONCATPATH = DATAPATH+'unittest/concat/input/'
        origwd = os.getcwd()
        os.chdir(CONCATMMSPATH)
        shutil.rmtree('input', ignore_errors=True)
        os.mkdir('input')
        os.chdir('input')
        mydirs = os.listdir(CONCATPATH)
        for d in mydirs:
            print d
            if os.path.splitext(d)[1]=='.ms':
                partition(vis=CONCATPATH+d, outputvis=d, datacolumn='all', numsubms=6,
                          flagbackup=False)
            else:
                os.symlink(CONCATPATH+d, d)
        os.chdir(origwd)
        
    if ('cvel' in thislist):

        CVELPATH = DATAPATH + 'ngc4826/fitsfiles/'
        MMSPATH = './unittest_mms/cvel/'
        mmsdir = MMSPATH+'ngc4826.mms'
        tempdir = 'makemmsdirtemp'
        os.system('mkdir '+tempdir)
        importuvfits(fitsfile=CVELPATH+'ngc4826.ll.fits5',vis=tempdir+'/ngc4826.ms') 
        partition(vis=tempdir+'/ngc4826.ms',outputvis=MMSPATH+'ngc4826.mms',separationaxis='scan',flagbackup=False,datacolumn='all')
        os.system('rm -rf '+tempdir)      
        
        CVELPATH = DATAPATH + 'cvel/input/'
        cvelfiles =['jupiter6cm.demo-thinned.ms','g19_d2usb_targets_line-shortened-thinned.ms','evla-highres-sample-thinned.ms']
        MMSPATH = './unittest_mms/cvel/'
        thisdir = os.getcwd()
        for cvelms in cvelfiles:
            mmsname = cvelms.replace('.ms','.mms')
            partition(vis=CVELPATH+cvelms,outputvis=MMSPATH+mmsname,separationaxis='scan',flagbackup=False,datacolumn='all', numsubms=4)
            os.chdir(MMSPATH)
            os.system('ln -s '+ mmsname + ' ' + cvelms)
            os.chdir(thisdir)
            
        # Create the jup.mms file
        mmsname = 'jup.mms'
        output = MMSPATH+mmsname
        split2(vis=MMSPATH+'/jupiter6cm.demo-thinned.mms', outputvis=output, field='JUPITER', datacolumn='data')
        tblocal = tbtool()
        tblocal.open(output, nomodify=False)
        a = tblocal.getcol('TIME')
        delta = (54709.*86400-a[0])
        a = a + delta
        strt = a[0]
        tblocal.putcol('TIME', a)
        a = tblocal.getcol('TIME_CENTROID')
        a = a + delta
        tblocal.putcol('TIME_CENTROID', a)
        tblocal.close()
        tblocal.open(output+'/OBSERVATION', nomodify=False)
        a = tblocal.getcol('TIME_RANGE')
        delta = strt - a[0][0]
        a = a + delta
        tblocal.putcol('TIME_RANGE', a)
        tblocal.close()
        tblocal.open(output+'/FIELD', nomodify=False)
        a = tblocal.getcol('TIME')
        delta = strt - a[0]
        a = a + delta
        tblocal.putcol('TIME', a)
        tblocal.close()
        mslocal = mstool()
        mslocal.open(output, nomodify=False)
        mslocal.addephemeris(0,os.environ.get('CASAPATH').split()[0]+'/data/ephemerides/JPL-Horizons/Jupiter_54708-55437dUTC.tab',
                        'Jupiter_54708-55437dUTC', 0)
        mslocal.close()
        
        CVELMS = DATAPATH + 'fits-import-export/input/test.ms'
        MMSPATH = './unittest_mms/cvel/'
        thisdir = os.getcwd()
        partition(vis=CVELMS, outputvis=MMSPATH+'test.mms', separationaxis='scan',flagbackup=False,datacolumn='all', numsubms=4)
        os.chdir(MMSPATH)
        os.system('ln -s test.mms test.ms')
        os.chdir(thisdir)
                
    if ('fixvis' in thislist):
        MSPATH = os.environ.get('CASAPATH').split()[0]+'/data/regression/0420+417/'
        MSNAME = MSPATH + '0420+417.ms'
        MMSPATH = './unittest_mms/fixvis/'
        MMSNAME = MMSPATH + '0420+417.mms'
        partition(vis=MSNAME, outputvis=MMSNAME, datacolumn='all', separationaxis=axis,numsubms=numsubms,flagbackup=False)
        # Create symlink
        thisdir = os.getcwd()
        os.chdir(MMSPATH)
        os.system('ln -s 0420+417.mms 0420+417.ms')
        os.chdir(thisdir)
Example #35
def uvcontsub3(vis, fitspw, combine, fitorder, field, spw,
               scan, intent, correlation, observation):
    """Extract the line(s) of an MS."""
    retval = True
    casalog.origin('uvcontsub3')

    myms = mstool()
    mytb = tbtool()
    # This one is redundant - it is already checked at the XML level.
    if not ((type(vis) == str) and os.path.isdir(vis)):
        casalog.post('Visibility data set not found - please verify the name', 'SEVERE')
        return False

    outputvis = vis + '.contsub'
    if os.path.exists(outputvis):
        casalog.post("Output MS " + outputvis + " already exists - will not overwrite.", 'SEVERE')
        return False

    if combine and combine.lower() != 'spw':
        casalog.post("uvcontsub3 deliberately does not support combination by",
                     'SEVERE')
        casalog.post("anything except spw.", 'SEVERE')
        return False

    # MSStateGram is picky ('CALIBRATE_WVR.REFERENCE, OBSERVE_TARGET_ON_SOURCE'
    # doesn't work, but 'CALIBRATE_WVR.REFERENCE,OBSERVE_TARGET_ON_SOURCE'
    # does), and I don't want to mess with bison now.  A .upper() might be a
    # good idea too, but the MS def'n v.2 does not say whether OBS_MODE should
    # be case-insensitive.
    intent = intent.replace(', ', ',')

    if type(spw) == list:
        spw = ','.join([str(s) for s in spw])
    elif type(spw) == int:
        spw = str(spw)

    ## if ':' in spw:
    ##     casalog.post("uvcontsub3 does not yet support selection by channel for the output",
    ##                  'SEVERE')
    ##     casalog.post("Meanwhile, use split to select the desired channels", 'WARN')
    ##     return False

    if ';' in spw:
        casalog.post("uvcontsub3 does not yet support writing multiple channel groups per output spw",
                     'SEVERE')
        return False

    mytb.open(vis + '/SPECTRAL_WINDOW')
    allspw = '0~' + str(mytb.nrows() - 1)
    mytb.close()
    if 'spw' not in combine:
        spwmfitspw = subtract_spws(spw, fitspw)
        if spwmfitspw == 'UNKNOWN':
            spwmfitspw = subtract_spws(allspw, fitspw)
        if spwmfitspw:
            raise Exception, "combine must include 'spw' when the fit is being applied to spws outside fitspw."

    if type(correlation) == list:
        correlation = ', '.join(correlation)
    correlation = correlation.upper()

    mytb.open(vis, nomodify=True)
    if 'CORRECTED_DATA' in mytb.colnames():
        datacolumn = 'CORRECTED_DATA'
    else:
        # DON'T remind the user that split before uvcontsub wastes time -
        # scratch columns will eventually go away.
        datacolumn = 'DATA'
    mytb.close()

    myms.open(vis, nomodify=True)
    if not myms.contsub(outputms=outputvis,   fitspw=fitspw,
                        fitorder=fitorder,    combine=combine,
                        spw=spw,              unionspw=join_spws(fitspw, spw),
                        field=field,          scan=scan,
                        intent=intent,        correlation=correlation,
                        obs=str(observation), whichcol=datacolumn):
        myms.close()
        return False
    myms.close()

    # Write history to output MS, not the input ms.
    try:
        param_names = uvcontsub3.func_code.co_varnames[:uvcontsub3.func_code.co_argcount]
        param_vals = [eval(p) for p in param_names]   
        retval &= write_history(myms, outputvis, 'uvcontsub3', param_names, param_vals,
                                casalog)
    except Exception, instance:
        casalog.post("*** Error \'%s\' updating HISTORY" % (instance),
                     'WARN')
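
# Usage sketch (hypothetical MS name and channel ranges): fit a first-order continuum
# to the line-free channels given in fitspw and write 'my_obs.ms.contsub'.
uvcontsub3('my_obs.ms', fitspw='0:5~20;100~120', combine='', fitorder=1,
           field='', spw='0', scan='', intent='', correlation='', observation='')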
Example #36
0
def plot_weight_density(vis,
                        spw=0,
                        field='',
                        nbins=50,
                        bins=None,
                        clear=False,
                        ignore_flags=False,
                        representative_channel=None,
                        **kwargs):
    """
    Plot the "weight density" vs uvdist: i.e., the sum of the weights in each
    annular bin divided by the area of that bin

    Parameters
    ----------
    vis : str
        The .ms table to plot weights from
    spw : int or str
        The spectral window to plot.  Only one spectral window should be specified.
    field : str
        The field name to plot (if mosaic, make sure it is a name and not a number)
    nbins : int
        The number of bins to create
    bins : None or array
        You can specify specific bins to average the weights in
    ignore_flags : bool
        Ignore the flags in the file.  Flagged data will be plotted alongside
        unflagged.
    representative_channel : None or int
        A specific channel from which to extract flags.  If left as 'None',
        defaults to the mean frequency
    kwargs : dict
        Keyword arguments are passed to msselect (e.g., obsid).  Unfortunately,
        it seems that msselect will happily ignore just about everything it is
        given.
    """

    if hasattr(spw, '__len__'):
        assert len(spw) == 1, "Only one SPW can be plotted."

    mymsmd = msmdtool()
    mymsmd.open(vis)

    reffreq = "{value}{unit}".format(**mymsmd.reffreq(spw)['m0'])
    reffreq = "{0}Hz".format(mymsmd.meanfreq(spw))
    if representative_channel is not None:
        closest_channel = representative_channel
    else:
        closest_channel = np.argmin(
            np.abs(mymsmd.chanfreqs(spw) - mymsmd.meanfreq(spw)))
    mymsmd.close()

    myms = mstool()

    myms.open(vis)
    myms.selectinit(0)
    selection_dict = dict(field=field)  #, spw=reffreq,)
    selection_dict.update(kwargs)
    #print(selection_dict)
    assert myms.msselect(selection_dict), "Data selection has failed"
    #print(myms.msselectedindices())
    # select one "representative" channel out of the SPW (because the weights
    # are per SPW, but the flags are per channel)
    assert myms.selectchannel(start=closest_channel, nchan=1, inc=1,
                              width=1), "Channel selection has failed"
    if ignore_flags:
        columns = ['UVW', 'WEIGHT']
    else:
        columns = ['UVW', 'WEIGHT', 'FLAG']
    datadict = myms.getdata(columns)
    myms.close()
    wt = datadict['weight'].squeeze()
    uvw = datadict['uvw'].squeeze()

    # calculate the UV distance from the uvw array
    uvd = (uvw[:2, :]**2).sum(axis=0)**0.5

    if bins is None:
        bins = np.linspace(uvd.min(), uvd.max(), nbins)

    if not ignore_flags:
        # We have exactly one channel (we forced it above) and the second index
        # should be the channel ID
        # If the flag shape does not conform to this assumption, we're in trouble
        # squeeze just gets rid of all size=1 dimensions
        flags = datadict['flag'].squeeze()

        if flags.shape != wt.shape:
            raise ValueError("Flag shape and weight shape don't match. "
                             "Flag shape: {0}  Weight shape: {1}".format(
                                 flags.shape, wt.shape))

        # set weights to zero because we're adding them (this is obviously not right
        # for many operations, but it is right here!)
        wt[flags] = 0

    # one plot for each polarization
    h_1 = np.histogram(uvd, bins, weights=wt[0, :])
    h_2 = np.histogram(uvd, bins, weights=wt[1, :])

    # plot points at the bin center
    midbins = (bins[:-1] + bins[1:]) / 2.
    # compute the bin area for division below
    bin_area = (bins[1:]**2 - bins[:-1]**2) * np.pi

    if clear:
        pl.clf()
    pl.plot(midbins, h_1[0] / bin_area, drawstyle='steps-mid')
    pl.plot(midbins, h_2[0] / bin_area, drawstyle='steps-mid')
    pl.xlabel("UV Distance")
    pl.ylabel("Sum of weights / annular area")
Example #37
0
def importfitsidi(fitsidifile,
                  vis,
                  constobsid=None,
                  scanreindexgap_s=None,
                  specframe=None):
    """Convert FITS-IDI visibility file into a CASA visibility file (MS).

	Keyword arguments:
	fitsidifile -- Name(s) of input FITS IDI file(s)
		default: None; example='3C273XC1.IDI' or ['3C273XC1.IDI1', '3C273XC1.IDI2']
	vis -- Name of output visibility file (MS)
		default: None; example: vis='3C273XC1.ms'
		
	constobsid -- If True, a constant obs id == 0 is given to all input files
	        default = False (new obs id for each input file)

	scanreindexgap_s --  if > 0., a new scan is started whenever the gap between two
                integrations is > the given value (seconds) or when a new field starts
                or when the ARRAY_ID changes.
                default = 0. (no reindexing)

        specframe -- this frame will be used to set the spectral reference frame
                for all spectral windows in the output MS
                default = GEO (geocentric), other options: TOPO, LSRK, BARY
                NOTE: if specframe is set to TOPO, the reference location will be taken from
                the Observatories table in the CASA data repository for the given name of
                the observatory. You can edit that table and add new rows.   

	"""

    #Python script
    retval = True
    try:
        casalog.origin('importfitsidi')
        casalog.post("")
        myms = mstool()
        mytb = tbtool()

        if type(specframe) == str and not specframe == '':
            myspecframe = specframe.upper()
        else:
            myspecframe = 'GEO'

        refframes = {
            'REST': 0,
            'LSRK': 1,
            'LSRD': 2,
            'BARY': 3,
            'GEO': 4,
            'TOPO': 5
        }
        if not refframes.has_key(myspecframe):
            raise Exception, 'Value ' + myspecframe + ' of parameter specframe invalid. Possible values are REST, LSRK, LSRD, BARY, GEO, TOPO'

        if (type(fitsidifile) == str):
            casalog.post('### Reading file ' + fitsidifile, 'INFO')
            myms.fromfitsidi(vis, fitsidifile)
            myms.close()
        elif (type(fitsidifile) == list):
            clist = fitsidifile
            casalog.post('### Reading file ' + clist[0], 'INFO')
            myms.fromfitsidi(vis, clist[0])
            myms.close()
            clist.pop(0)
            tname = vis + '_importfitsidi_tmp_'
            shutil.rmtree(tname, ignore_errors=True)
            for fidifile in clist:
                casalog.post('### Reading file ' + fidifile, 'INFO')
                myms.fromfitsidi(tname, fidifile)
                myms.close()
                myms.open(vis, nomodify=False)
                myms.concatenate(msfile=tname, freqtol='', dirtol='')
                myms.close()
                shutil.rmtree(tname, ignore_errors=True)
        else:
            raise Exception, 'Parameter fitsidifile should be of type str or list'

        if (constobsid):
            mytb.open(vis + '/OBSERVATION', nomodify=False)
            nobs = mytb.nrows()
            cando = True
            if nobs > 1:
                casalog.post(
                    'Trying to keep obsid constant == 0 for all input files',
                    'INFO')
                # check if all observations are from the same telescope; if not warn and leave as is
                tels = mytb.getcol('TELESCOPE_NAME')
                for i in range(1, nobs):
                    if tels[i] != tels[0]:
                        cando = False

                if cando:
                    # get min and max time and write them into the first row;
                    casalog.post('Adjusting OBSERVATION table', 'INFO')
                    timeranges = mytb.getcol('TIME_RANGE')
                    ttr = timeranges.transpose()
                    newmin = min(ttr[0])
                    newmax = max(ttr[1])
                    mytb.putcell('TIME_RANGE', 0, [newmin, newmax])
                    # delete the other rows
                    mytb.removerows(range(1, nobs))
                else:
                    casalog.post(
                        'The input files stem from different telescopes. Need to give different obs id.',
                        'WARN')
            mytb.close()

            if cando:
                # give the same obs id == 0 to the entire output MS
                casalog.post('Setting observation ID of all integrations to 0',
                             'INFO')
                mytb.open(vis, nomodify=False)
                for i in xrange(0, mytb.nrows()):
                    mytb.putcell('OBSERVATION_ID', i, 0)
                mytb.close()

        else:  # don't want constant obs id
            if (type(fitsidifile) == list and len(fitsidifile) > 1):
                casalog.post(
                    'Incrementing observation ID for each input file ...',
                    'INFO')

        if (scanreindexgap_s > 0.):
            # reindex the scan column
            mytb.open(vis, nomodify=False)
            times = mytb.getcol('TIME')
            fields = mytb.getcol('FIELD_ID')
            arrayids = mytb.getcol('ARRAY_ID')
            scannumbers = mytb.getcol('SCAN_NUMBER')

            timesorted = np.argsort(np.array(times))

            scannumber = 0
            scannumber_field = len(fields) * [0]
            prevtime = len(fields) * [0]
            prevarrayid = arrayids[timesorted[0]]

            for i in xrange(0, mytb.nrows()):
                ii = timesorted[i]
                timenow = times[ii]
                fieldnow = fields[ii]
                arrayidnow = arrayids[ii]
                if (timenow-prevtime[fieldnow] > scanreindexgap_s) \
                     or (arrayidnow != prevarrayid):
                    scannumber += 1
                    scannumber_field[fieldnow] = scannumber
                    casalog.post("Starting new scan "+str(scannumber)+" at "+str(timenow)\
                           +", field "+str(fieldnow)+", array_id "+str(arrayidnow), 'INFO')
                scannumbers[ii] = scannumber_field[fieldnow]
                prevtime[fieldnow] = timenow
                prevarrayid = arrayidnow

            mytb.putcol('SCAN_NUMBER', scannumbers)
            mytb.close()

        if refframes.has_key(myspecframe):
            casalog.post(
                'Setting reference frame for all spectral windows to ' +
                myspecframe, 'INFO')
            if myspecframe == 'TOPO':
                casalog.post(
                    'NOTE: reference position for TOPO frame will be the observatory location',
                    'WARN')
            mytb.open(vis + '/SPECTRAL_WINDOW', nomodify=False)
            refcol = mytb.getcol('MEAS_FREQ_REF')
            refcol = [refframes[myspecframe]] * len(refcol)
            mytb.putcol('MEAS_FREQ_REF', refcol)
            mytb.close()

    # write history
        try:
            param_names = importfitsidi.func_code.co_varnames[:importfitsidi.
                                                              func_code.
                                                              co_argcount]
            param_vals = [eval(p) for p in param_names]
            retval &= write_history(myms, vis, 'importfitsidi', param_names,
                                    param_vals, casalog)

        except Exception, instance:
            casalog.post("*** Error \'%s\' updating HISTORY" % (instance),
                         'WARN')

    except Exception, instance:
        print '*** Error ***', instance
        shutil.rmtree(vis + '_importfitsidi_tmp_', ignore_errors=True)
        raise Exception, instance
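
# Usage sketch (file names follow the docstring example; a single MS is written,
# scans are re-indexed across gaps longer than 30 s and all spws are labelled TOPO):
importfitsidi(['3C273XC1.IDI1', '3C273XC1.IDI2'], '3C273XC1.ms',
              constobsid=True, scanreindexgap_s=30.0, specframe='TOPO')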
Example #38
0
def oldstatwt(vis, dorms, byantenna, sepacs, fitspw, fitcorr, combine,
           timebin, minsamp, field, spw, antenna, timerange, scan, intent,
           array, correlation, obs, datacolumn):
    """
    Sets WEIGHT and SIGMA using the scatter of the visibilities.
    """
    casalog.origin('oldstatwt')
    retval = True
    try:
        myms = mstool()
        mytb = tbtool()
        
        # parameter check for those not fully implemented
        # (should be taken out once implemented)
        if byantenna:
          raise ValueError("byantenna=True is not supported yet")
        if fitcorr !='':
          raise ValueError("fitcorr is not supported yet")
        if timebin !='0s' and timebin !='-1s':
          raise ValueError("timebin is not supported yet")
                     
        datacol = 'DATA'
        mytb.open(vis)
        colnames  = mytb.colnames()
        mytb.close()


        for datacol in ['CORRECTED_DATA', 'DATA', 'junk']:
            if datacol in colnames:
                break
        if datacol == 'junk':
            raise ValueError(vis + " does not have a data column")        

        if datacolumn == 'float_data':
           raise ValueError("float_data is not yet supported") 

        if datacolumn == 'corrected' and datacol == 'DATA': # no CORRECTED_DATA case (fall back to DATA)
           casalog.post("No %s column found, using %s column" % (datacolumn.upper()+'_DATA', datacol),'WARN')
           datacolumn = datacol
        else:
           if datacolumn=='corrected':
               datacolumn_name=datacolumn.upper()+'_DATA'
           else:
               datacolumn_name=datacolumn.upper()
           casalog.post("Using %s column to determine visibility scatter" % datacolumn_name)
       
        if ':' in spw:
            casalog.post('The channel selection part of spw will be ignored.', 'WARN')
        
        if len(correlation)>0:
            correlation=''
            casalog.post('Correlation selection in oldstatwt has been disabled as of CASA v4.5', 'WARN')

        myms.open(vis, nomodify=False)
        retval = myms.oldstatwt(dorms, byantenna, sepacs, fitspw, fitcorr, combine,
                             timebin, minsamp, field, spw, antenna, timerange, scan, intent,
                             array, correlation, obs, datacolumn)
        myms.close()
    except Exception, e:
        casalog.post("Error setting WEIGHT and SIGMA for %s:" % vis, 'SEVERE')
        casalog.post("%s" % e, 'SEVERE')
        if False:  # Set True for debugging.
            for p in oldstatwt.func_code.co_varnames[:oldstatwt.func_code.co_argcount]:
                v = eval(p)
                print p, "=", v, ", type =", type(v)
        retval = False
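
# Usage sketch (hypothetical MS name; empty selection strings use the whole data set,
# and datacolumn='data' matches the column check above):
oldstatwt('my_obs.ms', dorms=False, byantenna=False, sepacs=True, fitspw='', fitcorr='',
          combine='', timebin='-1s', minsamp=2, field='', spw='', antenna='', timerange='',
          scan='', intent='', array='', correlation='', obs='', datacolumn='data')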
Example #39
0
    def test_default(self):
        """Partition: create an MMS with default values in parallel"""

        # First split off one scan to run the test faster
        split(vis=self.msfile, outputvis="split30.ms", datacolumn="DATA", scan="30")
        msfile = "split30.ms"

        partition(vis=msfile, outputvis=self.mmsfile)

        self.assertTrue(os.path.exists(self.mmsfile), "MMS was not created for this test")

        # Gather several metadata information
        # for the MS
        mdlocal1 = msmdtool()
        mdlocal1.open(msfile)
        ms_rows = mdlocal1.nrows()
        ms_nscans = mdlocal1.nscans()
        ms_nspws = mdlocal1.nspw()
        ms_scans = mdlocal1.scannumbers()
        mdlocal1.close()

        # for the MMS
        mdlocal2 = msmdtool()
        mdlocal2.open(self.mmsfile)
        mms_rows = mdlocal2.nrows()
        mms_nscans = mdlocal2.nscans()
        mms_nspws = mdlocal2.nspw()
        mms_scans = mdlocal2.scannumbers()
        mdlocal2.close()

        # Compare the number of rows
        self.assertEqual(ms_rows, mms_rows, "Compare total number of rows in MS and MMS")
        self.assertEqual(ms_nscans, mms_nscans, "Compare number of scans")
        self.assertEqual(ms_nspws, mms_nspws, "Compare number of spws")

        # Compare the scans
        self.assertEqual(ms_scans.all(), mms_scans.all(), "Compare all scan IDs")

        try:
            mdlocal1.open(msfile)
            mdlocal2.open(self.mmsfile)

            # Compare the spws
            for i in ms_scans:
                msi = mdlocal1.spwsforscan(i)
                mmsi = mdlocal2.spwsforscan(i)
                self.assertEqual(msi.all(), mmsi.all(), "Compare spw Ids for a scan")
        finally:
            mdlocal1.close()
            mdlocal2.close()

        # Sort the output MSs so that they can be compared
        myms = mstool()

        myms.open(msfile)
        myms.sort(
            "ms_sorted.ms",
            ["OBSERVATION_ID", "ARRAY_ID", "SCAN_NUMBER", "FIELD_ID", "DATA_DESC_ID", "ANTENNA1", "ANTENNA2", "TIME"],
        )
        myms.done()

        myms.open(self.mmsfile)
        myms.sort(
            "mms_sorted.ms",
            ["OBSERVATION_ID", "ARRAY_ID", "SCAN_NUMBER", "FIELD_ID", "DATA_DESC_ID", "ANTENNA1", "ANTENNA2", "TIME"],
        )
        myms.done()

        self.assertTrue(
            th.compTables(
                "ms_sorted.ms", "mms_sorted.ms", ["FLAG", "FLAG_CATEGORY", "TIME_CENTROID", "WEIGHT_SPECTRUM", "DATA"]
            )
        )

        # Compare the DATA column
        self.assertTrue(th.compVarColTables("ms_sorted.ms", "mms_sorted.ms", "DATA"))

        # The separation axis should be written to the output MMS
        sepaxis = ph.axisType(self.mmsfile)
        self.assertEqual(sepaxis, "scan,spw", "Partition did not write AxisType correctly in MMS")
Example #40
0
def visstat(vis=None,
            axis=None,
            datacolumn=None,
            useflags=None,
            spw=None,
            field=None,
            selectdata=None,
            antenna=None,
            uvrange=None,
            timerange=None,
            correlation=None,
            scan=None,
            array=None,
            observation=None):

    casalog.origin('visstat')  

    mslocal = mstool()

    mslocal.open(vis)

    if axis in ['amp', 'amplitude', 'phase', 'imag', 'imaginary', 'real']:
        complex_type = axis
        col = datacolumn
    else:
        complex_type = ''
        col = axis

    if (not selectdata):
        antenna=''
        uvrange=''
        timerange=''
        correlation=''
        scan=''
        array=''
        observation = ''
        
    s = mslocal.statistics(column=col.upper(),
                      complex_value=complex_type,
                      useflags=useflags,
                      spw=spw,
                      field=field,
                      baseline=antenna,
                      uvrange=uvrange,
                      time=timerange,
                      correlation=correlation,
                      scan=scan,
                      array=array,
                      obs=str(observation))
    
    mslocal.close()

    for stats in s.keys():
        casalog.post(stats + " values --- ", "NORMAL")
        
        if s[stats]['npts'] > 0:
            casalog.post("         -- number of points [npts]:           " + str(int(round(s[stats]['npts']))), "NORMAL")
            casalog.post("         -- minimum value [min]:               " + str(s[stats]['min'  ]), "NORMAL")
            casalog.post("         -- maximum value [max]:               " + str(s[stats]['max'  ]), "NORMAL")
            casalog.post("         -- Sum of values [sum]:               " + str(s[stats]['sum'  ]), "NORMAL")
            casalog.post("         -- Sum of squared values [sumsq]:     " + str(s[stats]['sumsq']), "NORMAL")

        casalog.post(stats + " statistics --- ", "NORMAL")
        if s[stats]['npts'] > 0:
                casalog.post("        -- Mean of the values [mean]:                 " + str(s[stats]['mean']), "NORMAL")
                casalog.post("        -- Variance of the values [var]:              " + str(s[stats]['var']), "NORMAL")
                casalog.post("        -- Standard deviation of the values [stddev]: " + str(s[stats]['stddev']), "NORMAL")
                casalog.post("        -- Root mean square [rms]:                    " + str(s[stats]['rms']), "NORMAL")
                casalog.post("        -- Median of the pixel values [median]:       " + str(s[stats]['median']), "NORMAL")
                casalog.post("        -- Median of the deviations [medabsdevmed]:   " + str(s[stats]['medabsdevmed']), "NORMAL")
                casalog.post("        -- Quartile [quartile]:                       " + str(s[stats]['quartile']), "NORMAL")
        else:
            casalog.post(stats + " -- No valid points found", "WARN")

    return s
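
# Usage sketch (hypothetical MS name; selectdata=False blanks the selection strings):
stats = visstat(vis='my_obs.ms', axis='amp', datacolumn='data', useflags=True,
                spw='0', field='', selectdata=False, antenna='', uvrange='',
                timerange='', correlation='', scan='', array='', observation='')
# The returned dictionary is keyed per statistics group; print the rms of each entry.
for key in stats:
    print(key + ': ' + str(stats[key]['rms']))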
Example #42
0
# default ASDM dataset name
myasdm_dataset_name = 'uid___X5f_X18951_X1'
myms_dataset_name = 'M51.ms'

# name of the resulting MS
msname = myasdm_dataset_name+'.ms'

# name of the exported ASDM
asdmname = myms_dataset_name+'.asdm'

# name of the reimported MS
reimp_msname = 'reimported-'+myms_dataset_name

# make local copies of the tools
tblocal = tbtool()
mslocal = mstool()

def checktable(thename, theexpectation):
    global msname, myname
    tblocal.open(msname+"/"+thename)
    if thename == "":
        thename = "MAIN"
    for mycell in theexpectation:
        print myname, ": comparing ", mycell
        value = tblocal.getcell(mycell[0], mycell[1])
        # see if value is array
        try:
            isarray = value.__len__
        except:
            # it's not an array
            # zero tolerance?
Example #43
0
def importfitsidi(fitsidifile,vis,constobsid=None,scanreindexgap_s=None):
	"""Convert FITS-IDI visibility file into a CASA visibility file (MS).

	Keyword arguments:
	fitsidifile -- Name(s) of input FITS IDI file(s)
		default: None; example='3C273XC1.IDI' or ['3C273XC1.IDI1', '3C273XC1.IDI2']
	vis -- Name of output visibility file (MS)
		default: None; example: vis='3C273XC1.ms'
		
	constobsid -- If True, a constant obs id == 0 is given to all input files
	        default = False (new obs id for each input file)

	scanreindexgap_s --  if > 0., a new scan is started whenever the gap between two
                integrations is > the given value (seconds) or when a new field starts
                default = 0. (no reindexing)
	"""

	#Python script
        retval = True
	try:
		casalog.origin('importfitsidi')
		casalog.post("")
                myms = mstool()
		mytb = tbtool()
		if(type(fitsidifile)==str):
			casalog.post('### Reading file '+fitsidifile, 'INFO')
			myms.fromfitsidi(vis,fitsidifile)
			myms.close()
		elif(type(fitsidifile)==list):
			clist = fitsidifile
			casalog.post('### Reading file '+clist[0], 'INFO')
			myms.fromfitsidi(vis,clist[0])
			myms.close()
			clist.pop(0)
			tname = '_importfitsidi_tmp_'+vis
			shutil.rmtree(tname, ignore_errors=True)
			for fidifile in clist:
				casalog.post('### Reading file '+fidifile, 'INFO')
				myms.fromfitsidi(tname,fidifile)
				myms.close()
				myms.open(vis, nomodify=False)
				myms.concatenate(msfile=tname, freqtol='', dirtol='')
				myms.close()
				shutil.rmtree(tname, ignore_errors=True)
		else:
                        raise Exception, 'Parameter fitsidifile should be of type str or list'			

		if (constobsid):
			mytb.open(vis+'/OBSERVATION', nomodify=False)
			nobs = mytb.nrows()
			cando = True
			if nobs>1:
				casalog.post('Trying to keep obsid constant == 0 for all input files', 'INFO')
				# check if all observations are from the same telescope; if not warn and leave as is
				tels = mytb.getcol('TELESCOPE_NAME')
				for i in range(1,nobs):
					if tels[i]!=tels[0]:
						cando = False

				if cando:
					# get min and max time and write them into the first row;
					casalog.post('Adjusting OBSERVATION table', 'INFO')
					timeranges = mytb.getcol('TIME_RANGE')
					ttr = timeranges.transpose()
					newmin = min(ttr[0])
					newmax = max(ttr[1])
					mytb.putcell('TIME_RANGE', 0, [newmin,newmax])
					# delete the other rows
					mytb.removerows(range(1,nobs))
				else:
					casalog.post('The input files stem from different telescopes. Need to give different obs id.', 'WARN')
			mytb.close()
			
			if cando:
				# give the same obs id == 0 to the entire output MS
				casalog.post('Setting observation ID of all integrations to 0', 'INFO')
				mytb.open(vis, nomodify=False)
				for i in xrange(0, mytb.nrows()):
					mytb.putcell('OBSERVATION_ID', i, 0)
				mytb.close()


		else: # don't want constant obs id
			if(type(fitsidifile)==list and len(fitsidifile)>1):
				casalog.post('Incrementing observation ID for each input file ...', 'INFO')
			
		if (scanreindexgap_s > 0.):
			# reindex the scan column
			mytb.open(vis, nomodify=False)
			times = mytb.getcol('TIME')
			fields = mytb.getcol('FIELD_ID')
			arrayids = mytb.getcol('ARRAY_ID')
			scannumbers = mytb.getcol('SCAN_NUMBER')

			timesorted = np.argsort(np.array(times)) 

			scannumber = 1
			prevtime = times[timesorted[0]]
			prevfield = fields[timesorted[0]]
			prevarrayid = arrayids[timesorted[0]]
			scannumbers[timesorted[0]] = scannumber

			for i in xrange(1,mytb.nrows()):
				ii = timesorted[i]
				timenow = times[ii]
				fieldnow = fields[ii]
				arrayidnow = arrayids[ii]
				if (timenow-prevtime > scanreindexgap_s) \
					    or (fieldnow != prevfield) \
					    or (arrayidnow != prevarrayid):
					scannumber += 1
					casalog.post("Starting new scan "+str(scannumber)+" at "+str(timenow)\
							     +", field "+str(fieldnow)+", array_id "+str(arrayidnow), 'INFO')
				scannumbers[ii] = scannumber
				prevtime = timenow
				prevfield = fieldnow
				prevarrayid = arrayidnow

			mytb.putcol('SCAN_NUMBER', scannumbers)	
			mytb.close()
		
	        # write history
                try:
                        param_names = importfitsidi.func_code.co_varnames[:importfitsidi.func_code.co_argcount]
                        param_vals = [eval(p) for p in param_names]   
                        retval &= write_history(myms, vis, 'importfitsidi', param_names,
                                                param_vals, casalog)

                except Exception, instance:
                        casalog.post("*** Error \'%s\' updating HISTORY" % (instance),
                                     'WARN')

	except Exception, instance: 
		print '*** Error ***',instance
		shutil.rmtree('_importfitsidi_tmp_'+vis, ignore_errors=True)
		raise Exception, instance
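
# Usage sketch (file names follow the docstring example; obs id kept constant and
# scans re-indexed across gaps longer than 30 s):
importfitsidi(['3C273XC1.IDI1', '3C273XC1.IDI2'], '3C273XC1.ms',
              constobsid=True, scanreindexgap_s=30.)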
Example #44
0
def split_core(vis, outputvis, datacolumn, field, spw, width, antenna,
               timebin, timerange, scan, intent, array, uvrange,
               correlation, observation, combine, keepflags):

    retval = True

    if not outputvis or outputvis.isspace():
        raise ValueError, 'Please specify outputvis'

    myms = mstool()
    mytb = None
    if ((type(vis)==str) & (os.path.exists(vis))):
        myms.open(vis, nomodify=True)
    else:
        raise ValueError, 'Visibility data set not found - please verify the name'

    if os.path.exists(outputvis):
        myms.close()
        raise ValueError, "Output MS %s already exists - will not overwrite." % outputvis

    if (os.path.exists(outputvis+".flagversions")):
        myms.close()
        raise ValueError, "The flagversions \"%s.flagversions\" for the output MS already exist. Please delete." % outputvis

    # No longer needed.  When did it get put in?  Note that the default
    # spw='*' in myms.split ends up as '' since the default type for a variant
    # is BOOLVEC.  (Of course!)  Therefore both split and myms.split must
    # work properly when spw=''.
    #if(spw == ''):
    #    spw = '*'
    
    if(type(antenna) == list):
        antenna = ', '.join([str(ant) for ant in antenna])

    ## Accept digits without units ...assume seconds
    timebin = qa.convert(qa.quantity(timebin), 's')['value']
    timebin = str(timebin) + 's'
    
    if timebin == '0s':
        timebin = '-1s'

    # MSStateGram is picky ('CALIBRATE_WVR.REFERENCE, OBSERVE_TARGET_ON_SOURCE'
    # doesn't work, but 'CALIBRATE_WVR.REFERENCE,OBSERVE_TARGET_ON_SOURCE'
    # does), and I don't want to mess with bison now.  A .upper() might be a
    # good idea too, but the MS def'n v.2 does not say whether OBS_MODE should
    # be case-insensitive.
    intent = intent.replace(', ', ',')

    if '^' in spw:
        casalog.post("The interpretation of ^n in split's spw strings has changed from 'average n' to 'skip n' channels!", 'WARN')
        casalog.post("Watch for Slicer errors", 'WARN')
        
    if type(width) == str:
        try:
            if(width.isdigit()):
                width=[string.atoi(width)]
            elif(width.count('[') == 1 and width.count(']') == 1):
                width = width.replace('[', '')
                width = width.replace(']', '')
                splitwidth = width.split(',')
                width = []
                for ws in splitwidth:
                    if(ws.isdigit()):
                        width.append(string.atoi(ws)) 
            else:
                width = [1]
        except:
            raise TypeError, 'parameter width is invalid...using 1'

    if type(correlation) == list:
        correlation = ', '.join(correlation)
    correlation = correlation.upper()

    if hasattr(combine, '__iter__'):
        combine = ', '.join(combine)

    if type(spw) == list:
        spw = ','.join([str(s) for s in spw])
    elif type(spw) == int:
        spw = str(spw)
    do_chan_mod = spw.find('^') > -1     # '0:2~11^1' would be pointless.
    if not do_chan_mod:                  # ...look in width.
        if type(width) == int and width > 1:
            do_chan_mod = True
        elif hasattr(width, '__iter__'):
            for w in width:
                if w > 1:
                    do_chan_mod = True
                    break

    do_both_chan_and_time_mod = (do_chan_mod and
                                 string.atof(timebin[:-1]) > 0.0)
    if do_both_chan_and_time_mod:
        # Do channel averaging first because it might be included in the spw
        # string.
        import tempfile
        # We want the directory outputvis is in, not /tmp, because /tmp
        # might not have enough space.
        # outputvis is itself a directory, so strip off a trailing slash if
        # it is present.
        # I don't know if giving tempfile an absolute directory is necessary -
        # dir='' is effectively '.' in Ubuntu.
        workingdir = os.path.abspath(os.path.dirname(outputvis.rstrip('/')))
        cavms = tempfile.mkdtemp(suffix=outputvis, dir=workingdir)

        casalog.post('Channel averaging to ' + cavms)
        if not myms.split(outputms=cavms,     field=field,
                          spw=spw,            step=width,
                          baseline=antenna,   subarray=array,
                          timebin='',         time=timerange,
                          whichcol=datacolumn,
                          scan=scan,          uvrange=uvrange,
                          combine=combine,
                          correlation=correlation, intent=intent,
                          obs=str(observation)):
            myms.close()
            if os.path.isdir(cavms):
                import shutil
                shutil.rmtree(cavms)
            return False
        
        # The selection was already made, so blank them before time averaging.
        field = ''
        spw = ''
        width = [1]
        antenna = ''
        array = ''
        timerange = ''
        datacolumn = 'all'
        scan = ''
        intent = ''
        uvrange = ''
        observation = ''

        myms.close()
        myms.open(cavms)
        casalog.post('Starting time averaging')

    if keepflags:
        taqlstr = ''
    else:
        taqlstr = 'NOT (FLAG_ROW OR ALL(FLAG))'

    if not myms.split(outputms=outputvis,  field=field,
                      spw=spw,             step=width,
                      baseline=antenna,    subarray=array,
                      timebin=timebin,     time=timerange,
                      whichcol=datacolumn,
                      scan=scan,           uvrange=uvrange,
                      combine=combine,
                      correlation=correlation,
                      taql=taqlstr, intent=intent,
                      obs=str(observation)):
        myms.close()
        return False
    myms.close()

    if do_both_chan_and_time_mod:
        import shutil
        shutil.rmtree(cavms)

    # Write history to output MS, not the input ms.
    try:
        param_names = split_core.func_code.co_varnames[:split_core.func_code.co_argcount]
        param_vals = [eval(p) for p in param_names]   
        retval &= write_history(myms, outputvis, 'oldsplit', param_names, param_vals,
                                casalog)
    except Exception, instance:
        casalog.post("*** Error \'%s\' updating HISTORY" % (instance),
                     'WARN')
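
# Usage sketch (hypothetical MS names): average every 4 channels of spw 0 and
# time-average the corrected data to 30 s, keeping flagged rows.
split_core(vis='my_obs.ms', outputvis='my_obs_avg.ms', datacolumn='corrected',
           field='', spw='0', width=4, antenna='', timebin='30s', timerange='',
           scan='', intent='', array='', uvrange='', correlation='',
           observation='', combine='', keepflags=True)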
Example #45
0
def subvs2(vis=None,
           outputvis=None,
           timerange='',
           spw='',
           mode=None,
           subtime1=None,
           subtime2=None,
           smoothaxis=None,
           smoothtype=None,
           smoothwidth=None,
           splitsel=None,
           reverse=None,
           overwrite=None):
    """Perform vector subtraction for visibilities
    Keyword arguments:
    vis -- Name of input visibility file (MS)
            default: none; example: vis='ngc5921.ms'
    outputvis -- Name of output uv-subtracted visibility file (MS)
                  default: none; example: outputvis='ngc5921_src.ms'
    timerange -- Time range of performing the UV subtraction:
                 default='' means all times.  examples:
                 timerange = 'YYYY/MM/DD/hh:mm:ss~YYYY/MM/DD/hh:mm:ss'
                 timerange = 'hh:mm:ss~hh:mm:ss'
    spw -- Select spectral window/channel.
           default = '' all the spectral channels. Example: spw='0:1~20'
    mode -- operation mode
            default 'linear' 
                mode = 'linear': use a linear fit for the background to be subtracted
                mode = 'lowpass': act as a lowpass filter---smooth the data using different
                        smooth types and window sizes. Can be performed along either time
                        or frequency axis
                mode = 'highpass': act as a highpass filter---smooth the data first, and 
                        subtract the smoothed data from the original. Can be performed along
                        either time or frequency axis
            mode = 'linear' expandable parameters:
                subtime1 -- Time range 1 of the background to be subtracted from the data 
                             default='' means all times.  format:
                             timerange = 'YYYY/MM/DD/hh:mm:ss~YYYY/MM/DD/hh:mm:ss'
                             timerange = 'hh:mm:ss~hh:mm:ss'
                subtime2 -- Time range 2 of the background to be subtracted from the data
                             default='' means all times.  examples:
                             timerange = 'YYYY/MM/DD/hh:mm:ss~YYYY/MM/DD/hh:mm:ss'
                             timerange = 'hh:mm:ss~hh:mm:ss'
            mode = 'lowpass' or 'highpass' expandable parameters:
                smoothaxis -- axis of smooth
                    Default: 'time'
                    smoothaxis = 'time': smooth is along the time axis
                    smoothaxis = 'freq': smooth is along the frequency axis
                smoothtype -- type of the smooth depending on the convolving kernel
                    Default: 'flat'
                    smoothtype = 'flat': convolving kernel is a flat rectangle,
                            equivalent to a boxcar moving smooth
                    smoothtype = 'hanning': Hanning smooth kernel. See numpy.hanning
                    smoothtype = 'hamming': Hamming smooth kernel. See numpy.hamming
                    smoothtype = 'bartlett': Bartlett smooth kernel. See numpy.bartlett
                    smoothtype = 'blackman': Blackman smooth kernel. See numpy.blackman
                smoothwidth -- width of the smooth kernel
                    Default: 5
                    Examples: smoothwidth=5, meaning the width is 5 pixels
    splitsel -- True or False. default = False. If splitsel = False, then the entire input
            measurement set is copied as the output measurement set (outputvis), with 
            background subtracted at selected timerange and spectral channels. 
            If splitsel = True,then only the selected timerange and spectral channels 
            are copied into the output measurement set (outputvis).
    reverse -- True or False. default = False. If reverse = False, then the times indicated
            by subtime1 and/or subtime2 are treated as background and subtracted; If reverse
            = True, then reverse the sign of the background-subtracted data. The option can 
            be used for mapping absorptive structure.
    overwrite -- True or False. default = False. If overwrite = True and
                outputvis already exists, the selected subtime and spw in the 
                output measurement set will be replaced with background subtracted
                visibilities

    """
    # check the visbility ms
    casalog.post('input parameters:')
    casalog.post('vis: ' + vis)
    casalog.post('outputvis: ' + outputvis)
    casalog.post('smoothaxis: ' + smoothaxis)
    casalog.post('smoothtype: ' + smoothtype)
    casalog.post('smoothwidth: ' + str(smoothwidth))
    if not outputvis or outputvis.isspace():
        raise ValueError('Please specify outputvis')

    if os.path.exists(outputvis):
        if overwrite:
            print(
                "The already existing output measurement set will be updated.")
        else:
            raise ValueError(
                "Output MS %s already exists - will not overwrite." %
                outputvis)
    else:
        if not splitsel:
            shutil.copytree(vis, outputvis)
        else:
            ms.open(vis, nomodify=True)
            ms.split(outputvis, spw=spw, time=timerange, whichcol='DATA')
            ms.close()

    if timerange and (type(timerange) == str):
        [btimeo, etimeo] = timerange.split('~')
        btimeosec = qa.getvalue(qa.convert(qa.totime(btimeo), 's'))
        etimeosec = qa.getvalue(qa.convert(qa.totime(etimeo), 's'))
        timebinosec = etimeosec - btimeosec
        if timebinosec < 0:
            raise Exception(
                'Negative timebin! Please check the "timerange" parameter.')
        casalog.post('Selected timerange: ' + timerange +
                     ' as the time for UV subtraction.')
    else:
        casalog.post(
            'Output timerange not specified, using the entire timerange')

    if spw and (type(spw) == str):
        spwlist = spw.split(';')
    else:
        casalog.post('spw not specified, use all frequency channels')

    # read the output data
    datams = mstool()
    datams.open(outputvis, nomodify=False)
    datamsmd = msmdtool()
    datamsmd.open(outputvis)
    spwinfod = datams.getspectralwindowinfo()
    spwinfok = sorted(spwinfod.keys(), key=int)
    spwinfol = [spwinfod[k] for k in spwinfok]
    for s, spi in enumerate(spwinfol):
        print('processing spectral window {}'.format(spi['SpectralWindowId']))
        datams.selectinit(reset=True)
        staql = {'time': '', 'spw': ''}
        if not splitsel:
            # outputvis is identical to input visibility, do the selection
            if timerange and (type(timerange) == str):
                staql['time'] = timerange
            if spw and (type(spw) == str):
                staql['spw'] = spwlist[s]
            if not spw and not timerange:
                # data selection is not made
                print('selecting all spws and times')
                staql['spw'] = str(spi['SpectralWindowId'])
        else:
            # outputvis was split, selections have already been applied, select all the data
            print('split the selected spws and times')
            staql['spw'] = str(spi['SpectralWindowId'])
        datams.msselect(staql)
        orec = datams.getdata(['data', 'time', 'axis_info'], ifraxis=True)
        npol, nchan, nbl, ntim = orec['data'].shape
        print('dimension of output data', orec['data'].shape)
        casalog.post('Number of baselines: ' + str(nbl))
        casalog.post('Number of spectral channels: ' + str(nchan))
        casalog.post('Number of time pixels: ' + str(ntim))

        try:
            if mode == 'linear':
                # define and check the background time ranges
                if subtime1 and (type(subtime1) == str):
                    [bsubtime1, esubtime1] = subtime1.split('~')
                    bsubtime1sec = qa.getvalue(
                        qa.convert(qa.totime(bsubtime1), 's'))
                    esubtime1sec = qa.getvalue(
                        qa.convert(qa.totime(esubtime1), 's'))
                    timebin1sec = esubtime1sec - bsubtime1sec
                    if timebin1sec < 0:
                        raise Exception(
                            'Negative timebin! Please check the "subtime1" parameter.'
                        )
                    casalog.post('Selected timerange 1: ' + subtime1 +
                                 ' as background for uv subtraction.')
                else:
                    raise Exception(
                        'Please enter at least one timerange as the background'
                    )
                if subtime2 and (type(subtime2) == str):
                    [bsubtime2, esubtime2] = subtime2.split('~')
                    bsubtime2sec = qa.getvalue(
                        qa.convert(qa.totime(bsubtime2), 's'))
                    esubtime2sec = qa.getvalue(
                        qa.convert(qa.totime(esubtime2), 's'))
                    timebin2sec = esubtime2sec - bsubtime2sec
                    if timebin2sec < 0:
                        raise Exception(
                            'Negative timebin! Please check the "subtime2" parameter.'
                        )
                    timebin2 = str(timebin2sec) + 's'
                    casalog.post('Selected timerange 2: ' + subtime2 +
                                 ' as background for uv subtraction.')
                    # plus 1s is to ensure averaging over the entire timerange
                else:
                    casalog.post(
                        'Timerange 2 not selected, using only timerange 1 as background'
                    )

                # Select the background indicated by subtime1
                ms.open(vis, nomodify=True)
                # Select the spw id
                # ms.msselect({'time': subtime1})
                staql0 = {'time': subtime1, 'spw': ''}
                if spw and (type(spw) == str):
                    staql0['spw'] = spwlist[s]
                else:
                    staql0['spw'] = staql['spw']
                ms.msselect(staql0)
                rec1 = ms.getdata(['data', 'time', 'axis_info'], ifraxis=True)
                # print('shape of the frequency matrix ',rec1['axis_info']['freq_axis']['chan_freq'].shape)
                sz1 = rec1['data'].shape
                print('dimension of selected background 1', rec1['data'].shape)
                # the data shape is (n_pol,n_channel,n_baseline,n_time), no need to reshape
                # rec1['data']=rec1['data'].reshape(sz1[0],sz1[1],sz1[2],nspw,sz1[3]/nspw,order='F')
                # print('reshaped rec1 ', rec1['data'].shape)
                rec1avg = np.average(rec1['data'], axis=3)
                casalog.post('Averaging the visibilities in subtime1: ' +
                             subtime1)
                ms.close()
                if subtime2 and (type(subtime2) == str):
                    ms.open(vis, nomodify=True)
                    # Select the spw id
                    staql0 = {'time': subtime2, 'spw': ''}
                    if spw and (type(spw) == str):
                        staql0['spw'] = spwlist[s]
                    else:
                        staql0['spw'] = staql['spw']
                    ms.msselect(staql0)
                    rec2 = ms.getdata(['data', 'time', 'axis_info'],
                                      ifraxis=True)
                    sz2 = rec2['data'].shape
                    print('dimension of selected background 2',
                          rec2['data'].shape)
                    # rec2['data']=rec2['data'].reshape(sz2[0],sz2[1],sz2[2],nspw,sz2[3]/nspw,order='F')
                    # print('reshaped rec1 ', rec2['data'].shape)
                    rec2avg = np.average(rec2['data'], axis=3)
                    ms.close()
                    casalog.post('Averaged the visibilities in subtime2: ' +
                                 subtime2)
                if subtime1 and (not subtime2):
                    casalog.post(
                        'Only "subtime1" is defined, subtracting background defined in subtime1: '
                        + subtime1)
                    t1 = (np.amax(rec1['time']) + np.amin(rec1['time'])) / 2.
                    print('t1: ',
                          qa.time(qa.quantity(t1, 's'), form='ymd', prec=10))
                    for i in range(ntim):
                        orec['data'][:, :, :, i] -= rec1avg
                        if reverse:
                            orec['data'][:, :, :,
                                         i] = -orec['data'][:, :, :, i]
                if subtime1 and subtime2 and (type(subtime2) == str):
                    casalog.post(
                        'Both subtime1 and subtime2 are specified, doing linear interpolation between "subtime1" and "subtime2"'
                    )
                    t1 = (np.amax(rec1['time']) + np.amin(rec1['time'])) / 2.
                    t2 = (np.amax(rec2['time']) + np.amin(rec2['time'])) / 2.
                    touts = orec['time']
                    print('t1: ',
                          qa.time(qa.quantity(t1, 's'), form='ymd', prec=10))
                    print('t2: ',
                          qa.time(qa.quantity(t2, 's'), form='ymd', prec=10))
                    for i in range(ntim):
                        tout = touts[i]
                        if tout > np.amax([t1, t2]):
                            tout = np.amax([t1, t2])
                        elif tout < np.amin([t1, t2]):
                            tout = np.amin([t1, t2])
                        orec['data'][:, :, :, i] -= (rec2avg - rec1avg) * (
                            tout - t1) / (t2 - t1) + rec1avg
                        if reverse:
                            orec['data'][:, :, :,
                                         i] = -orec['data'][:, :, :, i]
            elif mode == 'highpass':
                if smoothtype != 'flat' and smoothtype != 'hanning' and smoothtype != 'hamming' and smoothtype != 'bartlett' and smoothtype != 'blackman':
                    raise Exception('Unknown smoothtype ' + str(smoothtype))
                if smoothaxis == 'time':
                    if smoothwidth <= 0 or smoothwidth >= ntim:
                        raise Exception(
                            'Specified smooth width is <=0 or >= the total number of '
                            + smoothaxis)
                    else:
                        for i in range(orec['data'].shape[0]):
                            for j in range(nchan):
                                for k in range(nbl):
                                    orec['data'][i, j,
                                                 k, :] -= signalsmooth.smooth(
                                                     orec['data'][i, j, k, :],
                                                     smoothwidth, smoothtype)
                if smoothaxis == 'freq':
                    if smoothwidth <= 0 or smoothwidth >= nchan:
                        raise Exception(
                            'Specified smooth width is <=0 or >= the total number of '
                            + smoothaxis)
                    else:
                        for i in range(orec['data'].shape[0]):
                            for j in range(nbl):
                                for k in range(ntim):
                                    orec['data'][i, :, j,
                                                 k] -= signalsmooth.smooth(
                                                     orec['data'][i, :, j, k],
                                                     smoothwidth, smoothtype)
            elif mode == 'lowpass':
                if smoothtype != 'flat' and smoothtype != 'hanning' and smoothtype != 'hamming' and smoothtype != 'bartlett' and smoothtype != 'blackman':
                    raise Exception('Unknown smoothtype ' + str(smoothtype))
                if smoothaxis == 'time':
                    if smoothwidth <= 0 or smoothwidth >= ntim:
                        raise Exception(
                            'Specified smooth width is <=0 or >= the total number of '
                            + smoothaxis)
                    else:
                        for i in range(orec['data'].shape[0]):
                            for j in range(nchan):
                                for k in range(nbl):
                                    orec['data'][i, j,
                                                 k, :] = signalsmooth.smooth(
                                                     orec['data'][i, j, k, :],
                                                     smoothwidth, smoothtype)
                if smoothaxis == 'freq':
                    if smoothwidth <= 0 or smoothwidth >= nchan:
                        raise Exception(
                            'Specified smooth width is <=0 or >= the total number of '
                            + smoothaxis)
                    else:
                        for i in range(orec['data'].shape[0]):
                            for j in range(nbl):
                                for k in range(ntim):
                                    orec['data'][i, :, j,
                                                 k] = signalsmooth.smooth(
                                                     orec['data'][i, :, j, k],
                                                     smoothwidth, smoothtype)
            else:
                raise Exception('Unknown mode ' + str(mode))
        except Exception as instance:
            print('*** Error ***', instance)

        # orec['data']=orec['data'].reshape(szo[0],szo[1],szo[2],szo[3],order='F')
        # put the modified data back into the output visibility set
        del orec['time']
        del orec['axis_info']
        # ms.open(outputvis,nomodify=False)
        # if not splitsel:
        # outputvis is identical to input visibility, do the selection
        #    if timerange and (type(timerange==str)):
        #        datams.msselect({'time':timerange})
        #    if spw and (type(spw)==str):
        #        datams.selectinit(datadescid=int(spwid))
        #        nchan=int(echan)-int(bchan)+1
        #        datams.selectchannel(nchan,int(bchan),1,1)
        #    if not spw and not timerange:
        # data selection is not made
        #        datams.selectinit(datadescid=0)
        # else:
        # outputvis is splitted, selections have already applied, select all the data
        #    datams.selectinit(datadescid=0)
        datams.putdata(orec)
    datams.close()
    datamsmd.done()
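
# Usage sketch (hypothetical MS and time ranges): subtract a background interpolated
# linearly between two quiet intervals, writing only the selected data to the output.
subvs2(vis='sun_obs.ms', outputvis='sun_obs.bgsub.ms',
       timerange='18:00:00~18:30:00', spw='0:100~200', mode='linear',
       subtime1='17:50:00~17:55:00', subtime2='18:35:00~18:40:00',
       smoothaxis='time', smoothtype='flat', smoothwidth=5,
       splitsel=True, reverse=False, overwrite=False)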
Example #46
0
    def test_default(self):
        '''Partition: create an MMS with default values in parallel'''

        # First split off one scan to run the test faster
        split(vis=self.msfile,
              outputvis='split30.ms',
              datacolumn='DATA',
              scan='30')
        msfile = 'split30.ms'

        partition(vis=msfile, outputvis=self.mmsfile)

        self.assertTrue(os.path.exists(self.mmsfile),
                        'MMS was not created for this test')

        # Gather several metadata information
        # for the MS
        mdlocal1 = msmdtool()
        mdlocal1.open(msfile)
        ms_rows = mdlocal1.nrows()
        ms_nscans = mdlocal1.nscans()
        ms_nspws = mdlocal1.nspw()
        ms_scans = mdlocal1.scannumbers()
        mdlocal1.close()

        # for the MMS
        mdlocal2 = msmdtool()
        mdlocal2.open(self.mmsfile)
        mms_rows = mdlocal2.nrows()
        mms_nscans = mdlocal2.nscans()
        mms_nspws = mdlocal2.nspw()
        mms_scans = mdlocal2.scannumbers()
        mdlocal2.close()

        # Compare the number of rows
        self.assertEqual(ms_rows, mms_rows,
                         'Compare total number of rows in MS and MMS')
        self.assertEqual(ms_nscans, mms_nscans, 'Compare number of scans')
        self.assertEqual(ms_nspws, mms_nspws, 'Compare number of spws')

        # Compare the scans
        self.assertTrue((ms_scans == mms_scans).all(),
                        'Compare all scan IDs')

        try:
            mdlocal1.open(msfile)
            mdlocal2.open(self.mmsfile)

            # Compare the spws
            for i in ms_scans:
                msi = mdlocal1.spwsforscan(i)
                mmsi = mdlocal2.spwsforscan(i)
                self.assertTrue((msi == mmsi).all(),
                                'Compare spw Ids for a scan')
        finally:
            mdlocal1.close()
            mdlocal2.close()

        # Sort the output MSs so that they can be compared
        myms = mstool()

        myms.open(msfile)
        myms.sort('ms_sorted.ms', [
            'OBSERVATION_ID', 'ARRAY_ID', 'SCAN_NUMBER', 'FIELD_ID',
            'DATA_DESC_ID', 'ANTENNA1', 'ANTENNA2', 'TIME'
        ])
        myms.done()

        myms.open(self.mmsfile)
        myms.sort('mms_sorted.ms', [
            'OBSERVATION_ID', 'ARRAY_ID', 'SCAN_NUMBER', 'FIELD_ID',
            'DATA_DESC_ID', 'ANTENNA1', 'ANTENNA2', 'TIME'
        ])
        myms.done()

        # Ignore WEIGHT_SPECTRUM and SIGMA_SPECTRUM, which are empty columns
        self.assertTrue(
            th.compTables('ms_sorted.ms', 'mms_sorted.ms', [
                'FLAG', 'FLAG_CATEGORY', 'TIME_CENTROID', 'WEIGHT_SPECTRUM',
                'SIGMA_SPECTRUM', 'DATA'
            ]))

        # Compare the DATA column
        self.assertTrue(
            th.compVarColTables('ms_sorted.ms', 'mms_sorted.ms', 'DATA'))

        # The separation axis should be written to the output MMS
        sepaxis = ph.axisType(self.mmsfile)
        self.assertEqual(sepaxis, 'scan,spw',
                         'Partition did not write AxisType correctly in MMS')
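
# A hedged helper sketch related to the comparison above: after sorting both MSs
# on the same keys, a single scalar column (e.g. TIME) can be compared element-wise
# with the table tool.  It assumes the taskinit tool constructors used elsewhere in
# these examples (tbtool) and numpy are available; 'ms_sorted.ms'/'mms_sorted.ms'
# are the sorted outputs produced in the test above.
import numpy as np
from taskinit import tbtool

def scalar_columns_match(table_a, table_b, column, rtol=1e-7):
    tb_a, tb_b = tbtool(), tbtool()
    tb_a.open(table_a)
    tb_b.open(table_b)
    try:
        col_a = tb_a.getcol(column)
        col_b = tb_b.getcol(column)
    finally:
        tb_a.close()
        tb_b.close()
    return col_a.shape == col_b.shape and np.allclose(col_a, col_b, rtol=rtol)

# e.g. scalar_columns_match('ms_sorted.ms', 'mms_sorted.ms', 'TIME')
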
Пример #47
0
    def test_default(self):
        '''Partition: create an MMS with default values in parallel'''
        
        # First split off one scan to run the test faster
        split(vis=self.msfile, outputvis='split30.ms', datacolumn='DATA', scan='30')
        msfile = 'split30.ms'

        partition(vis=msfile, outputvis=self.mmsfile)
        
        self.assertTrue(os.path.exists(self.mmsfile), 'MMS was not created for this test')
        
        # Gather several metadata information
        # for the MS
        mdlocal1 = msmdtool()
        mdlocal1.open(msfile)
        ms_rows = mdlocal1.nrows()
        ms_nscans = mdlocal1.nscans()
        ms_nspws = mdlocal1.nspw()
        ms_scans = mdlocal1.scannumbers()
        mdlocal1.close()        
          
        # for the MMS
        mdlocal2 = msmdtool()
        mdlocal2.open(self.mmsfile)
        mms_rows = mdlocal2.nrows()
        mms_nscans = mdlocal2.nscans()
        mms_nspws = mdlocal2.nspw()
        mms_scans = mdlocal2.scannumbers()
        mdlocal2.close()        
          
        # Compare the number of rows
        self.assertEqual(ms_rows, mms_rows, 'Compare total number of rows in MS and MMS')
        self.assertEqual(ms_nscans, mms_nscans, 'Compare number of scans')
        self.assertEqual(ms_nspws, mms_nspws, 'Compare number of spws')
          
        # Compare the scans
        self.assertTrue((ms_scans == mms_scans).all(), 'Compare all scan IDs')
  
        try:
            mdlocal1.open(msfile)
            mdlocal2.open(self.mmsfile)
          
            # Compare the spws
            for i in ms_scans:                
                msi = mdlocal1.spwsforscan(i)
                mmsi = mdlocal2.spwsforscan(i)
                self.assertTrue((msi == mmsi).all(), 'Compare spw Ids for a scan')
        finally:          
            mdlocal1.close()
            mdlocal2.close()               

        # Sort the output MSs so that they can be compared
        myms = mstool()
        
        myms.open(msfile)
        myms.sort('ms_sorted.ms',['OBSERVATION_ID','ARRAY_ID','SCAN_NUMBER','FIELD_ID','DATA_DESC_ID','ANTENNA1','ANTENNA2','TIME'])
        myms.done()
        
        myms.open(self.mmsfile)
        myms.sort('mms_sorted.ms',['OBSERVATION_ID','ARRAY_ID','SCAN_NUMBER','FIELD_ID','DATA_DESC_ID','ANTENNA1','ANTENNA2','TIME'])
        myms.done()

        # Ignore WEIGHT_SPECTRUM and SIGMA_SPECTRUM, which are empty columns
        self.assertTrue(th.compTables('ms_sorted.ms', 'mms_sorted.ms', 
                                      ['FLAG','FLAG_CATEGORY','TIME_CENTROID',
                                       'WEIGHT_SPECTRUM','SIGMA_SPECTRUM','DATA']))

        # Compare the DATA column
        self.assertTrue(th.compVarColTables('ms_sorted.ms', 'mms_sorted.ms','DATA'))
        
        # The separation axis should be written to the output MMS
        sepaxis = ph.axisType(self.mmsfile)
        self.assertEqual(sepaxis, 'scan,spw', 'Partition did not write AxisType correctly in MMS')
Пример #48
0
sys.path.append('.')
assert os.getenv('SCRIPT_DIR') is not None
sys.path.append(os.getenv('SCRIPT_DIR'))

from ms_lists import Kmses
from maserline_imaging import makefits

from tclean_cli import tclean_cli as tclean
from split_cli import split_cli as split
from concat_cli import concat_cli as concat
from gaincal_cli import gaincal_cli as gaincal
from applycal_cli import applycal_cli as applycal

from taskinit import mstool, msmdtool

mst = mstool()
msmd = msmdtool()

spws = {
    '../18A-229_2018_03_29_T13_19_55.276/18A-229.sb35069722.eb35251855.58206.45698415509.ms':
    '7',
    '../18A-229_2018_03_28_T17_09_22.432/18A-229.sb35069722.eb35251150.58205.383514664354.ms':
    '5',
}

field = 'Sgr B2 MN K,Sgr B2 MS K,Sgr B2 SDS K'
restfreq = '23.722633335GHz'
selfcal_spw = '0~5:23.7178~23.7185GHz'

merged_ms = 'NH322_merged.ms'
if not os.path.exists(merged_ms):
Пример #49
0
def plot_weight_density(vis, spw=0, field='', nbins=50, bins=None, clear=False,
                        ignore_flags=False, representative_channel=None,
                        **kwargs):
    """
    Plot the "weight density" vs uvdist: i.e., the sum of the weights in each
    annular bin divided by the area of that bin

    Parameters
    ----------
    vis : str
        The .ms table to plot weights from
    spw : int or str
        The spectral window to plot.  Only one spectral window should be specified.
    field : str
        The field name to plot (if mosaic, make sure it is a name and not a number)
    nbins : int
        The number of bins to create
    bins : None or array
        You can specify specific bins to average the weights in
    ignore_flags : bool
        Ignore the flags in the file.  Flagged data will be plotted alongside
        unflagged.
    representative_channel : None or int
        A specific channel from which to extract flags.  If left as 'None',
        defaults to the mean frequency
    kwargs : dict
        Keyword arguments are passed to msselect (e.g., obsid).  Unfortunately,
        it seems that msselect will happily ignore just about everything it is
        given.
    """

    if hasattr(spw, '__len__'):
        assert len(spw) == 1, "Only one SPW can be plotted."


    mymsmd = msmdtool()
    mymsmd.open(vis)

    reffreq = "{value}{unit}".format(**mymsmd.reffreq(spw)['m0'])
    reffreq = "{0}Hz".format(mymsmd.meanfreq(spw))
    if representative_channel is not None:
        closest_channel = representative_channel
    else:
        closest_channel = np.argmin(np.abs(mymsmd.chanfreqs(spw) - mymsmd.meanfreq(spw)))
    mymsmd.close()

    myms = mstool()

    myms.open(vis)
    myms.selectinit(0)
    selection_dict = dict(field=field) #, spw=reffreq,)
    selection_dict.update(kwargs)
    #print(selection_dict)
    assert myms.msselect(selection_dict), "Data selection has failed"
    #print(myms.msselectedindices())
    # select one "representative" channel out of the SPW (because the weights
    # are per SPW, but the flags are per channel)
    assert myms.selectchannel(start=closest_channel, nchan=1, inc=1, width=1), "Channel selection has failed"
    if ignore_flags:
        columns = ['UVW', 'WEIGHT']
    else:
        columns = ['UVW', 'WEIGHT', 'FLAG']
    datadict=myms.getdata(columns)
    myms.close()
    wt = datadict['weight'].squeeze()
    uvw = datadict['uvw'].squeeze()

    # calculate the UV distance from the uvw array
    uvd = (uvw[:2,:]**2).sum(axis=0)**0.5

    if bins is None:
        bins = np.linspace(uvd.min(), uvd.max(), nbins)


    if not ignore_flags:
        # We have exactly one channel (we forced it above) and the second index
        # should be the channel ID
        # If the flag shape does not conform to this assumption, we're in trouble
        # squeeze just gets rid of all size=1 dimensions
        flags = datadict['flag'].squeeze()

        if flags.shape != wt.shape:
            raise ValueError("Flag shape and weight shape don't match. "
                             "Flag shape: {0}  Weight shape: {1}".format(
                                 flags.shape,wt.shape))

        # set weights to zero because we're adding them (this is obviously not right
        # for many operations, but it is right here!)
        wt[flags] = 0

    # one plot for each polarization
    h_1 = np.histogram(uvd, bins, weights=wt[0,:])
    h_2 = np.histogram(uvd, bins, weights=wt[1,:])

    # plot points at the bin center
    midbins = (bins[:-1] + bins[1:])/2.
    # compute the bin area for division below
    bin_area = (bins[1:]**2-bins[:-1]**2)*np.pi

    if clear:
        pl.clf()
    pl.plot(midbins, h_1[0]/bin_area, drawstyle='steps-mid')
    pl.plot(midbins, h_2[0]/bin_area, drawstyle='steps-mid')
    pl.xlabel("UV Distance")
    pl.ylabel("Sum of weights / annular area")
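
# Hedged usage sketch for plot_weight_density above.  'mydata.ms' and the field
# name are placeholders, and it assumes the module-level imports the function
# relies on (numpy as np, pylab as pl, and the taskinit msmdtool/mstool
# constructors) are in place, since they are omitted from the snippet.
import pylab as pl

vis = 'mydata.ms'  # placeholder measurement set
plot_weight_density(vis, spw=0, field='SgrB2_N', nbins=40, clear=True)
pl.savefig('weight_density_spw0.png')
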
Пример #50
0
def main(thislist, axis='auto', numsubms=4):

    if thislist == []:
        print 'Need list of tasks to run.'
        usage()
        os._exit(0)

    print "Will create MMS for the following tasks %s" % thislist
    print

    # Loop through task list
    for t in thislist:
        if t not in TASKLIST:
            print 'ERROR: task ' + t + ' is not in TASKLIST. Run this script with -l for the full list.'
            os._exit(0)

#       if t == 'flagdata':


#            axis='scan'

        mmstest(t, axis, numsubms)

    from tasks import partition, importuvfits

    #    if 'listvis' in thislist:
    #        # NOTE for test_listvis data:
    #        # You need to run partition by hand to create an MMS for the single-dish data set
    #        SDPATH = DATAPATH + 'unittest/listvis/'
    #        SDMMS = './unittest_mms/listvis/'
    #
    #        partition(vis=SDPATH+'OrionS_rawACSmod', outputvis=SDMMS+'OrionS_rawACSmod.mms',
    #                  datacolumn='float_data', createmms=True, flagbackup=False)

    if 'split' in thislist:
        # some additional MMSs
        SPLITMMSPATH = './unittest_mms/split/'
        specialcase = [
            '0420+417/0420+417.ms', 'viewertest/ctb80-vsm.ms',
            'split/labelled_by_time+ichan.ms'
        ]
        for myms in specialcase:
            shutil.rmtree(SPLITMMSPATH + os.path.basename(myms),
                          ignore_errors=True)
            partition(vis=DATAPATH + myms,
                      outputvis=SPLITMMSPATH + os.path.basename(myms),
                      datacolumn='all',
                      flagbackup=False)

        # workaround for a partition shortcoming: column keywords not copied
        tb.open(SPLITMMSPATH + 'hasfc.mms/SUBMSS/hasfc.0000.ms/',
                nomodify=False)
        tb.putcolkeyword('FLAG_CATEGORY', 'CATEGORY',
                         ['FLAG_CMD', 'ORIGINAL', 'USER'])
        tb.close()

    if 'wvrgcal' in thislist:
        WVRGCALMMSPATH = './unittest_mms/wvrgcal/'
        WVRGCALPATH = DATAPATH + 'unittest/wvrgcal/input/'
        origwd = os.getcwd()
        os.chdir(WVRGCALMMSPATH)
        shutil.rmtree('input', ignore_errors=True)
        os.mkdir('input')
        os.chdir('input')
        mydirs = os.listdir(WVRGCALPATH)
        for d in mydirs:
            print d
            if os.path.splitext(d)[1] == '.ms':
                partition(vis=WVRGCALPATH + d,
                          outputvis=d,
                          datacolumn='all',
                          numsubms=5,
                          flagbackup=False)
            else:
                os.symlink(WVRGCALPATH + d, d)
        os.chdir(origwd)

    if ('concat' in thislist):
        CONCATMMSPATH = './unittest_mms/concat/'
        CONCATPATH = DATAPATH + 'unittest/concat/input/'
        origwd = os.getcwd()
        os.chdir(CONCATMMSPATH)
        shutil.rmtree('input', ignore_errors=True)
        os.mkdir('input')
        os.chdir('input')
        mydirs = os.listdir(CONCATPATH)
        for d in mydirs:
            print d
            if os.path.splitext(d)[1] == '.ms':
                partition(vis=CONCATPATH + d,
                          outputvis=d,
                          datacolumn='all',
                          numsubms=6,
                          flagbackup=False)
            else:
                os.symlink(CONCATPATH + d, d)
        os.chdir(origwd)

    if ('cvel' in thislist):

        CVELPATH = DATAPATH + 'ngc4826/fitsfiles/'
        MMSPATH = './unittest_mms/cvel/'
        mmsdir = MMSPATH + 'ngc4826.mms'
        tempdir = 'makemmsdirtemp'
        os.system('mkdir ' + tempdir)
        importuvfits(fitsfile=CVELPATH + 'ngc4826.ll.fits5',
                     vis=tempdir + '/ngc4826.ms')
        partition(vis=tempdir + '/ngc4826.ms',
                  outputvis=MMSPATH + 'ngc4826.mms',
                  separationaxis='scan',
                  flagbackup=False,
                  datacolumn='all')
        os.system('rm -rf ' + tempdir)

        CVELPATH = DATAPATH + 'cvel/input/'
        cvelfiles = [
            'jupiter6cm.demo-thinned.ms',
            'g19_d2usb_targets_line-shortened-thinned.ms',
            'evla-highres-sample-thinned.ms'
        ]
        MMSPATH = './unittest_mms/cvel/'
        thisdir = os.getcwd()
        for cvelms in cvelfiles:
            mmsname = cvelms.replace('.ms', '.mms')
            partition(vis=CVELPATH + cvelms,
                      outputvis=MMSPATH + mmsname,
                      separationaxis='scan',
                      flagbackup=False,
                      datacolumn='all',
                      numsubms=4)
            os.chdir(MMSPATH)
            os.system('ln -s ' + mmsname + ' ' + cvelms)
            os.chdir(thisdir)

        # Create the jup.mms file
        mmsname = 'jup.mms'
        output = MMSPATH + mmsname
        split2(vis=MMSPATH + '/jupiter6cm.demo-thinned.mms',
               outputvis=output,
               field='JUPITER',
               datacolumn='data')
        # Shift all timestamps so the data start at MJD 54709 (the TIME columns
        # are in seconds), presumably to land inside the MJD 54708-55437 span
        # covered by the Jupiter ephemeris attached below.
        tblocal = tbtool()
        tblocal.open(output, nomodify=False)
        a = tblocal.getcol('TIME')
        delta = (54709. * 86400 - a[0])
        a = a + delta
        strt = a[0]
        tblocal.putcol('TIME', a)
        a = tblocal.getcol('TIME_CENTROID')
        a = a + delta
        tblocal.putcol('TIME_CENTROID', a)
        tblocal.close()
        # Apply the same shift to the time columns of the OBSERVATION and FIELD
        # subtables so they stay consistent with the main table.
        tblocal.open(output + '/OBSERVATION', nomodify=False)
        a = tblocal.getcol('TIME_RANGE')
        delta = strt - a[0][0]
        a = a + delta
        tblocal.putcol('TIME_RANGE', a)
        tblocal.close()
        tblocal.open(output + '/FIELD', nomodify=False)
        a = tblocal.getcol('TIME')
        delta = strt - a[0]
        a = a + delta
        tblocal.putcol('TIME', a)
        tblocal.close()
        mslocal = mstool()
        mslocal.open(output, nomodify=False)
        mslocal.addephemeris(
            0,
            os.environ.get('CASAPATH').split()[0] +
            '/data/ephemerides/JPL-Horizons/Jupiter_54708-55437dUTC.tab',
            'Jupiter_54708-55437dUTC', 0)
        mslocal.close()

        CVELMS = DATAPATH + 'fits-import-export/input/test.ms'
        MMSPATH = './unittest_mms/cvel/'
        thisdir = os.getcwd()
        partition(vis=CVELMS,
                  outputvis=MMSPATH + 'test.mms',
                  separationaxis='scan',
                  flagbackup=False,
                  datacolumn='all',
                  numsubms=4)
        os.chdir(MMSPATH)
        os.system('ln -s test.mms test.ms')
        os.chdir(thisdir)

    if ('fixvis' in thislist):
        MSPATH = os.environ.get(
            'CASAPATH').split()[0] + '/data/regression/0420+417/'
        MSNAME = MSPATH + '0420+417.ms'
        MMSPATH = './unittest_mms/fixvis/'
        MMSNAME = MMSPATH + '0420+417.mms'
        partition(vis=MSNAME,
                  outputvis=MMSNAME,
                  datacolumn='all',
                  separationaxis=axis,
                  numsubms=numsubms,
                  flagbackup=False)
        # Create symlink
        thisdir = os.getcwd()
        os.chdir(MMSPATH)
        os.system('ln -s 0420+417.mms 0420+417.ms')
        os.chdir(thisdir)
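
# A hedged aside on the pattern above: the os.system('ln -s ...') calls used to
# alias an MMS under its .ms name could equivalently use the standard library,
# avoiding a shell.  A minimal sketch:
import os
import shutil

def make_alias(target, linkname):
    """Create (or refresh) a symlink linkname -> target."""
    if os.path.islink(linkname):
        os.remove(linkname)
    elif os.path.isdir(linkname):
        shutil.rmtree(linkname, ignore_errors=True)
    os.symlink(target, linkname)

# e.g. make_alias('test.mms', 'test.ms') instead of os.system('ln -s test.mms test.ms')
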
Пример #51
0
def oldsplit(vis, outputvis, datacolumn, field, spw, width, antenna,
          timebin, timerange, scan, intent, array, uvrange,
          correlation, observation, combine, keepflags, keepmms):
    """Create a visibility subset from an existing visibility set:

    Keyword arguments:
    vis -- Name of input visibility file (MS)
            default: none; example: vis='ngc5921.ms'
    outputvis -- Name of output visibility file (MS)
                  default: none; example: outputvis='ngc5921_src.ms'
    datacolumn -- Which data column to split out
                  default='corrected'; example: datacolumn='data'
                  Options: 'data', 'corrected', 'model', 'all',
                  'float_data', 'lag_data', 'float_data,data', and
                  'lag_data,data'.
                  note: 'all' = whichever of the above that are present.
    field -- Field name
              default: field = '' means use all sources
              field = 1 # will get field_id=1 (if you give it an
                          integer, it will retrieve the source with that index)
              field = '1328+307' specifies source '1328+307'.
                 Minimum match can be used, e.g. field = '13*' will
                 retrieve '1328+307' if it is unique or exists.
                 Source names with embedded blanks cannot be included.
    spw -- Spectral window index identifier
            default=-1 (all); example: spw=1
    antenna -- antenna names
               default '' (all),
               antenna = '3 & 7' gives one baseline with antennaid = 3,7.
    timebin -- Interval width for time averaging.
               default: '0s' or '-1s' (no averaging)
               example: timebin='30s'
    timerange -- Time range
                 default='' means all times.  examples:
                 timerange = 'YYYY/MM/DD/hh:mm:ss~YYYY/MM/DD/hh:mm:ss'
                 timerange='< YYYY/MM/DD/HH:MM:SS.sss'
                 timerange='> YYYY/MM/DD/HH:MM:SS.sss'
                 timerange='< ddd/HH:MM:SS.sss'
                 timerange='> ddd/HH:MM:SS.sss'
    scan -- Scan numbers to select.
            default '' (all).
    intent -- Scan intents to select.
            default '' (all).
    array -- (Sub)array IDs to select.     
             default '' (all).
    uvrange -- uv distance range to select.
               default '' (all).
    correlation -- Select correlations, e.g. 'rr, ll' or ['XY', 'YX'].
                   default '' (all).
    observation -- Select by observation ID(s).
                   default '' (all).
    combine -- Data descriptors that time averaging can ignore:
                  scan, and/or state
                  Default '' (none)
    keepflags -- Keep flagged data, if possible
                 Default True

    keepmms -- If the input is a multi-MS, make the output one, too. (experimental)
               Default: False
                 
    """

    casalog.origin('oldsplit')
    mylocals = locals()
    rval = True
    try:

        if (keepmms and ParallelTaskHelper.isParallelMS(vis)): 
            if (timebin!='0s' and timebin!='-1s'): 
                casalog.post('Averaging over time with keepmms=True may lead to results different\n'
                             +'  from those obtained with keepmms=False due to different binning.', 'WARN')
                            
            myms = mstool()
            myms.open(vis)
            mses = myms.getreferencedtables()
            myms.close() 
            mses.sort()

            nfail = 0
            if os.path.exists(outputvis):
                raise ValueError, "Output MS %s already exists - will not overwrite." % outputvis
            tempout = outputvis+str(time.time())
            os.mkdir(tempout)
            successfulmses = []
            mastersubms = ''
            masterptab = ''
            emptyptab = tempout+'/EMPTY_POINTING'
            nochangeinpointing = (str(antenna)+str(timerange)=='')

            if nochangeinpointing:    
                # resulting pointing table is the same for all
                #  -> replace by empty table if it is a link and won't be modified anyway
                #     and put back original into the master after split

                # find the master
                for m in mses:
                    theptab = m+'/POINTING'
                    if not os.path.islink(theptab):
                        #print "is master ", theptab
                        mastersubms = m
                        masterptab = m+'/POINTING'
                        # save time by not copying the POINTING table len(mses) times
                        myttb = tbtool()
                        myttb.open(masterptab)
                        tmpp = myttb.copy(newtablename=emptyptab, norows=True)
                        myttb.close()
                        del myttb
                        tmpp.close()
                        del tmpp
                        break

            mytb = tbtool()

            # prepare the input MMS for processing
            replaced = []
            outputviss = []
            theptabs = []
            
            for m in mses:

                # make sure the SORTED_TABLE keywords are disabled
                mytb.open(m, nomodify=False)
                if 'SORTED_TABLE' in mytb.keywordnames():
                    tobedel = mytb.getkeyword('SORTED_TABLE').split(' ')[1]
                    mytb.removekeyword('SORTED_TABLE')
                    os.system('rm -rf '+tobedel)
                mytb.close()

                # deal with the POINTING table
                theptab = m+'/POINTING'
                theptabs.append(theptab)

                if nochangeinpointing and os.path.islink(theptab):
                    #print "is link ", theptab
                    os.remove(theptab)
                    shutil.copytree(emptyptab, theptab)
                    replaced.append(True)
                else:
                    replaced.append(False)

                # run oldsplit
                outputviss.append(os.path.abspath(tempout+'/'+os.path.basename(m)))
            # end for

            # send off the jobs
            print 'Running split_core ... '
            helper = ParallelTaskHelper('oldsplit', mylocals)
            helper.override_arg('outputvis',outputviss)
            helper._consolidateOutput = False
            goretval = helper.go()

            for i in xrange(len(mses)):
                m = mses[i]

                # deal with the POINTING table
                if replaced[i]:
                    # restore link
                    shutil.rmtree(theptabs[i], ignore_errors=True)
                    os.symlink('../'+os.path.basename(mastersubms)+'/POINTING', theptabs[i])
                    # (link in target will be created by makeMMS)

                # accumulate list of successful splits
                if not goretval[m]:
                    nfail+=1
                else:
                    successfulmses.append(outputviss[i])

            if nfail>0: # there were unsuccessful splits
                if len(successfulmses)==0:
                    casalog.post('Split failed in all subMSs.', 'WARN')
                    rval=False
                else:
                    casalog.post('*** Summary: there were failures in '+str(nfail)+' SUBMSs:', 'WARN')
                    casalog.post('*** (these are harmless if they are caused by selection):', 'WARN')
                    for m in mses:
                        if not goretval[m]:
                            casalog.post(os.path.basename(m)+': '+str(goretval[m]), 'WARN')
                        else:
                            casalog.post(os.path.basename(m)+': '+str(goretval[m]), 'NORMAL') 

                    casalog.post('Will construct MMS from subMSs with successful selection ...', 'NORMAL')

                    if nochangeinpointing: # need to take care of POINTING table
                        # in case the master subms did not make it
                        if not (tempout+'/'+os.path.basename(mastersubms) in successfulmses):
                            # old master subms was not selected.
                            # copy the original masterptab into the new master
                            shutil.rmtree(successfulmses[0]+'/POINTING')
                            shutil.copytree(masterptab, successfulmses[0]+'/POINTING')
                    
            if rval: # construct new MMS from the output
                if(width==1 and str(field)+str(spw)+str(antenna)+str(timerange)+str(scan)+str(intent)\
                   +str(array)+str(uvrange)+str(correlation)+str(observation)==''):
                    ph.makeMMS(outputvis, successfulmses)
                else:
                    myms.open(successfulmses[0], nomodify=False)
                    auxfile = "split_aux_"+str(time.time())
                    for i in xrange(1,len(successfulmses)):
                        myms.virtconcatenate(successfulmses[i], auxfile, '1Hz', '10mas', True)
                    myms.close()
                    os.remove(auxfile)
                    ph.makeMMS(outputvis, successfulmses, True, ['POINTING']) 


            shutil.rmtree(tempout, ignore_errors=True)



        else: # do not output an MMS

            rval = split_core(vis, outputvis, datacolumn, field, spw, width, antenna,
                              timebin, timerange, scan, intent, array, uvrange,
                              correlation, observation, combine, keepflags)

    except Exception, instance:
            casalog.post("*** Error: %s" % (instance), 'SEVERE')
            rval = False
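
# Hedged usage sketch for oldsplit above; the file names are placeholders and it
# assumes the CASA environment that supplies casalog, the taskinit tools, ph and
# split_core.  Every argument is spelled out because the function declares them
# all explicitly.
oldsplit(vis='ngc5921.ms', outputvis='ngc5921_src.ms', datacolumn='corrected',
         field='', spw='', width=1, antenna='', timebin='0s', timerange='',
         scan='', intent='', array='', uvrange='', correlation='',
         observation='', combine='', keepflags=True, keepmms=False)
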
Пример #52
0
def statwt(
    vis,
    dorms,
    byantenna,
    sepacs,
    fitspw,
    fitcorr,
    combine,
    timebin,
    minsamp,
    field,
    spw,
    antenna,
    timerange,
    scan,
    intent,
    array,
    correlation,
    obs,
    datacolumn,
):
    """
    Sets WEIGHT and SIGMA using the scatter of the visibilities.
    """
    casalog.origin("statwt")
    retval = True
    try:
        myms = mstool()
        mytb = tbtool()

        # parameter check for those not fully implemented
        # (should be taken out once implemented)
        if byantenna:
            raise ValueError("byantenna=True is not supported yet")
        if fitcorr != "":
            raise ValueError("fitcorr is not supported yet")
        if timebin != "0s" and timebin != "-1s":
            raise ValueError("timebin is not supported yet")

        datacol = "DATA"
        mytb.open(vis)
        colnames = mytb.colnames()
        mytb.close()

        for datacol in ["CORRECTED_DATA", "DATA", "junk"]:
            if datacol in colnames:
                break
        if datacol == "junk":
            raise ValueError(vis + " does not have a data column")

        if datacolumn == "float_data":
            raise ValueError("float_data is not yet supported")

        if datacolumn == "corrected" and datacol == "DATA":  # no CORRECTED_DATA case (fall back to DATA)
            casalog.post("No %s column found, using %s column" % (datacolumn.upper() + "_DATA", datacol), "WARN")
            datacolumn = datacol
        else:
            if datacolumn == "corrected":
                datacolumn_name = datacolumn.upper() + "_DATA"
            else:
                datacolumn_name = datacolumn.upper()
            casalog.post("Using %s column to determine visibility scatter" % datacolumn_name)

        if ":" in spw:
            casalog.post("The channel selection part of spw will be ignored.", "WARN")

        if len(correlation) > 0:
            correlation = ""
            casalog.post("Correlation selection in statwt has been disabled as of CASA v4.5", "WARN")

        myms.open(vis, nomodify=False)
        retval = myms.statwt(
            dorms,
            byantenna,
            sepacs,
            fitspw,
            fitcorr,
            combine,
            timebin,
            minsamp,
            field,
            spw,
            antenna,
            timerange,
            scan,
            intent,
            array,
            correlation,
            obs,
            datacolumn,
        )
        myms.close()
    except Exception, e:
        casalog.post("Error setting WEIGHT and SIGMA for %s:" % vis, "SEVERE")
        casalog.post("%s" % e, "SEVERE")
        if False:  # Set True for debugging.
            for p in statwt.func_code.co_varnames[: statwt.func_code.co_argcount]:
                v = eval(p)
                print p, "=", v, ", type =", type(v)
        retval = False
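
# Hedged usage sketch for the statwt wrapper above; 'mydata.ms' is a placeholder
# and the call assumes the CASA environment that supplies casalog and the
# taskinit tools.  byantenna, fitcorr and timebin are left at the values the
# wrapper itself requires.
statwt(vis='mydata.ms', dorms=False, byantenna=False, sepacs=True, fitspw='',
       fitcorr='', combine='', timebin='0s', minsamp=2, field='', spw='',
       antenna='', timerange='', scan='', intent='', array='', correlation='',
       obs='', datacolumn='corrected')
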
Пример #53
0
    def test4(self):
        '''hanningsmooth - Test 4: Theoretical and calculated values should be the same for MMS-case'''

        # Split the input to decrease the running time
        split(self.msfile,
              outputvis='splithan.ms',
              scan='1,2',
              datacolumn='data')
        self.msfile = 'splithan.ms'

        # create a test MMS. It creates self.testmms
        self.createMMS(self.msfile)
        self.outputms = 'hann4.mms'

        # check correct flagging (just for one row as a sample)
        mslocal = mstool()
        mslocal.open(self.msfile)
        mslocal.sort('sorted.ms', [
            'OBSERVATION_ID', 'ARRAY_ID', 'SCAN_NUMBER', 'FIELD_ID',
            'DATA_DESC_ID', 'ANTENNA1', 'ANTENNA2', 'TIME'
        ])
        mslocal.close()
        self.msfile = 'sorted.ms'
        flag_col = th.getVarCol(self.msfile, 'FLAG')
        self.assertTrue(flag_col['r1'][0][0] == [False])
        self.assertTrue(flag_col['r1'][0][1] == [False])
        self.assertTrue(flag_col['r1'][0][61] == [False])
        self.assertTrue(flag_col['r1'][0][62] == [False])

        data_col = th.getVarCol(self.msfile, 'DATA')
        hanningsmooth(vis=self.testmms,
                      outputvis=self.outputms,
                      datacolumn='data',
                      keepmms=True)
        self.assertTrue(ParallelDataHelper.isParallelMS(self.outputms),
                        'Output should be an MMS')

        # Sort the MMS
        mslocal.open(self.outputms)
        mslocal.sort('sorted.mms', [
            'OBSERVATION_ID', 'ARRAY_ID', 'SCAN_NUMBER', 'FIELD_ID',
            'DATA_DESC_ID', 'ANTENNA1', 'ANTENNA2', 'TIME'
        ])
        mslocal.close()
        self.outputms = 'sorted.mms'

        corr_col = th.getVarCol(self.outputms, 'DATA')
        nrows = len(corr_col)

        # check correct flagging (just for one row as a sample)
        flag_col = th.getVarCol(self.outputms, 'FLAG')
        self.assertTrue(flag_col['r1'][0][0] == [True])
        self.assertTrue(flag_col['r1'][0][1] == [False])
        self.assertTrue(flag_col['r1'][0][61] == [False])
        self.assertTrue(flag_col['r1'][0][62] == [True])

        # Loop over every 2nd row,pol and get the data for each channel
        tol = 1e-05
        for i in range(1, nrows, 2):
            row = 'r%s' % i
            # polarization is 0-1
            for pol in range(0, 2):
                # array's channels is 0-63
                for chan in range(1, 62):
                    # channels must start from second and end before the last
                    data = data_col[row][pol][chan]
                    dataB = data_col[row][pol][chan - 1]
                    dataA = data_col[row][pol][chan + 1]

                    Smoothed = th.calculateHanning(dataB, data, dataA)
                    CorData = corr_col[row][pol][chan]

                    # Check the difference
                    self.assertTrue(abs(CorData - Smoothed) < tol)
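
# A hedged sketch of the three-point Hanning kernel this test compares against
# (assumed to be what th.calculateHanning evaluates): the smoothed value of a
# channel is 0.25*previous + 0.5*current + 0.25*next, which is also why the first
# and last channels come out flagged in the smoothed data.
def hanning_smooth_channel(prev_chan, this_chan, next_chan):
    return 0.25 * prev_chan + 0.5 * this_chan + 0.25 * next_chan

# e.g. hanning_smooth_channel(data[chan - 1], data[chan], data[chan + 1])
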
Пример #54
0
    def test4(self):
        '''hanningsmooth2 - Test 4: Theoretical and calculated values should be the same for MMS-case'''
	
        # Split the input to decrease the running time
        split2(self.msfile, outputvis='splithan.ms',scan='1,2',datacolumn='data')
        self.msfile = 'splithan.ms'
        
        # create a test MMS. It creates self.testmms
        self.createMMS(self.msfile)
        self.outputms = 'hann4.mms'
        
        # check correct flagging (just for one row as a sample)
        mslocal = mstool()
        mslocal.open(self.msfile)
        mslocal.sort('sorted.ms',['OBSERVATION_ID','ARRAY_ID','SCAN_NUMBER','FIELD_ID','DATA_DESC_ID','ANTENNA1','ANTENNA2','TIME'])
        mslocal.close()
        self.msfile = 'sorted.ms'
        flag_col = th.getVarCol(self.msfile, 'FLAG')
        self.assertTrue(flag_col['r1'][0][0] == [False])
        self.assertTrue(flag_col['r1'][0][1] == [False])
        self.assertTrue(flag_col['r1'][0][61] == [False])
        self.assertTrue(flag_col['r1'][0][62] == [False])
        
        data_col = th.getVarCol(self.msfile, 'DATA')        
        hanningsmooth2(vis=self.testmms, outputvis=self.outputms, datacolumn='data', keepmms=True)
        self.assertTrue(ParallelDataHelper.isParallelMS(self.outputms), 'Output should be an MMS')

        # Sort the MMS
        mslocal.open(self.outputms)
        mslocal.sort('sorted.mms',['OBSERVATION_ID','ARRAY_ID','SCAN_NUMBER','FIELD_ID','DATA_DESC_ID','ANTENNA1','ANTENNA2','TIME'])
        mslocal.close()
        self.outputms = 'sorted.mms'
        
        corr_col = th.getVarCol(self.outputms, 'DATA')
        nrows = len(corr_col)

        # check correct flagging (just for one row as a sample)
        flag_col = th.getVarCol(self.outputms, 'FLAG')
        self.assertTrue(flag_col['r1'][0][0] == [True])
        self.assertTrue(flag_col['r1'][0][1] == [False])
        self.assertTrue(flag_col['r1'][0][61] == [False])
        self.assertTrue(flag_col['r1'][0][62] == [True])
        
        # Loop over every 2nd row,pol and get the data for each channel
        tol = 1e-05
        for i in range(1,nrows,2) :
            row = 'r%s'%i            
            # polarization is 0-1
            for pol in range(0,2) :
                # array's channels is 0-63
                for chan in range(1,62) :
                    # channels must start from second and end before the last
                    data = data_col[row][pol][chan]
                    dataB = data_col[row][pol][chan-1]
                    dataA = data_col[row][pol][chan+1]
        
                    Smoothed = th.calculateHanning(dataB,data,dataA)
                    CorData = corr_col[row][pol][chan]
                    
                    # Check the difference
                    self.assertTrue(abs(CorData-Smoothed) < tol)
Пример #55
0
                         %separationaxis
                          
     try:
         pdh.setupCluster('mstransform')
         pdh.go()
         monolithic_processing = False
     except Exception, instance:
         casalog.post('%s'%instance,'ERROR')
         return False
     
     return True
                 
     
 # Create a local copy of the MSTransform tool
 mtlocal = mttool()
 mslocal = mstool()
     
 try:
                 
     # Gather all the parameters in a dictionary.
     config = {}
     
     if keepflags:
         taqlstr = ''
     else:
         taqlstr = "NOT (FLAG_ROW OR ALL(FLAG))"
     
     # MMS taql selection
     if taql != '' and taql != None:
         if not keepflags:
             taqlstr = taqlstr + " AND "+taql
Пример #56
0
def visstatold(vis=None,
               axis=None,
               datacolumn=None,
               useflags=None,
               spw=None,
               field=None,
               selectdata=None,
               antenna=None,
               uvrange=None,
               timerange=None,
               correlation=None,
               scan=None,
               array=None,
               observation=None):

    casalog.origin('visstatold')

    casalog.post(
        'Use of visstatold is deprecated; please replace calls to visstatold with calls to visstat',
        priority='WARN')

    mslocal = mstool()

    mslocal.open(vis)

    if axis in ['amp', 'amplitude', 'phase', 'imag', 'imaginary', 'real']:
        complex_type = axis
        col = datacolumn
    else:
        complex_type = ''
        col = axis

    if (not selectdata):
        antenna = ''
        uvrange = ''
        timerange = ''
        correlation = ''
        scan = ''
        array = ''
        observation = ''

    s = mslocal.statisticsold(column=col.upper(),
                              complex_value=complex_type,
                              useflags=useflags,
                              spw=spw,
                              field=field,
                              baseline=antenna,
                              uvrange=uvrange,
                              time=timerange,
                              correlation=correlation,
                              scan=scan,
                              array=array,
                              obs=str(observation))

    mslocal.close()

    for stats in s.keys():
        casalog.post(stats + " values --- ", "NORMAL")

        if s[stats]['npts'] > 0:
            casalog.post(
                "         -- number of points [npts]:           " +
                str(int(round(s[stats]['npts']))), "NORMAL")
            casalog.post(
                "         -- minimum value [min]:               " +
                str(s[stats]['min']), "NORMAL")
            casalog.post(
                "         -- maximum value [max]:               " +
                str(s[stats]['max']), "NORMAL")
            casalog.post(
                "         -- Sum of values [sum]:               " +
                str(s[stats]['sum']), "NORMAL")
            casalog.post(
                "         -- Sum of squared values [sumsq]:     " +
                str(s[stats]['sumsq']), "NORMAL")

        casalog.post(stats + " statistics --- ", "NORMAL")
        if s[stats]['npts'] > 0:
            casalog.post(
                "        -- Mean of the values [mean]:                 " +
                str(s[stats]['mean']), "NORMAL")
            casalog.post(
                "        -- Variance of the values [var]:              " +
                str(s[stats]['var']), "NORMAL")
            casalog.post(
                "        -- Standard deviation of the values [stddev]: " +
                str(s[stats]['stddev']), "NORMAL")
            casalog.post(
                "        -- Root mean square [rms]:                    " +
                str(s[stats]['rms']), "NORMAL")
            casalog.post(
                "        -- Median of the pixel values [median]:       " +
                str(s[stats]['median']), "NORMAL")
            casalog.post(
                "        -- Median of the deviations [medabsdevmed]:   " +
                str(s[stats]['medabsdevmed']), "NORMAL")
            casalog.post(
                "        -- Quartile [quartile]:                       " +
                str(s[stats]['quartile']), "NORMAL")
        else:
            casalog.post(stats + " -- No valid points found", "WARN")

    return s
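
# Hedged usage sketch for visstatold above; 'mydata.ms' is a placeholder and the
# call assumes the CASA environment that supplies casalog and the taskinit mstool.
# The function returns the dictionary of statistics that it also logs.
stats = visstatold(vis='mydata.ms', axis='amp', datacolumn='data', useflags=True,
                   spw='', field='', selectdata=False, antenna='', uvrange='',
                   timerange='', correlation='', scan='', array='',
                   observation='')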