Example #1
    def test_readAndParseTbuff(self):
        '''flaghelper: read and parse commands, apply tbuff, and compare with the original'''
        print ''
        
        # MJD in seconds of timeranges are these
        # <startTime>4891227930515540000 <endTime>4891227932453838000
        # <startTime>4891228473545856000 <endTime>4891228473731891000
        # <startTime>4891226924455911000 <endTime>4891226927502314000
        # <startTime>4891228838164987000 <endTime>4891228838418996000
        # <startTime>4891228609440808000 <endTime>4891228612489617000

        online = ["antenna='DV03&&*' timerange='2013/11/15/10:25:30.516~2013/11/15/10:25:32.454'",
                  "antenna='DA44&&*' timerange='2013/11/15/10:34:33.546~2013/11/15/10:34:33.732'",
                  "antenna='DA46&&*' timerange='2013/11/15/10:08:44.456~2013/11/15/10:08:47.502'",
                  "antenna='DV09&&*' timerange='2013/11/15/10:18:11.798~2013/11/15/10:18:13.837'",
                  "antenna='DV05&&*' timerange='2013/11/15/10:40:38.165~2013/11/15/10:40:38.419'"]

        myinput = "antenna='DV03&&*' timerange='2013/11/15/10:25:30.516~2013/11/15/10:25:32.454'\n"\
                  "antenna='DA44&&*' timerange='2013/11/15/10:34:33.546~2013/11/15/10:34:33.732'\n"\
                  "antenna='DA46&&*' timerange='2013/11/15/10:08:44.456~2013/11/15/10:08:47.502'\n"\
                  "antenna='DV09&&*' timerange='2013/11/15/10:18:11.798~2013/11/15/10:18:13.837'\n"\
                  "antenna='DV05&&*' timerange='2013/11/15/10:40:38.165~2013/11/15/10:40:38.419'"
        
        filename1 = 'flaghelperonline2.txt'
        create_input(myinput, filename1)
        
        # First timerange from online before padding
        origt = timerange = '2013/11/15/10:25:30.516~2013/11/15/10:25:32.454'
        
        # Apply tbuff to timeranges
        timebuffer = 1.1
        dlist1 = fh.readAndParse([filename1], tbuff=timebuffer)
        self.assertEqual(len(dlist1), 5)
        
        # Get the first padded timerange from output
        padt = dlist1[0]['timerange']
        
        # Revert the tbuff application manually
        t0,t1 = padt.split('~',1)
        startTime = qa.totime(t0)['value']
        startTimeSec = float((startTime * 24 * 3600) + timebuffer)
        startTimeSec = qa.quantity(startTimeSec, 's')
        paddedT0 = qa.time(startTimeSec,form='ymd',prec=9)[0]
        # end time
        endTime = qa.totime(t1)['value']
        endTimeSec = float((endTime * 24 * 3600) - timebuffer)
        endTimeSec = qa.quantity(endTimeSec, 's')
        paddedT1 = qa.time(endTimeSec,form='ymd',prec=9)[0]
        
        newtimerange =  paddedT0+'~'+paddedT1
        
        # Compare with the original
        self.assertEqual(origt, newtimerange)
        
        # Compare with original values from Flag.xml
        xmlt0 = float(4891227930515540000) * 1.0E-9
        xmlt1 = float(4891227932453838000) * 1.0E-9
        
        self.assertAlmostEqual(xmlt0, startTimeSec['value'], places=3)
        self.assertAlmostEqual(xmlt1, endTimeSec['value'], places=3)
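A minimal sketch (not part of the original test) of the conversion the assertions above rely on: the Flag.xml <startTime>/<endTime> values listed in the comments are nanoseconds on the MJD scale, and qa.quantity()/qa.time() map them back to the 'ymd' strings used in the timeranges. Names below are illustrative.

# Sketch: relate the first Flag.xml <startTime> (in nanoseconds) to its timerange start.
# Assumes the CASA quanta tool is available as `qa`, exactly as in the test above.
xml_start_ns = 4891227930515540000              # <startTime> from Flag.xml, in ns
mjd_sec = xml_start_ns * 1.0E-9                 # MJD in seconds, same as xmlt0 above
t0_str = qa.time(qa.quantity(mjd_sec, 's'), form='ymd', prec=9)[0]
# t0_str should read '2013/11/15/10:25:30.516', the first start time in the input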
Example #2
    def test_readAndParse(self):
        '''flaghelper: compare the read and parse from a file and from a list of strings'''
        print ''

        # <startTime>4891227930515540000 <endTime>4891227932453838000
        # <startTime>4891228473545856000 <endTime>4891228473731891000
        # <startTime>4891226924455911000 <endTime>4891226927502314000
        # <startTime>4891228838164987000 <endTime>4891228838418996000
        # <startTime>4891228609440808000 <endTime>4891228612489617000

        online = [
            "antenna='DV03&&*' timerange='2013/11/15/10:25:30.516~2013/11/15/10:25:32.454'",
            "antenna='DA44&&*' timerange='2013/11/15/10:34:33.546~2013/11/15/10:34:33.732'",
            "antenna='DA46&&*' timerange='2013/11/15/10:08:44.456~2013/11/15/10:08:47.502'",
            "antenna='DV09&&*' timerange='2013/11/15/10:18:11.798~2013/11/15/10:18:13.837'",
            "antenna='DV05&&*' timerange='2013/11/15/10:40:38.165~2013/11/15/10:40:38.419'"
        ]

        myinput = "antenna='DV03&&*' timerange='2013/11/15/10:25:30.516~2013/11/15/10:25:32.454'\n"\
                  "antenna='DA44&&*' timerange='2013/11/15/10:34:33.546~2013/11/15/10:34:33.732'\n"\
                  "antenna='DA46&&*' timerange='2013/11/15/10:08:44.456~2013/11/15/10:08:47.502'\n"\
                  "antenna='DV09&&*' timerange='2013/11/15/10:18:11.798~2013/11/15/10:18:13.837'\n"\
                  "antenna='DV05&&*' timerange='2013/11/15/10:40:38.165~2013/11/15/10:40:38.419'"

        filename1 = 'flaghelperonline1.txt'
        create_input(myinput, filename1)

        dlist1 = fh.readAndParse([filename1])
        self.assertEqual(len(dlist1), 5)

        # Use the list instead of the file
        dlist2 = fh.readAndParse(online)

        self.assertListEqual(dlist1, dlist2)

        # Compare with the original Flag.xml, second row
        orig_time_start = float(4891228473545856000) * 1.0E-9
        orig_time_end = float(4891228473731891000) * 1.0E-9

        proc_time = dlist2[1]['timerange']
        t0, t1 = proc_time.split('~', 1)
        startTime = qa.totime(t0)['value']
        startTimeSec = float(startTime * 24 * 3600)
        endTime = qa.totime(t1)['value']
        endTimeSec = float(endTime * 24 * 3600)

        self.assertAlmostEqual(orig_time_start, startTimeSec, places=3)
        self.assertAlmostEqual(orig_time_end, endTimeSec, places=3)
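The manual `value * 24 * 3600` conversion above has an equivalent form used elsewhere in this listing (see the subvs() example): qa.convert() turns the day-valued quantity returned by qa.totime() into seconds directly. A short sketch with an illustrative endpoint:

# Two equivalent ways to express a timerange endpoint in MJD seconds.
t0 = '2013/11/15/10:34:33.546'                          # start of the second row above
sec_manual = qa.totime(t0)['value'] * 24 * 3600         # days -> seconds by hand
sec_convert = qa.convert(qa.totime(t0), 's')['value']   # let the quanta tool convert
# The two values should agree to floating-point precision.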
Example #3
    def test_readAndParse(self):
        '''flaghelper: compare the read and parse from a file and from a list of strings'''
        print ''
        
        # <startTime>4891227930515540000 <endTime>4891227932453838000
        # <startTime>4891228473545856000 <endTime>4891228473731891000
        # <startTime>4891226924455911000 <endTime>4891226927502314000
        # <startTime>4891228838164987000 <endTime>4891228838418996000
        # <startTime>4891228609440808000 <endTime>4891228612489617000

        online = ["antenna='DV03&&*' timerange='2013/11/15/10:25:30.516~2013/11/15/10:25:32.454'",
                  "antenna='DA44&&*' timerange='2013/11/15/10:34:33.546~2013/11/15/10:34:33.732'",
                  "antenna='DA46&&*' timerange='2013/11/15/10:08:44.456~2013/11/15/10:08:47.502'",
                  "antenna='DV09&&*' timerange='2013/11/15/10:18:11.798~2013/11/15/10:18:13.837'",
                  "antenna='DV05&&*' timerange='2013/11/15/10:40:38.165~2013/11/15/10:40:38.419'"]

        myinput = "antenna='DV03&&*' timerange='2013/11/15/10:25:30.516~2013/11/15/10:25:32.454'\n"\
                  "antenna='DA44&&*' timerange='2013/11/15/10:34:33.546~2013/11/15/10:34:33.732'\n"\
                  "antenna='DA46&&*' timerange='2013/11/15/10:08:44.456~2013/11/15/10:08:47.502'\n"\
                  "antenna='DV09&&*' timerange='2013/11/15/10:18:11.798~2013/11/15/10:18:13.837'\n"\
                  "antenna='DV05&&*' timerange='2013/11/15/10:40:38.165~2013/11/15/10:40:38.419'"
        
        filename1 = 'flaghelperonline1.txt'
        create_input(myinput, filename1)
        
        dlist1 = fh.readAndParse([filename1])
        self.assertEqual(len(dlist1), 5)
        
        # Use the list instead of the file
        dlist2 = fh.readAndParse(online)
        
        self.assertListEqual(dlist1, dlist2)
        
        # Compare with the original Flag.xml, second row
        orig_time_start = float(4891228473545856000) * 1.0E-9
        orig_time_end = float(4891228473731891000) * 1.0E-9

        proc_time = dlist2[1]['timerange']
        t0,t1 = proc_time.split('~',1)
        startTime = qa.totime(t0)['value']
        startTimeSec = float(startTime * 24 * 3600)
        endTime = qa.totime(t1)['value']
        endTimeSec = float(endTime * 24 * 3600)

        self.assertAlmostEqual(orig_time_start, startTimeSec, places=3)
        self.assertAlmostEqual(orig_time_end, endTimeSec, places=3) 
Example #4
def datestrs_to_MJDs(cdsdict):
    """
    All of the date strings must have the same reference frame (i.e. UT).
    """
    datestrlist = cdsdict["data"]

    # Convert to FITS format, otherwise qa.totime() will silently drop the hours.
    datestrlist = [d.replace(" ", "T") for d in datestrlist]

    timeq = {}
    # Do first conversion to get unit.
    firsttime = qa.totime(datestrlist[0])
    timeq["unit"] = firsttime["unit"]
    timeq["value"] = [firsttime["value"]]

    for datestr in datestrlist[1:]:
        timeq["value"].append(qa.totime(datestr)["value"])

    return {"unit": timeq["unit"], "value": scipy.array(timeq["value"])}
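A hypothetical usage sketch (not from the original module): datestrs_to_MJDs() expects a dict shaped like the 'MJD' column built by readJPLephem(), with Horizons-style date strings under the "data" key.

# Illustrative input; the space is replaced with 'T' internally before qa.totime().
dates = {"data": ["2013-Nov-15 10:25:30", "2013-Nov-15 10:34:33"]}
mjds = datestrs_to_MJDs(dates)
# mjds["unit"] is the unit reported by qa.totime() and mjds["value"] is a scipy
# array with one MJD value per input date string.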
Example #5
def datestrs_to_MJDs(cdsdict):
    """
    All of the date strings must have the same reference frame (i.e. UT).
    """
    datestrlist = cdsdict['data']

    # Convert to FITS format, otherwise qa.totime() will silently drop the hours.
    datestrlist = [d.replace(' ', 'T') for d in datestrlist]

    timeq = {}
    # Do first conversion to get unit.
    firsttime = qa.totime(datestrlist[0])
    timeq['unit'] = firsttime['unit']
    timeq['value'] = [firsttime['value']]

    for datestr in datestrlist[1:]:
        timeq['value'].append(qa.totime(datestr)['value'])

    return {'unit': timeq['unit'], 'value': scipy.array(timeq['value'])}
Example #7
    def test_readAndParseIrregularTbuff(self):
        '''flaghelper: read and parse commands and apply an irregular (two-valued) tbuff'''
        print ''
        
        # MJD in seconds of timeranges are these
        # <startTime>4891227930515540000 <endTime>4891227932453838000
        # <startTime>4891228473545856000 <endTime>4891228473731891000
        # <startTime>4891226924455911000 <endTime>4891226927502314000
        # <startTime>4891228838164987000 <endTime>4891228838418996000
        # <startTime>4891228609440808000 <endTime>4891228612489617000

        online = ["antenna='DV03&&*' timerange='2013/11/15/10:25:30.516~2013/11/15/10:25:32.454'",
                  "antenna='DA44&&*' timerange='2013/11/15/10:34:33.546~2013/11/15/10:34:33.732'",
                  "antenna='DA46&&*' timerange='2013/11/15/10:08:44.456~2013/11/15/10:08:47.502'",
                  "antenna='DV09&&*' timerange='2013/11/15/10:18:11.798~2013/11/15/10:18:13.837'",
                  "antenna='DV05&&*' timerange='2013/11/15/10:40:38.165~2013/11/15/10:40:38.419'"]

        myinput = "antenna='DV03&&*' timerange='2013/11/15/10:25:30.516~2013/11/15/10:25:32.454'\n"\
                  "antenna='DA44&&*' timerange='2013/11/15/10:34:33.546~2013/11/15/10:34:33.732'\n"\
                  "antenna='DA46&&*' timerange='2013/11/15/10:08:44.456~2013/11/15/10:08:47.502'\n"\
                  "antenna='DV09&&*' timerange='2013/11/15/10:18:11.798~2013/11/15/10:18:13.837'\n"\
                  "antenna='DV05&&*' timerange='2013/11/15/10:40:38.165~2013/11/15/10:40:38.419'"
        
        filename1 = 'flaghelperonline2.txt'
        create_input(myinput, filename1)
        
        # timeranges from online before padding, for comparison later
        timeranges=[]
        for cmd in online:
            a,b = cmd.split(' ')
            b = b.lstrip('timerange=')
            timeranges.append(b.strip("'"))
                    
        # Apply 2 values of tbuff to timeranges
        timebuffer = [0.4, 0.7]
        dlist1 = fh.readAndParse([filename1], tbuff=timebuffer)
        self.assertEqual(len(dlist1), 5)
        
        # check the padded time ranges before and after the application
        n = 0
        for cmd in dlist1:
            padt = cmd['timerange']
            
#        padt = dlist1[0]['timerange']
        
            # Revert the tbuff application manually
            t0,t1 = padt.split('~',1)
            startTime = qa.totime(t0)['value']
            startTimeSec = float((startTime * 24 * 3600) + timebuffer[0])
            startTimeSec = qa.quantity(startTimeSec, 's')
            paddedT0 = qa.time(startTimeSec,form='ymd',prec=9)[0]
            # end time
            endTime = qa.totime(t1)['value']
            endTimeSec = float((endTime * 24 * 3600) - timebuffer[1])
            endTimeSec = qa.quantity(endTimeSec, 's')
            paddedT1 = qa.time(endTimeSec,form='ymd',prec=9)[0]
            
            newtimerange =  paddedT0+'~'+paddedT1
            
            # Compare with the original
            self.assertEqual(timeranges[n], newtimerange)
            n += 1
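A short sketch (illustrative values, not part of the test) of the padding that the two-valued tbuff applies, which is what the loop above reverts: the start of each timerange is moved tbuff[0] seconds earlier and the end tbuff[1] seconds later.

# Padding applied by tbuff=[0.4, 0.7] to one timerange, in MJD seconds.
tbuff = [0.4, 0.7]
orig = '2013/11/15/10:25:30.516~2013/11/15/10:25:32.454'
t0, t1 = orig.split('~', 1)
padded_start = qa.totime(t0)['value'] * 24 * 3600 - tbuff[0]   # start 0.4 s earlier
padded_end = qa.totime(t1)['value'] * 24 * 3600 + tbuff[1]     # end 0.7 s later
# Adding tbuff[0] back to the start and subtracting tbuff[1] from the end recovers
# the original endpoints, as the assertions in the loop above verify.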
Example #8
def readJPLephem(fmfile, version=''):
    """
    Reads a JPL Horizons text file (see
    http://ssd.jpl.nasa.gov/horizons.cgi#top ) for a solar system object and
    returns various quantities in a dictionary.  The dict will be blank ({}) if
    there is a failure.
    """
    retdict = {}
    casalog.origin('readJPLephem')

    # Try opening fmfile now, because otherwise there's no point continuing.
    try:
        ephem = open(fmfile, 'rb')
        print("opened the file=", fmfile)
        lines = ephem.readlines()
        # skip this, handle by rstrip later
        #crCount=0
        #newlines=''
        #newln=''
        #for ln in lines:
        #  n = ln.count('\r')
        #  if n > 0:
        #    newln=ln.replace('\r','\n')
        #    crCount+=n
        #    newlines += newln
        #if crCount > 0:
        #  print("The input file appears to contains the carriage return code, \'^M\', will replace it with \'\\n\'...")
        #  raw_input('pause0')
        #  ephem.close()
        #  ephem = open('temp_ephem_data.txt','w+')
        #  ephem.write(newlines)
        ephem.seek(0)
    except IOError:
        casalog.post("Could not open ephemeris file " + fmfile,
                     priority="SEVERE")
        return {}

    # reset to default search pattern for MJD
    cols['MJD']['pat'] = r'(?P<MJD>\d+-\w+-\d+ \d+:\d+)'
    # Setup the regexps.

    # Headers (one time only things)

    # Dictionary of quantity label: regexp pattern pairs that will be searched
    # for once.  The matching quantity will go in retdict[label].  Only a
    # single quantity (group) will be retrieved per line.
    headers = {
        'NAME': {
            'pat': r'^[>\s]*Target body name:\s+\d*\s*(\w+)'
        },  # object name, w.o. number
        'ephtype': {
            'pat': r'\?s_type=1#top>\]\s*:\s+\*(\w+)'
        },  # e.g. OBSERVER
        'obsloc': {
            'pat': r'^[>\s]*Center-site name:\s+(\w+)'
        },  # e.g. GEOCENTRIC
        # Catch either an explicit mean radius or a solitary target radius.
        'meanrad': {
            'pat':
            r'(?:Mean radius \(km\)\s*=|^Target radii\s*:)\s*([0-9.]+)(?:\s*km)?\s*$',
            'unit': 'km'
        },
        # Triaxial target radii
        #'radii': {'pat': r'Target radii\s*:\s*([0-9.]+\s*x\s*[0-9.]+\s*x\s*[0-9.]+)\s*km.*Equator, meridian, pole',
        'radii': {
            'pat':
            r'Target radii\s*:\s*([0-9.]+\s*x\s*[0-9.]+\s*x\s*[0-9.]+)\s*km.*Equator, meridian, pole|Target radii\s*:\s*([0-9.]+)\s*km\s*',
            'unit': 'km'
        },
        'T_mean': {
            'pat': r'Mean Temperature \(K\)\s*=\s*([0-9.]+)',
            'unit': 'K'
        },

        # Figure out the units later.
        'rot_per': {
            'pat':
            r'(?i)(?<!Inferred )\b(rot(ation(al)?|\.)?\s*per.*=\s*([-0-9.]+\s*[dhr]*|Synchronous))'
        },
        'orb_per': {
            'pat':
            r'Orbital period((, days)?\s*=\s*[-0-9.]+\s*[dhr](\s*\(?R\)?)?)'
        },

        # MeasComet does not read units for these! E-lon(deg),  Lat(deg),     Alt(km)
        'GeoLong': {
            'pat':
            r'^[>\s]*Center geodetic\s*: ([-+0-9.]+,\s*[-+0-9.]+,\s*[-+0-9.]+)'
        },
        'dMJD': {
            'pat': r'^[>\s]*Step-size\s*:\s*(.+)'
        },

        #                     request method v  wday mth   mday  hh  mm  ss   yyyy
        'VS_CREATE': {
            'pat':
            r'^[>\s]*Ephemeris / \w+ \w+ (\w+\s+\d+\s+\d+:\d+:\d+\s+\d+)'
        }
    }
    for hk in headers:
        headers[hk]['pat'] = re.compile(headers[hk]['pat'])

    # Data ("the rows of the table")

    # need date, r (heliocentric distance), delta (geocentric distance), and phang (phase angle).
    # (Could use the "dot" time derivatives for Doppler shifting, but it's
    # likely unnecessary.)
    #datapat = r'^[>\s]*'
    datapat = r'^\s*'

    stoppat = r'[>\s]*\$\$EOE$'  # Signifies the end of data.

    # Read fmfile into retdict.
    num_cols = 0
    in_data = False
    comp_mismatches = []
    print_datapat = False
    # define interpretation of invalid values ('n.a.')
    invalid = -999.
    for origline in ephem:
        line = origline.rstrip('\r\n')
        if in_data:
            if re.match(stoppat, line):
                break
            matchobj = re.search(datapat, line)
            if matchobj:
                #print("matchobj!")
                gdict = matchobj.groupdict()
                #print("gdict=",gdict)
                for col in gdict:
                    if gdict[col] == 'n.a.':
                        gdict[col] = invalid
                #    print("cols.key=",cols.keys())

                    if not cols[col].get('unwanted'):
                        retdict['data'][col]['data'].append(gdict[col])
                if len(gdict) < num_cols:
                    print("Partially mismatching line:")
                    print(line)
                    print("Found:")
                    print(gdict)
                    print_datapat = True
                    raw_input("pause0")
            else:
                print_datapat = True
                # Chomp trailing whitespace.
                comp_mismatches.append(re.sub(r'\s*$', '', line))
        elif re.match(
                r'^[>\s]*' + cols['MJD']['header'] + r'\s+' +
                cols['RA']['header'], line):
            # need to modify regex to search for the header name for MJD containing second digits
            if re.match(r'^[>\s]*Date__\(UT\)__HR:MN:SC.fff', line):
                cols['MJD']['pat'] = r'(?P<MJD>\d+-\w+-\d+ \d+:\d+:\d+.\d+)'
            # See what columns are present, and finish setting up datapat and
            # retdict.
            havecols = []
            # extract coordinate ref info

            m = re.match(r'(^[>\s]*)(\S+)(\s+)(' + cols['RA']['header'] + ')',
                         line)
            coordref = m.group(4).split('(')[-1]
            cols['RA']['comment'] += '(' + coordref + ')'
            cols['DEC']['comment'] += '(' + coordref + ')'
            #print("cols['RA']['comment']=",  cols['RA']['comment'])
            # Chomp trailing whitespace.
            myline = re.sub(r'\s*$', '', line)
            titleline = myline
            remaining_cols = cols.keys()
            found_col = True
            # This loop will terminate one way or another.
            while myline and remaining_cols and found_col:
                found_col = False
                #print("myline = '%s'" % myline)
                #print("remaining_cols =", ', '.join(remaining_cols))
                for col in remaining_cols:
                    if re.match(r'^[>\s]*' + cols[col]['header'], myline):
                        #print("Found", col)
                        havecols.append(col)
                        remaining_cols.remove(col)
                        myline = re.sub(r'^[>\s]*' + cols[col]['header'], '',
                                        myline)
                        found_col = True
                        break
            datapat += r'\s+'.join([cols[col]['pat'] for col in havecols])
            sdatapat = datapat
            casalog.post("Found columns: " + ', '.join(havecols))
            datapat = re.compile(datapat)
            retdict['data'] = {}
            for col in havecols:
                if not cols[col].get('unwanted'):
                    retdict['data'][col] = {
                        'comment': cols[col]['comment'],
                        'data': []
                    }
            num_cols = len(retdict['data'])
        #elif re.match(r'^\$\$SOE\s*$', line):  # Start of ephemeris
        elif re.match(r'^[>\s]*\$\$SOE\s*$', line):  # Start of ephemeris
            casalog.post("Starting to read data.", priority='INFO2')
            in_data = True
        else:
            #print("line =", line)
            #print("looking for",)
            for hk in headers:
                #print("hk=",hk)

                if not retdict.has_key(hk):
                    matchobj = re.search(headers[hk]['pat'], line)
                    if matchobj:
                        if hk == 'radii':
                            mobjs = matchobj.groups()
                            for gp in mobjs:
                                if gp != None:
                                    retdict[hk] = gp
                                    break
                            break
                        else:
                            retdict[hk] = matchobj.group(
                                1)  # 0 is the whole line
                            break
    ephem.close()
    # clean up the temp file if exists
    #if os.path.exists('temp_ephem_data.txt'):
    #  os.remove('temp_ephem_data.txt')

    # If there were errors, provide debugging info.
    if comp_mismatches:
        print("Completely mismatching lines:")
        #print("\n".join(comp_mismatches))
    if print_datapat:
        print("The apparent title line is:")
        print(titleline)
        print("datapat = r'%s'" % sdatapat)

    # Convert numerical strings into actual numbers.
    try:
        retdict['earliest'] = datestr_to_epoch(
            retdict['data']['MJD']['data'][0])
        retdict['latest'] = datestr_to_epoch(
            retdict['data']['MJD']['data'][-1])
    except Exception as e:
        print("Error!")
        if retdict.has_key('data'):
            if retdict['data'].has_key('MJD'):
                if retdict['data']['MJD'].has_key('data'):
                    #print("retdict['data']['MJD']['data'] =", retdict['data']['MJD']['data'])
                    print("retdict['data'] =", retdict['data'])
                else:
                    print("retdict['data']['MJD'] has no 'data' key.")
                    print("retdict['data']['MJD'].keys() =",
                          retdict['data']['MJD'].keys())
            else:
                print("retdict['data'] has no 'MJD' key.")
                print("retdict['data'].keys() =", retdict['data'].keys())
        else:
            print("retdict has no 'data' key.")
        raise e

    for hk in headers:
        if retdict.has_key(hk):
            if headers[hk].has_key('unit'):
                if hk == 'radii':
                    radii = retdict[hk].split('x')
                    if len(radii) == 1:
                        a = float(radii[0])
                        retdict[hk] = {
                            'unit': headers[hk]['unit'],
                            'value': (a, a, a)
                        }
                        retdict['meanrad'] = {
                            'unit': headers[hk]['unit'],
                            'value': a
                        }
                    else:
                        a, b, c = [float(r) for r in radii]
                        retdict[hk] = {
                            'unit': headers[hk]['unit'],
                            'value': (a, b, c)
                        }
                        retdict['meanrad'] = {
                            'unit': headers[hk]['unit'],
                            'value': mean_radius(a, b, c)
                        }
                else:
                    try:
                        # meanrad might already have been converted.
                        if type(retdict[hk]) != dict:
                            retdict[hk] = {
                                'unit': headers[hk]['unit'],
                                'value': float(retdict[hk])
                            }
                    except Exception as e:
                        print("Error converting header", hk, "to a Quantity.")
                        print("retdict[hk] =", retdict[hk])
                        raise e
            elif hk == 'GeoLong':
                long_lat_alt = retdict[hk].split(',')
                retdict['GeoLong'] = float(long_lat_alt[0])
                retdict['GeoLat'] = float(long_lat_alt[1])
                retdict['GeoDist'] = float(long_lat_alt[2])
            elif hk == 'dMJD':
                retdict[hk] = qa.convert(
                    qa.totime(retdict[hk].replace('minutes', 'min')),
                    'd')['value']
            elif hk == 'orb_per':
                unit = 'h'
                retrograde = False
                if 'd' in retdict[hk].lower():
                    unit = 'd'  # Actually this is most common.
                if 'r' in retdict[hk].lower():
                    retrograde = True
                value = get_num_from_str(retdict[hk], 'orbital period')
                if value != False:
                    if retrograde and value > 0.0:
                        value = -value
                    retdict[hk] = {'unit': unit, 'value': value}
                else:
                    del retdict[hk]

    # The rotation period might depend on the orbital period ("Synchronous"),
    # so handle it after all the other headers have been done.
    if 'rot_per' in retdict:
        rpstr = retdict['rot_per']
        if 'ROTPER' in rpstr:  # Asteroid
            retdict['rot_per'] = {
                'unit': 'h',  # Always seems to be for asteroids.
                'value': get_num_from_str(rpstr, 'rotation period')
            }
        elif 'Synchronous' in rpstr:
            retdict['rot_per'] = retdict['orb_per']
        else:  # Most likely a planet.
            match = re.search(r'(\d+)h\s*(\d+)m\s*([0-9.]+)s', rpstr)
            if match:
                hms = [float(match.group(i)) for i in range(1, 4)]
                retdict['rot_per'] = {
                    'unit': 'h',
                    'value': hms[0] + (hms[1] + hms[2] / 60.0) / 60.0
                }
            else:
                # DON'T include the optional r in hr!  qa.totime can't handle it.
                try:
                    match = re.search(r'([-0-9.]+)(?:\s*\+-[0-9.]+)?\s*([dh])',
                                      rpstr)
                    if match:
                        retdict['rot_per'] = {
                            'unit': match.group(2),
                            'value': float(match.group(1))
                        }
                except:
                    print("Error parsing the rotation period from")
                    print(rpstr)

    if retdict['data'].has_key('ang_sep'):
        retdict['data']['obs_code'] = {'comment': 'Obscuration code'}
    for dk in retdict['data']:
        if dk == 'obs_code':
            continue
        if cols[dk].has_key('unit'):
            retdict['data'][dk]['data'] = {
                'unit':
                cols[dk]['unit'],
                'value':
                scipy.array([float(s) for s in retdict['data'][dk]['data']])
            }
            if dk == 'RadVel':
                # Convert from km/s to AU/d.  Blame MeasComet, not me.
                retdict['data'][dk]['data']['unit'] = 'AU/d'
                kmps_to_AUpd = qa.convert('1km/s', 'AU/d')['value']
                retdict['data'][dk]['data']['value'] *= kmps_to_AUpd

        if re.match(r'.*(RA|DEC)$', dk):
            retdict['data'][dk] = convert_radec(retdict['data'][dk])
        elif dk == 'MJD':
            retdict['data']['MJD'] = datestrs_to_MJDs(retdict['data']['MJD'])
        elif dk == 'ang_sep':
            angseps = []
            obscodes = []
            for asoc in retdict['data'][dk]['data']:
                angsep, obscode = asoc.split('/')
                angseps.append(float(angsep))
                obscodes.append(obscode)
            retdict['data'][dk]['data'] = {
                'unit': 'arcseconds',
                'value': angseps
            }
            retdict['data']['obs_code']['data'] = obscodes

    if len(retdict.get('radii', {'value': []})['value']) == 3 \
           and retdict['data'].has_key('NP_RA') and retdict['data'].has_key('NP_DEC'):
        # Do a better mean radius estimate using the actual theta.
        retdict['meanrad']['value'] = mean_radius_with_known_theta(retdict)

    # To be eventually usable as a MeasComet table, a few more keywords are needed.
    retdict['VS_TYPE'] = 'Table of comet/planetary positions'
    if version == '':
        version = '0003.0001'
    #retdict['VS_VERSION'] = '0003.0001'
    retdict['VS_VERSION'] = version
    if retdict.has_key('VS_CREATE'):
        dt = time.strptime(retdict['VS_CREATE'], "%b %d %H:%M:%S %Y")
    else:
        casalog.post(
            "The ephemeris creation date was not found.  Using the current time.",
            priority="WARN")
        dt = time.gmtime()
    retdict['VS_CREATE'] = time.strftime('%Y/%m/%d/%H:%M', dt)

    # VS_DATE is required by MeasComet, but it doesn't seem to be actually used.
    retdict['VS_DATE'] = time.strftime('%Y/%m/%d/%H:%M', time.gmtime())

    if retdict['data'].has_key('MJD'):
        #casalog.post("retdict.keys=%s" % retdict.keys())
        retdict['MJD0'] = retdict['data']['MJD']['value'][0] - retdict['dMJD']
    else:
        print(
            "The table will not be usable with me.framecomet because it lacks MJD."
        )

    # adding posrefsys keyword
    if cols['RA']['comment'].count('J2000'):
        retdict['posrefsys'] = 'ICRF/J2000.0'
    if cols['RA']['comment'].count('B1950'):
        retdict['posrefsys'] = 'FK4/B1950.0'

    return retdict
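A hypothetical usage sketch (the file name is illustrative): readJPLephem() returns {} on failure, otherwise a dict whose 'data' entry holds the parsed table columns.

# Read a saved JPL Horizons query result and inspect a few of the returned fields.
eph = readJPLephem('ceres_ephem.txt')        # illustrative file name
if eph:                                      # an empty dict means the read failed
    print(eph['NAME'])                       # object name from the header
    print(eph['earliest'], eph['latest'])    # epochs of the first and last rows
    mjds = eph['data']['MJD']['value']       # scipy array of MJDs (see datestrs_to_MJDs)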
Example #9
def subvs(vis=None,
          outputvis=None,
          timerange=None,
          spw=None,
          subtime1=None,
          subtime2=None,
          splitsel=True,
          reverse=False,
          overwrite=False):
    """Perform vector subtraction for visibilities
    Keyword arguments:
    vis -- Name of input visibility file (MS)
            default: none; example: vis='ngc5921.ms'
    outputvis -- Name of output uv-subtracted visibility file (MS)
                  default: none; example: outputvis='ngc5921_src.ms'
    timerange -- Time range of performing the UV subtraction:
                 default='' means all times.  examples:
                 timerange = 'YYYY/MM/DD/hh:mm:ss~YYYY/MM/DD/hh:mm:ss'
                 timerange = 'hh:mm:ss~hh:mm:ss'
    spw -- Select spectral window/channel.
           default = '' all the spectral channels. Example: spw='0:1~20'
    subtime1 -- Time range 1 of the background to be subtracted from the data 
                 default='' means all times.  format:
                 timerange = 'YYYY/MM/DD/hh:mm:ss~YYYY/MM/DD/hh:mm:ss'
                 timerange = 'hh:mm:ss~hh:mm:ss'
    subtime2 -- Time range 2 of the background to be subtracted from the data
                 default='' means all times.  examples:
                 timerange = 'YYYY/MM/DD/hh:mm:ss~YYYY/MM/DD/hh:mm:ss'
                 timerange = 'hh:mm:ss~hh:mm:ss'
    splitsel -- True or False. default = True. If splitsel = False, then the entire input
            measurement set is copied as the output measurement set (outputvis), with 
            background subtracted at selected timerange and spectral channels. 
            If splitsel = True,then only the selected timerange and spectral channels 
            are copied into the output measurement set (outputvis).
    reverse -- True or False. default = False. If reverse = False, then the times indicated
            by subtime1 and/or subtime2 are treated as background and subtracted; If reverse
            = True, then reverse the sign of the background-subtracted data. The option can 
            be used for mapping absorptive structure.
    overwrite -- True or False. default = False. If overwrite = True and
                outputvis already exists, the selected subtime and spw in the 
                output measurement set will be replaced with background subtracted
                visibilities

    """
    #check the visibility ms
    if not outputvis or outputvis.isspace():
        raise ValueError, 'Please specify outputvis'

    if os.path.exists(outputvis):
        if overwrite:
            print "The already existing output measurement set will be updated."
        else:
            raise ValueError, "Output MS %s already exists - will not overwrite." % outputvis
    else:
        if not splitsel:
            shutil.copytree(vis, outputvis)
        else:
            ms.open(vis, nomodify=True)
            ms.split(outputvis, spw=spw, time=timerange, whichcol='DATA')
            ms.close()

    #define and check the time ranges
    if subtime1 and (type(subtime1) == str):
        [bsubtime1, esubtime1] = subtime1.split('~')
        bsubtime1sec = qa.getvalue(qa.convert(qa.totime(bsubtime1), 's'))
        esubtime1sec = qa.getvalue(qa.convert(qa.totime(esubtime1), 's'))
        timebin1sec = esubtime1sec - bsubtime1sec
        if timebin1sec < 0:
            raise Exception, 'Negative timebin! Please check the "subtime1" parameter.'
        casalog.post('Selected timerange 1: ' + subtime1 +
                     ' as background for uv subtraction.')
    else:
        raise Exception, 'Please enter at least one timerange as the background'

    if subtime2 and (type(subtime2) == str):
        [bsubtime2, esubtime2] = subtime2.split('~')
        bsubtime2sec = qa.getvalue(qa.convert(qa.totime(bsubtime2), 's'))
        esubtime2sec = qa.getvalue(qa.convert(qa.totime(esubtime2), 's'))
        timebin2sec = esubtime2sec - bsubtime2sec
        if timebin2sec < 0:
            raise Exception, 'Negative timebin! Please check the "subtime2" parameter.'
        timebin2 = str(timebin2sec) + 's'
        casalog.post('Selected timerange 2: ' + subtime2 +
                     ' as background for uv subtraction.')
        #plus 1s is to ensure averaging over the entire timerange
    else:
        casalog.post(
            'Timerange 2 not selected, using only timerange 1 as background')

    if timerange and (type(timerange) == str):
        [btimeo, etimeo] = timerange.split('~')
        btimeosec = qa.getvalue(qa.convert(qa.totime(btimeo), 's'))
        etimeosec = qa.getvalue(qa.convert(qa.totime(etimeo), 's'))
        timebinosec = etimeosec - btimeosec
        if timebinosec < 0:
            raise Exception, 'Negative timebin! Please check the "timerange" parameter.'
        casalog.post('Selected timerange: ' + timerange +
                     ' as the time for UV subtraction.')
    else:
        casalog.post(
            'Output timerange not specified, using the entire timerange')

    if spw and (type(spw) == str):
        [spwid, chanran] = spw.split(':')
        [bchan, echan] = chanran.split('~')
    else:
        casalog.post('spw not specified, use all frequency channels')

    #Select the background indicated by subtime1
    ms.open(vis, nomodify=True)
    #Select the spw id
    ms.msselect({'time': subtime1})
    if spw and (type(spw) == str):
        ms.selectinit(datadescid=int(spwid))
        nchan = int(echan) - int(bchan) + 1
        ms.selectchannel(nchan, int(bchan), 1, 1)
    rec1 = ms.getdata(['data', 'time', 'axis_info'], ifraxis=True)
    #print 'shape of the frequency matrix ',rec1['axis_info']['freq_axis']['chan_freq'].shape
    sz1 = rec1['data'].shape
    print 'dimension of selected background 1', rec1['data'].shape
    #the data shape is (n_pol,n_channel,n_baseline,n_time), no need to reshape
    #rec1['data']=rec1['data'].reshape(sz1[0],sz1[1],sz1[2],nspw,sz1[3]/nspw,order='F')
    #print 'reshaped rec1 ', rec1['data'].shape
    rec1avg = np.average(rec1['data'], axis=3)
    casalog.post('Averaging the visibilities in subtime1: ' + subtime1)
    ms.close()
    if subtime2 and (type(subtime2) == str):
        ms.open(vis, nomodify=True)
        #Select the spw id
        ms.msselect({'time': subtime2})
        if spw and (type(spw) == str):
            ms.selectinit(datadescid=0)
            nchan = int(echan) - int(bchan) + 1
            ms.selectchannel(nchan, int(bchan), 1, 1)
        rec2 = ms.getdata(['data', 'time', 'axis_info'], ifraxis=True)
        sz2 = rec2['data'].shape
        print 'dimension of selected background 2', rec2['data'].shape
        #rec2['data']=rec2['data'].reshape(sz2[0],sz2[1],sz2[2],nspw,sz2[3]/nspw,order='F')
        #print 'reshaped rec1 ', rec2['data'].shape
        rec2avg = np.average(rec2['data'], axis=3)
        ms.close()
        casalog.post('Averaged the visibilities in subtime2: ' + subtime2)

    #do UV subtraction, according to timerange and spw
    ms.open(outputvis, nomodify=False)
    if not splitsel:
        #outputvis is identical to input visibility, do the selection
        if timerange and (type(timerange) == str):
            ms.msselect({'time': timerange})
        if spw and (type(spw) == str):
            ms.selectinit(datadescid=int(spwid))
            nchan = int(echan) - int(bchan) + 1
            ms.selectchannel(nchan, int(bchan), 1, 1)
    else:
        #outputvis is split, selections have already been applied, select all the data
        ms.selectinit(datadescid=0)
    orec = ms.getdata(['data', 'time', 'axis_info'], ifraxis=True)
    b_rows = orec['data'].shape[2]
    nchan = orec['data'].shape[1]
    #szo=orec['data'].shape
    print 'dimension of output data', orec['data'].shape
    #orec['data']=orec['data'].reshape(szo[0],szo[1],szo[2],nspw,szo[3]/nspw,order='F')
    #print 'reshaped rec1 ', orec['data'].shape
    t_rows = orec['data'].shape[3]
    casalog.post('Number of baselines: ' + str(b_rows))
    casalog.post('Number of spectral channels: ' + str(nchan))
    casalog.post('Number of time pixels: ' + str(t_rows))

    if subtime1 and (not subtime2):
        casalog.post(
            'Only "subtime1" is defined, subtracting background defined in subtime1: '
            + subtime1)
        t1 = (np.amax(rec1['time']) + np.amin(rec1['time'])) / 2.
        print 't1: ', qa.time(qa.quantity(t1, 's'), form='ymd', prec=10)
        for i in range(t_rows):
            orec['data'][:, :, :, i] -= rec1avg
            if reverse:
                orec['data'][:, :, :, i] = -orec['data'][:, :, :, i]
    if subtime1 and subtime2 and (type(subtime2) == str):
        casalog.post(
            'Both subtime1 and subtime2 are specified, doing linear interpolation between "subtime1" and "subtime2"'
        )
        t1 = (np.amax(rec1['time']) + np.amin(rec1['time'])) / 2.
        t2 = (np.amax(rec2['time']) + np.amin(rec2['time'])) / 2.
        touts = orec['time']
        print 't1: ', qa.time(qa.quantity(t1, 's'), form='ymd', prec=10)
        print 't2: ', qa.time(qa.quantity(t2, 's'), form='ymd', prec=10)
        for i in range(t_rows):
            tout = touts[i]
            if tout > np.amax([t1, t2]):
                tout = np.amax([t1, t2])
            elif tout < np.amin([t1, t2]):
                tout = np.amin([t1, t2])
            orec['data'][:, :, :,
                         i] -= (rec2avg - rec1avg) * (tout -
                                                      t1) / (t2 - t1) + rec1avg
            if reverse:
                orec['data'][:, :, :, i] = -orec['data'][:, :, :, i]

    #orec['data']=orec['data'].reshape(szo[0],szo[1],szo[2],szo[3],order='F')
    #put the modified data back into the output visibility set
    del orec['time']
    del orec['axis_info']
    ms.putdata(orec)
    ms.close()
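A hypothetical call (file names and timeranges are illustrative), following the parameter descriptions in the docstring above:

subvs(vis='sun_20131115.ms', outputvis='sun_20131115.bgsub.ms',
      timerange='2013/11/15/10:25:30~2013/11/15/10:40:38',
      spw='0:1~20',
      subtime1='2013/11/15/10:08:44~2013/11/15/10:08:47',
      subtime2='2013/11/15/10:34:33~2013/11/15/10:34:34',
      splitsel=True, reverse=False, overwrite=False)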
Example #10
    def test_readAndParseTbuff(self):
        '''flaghelper: read and parse commands, apply tbuff, and compare with the original'''
        print ''

        # MJD in seconds of timeranges are these
        # <startTime>4891227930515540000 <endTime>4891227932453838000
        # <startTime>4891228473545856000 <endTime>4891228473731891000
        # <startTime>4891226924455911000 <endTime>4891226927502314000
        # <startTime>4891228838164987000 <endTime>4891228838418996000
        # <startTime>4891228609440808000 <endTime>4891228612489617000

        online = [
            "antenna='DV03&&*' timerange='2013/11/15/10:25:30.516~2013/11/15/10:25:32.454'",
            "antenna='DA44&&*' timerange='2013/11/15/10:34:33.546~2013/11/15/10:34:33.732'",
            "antenna='DA46&&*' timerange='2013/11/15/10:08:44.456~2013/11/15/10:08:47.502'",
            "antenna='DV09&&*' timerange='2013/11/15/10:18:11.798~2013/11/15/10:18:13.837'",
            "antenna='DV05&&*' timerange='2013/11/15/10:40:38.165~2013/11/15/10:40:38.419'"
        ]

        myinput = "antenna='DV03&&*' timerange='2013/11/15/10:25:30.516~2013/11/15/10:25:32.454'\n"\
                  "antenna='DA44&&*' timerange='2013/11/15/10:34:33.546~2013/11/15/10:34:33.732'\n"\
                  "antenna='DA46&&*' timerange='2013/11/15/10:08:44.456~2013/11/15/10:08:47.502'\n"\
                  "antenna='DV09&&*' timerange='2013/11/15/10:18:11.798~2013/11/15/10:18:13.837'\n"\
                  "antenna='DV05&&*' timerange='2013/11/15/10:40:38.165~2013/11/15/10:40:38.419'"

        filename1 = 'flaghelperonline2.txt'
        create_input(myinput, filename1)

        # First timerange from online before padding
        origt = timerange = '2013/11/15/10:25:30.516~2013/11/15/10:25:32.454'

        # Apply tbuff to timeranges
        timebuffer = 1.1
        dlist1 = fh.readAndParse([filename1], tbuff=timebuffer)
        self.assertEqual(len(dlist1), 5)

        # Get the first padded timerange from output
        padt = dlist1[0]['timerange']

        # Revert the tbuff application manually
        t0, t1 = padt.split('~', 1)
        startTime = qa.totime(t0)['value']
        startTimeSec = float((startTime * 24 * 3600) + timebuffer)
        startTimeSec = qa.quantity(startTimeSec, 's')
        paddedT0 = qa.time(startTimeSec, form='ymd', prec=9)[0]
        # end time
        endTime = qa.totime(t1)['value']
        endTimeSec = float((endTime * 24 * 3600) - timebuffer)
        endTimeSec = qa.quantity(endTimeSec, 's')
        paddedT1 = qa.time(endTimeSec, form='ymd', prec=9)[0]

        newtimerange = paddedT0 + '~' + paddedT1

        # Compare with the original
        self.assertEqual(origt, newtimerange)

        # Compare with original values from Flag.xml
        xmlt0 = float(4891227930515540000) * 1.0E-9
        xmlt1 = float(4891227932453838000) * 1.0E-9

        self.assertAlmostEqual(xmlt0, startTimeSec['value'], places=3)
        self.assertAlmostEqual(xmlt1, endTimeSec['value'], places=3)
Example #11
    def test_readAndParseIrregularTbuff(self):
        '''flaghelper: read and parse commands and apply an irregular (two-valued) tbuff'''
        print ''

        # MJD in seconds of timeranges are these
        # <startTime>4891227930515540000 <endTime>4891227932453838000
        # <startTime>4891228473545856000 <endTime>4891228473731891000
        # <startTime>4891226924455911000 <endTime>4891226927502314000
        # <startTime>4891228838164987000 <endTime>4891228838418996000
        # <startTime>4891228609440808000 <endTime>4891228612489617000

        online = [
            "antenna='DV03&&*' timerange='2013/11/15/10:25:30.516~2013/11/15/10:25:32.454'",
            "antenna='DA44&&*' timerange='2013/11/15/10:34:33.546~2013/11/15/10:34:33.732'",
            "antenna='DA46&&*' timerange='2013/11/15/10:08:44.456~2013/11/15/10:08:47.502'",
            "antenna='DV09&&*' timerange='2013/11/15/10:18:11.798~2013/11/15/10:18:13.837'",
            "antenna='DV05&&*' timerange='2013/11/15/10:40:38.165~2013/11/15/10:40:38.419'"
        ]

        myinput = "antenna='DV03&&*' timerange='2013/11/15/10:25:30.516~2013/11/15/10:25:32.454'\n"\
                  "antenna='DA44&&*' timerange='2013/11/15/10:34:33.546~2013/11/15/10:34:33.732'\n"\
                  "antenna='DA46&&*' timerange='2013/11/15/10:08:44.456~2013/11/15/10:08:47.502'\n"\
                  "antenna='DV09&&*' timerange='2013/11/15/10:18:11.798~2013/11/15/10:18:13.837'\n"\
                  "antenna='DV05&&*' timerange='2013/11/15/10:40:38.165~2013/11/15/10:40:38.419'"

        filename1 = 'flaghelperonline2.txt'
        create_input(myinput, filename1)

        # timeranges from online before padding, for comparison later
        timeranges = []
        for cmd in online:
            a, b = cmd.split(' ')
            b = b.lstrip('timerange=')
            timeranges.append(b.strip("'"))

        # Apply 2 values of tbuff to timeranges
        timebuffer = [0.4, 0.7]
        dlist1 = fh.readAndParse([filename1], tbuff=timebuffer)
        self.assertEqual(len(dlist1), 5)

        # check the padded time ranges before and after the application
        n = 0
        for cmd in dlist1:
            padt = cmd['timerange']

            #        padt = dlist1[0]['timerange']

            # Revert the tbuff application manually
            t0, t1 = padt.split('~', 1)
            startTime = qa.totime(t0)['value']
            startTimeSec = float((startTime * 24 * 3600) + timebuffer[0])
            startTimeSec = qa.quantity(startTimeSec, 's')
            paddedT0 = qa.time(startTimeSec, form='ymd', prec=9)[0]
            # end time
            endTime = qa.totime(t1)['value']
            endTimeSec = float((endTime * 24 * 3600) - timebuffer[1])
            endTimeSec = qa.quantity(endTimeSec, 's')
            paddedT1 = qa.time(endTimeSec, form='ymd', prec=9)[0]

            newtimerange = paddedT0 + '~' + paddedT1

            # Compare with the original
            self.assertEqual(timeranges[n], newtimerange)
            n += 1
Example #12
                else:
                    try:
                        # meanrad might already have been converted.
                        if type(retdict[hk]) != dict:
                            retdict[hk] = {"unit": headers[hk]["unit"], "value": float(retdict[hk])}
                    except Exception, e:
                        print "Error converting header", hk, "to a Quantity."
                        print "retdict[hk] =", retdict[hk]
                        raise e
            elif hk == "GeoLong":
                long_lat_alt = retdict[hk].split(",")
                retdict["GeoLong"] = float(long_lat_alt[0])
                retdict["GeoLat"] = float(long_lat_alt[1])
                retdict["GeoDist"] = float(long_lat_alt[2])
            elif hk == "dMJD":
                retdict[hk] = qa.convert(qa.totime(retdict[hk].replace("minutes", "min")), "d")["value"]
            elif hk == "orb_per":
                unit = "h"
                retrograde = False
                if "d" in retdict[hk].lower():
                    unit = "d"  # Actually this is most common.
                if "r" in retdict[hk].lower():
                    retrograde = True
                value = get_num_from_str(retdict[hk], "orbital period")
                if value != False:
                    if retrograde and value > 0.0:
                        value = -value
                    retdict[hk] = {"unit": unit, "value": value}
                else:
                    del retdict[hk]
Example #13
def imreg(vis=None,
          ephem=None,
          msinfo=None,
          imagefile=None,
          timerange=None,
          reftime=None,
          fitsfile=None,
          beamfile=None,
          offsetfile=None,
          toTb=None,
          scl100=None,
          verbose=False,
          p_ang=False,
          overwrite=True,
          usephacenter=True,
          deletehistory=False):
    ''' 
    main routine to register CASA images
           Required Inputs:
               vis: STRING. CASA measurement set from which the image is derived
               imagefile: STRING or LIST. name of the input CASA image
               timerange: STRING or LIST. timerange used to generate the CASA image, must have the same length as the input images. 
                          Each element should be in CASA standard time format, e.g., '2012/03/03/12:00:00~2012/03/03/13:00:00'
           Optional Inputs:
               msinfo: DICTIONARY. CASA MS information, output from read_msinfo. If not provided, generate one from the supplied vis
               ephem: DICTIONARY. solar ephem, output from read_horizons. 
                      If not provided, query JPL Horizons based on time info of the vis (internet connection required)
               fitsfile: STRING or LIST. name of the output registered fits files
               reftime: STRING or LIST. Each element should be in CASA standard time format, e.g., '2012/03/03/12:00:00'
               offsetfile: optionally provide a file with a series of solar x and y offsets and timestamps
               toTb: Bool. Convert the default Jy/beam to brightness temperature?
               scl100: Bool. If True, scale the image values up by 100 (to compensate VLA 20 dB attenuator)
               verbose: Bool. Show more diagnostic info if True.
               usephacenter: Bool -- if True, correct for the RA and DEC in the ms file based on solar ephemeris.
                                     Otherwise assume the phasecenter is correctly pointed to the solar disk center
                                     (EOVSA case)
    '''
    ia = iatool()

    if deletehistory:
        msclearhistory(vis)
    if verbose:
        import time
        t0 = time.time()
        prtidx = 1
        print('point {}: {}'.format(prtidx, time.time() - t0))
        prtidx += 1

    if not imagefile:
        raise ValueError, 'Please specify input image'
    if not timerange:
        raise ValueError, 'Please specify timerange of the input image'
    if type(imagefile) == str:
        imagefile = [imagefile]
    if type(timerange) == str:
        timerange = [timerange]
    if not fitsfile:
        fitsfile = [img + '.fits' for img in imagefile]
    if type(fitsfile) == str:
        fitsfile = [fitsfile]
    nimg = len(imagefile)
    if len(timerange) != nimg:
        raise ValueError, 'Number of input images does not equal to number of timeranges!'
    if len(fitsfile) != nimg:
        raise ValueError, 'Number of input images does not equal to number of output fits files!'
    nimg = len(imagefile)
    if verbose:
        print str(nimg) + ' images to process...'

    if verbose:
        print('point {}: {}'.format(prtidx, time.time() - t0))
        prtidx += 1

    if reftime:  # use as reference time to find solar disk RA and DEC to register the image, but not the actual timerange associated with the image
        if type(reftime) == str:
            reftime = [reftime] * nimg
        if len(reftime) != nimg:
            raise ValueError, 'Number of reference times does not match that of input images!'
        helio = ephem_to_helio(vis,
                               ephem=ephem,
                               msinfo=msinfo,
                               reftime=reftime,
                               usephacenter=usephacenter)
    else:
        # use the supplied timerange to register the image
        helio = ephem_to_helio(vis,
                               ephem=ephem,
                               msinfo=msinfo,
                               reftime=timerange,
                               usephacenter=usephacenter)

    if verbose:
        print('point {}: {}'.format(prtidx, time.time() - t0))
        prtidx += 1

    for n, img in enumerate(imagefile):
        if verbose:
            print 'processing image #' + str(n)
        fitsf = fitsfile[n]
        timeran = timerange[n]
        # obtain duration of the image as FITS header exptime
        try:
            [tbg0, tend0] = timeran.split('~')
            tbg_d = qa.getvalue(qa.convert(qa.totime(tbg0), 'd'))[0]
            tend_d = qa.getvalue(qa.convert(qa.totime(tend0), 'd'))[0]
            tdur_s = (tend_d - tbg_d) * 3600. * 24.
            dateobs = qa.time(qa.quantity(tbg_d, 'd'), form='fits', prec=10)[0]
        except:
            print 'Error in converting the input timerange: ' + str(
                timeran) + '. Proceeding to the next image...'
            continue

        if verbose:
            print('point {}: {}'.format(prtidx, time.time() - t0))
            prtidx += 1

        hel = helio[n]
        if not os.path.exists(img):
            raise ValueError, 'Please specify input image'
        if os.path.exists(fitsf) and not overwrite:
            raise ValueError, 'Specified fits file already exists and overwrite is set to False. Aborting...'
        else:
            p0 = hel['p0']
            ia.open(img)
            imr = ia.rotate(pa=str(-p0) + 'deg')
            imr.tofits(fitsf, history=False, overwrite=overwrite)
            imr.close()
            imsum = ia.summary()
            ia.close()

        if verbose:
            print('point {}: {}'.format(prtidx, time.time() - t0))
            prtidx += 1

        # construct the standard fits header
        # RA and DEC of the reference pixel crpix1 and crpix2
        (imra, imdec) = (imsum['refval'][0], imsum['refval'][1])
        # find out the difference of the image center to the CASA phase center
        # RA and DEC difference in arcseconds
        ddec = degrees((imdec - hel['dec_fld'])) * 3600.
        dra = degrees((imra - hel['ra_fld']) * cos(hel['dec_fld'])) * 3600.
        # Convert into image heliocentric offsets
        prad = -radians(hel['p0'])
        dx = (-dra) * cos(prad) - ddec * sin(prad)
        dy = (-dra) * sin(prad) + ddec * cos(prad)
        if offsetfile:
            try:
                offset = np.load(offsetfile)
            except:
                raise ValueError, 'The specified offsetfile does not exist!'
            reftimes_d = offset['reftimes_d']
            xoffs = offset['xoffs']
            yoffs = offset['yoffs']
            timg_d = hel['reftime']
            ind = bisect.bisect_left(reftimes_d, timg_d)
            xoff = xoffs[ind - 1]
            yoff = yoffs[ind - 1]
        else:
            xoff = hel['refx']
            yoff = hel['refy']
        if verbose:
            print 'offset of image phase center to visibility phase center (arcsec): ', dx, dy
            print 'offset of visibility phase center to solar disk center (arcsec): ', xoff, yoff
        (crval1, crval2) = (xoff + dx, yoff + dy)
        # update the fits header to heliocentric coordinates

        if verbose:
            print('point {}: {}'.format(prtidx, time.time() - t0))
            prtidx += 1

        hdu = pyfits.open(fitsf, mode='update')

        if verbose:
            print('point {}: {}'.format(prtidx, time.time() - t0))
            prtidx += 1

        header = hdu[0].header
        (cdelt1,
         cdelt2) = (-header['cdelt1'] * 3600., header['cdelt2'] * 3600.
                    )  # Original CDELT1, 2 are for RA and DEC in degrees
        header['cdelt1'] = cdelt1
        header['cdelt2'] = cdelt2
        header['cunit1'] = 'arcsec'
        header['cunit2'] = 'arcsec'
        header['crval1'] = crval1
        header['crval2'] = crval2
        header['ctype1'] = 'HPLN-TAN'
        header['ctype2'] = 'HPLT-TAN'
        header['date-obs'] = dateobs  # begin time of the image
        if not p_ang:
            hel['p0'] = 0
        try:
            # this works for pyfits version of CASA 4.7.0 but not CASA 4.6.0
            if tdur_s:
                header.set('exptime', tdur_s)
            else:
                header.set('exptime', 1.)
            header.set('p_angle', hel['p0'])
            header.set('dsun_obs',
                       sun.sunearth_distance(Time(dateobs)).to(u.meter).value)
            header.set(
                'rsun_obs',
                sun.solar_semidiameter_angular_size(Time(dateobs)).value)
            header.set('rsun_ref', sun.constants.radius.value)
            header.set('hgln_obs', 0.)
            header.set('hglt_obs',
                       sun.heliographic_solar_center(Time(dateobs))[1].value)
        except:
            # this works for astropy.io.fits
            if tdur_s:
                header.append(('exptime', tdur_s))
            else:
                header.append(('exptime', 1.))
            header.append(('p_angle', hel['p0']))
            header.append(
                ('dsun_obs',
                 sun.sunearth_distance(Time(dateobs)).to(u.meter).value))
            header.append(
                ('rsun_obs',
                 sun.solar_semidiameter_angular_size(Time(dateobs)).value))
            header.append(('rsun_ref', sun.constants.radius.value))
            header.append(('hgln_obs', 0.))
            header.append(
                ('hglt_obs',
                 sun.heliographic_solar_center(Time(dateobs))[1].value))

        if verbose:
            print('point {}: {}'.format(prtidx, time.time() - t0))
            prtidx += 1

        # update intensity units, i.e. to brightness temperature?
        if toTb:
            # get restoring beam info
            (bmajs, bmins, bpas, beamunits,
             bpaunits) = getbeam(imagefile=imagefile, beamfile=beamfile)
            bmaj = bmajs[n]
            bmin = bmins[n]
            beamunit = beamunits[n]
            data = hdu[
                0].data  # remember the data order is reversed due to the FITS convention
            dim = data.ndim
            sz = data.shape
            keys = header.keys()
            values = header.values()
            # which axis is frequency?
            faxis = keys[values.index('FREQ')][-1]
            faxis_ind = dim - int(faxis)
            if header['BUNIT'].lower() == 'jy/beam':
                header['BUNIT'] = 'K'
                header['BTYPE'] = 'Brightness Temperature'
                for i in range(sz[faxis_ind]):
                    nu = header['CRVAL' + faxis] + header['CDELT' + faxis] * (
                        i + 1 - header['CRPIX' + faxis])
                    if header['CUNIT' + faxis] == 'KHz':
                        nu *= 1e3
                    if header['CUNIT' + faxis] == 'MHz':
                        nu *= 1e6
                    if header['CUNIT' + faxis] == 'GHz':
                        nu *= 1e9
                    if len(bmaj) > 1:  # multiple (per-plane) beams
                        bmajtmp = bmaj[i]
                        bmintmp = bmin[i]
                    else:  # one single beam
                        bmajtmp = bmaj[0]
                        bmintmp = bmin[0]
                    if beamunit == 'arcsec':
                        bmaj0 = np.radians(bmajtmp / 3600.)
                        bmin0 = np.radians(bmintmp / 3600.)
                    if beamunit == 'arcmin':
                        bmaj0 = np.radians(bmajtmp / 60.)
                        bmin0 = np.radians(bmintmp / 60.)
                    if beamunit == 'deg':
                        bmaj0 = np.radians(bmajtmp)
                        bmin0 = np.radians(bmintmp)
                    if beamunit == 'rad':
                        bmaj0 = bmajtmp
                        bmin0 = bmintmp
                    beam_area = bmaj0 * bmin0 * np.pi / (4. * log(2.))
                    k_b = qa.constants('k')['value']
                    c_l = qa.constants('c')['value']
                    factor = 2. * k_b * nu**2 / c_l**2  # SI unit
                    jy_to_si = 1e-26
                    # print nu/1e9, beam_area, factor
                    factor2 = 1.
                    if scl100:
                        factor2 = 100.
                    if faxis == '3':
                        data[:,
                             i, :, :] *= jy_to_si / beam_area / factor * factor2
                    if faxis == '4':
                        data[
                            i, :, :, :] *= jy_to_si / beam_area / factor * factor2

        if verbose:
            print('point {}: {}'.format(prtidx, time.time() - t0))
            prtidx += 1

        hdu.flush()
        hdu.close()

        if verbose:
            print('point {}: {}'.format(prtidx, time.time() - t0))
            prtidx += 1
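The toTb branch above is the Rayleigh-Jeans conversion from Jy/beam to brightness temperature. Below is a minimal, self-contained sketch of the same arithmetic; the beam size and frequency are made-up illustration values, and the physical constants are hard-coded rather than taken from qa.constants.

import numpy as np

k_b = 1.380649e-23   # Boltzmann constant [J/K]
c_l = 2.99792458e8   # speed of light [m/s]

def jyperbeam_to_K(s_jyb, bmaj_arcsec, bmin_arcsec, nu_hz):
    # solid angle of an elliptical Gaussian beam [sr]
    bmaj = np.radians(bmaj_arcsec / 3600.)
    bmin = np.radians(bmin_arcsec / 3600.)
    beam_area = bmaj * bmin * np.pi / (4. * np.log(2.))
    # Rayleigh-Jeans factor 2*k*nu^2/c^2 [W m^-2 Hz^-1 sr^-1 K^-1]
    factor = 2. * k_b * nu_hz ** 2 / c_l ** 2
    # 1 Jy = 1e-26 W m^-2 Hz^-1
    return s_jyb * 1e-26 / beam_area / factor

print(jyperbeam_to_K(1.0, 20., 20., 1.5e9))  # ~1.4e3 K for these toy numbers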
Example #14
0
def datestr_to_epoch(datestr):
    """
    Given a UT date like "2010-May-01 00:00", returns an epoch measure.
    """
    return me.epoch(rf='UTC', v0=qa.totime(datestr))
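A hypothetical usage sketch, assuming a CASA session where the me (measures) and qa (quanta) tools used above are defined:

ep = datestr_to_epoch('2010-May-01 00:00')
# Expect an epoch measure dict with refer='UTC' and m0 holding the time as a quantity in days.
print(ep)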
Example #15
0
def subvs(vis=None,
          outputvis=None,
          timerange=None,
          spw=None,
          timoffset=4,
          windowlen=10,
          windowtype='flat',
          splitsel=True,
          reverse=False,
          overwrite=False):
    """Vector-subtraction in UV using selected time ranges and spectral channels as background
    subvs is a function to do UV vector-subtraction. By selecting gliding averaging window,
    only the low-frequency signals corresponding to the background continuum emission remain.
    As a result, a uv subtraction of original dynamic spectrum from the background can improve
    fine stucture such as fibers. Subvs can be used to subtract the background
    continuum emission to separate the time-dependent emission, e.g. solar 
    coherent radio bursts.  
    
        Keyword arguments:
        vis -- Name of input visibility file
                default: none; example: vis='sun_type3.ms'
        outputvis -- Name of output visibility file
                default: none; example: outputvis='sun_type3.sub.ms'
    timerange -- Select the time range in the data to be subtracted from.
               timerange = 'YYYY/MM/DD/hh:mm:ss~YYYY/MM/DD/hh:mm:ss'
               Note: if the date part YYYY/MM/DD is missing, timerange defaults
               to the first day in the dataset
               timerange='09:14:0~09:54:0' picks 40 min on first day
               timerange='25:00:00~27:30:00' picks 1 hr to 3 hr 30min
               on next day
    spw -- Select spectral window/channel.
            default = '' all the spectral channels. Example: spw='0:1~20'
    timoffset -- After the convolution, the smoothed uv vectors of each channel
                are misaligned in time from the original data. Setting
                timoffset allows you to shift them by the specified number of data points.
    windowlen -- Choose the width of the gliding window used for smoothing.
    windowtype -- Choose the type of gliding window. Options available are 'flat',
                'hanning', 'hamming', 'bartlett', 'blackman'
    splitsel -- True or False. default = True. If splitsel = False, then the entire input 
           measurement set is copied as the output measurement set (outputvis), with 
           background subtracted at selected timerange and spectral channels. 
           If splitsel = True,then only the selected timerange and spectral channels 
           are copied into the output measurement set (outputvis).
    reverse -- True or False. default = False. If reverse = False, then the times indicated
            by subtime1 and/or subtime2 are treated as background and subtracted; If reverse
            = True, then reverse the sign of the background-subtracted data. The option can 
            be used for mapping absorptive structure.
    overwrite -- True or False. default = False. If overwrite = True and outputvis
           already exists, the selected subtime and spw in the already existing 
           output measurement set will be replaced with subtracted visibilities

        """
    # check the visibility ms
    if not outputvis or outputvis.isspace():
        raise ValueError, 'Please specify outputvis.'

    if os.path.exists(outputvis):
        if overwrite:
            print "The already existing output measurement set will be updated."
        else:
            raise ValueError, "Output MS %s already exists - will not overwrite." % outputvis
    else:
        if not splitsel:
            shutil.copytree(vis, outputvis)
        else:
            ms.open(vis, nomodify=True)
            ms.split(outputvis, spw=spw, time=timerange, whichcol='DATA')
            ms.close()
    # check and specify time range and channel
    if timerange and (type(timerange) == str):
        [btimeo, etimeo] = timerange.split('~')
        btimeosec = qa.getvalue(qa.convert(qa.totime(btimeo), 's'))
        etimeosec = qa.getvalue(qa.convert(qa.totime(etimeo), 's'))
        timebinosec = etimeosec - btimeosec
        if timebinosec < 0:
            raise Exception, 'Negative timebin! Please check the "timerange" parameter.'
        casalog.post('Selected timerange: ' + timerange +
                     ' as the time for UV subtraction.')
    else:
        casalog.post(
            'Output timerange not specified, using the entire timerange')

    if spw and (type(spw) == str):
        [spwid, chanran] = spw.split(':')
        [bchan, echan] = chanran.split('~')
        nchan = int(echan) - int(bchan) + 1
    else:
        casalog.post('spw not specified, use all frequency channels')

    #select data range to be smoothed
    ms.open(vis, nomodify=True)
    #Select the spw id
    ms.msselect({'time': timerange})
    ms.selectinit(datadescid=int(spwid))
    ms.selectchannel(nchan, int(bchan), 1, 1)
    rec = ms.getdata(['data', 'time', 'axis_info'], ifraxis=True)
    #print 'shape of the frequency matrix ',rec1['axis_info']['freq_axis']['chan_freq'].shape
    sz = rec['data'].shape
    print 'dimension of selected background for smoothing', rec['data'].shape
    #the data shape is (n_pol,n_channel,n_baseline,n_time), no need to reshape
    #rec1['data']=rec1['data'].reshape(sz1[0],sz1[1],sz1[2],nspw,sz1[3]/nspw,order='F')
    #print 'reshaped rec1 ', rec1['data'].shape
    if not (timoffset and (type(timoffset) == int)):
        timoffset = int(4)

    for i in range(rec['data'].shape[0]):
        for j in range(rec['data'].shape[1]):
            for k in range(rec['data'].shape[2]):
                rec['data'][i, j,
                            k, :] = task_smooth.smooth(rec['data'][i, j, k, :],
                                                       timoffset, windowlen,
                                                       windowtype)

    casalog.post('Smoothing the visibilities in timerange: ' + timerange)
    ms.close()

    #do UV subtraction, according to timerange and spw
    ms.open(outputvis, nomodify=False)
    if not splitsel:
        #outputvis is identical to input visibility, do the selection
        if timerange and (type(timerange) == str):
            ms.msselect({'time': timerange})
        if spw and (type(spw) == str):
            ms.selectinit(datadescid=int(spwid))
            nchan = int(echan) - int(bchan) + 1
            ms.selectchannel(nchan, int(bchan), 1, 1)
    else:
        # outputvis was split, selections have already been applied; select all the data
        ms.selectinit(datadescid=0)
    orec = ms.getdata(['data', 'time', 'axis_info'], ifraxis=True)
    b_rows = orec['data'].shape[2]
    nchan = orec['data'].shape[1]
    #szo=orec['data'].shape
    print 'dimension of output data', orec['data'].shape
    #orec['data']=orec['data'].reshape(szo[0],szo[1],szo[2],nspw,szo[3]/nspw,order='F')
    #print 'reshaped rec1 ', orec['data'].shape
    t_rows = orec['data'].shape[3]
    casalog.post('Number of baselines: ' + str(b_rows))
    casalog.post('Number of spectral channels: ' + str(nchan))
    casalog.post('Number of time pixels: ' + str(t_rows))

    casalog.post('Subtracting background defined in timerange: ' + timerange)

    for i in range(t_rows):
        orec['data'][:, :, :, i] -= rec['data'][:, :, :, i]
        if reverse:
            orec['data'][:, :, :, i] = -orec['data'][:, :, :, i]
    del orec['time']
    del orec['axis_info']
    ms.putdata(orec)
    ms.close()
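A hypothetical call to subvs, following the parameter descriptions in the docstring above; the MS names, timerange, and spw selection are placeholders, not real data sets.

subvs(vis='sun_type3.ms',
      outputvis='sun_type3.sub.ms',
      timerange='09:14:00~09:54:00',   # time range whose smoothed background is subtracted
      spw='0:1~20',                    # spw 0, channels 1-20
      timoffset=4,
      windowlen=10,
      windowtype='hanning',
      splitsel=True,
      reverse=False,
      overwrite=True)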
Example #18
0
def subvs2(vis=None,
           outputvis=None,
           timerange='',
           spw='',
           mode=None,
           subtime1=None,
           subtime2=None,
           smoothaxis=None,
           smoothtype=None,
           smoothwidth=None,
           splitsel=None,
           reverse=None,
           overwrite=None):
    """Perform vector subtraction for visibilities
    Keyword arguments:
    vis -- Name of input visibility file (MS)
            default: none; example: vis='ngc5921.ms'
    outputvis -- Name of output uv-subtracted visibility file (MS)
                  default: none; example: outputvis='ngc5921_src.ms'
    timerange -- Time range of performing the UV subtraction:
                 default='' means all times.  examples:
                 timerange = 'YYYY/MM/DD/hh:mm:ss~YYYY/MM/DD/hh:mm:ss'
                 timerange = 'hh:mm:ss~hh:mm:ss'
    spw -- Select spectral window/channel.
           default = '' all the spectral channels. Example: spw='0:1~20'
    mode -- operation mode
            default 'linear' 
                mode = 'linear': use a linear fit for the background to be subtracted
                mode = 'lowpass': act as a lowpass filter---smooth the data using different
                        smooth types and window sizes. Can be performed along either time
                        or frequency axis
                mode = 'highpass': act as a highpass filter---smooth the data first, and 
                        subtract the smoothed data from the original. Can be performed along
                        either time or frequency axis
            mode = 'linear' expandable parameters:
                subtime1 -- Time range 1 of the background to be subtracted from the data 
                             default='' means all times.  format:
                             timerange = 'YYYY/MM/DD/hh:mm:ss~YYYY/MM/DD/hh:mm:ss'
                             timerange = 'hh:mm:ss~hh:mm:ss'
                subtime2 -- Time range 2 of the background to be subtracted from the data
                             default='' means all times.  examples:
                             timerange = 'YYYY/MM/DD/hh:mm:ss~YYYY/MM/DD/hh:mm:ss'
                             timerange = 'hh:mm:ss~hh:mm:ss'
            mode = 'lowpass' or 'highpass' expandable parameters:
                smoothaxis -- axis of smooth
                    Default: 'time'
                    smoothaxis = 'time': smooth is along the time axis
                    smoothaxis = 'freq': smooth is along the frequency axis
                smoothtype -- type of the smooth depending on the convolving kernel
                    Default: 'flat'
                    smoothtype = 'flat': convolving kernel is a flat rectangle,
                            equivalent to a boxcar moving smooth
                    smoothtype = 'hanning': Hanning smooth kernel. See numpy.hanning
                    smoothtype = 'hamming': Hamming smooth kernel. See numpy.hamming
                    smoothtype = 'bartlett': Bartlett smooth kernel. See numpy.bartlett
                    smoothtype = 'blackman': Blackman smooth kernel. See numpy.blackman
                smoothwidth -- width of the smooth kernel
                    Default: 5
                    Examples: smoothwidth=5, meaning the width is 5 pixels
    splitsel -- True or False. default = False. If splitsel = False, then the entire input
            measurement set is copied as the output measurement set (outputvis), with 
            background subtracted at selected timerange and spectral channels. 
            If splitsel = True,then only the selected timerange and spectral channels 
            are copied into the output measurement set (outputvis).
    reverse -- True or False. default = False. If reverse = False, then the times indicated
            by subtime1 and/or subtime2 are treated as background and subtracted; If reverse
            = True, then reverse the sign of the background-subtracted data. The option can 
            be used for mapping absorptive structure.
    overwrite -- True or False. default = False. If overwrite = True and
                outputvis already exists, the selected subtime and spw in the 
                output measurement set will be replaced with background subtracted 
                visibilities

    """
    # check the visibility ms
    casalog.post('input parameters:')
    casalog.post('vis: ' + vis)
    casalog.post('outputvis: ' + outputvis)
    casalog.post('smoothaxis: ' + smoothaxis)
    casalog.post('smoothtype: ' + smoothtype)
    casalog.post('smoothwidth: ' + str(smoothwidth))
    if not outputvis or outputvis.isspace():
        raise ValueError('Please specify outputvis')

    if os.path.exists(outputvis):
        if overwrite:
            print(
                "The already existing output measurement set will be updated.")
        else:
            raise ValueError(
                "Output MS %s already exists - will not overwrite." %
                outputvis)
    else:
        if not splitsel:
            shutil.copytree(vis, outputvis)
        else:
            ms.open(vis, nomodify=True)
            ms.split(outputvis, spw=spw, time=timerange, whichcol='DATA')
            ms.close()

    if timerange and (type(timerange) == str):
        [btimeo, etimeo] = timerange.split('~')
        btimeosec = qa.getvalue(qa.convert(qa.totime(btimeo), 's'))
        etimeosec = qa.getvalue(qa.convert(qa.totime(etimeo), 's'))
        timebinosec = etimeosec - btimeosec
        if timebinosec < 0:
            raise Exception(
                'Negative timebin! Please check the "timerange" parameter.')
        casalog.post('Selected timerange: ' + timerange +
                     ' as the time for UV subtraction.')
    else:
        casalog.post(
            'Output timerange not specified, using the entire timerange')

    if spw and (type(spw) == str):
        spwlist = spw.split(';')
    else:
        casalog.post('spw not specified, use all frequency channels')

    # read the output data
    datams = mstool()
    datams.open(outputvis, nomodify=False)
    datamsmd = msmdtool()
    datamsmd.open(outputvis)
    spwinfod = datams.getspectralwindowinfo()
    spwinfok = sorted(spwinfod.keys(), key=int)
    spwinfol = [spwinfod[k] for k in spwinfok]
    for s, spi in enumerate(spwinfol):
        print('processing spectral window {}'.format(spi['SpectralWindowId']))
        datams.selectinit(reset=True)
        staql = {'time': '', 'spw': ''}
        if not splitsel:
            # outputvis is identical to input visibility, do the selection
            if timerange and (type(timerange) == str):
                staql['time'] = timerange
            if spw and (type(spw) == str):
                staql['spw'] = spwlist[s]
            if not spw and not timerange:
                # data selection is not made
                print('selecting all spws and times')
                staql['spw'] = str(spi['SpectralWindowId'])
        else:
            # outputvis was split, selections have already been applied; select all the data
            print('split the selected spws and times')
            staql['spw'] = str(spi['SpectralWindowId'])
        datams.msselect(staql)
        orec = datams.getdata(['data', 'time', 'axis_info'], ifraxis=True)
        npol, nchan, nbl, ntim = orec['data'].shape
        print('dimension of output data', orec['data'].shape)
        casalog.post('Number of baselines: ' + str(nbl))
        casalog.post('Number of spectral channels: ' + str(nchan))
        casalog.post('Number of time pixels: ' + str(ntim))

        try:
            if mode == 'linear':
                # define and check the background time ranges
                if subtime1 and (type(subtime1) == str):
                    [bsubtime1, esubtime1] = subtime1.split('~')
                    bsubtime1sec = qa.getvalue(
                        qa.convert(qa.totime(bsubtime1), 's'))
                    esubtime1sec = qa.getvalue(
                        qa.convert(qa.totime(esubtime1), 's'))
                    timebin1sec = esubtime1sec - bsubtime1sec
                    if timebin1sec < 0:
                        raise Exception(
                            'Negative timebin! Please check the "subtime1" parameter.'
                        )
                    casalog.post('Selected timerange 1: ' + subtime1 +
                                 ' as background for uv subtraction.')
                else:
                    raise Exception(
                        'Please enter at least one timerange as the background'
                    )
                if subtime2 and (type(subtime2) == str):
                    [bsubtime2, esubtime2] = subtime2.split('~')
                    bsubtime2sec = qa.getvalue(
                        qa.convert(qa.totime(bsubtime2), 's'))
                    esubtime2sec = qa.getvalue(
                        qa.convert(qa.totime(esubtime2), 's'))
                    timebin2sec = esubtime2sec - bsubtime2sec
                    if timebin2sec < 0:
                        raise Exception(
                            'Negative timebin! Please check the "subtime2" parameter.'
                        )
                    timebin2 = str(timebin2sec) + 's'
                    casalog.post('Selected timerange 2: ' + subtime2 +
                                 ' as background for uv subtraction.')
                    # plus 1s is to ensure averaging over the entire timerange
                else:
                    casalog.post(
                        'Timerange 2 not selected, using only timerange 1 as background'
                    )

                # Select the background indicated by subtime1
                ms.open(vis, nomodify=True)
                # Select the spw id
                # ms.msselect({'time': subtime1})
                staql0 = {'time': subtime1, 'spw': ''}
                if spw and (type(spw) == str):
                    staql0['spw'] = spwlist[s]
                else:
                    staql0['spw'] = staql['spw']
                ms.msselect(staql0)
                rec1 = ms.getdata(['data', 'time', 'axis_info'], ifraxis=True)
                # print('shape of the frequency matrix ',rec1['axis_info']['freq_axis']['chan_freq'].shape)
                sz1 = rec1['data'].shape
                print('dimension of selected background 1', rec1['data'].shape)
                # the data shape is (n_pol,n_channel,n_baseline,n_time), no need to reshape
                # rec1['data']=rec1['data'].reshape(sz1[0],sz1[1],sz1[2],nspw,sz1[3]/nspw,order='F')
                # print('reshaped rec1 ', rec1['data'].shape)
                rec1avg = np.average(rec1['data'], axis=3)
                casalog.post('Averaging the visibilities in subtime1: ' +
                             subtime1)
                ms.close()
                if subtime2 and (type(subtime2) == str):
                    ms.open(vis, nomodify=True)
                    # Select the spw id
                    staql0 = {'time': subtime2, 'spw': ''}
                    if spw and (type(spw) == str):
                        staql0['spw'] = spwlist[s]
                    else:
                        staql0['spw'] = staql['spw']
                    ms.msselect(staql0)
                    rec2 = ms.getdata(['data', 'time', 'axis_info'],
                                      ifraxis=True)
                    sz2 = rec2['data'].shape
                    print('dimension of selected background 2',
                          rec2['data'].shape)
                    # rec2['data']=rec2['data'].reshape(sz2[0],sz2[1],sz2[2],nspw,sz2[3]/nspw,order='F')
                    # print('reshaped rec1 ', rec2['data'].shape)
                    rec2avg = np.average(rec2['data'], axis=3)
                    ms.close()
                    casalog.post('Averaged the visibilities in subtime2: ' +
                                 subtime2)
                if subtime1 and (not subtime2):
                    casalog.post(
                        'Only "subtime1" is defined, subtracting background defined in subtime1: '
                        + subtime1)
                    t1 = (np.amax(rec1['time']) + np.amin(rec1['time'])) / 2.
                    print('t1: ',
                          qa.time(qa.quantity(t1, 's'), form='ymd', prec=10))
                    for i in range(ntim):
                        orec['data'][:, :, :, i] -= rec1avg
                        if reverse:
                            orec['data'][:, :, :,
                                         i] = -orec['data'][:, :, :, i]
                if subtime1 and subtime2 and (type(subtime2) == str):
                    casalog.post(
                        'Both subtime1 and subtime2 are specified, doing linear interpolation between "subtime1" and "subtime2"'
                    )
                    t1 = (np.amax(rec1['time']) + np.amin(rec1['time'])) / 2.
                    t2 = (np.amax(rec2['time']) + np.amin(rec2['time'])) / 2.
                    touts = orec['time']
                    print('t1: ',
                          qa.time(qa.quantity(t1, 's'), form='ymd', prec=10))
                    print('t2: ',
                          qa.time(qa.quantity(t2, 's'), form='ymd', prec=10))
                    for i in range(ntim):
                        tout = touts[i]
                        if tout > np.amax([t1, t2]):
                            tout = np.amax([t1, t2])
                        elif tout < np.amin([t1, t2]):
                            tout = np.amin([t1, t2])
                        orec['data'][:, :, :, i] -= (rec2avg - rec1avg) * (
                            tout - t1) / (t2 - t1) + rec1avg
                        if reverse:
                            orec['data'][:, :, :,
                                         i] = -orec['data'][:, :, :, i]
            elif mode == 'highpass':
                if smoothtype not in ('flat', 'hanning', 'hamming', 'bartlett', 'blackman'):
                    raise Exception('Unknown smoothtype ' + str(smoothtype))
                if smoothaxis == 'time':
                    if smoothwidth <= 0 or smoothwidth >= ntim:
                        raise Exception(
                            'Specified smooth width is <=0 or >= the total number of '
                            + smoothaxis)
                    else:
                        for i in range(orec['data'].shape[0]):
                            for j in range(nchan):
                                for k in range(nbl):
                                    orec['data'][i, j,
                                                 k, :] -= signalsmooth.smooth(
                                                     orec['data'][i, j, k, :],
                                                     smoothwidth, smoothtype)
                if smoothaxis == 'freq':
                    if smoothwidth <= 0 or smoothwidth >= nchan:
                        raise Exception(
                            'Specified smooth width is <=0 or >= the total number of '
                            + smoothaxis)
                    else:
                        for i in range(orec['data'].shape[0]):
                            for j in range(nbl):
                                for k in range(ntim):
                                    orec['data'][i, :, j,
                                                 k] -= signalsmooth.smooth(
                                                     orec['data'][i, :, j, k],
                                                     smoothwidth, smoothtype)
            elif mode == 'lowpass':
                if smoothtype not in ('flat', 'hanning', 'hamming', 'bartlett', 'blackman'):
                    raise Exception('Unknown smoothtype ' + str(smoothtype))
                if smoothaxis == 'time':
                    if smoothwidth <= 0 or smoothwidth >= ntim:
                        raise Exception(
                            'Specified smooth width is <=0 or >= the total number of '
                            + smoothaxis)
                    else:
                        for i in range(orec['data'].shape[0]):
                            for j in range(nchan):
                                for k in range(nbl):
                                    orec['data'][i, j,
                                                 k, :] = signalsmooth.smooth(
                                                     orec['data'][i, j, k, :],
                                                     smoothwidth, smoothtype)
                if smoothaxis == 'freq':
                    if smoothwidth <= 0 or smoothwidth >= nchan:
                        raise Exception(
                            'Specified smooth width is <=0 or >= the total number of '
                            + smoothaxis)
                    else:
                        for i in range(orec['data'].shape[0]):
                            for j in range(nbl):
                                for k in range(ntim):
                                    orec['data'][i, :, j,
                                                 k] = signalsmooth.smooth(
                                                     orec['data'][i, :, j, k],
                                                     smoothwidth, smoothtype)
            else:
                raise Exception('Unknown mode ' + str(mode))
        except Exception as instance:
            print('*** Error ***', instance)

        # orec['data']=orec['data'].reshape(szo[0],szo[1],szo[2],szo[3],order='F')
        # put the modified data back into the output visibility set
        del orec['time']
        del orec['axis_info']
        # ms.open(outputvis,nomodify=False)
        # if not splitsel:
        # outputvis is identical to input visibility, do the selection
        #    if timerange and (type(timerange==str)):
        #        datams.msselect({'time':timerange})
        #    if spw and (type(spw)==str):
        #        datams.selectinit(datadescid=int(spwid))
        #        nchan=int(echan)-int(bchan)+1
        #        datams.selectchannel(nchan,int(bchan),1,1)
        #    if not spw and not timerange:
        # data selection is not made
        #        datams.selectinit(datadescid=0)
        # else:
        # outputvis is splitted, selections have already applied, select all the data
        #    datams.selectinit(datadescid=0)
        datams.putdata(orec)
    datams.close()
    datamsmd.done()
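Two hypothetical calls to subvs2, following the docstring above; file names, time ranges, and channel selections are placeholders. The first uses mode='linear' to interpolate a background between two time ranges, the second uses mode='highpass' to remove the slowly varying component along the time axis.

subvs2(vis='sun.ms', outputvis='sun.linsub.ms', mode='linear',
       timerange='18:10:00~18:20:00', spw='0:1~20',
       subtime1='18:08:00~18:09:00', subtime2='18:21:00~18:22:00',
       smoothaxis='time', smoothtype='flat', smoothwidth=5,
       splitsel=True, reverse=False, overwrite=True)

subvs2(vis='sun.ms', outputvis='sun.hpf.ms', mode='highpass',
       timerange='18:10:00~18:20:00', spw='0:1~20',
       subtime1='', subtime2='',
       smoothaxis='time', smoothtype='hanning', smoothwidth=9,
       splitsel=True, reverse=False, overwrite=True)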
Example #19
0
                 retdict[hk] = {
                     'unit': headers[hk]['unit'],
                     'value': float(retdict[hk])
                 }
         except Exception, e:
             print "Error converting header", hk, "to a Quantity."
             print "retdict[hk] =", retdict[hk]
             raise e
 elif hk == 'GeoLong':
     long_lat_alt = retdict[hk].split(',')
     retdict['GeoLong'] = float(long_lat_alt[0])
     retdict['GeoLat'] = float(long_lat_alt[1])
     retdict['GeoDist'] = float(long_lat_alt[2])
 elif hk == 'dMJD':
     retdict[hk] = qa.convert(
         qa.totime(retdict[hk].replace('minutes', 'min')),
         'd')['value']
 elif hk == 'orb_per':
     unit = 'h'
     retrograde = False
     if 'd' in retdict[hk].lower():
         unit = 'd'  # Actually this is most common.
     if 'r' in retdict[hk].lower():
         retrograde = True
     value = get_num_from_str(retdict[hk], 'orbital period')
     if value != False:
         if retrograde and value > 0.0:
             value = -value
         retdict[hk] = {'unit': unit, 'value': value}
     else:
         del retdict[hk]
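The dMJD branch above turns a Horizons step-size string into days with CASA's quanta tool. A hypothetical illustration, assuming a CASA session where qa is defined; the input string is a made-up step size.

step = '30 minutes'
step_days = qa.convert(qa.totime(step.replace('minutes', 'min')), 'd')['value']
print(step_days)   # expected ~0.0208 days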
Example #20
0
def ephem_to_helio(vis=None,
                   ephem=None,
                   msinfo=None,
                   reftime=None,
                   polyfit=None,
                   usephacenter=False):
    '''1. Take a solar ms database, read the scan and field information, find out the pointings (in RA and DEC)
       2. Compare with the ephemeris of the solar disk center (in RA and DEC)
       3. Generate VLA pointings in heliocentric coordinates
       inputs:
           msinfo: CASA MS information, output from read_msinfo
           ephem: solar ephem, output from read_horizons
           reftime: list of reference times (e.g., used for imaging)
                    CASA standard time format, either a single time (e.g., '2012/03/03/12:00:00'
                    or a time range (e.g., '2012/03/03/12:00:00~2012/03/03/13:00:00'. If the latter,
                    take the midpoint of the timerange for reference. If no date specified, take
                    the date of the first scan
           polyfit: ONLY works for MS database with only one source with continously tracking;
                    not recommanded unless scan length is too long and want to have very high accuracy
           usephacenter: Bool -- if True, correct for the RA and DEC in the ms file based on solar empheris.
                                 Otherwise assume the phasecenter is correctly pointed to the solar disk center
                                 (EOVSA case)

       return value:
           helio: a list of VLA pointing information
                   reftimestr: reference time, in FITS format string
                   reftime: reference time, in mjd format
                   ra: actual RA of phasecenter in the ms file at the reference time (interpolated)
                   dec: actual DEC of phasecenter in the ms file at the reference time (interpolated)
                   # CASA uses only RA and DEC of the closest field (e.g. in clean) #
                    ra_fld: right ascension of the CASA reference pointing direction
                   dec_fld: declination of the CASA reference pointing direction
                   raoff: RA offset of the phasecenter in the ms file to solar center
                   decoff: DEC offset of the phasecenter in the ms file to solar center
                   refx: heliocentric X offset of the phasecenter in the ms file to solar center
                   refy: heliocentric Y offset of the phasecenter in the ms file to solar center
    ######## Example #########
         msfile='sun_C_20140910T221952-222952.10s.cal.ms'
         ephemfile='horizons_sun_20140910.radecp'
         ephem=vla_prep.read_horizons(ephemfile=ephemfile)
         msinfo=vla_prep.read_msinfo(msfile=msfile)
         polyfit=0
         reftime = '22:25:20~22:25:40'
    '''
    if not vis or not os.path.exists(vis):
        raise ValueError('Please provide information of the MS database!')
    if not ephem:
        ephem = read_horizons(vis=vis)
    if not msinfo:
        msinfo0 = read_msinfo(vis)
    else:
        if isinstance(msinfo, str):
            try:
                msinfo0 = np.load(msinfo)
            except:
                raise ValueError(
                    'The specified input msinfo file does not exist!')
        elif isinstance(msinfo, dict):
            msinfo0 = msinfo
        else:
            raise ValueError(
                'msinfo should be either a numpy npz or a dictionary')
    print('msinfo is derived from: {0:s}'.format(msinfo0['vis']))
    scans = msinfo0['scans']
    fieldids = msinfo0['fieldids']
    btimes = msinfo0['btimes']
    inttimes = msinfo0['inttimes']
    ras = msinfo0['ras']
    decs = msinfo0['decs']
    if 'observatory' in msinfo0.keys():
        if msinfo0['observatory'] == 'EOVSA':
            usephacenter = False
    if type(ras) is list:
        ra_rads = [ra['value'] for ra in ras]
    elif type(ras) is dict:
        ra_rads = ras['value']
    else:
        print('Type of msinfo0["ras"] unrecognized.')
        return 0
    if type(decs) is list:
        dec_rads = [dec['value'] for dec in decs]
    elif type(decs) is dict:
        dec_rads = decs['value']
    else:
        print('Type of msinfo0["decs"] unrecognized.')
    # fit 2nd order polynomial fits to the RAs and DECs #
    if polyfit:
        cra = np.polyfit(btimes, ra_rads, 2)
        cdec = np.polyfit(btimes, dec_rads, 2)

    # find out phase center information in ms according to the input time or timerange #
    if not reftime:
        raise ValueError('Please specify a reference time of the image')
    if type(reftime) == str:
        reftime = [reftime]
    if (not isinstance(reftime, list)):
        print('input "reftime" is not a valid list. Abort...')

    nreftime = len(reftime)
    helio = []
    for reftime0 in reftime:
        helio0 = dict.fromkeys([
            'reftimestr', 'reftime', 'ra', 'dec', 'ra_fld', 'dec_fld', 'raoff',
            'decoff', 'refx', 'refy', 'p0'
        ])
        helio0['reftimestr'] = reftime0
        if '~' in reftime0:
            # if reftime0 is specified as a timerange
            try:
                [tbg0, tend0] = reftime0.split('~')
                tbg_d = qa.getvalue(qa.convert(qa.totime(tbg0), 'd'))[0]
                tend_d = qa.getvalue(qa.convert(qa.totime(tend0), 'd'))[0]
                tdur_s = (tend_d - tbg_d) * 3600. * 24.
                # if no date is specified, add up the date of the first scan
                if tend_d < 1.:
                    if tend_d >= tbg_d:
                        tend_d += int(btimes[0])
                    else:
                        tend_d += int(btimes[0]) + 1
                if tbg_d < 1.:
                    tbg_d += int(btimes[0])
                tref_d = (tbg_d + tend_d) / 2.
            except:
                print('Error in converting the input reftime: ' +
                      str(reftime0) + '. Aborting...')
        else:
            # if reftime0 is specified as a single value
            try:
                tref_d = qa.getvalue(qa.convert(qa.totime(reftime0), 'd'))[0]
                # if no date is specified, add up the date of the first scan
                if tref_d < 1.:
                    tref_d += int(btimes[0])
                tbg_d = tref_d
                # use the integration time
                # ind = bisect.bisect_left(btimes, tref_d)
                # if msinfo0['etimes']:
                #    tdur_s = inttimes[ind - 1]
                # else:
                #    tdur_s = np.mean(inttimes)
                tdur_s = 1.
            except:
                print('Error in converting the input reftime: ' +
                      str(reftime0) + '. Aborting...')
        helio0['reftime'] = tref_d
        # helio0['date-obs'] = qa.time(qa.quantity(tbg_d, 'd'), form='fits', prec=10)[0]
        # helio0['exptime'] = tdur_s

        # find out phase center RA and DEC in the measurement set according to the reference time
        # if polyfit, then use the 2nd order polynomial coeffs
        inttime = np.nanmean(inttimes)
        ind = bisect.bisect_left(btimes, tref_d)
        if ind > 1:
            dt = tref_d - btimes[ind - 1]
            if ind < len(btimes):
                scanlen = btimes[ind] - btimes[ind - 1]
                (ra_b, ra_e) = (ra_rads[ind - 1], ra_rads[ind])
                (dec_b, dec_e) = (dec_rads[ind - 1], dec_rads[ind])
            if ind >= len(btimes):
                scanlen = btimes[ind - 1] - btimes[ind - 2]
                (ra_b, ra_e) = (ra_rads[ind - 2], ra_rads[ind - 1])
                (dec_b, dec_e) = (dec_rads[ind - 2], dec_rads[ind - 1])
        if ind == 1:  # only one scan exists (e.g., imported from AIPS)
            ra_b = ra_rads[ind - 1]
            ra_e = ra_b
            dec_b = dec_rads[ind - 1]
            dec_e = dec_b
            scanlen = 10.  # random value
            dt = 0.
        if ind < 1:
            if np.abs((tref_d - btimes[0]) * 24 * 3600) < inttime / 2.0:
                ind = 1
                ra_b = ra_rads[ind - 1]
                ra_e = ra_b
                dec_b = dec_rads[ind - 1]
                dec_e = dec_b
                scanlen = 10.  # random value
                dt = 0.
            else:
                raise ValueError(
                    'Reference time does not fall into the scan list!')
        if polyfit:
            ra = cra[0] * tref_d**2. + cra[1] * tref_d + cra[2]
            dec = cdec[0] * tref_d**2. + cdec[1] * tref_d + cdec[2]
        # if not, use linearly interpolated RA and DEC at the beginning of this scan and next scan
        else:
            ra = ra_b + (ra_e - ra_b) / scanlen * dt
            dec = dec_b + (dec_e - dec_b) / scanlen * dt
        if ra < 0:
            ra += 2. * np.pi
        if ra_b < 0:
            ra_b += 2. * np.pi

        # compare with ephemeris from JPL Horizons
        time0 = ephem['time']
        ra0 = ephem['ra']
        dec0 = ephem['dec']
        p0 = ephem['p0']
        delta0 = ephem['delta']
        if len(time0) > 1:
            ind = bisect.bisect_left(time0, tref_d)
            dt0 = time0[ind] - time0[ind - 1]
            dt_ref = tref_d - time0[ind - 1]
            dra0 = ra0[ind] - ra0[ind - 1]
            ddec0 = dec0[ind] - dec0[ind - 1]
            dp0 = p0[ind] - p0[ind - 1]
            ddelta0 = delta0[ind] - delta0[ind - 1]
            ra0 = ra0[ind - 1] + dra0 / dt0 * dt_ref
            dec0 = dec0[ind - 1] + ddec0 / dt0 * dt_ref
            p0 = p0[ind - 1] + dp0 / dt0 * dt_ref
            delta0 = delta0[ind - 1] + ddelta0 / dt0 * dt_ref
        else:
            try:
                ra0 = ra0[0]
                dec0 = dec0[0]
                p0 = p0[0]
                delta0 = delta0[0]
            except:
                print("Error in retrieving info from ephemeris!")
        if ra0 < 0:
            ra0 += 2. * np.pi

        # RA and DEC offset in arcseconds
        decoff = degrees((dec - dec0)) * 3600.
        raoff = degrees((ra - ra0) * cos(dec)) * 3600.
        # Convert into heliocentric offsets
        prad = -radians(p0)
        refx = (-raoff) * cos(prad) - decoff * sin(prad)
        refy = (-raoff) * sin(prad) + decoff * cos(prad)
        helio0['ra'] = ra  # ra of the actual pointing
        helio0['dec'] = dec  # dec of the actual pointing
        helio0['ra_fld'] = ra_b  # ra of the field, used as the reference in e.g., clean
        helio0['dec_fld'] = dec_b  # dec of the field, used as the reference in e.g., clean
        helio0['raoff'] = raoff
        helio0['decoff'] = decoff
        if usephacenter:
            helio0['refx'] = refx
            helio0['refy'] = refy
        else:
            helio0['refx'] = 0.
            helio0['refy'] = 0.
        helio0['p0'] = p0
        # helio['r_sun']=np.degrees(R_sun.value/(au.value*delta0))*3600. #in arcsecs
        helio.append(helio0)
    return helio
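The last step of ephem_to_helio rotates the (RA, DEC) offsets by the solar P angle into heliocentric (X, Y) offsets. Below is a stand-alone sketch of that rotation; the offsets and P angle are made-up illustration values.

from math import cos, sin, radians

raoff, decoff = 30., -10.   # offsets of the phase center from disk center [arcsec]
p0 = 15.                    # solar P angle [deg]

prad = -radians(p0)
refx = (-raoff) * cos(prad) - decoff * sin(prad)
refy = (-raoff) * sin(prad) + decoff * cos(prad)
print(refx, refy)           # heliocentric west/north offsets [arcsec]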
Example #21
0
def imreg(vis=None,
          ephem=None,
          msinfo=None,
          imagefile=None,
          timerange=None,
          reftime=None,
          fitsfile=None,
          beamfile=None,
          offsetfile=None,
          toTb=None,
          sclfactor=1.0,
          verbose=False,
          p_ang=False,
          overwrite=True,
          usephacenter=True,
          deletehistory=False,
          subregion=[],
          docompress=False):
    ''' 
    main routine to register CASA images
           Required Inputs:
               vis: STRING. CASA measurement set from which the image is derived
               imagefile: STRING or LIST. name of the input CASA image
               timerange: STRING or LIST. timerange used to generate the CASA image, must have the same length as the input images. 
                          Each element should be in CASA standard time format, e.g., '2012/03/03/12:00:00~2012/03/03/13:00:00'
           Optional Inputs:
               msinfo: DICTIONARY. CASA MS information, output from read_msinfo. If not provided, generate one from the supplied vis
               ephem: DICTIONARY. solar ephem, output from read_horizons. 
                      If not provided, query JPL Horizons based on time info of the vis (internet connection required)
               fitsfile: STRING or LIST. name of the output registered fits files
               reftime: STRING or LIST. Each element should be in CASA standard time format, e.g., '2012/03/03/12:00:00'
               offsetfile: optionally provide an offset with a series of solar x and y offsets with timestamps 
               toTb: Bool. Convert the default Jy/beam to brightness temperature?
               sclfactor: scale the image values up by this factor (to compensate for the VLA 20 dB attenuator)
               verbose: Bool. Show more diagnostic info if True.
               usephacenter: Bool -- if True, correct the RA and DEC in the ms file based on the solar ephemeris.
                                     Otherwise assume the phasecenter is correctly pointed to the solar disk center
                                     (EOVSA case)
               subregion: Region selection. See 'help par.region' for details.
    Usage:
    >>> from suncasa.utils import helioimage2fits as hf
    >>> hf.imreg(vis='mydata.ms', imagefile='myimage.image', fitsfile='myimage.fits',
                 timerange='2017/08/21/20:21:10~2017/08/21/20:21:18')
    The output fits file is 'myimage.fits'

    History:
    BC (sometime in 2014): function was first written, followed by a number of edits by BC and SY
    BC (2019-07-16): Added checks for stokes parameter. Verified that for converting from Jy/beam to brightness temperature,
                     the convention of 2*k_b*T should always be used. I.e., for unpolarized source, stokes I, RR, LL, XX, YY, 
                     etc. in the output CASA images from (t)clean should all have same values of radio intensity 
                     (in Jy/beam) and brightness temperature (in K).

    '''

    if deletehistory:
        ms_clearhistory(vis)

    if not imagefile:
        raise ValueError('Please specify input image')
    if not timerange:
        raise ValueError('Please specify timerange of the input image')
    if type(imagefile) == str:
        imagefile = [imagefile]
    if type(timerange) == str:
        timerange = [timerange]
    if not fitsfile:
        fitsfile = [img + '.fits' for img in imagefile]
    if type(fitsfile) == str:
        fitsfile = [fitsfile]
    nimg = len(imagefile)
    if len(timerange) != nimg:
        raise ValueError(
            'Number of input images does not equal the number of timeranges!')
    if len(fitsfile) != nimg:
        raise ValueError(
            'Number of input images does not equal the number of output fits files!'
        )
    nimg = len(imagefile)
    if verbose:
        print(str(nimg) + ' images to process...')

    if reftime:  # use as reference time to find solar disk RA and DEC to register the image, but not the actual timerange associated with the image
        if type(reftime) == str:
            reftime = [reftime] * nimg
        if len(reftime) != nimg:
            raise ValueError(
                'Number of reference times does not match that of input images!'
            )
        helio = ephem_to_helio(vis,
                               ephem=ephem,
                               msinfo=msinfo,
                               reftime=reftime,
                               usephacenter=usephacenter)
    else:
        # use the supplied timerange to register the image
        helio = ephem_to_helio(vis,
                               ephem=ephem,
                               msinfo=msinfo,
                               reftime=timerange,
                               usephacenter=usephacenter)

    if toTb:
        (bmajs, bmins, bpas, beamunits,
         bpaunits) = getbeam(imagefile=imagefile, beamfile=beamfile)

    for n, img in enumerate(imagefile):
        if verbose:
            print('processing image #' + str(n) + ' ' + img)
        fitsf = fitsfile[n]
        timeran = timerange[n]
        # obtain duration of the image as FITS header exptime
        try:
            [tbg0, tend0] = timeran.split('~')
            tbg_d = qa.getvalue(qa.convert(qa.totime(tbg0), 'd'))[0]
            tend_d = qa.getvalue(qa.convert(qa.totime(tend0), 'd'))[0]
            tdur_s = (tend_d - tbg_d) * 3600. * 24.
            dateobs = qa.time(qa.quantity(tbg_d, 'd'), form='fits', prec=10)[0]
        except:
            print('Error in converting the input timerange: ' + str(timeran) +
                  '. Proceeding to the next image...')
            continue

        hel = helio[n]
        if not os.path.exists(img):
            warnings.warn('{} does not exist!'.format(img))
        else:
            if os.path.exists(fitsf) and not overwrite:
                raise ValueError(
                    'Specified fits file already exists and overwrite is set to False. Aborting...'
                )
            else:
                p0 = hel['p0']
                tb.open(img + '/logtable', nomodify=False)
                nobs = tb.nrows()
                tb.removerows([i + 1 for i in range(nobs - 1)])
                tb.close()
                ia.open(img)
                imr = ia.rotate(pa=str(-p0) + 'deg')
                if subregion != []:
                    imr = imr.subimage(region=subregion)
                imr.tofits(fitsf, history=False, overwrite=overwrite)
                imr.close()
                imsum = ia.summary()
                ia.close()
                ia.done()

            # construct the standard fits header
            # RA and DEC of the reference pixel crpix1 and crpix2
            (imra, imdec) = (imsum['refval'][0], imsum['refval'][1])
            # find out the difference of the image center to the CASA phase center
            # RA and DEC difference in arcseconds
            ddec = degrees((imdec - hel['dec_fld'])) * 3600.
            dra = degrees((imra - hel['ra_fld']) * cos(hel['dec_fld'])) * 3600.
            # Convert into image heliocentric offsets
            prad = -radians(hel['p0'])
            dx = (-dra) * cos(prad) - ddec * sin(prad)
            dy = (-dra) * sin(prad) + ddec * cos(prad)
            if offsetfile:
                try:
                    offset = np.load(offsetfile)
                except:
                    raise ValueError(
                        'The specified offsetfile does not exist!')
                reftimes_d = offset['reftimes_d']
                xoffs = offset['xoffs']
                yoffs = offset['yoffs']
                timg_d = hel['reftime']
                ind = bisect.bisect_left(reftimes_d, timg_d)
                xoff = xoffs[ind - 1]
                yoff = yoffs[ind - 1]
            else:
                xoff = hel['refx']
                yoff = hel['refy']
            if verbose:
                print(
                    'offset of image phase center to visibility phase center (arcsec): dx={0:.2f}, dy={1:.2f}'
                    .format(dx, dy))
                print(
                    'offset of visibility phase center to solar disk center (arcsec): dx={0:.2f}, dy={1:.2f}'
                    .format(xoff, yoff))
            (crval1, crval2) = (xoff + dx, yoff + dy)
            # update the fits header to heliocentric coordinates

            hdu = pyfits.open(fitsf, mode='update')
            hdu[0].verify('fix')
            header = hdu[0].header
            dshape = hdu[0].data.shape
            ndim = hdu[0].data.ndim
            (cdelt1,
             cdelt2) = (-header['cdelt1'] * 3600., header['cdelt2'] * 3600.
                        )  # Original CDELT1, 2 are for RA and DEC in degrees
            header['cdelt1'] = cdelt1
            header['cdelt2'] = cdelt2
            header['cunit1'] = 'arcsec'
            header['cunit2'] = 'arcsec'
            header['crval1'] = crval1
            header['crval2'] = crval2
            header['ctype1'] = 'HPLN-TAN'
            header['ctype2'] = 'HPLT-TAN'
            header['date-obs'] = dateobs  # begin time of the image
            if not p_ang:
                hel['p0'] = 0
            try:
                # this works for pyfits version of CASA 4.7.0 but not CASA 4.6.0
                if tdur_s:
                    header.set('exptime', tdur_s)
                else:
                    header.set('exptime', 1.)
                header.set('p_angle', hel['p0'])
                header.set('hgln_obs', 0.)
                header.set('rsun_ref', sun.constants.radius.value)
                if sunpyver <= 1:
                    header.set(
                        'dsun_obs',
                        sun.sunearth_distance(Time(dateobs)).to(u.meter).value)
                    header.set(
                        'rsun_obs',
                        sun.solar_semidiameter_angular_size(
                            Time(dateobs)).value)
                    header.set(
                        'hglt_obs',
                        sun.heliographic_solar_center(Time(dateobs))[1].value)
                else:
                    header.set(
                        'dsun_obs',
                        sun.earth_distance(Time(dateobs)).to(u.meter).value)
                    header.set('rsun_obs',
                               sun.angular_radius(Time(dateobs)).value)
                    header.set('hglt_obs', sun.L0(Time(dateobs)).value)
            except:
                # this works for astropy.io.fits
                if tdur_s:
                    header.append(('exptime', tdur_s))
                else:
                    header.append(('exptime', 1.))
                header.append(('p_angle', hel['p0']))
                header.append(('hgln_obs', 0.))
                header.append(('rsun_ref', sun.constants.radius.value))
                if sunpyver <= 1:
                    header.append(
                        ('dsun_obs', sun.sunearth_distance(Time(dateobs)).to(
                            u.meter).value))
                    header.append(('rsun_obs',
                                   sun.solar_semidiameter_angular_size(
                                       Time(dateobs)).value))
                    header.append(('hglt_obs',
                                   sun.heliographic_solar_center(
                                       Time(dateobs))[1].value))
                else:
                    header.append(
                        ('dsun_obs',
                         sun.earth_distance(Time(dateobs)).to(u.meter).value))
                    header.append(
                        ('rsun_obs', sun.angular_radius(Time(dateobs)).value))
                    header.append(('hglt_obs', sun.L0(Time(dateobs)).value))

            # check if stokes parameter exist
            exist_stokes = False
            stokes_mapper = {
                'I': 1,
                'Q': 2,
                'U': 3,
                'V': 4,
                'RR': -1,
                'LL': -2,
                'RL': -3,
                'LR': -4,
                'XX': -5,
                'YY': -6,
                'XY': -7,
                'YX': -8
            }
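            # FITS convention: positive Stokes codes 1..4 map to I, Q, U, V;
            # negative codes map to correlation products (RR, LL, RL, LR, then XX, YY, XY, YX).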
            if 'CRVAL3' in header.keys():
                if header['CTYPE3'] == 'STOKES':
                    stokenum = header['CRVAL3']
                    exist_stokes = True
            if 'CRVAL4' in header.keys():
                if header['CTYPE4'] == 'STOKES':
                    stokenum = header['CRVAL4']
                    exist_stokes = True
            if exist_stokes:
                if stokenum in stokes_mapper.values():
                    stokesstr = list(stokes_mapper.keys())[list(
                        stokes_mapper.values()).index(stokenum)]
                else:
                    print('Stokes parameter {0:d} not recognized'.format(
                        stokenum))
                if verbose:
                    print('This image is in Stokes ' + stokesstr)
            else:
                print(
                    'STOKES Information does not seem to exist! Assuming Stokes I'
                )
                stokenum = 1

            # intensity units to brightness temperature
            if toTb:
                # get restoring beam info
                bmaj = bmajs[n]
                bmin = bmins[n]
                beamunit = beamunits[n]
                data = hdu[
                    0].data  # remember the data order is reversed due to the FITS convention
                keys = list(header.keys())
                values = list(header.values())
                # which axis is frequency?
                faxis = keys[values.index('FREQ')][-1]
                faxis_ind = ndim - int(faxis)
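                # FITS axes are 1-based and ordered opposite to the numpy array,
                # so the numpy axis index is ndim - (FITS axis number)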
                # find out the polarization of this image
                k_b = qa.constants('k')['value']
                c_l = qa.constants('c')['value']
                # Always use 2*kb for all polarizations
                const = 2. * k_b / c_l**2
                if header['BUNIT'].lower() == 'jy/beam':
                    header['BUNIT'] = 'K'
                    header['BTYPE'] = 'Brightness Temperature'
                    for i in range(dshape[faxis_ind]):
                        nu = header['CRVAL' +
                                    faxis] + header['CDELT' + faxis] * (
                                        i + 1 - header['CRPIX' + faxis])
                        if header['CUNIT' + faxis] == 'KHz':
                            nu *= 1e3
                        if header['CUNIT' + faxis] == 'MHz':
                            nu *= 1e6
                        if header['CUNIT' + faxis] == 'GHz':
                            nu *= 1e9
                        if len(bmaj) > 1:  # multiple (per-plane) beams
                            bmajtmp = bmaj[i]
                            bmintmp = bmin[i]
                        else:  # one single beam
                            bmajtmp = bmaj[0]
                            bmintmp = bmin[0]
                        if beamunit == 'arcsec':
                            bmaj0 = np.radians(bmajtmp / 3600.)
                            bmin0 = np.radians(bmintmp / 3600.)
                        if beamunit == 'arcmin':
                            bmaj0 = np.radians(bmajtmp / 60.)
                            bmin0 = np.radians(bmintmp / 60.)
                        if beamunit == 'deg':
                            bmaj0 = np.radians(bmajtmp)
                            bmin0 = np.radians(bmintmp)
                        if beamunit == 'rad':
                            bmaj0 = bmajtmp
                            bmin0 = bmintmp
                        beam_area = bmaj0 * bmin0 * np.pi / (4. * log(2.))
                        factor = const * nu**2  # SI unit
                        jy_to_si = 1e-26
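                        # Rayleigh-Jeans: Tb[K] = S[Jy/beam] * 1e-26 / (beam_area[sr] * 2*k_b*nu**2/c**2)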
                        # print(nu/1e9, beam_area, factor)
                        factor2 = sclfactor
                        # if sclfactor:
                        #     factor2 = 100.
                        if faxis == '3':
                            data[:, i, :, :] *= jy_to_si / beam_area / factor * factor2
                        if faxis == '4':
                            data[i, :, :, :] *= jy_to_si / beam_area / factor * factor2

            header = fu.headerfix(header)
            hdu.flush()
            hdu.close()

            if ndim - np.count_nonzero(np.array(dshape) == 1) > 3:
                docompress = False
                '''
                    Caveat: only 1D, 2D, or 3D images are currently supported by
                    the astropy fits compression. If an n-dimensional image data array
                    does not have at least n-3 single-dimensional entries,
                    force docompress to be False
                '''

                print(
                    'warning: The FITS data contains more than 3 non-squeezable dimensions. Skipping FITS compression.'
                )
            if docompress:
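                # rewrite the plain FITS file as a tile-compressed FITS under the same
                # name: move it aside, reopen it, and write a RICE-compressed copy back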
                fitsftmp = fitsf + ".tmp.fits"
                os.system("mv {} {}".format(fitsf, fitsftmp))
                hdu = pyfits.open(fitsftmp)
                hdu[0].verify('fix')
                header = hdu[0].header
                data = hdu[0].data
                fu.write_compressed_image_fits(fitsf,
                                               data,
                                               header,
                                               compression_type='RICE_1',
                                               quantize_level=4.0)
                os.system("rm -rf {}".format(fitsftmp))
    if deletehistory:
        ms_restorehistory(vis)
    return fitsfile
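A note on the toTb branch above: it applies the Rayleigh-Jeans relation to turn flux density per beam (Jy/beam) into brightness temperature (K). A minimal standalone sketch of the same arithmetic, with illustrative names and hard-coded constants that are not part of the example itself:

import numpy as np

def jybeam_to_kelvin(flux_jybeam, freq_hz, bmaj_arcsec, bmin_arcsec):
    """Convert Jy/beam to brightness temperature for a Gaussian restoring beam."""
    k_b = 1.380649e-23                                    # Boltzmann constant [J/K]
    c = 2.99792458e8                                      # speed of light [m/s]
    bmaj = np.radians(bmaj_arcsec / 3600.)                # beam major axis [rad]
    bmin = np.radians(bmin_arcsec / 3600.)                # beam minor axis [rad]
    beam_area = bmaj * bmin * np.pi / (4. * np.log(2.))   # beam solid angle [sr]
    rj_factor = 2. * k_b * freq_hz ** 2 / c ** 2          # Rayleigh-Jeans factor
    return flux_jybeam * 1e-26 / beam_area / rj_factor    # 1 Jy = 1e-26 W m^-2 Hz^-1

# e.g. jybeam_to_kelvin(1.0, 1.5e9, 50., 40.) gives the brightness temperature in K
# of a 1 Jy/beam source at 1.5 GHz observed with a 50" x 40" beam.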
Example #22
0
def subvs(vis='',
          outputvis='',
          timerange='',
          spw='',
          timoffset=4,
          windowlen=5,
          windowtype='hamming',
          splitsel=True,
          reverse=False,
          overwrite=False):
    """Perform vector subtraction for visibilities
    Keyword arguments:
	vis -- Name of input visibility file (MS)
				default: none; example: vis='ngc5921.ms'
	outputvis -- Name of output uv-subtracted visibility file (MS)
				default: none; example: outputvis='ngc5921_src.ms'
	timerange -- Time range of performing the UV subtraction:
				default='' means all times.  examples:
				timerange = 'YYYY/MM/DD/hh:mm:ss~YYYY/MM/DD/hh:mm:ss'
				timerange = 'hh:mm:ss~hh:mm:ss'
	spw -- Select spectral window/channel.
	windowlen -- Specify the width of window for smoothing
	windowtype --The type of window from 'flat', 'hanning', 'hamming', 'bartlett', 'blackman'
				flat window will produce a moving average smoothing.
	splitsel -- True or False. default = True. If splitsel = False, then the entire input
				measurement set is copied as the output measurement set (outputvis), with 
				background subtracted at selected timerange and spectral channels. 
				If splitsel = True, then only the selected timerange and spectral channels 
				are copied into the output measurement set (outputvis).
	reverse -- True or False. default = False. If reverse = False, then the times indicated
				by subtime1 and/or subtime2 are treated as background and subtracted; If reverse
				= True, then reverse the sign of the background-subtracted data. The option can 
				be used for mapping absorptive structure.
	overwrite -- True or False. default = False. If overwrite = True and
				outputvis already exists, the selected subtime and spw in the 
				output measurement set will be replaced with background-subtracted 
				visibilities

    """
    # Get the time and frequency axis of the input ms
    # Open the ms and plot dynamic spectrum
    print 'using window length: ', windowlen
    print 'using window type: ', windowtype
    ms.open(vis, nomodify=True)
    # ms.selectinit(datadescid=0)
    timfreq = ms.getdata(['time', 'axis_info'], ifraxis=True)
    tim = timfreq['time']
    # check timerange input; default: entire timerange
    if timerange and (type(timerange) == str):
        [btimeo, etimeo] = timerange.split('~')
        btimeosec = qa.getvalue(qa.convert(qa.totime(btimeo), 's'))
        etimeosec = qa.getvalue(qa.convert(qa.totime(etimeo), 's'))
        timebinosec = etimeosec - btimeosec
        if timebinosec < 0:
            raise Exception, 'Negative timebin! Please check the "timerange" parameter.'
        else:
            casalog.post('Selected timerange: ' + timerange +
                         ' as the time for UV subtraction.')
    else:
        casalog.post(
            'Output timerange not specified, using the entire timerange')
        timerange = str(qa.time(qa.quantity(
            tim[0], 's'), prec=8)[0]) + '~' + str(
                qa.time(qa.quantity(tim[-1], 's'), prec=8)[0])
        print 'Output timerange not specified, using the entire timerange', timerange
    # check spectral window input; default: entire channels of spectral window 0
    if spw and (type(spw) == str):
        [spwid, chanran] = spw.split(':')
        [bchan, echan] = chanran.split('~')
        nchan = int(echan) - int(bchan) + 1
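        # e.g. spw='0:100~200' selects channels 100-200 of spectral window 0 (nchan = 101)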
    else:
        casalog.post('spw not specified, use all frequency channels')
        freq = timfreq['axis_info']['freq_axis']['chan_freq'].flatten()
        nchan = len(freq)
        spwid = '0'
        bchan = '0'
        echan = str(nchan - 1)
        print 'spw not specified, use all frequency channels', spwid + ':' + bchan + '~' + str(
            nchan - 1)

    ntimergn = len(timerange)
    # To avoid memory errors, split the channels into smaller segments for smoothing
    cellstep = 2
    chancell = int(nchan / cellstep)
    l = range(nchan)
    chunks = [l[x:x + cellstep] for x in xrange(0, len(l), cellstep)]
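    # e.g. with nchan = 8 and cellstep = 2, chunks = [[0, 1], [2, 3], [4, 5], [6, 7]]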
    #spwrange='0:0~'+str(chancell)
    ms.close()

    if not (timoffset and (type(timoffset) == int)):
        timoffset = int(4)

    for i in range(len(chunks)):
        spwrange = spwid + ':' + str(int(bchan) + min(chunks[i])) + '~' + str(
            int(bchan) + max(chunks[i]))
        print 'Subtracting visibility from spectral range: ', spwrange
        result2 = task_subvs_lv1.subvs(vis, outputvis, timerange, spwrange,
                                       timoffset, windowlen, windowtype,
                                       splitsel, False, True)