def parseGFH(gfh):
    """Parses the GFH (general file header?) structure at the beginning of the file"""
    ## type
    ftype = gfh[0:4].tostring()
    ## length & version
    fhlen, fver = struct.unpack('ii', gfh[4:12])
    ### creation date
    crdate = gfh[12:23].tostring()
    ### creation time
    crtime = gfh[23:28].tostring()
    ### revision date
    rrdate = gfh[28:39].tostring()
    ### revision time
    rrtime = gfh[39:44].tostring()
    ### revision count
    rcount = struct.unpack('i', gfh[44:48])
    rcount = rcount[0]
    ### node name
    nname = gfh[48:128].tostring()
    ### types
    dattp = struct.unpack('B', gfh[128:129])[0]
    link1, link2 = struct.unpack('ii', gfh[152:160])
    ### the remaining info is not needed
    dprint(1, "read header type=%s, length=%d, version=%d, created=%s@%s, updated=%s@%s x %d, node name=%s, dattp=%d, link=%d,%d" %
           (ftype, fhlen, fver, crdate, crtime, rrdate, rrtime, rcount, nname, dattp, link1, link2))
    return (ftype, fhlen, fver, crdate, crtime, rrdate, rrtime, rcount, nname)
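# A standalone sketch (not part of the original module) of the same GFH layout,
# expressed as a single struct format string over a plain bytes buffer. The sample
# header built below is synthetic; field names simply mirror parseGFH above.
import struct

GFH_STRUCT = "<4sii11s5s11s5si80sB"   # type, length, version, dates/times, revision count, node name, data type

def parse_gfh_bytes(buf):
    ftype, fhlen, fver, crdate, crtime, rrdate, rrtime, rcount, nname, dattp = \
        struct.unpack_from(GFH_STRUCT, buf, 0)
    link1, link2 = struct.unpack_from("<ii", buf, 152)
    return ftype.decode(), fhlen, fver, nname.rstrip(b"\0 ").decode(), dattp, link1, link2

if __name__ == "__main__":
    hdr = bytearray(512)
    struct.pack_into(GFH_STRUCT, hdr, 0, b".MDL", 512, 1,
                     b"01-01-2024", b"12:00", b"01-01-2024", b"12:00", 0, b"example", 6)
    struct.pack_into("<ii", hdr, 152, 512, 512)
    print(parse_gfh_bytes(bytes(hdr)))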
def load(filename, center=None, **kw):
    """Imports an AIPS clean component list file"""
    srclist = []
    dprint(1, "importing AIPS clean component table", filename)
    # read file
    ff = open(filename)
    if center is None:
        raise ValueError("field centre must be specified")
    # now process file line-by-line
    linenum = 0
    for line in ff:
        linenum += 1
        # parse one line
        dprint(4, "read line:", line)
        fields = line.split()
        if len(fields) != 5:
            continue
        try:
            num = int(fields[0])
            dx, dy, i, i_tot = list(map(float, fields[1:]))
        except:
            continue
        try:
            # convert dx/dy to real positions
            l, m = sin(dx * ARCSEC), sin(dy * ARCSEC)
            ra, dec = lm_to_radec(l, m, *center)
            pos = ModelClasses.Position(ra, dec)
        except Exception as exc:
            print("CC %d: error converting coordinates (%s), skipping" % (num, str(exc)))
            continue
        flux = ModelClasses.Flux(i)
        # now create a source object
        src = SkyModel.Source('cc%d' % num, pos, flux)
        src.setAttribute('r', math.sqrt(l * l + m * m))
        srclist.append(src)
    dprintf(2, "imported %d sources from file %s\n", len(srclist), filename)
    # create model
    model = ModelClasses.SkyModel(*srclist)
    # setup model center
    model.setFieldCenter(*center)
    # setup radial distances
    projection = Coordinates.Projection.SinWCS(*model.fieldCenter())
    for src in model.sources:
        l, m = projection.lm(src.pos.ra, src.pos.dec)
        src.setAttribute('r', math.sqrt(l * l + m * m))
    return model
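# Self-contained sketch of the offset-to-position conversion performed by the loader
# above: component offsets dx,dy (taken here to be in arcsec) become direction
# cosines via sin(), and are then de-projected around the field centre. lm_to_radec
# itself is not shown in this excerpt, so a standard SIN (orthographic)
# de-projection is assumed below purely for illustration.
import math

ARCSEC = math.pi / (180 * 3600)

def lm_to_radec_sin(l, m, ra0, dec0):
    """Assumed SIN de-projection about field centre (ra0, dec0); all angles in radians."""
    n = math.sqrt(1.0 - l * l - m * m)
    dec = math.asin(m * math.cos(dec0) + n * math.sin(dec0))
    ra = ra0 + math.atan2(l, n * math.cos(dec0) - m * math.sin(dec0))
    return ra, dec

dx, dy = 30.0, -45.0                                   # component offset, arcsec
ra0, dec0 = math.radians(60.0), math.radians(-30.0)    # example field centre
l, m = math.sin(dx * ARCSEC), math.sin(dy * ARCSEC)
ra, dec = lm_to_radec_sin(l, m, ra0, dec0)
print("component at RA=%.6f deg, Dec=%.6f deg" % (math.degrees(ra), math.degrees(dec)))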
def load(filename, center=None, **kw):
    """Imports an AIPS clean component list from FITS table"""
    srclist = []
    dprint(1, "importing AIPS clean component FITS table", filename)
    # read file
    ff = pyfits.open(filename)
    if center is None:
        hdr = ff[0].header
        ra = hdr['CRVAL1'] * _units[hdr.get('CUNIT1', 'DEG').strip()]
        dec = hdr['CRVAL2'] * _units[hdr.get('CUNIT2', 'DEG').strip()]
        print("Using FITS image centre (%.4f, %.4f deg) as field centre" % (ra / DEG, dec / DEG))
        center = ra, dec
    # now process file line-by-line
    cclist = ff[1].data
    hdr = ff[1].header
    ux = _units[hdr.get('TUNIT2', 'DEG').strip()]
    uy = _units[hdr.get('TUNIT3', 'DEG').strip()]
    for num, ccrec in enumerate(cclist):
        stokes_i, dx, dy = list(map(float, ccrec))
        # convert dx/dy to real positions
        l, m = sin(dx * ux), sin(dy * uy)
        ra, dec = lm_to_radec(l, m, *center)
        pos = ModelClasses.Position(ra, dec)
        flux = ModelClasses.Flux(stokes_i)
        # now create a source object
        src = SkyModel.Source('cc%d' % num, pos, flux)
        src.setAttribute('r', math.sqrt(l * l + m * m))
        srclist.append(src)
    dprintf(2, "imported %d sources from file %s\n", len(srclist), filename)
    # create model
    model = ModelClasses.SkyModel(*srclist)
    # setup model center
    model.setFieldCenter(*center)
    # setup radial distances
    projection = Coordinates.Projection.SinWCS(*model.fieldCenter())
    for src in model.sources:
        l, m = projection.lm(src.pos.ra, src.pos.dec)
        src.setAttribute('r', math.sqrt(l * l + m * m))
    return model
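# The `_units` lookup used above is defined elsewhere in the module and is not shown
# in this excerpt; a plausible minimal definition, consistent with how it is used
# (FITS unit string -> radians per unit), would be:
import math
DEG = math.pi / 180
_units = {'DEG': DEG, 'DEGREE': DEG, 'DEGREES': DEG, 'RAD': 1.0, 'RADIAN': 1.0}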
def parse(self, line, linenum=0):
    """Parses one line. Returns None for empty or commented lines, else returns a CatalogLine object"""
    # strip whitespace
    line = line.strip()
    dprintf(3, "read line %d: %s\n", linenum, line)
    # skip empty or commented lines
    if not line or line[0] == '#':
        return None
    # split using separators, quit when no more separators
    fields = []
    for sep in self.separators:
        ff = line.split(sep, 1)
        if len(ff) < 2:
            break
        fields.append(ff[0])
        line = ff[1]
    fields.append(line)
    dprint(4, "line %d: " % linenum, fields)
    return CatalogLine(self, fields)
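# Standalone illustration of the successive-separator split that parse() performs:
# each configured separator consumes one field and the remainder of the line becomes
# the last field. The catalogue line and separators below are illustrative only.
line = "3C48, POINT, 01:37:41.3, +33.09.35.1, 16.0"
separators = [", ", ", ", ", ", ", "]
fields = []
for sep in separators:
    ff = line.split(sep, 1)
    if len(ff) < 2:
        break
    fields.append(ff[0])
    line = ff[1]
fields.append(line)
print(fields)   # ['3C48', 'POINT', '01:37:41.3', '+33.09.35.1', '16.0']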
def parseMDH(mdh):
    """Parses the MDH (model file header?) structure"""
    maxlin, modptr, nsources, mtype = struct.unpack('iiii', mdh[12:28])
    mepoch = struct.unpack('f', mdh[28:32])[0]
    ra0, dec0, freq0 = struct.unpack('ddd', mdh[32:56])
    ### Max. # of lines in model or disk version
    ### pointer to model ???
    ### no of sources in model
    ### model type(0: no ra,dec, 1=app, 2=epoch)
    ### Epoch (e.g. 1950) if TYP=2 (float) : 4 bytes
    ### Model centre RA (circles) : double
    ra0 *= math.pi * 2
    dec0 *= math.pi * 2
    ### Model centre FRQ (MHz)
    freq0 *= 1e6
    ### the remaining is not needed
    dprint(1, "read model header maxlines=%d, pointer=%d, sources=%d, type=%d, epoch=%f RA=%f, DEC=%f (rad) Freq=%f Hz" %
           (maxlin, modptr, nsources, mtype, mepoch, ra0, dec0, freq0))
    return (maxlin, modptr, nsources, mtype, mepoch, ra0, dec0, freq0)
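# The MDH stores the model centre RA/Dec as fractions of a full circle ("circles")
# and the reference frequency in MHz; a quick worked example of the scaling applied
# above (values are illustrative):
import math
ra_circles, dec_circles, freq_mhz = 0.25, -1.0 / 12, 1400.0
print(ra_circles * 2 * math.pi)    # 1.5707... rad  (6h)
print(dec_circles * 2 * math.pi)   # -0.5235... rad (-30 deg)
print(freq_mhz * 1e6)              # 1400000000.0 Hz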
def __init__(self, format):
    # figure out fields and their separators
    fields = []
    self.separators = []
    while True:
        match = re.match(r"(\w[\w:]*(=(fixed)?'[^']*')?)(([^\w]+)(\w.*))?$", format)
        if not match:
            break
        fields.append(match.group(1))
        # if no group 4, then we've reached the last field
        if not match.group(4):
            break
        self.separators.append(match.group(5))
        format = match.group(6)
    # now parse the format specification
    # this is a dict of field name -> field index
    self.field_number = {}
    # this is a dict of field name -> default value
    self.field_default = dict(Category='2', I='1')
    # fill up the dicts
    for num_field, field in enumerate(fields):
        # is a default value given?
        match = re.match(r"(.+)='(.*)'$", field)
        if match:
            field = match.group(1)
            self.field_default[field] = match.group(2)
        self.field_number[field] = num_field
    dprint(2, "fields are", self.field_number)
    dprint(2, "default values are", self.field_default)
    dprint(2, "separators are", self.separators)
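# Standalone worked example of the field/separator split performed by the regular
# expression above, using a BBS-style format header (the string itself is only an
# example):
import re

fmt = "Name, Type, Ra, Dec, I, ReferenceFrequency='60e6', SpectralIndex:0='0.0'"
fields, separators = [], []
while True:
    m = re.match(r"(\w[\w:]*(=(fixed)?'[^']*')?)(([^\w]+)(\w.*))?$", fmt)
    if not m:
        break
    fields.append(m.group(1))
    if not m.group(4):
        break
    separators.append(m.group(5))
    fmt = m.group(6)
print(fields)
# ['Name', 'Type', 'Ra', 'Dec', 'I', "ReferenceFrequency='60e6'", "SpectralIndex:0='0.0'"]
print(separators)   # [', ', ', ', ', ', ', ', ', ', ', ']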
def load(filename, freq0=None, **kw):
    """Imports a gaul table
    The 'freq0' argument supplies a default reference frequency (if one is not contained in the file.)
    If 'center_on_brightest' is True, the model field center will be set to the brightest source.
    'min_extent' is minimal source extent (in radians), above which a source will be treated as a Gaussian rather than
    a point component.
    """
    srclist = []
    id = None
    dprint(1, "importing PyBDSM gaul/srl file", filename)
    format = {}
    extension = filename.split(".")[-1]
    if extension == "srl":
        format_mapping['Source_id'] = format_mapping.pop('Gaus_id')
        id = "Source_id"
    # look for format string and reference freq, and build up format dict
    for line in open(filename):
        m = re.match(r"# Reference frequency .*?([0-9.eE+-]+)\s*Hz", line)
        if m:
            freq0 = kw['freq0'] = freq0 or float(m.group(1))
            dprint(2, "found reference frequency %g" % freq0)
        elif re.match(r"#(\s*[\w:]+\s+)+", line) and line.find(id if id else "Gaus_id") > 0:
            dprint(2, "found format string", line)
            fields = dict([(name, i) for i, name in enumerate(line[1:].split())])
            # map known fields to their ASCII equivalents, the rest copy as custom float attributes with
            # a "pybdsm_" prefix
            for i, name in enumerate(line[1:].split()):
                if name in format_mapping:
                    dprint(2, "Field", format_mapping[name], name, "is column", i)
                    format[format_mapping[name]] = i
                else:
                    format[":float:_pybdsm_%s" % name] = i
        if format and freq0:
            break
    if not format:
        raise ValueError("this .gaul file does not appear to contain a format string")
    # call ASCII.load() function now that we have the format dict
    kw['format'] = format
    return ASCII.load(filename, **kw)
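# Small standalone check of the reference-frequency regex used above, against the
# kind of comment line PyBDSM writes (the sample line is illustrative):
import re
line = "# Reference frequency of the detection image: 1.42000e+09 Hz"
m = re.match(r"# Reference frequency .*?([0-9.eE+-]+)\s*Hz", line)
print(float(m.group(1)) if m else None)   # 1420000000.0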
def load(filename, freq0=None, center_on_brightest=False, **kw):
    """Imports a BBS catalog file
    The 'format' argument can be either a dict (such as the DefaultDMSFormat dict above), or a string such as
    DefaultDMSFormatString. (Other possible field names are "ra_d", "ra_rad", "dec_rad", "dec_sign".)
    If None is specified, DefaultDMSFormat is used.
    The 'freq0' argument supplies a default reference frequency (if one is not contained in the file.)
    If 'center_on_brightest' is True, the model field center will be set to the brightest source,
    else to the center of the first patch.
    """
    srclist = []
    dprint(1, "importing BBS source table", filename)
    # read file
    ff = open(filename)
    # first line must be a format string: extract it
    line0 = ff.readline().strip()
    match = re.match(r"#\s*\((.+)\)\s*=\s*format", line0)
    if not match:
        raise ValueError("line 1 is not a valid format specification")
    format_str = match.group(1)
    # create format parser from this string
    parser = CatalogParser(format_str)
    # check for mandatory fields
    for field in "Name", "Type":
        if not parser.defines(field):
            raise ValueError("Table lacks mandatory field '%s'" % field)
    # brightest source and its coordinates
    maxbright = 0
    brightest_name = radec0 = None
    patches = []
    ref_freq = freq0
    # now process file line-by-line
    linenum = 1
    for line in ff:
        linenum += 1
        try:
            # parse one line
            dprint(4, "read line:", line)
            catline = parser.parse(line, linenum)
            if not catline:
                continue
            dprint(5, "line %d: " % linenum, catline.__dict__)
            # is it a patch record?
            patchname = getattr(catline, "Patch", "")
            if not catline.Name:
                dprintf(2, "%s:%d: patch %s\n", filename, linenum, patchname)
                patches.append((patchname, catline.ra_rad, catline.dec_rad))
                continue
            # form up name
            name = "%s:%s" % (patchname, catline.Name) if patchname else catline.Name
            # check source type
            stype = catline.Type.upper()
            if stype not in ("POINT", "GAUSSIAN"):
                raise ValueError("unsupported source type %s" % stype)
            # see if we have freq0
            if freq0:
                f0 = freq0
            elif hasattr(catline, "ReferenceFrequency"):
                f0 = float(catline.ReferenceFrequency or "0")
            else:
                f0 = None
            # set model reference frequency
            if f0 is not None and ref_freq is None:
                ref_freq = f0
            # see if we have Q/U/V
            i, q, u, v = [float(getattr(catline, stokes, "0") or "0") for stokes in "IQUV"]
            # see if we have RM as well. Create flux object (unpolarized, polarized, polarized w/RM)
            if f0 is not None and hasattr(catline, "RotationMeasure"):
                flux = ModelClasses.PolarizationWithRM(i, q, u, v, float(catline.RotationMeasure or "0"), f0)
            else:
                flux = ModelClasses.Polarization(i, q, u, v)
            # see if we have a spectral index
            if f0 is not None and hasattr(catline, "SpectralIndex:0"):
                spectrum = ModelClasses.SpectralIndex(float(getattr(catline, "SpectralIndex:0") or "0"), f0)
            else:
                spectrum = None
            # see if we have extent parameters
            if stype == "GAUSSIAN":
                ex = float(getattr(catline, "MajorAxis", "0") or "0")
                ey = float(getattr(catline, "MinorAxis", "0") or "0")
                pa = float(getattr(catline, "Orientation", "0") or "0")
                shape = ModelClasses.Gaussian(ex, ey, pa)
            else:
                shape = None
            # create tags
            tags = {}
            for field in "Patch", "Category":
                if hasattr(catline, field):
                    tags["BBS_%s" % field] = getattr(catline, field)
            # OK, now form up the source object
            # position
            pos = ModelClasses.Position(catline.ra_rad, catline.dec_rad)
            # now create a source object
            src = SkyModel.Source(name, pos, flux, shape=shape, spectrum=spectrum, **tags)
            srclist.append(src)
            # check if it's the brightest
            brightness = src.brightness()
            if brightness > maxbright:
                maxbright = brightness
                brightest_name = src.name
                radec0 = catline.ra_rad, catline.dec_rad
        except:
            dprintf(0, "%s:%d: %s, skipping\n", filename, linenum, str(sys.exc_info()[1]))
    dprintf(2, "imported %d sources from file %s\n", len(srclist), filename)
    # create model
    model = ModelClasses.SkyModel(*srclist)
    if ref_freq is not None:
        model.setRefFreq(ref_freq)
    # setup model center
    if center_on_brightest and radec0:
        dprintf(2, "setting model centre to brightest source %s (%g Jy) at %f,%f\n", brightest_name, maxbright, *radec0)
        model.setFieldCenter(*radec0)
    elif patches:
        name, ra, dec = patches[0]
        dprintf(2, "setting model centre to first patch %s at %f,%f\n", name, ra, dec)
        model.setFieldCenter(ra, dec)
    # map patches to model tags
    model.setAttribute("BBS_Patches", patches)
    model.setAttribute("BBS_Format", format_str)
    # setup radial distances
    projection = Coordinates.Projection.SinWCS(*model.fieldCenter())
    for src in model.sources:
        l, m = projection.lm(src.pos.ra, src.pos.dec)
        src.setAttribute("r", math.sqrt(l * l + m * m))
    return model
def save(model, filename, freq0=None, sources=None, **kw): """Saves model to a NEWSTAR MDL file. The MDL file must exist, since the existing header is reused. 'sources' is a list of sources to write, if None, then model.sources is used. """ if sources is None: sources = model.sources dprintf(2, "writing %s model sources to NEWSTAR file\n", len(sources), filename) ra0, dec0 = model.fieldCenter() freq0 = freq0 or model.refFreq() # if freq0 is not specified, scan sources if freq0 is None: for src in sources: freq0 = (src.spectrum and getattr(src.spectrum, 'freq0', None)) or getattr( src.flux, 'freq0', None) if freq0: break else: raise ValueError( "unable to determine NEWSTAR model reference frequency, please specify one explicitly." ) ff = open(filename, mode="wb") ### create GFH header gfh = numpy.zeros(512, dtype=numpy.uint8) datestr = time.strftime("%d-%m-%Y") timestr = time.strftime("%H:%M") struct.pack_into("4sii11s5s11s5si80sB", gfh, 0, ".MDL", 512, 1, datestr, timestr, datestr, timestr, 0, os.path.splitext(os.path.basename(filename))[0], 6) # 6=datatype # link1/link2 gives the header size actually struct.pack_into("ii", gfh, 152, 512, 512) gfh.tofile(ff) # create MDH header mdh = numpy.zeros(64, dtype=numpy.uint8) struct.pack_into('iiii', mdh, 12, 1, 576, 0, 2) # maxlin,pointer,num_sources,mtype struct.pack_into('f', mdh, 28, getattr(model, 'epoch', 2000)) struct.pack_into('ddd', mdh, 32, ra0 / (2 * math.pi), dec0 / (2 * math.pi), freq0 * 1e-6) mdh.tofile(ff) # get the max ID, if specified max_id = max([getattr(src, 'newstar_id', 0) for src in sources]) # now loop over model sources # count how many are written out -- only point sources and gaussians are actually written out, the rest are skipped nsrc = 0 for src in sources: # create empty newstar source structure mdl = numpy.zeros(56, dtype=numpy.uint8) if src.shape and not isinstance(src.shape, ModelClasses.Gaussian): dprint( 3, "skipping source '%s': non-supported type '%s'" % (src.name, src.shape.typecode)) continue stI = src.flux.I # get l,m NCP position -- either from tag, or compute lm = getattr(src, '_lm_ncp', None) if lm: if isinstance(lm, (tuple, list)) and len(lm) == 2: l, m = lm else: dprint( 0, "warning: skipping source '%s' because its _lm_ncp attribute is malformed (tuple of 2 values expected)" % src.name) continue else: l, m = radec_to_lm_ncp(ra0, dec0, src.pos.ra, src.pos.dec) # update source count nsrc += 1 # generate source id src_id = getattr(src, 'newstar_id', None) if src_id is None: src_id = max_id = max_id + 1 # encode position, flux, identifier -- also, convert flux from Jy to WU to Jy (1WU=5mJy) struct.pack_into('fffi', mdl, 0, stI / 0.005, l, m, src_id) # encode fractional polarization struct.pack_into( 'fff', mdl, 16, *[getattr(src.flux, stokes, 0.0) / stI for stokes in "QUV"]) ## encode flag & type bits ## Flag: bit 0= extended; bit 1= Q|U|V <>0 and no longer used according to Wim ## Type: bit 0= clean component; bit 3= beamed beamed = getattr(src, 'flux_intrinsic', False) or getattr( src, 'newstar_beamed', False) struct.pack_into('BB', mdl, 52, 1 if src.shape else 0, (1 if getattr(src, 'newstar_cc', False) else 0) | (8 if beamed else 0)) ### extended source parameters if src.shape: ## the procedure is NMOEXF in nscan/nmoext.for R0 = math.cos(src.shape.pa) R1 = -math.sin(src.shape.pa) R2 = (.5 * src.shape.ex)**2 R3 = (.5 * src.shape.ey)**2 ex = R2 * R1 * R1 + R3 * R0 * R0 ey = R2 * R0 * R0 + R3 * R1 * R1 pa = 2 * (R2 - R3) * R0 * R1 struct.pack_into('fff', mdl, 28, ex, ey, pa) ### spectral index if 
isinstance(src.spectrum, ModelClasses.SpectralIndex): struct.pack_into('f', mdl, 40, src.spectrum.spi) if isinstance(src.flux, ModelClasses.PolarizationWithRM): struct.pack_into('f', mdl, 44, src.flux.rm) mdl.tofile(ff) # update MDH header with the new number of sources struct.pack_into('i', mdh, 20, nsrc) ff.seek(512) mdh.tofile(ff) ff.close() dprintf(1, "wrote %d sources to file %s\n", nsrc, filename)
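# NEWSTAR stores fluxes in Westerbork Units (1 WU = 5 mJy), hence the factor 0.005
# used in both save() above and load() below; a quick sanity check of the round trip:
flux_jy = 2.5
flux_wu = flux_jy / 0.005
print(flux_wu, flux_wu * 0.005)   # 500.0 2.5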
def load(filename, import_src=True, import_cc=True, min_extent=0, **kw): """Imports a NEWSTAR MDL file. min_extent is minimal source extent (in radians), above which a source will be treated as a Gaussian rather than a point component. import_src=False causes source components to be omitted import_cc=False causes clean components to be omitted """ srclist = [] dprint(1, "importing NEWSTAR file", filename) # build the LSM from a NewStar .MDL model file # if only_cleancomp=True, only clean components are used to build the LSM # if no_cleancomp=True, no clean components are used to build the LSM ff = open(filename, mode="rb") ### read GFH and MDH headers -- 512 bytes try: gfh = numpy.fromfile(ff, dtype=numpy.uint8, count=512) mdh = numpy.fromfile(ff, dtype=numpy.uint8, count=64) # parse headers ftype, fhlen, fver, crdate, crtime, rrdate, rrtime, rcount, nname = parseGFH( gfh) if ftype != ".MDL": raise TypeError maxlin, modptr, nsources, mtype, mepoch, ra0, dec0, freq0 = parseMDH( mdh) beam_const = 65 * 1e-9 * freq0 ## temp dict to hold unique nodenames unamedict = {} ### Models -- 56 bytes for ii in range(0, nsources): mdl = numpy.fromfile(ff, dtype=numpy.uint8, count=56) ### source parameters sI, ll, mm, id, sQ, sU, sV, eX, eY, eP, SI, RM = struct.unpack( 'fffiffffffff', mdl[0:48]) ### type bits bit1, bit2 = struct.unpack('BB', mdl[52:54]) # convert fluxes sI *= 0.005 # convert from WU to Jy (1WU=5mJy) sQ *= sI sU *= sI sV *= sI # Interpret bitflags 1: bit 0= extended; bit 1= Q|U|V <>0 and no longer used according to Wim fl_ext = bit1 & 1 # Interpret bitflags 2: bit 0= clean component; bit 3= beamed fl_cc = bit2 & 1 fl_beamed = bit2 & 8 ### extended source params: in arcsec, so multiply by ??? if fl_ext: ## the procedure is NMOEXT in nscan/nmoext.for if eP == 0 and eX == eY: r0 = 0 else: r0 = .5 * math.atan2(-eP, eY - eX) r1 = math.sqrt(eP * eP + (eX - eY) * (eX - eY)) r2 = eX + eY eX = 2 * math.sqrt(abs(0.5 * (r2 + r1))) eY = 2 * math.sqrt(abs(0.5 * (r2 - r1))) eP = r0 # NEWSTAR MDL lists might have same source twice if they are # clean components, so make a unique name for them bname = 'N' + str(id) if bname in unamedict: uniqname = bname + '_' + str(unamedict[bname]) unamedict[bname] += 1 else: uniqname = bname unamedict[bname] = 1 # compose source information pos = ModelClasses.Position(*lm_ncp_to_radec(ra0, dec0, ll, mm)) flux = ModelClasses.PolarizationWithRM(sI, sQ, sU, sV, RM, freq0) spectrum = ModelClasses.SpectralIndex(SI, freq0) tags = {} # work out beam gain and apparent flux tags['_lm_ncp'] = (ll, mm) tags['_newstar_r'] = tags['r'] = r = math.sqrt(ll * ll + mm * mm) tags['newstar_beamgain'] = bg = max( math.cos(beam_const * r)**6, .01) tags['newstar_id'] = id if fl_beamed: tags['Iapp'] = sI * bg tags['newstar_beamed'] = True tags['flux_intrinsic'] = True else: tags['flux_apparent'] = True # make some tags based on model flags if fl_cc: tags['newstar_cc'] = True # make shape if extended if fl_ext and max(eX, eY) >= min_extent: shape = ModelClasses.Gaussian(eX, eY, eP) else: shape = None # compute apparent flux src = SkyModel.Source(uniqname, pos, flux, shape=shape, spectrum=spectrum, **tags) srclist.append(src) except: traceback.print_exc() raise TypeError("%s does not appear to be a valid NEWSTAR MDL file" % filename) dprintf(2, "imported %d sources from file %s\n", len(srclist), filename) return ModelClasses.SkyModel(ra0=ra0, dec0=dec0, freq0=freq0, pbexp='max(cos(65*1e-9*fq*r)**6,.01)', *srclist)
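# The extended-source encoding used in save() (procedure NMOEXF) and its inverse in
# load() above (procedure NMOEXT) are round-trip consistent for emaj >= emin; a
# standalone sanity check with illustrative values (radians):
import math

def nmoexf(ex, ey, pa):
    r0, r1 = math.cos(pa), -math.sin(pa)
    r2, r3 = (0.5 * ex) ** 2, (0.5 * ey) ** 2
    return (r2 * r1 * r1 + r3 * r0 * r0,
            r2 * r0 * r0 + r3 * r1 * r1,
            2 * (r2 - r3) * r0 * r1)

def nmoext(eX, eY, eP):
    r0 = 0.0 if (eP == 0 and eX == eY) else 0.5 * math.atan2(-eP, eY - eX)
    r1 = math.sqrt(eP * eP + (eX - eY) ** 2)
    r2 = eX + eY
    return (2 * math.sqrt(abs(0.5 * (r2 + r1))),
            2 * math.sqrt(abs(0.5 * (r2 - r1))),
            r0)

print(nmoext(*nmoexf(0.002, 0.001, 0.3)))   # ~(0.002, 0.001, 0.3)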
def save(model, filename, sources=None, format=None, **kw):
    """Exports model to a BBS catalog file"""
    if sources is None:
        sources = model.sources
    dprintf(2, "writing %d model sources to BBS file %s\n", len(sources), filename)
    # create catalog parser based on either specified format, or the model format, or the default format
    format = format or getattr(
        model, "BBS_Format",
        "Name, Type, Patch, Ra, Dec, I, Q, U, V, ReferenceFrequency, SpectralIndexDegree='0', SpectralIndex:0='0.0', MajorAxis, MinorAxis, Orientation",
    )
    dprint(2, "format string is", format)
    parser = CatalogParser(format)
    # check for mandatory fields
    for field in "Name", "Type":
        if not parser.defines(field):
            raise ValueError("Output format lacks mandatory field '%s'" % field)
    # open file
    ff = open(filename, mode="wt")
    ff.write("# (%s) = format\n# The above line defines the field order and is required.\n\n" % format)
    # write patches
    for name, ra, dec in getattr(model, "BBS_Patches", []):
        catline = parser.newline()
        catline.Patch = name
        catline.setPosition(ra, dec)
        ff.write(catline.makeStr() + "\n")
    ff.write("\n")
    # write sources
    nsrc = 0
    for src in sources:
        catline = parser.newline()
        # type
        if src.shape is None:
            catline.Type = "POINT"
        elif isinstance(src.shape, ModelClasses.Gaussian):
            catline.Type = "GAUSSIAN"
        else:
            dprint(3, "skipping source '%s': non-supported type '%s'" % (src.name, src.shape.typecode))
            continue
        # name and patch
        name = src.name
        patch = getattr(src, "BBS_Patch", "")
        if patch and name.startswith(patch + ":"):
            name = name[(len(patch) + 1):]
        catline.Name = name
        setattr(catline, "Patch", patch)
        # position
        catline.setPosition(src.pos.ra, src.pos.dec)
        # fluxes
        for stokes in "IQUV":
            setattr(catline, stokes, str(getattr(src.flux, stokes, 0.0)))
        # reference freq
        freq0 = (src.spectrum and getattr(src.spectrum, "freq0", None)) or getattr(src.flux, "freq0", None)
        if freq0 is not None:
            setattr(catline, "ReferenceFrequency", str(freq0))
        # RM, spi
        if isinstance(src.spectrum, ModelClasses.SpectralIndex):
            setattr(catline, "SpectralIndexDegree", "0")
            setattr(catline, "SpectralIndex:0", str(src.spectrum.spi))
        if isinstance(src.flux, ModelClasses.PolarizationWithRM):
            setattr(catline, "RotationMeasure", str(src.flux.rm))
        # shape
        if isinstance(src.shape, ModelClasses.Gaussian):
            setattr(catline, "MajorAxis", src.shape.ex)
            setattr(catline, "MinorAxis", src.shape.ey)
            setattr(catline, "Orientation", src.shape.pa)
        # write line
        ff.write(catline.makeStr() + "\n")
        nsrc += 1
    ff.close()
    dprintf(1, "wrote %d sources to file %s\n", nsrc, filename)
def save(model, filename, sources=None, format=None, **kw): """ Exports model to a text file """ if sources is None: sources = model.sources dprintf(2, "writing %d model sources to text file %s\n", len(sources), filename) # create catalog parser based on either specified format, or the model format, or the default format format_str = format or getattr(model, 'ASCII_Format', DefaultDMSFormatString) dprint(2, "format string is", format_str) # convert this into format dict fields = [[field, i] for i, field in enumerate(format_str.split())] if not fields: raise ValueError("illegal format string '%s'" % format) # last fieldname can end with ... ("tags..."), so strip it if fields[-1][0].endswith('...'): fields[-1][0] = fields[-1][0][:-3] # make format dict format = dict(fields) nfields = len(fields) # get minimum necessary fields from format name_field = format.get('name', None) # main RA field ra_rad_field, ra_d_field, ra_h_field, ra_m_field, ra_s_field = \ [format.get(x, None) for x in ('ra_rad', 'ra_d', 'ra_h', 'ra_m', 'ra_s')] dec_rad_field, dec_d_field, dec_m_field, dec_s_field = \ [format.get(x, None) for x in ('dec_rad', 'dec_d', 'dec_m', 'dec_s')] if ra_h_field is not None: ra_scale = 15 ra_d_field = ra_h_field else: ra_scale = 1 # fields for reference freq and RM and SpI freq0_field = format.get('freq0', None) rm_field = format.get('rm', None) spi_field = format.get('spi', None) tags_field = format.get('tags', None) # open file ff = open(filename, mode="wt") ff.write("#format: %s\n" % format_str) # write sources nsrc = 0 for src in sources: # only write points and gaussians if src.shape is not None and not isinstance(src.shape, ModelClasses.Gaussian): dprint(3, "skipping source '%s': non-supported type '%s'" % (src.name, src.shape.typecode)) continue # prepare field values fval = ['0'] * nfields # name if name_field is not None: fval[name_field] = src.name # position: RA ra, dec = src.pos.ra, src.pos.dec # RA in radians if ra_rad_field is not None: fval[ra_rad_field] = str(ra) ra /= ra_scale # RA in h/m/s or d/m/s if ra_m_field is not None: ra, ram, ras = src.pos.ra_hms_static(ra, scale=180, prec=1e-4) fval[ra_m_field] = str(ram) if ra_s_field is not None: fval[ra_s_field] = str(ras) if ra_d_field is not None: fval[ra_d_field] = str(ra) elif ra_d_field is not None: fval[ra_d_field] = str(ra * 180 / math.pi) # position: Dec if dec_rad_field is not None: fval[dec_rad_field] = str(dec) if dec_m_field is not None: dsign, decd, decm, decs = src.pos.dec_sdms() fval[dec_m_field] = str(decm) if dec_s_field is not None: fval[dec_s_field] = str(decs) if dec_d_field is not None: fval[dec_d_field] = dsign + str(decd) elif dec_d_field is not None: fval[dec_d_field] = str(dec * 180 / math.pi) # fluxes for stokes in "IQUV": field = format.get(stokes.lower()) if field is not None: fval[field] = str(getattr(src.flux, stokes, 0)) # fractional polarization if 'pol_frac' in format: i, q, u = [getattr(src.flux, stokes, 0) for stokes in "IQU"] fval[format['pol_frac']] = str(math.sqrt(q * q + u * u) / i) pa = math.atan2(u, q) / 2 for field, scale in ('pol_pa_rad', 1.), ('pol_pa_d', DEG): ifield = format.get(field) if ifield is not None: fval[ifield] = str(pa / scale) # shape if src.shape: for parm, sparm in ("emaj", "ex"), ("emin", "ey"), ("pa", "pa"): for field, scale in (parm, 1.), (parm + '_rad', DEG), (parm + '_d', DEG), (parm + '_m', DEG / 60), ( parm + '_s', DEG / 3600): ifield = format.get(field.lower()) if ifield is not None: fval[ifield] = str(getattr(src.shape, sparm, 0) / scale) # RM, spi, freq0 if 
freq0_field is not None: freq0 = (src.spectrum and getattr(src.spectrum, 'freq0', None)) or getattr(src.flux, 'freq0', 0) fval[freq0_field] = str(freq0) if rm_field is not None: fval[rm_field] = str(getattr(src.flux, 'rm', 0)) if spi_field is not None and hasattr(src, 'spectrum'): fval[spi_field] = str(getattr(src.spectrum, 'spi', 0)) # tags if tags_field is not None: outtags = [] for tag, value in src.getTags(): if isinstance(value, str): outtags.append("%s=\"%s\"" % (tag, value)) elif isinstance(value, bool): if value: outtags.append("+" + tag) else: outtags.append("-" + tag) elif isinstance(value, (int, float)): outtags.append("%s=%f" % (tag, value)) fval[tags_field] = ",".join(outtags) # write the line ff.write(" ".join(fval) + "\n") nsrc += 1 ff.close() dprintf(1, "wrote %d sources to file %s\n", nsrc, filename)
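# The fractional-polarization fields written by save() above (pol_frac, pol_pa) and
# the Q/U reconstruction done by the ASCII loader below are mutually consistent; a
# standalone round-trip check with illustrative values:
import math
i, q, u = 2.0, 0.3, -0.1
pol_frac = math.sqrt(q * q + u * u) / i
pol_pa = math.atan2(u, q) / 2
q2 = i * pol_frac * math.cos(2 * pol_pa)
u2 = i * pol_frac * math.sin(2 * pol_pa)
print(round(q2, 12), round(u2, 12))   # 0.3 -0.1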
def load(filename, format=None, freq0=None, center_on_brightest=False, min_extent=0, verbose=0, **kw): """Imports an ASCII table The 'format' argument can be either a dict (such as the DefaultDMSFormat dict above), or a string such as DefaultDMSFormatString. (Other possible field names are "ra_d", "ra_rad", "dec_rad", "dec_sign".) If None is specified, DefaultDMSFormat is used. The 'freq0' argument supplies a default reference frequency (if one is not contained in the file.) If 'center_on_brightest' is True, the mpodel field center will be set to the brightest source. 'min_extent' is minimal source extent (in radians), above which a source will be treated as a Gaussian rather than a point component. """ srclist = [] dprint(1, "importing ASCII DMS file", filename) # brightest source and its coordinates maxbright = 0 brightest_name = radec0 = None # Get column number associated with field from format dict, as well as the error # column number. Returns tuple of indices, with None index indicating no such column def get_field(name): return format.get(name, None), format.get(name + "_err", None) # Get column number associated with field from format dict, as well as the error # column number. Field is an angle thus will be suffixed with _{rad,d,h,m,s}. # Returns tuple of # column,scale,err_column,err_scale # with None index indicating no such column. Scale is scaling factor to convert # quantity in column to radians def get_ang_field(name, units=ANGULAR_UNITS): column = err_column = colunit = errunit = None units = units or ANGULAR_UNITS for unit, scale in units.items(): if column is None: column = format.get("%s_%s" % (name, unit)) if column is not None: colunit = scale if err_column is None: err_column = format.get("%s_err_%s" % (name, unit)) if err_column is not None: errunit = scale return column, colunit, err_column, errunit # helper function: returns element #num from the fields list, multiplied by scale, or None if no such field def getval(num, scale=1): return None if (num is None or len(fields) <= num) else float(fields[num]) * scale # now process file line-by-line linenum = 0 format_str = '' for line in open(filename): # for the first line, figure out the file format if not linenum: if not format and line.startswith("#format:"): format = line[len("#format:"):].strip() dprint(1, "file contains format header:", format) # set default format if format is None: format = DefaultDMSFormatString # is the format a string rather than a dict? Turn it into a dict then if isinstance(format, str): format_str = format # make list of fieldname,fieldnumber tuples fields = [(field, i) for i, field in enumerate(format.split())] if not fields: raise ValueError("illegal format string in file: '%s'" % format) # last fieldname can end with ... 
to indicate that it absorbs the rest of the line if fields[-1][0].endswith('...'): fields[-1] = (fields[-1][0][:-3], slice(fields[-1][1], None)) # make format dict format = dict(fields) elif not isinstance(format, dict): raise TypeError("invalid 'format' argument of type %s" % (type(format))) # nf = max(format.itervalues())+1 # fields = ['---']*nf # for field,number in format.iteritems(): # fields[number] = field # format_str = " ".join(fields) # get list of custom attributes from format custom_attrs = [] for name, col in format.items(): if name.startswith(":"): m = re.match("^:(bool|int|float|complex|str):([\w]+)$", name) if not m: raise TypeError("invalid field specification '%s' in format string" % name) custom_attrs.append((eval(m.group(1)), m.group(2), col)) # get minimum necessary fields from format name_field = format.get('name', None) # flux i_field, i_err_field = get_field("i") if i_field is None: raise ValueError("ASCII format specification lacks mandatory flux field ('i')") # main RA field ra_field, ra_scale, ra_err_field, ra_err_scale = get_ang_field('ra', ANGULAR_UNITS_RA) if ra_field is None: raise ValueError("ASCII format specification lacks mandatory Right Ascension field ('ra_h', 'ra_d' or 'ra_rad')") # main Dec field dec_field, dec_scale, dec_err_field, dec_err_scale = get_ang_field('dec', ANGULAR_UNITS_DEC) if dec_field is None: raise ValueError("ASCII format specification lacks mandatory Declination field ('dec_d' or 'dec_rad')") # polarization as QUV quv_fields = [get_field(x) for x in ['q', 'u', 'v']] # linear polarization as fraction and angle polfrac_field = format.get('pol_frac', None) if polfrac_field is not None: polpa_field, polpa_scale = format.get('pol_pa_d', None), (math.pi / 180) if not polpa_field is not None: polpa_field, polpa_scale = format.get('pol_pa_rad', None), 1 # fields for extent parameters extent_fields = [get_ang_field(x, ANGULAR_UNITS) for x in ('emaj', 'emin', 'pa')] # all three must be present, else ignore if any([x[0] is None for x in extent_fields]): extent_fields = None # fields for reference freq and RM and SpI freq0_field = format.get('freq0', None) rm_field, rm_err_field = get_field('rm') spi_fields = [get_field('spi')] + [get_field('spi%d' % i) for i in range(2, 10)] tags_slice = format.get('tags', None) # now go on to process the line linenum += 1 try: # strip whitespace line = line.strip() dprintf(4, "%s:%d: read line '%s'\n", filename, linenum, line) # skip empty or commented lines if not line or line[0] == '#': continue # split (at whitespace) into fields fields = line.split() # get name name = fields[name_field] if name_field is not None else str(len(srclist) + 1) i = getval(i_field) i_err = getval(i_err_field) # get position: RA ra = getval(ra_field) ra_err = getval(ra_err_field, ra_scale) if 'ra_m' in format: ra += float(fields[format['ra_m']]) / 60. if 'ra_s' in format: ra += float(fields[format['ra_s']]) / 3600. ra *= ra_scale # position: Dec. Separate treatment of sign dec = abs(getval(dec_field)) dec_err = getval(dec_err_field, dec_scale) if 'dec_m' in format: dec += float(fields[format['dec_m']]) / 60. if 'dec_s' in format: dec += float(fields[format['dec_s']]) / 3600. 
if fields[format.get('dec_sign', dec_field)][0] == '-': dec = -dec dec *= dec_scale # for up position object pos = ModelClasses.Position(ra, dec, ra_err=ra_err, dec_err=dec_err) # see if we have freq0 # Use explicitly provided reference frequency for this source if available f0 = None if freq0_field is not None: try: f0 = float(fields[freq0_field]) # If no default reference frequency for the model was supplied, # initialise from first source with a reference frequency if freq0 is None: freq0 = f0 dprint(0, "Set default freq0 to %s " "from source on line %s." % (f0, linenum)) except IndexError: f0 = None # Otherwise use default reference frequency (derived from args # or first reference frequency found in source) if f0 is None and freq0 is not None: f0 = freq0 # see if we have Q/U/V (q, q_err), (u, u_err), (v, v_err) = [(getval(x), getval(x_err)) for x, x_err in quv_fields] if polfrac_field is not None: pf = fields[polfrac_field] pf = float(pf[:-1]) / 100 if pf.endswith("%") else float(pf) ppa = float(fields[polpa_field]) * polpa_scale if polpa_field is not None else 0 q = i * pf * math.cos(2 * ppa) u = i * pf * math.sin(2 * ppa) v = 0 # see if we have RM as well. Create flux object (unpolarized, polarized, polarized w/RM) rm, rm_err = getval(rm_field), getval(rm_err_field) if q is None: flux = ModelClasses.Polarization(i, 0, 0, 0, I_err=i_err) elif f0 is None or rm is None: flux = ModelClasses.Polarization(i, q, u, v, I_err=i_err, Q_err=q_err, U_err=u_err, V_err=v_err) else: flux = ModelClasses.PolarizationWithRM(i, q, u, v, rm, f0, I_err=i_err, Q_err=q_err, U_err=u_err, V_err=v_err, rm_err=rm_err) # see if we have a spectral index if f0 is None: spectrum = None else: spi = [getval(x) for x, xerr in spi_fields] spi_err = [getval(xerr) for x, xerr in spi_fields] dprint(4, name, "spi is", spi, "err is", spi_err) # if any higher-order spectral terms are specified, include them here but trim off all trailing zeroes while spi and not spi[-1]: del spi[-1] del spi_err[-1] if not spi: spectrum = None elif len(spi) == 1: spectrum = ModelClasses.SpectralIndex(spi[0], f0) if spi_err[0] is not None: spectrum.spi_err = spi_err[0] else: spectrum = ModelClasses.SpectralIndex(spi, f0) if any([x is not None for x in spi_err]): spectrum.spi_err = spi_err # see if we have extent parameters ex = ey = pa = 0 if extent_fields: ex, ey, pa = [(getval(x[0], x[1]) or 0) for x in extent_fields] extent_errors = [getval(x[2], x[3]) for x in extent_fields] # form up shape object if (ex or ey) and max(ex, ey) >= min_extent: shape = ModelClasses.Gaussian(ex, ey, pa) for ifield, field in enumerate(['ex', 'ey', 'pa']): if extent_errors[ifield] is not None: shape.setAttribute(field + "_err", extent_errors[ifield]) else: shape = None # get tags tagdict = {} if tags_slice: try: tags = fields[tags_slice] except IndexError: pass for tagstr1 in tags: for tagstr in tagstr1.split(","): if tagstr[0] == "+": tagname, value = tagstr[1:], True elif tagstr[0] == "-": tagname, value = tagstr[1:], False elif "=" in tagstr: tagname, value = tagstr.split("=", 1) if value[0] in "'\"" and value[-1] in "'\"": value = value[1:-1] else: try: value = float(value) except: continue else: tagname, value = tagstr, True tagdict[tagname] = value # OK, now form up the source object # now create a source object dprint(3, name, ra, dec, i, q, u, v) src = SkyModel.Source(name, pos, flux, shape=shape, spectrum=spectrum, **tagdict) # get custom attributes for type_, attr, column in custom_attrs: if column is not None and len(fields) > column: 
src.setAttribute(attr, type_(fields[column])) # add to source list srclist.append(src) # check if it's the brightest brightness = src.brightness() if brightness > maxbright: maxbright = brightness brightest_name = src.name radec0 = ra, dec except: dprintf(0, "%s:%d: %s, skipping\n", filename, linenum, str(sys.exc_info()[1])) if verbose: raise dprintf(2, "imported %d sources from file %s\n", len(srclist), filename) # create model model = ModelClasses.SkyModel(*srclist) if freq0 is not None: model.setRefFreq(freq0) # set model format model.setAttribute("ASCII_Format", format_str) # setup model center if center_on_brightest and radec0: dprintf(2, "brightest source is %s (%g Jy) at %f,%f\n", brightest_name, maxbright, *radec0) model.setFieldCenter(*radec0) # setup radial distances projection = Coordinates.Projection.SinWCS(*model.fieldCenter()) for src in model.sources: l, m = projection.lm(src.pos.ra, src.pos.dec) src.setAttribute('r', math.sqrt(l * l + m * m)) return model
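# Several loaders above attach an 'r' attribute: the source's radial distance from
# the field centre in the projection plane. Coordinates.Projection.SinWCS is not
# shown in this excerpt; a standalone sketch assuming the standard orthographic (SIN)
# forward projection:
import math

def sin_lm(ra, dec, ra0, dec0):
    """Assumed SIN projection of (ra, dec) about field centre (ra0, dec0); radians."""
    dra = ra - ra0
    l = math.cos(dec) * math.sin(dra)
    m = math.sin(dec) * math.cos(dec0) - math.cos(dec) * math.sin(dec0) * math.cos(dra)
    return l, m

ra0, dec0 = math.radians(60.0), math.radians(-30.0)
ra, dec = math.radians(60.2), math.radians(-29.9)
l, m = sin_lm(ra, dec, ra0, dec0)
print("r =", math.sqrt(l * l + m * m), "rad")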
def load (filename,format=None,freq0=None,center_on_brightest=False,min_extent=0,verbose=0,**kw):
    """Imports an ASCII table.

    The 'format' argument can be either a dict (such as the DefaultDMSFormat dict above), or a string such as
    DefaultDMSFormatString. (Other possible field names are "ra_d", "ra_rad", "dec_rad", "dec_sign".)
    If None is specified, DefaultDMSFormat is used.
    The 'freq0' argument supplies a default reference frequency (if one is not contained in the file.)
    If 'center_on_brightest' is True, the model field center will be set to the brightest source.
    'min_extent' is minimal source extent (in radians), above which a source will be treated as a Gaussian
    rather than a point component.
    """
    srclist = [];
    dprint(1,"importing ASCII DMS file",filename);
    # brightest source and its coordinates
    maxbright = 0;
    brightest_name = radec0 = None;

    # Get column number associated with field from format dict, as well as the error
    # column number. Returns tuple of indices, with None index indicating no such column
    def get_field (name):
        return format.get(name,None),format.get(name+"_err",None);

    # Get column number associated with field from format dict, as well as the error
    # column number. Field is an angle, thus will be suffixed with _{rad,d,h,m,s}.
    # Returns tuple of
    #   column,scale,err_column,err_scale
    # with a None index indicating no such column. Scale is the scaling factor to convert
    # the quantity in the column to radians
    def get_ang_field (name,units=ANGULAR_UNITS):
        column = err_column = colunit = errunit = None
        units = units or ANGULAR_UNITS;
        for unit,scale in units.iteritems():
            if column is None:
                column = format.get("%s_%s"%(name,unit));
                if column is not None:
                    colunit = scale;
            if err_column is None:
                err_column = format.get("%s_err_%s"%(name,unit))
                if err_column is not None:
                    errunit = scale;
        return column,colunit,err_column,errunit;

    # helper function: returns element #num from the fields list, multiplied by scale, or None if no such field
    def getval (num,scale=1):
        return None if ( num is None or len(fields) <= num ) else float(fields[num])*scale;

    # now process file line-by-line
    linenum = 0;
    format_str = ''
    for line in file(filename):
        # for the first line, figure out the file format
        if not linenum:
            if not format and line.startswith("#format:"):
                format = line[len("#format:"):].strip();
                dprint(1,"file contains format header:",format);
            # set default format
            if format is None:
                format = DefaultDMSFormatString;
            # is the format a string rather than a dict? Turn it into a dict then
            if isinstance(format,str):
                format_str = format;
                # make list of fieldname,fieldnumber tuples
                fields = [ (field,i) for i,field in enumerate(format.split()) ];
                if not fields:
                    raise ValueError,"illegal format string in file: '%s'"%format;
                # last fieldname can end with ... to indicate that it absorbs the rest of the line
                if fields[-1][0].endswith('...'):
                    fields[-1] = (fields[-1][0][:-3],slice(fields[-1][1],None));
                # make format dict
                format = dict(fields);
            elif not isinstance(format,dict):
                raise TypeError,"invalid 'format' argument of type %s"%(type(format))
            # nf = max(format.itervalues())+1;
            # fields = ['---']*nf;
            # for field,number in format.iteritems():
            #     fields[number] = field;
            # format_str = " ".join(fields);
            # get list of custom attributes from format
            custom_attrs = [];
            for name,col in format.iteritems():
                if name.startswith(":"):
                    m = re.match("^:(bool|int|float|complex|str):([\w]+)$",name);
                    if not m:
                        raise TypeError,"invalid field specification '%s' in format string"%name;
                    custom_attrs.append((eval(m.group(1)),m.group(2),col));
            # get minimum necessary fields from format
            name_field = format.get('name',None);
            # flux
            i_field,i_err_field = get_field("i");
            if i_field is None:
                raise ValueError,"ASCII format specification lacks mandatory flux field ('i')";
            # main RA field
            ra_field,ra_scale,ra_err_field,ra_err_scale = get_ang_field('ra',ANGULAR_UNITS_RA);
            if ra_field is None:
                raise ValueError,"ASCII format specification lacks mandatory Right Ascension field ('ra_h', 'ra_d' or 'ra_rad')";
            # main Dec field
            dec_field,dec_scale,dec_err_field,dec_err_scale = get_ang_field('dec',ANGULAR_UNITS_DEC);
            if dec_field is None:
                raise ValueError,"ASCII format specification lacks mandatory Declination field ('dec_d' or 'dec_rad')";
            # polarization as QUV
            quv_fields = [ get_field(x) for x in ['q','u','v'] ];
            # linear polarization as fraction and angle
            polfrac_field = format.get('pol_frac',None);
            if polfrac_field is not None:
                polpa_field,polpa_scale = format.get('pol_pa_d',None),(math.pi/180);
                if polpa_field is None:
                    polpa_field,polpa_scale = format.get('pol_pa_rad',None),1;
            # fields for extent parameters
            extent_fields = [ get_ang_field(x,ANGULAR_UNITS) for x in 'emaj','emin','pa' ];
            # all three must be present, else ignore
            if any( [ x[0] is None for x in extent_fields ] ):
                extent_fields = None;
            # fields for reference freq and RM and SpI
            freq0_field = format.get('freq0',None);
            rm_field,rm_err_field = get_field('rm');
            spi_fields = [ get_field('spi') ] + [ get_field('spi%d'%i) for i in range(2,10) ];
            tags_slice = format.get('tags',None);
        # now go on to process the line
        linenum += 1;
        try:
            # strip whitespace
            line = line.strip();
            dprintf(4,"%s:%d: read line '%s'\n",filename,linenum,line);
            # skip empty or commented lines
            if not line or line[0] == '#':
                continue;
            # split (at whitespace) into fields
            fields = line.split();
            # get name
            name = fields[name_field] if name_field is not None else str(len(srclist)+1);
            i = getval(i_field);
            i_err = getval(i_err_field);
            # get position: RA
            ra = getval(ra_field);
            ra_err = getval(ra_err_field,ra_scale);
            if 'ra_m' in format:
                ra += float(fields[format['ra_m']])/60.;
            if 'ra_s' in format:
                ra += float(fields[format['ra_s']])/3600.;
            ra *= ra_scale;
            # position: Dec. Separate treatment of sign
            dec = abs(getval(dec_field));
            dec_err = getval(dec_err_field,dec_scale);
            if 'dec_m' in format:
                dec += float(fields[format['dec_m']])/60.;
            if 'dec_s' in format:
                dec += float(fields[format['dec_s']])/3600.;
            if fields[format.get('dec_sign',dec_field)][0] == '-':
                dec = -dec;
            dec *= dec_scale;
            # form up position object
            pos = ModelClasses.Position(ra,dec,ra_err=ra_err,dec_err=dec_err);
            # see if we have freq0
            try:
                f0 = freq0 or (freq0_field and float(fields[freq0_field]));
            except IndexError:
                f0 = None;
            # set model reference frequency
            if f0 is not None and freq0 is None:
                freq0 = f0;
            # see if we have Q/U/V
            (q,q_err),(u,u_err),(v,v_err) = [ (getval(x),getval(x_err)) for x,x_err in quv_fields ];
            if polfrac_field is not None:
                pf = fields[polfrac_field];
                pf = float(pf[:-1])/100 if pf.endswith("%") else float(pf);
                ppa = float(fields[polpa_field])*polpa_scale if polpa_field is not None else 0;
                q = i*pf*math.cos(2*ppa);
                u = i*pf*math.sin(2*ppa);
                v = 0;
            # see if we have RM as well. Create flux object (unpolarized, polarized, polarized w/RM)
            rm,rm_err = getval(rm_field),getval(rm_err_field);
            if q is None:
                flux = ModelClasses.Polarization(i,0,0,0,I_err=i_err);
            elif f0 is None or rm is None:
                flux = ModelClasses.Polarization(i,q,u,v,I_err=i_err,Q_err=q_err,U_err=u_err,V_err=v_err);
            else:
                flux = ModelClasses.PolarizationWithRM(i,q,u,v,rm,f0,I_err=i_err,Q_err=q_err,U_err=u_err,V_err=v_err,rm_err=rm_err);
            # see if we have a spectral index
            if f0 is None:
                spectrum = None;
            else:
                spi = [ getval(x) for x,xerr in spi_fields ];
                spi_err = [ getval(xerr) for x,xerr in spi_fields ];
                dprint(4,name,"spi is",spi,"err is",spi_err)
                # if any higher-order spectral terms are specified, include them here, but trim off all trailing zeroes
                while spi and not spi[-1]:
                    del spi[-1];
                    del spi_err[-1]
                if not spi:
                    spectrum = None;
                elif len(spi) == 1:
                    spectrum = ModelClasses.SpectralIndex(spi[0],f0);
                    if spi_err[0] is not None:
                        spectrum.spi_err = spi_err[0];
                else:
                    spectrum = ModelClasses.SpectralIndex(spi,f0);
                    if any([ x is not None for x in spi_err ]):
                        spectrum.spi_err = spi_err;
            # see if we have extent parameters
            ex = ey = pa = 0;
            if extent_fields:
                ex,ey,pa = [ ( getval(x[0],x[1]) or 0 ) for x in extent_fields ];
                extent_errors = [ getval(x[2],x[3]) for x in extent_fields ];
            # form up shape object
            if (ex or ey) and max(ex,ey) >= min_extent:
                shape = ModelClasses.Gaussian(ex,ey,pa);
                for ifield,field in enumerate(['ex','ey','pa']):
                    if extent_errors[ifield] is not None:
                        shape.setAttribute(field+"_err",extent_errors[ifield]);
            else:
                shape = None;
            # get tags
            tagdict = {};
            if tags_slice:
                try:
                    tags = fields[tags_slice];
                except IndexError:
                    pass;
                for tagstr1 in tags:
                    for tagstr in tagstr1.split(","):
                        if tagstr[0] == "+":
                            tagname,value = tagstr[1:],True;
                        elif tagstr[0] == "-":
                            tagname,value = tagstr[1:],False;
                        elif "=" in tagstr:
                            tagname,value = tagstr.split("=",1);
                            if value[0] in "'\"" and value[-1] in "'\"":
                                value = value[1:-1];
                            else:
                                try:
                                    value = float(value);
                                except:
                                    continue;
                        else:
                            tagname,value = tagstr,True;
                        tagdict[tagname] = value;
            # OK, now form up the source object
            dprint(3,name,ra,dec,i,q,u,v);
            src = SkyModel.Source(name,pos,flux,shape=shape,spectrum=spectrum,**tagdict);
            # get custom attributes
            for type_,attr,column in custom_attrs:
                if column is not None and len(fields) > column:
                    src.setAttribute(attr,type_(fields[column]));
            # add to source list
            srclist.append(src);
            # check if it's the brightest
            brightness = src.brightness();
            if brightness > maxbright:
                maxbright = brightness;
                brightest_name = src.name;
                radec0 = ra,dec;
        except:
            if verbose:
                traceback.print_exc();
            dprintf(0,"%s:%d: %s, skipping\n",filename,linenum,str(sys.exc_info()[1]));
    dprintf(2,"imported %d sources from file %s\n",len(srclist),filename);
    # create model
    model = ModelClasses.SkyModel(*srclist);
    if freq0 is not None:
        model.setRefFreq(freq0);
    # set model format
    model.setAttribute("ASCII_Format",format_str);
    # setup model center
    if center_on_brightest and radec0:
        dprintf(2,"brightest source is %s (%g Jy) at %f,%f\n",brightest_name,maxbright,*radec0);
        model.setFieldCenter(*radec0);
    # setup radial distances
    projection = Coordinates.Projection.SinWCS(*model.fieldCenter());
    for src in model.sources:
        l,m = projection.lm(src.pos.ra,src.pos.dec);
        src.setAttribute('r',math.sqrt(l*l+m*m));
    return model;
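# Illustrative sketch (not part of the original module): an input file using the
# error-column (<field>_err), custom-attribute (":<type>:<name>") and tag conventions
# that this loader understands. The column names come from the code above; the source
# name and all values in the data line are invented.
#
#   #format: name ra_h ra_m ra_s dec_d dec_m dec_s i i_err spi freq0 :str:field_id tags...
#   3C286 13 31 08.29 +30 30 32.9 14.9 0.3 -0.5 1.4e9 FLD1 +calibrator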
def load(filename, freq0=None, center_on_brightest=False, **kw):
    """Imports a BBS catalog file.

    The 'freq0' argument supplies a default reference frequency (if one is not contained in the file.)
    If 'center_on_brightest' is True, the model field centre will be set to the brightest source,
    else to the centre of the first patch.
    """
    srclist = []
    dprint(1, "importing BBS source table", filename)
    # read file
    ff = open(filename)
    # first line must be a format string: extract it
    line0 = ff.readline().strip()
    match = re.match(r"#\s*\((.+)\)\s*=\s*format", line0)
    if not match:
        raise ValueError("line 1 is not a valid format specification")
    format_str = match.group(1)
    # create format parser from this string
    parser = CatalogParser(format_str)
    # check for mandatory fields
    for field in "Name", "Type":
        if not parser.defines(field):
            raise ValueError("Table lacks mandatory field '%s'" % field)
    # brightest source and its coordinates
    maxbright = 0
    brightest_name = radec0 = None
    patches = []
    ref_freq = freq0
    # now process file line-by-line
    linenum = 1
    for line in ff:
        linenum += 1
        try:
            # parse one line
            dprint(4, "read line:", line)
            catline = parser.parse(line, linenum)
            if not catline:
                continue
            dprint(5, "line %d: " % linenum, catline.__dict__)
            # is it a patch record?
            patchname = getattr(catline, 'Patch', '')
            if not catline.Name:
                dprintf(2, "%s:%d: patch %s\n", filename, linenum, patchname)
                patches.append((patchname, catline.ra_rad, catline.dec_rad))
                continue
            # form up name
            name = "%s:%s" % (patchname, catline.Name) if patchname else catline.Name
            # check source type
            stype = catline.Type.upper()
            if stype not in ("POINT", "GAUSSIAN"):
                raise ValueError("unsupported source type %s" % stype)
            # see if we have freq0
            if freq0:
                f0 = freq0
            elif hasattr(catline, 'ReferenceFrequency'):
                f0 = float(catline.ReferenceFrequency or '0')
            else:
                f0 = None
            # set model reference frequency
            if f0 is not None and ref_freq is None:
                ref_freq = f0
            # see if we have Q/U/V
            i, q, u, v = [float(getattr(catline, stokes, '0') or '0') for stokes in "IQUV"]
            # see if we have RM as well. Create flux object (unpolarized, polarized, polarized w/RM)
            if f0 is not None and hasattr(catline, 'RotationMeasure'):
                flux = ModelClasses.PolarizationWithRM(i, q, u, v, float(catline.RotationMeasure or '0'), f0)
            else:
                flux = ModelClasses.Polarization(i, q, u, v)
            # see if we have a spectral index
            if f0 is not None and hasattr(catline, 'SpectralIndex:0'):
                spectrum = ModelClasses.SpectralIndex(float(getattr(catline, 'SpectralIndex:0') or '0'), f0)
            else:
                spectrum = None
            # see if we have extent parameters
            if stype == "GAUSSIAN":
                ex = float(getattr(catline, "MajorAxis", "0") or "0")
                ey = float(getattr(catline, "MinorAxis", "0") or "0")
                pa = float(getattr(catline, "Orientation", "0") or "0")
                shape = ModelClasses.Gaussian(ex, ey, pa)
            else:
                shape = None
            # create tags
            tags = {}
            for field in "Patch", "Category":
                if hasattr(catline, field):
                    tags['BBS_%s' % field] = getattr(catline, field)
            # OK, now form up the source object
            # position
            pos = ModelClasses.Position(catline.ra_rad, catline.dec_rad)
            # now create a source object
            src = SkyModel.Source(name, pos, flux, shape=shape, spectrum=spectrum, **tags)
            srclist.append(src)
            # check if it's the brightest
            brightness = src.brightness()
            if brightness > maxbright:
                maxbright = brightness
                brightest_name = src.name
                radec0 = catline.ra_rad, catline.dec_rad
        except:
            dprintf(0, "%s:%d: %s, skipping\n", filename, linenum, str(sys.exc_info()[1]))
    dprintf(2, "imported %d sources from file %s\n", len(srclist), filename)
    # create model
    model = ModelClasses.SkyModel(*srclist)
    if ref_freq is not None:
        model.setRefFreq(ref_freq)
    # setup model center
    if center_on_brightest and radec0:
        dprintf(2, "setting model centre to brightest source %s (%g Jy) at %f,%f\n", brightest_name, maxbright, *radec0)
        model.setFieldCenter(*radec0)
    elif patches:
        name, ra, dec = patches[0]
        dprintf(2, "setting model centre to first patch %s at %f,%f\n", name, ra, dec)
        model.setFieldCenter(ra, dec)
    # map patches to model tags
    model.setAttribute("BBS_Patches", patches)
    model.setAttribute("BBS_Format", format_str)
    # setup radial distances
    projection = Coordinates.Projection.SinWCS(*model.fieldCenter())
    for src in model.sources:
        l, m = projection.lm(src.pos.ra, src.pos.dec)
        src.setAttribute('r', math.sqrt(l * l + m * m))
    return model
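# Illustrative sketch (not part of the original module): the only hard requirement on the
# input file is that line 1 matches the "(...) = format" pattern tested above, e.g.
#
#   # (Name, Type, Patch, Ra, Dec, I, Q, U, V, ReferenceFrequency, SpectralIndex:0) = format
#
# Subsequent lines are handed to CatalogParser.parse() in that column order; a record whose
# Name field is empty is treated as a patch definition, everything else as a POINT or
# GAUSSIAN source. How individual columns are delimited and quoted is up to CatalogParser
# and is not shown here.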
def save(model, filename, sources=None, format=None, **kw):
    """Exports model to a BBS catalog file"""
    if sources is None:
        sources = model.sources
    dprintf(2, "writing %d model sources to BBS file %s\n", len(sources), filename)
    # create catalog parser based on either specified format, or the model format, or the default format
    format = format or getattr(model, 'BBS_Format',
                               "Name, Type, Patch, Ra, Dec, I, Q, U, V, ReferenceFrequency, SpectralIndexDegree='0', SpectralIndex:0='0.0', MajorAxis, MinorAxis, Orientation")
    dprint(2, "format string is", format)
    parser = CatalogParser(format)
    # check for mandatory fields
    for field in "Name", "Type":
        if not parser.defines(field):
            raise ValueError("Output format lacks mandatory field '%s'" % field)
    # open file
    ff = open(filename, mode="wt")
    ff.write("# (%s) = format\n# The above line defines the field order and is required.\n\n" % format)
    # write patches
    for name, ra, dec in getattr(model, "BBS_Patches", []):
        catline = parser.newline()
        catline.Patch = name
        catline.setPosition(ra, dec)
        ff.write(catline.makeStr() + "\n")
    ff.write("\n")
    # write sources
    nsrc = 0
    for src in sources:
        catline = parser.newline()
        # type
        if src.shape is None:
            catline.Type = "POINT"
        elif isinstance(src.shape, ModelClasses.Gaussian):
            catline.Type = "GAUSSIAN"
        else:
            dprint(3, "skipping source '%s': non-supported type '%s'" % (src.name, src.shape.typecode))
            continue
        # name and patch
        name = src.name
        patch = getattr(src, 'BBS_Patch', '')
        if patch and name.startswith(patch + ':'):
            name = name[(len(patch) + 1):]
        catline.Name = name
        setattr(catline, 'Patch', patch)
        # position
        catline.setPosition(src.pos.ra, src.pos.dec)
        # fluxes
        for stokes in "IQUV":
            setattr(catline, stokes, str(getattr(src.flux, stokes, 0.)))
        # reference freq
        freq0 = (src.spectrum and getattr(src.spectrum, 'freq0', None)) or getattr(src.flux, 'freq0', None)
        if freq0 is not None:
            setattr(catline, 'ReferenceFrequency', str(freq0))
        # RM, spi
        if isinstance(src.spectrum, ModelClasses.SpectralIndex):
            setattr(catline, 'SpectralIndexDegree', '0')
            setattr(catline, 'SpectralIndex:0', str(src.spectrum.spi))
        if isinstance(src.flux, ModelClasses.PolarizationWithRM):
            setattr(catline, 'RotationMeasure', str(src.flux.rm))
        # shape
        if isinstance(src.shape, ModelClasses.Gaussian):
            setattr(catline, 'MajorAxis', src.shape.ex)
            setattr(catline, 'MinorAxis', src.shape.ey)
            setattr(catline, 'Orientation', src.shape.pa)
        # write line
        ff.write(catline.makeStr() + "\n")
        nsrc += 1
    ff.close()
    dprintf(1, "wrote %d sources to file %s\n", nsrc, filename)
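# Illustrative usage sketch (not part of the original module): converting a BBS catalog to
# the ASCII text format, assuming the two format modules are importable under these names
# (the file names are invented).
#
#   from Tigger.Models.Formats import BBS, ASCII
#   model = BBS.load("sky.catalog")
#   ASCII.save(model, "sky.txt")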
def load (filename,import_src=True,import_cc=True,min_extent=0,**kw):
    """Imports a NEWSTAR MDL file.

    min_extent is minimal source extent (in radians), above which a source will be treated as a Gaussian
    rather than a point component.
    import_src=False causes source components to be omitted.
    import_cc=False causes clean components to be omitted.
    """;
    srclist = [];
    dprint(1,"importing NEWSTAR file",filename);
    # build the LSM from a NEWSTAR .MDL model file
    ff = open(filename,mode="rb");
    ### read GFH and MDH headers -- 512 bytes
    try:
        gfh = numpy.fromfile(ff,dtype=numpy.uint8,count=512);
        mdh = numpy.fromfile(ff,dtype=numpy.uint8,count=64);
        # parse headers
        ftype,fhlen,fver,crdate,crtime,rrdate,rrtime,rcount,nname = parseGFH(gfh);
        if ftype != ".MDL":
            raise TypeError;
        maxlin,modptr,nsources,mtype,mepoch,ra0,dec0,freq0 = parseMDH(mdh);
        beam_const = 65*1e-9*freq0;
        ## temp dict to hold unique nodenames
        unamedict = {}
        ### Models -- 56 bytes each
        for ii in xrange(0,nsources):
            mdl = numpy.fromfile(ff,dtype=numpy.uint8,count=56)
            ### source parameters
            sI,ll,mm,id,sQ,sU,sV,eX,eY,eP,SI,RM = struct.unpack('fffiffffffff',mdl[0:48])
            ### type bits
            bit1,bit2 = struct.unpack('BB',mdl[52:54]);
            # convert fluxes
            sI *= 0.005  # convert from WU to Jy (1WU=5mJy)
            sQ *= sI;
            sU *= sI;
            sV *= sI;
            # Interpret bitflags 1: bit 0= extended; bit 1= Q|U|V <>0 and no longer used according to Wim
            fl_ext = bit1&1;
            # Interpret bitflags 2: bit 0= clean component; bit 3= beamed
            fl_cc = bit2&1;
            fl_beamed = bit2&8;
            ### extended source params: in arcsec, so multiply by ???
            if fl_ext:
                ## the procedure is NMOEXT in nscan/nmoext.for
                if eP == 0 and eX == eY:
                    r0 = 0
                else:
                    r0 = .5*math.atan2(-eP,eY-eX)
                r1 = math.sqrt(eP*eP+(eX-eY)*(eX-eY))
                r2 = eX+eY
                eX = 2*math.sqrt(abs(0.5*(r2+r1)))
                eY = 2*math.sqrt(abs(0.5*(r2-r1)))
                eP = r0
            # NEWSTAR MDL lists might have the same source twice if they are
            # clean components, so make a unique name for them
            bname = 'N'+str(id);
            if unamedict.has_key(bname):
                uniqname = bname+'_'+str(unamedict[bname])
                unamedict[bname] += 1
            else:
                uniqname = bname
                unamedict[bname] = 1
            # compose source information
            pos = ModelClasses.Position(*lm_ncp_to_radec(ra0,dec0,ll,mm));
            flux = ModelClasses.PolarizationWithRM(sI,sQ,sU,sV,RM,freq0);
            spectrum = ModelClasses.SpectralIndex(SI,freq0);
            tags = {};
            # work out beam gain and apparent flux
            tags['_lm_ncp'] = (ll,mm);
            tags['_newstar_r'] = tags['r'] = r = math.sqrt(ll*ll+mm*mm);
            tags['newstar_beamgain'] = bg = max(math.cos(beam_const*r)**6,.01);
            tags['newstar_id'] = id;
            if fl_beamed:
                tags['Iapp'] = sI*bg;
                tags['newstar_beamed'] = True;
                tags['flux_intrinsic'] = True;
            else:
                tags['flux_apparent'] = True;
            # make some tags based on model flags
            if fl_cc:
                tags['newstar_cc'] = True;
            # make shape if extended
            if fl_ext and max(eX,eY) >= min_extent:
                shape = ModelClasses.Gaussian(eX,eY,eP);
            else:
                shape = None;
            # create the source
            src = SkyModel.Source(uniqname,pos,flux,shape=shape,spectrum=spectrum,**tags);
            srclist.append(src);
    except:
        traceback.print_exc();
        raise TypeError("%s does not appear to be a valid NEWSTAR MDL file"%filename);
    dprintf(2,"imported %d sources from file %s\n",len(srclist),filename);
    return ModelClasses.SkyModel(ra0=ra0,dec0=dec0,freq0=freq0,pbexp='max(cos(65*1e-9*fq*r)**6,.01)',*srclist);
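# Illustrative sketch (not part of the original module): the WSRT primary-beam gain model
# used above when tagging beamed sources, i.e. max(cos(65e-9*freq*r)**6, .01), with r the
# radial distance from the field centre in radians and freq in Hz. This mirrors the
# 'pbexp' expression attached to the returned SkyModel.
def _newstar_beamgain_example(r, freq_hz):
    import math
    return max(math.cos(65e-9 * freq_hz * r) ** 6, 0.01)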
def save (model,filename,freq0=None,sources=None,**kw):
    """Saves model to a NEWSTAR MDL file.

    'sources' is a list of sources to write; if None, then model.sources is used.
    """
    if sources is None:
        sources = model.sources;
    dprintf(2,"writing %d model sources to NEWSTAR file %s\n",len(sources),filename);
    ra0,dec0 = model.fieldCenter();
    freq0 = freq0 or model.refFreq();
    # if freq0 is not specified, scan sources
    if freq0 is None:
        for src in sources:
            freq0 = (src.spectrum and getattr(src.spectrum,'freq0',None)) or getattr(src.flux,'freq0',None);
            if freq0:
                break;
        else:
            raise ValueError("unable to determine NEWSTAR model reference frequency, please specify one explicitly.");
    ff = open(filename,mode="wb");
    ### create GFH header
    gfh = numpy.zeros(512,dtype=numpy.uint8);
    datestr = time.strftime("%d-%m-%Y");
    timestr = time.strftime("%H:%M");
    struct.pack_into("4sii11s5s11s5si80sB",gfh,0,".MDL",512,1,
        datestr,timestr,datestr,timestr,0,
        os.path.splitext(os.path.basename(filename))[0],6);  # 6=datatype
    # link1/link2 gives the header size actually
    struct.pack_into("ii",gfh,152,512,512);
    gfh.tofile(ff);
    # create MDH header
    mdh = numpy.zeros(64,dtype=numpy.uint8);
    struct.pack_into('iiii',mdh,12,1,576,0,2);  # maxlin,pointer,num_sources,mtype
    struct.pack_into('f',mdh,28,getattr(model,'epoch',2000));
    struct.pack_into('ddd',mdh,32,ra0/(2*math.pi),dec0/(2*math.pi),freq0*1e-6);
    mdh.tofile(ff);
    # get the max ID, if specified
    max_id = max([ getattr(src,'newstar_id',0) for src in sources ]);
    # now loop over model sources
    # count how many are written out -- only point sources and gaussians are actually written out, the rest are skipped
    nsrc = 0;
    for src in sources:
        # create empty newstar source structure
        mdl = numpy.zeros(56,dtype=numpy.uint8);
        if src.shape and not isinstance(src.shape,ModelClasses.Gaussian):
            dprint(3,"skipping source '%s': non-supported type '%s'"%(src.name,src.shape.typecode));
            continue;
        stI = src.flux.I;
        # get l,m NCP position -- either from tag, or compute
        lm = getattr(src,'_lm_ncp',None);
        if lm:
            if isinstance(lm,(tuple,list)) and len(lm) == 2:
                l,m = lm;
            else:
                dprint(0,"warning: skipping source '%s' because its _lm_ncp attribute is malformed (tuple of 2 values expected)"%src.name);
                continue;
        else:
            l,m = radec_to_lm_ncp(ra0,dec0,src.pos.ra,src.pos.dec);
        # update source count
        nsrc += 1;
        # generate source id
        src_id = getattr(src,'newstar_id',None);
        if src_id is None:
            src_id = max_id = max_id+1;
        # encode position, flux, identifier -- also, convert flux from Jy to WU (1WU=5mJy)
        struct.pack_into('fffi',mdl,0,stI/0.005,l,m,src_id);
        # encode fractional polarization
        struct.pack_into('fff',mdl,16,*[ getattr(src.flux,stokes,0.0)/stI for stokes in "QUV" ]);
        ## encode flag & type bits
        ## Flag: bit 0= extended; bit 1= Q|U|V <>0 and no longer used according to Wim
        ## Type: bit 0= clean component; bit 3= beamed
        beamed = getattr(src,'flux_intrinsic',False) or getattr(src,'newstar_beamed',False);
        struct.pack_into('BB',mdl,52,
            1 if src.shape else 0,
            (1 if getattr(src,'newstar_cc',False) else 0) | (8 if beamed else 0));
        ### extended source parameters
        if src.shape:
            ## the procedure is NMOEXF in nscan/nmoext.for
            R0 = math.cos(src.shape.pa);
            R1 = -math.sin(src.shape.pa);
            R2 = (.5*src.shape.ex)**2;
            R3 = (.5*src.shape.ey)**2;
            ex = R2*R1*R1+R3*R0*R0
            ey = R2*R0*R0+R3*R1*R1
            pa = 2*(R2-R3)*R0*R1
            struct.pack_into('fff',mdl,28,ex,ey,pa);
        ### spectral index
        if isinstance(src.spectrum,ModelClasses.SpectralIndex):
            struct.pack_into('f',mdl,40,src.spectrum.spi);
        if isinstance(src.flux,ModelClasses.PolarizationWithRM):
            struct.pack_into('f',mdl,44,src.flux.rm);
        mdl.tofile(ff);
    # update MDH header with the new number of sources
    struct.pack_into('i',mdh,20,nsrc);
    ff.seek(512);
    mdh.tofile(ff);
    ff.close();
    dprintf(1,"wrote %d sources to file %s\n",nsrc,filename);
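# Illustrative sketch (not part of the original module): NEWSTAR stores Stokes I in
# Westerbork Units (1 WU = 5 mJy), which is why load() above multiplies by 0.005 and
# save() divides by it; Q/U/V are stored as fractions of I rather than in Jy.
def _wu_to_jy_example(flux_wu):
    return flux_wu * 0.005

def _jy_to_wu_example(flux_jy):
    return flux_jy / 0.005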
def load (filename,format=None,freq0=None,center_on_brightest=False,min_extent=0,**kw):
    """Imports an ASCII table.

    The 'format' argument can be either a dict (such as the DefaultDMSFormat dict above), or a string such as
    DefaultDMSFormatString. (Other possible field names are "ra_d", "ra_rad", "dec_rad", "dec_sign".)
    If None is specified, DefaultDMSFormat is used.
    The 'freq0' argument supplies a default reference frequency (if one is not contained in the file.)
    If 'center_on_brightest' is True, the model field center will be set to the brightest source.
    'min_extent' is minimal source extent (in radians), above which a source will be treated as a Gaussian
    rather than a point component.
    """
    srclist = [];
    dprint(1,"importing ASCII DMS file",filename);
    # brightest source and its coordinates
    maxbright = 0;
    brightest_name = radec0 = None;
    # now process file line-by-line
    linenum = 0;
    format_str = ''
    for line in file(filename):
        # for the first line, figure out the file format
        if not linenum:
            if not format and line.startswith("#format:"):
                format = line[len("#format:"):].strip();
                dprint(1,"file contains format header:",format);
            # set default format
            if format is None:
                format = DefaultDMSFormatString;
            # is the format a string rather than a dict? Turn it into a dict then
            if isinstance(format,str):
                format_str = format;
                # make list of fieldname,fieldnumber tuples
                fields = [ (field,i) for i,field in enumerate(format.split()) ];
                if not fields:
                    raise ValueError,"illegal format string in file: '%s'"%format;
                # last fieldname can end with ... to indicate that it absorbs the rest of the line
                if fields[-1][0].endswith('...'):
                    fields[-1] = (fields[-1][0][:-3],slice(fields[-1][1],None));
                # make format dict
                format = dict(fields);
            elif not isinstance(format,dict):
                raise TypeError,"invalid 'format' argument of type %s"%(type(format))
            nf = max(format.itervalues())+1;
            fields = ['---']*nf;
            for field,number in format.iteritems():
                fields[number] = field;
            format_str = " ".join(fields);
            # get minimum necessary fields from format
            name_field = format.get('name',None);
            # flux
            try:
                i_field = format['i'];
            except KeyError:
                raise ValueError,"ASCII format specification lacks mandatory flux field ('i')";
            # main RA field
            if 'ra_h' in format:
                ra_field,ra_scale = format['ra_h'],(math.pi/12);
            elif 'ra_d' in format:
                ra_field,ra_scale = format['ra_d'],(math.pi/180);
            elif 'ra_rad' in format:
                ra_field,ra_scale = format['ra_rad'],1.;
            else:
                raise ValueError,"ASCII format specification lacks mandatory Right Ascension field ('ra_h', 'ra_d' or 'ra_rad')";
            # main Dec field
            if 'dec_d' in format:
                dec_field,dec_scale = format['dec_d'],(math.pi/180);
            elif 'dec_rad' in format:
                dec_field,dec_scale = format['dec_rad'],1.;
            else:
                raise ValueError,"ASCII format specification lacks mandatory Declination field ('dec_d' or 'dec_rad')";
            # polarization as QUV
            try:
                quv_fields = [ format[x] for x in ['q','u','v'] ];
            except KeyError:
                quv_fields = None;
            # linear polarization as fraction and angle
            polfrac_field = format.get('pol_frac',None);
            if polfrac_field is not None:
                polpa_field,polpa_scale = format.get('pol_pa_d',None),(math.pi/180);
                if polpa_field is None:
                    polpa_field,polpa_scale = format.get('pol_pa_rad',None),1;
            # fields for extent parameters
            ext_fields = [];
            for ext in 'emaj','emin','pa':
                for field,scale in (ext,1.),(ext+"_rad",1.),(ext+'_d',DEG),(ext+'_m',DEG/60),(ext+'_s',DEG/3600):
                    if field in format:
                        ext_fields.append((format[field],scale));
                        break;
            # if not all three accumulated, ignore
            if len(ext_fields) != 3:
                ext_fields = None;
            # fields for reference freq and RM and SpI
            freq0_field = format.get('freq0',None);
            rm_field = format.get('rm',None);
            spi_field = format.get('spi',None);
            spi2_field = [ format.get('spi%d'%i,None) for i in range(2,10) ];
            tags_slice = format.get('tags',None);
        # now go on to process the line
        linenum += 1;
        try:
            # strip whitespace
            line = line.strip();
            dprintf(4,"%s:%d: read line '%s'\n",filename,linenum,line);
            # skip empty or commented lines
            if not line or line[0] == '#':
                continue;
            # split (at whitespace) into fields
            fields = line.split();
            # get name
            name = fields[name_field] if name_field is not None else str(len(srclist)+1);
            i = float(fields[i_field]);
            # get position: RA
            ra = float(fields[ra_field]);
            if 'ra_m' in format:
                ra += float(fields[format['ra_m']])/60.;
            if 'ra_s' in format:
                ra += float(fields[format['ra_s']])/3600.;
            ra *= ra_scale;
            # position: Dec. Separate treatment of sign
            dec = abs(float(fields[dec_field]));
            if 'dec_m' in format:
                dec += float(fields[format['dec_m']])/60.;
            if 'dec_s' in format:
                dec += float(fields[format['dec_s']])/3600.;
            if fields[format.get('dec_sign',dec_field)][0] == '-':
                dec = -dec;
            dec *= dec_scale;
            # see if we have freq0
            try:
                f0 = freq0 or (freq0_field and float(fields[freq0_field]));
            except IndexError:
                f0 = None;
            # set model reference frequency
            if f0 is not None and freq0 is None:
                freq0 = f0;
            # see if we have Q/U/V
            q = u = v = None;
            if quv_fields:
                try:
                    q,u,v = map(float,[fields[x] for x in quv_fields]);
                except IndexError:
                    pass;
            if polfrac_field is not None:
                pf = fields[polfrac_field];
                pf = float(pf[:-1])/100 if pf.endswith("%") else float(pf);
                ppa = float(fields[polpa_field])*polpa_scale if polpa_field is not None else 0;
                q = i*pf*math.cos(2*ppa);
                u = i*pf*math.sin(2*ppa);
                v = 0;
            # see if we have RM as well. Create flux object (unpolarized, polarized, polarized w/RM)
            if q is None:
                flux = ModelClasses.Polarization(i,0,0,0);
            elif f0 is None or rm_field is None or rm_field >= len(fields):
                flux = ModelClasses.Polarization(i,q,u,v);
            else:
                flux = ModelClasses.PolarizationWithRM(i,q,u,v,float(fields[rm_field]),f0);
            # see if we have a spectral index
            if f0 is None or spi_field is None or spi_field >= len(fields):
                spectrum = None;
            else:
                spi = [ float(fields[spi_field]) ] + \
                    [ (float(fields[x]) if x is not None else 0) for x in spi2_field ];
                # if any higher-order spectral terms are specified, include them here,
                # but trim off all trailing zeroes
                while len(spi) > 1 and not spi[-1]:
                    del spi[-1];
                if len(spi) == 1:
                    spi = spi[0];
                spectrum = ModelClasses.SpectralIndex(spi,f0);
            # see if we have extent parameters
            ex = ey = pa = 0;
            if ext_fields:
                try:
                    ex,ey,pa = [ float(fields[num])*scale for num,scale in ext_fields ];
                except IndexError:
                    pass;
            # form up shape object
            if (ex or ey) and max(ex,ey) >= min_extent:
                shape = ModelClasses.Gaussian(ex,ey,pa);
            else:
                shape = None;
            # get tags
            tagdict = {};
            if tags_slice:
                try:
                    tags = fields[tags_slice];
                except IndexError:
                    pass;
                for tagstr1 in tags:
                    for tagstr in tagstr1.split(","):
                        if tagstr[0] == "+":
                            tagname,value = tagstr[1:],True;
                        elif tagstr[0] == "-":
                            tagname,value = tagstr[1:],False;
                        elif "=" in tagstr:
                            tagname,value = tagstr.split("=",1);
                            if value[0] in "'\"" and value[-1] in "'\"":
                                value = value[1:-1];
                            else:
                                try:
                                    value = float(value);
                                except:
                                    continue;
                        else:
                            tagname,value = tagstr,True;
                        tagdict[tagname] = value;
            # OK, now form up the source object
            # position
            pos = ModelClasses.Position(ra,dec);
            # now create a source object
            dprint(3,name,ra,dec,i,q,u,v);
            src = SkyModel.Source(name,pos,flux,shape=shape,spectrum=spectrum,**tagdict);
            srclist.append(src);
            # check if it's the brightest
            brightness = src.brightness();
            if brightness > maxbright:
                maxbright = brightness;
                brightest_name = src.name;
                radec0 = ra,dec;
        except:
            dprintf(0,"%s:%d: %s, skipping\n",filename,linenum,str(sys.exc_info()[1]));
    dprintf(2,"imported %d sources from file %s\n",len(srclist),filename);
    # create model
    model = ModelClasses.SkyModel(*srclist);
    if freq0 is not None:
        model.setRefFreq(freq0);
    # set model format
    model.setAttribute("ASCII_Format",format_str);
    # setup model center
    if center_on_brightest and radec0:
        dprintf(2,"brightest source is %s (%g Jy) at %f,%f\n",brightest_name,maxbright,*radec0);
        model.setFieldCenter(*radec0);
    # setup radial distances
    projection = Coordinates.Projection.SinWCS(*model.fieldCenter());
    for src in model.sources:
        l,m = projection.lm(src.pos.ra,src.pos.dec);
        src.setAttribute('r',math.sqrt(l*l+m*m));
    return model;
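# Illustrative usage sketch (not part of the original module): loading a file that carries
# no "#format:" header by passing the format string explicitly. The file name and column
# layout here are invented; every column name used comes from the fields handled above.
#
#   model = load("sources.txt", format="name ra_d dec_d i q u v freq0 spi")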