def test_init_by_type():
    """Check that class_by_type returns the proper system class for each type."""
    coord = (0. * u.deg, 0. * u.deg)
    cases = [('LLS', 2.0, 17.9, LLSSystem),
             ('DLA', 2.5, 20.55, DLASystem)]
    for abs_type, zabs, NHI, expected_cls in cases:
        system = pyasu.class_by_type(abs_type)(coord, zabs, None, NHI=NHI)
        assert isinstance(system, expected_cls)
def from_flist(cls, flist, tree=None, **kwargs):
    """Instantiate the survey from a list of .dat files (historical JXP format).

    Parameters
    ----------
    flist : str
        ASCII file listing the .dat files, one per row
    tree : str, optional
        Path prepended to flist and to each .dat file
    **kwargs
        Passed to __init__

    Returns
    -------
    survey : IGMSurvey
    """
    root = '' if tree is None else tree
    # Single unnamed column; each row names one .dat file
    dat_table = ascii.read(root + flist, data_start=0, guess=False,
                           format='no_header')
    survey = cls(**kwargs)
    survey.tree = root
    survey.flist = flist
    survey.dat_files = list(dat_table['col1'])
    # Build one absorption system per .dat file
    sys_cls = class_by_type(survey.abs_type)
    survey._abs_sys.extend(
        sys_cls.from_datfile(dfile, tree=survey.tree)
        for dfile in survey.dat_files)
    print('Read {:d} files from {:s} in the tree {:s}'.format(
        survey.nsys, survey.flist, survey.tree))
    return survey
def from_flist(cls, flist, tree=None, **kwargs):
    """Build the survey from a list of .dat files (historical JXP format).

    Parameters
    ----------
    flist : str
        ASCII file holding the list of .dat files
    tree : str, optional
        Path prefix for the .dat files
    **kwargs
        Passed to __init__

    Returns
    -------
    out : IGMSurvey
    """
    if tree is None:
        tree = ''
    # Read the single-column list of .dat filenames
    file_table = ascii.read(tree + flist, data_start=0, guess=False,
                            format='no_header')
    out = cls(**kwargs)
    out.tree = tree
    out.flist = flist
    out.dat_files = list(file_table['col1'])
    # Instantiate one IGMSystem per .dat file
    for dfile in out.dat_files:
        abs_sys = class_by_type(out.abs_type).from_datfile(dfile, tree=out.tree)
        out._abs_sys.append(abs_sys)
    print('Read {:d} files from {:s} in the tree {:s}'.format(
        out.nsys, out.flist, out.tree))
    return out
def build_abs_sys_from_dict(self, abssys_name, **kwargs):
    """Build an AbsSystem from the internal _dict.

    The corresponding slot in self._abs_sys is filled and the system
    is also returned.

    Parameters
    ----------
    abssys_name : str
        Must match a key of self._dict
    **kwargs
        Passed to from_dict() (e.g. components_from_dict options)

    Returns
    -------
    new_sys : AbsSystem
    """
    slot = self.sys_idx(abssys_name)
    sys_cls = class_by_type(self.abs_type)
    new_sys = sys_cls.from_dict(self._dict[abssys_name],
                                coord=self.coords[slot], **kwargs)
    # Lazily allocate the _abs_sys list on first use
    if not self._abs_sys:
        self.init_abs_sys()
    self._abs_sys[slot] = new_sys
    return new_sys
def from_sfits(cls, summ_fits, **kwargs):
    """Generate the Survey from a summary FITS file.

    Handles SPEC_FILES too.

    Parameters
    ----------
    summ_fits : str or Table or QTable
        Summary FITS file, or an already-loaded table
    **kwargs : dict
        Passed to __init__

    Returns
    -------
    slf : IGMSurvey
    """
    slf = cls(**kwargs)
    # Accept either a file path or an in-memory table
    if isinstance(summ_fits, Table):
        systems = summ_fits
    else:
        systems = QTable.read(summ_fits)
    nsys = len(systems)
    # Column aliases recognized for each system attribute
    alias_dict = dict(NHI=['NHI', 'logNHI'],
                      sig_NHI=['sig(logNHI)', 'SIGNHI'],
                      name=['Name'],
                      vlim=['vlim'],
                      zabs=['Z_LLS', 'ZABS', 'zabs'],
                      zem=['Z_QSO', 'QSO_ZEM'],
                      RA=['RA'],
                      Dec=['DEC', 'Dec'])
    # Pull whichever columns are present in the table
    inputs = {}
    for attr in alias_dict.keys():
        vals, _ = lsio.get_table_column(alias_dict[attr], [systems], idx=0)
        if vals is not None:
            inputs[attr] = vals
    # Default velocity limits when the table lacks a vlim column
    if 'vlim' not in inputs.keys():
        inputs['vlim'] = [[-1000, 1000.] * u.km / u.s] * nsys
    # Instantiate one system per row
    for row in range(nsys):
        positional = {}
        keyword = {}
        for attr in inputs.keys():
            if attr in ['vlim', 'zabs', 'RA', 'Dec']:
                positional[attr] = inputs[attr][row]
            else:
                keyword[attr] = inputs[attr][row]
        abssys = class_by_type(slf.abs_type)(
            (positional['RA'], positional['Dec']),
            positional['zabs'], positional['vlim'], **keyword)
        # Attach spectrum files when the column exists for this row
        try:
            abssys.spec_files += systems[row]['SPEC_FILES'].tolist()
        except (KeyError, AttributeError):
            pass
        slf._abs_sys.append(abssys)
    # Mask
    slf.init_mask()
    return slf
def from_dict(cls, idict, **kwargs):
    """Instantiate from a dict.

    Parameters
    ----------
    idict : dict
        Required keys are:
          'RA' -- float (deg)
          'DEC' -- float (deg)
          'zem' -- float
          'JNAME' -- str (used as the sightline name)
          'components' -- list
        Other keys are added as attributes to the IgmSightline object.
    **kwargs
        Passed to __init__, add_comps_from_dict() and the AbsSystem
        from_dict() calls.  If 'linelist' is absent, the ISM LineList is
        loaded once here to speed up component generation.

    Returns
    -------
    slf : IgmSightline
    """
    # Load the ISM line list only when the caller did not supply one.
    # Previously an unconditional `ism = LineList('ISM')` followed this
    # branch, which (a) raised NameError when 'linelist' was in kwargs
    # (the import lived inside the branch) and (b) clobbered any
    # caller-provided linelist.
    if 'linelist' not in kwargs.keys():
        from linetools.lists.linelist import LineList
        kwargs['linelist'] = LineList('ISM')
    from pyigm.abssys.utils import class_by_type
    # Meta data may be nested under 'meta' or live at the top level
    if 'meta' in idict.keys():
        meta = idict['meta']
    else:
        meta = idict
    # Components -- backwards compatibility with the old 'cmps' key
    if 'cmps' in idict.keys():
        idict['components'] = idict['cmps'].copy()
    # Instantiate
    slf = cls(SkyCoord(ra=meta['RA'], dec=meta['DEC'], unit='deg'),
              zem=meta['zem'], name=meta['JNAME'], **kwargs)
    # Attach any remaining keys as attributes
    for key in idict.keys():
        if key in ['RA', 'DEC', 'zem', 'name', 'components', 'meta', 'cmps']:
            continue
        else:
            setattr(slf, key, idict[key])
    add_comps_from_dict(slf, idict, **kwargs)
    # Systems
    if 'systems' in idict.keys():
        for key in idict['systems'].keys():
            asys = class_by_type(
                idict['systems'][key]['abs_type']).from_dict(
                    idict['systems'][key], **kwargs)
            slf._abssystems.append(asys)
    # Return
    return slf
def load_sys_files(inp, type, ref=None, sys_path=False, **kwargs):
    """Load a set of SYS files (JSON) from the hard-drive into an IGMSurvey.

    Parameters
    ----------
    inp : str
        Name of a JSON tarball, or (with sys_path=True) the path to a
        folder of JSON files
    type : str
        Type of IGMSystem, e.g. 'LLS'
    ref : str, optional
        Reference label
    sys_path : bool, optional
        Interpret inp as a path to individual JSON SYS files instead of
        a tarball of JSON files
    **kwargs
        Passed to from_dict() when reading from a tarball

    Returns
    -------
    survey : IGMSurvey
    """
    import tarfile

    survey = class_by_type(type)(ref=ref)
    system = pyasu.class_by_type(type)
    if sys_path:
        # One JSON file per system, in sorted order
        for sys_file in sorted(glob.glob(inp + '*.json')):
            survey._abs_sys.append(system.from_dict(ltu.loadjson(sys_file)))
    else:
        # Tarball of JSON files
        print('Loading systems from {:s}'.format(inp))
        tar = tarfile.open(inp)
        for member in tar.getmembers():
            if '.' not in member.name:
                print('Skipping a likely folder: {:s}'.format(member.name))
                continue
            tdict = json.load(tar.extractfile(member))
            # Backwards compatibility -- older files have NHI but no flag_NHI
            if ('NHI' in tdict.keys()) and ('flag_NHI' not in tdict.keys()):
                tdict['flag_NHI'] = 1
            survey._abs_sys.append(
                system.from_dict(tdict, chk_sep=False, **kwargs))
        tar.close()
    # Return
    return survey
def load_sys_files(inp, type, ref=None, sys_path=False, **kwargs):
    """Load a set of SYS files (JSON) from disk.

    Parameters
    ----------
    inp : str
        Tarball filename, or a folder path when sys_path=True
    type : str
        Type of IGMSystem, e.g. 'LLS'
    ref : str, optional
        Reference label
    sys_path : bool, optional
        If True, inp points to a folder of individual JSON SYS files;
        otherwise it names a tarball of JSON files

    Returns
    -------
    survey : IGMSurvey
    """
    import tarfile

    survey = class_by_type(type)(ref=ref)
    system = pyasu.class_by_type(type)
    if sys_path:
        # Read each JSON file individually
        json_files = glob.glob(inp + '*.json')
        json_files.sort()
        for json_file in json_files:
            sys_dict = ltu.loadjson(json_file)
            survey._abs_sys.append(system.from_dict(sys_dict))
    else:
        # Walk every member of the tarball
        print('Loading systems from {:s}'.format(inp))
        tar = tarfile.open(inp)
        for member in tar.getmembers():
            if '.' not in member.name:
                print('Skipping a likely folder: {:s}'.format(member.name))
                continue
            handle = tar.extractfile(member)
            sys_dict = json.load(handle)
            # Backwards compatibility -- older files have NHI but no flag_NHI
            if ('NHI' in sys_dict.keys()) and ('flag_NHI' not in sys_dict.keys()):
                sys_dict['flag_NHI'] = 1
            survey._abs_sys.append(
                system.from_dict(sys_dict, chk_sep=False, **kwargs))
        tar.close()
    # Return
    return survey
def load_sys_files(inp, type, sys_path=False):
    """Load up a set of SYS files from the hard-drive (JSON files).

    Parameters
    ----------
    inp : str
        Tarball filename, or a folder path when sys_path=True
    type : str
        type of IGMSystem
    sys_path : bool, optional
        indicates that inp is a path to a set of SYS files
        otherwise, it should be the filename of a tarball

    Returns
    -------
    survey : IGMSurvey
    """
    import tarfile
    #
    survey = class_by_type(type)(ref='HD-LLS')
    system = pyasu.class_by_type(type)
    if sys_path:  # Individual files
        files = glob.glob(inp + '*.json')
        files.sort()
        for ifile in files:
            tdict = ltu.loadjson(ifile)
            abssys = system.from_dict(tdict)
            survey._abs_sys.append(abssys)
    else:  # tarball
        print('Loading systems from {:s}'.format(inp))
        # Context manager guarantees the tarball handle is closed
        # (the original never called tar.close(), leaking the file)
        with tarfile.open(inp) as tar:
            for member in tar.getmembers():
                if '.' not in member.name:
                    print('Skipping a likely folder: {:s}'.format(member.name))
                    continue
                # Extract
                f = tar.extractfile(member)
                tdict = json.load(f)
                # Generate
                abssys = system.from_dict(tdict)
                survey._abs_sys.append(abssys)
    # Return
    return survey
def build_abs_sys_from_data(self, row):
    """Build an AbsSystem from a row of the internal _data table.

    The corresponding slot in self._abs_sys is filled and the system
    is also returned.  Any masking is ignored -- this may change.

    Parameters
    ----------
    row : int
        Row of the _data table

    Returns
    -------
    new_sys : AbsSystem
    """
    # Velocity limits -- attach a unit, defaulting to km/s
    vlim = self._data['vlim'][row]
    unit = self._data['vlim'].unit
    if unit is None:
        vlim = vlim * u.km / u.s
    else:
        vlim *= unit
    # Optional keyword columns, taken only when present in _data
    skwargs = {key: self._data[key][row]
               for key in ('NHI', 'sig_NHI', 'name', 'zem')
               if key in self._data.keys()}
    # Instantiate
    new_sys = class_by_type(self.abs_type)(self.coords[row],
                                           self._data['zabs'][row],
                                           vlim, **skwargs)
    # Lazily allocate the _abs_sys list on first use
    if not self._abs_sys:
        self.init_abs_sys()
    self._abs_sys[row] = new_sys
    return new_sys
def load_sys_files(inp, type, ref=None, sys_path=False, build_abs_sys=False, **kwargs):
    """ Load up a set of SYS files from the hard-drive (JSON files)

    Parameters
    ----------
    inp : str
        Name of JSON tarball or if sys_path=True then the path to a folder of JSON files
    type : str
        type of IGMSystem, e.g. LLS
    ref : str, optional
        Reference label
    sys_path : str, optional
        indicates that inp is a path to a set of JSON SYS files
        otherwise, inp should be the filename of a tarball of JSON files
    build_abs_sys : bool, optional
        Build a list of AbsSystem's? Can always be instantiated later
    **kwargs :
        Passed to system

    Returns
    -------
    survey : IGMSurvey
    """
    import tarfile
    #
    # Empty survey container plus the system class used to parse dicts
    survey = class_by_type(type)(ref=ref)
    system = pyasu.class_by_type(type)
    if sys_path:
        # Deliberate breakpoint left by the author: this branch still uses
        # the old per-system loading and has not been migrated to the
        # _dict-based flow used for tarballs below.
        pdb.set_trace()  # THIS NEEDS TO BE UPDATED AS WAS DONE FOR THE TARBALL
        # Individual files
        files = glob.glob(inp + '*.json')
        files.sort()
        for ifile in files:
            tdict = ltu.loadjson(ifile)
            # NOTE(review): `llist` is not defined anywhere in this function;
            # reaching this line raises NameError -- confirm the intended
            # LineList source before removing the breakpoint above.
            abssys = system.from_dict(tdict, linelist=llist)
            survey._abs_sys.append(abssys)
    else:  # tarball
        print('Loading systems from {:s}'.format(inp))
        # Tarball is assumed gzip-compressed ('r:gz')
        tar = tarfile.open(inp, 'r:gz')
        for member in tar.getmembers():
            if '.' not in member.name:
                print('Skipping a likely folder: {:s}'.format(member.name))
                continue
            # Extract the member and decode its JSON payload
            f = tar.extractfile(member)
            f = f.read()
            f = f.decode('utf-8')
            tdict = json.loads(f)
            # Add keys (for backwards compatability)
            if ('NHI' in tdict.keys()) and ('flag_NHI' not in tdict.keys()):
                tdict['flag_NHI'] = 1
            # Add to list of dicts, keyed by system name
            survey._dict[tdict['Name']] = tdict
        tar.close()
    # Mask
    survey.init_mask()
    # Set coordinates from the accumulated dicts (RA/DEC in deg)
    ras = [survey._dict[key]['RA'] for key in survey._dict.keys()]
    decs = [survey._dict[key]['DEC'] for key in survey._dict.keys()]
    survey.coords = SkyCoord(ra=ras, dec=decs, unit='deg')
    # Build AbsSystem objects?
    if build_abs_sys:
        # NOTE(review): `llist` is also undefined here; build_abs_sys=True
        # will raise NameError -- verify against the module this came from.
        survey.build_all_abs_sys(linelist=llist)
    # Generate the data table
    print("Building the data Table from the internal dict")
    survey.data_from_dict()
    # Return
    return survey
def from_sfits(cls, summ_fits, **kwargs):
    """Generate the Survey from a summary FITS file or Table.

    Handles SPEC_FILES too.

    Parameters
    ----------
    summ_fits : str or Table or QTable
        Summary FITS file, or an already-loaded table
    **kwargs : dict
        Passed to __init__

    Returns
    -------
    slf : IGMSurvey
    """
    slf = cls(**kwargs)
    # Load the table if a filename was handed in
    systems = summ_fits if isinstance(summ_fits, Table) else QTable.read(summ_fits)
    nsys = len(systems)
    # Column aliases recognized for each system attribute
    aliases = dict(NHI=['NHI', 'logNHI'],
                   sig_NHI=['sig(logNHI)', 'SIGNHI', 'NHI_ERR'],
                   name=['Name'],
                   vlim=['vlim'],
                   zabs=['Z_LLS', 'ZABS', 'zabs'],
                   zem=['Z_QSO', 'QSO_ZEM', 'ZEM'],
                   RA=['RA'],
                   Dec=['DEC', 'Dec'])
    # Grab each attribute from the first matching column, if any
    inputs = {}
    for attr in aliases.keys():
        column, _ = lsio.get_table_column(aliases[attr], [systems], idx=0)
        if column is not None:
            inputs[attr] = column
    if 'vlim' not in inputs.keys():
        # Fall back to +/- 1000 km/s for every system
        inputs['vlim'] = [[-1000, 1000.] * u.km / u.s] * nsys
    # These four feed the constructor positionally; the rest are keywords
    positional_keys = ['vlim', 'zabs', 'RA', 'Dec']
    for irow in range(nsys):
        pos = {k: inputs[k][irow] for k in inputs.keys()
               if k in positional_keys}
        extra = {k: inputs[k][irow] for k in inputs.keys()
                 if k not in positional_keys}
        abssys = class_by_type(slf.abs_type)((pos['RA'], pos['Dec']),
                                             pos['zabs'], pos['vlim'],
                                             **extra)
        # Attach spectrum files when the column exists for this row
        try:
            abssys.spec_files += systems[irow]['SPEC_FILES'].tolist()
        except (KeyError, AttributeError):
            pass
        slf._abs_sys.append(abssys)
    # Mask
    slf.init_mask()
    return slf