def load_bands(self, band_indices):
    """Load band files."""
    prt = self._printer

    if self._pool.is_master():
        vo_tabs = OrderedDict()

    per = 0.0
    bc = 0
    band_set = set(band_indices)
    # An empty `band_indices` means every unique band should be loaded;
    # guard the progress denominator against that case.
    n_to_load = len(band_set) if band_set else len(self._unique_bands)

    for i, band in enumerate(self._unique_bands):
        if len(band_indices) and i not in band_set:
            continue
        if self._pool.is_master():
            new_per = np.round(100.0 * float(bc) / n_to_load)
            if new_per > per:
                per = new_per
                prt.message('loading_bands', [per], inline=True)
            systems = ['AB']
            zps = [0.0]
            path = None
            if 'SVO' in band:
                photsystem = self._band_systs[i]
                if photsystem in syst_syns:
                    photsystem = syst_syns[photsystem]
                if photsystem not in systems:
                    systems.append(photsystem)
                zpfluxes = []
                for sys in systems:
                    svopath = band['SVO'] + '/' + sys
                    path = os.path.join(
                        self._dir_path, 'filters',
                        svopath.replace('/', '_') + '.dat')
                    xml_path = os.path.join(
                        self._dir_path, 'filters',
                        svopath.replace('/', '_') + '.xml')
                    if not os.path.exists(xml_path):
                        prt.message('dl_svo', [svopath], inline=True)
                        try:
                            response = get_url_file_handle(
                                'http://svo2.cab.inta-csic.es'
                                '/svo/theory/fps3/'
                                'fps.php?PhotCalID=' + svopath,
                                timeout=10)
                        except Exception:
                            prt.message('cant_dl_svo', warning=True)
                        else:
                            with open_atomic(xml_path, 'wb') as f:
                                shutil.copyfileobj(response, f)

                    if os.path.exists(xml_path):
                        already_written = svopath in vo_tabs
                        if not already_written:
                            vo_tabs[svopath] = voparse(xml_path)
                        vo_tab = vo_tabs[svopath]
                        # Need to account for zeropoint type.
                        for resource in vo_tab.resources:
                            if len(resource.params) == 0:
                                params = vo_tab.get_first_table().params
                            else:
                                params = resource.params
                            oldzplen = len(zps)
                            for param in params:
                                if param.name == 'ZeroPoint':
                                    zpfluxes.append(param.value)
                                    if sys != 'AB':
                                        # 0th element is AB flux.
                                        zps.append(2.5 * np.log10(
                                            zpfluxes[0] / zpfluxes[-1]))
                                else:
                                    continue
                            if sys != 'AB' and len(zps) == oldzplen:
                                raise RuntimeError(
                                    'ZeroPoint not found in XML.')
                        if not already_written:
                            vo_dat = vo_tab.get_first_table().array
                            # Trim leading/trailing zero-transmission
                            # rows, keeping one row of padding.
                            bi = max(next((
                                i for i, x in enumerate(vo_dat)
                                if x[1]), 0) - 1, 0)
                            ei = -max(next((
                                i for i, x in enumerate(reversed(vo_dat))
                                if x[1]), 0) - 1, 0)
                            vo_dat = vo_dat[bi:ei if ei else len(vo_dat)]
                            vo_string = '\n'.join([
                                ' '.join([str(y) for y in x])
                                for x in vo_dat])
                            with open_atomic(path, 'w') as f:
                                f.write(vo_string)
                    else:
                        raise RuntimeError(prt.string('cant_read_svo'))
                self._unique_bands[i]['origin'] = band['SVO']
            elif all(x in band for x in [
                    'min_wavelength', 'max_wavelength',
                    'delta_wavelength']):
                # Generate a flat (top hat) filter on a uniform grid.
                nbins = int(np.round((
                    band['max_wavelength'] - band['min_wavelength']) /
                    band['delta_wavelength'])) + 1
                rows = np.array([
                    np.linspace(band['min_wavelength'],
                                band['max_wavelength'], nbins),
                    np.full(nbins, 1.0)]).T.tolist()
                self._unique_bands[i]['origin'] = 'generated'
            elif 'path' in band:
                self._unique_bands[i]['origin'] = band['path']
                path = band['path']
            else:
                raise RuntimeError(prt.text('bad_filter_rule'))

            if path:
                with open(os.path.join(
                        self._dir_path, 'filters', path), 'r') as f:
                    rows = []
                    for row in csv.reader(
                            f, delimiter=' ', skipinitialspace=True):
                        rows.append([float(x) for x in row[:2]])
            # Share the filter data with the worker processes.
            for rank in range(1, self._pool.size + 1):
                self._pool.comm.send(rows, dest=rank, tag=3)
                self._pool.comm.send(zps, dest=rank, tag=4)
        else:
            rows = self._pool.comm.recv(source=0, tag=3)
            zps = self._pool.comm.recv(source=0, tag=4)

        xvals, yvals = list(map(list, zip(*rows)))
        xvals = np.array(xvals)
        yvals = np.array(yvals)
        if '{0}'.format(self._band_yunits[i]) == 'cm2':
            # Effective-area (e.g. X-ray) response: convert energies to
            # wavelengths and weight the response per photon energy.
            xscale = (c.h * c.c / u.Angstrom).cgs.value / self._band_xu[i]
            self._band_energies[i], self._band_areas[i] = (
                xvals, yvals / xvals)
            self._band_wavelengths[i] = xscale / self._band_energies[i]
            self._average_wavelengths[i] = np.trapz([
                x * y for x, y in zip(
                    self._band_areas[i], self._band_wavelengths[i])
            ], self._band_wavelengths[i]) / np.trapz(
                self._band_areas[i], self._band_wavelengths[i])
        else:
            self._band_wavelengths[i], self._transmissions[i] = (
                xvals, yvals)
            self._filter_integrals[i] = self.FLUX_STD * np.trapz(
                np.array(self._transmissions[i]) /
                np.array(self._band_wavelengths[i]) ** 2,
                self._band_wavelengths[i])
            self._average_wavelengths[i] = np.trapz([
                x * y for x, y in zip(
                    self._transmissions[i], self._band_wavelengths[i])
            ], self._band_wavelengths[i]) / np.trapz(
                self._transmissions[i], self._band_wavelengths[i])

        if 'offset' in band:
            self._band_offsets[i] = band['offset']
        elif 'SVO' in band:
            self._band_offsets[i] = zps[-1]

        # Do some sanity checks.
        if (self._band_offsets[i] != 0.0 and
                self._band_systs[i] == 'AB'):
            raise RuntimeError(
                'Filters in AB system should always have offset = '
                '0.0, not the case for `{}`'.format(
                    self._band_names[i]))

        self._min_waves[i] = min(self._band_wavelengths[i])
        self._max_waves[i] = max(self._band_wavelengths[i])
        self._imp_waves[i] = set(
            [self._min_waves[i], self._max_waves[i]])
        if len(self._transmissions[i]):
            new_wave = self._band_wavelengths[i][
                np.argmax(self._transmissions[i])]
            self._imp_waves[i].add(new_wave)
        elif len(self._band_areas[i]):
            new_wave = self._band_wavelengths[i][
                np.argmax(self._band_areas[i])]
            self._imp_waves[i].add(new_wave)
        self._imp_waves[i] = list(sorted(self._imp_waves[i]))

        bc = bc + 1

    if self._pool.is_master():
        prt.message('band_load_complete', inline=True)
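
# A minimal, self-contained sketch (not part of the class above) of two
# calculations `load_bands` performs for an ordinary transmission filter:
# the transmission-weighted mean wavelength, and the magnitude offset of a
# non-AB system derived from SVO `ZeroPoint` fluxes. The top-hat curve and
# the zeropoint fluxes below are hypothetical illustrative values.
def _example_band_summary():
    import numpy as np

    # Hypothetical filter: a top hat between 4500 and 5500 Angstroms.
    waves = np.linspace(4000.0, 6000.0, 201)
    trans = np.where((waves > 4500.0) & (waves < 5500.0), 1.0, 0.0)

    # Transmission-weighted mean wavelength, as in `load_bands`.
    avg_wave = np.trapz(trans * waves, waves) / np.trapz(trans, waves)

    # Offset from AB, mirroring `zps.append(2.5 * np.log10(...))` above:
    # the 0th zeropoint flux is the AB flux, the last is the system's.
    f_ab, f_sys = 3631.0, 3500.0  # Jy (hypothetical values)
    zp_offset = 2.5 * np.log10(f_ab / f_sys)

    return avg_wave, zp_offset  # -> (~5000.0, ~0.04)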
def fetch(self, event_list, offline=False):
    """Fetch a list of events from the open catalogs."""
    dir_path = os.path.dirname(os.path.realpath(__file__))
    prt = self._printer

    levent_list = listify(event_list)
    events = [None for x in levent_list]

    catalogs = OrderedDict([
        (x, self._catalogs[x]) for x in self._catalogs
        if x not in self._excluded_catalogs])

    for ei, event in enumerate(levent_list):
        if not event:
            continue
        events[ei] = OrderedDict()
        path = ''
        # If the event name ends in .json, assume event is a path.
        if event.endswith('.json'):
            path = event
            events[ei]['name'] = event.replace(
                '.json', '').split('/')[-1]
        # If not (or the file doesn't exist), download from an open
        # catalog.
        if not path or not os.path.exists(path):
            names_paths = [
                os.path.join(dir_path, 'cache', x + '.names.min.json')
                for x in catalogs]
            input_name = event.replace('.json', '')
            if offline:
                prt.message('event_interp', [input_name])
            else:
                prt.message('dling_aliases', [input_name])
                for ci, catalog in enumerate(catalogs):
                    try:
                        response = get_url_file_handle(
                            catalogs[catalog]['json'] +
                            '/names.min.json', timeout=10)
                    except Exception:
                        prt.message(
                            'cant_dl_names', [catalog], warning=True)
                    else:
                        with open_atomic(names_paths[ci], 'wb') as f:
                            shutil.copyfileobj(response, f)

            # Look for an exact alias match in each catalog's name list.
            names = OrderedDict()
            for ci, catalog in enumerate(catalogs):
                if os.path.exists(names_paths[ci]):
                    with open(names_paths[ci], 'r') as f:
                        names[catalog] = json.load(
                            f, object_pairs_hook=OrderedDict)
                else:
                    prt.message(
                        'cant_read_names', [catalog], warning=True)
                    if offline:
                        prt.message('omit_offline')
                    continue

                if input_name in names[catalog]:
                    events[ei]['name'] = input_name
                    events[ei]['catalog'] = catalog
                else:
                    for name in names[catalog]:
                        if (input_name in names[catalog][name] or
                                'SN' + input_name in
                                names[catalog][name]):
                            events[ei]['name'] = name
                            events[ei]['catalog'] = catalog
                            break

            # No exact match: fall back to fuzzy matching against all
            # known aliases.
            if not events[ei].get('name', None):
                for ci, catalog in enumerate(catalogs):
                    if catalog not in names:
                        # Names file could not be read above.
                        continue
                    namekeys = []
                    for name in names[catalog]:
                        namekeys.extend(names[catalog][name])
                    namekeys = list(sorted(set(namekeys)))
                    matches = get_close_matches(
                        event, namekeys, n=5, cutoff=0.8)
                    if len(matches) < 5 and is_number(event[0]):
                        prt.message('pef_ext_search')
                        snprefixes = set(('SN19', 'SN20'))
                        for name in names[catalog]:
                            ind = re.search(r"\d", name)
                            if ind and ind.start() > 0:
                                snprefixes.add(name[:ind.start()])
                        snprefixes = list(sorted(snprefixes))
                        for prefix in snprefixes:
                            testname = prefix + event
                            new_matches = get_close_matches(
                                testname, namekeys, cutoff=0.95, n=1)
                            if (len(new_matches) and
                                    new_matches[0] not in matches):
                                matches.append(new_matches[0])
                            if len(matches) == 5:
                                break
                    if len(matches):
                        if self._test:
                            response = matches[0]
                        else:
                            response = prt.prompt(
                                'no_exact_match',
                                kind='select',
                                options=matches,
                                none_string=(
                                    'None of the above, ' + (
                                        'skip this event.'
                                        if ci == len(catalogs) - 1
                                        else 'try the next catalog.')))
                        if response:
                            for name in names[catalog]:
                                if response in names[catalog][name]:
                                    events[ei]['name'] = name
                                    events[ei]['catalog'] = catalog
                                    break
                            if events[ei]['name']:
                                break

            if not events[ei].get('name', None):
                prt.message('no_event_by_name')
                events[ei]['name'] = input_name
                continue

            urlname = events[ei]['name'] + '.json'
            name_path = os.path.join(dir_path, 'cache', urlname)

            if offline:
                prt.message('cached_event', [
                    events[ei]['name'], events[ei]['catalog']])
            else:
                prt.message('dling_event', [
                    events[ei]['name'], events[ei]['catalog']])
                try:
                    response = get_url_file_handle(
                        catalogs[events[ei]['catalog']]['json'] +
                        '/json/' + urlname, timeout=10)
                except Exception:
                    prt.message('cant_dl_event', [
                        events[ei]['name']], warning=True)
                else:
                    with open_atomic(name_path, 'wb') as f:
                        shutil.copyfileobj(response, f)
            path = name_path

        if os.path.exists(path):
            events[ei]['path'] = path
            if self._open_in_browser:
                webbrowser.open(
                    catalogs[events[ei]['catalog']]['web'] +
                    events[ei]['name'])
            with open(path, 'r') as f:
                events[ei]['data'] = json.load(
                    f, object_pairs_hook=OrderedDict)
            prt.message('event_file', [path], wrapped=True)
        else:
            prt.message('no_data', [
                events[ei]['name'], '/'.join(catalogs.keys())])
            if offline:
                prt.message('omit_offline')
            raise RuntimeError

    return events
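
# A minimal stdlib-only sketch (not part of the class above) of the fuzzy
# name-matching fallback `fetch` uses when an event name has no exact
# alias match: `difflib.get_close_matches` proposes candidates, and
# purely numeric inputs are retried with 'SN'-style year prefixes. The
# helper name and the example aliases below are hypothetical.
def _example_name_match(event, namekeys):
    from difflib import get_close_matches

    matches = get_close_matches(event, namekeys, n=5, cutoff=0.8)
    if len(matches) < 5 and event[:1].isdigit():
        for prefix in ('SN19', 'SN20'):
            new = get_close_matches(
                prefix + event, namekeys, n=1, cutoff=0.95)
            if new and new[0] not in matches:
                matches.append(new[0])
    return matches

# For example, _example_name_match('2011fe', ['SN2011fe', 'PTF11kly'])
# returns ['SN2011fe'], which `fetch` would then offer to the user via
# `prt.prompt` before downloading the event's JSON file.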