def run_booker(self, horizons_filename, body):
    """Book sun/moon positions to CSV via tableio and compare every row
    against a JPL HORIZONS ephemeris.

    :param horizons_filename: ephemeris file parsed by
        ``test_horizons.read_horizons`` (provides 'date', 'el', 'az').
    :param body: CSV column prefix for the booked body ('sun' or 'moon').
    """
    ephem = test_horizons.read_horizons(horizons_filename)

    csv_dir_name = "tableout"
    csv_filename = os.path.join(csv_dir_name, "I3EventHeader.csv")

    tray = I3Tray()
    tray.Add(generator, times=ephem['date'])
    tray.Add(tableio.I3TableWriter,
             tableservice=[tableio.I3CSVTableService(csv_dir_name)],
             keys={
                 'I3EventHeader': [
                     astro.converters.I3SunAndMoonConverter(),
                     dataclasses.converters.I3EventHeaderConverter()
                 ],
             })
    # Execute() finalizes the tray; the explicit Finish() call used
    # elsewhere is deprecated and omitted (matches test_tableio below).
    tray.Execute()
    del tray

    with open(csv_filename, 'r') as csvfile:
        reader = csv.reader(csvfile)
        # BUGFIX: was the Python-2-only ``reader.next()``; the builtin
        # next() works on both and matches test_tableio in this file.
        header = next(reader)
        keys = [
            'time_start_mjd_day [days]',
            'time_start_mjd_sec [seconds]',
            'time_start_mjd_ns [ns]',
            body + '_zenith [radian]',
            body + '_azimuth [radian]',
        ]
        indices = {}
        for k in keys:
            indices[k] = header.index(k)

        # skip info row (units/description line emitted by the CSV service)
        next(reader)

        for line_number, row in enumerate(reader):
            # Rebuild the event time from the booked MJD columns and
            # check it round-trips against the ephemeris timestamp.
            t = dataclasses.I3Time()
            t.set_mod_julian_time(
                int(row[indices['time_start_mjd_day [days]']]),
                int(row[indices['time_start_mjd_sec [seconds]']]),
                float(row[indices['time_start_mjd_ns [ns]']]))
            self.assertEqual(
                t, dataclasses.make_I3Time(ephem['date'][line_number]))
            # Elevation: zenith converted to degrees must agree with the
            # ephemeris elevation to within 0.003 degrees.
            assert (
                abs(90 - float(row[indices[body + '_zenith [radian]']]) /
                    I3Units.degree - ephem['el'][line_number]) < 0.003)
            # Azimuth: converted to the ephemeris convention (east of
            # north, corrected for detector longitude) within 1 degree.
            assert (test_horizons.azimuth_distance(
                90 - (float(row[indices[body + '_azimuth [radian]']]) +
                      astro.ICECUBE_LONGITUDE) / I3Units.degree,
                ephem['az'][line_number]) < 1.)
def test_tableio(self):
    """Round-trip catalog coordinates through I3TableWriter and verify
    the booked CSV columns match the catalog to two decimal places."""
    out_dir = "tableout"
    out_csv = os.path.join(out_dir, "Particle.csv")

    # Start clean: a stale CSV from an earlier run would be appended to.
    if os.path.exists(out_csv):
        print("deleting", repr(out_csv))
        os.unlink(out_csv)

    # Parse the SIMBAD/TeVCat dump: skip the 7-line header and 2-line
    # footer; columns 3, 6, 5 carry the (ra dec), (gal_l gal_b),
    # (sg_l sg_b) degree pairs, whitespace-separated within each column.
    coords = []
    for raw in open(TEST_DATA + "simbad_tevcat_galactic.txt").readlines()[7:-2]:
        fields = raw.split('|')
        entry = []
        for column in (fields[3], fields[6], fields[5]):
            entry.extend(float(token) for token in column.split())
        coords.append(entry)

    tray = I3Tray()
    tray.Add(generator, Coords=coords)
    tray.Add(tableio.I3TableWriter,
             tableservice=[tableio.I3CSVTableService(out_dir)],
             keys={
                 'Particle': [
                     dataclasses.converters.I3ParticleConverter(),
                     astro.converters.I3AstroConverter(),
                     astro.converters.I3GalacticConverter(),
                     astro.converters.I3SuperGalacticConverter(),
                 ]
             })
    tray.Execute()
    del tray

    with open(out_csv, 'r') as fh:
        rows = csv.reader(fh)
        header = next(rows)
        names = ['ra', 'dec', 'gal_l', 'gal_b', 'sg_l', 'sg_b']
        col_of = {n: header.index(n + ' [radian]') for n in names}

        # skip info row
        next(rows)

        for row_number, row in enumerate(rows):
            for field_number, n in enumerate(names):
                booked = float(row[col_of[n]]) / I3Units.degree
                self.assertAlmostEqual(booked,
                                       coords[row_number][field_number],
                                       2)
from icecube import icetray, dataclasses, dataio, tableio, clsim

# Select a table service matching the requested output format.  The HDF
# and ROOT writers live in optional projects, so their imports are
# guarded and re-raised with a clearer message when unavailable.
if options.format == 'hdf5':
    try:
        from icecube import hdfwriter
    except ImportError:
        raise ImportError("Couldn't find the HDF writer service")
    tabler = hdfwriter.I3HDFTableService(outfile, options.compression)
elif options.format == 'root':
    try:
        from icecube import rootwriter
    except ImportError:
        raise ImportError("Couldn't find the ROOT writer service")
    tabler = rootwriter.I3ROOTTableService(outfile, options.compression)
elif options.format == 'csv':
    # CSV writes a directory of files; strip the extension (presumably
    # '.csv', 4 chars) and use '<stem>_csv' as the directory name.
    tabler = tableio.I3CSVTableService(outfile[:-4] + '_csv')
else:
    raise ValueError("I don't have a writer service for format '%s'" %
                     options.format)

tray = I3Tray()
tray.AddModule('I3Reader', 'reader', filename=infile)
# Put every Physics frame into a single trivial subevent stream so the
# writer has P frames to book.
tray.AddModule('I3NullSplitter', 'nullsplit')

count = 0


# NOTE(review): this definition continues beyond the visible chunk;
# only its first statement is shown here.
def counter(frame):
    global count
# Drive the tray with empty Physics frames; fakeit fills in the objects.
tray.AddModule("I3InfiniteSource", "source", stream=icetray.I3Frame.Physics)


def fakeit(frame):
    """Populate *frame* with a minimal header, a two-DOM pulse map, and
    a mask over that map, so the mask converter has something to book."""
    header = dataclasses.I3EventHeader()
    frame['I3EventHeader'] = header
    pulsemap = dataclasses.I3RecoPulseSeriesMap()
    pulses = dataclasses.I3RecoPulseSeries()
    pulse = dataclasses.I3RecoPulse()
    pulses.append(pulse)
    pulsemap[icetray.OMKey(7, 42)] = pulses
    pulsemap[icetray.OMKey(9, 42)] = pulses
    frame['Pulses'] = pulsemap
    # The mask must reference a map already in the frame.
    mask = dataclasses.I3RecoPulseSeriesMapMask(frame, 'Pulses')
    frame['PulseMask'] = mask


tray.AddModule(fakeit, 'fakeit')

tabler = tableio.I3CSVTableService(os.environ['I3_BUILD'] +
                                   '/tableio/pulsemask_test')
tray.AddModule(tableio.I3TableWriter, 'scribe',
               tableservice=tabler,
               keys=['PulseMask'],
               )
# CONSISTENCY FIX: dropped the deprecated TrashCan module and the
# explicit tray.Finish() call — the equivalent modern script later in
# this file runs the same tray without either; Execute() finalizes.
tray.Execute(1)
# doesn't have a default constructor continue frame[name] = obj fill_frame.objects[name] = obj fill_frame.objects = dict() tray = I3Tray() tray.Add("I3InfiniteSource") tray.Add(fake_event_header, Streams=[icetray.I3Frame.DAQ]) tray.Add("I3NullSplitter", "nullsplit") tray.Add(fill_frame) from icecube.tableio import I3BroadcastTableService tablers = [tableio.I3CSVTableService('test_converters')] outfiles = ['test_converters'] try: from icecube.hdfwriter import I3HDFTableService tablers.append(I3HDFTableService("test_converters.hdf5", 6, 'w')) outfiles.append('test_converters.hdf5') except ImportError: pass try: from icecube.rootwriter import I3ROOTTableService tablers.append(I3ROOTTableService("test_converters.root")) outfiles.append('test_converters.root') except ImportError: pass if len(tablers) == 1: tabler = tablers[0]
    # NOTE(review): this is the tail of a fakeit-style frame-filling
    # function whose 'def' line precedes this chunk.
    frame['I3EventHeader'] = header
    pulsemap = dataclasses.I3RecoPulseSeriesMap()
    pulses = dataclasses.I3RecoPulseSeries()
    pulse = dataclasses.I3RecoPulse()
    pulses.append(pulse)
    pulsemap[icetray.OMKey(7, 42)] = pulses
    pulsemap[icetray.OMKey(9, 42)] = pulses
    frame['Pulses'] = pulsemap
    # The mask must reference a pulse map already present in the frame.
    mask = dataclasses.I3RecoPulseSeriesMapMask(frame, 'Pulses')
    frame['PulseMask'] = mask


tray.AddModule(fakeit, 'fakeit')

dirname = os.environ['I3_BUILD'] + '/tableio/pulsemask_test'
tabler = tableio.I3CSVTableService(dirname)
tray.AddModule(
    tableio.I3TableWriter, 'scribe',
    tableservice=tabler,
    keys=['PulseMask'],
)
# One frame is enough to exercise the mask converter.
tray.Execute(1)

# Remove the CSV output directory so repeated runs start clean.
import shutil
shutil.rmtree(dirname)