def FIRE_HiRes_L1_L2(datafile, ephemfile):
    """Merge FIRE HiRes L1 data with ephemeris to produce an L2 file.

    Reads the L1 data and ephemeris files, trims the data to the ephemeris
    coverage, interpolates selected ephemeris quantities onto the data
    epochs, and writes the merged result as a JSON-headed ASCII L2 file.

    Parameters
    ----------
    datafile : str
        Path to the HiRes L1 JSON-headed ASCII file.
    ephemfile : str
        Path to the ephemeris JSON-headed ASCII file.

    Returns
    -------
    dm.SpaceData
        The merged L2 data (also written to ``*-HiRes_L2.txt``).
    """
    full_data = dm.readJSONheadedASCII(datafile)
    ephem = dm.readJSONheadedASCII(ephemfile)
    data = Trim_data_file(full_data, ephem)
    ephem_fields = ['Lsimple', 'CDMAG_MLT']
    # Common time base for interpolation: TAI seconds for both series.
    dt = spt.Ticktock(data['Epoch']).TAI
    et = spt.Ticktock(ephem['DateTime']).TAI
    for field in ephem_fields:
        print(field)
        nx = tb.interpol(dt, et, ephem[field])
        data[field] = dm.dmarray(nx)
    # Geodetic latitude/longitude arrive packed as one 2-column array.
    ephem_lat = ephem['Rgeod_LatLon'][:, 0]
    ephem_lon = ephem['Rgeod_LatLon'][:, 1]
    data['Lat'] = dm.dmarray(tb.interpol(dt, et, ephem_lat))
    data['Lon'] = dm.dmarray(tb.interpol(dt, et, ephem_lon))
    # NOTE(review): the original allocated ``eflux = np.zeros(n_lines, 12)``
    # (shape must be a tuple, so this was broken) and never used it; the
    # dead statement has been removed.
    # Output name: replace the trailing 23 chars of the L1 filename with
    # the ephemeris day stamp.
    day = ephem['DateTime'][0][0:10]
    outfile = datafile[:-23] + day + '-HiRes_L2.txt'
    dm.toJSONheadedASCII(outfile, data)
    return data
def test_toJSONheadedASCII(self):
    """Write known datamodel to JSON-headed ASCII and ensure it has right stuff added"""
    source = dm.SpaceData()
    source.attrs['Global'] = 'A global attribute'
    source['Var1'] = dm.dmarray([1, 2, 3, 4, 5], attrs={'Local1': 'A local attribute'})
    source['Var2'] = dm.dmarray([[8, 9], [9, 1], [3, 4], [8, 9], [7, 8]])
    source['MVar'] = dm.dmarray([7.8], attrs={'Note': 'Metadata'})
    tmp = tempfile.NamedTemporaryFile(delete=False)
    tmp.close()
    dm.toJSONheadedASCII(tmp.name, source, depend0='Var1', order=['Var1', 'Var2'])
    roundtrip = dm.readJSONheadedASCII(tmp.name)
    # global attributes survive the round trip
    self.assertTrue(source.attrs == roundtrip.attrs)
    # every per-variable metadata key is preserved
    for key in source['MVar'].attrs:
        self.assertTrue(key in roundtrip['MVar'].attrs)
    np.testing.assert_array_equal(source['MVar'], roundtrip['MVar'])
    # numeric contents are unchanged
    np.testing.assert_almost_equal(source['Var1'], roundtrip['Var1'])
    np.testing.assert_almost_equal(source['Var2'], roundtrip['Var2'])
    # the writer adds a DIMENSION attribute reflecting column count
    self.assertTrue(roundtrip['Var1'].attrs['DIMENSION'] == [1])
    self.assertTrue(roundtrip['Var2'].attrs['DIMENSION'] == [2])
    os.remove(tmp.name)
def writeOutput():
    """Write the data to a file, always choosing a fresh filename.

    Scans ``FB_Spectra_00000.txt`` .. ``FB_Spectra_99999.txt`` for the
    first name that does not yet exist and writes the module-level
    ``output`` datamodel there.  (``output`` is assembled elsewhere in
    this module -- confirm against the caller.)

    Returns
    -------
    str
        The filename that was written.

    Raises
    ------
    RuntimeError
        If all 100000 candidate filenames already exist.
    """
    for ii in range(100000):
        filename = 'FB_Spectra_{0:05}.txt'.format(ii)
        if not os.path.isfile(filename):
            break
    else:
        # BUG FIX: the original raised whenever ii reached 99999 -- even
        # if that very filename was free (the break and the raise both
        # fired).  The for/else only raises when no free name was found.
        raise RuntimeError('Wow how many times has this been run int his directory?')
    dm.toJSONheadedASCII(filename, output, order=['Channel', 'Detector0', 'Detector1'])
    print('*** Wrote {0} ***'.format(filename))
    return filename
def FIRE_Context_L1_L2(datafile, ephemfile):
    """Merge FIRE Context L1 data with ephemeris to produce an L2 file.

    Reads the L1 data and ephemeris files, trims the data to the ephemeris
    coverage, interpolates each ephemeris field (scalar or multi-column)
    onto the data epochs, converts the two Context columns to energy-flux
    estimates, and writes the merged result as a JSON-headed ASCII L2 file.

    Parameters
    ----------
    datafile : str
        Path to the Context L1 JSON-headed ASCII file.
    ephemfile : str
        Path to the ephemeris JSON-headed ASCII file.

    Returns
    -------
    dm.SpaceData
        The merged L2 data (also written to ``*-Context_L2.txt``).
    """
    full_data = dm.readJSONheadedASCII(datafile)
    # FIX: the original read ephemfile a second time into ``meta`` just to
    # reach the per-field attrs; one read supplies both data and attrs.
    ephem = dm.readJSONheadedASCII(ephemfile)
    data = Trim_data_file(full_data, ephem)
    ephem_fields = test_ephem_list  # module-level list of field names
    # Common time base for interpolation: TAI seconds for both series.
    dt = spt.Ticktock(data['Epoch']).TAI
    et = spt.Ticktock(ephem['DateTime']).TAI
    for field in ephem_fields:
        dim = np.size(ephem[field][0])
        print(field, dim)
        if dim > 1:
            # Multi-column field: interpolate each column independently.
            nx = np.empty([len(dt), dim])
            for j in range(dim):
                nx[:, j] = tb.interpol(dt, et, ephem[field][:, j])
        else:
            nx = tb.interpol(dt, et, ephem[field])
        data[field] = dm.dmarray(nx, attrs=ephem[field].attrs)
    # Context carries [collimated, surface] counts; despike copies in place
    # so the raw Context array is left untouched.
    col = deepcopy(data['Context'][:, 0])
    sur = deepcopy(data['Context'][:, 1])
    despike(col, 250, 10)
    despike(sur, 250, 10)
    # Scale raw counts to energy flux -- constants are presumably
    # detector-specific calibration factors; confirm against instrument docs.
    col = (col / (6 * 9)) * 1033
    sur = (sur / (6 * 23)) * 772
    data['col'] = dm.dmarray(col, attrs={'Description': 'Collimated Detector Energy Flux', 'SCALE_TYPE': 'log'})
    data['sur'] = dm.dmarray(sur, attrs={'Description': 'Surface Detector Energy Flux', 'SCALE_TYPE': 'log'})
    # Output name: replace the trailing 25 chars of the L1 filename with
    # the ephemeris day stamp.
    day = ephem['DateTime'][0][0:10]
    outfile = datafile[:-25] + day + '-Context_L2.txt'
    order = ['Epoch', 'col', 'sur', 'Context']
    order.extend(ephem_fields)
    # BUG FIX: ``order`` was built but never passed to the writer, so the
    # output column order was undefined; pass it as clearly intended.
    dm.toJSONheadedASCII(outfile, data, order=order)
    return data
def test_toJSONheadedASCII(self):
    """Write known datamodel to JSON-headed ASCII and ensure it has right stuff added"""
    written = dm.SpaceData()
    written.attrs['Global'] = 'A global attribute'
    written['Var1'] = dm.dmarray([1,2,3,4,5], attrs={'Local1': 'A local attribute'})
    written['Var2'] = dm.dmarray([[8,9],[9,1],[3,4],[8,9],[7,8]])
    written['MVar'] = dm.dmarray([7.8], attrs={'Note': 'Metadata'})
    handle = tempfile.NamedTemporaryFile(delete=False)
    handle.close()
    fname = handle.name
    dm.toJSONheadedASCII(fname, written, depend0='Var1', order=['Var1','Var2'])
    readback = dm.readJSONheadedASCII(fname)
    # the global attribute dict must survive unchanged
    self.assertTrue(written.attrs == readback.attrs)
    # every metadata key on MVar must come back
    for key in written['MVar'].attrs:
        self.assertTrue(key in readback['MVar'].attrs)
    np.testing.assert_array_equal(written['MVar'], readback['MVar'])
    # the variable values round-trip exactly
    np.testing.assert_almost_equal(written['Var1'], readback['Var1'])
    np.testing.assert_almost_equal(written['Var2'], readback['Var2'])
    # DIMENSION attributes are added by the writer per column count
    self.assertTrue(readback['Var1'].attrs['DIMENSION'] == [1])
    self.assertTrue(readback['Var2'].attrs['DIMENSION'] == [2])
    os.remove(fname)
def write(self, filename, hdf5=False):
    """Persist ``self.dat`` to *filename* and report the absolute path.

    Parameters
    ----------
    filename : str
        Destination path.
    hdf5 : bool, optional
        When True write HDF5; otherwise JSON-headed ASCII ordered on Epoch.
    """
    payload = self.dat
    if hdf5:
        dm.toHDF5(filename, payload)
    else:
        dm.toJSONheadedASCII(filename, payload, order=['Epoch'])
    print(' Wrote {0}'.format(os.path.abspath(filename)))
def write(self, filename, hdf5=False):
    """Persist ``self.data`` to *filename*.

    Parameters
    ----------
    filename : str
        Destination path.
    hdf5 : bool, optional
        When True write HDF5; otherwise JSON-headed ASCII.
    """
    if hdf5:
        dm.toHDF5(filename, self.data)
        return
    dm.toJSONheadedASCII(filename, self.data)