def test_dmfilled(self):
    """dmfilled should fill an array"""
    # Integer fill: both the values and the dtype must round-trip.
    expected = np.ones(3, dtype=int)
    got = dm.dmfilled((3), fillval=1, dtype=int)
    np.testing.assert_equal(got, expected)
    self.assertEqual(got.dtype, expected.dtype)
    # Object fill with a datetime: every element equals the fill value.
    stamp = datetime.datetime(2000, 1, 1)
    expected = np.asarray([stamp, stamp, stamp], dtype=object)
    got = dm.dmfilled((3), fillval=stamp, dtype=object)
    np.testing.assert_equal(got, expected)
    self.assertEqual(got.dtype, expected.dtype)
def initialSW():
    """Construct initial estimate of hourly solar wind parameters

    Starts from the Kondrashov reconstruction (``getKondrashovSW``), keeps
    IMF By and density, and overrides the bulk speed with a piecewise
    profile anchored on SSC timings (Boteler/Nagatsuma).  Temperature,
    rectified VBs and an inverse-Burton Bz are then derived from the speed.

    Returns
    =======
    hourly : mapping (presumably dm.SpaceData -- TODO confirm from getKondrashovSW)
        hourly estimate with (at least) keys Bx, Vy, Vz, V_sw, Plasma_temp,
        VBs_K, VBs_OB, Bz_OB and Dst added/overwritten
    """
    st = dt.datetime(1989, 3, 12)
    en = dt.datetime(1989, 3, 15)
    hourly = getKondrashovSW()
    # Keep IMF By and n, set V to Boteler/Nagatsuma
    # Storm timeline anchors: two sudden commencements, CME arrival, IMF northward turning.
    t_ssc1 = dt.datetime(1989, 3, 13, 1, 27)
    t_ssc2 = dt.datetime(1989, 3, 13, 7, 43)
    t_cme = dt.datetime(1989, 3, 13, 16)
    t_turn = dt.datetime(1989, 3, 14, 2)
    # Set Bx positive, in accordance with ISEE-3 data, Vy/Vz->0
    hourly['Bx'] = dm.dmfilled(hourly['By'].shape, fillval=3)
    hourly['Vy'] = dm.dmfilled(hourly['By'].shape, fillval=0)
    hourly['Vz'] = dm.dmfilled(hourly['By'].shape, fillval=0)
    # Before first SSC: nominal slow wind
    inds_before_1 = tb.tOverlapHalf([dt.datetime(1989, 3, 12), t_ssc1], hourly['DateTime'])
    hourly['V_sw'][inds_before_1] = 400
    # Between first and second SSC
    inds_between_12 = tb.tOverlapHalf([t_ssc1, t_ssc2], hourly['DateTime'])
    hourly['V_sw'][inds_between_12] = 550
    # IMF turns north around 1989-03-14T02:00:00 according to inverse Burton and Kondrashov
    inds_mainphase = tb.tOverlapHalf([t_ssc2, t_turn], hourly['DateTime'])
    hourly['V_sw'][inds_mainphase] = 983
    # Then have speed decay towards IMP-8 measurement which is ballpark 820 km/s
    inds_rest = tb.tOverlapHalf([t_turn, hourly['DateTime'][-1]], hourly['DateTime'])
    hourly['V_sw'][inds_rest] = np.linspace(983, 820, len(inds_rest))
    # Now we have speed, estimate temperature
    hourly['Plasma_temp'] = emp.getExpectedSWTemp(hourly['V_sw'],
                                                  model='XB15',
                                                  units='K')
    inds_cme = tb.tOverlapHalf([t_cme, en], hourly['DateTime'])
    hourly['Plasma_temp'][inds_cme] /= 3  # reduce by factor of 3 for CME-like temp
    # Get "Kondrashov VBs" using V from Boteler/Nagatsuma
    # NOTE(review): rectify() presumably zeroes negative values (half-wave
    # rectification of southward Bz) -- confirm against its definition.
    hourly['VBs_K'] = 1e-3 * hourly['V_sw'] * rectify(-1 * hourly['Bz'])  # mV/m
    # Now get VBs from inverse Burton, driven by the Kyoto Dst index
    ky_dat = kyo.fetch('dst', (st.year, st.month, st.day),
                       (en.year, en.month, en.day))
    inds = tb.tOverlapHalf([st, en], ky_dat['time'])
    # Pressure correct here using n and V
    hourly = calc_P(hourly)
    hourly['Dst'] = ky_dat['dst'][inds]
    dst_star = emp.getDststar(hourly, model='OBrien')
    hourly['VBs_OB'] = 1e-3 * inverseOBrienMcPherron(dst_star)
    # Make new Bz from VBs_OB (inverse of VBs = -1e-3 * V * Bz)
    hourly['Bz_OB'] = -1e3 * hourly['VBs_OB'] / hourly['V_sw']  # nT
    return hourly
def toDate(yr, doy, hh, mm, ss):
    """Build an object array of datetimes from parallel year/DOY/time arrays."""
    months, days = spt.doy2date(yr, doy)
    stamps = dm.dmfilled(len(yr), fillval=None, dtype=object)
    for i, (mon, day) in enumerate(zip(months, days)):
        stamps[i] = dt.datetime(yr[i], mon, day, hh[i], mm[i], ss[i])
    return stamps
def readIMP8plasmafile(fname):
    """
    Read an IMP-8 fine-resolution plasma file into a SpaceData.

    Data source: ftp://space.mit.edu/pub/plasma/imp/fine_res/1989/

    File columns (whitespace-separated, header lines start with non-digits):
    yr doy hh mm ss sc decimal_yr rg md xse yse zse ysm zsm speed
    thermal_speed density E/W_angle N/S_angle
    (each of speed/thermal speed/density/angles has moment and nonlinear
    variants; trailing columns are best/thresh flags)
    """
    header = []
    with open(fname, 'r') as fh:
        # Scan past the free-text header: remember the file position before
        # each line so the first data line can be pushed back for numpy.
        while True:
            pos = fh.tell()
            line = fh.readline().strip()
            if not line:
                # empty line, skip
                continue
            if not line[0].isdigit():
                # this is header, save it
                header.append(line)
            else:
                # first line of data, roll back to start and pass to numpy
                fh.seek(pos)
                break
        data = np.loadtxt(fh)

    def toDate(yr, doy, hh, mm, ss):
        # Convert parallel year/day-of-year/time columns to datetime objects.
        MM, DD = spt.doy2date(yr, doy)
        dates = dm.dmfilled(len(yr), fillval=None, dtype=object)
        for idx, (mon, day) in enumerate(zip(MM, DD)):
            dates[idx] = dt.datetime(yr[idx], mon, day, hh[idx], mm[idx], ss[idx])
        return dates

    region = data[:, 7]  # "rg" column; region flag (sheath/magnetosphere/SW)
    outdata = dm.SpaceData(attrs={'header': header, 'fname': fname})
    outdata['time'] = toDate(data[:, 0].astype(int), data[:, 1].astype(int),
                             data[:, 2].astype(int), data[:, 3].astype(int),
                             data[:, 4].astype(int))
    outdata['region'] = dm.dmarray(region)
    outdata['pos_gse'] = dm.dmarray(data[:, 9:12], attrs={'coord_sys': 'gse'})
    # GSM x equals GSE x; only y/z come from the ysm/zsm columns.
    outdata['pos_gsm'] = dm.dmfilled(outdata['pos_gse'].shape, fillval=0,
                                     dtype=float, attrs={'coord_sys': 'gsm'})
    outdata['pos_gsm'][:, 0] = data[:, 9]
    outdata['pos_gsm'][:, 1:] = data[:, 12:14]
    outdata['speed'] = dm.dmarray(data[:, 14],
                                  attrs={'description': 'speed from moments'})
    # outdata['speed'][region > 2] = np.nan # region 3 is sheath
    outdata['speed_nl'] = dm.dmarray(data[:, 15])
    vmask = outdata['speed_nl'] >= 9000
    # outdata['speed_nl'][region > 2] = np.nan # region 3 is sheath
    # NOTE(review): values >= 9000 look like fill values being masked here;
    # the trailing "region 3 is sheath" text appears copied from the lines
    # above -- confirm against the file format description.
    outdata['speed_nl'][vmask] = np.nan  # region 3 is sheath
    outdata['n_dens'] = dm.dmarray(
        data[:, 18],
        attrs={'description': 'number density from moments'})
    outdata['n_dens_nl'] = dm.dmarray(data[:, 19])
    # 60.5 * w^2 converts thermal speed (km/s) to temperature -- presumably
    # Kelvin via T = m_p w^2 / (2 k_B); TODO confirm constant/units.
    outdata['temp'] = 60.5 * dm.dmarray(data[:, 16])**2
    outdata['temp_nl'] = 60.5 * dm.dmarray(data[:, 17])**2
    outdata['data'] = data  # keep the raw array for anything not broken out
    return outdata
def test_dmfilled_recarray(self):
    """dmfilled should fill a recarray"""
    # Structured dtype: fill value 3 should land in every field.
    rec_dtype = [('foo', 'f4'), ('bar', 'i2')]
    got = dm.dmfilled(4, fillval=3, dtype=rec_dtype)
    expected = np.empty((4, ), rec_dtype)
    expected.fill(3)
    np.testing.assert_equal(got, dm.dmarray(expected))
def toDate(ymd, hms):
    """Convert parallel packed-date and packed-time arrays to datetimes.

    ``ymd`` packs (year - 1900)*10000 + month*100 + day; ``hms`` packs
    hh*10000 + mm*100 + ss.  Returns an object array of datetimes.
    """
    stamps = dm.dmfilled(len(ymd), fillval=None, dtype=object)
    for i, (date_part, time_part) in enumerate(zip(ymd, hms)):
        yr, date_rem = divmod(date_part, 10000)
        mon, day = divmod(date_rem, 100)
        hh, time_rem = divmod(time_part, 10000)
        mm, ss = divmod(time_rem, 100)
        stamps[i] = (dt.datetime(int(yr) + 1900, int(mon), int(day))
                     + dt.timedelta(hours=int(hh), minutes=int(mm),
                                    seconds=int(ss)))
    return stamps
def get_omni(ticks, dbase='QDhourly', **kwargs):
    '''
    Returns Qin-Denton OMNI values, interpolated to any time-base from a default hourly resolution

    The update function in toolbox retrieves all available hourly Qin-Denton data,
    and this function accesses that and interpolates to the given times,
    returning the OMNI values as a SpaceData (dict-like) with
    Kp, Dst, dens, velo, Pdyn, ByIMF, BzIMF, G1, G2, G3, etc.
    (see also http://www.dartmouth.edu/~rdenton/magpar/index.html and
    http://www.agu.org/pubs/crossref/2007/2006SW000296.shtml )

    Parameters
    ==========
    ticks : Ticktock class or array-like of datetimes
        time values for desired output
    dbase : str (optional)
        Select data source, options are 'QDhourly', 'OMNI2hourly', 'Mergedhourly'
        Note - Custom data sources can be specified in the spacepy config file
        as described in the module documentation.

    Returns
    =======
    out : spacepy.datamodel.SpaceData
        containing all Qin-Denton values at times given by ticks

    Examples
    ========
    >>> import spacepy.time as spt
    >>> import spacepy.omni as om
    >>> ticks = spt.Ticktock(['2002-02-02T12:00:00', '2002-02-02T12:10:00'], 'ISO')
    >>> d = om.get_omni(ticks)
    >>> d.tree(levels=1)
    +
    |____ByIMF
    |____Bz1
    |____Bz2
    |____Bz3
    |____Bz4
    |____Bz5
    |____Bz6
    |____BzIMF
    |____DOY
    |____Dst
    |____G1
    |____G2
    |____G3
    |____Hr
    |____Kp
    |____Pdyn
    |____Qbits
    |____RDT
    |____UTC
    |____W1
    |____W2
    |____W3
    |____W4
    |____W5
    |____W6
    |____Year
    |____akp3
    |____dens
    |____ticks
    |____velo

    Notes
    =====
    Note about Qbits: If the status variable is 2, the quantity you are using is fairly well
    determined. If it is 1, the value has some connection to measured values, but is not directly
    measured. These values are still better than just using an average value, but not as good
    as those with the status variable equal to 2. If the status variable is 0, the quantity is
    based on average quantities, and the values listed are no better than an average value. The
    lower the status variable, the less confident you should be in the value.
    '''
    # Map each supported data-source name to an internal selector code.
    dbase_options = {'QDhourly'    : 1,
                     'OMNI2hourly' : 2,
                     'Mergedhourly': 3,
                     'Test'        : -9,
                     }

    if not isinstance(ticks, spt.Ticktock):
        try:
            ticks = spt.Ticktock(ticks, 'UTC')
        except:
            # NOTE(review): bare except also masks e.g. KeyboardInterrupt.
            raise TypeError('get_omni: Input times must be a Ticktock object or a list of datetime objects')

    if not dbase in dbase_options:
        from spacepy import config
        if dbase in config:
            #If a dbase is specified that isn't a default, then it MUST be in the spacepy config
            qdpath = os.path.split(os.path.split(config[dbase])[0])[0]
            if not os.path.isdir(qdpath):
                raise IOError('Specified dbase ({0}) does not have a valid location ({1})'.format(dbase, config[dbase]))
            days = list(set([tt.date() for tt in ticks.UTC]))
            flist = ['']*len(days)
            fnpath, fnformat = os.path.split(config[dbase])
            # Expand YYYY/MM/DD placeholders in the configured path template.
            for idx, day in enumerate(days):
                dp = fnpath.replace('YYYY', '{0}'.format(day.year))
                df = fnformat.replace('YYYY', '{0}'.format(day.year))
                df = df.replace('MM', '{0:02d}'.format(day.month))
                df = df.replace('DD', '{0:02d}'.format(day.day))
                flist[idx] = os.path.join(dp, df)
            if 'convert' in kwargs:
                convdict = kwargs['convert']
            else:
                convdict = True #set to True as default?
            if 'interp' not in kwargs:
                kwargs['interp'] = True
            data = readJSONheadedASCII(sorted(flist), convert=convdict)
            omniout = SpaceData()

            # First matching name wins as the time variable.
            time_var = [var for var in ['DateTime', 'Time', 'Epoch', 'UTC'] if var in data]
            if time_var:
                use_t_var = time_var[0]
            else:
                #no obvious time variable in input files ... can't continue
                raise ValueError('No clear time variable in file')

            if kwargs['interp'] is True:
                data['RDT'] = spt.Ticktock(data[use_t_var]).RDT
                keylist = sorted(data.keys())
                dum = keylist.pop(keylist.index(use_t_var))
                for key in keylist:
                    try:
                        omniout[key] = dmarray(np.interp(ticks.RDT, data['RDT'], data[key], left=np.NaN, right=np.NaN))
                        omniout[key].attrs = dmcopy(data[key].attrs)
                    except:
                        # 2-D variable: interpolate column by column.
                        try:
                            omniout[key] = dmfilled([len(ticks.RDT), data[key].shape[1]], fillval=np.NaN, attrs=dmcopy(data[key].attrs))
                            for col in range(data[key].shape[1]):
                                omniout[key][:,col] = np.interp(ticks.RDT, data['RDT'], data[key][:,col], left=np.NaN, right=np.NaN)
                        except ValueError:
                            print('Failed to interpolate {0} to new time base, skipping variable'.format(key))
                        except IndexError:
                            print('Variable {0} appears to be non-record varying, skipping interpolation'.format(key))
                            omniout[key] = data[key]
                omniout['UTC'] = ticks.UTC
            else:
                #Trim to specified times
                inds = tOverlapHalf([ticks[0].RDT, ticks[-1].RDT], spt.Ticktock(data['DateTime']).RDT)
                for key in data:
                    if len(inds) == len(data[key]):
                        omniout[key] = data[key][inds]
                    else: #is ancillary data
                        omniout[key] = data[key]
                #TODO: convert to same format as OMNI/QD read (or vice versa)
                omniout['UTC'] = omniout[use_t_var]
            return omniout
        else:
            raise IOError('Specified dbase ({0}) must be specified in spacepy.config'.format(dbase))

    def getattrs(hf, key):
        # Copy HDF5 attributes for key into a plain dict (best effort).
        out = {}
        if hasattr(hf[key],'attrs'):
            for kk, value in hf[key].attrs.items():
                try:
                    out[kk] = value
                except:
                    pass
        return out

    def HrFromDT(indt):
        # Decimal hour-of-day from a datetime.
        hour = indt.hour
        minute = indt.minute
        second = indt.second
        musecond = indt.microsecond
        return hour+(minute/60.0)+(second/3600.0)+(musecond/3600.0e3)

    import h5py as h5
    fname, QDkeylist, O2keylist = '', [], []
    omnivals = SpaceData()
    dbase_select = dbase_options[dbase]
    if dbase_select in [1, 3, -9]:
        # Qin-Denton hourly (or test) file.
        if dbase_select > 0:
            ldb = 'QDhourly'
            fln = omnifln
        else:
            ldb = 'Test'
            fln = testfln
        with h5.File(fln, 'r') as hfile:
            QDkeylist = [kk for kk in hfile if kk not in ['Qbits', 'UTC']]
            st, en = ticks[0].RDT, ticks[-1].RDT
            ##check that requested requested times are within range of data
            enval, stval = omnirange(dbase=ldb)[1], omnirange(dbase=ldb)[0]
            # NOTE(review): mixes ticks.UTC[0] (datetime) with ticks[-1]
            # (Ticktock) in the comparisons; the OMNI2 branch below uses
            # ticks[0].UTC -- confirm both comparison styles behave the same.
            if (ticks.UTC[0]>enval) or (ticks[-1]<stval):
                raise ValueError('Requested dates are outside data range')
            if (ticks.UTC[-1]>enval) or (ticks[0]<stval):
                print('Warning: Some requested dates are outside data range ({0})'.format(ldb))
            inds = tOverlapHalf([st, en], hfile['RDT'], presort=True) #returns an xrange
            inds = indsFromXrange(inds)
            if inds[0] < 1: inds[0] = 1
            # Pad by one record each side so interpolation has bracketing points.
            sl_op = slice(inds[0]-1, inds[-1]+2)

            fname = ','.join([fname,hfile.filename])
            omnivals.attrs = getattrs(hfile, '/')
            for key in QDkeylist:
                omnivals[key] = dmarray(hfile[key][sl_op]) #TODO: add attrs from h5
                omnivals[key].attrs = getattrs(hfile, key)
            for key in hfile['Qbits']:
                omnivals['Qbits<--{0}'.format(key)] = dmarray(hfile['/Qbits/{0}'.format(key)][sl_op])
                omnivals['Qbits<--{0}'.format(key)].attrs = getattrs(hfile, '/Qbits/{0}'.format(key))
                QDkeylist.append('Qbits<--{0}'.format(key))

    if dbase_options[dbase] == 2 or dbase_options[dbase] == 3:
        # OMNI2 hourly file (alone, or merged with Qin-Denton above).
        ldb = 'OMNI2hourly'
        with h5.File(omni2fln) as hfile:
            O2keylist = [kk for kk in hfile if kk not in ['Epoch','RDT']]
            st, en = ticks[0].RDT, ticks[-1].RDT
            ##check that requested requested times are within range of data
            enval, stval = omnirange(dbase=ldb)[1], omnirange(dbase=ldb)[0]
            if (ticks[0].UTC>enval) or (ticks[-1]<stval):
                raise ValueError('Requested dates are outside data range')
            if (ticks[-1].UTC>enval) or (ticks[0]<stval):
                print('Warning: Some requested dates are outside data range ({0})'.format(ldb))
            inds = tOverlapHalf([st, en], hfile['RDT'], presort=True) #returns an xrange
            inds = indsFromXrange(inds)
            if inds[0] < 1: inds[0] = 1
            sl_op = slice(inds[0]-1, inds[-1]+2)
            fname = ','.join([fname,hfile.filename])
            omnivals.attrs = getattrs(hfile, '/') #TODO: This overwrites the previous set on Merged load... Fix!
            omnivals['RDT_OMNI'] = dmarray(hfile['RDT'][sl_op])
            for key in O2keylist:
                omnivals[key] = dmarray(hfile[key][sl_op]) #TODO: add attrs from h5
                omnivals[key].attrs = getattrs(hfile, key)

    if dbase_options[dbase] == 3:
        #prune "merged" SpaceData
        sigmas = [key for key in omnivals if 'sigma' in key]
        for sk in sigmas: del omnivals[sk]
        bees = [key for key in omnivals if re.search('B._', key)]
        for bs in bees: del omnivals[bs]
        aves = [key for key in omnivals if ('_ave' in key) or ('ave_' in key)]
        for av in aves: del omnivals[av]

    omniout = SpaceData(attrs=dmcopy(omnivals.attrs))
    omniout.attrs['filename'] = fname[1:]  # drop leading comma from join
    ###print('QDkeys: {0}\n\nO2keys: {1}'.format(QDkeylist, O2keylist))
    for key in sorted(omnivals.keys()):
        if key in O2keylist:
            omniout[key] = dmarray(np.interp(ticks.RDT, omnivals['RDT_OMNI'], omnivals[key], left=np.NaN, right=np.NaN))
            #set metadata -- assume this has been set properly in d/l'd file to match ECT-SOC files
            omniout[key].attrs = dmcopy(omnivals[key].attrs)
        elif key in QDkeylist:
            omniout[key] = dmarray(np.interp(ticks.RDT, omnivals['RDT'], omnivals[key], left=np.NaN, right=np.NaN))
            omniout[key].attrs = dmcopy(omnivals[key].attrs)
        if key == 'G3': #then we have all the Gs
            # Collapse G1..G3 into a single (n, 3) array.
            omniout['G'] = dmarray(np.vstack([omniout['G1'], omniout['G2'], omniout['G3']]).T)
            omniout['G'].attrs = dmcopy(omnivals['G1'].attrs)
            for i in range(1,4): del omniout['G{0}'.format(i)]
        if key == 'W6':
            # Collapse W1..W6 into a single (n, 6) array.
            omniout['W'] = dmarray(np.vstack([omniout['W1'], omniout['W2'], omniout['W3'], omniout['W4'], omniout['W5'], omniout['W6']]).T)
            omniout['W'].attrs = dmcopy(omnivals['W1'].attrs)
            for i in range(1,7): del omniout['W{0}'.format(i)]
        if 'Qbits' in key:
            #Qbits are integer vals, higher is better, so floor to get best representation of interpolated val
            omniout[key] = np.floor(omnivals[key])
            omniout[key].attrs = dmcopy(omnivals[key].attrs)
            if 'G3' in key: #then we have all the Gs
                omniout['Qbits<--G'] = dmarray(np.vstack([omniout['Qbits<--G1'], omniout['Qbits<--G2'], omniout['Qbits<--G3']]).T)
                for i in range(1,4): del omniout['Qbits<--G{0}'.format(i)]
            if 'W6' in key:
                omniout['Qbits<--W'] = dmarray(np.vstack([omniout['Qbits<--W1'], omniout['Qbits<--W2'], omniout['Qbits<--W3'], omniout['Qbits<--W4'], omniout['Qbits<--W5'], omniout['Qbits<--W6']]).T)
                for i in range(1,7): del omniout['Qbits<--W{0}'.format(i)]

    omniout['ticks'] = ticks
    omniout['UTC'] = ticks.UTC
    omniout['Hr'] = dmarray([HrFromDT(val) for val in omniout['UTC']])
    omniout['Year'] = dmarray([val.year for val in omniout['UTC']])
    omniout = unflatten(omniout)

    return omniout
def get_omni(ticks, dbase='QDhourly', **kwargs):
    '''
    Returns Qin-Denton OMNI values, interpolated to any time-base from a default hourly resolution

    The update function in toolbox retrieves all available hourly Qin-Denton data,
    and this function accesses that and interpolates to the given times,
    returning the OMNI values as a SpaceData (dict-like) with
    Kp, Dst, dens, velo, Pdyn, ByIMF, BzIMF, G1, G2, G3, etc.
    (see also http://www.dartmouth.edu/~rdenton/magpar/index.html and
    http://www.agu.org/pubs/crossref/2007/2006SW000296.shtml )

    Parameters
    ==========
    ticks : Ticktock class or array-like of datetimes
        time values for desired output
    dbase : str (optional)
        Select data source, options are 'QDhourly', 'OMNI2hourly', 'Mergedhourly'
        Note - Custom data sources can be specified in the spacepy config file
        as described in the module documentation.

    Returns
    =======
    out : spacepy.datamodel.SpaceData
        containing all Qin-Denton values at times given by ticks

    Examples
    ========
    >>> import spacepy.time as spt
    >>> import spacepy.omni as om
    >>> ticks = spt.Ticktock(['2002-02-02T12:00:00', '2002-02-02T12:10:00'], 'ISO')
    >>> d = om.get_omni(ticks)
    >>> d.tree(levels=1)
    +
    |____ByIMF
    |____Bz1
    |____Bz2
    |____Bz3
    |____Bz4
    |____Bz5
    |____Bz6
    |____BzIMF
    |____DOY
    |____Dst
    |____G1
    |____G2
    |____G3
    |____Hr
    |____Kp
    |____Pdyn
    |____Qbits
    |____RDT
    |____UTC
    |____W1
    |____W2
    |____W3
    |____W4
    |____W5
    |____W6
    |____Year
    |____akp3
    |____dens
    |____ticks
    |____velo

    Notes
    =====
    Note about Qbits: If the status variable is 2, the quantity you are using is fairly well
    determined. If it is 1, the value has some connection to measured values, but is not directly
    measured. These values are still better than just using an average value, but not as good
    as those with the status variable equal to 2. If the status variable is 0, the quantity is
    based on average quantities, and the values listed are no better than an average value. The
    lower the status variable, the less confident you should be in the value.
    '''
    # NOTE(review): this file contains another get_omni definition with an
    # identical signature; if both live in one module the later definition
    # shadows the earlier -- confirm which copy is intended to survive.
    # Map each supported data-source name to an internal selector code.
    dbase_options = {
        'QDhourly': 1,
        'OMNI2hourly': 2,
        'Mergedhourly': 3,
        'Test': -9,
    }
    if not isinstance(ticks, spt.Ticktock):
        try:
            ticks = spt.Ticktock(ticks, 'UTC')
        except:
            # NOTE(review): bare except also masks e.g. KeyboardInterrupt.
            raise TypeError(
                'get_omni: Input times must be a Ticktock object or a list of datetime objects'
            )
    if not dbase in dbase_options:
        from spacepy import config
        if dbase in config:
            #If a dbase is specified that isn't a default, then it MUST be in the spacepy config
            qdpath = os.path.split(os.path.split(config[dbase])[0])[0]
            if not os.path.isdir(qdpath):
                raise IOError(
                    'Specified dbase ({0}) does not have a valid location ({1})'
                    .format(dbase, config[dbase]))
            days = list(set([tt.date() for tt in ticks.UTC]))
            flist = [''] * len(days)
            fnpath, fnformat = os.path.split(config[dbase])
            # Expand YYYY/MM/DD placeholders in the configured path template.
            for idx, day in enumerate(days):
                dp = fnpath.replace('YYYY', '{0}'.format(day.year))
                df = fnformat.replace('YYYY', '{0}'.format(day.year))
                df = df.replace('MM', '{0:02d}'.format(day.month))
                df = df.replace('DD', '{0:02d}'.format(day.day))
                flist[idx] = os.path.join(dp, df)
            if 'convert' in kwargs:
                convdict = kwargs['convert']
            else:
                convdict = True  #set to True as default?
            if 'interp' not in kwargs:
                kwargs['interp'] = True
            data = readJSONheadedASCII(sorted(flist), convert=convdict)
            omniout = SpaceData()

            # First matching name wins as the time variable.
            time_var = [
                var for var in ['DateTime', 'Time', 'Epoch', 'UTC']
                if var in data
            ]
            if time_var:
                use_t_var = time_var[0]
            else:
                #no obvious time variable in input files ... can't continue
                raise ValueError('No clear time variable in file')

            if kwargs['interp'] is True:
                data['RDT'] = spt.Ticktock(data[use_t_var]).RDT
                keylist = sorted(data.keys())
                dum = keylist.pop(keylist.index(use_t_var))
                for key in keylist:
                    try:
                        omniout[key] = dmarray(
                            np.interp(ticks.RDT,
                                      data['RDT'],
                                      data[key],
                                      left=np.NaN,
                                      right=np.NaN))
                        omniout[key].attrs = dmcopy(data[key].attrs)
                    except:
                        # 2-D variable: interpolate column by column.
                        try:
                            omniout[key] = dmfilled(
                                [len(ticks.RDT), data[key].shape[1]],
                                fillval=np.NaN,
                                attrs=dmcopy(data[key].attrs))
                            for col in range(data[key].shape[1]):
                                omniout[key][:, col] = np.interp(
                                    ticks.RDT,
                                    data['RDT'],
                                    data[key][:, col],
                                    left=np.NaN,
                                    right=np.NaN)
                        except ValueError:
                            print(
                                'Failed to interpolate {0} to new time base, skipping variable'
                                .format(key))
                        except IndexError:
                            print(
                                'Variable {0} appears to be non-record varying, skipping interpolation'
                                .format(key))
                            omniout[key] = data[key]
                omniout['UTC'] = ticks.UTC
            else:
                #Trim to specified times
                inds = tOverlapHalf([ticks[0].RDT, ticks[-1].RDT],
                                    spt.Ticktock(data['DateTime']).RDT)
                for key in data:
                    if len(inds) == len(data[key]):
                        omniout[key] = data[key][inds]
                    else:  #is ancillary data
                        omniout[key] = data[key]
                #TODO: convert to same format as OMNI/QD read (or vice versa)
                omniout['UTC'] = omniout[use_t_var]
            return omniout
        else:
            raise IOError(
                'Specified dbase ({0}) must be specified in spacepy.config'.
                format(dbase))

    def getattrs(hf, key):
        # Copy HDF5 attributes for key into a plain dict (best effort).
        out = {}
        if hasattr(hf[key], 'attrs'):
            for kk, value in hf[key].attrs.items():
                try:
                    out[kk] = value
                except:
                    pass
        return out

    def HrFromDT(indt):
        # Decimal hour-of-day from a datetime.
        hour = indt.hour
        minute = indt.minute
        second = indt.second
        musecond = indt.microsecond
        return hour + (minute / 60.0) + (second / 3600.0) + (musecond /
                                                             3600.0e3)

    import h5py as h5
    fname, QDkeylist, O2keylist = '', [], []
    omnivals = SpaceData()
    dbase_select = dbase_options[dbase]
    if dbase_select in [1, 3, -9]:
        # Qin-Denton hourly (or test) file.
        if dbase_select > 0:
            ldb = 'QDhourly'
            fln = omnifln
        else:
            ldb = 'Test'
            fln = testfln
        with h5.File(fln, 'r') as hfile:
            QDkeylist = [kk for kk in hfile if kk not in ['Qbits', 'UTC']]
            st, en = ticks[0].RDT, ticks[-1].RDT
            ##check that requested requested times are within range of data
            enval, stval = omnirange(dbase=ldb)[1], omnirange(dbase=ldb)[0]
            if (ticks.UTC[0] > enval) or (ticks[-1] < stval):
                raise ValueError('Requested dates are outside data range')
            if (ticks.UTC[-1] > enval) or (ticks[0] < stval):
                print(
                    'Warning: Some requested dates are outside data range ({0})'
                    .format(ldb))
            inds = tOverlapHalf([st, en], hfile['RDT'],
                                presort=True)  #returns an xrange
            inds = indsFromXrange(inds)
            if inds[0] < 1: inds[0] = 1
            # Pad by one record each side so interpolation has bracketing points.
            sl_op = slice(inds[0] - 1, inds[-1] + 2)

            fname = ','.join([fname, hfile.filename])
            omnivals.attrs = getattrs(hfile, '/')
            for key in QDkeylist:
                omnivals[key] = dmarray(
                    hfile[key][sl_op])  #TODO: add attrs from h5
                omnivals[key].attrs = getattrs(hfile, key)
            for key in hfile['Qbits']:
                omnivals['Qbits<--{0}'.format(key)] = dmarray(
                    hfile['/Qbits/{0}'.format(key)][sl_op])
                omnivals['Qbits<--{0}'.format(key)].attrs = getattrs(
                    hfile, '/Qbits/{0}'.format(key))
                QDkeylist.append('Qbits<--{0}'.format(key))

    if dbase_options[dbase] == 2 or dbase_options[dbase] == 3:
        # OMNI2 hourly file (alone, or merged with Qin-Denton above).
        ldb = 'OMNI2hourly'
        with h5.File(omni2fln) as hfile:
            O2keylist = [kk for kk in hfile if kk not in ['Epoch', 'RDT']]
            st, en = ticks[0].RDT, ticks[-1].RDT
            ##check that requested requested times are within range of data
            enval, stval = omnirange(dbase=ldb)[1], omnirange(dbase=ldb)[0]
            if (ticks[0].UTC > enval) or (ticks[-1] < stval):
                raise ValueError('Requested dates are outside data range')
            if (ticks[-1].UTC > enval) or (ticks[0] < stval):
                print(
                    'Warning: Some requested dates are outside data range ({0})'
                    .format(ldb))
            inds = tOverlapHalf([st, en], hfile['RDT'],
                                presort=True)  #returns an xrange
            inds = indsFromXrange(inds)
            if inds[0] < 1: inds[0] = 1
            sl_op = slice(inds[0] - 1, inds[-1] + 2)
            fname = ','.join([fname, hfile.filename])
            omnivals.attrs = getattrs(
                hfile, '/'
            )  #TODO: This overwrites the previous set on Merged load... Fix!
            omnivals['RDT_OMNI'] = dmarray(hfile['RDT'][sl_op])
            for key in O2keylist:
                omnivals[key] = dmarray(
                    hfile[key][sl_op])  #TODO: add attrs from h5
                omnivals[key].attrs = getattrs(hfile, key)

    if dbase_options[dbase] == 3:
        #prune "merged" SpaceData
        sigmas = [key for key in omnivals if 'sigma' in key]
        for sk in sigmas:
            del omnivals[sk]
        bees = [key for key in omnivals if re.search('B._', key)]
        for bs in bees:
            del omnivals[bs]
        aves = [key for key in omnivals if ('_ave' in key) or ('ave_' in key)]
        for av in aves:
            del omnivals[av]

    omniout = SpaceData(attrs=dmcopy(omnivals.attrs))
    omniout.attrs['filename'] = fname[1:]  # drop leading comma from join
    ###print('QDkeys: {0}\n\nO2keys: {1}'.format(QDkeylist, O2keylist))
    for key in sorted(omnivals.keys()):
        if key in O2keylist:
            omniout[key] = dmarray(
                np.interp(ticks.RDT,
                          omnivals['RDT_OMNI'],
                          omnivals[key],
                          left=np.NaN,
                          right=np.NaN))
            #set metadata -- assume this has been set properly in d/l'd file to match ECT-SOC files
            omniout[key].attrs = dmcopy(omnivals[key].attrs)
        elif key in QDkeylist:
            omniout[key] = dmarray(
                np.interp(ticks.RDT,
                          omnivals['RDT'],
                          omnivals[key],
                          left=np.NaN,
                          right=np.NaN))
            omniout[key].attrs = dmcopy(omnivals[key].attrs)
        if key == 'G3':  #then we have all the Gs
            # Collapse G1..G3 into a single (n, 3) array.
            omniout['G'] = dmarray(
                np.vstack([omniout['G1'], omniout['G2'], omniout['G3']]).T)
            omniout['G'].attrs = dmcopy(omnivals['G1'].attrs)
            for i in range(1, 4):
                del omniout['G{0}'.format(i)]
        if key == 'W6':
            # Collapse W1..W6 into a single (n, 6) array.
            omniout['W'] = dmarray(
                np.vstack([
                    omniout['W1'], omniout['W2'], omniout['W3'],
                    omniout['W4'], omniout['W5'], omniout['W6']
                ]).T)
            omniout['W'].attrs = dmcopy(omnivals['W1'].attrs)
            for i in range(1, 7):
                del omniout['W{0}'.format(i)]
        if 'Qbits' in key:
            #Qbits are integer vals, higher is better, so floor to get best representation of interpolated val
            omniout[key] = np.floor(omnivals[key])
            omniout[key].attrs = dmcopy(omnivals[key].attrs)
            if 'G3' in key:  #then we have all the Gs
                omniout['Qbits<--G'] = dmarray(
                    np.vstack([
                        omniout['Qbits<--G1'], omniout['Qbits<--G2'],
                        omniout['Qbits<--G3']
                    ]).T)
                for i in range(1, 4):
                    del omniout['Qbits<--G{0}'.format(i)]
            if 'W6' in key:
                omniout['Qbits<--W'] = dmarray(
                    np.vstack([
                        omniout['Qbits<--W1'], omniout['Qbits<--W2'],
                        omniout['Qbits<--W3'], omniout['Qbits<--W4'],
                        omniout['Qbits<--W5'], omniout['Qbits<--W6']
                    ]).T)
                for i in range(1, 7):
                    del omniout['Qbits<--W{0}'.format(i)]

    omniout['ticks'] = ticks
    omniout['UTC'] = ticks.UTC
    omniout['Hr'] = dmarray([HrFromDT(val) for val in omniout['UTC']])
    omniout['Year'] = dmarray([val.year for val in omniout['UTC']])
    omniout = unflatten(omniout)

    return omniout
def makeSW_v2():
    """Construct initial estimate of hourly solar wind parameters

    Same approach as the v1 estimate (Kondrashov reconstruction with a
    piecewise Boteler/Nagatsuma speed profile), but additionally substitutes
    a density curve taken from the September 2017 double-shock event and
    enforces a post-shock density floor.

    Returns
    =======
    hourly : mapping (presumably dm.SpaceData -- TODO confirm from getKondrashovSW)
        hourly estimate with (at least) keys Bx, Vy, Vz, V_sw, Den_P,
        Plasma_temp, VBs_K, VBs_OB, Bz_OB and Dst added/overwritten
    """
    st = dt.datetime(1989, 3, 12)
    en = dt.datetime(1989, 3, 15)
    hourly = getKondrashovSW()
    # Keep IMF By and n, set V to Boteler/Nagatsuma
    # Storm timeline anchors: two sudden commencements, CME arrival, IMF northward turning.
    t_ssc1 = dt.datetime(1989, 3, 13, 1, 27)
    t_ssc2 = dt.datetime(1989, 3, 13, 7, 43)
    t_cme = dt.datetime(1989, 3, 13, 16)
    t_turn = dt.datetime(1989, 3, 14, 2)
    # Set Bx positive, in accordance with ISEE-3 data, Vy/Vz->0
    hourly['Bx'] = dm.dmfilled(hourly['By'].shape, fillval=3)
    hourly['Vy'] = dm.dmfilled(hourly['By'].shape, fillval=0)
    hourly['Vz'] = dm.dmfilled(hourly['By'].shape, fillval=0)
    # Before first SSC: nominal slow wind
    inds_before_1 = tb.tOverlapHalf([dt.datetime(1989, 3, 12), t_ssc1], hourly['DateTime'])
    hourly['V_sw'][inds_before_1] = 400
    # Between first and second SSC
    inds_between_12 = tb.tOverlapHalf([t_ssc1, t_ssc2], hourly['DateTime'])
    hourly['V_sw'][inds_between_12] = 550
    # IMF turns north around 1989-03-14T02:00:00 according to inverse Burton and Kondrashov
    inds_mainphase = tb.tOverlapHalf([t_ssc2, t_turn], hourly['DateTime'])
    hourly['V_sw'][inds_mainphase] = 983
    # Then have speed decay towards IMP-8 measurement which is ballpark 820 km/s
    inds_rest = tb.tOverlapHalf([t_turn, hourly['DateTime'][-1]], hourly['DateTime'])
    hourly['V_sw'][inds_rest] = np.linspace(983, 820, len(inds_rest))
    # Now we have speed, estimate temperature
    hourly['Plasma_temp'] = emp.getExpectedSWTemp(hourly['V_sw'],
                                                  model='XB15',
                                                  units='K')
    inds_cme = tb.tOverlapHalf([t_cme, en], hourly['DateTime'])
    hourly['Plasma_temp'][inds_cme] /= 3  # reduce by factor of 3 for CME-like temp
    # Get "Kondrashov VBs" using V from Boteler/Nagatsuma
    # NOTE(review): rectify() presumably zeroes negative values (half-wave
    # rectification of southward Bz) -- confirm against its definition.
    hourly['VBs_K'] = 1e-3 * hourly['V_sw'] * rectify(-1 * hourly['Bz'])  # mV/m
    # Now get VBs from inverse Burton, driven by the Kyoto Dst index
    ky_dat = kyo.fetch('dst', (st.year, st.month, st.day),
                       (en.year, en.month, en.day))
    inds = tb.tOverlapHalf([st, en], ky_dat['time'])
    # Substitute density curve from Sept. 2017 (double shock)
    sep17 = pybats.ImfInput(
        filename=
        '/home/smorley/projects/github/advect1d/IMF_201709_advect_filt.dat',
        load=True)
    den_inds = tb.tOverlapHalf(
        [t_ssc1 - dt.timedelta(hours=25), hourly['DateTime'][-1]],
        hourly['DateTime'])
    nhours = len(den_inds)
    # Keep the opening 8 hours from Kondrashov (2017 event gets high)
    # NOTE(review): the [9:] slice actually preserves the first 9 hourly
    # values, not 8 -- confirm which is intended.  [::60] downsamples the
    # (apparently minute-cadence) 2017 rho series to hourly.
    hourly['Den_P'][den_inds[9:]] = 2 + (sep17['rho'][::60][9:nhours] * 2)
    # After shocks, ensure number density doesn't drop below 10 (keeps M_A over 2)
    after_ssc2 = hourly['DateTime'] > t_ssc2
    under_lim = hourly['Den_P'] <= 10
    limit_inds = np.logical_and(after_ssc2, under_lim)
    hourly['Den_P'][limit_inds] = 10
    # Pressure correct here using n and V
    hourly = calc_P(hourly)
    hourly['Dst'] = ky_dat['dst'][inds]
    dst_star = emp.getDststar(hourly, model='OBrien')
    hourly['VBs_OB'] = 1e-3 * inverseOBrienMcPherron(dst_star)
    # Make new Bz from VBs_OB (inverse of VBs = -1e-3 * V * Bz)
    hourly['Bz_OB'] = -1e3 * hourly['VBs_OB'] / hourly['V_sw']  # nT
    return hourly