def parser(platform_info, sensor_info, lines):
    """
    Parse wind-profile data from a main Sodar file.

    Returns a dict of numpy arrays keyed by variable name: 'dt'/'time'
    (sample timestamps), 'z' (sample elevations), 'u'/'v' (derived wind
    components), one lower-cased entry per reported Sodar variable not
    handled manually, and 'error'.
    """
    main_data = maindata.MainData(''.join(lines))
    num_profiles = len(main_data)

    num_altitudes = sensor_info['num_altitudes']
    # Altitude of each range gate above the sensor, then absolute
    # elevation by adding the sensor's own elevation.
    gate_altitudes = [(k * sensor_info['altitude_interval'])
                      + sensor_info['min_altitude']
                      for k in range(num_altitudes)]
    elevations = [a + sensor_info['sensor_elevation'] for a in gate_altitudes]

    # NaN-initialized output arrays, per-profile and per-profile-per-gate.
    nan_vector = lambda dtype: n.array(
        n.ones((num_profiles,), dtype=dtype) * n.nan)
    nan_grid = lambda dtype: n.array(
        n.ones((num_profiles, num_altitudes), dtype=dtype) * n.nan)

    data = {
        'dt'   : nan_vector(object),
        'time' : nan_vector(long),
        'z'    : n.array(elevations, dtype=float),
        'u'    : nan_grid(float),
        'v'    : nan_grid(float),
        }

    # Record each variable's "gap" (missing-value) marker and allocate an
    # output array for every variable that is not derived manually.
    gaps = {}
    for variable in main_data.variables:
        symbol = variable['symbol']
        gaps[symbol] = variable['gap']
        if symbol not in manual:
            data[symbol.lower()] = nan_grid(float)
    data['error'] = nan_grid(int)

    for i, profile in enumerate(main_data):
        stop = profile.stop
        stamp = {'month' : stop.month,
                 'day'   : stop.day,
                 'year'  : stop.year,
                 'hour'  : stop.hour,
                 'min'   : stop.minute,
                 }
        dt = procutil.scanf_datetime(
            '%(month)02d-%(day)02d-%(year)04d %(hour)02d:%(min)02d' % stamp,
            fmt='%m-%d-%Y %H:%M')
        if sensor_info['utc_offset']:
            dt = dt + datetime.timedelta(hours=sensor_info['utc_offset'])
        data['dt'][i] = dt
        data['time'][i] = procutil.dt2es(dt)

        for j, observation in enumerate(profile):
            radial = observation['speed']
            theta = observation['dir']
            if radial != gaps['speed'] and theta != gaps['dir']:
                # Project radial speed / direction onto u and v
                # (note the negated sin/cos).
                theta = math.pi * float(theta) / 180.0
                radial = float(radial)
                data['u'][i][j] = -radial * math.sin(theta)
                data['v'][i][j] = -radial * math.cos(theta)

            # Copy through every non-manual variable that isn't at its
            # gap (missing) value.
            for variable in profile.variables:
                if variable not in manual and \
                   observation[variable] != gaps[variable]:
                    data[variable.lower()][i][j] = \
                        float(observation[variable])

            data['error'][i][j] = int(observation['error'])

    return data
def _nan_vector(num_samples, dtype):
    """Length-num_samples array initialized to NaN with the given dtype."""
    return n.array(n.ones((num_samples,)) * n.nan, dtype=dtype)


def _parse_sample_stamp(text, utc_offset, base=0):
    """
    Parse a 'YYYY-MM-DD HH:MM:SS' timestamp embedded in *text* starting
    at character offset *base* (the sessionid field carries the same
    layout shifted 9 characters in).  Returns a datetime, shifted by
    *utc_offset* hours when that offset is truthy.
    """
    fields = {'year'  : int(text[base + 0  : base + 4]),
              'month' : int(text[base + 5  : base + 7]),
              'day'   : int(text[base + 8  : base + 10]),
              'hour'  : int(text[base + 11 : base + 13]),
              'min'   : int(text[base + 14 : base + 16]),
              'sec'   : int(text[base + 17 : base + 19]),
              }
    stamp = ('%(month)02d-%(day)02d-%(year)04d '
             '%(hour)02d:%(min)02d:%(sec)02d') % fields
    stamp = procutil.scanf_datetime(stamp, fmt='%m-%d-%Y %H:%M:%S')
    if utc_offset:
        stamp = stamp + datetime.timedelta(hours=utc_offset)
    return stamp


def _point_value(sample, sensor_info, description_key, cast):
    """
    Fetch one point value for this platform's sensor and coerce it with
    *cast*.  Raises KeyError/AttributeError when the point is absent;
    callers treat that as "leave the NaN placeholder in place".
    """
    sensor = sample.sensors[sensor_info['id_number']]
    return cast(sensor.points[sensor_info[description_key]]['value'])


def parser(platform_info, sensor_info, lines):
    """
    Parse and assign sponge data from XML file.

    Returns a dict of numpy arrays (one entry per time sample) holding
    platform and package timestamps, session times, status/record
    numbers, and the current-meter point variables.
    """
    _data = Data(''.join(lines))

    # Each Device tag represents a time sample.
    num_samples = len(_data.devices)

    data = {
        'dt'        : _nan_vector(num_samples, object),
        'time'      : _nan_vector(num_samples, long),
        'pdt'       : _nan_vector(num_samples, object),
        'ptime'     : _nan_vector(num_samples, long),
        'ds'        : _nan_vector(num_samples, object),
        'session'   : _nan_vector(num_samples, long),
        'pds'       : _nan_vector(num_samples, object),
        'psession'  : _nan_vector(num_samples, long),
        'record'    : _nan_vector(num_samples, int),
        'status'    : _nan_vector(num_samples, int),
        'pstatus'   : _nan_vector(num_samples, int),
        'abs_speed' : _nan_vector(num_samples, float),
        'direction' : _nan_vector(num_samples, float),
        'v'         : _nan_vector(num_samples, float),
        'u'         : _nan_vector(num_samples, float),
        'heading'   : _nan_vector(num_samples, float),
        'tiltx'     : _nan_vector(num_samples, float),
        'tilty'     : _nan_vector(num_samples, float),
        'std_speed' : _nan_vector(num_samples, float),
        'strength'  : _nan_vector(num_samples, float),
        'pings'     : _nan_vector(num_samples, int),
        }

    # (output key, sensor_info description key, coercion) for every
    # per-point package variable extracted below.
    point_specs = (
        ('abs_speed', 'abs_speed_description', float),
        ('direction', 'direction_description', float),
        ('v',         'v_description',         float),
        ('u',         'u_description',         float),
        ('heading',   'heading_description',   float),
        ('tiltx',     'tiltx_description',     float),
        ('tilty',     'tilty_description',     float),
        ('std_speed', 'std_speed_description', float),
        ('strength',  'strength_description',  float),
        ('pings',     'pings_description',     int),
        )

    for sample_index, sample in enumerate(_data.devices):
        utc_offset = sensor_info['utc_offset']

        # sample time at the platform
        dt = _parse_sample_stamp(sample['time'], utc_offset)
        data['dt'][sample_index] = dt
        data['time'][sample_index] = procutil.dt2es(dt)

        # sample time at the package
        package_dt = _parse_sample_stamp(sample['data_time'], utc_offset)
        data['pdt'][sample_index] = package_dt
        data['ptime'][sample_index] = procutil.dt2es(package_dt)

        # platform session time (timestamp starts 9 chars into sessionid)
        ds = _parse_sample_stamp(sample['sessionid'], utc_offset, base=9)
        data['ds'][sample_index] = ds
        data['session'][sample_index] = procutil.dt2es(ds)

        # package session time
        package_ds = _parse_sample_stamp(sample['data_sessionid'], utc_offset)
        data['pds'][sample_index] = package_ds
        data['psession'][sample_index] = procutil.dt2es(package_ds)

        # platform variables -- missing fields keep their NaN placeholder
        try:
            data['record'][sample_index] = int(sample['recordnumber'])
        except KeyError:
            pass
        try:
            data['status'][sample_index] = \
                int(sample['status'].partition(':')[0])
        except (KeyError, AttributeError, ):
            pass

        # package variables
        try:
            data['pstatus'][sample_index] = int(
                sample.sensors[sensor_info['id_number']]
                ['status'].partition(':')[0])
        except (KeyError, AttributeError, ):
            pass
        for key, description_key, cast in point_specs:
            try:
                data[key][sample_index] = \
                    _point_value(sample, sensor_info, description_key, cast)
            except (KeyError, AttributeError, ):
                pass

    return data
def parse_csi_loggernet(fn, lines): """ From FSL (CSI datalogger program files): Example data: TOA5,CR1000_CBC,CR1000,5498,CR1000.Std.11,CPU:UNC_CrowBranch.CR1,1554,Data15Min TIMESTAMP,RECORD,RainIn_Tot,WaterLevelFt,Flow TS,RN,,, ,,Tot,Smp,Smp 2009-01-22 15:30:00,0,0,0,0 2009-01-22 15:45:00,1,0,0,0 2009-01-22 16:00:00,2,0.01,0,0 2009-01-22 16:15:00,3,0,0,0 TOA5,CR1000_CBC,CR1000,5498,CR1000.Std.11,CPU:UNC_CrowBranch.CR1,1554,DataHourly TIMESTAMP,RECORD,SondeTempC,SpCond,DOSat,DOmg,pH,Turb,BattVolt_Min TS,RN,,,,,,, ,,Smp,Smp,Smp,Smp,Smp,Smp,Min 2009-01-22 16:00:00,0,2.68,0.533,7.63,-46.8,-1.4,0,11.99 2009-01-22 17:00:00,1,3.07,0.553,7.62,-46.6,-1.4,0,11.96 2009-01-22 18:00:00,2,3.45,0.548,7.62,-46.5,-1.4,0,11.91 2009-01-22 19:00:00,3,3.53,0.546,7.62,-46.3,-1.4,0,11.89 2009-01-22 20:00:00,4,3.59,0.547,7.62,-46.3,-1.4,0,11.86 2009-01-22 21:00:00,5,3.55,0.545,7.61,-46.2,-0.7,0,11.84 2009-01-22 22:00:00,6,3.47,0.545,7.62,-46.3,4.2,0,11.81 2009-01-22 23:00:00,7,3.37,0.545,7.62,-46.4,-0.7,0,11.8 2009-01-23 00:00:00,8,3.28,0.545,7.62,-46.5,4.2,0,11.78 2009-01-23 01:00:00,9,3.17,0.546,7.62,-46.7,-0.9,0,11.76 2009-01-23 02:00:00,10,3,0.549,7.63,-46.8,-1.3,0,11.74 2009-01-23 03:00:00,11,2.95,0.55,7.64,-47.3,-1.4,0,11.73 2009-01-23 04:00:00,12,2.89,0.552,7.63,-47.2,-1.4,0,11.71 2009-01-23 05:00:00,13,2.8,0.554,7.64,-47.3,-1.4,0,11.69 2009-01-23 06:00:00,14,2.72,0.554,7.64,-47.6,-1.3,0,11.68 """ p = os.path.split(fn) (loggertype, id, datatype) = p[1].split('_') this_month_str = procutil.this_month() if datatype=='Data15Min.dat': data_dir = os.path.join(p[0],id.lower(),'flow') ofn_prefix = '%s_%s' % (id.lower(), 'flow') samples_per_hour = 4 elif datatype=='DataHourly.dat': data_dir = os.path.join(p[0],id.lower(),'wq') ofn_prefix = '%s_%s' % (id.lower(), 'wq') samples_per_hour = 1 if not os.path.isdir(data_dir): print ' ... 
Creating directory: '+data_dir os.mkdir(data_dir) ofn = os.path.join(data_dir, ofn_prefix) ofn = '_'.join([ofn, this_month_str]) ofn = '.'.join([ofn, 'dat']) # delete previous existing month file so start fresh if os.path.exists(ofn): print ' ... ... Deleting file: '+ofn os.remove(ofn) # only read last part of each loggernet data file starti = -32*samples_per_hour*24 endi = -1 # unless there is less than one month of data in the file if len(lines)<32*samples_per_hour*24+4: starti = 4 # skip first 4 lines but write these four lines to top of monthly files for line in lines[starti:endi]: # split line sw = re.split(',', line) if len(sw)>=1: # print line # get sample datetime from sw[0] sample_dt = procutil.scanf_datetime(sw[0], fmt='"%Y-%m-%d %H:%M:%S"') file_month_str = '%4d_%02d' % sample_dt.timetuple()[0:2] else: # not a well-formed line, so skip to next line print 'ill-formed time, line not to be copied: ' + line continue if file_month_str == this_month_str: if os.path.exists(ofn): f = open(ofn, 'a') f.write(line) f.close else: print ' ... ... Creating file: '+ofn print lines[0:4] # write first four header lines to each new month # and the first line of data for the month f = open(ofn, 'w') for l in lines[0:4]: f.write(l) f.write(line) f.close() # for each line return