missing_values='nd', autostrip=True, names=fields, converters={'site_name': tolower}, invalid_raise=False) fid.close() #filter out data from other sites log_data = log_data[log_data['site_name'] == site_name] log_dates = np.array([ datetime.datetime.strptime(d + t, '%m/%d/%Y%H:%M:%S') for d, t in zip(log_data['date'], log_data['time']) ]) #spot check data is always in local tz & convert to default static tz log_dates = np.array([i.replace(tzinfo=sonde.find_tz(i)) for i in log_dates]) tmp_datetimes = np.array([(i.strftime('%m/%d/%Y %H:%M:%S').split()) for i in log_dates]) log_data['date'] = tmp_datetimes[:, 0] log_data['time'] = tmp_datetimes[:, 1] #sort deployment data idx = np.argsort(log_dates) log_data = log_data[idx] log_dates = log_dates[idx] #write sorted log data local_log_file = os.path.join(site_dir, 'twdb_wq_' + site_name + '_deployment_log.csv') np.savetxt(local_log_file, log_data, delimiter=',', fmt='%s') #todo fix formating and add header line
('qa_status', np.int32), ('notes', 'S60')] log_data = np.genfromtxt(fid, delimiter=',', dtype=dtype, comments="somethingthatnoonewilleveruse", autostrip=True, names=fields, converters={'site_name': tolower,'renamed_filename': strip}, invalid_raise=True) fid.close() #filter out data from other sites log_data = log_data[log_data['site_name']==site_name] log_dates = np.array([datetime.datetime.strptime(d+t,'%m/%d/%Y%H:%M:%S') for d,t in zip(log_data['date'],log_data['time'])]) #spot check data is always in local tz & convert to default static tz log_dates = np.array([i.replace(tzinfo=sonde.find_tz(i)) for i in log_dates]) tmp_datetimes = np.array([(i.strftime('%m/%d/%Y %H:%M:%S').split()) for i in log_dates]) try: log_data['date'] = tmp_datetimes[:,0] log_data['time'] = tmp_datetimes[:,1] except: pass #sort deployment data idx = np.argsort(log_dates) log_data = log_data[idx] log_dates = log_dates[idx] #write sorted log data log_file_name = os.path.join(site_dir, 'twdb_wq_' + site_name + '_deployment_log.csv') local_log_file = open(log_file_name, 'wb') local_log_file.write('#file_format: pysonde deployment log version 1.0 \n # site_name, date, time, water_temperature, seawater_electrical_conductivity, salinity, ph, water_dissolved_oxygen_concentration, water_dissolved_oxygen_saturation, water_depth_non_vented,intrument_battery_voltage, spotcheck_instrument_serial_number, deployed_instrument_serial_number,deployed_DO_serial_number, deployed_filename, renamed_filename, timezone, field_tech_name, created_by, verified_by, notes \n # n/a, mm/dd/yyyy, hh:mm:ss, degC, mS/cm, psu, nd, mg/l, %, mH2O,volts, n/a, n/a,n/a, n/a,n/a,n/a,n/a,n/a,n/a,n/a \n')
# Site-specific height of the Solinst pressure sensor above the bottom:
# surveyed median mounting height plus the instrument's bottom-to-sensor
# offset.
# NOTE(review): DataFrame.ix is the legacy mixed label/position indexer --
# presumably a label lookup here; confirm before porting to .loc
solinst_sensor_height = sensor_to_gps_height.ix[site_name, 'median_solinst_sensor_height'] \
    + solinst_bottom_to_sensor
# Total water depth for rows from this sonde file = fixed sensor height +
# pressure-derived (baro-corrected) sonde depth.
# NOTE(review): chained indexing (df['col'][mask] = ...) relies on the
# old-pandas view behavior; order of these statements matters
sonde_baro_series['total_water_depth'][sonde_baro_series['file_name'] == sonde_file] = \
    solinst_sensor_height + \
    sonde_baro_series['corrected_sonde_depth'][sonde_baro_series['file_name'] == sonde_file]
# plot this file's corrected depth trace for visual QA
sonde_baro_series['corrected_sonde_depth'][sonde_baro_series['file_name'] == sonde_file].plot(style='.', label='sonde depth')
plt.ylim(0, 2)
# shift any UTC-5 (daylight-time) stamps back one hour so the whole index
# matches the static utc-6 label written to the csv below
site_deployment_data.index = [t + pandas.datetools.Hour(-1)
                              if find_tz(t).zone == 'UTC-5' else t
                              for t in site_deployment_data.index]
site_deployment_data.to_csv(os.path.join(sonde_site_dir,
                                         site_name + '_complete_dep_log.csv'),
                            sep=',', index_label='datetime(utc-6)',
                            float_format='%10.2f', na_rep=-999.99)
# overlay the third-from-last deployment column (non-NA points only) on the
# current plot
site_deployment_data[site_deployment_data.columns[-3]].dropna().plot(style='r.', markersize=12)
# build a mask of readings below both the depth detection limit and the
# salinity filter limit -- presumably flags sonde-out-of-water readings;
# confirm against the filter limits' definitions
water_depth_detection_mask = sonde_baro_series.corrected_sonde_depth \
    < water_depth_detection_limit
salinity_threshold_mask = sonde_baro_series.seawater_salinity < salinity_filter_limit
# elementwise product of boolean masks acts as logical AND
water_depth_detection_mask = water_depth_detection_mask * salinity_threshold_mask
# keep the unfiltered salinity, then mark filtered readings with the
# -888.88 sentinel (distinct from the csv NA value -999.99 above)
sonde_baro_series['raw_seawater_salinity'] = sonde_baro_series.seawater_salinity
sonde_baro_series['seawater_salinity'][water_depth_detection_mask] = -888.88