def run(self):
    self.out.set_fraction(0.01)
    points = self.points_store['points']
    training_dates = self.training_data_store.keys()
    training_array = np.zeros((len(points), len(training_dates)))
    # build array of training data: one row per sample point, one column
    # per training day
    for date_i in range(len(training_dates)):
        date_str = training_dates[date_i]
        print date_str
        data = self.training_data_store[date_str]
        data_year, data_month, data_day = [int(s) for s in date_str.split('-')]
        data_midnight = datetime.datetime(data_year, data_month, data_day, 0, 0)
        # shift experiment-relative times onto this training day's clock
        time_offset = (ppas.datetime_to_seconds(self.exp_midnight)
                       - ppas.datetime_to_seconds(data_midnight))
        for p_i in range(len(points)):
            p = points[p_i]
            t = p[3] - time_offset
            val = ppas.roms.get_value(data, p[0], p[1], p[2], t,
                                      self.varname, interp='linear')
            training_array[p_i, date_i] = val
    self.out.set_fraction(0.5)
    # filter out bad data: keep only columns whose values all fall in a
    # physically plausible range for the variable
    if self.varname == 'salt':
        varmin, varmax = 30., 40.
    elif self.varname == 'temp':
        varmin, varmax = 10., 100.
    else:
        raise ValueError('unknown variable type: %s' % self.varname)
    good_columns = ((training_array > varmin)
                    & (training_array < varmax)).all(axis=0)
    self.out.set_fraction(0.7)
    # calculate covariance matrix across training days
    kmat = np.cov(training_array[:, good_columns])
    self.save_store['kmat'] = kmat
    self.out.set_fraction(1.0)
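# The column filter above relies on NumPy boolean masking: a column (one
# training day) survives only if every point's value lies inside the
# physical range.  A minimal standalone sketch of the same idiom, on a toy
# array instead of ROMS output:
import numpy as np

# toy "training array": 3 points x 4 days, with one bad day (column 2)
toy = np.array([[33.1, 33.5, -999., 34.0],
                [33.2, 33.6, -999., 34.1],
                [33.3, 33.7,   35., 34.2]])
varmin, varmax = 30., 40.

# a column is kept only if all of its entries fall in (varmin, varmax)
good = ((toy > varmin) & (toy < varmax)).all(axis=0)
print good                         # [ True  True False  True]

# np.cov treats rows as variables (points) and columns as observations (days)
print np.cov(toy[:, good]).shape   # (3, 3)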
def calc_emse(run_dir):
    settings = pplan.PPlanSettings(run_dir)
    store = ppas.Store(settings.data_dir)
    P = store['P']
    G = store['G']
    objective = ppas.objectives.EMSEObjective(store['kmat'],
                                              settings.planner_properties['sigma_n'])
    start_t = ppas.datetime_to_seconds(settings.roi_properties['starttime'])
    samples = ppas.graph.path_samples(P, start_t)
    print '%s: EMSE:' % run_dir, objective.f(samples)
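# EMSEObjective's internals are not shown here.  What follows is a hedged
# sketch of one standard expected-mean-squared-error objective under a
# Gaussian-process model, assuming kmat is a prior covariance over all
# candidate points and sigma_n is i.i.d. observation noise -- a guess at
# what ppas.objectives.EMSEObjective.f computes, not the library's code:
import numpy as np

def emse_sketch(kmat, sample_idx, sigma_n):
    '''Average posterior variance after observing the points in sample_idx.'''
    K_aa = kmat[np.ix_(sample_idx, sample_idx)]   # sampled vs. sampled
    K_xa = kmat[:, sample_idx]                    # all points vs. sampled
    noise = sigma_n ** 2 * np.eye(len(sample_idx))
    # GP posterior covariance: K - K_xa (K_aa + sigma_n^2 I)^-1 K_xa^T
    post = kmat - K_xa.dot(np.linalg.solve(K_aa + noise, K_xa.T))
    return np.trace(post) / kmat.shape[0]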
def calc_emp_cov(run_dir, args):
    print '%s: calculating empirical covariance matrix' % run_dir
    settings = pplan.PPlanSettings(run_dir)
    store = ppas.Store(settings.data_dir)
    training_store = ppas.Store(os.path.join(settings.data_dir, 'training_data'))
    points = store['points']
    qoi = settings.roi_properties['qoi']
    training_dates = sorted(training_store.keys())
    training_array = np.zeros((len(points), len(training_dates)))
    exp_start = settings.roi_properties['starttime']
    exp_midnight = datetime.datetime(exp_start.year, exp_start.month,
                                     exp_start.day, 0, 0)
    # experiments starting before 13:00 are referenced to the following
    # midnight; apply this adjustment once, before the loop, so it is not
    # compounded for every training day
    if exp_start.hour < 13:
        exp_midnight += datetime.timedelta(days=1)
    # build array of training data
    for date_i in range(len(training_dates)):
        date_str = training_dates[date_i]
        data = training_store[date_str]
        data_year, data_month, data_day = [int(s) for s in date_str.split('-')]
        data_midnight = datetime.datetime(data_year, data_month, data_day, 0, 0)
        time_offset = (ppas.datetime_to_seconds(exp_midnight)
                       - ppas.datetime_to_seconds(data_midnight))
        for p_i in range(len(points)):
            p = points[p_i]
            t = p[3] - time_offset
            val = ppas.roms.get_value(data, p[0], p[1], p[2], t, qoi,
                                      interp='linear')
            training_array[p_i, date_i] = val
    # filter out bad data
    if qoi == 'salt':
        varmin, varmax = 30., 40.
    elif qoi == 'temp':
        varmin, varmax = 0., 100.
    else:
        raise ValueError('unknown variable type: %s' % qoi)
    good_columns = ((training_array > varmin)
                    & (training_array < varmax)).all(axis=0)
    good_training_data = training_array[:, good_columns]
    store['good_training_data'] = good_training_data
    # calculate covariance matrix
    kmat = np.cov(good_training_data)
    store['kmat'] = kmat
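# The time_offset arithmetic above maps an experiment-relative timestamp
# onto each training day's clock, so 10:00 on the experiment day queries
# 10:00 on every training day.  A self-contained sketch of the shift, with
# a local stand-in for ppas.datetime_to_seconds (assumed POSIX seconds):
import datetime

def _to_seconds(dt):
    # stand-in for ppas.datetime_to_seconds, assumed to return POSIX seconds
    return (dt - datetime.datetime(1970, 1, 1)).total_seconds()

exp_midnight = datetime.datetime(2011, 7, 26)
data_midnight = datetime.datetime(2011, 7, 12)
time_offset = _to_seconds(exp_midnight) - _to_seconds(data_midnight)

# a point timestamped 10:00 on the experiment day...
p_t = _to_seconds(datetime.datetime(2011, 7, 26, 10))
# ...lands at 10:00 on the training day: prints 2011-07-12 10:00:00
print datetime.datetime(1970, 1, 1) + datetime.timedelta(seconds=p_t - time_offset)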
def parse_time(date_str, time_str, timezone=pytz.utc):
    '''
    Parse the ecomapper time format.

    date_str is in format mm/dd/yyyy
    time_str is in format hh:mm:ss.ss

    Returns ppas time as defined in ppas/util.py
    '''
    month_str, day_str, year_str = date_str.split('/')
    hour_str, min_str, float_sec_str = time_str.split(':')
    float_sec = float(float_sec_str)
    dt = datetime.datetime(int(year_str), int(month_str), int(day_str),
                           int(hour_str), int(min_str), int(float_sec),
                           int((float_sec % 1) * 1e6))
    ldt = timezone.localize(dt)
    return ppas.datetime_to_seconds(ldt)
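# A usage example for parse_time.  The timestamps below are illustrative;
# the numeric return value depends on how ppas.datetime_to_seconds defines
# ppas time:
import pytz

# 15:30:02.50 UTC on 2011-07-26, in the ecomapper's mm/dd/yyyy format
t_utc = parse_time('07/26/2011', '15:30:02.50')

# timezone-aware parsing via pytz
pacific = pytz.timezone('US/Pacific')
t_local = parse_time('07/26/2011', '08:30:02.50', timezone=pacific)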
import roslib
roslib.load_manifest('ppas')
import time, datetime

import ppas

# get the current POSIX time
t = time.time()

# convert to UTC datetime
tm = time.gmtime(t)
dt = datetime.datetime(tm.tm_year, tm.tm_mon, tm.tm_mday,
                       tm.tm_hour, tm.tm_min, tm.tm_sec)

# convert back to POSIX time using our own function
t_ppas = ppas.datetime_to_seconds(dt)

# error should be less than one second (time.gmtime discards the
# fractional part of the second)
print 'Error:', t - t_ppas
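# ppas.datetime_to_seconds is defined in ppas/util.py and not reproduced
# here.  A minimal sketch consistent with the round trip above, assuming it
# returns POSIX seconds and treats naive datetimes as UTC -- a guess, not
# the library's actual code:
import calendar
import datetime

import pytz

def datetime_to_seconds_sketch(dt):
    if dt.tzinfo is not None:
        # normalize timezone-aware datetimes to naive UTC
        dt = dt.astimezone(pytz.utc).replace(tzinfo=None)
    return calendar.timegm(dt.timetuple()) + dt.microsecond * 1e-6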
def makegraph_edgetime_equilateral(roi_properties, graph_properties):
    '''
    Construct a graph where all edges have equal lengths.
    '''
    spatial_points = []
    nrows, ncols = graph_properties['shape']
    edge_distance = graph_properties['edge_distance']
    row_spacing = ppas.roms.meters_to_degrees(edge_distance, 0.0)[0]
    col_spacing = ppas.roms.meters_to_degrees(
        0.0, np.cos(np.pi / 6.) * edge_distance)[1]
    # lay out nodes on a triangular lattice: odd columns are shifted by
    # half a row and hold one fewer node
    for col_i in range(ncols):
        lon = col_i * col_spacing + roi_properties['lon0']
        if col_i % 2 == 1:
            offset = 0.5 * row_spacing
            nrows_thiscol = nrows - 1
        else:
            offset = 0.0
            nrows_thiscol = nrows
        for row_i in range(nrows_thiscol):
            lat = offset + row_spacing * row_i + roi_properties['lat0']
            spatial_points.append(np.array((lat, lon)))
    spatial_points = np.array(spatial_points)

    # make the graph
    nodes = range(len(spatial_points))
    G = ppas.graph.Graph(nodes, 'discrete_time')
    G.node_points = spatial_points
    starttime = ppas.datetime_to_seconds(roi_properties['starttime'])
    points = []
    for v_i in nodes:
        sp_i = spatial_points[v_i]
        for v_j in nodes:
            sp_j = spatial_points[v_j]
            meters_delta = ppas.roms.degrees_to_meters(sp_i[0] - sp_j[0],
                                                       sp_i[1] - sp_j[1])
            distance = linalg.norm(meters_delta)
            if distance <= edge_distance * 1.01 and v_i != v_j:  # fudge factor...
                length_dict = {}
                sample_dict = {}
                for t in graph_properties['time_list']:
                    length_dict[t] = graph_properties['edge_len']
                    samples = set()
                    ppe = graph_properties['ppe']
                    # technically each sample should be at a different time,
                    # but then we would end up with more points (due to
                    # points being at different times depending on which
                    # direction the edge is traversed), so place all samples
                    # at the edge's temporal midpoint
                    t_s = 0.5 * t + 0.5 * (t + graph_properties['edge_len'])
                    for r in np.linspace(0., 1., ppe):
                        sp = (1 - r) * sp_i + r * sp_j
                        depth = roi_properties['depth']
                        p = np.array((sp[0], sp[1], depth, t_s))
                        p_ii = add_point(points, p)
                        samples.add(p_ii)
                    sample_dict[t] = samples
                e = ppas.graph.DiscreteTimeEdge(v_i, v_j, length_dict,
                                                sample_dict)
                G.add_edge(e)
    return G, np.array(points)
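# A hypothetical invocation of makegraph_edgetime_equilateral.  The
# dictionary keys are inferred from the function body; in the real pipeline
# these come from pplan.PPlanSettings, and the values below are made up:
import datetime

roi_properties = {
    'lat0': 33.4967,
    'lon0': -118.72,
    'depth': 30.,
    'starttime': datetime.datetime(2011, 7, 26, 15),
}
graph_properties = {
    'shape': (4, 5),                  # nrows, ncols
    'edge_distance': 1000.,           # meters between adjacent nodes
    'edge_len': 600.,                 # seconds to traverse one edge
    'time_list': [0., 600., 1200.],   # discrete departure times
    'ppe': 5,                         # sample points per edge
}
G, points = makegraph_edgetime_equilateral(roi_properties, graph_properties)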
import roslib
roslib.load_manifest('ppas')
import datetime

import numpy as np

import ppas

lat0 = 33.4967
lon0 = -118.72
lat1 = 33.58
lon1 = -118.52
depth0 = 30.
depth1 = 50.
year, month, day = 2011, 7, 26

# get 3 hours of data starting at 15:00 GMT
time0 = ppas.datetime_to_seconds(datetime.datetime(year, month, day, 15))
time1 = time0 + 3. * 3600.

print 'Connecting to ROMS server'
dataset = ppas.roms.open_dataset(datetime.date(year, month, day))
print 'Downloading data'
data = ppas.roms.get_data(dataset, lat0, lat1, lon0, lon1,
                          depth0, depth1, time0, time1)

# print some values at the center of the region
lat = (lat0 + lat1) / 2.0
lon = (lon0 + lon1) / 2.0
depth = (depth0 + depth1) / 2.0
for t in np.linspace(time0, time1):
    v = ppas.roms.get_value(data, lat, lon, depth, t, 'temp', interp='linear')
    print 'Temperature at (%.3f, %.3f, %.1f, %.7f): %f' % (lat, lon, depth, t, v)
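# np.linspace with no count argument yields 50 evenly spaced times.
# ppas.roms.get_value presumably interpolates in all four dimensions; the
# 1-D analogue of interp='linear' along the time axis alone is np.interp:
import numpy as np

# toy temperature series at one (lat, lon, depth) on a regular time grid
grid_times = np.array([0., 3600., 7200., 10800.])
grid_temps = np.array([15.2, 15.6, 16.1, 15.9])

for t in np.linspace(grid_times[0], grid_times[-1], 7):
    print 'Temperature at t=%5.0f: %.3f' % (t, np.interp(t, grid_times, grid_temps))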