def test_datetime_to_timestamp(self):
    """An aware UTC datetime maps to the expected offset in the given CF time units."""
    units = 'seconds since 1950-01-01 00:00:00'
    when = datetime.datetime(2017, 12, 21, 0, 0, tzinfo=pytz.UTC)
    # 2017-12-21 is exactly 2,144,966,400 seconds after the 1950 epoch.
    self.assertEqual(datetime_to_timestamp(when, units), 2144966400)
def timestamp(value):
    """Convert *value* to an epoch timestamp expressed in milliseconds."""
    # NOTE(review): other call sites pass explicit time units as a second
    # argument to datetime_to_timestamp; confirm this relies on a default.
    seconds = datetime_to_timestamp(value)
    return seconds * 1000
def load_data(self):
    """Load observation station data from the database, then sample the
    model dataset at the timestamp nearest each observation.

    Populates (among others): ``self.observation`` (IDs replaced by dicts),
    ``self.points``, ``self.data``, ``self.depths``, ``self.names``,
    ``self.observation_times``, ``self.timestamps``.
    """
    # If observations were passed as numeric station IDs, resolve each one
    # against the DB into a dict of time/position/datatypes/data.
    if isinstance(self.observation[0], numbers.Number):
        self.observation_variable_names = []
        self.observation_variable_units = []
        self.data = []
        self.timestamps = []
        self.observation_times = []
        self.names = []
        for idx, o in enumerate(self.observation):
            station = db.session.query(Station).get(o)
            observation = {
                'time': station.time.isoformat(),
                'longitude': station.longitude,
                'latitude': station.latitude,
            }
            self.observation_time = station.time
            self.observation_times.append(station.time)
            # Prefer the station's name; fall back to "(lat, lon)".
            if station.name:
                self.names.append(station.name)
            else:
                self.names.append(
                    f"({station.latitude:.4f}, {station.longitude:.4f})")

            # Distinct datatype keys sampled at this station, then the
            # matching DataType rows in key order.
            datatype_keys = [
                k[0] for k in db.session.query(
                    db.func.distinct(Sample.datatype_key)).filter(
                        Sample.station == station).all()
            ]
            datatypes = db.session.query(DataType).filter(
                DataType.key.in_(datatype_keys)).order_by(
                    DataType.key).all()

            observation['datatypes'] = [
                f"{dt.name} [{dt.unit}]" for dt in datatypes
            ]

            # One (depth, value) list per datatype; variable names/units
            # are recorded only for the first station (idx == 0).
            data = []
            for dt in datatypes:
                data.append(
                    db.session.query(Sample.depth, Sample.value).filter(
                        Sample.station == station,
                        Sample.datatype == dt).all())
                if idx == 0:
                    self.observation_variable_names.append(dt.name)
                    self.observation_variable_units.append(dt.unit)

            observation['data'] = np.ma.array(data)  #.transpose()
            self.observation[idx] = observation

        self.points = [[o['latitude'], o['longitude']]
                       for o in self.observation]

    # NOTE(review): `station` is only bound inside the branch above; if
    # observations arrive as dicts this would raise NameError — confirm
    # callers always pass numeric IDs here.
    cftime = datetime_to_timestamp(station.time,
                                   self.dataset_config.time_dim_units)

    with open_dataset(
        self.dataset_config,
        variable=self.variables,
        timestamp=int(cftime),
        nearest_timestamp=True,
    ) as dataset:
        ts = dataset.nc_data.timestamps

        # For each observation, find the model timestamp closest in time.
        observation_times = []
        timestamps = []
        for o in self.observation:
            observation_time = dateutil.parser.parse(
                o['time']).replace(tzinfo=pytz.UTC)
            observation_times.append(observation_time)

            deltas = [(x - observation_time).total_seconds() for x in ts]

            # NOTE(review): local `time` shadows any imported `time` module
            # for the rest of this scope.
            time = np.abs(deltas).argmin()
            timestamp = ts[time]
            timestamps.append(timestamp)

        try:
            self.load_misc(dataset, self.variables)
        except IndexError as e:
            # Surface a user-facing error when the variable is absent
            # (e.g. a derived product rather than a raw dataset variable).
            raise ClientError(
                gettext(
                    "The selected variable(s) were not found in the dataset. \
Most likely, this variable is a derived product from existing dataset variables. \
Please select another variable.") + str(e))

        # Model data is extracted at the timestamp matched to the FIRST
        # observation only.
        point_data, self.depths = self.get_data(
            dataset, self.variables,
            datetime_to_timestamp(timestamps[0],
                                  self.dataset_config.time_dim_units))
        point_data = np.ma.array(point_data)

    self.data = point_data
    # These carry the values from the LAST loop iteration above.
    self.observation_time = observation_time
    self.observation_times = observation_times
    self.timestamps = timestamps
    self.timestamp = timestamp
def load_data(self):
    """Load a drifter buoy's track from its NetCDF file and interpolate
    model fields onto the drifter's positions/times.

    Populates: ``self.name``/``imei``/``wmo``, ``self.points``,
    ``self.times``, ``self.data`` (buoy variables), ``self.start``/``end``
    (index window into the track), ``self.model_data``, ``self.model_times``,
    ``self.variable_names``, ``self.variable_units``.
    """
    ds_url = current_app.config['DRIFTER_URL']
    data_names = []
    data_units = []
    with Dataset(ds_url % self.drifter, 'r') as ds:
        self.name = ds.buoyid
        self.imei = str(chartostring(ds['imei'][0]))
        self.wmo = str(chartostring(ds['wmo'][0]))

        # Time converter built from the file's own time units.
        t = cftime.utime(ds['data_date'].units)

        d = []
        for v in self.buoyvariables:
            d.append(ds[v][:])
            # Prefer CF attributes for display name/units when present.
            if "long_name" in ds[v].ncattrs():
                data_names.append(ds[v].long_name)
            else:
                data_names.append(v)
            if "units" in ds[v].ncattrs():
                data_units.append(ds[v].units)
            else:
                data_units.append(None)

        self.data = d
        self.times = t.num2date(ds['data_date'][:])
        self.points = np.array([
            ds['latitude'][:],
            ds['longitude'][:],
        ]).transpose()

    data_names = data_names[:len(self.buoyvariables)]
    data_units = data_units[:len(self.buoyvariables)]

    # Force every track time to be timezone-aware (UTC).
    for i, t in enumerate(self.times):
        if t.tzinfo is None:
            self.times[i] = t.replace(tzinfo=pytz.UTC)

    self.data_names = data_names
    self.data_units = data_units

    # Resolve the requested start/end into indices on the track.
    if self.starttime is not None:
        self.starttime = dateutil.parser.parse(self.starttime).replace(
            hour=0, minute=0, second=0, microsecond=0)
        self.start = np.where(self.times >= self.starttime)[0].min()
    else:
        # Default window: the last 5 track points (negative index,
        # normalized below).
        self.start = -5

    if self.endtime is not None:
        self.endtime = dateutil.parser.parse(self.endtime).replace(
            hour=0, minute=0, second=0, microsecond=0)
        self.end = np.where(self.times <= self.endtime)[0].max() + 1
    else:
        self.end = len(self.times) - 1

    # Normalize negative indices and clip both ends into range.
    if self.start < 0:
        self.start += len(self.times)
    self.start = np.clip(self.start, 0, len(self.times) - 1)
    if self.end < 0:
        self.end += len(self.times)
    self.end = np.clip(self.end, 0, len(self.times) - 1)

    # NOTE(review): if starttime/endtime were not supplied they are still
    # None here — confirm datetime_to_timestamp tolerates None.
    start = int(
        datetime_to_timestamp(self.starttime,
                              self.dataset_config.time_dim_units))
    end = int(
        datetime_to_timestamp(self.endtime,
                              self.dataset_config.time_dim_units))

    with open_dataset(self.dataset_config,
                      timestamp=start,
                      endtime=end,
                      variable=self.variables,
                      nearest_timestamp=True) as dataset:
        depth = int(self.depth)

        # Find the model time window bracketing the drifter window,
        # widened by one step on each side, then clipped into range.
        try:
            model_start = np.where(
                dataset.nc_data.timestamps <= self.times[self.start]
            )[0][-1]
        except IndexError:
            model_start = 0
        model_start -= 1
        model_start = np.clip(model_start, 0,
                              len(dataset.nc_data.timestamps) - 1)

        try:
            model_end = np.where(
                dataset.nc_data.timestamps >= self.times[self.end])[0][0]
        except IndexError:
            model_end = len(dataset.nc_data.timestamps) - 1
        model_end += 1
        model_end = np.clip(model_end, model_start,
                            len(dataset.nc_data.timestamps) - 1)

        model_times = [
            time.mktime(t.timetuple())
            for t in dataset.nc_data.timestamps[model_start:model_end + 1]
        ]
        output_times = [
            time.mktime(t.timetuple())
            for t in self.times[self.start:self.end + 1]
        ]

        # Sample each variable along the drifter path, then interpolate
        # model values in time onto the path times.
        d = []
        for v in self.variables:
            pts, dist, mt, md = dataset.get_path(
                self.points[self.start:self.end + 1],
                depth,
                list(range(model_start, model_end + 1)),
                v,
                times=output_times)
            f = interp1d(
                model_times,
                md,
                assume_sorted=True,
                bounds_error=False,
            )
            # diag picks the value at each path point's own time.
            d.append(np.diag(f(mt)))

        model_data = np.ma.array(d)

        variable_names = []
        variable_units = []
        scale_factors = []
        for v in self.variables:
            vc = self.dataset_config.variable[v]
            variable_units.append(vc.unit)
            variable_names.append(vc.name)
            scale_factors.append(vc.scale_factor)

        # Apply per-variable scale factors in place.
        for idx, sf in enumerate(scale_factors):
            model_data[idx, :] = np.multiply(model_data[idx, :], sf)

        self.model_data = model_data
        self.model_times = list(map(datetime.datetime.utcfromtimestamp, mt))
        self.variable_names = variable_names
        self.variable_units = variable_units
def load_data(self):
    """Load a platform's observed track from the database and, when model
    variables are requested, extract matching model data along the track.

    Populates: ``self.name``, ``self.points``, ``self.times``, ``self.data``,
    ``self.depth``, ``self.distances`` (cumulative km), and — when
    ``self.variables`` is non-empty — ``self.model_data``, ``self.model_dist``,
    ``self.model_times``, ``self.variable_names``/``units``, ``self.cmaps``.
    """
    platform = db.session.query(Platform).get(self.platform)
    self.name = platform.unique_id

    # First get the variable
    # Datatypes are discovered from the platform's FIRST station only;
    # self.trackvariables indexes into the key-ordered datatype list.
    st0 = db.session.query(Station).filter(
        Station.platform == platform).first()
    datatype_keys = db.session.query(db.func.distinct(
        Sample.datatype_key)).filter(Sample.station == st0).all()
    datatypes = db.session.query(DataType).filter(
        DataType.key.in_(datatype_keys)).order_by(DataType.key).all()

    variables = [datatypes[int(x)] for x in self.trackvariables]
    self.data_names = [dt.name for dt in variables]
    self.data_units = [dt.unit for dt in variables]
    self.track_cmaps = [
        colormap.find_colormap(dt.name) for dt in variables
    ]

    d = []
    for v in variables:
        d.append(
            get_platform_variable_track(
                db.session,
                platform,
                v.key,
                self.track_quantum,
                starttime=self.starttime,
                endtime=self.endtime,
            ))

    # Track rows share position/time across variables; columns appear to
    # be [time, lat, lon, depth, value] — inferred from the slices below.
    d = np.array(d)
    self.points = d[0, :, 1:3].astype(float)
    add_tz_utc = np.vectorize(lambda x: x.replace(tzinfo=pytz.UTC))
    self.times = add_tz_utc(d[0, :, 0])
    self.data = d[:, :, 4].astype(float)
    self.depth = d[0, :, 3].astype(float)

    # Cumulative great-circle distance (km) along the track, starting at 0.
    d_delta = [
        distance(p0, p1).km
        for p0, p1 in zip(self.points[0:-1], self.points[1:])
    ]
    d_delta.insert(0, 0)
    self.distances = np.cumsum(d_delta)

    start = int(
        datetime_to_timestamp(self.times[0],
                              self.dataset_config.time_dim_units))
    end = int(
        datetime_to_timestamp(self.times[-1],
                              self.dataset_config.time_dim_units))

    # Cap the path at 100 points for model extraction.
    points_simplified = self.points
    if len(self.points) > 100:
        points_simplified = np.array(vw.simplify(self.points, number=100))

    if len(self.variables) > 0:
        with open_dataset(self.dataset_config,
                          timestamp=start,
                          endtime=end,
                          variable=self.variables,
                          nearest_timestamp=True) as dataset:
            # Make distance -> time function
            dist_to_time = interp1d(
                self.distances,
                [time.mktime(t.timetuple()) for t in self.times],
                assume_sorted=True,
                bounds_error=False,
            )

            output_times = dist_to_time(
                np.linspace(0, self.distances[-1], 100))

            model_times = sorted([
                time.mktime(t.timetuple())
                for t in dataset.nc_data.timestamps
            ])

            self.model_depths = dataset.depths

            d = []
            depth = 0

            for v in self.variables:
                # Multi-depth track: extract full profiles along the path;
                # single-depth track: extract surface (depth 0) values.
                if len(np.unique(self.depth)) > 1:
                    pts, dist, md, dep = dataset.get_path_profile(
                        points_simplified,
                        v,
                        int(
                            datetime_to_timestamp(
                                dataset.nc_data.timestamps[0],
                                self.dataset_config.time_dim_units)),
                        endtime=int(
                            datetime_to_timestamp(
                                dataset.nc_data.timestamps[-1],
                                self.dataset_config.time_dim_units)),
                    )
                    if len(model_times) > 1:
                        f = interp1d(
                            model_times,
                            md.filled(np.nan),
                            assume_sorted=True,
                            bounds_error=False,
                        )
                        ot = dist_to_time(dist)
                        # diagonal(0, 0, 2) picks each path point's value
                        # at its own time; copy() detaches from f's output.
                        od = f(ot).diagonal(0, 0, 2).copy()
                    else:
                        od = md

                    # Clear model data beneath observed data
                    od[np.where(self.model_depths > max(self.depth)
                                )[0][1:], :] = np.nan

                    d.append(od)
                    mt = [
                        int(
                            datetime_to_timestamp(
                                t, self.dataset_config.time_dim_units))
                        for t in dataset.nc_data.timestamps
                    ]
                    model_dist = dist
                else:
                    pts, dist, mt, md = dataset.get_path(
                        self.points,
                        depth,
                        v,
                        datetime_to_timestamp(
                            dataset.nc_data.timestamps[0],
                            self.dataset_config.time_dim_units),
                        endtime=datetime_to_timestamp(
                            dataset.nc_data.timestamps[-1],
                            self.dataset_config.time_dim_units),
                        times=output_times)
                    model_dist = dist
                    if len(model_times) > 1:
                        f = interp1d(
                            model_times,
                            md,
                            assume_sorted=True,
                            bounds_error=False,
                        )
                        d.append(np.diag(f(mt)))
                    else:
                        d.append(md)

            model_data = np.ma.array(d)

            variable_units = []
            variable_names = []
            scale_factors = []
            cmaps = []
            for v in self.variables:
                vc = self.dataset_config.variable[v]
                variable_units.append(vc.unit)
                variable_names.append(vc.name)
                scale_factors.append(vc.scale_factor)
                cmaps.append(colormap.find_colormap(vc.name))

            # Apply per-variable scale factors in place.
            for idx, sf in enumerate(scale_factors):
                model_data[idx, :] = np.multiply(model_data[idx, :], sf)

            self.model_data = model_data
            self.model_dist = model_dist
            self.model_times = list(
                map(datetime.datetime.utcfromtimestamp, model_times))
            self.variable_names = variable_names
            self.variable_units = variable_units
            self.cmaps = cmaps