Example #1
 def _get_source_timedata(self, grid, t_adjust):
     """ Get the source time data information. builds up sourcedata objects of a given grid """
     dir_list = self._get_dir_list(grid)
     group = GridGroup()
     group.data_list = []
     group.time_counter = []
     group.date_counter = []
     for filename in dir_list:
         nc = Dataset(filename, 'r')
         varid = nc.variables['time_counter']
         # build the converter once per file rather than once per record
         ut = netcdftime.utime(varid.units, varid.calendar)
         for index in range(len(varid)):
             group.data_list.append([filename, index])
             group.time_counter.append(varid[index] + t_adjust)
             group.date_counter.append(ut.num2date(varid[index] + t_adjust))
         group.units = varid.units
         group.calendar = varid.calendar
         nc.close()
     # order the aggregated records chronologically across files
     order = sorted(range(len(group.time_counter)),
                    key=group.time_counter.__getitem__)
     group.data_list = [group.data_list[i] for i in order]
     group.time_counter = [group.time_counter[i] for i in order]
     group.date_counter = [group.date_counter[i] for i in order]
     return group
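For reference, the netcdftime.utime conversion the example above relies on can be exercised on its own. A minimal sketch, assuming a standard-calendar units string (all values below are illustrative, not taken from the example):

import datetime
import netcdftime

# num2date maps numeric offsets to datetimes; date2num is the inverse
ut = netcdftime.utime('seconds since 1970-01-01 00:00:00', calendar='standard')
d = ut.num2date(86400.0)   # -> datetime.datetime(1970, 1, 2, 0, 0)
n = ut.date2num(d)         # -> 86400.0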
Example #2
 def _get_source_timedata(self, grid, t_adjust):
     """ Get the source time data information. Builds up source-data objects for a given grid. """
     timevar = grid[self.time_counter]
     grid.time_counter = timevar[:] + t_adjust
     grid.date_counter = []
     # build the converter once rather than once per record
     ut = netcdftime.utime(grid.units, grid.calendar)
     for index in range(len(grid.time_counter)):
         grid.date_counter.append(ut.num2date(grid.time_counter[index]))
Example #3
 def _set_time_data(self):
     tmp = netcdftime.utime(self.variables['time'].units,
                            calendar=self.variables['time'].calendar)
     d = [
         datetime.datetime(self.start, 1, 1) +
         relativedelta.relativedelta(months=x)
         for x in range(0, self.month)
     ]
     self.variables['time'][:] = tmp.date2num(d)
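date2num also accepts a sequence of datetimes, which is what makes the monthly axis above work. A stand-alone sketch with assumed values (the units string and dates are illustrative):

import datetime
import netcdftime
from dateutil import relativedelta

tmp = netcdftime.utime('days since 2000-01-01 00:00:00', calendar='standard')
d = [datetime.datetime(2000, 1, 1) + relativedelta.relativedelta(months=x)
     for x in range(3)]
print(tmp.date2num(d))   # [ 0. 31. 60.]  (2000 is a leap year)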
Example #4
        def get_time_info(f):
            print(f)
            # read netcdf file and extract time information

            F = netCDF4.Dataset(f, 'r')
            t = F.variables['time']
            tmp = netcdftime.utime(t.units, calendar=t.calendar)
            d = tmp.num2date(t[:])
            print(len(d), d[0], d[-1])
            F.close()
            return len(d), d[0], d[-1]
Example #5
def parse_dates(date_time, calendar='standard'):
    """
    ERDDAP ReSTful API can take a variety of time constraints,
    for erddapy we chose to use only `seconds since 1970-01-01T00:00:00Z`,
    converted from datetime internally, that way the user can parse the dates in any way they like
    using python datetime like objects..

    """

    utime = netcdftime.utime('seconds since 1970-01-01T00:00:00Z',
                             calendar=calendar)
    return utime.date2num(date_time)
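A quick usage sketch of parse_dates (the call values are hypothetical):

from datetime import datetime

parse_dates(datetime(1970, 1, 2))   # -> 86400.0
parse_dates(datetime(2000, 1, 1))   # -> 946684800.0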
Example #6
    def timestamps(self):
        if self.__timestamp_cache.get("timestamps") is None:
            var = None
            for v in ['time', 'time_counter']:
                if v in self._dataset.variables:
                    var = self._dataset.variables[v]
                    break

            t = netcdftime.utime(var.units)
            # list comprehension so the result is a proper 1-D array on
            # Python 3, where map() returns an iterator
            timestamps = np.array(
                [t.num2date(ts).replace(tzinfo=pytz.UTC) for ts in var[:]])
            timestamps.flags.writeable = False
            self.__timestamp_cache["timestamps"] = timestamps

        return self.__timestamp_cache.get("timestamps")
Example #7
    def timestamps(self):
        if self.__timestamp_cache.get("timestamps") is None:
            var = None
            for v in ['time', 'time_counter']:
                if v in self._dataset.variables:
                    var = self._dataset.variables[v]
                    break

            t = netcdftime.utime(var.units)
            # list comprehension so the result is a proper 1-D array on
            # Python 3, where map() returns an iterator
            timestamps = np.array(
                [t.num2date(ts).replace(tzinfo=pytz.UTC) for ts in var[:]]
            )
            timestamps.flags.writeable = False
            self.__timestamp_cache["timestamps"] = timestamps

        return self.__timestamp_cache.get("timestamps")
Example #8
def drifters_time(drifter_id):
    drifters = drifter_id.split(",")

    mins = []
    maxes = []
    for d in drifters:
        with Dataset(app.config["DRIFTER_URL"] % d, 'r') as ds:
            var = ds['data_date']
            ut = netcdftime.utime(var.units)
            mins.append(ut.num2date(var[:].min()))
            maxes.append(ut.num2date(var[:].max()))

    min_time = np.amin(mins)
    max_time = np.amax(maxes)

    return {
        'min': min_time.isoformat(),
        'max': max_time.isoformat(),
    }
Example #9
def list_class4_forecasts(class4_id):
    dataset_url = current_app.config["CLASS4_URL"] % class4_id
    with Dataset(dataset_url, 'r') as ds:
        var = ds['modeljuld']
        forecast_date = [
            d.strftime("%d %B %Y")
            for d in netcdftime.utime(var.units).num2date(var[:])
        ]

    res = [{
        'id': 'best',
        'name': 'Best Estimate',
    }]

    if len(set(forecast_date)) > 1:
        for idx, date in enumerate(forecast_date):
            if res[-1]['name'] == date:
                continue
            res.append({'id': idx, 'name': date})

    return res
Example #10
def list_class4_forecasts(class4_id):
    dataset_url = app.config["CLASS4_URL"] % class4_id
    with Dataset(dataset_url, 'r') as ds:
        var = ds['modeljuld']
        forecast_date = [d.strftime("%d %B %Y") for d in
                         netcdftime.utime(var.units).num2date(var[:])]

    res = [{
        'id': 'best',
        'name': 'Best Estimate',
    }]

    if len(set(forecast_date)) > 1:
        for idx, date in enumerate(forecast_date):
            if res[-1]['name'] == date:
                continue
            res.append({
                'id': idx,
                'name': date
            })

    return res
Example #11
    def load_data(self):
        if isinstance(self.observation[0], numbers.Number):
            self.observation_variable_names = []
            self.observation_variable_units = []
            with Dataset(app.config["OBSERVATION_AGG_URL"], 'r') as ds:
                t = netcdftime.utime(ds['time'].units)
                for idx, o in enumerate(self.observation):
                    observation = {}
                    ts = t.num2date(ds['time'][o]).replace(tzinfo=pytz.UTC)
                    observation['time'] = ts.isoformat()
                    observation['longitude'] = ds['lon'][o]
                    observation['latitude'] = ds['lat'][o]

                    observation['depth'] = ds['z'][:]
                    observation['depthunit'] = ds['z'].units

                    observation['datatypes'] = []
                    data = []
                    for v in sorted(ds.variables):
                        if v in ['z', 'lat', 'lon', 'profile', 'time']:
                            continue
                        var = ds[v]
                        if var.datatype == '|S1':
                            continue

                        observation['datatypes'].append("%s [%s]" % (
                            var.long_name,
                            var.units
                        ))
                        data.append(var[o, :])

                        if idx == 0:
                            self.observation_variable_names.append(
                                var.long_name)
                            self.observation_variable_units.append(var.units)

                    observation['data'] = np.ma.array(data).transpose()
                    self.observation[idx] = observation

                self.points = [[o['latitude'], o['longitude']]
                               for o in self.observation]

        with open_dataset(get_dataset_url(self.dataset_name)) as dataset:
            ts = dataset.timestamps

            observation_times = []
            timestamps = []
            for o in self.observation:
                observation_time = dateutil.parser.parse(o['time'])
                observation_times.append(observation_time)

                deltas = [
                    (x.replace(tzinfo=pytz.UTC) -
                     observation_time).total_seconds()
                    for x in ts]

                time = np.abs(deltas).argmin()
                timestamp = ts[time]
                timestamps.append(timestamp)

            try:
                self.load_misc(dataset, self.variables)
            except IndexError as e:
                raise ClientError(gettext(
                    "The selected variable(s) were not found in the dataset. "
                    "Most likely, this variable is a derived product from "
                    "existing dataset variables. Please select another "
                    "variable.") + str(e))

            point_data, self.depths = self.get_data(dataset, self.variables, time)
            point_data = np.ma.array(point_data)

            point_data = self.apply_scale_factors(point_data)

            self.variable_units, point_data = self.kelvin_to_celsius(
                self.variable_units,
                point_data
            )

        self.data = point_data
        self.observation_time = observation_time
        self.observation_times = observation_times
        self.timestamps = timestamps
        self.timestamp = timestamp
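The nearest-timestamp matching above can be illustrated in isolation. A minimal sketch with made-up inputs:

import datetime
import numpy as np
import pytz

# made-up model timestamps and observation time
ts = [datetime.datetime(2020, 1, d, tzinfo=pytz.UTC) for d in (1, 2, 3)]
target = datetime.datetime(2020, 1, 2, 9, 0, tzinfo=pytz.UTC)

deltas = [(x - target).total_seconds() for x in ts]
nearest = int(np.abs(deltas).argmin())
print(nearest, ts[nearest])   # 1 2020-01-02 00:00:00+00:00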
Example #12
def get_time_converter(time):
    """
    reftime,time_converter = get_time_converter(time)
    *input:
    time = nc.variables['time'],
    where nc is a netCDF4.Dataset object
    *outputs:
    reftime - datetime.datetime object
    time_converter - netCDF4.netcdftime.utime object
    """

    tu        = time.units
    time_info = tu.split()

    # determine time format of reference point:
    Unit = time_info[0]  # 1st word gives units
    if Unit.endswith('s'):
        Unit = Unit[:-1]  # 'seconds' -> 'second', etc.

    time_info.remove(time_info[0])
    time_info.remove(time_info[0]) # 'since'

    # rest is date and time of reference point
    if len(time_info)>=2:
        cdate, ctime = time_info[0:2]
    else:
        if ('T' in time_info[0]) and ('Z' in time_info[0]):
            # cdate+T...Z format for time
            split1 = tu.split('T')
            ctime  = split1[1].strip('Z')
            cdate  = split1[0].split()[2]

    # reformat cdate to YYYYMMDD
    if '-' in cdate:
        # remove '-'
        # - otherwise assume YYYYMMDD format
        split2 = cdate.split('-')
        for loop_i in range(1,3):
            if len(split2[loop_i])==1:
                split2[loop_i] = '0'+split2[loop_i]
        cdate = split2[0]+split2[1]+split2[2] # should be YYYYMMDD now
    if len(cdate)<8:
        cdate = (8-len(cdate))*'0'+cdate

    # reformat ctime to HHMMSS
    if ':' in ctime:
        # remove ':'
        # - otherwise assume HHMMSS format
        split2 = ctime.split(':')
        for loop_i in range(0,3):
            if len(split2[loop_i]) == 1:
                split2[loop_i] = '0'+split2[loop_i]
        ctime = split2[0]+split2[1]+split2[2] # should be HHMMSS now


    # now can make new string where format is known
    # - this is to pass into netcdftime.utime
    # - NB can't always use strftime/strptime since it only works after 1900
    cyear0 = cdate[:4]
    cmon0  = cdate[4:6]
    cday0  = cdate[6:8]
    chr0   = ctime[:2]
    cmin0  = ctime[2:4]
    csec0  = ctime[4:]
    #
    year0   = int(cyear0)
    mon0    = int(cmon0)
    day0    = int(cday0)
    hr0     = int(chr0)
    min0    = int(cmin0)
    sec0    = int(float(csec0))
    reftime = datetime(year0,mon0,day0,hr0,min0,sec0)

    init_string = (Unit + 's since ' +
                   cyear0 + '-' + cmon0 + '-' + cday0 + ' ' +
                   chr0 + ':' + cmin0 + ':' + csec0[:2])

    if 'calendar' in time.ncattrs():
        time_converter = NCT.utime(init_string,calendar=time.calendar)
    else:
        time_converter = NCT.utime(init_string)

    return reftime, time_converter
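A usage sketch matching the docstring above (the file name is illustrative):

import netCDF4

nc = netCDF4.Dataset('input.nc', 'r')   # 'input.nc' is a placeholder path
reftime, time_converter = get_time_converter(nc.variables['time'])
dates = time_converter.num2date(nc.variables['time'][:])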
Example #13
    def load_data(self):
        ds_url = current_app.config['DRIFTER_URL']
        data_names = []
        data_units = []
        with Dataset(ds_url % self.drifter, 'r') as ds:
            self.name = ds.buoyid

            self.imei = str(chartostring(ds['imei'][0]))
            self.wmo = str(chartostring(ds['wmo'][0]))

            t = netcdftime.utime(ds['data_date'].units)

            d = []
            for v in self.buoyvariables:
                d.append(ds[v][:])
                if "long_name" in ds[v].ncattrs():
                    data_names.append(ds[v].long_name)
                else:
                    data_names.append(v)

                if "units" in ds[v].ncattrs():
                    data_units.append(ds[v].units)
                else:
                    data_units.append(None)

            self.data = d

            self.times = t.num2date(ds['data_date'][:])
            self.points = np.array([
                ds['latitude'][:],
                ds['longitude'][:],
            ]).transpose()

        data_names = data_names[:len(self.buoyvariables)]
        data_units = data_units[:len(self.buoyvariables)]

        for i, t in enumerate(self.times):
            if t.tzinfo is None:
                self.times[i] = t.replace(tzinfo=pytz.UTC)

        self.data_names = data_names
        self.data_units = data_units

        if self.starttime is not None:
            d = dateutil.parser.parse(self.starttime)
            self.start = np.where(self.times >= d)[0].min()
        else:
            self.start = 0

        if self.endtime is not None:
            d = dateutil.parser.parse(self.endtime)
            self.end = np.where(self.times <= d)[0].max() + 1
        else:
            self.end = len(self.times) - 1

        if self.start < 0:
            self.start += len(self.times)
        self.start = np.clip(self.start, 0, len(self.times) - 1)
        if self.end < 0:
            self.end += len(self.times)
        self.end = np.clip(self.end, 0, len(self.times) - 1)

        with open_dataset(self.dataset_config) as dataset:
            depth = int(self.depth)

            try:
                model_start = np.where(
                    dataset.timestamps <= self.times[self.start])[0][-1]
            except IndexError:
                model_start = 0

            model_start -= 1
            model_start = np.clip(model_start, 0, len(dataset.timestamps) - 1)

            try:
                model_end = np.where(
                    dataset.timestamps >= self.times[self.end])[0][0]
            except IndexError:
                model_end = len(dataset.timestamps) - 1

            model_end += 1
            model_end = np.clip(model_end, model_start,
                                len(dataset.timestamps) - 1)

            model_times = [
                time.mktime(t.timetuple())
                for t in dataset.timestamps[model_start:model_end + 1]
            ]
            output_times = [
                time.mktime(t.timetuple())
                for t in self.times[self.start:self.end + 1]
            ]
            d = []
            for v in self.variables:
                pts, dist, mt, md = dataset.get_path(
                    self.points[self.start:self.end + 1],
                    depth,
                    list(range(model_start, model_end + 1)),
                    v,
                    times=output_times)

                f = interp1d(
                    model_times,
                    md,
                    assume_sorted=True,
                    bounds_error=False,
                )

                d.append(np.diag(f(mt)))

            model_data = np.ma.array(d)

            variable_names = []
            variable_units = []
            scale_factors = []

            for v in self.variables:
                vc = self.dataset_config.variable[v]
                variable_units.append(vc.unit)
                variable_names.append(vc.name)
                scale_factors.append(vc.scale_factor)

            for idx, sf in enumerate(scale_factors):
                model_data[idx, :] = np.multiply(model_data[idx, :], sf)

            self.model_data = model_data
            self.model_times = list(map(datetime.datetime.utcfromtimestamp,
                                        mt))
            self.variable_names = variable_names
            self.variable_units = variable_units
Example #14
    def load_data(self):
        ds_url = app.config['DRIFTER_URL']
        data_names = []
        data_units = []
        with Dataset(ds_url % self.drifter, 'r') as ds:
            self.name = ds.buoyid

            self.imei = str(chartostring(ds['imei'][0]))
            self.wmo = str(chartostring(ds['wmo'][0]))

            t = netcdftime.utime(ds['data_date'].units)

            d = []
            for v in self.buoyvariables:
                d.append(ds[v][:])
                if "long_name" in ds[v].ncattrs():
                    data_names.append(ds[v].long_name)
                else:
                    data_names.append(v)

                if "units" in ds[v].ncattrs():
                    data_units.append(ds[v].units)
                else:
                    data_units.append(None)

            self.data = d

            self.times = t.num2date(ds['data_date'][:])
            self.points = np.array([
                ds['latitude'][:],
                ds['longitude'][:],
            ]).transpose()

        data_names = data_names[:len(self.buoyvariables)]
        data_units = data_units[:len(self.buoyvariables)]

        for i, t in enumerate(self.times):
            if t.tzinfo is None:
                self.times[i] = t.replace(tzinfo=pytz.UTC)

        self.data_names = data_names
        self.data_units = data_units

        if self.starttime is not None:
            d = dateutil.parser.parse(self.starttime)
            self.start = np.where(self.times >= d)[0].min()
        else:
            self.start = 0

        if self.endtime is not None:
            d = dateutil.parser.parse(self.endtime)
            self.end = np.where(self.times <= d)[0].max() + 1
        else:
            self.end = len(self.times) - 1

        if self.start < 0:
            self.start += len(self.times)
        self.start = np.clip(self.start, 0, len(self.times) - 1)
        if self.end < 0:
            self.end += len(self.times)
        self.end = np.clip(self.end, 0, len(self.times) - 1)

        with open_dataset(get_dataset_url(self.dataset_name)) as dataset:
            depth = int(self.depth)

            try:
                model_start = np.where(
                    dataset.timestamps <= self.times[self.start]
                )[0][-1]
            except IndexError:
                model_start = 0

            model_start -= 1
            model_start = np.clip(model_start, 0, len(dataset.timestamps) - 1)

            try:
                model_end = np.where(
                    dataset.timestamps >= self.times[self.end]
                )[0][0]
            except IndexError:
                model_end = len(dataset.timestamps) - 1

            model_end += 1
            model_end = np.clip(
                model_end,
                model_start,
                len(dataset.timestamps) - 1
            )

            model_times = [
                time.mktime(t.timetuple())
                for t in dataset.timestamps[model_start:model_end + 1]
            ]
            output_times = [
                time.mktime(t.timetuple())
                for t in self.times[self.start:self.end + 1]
            ]
            d = []
            for v in self.variables:
                pts, dist, mt, md = dataset.get_path(
                    self.points[self.start:self.end + 1],
                    depth,
                    range(model_start, model_end + 1),
                    v,
                    times=output_times
                )

                f = interp1d(
                    model_times,
                    md,
                    assume_sorted=True,
                    bounds_error=False,
                )

                d.append(np.diag(f(mt)))

            model_data = np.ma.array(d)

            variable_names = []
            variable_units = []
            scale_factors = []

            for v in self.variables:
                variable_units.append(get_variable_unit(self.dataset_name,
                                                        dataset.variables[v]))
                variable_names.append(get_variable_name(self.dataset_name,
                                                        dataset.variables[v]))
                scale_factors.append(
                    get_variable_scale_factor(self.dataset_name,
                                              dataset.variables[v])
                )

            for idx, sf in enumerate(scale_factors):
                model_data[idx, :] = np.multiply(model_data[idx, :], sf)

            for idx, u in enumerate(variable_units):
                variable_units[idx], model_data[idx, :] = \
                    self.kelvin_to_celsius(u, model_data[idx, :])

            self.model_data = model_data
            self.model_times = list(map(datetime.datetime.utcfromtimestamp,
                                        mt))
            self.variable_names = variable_names
            self.variable_units = variable_units
Example #15
    def load_data(self):
        if isinstance(self.observation[0], numbers.Number):
            self.observation_variable_names = []
            self.observation_variable_units = []
            with Dataset(app.config["OBSERVATION_AGG_URL"], 'r') as ds:
                t = netcdftime.utime(ds['time'].units)
                for idx, o in enumerate(self.observation):
                    observation = {}
                    ts = t.num2date(ds['time'][o]).replace(tzinfo=pytz.UTC)
                    observation['time'] = ts.isoformat()
                    observation['longitude'] = ds['lon'][o]
                    observation['latitude'] = ds['lat'][o]

                    observation['depth'] = ds['z'][:]
                    observation['depthunit'] = ds['z'].units

                    observation['datatypes'] = []
                    data = []
                    for v in sorted(ds.variables):
                        if v in ['z', 'lat', 'lon', 'profile', 'time']:
                            continue
                        var = ds[v]
                        if var.datatype == '|S1':
                            continue

                        observation['datatypes'].append("%s [%s]" % (
                            var.long_name,
                            var.units
                        ))
                        data.append(var[o, :])

                        if idx == 0:
                            self.observation_variable_names.append(
                                var.long_name)
                            self.observation_variable_units.append(var.units)

                    observation['data'] = np.ma.array(data).transpose()
                    self.observation[idx] = observation

                self.points = [[o['latitude'], o['longitude']]
                               for o in self.observation]

        with open_dataset(get_dataset_url(self.dataset_name)) as dataset:
            ts = dataset.timestamps

            observation_times = []
            timestamps = []
            for o in self.observation:
                observation_time = dateutil.parser.parse(o['time'])
                observation_times.append(observation_time)

                deltas = [
                    (x.replace(tzinfo=pytz.UTC) -
                     observation_time).total_seconds()
                    for x in ts]

                time = np.abs(deltas).argmin()
                timestamp = ts[time]
                timestamps.append(timestamp)

            self.load_misc(dataset, self.variables)

            point_data, self.depths = self.get_data(
                dataset, self.variables, time)
            point_data = np.ma.array(point_data)

            point_data = self.apply_scale_factors(point_data)

            self.variable_units, point_data = self.kelvin_to_celsius(
                self.variable_units,
                point_data
            )

        self.data = self.subtract_climatology(point_data, timestamp)
        self.observation_time = observation_time
        self.observation_times = observation_times
        self.timestamps = timestamps
        self.timestamp = timestamp
Example #16
     if dim.lower() in ['time', 'time_counter']:
         tname = dim
         break
 time_test = dat.variables[tname][:]
 t_unit = dat.variables[
     tname].units  # get unit  "days since 1950-01-01T00:00:00Z"
 if 'months' in t_unit:
     x = np.array(range(1, 13))
     datevar = []
 else:
     try:
         t_cal = dat.variables[tname].calendar
     except AttributeError:  # Attribute doesn't exist
         t_cal = u"gregorian"  # or standard
     datevar = []
     cdftime = netcdftime.utime(t_unit)
     # safety check on the calendar: fall back to gregorian when num2date
     # does not return real datetime objects
     if not isinstance(cdftime.num2date(time_test)[0], datetime.datetime):
         cdftime = netcdftime.utime(t_unit, calendar=u"gregorian")
     datevar.append(cdftime.num2date(time_test))
     print('datevar = ', datevar)
     x = np.array(datevar)[0, :]
 y = test_dat[:, 0, 0]
 handles_for_legend.append(
     plt.plot(x,
              y,
              lw=lw_list[dataset_number],
Example #17
    def path(self,
             variable,
             depth,
             points,
             times,
             n=100,
             interpolation={
                 'method': 'inv_square',
                 'neighbours': 8
             }):

        target_lat = points[:, 0]
        target_lon = points[:, 1]

        miny, maxy, minx, maxx = self.bounding_box(target_lat, target_lon, 10)

        lat = self.latvar[miny:maxy, minx:maxx]
        lon = self.lonvar[miny:maxy, minx:maxx]

        method, neighbours, radius = self._get_interpolation(
            interpolation, target_lat, target_lon)

        ts = [
            t.replace(tzinfo=pytz.UTC) for t in netcdftime.utime(
                self.time_var.units).num2date(self.time_var[:])
        ]

        mintime, x = _take_surrounding(ts, times[0])
        x, maxtime = _take_surrounding(ts, times[-1])
        maxtime += 1
        uniquetimes = list(range(mintime, maxtime + 1))

        combined = []
        for t in range(mintime, maxtime):
            if len(variable.shape) == 3:
                data = variable[t, miny:maxy, minx:maxx]
            else:
                data = variable[t, depth, miny:maxy, minx:maxx]
            _fill_invalid_shift(np.ma.array(data))
            combined.append(
                resample(lat,
                         lon,
                         np.array(target_lat),
                         np.array(target_lon),
                         data,
                         method=method,
                         neighbours=neighbours,
                         radius_of_influence=radius,
                         nprocs=4))
        combined = np.ma.array(combined)

        if mintime + 1 >= len(ts):
            result = combined[0]
        else:
            t0 = ts[mintime]
            td = (ts[mintime + 1] - t0).total_seconds()

            deltas = np.ma.masked_array(
                [t.total_seconds() / td for t in np.subtract(times, t0)])

            model_td = ts[1] - ts[0]

            deltas[np.where(
                np.array(times) > ts[-1] + model_td / 2)] = np.ma.masked
            deltas[np.where(
                np.array(times) < ts[0] - model_td / 2)] = np.ma.masked

            # This is a slight modification on scipy's interp1d
            # https://github.com/scipy/scipy/blob/v0.17.1/scipy/interpolate/interpolate.py#L534-L561
            x = np.array(list(range(0, len(uniquetimes) - 1)))
            new_idx = np.searchsorted(x, deltas)
            new_idx = new_idx.clip(1, len(x) - 1).astype(int)
            low = new_idx - 1
            high = new_idx
            if (high >= len(x)).any():
                result = combined[0]
                # result[:, np.where(deltas.mask)] = np.ma.masked
                result[np.where(deltas.mask)] = np.ma.masked
            else:
                x_low = x[low]
                x_high = x[high]
                y_low = combined[low, list(range(0, len(times)))]
                y_high = combined[high, list(range(0, len(times)))]
                slope = (y_high - y_low) / (x_high - x_low)[None]
                y_new = slope * (deltas - x_low)[None] + y_low
                result = y_new[0]

        return result
Example #18
    def path(self, variable, depth, points, times, n=100,
             interpolation={'method': 'inv_square', 'neighbours': 8}):

        target_lat = points[:, 0]
        target_lon = points[:, 1]

        miny, maxy, minx, maxx = self.bounding_box(target_lat, target_lon, 10)

        lat = self.latvar[miny:maxy, minx:maxx]
        lon = self.lonvar[miny:maxy, minx:maxx]

        method, neighbours, radius = self._get_interpolation(interpolation,
                                                             target_lat,
                                                             target_lon)

        ts = [
            t.replace(tzinfo=pytz.UTC)
            for t in
            netcdftime.utime(self.time_var.units).num2date(self.time_var[:])
        ]

        mintime, x = _take_surrounding(ts, times[0])
        x, maxtime = _take_surrounding(ts, times[-1])
        maxtime += 1
        uniquetimes = range(mintime, maxtime + 1)

        combined = []
        for t in range(mintime, maxtime):
            if len(variable.shape) == 3:
                data = variable[t, miny:maxy, minx:maxx]
            else:
                data = variable[t, depth, miny:maxy, minx:maxx]
            _fill_invalid_shift(np.ma.array(data))
            combined.append(resample(lat,
                                     lon,
                                     np.array(target_lat),
                                     np.array(target_lon),
                                     data,
                                     method=method,
                                     neighbours=neighbours,
                                     radius_of_influence=radius,
                                     nprocs=4))
        combined = np.ma.array(combined)

        if mintime + 1 >= len(ts):
            result = combined[0]
        else:
            t0 = ts[mintime]
            td = (ts[mintime + 1] - t0).total_seconds()

            deltas = np.ma.masked_array([t.total_seconds() / td
                                        for t in np.subtract(times, t0)])

            model_td = ts[1] - ts[0]

            deltas[
                np.where(np.array(times) > ts[-1] + model_td / 2)
            ] = np.ma.masked
            deltas[
                np.where(np.array(times) < ts[0] - model_td / 2)
            ] = np.ma.masked

            # This is a slight modification on scipy's interp1d
            # https://github.com/scipy/scipy/blob/v0.17.1/scipy/interpolate/interpolate.py#L534-L561
            x = np.array(range(0, len(uniquetimes) - 1))
            new_idx = np.searchsorted(x, deltas)
            new_idx = new_idx.clip(1, len(x) - 1).astype(int)
            low = new_idx - 1
            high = new_idx
            if (high >= len(x)).any():
                result = combined[0]
                # result[:, np.where(deltas.mask)] = np.ma.masked
                result[np.where(deltas.mask)] = np.ma.masked
            else:
                x_low = x[low]
                x_high = x[high]
                y_low = combined[low, range(0, len(times))]
                y_high = combined[high, range(0, len(times))]
                slope = (y_high - y_low) / (x_high - x_low)[None]
                y_new = slope * (deltas - x_low)[None] + y_low
                result = y_new[0]

        return result
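The searchsorted-based linear interpolation used in the last two examples can be demonstrated in isolation. A minimal sketch with made-up sample points:

import numpy as np

# made-up sample points; xq holds the query positions
x = np.array([0.0, 1.0, 2.0, 3.0])
y = np.array([10.0, 20.0, 15.0, 30.0])
xq = np.array([0.5, 2.25])

idx = np.searchsorted(x, xq).clip(1, len(x) - 1)
lo, hi = idx - 1, idx
slope = (y[hi] - y[lo]) / (x[hi] - x[lo])
print(slope * (xq - x[lo]) + y[lo])   # [15.   18.75]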