示例#1
0
    def add(self, value, units, calendar=None):
        """Add ``value`` expressed in cdtime ``units`` to this time.

        :param value: amount to add, in the given units.
        :param units: one of the cdtime unit constants (Years, Seasons,
            Months, Weeks, Days, Hours, Minutes, Seconds).
        :param calendar: calendar constant; defaults to StandardCalendar.
        :returns: a new CompTime holding the resulting date components.
        :raises ValueError: if ``units`` is not a recognised cdtime unit.
        """
        if calendar is None:
            calendar = StandardCalendar

        if units >= Months:
            # Month-based units: normalise to months and let num2date
            # handle variable month lengths for the chosen calendar.
            if units == Years:
                incr = 12.0 * value
            elif units == Seasons:
                incr = 3.0 * value
            elif units == Months:
                incr = value
            else:
                raise ValueError('Unrecognised cdtime units %s' % units)

            origin = 'months since %s' % self._datetime
            # BUG FIX: original referenced the misspelled name 'celandar',
            # which raised NameError whenever this branch was taken.
            dt = nct.num2date(incr, origin, _calendar_map[calendar])
        else:
            # Sub-month units: normalise everything to hours.
            if units == Weeks:
                value = value * 168.0
            elif units == Days:
                value = value * 24.0
            elif units == Hours:
                pass
            elif units == Minutes:
                value = value / 60.0
            elif units == Seconds:
                value = value / 3600.0
            else:
                raise ValueError('Unrecognised cdtime units %s' % units)

            origin = 'hours since %s' % self._datetime
            dt = nct.num2date(value, origin, _calendar_map[calendar])

        ct = CompTime(dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second)
        return ct
 def check(self, max_n_ticks, num1, num2):
     """Build a locator and return its resolution for [num1, num2]."""
     loc = NetCDFTimeDateLocator(max_n_ticks=max_n_ticks,
                                 calendar=self.calendar,
                                 date_unit=self.date_unit)
     lower = netcdftime.num2date(num1, self.date_unit, self.calendar)
     upper = netcdftime.num2date(num2, self.date_unit, self.calendar)
     return loc.compute_resolution(num1, num2, lower, upper)
 def check(self, max_n_ticks, num1, num2):
     """Return the locator's computed resolution for the numeric span."""
     locator = NetCDFTimeDateLocator(max_n_ticks=max_n_ticks,
                                     calendar=self.calendar,
                                     date_unit=self.date_unit)
     edges = [netcdftime.num2date(num, self.date_unit, self.calendar)
              for num in (num1, num2)]
     return locator.compute_resolution(num1, num2, edges[0], edges[1])
示例#4
0
def get_list_dates(ifile, type_dates):
    '''
    Returns list of dates from one file.

    :param ifile: NetCDF file
    :type ifile: str
    :param type_dates: type of dates ('dt' for datetime objects, 'num' for float objects)
    :type type_dates: str

    :rtype: list of datetime/float

    :raises MissingIcclimInputError: if the dataset cannot be opened
    :raises ValueError: if ``type_dates`` is neither 'num' nor 'dt'
    '''
    # BUG FIX: an unrecognised type_dates used to fall through and raise
    # NameError on the undefined 'list_dt'; fail fast with a clear error.
    if type_dates not in ('num', 'dt'):
        raise ValueError("type_dates must be 'num' or 'dt', got %r"
                         % (type_dates,))

    try:
        nc = Dataset(ifile, 'r')
    except RuntimeError:
        raise MissingIcclimInputError("Failed to access dataset: " + ifile)

    try:
        var_time = nc.variables['time']
        time_units = var_time.units  # str (ex.: 'days since 1850-01-01 00:00:00')
        try:
            time_calend = var_time.calendar  # str (ex.: 'standard'/'gregorian'/...)
        except AttributeError:
            # BUG FIX: was a bare 'except:'; only a missing 'calendar'
            # attribute should trigger the gregorian default.
            time_calend = 'gregorian'

        if type_dates == 'num':
            # raw numeric time values (numpy array -> list)
            list_dt = var_time[:].tolist()
        else:  # 'dt'
            try:
                list_dt = netcdftime.num2date(var_time[:],
                                              units=time_units,
                                              calendar=time_calend).tolist()
            except Exception:
                # Fallback kept from the original best-effort behaviour:
                # some backends reject array input, so convert element-wise.
                # (The unused 'utime' instance the original built here has
                # been removed.)
                list_dt = [netcdftime.num2date(var_time_i,
                                               units=time_units,
                                               calendar=time_calend)
                           for var_time_i in var_time]
    finally:
        # BUG FIX: ensure the dataset is closed even when conversion fails.
        nc.close()

    return list_dt
示例#5
0
  def load(self):
    """Load the dataset time variable, decode it and derive dt/tdays."""
    # time:
    # NOTE(review): the trailing comma makes ``vars`` a 1-tuple wrapping
    # the dict -- presumably the shape load_vars expects; confirm.
    vars={'time':('time','ocean_time','scrum_time','clim_time')},

    self.load_vars(vars)

    # netcdf time:
    try:
      # units must have the format <units> since <date>
      self.datetime=netcdftime.num2date(self.time,self.var_as['time']['units'])
    # NOTE(review): bare except -- any decode failure silently sets False.
    except: self.datetime=False


    # time is usually seconds, but may be days!!, so:
    if self.var_as['time']['units'].strip().startswith('days'): self.time=self.time*86400.

    # sampling interval from the first two records (0 if single record)
    if len(self.time)>1:
      self.dt=self.time[1]-self.time[0]
    else: self.dt=0

    self.tdays=self.time/86400.


    # s params:
    # hasattr(obj, '__contains__')
    # NOTE(review): basestring exists only in Python 2.
    if isinstance(self.name,basestring):
      self.s_params=rt.s_params(self.name)
    else: self.s_params=rt.s_params(self.name[0])
示例#6
0
    def load(self):
        """Load the dataset time variable, decode it and derive dt/tdays."""
        # time:
        # NOTE(review): the trailing comma makes ``vars`` a 1-tuple
        # wrapping the dict -- presumably what load_vars expects; confirm.
        vars = {'time': ('time', 'ocean_time', 'scrum_time', 'clim_time')},

        self.load_vars(vars)

        # netcdf time:
        try:
            # units must have the format <units> since <date>
            self.datetime = netcdftime.num2date(self.time,
                                                self.var_as['time']['units'])
        except:
            # NOTE(review): bare except -- any decode failure silently
            # leaves datetime as False.
            self.datetime = False

        # time is usually seconds, but may be days!!, so:
        if self.var_as['time']['units'].strip().startswith('days'):
            self.time = self.time * 86400.

        # sampling interval from the first two records (0 if single record)
        if len(self.time) > 1:
            self.dt = self.time[1] - self.time[0]
        else:
            self.dt = 0

        self.tdays = self.time / 86400.

        # s params:
        # hasattr(obj, '__contains__')
        # NOTE(review): basestring exists only in Python 2.
        if isinstance(self.name, basestring):
            self.s_params = rt.s_params(self.name)
        else:
            self.s_params = rt.s_params(self.name[0])
示例#7
0
    def load(self):
        """Load the dataset time variable, decode it and derive dt/tdays."""
        # time:
        # ``vars`` is a 1-tuple wrapping the candidate-name dict --
        # presumably the shape load_vars expects; confirm against caller.
        vars = ({"time": ("time", "ocean_time", "scrum_time", "clim_time")},)

        self.load_vars(vars)

        # netcdf time:
        try:
            # units must have the format <units> since <date>
            self.datetime = netcdftime.num2date(self.time, self.var_as["time"]["units"])
        except:
            # NOTE(review): bare except -- any decode failure silently
            # leaves datetime as False.
            self.datetime = False

        # time is usually seconds, but may be days!!, so:
        if self.var_as["time"]["units"].strip().startswith("days"):
            self.time = self.time * 86400.0

        # sampling interval from the first two records (0 if single record)
        if len(self.time) > 1:
            self.dt = self.time[1] - self.time[0]
        else:
            self.dt = 0

        self.tdays = self.time / 86400.0

        # s params:
        # hasattr(obj, '__contains__')
        # NOTE(review): basestring exists only in Python 2.
        if isinstance(self.name, basestring):
            self.s_params = rt.s_params(self.name)
        else:
            self.s_params = rt.s_params(self.name[0])
示例#8
0
    def tocomponent(self, calendar=None):
        """Decode this relative time into a CompTime.

        :param calendar: calendar constant; StandardCalendar when None.
        :returns: CompTime built from the decoded date's components.
        """
        cal = StandardCalendar if calendar is None else calendar
        decoded = nct.num2date(self.value, self.units, _calendar_map[cal])
        return CompTime(decoded.year, decoded.month, decoded.day,
                        decoded.hour, decoded.minute, decoded.second)
示例#9
0
def jarkusmean(id_min, id_max, plotproperties=None, figsize=None):
    """Plot stacked transect profiles for ids in (id_min, id_max).

    Reads the 'transect' NetCDF resource, draws each selected transect
    (vertically offset), colours surveys by year with a YlGnBu colorbar
    and returns the rendered figure as an in-memory file object.

    :param id_min: lower transect id bound (exclusive)
    :param id_max: upper transect id bound (exclusive)
    :param plotproperties: kwargs forwarded to ``fig.savefig``
    :param figsize: matplotlib figure size tuple, default (8, 6)
    :returns: image buffer rewound to position 0
    :raises WouldTakeTooLong: when more than 30 transects are selected
    """
    id_min = int(id_min)
    id_max = int(id_max)
    if plotproperties is None:
        plotproperties = {}
    if figsize is None:
        figsize = (8, 6)
    dataset = netCDF4.Dataset(NC_RESOURCE['transect'], 'r')
    try:
        # Lookup variables
        ids_all = dataset.variables['id'][:]
        # idx, = (id == 7004000).nonzero()
        ids = ((ids_all < id_max) & (ids_all > id_min)).nonzero()[0]
        if len(ids) > 30:
            raise WouldTakeTooLong('Too much data selected')
        timevar = dataset.variables['time']
        time = netcdftime.num2date(timevar[:], timevar.units)
        cross_shore = dataset.variables['cross_shore'][:]

        # Define color for years
        timenum = matplotlib.dates.date2num(time)
        sm = matplotlib.cm.ScalarMappable(cmap=matplotlib.cm.YlGnBu)
        sm.set_clim(np.min(timenum), np.max(timenum))
        sm.set_array(timenum)

        # Create the plot
        fig = pyplot.figure(figsize=figsize)
        plot = fig.add_subplot(111)
        # NOTE(review): loop variable 'id' shadows the builtin.
        for i, id in enumerate(ids):
            z = dataset.variables['altitude'][:,id,:]
            # Skip transects where some survey row is entirely masked.
            if z.mask.all(1).any():
                continue
            logger.debug('Plotting %s', id)
            # Offset each transect by i*4 so the profiles stack visibly.
            zfilled = fill_z(cross_shore, z+i*4)
            timeplot(plot, cross_shore, zfilled, timenum, sm)
    finally:
        dataset.close()

    # Format the colorbar (year labels; the locator line is disabled)
    cb = fig.colorbar(sm)
    yearsfmt = matplotlib.dates.DateFormatter('%Y')
    yearsloc = matplotlib.dates.YearLocator()
    #cb.locator = yearsloc
    cb.formatter = yearsfmt
    cb.update_ticks()

    plot.set_xlim(-500, 1000)

    # save image
    buf = cStringIO.StringIO()
    fig.savefig(buf, **plotproperties)
    buf.seek(0)

    # cleanup
    fig.clf()
    # return an 'open' file descriptor
    return buf
示例#10
0
def get_list_dates(ifile, type_dates):
    '''
    Returns list of dates from one file.

    :param ifile: NetCDF file
    :type ifile: str
    :param type_dates: type of dates ('dt' for datetime objects, 'num' for float objects)
    :type type_dates: str

    :rtype: list of datetime/float

    :raises MissingIcclimInputError: if the dataset cannot be opened
    :raises ValueError: if ``type_dates`` is neither 'num' nor 'dt'
    '''
    # BUG FIX: an unrecognised type_dates previously raised NameError on
    # the undefined 'list_dt'; validate up front instead.
    if type_dates not in ('num', 'dt'):
        raise ValueError("type_dates must be 'num' or 'dt', got %r"
                         % (type_dates,))

    try:
        nc = Dataset(ifile, 'r')
    except RuntimeError:
        raise MissingIcclimInputError("Failed to access dataset: " + ifile)

    try:
        var_time = nc.variables['time']
        time_units = var_time.units # str (ex.: 'days since 1850-01-01 00:00:00')
        try:
            time_calend = var_time.calendar # str (ex.: 'standard'/'gregorian'/...)
        except AttributeError:
            # BUG FIX: narrowed from a bare 'except:' -- only a missing
            # 'calendar' attribute should select the gregorian default.
            time_calend = 'gregorian'

        if type_dates == 'num':
            # raw numeric values: numpy array -> list
            list_dt = var_time[:].tolist()
        else:  # 'dt'
            try:
                list_dt = netcdftime.num2date(var_time[:],
                                              units=time_units,
                                              calendar=time_calend).tolist()
            except Exception:
                # Best-effort fallback kept from the original: convert
                # element-wise when array input is rejected. (The unused
                # 'utime' instance the original created here was removed.)
                list_dt = [netcdftime.num2date(var_time_i,
                                               units=time_units,
                                               calendar=time_calend)
                           for var_time_i in var_time]
    finally:
        # BUG FIX: close the dataset even if conversion raised.
        nc.close()

    return list_dt
示例#11
0
    def test_select_nc(self):
        """Exercise date2index 'before'/'after'/'nearest' selection and
        date2num/num2date round-trips at millisecond/microsecond units."""
        nutime = self.time_vars['time']

        dates = [datetime(1950, 1, 2, 6), datetime(
            1950, 1, 3), datetime(1950, 1, 3, 18)]

        t = date2index(dates, nutime, select='before')
        assert_equal(t, [1, 2, 2])

        t = date2index(dates, nutime, select='after')
        assert_equal(t, [2, 2, 3])

        t = date2index(dates, nutime, select='nearest')
        assert_equal(t, [1, 2, 3])

        # Test dates outside the support with select
        t = date2index(datetime(1949, 12, 1), nutime, select='nearest')
        assert_equal(t, 0)

        t = date2index(datetime(1978, 1, 1), nutime, select='nearest')
        assert_equal(t, 365)

        # Test dates outside the support with before
        self.assertRaises(
            ValueError, date2index, datetime(1949, 12, 1), nutime, select='before')

        t = date2index(datetime(1978, 1, 1), nutime, select='before')
        assert_equal(t, 365)

        # Test dates outside the support with after
        t = date2index(datetime(1949, 12, 1), nutime, select='after')
        assert_equal(t, 0)

        self.assertRaises(
            ValueError, date2index, datetime(1978, 1, 1), nutime, select='after')
        # test microsecond and millisecond units
        # 2038-01-19T03:14:07 is 2**31 - 1 seconds after the Unix epoch.
        unix_epoch = "milliseconds since 1970-01-01T00:00:00Z"
        d = datetime(2038, 1, 19, 3, 14, 7)
        millisecs = int(
            date2num(d, unix_epoch, calendar='proleptic_gregorian'))
        assert_equal(millisecs, (2 ** 32 / 2 - 1) * 1000)
        unix_epoch = "microseconds since 1970-01-01T00:00:00Z"
        microsecs = int(date2num(d, unix_epoch))
        assert_equal(microsecs, (2 ** 32 / 2 - 1) * 1000000)
        # test microsecond accuracy in date2num/num2date roundtrip
        # note: microsecond accuracy lost for time intervals greater
        # than about 270 years.
        units = 'microseconds since 1776-07-04 00:00:00-12:00'
        dates = [datetime(1962, 10, 27, 6, 1, 30, 9001),
                 datetime(1993, 11, 21, 12, 5, 25, 999),
                 datetime(1995, 11, 25, 18, 7, 59, 999999)]
        times2 = date2num(dates, units)
        dates2 = num2date(times2, units)
        for date, date2 in zip(dates, dates2):
            assert_equal(date, date2)
示例#12
0
文件: netcdf.py 项目: jsh1012/okean
def nctime(filename, varname, interface='auto', **kargs):
  """Read a time variable and decode it to datetimes using its units."""
  values = use(filename, varname, interface=interface, **kargs)
  units = vatt(filename, varname, 'units')
  import netcdftime

  # dates like 2004-01-01T00:00:00Z not supported by older varsion
  # units also cannot have multiple spaces...
  old_version = netcdftime.__version__ < '0.9.2'  # dont really know which version...
  if old_version:
    cleaned = units.replace('T', ' ').replace('Z', ' ')
    units = ' '.join(cleaned.split())

  return netcdftime.num2date(values, units)
示例#13
0
def file_time(dset):
    """Return the dataset's first 'time_counter' value as a YYYYMMDD string."""
    tvar = dset.variables["time_counter"]
    values = tvar[:]  # numeric time values
    units = tvar.units

    try:
        cal = tvar.calendar
    except AttributeError:  # variable carries no calendar attribute
        cal = u"gregorian"  # or standard

    first = netcdftime.num2date(values, units=units, calendar=cal)[0]
    return first.strftime('%Y%m%d')
示例#14
0
def _decode_datetime_with_netcdftime(num_dates, units, calendar):
    """Decode numeric times via netcdftime, upgrading to numpy datetime64
    when the dates fit the representable range; otherwise warn and keep
    the netcdftime objects."""
    nctime = _import_netcdftime()

    dates = np.asarray(nctime.num2date(num_dates, units, calendar))
    earliest = dates[np.nanargmin(num_dates)]
    latest = dates[np.nanargmax(num_dates)]

    if earliest.year < 1678 or latest.year >= 2262:
        # Outside the datetime64[ns]-safe range: keep netcdftime objects.
        warnings.warn('Unable to decode time axis into full '
                      'numpy.datetime64 objects, continuing using dummy '
                      'netcdftime.datetime objects instead, reason: dates out'
                      ' of range', SerializationWarning, stacklevel=3)
        return dates

    try:
        dates = nctime_to_nptime(dates)
    except ValueError as e:
        warnings.warn('Unable to decode time axis into full '
                      'numpy.datetime64 objects, continuing using '
                      'dummy netcdftime.datetime objects instead, reason:'
                      '{0}'.format(e), SerializationWarning, stacklevel=3)
    return dates
示例#15
0
    def file_time(self, time_step):
        """
        Converts timestep to datetime object
        :param time_step: an int time step to convert
        :return: formatted as '%Y%m%d' string
        """
        tvar = self.dset.variables['time']
        value = tvar[time_step]  # numeric value at the requested step
        units = tvar.units

        try:
            cal = tvar.calendar
        except AttributeError:  # variable carries no calendar attribute
            cal = u"gregorian"  # or standard

        stamp = netcdftime.num2date(value, units=units, calendar=cal)
        return stamp.strftime('%Y%m%d')
示例#16
0
  def load(self):
    """Load the bulk-forcing time variable, decode it and derive dt/tdays."""
    # time:
    # NOTE(review): the trailing comma makes ``vars`` a 1-tuple wrapping
    # the dict -- presumably the shape load_vars expects; confirm.
    vars={'time':('bulk_time','time','scrum_time','ocean_time')},

    self.load_vars(vars)

    # netcdf time:
    try:
      # units must have the format <units> since <date>
      self.datetime=netcdftime.num2date(self.time,self.var_as['time']['units'])
    # NOTE(review): bare except -- any decode failure silently sets False.
    except: self.datetime=False

    # time is usually seconds, but may be days!!, so:
    if self.var_as['time']['units'].strip().startswith('days'): self.time=self.time*86400.

    # sampling interval from the first two records (0 if single record)
    if len(self.time)>1:
      self.dt=self.time[1]-self.time[0]
    else: self.dt=0

    self.tdays=self.time/86400.
示例#17
0
    def timelist(self, tname='time'):
        """
        Generates array of datetime.datetime objects, based on dataset time variable
        :param tname: name of a time variable
        :return: array of datetime.datetime objects
        """
        tvar = self.dset.variables[tname]
        units = tvar.units  # unit string, e.g. 'days since 1850-01-01'

        try:
            cal = tvar.calendar
        except AttributeError:  # variable carries no calendar attribute
            cal = u"gregorian"  # or standard

        try:
            return netcdftime.num2date(tvar[:], units=units, calendar=cal)
        except ValueError:
            # TODO - fix it
            raise Exception('no since in unit_string')
示例#18
0
    def tick_values(self, vmin, vmax):
        """Return tick locations (in ``date_unit`` numbers) for [vmin, vmax].

        Chooses a resolution (YEARLY .. SECONDLY) from the span, builds
        candidate ticks as netcdftime datetimes at that resolution, then
        converts them back to numeric axis coordinates.
        """
        vmin, vmax = mtransforms.nonsingular(vmin, vmax, expander=1e-7, tiny=1e-13)

        lower = netcdftime.num2date(vmin, self.date_unit, self.calendar)
        upper = netcdftime.num2date(vmax, self.date_unit, self.calendar)

        # NOTE(review): treats the numeric span as days -- assumes
        # date_unit is day-based; confirm.
        self.ndays = abs(vmax - vmin)

        resolution, n = self.compute_resolution(vmin, vmax, lower, upper)

        if resolution == 'YEARLY':
            # TODO START AT THE BEGINNING OF A DECADE/CENTURY/MILLENIUM as appropriate.
            years = self._max_n_locator.tick_values(lower.year, upper.year)
            ticks = [netcdftime.datetime(int(year), 1, 1) for year in years]
        elif resolution == 'MONTHLY':
            # TODO START AT THE BEGINNING OF A DECADE/CENTURY/MILLENIUM as appropriate.
            # Work in month offsets from 'lower', then split back into
            # (year, month) pairs.
            months_offset = self._max_n_locator.tick_values(0, n)
            ticks = []
            for offset in months_offset:
                year = lower.year + np.floor((lower.month + offset) / 12)
                month = ((lower.month + offset) % 12) + 1
                ticks.append(netcdftime.datetime(int(year), int(month), 1))
        elif resolution == 'DAILY':
            # TODO: It would be great if this favoured multiples of 7.
            days = self._max_n_locator_days.tick_values(vmin, vmax)
            ticks = [netcdftime.num2date(dt, self.date_unit, self.calendar) for dt in days]
        elif resolution == 'HOURLY':
            # Sub-day resolutions: locate ticks in a fixed-epoch unit,
            # then decode back to datetimes.
            hour_unit = 'hours since 2000-01-01'
            in_hours = netcdftime.date2num([lower, upper], hour_unit, self.calendar)
            hours = self._max_n_locator.tick_values(in_hours[0], in_hours[1])
            ticks = [netcdftime.num2date(dt, hour_unit, self.calendar) for dt in hours]
        elif resolution == 'MINUTELY':
            minute_unit = 'minutes since 2000-01-01'
            in_minutes = netcdftime.date2num([lower, upper], minute_unit, self.calendar)
            minutes = self._max_n_locator.tick_values(in_minutes[0], in_minutes[1])
            ticks = [netcdftime.num2date(dt, minute_unit, self.calendar) for dt in minutes]
        elif resolution == 'SECONDLY':
            second_unit = 'seconds since 2000-01-01'
            in_seconds = netcdftime.date2num([lower, upper], second_unit, self.calendar)
            seconds = self._max_n_locator.tick_values(in_seconds[0], in_seconds[1])
            ticks = [netcdftime.num2date(dt, second_unit, self.calendar) for dt in seconds]
        else:
            raise ValueError('Resolution {} not implemented yet.'.format(resolution))

        return netcdftime.date2num(ticks, self.date_unit, self.calendar)
示例#19
0
    def load(self):
        """Load the bulk-forcing time variable, decode it and derive dt/tdays."""
        # time:
        # ``vars`` is a 1-tuple wrapping the candidate-name dict --
        # presumably the shape load_vars expects; confirm against caller.
        vars = ({"time": ("bulk_time", "time", "scrum_time", "ocean_time")},)

        self.load_vars(vars)

        # netcdf time:
        try:
            # units must have the format <units> since <date>
            self.datetime = netcdftime.num2date(self.time, self.var_as["time"]["units"])
        except:
            # NOTE(review): bare except -- any decode failure silently
            # leaves datetime as False.
            self.datetime = False

        # time is usually seconds, but may be days!!, so:
        if self.var_as["time"]["units"].strip().startswith("days"):
            self.time = self.time * 86400.0

        # sampling interval from the first two records (0 if single record)
        if len(self.time) > 1:
            self.dt = self.time[1] - self.time[0]
        else:
            self.dt = 0

        self.tdays = self.time / 86400.0
示例#20
0
    def load(self):
        """Load the bulk-forcing time variable, decode it and derive dt/tdays."""
        # time:
        # NOTE(review): the trailing comma makes ``vars`` a 1-tuple
        # wrapping the dict -- presumably what load_vars expects; confirm.
        vars = {'time': ('bulk_time', 'time', 'scrum_time', 'ocean_time')},

        self.load_vars(vars)

        # netcdf time:
        try:
            # units must have the format <units> since <date>
            self.datetime = netcdftime.num2date(self.time,
                                                self.var_as['time']['units'])
        except:
            # NOTE(review): bare except -- any decode failure silently
            # leaves datetime as False.
            self.datetime = False

        # time is usually seconds, but may be days!!, so:
        if self.var_as['time']['units'].strip().startswith('days'):
            self.time = self.time * 86400.

        # sampling interval from the first two records (0 if single record)
        if len(self.time) > 1:
            self.dt = self.time[1] - self.time[0]
        else:
            self.dt = 0

        self.tdays = self.time / 86400.
示例#21
0
def write_sinarame_cfradial(path):
    """
    This function takes SINARAME_H5 files (where every file has only one field
    and one volume) from a folder and writes a CfRadial file for each volume
    including all fields.

    Parameters
    ----------
    path : str
        Where the SINARAME_H5 files are.
    """

    path_user = os.path.expanduser(path)

    TH_list = glob.glob(path_user + '/*_TH_*.H5')

    # Volume timestamps, sliced out of the *_TH_* file names.
    file_date = [i.split('_')[-1][9:-4] for i in TH_list]
    file_date.sort()

    for i in file_date:
        files = glob.glob(path_user + '/*' + i + 'Z.H5')

        # I want it to start with TV or TH because of some range issue
        files.sort(reverse=True)
        files.sort(key=lambda x: len(x.split('_')[-2]))

        for j in np.arange(len(files)):
            basename = os.path.basename(files[j])
            bs = basename.split('_')
            base1 = '{b1}_{b2}_{b3}_{fn}_{b4}'.format(b1=bs[0],
                                                      b2=bs[1],
                                                      b3=bs[2],
                                                      fn=bs[3],
                                                      b4=bs[4])
            file = '{path}/{base1}'.format(path=path_user, base1=base1)

            if j == 0:
                # First file of the volume creates the Radar object and
                # fixes the expected (azimuth, range) field shape.
                try:
                    radar = read_sinarame_h5(file, file_field_names=True)
                    azi_shape, range_shape = radar.fields['TV']['data'].shape

                except ValueError:
                    print("x - this Radar wasn't created", base1, sep='\t')

                # In case the H5 file was badly created with bbufr it
                # throws a KeyError
                except KeyError:
                    print("x - Wrong BUFR conversion", base1, sep='\t')

            else:
                try:
                    radar_tmp = read_sinarame_h5(file, file_field_names=True)

                    # BUG FIX: dict.keys() is a view and is not
                    # subscriptable on Python 3 (this function already
                    # uses Python 3 print(..., sep=)); materialise first.
                    field = list(radar_tmp.fields.keys())[0]

                    # Add missing gates
                    if radar_tmp.fields[field]['data'].shape[1] != range_shape:
                        n_missing_gates = \
                            (range_shape
                             - radar_tmp.fields[field]['data'].shape[1])
                        fill = np.ma.masked_all((azi_shape, n_missing_gates))
                        data = np.ma.concatenate(
                            [radar_tmp.fields[field]['data'], fill], 1)
                        radar_tmp.fields[field]['data'] = data

                    radar.fields.update(radar_tmp.fields)

                # Same as above, bbufr conversion errors
                except KeyError:
                    print("x - Wrong BUFR conversion", base1, sep='\t')

                except ValueError:
                    print("x - this Radar wasn't created", base1, sep='\t')

                # In case the first file didn't create a Radar object
                except NameError:
                    print("Radar didn't exist, creating")
                    radar = read_sinarame_h5(file, file_field_names=True)

        # First/last timestamps of the assembled volume, for the file name.
        time1 = num2date(
            radar.time['data'][0],
            radar.time['units'],
            calendar='standard',
            only_use_cftime_datetimes=True,
            only_use_python_datetimes=False).strftime('%Y%m%d_%H%M%S')
        time2 = num2date(
            radar.time['data'][-1],
            radar.time['units'],
            calendar='standard',
            only_use_cftime_datetimes=True,
            only_use_python_datetimes=False).strftime('%Y%m%d_%H%M%S')

        radar._DeflateLevel = 5

        # cfrad.TIME1_to_TIME2_NAME_VCP_RANGE.nc
        cffile = 'cfrad.{time1}.0000_to_{time2}.0000'\
                 '_{b1}_{est}_{ran}'.format(time1=time1, time2=time2,
                                            b1=bs[0], est=bs[1], ran=bs[2])

        # BUG FIX: the original format string was
        # 'Writing to {path}(unknown).nc' and ignored the 'filename'
        # argument, printing a literal "(unknown)" instead of the name.
        print('Writing to {path}/{filename}.nc'.format(path=path_user,
                                                       filename=cffile))
        write_cfradial(path_user + '/' + cffile + '.nc',
                       radar,
                       format='NETCDF4_CLASSIC')
示例#22
0
 def __call__(self, x, pos=0):
     """Format axis coordinate ``x`` as a date string for the current zoom."""
     fmt = self.pick_format(ndays=self.locator.ndays)
     when = netcdftime.num2date(x, self.time_units, self.calendar)
     return when.strftime(fmt)
示例#23
0
print('plot-1D.py '+ifile+' '+ifile_base+' '+odir)

if not os.path.exists(ifile):
    print('ifile '+ifile+' not found')
    Usage()


# =============================================================================
# Main

# open file
#ncfile = Dataset(ifile)
ncfile = netcdf.netcdf_file(ifile, 'r')
times = ncfile.variables['time']
dates = num2date(times[:],units=times.units,calendar=times.calendar)
dates_axis = []#array('i')
for date in dates:
    dates_axis.append(int(str(date)[0:4]))

# loop all vars
for var_name in ncfile.variables: 
    var = ncfile.variables[var_name]
    ndims = len(var.shape)
    ofile_base = os.path.splitext(ifile_base)[0]+'_'+var_name
    if ndims < 3:
        continue
    if var_name in exclude_vars:
        continue
    # plot 1d var (file has 3 dims)
    if ndims == 3:
示例#24
0
文件: means.py 项目: alexfmsu/P1.6
def compute_mean(datafile, window='day'):
    """Compute daily or monthly means of a RegCM output file.

    A new NETCDF4_CLASSIC file is created in the current directory, named
    from the input file's underscore-separated pieces plus the requested
    ``window``; every variable with x/y dimensions is written compressed,
    and time bounds ('time_bnds') are added.

    :param datafile: path to the input RegCM NetCDF file
    :param window: averaging window, 'day' or 'mon'
    :raises RuntimeError: for an unsupported window or unwritable output
    """
    from netCDF4 import Dataset
    import numpy as np
    import time
    from netcdftime import datetime, num2date, utime
    import os
    # NOTE(review): string.join exists only in Python 2, so this
    # function is Python-2 only as written.
    from string import join

    ncf = Dataset(datafile)

    times = ncf.variables['time']
    if len(times) < 1:
        print('No timesteps in file !')
        sys.exit(0)
    # Shift hour-based stamps slightly back so interval-end stamps fall
    # into the interval they close.
    if times.units.find('hours') >= 0:
        dates = num2date(times[:] - 0.01,
                         units=times.units,
                         calendar=times.calendar)
    else:
        dates = num2date(times[:], units=times.units, calendar=times.calendar)
    # First/last dates, both as datetimes and zero-padded YYYYMMDD strings.
    d1 = datetime(dates[0].year, dates[0].month, dates[0].day)
    d2 = datetime(dates[-1].year, dates[-1].month, dates[-1].day)
    f1 = (repr(dates[0].year).zfill(4) + repr(dates[0].month).zfill(2) +
          repr(dates[0].day).zfill(2))
    f2 = (repr(dates[-1].year).zfill(4) + repr(dates[-1].month).zfill(2) +
          repr(dates[-1].day).zfill(2))

    pieces = os.path.basename(os.path.splitext(datafile)[0]).split('_')

    # Open the output file; its name encodes window and date span.
    if window == 'day':
        if ncf.frequency == 'day' or ncf.frequency == 'mon':
            print('How to make daily mean on day or monthly dataset?')
            sys.exit(-1)
        try:
            nco = Dataset(join(pieces[0:7], '_') + '_' + window + '_' + f1 +
                          '12-' + f2 + '12.nc',
                          'w',
                          format='NETCDF4_CLASSIC')
        except:
            raise RuntimeError('Cannot open output file')
        tunit = 'days since 1949-12-01 00:00:00 UTC'
    elif window == 'mon':
        if ncf.frequency == 'mon':
            print('How to make monthly mean on monthly dataset?')
            sys.exit(-1)
        try:
            nco = Dataset(join(pieces[0:7], '_') + '_' + window + '_' + f1 +
                          '-' + f2 + '.nc',
                          'w',
                          format='NETCDF4_CLASSIC')
        except:
            raise RuntimeError('Cannot open output file')
        tunit = 'days since 1949-12-01 00:00:00 UTC'
    else:
        raise RuntimeError(
            'Unsupported time window. Only day and mon implemented')

    cdftime = utime(tunit, calendar=times.calendar)

    # Copy global attributes, overriding 'frequency' with the new window.
    for attr in ncf.ncattrs():
        if attr == 'frequency':
            nco.setncattr(attr, window)
        else:
            nco.setncattr(attr, getattr(ncf, attr))

    # Copy dimensions (unlimited stays unlimited).
    for dim in ncf.dimensions:
        if (ncf.dimensions[dim].isunlimited()):
            nco.createDimension(dim)
        else:
            nco.createDimension(dim, len(ncf.dimensions[dim]))

    if 'time_bnds' not in ncf.dimensions:
        nco.createDimension('time_bnds', 2)

    tbnds = nco.createVariable('time_bnds', 'f8', ['time', 'time_bnds'])
    tbnds.setncattr('units', tunit)
    tbnds.setncattr('calendar', times.calendar)

    # Recreate variables: x/y gridded ones compressed, others plain;
    # fix up time units/bounds and cell_methods attributes.
    for var in ncf.variables:
        nctype = ncf.variables[var].datatype
        if ('x' in ncf.variables[var].dimensions
                and 'y' in ncf.variables[var].dimensions):
            nco.createVariable(var,
                               nctype,
                               ncf.variables[var].dimensions,
                               shuffle=True,
                               fletcher32=True,
                               zlib=True,
                               complevel=9)
        else:
            if var == 'time_bnds':
                pass
            else:
                nco.createVariable(var, nctype, ncf.variables[var].dimensions)
        if var == 'time':
            hasbnds = False
            for attr in ncf.variables[var].ncattrs():
                if attr == 'units':
                    nco.variables[var].setncattr('units', tunit)
                elif attr == 'bounds':
                    nco.variables[var].setncattr('bounds', 'time_bnds')
                    hasbnds = True
                else:
                    nco.variables[var].setncattr(
                        attr, getattr(ncf.variables[var], attr))
            if not hasbnds:
                nco.variables[var].setncattr('bounds', 'time_bnds')
        elif var == 'time_bnds':
            pass
        else:
            if 'time' in ncf.variables[var].dimensions:
                if 'cell_methods' in ncf.variables[var].ncattrs():
                    attvalue = (getattr(ncf.variables[var], 'cell_methods') +
                                ' within ' + ncf.frequency +
                                ' time: mean over ' + window)
                    nco.variables[var].setncattr('cell_methods', attvalue)
                else:
                    nco.variables[var].setncattr('cell_methods',
                                                 'time: mean over ' + window)
            for attr in ncf.variables[var].ncattrs():
                if attr != 'cell_methods':
                    nco.variables[var].setncattr(
                        attr, getattr(ncf.variables[var], attr))

    # Build the output time axis and bounds; 'ic' groups input steps into
    # the output averaging bins, 'dic' is the list of distinct bins.
    if window == 'day':
        rc = cdftime.date2num(dates)
        ic = rc.astype(int)
        dic = np.unique(ic)
        # stamp each day at noon, bounds at midnight-to-midnight
        nco.variables['time'][:] = dic + 0.5
        tbnds[:, 0] = dic + 0.0
        tbnds[:, 1] = dic + 1.0
    else:
        # monthly bins keyed by year*100 + month
        ic = np.zeros(len(times), dtype='i4')
        for it in range(0, len(times)):
            ic[it] = int(dates[it].year * 100 + dates[it].month)
        dic = np.unique(ic)
        for it in range(0, len(dic)):
            indx = (ic == dic[it])
            nco.variables['time'][it] = np.median(times[indx])
        # half a step on each side: 12 h for hourly units, 0.5 d otherwise
        if times.units.find('hours') > 0:
            diff = 12.0
        else:
            diff = 0.5
        for it in range(0, len(dic)):
            indx = (ic == dic[it])
            tbnds[it, 0] = np.min(times[indx]) - diff
            tbnds[it, 1] = np.max(times[indx]) + diff

    # Copy static variables verbatim; average time-dependent ones per bin.
    for var in ncf.variables:
        if 'time' not in ncf.variables[var].dimensions:
            nco.variables[var][:] = ncf.variables[var][:]
        else:
            if var == 'time' or var == 'time_bnds':
                pass
            else:
                for it in range(0, len(dic)):
                    indx = (ic == dic[it])
                    nco.variables[var][it, Ellipsis] = (np.nanmean(
                        ncf.variables[var][indx, Ellipsis],
                        axis=0,
                        keepdims=True))
    ncf.close()
    nco.close()
示例#25
0
if len(fh.variables[options.varname].shape)==4:
    vardata = fh.variables[options.varname][:,options.zlevel,bufferzone:-bufferzone,bufferzone:-bufferzone]
elif len(fh.variables[options.varname].shape)==3:
    vardata = fh.variables[options.varname][:,bufferzone:-bufferzone,bufferzone:-bufferzone]

vardata *= options.multiplicator

try:
    if fh.variables[options.varname].units=='kg m-2 s-1' and options.multiplicator==1:
        vardata *= 86400
except:
    pass

grdis = gridfh.grid_size_in_meters

times = num2date(fh.variables['time'][:], fh.variables['time'].units, fh.variables['time'].calendar)
if options.timestep is not None:
    print "Timestep requested: %d" % options.timestep
    time1 = options.timestep
    time2 = time1+1
else:
    time1 = 0
    time2 = times.shape[0]

counter = 0
for t in range(time1,time2):
    (Y,mon,d) = times[t].year,times[t].month,times[t].day
    (hh,mm,ss) = times[t].hour, times[t].minute, times[t].second
    print "Processing timestep %d, %4d-%02d-%02d %02d:%02d" % (t, Y, mon, d, hh, mm)
    plt.figure(figsize=(10.24,7.68), dpi=300)
    if options.show_domain=='CR':
示例#26
0
# Extract data from NetCDF file: list what the file actually contains first.
print GCMf.variables.keys() 
print GCMf.dimensions.keys() 

# Full 3-D field (time, lat, lon) for the GCM variable.
GCMvar3D=GCMf.variables[GCMvar][:,:,:]
#MeanBias=GCMvar3D
# Pre-allocated bias array with shape (1440 times, 145 lats, 192 lons).
MeanBias=np.zeros(1440*145*192).reshape(1440,145,192)
print MeanBias.shape
#quit()
# Corresponding reanalysis field for comparison.
RELYvar3D=RELYf.variables[RELYvar][:,:,:]

# Number of longitude points.
lenLon=len(GCMvar3D[0,0,:])

#quit()
#=================================================== to datetime
# Decode the GCM time axis using the model's 360-day calendar.
GCMtime=netcdftime.num2date(GCMf.variables['time'][:],GCMf.variables['time'].units,calendar='360_day')
#GCMtime=netcdftime.num2date(GCMf.variables['time'][:],GCMf.variables['time'].units)
#print GCMtime[9].year
print type(GCMtime)
#print  [str(i) for i in GCMtime[:]]
#GCMindex=[DT.datetime.strptime(t,'%Y-%m-%d %H:%M:%S') for t in [str(i) for i in GCMtime[:]]]
#print GCMindex
#print DT.datetime.strptime('2002-02-30 4:00:09','%Y-%m-%d %H:%M:%S') 
# NOTE: this day does not exist in Python's standard (Gregorian) calendar
# because the 360-day model calendar has dates like Feb 30.

#=================================================== to datetime
# NOTE: when using the keyword calendar='360_day' it gives
#       wrong values for ONLY this netcdf file; GCMtime is quite OK.

#cdftime = utime(RELYf.variables['time'].units,calendar='360_day')
#cdftime = utime(RELYf.variables['time'].units)
    Usage()

# =============================================================================
# Main

# Open the input file with either scipy's pure-python reader or netCDF4's
# Dataset, depending on the use_scipy flag set earlier.
if use_scipy:
    ncfile = netcdf.netcdf_file(ifile, 'r')
else:
    ncfile = Dataset(ifile)
print('ifile: '+ifile)
#print('variables: '+str(ncfile.variables))
#print('time: '+str(ncfile.variables['time']))
times = ncfile.variables['time']
# Decode the time axis; the calendar attribute is deliberately not passed
# (the commented-out line shows the calendar-aware variant).
#dates = num2date(times[:],units=times.units,calendar=times.calendar)
dates = num2date(times[:],units=times.units)
#print('dates: '+str(dates))


# Tracks whether the per-variable loop below is on its first variable.
firstvar=True
for var_name in sorted(ncfile.variables.iterkeys()): 
#    print(var_name)
    print var_name,
    sys.stdout.flush()
    var = ncfile.variables[var_name]
    ndims = len(var.shape)
    ofile_base = os.path.splitext(ifile_base)[0]+'_'+var_name
    if ndims < 3:
        print " skipped"
        continue    
示例#28
0
def load_data(f,quiet=0,**kargs):
  '''
  Loads prognostic variables (temp,salt,u,v,ubar,vbar,zeta) from
  netcdf file or opendap server. Also loads lon,lat, depth, and time.

  If f is a file, it must include the 1d variables lon,lat and depth;
  the 2d variable ssh (zeta) and the 3d variables temp, salt, u and v;
  ie, each file must contain data for a simgle time. The file must also
  contain the variable time.

  If f is a opendap address, it must contain also all these variables
  or the ones defined in the input karg settings (DataAccess object)

  To deal with the case of variables in different files/opendap addresses,
  f can also be a dictionary with keys the variables and values the files
  or opendap addresses. In this case, the keys must be:
    - temp
    - salt
    - u
    - v
    - ssh
    - misc, for lon, lat, depth, time and dimensions
      or xy for lon,lat and x,ydim; z for depth and zdim, time for time

  The output data (dict) is suitable to be used by data2roms, which
  interpolates the data to ROMS 3d grid.
  Also outputs an error/status string.

  kargs:
    inds, dict with dimension names/values (where time dim can be integer
          or datetime)
    settings, DataAccess object
    extra, extra misc vars to load [(outKey0,fileVar0),...]
    t_units, units of variable time, by default the att  units is used
  '''

  sett=DataAccess()
  inds={}
  extra=[]
  t_units=[]
  if 'settings' in kargs.keys(): sett    = kargs['settings']
  if 'inds'     in kargs.keys(): inds    = kargs['inds']
  if 'extra'    in kargs.keys(): extra   = kargs['extra']
  if 't_units'  in kargs.keys(): t_units = kargs['t_units']

  res={}
  msg=''

  if not isinstance(f,dict) and not f.startswith('http') and not isfile(f):
    msg='file not found %s' % f
    if not quiet: print msg
    return res, msg

  # load nc files:
  if not isinstance(f,dict):
    f={'temp':f,'salt':f,'u':f,'v':f,'ssh':f,'misc':f}

  if not f.has_key('xy'):   f['xy']   = f['misc']
  if not f.has_key('z'):    f['z']    = f['misc']
  if not f.has_key('time'): f['time'] = f['misc']

  filesUsed=[]
  ncUsed=[]
  for i in f.keys():
    if not quiet: print '(%s) loading from %s' % (i.ljust(5),f[i])

    if i=='temp':
      if f[i] in filesUsed: ncTemp=ncUsed[filesUsed.index(f[i])]
      else:
        ncTemp=netcdf.ncopen(f[i])
        filesUsed+=[f[i]]
        ncUsed+=[ncTemp]

    elif i=='salt':
      if f[i] in filesUsed: ncSalt=ncUsed[filesUsed.index(f[i])]
      else:
        ncSalt=netcdf.ncopen(f[i])
        filesUsed+=[f[i]]
        ncUsed+=[ncSalt]

    elif i=='u':
      if f[i] in filesUsed: ncU=ncUsed[filesUsed.index(f[i])]
      else:
        ncU=netcdf.ncopen(f[i])
        filesUsed+=[f[i]]
        ncUsed+=[ncU]

    elif i=='v':
      if f[i] in filesUsed: ncV=ncUsed[filesUsed.index(f[i])]
      else:
        ncV=netcdf.ncopen(f[i])
        filesUsed+=[f[i]]
        ncUsed+=[ncV]

    elif i=='ssh':
      if f[i] in filesUsed: ncSsh=ncUsed[filesUsed.index(f[i])]
      else:
        ncSsh=netcdf.ncopen(f[i])
        filesUsed+=[f[i]]
        ncUsed+=[ncSsh]

    elif i=='xy':
      if f[i] in filesUsed: ncXy=ncUsed[filesUsed.index(f[i])]
      else:
        ncXy=netcdf.ncopen(f[i])
        filesUsed+=[f[i]]
        ncUsed+=[ncXy]

    elif i=='z':
      if f[i] in filesUsed: ncZ=ncUsed[filesUsed.index(f[i])]
      else:
        ncZ=netcdf.ncopen(f[i])
        filesUsed+=[f[i]]
        ncUsed+=[ncZ]

    elif i=='time':
      if f[i] in filesUsed: ncTime=ncUsed[filesUsed.index(f[i])]
      else:
        ncTime=netcdf.ncopen(f[i])
        filesUsed+=[f[i]]
        ncUsed+=[ncTime]

    elif i=='misc':
      if f[i] in filesUsed: ncMisc=ncUsed[filesUsed.index(f[i])]
      else:
        ncMisc=netcdf.ncopen(f[i])
        filesUsed+=[f[i]]
        ncUsed+=[ncMisc]


  # load dims:
  if not quiet: print '  loading dims...'
  dimsXy=netcdf.fdim(ncXy)
  dimsZ =netcdf.fdim(ncZ)

  res['NX']=dimsXy[sett.xdim]
  res['NY']=dimsXy[sett.ydim]
  ###if sett.z_name:
  if sett.zdim:
    res['NZ']=dimsZ[sett.zdim]
  else:
    res['NZ']=1

  # about horizontal inds:
  if inds.has_key(sett.xdim) and len(inds[sett.xdim])==2 and not isinstance(inds[sett.xdim],basestring):
    if not quiet: print '  calc horizontal inds...'
    xlim=inds[sett.xdim]
    ylim=inds[sett.ydim]

    inds.pop(sett.xdim)
    inds.pop(sett.ydim)

    lon=netcdf.use(ncXy,sett.x_name,**inds)
    if np.any(lon>360): lon=np.mod(lon,360.)
    lat=netcdf.use(ncXy,sett.y_name,**inds)
    i0,i1,j0,j1=calc.ij_limits(lon,lat,xlim,ylim,margin=3)
    inds[sett.xdim]='%d:%d' % (i0,i1)
    inds[sett.ydim]='%d:%d' % (j0,j1)


  if not quiet: print '  loading lon, lat, depth...'
  res['lon']  = netcdf.use(ncXy,sett.x_name,**inds)
  if np.any(res['lon']>360): res['lon']=np.mod(res['lon'],360.)
  res['lat']  = netcdf.use(ncXy,sett.y_name,**inds)
  if sett.z_name:
    res['depth'] = -netcdf.use(ncZ,sett.z_name,**inds)
  else: res['depth']=False

  if res['lon'].size!=res['lat'].size:
    res['lon'],res['lat']=np.meshgrid(res['lon'],res['lat'])
    # needed for griddata, later

  # update nx,ny:
  if inds.has_key(sett.xdim):
    res['NY'],res['NX']=res['lon'].shape

  # extra misc vars:
  if len(extra):
    for outKey,fileVar in extra:
      if not quiet: print '  loading extra misc... %s %s' % (outKey,fileVar)
      res[outKey]=netcdf.use(ncMisc,fileVar,**inds)


  # time:
  # file may have one or several times. If several, time dim must be given
  # with kargs inds!
  if not quiet: print '  loading time...'
  if t_units:
    times=netcdf.use(ncTime,sett.time_name)
    times=netcdftime.num2date(times,t_units)
  else:
    times=netcdf.nctime(ncTime,sett.time_name)

  if inds.has_key(sett.tdim):
    try: tind=dts.parse_date(inds[sett.tdim])
    except: tind=inds[sett.tdim] # is an integer, for instance

    if isinstance(tind,datetime.datetime):
      tind,=np.where(times==tind)
      if tind.size:
        tind=tind[0]
        inds[sett.tdim]=tind # update inds to extract other variables
      else:
        Msg='date not found'
        msg+='\n'+Msg
        return res,msg+' ERROR'

    date=times[tind]
    if not quiet: print '    tind, date= %d %s' % (tind,date.isoformat(' '))

  elif times.size==1:
    date=times[0]
    if not quiet: print '    date= %s' % date.isoformat(' ')
  else: # must provide tind as input!!
    Msg='several dates in file... provice tind!'
    msg+='\n'+Msg
    return res,msg+' ERROR'

  res['date'] = date

  empty3d=np.zeros([res['NZ'],res['NY'],res['NX']])
  empty2d=np.zeros([res['NY'],res['NX']])

  if 'temp' in f.keys():
    if not quiet: print '  loading temp...'
    if sett.temp_name in ncTemp.varnames: res['temp'] = netcdf.use(ncTemp,sett.temp_name,**inds)
    else:
      Msg='var %s not found' % 'temp'
      msg+='\n'+Msg
      if not quiet: print Msg
      res['temp']=empty3d

  if 'salt' in f.keys():
    if not quiet: print '  loading salt...'
    if sett.salt_name in ncSalt.varnames: res['salt'] = netcdf.use(ncSalt,sett.salt_name,**inds)
    else:
      Msg='var %s not found' % 'salt'
      msg+='\n'+Msg
      if not quiet: print Msg
      res['salt']=empty3d

  if 'u' in f.keys():
    if not quiet: print '  loading u...'
    if sett.u_name in ncU.varnames: res['u']    = netcdf.use(ncU,sett.u_name,**inds)
    else:
      Msg='var %s not found' % 'u'
      msg+='\n'+Msg
      if not quiet: print Msg
      res['u']=empty3d

  if 'v' in f.keys():
    if not quiet: print '  loading v...'
    if sett.v_name in ncV.varnames: res['v']    = netcdf.use(ncV,sett.v_name,**inds)
    else:
      Msg='var %s not found' % 'v'
      msg+='\n'+Msg
      if not quiet: print Msg
      res['v']=empty3d

  if 'ssh' in f.keys():
    if not quiet: print '  loading ssh...'
    if sett.ssh_name in ncSsh.varnames: res['ssh']  = netcdf.use(ncSsh,sett.ssh_name,**inds)
    else:
      Msg='var %s not found' % 'ssh'
      msg+='\n'+Msg
      if not quiet: print Msg
      res['ssh']=empty2d

  for nc in ncUsed:
    try:  nc.close()
    except: pass

  return res, msg
示例#29
0
    Usage()

# =============================================================================
# Main

# Open the input file with either scipy's pure-python reader or netCDF4's
# Dataset, depending on the use_scipy flag set earlier.
if use_scipy:
    ncfile = netcdf.netcdf_file(ifile, 'r')
else:
    ncfile = Dataset(ifile)
print('ifile: ' + ifile)
#print('variables: '+str(ncfile.variables))
#print('time: '+str(ncfile.variables['time']))
times = ncfile.variables['time']
# Decode the time axis; the calendar attribute is deliberately not passed
# (the commented-out line shows the calendar-aware variant).
#dates = num2date(times[:],units=times.units,calendar=times.calendar)
dates = num2date(times[:], units=times.units)
#print('dates: '+str(dates))

# Tracks whether the per-variable loop below is on its first variable.
firstvar = True
for var_name in sorted(ncfile.variables.iterkeys()):
    #    print(var_name)
    print var_name,
    sys.stdout.flush()
    var = ncfile.variables[var_name]
    ndims = len(var.shape)
    ofile_base = os.path.splitext(ifile_base)[0] + '_' + var_name
    if ndims < 3:
        print " skipped"
        continue
    if var_name in exclude_vars:
示例#30
0
 def __call__(self, x, pos=0):
     """Format the numeric time value *x* as a date string.

     The strftime pattern is chosen from the locator's current span
     (``self.locator.ndays``); *pos* is accepted for the matplotlib
     formatter protocol and ignored.
     """
     fmt = self.pick_format(ndays=self.locator.ndays)
     date = netcdftime.num2date(x, self.time_units, self.calendar)
     return date.strftime(fmt)
示例#31
0
    def tick_values(self, vmin, vmax):
        """Return tick locations covering [vmin, vmax].

        vmin/vmax are numeric dates in ``self.date_unit``. A resolution
        (yearly ... secondly) is chosen from the span; ticks are built in a
        convenient intermediate unit and converted back to numbers with
        ``netcdftime.date2num`` for the return value.
        """
        # Guard against degenerate (zero-width) ranges.
        vmin, vmax = mtransforms.nonsingular(vmin,
                                             vmax,
                                             expander=1e-7,
                                             tiny=1e-13)

        lower = netcdftime.num2date(vmin, self.date_unit, self.calendar)
        upper = netcdftime.num2date(vmax, self.date_unit, self.calendar)

        # Span of the axis; consumed by the companion formatter to pick a
        # date format string.
        self.ndays = abs(vmax - vmin)

        resolution, n = self.compute_resolution(vmin, vmax, lower, upper)

        if resolution == 'YEARLY':
            # TODO START AT THE BEGINNING OF A DECADE/CENTURY/MILLENIUM as appropriate.
            years = self._max_n_locator.tick_values(lower.year, upper.year)
            ticks = [netcdftime.datetime(int(year), 1, 1) for year in years]
        elif resolution == 'MONTHLY':
            # TODO START AT THE BEGINNING OF A DECADE/CENTURY/MILLENIUM as appropriate.
            months_offset = self._max_n_locator.tick_values(0, n)
            ticks = []
            # NOTE(review): the arithmetic below uses the 1-based lower.month
            # directly in the floor/modulo, which appears to shift each tick
            # by one month (offset 0 from January yields February) -- confirm
            # against upstream before relying on exact tick positions.
            for offset in months_offset:
                year = lower.year + np.floor((lower.month + offset) / 12)
                month = ((lower.month + offset) % 12) + 1
                ticks.append(netcdftime.datetime(int(year), int(month), 1))
        elif resolution == 'DAILY':
            # TODO: It would be great if this favoured multiples of 7.
            # Daily ticks are computed directly in the axis' own unit.
            days = self._max_n_locator_days.tick_values(vmin, vmax)
            ticks = [
                netcdftime.num2date(dt, self.date_unit, self.calendar)
                for dt in days
            ]
        elif resolution == 'HOURLY':
            # Work in hours relative to a fixed epoch so the locator sees
            # small, well-conditioned numbers.
            hour_unit = 'hours since 2000-01-01'
            in_hours = netcdftime.date2num([lower, upper], hour_unit,
                                           self.calendar)
            hours = self._max_n_locator.tick_values(in_hours[0], in_hours[1])
            ticks = [
                netcdftime.num2date(dt, hour_unit, self.calendar)
                for dt in hours
            ]
        elif resolution == 'MINUTELY':
            minute_unit = 'minutes since 2000-01-01'
            in_minutes = netcdftime.date2num([lower, upper], minute_unit,
                                             self.calendar)
            minutes = self._max_n_locator.tick_values(in_minutes[0],
                                                      in_minutes[1])
            ticks = [
                netcdftime.num2date(dt, minute_unit, self.calendar)
                for dt in minutes
            ]
        elif resolution == 'SECONDLY':
            second_unit = 'seconds since 2000-01-01'
            in_seconds = netcdftime.date2num([lower, upper], second_unit,
                                             self.calendar)
            seconds = self._max_n_locator.tick_values(in_seconds[0],
                                                      in_seconds[1])
            ticks = [
                netcdftime.num2date(dt, second_unit, self.calendar)
                for dt in seconds
            ]
        else:
            raise ValueError(
                'Resolution {} not implemented yet.'.format(resolution))

        # Convert the datetime-like ticks back to numbers in the axis unit.
        return netcdftime.date2num(ticks, self.date_unit, self.calendar)
示例#32
0
    def updateVariable(self):
        """Refresh dialog state for the currently selected NetCDF variable.

        Opens the GDAL subdataset NETCDF:"file":variable, reads its metadata
        to discover extra (non-spatial) dimensions, converts time-dimension
        values to date strings when netcdftime is available, and rebuilds the
        CRS selector and the per-dimension menus accordingly.
        """
        dim_map = dict()
        self.dim_names = []
        self.dim_values = dict()
        self.dim_values2 = dict()
        self.dim_def = dict()
        self.dim_band = dict()
        self.dim1Count = 0
        self.dim2Count = 0
        self.clear()
        uri = 'NETCDF:"%s":%s' % (self.ui.leFileName.text(),
                                  self.ui.cboVars.currentText())

        if debug > 0:
            print('updateVariable ' + str(uri))

        #look for extra dim definitions
        #  NETCDF_DIM_EXTRA={time,tile}
        #  NETCDF_DIM_tile_DEF={3,6}
        #  NETCDF_DIM_tile_VALUES={1,2,3}
        #  NETCDF_DIM_time_DEF={12,6}
        #  NETCDF_DIM_time_VALUES={1,32,60,91,121,152,182,213,244,274,305,335}

        # open file and get basic info
        # (errors are silenced so a bad subdataset just aborts the refresh)
        gdal.PushErrorHandler('CPLQuietErrorHandler')
        ds = gdal.Open(uri)
        gdal.PopErrorHandler()
        if ds is None:
            return
        wkt = ds.GetProjection()
        md = ds.GetMetadata()
        ds = None
        if md is None:
            return

        # update CRS selectors
        projectCrs = self.iface.mapCanvas().mapRenderer().destinationCrs()
        self.ui.cboCrs.setItemText(
            1,
            self.tr("Project") + " (%s, %s)" %
            (projectCrs.description(), projectCrs.authid()))

        if not wkt or not self.layerCrs.createFromWkt(wkt):
            self.layerCrs = QgsCoordinateReferenceSystem()
        if debug > 0:
            print('wkt: ' + wkt + ' layer desc:' + self.layerCrs.description())

        # if layer has valid crs, use that, if not use selected crs
        if self.layerCrs.description():
            self.ui.cboCrs.setItemText(
                0,
                self.tr("Layer") + " (%s, %s)" %
                (self.layerCrs.description(), self.layerCrs.authid()))
            self.ui.cboCrs.setCurrentIndex(0)
        else:
            self.ui.cboCrs.setItemText(0, self.tr("Layer (None)"))
            if self.selectedCrs.description():
                self.ui.cboCrs.setItemText(
                    2,
                    self.tr("Selected") + " (%s, %s)" %
                    (self.selectedCrs.description(), self.selectedCrs.authid())
                )
            else:
                self.ui.cboCrs.setItemText(2, self.tr("Selected (None)"))
            self.ui.cboCrs.setCurrentIndex(2)

        ds = None

        # iterate over all md items looking for dim info
        for key in sorted(md.iterkeys()):
            if key.startswith('NETCDF_DIM_'):
                line = "%s=%s" % (key, md[key])
                m = re.search('^(NETCDF_DIM_.+)={(.+)}', line)
                if m is not None:
                    dim_map[m.group(1)] = m.group(2)

        if not 'NETCDF_DIM_EXTRA' in dim_map:
            self.warning()
            return

        # Parse the comma-separated dim names, then their VALUES and DEF
        # entries into numeric lists.
        tok = dim_map['NETCDF_DIM_EXTRA']
        if tok is not None:
            for dim in tok.split(','):
                self.dim_names.append(dim)
                tok2 = dim_map.get('NETCDF_DIM_' + dim + '_VALUES')
                self.dim_values[dim] = []
                if tok2 is not None:
                    for s in tok2.split(','):
                        self.dim_values[dim].append(num(s))
                tok2 = dim_map.get('NETCDF_DIM_' + dim + '_DEF')
                self.dim_def[dim] = []
                if tok2 is not None:
                    for s in tok2.split(','):
                        self.dim_def[dim].append(num(s))

        # remove any dims which have only 1 element
        dim_names = self.dim_names
        self.dim_names = []
        for dim in dim_names:
            if self.dim_def[dim][0] <= 1:
                del self.dim_values[dim]
                del self.dim_def[dim]
            else:
                self.dim_names.append(dim)

        # transform time dimensions - currently requires netcdftime from python-netcdf4
        if has_netcdftime:
            for dim in dim_names:
                #dim+"#standard_name" in md and md[dim+"#standard_name"] == "time":
                if dim in self.dim_values:
                    if dim + "#units" in md:
                        timestr = md[dim + "#units"]
                        units = timestr.split()[0].lower()
                        if units in _units:
                            try:
                                dates = num2date(self.dim_values[dim],
                                                 units=timestr)
                            except ValueError:
                                continue
                            self.dim_values2[dim] = []
                            # Collapse to YYYY-MM-DD when every value falls
                            # exactly on midnight.
                            only_days = True
                            for date in dates:
                                val = date.isoformat(
                                    " "
                                )  # equivalent to strftime("%Y-%m-%d %H:%M:%S")
                                if not val.endswith(" 00:00:00"):
                                    only_days = False
                                self.dim_values2[dim].append(val)
                            if only_days:
                                for i in range(0, len(self.dim_values2[dim])):
                                    self.dim_values2[dim][
                                        i] = self.dim_values2[dim][i][0:10]

        if debug > 1:
            print(str(dim_map))
            print(str(self.dim_names))
            print(str(self.dim_def))
            print(str(self.dim_values))
            print(str(self.dim_values2))

        # update UI
        self.ui.pbnDim1.setEnabled(False)
        self.ui.pbnDim2.setEnabled(False)

        # Build the first dimension's checkable menu (time labels preferred
        # over raw values when available).
        if len(self.dim_names) > 0:
            dim = self.dim_names[0]
            self.ui.lblDim1.setText(dim)
            menu = MyMenu()
            action = QAction(self.tr('all/none'), menu)
            action.setCheckable(True)
            menu.addAction(action)
            for i in range(0, len(self.dim_values[dim])):
                # NOTE(review): dim2Count is incremented here in the dim-1
                # loop and dim1Count is never updated -- looks like a
                # copy/paste slip; confirm before relying on these counters.
                self.dim2Count = self.dim2Count + 1
                value = self.dim_values2[dim][
                    i] if dim in self.dim_values2 else self.dim_values[dim][i]
                action = QAction(str(value), menu)
                action.setCheckable(True)
                menu.addAction(action)
            self.ui.pbnDim1.setMenu(menu)
            QObject.connect(self.ui.pbnDim1.menu(),
                            SIGNAL("triggered(QAction *)"),
                            self.on_pbnDimx_triggered)
            # click first element of each dim
            if len(menu.actions()) > 1:
                menu.actions()[1].setChecked(True)
            self.ui.pbnDim1.setEnabled(True)

        # Build the second dimension's menu (raw values only).
        if len(self.dim_names) > 1:
            dim = self.dim_names[1]
            self.ui.lblDim2.setText(dim)
            menu = MyMenu()
            action = QAction(self.tr('all/none'), menu)
            action.setCheckable(True)
            menu.addAction(action)
            for i in range(0, len(self.dim_values[dim])):
                self.dim2Count = self.dim2Count + 1
                value = self.dim_values[dim][i]
                action = QAction(str(value), menu)
                action.setCheckable(True)
                menu.addAction(action)
            self.ui.pbnDim2.setMenu(menu)
            QObject.connect(self.ui.pbnDim2.menu(),
                            SIGNAL("triggered(QAction *)"),
                            self.on_pbnDimx_triggered)
            # click first element of each dim
            if len(menu.actions()) > 1:
                menu.actions()[1].setChecked(True)
            self.ui.pbnDim2.setEnabled(True)

        # make sure we found something, if not notify user
        if len(self.dim_names) == 0:
            self.warning2()
        self.updateURI()
        self.updateDims()
示例#33
0
    def runTest(self):
        """testing netcdftime"""
        # test mixed julian/gregorian calendar
        # check attributes.
        self.assertTrue(self.cdftime_mixed.units == 'hours')
        self.assertTrue(
            str(self.cdftime_mixed.origin) == '   1-01-01 00:00:00')
        self.assertTrue(self.cdftime_mixed.unit_string ==
                        'hours since 0001-01-01 00:00:00')
        self.assertTrue(self.cdftime_mixed.calendar == 'standard')
        # check date2num method. (date before switch)
        d = datetime(1582, 10, 4, 23)
        t1 = self.cdftime_mixed.date2num(d)
        assert_almost_equal(t1, 13865687.0)
        # check num2date method.
        d2 = self.cdftime_mixed.num2date(t1)
        self.assertTrue(str(d) == str(d2))
        # this is a non-existant date, should raise ValueError.
        d = datetime(1582, 10, 5, 0)
        self.assertRaises(ValueError, self.cdftime_mixed.date2num, d)
        # check date2num/num2date with date after switch.
        d = datetime(1582, 10, 15, 0)
        t2 = self.cdftime_mixed.date2num(d)
        assert_almost_equal(t2, 13865688.0)
        d2 = self.cdftime_mixed.num2date(t2)
        self.assertTrue(str(d) == str(d2))
        # check day of year.
        ndayr = d.timetuple()[7]
        self.assertTrue(ndayr == 288)
        # test using numpy arrays.
        t = numpy.arange(t2, t2 + 240.0, 12.)
        t = numpy.reshape(t, (4, 5))
        d = self.cdftime_mixed.num2date(t)
        self.assertTrue(d.shape == t.shape)
        d_check = "1582-10-15 00:00:001582-10-15 12:00:001582-10-16 00:00:001582-10-16 12:00:001582-10-17 00:00:001582-10-17 12:00:001582-10-18 00:00:001582-10-18 12:00:001582-10-19 00:00:001582-10-19 12:00:001582-10-20 00:00:001582-10-20 12:00:001582-10-21 00:00:001582-10-21 12:00:001582-10-22 00:00:001582-10-22 12:00:001582-10-23 00:00:001582-10-23 12:00:001582-10-24 00:00:001582-10-24 12:00:00"
        d2 = [str(dd) for dd in d.flat]
        self.assertTrue(d_check == ''.join(d2))
        # test julian calendar with numpy arrays
        d = self.cdftime_julian.num2date(t)
        self.assertTrue(d.shape == t.shape)
        d_check = "1582-10-05 00:00:001582-10-05 12:00:001582-10-06 00:00:001582-10-06 12:00:001582-10-07 00:00:001582-10-07 12:00:001582-10-08 00:00:001582-10-08 12:00:001582-10-09 00:00:001582-10-09 12:00:001582-10-10 00:00:001582-10-10 12:00:001582-10-11 00:00:001582-10-11 12:00:001582-10-12 00:00:001582-10-12 12:00:001582-10-13 00:00:001582-10-13 12:00:001582-10-14 00:00:001582-10-14 12:00:00"
        d2 = [str(dd) for dd in d.flat]
        self.assertTrue(d_check == ''.join(d2))
        # test proleptic gregorian calendar.
        self.assertTrue(self.cdftime_pg.units == 'seconds')
        self.assertTrue(str(self.cdftime_pg.origin) == '   1-01-01 00:00:00')
        self.assertTrue(
            self.cdftime_pg.unit_string == 'seconds since 0001-01-01 00:00:00')
        self.assertTrue(self.cdftime_pg.calendar == 'proleptic_gregorian')
        # check date2num method.
        d = datetime(1990, 5, 5, 2, 17)
        t1 = numpy.around(self.cdftime_pg.date2num(d))
        self.assertTrue(t1 == 62777470620.0)
        # check num2date method.
        d2 = self.cdftime_pg.num2date(t1)
        self.assertTrue(str(d) == str(d2))
        # check day of year.
        ndayr = d.timetuple()[7]
        self.assertTrue(ndayr == 125)
        # check noleap calendar.
        # this is a non-existant date, should raise ValueError.
        self.assertRaises(ValueError,
                          utime,
                          'days since 1600-02-29 00:00:00',
                          calendar='noleap')
        self.assertTrue(self.cdftime_noleap.units == 'days')
        self.assertTrue(
            str(self.cdftime_noleap.origin) == '1600-02-28 00:00:00')
        self.assertTrue(self.cdftime_noleap.unit_string ==
                        'days since 1600-02-28 00:00:00')
        self.assertTrue(self.cdftime_noleap.calendar == 'noleap')
        assert_almost_equal(
            self.cdftime_noleap.date2num(self.cdftime_noleap.origin), 0.0)
        # check date2num method.
        d1 = datetime(2000, 2, 28)
        d2 = datetime(1600, 2, 28)
        t1 = self.cdftime_noleap.date2num(d1)
        t2 = self.cdftime_noleap.date2num(d2)
        assert_almost_equal(t1, 400 * 365.)
        assert_almost_equal(t2, 0.)
        t12 = self.cdftime_noleap.date2num([d1, d2])
        assert_almost_equal(t12, [400 * 365., 0])
        # check num2date method.
        d2 = self.cdftime_noleap.num2date(t1)
        self.assertTrue(str(d1) == str(d2))
        # check day of year.
        ndayr = d2.timetuple()[7]
        self.assertTrue(ndayr == 59)
        # non-existant date, should raise ValueError.
        date = datetime(2000, 2, 29)
        self.assertRaises(ValueError, self.cdftime_noleap.date2num, date)
        # check all_leap calendar.
        self.assertTrue(self.cdftime_leap.units == 'days')
        self.assertTrue(str(self.cdftime_leap.origin) == '1600-02-29 00:00:00')
        self.assertTrue(
            self.cdftime_leap.unit_string == 'days since 1600-02-29 00:00:00')
        self.assertTrue(self.cdftime_leap.calendar == 'all_leap')
        assert_almost_equal(
            self.cdftime_leap.date2num(self.cdftime_leap.origin), 0.0)
        # check date2num method.
        d1 = datetime(2000, 2, 29)
        d2 = datetime(1600, 2, 29)
        t1 = self.cdftime_leap.date2num(d1)
        t2 = self.cdftime_leap.date2num(d2)
        assert_almost_equal(t1, 400 * 366.)
        assert_almost_equal(t2, 0.)
        # check num2date method.
        d2 = self.cdftime_leap.num2date(t1)
        self.assertTrue(str(d1) == str(d2))
        # check day of year.
        ndayr = d2.timetuple()[7]
        self.assertTrue(ndayr == 60)
        # double check date2num,num2date methods.
        d = datetime(2000, 12, 31)
        t1 = self.cdftime_mixed.date2num(d)
        d2 = self.cdftime_mixed.num2date(t1)
        self.assertTrue(str(d) == str(d2))
        ndayr = d2.timetuple()[7]
        self.assertTrue(ndayr == 366)
        # check 360_day calendar.
        self.assertTrue(self.cdftime_360day.units == 'days')
        self.assertTrue(
            str(self.cdftime_360day.origin) == '1600-02-30 00:00:00')
        self.assertTrue(self.cdftime_360day.unit_string ==
                        'days since 1600-02-30 00:00:00')
        self.assertTrue(self.cdftime_360day.calendar == '360_day')
        assert_almost_equal(
            self.cdftime_360day.date2num(self.cdftime_360day.origin), 0.0)
        # check date2num,num2date methods.
        # use datetime from netcdftime, since this date doesn't
        # exist in "normal" calendars.
        d = datetimex(2000, 2, 30)
        t1 = self.cdftime_360day.date2num(d)
        assert_almost_equal(t1, 360 * 400.)
        d2 = self.cdftime_360day.num2date(t1)
        assert_equal(str(d), str(d2))
        # check day of year.
        d = datetime(2001, 12, 30)
        t = self.cdftime_360day.date2num(d)
        assert_almost_equal(t, 144660.0)
        date = self.cdftime_360day.num2date(t)
        self.assertTrue(str(d) == str(date))
        ndayr = date.timetuple()[7]
        self.assertTrue(ndayr == 360)
        # Check fraction
        d = datetime(1969, 12, 30, 12)
        t = self.cdftime_360day.date2num(d)
        date = self.cdftime_360day.num2date(t)
        assert_equal(str(d), str(date))
        # test proleptic julian calendar.
        d = datetime(1858, 11, 17, 12)
        t = self.cdftime_jul.date2num(d)
        assert_almost_equal(t, 7528932.0)
        d1 = datetime(1582, 10, 4, 23)
        d2 = datetime(1582, 10, 15, 0)
        assert_almost_equal(
            self.cdftime_jul.date2num(d1) + 241.0,
            self.cdftime_jul.date2num(d2))
        date = self.cdftime_jul.num2date(t)
        self.assertTrue(str(d) == str(date))
        # test julian day from date, date from julian day
        d = datetime(1858, 11, 17)
        mjd = JulianDayFromDate(d)
        assert_almost_equal(mjd, 2400000.5)
        date = DateFromJulianDay(mjd)
        self.assertTrue(str(date) == str(d))
        # test iso 8601 units string
        d = datetime(1970, 1, 1, 1)
        t = self.cdftime_iso.date2num(d)
        assert_equal(numpy.around(t), 3600)
        # test fix for issue 75 (seconds hit 60 at end of month,
        # day goes out of range).
        t = 733499.0
        d = num2date(t, units='days since 0001-01-01 00:00:00')
        dateformat = '%Y-%m-%d %H:%M:%S'
        assert_equal(d.strftime(dateformat), '2009-04-01 00:00:00')
        # test edge case of issue 75 for numerical problems
        for t in (733498.999, 733498.9999, 733498.99999, 733498.999999,
                  733498.9999999):
            d = num2date(t, units='days since 0001-01-01 00:00:00')
            t2 = date2num(d, units='days since 0001-01-01 00:00:00')
            assert (abs(t2 - t) < 1e-5)  # values should be less than second
        # Check equality testing
        d1 = datetimex(1979, 6, 21, 9, 23, 12)
        d2 = datetime(1979, 6, 21, 9, 23, 12)
        assert (d1 == d2)
        # check timezone offset
        d = datetime(2012, 2, 29, 15)
        # mixed_tz is -6 hours from UTC, mixed is UTC so
        # difference in elapsed time is 6 hours.
        assert (self.cdftime_mixed_tz.date2num(d) -
                self.cdftime_mixed.date2num(d) == 6)

        # Check comparisons with Python datetime types

        # Note that d1 has to use the proleptic Gregorian calendar to
        # be comparable to d2: datetime.datetime uses the proleptic
        # Gregorian calendar and year 1000 is before the
        # Julian/Gregorian transition (1582-10-15).
        d1 = num2date(0, 'days since 1000-01-01', 'proleptic_gregorian')

        d2 = datetime(2000, 1, 1)

        # The date d3 is well after the Julian/Gregorian transition
        # and so this Gregorian date can be compared to the proleptic
        # Gregorian date d2.
        d3 = num2date(0, 'days since 3000-01-01', 'standard')
        assert d1 < d2
        assert d2 < d3

        # check all comparisons
        assert d1 != d2
        assert d1 <= d2
        assert d2 > d1
        assert d2 >= d1

        # check datetime hash
        d1 = datetimex(1995, 1, 1)
        d2 = datetime(1995, 1, 1)
        d3 = datetimex(2001, 2, 30)
        assert hash(d1) == hash(d1)
        assert hash(d1) == hash(d2)
        assert hash(d1) != hash(d3)
        assert hash(d3) == hash(d3)

        # check datetime immutability
        # using assertRaises as a context manager
        # only works with python >= 2.7 (issue #497).
        immutability_tests = {
            "year": 1999,
            "month": 6,
            "day": 5,
            "hour": 10,
            "minute": 33,
            "second": 45,
            "dayofwk": 1,
            "dayofyr": 52,
            "format": '%Y'
        }

        for name, value in immutability_tests.items():
            self.assertRaises(AttributeError, setattr, d1, name, value)

        # Check leading white space
        self.assertEqual(str(self.cdftime_leading_space.origin),
                         ' 850-01-01 00:00:00')

        #issue 330
        units = "seconds since 1970-01-01T00:00:00Z"
        t = utime(units)
        for n in range(10):
            assert n == int(round(t.date2num(t.num2date(n))))

        #issue 344
        units = 'hours since 2013-12-12T12:00:00'
        assert (1.0 == date2num(num2date(1.0, units), units))

        # test rountrip accuracy
        # also tests error found in issue #349
        calendars=['standard', 'gregorian', 'proleptic_gregorian', 'noleap', 'julian',\
                   'all_leap', '365_day', '366_day', '360_day']
        dateformat = '%Y-%m-%d %H:%M:%S'
        dateref = datetime(2015, 2, 28, 12)
        ntimes = 1001
        for calendar in calendars:
            eps = 100.
            units = 'microseconds since 1800-01-30 01:01:01'
            microsecs1 = date2num(dateref, units, calendar=calendar)
            for n in range(ntimes):
                microsecs1 += 1.
                date1 = num2date(microsecs1, units, calendar=calendar)
                microsecs2 = date2num(date1, units, calendar=calendar)
                date2 = num2date(microsecs2, units, calendar=calendar)
                err = numpy.abs(microsecs1 - microsecs2)
                assert (err < eps)
                assert (
                    date1.strftime(dateformat) == date2.strftime(dateformat))
            units = 'milliseconds since 1800-01-30 01:01:01'
            eps = 0.1
            millisecs1 = date2num(dateref, units, calendar=calendar)
            for n in range(ntimes):
                millisecs1 += 0.001
                date1 = num2date(millisecs1, units, calendar=calendar)
                millisecs2 = date2num(date1, units, calendar=calendar)
                date2 = num2date(millisecs2, units, calendar=calendar)
                err = numpy.abs(millisecs1 - millisecs2)
                assert (err < eps)
                assert (
                    date1.strftime(dateformat) == date2.strftime(dateformat))
            eps = 1.e-4
            units = 'seconds since 0001-01-30 01:01:01'
            secs1 = date2num(dateref, units, calendar=calendar)
            for n in range(ntimes):
                secs1 += 0.1
                date1 = num2date(secs1, units, calendar=calendar)
                secs2 = date2num(date1, units, calendar=calendar)
                date2 = num2date(secs2, units, calendar=calendar)
                err = numpy.abs(secs1 - secs2)
                assert (err < eps)
                assert (
                    date1.strftime(dateformat) == date2.strftime(dateformat))
            eps = 1.e-5
            units = 'minutes since 0001-01-30 01:01:01'
            mins1 = date2num(dateref, units, calendar=calendar)
            for n in range(ntimes):
                mins1 += 0.01
                date1 = num2date(mins1, units, calendar=calendar)
                mins2 = date2num(date1, units, calendar=calendar)
                date2 = num2date(mins2, units, calendar=calendar)
                err = numpy.abs(mins1 - mins2)
                assert (err < eps)
                assert (
                    date1.strftime(dateformat) == date2.strftime(dateformat))
            eps = 1.e-5
            units = 'hours since 0001-01-30 01:01:01'
            hrs1 = date2num(dateref, units, calendar=calendar)
            for n in range(ntimes):
                hrs1 += 0.001
                date1 = num2date(hrs1, units, calendar=calendar)
                hrs2 = date2num(date1, units, calendar=calendar)
                date2 = num2date(hrs2, units, calendar=calendar)
                err = numpy.abs(hrs1 - hrs2)
                assert (err < eps)
                assert (
                    date1.strftime(dateformat) == date2.strftime(dateformat))
            eps = 1.e-5
            units = 'days since 0001-01-30 01:01:01'
            days1 = date2num(dateref, units, calendar=calendar)
            for n in range(ntimes):
                days1 += 0.00001
                date1 = num2date(days1, units, calendar=calendar)
                days2 = date2num(date1, units, calendar=calendar)
                date2 = num2date(days2, units, calendar=calendar)
                err = numpy.abs(days1 - days2)
                assert (err < eps)
                assert (
                    date1.strftime(dateformat) == date2.strftime(dateformat))

        # issue 353
        assert (num2date(0, 'hours since 2000-01-01 0') == datetime(
            2000, 1, 1, 0))

        # issue 354
        num1 = numpy.array([[0, 1], [2, 3]])
        num2 = numpy.array([[0, 1], [2, 3]])
        dates1 = num2date(num1, 'days since 0001-01-01')
        dates2 = num2date(num2, 'days since 2001-01-01')
        assert (dates1.shape == (2, 2))
        assert (dates2.shape == (2, 2))
        num1b = date2num(dates1, 'days since 0001-01-01')
        num2b = date2num(dates2, 'days since 2001-01-01')
        assert (num1b.shape == (2, 2))
        assert (num2b.shape == (2, 2))
        assert_almost_equal(num1, num1b)
        assert_almost_equal(num2, num2b)

        # issue 357 (make sure time zone offset in units done correctly)
        # Denver time, 7 hours behind UTC
        units = 'hours since 1682-10-15 -07:00 UTC'
        # date after gregorian switch, python datetime used
        date = datetime(1682, 10, 15)  # assumed UTC
        num = date2num(date, units)
        # UTC is 7 hours ahead of units, so num should be 7
        assert (num == 7)
        assert (num2date(num, units) == date)
        units = 'hours since 1482-10-15 -07:00 UTC'
        # date before gregorian switch, netcdftime datetime used
        date = datetime(1482, 10, 15)
        num = date2num(date, units)
        date2 = num2date(num, units)
        assert (num == 7)
        assert (date2.year == date.year)
        assert (date2.month == date.month)
        assert (date2.day == date.day)
        assert (date2.hour == date.hour)
        assert (date2.minute == date.minute)
        assert (date2.second == date.second)

        # issue 362: case insensitive calendars
        self.assertTrue(self.cdftime_mixed_capcal.calendar == 'standard')
        self.assertTrue(self.cdftime_noleap_capcal.calendar == 'noleap')
        d = datetime(2015, 3, 4, 12, 18, 30)
        units = 'days since 0001-01-01'
        for cap_cal, low_cal in (('STANDARD', 'standard'), ('NoLeap',
                                                            'noleap'),
                                 ('Gregorian', 'gregorian'), ('ALL_LEAP',
                                                              'all_leap')):
            d1 = date2num(d, units, cap_cal)
            d2 = date2num(d, units, low_cal)
            self.assertEqual(d1, d2)
            self.assertEqual(num2date(d1, units, cap_cal),
                             num2date(d1, units, low_cal))
        # issue 415
        t = datetimex(2001, 12, 1, 2, 3, 4)
        self.assertEqual(t, copy.deepcopy(t))

        # issue 442
        units = "days since 0000-01-01 00:00:00"
        # this should fail (year zero not allowed with real-world calendars)
        try:
            date2num(datetime(1, 1, 1), units, calendar='standard')
        except ValueError:
            pass
        # this should not fail (year zero allowed in 'fake' calendars)
        t = date2num(datetime(1, 1, 1), units, calendar='360_day')
        self.assertEqual(t, 360)
        d = num2date(t, units, calendar='360_day')
        self.assertEqual(d, Datetime360Day(1, 1, 1))
        d = num2date(0, units, calendar='360_day')
        self.assertEqual(d, Datetime360Day(0, 1, 1))

        # list around missing dates in Gregorian calendar
        # scalar
        units = 'days since 0001-01-01 12:00:00'
        t1 = date2num(datetime(1582, 10, 4), units, calendar='gregorian')
        t2 = date2num(datetime(1582, 10, 15), units, calendar='gregorian')
        self.assertEqual(t1 + 1, t2)
        # list
        t1, t2 = date2num([datetime(1582, 10, 4),
                           datetime(1582, 10, 15)],
                          units,
                          calendar='gregorian')
        self.assertEqual(t1 + 1, t2)
        t1, t2 = date2num([datetime(1582, 10, 4),
                           datetime(1582, 10, 15)],
                          units,
                          calendar='standard')
        self.assertEqual(t1 + 1, t2)
        # this should fail: days missing in Gregorian calendar
        try:
            t1, t2, t3 = date2num([
                datetime(1582, 10, 4),
                datetime(1582, 10, 10),
                datetime(1582, 10, 15)
            ],
                                  units,
                                  calendar='standard')
        except ValueError:
            pass
    def updateVariable(self):
        """Refresh the dialog for the NetCDF variable currently selected.

        Opens the GDAL ``NETCDF:`` subdataset for the chosen file/variable,
        reads its projection and metadata, updates the CRS selectors, builds
        the checkable menus for any extra dimensions (e.g. time, tile), and
        finally refreshes the URI and dimension summary.  Returns silently
        if the dataset cannot be opened or carries no metadata.
        """
        dim_map = dict()
        self.dim_names = []
        self.dim_values = dict()
        self.dim_values2 = dict()  # human-readable (date) labels, keyed by dim
        self.dim_def = dict()
        self.dim_band = dict()
        self.dim1Count = 0
        self.dim2Count = 0
        self.clear()
        uri = 'NETCDF:"%s":%s' % (self.ui.leFileName.text(), self.ui.cboVars.currentText())

        if debug>0:
            print('updateVariable '+str(uri))

        #look for extra dim definitions
        #  NETCDF_DIM_EXTRA={time,tile}
        #  NETCDF_DIM_tile_DEF={3,6}
        #  NETCDF_DIM_tile_VALUES={1,2,3}
        #  NETCDF_DIM_time_DEF={12,6}
        #  NETCDF_DIM_time_VALUES={1,32,60,91,121,152,182,213,244,274,305,335}

        # open file and get basic info (errors silenced via GDAL handler)
        gdal.PushErrorHandler('CPLQuietErrorHandler')
        ds = gdal.Open(uri)
        gdal.PopErrorHandler()
        if ds is None:
            return
        wkt = ds.GetProjection()
        md = ds.GetMetadata()
        ds = None
        if md is None:
            return

        # update CRS selectors
        projectCrs = self.iface.mapCanvas().mapRenderer().destinationCrs()
        self.ui.cboCrs.setItemText(1, 
                                   self.tr( "Project" ) + " (%s, %s)" % (projectCrs.description(), projectCrs.authid()) )
        
        if not wkt or not self.layerCrs.createFromWkt(wkt):
            self.layerCrs = QgsCoordinateReferenceSystem()
        if debug > 0:
            print('wkt: '+wkt+' layer desc:'+self.layerCrs.description())

        # if layer has valid crs, use that, if not use selected crs
        if self.layerCrs.description():
            self.ui.cboCrs.setItemText(0, 
                                       self.tr( "Layer" ) + " (%s, %s)" % (self.layerCrs.description(), self.layerCrs.authid()) )
            self.ui.cboCrs.setCurrentIndex(0)
        else:
            self.ui.cboCrs.setItemText(0, self.tr("Layer (None)"))
            if self.selectedCrs.description():
                self.ui.cboCrs.setItemText(2, 
                                           self.tr( "Selected" ) + " (%s, %s)" % (self.selectedCrs.description(), self.selectedCrs.authid()) )
            else:
                self.ui.cboCrs.setItemText(2, self.tr("Selected (None)"))
            self.ui.cboCrs.setCurrentIndex(2)

        ds = None

        # iterate over all md items looking for dim info
        for key in sorted(md.iterkeys()):
            if key.startswith('NETCDF_DIM_'):
                line="%s=%s" % (key,md[key])
                m = re.search('^(NETCDF_DIM_.+)={(.+)}', line)
                if m is not None:
                    dim_map[ m.group(1) ] = m.group(2)

        if not 'NETCDF_DIM_EXTRA' in dim_map:
            self.warning()
            return
        
        # parse the per-dimension VALUES/DEF metadata into numeric lists
        tok = dim_map['NETCDF_DIM_EXTRA']
        if tok is not None:
            for dim in tok.split(','):
                self.dim_names.append( dim )
                tok2 = dim_map.get('NETCDF_DIM_'+dim+'_VALUES')
                self.dim_values[ dim ] = []
                if tok2 is not None:
                    for s in tok2.split(','):
                        self.dim_values[ dim ].append(num(s))
                tok2 = dim_map.get('NETCDF_DIM_'+dim+'_DEF')
                self.dim_def[ dim ] = []
                if tok2 is not None:
                    for s in tok2.split(','):
                        self.dim_def[ dim ].append(num(s))

        # remove any dims which have only 1 element
        dim_names = self.dim_names
        self.dim_names = []
        for dim in dim_names:
            if self.dim_def[dim][0] <= 1:
                del self.dim_values[dim]
                del self.dim_def[dim]
            else:
                self.dim_names.append(dim)

        # transform time dimensions - currently requires netcdftime from python-netcdf4              
        if has_netcdftime:
            for dim in dim_names:
                #dim+"#standard_name" in md and md[dim+"#standard_name"] == "time":
                if dim in self.dim_values:
                    if dim+"#units" in md:
                        timestr = md[ dim+"#units" ]
                        units = timestr.split()[0].lower()
                        if units in _units:
                            try:
                                dates = num2date(self.dim_values[dim],units=timestr)
                            except ValueError:
                                continue
                            self.dim_values2[ dim ] = []
                            only_days = True
                            for date in dates:
                                val = date.isoformat(" ") # equivalent to strftime("%Y-%m-%d %H:%M:%S")
                                if not val.endswith(" 00:00:00"):
                                    only_days = False
                                self.dim_values2[ dim ].append(val)
                            # if no date has a time-of-day component, trim to YYYY-MM-DD
                            if only_days:
                                for i in range(0,len(self.dim_values2[ dim ])):
                                    self.dim_values2[dim][i] = self.dim_values2[dim][i][0:10]

        if debug>1:
            print(str(dim_map))
            print(str(self.dim_names))
            print(str(self.dim_def))
            print(str(self.dim_values))
            print(str(self.dim_values2))


        # update UI
        self.ui.pbnDim1.setEnabled(False)
        self.ui.pbnDim2.setEnabled(False)

        if len(self.dim_names) > 0:
            dim = self.dim_names[0]
            self.ui.lblDim1.setText( dim )
            menu = MyMenu()
            action = QAction(self.tr('all/none'),menu)
            action.setCheckable(True)
            menu.addAction(action)
            for i in range(0,len(self.dim_values[dim])):
                # BUG FIX: this loop previously incremented self.dim2Count
                # (copy/paste from the second-dimension loop below); count
                # first-dimension entries in dim1Count.
                self.dim1Count = self.dim1Count + 1
                value = self.dim_values2[dim][i] if dim in self.dim_values2 else self.dim_values[dim][i]
                action = QAction(str(value),menu)
                action.setCheckable(True)
                menu.addAction(action)                
            self.ui.pbnDim1.setMenu(menu)
            QObject.connect(self.ui.pbnDim1.menu(), SIGNAL("triggered(QAction *)"), self.on_pbnDimx_triggered)   
            # click first element of each dim
            if len(menu.actions()) > 1:
                menu.actions()[1].setChecked(True)
            self.ui.pbnDim1.setEnabled(True)

        if len(self.dim_names) > 1:
            dim = self.dim_names[1]
            self.ui.lblDim2.setText( dim )
            menu = MyMenu()
            action = QAction(self.tr('all/none'),menu)
            action.setCheckable(True)
            menu.addAction(action)
            for i in range(0,len(self.dim_values[dim])):
                self.dim2Count = self.dim2Count + 1
                value = self.dim_values[dim][i]
                action = QAction(str(value),menu)
                action.setCheckable(True)
                menu.addAction(action)
            self.ui.pbnDim2.setMenu(menu)
            QObject.connect(self.ui.pbnDim2.menu(), SIGNAL("triggered(QAction *)"), self.on_pbnDimx_triggered)   
            # click first element of each dim
            if len(menu.actions()) > 1:
                menu.actions()[1].setChecked(True)
            self.ui.pbnDim2.setEnabled(True)

        # make sure we found something, if not notify user
        if len(self.dim_names) == 0:
            self.warning2()
        self.updateURI()
        self.updateDims()
Example #35
0
File: gnome.py  Project: jsh1012/okean
def frc2gnome(fname,frc,grd,xylim=False,dates=False,ij=(1,1),**kargs):
  '''
  Creates GNOME wind file
  kargs:
    t[u,v]var
    t[u,v]dim
    x[y,ang]var

  Ex:
    .frc2gnome(out,frc,grd,ij=(10,10),dates=dates,**{'tdim':'Time'})
  '''

  deta,dxi=ij

  tvar='time'
  uvar='Uwind'
  vvar='Vwind'
  #tvar='bulk_time'
  #uvar='uwnd'
  #vvar='vwnd'

  tdim='time'
  #tdim='bulk_time'
  xdim='xi_rho'
  ydim='eta_rho'

  xvar='lon_rho'
  yvar='lat_rho'
  angvar='angle'

  if 'tvar' in kargs.keys(): tvar=kargs['tvar']
  if 'uvar' in kargs.keys(): uvar=kargs['uvar']
  if 'vvar' in kargs.keys(): vvar=kargs['vvar']

  if 'tdim' in kargs.keys(): tdim=kargs['tdim']
  if 'xdim' in kargs.keys(): xdim=kargs['xdim']
  if 'ydim' in kargs.keys(): ydim=kargs['ydim']

  if 'xvar' in kargs.keys(): xvar=kargs['xvar']
  if 'yvar' in kargs.keys(): yvar=kargs['yvar']
  if 'angvar' in kargs.keys(): angvar=kargs['angvar']


  dims=netcdf.fdim(grd)
  xi,eta=dims[xdim],dims[ydim]
  xi0,eta0=xi,eta

  ncg=netcdf.ncopen(grd)

  nc0=netcdf.ncopen(frc)
  try:
   t=netcdf.nctime(nc0,tvar)
  except:
    t=netcdf.use(nc0,tvar)
    t=netcdftime.num2date(t,'days since %d-01-01' % year0)

  time=netcdftime.date2num(t,tunits)

  x0=netcdf.use(grd,xvar)
  y0=netcdf.use(grd,yvar)
  if x0.ndim==1: x0,y0=np.meshgrid(x0,y0)

  if angvar:
    ang=netcdf.use(grd,angvar)

  if not xylim is False:
    xlim=xylim[:2]
    ylim=xylim[2:]
    i1,i2,j1,j2=calc.ij_limits(x0,y0,xlim,ylim)
    xi=i2-i1
    eta=j2-j1
  else:
    i1,i2=0,xi
    j1,j2=0,eta

  XI  ='%d:%d:%d' %(i1,i2,dxi)
  ETA ='%d:%d:%d' %(j1,j2,deta)

  xi=len(range(i1,i2,dxi))
  eta=len(range(j1,j2,deta))

  # create file:
  create_wind(fname,xi,eta)

  nc=netcdf.ncopen(fname,'a')

  x=x0[j1:j2:deta,i1:i2:dxi]
  y=y0[j1:j2:deta,i1:i2:dxi]

  nc.vars['lon'][:]=x
  nc.vars['lat'][:]=y
  if angvar: ang=ang[j1:j2:deta,i1:i2:dxi]

  n=-1
  for it in range(len(time)):

    if not dates is False:
      d0,d1=dates
      if t[it]<d0 or t[it]>=d1: continue

    n+=1
    u=netcdf.use(nc0,uvar,**{xdim:XI,ydim:ETA,tdim:it})
    v=netcdf.use(nc0,vvar,**{xdim:XI,ydim:ETA,tdim:it})

    # rotate uv:
    if angvar:
      print 'rotating ...'
      u,v=calc.rot2d(u,v,-ang)


    nc.vars['time'][n]=time[it]
    print 'filling uv',n,t[it]
    nc.vars['air_u'][n,...]=u
    nc.vars['air_v'][n,...]=v


  nc.close()
  nc0.close()
  ncg.close()