Example #1
def getNcStartEnd(inDir, urlNcDap, timeAxisName):
    '''Find the lines in the html with the .nc file, then open it and read the start/end times
    return url to the .nc  and start/end as datetime objects.
    '''
    logger.debug('open_url on urlNcDap = {}'.format(urlNcDap))

    try:
        base_in = '/'.join(urlNcDap.split('/')[-3:])
        in_file = os.path.join(inDir, base_in)
        df = netCDF4.Dataset(in_file, mode='r')
    except pydap.exceptions.ServerError as ex:
        logger.warning(ex)
        raise ServerError("Can't read {} time axis from {}".format(timeAxisName, urlNcDap))

    try:
        timeAxisUnits = df[timeAxisName].units
    except KeyError as ex:
        logger.warning(ex)
        raise ServerError("Can't read {} time axis from {}".format(timeAxisName, urlNcDap))

    if timeAxisUnits == 'seconds since 1970-01-01T00:00:00Z' or timeAxisUnits == 'seconds since 1970/01/01 00:00:00Z':
        timeAxisUnits = 'seconds since 1970-01-01 00:00:00'    # coards is picky

    try:
        startDatetime = from_udunits(df[timeAxisName][0][0].data, timeAxisUnits)
        endDatetime = from_udunits(df[timeAxisName][-1][0].data, timeAxisUnits)
    except pydap.exceptions.ServerError as ex:
        logger.warning(ex)
        raise ServerError("Can't read start and end dates of {} from {}".format(timeAxisUnits, urlNcDap))
    except ValueError as ex:
        logger.warning(ex)
        raise ServerError("Can't read start and end dates of {} from {}".format(timeAxisUnits, urlNcDap))

    return startDatetime, endDatetime
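All of the getNcStartEnd variants in these examples follow the same pattern: read the time variable's units attribute, normalize it so coards will accept it, then convert the numeric time values with from_udunits. A minimal, self-contained sketch of that pattern (the sample value and units string are illustrative only, not taken from any of the repositories these examples come from):

from coards import from_udunits

# Units string as it often appears on a CF/COARDS time variable.
units = 'seconds since 1970-01-01T00:00:00Z'

# coards is picky about the ISO 'T' and trailing 'Z', so normalize first,
# exactly as the examples here do before converting.
if units in ('seconds since 1970-01-01T00:00:00Z',
             'seconds since 1970/01/01 00:00:00Z'):
    units = 'seconds since 1970-01-01 00:00:00'

value = 1234567890.0                  # illustrative epoch-seconds value
print(from_udunits(value, units))     # 2009-02-13 23:31:30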
Example #2
    def get_bounds(self, url, timeAxisName):
        '''Find the lines in the html with the .nc file, then open it and read the start/end times
        return url to the .nc  and start/end as datetime objects.
        '''
        logger.debug('open_url on url = {}'.format(url))
        df = pydap.client.open_url(url)
        try:
            time_units = df[timeAxisName].units
        except KeyError as e:
            logger.warning(e)
            raise ServerError("Can't read {} time axis from {}".format(
                timeAxisName, url))

        if time_units == 'seconds since 1970-01-01T00:00:00Z' or time_units == 'seconds since 1970/01/01 00:00:00Z':
            time_units = 'seconds since 1970-01-01 00:00:00'  # coards is picky

        try:
            startDatetime = from_udunits(df[timeAxisName][0][0].data,
                                         time_units)
            endDatetime = from_udunits(df[timeAxisName][-1][0].data,
                                       time_units)
        except pydap.exceptions.ServerError as e:
            logger.warning(e)
            raise ServerError("Can't read start and end dates of %s from %s" %
                              (time_units, url))
        except ValueError as e:
            logger.warning(e)
            raise ServerError("Can't read start and end dates of %s from %s" %
                              (time_units, url))

        return startDatetime, endDatetime
Example #3
    def add_global_metadata(self, featureType='trajectory'):
        '''
        This is the main advantage of using a class for these methods.  This method uses the
        instance variables to write metadata specific for the data that are written.
        '''

        iso_now = datetime.datetime.now().isoformat()

        self.ncFile.netcdf_version = '3.6'
        self.ncFile.Conventions = 'CF-1.6'
        self.ncFile.date_created = iso_now
        self.ncFile.date_update = iso_now
        self.ncFile.date_modified = iso_now
        self.ncFile.featureType = featureType
        self.ncFile.data_mode = 'R'
        self.ncFile.geospatial_lat_min = np.min(self.latitude[:])
        self.ncFile.geospatial_lat_max = np.max(self.latitude[:])
        self.ncFile.geospatial_lon_min = np.min(self.longitude[:])
        self.ncFile.geospatial_lon_max = np.max(self.longitude[:])
        self.ncFile.geospatial_lat_units = 'degree_north'
        self.ncFile.geospatial_lon_units = 'degree_east'

        self.ncFile.geospatial_vertical_min = np.min(self.depth[:])
        self.ncFile.geospatial_vertical_max = np.max(self.depth[:])
        self.ncFile.geospatial_vertical_units = 'm'
        self.ncFile.geospatial_vertical_positive = 'down'

        self.ncFile.time_coverage_start = coards.from_udunits(self.time[0], self.time.units).isoformat()
        self.ncFile.time_coverage_end = coards.from_udunits(self.time[-1], self.time.units).isoformat()

        self.ncFile.useconst = 'Not intended for legal use. Data may contain inaccuracies.'
        self.ncFile.history = 'Created by STOQS software command "%s" on %s' % (' '.join(sys.argv), iso_now,)
Example #4
def getNcStartEnd(urlNcDap, timeAxisName):
    '''Find the lines in the html with the .nc file, then open it and read the start/end times
    return url to the .nc  and start/end as datetime objects.
    '''
    logger.debug('open_url on urlNcDap = %s', urlNcDap)
    df = pydap.client.open_url(urlNcDap)
    try:
        timeAxisUnits = df[timeAxisName].units
    except KeyError as e:
        logger.warning(e)
        raise ServerError("Can't read %s time axis from %s" % (timeAxisName, urlNcDap))

    if timeAxisUnits == 'seconds since 1970-01-01T00:00:00Z' or timeAxisUnits == 'seconds since 1970/01/01 00:00:00Z':
        timeAxisUnits = 'seconds since 1970-01-01 00:00:00'    # coards is picky

    try:
        startDatetime = from_udunits(df[timeAxisName][0][0], timeAxisUnits)
        endDatetime = from_udunits(df[timeAxisName][-1][0], timeAxisUnits)
    except pydap.exceptions.ServerError as e:
        logger.warning(e)
        raise ServerError("Can't read start and end dates of %s from %s" % (timeAxisUnits, urlNcDap))
    except ValueError as e:
        logger.warning(e)
        raise ServerError("Can't read start and end dates of %s from %s" % (timeAxisUnits, urlNcDap))

    return startDatetime, endDatetime
Example #5
def getNcStartEnd(urlNcDap, timeAxisName):
    '''Find the lines in the html with the .nc file, then open it and read the start/end times
    return url to the .nc  and start/end as datetime objects.
    '''
    logger.debug('open_url on urlNcDap = %s', urlNcDap)
    df = pydap.client.open_url(urlNcDap)
    try:
        timeAxisUnits = df[timeAxisName].units
    except KeyError as e:
        logger.warning(e)
        raise ServerError("Can't read %s time axis from %s" %
                          (timeAxisName, urlNcDap))

    if timeAxisUnits == 'seconds since 1970-01-01T00:00:00Z' or timeAxisUnits == 'seconds since 1970/01/01 00:00:00Z':
        timeAxisUnits = 'seconds since 1970-01-01 00:00:00'  # coards is picky

    try:
        startDatetime = from_udunits(df[timeAxisName][0][0].data,
                                     timeAxisUnits)
        endDatetime = from_udunits(df[timeAxisName][-1][0].data, timeAxisUnits)
    except pydap.exceptions.ServerError as e:
        logger.warning(e)
        raise ServerError("Can't read start and end dates of %s from %s" %
                          (timeAxisUnits, urlNcDap))
    except webob.exc.HTTPError as e:
        logger.warning(e.comment)
        raise ServerError("Can't read start and end dates of %s from %s" %
                          (timeAxisUnits, urlNcDap))
    except ValueError as e:
        logger.warning(e)
        raise ServerError("Can't read start and end dates of %s from %s" %
                          (timeAxisUnits, urlNcDap))

    return startDatetime, endDatetime
Example #6
    def getNcStartEnd(self, urlNcDap, timeAxisName):
        '''Find the lines in the html with the .nc file, then open it and read the start/end times
        return url to the .nc  and start/end as datetime objects.
        '''
        self.logger.debug('open_url on urlNcDap = {}'.format(urlNcDap))

        base_in = '/'.join(urlNcDap.split('/')[-3:])
        in_file = os.path.join(self.inDir, base_in)
        df = netCDF4.Dataset(in_file, mode='r')

        timeAxisUnits = df[timeAxisName].units

        if timeAxisUnits == 'seconds since 1970-01-01T00:00:00Z' or timeAxisUnits == 'seconds since 1970/01/01 00:00:00Z':
            timeAxisUnits = 'seconds since 1970-01-01 00:00:00'  # coards is picky

        try:
            startDatetime = from_udunits(df[timeAxisName][0].data,
                                         timeAxisUnits)
            endDatetime = from_udunits(df[timeAxisName][-1].data,
                                       timeAxisUnits)
        except pydap.exceptions.ServerError as ex:
            self.logger.warning(ex)
            raise ServerError(
                "Can't read start and end dates of {} from {}".format(
                    timeAxisUnits, urlNcDap))
        except ValueError as ex:
            self.logger.warning(ex)
            raise ServerError(
                "Can't read start and end dates of {} from {}".format(
                    timeAxisUnits, urlNcDap))

        return startDatetime, endDatetime
Example #7
    def _set_datetime(self):
        """
        Should I really do it right the way, automatically?
        """
        if 'time' in self.dataset.variables:
            from coards import from_udunits
            self.data['datetime'] = ma.array([from_udunits(v, self.dataset.variables['time'].units) for v in self.dataset.variables['time'][:]])
        return
Example #8
def dDate(time, dataset):
    time_units = dataset['time'].units
    #logging.debug(time_units)
    if time_units == "hours since 1-1-1 00:00:0.0":
        # Re-base the units on 1800-01-01 and shift the value by the
        # equivalent number of hours (657438 days).
        time_units = "hours since 1800-1-1 00:00:0.0"
        #logging.debug("fixed " + time_units)
        time = time - (657438.0 * 24)
    date = from_udunits(time, time_units)
    return date
Example #9
    def _set_datetime(self):
        """
	    Should I really do it right the way, automatically?
        """
        if 'time' in self.dataset.variables:
            from coards import from_udunits
            self.data['datetime'] = ma.array([
                from_udunits(v, self.dataset.variables['time'].units)
                for v in self.dataset.variables['time'][:]
            ])
        return
Example #10
    def add_global_metadata(self):
        '''
        This is the main advantage of using a class for these methods.  This method uses the
        instance variables to write metadata specific for the data that are written.
        '''

        iso_now = datetime.datetime.now().isoformat()

        self.ncFile.title = ''
        self.ncFile.netcdf_version = '3.6'
        self.ncFile.Convention = 'CF-1.4'
        self.ncFile.date_created = iso_now
        self.ncFile.date_update = iso_now
        self.ncFile.date_modified = iso_now
        self.ncFile.cdm_data_type = 'trajectory'
        self.ncFile.CF_featureType = 'trajectory'
        self.ncFile.data_mode = 'R'
        self.ncFile.geospatial_lat_min = np.min(self.latitude[:])
        self.ncFile.geospatial_lat_max = np.max(self.latitude[:])
        self.ncFile.geospatial_lon_min = np.min(self.longitude[:])
        self.ncFile.geospatial_lon_max = np.max(self.longitude[:])
        self.ncFile.geospatial_lat_units = 'degree_north'
        self.ncFile.geospatial_lon_units = 'degree_east'

        self.ncFile.geospatial_vertical_min = np.min(self.depth[:])
        self.ncFile.geospatial_vertical_max = np.max(self.depth[:])
        self.ncFile.geospatial_vertical_units = 'm'
        self.ncFile.geospatial_vertical_positive = 'down'

        self.ncFile.time_coverage_start = coards.from_udunits(
            self.time[0], self.time.units).isoformat()
        self.ncFile.time_coverage_end = coards.from_udunits(
            self.time[-1], self.time.units).isoformat()

        self.ncFile.distribution_statement = 'Any use requires prior approval from the MBARI CANON PI: Dr. Francisco Chavez'
        self.ncFile.license = self.ncFile.distribution_statement
        self.ncFile.useconst = 'Not intended for legal use. Data may contain inaccuracies.'
        self.ncFile.history = 'Created by "%s" on %s' % (
            ' '.join(sys.argv),
            iso_now,
        )
Example #11
def time_range(timebnds, first, last):
    tidx = 0
    included = []
    for t in timebnds:
        # replace t[0] with t if using 'time' rather than 'time_bnds'
        date = from_udunits(t[0], dataset.time.units.replace('GMT', '+0:00'))
        if (date >= first) & (date <= last):
            print("include", tidx, t, date)
            included.append(tidx)
        tidx += 1
    print("RANGE", included[0], included[-1])
    return
Example #12
    def add_global_metadata(self, featureType='trajectory'):
        '''
        This is the main advantage of using a class for these methods.  This method uses the
        instance variables to write metadata specific for the data that are written.
        '''

        iso_now = datetime.datetime.now().isoformat()

        self.ncFile.netcdf_version = '3.6'
        self.ncFile.Conventions = 'CF-1.6'
        self.ncFile.date_created = iso_now
        self.ncFile.date_update = iso_now
        self.ncFile.date_modified = iso_now
        self.ncFile.featureType = featureType
        self.ncFile.data_mode = 'R'
        self.ncFile.geospatial_lat_min = np.min(self.latitude[:])
        self.ncFile.geospatial_lat_max = np.max(self.latitude[:])
        self.ncFile.geospatial_lon_min = np.min(self.longitude[:])
        self.ncFile.geospatial_lon_max = np.max(self.longitude[:])
        self.ncFile.geospatial_lat_units = 'degree_north'
        self.ncFile.geospatial_lon_units = 'degree_east'

        self.ncFile.geospatial_vertical_min = np.min(self.depth[:])
        self.ncFile.geospatial_vertical_max = np.max(self.depth[:])
        self.ncFile.geospatial_vertical_units = 'm'
        self.ncFile.geospatial_vertical_positive = 'down'

        self.ncFile.time_coverage_start = coards.from_udunits(
            self.time[0], self.time.units).isoformat()
        self.ncFile.time_coverage_end = coards.from_udunits(
            self.time[-1], self.time.units).isoformat()

        self.ncFile.useconst = 'Not intended for legal use. Data may contain inaccuracies.'
        self.ncFile.history = 'Created by STOQS software command "%s" on %s' % (
            ' '.join(sys.argv),
            iso_now,
        )
Example #13
    def add_global_metadata(self):
        '''
        This is the main advantage of using a class for these methods.  This method uses the
        instance variables to write metadata specific for the data that are written.
        '''

        iso_now = datetime.datetime.now().isoformat()

        self.ncFile.title = ''
        self.ncFile.netcdf_version = '3.6'
        self.ncFile.Convention = 'CF-1.4'
        self.ncFile.date_created = iso_now
        self.ncFile.date_update = iso_now
        self.ncFile.date_modified = iso_now
        self.ncFile.cdm_data_type = 'trajectory'
        self.ncFile.CF_featureType = 'trajectory'
        self.ncFile.data_mode = 'R'
        self.ncFile.geospatial_lat_min = np.min(self.latitude[:])
        self.ncFile.geospatial_lat_max = np.max(self.latitude[:])
        self.ncFile.geospatial_lon_min = np.min(self.longitude[:])
        self.ncFile.geospatial_lon_max = np.max(self.longitude[:])
        self.ncFile.geospatial_lat_units = 'degree_north'
        self.ncFile.geospatial_lon_units = 'degree_east'

        self.ncFile.geospatial_vertical_min = np.min(self.depth[:])
        self.ncFile.geospatial_vertical_max = np.max(self.depth[:])
        self.ncFile.geospatial_vertical_units = 'm'
        self.ncFile.geospatial_vertical_positive = 'down'

        self.ncFile.time_coverage_start = coards.from_udunits(self.time[0], self.time.units).isoformat()
        self.ncFile.time_coverage_end = coards.from_udunits(self.time[-1], self.time.units).isoformat()

        self.ncFile.distribution_statement = 'Any use requires prior approval from the MBARI CANON PI: Dr. Francisco Chavez'
        self.ncFile.license = self.ncFile.distribution_statement
        self.ncFile.useconst = 'Not intended for legal use. Data may contain inaccuracies.'
        self.ncFile.history = 'Created by "%s" on %s' % (' '.join(sys.argv), iso_now,)
Example #14
time = dataset['time']

if 0:
    print(type(air))
    print(air.dimensions)
    print(air.shape)
    print(air.attributes)

grid = air.array[1655,0:93:10,0:191:10] * air.scale_factor + air.add_offset - 273.17
grid = numpy.round(grid,1)
print(grid.shape)
print(dataset['time_bnds'].shape)

for t in dataset['time_bnds'][1655]:
    date = from_udunits(t, dataset.time.units.replace('GMT', '+0:00'))
    print(date)

ZERO = timedelta(0)
class UTC(tzinfo):
    """UTC"""
    def utcoffset(self, dt):
        return ZERO

    def tzname(self, dt):
        return "UTC"

    def dst(self, dt):
        return ZERO

first = datetime(2005,1,1, tzinfo=UTC())
Example #15
                bounds_ = None
        elif re.search('mon', variable['table']):
            if re.match(r'time:\s*mean', cell_methods):
                if (28*8) <= data.shape[0] <= (31*8):
                    data = np.mean(data, axis=0)
                    # replicate data because it expects more than one value, since
                    # we're processing one month at a time.
                    data.shape = (1,) + data.shape
                    data = np.concatenate((data, data), 0)
                    time_ = np.array([np.mean(time)])
                    bounds_ = np.array([bounds[0], bounds[-1]])
                else:
                    out = np.empty((12,) + data.shape[1:], 'f')
                    bounds_ = []
                    for l in range(12):
                        valid = np.array([ from_udunits(v, TIME).month == l+1 for v in time ], bool)
                        out[l] = np.mean(data[valid], axis=0)
                        bounds_.append(bounds[valid][0])
                    data = out
                    bounds_.append(bounds[-1])
                    bounds_ = np.array(bounds_)
                    time_ = (bounds_[:-1] + bounds_[1:])/2.

        axis_ids = []
        if 'height2m' in variable['CMOR dimensions']:
            ilevels = cmor.axis(table_entry='height2m', units='m', coord_vals=[2.])
            axis_ids.append(ilevels)
        elif 'height10m' in variable['CMOR dimensions']:
            ilevels = cmor.axis(table_entry='height10m', units='m', coord_vals=[10.])
            axis_ids.append(ilevels)
        if 'time' in variable['CMOR dimensions']:
Example #16
def date_str(time):
    date = from_udunits(time, dataset.time.units.replace('GMT', '+0:00'))
    return '%d-%02d-%02d' % (date.year, date.month, date.day)
Example #17
def dDate(time, dataset):
    date = from_udunits(time, dataset.time.units.replace('GMT', '+0:00'))
    return date
Example #18
    def add_global_metadata(self, featureType='trajectory'):
        '''
        This is the main advantage of using a class for these methods.  This method uses the
        instance variables to write metadata specific for the data that are written.
        '''

        iso_now = datetime.datetime.now().isoformat()

        self.ncFile.netcdf_version = '3.6'
        self.ncFile.Conventions = 'CF-1.6'
        self.ncFile.date_created = iso_now
        self.ncFile.date_update = iso_now
        self.ncFile.date_modified = iso_now
        self.ncFile.featureType = featureType
        self.ncFile.data_mode = 'R'
        if os.environ.get('USER'):
            self.ncFile.user = os.environ.get('USER')
        if os.environ.get('HOSTNAME'):
            self.ncFile.hostname = os.environ.get('HOSTNAME')

        # Record source of the software producing this file
        app_dir = os.path.abspath(
            os.path.join(os.path.dirname(__file__), "../"))
        repo = Repo(app_dir, search_parent_directories=True)
        self.ncFile.gitorigin = repo.remotes.origin.url
        self.ncFile.gitcommit = repo.head.commit.hexsha

        # Likely TypeError: 'float' object is not subscriptable
        try:
            self.ncFile.geospatial_lat_min = np.min(self.latitude[:])
        except TypeError:
            self.ncFile.geospatial_lat_min = self.latitude
        try:
            self.ncFile.geospatial_lat_max = np.max(self.latitude[:])
        except TypeError:
            self.ncFile.geospatial_lat_max = self.latitude
        try:
            self.ncFile.geospatial_lon_min = np.min(self.longitude[:])
        except TypeError:
            self.ncFile.geospatial_lon_min = self.longitude
        try:
            self.ncFile.geospatial_lon_max = np.max(self.longitude[:])
        except TypeError:
            self.ncFile.geospatial_lon_max = self.longitude

        self.ncFile.geospatial_lat_units = 'degree_north'
        self.ncFile.geospatial_lon_units = 'degree_east'

        self.ncFile.geospatial_vertical_min = np.min(self.depth[:])
        self.ncFile.geospatial_vertical_max = np.max(self.depth[:])
        self.ncFile.geospatial_vertical_units = 'm'
        self.ncFile.geospatial_vertical_positive = 'down'

        self.ncFile.time_coverage_start = coards.from_udunits(
            self.time[0], self.time.units).isoformat()
        self.ncFile.time_coverage_end = coards.from_udunits(
            self.time[-1], self.time.units).isoformat()

        self.ncFile.useconst = 'Not intended for legal use. Data may contain inaccuracies.'
        self.ncFile.history = 'Created by STOQS software command "%s" on %s' % (
            ' '.join(sys.argv),
            iso_now,
        )
Example #19
def udDate(time, dataset):
    date = from_udunits(time, dataset.time.units.replace('GMT', '+0:00'))
    return '%d/%02d/%02d' % (date.year, date.month, date.day)
Example #20
nregions = len(regions)
  
for r in range(nregions):
    (n,w) = regions[r][0]
    (s,e) = regions[r][1]
    w = w%360
    e = e%360
    if e == 0: e = 360
    print("nw", n, w, "se", s, e)
    a = dataset.air[interval,
                    (dataset.lat > s) & (dataset.lat < n),
                    (dataset.lon >= w) & (dataset.lon < e) ]
    #print "shape", a.shape
    data = numpy.round(a.array[:] * dataset.air.scale_factor + dataset.air.add_offset - 273.15, 1)
    #print "data shape", data.shape
    (ntimes, nlats, nlons) = data.shape

    for t in range(ntimes):
        print(from_udunits(a.time[t], dataset.time.units.replace('GMT', '+0:00')))
        for la in range(nlats):
            for lo in range(nlons):
                loc_code = code(r) + code(la) + code(lo)
                csvout.writerow([date_str(a.time[t]),loc_code,
                                 location_str(a, la, lo),
                                 data[t,la,lo]])





Example #21
def extract_point(model, ctd):

    # load model data
    field, depth = model.field, model.depth
    lon, lat = model.lon, model.lat
    time = [from_udunits(v[0], v[1]) for v in model.time]

    # load ctd data and extract positions
    t = from_udunits(ctd.time[0], ctd.time[1])
    y = ctd.lat

    #x = ctd.lon % 360
    if model.name == 'MOM3':
        x = ctd.lon + 360
    else:
        x = ctd.lon

    if x > lon[-1]:
        x += lon[0]
    if x < lon[0]:
        x += lon[-1]

    # find the horizontal position and time for this CTD measure
    i0 = np.searchsorted(lon, x, side='left') - 1
    j0 = np.searchsorted(lat, y, side='left') - 1
    l0 = np.searchsorted(time, t, side='left') - 1

    if i0 < 0:
        i0 = 0
    if j0 < 0:
        j0 = 0
    if l0 < 0:
        l0 = 0

    # prefetching the data, avoid comm overhead
    data = model.get_data(l0, j0, i0)
    values = []
    for z in -ctd.depth[:-1]:
        # for each point find the 8 encompassing points and calculate
        # weighted average
        k0 = bisect.bisect(depth, z) - 1
        if k0 < 0:
            k0 = 0
        if k0 >= len(depth):
            values.append(field.missing_value)

        v0 = w = 0
        v1 = w = 0
        for (i, j, k) in ((i, j, k) for i in (0, 1) for j in (0, 1)
                          for k in (k0, k0 + 1)):
            distance = np.sqrt((x - lon[i0 + i])**2 + (y - lat[j0 + j])**2 +
                               (z - depth[k])**2)
            if not np.all(data[:, k, j, i].mask):
                v0 += float(data[0, k, j, i]) / (distance**WEIGHT)
                v1 += float(data[1, k, j, i]) / (distance**WEIGHT)
                w += 1 / (distance**WEIGHT)
        if w:
            v0 /= w
            v1 /= w

        # interpolate linearly in time
        v = (v0 + (v1 - v0) * seconds(t - time[l0]) /
             seconds(time[l0 + 1] - time[l0]))
        values.append(v)
    return ma.masked_equal(np.array(values), field.missing_value)
Example #22
if not os.path.exists(config['description']['outpath']):
    os.mkdir(config['description']['outpath'])

# read data from netcdf
mm = month
yyyy = year
if freq in ['mon', 'monClim']: 
    mm = (month % 12) + 1  # get next month
    if mm == 1:
        yyyy += 1
filename = "cgcm2.2_CMIP5_%s_%04d_%02d.nc" % (os.path.splitext(os.path.split(table)[1])[0], yyyy, mm)
print('Opening %s' % filename)
inp = netcdf_file(filename)

time = [ from_udunits(value, inp.variables['time'].units) for value in inp.variables['time'][:] ]
time = np.array([ to_udunits(v, TIME) for v in time ])
if 'time_bounds' in inp.variables:
    bounds = np.concatenate((inp.variables['time_bounds'][:,0], inp.variables['time_bounds'][-1:,1]), axis=0)
    bounds = [ from_udunits(value, inp.variables['time'].units) for value in bounds ]
    bounds = np.array([ to_udunits(v, TIME) for v in bounds ])
else:
    bounds = None

# now save data
start_for_variable = datetime.datetime.now()
for variable in config['variables'].values():
    start_this_variable = datetime.datetime.now()
    if variable['realm'] in ['ocean', 'seaIce']:
        if variable['output variable name'] == 'thetao':
            temp_salt_file = 'cgcm2.2_tempsalt_%04d_%02d.nc' % (year, month)