Example #1
    def get_albedo_data(self, interval='season'):
        """
        get albedo data for JSBACH

        returns Data object
        """

        if interval != 'season':
            raise ValueError(
                'Other temporal sampling than SEASON not supported yet for JSBACH BOT files, sorry'
            )

        v = 'var176'

        filename = self.data_dir + 'data/model1/' + self.experiment + '_echam6_BOT_mm_1979-2006_albedo_yseasmean.nc'
        ls_mask = get_T63_landseamask(self.shift_lon)

        albedo = Data(filename,
                      v,
                      read=True,
                      label='MPI-ESM albedo ' + self.experiment,
                      unit='-',
                      lat_name='lat',
                      lon_name='lon',
                      shift_lon=self.shift_lon,
                      mask=ls_mask.data.data)

        return albedo
Example #2
    def get_tree_fraction(self, interval='season'):
        """
        todo implement this for data from a real run !!!
        """

        if interval != 'season':
            raise ValueError(
                'Other temporal sampling than SEASON not supported yet for JSBACH BOT files, sorry'
            )

        ls_mask = get_T63_landseamask(self.shift_lon)

        filename = '/home/m300028/shared/dev/svn/trstools-0.0.1/lib/python/pyCMBS/framework/external/vegetation_benchmarking/VEGETATION_COVER_BENCHMARKING/example/historical_r1i1p1-LR_1850-2005_forest_shrub.nc'
        v = 'var12'
        tree = Data(filename,
                    v,
                    read=True,
                    label='MPI-ESM tree fraction ' + self.experiment,
                    unit='-',
                    lat_name='lat',
                    lon_name='lon',
                    shift_lon=self.shift_lon,
                    mask=ls_mask.data.data,
                    start_time=pl.num2date(pl.datestr2num('2001-01-01')),
                    stop_time=pl.num2date(pl.datestr2num('2001-12-31')))

        return tree
Example #3
    def get_albedo_data(self, interval='season', **kwargs):
        """
        calculate albedo as the ratio of upwelling and downwelling fluxes;
        the monthly mean fluxes are first used to calculate the albedo
        """

        force_calc = False

        # read land-sea mask
        ls_mask = get_T63_landseamask(self.shift_lon)

        if self.start_time is None:
            raise ValueError('Start time needs to be specified')
        if self.stop_time is None:
            raise ValueError('Stop time needs to be specified')

        # get fluxes
        Fu = self.get_surface_shortwave_radiation_up(interval=interval)
        if Fu is None:
            print 'File not existing for UPWARD flux!: ', self.name
            return None
        else:
            Fu_i = Fu[0]
        lab = Fu_i._get_label()
        Fd = self.get_surface_shortwave_radiation_down(interval=interval, **{'CMIP5': {'valid_mask': 'land'}, 'CMIP5RAW': {'valid_mask': 'land'}, 'CMIP5RAWSINGLE': {'valid_mask': 'land'}})  # todo: take routine name from the configuration setup in JSON file !!!!
        if Fd is None:
            print 'File not existing for DOWNWARD flux!: ', self.name
            return None
        else:
            Fd_i = Fd[0]

        # albedo for the chosen interval is calculated as the ratio of the mean fluxes in that interval (e.g. season, months)
        Fu_i.div(Fd_i, copy=False)
        del Fd_i  # Fu contains now the albedo
        Fu_i._apply_mask(ls_mask.data)

        # albedo for monthly data (needed for global mean plots)
        Fu_m = Fu[1][2]
        del Fu
        Fd_m = Fd[1][2]
        del Fd

        Fu_m.div(Fd_m, copy=False)
        del Fd_m
        Fu_m._apply_mask(ls_mask.data)
        Fu_m._set_valid_range(0., 1.)
        Fu_m.label = lab + ' albedo'
        Fu_i.label = lab + ' albedo'
        Fu_m.unit = '-'
        Fu_i.unit = '-'

        # center dates of months
        Fu_m.adjust_time(day=15)
        Fu_i.adjust_time(day=15)

        # return data as a tuple list
        retval = (Fu_m.time, Fu_m.fldmean(), Fu_m)

        return Fu_i, retval
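
The core of this routine is an element-wise ratio of the two fluxes, with the result restricted to the physically valid range. Below is a minimal NumPy sketch of that ratio-and-clip logic, independent of the Data class and using made-up flux values, just to illustrate what the div and _set_valid_range(0., 1.) calls accomplish.

import numpy as np

# hypothetical monthly mean fluxes in W m-2 (2x2 grid just for illustration)
sw_up = np.array([[30., 45.], [10., 0.]])
sw_down = np.array([[200., 180.], [0., 0.]])

# albedo = upward / downward flux; cells with zero downward flux become masked
albedo = sw_up / np.ma.masked_equal(sw_down, 0.)

# restrict to the physically valid range [0, 1], as _set_valid_range(0., 1.) does
albedo = np.ma.masked_outside(albedo, 0., 1.)

print(albedo)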
Example #4
    def get_gpp_data(self, interval='season'):
        """
        get surface GPP data for JSBACH

        todo temporal aggregation of data --> or leave it to the user!
        """
        cdo = Cdo()
        v = 'var167'
        y1 = str(self.start_time)[0:10]
        y2 = str(self.stop_time)[0:10]
        rawfilename = self.data_dir + 'data/model/' + self.experiment + '_' + y1[0:4] + '-' + y2[0:4] + '.nc'
        times_in_file = int(''.join(cdo.ntime(input=rawfilename)))

        if interval == 'season':
            if times_in_file != 4:
                tmp_file = get_temporary_directory() + os.path.basename(
                    rawfilename)
                cdo.yseasmean(options='-f nc -b 32 -r ',
                              input='-selvar,' + v + ' ' + rawfilename,
                              output=tmp_file[:-3] + '_yseasmean.nc')
                rawfilename = tmp_file[:-3] + '_yseasmean.nc'

        if interval == 'monthly':
            if times_in_file != 12:
                tmp_file = get_temporary_directory() + os.path.basename(
                    rawfilename)
                cdo.ymonmean(options='-f nc -b 32 -r ',
                             input='-selvar,' + v + ' ' + rawfilename,
                             output=tmp_file[:-3] + '_ymonmean.nc')
                rawfilename = tmp_file[:-3] + '_ymonmean.nc'

        if not os.path.exists(rawfilename):
            return None

        filename = rawfilename

        #--- read land-sea mask
        ls_mask = get_T63_landseamask(self.shift_lon)

        #--- read SW up data
        gpp = Data4D(filename,
                     v,
                     read=True,
                     label=self.experiment + ' ' + v,
                     unit='gC m-2 a-1',
                     lat_name='lat',
                     lon_name='lon',
                     shift_lon=self.shift_lon,
                     mask=ls_mask.data.data,
                     scale_factor=3600. * 24. * 30. / 0.083)

        return gpp.sum_data4D()
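
The scale_factor deserves a note: 3600. * 24. * 30. is the number of seconds in a 30-day month, and dividing by 0.083 (roughly 1/12) multiplies by about twelve months per year. The arithmetic sketch below works through the conversion, assuming the raw var167 field is a mean flux per second; that assumption about the raw unit is not documented in the snippet itself.

# sketch of the conversion used above (raw per-second flux -> annual total),
# ASSUMING the raw var167 field is a mean flux in gC m-2 s-1
seconds_per_month = 3600. * 24. * 30.   # 2,592,000 s in a 30-day month
months_per_year = 1. / 0.083            # ~12.05

scale_factor = seconds_per_month * months_per_year  # == 3600. * 24. * 30. / 0.083

flux_per_second = 1.0e-5                 # hypothetical raw value
gpp_per_year = flux_per_second * scale_factor
print(gpp_per_year)                      # -> roughly 312 (gC m-2 a-1)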
Example #5
    def get_rainfall_data(self, interval='season'):
        """
        get rainfall data for JSBACH
        returns Data object
        """

        if interval == 'season':
            pass
        else:
            raise ValueError('Invalid value for interval: %s' % interval)

        #/// PREPROCESSING: seasonal means ///
        s_start_time = str(self.start_time)[0:10]
        s_stop_time = str(self.stop_time)[0:10]

        filename1 = self.data_dir + self.experiment + '_echam6_BOT_mm_1980_sel.nc'
        tmp = pyCDO(filename1, s_start_time, s_stop_time).seldate()
        tmp1 = pyCDO(tmp, s_start_time, s_stop_time).seasmean()
        filename = pyCDO(tmp1, s_start_time, s_stop_time).yseasmean()

        #/// READ DATA ///

        #1) land / sea mask
        ls_mask = get_T63_landseamask(self.shift_lon)

        #2) precipitation data
        try:
            v = 'var4'
            rain = Data(filename,
                        v,
                        read=True,
                        scale_factor=86400.,
                        label='MPI-ESM ' + self.experiment,
                        unit='mm/day',
                        lat_name='lat',
                        lon_name='lon',
                        shift_lon=self.shift_lon,
                        mask=ls_mask.data.data)
        except:
            v = 'var142'
            rain = Data(filename,
                        v,
                        read=True,
                        scale_factor=86400.,
                        label='MPI-ESM ' + self.experiment,
                        unit='mm/day',
                        lat_name='lat',
                        lon_name='lon',
                        shift_lon=self.shift_lon,
                        mask=ls_mask.data.data)

        return rain
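
Two remarks on this snippet. The scale_factor=86400. presumably converts a precipitation flux in kg m-2 s-1 (equivalent to mm s-1 for liquid water) into mm/day, which matches the unit string. And the try/except block exists only because the precipitation variable may be stored as either var4 or var142; a hedged sketch of the same fallback written as a loop over candidate names (with a hypothetical read_precip helper standing in for the Data constructor) avoids duplicating the long argument list:

def read_precip(filename, varname):
    # hypothetical stand-in for the Data(...) call above; raises if varname is absent
    raise NotImplementedError

def read_first_available(filename, candidates=('var4', 'var142')):
    """Try candidate variable names in order and return the first readable one."""
    for v in candidates:
        try:
            return read_precip(filename, v)
        except Exception:
            continue
    raise ValueError('None of the candidate variables found in %s' % filename)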
Example #6
    def get_surface_shortwave_radiation_down(self, interval='season'):
        """
        get surface shortwave incoming radiation data for JSBACH

        returns Data object
        """

        if interval != 'season':
            raise ValueError(
                'Other temporal sampling than SEASON not supported yet for JSBACH BOT files, sorry'
            )

        v = 'var176'

        y1 = '1979-01-01'
        y2 = '2006-12-31'
        rawfilename = self.data_dir + 'data/model/' + self.experiment + '_echam6_BOT_mm_1979-2006_srads.nc'

        if not os.path.exists(rawfilename):
            return None

        #--- read data
        cdo = pyCDO(rawfilename, y1, y2)
        if interval == 'season':
            seasfile = cdo.seasmean()
            del cdo
            print 'seasfile: ', seasfile
            cdo = pyCDO(seasfile, y1, y2)
            filename = cdo.yseasmean()
        else:
            raise ValueError('Invalid interval option %s ' % interval)

        #--- read land-sea mask
        ls_mask = get_T63_landseamask(self.shift_lon)

        #--- read SIS data
        sis = Data(
            filename,
            v,
            read=True,
            label='MPI-ESM SIS ' + self.experiment,
            unit='-',
            lat_name='lat',
            lon_name='lon',
            #shift_lon=shift_lon,
            mask=ls_mask.data.data)

        return sis
Example #7
    def get_grass_fraction(self, interval='season'):
        """
        todo implement this for data from a real run !!!
        """

        if interval != 'season':
            raise ValueError('Other temporal sampling than SEASON not supported yet for JSBACH BOT files, sorry')

        ls_mask = get_T63_landseamask(self.shift_lon)

        filename = '/home/m300028/shared/dev/svn/trstools-0.0.1/lib/python/pyCMBS/framework/external/vegetation_benchmarking/VEGETATION_COVER_BENCHMARKING/example/historical_r1i1p1-LR_1850-2005_grass_crop_pasture_2001.nc'
        v = 'var12'
        grass = Data(filename, v, read=True,
                     label='MPI-ESM grass fraction ' + self.experiment, unit='-', lat_name='lat', lon_name='lon',
                     #shift_lon=shift_lon,
                     mask=ls_mask.data.data, start_time=pl.num2date(pl.datestr2num('2001-01-01')), stop_time=pl.num2date(pl.datestr2num('2001-12-31')), squeeze=True)

        return grass
Example #8
    def get_gpp_data(self, interval='season'):
        """
        get surface GPP data for JSBACH

        todo temporal aggregation of data --> or leave it to the user!
        """
        cdo = Cdo()
        v = 'var167'
        y1 = str(self.start_time)[0:10]
        y2 = str(self.stop_time)[0:10]
        rawfilename = self.data_dir + 'data/model/' + self.experiment + '_' + y1[0:4] + '-' + y2[0:4] + '.nc'
        times_in_file = int(''.join(cdo.ntime(input=rawfilename)))

        if interval == 'season':
            if times_in_file != 4:
                tmp_file = get_temporary_directory() + os.path.basename(rawfilename)
                cdo.yseasmean(options='-f nc -b 32 -r ', input='-selvar,' + v + ' ' + rawfilename, output=tmp_file[:-3] + '_yseasmean.nc')
                rawfilename = tmp_file[:-3] + '_yseasmean.nc'

        if interval == 'monthly':
            if times_in_file != 12:
                tmp_file = get_temporary_directory() + os.path.basename(rawfilename)
                cdo.ymonmean(options='-f nc -b 32 -r ', input='-selvar,' + v + ' ' + rawfilename, output=tmp_file[:-3] + '_ymonmean.nc')
                rawfilename = tmp_file[:-3] + '_ymonmean.nc'

        if not os.path.exists(rawfilename):
            return None

        filename = rawfilename

        #--- read land-sea mask
        ls_mask = get_T63_landseamask(self.shift_lon)

        #--- read SW up data
        gpp = Data4D(filename, v, read=True,
                     label=self.experiment + ' ' + v, unit='gC m-2 a-1', lat_name='lat', lon_name='lon',
                     shift_lon=self.shift_lon,
                     mask=ls_mask.data.data, scale_factor=3600. * 24. * 30. / 0.083
                     )

        return gpp.sum_data4D()
Example #9
    def get_albedo_data(self, interval='season'):
        """
        get albedo data for JSBACH

        returns Data object
        """

        if interval != 'season':
            raise ValueError('Other temporal sampling than SEASON not supported yet for JSBACH BOT files, sorry')

        v = 'var176'

        filename = self.data_dir + 'data/model1/' + self.experiment + '_echam6_BOT_mm_1979-2006_albedo_yseasmean.nc'
        ls_mask = get_T63_landseamask(self.shift_lon)

        albedo = Data(filename, v, read=True,
                      label='MPI-ESM albedo ' + self.experiment, unit='-', lat_name='lat', lon_name='lon',
                      shift_lon=self.shift_lon,
                      mask=ls_mask.data.data)

        return albedo
Example #10
    def get_surface_shortwave_radiation_down(self, interval='season'):
        """
        get surface shortwave incoming radiation data for JSBACH

        returns Data object
        """

        if interval != 'season':
            raise ValueError('Other temporal sampling than SEASON not supported yet for JSBACH BOT files, sorry')

        v = 'var176'

        y1 = '1979-01-01'
        y2 = '2006-12-31'
        rawfilename = self.data_dir + 'data/model/' + self.experiment + '_echam6_BOT_mm_1979-2006_srads.nc'

        if not os.path.exists(rawfilename):
            return None

        #--- read data
        cdo = pyCDO(rawfilename, y1, y2)
        if interval == 'season':
            seasfile = cdo.seasmean()
            del cdo
            print 'seasfile: ', seasfile
            cdo = pyCDO(seasfile, y1, y2)
            filename = cdo.yseasmean()
        else:
            raise ValueError('Invalid interval option %s ' % interval)

        #--- read land-sea mask
        ls_mask = get_T63_landseamask(self.shift_lon)

        #--- read SIS data
        sis = Data(filename, v, read=True,
                   label='MPI-ESM SIS ' + self.experiment, unit='-', lat_name='lat', lon_name='lon',
                   #shift_lon=shift_lon,
                   mask=ls_mask.data.data)

        return sis
Example #11
    def get_rainfall_data(self, interval='season'):
        """
        get rainfall data for JSBACH
        returns Data object
        """

        if interval == 'season':
            pass
        else:
            raise ValueError('Invalid value for interval: %s' % interval)

        #/// PREPROCESSING: seasonal means ///
        s_start_time = str(self.start_time)[0:10]
        s_stop_time = str(self.stop_time)[0:10]

        filename1 = self.data_dir + self.experiment + '_echam6_BOT_mm_1980_sel.nc'
        tmp = pyCDO(filename1, s_start_time, s_stop_time).seldate()
        tmp1 = pyCDO(tmp, s_start_time, s_stop_time).seasmean()
        filename = pyCDO(tmp1, s_start_time, s_stop_time).yseasmean()

        #/// READ DATA ///

        #1) land / sea mask
        ls_mask = get_T63_landseamask(self.shift_lon)

        #2) precipitation data
        try:
            v = 'var4'
            rain = Data(filename, v, read=True, scale_factor=86400.,
                        label='MPI-ESM ' + self.experiment, unit='mm/day', lat_name='lat', lon_name='lon',
                        shift_lon=self.shift_lon,
                        mask=ls_mask.data.data)
        except:
            v = 'var142'
            rain = Data(filename, v, read=True, scale_factor=86400.,
                        label='MPI-ESM ' + self.experiment, unit='mm/day', lat_name='lat', lon_name='lon',
                        shift_lon=self.shift_lon,
                        mask=ls_mask.data.data)

        return rain
Example #12
    def _do_preprocessing(self, rawfile, varname, s_start_time, s_stop_time, interval='monthly', force_calc=False, valid_mask='global', target_grid='t63grid'):
        """
        perform preprocessing
        * selection of variable
        * temporal subsetting
        """
        cdo = Cdo()

        if not os.path.exists(rawfile):
            print('File not existing! %s ' % rawfile)
            return None, None

        # calculate monthly means
        file_monthly = get_temporary_directory() + os.sep + os.path.basename(rawfile[:-3]) + '_' + varname + '_' + s_start_time + '_' + s_stop_time + '_mm.nc'
        if (force_calc) or (not os.path.exists(file_monthly)):
            cdo.monmean(options='-f nc', output=file_monthly, input='-seldate,' + s_start_time + ',' + s_stop_time + ' ' + '-selvar,' + varname + ' ' + rawfile, force=force_calc)
        else:
            pass
        if not os.path.exists(file_monthly):
            raise ValueError('Monthly preprocessing did not work! %s ' % file_monthly)

        # calculate monthly or seasonal climatology
        if interval == 'monthly':
            mdata_clim_file = file_monthly[:-3] + '_ymonmean.nc'
            mdata_sum_file = file_monthly[:-3] + '_ymonsum.nc'
            mdata_N_file = file_monthly[:-3] + '_ymonN.nc'
            mdata_clim_std_file = file_monthly[:-3] + '_ymonstd.nc'
            cdo.ymonmean(options='-f nc -b 32', output=mdata_clim_file, input=file_monthly, force=force_calc)
            cdo.ymonsum(options='-f nc -b 32', output=mdata_sum_file, input=file_monthly, force=force_calc)
            cdo.ymonstd(options='-f nc -b 32', output=mdata_clim_std_file, input=file_monthly, force=force_calc)
            cdo.div(options='-f nc', output=mdata_N_file, input=mdata_sum_file + ' ' + mdata_clim_file, force=force_calc)  # number of samples
        elif interval == 'season':
            mdata_clim_file = file_monthly[:-3] + '_yseasmean.nc'
            mdata_sum_file = file_monthly[:-3] + '_yseassum.nc'
            mdata_N_file = file_monthly[:-3] + '_yseasN.nc'
            mdata_clim_std_file = file_monthly[:-3] + '_yseasstd.nc'
            cdo.yseasmean(options='-f nc -b 32', output=mdata_clim_file, input=file_monthly, force=force_calc)
            cdo.yseassum(options='-f nc -b 32', output=mdata_sum_file, input=file_monthly, force=force_calc)
            cdo.yseasstd(options='-f nc -b 32', output=mdata_clim_std_file, input=file_monthly, force=force_calc)
            cdo.div(options='-f nc -b 32', output=mdata_N_file, input=mdata_sum_file + ' ' + mdata_clim_file, force=force_calc)  # number of samples
        else:
            raise ValueError('Unknown temporal interval. Can not perform preprocessing!')

        if not os.path.exists(mdata_clim_file):
            return None

        # read data
        if interval == 'monthly':
            thetime_cylce = 12
        elif interval == 'season':
            thetime_cylce = 4
        else:
            print interval
            raise ValueError('Unsupported interval!')

        mdata = Data(mdata_clim_file, varname, read=True, label=self.name, shift_lon=False, time_cycle=thetime_cylce, lat_name='lat', lon_name='lon')
        mdata_std = Data(mdata_clim_std_file, varname, read=True, label=self.name + ' std', unit='-', shift_lon=False, time_cycle=thetime_cylce, lat_name='lat', lon_name='lon')
        mdata.std = mdata_std.data.copy()
        del mdata_std
        mdata_N = Data(mdata_N_file, varname, read=True, label=self.name + ' std', shift_lon=False, lat_name='lat', lon_name='lon')
        mdata.n = mdata_N.data.copy()
        del mdata_N

        # ensure that climatology always starts with January, therefore set date and then sort
        mdata.adjust_time(year=1700, day=15)  # set arbitrary time for climatology
        mdata.timsort()

        #4) read monthly data
        mdata_all = Data(file_monthly, varname, read=True, label=self.name, shift_lon=False, time_cycle=12, lat_name='lat', lon_name='lon')
        mdata_all.adjust_time(day=15)

        # mask_antarctica masks everything below 60 degrees S.
        # here we only mask Antarctica if only LAND points shall be used
        if valid_mask == 'land':
            mask_antarctica = True
        elif valid_mask == 'ocean':
            mask_antarctica = False
        else:
            mask_antarctica = False

        if target_grid == 't63grid':
            mdata._apply_mask(get_T63_landseamask(False, area=valid_mask, mask_antarctica=mask_antarctica))
            mdata_all._apply_mask(get_T63_landseamask(False, area=valid_mask, mask_antarctica=mask_antarctica))
        else:
            tmpmsk = get_generic_landseamask(False, area=valid_mask, target_grid=target_grid, mask_antarctica=mask_antarctica)
            mdata._apply_mask(tmpmsk)
            mdata_all._apply_mask(tmpmsk)
            del tmpmsk

        mdata_mean = mdata_all.fldmean()

        # return data as a tuple list
        retval = (mdata_all.time, mdata_mean, mdata_all)

        del mdata_all
        return mdata, retval
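
The "number of samples" files are produced by dividing the climatological sum by the climatological mean: for each calendar month (or season), sum = N * mean, so sum / mean recovers N, the number of years entering the climatology. A tiny NumPy sketch of the idea:

import numpy as np

# hypothetical January values for one grid cell over 5 years
january = np.array([2.0, 3.0, 4.0, 5.0, 6.0])

ymonsum = january.sum()     # 20.0, as cdo ymonsum would produce for this cell
ymonmean = january.mean()   # 4.0, as cdo ymonmean would produce

n_samples = ymonsum / ymonmean
print(n_samples)            # 5.0 == number of years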
Example #13
    def get_albedo_data(self, interval='monthly', **kwargs):
        """
        calculate albedo as the ratio of upwelling and downwelling fluxes;
        the monthly mean fluxes are first used to calculate the albedo
        """

        # read land-sea mask
        ls_mask = get_T63_landseamask(self.shift_lon)  # TODO make this more flexible

        if self.start_time is None:
            raise ValueError('Start time needs to be specified')
        if self.stop_time is None:
            raise ValueError('Stop time needs to be specified')

        Fd = self.get_surface_shortwave_radiation_down(**kwargs)
        Fu = self.get_surface_shortwave_radiation_up(**kwargs)

        if Fu is None:
            print 'File not existing for UPWARD flux!: ', self.name
            return None
        else:
            Fu_i = Fu[0]
            if Fu_i is None:
                return None

        if Fd is None:
            print 'File not existing for DOWNWARD flux!: ', self.name
            return None
        else:
            Fd_i = Fd[0]
            if Fd_i is None:
                return None
        lab = Fu_i.label

        # albedo for the chosen interval is calculated as the ratio of the mean fluxes in that interval (e.g. season, months)
        Fu_i.div(Fd_i, copy=False)
        del Fd_i  # Fu contains now the albedo
        Fu_i._apply_mask(ls_mask.data)

        # albedo for monthly data (needed for global mean plots)
        Fu_m = Fu[1][2]
        del Fu
        Fd_m = Fd[1][2]
        del Fd

        Fu_m.div(Fd_m, copy=False)
        del Fd_m
        Fu_m._apply_mask(ls_mask.data)
        Fu_m._set_valid_range(0., 1.)
        Fu_m.label = lab + ' albedo'
        Fu_i.label = lab + ' albedo'
        Fu_m.unit = '-'
        Fu_i.unit = '-'

        # center dates of months
        Fu_m.adjust_time(day=15)
        Fu_i.adjust_time(day=15)

        # return data as a tuple list
        retval = (Fu_m.time, Fu_m.fldmean(), Fu_m)

        return Fu_i, retval
Example #14
    def get_model_data_generic(self, interval="season", **kwargs):
        """
        unique parameters are:
            filename - file basename
            variable - name of the variable as the short_name in the netcdf file

            kwargs is a dictionary with keys for each model. Then a dictionary with properties follows

        """

        if not self.type in kwargs.keys():
            print ""
            print "WARNING: it is not possible to get data using generic function, as method missing: ", self.type, kwargs.keys()
            assert False

        locdict = kwargs[self.type]

        # read settings and details from the keyword arguments
        # no defaults; everything should be explicitly specified in either the config file or the dictionaries
        varname = locdict.pop("variable", None)
        # ~ print self.type
        # ~ print locdict.keys()
        assert varname is not None, "ERROR: provide varname!"

        units = locdict.pop("unit", None)
        assert units is not None, "ERROR: provide unit!"

        lat_name = locdict.pop("lat_name", "lat")
        lon_name = locdict.pop("lon_name", "lon")
        model_suffix = locdict.pop("model_suffix", None)
        model_prefix = locdict.pop("model_prefix", None)
        file_format = locdict.pop("file_format")
        scf = locdict.pop("scale_factor")
        valid_mask = locdict.pop("valid_mask")
        custom_path = locdict.pop("custom_path", None)
        thelevel = locdict.pop("level", None)

        target_grid = self._actplot_options["targetgrid"]
        interpolation = self._actplot_options["interpolation"]

        if custom_path is None:
            filename1 = self.get_raw_filename(varname, **kwargs)  # routine needs to be implemented by each subclass
        else:
            filename1 = custom_path + self.get_raw_filename(varname, **kwargs)

        if filename1 is None:
            print_log(WARNING, "No valid model input data")
            return None

        force_calc = False

        if self.start_time is None:
            raise ValueError("Start time needs to be specified")
        if self.stop_time is None:
            raise ValueError("Stop time needs to be specified")

        # /// PREPROCESSING ///
        cdo = Cdo()
        s_start_time = str(self.start_time)[0:10]
        s_stop_time = str(self.stop_time)[0:10]

        # 1) select timeperiod and generate monthly mean file
        if target_grid == "t63grid":
            gridtok = "T63"
        else:
            gridtok = "SPECIAL_GRID"

        file_monthly = (
            filename1[:-3] + "_" + s_start_time + "_" + s_stop_time + "_" + gridtok + "_monmean.nc"
        )  # target filename
        file_monthly = get_temporary_directory() + os.path.basename(file_monthly)

        sys.stdout.write("\n *** Model file monthly: %s\n" % file_monthly)

        if not os.path.exists(filename1):
            print "WARNING: File not existing: " + filename1
            return None

        cdo.monmean(
            options="-f nc",
            output=file_monthly,
            input="-"
            + interpolation
            + ","
            + target_grid
            + " -seldate,"
            + s_start_time
            + ","
            + s_stop_time
            + " "
            + filename1,
            force=force_calc,
        )

        sys.stdout.write("\n *** Reading model data... \n")
        sys.stdout.write("     Interval: " + interval + "\n")

        # 2) calculate monthly or seasonal climatology
        if interval == "monthly":
            mdata_clim_file = file_monthly[:-3] + "_ymonmean.nc"
            mdata_sum_file = file_monthly[:-3] + "_ymonsum.nc"
            mdata_N_file = file_monthly[:-3] + "_ymonN.nc"
            mdata_clim_std_file = file_monthly[:-3] + "_ymonstd.nc"
            cdo.ymonmean(options="-f nc -b 32", output=mdata_clim_file, input=file_monthly, force=force_calc)
            cdo.ymonsum(options="-f nc -b 32", output=mdata_sum_file, input=file_monthly, force=force_calc)
            cdo.ymonstd(options="-f nc -b 32", output=mdata_clim_std_file, input=file_monthly, force=force_calc)
            cdo.div(
                options="-f nc", output=mdata_N_file, input=mdata_sum_file + " " + mdata_clim_file, force=force_calc
            )  # number of samples
        elif interval == "season":
            mdata_clim_file = file_monthly[:-3] + "_yseasmean.nc"
            mdata_sum_file = file_monthly[:-3] + "_yseassum.nc"
            mdata_N_file = file_monthly[:-3] + "_yseasN.nc"
            mdata_clim_std_file = file_monthly[:-3] + "_yseasstd.nc"
            cdo.yseasmean(options="-f nc -b 32", output=mdata_clim_file, input=file_monthly, force=force_calc)
            cdo.yseassum(options="-f nc -b 32", output=mdata_sum_file, input=file_monthly, force=force_calc)
            cdo.yseasstd(options="-f nc -b 32", output=mdata_clim_std_file, input=file_monthly, force=force_calc)
            cdo.div(
                options="-f nc -b 32",
                output=mdata_N_file,
                input=mdata_sum_file + " " + mdata_clim_file,
                force=force_calc,
            )  # number of samples
        else:
            raise ValueError("Unknown temporal interval. Can not perform preprocessing!")

        if not os.path.exists(mdata_clim_file):
            return None

        # 3) read data
        if interval == "monthly":
            thetime_cylce = 12
        elif interval == "season":
            thetime_cylce = 4
        else:
            print interval
            raise ValueError("Unsupported interval!")
        mdata = Data(
            mdata_clim_file,
            varname,
            read=True,
            label=self._unique_name,
            unit=units,
            lat_name=lat_name,
            lon_name=lon_name,
            shift_lon=False,
            scale_factor=scf,
            level=thelevel,
            time_cycle=thetime_cylce,
        )
        mdata_std = Data(
            mdata_clim_std_file,
            varname,
            read=True,
            label=self._unique_name + " std",
            unit="-",
            lat_name=lat_name,
            lon_name=lon_name,
            shift_lon=False,
            level=thelevel,
            time_cycle=thetime_cylce,
        )
        mdata.std = mdata_std.data.copy()
        del mdata_std
        mdata_N = Data(
            mdata_N_file,
            varname,
            read=True,
            label=self._unique_name + " std",
            unit="-",
            lat_name=lat_name,
            lon_name=lon_name,
            shift_lon=False,
            scale_factor=scf,
            level=thelevel,
        )
        mdata.n = mdata_N.data.copy()
        del mdata_N

        # ensure that climatology always starts with January, therefore set date and then sort
        mdata.adjust_time(year=1700, day=15)  # set arbitrary time for climatology
        mdata.timsort()

        # 4) read monthly data
        mdata_all = Data(
            file_monthly,
            varname,
            read=True,
            label=self._unique_name,
            unit=units,
            lat_name=lat_name,
            lon_name=lon_name,
            shift_lon=False,
            time_cycle=12,
            scale_factor=scf,
            level=thelevel,
        )
        mdata_all.adjust_time(day=15)

        # mask_antarctica masks everything below 60 degrees S.
        # here we only mask Antarctica, if only LAND points shall be used
        if valid_mask == "land":
            mask_antarctica = True
        elif valid_mask == "ocean":
            mask_antarctica = False
        else:
            mask_antarctica = False

        if target_grid == "t63grid":
            mdata._apply_mask(get_T63_landseamask(False, area=valid_mask, mask_antarctica=mask_antarctica))
            mdata_all._apply_mask(get_T63_landseamask(False, area=valid_mask, mask_antarctica=mask_antarctica))
        else:
            tmpmsk = get_generic_landseamask(
                False, area=valid_mask, target_grid=target_grid, mask_antarctica=mask_antarctica
            )
            mdata._apply_mask(tmpmsk)
            mdata_all._apply_mask(tmpmsk)
            del tmpmsk

        mdata_mean = mdata_all.fldmean()

        mdata._raw_filename = filename1
        mdata._monthly_filename = file_monthly
        mdata._clim_filename = mdata_clim_file
        mdata._varname = varname

        # return data as a tuple list
        retval = (mdata_all.time, mdata_mean, mdata_all)

        del mdata_all
        return mdata, retval
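
The per-model kwargs dictionary this routine consumes is keyed by self.type; the inner dictionary must provide at least 'variable', 'unit', 'file_format', 'scale_factor' and 'valid_mask', since those are popped without defaults, while the other keys are optional. A purely illustrative example of the expected structure (the concrete values are assumptions, not taken from any real configuration):

kwargs = {
    'CMIP5': {
        'variable': 'rsds',          # short_name in the netCDF file
        'unit': 'W m-2',
        'lat_name': 'lat',           # optional, defaults to 'lat'
        'lon_name': 'lon',           # optional, defaults to 'lon'
        'model_suffix': 'ensmean',   # optional
        'model_prefix': 'Amon',      # optional
        'file_format': 'nc',
        'scale_factor': 1.0,
        'valid_mask': 'land',        # 'land', 'ocean' or 'global'
        'custom_path': None,         # optional
        'level': None,               # optional
    }
}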
Example #15
    def _import_regional_file(self,
                              region_file,
                              varname,
                              targetgrid=None,
                              logfile=None):
        """
        check whether the regional file can be imported directly or
        whether the regions are provided as vector data. In the latter
        case the regions are rasterized and the results are stored in
        a netCDF file

        Parameters
        ----------
        region_file : str
            name of file defining the region. This is either a netCDF
            file which contains the mask as different integer values
            or it is a *.reg file which contains the regions as
            vector data.
        varname : str
            name of variable in netCDF file
        targetgrid : str
            name of targetgrid; either 't63grid' or the name of a file
            with a valid geometry

        Returns
        -------
            region_filename, region_file_varname
        """

        if not os.path.exists(region_file):
            raise ValueError('ERROR: region file is not existing: ' +
                             region_file)

        ext = os.path.splitext(region_file)[1]
        if ext == '.nc':
            # netCDF file was given. Try to read variable
            if varname is None:
                raise ValueError('ERROR: no variable name given!')
            try:
                tmp = Data(region_file, varname, read=True)
            except:
                raise ValueError(
                    'ERROR: the regional masking file can not be read!')
            del tmp

            # everything is fine
            return region_file, varname

        elif ext == '.reg':
            # regions were given as a vector file. Read it,
            # rasterize the data and store the results in a temporary
            # file
            import tempfile

            if targetgrid is None:
                raise ValueError(
                    'ERROR: targetgrid needs to be specified for vectorization of regions!'
                )

            if targetgrid == 't63grid':
                ls_mask = get_T63_landseamask(True,
                                              area='global',
                                              mask_antarctica=False)
            else:
                ls_mask = get_generic_landseamask(True,
                                                  area='global',
                                                  target_grid=targetgrid,
                                                  mask_antarctica=False)

            # temporary netCDF filename
            region_file1 = tempfile.mktemp(prefix='region_mask_', suffix='.nc')
            R = RegionParser(region_file)  # read region vector data
            M = Raster(ls_mask.lon, ls_mask.lat)
            polylist = []
            if logfile is not None:
                logf = open(logfile, 'w')
            else:
                logf = None

            id = 1
            for k in R.regions.keys():
                reg = R.regions[k]
                polylist.append(pycmbsPolygon(id, zip(reg.lon, reg.lat)))
                if logf is not None:  # store mapping table
                    logf.write(k + '\t' + str(id) + '\n')
                id += 1

            M.rasterize_polygons(polylist)
            if logf is not None:
                logf.close()

            # generate dummy output file
            O = Data(None, None)
            O.data = M.mask
            O.lat = ls_mask.lat
            O.lon = ls_mask.lon
            varname = 'regions'
            O.save(region_file1, varname=varname, format='nc', delete=True)
            print('Region file was stored in file: %s' % region_file1)

            # check again that file is readable
            try:
                tmp = Data(region_file1, varname, read=True)
            except:
                print region_file1, varname
                raise ValueError(
                    'ERROR: the generated region file is not readable!')
            del tmp

            return region_file1, varname

        else:
            raise ValueError('ERROR: unsupported file type')
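
One design note on the .reg branch: tempfile.mktemp only generates a name and is documented as unsafe, because another process can create the file between name generation and use. A hedged alternative sketch that reserves the file atomically with tempfile.mkstemp (behaviour otherwise unchanged; the rasterized regions would still be written to region_file1 afterwards):

import os
import tempfile

# create the temporary netCDF file atomically instead of only generating a name
fd, region_file1 = tempfile.mkstemp(prefix='region_mask_', suffix='.nc')
os.close(fd)  # keep only the path; the region mask is saved to it later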
Example #16
    def get_model_data_generic(self, interval='season', **kwargs):
        """
        unique parameters are:
            filename - file basename
            variable - name of the variable as the short_name in the netcdf file

            kwargs is a dictionary with keys for each model. Then a dictionary with properties follows

        """

        if not self.type in kwargs.keys():
            print ''
            print 'WARNING: it is not possible to get data using generic function, as method missing: ', self.type, kwargs.keys()
            assert False

        locdict = kwargs[self.type]

        # read settings and details from the keyword arguments
        # no defaults; everything should be explicitly specified in either the config file or the dictionaries
        varname = locdict.pop('variable', None)
        #~ print self.type
        #~ print locdict.keys()
        assert varname is not None, 'ERROR: provide varname!'

        units = locdict.pop('unit', None)
        assert units is not None, 'ERROR: provide unit!'

        lat_name = locdict.pop('lat_name', 'lat')
        lon_name = locdict.pop('lon_name', 'lon')
        model_suffix = locdict.pop('model_suffix', None)
        model_prefix = locdict.pop('model_prefix', None)
        file_format = locdict.pop('file_format')
        scf = locdict.pop('scale_factor')
        valid_mask = locdict.pop('valid_mask')
        custom_path = locdict.pop('custom_path', None)
        thelevel = locdict.pop('level', None)

        target_grid = self._actplot_options['targetgrid']
        interpolation = self._actplot_options['interpolation']

        if custom_path is None:
            filename1 = self.get_raw_filename(
                varname,
                **kwargs)  # routine needs to be implemented by each subclass
        else:
            filename1 = custom_path + self.get_raw_filename(varname, **kwargs)

        if filename1 is None:
            print_log(WARNING, 'No valid model input data')
            return None

        force_calc = False

        if self.start_time is None:
            raise ValueError('Start time needs to be specified')
        if self.stop_time is None:
            raise ValueError('Stop time needs to be specified')

        #/// PREPROCESSING ///
        cdo = Cdo()
        s_start_time = str(self.start_time)[0:10]
        s_stop_time = str(self.stop_time)[0:10]

        #1) select timeperiod and generate monthly mean file
        if target_grid == 't63grid':
            gridtok = 'T63'
        else:
            gridtok = 'SPECIAL_GRID'

        file_monthly = filename1[:-3] + '_' + s_start_time + '_' + s_stop_time + '_' + gridtok + '_monmean.nc'  # target filename
        file_monthly = get_temporary_directory() + os.path.basename(
            file_monthly)

        sys.stdout.write('\n *** Model file monthly: %s\n' % file_monthly)

        if not os.path.exists(filename1):
            print 'WARNING: File not existing: ' + filename1
            return None

        cdo.monmean(options='-f nc',
                    output=file_monthly,
                    input='-' + interpolation + ',' + target_grid +
                    ' -seldate,' + s_start_time + ',' + s_stop_time + ' ' +
                    filename1,
                    force=force_calc)

        sys.stdout.write('\n *** Reading model data... \n')
        sys.stdout.write('     Interval: ' + interval + '\n')

        #2) calculate monthly or seasonal climatology
        if interval == 'monthly':
            mdata_clim_file = file_monthly[:-3] + '_ymonmean.nc'
            mdata_sum_file = file_monthly[:-3] + '_ymonsum.nc'
            mdata_N_file = file_monthly[:-3] + '_ymonN.nc'
            mdata_clim_std_file = file_monthly[:-3] + '_ymonstd.nc'
            cdo.ymonmean(options='-f nc -b 32',
                         output=mdata_clim_file,
                         input=file_monthly,
                         force=force_calc)
            cdo.ymonsum(options='-f nc -b 32',
                        output=mdata_sum_file,
                        input=file_monthly,
                        force=force_calc)
            cdo.ymonstd(options='-f nc -b 32',
                        output=mdata_clim_std_file,
                        input=file_monthly,
                        force=force_calc)
            cdo.div(options='-f nc',
                    output=mdata_N_file,
                    input=mdata_sum_file + ' ' + mdata_clim_file,
                    force=force_calc)  # number of samples
        elif interval == 'season':
            mdata_clim_file = file_monthly[:-3] + '_yseasmean.nc'
            mdata_sum_file = file_monthly[:-3] + '_yseassum.nc'
            mdata_N_file = file_monthly[:-3] + '_yseasN.nc'
            mdata_clim_std_file = file_monthly[:-3] + '_yseasstd.nc'
            cdo.yseasmean(options='-f nc -b 32',
                          output=mdata_clim_file,
                          input=file_monthly,
                          force=force_calc)
            cdo.yseassum(options='-f nc -b 32',
                         output=mdata_sum_file,
                         input=file_monthly,
                         force=force_calc)
            cdo.yseasstd(options='-f nc -b 32',
                         output=mdata_clim_std_file,
                         input=file_monthly,
                         force=force_calc)
            cdo.div(options='-f nc -b 32',
                    output=mdata_N_file,
                    input=mdata_sum_file + ' ' + mdata_clim_file,
                    force=force_calc)  # number of samples
        else:
            raise ValueError(
                'Unknown temporal interval. Can not perform preprocessing!')

        if not os.path.exists(mdata_clim_file):
            return None

        #3) read data
        if interval == 'monthly':
            thetime_cylce = 12
        elif interval == 'season':
            thetime_cylce = 4
        else:
            print interval
            raise ValueError('Unsupported interval!')
        mdata = Data(mdata_clim_file,
                     varname,
                     read=True,
                     label=self._unique_name,
                     unit=units,
                     lat_name=lat_name,
                     lon_name=lon_name,
                     shift_lon=False,
                     scale_factor=scf,
                     level=thelevel,
                     time_cycle=thetime_cylce)
        mdata_std = Data(mdata_clim_std_file,
                         varname,
                         read=True,
                         label=self._unique_name + ' std',
                         unit='-',
                         lat_name=lat_name,
                         lon_name=lon_name,
                         shift_lon=False,
                         level=thelevel,
                         time_cycle=thetime_cylce)
        mdata.std = mdata_std.data.copy()
        del mdata_std
        mdata_N = Data(mdata_N_file,
                       varname,
                       read=True,
                       label=self._unique_name + ' std',
                       unit='-',
                       lat_name=lat_name,
                       lon_name=lon_name,
                       shift_lon=False,
                       scale_factor=scf,
                       level=thelevel)
        mdata.n = mdata_N.data.copy()
        del mdata_N

        # ensure that climatology always starts with January, therefore set date and then sort
        mdata.adjust_time(year=1700,
                          day=15)  # set arbitrary time for climatology
        mdata.timsort()

        #4) read monthly data
        mdata_all = Data(file_monthly,
                         varname,
                         read=True,
                         label=self._unique_name,
                         unit=units,
                         lat_name=lat_name,
                         lon_name=lon_name,
                         shift_lon=False,
                         time_cycle=12,
                         scale_factor=scf,
                         level=thelevel)
        mdata_all.adjust_time(day=15)

        #mask_antarctica masks everything below 60 degrees S.
        #here we only mask Antarctica, if only LAND points shall be used
        if valid_mask == 'land':
            mask_antarctica = True
        elif valid_mask == 'ocean':
            mask_antarctica = False
        else:
            mask_antarctica = False

        if target_grid == 't63grid':
            mdata._apply_mask(
                get_T63_landseamask(False,
                                    area=valid_mask,
                                    mask_antarctica=mask_antarctica))
            mdata_all._apply_mask(
                get_T63_landseamask(False,
                                    area=valid_mask,
                                    mask_antarctica=mask_antarctica))
        else:
            tmpmsk = get_generic_landseamask(False,
                                             area=valid_mask,
                                             target_grid=target_grid,
                                             mask_antarctica=mask_antarctica)
            mdata._apply_mask(tmpmsk)
            mdata_all._apply_mask(tmpmsk)
            del tmpmsk

        mdata_mean = mdata_all.fldmean()

        mdata._raw_filename = filename1
        mdata._monthly_filename = file_monthly
        mdata._clim_filename = mdata_clim_file
        mdata._varname = varname

        # return data as a tuple list
        retval = (mdata_all.time, mdata_mean, mdata_all)

        del mdata_all
        return mdata, retval
Example #17
    def xxxxxxxxxxxxxxxxxxxget_surface_shortwave_radiation_down(self, interval='season', force_calc=False, **kwargs):
        """
        return data object of
        a) seasonal means for SIS
        b) global mean timeseries for SIS at original temporal resolution
        """

        the_variable = 'rsds'

        locdict = kwargs[self.type]
        valid_mask = locdict.pop('valid_mask')

        if self.start_time is None:
            raise ValueError('Start time needs to be specified')
        if self.stop_time is None:
            raise ValueError('Stop time needs to be specified')

        s_start_time = str(self.start_time)[0:10]
        s_stop_time = str(self.stop_time)[0:10]

        if self.type == 'CMIP5':
            filename1 = self.data_dir + 'rsds' + os.sep + self.experiment + '/ready/' + self.model + '/rsds_Amon_' + self.model + '_' + self.experiment + '_ensmean.nc'
        elif self.type == 'CMIP5RAW':  # raw CMIP5 data based on ensembles
            filename1 = self._get_ensemble_filename(the_variable)
        elif self.type == 'CMIP5RAWSINGLE':
            filename1 = self.get_single_ensemble_file(the_variable, mip='Amon', realm='atmos', temporal_resolution='mon')
        else:
            raise ValueError('Unknown model type! not supported here!')

        if not os.path.exists(filename1):
            print ('WARNING file not existing: %s' % filename1)
            return None

        #/// PREPROCESSING ///
        cdo = Cdo()

        #1) select time period and generate the monthly mean file
        file_monthly = filename1[:-3] + '_' + s_start_time + '_' + s_stop_time + '_T63_monmean.nc'
        file_monthly = get_temporary_directory() + os.path.basename(file_monthly)

        print file_monthly

        sys.stdout.write('\n *** Model file monthly: %s\n' % file_monthly)
        cdo.monmean(options='-f nc', output=file_monthly, input='-remapcon,t63grid -seldate,' + s_start_time + ',' + s_stop_time + ' ' + filename1, force=force_calc)

        sys.stdout.write('\n *** Reading model data... \n')
        sys.stdout.write('     Interval: ' + interval + '\n')

        #2) calculate monthly or seasonal climatology
        if interval == 'monthly':
            sis_clim_file = file_monthly[:-3] + '_ymonmean.nc'
            sis_sum_file = file_monthly[:-3] + '_ymonsum.nc'
            sis_N_file = file_monthly[:-3] + '_ymonN.nc'
            sis_clim_std_file = file_monthly[:-3] + '_ymonstd.nc'
            cdo.ymonmean(options='-f nc -b 32', output=sis_clim_file, input=file_monthly, force=force_calc)
            cdo.ymonsum(options='-f nc -b 32', output=sis_sum_file, input=file_monthly, force=force_calc)
            cdo.ymonstd(options='-f nc -b 32', output=sis_clim_std_file, input=file_monthly, force=force_calc)
            cdo.div(options='-f nc', output=sis_N_file, input=sis_sum_file + ' ' + sis_clim_file, force=force_calc)  # number of samples
        elif interval == 'season':
            sis_clim_file = file_monthly[:-3] + '_yseasmean.nc'
            sis_sum_file = file_monthly[:-3] + '_yseassum.nc'
            sis_N_file = file_monthly[:-3] + '_yseasN.nc'
            sis_clim_std_file = file_monthly[:-3] + '_yseasstd.nc'
            cdo.yseasmean(options='-f nc -b 32', output=sis_clim_file, input=file_monthly, force=force_calc)
            cdo.yseassum(options='-f nc -b 32', output=sis_sum_file, input=file_monthly, force=force_calc)
            cdo.yseasstd(options='-f nc -b 32', output=sis_clim_std_file, input=file_monthly, force=force_calc)
            cdo.div(options='-f nc -b 32', output=sis_N_file, input=sis_sum_file + ' ' + sis_clim_file, force=force_calc)  # number of samples
        else:
            print interval
            raise ValueError('Unknown temporal interval. Can not perform preprocessing!')

        if not os.path.exists(sis_clim_file):
            return None

        #3) read data
        sis = Data(sis_clim_file, 'rsds', read=True, label=self._unique_name, unit='$W m^{-2}$', lat_name='lat', lon_name='lon', shift_lon=False)
        sis_std = Data(sis_clim_std_file, 'rsds', read=True, label=self._unique_name + ' std', unit='-', lat_name='lat', lon_name='lon', shift_lon=False)
        sis.std = sis_std.data.copy()
        del sis_std
        sis_N = Data(sis_N_file, 'rsds', read=True, label=self._unique_name + ' std', unit='-', lat_name='lat', lon_name='lon', shift_lon=False)
        sis.n = sis_N.data.copy()
        del sis_N

        #ensure that climatology always starts with January, therefore set date and then sort
        sis.adjust_time(year=1700, day=15)  # set arbitrary time for climatology
        sis.timsort()

        #4) read monthly data
        sisall = Data(file_monthly, 'rsds', read=True, label=self._unique_name, unit='W m^{-2}', lat_name='lat', lon_name='lon', shift_lon=False)
        if not sisall._is_monthly():
            raise ValueError('Timecycle of 12 expected here!')
        sisall.adjust_time(day=15)

        # land/sea masking ...
        if valid_mask == 'land':
            mask_antarctica = True
        elif valid_mask == 'ocean':
            mask_antarctica = False
        else:
            mask_antarctica = False

        sis._apply_mask(get_T63_landseamask(False, mask_antarctica=mask_antarctica, area=valid_mask))
        sisall._apply_mask(get_T63_landseamask(False, mask_antarctica=mask_antarctica, area=valid_mask))
        sismean = sisall.fldmean()

        # return data as a tuple list
        retval = (sisall.time, sismean, sisall)
        del sisall

        # mask areas without radiation (set to invalid): all data < 1 W/m**2
        sis.data = np.ma.array(sis.data, mask=sis.data < 1.)

        return sis, retval
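
The last step marks grid cells that receive essentially no radiation as invalid by rebuilding the masked array with an extra condition; with NumPy's default keep_mask=True, the new condition is combined with any mask already present on the data rather than replacing it. A minimal NumPy sketch of the pattern with made-up values:

import numpy as np

sis_values = np.array([[0.2, 5.0], [150.0, 320.0]])

# mask everything below 1 W m-2, mirroring the final step above
sis_masked = np.ma.array(sis_values, mask=sis_values < 1.)
print(sis_masked)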
Example #18
    def get_jsbach_data_generic(self, interval='season', **kwargs):
        """
        unique parameters are:
            filename - file basename
            variable - name of the variable as the short_name in the netcdf file

            kwargs is a dictionary with keys for each model. Then a dictionary with properties follows
        """

        if not self.type in kwargs.keys():
            print 'WARNING: it is not possible to get data using generic function, as method missing: ', self.type, kwargs.keys()
            return None

        print self.type
        print kwargs

        locdict = kwargs[self.type]

        # read settings and details from the keyword arguments
        # no defaults; everything should be explicitly specified in either the config file or the dictionaries

        varname = locdict.pop('variable')
        units = locdict.pop('unit', 'Unit not specified')

        lat_name = locdict.pop('lat_name', 'lat')
        lon_name = locdict.pop('lon_name', 'lon')
        #model_suffix = locdict.pop('model_suffix')
        #model_prefix = locdict.pop('model_prefix')
        file_format = locdict.pop('file_format')
        scf = locdict.pop('scale_factor')
        valid_mask = locdict.pop('valid_mask')
        custom_path = locdict.pop('custom_path', None)
        thelevel = locdict.pop('level', None)

        target_grid = self._actplot_options['targetgrid']
        interpolation = self._actplot_options['interpolation']

        if self.type != 'JSBACH_RAW2':
            print self.type
            raise ValueError('Invalid data format here!')

        # define from which JSBACH output stream the data needs to be taken for specific variables
        if varname in ['swdown_acc', 'swdown_reflect_acc']:
            filename1 = self.files['jsbach']
        elif varname in ['precip_acc']:
            filename1 = self.files['land']
        elif varname in ['temp2']:
            filename1 = self.files['echam']
        elif varname in ['var14']:  # albedo vis
            filename1 = self.files['albedo_vis']
        elif varname in ['var15']:  # albedo NIR
            filename1 = self.files['albedo_nir']
        else:
            print varname
            raise ValueError('Unknown variable type for JSBACH_RAW2 processing!')

        force_calc = False

        if self.start_time is None:
            raise ValueError('Start time needs to be specified')
        if self.stop_time is None:
            raise ValueError('Stop time needs to be specified')

        #/// PREPROCESSING ///
        cdo = Cdo()
        s_start_time = str(self.start_time)[0:10]
        s_stop_time = str(self.stop_time)[0:10]

        #1) select timeperiod and generate monthly mean file
        if target_grid == 't63grid':
            gridtok = 'T63'
        else:
            gridtok = 'SPECIAL_GRID'

        file_monthly = filename1[:-3] + '_' + s_start_time + '_' + s_stop_time + '_' + gridtok + '_monmean.nc'  # target filename
        file_monthly = get_temporary_directory() + os.path.basename(file_monthly)

        sys.stdout.write('\n *** Model file monthly: %s\n' % file_monthly)

        if not os.path.exists(filename1):
            print 'WARNING: File not existing: ' + filename1
            return None

        cdo.monmean(options='-f nc', output=file_monthly, input='-' + interpolation + ',' + target_grid + ' -seldate,' + s_start_time + ',' + s_stop_time + ' ' + filename1, force=force_calc)

        sys.stdout.write('\n *** Reading model data... \n')
        sys.stdout.write('     Interval: ' + interval + '\n')

        #2) calculate monthly or seasonal climatology
        if interval == 'monthly':
            mdata_clim_file = file_monthly[:-3] + '_ymonmean.nc'
            mdata_sum_file = file_monthly[:-3] + '_ymonsum.nc'
            mdata_N_file = file_monthly[:-3] + '_ymonN.nc'
            mdata_clim_std_file = file_monthly[:-3] + '_ymonstd.nc'
            cdo.ymonmean(options='-f nc -b 32', output=mdata_clim_file, input=file_monthly, force=force_calc)
            cdo.ymonsum(options='-f nc -b 32', output=mdata_sum_file, input=file_monthly, force=force_calc)
            cdo.ymonstd(options='-f nc -b 32', output=mdata_clim_std_file, input=file_monthly, force=force_calc)
            cdo.div(options='-f nc', output=mdata_N_file, input=mdata_sum_file + ' ' + mdata_clim_file, force=force_calc)  # number of samples
        elif interval == 'season':
            mdata_clim_file = file_monthly[:-3] + '_yseasmean.nc'
            mdata_sum_file = file_monthly[:-3] + '_yseassum.nc'
            mdata_N_file = file_monthly[:-3] + '_yseasN.nc'
            mdata_clim_std_file = file_monthly[:-3] + '_yseasstd.nc'
            cdo.yseasmean(options='-f nc -b 32', output=mdata_clim_file, input=file_monthly, force=force_calc)
            cdo.yseassum(options='-f nc -b 32', output=mdata_sum_file, input=file_monthly, force=force_calc)
            cdo.yseasstd(options='-f nc -b 32', output=mdata_clim_std_file, input=file_monthly, force=force_calc)
            cdo.div(options='-f nc -b 32', output=mdata_N_file, input=mdata_sum_file + ' ' + mdata_clim_file, force=force_calc)  # number of samples
        else:
            raise ValueError('Unknown temporal interval. Can not perform preprocessing! ')

        if not os.path.exists(mdata_clim_file):
            return None

        #3) read data
        if interval == 'monthly':
            thetime_cylce = 12
        elif interval == 'season':
            thetime_cylce = 4
        else:
            print interval
            raise ValueError('Unsupported interval!')
        mdata = Data(mdata_clim_file, varname, read=True, label=self.model, unit=units, lat_name=lat_name, lon_name=lon_name, shift_lon=False, scale_factor=scf, level=thelevel, time_cycle=thetime_cylce)
        mdata_std = Data(mdata_clim_std_file, varname, read=True, label=self.model + ' std', unit='-', lat_name=lat_name, lon_name=lon_name, shift_lon=False, level=thelevel, time_cycle=thetime_cylce)
        mdata.std = mdata_std.data.copy()
        del mdata_std
        mdata_N = Data(mdata_N_file, varname, read=True, label=self.model + ' std', unit='-', lat_name=lat_name, lon_name=lon_name, shift_lon=False, scale_factor=scf, level=thelevel)
        mdata.n = mdata_N.data.copy()
        del mdata_N
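        # mdata now carries the climatological mean in .data, the standard
        # deviation in .std and the number of samples in .n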

        #ensure that climatology always starts with January, therefore set date and then sort
        mdata.adjust_time(year=1700, day=15)  # set arbitrary time for climatology
        mdata.timsort()

        #4) read monthly data
        mdata_all = Data(file_monthly, varname, read=True, label=self.model, unit=units, lat_name=lat_name, lon_name=lon_name, shift_lon=False, time_cycle=12, scale_factor=scf, level=thelevel)
        mdata_all.adjust_time(day=15)

        if target_grid == 't63grid':
            mdata._apply_mask(get_T63_landseamask(False, area=valid_mask))
            mdata_all._apply_mask(get_T63_landseamask(False, area=valid_mask))
        else:
            tmpmsk = get_generic_landseamask(False, area=valid_mask, target_grid=target_grid)
            mdata._apply_mask(tmpmsk)
            mdata_all._apply_mask(tmpmsk)
            del tmpmsk

        mdata_mean = mdata_all.fldmean()

        # return the monthly data as a tuple (time, area-mean time series, full data object)
        retval = (mdata_all.time, mdata_mean, mdata_all)

        del mdata_all

        return mdata, retval
Beispiel #19
0
    def get_model_data_generic(self, interval='season', **kwargs):
        """
        Parameters specific to this routine are:
            filename - file basename
            variable - name of the variable, given as the short_name in the netCDF file

        kwargs is a dictionary with one key per model type; each key maps to
        a dictionary with the corresponding properties.

        """

        if self.type not in kwargs.keys():
            print 'WARNING: can not retrieve data with the generic routine, as no settings are given for model type: ', self.type, kwargs.keys()
            return None

        locdict = kwargs[self.type]

        # read settings and details from the keyword arguments
        # no defaults; everything should be explicitly specified in either the config file or the dictionaries
        varname = locdict.pop('variable')
        units = locdict.pop('unit', 'Crazy Unit')
        #interval = kwargs.pop('interval')  # no default value; this option is specified by the configuration file

        lat_name = locdict.pop('lat_name', 'lat')
        lon_name = locdict.pop('lon_name', 'lon')
        model_suffix = locdict.pop('model_suffix')
        model_prefix = locdict.pop('model_prefix')
        file_format = locdict.pop('file_format')
        scf = locdict.pop('scale_factor')
        valid_mask = locdict.pop('valid_mask')
        custom_path = locdict.pop('custom_path', None)
        thelevel = locdict.pop('level', None)

        target_grid = self._actplot_options['targetgrid']
        interpolation = self._actplot_options['interpolation']

        if custom_path is None:
            filename1 = ("%s%s/merged/%s_%s_%s_%s_%s.%s" %
                        (self.data_dir, varname, varname, model_prefix, self.model, self.experiment, model_suffix, file_format))
        else:
            if self.type == 'CMIP5':
                filename1 = ("%s/%s_%s_%s_%s_%s.%s" %
                             (custom_path, varname, model_prefix, self.model, self.experiment, model_suffix, file_format))
            elif self.type == 'CMIP5RAW':
                filename1 = ("%s/%s_%s_%s_%s_%s.%s" %
                             (custom_path, varname, model_prefix, self.model, self.experiment, model_suffix, file_format))
            elif self.type == 'CMIP5RAWSINGLE':
                raise NotImplementedError('Custom path not yet implemented for CMIP5RAWSINGLE!')
            elif self.type == 'CMIP3':
                filename1 = ("%s/%s_%s_%s_%s.%s" %
                             (custom_path, self.experiment, self.model, varname, model_suffix, file_format))
            else:
                print self.type
                raise ValueError('Can not generate filename: invalid model type! %s' % self.type)
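
        # Example (illustrative values only): with varname='rsds', model_prefix='Amon',
        # model='MPI-ESM-LR', experiment='amip', model_suffix='ensmean' and
        # file_format='nc', the default-path branch above resolves to something like
        #   <data_dir>rsds/merged/rsds_Amon_MPI-ESM-LR_amip_ensmean.nc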

        force_calc = False

        if self.start_time is None:
            raise ValueError('Start time needs to be specified')
        if self.stop_time is None:
            raise ValueError('Stop time needs to be specified')

        #/// PREPROCESSING ///
        cdo = Cdo()
        s_start_time = str(self.start_time)[0:10]
        s_stop_time = str(self.stop_time)[0:10]

        #1) select timeperiod and generate monthly mean file
        if target_grid == 't63grid':
            gridtok = 'T63'
        else:
            gridtok = 'SPECIAL_GRID'

        file_monthly = filename1[:-3] + '_' + s_start_time + '_' + s_stop_time + '_' + gridtok + '_monmean.nc'  # target filename
        file_monthly = get_temporary_directory() + os.path.basename(file_monthly)

        sys.stdout.write('\n *** Model file monthly: %s\n' % file_monthly)

        if not os.path.exists(filename1):
            print 'WARNING: File not existing: ' + filename1
            return None

        cdo.monmean(options='-f nc', output=file_monthly, input='-' + interpolation + ',' + target_grid + ' -seldate,' + s_start_time + ',' + s_stop_time + ' ' + filename1, force=force_calc)

        sys.stdout.write('\n *** Reading model data... \n')
        sys.stdout.write('     Interval: ' + interval + '\n')

        #2) calculate monthly or seasonal climatology
        if interval == 'monthly':
            mdata_clim_file = file_monthly[:-3] + '_ymonmean.nc'
            mdata_sum_file = file_monthly[:-3] + '_ymonsum.nc'
            mdata_N_file = file_monthly[:-3] + '_ymonN.nc'
            mdata_clim_std_file = file_monthly[:-3] + '_ymonstd.nc'
            cdo.ymonmean(options='-f nc -b 32', output=mdata_clim_file, input=file_monthly, force=force_calc)
            cdo.ymonsum(options='-f nc -b 32', output=mdata_sum_file, input=file_monthly, force=force_calc)
            cdo.ymonstd(options='-f nc -b 32', output=mdata_clim_std_file, input=file_monthly, force=force_calc)
            cdo.div(options='-f nc', output=mdata_N_file, input=mdata_sum_file + ' ' + mdata_clim_file, force=force_calc)  # number of samples
        elif interval == 'season':
            mdata_clim_file = file_monthly[:-3] + '_yseasmean.nc'
            mdata_sum_file = file_monthly[:-3] + '_yseassum.nc'
            mdata_N_file = file_monthly[:-3] + '_yseasN.nc'
            mdata_clim_std_file = file_monthly[:-3] + '_yseasstd.nc'
            cdo.yseasmean(options='-f nc -b 32', output=mdata_clim_file, input=file_monthly, force=force_calc)
            cdo.yseassum(options='-f nc -b 32', output=mdata_sum_file, input=file_monthly, force=force_calc)
            cdo.yseasstd(options='-f nc -b 32', output=mdata_clim_std_file, input=file_monthly, force=force_calc)
            cdo.div(options='-f nc -b 32', output=mdata_N_file, input=mdata_sum_file + ' ' + mdata_clim_file, force=force_calc)  # number of samples
        else:
            raise ValueError('Unknown temporal interval. Can not perform preprocessing!')

        if not os.path.exists(mdata_clim_file):
            return None

        #3) read data
        if interval == 'monthly':
            thetime_cycle = 12
        elif interval == 'season':
            thetime_cycle = 4
        else:
            print interval
            raise ValueError('Unsupported interval!')
        mdata = Data(mdata_clim_file, varname, read=True, label=self._unique_name, unit=units, lat_name=lat_name, lon_name=lon_name, shift_lon=False, scale_factor=scf, level=thelevel, time_cycle=thetime_cycle)
        mdata_std = Data(mdata_clim_std_file, varname, read=True, label=self._unique_name + ' std', unit='-', lat_name=lat_name, lon_name=lon_name, shift_lon=False, level=thelevel, time_cycle=thetime_cycle)
        mdata.std = mdata_std.data.copy()
        del mdata_std
        mdata_N = Data(mdata_N_file, varname, read=True, label=self._unique_name + ' N', unit='-', lat_name=lat_name, lon_name=lon_name, shift_lon=False, scale_factor=scf, level=thelevel)
        mdata.n = mdata_N.data.copy()
        del mdata_N

        #ensure that climatology always starts with January, therefore set date and then sort
        mdata.adjust_time(year=1700, day=15)  # set arbitrary time for climatology
        mdata.timsort()

        #4) read monthly data
        mdata_all = Data(file_monthly, varname, read=True, label=self._unique_name, unit=units, lat_name=lat_name, lon_name=lon_name, shift_lon=False, time_cycle=12, scale_factor=scf, level=thelevel)
        mdata_all.adjust_time(day=15)

        # mask_antarctica masks everything south of 60 degrees S;
        # Antarctica is only masked if exclusively LAND points shall be used
        if valid_mask == 'land':
            mask_antarctica = True
        elif valid_mask == 'ocean':
            mask_antarctica = False
        else:
            mask_antarctica = False

        if target_grid == 't63grid':
            mdata._apply_mask(get_T63_landseamask(False, area=valid_mask, mask_antarctica=mask_antarctica))
            mdata_all._apply_mask(get_T63_landseamask(False, area=valid_mask, mask_antarctica=mask_antarctica))
        else:
            tmpmsk = get_generic_landseamask(False, area=valid_mask, target_grid=target_grid, mask_antarctica=mask_antarctica)
            mdata._apply_mask(tmpmsk)
            mdata_all._apply_mask(tmpmsk)
            del tmpmsk

        mdata_mean = mdata_all.fldmean()

        # return the monthly data as a tuple (time, area-mean time series, full data object)
        retval = (mdata_all.time, mdata_mean, mdata_all)

        del mdata_all
        return mdata, retval
Beispiel #20
0
    def _import_regional_file(self, region_file, varname, targetgrid=None, logfile=None):
        """
        Check whether the regional file can be imported directly or
        whether the regions are provided as vector data. In the latter
        case the regions are rasterized and the result is stored in a
        temporary netCDF file.

        Parameters
        ----------
        region_file : str
            name of file defining the region. This is either a netCDF
            file which contains the mask as different integer values
            or it is a *.reg file which contains the regions as
            vector data.
        varname : str
            name of variable in netCDF file
        targetgrid : str
            name of the target grid; either 't63grid' or the name of a
            file with a valid geometry
        logfile : str
            optional name of a logfile; when vector regions are rasterized,
            the mapping between region names and integer IDs is written to it

        Returns
        -------
            region_filename, region_file_varname
        """

        if not os.path.exists(region_file):
            raise ValueError('ERROR: region file does not exist: ' + region_file)

        ext = os.path.splitext(region_file)[1]
        if ext == '.nc':
            # netCDF file was given. Try to read variable
            if varname is None:
                raise ValueError('ERROR: no variable name given!')
            try:
                tmp = Data(region_file, varname, read=True)
            except Exception:
                raise ValueError('ERROR: the regional masking file can not be read!')
            del tmp

            # everything is fine
            return region_file, varname

        elif ext == '.reg':
            # regions were given as a vector file. Read it, rasterize
            # the regions and store the result in a temporary netCDF file
            import tempfile

            if targetgrid is None:
                raise ValueError('ERROR: targetgrid needs to be specified for vectorization of regions!')

            if targetgrid == 't63grid':
                ls_mask = get_T63_landseamask(True, area='global', mask_antarctica=False)
            else:
                ls_mask = get_generic_landseamask(True, area='global', target_grid=targetgrid,
                                                  mask_antarctica=False)
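            # note: the land-sea mask itself is not applied here; only its
            # lon/lat coordinates are used below to define the raster geometry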

            # temporary netCDF filename
            region_file1 = tempfile.mktemp(prefix='region_mask_', suffix='.nc')
            R = RegionParser(region_file)  # read region vector data
            M = Raster(ls_mask.lon, ls_mask.lat)
            polylist = []
            if logfile is not None:
                logf = open(logfile, 'w')
            else:
                logf = None

            region_id = 1
            for k in R.regions.keys():
                reg = R.regions[k]
                polylist.append(pycmbsPolygon(region_id, zip(reg.lon, reg.lat)))
                if logf is not None:  # store mapping table (region name -> integer ID)
                    logf.write(k + '\t' + str(region_id) + '\n')
                region_id += 1

            M.rasterize_polygons(polylist)
            if logf is not None:
                logf.close()

            # generate dummy output file
            O = Data(None, None)
            O.data = M.mask
            O.lat = ls_mask.lat
            O.lon = ls_mask.lon
            varname = 'regions'
            O.save(region_file1, varname=varname, format='nc', delete=True)
            print('Region file was stored in file: %s' % region_file1)

            # check again that file is readable
            try:
                tmp = Data(region_file1, varname, read=True)
            except Exception:
                print region_file1, varname
                raise ValueError('ERROR: the generated region file is not readable!')
            del tmp

            return region_file1, varname

        else:
            raise ValueError('ERROR: unsupported file type')