Example #1
0
    def __init__(self, file_prefix, variables_requested=None, runname="noname",
                 filetimes=False, verbose=False, gridded_int=True, printfiles=True,
                 **kwargs): 
        '''Kamodofy one day of GITM model output.

        file_prefix must be of form "3D***_tYYMMDD" to load all files for one day,
        and must include a complete path to the files.

        Parameters:
            file_prefix: path + filename prefix for one day of model output.
            variables_requested: list of variable names to load (default: all).
            runname: label stored on the object for identification.
            filetimes: if True, return after collecting time attributes only.
            verbose: if True, print timing information.
            gridded_int: if True, also register gridded interpolators.
            printfiles: if True, print the list of files found.
        '''
        super(MODEL, self).__init__()
        if variables_requested is None:  #avoid a shared mutable default argument
            variables_requested = []

        #check for .nc files matching the prefix
        total_files = glob.glob(file_prefix+'*')
        self.patterns = np.unique([f.split('/')[-1].split('\\')[-1][:13] \
                                    for f in total_files])
        n_ncfiles = len(glob.glob(file_prefix+'.nc'))
        
        #separate out file directory (handle both windows and unix separators)
        slash_location = file_prefix.rfind('\\')
        if slash_location==-1: slash_location = file_prefix.rfind('/')
        file_dir = file_prefix[:slash_location+1]
        
        #if .nc files not there, create them from the binary model output
        if len(self.patterns)!=n_ncfiles:  #then need to create .nc files for each pattern            
            from kamodo.readers.gitm_tocdf import GITMbin_toCDF as toCDF
            test = [toCDF(file_dir+p) for p in self.patterns]  #get/convert files with given prefix
            if sum(test)!=len(self.patterns): 
                self.conversion_test = False
                return   #if file conversion fails, return
        
        #establish time attributes first for file searching, preferring 3D file
        t0 = ti.perf_counter()
        cdf_data = Dataset(file_dir+self.patterns[-1]+'.nc', 'r')
        string_date = cdf_data.filedate       
        self.filedate = datetime.strptime(string_date+' 00:00:00', \
                                          '%Y-%m-%d %H:%M:%S').replace(tzinfo=timezone.utc) #dt object

        #establish beginning and end times of files found
        files = cdf_data.file.split(',')
        self.datetimes = [filename_to_dts(filename, string_date) for filename \
                          in [files[0], files[-1]]]  #strings in format = YYYY-MM-DD HH:MM:SS 
        self.timerange0={'min':self.datetimes[0], 'max':self.datetimes[-1], 'n':len(files)}
        self.timerange = self.timerange0
        self.filetimes=[dts_to_ts(file_dts) for file_dts in self.datetimes]   #timestamps in UTC     
        if filetimes: 
            cdf_data.close()  #close file before early return to avoid a handle leak
            return   
        
        #return if only one file found because interpolator code will break
        if len(files)<2:
            cdf_data.close()  #close file before early return to avoid a handle leak
            print('Not enough files found with given file prefix.')
            return        
        
        #store coordinates of last file for reference, time is the same for all
        self._time = np.array(cdf_data.variables['time'])  #accurate to the sec
        self._lat = np.array(cdf_data.variables['lat'])
        self._lon = np.array(cdf_data.variables['lon'])
        self._height = np.array(cdf_data.variables['height'])
        cdf_data.close()
        
        #store variables
        self.filename = []
        self.runname = runname
        self.missing_value = np.NAN
        self._registered = 0
        self.variables = {}
        self.varfiles = {}  #store which variable came from which file for easier association with coords
        
        #loop through files (one .nc file per filename pattern)
        for i in range(len(self.patterns)):
            cdf_data = Dataset(file_dir+self.patterns[i]+'.nc', 'r')
            files = cdf_data.file.split(',')
            self.filename.extend(files)
            
            #store coordinates per pattern, time is always the same between files
            setattr(self, '_lat'+str(i), np.array(cdf_data.variables['lat'])) #in deg
            setattr(self, '_lon'+str(i), np.array(cdf_data.variables['lon'])) #in deg
            setattr(self, '_height'+str(i), np.array(cdf_data.variables['height'])) #in km
        
            #check var_list for variables not possible in this file set
            if len(variables_requested)>0:
                gvar_list = [key for key in cdf_data.variables.keys() if key \
                             in variables_requested]
                if len(gvar_list)!=len(variables_requested):
                    err_list = [item for item in variables_requested if item not in \
                                cdf_data.variables.keys()]
                    print(f'Some requested variables are not available in {self.patterns[i]}:', err_list)
                    print('These files have:', cdf_data.variables.keys())
            else:
                gvar_list = [key for key in cdf_data.variables.keys() if key not in \
                             ['time','lat','lon','height']]
            self.varfiles[str(i)] = gvar_list  #store which file these variables came from
        
            # Store variable data, units, etc from netCDF file.
            variables = {key:{'units':cdf_data.variables[key].units,
                                        'data':np.array(cdf_data.variables[key]), 
                                        'scale': cdf_data.variables[key].datascale} \
                              for key in gvar_list}
            cdf_data.close()
            for key in variables.keys(): self.variables[key] = variables[key] #save to class object
            #this overwrites the TEC data from the 2D files with the calculated TEC data from the 3D files

        if printfiles: 
            print(f'{len(self.filename)} Files:')
            for file in self.filename: print(file)
            
        #register interpolators for each variable
        varname_list = [key for key in self.variables.keys()]  #store original list b/c gridded interpolators
        for varname in varname_list:
            if len(self.variables[varname]['data'].shape)==3:
                self.register_3D_variable(self.variables[varname]['units'], 
                                      self.variables[varname]['data'], varname,
                                      gridded_int)
            elif len(self.variables[varname]['data'].shape)==4:
                self.register_4D_variable(self.variables[varname]['units'], 
                                      self.variables[varname]['data'], varname,
                                      gridded_int)
        self = RPlot.initialize_4D_plot(self)  #initialize 4D plotting variables 
        if verbose: print(f'{len(varname_list)} variables kamodofied in {ti.perf_counter()-t0:.5f}s.')
Example #2
0
    def __init__(self,
                 file_prefix,
                 variables_requested=None,
                 runname="noname",
                 filetimes=False,
                 verbose=False,
                 gridded_int=True,
                 printfiles=True,
                 **kwargs):
        '''Kamodofy one day of SWMF-IE model output.

        file_prefix must be of form "3D***_tYYMMDD" to load all files for one
        day and include a complete path to the files.

        Parameters:
            file_prefix: path + filename prefix for one day of model output.
            variables_requested: list of variable names to load (default: all).
            runname: label stored on the object for identification.
            filetimes: if True, return after collecting time attributes only.
            verbose: if True, print timing information.
            gridded_int: if True, also register gridded interpolators.
            printfiles: if True, print the list of files found.
        '''
        super(MODEL, self).__init__()
        if variables_requested is None:  #avoid a shared mutable default argument
            variables_requested = []

        #check if given .nc file exists. If not, convert files with same prefix to netCDF
        if not path.isfile(file_prefix + '.nc'):
            from kamodo.readers.swmfie_tocdf import convertSWMFIE_toCDF
            test = convertSWMFIE_toCDF(file_prefix)
            if not test:
                self.conversion_test = test  #only needed for 1 file/time cases
                return  #if file conversion fails, return 0
        t0 = ti.perf_counter()

        #establish time attributes first for file searching
        file_datestr = file_prefix.split('/')[-1].split('\\')[-1][3:11]
        string_date = file_datestr[:4] + '-' + file_datestr[
            4:6] + '-' + file_datestr[6:8]  #'YYYY-MM-DD'
        self.filedate = datetime.strptime(string_date+' 00:00:00', \
                                          '%Y-%m-%d %H:%M:%S').replace(tzinfo=timezone.utc) #dt object

        #establish beginning and end time of file list
        cdf_data = Dataset(file_prefix + '.nc', 'r')
        files = cdf_data.file.split(',')
        self.datetimes = list(
            filename_to_dts(
                [files[0], files[-1]],
                string_date))  #strings in format = YYYY-MM-DD HH:MM:SS
        self.timerange0 = {
            'min': self.datetimes[0],
            'max': self.datetimes[-1],
            'n': len(files)
        }
        self.timerange = self.timerange0
        self.filetimes = [dts_to_ts(file_dts)
                          for file_dts in self.datetimes]  #timestamps in UTC
        if filetimes:
            cdf_data.close()  #close file before early return to avoid a handle leak
            return

        #return if only one file found because interpolator code will break
        if len(files) < 2:
            cdf_data.close()  #close file before early return to avoid a handle leak
            print('Not enough files found with given file prefix.')
            return

        #store variables
        self.filename = files
        self.runname = runname
        self.missing_value = np.NAN
        self._registered = 0
        self.variables = dict()
        if printfiles:
            print('Files:')
            for file in self.filename:
                print(file)

        #get list of variables possible in these files using first file
        if len(variables_requested) > 0:
            gvar_list = [key for key in cdf_data.variables.keys() if key \
                         in variables_requested]
            if len(gvar_list) != len(variables_requested):
                err_list = [item for item in variables_requested if item not in \
                            cdf_data.variables.keys()]
                print('Some requested variables are not available:', err_list)
        else:
            gvar_list = [key for key in cdf_data.variables.keys() \
                         if key not in cdf_data.dimensions.keys() and key not in \
                             ['theta_Btilt', 'psi_Btilt']]
            #avoid returning coordinates stored elsewhere (or ignored)

        #store coordinate data and Btilt (for documentation)
        self._time = np.array(
            cdf_data.variables['time'])  #hours since midnight
        self._height = np.array(cdf_data.variables['height'])
        self._lat = np.array(cdf_data.variables['lat'])
        self._lon = np.array(cdf_data.variables['lon'])
        self.theta_Btilt = np.array(cdf_data.variables['theta_Btilt'])
        self.psi_Btilt = np.array(cdf_data.variables['psi_Btilt'])

        # Store variable's data, units, and datatypes.
        self.variables = {key:{'units':cdf_data.variables[key].units,
                               'data':np.array(cdf_data.variables[key])}\
                          for key in gvar_list}
        cdf_data.close()
        if verbose: print(f'Took {ti.perf_counter()-t0:.6f}s to read in data')

        #register interpolators for each variable
        varname_list = [key for key in self.variables.keys()
                        ]  #store original list b/c gridded interpolators
        t_reg = ti.perf_counter()
        for varname in varname_list:  #all are 3D variables
            self.register_3D_variable(self.variables[varname]['units'],
                                      self.variables[varname]['data'], varname,
                                      gridded_int)
        if verbose:
            print(f'Took {ti.perf_counter()-t_reg:.5f}s to register '+\
                  f'{len(varname_list)} variables.')
        self = RPlot.initialize_4D_plot(self)  #initialize
        if verbose:
            print(f'Took a total of {ti.perf_counter()-t0:.5f}s to kamodofy '+\
                  f'{len(gvar_list)} variables.')
Example #3
0
    def __init__(self,
                 filename,
                 variables_requested=None,
                 runname="noname",
                 filetimes=False,
                 verbose=False,
                 gridded_int=True,
                 printfiles=True,
                 **kwargs):  #filename should include the full path
        '''Kamodofy one file of model output.

        Parameters:
            filename: full path to the netCDF output file.
            variables_requested: list of standardized variable names to load
                (default: all variables known in model_varnames).
            runname: label stored on the object for identification.
            filetimes: if True, return after collecting time attributes only.
            verbose: if True, print timing information.
            gridded_int: if True, also register gridded interpolators.
            printfiles: if True, print the filename used.
        '''
        #### Use a super init so that your class inherits any methods from Kamodo
        super(MODEL, self).__init__()
        if variables_requested is None:  #avoid a shared mutable default argument
            variables_requested = []

        #store time information for satellite flythrough layer to choose the right file
        t0 = ti.perf_counter()
        cdf_data = Dataset(filename, 'r')
        time = np.array(cdf_data.variables['time']
                        )  #in minutes since 2000-03-20 00:00:00 UTC
        self.filedate = min_to_date(
            time[0])  #datetime object for file date at midnight UTC
        self.datetimes = [min_to_dts(time_minutes) for time_minutes \
                          in [time[0], time[-1]]]  #strings in format = YYYY-MM-DD HH:MM:SS
        self.timerange0 = {
            'min': self.datetimes[0],
            'max': self.datetimes[-1],
            'n': len(time)
        }
        self.timerange = self.timerange0
        self.filetimes = [dts_to_ts(file_dts)
                          for file_dts in self.datetimes]  #timestamps in UTC
        if filetimes:
            cdf_data.close()  #close file before early return to avoid a handle leak
            return

        #### Store our inputs as class attributes to the class
        self.filename = filename
        self.runname = runname
        self.missing_value = np.NAN
        self._registered = 0
        self.variables = dict()
        if printfiles:
            print('Files:', self.filename)

        #These lists need to be the standardized variable name to match that above,
        #not the names from the data file.
        #NOTE(review): 'psi_O2' appears in both ilev_list and lev_list — confirm
        #which level coordinate it should be indexed with.
        self.ilev_list = [
            "rho", "H_ilev", "H_geopot", "N_e", "omega", "V", 'O2P_ELD',
            'N2P_ELD', 'N_Nplus', 'NOP_ELD', 'Sigma_P', 'Sigma_H', 'Q_Joule',
            'psi_ON2', 'N2D_ELD', "psi_O2"
        ]  # index with ilev
        self.lev_list = [
            'T_n', 'u_n', 'v_n', 'psi_O2', 'psi_O', 'psi_N2', 'psi_He',
            'H_lev', 'psi_NO', 'psi_N4S', 'T_e', 'T_i', 'N_O2plus', 'N_Oplus',
            'Q_CO2cool', 'Q_NOcool', "u_iExB", "v_iExB", "w_iExB"
        ]  # index with lev
        self.imlev_list = ['H_imlev']  # index with imlev

        #translate from standardized variables to names in file
        #remove variables requested that are not in the file
        if len(variables_requested) > 0:
            gvar_list = [key for key, value in model_varnames.items() \
                             if value[0] in variables_requested and \
                                 key in cdf_data.variables.keys()]  # file variable names

            #check for variables requested but not available
            if len(gvar_list) != len(variables_requested):
                err_list = [value[0] for key, value in model_varnames.items() \
                             if value[0] in variables_requested and \
                                 key not in cdf_data.variables.keys()]
                print('Some requested variables are not available:', err_list)

            #check that the appropriate height variable is added for the variables requested
            check_list = [key for key, value in model_varnames.items()\
                          if value[0] in self.ilev_list and key in gvar_list]
            if 'ZG' not in gvar_list and len(check_list) > 0:
                gvar_list.append(
                    'ZG'
                )  #force addition of H for conversion of ilev to H and back
            check_list = [key for key, value in model_varnames.items()\
                          if value[0] in self.lev_list and key in gvar_list]
            if 'ZGMID' not in gvar_list and len(check_list) > 0:
                gvar_list.append('ZGMID')
            check_list = [key for key, value in model_varnames.items()\
                          if value[0] in self.imlev_list and key in gvar_list]
            if 'ZMAG' not in gvar_list and len(check_list):
                gvar_list.append('ZMAG')
        else:
            gvar_list = [key for key in cdf_data.variables.keys() \
                         if key in model_varnames.keys()]

        #### Store coordinate data as class attributes
        self._time = min_to_hrs(
            time, self.filedate)  #convert to hours since midnight
        self._ilev = np.array(cdf_data.variables['ilev'])
        lat = np.array(
            cdf_data.variables['lat'])  #NOT FULL RANGE IN LATITIUDE!!!
        lat = np.insert(lat, 0, lat[0] - np.diff(lat).min()
                        )  #insert a grid point at beginning (before -87.5)
        self._lat = np.append(lat, lat[-1] +
                              np.diff(lat).min())  #and at the end (after 87.5)
        lon = np.array(
            cdf_data.variables['lon']) + 180.  #NOT WRAPPED IN LONGITUDE!!!!!
        self._lon = np.append(lon, 360.)  #add 360. to end of array
        self._lev = np.array(cdf_data.variables['lev'])
        self._imlev = np.array(cdf_data.variables['imlev']
                               )  #'mlev' isn't used by any of the variables
        self._mlat = np.array(cdf_data.variables['mlat'])
        self._mlon = np.array(
            cdf_data.variables['mlon']) + 180.  #shifting to 0-360 range
        self._height = np.array([0.])  #for compatibility only

        #### Store the requested variables into a dictionary
        ####     as the variables attributes
        #### This will contain units, dtype, and the data
        self.variables = {model_varnames[key][0]:{'units':model_varnames[key][-1],
                               'data':np.array(cdf_data.variables[key])}\
                          for key in gvar_list}  #store with key = standardized name
        cdf_data.close()
        if verbose: print(f'Took {ti.perf_counter()-t0:.6f}s to read in data')

        #### register interpolators for each requested variable
        varname_list = [key for key in self.variables.keys()
                        ]  #store original list b/c gridded interpolators
        t_reg = ti.perf_counter()
        for varname in varname_list:
            if len(self.variables[varname]['data'].shape) == 3:
                self.register_3D_variable(self.variables[varname]['units'],
                                          self.variables[varname]['data'],
                                          varname, gridded_int)
            elif len(self.variables[varname]['data'].shape) == 4:
                self.register_4D_variable(self.variables[varname]['units'],
                                          self.variables[varname]['data'],
                                          varname, gridded_int)
        if verbose:
            print(f'Took {ti.perf_counter()-t_reg:.5f}s to register '+\
                  f'{len(varname_list)} variables.')
        self = RPlot.initialize_4D_plot(self)  #initialize plots
        if verbose:
            print(f'Took a total of {ti.perf_counter()-t0:.5f}s to kamodofy '+\
                  f'{len(varname_list)} variables.')
Example #4
0
    def __init__(self,
                 filename,
                 variables_requested=None,
                 filetimes=False,
                 runname="noname",
                 printfiles=True,
                 gridded_int=True,
                 **kwargs):
        '''Kamodofy one day of CTIPe model output.

        Parameters:
            filename: full path to one of the 4 daily output files
                (YYYYMMDD-plot-[density|height|neutral|plasma].nc); the
                matching density, height and neutral files are located and
                read (the plasma file is not read).
            variables_requested: list of standardized variable names to load
                (default: all variables known in model_varnames).
            filetimes: if True, return after collecting time attributes only.
            runname: label stored on the object for identification.
            printfiles: if True, print the files used.
            gridded_int: if True, also register gridded interpolators.
        '''
        super(MODEL, self).__init__()  #inherit Kamodo class behavior

        filename = CTIPe_filesearch(filename)  #get/convert filename
        filetype_list = [
            'plot-density-wrapped', 'plot-height-wrapped',
            'plot-neutral-wrapped', 'plot-plasma-wrapped'
        ]
        #split the given name on whichever filetype it contains
        file_beg, file_end = [
            filename.split(filetype) for filetype in filetype_list
            if len(filename.split(filetype)) > 1
        ][0]
        #rebuild the three filenames that are actually read
        filename_density, filename_height, filename_neutral = [
            file_beg + filetype + file_end for filetype in filetype_list[:-1]
        ]
        self.filename = [filename_density, filename_height, filename_neutral]
        if printfiles:
            print('Files:\n' + filename_density + '\n' + filename_height +
                  '\n' + filename_neutral)

        #establish time attributes first
        self._ctipe_density = Dataset(filename_density)
        self.filedate = datetime.strptime(
            file_beg[-11:-1] + ' 00:00:00',
            '%Y-%m-%d %H:%M:%S').replace(tzinfo=timezone.utc)
        t = np.array(self._ctipe_density.variables['time'])
        self.datetimes = [
            datetime.utcfromtimestamp(t[0]).isoformat(sep=' '),
            datetime.utcfromtimestamp(t[-1]).isoformat(sep=' ')
        ]  #strings
        self.filetimes = [t[0],
                          t[-1]]  #timestamps in hours for matching in wrapper
        self.timerange0 = {
            'min': self.datetimes[0],
            'max': self.datetimes[1],
            'n': len(t)
        }  #strings in format = YYYY-MM-DD HH:MM:SS
        self.timerange = self.timerange0
        if filetimes:
            #close file before early return to avoid a handle leak
            self._ctipe_density.close()
            return

        #pull in remaining datasets from files into kamodo object
        self.ilev_list = [
            'rho', 'T', 'H_ilev', 'Vn_lat', 'Vn_lon', 'Vn_H', 'T_n', 'Rmt',
            'N_n', 'Q_Solar', 'Q_Joule', 'Q_radiation', 'N_O', 'N_O2', 'N_N2',
            'N_NO', 'N_NOplus', 'N_N2plus', 'N_O2plus', 'N_Nplus', 'Sigma_P',
            'Sigma_H', 'Vi_lon', 'Vi_lat'
        ]
        self.modelname = 'CTIPe'
        self._ctipe_height = Dataset(filename_height)  #in meters
        self._ctipe_neutral = Dataset(filename_neutral)

        #pull in coordinate variables into kamodo object (density file)
        self._time = np.array(ts_to_hrs(
            t, self.filedate))  #convert timestamps to hrs since midnight
        self._ilev = np.array(
            self._ctipe_density.variables['plev'])  #_ilev to match others
        self._lat = np.array(self._ctipe_density.variables['lat'])
        self._lon = np.array(self._ctipe_density.variables['lon'])

        #pull in coordinate variables into kamodo object (height file, in km)
        self._height = np.array(
            self._ctipe_height.variables['ht'])  #_height to match others
        self._lat_height = np.array(self._ctipe_height.variables['lat'])
        self._lon_height = np.array(self._ctipe_height.variables['lon'])

        ##pull in coordinate variables into kamodo object (neutral file)
        self._ilev_neutral = np.array(self._ctipe_neutral.variables['plev'])
        self._lat_neutral = np.array(self._ctipe_neutral.variables['lat'])
        self._lon_neutral = np.array(self._ctipe_neutral.variables['lon'])
        self._elat = np.array(self._ctipe_neutral.variables['elat'])
        self._elon = np.array(self._ctipe_neutral.variables['elon'])

        #initialize variables
        #(a second, redundant super().__init__() call was removed here; the
        # parent class is already initialized at the top of this method)
        self._registered = 0
        #NOTE(review): this overwrites the 3-file list stored in self.filename
        #above with the single (converted) filename — confirm which is intended
        self.filename = filename
        self.runname = runname
        self.missing_value = np.NAN
        self.variables = dict()

        #if variables_requested not given, collect all values from dict above as a list
        if variables_requested is None:
            variables_requested = [
                value[0] for key, value in model_varnames.items()
            ]
        else:
            #copy so the caller's list is not mutated by the append below
            variables_requested = list(variables_requested)

        # add height variable needed to height (not IP-level) interpolatioms
        check_list = [value[0] for key, value in model_varnames.items()\
                          if value[0] in self.ilev_list and value[0] in variables_requested]
        if 'H_ilev' not in variables_requested and len(check_list) > 0:
            variables_requested.append('H_ilev')

        #collect list of ctipe variable name equivalents
        var_names = [
            key for key, value in model_varnames.items()
            if value[0] in variables_requested
        ]
        extra_variables = [
            var for var in variables_requested
            if var not in [value[0] for key, value in model_varnames.items()]
        ]
        if len(extra_variables
               ) > 0:  #print statement if some variables not in files
            print('Some requested variables are not available:',
                  extra_variables)

        #cycle through all variables in one loop
        bad_varnames = {
            'density': ['ZMAG', 'Rmt', 'H'],
            'height': ['ZMAG'],
            'neutral': ['ZMAG', 'electron_density']
        }  #initialize bad_var dictionary per file_type
        for varname in var_names:
            #determine source file type for variable
            file_type = ''
            if varname in self._ctipe_density.variables.keys():
                file_type = 'density'
            elif varname in self._ctipe_height.variables.keys():
                file_type = 'height'
            elif varname in self._ctipe_neutral.variables.keys():
                file_type = 'neutral'
            else:
                raise AttributeError(
                    f"{varname} not found in the files' metadata.")

            #set units, initialize variables
            variable = np.array(
                getattr(self, '_ctipe_' +
                        file_type).variables[varname])  #set variable
            units = model_varnames[varname][-1]
            if (len(variable.shape)
                    not in [3, 4]) or (varname in bad_varnames[file_type]):
                continue  #if not 3D or 4D or not allowed, skip to next variable

            #register allowed 3D and 4D variables
            kamodo_varname = model_varnames[varname][
                0]  #retrieve standardized name
            self.variables[kamodo_varname] = dict(
                units=units, data=variable)  #register in object
            if len(variable.shape
                   ) == 4:  #define and register interpolators for each
                self.register_4D_variable(units, variable, kamodo_varname,
                                          file_type, gridded_int)
            elif len(variable.shape) == 3:
                self.register_3D_variable(units, variable, kamodo_varname,
                                          file_type, gridded_int)

        #close netCDF4 files, initialize plotting variables
        self._ctipe_density.close()
        self._ctipe_height.close()
        self._ctipe_neutral.close()
        self = RPlot.initialize_4D_plot(
            self)  #initialize 4D plotting variables
Example #5
0
    def __init__(
            self,
            filename,
            variables_requested=None,
            runname="noname",
            printfiles=True,
            filetimes=False,
            gridded_int=True,
            **kwargs):
        '''Kamodofy one pair of IRI model output files (.3D. and .2D.).

        Parameters:
            filename: full path to either the .3D. or the matching .2D. file;
                both are located and read.
            variables_requested: list of standardized variable names to load
                (default: all variables known in model_varnames).
            runname: label stored on the object for identification.
            printfiles: if True, print the files used.
            filetimes: if True, return after collecting time attributes only.
            gridded_int: if True, also register gridded interpolators.
        '''
        # Prepare model for function registration for the input argument
        super(MODEL, self).__init__(**kwargs)

        #collect filenames: pair up the .3D. and .2D. names regardless of
        #which one was given
        if '.2D.' in filename:  #a 2D filename was given; derive the 3D name
            filename2d = filename
            #BUGFIX: was filename.replace('.3D.', '.2D.'), a no-op on a 2D
            #filename that left both names pointing at the 2D file
            filename = filename.replace('.2D.', '.3D.')
        else:
            filename2d = filename.replace('.3D.', '.2D.')
        self.filename = filename
        self.filename2d = filename2d
        if printfiles: print(filename, filename2d)

        #establish time attributes first
        self._iri3D = Dataset(filename, 'r')
        self._time = np.array(self._iri3D.variables['time']
                              ) / 60.  #convert to hours since midnight of file
        #file date from YYYY + day-of-year encoded in the filename
        self.filedate = datetime(int(filename[-10:-6]),1,1,0,0,0).replace(tzinfo=timezone.utc)+\
            timedelta(days=int(filename[-6:-3])-1)
        #strings with timezone info chopped off (UTC anyway)
        self.datetimes = [
            (self.filedate +
             timedelta(hours=self._time[0])).isoformat(sep=' ')[:19],
            (self.filedate +
             timedelta(hours=self._time[-1])).isoformat(sep=' ')[:19]
        ]  #strings
        self.filetimes=[datetime.timestamp(datetime.strptime(dt, '%Y-%m-%d %H:%M:%S').replace(\
            tzinfo=timezone.utc)) for dt in self.datetimes]   #timestamp in seconds, for value matching in wrapper?
        self.timerange0 = {
            'min': self.datetimes[0],
            'max': self.datetimes[1],
            'n': len(self._time)
        }  #strings in format = YYYY-MM-DD HH:MM:SS
        self.timerange = self.timerange0
        if filetimes:
            self._iri3D.close()  #close file before early return to avoid a handle leak
            return

        #collect data and make dimensional grid from 3D file
        self._iri2D = Dataset(filename2d, 'r')
        self._lon = np.array(self._iri3D.variables['lon'])
        self._lat = np.array(self._iri3D.variables['lat'])
        self._height = np.array(self._iri3D.variables['ht'])

        #store a few items in iri object
        self.missing_value = np.NAN
        self._registered = 0
        self.variables = {}
        self.runname = runname
        self.modelname = 'MODEL'

        #if variables_requested not given, collect all values from dict above as a list
        if variables_requested is None:
            variables_requested = [
                value[0] for key, value in model_varnames.items()
            ]

        #collect list of iri variable name equivalents
        var_names = [
            key for key, value in model_varnames.items()
            if value[0] in variables_requested
        ]
        extra_variables = [
            var for var in variables_requested
            if var not in [value[0] for key, value in model_varnames.items()]
        ]
        if len(extra_variables
               ) > 0:  #pull out variables not allowed and error if not empty
            print('Some requested variables are not available:',
                  extra_variables)

        #register each variable desired
        for varname in var_names:
            #determine source file type for variable
            file_type = ''
            if varname in self._iri3D.variables.keys(): file_type = '3D'
            elif varname in self._iri2D.variables.keys(): file_type = '2D'
            else:
                raise AttributeError(
                    f"{varname} not found in the files' metadata.")

            #set variables, units
            variable = np.array(
                getattr(self,
                        '_iri' + file_type).variables[varname])  #set data
            if (len(variable.shape) not in [3, 4]):
                continue  #skip anything not 3D or 4D
            units = model_varnames[varname][
                -1]  #units stored as last item in list per varname
            kamodo_varname = model_varnames[varname][0]

            #register allowed 3D and 4D variables
            self.variables[kamodo_varname] = dict(
                units=units, data=variable)  #register in object
            if len(variable.shape
                   ) == 4:  #define and register interpolators for each
                self.register_4D_variable(
                    units, variable, kamodo_varname,
                    gridded_int)  #len(var.shape) instead of file_type
            elif len(variable.shape) == 3:
                self.register_3D_variable(units, variable, kamodo_varname,
                                          gridded_int)

        #close netCDF4 files, initialize plotting variables
        self._iri3D.close()
        self._iri2D.close()
        self = RPlot.initialize_4D_plot(
            self)  #initialize 4D plotting variables