# ===== Example 1 =====
def main():
    """Entry point for the MarsFiles utility.

    Depending on the command-line flags, this either:
      * ``--fv3``:     converts Legacy GCM history files to FV3-format files,
      * ``--combine``: concatenates netCDF files along the time dimension
                       (via ``ncks`` or an internal merge), or
      * ``--tshift``:  interpolates diurn files to uniform local time.

    Relies on module-level globals defined elsewhere in the file: ``parser``,
    ``cat_method``, ``prRed``/``prCyan``, ``Dataset``, ``Ncdf``,
    ``make_FV3_files``, ``find_tod_in_diurn``, ``tshift``, ``np`` and ``os``.
    """
    # Parse the command line once; the original re-parsed for every access.
    args = parser.parse_args()
    file_list = args.input_file
    cwd = os.getcwd()
    path2data = os.getcwd()

    if args.fv3 and args.combine:
        prRed('Use --fv3 and --combine sequentially to avoid ambiguity ')
        exit()
    #=======Convert to FV3================
    if args.fv3:
        for irequest in args.fv3:
            if irequest not in ['fixed', 'average', 'daily', 'diurn']:
                prRed(
                    irequest +
                    """ is not available, select 'fixed', 'average', 'daily', or 'diurn'"""
                )
    #argument definitions:

        do_multi = False
        do_1year = False  #Used with LegacyGCM_1year.nc'

        #Get files to process
        histlist = []
        for filei in file_list:
            #Add path unless a full path was provided
            if not ('/' in filei):
                histlist.append(path2data + '/' + filei)
            else:
                histlist.append(filei)
        fnum = len(histlist)
        #NOTE(review): len() is always >= 0 so this is always True;
        #presumably `fnum >= 1` was intended -- kept to preserve behavior.
        if fnum >= 0: do_multi = True  #TODO why not 1?

        try:
            hist1year = path2data + '/LegacyGCM_1year.nc'
            file1year = Dataset(hist1year, 'r', format='NETCDF4_CLASSIC')
            do_1year = True
        except (OSError, RuntimeError):
            #No LegacyGCM_1year.nc present. Was a bare `except:`; narrowed to
            #the errors netCDF4 raises for a missing file (RuntimeError in
            #older versions, OSError/FileNotFoundError in newer ones).
            hist1year = None
            do_1year = False

        lsmin = None
        lsmax = None

        if do_multi:
            for f in histlist:
                #File names are expected to embed the solar longitude range
                #as ...LsXXX_LsYYY.nc; track the overall min/max across files.
                histname = os.path.basename(f)
                ls_l = histname[-12:-9]
                ls_r = histname[-6:-3]
                if lsmin is None:
                    lsmin = ls_l
                else:
                    lsmin = str(min(int(lsmin), int(ls_l))).zfill(3)
                if lsmax is None:
                    lsmax = ls_r
                else:
                    lsmax = str(max(int(lsmax), int(ls_r))).zfill(3)
                a = make_FV3_files(f, args.fv3, True, cwd)

    elif args.combine:
        #TODO Use ncks if it is available (not tested yet)
        if cat_method == 'ncks':
            subprocess.check_call('ncks --version',
                                  shell=True,
                                  stdout=open(os.devnull, "w"),
                                  stderr=open(os.devnull, "w"))
            #now cat together the files
            #NOTE(review): lsmin/lsmax are only assigned in the --fv3 branch
            #above, so these three lines raise NameError when this path runs;
            #the Ls range needs to be recomputed here -- TODO confirm intent.
            newfavg = "Ls" + lsmin + "_Ls" + lsmax + ".atmos_average.nc"
            newfdai = "Ls" + lsmin + "_Ls" + lsmax + ".atmos_daily.nc"
            newfdiu = "Ls" + lsmin + "_Ls" + lsmax + ".atmos_diurn.nc"
            tempdir = os.path.join(cwd, 'temp')
            os.makedirs(tempdir, exist_ok=True)
            os.chdir(tempdir)

            #Concatenate along the record (time) dimension with ncrcat
            catavg = "ncrcat ../*.atmos_average.nc " + "00000.atmos_average.nc"
            catdai = "ncrcat ../*.atmos_daily.nc " + "00000.atmos_daily.nc"
            catdiu = "ncrcat ../*.atmos_diurn.nc " + "00000.atmos_diurn.nc"
            p = subprocess.Popen(catavg, universal_newlines=True, shell=True)
            p.wait()
            p = subprocess.Popen(catdai, universal_newlines=True, shell=True)
            p.wait()
            p = subprocess.Popen(catdiu, universal_newlines=True, shell=True)
            p.wait()
            os.chdir(cwd)
            #Replace the original per-Ls files with the merged ones
            p = subprocess.run('rm -f Ls*.nc',
                               universal_newlines=True,
                               shell=True)
            p = subprocess.run('mv temp/*.nc .',
                               universal_newlines=True,
                               shell=True)
            p = subprocess.run('rm -rf temp/',
                               universal_newlines=True,
                               shell=True)
            if do_1year:
                #NOTE(review): do_1year/hist1year are also only set in the
                #--fv3 branch, and this call passes 2 arguments where the
                #call above passes 4 -- verify make_FV3_files' signature.
                a = make_FV3_files(hist1year, cwd)
        #=================================
        elif cat_method == 'internal':
            #Get files to process
            histlist = []
            for filei in file_list:
                #Add path unless full path is provided
                if not ('/' in filei):
                    histlist.append(path2data + '/' + filei)
                else:
                    histlist.append(filei)

            fnum = len(histlist)
            #Easy case: merging *****.fixed.nc means delete all but the first file:
            #(assumes a 5-character numeric prefix, e.g. 00000.fixed.nc -- TODO confirm)
            if file_list[0][5:] == '.fixed.nc' and fnum >= 2:
                rm_cmd = 'rm -f '
                for i in range(1, fnum):
                    rm_cmd += ' ' + histlist[i]
                p = subprocess.run(rm_cmd, universal_newlines=True, shell=True)
                prCyan('Cleaned all but ' + file_list[0])
                exit()
            #=========
            fnum = len(histlist)
            prCyan('Merging %i files, starting with %s ...' %
                   (fnum, file_list[0]))

            #this is a temporary file ***_tmp.nc
            file_tmp = histlist[0][:-3] + '_tmp' + '.nc'
            Log = Ncdf(file_tmp, 'Merged file')
            Log.merge_files_from_list(histlist)
            Log.close()

            #=====Delete files that have been combined====

            #Rename merged file  LegacyGCM_LsINI_LsEND.nc or first files of the list (e.g 00010.atmos_average.nc)
            if file_list[0][:12] == 'LegacyGCM_Ls':
                ls_ini = file_list[0][12:15]
                ls_end = file_list[-1][18:21]
                fileout = 'LegacyGCM_Ls%s_Ls%s.nc' % (ls_ini, ls_end)

            else:
                fileout = histlist[0]

            #---Assemble 'remove' and 'move' commands to execute-----
            rm_cmd = 'rm -f '
            for ifile in histlist:
                rm_cmd += ' ' + ifile
            cmd_txt = 'mv ' + file_tmp + ' ' + fileout
            p = subprocess.run(rm_cmd, universal_newlines=True, shell=True)
            p = subprocess.run(cmd_txt, universal_newlines=True, shell=True)
            prCyan(fileout + ' was merged')


#=========== Tshift implemation by Victoria!! ===========================

    elif args.tshift:
        for filei in file_list:
            #Add path unless full path is provided
            if not ('/' in filei):
                fullnameIN = path2data + '/' + filei
            else:
                fullnameIN = filei

            fullnameOUT = fullnameIN[:-3] + '_T' + '.nc'

            fdiurn = Dataset(fullnameIN, 'r', format='NETCDF4_CLASSIC')
            fnew = Ncdf(fullnameOUT
                        )  # define a Ncdf object from the Ncdf wrapper module
            #Copy some dimensions from the old file to the new file
            fnew.copy_all_dims_from_Ncfile(fdiurn)

            #find time of day variable name
            tod_name = find_tod_in_diurn(fdiurn)

            # find vertical dimension variable name from the file-name suffix.
            # BUG FIX: the zagl/zstd tests used filei[:3] (the first three
            # characters) instead of filei[:-3] (the name minus '.nc'), so
            # they could never match; use filei[:-3] as in the pstd test.
            if filei[:-3].endswith('_pstd'):
                zaxis = 'pstd'
            elif filei[:-3].endswith('_zagl'):
                zaxis = 'zagl'
            elif filei[:-3].endswith('_zstd'):
                zaxis = 'zstd'
            else:
                zaxis = 'pfull'

            # Copy some variables from the old file to the new file
            fnew.copy_Ncaxis_with_content(fdiurn.variables['lon'])
            fnew.copy_Ncaxis_with_content(fdiurn.variables['lat'])

            #Only create a vertical axis if the original file contains 3D fields
            if zaxis in fdiurn.dimensions.keys():
                fnew.copy_Ncaxis_with_content(fdiurn.variables[zaxis])

            fnew.copy_Ncaxis_with_content(fdiurn.variables['time'])
            fnew.copy_Ncaxis_with_content(fdiurn.variables[tod_name])
            #Only copy areo if existing in the original file:
            if 'areo' in fdiurn.variables.keys():
                fnew.copy_Ncvar(fdiurn.variables['areo'])

            # read 4D field and do time shift
            tod_in = np.array(fdiurn.variables[tod_name])
            longitude = np.array(fdiurn.variables['lon'])
            var_list = fdiurn.variables.keys(
            )  # get all variables from old file

            for ivar in var_list:
                varIN = fdiurn.variables[ivar][:]
                vkeys = fdiurn.variables[ivar].dimensions
                if (len(vkeys) == 4):
                    #(time, tod, lat, lon): put lon first for tshift(), then
                    #transpose back to the file's dimension order.
                    print(ivar)
                    ilat = vkeys.index('lat')
                    ilon = vkeys.index('lon')
                    itime = vkeys.index('time')
                    itod = vkeys.index(tod_name)
                    newvar = np.transpose(varIN, (ilon, ilat, itime, itod))
                    newvarOUT = tshift(newvar, lon=longitude, timex=tod_in)
                    varOUT = np.transpose(newvarOUT, (2, 3, 1, 0))

                    fnew.log_variable(ivar, varOUT,
                                      ['time', tod_name, 'lat', 'lon'],
                                      fdiurn.variables[ivar].long_name,
                                      fdiurn.variables[ivar].units)
                if (len(vkeys) == 5):
                    #(time, tod, lev, lat, lon): same treatment with the
                    #vertical axis carried along.
                    print(ivar)
                    ilat = vkeys.index('lat')
                    ilon = vkeys.index('lon')
                    iz = vkeys.index(zaxis)
                    itime = vkeys.index('time')
                    itod = vkeys.index(tod_name)
                    newvar = np.transpose(varIN, (ilon, ilat, iz, itime, itod))
                    newvarOUT = tshift(newvar, lon=longitude, timex=tod_in)
                    varOUT = np.transpose(newvarOUT, (3, 4, 2, 1, 0))
                    fnew.log_variable(ivar, varOUT,
                                      ['time', tod_name, zaxis, 'lat', 'lon'],
                                      fdiurn.variables[ivar].long_name,
                                      fdiurn.variables[ivar].units)
            fnew.close()
            fdiurn.close()

    else:
        prRed(
            """Error: no action requested: use 'MarsFiles *nc --fv3 --combine, or --tshift'"""
        )
# ===== Example 2 =====
def main():
    """Entry point for the MarsInterp utility.

    Interpolates all (time, pfull, lat, lon) and (time, tod, pfull, lat, lon)
    fields of each input file from the model's native hybrid levels onto fixed
    levels -- standard pressure ('pstd'), standard altitude ('zstd') or
    altitude above ground level ('zagl') -- and writes a *_<type>.nc file.

    Relies on module-level globals defined elsewhere in the file: ``parser``,
    ``filepath``, ``prRed``/``prCyan``, ``Dataset``, ``Ncdf``,
    ``check_file_tape``, ``section_content_amesgcm_profile``,
    ``find_tod_in_diurn``, ``fms_press_calc``, ``fms_Z_calc``, ``find_n``
    and ``vinterp``.
    """
    start_time = time.time()
    # Parse the command line once; the original re-parsed for every access.
    args = parser.parse_args()
    debug = args.debug
    #load all the .nc files
    file_list = args.input_file
    interp_type = args.type  #e.g.  'pstd'
    custom_level = args.level  #e.g.  'p44'

    #The fixed file is needed if pk, bk are not available in the requested file, or
    # to load the topography if zstd output is requested
    name_fixed = filepath + '/' + file_list[0][0:5] + '.fixed.nc'

    # PRELIMINARY DEFINITIONS
    #===========================pstd============================================
    if interp_type == 'pstd':
        longname_txt = 'standard pressure'
        units_txt = 'Pa'
        need_to_reverse = False
        interp_technic = 'log'
        if custom_level:
            content_txt = section_content_amesgcm_profile(
                'Pressure definitions for pstd')
            #NOTE(review): exec/eval runs arbitrary code from the local
            #amesgcm profile -- fine for a trusted config file, but never
            #feed untrusted input through --level.
            exec(content_txt)  #load all variables in that section
            lev_in = eval('np.array(' + custom_level +
                          ')')  #copy requested variable
        else:
            #Default levels, this is size 36
            lev_in = np.array([
                1.0e+03, 9.5e+02, 9.0e+02, 8.5e+02, 8.0e+02, 7.5e+02, 7.0e+02,
                6.5e+02, 6.0e+02, 5.5e+02, 5.0e+02, 4.5e+02, 4.0e+02, 3.5e+02,
                3.0e+02, 2.5e+02, 2.0e+02, 1.5e+02, 1.0e+02, 7.0e+01, 5.0e+01,
                3.0e+01, 2.0e+01, 1.0e+01, 7.0e+00, 5.0e+00, 3.0e+00, 2.0e+00,
                1.0e+00, 5.0e-01, 3.0e-01, 2.0e-01, 1.0e-01, 5.0e-02, 3.0e-02,
                1.0e-02
            ])
    #===========================zstd============================================
    elif interp_type == 'zstd':
        longname_txt = 'standard altitude'
        units_txt = 'm'
        need_to_reverse = True
        interp_technic = 'lin'
        if custom_level:
            content_txt = section_content_amesgcm_profile(
                'Altitude definitions for zstd')
            #NOTE(review): see security note in the pstd branch above.
            exec(content_txt)  #load all variables in that section
            lev_in = eval('np.array(' + custom_level +
                          ')')  #copy requested variable
        else:
            #Default levels, this is size 45
            lev_in = np.array([
                -7000, -6000, -5000, -4500, -4000, -3500, -3000, -2500, -2000,
                -1500, -1000, -500, 0, 500, 1000, 1500, 2000, 2500, 3000, 3500,
                4000, 4500, 5000, 6000, 7000, 8000, 9000, 10000, 12000, 14000,
                16000, 18000, 20000, 25000, 30000, 35000, 40000, 45000, 50000,
                55000, 60000, 70000, 80000, 90000, 100000
            ])
        try:
            #zstd needs the surface topography from the matching fixed file
            f_fixed = Dataset(name_fixed, 'r')
            zsurf = f_fixed.variables['zsurf'][:]
            f_fixed.close()
        except FileNotFoundError:
            prRed(
                '***Error*** Topography is needed for zstd interpolation, however'
            )
            prRed('file %s not found' % (name_fixed))
            exit()
    #===========================zagl============================================
    elif interp_type == 'zagl':
        longname_txt = 'altitude above ground level'
        units_txt = 'm'
        need_to_reverse = True
        interp_technic = 'lin'
        if custom_level:
            content_txt = section_content_amesgcm_profile(
                'Altitude definitions for zagl')
            #NOTE(review): see security note in the pstd branch above.
            exec(content_txt)  #load all variables in that section
            lev_in = eval('np.array(' + custom_level +
                          ')')  #copy requested variable
        else:
            #Default levels, this is size 45
            lev_in = np.array([
                -7000, -6000, -5000, -4500, -4000, -3500, -3000, -2500, -2000,
                -1500, -1000, -500, 0, 500, 1000, 1500, 2000, 2500, 3000, 3500,
                4000, 4500, 5000, 6000, 7000, 8000, 9000, 10000, 12000, 14000,
                16000, 18000, 20000, 25000, 30000, 35000, 40000, 45000, 50000,
                55000, 60000, 70000, 80000, 90000, 100000
            ])
    else:
        prRed(
            "Interpolation type '%s' is not supported, use  'pstd','zstd' or 'zagl'"
            % (interp_type))
        exit()

    #For all the files
    for ifile in file_list:
        #First check if file is present on the disk (Lou only)
        check_file_tape(ifile)
        newname = filepath + '/' + ifile[:-3] + '_' + interp_type + '.nc'

        #=================================================================
        #=======================Interpolate action========================
        #=================================================================

        fNcdf = Dataset(ifile, 'r', format='NETCDF4_CLASSIC')
        # Load pk and bk and ps for 3D pressure field calculation.
        # We will read the pk and bk for each file in case the vertical resolution is changed.

        try:
            #First try to read pk and bk in the file
            pk = np.array(fNcdf.variables['pk'])
            bk = np.array(fNcdf.variables['bk'])
        except KeyError:
            #pk/bk not in the file: fall back to the matching XXXXX.fixed.nc.
            #(Was a bare `except:`; a missing key in the variables dict
            #raises KeyError -- anything else should propagate.)
            name_fixed = filepath + '/' + ifile[0:5] + '.fixed.nc'
            f_fixed = Dataset(name_fixed, 'r', format='NETCDF4_CLASSIC')
            pk = np.array(f_fixed.variables['pk'])
            bk = np.array(f_fixed.variables['bk'])
            f_fixed.close()

        ps = np.array(fNcdf.variables['ps'])

        if len(ps.shape) == 3:
            do_diurn = False
            tod_name = 'not_used'
            permut = [
                1, 0, 2, 3
            ]  # Put vertical axis first for 4D variable, e.g (time,lev,lat,lon) >>> (lev,time,lat,lon)
            #                              ( 0    1   2   3 ) >>> ( 1   0    2   3 )
        elif len(ps.shape) == 4:
            do_diurn = True
            #find time of day variable name
            tod_name = find_tod_in_diurn(fNcdf)
            permut = [
                2, 1, 0, 3, 4
            ]  #Same for diun files, e.g (time,time_of_day_XX,lev,lat,lon) >>> (lev,time_of_day_XX,time,lat,lon)
            #                         (  0        1         2   3   4)  >>> ( 2       1          0    3   4 )
        else:
            #Defensive: previously an unexpected rank left `permut` undefined
            #and crashed later with NameError.
            prRed("***Error*** 'ps' must be 3- or 4-dimensional in %s" %
                  (ifile))
            exit()
        #== Compute levels in the file, these are permutted arrays

        # Suppress divided by zero error ==
        with np.errstate(divide='ignore', invalid='ignore'):
            if interp_type == 'pstd':
                L_3D_P = fms_press_calc(
                    ps, pk, bk,
                    lev_type='full')  #permuted by default, e.g lev is first

            elif interp_type == 'zagl':
                temp = fNcdf.variables['temp'][:]
                L_3D_P = fms_Z_calc(ps,
                                    pk,
                                    bk,
                                    temp.transpose(permut),
                                    topo=0.,
                                    lev_type='full')

            elif interp_type == 'zstd':
                temp = fNcdf.variables['temp'][:]
                #Expend the zsurf array to the time dimension
                zflat = np.repeat(zsurf[np.newaxis, :], ps.shape[0], axis=0)
                if do_diurn:
                    zflat = np.repeat(zflat[:, np.newaxis, :, :],
                                      ps.shape[1],
                                      axis=1)

                L_3D_P = fms_Z_calc(ps,
                                    pk,
                                    bk,
                                    temp.transpose(permut),
                                    topo=zflat,
                                    lev_type='full')

        fnew = Ncdf(newname, 'Pressure interpolation using MarsInterp.py')
        #===========      Replicate existing DIMENSIONS but pfull  =================
        #get all variables in file:
        var_list = fNcdf.variables.keys()

        fnew.copy_all_dims_from_Ncfile(fNcdf, exclude_dim=['pfull'])
        fnew.add_dim_with_content(interp_type, lev_in, longname_txt,
                                  units_txt)  #Add new vertical dimension

        fnew.copy_Ncaxis_with_content(fNcdf.variables['lon'])
        fnew.copy_Ncaxis_with_content(fNcdf.variables['lat'])
        fnew.copy_Ncaxis_with_content(fNcdf.variables['time'])

        if do_diurn: fnew.copy_Ncaxis_with_content(fNcdf.variables[tod_name])

        #We will re-use the indices for each files, this speeds-up the calculation
        compute_indices = True
        for ivar in var_list:
            if (fNcdf.variables[ivar].dimensions
                    == ('time', 'pfull', 'lat', 'lon')
                    or fNcdf.variables[ivar].dimensions
                    == ('time', tod_name, 'pfull', 'lat', 'lon')):
                if compute_indices:
                    #The bracketing-level indices depend only on L_3D_P, so
                    #compute them once for the first interpolated variable.
                    prCyan("Computing indices ...")
                    index = find_n(L_3D_P,
                                   lev_in,
                                   reverse_input=need_to_reverse)
                    compute_indices = False

                prCyan("Interpolating: %s ..." % (ivar))
                varIN = fNcdf.variables[ivar][:]
                #==This with loop suppresses divided by zero errors==
                with np.errstate(divide='ignore', invalid='ignore'):
                    varOUT = vinterp(varIN.transpose(permut),
                                     L_3D_P,
                                     lev_in,
                                     type=interp_technic,
                                     reverse_input=need_to_reverse,
                                     masktop=True,
                                     index=index).transpose(permut)
                if not do_diurn:
                    fnew.log_variable(ivar, varOUT,
                                      ('time', interp_type, 'lat', 'lon'),
                                      fNcdf.variables[ivar].long_name,
                                      fNcdf.variables[ivar].units)
                else:
                    fnew.log_variable(
                        ivar, varOUT,
                        ('time', tod_name, interp_type, 'lat', 'lon'),
                        fNcdf.variables[ivar].long_name,
                        fNcdf.variables[ivar].units)
            else:
                #Non-interpolated variables are copied verbatim, except the
                #vertical axes and pk/bk which no longer apply.
                if ivar not in [
                        'time', 'pfull', 'lat', 'lon', 'phalf', 'pk', 'bk',
                        'pstd', 'zstd', 'zagl', tod_name
                ]:
                    prCyan("Copying over: %s..." % (ivar))
                    fnew.copy_Ncvar(fNcdf.variables[ivar])

        print('\r ', end='')
        fNcdf.close()
        fnew.close()
        print("Completed in %.3f sec" % (time.time() - start_time))
# ===== Example 3 =====
def main():
    """Entry point for the MarsVars utility.

    For each input file, optionally removes variables (``-rm``), adds derived
    variables (``-add``), differentiates variables with respect to altitude
    (``-zdiff``), and performs column integrations (``-col``).  With no
    operation requested, prints the file content.

    Relies on module-level globals defined elsewhere in the file: ``parser``,
    ``VAR``, ``g``, ``fill_value``, ``prRed``/``prCyan``/``prYellow``,
    ``Dataset``, ``Ncdf``, ``check_file_tape``, ``print_fileContent``,
    ``fms_Z_calc``, ``dvar_dh``, ``cart_to_azimut_TR`` and the ``compute_*``
    helpers.
    """
    # Parse the command line once; the original re-parsed for every access.
    args = parser.parse_args()
    #load all the .nc files
    file_list = args.input_file
    add_list = args.add
    zdiff_list = args.zdiff
    col_list = args.col
    remove_list = args.remove
    debug = args.debug

    #Check if an operation is requested, otherwise print file content.
    if not (add_list or zdiff_list or remove_list or col_list):
        print_fileContent(file_list[0])
        prYellow(
            ''' ***Notice***  No operation requested, use '-add var',  '-zdiff var', '-col var', '-rm var' '''
        )
        exit()  #Exit cleanly

    #For all the files
    for ifile in file_list:
        #First check if file is present on the disk (Lou only)
        check_file_tape(ifile)

        #=================================================================
        #====================Remove action================================
        #=================================================================

        if remove_list:
            cmd_txt = 'ncks --version'
            try:
                #If ncks is available, use it:--
                subprocess.check_call(cmd_txt,
                                      shell=True,
                                      stdout=open(os.devnull, "w"),
                                      stderr=open(os.devnull, "w"))
                for ivar in remove_list:
                    print('Creating new file %s without %s:' % (ifile, ivar))
                    cmd_txt = 'ncks -C -O -x -v %s %s %s' % (ivar, ifile,
                                                             ifile)
                    try:
                        subprocess.check_call(cmd_txt,
                                              shell=True,
                                              stdout=open(os.devnull, "w"),
                                              stderr=open(os.devnull, "w"))
                    except Exception as exception:
                        # BUG FIX: Python 3 exceptions have no `.message`
                        # attribute, so the original handler itself raised
                        # AttributeError; use str(exception) instead.
                        print(exception.__class__.__name__ + ": " +
                              str(exception))
            #ncks is not available, we use internal method.
            except subprocess.CalledProcessError:
                f_IN = Dataset(ifile, 'r', format='NETCDF4_CLASSIC')
                ifile_tmp = ifile[:-3] + '_tmp' + '.nc'
                Log = Ncdf(ifile_tmp, 'Edited in postprocessing')
                Log.copy_all_dims_from_Ncfile(f_IN)
                Log.copy_all_vars_from_Ncfile(f_IN, remove_list)
                f_IN.close()
                Log.close()
                cmd_txt = 'mv ' + ifile_tmp + ' ' + ifile
                p = subprocess.run(cmd_txt,
                                   universal_newlines=True,
                                   shell=True)
                prCyan(ifile + ' was updated')

        #=================================================================
        #=======================Add action================================
        #=================================================================

        #If the list is not empty, load ak and bk for pressure calculation, those are always needed.
        if add_list:
            name_fixed = ifile[0:5] + '.fixed.nc'
            f_fixed = Dataset(name_fixed, 'r', format='NETCDF4_CLASSIC')
            variableNames = f_fixed.variables.keys()
            ak = np.array(f_fixed.variables['pk'])
            bk = np.array(f_fixed.variables['bk'])
            f_fixed.close()
        #----
        #----Check if the variable is currently supported---
        # BUG FIX: when an option is not passed argparse may leave the list
        # as None, and `for x in None` raises TypeError; `or []` skips the
        # loop in that case and is identical for real lists.
        for ivar in (add_list or []):
            if ivar not in VAR.keys():
                prRed("Variable '%s' is not supported" % (ivar))
            else:
                print('Processing: %s...' % (ivar))
                try:
                    fileNC = Dataset(ifile, 'a', format='NETCDF4_CLASSIC')
                    #---temp and ps are always needed---
                    dim_out = fileNC.variables[
                        'temp'].dimensions  #get dimension
                    temp = fileNC.variables['temp'][:]
                    shape_out = temp.shape
                    ps = fileNC.variables['ps'][:]
                    p_3D = compute_p_3D(ps, ak, bk, shape_out)
                    #----Dispatch on the requested derived variable----
                    if ivar == 'pfull3D': OUT = p_3D
                    if ivar == 'rho':
                        OUT = compute_rho(p_3D, temp)
                    if ivar == 'theta':
                        OUT = compute_theta(p_3D, ps, temp)
                    if ivar == 'w':
                        omega = fileNC.variables['omega'][:]
                        rho = compute_rho(p_3D, temp)
                        OUT = compute_w(rho, omega)

                    if ivar == 'zfull':
                        OUT = compute_zfull(ps, ak, bk,
                                            temp)  #TODO not with _pstd

                    if ivar == 'wspeed' or ivar == 'wdir':
                        ucomp = fileNC.variables['ucomp'][:]
                        vcomp = fileNC.variables['vcomp'][:]
                        theta, mag = cart_to_azimut_TR(ucomp,
                                                       vcomp,
                                                       mode='from')
                        if ivar == 'wdir': OUT = theta
                        if ivar == 'wspeed': OUT = mag

                    if ivar == 'N':
                        theta = compute_theta(p_3D, ps, temp)
                        zfull = compute_zfull(ps, ak, bk,
                                              temp)  #TODO not with _pstd
                        OUT = compute_N(theta, zfull)
                    if ivar == 'Ri':
                        #Richardson number: N^2 / (du/dz^2 + dv/dz^2)
                        theta = compute_theta(p_3D, ps, temp)
                        zfull = compute_zfull(ps, ak, bk,
                                              temp)  #TODO not with _pstd
                        N = compute_N(theta, zfull)

                        ucomp = fileNC.variables['ucomp'][:]
                        vcomp = fileNC.variables['vcomp'][:]
                        du_dz = dvar_dh(ucomp.transpose([1, 0, 2, 3]),
                                        zfull.transpose([1, 0, 2,
                                                         3])).transpose(
                                                             [1, 0, 2, 3])
                        dv_dz = dvar_dh(vcomp.transpose([1, 0, 2, 3]),
                                        zfull.transpose([1, 0, 2,
                                                         3])).transpose(
                                                             [1, 0, 2, 3])
                        OUT = N**2 / (du_dz**2 + dv_dz**2)

                    if ivar == 'Tco2': OUT = compute_Tco2(p_3D, temp)
                    if ivar == 'scorer_wl':
                        ucomp = fileNC.variables['ucomp'][:]
                        theta = compute_theta(p_3D, ps, temp)
                        zfull = compute_zfull(ps, ak, bk, temp)
                        N = compute_N(theta, zfull)
                        OUT = compute_scorer(N, ucomp, zfull)

                    #filter nan
                    OUT[np.isnan(OUT)] = fill_value
                    #Log the variable
                    var_Ncdf = fileNC.createVariable(ivar, 'f4', dim_out)
                    var_Ncdf.long_name = VAR[ivar][0]
                    var_Ncdf.units = VAR[ivar][1]
                    var_Ncdf[:] = OUT
                    fileNC.close()

                    print('%s: \033[92mDone\033[00m' % (ivar))
                except Exception as exception:
                    if debug: raise
                    if str(exception) == 'NetCDF: String match to name in use':
                        prYellow("""***Error*** Variable already exists""")
                        prYellow(
                            """Delete existing variables %s with 'MarsVars.py %s -rm %s'"""
                            % (ivar, ifile, ivar))

        #=================================================================
        #=============Vertical Differentiation action=====================
        #=================================================================

        #ak and bk are needed to derive the distance between layer pfull
        if zdiff_list:
            name_fixed = ifile[0:5] + '.fixed.nc'
            f_fixed = Dataset(name_fixed, 'r', format='NETCDF4_CLASSIC')
            variableNames = f_fixed.variables.keys()
            ak = np.array(f_fixed.variables['pk'])
            bk = np.array(f_fixed.variables['bk'])
            f_fixed.close()

        # BUG FIX: guard against zdiff_list being None (see add_list above).
        for idiff in (zdiff_list or []):
            fileNC = Dataset(ifile, 'a', format='NETCDF4_CLASSIC')

            if idiff not in fileNC.variables.keys():
                prRed("zdiff error: variable '%s' is not present in %s" %
                      (idiff, ifile))
                fileNC.close()
            else:
                print('Differentiating: %s...' % (idiff))

                try:
                    var = fileNC.variables[idiff][:, :, :, :]
                    newUnits = fileNC.variables[
                        idiff].units[:-2] + '/m]'  #remove the last ']' to update units, e.g turn '[kg]' to '[kg/m]'
                    newLong_name = 'vertical gradient of ' + fileNC.variables[
                        idiff].long_name

                    #---temp and ps are always needed---
                    dim_out = fileNC.variables[
                        'temp'].dimensions  #get dimension
                    temp = fileNC.variables['temp'][:]
                    ps = fileNC.variables['ps'][:]
                    zfull = fms_Z_calc(ps,
                                       ak,
                                       bk,
                                       temp.transpose([1, 0, 2, 3]),
                                       topo=0.,
                                       lev_type='full')  #z is first axis

                    #differentiate the variable with respect to z:
                    darr_dz = dvar_dh(var.transpose([1, 0, 2, 3]),
                                      zfull).transpose([1, 0, 2, 3])

                    #Log the variable
                    var_Ncdf = fileNC.createVariable('d_dz_' + idiff, 'f4',
                                                     dim_out)
                    var_Ncdf.long_name = newLong_name
                    var_Ncdf.units = newUnits
                    var_Ncdf[:] = darr_dz
                    fileNC.close()

                    print('%s: \033[92mDone\033[00m' % ('d_dz_' + idiff))
                except Exception as exception:
                    if debug: raise
                    if str(exception) == 'NetCDF: String match to name in use':
                        prYellow("""***Error*** Variable already exists""")
                        prYellow(
                            """Delete existing variable %s with 'MarsVars %s -rm %s'"""
                            % ('d_dz_' + idiff, ifile, 'd_dz_' + idiff))

        #=================================================================
        #=============  Column  integration   ============================
        #=================================================================

        #ak and bk are needed to derive the distance between layer pfull
        if col_list:
            name_fixed = ifile[0:5] + '.fixed.nc'
            f_fixed = Dataset(name_fixed, 'r', format='NETCDF4_CLASSIC')
            variableNames = f_fixed.variables.keys()
            ak = np.array(f_fixed.variables['pk'])
            bk = np.array(f_fixed.variables['bk'])
            f_fixed.close()

        # BUG FIX: guard against col_list being None (see add_list above).
        for icol in (col_list or []):
            fileNC = Dataset(ifile, 'a')  #, format='NETCDF4_CLASSIC

            if icol not in fileNC.variables.keys():
                prRed(
                    "column integration error: variable '%s' is not present in %s"
                    % (icol, ifile))
                fileNC.close()
            else:
                print('Performing colum integration: %s...' % (icol))

                try:
                    var = fileNC.variables[icol][:]
                    newUnits = fileNC.variables[
                        icol].units[:-3] + '/m2'  # turn 'kg/kg'> to 'kg/m2'
                    newLong_name = 'column integration of ' + fileNC.variables[
                        icol].long_name

                    #---temp and ps are always needed---
                    dim_in = fileNC.variables[
                        'temp'].dimensions  #get dimension
                    shape_in = fileNC.variables['temp'].shape
                    #TODO edged cases where time =1
                    dim_out = tuple([dim_in[0], dim_in[2], dim_in[3]])
                    ps = fileNC.variables['ps'][:, :, :]
                    #Column integral: sum(var * dP / g) over the vertical axis
                    DP = compute_DP_3D(ps, ak, bk, shape_in)
                    out = np.sum(var * DP / g, axis=1)

                    #Log the variable
                    var_Ncdf = fileNC.createVariable(icol + '_col', 'f4',
                                                     dim_out)
                    var_Ncdf.long_name = newLong_name
                    var_Ncdf.units = newUnits
                    var_Ncdf[:] = out

                    fileNC.close()

                    print('%s: \033[92mDone\033[00m' % (icol + '_col'))
                except Exception as exception:
                    if debug: raise
                    if str(exception) == 'NetCDF: String match to name in use':
                        prYellow("""***Error*** Variable already exists""")
                        prYellow(
                            """Delete existing variable %s with 'MarsVars %s -rm %s'"""
                            % (icol + '_col', ifile, icol + '_col'))
# ===== Example 4 =====
from amesgcm.Script_utils import check_file_tape, prYellow, prRed, prCyan, prGreen, prPurple, print_fileContent
from amesgcm.Script_utils import section_content_amesgcm_profile, find_tod_in_diurn
from amesgcm.Ncdf_wrapper import Ncdf

#=====Attempt to import specific scientic modules one may not find in the default python on NAS ====
# Attempt to import the scientific modules that may be missing from the
# default NAS python; give the user actionable guidance if they are not found.
try:
    import matplotlib
    matplotlib.use('Agg')  # Force matplotlib to not use any Xwindows backend.
    import numpy as np
    from netCDF4 import Dataset, MFDataset

except ImportError as error_msg:
    prYellow("Error while importing modules")
    prYellow('Your are using python ' + str(sys.version_info[0:3]))
    prYellow('Please, source your virtual environment')
    prCyan('    source envPython3.7/bin/activate.csh \n')
    # BUG FIX: BaseException.message was removed in Python 3 (PEP 352);
    # str(error_msg) gives the exception text without raising AttributeError.
    print("Error was: " + str(error_msg))
    exit()
except Exception as exception:
    # Output unexpected Exceptions.
    print(exception, False)
    # BUG FIX: same .message removal — use str(exception) instead.
    print(exception.__class__.__name__ + ": " + str(exception))
    exit()

#======================================================
#                  ARGUMENTS PARSER
#======================================================
# Command-line interface for MarsInterp; RawTextHelpFormatter keeps the
# ANSI-colored, multi-line description exactly as written.
parser = argparse.ArgumentParser(
    formatter_class=argparse.RawTextHelpFormatter,
    description=
    """\033[93m MarsInterp, pressure interpolation on fixed layers\n \033[00m""")
# Ejemplo n.º 5
# 0
from amesgcm.FV3_utils import fms_press_calc, fms_Z_calc, dvar_dh, cart_to_azimut_TR
from amesgcm.Script_utils import check_file_tape, prYellow, prRed, prCyan, prGreen, prPurple, print_fileContent
from amesgcm.Ncdf_wrapper import Ncdf
#=====Attempt to import specific scientic modules one may not find in the default python on NAS ====
# Attempt to import the scientific modules that may be missing from the
# default NAS python; give the user actionable guidance if they are not found.
try:
    import matplotlib
    matplotlib.use('Agg')  # Force matplotlib to not use any Xwindows backend.
    import numpy as np
    from netCDF4 import Dataset, MFDataset

except ImportError as error_msg:
    prYellow("Error while importing modules")
    prYellow('Your are using python ' + str(sys.version_info[0:3]))
    prYellow('Please, source your virtual environment')
    prCyan('    source amesGCM3/bin/activate \n')
    # BUG FIX: BaseException.message was removed in Python 3 (PEP 352);
    # str(error_msg) gives the exception text without raising AttributeError.
    print("Error was: " + str(error_msg))
    exit()

except Exception as exception:
    # Output unexpected Exceptions.
    print(exception, False)
    # BUG FIX: same .message removal — use str(exception) instead.
    print(exception.__class__.__name__ + ": " + str(exception))
    exit()

#======================================================
#                  ARGUMENTS PARSER
#======================================================
parser = argparse.ArgumentParser(
    description=
    """\033[93m MarsVars, variable manager,  utility to add or remove variables to the diagnostic files\n Use MarsFiles ****.atmos.average.nc to view file content \033[00m""",