Example #1
def create_delta_emission(path_emission_cdf, precursor_lst,
                          reduction_area_array, path_reduction_txt):
    """
    Apply the reductions per precursor and macro sector to the emissions
    and return the delta emissions summed over all sectors.
    """

    # create a dictionary with reductions per precursor and macro sector
    emission_reduction_dict = create_emission_reduction_dict(
        path_reduction_txt)

    # open the emission netcdf
    emission_dict = create_emission_dict(path_emission_cdf, precursor_lst)

    # calculate a dictionary with the emission reductions per pollutant, macrosector and position
    delta_emission_dict = {}
    for precursor in precursor_lst:
        delta_emission_dict[precursor] = zeros(emission_dict[precursor].shape)
        # apply the reduction per macro sector (reductions are positive);
        # the sum over all snap sectors happens after this loop
        for snap in range(1, sector_lst[-1]):
            delta_emission_dict[precursor][
                snap - 1, :, :] = emission_dict[precursor][
                    snap - 1] * reduction_area_array * emission_reduction_dict[
                        precursor][snap]

    # sum over all snap sectors
    for precursor in precursor_lst:
        delta_emission_dict[precursor] = sum(delta_emission_dict[precursor],
                                             axis=0)

    return delta_emission_dict
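
The helper create_emission_reduction_dict is not shown in these examples. Below is a minimal sketch of what it might look like, inferred from how module3a (Example #3) writes the reduction file: a header line followed by one tab-separated row per precursor with one reduction percentage per macro sector. The division by 100 is an assumption based on how the values are applied above.

def create_emission_reduction_dict(path_reduction_txt):
    # hypothetical sketch, not the original implementation
    emission_reduction_dict = {}
    with open(path_reduction_txt, 'r') as f:
        f.readline()  # skip the header line
        for line in f:
            fields = line.rstrip('\n').split('\t')
            precursor = fields[0]
            # sector numbering starts at 1; assume percentages become fractions
            emission_reduction_dict[precursor] = {
                snap: float(value) / 100.0
                for snap, value in enumerate(fields[1:], start=1)
            }
    return emission_reduction_dict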
Example #2
def create_delta_emission(path_emission_cdf, precursor_lst, path_area_cdf,
                          path_reduction_txt, path_result_cdf,
                          write_netcdf_output, pollName, downscale_request):
    """
    Function that applies reductions per snap sector and precursor to the
    emission netcdf.
    """
    # create a dictionary with reductions per precursor and macro sector
    emission_reduction_dict = create_emission_reduction_dict(
        path_reduction_txt)

    # 20220530: when downscaling, only reduce PPM (zero the reductions of
    # all other precursors for every macro sector)
    if downscale_request == 1:
        for precursor in ['NOx', 'NMVOC', 'NH3', 'SOx']:
            emission_reduction_dict[precursor] = {
                snap: 0 for snap in range(1, 13)
            }

    # open the emission netcdf
    emission_dict = create_emission_dict(path_emission_cdf, precursor_lst)

    # open the area netcdf
    rootgrp = Dataset(path_area_cdf, 'r')
    reduction_area = rootgrp.variables['AREA'][:] / 100.0
    rootgrp.close()

    # calculate a dictionary with the emission reductions per pollutant, macrosector and position
    delta_emission_dict = {}
    for precursor in precursor_lst:
        delta_emission_dict[precursor] = zeros(emission_dict[precursor].shape)
        # apply the reduction per macro sector (reductions are positive);
        # the sum over all snap sectors happens after this loop
        for snap in range(1, sector_lst[-1]):
            delta_emission_dict[precursor][
                snap - 1, :, :] = emission_dict[precursor][
                    snap - 1] * reduction_area * emission_reduction_dict[
                        precursor][snap]

    # before summing over all snap sectors write the delta emissions per precursor and snap to a netcdf
    # create an output netcdf with delta emissions
    # --------------------------------------------
    if write_netcdf_output:
        filename_delta_emission_cdf = path_result_cdf + 'DCemis_emepV434_camsV42_' + pollName + '.nc'

        #change name of emission file in case of downscaling
        if downscale_request == 1:
            filename_delta_emission_cdf = path_result_cdf + 'DCemis_emepV434_camsV42_' + pollName[
                0:8] + 'P' + pollName[8:] + '_.nc'

        rootgrp = Dataset(filename_delta_emission_cdf,
                          'w',
                          format='NETCDF3_CLASSIC')

        # create dimensions in the netcdf file
        rootgrp.createDimension('latitude', len(emission_dict['lat_array']))
        rootgrp.createDimension('longitude', len(emission_dict['lon_array']))
        rootgrp.createDimension('GNFRsector', len(emission_dict['GNFRsector']))
        latitudes = rootgrp.createVariable('latitude', 'f4', ('latitude', ))
        latitudes.units = "degrees_north"
        latitudes[:] = emission_dict['lat_array']
        longitudes = rootgrp.createVariable('longitude', 'f4', ('longitude', ))
        longitudes.units = "degrees_east"
        longitudes[:] = emission_dict['lon_array']
        GNFRsector = rootgrp.createVariable('GNFRsector', 'f4',
                                            ('GNFRsector', ))
        GNFRsector[:] = emission_dict['GNFRsector']

        # 20220413: take the units for the output variables from the input
        # emission file (and close the dataset again to avoid leaking the handle)
        rootgrp_emis = Dataset(path_emission_cdf, 'r')
        unitsForEmis = rootgrp_emis.variables['NOx'].units
        rootgrp_emis.close()

        # create delta emission data
        for precursor in precursor_lst:
            delta_emission_precursor = rootgrp.createVariable(
                precursor, 'f4', (
                    'GNFRsector',
                    'latitude',
                    'longitude',
                ))
            delta_emission_precursor.units = unitsForEmis  # e.g. "Mg/km2"
            delta_emission_precursor[:] = delta_emission_dict[precursor]

        rootgrp.close()

    # sum over all snap sectors
    for precursor in precursor_lst:
        delta_emission_dict[precursor] = sum(delta_emission_dict[precursor],
                                             axis=0)

    return delta_emission_dict
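
create_emission_dict is also not shown. A minimal sketch consistent with its use here (one sector x lat x lon array per precursor plus coordinate vectors) could be the following; the netcdf variable names 'lat', 'lon' and 'GNFRsector' are assumptions:

from netCDF4 import Dataset


def create_emission_dict(path_emission_cdf, precursor_lst):
    # hypothetical sketch, not the original implementation
    rootgrp = Dataset(path_emission_cdf, 'r')
    emission_dict = {}
    for precursor in precursor_lst:
        # 3D array: macro sector x latitude x longitude
        emission_dict[precursor] = rootgrp.variables[precursor][:]
    emission_dict['lat_array'] = rootgrp.variables['lat'][:]
    emission_dict['lon_array'] = rootgrp.variables['lon'][:]
    emission_dict['GNFRsector'] = rootgrp.variables['GNFRsector'][:]
    rootgrp.close()
    return emission_dict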
Example #3
def module3a(path_emission_cdf, path_area_cdf, path_reduction_txt,
             path_base_conc_cdf, path_model_cdf, path_result_cdf,
             downscale_request):

    # get precursor list from model
    rootgrp = Dataset(path_model_cdf, 'r')
    precursor_lst = getattr(rootgrp, 'Order_Pollutant').split(', ')
    rootgrp.close()

    # create a dictionary with reductions per precursor and macro sector
    emission_reduction_dict = create_emission_reduction_dict(
        path_reduction_txt)

    # look up which precursor(s) is(are) reduced
    reduced_precursor_lst = []
    for precursor in emission_reduction_dict.keys():
        sum_over_snaps = 0
        for snap in emission_reduction_dict[precursor].keys():
            sum_over_snaps += emission_reduction_dict[precursor][snap]
        if sum_over_snaps > 0:
            reduced_precursor_lst.append(precursor)

    # make module 4 reduction input files for each snap sector
    f_red_mod_3 = open(path_reduction_txt, 'r')
    header = f_red_mod_3.readline()
    f_red_mod_3.close()

    # declare a dictionary to store results
    results = {}

    # progress counter of module 3a
    counter = 0.0

    # name of the file in which the progress of the calculation is kept and transferred to sub-processes
    progress_log_filename = path_result_cdf + 'proglogmod3.txt'

    for snap in sector_lst[0:-1]:

        # write progress log file
        start = float(counter) / (len(sector_lst[0:-1]) + 1) * 100
        divisor = len(sector_lst[0:-1]) + 1
        write_progress_log(progress_log_filename, start, divisor)

        # create the emission reduction file for the precursor and only one sector
        filename_mod4_reductions = path_result_cdf + 'mod4_reductions_snap_%s.txt' % (
            snap)
        f_red_mod_4_snap = open(filename_mod4_reductions, 'w')
        f_red_mod_4_snap.write(header)
        for precursor in precursor_lst:
            f_red_mod_4_snap.write(precursor)
            for snap2 in sector_lst[0:-1]:
                if precursor in reduced_precursor_lst and snap2 == snap:
                    f_red_mod_4_snap.write('\t' + str(alpha_potency))
                else:
                    f_red_mod_4_snap.write('\t0')
            f_red_mod_4_snap.write('\n')
        f_red_mod_4_snap.close()

        # call module 4 with the newly created emission reduction file
        res_mod4_snap = module4(path_emission_cdf, path_area_cdf,
                                filename_mod4_reductions, path_base_conc_cdf,
                                path_model_cdf, path_result_cdf,
                                downscale_request, progress_log_filename)

        # remove potencies output
        remove(path_result_cdf + 'potencies.nc')

        # update counter
        counter += 1

        # store the results for each individual snap sector
        results[snap] = res_mod4_snap

        # remove filename_mod4_reductions
        remove(filename_mod4_reductions)

    # write progress log file
    start = float(counter) / (len(sector_lst[0:-1]) + 1) * 100
    divisor = len(sector_lst[0:-1]) + 1
    write_progress_log(progress_log_filename, start, divisor)

    # execute module 4 with a reduction in all sectors together
    res_mod4_all = module4(path_emission_cdf, path_area_cdf,
                           path_reduction_txt, path_base_conc_cdf,
                           path_model_cdf, path_result_cdf, downscale_request,
                           progress_log_filename)
    n_lat = res_mod4_all['n_lat']
    n_lon = res_mod4_all['n_lon']
    n_nuts = len(sector_lst[0:-1])  # number of GNFR sectors, reused as the sector dimension below

    # remove potencies output
    remove(path_result_cdf + 'potencies.nc')

    # remove progress log file
    remove(progress_log_filename)

    # create results netcdf
    # -----------------------
    filename_result_cdf = path_result_cdf + 'potencies_overview_per_sector.nc'
    rootgrp = Dataset(filename_result_cdf, 'w', format='NETCDF3_CLASSIC')

    # create dimensions in the netcdf file
    rootgrp.createDimension('GNFRsector', n_nuts)
    rootgrp.createDimension('latitude', n_lat)
    rootgrp.createDimension('longitude', n_lon)
    GNFRsectors = rootgrp.createVariable('GNFRsector', 'f4', ('GNFRsector', ))
    GNFRsectors[:] = sector_lst[0:-1]
    latitudes = rootgrp.createVariable('latitude', 'f4', ('latitude', ))
    latitudes.units = "degrees_north"
    longitudes = rootgrp.createVariable('longitude', 'f4', ('longitude', ))
    longitudes.units = "degrees_east"
    latitudes[:] = res_mod4_all['latitude_array']
    longitudes[:] = res_mod4_all['longitude_array']

    # create variables and initialize with zeros
    DC_alpha_snap_var = rootgrp.createVariable('DC_alpha_snap', 'f4', (
        'GNFRsector',
        'latitude',
        'longitude',
    ))
    DC_alpha_snap_var.units = "ug/m3"
    DC_alpha_snap_var[:] = zeros((n_nuts, n_lat, n_lon))
    DC_C_alpha_snap_var = rootgrp.createVariable('DC_C_alpha_snap', 'f4', (
        'GNFRsector',
        'latitude',
        'longitude',
    ))
    DC_C_alpha_snap_var.units = "%"
    DC_C_alpha_snap_var[:] = zeros((n_nuts, n_lat, n_lon))

    DC_alpha_all_var = rootgrp.createVariable('DC_alpha_all', 'f4', (
        'latitude',
        'longitude',
    ))
    DC_alpha_all_var.units = "ug/m3"
    DC_alpha_all_var[:] = res_mod4_all['DC_alpha']
    DC_C_alpha_all_var = rootgrp.createVariable('DC_C_alpha_all', 'f4', (
        'latitude',
        'longitude',
    ))
    DC_C_alpha_all_var.units = "%"
    DC_C_alpha_all_var[:] = res_mod4_all['DC_C_alpha'] * 100

    for snap in sector_lst[0:-1]:
        DC_alpha_snap_var[snap - 1, :, :] = results[snap]['DC_alpha']
        DC_C_alpha_snap_var[snap - 1, :, :] = results[snap]['DC_C_alpha'] * 100
    rootgrp.close()
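
The progress log helpers used above are not part of the listing either. A minimal sketch that matches their call sites (write_progress_log(filename, start, divisor), and read_progress_log returning a dict whose 'start' key module4 compares against -1) might be:

def write_progress_log(filename, start, divisor):
    # hypothetical sketch: persist the progress parameters for sub-calls
    with open(filename, 'w') as f:
        f.write('%f\t%f' % (start, divisor))


def read_progress_log(filename):
    # hypothetical sketch matching write_progress_log above
    with open(filename, 'r') as f:
        start, divisor = f.readline().split('\t')
    return {'start': float(start), 'divisor': float(divisor)}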
Example #4
def module4(path_emission_cdf, path_area_cdf, path_reduction_txt,
            path_base_conc_cdf, path_model_cdf, path_result_cdf,
            downscale_request, *progresslog):

    # get precursor list from model
    rootgrp = Dataset(path_model_cdf, 'r')
    precursor_lst = getattr(rootgrp, 'Order_Pollutant').split(', ')
    rootgrp.close()

    # module 4 can be called by another module that passes a progress log argument...
    if progresslog:
        progresslog_filename = progresslog[0]
        progress_dict = read_progress_log(progresslog_filename)
    # ... or run independently, in which case it creates its own progress log
    else:
        progresslog_filename = path_result_cdf + 'progresslog'
        write_progress_log(progresslog_filename, 0, 1)
        progress_dict = read_progress_log(progresslog_filename)

    # calculate the delta emissions, dictionary per pollutant a matrix of dimension n_lat x n_lon
    mod1_res = module1(path_emission_cdf, path_area_cdf, path_reduction_txt,
                       path_base_conc_cdf, path_model_cdf, path_result_cdf,
                       downscale_request, progresslog_filename)
    delta_conc = mod1_res['delta_conc']
    delta_emis_dict = mod1_res['delta_emis_dict']
    n_lat = mod1_res['n_lat']
    n_lon = mod1_res['n_lon']

    # remove the progress log if it was made inside module 4, not if it was an external argument
    if not progresslog:
        remove(progresslog_filename)

    # delta_emission is only used for potency calculations when exactly one
    # precursor is reduced
    emission_reduction_dict = create_emission_reduction_dict(
        path_reduction_txt)
    number_reduced_precursors = 0
    reduced_precursor = ''
    delta_emis = zeros((n_lat, n_lon))  # stays zero if nothing is reduced
    for precursor in precursor_lst:
        sum_reductions = 0
        for snap in emission_reduction_dict[precursor].keys():
            sum_reductions += emission_reduction_dict[precursor][snap]
        if sum_reductions > 0:
            number_reduced_precursors += 1
            reduced_precursor = precursor
            delta_emis = delta_emis_dict[precursor]
    if number_reduced_precursors != 1:
        reduced_precursor = ''

    # read baseline concentrations
    rootgrp = Dataset(path_base_conc_cdf, 'r')
    conc = rootgrp.variables['conc'][:]

    # ENR 20180126: in the NO2 case, load the NO2 variable (not NOx) as the baseline
    if path_model_cdf.find('NO2eq') > -1:
        conc = rootgrp.variables['NO2'][:]

    # close the baseline concentration netcdf
    rootgrp.close()

    # calculate the different potencies
    # ---------------------------------
    # Delta_C / alpha
    DC_alpha = delta_conc / (alpha_potency / 100.0)

    # (Delta_C / C) / alpha
    DC_C_alpha = delta_conc / conc / (alpha_potency / 100.0)

    if number_reduced_precursors == 1:
        # potency relative to the total emission reduction of the reduced precursor
        DC_DE = delta_conc / delta_emis_dict[reduced_precursor].sum() * 1000

    # create a result netcdf
    # -----------------------
    if progress_dict['start'] != -1:
        filename_result_cdf = path_result_cdf + 'potencies.nc'
        rootgrp = Dataset(filename_result_cdf, 'w', format='NETCDF3_CLASSIC')

        # create dimensions in the netcdf file
        rootgrp.createDimension('latitude', n_lat)
        rootgrp.createDimension('longitude', n_lon)
        latitudes = rootgrp.createVariable('latitude', 'f4', ('latitude', ))
        latitudes.units = "degrees_north"
        longitudes = rootgrp.createVariable('longitude', 'f4', ('longitude', ))
        longitudes.units = "degrees_east"
        latitudes[:] = mod1_res['latitude_array']
        longitudes[:] = mod1_res['longitude_array']

        # create potency variables data
        DC_alpha_var = rootgrp.createVariable('DC_alpha', 'f4', (
            'latitude',
            'longitude',
        ))
        DC_alpha_var[:] = DC_alpha
        DC_C_alpha_var = rootgrp.createVariable('DC_C_alpha', 'f4', (
            'latitude',
            'longitude',
        ))
        DC_C_alpha_var[:] = DC_C_alpha
        if number_reduced_precursors == 1:
            DC_DE_var = rootgrp.createVariable('DC_DE', 'f4', (
                'latitude',
                'longitude',
            ))
            DC_DE_var[:] = DC_DE

        delta_conc_var = rootgrp.createVariable('delta_conc_var', 'f4', (
            'latitude',
            'longitude',
        ))
        delta_conc_var[:] = delta_conc
        delta_emis_var = rootgrp.createVariable('delta_emis_var', 'f4', (
            'latitude',
            'longitude',
        ))
        delta_emis_var[:] = delta_emis

        rootgrp.close()

    # create a results object
    mod4_res = {}
    mod4_res['n_lat'] = n_lat
    mod4_res['n_lon'] = n_lon
    mod4_res['latitude_array'] = mod1_res['latitude_array']
    mod4_res['longitude_array'] = mod1_res['longitude_array']
    # add DC_alpha to the result dictionary
    mod4_res['DC_alpha'] = DC_alpha
    # add DC_C_alpha to the result dictionary
    mod4_res['DC_C_alpha'] = DC_C_alpha
    if number_reduced_precursors == 1:
        # add DC_DE to the result dictionary
        mod4_res['DC_DE'] = DC_DE

    return mod4_res
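
To make the potency definitions concrete, here is a small self-contained check of the two formulas above; alpha_potency is assumed to be a reduction percentage, and all numbers are made up:

import numpy as np

alpha_potency = 15.0                 # assumed reduction percentage
delta_conc = np.array([[0.3, 0.6]])  # ug/m3, made-up numbers
conc = np.array([[10.0, 20.0]])      # ug/m3 baseline, made-up numbers

DC_alpha = delta_conc / (alpha_potency / 100.0)
DC_C_alpha = delta_conc / conc / (alpha_potency / 100.0)

print(DC_alpha)    # [[2. 4.]]   -> ug/m3 extrapolated to a 100% reduction
print(DC_C_alpha)  # [[0.2 0.2]] -> relative change per unit reduction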
Example #5
def module2(path_emission_cdf, path_nuts_cdf, path_reduction_txt,
            path_model_cdf, path_result_cdf):

    # read the model netcdf
    #----------------------
    # open file
    rootgrp = Dataset(path_model_cdf, 'r')

    longitude_array = rootgrp.variables['lon'][0, :]
    latitude_array = rootgrp.variables['lat'][:, 0]
    n_lon = len(longitude_array)  # len(rootgrp.dimensions['longitude'])
    n_lat = len(latitude_array)  # len(rootgrp.dimensions['latitude'])
    inner_radius = int(getattr(rootgrp, 'Radius of influence'))

    alpha = rootgrp.variables['alpha'][:, :, :]
    omega = rootgrp.variables['omega'][:, :, :]
    flatWeight = rootgrp.variables['flatWeight'][:, :, :]
    precursor_lst = getattr(rootgrp, 'Order_Pollutant').split(', ')

    alpha_dict = {}
    omega_dict = {}
    flatWeight_dict = {}

    for i in range(len(precursor_lst)):
        alpha_dict[precursor_lst[i]] = alpha[i, :, :]
        omega_dict[precursor_lst[i]] = omega[i, :, :]
        flatWeight_dict[precursor_lst[i]] = flatWeight[i, :, :]

    # close netcdf
    rootgrp.close()

    # create a dictionary with reductions per precursor and macro sector
    emission_reduction_dict = create_emission_reduction_dict(
        path_reduction_txt)

    # create list of reduced precursors
    reduced_precursor_lst = create_reduced_precursor_lst(
        emission_reduction_dict)

    # read nuts netcdf: values between 0 and 100 indicate which share of the cell has to be taken into account
    rootgrp_nuts = Dataset(path_nuts_cdf, 'r')
    n_nuts = len(rootgrp_nuts.dimensions['nuts_id'])

    # declare results array for delta concentration, dimension: n_nuts x lat x lon
    delta_conc_nuts = zeros((n_nuts, n_lat, n_lon))

    # create emission dictionary
    emission_dict = create_emission_dict(path_emission_cdf, precursor_lst)

    # make a window
    window = create_window(inner_radius)
    (n_lon_inner_win, n_lat_inner_win) = window.shape

    # zero out the weights below the value at the window border; beyond that
    # distance the flat weight applies
    borderweight = window[inner_radius, 0]

    for i in range(n_lat_inner_win):
        for j in range(n_lon_inner_win):
            if window[i, j] < borderweight:
                window[i, j] = 0

    # result dictionary with emission reductions per nuts of the reduced precursor
    # only if one precursor is reduced this result has to be stored for potency calculations
    delta_emission_nuts_dict = {}

    # loop over all nuts
    nuts_counter = 0
    for nuts in range(n_nuts):

        # create delta emission dictionary for nuts area
        reduction_area = rootgrp_nuts.variables['AREA'][nuts, :, :] / 100
        pad_delta_emission_dict = {}
        # dictionary with sum of emissions over full domain per precursor
        sum_emissions_flat = {}

        for precursor in precursor_lst:
            # calculate emission reductions per nuts area and snap sector
            delta_emission_precursor_snap = zeros(
                emission_dict[precursor].shape)
            for snap in range(1, 11):
                delta_emission_precursor_snap[
                    snap - 1, :, :] = emission_dict[precursor][
                        snap - 1] * reduction_area * emission_reduction_dict[
                            precursor][snap]
            # sum delta emission per snap over all snap sectors
            delta_emission_precursor = sum(delta_emission_precursor_snap,
                                           axis=0)

            # store delta emissions per nuts and precursor for potency calculation
            if (len(reduced_precursor_lst) == 1
                    and precursor == reduced_precursor_lst[0]):
                delta_emission_nuts_dict[nuts] = delta_emission_precursor

            # pad the delta emission arrays with zeros
            pad_delta_emission_dict[precursor] = lib.pad(
                delta_emission_precursor,
                inner_radius,
                'constant',
                constant_values=0)

            sum_emissions_flat[precursor] = delta_emission_precursor.sum()

        # apply source receptor relationships
        # -----------------------------------
        delta_conc = zeros((n_lat, n_lon))
        for ie in range(n_lat):
            for je in range(n_lon):
                # test if the cell overlaps with the nuts sector
                if reduction_area[ie, je] > 0:
                    # apply the correlation between delta_emission and delta concentration
                    for precursor in precursor_lst:
                        alpha_ij = alpha_dict[precursor][ie, je]
                        omega_ij = omega_dict[precursor][ie, je]
                        flatWeight_ij = flatWeight_dict[precursor][ie, je]

                        if not isnan(alpha_ij):
                            # apply the weight to the flat weighted emissions
                            weighted_emissions_flat = flatWeight_ij * sum_emissions_flat[
                                precursor]

                            emissions_centre = pad_delta_emission_dict[
                                precursor][ie:(ie + n_lon_inner_win),
                                           je:(je + n_lat_inner_win)]

                            weighted_emissions_centre = (
                                (power(window, omega_ij) - flatWeight_ij) *
                                emissions_centre).sum()
                            delta_conc[ie,
                                       je] = delta_conc[ie, je] + alpha_ij * (
                                           weighted_emissions_centre +
                                           weighted_emissions_flat)

        # store the result
        delta_conc_nuts[nuts, :, :] = delta_conc
        nuts_counter += 1
        progress = float(nuts_counter) / float(n_nuts) * 100
        sys.stdout.write('\r')
        sys.stdout.flush()
        sys.stdout.write('progress:%f' % progress)
        sys.stdout.flush()

    # create a result netcdf
    # -----------------------
    filename_result_cdf = path_result_cdf + 'delta_concentration_nuts.nc'
    rootgrp = Dataset(filename_result_cdf, 'w', format='NETCDF3_CLASSIC')

    # create dimensions in the netcdf file
    rootgrp.createDimension('nuts_id', n_nuts)
    rootgrp.createDimension('latitude', n_lat)
    rootgrp.createDimension('longitude', n_lon)
    nuts_ids = rootgrp.createVariable('nuts_id', 'f4', ('nuts_id', ))
    nuts_vector = range(1, n_nuts + 1)
    nuts_ids[:] = nuts_vector
    latitudes = rootgrp.createVariable('latitude', 'f4', ('latitude', ))
    latitudes.units = "degrees_north"
    latitudes[:] = latitude_array
    longitudes = rootgrp.createVariable('longitude', 'f4', ('longitude', ))
    longitudes.units = "degrees_east"
    longitudes[:] = longitude_array

    # create delta concentration data
    delta_conc_nuts_var = rootgrp.createVariable('delta_concentration_nuts',
                                                 'f4', (
                                                     'nuts_id',
                                                     'latitude',
                                                     'longitude',
                                                 ))
    delta_conc_nuts_var.units = "ug/m3"
    delta_conc_nuts_var[:] = delta_conc_nuts

    rootgrp.close()

    # create a results dictionary
    mod2_res = {}
    mod2_res['delta_conc_nuts'] = delta_conc_nuts
    mod2_res['delta_emis_precursor_nuts'] = delta_emission_nuts_dict
    mod2_res['n_nuts'] = n_nuts
    mod2_res['n_lat'] = n_lat
    mod2_res['n_lon'] = n_lon
    mod2_res['nuts_vector'] = nuts_vector
    mod2_res['latitude_array'] = latitude_array
    mod2_res['longitude_array'] = longitude_array

    # close rootgrp_nuts
    rootgrp_nuts.close()

    return mod2_res
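
create_window is not included in the listing. Only its shape and usage can be inferred from module2: it returns a (2*inner_radius+1) x (2*inner_radius+1) array of weights that decay with distance from the centre, later raised element-wise to the power omega. The inverse-distance kernel below is purely an assumption:

from numpy import sqrt, zeros


def create_window(inner_radius):
    # hypothetical sketch; only the shape and usage follow from module2
    n = 2 * inner_radius + 1
    window = zeros((n, n))
    for i in range(n):
        for j in range(n):
            d = sqrt((i - inner_radius) ** 2 + (j - inner_radius) ** 2)
            window[i, j] = 1.0 / (1.0 + d)
    return window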
Example #6
def create_delta_emission(path_emission_cdf, precursor_lst, path_area_cdf,
                          path_reduction_txt, path_result_cdf,
                          write_netcdf_output):
    # create a dictionary with reductions per precursor and macro sector
    emission_reduction_dict = create_emission_reduction_dict(
        path_reduction_txt)

    # open the emission netcdf
    emission_dict = create_emission_dict(path_emission_cdf, precursor_lst)

    # open the area netcdf
    rootgrp = Dataset(path_area_cdf, 'r')
    reduction_area = rootgrp.variables['AREA'][:] / 100.0
    rootgrp.close()

    # calculate a dictionary with the emission reductions per pollutant, macrosector and position
    delta_emission_dict = {}
    for precursor in precursor_lst:
        delta_emission_dict[precursor] = zeros(emission_dict[precursor].shape)
        # calculate the emission reduction
        # reductions are positive!
        # make the sum over all snap sectors
        for snap in range(1, 11):
            delta_emission_dict[precursor][
                snap - 1, :, :] = emission_dict[precursor][
                    snap - 1] * reduction_area * emission_reduction_dict[
                        precursor][snap]

    # before summing over all snap sectors, write the delta emissions per
    # precursor and snap to an output netcdf
    # --------------------------------------------
    if write_netcdf_output:
        filename_delta_emission_cdf = path_result_cdf + 'delta_emission.nc'
        rootgrp = Dataset(filename_delta_emission_cdf,
                          'w',
                          format='NETCDF3_CLASSIC')

        # create dimensions in the netcdf file
        rootgrp.createDimension('latitude', len(emission_dict['lat_array']))
        rootgrp.createDimension('longitude', len(emission_dict['lon_array']))
        rootgrp.createDimension('Nsnaps', len(emission_dict['Nsnaps']))
        latitudes = rootgrp.createVariable('latitude', 'f4', ('latitude', ))
        latitudes.units = "degrees_north"
        latitudes[:] = emission_dict['lat_array']
        longitudes = rootgrp.createVariable('longitude', 'f4', ('longitude', ))
        longitudes.units = "degrees_east"
        longitudes[:] = emission_dict['lon_array']
        Nsnaps = rootgrp.createVariable('Nsnaps', 'f4', ('Nsnaps', ))
        Nsnaps[:] = emission_dict['Nsnaps']

        # create delta emission data
        for precursor in precursor_lst:
            delta_emission_precursor = rootgrp.createVariable(
                precursor, 'f4', (
                    'Nsnaps',
                    'latitude',
                    'longitude',
                ))
            delta_emission_precursor.units = "Mg/km2"
            delta_emission_precursor[:] = delta_emission_dict[precursor]

        rootgrp.close()

    # sum over all snap sectors
    for precursor in precursor_lst:
        delta_emission_dict[precursor] = sum(delta_emission_dict[precursor],
                                             axis=0)

    return delta_emission_dict
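
A hypothetical call of this variant; all paths and the precursor list are placeholders:

delta_emission_dict = create_delta_emission(
    'input/emissions.nc',            # path_emission_cdf
    ['NOx', 'NMVOC', 'NH3', 'SOx'],  # precursor_lst (example values)
    'input/area.nc',                 # path_area_cdf ('AREA' variable in %)
    'input/reductions.txt',          # path_reduction_txt
    'output/',                       # path_result_cdf
    True)                            # write_netcdf_output
# delta_emission_dict['NOx'] is now a lat x lon array summed over sectors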
Example #7
def module3b(path_emission_cdf, path_area_cdf, path_reduction_txt,
             path_base_conc_cdf, path_model_cdf, path_result_cdf):

    # get precursor list from model
    rootgrp = Dataset(path_model_cdf, 'r')
    precursor_lst = getattr(rootgrp, 'Order_Pollutant').split(', ')
    rootgrp.close()

    # create a dictionary with reductions per precursor and macro sector
    emission_reduction_dict = create_emission_reduction_dict(
        path_reduction_txt)

    # look up which sector(s) is (are) reduced
    reduced_sectors = []
    for sector in sector_lst:
        sum_over_precursors = 0
        for precursor in precursor_lst:
            sum_over_precursors += emission_reduction_dict[precursor][sector]
        if sum_over_precursors > 0:
            reduced_sectors.append(sector)

    # make module 4 reduction input files for each snap sector
    f_red_mod_3 = open(path_reduction_txt, 'r')
    header = f_red_mod_3.readline()
    f_red_mod_3.close()

    # declare a dictionary to store results
    results = {}

    # progress counter of module 3b
    counter = 0.0

    # name of the file in which the progress of the calculation is kept and transferred to sub-processes
    progress_log_filename = path_result_cdf + 'proglogmod3.txt'

    for precursor in precursor_lst:

        # write progress log file
        start = float(counter) / (len(precursor_lst) + 1) * 100
        divisor = len(precursor_lst) + 1
        write_progress_log(progress_log_filename, start, divisor)

        # create the emission reduction file for the precursor and only one sector
        filename_mod4_reductions = path_result_cdf + 'mod4_reductions_precursor_of_%s.txt' % (
            precursor)
        f_red_mod_4_snap = open(filename_mod4_reductions, 'w')
        f_red_mod_4_snap.write(header)
        for precursor2 in precursor_lst:
            f_red_mod_4_snap.write(precursor2)
            for sector in sector_lst:
                if sector in reduced_sectors and precursor2 == precursor:
                    f_red_mod_4_snap.write('\t' + str(alpha_potency))
                else:
                    f_red_mod_4_snap.write('\t0')
            f_red_mod_4_snap.write('\n')
        f_red_mod_4_snap.close()

        # call module 4 with the newly created emission reduction file
        res_mod4_snap = module4(path_emission_cdf, path_area_cdf,
                                filename_mod4_reductions, path_base_conc_cdf,
                                path_model_cdf, path_result_cdf,
                                progress_log_filename)

        # remove potencies output
        remove(path_result_cdf + 'potencies.nc')

        # update counter
        counter += 1

        # store the results for each individual precursor
        results[precursor] = res_mod4_snap

        # delete filename_mod4_reductions
        remove(filename_mod4_reductions)

    # write progress log file
    start = float(counter) / (len(precursor_lst) + 1) * 100
    divisor = len(precursor_lst) + 1
    write_progress_log(progress_log_filename, start, divisor)

    # execute module 4 with a reduction in all precursors
    res_mod4_all = module4(path_emission_cdf, path_area_cdf,
                           path_reduction_txt, path_base_conc_cdf,
                           path_model_cdf, path_result_cdf,
                           progress_log_filename)
    n_lat = res_mod4_all['n_lat']
    n_lon = res_mod4_all['n_lon']

    # remove potencies output
    remove(path_result_cdf + 'potencies.nc')

    # remove progress log file
    remove(progress_log_filename)

    # create results netcdf
    # -----------------------
    filename_result_cdf = path_result_cdf + 'potencies_overview_per_precursor.nc'
    rootgrp = Dataset(filename_result_cdf, 'w', format='NETCDF3_CLASSIC')

    # create dimensions in the netcdf file
    rootgrp.createDimension('latitude', n_lat)
    rootgrp.createDimension('longitude', n_lon)
    latitudes = rootgrp.createVariable('latitude', 'f4', ('latitude', ))
    latitudes.units = "degrees_north"
    longitudes = rootgrp.createVariable('longitude', 'f4', ('longitude', ))
    longitudes.units = "degrees_east"
    latitudes[:] = res_mod4_all['latitude_array']
    longitudes[:] = res_mod4_all['longitude_array']

    # create variables and initialize with zeros
    for precursor in precursor_lst:
        DC_alpha_var = rootgrp.createVariable(
            'DC_alpha_precursor_%s' % (precursor), 'f4', (
                'latitude',
                'longitude',
            ))
        DC_alpha_var.units = "ug/m3"
        DC_alpha_var[:] = zeros((n_lat, n_lon))
        DC_C_alpha_var = rootgrp.createVariable(
            'DC_C_alpha_precursor_%s' % (precursor), 'f4', (
                'latitude',
                'longitude',
            ))
        DC_C_alpha_var.units = "%"
        DC_C_alpha_var[:] = zeros((n_lat, n_lon))

        DC_alpha_var[:, :] = results[precursor]['DC_alpha']
        DC_C_alpha_var[:, :] = results[precursor]['DC_C_alpha'] * 100

    DC_alpha_all_var = rootgrp.createVariable('DC_alpha_all', 'f4', (
        'latitude',
        'longitude',
    ))
    DC_alpha_all_var.units = "ug/m3"
    DC_alpha_all_var[:] = res_mod4_all['DC_alpha']
    DC_C_alpha_all_var = rootgrp.createVariable('DC_C_alpha_all', 'f4', (
        'latitude',
        'longitude',
    ))
    DC_C_alpha_all_var.units = "%"
    DC_C_alpha_all_var[:] = res_mod4_all['DC_C_alpha'] * 100

    rootgrp.close()
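
module2 (Example #5) also calls create_reduced_precursor_lst, which is absent from the listing. A sketch that mirrors the inline logic of module3a (a precursor counts as reduced when its reductions summed over all macro sectors are positive):

def create_reduced_precursor_lst(emission_reduction_dict):
    # sketch mirroring the inline check in module3a (Example #3)
    reduced_precursor_lst = []
    for precursor in emission_reduction_dict.keys():
        if sum(emission_reduction_dict[precursor].values()) > 0:
            reduced_precursor_lst.append(precursor)
    return reduced_precursor_lst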