Example #1
def _horizontal_metrics_from_coordinates(xcoord, ycoord):
    """Return horizontal scale factors computed from arrays of projection
    coordinates.

    Parameters
    ----------
    xcoord : xarray dataarray
        Array of x-coordinates used to build the grid metrics,
        either plane_x_coordinate or projection_x_coordinate.
        Assumes the order of the dimensions is ('y', 'x').
    ycoord : xarray dataarray
        Array of y-coordinates used to build the grid metrics,
        either plane_y_coordinate or projection_y_coordinate.
        Assumes the order of the dimensions is ('y', 'x').

    Returns
    -------
    e1 : xarray dataarray
        Array of grid cell widths corresponding to cell_x_size_at_*_location
    e2 : xarray dataarray
        Array of grid cell widths corresponding to cell_y_size_at_*_location
    """
    #- Compute the centered first order derivatives of proj. coordinate arrays
    dy_dj, dy_di = _horizontal_gradient(ycoord)
    dx_dj, dx_di = _horizontal_gradient(xcoord)

    #- Compute the approximate size of the cells in x and y direction
    e1 = sqrt(dx_di**2. + dy_di**2.)
    e2 = sqrt(dx_dj**2. + dy_dj**2.)

    return e1, e2
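A minimal usage sketch: `sqrt` and `_horizontal_gradient` are module-level names in the source package, so the NumPy-based stand-ins below are assumptions for illustration only.

import numpy as np
import xarray as xr
from numpy import sqrt  # stand-in for the module-level sqrt used above

def _horizontal_gradient(field):
    # Hypothetical stand-in: first-order differences along ('y', 'x').
    d_dj, d_di = np.gradient(field.values)
    return (xr.DataArray(d_dj, dims=field.dims),
            xr.DataArray(d_di, dims=field.dims))

# Regular 1 km x 2 km grid: e1 should come out near 1000 m, e2 near 2000 m.
y2d, x2d = np.meshgrid(np.arange(4) * 2e3, np.arange(5) * 1e3, indexing='ij')
e1, e2 = _horizontal_metrics_from_coordinates(
    xr.DataArray(x2d, dims=('y', 'x')), xr.DataArray(y2d, dims=('y', 'x')))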
Example #2
def custom_mean(group, min_coverage=0.5):
    """
    Custom mean function.
    :param group: dataset group created by using the groupby builtin function of an xarray dataset.
    :param min_coverage: Percent of time data must be present for a grid point to be averaged (fraction)
    :return: mean of dataset group.
    """
    # percent coverage. count where grid points are not null and get mean
    percent_coverage = group['u'].notnull().mean('time')

    # filter by the min_coverage input variable
    filtered = group.where(percent_coverage > min_coverage)

    # get standard deviations of u, v and uv variables
    u_stdev = filtered['u'].std('time')
    v_stdev = filtered['v'].std('time')
    uv_stdev = xu.sqrt(u_stdev ** 2 + v_stdev ** 2)

    # get the mean of u and v
    result = filtered[['u', 'v']].mean('time')

    # calculate magnitude
    mag = xu.sqrt(result['u'] ** 2 + result['v'] ** 2)

    # add the derived variables to this group (`coords` is defined at module level)
    result['percent_coverage'] = (coords, percent_coverage)
    result['magnitude'] = (coords, mag)
    result['u_stdev'] = (coords, u_stdev)
    result['v_stdev'] = (coords, v_stdev)
    result['uv_stdev'] = (coords, uv_stdev)

    return result
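`coords` is referenced but never defined in this snippet; in the source module it is evidently a module-level tuple naming the non-time dimensions. A minimal driving sketch under that assumption (the dataset layout is invented, and NumPy stands in for the deprecated `xarray.ufuncs` alias `xu`):

import numpy as np
import pandas as pd
import xarray as xr
import numpy as xu  # xarray.ufuncs was removed; NumPy ufuncs work on xarray objects

coords = ('lat', 'lon')  # assumed definition of the module-level `coords`

ds = xr.Dataset(
    {'u': (('time', 'lat', 'lon'), np.random.rand(48, 3, 4)),
     'v': (('time', 'lat', 'lon'), np.random.rand(48, 3, 4))},
    coords={'time': pd.date_range('2020-01-01', periods=48, freq='h')})

# One averaged Dataset per day, with custom_mean applied to each group.
daily = ds.groupby('time.dayofyear').map(custom_mean)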
Example #3
def custom_mean(group, min_coverage=0.5):
    # percent coverage. count where grid points are not null and get mean
    percent_coverage = group['u'].notnull().mean('time')

    # filter by the min_coverage input variable
    filtered = group.where(percent_coverage > min_coverage)

    # get standard deviations of u, v and uv variables
    u_stdev = filtered['u'].std('time')
    v_stdev = filtered['v'].std('time')
    uv_stdev = xu.sqrt(u_stdev**2 + v_stdev**2)

    # get the mean of u and v
    result = filtered[['u', 'v']].mean('time')

    # calculate magnitude
    mag = xu.sqrt(result['u']**2 + result['v']**2)

    # add the derived variables to this group (`coords` is defined at module level)
    result['percent_coverage'] = (coords, percent_coverage)
    result['magnitude'] = (coords, mag)
    result['u_stdev'] = (coords, u_stdev)
    result['v_stdev'] = (coords, v_stdev)
    result['uv_stdev'] = (coords, uv_stdev)

    return result
Example #4
def _horizontal_metrics_from_geographical_coordinates(latitudes, longitudes):
    """Return horizontal scale factors computed from lat, lon arrays.

    Parameters
    ----------
    latitudes : xarray dataarray
        Array of latitudes from which to build the grid metrics.
        Assumes the order of the dimensions is ('y', 'x').
    longitudes : xarray dataarray
        Array of longitudes from which to build the grid metrics.
        Assumes the order of the dimensions is ('y', 'x').

    Returns
    -------
    e1 : xarray dataarray
        Array of grid cell widths corresponding to cell_x_size_at_*_location
    e2 : xarray dataarray
        Array of grid cell widths corresponding to cell_y_size_at_*_location
    """
    #- Define the centered first order derivatives of lat/lon arrays
    dlat_dj, dlat_di = _horizontal_gradient(latitudes)
    dlon_dj, dlon_di = _horizontal_gradient(longitudes)

    #- Define the approximate size of the cells in x and y direction
    e1 = earthrad * deg2rad \
          * sqrt(( dlon_di * cos( deg2rad * latitudes ) )**2. + dlat_di**2.)
    e2 = earthrad * deg2rad \
          * sqrt(( dlon_dj * cos( deg2rad * latitudes ) )**2. + dlat_dj**2.)

    return e1, e2
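`earthrad`, `deg2rad`, `sqrt`, and `cos` are module-level names in the source package, and `_horizontal_gradient` is as in Example #1. A plausible set of definitions (the radius value is an assumption):

import numpy as np
from numpy import sqrt, cos  # NumPy ufuncs operate directly on DataArrays

earthrad = 6371229.0     # assumed: mean Earth radius in metres
deg2rad = np.pi / 180.0  # degrees-to-radians conversion factor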
Example #5
    def distance_to_point(self, lat, lon):
        """
        Use Haversine formula to estimate distances from all
        gridpoints to a given location (lat, lon)
        """
        R = 6371.  # Radius of earth in km
        lat = np.radians(lat)
        lon = np.radians(lon)
        dlat = lat - xu.radians(self['lat'].values)
        dlon = lon - xu.radians(self['lon'].values)
        a = xu.sin(dlat/2)**2 + xu.cos(lat) * xu.cos(xu.radians(self['lat'].values)) * \
            xu.sin(dlon/2)**2
        c = 2 * xu.arctan2(xu.sqrt(a), xu.sqrt(1.0 - a))
        return R * c
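The same haversine computation as a standalone NumPy function, for readers without the surrounding class (grid values invented for illustration):

import numpy as np

def haversine_km(lat0, lon0, lats, lons):
    # Great-circle distance in km from (lat0, lon0) to each grid point.
    R = 6371.0
    lat0, lon0 = np.radians(lat0), np.radians(lon0)
    lats, lons = np.radians(lats), np.radians(lons)
    a = (np.sin((lat0 - lats) / 2)**2 +
         np.cos(lat0) * np.cos(lats) * np.sin((lon0 - lons) / 2)**2)
    return 2 * R * np.arctan2(np.sqrt(a), np.sqrt(1.0 - a))

lats = np.array([[53.0, 53.5], [54.0, 54.5]])
lons = np.array([[-9.0, -8.5], [-8.0, -7.5]])
print(haversine_km(53.0, -9.0, lats, lons))  # 0.0 at the matching grid point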
Example #6
    def distance_to_point(self, lat, lon):
        """
        Use Haversine formula to estimate distances from all
        gridpoints to a given location (lat, lon)
        """
        R = 6371.  # Radius of earth in km
        lat = np.radians(lat)
        lon = np.radians(lon)
        dlat = lat - xu.radians(self['lat'].values)
        dlon = lon - xu.radians(self['lon'].values)
        a = xu.sin(dlat/2)**2 + xu.cos(lat) * xu.cos(xu.radians(self['lat'].values)) * \
            xu.sin(dlon/2)**2
        c = 2 * xu.arctan2(xu.sqrt(a), xu.sqrt(1.0 - a))
        return R * c
Example #7
def annual_avg_hourly_ws():
	'''Average hourly wind speed for the MPI DTU 2006 Agora WRF simulation.

	Loops over daily netCDF WRF simulation outputs to extract the U and V
	wind-speed components at level = 6 (close to hub height), destaggers
	them to mass points, calculates wind speeds, and then averages over
	the grid to estimate hourly mean wind speeds.

	Parameters:
	None

	Returns:
	*_hourly.nc: netcdf file

	'''
	wsl = []
	d_geo = xr.open_dataset('landmask.nc')
	
	for idx, file in enumerate(ncfiles):
		ds_f = xr.open_dataset(file)
		ds_f_U = ds_f.U # extract X component of wind speeds
		ds_f_V = ds_f.V # extract y component of wind speeds
		dnx = np.shape(ds_f_U)[3] - 1
		dny = np.shape(ds_f_V)[2] - 1
		U_dstag = 0.5 * (ds_f_U[:,:,:,0:dnx] + ds_f_U[:,:,:,1:dnx+1])  # destagger X wind speeds 
		V_dstag = 0.5 * (ds_f_V[:,:,0:dny,:] + ds_f_V[:,:,1:dny+1,:])  # destagger Y wind speeds
		U_ren = U_dstag.rename({'west_east_stag':'west_east'})
		V_ren = V_dstag.rename({'south_north_stag':'south_north'})
		ws_X_sea = U_ren.sel(bottom_top = 6).where(d_geo.LANDMASK == 0)
		ws_Y_sea = V_ren.sel(bottom_top = 6).where(d_geo.LANDMASK == 0)
		ws_X = ws_X_sea.mean(dim = ['south_north','west_east']) # calculate spatial mean of X wind speeds
		ws_Y = ws_Y_sea.mean(dim = ['south_north','west_east']) # calculate spatial mean of Y wind speeds
		ws_XY = xu.sqrt(ws_X**2 + ws_Y**2) # calculate effective wind speeds 
		ws_XY.to_netcdf('s_'+str(file)+'_hourly.nc')
Example #8
    def matrix(self):
        """Return the normalizing weights in an xarray

        The weights are proportional to sig * sqrt(w). I could have used
        sig**2 * w as the convention, but this is more useful.
        """
        scale = self.scale_
        w = self.weight
        return xu.sqrt(w) * scale
Example #9
def A1_yr_avg_hourly_ws():
	'''Average hourly wind speed for the MPI DTU 2006 Agora WRF simulation,
	restricted to an areal selection in model (not lat-lon) coordinates:

	# west_east and west_east_stag:
	A1_mod_WE_S_L = 130
	A1_mod_WE_S_R = 220

	# south_north and south_north_stag:
	A1_mod_SN_T = 140
	A1_mod_SN_B = 50

	Loops over daily netCDF WRF simulation outputs to extract the U and V
	wind-speed components at level = 6 (close to hub height), destaggers
	them to mass points, calculates wind speeds, and then averages over
	the grid to estimate hourly mean wind speeds. In addition, the function
	writes out a netCDF file with the hourly average wind directions over
	the selected model area.

	Parameters:
	None

	Returns:
	*_hourly_mpers.nc: netcdf file
	*_hourly_rad.nc: netcdf file

	'''
	wsl = []
	d_geo = xr.open_dataset('landmask.nc')
	
	for idx, file in enumerate(ncfiles):
		ds_f = xr.open_dataset(file) 
		ds_f_U = ds_f.U.sel(south_north = slice(50,140),west_east_stag = slice(130,220))
		ds_f_V = ds_f.V.sel(south_north_stag = slice(50,140), west_east = slice(130,220))
		dnx = np.shape(ds_f_U)[3]-1
		dny = np.shape(ds_f_V)[2]-1
		U_dstag = 0.5 * (ds_f_U[:,:,:,0:dnx] + ds_f_U[:,:,:,1:dnx+1])
		V_dstag = 0.5 * (ds_f_V[:,:,0:dny,:] + ds_f_V[:,:,1:dny+1,:])
		U_ren = U_dstag.rename({'west_east_stag':'west_east'})
		V_ren = V_dstag.rename({'south_north_stag':'south_north'})
		ws_X_sea = U_ren.sel(bottom_top = 6)
		ws_Y_sea = V_ren.sel(bottom_top = 6)
		ws_X = ws_X_sea.mean(dim = ['south_north','west_east'])
		ws_Y = ws_Y_sea.mean(dim = ['south_north','west_east'])
		ws_XY = xu.sqrt(ws_X**2 + ws_Y**2)
		dir_XY = xu.arctan(ws_Y / ws_X)  # hourly mean wind direction (rad)
		ws_XY.to_netcdf('s_A1_'+str(file)+'_hourly_mpers.nc')
		dir_XY.to_netcdf('s_A1_'+str(file)+'_hourly_rad.nc')
Example #10
    def _get_var(self, data):
        if self.in_var == 'wnd':
            subset_variable = xu.sqrt(data[[n for n in data.data_vars][0]]**2 +
                                      data[[n for n in data.data_vars][1]]**2)
            subset_variable = subset_variable.drop(['heightAboveGround'])

        elif self.in_var == 'tmp925':
            subset_variable = data['t'] - 273.15
        elif self.in_var == 'tmp850':
            subset_variable = data['t'] - 273.15
        elif self.in_var == 'pwat':
            subset_variable = data.drop(['level'])
        else:
            subset_variable = data
        return subset_variable
Example #11
    def _correlationCalc(self, x, y, correlating=True, statistical_test=False):
        """
        Calculate the correlations between two fields or time series.

        **Arguments:**

        *x*
            time series of x in `xarray.DataArray`.

        *y*
            time series of y in `xarray.DataArray`.

        **Optional arguments:**

        *correlating*
            Calculate correlations or regression coefficients.
            Default is True (correlations); otherwise, regression coefficients.

        *statistical_test*
            Whether to also perform a two-sided t-test on the coefficients.
            Default is False.

        **Returns:**

        *r*
            An `xarray.DataArray` of correlations between x and y.

        *p*
            p-values of the statistical test, or None if statistical_test
            is False.

        """
        x = x - x.mean(dim=self._timeCoords.name)
        y = y - y.mean(dim=self._timeCoords.name)
        xy = x * y  # cov(x,y)
        xy = xy.mean(dim=self._timeCoords.name)
        xx = x.std(dim=self._timeCoords.name)
        yy = y.std(dim=self._timeCoords.name)

        if correlating:
            r = xy / xx / yy
        else:
            r = xy / xx**2

        if statistical_test:

            from xarray.ufuncs import sqrt
            if correlating:
                r_coe = r
            else:
                r_coe = xy / xx / yy

            dof = len(x[self._timeCoords.name]) - 2
            t0 = r_coe * sqrt(dof / ((1 - r_coe + 1e-20) *
                                     (1 + r_coe + 1e-20)))
            p = self._double_t_test(t0, dof)
        else:
            p = None

        return r, p
Example #12
    def get_dataset(self, key, info):
        """Load a dataset."""
        if self._polarization != key.polarization:
            return

        logger.debug('Reading %s.', key.name)

        if key.name in ['longitude', 'latitude']:
            logger.debug('Constructing coordinate arrays.')

            if self.lons is None or self.lats is None:
                self.lons, self.lats = self.get_lonlats()

            if key.name == 'latitude':
                data = self.lats
            else:
                data = self.lons
            data.attrs = info

        else:
            data = self.read_band()
            logger.debug('Reading noise data.')

            noise = self.noise.get_noise_correction(data.shape)

            logger.debug('Reading calibration data.')

            cal = self.calibration.get_calibration('gamma', data.shape)
            cal_constant = self.calibration.get_calibration_constant()

            logger.debug('Calibrating.')

            data = data.astype(np.float64)
            data = (data * data + cal_constant - noise) / cal

            data = xu.sqrt(data.clip(min=0))

            data.attrs = info

            del noise, cal

            data.attrs['units'] = 'sigma'

        return data
Example #13
    def get_dataset(self, key, info):
        """Load a dataset."""
        if self._polarization != key.polarization:
            return

        logger.debug('Reading %s.', key.name)

        if key.name in ['longitude', 'latitude']:
            logger.debug('Constructing coordinate arrays.')

            if self.lons is None or self.lats is None:
                self.lons, self.lats = self.get_lonlats()

            if key.name == 'latitude':
                data = self.lats
            else:
                data = self.lons
            data.attrs = info

        else:
            data = self.read_band()
            logger.debug('Reading noise data.')

            noise = self.noise.get_noise_correction(data.shape)

            logger.debug('Reading calibration data.')

            cal = self.calibration.get_calibration('gamma', data.shape)
            cal_constant = self.calibration.get_calibration_constant()

            logger.debug('Calibrating.')

            data = data.astype(np.float64)
            data = (data * data + cal_constant - noise) / cal

            data = xu.sqrt(data.clip(min=0))

            data.attrs = info

            del noise, cal

            data.attrs['units'] = 'sigma'

        return data
Example #14
def ttest_1samp_new(a, popmean, dim, n):
    """
    This is a two-sided test for the null hypothesis that the expected value
    (mean) of a sample of independent observations `a` is equal to the given
    population mean, `popmean`
    
    Inspired here: https://github.com/scipy/scipy/blob/v0.19.0/scipy/stats/stats.py#L3769-L3846
    
    Parameters
    ----------
    a : xarray
        sample observation
    popmean : float or array_like
        expected value in null hypothesis, if array_like than it must have the
        same shape as `a` excluding the axis dimension
    dim : string
        dimension along which to compute test
    
    Returns
    -------
    mean : xarray
        averaged sample along which dimension t-test was computed
    maskt_idx : array, bool
        Boolean array of where the tvalue is greater than the critical value
    """
    df = n - 1
    a_mean = a.mean(dim)
    d = a_mean - popmean
    v = a.var(dim, ddof=1)
    denom = xrf.sqrt(v / float(n))

    tval = d / denom
    # calculate the critical value; note: for a two-sided test at
    # alpha = 0.05 this would arguably be stats.distributions.t.ppf(1.0 - 0.025, df)
    cv = stats.distributions.t.ppf(1.0 - 0.05, df)
    maskt_idx = (abs(tval) >= cv)
    #     prob = stats.distributions.t.sf(xrf.fabs(tval), df) * 2
    #     prob_xa = xr.DataArray(prob, coords=a_mean.coords)
    return a_mean, maskt_idx
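A hedged usage sketch (the data are invented, and `xrf` must be bound in the function's module, e.g. to NumPy, since `xarray.ufuncs` no longer exists in modern xarray):

import numpy as np
import numpy as xrf  # stand-in for xarray.ufuncs
import xarray as xr
from scipy import stats

a = xr.DataArray(np.random.randn(30, 5, 6) + 0.5, dims=('time', 'y', 'x'))
mean, significant = ttest_1samp_new(a, popmean=0.0, dim='time', n=30)
print(int(significant.sum()))  # grid points where H0 (mean == popmean) is rejected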
Example #15
def eady_growth_rate(data):
    """Calculate the local Eady Growth rate.
    Following Vallis (2017) p.354.

        EGR = 0.31*du/dz*f/N

    Parameters
    ----------
        data : xarray.Dataset
            The Isca dataset.  Requires fields 'temp', 'ps', 'pfull' and 'phalf'

    Returns a new xarray.DataArray of growth rate values on phalf levels,
    in s^-1.
    """
    N2 = ixr.brunt_vaisala(data)
    f = 2.0 * omega * xruf.sin(xruf.deg2rad(data.lat))

    dz = ixr.domain.calculate_dz(data)
    du = ixr.domain.diff_pfull(data.ucomp, data)

    N = xruf.sqrt(N2.where(N2 > 0))

    egr = 0.31 * du / dz * f / N
    return np.abs(egr)
Example #16
    def __call__(self, datasets, **info):
        if len(datasets) != 4:
            raise ValueError("Expected 4 datasets, got %d" % (len(datasets), ))

        from scipy.special import erf
        dnb_data = datasets[0]
        sza_data = datasets[1]
        lza_data = datasets[2]
        output_dataset = dnb_data.where(~(dnb_data.isnull()
                                          | sza_data.isnull()))
        # this algorithm assumes units of "W cm-2 sr-1" so if there are other
        # units we need to adjust for that
        if dnb_data.attrs.get("units", "W m-2 sr-1") == "W m-2 sr-1":
            unit_factor = 10000.
        else:
            unit_factor = 1.

        # convert to decimal instead of %
        moon_illum_fraction = da.mean(datasets[3].data) * 0.01

        # From Steve Miller and Curtis Seaman
        # maxval = 10.^(-1.7 - (((2.65+moon_factor1+moon_factor2))*(1+erf((solar_zenith-95.)/(5.*sqrt(2.0))))))
        # minval = 10.^(-4. - ((2.95+moon_factor2)*(1+erf((solar_zenith-95.)/(5.*sqrt(2.0))))))
        # scaled_radiance = (radiance - minval) / (maxval - minval)
        # radiance = sqrt(scaled_radiance)

        # Version 2: Update from Curtis Seaman
        # maxval = 10.^(-1.7 - (((2.65+moon_factor1+moon_factor2))*(1+erf((solar_zenith-95.)/(5.*sqrt(2.0))))))
        # minval = 10.^(-4. - ((2.95+moon_factor2)*(1+erf((solar_zenith-95.)/(5.*sqrt(2.0))))))
        # saturated_pixels = where(radiance gt maxval, nsatpx)
        # saturation_pct = float(nsatpx)/float(n_elements(radiance))
        # print, 'Saturation (%) = ', saturation_pct
        #
        # while saturation_pct gt 0.005 do begin
        #   maxval = maxval*1.1
        #   saturated_pixels = where(radiance gt maxval, nsatpx)
        #   saturation_pct = float(nsatpx)/float(n_elements(radiance))
        #   print, saturation_pct
        # endwhile
        #
        # scaled_radiance = (radiance - minval) / (maxval - minval)
        # radiance = sqrt(scaled_radiance)

        moon_factor1 = 0.7 * (1.0 - moon_illum_fraction)
        moon_factor2 = 0.0022 * lza_data.data
        erf_portion = 1 + erf((sza_data.data - 95.0) / (5.0 * np.sqrt(2.0)))
        max_val = da.power(
            10, -1.7 -
            (2.65 + moon_factor1 + moon_factor2) * erf_portion) * unit_factor
        min_val = da.power(10, -4.0 -
                           (2.95 + moon_factor2) * erf_portion) * unit_factor

        # Update from Curtis Seaman, increase max radiance curve until less
        # than 0.5% is saturated
        if self.saturation_correction:
            delayed = dask.delayed(self._saturation_correction)(
                output_dataset.data, unit_factor, min_val, max_val)
            output_dataset.data = da.from_delayed(delayed,
                                                  output_dataset.shape,
                                                  output_dataset.dtype)
            output_dataset.data = output_dataset.data.rechunk(
                dnb_data.data.chunks)
        else:
            inner_sqrt = (output_dataset - min_val) / (max_val - min_val)
            # clip negative values to 0 before the sqrt
            inner_sqrt = inner_sqrt.where(inner_sqrt > 0, 0)
            output_dataset.data = xu.sqrt(inner_sqrt).data

        info = dnb_data.attrs.copy()
        info.update(self.attrs)
        info["standard_name"] = "equalized_radiance"
        info["mode"] = "L"
        output_dataset.attrs = info
        return output_dataset
Example #17
def xyz2angle(x, y, z):
    """Convert cartesian to azimuth and zenith."""
    azi = xu.rad2deg(xu.arctan2(x, y))
    zen = 90 - xu.rad2deg(xu.arctan2(z, xu.sqrt(x**2 + y**2)))
    return azi, zen
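A round-trip check in plain NumPy that makes the angle convention explicit (azimuth is measured from the +y axis toward +x, matching arctan2(x, y) above):

import numpy as np

def angle2xyz(azi_deg, zen_deg):
    # Hypothetical inverse of xyz2angle, under the same convention.
    azi, zen = np.deg2rad(azi_deg), np.deg2rad(zen_deg)
    return np.sin(zen) * np.sin(azi), np.sin(zen) * np.cos(azi), np.cos(zen)

x, y, z = angle2xyz(30.0, 60.0)
azi = np.rad2deg(np.arctan2(x, y))
zen = 90 - np.rad2deg(np.arctan2(z, np.hypot(x, y)))
print(azi, zen)  # 30.0 60.0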
Example #18
def trans_func(schema, d, med_wavelen, cfsp=0, gradient_filter=0):
    """
    Calculates the optical transfer function to use in reconstruction

    This routine uses the analytical form of the transfer function
    found in in Kreis [1]_.  It can optionally do cascaded free-space
    propagation for greater accuracy [2]_, although the code will run
    slightly more slowly.

    Parameters
    ----------
    schema : xarray.DataArray
       defines the maximum dimensions of the transfer function and the
       spacing between points in the grid on which it is calculated
    d : float or list of floats
       reconstruction distance.  If list or array, this function will
       return an array of transfer functions, one for each distance
    med_wavelen : float
       the wavelength in the medium you are propagating through
    cfsp : integer (optional)
       cascaded free-space propagation factor.  If this is an integer
       > 0, the transfer function G will be calculated at d/cfsp and
       the value returned will be G**cfsp.
    gradient_filter : float (optional)
       Subtract a second transfer function a distance gradient_filter
       from each z

    Returns
    -------
    trans_func : np.ndarray
       The calculated transfer function.  This will be at most as large as
       shape, but may be smaller if the frequencies outside that are zero

    References
    ----------
    .. [1] Kreis, Handbook of Holographic Interferometry (Wiley,
       2005), equation 3.79 (page 116)

    .. [2] Kreis, Optical Engineering 41(8):1829, section 5

    """
    if not hasattr(d, 'z'):
        d = xr.DataArray(ensure_array(d),
                         dims=['z'],
                         coords={'z': ensure_array(d)})

    if (cfsp > 0):
        cfsp = int(abs(cfsp))  # should be nonnegative integer
        d = d / cfsp

    m, n = ft_coord(schema.x), ft_coord(schema.y)
    m = xr.DataArray(m, dims='m', coords={'m': m})
    n = xr.DataArray(n, dims='n', coords={'n': n})

    root = 1. + 0j - (med_wavelen * n)**2 - (med_wavelen * m)**2

    root *= (root >= 0)

    g = np.exp(-1j * 2 * np.pi * d / med_wavelen * sqrt(root))

    if gradient_filter:
        g -= np.exp(-1j * 2 * np.pi * (d + gradient_filter) / med_wavelen *
                    sqrt(root))

    # set the transfer function to zero where the sqrt is imaginary
    # (this is equivalent to making sure that the largest spatial
    # frequency is 1/wavelength).  (root>=0) returns a boolean matrix
    # that is equal to 1 where the condition is true and 0 where it is
    # false.  Multiplying by this boolean matrix masks the array.
    g = g * (root >= 0)

    if cfsp > 0:
        g = g**cfsp

    return g
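For reference, the analytical form implemented above (Kreis 2005, eq. 3.79), with m and n the spatial frequencies, lambda = med_wavelen and d the reconstruction distance, is

    G(m, n; d) = exp(-1j * 2*pi*d/lambda * sqrt(1 - (lambda*m)**2 - (lambda*n)**2)),

with G set to zero wherever (lambda*m)**2 + (lambda*n)**2 > 1, i.e. where the square root would be imaginary.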
Example #19
def main(config_path):
    config = {}
    with open(config_path) as f_config:
        config = json.load(f_config)

    #    out, times, h_agl = xr.Dataset(), [], []
    for i, f_path in enumerate(
            sorted(g.glob(os.path.join(config['output-wrf-raw'],
                                       'wrfout_*')))):
        print(f_path)
        f_basename = os.path.basename(f_path)
        domain = f_basename.split('_')[1]
        d = xr.open_dataset(f_path).isel(Time=0)
        time = dt.datetime.strptime(
            f_basename, 'wrfout_{}_%Y-%m-%d_%H:%M:%S'.format(domain))
        h_agl_staggered = (d.PHB + d.PH) / wrf.G0
        h_agl = bn.move_mean(
            h_agl_staggered.mean(dim='south_north').mean(dim='west_east'),
            2)[1:]
        h = bn.move_mean(h_agl_staggered.values, 2, axis=0)[1:]
        t = d['T'].values
        p = (d.P + d.PB).values
        q = d.QVAPOR.values
        #cosalpha and sinalpha to account for earth's rotation relative to the grid
        cosa = d.COSALPHA.values
        sina = d.SINALPHA.values

        out = xr.Dataset()
        out.coords['time'] = time
        ##        out.coords['h_agl'] = np.mean(h_agl, axis=0)
        out.coords['x'] = range(d.XLONG.shape[1])
        out.coords['y'] = range(d.XLAT.shape[0])
        out['lat'] = (('y', 'x'), d.XLAT.values[:])
        out['lon'] = (('y', 'x'), d.XLONG.values[:])
        out['terrain'] = (('y', 'x'), d.HGT.values)
        out['u10'] = (('y', 'x'), d.U10.values * cosa - d.V10.values * sina)
        out['v10'] = (('y', 'x'), d.V10.values * cosa + d.U10.values * sina)
        out['rain'] = (('y', 'x'), d.RAINC + d.RAINNC)
        out['snow'] = (('y', 'x'), d.SNOWNC)
        out['t2'] = (('y', 'x'), d.T2.values)
        out['so2_concentration'] = (('h_agl', 'y', 'x'), d.so2.values)
        out['o3_concentration'] = (('h_agl', 'y', 'x'), d.o3.values)
        out['nox_concentration'] = (('h_agl', 'y', 'x'), (d.no2 + d.no).values)
        # This was changed today to diminish pm2.5 by a factor of 4
        wind10 = xu.sqrt(d.U10.values**2 + d.V10.values**2)
        divfac = (wind10 / 5).clip(1, 20)
        #.clip(1,10)
        #cappedpm25=d.PM2_5_DRY.values/divfac
        #out['pm25'] = (('h_agl', 'y', 'x'), cappedpm25)
        #newpm10=d.PM10.values-d.PM2_5_DRY.values+cappedpm25
        shp = d.so2.shape
        #print(shp)
        ss_01 = (d.na_a01 + d.cl_a01).values
        ss_02 = (d.na_a02 + d.cl_a02).values
        ss_03 = (d.na_a03 + d.cl_a03).values
        ss_04 = (d.na_a04 + d.cl_a04).values
        ss_05 = (d.na_a05 + d.cl_a05).values
        ss_06 = (d.na_a06 + d.cl_a06).values
        ss_25 = ss_01 + ss_02 + ss_03 + ss_04 + ss_05 + ss_06
        ss_new = ss_25 / divfac
        cappedpm25 = d.PM2_5_DRY.values - ss_25 + ss_new
        out['pm25'] = (('h_agl', 'y', 'x'), cappedpm25)
        newpm10 = d.PM10.values - d.PM2_5_DRY.values + cappedpm25
        out['pm10'] = (('h_agl', 'y', 'x'), newpm10)
        #out['ss_25'] = (('h_agl', 'y', 'x'), ss_25)
        #out['newss'] = (('h_agl', 'y', 'x'), ss_new)
        #out['ss_25'] = (('h_agl', 'y', 'x'), ss_25)
        #out['wind10'] = (('y', 'x'), wind10)
        #out['divfac']=(('y', 'x'), divfac)
        #organics
        org_01 = d.oc_a01.values
        org_02 = d.oc_a02.values
        org_03 = d.oc_a03.values
        org_04 = d.oc_a04.values
        org_05 = d.oc_a05.values
        org_06 = d.oc_a06.values
        org_07 = d.oc_a07.values
        org_08 = d.oc_a08.values
        org25 = org_01 + org_02 + org_03 + org_04 + org_05 + org_06 + org_07 + org_08
        out['org25'] = (('h_agl', 'y', 'x'), org25)
        #sulphate
        sulf_01 = d.so4_a01.values
        sulf_02 = d.so4_a02.values
        sulf_03 = d.so4_a03.values
        sulf_04 = d.so4_a04.values
        sulf_05 = d.so4_a05.values
        sulf_06 = d.so4_a06.values
        sulf_07 = d.so4_a07.values
        sulf_08 = d.so4_a08.values
        sulf25 = sulf_01 + sulf_02 + sulf_03 + sulf_04 + sulf_05 + sulf_06 + sulf_07 + sulf_08
        out['sulf25'] = (('h_agl', 'y', 'x'), sulf25)
        #nitrate
        nitr_01 = d.no3_a01.values
        nitr_02 = d.no3_a02.values
        nitr_03 = d.no3_a03.values
        nitr_04 = d.no3_a04.values
        nitr_05 = d.no3_a05.values
        nitr_06 = d.no3_a06.values
        nitr_07 = d.no3_a07.values
        nitr_08 = d.no3_a08.values
        nitr25 = nitr_01 + nitr_02 + nitr_03 + nitr_04 + nitr_05 + nitr_06 + nitr_07 + nitr_08
        out['nitr25'] = (('h_agl', 'y', 'x'), nitr25)
        #Ammonium
        nh4_01 = d.nh4_a01.values
        nh4_02 = d.nh4_a02.values
        nh4_03 = d.nh4_a03.values
        nh4_04 = d.nh4_a04.values
        nh4_05 = d.nh4_a05.values
        nh4_06 = d.nh4_a06.values
        nh4_07 = d.nh4_a07.values
        nh4_08 = d.nh4_a08.values
        nh425 = nh4_01 + nh4_02 + nh4_03 + nh4_04 + nh4_05 + nh4_06 + nh4_07 + nh4_08
        out['nh425'] = (('h_agl', 'y', 'x'), nh425)
        #Chloride
        cl_01 = d.cl_a01.values
        cl_02 = d.cl_a02.values
        cl_03 = d.cl_a03.values
        cl_04 = d.cl_a04.values
        cl_05 = d.cl_a05.values
        cl_06 = d.cl_a06.values
        cl_07 = d.cl_a07.values
        cl_08 = d.cl_a08.values
        #24/2/20:LC: have just pm2.5, so up to 6th bin
        cl25 = cl_01 + cl_02 + cl_03 + cl_04 + cl_05 + cl_06
        out['cl25'] = (('h_agl', 'y', 'x'), cl25)
        #Black Carbon
        bc_01 = d.bc_a01.values
        bc_02 = d.bc_a02.values
        bc_03 = d.bc_a03.values
        bc_04 = d.bc_a04.values
        bc_05 = d.bc_a05.values
        bc_06 = d.bc_a06.values
        bc_07 = d.bc_a07.values
        bc_08 = d.bc_a08.values
        bc = bc_01 + bc_02 + bc_03 + bc_04 + bc_05 + bc_06 + bc_07 + bc_08
        out['bc'] = (('h_agl', 'y', 'x'), bc)
        #print(d.PM2_5_DRY[1:10], d.PM10[1:10])
        #aaa=np.subtract(d.PM10.values, d.PM2_5_DRY.values)
        #out['aaa']=(('h_agl', 'y', 'x'), d.PM10[:,:,:]-d.PM2_5_DRY[:,:,:])
        #pm25=d.PM2_5_DRY.values
        #newpm25=pm25/4.
        #pm10=d.PM10.values
        #print(pm25[0:10,0:10,0:10])
        #print(pm10[0:10,0:10,0:10])
        #np.warnings.filterwarnings('ignore')
        #newpm10=(pm10-pm25+pm25/4.)
        #print(pm25.shape)
        #out['pm25'] = (
        #('h_agl', 'y', 'x'),d.PM2_5_DRY.values)
        #out['pm10'] = (('h_agl', 'y', 'x'), (d.PM10-d.PM2_5_DRY).values+d.PM2_5_DRY.values/4)
        #out['pm10'] = (('h_agl', 'y', 'x'), d.PM10.values)

        #('h_agl', 'y', 'x'), (d.PM10.values-d.PM2_5_DRY.values/4.))
        #-d.PM2_5_DRY).values)
        ##            wrf.x_to_yOm3(d.so2.values, (d.PB + d.P).values,
        ##                          d['T'].values, mm=64)
        ##        )
        out['pb'] = (('h_agl', 'y', 'x'), d.PB.values)
        out['p_sl'] = (('y', 'x'), wrf.slp(h, p, t, q))
        out['rh'] = (('y', 'x'), wrf.rh(p, t, q)[0])
        out['swdown'] = (('y', 'x'), d.SWDOWN.values)
        out['cldfra'] = (('h_agl', 'y', 'x'), d.CLDFRA.values)
        out['qsnow'] = (('h_agl', 'y', 'x'), d.QSNOW.values)
        out['qgraup'] = (('h_agl', 'y', 'x'), d.QGRAUP.values)
        out.to_netcdf(
            os.path.join(
                config['output-wrf'], '{domain}_{date}.nc'.format(
                    domain=domain, date=(time.strftime('%Y%m%d%H%M')))))
Example #20
def compute_capacity_factors(tech_points_dict: Dict[str, List[Tuple[float, float]]],
                             spatial_res: float, timestamps: pd.DatetimeIndex,
                             precision: int = 3,
                             smooth_wind_power_curve: bool = True) -> pd.DataFrame:
    """
    Compute capacity factors for a list of points associated to a list of technologies.

    Parameters
    ----------
    tech_points_dict : Dict[str, List[Tuple[float, float]]]
        Dictionary associating to each tech a list of points.
    spatial_res: float
        Spatial resolution of coordinates
    timestamps: pd.DatetimeIndex
        Time stamps for which we want capacity factors
    precision: int (default: 3)
        Indicates at which decimal capacity factors should be rounded
    smooth_wind_power_curve : boolean (default True)
        If "True", the transfer function of wind assets replicates the one of a wind farm,
        rather than one of a wind turbine.

    Returns
    -------
    cap_factor_df : pd.DataFrame
         DataFrame storing capacity factors for each technology and each point

    """

    for tech, points in tech_points_dict.items():
        assert len(points) != 0, f"Error: No points were defined for tech {tech}"

    assert len(timestamps) != 0, "Error: No timestamps were defined."

    # Get the converters corresponding to the input technologies
    # Dictionary indicating for each technology which converter(s) to use.
    #    For each technology in the dictionary:
    #        - if it is pv-based, the name of the converter must be specified as a string
    #        - if it is wind-based, a dictionary must give, for each of the four wind
    #          regimes defined below (I, II, III, IV), the name of the converter as a string
    converters_dict = get_config_dict(list(tech_points_dict.keys()), ["converter"])

    vres_profiles_dir = f"{data_path}generation/vres/profiles/source/"
    transfer_function_dir = f"{vres_profiles_dir}transfer_functions/"
    data_converter_wind = pd.read_csv(f"{transfer_function_dir}data_wind_turbines.csv", sep=';', index_col=0)
    data_converter_pv = pd.read_csv(f"{transfer_function_dir}data_pv_modules.csv", sep=';', index_col=0)

    dataset = read_resource_database(spatial_res).sel(time=timestamps)

    # Create output dataframe with MultiIndex (tech, coords)
    tech_points_tuples = sorted([(tech, point[0], point[1]) for tech, points in tech_points_dict.items()
                                 for point in points])
    cap_factor_df = pd.DataFrame(index=timestamps,
                                 columns=pd.MultiIndex.from_tuples(tech_points_tuples,
                                                                   names=['technologies', 'lon', 'lat']),
                                 dtype=float)

    for tech in tech_points_dict.keys():

        resource = get_config_values(tech, ["plant"])
        # Round points at the given resolution
        non_rounded_points = tech_points_dict[tech]
        rounded_points = [(round(point[0] / spatial_res) * spatial_res,
                           round(point[1] / spatial_res) * spatial_res)
                          for point in non_rounded_points]
        non_rounded_to_rounded_dict = dict(zip(non_rounded_points, rounded_points))
        sub_dataset = dataset.sel(locations=sorted(list(set(rounded_points))))

        if resource == 'Wind':

            wind_speed_reference_height = 100.
            roughness = sub_dataset.fsr

            # Compute wind speed for all the coordinates
            wind = xu.sqrt(sub_dataset.u100 ** 2 + sub_dataset.v100 ** 2)

            wind_mean = wind.mean(dim='time')

            # Split according to the IEC 61400 WTG classes
            wind_classes = {'IV': [0., 6.5], 'III': [6.5, 8.], 'II': [8., 9.5], 'I': [9.5, 99.]}
            list_df_per_wind_class = []

            for cls in wind_classes:

                filtered_wind_data = wind_mean.where((wind_mean.data >= wind_classes[cls][0]) &
                                                     (wind_mean.data < wind_classes[cls][1]), 0)
                coords_classes = filtered_wind_data[da.nonzero(filtered_wind_data)].locations.values.tolist()

                if len(coords_classes) > 0:

                    wind_filtered = wind.sel(locations=coords_classes)
                    roughness_filtered = roughness.sel(locations=coords_classes)

                    # Get the transfer function curve
                    # literal_eval converts a string to an array (in this case)
                    converter = converters_dict[tech]["converter"][cls]
                    power_curve_array = literal_eval(data_converter_wind.loc['Power curve', converter])
                    wind_speed_references = np.asarray([i[0] for i in power_curve_array])
                    capacity_factor_references = np.asarray([i[1] for i in power_curve_array])
                    capacity_factor_references_pu = capacity_factor_references / max(capacity_factor_references)

                    wind_log = windpowerlib.wind_speed.logarithmic_profile(
                        wind_filtered.values, wind_speed_reference_height,
                        float(data_converter_wind.loc['Hub height [m]', converter]),
                        roughness_filtered.values)
                    wind_data = da.from_array(wind_log, chunks='auto', asarray=True)

                    # The transfer function of wind assets replicates the one of a
                    # wind farm rather than one of a wind turbine.
                    if smooth_wind_power_curve:

                        turbulence_intensity = wind_filtered.std(dim='time') / wind_filtered.mean(dim='time')

                        capacity_factor_farm = windpowerlib.power_curves.smooth_power_curve(
                            pd.Series(wind_speed_references), pd.Series(capacity_factor_references_pu),
                            standard_deviation_method='turbulence_intensity',
                            turbulence_intensity=float(turbulence_intensity.min().values),
                            wind_speed_range=10.0)

                        power_output = da.map_blocks(np.interp, wind_data,
                                                     capacity_factor_farm['wind_speed'].values,
                                                     capacity_factor_farm['value'].values).compute()
                    else:

                        power_output = da.map_blocks(np.interp, wind_data,
                                                     wind_speed_references,
                                                     capacity_factor_references_pu).compute()

                    # Convert rounded point back into non-rounded points
                    power_output_df = pd.DataFrame(power_output, columns=coords_classes)
                    coords_classes_rounded = [non_rounded_to_rounded_dict[point] for point in non_rounded_points]
                    power_output_corrected = [power_output_df[point].values
                                              for point in coords_classes_rounded
                                              if point in power_output_df.columns]
                    coords_classes_non_rounded = [point for point in non_rounded_to_rounded_dict
                                                  if non_rounded_to_rounded_dict[point] in power_output_df.columns]
                    tech_points_tuples = [(lon, lat) for lon, lat in coords_classes_non_rounded]
                    df_per_wind_class = pd.DataFrame(np.array(power_output_corrected).T,
                                                     index=timestamps, columns=tech_points_tuples)
                    list_df_per_wind_class.append(df_per_wind_class)

                else:

                    continue

            cap_factor_df_concat = pd.concat(list_df_per_wind_class, axis=1)
            cap_factor_df[tech] = cap_factor_df_concat.reindex(sorted(cap_factor_df_concat.columns), axis=1)

        elif resource == 'PV':

            converter = converters_dict[tech]["converter"]

            # Get irradiance in W from J
            irradiance = sub_dataset.ssrd / 3600.
            # Get temperature in C from K
            temperature = sub_dataset.t2m - 273.15

            # Homer equation here:
            # https://www.homerenergy.com/products/pro/docs/latest/how_homer_calculates_the_pv_array_power_output.html
            # https://enphase.com/sites/default/files/Enphase_PVWatts_Derate_Guide_ModSolar_06-2014.pdf
            power_output = (float(data_converter_pv.loc['f', converter]) *
                            (irradiance/float(data_converter_pv.loc['G_ref', converter])) *
                            (1. + float(data_converter_pv.loc['k_P [%/C]', converter])/100. *
                             (temperature - float(data_converter_pv.loc['t_ref', converter]))))

            power_output = np.array(power_output)

            # Convert rounded point back into non rounded points
            power_output_df = pd.DataFrame(power_output, columns=sub_dataset.locations.values.tolist())
            coords_classes_rounded = [non_rounded_to_rounded_dict[point] for point in non_rounded_points]
            power_output_corrected = [power_output_df[point].values
                                      for point in coords_classes_rounded if point in power_output_df.columns]
            cap_factor_df[tech] = np.array(power_output_corrected).T

        else:
            raise ValueError('Profiles for the specified resource are not available yet.')

    # Check that we do not have NANs
    assert cap_factor_df.isna().to_numpy().sum() == 0, "Some capacity factors are not available."

    # Decrease precision of capacity factors
    cap_factor_df = cap_factor_df.round(precision)

    return cap_factor_df
Example #21
def main(config_path):
    config = {}
    with open(config_path) as f_config:
        config = json.load(f_config)

    doms = sorted(
        set(
            map(lambda x: x.split('_')[0],
                os.listdir(os.path.join(config['output-wrf'])))))
    ds = [
        xr.open_mfdataset(os.path.join(config['output-wrf'],
                                       '{}*.nc'.format(dom)),
                          concat_dim='time') for dom in doms
    ]
    extents = {
        'ireland': [-12, -3, 51, 55.5],
        'europe': [
            ds[0].lon.min(), ds[0].lon.max(), ds[0].lat.min(),
            ds[0].lat.max() - 1
        ]
    }

    for z in zip(*map(lambda x: list(x.groupby('time')), ds)):
        for i, (t, d) in enumerate(z):
            is_fst = i == 0
            is_lst = i == len(doms) - 1

            t = pd.to_datetime(t)
            t_save = t.strftime('%Y%m%d%H%M')

            if is_fst:
                print(t)

            print('\t{} - temperature & pressure (Ireland)'.format(i))
            figs = ['t2-p-ir_{}'.format(t_save)]
            plot2d(d.lon,
                   d.lat,
                   d.t2 - 273.15,
                   fig=figs[-1],
                   newfig=is_fst,
                   t=t,
                   levels=LEVELS['t2-ir'],
                   norm=MidpointNormalize(midpoint=0),
                   cmap=CMAP['t2'],
                   extent=extents['ireland'],
                   extend='both',
                   label='$^o$C',
                   title='Temperature and Pressure',
                   colorbar=is_lst,
                   config=config)
            if is_fst:
                print('is first')
                print(extents['ireland'])
                plot2d(d.lon.loc[extents['ireland'][0]:extents['ireland'][1]],
                       d.lat.loc[extents['ireland'][2]:extents['ireland'][3]],
                       d.p_sl.sel(lon=slice(extents['ireland'][0],
                                            extents['ireland'][1]),
                                  lat=slice(extents['ireland'][2],
                                            extents['ireland'][3])) * 1e-2,
                       fig=figs[-1],
                       newfig=False,
                       levels_n=10,
                       what='contour',
                       config=config)
            print('\t{} - temperature & pressure (Europe)'.format(i))
            figs += ['t2-p-e_{}'.format(t_save)]
            plot2d(d.lon,
                   d.lat,
                   d.t2 - 273.15,
                   fig=figs[-1],
                   newfig=is_fst,
                   t=t,
                   levels=LEVELS['t2-e'],
                   norm=MidpointNormalize(midpoint=0),
                   cmap=CMAP['t2'],
                   extent=extents['europe'],
                   extend='both',
                   label='$^o$C',
                   title='Temperature and Pressure',
                   colorbar=is_lst,
                   config=config)
            if is_fst:
                plot2d(d.lon,
                       d.lat,
                       d.p_sl * 1e-2,
                       fig=figs[-1],
                       newfig=False,
                       levels_n=20,
                       what='contour',
                       config=config)

            print('\t{} - rain (Ireland)'.format(i))
            figs += ['rain-ir_{}'.format(t_save)]
            plot2d(d.lon,
                   d.lat,
                   d.rain,
                   fig=figs[-1],
                   newfig=is_fst,
                   t=t,
                   levels=LEVELS['rain'],
                   cmap=CMAP['rain'],
                   extent=extents['ireland'],
                   label='mm/h',
                   format='%.1f',
                   title='Precipitation',
                   colorbar=is_lst,
                   config=config)
            print('\t{} - rain (Europe)'.format(i))
            figs += ['rain-e_{}'.format(t_save)]
            plot2d(d.lon,
                   d.lat,
                   d.rain,
                   fig=figs[-1],
                   newfig=is_fst,
                   t=t,
                   levels=LEVELS['rain'],
                   cmap=CMAP['rain'],
                   extent=extents['europe'],
                   label='mm/h',
                   format='%.1f',
                   title='Precipitation',
                   colorbar=is_lst,
                   config=config)

            print('\t{} - wind (Ireland)'.format(i))
            step = 2
            figs += ['wind-ir_{}'.format(t_save)]
            plot2d(d.lon,
                   d.lat,
                   xu.sqrt(d.u10**2 + d.v10**2),
                   fig=figs[-1],
                   newfig=is_fst,
                   t=t,
                   levels=LEVELS['wind'],
                   extent=extents['ireland'],
                   cmap=CMAP['wind'],
                   label='m/s',
                   format='%.0f',
                   title='Wind speed & direction',
                   colorbar=is_lst,
                   config=config)
            if is_fst:
                plot2d(d.lon[::step],
                       d.lat[::step],
                       d.isel(lat=slice(None, None, step),
                              lon=slice(None, None, step)),
                       fig=figs[-1],
                       newfig=False,
                       t=t,
                       what='quiver',
                       config=config)
            print('\t{} - wind (Europe)'.format(i))
            step = 4
            figs += ['wind-e_{}'.format(t_save)]
            plot2d(d.lon,
                   d.lat,
                   xu.sqrt(d.u10**2 + d.v10**2),
                   fig=figs[-1],
                   newfig=is_fst,
                   t=t,
                   levels=LEVELS['wind'],
                   extent=extents['europe'],
                   cmap=CMAP['wind'],
                   label='m/s',
                   format='%.0f',
                   title='Wind speed & direction',
                   colorbar=is_lst,
                   config=config)
            if is_fst:
                plot2d(d.lon[::step],
                       d.lat[::step],
                       d.isel(lat=slice(None, None, step),
                              lon=slice(None, None, step)),
                       fig=figs[-1],
                       newfig=False,
                       t=t,
                       what='quiver',
                       config=config)

            print('\t -PM2.5 (Ireland)')
            figs += ['pm25-ir_{}'.format(t_save)]
            plot2d(d.lon,
                   d.lat,
                   d.pm25[0, :, :],
                   fig=figs[-1],
                   newfig=is_fst,
                   t=t,
                   extent=extents['ireland'],
                   levels=LEVELS['pm25'],
                   cmap=CMAP['a'],
                   label='PM2.5 (ug/m$^3$)',
                   format='%.1f',
                   title='PM2.5',
                   colorbar=is_lst,
                   config=config)
            print('\t -PM2.5 (Europe)')
            figs += ['pm25-eu_{}'.format(t_save)]
            plot2d(d.lon,
                   d.lat,
                   d.pm25[0, :, :],
                   fig=figs[-1],
                   newfig=is_fst,
                   t=t,
                   extent=extents['europe'],
                   levels=LEVELS['pm25'],
                   cmap=CMAP['a'],
                   label='PM2.5 (ug/m$^3$)',
                   format='%.1f',
                   title='PM2.5',
                   colorbar=is_lst,
                   config=config)

            print('\t -PM10 (Ireland)')
            figs += ['pm10-ir_{}'.format(t_save)]
            plot2d(d.lon,
                   d.lat,
                   d.pm10[0, :, :],
                   fig=figs[-1],
                   newfig=is_fst,
                   t=t,
                   extent=extents['ireland'],
                   levels=LEVELS['pm10'],
                   cmap=CMAP['a'],
                   label='PM10 (ug/m$^3$)',
                   format='%.1f',
                   title='PM10',
                   colorbar=is_lst,
                   config=config)
            print('\t -PM10 (Europe)')
            figs += ['pm10-eu_{}'.format(t_save)]
            plot2d(d.lon,
                   d.lat,
                   d.pm10[0, :, :],
                   fig=figs[-1],
                   newfig=is_fst,
                   t=t,
                   extent=extents['europe'],
                   levels=LEVELS['pm10'],
                   cmap=CMAP['a'],
                   label='PM10 (ug/m$^3$)',
                   format='%.1f',
                   title='PM10',
                   colorbar=is_lst,
                   config=config)

            print('\t -SO2 (Ireland)')
            figs += ['so2-ir_{}'.format(t_save)]
            plot2d(d.lon,
                   d.lat,
                   d.so2_concentration[0, :, :] * 1e3,
                   fig=figs[-1],
                   newfig=is_fst,
                   t=t,
                   extent=extents['ireland'],
                   extend='both',
                   levels=LEVELS['so2'],
                   cmap=CMAP['a'],
                   label='SO2 (ppbv)',
                   format='%.1f',
                   title='SO2',
                   colorbar=is_lst,
                   config=config)
            print('\t -SO2 (Europe)')
            figs += ['so2-eu_{}'.format(t_save)]
            plot2d(d.lon,
                   d.lat,
                   d.so2_concentration[0, :, :] * 1e3,
                   fig=figs[-1],
                   newfig=is_fst,
                   t=t,
                   extent=extents['europe'],
                   extend='both',
                   levels=LEVELS['so2'],
                   cmap=CMAP['a'],
                   label='SO2 (ppbv)',
                   format='%.1f',
                   title='SO2',
                   colorbar=is_lst,
                   config=config)

            print('\t -O3 (Ireland)')
            figs += ['o3-ir_{}'.format(t_save)]
            plot2d(d.lon,
                   d.lat,
                   d.o3_concentration[0, :, :] * 1e3,
                   fig=figs[-1],
                   newfig=is_fst,
                   t=t,
                   extent=extents['ireland'],
                   levels=LEVELS['o3'],
                   cmap=CMAP['a'],
                   label='O3 (ppbv)',
                   format='%.1f',
                   title='O3',
                   colorbar=is_lst,
                   config=config)
            print('\t -O3 (Europe)')
            figs += ['o3-eu_{}'.format(t_save)]
            plot2d(d.lon,
                   d.lat,
                   d.o3_concentration[0, :, :] * 1e3,
                   fig=figs[-1],
                   newfig=is_fst,
                   t=t,
                   extent=extents['europe'],
                   levels=LEVELS['o3'],
                   cmap=CMAP['a'],
                   label='O3 (ppbv)',
                   format='%.1f',
                   title='O3',
                   colorbar=is_lst,
                   config=config)

            print('\t -NOx (Ireland)')
            figs += ['nox-ir_{}'.format(t_save)]
            plot2d(d.lon,
                   d.lat,
                   d.nox_concentration[0, :, :] * 1e3,
                   fig=figs[-1],
                   newfig=is_fst,
                   t=t,
                   extent=extents['ireland'],
                   extend='both',
                   levels=LEVELS['nox'],
                   cmap=CMAP['a'],
                   label='NOx (ppbv)',
                   format='%.1f',
                   title='NOx',
                   colorbar=is_lst,
                   config=config)
            print('\t -NOx (Europe)')
            figs += ['nox-eu_{}'.format(t_save)]
            plot2d(d.lon,
                   d.lat,
                   d.nox_concentration[0, :, :] * 1e3,
                   fig=figs[-1],
                   newfig=is_fst,
                   t=t,
                   extent=extents['europe'],
                   extend='both',
                   levels=LEVELS['nox'],
                   cmap=CMAP['a'],
                   label='NOx (ppbv)',
                   format='%.1f',
                   title='NOx',
                   colorbar=is_lst,
                   config=config)

            #            plot2d(
            #            d.lon, d.lat, d.rh,
            #            fig=figs[-1], newfig=is_fst, t=t,
            #            extend=extents['europe'],
            #            levels=LEVELS['pm25'], cmap=CMAP['a'],
            #            label='PM2.5 (ug/m$^2$)',
            #            title='PM2.5',
            #            config=config)

            print('\t{} - relative humidity (Ireland)'.format(i))
            figs += ['rh-ir_{}'.format(t_save)]
            plot2d(d.lon,
                   d.lat,
                   d.rh,
                   fig=figs[-1],
                   newfig=is_fst,
                   t=t,
                   extent=extents['ireland'],
                   levels=LEVELS['rh'],
                   cmap=CMAP['rh'],
                   label='%',
                   format='%.1f',
                   title='Relative Humidity',
                   colorbar=is_lst,
                   config=config)
            print('\t{} - relative humidity (Europe)'.format(i))
            figs += ['rh-e_{}'.format(t_save)]
            plot2d(d.lon,
                   d.lat,
                   d.rh,
                   fig=figs[-1],
                   newfig=is_fst,
                   t=t,
                   extent=extents['europe'],
                   levels=LEVELS['rh'],
                   cmap=CMAP['rh'],
                   label='%',
                   format='%.1f',
                   title='Relative Humidity',
                   colorbar=is_lst,
                   config=config)

        for fig in figs:
            plt.figure(fig)
            plt.savefig(os.path.join(config['imgs'], fig))
            plt.close(fig)
Example #22
def xyz2lonlat(x, y, z):
    """Convert cartesian to lon lat."""
    lon = xu.rad2deg(xu.arctan2(y, x))
    lat = xu.rad2deg(xu.arctan2(z, xu.sqrt(x**2 + y**2)))
    return lon, lat
Example #23
def xyz2angle(x, y, z):
    """Convert cartesian to azimuth and zenith."""
    azi = xu.rad2deg(xu.arctan2(x, y))
    zen = 90 - xu.rad2deg(xu.arctan2(z, xu.sqrt(x**2 + y**2)))
    return azi, zen
Example #24
def _get_sunz_corr_li_and_shibata(cos_zen):

    return 24.35 / (2. * cos_zen +
                    xu.sqrt(498.5225 * cos_zen**2 + 1))
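A quick numeric check of the correction factor in plain NumPy: at cos_zen = 1 the denominator is 2 + sqrt(499.5225) = 2 + 22.35 = 24.35, so the factor is exactly 1 at zenith angle 0 and grows with the angle.

import numpy as np

cos_zen = np.cos(np.deg2rad(np.array([0.0, 60.0, 85.0])))
corr = 24.35 / (2.0 * cos_zen + np.sqrt(498.5225 * cos_zen**2 + 1.0))
print(corr)  # approximately [1.0, 2.0, 10.3]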
Example #25
def _get_sunz_corr_li_and_shibata(cos_zen):

    return 24.35 / (2. * cos_zen + xu.sqrt(498.5225 * cos_zen**2 + 1))
Example #26
        #PM2.5 = 75% of all PM
        E_PM25_ugperm2pers = E_PM_10_ugperm2pers * .75
        E_BC_1_ugperm2pers = E_PM_10_ugperm2pers * .13
        E_OC_DOM_ugperm2pers = E_PM25_ugperm2pers * .65
        E_OC_25_10_ugperm2pers = E_PM_10_ugperm2pers * .65 - E_OC_DOM_ugperm2pers
        E_OIN_10_ugperm2pers = E_PM_10_ugperm2pers * .22
        E_OIN_25_ugperm2pers = E_PM25_ugperm2pers * .22

        #link_dest=os.path.join(emis_preproc_dir, 'wrfinput_d01')
        loc_lon, loc_lat = city_list[ss]['lon'], city_list[ss]['lat']
        ## Get the index of the Dublin grid cell from the wrfinput file, as the produced wrfchemi files don't contain xlat, xlon
        lat = np.asarray(dwrf.XLAT[0, :, :])
        longg = np.asarray(dwrf.XLONG[0, :, :])
        diffarray_lats = lat[:] - [loc_lat]
        diffarray_lons = longg[:] - [loc_lon]
        diffarrayabs = xu.sqrt(
            xu.square(diffarray_lons) + xu.square(diffarray_lats))
        ixlat, ixlon = np.where(diffarrayabs == np.min(diffarrayabs))
        idxx = {'x': ixlon, 'y': ixlat}
        lat_ix = np.argmin((arraylat - loc_lat)**2)
        lon_ix = np.argmin((arraylon - loc_lon)**2)
        print(lon_ix, lat_ix)  #verified as corresponding with the location
        idx = {'x': lon_ix, 'y': lat_ix}
        dub_temp = data.var235[:, lat_ix, lon_ix].values - 273.15
        print('Todays temp in ', ss, '  ', dub_temp)
        ## firstly, apply fix to NO, NO2 ratios
        ds.variables['E_NO2'][:, 0, :, :] = ds.variables['E_NO'][:, 0, :, :] * 0.22
        ds.variables['E_NO'][:, 0, :, :] = ds.variables['E_NO'][:, 0, :, :] * 0.88
        if dub_temp < 12.0:
Example #27
#ax.outline_patch.set_visible(False)
##ax.add_feature(cfeature.BORDERS,linewidth=0.25)
#ax.axes.get_xaxis().set_visible(False)
#ax.axes.get_yaxis().set_visible(False)
##plt.contourf(LON, LAT, data.t2.values[0,:,:]-273.15, levels, cmap=plt.cm.get_cmap('jet'), extend="both")
#plt.savefig(t2_out_path, dpi=288, tilesize=768, transparent=True)
#plt.close()

# Wind
print('Wind')
wind_out_path = (png_out_dir + '/' + datestr + ':00-wind-10m.png')
fig = plt.figure(figsize=(tilesize / dpi, tilesize / dpi), dpi=dpi)
ax = fig.add_subplot(111, projection=ccrs.Mercator())
lev_range = np.arange(0, 30, 0.5)
levels = lev_range
wind10 = xu.sqrt(data.u10.values[0, :, :]**2 + data.v10.values[0, :, :]**2)
cs = ax.contourf(LON,
                 LAT,
                 wind10,
                 levels,
                 cmap=plt.cm.jet,
                 extend="min",
                 transform=ccrs.PlateCarree())
plt.box(on=None)
plt.subplots_adjust(bottom=0, left=0, right=1, top=1, hspace=0, wspace=0)
ax.coastlines('10m')
plt.axis('off')
ax.figsize = (tilesize / dpi, tilesize / dpi)  # no-op: size was set on the figure
ax.dpi = dpi  # no-op: dpi was set on the figure
ax.outline_patch.set_visible(False)
#ax.add_feature(cfeature.BORDERS,linewidth=0.25)
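Note: ax.outline_patch, used above to hide the map frame, was deprecated in cartopy 0.18 and removed in later releases; on current cartopy the equivalent is the 'geo' spine:

# modern cartopy replacement for ax.outline_patch.set_visible(False)
ax.spines['geo'].set_visible(False)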
Example #28
0
def main(config_path):
    config = {}
    with open(config_path) as f_config:
        config = json.load(f_config)

    #fmt_date = 'd01_{}'.format('%Y%m%d%H00.nc')
    path_in = ('/mnt/raid/wrf-chem/wrfchem_v39_cri/data_back/output-wrf')
    path_out = '../data/output-mh-archive'
    mh = config['coords']['Mace Head']

    out = pd.DataFrame()
    f_paths = sorted(g.glob(os.path.join(path_in, 'd01_*')))
    rainmm = {}
    rainmmt = {}
    snowmm = {}
    snowmmt = {}
    ddstr = {}

    for i, f_path in enumerate(f_paths):
        index = [
            pd.Timestamp(
                dt.datetime.strptime(os.path.basename(f_path),
                                     'd01_%Y%m%d%H00.nc'))
        ]

        print('index', index)
        print(i, f_path)
        if i == 0:
            index0 = index[0]
            print('first index', index0)
        dayofyear = to_dayofyear(index[0])
        tmp = {}
        #d = xa.open_dataset(f_path).sel(time=0)[VARS]
        d = xa.open_dataset(f_path)[VARS]
        # Code to pick out the defined gridcell
        diffarraylon = np.asarray(d['lon'][:] - mh['lon'])
        diffarraylat = np.asarray(d['lat'][:] - mh['lat'])
        diffarrayabs = xau.sqrt(
            xau.square(diffarraylon) + xau.square(diffarraylat))
        #note: the array is stored in array dims [lat, lon]
        ixlat, ixlon = np.where(diffarrayabs == np.min(diffarrayabs))
        idx = {'x': ixlon, 'y': ixlat}

        #tmp_p, tmp_rh = pressure_rh(d)
        print(d.dims)
        d = d.isel(x=idx['x'], y=idx['y'])
        # windspeed
        tmp['dayofyear'] = dayofyear
        for vv in VARS:
            #print('variable', vv)

            # first: put in the rain loop
            if vv == 'rain':

                print('processing variable', vv)
                # the file's modification date identifies the model run
                cmmd = 'stat -c %y ' + str(f_path)
                ddstr[i] = os.popen(cmmd).read()[:10]
                rainmmt[i] = d['rain'][0, 0].values
                if i == 0:
                    # first file: keep the accumulated total, no rate yet
                    tmp[vv] = np.nan
                elif ddstr[i - 1] == ddstr[i]:
                    # same model run: hourly rain is the difference of
                    # consecutive accumulated totals (rainmmt, not rainmm,
                    # which holds the already-differenced hourly values)
                    rainmm[i] = rainmmt[i] - rainmmt[i - 1]
                    tmp[vv] = rainmm[i]
                else:
                    # new model run: the difference is meaningless, discard
                    print('new dataset: disregarding current rain value')
                    tmp[vv] = np.nan

                #print('uuup', ddstr)

                ## If i == 0, disregard the first value and difference all
                ## subsequent files. If the file belongs to a new model run
                ## (detected via `stat -c %y` on the file), reset the same way.

            else:
                if d[vv].ndim == 3:
                    #print(d[vv].shape)
                    tmp[vv] = d[vv][0, 0, 0].values
                else:
                    #print(vv, d[vv].shape)
                    tmp[vv] = d[vv][0, 0].values
        ## extract windspeed and wind direction
        #winddirection_deg = np.asscalar(
        #270 - xau.rad2deg(xau.arctan2(d.V10, d.U10)).values) % 360
        tmp['winddirection_deg'] = np.asscalar(
            270 - xau.rad2deg(xau.arctan2(d.v10, d.u10)).values) % 360
        tmp['windspeed_mPs'] = np.asscalar(
            xau.sqrt(d.u10**2 + d.v10**2).values)
        tmp = pd.DataFrame(tmp, index=index).sort_index(axis=1)
        #print(tmp.keys())
        #for kk in tmp.keys():
        #print(kk, tmp[kk].values)
        out = pd.concat((out, tmp))
        #print('done {}'.format(f_path))
#'BC1', 'BC2', 'OC1', 'OC2']


#        out = out.rename({'pressure_sea_hPa': 'pressure_hPa'})
#        (out[['dayofyear', 'pressure_hPa', 'relativehumidity_percent',
#          'temperature2m_C', 'winddirection_deg', 'windspeed_mPs']]
#        .to_dataframe()
#        .to_csv(os.path.join(path_out,
#                          '{}.csv'.format(index0.strftime('%Y%m%d%H%M')))))

    out.to_csv(
        os.path.join(path_out, '{}.csv'.format(index0.strftime('%Y%m%d%H%M'))))
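The rain handling above de-accumulates WRF's run-total rain field: the hourly amount is the difference between consecutive files of the same run, and the first file of each run yields NaN because there is nothing to difference against. A minimal standalone sketch of that logic:

import numpy as np

def deaccumulate(totals, run_ids):
    """totals: accumulated rain per file; run_ids: identifier of the model
    run each file belongs to (the snippet uses the file's mtime date)."""
    rates = []
    for i, tot in enumerate(totals):
        if i == 0 or run_ids[i] != run_ids[i - 1]:
            rates.append(np.nan)          # new run: discard the first value
        else:
            rates.append(tot - totals[i - 1])
    return rates

print(deaccumulate([1.0, 1.5, 0.5, 0.75], ['a', 'a', 'b', 'b']))
# [nan, 0.5, nan, 0.25]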
Example #30
0
            tmp = {}
            print(i, f_path)
            d = xa.open_dataset(f_path)[VARS]
            index = [
                pd.Timestamp(
                    dt.datetime.strptime(os.path.basename(f_path),
                                         'd01_%Y%m%d%H00.nc'))
            ]
            if i == 0:
                index0 = index[0]
                dayofyear = to_dayofyear(index[0])

            mh = coords[kk]
            diffarraylon = np.asarray(d['lon'][:] - mh['lon'])
            diffarraylat = np.asarray(d['lat'][:] - mh['lat'])
            diffarrayabs = xau.sqrt(
                xau.square(diffarraylon) + xau.square(diffarraylat))
            #note: the array is stored in array dims [lat, lon]
            ixlat, ixlon = np.where(diffarrayabs == np.min(diffarrayabs))
            idx = {'x': ixlon, 'y': ixlat}
            d = d.isel(x=idx['x'], y=idx['y'])

            tmp['dayofyear'] = dayofyear
            for vv in VARS:
                if d[vv].ndim == 3:
                    tmp[vv] = d[vv][0, 0, 0].values
                else:
                    tmp[vv] = d[vv][0, 0].values
            #tmp[vv] = 0.0
            # first: put in the rain loop
            #if vv=='rain':
            #print('look out! its the', vv, 'vbl')
Example #31
0
def main(config_path):
    config = {}
    with open(config_path) as f_config:
        config = json.load(f_config)

    fmt_date = 'wrfout_d01_{}'.format(config['fmt']['date'])
    path_in = config['output-wrf-raw']
    path_out = config['output-gw']
    mh = config['coords']['Galway']

    out = pd.DataFrame()
    f_paths = sorted(g.glob(os.path.join(path_in, 'wrfout_d01*')))
    rainmm = {}
    rainmmt = {}
    snowmm = {}
    snowmmt = {}
    
    for i, f_path in enumerate(f_paths):
        index = [pd.Timestamp(dt.datetime.strptime(
            os.path.basename(f_path), fmt_date
        ))]
        if i == 0:
            index0 = index[0]
        dayofyear = to_dayofyear(index[0])
        tmp = {}
        d = xa.open_dataset(f_path).sel(Time=0)[VARS]
        # Code to pick out the defined gridcell
        diffarraylon = np.asarray(d.XLONG[:] - mh['lon'])
        diffarraylat = np.asarray(d.XLAT[:] - mh['lat'])
        diffarrayabs = xau.sqrt(
            xau.square(diffarraylon) + xau.square(diffarraylat))
        #note: the array is stored in array dims [lat, lon]
        ixlat, ixlon = np.where(diffarrayabs == np.min(diffarrayabs))
        idx = {
            'x': ixlon,
            'y': ixlat
        }
        
        tmp_p, tmp_rh = pressure_rh(d)
        d = d.isel(west_east=idx['x'], south_north=idx['y'])
        # windspeed
        tmp['dayofyear'] = dayofyear
#        tmp['winddirection_deg'] = np.asscalar(xau.rad2deg(
#            xau.arctan2(-d.U10, -d.V10)
#        ).values)
        tmp['pressure_hPa'] = tmp_p[idx['y'], idx['x']]
        tmp['qcloud'] = d.isel(bottom_top=0).QCLOUD[0, :].values
        tmp['qgraup'] = d.isel(bottom_top=0).QGRAUP[0, :].values
        tmp['qicd'] = d.isel(bottom_top=0).QICE[0, :].values
        tmp['qrain'] = d.isel(bottom_top=0).QRAIN[0, :].values
        tmp['qsnow'] = d.isel(bottom_top=0).QSNOW[0, :].values
        if i == 0:
            rainmm[i] = np.asscalar(d.RAINNC.values + d.RAINC.values)
            rainmmt[i] = np.asscalar(d.RAINNC.values + d.RAINC.values)
            snowmm[i] = np.asscalar(d.SNOWNC.values)
            snowmmt[i] = np.asscalar(d.SNOWNC.values)
        else:
            rainmmt[i] = np.asscalar(d.RAINNC.values + d.RAINC.values)
            rainmm[i] = np.asscalar(d.RAINNC.values + d.RAINC.values) - rainmmt[i - 1]
            snowmmt[i] = np.asscalar(d.SNOWNC.values)
            snowmm[i] = np.asscalar(d.SNOWNC.values) - snowmmt[i - 1]
        tmp['rain_mm'] = rainmm[i]
        tmp['relativehumidity_percent'] = tmp_rh[idx['y'], idx['x']]
        tmp['temperature2m_C'] = np.asscalar((d.T2 - 273.15).values)
        tmp['winddirection_deg'] = np.asscalar(
            270 - xau.rad2deg(xau.arctan2(d.V10, d.U10)).values) % 360
        tmp['windspeed_mPs'] = np.asscalar(xau.sqrt(d.U10**2 + d.V10**2).values)
        tmp['zcldfra'] = d.isel(bottom_top=0).CLDFRA[0, :].values
        tmp['znox'] = (d.isel(bottom_top=0).no2[0, :].values +
                       d.isel(bottom_top=0).no[0, :].values)
        tmp['zo3'] = d.isel(bottom_top=0).o3[0, :].values * 1000.0
        tmp['zpm25'] = d.isel(bottom_top=0).PM2_5_DRY[0, :].values
        tmp['zpm10'] = d.isel(bottom_top=0).PM10[0, :].values
        tmp['zbc1'] = d.isel(bottom_top=0).BC1[0, :].values
        tmp['zbc2'] = d.isel(bottom_top=0).BC2[0, :].values
        tmp['zoc1'] = d.isel(bottom_top=0).OC1[0, :].values
        tmp['zoc2'] = d.isel(bottom_top=0).OC2[0, :].values
        tmp['zso2'] = d.isel(bottom_top=0).so2[0, :].values
        tmp['zpblh'] = np.asscalar(d.PBLH.values)
        tmp['zuv_index'] = np.sum((d.o3 * d.PB / 6950.0).values)
        tmp = pd.DataFrame(tmp, index=index).sort_index(axis=1)
        out = pd.concat((out, tmp))
        print('done {}'.format(f_path))
 #'BC1', 'BC2', 'OC1', 'OC2']
#        out = out.rename({'pressure_sea_hPa': 'pressure_hPa'})
#        (out[['dayofyear', 'pressure_hPa', 'relativehumidity_percent',
#          'temperature2m_C', 'winddirection_deg', 'windspeed_mPs']]
#        .to_dataframe()
#        .to_csv(os.path.join(path_out,
#                          '{}.csv'.format(index0.strftime('%Y%m%d%H%M')))))

    out.to_csv(os.path.join(
        path_out,
        '{}.csv'.format(index0.strftime('%Y%m%d%H%M'))
    ))
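A worked check of the wind-direction formula used above: arctan2(V10, U10) gives the direction the wind blows towards, counter-clockwise from east, and (270 - deg) % 360 converts it to the meteorological convention, i.e. the direction the wind blows from, clockwise from north:

import numpy as np

u, v = 0.0, -5.0  # a 5 m/s wind blowing from the north towards the south
wd = (270 - np.rad2deg(np.arctan2(v, u))) % 360
ws = np.hypot(u, v)
print(wd, ws)  # 0.0 5.0 -> a northerly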
Example #32
0
def plot2d(lon,
           lat,
           d,
           fig,
           newfig=True,
           t=None,
           levels=None,
           levels_n=None,
           norm=None,
           cmap=None,
           label='',
           title='',
           extent=None,
           what='contourf',
           colorbar=True,
           extend='neither',
           format=None,
           config={}):
    if newfig:
        plt.figure(num=fig, figsize=(6, 5))
        ax = set_map(extent)
    else:
        plt.figure(num=fig)
        ax = plt.gca()

    if what == 'quiver':
        # normalise to unit vectors: arrows show direction, not magnitude
        speed = xu.sqrt(d.u10**2 + d.v10**2)
        u10 = d.u10 / speed
        v10 = d.v10 / speed
        plt.quiver(lon.values,
                   lat.values,
                   u10.values,
                   v10.values,
                   scale=40,
                   color='k',
                   alpha=0.35,
                   zorder=10,
                   transform=PROJ_T)
    elif what == 'contour':
        if levels_n:
            cs = plt.contour(lon.values,
                             lat.values,
                             d.values,
                             levels_n,
                             linewidths=0.75,
                             colors='k',
                             transform=PROJ_T)
        else:
            cs = plt.contour(lon.values,
                             lat.values,
                             d.values,
                             linewidths=0.75,
                             colors='k',
                             transform=PROJ_T)
        plt.clabel(cs, inline=1, fmt='%.0f', fontsize=8)
    elif what == 'contourf':
        #        plt.contourf(lon.values, lat.values, d.values, 9,
        plt.contourf(lon.values,
                     lat.values,
                     d.values,
                     norm=norm,
                     cmap=cmap,
                     extend=extend,
                     transform=PROJ_T)
        #        plt.contourf(lon.values, lat.values, d.values,
        #                     levels=levels, norm=norm, cmap=cmap,
        #                     extend=extend, transform=PROJ_T)
        if colorbar:
            if format:
                cbar = plt.colorbar(shrink=0.9, format=format)
            else:
                cbar = plt.colorbar(shrink=0.9)
            cbar.set_label(label)

    if newfig:
        ax.set_xlabel('lon')
        ax.set_ylabel('lat')
        plt.title(title)
        plt.text(
            0.01,
            0.01,
            t.tz_localize(tz='Europe/Dublin').strftime('%Y.%m.%d %a %H:%M'),
            bbox=dict(facecolor='0.75', alpha=0.75),
            transform=ax.transAxes)
        plt.tight_layout()
        plt.subplots_adjust(left=0.075, bottom=0.075)
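What the 'quiver' branch of plot2d does: dividing (u10, v10) by the wind speed plots unit-length arrows, so the quiver layer shows direction only while any filled contours underneath carry the magnitude. A standalone check:

import numpy as np

u, v = 3.0, 4.0
speed = np.sqrt(u**2 + v**2)
print(u / speed, v / speed)  # 0.6 0.8 -> a unit vector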
Example #33
0
lo1 = -21.75
lo2 = 18.5
gridsize = 162 * 114
longs = np.arange(lo1, lo2 + dx, dx)
latit = np.arange(la1, la2 - dy, -dy)
times = newf.variables['Times']
timestr = 9999
dirpath = args.outroot
outpath = os.path.join(dirpath,
                       '%s-w10-surface-level-gfs-1.0.json' % (timestr, ))
gc = 0
outf = open(outpath, 'w')
outf.write('{ ')
uu10 = newf.variables[args.ucomponent]
vv10 = newf.variables[args.vcomponent]
w10 = xu.sqrt(uu10[0, :, :]**2 + vv10[0, :, :]**2)
for xways in range(0, nx):
    for yways in range(0, ny):
        strlat = str(latit[yways])
        strlon = str(longs[xways])
        rawdatavals = w10[yways, xways]
        rounddatavals = round(rawdatavals, 3)
        strval = str(rounddatavals)
        gc = gc + 1
        if gc == gridsize:
            # last cell: close the JSON object instead of adding a comma
            strtowrite = (' "' + strlat + ',' + strlon + '": {"value":"' +
                          strval + 'm/s"} }')
        else:
            strtowrite = (' "' + strlat + ',' + strlon + '": {"value":"' +
                          strval + 'm/s"},')
        outf.write(strtowrite)
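A sketch of the same output built with the json module instead of manual string concatenation, which removes the trailing-comma bookkeeping around gc == gridsize (the names nx, ny, latit, longs, w10 and outpath follow the snippet above):

import json

payload = {}
for xways in range(nx):
    for yways in range(ny):
        key = '{},{}'.format(latit[yways], longs[xways])
        payload[key] = {'value': '%sm/s' % round(float(w10[yways, xways]), 3)}

with open(outpath, 'w') as outf:
    json.dump(payload, outf)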
Example #34
0
def main(config_path):
    config = {}
    with open(config_path) as f_config:
        config = json.load(f_config)

    fmt_date = 'wrfout_d01_{}'.format(config['fmt']['date'])
    path_in = config['output-wrf-raw']
    path_out = config['output-mh']
    mh = config['coords']['Galway']

    out = pd.DataFrame()
    f_paths = sorted(g.glob(os.path.join(path_in, 'wrfout_d01*')))
    for i, f_path in enumerate(f_paths):
        index = [
            pd.Timestamp(
                dt.datetime.strptime(os.path.basename(f_path), fmt_date))
        ]
        if i == 0:
            index0 = index[0]
        dayofyear = to_dayofyear(index[0])
        tmp = {}
        d = xa.open_dataset(f_path).sel(Time=0)[VARS]
        idx = {
            'x': np.asscalar(abs(d.XLONG[0, :] - mh['lon']).argmin()),
            'y': np.asscalar(abs(d.XLAT[:, 0] - mh['lat']).argmin())
        }
        tmp_p, tmp_rh = pressure_rh(d)
        d = d.isel(west_east=idx['x'], south_north=idx['y'])

        # windspeed
        tmp['dayofyear'] = dayofyear
        #        tmp['winddirection_deg'] = np.asscalar(xau.rad2deg(
        #            xau.arctan2(-d.U10, -d.V10)
        #        ).values)
        tmp['pressure_hPa'] = tmp_p[idx['y'], idx['x']]
        tmp['qcloud'] = d.isel(bottom_top=0).QCLOUD.values
        tmp['qgraup'] = d.isel(bottom_top=0).QGRAUP.values
        tmp['qicd'] = d.isel(bottom_top=0).QICE.values
        tmp['qrain'] = d.isel(bottom_top=0).QRAIN.values
        tmp['qsnow'] = d.isel(bottom_top=0).QSNOW.values
        tmp['relativehumidity_percent'] = tmp_rh[idx['y'], idx['x']]
        tmp['temperature2m_C'] = np.asscalar((d.T2 - 273.15).values)
        tmp['winddirection_deg'] = np.asscalar(
            270 - xau.rad2deg(xau.arctan2(d.V10, d.U10)).values) % 360
        tmp['windspeed_mPs'] = np.asscalar(
            xau.sqrt(d.U10**2 + d.V10**2).values)
        tmp['zcldfra'] = d.isel(bottom_top=0).CLDFRA.values

        tmp = pd.DataFrame(tmp, index=index).sort_index(axis=1)
        out = pd.concat((out, tmp))
        print('done {}'.format(f_path))


#        out = out.rename({'pressure_sea_hPa': 'pressure_hPa'})
#        (out[['dayofyear', 'pressure_hPa', 'relativehumidity_percent',
#          'temperature2m_C', 'winddirection_deg', 'windspeed_mPs']]
#        .to_dataframe()
#        .to_csv(os.path.join(path_out,
#                          '{}.csv'.format(index0.strftime('%Y%m%d%H%M')))))

    out.to_csv(
        os.path.join(path_out,
                     '{}_galway.csv'.format(index0.strftime('%Y%m%d%H%M'))))
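One portability note on these snippets: np.asscalar was deprecated in NumPy 1.16 and removed in 1.23; ndarray.item() is the drop-in replacement:

import numpy as np

val = np.array([3.7])
print(val.item())  # 3.7, instead of np.asscalar(val)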