def test_custom_uncert(self):
    """Resample with a custom weight function, requesting uncertainty output."""
    def wf(dist):
        # Linear taper: full weight at zero distance, zero weight at 100 km.
        return 1 - dist / 100000.0

    # Old interpreters take the path without warning checks.
    old_python = (sys.version_info < (2, 6) or
                  (sys.version_info >= (3, 0) and
                   sys.version_info < (3, 4)))
    if old_python:
        res, stddev, counts = kd_tree.resample_custom(
            self.tswath, self.tdata, self.tgrid, 100000, wf,
            with_uncert=True)
    else:
        with warnings.catch_warnings(record=True) as w:
            res, stddev, counts = kd_tree.resample_custom(
                self.tswath, self.tdata, self.tgrid, 100000, wf,
                with_uncert=True)
        self.assertFalse(len(w) != 1,
                         'Failed to create neighbour warning')
        self.assertFalse('Searching' not in str(w[0].message),
                         'Failed to create correct neighbour warning')
    self.assertAlmostEqual(
        res[0], 2.32193149, 5,
        'Failed to calculate custom weighting with uncertainty')
    self.assertAlmostEqual(
        stddev[0], 0.81817972, 5,
        'Failed to calculate custom for gaussian weighting')
    self.assertEqual(
        counts[0], 3,
        'Wrong data point count for custom weighting with uncertainty')
def test_custom_multi(self):
    """Multi-channel custom resampling, one weight function per channel."""
    def wf1(dist):
        return 1 - dist / 100000.0

    def wf2(dist):
        return 1

    def wf3(dist):
        return numpy.cos(dist) ** 2

    shape = (5000, 100)
    data = numpy.fromfunction(lambda y, x: (y + x) * 10 ** -6, shape)
    lons = numpy.fromfunction(lambda y, x: 3 + (10.0 / 100) * x, shape)
    lats = numpy.fromfunction(lambda y, x: 75 - (50.0 / 5000) * y, shape)
    swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
    channel = data.ravel()
    data_multi = numpy.column_stack((channel, channel, channel))
    # Old interpreters take the path without warning checks.
    old_python = (sys.version_info < (2, 6) or
                  (sys.version_info >= (3, 0) and
                   sys.version_info < (3, 4)))
    if old_python:
        res = kd_tree.resample_custom(swath_def, data_multi,
                                      self.area_def, 50000,
                                      [wf1, wf2, wf3], segments=1)
    else:
        with warnings.catch_warnings(record=True) as w:
            res = kd_tree.resample_custom(swath_def, data_multi,
                                          self.area_def, 50000,
                                          [wf1, wf2, wf3], segments=1)
        self.assertFalse(len(w) != 1,
                         'Failed to create neighbour radius warning')
        self.assertFalse('Possible more' not in str(w[0].message),
                         'Failed to create correct neighbour radius warning')
    cross_sum = res.sum()
    expected = 1461.842980746
    self.assertAlmostEqual(cross_sum, expected,
                           msg='Swath multi channel custom resampling failed')
def test_custom(self):
    """Single-channel custom-weighted swath resampling onto the area grid."""
    def wf(dist):
        return 1 - dist / 100000.0

    shape = (5000, 100)
    data = numpy.fromfunction(lambda y, x: (y + x) * 10 ** -5, shape)
    lons = numpy.fromfunction(lambda y, x: 3 + (10.0 / 100) * x, shape)
    lats = numpy.fromfunction(lambda y, x: 75 - (50.0 / 5000) * y, shape)
    swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
    # Old interpreters take the path without warning checks.
    old_python = (sys.version_info < (2, 6) or
                  (sys.version_info >= (3, 0) and
                   sys.version_info < (3, 4)))
    if old_python:
        res = kd_tree.resample_custom(swath_def, data.ravel(),
                                      self.area_def, 50000, wf,
                                      segments=1)
    else:
        with warnings.catch_warnings(record=True) as w:
            res = kd_tree.resample_custom(swath_def, data.ravel(),
                                          self.area_def, 50000, wf,
                                          segments=1)
        self.assertFalse(len(w) != 1,
                         'Failed to create neighbour radius warning')
        self.assertFalse('Possible more' not in str(w[0].message),
                         'Failed to create correct neighbour radius warning')
    cross_sum = res.sum()
    expected = 4872.81050729
    self.assertAlmostEqual(cross_sum, expected,
                           msg='Swath custom resampling failed')
def resample(
    in_lat,
    in_lon,
    out_lat,
    out_lon,
    data,
    method="inv_square",
    neighbours=8,
    radius_of_influence=500000,
    nprocs=4,
):
    """Resample masked swath ``data`` from input lat/lon onto output lat/lon.

    Parameters
    ----------
    in_lat, in_lon : input coordinate arrays; the data's mask is copied
        onto them so masked samples are excluded from the neighbour search.
    out_lat, out_lon : target coordinate arrays.
    data : array whose (masked-array view) mask marks invalid samples.
    method : "inv_square" (inverse-distance-squared over ``neighbours``),
        "bilinear" (inverse-distance over 4 neighbours), or "nn"
        (nearest neighbour).
    neighbours : neighbour count used by the "inv_square" method.
    radius_of_influence : kd-tree search radius.
    nprocs : worker processes for the resampling.

    Returns
    -------
    Masked array on the target grid; the mask is always expanded to one
    entry per element.

    Raises
    ------
    ValueError
        If ``method`` is not one of the three recognised names.
    """
    masked_lat = in_lat.view(np.ma.MaskedArray)
    masked_lon = in_lon.view(np.ma.MaskedArray)
    masked_lon.mask = masked_lat.mask = data.view(np.ma.MaskedArray).mask
    input_def = SwathDefinition(lons=masked_lon, lats=masked_lat)
    target_def = SwathDefinition(lons=out_lon, lats=out_lat)
    # Distances are clipped away from zero (min 0.0625) before inverting so
    # an exact-match neighbour cannot cause a division by zero.
    if method == "inv_square":
        res = resample_custom(
            input_def,
            data,
            target_def,
            radius_of_influence=radius_of_influence,
            neighbours=neighbours,
            weight_funcs=lambda r: 1 / np.clip(r, 0.0625, np.finfo(r.dtype).max)**2,
            fill_value=None,
            nprocs=nprocs,
        )
    elif method == "bilinear":
        res = resample_custom(
            input_def,
            data,
            target_def,
            radius_of_influence=radius_of_influence,
            neighbours=4,
            weight_funcs=lambda r: 1 / np.clip(r, 0.0625, np.finfo(r.dtype).max),
            fill_value=None,
            nprocs=nprocs,
        )
    elif method == "nn":
        res = resample_nearest(
            input_def,
            data,
            target_def,
            radius_of_influence=radius_of_influence,
            fill_value=None,
            nprocs=nprocs,
        )
    else:
        # Bug fix: the message was previously passed logging-style as
        # ValueError("...: %s", method), so the name was never interpolated.
        raise ValueError("Unknown resample method: %s" % method)
    # A fully-valid result comes back with a scalar mask; expand it so
    # callers can always index the mask per element.
    if isinstance(res.mask, bool):
        res.mask = np.tile(res.mask, len(res))
    return res
def test_custom_base(self):
    """Basic custom-weighted resample onto the small test grid."""
    def wf(dist):
        return 1 - dist / 100000.0

    # Old interpreters take the path without warning checks.
    old_python = sys.version_info < (2, 6) or (
        sys.version_info >= (3, 0) and sys.version_info < (3, 4))
    if old_python:
        res = kd_tree.resample_custom(
            self.tswath, self.tdata.ravel(), self.tgrid, 50000, wf,
            reduce_data=False, segments=1)
    else:
        with warnings.catch_warnings(record=True) as w:
            res = kd_tree.resample_custom(
                self.tswath, self.tdata.ravel(), self.tgrid, 50000, wf,
                reduce_data=False, segments=1)
        self.assertFalse(len(w) != 1, "Failed to create neighbour warning")
        self.assertFalse("Searching" not in str(w[0].message),
                         "Failed to create correct neighbour warning")
    self.assertAlmostEqual(res[0], 2.4356757, 5,
                           "Failed to calculate custom weighting")
def test_custom(self):
    """Custom-weighted swath resampling, tolerating an extra pyproj warning."""
    def wf(dist):
        return 1 - dist / 100000.0

    shape = (5000, 100)
    data = np.fromfunction(lambda y, x: (y + x) * 10**-5, shape)
    lons = np.fromfunction(lambda y, x: 3 + (10.0 / 100) * x, shape)
    lats = np.fromfunction(lambda y, x: 75 - (50.0 / 5000) * y, shape)
    swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
    with catch_warnings(UserWarning) as w:
        res = kd_tree.resample_custom(swath_def, data.ravel(),
                                      self.area_def, 50000, wf,
                                      segments=1)
    # PyProj proj/CRS and "more than 8 neighbours" are warned about
    self.assertFalse(len(w) > 2)
    messages = [str(warn.message) for warn in w]
    self.assertTrue(any('Possible more' in m for m in messages))
    if len(w) == 2:
        self.assertTrue(
            any('important projection information' in m for m in messages))
    cross_sum = res.sum()
    expected = 4872.8100347930776
    self.assertAlmostEqual(cross_sum, expected)
def test_custom_multi(self):
    """Three-channel custom resampling with per-channel weight functions."""
    def wf1(dist):
        return 1 - dist / 100000.0

    def wf2(dist):
        return 1

    def wf3(dist):
        return np.cos(dist)**2

    shape = (5000, 100)
    data = np.fromfunction(lambda y, x: (y + x) * 10**-6, shape)
    lons = np.fromfunction(lambda y, x: 3 + (10.0 / 100) * x, shape)
    lats = np.fromfunction(lambda y, x: 75 - (50.0 / 5000) * y, shape)
    swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
    channel = data.ravel()
    data_multi = np.column_stack((channel, channel, channel))
    with catch_warnings(UserWarning) as w:
        res = kd_tree.resample_custom(swath_def, data_multi,
                                      self.area_def, 50000,
                                      [wf1, wf2, wf3], segments=1)
    self.assertFalse(len(w) != 1)
    self.assertFalse('Possible more' not in str(w[0].message))
    cross_sum = res.sum()
    expected = 1461.8428378742638
    self.assertAlmostEqual(cross_sum, expected)
def test_custom_multi(self):
    """Multi-channel resampling: each column gets its own weight function."""
    def wf1(dist):
        return 1 - dist / 100000.0

    def wf2(dist):
        return 1

    def wf3(dist):
        return np.cos(dist) ** 2

    grid_shape = (5000, 100)
    data = np.fromfunction(lambda y, x: (y + x) * 10 ** -6, grid_shape)
    lons = np.fromfunction(lambda y, x: 3 + (10.0 / 100) * x, grid_shape)
    lats = np.fromfunction(lambda y, x: 75 - (50.0 / 5000) * y, grid_shape)
    swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
    flat = data.ravel()
    data_multi = np.column_stack((flat, flat, flat))
    with catch_warnings(UserWarning) as w:
        res = kd_tree.resample_custom(swath_def, data_multi,
                                      self.area_def, 50000,
                                      [wf1, wf2, wf3], segments=1)
    warning_count = len(w)
    self.assertFalse(warning_count != 1)
    self.assertFalse('Possible more' not in str(w[0].message))
    expected = 1461.8428378742638
    self.assertAlmostEqual(res.sum(), expected)
def resample(in_lat, in_lon, out_lat, out_lon, data, method='inv_square',
             neighbours=8, radius_of_influence=500000, nprocs=4):
    """Resample masked swath ``data`` from input lat/lon onto output lat/lon.

    Parameters
    ----------
    in_lat, in_lon : input coordinate arrays; the data's mask is copied
        onto them so masked samples are excluded from the neighbour search.
    out_lat, out_lon : target coordinate arrays.
    data : array whose (masked-array view) mask marks invalid samples.
    method : 'inv_square' (inverse-distance-squared over ``neighbours``),
        'bilinear' (inverse-distance over 4 neighbours), or 'nn'
        (nearest neighbour).
    neighbours : neighbour count used by the 'inv_square' method.
    radius_of_influence : kd-tree search radius.
    nprocs : worker processes for the resampling.

    Returns
    -------
    Masked array on the target grid with a per-element mask.

    Raises
    ------
    ValueError
        If ``method`` is not one of the three recognised names.
    """
    masked_lat = in_lat.view(np.ma.MaskedArray)
    masked_lon = in_lon.view(np.ma.MaskedArray)
    masked_lon.mask = masked_lat.mask = data.view(np.ma.MaskedArray).mask
    input_def = SwathDefinition(lons=masked_lon, lats=masked_lat)
    target_def = SwathDefinition(lons=out_lon, lats=out_lat)
    # Distances are clipped away from zero (min 0.0625) before inverting so
    # an exact-match neighbour cannot cause a division by zero.
    if method == 'inv_square':
        res = resample_custom(
            input_def,
            data,
            target_def,
            radius_of_influence=radius_of_influence,
            neighbours=neighbours,
            weight_funcs=lambda r: 1 / np.clip(r, 0.0625,
                                               np.finfo(r.dtype).max) ** 2,
            fill_value=None,
            nprocs=nprocs)
    elif method == 'bilinear':
        res = resample_custom(
            input_def,
            data,
            target_def,
            radius_of_influence=radius_of_influence,
            neighbours=4,
            weight_funcs=lambda r: 1 / np.clip(r, 0.0625,
                                               np.finfo(r.dtype).max),
            fill_value=None,
            nprocs=nprocs)
    elif method == 'nn':
        res = resample_nearest(
            input_def,
            data,
            target_def,
            radius_of_influence=radius_of_influence,
            fill_value=None,
            nprocs=nprocs)
    else:
        # Bug fix: the message was previously passed logging-style as
        # ValueError("...: %s", method), so the name was never interpolated.
        raise ValueError("Unknown resample method: %s" % method)
    # A fully-valid result comes back with a scalar mask; expand it so
    # callers can always index the mask per element.
    if isinstance(res.mask, bool):
        res.mask = np.tile(res.mask, len(res))
    return res
def test_custom_base(self):
    """Basic custom-weighted resample onto the small test grid."""
    def wf(dist):
        return 1 - dist / 100000.0

    with catch_warnings() as w:
        res = kd_tree.resample_custom(self.tswath, self.tdata.ravel(),
                                      self.tgrid, 50000, wf,
                                      reduce_data=False, segments=1)
    warning_count = len(w)
    self.assertFalse(warning_count != 1)
    self.assertFalse('Searching' not in str(w[0].message))
    self.assertAlmostEqual(res[0], 2.4356757, 5)
def test_custom_base(self):
    """Basic custom-weighted resample onto the small test grid."""
    def wf(dist):
        return 1 - dist / 100000.0

    # Old interpreters take the path without warning checks.
    old_python = (sys.version_info < (2, 6) or
                  (sys.version_info >= (3, 0) and
                   sys.version_info < (3, 4)))
    if old_python:
        res = kd_tree.resample_custom(self.tswath, self.tdata.ravel(),
                                      self.tgrid, 50000, wf,
                                      reduce_data=False, segments=1)
    else:
        with warnings.catch_warnings(record=True) as w:
            res = kd_tree.resample_custom(self.tswath, self.tdata.ravel(),
                                          self.tgrid, 50000, wf,
                                          reduce_data=False, segments=1)
        self.assertFalse(len(w) != 1,
                         'Failed to create neighbour warning')
        self.assertFalse('Searching' not in str(w[0].message),
                         'Failed to create correct neighbour warning')
    self.assertAlmostEqual(res[0], 2.4356757, 5,
                           'Failed to calculate custom weighting')
def test_custom_base(self):
    """Custom-weighting sanity check against the small test grid."""
    def wf(dist):
        # Weight decays linearly to zero at 100 km.
        return 1 - dist / 100000.0

    with catch_warnings(UserWarning) as w:
        res = kd_tree.resample_custom(self.tswath, self.tdata.ravel(),
                                      self.tgrid, 50000, wf,
                                      reduce_data=False, segments=1)
    self.assertFalse(len(w) != 1)
    self.assertFalse('Searching' not in str(w[0].message))
    self.assertAlmostEqual(res[0], 2.4356757, 5)
def calculate_resampled_grid(area_original, grid, area_new, resolution,
                             printStatus):
    """Resample ``grid`` from ``area_original`` onto ``area_new`` with an
    unweighted average of all neighbours within the influence radius.

    Parameters
    ----------
    area_original, area_new : pyresample geometry definitions.
    grid : data array on ``area_original``.
    resolution : target grid resolution (same length unit as the geometry).
    printStatus : when True, emit progress/status prints.

    Returns
    -------
    (result, stddev, count, lon, lat) where ``result`` is quantised to
    20 cm steps and ``stddev``/``count`` come from pyresample's
    ``with_uncert=True`` output; ``lon``/``lat`` are the target-grid
    coordinate arrays.
    """
    if printStatus:
        print(" Step 4: Resampling via pyresample...")
    # Equal weights -> plain average of every neighbour inside the radius.
    wf = lambda r: 1.0
    start_time = time.time()
    # Radius = distance from a new cell's centre to its corner.
    radiusOfInfluence = math.sqrt(2.0) * (resolution / 2.0)
    # assuming that original data comes in 2.45 m grid (2.45^2 = 6)
    nNeighborsMax = int((np.pi * (radiusOfInfluence + 1.0)**2) / 6.0)
    # Consistency fix: these status prints previously ran unconditionally,
    # ignoring the printStatus flag honoured everywhere else here.
    if printStatus:
        print(" Averaging up to " + str(nNeighborsMax) +
              " values from the original GLISTIN data for each new grid cell")
        print(" (the # of points within a circle with r=" +
              str(round(radiusOfInfluence, 0)) + "m ")
        print(
            " assuming 2m separation of original GLISTIN data, and using r equal to the distance"
        )
        print(
            " between the center of each new grid cell and its corners: r=sqrt(2)*new grid resolution/2"
        )
    output = kd_tree.resample_custom(area_original, grid, area_new,
                                     radius_of_influence=radiusOfInfluence,
                                     fill_value=np.nan,
                                     neighbours=nNeighborsMax,
                                     weight_funcs=wf,
                                     with_uncert=True,
                                     reduce_data=False)
    if printStatus:
        print(" Resampling calculation time: %s seconds" % round(
            (time.time() - start_time), 2))
    # comes back as a double
    result = output[0]
    stddev = output[1]
    count = output[2]
    lon, lat = area_new.get_lonlats()
    # change the result to be in steps of 20 cm - this is the vertical
    # resolution of the data (halve, round to 0.1, double == round to 0.2)
    result *= 0.5
    result = np.round(result, 1)
    result *= 2
    return (result, stddev, count, lon, lat)
def test_custom_uncert(self):
    """Custom-weighted resample returning stddev and neighbour counts."""
    def wf(dist):
        return 1 - dist / 100000.0

    with catch_warnings(UserWarning) as w:
        res, stddev, counts = kd_tree.resample_custom(
            self.tswath, self.tdata, self.tgrid, 100000, wf,
            with_uncert=True)
    self.assertTrue(len(w) > 0)
    messages = [str(warning.message) for warning in w]
    self.assertTrue(any('Searching' in m for m in messages))
    self.assertAlmostEqual(res[0], 2.32193149, 5)
    self.assertAlmostEqual(stddev[0], 0.81817972, 5)
    self.assertEqual(counts[0], 3)
def test_custom_base(self):
    """Custom weighting against the small test grid, with failure messages."""
    def wf(dist):
        return 1 - dist / 100000.0

    with catch_warnings() as w:
        res = kd_tree.resample_custom(self.tswath, self.tdata.ravel(),
                                      self.tgrid, 50000, wf,
                                      reduce_data=False, segments=1)
    self.assertFalse(len(w) != 1,
                     'Failed to create neighbour warning')
    self.assertFalse('Searching' not in str(w[0].message),
                     'Failed to create correct neighbour warning')
    self.assertAlmostEqual(res[0], 2.4356757, 5,
                           'Failed to calculate custom weighting')
def test_custom_uncert(self):
    """Custom-weighted resample with uncertainty (stddev, counts) outputs."""
    def wf(dist):
        # Linear falloff reaching zero at 100 km.
        return 1 - dist / 100000.0

    with catch_warnings() as w:
        res, stddev, counts = kd_tree.resample_custom(
            self.tswath, self.tdata, self.tgrid, 100000, wf,
            with_uncert=True)
    self.assertTrue(len(w) > 0)
    found = any('Searching' in str(_w.message) for _w in w)
    self.assertTrue(found)
    self.assertAlmostEqual(res[0], 2.32193149, 5)
    self.assertAlmostEqual(stddev[0], 0.81817972, 5)
    self.assertEqual(counts[0], 3)
def test_custom(self):
    """Single-channel custom-weighted swath resampling."""
    def wf(dist):
        return 1 - dist / 100000.0

    shape = (5000, 100)
    data = np.fromfunction(lambda y, x: (y + x) * 10 ** -5, shape)
    lons = np.fromfunction(lambda y, x: 3 + (10.0 / 100) * x, shape)
    lats = np.fromfunction(lambda y, x: 75 - (50.0 / 5000) * y, shape)
    swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
    with catch_warnings(UserWarning) as w:
        res = kd_tree.resample_custom(swath_def, data.ravel(),
                                      self.area_def, 50000, wf,
                                      segments=1)
    warning_count = len(w)
    self.assertFalse(warning_count != 1)
    self.assertFalse('Possible more' not in str(w[0].message))
    expected = 4872.8100347930776
    self.assertAlmostEqual(res.sum(), expected)
def test_custom(self):
    """Single-channel custom-weighted swath resampling."""
    def wf(dist):
        return 1 - dist / 100000.0

    grid_shape = (5000, 100)
    data = numpy.fromfunction(lambda y, x: (y + x) * 10 ** -5, grid_shape)
    lons = numpy.fromfunction(lambda y, x: 3 + (10.0 / 100) * x, grid_shape)
    lats = numpy.fromfunction(lambda y, x: 75 - (50.0 / 5000) * y, grid_shape)
    swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
    with catch_warnings() as w:
        res = kd_tree.resample_custom(swath_def, data.ravel(),
                                      self.area_def, 50000, wf,
                                      segments=1)
    self.assertFalse(len(w) != 1)
    self.assertFalse('Possible more' not in str(w[0].message))
    cross_sum = res.sum()
    expected = 4872.8100347930776
    self.assertAlmostEqual(cross_sum, expected)
def test_custom_uncert(self):
    """Uncertainty outputs of custom weighting, with failure messages."""
    def wf(dist):
        return 1 - dist / 100000.0

    with catch_warnings() as w:
        res, stddev, counts = kd_tree.resample_custom(
            self.tswath, self.tdata, self.tgrid, 100000, wf,
            with_uncert=True)
    self.assertTrue(len(w) > 0,
                    'Failed to create neighbour warning')
    messages = [str(_w.message) for _w in w]
    self.assertTrue(any('Searching' in m for m in messages),
                    'Failed to create correct neighbour warning')
    self.assertAlmostEqual(
        res[0], 2.32193149, 5,
        'Failed to calculate custom weighting with uncertainty')
    self.assertAlmostEqual(
        stddev[0], 0.81817972, 5,
        'Failed to calculate custom for gaussian weighting')
    self.assertEqual(
        counts[0], 3,
        'Wrong data point count for custom weighting with uncertainty')
def resample(grid, variables, resolution, test, name):
    """Resample ``grid`` (regular lat/lon described by ``variables``) onto a
    UTM grid built from the corner coordinates, averaging with equal weights.

    Parameters
    ----------
    grid : 2-D data array on the original lat/lon grid.
    variables : indexable of grid metadata (line counts, start/spacing,
        corner lat/lons -- see the index comments below).
    resolution : target UTM cell size.
    test : downsampling stride (1 = full resolution) for quick test runs.
    name : label forwarded to plot_data.

    Returns
    -------
    (result, area_new, x_utm_corners, y_utm_corners, x_utm_centers,
    y_utm_centers)
    """
    # Assuming data and annotation file have the same name, just different
    # type and contain only one period
    # lat/lon_lines required to be int not float
    lat_lines = int(variables[0])
    lon_lines = int(variables[1])
    print(lat_lines)
    print(lon_lines)
    lat_start = variables[2]
    lon_start = variables[3]
    lat_space = variables[4]
    lon_space = variables[5]
    # ul_lat = variables[6]
    # ul_lon = variables[7]
    ur_lat = variables[8]
    ur_lon = variables[9]
    ll_lat = variables[10]
    ll_lon = variables[11]
    # lr_lat = variables[12]
    # lr_lon = variables[13]
    (x_utm_corners, y_utm_corners, x_utm_centers, y_utm_centers,
     n_xcells, n_ycells) = \
        create_utms(resolution, ll_lat, ll_lon, ur_lat, ur_lon)
    # Cell-edge coordinate vectors (half-cell offset from the start values).
    lats = np.linspace(lat_start - 0.5 * lat_space,
                       (lat_start - 0.5 * lat_space) +
                       (lat_space * (lat_lines + 1)),
                       lat_lines + 1)
    lons = np.linspace(lon_start - 0.5 * lon_space,
                       (lon_start - 0.5 * lon_space) +
                       (lon_space * (lon_lines + 1)),
                       lon_lines + 1)
    # Option to downside for testing purposes
    grid = grid[::test, ::test]
    lats = lats[::test]
    lons = lons[::test]
    lon_lines = lons.size
    lat_lines = lats.size
    '''if test is not 1:
        lon_lines = math.ceil(lon_lines / test)
        lat_lines = math.ceil(lat_lines / test)
    '''
    #
    # Original Area definition:
    #
    area_id = 'WGS84'
    description = 'lat-lon'
    # NOTE(review): `annotation` is neither a parameter nor a local of this
    # function; unless it exists as a module-level global this line raises
    # NameError -- confirm against the calling code.
    proj_id = annotation[annotation.find("gr"):annotation.find(".")]
    proj_string = 'EPSG:4326'
    width = lon_lines
    height = lat_lines
    area_extent = (ll_lon, ll_lat, ur_lon, ur_lat)
    area_original = geometry.AreaDefinition(area_id, description, proj_id,
                                            proj_string, width, height,
                                            area_extent)
    print(area_original.shape)
    # (x_low, x_high, dx, y_low, y_high, dy) = get_utm_range(ll_lat, ll_lon, ur_lat, ur_lon)
    # (x_lower, x_higher, y_lower, y_higher) = scale_dimensions(.2, x_low, x_high, y_low, y_high, dx, dy)
    # UTM zone number of the lower-left corner is used to pick the EPSG code.
    tag = str(utm.from_latlon(ll_lat, ll_lon)[2])
    print("Tag: " + tag)
    #
    # New Area definition we have defined:
    #
    area_id_new = area_id
    description_new = 'UTM ' + tag + "N"
    proj_id_new = proj_id + '_new'
    proj_string_new = 'EPSG:326' + tag
    width_new = n_xcells
    height_new = n_ycells
    print(width_new)
    print(height_new)
    area_extent_new = (x_utm_corners[0], y_utm_corners[-1],
                       x_utm_centers[-1], y_utm_corners[0])  # may need to revisit
    area_new = geometry.AreaDefinition(area_id_new, description_new,
                                       proj_id_new, proj_string_new,
                                       width_new, height_new,
                                       area_extent_new)
    print("New area shape:")
    print(area_new.shape)
    # print("get_lonlats:")
    # print(area_new.get_lonlats())
    print("get_lonlats shape:")
    print(area_new.get_lonlats()[0].shape, area_new.get_lonlats()[1].shape)
    print("area_original shape:")
    print(area_original.shape)
    print("Grid shape:")
    print(grid.shape)
    # Equal weights: plain average of all neighbours inside the radius.
    wf = lambda r: 1
    start_time = time.time()
    # Multiplying radius of influence by test to account for skipping data
    # when downsizing
    result = kd_tree.resample_custom(
        area_original, grid, area_new,
        radius_of_influence=test * math.sqrt(2 * (resolution / 2)**2),
        fill_value=np.nan, weight_funcs=wf)
    #result = kd_tree.resample_nearest(area_original, grid, area_new, radius_of_influence=test * math.sqrt(2 * (resolution / 2)**2), fill_value=np.nan)
    print("Result calculation time: --- %s seconds ---" %
          (time.time() - start_time))
    print("Result shape:")
    print(result.shape)
    '''
    xe = np.linspace(x_low, x_high, width_new)
    ye = np.linspace(y_high, y_low, height_new)
    '''
    xx, yy = np.meshgrid(x_utm_corners, y_utm_corners)
    print("Resample complete")
    plot_data(grid, xx, yy, result, name)
    return (result, area_new, x_utm_corners, y_utm_corners,
            x_utm_centers, y_utm_centers)
def resample(grid, annotation, vardict, lat_centers, lon_centers, area_new,
             test, resolution):
    """Resample ``grid`` (regular lat/lon, cell centers given by
    ``lat_centers``/``lon_centers``) onto ``area_new`` using an equal-weight
    kd-tree average; returns the resampled 2-D array.

    NOTE(review): ``annotation`` and ``vardict`` are accepted but never used
    in this body -- confirm whether callers still need them.
    """
    print("Grid shape:")
    print(grid.shape)
    # num_grid_rows = grid.shape[0]
    # num_grid_columns = grid.shape[1]
    #
    # Original Area definition:
    #
    #area_id = 'WGS84'
    #description = 'lat-lon'
    #proj_id = annotation[annotation.find("gr"):annotation.find(".")]
    #proj_string = 'EPSG:4326'
    #width = num_grid_columns
    #height = num_grid_rows
    area_id = 'WGS84'
    description = 'lat-lon'
    proj_string = 'proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs'
    proj_id = 'EPSG:4326'
    width = np.shape(lon_centers)[0]
    height = np.shape(lat_centers)[0]
    print(lon_centers[0], lat_centers[-1], lon_centers[0], lat_centers[0])
    # define the area_extent as [min_lon, min_lat, max_lon, max_lat]
    ## if the last lat value is smaller than the first lat value, then we
    ## have to switch which lat value to use when defining area_extent
    if lat_centers[-1] < lat_centers[0]:
        area_extent = (lon_centers[0], lat_centers[-1],
                       lon_centers[-1], lat_centers[0])
    ## otherwise the latitudes are already ascending
    else:
        area_extent = (lon_centers[0], lat_centers[0],
                       lon_centers[-1], lat_centers[-1])
    area_original = geometry.AreaDefinition(area_id, description,
                                            proj_id, proj_string,
                                            width, height, area_extent)
    print(datetime.now())
    print("--- Resampling ---")
    # Equal weights: plain average of all neighbours inside the radius.
    wf = lambda r: 1
    start_time = time()
    # Multiplying radius of influence by test to account for skipping data
    # when downsizing
    result = kd_tree.resample_custom(
        area_original, grid, area_new,
        radius_of_influence=test * math.sqrt(2 * (resolution / 2)**2),
        fill_value=np.nan, weight_funcs=wf)
    ## if the last lat value is smaller than the first lat value, then we
    ## have to flip the result in the up/down direction (rows)
    if lat_centers[-1] < lat_centers[0]:
        result = np.flipud(result)
    print("Result calculation time: --- %s seconds ---" %
          (time() - start_time))
    print("Resulting shape:")
    print(result.shape)
    '''
    xe = np.linspace(x_low, x_high, width_new)
    ye = np.linspace(y_high, y_low, height_new)
    '''
    print("Resample complete\n")
    return result
def resample_WRF(self, st, et, delta): ''' Create Times variable and resample emission species DataArray. ''' # generate date every hour datetime_list = list(self.perdelta(st, et, timedelta(hours=1))) t_format = '%Y-%m-%d_%H:%M:%S' # convert datetime to date string Times = [] for timstep in datetime_list: times_str = strftime(t_format, timstep.timetuple()) Times.append(times_str) # the method of creating "Times" with unlimited dimension # ref: htttps://github.com/pydata/xarray/issues/3407 Times = xr.DataArray(np.array(Times, dtype=np.dtype(('S', 19))), dims=['Time']) self.chemi = xr.Dataset({'Times': Times}) # resample orig_def = SwathDefinition(lons=self.emi['longitude'], lats=self.emi['latitude']) for vname in self.emi.data_vars: if 'E_' in vname: logging.info(f'Resample {vname} ...') resampled_list = [] for t in range(self.emi[vname].shape[0]): # different resample methods # see: http://earthpy.org/interpolation_between_grids_with_pyresample.html if resample_method == 'nearest': resampled_list.append(resample_nearest(orig_def, self.emi[vname][t, :, :].values, self.area_def, radius_of_influence=500000, fill_value=0.) ) elif resample_method == 'idw': resampled_list.append(resample_custom(orig_def, self.emi[vname][t, :, :].values, self.area_def, radius_of_influence=500000, neighbours=10, weight_funcs=lambda r: 1/r**2, fill_value=0.) ) # combine 2d array list to one 3d # ref: https://stackoverflow.com/questions/4341359/ # convert-a-list-of-2d-numpy-arrays-to-one-3d-numpy-array # we also need to flip the 3d array, # because of the "strange" order of 1d array in MEIC nc file # then add another dimension for zdim. resampled_data = np.flip( np.rollaxis( np.dstack(resampled_list), -1), 1)[:, np.newaxis, ...] 
# assign to self.chemi with dims self.chemi[vname] = xr.DataArray(resampled_data, dims=['Time', 'emissions_zdim', 'south_north', 'west_east']) # add attrs needed by WRF-Chem v_attrs = {'FieldType': 104, 'MemoryOrder': 'XYZ', 'description': vname, 'stagger': '', 'coordinates': 'XLONG XLAT', 'units': self.emi[vname].attrs['units'] } self.chemi[vname] = self.chemi[vname].assign_attrs(v_attrs) logging.debug(' '*8 + ' min: ' + str(self.chemi[vname].min().values) + ' max: ' + str(self.chemi[vname].max().values) + ' mean ' + str(self.chemi[vname].mean().values) )