def make_coordinate_combinations(lat=None, lon=None, alt=None, time=None):
    '''Generates every combination of stacked and unstacked coordinates that podpac expects to handle.

    Parameters
    ----------
    lat : podpac.core.coordinates.Coordinates1d, optional
        1D coordinate object used to create the Coordinates objects that contain the latitude dimension.
        By default uses: ArrayCoordinates1d([0, 1, 2], name='lat')
    lon : podpac.core.coordinates.Coordinates1d, optional
        Same as above, but for longitude. By default uses: ArrayCoordinates1d([2, 4, 6], name='lon')
    alt : podpac.core.coordinates.Coordinates1d, optional
        Same as above, but for altitude. By default uses: ArrayCoordinates1d([6, 9, 12], name='alt')
    time : podpac.core.coordinates.Coordinates1d, optional
        Same as above, but for time. By default uses:
        ArrayCoordinates1d(['2018-01-01', '2018-02-01', '2018-03-01'], name='time')

    Returns
    -------
    OrderedDict
        Dictionary of all the podpac Coordinates objects podpac expects to handle. The dictionary keys are
        tuples of coordinate dimensions, and the values are the corresponding Coordinates objects.

    Notes
    -----
    When custom lat, lon, alt, and time 1D coordinates are given, only those with the same number of
    coordinates are stacked together. For example, if lat, lon, alt, and time have sizes 3, 4, 5, and 6,
    respectively, no stacked coordinates are created. No exception or warning is raised in this case.
    '''
    # make the 1D coordinates
    if lat is None:
        lat = ArrayCoordinates1d([0, 1, 2], name='lat')
    if lon is None:
        lon = ArrayCoordinates1d([2, 4, 6], name='lon')
    if alt is None:
        alt = ArrayCoordinates1d([6, 9, 12], name='alt')
    if time is None:
        time = ArrayCoordinates1d(['2018-01-01', '2018-02-01', '2018-03-01'], name='time')

    d = dict([('lat', lat), ('lon', lon), ('alt', alt), ('time', time)])
    dims_list = get_dims_list()

    # make the stacked coordinates
    for dim in [dim for dims in dims_list for dim in dims if '_' in dim]:
        cs = [d[k] for k in dim.split('_')]
        if any(c.size != cs[0].size for c in cs):
            continue  # can't stack coordinates of different sizes
        d[dim] = StackedCoordinates(cs)

    # make the ND coordinates
    coord_collection = OrderedDict()
    for dims in dims_list:
        if any(dim not in d for dim in dims):
            continue
        coord_collection[dims] = Coordinates([d[dim] for dim in dims])
    return coord_collection
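# Hedged usage sketch for make_coordinate_combinations above: iterate over the returned OrderedDict to
# see which dimension tuples were generated. Assumes the function and ArrayCoordinates1d are available
# in the current namespace (as in the module above); the printed shapes are illustrative only and are
# not asserted by the tests below.
lat = ArrayCoordinates1d([0, 1, 2], name='lat')
lon = ArrayCoordinates1d([2, 4, 6], name='lon')
combos = make_coordinate_combinations(lat=lat, lon=lon)
for dims, coords in combos.items():
    # e.g. ('lat', 'lon') -> shape (3, 3), ('lat_lon',) -> shape (3,)
    print(dims, coords.shape)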
def test_custom_creation_no_stack(self): lat = ArrayCoordinates1d([0, 1, 2], name="lat") lon = ArrayCoordinates1d([2, 3, 4, 5, 6], name="lon") alt = ArrayCoordinates1d([6, 7, 8, 9, 10, 11, 12], name="alt") time = ArrayCoordinates1d(["2018-01-01", "2018-02-01"], name="time") coords = ctu.make_coordinate_combinations(lat=lat, lon=lon, alt=alt, time=time) assert len(coords) > 0 assert len(coords) == 48
def get_modified_coordinates1d(self, coord, dim):
    """
    Get the desired 1d coordinates for the given dimension, substituting the year attr into the time
    coordinates.

    Parameters
    ----------
    coord : Coordinates
        The requested input coordinates
    dim : str
        Dimension for doing the substitution

    Returns
    -------
    coords1d : ArrayCoordinates1d
        The modified coordinates for the given dimension.
    """
    if dim != "time":
        return coord[dim]

    times = coord["time"]
    delta = np.datetime64(self.year)
    new_times = [add_coord(c, delta - c.astype("datetime64[Y]")) for c in times.coordinates]
    return ArrayCoordinates1d(new_times, name="time")
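# Minimal standalone sketch of the year-substitution arithmetic above, using plain numpy instead of
# podpac's add_coord (which handles non-linear year/month deltas). Here the offset is re-applied in
# whole days, so results can differ from the method above by one day around leap years; the year and
# time values are made up for illustration.
import numpy as np

year = "2018"
times = np.array(["2001-03-15", "2005-07-01"], dtype="datetime64[D]")
offsets = times - times.astype("datetime64[Y]").astype("datetime64[D]")  # days since Jan 1 of each year
new_times = np.datetime64(year, "D") + offsets
print(new_times)  # ['2018-03-15' '2018-07-01']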
def get_coordinates(self): """ Get the full WCS grid. """ metadata = self.client.contents[self.layer] # TODO select correct boundingbox by crs # coordinates w, s, e, n = metadata.boundingBoxWGS84 low = metadata.grid.lowlimits high = metadata.grid.highlimits xsize = int(high[0]) - int(low[0]) ysize = int(high[1]) - int(low[1]) coords = [] coords.append(UniformCoordinates1d(s, n, size=ysize, name="lat")) coords.append(UniformCoordinates1d(w, e, size=xsize, name="lon")) if metadata.timepositions: coords.append( ArrayCoordinates1d(metadata.timepositions, name="time")) if metadata.timelimits: raise NotImplementedError("TODO") return Coordinates(coords, crs=self.crs)
def get_coordinates(self): """ Get the full WCS grid. """ metadata = self.client.contents[self.layer] # coordinates bbox = metadata.boundingBoxWGS84 crs = "EPSG:4326" logging.debug("WCS available boundingboxes: {}".format( metadata.boundingboxes)) for bboxes in metadata.boundingboxes: if bboxes["nativeSrs"] == self.crs: bbox = bboxes["bbox"] crs = self.crs break low = metadata.grid.lowlimits high = metadata.grid.highlimits xsize = int(high[0]) - int(low[0]) ysize = int(high[1]) - int(low[1]) # Based on https://www.ctps.org/geoserver/web/wicket/bookmarkable/org.geoserver.wcs.web.demo.WCSRequestBuilder;jsessionid=9E2AA99F95410C694D05BA609F25527C?0 # The above link points to a geoserver implementation, which is the reference implementation. # WCS version 1.0.0 always has order lon/lat while version 1.1.1 actually follows the CRS if self.version == "1.0.0": rbbox = { "lat": [bbox[1], bbox[3], ysize], "lon": [bbox[0], bbox[2], xsize] } else: rbbox = resolve_bbox_order(bbox, crs, (xsize, ysize)) coords = [] coords.append( UniformCoordinates1d(rbbox["lat"][0], rbbox["lat"][1], size=rbbox["lat"][2], name="lat")) coords.append( UniformCoordinates1d(rbbox["lon"][0], rbbox["lon"][1], size=rbbox["lon"][2], name="lon")) if metadata.timepositions: coords.append( ArrayCoordinates1d(metadata.timepositions, name="time")) if metadata.timelimits: raise NotImplementedError("TODO") return Coordinates(coords, crs=crs)
def get_modified_coordinates1d(self, coords, dim): """Returns the expanded coordinates for the requested dimension, depending on the expansion parameter for the given dimension. Parameters ---------- dim : str Dimension to expand Returns ------- expanded : Coordinates1d Expanded coordinates """ coords1d = coords[dim] expansion = getattr(self, dim) if not expansion: # i.e. if list is empty # no expansion in this dimension return coords1d if len(expansion) == 2: # use available native coordinates dstart = make_coord_delta(expansion[0]) dstop = make_coord_delta(expansion[1]) available_coordinates = self.coordinates_source.find_coordinates() if len(available_coordinates) != 1: raise ValueError( "Cannot implicity expand coordinates; too many available coordinates" ) acoords = available_coordinates[0][dim] cs = [ acoords.select((add_coord(x, dstart), add_coord(x, dstop))) for x in coords1d.coordinates ] elif len(expansion) == 3: # use a explicit step size dstart = make_coord_delta(expansion[0]) dstop = make_coord_delta(expansion[1]) step = make_coord_delta(expansion[2]) cs = [ UniformCoordinates1d(add_coord(x, dstart), add_coord(x, dstop), step) for x in coords1d.coordinates ] else: raise ValueError("Invalid expansion attrs for '%s'" % dim) return ArrayCoordinates1d(np.concatenate([c.coordinates for c in cs]), **coords1d.properties)
def get_modified_coordinates1d(self, coords, dim): """ Get the desired 1d coordinates for the given dimension, depending on the selection attr for the given dimension:: Parameters ---------- coords : Coordinates The requested input coordinates dim : str Dimension for doing the selection Returns ------- coords1d : ArrayCoordinates1d The selected coordinates for the given dimension. """ coords1d = coords[dim] selection = getattr(self, dim) if not selection: # no selection in this dimension return coords1d if len(selection) == 1 or ((len(selection) == 2) and (selection[0] == selection[1])): # a single value coords1d = ArrayCoordinates1d(selection, **coords1d.properties) elif len(selection) == 2: # use available source coordinates within the selected bounds available_coordinates = self.coordinates_source.find_coordinates() if len(available_coordinates) != 1: raise ValueError( "SelectCoordinates Node cannot determine the step size between bounds for dimension" + "{} because source node (source.find_coordinates()) has {} different coordinates." .format(dim, len(available_coordinates)) + "Please specify step-size for this dimension.") coords1d = available_coordinates[0][dim].select(selection) elif len(selection) == 3: # uniform coordinates using start, stop, and step coords1d = UniformCoordinates1d(*selection, **coords1d.properties) else: raise ValueError("Invalid selection attrs for '%s'" % dim) return coords1d
def get_modified_coordinates1d(self, coords, dim): """ Get the desired 1d coordinates for the given dimension, depending on the selection attr for the given dimension:: Parameters ---------- coords : Coordinates The requested input coordinates dim : str Dimension for doing the selection Returns ------- coords1d : ArrayCoordinates1d The selected coordinates for the given dimension. """ coords1d = coords[dim] selection = getattr(self, dim) if not selection: # no selection in this dimension return coords1d if len(selection) == 1: # a single value coords1d = ArrayCoordinates1d(selection, **coords1d.properties) elif len(selection) == 2: # use available source coordinates within the selected bounds available_coordinates = self.coordinates_source.find_coordinates() if len(available_coordinates) != 1: raise ValueError( "Cannot select within bounds; too many available coordinates" ) coords1d = available_coordinates[0][dim].select(selection) elif len(selection) == 3: # uniform coordinates using start, stop, and step coords1d = UniformCoordinates1d(*selection, **coords1d.properties) else: raise ValueError("Invalid selection attrs for '%s'" % dim) return coords1d
def native_coordinates(self):
    """{native_coordinates}

    Returns
    -------
    Coordinates
        {native_coordinates}

    Notes
    -----
    This is a little tricky and doesn't fit into the usual PODPAC method, as the service is actually
    doing the data wrangling for us...
    """
    # TODO update so that we don't rely on _requested_coordinates if possible
    if not self._requested_coordinates:
        return self.wcs_coordinates

    cs = []
    for dim in self.wcs_coordinates.dims:
        if dim in self._requested_coordinates.dims:
            c = self._requested_coordinates[dim]
            if c.size == 1:
                cs.append(ArrayCoordinates1d(c.coordinates[0], name=dim))
            elif isinstance(c, UniformCoordinates1d):
                cs.append(UniformCoordinates1d(c.bounds[0], c.bounds[1], abs(c.step), name=dim))
            else:
                # TODO: generalize/fix this
                # WCS calls require a regular grid (otherwise we would have to make multiple WCS calls)
                cs.append(UniformCoordinates1d(c.bounds[0], c.bounds[1], size=c.size, name=dim))
        else:
            cs.append(self.wcs_coordinates[dim])
    c = Coordinates(cs)
    return c
def get_native_coordinates(self):
    """{get_native_coordinates}

    The default implementation builds the coordinates from the dataset columns configured for each
    dimension (the `*_col` traits). Multiple configured dimensions are stacked together. Child classes
    may have to overload this method for other layouts.
    """
    coords = []
    for d in self.dims:
        if trait_is_defined(self, d + '_col') or (
                d + '_col' not in self.trait_names() and hasattr(self, d + '_col')):
            i = getattr(self, '_{}_col'.format(d))
            if d == 'time':
                c = np.array(self.dataset.iloc[:, i], np.datetime64)
            else:
                c = np.array(self.dataset.iloc[:, i])
            coords.append(ArrayCoordinates1d(c, name=d))
    if len(coords) > 1:
        coords = [StackedCoordinates(coords)]
    return Coordinates(coords)
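# Illustrative sketch of what the method above produces for a tabular dataset: each configured column
# becomes an ArrayCoordinates1d, and multiple columns are stacked into one multi-dimensional coordinate.
# The DataFrame contents are made up, and the import paths are assumptions based on this module.
import numpy as np
import pandas as pd
from podpac.core.coordinates import ArrayCoordinates1d, Coordinates, StackedCoordinates

df = pd.DataFrame({
    "lat": [0.0, 1.0, 2.0],
    "lon": [10.0, 11.0, 12.0],
    "time": ["2018-01-01", "2018-01-02", "2018-01-03"],
})
cs = [
    ArrayCoordinates1d(np.array(df["lat"]), name="lat"),
    ArrayCoordinates1d(np.array(df["lon"]), name="lon"),
    ArrayCoordinates1d(np.array(df["time"], np.datetime64), name="time"),
]
coords = Coordinates([StackedCoordinates(cs)])  # a single stacked lat_lon_time dimension of size 3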
def test_custom_creation_mixed_type_1d(self):
    lat = ArrayCoordinates1d([0.0, 1.0, 2.0, 4.0], name='lat')
    coords = ctu.make_coordinate_combinations(lat=lat)
    assert len(coords) > 0
    assert len(coords) == 84
def test_custom_creation_latlon_stack(self):
    alt = ArrayCoordinates1d([6, 7, 8, 9, 10, 11, 12], name='alt')
    time = ArrayCoordinates1d(['2018-01-01', '2018-02-01'], name='time')
    coords = ctu.make_coordinate_combinations(alt=alt, time=time)
    assert len(coords) > 0
    assert len(coords) == 70
def get_wcs_coordinates(self):
    """Retrieves the native coordinates reported by the WCS service.

    Returns
    -------
    Coordinates
        The native coordinates reported by the WCS service.

    Notes
    -----
    This assumes a `time`, `lat`, `lon` order for the coordinates, and currently doesn't handle `alt`
    coordinates.

    Raises
    ------
    Exception
        Raised if the required dependencies are not installed.
    """
    if requests is not None:
        capabilities = requests.get(self.get_capabilities_url)
        if capabilities.status_code != 200:
            raise Exception("Could not get capabilities from WCS server")
        capabilities = capabilities.text

    # TODO: remove urllib3 support - requests is sufficient
    elif urllib3 is not None:
        if certifi is not None:
            http = urllib3.PoolManager(ca_certs=certifi.where())
        else:
            http = urllib3.PoolManager()
        r = http.request('GET', self.get_capabilities_url)
        capabilities = r.data
        if r.status != 200:
            raise Exception("Could not get capabilities from WCS server")
    else:
        raise Exception("Do not have a URL request library to get WCS data.")

    if lxml is not None:
        # could skip using lxml and always use html.parser instead, which seems to work but lxml might be faster
        capabilities = bs4.BeautifulSoup(capabilities, 'lxml')
    else:
        capabilities = bs4.BeautifulSoup(capabilities, 'html.parser')

    domain = capabilities.find('wcs:spatialdomain')
    pos = domain.find('gml:envelope').get_text().split()
    lonlat = np.array(pos, float).reshape(2, 2)
    grid_env = domain.find('gml:gridenvelope')
    low = np.array(grid_env.find('gml:low').text.split(), int)
    high = np.array(grid_env.find('gml:high').text.split(), int)
    size = high - low
    dlondlat = (lonlat[1, :] - lonlat[0, :]) / size
    bottom = lonlat[:, 1].min() + dlondlat[1] / 2
    top = lonlat[:, 1].max() - dlondlat[1] / 2
    left = lonlat[:, 0].min() + dlondlat[0] / 2
    right = lonlat[:, 0].max() - dlondlat[0] / 2

    timedomain = capabilities.find("wcs:temporaldomain")
    if timedomain is None:
        return Coordinates([
            UniformCoordinates1d(top, bottom, size=size[1], name='lat'),
            UniformCoordinates1d(left, right, size=size[0], name='lon'),
        ])

    date_re = re.compile('[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}')
    times = str(timedomain).replace('<gml:timeposition>', '').replace('</gml:timeposition>', '').split('\n')
    times = np.array([t for t in times if date_re.match(t)], np.datetime64)

    return Coordinates([
        ArrayCoordinates1d(times, name='time'),
        UniformCoordinates1d(top, bottom, size=size[1], name='lat'),
        UniformCoordinates1d(left, right, size=size[0], name='lon'),
    ])
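# Worked example of the edge-to-center conversion above, with made-up numbers: a 2x2-degree WGS84
# envelope covered by a 4x4 grid of 0.5-degree cells. The envelope gives cell edges, so the first and
# last coordinates are pulled in by half a cell to sit on cell centers. Assumes UniformCoordinates1d
# is imported as in the method above.
import numpy as np

lonlat = np.array([[-106.0, 40.0], [-104.0, 42.0]])  # [[min lon, min lat], [max lon, max lat]]
size = np.array([4, 4])
dlondlat = (lonlat[1, :] - lonlat[0, :]) / size       # [0.5, 0.5] degrees per cell
bottom = lonlat[:, 1].min() + dlondlat[1] / 2         # 40.25
top = lonlat[:, 1].max() - dlondlat[1] / 2            # 41.75
lat = UniformCoordinates1d(top, bottom, size=size[1], name='lat')
# lat.coordinates -> [41.75, 41.25, 40.75, 40.25]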
def _eval(self, coordinates, output=None, _selector=None):
    """Evaluates this node using the supplied coordinates.

    Parameters
    ----------
    coordinates : podpac.Coordinates
        {requested_coordinates}
    output : podpac.UnitsDataArray, optional
        {eval_output}
    _selector : callable(coordinates, request_coordinates)
        {eval_selector}

    Returns
    -------
    {eval_return}
    """
    # The size of this kernel is used to figure out the expanded size
    full_kernel = self.kernel

    # expand the coordinates
    # The next line effectively drops extra coordinates, so we have to add those later in case the
    # source is some sort of reduction Node.
    kernel_dims = [kd for kd in coordinates.dims if kd in self.kernel_dims]
    missing_dims = [kd for kd in coordinates.dims if kd not in self.kernel_dims]

    exp_coords = []
    exp_slice = []
    for dim in kernel_dims:
        coord = coordinates[dim]
        s = full_kernel.shape[self.kernel_dims.index(dim)]
        if s == 1 or not isinstance(coord, (UniformCoordinates1d, ArrayCoordinates1d)):
            exp_coords.append(coord)
            exp_slice.append(slice(None))
            continue

        if isinstance(coord, UniformCoordinates1d):
            s_start = -s // 2
            s_end = max(s // 2 - ((s + 1) % 2), 1)
            # The 1e-14 is for floating point error because if endpoint is slightly
            # in front of step * N then the endpoint is excluded
            # ALSO: MUST use size instead of step otherwise floating point error
            # makes the xarray arrays not align. The following HAS to be true:
            #     np.diff(coord.coordinates).mean() == coord.step
            exp_coords.append(
                UniformCoordinates1d(
                    add_coord(coord.start, s_start * coord.step),
                    add_coord(coord.stop, s_end * coord.step + 1e-14 * coord.step),
                    size=coord.size - s_start + s_end,  # HAVE to use size, see note above
                    **coord.properties
                )
            )
            exp_slice.append(slice(-s_start, -s_end))
        elif isinstance(coord, ArrayCoordinates1d):
            if not coord.is_monotonic or coord.size < 2:
                exp_coords.append(coord)
                exp_slice.append(slice(None))
                continue

            arr_coords = coord.coordinates
            delta_start = arr_coords[1] - arr_coords[0]
            extra_start = np.arange(arr_coords[0] - delta_start * (s // 2), arr_coords[0], delta_start)
            delta_end = arr_coords[-1] - arr_coords[-2]
            # The 1e-14 is for floating point error to make sure endpoint is included
            extra_end = np.arange(
                arr_coords[-1] + delta_end, arr_coords[-1] + delta_end * (s // 2) + delta_end * 1e-14, delta_end
            )
            arr_coords = np.concatenate([extra_start, arr_coords, extra_end])
            exp_coords.append(ArrayCoordinates1d(arr_coords, **coord.properties))
            exp_slice.append(slice(extra_start.size, -extra_end.size))

    # Add missing dims back in -- this is needed in case the source is a reduce node.
    exp_coords += [coordinates[d] for d in missing_dims]

    # Create expanded coordinates
    exp_slice = tuple(exp_slice)
    expanded_coordinates = Coordinates(exp_coords, crs=coordinates.crs, validate_crs=False)

    if settings["DEBUG"]:
        self._expanded_coordinates = expanded_coordinates

    # evaluate source using expanded coordinates, convolve, and then slice out original coordinates
    source = self.source.eval(expanded_coordinates, _selector=_selector)

    kernel_dims_u = kernel_dims
    kernel_dims = self.kernel_dims
    sum_dims = [d for d in kernel_dims if d not in source.dims]
    # Sum out the extra dims
    full_kernel = full_kernel.sum(axis=tuple([kernel_dims.index(d) for d in sum_dims]))
    exp_slice = [exp_slice[i] for i in range(len(kernel_dims_u)) if kernel_dims_u[i] not in sum_dims]
    kernel_dims = [d for d in kernel_dims if d in source.dims]

    # Put the kernel axes in the correct order
    # The (if d in kernel_dims) takes care of "output", which can be optionally present
    full_kernel = full_kernel.transpose([kernel_dims.index(d) for d in source.dims if (d in kernel_dims)])

    # Check for extra dimensions in the source and reshape the kernel appropriately
    if any([d not in kernel_dims for d in source.dims if d != "output"]):
        new_axis = []
        new_exp_slice = []
        for d in source.dims:
            if d in kernel_dims:
                new_axis.append(slice(None))
                new_exp_slice.append(exp_slice[kernel_dims.index(d)])
            else:
                new_axis.append(None)
                new_exp_slice.append(slice(None))
        full_kernel = full_kernel[new_axis]
        exp_slice = new_exp_slice

    if np.any(np.isnan(source)):
        method = "direct"
    else:
        method = "auto"

    if ("output" not in source.dims) or ("output" in source.dims and "output" in kernel_dims):
        result = scipy.signal.convolve(source, full_kernel, mode="same", method=method)
    else:
        # source with multiple outputs
        result = np.stack(
            [
                scipy.signal.convolve(source.sel(output=output), full_kernel, mode="same", method=method)
                for output in source.coords["output"]
            ],
            axis=source.dims.index("output"),
        )
    result = result[exp_slice]

    if output is None:
        missing_dims = [d for d in coordinates.dims if d not in source.dims]
        output = self.create_output_array(coordinates.drop(missing_dims), data=result)
    else:
        output[:] = result

    return output
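# Hedged sketch of the uniform-coordinate padding used in _eval above, for a kernel of size 5 along
# 'lat': the requested coordinates are padded with extra steps on each side, the source is evaluated on
# the padded grid, and the padding is sliced back off after the convolution. Numbers are illustrative,
# and the import paths are assumptions based on the calls in the method above.
from podpac.core.coordinates import UniformCoordinates1d
from podpac.core.coordinates.utils import add_coord

s = 5
coord = UniformCoordinates1d(0.0, 10.0, size=11, name="lat")  # step = 1.0

s_start = -s // 2                        # -3 (Python floor division)
s_end = max(s // 2 - ((s + 1) % 2), 1)   # 2
expanded = UniformCoordinates1d(
    add_coord(coord.start, s_start * coord.step),
    add_coord(coord.stop, s_end * coord.step + 1e-14 * coord.step),
    size=coord.size - s_start + s_end,   # 11 + 3 + 2 = 16 coordinates from -3.0 to 12.0
    **coord.properties
)
sl = slice(-s_start, -s_end)             # slice(3, -2) recovers the original 11 coordinates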
def test_custom_creation_latlon_stack(self): alt = ArrayCoordinates1d([6, 7, 8, 9, 10, 11, 12], name="alt") time = ArrayCoordinates1d(["2018-01-01", "2018-02-01"], name="time") coords = ctu.make_coordinate_combinations(alt=alt, time=time) assert len(coords) > 0 assert len(coords) == 70