def get_block_details(auth, block_name):
    """
    Get details of a block by block name.
    """
    logger.info(
        pprint_json(
            Tools(auth).get_block_details(
                Tools(auth).get_blocks()[block_name])))
def plot_results(
    figsize: Tuple[int, int] = (8, 8),
    filepaths: List[Union[str, Path]] = None,
    titles: List[str] = None,
) -> None:
    tools = Tools(auth=_auth)
    tools.plot_results(figsize, filepaths, titles)
def get_blocks(
    block_type=None,
    basic: bool = True,
    as_dataframe=False,
):
    tools = Tools(auth=_auth)
    return tools.get_blocks(block_type, basic, as_dataframe)
def plot_coverage(
    scenes: GeoDataFrame,
    aoi: GeoDataFrame = None,
    legend_column: str = "scene_id",
    figsize=(12, 16),
) -> None:
    tools = Tools(auth=_auth)
    tools.plot_coverage(scenes, aoi, legend_column, figsize)
def construct_parameters(catalog, geom_file, start_date, end_date, sensors,
                         limit, max_cloud_cover):
    """
    Follows STAC principles and property names to create a filter for
    catalog search.
    """
    geometry = Tools(catalog.auth).read_vector_file(geom_file)
    start_date_str = start_date.strftime("%Y-%m-%d")
    end_date_str = end_date.strftime("%Y-%m-%d")
    logger.info(
        pprint_json(
            catalog.construct_parameters(geometry, start_date_str,
                                         end_date_str, sensors, limit,
                                         max_cloud_cover)))
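# Illustrative sketch (not from the source): invoking the helper above directly,
# outside the click option layer that normally supplies its arguments.
# `catalog` is assumed to be an already-constructed, authenticated Catalog
# object; the file name, sensor list, and numeric values are placeholders.
#
#   from datetime import datetime
#
#   construct_parameters(
#       catalog,
#       geom_file="aoi.geojson",
#       start_date=datetime(2020, 1, 1),
#       end_date=datetime(2020, 2, 1),
#       sensors=["pleiades"],
#       limit=10,
#       max_cloud_cover=20,
#   )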
def _construct_full_workflow_tasks_dict(
        self, input_tasks: List[str]) -> List[dict]:
    """
    Constructs the full workflow task definition from a simplified version.
    Accepts block ids, block names, block display names & combinations of them.

    Args:
        input_tasks: List of block names, block ids, or block display names.

    Returns:
        The full workflow task definition.

    Example:
        ```python
        input_tasks = ["sobloo-s2-l1c-aoiclipped", "tiling"]
        ```

        ```python
        input_tasks = ["a2daaab4-196d-4226-a018-a810444dcad1",
                       "4ed70368-d4e1-4462-bef6-14e768049471"]
        ```

        ```python
        input_tasks = ["Sentinel-2 L1C MSI AOI clipped", "Raster Tiling"]
        ```
    """
    full_input_tasks_definition = []

    # Get public + custom blocks.
    logging.getLogger("up42.tools").setLevel(logging.CRITICAL)
    blocks: List[dict] = Tools(auth=self.auth).get_blocks(
        basic=False)  # type: ignore
    logging.getLogger("up42.tools").setLevel(logging.INFO)

    # Get ids of the input tasks, regardless of the specified format.
    blocks_id_name = {block["id"]: block["name"] for block in blocks}
    blocks_name_id = {block["name"]: block["id"] for block in blocks}
    blocks_displaynames_id = {
        block["displayName"]: block["id"]
        for block in blocks
    }

    input_tasks_ids = []
    for task in input_tasks:
        if task in blocks_id_name:
            input_tasks_ids.append(task)
        elif task in blocks_name_id:
            input_tasks_ids.append(blocks_name_id[task])
        elif task in blocks_displaynames_id:
            input_tasks_ids.append(blocks_displaynames_id[task])
        else:
            raise ValueError(
                f"The specified input task {task} does not match any "
                f"available block.")

    # Add the first task, the data block.
    data_task = {
        "name": f"{blocks_id_name[input_tasks_ids[0]]}:1",
        "parentName": None,
        "blockId": input_tasks_ids[0],
    }
    full_input_tasks_definition.append(data_task)
    previous_task_name = data_task["name"]

    # All following (processing) blocks.
    for block_id in input_tasks_ids[1:]:
        # Count occurrences of this block already in the task definition, so
        # that repeated blocks get distinct task names instead of being
        # skipped for sharing the same id. Counter returns 0 for unseen ids.
        counts = Counter(x["blockId"] for x in full_input_tasks_definition)
        count_block = counts[block_id] + 1

        next_task = {
            "name": f"{blocks_id_name[block_id]}:{count_block}",
            "parentName": previous_task_name,
            "blockId": block_id,
        }
        full_input_tasks_definition.append(next_task)
        previous_task_name = next_task["name"]
    return full_input_tasks_definition
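# Illustrative sketch (not from the source): a standalone rerun of the chaining
# logic above with a hardcoded name-to-id lookup, to show the shape of the task
# definition the method returns. The name/id pairing is taken from the docstring
# example and assumed to correspond positionally.
from collections import Counter

_blocks_name_id = {
    "sobloo-s2-l1c-aoiclipped": "a2daaab4-196d-4226-a018-a810444dcad1",
    "tiling": "4ed70368-d4e1-4462-bef6-14e768049471",
}


def _sketch_full_tasks(input_tasks):
    ids = [_blocks_name_id[name] for name in input_tasks]
    # First task (the data block) has no parent and always gets suffix ":1".
    tasks = [{"name": f"{input_tasks[0]}:1", "parentName": None, "blockId": ids[0]}]
    # Each later task points at the previous one and gets a per-block counter.
    for name, block_id in zip(input_tasks[1:], ids[1:]):
        count = Counter(t["blockId"] for t in tasks)[block_id] + 1
        tasks.append({
            "name": f"{name}:{count}",
            "parentName": tasks[-1]["name"],
            "blockId": block_id,
        })
    return tasks


# _sketch_full_tasks(["sobloo-s2-l1c-aoiclipped", "tiling"]) returns:
# [{'name': 'sobloo-s2-l1c-aoiclipped:1', 'parentName': None,
#   'blockId': 'a2daaab4-196d-4226-a018-a810444dcad1'},
#  {'name': 'tiling:1', 'parentName': 'sobloo-s2-l1c-aoiclipped:1',
#   'blockId': '4ed70368-d4e1-4462-bef6-14e768049471'}]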
def plot_quicklooks(figsize: Tuple[int, int] = (8, 8),
                    filepaths: List = None) -> None:
    tools = Tools(auth=_auth)
    tools.plot_quicklooks(figsize, filepaths)
def read_vector_file(
    filename: str = "aoi.geojson", as_dataframe: bool = False
) -> Union[Dict, GeoDataFrame]:
    return Tools().read_vector_file(filename, as_dataframe)
def get_example_aoi(
    location: str = "Berlin", as_dataframe: bool = False
) -> FeatureCollection:
    return Tools().get_example_aoi(location, as_dataframe)
def validate_manifest(path_or_json: Union[str, Path, Dict]) -> Dict:
    tools = Tools(auth=_auth)
    return tools.validate_manifest(path_or_json)
def validate_manifest(auth, manifest_json):
    """
    Validate a block manifest.
    """
    logger.info(pprint_json(Tools(auth).validate_manifest(manifest_json)))
def full_process_value(self, ctx, value):
    self.type = click.Choice(Tools(ctx.obj).get_blocks().keys())
    return super().full_process_value(ctx, value)  # pylint: disable=no-member
def get_blocks(auth, block_type, basic):
    """
    Get public blocks information.
    """
    logger.info(pprint_json(Tools(auth).get_blocks(block_type, basic)))
def get_example_aoi(
    location: str = "Berlin", as_dataframe: bool = False
) -> Union[Dict, GeoDataFrame]:
    return Tools().get_example_aoi(location, as_dataframe)
def read_vector_file(filename: str = "aoi.geojson",
                     as_dataframe: bool = False) -> FeatureCollection:
    tools = Tools(auth=_auth)
    return tools.read_vector_file(filename, as_dataframe)
def get_block_details(block_id: str, as_dataframe=False) -> Dict:
    tools = Tools(auth=_auth)
    return tools.get_block_details(block_id, as_dataframe)
def get_example_aoi(location: str = "Berlin",
                    as_dataframe: bool = False) -> FeatureCollection:
    tools = Tools(auth=_auth)
    return tools.get_example_aoi(location, as_dataframe)
def read_vector_file(
    filename: str = "aoi.geojson", as_dataframe: bool = False
) -> FeatureCollection:
    return Tools().read_vector_file(filename, as_dataframe)
def draw_aoi() -> None:
    tools = Tools(auth=_auth)
    tools.draw_aoi()
def draw_aoi() -> None:
    return Tools().draw_aoi()
def full_process_value(self, ctx, value):
    self.type = click.Choice(Tools(ctx.obj).get_blocks().keys())
    return super(OptionChoiceFromContext, self).full_process_value(ctx, value)