def ai_upload(repo: Repo, task: Task, roi_id, recipe_path, cos_key, dry_run):
    """Upload COG TIFF (and its .geojson sidecar) to cloud object storage.

    :param repo: project repository, used to resolve the recipe
    :param task: current task, used to resolve the recipe
    :param roi_id: ROI identifier for recipe resolution
    :param recipe_path: explicit recipe file; resolved from repo/task/roi when falsy
    :param cos_key: target object key; falls back to the GeoJSON 'ResultKey' property
    :param dry_run: when truthy, only report what would be uploaded
    :raises OCLIException: when the GeoJSON sidecar is missing or unparsable
    :raises click.UsageError: when no COS key is available or COS credentials are invalid
    """
    _recipe = recipe_path if recipe_path else resolve_recipe(repo, task, roi_id)
    recipe = Recipe(_recipe)
    filenames = Filenames('zone', recipe)
    cog_file = filenames.out_cog_tiff
    doc_json = Path(filenames.out_cog_tiff + '.geojson')
    if not doc_json.is_file():
        raise OCLIException(f'Could not find "{doc_json.absolute()}"')
    # use a context manager so the file handle is closed (the original leaked it)
    with open(doc_json, 'r') as f:
        _json = f.read()
    try:
        doc = json.loads(_json)
    except JSONDecodeError:
        raise OCLIException(f'Could not parse json "{doc_json.absolute()}"')
    # the GeoJSON 'ResultKey' property is the default upload key
    if not cos_key and "ResultKey" in doc['properties']:
        cos_key = doc['properties'].get('ResultKey')
    if not cos_key:
        raise click.UsageError("No COS key (upload file name)")
    if not cos_key.endswith('.tiff'):
        cos_key += '.tiff'
    log.info(
        f"About to upload {cog_file} as {cos_key} to bucket {recipe['COS'].get('bucket')} "
    )
    try:
        cos = COS(recipe)
    except SystemExit:
        raise click.UsageError(
            f'Invalid recipe: COS credentials in "{_recipe}" are required for upload'
        )
    try:
        if dry_run:
            output.comment(
                f'Uploading "{cog_file}" as "{cos_key}" into bucket "{cos.bucket}"'
            )
            output.comment(
                f'Uploading "{cog_file}.geojson" as "{cos_key}.geojson" into bucket "{cos.bucket}"'
            )
        else:
            filesize = os.stat(cog_file).st_size
            with tqdm(total=filesize, unit='B', unit_scale=True, desc=cos_key) as t:
                cos.upload_to_cos(cog_file, cos_key, hook(t))
            # the sidecar is optional at this point: upload only when present
            if os.path.isfile(cog_file + '.geojson'):
                filesize = os.stat(cog_file + '.geojson').st_size
                with tqdm(total=filesize, unit='B', unit_scale=True,
                          desc=cos_key + '.geojson') as t:
                    cos.upload_to_cos(cog_file + '.geojson', cos_key + '.geojson', hook(t))
    except SystemExit as e:
        # UsageError expects a message string, not an exception object
        raise click.UsageError(str(e))
def pairs_load(repo: Repo, roi_id, reload, quiet, completion_date):
    """Load satellite products for a ROI into the pairs cache (DB).

    :param repo: project repository (provides ROI db, finder config, cache path)
    :param roi_id: ROI id; falls back to the repo's active ROI
    :param reload: force re-download instead of using the cache
    :param quiet: suppress progress bar and finder-config table
    :param completion_date: optional upper bound for product completion date
    :raises OCLIException: invalid completion date, or zero products loaded
    :raises click.BadOptionUsage: missing/unknown ROI
    """
    # todo convert name to ID
    if completion_date:
        _raw_completion_date = completion_date
        completion_date = parse_to_utc_string(completion_date)
        if completion_date is None:
            # report the value the user actually typed — the parsed value is None here
            raise OCLIException(f"Completion date {_raw_completion_date} is invalid")
        output.comment(f"loading products up to {completion_date}")
    if not roi_id and not repo.active_roi:
        raise click.BadOptionUsage(
            'roi_id',
            "ROI is required , set active ROI or provide --roi option")
    _id = int(roi_id) if roi_id else int(repo.active_roi)
    # check roi exists
    db = repo.roi.db
    try:
        geometry = db.loc[_id, 'geometry']
    except KeyError:
        raise click.BadOptionUsage('roi_id', f'ROI "{_id}" not found')
    cache_file_name = _cache_pairs_file_name(repo)
    finder_conf = repo.get_config('finder', {}).copy()
    if completion_date:
        finder_conf['completionDate'] = completion_date
    if not quiet:
        output.table(finder_conf.items())
    if quiet:
        d = pairs.load_data(
            geometry,
            reload=reload,
            callback=None,
            finder_conf=finder_conf,
            cache_file_name=cache_file_name,
        )
    else:
        with click.progressbar(length=100, label='Loading sat products') as bar:
            def callback(total, step):
                # the remote may report a different total; resize the bar lazily
                if bar.length != total:
                    bar.length = total
                bar.update(step)

            d = pairs.load_data(geometry,
                                reload=reload,
                                callback=callback,
                                finder_conf=finder_conf,
                                cache_file_name=cache_file_name)
    if d.empty:
        raise OCLIException('0 products loaded, product list is not updated!')
    else:
        output.success(f'{len(d)} products loaded into list')
def _resolve_tensor_filenames(repo, task, zone, roi_id, data_path, recipe_path, tnorm) -> Filenames:
    """Resolve the tensor `Filenames` for a zone.

    Resolution order: explicit *data_path*, then recipe (explicit *recipe_path*
    or resolved from repo/task/roi), then the task's ai_results directory.
    When *tnorm* is requested and the recipe lacks PREDICTOR_DIR, it is read
    from the process_info YAML.
    """
    if data_path:
        recipe = {'OUTDIR': data_path}
    else:
        try:
            _recipe = recipe_path if recipe_path else resolve_recipe(repo, task, roi_id)
            recipe = Recipe(_recipe)
            output.comment(f'Using recipe file "{_recipe}"')
        except (RuntimeError, AssertionError, click.UsageError):
            # no usable recipe — fall back to the task's ai_results directory
            output.comment(f'Using tensor from ai_results')
            try:
                data_path = task.get_ai_results_path(full=True)
                if not os.path.isdir(data_path):
                    raise AssertionError(f'Directory "{data_path}" is not exists ')
                recipe = {'OUTDIR': data_path}
            except AssertionError as e:
                raise click.UsageError(f'Could not get ai_results: {e}')
    if tnorm and 'PREDICTOR_DIR' not in recipe:
        # tnorm file lives under the predictor dir recorded in process_info
        try:
            _filenames = Filenames(zone, recipe)
            with open(_filenames.process_info, 'r') as f:
                _prcinfo = yaml.load(f, Loader=yaml.FullLoader)
            recipe['PREDICTOR_DIR'] = _prcinfo['process']['PREDICTOR_DIR']
        except Exception as e:
            raise OCLIException(f"Could not resolve tnorm file: {e}")
    return Filenames(zone, recipe)
def create_stack_rgb(band1, band2, band3, df, vis_mode, slice_range):
    """Build an RGB preview from 2 or 3 ENVI stack bands.

    :param band1: index of the first band in *df* (required)
    :param band2: index of the second band in *df* (required)
    :param band3: index of the third band, or None for 2-band modes
    :param df: dataframe whose rows carry a ``path`` to an ENVI file (no extension)
    :param vis_mode: visualisation mode name
    :param slice_range: (row0, col0, row1, col1); first element -1 disables slicing
    :returns: (title, (r, g, b))
    :raises OCLIException: invalid band/mode combination or rendering error
    """
    bands_2 = ['false-color', 'false-color-enhanced']
    bands_3 = ['sar', 'composite', 'raw']
    if band1 is None and band2 is None:
        raise OCLIException('At least b1 and b2 Bands are required for preview')
    if vis_mode in bands_2 and band3 is not None:
        raise OCLIException(f"'{vis_mode}': requires band1 and band2 only ")
    if vis_mode in bands_3 and band3 is None:
        raise OCLIException(f"'{vis_mode}': requires 3 bands ")
    try:
        import spectral.io.envi as s_envi

        def _read_band(path):
            # open the ENVI header+image pair and read the single band as memmap
            return s_envi.open(path + '.hdr', path + '.img').read_band(0, use_memmap=True)

        def _crop(b):
            # slice_range layout: (row0, col0, row1, col1)
            return b[slice_range[0]:slice_range[2], slice_range[1]:slice_range[3]]

        # unify the previously duplicated 2-band / 3-band branches
        indices = (band1, band2) if band3 is None else (band1, band2, band3)
        paths = [df.iloc[i].path for i in indices]
        title = "\n".join(f"B{n + 1}: {p}" for n, p in enumerate(paths))
        bands = [_read_band(p) for p in paths]
        if slice_range[0] != -1:
            title += f"\n slice {slice_range}"
            bands = [_crop(b) for b in bands]
        if band3 is None:
            (r, g, b) = compute_stack_pol2(bands[0], bands[1], vis_mode=vis_mode)
        else:
            (r, g, b) = compute_stack_pol3(bands[0], bands[1], bands[2], vis_mode=vis_mode)
        title = f"{vis_mode} {bands[0].shape[0]}x{bands[0].shape[1]}\n{title}"
        return title, (r, g, b)
    except ValueError as e:
        raise OCLIException(str(e))
def _show_tnsr_list(tnsr_hdr_fname, df=None):
    """Print the band table for a tensor HDR file.

    When *df* is not supplied it is loaded from the HDR file itself.
    """
    output.comment(f'tensor HDR: {tnsr_hdr_fname}')
    if df is None:
        try:
            _shape, df = get_tensor_df(tnsr_hdr_fname)
        except FileNotFoundError as err:
            raise OCLIException(f"{err}")
    output.table(df, showindex=True, headers=['band', 'name', 'resolution'])
def create_tensor_rgb(band1, band2, band3, df, vis_mode, slice_range, filenames, tnorm, gauss):
    """Read the selected tensor bands and compose an (r, g, b) preview.

    :returns: (title, (r, g, b))
    :raises OCLIException: invalid band/mode combination, missing tnorm file,
        or rendering (ValueError) failures
    :raises AssertionError: band index outside the tensor band table
    """
    two_band_modes = ['simple', 'rgb-ratio', 'rgb-diff', 'false-color', 'false-color-enhanced']
    three_band_modes = ['sar', 'composite', 'composite-u']
    if band1 is None and band2 is None:
        raise OCLIException('At least b1 and b2 Bands are required for preview')
    if vis_mode in two_band_modes and band3 is not None:
        raise OCLIException(f"'{vis_mode}': requires band1 and band2 only ")
    if vis_mode in three_band_modes and band3 is None:
        raise OCLIException(f"'{vis_mode}': requires 3 bands ")
    if tnorm and not Path(filenames.tnorm).is_file():
        raise OCLIException(f"tensor normalisation file '{filenames.tnorm}' not found")
    blist = [band1, band2] + ([band3] if band3 else [])
    # validate band indices up front
    try:
        df.iloc[blist]
    except IndexError:
        raise AssertionError("Band number is invalid")
    try:
        title, (b1, b2, b3) = read_tensor(blist,
                                          df=df,
                                          slice_range=slice_range,
                                          filenames=filenames,
                                          tnorm=tnorm,
                                          gauss=gauss,
                                          split=True)
        if band3:
            (r, g, b) = compute_tensor_pol3(b1, b2, b3, vis_mode=vis_mode)
        else:
            (r, g, b) = compute_tensor_pol2(b1, b2, vis_mode=vis_mode)
        title = f"{vis_mode} {r.shape[0]}x{r.shape[1]}\n{title}"
        return title, (r, g, b)
    except ValueError as err:
        raise OCLIException(str(err))
def task_set(task: Task, d: Dict):
    """Apply cluster-task keys from *d* onto the task config.

    For 'master'/'slave' keys the value is resolved against the cached product
    list (by productId, then by title) and both the title and the product path
    are stored. All processed keys are removed from *d* in place.

    :raises OCLIException: cache could not be loaded, or product not found
    """
    task_config = task.config
    my_keys = TASK_KIND_CLUSTER.keys()
    for k in my_keys:
        if k in d:
            value = d[k]  # type: str
            # set new value
            if k in ['master', 'slave']:
                cache_file_name = _cache_pairs_file_name(task)
                try:
                    _df = pairs.load_from_cache(
                        cache_file_name=cache_file_name)
                    # resolve by product id first, then fall back to title
                    _p = _df.loc[_df['productId'] == value.upper(),
                                 ['title', 'productIdentifier']]
                    if _p.size == 0:
                        _p = _df.loc[_df['title'] == value.upper(),
                                     ['title', 'productIdentifier']]
                    if _p.size == 0:
                        raise ValueError
                    _p = _p.iloc[0]
                    task_config[k] = _p['title']
                    task_config[k + '_path'] = _p['productIdentifier']
                except RuntimeError as e:
                    raise OCLIException(
                        f'Could not load products for project "{task.project}",reason:{e}'
                    )
                except (ValueError, KeyError, IndexError) as e:
                    log.exception(e)
                    raise OCLIException(
                        f'key "{k}" could not find product id {d[k]}')
            else:
                # TODO - validate!
                task_config[k] = value
    # remove processed keys from the input dict
    for k in my_keys:
        d.pop(k, None)
def ai_preview_tensor_math(repo: Repo, task: Task, roi_id, recipe_path, slice_range, show_list,
                           band1, band2, band3, vis_mode,
                           data_path, save, tnorm, zone, gauss, hist, ylog, export):
    """ Bands math for tansor {} """
    filenames = _resolve_tensor_filenames(
        repo, task,
        zone=zone,
        roi_id=roi_id,
        data_path=data_path,
        recipe_path=recipe_path,
        tnorm=tnorm
    )
    output.comment(f"Data dir: {data_path}")
    full_shape, df = get_tensor_df(filenames.tnsr_hdr)
    # list mode: just print the band table and stop
    if show_list:
        _show_tnsr_list(filenames.tnsr_hdr, df=df)
        return
    try:
        title, (r, g, b) = create_tensor_rgb(
            band1, band2, band3,
            df=df,
            filenames=filenames,
            tnorm=tnorm,
            gauss=gauss,
            vis_mode=vis_mode,
            slice_range=slice_range,
        )
        if not export:
            # interactive / saved-figure path
            _plt = _vis_rgb(r, g, b, title, hist, ylog)
            _show_plt(_plt, save)
        else:
            # export path: write an ENVI RGB georeferenced against the tensor HDR
            georef = filenames.tnsr_hdr[:-4]
            _save_envi_rgb(r, g, b,
                           export=export,
                           georef=georef,
                           data_path=data_path,
                           slice_range=slice_range,
                           title=title
                           )
    except Exception as err:
        log.exception(err)
        raise OCLIException(f"{err}")
def set_config(self, key, value, only_existed=True):
    """Set a key-value pair in the current task config.

    When *only_existed* is true (default), refuse to create keys that are
    not already present in the config.

    :raises OCLIException: unknown key while *only_existed* is true
    """
    allowed = (not only_existed) or (key in self.config)
    if not allowed:
        raise OCLIException(f'Unknown key "{key}"')
    self.config[key] = value
def ai_makecog(repo: Repo, task: Task, roi_id, recipe_path, json_only, quiet, no_color, less,
               zone, kind, source, cos_key, friendly_name, print_res,
               warp_resampleAlg, overview_resampleAlg):
    """ Make COG TIFF from visualized results

    \b

    * to make GeoTIFF from custom ENVI file use --source option with filename of ENVI file (without extension)

    * to override recipe kind, use --kind option, example: making image from 'ai preview --export path/to/envi' file use makecog zone --kind Image --source path/to/envi

    * to avoid overriding recipe main results use --cos-key and --friendly-name option

    if --friendly-name starts with '+' value will be used as suffix for friendly_name in GeoJSON

    if --cos-key starts with '+' value will be used as suffix for COS.ResultKey in GeoJSON
    """
    driver = 'MAKECOG'
    if source:
        try:
            # Only valid ENVI or GeoTiff files are allowed
            # (renamed from 'str' — the original shadowed the builtin)
            info = gdal.Info(source, format='json')
            driver = info['driverShortName']
            if driver not in ['ENVI', 'GTiff']:
                raise OCLIException(
                    f"Unsupported source file type: {info['driverShortName']} ({info['driverLongName']})"
                )
        except Exception as e:
            raise OCLIException(f"Option --source: {e}")
    _recipe = recipe_path if recipe_path else resolve_recipe(repo, task, roi_id)
    recipe = Recipe(_recipe)
    kind = kind if kind != 'auto' else recipe.get('type')
    if kind in ['Rvi', 'Image']:
        recipe['type'] = 'Image'
    # '+' prefix means "append as suffix" for both overrides
    if cos_key:
        if cos_key.startswith('+'):
            recipe['COS']['ResultKey'] += cos_key[1:]
        else:
            recipe['COS']['ResultKey'] = cos_key
    if friendly_name:
        if friendly_name.startswith('+'):
            recipe['friendly_name'] += friendly_name[1:]
        else:
            recipe['friendly_name'] = friendly_name
    filenames = Filenames(zone, recipe)
    input_file = source if source else filenames.pred8c_img
    out_file = filenames.out_tiff
    cog_file = filenames.out_cog_tiff
    # in json-only mode the COG must already exist; otherwise the input must
    check_file = cog_file if json_only else input_file
    if not Path(check_file).is_file():
        raise OCLIException(f'file not found: {check_file}')
    os.makedirs(Path(cog_file).parent, exist_ok=True)
    w = GDALWrap3(recipe, input_file, out_file, cog_file)
    try:
        if not json_only:
            if driver == 'GTiff':
                # already a GeoTiff: just copy it into place
                try:
                    shutil.copy(input_file, cog_file)
                    output.success(f"file {input_file} copied to {cog_file}")
                except Exception as e:
                    raise OCLIException(
                        f'Could not copy "{input_file}" to "{cog_file}" : {e}'
                    )
            else:
                if not quiet:
                    with pfac(log, total=100, desc='Assembling') as (_, callback):
                        def cb(pct, msg, user_data):
                            _ud = user_data[0]
                            # reset counter: this callback is invoked from multiple processes
                            if pct < 0.01:
                                user_data[0] = 0
                            if user_data[0] == 0 or pct - _ud > 0.10:
                                log.debug(
                                    f"Local translating : {round(pct * 100, 2)}%"
                                )
                                user_data[0] = pct
                            callback(100, pct, 'translating')

                        w.make_cog(cb, warp_resampleAlg, overview_resampleAlg)
                else:
                    w.make_cog(None, warp_resampleAlg, overview_resampleAlg)
        _json = w.make_geo_json()
        _json = _json if no_color else colorful_json(_json)
        if print_res:
            click.echo("\n\n\n")
            if less:
                click.echo_via_pager(_json)
            else:
                click.echo(_json)
    except (RuntimeError, OCLIException) as e:
        raise click.UsageError(
            output.error_style(f"COG-tiff generation failed,reason: {e}"))
def create_tensor_plt(band1, band2, band3, vis_mode, slice_range, filenames: Filenames,
                      tnorm=False, gauss=None, hist=None, ylog=False):
    """Render a matplotlib preview for 2 or 3 tensor bands.

    :param band1: first band index (required)
    :param band2: second band index (required)
    :param band3: third band index, or None for 2-band modes
    :param vis_mode: visualisation mode name
    :param slice_range: (row0, col0, row1, col1); first element -1 disables slicing
    :param filenames: Filenames object providing tnsr/tnsr_hdr/tnorm paths
    :param tnorm: apply per-band normalisation from the tnorm file
    :param gauss: optional gaussian blur sigma
    :param hist: histogram option forwarded to compute_tensor_pol*
    :param ylog: log-scale flag forwarded to compute_tensor_pol*
    :returns: the plot object produced by compute_tensor_pol2/3
    :raises OCLIException: on invalid arguments or any rendering failure
    """
    # TODO Google Earth overlay? https://ocefpaf.github.io/python4oceanographers/blog/2014/03/10/gearth/
    bands_2 = ['simple', 'rgb-ratio', 'rgb-diff', 'false-color', 'false-color-enhanced']
    bands_3 = ['sar', 'composite', 'composite-u']
    if band1 is None and band2 is None:
        raise OCLIException('At least b1 and b2 Bands are required for preview')
    if vis_mode in bands_2 and band3 is not None:
        raise OCLIException(f"'{vis_mode}': requires band1 and band2 only ")
    if vis_mode in bands_3 and band3 is None:
        raise OCLIException(f"'{vis_mode}': requires 3 bands ")
    if tnorm and not Path(filenames.tnorm).is_file():
        raise OCLIException(f"tensor normalisation file '{filenames.tnorm}' not found")
    try:
        import spectral.io.envi as s_envi
        try:
            bn = s_envi.open(filenames.tnsr_hdr).metadata['band names']
            ary = np.load(filenames.tnsr, mmap_mode='r')  # type: np.ndarray
        except Exception as e:
            # pass a message, not the exception object
            raise OCLIException(str(e))
        if band3 is None:
            title = f"tensor: {filenames.tnsr}\n B1: {bn[band1]} B2: {bn[band2]}"
            blist = [band1, band2]
        else:
            title = f"tensor: {filenames.tnsr}\n B1: {bn[band1]} B2: {bn[band2]} B3: {bn[band3]}"
            blist = [band1, band2, band3]
        try:
            # fancy indexing with blist copies the selected bands out of the memmap
            if slice_range[0] != -1:
                tnsr = ary[slice_range[0]:slice_range[2], slice_range[1]:slice_range[3], blist]
            else:
                tnsr = ary[..., blist]
        except KeyError:
            raise OCLIException(f"One of band not found in tensor")
        if tnorm:
            tnsr = tnsr.copy()
            tn = np.load(filenames.tnorm)
            # tn[b] holds (mean, std) per band — TODO confirm against writer
            for i, b in enumerate(blist):
                tnsr[..., i] -= tn[b, 0]
                tnsr[..., i] /= tn[b, 1]
            title += "\n"
        else:
            title += "\n not normalised"
        if gauss:
            # scipy.ndimage.filters is deprecated (removed in modern SciPy);
            # import from scipy.ndimage directly
            from scipy.ndimage import gaussian_filter
            for i in range(0, tnsr.shape[2]):
                tnsr[..., i] = gaussian_filter(tnsr[..., i], gauss)
            title += f" Gauss={gauss}"
        if band3 is None:
            _plt = compute_tensor_pol2(tnsr[..., 0], tnsr[..., 1],
                                       title=title,
                                       vis_mode=vis_mode, hist=hist, ylog=ylog)
        else:
            _plt = compute_tensor_pol3(tnsr[..., 0], tnsr[..., 1], tnsr[..., 2],
                                       title=title,
                                       vis_mode=vis_mode, hist=hist, ylog=ylog)
        return _plt
    except Exception as e:
        log.exception(e)
        if slice_range[0] != -1:
            raise OCLIException(f"Could not render image, check bands and slice: {e}")
        else:
            raise OCLIException(f"Could not render image, check bands: {e}")
def _bkt_info(
        repo: Repo,
        task: Task,
        geometry: Polygon,
        bucket_name: str,
        sort: tuple,
        limit: int,
        column: tuple,
        where: str,
        check=False,
) -> (GeoDataFrame, list):
    """Select a product bucket for a ROI and build its listing table.

    :param repo: project repository (provides the pairs cache path)
    :param task: current task; when loaded, master/slave markers and local
        file-existence checks are injected into the listing
    :param geometry: ROI polygon; must have non-zero area
    :param bucket_name: bucket name, or a numeric string selecting the
        bucket by group index
    :param sort: column names to sort the listing by
    :param limit: max number of rows in the listing
    :param column: columns to include in the listing
    :param where: filter expression forwarded to _list_products
    :param check: when true (and 'eodata' key is invalid), add an 'exists'
        column showing local product availability
    :returns: (bucket name, listing dataframe, listing column names)
    :raises click.BadArgumentUsage: ROI has zero area
    :raises AssertionError: no products found for the ROI
    :raises OCLIException: requested bucket not found
    """
    if geometry.area == 0:
        raise click.BadArgumentUsage('ROI has zero area')
    cache_file_name = _cache_pairs_file_name(repo)
    # TODO check ROI exists
    _df = pairs.load_from_cache(cache_file_name=cache_file_name, geometry=geometry)
    _bk = bucket.create_list(_df, buckets_dir='')
    if _bk is None or _bk.empty:
        raise AssertionError(f'No products could be found for ROI ')
    # numeric bucket_name selects the Nth bucket group; otherwise match by name
    if bucket_name.isnumeric():
        _t = _bk.groupby('bucket', as_index=False).ngroup()
        _df = _bk[_t == int(bucket_name)]
    else:
        _df = _bk[_bk['bucket'] == bucket_name]
    _f = list(column)
    _ds = _list_products(_df, where=where, sort=list(sort), limit=limit)
    _m, _s = _task_ms(task)
    # log.error(f"{_m} {_s}")
    if task.loaded:
        if _m or _s:
            def _ms(b):
                # 'm'/'s' markers for the task's master/slave products
                _x = 'm' if _m == b else ' '
                _x += 's' if _s == b else ' '
                return _x

            _ds['task'] = _ds['title'].apply(_ms)
            _f += ['task']
        _e, eodata = task.get_valid_key('eodata')

        # TODO other formats parsers: Sentinel-1.ZIP ,TOPSAR
        def _ch_fs(b):
            # '+' = product fully present locally, '~' = empty measurement dir,
            # '' = not present
            _p = _local_eodata_relative_path(eodata, b)
            if os.path.isfile(os.path.join(_p, 'manifest.safe')):
                _m = os.path.join(_p, 'measurement')
                if os.path.isdir(_m):
                    return '+' if any(os.scandir(_m)) else '~'
            return ''

        if check and not _e:
            _ds['exists'] = _ds['productIdentifier'].apply(_ch_fs)
            _f += ['exists']
        pass
    # _ds = _ds.reindex(['task', *_f], axis=1, copy=False)
    # output.comment(f"Task '{_tname}' applied\n\n")
    headers = ['#', 'task', *_f]
    if _df.empty:
        raise OCLIException(f"bucket {bucket_name} not found")
    bname = _df.iloc[0]['bucket']
    _ds = _ds[_f]  # remove injected title if not in columns
    return bname, _ds, _ds.columns.to_list()
def bkt_info(
        repo: Repo,
        task: Task,
        roi_id,
        less,
        # sort,
        limit,
        column,
        where,
        check,
        delta,
        product_id,
        platform):
    """ find pairs by given PRODUCT_ID

    \b

    PRODUCT_ID: 4-digits hex number (Sentinel product identifier, last 4 symbols in product name).

    PLATFORM: like 'S1A' or 'S1B' to narrow search in case PRODUCT_ID is ambiguous
    """
    _id, _roi = resolve_roi(roi_id, repo)
    _m, _s = _task_ms(task)
    geometry = _roi['geometry']
    output.comment(f"active task master: {_m}")
    _df = pairs.load_from_cache(cache_file_name=(_cache_pairs_file_name(repo)))
    _df = _df.set_index('productId')
    try:
        _ds = _df.loc[product_id][['startDate', 'platform']]
        if isinstance(_ds, DataFrame):
            # several products share this id — try to disambiguate by platform
            if platform != '':
                _ds = _ds[_ds['platform'] == platform].loc[product_id]
                if isinstance(_ds, DataFrame):
                    raise OCLIException(
                        f"Could not resolve '{product_id}' for platform {platform}"
                    )
            else:
                output.table(_ds, headers=['PRODUCT_ID', 'startDate', 'platform'])
                # fixed typo: was '<PALTFORM>'
                raise OCLIException(
                    f"Product ID {product_id} is ambiguous, use <PLATFORM> argument to narrow search "
                )
        ts, platform = _ds[['startDate', 'platform']]
    except KeyError:
        raise OCLIException(f'Product id "{product_id}" not found')
    output.comment(
        f"Building bucket for product {product_id} , startDate={ts}")
    # keep only same-platform products within the repeat-cycle delta
    f = unitime_delta_factory(ts)
    _df['cycle_dt'] = _df['startDate'].apply(f)
    _df = _df[(_df['cycle_dt'] <= delta) & (_df['platform'] == platform)]
    cols = [
        'productId', 'cycle_dt', 'startDate', 'platform',
        'relativeOrbitNumber', 'polarisation', 'fit', 'task'
    ]
    try:
        if geometry.area == 0:
            raise AssertionError('ROI has zero area')
        # 'fit' = fraction of the ROI covered by each product footprint
        _df['fit'] = _df['geometry'].intersection(
            geometry).area / geometry.area
        _df['task'] = ''
        _df = _df.reset_index()
        _df = _df.set_index('title')
        if _m in _df.index:
            _df.loc[_m, 'task'] = 'm'
        else:
            output.warning('Current task master not found in bucket')
        if _s in _df.index:
            _df.loc[_s, 'task'] = 's'
        else:
            output.warning('Current task slave not found in bucket')
        _df = _df.reset_index()
        _e, eodata = task.get_valid_key('eodata')

        def _ch_fs(b):
            # '+' = product fully present locally, '~' = empty measurement dir,
            # '' = not present.  Local renamed from '_m' to avoid shadowing the
            # master title above.
            _p = _local_eodata_relative_path(eodata, b)
            if os.path.isfile(os.path.join(_p, 'manifest.safe')):
                _mdir = os.path.join(_p, 'measurement')
                if os.path.isdir(_mdir):
                    return '+' if any(os.scandir(_mdir)) else '~'
            return ''

        if check and not _e:
            _df['exists'] = _df['productIdentifier'].apply(_ch_fs)
            cols += ['exists']
        _df = _df[cols]
    except AssertionError as e:
        raise RuntimeError(e)
    headers = ['#'] + cols
    output.table(
        _df,
        headers=headers,
    )