Example #1
def wp_info(repo: Repo, list_projects, config):
    """ Display information about current tsar install."""
    # log.error(f"Here we are {list_projects}")
    if list_projects:

        _dirs = repo.list_projects()
        if not _dirs:
            output.comment(f"no projects found in workspace '{repo.projects_home}'")
            output.comment(
                " run 'create --name project name' to create a new project\n\n")
        for v in _dirs:
            v['active'] = '*' if v['active'] else ''
        if repo.active_project:
            output.comment(f'active project "{repo.active_project}"')
        else:
            output.error(f"No active project selected")
        output.table(_dirs,
                     showindex=True,
                     headers={
                         'active': '',
                         'name': 'name',
                         'path': 'path'
                     })
    else:
        output.comment(
            f"config file {os.path.join(repo.rc_home, RC_FILE_NAME)}")
        output.comment(
            f"Active project config file {repo.get_project_rc_name()}")
        output.table(repo.tolist(), headers=['key', 'value'])
Example #2
def pairs_info(repo, all_pairs):
    """ show products DB info """
    cache_file_name = _cache_pairs_file_name(repo)
    _df = pairs.load_from_cache(cache_file_name=cache_file_name)

    output.comment("productd DB information\n\n")
    rons = _df['relativeOrbitNumber'].unique()
    _st = [
        ['Cache file', cache_file_name],
        ['startDate', _df['startDate'].min(), _df['startDate'].max()],
        ['completionDate', _df['completionDate'].min(), _df['completionDate'].max()],
        ['Relative orbits', rons],
        ['Relative orbit number count', len(rons)],
        ['records total', len(_df)],
    ]
    # output.comment("date intervals\n\n")
    output.table(_st, tablefmt='plain')
    if all_pairs:
        buf = io.StringIO()
        _df.info(verbose=True, memory_usage='deep', buf=buf)
        _t = buf.getvalue()
        output.comment("---------------------")
        click.echo(_t)

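A minimal sketch of the summary table built by pairs_info above, using a toy pandas DataFrame (hypothetical values) in place of the cached products returned by pairs.load_from_cache():

import pandas as pd

# toy stand-in for the cached products DataFrame
_toy = pd.DataFrame({
    'startDate': ['2019-01-01', '2019-01-13', '2019-01-25'],
    'completionDate': ['2019-01-02', '2019-01-14', '2019-01-26'],
    'relativeOrbitNumber': [44, 44, 95],
})
_rons = _toy['relativeOrbitNumber'].unique()
_summary = [
    ['startDate', _toy['startDate'].min(), _toy['startDate'].max()],
    ['completionDate', _toy['completionDate'].min(), _toy['completionDate'].max()],
    ['Relative orbits', _rons],
    ['Relative orbit number count', len(_rons)],
    ['records total', len(_toy)],
]
print(_summary)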
Example #3
def _resolve_tensor_filenames(repo, task, zone, roi_id, data_path, recipe_path, tnorm) -> Filenames:
    if not data_path:
        try:
            _recipe = recipe_path if recipe_path else resolve_recipe(repo, task, roi_id)
            recipe = Recipe(_recipe)
            output.comment(f'Using recipe file "{_recipe}"')
        except (RuntimeError, AssertionError, click.UsageError) as e:
            output.comment(f'Using tensor from ai_results')
            try:
                data_path = task.get_ai_results_path(full=True)
                if not os.path.isdir(data_path):
                    raise AssertionError(f'Directory "{data_path}" does not exist')
                recipe = {'OUTDIR': data_path}
            except AssertionError as e:
                raise click.UsageError(f'Could not get ai_results: {e}')
    else:
        recipe = {'OUTDIR': data_path}
    if tnorm and 'PREDICTOR_DIR' not in recipe:
        try:
            _filenames = Filenames(zone, recipe)
            with open(_filenames.process_info, 'r') as f:
                _prcinfo = yaml.load(f, Loader=yaml.FullLoader)
            recipe['PREDICTOR_DIR'] = _prcinfo['process']['PREDICTOR_DIR']
        except Exception as e:
            raise OCLIException(f"Could not resolve tnorm file: {e}")
    return Filenames(zone, recipe)
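A minimal sketch of the PREDICTOR_DIR lookup above, assuming the process-info YAML document has the shape {'process': {'PREDICTOR_DIR': ...}}; the real document is read from Filenames(zone, recipe).process_info:

import yaml

# hypothetical process-info document with the assumed shape
_toy_process_info = """
process:
  PREDICTOR_DIR: /data/predictor
"""
_prcinfo = yaml.load(_toy_process_info, Loader=yaml.FullLoader)
print(_prcinfo['process']['PREDICTOR_DIR'])  # -> /data/predictor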
Example #4
File: ai.py  Project: bopopescu/TSAR-AI
def ai_upload(repo: Repo, task: Task, roi_id, recipe_path, cos_key, dry_run):
    """Upload COG TIFF to cloud storage"""
    _recipe = recipe_path if recipe_path else resolve_recipe(
        repo, task, roi_id)
    recipe = Recipe(_recipe)

    filenames = Filenames('zone', recipe)
    cog_file = filenames.out_cog_tiff

    doc_json = Path(filenames.out_cog_tiff + '.geojson')

    if not doc_json.is_file():
        raise OCLIException(f'Could not find "{doc_json.absolute()}"')
    with open(doc_json, 'r') as f:
        _json = f.read()
    try:
        doc = json.loads(_json)
    except JSONDecodeError:
        raise OCLIException(f'Could not parse json "{doc_json.absolute()}"')

    if not cos_key and "ResultKey" in doc['properties']:
        cos_key = doc['properties'].get('ResultKey')
    if not cos_key:
        raise click.UsageError("No COS key (upload file name)")

    if not cos_key.endswith('.tiff'):
        cos_key += '.tiff'
    log.info(
        f"About to upload {cog_file} as {cos_key} to bucket {recipe['COS'].get('bucket')} "
    )
    try:
        cos = COS(recipe)
    except SystemExit:
        raise click.UsageError(
            f'Invalid recipe: COS credentials in "{_recipe}" are required for upload'
        )
    try:
        if dry_run:

            output.comment(
                f'Uploading "{cog_file}" as "{cos_key}" into bucket "{cos.bucket}"'
            )
            output.comment(
                f'Uploading "{cog_file}.geojson" as "{cos_key}.geojson" into bucket "{cos.bucket}"'
            )
        else:
            filesize = os.stat(cog_file).st_size
            with tqdm(total=filesize, unit='B', unit_scale=True,
                      desc=cos_key) as t:
                cos.upload_to_cos(cog_file, cos_key, hook(t))
            if os.path.isfile(cog_file + '.geojson'):
                filesize = os.stat(cog_file + '.geojson').st_size
                with tqdm(total=filesize,
                          unit='B',
                          unit_scale=True,
                          desc=cos_key + '.geojson') as t:
                    cos.upload_to_cos(cog_file + '.geojson',
                                      cos_key + '.geojson', hook(t))
    except SystemExit as e:
        raise click.UsageError(e)
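The hook(t) passed to cos.upload_to_cos() above is defined elsewhere in the project; a plausible sketch, assuming the usual boto3-style transfer callback (invoked with the number of bytes sent per chunk), might look like this:

from tqdm import tqdm

def hook(t: tqdm):
    # return a callback that advances the bar by each transferred chunk size
    def inner(bytes_amount):
        t.update(bytes_amount)
    return inner

# usage: the uploader calls the callback as chunks are sent
with tqdm(total=1024, unit='B', unit_scale=True, desc='example.tiff') as t:
    cb = hook(t)
    cb(512)
    cb(512)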
Example #5
def ai_preview_stack(repo: Repo, task: Task, roi_id, recipe_path, slice_range,
                     show_list,
                     # rgb,
                     band, columns, clip, hist, save, export, ylog):
    """ Preview assembled tensor band

        ** use --clip <min> <max> to apply np.log10(np.clip(.., 10**min, 10**max)) to stack values

        \b
        * Windows WSL: follow  https://www.scivision.dev/pyqtmatplotlib-in-windows-subsystem-for-linux/
    """
    try:

        _recipe = recipe_path if recipe_path else resolve_recipe(repo, task, roi_id)
        recipe = Recipe(_recipe)
        _dir = recipe.get('DATADIR')
    except (RuntimeError, AssertionError, click.UsageError) as e:

        output.comment(f"Could not resolve recipe {e}, fall-back to task")
        try:
            _dir = task.get_stack_path('snap_path')
        except AssertionError as e:
            raise click.UsageError(f'Could not get stack path: {e}')
    except Exception as e:
        log.exception("Could not resolve Stack results")
        raise click.UsageError('Could not resolve Stack results')
    output.comment(f"Stack dir: {_dir}\n\n")
    full_shape, df = get_stack_df(_dir)
    if show_list:
        output.table(df[['filename', 'resolution', 'path']], showindex=True,
                     headers=['band', 'name', 'resolution', 'path'])
    else:
        try:
            # if rgb:
            #     if len(rgb) != 3:
            #         raise AssertionError('rgb', '--rgb should contain exactly 3 digits without spaces')
            #     band = (int(rgb[0]), int(rgb[1]), int(rgb[2]))
            if band[0] == -1:
                band = list(range(0, len(df)))
            else:
                band = list(band)
            _ds = df.iloc[band]  # type: gpd.GeoDataFrame
            output.table(_ds, showindex=True)
            _plt = preview_stack(_ds, _dir,
                                 full_shape=full_shape,
                                 slice_region=slice_range,
                                 band=band,
                                 clip=clip,
                                 columns=columns,
                                 hist=hist,
                                 ylog=ylog
                                 )
            _show_plt(_plt, save=save)
        except AssertionError as e:
            log.exception(e)
            raise click.UsageError(str(e))
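A minimal sketch of the --clip transform described in the docstring above: stack values are clipped to [10**min, 10**max] and then log10-scaled (clip_log10 and its parameter names are illustrative, not project code):

import numpy as np

def clip_log10(values, clip_min, clip_max):
    # clip to [10**clip_min, 10**clip_max], then take log10
    return np.log10(np.clip(values, 10 ** clip_min, 10 ** clip_max))

print(clip_log10(np.array([1e-5, 1e-2, 1.0, 100.0]), -3, 1))  # -> [-3. -2.  0.  1.]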
Example #6
def _show_tnsr_list(tnsr_hdr_fname, df=None):
    output.comment(f'tensor HDR: {tnsr_hdr_fname}')
    if df is None:
        try:
            full_shape, df = get_tensor_df(tnsr_hdr_fname)
        except FileNotFoundError as e:
            raise OCLIException(f"{e}")
    output.table(df, showindex=True, headers=['band', 'name', 'resolution'])

    return
Example #7
def pairs_load(repo: Repo, roi_id, reload, quiet, completion_date):
    """ load data into DB """
    # todo convert name to ID
    if completion_date:
        _cd_raw = completion_date
        completion_date = parse_to_utc_string(completion_date)
        if completion_date is None:
            raise OCLIException(
                f"Completion date {_cd_raw} is invalid")

        output.comment(f"loading products up to {completion_date}")
    if not roi_id and not repo.active_roi:
        raise click.BadOptionUsage(
            'roi_id',
            "ROI is required, set an active ROI or provide the --roi option")
    _id = int(roi_id) if roi_id else int(repo.active_roi)
    # check roi exists

    db = repo.roi.db
    try:
        geometry = db.loc[_id, 'geometry']
    except KeyError:
        raise click.BadOptionUsage('roi_id', f'ROI "{_id}" not found')
    cache_file_name = _cache_pairs_file_name(repo)
    finder_conf = repo.get_config('finder', {}).copy()
    if completion_date:
        finder_conf['completionDate'] = completion_date
    if not quiet:
        output.table(finder_conf.items())
    if quiet:
        d = pairs.load_data(
            geometry,
            reload=reload,
            callback=None,
            finder_conf=finder_conf,
            cache_file_name=cache_file_name,
        )
    else:
        with click.progressbar(length=100,
                               label='Loading sat products') as bar:

            def callback(total, step):
                if bar.length != total:
                    bar.length = total
                bar.update(step)

            d = pairs.load_data(geometry,
                                reload=reload,
                                callback=callback,
                                finder_conf=finder_conf,
                                cache_file_name=cache_file_name)
    if d.empty:
        raise OCLIException('0 products loaded, product list is not updated!')
    else:
        output.success(f'{len(d)} products loaded into list')
Example #8
def bkt_info(ctx, repo, task: Task, roi_id, bucket_name, reload, less, sort,
             limit, column, where, check):
    """ show bucket info by BUCKET_NAME OR record number

    *  list buckets  names and record numbers  via 'bucket list' command
    """
    def_col = [
        'productId', 'startDate', 'title', 'relativeOrbitNumber', 'cycle_dt'
    ]
    if not column:
        column = def_col
    else:
        _cp = []
        _cd = []
        for c in column:
            if c.startswith('+'):
                _cp += c[1:].split(',')
            else:
                _cd.extend(c.split(','))
        if not _cd:
            _cd = def_col
        column = _cd + _cp
    # log.error(column)
    _id, _roi = resolve_roi(roi_id, repo)
    if reload:
        ctx.invoke(pairs_load, roi_id=_id, reload=True)
    bname, _ds, headers = _bkt_info(
        repo,
        task,
        geometry=_roi['geometry'],
        bucket_name=bucket_name,
        sort=sort,
        limit=limit,
        column=column,
        where=where,
        check=check,
    )
    if limit >= 0:
        output.comment(f"Dataset limited to  {limit} records")
    cols = _ds.columns.to_list()
    if task.loaded:
        output.comment(f"Task: {task.name}")
        if 'task' in cols:
            output.comment(
                "INFO: 'task' column: 'm' - used as master in task, 's' - used as slave in task"
            )
        if 'exists' in cols:
            output.comment(
                "INFO: 'exists' column: '+' - full data loaded, '~' - metadata only loaded"
            )

    output.comment(f'Bucket name: {bname}')
    output.table(_ds, headers=headers, less=less)
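A minimal sketch of the --column handling above, factored into a standalone helper: entries prefixed with '+' are appended to the default columns, plain entries replace the defaults (merge_columns is an illustrative name, not part of the project):

def merge_columns(column, def_col):
    _cp, _cd = [], []
    for c in column:
        if c.startswith('+'):
            _cp += c[1:].split(',')
        else:
            _cd.extend(c.split(','))
    if not _cd:
        _cd = def_col
    return _cd + _cp

print(merge_columns(['+polarisation'], ['productId', 'startDate']))
# -> ['productId', 'startDate', 'polarisation']
print(merge_columns(['title,platform'], ['productId', 'startDate']))
# -> ['title', 'platform']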
Example #9
File: cli.py  Project: bopopescu/TSAR-AI
def ocli_repl(ctx, repo: Repo, fullscreen):
    """ start interactive console"""
    repo.is_repl = True
    output.comment("""        
        use  ? command to OCLI help
        use :? command to show console help
        use <TAB> to auto-complete commands
        use :q or Ctrl-d to exit       
        """)
    if fullscreen:
        prompt_kwargs['bottom_toolbar'] = bottom_toolbar
    prompt_kwargs['message'] = prompt_message_callback(repo)
    repl(ctx, prompt_kwargs=prompt_kwargs)
Example #10
def ai_preview_cluster(repo: Repo, task: Task, roi_id, recipe_path, slice_range, show_list, band, columns,
                       # threshold,
                       zone,
                       hist, ylog,
                       save, export,
                       rgb=False
                       ):
    """ Preview assembled tensor band

        \b
        * Windows WSL: follow  https://www.scivision.dev/pyqtmatplotlib-in-windows-subsystem-for-linux/
    """
    try:
        _recipe = recipe_path if recipe_path else resolve_recipe(repo, task, roi_id)

        recipe = Recipe(_recipe)
        filenames = Filenames(zone, recipe)
        pred8c_img = filenames.pred8c_img
        pred8c_hdr = filenames.pred8c_hdr
        if not os.path.isfile(pred8c_img):
            raise AssertionError(f"IMG file '{pred8c_img}' not found")
        if not os.path.isfile(pred8c_hdr):
            raise AssertionError(f"HDR file '{pred8c_hdr}' not found")
        pred8c_hdr = s_envi.open(filenames.pred8c_hdr)
    except AssertionError as e:
        raise click.UsageError(f"Could not visualize: {e}")
    if show_list:
        output.comment(f'Cluster HDR: {filenames.pred8c_hdr}')
        x, y = pred8c_hdr.shape[:2]
        bn = pred8c_hdr.metadata['band names']
        bn = [[b, f'{x}x{y}'] for b in bn]
        output.table(bn, showindex=True, headers=['band', 'name', 'resolution'])
        return
    # if rgb:
    #     if len(rgb) != 3:
    #         raise click.BadOptionUsage('rgb', '--rgb should contain exactly 3 digits without spaces')
    #     band = (int(rgb[0]), int(rgb[1]), int(rgb[2]))
    if band[0] == -1:
        band = list(range(0, pred8c_hdr.shape[2]))

    preview_cluster(filenames.pred8c_hdr, filenames.pred8c_img,
                    band=band,
                    slice_region=slice_range,
                    columns=columns,
                    rgb=rgb
                    )
Example #11
def ai_preview_tensor_math(repo: Repo, task: Task, roi_id, recipe_path, slice_range, show_list,
                           band1, band2, band3,
                           vis_mode, data_path,
                           save, tnorm,
                           zone, gauss, hist, ylog, export):
    """ Bands math for tansor

    {}
    """
    filenames = _resolve_tensor_filenames(
        repo, task,
        zone=zone,
        roi_id=roi_id,
        data_path=data_path,
        recipe_path=recipe_path,
        tnorm=tnorm
    )
    output.comment(f"Data dir: {data_path}")
    full_shape, df = get_tensor_df(filenames.tnsr_hdr)
    if show_list:
        _show_tnsr_list(filenames.tnsr_hdr, df=df)
        return

    try:
        title, (r, g, b) = create_tensor_rgb(band1, band2, band3,
                                             df=df,
                                             filenames=filenames,
                                             tnorm=tnorm,
                                             gauss=gauss,
                                             vis_mode=vis_mode,
                                             slice_range=slice_range,
                                             )
        if export:
            georef = filenames.tnsr_hdr[:-4]
            _save_envi_rgb(r, g, b, export=export,
                           georef=georef, data_path=data_path, slice_range=slice_range,
                           title=title
                           )
        else:
            _plt = _vis_rgb(r, g, b, title, hist, ylog)
            _show_plt(_plt, save)
    except Exception as e:
        log.exception(e)
        raise OCLIException(f"{e}")
Example #12
def bkt_list(ctx: click.Context, repo: Repo, task: Task, roi_id, reload, fit):
    _id, _roi = resolve_roi(roi_id, repo)
    if reload:
        ctx.invoke(pairs_load, roi_id=_id, reload=True)
    _m, _s = _task_ms(task)
    try:
        _t, headers = _bkt_list(
            repo,
            geometry=_roi['geometry'],
            fit=fit,
            master=_m,
            slave=_s,
        )
    except RuntimeError as e:
        raise click.UsageError(str(e))
    output.comment(
        f'{len(_t)} possible buckets found for ROI "{_roi["name"]}"')
    if fit is not None:
        _t = _t[_t['fit']['mean'] >= fit]
        _l2 = len(_t)
        output.comment(f"shown {_l2} with  fit >= {fit}")
    output.table(_t, headers=headers)
Example #13
def ai_preview_stack_math(repo: Repo, task: Task, roi_id, recipe_path, slice_range, show_list,
                          band1, band2, band3,
                          vis_mode, data_path, save, export, hist, ylog):
    """ Band math for stack

    {common}
    """
    if not data_path:
        try:

            _recipe = recipe_path if recipe_path else resolve_recipe(repo, task, roi_id)
            recipe = Recipe(_recipe)
            data_path = recipe.get('DATADIR')
            output.comment(f'Using recipe file "{_recipe}"')
        except (RuntimeError, AssertionError, click.UsageError) as e:
            output.comment(f'Using stack from task stack_results')
            try:
                data_path = task.get_stack_path('snap_path')
                if not os.path.isdir(data_path):
                    raise AssertionError(f'Directory "{data_path}" does not exist')
            except AssertionError as e:
                raise click.UsageError(f'Could not get stack_results: {e}')

    output.comment(f"Stack dir: {data_path}\n\n")
    full_shape, df = get_stack_df(data_path)
    if show_list:
        output.table(df, showindex=True)
    else:
        title, (r, g, b) = create_stack_rgb(band1, band2, band3,
                                            df=df,
                                            vis_mode=vis_mode,
                                            slice_range=slice_range,
                                            )

        if export:
            georef = df.iloc[band1].path
            _save_envi_rgb(r, g, b, export=export,
                           georef=georef, data_path=data_path, slice_range=slice_range,
                           title=title
                           )
        else:
            _plt = _vis_rgb(r, g, b, title, hist, ylog)
            _show_plt(_plt, save)
Example #14
def ai_preview_tnsr(repo: Repo, task: Task, roi_id, recipe_path, show_list,
                    zone, slice_range,
                    band,
                    # rgb,
                    columns, hist, tnorm, save, ylog, export, data_path):
    """ Preview assembled tensor band
        \b
        * Windows WSL: follow https://www.scivision.dev/pyqtmatplotlib-in-windows-subsystem-for-linux/ instructions
    """

    filenames = _resolve_tensor_filenames(
        repo, task,
        zone=zone,
        roi_id=roi_id,
        data_path=data_path,
        recipe_path=recipe_path,
        tnorm=tnorm
    )

    output.comment(f"Data dir: {data_path}")
    full_shape, df = get_tensor_df(filenames.tnsr_hdr)
    if show_list:
        _show_tnsr_list(filenames.tnsr_hdr, df=df)
        return
    try:
        e = s_envi.open(filenames.tnsr_hdr)
    except FileNotFoundError as e:
        raise click.UsageError(e)

    tnsr_name = filenames.tnsr
    tnsr_hdr = filenames.tnsr_hdr
    log.info(tnsr_name)
    log.info(zone)
    if band[0] == -1:
        band = list(range(0, e.shape[2]))
    else:
        band = list(band)
    if tnorm:
        tnorm = filenames.tnorm
    tnsr = read_tensor(band, df,
                       slice_range=slice_range,
                       filenames=filenames,
                       tnorm=tnorm,
                       gauss=None,
                       split=False
                       )
    band_names = df.iloc[band]['name'].tolist()
    if export:
        georef = filenames.tnsr_hdr[:-4]
        _sqave_envy_tnsr(tnsr, export=export,
                         band_names=band_names,
                         georef=georef,
                         data_path=data_path,
                         slice_range=slice_range,
                         title="Normalized" if tnorm else ""
                         )
    else:
        _plt = preview_tnsr(tnsr,
                            band=band,
                            band_names=band_names,
                            hist=hist,
                            slice_range=slice_range,
                            columns=columns,
                            title="Normalized" if tnorm else "",
                            ylog=ylog
                            )
        _show_plt(_plt, save=save)
Example #15
def bkt_info(
        repo: Repo,
        task: Task,
        roi_id,
        less,
        # sort, limit, column, where,
        check,
        delta,
        product_id,
        platform):
    """ find pairs by given PRODUCT_ID

    \b
    PRODUCT_ID:  4-digit hex number (Sentinel product identifier, the last 4 symbols in the product name).
    PLATFORM:    like 'S1A' or 'S1B' to narrow search in case PRODUCT_ID is ambiguous
    """

    _id, _roi = resolve_roi(roi_id, repo)
    _m, _s = _task_ms(task)
    geometry = _roi['geometry']
    output.comment(f"active task master: {_m}")

    _df = pairs.load_from_cache(cache_file_name=(_cache_pairs_file_name(repo)))
    _df = _df.set_index('productId')
    try:
        _ds = _df.loc[product_id][['startDate', 'platform']]
        # print( _ds)
        if isinstance(_ds, DataFrame):
            # print(f"-----{len(_ds)}--------{type(_ds)}----------")
            if platform != '':
                _ds = _ds[_ds['platform'] == platform].loc[product_id]
                if isinstance(_ds, DataFrame):
                    raise OCLIException(
                        f"Could not resolve  '{product_id}' for platform {platform}"
                    )
            else:
                output.table(_ds,
                             headers=['PRODUCT_ID', 'startDate', 'platform'])
                # print( _ds)
                raise OCLIException(
                    f"Product ID {product_id} is ambiguous, use <PALTFORM> argument to narrow search "
                )
        ts, platform = _ds[['startDate', 'platform']]
        # print(f"----------- {ts}")
    except KeyError:
        raise OCLIException(f'Product id "{product_id}" not found')
    output.comment(
        f"Building bucket for product {product_id}, startDate={ts}")
    f = unitime_delta_factory(ts)
    _df['cycle_dt'] = _df['startDate'].apply(f)
    _df = _df[(_df['cycle_dt'] <= delta) & (_df['platform'] == platform)]

    cols = [
        'productId', 'cycle_dt', 'startDate', 'platform',
        'relativeOrbitNumber', 'polarisation', 'fit', 'task'
    ]
    try:
        if geometry.area == 0:
            raise AssertionError('ROI has zero area')
        _df['fit'] = _df['geometry'].intersection(
            geometry).area / geometry.area
        _df['task'] = ''
        _df = _df.reset_index()
        _df = _df.set_index('title')
        if _m in _df.index:
            _df.loc[_m, 'task'] = 'm'
        else:
            output.warning('Current task master not found in bucket')
        if _s in _df.index:
            _df.loc[_s, 'task'] = 's'
        else:
            output.warning('Current task slave not found in bucket')
        _df = _df.reset_index()
        _e, eodata = task.get_valid_key('eodata')

        def _ch_fs(b):
            _p = _local_eodata_relative_path(eodata, b)
            if os.path.isfile(os.path.join(_p, 'manifest.safe')):
                _m = os.path.join(_p, 'measurement')
                if os.path.isdir(_m):
                    return '+' if any(os.scandir(_m)) else '~'
            return ''

        if check and not _e:
            _df['exists'] = _df['productIdentifier'].apply(_ch_fs)
            cols += ['exists']

        _df = _df[cols]

    except AssertionError as e:
        raise RuntimeError(e)

    headers = ['#'] + cols
    output.table(
        _df,
        headers=headers,
    )
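A minimal sketch of the 'fit' measure computed above (intersection area over ROI area), using toy shapely boxes in place of real product footprints and ROI geometry:

from shapely.geometry import box

roi = box(0, 0, 10, 10)          # toy ROI
footprint = box(5, 0, 20, 10)    # toy product footprint
fit = footprint.intersection(roi).area / roi.area
print(fit)  # -> 0.5  (half of the ROI is covered)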