Example #1
0
def update_recipe(task: Task, recipe):
    """Populate *recipe* in place for a cluster task.

    Applies the cluster template, resolves stack files from the task's stack
    directory, copies predictor/COS settings from the task config, and merges
    the predictor JSON config on top.

    :param task: task providing the stack path and config values
    :param recipe: dict-like recipe, updated in place
    :return: list of error strings; empty list means success
    """
    errors = []

    deep_update(recipe, RECIPE_CLUSTER_TPL)

    files = []
    try:
        path = task.get_stack_path(full=True)
    except AssertionError as e:
        errors.append(str(e))
        return errors
    # A missing stack directory is not fatal: resolve against an empty listing.
    if os.path.isdir(path):
        files = os.listdir(path)
    __resolve_files(FILE_PATTERN, files, recipe)
    recipe['PREDICTOR_DIR'] = task.config['predictor']
    recipe['COS']['bucket'] = task.config['cos_bucket']
    try:
        with open(task.get_predictor_config_file_name(), 'r') as _f:
            j = json.load(_f)
            deep_update(recipe, j)  # DO NOT INLINE j
    except JSONDecodeError as e:
        errors.append(f'Predictor config JSON is invalid, reason: {e}')
    except OSError as e:
        # BUG FIX: a missing/unreadable predictor config previously escaped as
        # an uncaught exception; report it via the error list like other failures.
        errors.append(f'Could not read predictor config, reason: {e}')
    except (RuntimeError, AssertionError) as e:
        errors.append(str(e))

    return errors
Example #2
0
def _task_ms(task: Task) -> (str, str):
    """Return the task's (master, slave) keys; (None, None) if resolution fails.

    Slave is only looked up for 'cluster' tasks; otherwise it is None.
    """
    try:
        task.resolve()
        _, master = task.get_valid_key('master')
        slave = None
        if task.kind == 'cluster':
            _, slave = task.get_valid_key('slave')
        return master, slave
    except RuntimeError as e:
        log.debug(str(e))
        return None, None
Example #3
0
def resolve_recipe(repo: Repo, task: Task, roi_id):
    """Locate the AI recipe file for *task* and *roi_id* and return its path.

    :raises click.UsageError: when the task is invalid or the recipe file
        does not exist on disk
    """
    try:
        task.resolve()
        _, roi = resolve_roi(roi_id, repo)
        task_recipe = TaskRecipe(task=task)
        recipe_file = task_recipe.get_ai_recipe_name(roi['name'])
        if not os.path.isfile(recipe_file):
            raise RuntimeError(f"task recipe file {recipe_file} not found")
        log.info(f'recipe resolved via task: {recipe_file}')
        return recipe_file
    except AssertionError as e:
        raise click.UsageError(f'Task is invalid, reason: {e}')
    except RuntimeError as e:
        raise click.UsageError(str(e))
Example #4
0
def task_stack_snap(task: Task, dry_run, gpt_cache, cmd_dir, log):
    # TODO http://remote-sensing.eu/preprocessing-of-sentinel-1-sar-data-via-snappy-python-module/
    """ Run master-slave Stacking

    """
    snap_path = task.get_stack_path(full=True)
    log(f"Using ESA SNAP processing pipeline in  {snap_path}")
    os.makedirs(snap_path, exist_ok=True)

    script = os.path.join(cmd_dir, 'local-snap.sh')
    eodata = task.config['eodata']
    _docker_mount = 'mnt'
    cfg = task.config
    opts = [
        script,
        '--gpt-cache', gpt_cache,
        '--eodata', eodata,
        '--snap_results', snap_path,
        '--swath', cfg['swath'],
        '--firstBurstIndex', cfg['firstBurstIndex'],
        '--lastBurstIndex', cfg['lastBurstIndex'],
        '--master', _local_eodata_relative_path(eodata, cfg['master_path']),
        '--slave', _local_eodata_relative_path(eodata, cfg['slave_path']),
    ]

    if dry_run:
        log("Command:")
        opts.append('--dry-run')
    # trailing space kept so boolean flags parse correctly in the shell script
    opts.append(' ')
    subprocess.run(opts)
Example #5
0
def ai_preview_stack(repo: Repo, task: Task, roi_id, recipe_path, slice_range,
                     show_list,
                     # rgb,
                     band, columns, clip, hist, save, export, ylog):
    """ Preview assembled tensor band

        ** use --clip <minl> <max> to apply np.log10(np.clip(.., 10**min, 10**max)) to stack values

        \b
        * Windows WSL: follow  https://www.scivision.dev/pyqtmatplotlib-in-windows-subsystem-for-linux/
    """
    # NOTE(review): the `export` parameter is accepted but never used in this
    # body -- confirm whether export support was meant to be wired in here.
    try:
        # Prefer an explicitly supplied recipe file, else resolve one via the task.
        _recipe = recipe_path if recipe_path else resolve_recipe(repo, task, roi_id)
        recipe = Recipe(_recipe)
        _dir = recipe.get('DATADIR')
    except (RuntimeError, AssertionError, click.UsageError) as e:
        # Recipe could not be resolved: fall back to the task's stack directory.
        output.comment(f"Could not resolve recipe {e}, fall-back to task")
        try:
            _dir = task.get_stack_path('snap_path')
        except AssertionError as e:
            raise click.UsageError(f'Could not get stack path: {e}')
    except Exception as e:
        # Any unexpected failure: log with traceback, surface as a CLI error.
        log.exception("Could not resolve Stack results")
        raise click.UsageError('Could not resolve Stack results')
    output.comment(f"Stack dir: {_dir}\n\n")
    full_shape, df = get_stack_df(_dir)
    if show_list:
        output.table(df[['filename', 'resolution', 'path']], showindex=True,
                     headers=['band', 'name', 'resolution', 'path'])
    else:
        try:
            # if rgb:
            #     if len(rgb) != 3:
            #         raise AssertionError('rgb', '--rgb should contain exactly 3 digits without spaces')
            #     band = (int(rgb[0]), int(rgb[1]), int(rgb[2]))
            # band starting with -1 means "preview all bands"; otherwise use
            # the requested band indices as-is.
            if band[0] == -1:
                band = list(range(0, len(df)))
            else:
                band = list(band)
            _ds = df.iloc[band]  # type: gpd.GeoDataFrame
            output.table(_ds, showindex=True)
            _plt = preview_stack(_ds, _dir,
                                 full_shape=full_shape,
                                 slice_region=slice_range,
                                 band=band,
                                 clip=clip,
                                 columns=columns,
                                 hist=hist,
                                 ylog=ylog
                                 )
            _show_plt(_plt, save=save)
        except AssertionError as e:
            log.exception(e)
            raise click.UsageError(str(e))
Example #6
0
def ai_preview_stack_math(repo: Repo, task: Task, roi_id, recipe_path, slice_range, show_list,
                          band1, band2, band3,
                          vis_mode, data_path, save, export, hist, ylog):
    """ Band math for stack

    {common}
    """
    if not data_path:
        # Resolve the stack directory: explicit/resolved recipe first,
        # task stack_results as a fall-back.
        try:
            _recipe = recipe_path if recipe_path else resolve_recipe(repo, task, roi_id)
            recipe = Recipe(_recipe)
            data_path = recipe.get('DATADIR')
            # BUG FIX: report the recipe actually used (_recipe), not
            # recipe_path, which is empty when resolved via the task.
            output.comment(f'Using recipe file "{_recipe}"')
        except (RuntimeError, AssertionError, click.UsageError):
            output.comment('Using stack from task stack_results')
            try:
                data_path = task.get_stack_path('snap_path')
                if not os.path.isdir(data_path):
                    raise AssertionError(f'Directory "{data_path}" does not exist')
            except AssertionError as e:
                raise click.UsageError(f'Could not get stack_results: {e}')

    output.comment(f"Stack dir: {data_path}\n\n")
    full_shape, df = get_stack_df(data_path)
    if show_list:
        output.table(df, showindex=True)
    else:
        title, (r, g, b) = create_stack_rgb(band1, band2, band3,
                                            df=df,
                                            vis_mode=vis_mode,
                                            slice_range=slice_range,
                                            )

        if export:
            # Georeference the exported ENVI file against band1's source file.
            georef = df.iloc[band1].path
            _save_envi_rgb(r, g, b, export=export,
                           georef=georef, data_path=data_path, slice_range=slice_range,
                           title=title
                           )
        else:
            _plt = _vis_rgb(r, g, b, title, hist, ylog)
            _show_plt(_plt, save)
Example #7
0
def create_task(task: Task):
    """
    create all task params

    Builds the task config from the TASK_HEAD/TASK_KIND templates, overlays
    matching tsar defaults, tags it as 'cluster' and fills in naming patterns.

    :param task: task whose ``config`` attribute is replaced
    :return: None
    """
    config = {**TASK_HEAD, **TASK_KIND}
    # get defaults
    try:
        _task_items = get_tsar_defaults(task, 'task')
        for key, value in _task_items.items():
            # only override keys the template actually declares
            if key in config:
                config[key] = value
    except AssertionError as e:
        output.warning(str(e))

    # BUG FIX: config.get('tag', []).append(...) appended to a throwaway list
    # whenever 'tag' was missing; setdefault stores the list in config first.
    config.setdefault('tag', []).append('cluster')
    config['friendly_name'] = "{project}/{name}/{m_completionDate:%Y%m%d}"
    config[
        'cos_key'] = "{project}/{kind}/{name}/{m_completionDate:%Y/%m/%d}/{m_id}_{s_id}_{swath}_{firstBurstIndex}_{lastBurstIndex}_{predictor}"
    config['template'] = __name__
    config['template_version'] = 0.1
    task.config = config
Example #8
0
def _bkt_info(
    repo: Repo,
    task: Task,
    geometry: Polygon,
    bucket_name: str,
    sort: tuple,
    limit: int,
    column: tuple,
    where: str,
    check=False,
) -> (GeoDataFrame, list):
    """Build the product listing for one bucket intersecting *geometry*.

    Selects a bucket by numeric group index or by name, lists its products,
    and (for a loaded task) annotates master/slave rows and optionally local
    file presence.

    :raises click.BadArgumentUsage: for a zero-area ROI
    :raises AssertionError: when no products match the ROI
    :raises OCLIException: when the bucket is not found

    NOTE(review): the annotation declares a 2-tuple but the function returns
    three values (bname, _ds, column names) -- confirm and fix the annotation.
    """
    if geometry.area == 0:
        raise click.BadArgumentUsage('ROI has zero area')
    cache_file_name = _cache_pairs_file_name(repo)
    # TODO check ROI exists

    _df = pairs.load_from_cache(cache_file_name=cache_file_name,
                                geometry=geometry)

    _bk = bucket.create_list(_df, buckets_dir='')

    if _bk is None or _bk.empty:
        raise AssertionError(f'No products could be found for ROI ')
    # A numeric bucket_name selects the N-th bucket group; otherwise match by name.
    if bucket_name.isnumeric():
        _t = _bk.groupby('bucket', as_index=False).ngroup()
        _df = _bk[_t == int(bucket_name)]
    else:
        _df = _bk[_bk['bucket'] == bucket_name]
    _f = list(column)
    _ds = _list_products(_df, where=where, sort=list(sort), limit=limit)
    _m, _s = _task_ms(task)
    # log.error(f"{_m} {_s}")
    if task.loaded:
        if _m or _s:

            # Mark rows matching the task's master ('m') and/or slave ('s') title.
            def _ms(b):
                _x = 'm' if _m == b else ' '
                _x += 's' if _s == b else ' '
                return _x

            _ds['task'] = _ds['title'].apply(_ms)
            _f += ['task']
        _e, eodata = task.get_valid_key('eodata')

        # TODO other formats parsers: Sentinel-1.ZIP ,TOPSAR
        # '+' = manifest and non-empty measurement dir present locally,
        # '~' = manifest present but measurement dir missing/empty,
        # ''  = product not found on local storage.
        def _ch_fs(b):
            _p = _local_eodata_relative_path(eodata, b)
            if os.path.isfile(os.path.join(_p, 'manifest.safe')):
                _m = os.path.join(_p, 'measurement')
                if os.path.isdir(_m):
                    return '+' if any(os.scandir(_m)) else '~'
            return ''

        # NOTE(review): presumably _e is an error flag from get_valid_key, so
        # the filesystem check runs only when 'eodata' is valid -- confirm.
        if check and not _e:
            _ds['exists'] = _ds['productIdentifier'].apply(_ch_fs)
            _f += ['exists']
            pass

        # _ds = _ds.reindex(['task', *_f], axis=1, copy=False)
        # output.comment(f"Task '{_tname}' applied\n\n")
        headers = ['#', 'task', *_f]  # NOTE(review): assigned but never used
    if _df.empty:
        raise OCLIException(f"bucket {bucket_name} not found")
    bname = _df.iloc[0]['bucket']
    _ds = _ds[_f]
    # remove injected title if not in columns
    return bname, _ds, _ds.columns.to_list()
Example #9
0
def bkt_info(
        repo: Repo,
        task: Task,
        roi_id,
        less,
        # sort, limit, column, where,
        check,
        delta,
        product_id,
        platform):
    """ find pairs by given PRODUCT_ID

    \b
    PRODUCT_ID:  4-digits hex number (Sentinel product identifier, last 4 symbols in product name).
    PLATFORM:    like 'S1A' or 'S1B' to narrow search in case PRODUCT_ID is ambiguous
    """
    # NOTE(review): the `less` parameter is accepted but never used here.

    _id, _roi = resolve_roi(roi_id, repo)
    _m, _s = _task_ms(task)
    geometry = _roi['geometry']
    output.comment(f"active task master: {_m}")

    _df = pairs.load_from_cache(cache_file_name=(_cache_pairs_file_name(repo)))
    _df = _df.set_index('productId')
    try:
        _ds = _df.loc[product_id][['startDate', 'platform']]
        # .loc returns a DataFrame (not a Series) when product_id matched
        # several rows -- i.e. the PRODUCT_ID is ambiguous.
        if isinstance(_ds, DataFrame):
            if platform != '':
                # Narrow by platform; if still a DataFrame, it cannot be resolved.
                _ds = _ds[_ds['platform'] == platform].loc[product_id]
                if isinstance(_ds, DataFrame):
                    raise OCLIException(
                        f"Could not resolve  '{product_id}' for platform {platform}"
                    )
            else:
                # Show the candidates, then ask the user to disambiguate.
                output.table(_ds,
                             headers=['PRODUCT_ID', 'startDate', 'platform'])
                # NOTE(review): "PALTFORM" typo in the user-facing message below.
                raise OCLIException(
                    f"Product ID {product_id} is ambiguous, use <PALTFORM> argument to narrow search "
                )
        # Single match: take its start date and (re-bind) its platform.
        ts, platform = _ds[['startDate', 'platform']]
    except KeyError:
        raise OCLIException(f'Product id "{product_id}" not found')
    output.comment(
        f"Building bucket for product {product_id} , startDate={ts}")
    # Keep only products on the same platform within `delta` of the cycle time.
    f = unitime_delta_factory(ts)
    _df['cycle_dt'] = _df['startDate'].apply(f)
    _df = _df[(_df['cycle_dt'] <= delta) & (_df['platform'] == platform)]

    cols = [
        'productId', 'cycle_dt', 'startDate', 'platform',
        'relativeOrbitNumber', 'polarisation', 'fit', 'task'
    ]
    try:
        if geometry.area == 0:
            raise AssertionError('ROI has zero area')
        # 'fit' = fraction of the ROI covered by each product's footprint.
        _df['fit'] = _df['geometry'].intersection(
            geometry).area / geometry.area
        _df['task'] = ''
        _df = _df.reset_index()
        _df = _df.set_index('title')
        # Mark the active task's master/slave rows when present in the bucket.
        if _m in _df.index:
            _df.loc[_m, 'task'] = 'm'
        else:
            output.warning('Current task master not found in bucket')
        if _s in _df.index:
            _df.loc[_s, 'task'] = 's'
        else:
            output.warning('Current task slave  not found in bucket')
        _df = _df.reset_index()
        _e, eodata = task.get_valid_key('eodata')

        # '+' = downloaded with non-empty measurement dir, '~' = manifest only,
        # ''  = not present on local storage.
        def _ch_fs(b):
            _p = _local_eodata_relative_path(eodata, b)
            if os.path.isfile(os.path.join(_p, 'manifest.safe')):
                _m = os.path.join(_p, 'measurement')
                if os.path.isdir(_m):
                    return '+' if any(os.scandir(_m)) else '~'
            return ''

        # NOTE(review): runs only when 'eodata' key is valid (`not _e`) -- confirm.
        if check and not _e:
            _df['exists'] = _df['productIdentifier'].apply(_ch_fs)
            cols += ['exists']
        pass

        _df = _df[cols]

    except AssertionError as e:
        raise RuntimeError(e)

    headers = ['#'] + cols
    output.table(
        _df,
        headers=headers,
    )