Code example #1
0
 def test_refresh_materialized_view(self, session, Article, User,
                                    ArticleMV):
     """The materialized view reflects a committed article after a refresh."""
     author = User(name='Some user')
     session.add(Article(name='Some article', author=author))
     session.commit()

     # The MV does not update on its own; an explicit refresh is required.
     refresh_materialized_view(session, 'article_mv')

     row = session.query(ArticleMV).first()
     assert row.name == 'Some article'
     assert row.author_name == 'Some user'
Code example #2
0
def publish(blends):
    """Execute publish task and catalog datacube result.

    Args:
        blends - list of blend activity dicts; the first entry supplies the
            batch-wide identifiers ('datacube', 'warped_datacube', 'tile_id',
            'period', 'cloudratio') shared by every blend result.
    """
    logging.warning('Executing publish')

    cube = Collection.query().filter(
        Collection.id == blends[0]['datacube']).first()
    warped_datacube = blends[0]['warped_datacube']
    tile_id = blends[0]['tile_id']
    period = blends[0]['period']
    cloudratio = blends[0]['cloudratio']

    # Retrieve which bands to generate quick look
    quick_look_bands = cube.bands_quicklook.split(',')

    merges = dict()
    blend_files = dict()

    for blend_result in blends:
        blend_files[blend_result['band']] = blend_result['blends']

        # When a cloud count file exists it is published under the synthetic
        # 'cnc' band, identically for both MED and STK composites.
        if blend_result.get('cloud_count_file'):
            blend_files['cnc'] = dict(MED=blend_result['cloud_count_file'],
                                      STK=blend_result['cloud_count_file'])

        # Group the per-scene ARD files of every band result by merge date.
        for merge_date, definition in blend_result['scenes'].items():
            merges.setdefault(
                merge_date,
                dict(dataset=definition['dataset'],
                     cloudratio=definition['cloudratio'],
                     ARDfiles=dict()))
            merges[merge_date]['ARDfiles'].update(definition['ARDfiles'])

    # Generate quick looks for cube scenes
    publish_datacube(cube, quick_look_bands, cube.id, tile_id, period,
                     blend_files, cloudratio)

    # Generate quick looks of irregular cube
    wcube = Collection.query().filter(Collection.id == warped_datacube).first()

    for merge_date, definition in merges.items():
        date = merge_date.replace(definition['dataset'], '')

        publish_merge(quick_look_bands, wcube, definition['dataset'], tile_id,
                      period, date, definition)

    try:
        refresh_materialized_view(db.session, AssetMV.__table__)
        db.session.commit()
        logging.info('View refreshed.')
    except Exception:
        # Refreshing the view is best-effort: roll back rather than failing
        # the whole publish task — but never swallow the error silently.
        logging.exception('Could not refresh materialized view.')
        db.session.rollback()
Code example #3
0
def refresh_assets_view(refresh_on_aws=True):
    """Update the Brazil Data Cube Assets View.

    Args:
        refresh_on_aws (bool): also refresh the view on the AWS database
            connection. Defaults to True.
    """
    if not Config.ENABLE_REFRESH_VIEW:
        logging.info('Skipping refresh view.')
        return

    refresh_materialized_view(db.session, AssetMV.__table__)
    commit(db)

    if refresh_on_aws:
        refresh_materialized_view(db_aws.session, AssetMV.__table__)
        # Bug fix: commit the AWS connection whose session was just
        # refreshed; the local `db` was already committed above.
        commit(db_aws)

    logging.info('View refreshed.')
Code example #4
0
def publish(self, activity):
    """Publish a composed datacube activity: quicklooks, S3 uploads, catalog rows.

    Generates quicklook PNGs for the function cubes (MED/STK) and for every
    warped ARD scene, uploads them to S3, then re-creates the matching
    CollectionItem and Asset database records and finally refreshes the
    assets materialized view.

    Args:
        activity: dict describing the activity. Keys read here include
            'datacube', 'tileid', 'start', 'end', 'quicklook', 'blended',
            'scenes', 'bands', 'dirname', 'cloudratio' and raster/chunk
            sizes — shape inferred from usage; confirm against the producer.

    Returns:
        bool: True when everything was published; False when a quicklook
        could not be generated.
    """
    print('==> start PUBLISH')
    services = self.services

    # Start timestamp is recorded on the activity and persisted below.
    activity['mystart'] = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    # Warped (irregular) cube id = first two '_'-separated tokens of the name.
    warped_cube = '_'.join(activity['datacube'].split('_')[0:2])

    # Generate quicklooks for CUBES (MEDIAN, STACK ...)
    qlbands = activity['quicklook'].split(',')
    for function in ['MED', 'STK']:
        cube_id = get_cube_id(activity['datacube'], function)
        general_scene_id = '{}_{}_{}_{}'.format(
            cube_id, activity['tileid'], activity['start'], activity['end'])

        qlfiles = []
        for band in qlbands:
            qlfiles.append(services.prefix + activity['blended'][band][function + 'file'])

        pngname = generateQLook(general_scene_id, qlfiles)
        # Quicklook is stored under the function cube directory, not the
        # warped cube one — swap the path prefix accordingly.
        dirname_ql = activity['dirname'].replace(
            '{}/'.format(warped_cube), '{}/'.format(cube_id))
        if pngname is None:
            print('publish - Error generateQLook for {}'.format(general_scene_id))
            return False
        s3pngname = os.path.join(dirname_ql, '{}_{}'.format(activity['start'], activity['end']), os.path.basename(pngname))
        services.upload_file_S3(pngname, s3pngname, {'ACL': 'public-read'})
        # Delete the local file once it is safely on S3.
        os.remove(pngname)

    # Generate quicklooks for all ARD scenes (WARPED)
    for datedataset in activity['scenes']:
        scene = activity['scenes'][datedataset]

        cube_id = get_cube_id(activity['datacube'])
        general_scene_id = '{}_{}_{}'.format(
            cube_id, activity['tileid'], str(scene['date'])[0:10])
        qlfiles = []
        for band in qlbands:
            filename = os.path.join(services.prefix + activity['dirname'], str(scene['date'])[0:10], scene['ARDfiles'][band])
            qlfiles.append(filename)

        pngname = generateQLook(general_scene_id, qlfiles)
        if pngname is None:
            print('publish - Error generateQLook for {}'.format(general_scene_id))
            return False
        s3pngname = os.path.join(activity['dirname'], str(scene['date'])[0:10], os.path.basename(pngname))
        services.upload_file_S3(pngname, s3pngname, {'ACL': 'public-read'})
        os.remove(pngname)

    # register collection_items and assets in DB (MEDIAN, STACK ...)
    for function in ['MED', 'STK']:
        cube_id = '{}_{}'.format(activity['datacube'], function)
        cube = Collection.query().filter(
            Collection.id == cube_id
        ).first()
        if not cube:
            print('cube {} not found!'.format(cube_id))
            continue

        general_scene_id = '{}_{}_{}_{}'.format(
            cube_id, activity['tileid'], activity['start'], activity['end'])

        # delete collection_items and assets if exists (idempotent re-publish)
        assets = Asset.query().filter(
            Asset.collection_item_id == general_scene_id
        ).all()
        for asset in assets:
            db.session().delete(asset)
            db.session().commit()

        coll_item = CollectionItem.query().filter(
            CollectionItem.id == general_scene_id
        ).first()
        if coll_item:
            db.session().delete(coll_item)
            db.session().commit()

        # insert 'collection_item'
        range_date = '{}_{}'.format(activity['start'], activity['end'])
        png_name = '{}.png'.format(general_scene_id)
        dirname_ql = activity['dirname'].replace(
            '{}/'.format(warped_cube), '{}/'.format(cube_id))
        s3_pngname = os.path.join(dirname_ql, range_date, png_name)
        CollectionItem(
            id=general_scene_id,
            collection_id=cube_id,
            grs_schema_id=cube.grs_schema_id,
            tile_id=activity['tileid'],
            item_date=activity['start'],
            composite_start=activity['start'],
            composite_end=activity['end'],
            quicklook='{}/{}'.format(BUCKET_NAME, s3_pngname),
            cloud_cover=activity['cloudratio'],
            scene_type=function,
            compressed_file=None
        ).save()

        # insert 'assets'
        bands_by_cube = Band.query().filter(
            Band.collection_id == cube_id
        ).all()
        for band in activity['bands']:
            # The 'quality' band has no blended product to register.
            if band == 'quality': 
                continue
            band_id = list(filter(lambda b: str(b.common_name) == band, bands_by_cube))
            if not band_id:
                print('band {} not found!'.format(band))
                continue

            Asset(
                collection_id=cube_id,
                band_id=band_id[0].id,
                grs_schema_id=cube.grs_schema_id,
                tile_id=activity['tileid'],
                collection_item_id=general_scene_id,
                url='{}/{}'.format(BUCKET_NAME, activity['blended'][band][function + 'file']),
                source=None,
                raster_size_x=activity['raster_size_x'],
                raster_size_y=activity['raster_size_y'],
                raster_size_t=1,
                chunk_size_x=activity['chunk_size_x'],
                chunk_size_y=activity['chunk_size_y'],
                chunk_size_t=1
            ).save()

    # Register all ARD scenes - WARPED Collection
    for datedataset in activity['scenes']:
        scene = activity['scenes'][datedataset]

        cube_id = get_cube_id(activity['datacube'])
        cube = Collection.query().filter(
            Collection.id == cube_id
        ).first()
        if not cube:
            print('cube {} not found!'.format(cube_id))
            continue

        general_scene_id = '{}_{}_{}'.format(
            cube_id, activity['tileid'], str(scene['date'])[0:10])

        # delete 'assets' and 'collection_items' if exists
        assets = Asset.query().filter(
            Asset.collection_item_id == general_scene_id
        ).all()
        for asset in assets:
            db.session().delete(asset)
            db.session().commit()

        coll_item = CollectionItem.query().filter(
            CollectionItem.id == general_scene_id
        ).first()
        if coll_item:
            db.session().delete(coll_item)
            db.session().commit()

        # insert 'collection_item'
        pngname = '{}.png'.format(general_scene_id)
        s3pngname = os.path.join(activity['dirname'], str(scene['date'])[0:10], pngname)
        CollectionItem(
            id=general_scene_id,
            collection_id=cube_id,
            grs_schema_id=cube.grs_schema_id,
            tile_id=activity['tileid'],
            item_date=scene['date'],
            composite_start=scene['date'],
            composite_end=scene['date'],
            quicklook='{}/{}'.format(BUCKET_NAME, s3pngname),
            cloud_cover=int(scene['cloudratio']),
            scene_type='WARPED',
            compressed_file=None
        ).save()

        # insert 'assets'
        bands_by_cube = Band.query().filter(
            Band.collection_id == cube_id
        ).all()
        for band in activity['bands']:
            if band not in scene['ARDfiles']:
                print('publish - problem - band {} not in scene[files]'.format(band))
                continue
            band_id = list(filter(lambda b: str(b.common_name) == band, bands_by_cube))
            if not band_id:
                print('band {} not found!'.format(band))
                continue

            # Prefer per-scene raster/block sizes; fall back to the
            # activity-level values when the scene does not carry its own.
            raster_size_x = scene['raster_size_x'] if scene.get('raster_size_x') else activity.get('raster_size_x')
            raster_size_y = scene['raster_size_y'] if scene.get('raster_size_y') else activity.get('raster_size_y')
            block_size = scene['block_size'] if scene.get('block_size') else activity.get('block_size')
            Asset(
                collection_id=cube_id,
                band_id=band_id[0].id,
                grs_schema_id=cube.grs_schema_id,
                tile_id=activity['tileid'],
                collection_item_id=general_scene_id,
                url='{}/{}'.format(BUCKET_NAME, os.path.join(activity['dirname'], str(scene['date'])[0:10], scene['ARDfiles'][band])),
                source=None,
                raster_size_x=raster_size_x,
                raster_size_y=raster_size_y,
                raster_size_t=1,
                chunk_size_x=block_size,
                chunk_size_y=block_size,
                chunk_size_t=1
            ).save()

    # Update status and end time in DynamoDB
    activity['myend'] = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    activity['mystatus'] = 'DONE'
    services.put_item_kinesis(activity)

    # Make the freshly inserted assets visible through the materialized view.
    refresh_materialized_view(db.session, AssetMV.__table__)
    return True
Code example #5
0
                                [
                                    Article.id, Article.name,
                                    User.id.label('author_id'),
                                    User.name.label('author_name')
                                ],
                                from_obj=(Article.__table__.join(
                                    User, Article.author_id == User.id))),
                            metadata=Base.metadata)


# Recreate the schema from scratch so the checks below run on empty tables.
Base.metadata.drop_all(engine)
Base.metadata.create_all(engine)

# Test
session = Session()

# Materialized view: new rows only appear after an explicit refresh.
article = Article(name='Some article', author=User(name='Some user'))
session.add(article)
session.commit()
refresh_materialized_view(session, 'article_mv')
materialized = session.query(ArticleMV).first()
assert materialized.name == 'Some article'
assert materialized.author_name == 'Some user'

# Plain view: committed rows are visible immediately, no refresh needed.
article = Article(name='Some article', author=User(name='Some user'))
session.add(article)
session.commit()
row = session.query(ArticleView).first()
assert row.name == 'Some article'
assert row.author_name == 'Some user'
Code example #6
0
def refresh_otu_sample_otu_mv(session, mv):
    """Refresh the given OTU/sample-OTU materialized view on *session*."""
    refresh_materialized_view(session, mv)
Code example #7
0
def refresh_lastest_man_day_id():
    """Refresh the materialized view holding the latest man-day ID.

    Uses a concurrent refresh so readers are not blocked while the view
    is rebuilt.

    :return: None
    """
    # NOTE(review): 'lastest' is a typo for 'latest', but both the function
    # name and the database view name are external interface — keep as-is.
    refresh_materialized_view(db.session, 'lastest_man_day_id', concurrently=True)