def generate_report(
    item: Tuple[LocalConfig, str, bool, bool]
) -> Tuple[str, Optional[TimePeriodOverview]]:
    """
    Worker entry point: refresh and re-summarise a single product.

    Returns the product name and its updated summary, or ``None`` on failure.
    """
    config, product_name, force_refresh, recreate_dataset_extents = item
    log = _LOG.bind(
        product=product_name,
        force=force_refresh,
        extents=recreate_dataset_extents,
    )
    store = SummaryStore.create(_get_index(config, product_name), log=log)
    try:
        product = store.index.products.get_by_name(product_name)
        if product is None:
            raise ValueError(f"Unknown product: {product_name}")

        # A negative window means "everything is stale": forces a full refresh.
        if force_refresh:
            refresh_window = timedelta(minutes=-1)
        else:
            refresh_window = timedelta(days=1)

        log.info("generate.product.refresh")
        store.refresh_product(
            product,
            refresh_older_than=refresh_window,
            force_dataset_extent_recompute=recreate_dataset_extents,
        )
        log.info("generate.product.refresh.done")

        log.info("generate.product")
        updated = store.get_or_update(product.name, force_refresh=force_refresh)
        log.info("generate.product.done")
        return product_name, updated
    except Exception:
        log.exception("generate.product.error", exc_info=True)
        return product_name, None
    finally:
        # Always release the db connection, even on error.
        store.index.close()
def generate_report(item: Tuple[LocalConfig, str, bool]):
    """
    Worker entry point: refresh and re-summarise a single product.

    Returns the product name and its updated summary, or ``None`` on failure.
    """
    config, product_name, force_refresh = item
    log = _LOG.bind(product=product_name)
    store = SummaryStore.create(_get_index(config, product_name), log=log)
    try:
        product = store.index.products.get_by_name(product_name)
        if product is None:
            raise ValueError(f"Unknown product: {product_name}")

        # If we're going to force things, we need a time that will always update
        refresh_time = timedelta(minutes=-1) if force_refresh else timedelta(days=1)

        log.info("generate.product.refresh")
        store.refresh_product(product, refresh_older_than=refresh_time)
        log.info("generate.product.refresh.done")

        log.info("generate.product")
        updated = store.get_or_update(product.name, None, None, None, force_refresh)
        log.info("generate.product.done")
        return product_name, updated
    except Exception:
        log.exception("generate.product.error", exc_info=True)
        return product_name, None
    finally:
        # Always release the db connection, even on error.
        store.index.close()
def generate_report(item):
    """
    Worker entry point: refresh and re-summarise a single product.

    Returns the product name and its summary, or ``None`` on failure.
    """
    config: LocalConfig
    product_name: str
    config, product_name = item

    log = _LOG.bind(product=product_name)
    store = SummaryStore.create(_get_index(config, product_name), log=log)
    try:
        product = store.index.products.get_by_name(product_name)
        if product is None:
            raise ValueError(f"Unknown product: {product_name}")

        log.info('generate.product.refresh')
        store.refresh_product(product)
        log.info('generate.product.refresh.done')

        log.info('generate.product')
        summary = store.get_or_update(product.name, None, None, None)
        log.info('generate.product.done')
        return product_name, summary
    except Exception:
        log.exception('generate.product.error', exc_info=True)
        return product_name, None
    finally:
        # Always release the db connection, even on error.
        store.index.close()
def cli(config: LocalConfig,
        generate_all_products: bool,
        jobs: int,
        product_names: List[str],
        event_log_file: str,
        refresh_stats: bool,
        force_concurrently: bool,
        verbose: bool):
    """
    Generate summary files for the given products
    """
    init_logging(open(event_log_file, 'a') if event_log_file else None, verbose=verbose)

    index = _get_index(config, 'setup')
    store = SummaryStore.create(index, init_schema=True)

    # Either summarise every known product, or just the ones named on the cli.
    if generate_all_products:
        products = sorted(store.all_dataset_types(), key=lambda p: p.name)
    else:
        products = list(_load_products(store.index, product_names))

    completed, failures = run_generation(
        config,
        products,
        workers=jobs,
    )
    if refresh_stats:
        echo("Refreshing statistics...", nl=False)
        store.refresh_stats(concurrently=force_concurrently)
        # Bug fix: click's secho() colours text via `fg=`; the `color=` kwarg
        # only toggles whether styling is emitted at all, so "done" was never
        # actually printed in green.
        secho("done", fg='green')
        _LOG.info('stats.refresh')
    # Exit code is the number of products that failed to generate.
    sys.exit(failures)
def summary_store(module_dea_index: Index) -> SummaryStore:
    # Test fixture: a freshly initialised SummaryStore over the module-scoped index.
    store = SummaryStore.create(module_dea_index)
    # Start from a clean slate: remove any leftover cubedash schema.
    store.drop_all()
    # NOTE(review): the index is closed here, yet `store` (and the call below)
    # keep using it — presumably connections are re-opened lazily; confirm.
    module_dea_index.close()
    store.init()
    # Unlogged tables skip the WAL, speeding up test runs (durability not needed).
    _make_all_tables_unlogged(_utils.alchemy_engine(module_dea_index), CUBEDASH_METADATA)
    return store
def cli(
    config: LocalConfig,
    generate_all_products: bool,
    jobs: int,
    product_names: List[str],
    event_log_file: str,
    refresh_stats: bool,
    force_concurrently: bool,
    verbose: bool,
    init_database: bool,
    drop_database: bool,
    force_refresh: bool,
    recreate_dataset_extents: bool,
):
    """Command-line entry point: (re)generate product summary files."""
    log_output = open(event_log_file, "a") if event_log_file else None
    init_logging(log_output, verbose=verbose)

    store = SummaryStore.create(_get_index(config, "setup"))

    # Destructive option: remove Explorer's schema entirely, then stop.
    if drop_database:
        user_message("Dropping all Explorer additions to the database")
        store.drop_all()
        user_message("Done. Goodbye.")
        sys.exit(0)

    # The schema must exist and be current before we can generate anything.
    if init_database:
        user_message("Initialising schema")
        store.init()
    elif not store.is_initialised():
        user_message(
            style("No cubedash schema exists. ", fg="red")
            + "Please rerun with --init to create one",
        )
        sys.exit(-1)
    elif not store.is_schema_compatible():
        user_message(
            style("Cubedash schema is out of date. ", fg="red")
            + "Please rerun with --init to apply updates.",
        )
        sys.exit(-2)

    # Either summarise every known product, or just the ones named on the cli.
    if generate_all_products:
        products = sorted(store.all_dataset_types(), key=lambda p: p.name)
    else:
        products = list(_load_products(store.index, product_names))

    completed, failures = run_generation(
        config,
        products,
        workers=jobs,
        force_refresh=force_refresh,
        recreate_dataset_extents=recreate_dataset_extents,
    )
    if refresh_stats:
        user_message("Refreshing statistics...", nl=False)
        store.refresh_stats(concurrently=force_concurrently)
        user_message("done", color="green")
        _LOG.info("stats.refresh")
    # Exit code is the number of products that failed to generate.
    sys.exit(failures)
def eo3_index(module_dea_index: Index, dataset_loader):
    """Index fixture preloaded with one EO3 level-1 and one ARD dataset."""
    for product, dataset in (
        ("usgs_ls5t_level1_1", TEST_EO3_DATASET_L1),
        ("ga_ls5t_ard_3", TEST_EO3_DATASET_ARD),
    ):
        assert dataset_loader(product, dataset) == 1

    # We need postgis and some support tables (eg. srid lookup).
    SummaryStore.create(module_dea_index).init()
    return module_dea_index
def eo3_index(module_dea_index: Index, dataset_loader):
    """Index fixture preloaded with one EO3 level-1 and one ARD dataset."""
    for product, metadata_path in (
        (
            "usgs_ls5t_level1_1",
            TEST_DATA_DIR
            / "LT05_L1TP_113081_19880330_20170209_01_T1.odc-metadata.yaml",
        ),
        (
            "ga_ls5t_ard_3",
            TEST_DATA_DIR
            / "ga_ls5t_ard_3-1-20200605_113081_1988-03-30_final.odc-metadata.yaml",
        ),
    ):
        assert dataset_loader(product, metadata_path) == 1

    # We need postgis and some support tables (eg. srid lookup).
    SummaryStore.create(module_dea_index).init()
    return module_dea_index
def cli(
    config: LocalConfig,
    generate_all_products: bool,
    jobs: int,
    product_names: List[str],
    event_log_file: str,
    refresh_stats: bool,
    force_concurrently: bool,
    verbose: bool,
    init_database: bool,
    force_refresh: bool,
    custom_crs_definition_file: str,
):
    """
    Generate summary files for the given products
    """
    log_stream = open(event_log_file, "a") if event_log_file else None
    init_logging(log_stream, verbose=verbose)

    index = _get_index(config, "setup")
    store = SummaryStore.create(index)

    # Register any user-supplied CRS definitions before touching the schema.
    if custom_crs_definition_file:
        CustomCRSConfigHandlerSingleton().configure_database_with_custom_crs(index)

    # The schema must exist and be current before we can generate anything.
    if init_database:
        user_message("Initialising schema")
        store.init()
    elif not store.is_initialised():
        user_message(
            style("No cubedash schema exists. ", fg="red")
            + "Please rerun with --init to create one",
        )
        sys.exit(-1)
    elif not store.is_schema_compatible():
        user_message(
            style("Cubedash schema is out of date. ", fg="red")
            + "Please rerun with --init to apply updates.",
        )
        sys.exit(-2)

    # Either summarise every known product, or just the ones named on the cli.
    products = (
        sorted(store.all_dataset_types(), key=lambda p: p.name)
        if generate_all_products
        else list(_load_products(store.index, product_names))
    )

    completed, failures = run_generation(
        config, products, workers=jobs, force_refresh=force_refresh
    )
    if refresh_stats:
        user_message("Refreshing statistics...", nl=False)
        store.refresh_stats(concurrently=force_concurrently)
        user_message("done", color="green")
        _LOG.info("stats.refresh")
    # Exit code is the number of products that failed to generate.
    sys.exit(failures)
def generate_report(
    item: Tuple[str, GenerateSettings],
) -> Tuple[str, GenerateResult, Optional[TimePeriodOverview]]:
    """
    Worker entry point: refresh one product's summaries.

    Returns (product name, generation result, updated summary-or-None).
    """
    product_name, settings = item
    log = _LOG.bind(product=product_name)

    years_begun = set()

    def _announce_year(product_name=None, year=None, month=None, day=None, summary=None):
        """Print status each time we start a year."""
        if year and (product_name, year) not in years_begun:
            user_message(f"\t {product_name} {year}")
            years_begun.add((product_name, year))

    store = SummaryStore.create(_get_index(settings.config, product_name), log=log)
    store.add_change_listener(_announce_year)
    try:
        if store.index.products.get_by_name(product_name) is None:
            raise ValueError(f"Unknown product: {product_name}")

        user_message(f"{product_name} refresh")
        result, updated_summary = store.refresh(
            product_name,
            force=settings.force_refresh,
            recreate_dataset_extents=settings.recreate_dataset_extents,
            reset_incremental_position=settings.reset_incremental_position,
            minimum_change_scan_window=settings.minimum_change_scan_window,
        )
        return product_name, result, updated_summary
    except UnsupportedWKTProductCRS as e:
        # Known limitation, not an error: report it distinctly.
        log.warning("product.unsupported", reason=e.reason)
        return product_name, GenerateResult.UNSUPPORTED, None
    except Exception:
        log.exception("product.error", exc_info=True)
        return product_name, GenerateResult.ERROR, None
    finally:
        # Always release the db connection, even on error.
        store.index.close()
def generate_report(
    item: Tuple[LocalConfig, str, bool, bool]
) -> Tuple[str, GenerateResult, Optional[TimePeriodOverview]]:
    """
    Worker entry point: refresh one product's summaries.

    Returns (product name, generation result, updated summary-or-None).
    """
    config, product_name, force_refresh, recreate_dataset_extents = item
    log = _LOG.bind(
        product=product_name,
        force=force_refresh,
        extents=recreate_dataset_extents,
    )

    years_begun = set()

    def _announce_year(product_name=None, year=None, month=None, day=None, summary=None):
        """Print status each time we start a year."""
        if year and (product_name, year) not in years_begun:
            user_message(f"\t {product_name} {year}")
            years_begun.add((product_name, year))

    store = SummaryStore.create(_get_index(config, product_name), log=log)
    store.add_change_listener(_announce_year)
    try:
        if store.index.products.get_by_name(product_name) is None:
            raise ValueError(f"Unknown product: {product_name}")

        user_message(f"{product_name} refresh")
        result, updated_summary = store.refresh(
            product_name,
            force=force_refresh,
            recreate_dataset_extents=recreate_dataset_extents,
        )
        return product_name, result, updated_summary
    except Exception:
        log.exception("generate.product.error", exc_info=True)
        return product_name, GenerateResult.ERROR, None
    finally:
        # Always release the db connection, even on error.
        store.index.close()
def _get_store(config: LocalConfig, variant: str, log=_LOG) -> SummaryStore:
    """Open a SummaryStore for the given config, tagging the db connection with the variant name."""
    # validate_connection=False: don't open a real connection until first use.
    db_index: Index = index_connect(
        config,
        application_name=f"cubedash.show.{variant}",
        validate_connection=False,
    )
    return SummaryStore.create(db_index, log=log)
@themer.current_theme_loader
def get_current_theme():
    # The active theme name is read from flask config on each lookup.
    return app.config["CUBEDASH_THEME"]


# The theme can set its own default config options.
with (Path(app.root_path) / "themes" / themer.current_theme / "info.json").open("r") as f:
    for key, value in json.load(f)["defaults"].items():
        # setdefault: explicit app config always wins over theme defaults.
        app.config.setdefault(key, value)

# Thread and multiprocess safe.
# As long as we don't run queries (ie. open db connections) before forking
# (hence validate=False).
STORE: SummaryStore = SummaryStore.create(
    index_connect(application_name=NAME, validate_connection=False))

# Which product to show by default when loading '/'. Picks the first available.
DEFAULT_START_PAGE_PRODUCTS = app.config.get("CUBEDASH_DEFAULT_PRODUCTS") or (
    "ls7_nbar_scene",
    "ls5_nbar_scene",
)

_LOG = structlog.get_logger()


# Memoised for 60s so repeated page loads don't re-query summaries.
# NOTE(review): signature continues beyond this view of the file.
@cache.memoize(timeout=60)
def get_time_summary(
    product_name: str,
    year: Optional[int] = None,
    month: Optional[int] = None,
def summary_store(module_dea_index: Index) -> SummaryStore:
    # Test fixture: a clean, freshly initialised SummaryStore.
    store = SummaryStore.create(module_dea_index)
    # Start from a clean slate: remove any leftover cubedash schema.
    store.drop_all()
    # NOTE(review): the index is closed here, yet store.init() below keeps
    # using it — presumably the connection re-opens lazily; confirm.
    module_dea_index.close()
    store.init()
    return store
def summary_store(module_dea_index: Index) -> SummaryStore:
    """Fixture: wipe any existing cubedash schema, then return a freshly initialised store."""
    # First connection skips schema creation so we can drop what's there.
    SummaryStore.create(module_dea_index, init_schema=False).drop_all()
    module_dea_index.close()
    # Second connection recreates the schema from scratch.
    store = SummaryStore.create(module_dea_index, init_schema=True)
    return store