def cli(
    config: LocalConfig,
    generate_all_products: bool,
    jobs: int,
    product_names: List[str],
    event_log_file: str,
    refresh_stats: bool,
    force_concurrently: bool,
    verbose: bool,
):
    """
    Generate summary files for the given products
    """
    init_logging(open(event_log_file, 'a') if event_log_file else None, verbose=verbose)

    index = _get_index(config, 'setup')
    store = SummaryStore.create(index, init_schema=True)

    if generate_all_products:
        products = sorted(store.all_dataset_types(), key=lambda p: p.name)
    else:
        products = list(_load_products(store.index, product_names))

    completed, failures = run_generation(
        config,
        products,
        workers=jobs,
    )
    if refresh_stats:
        echo("Refreshing statistics...", nl=False)
        store.refresh_stats(concurrently=force_concurrently)
        # click's `color` kwarg is a force-colour flag, not a colour name;
        # `fg` is the style argument that actually makes the text green.
        secho("done", fg='green')
        _LOG.info('stats.refresh')
    sys.exit(failures)
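# A minimal sketch (not taken from the source) of how a generate-style `cli`
# function like the one above is typically wired to the command line with
# click. The command name, option names, and defaults here are illustrative
# assumptions, not the tool's confirmed flags.
import click


@click.command("generate")
@click.option("--all", "generate_all_products", is_flag=True, default=False)
@click.option("-j", "--jobs", type=int, default=3)
@click.option("--event-log-file", type=str, default=None)
@click.option("--refresh-stats/--no-refresh-stats", "refresh_stats", default=True)
@click.option("-v", "--verbose", is_flag=True, default=False)
@click.argument("product_names", nargs=-1)
def generate_cli(generate_all_products, jobs, event_log_file, refresh_stats, verbose, product_names):
    # click parses the flags and passes them as the keyword arguments seen
    # in the real `cli` signature above.
    ...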
def cli(
    config: LocalConfig,
    allow_cache: bool,
    product_name: str,
    year: int,
    month: int,
    day: int,
    event_log_file: str,
    verbose: bool,
):
    """
    Print the recorded summary information for the given product
    """
    init_logging(open(event_log_file, 'a') if event_log_file else None, verbose=verbose)

    store = _get_store(config, 'setup')
    dataset_type = store.get_dataset_type(product_name)
    region_info = RegionInfo.for_product(dataset_type)

    t = time.time()
    if allow_cache:
        summary = store.get_or_update(product_name, year, month, day)
    else:
        summary = store.update(product_name, year, month, day)
    t_end = time.time()

    echo(f"{summary.dataset_count} ", nl=False)
    secho(product_name, nl=False, bold=True)
    echo(" datasets for ", nl=False)
    secho(f"{year or 'all'} {month or 'all'} {day or 'all'}", fg='blue')
    if summary.size_bytes is not None:
        echo(sizeof_fmt(summary.size_bytes))
    echo(f"{round(t_end - t, 2)} seconds")
    echo()
    if region_info is not None:
        echo(region_info.description)
        print_count_table(summary.region_dataset_counts)
def cli(
    config: LocalConfig,
    generate_all_products: bool,
    jobs: int,
    product_names: List[str],
    event_log_file: str,
    refresh_stats: bool,
    force_concurrently: bool,
    verbose: bool,
    init_database: bool,
    drop_database: bool,
    force_refresh: bool,
    recreate_dataset_extents: bool,
):
    """
    Generate summary files for the given products
    """
    init_logging(open(event_log_file, "a") if event_log_file else None, verbose=verbose)

    index = _get_index(config, "setup")
    store = SummaryStore.create(index)

    if drop_database:
        user_message("Dropping all Explorer additions to the database")
        store.drop_all()
        user_message("Done. Goodbye.")
        sys.exit(0)

    if init_database:
        user_message("Initialising schema")
        store.init()
    elif not store.is_initialised():
        user_message(
            style("No cubedash schema exists. ", fg="red")
            + "Please rerun with --init to create one",
        )
        sys.exit(-1)
    elif not store.is_schema_compatible():
        user_message(
            style("Cubedash schema is out of date. ", fg="red")
            + "Please rerun with --init to apply updates.",
        )
        sys.exit(-2)

    if generate_all_products:
        products = sorted(store.all_dataset_types(), key=lambda p: p.name)
    else:
        products = list(_load_products(store.index, product_names))

    completed, failures = run_generation(
        config,
        products,
        workers=jobs,
        force_refresh=force_refresh,
        recreate_dataset_extents=recreate_dataset_extents,
    )
    if refresh_stats:
        user_message("Refreshing statistics...", nl=False)
        store.refresh_stats(concurrently=force_concurrently)
        user_message("done", color="green")
        _LOG.info("stats.refresh")
    sys.exit(failures)
def cli(
    config: LocalConfig,
    generate_all_products: bool,
    jobs: int,
    product_names: List[str],
    event_log_file: str,
    refresh_stats: bool,
    force_concurrently: bool,
    verbose: bool,
    init_database: bool,
    force_refresh: bool,
    custom_crs_definition_file: str,
):
    """
    Generate summary files for the given products
    """
    init_logging(open(event_log_file, "a") if event_log_file else None, verbose=verbose)

    index = _get_index(config, "setup")
    store = SummaryStore.create(index)

    if custom_crs_definition_file:
        CustomCRSConfigHandlerSingleton().configure_database_with_custom_crs(index)

    if init_database:
        user_message("Initialising schema")
        store.init()
    elif not store.is_initialised():
        user_message(
            style("No cubedash schema exists. ", fg="red")
            + "Please rerun with --init to create one",
        )
        sys.exit(-1)
    elif not store.is_schema_compatible():
        user_message(
            style("Cubedash schema is out of date. ", fg="red")
            + "Please rerun with --init to apply updates.",
        )
        sys.exit(-2)

    if generate_all_products:
        products = sorted(store.all_dataset_types(), key=lambda p: p.name)
    else:
        products = list(_load_products(store.index, product_names))

    completed, failures = run_generation(
        config, products, workers=jobs, force_refresh=force_refresh
    )
    if refresh_stats:
        user_message("Refreshing statistics...", nl=False)
        store.refresh_stats(concurrently=force_concurrently)
        user_message("done", color="green")
        _LOG.info("stats.refresh")
    sys.exit(failures)
def cli(
    hostname: str,
    port: int,
    debug_mode: bool,
    workers: int,
    event_log_file: str,
    verbose: bool,
):
    from cubedash import app
    from cubedash.logs import init_logging

    # run_simple is werkzeug's development server (werkzeug.serving.run_simple);
    # the import is added here alongside the snippet's other local imports.
    from werkzeug.serving import run_simple

    init_logging(
        open(event_log_file, "a") if event_log_file else None, verbosity=verbose
    )

    if debug_mode:
        app.debug = True
    run_simple(hostname, port, app, use_reloader=debug_mode, processes=workers)
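# A minimal, self-contained sketch of the serving pattern the function above
# relies on: werkzeug's run_simple hosting a WSGI application. The Flask app,
# route, host, and port here are placeholders, not values from the source.
from flask import Flask
from werkzeug.serving import run_simple

demo_app = Flask(__name__)


@demo_app.route("/")
def index():
    return "ok"


if __name__ == "__main__":
    # use_reloader restarts the server on code changes (the debug-mode
    # behaviour above); processes forks that many worker processes.
    run_simple("localhost", 8080, demo_app, use_reloader=True, processes=1)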
def _init_logs(pytestconfig):
    logs.init_logging(verbose=pytestconfig.getoption("verbose") > 0)
def cli(
    config: LocalConfig,
    product_name: str,
    year: int,
    month: int,
    day: int,
    event_log_file: str,
    verbose: bool,
):
    """
    Print the recorded summary information for the given product
    """
    init_logging(
        open(event_log_file, "a") if event_log_file else None, verbosity=verbose
    )

    store = _get_store(config, "setup")

    t = time.time()
    summary = store.get(product_name, year, month, day)
    t_end = time.time()

    if not store.index.products.get_by_name(product_name):
        echo(f"Unknown product {product_name!r}", err=True)
        sys.exit(-1)

    product = store.get_product_summary(product_name)
    if product is None:
        echo(f"No info: product {product_name!r} has not been summarised", err=True)
        sys.exit(-1)

    secho(product_name, bold=True)
    echo()
    dataset_count = summary.dataset_count if summary else product.dataset_count
    echo(f"{dataset_count} datasets")

    if product.dataset_count:
        echo(f"from {product.time_earliest.isoformat()} ")
        echo(f"  to {product.time_latest.isoformat()} ")

    echo()
    if store.needs_extent_refresh(product_name):
        secho("Has changes", bold=True)

    echo(f"Last extent refresh: {product.last_refresh_time}")
    echo(f"Last summary completion: {product.last_successful_summary_time}")

    if product.fixed_metadata:
        echo()
        secho("Metadata", fg="blue")
        for k, v in product.fixed_metadata.items():
            echo(f"\t{k}: {v}")

    echo()
    secho(
        f"Period: {year or 'all-years'} {month or 'all-months'} {day or 'all-days'}",
        fg="blue",
    )
    if summary:
        if summary.size_bytes:
            echo(f"\tStorage size: {sizeof_fmt(summary.size_bytes)}")
        echo(f"\t{summary.dataset_count} datasets")
        echo(f"\tSummarised: {summary.summary_gen_time}")

        if summary.footprint_geometry:
            secho(f"\tFootprint area: {summary.footprint_geometry.area}")
            if not summary.footprint_geometry.is_valid:
                secho("\tInvalid Geometry", fg="red")
        else:
            secho("\tNo footprint")
    elif year or month or day:
        echo("\tNo summary for chosen period.")

    echo()
    echo(f"(fetched in {round(t_end - t, 2)} seconds)")
def _init_logs(pytestconfig):
    logs.init_logging(
        verbosity=pytestconfig.getoption("verbose"),
        cache_logger_on_first_use=False,
    )
def init_logs(pytestconfig):
    logs.init_logging(verbose=pytestconfig.getoption('verbose') > 0)
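# A minimal sketch (an assumption, not from the source) of how log-setup
# helpers like the ones above are typically registered in a conftest.py:
# as a session-scoped autouse fixture, so logging is configured once before
# any test runs. The fixture name here is illustrative.
import pytest

from cubedash import logs


@pytest.fixture(autouse=True, scope="session")
def init_logs_fixture(pytestconfig):
    # pytest's built-in `pytestconfig` fixture exposes parsed CLI options;
    # each `-v` increments the integer returned by getoption("verbose").
    logs.init_logging(verbose=pytestconfig.getoption("verbose") > 0)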