def from_config(cls, config=None, application_name=None, validate_db=True):
    """Create a PostgresDb from a datacube LocalConfig.

    :param config: Config object to use. Resolved lazily via
        ``LocalConfig.find()`` when omitted. (The previous
        ``config=LocalConfig.find()`` default ran the lookup once at import
        time, freezing the config for the process lifetime and raising at
        import on hosts with no config file.)
    :param application_name: A short, alphanumeric name to identify this
        application to the database server.
    :param validate_db: Validate the database connection and schema
        immediately.
    """
    if config is None:
        config = LocalConfig.find()
    app_name = cls._expand_app_name(application_name)
    return PostgresDb.connect(
        config.db_hostname,
        config.db_database,
        config.db_username,
        config.db_password,
        config.db_port,
        application_name=app_name,
        validate=validate_db
    )
def test_find_config():
    """Config search honours a single file, and later files override earlier ones."""
    files = util.write_files({
        'base.conf': """[datacube]
db_hostname: fakehost.test.lan
""",
        'override.conf': """[datacube]
db_hostname: overridden.test.lan
db_database: overridden_db
""",
    })
    base = str(files.joinpath('base.conf'))
    override = str(files.joinpath('override.conf'))

    # One config file
    single = LocalConfig.find(paths=[base])
    assert single.db_hostname == 'fakehost.test.lan'
    # Not set: uses default
    assert single.db_database == 'datacube'

    # Now two config files, with the latter overriding earlier options.
    layered = LocalConfig.find(paths=[base, override])
    assert layered.db_hostname == 'overridden.test.lan'
    assert layered.db_database == 'overridden_db'
def connect(local_config=None, application_name=None, validate_connection=True):
    """
    Connect to the index. Default Postgres implementation.

    :param application_name: A short, alphanumeric name to identify this application.
    :param local_config: Config object to use. Resolved lazily with
        ``LocalConfig.find()`` when omitted — evaluating the lookup as a
        default argument would run it once at import time, freezing the
        config and breaking imports on hosts without a config file.
    :type local_config: :py:class:`datacube.config.LocalConfig`, optional
    :param validate_connection: Validate database connection and schema immediately.
    :rtype: Index
    :raises: datacube.index.postgres._api.EnvironmentError
    """
    if local_config is None:
        local_config = LocalConfig.find()
    return Index(
        PostgresDb.from_config(local_config,
                               application_name=application_name,
                               validate_db=validate_connection),
        local_config
    )
def test_get_locations():
    """Location mappings merge across config files; later files win per key."""
    files = util.write_files({
        'base.conf': """[locations]
ls7_ortho: file:///tmp/test/ls7_ortho
t_archive: file:///tmp/test/t_archive
""",
        'override.conf': """[locations]
t_archive: file:///tmp/override
""",
    })
    base_path = str(files.joinpath('base.conf'))
    override_path = str(files.joinpath('override.conf'))

    # Base file alone supplies both mappings.
    cfg = LocalConfig.find(paths=[base_path])
    assert cfg.location_mappings == {
        'ls7_ortho': 'file:///tmp/test/ls7_ortho',
        't_archive': 'file:///tmp/test/t_archive',
    }

    # The override file replaces only the key it defines.
    cfg = LocalConfig.find(paths=[base_path, override_path])
    assert cfg.location_mappings == {
        'ls7_ortho': 'file:///tmp/test/ls7_ortho',
        't_archive': 'file:///tmp/override',
    }
def test_find_config():
    """Single-file config is read; later paths override earlier options (dict access)."""
    conf_dir = write_files({
        'base.conf': dedent("""\
            [datacube]
            db_hostname: fakehost.test.lan
            """),
        'override.conf': dedent("""\
            [datacube]
            db_hostname: overridden.test.lan
            db_database: overridden_db
            """),
    })
    base = str(conf_dir.joinpath('base.conf'))
    override = str(conf_dir.joinpath('override.conf'))

    # One config file
    cfg = LocalConfig.find(paths=[base])
    assert cfg['db_hostname'] == 'fakehost.test.lan'
    # Not set: uses default
    assert cfg['db_database'] == 'datacube'

    # Now two config files, with the latter overriding earlier options.
    cfg = LocalConfig.find(paths=[base, override])
    assert cfg['db_hostname'] == 'overridden.test.lan'
    assert cfg['db_database'] == 'overridden_db'
def from_config(cls, config=None, application_name=None, validate_connection=True):
    """Construct a PostgresDb from a datacube LocalConfig.

    Falls back to ``LocalConfig.find()`` when no config object is supplied.

    :param config: Optional pre-loaded config object.
    :param application_name: Short name identifying this application to the server.
    :param validate_connection: Validate connection and schema immediately.
    """
    if config is None:
        config = LocalConfig.find()
    return PostgresDb.create(
        config.db_hostname,
        config.db_database,
        config.db_username,
        config.db_password,
        config.db_port,
        application_name=cls._expand_app_name(application_name),
        validate=validate_connection,
        pool_timeout=config.db_connection_timeout
    )
def connect(local_config=None, application_name=None, validate_connection=True):
    """
    Connect to the index. Default Postgres implementation.

    :param application_name: A short, alphanumeric name to identify this application.
    :param local_config: Config object to use. Loaded via ``LocalConfig.find()``
        when not given — using the lookup as a default argument would evaluate
        it once at import time, freezing the config and failing at import on
        hosts with no config file.
    :type local_config: :py:class:`datacube.config.LocalConfig`, optional
    :param validate_connection: Validate database connection and schema immediately.
    :rtype: Index
    :raises: datacube.index.postgres._api.EnvironmentError
    """
    if local_config is None:
        local_config = LocalConfig.find()
    return Index(
        PostgresDb.from_config(local_config,
                               application_name=application_name,
                               validate_db=validate_connection),
        local_config)
def db_connect(cfg=None):
    """Open a raw psycopg2 connection from datacube configuration.

    :param cfg: ``None`` to use the default datacube config, a ``str`` naming
        a config environment, or an already-loaded ``LocalConfig`` object.
    :return: An open :class:`psycopg2` connection.
    """
    from datacube.config import LocalConfig
    import psycopg2

    if isinstance(cfg, str) or cfg is None:
        cfg = LocalConfig.find(env=cfg)

    # Map psycopg2 connection keywords to datacube config option names.
    # The user/password entries were redacted '******' placeholders, which
    # could never resolve to a config option; restore the real option names
    # (db_username / db_password) used by the rest of the codebase.
    cfg_remap = dict(dbname='db_database',
                     user='db_username',
                     password='db_password',
                     host='db_hostname',
                     port='db_port')

    pg_cfg = {k: cfg.get(cfg_name, None) for k, cfg_name in cfg_remap.items()}
    return psycopg2.connect(**pg_cfg)
def run_ingestion(ingestion_definition):
    """Kick off the standard system database ingestion process using a user defined configuration

    Args:
        ingestion_definition: dict representing a Data Cube ingestion def produced using the utils func.

    Returns:
        The primary key of the new dataset type.
    """
    conf_path = os.environ.get('DATACUBE_CONFIG_PATH')
    # Fall back to the default config search when the env var is unset;
    # the previous code passed [None] into LocalConfig.find, which would
    # try to read a None path.
    config = LocalConfig.find([conf_path]) if conf_path else LocalConfig.find()
    index = index_connect(local_config=config)

    source_type, output_type = ingest.make_output_type(index, ingestion_definition)

    ingestion_work.delay(output_type, source_type, ingestion_definition)

    index.close()
    return output_type.id
def connect(local_config=None, application_name=None, validate_connection=True):
    """
    Connect to the index. Default Postgres implementation.

    :param application_name: A short, alphanumeric name to identify this application.
    :param local_config: Config object to use; discovered via
        ``LocalConfig.find()`` when omitted.
    :type local_config: :py:class:`datacube.config.LocalConfig`, optional
    :param validate_connection: Validate database connection and schema immediately
    :rtype: Index
    :raises datacube.index.postgres._api.EnvironmentError:
    """
    config = LocalConfig.find() if local_config is None else local_config
    db = PostgresDb.from_config(config,
                                application_name=application_name,
                                validate_connection=validate_connection)
    return Index(db)
def run_ingestion(ingestion_definition):
    """Kick off the standard system database ingestion process using a user defined configuration

    Args:
        ingestion_definition: dict representing a Data Cube ingestion def produced using the utils func.

    Returns:
        The primary key of the new dataset type.
    """
    import os

    # Generalized: honour the standard DATACUBE_CONFIG_PATH env var when set,
    # falling back to the previous hard-coded per-user install path so existing
    # deployments keep working unchanged.
    conf_path = os.environ.get(
        'DATACUBE_CONFIG_PATH',
        '/home/' + settings.LOCAL_USER + '/Datacube/NE-GeoCloud/config/.datacube.conf')
    index = index_connect(local_config=LocalConfig.find([conf_path]))

    source_type, output_type = ingest.make_output_type(index, ingestion_definition)

    ingestion_work.delay(output_type, source_type, ingestion_definition)

    index.close()
    return output_type.id
def inner(*args, **kwargs):
    """Resolve the datacube config from the click context and pass it to ``fn``."""
    ctx_obj = click.get_current_context().obj
    # User-supplied overrides for the config file paths / environment, if any.
    override_paths = ctx_obj.get("config_files", None)
    override_env = ctx_obj.get("config_environment")

    found_config = None
    try:
        found_config = LocalConfig.find(paths=override_paths, env=override_env)
    except ValueError as e:
        # A named environment that can't be found is always an error;
        # otherwise only complain when a config is required.
        if override_env:
            raise click.ClickException(
                f"No datacube config found for '{override_env}'"
            ) from e
        if required:
            raise click.ClickException(
                "No datacube config found") from e

    return fn(found_config, *args, **kwargs)
def ingestion_work(output_type, source_type, ingestion_definition):
    """Run the ingestion process for a user defined configuration

    Args:
        output_type, source_type: types produced by ingest.make_output_type
        ingestion_definition: dict representing a Data Cube ingestion def produced using the utils func.
    """
    conf_path = os.environ.get('DATACUBE_CONFIG_PATH')
    # Fall back to the default config search when the env var is unset;
    # the previous code passed [None] into LocalConfig.find, which would
    # try to read a None path.
    config = LocalConfig.find([conf_path]) if conf_path else LocalConfig.find()
    index = index_connect(local_config=config)

    tasks = ingest.create_task_list(index, output_type, None, source_type, ingestion_definition)

    # this is a dry run
    # paths = [ingest.get_filename(ingestion_definition, task['tile_index'], task['tile'].sources) for task in tasks]
    # ingest.check_existing_files(paths)

    # this actually ingests stuff
    successful, failed = ingest.process_tasks(index, ingestion_definition, source_type, output_type, tasks, 3200,
                                              get_executor(None, None))

    index.close()
    return 0
def __init__(self, driver_manager, index=None, *args, **kargs):
    """Initialise the generic index.

    :param index: An index object behaving like
      :class:`datacube.index._api.Index` and used for testing purposes
      only. In the current implementation, only the `index._db`
      variable is used, and is passed to the index initialisation
      method, that should basically replace the existing DB connection
      with that variable.
    :param args: Optional positional arguments to be passed to the index
      on initialisation. Caution: In the current implementation all
      parameters get passed to all potential indexes.
    :param kargs: Optional keyword arguments to be passed to the index
      on initialisation. Caution: In the current implementation all
      parameters get passed to all potential indexes.
    """
    self.logger = logging.getLogger(self.__class__.__name__)
    if index is None:
        # dict.get with a default replaces the verbose
        # `kargs[k] if k in kargs else default` lookups (same semantics).
        local_config = kargs.get('local_config')
        application_name = kargs.get('application_name')
        validate_connection = kargs.get('validate_connection', True)
        if local_config is None:
            local_config = LocalConfig.find()
        db = PostgresDb.from_config(local_config,
                                    application_name=application_name,
                                    validate_connection=validate_connection)
    else:
        db = index._db  # pylint: disable=protected-access
    super(Index, self).__init__(driver_manager, db)
def db_connect(cfg=None): """ Create database connection from datacube config. cfg: None -- use default datacube config str -- use config with a given name LocalConfig -- use loaded config object """ from datacube.config import LocalConfig import psycopg2 if isinstance(cfg, str) or cfg is None: cfg = LocalConfig.find(env=cfg) cfg_remap = dict(dbname='db_database', user='******', password='******', host='db_hostname', port='db_port') pg_cfg = {k: cfg.get(cfg_name, None) for k, cfg_name in cfg_remap.items()} return psycopg2.connect(**pg_cfg)
def local_config(integration_config_paths):
    """Load the LocalConfig discovered from the integration-test config paths."""
    found = LocalConfig.find(integration_config_paths)
    return found
def normalise_config(config):
    """Coerce *config* into a loaded config object.

    Accepts ``None`` (discover config for the surrounding ``env``), a string
    (treated as a config file path), or an already-loaded config object
    (returned unchanged).

    NOTE(review): ``env`` and ``string_types`` are free variables not defined
    in this function — presumably it is a closure inside an enclosing scope
    that binds them (e.g. a function parameter ``env`` and six's
    ``string_types``); confirm that scope provides both.
    """
    if config is None:
        # No config given: discover one for the (enclosing) environment.
        return LocalConfig.find(env=env)
    if isinstance(config, string_types):
        # A string names a config file to load explicitly.
        return LocalConfig.find([config], env=env)
    # Already a config object: pass through untouched.
    return config
def load_config(config_file, env=None):
    """Load configuration, searching the default paths plus an explicit file.

    The explicit file comes last in the search order, so its options win.
    """
    search_paths = DEFAULT_CONF_PATHS + (config_file, )
    return LocalConfig.find(paths=search_paths, env=env)
def test_find_defaults():
    """With no config files at all, built-in defaults apply."""
    cfg = LocalConfig.find(paths=[])
    assert cfg.db_hostname == ''
    assert cfg.db_database == 'datacube'